diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 9e83961662..4d644f4bef 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -25,14 +25,16 @@ jobs: ) ) steps: - - uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6 + - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4 id: app-token with: - app-id: ${{ vars.APP_ID }} + app-id: ${{ vars.BACKPORT_APP_ID }} private-key: ${{ secrets.PRIVATE_KEY }} permission-contents: write # push branch to Github permission-pull-requests: write # create PR / add comment for manual backport permission-workflows: write # modify files in .github/workflows - - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4 + - uses: pylint-dev/backport@94367840595495e101f9a31415897c05da1f08d9 # v2.1.1 with: github_token: ${{ steps.app-token.outputs.token }} + user_name: ${{ vars.BACKPORT_USER_NAME }} + user_email: ${{ vars.BACKPORT_USER_EMAIL }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7c4e1b57c8..98edfb7ed5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -5,7 +5,8 @@ on: branches: - main - 2.* - pull_request: ~ + pull_request: + workflow_dispatch: env: CACHE_VERSION: 3 @@ -23,11 +24,13 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 20 steps: - - name: Check out code from GitHub - uses: actions/checkout@v4.2.2 - - name: Set up Python ${{ env.DEFAULT_PYTHON }} + - &checkout + name: Check out code from GitHub + uses: actions/checkout@v5.0.0 + - &setup-python-default + name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v6.0.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -37,9 +40,10 @@ jobs: echo "key=base-venv-${{ env.CACHE_VERSION }}-${{ hashFiles('pyproject.toml', 'requirements_dev.txt', 'requirements_full.txt', 'requirements_minimal.txt') 
}}" >> $GITHUB_OUTPUT - - name: Restore Python virtual environment + - &cache-python + name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.3.0 with: path: venv key: >- @@ -50,7 +54,7 @@ jobs: run: | python -m venv venv . venv/bin/activate - python -m pip install -U pip setuptools wheel + python -m pip install -U pip pip install -U -r requirements_full.txt - name: Generate pre-commit restore key id: generate-pre-commit-key @@ -59,7 +63,7 @@ jobs: hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT - name: Restore pre-commit environment id: cache-precommit - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.3.0 with: path: ${{ env.PRE_COMMIT_CACHE }} key: >- @@ -72,7 +76,7 @@ jobs: - name: Run pre-commit checks run: | . venv/bin/activate - pre-commit run pylint --all-files + pre-commit run --hook-stage manual pylint-ci --all-files tests-linux: name: tests / run / ${{ matrix.python-version }} / Linux @@ -81,17 +85,17 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.9, "3.10", "3.11", "3.12", "3.13"] + python-version: &matrix-python-version ["3.10", "3.11", "3.12", "3.13", "3.14"] outputs: python-key: ${{ steps.generate-python-key.outputs.key }} steps: - - name: Check out code from GitHub - uses: actions/checkout@v4.2.2 + - *checkout - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v6.0.0 with: python-version: ${{ matrix.python-version }} + allow-prereleases: true check-latest: true - name: Install Qt if: ${{ matrix.python-version == '3.10' }} @@ -104,14 +108,7 @@ jobs: echo "key=${{ env.KEY_PREFIX }}-${{ env.CACHE_VERSION }}-${{ hashFiles('pyproject.toml', 'requirements_dev.txt', 'requirements_full.txt', 'requirements_minimal.txt') }}" >> $GITHUB_OUTPUT - - name: Restore Python virtual environment - id: cache-venv - uses: actions/cache@v4.2.2 - with: - path: venv - key: >- - ${{ runner.os }}-${{ 
steps.python.outputs.python-version }}-${{ - steps.generate-python-key.outputs.key }} + - *cache-python - name: Create Python virtual environment if: steps.cache-venv.outputs.cache-hit != 'true' run: | @@ -125,7 +122,7 @@ jobs: . venv/bin/activate pytest --cov - name: Upload coverage artifact - uses: actions/upload-artifact@v4.6.1 + uses: &actions-upload-artifact actions/upload-artifact@v4.6.2 with: name: coverage-linux-${{ matrix.python-version }} path: .coverage @@ -135,44 +132,38 @@ jobs: name: tests / run / ${{ matrix.python-version }} / Windows runs-on: windows-latest timeout-minutes: 20 - needs: tests-linux + needs: [tests-linux] strategy: fail-fast: false matrix: - python-version: [3.9, "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] steps: - name: Set temp directory run: echo "TEMP=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV # Workaround to set correct temp directory on Windows # https://github.com/actions/virtual-environments/issues/712 - - name: Check out code from GitHub - uses: actions/checkout@v4.2.2 - - name: Set up Python ${{ matrix.python-version }} + - *checkout + - &setup-python-matrix + name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v6.0.0 with: python-version: ${{ matrix.python-version }} + allow-prereleases: true check-latest: true - name: Generate partial Python venv restore key id: generate-python-key run: >- echo "key=${{ env.KEY_PREFIX }}-${{ env.CACHE_VERSION }}-${{ hashFiles('pyproject.toml', 'requirements_dev.txt', - 'requirements_full.txt', 'requirements_minimal.txt') }}" >> $GITHUB_OUTPUT - - name: Restore Python virtual environment - id: cache-venv - uses: actions/cache@v4.2.2 - with: - path: venv - key: >- - ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ - steps.generate-python-key.outputs.key }} + 'requirements_full.txt', 'requirements_minimal.txt') }}" >> $env:GITHUB_OUTPUT + - 
*cache-python - name: Create Python virtual environment if: steps.cache-venv.outputs.cache-hit != 'true' run: | python -m venv venv . venv\\Scripts\\activate - python -m pip install -U pip setuptools wheel + python -m pip install -U pip pip install -U -r requirements_full.txt pip install -e . - name: Run pytest @@ -180,7 +171,7 @@ jobs: . venv\\Scripts\\activate pytest --cov - name: Upload coverage artifact - uses: actions/upload-artifact@v4.6.1 + uses: *actions-upload-artifact with: name: coverage-windows-${{ matrix.python-version }} path: .coverage @@ -194,36 +185,23 @@ jobs: fail-fast: false matrix: # We only test on the lowest and highest supported PyPy versions - python-version: ["pypy3.9", "pypy3.10"] + python-version: ["pypy3.10"] steps: - - name: Check out code from GitHub - uses: actions/checkout@v4.2.2 - - name: Set up Python ${{ matrix.python-version }} - id: python - uses: actions/setup-python@v5.4.0 - with: - python-version: ${{ matrix.python-version }} - check-latest: true + - *checkout + - *setup-python-matrix - name: Generate partial Python venv restore key id: generate-python-key run: >- echo "key=${{ env.KEY_PREFIX }}-${{ env.CACHE_VERSION }}-${{ hashFiles('pyproject.toml', 'requirements_minimal.txt') }}" >> $GITHUB_OUTPUT - - name: Restore Python virtual environment - id: cache-venv - uses: actions/cache@v4.2.2 - with: - path: venv - key: >- - ${{ runner.os }}-${{ matrix.python-version }}-${{ - steps.generate-python-key.outputs.key }} + - *cache-python - name: Create Python virtual environment if: steps.cache-venv.outputs.cache-hit != 'true' run: | python -m venv venv . venv/bin/activate - python -m pip install -U pip setuptools wheel + python -m pip install -U pip pip install -U -r requirements_minimal.txt pip install -e . - name: Run pytest @@ -231,7 +209,7 @@ jobs: . 
venv/bin/activate pytest --cov - name: Upload coverage artifact - uses: actions/upload-artifact@v4.6.1 + uses: *actions-upload-artifact with: name: coverage-pypy-${{ matrix.python-version }} path: .coverage @@ -243,23 +221,17 @@ jobs: timeout-minutes: 10 needs: ["tests-linux", "tests-windows", "tests-pypy"] steps: - - name: Check out code from GitHub - uses: actions/checkout@v4.2.2 - - name: Set up Python 3.13 - id: python - uses: actions/setup-python@v5.4.0 - with: - python-version: "3.13" - check-latest: true + - *checkout + - *setup-python-default - name: Install dependencies run: pip install -U -r requirements_minimal.txt - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v5.0.0 - name: Combine Linux coverage results run: | coverage combine coverage-linux*/.coverage coverage xml -o coverage-linux.xml - - uses: codecov/codecov-action@v5 + - uses: &actions-codecov codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} fail_ci_if_error: true @@ -270,7 +242,7 @@ jobs: run: | coverage combine coverage-windows*/.coverage coverage xml -o coverage-windows.xml - - uses: codecov/codecov-action@v5 + - uses: *actions-codecov with: token: ${{ secrets.CODECOV_TOKEN }} fail_ci_if_error: true @@ -281,7 +253,7 @@ jobs: run: | coverage combine coverage-pypy*/.coverage coverage xml -o coverage-pypy.xml - - uses: codecov/codecov-action@v5 + - uses: *actions-codecov with: token: ${{ secrets.CODECOV_TOKEN }} fail_ci_if_error: true diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index fc639bf325..9b30ac2a97 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -46,7 +46,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v5.0.0 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/release-tests.yml b/.github/workflows/release-tests.yml deleted file mode 100644 index 11a6e2b384..0000000000 --- a/.github/workflows/release-tests.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: Release tests - -on: workflow_dispatch - -permissions: - contents: read - -jobs: - virtualenv-15-windows-test: - # Regression test added in https://github.com/pylint-dev/astroid/pull/1386 - name: Regression test for virtualenv==15.1.0 on Windows - runs-on: windows-latest - timeout-minutes: 5 - steps: - - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 - - name: Set up Python 3.9 - id: python - uses: actions/setup-python@v5.1.1 - with: - # virtualenv 15.1.0 cannot be installed on Python 3.10+ - python-version: 3.9 - env: - PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org" - - name: Create Python virtual environment with virtualenv==15.1.0 - env: - PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org" - run: | - python -m pip install virtualenv==15.1.0 - python -m virtualenv venv2 - . venv2\scripts\activate - python -m pip install pylint - python -m pip install -e . - - name: Test no import-error from distutils.util - run: | - . 
venv2\scripts\activate - echo "import distutils.util # pylint: disable=unused-import" > test.py - pylint test.py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6d55570b23..2c6cbdbee9 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,10 +18,10 @@ jobs: if: github.event_name == 'release' steps: - name: Check out code from Github - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v5.0.0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v6.0.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -34,7 +34,7 @@ jobs: run: | python -m build - name: Upload release assets - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: release-assets path: dist/ @@ -50,7 +50,7 @@ jobs: id-token: write steps: - name: Download release assets - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v5.0.0 with: name: release-assets path: dist/ @@ -67,13 +67,13 @@ jobs: id-token: write steps: - name: Download release assets - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v5.0.0 with: name: release-assets path: dist/ - name: Sign the dists with Sigstore and upload assets to Github release if: github.event_name == 'release' - uses: sigstore/gh-action-sigstore-python@v3.0.0 + uses: sigstore/gh-action-sigstore-python@v3.0.1 with: inputs: | ./dist/*.tar.gz diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b93a92e822..70095b1af7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,17 +3,16 @@ ci: repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: v6.0.0 hooks: - id: trailing-whitespace exclude: .github/|tests/testdata - id: end-of-file-fixer exclude: tests/testdata - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.11.4" + rev: "v0.13.2" hooks: - - id: ruff - exclude: 
tests/testdata + - id: ruff-check args: ["--fix"] - repo: https://github.com/Pierre-Sassoulas/copyright_notice_precommit rev: 0.1.2 @@ -23,18 +22,18 @@ repos: exclude: tests/testdata|setup.py types: [python] - repo: https://github.com/asottile/pyupgrade - rev: v3.19.1 + rev: v3.20.0 hooks: - id: pyupgrade exclude: tests/testdata - args: [--py39-plus] + args: [--py310-plus] - repo: https://github.com/Pierre-Sassoulas/black-disable-checker/ rev: v1.1.3 hooks: - id: black-disable-checker exclude: tests/test_nodes_lineno.py - repo: https://github.com/psf/black - rev: 25.1.0 + rev: 25.9.0 hooks: - id: black args: [--safe, --quiet] @@ -42,6 +41,20 @@ repos: - repo: local hooks: - id: pylint + name: pylint + entry: pylint + language: system + types: [python] + args: [ + "-rn", + "-sn", + "--rcfile=pylintrc", + # "--load-plugins=pylint.extensions.docparams", We're not ready for that + ] + # We define an additional manual step to allow running pylint + # with the proper output for CI. + - id: pylint + alias: pylint-ci name: pylint entry: pylint language: system @@ -53,25 +66,21 @@ repos: "--output-format=github", # "--load-plugins=pylint.extensions.docparams", We're not ready for that ] - exclude: tests/testdata|conf.py + stages: [manual] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.15.0 + rev: v1.18.2 hooks: - id: mypy - name: mypy - entry: mypy language: python - types: [python] - args: [] + pass_filenames: false require_serial: true additional_dependencies: ["types-typed-ast"] - exclude: tests/testdata| # exclude everything, we're not ready - repo: https://github.com/rbubley/mirrors-prettier - rev: v3.5.3 + rev: v3.6.2 hooks: - id: prettier args: [--prose-wrap=always, --print-width=88] - repo: https://github.com/tox-dev/pyproject-fmt - rev: "v2.5.1" + rev: "v2.6.0" hooks: - id: pyproject-fmt diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 45db79de53..67068c0c04 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -32,17 +32,22 @@ Contributors 
------------ - Emile Anclin - Nick Drozd +- correctmost <134317971+correctmost@users.noreply.github.com> - Andrew Haigh - Julien Cristau +- Artem Yurchenko <44875844+temyurchenko@users.noreply.github.com> - David Liu - Alexandre Fayolle - Eevee (Alex Munroe) - David Gilman - Tushar Sadhwani +- Matus Valo - Julien Jehannet +- Hugo van Kemenade - Calen Pennington - Antonio -- Hugo van Kemenade +- Akhil Kamat +- Zen Lee <53538590+zenlyj@users.noreply.github.com> - Tim Martin - Phil Schaf - Alex Hall @@ -50,12 +55,13 @@ Contributors - Radosław Ganczarek - Paligot Gérard - Ioana Tagirta +- Eric Vergnaud - Derek Gustafson - David Shea - Daniel Harding - Christian Clauss -- correctmost <134317971+correctmost@users.noreply.github.com> - Ville Skyttä +- Synrom <30272537+Synrom@users.noreply.github.com> - Rene Zhang - Philip Lorenz - Nicolas Chauvat @@ -69,6 +75,7 @@ Contributors - Dani Alcala <112832187+clavedeluna@users.noreply.github.com> - Adrien Di Mascio - tristanlatr <19967168+tristanlatr@users.noreply.github.com> +- grayjk - emile@crater.logilab.fr - doranid - brendanator @@ -78,12 +85,14 @@ Contributors - Stefan Scherfke - Sergei Lebedev <185856+superbobry@users.noreply.github.com> - Saugat Pachhai (सौगात) +- Robert Hofer <1058012+hofrob@users.noreply.github.com> - Ram Rachum - Pierre-Yves David - Peter Pentchev - Peter Kolbus - Omer Katz - Moises Lopez +- Mitch Harding - Michal Vasilek - Keichi Takahashi - Kavins Singh @@ -92,6 +101,7 @@ Contributors - John Vandenberg - Jacob Bogdanov - Google, Inc. 
+- Emmanuel Ferdman - David Euresti - David Douard - David Cain @@ -99,18 +109,20 @@ Contributors - Anthony Sottile - Alexander Shadchin - wgehalo +- tejaschauhan36912 <59693377+tejaschauhan36912@users.noreply.github.com> - rr- - raylu - plucury +- pavan-msys <149513767+pavan-msys@users.noreply.github.com> - ostr00000 - noah-weingarden <33741795+noah-weingarden@users.noreply.github.com> - nathannaveen <42319948+nathannaveen@users.noreply.github.com> - mathieui - markmcclain - ioanatia -- grayjk - alm - adam-grant-hendry <59346180+adam-grant-hendry@users.noreply.github.com> +- aatle <168398276+aatle@users.noreply.github.com> - Zbigniew Jędrzejewski-Szmek - Zac Hatfield-Dodds - Vilnis Termanis @@ -119,6 +131,7 @@ Contributors - Tomas Novak - Thirumal Venkat - SupImDos <62866982+SupImDos@users.noreply.github.com> +- Stéphane Brunner - Stanislav Levin - Simon Hewitt - Serhiy Storchaka @@ -133,10 +146,10 @@ Contributors - Oleh Prypin - Nicolas Noirbent - Neil Girdhar -- Mitch Harding - Miro Hrončok - Michał Masłowski - Mateusz Bysiek +- Matej Aleksandrov - Marcelo Trylesinski - Leandro T. C. Melo - Konrad Weihmann @@ -167,7 +180,6 @@ Contributors - Francis Charette Migneault - Felix Mölder - Federico Bond -- Eric Vergnaud - DudeNr33 <3929834+DudeNr33@users.noreply.github.com> - Dmitry Shachnev - Denis Laxalde @@ -183,6 +195,7 @@ Contributors - Cole Robinson - Christoph Reiter - Chris Philip +- Charlie Ringström <34444482+Chasarr@users.noreply.github.com> - BioGeek - Bianca Power <30207144+biancapower@users.noreply.github.com> - Benjamin Elven <25181435+S3ntinelX@users.noreply.github.com> @@ -201,7 +214,6 @@ Contributors - Alexander Presnyakov - Ahmed Azzaoui - Co-Author --------- The following persons were credited manually but did not commit themselves diff --git a/ChangeLog b/ChangeLog index 0d7dfb5ef7..df6f72e53a 100644 --- a/ChangeLog +++ b/ChangeLog @@ -3,17 +3,155 @@ astroid's ChangeLog =================== -What's New in astroid 3.4.0? 
+What's New in astroid 4.1.0? ============================ Release date: TBA + +What's New in astroid 4.0.4? +============================ +Release date: TBA + + + +What's New in astroid 4.0.3? +============================ +Release date: 2026-01-03 + +* Fix inference of ``IfExp`` (ternary expression) nodes to avoid prematurely narrowing + results in the face of inference ambiguity. + + Closes #2899 + +* Fix base class inference for dataclasses using the PEP 695 typing syntax. + + Refs pylint-dev/pylint#10788 + + +What's New in astroid 4.0.2? +============================ +Release date: 2025-11-09 + +* Handle FunctionDef blockstart_tolineno edge cases correctly. + + Refs #2880 + +* Add ``HTTPMethod`` enum support to brain module for Python 3.11+. + + Refs pylint-dev/pylint#10624 + Closes #2877 + +What's New in astroid 4.0.1? +============================ +Release date: 2025-10-11 + +* Suppress ``SyntaxWarning`` for invalid escape sequences and return in finally on + Python 3.14 when parsing modules. + +* Assign ``Import`` and ``ImportFrom`` nodes to module locals if used with ``global``. + + Closes pylint-dev/pylint#10632 + + +What's New in astroid 4.0.0? +============================ +Release date: 2025-10-05 + +* Support constraints from ternary expressions in inference. + + Closes pylint-dev/pylint#9729 + +* Handle deprecated `bool(NotImplemented)` cast in const nodes. + +* Add support for boolean truthiness constraints (`x`, `not x`) in inference. + + Closes pylint-dev/pylint#9515 + +* Fix false positive `invalid-name` on `attrs` classes with `ClassVar` annotated variables. + + Closes pylint-dev/pylint#10525 + +* Prevent crash when parsing deeply nested parentheses causing MemoryError in python's built-in ast. + + Closes #2643 + +* Fix crash when inferring namedtuple with invalid field name looking like f-string formatting. + + Closes #2519 + +* Fix false positive no-member in except * handler. 
+ + Closes pylint-dev/pylint#9056 + +* Fix crash when comparing invalid dict literal + + Closes #2522 + +* Removed internal functions ``infer_numpy_member``, ``name_looks_like_numpy_member``, and + ``attribute_looks_like_numpy_member`` from ``astroid.brain.brain_numpy_utils``. + +* To alleviate circular imports, the ``manager`` argument to ``AstroidBuilder()`` is now required. + +* Constants now have a parent of ``nodes.SYNTHETIC_ROOT``. + +* Fix crashes with large positive and negative list multipliers. + + Closes #2521 + Closes #2523 + +* Fix precedence of `path` arg in `modpath_from_file_with_callback` to be higher than `sys.path` + +* Following a deprecation period, the ``future`` argument was removed from ``statement()`` and ``frame()``. + +* Improve consistency of ``JoinedStr`` inference by not raising ``InferenceError`` and + returning either ``Uninferable`` or a fully resolved ``Const``. + + Closes #2621 + +* Fix crash when typing._alias() call is missing arguments. + + Closes #2513 + +* Remove support for Python 3.9 (and constant `PY310_PLUS`). + * Include subclasses of standard property classes as `property` decorators Closes #10377 * Modify ``astroid.bases`` and ``tests.test_nodes`` to reflect that `enum.property` was added in Python 3.11, not 3.10 +* Fix incorrect result in `_get_relative_base_path` when the target directory name starts with the base path + + Closes #2608 + +* The brain for nose was dropped. nose has been deprecated for 10 years and the brain required some maintenance. + + Refs #2765 + +* Fix a crash when the root of a node is not a module but is unknown. + + Closes #2672 + +* Add basic support for ``ast.TemplateStr`` and ``ast.Interpolation`` added in Python 3.14. + + Refs #2789 + +* Add support for type parameter defaults added in Python 3.13. + +* Improve ``as_string()`` representation for ``TypeVar``, ``ParamSpec`` and ``TypeVarTuple`` nodes, as well as + type parameter in ``ClassDef``, ``FuncDef`` and ``TypeAlias`` nodes (PEP 695). 
+ +* Astroid now correctly supports the ``exceptions`` attribute of ``ExceptionGroup``. + + Closes pylint-dev/pylint#8985 + Closes pylint-dev/pylint#10558 + +* Deprecate importing node classes from ``astroid`` directly. This will be removed in v5. + It's recommended to import them from ``astroid.nodes`` instead. + + Refs #2837 + What's New in astroid 3.3.11? ============================= @@ -45,7 +183,6 @@ What's New in astroid 3.3.9? ============================ Release date: 2025-03-09 - * Fix crash when `sys.modules` contains lazy loader objects during checking. Closes #2686 @@ -113,7 +250,7 @@ Release date: 2024-09-23 Closes pylint-dev/pylint#9947 -* Fix bug with ``manager.clear_cache()`` not fully clearing cache +* Fix bug with ``manager.clear_cache()`` not fully clearing cache. Refs https://github.com/pylint-dev/pylint/pull/9932#issuecomment-2364985551 diff --git a/astroid/__init__.py b/astroid/__init__.py index f04b4dfdc8..abb45cf51f 100644 --- a/astroid/__init__.py +++ b/astroid/__init__.py @@ -30,9 +30,6 @@ * builder contains the class responsible to build astroid trees """ -import functools -import tokenize - # isort: off # We have an isort: off on 'astroid.nodes' because of a circular import. 
from astroid.nodes import node_classes, scoped_nodes @@ -44,7 +41,7 @@ from astroid.bases import BaseInstance, BoundMethod, Instance, UnboundMethod from astroid.brain.helpers import register_module_extender from astroid.builder import extract_node, parse -from astroid.const import PY310_PLUS, Context +from astroid.const import Context from astroid.exceptions import ( AstroidBuildingError, AstroidError, @@ -83,89 +80,91 @@ from astroid.astroid_manager import MANAGER from astroid.nodes import ( CONST_CLS, - AnnAssign, - Arguments, - Assert, - Assign, - AssignAttr, - AssignName, - AsyncFor, - AsyncFunctionDef, - AsyncWith, - Attribute, - AugAssign, - Await, - BinOp, - BoolOp, - Break, - Call, - ClassDef, - Compare, - Comprehension, - ComprehensionScope, - Const, - Continue, - Decorators, - DelAttr, - Delete, - DelName, - Dict, - DictComp, - DictUnpack, - EmptyNode, - EvaluatedObject, - ExceptHandler, - Expr, - For, - FormattedValue, - FunctionDef, - GeneratorExp, - Global, - If, - IfExp, - Import, - ImportFrom, - JoinedStr, - Keyword, - Lambda, - List, - ListComp, - Match, - MatchAs, - MatchCase, - MatchClass, - MatchMapping, - MatchOr, - MatchSequence, - MatchSingleton, - MatchStar, - MatchValue, - Module, - Name, - NamedExpr, - NodeNG, - Nonlocal, - ParamSpec, - Pass, - Raise, - Return, - Set, - SetComp, - Slice, - Starred, - Subscript, - Try, - TryStar, - Tuple, - TypeAlias, - TypeVar, - TypeVarTuple, - UnaryOp, - Unknown, - While, - With, - Yield, - YieldFrom, + AnnAssign as _DEPRECATED_AnnAssign, + Arguments as _DEPRECATED_Arguments, + Assert as _DEPRECATED_Assert, + Assign as _DEPRECATED_Assign, + AssignAttr as _DEPRECATED_AssignAttr, + AssignName as _DEPRECATED_AssignName, + AsyncFor as _DEPRECATED_AsyncFor, + AsyncFunctionDef as _DEPRECATED_AsyncFunctionDef, + AsyncWith as _DEPRECATED_AsyncWith, + Attribute as _DEPRECATED_Attribute, + AugAssign as _DEPRECATED_AugAssign, + Await as _DEPRECATED_Await, + BinOp as _DEPRECATED_BinOp, + BoolOp as _DEPRECATED_BoolOp, 
+ Break as _DEPRECATED_Break, + Call as _DEPRECATED_Call, + ClassDef as _DEPRECATED_ClassDef, + Compare as _DEPRECATED_Compare, + Comprehension as _DEPRECATED_Comprehension, + ComprehensionScope as _DEPRECATED_ComprehensionScope, + Const as _DEPRECATED_Const, + Continue as _DEPRECATED_Continue, + Decorators as _DEPRECATED_Decorators, + DelAttr as _DEPRECATED_DelAttr, + Delete as _DEPRECATED_Delete, + DelName as _DEPRECATED_DelName, + Dict as _DEPRECATED_Dict, + DictComp as _DEPRECATED_DictComp, + DictUnpack as _DEPRECATED_DictUnpack, + EmptyNode as _DEPRECATED_EmptyNode, + EvaluatedObject as _DEPRECATED_EvaluatedObject, + ExceptHandler as _DEPRECATED_ExceptHandler, + Expr as _DEPRECATED_Expr, + For as _DEPRECATED_For, + FormattedValue as _DEPRECATED_FormattedValue, + FunctionDef as _DEPRECATED_FunctionDef, + GeneratorExp as _DEPRECATED_GeneratorExp, + Global as _DEPRECATED_Global, + If as _DEPRECATED_If, + IfExp as _DEPRECATED_IfExp, + Import as _DEPRECATED_Import, + ImportFrom as _DEPRECATED_ImportFrom, + Interpolation as _DEPRECATED_Interpolation, + JoinedStr as _DEPRECATED_JoinedStr, + Keyword as _DEPRECATED_Keyword, + Lambda as _DEPRECATED_Lambda, + List as _DEPRECATED_List, + ListComp as _DEPRECATED_ListComp, + Match as _DEPRECATED_Match, + MatchAs as _DEPRECATED_MatchAs, + MatchCase as _DEPRECATED_MatchCase, + MatchClass as _DEPRECATED_MatchClass, + MatchMapping as _DEPRECATED_MatchMapping, + MatchOr as _DEPRECATED_MatchOr, + MatchSequence as _DEPRECATED_MatchSequence, + MatchSingleton as _DEPRECATED_MatchSingleton, + MatchStar as _DEPRECATED_MatchStar, + MatchValue as _DEPRECATED_MatchValue, + Module as _DEPRECATED_Module, + Name as _DEPRECATED_Name, + NamedExpr as _DEPRECATED_NamedExpr, + NodeNG as _DEPRECATED_NodeNG, + Nonlocal as _DEPRECATED_Nonlocal, + ParamSpec as _DEPRECATED_ParamSpec, + Pass as _DEPRECATED_Pass, + Raise as _DEPRECATED_Raise, + Return as _DEPRECATED_Return, + Set as _DEPRECATED_Set, + SetComp as _DEPRECATED_SetComp, + Slice as 
_DEPRECATED_Slice, + Starred as _DEPRECATED_Starred, + Subscript as _DEPRECATED_Subscript, + TemplateStr as _DEPRECATED_TemplateStr, + Try as _DEPRECATED_Try, + TryStar as _DEPRECATED_TryStar, + Tuple as _DEPRECATED_Tuple, + TypeAlias as _DEPRECATED_TypeAlias, + TypeVar as _DEPRECATED_TypeVar, + TypeVarTuple as _DEPRECATED_TypeVarTuple, + UnaryOp as _DEPRECATED_UnaryOp, + Unknown as _DEPRECATED_Unknown, + While as _DEPRECATED_While, + With as _DEPRECATED_With, + Yield as _DEPRECATED_Yield, + YieldFrom as _DEPRECATED_YieldFrom, are_exclusive, builtin_lookup, unpack_infer, @@ -176,11 +175,68 @@ from astroid.util import Uninferable -# Performance hack for tokenize. See https://bugs.python.org/issue43014 -# Adapted from https://github.com/PyCQA/pycodestyle/pull/993 -if ( - not PY310_PLUS - and callable(getattr(tokenize, "_compile", None)) - and getattr(tokenize._compile, "__wrapped__", None) is None # type: ignore[attr-defined] -): - tokenize._compile = functools.lru_cache(tokenize._compile) # type: ignore[attr-defined] +__all__ = [ + "CONST_CLS", + "MANAGER", + "AstroidBuildingError", + "AstroidError", + "AstroidImportError", + "AstroidIndexError", + "AstroidSyntaxError", + "AstroidTypeError", + "AstroidValueError", + "AttributeInferenceError", + "BaseInstance", + "BoundMethod", + "Context", + "DuplicateBasesError", + "ExceptionInstance", + "InconsistentMroError", + "InferenceError", + "InferenceOverwriteError", + "Instance", + "MroError", + "NameInferenceError", + "NoDefault", + "NotFoundError", + "ParentMissingError", + "ResolveError", + "StatementMissing", + "SuperArgumentTypeError", + "SuperError", + "TooManyLevelsError", + "UnboundMethod", + "Uninferable", + "UnresolvableName", + "UseInferenceDefault", + "__version__", + "_inference_tip_cached", + "are_exclusive", + "builtin_lookup", + "extract_node", + "function_to_method", + "inference_tip", + "node_classes", + "parse", + "raw_building", + "register_module_extender", + "scoped_nodes", + "unpack_infer", + 
"version", +] + + +def __getattr__(name: str): + if (val := globals().get(f"_DEPRECATED_{name}")) is None: + msg = f"module '{__name__}' has no attribute '{name}'" + raise AttributeError(msg) + + # pylint: disable-next=import-outside-toplevel + import warnings + + msg = ( + f"importing '{name}' from 'astroid' is deprecated and will be removed in v5, " + "import it from 'astroid.nodes' instead" + ) + warnings.warn(msg, DeprecationWarning, stacklevel=2) + return val diff --git a/astroid/__pkginfo__.py b/astroid/__pkginfo__.py index 6a09f40590..feea93f107 100644 --- a/astroid/__pkginfo__.py +++ b/astroid/__pkginfo__.py @@ -2,5 +2,5 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt -__version__ = "3.3.11" +__version__ = "4.0.3" version = __version__ diff --git a/astroid/_ast.py b/astroid/_ast.py index 44800e3829..e3ad97db77 100644 --- a/astroid/_ast.py +++ b/astroid/_ast.py @@ -32,7 +32,7 @@ def parse( def parse_function_type_comment(type_comment: str) -> FunctionType | None: """Given a correct type comment, obtain a FunctionType object.""" - func_type = ast.parse(type_comment, "", "func_type") # type: ignore[attr-defined] + func_type = ast.parse(type_comment, "", "func_type") return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns) diff --git a/astroid/_backport_stdlib_names.py b/astroid/_backport_stdlib_names.py deleted file mode 100644 index 901f90b90d..0000000000 --- a/astroid/_backport_stdlib_names.py +++ /dev/null @@ -1,352 +0,0 @@ -# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html -# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE -# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt - -""" -Shim to support Python versions < 3.10 that don't have sys.stdlib_module_names - -These values were created by cherry-picking the commits from 
-https://bugs.python.org/issue42955 into each version, but may be updated -manually if changes are needed. -""" - -import sys - -# TODO: Remove this file when Python 3.9 is no longer supported - -PY_3_7 = frozenset( - { - "__future__", - "_abc", - "_ast", - "_asyncio", - "_bisect", - "_blake2", - "_bootlocale", - "_bz2", - "_codecs", - "_codecs_cn", - "_codecs_hk", - "_codecs_iso2022", - "_codecs_jp", - "_codecs_kr", - "_codecs_tw", - "_collections", - "_collections_abc", - "_compat_pickle", - "_compression", - "_contextvars", - "_crypt", - "_csv", - "_ctypes", - "_curses", - "_curses_panel", - "_datetime", - "_dbm", - "_decimal", - "_dummy_thread", - "_elementtree", - "_functools", - "_gdbm", - "_hashlib", - "_heapq", - "_imp", - "_io", - "_json", - "_locale", - "_lsprof", - "_lzma", - "_markupbase", - "_md5", - "_msi", - "_multibytecodec", - "_multiprocessing", - "_opcode", - "_operator", - "_osx_support", - "_pickle", - "_posixsubprocess", - "_py_abc", - "_pydecimal", - "_pyio", - "_queue", - "_random", - "_sha1", - "_sha256", - "_sha3", - "_sha512", - "_signal", - "_sitebuiltins", - "_socket", - "_sqlite3", - "_sre", - "_ssl", - "_stat", - "_string", - "_strptime", - "_struct", - "_symtable", - "_thread", - "_threading_local", - "_tkinter", - "_tracemalloc", - "_uuid", - "_warnings", - "_weakref", - "_weakrefset", - "_winapi", - "abc", - "aifc", - "antigravity", - "argparse", - "array", - "ast", - "asynchat", - "asyncio", - "asyncore", - "atexit", - "audioop", - "base64", - "bdb", - "binascii", - "binhex", - "bisect", - "builtins", - "bz2", - "cProfile", - "calendar", - "cgi", - "cgitb", - "chunk", - "cmath", - "cmd", - "code", - "codecs", - "codeop", - "collections", - "colorsys", - "compileall", - "concurrent", - "configparser", - "contextlib", - "contextvars", - "copy", - "copyreg", - "crypt", - "csv", - "ctypes", - "curses", - "dataclasses", - "datetime", - "dbm", - "decimal", - "difflib", - "dis", - "distutils", - "doctest", - "dummy_threading", - "email", 
- "encodings", - "ensurepip", - "enum", - "errno", - "faulthandler", - "fcntl", - "filecmp", - "fileinput", - "fnmatch", - "formatter", - "fractions", - "ftplib", - "functools", - "gc", - "genericpath", - "getopt", - "getpass", - "gettext", - "glob", - "grp", - "gzip", - "hashlib", - "heapq", - "hmac", - "html", - "http", - "idlelib", - "imaplib", - "imghdr", - "imp", - "importlib", - "inspect", - "io", - "ipaddress", - "itertools", - "json", - "keyword", - "lib2to3", - "linecache", - "locale", - "logging", - "lzma", - "macpath", - "mailbox", - "mailcap", - "marshal", - "math", - "mimetypes", - "mmap", - "modulefinder", - "msilib", - "msvcrt", - "multiprocessing", - "netrc", - "nis", - "nntplib", - "nt", - "ntpath", - "nturl2path", - "numbers", - "opcode", - "operator", - "optparse", - "os", - "ossaudiodev", - "parser", - "pathlib", - "pdb", - "pickle", - "pickletools", - "pipes", - "pkgutil", - "platform", - "plistlib", - "poplib", - "posix", - "posixpath", - "pprint", - "profile", - "pstats", - "pty", - "pwd", - "py_compile", - "pyclbr", - "pydoc", - "pydoc_data", - "pyexpat", - "queue", - "quopri", - "random", - "re", - "readline", - "reprlib", - "resource", - "rlcompleter", - "runpy", - "sched", - "secrets", - "select", - "selectors", - "shelve", - "shlex", - "shutil", - "signal", - "site", - "smtpd", - "smtplib", - "sndhdr", - "socket", - "socketserver", - "spwd", - "sqlite3", - "sre_compile", - "sre_constants", - "sre_parse", - "ssl", - "stat", - "statistics", - "string", - "stringprep", - "struct", - "subprocess", - "sunau", - "symbol", - "symtable", - "sys", - "sysconfig", - "syslog", - "tabnanny", - "tarfile", - "telnetlib", - "tempfile", - "termios", - "textwrap", - "this", - "threading", - "time", - "timeit", - "tkinter", - "token", - "tokenize", - "trace", - "traceback", - "tracemalloc", - "tty", - "turtle", - "turtledemo", - "types", - "typing", - "unicodedata", - "unittest", - "urllib", - "uu", - "uuid", - "venv", - "warnings", - "wave", - "weakref", 
- "webbrowser", - "winreg", - "winsound", - "wsgiref", - "xdrlib", - "xml", - "xmlrpc", - "zipapp", - "zipfile", - "zipimport", - "zlib", - } -) - -PY_3_8 = frozenset( - PY_3_7 - - { - "macpath", - } - | { - "_posixshmem", - "_statistics", - "_xxsubinterpreters", - } -) - -PY_3_9 = frozenset( - PY_3_8 - - { - "_dummy_thread", - "dummy_threading", - } - | { - "_aix_support", - "_bootsubprocess", - "_peg_parser", - "_zoneinfo", - "graphlib", - "zoneinfo", - } -) - -if sys.version_info[:2] == (3, 9): - stdlib_module_names = PY_3_9 -else: - raise AssertionError("This module is only intended as a backport for Python <= 3.9") diff --git a/astroid/arguments.py b/astroid/arguments.py index d2dca776d5..3781889b7c 100644 --- a/astroid/arguments.py +++ b/astroid/arguments.py @@ -54,7 +54,7 @@ def __init__( } @classmethod - def from_call(cls, call_node, context: InferenceContext | None = None): + def from_call(cls, call_node: nodes.Call, context: InferenceContext | None = None): """Get a CallSite object from the given Call node. context will be used to force a single inference path. @@ -65,7 +65,7 @@ def from_call(cls, call_node, context: InferenceContext | None = None): callcontext = CallContext(call_node.args, call_node.keywords) return cls(callcontext, context=context) - def has_invalid_arguments(self): + def has_invalid_arguments(self) -> bool: """Check if in the current CallSite were passed *invalid* arguments. This can mean multiple things. 
For instance, if an unpacking @@ -89,7 +89,7 @@ def _unpack_keywords( self, keywords: list[tuple[str | None, nodes.NodeNG]], context: InferenceContext | None = None, - ): + ) -> dict[str | None, InferenceResult]: values: dict[str | None, InferenceResult] = {} context = context or InferenceContext() context.extra_context = self.argument_context_map @@ -142,6 +142,8 @@ def infer_argument( self, funcnode: InferenceResult, name: str, context: InferenceContext ): # noqa: C901 """Infer a function argument value according to the call context.""" + # pylint: disable = too-many-branches + if not isinstance(funcnode, (nodes.FunctionDef, nodes.Lambda)): raise InferenceError( f"Can not infer function argument value for non-function node {funcnode!r}.", diff --git a/astroid/bases.py b/astroid/bases.py index c7097ebde3..a029da6d4f 100644 --- a/astroid/bases.py +++ b/astroid/bases.py @@ -686,7 +686,7 @@ class Generator(BaseInstance): # We defer initialization of special_attributes to the __init__ method since the constructor # of GeneratorModel requires the raw_building to be complete # TODO: This should probably be refactored. 
- special_attributes: objectmodel.GeneratorModel + special_attributes: objectmodel.GeneratorBaseModel def __init__( self, @@ -725,6 +725,10 @@ def __str__(self) -> str: class AsyncGenerator(Generator): """Special node representing an async generator.""" + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + AsyncGenerator.special_attributes = objectmodel.AsyncGeneratorModel() + def pytype(self) -> Literal["builtins.async_generator"]: return "builtins.async_generator" diff --git a/astroid/brain/brain_argparse.py b/astroid/brain/brain_argparse.py index d0da4080a3..6bde22f2e5 100644 --- a/astroid/brain/brain_argparse.py +++ b/astroid/brain/brain_argparse.py @@ -4,9 +4,10 @@ from __future__ import annotations -from astroid import arguments, inference_tip, nodes +from astroid import arguments, nodes from astroid.context import InferenceContext from astroid.exceptions import UseInferenceDefault +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager @@ -20,13 +21,10 @@ def infer_namespace(node, context: InferenceContext | None = None): "Namespace", lineno=node.lineno, col_offset=node.col_offset, - parent=nodes.Unknown(), + parent=nodes.SYNTHETIC_ROOT, # this class is not real end_lineno=node.end_lineno, end_col_offset=node.end_col_offset, ) - # Set parent manually until ClassDef constructor fixed: - # https://github.com/pylint-dev/astroid/issues/1490 - class_node.parent = node.parent for attr in set(callsite.keyword_arguments): fake_node = nodes.EmptyNode() fake_node.parent = class_node diff --git a/astroid/brain/brain_attrs.py b/astroid/brain/brain_attrs.py index 23ec9f66a4..b619bb3f49 100644 --- a/astroid/brain/brain_attrs.py +++ b/astroid/brain/brain_attrs.py @@ -8,9 +8,9 @@ Without this hook pylint reports unsupported-assignment-operation for attrs classes """ +from astroid import nodes +from astroid.brain.helpers import is_class_var from astroid.manager import AstroidManager -from astroid.nodes.node_classes 
import AnnAssign, Assign, AssignName, Call, Unknown -from astroid.nodes.scoped_nodes import ClassDef from astroid.util import safe_infer ATTRIB_NAMES = frozenset( @@ -50,7 +50,7 @@ def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES) -> bool: if not node.decorators: return False for decorator_attribute in node.decorators.nodes: - if isinstance(decorator_attribute, Call): # decorator with arguments + if isinstance(decorator_attribute, nodes.Call): # decorator with arguments decorator_attribute = decorator_attribute.func if decorator_attribute.as_string() in decorator_names: return True @@ -61,35 +61,42 @@ def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES) -> bool: return False -def attr_attributes_transform(node: ClassDef) -> None: +def attr_attributes_transform(node: nodes.ClassDef) -> None: """Given that the ClassNode has an attr decorator, rewrite class attributes as instance attributes """ # Astroid can't infer this attribute properly # Prevents https://github.com/pylint-dev/pylint/issues/1884 - node.locals["__attrs_attrs__"] = [Unknown(parent=node)] + node.locals["__attrs_attrs__"] = [nodes.Unknown(parent=node)] use_bare_annotations = is_decorated_with_attrs(node, NEW_ATTRS_NAMES) for cdef_body_node in node.body: - if not isinstance(cdef_body_node, (Assign, AnnAssign)): + if not isinstance(cdef_body_node, (nodes.Assign, nodes.AnnAssign)): continue - if isinstance(cdef_body_node.value, Call): + if isinstance(cdef_body_node.value, nodes.Call): if cdef_body_node.value.func.as_string() not in ATTRIB_NAMES: continue elif not use_bare_annotations: continue + + # Skip attributes that are explicitly annotated as class variables + if isinstance(cdef_body_node, nodes.AnnAssign) and is_class_var( + cdef_body_node.annotation + ): + continue + targets = ( cdef_body_node.targets if hasattr(cdef_body_node, "targets") else [cdef_body_node.target] ) for target in targets: - rhs_node = Unknown( + rhs_node = nodes.Unknown( lineno=cdef_body_node.lineno, 
col_offset=cdef_body_node.col_offset, parent=cdef_body_node, ) - if isinstance(target, AssignName): + if isinstance(target, nodes.AssignName): # Could be a subscript if the code analysed is # i = Optional[str] = "" # See https://github.com/pylint-dev/pylint/issues/4439 @@ -99,5 +106,5 @@ def attr_attributes_transform(node: ClassDef) -> None: def register(manager: AstroidManager) -> None: manager.register_transform( - ClassDef, attr_attributes_transform, is_decorated_with_attrs + nodes.ClassDef, attr_attributes_transform, is_decorated_with_attrs ) diff --git a/astroid/brain/brain_boto3.py b/astroid/brain/brain_boto3.py index 55bca14fc8..3a95feb816 100644 --- a/astroid/brain/brain_boto3.py +++ b/astroid/brain/brain_boto3.py @@ -4,14 +4,14 @@ """Astroid hooks for understanding ``boto3.ServiceRequest()``.""" -from astroid import extract_node +from astroid.builder import extract_node from astroid.manager import AstroidManager from astroid.nodes.scoped_nodes import ClassDef BOTO_SERVICE_FACTORY_QUALIFIED_NAME = "boto3.resources.base.ServiceResource" -def service_request_transform(node): +def service_request_transform(node: ClassDef) -> ClassDef: """Transform ServiceResource to look like dynamic classes.""" code = """ def __getattr__(self, attr): @@ -22,7 +22,7 @@ def __getattr__(self, attr): return node -def _looks_like_boto3_service_request(node) -> bool: +def _looks_like_boto3_service_request(node: ClassDef) -> bool: return node.qname() == BOTO_SERVICE_FACTORY_QUALIFIED_NAME diff --git a/astroid/brain/brain_builtin_inference.py b/astroid/brain/brain_builtin_inference.py index e9d00e2e1a..e21d36141c 100644 --- a/astroid/brain/brain_builtin_inference.py +++ b/astroid/brain/brain_builtin_inference.py @@ -9,9 +9,9 @@ import itertools from collections.abc import Callable, Iterable, Iterator from functools import partial -from typing import TYPE_CHECKING, Any, NoReturn, Union, cast +from typing import TYPE_CHECKING, Any, NoReturn, cast -from astroid import arguments, 
helpers, inference_tip, nodes, objects, util +from astroid import arguments, helpers, nodes, objects, util from astroid.builder import AstroidBuilder from astroid.context import InferenceContext from astroid.exceptions import ( @@ -21,6 +21,7 @@ MroError, UseInferenceDefault, ) +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager from astroid.nodes import scoped_nodes from astroid.typing import ( @@ -32,26 +33,13 @@ if TYPE_CHECKING: from astroid.bases import Instance -ContainerObjects = Union[ - objects.FrozenSet, - objects.DictItems, - objects.DictKeys, - objects.DictValues, -] - -BuiltContainers = Union[ - type[tuple], - type[list], - type[set], - type[frozenset], -] - -CopyResult = Union[ - nodes.Dict, - nodes.List, - nodes.Set, - objects.FrozenSet, -] +ContainerObjects = ( + objects.FrozenSet | objects.DictItems | objects.DictKeys | objects.DictValues +) + +BuiltContainers = type[tuple] | type[list] | type[set] | type[frozenset] + +CopyResult = nodes.Dict | nodes.List | nodes.Set | objects.FrozenSet OBJECT_DUNDER_NEW = "object.__new__" @@ -172,6 +160,7 @@ def on_bootstrap(): def _builtin_filter_predicate(node, builtin_name) -> bool: + # pylint: disable = too-many-boolean-expressions if ( builtin_name == "type" and node.root().name == "re" @@ -189,8 +178,8 @@ def _builtin_filter_predicate(node, builtin_name) -> bool: # Match = type(...) 
# ``` return False - if isinstance(node.func, nodes.Name) and node.func.name == builtin_name: - return True + if isinstance(node.func, nodes.Name): + return node.func.name == builtin_name if isinstance(node.func, nodes.Attribute): return ( node.func.attrname == "fromkeys" @@ -280,7 +269,7 @@ def _container_generic_transform( if isinstance(arg, klass): return arg if isinstance(arg, iterables): - arg = cast(Union[nodes.BaseContainer, ContainerObjects], arg) + arg = cast((nodes.BaseContainer | ContainerObjects), arg) if all(isinstance(elt, nodes.Const) for elt in arg.elts): elts = [cast(nodes.Const, elt).value for elt in arg.elts] else: @@ -371,7 +360,7 @@ def _infer_builtin_container( def _get_elts(arg, context): - def is_iterable(n): + def is_iterable(n) -> bool: return isinstance(n, (nodes.List, nodes.Tuple, nodes.Set)) try: @@ -641,12 +630,15 @@ def infer_property( prop_func = objects.Property( function=inferred, - name=inferred.name, + name="", lineno=node.lineno, col_offset=node.col_offset, + # ↓ semantically, the definition of the class of property isn't within + # node.frame. It's somewhere in the builtins module, but we are special + # casing it for each "property()" call, so we are making up the + # definition on the spot, ad-hoc. + parent=scoped_nodes.SYNTHETIC_ROOT, ) - # Set parent outside __init__: https://github.com/pylint-dev/astroid/issues/1490 - prop_func.parent = node prop_func.postinit( body=[], args=inferred.args, @@ -764,7 +756,9 @@ def infer_issubclass(callnode, context: InferenceContext | None = None): except (InferenceError, StopIteration) as exc: raise UseInferenceDefault from exc if not isinstance(obj_type, nodes.ClassDef): - raise UseInferenceDefault("TypeError: arg 1 must be class") + raise UseInferenceDefault( + f"TypeError: arg 1 must be class, not {type(obj_type)!r}" + ) # The right hand argument is the class(es) that the given # object is to be checked against. 
@@ -844,7 +838,7 @@ def _class_or_tuple_to_container( return class_container -def infer_len(node, context: InferenceContext | None = None): +def infer_len(node, context: InferenceContext | None = None) -> nodes.Const: """Infer length calls. :param nodes.Call node: len call to infer @@ -867,7 +861,7 @@ def infer_len(node, context: InferenceContext | None = None): raise UseInferenceDefault(str(exc)) from exc -def infer_str(node, context: InferenceContext | None = None): +def infer_str(node, context: InferenceContext | None = None) -> nodes.Const: """Infer str() calls. :param nodes.Call node: str() call to infer @@ -926,7 +920,7 @@ def infer_dict_fromkeys(node, context: InferenceContext | None = None): will be inferred instead. """ - def _build_dict_with_elements(elements): + def _build_dict_with_elements(elements: list) -> nodes.Dict: new_node = nodes.Dict( col_offset=node.col_offset, lineno=node.lineno, @@ -1004,7 +998,7 @@ def _infer_copy_method( def _is_str_format_call(node: nodes.Call) -> bool: """Catch calls to str.format().""" - if not isinstance(node.func, nodes.Attribute) or not node.func.attrname == "format": + if not (isinstance(node.func, nodes.Attribute) and node.func.attrname == "format"): return False if isinstance(node.func.expr, nodes.Name): @@ -1024,8 +1018,9 @@ def _infer_str_format_call( value: nodes.Const if isinstance(node.func.expr, nodes.Name): - if not (inferred := util.safe_infer(node.func.expr)) or not isinstance( - inferred, nodes.Const + if not ( + (inferred := util.safe_infer(node.func.expr)) + and isinstance(inferred, nodes.Const) ): return iter([util.Uninferable]) value = inferred diff --git a/astroid/brain/brain_crypt.py b/astroid/brain/brain_crypt.py index 2a6abbd7ca..71f9dfcb34 100644 --- a/astroid/brain/brain_crypt.py +++ b/astroid/brain/brain_crypt.py @@ -2,12 +2,13 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from astroid 
import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def _re_transform(): +def _re_transform() -> nodes.Module: return parse( """ from collections import namedtuple diff --git a/astroid/brain/brain_ctypes.py b/astroid/brain/brain_ctypes.py index 863ea1874a..8ae10bc952 100644 --- a/astroid/brain/brain_ctypes.py +++ b/astroid/brain/brain_ctypes.py @@ -12,12 +12,13 @@ """ import sys +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def enrich_ctypes_redefined_types(): +def enrich_ctypes_redefined_types() -> nodes.Module: """ For each ctypes redefined types, overload 'value' and '_type_' members definition. diff --git a/astroid/brain/brain_curses.py b/astroid/brain/brain_curses.py index f06c52f979..5824fd7f35 100644 --- a/astroid/brain/brain_curses.py +++ b/astroid/brain/brain_curses.py @@ -2,12 +2,13 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def _curses_transform(): +def _curses_transform() -> nodes.Module: return parse( """ A_ALTCHARSET = 1 diff --git a/astroid/brain/brain_dataclasses.py b/astroid/brain/brain_dataclasses.py index 845295bf9b..244665e080 100644 --- a/astroid/brain/brain_dataclasses.py +++ b/astroid/brain/brain_dataclasses.py @@ -15,22 +15,23 @@ from __future__ import annotations from collections.abc import Iterator -from typing import Literal, Union +from typing import Literal from astroid import bases, context, nodes +from astroid.brain.helpers import is_class_var from astroid.builder import parse -from astroid.const import PY310_PLUS, PY313_PLUS +from astroid.const import 
PY313_PLUS from astroid.exceptions import AstroidSyntaxError, InferenceError, UseInferenceDefault from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager from astroid.typing import InferenceResult from astroid.util import Uninferable, UninferableBase, safe_infer -_FieldDefaultReturn = Union[ - None, - tuple[Literal["default"], nodes.NodeNG], - tuple[Literal["default_factory"], nodes.Call], -] +_FieldDefaultReturn = ( + None + | tuple[Literal["default"], nodes.NodeNG] + | tuple[Literal["default_factory"], nodes.Call] +) DATACLASSES_DECORATORS = frozenset(("dataclass",)) FIELD_NAME = "field" @@ -44,7 +45,7 @@ def is_decorated_with_dataclass( node: nodes.ClassDef, decorator_names: frozenset[str] = DATACLASSES_DECORATORS ) -> bool: """Return True if a decorated node has a `dataclass` decorator applied.""" - if not isinstance(node, nodes.ClassDef) or not node.decorators: + if not (isinstance(node, nodes.ClassDef) and node.decorators): return False return any( @@ -53,7 +54,7 @@ def is_decorated_with_dataclass( ) -def dataclass_transform(node: nodes.ClassDef) -> None: +def dataclass_transform(node: nodes.ClassDef) -> nodes.ClassDef | None: """Rewrite a dataclass to be easily understood by pylint.""" node.is_dataclass = True @@ -69,17 +70,17 @@ def dataclass_transform(node: nodes.ClassDef) -> None: node.instance_attrs[name] = [rhs_node] if not _check_generate_dataclass_init(node): - return + return None kw_only_decorated = False - if PY310_PLUS and node.decorators.nodes: + if node.decorators.nodes: for decorator in node.decorators.nodes: if not isinstance(decorator, nodes.Call): kw_only_decorated = False break for keyword in decorator.keywords: if keyword.arg == "kw_only": - kw_only_decorated = keyword.value.bool_value() + kw_only_decorated = keyword.value.bool_value() is True init_str = _generate_dataclass_init( node, @@ -101,6 +102,7 @@ def dataclass_transform(node: nodes.ClassDef) -> None: new_assign = parse(f"{DEFAULT_FACTORY} = 
object()").body[0] new_assign.parent = root root.locals[DEFAULT_FACTORY] = [new_assign.targets[0]] + return node def _get_dataclass_attributes( @@ -111,13 +113,14 @@ def _get_dataclass_attributes( If init is True, also include InitVars. """ for assign_node in node.body: - if not isinstance(assign_node, nodes.AnnAssign) or not isinstance( - assign_node.target, nodes.AssignName + if not ( + isinstance(assign_node, nodes.AnnAssign) + and isinstance(assign_node.target, nodes.AssignName) ): continue # Annotation is never None - if _is_class_var(assign_node.annotation): # type: ignore[arg-type] + if is_class_var(assign_node.annotation): # type: ignore[arg-type] continue if _is_keyword_only_sentinel(assign_node.annotation): @@ -155,7 +158,7 @@ def _check_generate_dataclass_init(node: nodes.ClassDef) -> bool: # Check for keyword arguments of the form init=False return not any( keyword.arg == "init" - and not keyword.value.bool_value() # type: ignore[union-attr] # value is never None + and keyword.value.bool_value() is False # type: ignore[union-attr] # value is never None for keyword in found.keywords ) @@ -238,10 +241,12 @@ def _get_previous_field_default(node: nodes.ClassDef, name: str) -> nodes.NodeNG return None -def _generate_dataclass_init( # pylint: disable=too-many-locals +def _generate_dataclass_init( node: nodes.ClassDef, assigns: list[nodes.AnnAssign], kw_only_decorated: bool ) -> str: """Return an init method for a dataclass given the targets.""" + # pylint: disable = too-many-locals, too-many-branches, too-many-statements + params: list[str] = [] kw_only_params: list[str] = [] assignments: list[str] = [] @@ -269,7 +274,7 @@ def _generate_dataclass_init( # pylint: disable=too-many-locals if is_field: # Skip any fields that have `init=False` if any( - keyword.arg == "init" and not keyword.value.bool_value() + keyword.arg == "init" and (keyword.value.bool_value() is False) for keyword in value.keywords # type: ignore[union-attr] # value is never None ): # Also 
remove the name from the previous arguments to be inserted later @@ -339,7 +344,7 @@ def _generate_dataclass_init( # pylint: disable=too-many-locals if is_field: kw_only = [k for k in value.keywords if k.arg == "kw_only"] # type: ignore[union-attr] if kw_only: - if kw_only[0].value.bool_value(): + if kw_only[0].value.bool_value() is True: kw_only_params.append(param_str) else: params.append(param_str) @@ -548,20 +553,8 @@ def _get_field_default(field_call: nodes.Call) -> _FieldDefaultReturn: return None -def _is_class_var(node: nodes.NodeNG) -> bool: - """Return True if node is a ClassVar, with or without subscripting.""" - try: - inferred = next(node.infer()) - except (InferenceError, StopIteration): - return False - - return getattr(inferred, "name", "") == "ClassVar" - - def _is_keyword_only_sentinel(node: nodes.NodeNG) -> bool: """Return True if node is the KW_ONLY sentinel.""" - if not PY310_PLUS: - return False inferred = safe_infer(node) return ( isinstance(inferred, bases.Instance) diff --git a/astroid/brain/brain_datetime.py b/astroid/brain/brain_datetime.py index 06b011ce49..f4cb6670bd 100644 --- a/astroid/brain/brain_datetime.py +++ b/astroid/brain/brain_datetime.py @@ -2,13 +2,14 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import AstroidBuilder from astroid.const import PY312_PLUS from astroid.manager import AstroidManager -def datetime_transform(): +def datetime_transform() -> nodes.Module: """The datetime module was C-accelerated in Python 3.12, so use the Python source.""" return AstroidBuilder(AstroidManager()).string_build("from _pydatetime import *") diff --git a/astroid/brain/brain_dateutil.py b/astroid/brain/brain_dateutil.py index 3630639b0a..c27343f961 100644 --- a/astroid/brain/brain_dateutil.py +++ 
b/astroid/brain/brain_dateutil.py @@ -6,12 +6,13 @@ import textwrap +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import AstroidBuilder from astroid.manager import AstroidManager -def dateutil_transform(): +def dateutil_transform() -> nodes.Module: return AstroidBuilder(AstroidManager()).string_build( textwrap.dedent( """ diff --git a/astroid/brain/brain_functools.py b/astroid/brain/brain_functools.py index 2adf2b604f..1cb8442161 100644 --- a/astroid/brain/brain_functools.py +++ b/astroid/brain/brain_functools.py @@ -10,14 +10,13 @@ from functools import partial from itertools import chain -from astroid import BoundMethod, arguments, extract_node, nodes, objects +from astroid import BoundMethod, arguments, nodes, objects +from astroid.builder import extract_node from astroid.context import InferenceContext from astroid.exceptions import InferenceError, UseInferenceDefault from astroid.inference_tip import inference_tip from astroid.interpreter import objectmodel from astroid.manager import AstroidManager -from astroid.nodes.node_classes import AssignName, Attribute, Call, Name -from astroid.nodes.scoped_nodes import FunctionDef from astroid.typing import InferenceResult, SuccessfulInferenceResult from astroid.util import UninferableBase, safe_infer @@ -91,7 +90,7 @@ def _functools_partial_inference( raise UseInferenceDefault from exc if isinstance(inferred_wrapped_function, UninferableBase): raise UseInferenceDefault("Cannot infer the wrapped function") - if not isinstance(inferred_wrapped_function, FunctionDef): + if not isinstance(inferred_wrapped_function, nodes.FunctionDef): raise UseInferenceDefault("The wrapped function is not a function") # Determine if the passed keywords into the callsite are supported @@ -105,7 +104,9 @@ def _functools_partial_inference( inferred_wrapped_function.args.kwonlyargs or (), ) parameter_names = { - param.name for param in function_parameters if isinstance(param, 
AssignName) + param.name + for param in function_parameters + if isinstance(param, nodes.AssignName) } if set(call.keyword_arguments) - parameter_names: raise UseInferenceDefault("wrapped function received unknown parameters") @@ -134,23 +135,25 @@ def _looks_like_lru_cache(node) -> bool: if not node.decorators: return False for decorator in node.decorators.nodes: - if not isinstance(decorator, (Attribute, Call)): + if not isinstance(decorator, (nodes.Attribute, nodes.Call)): continue if _looks_like_functools_member(decorator, "lru_cache"): return True return False -def _looks_like_functools_member(node: Attribute | Call, member: str) -> bool: +def _looks_like_functools_member( + node: nodes.Attribute | nodes.Call, member: str +) -> bool: """Check if the given Call node is the wanted member of functools.""" - if isinstance(node, Attribute): + if isinstance(node, nodes.Attribute): return node.attrname == member - if isinstance(node.func, Name): + if isinstance(node.func, nodes.Name): return node.func.name == member - if isinstance(node.func, Attribute): + if isinstance(node.func, nodes.Attribute): return ( node.func.attrname == member - and isinstance(node.func.expr, Name) + and isinstance(node.func.expr, nodes.Name) and node.func.expr.name == "functools" ) return False @@ -160,10 +163,12 @@ def _looks_like_functools_member(node: Attribute | Call, member: str) -> bool: def register(manager: AstroidManager) -> None: - manager.register_transform(FunctionDef, _transform_lru_cache, _looks_like_lru_cache) + manager.register_transform( + nodes.FunctionDef, _transform_lru_cache, _looks_like_lru_cache + ) manager.register_transform( - Call, + nodes.Call, inference_tip(_functools_partial_inference), _looks_like_partial, ) diff --git a/astroid/brain/brain_gi.py b/astroid/brain/brain_gi.py index 4ebbdde2ab..fa600775dc 100644 --- a/astroid/brain/brain_gi.py +++ b/astroid/brain/brain_gi.py @@ -59,6 +59,8 @@ def _gi_build_stub(parent): # noqa: C901 Inspect the passed module 
recursively and build stubs for functions, classes, etc. """ + # pylint: disable = too-many-branches, too-many-statements + classes = {} functions = {} constants = {} diff --git a/astroid/brain/brain_hashlib.py b/astroid/brain/brain_hashlib.py index 91aa4c4277..a17645a8dd 100644 --- a/astroid/brain/brain_hashlib.py +++ b/astroid/brain/brain_hashlib.py @@ -2,12 +2,13 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def _hashlib_transform(): +def _hashlib_transform() -> nodes.Module: init_signature = "value='', usedforsecurity=True" digest_signature = "self" shake_digest_signature = "self, length" diff --git a/astroid/brain/brain_http.py b/astroid/brain/brain_http.py index f34f381df8..9802c0f7e7 100644 --- a/astroid/brain/brain_http.py +++ b/astroid/brain/brain_http.py @@ -5,18 +5,30 @@ """Astroid brain hints for some of the `http` module.""" import textwrap +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import AstroidBuilder from astroid.manager import AstroidManager -def _http_transform(): +def _http_transform() -> nodes.Module: code = textwrap.dedent( """ - from enum import IntEnum + from enum import IntEnum, StrEnum from collections import namedtuple _HTTPStatus = namedtuple('_HTTPStatus', 'value phrase description') + class HTTPMethod(StrEnum): + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + HEAD = "HEAD" + OPTIONS = "OPTIONS" + PATCH = "PATCH" + TRACE = "TRACE" + CONNECT = "CONNECT" + class HTTPStatus(IntEnum): @property @@ -34,6 +46,7 @@ def description(self): SWITCHING_PROTOCOLS = _HTTPStatus(101, 'Switching Protocols', 'Switching to new protocol; obey Upgrade header') PROCESSING = _HTTPStatus(102, 'Processing', '') + 
EARLY_HINTS = _HTTPStatus(103, 'Early Hints') OK = _HTTPStatus(200, 'OK', 'Request fulfilled, document follows') CREATED = _HTTPStatus(201, 'Created', 'Document created, URL follows') ACCEPTED = _HTTPStatus(202, 'Accepted', @@ -86,22 +99,27 @@ def description(self): 'Client must specify Content-Length') PRECONDITION_FAILED = _HTTPStatus(412, 'Precondition Failed', 'Precondition in headers is false') - REQUEST_ENTITY_TOO_LARGE = _HTTPStatus(413, 'Request Entity Too Large', - 'Entity is too large') - REQUEST_URI_TOO_LONG = _HTTPStatus(414, 'Request-URI Too Long', - 'URI is too long') + CONTENT_TOO_LARGE = _HTTPStatus(413, 'Content Too Large', + 'Content is too large') + REQUEST_ENTITY_TOO_LARGE = CONTENT_TOO_LARGE + URI_TOO_LONG = _HTTPStatus(414, 'URI Too Long', 'URI is too long') + REQUEST_URI_TOO_LONG = URI_TOO_LONG UNSUPPORTED_MEDIA_TYPE = _HTTPStatus(415, 'Unsupported Media Type', 'Entity body in unsupported format') - REQUESTED_RANGE_NOT_SATISFIABLE = _HTTPStatus(416, - 'Requested Range Not Satisfiable', - 'Cannot satisfy request range') + RANGE_NOT_SATISFIABLE = (416, 'Range Not Satisfiable', + 'Cannot satisfy request range') + REQUESTED_RANGE_NOT_SATISFIABLE = RANGE_NOT_SATISFIABLE EXPECTATION_FAILED = _HTTPStatus(417, 'Expectation Failed', 'Expect condition could not be satisfied') + IM_A_TEAPOT = _HTTPStatus(418, 'I\\\'m a Teapot', + 'Server refuses to brew coffee because it is a teapot.') MISDIRECTED_REQUEST = _HTTPStatus(421, 'Misdirected Request', 'Server is not able to produce a response') - UNPROCESSABLE_ENTITY = _HTTPStatus(422, 'Unprocessable Entity') + UNPROCESSABLE_CONTENT = _HTTPStatus(422, 'Unprocessable Content') + UNPROCESSABLE_ENTITY = UNPROCESSABLE_CONTENT LOCKED = _HTTPStatus(423, 'Locked') FAILED_DEPENDENCY = _HTTPStatus(424, 'Failed Dependency') + TOO_EARLY = _HTTPStatus(425, 'Too Early') UPGRADE_REQUIRED = _HTTPStatus(426, 'Upgrade Required') PRECONDITION_REQUIRED = _HTTPStatus(428, 'Precondition Required', 'The origin server requires the 
request to be conditional') @@ -140,7 +158,7 @@ def description(self): return AstroidBuilder(AstroidManager()).string_build(code) -def _http_client_transform(): +def _http_client_transform() -> nodes.Module: return AstroidBuilder(AstroidManager()).string_build( textwrap.dedent( """ @@ -149,6 +167,7 @@ def _http_client_transform(): CONTINUE = HTTPStatus.CONTINUE SWITCHING_PROTOCOLS = HTTPStatus.SWITCHING_PROTOCOLS PROCESSING = HTTPStatus.PROCESSING + EARLY_HINTS = HTTPStatus.EARLY_HINTS OK = HTTPStatus.OK CREATED = HTTPStatus.CREATED ACCEPTED = HTTPStatus.ACCEPTED @@ -180,14 +199,20 @@ def _http_client_transform(): GONE = HTTPStatus.GONE LENGTH_REQUIRED = HTTPStatus.LENGTH_REQUIRED PRECONDITION_FAILED = HTTPStatus.PRECONDITION_FAILED - REQUEST_ENTITY_TOO_LARGE = HTTPStatus.REQUEST_ENTITY_TOO_LARGE - REQUEST_URI_TOO_LONG = HTTPStatus.REQUEST_URI_TOO_LONG + CONTENT_TOO_LARGE = HTTPStatus.CONTENT_TOO_LARGE + REQUEST_ENTITY_TOO_LARGE = HTTPStatus.CONTENT_TOO_LARGE + URI_TOO_LONG = HTTPStatus.URI_TOO_LONG + REQUEST_URI_TOO_LONG = HTTPStatus.URI_TOO_LONG UNSUPPORTED_MEDIA_TYPE = HTTPStatus.UNSUPPORTED_MEDIA_TYPE - REQUESTED_RANGE_NOT_SATISFIABLE = HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE + RANGE_NOT_SATISFIABLE = HTTPStatus.RANGE_NOT_SATISFIABLE + REQUESTED_RANGE_NOT_SATISFIABLE = HTTPStatus.RANGE_NOT_SATISFIABLE EXPECTATION_FAILED = HTTPStatus.EXPECTATION_FAILED - UNPROCESSABLE_ENTITY = HTTPStatus.UNPROCESSABLE_ENTITY + IM_A_TEAPOT = HTTPStatus.IM_A_TEAPOT + UNPROCESSABLE_CONTENT = HTTPStatus.UNPROCESSABLE_CONTENT + UNPROCESSABLE_ENTITY = HTTPStatus.UNPROCESSABLE_CONTENT LOCKED = HTTPStatus.LOCKED FAILED_DEPENDENCY = HTTPStatus.FAILED_DEPENDENCY + TOO_EARLY = HTTPStatus.TOO_EARLY UPGRADE_REQUIRED = HTTPStatus.UPGRADE_REQUIRED PRECONDITION_REQUIRED = HTTPStatus.PRECONDITION_REQUIRED TOO_MANY_REQUESTS = HTTPStatus.TOO_MANY_REQUESTS diff --git a/astroid/brain/brain_hypothesis.py b/astroid/brain/brain_hypothesis.py index 6180520f30..ba20f0683c 100644 --- 
a/astroid/brain/brain_hypothesis.py +++ b/astroid/brain/brain_hypothesis.py @@ -27,7 +27,7 @@ def a_strategy(draw): ) -def is_decorated_with_st_composite(node) -> bool: +def is_decorated_with_st_composite(node: FunctionDef) -> bool: """Return whether a decorated node has @st.composite applied.""" if node.decorators and node.args.args and node.args.args[0].name == "draw": for decorator_attribute in node.decorators.nodes: @@ -36,11 +36,12 @@ def is_decorated_with_st_composite(node) -> bool: return False -def remove_draw_parameter_from_composite_strategy(node): +def remove_draw_parameter_from_composite_strategy(node: FunctionDef) -> FunctionDef: """Given that the FunctionDef is decorated with @st.composite, remove the first argument (`draw`) - it's always supplied by Hypothesis so we don't need to emit the no-value-for-parameter lint. """ + assert isinstance(node.args.args, list) del node.args.args[0] del node.args.annotations[0] del node.args.type_comment_args[0] diff --git a/astroid/brain/brain_mechanize.py b/astroid/brain/brain_mechanize.py index 0f0d0193bd..62cc2d05cb 100644 --- a/astroid/brain/brain_mechanize.py +++ b/astroid/brain/brain_mechanize.py @@ -2,12 +2,13 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import AstroidBuilder from astroid.manager import AstroidManager -def mechanize_transform(): +def mechanize_transform() -> nodes.Module: return AstroidBuilder(AstroidManager()).string_build( """class Browser(object): def __getattr__(self, name): diff --git a/astroid/brain/brain_namedtuple_enum.py b/astroid/brain/brain_namedtuple_enum.py index 71091d8872..ff5b7154d9 100644 --- a/astroid/brain/brain_namedtuple_enum.py +++ b/astroid/brain/brain_namedtuple_enum.py @@ -12,8 +12,7 @@ from textwrap import dedent from typing import Final -import astroid 
-from astroid import arguments, bases, inference_tip, nodes, util +from astroid import arguments, bases, nodes, util from astroid.builder import AstroidBuilder, _extract_single_node, extract_node from astroid.context import InferenceContext from astroid.exceptions import ( @@ -22,7 +21,9 @@ InferenceError, UseInferenceDefault, ) +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager +from astroid.nodes.scoped_nodes.scoped_nodes import SYNTHETIC_ROOT ENUM_QNAME: Final[str] = "enum.Enum" TYPING_NAMEDTUPLE_QUALIFIED: Final = { @@ -73,7 +74,9 @@ def _extract_namedtuple_arg_or_keyword( # pylint: disable=inconsistent-return-s def infer_func_form( node: nodes.Call, - base_type: list[nodes.NodeNG], + base_type: nodes.NodeNG, + *, + parent: nodes.NodeNG, context: InferenceContext | None = None, enum: bool = False, ) -> tuple[nodes.ClassDef, str, list[str]]: @@ -146,15 +149,10 @@ def infer_func_form( col_offset=node.col_offset, end_lineno=node.end_lineno, end_col_offset=node.end_col_offset, - parent=nodes.Unknown(), + parent=parent, ) - # A typical ClassDef automatically adds its name to the parent scope, - # but doing so causes problems, so defer setting parent until after init - # see: https://github.com/pylint-dev/pylint/issues/5982 - class_node.parent = node.parent class_node.postinit( - # set base class=tuple - bases=base_type, + bases=[base_type], body=[], decorators=None, ) @@ -194,29 +192,21 @@ def infer_named_tuple( node: nodes.Call, context: InferenceContext | None = None ) -> Iterator[nodes.ClassDef]: """Specific inference function for namedtuple Call node.""" - tuple_base_name: list[nodes.NodeNG] = [ - nodes.Name( - name="tuple", - parent=node.root(), - lineno=0, - col_offset=0, - end_lineno=None, - end_col_offset=None, - ) - ] + tuple_base: nodes.Name = _extract_single_node("tuple") class_node, name, attributes = infer_func_form( - node, tuple_base_name, context=context + node, tuple_base, parent=SYNTHETIC_ROOT, 
context=context ) + call_site = arguments.CallSite.from_call(node, context=context) - node = extract_node("import collections; collections.namedtuple") - try: - func = next(node.infer()) - except StopIteration as e: - raise InferenceError(node=node) from e + func = util.safe_infer( + _extract_single_node("import collections; collections.namedtuple") + ) + assert isinstance(func, nodes.NodeNG) try: - rename = next( + rename_arg_bool_value = next( call_site.infer_argument(func, "rename", context or InferenceContext()) ).bool_value() + rename = rename_arg_bool_value is True except (InferenceError, StopIteration): rename = False @@ -267,6 +257,7 @@ def _get_renamed_namedtuple_attributes(field_names): names = list(field_names) seen = set() for i, name in enumerate(field_names): + # pylint: disable = too-many-boolean-expressions if ( not all(c.isalnum() or c == "_" for c in name) or keyword.iskeyword(name) @@ -289,7 +280,9 @@ def _check_namedtuple_attributes(typename, attributes, rename=False): # for name in (typename, *attributes): if not isinstance(name, str): - raise AstroidTypeError("Type names and field names must be strings") + raise AstroidTypeError( + f"Type names and field names must be strings, not {type(name)!r}" + ) if not name.isidentifier(): raise AstroidValueError( "Type names and field names must be valid" + f"identifiers: {name!r}" @@ -363,7 +356,17 @@ def value(self): __members__ = [''] """ ) - class_node = infer_func_form(node, [enum_meta], context=context, enum=True)[0] + + # FIXME arguably, the base here shouldn't be the EnumMeta class definition + # itself, but a reference (Name) to it. Otherwise, the invariant that all + # children of a node have that node as their parent is broken. + class_node = infer_func_form( + node, + enum_meta, + parent=SYNTHETIC_ROOT, + context=context, + enum=True, + )[0] return iter([class_node.instantiate_class()]) @@ -515,11 +518,16 @@ def _name_(self): # know that it should be a string, so infer that as a guess. 
if "name" not in target_names: code = dedent( - """ - @property - def name(self): - return '' - """ + ''' + @property + def name(self): + """The name of the Enum member. + + This is a reconstruction by astroid: enums are too dynamic to understand, but we at least + know 'name' should be a string, so this is astroid's best guess. + """ + return '' + ''' ) name_dynamicclassattr = AstroidBuilder(AstroidManager()).string_build(code)[ "name" @@ -642,7 +650,7 @@ def _get_namedtuple_fields(node: nodes.Call) -> str: return field_names -def _is_enum_subclass(cls: astroid.ClassDef) -> bool: +def _is_enum_subclass(cls: nodes.ClassDef) -> bool: """Return whether cls is a subclass of an Enum.""" return cls.is_subtype_of("enum.Enum") diff --git a/astroid/brain/brain_nose.py b/astroid/brain/brain_nose.py deleted file mode 100644 index 742418f2d5..0000000000 --- a/astroid/brain/brain_nose.py +++ /dev/null @@ -1,79 +0,0 @@ -# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html -# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE -# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt - -"""Hooks for nose library.""" - -import re -import textwrap - -from astroid.bases import BoundMethod -from astroid.brain.helpers import register_module_extender -from astroid.builder import AstroidBuilder -from astroid.exceptions import InferenceError -from astroid.manager import AstroidManager -from astroid.nodes import List, Module - -CAPITALS = re.compile("([A-Z])") - - -def _pep8(name, caps=CAPITALS): - return caps.sub(lambda m: "_" + m.groups()[0].lower(), name) - - -def _nose_tools_functions(): - """Get an iterator of names and bound methods.""" - module = AstroidBuilder().string_build( - textwrap.dedent( - """ - import unittest - - class Test(unittest.TestCase): - pass - a = Test() - """ - ) - ) - try: - case = next(module["a"].infer()) - except (InferenceError, StopIteration): - return - for method in 
case.methods(): - if method.name.startswith("assert") and "_" not in method.name: - pep8_name = _pep8(method.name) - yield pep8_name, BoundMethod(method, case) - if method.name == "assertEqual": - # nose also exports assert_equals. - yield "assert_equals", BoundMethod(method, case) - - -def _nose_tools_transform(node): - for method_name, method in _nose_tools_functions(): - node.locals[method_name] = [method] - - -def _nose_tools_trivial_transform(): - """Custom transform for the nose.tools module.""" - stub = AstroidBuilder().string_build("""__all__ = []""") - all_entries = ["ok_", "eq_"] - - for pep8_name, method in _nose_tools_functions(): - all_entries.append(pep8_name) - stub[pep8_name] = method - - # Update the __all__ variable, since nose.tools - # does this manually with .append. - all_assign = stub["__all__"].parent - all_object = List(all_entries) - all_object.parent = all_assign - all_assign.value = all_object - return stub - - -def register(manager: AstroidManager) -> None: - register_module_extender( - manager, "nose.tools.trivial", _nose_tools_trivial_transform - ) - manager.register_transform( - Module, _nose_tools_transform, lambda n: n.name == "nose.tools" - ) diff --git a/astroid/brain/brain_numpy_core_fromnumeric.py b/astroid/brain/brain_numpy_core_fromnumeric.py index c6be20b6ea..ce4173c110 100644 --- a/astroid/brain/brain_numpy_core_fromnumeric.py +++ b/astroid/brain/brain_numpy_core_fromnumeric.py @@ -3,12 +3,13 @@ # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt """Astroid hooks for numpy.core.fromnumeric module.""" +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def numpy_core_fromnumeric_transform(): +def numpy_core_fromnumeric_transform() -> nodes.Module: return parse( """ def sum(a, axis=None, dtype=None, out=None, keepdims=None, initial=None): diff --git 
a/astroid/brain/brain_numpy_core_function_base.py b/astroid/brain/brain_numpy_core_function_base.py index 17e1ad11d2..b66ba5f567 100644 --- a/astroid/brain/brain_numpy_core_function_base.py +++ b/astroid/brain/brain_numpy_core_function_base.py @@ -6,13 +6,13 @@ import functools +from astroid import nodes from astroid.brain.brain_numpy_utils import ( - attribute_looks_like_numpy_member, - infer_numpy_member, + attribute_name_looks_like_numpy_member, + infer_numpy_attribute, ) from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager -from astroid.nodes.node_classes import Attribute METHODS_TO_BE_INFERRED = { "linspace": """def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, axis=0): @@ -25,10 +25,11 @@ def register(manager: AstroidManager) -> None: - for func_name, func_src in METHODS_TO_BE_INFERRED.items(): - inference_function = functools.partial(infer_numpy_member, func_src) - manager.register_transform( - Attribute, - inference_tip(inference_function), - functools.partial(attribute_looks_like_numpy_member, func_name), - ) + manager.register_transform( + nodes.Attribute, + inference_tip(functools.partial(infer_numpy_attribute, METHODS_TO_BE_INFERRED)), + functools.partial( + attribute_name_looks_like_numpy_member, + frozenset(METHODS_TO_BE_INFERRED.keys()), + ), + ) diff --git a/astroid/brain/brain_numpy_core_multiarray.py b/astroid/brain/brain_numpy_core_multiarray.py index 404e21cf1b..19850d3cde 100644 --- a/astroid/brain/brain_numpy_core_multiarray.py +++ b/astroid/brain/brain_numpy_core_multiarray.py @@ -6,19 +6,20 @@ import functools +from astroid import nodes from astroid.brain.brain_numpy_utils import ( - attribute_looks_like_numpy_member, - infer_numpy_member, - name_looks_like_numpy_member, + attribute_name_looks_like_numpy_member, + infer_numpy_attribute, + infer_numpy_name, + member_name_looks_like_numpy_member, ) from astroid.brain.helpers import register_module_extender from astroid.builder 
import parse from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager -from astroid.nodes.node_classes import Attribute, Name -def numpy_core_multiarray_transform(): +def numpy_core_multiarray_transform() -> nodes.Module: return parse( """ # different functions defined in multiarray.py @@ -91,15 +92,15 @@ def register(manager: AstroidManager) -> None: manager, "numpy.core.multiarray", numpy_core_multiarray_transform ) - for method_name, function_src in METHODS_TO_BE_INFERRED.items(): - inference_function = functools.partial(infer_numpy_member, function_src) - manager.register_transform( - Attribute, - inference_tip(inference_function), - functools.partial(attribute_looks_like_numpy_member, method_name), - ) - manager.register_transform( - Name, - inference_tip(inference_function), - functools.partial(name_looks_like_numpy_member, method_name), - ) + method_names = frozenset(METHODS_TO_BE_INFERRED.keys()) + + manager.register_transform( + nodes.Attribute, + inference_tip(functools.partial(infer_numpy_attribute, METHODS_TO_BE_INFERRED)), + functools.partial(attribute_name_looks_like_numpy_member, method_names), + ) + manager.register_transform( + nodes.Name, + inference_tip(functools.partial(infer_numpy_name, METHODS_TO_BE_INFERRED)), + functools.partial(member_name_looks_like_numpy_member, method_names), + ) diff --git a/astroid/brain/brain_numpy_core_numeric.py b/astroid/brain/brain_numpy_core_numeric.py index 7149c85daf..ee08e02139 100644 --- a/astroid/brain/brain_numpy_core_numeric.py +++ b/astroid/brain/brain_numpy_core_numeric.py @@ -6,18 +6,18 @@ import functools +from astroid import nodes from astroid.brain.brain_numpy_utils import ( - attribute_looks_like_numpy_member, - infer_numpy_member, + attribute_name_looks_like_numpy_member, + infer_numpy_attribute, ) from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.inference_tip import inference_tip from astroid.manager import 
AstroidManager -from astroid.nodes.node_classes import Attribute -def numpy_core_numeric_transform(): +def numpy_core_numeric_transform() -> nodes.Module: return parse( """ # different functions defined in numeric.py @@ -40,10 +40,11 @@ def register(manager: AstroidManager) -> None: manager, "numpy.core.numeric", numpy_core_numeric_transform ) - for method_name, function_src in METHODS_TO_BE_INFERRED.items(): - inference_function = functools.partial(infer_numpy_member, function_src) - manager.register_transform( - Attribute, - inference_tip(inference_function), - functools.partial(attribute_looks_like_numpy_member, method_name), - ) + manager.register_transform( + nodes.Attribute, + inference_tip(functools.partial(infer_numpy_attribute, METHODS_TO_BE_INFERRED)), + functools.partial( + attribute_name_looks_like_numpy_member, + frozenset(METHODS_TO_BE_INFERRED.keys()), + ), + ) diff --git a/astroid/brain/brain_numpy_core_numerictypes.py b/astroid/brain/brain_numpy_core_numerictypes.py index 6de299d72e..7111c837a3 100644 --- a/astroid/brain/brain_numpy_core_numerictypes.py +++ b/astroid/brain/brain_numpy_core_numerictypes.py @@ -5,13 +5,14 @@ # TODO(hippo91) : correct the methods signature. """Astroid hooks for numpy.core.numerictypes module.""" +from astroid import nodes from astroid.brain.brain_numpy_utils import numpy_supports_type_hints from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def numpy_core_numerictypes_transform(): +def numpy_core_numerictypes_transform() -> nodes.Module: # TODO: Uniformize the generic API with the ndarray one. # According to numpy doc the generic object should expose # the same API than ndarray. 
This has been done here partially diff --git a/astroid/brain/brain_numpy_core_umath.py b/astroid/brain/brain_numpy_core_umath.py index 61f3354408..a048a1c06b 100644 --- a/astroid/brain/brain_numpy_core_umath.py +++ b/astroid/brain/brain_numpy_core_umath.py @@ -7,12 +7,13 @@ # typecheck in `_emit_no_member` function) """Astroid hooks for numpy.core.umath module.""" +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def numpy_core_umath_transform(): +def numpy_core_umath_transform() -> nodes.Module: ufunc_optional_keyword_arguments = ( """out=None, where=True, casting='same_kind', order='K', """ """dtype=None, subok=True""" diff --git a/astroid/brain/brain_numpy_ma.py b/astroid/brain/brain_numpy_ma.py index 743e462d20..e61acb5c15 100644 --- a/astroid/brain/brain_numpy_ma.py +++ b/astroid/brain/brain_numpy_ma.py @@ -4,12 +4,13 @@ """Astroid hooks for numpy ma module.""" +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def numpy_ma_transform(): +def numpy_ma_transform() -> nodes.Module: """ Infer the call of various numpy.ma functions. 
diff --git a/astroid/brain/brain_numpy_ndarray.py b/astroid/brain/brain_numpy_ndarray.py index 5748421fb9..c98adb1560 100644 --- a/astroid/brain/brain_numpy_ndarray.py +++ b/astroid/brain/brain_numpy_ndarray.py @@ -5,12 +5,12 @@ """Astroid hooks for numpy ndarray class.""" from __future__ import annotations +from astroid import nodes from astroid.brain.brain_numpy_utils import numpy_supports_type_hints from astroid.builder import extract_node from astroid.context import InferenceContext from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager -from astroid.nodes.node_classes import Attribute def infer_numpy_ndarray(node, context: InferenceContext | None = None): @@ -151,13 +151,13 @@ def __class_getitem__(cls, value): return node.infer(context=context) -def _looks_like_numpy_ndarray(node) -> bool: - return isinstance(node, Attribute) and node.attrname == "ndarray" +def _looks_like_numpy_ndarray(node: nodes.Attribute) -> bool: + return node.attrname == "ndarray" def register(manager: AstroidManager) -> None: manager.register_transform( - Attribute, + nodes.Attribute, inference_tip(infer_numpy_ndarray), _looks_like_numpy_ndarray, ) diff --git a/astroid/brain/brain_numpy_random_mtrand.py b/astroid/brain/brain_numpy_random_mtrand.py index 83b1ab06ad..be1c957861 100644 --- a/astroid/brain/brain_numpy_random_mtrand.py +++ b/astroid/brain/brain_numpy_random_mtrand.py @@ -4,12 +4,13 @@ # TODO(hippo91) : correct the functions return types """Astroid hooks for numpy.random.mtrand module.""" +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def numpy_random_mtrand_transform(): +def numpy_random_mtrand_transform() -> nodes.Module: return parse( """ def beta(a, b, size=None): return uninferable diff --git a/astroid/brain/brain_numpy_utils.py b/astroid/brain/brain_numpy_utils.py index 47f24433bd..1a8f66573d 100644 --- 
a/astroid/brain/brain_numpy_utils.py +++ b/astroid/brain/brain_numpy_utils.py @@ -6,9 +6,9 @@ from __future__ import annotations +from astroid import nodes from astroid.builder import extract_node from astroid.context import InferenceContext -from astroid.nodes.node_classes import Attribute, Import, Name # Class subscript is available in numpy starting with version 1.20.0 NUMPY_VERSION_TYPE_HINTS_SUPPORT = ("1", "20", "0") @@ -34,12 +34,23 @@ def _get_numpy_version() -> tuple[str, str, str]: return ("0", "0", "0") -def infer_numpy_member(src, node, context: InferenceContext | None = None): - node = extract_node(src) - return node.infer(context=context) +def infer_numpy_name( + sources: dict[str, str], node: nodes.Name, context: InferenceContext | None = None +): + extracted_node = extract_node(sources[node.name]) + return extracted_node.infer(context=context) -def _is_a_numpy_module(node: Name) -> bool: +def infer_numpy_attribute( + sources: dict[str, str], + node: nodes.Attribute, + context: InferenceContext | None = None, +): + extracted_node = extract_node(sources[node.attrname]) + return extracted_node.infer(context=context) + + +def _is_a_numpy_module(node: nodes.Name) -> bool: """ Returns True if the node is a representation of a numpy module. @@ -53,7 +64,7 @@ def _is_a_numpy_module(node: Name) -> bool: """ module_nickname = node.name potential_import_target = [ - x for x in node.lookup(module_nickname)[1] if isinstance(x, Import) + x for x in node.lookup(module_nickname)[1] if isinstance(x, nodes.Import) ] return any( ("numpy", module_nickname) in target.names or ("numpy", None) in target.names @@ -61,21 +72,23 @@ def _is_a_numpy_module(node: Name) -> bool: ) -def name_looks_like_numpy_member(member_name: str, node: Name) -> bool: +def member_name_looks_like_numpy_member( + member_names: frozenset[str], node: nodes.Name +) -> bool: """ - Returns True if the Name is a member of numpy whose - name is member_name. 
+ Returns True if the Name node's name matches a member name from numpy """ - return node.name == member_name and node.root().name.startswith("numpy") + return node.name in member_names and node.root().name.startswith("numpy") -def attribute_looks_like_numpy_member(member_name: str, node: Attribute) -> bool: +def attribute_name_looks_like_numpy_member( + member_names: frozenset[str], node: nodes.Attribute +) -> bool: """ - Returns True if the Attribute is a member of numpy whose - name is member_name. + Returns True if the Attribute node's name matches a member name from numpy """ return ( - node.attrname == member_name - and isinstance(node.expr, Name) + node.attrname in member_names + and isinstance(node.expr, nodes.Name) and _is_a_numpy_module(node.expr) ) diff --git a/astroid/brain/brain_pathlib.py b/astroid/brain/brain_pathlib.py index 186a0c4f38..d1d1bda7f2 100644 --- a/astroid/brain/brain_pathlib.py +++ b/astroid/brain/brain_pathlib.py @@ -6,10 +6,11 @@ from collections.abc import Iterator -from astroid import bases, context, inference_tip, nodes +from astroid import bases, context, nodes from astroid.builder import _extract_single_node from astroid.const import PY313 from astroid.exceptions import InferenceError, UseInferenceDefault +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager PATH_TEMPLATE = """ diff --git a/astroid/brain/brain_pkg_resources.py b/astroid/brain/brain_pkg_resources.py index a844d15b31..e2bd669100 100644 --- a/astroid/brain/brain_pkg_resources.py +++ b/astroid/brain/brain_pkg_resources.py @@ -2,12 +2,13 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt -from astroid import parse +from astroid import nodes from astroid.brain.helpers import register_module_extender +from astroid.builder import parse from astroid.manager import AstroidManager -def pkg_resources_transform(): +def 
pkg_resources_transform() -> nodes.Module: return parse( """ def require(*requirements): diff --git a/astroid/brain/brain_pytest.py b/astroid/brain/brain_pytest.py index 0e0db39041..6d06267b3d 100644 --- a/astroid/brain/brain_pytest.py +++ b/astroid/brain/brain_pytest.py @@ -3,12 +3,13 @@ # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt """Astroid hooks for pytest.""" +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import AstroidBuilder from astroid.manager import AstroidManager -def pytest_transform(): +def pytest_transform() -> nodes.Module: return AstroidBuilder(AstroidManager()).string_build( """ diff --git a/astroid/brain/brain_qt.py b/astroid/brain/brain_qt.py index 4badfce840..30581e0ea5 100644 --- a/astroid/brain/brain_qt.py +++ b/astroid/brain/brain_qt.py @@ -4,9 +4,9 @@ """Astroid hooks for the PyQT library.""" -from astroid import nodes, parse +from astroid import nodes from astroid.brain.helpers import register_module_extender -from astroid.builder import AstroidBuilder +from astroid.builder import AstroidBuilder, parse from astroid.manager import AstroidManager diff --git a/astroid/brain/brain_random.py b/astroid/brain/brain_random.py index 48cc121461..84b4f4eb05 100644 --- a/astroid/brain/brain_random.py +++ b/astroid/brain/brain_random.py @@ -6,27 +6,18 @@ import random +from astroid import nodes from astroid.context import InferenceContext from astroid.exceptions import UseInferenceDefault from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager -from astroid.nodes.node_classes import ( - Attribute, - Call, - Const, - EvaluatedObject, - List, - Name, - Set, - Tuple, -) from astroid.util import safe_infer -ACCEPTED_ITERABLES_FOR_SAMPLE = (List, Set, Tuple) +ACCEPTED_ITERABLES_FOR_SAMPLE = (nodes.List, nodes.Set, nodes.Tuple) def _clone_node_with_lineno(node, parent, lineno): - if isinstance(node, EvaluatedObject): + if 
isinstance(node, nodes.EvaluatedObject): node = node.original cls = node.__class__ other_fields = node._other_fields @@ -52,7 +43,7 @@ def infer_random_sample(node, context: InferenceContext | None = None): raise UseInferenceDefault inferred_length = safe_infer(node.args[1], context=context) - if not isinstance(inferred_length, Const): + if not isinstance(inferred_length, nodes.Const): raise UseInferenceDefault if not isinstance(inferred_length.value, int): raise UseInferenceDefault @@ -73,7 +64,7 @@ def infer_random_sample(node, context: InferenceContext | None = None): except ValueError as exc: raise UseInferenceDefault from exc - new_node = List( + new_node = nodes.List( lineno=node.lineno, col_offset=node.col_offset, parent=node.scope(), @@ -90,14 +81,14 @@ def infer_random_sample(node, context: InferenceContext | None = None): def _looks_like_random_sample(node) -> bool: func = node.func - if isinstance(func, Attribute): + if isinstance(func, nodes.Attribute): return func.attrname == "sample" - if isinstance(func, Name): + if isinstance(func, nodes.Name): return func.name == "sample" return False def register(manager: AstroidManager) -> None: manager.register_transform( - Call, inference_tip(infer_random_sample), _looks_like_random_sample + nodes.Call, inference_tip(infer_random_sample), _looks_like_random_sample ) diff --git a/astroid/brain/brain_re.py b/astroid/brain/brain_re.py index 19f2a5b39c..64646454b6 100644 --- a/astroid/brain/brain_re.py +++ b/astroid/brain/brain_re.py @@ -4,10 +4,11 @@ from __future__ import annotations -from astroid import context, inference_tip, nodes +from astroid import context, nodes from astroid.brain.helpers import register_module_extender from astroid.builder import _extract_single_node, parse from astroid.const import PY311_PLUS +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager diff --git a/astroid/brain/brain_regex.py b/astroid/brain/brain_regex.py index 5a2d81e809..70fb94610c 
100644 --- a/astroid/brain/brain_regex.py +++ b/astroid/brain/brain_regex.py @@ -4,9 +4,10 @@ from __future__ import annotations -from astroid import context, inference_tip, nodes +from astroid import context, nodes from astroid.brain.helpers import register_module_extender from astroid.builder import _extract_single_node, parse +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager diff --git a/astroid/brain/brain_responses.py b/astroid/brain/brain_responses.py index 0a0de8b558..f2e606976b 100644 --- a/astroid/brain/brain_responses.py +++ b/astroid/brain/brain_responses.py @@ -10,12 +10,13 @@ See: https://github.com/getsentry/responses/blob/master/responses.py """ +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def responses_funcs(): +def responses_funcs() -> nodes.Module: return parse( """ DELETE = "DELETE" diff --git a/astroid/brain/brain_scipy_signal.py b/astroid/brain/brain_scipy_signal.py index 7d17a1e953..a7a257628a 100755 --- a/astroid/brain/brain_scipy_signal.py +++ b/astroid/brain/brain_scipy_signal.py @@ -3,12 +3,13 @@ # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt """Astroid hooks for scipy.signal module.""" +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def scipy_signal(): +def scipy_signal() -> nodes.Module: return parse( """ # different functions defined in scipy.signals diff --git a/astroid/brain/brain_sqlalchemy.py b/astroid/brain/brain_sqlalchemy.py index d37b505bf2..8410d9e12f 100644 --- a/astroid/brain/brain_sqlalchemy.py +++ b/astroid/brain/brain_sqlalchemy.py @@ -2,12 +2,13 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from astroid 
import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def _session_transform(): +def _session_transform() -> nodes.Module: return parse( """ from sqlalchemy.orm.session import Session diff --git a/astroid/brain/brain_ssl.py b/astroid/brain/brain_ssl.py index 23d7ee4f73..6b4fc5c212 100644 --- a/astroid/brain/brain_ssl.py +++ b/astroid/brain/brain_ssl.py @@ -4,9 +4,10 @@ """Astroid hooks for the ssl library.""" -from astroid import parse +from astroid import nodes from astroid.brain.helpers import register_module_extender -from astroid.const import PY310_PLUS, PY312_PLUS +from astroid.builder import parse +from astroid.const import PY312_PLUS from astroid.manager import AstroidManager @@ -17,9 +18,7 @@ class VerifyFlags(_IntFlag): VERIFY_CRL_CHECK_LEAF = 1 VERIFY_CRL_CHECK_CHAIN = 2 VERIFY_X509_STRICT = 3 - VERIFY_X509_TRUSTED_FIRST = 4""" - if PY310_PLUS: - enum += """ + VERIFY_X509_TRUSTED_FIRST = 4 VERIFY_ALLOW_PROXY_CERTS = 5 VERIFY_X509_PARTIAL_CHAIN = 6 """ @@ -49,7 +48,7 @@ class Options(_IntFlag): return enum -def ssl_transform(): +def ssl_transform() -> nodes.Module: return parse( f""" # Import necessary for conversion of objects defined in C into enums diff --git a/astroid/brain/brain_statistics.py b/astroid/brain/brain_statistics.py new file mode 100644 index 0000000000..5420ef902c --- /dev/null +++ b/astroid/brain/brain_statistics.py @@ -0,0 +1,73 @@ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE +# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt + +"""Astroid hooks for understanding statistics library module. + +Provides inference improvements for statistics module functions that have +complex runtime behavior difficult to analyze statically. 
+""" + +from __future__ import annotations + +from collections.abc import Iterator +from typing import TYPE_CHECKING + +from astroid import nodes +from astroid.context import InferenceContext +from astroid.inference_tip import inference_tip +from astroid.manager import AstroidManager +from astroid.util import Uninferable + +if TYPE_CHECKING: + from astroid.typing import InferenceResult + + +def _looks_like_statistics_quantiles(node: nodes.Call) -> bool: + """Check if this is a call to statistics.quantiles.""" + match node.func: + case nodes.Attribute(expr=nodes.Name(name="statistics"), attrname="quantiles"): + # Case 1: statistics.quantiles(...) + return True + case nodes.Name(name="quantiles"): + # Case 2: from statistics import quantiles; quantiles(...) + # Check if quantiles was imported from statistics + try: + frame = node.frame() + if "quantiles" in frame.locals: + # Look for import from statistics + for stmt in frame.body: + if ( + isinstance(stmt, nodes.ImportFrom) + and stmt.modname == "statistics" + and any(name[0] == "quantiles" for name in stmt.names or []) + ): + return True + except (AttributeError, TypeError): + # If we can't determine the import context, be conservative + pass + return False + + +def infer_statistics_quantiles( + node: nodes.Call, context: InferenceContext | None = None +) -> Iterator[InferenceResult]: + """Infer the result of statistics.quantiles() calls. + + Returns Uninferable because quantiles() has complex runtime behavior + that cannot be statically analyzed, preventing false positives in + pylint's unbalanced-tuple-unpacking checker. + + statistics.quantiles() returns a list with (n-1) elements, but static + analysis sees only the empty list initializations in the function body. 
+ """ + yield Uninferable + + +def register(manager: AstroidManager) -> None: + """Register statistics-specific inference improvements.""" + manager.register_transform( + nodes.Call, + inference_tip(infer_statistics_quantiles), + _looks_like_statistics_quantiles, + ) diff --git a/astroid/brain/brain_subprocess.py b/astroid/brain/brain_subprocess.py index fbc088a680..3a99802c97 100644 --- a/astroid/brain/brain_subprocess.py +++ b/astroid/brain/brain_subprocess.py @@ -4,13 +4,14 @@ import textwrap +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse -from astroid.const import PY310_PLUS, PY311_PLUS +from astroid.const import PY311_PLUS from astroid.manager import AstroidManager -def _subprocess_transform(): +def _subprocess_transform() -> nodes.Module: communicate = (bytes("string", "ascii"), bytes("string", "ascii")) communicate_signature = "def communicate(self, input=None, timeout=None)" args = """\ @@ -18,10 +19,7 @@ def _subprocess_transform(): preexec_fn=None, close_fds=True, shell=False, cwd=None, env=None, universal_newlines=None, startupinfo=None, creationflags=0, restore_signals=True, start_new_session=False, pass_fds=(), *, encoding=None, errors=None, text=None, - user=None, group=None, extra_groups=None, umask=-1""" - - if PY310_PLUS: - args += ", pipesize=-1" + user=None, group=None, extra_groups=None, umask=-1, pipesize=-1""" if PY311_PLUS: args += ", process_group=None" diff --git a/astroid/brain/brain_threading.py b/astroid/brain/brain_threading.py index 6c6f29bf06..95af2db069 100644 --- a/astroid/brain/brain_threading.py +++ b/astroid/brain/brain_threading.py @@ -2,12 +2,13 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import 
AstroidManager -def _thread_transform(): +def _thread_transform() -> nodes.Module: return parse( """ class lock(object): diff --git a/astroid/brain/brain_type.py b/astroid/brain/brain_type.py index d3461e68d4..8391e59971 100644 --- a/astroid/brain/brain_type.py +++ b/astroid/brain/brain_type.py @@ -22,21 +22,23 @@ from __future__ import annotations -from astroid import extract_node, inference_tip, nodes +from astroid import nodes +from astroid.builder import extract_node from astroid.context import InferenceContext from astroid.exceptions import UseInferenceDefault +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager -def _looks_like_type_subscript(node) -> bool: +def _looks_like_type_subscript(node: nodes.Name) -> bool: """ Try to figure out if a Name node is used inside a type related subscript. :param node: node to check - :type node: astroid.nodes.node_classes.NodeNG + :type node: astroid.nodes.NodeNG :return: whether the node is a Name node inside a type related subscript """ - if isinstance(node, nodes.Name) and isinstance(node.parent, nodes.Subscript): + if isinstance(node.parent, nodes.Subscript): return node.name == "type" return False @@ -46,12 +48,12 @@ def infer_type_sub(node, context: InferenceContext | None = None): Infer a type[...] subscript. 
:param node: node to infer - :type node: astroid.nodes.node_classes.NodeNG + :type node: astroid.nodes.NodeNG :return: the inferred node :rtype: nodes.NodeNG """ node_scope, _ = node.scope().lookup("type") - if not isinstance(node_scope, nodes.Module) or node_scope.qname() != "builtins": + if not (isinstance(node_scope, nodes.Module) and node_scope.qname() == "builtins"): raise UseInferenceDefault() class_src = """ class type: diff --git a/astroid/brain/brain_typing.py b/astroid/brain/brain_typing.py index ed6dc46874..217a803173 100644 --- a/astroid/brain/brain_typing.py +++ b/astroid/brain/brain_typing.py @@ -12,9 +12,9 @@ from functools import partial from typing import Final -from astroid import context, extract_node, inference_tip +from astroid import context, nodes from astroid.brain.helpers import register_module_extender -from astroid.builder import AstroidBuilder, _extract_single_node +from astroid.builder import AstroidBuilder, _extract_single_node, extract_node from astroid.const import PY312_PLUS, PY313_PLUS, PY314_PLUS from astroid.exceptions import ( AstroidSyntaxError, @@ -22,19 +22,8 @@ InferenceError, UseInferenceDefault, ) +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager -from astroid.nodes.node_classes import ( - Assign, - AssignName, - Attribute, - Call, - Const, - JoinedStr, - Name, - NodeNG, - Subscript, -) -from astroid.nodes.scoped_nodes import ClassDef, FunctionDef TYPING_TYPEVARS = {"TypeVar", "NewType"} TYPING_TYPEVARS_QUALIFIED: Final = { @@ -77,7 +66,7 @@ class {0}(metaclass=Meta): "typing.MutableMapping", "typing.Sequence", "typing.MutableSequence", - "typing.ByteString", # removed in 3.14 + "typing.ByteString", # scheduled for removal in 3.17 "typing.Tuple", "typing.List", "typing.Deque", @@ -111,16 +100,16 @@ def __class_getitem__(cls, item): def looks_like_typing_typevar_or_newtype(node) -> bool: func = node.func - if isinstance(func, Attribute): + if isinstance(func, nodes.Attribute): 
return func.attrname in TYPING_TYPEVARS - if isinstance(func, Name): + if isinstance(func, nodes.Name): return func.name in TYPING_TYPEVARS return False def infer_typing_typevar_or_newtype( - node: Call, context_itton: context.InferenceContext | None = None -) -> Iterator[ClassDef]: + node: nodes.Call, context_itton: context.InferenceContext | None = None +) -> Iterator[nodes.ClassDef]: """Infer a typing.TypeVar(...) or typing.NewType(...) call.""" try: func = next(node.func.infer(context=context_itton)) @@ -132,7 +121,7 @@ def infer_typing_typevar_or_newtype( if not node.args: raise UseInferenceDefault # Cannot infer from a dynamic class name (f-string) - if isinstance(node.args[0], JoinedStr): + if isinstance(node.args[0], nodes.JoinedStr): raise UseInferenceDefault typename = node.args[0].as_string().strip("'") @@ -145,18 +134,18 @@ def infer_typing_typevar_or_newtype( def _looks_like_typing_subscript(node) -> bool: """Try to figure out if a Subscript node *might* be a typing-related subscript.""" - if isinstance(node, Name): + if isinstance(node, nodes.Name): return node.name in TYPING_MEMBERS - if isinstance(node, Attribute): + if isinstance(node, nodes.Attribute): return node.attrname in TYPING_MEMBERS - if isinstance(node, Subscript): + if isinstance(node, nodes.Subscript): return _looks_like_typing_subscript(node.value) return False def infer_typing_attr( - node: Subscript, ctx: context.InferenceContext | None = None -) -> Iterator[ClassDef]: + node: nodes.Subscript, ctx: context.InferenceContext | None = None +) -> Iterator[nodes.ClassDef]: """Infer a typing.X[...] subscript.""" try: value = next(node.value.infer()) # type: ignore[union-attr] # value shouldn't be None for Subscript. 
@@ -169,14 +158,14 @@ def infer_typing_attr( if ( PY313_PLUS - and isinstance(value, FunctionDef) + and isinstance(value, nodes.FunctionDef) and value.qname() == "typing.Annotated" ): # typing.Annotated is a FunctionDef on 3.13+ node._explicit_inference = lambda node, context: iter([value]) return iter([value]) - if isinstance(value, ClassDef) and value.qname() in { + if isinstance(value, nodes.ClassDef) and value.qname() in { "typing.Generic", "typing.Annotated", "typing_extensions.Annotated", @@ -187,7 +176,7 @@ def infer_typing_attr( func_to_add = _extract_single_node(CLASS_GETITEM_TEMPLATE) value.locals["__class_getitem__"] = [func_to_add] if ( - isinstance(node.parent, ClassDef) + isinstance(node.parent, nodes.ClassDef) and node in node.parent.bases and getattr(node.parent, "__cache", None) ): @@ -204,14 +193,14 @@ def infer_typing_attr( return node.infer(context=ctx) -def _looks_like_generic_class_pep695(node: ClassDef) -> bool: +def _looks_like_generic_class_pep695(node: nodes.ClassDef) -> bool: """Check if class is using type parameter. Python 3.12+.""" return len(node.type_params) > 0 def infer_typing_generic_class_pep695( - node: ClassDef, ctx: context.InferenceContext | None = None -) -> Iterator[ClassDef]: + node: nodes.ClassDef, ctx: context.InferenceContext | None = None +) -> Iterator[nodes.ClassDef]: """Add __class_getitem__ for generic classes. 
Python 3.12+.""" func_to_add = _extract_single_node(CLASS_GETITEM_TEMPLATE) node.locals["__class_getitem__"] = [func_to_add] @@ -219,17 +208,17 @@ def infer_typing_generic_class_pep695( def _looks_like_typedDict( # pylint: disable=invalid-name - node: FunctionDef | ClassDef, + node: nodes.FunctionDef | nodes.ClassDef, ) -> bool: """Check if node is TypedDict FunctionDef.""" return node.qname() in TYPING_TYPEDDICT_QUALIFIED def infer_typedDict( # pylint: disable=invalid-name - node: FunctionDef, ctx: context.InferenceContext | None = None -) -> Iterator[ClassDef]: + node: nodes.FunctionDef, ctx: context.InferenceContext | None = None +) -> Iterator[nodes.ClassDef]: """Replace TypedDict FunctionDef with ClassDef.""" - class_def = ClassDef( + class_def = nodes.ClassDef( name="TypedDict", lineno=node.lineno, col_offset=node.col_offset, @@ -243,7 +232,7 @@ def infer_typedDict( # pylint: disable=invalid-name return iter([class_def]) -def _looks_like_typing_alias(node: Call) -> bool: +def _looks_like_typing_alias(node: nodes.Call) -> bool: """ Returns True if the node corresponds to a call to _alias function. 
@@ -254,18 +243,18 @@ def _looks_like_typing_alias(node: Call) -> bool: :param node: call node """ return ( - isinstance(node.func, Name) + isinstance(node.func, nodes.Name) # TODO: remove _DeprecatedGenericAlias when Py3.14 min and node.func.name in {"_alias", "_DeprecatedGenericAlias"} and len(node.args) == 2 and ( # _alias function works also for builtins object such as list and dict - isinstance(node.args[0], (Attribute, Name)) + isinstance(node.args[0], (nodes.Attribute, nodes.Name)) ) ) -def _forbid_class_getitem_access(node: ClassDef) -> None: +def _forbid_class_getitem_access(node: nodes.ClassDef) -> None: """Disable the access to __class_getitem__ method for the node in parameters.""" def full_raiser(origin_func, attr, *args, **kwargs): @@ -290,8 +279,8 @@ def full_raiser(origin_func, attr, *args, **kwargs): def infer_typing_alias( - node: Call, ctx: context.InferenceContext | None = None -) -> Iterator[ClassDef]: + node: nodes.Call, ctx: context.InferenceContext | None = None +) -> Iterator[nodes.ClassDef]: """ Infers the call to _alias function Insert ClassDef, with same name as aliased class, @@ -302,10 +291,10 @@ def infer_typing_alias( # TODO: evaluate if still necessary when Py3.12 is minimum """ - if ( - not isinstance(node.parent, Assign) - or not len(node.parent.targets) == 1 - or not isinstance(node.parent.targets[0], AssignName) + if not ( + isinstance(node.parent, nodes.Assign) + and len(node.parent.targets) == 1 + and isinstance(node.parent.targets[0], nodes.AssignName) ): raise UseInferenceDefault try: @@ -315,7 +304,7 @@ def infer_typing_alias( assign_name = node.parent.targets[0] - class_def = ClassDef( + class_def = nodes.ClassDef( name=assign_name.name, lineno=assign_name.lineno, col_offset=assign_name.col_offset, @@ -323,13 +312,13 @@ def infer_typing_alias( end_lineno=assign_name.end_lineno, end_col_offset=assign_name.end_col_offset, ) - if isinstance(res, ClassDef): + if isinstance(res, nodes.ClassDef): # Only add `res` as base if it's 
a `ClassDef` # This isn't the case for `typing.Pattern` and `typing.Match` class_def.postinit(bases=[res], body=[], decorators=None) maybe_type_var = node.args[1] - if isinstance(maybe_type_var, Const) and maybe_type_var.value > 0: + if isinstance(maybe_type_var, nodes.Const) and maybe_type_var.value > 0: # If typing alias is subscriptable, add `__class_getitem__` to ClassDef func_to_add = _extract_single_node(CLASS_GETITEM_TEMPLATE) class_def.locals["__class_getitem__"] = [func_to_add] @@ -344,7 +333,7 @@ def infer_typing_alias( return iter([class_def]) -def _looks_like_special_alias(node: Call) -> bool: +def _looks_like_special_alias(node: nodes.Call) -> bool: """Return True if call is for Tuple or Callable alias. In PY37 and PY38 the call is to '_VariadicGenericAlias' with 'tuple' as @@ -356,28 +345,32 @@ def _looks_like_special_alias(node: Call) -> bool: PY37: Callable = _VariadicGenericAlias(collections.abc.Callable, (), special=True) PY39: Callable = _CallableType(collections.abc.Callable, 2) """ - return isinstance(node.func, Name) and ( - ( - node.func.name == "_TupleType" - and isinstance(node.args[0], Name) - and node.args[0].name == "tuple" - ) - or ( - node.func.name == "_CallableType" - and isinstance(node.args[0], Attribute) - and node.args[0].as_string() == "collections.abc.Callable" + return ( + isinstance(node.func, nodes.Name) + and node.args + and ( + ( + node.func.name == "_TupleType" + and isinstance(node.args[0], nodes.Name) + and node.args[0].name == "tuple" + ) + or ( + node.func.name == "_CallableType" + and isinstance(node.args[0], nodes.Attribute) + and node.args[0].as_string() == "collections.abc.Callable" + ) ) ) def infer_special_alias( - node: Call, ctx: context.InferenceContext | None = None -) -> Iterator[ClassDef]: + node: nodes.Call, ctx: context.InferenceContext | None = None +) -> Iterator[nodes.ClassDef]: """Infer call to tuple alias as new subscriptable class typing.Tuple.""" if not ( - isinstance(node.parent, Assign) + 
isinstance(node.parent, nodes.Assign) and len(node.parent.targets) == 1 - and isinstance(node.parent.targets[0], AssignName) + and isinstance(node.parent.targets[0], nodes.AssignName) ): raise UseInferenceDefault try: @@ -386,7 +379,7 @@ def infer_special_alias( raise InferenceError(node=node.args[0], context=ctx) from e assign_name = node.parent.targets[0] - class_def = ClassDef( + class_def = nodes.ClassDef( name=assign_name.name, parent=node.parent, lineno=assign_name.lineno, @@ -402,28 +395,27 @@ def infer_special_alias( return iter([class_def]) -def _looks_like_typing_cast(node: Call) -> bool: - return isinstance(node, Call) and ( - (isinstance(node.func, Name) and node.func.name == "cast") - or (isinstance(node.func, Attribute) and node.func.attrname == "cast") +def _looks_like_typing_cast(node: nodes.Call) -> bool: + return (isinstance(node.func, nodes.Name) and node.func.name == "cast") or ( + isinstance(node.func, nodes.Attribute) and node.func.attrname == "cast" ) def infer_typing_cast( - node: Call, ctx: context.InferenceContext | None = None -) -> Iterator[NodeNG]: + node: nodes.Call, ctx: context.InferenceContext | None = None +) -> Iterator[nodes.NodeNG]: """Infer call to cast() returning same type as casted-from var.""" - if not isinstance(node.func, (Name, Attribute)): + if not isinstance(node.func, (nodes.Name, nodes.Attribute)): raise UseInferenceDefault try: func = next(node.func.infer(context=ctx)) except (InferenceError, StopIteration) as exc: raise UseInferenceDefault from exc - if ( - not isinstance(func, FunctionDef) - or func.qname() != "typing.cast" - or len(node.args) != 2 + if not ( + isinstance(func, nodes.FunctionDef) + and func.qname() == "typing.cast" + and len(node.args) == 2 ): raise UseInferenceDefault @@ -470,6 +462,7 @@ def __class_getitem__(cls, item): return cls if PY314_PLUS: code += textwrap.dedent( """ + from annotationlib import ForwardRef class Union: @classmethod def __class_getitem__(cls, item): return cls @@ -480,32 
+473,32 @@ def __class_getitem__(cls, item): return cls def register(manager: AstroidManager) -> None: manager.register_transform( - Call, + nodes.Call, inference_tip(infer_typing_typevar_or_newtype), looks_like_typing_typevar_or_newtype, ) manager.register_transform( - Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript + nodes.Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript ) manager.register_transform( - Call, inference_tip(infer_typing_cast), _looks_like_typing_cast + nodes.Call, inference_tip(infer_typing_cast), _looks_like_typing_cast ) manager.register_transform( - FunctionDef, inference_tip(infer_typedDict), _looks_like_typedDict + nodes.FunctionDef, inference_tip(infer_typedDict), _looks_like_typedDict ) manager.register_transform( - Call, inference_tip(infer_typing_alias), _looks_like_typing_alias + nodes.Call, inference_tip(infer_typing_alias), _looks_like_typing_alias ) manager.register_transform( - Call, inference_tip(infer_special_alias), _looks_like_special_alias + nodes.Call, inference_tip(infer_special_alias), _looks_like_special_alias ) if PY312_PLUS: register_module_extender(manager, "typing", _typing_transform) manager.register_transform( - ClassDef, + nodes.ClassDef, inference_tip(infer_typing_generic_class_pep695), _looks_like_generic_class_pep695, ) diff --git a/astroid/brain/brain_unittest.py b/astroid/brain/brain_unittest.py index a94df0a68e..4103ce0600 100644 --- a/astroid/brain/brain_unittest.py +++ b/astroid/brain/brain_unittest.py @@ -3,12 +3,13 @@ # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt """Astroid hooks for unittest module.""" +from astroid import nodes from astroid.brain.helpers import register_module_extender from astroid.builder import parse from astroid.manager import AstroidManager -def IsolatedAsyncioTestCaseImport(): +def IsolatedAsyncioTestCaseImport() -> nodes.Module: """ In the unittest package, the IsolatedAsyncioTestCase class is 
imported lazily. diff --git a/astroid/brain/brain_uuid.py b/astroid/brain/brain_uuid.py index 37800b8e03..4405a62b38 100644 --- a/astroid/brain/brain_uuid.py +++ b/astroid/brain/brain_uuid.py @@ -3,17 +3,16 @@ # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt """Astroid hooks for the UUID module.""" +from astroid import nodes from astroid.manager import AstroidManager -from astroid.nodes.node_classes import Const -from astroid.nodes.scoped_nodes import ClassDef -def _patch_uuid_class(node: ClassDef) -> None: +def _patch_uuid_class(node: nodes.ClassDef) -> None: # The .int member is patched using __dict__ - node.locals["int"] = [Const(0, parent=node)] + node.locals["int"] = [nodes.Const(0, parent=node)] def register(manager: AstroidManager) -> None: manager.register_transform( - ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID" + nodes.ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID" ) diff --git a/astroid/brain/helpers.py b/astroid/brain/helpers.py index 79d778b5a3..0064a1f18b 100644 --- a/astroid/brain/helpers.py +++ b/astroid/brain/helpers.py @@ -5,10 +5,15 @@ from __future__ import annotations from collections.abc import Callable +from typing import TYPE_CHECKING +from astroid.exceptions import InferenceError from astroid.manager import AstroidManager from astroid.nodes.scoped_nodes import Module +if TYPE_CHECKING: + from astroid.nodes.node_ng import NodeNG + def register_module_extender( manager: AstroidManager, module_name: str, get_extension_mod: Callable[[], Module] @@ -47,7 +52,6 @@ def register_all_brains(manager: AstroidManager) -> None: brain_mechanize, brain_multiprocessing, brain_namedtuple_enum, - brain_nose, brain_numpy_core_einsumfunc, brain_numpy_core_fromnumeric, brain_numpy_core_function_base, @@ -71,6 +75,7 @@ def register_all_brains(manager: AstroidManager) -> None: brain_six, brain_sqlalchemy, brain_ssl, + brain_statistics, brain_subprocess, brain_threading, brain_type, @@ 
-99,7 +104,6 @@ def register_all_brains(manager: AstroidManager) -> None: brain_mechanize.register(manager) brain_multiprocessing.register(manager) brain_namedtuple_enum.register(manager) - brain_nose.register(manager) brain_numpy_core_einsumfunc.register(manager) brain_numpy_core_fromnumeric.register(manager) brain_numpy_core_function_base.register(manager) @@ -123,9 +127,20 @@ def register_all_brains(manager: AstroidManager) -> None: brain_six.register(manager) brain_sqlalchemy.register(manager) brain_ssl.register(manager) + brain_statistics.register(manager) brain_subprocess.register(manager) brain_threading.register(manager) brain_type.register(manager) brain_typing.register(manager) brain_unittest.register(manager) brain_uuid.register(manager) + + +def is_class_var(node: NodeNG) -> bool: + """Return True if node is a ClassVar, with or without subscripting.""" + try: + inferred = next(node.infer()) + except (InferenceError, StopIteration): + return False + + return getattr(inferred, "name", "") == "ClassVar" diff --git a/astroid/builder.py b/astroid/builder.py index 22724fa9af..f166ab492d 100644 --- a/astroid/builder.py +++ b/astroid/builder.py @@ -16,15 +16,18 @@ import textwrap import types import warnings -from collections.abc import Iterator, Sequence +from collections.abc import Collection, Iterator, Sequence from io import TextIOWrapper from tokenize import detect_encoding +from typing import TYPE_CHECKING, cast from astroid import bases, modutils, nodes, raw_building, rebuilder, util from astroid._ast import ParserModule, get_parser_module -from astroid.const import PY312_PLUS +from astroid.const import PY312_PLUS, PY314_PLUS from astroid.exceptions import AstroidBuildingError, AstroidSyntaxError, InferenceError -from astroid.manager import AstroidManager + +if TYPE_CHECKING: + from astroid.manager import AstroidManager # The name of the transient function that is used to # wrap expressions to be extracted when calling @@ -36,7 +39,11 @@ 
_STATEMENT_SELECTOR = "#@" if PY312_PLUS: - warnings.filterwarnings("ignore", "invalid escape sequence", SyntaxWarning) + warnings.filterwarnings("ignore", ".*invalid escape sequence", SyntaxWarning) +if PY314_PLUS: + warnings.filterwarnings( + "ignore", "'(return|continue|break)' in a 'finally'", SyntaxWarning + ) def open_source_file(filename: str) -> tuple[TextIOWrapper, str, str]: @@ -62,20 +69,17 @@ def _can_assign_attr(node: nodes.ClassDef, attrname: str | None) -> bool: class AstroidBuilder(raw_building.InspectBuilder): """Class for building an astroid tree from source code or from a live module. - The param *manager* specifies the manager class which should be used. - If no manager is given, then the default one will be used. The + The param *manager* specifies the manager class which should be used. The param *apply_transforms* determines if the transforms should be applied after the tree was built from source or from a live object, by default being True. """ - def __init__( - self, manager: AstroidManager | None = None, apply_transforms: bool = True - ) -> None: + def __init__(self, manager: AstroidManager, apply_transforms: bool = True) -> None: super().__init__(manager) self._apply_transforms = apply_transforms if not raw_building.InspectBuilder.bootstrapped: - raw_building._astroid_bootstrapping() + manager.bootstrap() def module_build( self, module: types.ModuleType, modname: str | None = None @@ -159,11 +163,11 @@ def _post_build( module.file_encoding = encoding self._manager.cache_module(module) # post tree building steps after we stored the module in the cache: - for from_node in builder._import_from_nodes: + for from_node, global_names in builder._import_from_nodes: if from_node.modname == "__future__": for symbol, _ in from_node.names: module.future_imports.add(symbol) - self.add_from_names_to_locals(from_node) + self.add_from_names_to_locals(from_node, global_names) # handle delayed assattr nodes for delayed in builder._delayed_assattr: 
self.delayed_assattr(delayed) @@ -181,7 +185,7 @@ def _data_build( node, parser_module = _parse_string( data, type_comments=True, modname=modname ) - except (TypeError, ValueError, SyntaxError) as exc: + except (TypeError, ValueError, SyntaxError, MemoryError) as exc: raise AstroidSyntaxError( "Parsing Python code failed:\n{error}", source=data, @@ -206,19 +210,23 @@ def _data_build( module = builder.visit_module(node, modname, node_file, package) return module, builder - def add_from_names_to_locals(self, node: nodes.ImportFrom) -> None: + def add_from_names_to_locals( + self, node: nodes.ImportFrom, global_name: Collection[str] + ) -> None: """Store imported names to the locals. Resort the locals if coming from a delayed node """ - def _key_func(node: nodes.NodeNG) -> int: - return node.fromlineno or 0 - - def sort_locals(my_list: list[nodes.NodeNG]) -> None: - my_list.sort(key=_key_func) + def add_local(parent_or_root: nodes.NodeNG, name: str) -> None: + parent_or_root.set_local(name, node) + my_list = parent_or_root.scope().locals[name] + if TYPE_CHECKING: + my_list = cast(list[nodes.NodeNG], my_list) + my_list.sort(key=lambda n: n.fromlineno or 0) assert node.parent # It should always default to the module + module = node.root() for name, asname in node.names: if name == "*": try: @@ -226,14 +234,19 @@ def sort_locals(my_list: list[nodes.NodeNG]) -> None: except AstroidBuildingError: continue for name in imported.public_names(): - node.parent.set_local(name, node) - sort_locals(node.parent.scope().locals[name]) # type: ignore[arg-type] + if name in global_name: + add_local(module, name) + else: + add_local(node.parent, name) else: - node.parent.set_local(asname or name, node) - sort_locals(node.parent.scope().locals[asname or name]) # type: ignore[arg-type] + name = asname or name + if name in global_name: + add_local(module, name) + else: + add_local(node.parent, name) def delayed_assattr(self, node: nodes.AssignAttr) -> None: - """Visit a AssAttr node. 
+ """Visit an AssignAttr node. This adds name to locals and handle members definition. """ @@ -244,8 +257,7 @@ def delayed_assattr(self, node: nodes.AssignAttr) -> None: if isinstance(inferred, util.UninferableBase): continue try: - # pylint: disable=unidiomatic-typecheck # We want a narrow check on the - # parent type, not all of its subclasses + # We want a narrow check on the parent type, not all of its subclasses if type(inferred) in {bases.Instance, objects.ExceptionInstance}: inferred = inferred._proxied iattrs = inferred.instance_attrs @@ -293,10 +305,11 @@ def parse( Apply the transforms for the give code. Use it if you don't want the default transforms to be applied. """ + # pylint: disable-next=import-outside-toplevel + from astroid.manager import AstroidManager + code = textwrap.dedent(code) - builder = AstroidBuilder( - manager=AstroidManager(), apply_transforms=apply_transforms - ) + builder = AstroidBuilder(AstroidManager(), apply_transforms=apply_transforms) return builder.string_build(code, modname=module_name, path=path) diff --git a/astroid/const.py b/astroid/const.py index 0bc98c2e14..dcce0740c0 100644 --- a/astroid/const.py +++ b/astroid/const.py @@ -5,7 +5,6 @@ import enum import sys -PY310_PLUS = sys.version_info >= (3, 10) PY311_PLUS = sys.version_info >= (3, 11) PY312_PLUS = sys.version_info >= (3, 12) PY313 = sys.version_info[:2] == (3, 13) diff --git a/astroid/constraint.py b/astroid/constraint.py index 08bb80e3c9..692d22d03b 100644 --- a/astroid/constraint.py +++ b/astroid/constraint.py @@ -8,7 +8,7 @@ import sys from abc import ABC, abstractmethod from collections.abc import Iterator -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING from astroid import nodes, util from astroid.typing import InferenceResult @@ -21,7 +21,7 @@ if TYPE_CHECKING: from astroid import bases -_NameNodes = Union[nodes.AssignAttr, nodes.Attribute, nodes.AssignName, nodes.Name] +_NameNodes = nodes.AssignAttr | nodes.Attribute | 
nodes.AssignName | nodes.Name class Constraint(ABC): @@ -84,9 +84,50 @@ def satisfied_by(self, inferred: InferenceResult) -> bool: return self.negate ^ _matches(inferred, self.CONST_NONE) +class BooleanConstraint(Constraint): + """Represents an "x" or "not x" constraint.""" + + @classmethod + def match( + cls, node: _NameNodes, expr: nodes.NodeNG, negate: bool = False + ) -> Self | None: + """Return a new constraint for node if expr matches one of these patterns: + + - direct match (expr == node): use given negate value + - negated match (expr == `not node`): flip negate value + + Return None if no pattern matches. + """ + if _matches(expr, node): + return cls(node=node, negate=negate) + + if ( + isinstance(expr, nodes.UnaryOp) + and expr.op == "not" + and _matches(expr.operand, node) + ): + return cls(node=node, negate=not negate) + + return None + + def satisfied_by(self, inferred: InferenceResult) -> bool: + """Return True for uninferable results, or depending on negate flag: + + - negate=False: satisfied if boolean value is True + - negate=True: satisfied if boolean value is False + """ + inferred_booleaness = inferred.bool_value() + if isinstance(inferred, util.UninferableBase) or isinstance( + inferred_booleaness, util.UninferableBase + ): + return True + + return self.negate ^ inferred_booleaness + + def get_constraints( expr: _NameNodes, frame: nodes.LocalsDictNodeNG -) -> dict[nodes.If, set[Constraint]]: +) -> dict[nodes.If | nodes.IfExp, set[Constraint]]: """Returns the constraints for the given expression. The returned dictionary maps the node where the constraint was generated to the @@ -96,10 +137,10 @@ def get_constraints( Currently this only supports constraints generated from if conditions. 
""" current_node: nodes.NodeNG | None = expr - constraints_mapping: dict[nodes.If, set[Constraint]] = {} + constraints_mapping: dict[nodes.If | nodes.IfExp, set[Constraint]] = {} while current_node is not None and current_node is not frame: parent = current_node.parent - if isinstance(parent, nodes.If): + if isinstance(parent, (nodes.If, nodes.IfExp)): branch, _ = parent.locate_child(current_node) constraints: set[Constraint] | None = None if branch == "body": @@ -114,7 +155,12 @@ def get_constraints( return constraints_mapping -ALL_CONSTRAINT_CLASSES = frozenset((NoneConstraint,)) +ALL_CONSTRAINT_CLASSES = frozenset( + ( + NoneConstraint, + BooleanConstraint, + ) +) """All supported constraint types.""" diff --git a/astroid/context.py b/astroid/context.py index 8af9c564f1..fa9ed22655 100644 --- a/astroid/context.py +++ b/astroid/context.py @@ -9,16 +9,15 @@ import contextlib import pprint from collections.abc import Iterator, Sequence -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from astroid.typing import InferenceResult, SuccessfulInferenceResult if TYPE_CHECKING: from astroid import constraint, nodes - from astroid.nodes.node_classes import Keyword, NodeNG _InferenceCache = dict[ - tuple["NodeNG", Optional[str], Optional[str], Optional[str]], Sequence["NodeNG"] + tuple["nodes.NodeNG", str | None, str | None, str | None], Sequence["nodes.NodeNG"] ] _INFERENCE_CACHE: _InferenceCache = {} @@ -79,7 +78,9 @@ def __init__( self.extra_context: dict[SuccessfulInferenceResult, InferenceContext] = {} """Context that needs to be passed down through call stacks for call arguments.""" - self.constraints: dict[str, dict[nodes.If, set[constraint.Constraint]]] = {} + self.constraints: dict[ + str, dict[nodes.If | nodes.IfExp, set[constraint.Constraint]] + ] = {} """The constraints on nodes.""" @property @@ -167,8 +168,8 @@ class CallContext: def __init__( self, - args: list[NodeNG], - keywords: list[Keyword] | None = None, + args: 
list[nodes.NodeNG], + keywords: list[nodes.Keyword] | None = None, callee: InferenceResult | None = None, ): self.args = args # Call positional arguments diff --git a/astroid/decorators.py b/astroid/decorators.py index 70337b6d00..05d2dd391e 100644 --- a/astroid/decorators.py +++ b/astroid/decorators.py @@ -11,18 +11,13 @@ import sys import warnings from collections.abc import Callable, Generator -from typing import TypeVar +from typing import ParamSpec, TypeVar from astroid import util from astroid.context import InferenceContext from astroid.exceptions import InferenceError from astroid.typing import InferenceResult -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - _R = TypeVar("_R") _P = ParamSpec("_P") @@ -137,6 +132,7 @@ def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R: raise ValueError( f"Can't find argument '{arg}' for '{args[0].__class__.__qualname__}'" ) from None + # pylint: disable = too-many-boolean-expressions if ( # Check kwargs # - if found, check it's not None diff --git a/astroid/exceptions.py b/astroid/exceptions.py index 126acb954d..e523b70c35 100644 --- a/astroid/exceptions.py +++ b/astroid/exceptions.py @@ -64,7 +64,10 @@ def __init__(self, message: str = "", **kws: Any) -> None: setattr(self, key, value) def __str__(self) -> str: - return self.message.format(**vars(self)) + try: + return self.message.format(**vars(self)) + except ValueError: + return self.message # Return raw message if formatting fails class AstroidBuildingError(AstroidError): diff --git a/astroid/filter_statements.py b/astroid/filter_statements.py index 627e68edc9..a48b6e7d9f 100644 --- a/astroid/filter_statements.py +++ b/astroid/filter_statements.py @@ -67,6 +67,8 @@ def _filter_stmts( :returns: The filtered statements. 
""" + # pylint: disable = too-many-branches, too-many-statements + # if offset == -1, my actual frame is not the inner frame but its parent # # class A(B): pass diff --git a/astroid/helpers.py b/astroid/helpers.py index 429d48ccd9..9c370aa323 100644 --- a/astroid/helpers.py +++ b/astroid/helpers.py @@ -37,8 +37,7 @@ def safe_infer( def _build_proxy_class(cls_name: str, builtins: nodes.Module) -> nodes.ClassDef: - proxy = raw_building.build_class(cls_name) - proxy.parent = builtins + proxy = raw_building.build_class(cls_name, builtins) return proxy @@ -67,11 +66,10 @@ def _object_type( for inferred in node.infer(context=context): if isinstance(inferred, scoped_nodes.ClassDef): - if inferred.newstyle: - metaclass = inferred.metaclass(context=context) - if metaclass: - yield metaclass - continue + metaclass = inferred.metaclass(context=context) + if metaclass: + yield metaclass + continue yield builtins.getattr("type")[0] elif isinstance( inferred, @@ -106,7 +104,7 @@ def object_type( types = set(_object_type(node, context)) except InferenceError: return util.Uninferable - if len(types) > 1 or not types: + if len(types) != 1: return util.Uninferable return next(iter(types)) @@ -131,7 +129,9 @@ def _object_type_is_subclass( # issubclass(object, (1, type)) raises TypeError for klass in class_seq: if isinstance(klass, util.UninferableBase): - raise AstroidTypeError("arg 2 must be a type or tuple of types") + raise AstroidTypeError( + f"arg 2 must be a type or tuple of types, not {type(klass)!r}" + ) for obj_subclass in obj_type.mro(): if obj_subclass == klass: @@ -166,7 +166,7 @@ def object_issubclass( or its type's mro doesn't work """ if not isinstance(node, nodes.ClassDef): - raise TypeError(f"{node} needs to be a ClassDef node") + raise TypeError(f"{node} needs to be a ClassDef node, not {type(node)!r}") return _object_type_is_subclass(node, class_or_seq, context=context) @@ -194,8 +194,6 @@ def _type_check(type1, type2) -> bool: if not all(map(has_known_bases, 
(type1, type2))): raise _NonDeducibleTypeHierarchy - if not all([type1.newstyle, type2.newstyle]): - return False try: return type1 in type2.mro()[:-1] except MroError as e: diff --git a/astroid/interpreter/_import/spec.py b/astroid/interpreter/_import/spec.py index 91782d8f5c..af7c55bcfa 100644 --- a/astroid/interpreter/_import/spec.py +++ b/astroid/interpreter/_import/spec.py @@ -18,10 +18,7 @@ from collections.abc import Iterable, Iterator, Sequence from functools import lru_cache from pathlib import Path -from typing import Any, Literal, NamedTuple, Protocol - -from astroid.const import PY310_PLUS -from astroid.modutils import EXT_LIB_DIRS +from typing import Literal, NamedTuple, Protocol from . import util @@ -86,13 +83,13 @@ class Finder: def __init__(self, path: Sequence[str] | None = None) -> None: self._path = path or sys.path + @staticmethod @abc.abstractmethod def find_module( - self, modname: str, - module_parts: Sequence[str], - processed: list[str], - submodule_path: Sequence[str] | None, + module_parts: tuple[str, ...], + processed: tuple[str, ...], + submodule_path: tuple[str, ...] | None, ) -> ModuleSpec | None: """Find the given module. @@ -100,12 +97,12 @@ def find_module( they all return a ModuleSpec. :param modname: The module which needs to be searched. - :param module_parts: It should be a list of strings, + :param module_parts: It should be a tuple of strings, where each part contributes to the module's namespace. :param processed: What parts from the module parts were processed so far. - :param submodule_path: A list of paths where the module + :param submodule_path: A tuple of paths where the module can be looked into. :returns: A ModuleSpec, describing how and where the module was found, None, otherwise. 
@@ -126,13 +123,17 @@ class ImportlibFinder(Finder): + [(s, ModuleType.PY_COMPILED) for s in importlib.machinery.BYTECODE_SUFFIXES] ) + @staticmethod + @lru_cache(maxsize=1024) def find_module( - self, modname: str, - module_parts: Sequence[str], - processed: list[str], - submodule_path: Sequence[str] | None, + module_parts: tuple[str, ...], + processed: tuple[str, ...], + submodule_path: tuple[str, ...] | None, ) -> ModuleSpec | None: + # pylint: disable-next=import-outside-toplevel + from astroid.modutils import cached_os_path_isfile + # Although we should be able to use `find_spec` this doesn't work on PyPy for builtins. # Therefore, we use the `builtin_module_nams` heuristic for these. if submodule_path is None and modname in sys.builtin_module_names: @@ -153,7 +154,7 @@ def find_module( for suffix in suffixes: package_file_name = "__init__" + suffix file_path = os.path.join(package_directory, package_file_name) - if os.path.isfile(file_path): + if cached_os_path_isfile(file_path): return ModuleSpec( name=modname, location=package_directory, @@ -162,59 +163,33 @@ def find_module( for suffix, type_ in ImportlibFinder._SUFFIXES: file_name = modname + suffix file_path = os.path.join(entry, file_name) - if os.path.isfile(file_path): + if cached_os_path_isfile(file_path): return ModuleSpec(name=modname, location=file_path, type=type_) - # sys.stdlib_module_names was added in Python 3.10 - if PY310_PLUS: - # If the module name matches a stdlib module name, check whether this is a frozen - # module. Note that `find_spec` actually imports parent modules, so we want to make - # sure we only run this code for stuff that can be expected to be frozen. For now - # this is only stdlib. - if (modname in sys.stdlib_module_names and not processed) or ( - processed and processed[0] in sys.stdlib_module_names + # If the module name matches a stdlib module name, check whether this is a frozen + # module. 
Note that `find_spec` actually imports parent modules, so we want to make + # sure we only run this code for stuff that can be expected to be frozen. For now + # this is only stdlib. + if (modname in sys.stdlib_module_names and not processed) or ( + processed and processed[0] in sys.stdlib_module_names + ): + try: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=Warning) + spec = importlib.util.find_spec(".".join((*processed, modname))) + except ValueError: + spec = None + + if ( + spec + and spec.loader # type: ignore[comparison-overlap] # noqa: E501 + is importlib.machinery.FrozenImporter ): - try: - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=Warning) - spec = importlib.util.find_spec(".".join((*processed, modname))) - except ValueError: - spec = None - - if ( - spec - and spec.loader # type: ignore[comparison-overlap] # noqa: E501 - is importlib.machinery.FrozenImporter - ): - return ModuleSpec( - name=modname, - location=getattr(spec.loader_state, "filename", None), - type=ModuleType.PY_FROZEN, - ) - else: - # NOTE: This is broken code. It doesn't work on Python 3.13+ where submodules can also - # be frozen. However, we don't want to worry about this and we don't want to break - # support for older versions of Python. This is just copy-pasted from the old non - # working version to at least have no functional behaviour change on <=3.10. - # It can be removed after 3.10 is no longer supported in favour of the logic above. 
- if submodule_path is None: # pylint: disable=else-if-used - try: - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=UserWarning) - spec = importlib.util.find_spec(modname) - if ( - spec - and spec.loader # type: ignore[comparison-overlap] # noqa: E501 - is importlib.machinery.FrozenImporter - ): - # No need for BuiltinImporter; builtins handled above - return ModuleSpec( - name=modname, - location=getattr(spec.loader_state, "filename", None), - type=ModuleType.PY_FROZEN, - ) - except ValueError: - pass + return ModuleSpec( + name=modname, + location=getattr(spec.loader_state, "filename", None), + type=ModuleType.PY_FROZEN, + ) return None @@ -224,6 +199,8 @@ def contribute_to_path( if spec.location is None: # Builtin. return None + # pylint: disable-next=import-outside-toplevel + from astroid.modutils import EXT_LIB_DIRS if _is_setuptools_namespace(Path(spec.location)): # extend_path is called, search sys.path for module/packages @@ -259,12 +236,13 @@ def contribute_to_path( class ExplicitNamespacePackageFinder(ImportlibFinder): """A finder for the explicit namespace packages.""" + @staticmethod + @lru_cache(maxsize=1024) def find_module( - self, modname: str, - module_parts: Sequence[str], - processed: list[str], - submodule_path: Sequence[str] | None, + module_parts: tuple[str, ...], + processed: tuple[str, ...], + submodule_path: tuple[str, ...] 
| None, ) -> ModuleSpec | None: if processed: modname = ".".join([*processed, modname]) @@ -292,18 +270,19 @@ def __init__(self, path: Sequence[str]) -> None: for entry_path in path: if entry_path not in sys.path_importer_cache: try: - sys.path_importer_cache[entry_path] = zipimport.zipimporter( # type: ignore[assignment] + sys.path_importer_cache[entry_path] = zipimport.zipimporter( entry_path ) except zipimport.ZipImportError: continue + @staticmethod + @lru_cache(maxsize=1024) def find_module( - self, modname: str, - module_parts: Sequence[str], - processed: list[str], - submodule_path: Sequence[str] | None, + module_parts: tuple[str, ...], + processed: tuple[str, ...], + submodule_path: tuple[str, ...] | None, ) -> ModuleSpec | None: try: file_type, filename, path = _search_zip(module_parts) @@ -318,16 +297,22 @@ def find_module( submodule_search_locations=path, ) + def contribute_to_path( + self, spec: ModuleSpec, processed: list[str] + ) -> Sequence[str] | None: + return spec.submodule_search_locations + class PathSpecFinder(Finder): """Finder based on importlib.machinery.PathFinder.""" + @staticmethod + @lru_cache(maxsize=1024) def find_module( - self, modname: str, - module_parts: Sequence[str], - processed: list[str], - submodule_path: Sequence[str] | None, + module_parts: tuple[str, ...], + processed: tuple[str, ...], + submodule_path: tuple[str, ...] 
| None, ) -> ModuleSpec | None: spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path) if spec is not None: @@ -359,6 +344,7 @@ def contribute_to_path( ) +@lru_cache(maxsize=1024) def _is_setuptools_namespace(location: pathlib.Path) -> bool: try: with open(location / "__init__.py", "rb") as stream: @@ -379,22 +365,12 @@ def _get_zipimporters() -> Iterator[tuple[str, zipimport.zipimporter]]: def _search_zip( - modpath: Sequence[str], + modpath: tuple[str, ...], ) -> tuple[Literal[ModuleType.PY_ZIPMODULE], str, str]: for filepath, importer in _get_zipimporters(): - if PY310_PLUS: - found: Any = importer.find_spec(modpath[0]) - else: - found = importer.find_module(modpath[0]) + found = importer.find_spec(modpath[0]) if found: - if PY310_PLUS: - if not importer.find_spec(os.path.sep.join(modpath)): - raise ImportError( - "No module named {} in {}/{}".format( - ".".join(modpath[1:]), filepath, modpath - ) - ) - elif not importer.find_module(os.path.sep.join(modpath)): + if not importer.find_spec(os.path.sep.join(modpath)): raise ImportError( "No module named {} in {}/{}".format( ".".join(modpath[1:]), filepath, modpath @@ -411,18 +387,16 @@ def _search_zip( def _find_spec_with_path( search_path: Sequence[str], modname: str, - module_parts: list[str], - processed: list[str], - submodule_path: Sequence[str] | None, + module_parts: tuple[str, ...], + processed: tuple[str, ...], + submodule_path: tuple[str, ...] 
| None, ) -> tuple[Finder | _MetaPathFinder, ModuleSpec]: for finder in _SPEC_FINDERS: finder_instance = finder(search_path) - spec = finder_instance.find_module( - modname, module_parts, processed, submodule_path - ) - if spec is None: + mod_spec = finder.find_module(modname, module_parts, processed, submodule_path) + if mod_spec is None: continue - return finder_instance, spec + return finder_instance, mod_spec # Support for custom finders for meta_finder in sys.meta_path: @@ -485,32 +459,38 @@ def find_spec(modpath: Iterable[str], path: Iterable[str] | None = None) -> Modu @lru_cache(maxsize=1024) -def _find_spec(module_path: tuple, path: tuple) -> ModuleSpec: +def _find_spec( + module_path: tuple[str, ...], path: tuple[str, ...] | None +) -> ModuleSpec: _path = path or sys.path # Need a copy for not mutating the argument. modpath = list(module_path) - submodule_path = None - module_parts = modpath[:] + search_paths = None processed: list[str] = [] while modpath: modname = modpath.pop(0) + + submodule_path = search_paths or path + if submodule_path is not None: + submodule_path = tuple(submodule_path) + finder, spec = _find_spec_with_path( - _path, modname, module_parts, processed, submodule_path or path + _path, modname, module_path, tuple(processed), submodule_path ) processed.append(modname) if modpath: if isinstance(finder, Finder): - submodule_path = finder.contribute_to_path(spec, processed) - # If modname is a package from an editable install, update submodule_path + search_paths = finder.contribute_to_path(spec, processed) + # If modname is a package from an editable install, update search_paths # so that the next module in the path will be found inside of it using importlib. # Existence of __name__ is guaranteed by _find_spec_with_path. 
elif finder.__name__ in _EditableFinderClasses: # type: ignore[attr-defined] - submodule_path = spec.submodule_search_locations + search_paths = spec.submodule_search_locations if spec.type == ModuleType.PKG_DIRECTORY: - spec = spec._replace(submodule_search_locations=submodule_path) + spec = spec._replace(submodule_search_locations=search_paths) return spec diff --git a/astroid/interpreter/_import/util.py b/astroid/interpreter/_import/util.py index 511ec4f977..8b8725f91b 100644 --- a/astroid/interpreter/_import/util.py +++ b/astroid/interpreter/_import/util.py @@ -7,7 +7,7 @@ import pathlib import sys from functools import lru_cache -from importlib._bootstrap_external import _NamespacePath +from importlib._bootstrap_external import _NamespacePath # type: ignore[attr-defined] from importlib.util import _find_spec_from_path # type: ignore[attr-defined] from astroid.const import IS_PYPY @@ -83,7 +83,6 @@ def is_namespace(modname: str) -> bool: # Repair last_submodule_search_locations if last_submodule_search_locations: - # pylint: disable=unsubscriptable-object last_item = last_submodule_search_locations[-1] # e.g. 
for failure example above, add 'a/b' and keep going # so that find_spec('a.b.c', path=['a', 'a/b']) succeeds diff --git a/astroid/interpreter/dunder_lookup.py b/astroid/interpreter/dunder_lookup.py index 727c1ad462..8eab35c4ff 100644 --- a/astroid/interpreter/dunder_lookup.py +++ b/astroid/interpreter/dunder_lookup.py @@ -18,20 +18,20 @@ from typing import TYPE_CHECKING import astroid +from astroid import nodes from astroid.exceptions import AttributeInferenceError if TYPE_CHECKING: - from astroid import nodes from astroid.context import InferenceContext def _lookup_in_mro(node, name) -> list: attrs = node.locals.get(name, []) - nodes = itertools.chain.from_iterable( + nodes_ = itertools.chain.from_iterable( ancestor.locals.get(name, []) for ancestor in node.ancestors(recurs=True) ) - values = list(itertools.chain(attrs, nodes)) + values = list(itertools.chain(attrs, nodes_)) if not values: raise AttributeInferenceError(attribute=name, target=node) @@ -47,13 +47,11 @@ def lookup( will be returned. Otherwise, `astroid.AttributeInferenceError` is going to be raised. """ - if isinstance( - node, (astroid.List, astroid.Tuple, astroid.Const, astroid.Dict, astroid.Set) - ): + if isinstance(node, (nodes.List, nodes.Tuple, nodes.Const, nodes.Dict, nodes.Set)): return _builtin_lookup(node, name) if isinstance(node, astroid.Instance): return _lookup_in_mro(node, name) - if isinstance(node, astroid.ClassDef): + if isinstance(node, nodes.ClassDef): return _class_lookup(node, name, context=context) raise AttributeInferenceError(attribute=name, target=node) diff --git a/astroid/interpreter/objectmodel.py b/astroid/interpreter/objectmodel.py index 0f553ab084..eac9e43084 100644 --- a/astroid/interpreter/objectmodel.py +++ b/astroid/interpreter/objectmodel.py @@ -228,17 +228,17 @@ def attr___package__(self): @property def attr___spec__(self): # No handling for now. 
- return node_classes.Unknown() + return node_classes.Unknown(parent=self._instance) @property def attr___loader__(self): # No handling for now. - return node_classes.Unknown() + return node_classes.Unknown(parent=self._instance) @property def attr___cached__(self): # No handling for now. - return node_classes.Unknown() + return node_classes.Unknown(parent=self._instance) class FunctionModel(ObjectModel): @@ -427,13 +427,13 @@ def test(self): we get a new object which has two parameters, *self* and *type*. """ nonlocal func - arguments = astroid.Arguments( + arguments = nodes.Arguments( parent=func.args.parent, vararg=None, kwarg=None ) positional_or_keyword_params = func.args.args.copy() positional_or_keyword_params.append( - astroid.AssignName( + nodes.AssignName( name="type", lineno=0, col_offset=0, @@ -462,7 +462,7 @@ def test(self): # These are here just for completion. @property def attr___ne__(self): - return node_classes.Unknown() + return node_classes.Unknown(parent=self._instance) attr___subclasshook__ = attr___ne__ attr___str__ = attr___ne__ @@ -493,8 +493,8 @@ def __init__(self): super().__init__() @property - def attr___annotations__(self) -> node_classes.Unkown: - return node_classes.Unknown() + def attr___annotations__(self) -> node_classes.Unknown: + return node_classes.Unknown(parent=self._instance) @property def attr___module__(self): @@ -514,9 +514,6 @@ def attr___doc__(self): @property def attr___mro__(self): - if not self._instance.newstyle: - raise AttributeInferenceError(target=self._instance, attribute="__mro__") - mro = self._instance.mro() obj = node_classes.Tuple(parent=self._instance) obj.postinit(mro) @@ -524,9 +521,6 @@ def attr___mro__(self): @property def attr_mro(self): - if not self._instance.newstyle: - raise AttributeInferenceError(target=self._instance, attribute="mro") - other_self = self # Cls.mro is a method and we need to return one in order to have a proper inference. 
@@ -565,10 +559,6 @@ def attr___subclasses__(self): This looks only in the current module for retrieving the subclasses, thus it might miss a couple of them. """ - if not self._instance.newstyle: - raise AttributeInferenceError( - target=self._instance, attribute="__subclasses__" - ) qname = self._instance.qname() root = self._instance.root() @@ -704,20 +694,18 @@ def attr___self__(self): return self._instance.bound -class GeneratorModel(FunctionModel, ContextManagerModel): - def __new__(cls, *args, **kwargs): - # Append the values from the GeneratorType unto this object. - ret = super().__new__(cls, *args, **kwargs) - generator = AstroidManager().builtins_module["generator"] - for name, values in generator.locals.items(): +class GeneratorBaseModel(FunctionModel, ContextManagerModel): + def __init__(self, gen_module: nodes.Module): + super().__init__() + for name, values in gen_module.locals.items(): method = values[0] + if isinstance(method, nodes.FunctionDef): + method = bases.BoundMethod(method, _get_bound_node(self)) def patched(cls, meth=method): return meth - setattr(type(ret), IMPL_PREFIX + name, property(patched)) - - return ret + setattr(type(self), IMPL_PREFIX + name, property(patched)) @property def attr___name__(self): @@ -733,25 +721,14 @@ def attr___doc__(self): ) -class AsyncGeneratorModel(GeneratorModel): - def __new__(cls, *args, **kwargs): - # Append the values from the AGeneratorType unto this object. - ret = super().__new__(cls, *args, **kwargs) - astroid_builtins = AstroidManager().builtins_module - generator = astroid_builtins.get("async_generator") - if generator is None: - # Make it backward compatible. 
- generator = astroid_builtins.get("generator") - - for name, values in generator.locals.items(): - method = values[0] - - def patched(cls, meth=method): - return meth +class GeneratorModel(GeneratorBaseModel): + def __init__(self): + super().__init__(AstroidManager().builtins_module["generator"]) - setattr(type(ret), IMPL_PREFIX + name, property(patched)) - return ret +class AsyncGeneratorModel(GeneratorBaseModel): + def __init__(self): + super().__init__(AstroidManager().builtins_module["async_generator"]) class InstanceModel(ObjectModel): @@ -793,6 +770,12 @@ def attr_text(self): return node_classes.Const("") +class GroupExceptionInstanceModel(ExceptionInstanceModel): + @property + def attr_exceptions(self) -> nodes.Tuple: + return node_classes.Tuple(parent=self._instance) + + class OSErrorInstanceModel(ExceptionInstanceModel): @property def attr_filename(self): @@ -827,6 +810,7 @@ def attr_object(self): BUILTIN_EXCEPTIONS = { "builtins.SyntaxError": SyntaxErrorInstanceModel, + "builtins.ExceptionGroup": GroupExceptionInstanceModel, "builtins.ImportError": ImportErrorInstanceModel, "builtins.UnicodeDecodeError": UnicodeDecodeErrorInstanceModel, # These are all similar to OSError in terms of attributes @@ -969,7 +953,7 @@ def infer_call_result( def attr_fset(self): func = self._instance - def find_setter(func: Property) -> astroid.FunctionDef | None: + def find_setter(func: Property) -> nodes.FunctionDef | None: """ Given a property, find the corresponding setter function and returns it. 
diff --git a/astroid/manager.py b/astroid/manager.py index e5398c45a9..e2328862a2 100644 --- a/astroid/manager.py +++ b/astroid/manager.py @@ -17,6 +17,7 @@ from typing import Any, ClassVar from astroid import nodes +from astroid.builder import AstroidBuilder, build_namespace_package_module from astroid.context import InferenceContext, _invalidate_cache from astroid.exceptions import AstroidBuildingError, AstroidImportError from astroid.interpreter._import import spec, util @@ -24,6 +25,7 @@ NoSourceFile, _cache_normalize_path_, _has_init, + cached_os_path_isfile, file_info_from_modpath, get_source_file, is_module_name_part_of_extension_package_whitelist, @@ -160,9 +162,6 @@ def ast_from_file( ): return self.astroid_cache[modname] if source: - # pylint: disable=import-outside-toplevel; circular import - from astroid.builder import AstroidBuilder - return AstroidBuilder(self).file_build(filepath, modname) if fallback and modname: return self.ast_from_module_name(modname) @@ -174,23 +173,14 @@ def ast_from_string( """Given some source code as a string, return its corresponding astroid object. 
""" - # pylint: disable=import-outside-toplevel; circular import - from astroid.builder import AstroidBuilder - return AstroidBuilder(self).string_build(data, modname, filepath) def _build_stub_module(self, modname: str) -> nodes.Module: - # pylint: disable=import-outside-toplevel; circular import - from astroid.builder import AstroidBuilder - return AstroidBuilder(self).string_build("", modname) def _build_namespace_module( self, modname: str, path: Sequence[str] ) -> nodes.Module: - # pylint: disable=import-outside-toplevel; circular import - from astroid.builder import build_namespace_package_module - return build_namespace_package_module(modname, path) def _can_load_extension(self, modname: str) -> bool: @@ -289,9 +279,6 @@ def zip_import_data(self, filepath: str) -> nodes.Module | None: if zipimport is None: return None - # pylint: disable=import-outside-toplevel; circular import - from astroid.builder import AstroidBuilder - builder = AstroidBuilder(self) for ext in ZIP_IMPORT_EXTS: try: @@ -350,9 +337,6 @@ def ast_from_module( except AttributeError: pass - # pylint: disable=import-outside-toplevel; circular import - from astroid.builder import AstroidBuilder - return AstroidBuilder(self).module_build(module, modname) def ast_from_class(self, klass: type, modname: str | None = None) -> nodes.ClassDef: @@ -428,7 +412,7 @@ def register_failed_import_hook(self, hook: Callable[[str], nodes.Module]) -> No `hook` must be a function that accepts a single argument `modname` which contains the name of the module or package that could not be imported. - If `hook` can resolve the import, must return a node of type `astroid.Module`, + If `hook` can resolve the import, must return a node of type `nodes.Module`, otherwise, it must raise `AstroidBuildingError`. 
""" self._failed_import_hooks.append(hook) @@ -455,7 +439,10 @@ def clear_cache(self) -> None: # pylint: disable=import-outside-toplevel from astroid.brain.helpers import register_all_brains from astroid.inference_tip import clear_inference_tip_cache - from astroid.interpreter._import.spec import _find_spec + from astroid.interpreter._import.spec import ( + _find_spec, + _is_setuptools_namespace, + ) from astroid.interpreter.objectmodel import ObjectModel from astroid.nodes._base_nodes import LookupMixIn from astroid.nodes.scoped_nodes import ClassDef @@ -473,13 +460,18 @@ def clear_cache(self) -> None: LookupMixIn.lookup, _cache_normalize_path_, _has_init, + cached_os_path_isfile, util.is_namespace, ObjectModel.attributes, ClassDef._metaclass_lookup_attribute, _find_spec, + _is_setuptools_namespace, ): lru_cache.cache_clear() # type: ignore[attr-defined] + for finder in spec._SPEC_FINDERS: + finder.find_module.cache_clear() + self.bootstrap() # Reload brain plugins. During initialisation this is done in astroid.manager.py diff --git a/astroid/modutils.py b/astroid/modutils.py index 0a933054df..0868c60c0a 100644 --- a/astroid/modutils.py +++ b/astroid/modutils.py @@ -30,15 +30,11 @@ from collections.abc import Callable, Iterable, Sequence from contextlib import redirect_stderr, redirect_stdout from functools import lru_cache +from sys import stdlib_module_names -from astroid.const import IS_JYTHON, PY310_PLUS +from astroid.const import IS_JYTHON from astroid.interpreter._import import spec, util -if PY310_PLUS: - from sys import stdlib_module_names -else: - from astroid._backport_stdlib_names import stdlib_module_names - logger = logging.getLogger(__name__) @@ -236,6 +232,14 @@ def check_modpath_has_init(path: str, mod_path: list[str]) -> bool: return True +def _is_subpath(path: str, base: str) -> bool: + path = os.path.normcase(os.path.normpath(path)) + base = os.path.normcase(os.path.normpath(base)) + if not path.startswith(base): + return False + return 
(len(path) == len(base)) or (path[len(base)] == os.path.sep) + + def _get_relative_base_path(filename: str, path_to_check: str) -> list[str] | None: """Extracts the relative mod path of the file to import from. @@ -252,19 +256,18 @@ def _get_relative_base_path(filename: str, path_to_check: str) -> list[str] | No _get_relative_base_path("/a/b/c/d.py", "/a/b") -> ["c","d"] _get_relative_base_path("/a/b/c/d.py", "/dev") -> None """ - importable_path = None - path_to_check = os.path.normcase(path_to_check) + path_to_check = os.path.normcase(os.path.normpath(path_to_check)) + abs_filename = os.path.abspath(filename) - if os.path.normcase(abs_filename).startswith(path_to_check): - importable_path = abs_filename + if _is_subpath(abs_filename, path_to_check): + base_path = os.path.splitext(abs_filename)[0] + relative_base_path = base_path[len(path_to_check) :].lstrip(os.path.sep) + return [pkg for pkg in relative_base_path.split(os.sep) if pkg] real_filename = os.path.realpath(filename) - if os.path.normcase(real_filename).startswith(path_to_check): - importable_path = real_filename - - if importable_path: - base_path = os.path.splitext(importable_path)[0] - relative_base_path = base_path[len(path_to_check) :] + if _is_subpath(real_filename, path_to_check): + base_path = os.path.splitext(real_filename)[0] + relative_base_path = base_path[len(path_to_check) :].lstrip(os.path.sep) return [pkg for pkg in relative_base_path.split(os.sep) if pkg] return None @@ -272,13 +275,13 @@ def _get_relative_base_path(filename: str, path_to_check: str) -> list[str] | No def modpath_from_file_with_callback( filename: str, - path: Sequence[str] | None = None, + path: list[str] | None = None, is_package_cb: Callable[[str, list[str]], bool] | None = None, ) -> list[str]: filename = os.path.expanduser(_path_from_filename(filename)) paths_to_check = sys.path.copy() if path: - paths_to_check += path + paths_to_check = path + paths_to_check for pathname in itertools.chain( paths_to_check, 
map(_cache_normalize_path, paths_to_check) ): @@ -292,11 +295,13 @@ def modpath_from_file_with_callback( return modpath raise ImportError( - "Unable to find module for {} in {}".format(filename, ", \n".join(sys.path)) + "Unable to find module for {} in {}".format( + filename, ", \n".join(paths_to_check) + ) ) -def modpath_from_file(filename: str, path: Sequence[str] | None = None) -> list[str]: +def modpath_from_file(filename: str, path: list[str] | None = None) -> list[str]: """Get the corresponding split module's name from a filename. This function will return the name of a module or package split on `.`. @@ -305,8 +310,8 @@ def modpath_from_file(filename: str, path: Sequence[str] | None = None) -> list[ :param filename: file's path for which we want the module's name :type Optional[List[str]] path: - Optional list of path where the module or package should be - searched (use sys.path if nothing or None is given) + Optional list of paths where the module or package should be + searched, additionally to sys.path :raise ImportError: if the corresponding module's name has not been found @@ -363,7 +368,7 @@ def file_info_from_modpath( if modpath[0] == "xml": # handle _xmlplus try: - return _spec_from_modpath(["_xmlplus"] + modpath[1:], path, context) + return _spec_from_modpath(["_xmlplus", *modpath[1:]], path, context) except ImportError: return _spec_from_modpath(modpath, path, context) elif modpath == ["os", "path"]: @@ -605,6 +610,12 @@ def is_relative(modname: str, from_file: str) -> bool: ) +@lru_cache(maxsize=1024) +def cached_os_path_isfile(path: str | os.PathLike[str]) -> bool: + """A cached version of os.path.isfile that helps avoid repetitive I/O""" + return os.path.isfile(path) + + # internal only functions ##################################################### diff --git a/astroid/nodes/__init__.py b/astroid/nodes/__init__.py index 5033d4cca2..6a6751612c 100644 --- a/astroid/nodes/__init__.py +++ b/astroid/nodes/__init__.py @@ -50,6 +50,7 @@ IfExp, 
Import, ImportFrom, + Interpolation, JoinedStr, Keyword, List, @@ -76,6 +77,7 @@ Slice, Starred, Subscript, + TemplateStr, Try, TryStar, Tuple, @@ -93,6 +95,7 @@ unpack_infer, ) from astroid.nodes.scoped_nodes import ( + SYNTHETIC_ROOT, AsyncFunctionDef, ClassDef, ComprehensionScope, @@ -202,6 +205,7 @@ __all__ = ( "CONST_CLS", + "SYNTHETIC_ROOT", "AnnAssign", "Arguments", "Assert", @@ -245,6 +249,7 @@ "IfExp", "Import", "ImportFrom", + "Interpolation", "JoinedStr", "Keyword", "Lambda", @@ -276,6 +281,7 @@ "Slice", "Starred", "Subscript", + "TemplateStr", "Try", "TryStar", "Tuple", diff --git a/astroid/nodes/_base_nodes.py b/astroid/nodes/_base_nodes.py index 177bb696e1..df452cb2b8 100644 --- a/astroid/nodes/_base_nodes.py +++ b/astroid/nodes/_base_nodes.py @@ -12,10 +12,9 @@ import itertools from collections.abc import Callable, Generator, Iterator from functools import cached_property, lru_cache, partial -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union +from typing import TYPE_CHECKING, Any, ClassVar from astroid import bases, nodes, util -from astroid.const import PY310_PLUS from astroid.context import ( CallContext, InferenceContext, @@ -35,10 +34,10 @@ GetFlowFactory = Callable[ [ InferenceResult, - Optional[InferenceResult], - Union[nodes.AugAssign, nodes.BinOp], + InferenceResult | None, + nodes.AugAssign | nodes.BinOp, InferenceResult, - Optional[InferenceResult], + InferenceResult | None, InferenceContext, InferenceContext, ], @@ -603,9 +602,9 @@ def _get_binop_flow( ), ] + # pylint: disable = too-many-boolean-expressions if ( - PY310_PLUS - and op == "|" + op == "|" and ( isinstance(left, (bases.UnionType, nodes.ClassDef)) or (isinstance(left, nodes.Const) and left.value is None) diff --git a/astroid/nodes/as_string.py b/astroid/nodes/as_string.py index 35bc134d21..01007b96e1 100644 --- a/astroid/nodes/as_string.py +++ b/astroid/nodes/as_string.py @@ -14,20 +14,6 @@ if TYPE_CHECKING: from astroid import objects - from astroid.nodes import 
Const - from astroid.nodes.node_classes import ( - Match, - MatchAs, - MatchCase, - MatchClass, - MatchMapping, - MatchOr, - MatchSequence, - MatchSingleton, - MatchStar, - MatchValue, - Unknown, - ) # pylint: disable=unused-argument @@ -42,11 +28,11 @@ class AsStringVisitor: def __init__(self, indent: str = " "): self.indent: str = indent - def __call__(self, node) -> str: + def __call__(self, node: nodes.NodeNG) -> str: """Makes this visitor behave as a simple function""" return node.accept(self).replace(DOC_NEWLINE, "\n") - def _docs_dedent(self, doc_node: Const | None) -> str: + def _docs_dedent(self, doc_node: nodes.Const | None) -> str: """Stop newlines in docs being indented by self._stmt_list""" if not doc_node: return "" @@ -65,14 +51,18 @@ def _stmt_list(self, stmts: list, indent: bool = True) -> str: return self.indent + stmts_str.replace("\n", "\n" + self.indent) - def _precedence_parens(self, node, child, is_left: bool = True) -> str: + def _precedence_parens( + self, node: nodes.NodeNG, child: nodes.NodeNG, is_left: bool = True + ) -> str: """Wrap child in parens only if required to keep same semantics""" if self._should_wrap(node, child, is_left): return f"({child.accept(self)})" return child.accept(self) - def _should_wrap(self, node, child, is_left: bool) -> bool: + def _should_wrap( + self, node: nodes.NodeNG, child: nodes.NodeNG, is_left: bool + ) -> bool: """Wrap child if: - it has lower precedence - same precedence with position opposite to associativity direction @@ -96,44 +86,44 @@ def _should_wrap(self, node, child, is_left: bool) -> bool: # visit_ methods ########################################### - def visit_await(self, node) -> str: + def visit_await(self, node: nodes.Await) -> str: return f"await {node.value.accept(self)}" - def visit_asyncwith(self, node) -> str: + def visit_asyncwith(self, node: nodes.AsyncWith) -> str: return f"async {self.visit_with(node)}" - def visit_asyncfor(self, node) -> str: + def visit_asyncfor(self, node: 
nodes.AsyncFor) -> str: return f"async {self.visit_for(node)}" - def visit_arguments(self, node) -> str: - """return an astroid.Function node as string""" + def visit_arguments(self, node: nodes.Arguments) -> str: + """return an nodes.Arguments node as string""" return node.format_args() - def visit_assignattr(self, node) -> str: - """return an astroid.AssAttr node as string""" + def visit_assignattr(self, node: nodes.AssignAttr) -> str: + """return an nodes.AssignAttr node as string""" return self.visit_attribute(node) - def visit_assert(self, node) -> str: - """return an astroid.Assert node as string""" + def visit_assert(self, node: nodes.Assert) -> str: + """return an nodes.Assert node as string""" if node.fail: return f"assert {node.test.accept(self)}, {node.fail.accept(self)}" return f"assert {node.test.accept(self)}" - def visit_assignname(self, node) -> str: - """return an astroid.AssName node as string""" + def visit_assignname(self, node: nodes.AssignName) -> str: + """return an nodes.AssignName node as string""" return node.name - def visit_assign(self, node) -> str: - """return an astroid.Assign node as string""" + def visit_assign(self, node: nodes.Assign) -> str: + """return an nodes.Assign node as string""" lhs = " = ".join(n.accept(self) for n in node.targets) return f"{lhs} = {node.value.accept(self)}" - def visit_augassign(self, node) -> str: - """return an astroid.AugAssign node as string""" + def visit_augassign(self, node: nodes.AugAssign) -> str: + """return an nodes.AugAssign node as string""" return f"{node.target.accept(self)} {node.op} {node.value.accept(self)}" - def visit_annassign(self, node) -> str: - """Return an astroid.AugAssign node as string""" + def visit_annassign(self, node: nodes.AnnAssign) -> str: + """Return an nodes.AnnAssign node as string""" target = node.target.accept(self) annotation = node.annotation.accept(self) @@ -141,8 +131,8 @@ def visit_annassign(self, node) -> str: return f"{target}: {annotation}" return 
f"{target}: {annotation} = {node.value.accept(self)}" - def visit_binop(self, node) -> str: - """return an astroid.BinOp node as string""" + def visit_binop(self, node: nodes.BinOp) -> str: + """return an nodes.BinOp node as string""" left = self._precedence_parens(node, node.left) right = self._precedence_parens(node, node.right, is_left=False) if node.op == "**": @@ -150,17 +140,17 @@ def visit_binop(self, node) -> str: return f"{left} {node.op} {right}" - def visit_boolop(self, node) -> str: - """return an astroid.BoolOp node as string""" + def visit_boolop(self, node: nodes.BoolOp) -> str: + """return an nodes.BoolOp node as string""" values = [f"{self._precedence_parens(node, n)}" for n in node.values] return (f" {node.op} ").join(values) - def visit_break(self, node) -> str: - """return an astroid.Break node as string""" + def visit_break(self, node: nodes.Break) -> str: + """return an nodes.Break node as string""" return "break" - def visit_call(self, node) -> str: - """return an astroid.Call node as string""" + def visit_call(self, node: nodes.Call) -> str: + """return an nodes.Call node as string""" expr_str = self._precedence_parens(node, node.func) args = [arg.accept(self) for arg in node.args] if node.keywords: @@ -171,65 +161,74 @@ def visit_call(self, node) -> str: args.extend(keywords) return f"{expr_str}({', '.join(args)})" - def visit_classdef(self, node) -> str: - """return an astroid.ClassDef node as string""" + def _handle_type_params( + self, type_params: list[nodes.TypeVar | nodes.ParamSpec | nodes.TypeVarTuple] + ) -> str: + return ( + f"[{', '.join(tp.accept(self) for tp in type_params)}]" + if type_params + else "" + ) + + def visit_classdef(self, node: nodes.ClassDef) -> str: + """return an nodes.ClassDef node as string""" decorate = node.decorators.accept(self) if node.decorators else "" + type_params = self._handle_type_params(node.type_params) args = [n.accept(self) for n in node.bases] if node._metaclass and not 
node.has_metaclass_hack(): args.append("metaclass=" + node._metaclass.accept(self)) args += [n.accept(self) for n in node.keywords] args_str = f"({', '.join(args)})" if args else "" docs = self._docs_dedent(node.doc_node) - # TODO: handle type_params - return "\n\n{}class {}{}:{}\n{}\n".format( - decorate, node.name, args_str, docs, self._stmt_list(node.body) + return "\n\n{}class {}{}{}:{}\n{}\n".format( + decorate, node.name, type_params, args_str, docs, self._stmt_list(node.body) ) - def visit_compare(self, node) -> str: - """return an astroid.Compare node as string""" + def visit_compare(self, node: nodes.Compare) -> str: + """return an nodes.Compare node as string""" rhs_str = " ".join( f"{op} {self._precedence_parens(node, expr, is_left=False)}" for op, expr in node.ops ) return f"{self._precedence_parens(node, node.left)} {rhs_str}" - def visit_comprehension(self, node) -> str: - """return an astroid.Comprehension node as string""" + def visit_comprehension(self, node: nodes.Comprehension) -> str: + """return an nodes.Comprehension node as string""" ifs = "".join(f" if {n.accept(self)}" for n in node.ifs) generated = f"for {node.target.accept(self)} in {node.iter.accept(self)}{ifs}" return f"{'async ' if node.is_async else ''}{generated}" - def visit_const(self, node) -> str: - """return an astroid.Const node as string""" + def visit_const(self, node: nodes.Const) -> str: + """return an nodes.Const node as string""" if node.value is Ellipsis: return "..." 
return repr(node.value) - def visit_continue(self, node) -> str: - """return an astroid.Continue node as string""" + def visit_continue(self, node: nodes.Continue) -> str: + """return an nodes.Continue node as string""" return "continue" - def visit_delete(self, node) -> str: # XXX check if correct - """return an astroid.Delete node as string""" + def visit_delete(self, node: nodes.Delete) -> str: + """return an nodes.Delete node as string""" return f"del {', '.join(child.accept(self) for child in node.targets)}" - def visit_delattr(self, node) -> str: - """return an astroid.DelAttr node as string""" + def visit_delattr(self, node: nodes.DelAttr) -> str: + """return an nodes.DelAttr node as string""" return self.visit_attribute(node) - def visit_delname(self, node) -> str: - """return an astroid.DelName node as string""" + def visit_delname(self, node: nodes.DelName) -> str: + """return an nodes.DelName node as string""" return node.name - def visit_decorators(self, node) -> str: - """return an astroid.Decorators node as string""" + def visit_decorators(self, node: nodes.Decorators) -> str: + """return an nodes.Decorators node as string""" return "@%s\n" % "\n@".join(item.accept(self) for item in node.nodes) - def visit_dict(self, node) -> str: - """return an astroid.Dict node as string""" + def visit_dict(self, node: nodes.Dict) -> str: + """return an nodes.Dict node as string""" return "{%s}" % ", ".join(self._visit_dict(node)) - def _visit_dict(self, node) -> Iterator[str]: + def _visit_dict(self, node: nodes.Dict) -> Iterator[str]: for key, value in node.items: key = key.accept(self) value = value.accept(self) @@ -239,26 +238,26 @@ def _visit_dict(self, node) -> Iterator[str]: else: yield f"{key}: {value}" - def visit_dictunpack(self, node) -> str: + def visit_dictunpack(self, node: nodes.DictUnpack) -> str: return "**" - def visit_dictcomp(self, node) -> str: - """return an astroid.DictComp node as string""" + def visit_dictcomp(self, node: nodes.DictComp) -> 
str: + """return an nodes.DictComp node as string""" return "{{{}: {} {}}}".format( node.key.accept(self), node.value.accept(self), " ".join(n.accept(self) for n in node.generators), ) - def visit_expr(self, node) -> str: - """return an astroid.Discard node as string""" + def visit_expr(self, node: nodes.Expr) -> str: + """return an nodes.Expr node as string""" return node.value.accept(self) - def visit_emptynode(self, node) -> str: - """dummy method for visiting an Empty node""" + def visit_emptynode(self, node: nodes.EmptyNode) -> str: + """dummy method for visiting an EmptyNode""" return "" - def visit_excepthandler(self, node) -> str: + def visit_excepthandler(self, node: nodes.ExceptHandler) -> str: n = "except" if isinstance(getattr(node, "parent", None), nodes.TryStar): n = "except*" @@ -271,12 +270,12 @@ def visit_excepthandler(self, node) -> str: excs = f"{n}" return f"{excs}:\n{self._stmt_list(node.body)}" - def visit_empty(self, node) -> str: - """return an Empty node as string""" + def visit_empty(self, node: nodes.EmptyNode) -> str: + """return an EmptyNode as string""" return "" - def visit_for(self, node) -> str: - """return an astroid.For node as string""" + def visit_for(self, node: nodes.For) -> str: + """return an nodes.For node as string""" fors = "for {} in {}:\n{}".format( node.target.accept(self), node.iter.accept(self), self._stmt_list(node.body) ) @@ -284,13 +283,13 @@ def visit_for(self, node) -> str: fors = f"{fors}\nelse:\n{self._stmt_list(node.orelse)}" return fors - def visit_importfrom(self, node) -> str: - """return an astroid.ImportFrom node as string""" + def visit_importfrom(self, node: nodes.ImportFrom) -> str: + """return an nodes.ImportFrom node as string""" return "from {} import {}".format( "." 
* (node.level or 0) + node.modname, _import_string(node.names) ) - def visit_joinedstr(self, node) -> str: + def visit_joinedstr(self, node: nodes.JoinedStr) -> str: string = "".join( # Use repr on the string literal parts # to get proper escapes, e.g. \n, \\, \" @@ -317,7 +316,7 @@ def visit_joinedstr(self, node) -> str: return "f" + quote + string + quote - def visit_formattedvalue(self, node) -> str: + def visit_formattedvalue(self, node: nodes.FormattedValue) -> str: result = node.value.accept(self) if node.conversion and node.conversion >= 0: # e.g. if node.conversion == 114: result += "!r" @@ -331,17 +330,18 @@ def visit_formattedvalue(self, node) -> str: def handle_functiondef(self, node: nodes.FunctionDef, keyword: str) -> str: """return a (possibly async) function definition node as string""" decorate = node.decorators.accept(self) if node.decorators else "" + type_params = self._handle_type_params(node.type_params) docs = self._docs_dedent(node.doc_node) trailer = ":" if node.returns: return_annotation = " -> " + node.returns.as_string() trailer = return_annotation + ":" - # TODO: handle type_params - def_format = "\n%s%s %s(%s)%s%s\n%s" + def_format = "\n%s%s %s%s(%s)%s%s\n%s" return def_format % ( decorate, keyword, node.name, + type_params, node.args.accept(self), trailer, docs, @@ -349,21 +349,23 @@ def handle_functiondef(self, node: nodes.FunctionDef, keyword: str) -> str: ) def visit_functiondef(self, node: nodes.FunctionDef) -> str: - """return an astroid.FunctionDef node as string""" + """return an nodes.FunctionDef node as string""" return self.handle_functiondef(node, "def") def visit_asyncfunctiondef(self, node: nodes.AsyncFunctionDef) -> str: - """return an astroid.AsyncFunction node as string""" + """return an nodes.AsyncFunction node as string""" return self.handle_functiondef(node, "async def") - def visit_generatorexp(self, node) -> str: - """return an astroid.GeneratorExp node as string""" + def visit_generatorexp(self, node: 
nodes.GeneratorExp) -> str: + """return an nodes.GeneratorExp node as string""" return "({} {})".format( node.elt.accept(self), " ".join(n.accept(self) for n in node.generators) ) - def visit_attribute(self, node) -> str: - """return an astroid.Getattr node as string""" + def visit_attribute( + self, node: nodes.Attribute | nodes.AssignAttr | nodes.DelAttr + ) -> str: + """return an nodes.Attribute node as string""" try: left = self._precedence_parens(node, node.expr) except RecursionError: @@ -377,12 +379,12 @@ def visit_attribute(self, node) -> str: left = f"({left})" return f"{left}.{node.attrname}" - def visit_global(self, node) -> str: - """return an astroid.Global node as string""" + def visit_global(self, node: nodes.Global) -> str: + """return an nodes.Global node as string""" return f"global {', '.join(node.names)}" - def visit_if(self, node) -> str: - """return an astroid.If node as string""" + def visit_if(self, node: nodes.If) -> str: + """return an nodes.If node as string""" ifs = [f"if {node.test.accept(self)}:\n{self._stmt_list(node.body)}"] if node.has_elif_block(): ifs.append(f"el{self._stmt_list(node.orelse, indent=False)}") @@ -390,26 +392,26 @@ def visit_if(self, node) -> str: ifs.append(f"else:\n{self._stmt_list(node.orelse)}") return "\n".join(ifs) - def visit_ifexp(self, node) -> str: - """return an astroid.IfExp node as string""" + def visit_ifexp(self, node: nodes.IfExp) -> str: + """return an nodes.IfExp node as string""" return "{} if {} else {}".format( self._precedence_parens(node, node.body, is_left=True), self._precedence_parens(node, node.test, is_left=True), self._precedence_parens(node, node.orelse, is_left=False), ) - def visit_import(self, node) -> str: - """return an astroid.Import node as string""" + def visit_import(self, node: nodes.Import) -> str: + """return an nodes.Import node as string""" return f"import {_import_string(node.names)}" - def visit_keyword(self, node) -> str: - """return an astroid.Keyword node as string""" 
+ def visit_keyword(self, node: nodes.Keyword) -> str: + """return an nodes.Keyword node as string""" if node.arg is None: return f"**{node.value.accept(self)}" return f"{node.arg}={node.value.accept(self)}" - def visit_lambda(self, node) -> str: - """return an astroid.Lambda node as string""" + def visit_lambda(self, node: nodes.Lambda) -> str: + """return an nodes.Lambda node as string""" args = node.args.accept(self) body = node.body.accept(self) if args: @@ -417,57 +419,60 @@ def visit_lambda(self, node) -> str: return f"lambda: {body}" - def visit_list(self, node) -> str: - """return an astroid.List node as string""" + def visit_list(self, node: nodes.List) -> str: + """return an nodes.List node as string""" return f"[{', '.join(child.accept(self) for child in node.elts)}]" - def visit_listcomp(self, node) -> str: - """return an astroid.ListComp node as string""" + def visit_listcomp(self, node: nodes.ListComp) -> str: + """return an nodes.ListComp node as string""" return "[{} {}]".format( node.elt.accept(self), " ".join(n.accept(self) for n in node.generators) ) - def visit_module(self, node) -> str: - """return an astroid.Module node as string""" + def visit_module(self, node: nodes.Module) -> str: + """return an nodes.Module node as string""" docs = f'"""{node.doc_node.value}"""\n\n' if node.doc_node else "" return docs + "\n".join(n.accept(self) for n in node.body) + "\n\n" - def visit_name(self, node) -> str: - """return an astroid.Name node as string""" + def visit_name(self, node: nodes.Name) -> str: + """return an nodes.Name node as string""" return node.name - def visit_namedexpr(self, node) -> str: + def visit_namedexpr(self, node: nodes.NamedExpr) -> str: """Return an assignment expression node as string""" target = node.target.accept(self) value = node.value.accept(self) return f"{target} := {value}" - def visit_nonlocal(self, node) -> str: - """return an astroid.Nonlocal node as string""" + def visit_nonlocal(self, node: nodes.Nonlocal) -> str: + 
"""return an nodes.Nonlocal node as string""" return f"nonlocal {', '.join(node.names)}" def visit_paramspec(self, node: nodes.ParamSpec) -> str: - """return an astroid.ParamSpec node as string""" - return node.name.accept(self) + """return an nodes.ParamSpec node as string""" + default_value_str = ( + f" = {node.default_value.accept(self)}" if node.default_value else "" + ) + return f"**{node.name.accept(self)}{default_value_str}" - def visit_pass(self, node) -> str: - """return an astroid.Pass node as string""" + def visit_pass(self, node: nodes.Pass) -> str: + """return an nodes.Pass node as string""" return "pass" def visit_partialfunction(self, node: objects.PartialFunction) -> str: """Return an objects.PartialFunction as string.""" return self.visit_functiondef(node) - def visit_raise(self, node) -> str: - """return an astroid.Raise node as string""" + def visit_raise(self, node: nodes.Raise) -> str: + """return an nodes.Raise node as string""" if node.exc: if node.cause: return f"raise {node.exc.accept(self)} from {node.cause.accept(self)}" return f"raise {node.exc.accept(self)}" return "raise" - def visit_return(self, node) -> str: - """return an astroid.Return node as string""" + def visit_return(self, node: nodes.Return) -> str: + """return an nodes.Return node as string""" if node.is_tuple_return() and len(node.value.elts) > 1: elts = [child.accept(self) for child in node.value.elts] return f"return {', '.join(elts)}" @@ -477,18 +482,18 @@ def visit_return(self, node) -> str: return "return" - def visit_set(self, node) -> str: - """return an astroid.Set node as string""" + def visit_set(self, node: nodes.Set) -> str: + """return an nodes.Set node as string""" return "{%s}" % ", ".join(child.accept(self) for child in node.elts) - def visit_setcomp(self, node) -> str: - """return an astroid.SetComp node as string""" + def visit_setcomp(self, node: nodes.SetComp) -> str: + """return an nodes.SetComp node as string""" return "{{{} {}}}".format( 
node.elt.accept(self), " ".join(n.accept(self) for n in node.generators) ) - def visit_slice(self, node) -> str: - """return an astroid.Slice node as string""" + def visit_slice(self, node: nodes.Slice) -> str: + """return an nodes.Slice node as string""" lower = node.lower.accept(self) if node.lower else "" upper = node.upper.accept(self) if node.upper else "" step = node.step.accept(self) if node.step else "" @@ -496,8 +501,8 @@ def visit_slice(self, node) -> str: return f"{lower}:{upper}:{step}" return f"{lower}:{upper}" - def visit_subscript(self, node) -> str: - """return an astroid.Subscript node as string""" + def visit_subscript(self, node: nodes.Subscript) -> str: + """return an nodes.Subscript node as string""" idx = node.slice if idx.__class__.__name__.lower() == "index": idx = idx.value @@ -508,8 +513,8 @@ def visit_subscript(self, node) -> str: idxstr = idxstr[1:-1] return f"{self._precedence_parens(node, node.value)}[{idxstr}]" - def visit_try(self, node) -> str: - """return an astroid.Try node as string""" + def visit_try(self, node: nodes.Try) -> str: + """return an nodes.Try node as string""" trys = [f"try:\n{self._stmt_list(node.body)}"] for handler in node.handlers: trys.append(handler.accept(self)) @@ -519,8 +524,8 @@ def visit_try(self, node) -> str: trys.append(f"finally:\n{self._stmt_list(node.finalbody)}") return "\n".join(trys) - def visit_trystar(self, node) -> str: - """return an astroid.TryStar node as string""" + def visit_trystar(self, node: nodes.TryStar) -> str: + """return an nodes.TryStar node as string""" trys = [f"try:\n{self._stmt_list(node.body)}"] for handler in node.handlers: trys.append(handler.accept(self)) @@ -530,48 +535,56 @@ def visit_trystar(self, node) -> str: trys.append(f"finally:\n{self._stmt_list(node.finalbody)}") return "\n".join(trys) - def visit_tuple(self, node) -> str: - """return an astroid.Tuple node as string""" + def visit_tuple(self, node: nodes.Tuple) -> str: + """return an nodes.Tuple node as 
string""" if len(node.elts) == 1: return f"({node.elts[0].accept(self)}, )" return f"({', '.join(child.accept(self) for child in node.elts)})" def visit_typealias(self, node: nodes.TypeAlias) -> str: - """return an astroid.TypeAlias node as string""" - return node.name.accept(self) if node.name else "_" + """return an nodes.TypeAlias node as string""" + type_params = self._handle_type_params(node.type_params) + return f"type {node.name.accept(self)}{type_params} = {node.value.accept(self)}" def visit_typevar(self, node: nodes.TypeVar) -> str: - """return an astroid.TypeVar node as string""" - return node.name.accept(self) if node.name else "_" + """return an nodes.TypeVar node as string""" + bound_str = f": {node.bound.accept(self)}" if node.bound else "" + default_value_str = ( + f" = {node.default_value.accept(self)}" if node.default_value else "" + ) + return f"{node.name.accept(self)}{bound_str}{default_value_str}" def visit_typevartuple(self, node: nodes.TypeVarTuple) -> str: - """return an astroid.TypeVarTuple node as string""" - return "*" + node.name.accept(self) if node.name else "" + """return an nodes.TypeVarTuple node as string""" + default_value_str = ( + f" = {node.default_value.accept(self)}" if node.default_value else "" + ) + return f"*{node.name.accept(self)}{default_value_str}" - def visit_unaryop(self, node) -> str: - """return an astroid.UnaryOp node as string""" + def visit_unaryop(self, node: nodes.UnaryOp) -> str: + """return an nodes.UnaryOp node as string""" if node.op == "not": operator = "not " else: operator = node.op return f"{operator}{self._precedence_parens(node, node.operand)}" - def visit_while(self, node) -> str: - """return an astroid.While node as string""" + def visit_while(self, node: nodes.While) -> str: + """return an nodes.While node as string""" whiles = f"while {node.test.accept(self)}:\n{self._stmt_list(node.body)}" if node.orelse: whiles = f"{whiles}\nelse:\n{self._stmt_list(node.orelse)}" return whiles - def 
visit_with(self, node) -> str: # 'with' without 'as' is possible - """return an astroid.With node as string""" + def visit_with(self, node: nodes.With) -> str: # 'with' without 'as' is possible + """return an nodes.With node as string""" items = ", ".join( f"{expr.accept(self)}" + ((v and f" as {v.accept(self)}") or "") for expr, v in node.items ) return f"with {items}:\n{self._stmt_list(node.body)}" - def visit_yield(self, node) -> str: + def visit_yield(self, node: nodes.Yield) -> str: """yield an ast.Yield node as string""" yi_val = (" " + node.value.accept(self)) if node.value else "" expr = "yield" + yi_val @@ -580,8 +593,8 @@ def visit_yield(self, node) -> str: return f"({expr})" - def visit_yieldfrom(self, node) -> str: - """Return an astroid.YieldFrom node as string.""" + def visit_yieldfrom(self, node: nodes.YieldFrom) -> str: + """Return an nodes.YieldFrom node as string.""" yi_val = (" " + node.value.accept(self)) if node.value else "" expr = "yield from" + yi_val if node.parent.is_statement: @@ -589,39 +602,39 @@ def visit_yieldfrom(self, node) -> str: return f"({expr})" - def visit_starred(self, node) -> str: + def visit_starred(self, node: nodes.Starred) -> str: """return Starred node as string""" return "*" + node.value.accept(self) - def visit_match(self, node: Match) -> str: - """Return an astroid.Match node as string.""" + def visit_match(self, node: nodes.Match) -> str: + """Return an nodes.Match node as string.""" return f"match {node.subject.accept(self)}:\n{self._stmt_list(node.cases)}" - def visit_matchcase(self, node: MatchCase) -> str: - """Return an astroid.MatchCase node as string.""" + def visit_matchcase(self, node: nodes.MatchCase) -> str: + """Return an nodes.MatchCase node as string.""" guard_str = f" if {node.guard.accept(self)}" if node.guard else "" return ( f"case {node.pattern.accept(self)}{guard_str}:\n" f"{self._stmt_list(node.body)}" ) - def visit_matchvalue(self, node: MatchValue) -> str: - """Return an astroid.MatchValue 
 node as string.""" + def visit_matchvalue(self, node: nodes.MatchValue) -> str: + """Return an nodes.MatchValue node as string.""" return node.value.accept(self) @staticmethod - def visit_matchsingleton(node: MatchSingleton) -> str: - """Return an astroid.MatchSingleton node as string.""" + def visit_matchsingleton(node: nodes.MatchSingleton) -> str: + """Return an nodes.MatchSingleton node as string.""" return str(node.value) - def visit_matchsequence(self, node: MatchSequence) -> str: - """Return an astroid.MatchSequence node as string.""" + def visit_matchsequence(self, node: nodes.MatchSequence) -> str: + """Return an nodes.MatchSequence node as string.""" if node.patterns is None: return "[]" return f"[{', '.join(p.accept(self) for p in node.patterns)}]" - def visit_matchmapping(self, node: MatchMapping) -> str: - """Return an astroid.MatchMapping node as string.""" + def visit_matchmapping(self, node: nodes.MatchMapping) -> str: + """Return an nodes.MatchMapping node as string.""" mapping_strings: list[str] = [] if node.keys and node.patterns: mapping_strings.extend( @@ -632,8 +645,8 @@ def visit_matchmapping(self, node: MatchMapping) -> str: mapping_strings.append(f"**{node.rest.accept(self)}") return f"{'{'}{', '.join(mapping_strings)}{'}'}" - def visit_matchclass(self, node: MatchClass) -> str: - """Return an astroid.MatchClass node as string.""" + def visit_matchclass(self, node: nodes.MatchClass) -> str: + """Return an nodes.MatchClass node as string.""" if node.cls is None: raise AssertionError(f"{node} does not have a 'cls' node") class_strings: list[str] = [] @@ -644,51 +657,75 @@ def visit_matchclass(self, node: MatchClass) -> str: class_strings.append(f"{attr}={pattern.accept(self)}") return f"{node.cls.accept(self)}({', '.join(class_strings)})" - def visit_matchstar(self, node: MatchStar) -> str: - """Return an astroid.MatchStar node as string.""" + def visit_matchstar(self, node: nodes.MatchStar) -> str: + """Return an nodes.MatchStar node as 
string.""" return f"*{node.name.accept(self) if node.name else '_'}" - def visit_matchas(self, node: MatchAs) -> str: - """Return an astroid.MatchAs node as string.""" - # pylint: disable=import-outside-toplevel - # Prevent circular dependency - from astroid.nodes.node_classes import MatchClass, MatchMapping, MatchSequence - - if isinstance(node.parent, (MatchSequence, MatchMapping, MatchClass)): + def visit_matchas(self, node: nodes.MatchAs) -> str: + """Return an nodes.MatchAs node as string.""" + if isinstance( + node.parent, (nodes.MatchSequence, nodes.MatchMapping, nodes.MatchClass) + ): return node.name.accept(self) if node.name else "_" return ( f"{node.pattern.accept(self) if node.pattern else '_'}" f"{f' as {node.name.accept(self)}' if node.name else ''}" ) - def visit_matchor(self, node: MatchOr) -> str: - """Return an astroid.MatchOr node as string.""" + def visit_matchor(self, node: nodes.MatchOr) -> str: + """Return an nodes.MatchOr node as string.""" if node.patterns is None: raise AssertionError(f"{node} does not have pattern nodes") return " | ".join(p.accept(self) for p in node.patterns) + def visit_templatestr(self, node: nodes.TemplateStr) -> str: + """Return an nodes.TemplateStr node as string.""" + string = "" + for value in node.values: + match value: + case nodes.Interpolation(): + string += "{" + value.accept(self) + "}" + case _: + string += value.accept(self)[1:-1] + for quote in ("'", '"', '"""', "'''"): + if quote not in string: + break + return "t" + quote + string + quote + + def visit_interpolation(self, node: nodes.Interpolation) -> str: + """Return an nodes.Interpolation node as string.""" + result = f"{node.str}" + if node.conversion and node.conversion >= 0: + # e.g. if node.conversion == 114: result += "!r" + result += "!" + chr(node.conversion) + if node.format_spec: + # The format spec is itself a JoinedString, i.e. 
an f-string + # We strip the f and quotes of the ends + result += ":" + node.format_spec.accept(self)[2:-1] + return result + # These aren't for real AST nodes, but for inference objects. - def visit_frozenset(self, node): + def visit_frozenset(self, node: objects.FrozenSet) -> str: return node.parent.accept(self) - def visit_super(self, node): + def visit_super(self, node: objects.Super) -> str: return node.parent.accept(self) - def visit_uninferable(self, node): + def visit_uninferable(self, node) -> str: return str(node) - def visit_property(self, node): + def visit_property(self, node: objects.Property) -> str: return node.function.accept(self) - def visit_evaluatedobject(self, node): + def visit_evaluatedobject(self, node: nodes.EvaluatedObject) -> str: return node.original.accept(self) - def visit_unknown(self, node: Unknown) -> str: + def visit_unknown(self, node: nodes.Unknown) -> str: return str(node) -def _import_string(names) -> str: +def _import_string(names: list[tuple[str, str | None]]) -> str: """return a list of (name, asname) formatted as a string""" _names = [] for name, asname in names: diff --git a/astroid/nodes/node_classes.py b/astroid/nodes/node_classes.py index db78f8ce00..b9077de774 100644 --- a/astroid/nodes/node_classes.py +++ b/astroid/nodes/node_classes.py @@ -15,18 +15,11 @@ import warnings from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from functools import cached_property -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Literal, - Optional, - Union, -) +from typing import TYPE_CHECKING, Any, ClassVar, Literal, Union from astroid import decorators, protocols, util from astroid.bases import Instance, _infer_stmts -from astroid.const import _EMPTY_OBJECT_MARKER, Context +from astroid.const import _EMPTY_OBJECT_MARKER, PY314_PLUS, Context from astroid.context import CallContext, InferenceContext, copy_context from astroid.exceptions import ( AstroidBuildingError, @@ -46,6 +39,7 @@ from 
astroid.nodes import _base_nodes from astroid.nodes.const import OP_PRECEDENCE from astroid.nodes.node_ng import NodeNG +from astroid.nodes.scoped_nodes import SYNTHETIC_ROOT from astroid.typing import ( ConstFactoryResult, InferenceErrorInfo, @@ -70,23 +64,24 @@ def _is_const(value) -> bool: _NodesT = typing.TypeVar("_NodesT", bound=NodeNG) _BadOpMessageT = typing.TypeVar("_BadOpMessageT", bound=util.BadOperationMessage) +# pylint: disable-next=consider-alternative-union-syntax AssignedStmtsPossibleNode = Union["List", "Tuple", "AssignName", "AssignAttr", None] AssignedStmtsCall = Callable[ [ _NodesT, AssignedStmtsPossibleNode, - Optional[InferenceContext], - Optional[list[int]], + InferenceContext | None, + list[int] | None, ], Any, ] InferBinaryOperation = Callable[ - [_NodesT, Optional[InferenceContext]], - Generator[Union[InferenceResult, _BadOpMessageT]], + [_NodesT, InferenceContext | None], + Generator[InferenceResult | _BadOpMessageT], ] InferLHS = Callable[ - [_NodesT, Optional[InferenceContext]], - Generator[InferenceResult, None, Optional[InferenceErrorInfo]], + [_NodesT, InferenceContext | None], + Generator[InferenceResult, None, InferenceErrorInfo | None], ] InferUnaryOp = Callable[[_NodesT, str], ConstFactoryResult] @@ -1027,7 +1022,7 @@ def get_children(self): @decorators.raise_if_nothing_inferred def _infer( - self: nodes.Arguments, context: InferenceContext | None = None, **kwargs: Any + self, context: InferenceContext | None = None, **kwargs: Any ) -> Generator[InferenceResult]: # pylint: disable-next=import-outside-toplevel from astroid.protocols import _arguments_infer_argname @@ -1446,7 +1441,7 @@ def _infer_augassign( @decorators.raise_if_nothing_inferred @decorators.path_wrapper def _infer( - self: nodes.AugAssign, context: InferenceContext | None = None, **kwargs: Any + self, context: InferenceContext | None = None, **kwargs: Any ) -> Generator[InferenceResult]: return self._filter_operation_errors( self._infer_augassign, context, 
util.BadBinaryOperationMessage @@ -1523,7 +1518,7 @@ def get_children(self): yield self.left yield self.right - def op_precedence(self): + def op_precedence(self) -> int: return OP_PRECEDENCE[self.op] def op_left_associative(self) -> bool: @@ -1561,7 +1556,7 @@ def _infer_binop( @decorators.yes_if_nothing_inferred @decorators.path_wrapper def _infer( - self: nodes.BinOp, context: InferenceContext | None = None, **kwargs: Any + self, context: InferenceContext | None = None, **kwargs: Any ) -> Generator[InferenceResult]: return self._filter_operation_errors( self._infer_binop, context, util.BadBinaryOperationMessage @@ -1632,13 +1627,13 @@ def postinit(self, values: list[NodeNG] | None = None) -> None: def get_children(self): yield from self.values - def op_precedence(self): + def op_precedence(self) -> int: return OP_PRECEDENCE[self.op] @decorators.raise_if_nothing_inferred @decorators.path_wrapper def _infer( - self: nodes.BoolOp, context: InferenceContext | None = None, **kwargs: Any + self, context: InferenceContext | None = None, **kwargs: Any ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: """Infer a boolean operation (and / or / not). @@ -1856,7 +1851,7 @@ def last_child(self): # TODO: move to util? @staticmethod def _to_literal(node: SuccessfulInferenceResult) -> Any: - # Can raise SyntaxError or ValueError from ast.literal_eval + # Can raise SyntaxError, ValueError, or TypeError from ast.literal_eval # Can raise AttributeError from node.as_string() as not all nodes have a visitor # Is this the stupidest idea or the simplest idea? 
return ast.literal_eval(node.as_string()) @@ -1892,7 +1887,7 @@ def _do_compare( try: left, right = self._to_literal(left), self._to_literal(right) - except (SyntaxError, ValueError, AttributeError): + except (SyntaxError, ValueError, AttributeError, TypeError): return util.Uninferable try: @@ -2038,7 +2033,7 @@ def __init__( value: Any, lineno: int | None = None, col_offset: int | None = None, - parent: NodeNG | None = None, + parent: NodeNG = SYNTHETIC_ROOT, kind: str | None = None, *, end_lineno: int | None = None, @@ -2061,7 +2056,16 @@ def __init__( :param end_col_offset: The end column this node appears on in the source code. Note: This is after the last symbol. """ - self.value: Any = value + if getattr(value, "__name__", None) == "__doc__": + warnings.warn( # pragma: no cover + "You have most likely called a __doc__ field of some object " + "and it didn't return a string. " + "That happens to some symbols from the standard library. " + "Check for isinstance(.__doc__, str).", + RuntimeWarning, + stacklevel=0, + ) + self.value = value """The value that the constant represents.""" self.kind: str | None = kind # can be None @@ -2162,8 +2166,12 @@ def bool_value(self, context: InferenceContext | None = None): """Determine the boolean value of this node. :returns: The boolean value of this node. 
- :rtype: bool + :rtype: bool or Uninferable """ + # bool(NotImplemented) is deprecated; it raises TypeError starting from Python 3.14 + # and returns True for versions under 3.14 + if self.value is NotImplemented: + return util.Uninferable if PY314_PLUS else True return bool(self.value) def _infer( @@ -2541,7 +2549,7 @@ def __init__( self, lineno: None = None, col_offset: None = None, - parent: None = None, + parent: NodeNG = SYNTHETIC_ROOT, *, end_lineno: None = None, end_col_offset: None = None, @@ -3042,7 +3050,7 @@ def get_children(self): yield from self.body yield from self.orelse - def has_elif_block(self): + def has_elif_block(self) -> bool: return len(self.orelse) == 1 and isinstance(self.orelse[0], If) def _get_yield_nodes_skip_functions(self): @@ -3100,28 +3108,37 @@ def _infer( to inferring both branches. Otherwise, we infer either branch depending on the condition. """ - both_branches = False + # We use two separate contexts for evaluating lhs and rhs because # evaluating lhs may leave some undesired entries in context.path # which may not let us infer right value of rhs. - context = context or InferenceContext() lhs_context = copy_context(context) rhs_context = copy_context(context) + + # Infer bool condition. Stop inferring if in doubt and fallback to + # evaluating both branches. 
+ condition: bool | None = None try: - test = next(self.test.infer(context=context.clone())) - except (InferenceError, StopIteration): - both_branches = True - else: - if not isinstance(test, util.UninferableBase): - if test.bool_value(): - yield from self.body.infer(context=lhs_context) - else: - yield from self.orelse.infer(context=rhs_context) - else: - both_branches = True - if both_branches: + for test in self.test.infer(context=context.clone()): + if isinstance(test, util.UninferableBase): + condition = None + break + test_bool_value = test.bool_value() + if isinstance(test_bool_value, util.UninferableBase): + condition = None + break + if condition is None: + condition = test_bool_value + elif test_bool_value != condition: + condition = None + break + except InferenceError: + condition = None + + if condition is True or condition is None: yield from self.body.infer(context=lhs_context) + if condition is False or condition is None: yield from self.orelse.infer(context=rhs_context) @@ -3379,9 +3396,9 @@ class ParamSpec(_base_nodes.AssignTypeNode): """ - _astroid_fields = ("name",) - + _astroid_fields = ("name", "default_value") name: AssignName + default_value: NodeNG | None def __init__( self, @@ -3400,8 +3417,9 @@ def __init__( parent=parent, ) - def postinit(self, *, name: AssignName) -> None: + def postinit(self, *, name: AssignName, default_value: NodeNG | None) -> None: self.name = name + self.default_value = default_value def _infer( self, context: InferenceContext | None = None, **kwargs: Any @@ -3489,7 +3507,7 @@ def get_children(self): if self.value is not None: yield self.value - def is_tuple_return(self): + def is_tuple_return(self) -> bool: return isinstance(self.value, Tuple) def _get_return_nodes_skip_functions(self): @@ -4137,10 +4155,10 @@ class TypeVar(_base_nodes.AssignTypeNode): """ - _astroid_fields = ("name", "bound") - + _astroid_fields = ("name", "bound", "default_value") name: AssignName bound: NodeNG | None + default_value: NodeNG | 
None def __init__( self, @@ -4159,9 +4177,16 @@ def __init__( parent=parent, ) - def postinit(self, *, name: AssignName, bound: NodeNG | None) -> None: + def postinit( + self, + *, + name: AssignName, + bound: NodeNG | None, + default_value: NodeNG | None = None, + ) -> None: self.name = name self.bound = bound + self.default_value = default_value def _infer( self, context: InferenceContext | None = None, **kwargs: Any @@ -4183,9 +4208,9 @@ class TypeVarTuple(_base_nodes.AssignTypeNode): """ - _astroid_fields = ("name",) - + _astroid_fields = ("name", "default_value") name: AssignName + default_value: NodeNG | None def __init__( self, @@ -4204,8 +4229,11 @@ def __init__( parent=parent, ) - def postinit(self, *, name: AssignName) -> None: + def postinit( + self, *, name: AssignName, default_value: NodeNG | None = None + ) -> None: self.name = name + self.default_value = default_value def _infer( self, context: InferenceContext | None = None, **kwargs: Any @@ -4289,14 +4317,14 @@ def type_errors( def get_children(self): yield self.operand - def op_precedence(self): + def op_precedence(self) -> int: if self.op == "not": return OP_PRECEDENCE[self.op] return super().op_precedence() def _infer_unaryop( - self: nodes.UnaryOp, context: InferenceContext | None = None, **kwargs: Any + self, context: InferenceContext | None = None, **kwargs: Any ) -> Generator[ InferenceResult | util.BadUnaryOperationMessage, None, InferenceErrorInfo ]: @@ -4362,7 +4390,7 @@ def _infer_unaryop( @decorators.raise_if_nothing_inferred @decorators.path_wrapper def _infer( - self: nodes.UnaryOp, context: InferenceContext | None = None, **kwargs: Any + self, context: InferenceContext | None = None, **kwargs: Any ) -> Generator[InferenceResult, None, InferenceErrorInfo]: """Infer what an UnaryOp should return when evaluated.""" yield from self._filter_operation_errors( @@ -4705,7 +4733,7 @@ def _infer( continue -MISSING_VALUE = "{MISSING_VALUE}" +UNINFERABLE_VALUE = "{Uninferable}" class 
JoinedStr(NodeNG): @@ -4771,33 +4799,57 @@ def get_children(self): def _infer( self, context: InferenceContext | None = None, **kwargs: Any ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: - yield from self._infer_from_values(self.values, context) + if self.values: + yield from self._infer_with_values(context) + else: + yield Const("") + + def _infer_with_values( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + uninferable_already_generated = False + for inferred in self._infer_from_values(self.values, context): + failed = inferred is util.Uninferable or ( + isinstance(inferred, Const) and UNINFERABLE_VALUE in inferred.value + ) + if failed: + if not uninferable_already_generated: + uninferable_already_generated = True + yield util.Uninferable + continue + yield inferred @classmethod def _infer_from_values( cls, nodes: list[NodeNG], context: InferenceContext | None = None, **kwargs: Any ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: if not nodes: - yield return if len(nodes) == 1: - yield from nodes[0]._infer(context, **kwargs) + for node in cls._safe_infer_from_node(nodes[0], context, **kwargs): + if isinstance(node, Const): + yield node + continue + yield Const(UNINFERABLE_VALUE) return - uninferable_already_generated = False - for prefix in nodes[0]._infer(context, **kwargs): + for prefix in cls._safe_infer_from_node(nodes[0], context, **kwargs): for suffix in cls._infer_from_values(nodes[1:], context, **kwargs): result = "" for node in (prefix, suffix): if isinstance(node, Const): result += str(node.value) continue - result += MISSING_VALUE - if MISSING_VALUE in result: - if not uninferable_already_generated: - uninferable_already_generated = True - yield util.Uninferable - else: - yield Const(result) + result += UNINFERABLE_VALUE + yield Const(result) + + @classmethod + def _safe_infer_from_node( + cls, node: NodeNG, context: InferenceContext 
| None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + try: + yield from node._infer(context, **kwargs) + except InferenceError: + yield util.Uninferable class NamedExpr(_base_nodes.AssignTypeNode): @@ -4864,9 +4916,7 @@ def postinit(self, target: NodeNG, value: NodeNG) -> None: See astroid/protocols.py for actual implementation. """ - def frame( - self, *, future: Literal[None, True] = None - ) -> nodes.FunctionDef | nodes.Module | nodes.ClassDef | nodes.Lambda: + def frame(self) -> nodes.FunctionDef | nodes.Module | nodes.ClassDef | nodes.Lambda: """The first parent frame node. A frame node is a :class:`Module`, :class:`FunctionDef`, @@ -4874,12 +4924,6 @@ def frame( :returns: The first parent frame node. """ - if future is not None: - warnings.warn( - "The future arg will be removed in astroid 4.0.", - DeprecationWarning, - stacklevel=2, - ) if not self.parent: raise ParentMissingError(target=self) @@ -4937,9 +4981,9 @@ class Unknown(_base_nodes.AssignTypeNode): def __init__( self, + parent: NodeNG, lineno: None = None, col_offset: None = None, - parent: None = None, *, end_lineno: None = None, end_col_offset: None = None, @@ -4960,6 +5004,9 @@ def _infer(self, context: InferenceContext | None = None, **kwargs): yield util.Uninferable +UNATTACHED_UNKNOWN = Unknown(parent=SYNTHETIC_ROOT) + + class EvaluatedObject(NodeNG): """Contains an object that has already been inferred @@ -5461,6 +5508,114 @@ def postinit(self, *, patterns: list[Pattern]) -> None: self.patterns = patterns +class TemplateStr(NodeNG): + """Class representing an :class:`ast.TemplateStr` node. 
+ + >>> import astroid + >>> node = astroid.extract_node('t"{name} finished {place!s}"') + >>> node + + """ + + _astroid_fields = ("values",) + + def __init__( + self, + lineno: int | None = None, + col_offset: int | None = None, + parent: NodeNG | None = None, + *, + end_lineno: int | None = None, + end_col_offset: int | None = None, + ) -> None: + self.values: list[NodeNG] + super().__init__( + lineno=lineno, + col_offset=col_offset, + end_lineno=end_lineno, + end_col_offset=end_col_offset, + parent=parent, + ) + + def postinit(self, *, values: list[NodeNG]) -> None: + self.values = values + + def get_children(self) -> Iterator[NodeNG]: + yield from self.values + + +class Interpolation(NodeNG): + """Class representing an :class:`ast.Interpolation` node. + + >>> import astroid + >>> node = astroid.extract_node('t"{name} finished {place!s}"') + >>> node + + >>> node.values[0] + + >>> node.values[2] + + """ + + _astroid_fields = ("value", "format_spec") + _other_fields = ("str", "conversion") + + def __init__( + self, + lineno: int | None = None, + col_offset: int | None = None, + parent: NodeNG | None = None, + *, + end_lineno: int | None = None, + end_col_offset: int | None = None, + ) -> None: + self.value: NodeNG + """Any expression node.""" + + self.str: str + """Text of the interpolation expression.""" + + self.conversion: int + """The type of formatting to be applied to the value. + + .. seealso:: + :class:`ast.Interpolation` + """ + + self.format_spec: JoinedStr | None = None + """The formatting to be applied to the value. + + .. 
seealso:: + :class:`ast.Interpolation` + """ + + super().__init__( + lineno=lineno, + col_offset=col_offset, + end_lineno=end_lineno, + end_col_offset=end_col_offset, + parent=parent, + ) + + def postinit( + self, + *, + value: NodeNG, + str: str, # pylint: disable=redefined-builtin + conversion: int = -1, + format_spec: JoinedStr | None = None, + ) -> None: + self.value = value + self.str = str + self.conversion = conversion + self.format_spec = format_spec + + def get_children(self) -> Iterator[NodeNG]: + yield self.value + if self.format_spec: + yield self.format_spec + + # constants ############################################################## # The _proxied attribute of all container types (List, Tuple, etc.) @@ -5533,7 +5688,7 @@ def const_factory(value: Any) -> ConstFactoryResult: instance = initializer_cls( lineno=None, col_offset=None, - parent=None, + parent=SYNTHETIC_ROOT, end_lineno=None, end_col_offset=None, ) @@ -5543,7 +5698,7 @@ def const_factory(value: Any) -> ConstFactoryResult: instance = initializer_cls( lineno=None, col_offset=None, - parent=None, + parent=SYNTHETIC_ROOT, end_lineno=None, end_col_offset=None, ) diff --git a/astroid/nodes/node_ng.py b/astroid/nodes/node_ng.py index 3a482f3cc9..1af39c244b 100644 --- a/astroid/nodes/node_ng.py +++ b/astroid/nodes/node_ng.py @@ -6,7 +6,6 @@ import pprint import sys -import warnings from collections.abc import Generator, Iterator from functools import cached_property from functools import singledispatch as _singledispatch @@ -14,14 +13,12 @@ TYPE_CHECKING, Any, ClassVar, - Literal, TypeVar, - Union, cast, overload, ) -from astroid import util +from astroid import nodes, util from astroid.context import InferenceContext from astroid.exceptions import ( AstroidError, @@ -43,7 +40,6 @@ if TYPE_CHECKING: - from astroid import nodes from astroid.nodes import _base_nodes @@ -51,7 +47,7 @@ _NodesT = TypeVar("_NodesT", bound="NodeNG") _NodesT2 = TypeVar("_NodesT2", bound="NodeNG") _NodesT3 = 
TypeVar("_NodesT3", bound="NodeNG") -SkipKlassT = Union[None, type["NodeNG"], tuple[type["NodeNG"], ...]] +SkipKlassT = None | type["NodeNG"] | tuple[type["NodeNG"], ...] class NodeNG: @@ -277,26 +273,18 @@ def parent_of(self, node) -> bool: """ return any(self is parent for parent in node.node_ancestors()) - def statement(self, *, future: Literal[None, True] = None) -> _base_nodes.Statement: + def statement(self) -> _base_nodes.Statement: """The first parent node, including self, marked as statement node. :raises StatementMissing: If self has no parent attribute. """ - if future is not None: - warnings.warn( - "The future arg will be removed in astroid 4.0.", - DeprecationWarning, - stacklevel=2, - ) if self.is_statement: return cast("_base_nodes.Statement", self) if not self.parent: raise StatementMissing(target=self) return self.parent.statement() - def frame( - self, *, future: Literal[None, True] = None - ) -> nodes.FunctionDef | nodes.Module | nodes.ClassDef | nodes.Lambda: + def frame(self) -> nodes.FunctionDef | nodes.Module | nodes.ClassDef | nodes.Lambda: """The first parent frame node. A frame node is a :class:`Module`, :class:`FunctionDef`, @@ -305,15 +293,9 @@ def frame( :returns: The first parent frame node. :raises ParentMissingError: If self has no parent attribute. """ - if future is not None: - warnings.warn( - "The future arg will be removed in astroid 4.0.", - DeprecationWarning, - stacklevel=2, - ) if self.parent is None: raise ParentMissingError(target=self) - return self.parent.frame(future=future) + return self.parent.frame() def scope(self) -> nodes.LocalsDictNodeNG: """The first parent node defining a new scope. @@ -332,11 +314,13 @@ def root(self) -> nodes.Module: :returns: The root node. """ if not (parent := self.parent): - return self # type: ignore[return-value] # Only 'Module' does not have a parent node. 
+ assert isinstance(self, nodes.Module) + return self while parent.parent: parent = parent.parent - return parent # type: ignore[return-value] # Only 'Module' does not have a parent node. + assert isinstance(parent, nodes.Module) + return parent def child_sequence(self, child): """Search for the sequence that contains this child. @@ -656,6 +640,8 @@ def repr_tree( :rtype: str """ + # pylint: disable = too-many-statements + @_singledispatch def _repr_tree(node, result, done, cur_indent="", depth=1): """Outputs a representation of a non-tuple/list, non-node that's @@ -776,7 +762,7 @@ def bool_value(self, context: InferenceContext | None = None): """ return util.Uninferable - def op_precedence(self): + def op_precedence(self) -> int: # Look up by class name or default to highest precedence return OP_PRECEDENCE.get(self.__class__.__name__, len(OP_PRECEDENCE)) diff --git a/astroid/nodes/scoped_nodes/__init__.py b/astroid/nodes/scoped_nodes/__init__.py index 35301107d5..01f99fa3d5 100644 --- a/astroid/nodes/scoped_nodes/__init__.py +++ b/astroid/nodes/scoped_nodes/__init__.py @@ -11,6 +11,7 @@ from astroid.nodes.scoped_nodes.mixin import ComprehensionScope, LocalsDictNodeNG from astroid.nodes.scoped_nodes.scoped_nodes import ( + SYNTHETIC_ROOT, AsyncFunctionDef, ClassDef, DictComp, @@ -27,6 +28,7 @@ from astroid.nodes.scoped_nodes.utils import builtin_lookup __all__ = ( + "SYNTHETIC_ROOT", "AsyncFunctionDef", "ClassDef", "ComprehensionScope", diff --git a/astroid/nodes/scoped_nodes/mixin.py b/astroid/nodes/scoped_nodes/mixin.py index 8874c0691a..d10d317440 100644 --- a/astroid/nodes/scoped_nodes/mixin.py +++ b/astroid/nodes/scoped_nodes/mixin.py @@ -6,7 +6,8 @@ from __future__ import annotations -from typing import TYPE_CHECKING, TypeVar, overload +import sys +from typing import TYPE_CHECKING, overload from astroid.exceptions import ParentMissingError from astroid.filter_statements import _filter_stmts @@ -14,11 +15,13 @@ from astroid.nodes.scoped_nodes.utils import 
builtin_lookup from astroid.typing import InferenceResult, SuccessfulInferenceResult +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self if TYPE_CHECKING: from astroid import nodes -_T = TypeVar("_T") - class LocalsDictNodeNG(_base_nodes.LookupMixIn): """this class provides locals handling common to Module, FunctionDef @@ -46,7 +49,7 @@ def qname(self) -> str: except ParentMissingError: return self.name - def scope(self: _T) -> _T: + def scope(self) -> Self: """The first parent node defining a new scope. :returns: The first parent scope node. diff --git a/astroid/nodes/scoped_nodes/scoped_nodes.py b/astroid/nodes/scoped_nodes/scoped_nodes.py index 68eafdee21..f9b06bf6a6 100644 --- a/astroid/nodes/scoped_nodes/scoped_nodes.py +++ b/astroid/nodes/scoped_nodes/scoped_nodes.py @@ -13,10 +13,10 @@ import io import itertools import os -import warnings +import sys from collections.abc import Generator, Iterable, Iterator, Sequence from functools import cached_property, lru_cache -from typing import TYPE_CHECKING, Any, ClassVar, Literal, NoReturn, TypeVar +from typing import TYPE_CHECKING, Any, ClassVar, Literal, NoReturn from astroid import bases, protocols, util from astroid.context import ( @@ -40,15 +40,7 @@ from astroid.interpreter.dunder_lookup import lookup from astroid.interpreter.objectmodel import ClassModel, FunctionModel, ModuleModel from astroid.manager import AstroidManager -from astroid.nodes import ( - Arguments, - Const, - NodeNG, - Unknown, - _base_nodes, - const_factory, - node_classes, -) +from astroid.nodes import _base_nodes, node_classes from astroid.nodes.scoped_nodes.mixin import ComprehensionScope, LocalsDictNodeNG from astroid.nodes.scoped_nodes.utils import builtin_lookup from astroid.nodes.utils import Position @@ -59,8 +51,14 @@ SuccessfulInferenceResult, ) +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + if TYPE_CHECKING: from astroid 
import nodes, objects + from astroid.nodes import Arguments, Const, NodeNG from astroid.nodes._base_nodes import LookupMixIn @@ -70,8 +68,6 @@ {"classmethod", "staticmethod", "builtins.classmethod", "builtins.staticmethod"} ) -_T = TypeVar("_T") - def _c3_merge(sequences, cls, context): """Merges MROs in *sequences* to a single MRO using the C3 algorithm. @@ -178,6 +174,15 @@ def function_to_method(n, klass): return n +def _infer_last( + arg: SuccessfulInferenceResult, context: InferenceContext +) -> InferenceResult: + res = util.Uninferable + for b in arg.infer(context=context.clone()): + res = b + return res + + class Module(LocalsDictNodeNG): """Class representing an :class:`ast.Module` node. @@ -354,7 +359,9 @@ def getattr( if name in self.special_attributes and not ignore_locals and not name_in_locals: result = [self.special_attributes.lookup(name)] if name == "__name__": - result.append(const_factory("__main__")) + main_const = node_classes.const_factory("__main__") + main_const.parent = AstroidManager().builtins_module + result.append(main_const) elif not ignore_locals and name_in_locals: result = self.locals[name] elif self.package: @@ -399,17 +406,11 @@ def fully_defined(self) -> bool: """ return self.file is not None and self.file.endswith(".py") - def statement(self, *, future: Literal[None, True] = None) -> NoReturn: + def statement(self) -> NoReturn: """The first parent node, including self, marked as statement node. When called on a :class:`Module` this raises a StatementMissing. 
""" - if future is not None: - warnings.warn( - "The future arg will be removed in astroid 4.0.", - DeprecationWarning, - stacklevel=2, - ) raise StatementMissing(target=self) def previous_sibling(self): @@ -590,7 +591,7 @@ def bool_value(self, context: InferenceContext | None = None) -> bool: def get_children(self): yield from self.body - def frame(self: _T, *, future: Literal[None, True] = None) -> _T: + def frame(self, *, future: Literal[None, True] = None) -> Self: """The node's frame node. A frame node is a :class:`Module`, :class:`FunctionDef`, @@ -606,6 +607,14 @@ def _infer( yield self +class __SyntheticRoot(Module): + def __init__(self): + super().__init__("__astroid_synthetic", pure_python=False) + + +SYNTHETIC_ROOT = __SyntheticRoot() + + class GeneratorExp(ComprehensionScope): """Class representing an :class:`ast.GeneratorExp` node. @@ -1025,7 +1034,7 @@ def get_children(self): yield self.args yield self.body - def frame(self: _T, *, future: Literal[None, True] = None) -> _T: + def frame(self, *, future: Literal[None, True] = None) -> Self: """The node's frame node. A frame node is a :class:`Module`, :class:`FunctionDef`, @@ -1164,9 +1173,6 @@ def __init__( end_col_offset=end_col_offset, parent=parent, ) - if parent and not isinstance(parent, Unknown): - frame = parent.frame() - frame.set_local(name, self) def postinit( self, @@ -1221,7 +1227,7 @@ def extra_decorators(self) -> list[node_classes.Call]: The property will return all the callables that are used for decoration. 
""" - if not self.parent or not isinstance(frame := self.parent.frame(), ClassDef): + if not (self.parent and isinstance(frame := self.parent.frame(), ClassDef)): return [] decorators: list[node_classes.Call] = [] @@ -1399,6 +1405,8 @@ def blockstart_tolineno(self): :type: int """ + if self.returns: + return self.returns.tolineno return self.args.tolineno def implicit_parameters(self) -> Literal[0, 1]: @@ -1515,37 +1523,19 @@ def _infer( ) -> Generator[objects.Property | FunctionDef, None, InferenceErrorInfo]: from astroid import objects # pylint: disable=import-outside-toplevel - if not self.decorators or not bases._is_property(self): + if not (self.decorators and bases._is_property(self)): yield self return InferenceErrorInfo(node=self, context=context) - # When inferring a property, we instantiate a new `objects.Property` object, - # which in turn, because it inherits from `FunctionDef`, sets itself in the locals - # of the wrapping frame. This means that every time we infer a property, the locals - # are mutated with a new instance of the property. To avoid this, we detect this - # scenario and avoid passing the `parent` argument to the constructor. 
if not self.parent: raise ParentMissingError(target=self) - parent_frame = self.parent.frame() - property_already_in_parent_locals = self.name in parent_frame.locals and any( - isinstance(val, objects.Property) for val in parent_frame.locals[self.name] - ) - # We also don't want to pass parent if the definition is within a Try node - if isinstance( - self.parent, - (node_classes.Try, node_classes.If), - ): - property_already_in_parent_locals = True - prop_func = objects.Property( function=self, name=self.name, lineno=self.lineno, - parent=self.parent if not property_already_in_parent_locals else None, + parent=self.parent, col_offset=self.col_offset, ) - if property_already_in_parent_locals: - prop_func.parent = self.parent prop_func.postinit(body=[], args=self.args, doc_node=self.doc_node) yield prop_func return InferenceErrorInfo(node=self, context=context) @@ -1558,10 +1548,7 @@ def infer_yield_result(self, context: InferenceContext | None = None): """ for yield_ in self.nodes_of_class(node_classes.Yield): if yield_.value is None: - const = node_classes.Const(None) - const.parent = yield_ - const.lineno = yield_.lineno - yield const + yield node_classes.Const(None, parent=yield_, lineno=yield_.lineno) elif yield_.scope() == self: yield from yield_.value.infer(context=context) @@ -1571,6 +1558,8 @@ def infer_call_result( context: InferenceContext | None = None, ) -> Iterator[InferenceResult]: """Infer what the function returns when called.""" + if context is None: + context = InferenceContext() if self.is_generator(): if isinstance(self, AsyncFunctionDef): generator_cls: type[bases.Generator] = bases.AsyncGenerator @@ -1592,7 +1581,7 @@ def infer_call_result( and len(self.args.args) == 1 and self.args.vararg is not None ): - if isinstance(caller.args, Arguments): + if isinstance(caller.args, node_classes.Arguments): assert caller.args.args is not None metaclass = next(caller.args.args[0].infer(context), None) elif isinstance(caller.args, list): @@ -1602,27 
+1591,14 @@ def infer_call_result( f"caller.args was neither Arguments nor list; got {type(caller.args)}" ) if isinstance(metaclass, ClassDef): - try: - class_bases = [ - # Find the first non-None inferred base value - next( - b - for b in arg.infer( - context=context.clone() if context else context - ) - if not (isinstance(b, Const) and b.value is None) - ) - for arg in caller.args[1:] - ] - except StopIteration as e: - raise InferenceError(node=caller.args[1:], context=context) from e + class_bases = [_infer_last(x, context) for x in caller.args[1:]] new_class = ClassDef( name="temporary_class", lineno=0, col_offset=0, end_lineno=0, end_col_offset=0, - parent=self, + parent=SYNTHETIC_ROOT, ) new_class.hide = True new_class.postinit( @@ -1705,7 +1681,7 @@ def scope_lookup( frame = self return frame._scope_lookup(node, name, offset) - def frame(self: _T, *, future: Literal[None, True] = None) -> _T: + def frame(self, *, future: Literal[None, True] = None) -> Self: """The node's frame node. A frame node is a :class:`Module`, :class:`FunctionDef`, @@ -1828,7 +1804,7 @@ def get_wrapping_class(node): return klass -class ClassDef( # pylint: disable=too-many-instance-attributes +class ClassDef( _base_nodes.FilterStmtsBaseNode, LocalsDictNodeNG, _base_nodes.Statement ): """Class representing an :class:`ast.ClassDef` node. 
@@ -1880,8 +1856,7 @@ def my_meth(self, arg): ), ) _other_fields = ("name", "is_dataclass", "position") - _other_other_fields = ("locals", "_newstyle") - _newstyle: bool | None = None + _other_other_fields = "locals" def __init__( self, @@ -1933,9 +1908,6 @@ def __init__( end_col_offset=end_col_offset, parent=parent, ) - if parent and not isinstance(parent, Unknown): - parent.frame().set_local(name, self) - for local_name, node in self.implicit_locals(): self.add_local_node(node, local_name) @@ -1981,36 +1953,11 @@ def postinit( self.bases = bases self.body = body self.decorators = decorators - self._newstyle = newstyle self._metaclass = metaclass self.position = position self.doc_node = doc_node self.type_params = type_params or [] - def _newstyle_impl(self, context: InferenceContext | None = None): - if context is None: - context = InferenceContext() - if self._newstyle is not None: - return self._newstyle - for base in self.ancestors(recurs=False, context=context): - if base._newstyle_impl(context): - self._newstyle = True - break - klass = self.declared_metaclass() - # could be any callable, we'd need to infer the result of klass(name, - # bases, dict). punt if it's not a class node. - if klass is not None and isinstance(klass, ClassDef): - self._newstyle = klass._newstyle_impl(context) - if self._newstyle is None: - self._newstyle = False - return self._newstyle - - _newstyle = None - newstyle = property( - _newstyle_impl, - doc=("Whether this is a new style class or not\n\n" ":type: bool or None"), - ) - @cached_property def blockstart_tolineno(self): """The line on which the beginning of this block ends. @@ -2031,14 +1978,12 @@ def block_range(self, lineno: int) -> tuple[int, int]: """ return self.fromlineno, self.tolineno - def pytype(self) -> Literal["builtins.type", "builtins.classobj"]: + def pytype(self) -> Literal["builtins.type"]: """Get the name of the type that this node represents. :returns: The name of the type. 
""" - if self.newstyle: - return "builtins.type" - return "builtins.classobj" + return "builtins.type" def display_type(self) -> str: """A human readable type of this node. @@ -2087,7 +2032,7 @@ def _infer_type_call(self, caller, context): col_offset=0, end_lineno=0, end_col_offset=0, - parent=Unknown(), + parent=caller.parent, ) # Get the bases of the class. @@ -2121,7 +2066,6 @@ def _infer_type_call(self, caller, context): if isinstance(attr, node_classes.Const) and isinstance(attr.value, str): result.locals[attr.value] = [value] - result.parent = caller.parent return result def infer_call_result( @@ -2409,26 +2353,24 @@ def getattr( if name in self.special_attributes and class_context and not values: result = [self.special_attributes.lookup(name)] - if name == "__bases__": - # Need special treatment, since they are mutable - # and we need to return all the values. - result += values return result if class_context: values += self._metaclass_lookup_attribute(name, context) - # Remove AnnAssigns without value, which are not attributes in the purest sense. - for value in values.copy(): + result: list[InferenceResult] = [] + for value in values: if isinstance(value, node_classes.AssignName): stmt = value.statement() + # Ignore AnnAssigns without value, which are not attributes in the purest sense. 
if isinstance(stmt, node_classes.AnnAssign) and stmt.value is None: - values.pop(values.index(value)) + continue + result.append(value) - if not values: + if not result: raise AttributeInferenceError(target=self, attribute=name, context=context) - return values + return result @lru_cache(maxsize=1024) # noqa def _metaclass_lookup_attribute(self, name, context): @@ -2440,7 +2382,7 @@ def _metaclass_lookup_attribute(self, name, context): for cls in (implicit_meta, metaclass): if cls and cls != self and isinstance(cls, ClassDef): cls_attributes = self._get_attribute_from_metaclass(cls, name, context) - attrs.update(set(cls_attributes)) + attrs.update(cls_attributes) return attrs def _get_attribute_from_metaclass(self, cls, name, context): @@ -2588,7 +2530,6 @@ def _valid_getattr(node): try: return _valid_getattr(self.getattr("__getattr__", context)[0]) except AttributeInferenceError: - # if self.newstyle: XXX cause an infinite recursion error try: getattribute = self.getattr("__getattribute__", context)[0] return _valid_getattr(getattribute) @@ -2675,16 +2616,12 @@ def mymethods(self): def implicit_metaclass(self): """Get the implicit metaclass of the current class. - For newstyle classes, this will return an instance of builtins.type. - For oldstyle classes, it will simply return None, since there's - no implicit metaclass there. + This will return an instance of builtins.type. :returns: The metaclass. 
- :rtype: builtins.type or None + :rtype: builtins.type """ - if self.newstyle: - return builtin_lookup("type")[1][0] - return None + return builtin_lookup("type")[1][0] def declared_metaclass( self, context: InferenceContext | None = None @@ -2752,7 +2689,7 @@ def metaclass( """ return self._find_metaclass(context=context) - def has_metaclass_hack(self): + def has_metaclass_hack(self) -> bool: return self._metaclass_hack def _islots(self): @@ -2794,9 +2731,10 @@ def _islots(self): for elt in values: try: for inferred in elt.infer(): - if not isinstance( - inferred, node_classes.Const - ) or not isinstance(inferred.value, str): + if not ( + isinstance(inferred, node_classes.Const) + and isinstance(inferred.value, str) + ): continue if not inferred.value: continue @@ -2807,10 +2745,6 @@ def _islots(self): return None def _slots(self): - if not self.newstyle: - raise NotImplementedError( - "The concept of slots is undefined for old-style classes." - ) slots = self._islots() try: @@ -2850,11 +2784,6 @@ def grouped_slots( else: yield None - if not self.newstyle: - raise NotImplementedError( - "The concept of slots is undefined for old-style classes." - ) - try: mro = self.mro() except MroError as e: @@ -2893,13 +2822,8 @@ def _inferred_bases(self, context: InferenceContext | None = None): for stmt in self.bases: try: - # Find the first non-None inferred base value - baseobj = next( - b - for b in stmt.infer(context=context.clone()) - if not (isinstance(b, Const) and b.value is None) - ) - except (InferenceError, StopIteration): + baseobj = _infer_last(stmt, context) + except InferenceError: continue if isinstance(baseobj, bases.Instance): baseobj = baseobj._proxied @@ -2920,17 +2844,8 @@ def _compute_mro(self, context: InferenceContext | None = None): if base is self: continue - try: - mro = base._compute_mro(context=context) - bases_mro.append(mro) - except NotImplementedError: - # Some classes have in their ancestors both newstyle and - # old style classes. 
For these we can't retrieve the .mro, - # although in Python it's possible, since the class we are - # currently working is in fact new style. - # So, we fallback to ancestors here. - ancestors = list(base.ancestors(context=context)) - bases_mro.append(ancestors) + mro = base._compute_mro(context=context) + bases_mro.append(mro) unmerged_mro: list[list[ClassDef]] = [[self], *bases_mro, inferred_bases] unmerged_mro = clean_duplicates_mro(unmerged_mro, self, context) @@ -2973,7 +2888,7 @@ def _assign_nodes_in_scope(self): ) return list(itertools.chain.from_iterable(children_assign_nodes)) - def frame(self: _T, *, future: Literal[None, True] = None) -> _T: + def frame(self, *, future: Literal[None, True] = None) -> Self: """The node's frame node. A frame node is a :class:`Module`, :class:`FunctionDef`, diff --git a/astroid/objects.py b/astroid/objects.py index 2d12f59805..2c96815bf2 100644 --- a/astroid/objects.py +++ b/astroid/objects.py @@ -13,9 +13,10 @@ from __future__ import annotations +import sys from collections.abc import Generator, Iterator from functools import cached_property -from typing import Any, Literal, NoReturn, TypeVar +from typing import Any, Literal, NoReturn from astroid import bases, util from astroid.context import InferenceContext @@ -30,7 +31,10 @@ from astroid.nodes import node_classes, scoped_nodes from astroid.typing import InferenceResult, SuccessfulInferenceResult -_T = TypeVar("_T") +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self class FrozenSet(node_classes.BaseContainer): @@ -109,9 +113,6 @@ def super_mro(self): super_=self, ) - if not mro_type.newstyle: - raise SuperError("Unable to call super on old-style classes.", super_=self) - mro = mro_type.mro() if self.mro_pointer not in mro: raise SuperError( @@ -277,18 +278,15 @@ class PartialFunction(scoped_nodes.FunctionDef): """A class representing partial function obtained via functools.partial.""" def __init__(self, call, 
name=None, lineno=None, col_offset=None, parent=None): - # TODO: Pass end_lineno, end_col_offset and parent as well + # TODO: Pass end_lineno, end_col_offset as well super().__init__( name, lineno=lineno, col_offset=col_offset, - parent=node_classes.Unknown(), end_col_offset=0, end_lineno=0, + parent=parent, ) - # A typical FunctionDef automatically adds its name to the parent scope, - # but a partial should not, so defer setting parent until after init - self.parent = parent self.filled_args = call.positional_arguments[1:] self.filled_keywords = call.keyword_arguments @@ -361,6 +359,6 @@ def infer_call_result( raise InferenceError("Properties are not callable") def _infer( - self: _T, context: InferenceContext | None = None, **kwargs: Any - ) -> Generator[_T]: + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[Self]: yield self diff --git a/astroid/protocols.py b/astroid/protocols.py index bacb786a99..50e5cfa5ef 100644 --- a/astroid/protocols.py +++ b/astroid/protocols.py @@ -15,6 +15,7 @@ from typing import TYPE_CHECKING, Any, TypeVar from astroid import bases, decorators, nodes, util +from astroid.builder import extract_node from astroid.const import Context from astroid.context import InferenceContext, copy_context from astroid.exceptions import ( @@ -142,7 +143,10 @@ def _multiply_seq_by_int( context: InferenceContext, ) -> _TupleListNodeT: node = self.__class__(parent=opnode) - if value > 1e8: + if not (value > 0 and self.elts): + node.elts = [] + return node + if len(self.elts) * value > 1e8: node.elts = [util.Uninferable] return node filtered_elts = ( @@ -159,13 +163,13 @@ def _filter_uninferable_nodes( ) -> Iterator[SuccessfulInferenceResult]: for elt in elts: if isinstance(elt, util.UninferableBase): - yield nodes.Unknown() + yield node_classes.UNATTACHED_UNKNOWN else: for inferred in elt.infer(context): if not isinstance(inferred, util.UninferableBase): yield inferred else: - yield nodes.Unknown() + yield 
node_classes.UNATTACHED_UNKNOWN @decorators.yes_if_nothing_inferred @@ -524,11 +528,34 @@ def excepthandler_assigned_stmts( ) -> Any: from astroid import objects # pylint: disable=import-outside-toplevel - for assigned in node_classes.unpack_infer(self.type): - if isinstance(assigned, nodes.ClassDef): - assigned = objects.ExceptionInstance(assigned) + def _generate_assigned(): + for assigned in node_classes.unpack_infer(self.type): + if isinstance(assigned, nodes.ClassDef): + assigned = objects.ExceptionInstance(assigned) + + yield assigned + if isinstance(self.parent, node_classes.TryStar): + # except * handler has assigned ExceptionGroup with caught + # exceptions under exceptions attribute + # pylint: disable-next=stop-iteration-return + eg = next( + node_classes.unpack_infer( + extract_node( + """ +from builtins import ExceptionGroup +ExceptionGroup +""" + ) + ) + ) + assigned = objects.ExceptionInstance(eg) + assigned.instance_attrs["exceptions"] = [ + nodes.List.from_elements(_generate_assigned()) + ] yield assigned + else: + yield from _generate_assigned() return { "node": self, "unknown": node, @@ -691,7 +718,8 @@ def starred_assigned_stmts( # noqa: C901 the inference results. 
""" - # pylint: disable=too-many-locals,too-many-statements + # pylint: disable = too-many-locals, too-many-statements, too-many-branches + def _determine_starred_iteration_lookups( starred: nodes.Starred, target: nodes.Tuple, lookups: list[tuple[int, int]] ) -> None: @@ -805,7 +833,7 @@ def _determine_starred_iteration_lookups( if not isinstance(target, nodes.Tuple): raise InferenceError( - "Could not make sense of this, the target must be a tuple", + f"Could not make sense of this, the target must be a tuple, not {type(target)!r}", context=context, ) diff --git a/astroid/raw_building.py b/astroid/raw_building.py index b7aafb00e5..d1bbbd5569 100644 --- a/astroid/raw_building.py +++ b/astroid/raw_building.py @@ -18,27 +18,27 @@ import warnings from collections.abc import Iterable from contextlib import redirect_stderr, redirect_stdout -from typing import Any, Union +from typing import TYPE_CHECKING, Any from astroid import bases, nodes from astroid.const import _EMPTY_OBJECT_MARKER, IS_PYPY -from astroid.manager import AstroidManager from astroid.nodes import node_classes +if TYPE_CHECKING: + from astroid.manager import AstroidManager + logger = logging.getLogger(__name__) -_FunctionTypes = Union[ - types.FunctionType, - types.MethodType, - types.BuiltinFunctionType, - types.WrapperDescriptorType, - types.MethodDescriptorType, - types.ClassMethodDescriptorType, -] +_FunctionTypes = ( + types.FunctionType + | types.MethodType + | types.BuiltinFunctionType + | types.WrapperDescriptorType + | types.MethodDescriptorType + | types.ClassMethodDescriptorType +) -# the keys of CONST_CLS eg python builtin types -_CONSTANTS = tuple(node_classes.CONST_CLS) TYPE_NONE = type(None) TYPE_NOTIMPLEMENTED = type(NotImplemented) TYPE_ELLIPSIS = type(...) 
@@ -49,24 +49,29 @@ def _attach_local_node(parent, node, name: str) -> None: parent.add_local_node(node) -def _add_dunder_class(func, member) -> None: +def _add_dunder_class(func, parent: nodes.NodeNG, member) -> None: """Add a __class__ member to the given func node, if we can determine it.""" python_cls = member.__class__ cls_name = getattr(python_cls, "__name__", None) if not cls_name: return cls_bases = [ancestor.__name__ for ancestor in python_cls.__bases__] - ast_klass = build_class(cls_name, cls_bases, python_cls.__doc__) + doc = python_cls.__doc__ if isinstance(python_cls.__doc__, str) else None + ast_klass = build_class(cls_name, parent, cls_bases, doc) func.instance_attrs["__class__"] = [ast_klass] +def build_dummy(runtime_object) -> nodes.EmptyNode: + enode = nodes.EmptyNode() + enode.object = runtime_object + return enode + + def attach_dummy_node(node, name: str, runtime_object=_EMPTY_OBJECT_MARKER) -> None: """create a dummy node and register it in the locals of the given node with the specified name """ - enode = nodes.EmptyNode() - enode.object = runtime_object - _attach_local_node(node, enode, name) + _attach_local_node(node, build_dummy(runtime_object), name) def attach_const_node(node, name: str, value) -> None: @@ -96,7 +101,10 @@ def build_module(name: str, doc: str | None = None) -> nodes.Module: def build_class( - name: str, basenames: Iterable[str] = (), doc: str | None = None + name: str, + parent: nodes.NodeNG, + basenames: Iterable[str] = (), + doc: str | None = None, ) -> nodes.ClassDef: """Create and initialize an astroid ClassDef node.""" node = nodes.ClassDef( @@ -105,7 +113,7 @@ def build_class( col_offset=0, end_lineno=0, end_col_offset=0, - parent=nodes.Unknown(), + parent=parent, ) node.postinit( bases=[ @@ -128,6 +136,7 @@ def build_class( def build_function( name: str, + parent: nodes.NodeNG, args: list[str] | None = None, posonlyargs: list[str] | None = None, defaults: list[Any] | None = None, @@ -141,7 +150,7 @@ def 
build_function( name, lineno=0, col_offset=0, - parent=node_classes.Unknown(), + parent=parent, end_col_offset=0, end_lineno=0, ) @@ -258,11 +267,11 @@ def register_arguments(func: nodes.FunctionDef, args: list | None = None) -> Non def object_build_class( - node: nodes.Module | nodes.ClassDef, member: type, localname: str + node: nodes.Module | nodes.ClassDef, member: type ) -> nodes.ClassDef: """create astroid for a living class object""" basenames = [base.__name__ for base in member.__bases__] - return _base_class_object_build(node, member, basenames, localname=localname) + return _base_class_object_build(node, member, basenames) def _get_args_info_from_callable( @@ -300,8 +309,8 @@ def _get_args_info_from_callable( def object_build_function( - node: nodes.Module | nodes.ClassDef, member: _FunctionTypes, localname: str -) -> None: + node: nodes.Module | nodes.ClassDef, member: _FunctionTypes +) -> nodes.FunctionDef: """create astroid for a living function object""" ( args, @@ -311,59 +320,49 @@ def object_build_function( kwonly_defaults, ) = _get_args_info_from_callable(member) - func = build_function( - getattr(member, "__name__", None) or localname, + return build_function( + getattr(member, "__name__", ""), + node, args, posonlyargs, defaults, - member.__doc__, + member.__doc__ if isinstance(member.__doc__, str) else None, kwonlyargs=kwonlyargs, kwonlydefaults=kwonly_defaults, ) - node.add_local_node(func, localname) - def object_build_datadescriptor( - node: nodes.Module | nodes.ClassDef, member: type, name: str + node: nodes.Module | nodes.ClassDef, member: type ) -> nodes.ClassDef: """create astroid for a living data descriptor object""" - return _base_class_object_build(node, member, [], name) + return _base_class_object_build(node, member, []) def object_build_methoddescriptor( node: nodes.Module | nodes.ClassDef, member: _FunctionTypes, - localname: str, -) -> None: +) -> nodes.FunctionDef: """create astroid for a living method descriptor object""" # 
FIXME get arguments ? - func = build_function( - getattr(member, "__name__", None) or localname, doc=member.__doc__ - ) - node.add_local_node(func, localname) - _add_dunder_class(func, member) + name = getattr(member, "__name__", "") + func = build_function(name, node, doc=member.__doc__) + _add_dunder_class(func, node, member) + return func def _base_class_object_build( node: nodes.Module | nodes.ClassDef, member: type, basenames: list[str], - name: str | None = None, - localname: str | None = None, ) -> nodes.ClassDef: """create astroid for a living class object, with a given set of base names (e.g. ancestors) """ - class_name = name or getattr(member, "__name__", None) or localname - assert isinstance(class_name, str) - klass = build_class( - class_name, - basenames, - member.__doc__, - ) + name = getattr(member, "__name__", "") + doc = member.__doc__ if isinstance(member.__doc__, str) else None + klass = build_class(name, node, basenames, doc) klass._newstyle = isinstance(member, type) - node.add_local_node(klass, localname) try: # limit the instantiation trick since it's too dangerous # (such as infinite test execution...) 
@@ -391,10 +390,9 @@ def _base_class_object_build( def _build_from_function( node: nodes.Module | nodes.ClassDef, - name: str, member: _FunctionTypes, module: types.ModuleType, -) -> None: +) -> nodes.FunctionDef | nodes.EmptyNode: # verify this is not an imported function try: code = member.__code__ # type: ignore[union-attr] @@ -404,12 +402,10 @@ def _build_from_function( code = None filename = getattr(code, "co_filename", None) if filename is None: - assert isinstance(member, object) - object_build_methoddescriptor(node, member, name) - elif filename != getattr(module, "__file__", None): - attach_dummy_node(node, name, member) - else: - object_build_function(node, member, name) + return object_build_methoddescriptor(node, member) + if filename == getattr(module, "__file__", None): + return object_build_function(node, member) + return build_dummy(member) def _safe_has_attribute(obj, member: str) -> bool: @@ -432,8 +428,8 @@ class InspectBuilder: bootstrapped: bool = False - def __init__(self, manager_instance: AstroidManager | None = None) -> None: - self._manager = manager_instance or AstroidManager() + def __init__(self, manager_instance: AstroidManager) -> None: + self._manager = manager_instance self._done: dict[types.ModuleType | type, nodes.Module | nodes.ClassDef] = {} self._module: types.ModuleType @@ -476,58 +472,57 @@ def object_build( if obj in self._done: return None self._done[obj] = node - for name in dir(obj): + for alias in dir(obj): # inspect.ismethod() and inspect.isbuiltin() in PyPy return # the opposite of what they do in CPython for __class_getitem__. - pypy__class_getitem__ = IS_PYPY and name == "__class_getitem__" + pypy__class_getitem__ = IS_PYPY and alias == "__class_getitem__" try: with warnings.catch_warnings(): warnings.simplefilter("ignore") - member = getattr(obj, name) + member = getattr(obj, alias) except AttributeError: # damned ExtensionClass.Base, I know you're there ! 
- attach_dummy_node(node, name) + attach_dummy_node(node, alias) continue if inspect.ismethod(member) and not pypy__class_getitem__: member = member.__func__ if inspect.isfunction(member): - _build_from_function(node, name, member, self._module) + child = _build_from_function(node, member, self._module) elif inspect.isbuiltin(member) or pypy__class_getitem__: - if self.imported_member(node, member, name): + if self.imported_member(node, member, alias): continue - object_build_methoddescriptor(node, member, name) + child = object_build_methoddescriptor(node, member) elif inspect.isclass(member): - if self.imported_member(node, member, name): + if self.imported_member(node, member, alias): continue if member in self._done: - class_node = self._done[member] - assert isinstance(class_node, nodes.ClassDef) - if class_node not in node.locals.get(name, ()): - node.add_local_node(class_node, name) + child = self._done[member] + assert isinstance(child, nodes.ClassDef) else: - class_node = object_build_class(node, member, name) + child = object_build_class(node, member) # recursion - self.object_build(class_node, member) - if name == "__class__" and class_node.parent is None: - class_node.parent = self._done[self._module] + self.object_build(child, member) elif inspect.ismethoddescriptor(member): - object_build_methoddescriptor(node, member, name) + child: nodes.NodeNG = object_build_methoddescriptor(node, member) elif inspect.isdatadescriptor(member): - object_build_datadescriptor(node, member, name) - elif isinstance(member, _CONSTANTS): - attach_const_node(node, name, member) + child = object_build_datadescriptor(node, member) + elif isinstance(member, tuple(node_classes.CONST_CLS)): + if alias in node.special_attributes: + continue + child = nodes.const_factory(member) elif inspect.isroutine(member): # This should be called for Jython, where some builtin # methods aren't caught by isbuiltin branch. 
- _build_from_function(node, name, member, self._module) + child = _build_from_function(node, member, self._module) elif _safe_has_attribute(member, "__all__"): - module = build_module(name) - _attach_local_node(node, module, name) + child: nodes.NodeNG = build_module(alias) # recursion - self.object_build(module, member) + self.object_build(child, member) else: # create an empty node so that the name is actually defined - attach_dummy_node(node, name, member) + child: nodes.NodeNG = build_dummy(member) + if child not in node.locals.get(alias, ()): + node.add_local_node(child, alias) return None def imported_member(self, node, member, name: str) -> bool: @@ -604,19 +599,19 @@ def _astroid_bootstrapping() -> None: """astroid bootstrapping the builtins module""" # this boot strapping is necessary since we need the Const nodes to # inspect_build builtins, and then we can proxy Const - builder = InspectBuilder() + # pylint: disable-next=import-outside-toplevel + from astroid.manager import AstroidManager + + builder = InspectBuilder(AstroidManager()) astroid_builtin = builder.inspect_build(builtins) for cls, node_cls in node_classes.CONST_CLS.items(): if cls is TYPE_NONE: - proxy = build_class("NoneType") - proxy.parent = astroid_builtin + proxy = build_class("NoneType", astroid_builtin) elif cls is TYPE_NOTIMPLEMENTED: - proxy = build_class("NotImplementedType") - proxy.parent = astroid_builtin + proxy = build_class("NotImplementedType", astroid_builtin) elif cls is TYPE_ELLIPSIS: - proxy = build_class("Ellipsis") - proxy.parent = astroid_builtin + proxy = build_class("Ellipsis", astroid_builtin) else: proxy = astroid_builtin.getattr(cls.__name__)[0] assert isinstance(proxy, nodes.ClassDef) @@ -634,9 +629,9 @@ def _astroid_bootstrapping() -> None: col_offset=0, end_lineno=0, end_col_offset=0, - parent=nodes.Unknown(), + parent=astroid_builtin, ) - _GeneratorType.parent = astroid_builtin + astroid_builtin.set_local(_GeneratorType.name, _GeneratorType) 
generator_doc_node = ( nodes.Const(value=types.GeneratorType.__doc__) if types.GeneratorType.__doc__ @@ -658,9 +653,9 @@ def _astroid_bootstrapping() -> None: col_offset=0, end_lineno=0, end_col_offset=0, - parent=nodes.Unknown(), + parent=astroid_builtin, ) - _AsyncGeneratorType.parent = astroid_builtin + astroid_builtin.set_local(_AsyncGeneratorType.name, _AsyncGeneratorType) async_generator_doc_node = ( nodes.Const(value=types.AsyncGeneratorType.__doc__) if types.AsyncGeneratorType.__doc__ @@ -682,9 +677,8 @@ def _astroid_bootstrapping() -> None: col_offset=0, end_lineno=0, end_col_offset=0, - parent=nodes.Unknown(), + parent=astroid_builtin, ) - _UnionTypeType.parent = astroid_builtin union_type_doc_node = ( nodes.Const(value=types.UnionType.__doc__) if types.UnionType.__doc__ @@ -719,14 +713,14 @@ def _astroid_bootstrapping() -> None: col_offset=0, end_lineno=0, end_col_offset=0, - parent=nodes.Unknown(), + parent=astroid_builtin, ) - klass.parent = astroid_builtin + doc = _type.__doc__ if isinstance(_type.__doc__, str) else None klass.postinit( bases=[], body=[], decorators=None, - doc_node=nodes.Const(value=_type.__doc__) if _type.__doc__ else None, + doc_node=nodes.Const(doc) if doc else None, ) builder.object_build(klass, _type) astroid_builtin[_type.__name__] = klass diff --git a/astroid/rebuilder.py b/astroid/rebuilder.py index b783885019..97f3a390e1 100644 --- a/astroid/rebuilder.py +++ b/astroid/rebuilder.py @@ -11,20 +11,32 @@ import ast import sys import token -from collections.abc import Callable, Generator +from collections.abc import Callable, Collection, Generator from io import StringIO from tokenize import TokenInfo, generate_tokens -from typing import TYPE_CHECKING, Final, TypeVar, Union, cast, overload +from typing import TYPE_CHECKING, Final, TypeVar, cast, overload from astroid import nodes from astroid._ast import ParserModule, get_parser_module, parse_function_type_comment -from astroid.const import PY312_PLUS, Context -from 
astroid.manager import AstroidManager -from astroid.nodes import NodeNG -from astroid.nodes.node_classes import AssignName +from astroid.const import PY312_PLUS, PY313_PLUS, Context from astroid.nodes.utils import Position from astroid.typing import InferenceResult +if TYPE_CHECKING: + from astroid.manager import AstroidManager + + T_Doc = TypeVar( + "T_Doc", + ast.Module, + ast.ClassDef, + ast.FunctionDef | ast.AsyncFunctionDef, + ) + _FunctionT = TypeVar("_FunctionT", nodes.FunctionDef, nodes.AsyncFunctionDef) + _ForT = TypeVar("_ForT", nodes.For, nodes.AsyncFor) + _WithT = TypeVar("_WithT", nodes.With, nodes.AsyncWith) + NodesWithDocsType = nodes.Module | nodes.ClassDef | nodes.FunctionDef + + REDIRECT: Final[dict[str, str]] = { "arguments": "Arguments", "comprehension": "Comprehension", @@ -36,18 +48,6 @@ } -T_Doc = TypeVar( - "T_Doc", - "ast.Module", - "ast.ClassDef", - Union["ast.FunctionDef", "ast.AsyncFunctionDef"], -) -_FunctionT = TypeVar("_FunctionT", nodes.FunctionDef, nodes.AsyncFunctionDef) -_ForT = TypeVar("_ForT", nodes.For, nodes.AsyncFor) -_WithT = TypeVar("_WithT", nodes.With, nodes.AsyncWith) -NodesWithDocsType = Union[nodes.Module, nodes.ClassDef, nodes.FunctionDef] - - # noinspection PyMethodMayBeStatic class TreeRebuilder: """Rebuilds the _ast tree to become an Astroid tree.""" @@ -61,16 +61,18 @@ def __init__( self._manager = manager self._data = data.split("\n") if data else None self._global_names: list[dict[str, list[nodes.Global]]] = [] - self._import_from_nodes: list[nodes.ImportFrom] = [] + self._import_from_nodes: list[tuple[nodes.ImportFrom, Collection[str]]] = [] self._delayed_assattr: list[nodes.AssignAttr] = [] - self._visit_meths: dict[type[ast.AST], Callable[[ast.AST, NodeNG], NodeNG]] = {} + self._visit_meths: dict[ + type[ast.AST], Callable[[ast.AST, nodes.NodeNG], nodes.NodeNG] + ] = {} if parser_module is None: self._parser_module = get_parser_module() else: self._parser_module = parser_module - def _get_doc(self, node: 
T_Doc) -> tuple[T_Doc, ast.Constant | ast.Str | None]: + def _get_doc(self, node: T_Doc) -> tuple[T_Doc, ast.Constant | None]: """Return the doc ast node.""" try: if node.body and isinstance(node.body[0], ast.Expr): @@ -176,265 +178,307 @@ def visit_module( if TYPE_CHECKING: # noqa: C901 @overload - def visit(self, node: ast.arg, parent: NodeNG) -> nodes.AssignName: ... + def visit(self, node: ast.arg, parent: nodes.NodeNG) -> nodes.AssignName: ... @overload - def visit(self, node: ast.arguments, parent: NodeNG) -> nodes.Arguments: ... + def visit( + self, node: ast.arguments, parent: nodes.NodeNG + ) -> nodes.Arguments: ... @overload - def visit(self, node: ast.Assert, parent: NodeNG) -> nodes.Assert: ... + def visit(self, node: ast.Assert, parent: nodes.NodeNG) -> nodes.Assert: ... @overload def visit( - self, node: ast.AsyncFunctionDef, parent: NodeNG + self, node: ast.AsyncFunctionDef, parent: nodes.NodeNG ) -> nodes.AsyncFunctionDef: ... @overload - def visit(self, node: ast.AsyncFor, parent: NodeNG) -> nodes.AsyncFor: ... + def visit(self, node: ast.AsyncFor, parent: nodes.NodeNG) -> nodes.AsyncFor: ... @overload - def visit(self, node: ast.Await, parent: NodeNG) -> nodes.Await: ... + def visit(self, node: ast.Await, parent: nodes.NodeNG) -> nodes.Await: ... @overload - def visit(self, node: ast.AsyncWith, parent: NodeNG) -> nodes.AsyncWith: ... + def visit( + self, node: ast.AsyncWith, parent: nodes.NodeNG + ) -> nodes.AsyncWith: ... @overload - def visit(self, node: ast.Assign, parent: NodeNG) -> nodes.Assign: ... + def visit(self, node: ast.Assign, parent: nodes.NodeNG) -> nodes.Assign: ... @overload - def visit(self, node: ast.AnnAssign, parent: NodeNG) -> nodes.AnnAssign: ... + def visit( + self, node: ast.AnnAssign, parent: nodes.NodeNG + ) -> nodes.AnnAssign: ... @overload - def visit(self, node: ast.AugAssign, parent: NodeNG) -> nodes.AugAssign: ... + def visit( + self, node: ast.AugAssign, parent: nodes.NodeNG + ) -> nodes.AugAssign: ... 
@overload - def visit(self, node: ast.BinOp, parent: NodeNG) -> nodes.BinOp: ... + def visit(self, node: ast.BinOp, parent: nodes.NodeNG) -> nodes.BinOp: ... @overload - def visit(self, node: ast.BoolOp, parent: NodeNG) -> nodes.BoolOp: ... + def visit(self, node: ast.BoolOp, parent: nodes.NodeNG) -> nodes.BoolOp: ... @overload - def visit(self, node: ast.Break, parent: NodeNG) -> nodes.Break: ... + def visit(self, node: ast.Break, parent: nodes.NodeNG) -> nodes.Break: ... @overload - def visit(self, node: ast.Call, parent: NodeNG) -> nodes.Call: ... + def visit(self, node: ast.Call, parent: nodes.NodeNG) -> nodes.Call: ... @overload - def visit(self, node: ast.ClassDef, parent: NodeNG) -> nodes.ClassDef: ... + def visit(self, node: ast.ClassDef, parent: nodes.NodeNG) -> nodes.ClassDef: ... @overload - def visit(self, node: ast.Continue, parent: NodeNG) -> nodes.Continue: ... + def visit(self, node: ast.Continue, parent: nodes.NodeNG) -> nodes.Continue: ... @overload - def visit(self, node: ast.Compare, parent: NodeNG) -> nodes.Compare: ... + def visit(self, node: ast.Compare, parent: nodes.NodeNG) -> nodes.Compare: ... @overload def visit( - self, node: ast.comprehension, parent: NodeNG + self, node: ast.comprehension, parent: nodes.NodeNG ) -> nodes.Comprehension: ... @overload - def visit(self, node: ast.Delete, parent: NodeNG) -> nodes.Delete: ... + def visit(self, node: ast.Delete, parent: nodes.NodeNG) -> nodes.Delete: ... @overload - def visit(self, node: ast.Dict, parent: NodeNG) -> nodes.Dict: ... + def visit(self, node: ast.Dict, parent: nodes.NodeNG) -> nodes.Dict: ... @overload - def visit(self, node: ast.DictComp, parent: NodeNG) -> nodes.DictComp: ... + def visit(self, node: ast.DictComp, parent: nodes.NodeNG) -> nodes.DictComp: ... @overload - def visit(self, node: ast.Expr, parent: NodeNG) -> nodes.Expr: ... + def visit(self, node: ast.Expr, parent: nodes.NodeNG) -> nodes.Expr: ... 
@overload def visit( - self, node: ast.ExceptHandler, parent: NodeNG + self, node: ast.ExceptHandler, parent: nodes.NodeNG ) -> nodes.ExceptHandler: ... @overload - def visit(self, node: ast.For, parent: NodeNG) -> nodes.For: ... + def visit(self, node: ast.For, parent: nodes.NodeNG) -> nodes.For: ... @overload - def visit(self, node: ast.ImportFrom, parent: NodeNG) -> nodes.ImportFrom: ... + def visit( + self, node: ast.ImportFrom, parent: nodes.NodeNG + ) -> nodes.ImportFrom: ... @overload - def visit(self, node: ast.FunctionDef, parent: NodeNG) -> nodes.FunctionDef: ... + def visit( + self, node: ast.FunctionDef, parent: nodes.NodeNG + ) -> nodes.FunctionDef: ... @overload def visit( - self, node: ast.GeneratorExp, parent: NodeNG + self, node: ast.GeneratorExp, parent: nodes.NodeNG ) -> nodes.GeneratorExp: ... @overload - def visit(self, node: ast.Attribute, parent: NodeNG) -> nodes.Attribute: ... + def visit( + self, node: ast.Attribute, parent: nodes.NodeNG + ) -> nodes.Attribute: ... @overload - def visit(self, node: ast.Global, parent: NodeNG) -> nodes.Global: ... + def visit(self, node: ast.Global, parent: nodes.NodeNG) -> nodes.Global: ... @overload - def visit(self, node: ast.If, parent: NodeNG) -> nodes.If: ... + def visit(self, node: ast.If, parent: nodes.NodeNG) -> nodes.If: ... @overload - def visit(self, node: ast.IfExp, parent: NodeNG) -> nodes.IfExp: ... + def visit(self, node: ast.IfExp, parent: nodes.NodeNG) -> nodes.IfExp: ... @overload - def visit(self, node: ast.Import, parent: NodeNG) -> nodes.Import: ... + def visit(self, node: ast.Import, parent: nodes.NodeNG) -> nodes.Import: ... @overload - def visit(self, node: ast.JoinedStr, parent: NodeNG) -> nodes.JoinedStr: ... + def visit( + self, node: ast.JoinedStr, parent: nodes.NodeNG + ) -> nodes.JoinedStr: ... @overload def visit( - self, node: ast.FormattedValue, parent: NodeNG + self, node: ast.FormattedValue, parent: nodes.NodeNG ) -> nodes.FormattedValue: ... 
@overload - def visit(self, node: ast.NamedExpr, parent: NodeNG) -> nodes.NamedExpr: ... + def visit( + self, node: ast.NamedExpr, parent: nodes.NodeNG + ) -> nodes.NamedExpr: ... @overload - def visit(self, node: ast.keyword, parent: NodeNG) -> nodes.Keyword: ... + def visit(self, node: ast.keyword, parent: nodes.NodeNG) -> nodes.Keyword: ... @overload - def visit(self, node: ast.Lambda, parent: NodeNG) -> nodes.Lambda: ... + def visit(self, node: ast.Lambda, parent: nodes.NodeNG) -> nodes.Lambda: ... @overload - def visit(self, node: ast.List, parent: NodeNG) -> nodes.List: ... + def visit(self, node: ast.List, parent: nodes.NodeNG) -> nodes.List: ... @overload - def visit(self, node: ast.ListComp, parent: NodeNG) -> nodes.ListComp: ... + def visit(self, node: ast.ListComp, parent: nodes.NodeNG) -> nodes.ListComp: ... @overload def visit( - self, node: ast.Name, parent: NodeNG + self, node: ast.Name, parent: nodes.NodeNG ) -> nodes.Name | nodes.Const | nodes.AssignName | nodes.DelName: ... @overload - def visit(self, node: ast.Nonlocal, parent: NodeNG) -> nodes.Nonlocal: ... + def visit(self, node: ast.Nonlocal, parent: nodes.NodeNG) -> nodes.Nonlocal: ... @overload - def visit(self, node: ast.Constant, parent: NodeNG) -> nodes.Const: ... + def visit(self, node: ast.Constant, parent: nodes.NodeNG) -> nodes.Const: ... if sys.version_info >= (3, 12): @overload - def visit(self, node: ast.ParamSpec, parent: NodeNG) -> nodes.ParamSpec: ... + def visit( + self, node: ast.ParamSpec, parent: nodes.NodeNG + ) -> nodes.ParamSpec: ... @overload - def visit(self, node: ast.Pass, parent: NodeNG) -> nodes.Pass: ... + def visit(self, node: ast.Pass, parent: nodes.NodeNG) -> nodes.Pass: ... @overload - def visit(self, node: ast.Raise, parent: NodeNG) -> nodes.Raise: ... + def visit(self, node: ast.Raise, parent: nodes.NodeNG) -> nodes.Raise: ... @overload - def visit(self, node: ast.Return, parent: NodeNG) -> nodes.Return: ... 
+ def visit(self, node: ast.Return, parent: nodes.NodeNG) -> nodes.Return: ... @overload - def visit(self, node: ast.Set, parent: NodeNG) -> nodes.Set: ... + def visit(self, node: ast.Set, parent: nodes.NodeNG) -> nodes.Set: ... @overload - def visit(self, node: ast.SetComp, parent: NodeNG) -> nodes.SetComp: ... + def visit(self, node: ast.SetComp, parent: nodes.NodeNG) -> nodes.SetComp: ... @overload def visit(self, node: ast.Slice, parent: nodes.Subscript) -> nodes.Slice: ... @overload - def visit(self, node: ast.Subscript, parent: NodeNG) -> nodes.Subscript: ... + def visit( + self, node: ast.Subscript, parent: nodes.NodeNG + ) -> nodes.Subscript: ... @overload - def visit(self, node: ast.Starred, parent: NodeNG) -> nodes.Starred: ... + def visit(self, node: ast.Starred, parent: nodes.NodeNG) -> nodes.Starred: ... @overload - def visit(self, node: ast.Try, parent: NodeNG) -> nodes.Try: ... + def visit(self, node: ast.Try, parent: nodes.NodeNG) -> nodes.Try: ... if sys.version_info >= (3, 11): @overload - def visit(self, node: ast.TryStar, parent: NodeNG) -> nodes.TryStar: ... + def visit( + self, node: ast.TryStar, parent: nodes.NodeNG + ) -> nodes.TryStar: ... @overload - def visit(self, node: ast.Tuple, parent: NodeNG) -> nodes.Tuple: ... + def visit(self, node: ast.Tuple, parent: nodes.NodeNG) -> nodes.Tuple: ... if sys.version_info >= (3, 12): @overload - def visit(self, node: ast.TypeAlias, parent: NodeNG) -> nodes.TypeAlias: ... + def visit( + self, node: ast.TypeAlias, parent: nodes.NodeNG + ) -> nodes.TypeAlias: ... @overload - def visit(self, node: ast.TypeVar, parent: NodeNG) -> nodes.TypeVar: ... + def visit( + self, node: ast.TypeVar, parent: nodes.NodeNG + ) -> nodes.TypeVar: ... @overload def visit( - self, node: ast.TypeVarTuple, parent: NodeNG + self, node: ast.TypeVarTuple, parent: nodes.NodeNG ) -> nodes.TypeVarTuple: ... @overload - def visit(self, node: ast.UnaryOp, parent: NodeNG) -> nodes.UnaryOp: ... 
+ def visit(self, node: ast.UnaryOp, parent: nodes.NodeNG) -> nodes.UnaryOp: ... @overload - def visit(self, node: ast.While, parent: NodeNG) -> nodes.While: ... + def visit(self, node: ast.While, parent: nodes.NodeNG) -> nodes.While: ... @overload - def visit(self, node: ast.With, parent: NodeNG) -> nodes.With: ... + def visit(self, node: ast.With, parent: nodes.NodeNG) -> nodes.With: ... @overload - def visit(self, node: ast.Yield, parent: NodeNG) -> nodes.Yield: ... + def visit(self, node: ast.Yield, parent: nodes.NodeNG) -> nodes.Yield: ... @overload - def visit(self, node: ast.YieldFrom, parent: NodeNG) -> nodes.YieldFrom: ... + def visit( + self, node: ast.YieldFrom, parent: nodes.NodeNG + ) -> nodes.YieldFrom: ... - if sys.version_info >= (3, 10): + @overload + def visit(self, node: ast.Match, parent: nodes.NodeNG) -> nodes.Match: ... - @overload - def visit(self, node: ast.Match, parent: NodeNG) -> nodes.Match: ... + @overload + def visit( + self, node: ast.match_case, parent: nodes.NodeNG + ) -> nodes.MatchCase: ... - @overload - def visit( - self, node: ast.match_case, parent: NodeNG - ) -> nodes.MatchCase: ... + @overload + def visit( + self, node: ast.MatchValue, parent: nodes.NodeNG + ) -> nodes.MatchValue: ... - @overload - def visit( - self, node: ast.MatchValue, parent: NodeNG - ) -> nodes.MatchValue: ... + @overload + def visit( + self, node: ast.MatchSingleton, parent: nodes.NodeNG + ) -> nodes.MatchSingleton: ... - @overload - def visit( - self, node: ast.MatchSingleton, parent: NodeNG - ) -> nodes.MatchSingleton: ... + @overload + def visit( + self, node: ast.MatchSequence, parent: nodes.NodeNG + ) -> nodes.MatchSequence: ... - @overload - def visit( - self, node: ast.MatchSequence, parent: NodeNG - ) -> nodes.MatchSequence: ... + @overload + def visit( + self, node: ast.MatchMapping, parent: nodes.NodeNG + ) -> nodes.MatchMapping: ... - @overload - def visit( - self, node: ast.MatchMapping, parent: NodeNG - ) -> nodes.MatchMapping: ... 
+ @overload + def visit( + self, node: ast.MatchClass, parent: nodes.NodeNG + ) -> nodes.MatchClass: ... - @overload - def visit( - self, node: ast.MatchClass, parent: NodeNG - ) -> nodes.MatchClass: ... + @overload + def visit( + self, node: ast.MatchStar, parent: nodes.NodeNG + ) -> nodes.MatchStar: ... - @overload - def visit(self, node: ast.MatchStar, parent: NodeNG) -> nodes.MatchStar: ... + @overload + def visit(self, node: ast.MatchAs, parent: nodes.NodeNG) -> nodes.MatchAs: ... - @overload - def visit(self, node: ast.MatchAs, parent: NodeNG) -> nodes.MatchAs: ... + @overload + def visit(self, node: ast.MatchOr, parent: nodes.NodeNG) -> nodes.MatchOr: ... + + @overload + def visit(self, node: ast.pattern, parent: nodes.NodeNG) -> nodes.Pattern: ... + + if sys.version_info >= (3, 14): @overload - def visit(self, node: ast.MatchOr, parent: NodeNG) -> nodes.MatchOr: ... + def visit( + self, node: ast.TemplateStr, parent: nodes.NodeNG + ) -> nodes.TemplateStr: ... @overload - def visit(self, node: ast.pattern, parent: NodeNG) -> nodes.Pattern: ... + def visit( + self, node: ast.Interpolation, parent: nodes.NodeNG + ) -> nodes.Interpolation: ... @overload - def visit(self, node: ast.AST, parent: NodeNG) -> NodeNG: ... + def visit(self, node: ast.AST, parent: nodes.NodeNG) -> nodes.NodeNG: ... @overload - def visit(self, node: None, parent: NodeNG) -> None: ... + def visit(self, node: None, parent: nodes.NodeNG) -> None: ... 
- def visit(self, node: ast.AST | None, parent: NodeNG) -> NodeNG | None: + def visit(self, node: ast.AST | None, parent: nodes.NodeNG) -> nodes.NodeNG | None: if node is None: return None cls = node.__class__ @@ -456,11 +500,13 @@ def _save_assignment(self, node: nodes.AssignName | nodes.DelName) -> None: assert node.name node.parent.set_local(node.name, node) - def visit_arg(self, node: ast.arg, parent: NodeNG) -> nodes.AssignName: - """Visit an arg node by returning a fresh AssName instance.""" + def visit_arg(self, node: ast.arg, parent: nodes.NodeNG) -> nodes.AssignName: + """Visit an arg node by returning a fresh AssignName instance.""" return self.visit_assignname(node, parent, node.arg) - def visit_arguments(self, node: ast.arguments, parent: NodeNG) -> nodes.Arguments: + def visit_arguments( + self, node: ast.arguments, parent: nodes.NodeNG + ) -> nodes.Arguments: """Visit an Arguments node by returning a fresh instance of it.""" vararg: str | None = None kwarg: str | None = None @@ -472,7 +518,7 @@ def visit_arguments(self, node: ast.arguments, parent: NodeNG) -> nodes.Argument node.kwarg.arg if node.kwarg else None, parent, ( - AssignName( + nodes.AssignName( vararg_node.arg, vararg_node.lineno, vararg_node.col_offset, @@ -484,7 +530,7 @@ def visit_arguments(self, node: ast.arguments, parent: NodeNG) -> nodes.Argument else None ), ( - AssignName( + nodes.AssignName( kwarg_node.arg, kwarg_node.lineno, kwarg_node.col_offset, @@ -498,8 +544,8 @@ def visit_arguments(self, node: ast.arguments, parent: NodeNG) -> nodes.Argument ) args = [self.visit(child, newnode) for child in node.args] defaults = [self.visit(child, newnode) for child in node.defaults] - varargannotation: NodeNG | None = None - kwargannotation: NodeNG | None = None + varargannotation: nodes.NodeNG | None = None + kwargannotation: nodes.NodeNG | None = None if node.vararg: vararg = node.vararg.arg varargannotation = self.visit(node.vararg.annotation, newnode) @@ -551,7 +597,7 @@ def 
visit_arguments(self, node: ast.arguments, parent: NodeNG) -> nodes.Argument newnode.parent.set_local(kwarg, newnode) return newnode - def visit_assert(self, node: ast.Assert, parent: NodeNG) -> nodes.Assert: + def visit_assert(self, node: ast.Assert, parent: nodes.NodeNG) -> nodes.Assert: """Visit a Assert node by returning a fresh instance of it.""" newnode = nodes.Assert( lineno=node.lineno, @@ -560,7 +606,7 @@ def visit_assert(self, node: ast.Assert, parent: NodeNG) -> nodes.Assert: end_col_offset=node.end_col_offset, parent=parent, ) - msg: NodeNG | None = None + msg: nodes.NodeNG | None = None if node.msg: msg = self.visit(node.msg, newnode) newnode.postinit(self.visit(node.test, newnode), msg) @@ -577,7 +623,7 @@ def check_type_comment( | nodes.With | nodes.AsyncWith ), - ) -> NodeNG | None: + ) -> nodes.NodeNG | None: if not node.type_comment: return None @@ -599,8 +645,8 @@ def check_type_comment( return type_object.value def check_function_type_comment( - self, node: ast.FunctionDef | ast.AsyncFunctionDef, parent: NodeNG - ) -> tuple[NodeNG | None, list[NodeNG]] | None: + self, node: ast.FunctionDef | ast.AsyncFunctionDef, parent: nodes.NodeNG + ) -> tuple[nodes.NodeNG | None, list[nodes.NodeNG]] | None: if not node.type_comment: return None @@ -613,8 +659,8 @@ def check_function_type_comment( if not type_comment_ast: return None - returns: NodeNG | None = None - argtypes: list[NodeNG] = [ + returns: nodes.NodeNG | None = None + argtypes: list[nodes.NodeNG] = [ self.visit(elem, parent) for elem in (type_comment_ast.argtypes or []) ] if type_comment_ast.returns: @@ -623,14 +669,16 @@ def check_function_type_comment( return returns, argtypes def visit_asyncfunctiondef( - self, node: ast.AsyncFunctionDef, parent: NodeNG + self, node: ast.AsyncFunctionDef, parent: nodes.NodeNG ) -> nodes.AsyncFunctionDef: return self._visit_functiondef(nodes.AsyncFunctionDef, node, parent) - def visit_asyncfor(self, node: ast.AsyncFor, parent: NodeNG) -> nodes.AsyncFor: + def 
visit_asyncfor( + self, node: ast.AsyncFor, parent: nodes.NodeNG + ) -> nodes.AsyncFor: return self._visit_for(nodes.AsyncFor, node, parent) - def visit_await(self, node: ast.Await, parent: NodeNG) -> nodes.Await: + def visit_await(self, node: ast.Await, parent: nodes.NodeNG) -> nodes.Await: newnode = nodes.Await( lineno=node.lineno, col_offset=node.col_offset, @@ -641,10 +689,12 @@ def visit_await(self, node: ast.Await, parent: NodeNG) -> nodes.Await: newnode.postinit(value=self.visit(node.value, newnode)) return newnode - def visit_asyncwith(self, node: ast.AsyncWith, parent: NodeNG) -> nodes.AsyncWith: + def visit_asyncwith( + self, node: ast.AsyncWith, parent: nodes.NodeNG + ) -> nodes.AsyncWith: return self._visit_with(nodes.AsyncWith, node, parent) - def visit_assign(self, node: ast.Assign, parent: NodeNG) -> nodes.Assign: + def visit_assign(self, node: ast.Assign, parent: nodes.NodeNG) -> nodes.Assign: """Visit a Assign node by returning a fresh instance of it.""" newnode = nodes.Assign( lineno=node.lineno, @@ -661,7 +711,9 @@ def visit_assign(self, node: ast.Assign, parent: NodeNG) -> nodes.Assign: ) return newnode - def visit_annassign(self, node: ast.AnnAssign, parent: NodeNG) -> nodes.AnnAssign: + def visit_annassign( + self, node: ast.AnnAssign, parent: nodes.NodeNG + ) -> nodes.AnnAssign: """Visit an AnnAssign node by returning a fresh instance of it.""" newnode = nodes.AnnAssign( lineno=node.lineno, @@ -680,16 +732,16 @@ def visit_annassign(self, node: ast.AnnAssign, parent: NodeNG) -> nodes.AnnAssig @overload def visit_assignname( - self, node: ast.AST, parent: NodeNG, node_name: str + self, node: ast.AST, parent: nodes.NodeNG, node_name: str ) -> nodes.AssignName: ... @overload def visit_assignname( - self, node: ast.AST, parent: NodeNG, node_name: None + self, node: ast.AST, parent: nodes.NodeNG, node_name: None ) -> None: ... 
def visit_assignname( - self, node: ast.AST, parent: NodeNG, node_name: str | None + self, node: ast.AST, parent: nodes.NodeNG, node_name: str | None ) -> nodes.AssignName | None: """Visit a node and return a AssignName node. @@ -708,7 +760,9 @@ def visit_assignname( self._save_assignment(newnode) return newnode - def visit_augassign(self, node: ast.AugAssign, parent: NodeNG) -> nodes.AugAssign: + def visit_augassign( + self, node: ast.AugAssign, parent: nodes.NodeNG + ) -> nodes.AugAssign: """Visit a AugAssign node by returning a fresh instance of it.""" newnode = nodes.AugAssign( op=self._parser_module.bin_op_classes[type(node.op)] + "=", @@ -723,7 +777,7 @@ def visit_augassign(self, node: ast.AugAssign, parent: NodeNG) -> nodes.AugAssig ) return newnode - def visit_binop(self, node: ast.BinOp, parent: NodeNG) -> nodes.BinOp: + def visit_binop(self, node: ast.BinOp, parent: nodes.NodeNG) -> nodes.BinOp: """Visit a BinOp node by returning a fresh instance of it.""" newnode = nodes.BinOp( op=self._parser_module.bin_op_classes[type(node.op)], @@ -738,7 +792,7 @@ def visit_binop(self, node: ast.BinOp, parent: NodeNG) -> nodes.BinOp: ) return newnode - def visit_boolop(self, node: ast.BoolOp, parent: NodeNG) -> nodes.BoolOp: + def visit_boolop(self, node: ast.BoolOp, parent: nodes.NodeNG) -> nodes.BoolOp: """Visit a BoolOp node by returning a fresh instance of it.""" newnode = nodes.BoolOp( op=self._parser_module.bool_op_classes[type(node.op)], @@ -751,7 +805,7 @@ def visit_boolop(self, node: ast.BoolOp, parent: NodeNG) -> nodes.BoolOp: newnode.postinit([self.visit(child, newnode) for child in node.values]) return newnode - def visit_break(self, node: ast.Break, parent: NodeNG) -> nodes.Break: + def visit_break(self, node: ast.Break, parent: nodes.NodeNG) -> nodes.Break: """Visit a Break node by returning a fresh instance of it.""" return nodes.Break( lineno=node.lineno, @@ -761,7 +815,7 @@ def visit_break(self, node: ast.Break, parent: NodeNG) -> nodes.Break: 
parent=parent, ) - def visit_call(self, node: ast.Call, parent: NodeNG) -> nodes.Call: + def visit_call(self, node: ast.Call, parent: nodes.NodeNG) -> nodes.Call: """Visit a CallFunc node by returning a fresh instance of it.""" newnode = nodes.Call( lineno=node.lineno, @@ -778,7 +832,7 @@ def visit_call(self, node: ast.Call, parent: NodeNG) -> nodes.Call: return newnode def visit_classdef( - self, node: ast.ClassDef, parent: NodeNG, newstyle: bool = True + self, node: ast.ClassDef, parent: nodes.NodeNG, newstyle: bool = True ) -> nodes.ClassDef: """Visit a ClassDef node to become astroid.""" node, doc_ast_node = self._get_doc(node) @@ -815,9 +869,12 @@ def visit_classdef( else [] ), ) + parent.set_local(newnode.name, newnode) return newnode - def visit_continue(self, node: ast.Continue, parent: NodeNG) -> nodes.Continue: + def visit_continue( + self, node: ast.Continue, parent: nodes.NodeNG + ) -> nodes.Continue: """Visit a Continue node by returning a fresh instance of it.""" return nodes.Continue( lineno=node.lineno, @@ -827,7 +884,7 @@ def visit_continue(self, node: ast.Continue, parent: NodeNG) -> nodes.Continue: parent=parent, ) - def visit_compare(self, node: ast.Compare, parent: NodeNG) -> nodes.Compare: + def visit_compare(self, node: ast.Compare, parent: nodes.NodeNG) -> nodes.Compare: """Visit a Compare node by returning a fresh instance of it.""" newnode = nodes.Compare( lineno=node.lineno, @@ -849,7 +906,7 @@ def visit_compare(self, node: ast.Compare, parent: NodeNG) -> nodes.Compare: return newnode def visit_comprehension( - self, node: ast.comprehension, parent: NodeNG + self, node: ast.comprehension, parent: nodes.NodeNG ) -> nodes.Comprehension: """Visit a Comprehension node by returning a fresh instance of it.""" newnode = nodes.Comprehension( @@ -872,7 +929,7 @@ def visit_comprehension( def visit_decorators( self, node: ast.ClassDef | ast.FunctionDef | ast.AsyncFunctionDef, - parent: NodeNG, + parent: nodes.NodeNG, ) -> nodes.Decorators | None: 
"""Visit a Decorators node by returning a fresh instance of it. @@ -898,7 +955,7 @@ def visit_decorators( newnode.postinit([self.visit(child, newnode) for child in node.decorator_list]) return newnode - def visit_delete(self, node: ast.Delete, parent: NodeNG) -> nodes.Delete: + def visit_delete(self, node: ast.Delete, parent: nodes.NodeNG) -> nodes.Delete: """Visit a Delete node by returning a fresh instance of it.""" newnode = nodes.Delete( lineno=node.lineno, @@ -911,10 +968,10 @@ def visit_delete(self, node: ast.Delete, parent: NodeNG) -> nodes.Delete: return newnode def _visit_dict_items( - self, node: ast.Dict, parent: NodeNG, newnode: nodes.Dict - ) -> Generator[tuple[NodeNG, NodeNG]]: + self, node: ast.Dict, parent: nodes.NodeNG, newnode: nodes.Dict + ) -> Generator[tuple[nodes.NodeNG, nodes.NodeNG]]: for key, value in zip(node.keys, node.values): - rebuilt_key: NodeNG + rebuilt_key: nodes.NodeNG rebuilt_value = self.visit(value, newnode) if not key: # Extended unpacking @@ -929,7 +986,7 @@ def _visit_dict_items( rebuilt_key = self.visit(key, newnode) yield rebuilt_key, rebuilt_value - def visit_dict(self, node: ast.Dict, parent: NodeNG) -> nodes.Dict: + def visit_dict(self, node: ast.Dict, parent: nodes.NodeNG) -> nodes.Dict: """Visit a Dict node by returning a fresh instance of it.""" newnode = nodes.Dict( lineno=node.lineno, @@ -944,7 +1001,9 @@ def visit_dict(self, node: ast.Dict, parent: NodeNG) -> nodes.Dict: newnode.postinit(items) return newnode - def visit_dictcomp(self, node: ast.DictComp, parent: NodeNG) -> nodes.DictComp: + def visit_dictcomp( + self, node: ast.DictComp, parent: nodes.NodeNG + ) -> nodes.DictComp: """Visit a DictComp node by returning a fresh instance of it.""" newnode = nodes.DictComp( lineno=node.lineno, @@ -960,7 +1019,7 @@ def visit_dictcomp(self, node: ast.DictComp, parent: NodeNG) -> nodes.DictComp: ) return newnode - def visit_expr(self, node: ast.Expr, parent: NodeNG) -> nodes.Expr: + def visit_expr(self, node: ast.Expr, 
parent: nodes.NodeNG) -> nodes.Expr: """Visit a Expr node by returning a fresh instance of it.""" newnode = nodes.Expr( lineno=node.lineno, @@ -973,7 +1032,7 @@ def visit_expr(self, node: ast.Expr, parent: NodeNG) -> nodes.Expr: return newnode def visit_excepthandler( - self, node: ast.ExceptHandler, parent: NodeNG + self, node: ast.ExceptHandler, parent: nodes.NodeNG ) -> nodes.ExceptHandler: """Visit an ExceptHandler node by returning a fresh instance of it.""" newnode = nodes.ExceptHandler( @@ -992,16 +1051,16 @@ def visit_excepthandler( @overload def _visit_for( - self, cls: type[nodes.For], node: ast.For, parent: NodeNG + self, cls: type[nodes.For], node: ast.For, parent: nodes.NodeNG ) -> nodes.For: ... @overload def _visit_for( - self, cls: type[nodes.AsyncFor], node: ast.AsyncFor, parent: NodeNG + self, cls: type[nodes.AsyncFor], node: ast.AsyncFor, parent: nodes.NodeNG ) -> nodes.AsyncFor: ... def _visit_for( - self, cls: type[_ForT], node: ast.For | ast.AsyncFor, parent: NodeNG + self, cls: type[_ForT], node: ast.For | ast.AsyncFor, parent: nodes.NodeNG ) -> _ForT: """Visit a For node by returning a fresh instance of it.""" newnode = cls( @@ -1021,11 +1080,11 @@ def _visit_for( ) return newnode - def visit_for(self, node: ast.For, parent: NodeNG) -> nodes.For: + def visit_for(self, node: ast.For, parent: nodes.NodeNG) -> nodes.For: return self._visit_for(nodes.For, node, parent) def visit_importfrom( - self, node: ast.ImportFrom, parent: NodeNG + self, node: ast.ImportFrom, parent: nodes.NodeNG ) -> nodes.ImportFrom: """Visit an ImportFrom node by returning a fresh instance of it.""" names = [(alias.name, alias.asname) for alias in node.names] @@ -1040,12 +1099,14 @@ def visit_importfrom( parent=parent, ) # store From names to add them to locals after building - self._import_from_nodes.append(newnode) + self._import_from_nodes.append( + (newnode, self._global_names[-1].keys() if self._global_names else ()) + ) return newnode @overload def 
_visit_functiondef( - self, cls: type[nodes.FunctionDef], node: ast.FunctionDef, parent: NodeNG + self, cls: type[nodes.FunctionDef], node: ast.FunctionDef, parent: nodes.NodeNG ) -> nodes.FunctionDef: ... @overload @@ -1053,14 +1114,14 @@ def _visit_functiondef( self, cls: type[nodes.AsyncFunctionDef], node: ast.AsyncFunctionDef, - parent: NodeNG, + parent: nodes.NodeNG, ) -> nodes.AsyncFunctionDef: ... def _visit_functiondef( self, cls: type[_FunctionT], node: ast.FunctionDef | ast.AsyncFunctionDef, - parent: NodeNG, + parent: nodes.NodeNG, ) -> _FunctionT: """Visit an FunctionDef node to become astroid.""" self._global_names.append({}) @@ -1086,7 +1147,7 @@ def _visit_functiondef( parent=parent, ) decorators = self.visit_decorators(node, newnode) - returns: NodeNG | None + returns: nodes.NodeNG | None if node.returns: returns = self.visit(node.returns, newnode) else: @@ -1112,15 +1173,16 @@ def _visit_functiondef( ), ) self._global_names.pop() + parent.set_local(newnode.name, newnode) return newnode def visit_functiondef( - self, node: ast.FunctionDef, parent: NodeNG + self, node: ast.FunctionDef, parent: nodes.NodeNG ) -> nodes.FunctionDef: return self._visit_functiondef(nodes.FunctionDef, node, parent) def visit_generatorexp( - self, node: ast.GeneratorExp, parent: NodeNG + self, node: ast.GeneratorExp, parent: nodes.NodeNG ) -> nodes.GeneratorExp: """Visit a GeneratorExp node by returning a fresh instance of it.""" newnode = nodes.GeneratorExp( @@ -1137,7 +1199,7 @@ def visit_generatorexp( return newnode def visit_attribute( - self, node: ast.Attribute, parent: NodeNG + self, node: ast.Attribute, parent: nodes.NodeNG ) -> nodes.Attribute | nodes.AssignAttr | nodes.DelAttr: """Visit an Attribute node by returning a fresh instance of it.""" context = self._get_context(node) @@ -1180,7 +1242,7 @@ def visit_attribute( newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_global(self, node: ast.Global, parent: NodeNG) -> nodes.Global: + def 
visit_global(self, node: ast.Global, parent: nodes.NodeNG) -> nodes.Global: """Visit a Global node to become astroid.""" newnode = nodes.Global( names=node.names, @@ -1195,7 +1257,7 @@ def visit_global(self, node: ast.Global, parent: NodeNG) -> nodes.Global: self._global_names[-1].setdefault(name, []).append(newnode) return newnode - def visit_if(self, node: ast.If, parent: NodeNG) -> nodes.If: + def visit_if(self, node: ast.If, parent: nodes.NodeNG) -> nodes.If: """Visit an If node by returning a fresh instance of it.""" newnode = nodes.If( lineno=node.lineno, @@ -1211,7 +1273,7 @@ def visit_if(self, node: ast.If, parent: NodeNG) -> nodes.If: ) return newnode - def visit_ifexp(self, node: ast.IfExp, parent: NodeNG) -> nodes.IfExp: + def visit_ifexp(self, node: ast.IfExp, parent: nodes.NodeNG) -> nodes.IfExp: """Visit a IfExp node by returning a fresh instance of it.""" newnode = nodes.IfExp( lineno=node.lineno, @@ -1227,7 +1289,7 @@ def visit_ifexp(self, node: ast.IfExp, parent: NodeNG) -> nodes.IfExp: ) return newnode - def visit_import(self, node: ast.Import, parent: NodeNG) -> nodes.Import: + def visit_import(self, node: ast.Import, parent: nodes.NodeNG) -> nodes.Import: """Visit a Import node by returning a fresh instance of it.""" names = [(alias.name, alias.asname) for alias in node.names] newnode = nodes.Import( @@ -1240,11 +1302,16 @@ def visit_import(self, node: ast.Import, parent: NodeNG) -> nodes.Import: ) # save import names in parent's locals: for name, asname in newnode.names: - name = asname or name - parent.set_local(name.split(".")[0], newnode) + name = (asname or name).split(".")[0] + if self._global_names and name in self._global_names[-1]: + parent.root().set_local(name, newnode) + else: + parent.set_local(name, newnode) return newnode - def visit_joinedstr(self, node: ast.JoinedStr, parent: NodeNG) -> nodes.JoinedStr: + def visit_joinedstr( + self, node: ast.JoinedStr, parent: nodes.NodeNG + ) -> nodes.JoinedStr: newnode = nodes.JoinedStr( 
lineno=node.lineno, col_offset=node.col_offset, @@ -1256,7 +1323,7 @@ def visit_joinedstr(self, node: ast.JoinedStr, parent: NodeNG) -> nodes.JoinedSt return newnode def visit_formattedvalue( - self, node: ast.FormattedValue, parent: NodeNG + self, node: ast.FormattedValue, parent: nodes.NodeNG ) -> nodes.FormattedValue: newnode = nodes.FormattedValue( lineno=node.lineno, @@ -1272,7 +1339,9 @@ def visit_formattedvalue( ) return newnode - def visit_namedexpr(self, node: ast.NamedExpr, parent: NodeNG) -> nodes.NamedExpr: + def visit_namedexpr( + self, node: ast.NamedExpr, parent: nodes.NodeNG + ) -> nodes.NamedExpr: newnode = nodes.NamedExpr( lineno=node.lineno, col_offset=node.col_offset, @@ -1285,7 +1354,7 @@ def visit_namedexpr(self, node: ast.NamedExpr, parent: NodeNG) -> nodes.NamedExp ) return newnode - def visit_keyword(self, node: ast.keyword, parent: NodeNG) -> nodes.Keyword: + def visit_keyword(self, node: ast.keyword, parent: nodes.NodeNG) -> nodes.Keyword: """Visit a Keyword node by returning a fresh instance of it.""" newnode = nodes.Keyword( arg=node.arg, @@ -1299,7 +1368,7 @@ def visit_keyword(self, node: ast.keyword, parent: NodeNG) -> nodes.Keyword: newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_lambda(self, node: ast.Lambda, parent: NodeNG) -> nodes.Lambda: + def visit_lambda(self, node: ast.Lambda, parent: nodes.NodeNG) -> nodes.Lambda: """Visit a Lambda node by returning a fresh instance of it.""" newnode = nodes.Lambda( lineno=node.lineno, @@ -1311,7 +1380,7 @@ def visit_lambda(self, node: ast.Lambda, parent: NodeNG) -> nodes.Lambda: newnode.postinit(self.visit(node.args, newnode), self.visit(node.body, newnode)) return newnode - def visit_list(self, node: ast.List, parent: NodeNG) -> nodes.List: + def visit_list(self, node: ast.List, parent: nodes.NodeNG) -> nodes.List: """Visit a List node by returning a fresh instance of it.""" context = self._get_context(node) newnode = nodes.List( @@ -1325,7 +1394,9 @@ def 
visit_list(self, node: ast.List, parent: NodeNG) -> nodes.List: newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode - def visit_listcomp(self, node: ast.ListComp, parent: NodeNG) -> nodes.ListComp: + def visit_listcomp( + self, node: ast.ListComp, parent: nodes.NodeNG + ) -> nodes.ListComp: """Visit a ListComp node by returning a fresh instance of it.""" newnode = nodes.ListComp( lineno=node.lineno, @@ -1341,7 +1412,7 @@ def visit_listcomp(self, node: ast.ListComp, parent: NodeNG) -> nodes.ListComp: return newnode def visit_name( - self, node: ast.Name, parent: NodeNG + self, node: ast.Name, parent: nodes.NodeNG ) -> nodes.Name | nodes.AssignName | nodes.DelName: """Visit a Name node by returning a fresh instance of it.""" context = self._get_context(node) @@ -1375,11 +1446,13 @@ def visit_name( ) # XXX REMOVE me : if context in (Context.Del, Context.Store): # 'Aug' ?? - newnode = cast(Union[nodes.AssignName, nodes.DelName], newnode) + newnode = cast((nodes.AssignName | nodes.DelName), newnode) self._save_assignment(newnode) return newnode - def visit_nonlocal(self, node: ast.Nonlocal, parent: NodeNG) -> nodes.Nonlocal: + def visit_nonlocal( + self, node: ast.Nonlocal, parent: nodes.NodeNG + ) -> nodes.Nonlocal: """Visit a Nonlocal node and return a new instance of it.""" return nodes.Nonlocal( names=node.names, @@ -1390,7 +1463,7 @@ def visit_nonlocal(self, node: ast.Nonlocal, parent: NodeNG) -> nodes.Nonlocal: parent=parent, ) - def visit_constant(self, node: ast.Constant, parent: NodeNG) -> nodes.Const: + def visit_constant(self, node: ast.Constant, parent: nodes.NodeNG) -> nodes.Const: """Visit a Constant node by returning a fresh instance of Const.""" return nodes.Const( value=node.value, @@ -1402,7 +1475,9 @@ def visit_constant(self, node: ast.Constant, parent: NodeNG) -> nodes.Const: parent=parent, ) - def visit_paramspec(self, node: ast.ParamSpec, parent: NodeNG) -> nodes.ParamSpec: + def visit_paramspec( + self, node: 
ast.ParamSpec, parent: nodes.NodeNG + ) -> nodes.ParamSpec: """Visit a ParamSpec node by returning a fresh instance of it.""" newnode = nodes.ParamSpec( lineno=node.lineno, @@ -1413,10 +1488,15 @@ def visit_paramspec(self, node: ast.ParamSpec, parent: NodeNG) -> nodes.ParamSpe ) # Add AssignName node for 'node.name' # https://bugs.python.org/issue43994 - newnode.postinit(name=self.visit_assignname(node, newnode, node.name)) + newnode.postinit( + name=self.visit_assignname(node, newnode, node.name), + default_value=( + self.visit(node.default_value, newnode) if PY313_PLUS else None + ), + ) return newnode - def visit_pass(self, node: ast.Pass, parent: NodeNG) -> nodes.Pass: + def visit_pass(self, node: ast.Pass, parent: nodes.NodeNG) -> nodes.Pass: """Visit a Pass node by returning a fresh instance of it.""" return nodes.Pass( lineno=node.lineno, @@ -1426,7 +1506,7 @@ def visit_pass(self, node: ast.Pass, parent: NodeNG) -> nodes.Pass: parent=parent, ) - def visit_raise(self, node: ast.Raise, parent: NodeNG) -> nodes.Raise: + def visit_raise(self, node: ast.Raise, parent: nodes.NodeNG) -> nodes.Raise: """Visit a Raise node by returning a fresh instance of it.""" newnode = nodes.Raise( lineno=node.lineno, @@ -1442,7 +1522,7 @@ def visit_raise(self, node: ast.Raise, parent: NodeNG) -> nodes.Raise: ) return newnode - def visit_return(self, node: ast.Return, parent: NodeNG) -> nodes.Return: + def visit_return(self, node: ast.Return, parent: nodes.NodeNG) -> nodes.Return: """Visit a Return node by returning a fresh instance of it.""" newnode = nodes.Return( lineno=node.lineno, @@ -1454,7 +1534,7 @@ def visit_return(self, node: ast.Return, parent: NodeNG) -> nodes.Return: newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_set(self, node: ast.Set, parent: NodeNG) -> nodes.Set: + def visit_set(self, node: ast.Set, parent: nodes.NodeNG) -> nodes.Set: """Visit a Set node by returning a fresh instance of it.""" newnode = nodes.Set( lineno=node.lineno, 
@@ -1466,7 +1546,7 @@ def visit_set(self, node: ast.Set, parent: NodeNG) -> nodes.Set: newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode - def visit_setcomp(self, node: ast.SetComp, parent: NodeNG) -> nodes.SetComp: + def visit_setcomp(self, node: ast.SetComp, parent: nodes.NodeNG) -> nodes.SetComp: """Visit a SetComp node by returning a fresh instance of it.""" newnode = nodes.SetComp( lineno=node.lineno, @@ -1498,7 +1578,9 @@ def visit_slice(self, node: ast.Slice, parent: nodes.Subscript) -> nodes.Slice: ) return newnode - def visit_subscript(self, node: ast.Subscript, parent: NodeNG) -> nodes.Subscript: + def visit_subscript( + self, node: ast.Subscript, parent: nodes.NodeNG + ) -> nodes.Subscript: """Visit a Subscript node by returning a fresh instance of it.""" context = self._get_context(node) newnode = nodes.Subscript( @@ -1514,7 +1596,7 @@ def visit_subscript(self, node: ast.Subscript, parent: NodeNG) -> nodes.Subscrip ) return newnode - def visit_starred(self, node: ast.Starred, parent: NodeNG) -> nodes.Starred: + def visit_starred(self, node: ast.Starred, parent: nodes.NodeNG) -> nodes.Starred: """Visit a Starred node and return a new instance of it.""" context = self._get_context(node) newnode = nodes.Starred( @@ -1528,7 +1610,7 @@ def visit_starred(self, node: ast.Starred, parent: NodeNG) -> nodes.Starred: newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_try(self, node: ast.Try, parent: NodeNG) -> nodes.Try: + def visit_try(self, node: ast.Try, parent: nodes.NodeNG) -> nodes.Try: """Visit a Try node by returning a fresh instance of it""" newnode = nodes.Try( lineno=node.lineno, @@ -1545,7 +1627,7 @@ def visit_try(self, node: ast.Try, parent: NodeNG) -> nodes.Try: ) return newnode - def visit_trystar(self, node: ast.TryStar, parent: NodeNG) -> nodes.TryStar: + def visit_trystar(self, node: ast.TryStar, parent: nodes.NodeNG) -> nodes.TryStar: newnode = nodes.TryStar( lineno=node.lineno, 
col_offset=node.col_offset, @@ -1561,7 +1643,7 @@ def visit_trystar(self, node: ast.TryStar, parent: NodeNG) -> nodes.TryStar: ) return newnode - def visit_tuple(self, node: ast.Tuple, parent: NodeNG) -> nodes.Tuple: + def visit_tuple(self, node: ast.Tuple, parent: nodes.NodeNG) -> nodes.Tuple: """Visit a Tuple node by returning a fresh instance of it.""" context = self._get_context(node) newnode = nodes.Tuple( @@ -1575,7 +1657,9 @@ def visit_tuple(self, node: ast.Tuple, parent: NodeNG) -> nodes.Tuple: newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode - def visit_typealias(self, node: ast.TypeAlias, parent: NodeNG) -> nodes.TypeAlias: + def visit_typealias( + self, node: ast.TypeAlias, parent: nodes.NodeNG + ) -> nodes.TypeAlias: """Visit a TypeAlias node by returning a fresh instance of it.""" newnode = nodes.TypeAlias( lineno=node.lineno, @@ -1591,7 +1675,7 @@ def visit_typealias(self, node: ast.TypeAlias, parent: NodeNG) -> nodes.TypeAlia ) return newnode - def visit_typevar(self, node: ast.TypeVar, parent: NodeNG) -> nodes.TypeVar: + def visit_typevar(self, node: ast.TypeVar, parent: nodes.NodeNG) -> nodes.TypeVar: """Visit a TypeVar node by returning a fresh instance of it.""" newnode = nodes.TypeVar( lineno=node.lineno, @@ -1605,11 +1689,14 @@ def visit_typevar(self, node: ast.TypeVar, parent: NodeNG) -> nodes.TypeVar: newnode.postinit( name=self.visit_assignname(node, newnode, node.name), bound=self.visit(node.bound, newnode), + default_value=( + self.visit(node.default_value, newnode) if PY313_PLUS else None + ), ) return newnode def visit_typevartuple( - self, node: ast.TypeVarTuple, parent: NodeNG + self, node: ast.TypeVarTuple, parent: nodes.NodeNG ) -> nodes.TypeVarTuple: """Visit a TypeVarTuple node by returning a fresh instance of it.""" newnode = nodes.TypeVarTuple( @@ -1621,10 +1708,15 @@ def visit_typevartuple( ) # Add AssignName node for 'node.name' # https://bugs.python.org/issue43994 - 
newnode.postinit(name=self.visit_assignname(node, newnode, node.name)) + newnode.postinit( + name=self.visit_assignname(node, newnode, node.name), + default_value=( + self.visit(node.default_value, newnode) if PY313_PLUS else None + ), + ) return newnode - def visit_unaryop(self, node: ast.UnaryOp, parent: NodeNG) -> nodes.UnaryOp: + def visit_unaryop(self, node: ast.UnaryOp, parent: nodes.NodeNG) -> nodes.UnaryOp: """Visit a UnaryOp node by returning a fresh instance of it.""" newnode = nodes.UnaryOp( op=self._parser_module.unary_op_classes[node.op.__class__], @@ -1637,7 +1729,7 @@ def visit_unaryop(self, node: ast.UnaryOp, parent: NodeNG) -> nodes.UnaryOp: newnode.postinit(self.visit(node.operand, newnode)) return newnode - def visit_while(self, node: ast.While, parent: NodeNG) -> nodes.While: + def visit_while(self, node: ast.While, parent: nodes.NodeNG) -> nodes.While: """Visit a While node by returning a fresh instance of it.""" newnode = nodes.While( lineno=node.lineno, @@ -1655,19 +1747,19 @@ def visit_while(self, node: ast.While, parent: NodeNG) -> nodes.While: @overload def _visit_with( - self, cls: type[nodes.With], node: ast.With, parent: NodeNG + self, cls: type[nodes.With], node: ast.With, parent: nodes.NodeNG ) -> nodes.With: ... @overload def _visit_with( - self, cls: type[nodes.AsyncWith], node: ast.AsyncWith, parent: NodeNG + self, cls: type[nodes.AsyncWith], node: ast.AsyncWith, parent: nodes.NodeNG ) -> nodes.AsyncWith: ... 
def _visit_with( self, cls: type[_WithT], node: ast.With | ast.AsyncWith, - parent: NodeNG, + parent: nodes.NodeNG, ) -> _WithT: newnode = cls( lineno=node.lineno, @@ -1677,7 +1769,9 @@ def _visit_with( parent=parent, ) - def visit_child(child: ast.withitem) -> tuple[NodeNG, NodeNG | None]: + def visit_child( + child: ast.withitem, + ) -> tuple[nodes.NodeNG, nodes.NodeNG | None]: expr = self.visit(child.context_expr, newnode) var = self.visit(child.optional_vars, newnode) return expr, var @@ -1690,10 +1784,10 @@ def visit_child(child: ast.withitem) -> tuple[NodeNG, NodeNG | None]: ) return newnode - def visit_with(self, node: ast.With, parent: NodeNG) -> NodeNG: + def visit_with(self, node: ast.With, parent: nodes.NodeNG) -> nodes.NodeNG: return self._visit_with(nodes.With, node, parent) - def visit_yield(self, node: ast.Yield, parent: NodeNG) -> NodeNG: + def visit_yield(self, node: ast.Yield, parent: nodes.NodeNG) -> nodes.NodeNG: """Visit a Yield node by returning a fresh instance of it.""" newnode = nodes.Yield( lineno=node.lineno, @@ -1705,7 +1799,9 @@ def visit_yield(self, node: ast.Yield, parent: NodeNG) -> NodeNG: newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_yieldfrom(self, node: ast.YieldFrom, parent: NodeNG) -> NodeNG: + def visit_yieldfrom( + self, node: ast.YieldFrom, parent: nodes.NodeNG + ) -> nodes.NodeNG: newnode = nodes.YieldFrom( lineno=node.lineno, col_offset=node.col_offset, @@ -1716,145 +1812,175 @@ def visit_yieldfrom(self, node: ast.YieldFrom, parent: NodeNG) -> NodeNG: newnode.postinit(self.visit(node.value, newnode)) return newnode - if sys.version_info >= (3, 10): + def visit_match(self, node: ast.Match, parent: nodes.NodeNG) -> nodes.Match: + newnode = nodes.Match( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + newnode.postinit( + subject=self.visit(node.subject, newnode), + cases=[self.visit(case, newnode) for 
case in node.cases], + ) + return newnode - def visit_match(self, node: ast.Match, parent: NodeNG) -> nodes.Match: - newnode = nodes.Match( - lineno=node.lineno, - col_offset=node.col_offset, - end_lineno=node.end_lineno, - end_col_offset=node.end_col_offset, - parent=parent, - ) - newnode.postinit( - subject=self.visit(node.subject, newnode), - cases=[self.visit(case, newnode) for case in node.cases], - ) - return newnode + def visit_matchcase( + self, node: ast.match_case, parent: nodes.NodeNG + ) -> nodes.MatchCase: + newnode = nodes.MatchCase(parent=parent) + newnode.postinit( + pattern=self.visit(node.pattern, newnode), + guard=self.visit(node.guard, newnode), + body=[self.visit(child, newnode) for child in node.body], + ) + return newnode - def visit_matchcase( - self, node: ast.match_case, parent: NodeNG - ) -> nodes.MatchCase: - newnode = nodes.MatchCase(parent=parent) - newnode.postinit( - pattern=self.visit(node.pattern, newnode), - guard=self.visit(node.guard, newnode), - body=[self.visit(child, newnode) for child in node.body], - ) - return newnode + def visit_matchvalue( + self, node: ast.MatchValue, parent: nodes.NodeNG + ) -> nodes.MatchValue: + newnode = nodes.MatchValue( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + newnode.postinit(value=self.visit(node.value, newnode)) + return newnode - def visit_matchvalue( - self, node: ast.MatchValue, parent: NodeNG - ) -> nodes.MatchValue: - newnode = nodes.MatchValue( - lineno=node.lineno, - col_offset=node.col_offset, - end_lineno=node.end_lineno, - end_col_offset=node.end_col_offset, - parent=parent, - ) - newnode.postinit(value=self.visit(node.value, newnode)) - return newnode + def visit_matchsingleton( + self, node: ast.MatchSingleton, parent: nodes.NodeNG + ) -> nodes.MatchSingleton: + return nodes.MatchSingleton( + value=node.value, + lineno=node.lineno, + col_offset=node.col_offset, + 
end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) - def visit_matchsingleton( - self, node: ast.MatchSingleton, parent: NodeNG - ) -> nodes.MatchSingleton: - return nodes.MatchSingleton( - value=node.value, - lineno=node.lineno, - col_offset=node.col_offset, - end_lineno=node.end_lineno, - end_col_offset=node.end_col_offset, - parent=parent, - ) + def visit_matchsequence( + self, node: ast.MatchSequence, parent: nodes.NodeNG + ) -> nodes.MatchSequence: + newnode = nodes.MatchSequence( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + newnode.postinit( + patterns=[self.visit(pattern, newnode) for pattern in node.patterns] + ) + return newnode - def visit_matchsequence( - self, node: ast.MatchSequence, parent: NodeNG - ) -> nodes.MatchSequence: - newnode = nodes.MatchSequence( - lineno=node.lineno, - col_offset=node.col_offset, - end_lineno=node.end_lineno, - end_col_offset=node.end_col_offset, - parent=parent, - ) - newnode.postinit( - patterns=[self.visit(pattern, newnode) for pattern in node.patterns] - ) - return newnode + def visit_matchmapping( + self, node: ast.MatchMapping, parent: nodes.NodeNG + ) -> nodes.MatchMapping: + newnode = nodes.MatchMapping( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + # Add AssignName node for 'node.name' + # https://bugs.python.org/issue43994 + newnode.postinit( + keys=[self.visit(child, newnode) for child in node.keys], + patterns=[self.visit(pattern, newnode) for pattern in node.patterns], + rest=self.visit_assignname(node, newnode, node.rest), + ) + return newnode - def visit_matchmapping( - self, node: ast.MatchMapping, parent: NodeNG - ) -> nodes.MatchMapping: - newnode = nodes.MatchMapping( - lineno=node.lineno, - col_offset=node.col_offset, - end_lineno=node.end_lineno, - 
end_col_offset=node.end_col_offset, - parent=parent, - ) - # Add AssignName node for 'node.name' - # https://bugs.python.org/issue43994 - newnode.postinit( - keys=[self.visit(child, newnode) for child in node.keys], - patterns=[self.visit(pattern, newnode) for pattern in node.patterns], - rest=self.visit_assignname(node, newnode, node.rest), - ) - return newnode + def visit_matchclass( + self, node: ast.MatchClass, parent: nodes.NodeNG + ) -> nodes.MatchClass: + newnode = nodes.MatchClass( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + newnode.postinit( + cls=self.visit(node.cls, newnode), + patterns=[self.visit(pattern, newnode) for pattern in node.patterns], + kwd_attrs=node.kwd_attrs, + kwd_patterns=[ + self.visit(pattern, newnode) for pattern in node.kwd_patterns + ], + ) + return newnode - def visit_matchclass( - self, node: ast.MatchClass, parent: NodeNG - ) -> nodes.MatchClass: - newnode = nodes.MatchClass( - lineno=node.lineno, - col_offset=node.col_offset, - end_lineno=node.end_lineno, - end_col_offset=node.end_col_offset, - parent=parent, - ) - newnode.postinit( - cls=self.visit(node.cls, newnode), - patterns=[self.visit(pattern, newnode) for pattern in node.patterns], - kwd_attrs=node.kwd_attrs, - kwd_patterns=[ - self.visit(pattern, newnode) for pattern in node.kwd_patterns - ], - ) - return newnode + def visit_matchstar( + self, node: ast.MatchStar, parent: nodes.NodeNG + ) -> nodes.MatchStar: + newnode = nodes.MatchStar( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + # Add AssignName node for 'node.name' + # https://bugs.python.org/issue43994 + newnode.postinit(name=self.visit_assignname(node, newnode, node.name)) + return newnode - def visit_matchstar( - self, node: ast.MatchStar, parent: NodeNG - ) -> nodes.MatchStar: - newnode = nodes.MatchStar( - 
lineno=node.lineno, - col_offset=node.col_offset, - end_lineno=node.end_lineno, - end_col_offset=node.end_col_offset, - parent=parent, - ) - # Add AssignName node for 'node.name' - # https://bugs.python.org/issue43994 - newnode.postinit(name=self.visit_assignname(node, newnode, node.name)) - return newnode + def visit_matchas(self, node: ast.MatchAs, parent: nodes.NodeNG) -> nodes.MatchAs: + newnode = nodes.MatchAs( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + # Add AssignName node for 'node.name' + # https://bugs.python.org/issue43994 + newnode.postinit( + pattern=self.visit(node.pattern, newnode), + name=self.visit_assignname(node, newnode, node.name), + ) + return newnode + + def visit_matchor(self, node: ast.MatchOr, parent: nodes.NodeNG) -> nodes.MatchOr: + newnode = nodes.MatchOr( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + newnode.postinit( + patterns=[self.visit(pattern, newnode) for pattern in node.patterns] + ) + return newnode + + if sys.version_info >= (3, 14): - def visit_matchas(self, node: ast.MatchAs, parent: NodeNG) -> nodes.MatchAs: - newnode = nodes.MatchAs( + def visit_templatestr( + self, node: ast.TemplateStr, parent: nodes.NodeNG + ) -> nodes.TemplateStr: + newnode = nodes.TemplateStr( lineno=node.lineno, col_offset=node.col_offset, end_lineno=node.end_lineno, end_col_offset=node.end_col_offset, parent=parent, ) - # Add AssignName node for 'node.name' - # https://bugs.python.org/issue43994 newnode.postinit( - pattern=self.visit(node.pattern, newnode), - name=self.visit_assignname(node, newnode, node.name), + values=[self.visit(value, newnode) for value in node.values] ) return newnode - def visit_matchor(self, node: ast.MatchOr, parent: NodeNG) -> nodes.MatchOr: - newnode = nodes.MatchOr( + def visit_interpolation( + self, node: ast.Interpolation, 
parent: nodes.NodeNG + ) -> nodes.Interpolation: + newnode = nodes.Interpolation( lineno=node.lineno, col_offset=node.col_offset, end_lineno=node.end_lineno, @@ -1862,6 +1988,9 @@ def visit_matchor(self, node: ast.MatchOr, parent: NodeNG) -> nodes.MatchOr: parent=parent, ) newnode.postinit( - patterns=[self.visit(pattern, newnode) for pattern in node.patterns] + value=self.visit(node.value, parent), + str=node.str, + conversion=node.conversion, + format_spec=self.visit(node.format_spec, parent), ) return newnode diff --git a/astroid/transforms.py b/astroid/transforms.py index 5f0e533136..d44ec3dc13 100644 --- a/astroid/transforms.py +++ b/astroid/transforms.py @@ -7,7 +7,7 @@ import warnings from collections import defaultdict from collections.abc import Callable -from typing import TYPE_CHECKING, Optional, TypeVar, Union, cast, overload +from typing import TYPE_CHECKING, TypeVar, Union, cast, overload from astroid.context import _invalidate_cache from astroid.typing import SuccessfulInferenceResult, TransformFn @@ -18,18 +18,19 @@ _SuccessfulInferenceResultT = TypeVar( "_SuccessfulInferenceResultT", bound=SuccessfulInferenceResult ) - _Predicate = Optional[Callable[[_SuccessfulInferenceResultT], bool]] + _Predicate = Callable[[_SuccessfulInferenceResultT], bool] | None +# pylint: disable-next=consider-alternative-union-syntax _Vistables = Union[ "nodes.NodeNG", list["nodes.NodeNG"], tuple["nodes.NodeNG", ...], str, None ] -_VisitReturns = Union[ - SuccessfulInferenceResult, - list[SuccessfulInferenceResult], - tuple[SuccessfulInferenceResult, ...], - str, - None, -] +_VisitReturns = ( + SuccessfulInferenceResult + | list[SuccessfulInferenceResult] + | tuple[SuccessfulInferenceResult, ...] 
+ | str + | None +) class TransformVisitor: @@ -78,7 +79,9 @@ def _transform(self, node: SuccessfulInferenceResult) -> SuccessfulInferenceResu def _visit(self, node: nodes.NodeNG) -> SuccessfulInferenceResult: for name in node._astroid_fields: value = getattr(node, name) - value = cast(_Vistables, value) + if TYPE_CHECKING: + value = cast(_Vistables, value) + visited = self._visit_generic(value) if visited != value: setattr(node, name, visited) @@ -104,11 +107,13 @@ def _visit_generic( def _visit_generic(self, node: nodes.NodeNG) -> SuccessfulInferenceResult: ... def _visit_generic(self, node: _Vistables) -> _VisitReturns: + if not node: + return node if isinstance(node, list): return [self._visit_generic(child) for child in node] if isinstance(node, tuple): return tuple(self._visit_generic(child) for child in node) - if not node or isinstance(node, str): + if isinstance(node, str): return node try: diff --git a/astroid/typing.py b/astroid/typing.py index 77cc120306..37ea43452b 100644 --- a/astroid/typing.py +++ b/astroid/typing.py @@ -47,6 +47,7 @@ class AstroidManagerBrain(TypedDict): _transform: transforms.TransformVisitor +# pylint: disable=consider-alternative-union-syntax InferenceResult = Union["nodes.NodeNG", "util.UninferableBase", "bases.Proxy"] SuccessfulInferenceResult = Union["nodes.NodeNG", "bases.Proxy"] _SuccessfulInferenceResultT = TypeVar( diff --git a/doc/release.md b/doc/release.md index 7dc7453a49..8e4ca833d5 100644 --- a/doc/release.md +++ b/doc/release.md @@ -30,10 +30,9 @@ tbump 2.5.0-dev0 --no-tag --no-push git commit -am "Upgrade the version to 2.5.0-dev0 following 2.4.0 release" ``` -Check the commit and then push to a release branch +Check the commit and then push to a release branch: - Open a merge request with the two commits (no one can push directly on `main`) -- Trigger the "release tests" workflow in GitHub Actions. 
- After the merge, recover the merged commits on `main` and tag the first one (the version should be `X.Y.Z`) as `vX.Y.Z` (For example: `v2.4.0`) - Push the tag. @@ -92,8 +91,8 @@ is the version under development on `main`.) - Fix version conflicts properly, meaning preserve the version numbers of the form `X.Y.0-devZ` (For example: `2.4.0-dev6`). - Open a merge request against main. Ensure a merge commit is used, because pre-commit - need the patch release tag to be in the main branch history to consider the patch - release as the latest version and this won't be the case with rebase or squash. You + needs the patch release tag to be in the main branch history to consider the patch + release as the latest version, and this won't be the case with rebase or squash. You can defend against trigger-happy future selves by enabling auto-merge with the merge commit strategy. - Wait for approval. Again, use a merge commit. diff --git a/doc/requirements.txt b/doc/requirements.txt index 6f446f6323..cf9e808be3 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,3 +1,3 @@ -e . -sphinx~=7.4 -furo==2024.7.18 +sphinx~=8.1 +furo==2025.9.25 diff --git a/pylintrc b/pylintrc index d605bc4826..2957fffb65 100644 --- a/pylintrc +++ b/pylintrc @@ -9,7 +9,13 @@ # Add files or directories to the blacklist. They should be base names, not # paths. -ignore=CVS +ignore=.tox,CVS + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths=doc/conf.py,tests/testdata/ # Pickle collected data for later comparisons. persistent=yes @@ -36,10 +42,10 @@ unsafe-load-any-extension=no # A comma-separated list of package or module names from where C extensions may # be loaded. 
Extensions are loading into the active Python interpreter and may # run arbitrary code -extension-pkg-whitelist= +extension-pkg-whitelist=mypy # Minimum supported python version -py-version = 3.9.0 +py-version = 3.10.0 [REPORTS] @@ -86,9 +92,6 @@ disable=fixme, missing-docstring, too-few-public-methods, too-many-public-methods, - too-many-boolean-expressions, - too-many-branches, - too-many-statements, # We know about it and we're doing our best to remove it in 2.0 (oups) cyclic-import, # Requires major redesign for fixing this (and private diff --git a/pyproject.toml b/pyproject.toml index cce26ba4f3..a8c53c1f11 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ keywords = [ "abstract syntax tree", "python", "static code analysis" ] license = "LGPL-2.1-or-later" license-files = [ "LICENSE", "CONTRIBUTORS.txt" ] -requires-python = ">=3.9.0" +requires-python = ">=3.10.0" classifiers = [ "Development Status :: 6 - Mature", "Environment :: Console", @@ -19,11 +19,11 @@ classifiers = [ "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Python Modules", @@ -50,12 +50,14 @@ include = [ "astroid*" ] version = { attr = "astroid.__pkginfo__.__version__" } [tool.ruff] -target-version = "py39" +target-version = "py310" # ruff is less lenient than pylint and does not make any exceptions # (for docstrings, strings and comments in particular). 
line-length = 110 +extend-exclude = [ "tests/testdata/" ] + lint.select = [ "B", # bugbear "E", # pycodestyle @@ -81,6 +83,9 @@ lint.fixable = [ ] lint.unfixable = [ "RUF001" ] +[tool.pyproject-fmt] +max_supported_python = "3.14" + [tool.pytest.ini_options] addopts = '-m "not acceptance"' python_files = [ "*test_*.py" ] @@ -88,11 +93,58 @@ testpaths = [ "tests" ] filterwarnings = "error" [tool.mypy] -enable_error_code = "ignore-without-code" -no_implicit_optional = true +python_version = "3.10" +files = [ + "astroid/_ast.py", + "astroid/astroid_manager.py", + "astroid/brain/brain_crypt.py", + "astroid/brain/brain_ctypes.py", + "astroid/brain/brain_curses.py", + "astroid/brain/brain_datetime.py", + "astroid/brain/brain_dateutil.py", + "astroid/brain/brain_hashlib.py", + "astroid/brain/brain_http.py", + "astroid/brain/brain_hypothesis.py", + "astroid/brain/brain_mechanize.py", + "astroid/brain/brain_numpy_core_einsumfunc.py", + "astroid/brain/brain_numpy_core_fromnumeric.py", + "astroid/brain/brain_numpy_core_function_base.py", + "astroid/brain/brain_numpy_core_multiarray.py", + "astroid/brain/brain_numpy_core_numeric.py", + "astroid/brain/brain_numpy_core_numerictypes.py", + "astroid/brain/brain_numpy_core_umath.py", + "astroid/brain/brain_numpy_ma.py", + "astroid/brain/brain_numpy_random_mtrand.py", + "astroid/brain/brain_pkg_resources.py", + "astroid/brain/brain_pytest.py", + "astroid/brain/brain_responses.py", + "astroid/brain/brain_scipy_signal.py", + "astroid/brain/brain_sqlalchemy.py", + "astroid/brain/brain_ssl.py", + "astroid/brain/brain_subprocess.py", + "astroid/brain/brain_threading.py", + "astroid/brain/brain_unittest.py", + "astroid/brain/brain_uuid.py", + "astroid/const.py", + "astroid/context.py", + "astroid/interpreter/_import/", + "astroid/modutils.py", + "astroid/nodes/const.py", + "astroid/nodes/utils.py", +] +always_false = [ + "PY311_PLUS", + "PY312_PLUS", + "PY313_PLUS", +] +disallow_any_decorated = true +disallow_any_explicit = true 
+follow_imports = "silent" scripts_are_modules = true show_error_codes = true +strict = true warn_redundant_casts = true +warn_unreachable = true [[tool.mypy.overrides]] # Importlib typeshed stubs do not include the private functions we use @@ -100,7 +152,6 @@ module = [ "_io.*", "gi.*", "importlib.*", - "nose.*", "numpy.*", "pytest", "setuptools", diff --git a/requirements_dev.txt b/requirements_dev.txt index 77a6fd9f64..9f5ea5659b 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -3,5 +3,5 @@ # Tools used during development, prefer running these with pre-commit black pre-commit -pylint>=3.2.7 +pylint>=4.0.0 ruff diff --git a/requirements_full.txt b/requirements_full.txt index 99cd84a6ee..e4ac735e6e 100644 --- a/requirements_full.txt +++ b/requirements_full.txt @@ -3,12 +3,11 @@ # Packages used to run additional tests attrs -nose numpy>=1.17.0,<2; python_version<"3.12" python-dateutil PyQt6 regex -setuptools; python_version<"3.12" +setuptools; python_version>="3.12" six urllib3>1,<2 typing_extensions>=4.4.0 diff --git a/requirements_minimal.txt b/requirements_minimal.txt index 969390b5df..9fde495aeb 100644 --- a/requirements_minimal.txt +++ b/requirements_minimal.txt @@ -3,8 +3,7 @@ contributors-txt>=0.7.4 tbump~=6.11 # Tools used to run tests -coverage~=7.8 +coverage~=7.10 pytest -pytest-cov~=5.0 -mypy -setuptools +pytest-cov~=7.0 +mypy; platform_python_implementation!="PyPy" diff --git a/script/.contributors_aliases.json b/script/.contributors_aliases.json index 8a2ad6f7d1..56a97d5d0f 100644 --- a/script/.contributors_aliases.json +++ b/script/.contributors_aliases.json @@ -22,6 +22,17 @@ "name": "Mark Byrne", "team": "Maintainers" }, + "44875844+temyurchenko@users.noreply.github.com": { + "mails": [ + "artemyurchenko@zoho.com", + "44875844+temyurchenko@users.noreply.github.com" + ], + "name": "Artem Yurchenko" + }, + "53538590+zenlyj@users.noreply.github.com": { + "mails": ["53538590+zenlyj@users.noreply.github.com", "zenlyj97@gmail.com"], + 
"name": "Zen Lee" + }, "55152140+jayaddison@users.noreply.github.com": { "mails": ["55152140+jayaddison@users.noreply.github.com", "jay@jp-hosting.net"], "name": "James Addison" @@ -34,6 +45,10 @@ "mails": ["adam.grant.hendry@gmail.com"], "name": "Adam Hendry" }, + "akhil.kamat@gmail.com": { + "mails": ["akhil.kamat@gmail.com"], + "name": "Akhil Kamat" + }, "androwiiid@gmail.com": { "mails": ["androwiiid@gmail.com"], "name": "Paligot Gérard" @@ -62,6 +77,7 @@ "66853113+pre-commit-ci[bot]@users.noreply.github.com", "49699333+dependabot[bot]@users.noreply.github.com", "41898282+github-actions[bot]@users.noreply.github.com", + "212256041+pylint-backport[bot]@users.noreply.github.com", "212256041+pylint-backport-bot[bot]@users.noreply.github.com" ], "name": "bot" @@ -93,13 +109,20 @@ "mails": ["david@dropbox.com", "github@euresti.com"], "name": "David Euresti" }, + "grayjk@gmail.com": { + "mails": ["grayjk@gmail.com"], + "name": "grayjk" + }, "guillaume.peillex@gmail.com": { "mails": ["guillaume.peillex@gmail.com"], "name": "Hippo91", "team": "Maintainers" }, "hugovk@users.noreply.github.com": { - "mails": ["hugovk@users.noreply.github.com"], + "mails": [ + "hugovk@users.noreply.github.com", + "1324225+hugovk@users.noreply.github.com" + ], "name": "Hugo van Kemenade" }, "jacob@bogdanov.dev": { @@ -127,6 +150,10 @@ "mails": ["mcorcherojim@bloomberg.net", "mariocj89@gmail.com"], "name": "Mario Corchero" }, + "matusvalo@users.noreply.github.com": { + "mails": ["matusvalo@users.noreply.github.com"], + "name": "Matus Valo" + }, "me@the-compiler.org": { "mails": ["me@the-compiler.org"], "name": "Florian Bruhin", diff --git a/script/bump_changelog.py b/script/bump_changelog.py index a08a1aef10..7bdcd5c355 100644 --- a/script/bump_changelog.py +++ b/script/bump_changelog.py @@ -34,7 +34,7 @@ def main() -> None: if args.verbose: logging.basicConfig(level=logging.DEBUG) logging.debug(f"Launching bump_changelog with args: {args}") - if any(s in args.version for s in ("dev", "a", 
"b")): + if any(s in args.version for s in ("dev", "a", "b", "rc")): return with open(DEFAULT_CHANGELOG_PATH, encoding="utf-8") as f: content = f.read() diff --git a/tbump.toml b/tbump.toml index b3dfc11e1c..29eba1925b 100644 --- a/tbump.toml +++ b/tbump.toml @@ -1,7 +1,7 @@ github_url = "https://github.com/pylint-dev/astroid" [version] -current = "3.3.11" +current = "4.0.3" regex = ''' ^(?P0|[1-9]\d*) \. diff --git a/tests/brain/test_attr.py b/tests/brain/test_attr.py index ef4887378f..23dae47085 100644 --- a/tests/brain/test_attr.py +++ b/tests/brain/test_attr.py @@ -85,7 +85,7 @@ class Eggs: for name in ("f", "g", "h", "i", "j", "k", "l", "m"): should_be_unknown = next(module.getattr(name)[0].infer()).getattr("d")[0] - self.assertIsInstance(should_be_unknown, astroid.Unknown) + self.assertIsInstance(should_be_unknown, nodes.Unknown) def test_attrs_transform(self) -> None: """Test brain for decorators of the 'attrs' package. @@ -158,7 +158,7 @@ class Legs: for name in ("f", "g", "h", "i", "j", "k", "l"): should_be_unknown = next(module.getattr(name)[0].infer()).getattr("d")[0] - self.assertIsInstance(should_be_unknown, astroid.Unknown, name) + self.assertIsInstance(should_be_unknown, nodes.Unknown, name) def test_special_attributes(self) -> None: """Make sure special attrs attributes exist""" @@ -200,7 +200,7 @@ class Foo: Foo() """ should_be_unknown = next(astroid.extract_node(code).infer()).getattr("bar")[0] - self.assertIsInstance(should_be_unknown, astroid.Unknown) + self.assertIsInstance(should_be_unknown, nodes.Unknown) def test_attr_with_only_annotation_fails(self) -> None: code = """ @@ -228,4 +228,60 @@ class Foo: should_be_unknown = next(astroid.extract_node(code).infer()).getattr( attr_name )[0] - self.assertIsInstance(should_be_unknown, astroid.Unknown) + self.assertIsInstance(should_be_unknown, nodes.Unknown) + + def test_attrs_with_class_var_annotation(self) -> None: + cases = { + "with-subscript": """ + import attrs + from typing import ClassVar + 
+ @attrs.define + class Foo: + bar: ClassVar[int] = 1 + Foo() + """, + "no-subscript": """ + import attrs + from typing import ClassVar + + @attrs.define + class Foo: + bar: ClassVar = 1 + Foo() + """, + } + + for name, code in cases.items(): + with self.subTest(case=name): + instance = next(astroid.extract_node(code).infer()) + self.assertIsInstance(instance.getattr("bar")[0], nodes.AssignName) + self.assertNotIn("bar", instance.instance_attrs) + + def test_attrs_without_class_var_annotation(self) -> None: + cases = { + "wrong-name": """ + import attrs + from typing import Final + + @attrs.define + class Foo: + bar: Final[int] = 1 + Foo() + """, + "classvar-not-outermost": """ + import attrs + from typing import ClassVar + + @attrs.define + class Foo: + bar: list[ClassVar[int]] = [] + Foo() + """, + } + + for name, code in cases.items(): + with self.subTest(case=name): + instance = next(astroid.extract_node(code).infer()) + self.assertIsInstance(instance.getattr("bar")[0], nodes.Unknown) + self.assertIn("bar", instance.instance_attrs) diff --git a/tests/brain/test_brain.py b/tests/brain/test_brain.py index 0b082ab535..64dbdc876e 100644 --- a/tests/brain/test_brain.py +++ b/tests/brain/test_brain.py @@ -15,17 +15,15 @@ from astroid import MANAGER, builder, nodes, objects, test_utils, util from astroid.bases import Instance from astroid.brain.brain_namedtuple_enum import _get_namedtuple_fields -from astroid.const import PY312_PLUS, PY313_PLUS, PY314_PLUS +from astroid.const import PY312_PLUS, PY313_PLUS from astroid.exceptions import ( AttributeInferenceError, InferenceError, UseInferenceDefault, ) -from astroid.nodes.node_classes import Const -from astroid.nodes.scoped_nodes import ClassDef -def assertEqualMro(klass: ClassDef, expected_mro: list[str]) -> None: +def assertEqualMro(klass: nodes.ClassDef, expected_mro: list[str]) -> None: """Check mro names.""" assert [member.qname() for member in klass.mro()] == expected_mro @@ -128,7 +126,6 @@ def 
test_sys_streams(self): self.assertEqual(raw.name, "FileIO") -@test_utils.require_version("3.9") class TypeBrain(unittest.TestCase): def test_type_subscript(self): """ @@ -141,10 +138,10 @@ def test_type_subscript(self): """ ) val_inf = src.annotation.value.inferred()[0] - self.assertIsInstance(val_inf, astroid.ClassDef) + self.assertIsInstance(val_inf, nodes.ClassDef) self.assertEqual(val_inf.name, "type") meth_inf = val_inf.getattr("__class_getitem__")[0] - self.assertIsInstance(meth_inf, astroid.FunctionDef) + self.assertIsInstance(meth_inf, nodes.FunctionDef) def test_invalid_type_subscript(self): """ @@ -158,7 +155,7 @@ def test_invalid_type_subscript(self): """ ) val_inf = src.annotation.value.inferred()[0] - self.assertIsInstance(val_inf, astroid.ClassDef) + self.assertIsInstance(val_inf, nodes.ClassDef) self.assertEqual(val_inf.name, "str") with self.assertRaises(AttributeInferenceError): # pylint: disable=expression-not-assigned @@ -182,7 +179,7 @@ def check_metaclass_is_abc(node: nodes.ClassDef): if PY312_PLUS and node.name == "ByteString": # .metaclass() finds the first metaclass in the mro(), # which, from 3.12, is _DeprecateByteStringMeta (unhelpful) - # until ByteString is removed in 3.14. + # until ByteString is removed in 3.17. # Jump over the first two ByteString classes in the mro(). check_metaclass_is_abc(node.mro()[2]) else: @@ -265,43 +262,6 @@ def test_collections_object_subscriptable(self): inferred.getattr("__class_getitem__")[0], nodes.FunctionDef ) - @test_utils.require_version(maxver="3.9") - def test_collections_object_not_yet_subscriptable(self): - """ - Test that unsubscriptable types are detected as such. - Until python39 MutableSet of the collections module is not subscriptable. 
- """ - wrong_node = builder.extract_node( - """ - import collections.abc - collections.abc.MutableSet[int] - """ - ) - with self.assertRaises(InferenceError): - next(wrong_node.infer()) - right_node = builder.extract_node( - """ - import collections.abc - collections.abc.MutableSet - """ - ) - inferred = next(right_node.infer()) - check_metaclass_is_abc(inferred) - assertEqualMro( - inferred, - [ - "_collections_abc.MutableSet", - "_collections_abc.Set", - "_collections_abc.Collection", - "_collections_abc.Sized", - "_collections_abc.Iterable", - "_collections_abc.Container", - "builtins.object", - ], - ) - with self.assertRaises(AttributeInferenceError): - inferred.getattr("__class_getitem__") - def test_collections_object_subscriptable_2(self): """Starting with python39 Iterator in the collection.abc module is subscriptable""" node = builder.extract_node( @@ -323,21 +283,6 @@ class Derived(collections.abc.Iterator[int]): ], ) - @test_utils.require_version(maxver="3.9") - def test_collections_object_not_yet_subscriptable_2(self): - """Before python39 Iterator in the collection.abc module is not subscriptable""" - node = builder.extract_node( - """ - import collections.abc - collections.abc.Iterator[int] - """ - ) - with self.assertRaises(InferenceError): - next(node.infer()) - - @pytest.mark.skipif( - PY314_PLUS, reason="collections.abc.ByteString was removed in 3.14" - ) def test_collections_object_subscriptable_3(self): """With Python 3.9 the ByteString class of the collections module is subscriptable (but not the same class from typing module)""" @@ -374,6 +319,16 @@ class Derived(collections.abc.Hashable, collections.abc.Iterator[int]): ], ) + def test_statistics_quantiles_from_import(self): + node = builder.extract_node( + """ + from statistics import quantiles + quantiles([1, 2, 3, 4, 5, 6, 7, 8, 9], n=4) + """ + ) + inferred = next(node.infer()) + self.assertIs(inferred, util.Uninferable) + class TypingBrain(unittest.TestCase): def 
test_namedtuple_base(self) -> None: @@ -408,7 +363,7 @@ def as_integer(self): ) self.assertEqual(len(klass.getattr("as_string")), 1) inferred = next(called.infer()) - self.assertIsInstance(inferred, astroid.Const) + self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 5) def test_namedtuple_inference(self) -> None: @@ -498,7 +453,7 @@ class Example(NamedTuple): self.assertIsInstance(inferred, astroid.Instance) class_attr = inferred.getattr("CLASS_ATTR")[0] - self.assertIsInstance(class_attr, astroid.AssignName) + self.assertIsInstance(class_attr, nodes.AssignName) const = next(class_attr.infer()) self.assertEqual(const.value, "class_attr") @@ -589,10 +544,10 @@ class Foo: """ ) inferred = next(result.infer()) - self.assertIsInstance(inferred, astroid.ClassDef) + self.assertIsInstance(inferred, nodes.ClassDef) class_def_attr = inferred.getattr("Foo")[0] - self.assertIsInstance(class_def_attr, astroid.ClassDef) + self.assertIsInstance(class_def_attr, nodes.ClassDef) attr_def = class_def_attr.getattr("bar")[0] attr = next(attr_def.infer()) self.assertEqual(attr.value, "bar") @@ -705,7 +660,6 @@ def test_typing_no_duplicates_2(self): ) assert len(node.inferred()) == 1 - @test_utils.require_version(minver="3.10") def test_typing_param_spec(self): node = builder.extract_node( """ @@ -921,9 +875,8 @@ class Derived(typing.Hashable, typing.Iterator[int]): ], ) - @pytest.mark.skipif(PY314_PLUS, reason="typing.ByteString was removed in 3.14") def test_typing_object_notsubscriptable_3(self): - """Until python39 ByteString class of the typing module is not + """The ByteString class of the typing module is not subscriptable (whereas it is in the collections' module)""" right_node = builder.extract_node( """ @@ -1025,55 +978,6 @@ def test_regex_flags(self) -> None: self.assertIn(name, re_ast) self.assertEqual(next(re_ast[name].infer()).value, getattr(re, name)) - @test_utils.require_version(maxver="3.9") - def test_re_pattern_unsubscriptable(self): - """ 
- re.Pattern and re.Match are unsubscriptable until PY39. - """ - right_node1 = builder.extract_node( - """ - import re - re.Pattern - """ - ) - inferred1 = next(right_node1.infer()) - assert isinstance(inferred1, nodes.ClassDef) - with self.assertRaises(AttributeInferenceError): - assert isinstance( - inferred1.getattr("__class_getitem__")[0], nodes.FunctionDef - ) - - right_node2 = builder.extract_node( - """ - import re - re.Pattern - """ - ) - inferred2 = next(right_node2.infer()) - assert isinstance(inferred2, nodes.ClassDef) - with self.assertRaises(AttributeInferenceError): - assert isinstance( - inferred2.getattr("__class_getitem__")[0], nodes.FunctionDef - ) - - wrong_node1 = builder.extract_node( - """ - import re - re.Pattern[int] - """ - ) - with self.assertRaises(InferenceError): - next(wrong_node1.infer()) - - wrong_node2 = builder.extract_node( - """ - import re - re.Match[int] - """ - ) - with self.assertRaises(InferenceError): - next(wrong_node2.infer()) - def test_re_pattern_subscriptable(self): """Test re.Pattern and re.Match are subscriptable in PY39+""" node1 = builder.extract_node( @@ -1134,7 +1038,7 @@ def test_inferred_successfully(self) -> None: """ ) inferred = next(node.infer()) - self.assertIsInstance(inferred, astroid.List) + self.assertIsInstance(inferred, nodes.List) elems = sorted(elem.value for elem in inferred.elts) self.assertEqual(elems, [1, 2]) @@ -1152,12 +1056,12 @@ def sequence(): ) # Check that arguments are of type `nodes.Call`. sequence, length = node.args - self.assertIsInstance(sequence, astroid.Call) - self.assertIsInstance(length, astroid.Call) + self.assertIsInstance(sequence, nodes.Call) + self.assertIsInstance(length, nodes.Call) # Check the inference of `random.sample` call. 
inferred = next(node.infer()) - self.assertIsInstance(inferred, astroid.List) + self.assertIsInstance(inferred, nodes.List) elems = sorted(elem.value for elem in inferred.elts) self.assertEqual(elems, [1, 2]) @@ -1169,7 +1073,7 @@ class A: pass sample(list({1: A()}.values()), 1)""" ) inferred = next(node.infer()) - assert isinstance(inferred, astroid.List) + assert isinstance(inferred, nodes.List) assert len(inferred.elts) == 1 assert isinstance(inferred.elts[0], nodes.Call) @@ -1200,10 +1104,9 @@ def test_subprcess_check_output(self) -> None: node = astroid.extract_node(code) inferred = next(node.infer()) # Can be either str or bytes - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) assert isinstance(inferred.value, (str, bytes)) - @test_utils.require_version("3.9") def test_popen_does_not_have_class_getitem(self): code = """import subprocess; subprocess.Popen""" node = astroid.extract_node(code) @@ -1456,7 +1359,7 @@ def test_too_many_args(self) -> None: _get_result_node("issubclass(int, int, str)") -def _get_result_node(code: str) -> Const: +def _get_result_node(code: str) -> nodes.Const: node = next(astroid.extract_node(code).infer()) return node @@ -1677,7 +1580,7 @@ def test_infer_str() -> None: ) for node in ast_nodes: inferred = next(node.infer()) - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) node = astroid.extract_node( """ @@ -1698,7 +1601,7 @@ def test_infer_int() -> None: ) for node in ast_nodes: inferred = next(node.infer()) - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) ast_nodes = astroid.extract_node( """ @@ -1725,7 +1628,8 @@ def test_infer_dict_from_keys() -> None: ) for node in bad_nodes: with pytest.raises(InferenceError): - next(node.infer()) + if isinstance(next(node.infer()), util.UninferableBase): + raise InferenceError # Test uninferable values good_nodes = astroid.extract_node( @@ -1739,7 +1643,7 @@ def 
test_infer_dict_from_keys() -> None: ) for node in good_nodes: inferred = next(node.infer()) - assert isinstance(inferred, astroid.Dict) + assert isinstance(inferred, nodes.Dict) assert inferred.items == [] # Test inferable values @@ -1751,9 +1655,9 @@ def test_infer_dict_from_keys() -> None: """ ) inferred = next(from_dict.infer()) - assert isinstance(inferred, astroid.Dict) + assert isinstance(inferred, nodes.Dict) itered = inferred.itered() - assert all(isinstance(elem, astroid.Const) for elem in itered) + assert all(isinstance(elem, nodes.Const) for elem in itered) actual_values = [elem.value for elem in itered] assert sorted(actual_values) == ["a", "b", "c"] @@ -1764,9 +1668,9 @@ def test_infer_dict_from_keys() -> None: """ ) inferred = next(from_string.infer()) - assert isinstance(inferred, astroid.Dict) + assert isinstance(inferred, nodes.Dict) itered = inferred.itered() - assert all(isinstance(elem, astroid.Const) for elem in itered) + assert all(isinstance(elem, nodes.Const) for elem in itered) actual_values = [elem.value for elem in itered] assert sorted(actual_values) == ["a", "b", "c"] @@ -1777,9 +1681,9 @@ def test_infer_dict_from_keys() -> None: """ ) inferred = next(from_bytes.infer()) - assert isinstance(inferred, astroid.Dict) + assert isinstance(inferred, nodes.Dict) itered = inferred.itered() - assert all(isinstance(elem, astroid.Const) for elem in itered) + assert all(isinstance(elem, nodes.Const) for elem in itered) actual_values = [elem.value for elem in itered] assert sorted(actual_values) == [97, 98, 99] @@ -1793,9 +1697,9 @@ def test_infer_dict_from_keys() -> None: ) for node in from_others: inferred = next(node.infer()) - assert isinstance(inferred, astroid.Dict) + assert isinstance(inferred, nodes.Dict) itered = inferred.itered() - assert all(isinstance(elem, astroid.Const) for elem in itered) + assert all(isinstance(elem, nodes.Const) for elem in itered) actual_values = [elem.value for elem in itered] assert sorted(actual_values) == 
["a", "b", "c"] @@ -1843,7 +1747,7 @@ def test(a, b, c): ) for node in ast_nodes: inferred = next(node.infer()) - assert isinstance(inferred, (astroid.FunctionDef, astroid.Instance)) + assert isinstance(inferred, (nodes.FunctionDef, astroid.Instance)) assert inferred.qname() in { "functools.partial", "functools.partial.newfunc", @@ -1874,7 +1778,7 @@ def other_test(a, b, *, c=1): expected_values = [4, 7, 7, 3, 12, 16, 32, 36, 3, 9, 7] for node, expected_value in zip(ast_nodes, expected_values): inferred = next(node.infer()) - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) assert inferred.value == expected_value def test_partial_assignment(self) -> None: @@ -1975,7 +1879,7 @@ def test_http_status_brain() -> None: """ ) inferred = next(node.infer()) - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) def test_http_status_brain_iterable() -> None: @@ -2002,7 +1906,7 @@ def test_oserror_model() -> None: ) inferred = next(node.infer()) strerror = next(inferred.igetattr("strerror")) - assert isinstance(strerror, astroid.Const) + assert isinstance(strerror, nodes.Const) assert strerror.value == "" @@ -2023,9 +1927,9 @@ def test_crypt_brain() -> None: @pytest.mark.parametrize( "code,expected_class,expected_value", [ - ("'hey'.encode()", astroid.Const, b""), - ("b'hey'.decode()", astroid.Const, ""), - ("'hey'.encode().decode()", astroid.Const, ""), + ("'hey'.encode()", nodes.Const, b""), + ("b'hey'.decode()", nodes.Const, ""), + ("'hey'.encode().decode()", nodes.Const, ""), ], ) def test_str_and_bytes(code, expected_class, expected_value): diff --git a/tests/brain/test_builtin.py b/tests/brain/test_builtin.py index cf413f16cd..6c49a80042 100644 --- a/tests/brain/test_builtin.py +++ b/tests/brain/test_builtin.py @@ -14,7 +14,7 @@ class BuiltinsTest(unittest.TestCase): def test_infer_property(self): - class_with_property = _extract_single_node( + property_assign = _extract_single_node( """ class 
Something: def getter(): @@ -22,14 +22,15 @@ def getter(): asd = property(getter) #@ """ ) - inferred_property = next(iter(class_with_property.value.infer())) + inferred_property = next(iter(property_assign.value.infer())) self.assertTrue(isinstance(inferred_property, objects.Property)) - class_parent = inferred_property.parent.parent.parent + class_parent = property_assign.scope() self.assertIsInstance(class_parent, nodes.ClassDef) self.assertFalse( any( - isinstance(getter, objects.Property) - for getter in class_parent.locals["getter"] + isinstance(def_, objects.Property) + for def_list in class_parent.locals.values() + for def_ in def_list ) ) self.assertTrue(hasattr(inferred_property, "args")) diff --git a/tests/brain/test_dataclasses.py b/tests/brain/test_dataclasses.py index cd3fcb4cfb..f34bcb4baf 100644 --- a/tests/brain/test_dataclasses.py +++ b/tests/brain/test_dataclasses.py @@ -6,7 +6,6 @@ import astroid from astroid import bases, nodes -from astroid.const import PY310_PLUS from astroid.exceptions import InferenceError from astroid.util import Uninferable @@ -422,19 +421,12 @@ class KeywordOnlyChild(KeywordOnlyParent): assert [a.name for a in child_init.args.args] == ["self", "a", "b", "c"] normal_init: bases.UnboundMethod = next(normal.infer()) - if PY310_PLUS: - assert [a.name for a in normal_init.args.args] == ["self", "a", "c"] - assert [a.name for a in normal_init.args.kwonlyargs] == ["b"] - else: - assert [a.name for a in normal_init.args.args] == ["self", "a", "b", "c"] - assert [a.name for a in normal_init.args.kwonlyargs] == [] + assert [a.name for a in normal_init.args.args] == ["self", "a", "c"] + assert [a.name for a in normal_init.args.kwonlyargs] == ["b"] keyword_only_init: bases.UnboundMethod = next(keyword_only.infer()) - if PY310_PLUS: - assert [a.name for a in keyword_only_init.args.args] == ["self"] - assert [a.name for a in keyword_only_init.args.kwonlyargs] == ["a", "b", "c"] - else: - assert [a.name for a in 
keyword_only_init.args.args] == ["self", "a", "b", "c"] + assert [a.name for a in keyword_only_init.args.args] == ["self"] + assert [a.name for a in keyword_only_init.args.kwonlyargs] == ["a", "b", "c"] def test_pydantic_field() -> None: @@ -806,10 +798,7 @@ class B: B.__init__ #@ """ ) - if PY310_PLUS: - expected = ["self", "y"] - else: - expected = ["self", "_", "y"] + expected = ["self", "y"] init = next(node_one.infer()) assert [a.name for a in init.args.args] == expected @@ -818,10 +807,7 @@ class B: def test_kw_only_decorator() -> None: - """Test that we update the signature correctly based on the keyword. - - kw_only was introduced in PY310. - """ + """Test that we update the signature correctly based on the keyword.""" foodef, bardef, cee, dee = astroid.extract_node( """ from dataclasses import dataclass @@ -855,43 +841,20 @@ class Dee(Cee): ) foo_init: bases.UnboundMethod = next(foodef.infer()) - if PY310_PLUS: - assert [a.name for a in foo_init.args.args] == ["self"] - assert [a.name for a in foo_init.args.kwonlyargs] == ["a", "e"] - else: - assert [a.name for a in foo_init.args.args] == ["self", "a", "e"] - assert [a.name for a in foo_init.args.kwonlyargs] == [] + assert [a.name for a in foo_init.args.args] == ["self"] + assert [a.name for a in foo_init.args.kwonlyargs] == ["a", "e"] bar_init: bases.UnboundMethod = next(bardef.infer()) - if PY310_PLUS: - assert [a.name for a in bar_init.args.args] == ["self", "c"] - assert [a.name for a in bar_init.args.kwonlyargs] == ["a", "e"] - else: - assert [a.name for a in bar_init.args.args] == ["self", "a", "e", "c"] - assert [a.name for a in bar_init.args.kwonlyargs] == [] + assert [a.name for a in bar_init.args.args] == ["self", "c"] + assert [a.name for a in bar_init.args.kwonlyargs] == ["a", "e"] cee_init: bases.UnboundMethod = next(cee.infer()) - if PY310_PLUS: - assert [a.name for a in cee_init.args.args] == ["self", "c", "d"] - assert [a.name for a in cee_init.args.kwonlyargs] == ["a", "e"] - else: - 
assert [a.name for a in cee_init.args.args] == ["self", "a", "e", "c", "d"] - assert [a.name for a in cee_init.args.kwonlyargs] == [] + assert [a.name for a in cee_init.args.args] == ["self", "c", "d"] + assert [a.name for a in cee_init.args.kwonlyargs] == ["a", "e"] dee_init: bases.UnboundMethod = next(dee.infer()) - if PY310_PLUS: - assert [a.name for a in dee_init.args.args] == ["self", "c", "d"] - assert [a.name for a in dee_init.args.kwonlyargs] == ["a", "e", "ee"] - else: - assert [a.name for a in dee_init.args.args] == [ - "self", - "a", - "e", - "c", - "d", - "ee", - ] - assert [a.name for a in dee_init.args.kwonlyargs] == [] + assert [a.name for a in dee_init.args.args] == ["self", "c", "d"] + assert [a.name for a in dee_init.args.kwonlyargs] == ["a", "e", "ee"] def test_kw_only_in_field_call() -> None: diff --git a/tests/brain/test_enum.py b/tests/brain/test_enum.py index 910c81f680..4e3e732af2 100644 --- a/tests/brain/test_enum.py +++ b/tests/brain/test_enum.py @@ -38,7 +38,7 @@ def mymethod(self, x): self.assertIn("builtins.property", prop.decoratornames()) meth = one.getattr("mymethod")[0] - self.assertIsInstance(meth, astroid.FunctionDef) + self.assertIsInstance(meth, nodes.FunctionDef) def test_looks_like_enum_false_positive(self) -> None: # Test that a class named Enumeration is not considered a builtin enum. 
@@ -70,7 +70,7 @@ class Color(Enum): assert isinstance(ast_node, nodes.NodeNG) inferred = ast_node.inferred() self.assertEqual(len(inferred), 1) - self.assertIsInstance(inferred[0], astroid.Const) + self.assertIsInstance(inferred[0], nodes.Const) self.assertEqual(inferred[0].value, 1) def test_ignores_with_nodes_from_body_of_enum(self) -> None: @@ -169,7 +169,7 @@ def test_enum_func_form_subscriptable(self) -> None: self.assertIsInstance(instance, astroid.Instance) inferred = next(name.infer()) - self.assertIsInstance(inferred, astroid.Const) + self.assertIsInstance(inferred, nodes.Const) def test_enum_func_form_has_dunder_members(self) -> None: instance = builder.extract_node( @@ -181,7 +181,7 @@ def test_enum_func_form_has_dunder_members(self) -> None: """ ) instance = next(instance.infer()) - self.assertIsInstance(instance, astroid.Const) + self.assertIsInstance(instance, nodes.Const) self.assertIsInstance(instance.value, str) def test_infer_enum_value_as_the_right_type(self) -> None: @@ -197,13 +197,13 @@ class A(Enum): ) inferred_string = string_value.inferred() assert any( - isinstance(elem, astroid.Const) and elem.value == "a" + isinstance(elem, nodes.Const) and elem.value == "a" for elem in inferred_string ) inferred_int = int_value.inferred() assert any( - isinstance(elem, astroid.Const) and elem.value == 1 for elem in inferred_int + isinstance(elem, nodes.Const) and elem.value == 1 for elem in inferred_int ) def test_mingled_single_and_double_quotes_does_not_crash(self) -> None: @@ -247,7 +247,7 @@ class Commands(IntEnum): """ ) inferred = next(node.infer()) - assert isinstance(inferred, astroid.ClassDef) + assert isinstance(inferred, nodes.ClassDef) def test_enum_tuple_list_values(self) -> None: tuple_node, list_node = builder.extract_node( @@ -263,8 +263,8 @@ class MyEnum(enum.Enum): ) inferred_tuple_node = next(tuple_node.infer()) inferred_list_node = next(list_node.infer()) - assert isinstance(inferred_tuple_node, astroid.Tuple) - assert 
isinstance(inferred_list_node, astroid.List) + assert isinstance(inferred_tuple_node, nodes.Tuple) + assert isinstance(inferred_list_node, nodes.List) assert inferred_tuple_node.as_string() == "(1, 2)" assert inferred_list_node.as_string() == "[2, 4]" @@ -303,6 +303,18 @@ def func(self): next(i_value.infer()) next(c_value.infer()) + def test_enum_name_property_has_docstring(self) -> None: + code = """ + from enum import Enum + class EmptyEnum(Enum): #@ + ... + """ + node = astroid.extract_node(code) + name_property = next(node.mymethods()) + + assert name_property.name == "name" + assert name_property.doc_node is not None + def test_enum_name_and_value_members_override_dynamicclassattr(self) -> None: code = """ from enum import Enum @@ -349,7 +361,7 @@ class Color(EnumSubclass): assert isinstance(ast_node, nodes.NodeNG) inferred = ast_node.inferred() self.assertEqual(len(inferred), 1) - self.assertIsInstance(inferred[0], astroid.Const) + self.assertIsInstance(inferred[0], nodes.Const) self.assertEqual(inferred[0].value, "red") def test_enum_subclass_member_value(self) -> None: @@ -369,7 +381,7 @@ class Color(EnumSubclass): assert isinstance(ast_node, nodes.NodeNG) inferred = ast_node.inferred() self.assertEqual(len(inferred), 1) - self.assertIsInstance(inferred[0], astroid.Const) + self.assertIsInstance(inferred[0], nodes.Const) self.assertEqual(inferred[0].value, 1) def test_enum_subclass_member_method(self) -> None: @@ -391,7 +403,7 @@ class Color(EnumSubclass): assert isinstance(ast_node, nodes.NodeNG) inferred = ast_node.inferred() self.assertEqual(len(inferred), 1) - self.assertIsInstance(inferred[0], astroid.Const) + self.assertIsInstance(inferred[0], nodes.Const) self.assertEqual(inferred[0].value, "red") def test_enum_subclass_different_modules(self) -> None: @@ -418,7 +430,7 @@ class Color(EnumSubclass): assert isinstance(ast_node, nodes.NodeNG) inferred = ast_node.inferred() self.assertEqual(len(inferred), 1) - self.assertIsInstance(inferred[0], 
astroid.Const) + self.assertIsInstance(inferred[0], nodes.Const) self.assertEqual(inferred[0].value, 1) def test_members_member_ignored(self) -> None: @@ -433,7 +445,7 @@ class Animal(Enum): ) inferred = next(ast_node.infer()) - self.assertIsInstance(inferred, astroid.Dict) + self.assertIsInstance(inferred, nodes.Dict) self.assertTrue(inferred.locals) def test_enum_as_renamed_import(self) -> None: diff --git a/tests/brain/test_helpers.py b/tests/brain/test_helpers.py new file mode 100644 index 0000000000..d30fd1d76e --- /dev/null +++ b/tests/brain/test_helpers.py @@ -0,0 +1,85 @@ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE +# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt + +import pytest + +from astroid import extract_node, nodes +from astroid.brain.helpers import is_class_var + + +@pytest.mark.parametrize( + "code", + [ + pytest.param( + """ + from typing import ClassVar + + foo: ClassVar[int] + """, + id="from-import", + ), + pytest.param( + """ + from typing import ClassVar + + foo: ClassVar + """, + id="bare-classvar", + ), + pytest.param( + """ + import typing + + foo: typing.ClassVar[int] + """, + id="module-import", + ), + ], +) +def test_is_class_var_returns_true(code): + node = extract_node(code) + assert isinstance(node, nodes.AnnAssign) + assert is_class_var(node.annotation) + + +@pytest.mark.parametrize( + "code", + [ + pytest.param( + """ + from typing import Final + + foo: Final[int] + """, + id="wrong-name", + ), + pytest.param( + """ + from typing import ClassVar + + foo: list[ClassVar[int]] + """, + id="classvar-not-outermost", + ), + pytest.param( + """ + from typing import ClassVar + ClassVar = int + + foo: ClassVar + """, + id="shadowed-name", + ), + pytest.param( + """ + foo: ClassVar[int] + """, + id="missing-import", + ), + ], +) +def test_is_class_var_returns_false(code): + node = 
extract_node(code) + assert isinstance(node, nodes.AnnAssign) + assert not is_class_var(node.annotation) diff --git a/tests/brain/test_named_tuple.py b/tests/brain/test_named_tuple.py index 40a96c7cee..c26585fb6b 100644 --- a/tests/brain/test_named_tuple.py +++ b/tests/brain/test_named_tuple.py @@ -6,7 +6,6 @@ import unittest -import astroid from astroid import builder, nodes, util from astroid.exceptions import AttributeInferenceError @@ -184,8 +183,8 @@ def test_namedtuple_bases_are_actually_names_not_nodes(self) -> None: """ ) inferred = next(node.infer()) - self.assertIsInstance(inferred, astroid.ClassDef) - self.assertIsInstance(inferred.bases[0], astroid.Name) + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertIsInstance(inferred.bases[0], nodes.Name) self.assertEqual(inferred.bases[0].name, "tuple") def test_invalid_label_does_not_crash_inference(self) -> None: @@ -196,7 +195,7 @@ def test_invalid_label_does_not_crash_inference(self) -> None: """ node = builder.extract_node(code) inferred = next(node.infer()) - assert isinstance(inferred, astroid.ClassDef) + assert isinstance(inferred, nodes.ClassDef) assert "b" not in inferred.locals assert "c" not in inferred.locals diff --git a/tests/brain/test_nose.py b/tests/brain/test_nose.py deleted file mode 100644 index 2b615c1833..0000000000 --- a/tests/brain/test_nose.py +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html -# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE -# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt - -from __future__ import annotations - -import unittest -import warnings - -import astroid -from astroid import builder - -try: - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - import nose # pylint: disable=unused-import - HAS_NOSE = True -except ImportError: - HAS_NOSE = False - - -@unittest.skipUnless(HAS_NOSE, "This test 
requires nose library.") -class NoseBrainTest(unittest.TestCase): - def test_nose_tools(self): - methods = builder.extract_node( - """ - from nose.tools import assert_equal - from nose.tools import assert_equals - from nose.tools import assert_true - assert_equal = assert_equal #@ - assert_true = assert_true #@ - assert_equals = assert_equals #@ - """ - ) - assert isinstance(methods, list) - assert_equal = next(methods[0].value.infer()) - assert_true = next(methods[1].value.infer()) - assert_equals = next(methods[2].value.infer()) - - self.assertIsInstance(assert_equal, astroid.BoundMethod) - self.assertIsInstance(assert_true, astroid.BoundMethod) - self.assertIsInstance(assert_equals, astroid.BoundMethod) - self.assertEqual(assert_equal.qname(), "unittest.case.TestCase.assertEqual") - self.assertEqual(assert_true.qname(), "unittest.case.TestCase.assertTrue") - self.assertEqual(assert_equals.qname(), "unittest.case.TestCase.assertEqual") diff --git a/tests/brain/test_pathlib.py b/tests/brain/test_pathlib.py index b97b3e75fa..2335e28ad6 100644 --- a/tests/brain/test_pathlib.py +++ b/tests/brain/test_pathlib.py @@ -5,7 +5,7 @@ import astroid from astroid import bases -from astroid.const import PY310_PLUS, PY313 +from astroid.const import PY313 from astroid.util import Uninferable @@ -62,11 +62,8 @@ def test_inference_parents_subscript_slice() -> None: ) inferred = name_node.inferred() assert len(inferred) == 1 - if PY310_PLUS: - assert isinstance(inferred[0], bases.Instance) - assert inferred[0].qname() == "builtins.tuple" - else: - assert inferred[0] is Uninferable + assert isinstance(inferred[0], bases.Instance) + assert inferred[0].qname() == "builtins.tuple" def test_inference_parents_subscript_not_path() -> None: diff --git a/tests/brain/test_statistics.py b/tests/brain/test_statistics.py new file mode 100644 index 0000000000..9a65a3306b --- /dev/null +++ b/tests/brain/test_statistics.py @@ -0,0 +1,68 @@ +# Licensed under the LGPL: 
https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE +# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt + +"""Tests for brain statistics module.""" + +from __future__ import annotations + +import unittest + +from astroid import extract_node +from astroid.util import Uninferable + + +class StatisticsBrainTest(unittest.TestCase): + """Test the brain statistics module functionality.""" + + def test_statistics_quantiles_inference(self) -> None: + """Test that statistics.quantiles() returns Uninferable instead of empty list.""" + node = extract_node( + """ + import statistics + statistics.quantiles(list(range(100)), n=4) #@ + """ + ) + inferred = list(node.infer()) + self.assertEqual(len(inferred), 1) + self.assertIs(inferred[0], Uninferable) + + def test_statistics_quantiles_different_args(self) -> None: + """Test statistics.quantiles with different arguments.""" + node = extract_node( + """ + import statistics + statistics.quantiles([1, 2, 3, 4, 5], n=10, method='inclusive') #@ + """ + ) + inferred = list(node.infer()) + self.assertEqual(len(inferred), 1) + self.assertIs(inferred[0], Uninferable) + + def test_statistics_quantiles_assignment_unpacking(self) -> None: + """Test the specific case that was causing false positives.""" + node = extract_node( + """ + import statistics + q1, q2, q3 = statistics.quantiles(list(range(100)), n=4) #@ + """ + ) + call_node = node.value + inferred = list(call_node.infer()) + self.assertEqual(len(inferred), 1) + self.assertIs(inferred[0], Uninferable) + + def test_other_statistics_functions_not_affected(self) -> None: + """Test that other statistics functions are not affected by our brain module.""" + node = extract_node( + """ + import statistics + statistics.mean([1, 2, 3, 4, 5]) #@ + """ + ) + inferred = list(node.infer()) + self.assertNotEqual(len(inferred), 0) + + +if __name__ == "__main__": + unittest.main() diff --git 
a/tests/brain/test_typing.py b/tests/brain/test_typing.py index 11b77b7f9e..744be94586 100644 --- a/tests/brain/test_typing.py +++ b/tests/brain/test_typing.py @@ -70,3 +70,23 @@ def test_infer_typing_alias_incorrect_number_of_arguments( inferred = next(node.value.infer()) assert isinstance(inferred, bases.Instance) assert inferred.name == "_SpecialGenericAlias" + + +class TestSpecialAlias: + @pytest.mark.parametrize( + "code", + [ + "_CallableType()", + "_TupleType()", + ], + ) + def test_special_alias_no_crash_on_empty_args(self, code: str) -> None: + """ + Regression test for: https://github.com/pylint-dev/astroid/issues/2772 + + Test that _CallableType() and _TupleType() calls with no arguments + do not cause an IndexError. + """ + # Should not raise IndexError + module = builder.parse(code) + assert isinstance(module, nodes.Module) diff --git a/tests/resources.py b/tests/resources.py index 853fd796f1..fb3ef1be9c 100644 --- a/tests/resources.py +++ b/tests/resources.py @@ -4,11 +4,14 @@ from __future__ import annotations +import contextlib import os import sys +from collections.abc import Iterator, Sequence from pathlib import Path from astroid import builder +from astroid.manager import AstroidManager from astroid.nodes.scoped_nodes import Module DATA_DIR = Path("testdata") / "python3" @@ -20,7 +23,7 @@ def find(name: str) -> str: def build_file(path: str, modname: str | None = None) -> Module: - return builder.AstroidBuilder().file_build(find(path), modname) + return builder.AstroidBuilder(AstroidManager()).file_build(find(path), modname) class SysPathSetup: @@ -33,3 +36,26 @@ def tearDown(self) -> None: for key in list(sys.path_importer_cache): if key.startswith(datadir): del sys.path_importer_cache[key] + + +def _augment_sys_path(additional_paths: Sequence[str]) -> list[str]: + original = list(sys.path) + changes = [] + seen = set() + for additional_path in additional_paths: + if additional_path not in seen: + changes.append(additional_path) + 
seen.add(additional_path) + + sys.path[:] = changes + sys.path + return original + + +@contextlib.contextmanager +def augmented_sys_path(additional_paths: Sequence[str]) -> Iterator[None]: + """Augment 'sys.path' by adding entries from additional_paths.""" + original = _augment_sys_path(additional_paths) + try: + yield + finally: + sys.path[:] = original diff --git a/tests/test_builder.py b/tests/test_builder.py index b5335d5667..175f75408d 100644 --- a/tests/test_builder.py +++ b/tests/test_builder.py @@ -27,6 +27,7 @@ InferenceError, StatementMissing, ) +from astroid.manager import AstroidManager from astroid.nodes.scoped_nodes import Module from . import resources @@ -519,36 +520,6 @@ def test_object(self) -> None: obj_ast = self.builder.inspect_build(object) self.assertIn("__setattr__", obj_ast) - def test_newstyle_detection(self) -> None: - data = """ - class A: - "old style" - - class B(A): - "old style" - - class C(object): - "new style" - - class D(C): - "new style" - - __metaclass__ = type - - class E(A): - "old style" - - class F: - "new style" - """ - mod_ast = builder.parse(data, __name__) - self.assertTrue(mod_ast["A"].newstyle) - self.assertTrue(mod_ast["B"].newstyle) - self.assertTrue(mod_ast["E"].newstyle) - self.assertTrue(mod_ast["C"].newstyle) - self.assertTrue(mod_ast["D"].newstyle) - self.assertTrue(mod_ast["F"].newstyle) - def test_globals(self) -> None: data = """ CSTE = 1 @@ -766,10 +737,6 @@ def test_module_base_props(self) -> None: self.assertEqual(module.pure_python, 1) self.assertEqual(module.package, 0) self.assertFalse(module.is_statement) - with self.assertRaises(StatementMissing): - with pytest.warns(DeprecationWarning) as records: - self.assertEqual(module.statement(future=True), module) - assert len(records) == 1 with self.assertRaises(StatementMissing): module.statement() @@ -814,9 +781,7 @@ def test_function_base_props(self) -> None: def test_function_locals(self) -> None: """Test the 'locals' dictionary of an astroid 
function.""" _locals = self.module["global_access"].locals - self.assertEqual(len(_locals), 4) - keys = sorted(_locals.keys()) - self.assertEqual(keys, ["i", "key", "local", "val"]) + self.assertEqual(sorted(_locals.keys()), ["i", "key", "local", "val"]) def test_class_base_props(self) -> None: """Test base properties and method of an astroid class.""" @@ -833,19 +798,12 @@ def test_class_base_props(self) -> None: self.assertEqual(klass.parent.frame(), module) self.assertEqual(klass.root(), module) self.assertEqual(klass.basenames, []) - self.assertTrue(klass.newstyle) def test_class_locals(self) -> None: """Test the 'locals' dictionary of an astroid class.""" module = self.module - klass1 = module["YO"] - locals1 = klass1.locals - keys = sorted(locals1.keys()) assert_keys = ["__annotations__", "__init__", "__module__", "__qualname__", "a"] - self.assertEqual(keys, assert_keys) - klass2 = module["YOUPI"] - locals2 = klass2.locals - keys = locals2.keys() + self.assertEqual(sorted(module["YO"].locals.keys()), assert_keys) assert_keys = [ "__annotations__", "__init__", @@ -856,50 +814,43 @@ def test_class_locals(self) -> None: "method", "static_method", ] - self.assertEqual(sorted(keys), assert_keys) + self.assertEqual(sorted(module["YOUPI"].locals.keys()), assert_keys) def test_class_instance_attrs(self) -> None: module = self.module - klass1 = module["YO"] - klass2 = module["YOUPI"] - self.assertEqual(list(klass1.instance_attrs.keys()), ["yo"]) - self.assertEqual(list(klass2.instance_attrs.keys()), ["member"]) + self.assertEqual(list(module["YO"].instance_attrs.keys()), ["yo"]) + self.assertEqual(list(module["YOUPI"].instance_attrs.keys()), ["member"]) def test_class_basenames(self) -> None: module = self.module - klass1 = module["YO"] - klass2 = module["YOUPI"] - self.assertEqual(klass1.basenames, []) - self.assertEqual(klass2.basenames, ["YO"]) + self.assertEqual(module["YO"].basenames, []) + self.assertEqual(module["YOUPI"].basenames, ["YO"]) def 
test_method_base_props(self) -> None: """Test base properties and method of an astroid method.""" - klass2 = self.module["YOUPI"] - # "normal" method - method = klass2["method"] + method = self.module["YOUPI"]["method"] self.assertEqual(method.name, "method") self.assertEqual([n.name for n in method.args.args], ["self"]) assert isinstance(method.doc_node, nodes.Const) self.assertEqual(method.doc_node.value, "method\n test") self.assertEqual(method.fromlineno, 48) self.assertEqual(method.type, "method") - # class method - method = klass2["class_method"] + + def test_class_method_base_props(self) -> None: + method = self.module["YOUPI"]["class_method"] self.assertEqual([n.name for n in method.args.args], ["cls"]) self.assertEqual(method.type, "classmethod") - # static method - method = klass2["static_method"] + + def test_static_method_base_props(self) -> None: + method = self.module["YOUPI"]["static_method"] self.assertEqual(method.args.args, []) self.assertEqual(method.type, "staticmethod") def test_method_locals(self) -> None: """Test the 'locals' dictionary of an astroid method.""" method = self.module["YOUPI"]["method"] - _locals = method.locals - keys = sorted(_locals) # ListComp variables are not accessible outside - self.assertEqual(len(_locals), 3) - self.assertEqual(keys, ["autre", "local", "self"]) + self.assertEqual(sorted(method.locals), ["a", "autre", "local", "self"]) def test_unknown_encoding(self) -> None: with self.assertRaises(AstroidSyntaxError): @@ -910,7 +861,7 @@ def test_module_build_dunder_file() -> None: """Test that module_build() can work with modules that have the *__file__* attribute. 
""" - module = builder.AstroidBuilder().module_build(collections) + module = builder.AstroidBuilder(AstroidManager()).module_build(collections) assert module.path[0] == collections.__file__ @@ -995,7 +946,7 @@ def test_build_from_live_module_without_source_file(self) -> None: with self.assertRaises(AttributeError): _ = self.imported_module.__file__ - my_builder = builder.AstroidBuilder() + my_builder = builder.AstroidBuilder(AstroidManager()) with unittest.mock.patch.object( self.imported_module.__loader__, "get_source", diff --git a/tests/test_constraint.py b/tests/test_constraint.py index 63f62754be..4859d4241f 100644 --- a/tests/test_constraint.py +++ b/tests/test_constraint.py @@ -17,6 +17,8 @@ def common_params(node: str) -> pytest.MarkDecorator: ( (f"{node} is None", None, 3), (f"{node} is not None", 3, None), + (f"{node}", 3, None), + (f"not {node}", None, 3), ), ) @@ -590,3 +592,184 @@ def method(self): assert isinstance(inferred[1], nodes.Const) assert inferred[1].value == fail_val + + +@common_params(node="x") +def test_if_exp_body( + condition: str, satisfy_val: int | None, fail_val: int | None +) -> None: + """Test constraint for a variable that is used in an if exp body.""" + node1, node2 = builder.extract_node( + f""" + def f1(x = {fail_val}): + return ( + x if {condition} else None #@ + ) + + def f2(x = {satisfy_val}): + return ( + x if {condition} else None #@ + ) + """ + ) + + inferred = node1.body.inferred() + assert len(inferred) == 1 + assert inferred[0] is Uninferable + + inferred = node2.body.inferred() + assert len(inferred) == 2 + assert isinstance(inferred[0], nodes.Const) + assert inferred[0].value == satisfy_val + assert inferred[1] is Uninferable + + +@common_params(node="x") +def test_if_exp_else( + condition: str, satisfy_val: int | None, fail_val: int | None +) -> None: + """Test constraint for a variable that is used in an if exp else block.""" + node1, node2 = builder.extract_node( + f""" + def f1(x = {satisfy_val}): + return ( + 
None if {condition} else x #@ + ) + + def f2(x = {fail_val}): + return ( + None if {condition} else x #@ + ) + """ + ) + + inferred = node1.orelse.inferred() + assert len(inferred) == 1 + assert inferred[0] is Uninferable + + inferred = node2.orelse.inferred() + assert len(inferred) == 2 + assert isinstance(inferred[0], nodes.Const) + assert inferred[0].value == fail_val + assert inferred[1] is Uninferable + + +@common_params(node="x") +def test_outside_if_exp( + condition: str, satisfy_val: int | None, fail_val: int | None +) -> None: + """Test that constraint in an if exp condition doesn't apply outside of the if exp.""" + nodes_ = builder.extract_node( + f""" + def f1(x = {fail_val}): + x if {condition} else None + return ( + x #@ + ) + + def f2(x = {satisfy_val}): + None if {condition} else x + return ( + x #@ + ) + """ + ) + for node, val in zip(nodes_, (fail_val, satisfy_val)): + inferred = node.inferred() + assert len(inferred) == 2 + assert isinstance(inferred[0], nodes.Const) + assert inferred[0].value == val + assert inferred[1] is Uninferable + + +@common_params(node="x") +def test_nested_if_exp( + condition: str, satisfy_val: int | None, fail_val: int | None +) -> None: + """Test that constraint in an if exp condition applies within inner if exp.""" + node1, node2 = builder.extract_node( + f""" + def f1(y, x = {fail_val}): + return ( + (x if y else None) if {condition} else None #@ + ) + + def f2(y, x = {satisfy_val}): + return ( + (x if y else None) if {condition} else None #@ + ) + """ + ) + + inferred = node1.body.body.inferred() + assert len(inferred) == 1 + assert inferred[0] is Uninferable + + inferred = node2.body.body.inferred() + assert len(inferred) == 2 + assert isinstance(inferred[0], nodes.Const) + assert inferred[0].value == satisfy_val + assert inferred[1] is Uninferable + + +@common_params(node="self.x") +def test_if_exp_instance_attr( + condition: str, satisfy_val: int | None, fail_val: int | None +) -> None: + """Test constraint for an 
instance attribute in an if exp.""" + node1, node2 = builder.extract_node( + f""" + class A1: + def __init__(self, x = {fail_val}): + self.x = x + + def method(self): + return ( + self.x if {condition} else None #@ + ) + + class A2: + def __init__(self, x = {satisfy_val}): + self.x = x + + def method(self): + return ( + self.x if {condition} else None #@ + ) + """ + ) + + inferred = node1.body.inferred() + assert len(inferred) == 1 + assert inferred[0] is Uninferable + + inferred = node2.body.inferred() + assert len(inferred) == 2 + assert isinstance(inferred[0], nodes.Const) + assert inferred[0].value == satisfy_val + assert inferred[1].value is Uninferable + + +@common_params(node="self.x") +def test_if_exp_instance_attr_varname_collision( + condition: str, satisfy_val: int | None, fail_val: int | None +) -> None: + """Test that constraint in an if exp condition doesn't apply to a variable with the same name.""" + node = builder.extract_node( + f""" + class A: + def __init__(self, x = {fail_val}): + self.x = x + + def method(self, x = {fail_val}): + return ( + x if {condition} else None #@ + ) + """ + ) + + inferred = node.body.inferred() + assert len(inferred) == 2 + assert isinstance(inferred[0], nodes.Const) + assert inferred[0].value == fail_val + assert inferred[1].value is Uninferable diff --git a/tests/test_get_relative_base_path.py b/tests/test_get_relative_base_path.py new file mode 100644 index 0000000000..280a725882 --- /dev/null +++ b/tests/test_get_relative_base_path.py @@ -0,0 +1,119 @@ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE +# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +import os +import tempfile +import unittest + +from astroid import modutils + + +class TestModUtilsRelativePath(unittest.TestCase): + + def setUp(self): + self.cwd = os.getcwd() + + def _run_relative_path_test(self, target, base, 
expected): + if not (target and base): + result = None + else: + base_dir = os.path.join(self.cwd, base) + target_path = os.path.join(self.cwd, target) + result = modutils._get_relative_base_path(target_path, base_dir) + self.assertEqual(result, expected) + + def test_similar_prefixes_no_match(self): + + cases = [ + ("something", "some", None), + ("some-thing", "some", None), + ("some2", "some", None), + ("somedir", "some", None), + ("some_thing", "some", None), + ("some.dir", "some", None), + ] + for target, base, expected in cases: + with self.subTest(target=target, base=base): + self._run_relative_path_test(target, base, expected) + + def test_valid_subdirectories(self): + + cases = [ + ("some/sub", "some", ["sub"]), + ("some/foo/bar", "some", ["foo", "bar"]), + ("some/foo-bar", "some", ["foo-bar"]), + ("some/foo/bar-ext", "some/foo", ["bar-ext"]), + ("something/sub", "something", ["sub"]), + ] + for target, base, expected in cases: + with self.subTest(target=target, base=base): + self._run_relative_path_test(target, base, expected) + + def test_path_format_variations(self): + + cases = [ + ("some", "some", []), + ("some/", "some", []), + ("../some", "some", None), + ] + + if os.path.isabs("/abs/path"): + cases.append(("/abs/path/some", "/abs/path", ["some"])) + + for target, base, expected in cases: + with self.subTest(target=target, base=base): + self._run_relative_path_test(target, base, expected) + + def test_case_sensitivity(self): + + cases = [ + ("Some/sub", "some", None if os.path.sep == "/" else ["sub"]), + ("some/Sub", "some", ["Sub"]), + ] + for target, base, expected in cases: + with self.subTest(target=target, base=base): + self._run_relative_path_test(target, base, expected) + + def test_special_path_components(self): + + cases = [ + ("some/.hidden", "some", [".hidden"]), + ("some/with space", "some", ["with space"]), + ("some/unicode_ø", "some", ["unicode_ø"]), + ] + for target, base, expected in cases: + with self.subTest(target=target, 
base=base): + self._run_relative_path_test(target, base, expected) + + def test_nonexistent_paths(self): + + cases = [("nonexistent", "some", None), ("some/sub", "nonexistent", None)] + for target, base, expected in cases: + with self.subTest(target=target, base=base): + self._run_relative_path_test(target, base, expected) + + def test_empty_paths(self): + + cases = [("", "some", None), ("some", "", None), ("", "", None)] + for target, base, expected in cases: + with self.subTest(target=target, base=base): + self._run_relative_path_test(target, base, expected) + + def test_symlink_resolution(self): + with tempfile.TemporaryDirectory() as tmpdir: + base_dir = os.path.join(tmpdir, "some") + os.makedirs(base_dir, exist_ok=True) + + real_file = os.path.join(base_dir, "real.py") + with open(real_file, "w", encoding="utf-8") as f: + f.write("# dummy content") + + symlink_path = os.path.join(tmpdir, "symlink.py") + os.symlink(real_file, symlink_path) + + result = modutils._get_relative_base_path(symlink_path, base_dir) + self.assertEqual(result, ["real"]) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_group_exceptions.py b/tests/test_group_exceptions.py index 2ee4143fc7..661a824e80 100644 --- a/tests/test_group_exceptions.py +++ b/tests/test_group_exceptions.py @@ -6,18 +6,29 @@ import pytest from astroid import ( - AssignName, - ExceptHandler, - For, - Name, - Try, Uninferable, bases, extract_node, + nodes, ) from astroid.const import PY311_PLUS from astroid.context import InferenceContext -from astroid.nodes import Expr, Raise, TryStar + + +@pytest.mark.skipif(not PY311_PLUS, reason="Exception group introduced in Python 3.11") +def test_group_exceptions_exceptions() -> None: + node = extract_node( + textwrap.dedent( + """ + try: + raise ExceptionGroup('', [TypeError(), TypeError()]) + except ExceptionGroup as eg: + eg.exceptions #@""" + ) + ) + + inferred = node.inferred()[0] + assert isinstance(inferred, nodes.Tuple) @pytest.mark.skipif(not 
PY311_PLUS, reason="Requires Python 3.11 or higher") @@ -35,20 +46,20 @@ def test_group_exceptions() -> None: print("Handling TypeError")""" ) ) - assert isinstance(node, Try) + assert isinstance(node, nodes.Try) handler = node.handlers[0] assert node.block_range(lineno=1) == (1, 9) assert node.block_range(lineno=2) == (2, 2) assert node.block_range(lineno=5) == (5, 9) - assert isinstance(handler, ExceptHandler) + assert isinstance(handler, nodes.ExceptHandler) assert handler.type.name == "ExceptionGroup" children = list(handler.get_children()) assert len(children) == 3 exception_group, short_name, for_loop = children - assert isinstance(exception_group, Name) + assert isinstance(exception_group, nodes.Name) assert exception_group.block_range(1) == (1, 4) - assert isinstance(short_name, AssignName) - assert isinstance(for_loop, For) + assert isinstance(short_name, nodes.AssignName) + assert isinstance(for_loop, nodes.For) @pytest.mark.skipif(not PY311_PLUS, reason="Requires Python 3.11 or higher") @@ -67,9 +78,9 @@ def test_star_exceptions() -> None: sys.exit(0)""" ) node = extract_node(code) - assert isinstance(node, TryStar) + assert isinstance(node, nodes.TryStar) assert node.as_string() == code.replace('"', "'").strip() - assert isinstance(node.body[0], Raise) + assert isinstance(node.body[0], nodes.Raise) assert node.block_range(1) == (1, 11) assert node.block_range(2) == (2, 2) assert node.block_range(3) == (3, 3) @@ -83,13 +94,13 @@ def test_star_exceptions() -> None: assert node.block_range(11) == (11, 11) assert node.handlers handler = node.handlers[0] - assert isinstance(handler, ExceptHandler) + assert isinstance(handler, nodes.ExceptHandler) assert handler.type.name == "ValueError" orelse = node.orelse[0] - assert isinstance(orelse, Expr) + assert isinstance(orelse, nodes.Expr) assert orelse.value.args[0].value == 127 final = node.finalbody[0] - assert isinstance(final, Expr) + assert isinstance(final, nodes.Expr) assert final.value.args[0].value == 0 
@@ -108,3 +119,33 @@ def test_star_exceptions_infer_name() -> None: stmts = bases._infer_stmts([trystar], context) assert list(stmts) == [Uninferable] assert context.lookupname == name + + +@pytest.mark.skipif(not PY311_PLUS, reason="Requires Python 3.11 or higher") +def test_star_exceptions_infer_exceptions() -> None: + code = textwrap.dedent( + """ + try: + raise ExceptionGroup("group", [ValueError(654), TypeError(10)]) + except* ValueError as ve: + print(e.exceptions) + except* TypeError as te: + print(e.exceptions) + else: + sys.exit(127) + finally: + sys.exit(0)""" + ) + node = extract_node(code) + assert isinstance(node, nodes.TryStar) + inferred_ve = next(node.handlers[0].statement().name.infer()) + assert inferred_ve.name == "ExceptionGroup" + assert isinstance(inferred_ve.getattr("exceptions")[0], nodes.List) + assert ( + inferred_ve.getattr("exceptions")[0].elts[0].pytype() == "builtins.ValueError" + ) + + inferred_te = next(node.handlers[1].statement().name.infer()) + assert inferred_te.name == "ExceptionGroup" + assert isinstance(inferred_te.getattr("exceptions")[0], nodes.List) + assert inferred_te.getattr("exceptions")[0].elts[0].pytype() == "builtins.TypeError" diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 2dd94a6ae3..4df145bab9 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -11,7 +11,7 @@ from astroid.builder import AstroidBuilder from astroid.const import IS_PYPY from astroid.exceptions import _NonDeducibleTypeHierarchy -from astroid.nodes.scoped_nodes import ClassDef +from astroid.nodes.node_classes import UNATTACHED_UNKNOWN class TestHelpers(unittest.TestCase): @@ -22,15 +22,14 @@ def setUp(self) -> None: self.builtins = astroid_manager.astroid_cache[builtins_name] self.manager = manager.AstroidManager() - def _extract(self, obj_name: str) -> ClassDef: + def _extract(self, obj_name: str) -> nodes.ClassDef: return self.builtins.getattr(obj_name)[0] - def _build_custom_builtin(self, obj_name: str) -> ClassDef: 
- proxy = raw_building.build_class(obj_name) - proxy.parent = self.builtins + def _build_custom_builtin(self, obj_name: str) -> nodes.ClassDef: + proxy = raw_building.build_class(obj_name, self.builtins) return proxy - def assert_classes_equal(self, cls: ClassDef, other: ClassDef) -> None: + def assert_classes_equal(self, cls: nodes.ClassDef, other: nodes.ClassDef) -> None: self.assertEqual(cls.name, other.name) self.assertEqual(cls.parent, other.parent) self.assertEqual(cls.qname(), other.qname()) @@ -270,7 +269,7 @@ def test_uninferable_for_safe_infer() -> None: def test_safe_infer_shim() -> None: with pytest.warns(DeprecationWarning) as records: - helpers.safe_infer(nodes.Unknown()) + helpers.safe_infer(UNATTACHED_UNKNOWN) assert ( "Import safe_infer from astroid.util; this shim in astroid.helpers will be removed." diff --git a/tests/test_inference.py b/tests/test_inference.py index f1e7192d5f..6a19220495 100644 --- a/tests/test_inference.py +++ b/tests/test_inference.py @@ -19,9 +19,6 @@ import pytest from astroid import ( - Assign, - Const, - Slice, Uninferable, arguments, manager, @@ -34,7 +31,7 @@ from astroid.arguments import CallSite from astroid.bases import BoundMethod, Generator, Instance, UnboundMethod, UnionType from astroid.builder import AstroidBuilder, _extract_single_node, extract_node, parse -from astroid.const import IS_PYPY, PY310_PLUS, PY312_PLUS, PY314_PLUS +from astroid.const import IS_PYPY, PY312_PLUS, PY314_PLUS from astroid.context import CallContext, InferenceContext from astroid.exceptions import ( AstroidTypeError, @@ -43,6 +40,7 @@ NoDefault, NotFoundError, ) +from astroid.manager import AstroidManager from astroid.objects import ExceptionInstance from . 
import resources @@ -59,7 +57,7 @@ def get_node_of_class(start_from: nodes.FunctionDef, klass: type) -> nodes.Attri return next(start_from.nodes_of_class(klass)) -builder = AstroidBuilder() +builder = AstroidBuilder(AstroidManager()) DATA_DIR = Path(__file__).parent / "testdata" / "python3" / "data" @@ -70,7 +68,7 @@ def infer_default(self: Any, *args: InferenceContext) -> None: raise InferenceError infer_default = decoratorsmod.path_wrapper(infer_default) - infer_end = decoratorsmod.path_wrapper(Slice._infer) + infer_end = decoratorsmod.path_wrapper(nodes.Slice._infer) with self.assertRaises(InferenceError): next(infer_default(1)) self.assertEqual(next(infer_end(1)), 1) @@ -664,7 +662,7 @@ def test_fstring_inference(self) -> None: inferred = node.inferred() self.assertEqual(len(inferred), 1) value_node = inferred[0] - self.assertIsInstance(value_node, Const) + self.assertIsInstance(value_node, nodes.Const) self.assertEqual(value_node.value, "Hello John!") def test_float_complex_ambiguity(self) -> None: @@ -1262,6 +1260,7 @@ def randint(maximum): def test_binary_op_or_union_type(self) -> None: """Binary or union is only defined for Python 3.10+.""" + # pylint: disable = too-many-statements code = """ class A: ... @@ -1290,61 +1289,57 @@ class B: ... 
tuple | int #@ """ ast_nodes = extract_node(code) - if not PY310_PLUS: - for n in ast_nodes: - assert n.inferred() == [util.Uninferable] + i0 = ast_nodes[0].inferred()[0] + assert isinstance(i0, UnionType) + assert isinstance(i0.left, nodes.ClassDef) + assert i0.left.name == "int" + assert isinstance(i0.right, nodes.Const) + assert i0.right.value is None + + # Assert basic UnionType properties and methods + assert i0.callable() is False + assert i0.bool_value() is True + assert i0.pytype() == "types.UnionType" + assert i0.display_type() == "UnionType" + if PY314_PLUS: + assert str(i0) == "UnionType(Union)" + assert repr(i0) == f"" else: - i0 = ast_nodes[0].inferred()[0] - assert isinstance(i0, UnionType) - assert isinstance(i0.left, nodes.ClassDef) - assert i0.left.name == "int" - assert isinstance(i0.right, nodes.Const) - assert i0.right.value is None - - # Assert basic UnionType properties and methods - assert i0.callable() is False - assert i0.bool_value() is True - assert i0.pytype() == "types.UnionType" - assert i0.display_type() == "UnionType" - if PY314_PLUS: - assert str(i0) == "UnionType(Union)" - assert repr(i0) == f"" - else: - assert str(i0) == "UnionType(UnionType)" - assert repr(i0) == f"" - - i1 = ast_nodes[1].inferred()[0] - assert isinstance(i1, UnionType) - - i2 = ast_nodes[2].inferred()[0] - assert isinstance(i2, UnionType) - assert isinstance(i2.left, UnionType) - assert isinstance(i2.left.left, nodes.ClassDef) - assert i2.left.left.name == "int" - assert isinstance(i2.left.right, nodes.ClassDef) - assert i2.left.right.name == "str" - assert isinstance(i2.right, nodes.Const) - assert i2.right.value is None - - i3 = ast_nodes[3].inferred()[0] - assert isinstance(i3, UnionType) - assert isinstance(i3.left, nodes.ClassDef) - assert i3.left.name == "A" - assert isinstance(i3.right, nodes.ClassDef) - assert i3.right.name == "B" - - i4 = ast_nodes[4].inferred()[0] - assert isinstance(i4, UnionType) - - i5 = ast_nodes[5].inferred()[0] - assert 
isinstance(i5, UnionType) - assert isinstance(i5.left, nodes.ClassDef) - assert i5.left.name == "List" - - i6 = ast_nodes[6].inferred()[0] - assert isinstance(i6, UnionType) - assert isinstance(i6.left, nodes.ClassDef) - assert i6.left.name == "tuple" + assert str(i0) == "UnionType(UnionType)" + assert repr(i0) == f"" + + i1 = ast_nodes[1].inferred()[0] + assert isinstance(i1, UnionType) + + i2 = ast_nodes[2].inferred()[0] + assert isinstance(i2, UnionType) + assert isinstance(i2.left, UnionType) + assert isinstance(i2.left.left, nodes.ClassDef) + assert i2.left.left.name == "int" + assert isinstance(i2.left.right, nodes.ClassDef) + assert i2.left.right.name == "str" + assert isinstance(i2.right, nodes.Const) + assert i2.right.value is None + + i3 = ast_nodes[3].inferred()[0] + assert isinstance(i3, UnionType) + assert isinstance(i3.left, nodes.ClassDef) + assert i3.left.name == "A" + assert isinstance(i3.right, nodes.ClassDef) + assert i3.right.name == "B" + + i4 = ast_nodes[4].inferred()[0] + assert isinstance(i4, UnionType) + + i5 = ast_nodes[5].inferred()[0] + assert isinstance(i5, UnionType) + assert isinstance(i5.left, nodes.ClassDef) + assert i5.left.name == "List" + + i6 = ast_nodes[6].inferred()[0] + assert isinstance(i6, UnionType) + assert isinstance(i6.left, nodes.ClassDef) + assert i6.left.name == "tuple" code = """ from typing import List @@ -1357,26 +1352,22 @@ class B: ... 
Alias1 | Alias2 #@ """ ast_nodes = extract_node(code) - if not PY310_PLUS: - for n in ast_nodes: - assert n.inferred() == [util.Uninferable] - else: - i0 = ast_nodes[0].inferred()[0] - assert isinstance(i0, UnionType) - assert isinstance(i0.left, nodes.ClassDef) - assert i0.left.name == "List" - - i1 = ast_nodes[1].inferred()[0] - assert isinstance(i1, UnionType) - assert isinstance(i1.left, UnionType) - assert isinstance(i1.left.left, nodes.ClassDef) - assert i1.left.left.name == "str" - - i2 = ast_nodes[2].inferred()[0] - assert isinstance(i2, UnionType) - assert isinstance(i2.left, nodes.ClassDef) - assert i2.left.name == "List" - assert isinstance(i2.right, UnionType) + i0 = ast_nodes[0].inferred()[0] + assert isinstance(i0, UnionType) + assert isinstance(i0.left, nodes.ClassDef) + assert i0.left.name == "List" + + i1 = ast_nodes[1].inferred()[0] + assert isinstance(i1, UnionType) + assert isinstance(i1.left, UnionType) + assert isinstance(i1.left.left, nodes.ClassDef) + assert i1.left.left.name == "str" + + i2 = ast_nodes[2].inferred()[0] + assert isinstance(i2, UnionType) + assert isinstance(i2.left, nodes.ClassDef) + assert i2.left.name == "List" + assert isinstance(i2.right, UnionType) def test_nonregr_lambda_arg(self) -> None: code = """ @@ -1885,6 +1876,10 @@ def do_a_thing(): node = ast["do_a_thing"] self.assertEqual(node.type, "function") + @pytest.mark.skipif( + IS_PYPY, + reason="Persistent recursion error that we ignore and never fix", + ) def test_no_infinite_ancestor_loop(self) -> None: klass = extract_node( """ @@ -2986,6 +2981,14 @@ def __bool__(self): inferred = next(instance.infer()) self.assertIs(inferred.bool_value(), util.Uninferable) + def test_bool_value_not_implemented(self) -> None: + node = extract_node("""NotImplemented""") + inferred = next(node.infer()) + if PY314_PLUS: + self.assertIs(inferred.bool_value(), util.Uninferable) + else: + self.assertIs(inferred.bool_value(), True) + def test_infer_coercion_rules_for_floats_complex(self) 
-> None: ast_nodes = extract_node( """ @@ -5493,7 +5496,7 @@ def add(x, y): else: kwargs = {} - if nums: + if nums is not None: add(*nums) print(**kwargs) """ @@ -5565,7 +5568,7 @@ def test_formatted_fstring_inference(code, result) -> None: if result is None: assert value_node is util.Uninferable else: - assert isinstance(value_node, Const) + assert isinstance(value_node, nodes.Const) assert value_node.value == result @@ -6179,7 +6182,7 @@ def test(self): """ ) inferred = next(node.infer()) - assert isinstance(inferred, Slice) + assert isinstance(inferred, nodes.Slice) def test_exception_lookup_last_except_handler_wins() -> None: @@ -6439,6 +6442,98 @@ def both_branches(): assert [third[0].value, third[1].value] == [1, 2] +def test_ifexp_with_default_arguments() -> None: + code = """ + def with_default(foo: str | None = None): + a = 1 if foo else "bar" #@ + + def without_default(foo: str): + a = 1 if foo else "bar" #@ + + def some_ifexps(foo: str | None = None): + a = 1 if foo else 2 + b = 3 if a else 4 #@ + c = 4 if b else 5 #@ + d = 5 if not foo else foo #@ + e = d if not foo else foo #@ + """ + + ast_nodes = extract_node(code) + + first = ast_nodes[0].value.inferred() + second = ast_nodes[1].value.inferred() + third = ast_nodes[2].value.inferred() + fourth = ast_nodes[3].value.inferred() + fifth = ast_nodes[4].value.inferred() + sixth = ast_nodes[5].value.inferred() + + assert len(first) == 2 + assert [first[0].value, first[1].value] == [1, "bar"] + + assert len(second) == 2 + assert [second[0].value, second[1].value] == [1, "bar"] + + assert len(third) == 1 + assert third[0].value == 3 + + assert len(fourth) == 1 + assert fourth[0].value == 4 + + assert len(fifth) == 2 + assert [fifth[0].value, fifth[1].value] == [5, Uninferable] + + assert len(sixth) == 3 + assert [sixth[0].value, sixth[1].value, sixth[2].value] == [ + 5, + Uninferable, + Uninferable, + ] + + +def test_ifexp_with_uninferables() -> None: + code = """ + def truthy_and_falsy(): + return False if 
unknown() else True + + def truthy_and_uninferable(): + return False if unknown() else unknown() + + def calls_truthy_and_falsy(): + return 1 if truthy_and_falsy() else 2 + + def calls_truthy_and_uninferable(): + return 1 if range(10) else truthy_and_uninferable() + + truthy_and_falsy() #@ + truthy_and_uninferable() #@ + calls_truthy_and_falsy() #@ + calls_truthy_and_uninferable() #@ + """ + + ast_nodes = extract_node(code) + + first = ast_nodes[0].inferred() + second = ast_nodes[1].inferred() + third = ast_nodes[2].inferred() + fourth = ast_nodes[3].inferred() + + assert len(first) == 2 + assert [first[0].value, first[1].value] == [False, True] + + assert len(second) == 2 + assert [second[0].value, second[1].value] == [False, Uninferable] + + assert len(third) == 2 + assert [third[0].value, third[1].value] == [1, 2] + + assert len(fourth) == 3 + assert [fourth[0].value, fourth[1].value, fourth[2].value] == [ + 1, + False, + Uninferable, + ] + + def test_assert_last_function_returns_none_on_inference() -> None: code = """ def check_equal(a, b): @@ -6884,20 +6979,15 @@ class A: @property def a(self): return 42 - - A() """ - node = extract_node(code) - # Infer the class - cls = next(node.infer()) + cls = extract_node(code) (prop,) = cls.getattr("a") - # Try to infer the property function *multiple* times. 
`A.locals` should be modified only once + assert len(cls.locals["a"]) == 1 for _ in range(3): prop.inferred() a_locals = cls.locals["a"] - # [FunctionDef, Property] - assert len(a_locals) == 2 + assert len(a_locals) == 1 def test_getattr_fails_on_empty_values() -> None: @@ -7410,16 +7500,24 @@ class Cls: """, "<__main__.Cls", ), - ("s1 = f'{5}' #@", "5"), + ( + "s1 = f'{5}' #@", + "5", + ), + ("s1 = f'{missing}'", None), + ("s1 = f'a/{missing}/b'", None), ], ) def test_joined_str_returns_string(source, expected) -> None: """Regression test for https://github.com/pylint-dev/pylint/issues/9947.""" node = extract_node(source) - assert isinstance(node, Assign) + assert isinstance(node, nodes.Assign) target = node.targets[0] assert target inferred = list(target.inferred()) assert len(inferred) == 1 - assert isinstance(inferred[0], Const) - inferred[0].value.startswith(expected) + if expected: + assert isinstance(inferred[0], nodes.Const) + inferred[0].value.startswith(expected) + else: + assert inferred[0] is Uninferable diff --git a/tests/test_lookup.py b/tests/test_lookup.py index b452d62894..bcee8f6746 100644 --- a/tests/test_lookup.py +++ b/tests/test_lookup.py @@ -322,24 +322,6 @@ class _Inner: self.assertEqual(len(name.lookup("x")[1]), 1, repr(name)) self.assertEqual(name.lookup("x")[1][0].lineno, 3, repr(name)) - def test_generator_attributes(self) -> None: - tree = builder.parse( - """ - def count(): - "test" - yield 0 - - iterer = count() - num = iterer.next() - """ - ) - next_node = tree.body[2].value.func - gener = next_node.expr.inferred()[0] - self.assertIsInstance(gener.getattr("__next__")[0], nodes.FunctionDef) - self.assertIsInstance(gener.getattr("send")[0], nodes.FunctionDef) - self.assertIsInstance(gener.getattr("throw")[0], nodes.FunctionDef) - self.assertIsInstance(gener.getattr("close")[0], nodes.FunctionDef) - def test_explicit___name__(self) -> None: code = """ class Pouet: diff --git a/tests/test_manager.py b/tests/test_manager.py index 
9a7bbdb7ef..e420e6b150 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -15,7 +15,7 @@ import pytest import astroid -from astroid import manager, test_utils +from astroid import manager, nodes, test_utils from astroid.const import IS_JYTHON, IS_PYPY, PY312_PLUS from astroid.exceptions import ( AstroidBuildingError, @@ -107,7 +107,7 @@ def _test_ast_from_old_namespace_package_protocol(self, root: str) -> None: try: for name in ("foo", "bar", "baz"): module = self.manager.ast_from_module_name("package." + name) - self.assertIsInstance(module, astroid.Module) + self.assertIsInstance(module, nodes.Module) finally: sys.path = origpath @@ -120,8 +120,15 @@ def test_ast_from_namespace_pkg_resources(self) -> None: def test_identify_old_namespace_package_protocol(self) -> None: # Like the above cases, this package follows the old namespace package protocol # astroid currently assumes such packages are in sys.modules, so import it - # pylint: disable-next=import-outside-toplevel - import tests.testdata.python3.data.path_pkg_resources_1.package.foo as _ # noqa + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + category=UserWarning, + message=".*pkg_resources is deprecated.*", + ) + + # pylint: disable-next=import-outside-toplevel + import tests.testdata.python3.data.path_pkg_resources_1.package.foo as _ # noqa self.assertTrue( util.is_namespace("tests.testdata.python3.data.path_pkg_resources_1") @@ -174,10 +181,10 @@ def test_implicit_namespace_package(self) -> None: try: module = self.manager.ast_from_module_name("namespace_pep_420.module") - self.assertIsInstance(module, astroid.Module) + self.assertIsInstance(module, nodes.Module) self.assertEqual(module.name, "namespace_pep_420.module") var = next(module.igetattr("var")) - self.assertIsInstance(var, astroid.Const) + self.assertIsInstance(var, nodes.Const) self.assertEqual(var.value, 42) finally: for _ in range(2): @@ -195,7 +202,7 @@ def test_namespace_package_pth_support(self) -> 
None: module = self.manager.ast_from_module_name("foogle.fax") submodule = next(module.igetattr("a")) value = next(submodule.igetattr("x")) - self.assertIsInstance(value, astroid.Const) + self.assertIsInstance(value, nodes.Const) with self.assertRaises(AstroidImportError): self.manager.ast_from_module_name("foogle.moogle") finally: @@ -277,6 +284,17 @@ def test_ast_from_module_name_pyz(self) -> None: finally: os.remove(linked_file_name) + def test_ast_from_module_name_pyz_with_submodule(self) -> None: + with self._restore_package_cache(): + archive_path = os.path.join(resources.RESOURCE_PATH, "x.zip") + sys.path.insert(0, archive_path) + module = self.manager.ast_from_module_name("xxx.test") + self.assertEqual(module.name, "xxx.test") + end = os.path.join(archive_path, "xxx", "test") + self.assertTrue( + module.file.endswith(end), f"{module.file} doesn't endswith {end}" + ) + def test_zip_import_data(self) -> None: """Check if zip_import_data works.""" with self._restore_package_cache(): diff --git a/tests/test_modutils.py b/tests/test_modutils.py index 8ea7713abe..1d6959f11c 100644 --- a/tests/test_modutils.py +++ b/tests/test_modutils.py @@ -20,7 +20,6 @@ import astroid from astroid import modutils -from astroid.const import PY310_PLUS from astroid.interpreter._import import spec from . import resources @@ -162,7 +161,7 @@ def test_known_values_modpath_from_file_1(self) -> None: ) def test_raise_modpath_from_file_exception(self) -> None: - self.assertRaises(Exception, modutils.modpath_from_file, "/turlututu") + self.assertRaises(ImportError, modutils.modpath_from_file, "/turlututu") def test_import_symlink_with_source_outside_of_path(self) -> None: with tempfile.NamedTemporaryFile() as tmpfile: @@ -175,6 +174,37 @@ def test_import_symlink_with_source_outside_of_path(self) -> None: finally: os.remove(linked_file_name) + def test_modpath_from_file_path_order(self) -> None: + """Test for ordering of paths. + The test does the following: + 1. 
Add a tmp directory to beginning of sys.path via augmented_sys_path + 2. Create a module file in sub directory of tmp directory + 3. If the sub directory is passed as additional directory, module name + should be relative to the subdirectory since additional directory has + higher precedence.""" + with tempfile.TemporaryDirectory() as tmp_dir: + with resources.augmented_sys_path([tmp_dir]): + mod_name = "module" + sub_dirname = "subdir" + sub_dir = tmp_dir + "/" + sub_dirname + os.mkdir(sub_dir) + module_file = f"{sub_dir}/{mod_name}.py" + + with open(module_file, "w+", encoding="utf-8"): + pass + + # Without additional directory, return relative to tmp_dir + self.assertEqual( + modutils.modpath_from_file(module_file), [sub_dirname, mod_name] + ) + + # With sub directory as additional directory, return relative to + # sub directory + self.assertEqual( + modutils.modpath_from_file(f"{sub_dir}/{mod_name}.py", [sub_dir]), + [mod_name], + ) + def test_import_symlink_both_outside_of_path(self) -> None: with tempfile.NamedTemporaryFile() as tmpfile: linked_file_name = os.path.join(tempfile.gettempdir(), "symlinked_file.py") @@ -471,18 +501,6 @@ def test_failure(self) -> None: assert not modutils.module_in_path("astroid", datadir) -class BackportStdlibNamesTest(resources.SysPathSetup, unittest.TestCase): - """ - Verify backport raises exception on newer versions - """ - - @pytest.mark.skipif(not PY310_PLUS, reason="Backport valid on <=3.9") - def test_import_error(self) -> None: - with pytest.raises(AssertionError): - # pylint: disable-next=import-outside-toplevel, unused-import - from astroid import _backport_stdlib_names # noqa - - class IsRelativeTest(unittest.TestCase): def test_known_values_is_relative_1(self) -> None: self.assertTrue(modutils.is_relative("utils", email.__path__[0])) diff --git a/tests/test_nodes.py b/tests/test_nodes.py index 3c0e7c218f..8d2f4d5462 100644 --- a/tests/test_nodes.py +++ b/tests/test_nodes.py @@ -13,6 +13,7 @@ import sys import 
textwrap import unittest +import warnings from typing import Any import pytest @@ -28,34 +29,33 @@ transforms, util, ) -from astroid.const import IS_PYPY, PY310_PLUS, PY311_PLUS, PY312_PLUS, Context +from astroid.const import ( + IS_PYPY, + PY311_PLUS, + PY312_PLUS, + PY313_PLUS, + PY314_PLUS, + Context, +) from astroid.context import InferenceContext from astroid.exceptions import ( AstroidBuildingError, AstroidSyntaxError, AttributeInferenceError, - ParentMissingError, StatementMissing, ) -from astroid.nodes.node_classes import ( - AssignAttr, - AssignName, - Attribute, - Call, - ImportFrom, - Tuple, -) -from astroid.nodes.scoped_nodes import ClassDef, FunctionDef, GeneratorExp, Module +from astroid.nodes.node_classes import UNATTACHED_UNKNOWN +from astroid.nodes.scoped_nodes import SYNTHETIC_ROOT from tests.testdata.python3.recursion_error import LONG_CHAINED_METHOD_CALL from . import resources -abuilder = builder.AstroidBuilder() +abuilder = builder.AstroidBuilder(astroid.MANAGER) class AsStringTest(resources.SysPathSetup, unittest.TestCase): def test_tuple_as_string(self) -> None: - def build(string: str) -> Tuple: + def build(string: str) -> nodes.Tuple: return abuilder.string_build(string).body[0].value self.assertEqual(build("1,").as_string(), "(1, )") @@ -109,11 +109,31 @@ def test_varargs_kwargs_as_string(self) -> None: ast = abuilder.string_build("raise_string(*args, **kwargs)").body[0] self.assertEqual(ast.as_string(), "raise_string(*args, **kwargs)") - def test_module_as_string(self) -> None: - """Check as_string on a whole module prepared to be returned identically.""" + @pytest.mark.skipif(PY314_PLUS, reason="return in finally is now a syntax error") + def test_module_as_string_pre_3_14(self) -> None: + """Check as_string on a whole module prepared to be returned identically for py < 3.14.""" + self.maxDiff = None module = resources.build_file("data/module.py", "data.module") with open(resources.find("data/module.py"), encoding="utf-8") as fobj: - 
self.assertMultiLineEqual(module.as_string(), fobj.read()) + # Ignore comments in python file + data_str = "\n".join( + [s for s in fobj.read().split("\n") if not s.lstrip().startswith("# ")] + ) + self.assertMultiLineEqual(module.as_string(), data_str) + + @pytest.mark.skipif( + not PY314_PLUS, reason="return in finally is now a syntax error" + ) + def test_module_as_string(self) -> None: + """Check as_string on a whole module prepared to be returned identically for py > 3.14.""" + self.maxDiff = None + module = resources.build_file("data/module3.14.py", "data.module3.14") + with open(resources.find("data/module3.14.py"), encoding="utf-8") as fobj: + # Ignore comments in python file + data_str = "\n".join( + [s for s in fobj.read().split("\n") if not s.lstrip().startswith("# ")] + ) + self.assertMultiLineEqual(module.as_string(), data_str) def test_module2_as_string(self) -> None: """Check as_string on a whole module prepared to be returned identically.""" @@ -276,8 +296,10 @@ def test_f_strings(self): @staticmethod def test_as_string_unknown() -> None: - assert nodes.Unknown().as_string() == "Unknown.Unknown()" - assert nodes.Unknown(lineno=1, col_offset=0).as_string() == "Unknown.Unknown()" + unknown1 = nodes.Unknown(parent=SYNTHETIC_ROOT) + unknown2 = nodes.Unknown(lineno=1, col_offset=0, parent=SYNTHETIC_ROOT) + assert unknown1.as_string() == "Unknown.Unknown()" + assert unknown2.as_string() == "Unknown.Unknown()" @staticmethod @pytest.mark.skipif( @@ -298,15 +320,34 @@ def test_recursion_error_trapped() -> None: class AsStringTypeParamNodes(unittest.TestCase): @staticmethod def test_as_string_type_alias() -> None: - ast = abuilder.string_build("type Point = tuple[float, float]") - type_alias = ast.body[0] - assert type_alias.as_string().strip() == "Point" + ast1 = abuilder.string_build("type Point = tuple[float, float]") + type_alias1 = ast1.body[0] + assert type_alias1.as_string().strip() == "type Point = tuple[float, float]" + ast2 = abuilder.string_build( 
+ "type Point[T, **P] = tuple[float, T, Callable[P, None]]" + ) + type_alias2 = ast2.body[0] + assert ( + type_alias2.as_string().strip() + == "type Point[T, **P] = tuple[float, T, Callable[P, None]]" + ) @staticmethod def test_as_string_type_var() -> None: - ast = abuilder.string_build("type Point[T] = tuple[float, float]") + ast = abuilder.string_build("type Point[T: int | str] = tuple[float, float]") + type_var = ast.body[0].type_params[0] + assert type_var.as_string().strip() == "T: int | str" + + @staticmethod + @pytest.mark.skipif( + not PY313_PLUS, reason="Type parameter defaults were added in 313" + ) + def test_as_string_type_var_default() -> None: + ast = abuilder.string_build( + "type Point[T: int | str = int] = tuple[float, float]" + ) type_var = ast.body[0].type_params[0] - assert type_var.as_string().strip() == "T" + assert type_var.as_string().strip() == "T: int | str = int" @staticmethod def test_as_string_type_var_tuple() -> None: @@ -314,11 +355,41 @@ def test_as_string_type_var_tuple() -> None: type_var_tuple = ast.body[0].type_params[0] assert type_var_tuple.as_string().strip() == "*Ts" + @staticmethod + @pytest.mark.skipif( + not PY313_PLUS, reason="Type parameter defaults were added in 313" + ) + def test_as_string_type_var_tuple_defaults() -> None: + ast = abuilder.string_build("type Alias[*Ts = tuple[int, str]] = tuple[*Ts]") + type_var_tuple = ast.body[0].type_params[0] + assert type_var_tuple.as_string().strip() == "*Ts = tuple[int, str]" + @staticmethod def test_as_string_param_spec() -> None: ast = abuilder.string_build("type Alias[**P] = Callable[P, int]") param_spec = ast.body[0].type_params[0] - assert param_spec.as_string().strip() == "P" + assert param_spec.as_string().strip() == "**P" + + @staticmethod + @pytest.mark.skipif( + not PY313_PLUS, reason="Type parameter defaults were added in 313" + ) + def test_as_string_param_spec_defaults() -> None: + ast = abuilder.string_build("type Alias[**P = [str, int]] = Callable[P, int]") + 
param_spec = ast.body[0].type_params[0] + assert param_spec.as_string().strip() == "**P = [str, int]" + + @staticmethod + def test_as_string_class_type_params() -> None: + code = abuilder.string_build("class A[T, **P]: ...") + cls_node = code.body[0] + assert cls_node.as_string().strip() == "class A[T, **P]:\n ..." + + @staticmethod + def test_as_string_function_type_params() -> None: + code = abuilder.string_build("def func[T, **P](): ...") + func_node = code.body[0] + assert func_node.as_string().strip() == "def func[T, **P]():\n ..." class _NodeTest(unittest.TestCase): @@ -327,7 +398,7 @@ class _NodeTest(unittest.TestCase): CODE = "" @property - def astroid(self) -> Module: + def astroid(self) -> nodes.Module: try: return self.__class__.__dict__["CODE_Astroid"] except KeyError: @@ -522,9 +593,7 @@ def test_as_string(self) -> None: ast = self.module["modutils"] self.assertEqual(ast.as_string(), "from astroid import modutils") ast = self.module["NameNode"] - self.assertEqual( - ast.as_string(), "from astroid.nodes.node_classes import Name as NameNode" - ) + self.assertEqual(ast.as_string(), "from astroid.nodes import Name as NameNode") ast = self.module["os"] self.assertEqual(ast.as_string(), "import os.path") code = """from . 
import here @@ -615,19 +684,9 @@ def _test(self, value: Any) -> None: self.assertIs(node.value, value) self.assertTrue(node._proxied.parent) self.assertEqual(node._proxied.root().name, value.__class__.__module__) - with self.assertRaises(StatementMissing): - with pytest.warns(DeprecationWarning) as records: - node.statement(future=True) - assert len(records) == 1 with self.assertRaises(StatementMissing): node.statement() - - with self.assertRaises(ParentMissingError): - with pytest.warns(DeprecationWarning) as records: - node.frame(future=True) - assert len(records) == 1 - with self.assertRaises(ParentMissingError): - node.frame() + assert node.frame() is SYNTHETIC_ROOT def test_none(self) -> None: self._test(None) @@ -834,8 +893,8 @@ def test_complex(self) -> None: assign = builder.extract_node(code) self.assertIsInstance(assign, nodes.AnnAssign) self.assertEqual(assign.target.name, "test") - self.assertIsInstance(assign.annotation, astroid.Subscript) - self.assertIsInstance(assign.value, astroid.Dict) + self.assertIsInstance(assign.annotation, nodes.Subscript) + self.assertIsInstance(assign.value, nodes.Dict) def test_as_string(self) -> None: code = textwrap.dedent( @@ -1203,30 +1262,30 @@ class AliasesTest(unittest.TestCase): def setUp(self) -> None: self.transformer = transforms.TransformVisitor() - def parse_transform(self, code: str) -> Module: + def parse_transform(self, code: str) -> nodes.Module: module = parse(code, apply_transforms=False) return self.transformer.visit(module) def test_aliases(self) -> None: - def test_from(node: ImportFrom) -> ImportFrom: + def test_from(node: nodes.ImportFrom) -> nodes.ImportFrom: node.names = [*node.names, ("absolute_import", None)] return node - def test_class(node: ClassDef) -> ClassDef: + def test_class(node: nodes.ClassDef) -> nodes.ClassDef: node.name = "Bar" return node - def test_function(node: FunctionDef) -> FunctionDef: + def test_function(node: nodes.FunctionDef) -> nodes.FunctionDef: node.name = 
"another_test" return node - def test_callfunc(node: Call) -> Call | None: + def test_callfunc(node: nodes.Call) -> nodes.Call | None: if node.func.name == "Foo": node.func.name = "Bar" return node return None - def test_assname(node: AssignName) -> AssignName | None: + def test_assname(node: nodes.AssignName) -> nodes.AssignName | None: if node.name == "foo": return nodes.AssignName( "bar", @@ -1238,19 +1297,19 @@ def test_assname(node: AssignName) -> AssignName | None: ) return None - def test_assattr(node: AssignAttr) -> AssignAttr: + def test_assattr(node: nodes.AssignAttr) -> nodes.AssignAttr: if node.attrname == "a": node.attrname = "b" return node return None - def test_getattr(node: Attribute) -> Attribute: + def test_getattr(node: nodes.Attribute) -> nodes.Attribute: if node.attrname == "a": node.attrname = "b" return node return None - def test_genexpr(node: GeneratorExp) -> GeneratorExp: + def test_genexpr(node: nodes.GeneratorExp) -> nodes.GeneratorExp: if node.elt.value == 1: node.elt = nodes.Const(2, node.lineno, node.col_offset, node.parent) return node @@ -1443,9 +1502,9 @@ def test_starred_store(self) -> None: def test_unknown() -> None: """Test Unknown node.""" - assert isinstance(next(nodes.Unknown().infer()), type(util.Uninferable)) - assert isinstance(nodes.Unknown().name, str) - assert isinstance(nodes.Unknown().qname(), str) + assert isinstance(next(UNATTACHED_UNKNOWN.infer()), type(util.Uninferable)) + assert isinstance(UNATTACHED_UNKNOWN.name, str) + assert isinstance(UNATTACHED_UNKNOWN.qname(), str) def test_type_comments_with() -> None: @@ -1459,7 +1518,7 @@ def test_type_comments_with() -> None: ) node = module.body[0] ignored_node = module.body[1] - assert isinstance(node.type_annotation, astroid.Name) + assert isinstance(node.type_annotation, nodes.Name) assert ignored_node.type_annotation is None @@ -1475,7 +1534,7 @@ def test_type_comments_for() -> None: ) node = module.body[0] ignored_node = module.body[1] - assert 
isinstance(node.type_annotation, astroid.Subscript) + assert isinstance(node.type_annotation, nodes.Subscript) assert node.type_annotation.as_string() == "List[int]" assert ignored_node.type_annotation is None @@ -1490,7 +1549,7 @@ def test_type_coments_assign() -> None: ) node = module.body[0] ignored_node = module.body[1] - assert isinstance(node.type_annotation, astroid.Subscript) + assert isinstance(node.type_annotation, nodes.Subscript) assert node.type_annotation.as_string() == "List[int]" assert ignored_node.type_annotation is None @@ -1546,9 +1605,9 @@ def func2(): """ ) expected_annotations = [ - (["int"], astroid.Name, "str"), - (["int", "int", "int"], astroid.Tuple, "(str, str)"), - (["int", "int", "str", "List[int]"], astroid.Subscript, "List[int]"), + (["int"], nodes.Name, "str"), + (["int", "int", "int"], nodes.Tuple, "(str, str)"), + (["int", "int", "str", "List[int]"], nodes.Subscript, "List[int]"), ] for node, (expected_args, expected_returns_type, expected_returns_string) in zip( module.body, expected_annotations @@ -1593,7 +1652,7 @@ def func2( ] for node, expected_args in zip(module.body, expected_annotations): assert len(node.type_comment_args) == 1 - assert isinstance(node.type_comment_args[0], astroid.Const) + assert isinstance(node.type_comment_args[0], nodes.Const) assert node.type_comment_args[0].value == Ellipsis assert len(node.args.type_comment_args) == len(expected_args) for expected_arg, actual_arg in zip(expected_args, node.args.type_comment_args): @@ -1622,7 +1681,7 @@ def f_arg_comment( ] for node, expected_types in zip(module.body, expected_annotations): assert len(node.type_comment_args) == 1 - assert isinstance(node.type_comment_args[0], astroid.Const) + assert isinstance(node.type_comment_args[0], nodes.Const) assert node.type_comment_args[0].value == Ellipsis type_comments = [ node.args.type_comment_posonlyargs, @@ -1665,7 +1724,7 @@ def test(self): assert bool(inferred.is_generator()) -class AsyncGeneratorTest: +class 
AsyncGeneratorTest(unittest.TestCase): def test_async_generator(self): node = astroid.extract_node( """ @@ -1683,23 +1742,6 @@ async def a_iter(n): assert inferred.pytype() == "builtins.async_generator" assert inferred.display_type() == "AsyncGenerator" - def test_async_generator_is_generator_on_older_python(self): - node = astroid.extract_node( - """ - async def a_iter(n): - for i in range(1, n + 1): - yield i - await asyncio.sleep(1) - a_iter(2) #@ - """ - ) - inferred = next(node.infer()) - assert isinstance(inferred, bases.Generator) - assert inferred.getattr("__iter__") - assert inferred.getattr("__next__") - assert inferred.pytype() == "builtins.generator" - assert inferred.display_type() == "Generator" - def test_f_string_correct_line_numbering() -> None: """Test that we generate correct line numbers for f-strings.""" @@ -1860,15 +1902,15 @@ def g( assert len(type_comments) == 1 type_comment = type_comments[0] - assert isinstance(type_comment, astroid.Attribute) - assert isinstance(type_comment.parent, astroid.Expr) - assert isinstance(type_comment.parent.parent, astroid.Arguments) + assert isinstance(type_comment, nodes.Attribute) + assert isinstance(type_comment.parent, nodes.Expr) + assert isinstance(type_comment.parent.parent, nodes.Arguments) def test_const_itered() -> None: code = 'a = "string"' node = astroid.extract_node(code).value - assert isinstance(node, astroid.Const) + assert isinstance(node, nodes.Const) itered = node.itered() assert len(itered) == 6 assert [elem.value for elem in itered] == list("string") @@ -1910,7 +1952,6 @@ def test(): assert bool(node.is_generator()) -@pytest.mark.skipif(not PY310_PLUS, reason="pattern matching was added in PY310") class TestPatternMatching: @staticmethod def test_match_simple(): @@ -1938,12 +1979,12 @@ def test_match_simple(): assert isinstance(case0.pattern, nodes.MatchValue) assert ( - isinstance(case0.pattern.value, astroid.Const) + isinstance(case0.pattern.value, nodes.Const) and 
case0.pattern.value.value == 200 ) assert list(case0.pattern.get_children()) == [case0.pattern.value] assert case0.guard is None - assert isinstance(case0.body[0], astroid.Pass) + assert isinstance(case0.body[0], nodes.Pass) assert list(case0.get_children()) == [case0.pattern, case0.body[0]] assert isinstance(case1.pattern, nodes.MatchOr) @@ -2167,6 +2208,41 @@ def return_from_match(x): assert [inf.value for inf in inferred] == [10, -1] +@pytest.mark.skipif(not PY314_PLUS, reason="TemplateStr was added in PY314") +class TestTemplateString: + @staticmethod + def test_template_string_simple() -> None: + code = textwrap.dedent( + """ + name = "Foo" + place = 3 + t"{name} finished {place!r:ordinal}" #@ + """ + ).strip() + node = builder.extract_node(code) + assert node.as_string() == "t'{name} finished {place!r:ordinal}'" + assert isinstance(node, nodes.TemplateStr) + assert len(node.values) == 3 + value = node.values[0] + assert isinstance(value, nodes.Interpolation) + assert isinstance(value.value, nodes.Name) + assert value.value.name == "name" + assert value.str == "name" + assert value.conversion == -1 + assert value.format_spec is None + value = node.values[1] + assert isinstance(value, nodes.Const) + assert value.pytype() == "builtins.str" + assert value.value == " finished " + value = node.values[2] + assert isinstance(value, nodes.Interpolation) + assert isinstance(value.value, nodes.Name) + assert value.value.name == "place" + assert value.str == "place" + assert value.conversion == ord("r") + assert isinstance(value.format_spec, nodes.JoinedStr) + + @pytest.mark.parametrize( "node", [ @@ -2184,13 +2260,15 @@ def test_str_repr_no_warnings(node): if name == "self": continue - if "int" in param_type.annotation: + if name == "parent" and "NodeNG" in param_type.annotation: + args[name] = SYNTHETIC_ROOT + elif "int" in param_type.annotation: args[name] = random.randint(0, 50) elif ( "NodeNG" in param_type.annotation or "SuccessfulInferenceResult" in 
param_type.annotation ): - args[name] = nodes.Unknown() + args[name] = UNATTACHED_UNKNOWN elif "str" in param_type.annotation: args[name] = "" else: @@ -2234,3 +2312,21 @@ def test_arguments_default_value(): node = extract_node("def fruit(seeds, flavor='good', *, peel='maybe'): ...") assert node.args.default_value("flavor").value == "good" + + +def test_deprecated_nodes_import_from_toplevel(): + # pylint: disable=import-outside-toplevel,no-name-in-module + with pytest.raises( + DeprecationWarning, match="importing 'For' from 'astroid' is deprecated" + ): + from astroid import For + + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + from astroid import For + + assert For is nodes.For + + # This should not raise a DeprecationWarning + # pylint: disable-next=unused-import + from astroid import builtin_lookup diff --git a/tests/test_nodes_lineno.py b/tests/test_nodes_lineno.py index c8a8839e21..f8c6f91152 100644 --- a/tests/test_nodes_lineno.py +++ b/tests/test_nodes_lineno.py @@ -4,11 +4,9 @@ import textwrap -import pytest - import astroid from astroid import builder, nodes -from astroid.const import PY310_PLUS, PY312_PLUS +from astroid.const import PY312_PLUS class TestLinenoColOffset: @@ -557,6 +555,7 @@ def test_end_lineno_const() -> None: @staticmethod def test_end_lineno_function() -> None: """FunctionDef, AsyncFunctionDef, Decorators, Lambda, Arguments.""" + # pylint: disable = too-many-statements code = textwrap.dedent( """ def func( #@ @@ -986,11 +985,11 @@ def test_end_lineno_string() -> None: assert (s4.value.end_lineno, s4.value.end_col_offset) == (2, 14) @staticmethod - @pytest.mark.skipif(not PY310_PLUS, reason="pattern matching was added in PY310") def test_end_lineno_match() -> None: """Match, MatchValue, MatchSingleton, MatchSequence, MatchMapping, MatchClass, MatchStar, MatchOr, MatchAs. 
""" + # pylint: disable = too-many-statements code = textwrap.dedent( """ match x: #@ diff --git a/tests/test_object_model.py b/tests/test_object_model.py index 9ad4d39a90..f3015db9c6 100644 --- a/tests/test_object_model.py +++ b/tests/test_object_model.py @@ -38,21 +38,21 @@ def __init__(self): ) assert isinstance(ast_nodes, list) cls = next(ast_nodes[0].infer()) - self.assertIsInstance(cls, astroid.ClassDef) + self.assertIsInstance(cls, nodes.ClassDef) self.assertEqual(cls.name, "A") module = next(ast_nodes[1].infer()) - self.assertIsInstance(module, astroid.Const) + self.assertIsInstance(module, nodes.Const) self.assertEqual(module.value, "fake_module") doc = next(ast_nodes[2].infer()) - self.assertIsInstance(doc, astroid.Const) + self.assertIsInstance(doc, nodes.Const) self.assertEqual(doc.value, "test") dunder_dict = next(ast_nodes[3].infer()) - self.assertIsInstance(dunder_dict, astroid.Dict) - attr = next(dunder_dict.getitem(astroid.Const("a")).infer()) - self.assertIsInstance(attr, astroid.Const) + self.assertIsInstance(dunder_dict, nodes.Dict) + attr = next(dunder_dict.getitem(nodes.Const("a")).infer()) + self.assertIsInstance(attr, nodes.Const) self.assertEqual(attr.value, 42) @pytest.mark.xfail(reason="Instance lookup cannot override object model") @@ -68,7 +68,7 @@ def __dict__(self): """ ) inferred = next(ast_node.infer()) - self.assertIsInstance(inferred, astroid.List) + self.assertIsInstance(inferred, nodes.List) self.assertEqual(inferred.elts, []) @@ -85,7 +85,7 @@ def test(self): pass ) assert isinstance(ast_nodes, list) func = next(ast_nodes[0].infer()) - self.assertIsInstance(func, astroid.FunctionDef) + self.assertIsInstance(func, nodes.FunctionDef) self.assertEqual(func.name, "test") self_ = next(ast_nodes[1].infer()) @@ -110,17 +110,17 @@ def test(self): pass ) assert isinstance(ast_nodes, list) cls = next(ast_nodes[0].infer()) - self.assertIsInstance(cls, astroid.ClassDef) + self.assertIsInstance(cls, nodes.ClassDef) unbound_name = "function" 
self.assertEqual(cls.name, unbound_name) func = next(ast_nodes[1].infer()) - self.assertIsInstance(func, astroid.FunctionDef) + self.assertIsInstance(func, nodes.FunctionDef) self.assertEqual(func.name, "test") self_ = next(ast_nodes[2].infer()) - self.assertIsInstance(self_, astroid.Const) + self.assertIsInstance(self_, nodes.Const) self.assertIsNone(self_.value) self.assertEqual(cls.name, next(ast_nodes[3].infer()).name) @@ -138,7 +138,7 @@ class A: """ ) inferred = next(ast_node.infer()) - self.assertIsInstance(inferred, astroid.Const) + self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, "first") def test_class_model_correct_mro_subclasses_proxied(self) -> None: @@ -153,8 +153,8 @@ class A(object): for node in ast_nodes: inferred = next(node.infer()) self.assertIsInstance(inferred, astroid.BoundMethod) - self.assertIsInstance(inferred._proxied, astroid.FunctionDef) - self.assertIsInstance(inferred.bound, astroid.ClassDef) + self.assertIsInstance(inferred._proxied, nodes.FunctionDef) + self.assertIsInstance(inferred.bound, nodes.ClassDef) self.assertEqual(inferred.bound.name, "type") def test_class_model(self) -> None: @@ -181,41 +181,41 @@ class C(A): pass ) assert isinstance(ast_nodes, list) module = next(ast_nodes[0].infer()) - self.assertIsInstance(module, astroid.Const) + self.assertIsInstance(module, nodes.Const) self.assertEqual(module.value, "fake_module") name = next(ast_nodes[1].infer()) - self.assertIsInstance(name, astroid.Const) + self.assertIsInstance(name, nodes.Const) self.assertEqual(name.value, "A") qualname = next(ast_nodes[2].infer()) - self.assertIsInstance(qualname, astroid.Const) + self.assertIsInstance(qualname, nodes.Const) self.assertEqual(qualname.value, "fake_module.A") doc = next(ast_nodes[3].infer()) - self.assertIsInstance(doc, astroid.Const) + self.assertIsInstance(doc, nodes.Const) self.assertEqual(doc.value, "test") mro = next(ast_nodes[4].infer()) - self.assertIsInstance(mro, astroid.Tuple) + 
self.assertIsInstance(mro, nodes.Tuple) self.assertEqual([cls.name for cls in mro.elts], ["A", "object"]) called_mro = next(ast_nodes[5].infer()) self.assertEqual(called_mro.elts, mro.elts) base_nodes = next(ast_nodes[6].infer()) - self.assertIsInstance(base_nodes, astroid.Tuple) + self.assertIsInstance(base_nodes, nodes.Tuple) self.assertEqual([cls.name for cls in base_nodes.elts], ["object"]) cls = next(ast_nodes[7].infer()) - self.assertIsInstance(cls, astroid.ClassDef) + self.assertIsInstance(cls, nodes.ClassDef) self.assertEqual(cls.name, "type") cls_dict = next(ast_nodes[8].infer()) - self.assertIsInstance(cls_dict, astroid.Dict) + self.assertIsInstance(cls_dict, nodes.Dict) subclasses = next(ast_nodes[9].infer()) - self.assertIsInstance(subclasses, astroid.List) + self.assertIsInstance(subclasses, nodes.List) self.assertEqual([cls.name for cls in subclasses.elts], ["B", "C"]) @@ -227,7 +227,7 @@ def test_priority_to_local_defined_values(self) -> None: """ ) file_value = next(ast_node.igetattr("__file__")) - self.assertIsInstance(file_value, astroid.Const) + self.assertIsInstance(file_value, nodes.Const) self.assertEqual(file_value.value, "mine") def test__path__not_a_package(self) -> None: @@ -278,20 +278,20 @@ def test_module_model(self) -> None: ) assert isinstance(ast_nodes, list) path = next(ast_nodes[0].infer()) - self.assertIsInstance(path, astroid.List) - self.assertIsInstance(path.elts[0], astroid.Const) + self.assertIsInstance(path, nodes.List) + self.assertIsInstance(path.elts[0], nodes.Const) self.assertEqual(path.elts[0].value, xml.__path__[0]) name = next(ast_nodes[1].infer()) - self.assertIsInstance(name, astroid.Const) + self.assertIsInstance(name, nodes.Const) self.assertEqual(name.value, "xml") doc = next(ast_nodes[2].infer()) - self.assertIsInstance(doc, astroid.Const) + self.assertIsInstance(doc, nodes.Const) self.assertEqual(doc.value, xml.__doc__) file_ = next(ast_nodes[3].infer()) - self.assertIsInstance(file_, astroid.Const) + 
self.assertIsInstance(file_, nodes.Const) self.assertEqual(file_.value, xml.__file__.replace(".pyc", ".py")) for ast_node in ast_nodes[4:7]: @@ -299,11 +299,11 @@ def test_module_model(self) -> None: self.assertIs(inferred, astroid.Uninferable) package = next(ast_nodes[7].infer()) - self.assertIsInstance(package, astroid.Const) + self.assertIsInstance(package, nodes.Const) self.assertEqual(package.value, "xml") dict_ = next(ast_nodes[8].infer()) - self.assertIsInstance(dict_, astroid.Dict) + self.assertIsInstance(dict_, nodes.Dict) init_ = next(ast_nodes[9].infer()) assert isinstance(init_, bases.BoundMethod) @@ -346,7 +346,7 @@ def test(self): return 42 self.assertIsInstance(bound, astroid.BoundMethod) self.assertEqual(bound._proxied._proxied.name, "test") result = next(result.infer()) - self.assertIsInstance(result, astroid.Const) + self.assertIsInstance(result, nodes.Const) self.assertEqual(result.value, 42) def test___get__has_extra_params_defined(self) -> None: @@ -386,7 +386,7 @@ def test(self): return self.x """ ) result = next(result.infer()) - self.assertIsInstance(result, astroid.Const) + self.assertIsInstance(result, nodes.Const) self.assertEqual(result.value, 42) def test_descriptors_binding_invalid(self) -> None: @@ -464,30 +464,30 @@ def func(a=1, b=2): ) assert isinstance(ast_nodes, list) name = next(ast_nodes[0].infer()) - self.assertIsInstance(name, astroid.Const) + self.assertIsInstance(name, nodes.Const) self.assertEqual(name.value, "func") doc = next(ast_nodes[1].infer()) - self.assertIsInstance(doc, astroid.Const) + self.assertIsInstance(doc, nodes.Const) self.assertEqual(doc.value, "test") qualname = next(ast_nodes[2].infer()) - self.assertIsInstance(qualname, astroid.Const) + self.assertIsInstance(qualname, nodes.Const) self.assertEqual(qualname.value, "fake_module.func") module = next(ast_nodes[3].infer()) - self.assertIsInstance(module, astroid.Const) + self.assertIsInstance(module, nodes.Const) self.assertEqual(module.value, "fake_module") 
defaults = next(ast_nodes[4].infer()) - self.assertIsInstance(defaults, astroid.Tuple) + self.assertIsInstance(defaults, nodes.Tuple) self.assertEqual([default.value for default in defaults.elts], [1, 2]) dict_ = next(ast_nodes[5].infer()) - self.assertIsInstance(dict_, astroid.Dict) + self.assertIsInstance(dict_, nodes.Dict) globals_ = next(ast_nodes[6].infer()) - self.assertIsInstance(globals_, astroid.Dict) + self.assertIsInstance(globals_, nodes.Dict) for ast_node in ast_nodes[7:9]: self.assertIs(next(ast_node.infer()), astroid.Uninferable) @@ -529,7 +529,7 @@ def test(): pass """ ) annotations = next(ast_node.infer()) - self.assertIsInstance(annotations, astroid.Dict) + self.assertIsInstance(annotations, nodes.Dict) self.assertEqual(len(annotations.items), 0) def test_builtin_dunder_init_does_not_crash_when_accessing_annotations( @@ -544,7 +544,7 @@ def class_method(cls): """ ) inferred = next(ast_node.infer()) - self.assertIsInstance(inferred, astroid.Dict) + self.assertIsInstance(inferred, nodes.Dict) self.assertEqual(len(inferred.items), 0) def test_annotations_kwdefaults(self) -> None: @@ -556,20 +556,18 @@ def test(a: 1, *args: 2, f:4='lala', **kwarg:3)->2: pass """ ) annotations = next(ast_node[0].infer()) - self.assertIsInstance(annotations, astroid.Dict) - self.assertIsInstance( - annotations.getitem(astroid.Const("return")), astroid.Const - ) - self.assertEqual(annotations.getitem(astroid.Const("return")).value, 2) - self.assertIsInstance(annotations.getitem(astroid.Const("a")), astroid.Const) - self.assertEqual(annotations.getitem(astroid.Const("a")).value, 1) - self.assertEqual(annotations.getitem(astroid.Const("args")).value, 2) - self.assertEqual(annotations.getitem(astroid.Const("kwarg")).value, 3) + self.assertIsInstance(annotations, nodes.Dict) + self.assertIsInstance(annotations.getitem(nodes.Const("return")), nodes.Const) + self.assertEqual(annotations.getitem(nodes.Const("return")).value, 2) + 
self.assertIsInstance(annotations.getitem(nodes.Const("a")), nodes.Const) + self.assertEqual(annotations.getitem(nodes.Const("a")).value, 1) + self.assertEqual(annotations.getitem(nodes.Const("args")).value, 2) + self.assertEqual(annotations.getitem(nodes.Const("kwarg")).value, 3) - self.assertEqual(annotations.getitem(astroid.Const("f")).value, 4) + self.assertEqual(annotations.getitem(nodes.Const("f")).value, 4) kwdefaults = next(ast_node[1].infer()) - self.assertIsInstance(kwdefaults, astroid.Dict) + self.assertIsInstance(kwdefaults, nodes.Dict) # self.assertEqual(kwdefaults.getitem('f').value, 'lala') def test_annotation_positional_only(self): @@ -580,12 +578,12 @@ def test(a: 1, b: 2, /, c: 3): pass """ ) annotations = next(ast_node.infer()) - self.assertIsInstance(annotations, astroid.Dict) + self.assertIsInstance(annotations, nodes.Dict) - self.assertIsInstance(annotations.getitem(astroid.Const("a")), astroid.Const) - self.assertEqual(annotations.getitem(astroid.Const("a")).value, 1) - self.assertEqual(annotations.getitem(astroid.Const("b")).value, 2) - self.assertEqual(annotations.getitem(astroid.Const("c")).value, 3) + self.assertIsInstance(annotations.getitem(nodes.Const("a")), nodes.Const) + self.assertEqual(annotations.getitem(nodes.Const("a")).value, 1) + self.assertEqual(annotations.getitem(nodes.Const("b")).value, 2) + self.assertEqual(annotations.getitem(nodes.Const("c")).value, 3) def test_is_not_lambda(self): ast_node = builder.extract_node("def func(): pass") @@ -657,11 +655,11 @@ def test(): self.assertEqual(doc.value, "a") gi_code = next(ast_nodes[2].infer()) - self.assertIsInstance(gi_code, astroid.ClassDef) + self.assertIsInstance(gi_code, nodes.ClassDef) self.assertEqual(gi_code.name, "gi_code") gi_frame = next(ast_nodes[3].infer()) - self.assertIsInstance(gi_frame, astroid.ClassDef) + self.assertIsInstance(gi_frame, nodes.ClassDef) self.assertEqual(gi_frame.name, "gi_frame") send = next(ast_nodes[4].infer()) @@ -690,7 +688,7 @@ def 
test_valueerror_py3() -> None: ) assert isinstance(ast_nodes, list) args = next(ast_nodes[0].infer()) - assert isinstance(args, astroid.Tuple) + assert isinstance(args, nodes.Tuple) tb = next(ast_nodes[1].infer()) # Python 3.11: If 'contextlib' is loaded, '__traceback__' # could be set inside '__exit__' method in @@ -717,7 +715,7 @@ def test_syntax_error(self) -> None: """ ) inferred = next(ast_node.infer()) - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) @unittest.skipIf(HAS_SIX, "This test fails if the six library is installed") def test_oserror(self) -> None: @@ -734,7 +732,7 @@ def test_oserror(self) -> None: expected_values = ["", "", 0] for node, value in zip(ast_nodes, expected_values): inferred = next(node.infer()) - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) assert inferred.value == value def test_unicodedecodeerror(self) -> None: @@ -746,7 +744,7 @@ def test_unicodedecodeerror(self) -> None: """ node = builder.extract_node(code) inferred = next(node.infer()) - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) assert inferred.value == b"" def test_import_error(self) -> None: @@ -761,7 +759,7 @@ def test_import_error(self) -> None: ) for node in ast_nodes: inferred = next(node.infer()) - assert isinstance(inferred, astroid.Const) + assert isinstance(inferred, nodes.Const) assert inferred.value == "" def test_exception_instance_correctly_instantiated(self) -> None: @@ -776,14 +774,14 @@ def test_exception_instance_correctly_instantiated(self) -> None: inferred = next(ast_node.infer()) assert isinstance(inferred, astroid.Instance) cls = next(inferred.igetattr("__class__")) - assert isinstance(cls, astroid.ClassDef) + assert isinstance(cls, nodes.ClassDef) class DictObjectModelTest(unittest.TestCase): def test__class__(self) -> None: ast_node = builder.extract_node("{}.__class__") inferred = next(ast_node.infer()) - 
self.assertIsInstance(inferred, astroid.ClassDef) + self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.name, "dict") def test_attributes_inferred_as_methods(self) -> None: @@ -851,7 +849,7 @@ def foo(): cache_clear = next(ast_nodes[0].infer()) assert isinstance(cache_clear, astroid.BoundMethod) wrapped = next(ast_nodes[1].infer()) - assert isinstance(wrapped, astroid.FunctionDef) + assert isinstance(wrapped, nodes.FunctionDef) assert wrapped.name == "foo" cache_info = next(ast_nodes[2].infer()) assert isinstance(cache_info, astroid.Instance) diff --git a/tests/test_protocols.py b/tests/test_protocols.py index 72b91a1156..bb2d939be2 100644 --- a/tests/test_protocols.py +++ b/tests/test_protocols.py @@ -13,7 +13,7 @@ import astroid from astroid import extract_node, nodes -from astroid.const import PY310_PLUS, PY312_PLUS +from astroid.const import PY312_PLUS from astroid.exceptions import InferenceError from astroid.manager import AstroidManager from astroid.util import Uninferable, UninferableBase @@ -122,7 +122,7 @@ def test_assigned_stmts_starred_for(self) -> None: for1_starred = next(assign_stmts.nodes_of_class(nodes.Starred)) assigned = next(for1_starred.assigned_stmts()) - assert isinstance(assigned, astroid.List) + assert isinstance(assigned, nodes.List) assert assigned.as_string() == "[1, 2]" def _get_starred_stmts(self, code: str) -> list | UninferableBase: @@ -236,7 +236,7 @@ def transform(node: nodes.Assign) -> None: node.root().locals["__all__"] = [node.value] manager = astroid.MANAGER - with _add_transform(manager, astroid.Assign, transform): + with _add_transform(manager, nodes.Assign, transform): module = astroid.parse( """ __all__ = ['a'] @@ -286,6 +286,28 @@ def test_uninferable_list_multiplication() -> None: element = parsed.inferred()[0].elts[0] assert element.value is Uninferable + @staticmethod + def test_uninferable_list_multiplication_with_multiple_operands() -> None: + """Attempting to calculate the result is 
prohibitively expensive.""" + parsed = extract_node("[0] * 825 * 16547118") + element = parsed.inferred()[0].elts[0] + assert element.value is Uninferable + + @staticmethod + def test_list_multiplication_with_empty_list_and_overflowing_multiplier() -> None: + parsed = extract_node("[] * 1163845194457646539560") + assert parsed.inferred()[0].elts == [] + + @staticmethod + def test_list_multiplication_with_zero_multiplier() -> None: + parsed = extract_node("[0] * 0") + assert parsed.inferred()[0].elts == [] + + @staticmethod + def test_list_multiplication_with_negative_overflowing_multiplier() -> None: + parsed = extract_node("[0] * -9223372036854775809") + assert parsed.inferred()[0].elts == [] + def test_named_expr_inference() -> None: code = """ @@ -342,7 +364,6 @@ def test(value=(p := 24)): return p assert node.value == 1 -@pytest.mark.skipif(not PY310_PLUS, reason="Match requires python 3.10") class TestPatternMatching: @staticmethod def test_assigned_stmts_match_mapping(): diff --git a/tests/test_python3.py b/tests/test_python3.py index 8c3bc16950..d982e6f069 100644 --- a/tests/test_python3.py +++ b/tests/test_python3.py @@ -9,12 +9,13 @@ from astroid import exceptions, nodes from astroid.builder import AstroidBuilder, extract_node +from astroid.manager import AstroidManager class Python3TC(unittest.TestCase): @classmethod def setUpClass(cls): - cls.builder = AstroidBuilder() + cls.builder = AstroidBuilder(AstroidManager()) def test_starred_notation(self) -> None: astroid = self.builder.string_build("*a, b = [1, 2, 3]", "test", "test") @@ -155,7 +156,6 @@ class SubTest(Test): pass ) ) klass = astroid["SubTest"] - self.assertTrue(klass.newstyle) metaclass = klass.metaclass() self.assertIsInstance(metaclass, nodes.ClassDef) self.assertEqual(metaclass.name, "ABCMeta") diff --git a/tests/test_raw_building.py b/tests/test_raw_building.py index f9536ebd20..fdd86032a5 100644 --- a/tests/test_raw_building.py +++ b/tests/test_raw_building.py @@ -10,7 +10,7 @@ from 
__future__ import annotations -import _io +import _io # pylint: disable=wrong-import-order import logging import os import sys @@ -19,7 +19,6 @@ from typing import Any from unittest import mock -import mypy.build import pytest import tests.testdata.python3.data.fake_module_with_broken_getattr as fm_getattr @@ -27,6 +26,7 @@ import tests.testdata.python3.data.fake_module_with_warnings as fm from astroid.builder import AstroidBuilder from astroid.const import IS_PYPY, PY312_PLUS +from astroid.manager import AstroidManager from astroid.raw_building import ( attach_dummy_node, build_class, @@ -36,6 +36,13 @@ object_build_class, ) +try: + import mypy.build + + HAS_MYPY = True +except ImportError: + HAS_MYPY = False + DUMMY_MOD = build_module("DUMMY") @@ -53,33 +60,33 @@ def test_build_module(self) -> None: self.assertEqual(node.parent, None) def test_build_class(self) -> None: - node = build_class("MyClass") + node = build_class("MyClass", DUMMY_MOD) self.assertEqual(node.name, "MyClass") self.assertEqual(node.doc_node, None) def test_build_function(self) -> None: - node = build_function("MyFunction") + node = build_function("MyFunction", DUMMY_MOD) self.assertEqual(node.name, "MyFunction") self.assertEqual(node.doc_node, None) def test_build_function_args(self) -> None: args = ["myArgs1", "myArgs2"] - node = build_function("MyFunction", args) + node = build_function("MyFunction", DUMMY_MOD, args) self.assertEqual("myArgs1", node.args.args[0].name) self.assertEqual("myArgs2", node.args.args[1].name) self.assertEqual(2, len(node.args.args)) def test_build_function_defaults(self) -> None: defaults = ["defaults1", "defaults2"] - node = build_function(name="MyFunction", args=None, defaults=defaults) + node = build_function("MyFunction", DUMMY_MOD, args=None, defaults=defaults) self.assertEqual(2, len(node.args.defaults)) def test_build_function_posonlyargs(self) -> None: - node = build_function(name="MyFunction", posonlyargs=["a", "b"]) + node = build_function("MyFunction", 
DUMMY_MOD, posonlyargs=["a", "b"]) self.assertEqual(2, len(node.args.posonlyargs)) def test_build_function_kwonlyargs(self) -> None: - node = build_function(name="MyFunction", kwonlyargs=["a", "b"]) + node = build_function("MyFunction", DUMMY_MOD, kwonlyargs=["a", "b"]) assert len(node.args.kwonlyargs) == 2 assert node.args.kwonlyargs[0].name == "a" assert node.args.kwonlyargs[1].name == "b" @@ -96,7 +103,7 @@ def test_io_is__io(self): # what io.BufferedReader is. The code that handles this # is in astroid.raw_building.imported_member, which verifies # the true name of the module. - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) module = builder.inspect_build(_io) buffered_reader = module.getattr("BufferedReader")[0] expected = "_io" if PY312_PLUS else "io" @@ -113,7 +120,7 @@ def test_build_function_deepinspect_deprecation(self) -> None: m.pd = fm # This should not raise an exception - AstroidBuilder().module_build(m, "test") + AstroidBuilder(AstroidManager()).module_build(m, "test") def test_module_object_with_broken_getattr(self) -> None: # Tests https://github.com/pylint-dev/astroid/issues/1958 @@ -121,7 +128,7 @@ def test_module_object_with_broken_getattr(self) -> None: # errors when using hasattr(). # This should not raise an exception - AstroidBuilder().inspect_build(fm_getattr, "test") + AstroidBuilder(AstroidManager()).inspect_build(fm_getattr, "test") def test_module_collection_with_object_getattribute(self) -> None: # Tests https://github.com/pylint-dev/astroid/issues/2686 @@ -129,7 +136,7 @@ def test_module_collection_with_object_getattribute(self) -> None: # error when element __getattribute__ causes collection to change size. 
# This should not raise an exception - AstroidBuilder().inspect_build(fm_collection, "test") + AstroidBuilder(AstroidManager()).inspect_build(fm_collection, "test") @pytest.mark.skipif( @@ -162,7 +169,7 @@ def mocked_sys_modules_getitem(name: str) -> types.ModuleType | CustomGetattr: with mock.patch("astroid.raw_building.sys.modules") as sys_mock: sys_mock.__getitem__.side_effect = mocked_sys_modules_getitem - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) builder.inspect_build(os) out, err = capsys.readouterr() @@ -172,6 +179,7 @@ def mocked_sys_modules_getitem(name: str) -> types.ModuleType | CustomGetattr: assert not err +@pytest.mark.skipif(not HAS_MYPY, reason="This test requires mypy") def test_missing__dict__(): # This shouldn't raise an exception. - object_build_class(DUMMY_MOD, mypy.build.ModuleNotFound, "arbitrary_name") + object_build_class(DUMMY_MOD, mypy.build.ModuleNotFound) diff --git a/tests/test_regrtest.py b/tests/test_regrtest.py index 86bc193803..0ff3b6f520 100644 --- a/tests/test_regrtest.py +++ b/tests/test_regrtest.py @@ -2,6 +2,7 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +import platform import sys import textwrap import unittest @@ -13,7 +14,8 @@ from astroid.builder import AstroidBuilder, _extract_single_node, extract_node from astroid.const import PY312_PLUS from astroid.context import InferenceContext -from astroid.exceptions import InferenceError +from astroid.exceptions import AstroidSyntaxError, InferenceError +from astroid.manager import AstroidManager from astroid.raw_building import build_module from astroid.util import Uninferable @@ -80,7 +82,7 @@ def test_package_sidepackage(self) -> None: self.assertEqual(subpackage.name, "absimp.sidepackage") def test_living_property(self) -> None: - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) builder._done = {} builder._module = 
sys.modules[__name__] builder.object_build(build_module("module_name", ""), Whatever) @@ -90,7 +92,7 @@ def test_numpy_crash(self): """Test don't crash on numpy.""" # a crash occurred somewhere in the past, and an # InferenceError instead of a crash was better, but now we even infer! - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) data = """ from numpy import multiply @@ -120,14 +122,14 @@ def test_numpy_distutils(self): def test_nameconstant(self) -> None: # used to fail for Python 3.4 - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) astroid = builder.string_build("def test(x=True): pass") default = astroid.body[0].args.args[0] self.assertEqual(default.name, "x") self.assertEqual(next(default.infer()).value, True) def test_recursion_regression_issue25(self) -> None: - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) data = """ import recursion as base @@ -148,7 +150,7 @@ def run(): klass.type # pylint: disable=pointless-statement # noqa: B018 def test_decorator_callchain_issue42(self) -> None: - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) data = """ def test(): @@ -166,7 +168,7 @@ def crash(): self.assertEqual(astroid["crash"].type, "function") def test_filter_stmts_scoping(self) -> None: - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) data = """ def test(): compiler = int() @@ -183,7 +185,7 @@ class B(compiler.__class__): self.assertEqual(base.name, "int") def test_filter_stmts_nested_if(self) -> None: - builder = AstroidBuilder() + builder = AstroidBuilder(AstroidManager()) data = """ def test(val): variable = None @@ -214,7 +216,7 @@ def test(val): assert result[2].lineno == 12 def test_ancestors_patching_class_recursion(self) -> None: - node = AstroidBuilder().string_build( + node = AstroidBuilder(AstroidManager()).string_build( textwrap.dedent( """ import string @@ -248,7 +250,7 @@ def with_metaclass(meta, *bases): class 
metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) + return type.__new__(metaclass, 'temporary_class', (), {}) import lala @@ -499,6 +501,29 @@ def _get_option(self, option): assert node.inferred()[0].value == "mystr" +def test_regression_root_is_not_a_module() -> None: + """Regression test for #2672.""" + node: nodes.ClassDef = _extract_single_node( + textwrap.dedent( + """ + a=eval.__get__(1).__gt__ + + @a + class c: ... + """ + ) + ) + assert node.name == "c" + + +@pytest.mark.xfail(reason="Not fixed yet") +def test_regression_eval_get_of_arg() -> None: + """Regression test for #2743""" + node = _extract_single_node("eval.__get__(1)") + with pytest.raises(InferenceError): + next(node.infer()) + + def test_regression_no_crash_during_build() -> None: node: nodes.Attribute = extract_node("__()") assert node.args == [] @@ -519,3 +544,33 @@ class a: ... ) assert isinstance(node, nodes.ClassDef) assert node.name == "a" + + +def test_regression_infer_dict_literal_comparison_uninferable() -> None: + """Regression test for issue #2522.""" + node = extract_node("{{}}>0") + inferred = next(node.infer()) + assert inferred.value == Uninferable + + +def test_regression_infer_namedtuple_invalid_fieldname_error() -> None: + """Regression test for issue #2519.""" + code = """ + from collections import namedtuple + namedtuple('a','}') + """ + node = extract_node(code) + inferred = next(node.infer()) + assert inferred.value == Uninferable + + +def test_regression_parse_deeply_nested_parentheses() -> None: + """Regression test for issue #2643.""" + with pytest.raises(AstroidSyntaxError, match="Parsing Python code failed:") as ctx: + extract_node( + "A=((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((c,j=t" + ) + expected = ( + SyntaxError if 
platform.python_implementation() == "PyPy" else MemoryError + ) + assert isinstance(ctx.value.error, expected) diff --git a/tests/test_scoped_nodes.py b/tests/test_scoped_nodes.py index 209710b86a..f3ac0a6fd6 100644 --- a/tests/test_scoped_nodes.py +++ b/tests/test_scoped_nodes.py @@ -29,7 +29,7 @@ util, ) from astroid.bases import BoundMethod, Generator, Instance, UnboundMethod -from astroid.const import WIN32 +from astroid.const import PY312_PLUS, WIN32 from astroid.exceptions import ( AstroidBuildingError, AttributeInferenceError, @@ -42,6 +42,7 @@ ResolveError, TooManyLevelsError, ) +from astroid.manager import AstroidManager from astroid.nodes.scoped_nodes.scoped_nodes import _is_metaclass from . import resources @@ -268,21 +269,21 @@ def test_file_stream_in_memory(self) -> None: def test_file_stream_physical(self) -> None: path = resources.find("data/all.py") - astroid = builder.AstroidBuilder().file_build(path, "all") + astroid = builder.AstroidBuilder(AstroidManager()).file_build(path, "all") with open(path, "rb") as file_io: with astroid.stream() as stream: self.assertEqual(stream.read(), file_io.read()) def test_file_stream_api(self) -> None: path = resources.find("data/all.py") - file_build = builder.AstroidBuilder().file_build(path, "all") + file_build = builder.AstroidBuilder(AstroidManager()).file_build(path, "all") with self.assertRaises(AttributeError): # pylint: disable=pointless-statement, no-member file_build.file_stream # noqa: B018 def test_stream_api(self) -> None: path = resources.find("data/all.py") - astroid = builder.AstroidBuilder().file_build(path, "all") + astroid = builder.AstroidBuilder(AstroidManager()).file_build(path, "all") stream = astroid.stream() self.assertTrue(hasattr(stream, "close")) with stream: @@ -981,6 +982,46 @@ def foo(): with pytest.raises(AttributeInferenceError): func.getattr("") + @staticmethod + def test_blockstart_tolineno() -> None: + code = textwrap.dedent( + """\ + def f1(bar: str) -> None: #@ + pass + + def 
f2( #@ + bar: str) -> None: + pass + + def f3( #@ + bar: str + ) -> None: + pass + + def f4( #@ + bar: str + ): + pass + + def f5( #@ + bar: str): + pass + """ + ) + ast_nodes: list[nodes.FunctionDef] = builder.extract_node(code) # type: ignore[assignment] + assert len(ast_nodes) == 5 + + assert ast_nodes[0].blockstart_tolineno == 1 + + assert ast_nodes[1].blockstart_tolineno == 5 + + assert ast_nodes[2].blockstart_tolineno == 10 + + # Unimplemented, will return line 14 for now. + # assert ast_nodes[3].blockstart_tolineno == 15 + + assert ast_nodes[4].blockstart_tolineno == 19 + class ClassNodeTest(ModuleLoader, unittest.TestCase): def test_dict_interface(self) -> None: @@ -1002,8 +1043,7 @@ def test_cls_special_attributes_1(self) -> None: self.assertIsInstance(cls.getattr("__module__")[0], nodes.Const) self.assertEqual(cls.getattr("__module__")[0].value, "data.module") self.assertEqual(len(cls.getattr("__dict__")), 1) - if not cls.newstyle: - self.assertRaises(AttributeInferenceError, cls.getattr, "__mro__") + for cls in (nodes.List._proxied, nodes.Const(1)._proxied): self.assertEqual(len(cls.getattr("__bases__")), 1) self.assertEqual(len(cls.getattr("__name__")), 1) @@ -1927,6 +1967,34 @@ class E(C[str], D): ... cls, [".E", ".C", ".A", ".B", "typing.Generic", ".D", "builtins.object"] ) + @pytest.mark.skipif(not PY312_PLUS, reason="PEP 695 syntax requires Python 3.12") + def test_mro_generic_8(self): + cls = builder.extract_node( + """ + class A: ... + class B[T]: ... + class C[T](A, B[T]): ... + """ + ) + assert isinstance(cls, nodes.ClassDef) + self.assertEqualMroQName(cls, [".C", ".A", ".B", "builtins.object"]) + + @pytest.mark.skipif(not PY312_PLUS, reason="PEP 695 syntax requires Python 3.12") + def test_mro_generic_9(self): + cls = builder.extract_node( + """ + from dataclasses import dataclass + @dataclass + class A: ... + @dataclass + class B[T]: ... + @dataclass + class C[T](A, B[T]): ... 
+ """ + ) + assert isinstance(cls, nodes.ClassDef) + self.assertEqualMroQName(cls, [".C", ".A", ".B", "builtins.object"]) + def test_mro_generic_error_1(self): cls = builder.extract_node( """ @@ -2154,7 +2222,7 @@ class ParentGetattr(Getattr): # Test that objects analyzed through the live introspection # aren't considered to have dynamic getattr implemented. - astroid_builder = builder.AstroidBuilder() + astroid_builder = builder.AstroidBuilder(AstroidManager()) module = astroid_builder.module_build(difflib) self.assertFalse(module["SequenceMatcher"].has_dynamic_getattr()) @@ -2803,6 +2871,31 @@ class First(object, object): #@ astroid["First"].slots() +def test_import_with_global() -> None: + code = builder.parse( + """ + def f1(): + global platform + from sys import platform as plat + platform = plat + + def f2(): + global os, RE, deque, VERSION, Path + import os + import re as RE + from collections import deque + from sys import version as VERSION + from pathlib import * + """ + ) + assert "platform" in code.locals + assert "os" in code.locals + assert "RE" in code.locals + assert "deque" in code.locals + assert "VERSION" in code.locals + assert "Path" in code.locals + + class TestFrameNodes: @staticmethod def test_frame_node(): diff --git a/tests/test_transforms.py b/tests/test_transforms.py index f4875ca5f2..04e02662a0 100644 --- a/tests/test_transforms.py +++ b/tests/test_transforms.py @@ -16,8 +16,6 @@ from astroid.brain.brain_dataclasses import _looks_like_dataclass_field_call from astroid.const import IS_PYPY from astroid.manager import AstroidManager -from astroid.nodes.node_classes import Call, Compare, Const, Name -from astroid.nodes.scoped_nodes import FunctionDef, Module from tests.testdata.python3.recursion_error import LONG_CHAINED_METHOD_CALL @@ -39,12 +37,12 @@ class TestTransforms(unittest.TestCase): def setUp(self) -> None: self.transformer = transforms.TransformVisitor() - def parse_transform(self, code: str) -> Module: + def 
parse_transform(self, code: str) -> nodes.Module: module = parse(code, apply_transforms=False) return self.transformer.visit(module) def test_function_inlining_transform(self) -> None: - def transform_call(node: Call) -> Const: + def transform_call(node: nodes.Call) -> nodes.Const: # Let's do some function inlining inferred = next(node.infer()) return inferred @@ -65,14 +63,14 @@ def test(): return 42 def test_recursive_transforms_into_astroid_fields(self) -> None: # Test that the transformer walks properly the tree # by going recursively into the _astroid_fields per each node. - def transform_compare(node: Compare) -> Const: + def transform_compare(node: nodes.Compare) -> nodes.Const: # Let's check the values of the ops _, right = node.ops[0] # Assume they are Consts and they were transformed before # us. return nodes.const_factory(node.left.value < right.value) - def transform_name(node: Name) -> Const: + def transform_name(node: nodes.Name) -> nodes.Const: # Should be Consts return next(node.infer()) @@ -92,7 +90,7 @@ def transform_name(node: Name) -> Const: self.assertFalse(module.body[2].value.value) def test_transform_patches_locals(self) -> None: - def transform_function(node: FunctionDef) -> None: + def transform_function(node: nodes.FunctionDef) -> None: assign = nodes.Assign( parent=node, lineno=node.lineno, @@ -127,11 +125,11 @@ def test(): self.assertEqual(func.body[1].as_string(), "value = 42") def test_predicates(self) -> None: - def transform_call(node: Call) -> Const: + def transform_call(node: nodes.Call) -> nodes.Const: inferred = next(node.infer()) return inferred - def should_inline(node: Call) -> bool: + def should_inline(node: nodes.Call) -> bool: return node.func.name.startswith("inlineme") self.transformer.register_transform(nodes.Call, transform_call, should_inline) @@ -165,7 +163,7 @@ def test_transforms_are_separated(self) -> None: # on a partially constructed tree anymore, which was the # source of crashes in the past when certain 
inference rules # were used in a transform. - def transform_function(node: FunctionDef) -> Const: + def transform_function(node: nodes.FunctionDef) -> nodes.Const: if node.decorators: for decorator in node.decorators.nodes: inferred = next(decorator.infer()) @@ -201,7 +199,7 @@ def bala(self): def test_transforms_are_called_for_builtin_modules(self) -> None: # Test that transforms are called for builtin modules. - def transform_function(node: FunctionDef) -> FunctionDef: + def transform_function(node: nodes.FunctionDef) -> nodes.FunctionDef: name = nodes.AssignName( name="value", lineno=0, @@ -215,11 +213,11 @@ def transform_function(node: FunctionDef) -> FunctionDef: manager = MANAGER - def predicate(node: FunctionDef) -> bool: + def predicate(node: nodes.FunctionDef) -> bool: return node.root().name == "time" with add_transform(manager, nodes.FunctionDef, transform_function, predicate): - builder_instance = builder.AstroidBuilder() + builder_instance = builder.AstroidBuilder(AstroidManager()) module = builder_instance.module_build(time) asctime = module["asctime"] @@ -233,7 +231,9 @@ def transform_function(node): manager = MANAGER with add_transform(manager, nodes.FunctionDef, transform_function): - astroid_builder = builder.AstroidBuilder(apply_transforms=False) + astroid_builder = builder.AstroidBuilder( + AstroidManager(), apply_transforms=False + ) module = astroid_builder.string_build("""def test(): pass""") # The transform wasn't applied. 
@@ -269,7 +269,7 @@ def transform_class(cls): IS_PYPY, reason="Could not find a useful recursion limit on all versions" ) def test_transform_aborted_if_recursion_limited(self): - def transform_call(node: Call) -> Const: + def transform_call(node: nodes.Call) -> nodes.Const: return node self.transformer.register_transform( diff --git a/tests/test_type_params.py b/tests/test_type_params.py index 6398f78ade..021aa9a285 100644 --- a/tests/test_type_params.py +++ b/tests/test_type_params.py @@ -5,11 +5,14 @@ import pytest from astroid import extract_node -from astroid.const import PY312_PLUS +from astroid.const import PY312_PLUS, PY313_PLUS from astroid.nodes import ( AssignName, + List, + Name, ParamSpec, Subscript, + Tuple, TypeAlias, TypeVar, TypeVarTuple, @@ -26,6 +29,7 @@ def test_type_alias() -> None: assert isinstance(node.type_params[0].name, AssignName) assert node.type_params[0].name.name == "T" assert node.type_params[0].bound is None + assert node.type_params[0].default_value is None assert isinstance(node.value, Subscript) assert node.value.value.name == "list" @@ -41,12 +45,46 @@ def test_type_alias() -> None: assert assigned is node.value +def test_type_var() -> None: + node = extract_node("type Point[T: int] = T") + param = node.type_params[0] + assert isinstance(param, TypeVar) + assert isinstance(param.bound, Name) + assert param.bound.name == "int" + assert param.default_value is None + + +@pytest.mark.skipif(not PY313_PLUS, reason="Type parameter defaults were added in 313") +def test_type_var_defaults() -> None: + node = extract_node("type Point[T: int = int] = T") + param = node.type_params[0] + assert isinstance(param, TypeVar) + assert isinstance(param.bound, Name) + assert param.bound.name == "int" + assert isinstance(param.default_value, Name) + assert param.default_value.name == "int" + + def test_type_param_spec() -> None: node = extract_node("type Alias[**P] = Callable[P, int]") params = node.type_params[0] assert isinstance(params, 
ParamSpec) assert isinstance(params.name, AssignName) assert params.name.name == "P" + assert params.default_value is None + + assert node.inferred()[0] is node + + +@pytest.mark.skipif(not PY313_PLUS, reason="Type parameter defaults were added in 313") +def test_type_param_spec_defaults() -> None: + node = extract_node("type Alias[**P = [int, str]] = Callable[P, int]") + params = node.type_params[0] + assert isinstance(params, ParamSpec) + assert isinstance(params.name, AssignName) + assert params.name.name == "P" + assert isinstance(params.default_value, List) + assert len(params.default_value.elts) == 2 assert node.inferred()[0] is node @@ -57,6 +95,23 @@ def test_type_var_tuple() -> None: assert isinstance(params, TypeVarTuple) assert isinstance(params.name, AssignName) assert params.name.name == "Ts" + assert params.default_value is None + + assert node.inferred()[0] is node + + +@pytest.mark.skipif(not PY313_PLUS, reason="Type parameter defaults were added in 313") +def test_type_var_tuple_defaults() -> None: + node = extract_node("type Alias[*Ts = tuple[int, str]] = tuple[*Ts]") + params = node.type_params[0] + assert isinstance(params, TypeVarTuple) + assert isinstance(params.name, AssignName) + assert params.name.name == "Ts" + assert isinstance(params.default_value, Subscript) + assert isinstance(params.default_value.value, Name) + assert params.default_value.value.name == "tuple" + assert isinstance(params.default_value.slice, Tuple) + assert len(params.default_value.slice.elts) == 2 assert node.inferred()[0] is node diff --git a/tests/testdata/python3/data/module.py b/tests/testdata/python3/data/module.py index af4a75f7d4..98da5dd632 100644 --- a/tests/testdata/python3/data/module.py +++ b/tests/testdata/python3/data/module.py @@ -2,7 +2,7 @@ """ __revision__ = '$Id: module.py,v 1.2 2005-11-02 11:56:54 syt Exp $' -from astroid.nodes.node_classes import Name as NameNode +from astroid.nodes import Name as NameNode from astroid import modutils from 
astroid.utils import * import os.path @@ -59,7 +59,9 @@ def method(self): return 'hehe' global_access(local, val=autre) finally: - return local + # return in finally was previously tested here but became a syntax error + # in 3.14 and this file is used in 188/1464 tests + a = local def static_method(): """static method test""" diff --git a/tests/testdata/python3/data/module3.14.py b/tests/testdata/python3/data/module3.14.py new file mode 100644 index 0000000000..e5af6b022c --- /dev/null +++ b/tests/testdata/python3/data/module3.14.py @@ -0,0 +1,91 @@ +"""test module for astroid +""" + +__revision__ = '$Id: module.py,v 1.2 2005-11-02 11:56:54 syt Exp $' +from astroid.nodes import Name as NameNode +from astroid import modutils +from astroid.utils import * +import os.path +MY_DICT = {} + +def global_access(key, val): + """function test""" + local = 1 + MY_DICT[key] = val + for i in val: + if i: + del MY_DICT[i] + continue + else: + break + else: + return + + +class YO: + """hehe + haha""" + a = 1 + + def __init__(self): + try: + self.yo = 1 + except ValueError as ex: + pass + except (NameError, TypeError): + raise XXXError() + except: + raise + + + +class YOUPI(YO): + class_attr = None + + def __init__(self): + self.member = None + + def method(self): + """method + test""" + global MY_DICT + try: + MY_DICT = {} + local = None + autre = [a for (a, b) in MY_DICT if b] + if b in autre: + return + elif a in autre: + return 'hehe' + global_access(local, val=autre) + finally: + # return in finally was previously tested here but became a syntax error + # in 3.14 and is used in 188/1464 tests + print(local) + + def static_method(): + """static method test""" + assert MY_DICT, '???' 
+ static_method = staticmethod(static_method) + + def class_method(cls): + """class method test""" + exec(a, b) + class_method = classmethod(class_method) + + +def four_args(a, b, c, d): + """four arguments (was nested_args)""" + while 1: + if a: + break + a += +1 + else: + b += -2 + if c: + d = a and (b or c) + else: + c = a and b or d + list(map(lambda x, y: (y, x), a)) +redirect = four_args + diff --git a/tests/testdata/python3/data/x.zip b/tests/testdata/python3/data/x.zip new file mode 100644 index 0000000000..4f663fb344 Binary files /dev/null and b/tests/testdata/python3/data/x.zip differ diff --git a/tox.ini b/tox.ini index 107e473c00..b3bd055eb5 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py{38,39,310,311,312} +envlist = py{39,310,311,312,313,314} skip_missing_interpreters = true isolated_build = true