diff --git a/.coveragerc b/.coveragerc
index 49fe2faf..ab44bcad 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,10 +2,20 @@
omit =
# leading `*/` for pytest-dev/pytest-cov#456
*/.tox/*
+ */pep517-build-env-*
tests/*
prepare/*
*/_itertools.py
exercises.py
+ */pip-run-*
+disable_warnings =
+ couldnt-parse
[report]
show_missing = True
+exclude_also =
+ # Exclude common false positives per
+ # https://coverage.readthedocs.io/en/latest/excluding.html#advanced-exclusion
+ # Ref jaraco/skeleton#97 and jaraco/skeleton#135
+ class .*\bProtocol\):
+ if TYPE_CHECKING:
diff --git a/.editorconfig b/.editorconfig
index b8aeea17..304196f8 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -14,3 +14,6 @@ max_line_length = 88
[*.{yml,yaml}]
indent_style = space
indent_size = 2
+
+[*.rst]
+indent_style = space
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 542d2986..00000000
--- a/.flake8
+++ /dev/null
@@ -1,10 +0,0 @@
-[flake8]
-max-line-length = 88
-
-# jaraco/skeleton#34
-max-complexity = 10
-
-extend-ignore =
- # Black creates whitespace before colon
- E203
-enable-extensions = U4
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 00000000..5cbfe040
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1 @@
+tidelift: pypi/importlib-metadata
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
deleted file mode 100644
index 89ff3396..00000000
--- a/.github/dependabot.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-version: 2
-updates:
- - package-ecosystem: "pip"
- directory: "/"
- schedule:
- interval: "daily"
- allow:
- - dependency-type: "all"
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index d36f6322..53513eee 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -1,70 +1,130 @@
name: tests
-on: [push, pull_request]
+on:
+ merge_group:
+ push:
+ branches-ignore:
+ # temporary GH branches relating to merge queues (jaraco/skeleton#93)
+ - gh-readonly-queue/**
+ tags:
+ # required if branches-ignore is supplied (jaraco/skeleton#103)
+ - '**'
+ pull_request:
+ workflow_dispatch:
+
+permissions:
+ contents: read
+
+env:
+ # Environment variable to support color support (jaraco/skeleton#66)
+ FORCE_COLOR: 1
+
+ # Suppress noisy pip warnings
+ PIP_DISABLE_PIP_VERSION_CHECK: 'true'
+ PIP_NO_WARN_SCRIPT_LOCATION: 'true'
+
+ # Ensure tests can sense settings about the environment
+ TOX_OVERRIDE: >-
+ testenv.pass_env+=GITHUB_*,FORCE_COLOR
+
jobs:
test:
strategy:
+ # https://blog.jaraco.com/efficient-use-of-ci-resources/
matrix:
python:
- - 3.7
- - 3.9
- - "3.10"
+ - "3.9"
+ - "3.13"
platform:
- ubuntu-latest
- macos-latest
- windows-latest
+ include:
+ - python: "3.10"
+ platform: ubuntu-latest
+ - python: "3.11"
+ platform: ubuntu-latest
+ - python: "3.12"
+ platform: ubuntu-latest
+ - python: "3.14"
+ platform: ubuntu-latest
+ - python: pypy3.10
+ platform: ubuntu-latest
runs-on: ${{ matrix.platform }}
+ continue-on-error: ${{ matrix.python == '3.14' }}
steps:
- - uses: actions/checkout@v2
- with:
- # fetch all branches and tags (to get tags for versioning)
- # ref actions/checkout#448
- fetch-depth: 0
-
+ - uses: actions/checkout@v4
+ - name: Install build dependencies
+ # Install dependencies for building packages on pre-release Pythons
+ # jaraco/skeleton#161
+ if: matrix.python == '3.14' && matrix.platform == 'ubuntu-latest'
+ run: |
+ sudo apt update
+ sudo apt install -y libxml2-dev libxslt-dev
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python }}
+ allow-prereleases: true
- name: Install tox
- run: |
- python -m pip install tox
- - name: Run tests
+ run: python -m pip install tox
+ - name: Run
run: tox
- diffcov:
+ collateral:
+ strategy:
+ fail-fast: false
+ matrix:
+ job:
+ - diffcov
+ - docs
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v5
with:
- python-version: 3.9
+ python-version: 3.x
- name: Install tox
- run: |
- python -m pip install tox
- - name: Evaluate coverage
- run: tox
- env:
- TOXENV: diffcov
+ run: python -m pip install tox
+ - name: Eval ${{ matrix.job }}
+ run: tox -e ${{ matrix.job }}
+
+ check: # This job does nothing and is only used for the branch protection
+ if: always()
+
+ needs:
+ - test
+ - collateral
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Decide whether the needed jobs succeeded or failed
+ uses: re-actors/alls-green@release/v1
+ with:
+ jobs: ${{ toJSON(needs) }}
release:
- needs: test
+ permissions:
+ contents: write
+ needs:
+ - check
if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v5
with:
- python-version: "3.10"
+ python-version: 3.x
- name: Install tox
- run: |
- python -m pip install tox
- - name: Release
+ run: python -m pip install tox
+ - name: Run
run: tox -e release
env:
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index edf6f55f..fa559241 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,5 +1,7 @@
repos:
-- repo: https://github.com/psf/black
- rev: 22.1.0
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.12.0
hooks:
- - id: black
+ - id: ruff
+ args: [--fix, --unsafe-fixes]
+ - id: ruff-format
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 00000000..72437063
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,19 @@
+version: 2
+python:
+ install:
+ - path: .
+ extra_requirements:
+ - doc
+
+sphinx:
+ configuration: docs/conf.py
+
+# required boilerplate readthedocs/readthedocs.org#10401
+build:
+ os: ubuntu-lts-latest
+ tools:
+ python: latest
+ # post-checkout job to ensure the clone isn't shallow jaraco/skeleton#114
+ jobs:
+ post_checkout:
+ - git fetch --unshallow || true
diff --git a/.readthedocs.yml b/.readthedocs.yml
deleted file mode 100644
index cc698548..00000000
--- a/.readthedocs.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-version: 2
-python:
- install:
- - path: .
- extra_requirements:
- - docs
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index be7e092b..00000000
--- a/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright 2017-2019 Jason R. Coombs, Barry Warsaw
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/CHANGES.rst b/NEWS.rst
similarity index 76%
rename from CHANGES.rst
rename to NEWS.rst
index 91e3bcba..1a92cd19 100644
--- a/CHANGES.rst
+++ b/NEWS.rst
@@ -1,3 +1,327 @@
+v8.7.1
+======
+
+Bugfixes
+--------
+
+- Fixed errors in FastPath under fork-multiprocessing. (#520)
+- Removed cruft from Python 3.8. (#524)
+
+
+v8.7.0
+======
+
+Features
+--------
+
+- ``.metadata()`` (and ``Distribution.metadata``) can now return ``None`` if the metadata directory exists but no metadata file is present. (#493)
+
+
+Bugfixes
+--------
+
+- Raise consistent ValueError for invalid EntryPoint.value (#518)
+
+
+v8.6.1
+======
+
+Bugfixes
+--------
+
+- Fixed indentation logic to also honor blank lines.
+
+
+v8.6.0
+======
+
+Features
+--------
+
+- Add support for rendering metadata where some fields have newlines (python/cpython#119650).
+
+
+v8.5.0
+======
+
+Features
+--------
+
+- Deferred import of zipfile.Path (#502)
+- Deferred import of json (#503)
+- Rely on zipp overlay for zipfile.Path.
+
+
+v8.4.0
+======
+
+Features
+--------
+
+- Deferred import of inspect for import performance. (#499)
+
+
+v8.3.0
+======
+
+Features
+--------
+
+- Disallow passing of 'dist' to EntryPoints.select.
+
+
+v8.2.0
+======
+
+Features
+--------
+
+- Add SimplePath to importlib_metadata.__all__. (#494)
+
+
+v8.1.0
+======
+
+Features
+--------
+
+- Prioritize valid dists to invalid dists when retrieving by name. (#489)
+
+
+v8.0.0
+======
+
+Deprecations and Removals
+-------------------------
+
+- Message.__getitem__ now raises a KeyError on missing keys. (#371)
+- Removed deprecated support for Distribution subclasses not implementing abstract methods.
+
+
+v7.2.1
+======
+
+Bugfixes
+--------
+
+- When reading installed files from an egg, use ``relative_to(walk_up=True)`` to honor files installed outside of the installation root. (#455)
+
+
+v7.2.0
+======
+
+Features
+--------
+
+- Deferred ``select`` imports for speedup (python/cpython#109829).
+- Updated fixtures for python/cpython#120801.
+
+
+v7.1.0
+======
+
+Features
+--------
+
+- Improve import time (python/cpython#114664).
+
+
+Bugfixes
+--------
+
+- Make MetadataPathFinder.find_distributions a classmethod for consistency with CPython. Closes #484. (#484)
+- Allow ``MetadataPathFinder.invalidate_caches`` to be called as a classmethod.
+
+
+v7.0.2
+======
+
+No significant changes.
+
+
+v7.0.1
+======
+
+Bugfixes
+--------
+
+- Corrected the interface for SimplePath to encompass the expectations of locate_file and PackagePath.
+- Fixed type annotations to allow strings.
+
+
+v7.0.0
+======
+
+Deprecations and Removals
+-------------------------
+
+- Removed EntryPoint access by numeric index (tuple behavior).
+
+
+v6.11.0
+=======
+
+Features
+--------
+
+- Added ``Distribution.origin`` supplying the ``direct_url.json`` in a ``SimpleNamespace``. (#404)
+
+
+v6.10.0
+=======
+
+Features
+--------
+
+- Added diagnose script. (#461)
+
+
+v6.9.0
+======
+
+Features
+--------
+
+- Added EntryPoints.__repr__ (#473)
+
+
+v6.8.0
+======
+
+Features
+--------
+
+- Require Python 3.8 or later.
+
+
+v6.7.0
+======
+
+* #453: When inferring top-level names that are importable for
+ distributions in ``package_distributions``, now symlinks to
+ other directories are honored.
+
+v6.6.0
+======
+
+* #449: Expanded type annotations.
+
+v6.5.1
+======
+
+* python/cpython#103661: Removed excess error suppression in
+ ``_read_files_egginfo_installed`` and fixed path handling
+ on Windows.
+
+v6.5.0
+======
+
+* #422: Removed ABC metaclass from ``Distribution`` and instead
+ deprecated construction of ``Distribution`` objects without
+ concrete methods.
+
+v6.4.1
+======
+
+* Updated docs with tweaks from upstream CPython.
+
+v6.4.0
+======
+
+* Consolidated some behaviors in tests around ``_path``.
+* Added type annotation for ``Distribution.read_text``.
+
+v6.3.0
+======
+
+* #115: Support ``installed-files.txt`` for ``Distribution.files``
+ when present.
+
+v6.2.1
+======
+
+* #442: Fixed issue introduced in v6.1.0 where non-importable
+ names (metadata dirs) began appearing in
+ ``packages_distributions``.
+
+v6.2.0
+======
+
+* #384: ``PackageMetadata`` now stipulates an additional ``get``
+ method allowing for easy querying of metadata keys that may not
+ be present.
+
+v6.1.0
+======
+
+* #428: ``packages_distributions`` now honors packages and modules
+  with Python modules that are not ``.py`` sources (e.g. ``.pyc``,
+  ``.so``).
+
+v6.0.1
+======
+
+* #434: Expand protocol for ``PackageMetadata.get_all`` to match
+ the upstream implementation of ``email.message.Message.get_all``
+ in python/typeshed#9620.
+
+v6.0.0
+======
+
+* #419: Declared ``Distribution`` as an abstract class, enforcing
+ definition of abstract methods in instantiated subclasses. It's no
+ longer possible to instantiate a ``Distribution`` or any subclasses
+ unless they define the abstract methods.
+
+ Please comment in the issue if this change breaks any projects.
+ This change will likely be rolled back if it causes significant
+ disruption.
+
+v5.2.0
+======
+
+* #371: Deprecated expectation that ``PackageMetadata.__getitem__``
+ will return ``None`` for missing keys. In the future, it will raise a
+ ``KeyError``.
+
+v5.1.0
+======
+
+* #415: Instrument ``SimplePath`` with generic support.
+
+v5.0.0
+======
+
+* #97, #284, #300: Removed compatibility shims for deprecated entry
+ point interfaces.
+
+v4.13.0
+=======
+
+* #396: Added compatibility for ``PathDistributions`` originating
+ from Python 3.8 and 3.9.
+
+v4.12.0
+=======
+
+* py-93259: Now raise ``ValueError`` when ``None`` or an empty
+ string are passed to ``Distribution.from_name`` (and other
+ callers).
+
+v4.11.4
+=======
+
+* #379: In ``PathDistribution._name_from_stem``, avoid including
+ parts of the extension in the result.
+* #381: In ``PathDistribution._normalized_name``, ensure names
+ loaded from the stem of the filename are also normalized, ensuring
+ duplicate entry points by packages varying only by non-normalized
+ name are hidden.
+
+Note (#459): This change had a backward-incompatible effect for
+any installers that created metadata in the filesystem with dashes
+in the package names (not replaced by underscores).
+
v4.11.3
=======
diff --git a/README.rst b/README.rst
index 78a6af71..2193ec31 100644
--- a/README.rst
+++ b/README.rst
@@ -1,25 +1,24 @@
.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg
- :target: `PyPI link`_
+ :target: https://pypi.org/project/importlib_metadata
.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg
- :target: `PyPI link`_
-.. _PyPI link: https://pypi.org/project/importlib_metadata
-
-.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg
+.. image:: https://github.com/python/importlib_metadata/actions/workflows/main.yml/badge.svg
:target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22
:alt: tests
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
- :target: https://github.com/psf/black
- :alt: Code style: Black
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
+ :target: https://github.com/astral-sh/ruff
+ :alt: Ruff
.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest
:target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest
-.. image:: https://img.shields.io/badge/skeleton-2022-informational
+.. image:: https://img.shields.io/badge/skeleton-2025-informational
:target: https://blog.jaraco.com/skeleton
+.. image:: https://tidelift.com/badges/package/pypi/importlib-metadata
+ :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme
Library to access the metadata for a Python package.
@@ -40,9 +39,13 @@ were contributed to different versions in the standard library:
* - importlib_metadata
- stdlib
- * - 4.8
+ * - 7.0
+ - 3.13
+ * - 6.5
+ - 3.12
+ * - 4.13
- 3.11
- * - 4.4
+ * - 4.6
- 3.10
* - 1.4
- 3.8
@@ -51,7 +54,7 @@ were contributed to different versions in the standard library:
Usage
=====
-See the `online documentation `_
+See the `online documentation <https://importlib-metadata.readthedocs.io/>`_
for usage details.
`Finder authors
@@ -75,4 +78,13 @@ Project details
* Project home: https://github.com/python/importlib_metadata
* Report bugs at: https://github.com/python/importlib_metadata/issues
* Code hosting: https://github.com/python/importlib_metadata
- * Documentation: https://importlib_metadata.readthedocs.io/
+ * Documentation: https://importlib-metadata.readthedocs.io/
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=referral&utm_campaign=github>`_.
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..54f99acb
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,3 @@
+# Security Contact
+
+To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure.
diff --git a/conftest.py b/conftest.py
index ab6c8cae..6d3402d6 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,9 +1,8 @@
import sys
-
collect_ignore = [
# this module fails mypy tests because 'setup.py' matches './setup.py'
- 'prepare/example/setup.py',
+ 'tests/data/sources/example/setup.py',
]
@@ -13,13 +12,18 @@ def pytest_configure():
def remove_importlib_metadata():
"""
- Because pytest imports importlib_metadata, the coverage
- reports are broken (#322). So work around the issue by
- undoing the changes made by pytest's import of
- importlib_metadata (if any).
+ Ensure importlib_metadata is not imported yet.
+
+ Because pytest or other modules might import
+ importlib_metadata, the coverage reports are broken (#322).
+ Work around the issue by undoing the changes made by a
+ previous import of importlib_metadata (if any).
"""
- if sys.meta_path[-1].__class__.__name__ == 'MetadataPathFinder':
- del sys.meta_path[-1]
+ sys.meta_path[:] = [
+ item
+ for item in sys.meta_path
+ if item.__class__.__name__ != 'MetadataPathFinder'
+ ]
for mod in list(sys.modules):
if mod.startswith('importlib_metadata'):
del sys.modules[mod]
diff --git a/docs/api.rst b/docs/api.rst
index 02b389ba..d22eecd5 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -9,3 +9,8 @@ API Reference
:members:
:undoc-members:
:show-inheritance:
+
+.. automodule:: importlib_metadata._meta
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index 96dc2030..32528f86 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,11 +1,17 @@
-#!/usr/bin/env python3
+from __future__ import annotations
-extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker']
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'jaraco.packaging.sphinx',
+]
master_doc = "index"
+html_theme = "furo"
+# Link dates and other references in the changelog
+extensions += ['rst.linker']
link_files = {
- '../CHANGES.rst': dict(
+ '../NEWS.rst': dict(
using=dict(GH='https://github.com'),
replace=[
dict(
@@ -18,14 +24,23 @@
),
dict(
         pattern=r'PEP[- ](?P<pep_number>\d+)',
- url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
+ url='https://peps.python.org/pep-{pep_number:0>4}/',
+ ),
+ dict(
+            pattern=r'(python/cpython#|Python #)(?P<python>\d+)',
+ url='https://github.com/python/cpython/issues/{python}',
+ ),
+ dict(
+            pattern=r'bpo-(?P<bpo>\d+)',
+ url='http://bugs.python.org/issue{bpo}',
),
],
)
}
-# Be strict about any broken references:
+# Be strict about any broken references
nitpicky = True
+nitpick_ignore: list[tuple[str, str]] = []
# Include Python intersphinx mapping to prevent failures
# jaraco/skeleton#51
@@ -34,6 +49,22 @@
'python': ('https://docs.python.org/3', None),
}
+# Preserve authored syntax for defaults
+autodoc_preserve_defaults = True
+
+# Add support for linking usernames, PyPI projects, Wikipedia pages
+github_url = 'https://github.com/'
+extlinks = {
+ 'user': (f'{github_url}%s', '@%s'),
+ 'pypi': ('https://pypi.org/project/%s', '%s'),
+ 'wiki': ('https://wikipedia.org/wiki/%s', '%s'),
+}
+extensions += ['sphinx.ext.extlinks']
+
+# local
+
+extensions += ['jaraco.tidelift']
+
intersphinx_mapping.update(
importlib_resources=(
'https://importlib-resources.readthedocs.io/en/latest/',
@@ -41,9 +72,19 @@
),
)
-# Workaround for #316
-nitpick_ignore = [
+intersphinx_mapping.update(
+ packaging=(
+ 'https://packaging.python.org/en/latest/',
+ None,
+ ),
+)
+
+nitpick_ignore += [
+ # Workaround for #316
('py:class', 'importlib_metadata.EntryPoints'),
+ ('py:class', 'importlib_metadata.PackagePath'),
('py:class', 'importlib_metadata.SelectableGroups'),
('py:class', 'importlib_metadata._meta._T'),
+ # Workaround for #435
+ ('py:class', '_T'),
]
diff --git a/docs/history.rst b/docs/history.rst
index 8e217503..5bdc2320 100644
--- a/docs/history.rst
+++ b/docs/history.rst
@@ -5,4 +5,4 @@
History
*******
-.. include:: ../CHANGES (links).rst
+.. include:: ../NEWS (links).rst
diff --git a/docs/index.rst b/docs/index.rst
index 168f7fbc..66755216 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,33 +1,28 @@
Welcome to |project| documentation!
===================================
-``importlib_metadata`` is a library which provides an API for accessing an
-installed package's metadata (see :pep:`566`), such as its entry points or its top-level
-name. This functionality intends to replace most uses of ``pkg_resources``
-`entry point API`_ and `metadata API`_. Along with :mod:`importlib.resources`
-and newer (backported as :doc:`importlib_resources `),
-this package can eliminate the need to use the older and less
-efficient ``pkg_resources`` package.
-
-``importlib_metadata`` supplies a backport of
-:doc:`importlib.metadata `,
+.. sidebar-links::
+ :home:
+ :pypi:
+
+``importlib_metadata`` supplies a backport of :mod:`importlib.metadata`,
enabling early access to features of future Python versions and making
functionality available for older Python versions. Users are encouraged to
use the Python standard library where suitable and fall back to
-this library for future compatibility. Developers looking for detailed API
-descriptions should refer to the standard library documentation.
-
-The documentation here includes a general :ref:`usage ` guide.
+this library for future compatibility. For general usage guidance, start
+with :mod:`importlib.metadata` but substitute ``importlib_metadata``
+for ``importlib.metadata``.
.. toctree::
:maxdepth: 1
- using
api
migration
history
+.. tidelift-referral-banner::
+
Project details
===============
@@ -35,7 +30,7 @@ Project details
* Project home: https://github.com/python/importlib_metadata
* Report bugs at: https://github.com/python/importlib_metadata/issues
* Code hosting: https://github.com/python/importlib_metadata
- * Documentation: https://importlib_metadata.readthedocs.io/
+ * Documentation: https://importlib-metadata.readthedocs.io/
Indices and tables
@@ -44,7 +39,3 @@ Indices and tables
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
-
-
-.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
-.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
diff --git a/docs/using.rst b/docs/using.rst
deleted file mode 100644
index 0bbc0df8..00000000
--- a/docs/using.rst
+++ /dev/null
@@ -1,340 +0,0 @@
-.. _using:
-
-=================================
- Using :mod:`!importlib_metadata`
-=================================
-
-``importlib_metadata`` is a library that provides for access to installed
-package metadata. Built in part on Python's import system, this library
-intends to replace similar functionality in the `entry point
-API`_ and `metadata API`_ of ``pkg_resources``. Along with
-:mod:`importlib.resources` in Python 3.7
-and newer (backported as :doc:`importlib_resources ` for older versions of
-Python), this can eliminate the need to use the older and less efficient
-``pkg_resources`` package.
-
-By "installed package" we generally mean a third-party package installed into
-Python's ``site-packages`` directory via tools such as `pip
-`_. Specifically,
-it means a package with either a discoverable ``dist-info`` or ``egg-info``
-directory, and metadata defined by :pep:`566` or its older specifications.
-By default, package metadata can live on the file system or in zip archives on
-:data:`sys.path`. Through an extension mechanism, the metadata can live almost
-anywhere.
-
-
-.. seealso::
-
- https://importlib-metadata.readthedocs.io/
- The documentation for ``importlib_metadata``, which supplies a
- backport of ``importlib.metadata``.
-
-
-Overview
-========
-
-Let's say you wanted to get the version string for a package you've installed
-using ``pip``. We start by creating a virtual environment and installing
-something into it::
-
- $ python3 -m venv example
- $ source example/bin/activate
- (example) $ pip install importlib_metadata
- (example) $ pip install wheel
-
-You can get the version string for ``wheel`` by running the following::
-
- (example) $ python
- >>> from importlib_metadata import version
- >>> version('wheel')
- '0.32.3'
-
-You can also get a collection of entry points selectable by properties of the EntryPoint (typically 'group' or 'name'), such as
-``console_scripts``, ``distutils.commands`` and others. Each group contains a
-collection of :ref:`EntryPoint ` objects.
-
-You can get the :ref:`metadata for a distribution `::
-
- >>> list(metadata('wheel'))
- ['Metadata-Version', 'Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Project-URL', 'Project-URL', 'Project-URL', 'Keywords', 'Platform', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Requires-Python', 'Provides-Extra', 'Requires-Dist', 'Requires-Dist']
-
-You can also get a :ref:`distribution's version number `, list its
-:ref:`constituent files `, and get a list of the distribution's
-:ref:`requirements`.
-
-
-Functional API
-==============
-
-This package provides the following functionality via its public API.
-
-
-.. _entry-points:
-
-Entry points
-------------
-
-The ``entry_points()`` function returns a collection of entry points.
-Entry points are represented by ``EntryPoint`` instances;
-each ``EntryPoint`` has a ``.name``, ``.group``, and ``.value`` attributes and
-a ``.load()`` method to resolve the value. There are also ``.module``,
-``.attr``, and ``.extras`` attributes for getting the components of the
-``.value`` attribute.
-
-Query all entry points::
-
- >>> eps = entry_points()
-
-The ``entry_points()`` function returns an ``EntryPoints`` object,
-a collection of all ``EntryPoint`` objects with ``names`` and ``groups``
-attributes for convenience::
-
- >>> sorted(eps.groups)
- ['console_scripts', 'distutils.commands', 'distutils.setup_keywords', 'egg_info.writers', 'setuptools.installation']
-
-``EntryPoints`` has a ``select`` method to select entry points
-matching specific properties. Select entry points in the
-``console_scripts`` group::
-
- >>> scripts = eps.select(group='console_scripts')
-
-Equivalently, since ``entry_points`` passes keyword arguments
-through to select::
-
- >>> scripts = entry_points(group='console_scripts')
-
-Pick out a specific script named "wheel" (found in the wheel project)::
-
- >>> 'wheel' in scripts.names
- True
- >>> wheel = scripts['wheel']
-
-Equivalently, query for that entry point during selection::
-
- >>> (wheel,) = entry_points(group='console_scripts', name='wheel')
- >>> (wheel,) = entry_points().select(group='console_scripts', name='wheel')
-
-Inspect the resolved entry point::
-
- >>> wheel
- EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts')
- >>> wheel.module
- 'wheel.cli'
- >>> wheel.attr
- 'main'
- >>> wheel.extras
- []
- >>> main = wheel.load()
- >>> main
-
-
-The ``group`` and ``name`` are arbitrary values defined by the package author
-and usually a client will wish to resolve all entry points for a particular
-group. Read `the setuptools docs
-`_
-for more information on entry points, their definition, and usage.
-
-*Compatibility Note*
-
-The "selectable" entry points were introduced in ``importlib_metadata``
-3.6 and Python 3.10. Prior to those changes, ``entry_points`` accepted
-no parameters and always returned a dictionary of entry points, keyed
-by group. For compatibility, if no parameters are passed to entry_points,
-a ``SelectableGroups`` object is returned, implementing that dict
-interface. In the future, calling ``entry_points`` with no parameters
-will return an ``EntryPoints`` object. Users should rely on the selection
-interface to retrieve entry points by group.
-
-
-.. _metadata:
-
-Distribution metadata
----------------------
-
-Every distribution includes some metadata, which you can extract using the
-``metadata()`` function::
-
- >>> wheel_metadata = metadata('wheel')
-
-The keys of the returned data structure, a ``PackageMetadata``,
-name the metadata keywords, and
-the values are returned unparsed from the distribution metadata::
-
- >>> wheel_metadata['Requires-Python']
- '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
-
-``PackageMetadata`` also presents a ``json`` attribute that returns
-all the metadata in a JSON-compatible form per PEP 566::
-
- >>> wheel_metadata.json['requires_python']
- '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
-
-.. note::
-
- The actual type of the object returned by ``metadata()`` is an
- implementation detail and should be accessed only through the interface
- described by the
- `PackageMetadata protocol `.
-
-
-.. _version:
-
-Distribution versions
----------------------
-
-The ``version()`` function is the quickest way to get a distribution's version
-number, as a string::
-
- >>> version('wheel')
- '0.32.3'
-
-
-.. _files:
-
-Distribution files
-------------------
-
-You can also get the full set of files contained within a distribution. The
-``files()`` function takes a distribution package name and returns all of the
-files installed by this distribution. Each file object returned is a
-``PackagePath``, a :class:`pathlib.PurePath` derived object with additional ``dist``,
-``size``, and ``hash`` properties as indicated by the metadata. For example::
-
- >>> util = [p for p in files('wheel') if 'util.py' in str(p)][0]
- >>> util
- PackagePath('wheel/util.py')
- >>> util.size
- 859
- >>> util.dist
-
- >>> util.hash
-
-
-Once you have the file, you can also read its contents::
-
- >>> print(util.read_text())
- import base64
- import sys
- ...
- def as_bytes(s):
- if isinstance(s, text_type):
- return s.encode('utf-8')
- return s
-
-You can also use the ``locate`` method to get a the absolute path to the
-file::
-
- >>> util.locate() # doctest: +SKIP
- PosixPath('/home/gustav/example/lib/site-packages/wheel/util.py')
-
-In the case where the metadata file listing files
-(RECORD or SOURCES.txt) is missing, ``files()`` will
-return ``None``. The caller may wish to wrap calls to
-``files()`` in `always_iterable
-`_
-or otherwise guard against this condition if the target
-distribution is not known to have the metadata present.
-
-.. _requirements:
-
-Distribution requirements
--------------------------
-
-To get the full set of requirements for a distribution, use the ``requires()``
-function::
-
- >>> requires('wheel')
- ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"]
-
-
-Package distributions
----------------------
-
-A convenience method to resolve the distribution or
-distributions (in the case of a namespace package) for top-level
-Python packages or modules::
-
- >>> packages_distributions()
- {'importlib_metadata': ['importlib-metadata'], 'yaml': ['PyYAML'], 'jaraco': ['jaraco.classes', 'jaraco.functools'], ...}
-
-.. _distributions:
-
-Distributions
-=============
-
-While the above API is the most common and convenient usage, you can get all
-of that information from the ``Distribution`` class. A ``Distribution`` is an
-abstract object that represents the metadata for a Python package. You can
-get the ``Distribution`` instance::
-
- >>> from importlib_metadata import distribution
- >>> dist = distribution('wheel')
-
-Thus, an alternative way to get the version number is through the
-``Distribution`` instance::
-
- >>> dist.version
- '0.32.3'
-
-There are all kinds of additional metadata available on the ``Distribution``
-instance::
-
- >>> dist.metadata['Requires-Python']
- '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
- >>> dist.metadata['License']
- 'MIT'
-
-The full set of available metadata is not described here. See :pep:`566`
-for additional details.
-
-
-Distribution Discovery
-======================
-
-By default, this package provides built-in support for discovery of metadata for file system and zip file packages. This metadata finder search defaults to ``sys.path``, but varies slightly in how it interprets those values from how other import machinery does. In particular:
-
-- ``importlib_metadata`` does not honor :class:`bytes` objects on ``sys.path``.
-- ``importlib_metadata`` will incidentally honor :py:class:`pathlib.Path` objects on ``sys.path`` even though such values will be ignored for imports.
-
-
-Extending the search algorithm
-==============================
-
-Because package metadata is not available through :data:`sys.path` searches, or
-package loaders directly, the metadata for a package is found through import
-system `finders`_. To find a distribution package's metadata,
-``importlib.metadata`` queries the list of :term:`meta path finders ` on
-:data:`sys.meta_path`.
-
-By default ``importlib_metadata`` installs a finder for distribution packages
-found on the file system. This finder doesn't actually find any *packages*,
-but it can find the packages' metadata.
-
-The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the
-interface expected of finders by Python's import system.
-``importlib_metadata`` extends this protocol by looking for an optional
-``find_distributions`` callable on the finders from
-:data:`sys.meta_path` and presents this extended interface as the
-``DistributionFinder`` abstract base class, which defines this abstract
-method::
-
- @abc.abstractmethod
- def find_distributions(context=DistributionFinder.Context()):
- """Return an iterable of all Distribution instances capable of
- loading the metadata for packages for the indicated ``context``.
- """
-
-The ``DistributionFinder.Context`` object provides ``.path`` and ``.name``
-properties indicating the path to search and name to match and may
-supply other relevant context.
-
-What this means in practice is that to support finding distribution package
-metadata in locations other than the file system, subclass
-``Distribution`` and implement the abstract methods. Then from
-a custom finder, return instances of this derived ``Distribution`` in the
-``find_distributions()`` method.
-
-
-.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
-.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
-.. _`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders
diff --git a/exercises.py b/exercises.py
index c88fa983..adccf03c 100644
--- a/exercises.py
+++ b/exercises.py
@@ -29,6 +29,7 @@ def cached_distribution_perf():
def uncached_distribution_perf():
"uncached distribution"
import importlib
+
import importlib_metadata
# end warmup
@@ -37,9 +38,10 @@ def uncached_distribution_perf():
def entrypoint_regexp_perf():
- import importlib_metadata
import re
+ import importlib_metadata
+
input = '0' + ' ' * 2**10 + '0' # end warmup
re.match(importlib_metadata.EntryPoint.pattern, input)
diff --git a/importlib_metadata/__init__.py b/importlib_metadata/__init__.py
index 5ac8be23..508b02e4 100644
--- a/importlib_metadata/__init__.py
+++ b/importlib_metadata/__init__.py
@@ -1,42 +1,52 @@
-import os
-import re
+"""
+APIs exposing metadata from third-party Python packages.
+
+This codebase is shared between importlib.metadata in the stdlib
+and importlib_metadata in PyPI. See
+https://github.com/python/importlib_metadata/wiki/Development-Methodology
+for more detail.
+"""
+
+from __future__ import annotations
+
import abc
-import csv
-import sys
-import zipp
+import collections
import email
-import pathlib
-import operator
-import textwrap
-import warnings
import functools
import itertools
+import operator
+import os
+import pathlib
import posixpath
-import collections
+import re
+import sys
+import textwrap
+import types
+from collections.abc import Iterable, Mapping
+from contextlib import suppress
+from importlib import import_module
+from importlib.abc import MetaPathFinder
+from itertools import starmap
+from typing import Any
-from . import _adapters, _meta
+from . import _meta
from ._collections import FreezableDefaultDict, Pair
from ._compat import (
NullFinder,
install,
- pypy_partial,
)
-from ._functools import method_cache, pass_none
-from ._itertools import always_iterable, unique_everseen
+from ._functools import method_cache, noop, pass_none, passthrough
+from ._itertools import always_iterable, bucket, unique_everseen
from ._meta import PackageMetadata, SimplePath
-
-from contextlib import suppress
-from importlib import import_module
-from importlib.abc import MetaPathFinder
-from itertools import starmap
-from typing import List, Mapping, Optional, Union
-
+from ._typing import md_none
+from .compat import py39, py311
__all__ = [
'Distribution',
'DistributionFinder',
'PackageMetadata',
'PackageNotFoundError',
+ 'SimplePath',
'distribution',
'distributions',
'entry_points',
@@ -51,11 +61,11 @@
class PackageNotFoundError(ModuleNotFoundError):
"""The package was not found."""
- def __str__(self):
+ def __str__(self) -> str:
return f"No package metadata was found for {self.name}"
@property
- def name(self):
+ def name(self) -> str: # type: ignore[override] # make readonly
(name,) = self.args
return name
@@ -121,37 +131,17 @@ def read(text, filter_=None):
yield Pair(name, value)
@staticmethod
- def valid(line):
+ def valid(line: str):
return line and not line.startswith('#')
-class DeprecatedTuple:
- """
- Provide subscript item access for backward compatibility.
-
- >>> recwarn = getfixture('recwarn')
- >>> ep = EntryPoint(name='name', value='value', group='group')
- >>> ep[:]
- ('name', 'value', 'group')
- >>> ep[0]
- 'name'
- >>> len(recwarn)
- 1
- """
-
- _warn = functools.partial(
- warnings.warn,
- "EntryPoint tuple interface is deprecated. Access members by name.",
- DeprecationWarning,
- stacklevel=pypy_partial(2),
- )
-
- def __getitem__(self, item):
- self._warn()
- return self._key()[item]
+class _EntryPointMatch(types.SimpleNamespace):
+ module: str
+ attr: str
+ extras: str
-class EntryPoint(DeprecatedTuple):
+class EntryPoint:
"""An entry point as defined by Python packaging conventions.
See `the packaging docs on entry points
@@ -166,6 +156,30 @@ class EntryPoint(DeprecatedTuple):
'attr'
>>> ep.extras
['extra1', 'extra2']
+
+ If the value package or module are not valid identifiers, a
+ ValueError is raised on access.
+
+ >>> EntryPoint(name=None, group=None, value='invalid-name').module
+ Traceback (most recent call last):
+ ...
+ ValueError: ('Invalid object reference...invalid-name...
+ >>> EntryPoint(name=None, group=None, value='invalid-name').attr
+ Traceback (most recent call last):
+ ...
+ ValueError: ('Invalid object reference...invalid-name...
+ >>> EntryPoint(name=None, group=None, value='invalid-name').extras
+ Traceback (most recent call last):
+ ...
+ ValueError: ('Invalid object reference...invalid-name...
+
+ The same thing happens on construction.
+
+ >>> EntryPoint(name=None, group=None, value='invalid-name')
+ Traceback (most recent call last):
+ ...
+ ValueError: ('Invalid object reference...invalid-name...
+
"""
pattern = re.compile(
@@ -189,51 +203,53 @@ class EntryPoint(DeprecatedTuple):
following the attr, and following any extras.
"""
- dist: Optional['Distribution'] = None
+ name: str
+ value: str
+ group: str
+
+ dist: Distribution | None = None
- def __init__(self, name, value, group):
+ def __init__(self, name: str, value: str, group: str) -> None:
vars(self).update(name=name, value=value, group=group)
+ self.module
- def load(self):
+ def load(self) -> Any:
"""Load the entry point from its definition. If only a module
is indicated by the value, return that module. Otherwise,
return the named object.
"""
- match = self.pattern.match(self.value)
- module = import_module(match.group('module'))
- attrs = filter(None, (match.group('attr') or '').split('.'))
+ module = import_module(self.module)
+ attrs = filter(None, (self.attr or '').split('.'))
return functools.reduce(getattr, attrs, module)
@property
- def module(self):
- match = self.pattern.match(self.value)
- return match.group('module')
+ def module(self) -> str:
+ return self._match.module
@property
- def attr(self):
- match = self.pattern.match(self.value)
- return match.group('attr')
+ def attr(self) -> str:
+ return self._match.attr
@property
- def extras(self):
+ def extras(self) -> list[str]:
+ return re.findall(r'\w+', self._match.extras or '')
+
+ @functools.cached_property
+ def _match(self) -> _EntryPointMatch:
match = self.pattern.match(self.value)
- return re.findall(r'\w+', match.group('extras') or '')
+ if not match:
+ raise ValueError(
+ 'Invalid object reference. '
+ 'See https://packaging.python.org'
+ '/en/latest/specifications/entry-points/#data-model',
+ self.value,
+ )
+ return _EntryPointMatch(**match.groupdict())
def _for(self, dist):
vars(self).update(dist=dist)
return self
- def __iter__(self):
- """
- Supply iter so one may construct dicts of EntryPoints by name.
- """
- msg = (
- "Construction of dict of EntryPoints is deprecated in "
- "favor of EntryPoints."
- )
- warnings.warn(msg, DeprecationWarning)
- return iter((self.name, self))
-
def matches(self, **params):
"""
EntryPoint matches the given parameters.
@@ -254,9 +270,26 @@ def matches(self, **params):
>>> ep.matches(attr='bong')
True
"""
+ self._disallow_dist(params)
attrs = (getattr(self, param) for param in params)
return all(map(operator.eq, params.values(), attrs))
+ @staticmethod
+ def _disallow_dist(params):
+ """
+ Querying by dist is not allowed (dist objects are not comparable).
+ >>> EntryPoint(name='fan', value='fav', group='fag').matches(dist='foo')
+ Traceback (most recent call last):
+ ...
+ ValueError: "dist" is not suitable for matching...
+ """
+ if "dist" in params:
+ raise ValueError(
+ '"dist" is not suitable for matching. '
+ "Instead, use Distribution.entry_points.select() on a "
+ "located distribution."
+ )
+
def _key(self):
return self.name, self.value, self.group
@@ -275,126 +308,51 @@ def __repr__(self):
f'group={self.group!r})'
)
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(self._key())
-class DeprecatedList(list):
- """
- Allow an otherwise immutable object to implement mutability
- for compatibility.
-
- >>> recwarn = getfixture('recwarn')
- >>> dl = DeprecatedList(range(3))
- >>> dl[0] = 1
- >>> dl.append(3)
- >>> del dl[3]
- >>> dl.reverse()
- >>> dl.sort()
- >>> dl.extend([4])
- >>> dl.pop(-1)
- 4
- >>> dl.remove(1)
- >>> dl += [5]
- >>> dl + [6]
- [1, 2, 5, 6]
- >>> dl + (6,)
- [1, 2, 5, 6]
- >>> dl.insert(0, 0)
- >>> dl
- [0, 1, 2, 5]
- >>> dl == [0, 1, 2, 5]
- True
- >>> dl == (0, 1, 2, 5)
- True
- >>> len(recwarn)
- 1
- """
-
- __slots__ = ()
-
- _warn = functools.partial(
- warnings.warn,
- "EntryPoints list interface is deprecated. Cast to list if needed.",
- DeprecationWarning,
- stacklevel=pypy_partial(2),
- )
-
- def _wrap_deprecated_method(method_name: str): # type: ignore
- def wrapped(self, *args, **kwargs):
- self._warn()
- return getattr(super(), method_name)(*args, **kwargs)
-
- return method_name, wrapped
-
- locals().update(
- map(
- _wrap_deprecated_method,
- '__setitem__ __delitem__ append reverse extend pop remove '
- '__iadd__ insert sort'.split(),
- )
- )
-
- def __add__(self, other):
- if not isinstance(other, tuple):
- self._warn()
- other = tuple(other)
- return self.__class__(tuple(self) + other)
-
- def __eq__(self, other):
- if not isinstance(other, tuple):
- self._warn()
- other = tuple(other)
-
- return tuple(self).__eq__(other)
-
-
-class EntryPoints(DeprecatedList):
+class EntryPoints(tuple):
"""
An immutable collection of selectable EntryPoint objects.
"""
__slots__ = ()
- def __getitem__(self, name): # -> EntryPoint:
+ def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] # Work with str instead of int
"""
Get the EntryPoint in self matching name.
"""
- if isinstance(name, int):
- warnings.warn(
- "Accessing entry points by index is deprecated. "
- "Cast to tuple if needed.",
- DeprecationWarning,
- stacklevel=2,
- )
- return super().__getitem__(name)
try:
return next(iter(self.select(name=name)))
except StopIteration:
raise KeyError(name)
- def select(self, **params):
+ def __repr__(self):
+ """
+ Repr with classname and tuple constructor to
+ signal that we deviate from regular tuple behavior.
+ """
+ return '%s(%r)' % (self.__class__.__name__, tuple(self))
+
+ def select(self, **params) -> EntryPoints:
"""
Select entry points from self that match the
given parameters (typically group and/or name).
"""
- return EntryPoints(ep for ep in self if ep.matches(**params))
+ return EntryPoints(ep for ep in self if py39.ep_matches(ep, **params))
@property
- def names(self):
+ def names(self) -> set[str]:
"""
Return the set of all names of all entry points.
"""
return {ep.name for ep in self}
@property
- def groups(self):
+ def groups(self) -> set[str]:
"""
Return the set of all groups of all entry points.
-
- For coverage while SelectableGroups is present.
- >>> EntryPoints().groups
- set()
"""
return {ep.group for ep in self}
@@ -410,145 +368,86 @@ def _from_text(text):
)
-class Deprecated:
- """
- Compatibility add-in for mapping to indicate that
- mapping behavior is deprecated.
-
- >>> recwarn = getfixture('recwarn')
- >>> class DeprecatedDict(Deprecated, dict): pass
- >>> dd = DeprecatedDict(foo='bar')
- >>> dd.get('baz', None)
- >>> dd['foo']
- 'bar'
- >>> list(dd)
- ['foo']
- >>> list(dd.keys())
- ['foo']
- >>> 'foo' in dd
- True
- >>> list(dd.values())
- ['bar']
- >>> len(recwarn)
- 1
- """
-
- _warn = functools.partial(
- warnings.warn,
- "SelectableGroups dict interface is deprecated. Use select.",
- DeprecationWarning,
- stacklevel=pypy_partial(2),
- )
-
- def __getitem__(self, name):
- self._warn()
- return super().__getitem__(name)
-
- def get(self, name, default=None):
- self._warn()
- return super().get(name, default)
-
- def __iter__(self):
- self._warn()
- return super().__iter__()
-
- def __contains__(self, *args):
- self._warn()
- return super().__contains__(*args)
-
- def keys(self):
- self._warn()
- return super().keys()
-
- def values(self):
- self._warn()
- return super().values()
-
-
-class SelectableGroups(Deprecated, dict):
- """
- A backward- and forward-compatible result from
- entry_points that fully implements the dict interface.
- """
-
- @classmethod
- def load(cls, eps):
- by_group = operator.attrgetter('group')
- ordered = sorted(eps, key=by_group)
- grouped = itertools.groupby(ordered, by_group)
- return cls((group, EntryPoints(eps)) for group, eps in grouped)
-
- @property
- def _all(self):
- """
- Reconstruct a list of all entrypoints from the groups.
- """
- groups = super(Deprecated, self).values()
- return EntryPoints(itertools.chain.from_iterable(groups))
-
- @property
- def groups(self):
- return self._all.groups
-
- @property
- def names(self):
- """
- for coverage:
- >>> SelectableGroups().names
- set()
- """
- return self._all.names
-
- def select(self, **params):
- if not params:
- return self
- return self._all.select(**params)
-
-
class PackagePath(pathlib.PurePosixPath):
"""A reference to a path in a package"""
- def read_text(self, encoding='utf-8'):
- with self.locate().open(encoding=encoding) as stream:
- return stream.read()
+ hash: FileHash | None
+ size: int
+ dist: Distribution
- def read_binary(self):
- with self.locate().open('rb') as stream:
- return stream.read()
+ def read_text(self, encoding: str = 'utf-8') -> str:
+ return self.locate().read_text(encoding=encoding)
- def locate(self):
+ def read_binary(self) -> bytes:
+ return self.locate().read_bytes()
+
+ def locate(self) -> SimplePath:
"""Return a path-like object for this path"""
return self.dist.locate_file(self)
class FileHash:
- def __init__(self, spec):
+ def __init__(self, spec: str) -> None:
self.mode, _, self.value = spec.partition('=')
- def __repr__(self):
+ def __repr__(self) -> str:
         return f'<FileHash mode: {self.mode} value: {self.value}>'
-class Distribution:
- """A Python distribution package."""
+class Distribution(metaclass=abc.ABCMeta):
+ """
+ An abstract Python distribution package.
+
+ Custom providers may derive from this class and define
+ the abstract methods to provide a concrete implementation
+ for their environment. Some providers may opt to override
+ the default implementation of some properties to bypass
+ the file-reading mechanism.
+ """
@abc.abstractmethod
- def read_text(self, filename):
+ def read_text(self, filename) -> str | None:
"""Attempt to load metadata file given by the name.
+ Python distribution metadata is organized by blobs of text
+ typically represented as "files" in the metadata directory
+ (e.g. package-1.0.dist-info). These files include things
+ like:
+
+ - METADATA: The distribution metadata including fields
+ like Name and Version and Description.
+        - entry_points.txt: A series of entry points as defined in
+          `the entry points spec <https://packaging.python.org/en/latest/specifications/entry-points/>`_.
+        - RECORD: A record of files according to
+          `this recording spec <https://packaging.python.org/en/latest/specifications/recording-installed-packages/>`_.
+
+ A package may provide any set of files, including those
+ not listed here or none at all.
+
:param filename: The name of the file in the distribution info.
:return: The text if found, otherwise None.
"""
@abc.abstractmethod
- def locate_file(self, path):
+ def locate_file(self, path: str | os.PathLike[str]) -> SimplePath:
"""
- Given a path to a file in this distribution, return a path
+ Given a path to a file in this distribution, return a SimplePath
to it.
+
+ This method is used by callers of ``Distribution.files()`` to
+ locate files within the distribution. If it's possible for a
+ Distribution to represent files in the distribution as
+ ``SimplePath`` objects, it should implement this method
+ to resolve such objects.
+
+ Some Distribution providers may elect not to resolve SimplePath
+ objects within the distribution by raising a
+ NotImplementedError, but consumers of such a Distribution would
+ be unable to invoke ``Distribution.files()``.
"""
@classmethod
- def from_name(cls, name):
+ def from_name(cls, name: str) -> Distribution:
"""Return the Distribution for the given package name.
:param name: The name of the distribution package to search for.
@@ -556,26 +455,28 @@ def from_name(cls, name):
package, if found.
:raises PackageNotFoundError: When the named package's distribution
metadata cannot be found.
+ :raises ValueError: When an invalid value is supplied for name.
"""
- for resolver in cls._discover_resolvers():
- dists = resolver(DistributionFinder.Context(name=name))
- dist = next(iter(dists), None)
- if dist is not None:
- return dist
- else:
+ if not name:
+ raise ValueError("A distribution name is required.")
+ try:
+ return next(iter(cls._prefer_valid(cls.discover(name=name))))
+ except StopIteration:
raise PackageNotFoundError(name)
@classmethod
- def discover(cls, **kwargs):
+ def discover(
+ cls, *, context: DistributionFinder.Context | None = None, **kwargs
+ ) -> Iterable[Distribution]:
"""Return an iterable of Distribution objects for all packages.
Pass a ``context`` or pass keyword arguments for constructing
a context.
:context: A ``DistributionFinder.Context`` object.
- :return: Iterable of Distribution objects for all packages.
+ :return: Iterable of Distribution objects for packages matching
+ the context.
"""
- context = kwargs.pop('context', None)
if context and kwargs:
raise ValueError("cannot accept context and kwargs")
context = context or DistributionFinder.Context(**kwargs)
@@ -584,8 +485,18 @@ def discover(cls, **kwargs):
)
@staticmethod
- def at(path):
- """Return a Distribution for the indicated metadata path
+ def _prefer_valid(dists: Iterable[Distribution]) -> Iterable[Distribution]:
+ """
+ Prefer (move to the front) distributions that have metadata.
+
+ Ref python/importlib_resources#489.
+ """
+ buckets = bucket(dists, lambda dist: bool(dist.metadata))
+ return itertools.chain(buckets[True], buckets[False])
+
+ @staticmethod
+ def at(path: str | os.PathLike[str]) -> Distribution:
+ """Return a Distribution for the indicated metadata path.
:param path: a string or path-like object
:return: a concrete Distribution instance for the path
@@ -594,19 +505,24 @@ def at(path):
@staticmethod
def _discover_resolvers():
- """Search the meta_path for resolvers."""
+ """Search the meta_path for resolvers (MetadataPathFinders)."""
declared = (
getattr(finder, 'find_distributions', None) for finder in sys.meta_path
)
return filter(None, declared)
@property
- def metadata(self) -> _meta.PackageMetadata:
+ def metadata(self) -> _meta.PackageMetadata | None:
"""Return the parsed metadata for this Distribution.
The returned object will have keys that name the various bits of
- metadata. See PEP 566 for details.
+        metadata per the
+        `Core metadata specifications <https://packaging.python.org/en/latest/specifications/core-metadata/#core-metadata>`_.
+
+ Custom providers may provide the METADATA file or override this
+ property.
"""
+
text = (
self.read_text('METADATA')
or self.read_text('PKG-INFO')
@@ -615,12 +531,20 @@ def metadata(self) -> _meta.PackageMetadata:
# (which points to the egg-info file) attribute unchanged.
or self.read_text('')
)
+ return self._assemble_message(text)
+
+ @staticmethod
+ @pass_none
+ def _assemble_message(text: str) -> _meta.PackageMetadata:
+ # deferred for performance (python/cpython#109829)
+ from . import _adapters
+
return _adapters.Message(email.message_from_string(text))
@property
- def name(self):
+ def name(self) -> str:
"""Return the 'Name' metadata for the distribution package."""
- return self.metadata['Name']
+ return md_none(self.metadata)['Name']
@property
def _normalized_name(self):
@@ -628,24 +552,34 @@ def _normalized_name(self):
return Prepared.normalize(self.name)
@property
- def version(self):
+ def version(self) -> str:
"""Return the 'Version' metadata for the distribution package."""
- return self.metadata['Version']
+ return md_none(self.metadata)['Version']
@property
- def entry_points(self):
+ def entry_points(self) -> EntryPoints:
+ """
+ Return EntryPoints for this distribution.
+
+ Custom providers may provide the ``entry_points.txt`` file
+ or override this property.
+ """
return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self)
@property
- def files(self):
+ def files(self) -> list[PackagePath] | None:
"""Files in this distribution.
:return: List of PackagePath for this distribution or None
Result is `None` if the metadata file that enumerates files
- (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
- missing.
+ (i.e. RECORD for dist-info, or installed-files.txt or
+ SOURCES.txt for egg-info) is missing.
Result may be empty if the metadata exists but is empty.
+
+ Custom providers are recommended to provide a "RECORD" file (in
+ ``read_text``) or override this property to allow for callers to be
+ able to resolve filenames provided by the package.
"""
def make_file(name, hash=None, size_str=None):
@@ -657,27 +591,76 @@ def make_file(name, hash=None, size_str=None):
@pass_none
def make_files(lines):
- return list(starmap(make_file, csv.reader(lines)))
+ # Delay csv import, since Distribution.files is not as widely used
+ # as other parts of importlib.metadata
+ import csv
+
+ return starmap(make_file, csv.reader(lines))
- return make_files(self._read_files_distinfo() or self._read_files_egginfo())
+ @pass_none
+ def skip_missing_files(package_paths):
+ return list(filter(lambda path: path.locate().exists(), package_paths))
+
+ return skip_missing_files(
+ make_files(
+ self._read_files_distinfo()
+ or self._read_files_egginfo_installed()
+ or self._read_files_egginfo_sources()
+ )
+ )
def _read_files_distinfo(self):
"""
- Read the lines of RECORD
+ Read the lines of RECORD.
"""
text = self.read_text('RECORD')
return text and text.splitlines()
- def _read_files_egginfo(self):
+ def _read_files_egginfo_installed(self):
+ """
+ Read installed-files.txt and return lines in a similar
+ CSV-parsable format as RECORD: each file must be placed
+ relative to the site-packages directory and must also be
+ quoted (since file names can contain literal commas).
+
+ This file is written when the package is installed by pip,
+ but it might not be written for other installation methods.
+ Assume the file is accurate if it exists.
+ """
+ text = self.read_text('installed-files.txt')
+ # Prepend the .egg-info/ subdir to the lines in this file.
+ # But this subdir is only available from PathDistribution's
+ # self._path.
+ subdir = getattr(self, '_path', None)
+ if not text or not subdir:
+ return
+
+ paths = (
+ py311
+ .relative_fix((subdir / name).resolve())
+ .relative_to(self.locate_file('').resolve(), walk_up=True)
+ .as_posix()
+ for name in text.splitlines()
+ )
+ return map('"{}"'.format, paths)
+
+ def _read_files_egginfo_sources(self):
"""
- SOURCES.txt might contain literal commas, so wrap each line
- in quotes.
+ Read SOURCES.txt and return lines in a similar CSV-parsable
+ format as RECORD: each file name must be quoted (since it
+ might contain literal commas).
+
+ Note that SOURCES.txt is not a reliable source for what
+ files are installed by a package. This file is generated
+ for a source archive, and the files that are present
+ there (e.g. setup.py) may not correctly reflect the files
+ that are present after the package has been installed.
"""
text = self.read_text('SOURCES.txt')
return text and map('"{}"'.format, text.splitlines())
@property
- def requires(self):
+ def requires(self) -> list[str] | None:
"""Generated requirements specified for this Distribution"""
reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
return reqs and list(reqs)
@@ -728,10 +711,26 @@ def url_req_space(req):
space = url_req_space(section.value)
yield section.value + space + quoted_marker(section.name)
+ @property
+ def origin(self):
+ return self._load_json('direct_url.json')
+
+ def _load_json(self, filename):
+ # Deferred for performance (python/importlib_metadata#503)
+ import json
+
+ return pass_none(json.loads)(
+ self.read_text(filename),
+ object_hook=lambda data: types.SimpleNamespace(**data),
+ )
+
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
+
+ Custom providers should implement this interface in order to
+ supply metadata.
"""
class Context:
@@ -744,6 +743,17 @@ class Context:
Each DistributionFinder may expect any parameters
and should attempt to honor the canonical
parameters defined below when appropriate.
+
+ This mechanism gives a custom provider a means to
+ solicit additional details from the caller beyond
+ "name" and "path" when searching distributions.
+ For example, imagine a provider that exposes suites
+ of packages in either a "public" or "private" ``realm``.
+ A caller may wish to query only for distributions in
+ a particular realm and could call
+ ``distributions(realm="private")`` to signal to the
+ custom provider to only include distributions from that
+ realm.
"""
name = None
@@ -756,7 +766,7 @@ def __init__(self, **kwargs):
vars(self).update(kwargs)
@property
- def path(self):
+ def path(self) -> list[str]:
"""
The sequence of directory path that a distribution finder
should search.
@@ -767,7 +777,7 @@ def path(self):
return vars(self).get('path', sys.path)
@abc.abstractmethod
- def find_distributions(self, context=Context()):
+ def find_distributions(self, context=Context()) -> Iterable[Distribution]:
"""
Find distributions.
@@ -777,16 +787,38 @@ def find_distributions(self, context=Context()):
"""
+@passthrough
+def _clear_after_fork(cached):
+ """Ensure ``func`` clears cached state after ``fork`` when supported.
+
+ ``FastPath`` caches zip-backed ``pathlib.Path`` objects that retain a
+ reference to the parent's open ``ZipFile`` handle. Re-using a cached
+ instance in a forked child can therefore resurrect invalid file pointers
+ and trigger ``BadZipFile``/``OSError`` failures (python/importlib_metadata#520).
+ Registering ``cache_clear`` with ``os.register_at_fork`` keeps each process
+ on its own cache.
+ """
+ getattr(os, 'register_at_fork', noop)(after_in_child=cached.cache_clear)
+
+
class FastPath:
"""
- Micro-optimized class for searching a path for
- children.
+ Micro-optimized class for searching a root for children.
+
+ Root is a path on the file system that may contain metadata
+ directories either as natural directories or within a zip file.
>>> FastPath('').children()
['...']
+
+ FastPath objects are cached and recycled for any given root.
+
+ >>> FastPath('foobar') is FastPath('foobar')
+ True
"""
- @functools.lru_cache() # type: ignore
+ @_clear_after_fork # type: ignore[misc]
+ @functools.lru_cache()
def __new__(cls, root):
return super().__new__(cls)
@@ -804,7 +836,10 @@ def children(self):
return []
def zip_children(self):
- zip_path = zipp.Path(self.root)
+ # deferred for performance (python/importlib_metadata#502)
+ from zipp.compat.overlay import zipfile
+
+ zip_path = zipfile.Path(self.root)
names = zip_path.root.namelist()
self.joinpath = zip_path.joinpath
@@ -825,7 +860,19 @@ def lookup(self, mtime):
class Lookup:
+ """
+ A micro-optimized class for searching a (fast) path for metadata.
+ """
+
def __init__(self, path: FastPath):
+ """
+ Calculate all of the children representing metadata.
+
+ From the children in the path, calculate early all of the
+ children that appear to represent metadata (infos) or legacy
+ metadata (eggs).
+ """
+
base = os.path.basename(path.root).lower()
base_is_egg = base.endswith(".egg")
self.infos = FreezableDefaultDict(list)
@@ -846,7 +893,10 @@ def __init__(self, path: FastPath):
self.infos.freeze()
self.eggs.freeze()
- def search(self, prepared):
+ def search(self, prepared: Prepared):
+ """
+ Yield all infos and eggs matching the Prepared query.
+ """
infos = (
self.infos[prepared.normalized]
if prepared
@@ -862,13 +912,28 @@ def search(self, prepared):
class Prepared:
"""
- A prepared search for metadata on a possibly-named package.
+ A prepared search query for metadata on a possibly-named package.
+
+ Pre-calculates the normalization to prevent repeated operations.
+
+ >>> none = Prepared(None)
+ >>> none.normalized
+ >>> none.legacy_normalized
+ >>> bool(none)
+ False
+ >>> sample = Prepared('Sample__Pkg-name.foo')
+ >>> sample.normalized
+ 'sample_pkg_name_foo'
+ >>> sample.legacy_normalized
+ 'sample__pkg_name.foo'
+ >>> bool(sample)
+ True
"""
normalized = None
legacy_normalized = None
- def __init__(self, name):
+ def __init__(self, name: str | None):
self.name = name
if name is None:
return
@@ -902,7 +967,10 @@ class MetadataPathFinder(NullFinder, DistributionFinder):
of Python that do not have a PathFinder find_distributions().
"""
- def find_distributions(self, context=DistributionFinder.Context()):
+ @classmethod
+ def find_distributions(
+ cls, context=DistributionFinder.Context()
+ ) -> Iterable[PathDistribution]:
"""
Find distributions.
@@ -911,7 +979,7 @@ def find_distributions(self, context=DistributionFinder.Context()):
(or all names if ``None`` indicated) along the paths in the list
of directories ``context.path``.
"""
- found = self._search_paths(context.name, context.path)
+ found = cls._search_paths(context.name, context.path)
return map(PathDistribution, found)
@classmethod
@@ -922,19 +990,20 @@ def _search_paths(cls, name, paths):
path.search(prepared) for path in map(FastPath, paths)
)
- def invalidate_caches(cls):
+ @classmethod
+ def invalidate_caches(cls) -> None:
FastPath.__new__.cache_clear()
class PathDistribution(Distribution):
- def __init__(self, path: SimplePath):
+ def __init__(self, path: SimplePath) -> None:
"""Construct a distribution.
:param path: SimplePath indicating the metadata directory.
"""
self._path = path
- def read_text(self, filename):
+ def read_text(self, filename: str | os.PathLike[str]) -> str | None:
with suppress(
FileNotFoundError,
IsADirectoryError,
@@ -944,9 +1013,11 @@ def read_text(self, filename):
):
return self._path.joinpath(filename).read_text(encoding='utf-8')
+ return None
+
read_text.__doc__ = Distribution.read_text.__doc__
- def locate_file(self, path):
+ def locate_file(self, path: str | os.PathLike[str]) -> SimplePath:
return self._path.parent / path
@property
@@ -956,17 +1027,30 @@ def _normalized_name(self):
normalized name from the file system path.
"""
stem = os.path.basename(str(self._path))
- return self._name_from_stem(stem) or super()._normalized_name
+ return (
+ pass_none(Prepared.normalize)(self._name_from_stem(stem))
+ or super()._normalized_name
+ )
- def _name_from_stem(self, stem):
- name, ext = os.path.splitext(stem)
+ @staticmethod
+ def _name_from_stem(stem):
+ """
+ >>> PathDistribution._name_from_stem('foo-3.0.egg-info')
+ 'foo'
+ >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info')
+ 'CherryPy'
+ >>> PathDistribution._name_from_stem('face.egg-info')
+ 'face'
+ >>> PathDistribution._name_from_stem('foo.bar')
+ """
+ filename, ext = os.path.splitext(stem)
if ext not in ('.dist-info', '.egg-info'):
return
- name, sep, rest = stem.partition('-')
+ name, sep, rest = filename.partition('-')
return name
-def distribution(distribution_name):
+def distribution(distribution_name: str) -> Distribution:
"""Get the ``Distribution`` instance for the named package.
:param distribution_name: The name of the distribution package as a string.
@@ -975,7 +1059,7 @@ def distribution(distribution_name):
return Distribution.from_name(distribution_name)
-def distributions(**kwargs):
+def distributions(**kwargs) -> Iterable[Distribution]:
"""Get all ``Distribution`` instances in the current environment.
:return: An iterable of ``Distribution`` instances.
@@ -983,7 +1067,7 @@ def distributions(**kwargs):
return Distribution.discover(**kwargs)
-def metadata(distribution_name) -> _meta.PackageMetadata:
+def metadata(distribution_name: str) -> _meta.PackageMetadata | None:
"""Get the metadata for the named package.
:param distribution_name: The name of the distribution package to query.
@@ -992,7 +1076,7 @@ def metadata(distribution_name) -> _meta.PackageMetadata:
return Distribution.from_name(distribution_name).metadata
-def version(distribution_name):
+def version(distribution_name: str) -> str:
"""Get the version string for the named package.
:param distribution_name: The name of the distribution package to query.
@@ -1002,32 +1086,31 @@ def version(distribution_name):
return distribution(distribution_name).version
-def entry_points(**params) -> Union[EntryPoints, SelectableGroups]:
+_unique = functools.partial(
+ unique_everseen,
+ key=py39.normalized_name,
+)
+"""
+Wrapper for ``distributions`` to return unique distributions by name.
+"""
+
+
+def entry_points(**params) -> EntryPoints:
"""Return EntryPoint objects for all installed packages.
Pass selection parameters (group or name) to filter the
result to entry points matching those properties (see
EntryPoints.select()).
- For compatibility, returns ``SelectableGroups`` object unless
- selection parameters are supplied. In the future, this function
- will return ``EntryPoints`` instead of ``SelectableGroups``
- even when no selection parameters are supplied.
-
- For maximum future compatibility, pass selection parameters
- or invoke ``.select`` with parameters on the result.
-
- :return: EntryPoints or SelectableGroups for all installed packages.
+ :return: EntryPoints for all installed packages.
"""
- norm_name = operator.attrgetter('_normalized_name')
- unique = functools.partial(unique_everseen, key=norm_name)
eps = itertools.chain.from_iterable(
- dist.entry_points for dist in unique(distributions())
+ dist.entry_points for dist in _unique(distributions())
)
- return SelectableGroups.load(eps).select(**params)
+ return EntryPoints(eps).select(**params)
-def files(distribution_name):
+def files(distribution_name: str) -> list[PackagePath] | None:
"""Return a list of files for the named package.
:param distribution_name: The name of the distribution package to query.
@@ -1036,17 +1119,17 @@ def files(distribution_name):
return distribution(distribution_name).files
-def requires(distribution_name):
+def requires(distribution_name: str) -> list[str] | None:
"""
Return a list of requirements for the named package.
- :return: An iterator of requirements, suitable for
+ :return: An iterable of requirements, suitable for
packaging.requirement.Requirement.
"""
return distribution(distribution_name).requires
-def packages_distributions() -> Mapping[str, List[str]]:
+def packages_distributions() -> Mapping[str, list[str]]:
"""
Return a mapping of top-level packages to their
distributions.
@@ -1059,7 +1142,7 @@ def packages_distributions() -> Mapping[str, List[str]]:
pkg_to_dist = collections.defaultdict(list)
for dist in distributions():
for pkg in _top_level_declared(dist) or _top_level_inferred(dist):
- pkg_to_dist[pkg].append(dist.metadata['Name'])
+ pkg_to_dist[pkg].append(md_none(dist.metadata)['Name'])
return dict(pkg_to_dist)
@@ -1067,9 +1150,42 @@ def _top_level_declared(dist):
return (dist.read_text('top_level.txt') or '').split()
+def _topmost(name: PackagePath) -> str | None:
+ """
+ Return the top-most parent as long as there is a parent.
+ """
+ top, *rest = name.parts
+ return top if rest else None
+
+
+def _get_toplevel_name(name: PackagePath) -> str:
+ """
+ Infer a possibly importable module name from a name presumed on
+ sys.path.
+
+ >>> _get_toplevel_name(PackagePath('foo.py'))
+ 'foo'
+ >>> _get_toplevel_name(PackagePath('foo'))
+ 'foo'
+ >>> _get_toplevel_name(PackagePath('foo.pyc'))
+ 'foo'
+ >>> _get_toplevel_name(PackagePath('foo/__init__.py'))
+ 'foo'
+ >>> _get_toplevel_name(PackagePath('foo.pth'))
+ 'foo.pth'
+ >>> _get_toplevel_name(PackagePath('foo.dist-info'))
+ 'foo.dist-info'
+ """
+ # Defer import of inspect for performance (python/cpython#118761)
+ import inspect
+
+ return _topmost(name) or inspect.getmodulename(name) or str(name)
+
+
def _top_level_inferred(dist):
- return {
- f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name
- for f in always_iterable(dist.files)
- if f.suffix == ".py"
- }
+ opt_names = set(map(_get_toplevel_name, always_iterable(dist.files)))
+
+ def importable_name(name):
+ return '.' not in name
+
+ return filter(importable_name, opt_names)
diff --git a/importlib_metadata/_adapters.py b/importlib_metadata/_adapters.py
index aa460d3e..dede395d 100644
--- a/importlib_metadata/_adapters.py
+++ b/importlib_metadata/_adapters.py
@@ -1,11 +1,59 @@
+import email.message
+import email.policy
import re
import textwrap
-import email.message
from ._text import FoldedCase
+class RawPolicy(email.policy.EmailPolicy):
+ def fold(self, name, value):
+ folded = self.linesep.join(
+ textwrap
+ .indent(value, prefix=' ' * 8, predicate=lambda line: True)
+ .lstrip()
+ .splitlines()
+ )
+ return f'{name}: {folded}{self.linesep}'
+
+
class Message(email.message.Message):
+ r"""
+ Specialized Message subclass to handle metadata naturally.
+
+ Reads values that may have newlines in them and converts the
+ payload to the Description.
+
+ >>> msg_text = textwrap.dedent('''
+ ... Name: Foo
+ ... Version: 3.0
+ ... License: blah
+ ... de-blah
+ ... <BLANKLINE>
+ ... First line of description.
+ ... Second line of description.
+ ... <BLANKLINE>
+ ... Fourth line!
+ ... ''').lstrip().replace('<BLANKLINE>', '')
+ >>> msg = Message(email.message_from_string(msg_text))
+ >>> msg['Description']
+ 'First line of description.\nSecond line of description.\n\nFourth line!\n'
+
+ Message should render even if values contain newlines.
+
+ >>> print(msg)
+ Name: Foo
+ Version: 3.0
+ License: blah
+ de-blah
+ Description: First line of description.
+ Second line of description.
+ <BLANKLINE>
+ Fourth line!
+ <BLANKLINE>
+ <BLANKLINE>
+ """
+
multiple_use_keys = set(
map(
FoldedCase,
@@ -39,18 +87,38 @@ def __init__(self, *args, **kwargs):
def __iter__(self):
return super().__iter__()
+ def __getitem__(self, item):
+ """
+ Override parent behavior to typical dict behavior.
+
+ ``email.message.Message`` will emit None values for missing
+ keys. Typical mappings, including this ``Message``, will raise
+ a key error for missing keys.
+
+ Ref python/importlib_metadata#371.
+ """
+ res = super().__getitem__(item)
+ if res is None:
+ raise KeyError(item)
+ return res
+
def _repair_headers(self):
def redent(value):
"Correct for RFC822 indentation"
- if not value or '\n' not in value:
+ indent = ' ' * 8
+ if not value or '\n' + indent not in value:
return value
- return textwrap.dedent(' ' * 8 + value)
+ return textwrap.dedent(indent + value)
headers = [(key, redent(value)) for key, value in vars(self)['_headers']]
if self._payload:
headers.append(('Description', self.get_payload()))
+ self.set_payload('')
return headers
+ def as_string(self):
+ return super().as_string(policy=RawPolicy())
+
@property
def json(self):
"""
diff --git a/importlib_metadata/_collections.py b/importlib_metadata/_collections.py
index cf0954e1..fc5045d3 100644
--- a/importlib_metadata/_collections.py
+++ b/importlib_metadata/_collections.py
@@ -1,4 +1,5 @@
import collections
+import typing
# from jaraco.collections 3.3
@@ -24,7 +25,10 @@ def freeze(self):
self._frozen = lambda key: self.default_factory()
-class Pair(collections.namedtuple('Pair', 'name value')):
+class Pair(typing.NamedTuple):
+ name: str
+ value: str
+
@classmethod
def parse(cls, text):
return cls(*map(str.strip, text.split("=", 1)))
diff --git a/importlib_metadata/_compat.py b/importlib_metadata/_compat.py
index 8fe4e4e3..01356d69 100644
--- a/importlib_metadata/_compat.py
+++ b/importlib_metadata/_compat.py
@@ -1,14 +1,7 @@
-import sys
import platform
+import sys
-
-__all__ = ['install', 'NullFinder', 'Protocol']
-
-
-try:
- from typing import Protocol
-except ImportError: # pragma: no cover
- from typing_extensions import Protocol # type: ignore
+__all__ = ['install', 'NullFinder']
def install(cls):
@@ -44,7 +37,7 @@ def matches(finder):
class NullFinder:
"""
- A "Finder" (aka "MetaClassFinder") that never finds any modules,
+ A "Finder" (aka "MetaPathFinder") that never finds any modules,
but may find distributions.
"""
@@ -52,14 +45,6 @@ class NullFinder:
def find_spec(*args, **kwargs):
return None
- # In Python 2, the import system requires finders
- # to have a find_module() method, but this usage
- # is deprecated in Python 3 in favor of find_spec().
- # For the purposes of this finder (i.e. being present
- # on sys.meta_path but having no other import
- # system functionality), the two methods are identical.
- find_module = find_spec
-
def pypy_partial(val):
"""
diff --git a/importlib_metadata/_functools.py b/importlib_metadata/_functools.py
index 71f66bd0..b1fd04a8 100644
--- a/importlib_metadata/_functools.py
+++ b/importlib_metadata/_functools.py
@@ -1,5 +1,6 @@
-import types
import functools
+import types
+from typing import Callable, TypeVar
# from jaraco.functools 3.3
@@ -102,3 +103,33 @@ def wrapper(param, *args, **kwargs):
return func(param, *args, **kwargs)
return wrapper
+
+
+# From jaraco.functools 4.4
+def noop(*args, **kwargs):
+ """
+ A no-operation function that does nothing.
+
+ >>> noop(1, 2, three=3)
+ """
+
+
+_T = TypeVar('_T')
+
+
+# From jaraco.functools 4.4
+def passthrough(func: Callable[..., object]) -> Callable[[_T], _T]:
+ """
+ Wrap the function to always return the first parameter.
+
+ >>> passthrough(print)('3')
+ 3
+ '3'
+ """
+
+ @functools.wraps(func)
+ def wrapper(first: _T, *args, **kwargs) -> _T:
+ func(first, *args, **kwargs)
+ return first
+
+ return wrapper # type: ignore[return-value]
diff --git a/importlib_metadata/_itertools.py b/importlib_metadata/_itertools.py
index d4ca9b91..79d37198 100644
--- a/importlib_metadata/_itertools.py
+++ b/importlib_metadata/_itertools.py
@@ -1,3 +1,4 @@
+from collections import defaultdict, deque
from itertools import filterfalse
@@ -71,3 +72,100 @@ def always_iterable(obj, base_type=(str, bytes)):
return iter(obj)
except TypeError:
return iter((obj,))
+
+
+# Copied from more_itertools 10.3
+class bucket:
+ """Wrap *iterable* and return an object that buckets the iterable into
+ child iterables based on a *key* function.
+
+ >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
+ >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character
+ >>> sorted(list(s)) # Get the keys
+ ['a', 'b', 'c']
+ >>> a_iterable = s['a']
+ >>> next(a_iterable)
+ 'a1'
+ >>> next(a_iterable)
+ 'a2'
+ >>> list(s['b'])
+ ['b1', 'b2', 'b3']
+
+ The original iterable will be advanced and its items will be cached until
+ they are used by the child iterables. This may require significant storage.
+
+ By default, attempting to select a bucket to which no items belong will
+ exhaust the iterable and cache all values.
+ If you specify a *validator* function, selected buckets will instead be
+ checked against it.
+
+ >>> from itertools import count
+ >>> it = count(1, 2) # Infinite sequence of odd numbers
+ >>> key = lambda x: x % 10 # Bucket by last digit
+ >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only
+ >>> s = bucket(it, key=key, validator=validator)
+ >>> 2 in s
+ False
+ >>> list(s[2])
+ []
+
+ """
+
+ def __init__(self, iterable, key, validator=None):
+ self._it = iter(iterable)
+ self._key = key
+ self._cache = defaultdict(deque)
+ self._validator = validator or (lambda x: True)
+
+ def __contains__(self, value):
+ if not self._validator(value):
+ return False
+
+ try:
+ item = next(self[value])
+ except StopIteration:
+ return False
+ else:
+ self._cache[value].appendleft(item)
+
+ return True
+
+ def _get_values(self, value):
+ """
+ Helper to yield items from the parent iterator that match *value*.
+ Items that don't match are stored in the local cache as they
+ are encountered.
+ """
+ while True:
+ # If we've cached some items that match the target value, emit
+ # the first one and evict it from the cache.
+ if self._cache[value]:
+ yield self._cache[value].popleft()
+ # Otherwise we need to advance the parent iterator to search for
+ # a matching item, caching the rest.
+ else:
+ while True:
+ try:
+ item = next(self._it)
+ except StopIteration:
+ return
+ item_value = self._key(item)
+ if item_value == value:
+ yield item
+ break
+ elif self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ def __iter__(self):
+ for item in self._it:
+ item_value = self._key(item)
+ if self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ yield from self._cache.keys()
+
+ def __getitem__(self, value):
+ if not self._validator(value):
+ return iter(())
+
+ return self._get_values(value)
diff --git a/importlib_metadata/_meta.py b/importlib_metadata/_meta.py
index 37ee43e6..0c20eff3 100644
--- a/importlib_metadata/_meta.py
+++ b/importlib_metadata/_meta.py
@@ -1,30 +1,48 @@
-from ._compat import Protocol
-from typing import Any, Dict, Iterator, List, TypeVar, Union
+from __future__ import annotations
+import os
+from collections.abc import Iterator
+from typing import (
+ Any,
+ Protocol,
+ TypeVar,
+ overload,
+)
_T = TypeVar("_T")
class PackageMetadata(Protocol):
- def __len__(self) -> int:
- ... # pragma: no cover
+ def __len__(self) -> int: ... # pragma: no cover
- def __contains__(self, item: str) -> bool:
- ... # pragma: no cover
+ def __contains__(self, item: str) -> bool: ... # pragma: no cover
- def __getitem__(self, key: str) -> str:
- ... # pragma: no cover
+ def __getitem__(self, key: str) -> str: ... # pragma: no cover
- def __iter__(self) -> Iterator[str]:
- ... # pragma: no cover
+ def __iter__(self) -> Iterator[str]: ... # pragma: no cover
- def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
+ @overload
+ def get(
+ self, name: str, failobj: None = None
+ ) -> str | None: ... # pragma: no cover
+
+ @overload
+ def get(self, name: str, failobj: _T) -> str | _T: ... # pragma: no cover
+
+ # overload per python/importlib_metadata#435
+ @overload
+ def get_all(
+ self, name: str, failobj: None = None
+ ) -> list[Any] | None: ... # pragma: no cover
+
+ @overload
+ def get_all(self, name: str, failobj: _T) -> list[Any] | _T:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
- def json(self) -> Dict[str, Union[str, List[str]]]:
+ def json(self) -> dict[str, str | list[str]]:
"""
A JSON-compatible form of the metadata.
"""
@@ -32,17 +50,22 @@ def json(self) -> Dict[str, Union[str, List[str]]]:
class SimplePath(Protocol):
"""
- A minimal subset of pathlib.Path required by PathDistribution.
+ A minimal subset of pathlib.Path required by Distribution.
"""
- def joinpath(self) -> 'SimplePath':
- ... # pragma: no cover
+ def joinpath(
+ self, other: str | os.PathLike[str]
+ ) -> SimplePath: ... # pragma: no cover
+
+ def __truediv__(
+ self, other: str | os.PathLike[str]
+ ) -> SimplePath: ... # pragma: no cover
+
+ @property
+ def parent(self) -> SimplePath: ... # pragma: no cover
- def __truediv__(self) -> 'SimplePath':
- ... # pragma: no cover
+ def read_text(self, encoding=None) -> str: ... # pragma: no cover
- def parent(self) -> 'SimplePath':
- ... # pragma: no cover
+ def read_bytes(self) -> bytes: ... # pragma: no cover
- def read_text(self) -> str:
- ... # pragma: no cover
+ def exists(self) -> bool: ... # pragma: no cover
diff --git a/importlib_metadata/_typing.py b/importlib_metadata/_typing.py
new file mode 100644
index 00000000..32b1d2b9
--- /dev/null
+++ b/importlib_metadata/_typing.py
@@ -0,0 +1,15 @@
+import functools
+import typing
+
+from ._meta import PackageMetadata
+
+md_none = functools.partial(typing.cast, PackageMetadata)
+"""
+Suppress type errors for optional metadata.
+
+Although Distribution.metadata can return None when metadata is corrupt
+and thus None, allow callers to assume it's not None and crash if
+that's the case.
+
+# python/importlib_metadata#493
+"""
diff --git a/importlib_metadata/compat/__init__.py b/importlib_metadata/compat/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/importlib_metadata/compat/py311.py b/importlib_metadata/compat/py311.py
new file mode 100644
index 00000000..3a532743
--- /dev/null
+++ b/importlib_metadata/compat/py311.py
@@ -0,0 +1,22 @@
+import os
+import pathlib
+import sys
+import types
+
+
+def wrap(path): # pragma: no cover
+ """
+ Workaround for https://github.com/python/cpython/issues/84538
+ to add backward compatibility for walk_up=True.
+ An example affected package is dask-labextension, which uses
+ jupyter-packaging to install JupyterLab javascript files outside
+ of site-packages.
+ """
+
+ def relative_to(root, *, walk_up=False):
+ return pathlib.Path(os.path.relpath(path, root))
+
+ return types.SimpleNamespace(relative_to=relative_to)
+
+
+relative_fix = wrap if sys.version_info < (3, 12) else lambda x: x
diff --git a/importlib_metadata/compat/py39.py b/importlib_metadata/compat/py39.py
new file mode 100644
index 00000000..3eb9c01e
--- /dev/null
+++ b/importlib_metadata/compat/py39.py
@@ -0,0 +1,42 @@
+"""
+Compatibility layer with Python 3.8/3.9
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING: # pragma: no cover
+ # Prevent circular imports on runtime.
+ from .. import Distribution, EntryPoint
+else:
+ Distribution = EntryPoint = Any
+
+from .._typing import md_none
+
+
+def normalized_name(dist: Distribution) -> str | None:
+ """
+ Honor name normalization for distributions that don't provide ``_normalized_name``.
+ """
+ try:
+ return dist._normalized_name
+ except AttributeError:
+ from .. import Prepared # -> delay to prevent circular imports.
+
+ return Prepared.normalize(
+ getattr(dist, "name", None) or md_none(dist.metadata)['Name']
+ )
+
+
+def ep_matches(ep: EntryPoint, **params) -> bool:
+ """
+ Workaround for ``EntryPoint`` objects without the ``matches`` method.
+ """
+ try:
+ return ep.matches(**params)
+ except AttributeError:
+ from .. import EntryPoint # -> delay to prevent circular imports.
+
+ # Reconstruct the EntryPoint object to make sure it is compatible.
+ return EntryPoint(ep.name, ep.value, ep.group).matches(**params)
diff --git a/importlib_metadata/diagnose.py b/importlib_metadata/diagnose.py
new file mode 100644
index 00000000..e405471a
--- /dev/null
+++ b/importlib_metadata/diagnose.py
@@ -0,0 +1,21 @@
+import sys
+
+from . import Distribution
+
+
+def inspect(path):
+ print("Inspecting", path)
+ dists = list(Distribution.discover(path=[path]))
+ if not dists:
+ return
+ print("Found", len(dists), "packages:", end=' ')
+ print(', '.join(dist.name for dist in dists))
+
+
+def run():
+ for path in sys.path:
+ inspect(path)
+
+
+if __name__ == '__main__':
+ run()
diff --git a/mypy.ini b/mypy.ini
index 976ba029..feac94cc 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,2 +1,27 @@
[mypy]
+# Is the project well-typed?
+strict = False
+
+# Early opt-in even when strict = False
+warn_unused_ignores = True
+warn_redundant_casts = True
+enable_error_code = ignore-without-code
+
+# Support namespace packages per https://github.com/python/mypy/issues/14057
+explicit_package_bases = True
+
+disable_error_code =
+ # Disable due to many false positives
+ overload-overlap,
+
+# jaraco/pytest-perf#16
+[mypy-pytest_perf.*]
+ignore_missing_imports = True
+
+# jaraco/zipp#123
+[mypy-zipp.*]
+ignore_missing_imports = True
+
+# jaraco/jaraco.test#7
+[mypy-jaraco.test.*]
ignore_missing_imports = True
diff --git a/pyproject.toml b/pyproject.toml
index 190b3551..b71b9a9b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,20 +1,84 @@
[build-system]
-requires = ["setuptools>=56", "setuptools_scm[toml]>=3.4.1"]
+requires = [
+ "setuptools>=77",
+ "setuptools_scm[toml]>=3.4.1",
+ # jaraco/skeleton#174
+ "coherent.licensed",
+]
build-backend = "setuptools.build_meta"
-[tool.black]
-skip-string-normalization = true
+[project]
+name = "importlib_metadata"
+authors = [
+ { name = "Jason R. Coombs", email = "jaraco@jaraco.com" },
+]
+description = "Read metadata from Python packages"
+readme = "README.rst"
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3 :: Only",
+]
+requires-python = ">=3.9"
+license = "Apache-2.0"
+dependencies = [
+ "zipp>=3.20",
+]
+dynamic = ["version"]
-[tool.setuptools_scm]
+[project.urls]
+Source = "https://github.com/python/importlib_metadata"
+
+[project.optional-dependencies]
+test = [
+ # upstream
+ "pytest >= 6, != 8.1.*",
+
+ # local
+ "packaging",
+ "pyfakefs",
+ "pytest-perf >= 0.9.2",
+ "jaraco.test >= 5.4",
+]
+
+doc = [
+ # upstream
+ "sphinx >= 3.5",
+ "jaraco.packaging >= 9.3",
+ "rst.linker >= 1.9",
+ "furo",
+ "sphinx-lint",
+
+ # tidelift
+ "jaraco.tidelift >= 1.4",
+
+ # local
+]
+perf = ["ipython"]
-[pytest.enabler.black]
-addopts = "--black"
+check = [
+ "pytest-checkdocs >= 2.4",
+ "pytest-ruff >= 0.2.1; sys_platform != 'cygwin'",
+]
-[pytest.enabler.mypy]
-addopts = "--mypy"
+cover = [
+ "pytest-cov",
+]
-[pytest.enabler.flake8]
-addopts = "--flake8"
+enabler = [
+ "pytest-enabler >= 3.4",
+]
-[pytest.enabler.cov]
-addopts = "--cov"
+type = [
+ # upstream
+ "pytest-mypy >= 1.0.1",
+
+ ## workaround for python/mypy#20454
+ "mypy < 1.19; python_implementation == 'PyPy'",
+
+ # local
+]
+
+
+[tool.setuptools_scm]
diff --git a/pytest.ini b/pytest.ini
index 80e98cc9..9a0f3bce 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,17 +1,25 @@
[pytest]
norecursedirs=dist build .tox .eggs
-addopts=--doctest-modules
-doctest_optionflags=ALLOW_UNICODE ELLIPSIS
+addopts=
+ --doctest-modules
+ --import-mode importlib
+consider_namespace_packages=true
filterwarnings=
- # Suppress deprecation warning in flake8
- ignore:SelectableGroups dict interface is deprecated::flake8
+ ## upstream
- # shopkeep/pytest-black#55
- ignore: is not using a cooperative constructor:pytest.PytestDeprecationWarning
- ignore:The \(fspath. py.path.local\) argument to BlackItem is deprecated.:pytest.PytestDeprecationWarning
- ignore:BlackItem is an Item subclass and should not be a collector:pytest.PytestWarning
+ # Ensure ResourceWarnings are emitted
+ default::ResourceWarning
- # tholo/pytest-flake8#83
- ignore: is not using a cooperative constructor:pytest.PytestDeprecationWarning
- ignore:The \(fspath. py.path.local\) argument to Flake8Item is deprecated.:pytest.PytestDeprecationWarning
- ignore:Flake8Item is an Item subclass and should not be a collector:pytest.PytestWarning
+ # realpython/pytest-mypy#152
+ ignore:'encoding' argument not specified::pytest_mypy
+
+ # python/cpython#100750
+ ignore:'encoding' argument not specified::platform
+
+ # pypa/build#615
+ ignore:'encoding' argument not specified::build.env
+
+ # dateutil/dateutil#1284
+ ignore:datetime.datetime.utcfromtimestamp:DeprecationWarning:dateutil.tz.tz
+
+ ## end upstream
diff --git a/ruff.toml b/ruff.toml
new file mode 100644
index 00000000..63c0825f
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,51 @@
+[lint]
+extend-select = [
+ # upstream
+
+ "C901", # complex-structure
+ "I", # isort
+ "PERF401", # manual-list-comprehension
+
+ # Ensure modern type annotation syntax and best practices
+ # Not including those covered by type-checkers or exclusive to Python 3.11+
+ "FA", # flake8-future-annotations
+ "F404", # late-future-import
+ "PYI", # flake8-pyi
+ "UP006", # non-pep585-annotation
+ "UP007", # non-pep604-annotation
+ "UP010", # unnecessary-future-import
+ "UP035", # deprecated-import
+ "UP037", # quoted-annotation
+ "UP043", # unnecessary-default-type-args
+
+ # local
+]
+ignore = [
+ # upstream
+
+ # Typeshed rejects complex or non-literal defaults for maintenance and testing reasons,
+ # irrelevant to this project.
+ "PYI011", # typed-argument-default-in-stub
+ # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
+ "W191",
+ "E111",
+ "E114",
+ "E117",
+ "D206",
+ "D300",
+ "Q000",
+ "Q001",
+ "Q002",
+ "Q003",
+ "COM812",
+ "COM819",
+
+ # local
+]
+
+[format]
+# Enable preview to get hugged parenthesis unwrapping and other nice surprises
+# See https://github.com/jaraco/skeleton/pull/133#issuecomment-2239538373
+preview = true
+# https://docs.astral.sh/ruff/settings/#format_quote-style
+quote-style = "preserve"
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 2dbed020..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,64 +0,0 @@
-[metadata]
-name = importlib_metadata
-author = Jason R. Coombs
-author_email = jaraco@jaraco.com
-description = Read metadata from Python packages
-long_description = file:README.rst
-url = https://github.com/python/importlib_metadata
-classifiers =
- Development Status :: 5 - Production/Stable
- Intended Audience :: Developers
- License :: OSI Approved :: Apache Software License
- Programming Language :: Python :: 3
- Programming Language :: Python :: 3 :: Only
-
-[options]
-packages = find_namespace:
-include_package_data = true
-python_requires = >=3.7
-install_requires =
- zipp>=0.5
- typing-extensions>=3.6.4; python_version < "3.8"
-
-[options.packages.find]
-exclude =
- build*
- dist*
- docs*
- tests*
- prepare*
-
-[options.extras_require]
-testing =
- # upstream
- pytest >= 6
- pytest-checkdocs >= 2.4
- pytest-flake8
- pytest-black >= 0.3.7; \
- # workaround for jaraco/skeleton#22
- python_implementation != "PyPy"
- pytest-cov
- pytest-mypy >= 0.9.1; \
- # workaround for jaraco/skeleton#22
- python_implementation != "PyPy"
- pytest-enabler >= 1.0.1
-
- # local
- importlib_resources>=1.3; python_version < "3.9"
- packaging
- pyfakefs
- flufl.flake8
- pytest-perf >= 0.9.2
-
-docs =
- # upstream
- sphinx
- jaraco.packaging >= 9
- rst.linker >= 1.9
-
- # local
-
-perf =
- ipython
-
-[options.entry_points]
diff --git a/tests/_context.py b/tests/_context.py
new file mode 100644
index 00000000..8a53eb55
--- /dev/null
+++ b/tests/_context.py
@@ -0,0 +1,13 @@
+import contextlib
+
+
+# from jaraco.context 4.3
+class suppress(contextlib.suppress, contextlib.ContextDecorator):
+ """
+ A version of contextlib.suppress with decorator support.
+
+ >>> @suppress(KeyError)
+ ... def key_error():
+ ... {}['']
+ >>> key_error()
+ """
diff --git a/tests/_path.py b/tests/_path.py
new file mode 100644
index 00000000..e63d889f
--- /dev/null
+++ b/tests/_path.py
@@ -0,0 +1,117 @@
+# from jaraco.path 3.7.2
+
+from __future__ import annotations
+
+import functools
+import pathlib
+from collections.abc import Mapping
+from typing import TYPE_CHECKING, Protocol, Union, runtime_checkable
+
+if TYPE_CHECKING:
+ from typing_extensions import Self
+
+
+class Symlink(str):
+ """
+ A string indicating the target of a symlink.
+ """
+
+
+FilesSpec = Mapping[str, Union[str, bytes, Symlink, 'FilesSpec']]
+
+
+@runtime_checkable
+class TreeMaker(Protocol):
+ def __truediv__(self, other, /) -> Self: ...
+ def mkdir(self, *, exist_ok) -> object: ...
+ def write_text(self, content, /, *, encoding) -> object: ...
+ def write_bytes(self, content, /) -> object: ...
+ def symlink_to(self, target, /) -> object: ...
+
+
+def _ensure_tree_maker(obj: str | TreeMaker) -> TreeMaker:
+ return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj)
+
+
+def build(
+ spec: FilesSpec,
+ prefix: str | TreeMaker = pathlib.Path(),
+):
+ """
+ Build a set of files/directories, as described by the spec.
+
+ Each key represents a pathname, and the value represents
+ the content. Content may be a nested directory.
+
+ >>> spec = {
+ ... 'README.txt': "A README file",
+ ... "foo": {
+ ... "__init__.py": "",
+ ... "bar": {
+ ... "__init__.py": "",
+ ... },
+ ... "baz.py": "# Some code",
+ ... "bar.py": Symlink("baz.py"),
+ ... },
+ ... "bing": Symlink("foo"),
+ ... }
+ >>> target = getfixture('tmp_path')
+ >>> build(spec, target)
+ >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
+ '# Some code'
+ >>> target.joinpath('bing/bar.py').read_text(encoding='utf-8')
+ '# Some code'
+ """
+ for name, contents in spec.items():
+ create(contents, _ensure_tree_maker(prefix) / name)
+
+
+@functools.singledispatch
+def create(content: str | bytes | FilesSpec, path: TreeMaker) -> None:
+ path.mkdir(exist_ok=True)
+ # Mypy only looks at the signature of the main singledispatch method. So it must contain the complete Union
+ build(content, prefix=path) # type: ignore[arg-type] # python/mypy#11727
+
+
+@create.register
+def _(content: bytes, path: TreeMaker) -> None:
+ path.write_bytes(content)
+
+
+@create.register
+def _(content: str, path: TreeMaker) -> None:
+ path.write_text(content, encoding='utf-8')
+
+
+@create.register
+def _(content: Symlink, path: TreeMaker) -> None:
+ path.symlink_to(content)
+
+
+class Recording:
+ """
+ A TreeMaker object that records everything that would be written.
+
+ >>> r = Recording()
+ >>> build({'foo': {'foo1.txt': 'yes'}, 'bar.txt': 'abc'}, r)
+ >>> r.record
+ ['foo/foo1.txt', 'bar.txt']
+ """
+
+ def __init__(self, loc=pathlib.PurePosixPath(), record=None):
+ self.loc = loc
+ self.record = record if record is not None else []
+
+ def __truediv__(self, other):
+ return Recording(self.loc / other, self.record)
+
+ def write_text(self, content, **kwargs):
+ self.record.append(str(self.loc))
+
+ write_bytes = write_text
+
+ def mkdir(self, **kwargs):
+ return
+
+ def symlink_to(self, target):
+ pass
diff --git a/tests/compat/__init__.py b/tests/compat/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/compat/py312.py b/tests/compat/py312.py
new file mode 100644
index 00000000..ea9a58ba
--- /dev/null
+++ b/tests/compat/py312.py
@@ -0,0 +1,18 @@
+import contextlib
+
+from .py39 import import_helper
+
+
+@contextlib.contextmanager
+def isolated_modules():
+ """
+ Save modules on entry and cleanup on exit.
+ """
+ (saved,) = import_helper.modules_setup()
+ try:
+ yield
+ finally:
+ import_helper.modules_cleanup(saved)
+
+
+vars(import_helper).setdefault('isolated_modules', isolated_modules)
diff --git a/tests/compat/py39.py b/tests/compat/py39.py
new file mode 100644
index 00000000..4e45d7cc
--- /dev/null
+++ b/tests/compat/py39.py
@@ -0,0 +1,8 @@
+from jaraco.test.cpython import from_test_support, try_import
+
+os_helper = try_import('os_helper') or from_test_support(
+ 'FS_NONASCII', 'skip_unless_symlink', 'temp_dir'
+)
+import_helper = try_import('import_helper') or from_test_support(
+ 'modules_setup', 'modules_cleanup'
+)
diff --git a/tests/compat/test_py39_compat.py b/tests/compat/test_py39_compat.py
new file mode 100644
index 00000000..db9fb1b7
--- /dev/null
+++ b/tests/compat/test_py39_compat.py
@@ -0,0 +1,74 @@
+import pathlib
+import sys
+import unittest
+
+from importlib_metadata import (
+ distribution,
+ distributions,
+ entry_points,
+ metadata,
+ version,
+)
+
+from .. import fixtures
+
+
+class OldStdlibFinderTests(fixtures.DistInfoPkgOffPath, unittest.TestCase):
+ def setUp(self):
+ if sys.version_info >= (3, 10):
+ self.skipTest("Tests specific for Python 3.8/3.9")
+ super().setUp()
+
+ def _meta_path_finder(self):
+ from importlib.metadata import (
+ Distribution,
+ DistributionFinder,
+ PathDistribution,
+ )
+ from importlib.util import spec_from_file_location
+
+ path = pathlib.Path(self.site_dir)
+
+ class CustomDistribution(Distribution):
+ def __init__(self, name, path):
+ self.name = name
+ self._path_distribution = PathDistribution(path)
+
+ def read_text(self, filename):
+ return self._path_distribution.read_text(filename)
+
+ def locate_file(self, path):
+ return self._path_distribution.locate_file(path)
+
+ class CustomFinder:
+ @classmethod
+ def find_spec(cls, fullname, _path=None, _target=None):
+ candidate = pathlib.Path(path, *fullname.split(".")).with_suffix(".py")
+ if candidate.exists():
+ return spec_from_file_location(fullname, candidate)
+
+ @classmethod
+ def find_distributions(self, context=DistributionFinder.Context()):
+ for dist_info in path.glob("*.dist-info"):
+ yield PathDistribution(dist_info)
+ name, _, _ = str(dist_info).partition("-")
+ yield CustomDistribution(name + "_custom", dist_info)
+
+ return CustomFinder
+
+ def test_compatibility_with_old_stdlib_path_distribution(self):
+ """
+ Given a custom finder that uses Python 3.8/3.9 importlib.metadata is installed,
+ when importlib_metadata functions are called, there should be no exceptions.
+ Ref python/importlib_metadata#396.
+ """
+ self.fixtures.enter_context(fixtures.install_finder(self._meta_path_finder()))
+
+ assert list(distributions())
+ assert distribution("distinfo_pkg")
+ assert distribution("distinfo_pkg_custom")
+ assert version("distinfo_pkg") > "0"
+ assert version("distinfo_pkg_custom") > "0"
+ assert list(metadata("distinfo_pkg"))
+ assert list(metadata("distinfo_pkg_custom"))
+ assert list(entry_points(group="entries"))
diff --git a/prepare/example/example/__init__.py b/tests/data/sources/example/example/__init__.py
similarity index 100%
rename from prepare/example/example/__init__.py
rename to tests/data/sources/example/example/__init__.py
diff --git a/prepare/example/setup.py b/tests/data/sources/example/setup.py
similarity index 100%
rename from prepare/example/setup.py
rename to tests/data/sources/example/setup.py
diff --git a/prepare/example2/example2/__init__.py b/tests/data/sources/example2/example2/__init__.py
similarity index 100%
rename from prepare/example2/example2/__init__.py
rename to tests/data/sources/example2/example2/__init__.py
diff --git a/prepare/example2/pyproject.toml b/tests/data/sources/example2/pyproject.toml
similarity index 100%
rename from prepare/example2/pyproject.toml
rename to tests/data/sources/example2/pyproject.toml
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 08a478ac..021eb811 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -1,48 +1,26 @@
-import os
-import sys
+import contextlib
import copy
-import shutil
+import functools
+import json
import pathlib
-import tempfile
+import shutil
+import sys
import textwrap
-import contextlib
+from importlib import resources
-from .py39compat import FS_NONASCII
-from typing import Dict, Union
-
-try:
- from importlib import resources # type: ignore
-
- getattr(resources, 'files')
- getattr(resources, 'as_file')
-except (ImportError, AttributeError):
- import importlib_resources as resources # type: ignore
+from . import _path
+from ._path import FilesSpec
+from .compat.py39 import os_helper
+from .compat.py312 import import_helper
@contextlib.contextmanager
-def tempdir():
- tmpdir = tempfile.mkdtemp()
- try:
- yield pathlib.Path(tmpdir)
- finally:
- shutil.rmtree(tmpdir)
-
-
-@contextlib.contextmanager
-def save_cwd():
- orig = os.getcwd()
- try:
- yield
- finally:
- os.chdir(orig)
-
-
-@contextlib.contextmanager
-def tempdir_as_cwd():
- with tempdir() as tmp:
- with save_cwd():
- os.chdir(str(tmp))
- yield tmp
+def tmp_path():
+ """
+ Like os_helper.temp_dir, but yields a pathlib.Path.
+ """
+ with os_helper.temp_dir() as path:
+ yield pathlib.Path(path)
@contextlib.contextmanager
@@ -63,7 +41,7 @@ def setUp(self):
class SiteDir(Fixtures):
def setUp(self):
super().setUp()
- self.site_dir = self.fixtures.enter_context(tempdir())
+ self.site_dir = self.fixtures.enter_context(tmp_path())
class OnSysPath(Fixtures):
@@ -79,15 +57,19 @@ def add_sys_path(dir):
def setUp(self):
super().setUp()
self.fixtures.enter_context(self.add_sys_path(self.site_dir))
+ self.fixtures.enter_context(import_helper.isolated_modules())
-# Except for python/mypy#731, prefer to define
-# FilesDef = Dict[str, Union['FilesDef', str]]
-FilesDef = Dict[str, Union[Dict[str, Union[Dict[str, str], str]], str]]
+class SiteBuilder(SiteDir):
+ def setUp(self):
+ super().setUp()
+ for cls in self.__class__.mro():
+ with contextlib.suppress(AttributeError):
+ build_files(cls.files, prefix=self.site_dir)
-class DistInfoPkg(OnSysPath, SiteDir):
- files: FilesDef = {
+class DistInfoPkg(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
"distinfo_pkg-1.0.0.dist-info": {
"METADATA": """
Name: distinfo-pkg
@@ -113,10 +95,6 @@ def main():
""",
}
- def setUp(self):
- super().setUp()
- build_files(DistInfoPkg.files, self.site_dir)
-
def make_uppercase(self):
"""
Rewrite metadata with everything uppercase.
@@ -128,8 +106,27 @@ def make_uppercase(self):
build_files(files, self.site_dir)
-class DistInfoPkgWithDot(OnSysPath, SiteDir):
- files: FilesDef = {
+class DistInfoPkgEditable(DistInfoPkg):
+ """
+ Package with a PEP 660 direct_url.json.
+ """
+
+ some_hash = '524127ce937f7cb65665130c695abd18ca386f60bb29687efb976faa1596fdcc'
+ files: FilesSpec = {
+ 'distinfo_pkg-1.0.0.dist-info': {
+ 'direct_url.json': json.dumps({
+ "archive_info": {
+ "hash": f"sha256={some_hash}",
+ "hashes": {"sha256": f"{some_hash}"},
+ },
+ "url": "file:///path/to/distinfo_pkg-1.0.0.editable-py3-none-any.whl",
+ })
+ },
+ }
+
+
+class DistInfoPkgWithDot(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
"pkg_dot-1.0.0.dist-info": {
"METADATA": """
Name: pkg.dot
@@ -138,13 +135,9 @@ class DistInfoPkgWithDot(OnSysPath, SiteDir):
},
}
- def setUp(self):
- super().setUp()
- build_files(DistInfoPkgWithDot.files, self.site_dir)
-
-class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir):
- files: FilesDef = {
+class DistInfoPkgWithDotLegacy(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
"pkg.dot-1.0.0.dist-info": {
"METADATA": """
Name: pkg.dot
@@ -159,19 +152,13 @@ class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir):
},
}
- def setUp(self):
- super().setUp()
- build_files(DistInfoPkgWithDotLegacy.files, self.site_dir)
-
-class DistInfoPkgOffPath(SiteDir):
- def setUp(self):
- super().setUp()
- build_files(DistInfoPkg.files, self.site_dir)
+class DistInfoPkgOffPath(SiteBuilder):
+ files = DistInfoPkg.files
-class EggInfoPkg(OnSysPath, SiteDir):
- files: FilesDef = {
+class EggInfoPkg(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
"egginfo_pkg.egg-info": {
"PKG-INFO": """
Name: egginfo-pkg
@@ -205,13 +192,122 @@ def main():
""",
}
- def setUp(self):
- super().setUp()
- build_files(EggInfoPkg.files, prefix=self.site_dir)
+
+class EggInfoPkgPipInstalledNoToplevel(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
+ "egg_with_module_pkg.egg-info": {
+ "PKG-INFO": "Name: egg_with_module-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ egg_with_module.py
+ setup.py
+ egg_with_module_pkg.egg-info/PKG-INFO
+ egg_with_module_pkg.egg-info/SOURCES.txt
+ egg_with_module_pkg.egg-info/top_level.txt
+ """,
+ # installed-files.txt is written by pip, and is a strictly more
+ # accurate source than SOURCES.txt as to the installed contents of
+ # the package.
+ "installed-files.txt": """
+ ../egg_with_module.py
+ PKG-INFO
+ SOURCES.txt
+ top_level.txt
+ """,
+ # missing top_level.txt (to trigger fallback to installed-files.txt)
+ },
+ "egg_with_module.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+
+class EggInfoPkgPipInstalledExternalDataFiles(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
+ "egg_with_module_pkg.egg-info": {
+ "PKG-INFO": "Name: egg_with_module-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ egg_with_module.py
+ setup.py
+ egg_with_module.json
+ egg_with_module_pkg.egg-info/PKG-INFO
+ egg_with_module_pkg.egg-info/SOURCES.txt
+ egg_with_module_pkg.egg-info/top_level.txt
+ """,
+ # installed-files.txt is written by pip, and is a strictly more
+ # accurate source than SOURCES.txt as to the installed contents of
+ # the package.
+ "installed-files.txt": """
+ ../../../etc/jupyter/jupyter_notebook_config.d/relative.json
+ /etc/jupyter/jupyter_notebook_config.d/absolute.json
+ ../egg_with_module.py
+ PKG-INFO
+ SOURCES.txt
+ top_level.txt
+ """,
+ # missing top_level.txt (to trigger fallback to installed-files.txt)
+ },
+ "egg_with_module.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+
+class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
+ "egg_with_no_modules_pkg.egg-info": {
+ "PKG-INFO": "Name: egg_with_no_modules-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ setup.py
+ egg_with_no_modules_pkg.egg-info/PKG-INFO
+ egg_with_no_modules_pkg.egg-info/SOURCES.txt
+ egg_with_no_modules_pkg.egg-info/top_level.txt
+ """,
+ # installed-files.txt is written by pip, and is a strictly more
+ # accurate source than SOURCES.txt as to the installed contents of
+ # the package.
+ "installed-files.txt": """
+ PKG-INFO
+ SOURCES.txt
+ top_level.txt
+ """,
+ # top_level.txt correctly reflects that no modules are installed
+ "top_level.txt": b"\n",
+ },
+ }
-class EggInfoFile(OnSysPath, SiteDir):
- files: FilesDef = {
+class EggInfoPkgSourcesFallback(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
+ "sources_fallback_pkg.egg-info": {
+ "PKG-INFO": "Name: sources_fallback-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ sources_fallback.py
+ setup.py
+ sources_fallback_pkg.egg-info/PKG-INFO
+ sources_fallback_pkg.egg-info/SOURCES.txt
+ """,
+ # missing installed-files.txt (i.e. not installed by pip) and
+ # missing top_level.txt (to trigger fallback to SOURCES.txt)
+ },
+ "sources_fallback.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+
+class EggInfoFile(OnSysPath, SiteBuilder):
+ files: FilesSpec = {
"egginfo_file.egg-info": """
Metadata-Version: 1.0
Name: egginfo_file
@@ -226,48 +322,30 @@ class EggInfoFile(OnSysPath, SiteDir):
""",
}
- def setUp(self):
- super().setUp()
- build_files(EggInfoFile.files, prefix=self.site_dir)
+# dedent all text strings before writing
+orig = _path.create.registry[str]
+_path.create.register(str, lambda content, path: orig(DALS(content), path))
-def build_files(file_defs, prefix=pathlib.Path()):
- """Build a set of files/directories, as described by the
- file_defs dictionary. Each key/value pair in the dictionary is
- interpreted as a filename/contents pair. If the contents value is a
- dictionary, a directory is created, and the dictionary interpreted
- as the files within it, recursively.
+build_files = _path.build
- For example:
- {"README.txt": "A README file",
- "foo": {
- "__init__.py": "",
- "bar": {
- "__init__.py": "",
- },
- "baz.py": "# Some code",
- }
- }
- """
- for name, contents in file_defs.items():
- full_name = prefix / name
- if isinstance(contents, dict):
- full_name.mkdir()
- build_files(contents, prefix=full_name)
- else:
- if isinstance(contents, bytes):
- with full_name.open('wb') as f:
- f.write(contents)
- else:
- with full_name.open('w', encoding='utf-8') as f:
- f.write(DALS(contents))
+def build_record(file_defs):
+ return ''.join(f'{name},,\n' for name in record_names(file_defs))
+
+
+def record_names(file_defs):
+ recording = _path.Recording()
+ _path.build(file_defs, recording)
+ return recording.record
class FileBuilder:
def unicode_filename(self):
- return FS_NONASCII or self.skip("File system does not support non-ascii.")
+ return os_helper.FS_NONASCII or self.skip(
+ "File system does not support non-ascii."
+ )
def DALS(str):
@@ -275,11 +353,6 @@ def DALS(str):
return textwrap.dedent(str).lstrip()
-class NullFinder:
- def find_module(self, name):
- pass
-
-
class ZipFixtures:
root = 'tests.data'
@@ -294,3 +367,18 @@ def setUp(self):
# Add self.zip_name to the front of sys.path.
self.resources = contextlib.ExitStack()
self.addCleanup(self.resources.close)
+
+
+def parameterize(*args_set):
+ """Run test method with a series of parameters."""
+
+ def wrapper(func):
+ @functools.wraps(func)
+ def _inner(self):
+ for args in args_set:
+ with self.subTest(**args):
+ func(self, **args)
+
+ return _inner
+
+ return wrapper
diff --git a/tests/py39compat.py b/tests/py39compat.py
deleted file mode 100644
index 926dcad9..00000000
--- a/tests/py39compat.py
+++ /dev/null
@@ -1,4 +0,0 @@
-try:
- from test.support.os_helper import FS_NONASCII
-except ImportError:
- from test.support import FS_NONASCII # noqa
diff --git a/tests/test_api.py b/tests/test_api.py
index 26731884..c36f93e0 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1,11 +1,8 @@
+import importlib
import re
import textwrap
import unittest
-import warnings
-import importlib
-import contextlib
-from . import fixtures
from importlib_metadata import (
Distribution,
PackageNotFoundError,
@@ -17,22 +14,20 @@
version,
)
-
-@contextlib.contextmanager
-def suppress_known_deprecation():
- with warnings.catch_warnings(record=True) as ctx:
- warnings.simplefilter('default', category=DeprecationWarning)
- yield ctx
+from . import fixtures
class APITests(
fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgPipInstalledExternalDataFiles,
+ fixtures.EggInfoPkgSourcesFallback,
fixtures.DistInfoPkg,
fixtures.DistInfoPkgWithDot,
fixtures.EggInfoFile,
unittest.TestCase,
):
-
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
@@ -63,15 +58,28 @@ def test_prefix_not_matched(self):
distribution(prefix)
def test_for_top_level(self):
- self.assertEqual(
- distribution('egginfo-pkg').read_text('top_level.txt').strip(), 'mod'
- )
+ tests = [
+ ('egginfo-pkg', 'mod'),
+ ('egg_with_no_modules-pkg', ''),
+ ]
+ for pkg_name, expect_content in tests:
+ with self.subTest(pkg_name):
+ self.assertEqual(
+ distribution(pkg_name).read_text('top_level.txt').strip(),
+ expect_content,
+ )
def test_read_text(self):
- top_level = [
- path for path in files('egginfo-pkg') if path.name == 'top_level.txt'
- ][0]
- self.assertEqual(top_level.read_text(), 'mod\n')
+ tests = [
+ ('egginfo-pkg', 'mod\n'),
+ ('egg_with_no_modules-pkg', '\n'),
+ ]
+ for pkg_name, expect_content in tests:
+ with self.subTest(pkg_name):
+ top_level = [
+ path for path in files(pkg_name) if path.name == 'top_level.txt'
+ ][0]
+ self.assertEqual(top_level.read_text(), expect_content)
def test_entry_points(self):
eps = entry_points()
@@ -89,15 +97,15 @@ def test_entry_points_distribution(self):
self.assertIn(ep.dist.name, ('distinfo-pkg', 'egginfo-pkg'))
self.assertEqual(ep.dist.version, "1.0.0")
- def test_entry_points_unique_packages(self):
+ def test_entry_points_unique_packages_normalized(self):
"""
Entry points should only be exposed for the first package
- on sys.path with a given name.
+ on sys.path with a given name (even when normalized).
"""
- alt_site_dir = self.fixtures.enter_context(fixtures.tempdir())
+ alt_site_dir = self.fixtures.enter_context(fixtures.tmp_path())
self.fixtures.enter_context(self.add_sys_path(alt_site_dir))
alt_pkg = {
- "distinfo_pkg-1.1.0.dist-info": {
+ "DistInfo_pkg-1.1.0.dist-info": {
"METADATA": """
Name: distinfo-pkg
Version: 1.1.0
@@ -124,62 +132,6 @@ def test_entry_points_missing_name(self):
def test_entry_points_missing_group(self):
assert entry_points(group='missing') == ()
- def test_entry_points_dict_construction(self):
- """
- Prior versions of entry_points() returned simple lists and
- allowed casting those lists into maps by name using ``dict()``.
- Capture this now deprecated use-case.
- """
- with suppress_known_deprecation() as caught:
- eps = dict(entry_points(group='entries'))
-
- assert 'main' in eps
- assert eps['main'] == entry_points(group='entries')['main']
-
- # check warning
- expected = next(iter(caught))
- assert expected.category is DeprecationWarning
- assert "Construction of dict of EntryPoints is deprecated" in str(expected)
-
- def test_entry_points_by_index(self):
- """
- Prior versions of Distribution.entry_points would return a
- tuple that allowed access by index.
- Capture this now deprecated use-case
- See python/importlib_metadata#300 and bpo-44246.
- """
- eps = distribution('distinfo-pkg').entry_points
- with suppress_known_deprecation() as caught:
- eps[0]
-
- # check warning
- expected = next(iter(caught))
- assert expected.category is DeprecationWarning
- assert "Accessing entry points by index is deprecated" in str(expected)
-
- def test_entry_points_groups_getitem(self):
- """
- Prior versions of entry_points() returned a dict. Ensure
- that callers using '.__getitem__()' are supported but warned to
- migrate.
- """
- with suppress_known_deprecation():
- entry_points()['entries'] == entry_points(group='entries')
-
- with self.assertRaises(KeyError):
- entry_points()['missing']
-
- def test_entry_points_groups_get(self):
- """
- Prior versions of entry_points() returned a dict. Ensure
- that callers using '.get()' are supported but warned to
- migrate.
- """
- with suppress_known_deprecation():
- entry_points().get('missing', 'default') == 'default'
- entry_points().get('entries', 'default') == entry_points()['entries']
- entry_points().get('missing', ()) == ()
-
def test_entry_points_allows_no_attributes(self):
ep = entry_points().select(group='entries', name='main')
with self.assertRaises(AttributeError):
@@ -197,6 +149,28 @@ def test_importlib_metadata_version(self):
resolved = version('importlib-metadata')
assert re.match(self.version_pattern, resolved)
+ def test_missing_key(self):
+ """
+ Requesting a missing key raises KeyError.
+ """
+ md = metadata('distinfo-pkg')
+ with self.assertRaises(KeyError):
+ md['does-not-exist']
+
+ def test_get_key(self):
+ """
+ Getting a key gets the key.
+ """
+ md = metadata('egginfo-pkg')
+ assert md.get('Name') == 'egginfo-pkg'
+
+ def test_get_missing_key(self):
+ """
+ Requesting a missing key will return None.
+ """
+ md = metadata('distinfo-pkg')
+ assert md.get('does-not-exist') is None
+
@staticmethod
def _test_files(files):
root = files[0].root
@@ -219,6 +193,9 @@ def test_files_dist_info(self):
def test_files_egg_info(self):
self._test_files(files('egginfo-pkg'))
+ self._test_files(files('egg_with_module-pkg'))
+ self._test_files(files('egg_with_no_modules-pkg'))
+ self._test_files(files('sources_fallback-pkg'))
def test_version_egg_info_file(self):
self.assertEqual(version('egginfo-file'), '0.1')
diff --git a/tests/test_integration.py b/tests/test_integration.py
index c382a506..9bb3e793 100644
--- a/tests/test_integration.py
+++ b/tests/test_integration.py
@@ -1,15 +1,24 @@
+"""
+Test behaviors specific to importlib_metadata.
+
+These tests are excluded downstream in CPython as they
+test functionality only in importlib_metadata or require
+behaviors ('packaging') that aren't available in the
+stdlib.
+"""
+
import unittest
+
import packaging.requirements
import packaging.version
-from . import fixtures
from importlib_metadata import (
- MetadataPathFinder,
_compat,
- distributions,
version,
)
+from . import fixtures
+
class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase):
def test_package_spec_installed(self):
@@ -29,11 +38,14 @@ def is_installed(package_spec):
class FinderTests(fixtures.Fixtures, unittest.TestCase):
def test_finder_without_module(self):
- class ModuleFreeFinder(fixtures.NullFinder):
+ class ModuleFreeFinder:
"""
A finder without an __module__ attribute
"""
+ def find_module(self, name):
+ pass
+
def __getattribute__(self, name):
if name == '__module__':
raise AttributeError(name)
@@ -41,27 +53,3 @@ def __getattribute__(self, name):
self.fixtures.enter_context(fixtures.install_finder(ModuleFreeFinder()))
_compat.disable_stdlib_finder()
-
-
-class DistSearch(unittest.TestCase):
- def test_search_dist_dirs(self):
- """
- Pip needs the _search_paths interface to locate
- distribution metadata dirs. Protect it for PyPA
- use-cases (only). Ref python/importlib_metadata#111.
- """
- res = MetadataPathFinder._search_paths('any-name', [])
- assert list(res) == []
-
- def test_interleaved_discovery(self):
- """
- When the search is cached, it is
- possible for searches to be interleaved, so make sure
- those use-cases are safe.
-
- Ref #293
- """
- dists = distributions()
- next(dists)
- version('importlib_metadata')
- next(dists)
diff --git a/tests/test_main.py b/tests/test_main.py
index 1a64af56..5ed08c89 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -1,19 +1,16 @@
-import re
-import json
+import importlib
import pickle
-import textwrap
+import re
import unittest
-import warnings
-import importlib
-import importlib_metadata
+
import pyfakefs.fake_filesystem_unittest as ffs
-from . import fixtures
+import importlib_metadata
from importlib_metadata import (
Distribution,
EntryPoint,
- MetadataPathFinder,
PackageNotFoundError,
+ _unique,
distributions,
entry_points,
metadata,
@@ -21,6 +18,10 @@
version,
)
+from . import fixtures
+from ._path import Symlink
+from .compat.py39 import os_helper
+
class BasicTests(fixtures.DistInfoPkg, unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
@@ -37,7 +38,7 @@ def test_for_name_does_not_exist(self):
def test_package_not_found_mentions_metadata(self):
"""
When a package is not found, that could indicate that the
- packgae is not installed or that it is installed without
+ package is not installed or that it is installed without
metadata. Ensure the exception mentions metadata to help
guide users toward the cause. See #124.
"""
@@ -46,9 +47,17 @@ def test_package_not_found_mentions_metadata(self):
assert "metadata" in str(ctx.exception)
- def test_new_style_classes(self):
- self.assertIsInstance(Distribution, type)
- self.assertIsInstance(MetadataPathFinder, type)
+ def test_abc_enforced(self):
+ with self.assertRaises(TypeError):
+ type('DistributionSubclass', (Distribution,), {})()
+
+ @fixtures.parameterize(
+ dict(name=None),
+ dict(name=''),
+ )
+ def test_invalid_inputs_to_from_name(self, name):
+ with self.assertRaises(Exception):
+ Distribution.from_name(name)
class ImportTests(fixtures.DistInfoPkg, unittest.TestCase):
@@ -77,48 +86,85 @@ def test_resolve_without_attr(self):
class NameNormalizationTests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
@staticmethod
- def pkg_with_dashes(site_dir):
+ def make_pkg(name):
"""
- Create minimal metadata for a package with dashes
- in the name (and thus underscores in the filename).
+ Create minimal metadata for a dist-info package with
+ the indicated name on the file system.
"""
- metadata_dir = site_dir / 'my_pkg.dist-info'
- metadata_dir.mkdir()
- metadata = metadata_dir / 'METADATA'
- with metadata.open('w', encoding='utf-8') as strm:
- strm.write('Version: 1.0\n')
- return 'my-pkg'
+ return {
+ f'{name}.dist-info': {
+ 'METADATA': 'VERSION: 1.0\n',
+ },
+ }
def test_dashes_in_dist_name_found_as_underscores(self):
"""
For a package with a dash in the name, the dist-info metadata
uses underscores in the name. Ensure the metadata loads.
"""
- pkg_name = self.pkg_with_dashes(self.site_dir)
- assert version(pkg_name) == '1.0'
-
- @staticmethod
- def pkg_with_mixed_case(site_dir):
- """
- Create minimal metadata for a package with mixed case
- in the name.
- """
- metadata_dir = site_dir / 'CherryPy.dist-info'
- metadata_dir.mkdir()
- metadata = metadata_dir / 'METADATA'
- with metadata.open('w', encoding='utf-8') as strm:
- strm.write('Version: 1.0\n')
- return 'CherryPy'
+ fixtures.build_files(self.make_pkg('my_pkg'), self.site_dir)
+ assert version('my-pkg') == '1.0'
def test_dist_name_found_as_any_case(self):
"""
Ensure the metadata loads when queried with any case.
"""
- pkg_name = self.pkg_with_mixed_case(self.site_dir)
+ pkg_name = 'CherryPy'
+ fixtures.build_files(self.make_pkg(pkg_name), self.site_dir)
assert version(pkg_name) == '1.0'
assert version(pkg_name.lower()) == '1.0'
assert version(pkg_name.upper()) == '1.0'
+ def test_unique_distributions(self):
+ """
+ Two distributions varying only by non-normalized name on
+ the file system should resolve as the same.
+ """
+ fixtures.build_files(self.make_pkg('abc'), self.site_dir)
+ before = list(_unique(distributions()))
+
+ alt_site_dir = self.fixtures.enter_context(fixtures.tmp_path())
+ self.fixtures.enter_context(self.add_sys_path(alt_site_dir))
+ fixtures.build_files(self.make_pkg('ABC'), alt_site_dir)
+ after = list(_unique(distributions()))
+
+ assert len(after) == len(before)
+
+
+class InvalidMetadataTests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
+ @staticmethod
+ def make_pkg(name, files=dict(METADATA="VERSION: 1.0")):
+ """
+ Create metadata for a dist-info package with name and files.
+ """
+ return {
+ f'{name}.dist-info': files,
+ }
+
+ def test_valid_dists_preferred(self):
+ """
+ Dists with metadata should be preferred when discovered by name.
+
+ Ref python/importlib_metadata#489.
+ """
+ # create three dists with the valid one in the middle (lexicographically)
+ # such that on most file systems, the valid one is never naturally first.
+ fixtures.build_files(self.make_pkg('foo-4.0', files={}), self.site_dir)
+ fixtures.build_files(self.make_pkg('foo-4.1'), self.site_dir)
+ fixtures.build_files(self.make_pkg('foo-4.2', files={}), self.site_dir)
+ dist = Distribution.from_name('foo')
+ assert dist.version == "1.0"
+
+ def test_missing_metadata(self):
+ """
+ Dists with a missing metadata file should return None.
+
+ Ref python/importlib_metadata#493.
+ """
+ fixtures.build_files(self.make_pkg('foo-4.3', files={}), self.site_dir)
+ assert Distribution.from_name('foo').metadata is None
+ assert metadata('foo') is None
+
class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
@staticmethod
@@ -127,11 +173,12 @@ def pkg_with_non_ascii_description(site_dir):
Create minimal metadata for a package with non-ASCII in
the description.
"""
- metadata_dir = site_dir / 'portend.dist-info'
- metadata_dir.mkdir()
- metadata = metadata_dir / 'METADATA'
- with metadata.open('w', encoding='utf-8') as fp:
- fp.write('Description: pôrˈtend')
+ contents = {
+ 'portend.dist-info': {
+ 'METADATA': 'Description: pôrˈtend',
+ },
+ }
+ fixtures.build_files(contents, site_dir)
return 'portend'
@staticmethod
@@ -140,19 +187,15 @@ def pkg_with_non_ascii_description_egg_info(site_dir):
Create minimal metadata for an egg-info package with
non-ASCII in the description.
"""
- metadata_dir = site_dir / 'portend.dist-info'
- metadata_dir.mkdir()
- metadata = metadata_dir / 'METADATA'
- with metadata.open('w', encoding='utf-8') as fp:
- fp.write(
- textwrap.dedent(
- """
+ contents = {
+ 'portend.dist-info': {
+ 'METADATA': """
Name: portend
- pôrˈtend
- """
- ).strip()
- )
+ pôrˈtend""",
+ },
+ }
+ fixtures.build_files(contents, site_dir)
return 'portend'
def test_metadata_loads(self):
@@ -166,17 +209,41 @@ def test_metadata_loads_egg_info(self):
assert meta['Description'] == 'pôrˈtend'
-class DiscoveryTests(fixtures.EggInfoPkg, fixtures.DistInfoPkg, unittest.TestCase):
+class DiscoveryTests(
+ fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgSourcesFallback,
+ fixtures.DistInfoPkg,
+ unittest.TestCase,
+):
def test_package_discovery(self):
dists = list(distributions())
assert all(isinstance(dist, Distribution) for dist in dists)
assert any(dist.metadata['Name'] == 'egginfo-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'egg_with_module-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'egg_with_no_modules-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'sources_fallback-pkg' for dist in dists)
assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists)
def test_invalid_usage(self):
with self.assertRaises(ValueError):
list(distributions(context='something', name='else'))
+ def test_interleaved_discovery(self):
+ """
+ Ensure interleaved searches are safe.
+
+ When the search is cached, it is possible for searches to be
+ interleaved, so make sure those use-cases are safe.
+
+ Ref #293
+ """
+ dists = distributions()
+ next(dists)
+ version('egginfo-pkg')
+ next(dists)
+
class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
def test_egg_info(self):
@@ -252,14 +319,6 @@ def test_hashable(self):
"""EntryPoints should be hashable"""
hash(self.ep)
- def test_json_dump(self):
- """
- json should not expect to be able to dump an EntryPoint
- """
- with self.assertRaises(Exception):
- with warnings.catch_warnings(record=True):
- json.dumps(self.ep)
-
def test_module(self):
assert self.ep.module == 'value'
@@ -270,12 +329,10 @@ def test_sortable(self):
"""
EntryPoint objects are sortable, but result is undefined.
"""
- sorted(
- [
- EntryPoint(name='b', value='val', group='group'),
- EntryPoint(name='a', value='val', group='group'),
- ]
- )
+ sorted([
+ EntryPoint(name='b', value='val', group='group'),
+ EntryPoint(name='a', value='val', group='group'),
+ ])
class FileSystem(
@@ -326,3 +383,106 @@ def test_packages_distributions_neither_toplevel_nor_files(self):
prefix=self.site_dir,
)
packages_distributions()
+
+ def test_packages_distributions_all_module_types(self):
+ """
+ Test top-level modules detected on a package without 'top_level.txt'.
+ """
+ suffixes = importlib.machinery.all_suffixes()
+ metadata = dict(
+ METADATA="""
+ Name: all_distributions
+ Version: 1.0.0
+ """,
+ )
+ files = {
+ 'all_distributions-1.0.0.dist-info': metadata,
+ }
+ for i, suffix in enumerate(suffixes):
+ files.update({
+ f'importable-name {i}{suffix}': '',
+ f'in_namespace_{i}': {
+ f'mod{suffix}': '',
+ },
+ f'in_package_{i}': {
+ '__init__.py': '',
+ f'mod{suffix}': '',
+ },
+ })
+ metadata.update(RECORD=fixtures.build_record(files))
+ fixtures.build_files(files, prefix=self.site_dir)
+
+ distributions = packages_distributions()
+
+ for i in range(len(suffixes)):
+ assert distributions[f'importable-name {i}'] == ['all_distributions']
+ assert distributions[f'in_namespace_{i}'] == ['all_distributions']
+ assert distributions[f'in_package_{i}'] == ['all_distributions']
+
+ assert not any(name.endswith('.dist-info') for name in distributions)
+
+ @os_helper.skip_unless_symlink
+ def test_packages_distributions_symlinked_top_level(self) -> None:
+ """
+ Distribution is resolvable from a simple top-level symlink in RECORD.
+ See #452.
+ """
+
+ files: fixtures.FilesSpec = {
+ "symlinked_pkg-1.0.0.dist-info": {
+ "METADATA": """
+ Name: symlinked-pkg
+ Version: 1.0.0
+ """,
+ "RECORD": "symlinked,,\n",
+ },
+ ".symlink.target": {},
+ "symlinked": Symlink(".symlink.target"),
+ }
+
+ fixtures.build_files(files, self.site_dir)
+ assert packages_distributions()['symlinked'] == ['symlinked-pkg']
+
+
+class PackagesDistributionsEggTest(
+ fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgSourcesFallback,
+ unittest.TestCase,
+):
+ def test_packages_distributions_on_eggs(self):
+ """
+ Test old-style egg packages with a variation of 'top_level.txt',
+ 'SOURCES.txt', and 'installed-files.txt' available.
+ """
+ distributions = packages_distributions()
+
+ def import_names_from_package(package_name):
+ return {
+ import_name
+ for import_name, package_names in distributions.items()
+ if package_name in package_names
+ }
+
+ # egginfo-pkg declares one import ('mod') via top_level.txt
+ assert import_names_from_package('egginfo-pkg') == {'mod'}
+
+ # egg_with_module-pkg has one import ('egg_with_module') inferred from
+ # installed-files.txt (top_level.txt is missing)
+ assert import_names_from_package('egg_with_module-pkg') == {'egg_with_module'}
+
+ # egg_with_no_modules-pkg should not be associated with any import names
+ # (top_level.txt is empty, and installed-files.txt has no .py files)
+ assert import_names_from_package('egg_with_no_modules-pkg') == set()
+
+ # sources_fallback-pkg has one import ('sources_fallback') inferred from
+ # SOURCES.txt (top_level.txt and installed-files.txt are missing)
+ assert import_names_from_package('sources_fallback-pkg') == {'sources_fallback'}
+
+
+class EditableDistributionTest(fixtures.DistInfoPkgEditable, unittest.TestCase):
+ def test_origin(self):
+ dist = Distribution.from_name('distinfo-pkg')
+ assert dist.origin.url.endswith('.whl')
+ assert dist.origin.archive_info.hashes.sha256
diff --git a/tests/test_zip.py b/tests/test_zip.py
index 01aba6df..165aa6dd 100644
--- a/tests/test_zip.py
+++ b/tests/test_zip.py
@@ -1,8 +1,10 @@
+import multiprocessing
+import os
import sys
import unittest
-from . import fixtures
from importlib_metadata import (
+ FastPath,
PackageNotFoundError,
distribution,
distributions,
@@ -11,6 +13,8 @@
version,
)
+from . import fixtures
+
class TestZip(fixtures.ZipFixtures, unittest.TestCase):
def setUp(self):
@@ -46,6 +50,37 @@ def test_one_distribution(self):
dists = list(distributions(path=sys.path[:1]))
assert len(dists) == 1
+ @unittest.skipUnless(
+ hasattr(os, 'register_at_fork')
+ and 'fork' in multiprocessing.get_all_start_methods(),
+ 'requires fork-based multiprocessing support',
+ )
+ def test_fastpath_cache_cleared_in_forked_child(self):
+ zip_path = sys.path[0]
+
+ FastPath(zip_path)
+ assert FastPath.__new__.cache_info().currsize >= 1
+
+ ctx = multiprocessing.get_context('fork')
+ parent_conn, child_conn = ctx.Pipe()
+
+ def child(conn, root):
+ try:
+ before = FastPath.__new__.cache_info().currsize
+ FastPath(root)
+ after = FastPath.__new__.cache_info().currsize
+ conn.send((before, after))
+ finally:
+ conn.close()
+
+ proc = ctx.Process(target=child, args=(child_conn, zip_path))
+ proc.start()
+ child_conn.close()
+ cache_sizes = parent_conn.recv()
+ proc.join()
+
+ self.assertEqual(cache_sizes, (0, 1))
+
class TestEgg(TestZip):
def setUp(self):
diff --git a/towncrier.toml b/towncrier.toml
new file mode 100644
index 00000000..577e87a7
--- /dev/null
+++ b/towncrier.toml
@@ -0,0 +1,3 @@
+[tool.towncrier]
+title_format = "{version}"
+directory = "newsfragments" # jaraco/skeleton#184
diff --git a/tox.ini b/tox.ini
index a0ce7c61..7fa8d70f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,44 +1,59 @@
-[tox]
-envlist = python
-minversion = 3.2
-# https://github.com/jaraco/skeleton/issues/6
-tox_pip_extensions_ext_venv_update = true
-toxworkdir={env:TOX_WORK_DIR:.tox}
-
-
[testenv]
+description = perform primary checks (tests, style, types, coverage)
deps =
+setenv =
+ PYTHONWARNDEFAULTENCODING = 1
commands =
pytest {posargs}
passenv =
HOME
usedevelop = True
-extras = testing
+extras =
+ test
+ check
+ cover
+ enabler
+ type
+[testenv:diffcov]
+description = run tests and check that diff from main is covered
+deps =
+ {[testenv]deps}
+ diff-cover
+commands =
+ pytest {posargs} --cov-report xml
+ diff-cover coverage.xml --compare-branch=origin/main --html-report diffcov.html
+ diff-cover coverage.xml --compare-branch=origin/main --fail-under=100
[testenv:docs]
+description = build the documentation
extras =
- docs
- testing
+ doc
+ test
changedir = docs
commands =
python -m sphinx -W --keep-going . {toxinidir}/build/html
+ python -m sphinxlint
-[testenv:diffcov]
+[testenv:finalize]
+description = assemble changelog and tag a release
+skip_install = True
deps =
- diff-cover
+ towncrier
+ jaraco.develop >= 7.23
+pass_env = *
commands =
- pytest {posargs} --cov-report xml
- diff-cover coverage.xml --compare-branch=origin/main --html-report diffcov.html
- diff-cover coverage.xml --compare-branch=origin/main --fail-under=100
+ python -m jaraco.develop.finalize
+
[testenv:release]
+description = publish the package to PyPI and GitHub
skip_install = True
deps =
build
twine>=3
jaraco.develop>=7.1
-passenv =
+pass_env =
TWINE_PASSWORD
GITHUB_TOKEN
setenv =