diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..55dbe85 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "monthly" diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 366a8d3..5d02cb9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -9,28 +9,65 @@ on: - v* pull_request: +env: + UV_SYSTEM_PYTHON: 1 + jobs: - dowsing: + test: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: - python-version: [3.6, 3.7, 3.8] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] os: [macOS-latest, ubuntu-latest, windows-latest] steps: - name: Checkout - uses: actions/checkout@v1 + uses: actions/checkout@v4 - name: Set Up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} + - uses: astral-sh/setup-uv@v3 - name: Install run: | - python -m pip install --upgrade pip - make setup - pip install -U . 
+ uv pip install -e .[test] - name: Test run: make test - name: Lint - run: make lint + run: | + uv pip install -e .[test,dev] + make lint + if: ${{ matrix.python-version != '3.9' && matrix.python-version != '3.8' }} + + build: + needs: test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + - uses: astral-sh/setup-uv@v3 + - name: Install + run: uv pip install build + - name: Build + run: python -m build + - name: Upload + uses: actions/upload-artifact@v3 + with: + name: sdist + path: dist + + publish: + needs: build + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/v') + permissions: + id-token: write + steps: + - uses: actions/download-artifact@v3 + with: + name: sdist + path: dist + - uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..8ff4ed4 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,40 @@ +## v0.9.0b3 + +* Support PEP 639 style metadata (#76) +* Support more `setup.py` assignments (#57) +* 3.12 compat (depends on setuptools) +* Fix tests to work on modern Python + +## v0.9.0b2 + +* `source_mapping` bugfixes + * `packages` being an empty string (#20) + * `py_modules` containing dots (#22) + * Flit modules instead of packages (#24) + * `setup.py` parsing addition operator (#25) + +## v0.9.0b1 + +* Includes package data in `source_mapping` all the time. +* Support `flit.buildapi` as alternate flit build-backend +* Switch to usort for import sorting + +## v0.8.0 + +* Adds `Distribution.source_mapping` +* Enable gh actions on 3.9 + +## v0.7.0 + +* Adds Poetry support +* Adds Maturin support +* Adds `packages_dict` and better `packages` support across supported backends +* Allows `setup.cfg` fields to use dashes + +## v0.6.0 + +* Fix many bugs in Flit and Setuptools support, better test coverage.
+ +## v0.5.0 + +* Initial code extracted from Opine diff --git a/Makefile b/Makefile index 4c166f6..da69f1f 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -PYTHON?=python +PYTHON?=python3 SOURCES=dowsing setup.py .PHONY: venv @@ -12,7 +12,7 @@ venv: .PHONY: setup setup: - python -m pip install -Ur requirements-dev.txt + python -m pip install -Ue .[dev,test] .PHONY: test test: @@ -21,13 +21,11 @@ test: .PHONY: format format: - python -m isort --recursive -y $(SOURCES) - python -m black $(SOURCES) + python -m ufmt format $(SOURCES) .PHONY: lint lint: - python -m isort --recursive --diff $(SOURCES) - python -m black --check $(SOURCES) + python -m ufmt check $(SOURCES) python -m flake8 $(SOURCES) mypy --strict dowsing diff --git a/README.md b/README.md index b510735..a9b5bda 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,34 @@ # dowsing -TODO: Reword so it flows better. +Short version: + +``` +python -m dowsing.pep517 /path/to/repo | jq . +``` + +or + +``` +from dowsing.pep517 import get_metadata +dist = get_metadata(Path("/path/to/repo")) +``` ## Basic reasoning +I don't want to execute arbitrary `setup.py` in order to find out their basic +metadata. I don't want to use the pep517 module in a sandbox, because commonly +packages forget to list their build-time dependencies. + +This project is one step better than grepping source files, but also understands +`build-system` in `pyproject.toml` (from PEP 517/518). It does pretty well run +on a sampling of pypi projects, but does fail on some notable ones (including +setuptools). + +When it fails, a key will be `"??"` and due to some quirks in list context, this +can be `["?", "?"]`. 
+ +## A rant + The reality of python packaging, even with recent PEPs, is that most nontrivial python packages do moderately interesting stuff in their `setup.py`: @@ -13,27 +38,24 @@ python packages do moderately interesting stuff in their `setup.py`: * Making sure native libs are installed, or there's a working C compiler * Choosing deps based on platform -The disappointing part of several of these from the perspective of basically -running a distro, is that they produce messages intended for humans, rather than -actually using the mechanisms that we have in PEP 508 (environment markers) and -518 (pyproject.toml requires). +From the perspective of basically running a distro, they produce messages +intended for humans, rather than actually using the mechanisms that we have in +PEP 508 (environment markers) and 518 (pyproject.toml requires). There is also +no well-specified way to request native libs, and many projects choose to fail +to run `setup.py` when libs are missing. ## Goals This project is a bridge to find several things out, about primarily setup.py -but also understanding PEP 517/518 as a one-stop-shop, about: - -* for cases where the package's version is stored within, but has external - requirements that are not listed at build-time, currently returns an unknown - value and moves on -* potential imports, to guess at what should have been in the build-time - requirements (e.g `numpy.distutils` is pretty clear) -* doesn't actually execute, so fetches or execs can't cause it to fail -* Gives the PEP 517 APIs `get_requirements_for_sdist` and - `get_requirements_for_build_wheel`, even on a different platform through - simulated execution, with no sandboxing required. -* A lower-level api suitable for making edits to the place where the setup args - are defined. 
+but also understanding some popular PEP 517/518 builders as a one-stop-shop, about: + +* doesn't actually execute, so fetches or execs can't cause it to fail [done] +* cases where we could find out the version string, but it fails to import [done] +* lets you simulate the `pep517` module's output on different platforms [done] +* a lower-level api suitable for making edits to the place where the setup args + are defined [done] +* to list potential imports, and guess at missing build-time deps (something + like `numpy.distutils` is pretty clear) [todo] ## Doing this "right" @@ -46,6 +68,10 @@ If you're willing to run the code and have it take longer, take a look at the pep517 api `get_requires_for_*` or have it generate the metadata (assuming what you want is in there). An example is in `dowsing/_demo_pep517.py` +This project's `dowsing.pep517` api is designed to do something similar, but not +fail on missing build-time requirements. + + # Further Reading * PEP 241, Metadata 1.0 diff --git a/dowsing/_demo_pep517.py b/dowsing/_demo_pep517.py index 967d9c7..e66cb8b 100644 --- a/dowsing/_demo_pep517.py +++ b/dowsing/_demo_pep517.py @@ -1,6 +1,7 @@ """ For testing, dump the requirements that we find using the pep517 project. 
""" + import json import sys @@ -19,12 +20,12 @@ def main(path: str) -> None: d = {} with BuildEnvironment() as env: env.pip_install(requires) - d[ - "get_requires_for_build_sdist" - ] = requires + hooks.get_requires_for_build_sdist(None) - d[ - "get_requires_for_build_wheel" - ] = requires + hooks.get_requires_for_build_wheel(None) + d["get_requires_for_build_sdist"] = ( + requires + hooks.get_requires_for_build_sdist(None) + ) + d["get_requires_for_build_wheel"] = ( + requires + hooks.get_requires_for_build_wheel(None) + ) print(json.dumps(d)) diff --git a/dowsing/check_source_mapping.py b/dowsing/check_source_mapping.py new file mode 100644 index 0000000..efe38f3 --- /dev/null +++ b/dowsing/check_source_mapping.py @@ -0,0 +1,86 @@ +import sys +from pathlib import Path +from typing import List + +import click +from honesty.archive import extract_and_get_names +from honesty.cache import Cache +from honesty.cmdline import select_versions, wrap_async +from honesty.releases import async_parse_index, FileType +from moreorless.click import echo_color_unified_diff + +from dowsing.pep517 import get_metadata + + +@click.command() +@click.argument("packages", nargs=-1) +@wrap_async +async def main(packages: List[str]) -> None: + # Much of this code mirrors the methods in honesty/cmdline.py + async with Cache(fresh_index=True) as cache: + for package_name in packages: + package_name, operator, version = package_name.partition("==") + try: + package = await async_parse_index(package_name, cache, use_json=True) + except Exception as e: + print(package_name, repr(e), file=sys.stderr) + continue + + selected_versions = select_versions(package, operator, version) + rel = package.releases[selected_versions[0]] + + sdists = [f for f in rel.files if f.file_type == FileType.SDIST] + wheels = [f for f in rel.files if f.file_type == FileType.BDIST_WHEEL] + + if not sdists or not wheels: + print(f"{package_name}: insufficient artifacts") + continue + + sdist_path = await 
cache.async_fetch(pkg=package_name, url=sdists[0].url) + wheel_path = await cache.async_fetch(pkg=package_name, url=wheels[0].url) + + sdist_root, sdist_filenames = extract_and_get_names( + sdist_path, strip_top_level=True, patterns=("*.*") + ) + wheel_root, wheel_filenames = extract_and_get_names( + wheel_path, strip_top_level=True, patterns=("*.*") + ) + + try: + subdirs = tuple(Path(sdist_root).iterdir()) + metadata = get_metadata(Path(sdist_root, subdirs[0])) + assert metadata.source_mapping is not None, "no source_mapping" + except Exception as e: + print(package_name, repr(e), file=sys.stderr) + continue + + skip_patterns = [ + ".so", + ".pyc", + "nspkg", + ".dist-info", + ".data/scripts", + ] + wheel_blob = "".join( + sorted( + f"{f[0]}\n" + for f in wheel_filenames + if not any(s in f[0] for s in skip_patterns) + ) + ) + md_blob = "".join(sorted(f"{f}\n" for f in metadata.source_mapping.keys())) + + if metadata.source_mapping == {}: + print(f"{package_name}: empty dict") + elif md_blob == wheel_blob: + print(f"{package_name}: ok") + elif md_blob in ("", "?.py\n"): + print(f"{package_name}: COMPLETELY MISSING") + else: + echo_color_unified_diff( + wheel_blob, md_blob, f"{package_name}/files.txt" + ) + + +if __name__ == "__main__": + main() diff --git a/dowsing/flit.py b/dowsing/flit.py index c0f8231..60907d0 100644 --- a/dowsing/flit.py +++ b/dowsing/flit.py @@ -2,11 +2,13 @@ from typing import Sequence import tomlkit +from setuptools import find_packages -from .types import BaseReader, Distribution +from .pep621 import Pep621Reader +from .types import Distribution -class FlitReader(BaseReader): +class FlitReader(Pep621Reader): def __init__(self, path: Path): self.path = path @@ -20,16 +22,50 @@ def get_metadata(self) -> Distribution: pyproject = self.path / "pyproject.toml" doc = tomlkit.parse(pyproject.read_text()) - d = Distribution() - d.metadata_version = "2.1" + d = self.get_pep621_metadata() + d.entry_points = dict(d.entry_points) or {} + 
d.project_urls = list(d.project_urls) + + assert isinstance(d.project_urls, list) + + flit = doc.get("tool", {}).get("flit", {}) + metadata = flit.get("metadata", {}) + for k, v in metadata.items(): + # TODO description-file -> long_description + # TODO home-page -> urls + # TODO requires -> requires_dist + # TODO tool.flit.metadata.urls + if k == "home-page": + d.project_urls.append(f"Homepage={v}") + continue + elif k == "module": + if (self.path / f"{v}.py").exists(): + k = "py_modules" + v = [v] + else: + k = "packages" + v = find_packages(self.path.as_posix(), include=(f"{v}.*")) + d.packages_dict = {i: i.replace(".", "/") for i in v} + elif k == "description-file": + k = "description" + v = f"file: {v}" + elif k == "requires": + k = "requires_dist" - for k, v in doc["tool"]["flit"]["metadata"].items(): k2 = k.replace("-", "_") if k2 in d: setattr(d, k2, v) + for k, v in metadata.get("urls", {}).items(): + d.project_urls.append(f"{k}={v}") + + for k, v in flit.get("scripts", {}).items(): + d.entry_points[k] = v + # TODO extras-require + # TODO distutils commands (e.g.
pex 2.1.19) + d.source_mapping = d._source_mapping(self.path) return d def _get_requires(self) -> Sequence[str]: @@ -40,8 +76,7 @@ def _get_requires(self) -> Sequence[str]: https://github.com/takluyver/flit/issues/141 """ - pyproject = self.path / "pyproject.toml" - doc = tomlkit.parse(pyproject.read_text()) - seq = doc["tool"]["flit"]["metadata"].get("requires", ()) + dist = self.get_metadata() + seq = dist.requires_dist assert isinstance(seq, (list, tuple)) return seq diff --git a/dowsing/maturin.py b/dowsing/maturin.py new file mode 100644 index 0000000..299ea51 --- /dev/null +++ b/dowsing/maturin.py @@ -0,0 +1,54 @@ +from pathlib import Path +from typing import Sequence + +import tomlkit + +from .types import BaseReader, Distribution + + +class MaturinReader(BaseReader): + def __init__(self, path: Path): + self.path = path + + def get_requires_for_build_sdist(self) -> Sequence[str]: + return [] # TODO + + def get_requires_for_build_wheel(self) -> Sequence[str]: + return [] # TODO + + def get_metadata(self) -> Distribution: + pyproject = self.path / "pyproject.toml" + doc = tomlkit.parse(pyproject.read_text()) + + d = Distribution() + d.metadata_version = "2.1" + + cargo = self.path / "Cargo.toml" + doc = tomlkit.parse(cargo.read_text()) + package = doc.get("package", {}) + for k, v in package.items(): + if k == "name": + d.name = v + elif k == "version": + d.version = v + elif k == "license": + d.license = v + elif k == "description": + d.summary = v + # authors ["foo "] + # repository + # homepage + # readme (filename) + + maturin = package.get("metadata", {}).get("maturin", {}) + for k, v in maturin.items(): + if k == "requires-python": + d.requires_python = v + elif k == "classifier": + d.classifiers = v + elif k == "requires-dist": + d.requires_dist = v + # Many others, see https://docs.rs/maturin/0.8.3/maturin/struct.Metadata21.html + # but these do not seem to be that popular. 
+ + return d diff --git a/dowsing/pep517.py b/dowsing/pep517.py index 50fbd9b..57ee59a 100644 --- a/dowsing/pep517.py +++ b/dowsing/pep517.py @@ -2,7 +2,7 @@ import json import sys from pathlib import Path -from typing import Dict, List, Tuple, Type +from typing import Any, Dict, List, Tuple, Type import tomlkit @@ -11,7 +11,12 @@ KNOWN_BACKENDS: Dict[str, str] = { "setuptools.build_meta:__legacy__": "dowsing.setuptools:SetuptoolsReader", "setuptools.build_meta": "dowsing.setuptools:SetuptoolsReader", + "jupyter_packaging.build_api": "dowsing.setuptools:SetuptoolsReader", "flit_core.buildapi": "dowsing.flit:FlitReader", + "flit.buildapi": "dowsing.flit:FlitReader", + "maturin": "dowsing.maturin:MaturinReader", + "poetry.core.masonry.api": "dowsing.poetry:PoetryReader", + "poetry.masonry.api": "dowsing.poetry:PoetryReader", } @@ -22,13 +27,14 @@ def get_backend(path: Path) -> Tuple[List[str], BaseReader]: requires: List[str] = [] if pyproject.exists(): doc = tomlkit.parse(pyproject.read_text()) - if "build-system" in doc: - # 1b. include any build-system requires - if "requires" in doc["build-system"]: - requires.extend(doc["build-system"]["requires"]) - if "build-backend" in doc["build-system"]: - backend = doc["build-system"]["build-backend"] - # TODO backend-path + table = doc.get("build-system", {}) + + # 1b. 
include any build-system requires + if "requires" in table: + requires.extend(table["requires"]) + if "build-backend" in table: + backend = table["build-backend"] + # TODO backend-path try: backend_path = KNOWN_BACKENDS[backend] @@ -63,13 +69,21 @@ def get_metadata(path: Path) -> Distribution: return backend.get_metadata() +def _default(obj: Any) -> Any: + if obj.__class__.__name__ == "FindPackages": + return f"FindPackages({obj.where!r}, {obj.exclude!r}, {obj.include!r})" + raise TypeError(obj) + + def main(path: Path) -> None: + metadata = get_metadata(path) d = { "get_requires_for_build_sdist": get_requires_for_build_sdist(path), "get_requires_for_build_wheel": get_requires_for_build_wheel(path), - "get_metadata": get_metadata(path).asdict(), + "get_metadata": metadata.asdict(), + "source_mapping": metadata.source_mapping, } - print(json.dumps(d)) + print(json.dumps(d, default=_default)) if __name__ == "__main__": diff --git a/dowsing/pep621.py b/dowsing/pep621.py new file mode 100644 index 0000000..47697f8 --- /dev/null +++ b/dowsing/pep621.py @@ -0,0 +1,53 @@ +import tomlkit +from setuptools import find_packages + +from .types import BaseReader, Distribution + + +class Pep621Reader(BaseReader): + def get_pep621_metadata(self) -> Distribution: + pyproject = self.path / "pyproject.toml" + doc = tomlkit.parse(pyproject.read_text()) + + d = Distribution() + d.metadata_version = "2.1" + d.project_urls = [] + d.entry_points = {} + d.requires_dist = [] + d.packages = [] + d.packages_dict = {} + + assert isinstance(d.project_urls, list) + + table = doc.get("project", None) + if table: + for k, v in table.items(): + if k == "name": + if (self.path / f"{v}.py").exists(): + d.py_modules = [v] + else: + d.packages = find_packages( + self.path.as_posix(), include=(f"{v}.*") + ) + d.packages_dict = {i: i.replace(".", "/") for i in d.packages} + elif k == "license": + if isinstance(v, str): + pass # PEP 639 proposes `license = "MIT"` style metadata + elif "text" in v: + v =
v["text"] + elif "file" in v: + v = f"file: {v['file']}" + else: + raise ValueError("no known license field values") + elif k == "dependencies": + k = "requires_dist" + elif k == "optional-dependencies": + pass + elif k == "urls": + d.project_urls.extend([f"{x}={y}" for x, y in v.items()]) + + k2 = k.replace("-", "_") + if k2 in d: + setattr(d, k2, v) + + return d diff --git a/dowsing/poetry.py b/dowsing/poetry.py new file mode 100644 index 0000000..f6dc977 --- /dev/null +++ b/dowsing/poetry.py @@ -0,0 +1,83 @@ +import posixpath +from pathlib import Path +from typing import Sequence + +import tomlkit +from setuptools import find_packages + +from .types import BaseReader, Distribution + +METADATA_MAPPING = { + "name": "name", + "version": "version", + "description": "summary", + "license": "license", # SPDX short name + # authors + # maintainers + # readme -> long desc? w/ content type rst/md + "keywords": "keywords", + "classifiers": "classifiers", +} + + +class PoetryReader(BaseReader): + def __init__(self, path: Path): + self.path = path + + def get_requires_for_build_sdist(self) -> Sequence[str]: + return () # TODO + + def get_requires_for_build_wheel(self) -> Sequence[str]: + return () # TODO + + def get_metadata(self) -> Distribution: + pyproject = self.path / "pyproject.toml" + doc = tomlkit.parse(pyproject.read_text()) + + d = Distribution() + d.metadata_version = "2.1" + d.project_urls = [] + d.entry_points = {} + d.requires_dist = [] + d.packages = [] + d.packages_dict = {} + + assert isinstance(d.project_urls, list) + + poetry = doc.get("tool", {}).get("poetry", {}) + for k, v in poetry.items(): + if k in ("homepage", "repository", "documentation"): + d.project_urls.append(f"{k}={v}") + elif k == "packages": + # TODO improve and add tests; this works for tf2_utils and + # poetry itself but include can be a glob and there are excludes + for x in v: + f = x.get("from", ".") + for p in find_packages((self.path / f).as_posix()): + if p == x["include"] or 
p.startswith(f"{x['include']}."): + d.packages_dict[p] = posixpath.normpath( + posixpath.join(f, p.replace(".", "/")) + ) + d.packages.append(p) + elif k in METADATA_MAPPING: + setattr(d, METADATA_MAPPING[k], v) + + if not d.packages: + for p in find_packages(self.path.as_posix()): + d.packages_dict[p] = p.replace(".", "/") + d.packages.append(p) + + for k, v in poetry.get("dependencies", {}).items(): + if k == "python": + pass # TODO translate to requires_python + else: + d.requires_dist.append(k) # TODO something with version + + for k, v in poetry.get("urls", {}).items(): + d.project_urls.append(f"{k}={v}") + + for k, v in poetry.get("scripts", {}).items(): + d.entry_points[k] = v + + d.source_mapping = d._source_mapping(self.path) + return d diff --git a/dowsing/py.typed b/dowsing/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/dowsing/setuptools/__init__.py b/dowsing/setuptools/__init__.py index f4954e1..ce8452e 100644 --- a/dowsing/setuptools/__init__.py +++ b/dowsing/setuptools/__init__.py @@ -1,9 +1,18 @@ +import posixpath from pathlib import Path -from typing import Sequence, Tuple +from typing import Generator, Mapping, Sequence, Tuple + +from setuptools import find_packages from ..types import BaseReader, Distribution from .setup_cfg_parsing import from_setup_cfg -from .setup_py_parsing import from_setup_py +from .setup_py_parsing import FindPackages, from_setup_py + + +def _prefixes(dotted_name: str) -> Generator[Tuple[str, str], None, None]: + parts = dotted_name.split(".") + for i in range(len(parts), -1, -1): + yield ".".join(parts[:i]), "/".join(parts[i:]) class SetuptoolsReader(BaseReader): @@ -34,6 +43,68 @@ def get_metadata(self) -> Distribution: if getattr(d2, k): setattr(d1, k, getattr(d2, k)) + # This is the bare minimum to get pbr projects to show as having any + # sources. I don't want to use pbr.util.cfg_to_args because it appears + # to import and run arbitrary code. 
+ if d1.pbr or (d1.pbr__files__packages and not d1.packages): + where = "." + if d1.pbr__files__packages_root: + d1.package_dir = {"": d1.pbr__files__packages_root} + where = d1.pbr__files__packages_root + + if d1.pbr__files__packages: + d1.packages = d1.pbr__files__packages + else: + d1.packages = FindPackages(where, (), ("*",)) # type: ignore + + # package_dir can both add and remove components, see docs + # https://docs.python.org/2/distutils/setupscript.html#listing-whole-packages + package_dir: Mapping[str, str] = d1.package_dir + # If there was an error, we might have written "??" + if package_dir != "??": # type: ignore + if not package_dir: + package_dir = {"": "."} + + assert isinstance(package_dir, dict) + + def mangle(package: str) -> str: + for x, rest in _prefixes(package): + if x in package_dir: + return posixpath.normpath(posixpath.join(package_dir[x], rest)) + + # Some projects seem to set only a partial package_dir, but then + # use find_packages which wants to include some outside. + return package + + d1.packages_dict = {} # Break shared class-level dict + + # The following as_posix calls are necessary for Windows, but don't + # hurt elsewhere. 
+ if isinstance(d1.packages, FindPackages): + # This encodes a lot of sketchy logic, and deserves more test cases, + # plus some around py_modules + for p in find_packages( + (self.path / d1.packages.where).as_posix(), + d1.packages.exclude, + d1.packages.include, + ): + d1.packages_dict[p] = mangle(p) + elif d1.packages == ["find:"]: + for p in find_packages( + (self.path / d1.find_packages_where).as_posix(), + d1.find_packages_exclude, + d1.find_packages_include, + ): + d1.packages_dict[p] = mangle(p) + elif d1.packages not in ("??", "????"): + assert isinstance( + d1.packages, (list, tuple) + ), f"{d1.packages!r} is not a list/tuple" + for p in d1.packages: + if p: + d1.packages_dict[p] = mangle(p) + + d1.source_mapping = d1._source_mapping(self.path) return d1 def _get_requires(self) -> Tuple[str, ...]: diff --git a/dowsing/setuptools/setup_and_metadata.py b/dowsing/setuptools/setup_and_metadata.py index 0d478d8..ff64c4c 100644 --- a/dowsing/setuptools/setup_and_metadata.py +++ b/dowsing/setuptools/setup_and_metadata.py @@ -33,17 +33,30 @@ # but doesn't really tell you what they do or what the metadata keys are or # what metadata version they correspond to. 
ConfigField("name", SetupCfg("metadata", "name"), Metadata("Name")), - ConfigField("version", SetupCfg("metadata", "version"), Metadata("Version")), + ConfigField( + "version", + SetupCfg("metadata", "version"), + Metadata("Version"), + sample_value="1.5.1", + ), ConfigField("author", SetupCfg("metadata", "author"), Metadata("Author")), ConfigField( - "author_email", SetupCfg("metadata", "author_email"), Metadata("Author-email"), + "author_email", + SetupCfg("metadata", "author_email"), + Metadata("Author-email"), + ), + ConfigField( + "license", + SetupCfg("metadata", "license"), + Metadata("License"), ), - ConfigField("license", SetupCfg("metadata", "license"), Metadata("License"),), # TODO licence (alternate spelling) # TODO license_file, license_files (setuptools-specific) ConfigField("url", SetupCfg("metadata", "url"), Metadata("Home-page")), ConfigField( - "description", SetupCfg("metadata", "description"), Metadata("Summary"), + "description", + SetupCfg("metadata", "description"), + Metadata("Summary"), ), ConfigField( "long_description", @@ -54,6 +67,7 @@ "keywords", SetupCfg("metadata", "keywords", writer_cls=ListCommaWriterCompat), Metadata("Keywords"), + sample_value=["abc", "def"], ), # but not repeated # platforms # fullname @@ -67,7 +81,11 @@ "classifiers", SetupCfg("metadata", "classifiers", writer_cls=ListSemiWriter), Metadata("Classifier", repeated=True), - sample_value=None, + sample_value=[ + "License :: OSI Approved :: MIT License", + "Intended Audience :: Developers", + ], + distribution_key="classifiers", ), # download_url # Metadata 1.1 @@ -84,7 +102,7 @@ # Metadata 1.2, not at all supported by distutils ConfigField( "python_requires", - SetupCfg("options", "python_requires"), + SetupCfg("options", "python_requires"), # also requires_python :/ Metadata("Requires-Python"), sample_value="<4.0", ), @@ -94,9 +112,9 @@ "project_urls", SetupCfg("metadata", "project_urls", writer_cls=DictWriter), Metadata("Project-URL"), - sample_value=None, # 
{"Bugtracker": "http://example.com"}, + sample_value={"Bugtracker": "http://example.com"}, + distribution_key="project_urls", ), - # requires_dist # provides_dist (rarely used) # obsoletes_dist (rarely used) # Metadata 2.1 @@ -113,27 +131,29 @@ ConfigField( "zip_safe", SetupCfg("options", "zip_safe", writer_cls=BoolWriter), - sample_value=None, + sample_value=True, ), ConfigField( "setup_requires", SetupCfg("options", "setup_requires", writer_cls=ListSemiWriter), - sample_value=None, + sample_value=["setuptools"], ), ConfigField( "install_requires", - SetupCfg("options", "install_requires", writer_cls=ListSemiWriter), - sample_value=None, + SetupCfg("options", "install_requires", writer_cls=ListCommaWriter), + Metadata("Requires-Dist", repeated=True), + sample_value=["a", "b ; python_version < '3'"], + distribution_key="requires_dist", ), ConfigField( "tests_require", SetupCfg("options", "tests_require", writer_cls=ListSemiWriter), - sample_value=None, + sample_value=["pytest"], ), ConfigField( "include_package_data", SetupCfg("options", "include_package_data", writer_cls=BoolWriter), - sample_value=None, # True, + sample_value=True, ), # ConfigField( @@ -155,7 +175,7 @@ ConfigField( "packages", SetupCfg("options", "packages", writer_cls=ListCommaWriter), - sample_value=None, + sample_value=["a"], ), ConfigField( "package_dir", @@ -184,8 +204,45 @@ SetupCfg("options.data_files", "UNUSED", writer_cls=SectionWriter), sample_value=None, ), + ConfigField( + "entry_points", + SetupCfg("options.entry_points", "UNUSED", writer_cls=SectionWriter), + sample_value=None, + ), # # Documented, but not in the table... 
- ConfigField("test_suite", SetupCfg("options", "test_suite"), sample_value=None,), - ConfigField("test_loader", SetupCfg("options", "test_loader"), sample_value=None,), + ConfigField("test_suite", SetupCfg("options", "test_suite")), + ConfigField("test_loader", SetupCfg("options", "test_loader")), + # + # FindPackages + ConfigField( + "find_packages_where", + SetupCfg("options.packages.find", "where"), + sample_value=None, + ), + ConfigField( + "find_packages_exclude", + SetupCfg("options.packages.find", "exclude", writer_cls=ListCommaWriter), + sample_value=None, + ), + ConfigField( + "find_packages_include", + SetupCfg("options.packages.find", "include", writer_cls=ListCommaWriter), + sample_value=None, + ), + ConfigField( + "pbr", + SetupCfg("--unused--", "--unused--"), + sample_value=None, + ), + ConfigField( + "pbr__files__packages_root", + SetupCfg("files", "packages_root"), + sample_value=None, + ), + ConfigField( + "pbr__files__packages", + SetupCfg("files", "packages", writer_cls=ListCommaWriter), + sample_value=None, + ), ] diff --git a/dowsing/setuptools/setup_cfg_parsing.py b/dowsing/setuptools/setup_cfg_parsing.py index af4d309..aa93448 100644 --- a/dowsing/setuptools/setup_cfg_parsing.py +++ b/dowsing/setuptools/setup_cfg_parsing.py @@ -5,6 +5,7 @@ from ..types import Distribution from .setup_and_metadata import SETUP_ARGS +from .types import SectionWriter def from_setup_cfg(path: Path, markers: Dict[str, Any]) -> Distribution: @@ -15,21 +16,29 @@ def from_setup_cfg(path: Path, markers: Dict[str, Any]) -> Distribution: d.metadata_version = "2.1" for field in SETUP_ARGS: - # Until there's a better representation... 
- if not field.metadata and field.keyword not in ("setup_requires",): + name = field.get_distribution_key() + if not hasattr(d, name): continue - try: - raw_data = cfg[field.cfg.section][field.cfg.key] - except KeyError: - continue cls = field.cfg.writer_cls - parsed = cls().from_ini(raw_data) + if cls is SectionWriter: + try: + raw_section_data = cfg[field.cfg.section] + except KeyError: + continue + # ConfigSection behaves like a Dict[str, str] so this is fine + parsed = SectionWriter().from_ini_section(raw_section_data) # type: ignore + else: + try: + # All fields are defined as underscore, but it appears + # setuptools normalizes so dashes are ok too. + key = field.cfg.key + if key not in cfg[field.cfg.section]: + key = key.replace("_", "-") + raw_data = cfg[field.cfg.section][key] + except KeyError: + continue + parsed = cls().from_ini(raw_data) - name = ( - (field.metadata.key if field.metadata else field.keyword) - .lower() - .replace("-", "_") - ) setattr(d, name, parsed) return d diff --git a/dowsing/setuptools/setup_py_parsing.py b/dowsing/setuptools/setup_py_parsing.py index d974759..8774e6d 100644 --- a/dowsing/setuptools/setup_py_parsing.py +++ b/dowsing/setuptools/setup_py_parsing.py @@ -8,7 +8,12 @@ from typing import Any, Dict, Optional import libcst as cst -from libcst.metadata import ParentNodeProvider, QualifiedNameProvider, ScopeProvider +from libcst.metadata import ( + ParentNodeProvider, + PositionProvider, + QualifiedNameProvider, + ScopeProvider, +) from ..types import Distribution from .setup_and_metadata import SETUP_ARGS @@ -48,15 +53,10 @@ def from_setup_py(path: Path, markers: Dict[str, Any]) -> Distribution: raise SyntaxError("No simple setup call found") for field in SETUP_ARGS: - # Until there's a better representation... 
- if not field.metadata and field.keyword not in ("setup_requires",): + name = field.get_distribution_key() + if not hasattr(d, name): continue - name = ( - (field.metadata.key if field.metadata else field.keyword) - .lower() - .replace("-", "_") - ) if field.keyword in analyzer.saved_args: v = analyzer.saved_args[field.keyword] if isinstance(v, Literal): @@ -84,13 +84,20 @@ class Literal: cst_node: Optional[cst.CSTNode] +@dataclass +class FindPackages: + where: Any = None + exclude: Any = None + include: Any = None + + class FileReference: def __init__(self, filename: str) -> None: self.filename = filename class SetupCallTransformer(cst.CSTTransformer): - METADATA_DEPENDENCIES = (ScopeProvider, ParentNodeProvider, QualifiedNameProvider) # type: ignore + METADATA_DEPENDENCIES = (ScopeProvider, ParentNodeProvider, QualifiedNameProvider) def __init__( self, @@ -122,7 +129,12 @@ def leave_Call( class SetupCallAnalyzer(cst.CSTVisitor): - METADATA_DEPENDENCIES = (ScopeProvider, ParentNodeProvider, QualifiedNameProvider) # type: ignore + METADATA_DEPENDENCIES = ( + ScopeProvider, + ParentNodeProvider, + QualifiedNameProvider, + PositionProvider, + ) # TODO names resulting from other than 'from setuptools import setup' # TODO wrapper funcs that modify args @@ -139,7 +151,13 @@ def visit_Call(self, node: cst.Call) -> Optional[bool]: # TODO sometimes there is more than one setup call, we might # prioritize/merge... 
if any( - q.name in ("setuptools.setup", "distutils.core.setup", "setup3lib") + q.name + in ( + "setuptools.setup", + "distutils.core.setup", + "setup3lib", + "skbuild.setup", + ) for q in names ): self.found_setup = True @@ -170,28 +188,50 @@ def visit_Call(self, node: cst.Call) -> Optional[bool]: BOOL_NAMES = {"True": True, "False": False, "None": None} PRETEND_ARGV = ["setup.py", "bdist_wheel"] - def evaluate_in_scope(self, item: cst.CSTNode, scope: Any) -> Any: + def evaluate_in_scope( + self, item: cst.CSTNode, scope: Any, target_line: int = 0 + ) -> Any: + qnames = self.get_metadata(QualifiedNameProvider, item) + if isinstance(item, cst.SimpleString): return item.evaluated_value - # TODO int/float/etc + elif isinstance(item, (cst.Integer, cst.Float)): + return int(item.value) elif isinstance(item, cst.Name) and item.value in self.BOOL_NAMES: return self.BOOL_NAMES[item.value] elif isinstance(item, cst.Name): name = item.value assignments = scope[name] - for a in assignments: - # TODO: Only assignments "before" this node matter if in the - # same scope; really if we had a call graph and walked the other - # way, we could have a better idea of what has already happened. + assignment_nodes = sorted( + ( + (self.get_metadata(PositionProvider, a.node).start.line, a.node) + for a in assignments + if a.node + ), + reverse=True, + ) + # Walk assignments from bottom to top, evaluating them recursively. + for lineno, node in assignment_nodes: + + # When recursing, only look at assignments above the "target line". + if target_line and lineno >= target_line: + continue # Assign( # targets=[AssignTarget(target=Name(value="v"))], # value=SimpleString(value="'x'"), # ) + # + # AugAssign( + # target=Name(value="v"), + # operator=AddAssign(...), + # value=SimpleString(value="'x'"), + # ) + # # TODO or an import... 
# TODO builtins have BuiltinAssignment + try: - node = a.node if node: parent = self.get_metadata(ParentNodeProvider, node) if parent: @@ -201,31 +241,62 @@ def evaluate_in_scope(self, item: cst.CSTNode, scope: Any) -> Any: else: raise KeyError except (KeyError, AttributeError): - return "??" - - # This presumes a single assignment - if not isinstance(gp, cst.Assign) or len(gp.targets) != 1: - return "??" # TooComplicated(repr(gp)) + continue try: scope = self.get_metadata(ScopeProvider, gp) except KeyError: # module scope isn't in the dict - return "??" + continue + + # This presumes a single assignment + if isinstance(gp, cst.Assign) and len(gp.targets) == 1: + result = self.evaluate_in_scope(gp.value, scope, lineno) + elif isinstance(parent, cst.AugAssign): + result = self.evaluate_in_scope(parent, scope, lineno) + else: + # too complicated? + continue + + # keep trying assignments until we get something other than ?? + if result != "??": + return result - return self.evaluate_in_scope(gp.value, scope) + # give up + return "??" 
elif isinstance(item, (cst.Tuple, cst.List)): lst = [] for el in item.elements: lst.append( self.evaluate_in_scope( - el.value, self.get_metadata(ScopeProvider, el) + el.value, + self.get_metadata(ScopeProvider, el), + target_line, ) ) if isinstance(item, cst.Tuple): return tuple(lst) else: return lst + elif isinstance(item, cst.Call) and any( + q.name == "setuptools.find_packages" for q in qnames + ): + default_args = [".", (), ("*",)] + args = default_args.copy() + + names = ("where", "exclude", "include") + i = 0 + for arg in item.args: + if isinstance(arg.keyword, cst.Name): + args[names.index(arg.keyword.value)] = self.evaluate_in_scope( + arg.value, scope, target_line + ) + else: + args[i] = self.evaluate_in_scope(arg.value, scope, target_line) + i += 1 + + # TODO clear ones that are still default + return FindPackages(*args) elif ( isinstance(item, cst.Call) and isinstance(item.func, cst.Name) @@ -234,7 +305,9 @@ def evaluate_in_scope(self, item: cst.CSTNode, scope: Any) -> Any: d = {} for arg in item.args: if isinstance(arg.keyword, cst.Name): - d[arg.keyword.value] = self.evaluate_in_scope(arg.value, scope) + d[arg.keyword.value] = self.evaluate_in_scope( + arg.value, scope, target_line + ) # TODO something with **kwargs return d elif isinstance(item, cst.Dict): @@ -242,22 +315,55 @@ def evaluate_in_scope(self, item: cst.CSTNode, scope: Any) -> Any: for el2 in item.elements: if isinstance(el2, cst.DictElement): d[self.evaluate_in_scope(el2.key, scope)] = self.evaluate_in_scope( - el2.value, scope + el2.value, scope, target_line ) return d elif isinstance(item, cst.Subscript): - lhs = self.evaluate_in_scope(item.value, scope) + lhs = self.evaluate_in_scope(item.value, scope, target_line) if isinstance(lhs, str): # A "??" entry, propagate return "??" 
# TODO: Figure out why this is Sequence if isinstance(item.slice[0].slice, cst.Index): - rhs = self.evaluate_in_scope(item.slice[0].slice.value, scope) - return lhs.get(rhs, "??") + rhs = self.evaluate_in_scope( + item.slice[0].slice.value, scope, target_line + ) + try: + if isinstance(lhs, dict): + return lhs.get(rhs, "??") + else: + return lhs[rhs] + except Exception: + return "??" + else: # LOG.warning(f"Omit2 {type(item.slice[0].slice)!r}") return "??" + elif isinstance(item, cst.BinaryOperation): + lhs = self.evaluate_in_scope(item.left, scope, target_line) + rhs = self.evaluate_in_scope(item.right, scope, target_line) + if lhs == "??" or rhs == "??": + return "??" + if isinstance(item.operator, cst.Add): + try: + return lhs + rhs + except Exception: + return "??" + else: + return "??" + elif isinstance(item, cst.AugAssign): + lhs = self.evaluate_in_scope(item.target, scope, target_line) + rhs = self.evaluate_in_scope(item.value, scope, target_line) + if lhs == "??" or rhs == "??": + return "??" + if isinstance(item.operator, cst.AddAssign): + try: + return lhs + rhs + except Exception: + return "??" + else: + return "??" else: # LOG.warning(f"Omit1 {type(item)!r}") return "??" 
diff --git a/dowsing/setuptools/types.py b/dowsing/setuptools/types.py index d53e394..4618ca0 100644 --- a/dowsing/setuptools/types.py +++ b/dowsing/setuptools/types.py @@ -24,11 +24,11 @@ class ListCommaWriter(BaseWriter): def to_ini(self, value: List[str]) -> str: if not value: return "" - return "".join(f"\n{k}" for k in value) + return "".join(f"\n {k}" for k in value) def from_ini(self, value: str) -> List[str]: # TODO, on all of these, handle other separators, \r, and stripping - return value.strip().split("\n") + return [line.strip() for line in value.strip().split("\n")] class ListCommaWriterCompat(BaseWriter): @@ -37,20 +37,20 @@ def to_ini(self, value: Union[str, List[str]]) -> str: return "" if isinstance(value, str): value = [value] - return "".join(f"\n{k}" for k in value) + return "".join(f"\n {k}" for k in value) def from_ini(self, value: str) -> List[str]: - return value.strip().split("\n") + return [line.strip() for line in value.strip().split("\n")] class ListSemiWriter(BaseWriter): def to_ini(self, value: List[str]) -> str: if not value: return "" - return "".join(f"\n{k}" for k in value) + return "".join(f"\n {k}" for k in value) def from_ini(self, value: str) -> List[str]: - return value.strip().split("\n") + return [line.strip() for line in value.strip().split("\n")] # This class is also specialcased @@ -60,6 +60,9 @@ def to_ini(self, value: List[str]) -> str: return "" return "".join(f"\n{k}" for k in value) + def from_ini_section(self, section: Dict[str, str]) -> Dict[str, List[str]]: + return {k: section[k].strip().split("\n") for k in section.keys()} + class BoolWriter(BaseWriter): def to_ini(self, value: bool) -> str: @@ -74,7 +77,7 @@ class DictWriter(BaseWriter): def to_ini(self, value: Dict[str, str]) -> str: if not value: return "" - return "".join(f"\n{k}={v}" for k, v in value.items()) + return "".join(f"\n {k}={v}" for k, v in value.items()) def from_ini(self, value: str) -> Dict[str, str]: d = {} @@ -128,3 +131,15 @@ class 
ConfigField: # Not all kwargs end up in metadata. We have a modified Distribution that # keeps them for now, but looking for something better (even if it's just # using ConfigField objects as events in a stream). + distribution_key: Optional[str] = None + + def get_distribution_key(self) -> str: + # Returns the member name of pkginfo.Distribution (or our subclasS) + if self.metadata is not None: + return ( + (self.distribution_key or self.metadata.key or self.keyword) + .replace("-", "_") + .lower() + ) + else: + return (self.distribution_key or self.keyword).replace("-", "_").lower() diff --git a/dowsing/tests/__init__.py b/dowsing/tests/__init__.py index d9b9540..80335e6 100644 --- a/dowsing/tests/__init__.py +++ b/dowsing/tests/__init__.py @@ -1,11 +1,21 @@ from .api import ApiTest from .flit import FlitReaderTest +from .maturin import MaturinReaderTest from .pep517 import Pep517Test +from .pep621 import Pep621ReaderTest +from .poetry import PoetryReaderTest from .setuptools import SetuptoolsReaderTest +from .setuptools_metadata import SetupArgsTest +from .setuptools_types import WriterTest __all__ = [ "ApiTest", "FlitReaderTest", + "MaturinReaderTest", "Pep517Test", + "Pep621ReaderTest", + "PoetryReaderTest", "SetuptoolsReaderTest", + "WriterTest", + "SetupArgsTest", ] diff --git a/dowsing/tests/flit.py b/dowsing/tests/flit.py index 1bc74b9..98a15b0 100644 --- a/dowsing/tests/flit.py +++ b/dowsing/tests/flit.py @@ -23,8 +23,8 @@ def test_simplest(self) -> None: # handle missing metadata appropriately. 
r = FlitReader(dp) - self.assertEqual((), r.get_requires_for_build_sdist()) - self.assertEqual((), r.get_requires_for_build_wheel()) + self.assertEqual([], r.get_requires_for_build_sdist()) + self.assertEqual([], r.get_requires_for_build_wheel()) md = r.get_metadata() self.assertEqual("Name", md.name) @@ -41,8 +41,15 @@ def test_normal(self) -> None: name = "Name" module = "foo" requires = ["abc", "def"] + +[tool.flit.metadata.urls] +Foo = "https://" """ ) + (dp / "foo").mkdir() + (dp / "foo" / "tests").mkdir() + (dp / "foo" / "__init__.py").write_text("") + (dp / "foo" / "tests" / "__init__.py").write_text("") r = FlitReader(dp) # Notably these do not include flit itself; that's handled by @@ -52,6 +59,54 @@ def test_normal(self) -> None: md = r.get_metadata() self.assertEqual("Name", md.name) self.assertEqual( - {"metadata_version": "2.1", "name": "Name", "requires": ["abc", "def"]}, + { + "metadata_version": "2.1", + "name": "Name", + "packages": ["foo", "foo.tests"], + "packages_dict": {"foo": "foo", "foo.tests": "foo/tests"}, + "requires_dist": ["abc", "def"], + "project_urls": ["Foo=https://"], + }, + md.asdict(), + ) + + def test_pep621(self) -> None: + with volatile.dir() as d: + dp = Path(d) + (dp / "pyproject.toml").write_text( + """\ +[build-system] +requires = ["flit_core >=2,<4"] +build-backend = "flit_core.buildapi" + +[project] +name = "foo" +dependencies = ["abc", "def"] + +[project.urls] +Foo = "https://" +""" + ) + (dp / "foo").mkdir() + (dp / "foo" / "tests").mkdir() + (dp / "foo" / "__init__.py").write_text("") + (dp / "foo" / "tests" / "__init__.py").write_text("") + + r = FlitReader(dp) + # Notably these do not include flit itself; that's handled by + # dowsing.pep517 + self.assertEqual(["abc", "def"], r.get_requires_for_build_sdist()) + self.assertEqual(["abc", "def"], r.get_requires_for_build_wheel()) + md = r.get_metadata() + self.assertEqual("foo", md.name) + self.assertEqual( + { + "metadata_version": "2.1", + "name": "foo", + "packages": 
["foo", "foo.tests"], + "packages_dict": {"foo": "foo", "foo.tests": "foo/tests"}, + "requires_dist": ["abc", "def"], + "project_urls": ["Foo=https://"], + }, md.asdict(), ) diff --git a/dowsing/tests/maturin.py b/dowsing/tests/maturin.py new file mode 100644 index 0000000..38409af --- /dev/null +++ b/dowsing/tests/maturin.py @@ -0,0 +1,51 @@ +import unittest +from pathlib import Path + +import volatile + +from dowsing.maturin import MaturinReader + + +class MaturinReaderTest(unittest.TestCase): + def test_orjson(self) -> None: + # This is a simplified version of orjson 3.4.0 + with volatile.dir() as d: + dp = Path(d) + (dp / "pyproject.toml").write_text( + """\ +[project] +name = "orjson" +repository = "https://example.com/" + +[build-system] +build-backend = "maturin" +requires = ["maturin>=0.8.1,<0.9"] +""" + ) + + (dp / "Cargo.toml").write_text( + """\ +[package] +name = "orjson" +version = "3.4.0" +authors = ["foo "] +description = "Summary here" +license = "Apache-2.0 OR MIT" +repository = "https://example.com/repo" +homepage = "https://example.com/home" +readme = "README.md" +keywords = ["foo", "bar", "baz"] + +[package.metadata.maturin] +requires-python = ">=3.6" +classifer = [ + "License :: OSI Approved :: Apache Software License", + "License :: OSI Approved :: MIT License", +] +""" + ) + r = MaturinReader(dp) + md = r.get_metadata() + self.assertEqual("orjson", md.name) + self.assertEqual("3.4.0", md.version) + # TODO more tests diff --git a/dowsing/tests/pep621.py b/dowsing/tests/pep621.py new file mode 100644 index 0000000..408f3ae --- /dev/null +++ b/dowsing/tests/pep621.py @@ -0,0 +1,73 @@ +import unittest +from pathlib import Path + +import volatile + +from ..pep621 import Pep621Reader + + +class Pep621ReaderTest(unittest.TestCase): + def test_simplest(self) -> None: + with volatile.dir() as d: + dp = Path(d) + (dp / "pyproject.toml").write_text( + """\ +[project] +name = "Name" +""" + ) + + r = Pep621Reader(dp) + md = r.get_pep621_metadata() + 
self.assertEqual("Name", md.name) + + def test_normal(self) -> None: + with volatile.dir() as d: + dp = Path(d) + (dp / "pyproject.toml").write_text( + """\ +[project] +name = "foo" +dependencies = ["abc", "def"] +license = {text = "MIT"} + +[project.urls] +Foo = "https://" +""" + ) + (dp / "foo").mkdir() + (dp / "foo" / "tests").mkdir() + (dp / "foo" / "__init__.py").write_text("") + (dp / "foo" / "tests" / "__init__.py").write_text("") + + r = Pep621Reader(dp) + md = r.get_pep621_metadata() + self.assertEqual("foo", md.name) + self.assertEqual( + { + "metadata_version": "2.1", + "name": "foo", + "license": "MIT", + "packages": ["foo", "foo.tests"], + "packages_dict": {"foo": "foo", "foo.tests": "foo/tests"}, + "requires_dist": ["abc", "def"], + "project_urls": ["Foo=https://"], + }, + md.asdict(), + ) + + def test_pep639(self) -> None: + with volatile.dir() as d: + dp = Path(d) + (dp / "pyproject.toml").write_text( + """\ +[project] +name = "Name" +license = "MIT" +""" + ) + + r = Pep621Reader(dp) + md = r.get_pep621_metadata() + self.assertEqual("Name", md.name) + self.assertEqual("MIT", md.license) diff --git a/dowsing/tests/poetry.py b/dowsing/tests/poetry.py new file mode 100644 index 0000000..f8a1f9e --- /dev/null +++ b/dowsing/tests/poetry.py @@ -0,0 +1,51 @@ +import unittest +from pathlib import Path + +import volatile + +from dowsing.poetry import PoetryReader + + +class PoetryReaderTest(unittest.TestCase): + def test_basic(self) -> None: + with volatile.dir() as d: + dp = Path(d) + (dp / "pyproject.toml").write_text( + """\ +[build-system] +requires = ["poetry-core>=1.0.0a9"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "Name" +version = "1.5.2" +description = "Short Desc" +authors = ["Author "] +license = "BSD-3-Clause" +homepage = "http://example.com" +classifiers = [ + "Not a real classifier", +] + +[tool.poetry.dependencies] +python = "~2.7 || ^3.5" +functools32 = { version = "^3.2.3", python = "~2.7" } + +[tool.poetry.urls] 
+"Bug Tracker" = "https://github.com/python-poetry/poetry/issues" +""" + ) + r = PoetryReader(dp) + md = r.get_metadata() + self.assertEqual("Name", md.name) + self.assertEqual("1.5.2", md.version) + self.assertEqual("BSD-3-Clause", md.license) + self.assertEqual( + [ + "homepage=http://example.com", + "Bug Tracker=https://github.com/python-poetry/poetry/issues", + ], + md.project_urls, + ) + self.assertEqual(["Not a real classifier"], md.classifiers) + self.assertEqual(["functools32"], md.requires_dist) diff --git a/dowsing/tests/setuptools.py b/dowsing/tests/setuptools.py index 770a23d..88840a7 100644 --- a/dowsing/tests/setuptools.py +++ b/dowsing/tests/setuptools.py @@ -1,17 +1,12 @@ import unittest from pathlib import Path +from typing import Dict, Optional import volatile from dowsing.setuptools import SetuptoolsReader -from dowsing.setuptools.types import ( - BoolWriter, - DictWriter, - ListCommaWriter, - ListCommaWriterCompat, - ListSemiWriter, - StrWriter, -) +from dowsing.setuptools.setup_py_parsing import FindPackages +from dowsing.types import Distribution class SetuptoolsReaderTest(unittest.TestCase): @@ -34,6 +29,24 @@ def test_setup_cfg(self) -> None: ("setuptools", "wheel", "def"), r.get_requires_for_build_wheel() ) + def test_setup_cfg_dash_normalization(self) -> None: + # I can't find documentation for this, but e.g. auditwheel 3.2.0 uses + # dashes instead of underscores and it works. 
+ with volatile.dir() as d: + dp = Path(d) + (dp / "setup.cfg").write_text( + """\ +[metadata] +name = foo +author = Foo +author-email = foo@example.com +""" + ) + + r = SetuptoolsReader(dp) + md = r.get_metadata() + self.assertEqual("foo@example.com", md.author_email) + def test_setup_py(self) -> None: with volatile.dir() as d: dp = Path(d) @@ -51,26 +64,356 @@ def test_setup_py(self) -> None: ("setuptools", "wheel", "def"), r.get_requires_for_build_wheel() ) - def test_writer_classes_roundtrip_str(self) -> None: - s = "abc" - inst = StrWriter() - self.assertEqual(s, inst.from_ini(inst.to_ini(s))) - - def test_writer_classes_roundtrip_lists(self) -> None: - lst = ["a", "bc"] - inst = ListSemiWriter() - self.assertEqual(lst, inst.from_ini(inst.to_ini(lst))) - inst2 = ListCommaWriter() - self.assertEqual(lst, inst2.from_ini(inst2.to_ini(lst))) - inst3 = ListCommaWriterCompat() - self.assertEqual(lst, inst3.from_ini(inst3.to_ini(lst))) - - def test_writer_classes_roundtrip_dict(self) -> None: - d = {"a": "bc", "d": "ef"} - inst = DictWriter() - self.assertEqual(d, inst.from_ini(inst.to_ini(d))) - - def test_writer_classes_roundtrip_bool(self) -> None: - for b in (True, False): - inst = BoolWriter() - self.assertEqual(b, inst.from_ini(inst.to_ini(b))) + def _read( + self, + data: str, + src_dir: str = ".", + extra_files: Optional[Dict[str, str]] = None, + ) -> Distribution: + with volatile.dir() as d: + sp = Path(d, "setup.py") + sp.write_text(data) + if extra_files: + for k, v in extra_files.items(): + Path(d, k).write_text(v) + Path(d, src_dir, "pkg").mkdir(parents=True) + Path(d, src_dir, "pkg", "__init__.py").touch() + Path(d, src_dir, "pkg", "sub").mkdir() + Path(d, src_dir, "pkg", "sub", "__init__.py").touch() + Path(d, src_dir, "pkg", "tests").mkdir() + Path(d, src_dir, "pkg", "tests", "__init__.py").touch() + return SetuptoolsReader(Path(d)).get_metadata() + + def test_smoke(self) -> None: + d = self._read( + """\ +from setuptools import setup +setup( + 
name="foo", + version="0.1", + classifiers=["CLASSIFIER"], + install_requires=["abc"], +) +""" + ) + self.assertEqual("foo", d.name) + self.assertEqual("0.1", d.version) + self.assertEqual(["CLASSIFIER"], d.classifiers) + self.assertEqual(["abc"], d.requires_dist) + + def test_packages_dict_literal(self) -> None: + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + packages=["pkg", "pkg.tests"], +) +""" + ) + self.assertEqual(d.packages, ["pkg", "pkg.tests"]) + self.assertEqual(d.packages_dict, {"pkg": "pkg", "pkg.tests": "pkg/tests"}) + + def test_packages_find_packages_call(self) -> None: + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + packages=find_packages(exclude=("pkg.sub",)), +) + """ + ) + self.assertEqual(d.packages, FindPackages(".", ("pkg.sub",), ("*",))) + self.assertEqual(d.packages_dict, {"pkg": "pkg", "pkg.tests": "pkg/tests"}) + + def test_packages_find_packages_call_package_dir(self) -> None: + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + package_dir={'': '.'}, + packages=find_packages(exclude=("pkg.sub",)), +) + """ + ) + self.assertEqual(d.packages, FindPackages(".", ("pkg.sub",), ("*",))) + self.assertEqual(d.packages_dict, {"pkg": "pkg", "pkg.tests": "pkg/tests"}) + + def test_packages_find_packages_call_package_dir_src(self) -> None: + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + package_dir={'': 'src'}, + packages=find_packages("src", exclude=("pkg.sub",)), +) + """, + "src", + ) + self.assertEqual(d.packages, FindPackages("src", ("pkg.sub",), ("*",))) + self.assertEqual( + d.packages_dict, {"pkg": "src/pkg", "pkg.tests": "src/pkg/tests"} + ) + + def test_packages_find_packages_call_package_dir2(self) -> None: + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + package_dir={'pkg': 'pkg'}, + packages=find_packages(exclude=("pkg.sub",)), +) + """ + ) + self.assertEqual(d.packages, 
FindPackages(".", ("pkg.sub",), ("*",))) + self.assertEqual(d.packages_dict, {"pkg": "pkg", "pkg.tests": "pkg/tests"}) + self.assertEqual( + d.source_mapping, + { + "pkg/__init__.py": "pkg/__init__.py", + # TODO this line should not be here as it's excluded + "pkg/sub/__init__.py": "pkg/sub/__init__.py", + "pkg/tests/__init__.py": "pkg/tests/__init__.py", + }, + ) + + def test_packages_find_packages_call_package_dir3(self) -> None: + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + package_dir={'': 'pkg'}, + packages=find_packages("pkg"), +) + """ + ) + self.assertEqual(d.packages, FindPackages("pkg", (), ("*",))) + self.assertEqual(d.packages_dict, {"sub": "pkg/sub", "tests": "pkg/tests"}) + self.assertEqual( + d.source_mapping, + { + "sub/__init__.py": "pkg/sub/__init__.py", + "tests/__init__.py": "pkg/tests/__init__.py", + }, + ) + + def test_packages_find_packages_include(self) -> None: + # This is weird behavior but documented. + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + packages=find_packages(include=("pkg",)), +) + """ + ) + self.assertEqual(d.packages, FindPackages(".", (), ("pkg",))) + self.assertEqual(d.packages_dict, {"pkg": "pkg"}) + # TODO strict interpretation should be this commented line + # self.assertEqual(d.source_mapping, {"pkg/__init__.py": "pkg/__init__.py"}) + self.assertEqual( + d.source_mapping, + { + "pkg/__init__.py": "pkg/__init__.py", + "pkg/sub/__init__.py": "pkg/sub/__init__.py", + "pkg/tests/__init__.py": "pkg/tests/__init__.py", + }, + ) + + def test_py_modules(self) -> None: + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + py_modules=["a", "b"], +) + """ + ) + self.assertEqual(d.source_mapping, {"a.py": "a.py", "b.py": "b.py"}) + + def test_invalid_packages(self) -> None: + d = self._read( + """\ +from setuptools import setup, find_packages +setup( + packages = ["zzz"], +) + """ + ) + # TODO wish this were None + 
self.assertEqual(d.source_mapping, {}) + + def test_pbr_properly_enabled(self) -> None: + d = self._read( + """\ +from setuptools import setup + +setup( + setup_requires=['pbr>=1.9', 'setuptools>=17.1'], + pbr=True, +)""", + extra_files={ + "setup.cfg": """\ +[metadata] +name = pbr +author = OpenStack Foundation + +[files] +packages = + pkg +""" + }, + ) + self.assertEqual( + d.source_mapping, + { + "pkg/__init__.py": "pkg/__init__.py", + "pkg/sub/__init__.py": "pkg/sub/__init__.py", + "pkg/tests/__init__.py": "pkg/tests/__init__.py", + }, + ) + + def test_pbr_properly_enabled_src(self) -> None: + d = self._read( + """\ +from setuptools import setup + +setup( + setup_requires=['pbr>=1.9', 'setuptools>=17.1'], + pbr=True, +)""", + src_dir="src", + extra_files={ + "setup.cfg": """\ +[metadata] +name = pbr +author = OpenStack Foundation + +[files] +packages = + pkg +packages_root = src +""" + }, + ) + self.assertEqual( + d.source_mapping, + { + "pkg/__init__.py": "src/pkg/__init__.py", + "pkg/sub/__init__.py": "src/pkg/sub/__init__.py", + "pkg/tests/__init__.py": "src/pkg/tests/__init__.py", + }, + ) + + def test_pbr_improperly_enabled(self) -> None: + # pbr itself is something like this. 
+ d = self._read( + """\ +from setuptools import setup + +setup()""", + extra_files={ + "setup.cfg": """\ +[metadata] +name = pbr +author = OpenStack Foundation + +[files] +packages = + pkg +""" + }, + ) + self.assertEqual( + d.source_mapping, + { + "pkg/__init__.py": "pkg/__init__.py", + "pkg/sub/__init__.py": "pkg/sub/__init__.py", + "pkg/tests/__init__.py": "pkg/tests/__init__.py", + }, + ) + + def test_add_items(self) -> None: + d = self._read( + """\ +from setuptools import setup +a = "aaaa" +p = ["a", "b", "c"] +setup(name = a + "1111", packages=[] + p, classifiers=a + p) + """ + ) + self.assertEqual(d.name, "aaaa1111") + self.assertEqual(d.packages, ["a", "b", "c"]) + self.assertEqual(d.classifiers, "??") + + def test_self_reference_assignments(self) -> None: + d = self._read( + """\ +from setuptools import setup + +version = "base" +name = "foo" +name += "bar" +version = version + ".suffix" + +classifiers = [ + "123", + "abc", +] + +if True: + classifiers = classifiers + ["xyz"] + +setup( + name=name, + version=version, + classifiers=classifiers, +) + """ + ) + self.assertEqual(d.name, "foobar") + self.assertEqual(d.version, "base.suffix") + self.assertSequenceEqual(d.classifiers, ["123", "abc", "xyz"]) + + def test_circular_references(self) -> None: + d = self._read( + """\ +from setuptools import setup + +name = "foo" + +foo = bar +bar = version +version = foo + +classifiers = classifiers + +setup( + name=name, + version=version, +) + """ + ) + self.assertEqual(d.name, "foo") + self.assertEqual(d.version, "??") + self.assertEqual(d.classifiers, ()) + + def test_redefines_builtin(self) -> None: + d = self._read( + """\ +import setuptools +with open("CREDITS.txt", "r", encoding="utf-8") as fp: + credits = fp.read() + +long_desc = "a" + credits + "b" +name = "foo" + +kwargs = dict( + long_description = long_desc, + name = name, +) + +setuptools.setup(**kwargs) +""" + ) + self.assertEqual(d.name, "foo") + self.assertEqual(d.description, "??") diff --git 
a/dowsing/tests/setuptools_metadata.py b/dowsing/tests/setuptools_metadata.py new file mode 100644 index 0000000..3e3c0ac --- /dev/null +++ b/dowsing/tests/setuptools_metadata.py @@ -0,0 +1,107 @@ +import email.parser +import io +import os +import sys +import tempfile +import unittest +from distutils.core import run_setup +from email.message import Message +from pathlib import Path +from typing import Dict, Tuple + +import setuptools # noqa: F401 patchers gotta patch + +from dowsing.setuptools import SetuptoolsReader +from dowsing.setuptools.setup_and_metadata import SETUP_ARGS +from dowsing.types import Distribution + + +def egg_info(files: Dict[str, str]) -> Tuple[Message, Distribution]: + # TODO consider + # https://docs.python.org/3/distutils/apiref.html#distutils.core.run_setup + # and whether that gives a Distribution that knows setuptools-only options + with tempfile.TemporaryDirectory() as d: + for relname, contents in files.items(): + Path(d, relname).parent.mkdir(exist_ok=True, parents=True) + Path(d, relname).write_text(contents) + + try: + cwd = os.getcwd() + stdout = sys.stdout + + os.chdir(d) + sys.stdout = io.StringIO() + dist = run_setup("setup.py", ["egg_info"]) + finally: + os.chdir(cwd) + sys.stdout = stdout + + sources = list(Path(d).rglob("PKG-INFO")) + assert len(sources) == 1, sources + + with open(sources[0]) as f: + parser = email.parser.Parser() + info = parser.parse(f) + reader = SetuptoolsReader(Path(d)) + dist = reader.get_metadata() # type: ignore[assignment] + return info, dist # type: ignore[return-value] + + +# These tests do not increase coverage, and just verify that we have the right +# static data. +class SetupArgsTest(unittest.TestCase): + def test_arg_mapping(self) -> None: + for field in SETUP_ARGS: + if field.sample_value is None: + continue + with self.subTest(field.keyword): + # Tests that the same arg from setup.py or setup.cfg makes it into + # metadata in the same way. 
+ foo = field.sample_value + setup_py_info, setup_py_dist = egg_info( + { + "setup.py": "from setuptools import setup\n" + f"setup({field.keyword}={foo!r})\n", + "a/__init__.py": "", + } + ) + + cfg_format_foo = field.cfg.writer_cls().to_ini(foo) + setup_cfg_info, setup_cfg_dist = egg_info( + { + "setup.cfg": f"[{field.cfg.section}]\n" + f"{field.cfg.key} = {cfg_format_foo}\n", + "setup.py": "from setuptools import setup\n" "setup()\n", + "a/__init__.py": "", + } + ) + + name = field.get_distribution_key() + self.assertNotEqual( + getattr(setup_py_dist, name), + None, + ) + self.assertEqual( + foo, + getattr(setup_py_dist, name), + ) + self.assertEqual( + foo, + getattr(setup_cfg_dist, name), + ) + + if field.metadata: + a = setup_py_info.get_all(field.metadata.key) + b = setup_cfg_info.get_all(field.metadata.key) + + # setuptools>=57 writes long_description to the body/payload + # of PKG-INFO, and skips the description field entirely. + if field.keyword == "long_description" and a is None: + a = setup_py_info.get_payload() # type: ignore[assignment] + b = setup_cfg_info.get_payload() # type: ignore[assignment] + + # install_requires gets written out to *.egg-info/requires.txt + # instead + if field.keyword != "install_requires": + self.assertEqual(a, b) + self.assertNotEqual(a, None) diff --git a/dowsing/tests/setuptools_types.py b/dowsing/tests/setuptools_types.py new file mode 100644 index 0000000..dc51813 --- /dev/null +++ b/dowsing/tests/setuptools_types.py @@ -0,0 +1,173 @@ +import unittest +from configparser import RawConfigParser +from io import StringIO +from typing import Dict, List, Union + +from imperfect import ConfigFile +from parameterized import parameterized + +from dowsing.setuptools.types import ( + BoolWriter, + DictWriter, + ListCommaWriter, + ListCommaWriterCompat, + ListSemiWriter, + SectionWriter, + StrWriter, +) + + +class WriterTest(unittest.TestCase): + @parameterized.expand( # type: ignore + [ + (False,), + (True,), + ] + ) + def 
test_bool_writer(self, arg: bool) -> None: + c = ConfigFile() + c.set_value("a", "b", BoolWriter().to_ini(arg)) + buf = StringIO() + c.build(buf) + + rcp = RawConfigParser(strict=False) + rcp.read_string(buf.getvalue()) + self.assertEqual(str(arg).lower(), rcp["a"]["b"]) + + @parameterized.expand( # type: ignore + [ + ("hello",), + ("a\nb\nc",), + ] + ) + def test_str_writer(self, arg: str) -> None: + c = ConfigFile() + c.set_value("a", "b", StrWriter().to_ini(arg)) + buf = StringIO() + c.build(buf) + + rcp = RawConfigParser(strict=False) + rcp.read_string(buf.getvalue()) + self.assertEqual(arg, rcp["a"]["b"]) + + @parameterized.expand( # type: ignore + [ + ([], ""), + (["a"], "\na"), + (["a", "b"], "\na\nb"), + (["a", "b", "c"], "\na\nb\nc"), + ] + ) + def test_list_comma_writer(self, arg: List[str], expected: str) -> None: + c = ConfigFile() + c.set_value("a", "b", ListCommaWriter().to_ini(arg)) + buf = StringIO() + c.build(buf) + + rcp = RawConfigParser(strict=False) + rcp.read_string(buf.getvalue()) + self.assertEqual(expected, rcp["a"]["b"]) + + @parameterized.expand( # type: ignore + [ + ([], ""), + (["a"], "\na"), + (["a", "b"], "\na\nb"), + (["a", "b", "c"], "\na\nb\nc"), + ] + ) + def test_list_semi_writer(self, arg: List[str], expected: str) -> None: + c = ConfigFile() + c.set_value("a", "b", ListSemiWriter().to_ini(arg)) + buf = StringIO() + c.build(buf) + + rcp = RawConfigParser(strict=False) + rcp.read_string(buf.getvalue()) + self.assertEqual(expected, rcp["a"]["b"]) + + @parameterized.expand( # type: ignore + # fmt: off + [ + ({}, ""), + ({"x": "y"}, "\nx=y"), + ({"x": "y", "z": "zz"}, "\nx=y\nz=zz"), + ] + # fmt: on + ) + def test_dict_writer(self, arg: Dict[str, str], expected: str) -> None: + c = ConfigFile() + c.set_value("a", "b", DictWriter().to_ini(arg)) + buf = StringIO() + c.build(buf) + + rcp = RawConfigParser(strict=False) + rcp.read_string(buf.getvalue()) + # I would prefer this be dangling lines + self.assertEqual(expected, 
rcp["a"]["b"]) + + @parameterized.expand( # type: ignore + # fmt: off + [ + ([], ""), + ("abc", "\nabc"), + (["a"], "\na"), + (["a", "b"], "\na\nb"), + (["a", "b", "c"], "\na\nb\nc"), + ] + # fmt: on + ) + def test_list_comma_writer_compat( + self, arg: Union[str, List[str]], expected: str + ) -> None: + c = ConfigFile() + c.set_value("a", "b", ListCommaWriterCompat().to_ini(arg)) + buf = StringIO() + c.build(buf) + + rcp = RawConfigParser(strict=False) + rcp.read_string(buf.getvalue()) + # I would prefer this be dangling lines + self.assertEqual(expected, rcp["a"]["b"]) + + @parameterized.expand( # type: ignore + [ + ([], ""), + (["a"], "\na"), + (["a", "b"], "\na\nb"), + (["a", "b", "c"], "\na\nb\nc"), + ] + ) + def test_section_writer(self, arg: List[str], expected: str) -> None: + c = ConfigFile() + c.set_value("a", "b", SectionWriter().to_ini(arg)) + buf = StringIO() + c.build(buf) + + rcp = RawConfigParser(strict=False) + rcp.read_string(buf.getvalue()) + self.assertEqual(expected, rcp["a"]["b"]) + + def test_roundtrip_str(self) -> None: + s = "abc" + inst = StrWriter() + self.assertEqual(s, inst.from_ini(inst.to_ini(s))) + + def test_roundtrip_lists(self) -> None: + lst = ["a", "bc"] + inst = ListSemiWriter() + self.assertEqual(lst, inst.from_ini(inst.to_ini(lst))) + inst2 = ListCommaWriter() + self.assertEqual(lst, inst2.from_ini(inst2.to_ini(lst))) + inst3 = ListCommaWriterCompat() + self.assertEqual(lst, inst3.from_ini(inst3.to_ini(lst))) + + def test_roundtrip_dict(self) -> None: + d = {"a": "bc", "d": "ef"} + inst = DictWriter() + self.assertEqual(d, inst.from_ini(inst.to_ini(d))) + + def test_roundtrip_bool(self) -> None: + for b in (True, False): + inst = BoolWriter() + self.assertEqual(b, inst.from_ini(inst.to_ini(b))) diff --git a/dowsing/types.py b/dowsing/types.py index bfbfae1..7c93096 100644 --- a/dowsing/types.py +++ b/dowsing/types.py @@ -1,5 +1,6 @@ from pathlib import Path -from typing import Any, Dict, Optional, Sequence, Tuple +from types 
import MappingProxyType +from typing import Any, Dict, Mapping, Optional, Sequence, Set, Tuple import pkginfo.distribution @@ -34,31 +35,58 @@ def get_metadata(self) -> "Distribution": raise NotImplementedError -# TODO: pkginfo isn't typed, and is doing to require a yak-shave to send a PR -# since it's on launchpad. -class Distribution(pkginfo.distribution.Distribution): # type: ignore +DEFAULT_EMPTY_DICT: Mapping[str, Any] = MappingProxyType({}) + + +class Distribution(pkginfo.distribution.Distribution): # These are not actually part of the metadata, see PEP 566 setup_requires: Sequence[str] = () tests_require: Sequence[str] = () - extras_require: Dict[str, Sequence[str]] = {} + extras_require: Mapping[str, Sequence[str]] = DEFAULT_EMPTY_DICT use_scm_version: Optional[bool] = None zip_safe: Optional[bool] = None include_package_data: Optional[bool] = None test_suite: str = "" + test_loader: str = "" namespace_packages: Sequence[str] = () + package_data: Mapping[str, Sequence[str]] = DEFAULT_EMPTY_DICT + packages: Sequence[str] = () + package_dir: Mapping[str, str] = DEFAULT_EMPTY_DICT + packages_dict: Mapping[str, str] = DEFAULT_EMPTY_DICT + py_modules: Sequence[str] = () + entry_points: Mapping[str, Sequence[str]] = DEFAULT_EMPTY_DICT + find_packages_where: str = "." + find_packages_exclude: Sequence[str] = () + find_packages_include: Sequence[str] = ("*",) + source_mapping: Optional[Mapping[str, str]] = None + pbr: Optional[bool] = None + pbr__files__packages_root: Optional[str] = None + pbr__files__packages: Optional[str] = None + provides_extra: Optional[Sequence[str]] = () def _getHeaderAttrs(self) -> Sequence[Tuple[str, str, bool]]: # Until I invent a metadata version to include this, do so # unconditionally. - return tuple(super()._getHeaderAttrs()) + ( - ("Setup-Requires", "setup_requires", True), - ("Tests-Require", "tests_require", True), + # Stubs are wrong, this does too exist. 
+ return tuple(super()._getHeaderAttrs()) + ( # type: ignore[misc]
+ ("X-Setup-Requires", "setup_requires", True),
+ ("X-Tests-Require", "tests_require", True),
("???", "extras_require", False),
- ("Use-SCM-Version", "use_scm_version", False),
- ("Zip-Safe", "zip_safe", False),
- ("Test-Suite", "test_suite", False),
- ("Include-Package-Data", "include_package_data", False),
- ("Namespace-Package", "namespace_packages", True),
+ ("X-Use-SCM-Version", "use_scm_version", False),
+ ("X-Zip-Safe", "zip_safe", False),
+ ("X-Test-Suite", "test_suite", False),
+ ("X-Test-Loader", "test_loader", False),
+ ("X-Include-Package-Data", "include_package_data", False),
+ ("X-Namespace-Package", "namespace_packages", True),
+ ("X-Package-Data", "package_data", False),
+ ("X-Packages", "packages", True),
+ ("X-Package-Dir", "package_dir", False),
+ ("X-Packages-Dict", "packages_dict", False),
+ ("X-Py-Modules", "py_modules", True),
+ ("X-Entry-Points", "entry_points", False),
+ ("X-Pbr", "pbr", False),
+ ("X-pbr__files__packages_root", "pbr__files__packages_root", False),
+ ("X-pbr__files__packages", "pbr__files__packages", True),
)

def asdict(self) -> Dict[str, Any]:
@@ -67,3 +95,54 @@ def asdict(self) -> Dict[str, Any]:
if getattr(self, x):
d[x] = getattr(self, x)
return d
+
+ def _source_mapping(self, root: Path) -> Optional[Dict[str, str]]:
+ """
+ Returns install path -> src path
+
+ If an exception like FileNotFound is encountered, returns None.
+ """
+ d: Dict[str, str] = {}
+
+ for m in self.py_modules:
+ if m == "?":
+ return None
+ m = m.replace(".", "/")
+ d[f"{m}.py"] = f"{m}.py"
+
+ try:
+ # This commented block is approximately correct for setuptools, but
+ # does not understand package_data.
+ # # k = foo.bar, v = src/foo/bar + # for k, v in self.packages_dict.items(): + # kp = k.replace(".", "/") + # for item in (root / v).iterdir(): + # if item.is_file(): + # d[f"{kp}/{item.name}"] = f"{v}/{item.name}" + + # Instead, this behavior is more like flit/poetry by including all + # files under package dirs, in a way that's mostly compatible with + # setuptools setting package_dir dicts. This tends to include + # in-package tests, which is a behavior I like, but I'm sure some + # people won't. + + seen_paths: Set[Path] = set() + + # Longest source path first, will "own" the item + for k, v in sorted( + self.packages_dict.items(), key=lambda x: len(x[1]), reverse=True + ): + kp = k.replace(".", "/") + vp = root / v + for item in vp.rglob("*"): + if item in seen_paths: + continue + seen_paths.add(item) + if item.is_file(): + rel = item.relative_to(vp) + d[(kp / rel).as_posix()] = (v / rel).as_posix() + + except IOError: + return None + + return d diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index 0479c95..0000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,9 +0,0 @@ -black==19.10b0 -coverage==4.5.4 -flake8==3.7.9 -isort==4.3.21 -mypy==0.750 -tox==3.14.1 -twine==3.1.1 -volatile==2.1.0 -wheel==0.33.6 diff --git a/requirements.txt b/requirements.txt index 757f549..7afca69 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ -highlighter==0.1.0 +highlighter==0.2.0 imperfect==0.3.0 -LibCST==0.3.12 -tomlkit==0.7.0 +LibCST==1.5.1 +tomlkit==0.13.2 +pkginfo==1.11.2 diff --git a/setup.cfg b/setup.cfg index 18b446a..db662a1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,16 +9,37 @@ author = Tim Hatch author_email = tim@timhatch.com [options] -packages = dowsing +packages = + dowsing + dowsing.setuptools + dowsing.tests setup_requires = setuptools_scm setuptools >= 38.3.0 -python_requires = >=3.6 +python_requires = >=3.7 install_requires = - highlighter - imperfect - LibCST>=0.3.1 + highlighter>=0.1.1 + 
imperfect>=0.1.0 + LibCST>=0.3.7 tomlkit>=0.2.0 + pkginfo>=1.4.2 + setuptools >= 38.3.0 + +[options.extras_require] +dev = + black==24.10.0 + click==8.1.7 + flake8==7.1.1 + mypy==1.13.0 + tox==4.23.2 + twine==5.1.1 + ufmt==2.8.0 + usort==1.0.8.post1 + wheel==0.45.1 + honesty==0.3.0b1 +test = + coverage >= 6 + volatile==2.1.0 [check] metadata = true @@ -44,18 +65,24 @@ use_parentheses = True [mypy] ignore_missing_imports = True +python_version = 3.8 +strict = True [tox:tox] -envlist = py36, py37, py38 +envlist = py{38,39,310,311,312,313}-tests [testenv] -deps = -rrequirements-dev.txt -whitelist_externals = make +deps = .[test] +allowlist_externals = make commands = make test setenv = - py{36,37,38}: COVERAGE_FILE={envdir}/.coverage + tests: COVERAGE_FILE={envdir}/.coverage [flake8] ignore = E203, E231, E266, E302, E501, W503 max-line-length = 88 + +[options.package_data] +dowsing = + py.typed