diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..b697e71 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,16 @@ +[run] +branch = True +source_pkgs=rez_pip + +[paths] +; Map files to src/ so that codecov is happy and can find the right paths. +source = + src + .nox/**/site-packages + +[report] +exclude_also = + def __dir__ + if TYPE_CHECKING: + if typing\.TYPE_CHECKING: + \.\.\. diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..c271d45 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,5 @@ +# Set the default behavior, in case people don't have core.autocrlf set. +* text=auto + +# Denote all files that are truly binary and should not be modified. +*.patch binary diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8c8a52f..64af952 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -3,6 +3,7 @@ name: test on: push: + branches: [main] paths-ignore: - .github/workflows/update_pip.yaml - 'docs/**' @@ -26,10 +27,10 @@ jobs: strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python: ['3.8', '3.9', '3.10', '3.11'] # Ping macos to 13 so that we get intel CPUs. # TODO: Make our tests support arm64. 
- os: ['ubuntu-latest', 'windows-latest', 'macos-13'] + os: ['ubuntu-latest', 'windows-latest', 'macos-latest'] fail-fast: false steps: @@ -42,9 +43,10 @@ jobs: run: pipx run nox --error-on-missing-interpreter -s test-${{ matrix.python }} - name: Codecov upload - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: files: 'coverage.xml' + disable_search: true env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitignore b/.gitignore index 7a75dcf..c58dcc9 100644 --- a/.gitignore +++ b/.gitignore @@ -57,3 +57,4 @@ tests/data/rez_repo/ tests/data/_tmp_download/ docs/bin/ .idea/ +/patches diff --git a/docs/requirements.txt b/docs/requirements.txt index 4ba4268..316872a 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,4 @@ -sphinx ~= 7.3 +sphinx ~= 8.1 furo sphinx-autobuild sphinx-inline-tabs diff --git a/docs/source/api.rst b/docs/source/api.rst new file mode 100644 index 0000000..8932f57 --- /dev/null +++ b/docs/source/api.rst @@ -0,0 +1,27 @@ +=== +API +=== + +.. warning:: The API is only meant to be used by plugins authors. + +.. autoclass:: rez_pip.pip.T + +.. autoclass:: rez_pip.pip.DownloadInfo + :members: + +.. autoclass:: rez_pip.pip.Metadata + :members: + :undoc-members: + +.. autoclass:: rez_pip.pip.PackageInfo + :members: + +.. autoclass:: rez_pip.pip.PackageGroup + :members: + :show-inheritance: + +.. autoclass:: rez_pip.pip.DownloadedArtifact + :members: + +.. 
autoclass:: rez_pip.plugins.CleanupAction + :members: diff --git a/docs/source/conf.py b/docs/source/conf.py index 5d33d50..7fb90cc 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -4,14 +4,20 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html import re +import inspect import argparse +import importlib import docutils.nodes import sphinx.transforms +import sphinx.util.nodes import sphinx.application +import sphinx.ext.autodoc import sphinx.util.docutils +import docutils.statemachine import rez_pip.cli +import rez_pip.plugins # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information @@ -25,6 +31,7 @@ extensions = [ # first-party extensions + "sphinx.ext.todo", "sphinx.ext.autodoc", "sphinx.ext.extlinks", "sphinx.ext.intersphinx", @@ -55,7 +62,7 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-the-linkcheck-builder linkcheck_allowed_redirects = { - r"https://github.com/JeanChristopheMorinPerso/rez-pip/issues/\d+": "https://github.com/JeanChristopheMorinPerso/rez-pip/pull/\d+" + r"https://github.com/JeanChristopheMorinPerso/rez-pip/issues/\d+": r"https://github.com/JeanChristopheMorinPerso/rez-pip/pull/\d+" } @@ -63,13 +70,26 @@ # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), "rez": ("https://rez.readthedocs.io/en/stable/", None), } # Force usage of :external: -intersphinx_disabled_reftypes = ["*"] +# intersphinx_disabled_reftypes = ["*"] +# -- Options for sphinx.ext.autodoc ------------------------------------------ +# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html + +# autodoc_typehints = "description" +autodoc_typehints_format = "short" +autodoc_member_order = "bysource" + +# -- Options for sphinx.ext.todo -------------------------------------------- +# 
https://www.sphinx-doc.org/en/master/usage/extensions/todo.html + +todo_include_todos = True + # -- Custom ------------------------------------------------------------------ # Custom stuff @@ -194,7 +214,7 @@ def run(self) -> list[docutils.nodes.Node]: # Add links to rez docs for known settings. help_str = re.sub( - "(.* \(default: configured )([a-zA-Z_]+)(.*)$", + r"(.* \(default: configured )([a-zA-Z_]+)(.*)$", r"\g<1> :external:data:`\g<2>`\g<3>", help_str, ) @@ -235,6 +255,158 @@ def run(self) -> list[docutils.nodes.Node]: return node.children +class RezAutoPlugins(sphinx.util.docutils.SphinxDirective): + """ + Special rez-pip-autoplugins directive. This is quite similar to "autosummary" in some ways. + """ + + required_arguments = 0 + optional_arguments = 0 + + def run(self) -> list[docutils.nodes.Node]: + # Create the node. + node = docutils.nodes.section() + node.document = self.state.document + + rst = docutils.statemachine.ViewList() + + # Add rezconfig as a dependency to the current document. The document + # will be rebuilt if rezconfig changes. + self.env.note_dependency(rez_pip.plugins.__file__) + self.env.note_dependency(__file__) + + path, lineNumber = self.get_source_info() + + document = [] + for plugin, hooks in rez_pip.plugins._getHookImplementations().items(): + hooks = [f":func:`{hook}`" for hook in hooks] + document.append(f"* {plugin.split('.')[-1]}: {', '.join(hooks)}") + + document = "\n".join(document) + + # Add each line to the view list. + for index, line in enumerate(document.split("\n")): + # Note to future people that will look at this. + # "line" has to be a single line! It can't be a line like "this\nthat". + rst.append(line, path, lineNumber + index) + + # Finally, convert the rst into the appropriate docutils/sphinx nodes. + sphinx.util.nodes.nested_parse_with_titles(self.state, rst, node) + + # Return the generated nodes. 
+ return node.children + + +class RezPipAutoPluginHooks(sphinx.util.docutils.SphinxDirective): + """ + Special rez-pip-autopluginhooks directive. This is quite similar to "autosummary" in some ways. + """ + + required_arguments = 1 + optional_arguments = 0 + + def run(self) -> list[docutils.nodes.Node]: + # Create the node. + node = docutils.nodes.section() + node.document = self.state.document + + rst = docutils.statemachine.ViewList() + + # Add rezconfig as a dependency to the current document. The document + # will be rebuilt if rezconfig changes. + self.env.note_dependency(rez_pip.plugins.__file__) + self.env.note_dependency(__file__) + + path, lineNumber = self.get_source_info() + + fullyQualifiedClassName = self.arguments[0] + module, klassname = fullyQualifiedClassName.rsplit(".", 1) + + mod = importlib.import_module(module) + klass = getattr(mod, klassname) + + methods = [ + method + for method in inspect.getmembers(klass, predicate=inspect.isfunction) + if not method[0].startswith("_") + ] + + document = [] + for method in sorted(methods, key=lambda x: x[1].__code__.co_firstlineno): + document.append(f".. autohook:: {module}.{klassname}.{method[0]}") + + document = "\n".join(document) + + # Add each line to the view list. + for index, line in enumerate(document.split("\n")): + # Note to future people that will look at this. + # "line" has to be a single line! It can't be a line like "this\nthat". + rst.append(line, path, lineNumber + index) + + # Finally, convert the rst into the appropriate docutils/sphinx nodes. + sphinx.util.nodes.nested_parse_with_titles(self.state, rst, node) + + # Return the generated nodes. 
+ return node.children + + +def autodoc_process_signature( + app: sphinx.application.Sphinx, + what: str, + name: str, + obj, + options: dict, + signature: str, + return_annotation, +): + signature = signature.replace( + "rez_pip.compat.importlib_metadata", "~importlib.metadata" + ) + + return signature, return_annotation + + +class HookDocumenter(sphinx.ext.autodoc.FunctionDocumenter): + """ + Custom autohook directive to document our hooks. + It allows us to easily document the hooks from the rez_pip.plugins.PluginSpec + class without exposing the class and module name. + """ + + objtype = "hook" # auto + hook + directivetype = "function" # generated reST directive + + def format_signature(self, **kwargs) -> str: + """ + Format the signature and remove self. We really don't want to expose + the class and module name or the fact that we are documenting methods. + """ + sig = super().format_signature(**kwargs) + sig = re.sub(r"\(self(,\s)?", "(", sig) + + # Also force short names for our own types + sig = sig.replace("rez_pip.", "~rez_pip.") + + return sig + + def add_directive_header(self, sig): + modname = self.modname + # Hacky, but it does the job. This should remove the module name from the directive + # created by autodoc. + self.modname = "" + + data = super().add_directive_header(sig) + + # We need to restore it because autodoc does lots of things with the module name. 
+ self.modname = modname + return data + + + def setup(app: sphinx.application.Sphinx): app.add_directive("rez-autoargparse", RezAutoArgparseDirective) + app.add_directive("rez-pip-autoplugins", RezAutoPlugins) + app.add_directive("rez-pip-autopluginhooks", RezPipAutoPluginHooks) app.add_transform(ReplaceGHRefs) + + app.connect("autodoc-process-signature", autodoc_process_signature) + app.add_autodocumenter(HookDocumenter) diff --git a/docs/source/faq.rst b/docs/source/faq.rst index fc26232..756dac4 100644 --- a/docs/source/faq.rst +++ b/docs/source/faq.rst @@ -4,8 +4,31 @@ FAQ List of commonly asked questions. -Why does the rez package created by rez-pip creates a variant per platform? -=========================================================================== +Which packages does it support? +=============================== + +It technically supports all packages available on PyPI that are distributed as wheels. +Packages that only provide an sdist are not supported. + +We say "technically" because there are some exceptions. Some packages on PyPI rely +on DSOs (shared libraries, i.e. ``.so``/``.DLL``/``.dylib`` files) that are not available on +all platforms. This is normal and is supported for most packages. However, there are some +packages that rely on methods like adding paths using :func:`os.add_dll_directory` or +hardcoded paths. + +Some others rely on `path configuration (.pth) files `_. + +When a package relies on these methods, rez-pip will successfully install it, but +the package might not function correctly (either partly or entirely). + +The :doc:`plugin system ` was created to handle these cases. You can use plugins +to modify the package metadata, patch source files, add/remove files, etc. + +``rez-pip`` comes with some :ref:`built-in plugins ` for packages that are popular +in our communities and are known to be "broken" when installed with ``rez-pip``. + +Why does the rez package created by rez-pip create a variant per platform?
+========================================================================== Sometimes rez-pip creates rez packages that have variants for the platform and arch on which they were installed, and sometimes it even creates variants for Python versions. Bellow are the scenarios diff --git a/docs/source/index.rst b/docs/source/index.rst index 3cd4830..6ace50e 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -22,6 +22,7 @@ Features create per python version variants when installing a package that has console scripts. * Better output logs. * Implemented as an out-of-tree plugin, which means faster development cycle and more frequent releases. +* :doc:`Plugin system ` that allows for easy extensibility (experimental). * Maintained by the rez maintainers. Prerequisites @@ -69,5 +70,7 @@ automatically created by the `install.py `_ framework, +and as such, plugins must be registered using `entry points `_. + +The entry point group is named ``rez-pip``. + +In a ``pyproject.toml`` file, it can be set like this: + +.. code-block:: toml + :caption: pyproject.toml + + [project.entry-points."rez-pip"] + my_plugin = "my_plugin_module" + + +Functions +========= + +.. Not Using autodoc here because the decorator has a complex + signature to help type hinters. That signature is not needed + for the end user. +.. py:decorator:: rez_pip.plugins.hookimpl + + Decorator used to register a plugin hook. + +Hooks +===== + +The list of available hooks is provided below. They are listed in the order they +are called by rez-pip. + +.. rez-pip-autopluginhooks:: rez_pip.plugins.PluginSpec + + +Built-in plugins +================ + +rez-pip comes with some built-in plugins that are enabled by default. They exists mostly +to fix packages that are known to be "broken" if we don't fix them using plugins. + +This lists the plugin names and the hooks they implement. + +.. 
rez-pip-autoplugins:: diff --git a/docs/source/user_guide.rst b/docs/source/user_guide.rst index ea08de3..df1b122 100644 --- a/docs/source/user_guide.rst +++ b/docs/source/user_guide.rst @@ -59,3 +59,86 @@ Since pip is used under the hood, pip can be configured as usual. See the `pip c for more information on the subject. Alternatively, you can also :ref:`pass custom command line arguments to pip `. .. _pip configuration documentation: https://pip.pypa.io/en/stable/topics/configuration/ + +Writing a plugin +================ + +As documented in :ref:`plugins:register a plugin`, plugins must be registered using entry points. +This also means that your plugin will have to be packaged using standard Python packaging tools. + +.. note:: Even if you package it using standard Python packaging tools, you won't need + to distribute it to PyPI. + +Our plugin will have the following file structure: + +.. code-block:: text + + my_plugin/ + ├── pyproject.toml + └── src/ + └── my_plugin/ + └── __init__.py + +In ``pyproject.toml``, we will define our package and plugin entry point: + +.. code-block:: toml + :caption: pyproject.toml + + [project] + name = "my_plugin" + version = "0.1.0" + + [project.entry-points."rez-pip"] + my_plugin = "my_plugin" + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + +This is the absolute minimum required to create a plugin. For more details +on how to package a python project, please refer to `the official documentation`_. + +.. _the official documentation: https://packaging.python.org/en/latest/tutorials/packaging-projects/ + +Now that this is out of the way, let's write our plugin. + +.. 
code-block:: python + :caption: src/my_plugin/__init__.py + + import logging + + import rez_pip.plugins + import rez.package_maker + + _LOG = logging.getLogger(__name__) + + + @rez_pip.plugins.hookimpl + def metadata(package: rez.package_maker.PackageMaker) -> None: + _LOG.info( + "Adding my_custom_attr to the package definition of %s %s", + package.name, + package.version, + ) + package.my_custom_attr = "my_custom_value" + +.. tip:: + :name: Logs + + It is highly recommended to add logs to your plugins. You can use :func:`logging.getLogger` + to get a pre-configured logger. Make sure to pass a unique name to the logger. + + Your logs should clearly describe what your plugin is doing. If your plugin modifies + something, then it should log that. If it is just reading something, then you might + not need to log. + +The plugin we defined in ``src/my_plugin/__init__.py`` registers a hook called ``metadata`` that +modifies the package definition. More particularly, it adds an attribute called ``my_custom_attr`` +to the package definition. Here we use a dummy attribute name just to illustrate the concept. +But this is a common scenario. + +For brevity, we only implement one hook. ``rez-pip`` provides many other hooks that you can implement. +Hooks are documented in :ref:`plugins:hooks`. + +Once this is done, you can test your plugin by installing it. For example, you can use ``pip install -e .`` +to install it in `editable mode `_.
diff --git a/mypy.ini b/mypy.ini index b6d560a..19f54df 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,6 +1,6 @@ [mypy] files = src -python_version = 3.7 +python_version = 3.8 strict = True show_error_codes = True warn_unused_configs = True diff --git a/noxfile.py b/noxfile.py index c3681da..352cdd4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,4 +1,5 @@ """Development automation""" + import nox # nox.options.sessions = ["lint", "test", "doctest"] @@ -12,8 +13,7 @@ def lint(session: nox.Session): @nox.session() def mypy(session: nox.Session): - session.install("mypy") - session.install(".", "-c", "tests/constraints.txt") + session.install("mypy", ".") session.run("mypy") @@ -25,7 +25,7 @@ def format(session: nox.Session): session.run("black", ".", "--check") -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"]) +@nox.session(python=["3.8", "3.9", "3.10", "3.11"]) def test(session: nox.Session): session.install("-r", "tests/requirements.txt") session.install(".") diff --git a/pyproject.toml b/pyproject.toml index df30d26..66a218a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ authors = [ ] license = { file="LICENSE" } readme = "README.md" -requires-python = ">=3.7" +requires-python = ">=3.8" dependencies = [ "installer>=0.7.0", "aiohttp", @@ -20,14 +20,18 @@ dependencies = [ "rez>=2.114.1", "dataclasses-json", "rich", - "importlib_metadata>=4.6 ; python_version < '3.10'", + "importlib_metadata>=4.6; python_version < '3.10'", + # 1.3 introduces type hints. + "pluggy>=1.2", + # Patches are finicky... Let's lock on the current latest version. + # We could always relax later if needed. 
+ "patch-ng==1.18.1", ] classifiers = [ "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", diff --git a/pytest.ini b/pytest.ini index d9f1853..6657b85 100644 --- a/pytest.ini +++ b/pytest.ini @@ -7,12 +7,12 @@ addopts = --cov-report=term-missing --cov-report=xml --cov-report=html - --durations=0 + #--durations=0 norecursedirs = rez_repo markers = integration: mark the tests as integration tests py37: mark the tests has using a Python 3.7 rez package - py39: mark the tests has using a Python 3.7 rez package + py39: mark the tests has using a Python 3.9 rez package py311: mark the tests has using a Python 3.11 rez package diff --git a/scripts/get_pyside6_files.py b/scripts/get_pyside6_files.py new file mode 100644 index 0000000..fcdd581 --- /dev/null +++ b/scripts/get_pyside6_files.py @@ -0,0 +1,313 @@ +from __future__ import annotations + +import os +import ast +import sys +import bisect +import typing +import difflib +import zipfile +import tempfile +import itertools +import contextlib +import subprocess + +import requests +import requests.models +import packaging.utils + + +# Token from https://github.com/pypa/pip/blob/bc553db53c264abe3bb63c6bcd6fc6f303c6f6e3/src/pip/_internal/network/lazy_wheel.py +class LazyZipOverHTTP: + """File-like object mapped to a ZIP file over HTTP. + + This uses HTTP range requests to lazily fetch the file's content, + which is supposed to be fed to ZipFile. If such requests are not + supported by the server, raise HTTPRangeRequestUnsupported + during initialization. 
+ """ + + def __init__( + self, + url: str, + session: requests.Session, + chunk_size: int = requests.models.CONTENT_CHUNK_SIZE, + ) -> None: + head = session.head(url, headers={"Accept-Encoding": "identity"}) + head.raise_for_status() + assert head.status_code == 200 + self._session, self._url, self._chunk_size = session, url, chunk_size + self._length = int(head.headers["Content-Length"]) + self._file = tempfile.NamedTemporaryFile() + self.truncate(self._length) + self._left: list[int] = [] + self._right: list[int] = [] + if "bytes" not in head.headers.get("Accept-Ranges", "none"): + raise ValueError("range request is not supported") + self._check_zip() + + @property + def mode(self) -> str: + """Opening mode, which is always rb.""" + return "rb" + + @property + def name(self) -> str: + """Path to the underlying file.""" + return self._file.name + + def seekable(self) -> bool: + """Return whether random access is supported, which is True.""" + return True + + def close(self) -> None: + """Close the file.""" + self._file.close() + + @property + def closed(self) -> bool: + """Whether the file is closed.""" + return self._file.closed + + def read(self, size: int = -1) -> bytes: + """Read up to size bytes from the object and return them. + + As a convenience, if size is unspecified or -1, + all bytes until EOF are returned. Fewer than + size bytes may be returned if EOF is reached. + """ + download_size = max(size, self._chunk_size) + start, length = self.tell(), self._length + stop = length if size < 0 else min(start + download_size, length) + start = max(0, stop - download_size) + self._download(start, stop - 1) + return self._file.read(size) + + def readable(self) -> bool: + """Return whether the file is readable, which is True.""" + return True + + def seek(self, offset: int, whence: int = 0) -> int: + """Change stream position and return the new absolute position. + + Seek to offset relative position indicated by whence: + * 0: Start of stream (the default). 
pos should be >= 0; + * 1: Current position - pos may be negative; + * 2: End of stream - pos usually negative. + """ + return self._file.seek(offset, whence) + + def tell(self) -> int: + """Return the current position.""" + return self._file.tell() + + def truncate(self, size: int | None = None) -> int: + """Resize the stream to the given size in bytes. + + If size is unspecified resize to the current position. + The current stream position isn't changed. + + Return the new file size. + """ + return self._file.truncate(size) + + def writable(self) -> bool: + """Return False.""" + return False + + def __enter__(self) -> LazyZipOverHTTP: + self._file.__enter__() + return self + + def __exit__(self, *exc: Any) -> None: + self._file.__exit__(*exc) + + @contextlib.contextmanager + def _stay(self) -> typing.Generator[None]: + """Return a context manager keeping the position. + + At the end of the block, seek back to original position. + """ + pos = self.tell() + try: + yield + finally: + self.seek(pos) + + def _check_zip(self) -> None: + """Check and download until the file is a valid ZIP.""" + end = self._length - 1 + for start in reversed(range(0, end, self._chunk_size)): + self._download(start, end) + with self._stay(): + try: + # For read-only ZIP files, ZipFile only needs + # methods read, seek, seekable and tell. 
+ zipfile.ZipFile(self) + except zipfile.BadZipFile: + pass + else: + break + + def _stream_response( + self, + start: int, + end: int, + base_headers: dict[str, str] = {"Accept-Encoding": "identity"}, + ) -> requests.Response: + """Return HTTP response to a range request from start to end.""" + headers = base_headers.copy() + headers["Range"] = f"bytes={start}-{end}" + # TODO: Get range requests to be correctly cached + headers["Cache-Control"] = "no-cache" + return self._session.get(self._url, headers=headers, stream=True) + + def _merge( + self, start: int, end: int, left: int, right: int + ) -> typing.Generator[tuple[int, int]]: + """Return a generator of intervals to be fetched. + + Args: + start (int): Start of needed interval + end (int): End of needed interval + left (int): Index of first overlapping downloaded data + right (int): Index after last overlapping downloaded data + """ + lslice, rslice = self._left[left:right], self._right[left:right] + i = start = min([start] + lslice[:1]) + end = max([end] + rslice[-1:]) + for j, k in zip(lslice, rslice): + if j > i: + yield i, j - 1 + i = k + 1 + if i <= end: + yield i, end + self._left[left:right], self._right[left:right] = [start], [end] + + def _download(self, start: int, end: int) -> None: + """Download bytes from start to end inclusively.""" + with self._stay(): + left = bisect.bisect_left(self._right, start) + right = bisect.bisect_right(self._left, end) + for start, end in self._merge(start, end, left, right): + response = self._stream_response(start, end) + response.raise_for_status() + self.seek(start) + for chunk in response.iter_content(self._chunk_size): + self._file.write(chunk) + + +# https://stackoverflow.com/a/66733795 +def compare_ast( + node1: ast.expr | list[ast.expr], node2: ast.expr | list[ast.expr] +) -> bool: + if type(node1) is not type(node2): + return False + + if isinstance(node1, ast.AST): + for k, v in vars(node1).items(): + if k in {"lineno", "end_lineno", "col_offset", 
"end_col_offset", "ctx"}: + continue + if not compare_ast(v, getattr(node2, k)): + return False + return True + + elif isinstance(node1, list) and isinstance(node2, list): + return all( + compare_ast(n1, n2) for n1, n2 in itertools.zip_longest(node1, node2) + ) + else: + return node1 == node2 + + +def run(): + with requests.get( + "https://pypi.org/simple/pyside6", + headers={"Accept": "application/vnd.pypi.simple.v1+json"}, + ) as resp: + resp.raise_for_status() + + data = resp.json() + + versions: list[str] = [] + for entry in data["files"]: + if not entry["filename"].endswith(".whl"): + continue + + name, version, buildtag, tags = packaging.utils.parse_wheel_filename( + entry["filename"] + ) + if version.pre: + continue + + if not any( + tag.platform.startswith("win_") and not tag.interpreter.startswith("pp") + for tag in tags + ): + continue + + print(entry["filename"]) + + # Store raw files in patches/data/ + # This will allow us to inspect them before deciding on how + # to create patches.
+ + directory = os.path.join("patches", "data", str(version)) + os.makedirs(directory, exist_ok=True) + + session = requests.Session() + wheel = LazyZipOverHTTP(entry["url"], session) + with zipfile.ZipFile(wheel) as zf: + for info in zf.infolist(): + if info.filename != "PySide6/__init__.py": + continue + + with open( + os.path.join(directory, os.path.basename(info.filename)), "wb" + ) as f: + f.write(zf.read(info)) + break + + versions.append(str(version)) + + print("Comparing files") + first = versions.pop(0) + + while len(versions) > 1: + leftFile = f"patches/data/{versions[0]}/__init__.py" + rightFile = f"patches/data/{versions[1]}/__init__.py" + with open(leftFile) as lfh, open(rightFile) as rfh: + lhs = ast.parse(lfh.read()) + rhs = ast.parse(rfh.read()) + + leftAST = next( + node + for node in lhs.body + if isinstance(node, ast.FunctionDef) + and node.name == "_additional_dll_directories" + ) + + rightAST = next( + node + for node in rhs.body + if isinstance(node, ast.FunctionDef) + and node.name == "_additional_dll_directories" + ) + + if not compare_ast(leftAST, rightAST): + print( + f"{versions[0]} and {versions[1]}'s _additional_dll_directories function differ" + ) + leftCode = ast.unparse(leftAST).splitlines(keepends=True) + rightCode = ast.unparse(rightAST).splitlines(keepends=True) + + result = difflib.unified_diff( + leftCode, rightCode, fromfile=leftFile, tofile=rightFile + ) + + sys.stdout.writelines(result) + + versions.pop(0) + + +run() diff --git a/src/rez_pip/cli.py b/src/rez_pip/cli.py index 3527a02..aae3a30 100644 --- a/src/rez_pip/cli.py +++ b/src/rez_pip/cli.py @@ -1,23 +1,19 @@ +from __future__ import annotations + import os import sys import json import shutil -import typing import logging import argparse import textwrap -import pathlib import tempfile +import itertools import subprocess -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - -import rich import rich.text 
import rich.panel +import rich.table import rez.version import rich.markup import rich.logging @@ -25,16 +21,20 @@ import rez_pip.pip import rez_pip.rez import rez_pip.data +import rez_pip.patch +import rez_pip.utils +import rez_pip.plugins import rez_pip.install import rez_pip.download import rez_pip.exceptions +from rez_pip.compat import importlib_metadata _LOG = logging.getLogger("rez_pip.cli") __all__ = ["run"] -def __dir__() -> typing.List[str]: +def __dir__() -> list[str]: return __all__ @@ -120,6 +120,10 @@ def _createParser() -> argparse.ArgumentParser: help="Print debug information that you can use when reporting an issue on GitHub.", ) + debugGroup.add_argument( + "--list-plugins", action="store_true", help="List all registered plugins" + ) + parser.usage = f""" %(prog)s [options] @@ -129,8 +133,8 @@ def _createParser() -> argparse.ArgumentParser: def _parseArgs( - args: typing.List[str], -) -> typing.Tuple[argparse.Namespace, typing.List[str]]: + args: list[str], +) -> tuple[argparse.Namespace, list[str]]: parser = _createParser() knownArgs = [] @@ -165,7 +169,7 @@ def _validateArgs(args: argparse.Namespace) -> None: ) -def _run(args: argparse.Namespace, pipArgs: typing.List[str], pipWorkArea: str) -> None: +def _run(args: argparse.Namespace, pipArgs: list[str], pipWorkArea: str) -> None: pythonVersions = rez_pip.rez.getPythonExecutables( args.python_version, packageFamily="python" ) @@ -188,7 +192,7 @@ def _run(args: argparse.Namespace, pipArgs: typing.List[str], pipWorkArea: str) installedWheelsDir = os.path.join(pipWorkArea, "installed", pythonVersion) os.makedirs(installedWheelsDir, exist_ok=True) - with rich.get_console().status( + with rez_pip.utils.CONSOLE.status( f"[bold]Resolving dependencies for {rich.markup.escape(', '.join(args.packages))} (python-{pythonVersion})" ): packages = rez_pip.pip.getPackages( @@ -202,44 +206,75 @@ def _run(args: argparse.Namespace, pipArgs: typing.List[str], pipWorkArea: str) ) _LOG.info(f"Resolved {len(packages)} 
dependencies for python {pythonVersion}") + _packageGroups: list[rez_pip.pip.PackageGroup[rez_pip.pip.PackageInfo]] = list( + itertools.chain(*rez_pip.plugins.getHook().groupPackages(packages=packages)) # type: ignore[arg-type] + ) + + # TODO: Verify that no packages are in two or more groups? It should theorically + # not be possible since plugins are called one after the other? But it could happen + # if a plugin forgets to pop items from the package list... The problem is that we + # can't know which plugin did what, so we could only say "something went wrong" + # and can't point to which plugin is at fault. + + # Remove empty groups + _packageGroups = [group for group in _packageGroups if group] + + # Add packages that were not grouped. + _packageGroups += [ + rez_pip.pip.PackageGroup[rez_pip.pip.PackageInfo](tuple([package])) + for package in packages + ] # TODO: Should we postpone downloading to the last minute if we can? _LOG.info("[bold]Downloading...") - wheels = rez_pip.download.downloadPackages(packages, wheelsDir) - _LOG.info(f"[bold]Downloaded {len(wheels)} wheels") - dists: typing.Dict[importlib_metadata.Distribution, bool] = {} - - with rich.get_console().status( - f"[bold]Installing wheels into {installedWheelsDir!r}" - ): - for package, wheel in zip(packages, wheels): - _LOG.info(f"[bold]Installing {package.name}-{package.version} wheel") - dist, isPure = rez_pip.install.installWheel( - package, pathlib.Path(wheel), installedWheelsDir - ) + packageGroups: list[ + rez_pip.pip.PackageGroup[rez_pip.pip.DownloadedArtifact] + ] = rez_pip.download.downloadPackages(_packageGroups, wheelsDir) - dists[dist] = isPure + foundLocally = downloaded = 0 + for group in packageGroups: + for package in group.packages: + if not package.isDownloadRequired(): + foundLocally += 1 + else: + downloaded += 1 - distNames = [dist.name for dist in dists.keys()] + _LOG.info( + f"[bold]Downloaded {downloaded} wheels, skipped {foundLocally} because they resolved to local files" 
+ ) - with rich.get_console().status("[bold]Creating rez packages..."): - for dist, package in zip(dists, packages): - isPure = dists[dist] + with rez_pip.utils.CONSOLE.status( + f"[bold]Installing wheels into {installedWheelsDir!r}" + ): + for group in packageGroups: + for package in group.packages: + _LOG.info(f"[bold]Installing {package.name!r} {package.path!r}") + targetPath = os.path.join(installedWheelsDir, package.name) + dist = rez_pip.install.installWheel( + package, + package.path, + targetPath, + ) + + rez_pip.install.cleanup(dist, targetPath) + rez_pip.patch.patch(dist, targetPath) + + group.dists.append(dist) + + with rez_pip.utils.CONSOLE.status("[bold]Creating rez packages..."): + for group in packageGroups: rez_pip.rez.createPackage( - dist, - isPure, + group, rez.version.Version(pythonVersion), - distNames, installedWheelsDir, - wheelURL=package.download_info.url, prefix=args.prefix, release=args.release, ) def _debug( - args: argparse.Namespace, console: rich.console.Console = rich.get_console() + args: argparse.Namespace, console: rich.console.Console = rez_pip.utils.CONSOLE ) -> None: """Print debug information""" prefix = " " @@ -313,10 +348,24 @@ def _debug( ) +def _printPlugins() -> None: + table = rich.table.Table("Name", "Hooks", box=None) + for plugin, hooks in rez_pip.plugins._getHookImplementations().items(): + table.add_row(plugin, ", ".join(hooks)) + rez_pip.utils.CONSOLE.print(table) + + def run() -> int: pipWorkArea = tempfile.mkdtemp(prefix="rez-pip-target") args, pipArgs = _parseArgs(sys.argv[1:]) + # Initialize the plugin system + rez_pip.plugins.getManager() + + if args.list_plugins: + _printPlugins() + return 0 + try: _validateArgs(args) @@ -336,7 +385,7 @@ def run() -> int: _run(args, pipArgs, pipWorkArea) return 0 except rez_pip.exceptions.RezPipError as exc: - rich.get_console().print(exc, soft_wrap=True) + rez_pip.utils.CONSOLE.print(exc, soft_wrap=True) return 1 finally: if not args.keep_tmp_dirs: diff --git 
a/src/rez_pip/compat.py b/src/rez_pip/compat.py new file mode 100644 index 0000000..469fa93 --- /dev/null +++ b/src/rez_pip/compat.py @@ -0,0 +1,9 @@ +import sys + + +if sys.version_info >= (3, 10): + import importlib.metadata as importlib_metadata +else: + import importlib_metadata + +__all__ = ["importlib_metadata"] diff --git a/src/rez_pip/data/patches/__init__.py b/src/rez_pip/data/patches/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/rez_pip/data/patches/pyside6_6_0_0_win_dll_path.patch b/src/rez_pip/data/patches/pyside6_6_0_0_win_dll_path.patch new file mode 100644 index 0000000..cb60541 --- /dev/null +++ b/src/rez_pip/data/patches/pyside6_6_0_0_win_dll_path.patch @@ -0,0 +1,20 @@ +--- python/PySide6/__init__.py 2025-01-11 15:01:54.266726602 -0500 ++++ python/PySide6/__init__.py 2025-01-11 15:06:19.646365323 -0500 +@@ -10,14 +10,9 @@ + + + def _additional_dll_directories(package_dir): +- # Find shiboken6 relative to the package directory. +- root = os.path.dirname(package_dir) +- # Check for a flat .zip as deployed by cx_free(PYSIDE-1257) +- if root.endswith('.zip'): +- return [] +- shiboken6 = os.path.join(root, 'shiboken6') +- if os.path.isdir(shiboken6): # Standard case, only shiboken6 is needed +- return [shiboken6] ++ # rez-pip patch: Return the path to the shiboken rez package ++ return [os.path.join(os.environ["REZ_SHIBOKEN6_ROOT"], "python", "shiboken6")] ++ + # The below code is for the build process when generate_pyi.py + # is executed in the build directory. We need libpyside and Qt in addition. 
+ shiboken6 = os.path.join(os.path.dirname(root), 'shiboken6', 'libshiboken') diff --git a/src/rez_pip/data/patches/pyside6_6_1_0_win_dll_path.patch b/src/rez_pip/data/patches/pyside6_6_1_0_win_dll_path.patch new file mode 100644 index 0000000..6c9cce8 --- /dev/null +++ b/src/rez_pip/data/patches/pyside6_6_1_0_win_dll_path.patch @@ -0,0 +1,20 @@ +--- python/PySide6/__init__.py 2025-01-11 14:43:26.190668073 -0500 ++++ python/PySide6/__init__.py 2025-01-11 15:09:43.741718533 -0500 +@@ -11,14 +11,9 @@ + + + def _additional_dll_directories(package_dir): +- # Find shiboken6 relative to the package directory. +- root = Path(package_dir).parent +- # Check for a flat .zip as deployed by cx_free(PYSIDE-1257) +- if root.suffix == '.zip': +- return [] +- shiboken6 = root / 'shiboken6' +- if shiboken6.is_dir(): # Standard case, only shiboken6 is needed +- return [shiboken6] ++ # rez-pip patch: Return the path to the shiboken rez package ++ return [os.path.join(os.environ["REZ_SHIBOKEN6_ROOT"], "python", "shiboken6")] ++ + # The below code is for the build process when generate_pyi.py + # is executed in the build directory. We need libpyside and Qt in addition. + shiboken6 = Path(root).parent / 'shiboken6' / 'libshiboken' diff --git a/src/rez_pip/data/patches/pyside6_6_2_4_win_dll_path.patch b/src/rez_pip/data/patches/pyside6_6_2_4_win_dll_path.patch new file mode 100644 index 0000000..9fac2e2 --- /dev/null +++ b/src/rez_pip/data/patches/pyside6_6_2_4_win_dll_path.patch @@ -0,0 +1,20 @@ +--- python/PySide6/__init__.py 2025-01-11 14:43:29.390687962 -0500 ++++ python/PySide6/__init__.py 2025-01-11 15:11:41.569480197 -0500 +@@ -11,14 +11,9 @@ + + + def _additional_dll_directories(package_dir): +- # Find shiboken6 relative to the package directory. 
+- root = Path(package_dir).parent +- # Check for a flat .zip as deployed by cx_free(PYSIDE-1257) +- if root.suffix == '.zip': +- return [] +- shiboken6 = root / 'shiboken6' +- if shiboken6.is_dir(): # Standard case, only shiboken6 is needed +- return [shiboken6] ++ # rez-pip patch: Return the path to the shiboken rez package ++ return [os.path.join(os.environ["REZ_SHIBOKEN6_ROOT"], "python", "shiboken6")] ++ + # The below code is for the build process when generate_pyi.py + # is executed in the build directory. We need libpyside and Qt in addition. + shiboken6 = Path(root).parent / 'shiboken6' / 'libshiboken' diff --git a/src/rez_pip/data/patches/pyside6_6_3_0_win_dll_path.patch b/src/rez_pip/data/patches/pyside6_6_3_0_win_dll_path.patch new file mode 100644 index 0000000..4744f3c --- /dev/null +++ b/src/rez_pip/data/patches/pyside6_6_3_0_win_dll_path.patch @@ -0,0 +1,20 @@ +--- python/PySide6/__init__.py 2025-01-11 14:43:29.554022310 -0500 ++++ python/PySide6/__init__.py 2025-01-11 15:13:40.307210955 -0500 +@@ -12,14 +12,9 @@ + + + def _additional_dll_directories(package_dir): +- # Find shiboken6 relative to the package directory. +- root = Path(package_dir).parent +- # Check for a flat .zip as deployed by cx_free(PYSIDE-1257) +- if root.suffix == '.zip': +- return [] +- shiboken6 = root / 'shiboken6' +- if shiboken6.is_dir(): # Standard case, only shiboken6 is needed +- return [shiboken6] ++ # rez-pip patch: Return the path to the shiboken rez package ++ return [os.path.join(os.environ["REZ_SHIBOKEN6_ROOT"], "python", "shiboken6")] ++ + # The below code is for the build process when generate_pyi.py + # is executed in the build directory. We need libpyside and Qt in addition. 
+ shiboken6 = Path(root).parent / 'shiboken6' / 'libshiboken' diff --git a/src/rez_pip/data/patches/pyside6_6_7_3_win_dll_path.patch b/src/rez_pip/data/patches/pyside6_6_7_3_win_dll_path.patch new file mode 100644 index 0000000..1d24c67 --- /dev/null +++ b/src/rez_pip/data/patches/pyside6_6_7_3_win_dll_path.patch @@ -0,0 +1,20 @@ +--- python/PySide6/__init__.py 2025-01-12 11:37:31.807822271 -0500 ++++ python/PySide6/__init__.py 2025-01-12 11:46:10.322763362 -0500 +@@ -10,14 +10,9 @@ + + + def _additional_dll_directories(package_dir): +- # Find shiboken6 relative to the package directory. +- root = Path(package_dir).parent +- # Check for a flat .zip as deployed by cx_free(PYSIDE-1257) +- if root.suffix == '.zip': +- return [] +- shiboken6 = root / 'shiboken6' +- if shiboken6.is_dir(): # Standard case, only shiboken6 is needed +- return [shiboken6] ++ # rez-pip patch: Return the path to the shiboken rez package ++ return [os.path.join(os.environ["REZ_SHIBOKEN6_ROOT"], "python", "shiboken6")] ++ + # The below code is for the build process when generate_pyi.py + # is executed in the build directory. We need libpyside and Qt in addition. + shiboken6 = Path(root).parent / 'shiboken6' / 'libshiboken' diff --git a/src/rez_pip/data/patches/pyside6_6_8_1_win_dll_path.patch b/src/rez_pip/data/patches/pyside6_6_8_1_win_dll_path.patch new file mode 100644 index 0000000..177e3cf --- /dev/null +++ b/src/rez_pip/data/patches/pyside6_6_8_1_win_dll_path.patch @@ -0,0 +1,20 @@ +--- python/PySide6/__init__.py 2025-01-12 11:53:31.681352613 -0500 ++++ python/PySide6/__init__.py 2025-01-12 11:56:58.385665284 -0500 +@@ -13,14 +13,9 @@ + + + def _additional_dll_directories(package_dir): +- # Find shiboken6 relative to the package directory. 
+- root = Path(package_dir).parent +- # Check for a flat .zip as deployed by cx_free(PYSIDE-1257) +- if root.suffix == '.zip': +- return [] +- shiboken6 = root / 'shiboken6' +- if shiboken6.is_dir(): # Standard case, only shiboken6 is needed +- return [shiboken6] ++ # rez-pip patch: Return the path to the shiboken rez package ++ return [os.path.join(os.environ["REZ_SHIBOKEN6_ROOT"], "python", "shiboken6")] ++ + # The below code is for the build process when generate_pyi.py + # is executed in the build directory. We need libpyside and Qt in addition. + shiboken6 = Path(root).parent / 'shiboken6' / 'libshiboken' diff --git a/src/rez_pip/download.py b/src/rez_pip/download.py index 88604ee..4383144 100644 --- a/src/rez_pip/download.py +++ b/src/rez_pip/download.py @@ -1,38 +1,37 @@ +from __future__ import annotations + import os -import sys import typing import asyncio import hashlib import logging -import rich import aiohttp import rich.progress -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - import rez_pip.pip +import rez_pip.utils +from rez_pip.compat import importlib_metadata _LOG = logging.getLogger(__name__) _lock = asyncio.Lock() def downloadPackages( - packages: typing.List[rez_pip.pip.PackageInfo], dest: str -) -> typing.List[str]: - return asyncio.run(_downloadPackages(packages, dest)) + packageGroups: list[rez_pip.pip.PackageGroup[rez_pip.pip.PackageInfo]], + dest: str, +) -> list[rez_pip.pip.PackageGroup[rez_pip.pip.DownloadedArtifact]]: + return asyncio.run(_downloadPackages(packageGroups, dest)) async def _downloadPackages( - packages: typing.List[rez_pip.pip.PackageInfo], dest: str -) -> typing.List[str]: - items: typing.List[ - typing.Coroutine[typing.Any, typing.Any, typing.Optional[str]] - ] = [] - wheels = [] + packageGroups: list[rez_pip.pip.PackageGroup[rez_pip.pip.PackageInfo]], + dest: str, +) -> list[rez_pip.pip.PackageGroup[rez_pip.pip.DownloadedArtifact]]: + 
newPackageGroups: list[rez_pip.pip.PackageGroup[rez_pip.pip.DownloadedArtifact]] = ( + [] + ) + someFailed = False async with aiohttp.ClientSession() as session: with rich.progress.Progress( @@ -42,30 +41,90 @@ async def _downloadPackages( rich.progress.DownloadColumn(), rich.progress.TransferSpeedColumn(), transient=True, - console=rich.get_console(), + console=rez_pip.utils.CONSOLE, ) as progress: - tasks: typing.Dict[str, rich.progress.TaskID] = {} + tasks: dict[str, rich.progress.TaskID] = {} - # Create all the downlod tasks first - for package in packages: - tasks[package.name] = progress.add_task(package.name) + # Create all the download tasks first + numPackages = 0 + for group in packageGroups: + for package in group.packages: + if not package.isDownloadRequired(): + continue - # Then create the "total" progress bar. This ensures that total is at the bottom. - mainTask = progress.add_task(f"[bold]Total (0/{len(packages)})", total=0) + numPackages += 1 + tasks[package.name] = progress.add_task(package.name) - for package in packages: - items.append( - _download( - package, dest, session, progress, tasks[package.name], mainTask - ) + # Then create the "total" progress bar. This ensures that total is at the bottom. + mainTask = progress.add_task(f"[bold]Total (0/{numPackages})", total=0) + + futureGroups: list[ + list[ + typing.Coroutine[ + typing.Any, + typing.Any, + rez_pip.pip.DownloadedArtifact | None, + ] + ] + ] = [] + + # loop = asyncio.get_event_loop() + for group in packageGroups: + futures: list[ + typing.Coroutine[ + typing.Any, + typing.Any, + rez_pip.pip.DownloadedArtifact | None, + ] + ] = [] + for package in group.packages: + wheelName: str = os.path.basename(package.download_info.url) + wheelPath = os.path.join(dest, wheelName) + + if not package.isDownloadRequired(): + + # Note the subtlety of having to pass variables in the function + # signature. We can't rely on the scoped variable. 
+ async def _return_local( + _wheelPath: str, _package: rez_pip.pip.PackageInfo + ) -> rez_pip.pip.DownloadedArtifact: + return rez_pip.pip.DownloadedArtifact.from_dict( + {"_localPath": _wheelPath, **_package.to_dict()} + ) + + futures.append(_return_local(wheelPath, package)) + else: + futures.append( + _download( + package, + session, + progress, + tasks[package.name], + mainTask, + wheelName, + wheelPath, + ) + ) + futureGroups.append(futures) + + for _futures in futureGroups: + artifacts = tuple(await asyncio.gather(*_futures)) + + if not all(artifacts): + someFailed = True + + artifacts = typing.cast( + typing.Tuple[rez_pip.pip.DownloadedArtifact], artifacts ) - wheels = await asyncio.gather(*items) + newPackageGroups.append( + rez_pip.pip.PackageGroup[rez_pip.pip.DownloadedArtifact](artifacts) + ) - if not all(wheels): + if someFailed: raise RuntimeError("Some wheels failed to be downloaded") - return typing.cast(typing.List[str], wheels) + return newPackageGroups def getSHA256(path: str) -> str: @@ -86,15 +145,13 @@ def getSHA256(path: str) -> str: async def _download( package: rez_pip.pip.PackageInfo, - target: str, session: aiohttp.ClientSession, progress: rich.progress.Progress, taskID: rich.progress.TaskID, mainTaskID: rich.progress.TaskID, -) -> typing.Optional[str]: - wheelName: str = os.path.basename(package.download_info.url) - wheelPath = os.path.join(target, wheelName) - + wheelName: str, + wheelPath: str, +) -> rez_pip.pip.DownloadedArtifact | None: # TODO: Handle case where sha256 doesn't exist. We should also support the other supported # hash types. 
if ( @@ -152,4 +209,6 @@ async def _download( mainTaskID, description=f"[bold]Total ({len(completedItems)}/{total})" ) - return wheelPath + return rez_pip.pip.DownloadedArtifact.from_dict( + {"_localPath": wheelPath, **package.to_dict()} + ) diff --git a/src/rez_pip/exceptions.py b/src/rez_pip/exceptions.py index 54ccf4a..fe26adf 100644 --- a/src/rez_pip/exceptions.py +++ b/src/rez_pip/exceptions.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import rich import rich.console diff --git a/src/rez_pip/install.py b/src/rez_pip/install.py index 3d180b5..b92b141 100644 --- a/src/rez_pip/install.py +++ b/src/rez_pip/install.py @@ -1,25 +1,25 @@ """ Code that takes care of installing (extracting) wheels. """ + +from __future__ import annotations + import io import os +import re import sys +import shutil import typing -import zipfile import logging import pathlib +import zipfile import sysconfig +import collections.abc -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata +import rez_pip.exceptions if typing.TYPE_CHECKING: - if sys.version_info >= (3, 8): - from typing import Literal - else: - from typing_extensions import Literal + from typing import Literal import installer import installer.utils @@ -29,6 +29,9 @@ import installer.destinations import rez_pip.pip +import rez_pip.plugins +import rez_pip.exceptions +from rez_pip.compat import importlib_metadata _LOG = logging.getLogger(__name__) @@ -37,14 +40,28 @@ ScriptSection = Literal["console", "gui"] -def isWheelPure(source: installer.sources.WheelSource) -> bool: - stream = source.read_dist_info("WHEEL") - metadata = installer.utils.parse_metadata_file(stream) +class CleanupError(rez_pip.exceptions.RezPipError): + """ + Raised when a cleanup operation fails. + """ + + +def isWheelPure(dist: importlib_metadata.Distribution) -> bool: + # dist.files should never be empty, but assert to silence mypy. 
+ assert dist.files is not None + + path = next( + f + for f in dist.files + if os.fspath(f.locate()).endswith(os.path.join(".dist-info", "WHEEL")) + ) + with open(path.locate()) as fd: + metadata = installer.utils.parse_metadata_file(fd.read()) return typing.cast(str, metadata["Root-Is-Purelib"]) == "true" # Taken from https://github.com/pypa/installer/blob/main/src/installer/__main__.py#L49 -def getSchemeDict(name: str, target: str) -> typing.Dict[str, str]: +def getSchemeDict(name: str, target: str) -> dict[str, str]: vars = {} vars["base"] = vars["platbase"] = installed_base = target @@ -68,9 +85,9 @@ def getSchemeDict(name: str, target: str) -> typing.Dict[str, str]: def installWheel( package: rez_pip.pip.PackageInfo, - wheelPath: pathlib.Path, + wheelPath: str, targetPath: str, -) -> typing.Tuple[importlib_metadata.Distribution, bool]: +) -> importlib_metadata.Distribution: # TODO: Technically, target should be optional. We will always want to install in "pip install --target" # mode. So right now it's a CLI option for debugging purposes. @@ -81,11 +98,8 @@ def installWheel( script_kind=installer.utils.get_launcher_kind(), ) - isPure = True _LOG.debug(f"Installing {wheelPath} into {targetPath!r}") - with installer.sources.WheelFile.open(wheelPath) as source: - isPure = isWheelPure(source) - + with installer.sources.WheelFile.open(pathlib.Path(wheelPath)) as source: installer.install( source=source, destination=destination, @@ -118,14 +132,14 @@ def installWheel( if not dist.files: raise RuntimeError(f"{path!r} does not exist!") - return dist, isPure + return dist # TODO: Document where this code comes from. class CustomWheelDestination(installer.destinations.SchemeDictionaryDestination): # Exactly the same as SchemeDictionaryDestination, but uses our custom Script class. 
def write_script( - self, name: str, module: str, attr: str, section: "ScriptSection" + self, name: str, module: str, attr: str, section: ScriptSection ) -> installer.records.RecordEntry: """Write a script to invoke an entrypoint. :param name: name of the script @@ -173,9 +187,7 @@ def write_script( # TODO: Document where this code comes from. class Script(installer.scripts.Script): - def generate( - self, executable: str, kind: "LauncherKind" - ) -> typing.Tuple[str, bytes]: + def generate(self, executable: str, kind: LauncherKind) -> tuple[str, bytes]: """Generate a launcher for this script. :param executable: Path to the executable to invoke. :param kind: Which launcher template should be used. @@ -204,3 +216,103 @@ def generate( name = f"{self.name}.exe" data = launcher + shebang + b"\n" + stream.getvalue() return (name, data) + + +def cleanup(dist: importlib_metadata.Distribution, path: str) -> None: + """ + Run cleanup hooks. + + Note that this lives in install because the cleanups + are made on the installs (the wheel install). We could move this somewhere + else but it's not clear where. + """ + actionsGroups: collections.abc.Sequence[ + collections.abc.Sequence[rez_pip.plugins.CleanupAction] + ] = rez_pip.plugins.getHook().cleanup( + dist=dist, path=path + ) # type: ignore[assignment] + + # Flatten + actions: list[rez_pip.plugins.CleanupAction] = [ + action for group in actionsGroups for action in group + ] + + recordEntriesToRemove = [] + + for action in actions: + if not action.path.startswith(path): + # Security measure. Only perform operations on + # paths that are within the install path. 
+ raise CleanupError( + f"Typing to {action.op} {action.path!r} which is outside of {path!r}" + ) + + if action.op == "remove": + if not os.path.exists(action.path): + continue + + _LOG.info(f"Removing {action.path!r}") + if os.path.isdir(action.path): + shutil.rmtree(action.path) + else: + os.remove(action.path) + + recordEntriesToRemove.append( + os.path.normpath(os.path.relpath(action.path, path)).replace("\\", "/") + ) + else: + raise CleanupError(f"Unknown action: {action.op}") + + if recordEntriesToRemove: + deleteEntryFromRecord(dist, path, recordEntriesToRemove) + + +def deleteEntryFromRecord( + dist: importlib_metadata.Distribution, path: str, entries: list[str] +) -> None: + """ + Delete an entry from the record file. + + This code is not great. I feel like updating the RECORD file should + be simpler. Which means that we might need to refactor things a bit. + """ + items = [ + os.fspath(item) + for item in dist.files + if re.search(r"[a-zA-Z0-9._+]+\.dist-info/RECORD", os.fspath(item)) + ] + + if not items: + raise CleanupError(f"RECORD file not found for {dist.name!r}") + + recordFilePathRel = items[0] + recordFilePath = os.path.join(path, "python", recordFilePathRel) + + with open(recordFilePath) as f: + lines = f.readlines() + + schemesRaw = getSchemeDict(dist.name, path) + schemes = { + key: os.path.relpath(value, path) + for key, value in schemesRaw.items() + if value.startswith(path) + } + + # Format the entries to match the record file. This is important + # because when we install the files, we use a custom scheme. + # For example, we have to trim "python/" or "scripts/". 
+ for index, entry in enumerate(entries): + for schemePath in schemes.values(): + if entry.startswith(schemePath): + _LOG.debug(f"Stripping {schemePath!r}/ from {entry!r}") + entries[index] = entry.lstrip(schemePath + "/") + # Break on first match + break + + for index, elements in enumerate(installer.records.parse_record_file(lines)): + if elements[0] in entries: + lines.pop(index) + + with open(recordFilePath, "w") as f: + for line in lines: + f.write(line) diff --git a/src/rez_pip/patch.py b/src/rez_pip/patch.py new file mode 100644 index 0000000..02b75ab --- /dev/null +++ b/src/rez_pip/patch.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +import os +import math +import typing +import logging +import contextlib +import collections.abc +import logging.handlers + +import patch_ng + +import rez_pip.utils +import rez_pip.compat +import rez_pip.plugins +import rez_pip.exceptions +import rez_pip.data.patches +from rez_pip.compat import importlib_metadata + + +_LOG = logging.getLogger(__name__) + + +class PatchError(rez_pip.exceptions.RezPipError): + pass + + +def getBuiltinPatchesDir() -> str: + """Get the built-in patches directory""" + return os.path.dirname(rez_pip.data.patches.__file__) + + +@contextlib.contextmanager +def logIfErrorOrRaises() -> typing.Generator[None, None, None]: + """ + Log patch_ng logs if any error is logged or if the wrapped body raises. + Very slightly inspired by https://docs.python.org/3/howto/logging-cookbook.html#buffering-logging-messages-and-outputting-them-conditionally + + We basically don't want any logs from patch_ng is everything worked. We only + want logs when something wrong happens. 
+ """ + patch_ng.debugmode = True + logger = logging.getLogger("patch_ng") + initialLevel = logger.level + logger.setLevel(logging.DEBUG) + + handler = logging.handlers.MemoryHandler( + math.inf, # type: ignore[arg-type] + flushLevel=logging.ERROR, + target=logging.getLogger("rez_pip").handlers[0], + ) + handler.setFormatter(logging.Formatter("%(name)s %(levelname)8s %(message)s")) + + logger.addHandler(handler) + + try: + yield + except Exception as exc: + handler.flush() + raise exc from None + finally: + patch_ng.debugmode = False + logger.setLevel(initialLevel) + logger.removeHandler(handler) + + +def patch(dist: importlib_metadata.Distribution, path: str) -> None: + """Patch an installed package (wheel)""" + _LOG.debug(f"[bold]Attempting to patch {dist.name!r} at {path!r}") + patchesGroups: collections.abc.Sequence[collections.abc.Sequence[str]] = ( + rez_pip.plugins.getHook().patches(dist=dist, path=path) + ) + + # Flatten the list + patches = [path for group in patchesGroups for path in group] + + if not patches: + _LOG.debug(f"No patches found") + return + + _LOG.info(f"Applying {len(patches)} patches for {dist.name!r} at {path!r}") + + for patch in patches: + _LOG.info(f"Applying patch {patch!r} on {path!r}") + + if not os.path.isabs(patch): + raise PatchError(f"{patch!r} is not an absolute path") + + if not os.path.exists(patch): + raise PatchError(f"Patch at {patch!r} does not exist") + + patchset = patch_ng.fromfile(patch) + with logIfErrorOrRaises(): + if not patchset.apply(root=path): + # A logger that only gets flushed on demand would be better... 
+ raise PatchError(f"Failed to apply patch {patch!r} on {path!r}") diff --git a/src/rez_pip/pip.py b/src/rez_pip/pip.py index b02cfaf..7b9945a 100644 --- a/src/rez_pip/pip.py +++ b/src/rez_pip/pip.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import sys import json @@ -11,26 +13,38 @@ import dataclasses_json import rez_pip.data +import rez_pip.plugins import rez_pip.exceptions +if typing.TYPE_CHECKING: + import rez_pip.compat + _LOG = logging.getLogger(__name__) @dataclasses.dataclass class Metadata(dataclasses_json.DataClassJsonMixin): + """Represents metadata for a package""" + version: str name: str @dataclasses.dataclass class ArchiveInfo(dataclasses_json.DataClassJsonMixin): + #: Archive hash hash: str + + #: Archive hashes hashes: typing.Dict[str, str] @dataclasses.dataclass class DownloadInfo(dataclasses_json.DataClassJsonMixin): + #: Download URL url: str + + #: Archive information archive_info: ArchiveInfo dataclass_json_config = dataclasses_json.config( @@ -38,11 +52,20 @@ class DownloadInfo(dataclasses_json.DataClassJsonMixin): ) -@dataclasses.dataclass +@dataclasses.dataclass(frozen=True) class PackageInfo(dataclasses_json.DataClassJsonMixin): + """Represents data returned by pip for a single package""" + + #: Download information download_info: DownloadInfo + + #: Is this a direct dependency? is_direct: bool + + #: Is this a requested package? requested: bool + + #: Metadata about the package metadata: Metadata dataclass_json_config = dataclasses_json.config( @@ -51,27 +74,96 @@ class PackageInfo(dataclasses_json.DataClassJsonMixin): @property def name(self) -> str: + """Package name""" return self.metadata.name @property def version(self) -> str: + """Package version""" return self.metadata.version + def isDownloadRequired(self) -> bool: + return not self.download_info.url.startswith("file://") + + +@dataclasses.dataclass(frozen=True) # Nonsense, but we have to do this here too... 
+class DownloadedArtifact(PackageInfo): + """ + This is a subclass of :class:`PackageInfo`. It's used to represent a local wheel. + It is immutable so that we can clearly express immutability in plugins. + """ + + _localPath: str + + @property + def path(self) -> str: + """Path to the package on disk.""" + if not self.isDownloadRequired(): + # It's a local file, so we can return the URL (without file://) + return self.download_info.url[7:] + + return self._localPath + + +T = typing.TypeVar("T", PackageInfo, DownloadedArtifact) + + +class PackageGroup(typing.Generic[T]): + """A group of package. The order of packages and dists must be the same.""" + + #: List of packages + packages: tuple[T, ...] + + #: List of distributions + dists: list[rez_pip.compat.importlib_metadata.Distribution] + + # Using a tuple to make it immutable + def __init__(self, packages: tuple[T, ...]) -> None: + self.packages = packages + self.dists = [] + + def __str__(self) -> str: + return "PackageGroup({})".format( + [f"{p.name}=={p.version}" for p in self.packages] + ) + + def __repr__(self) -> str: + return "PackageGroup({})".format( + [f"{p.name}=={p.version}" for p in self.packages] + ) + + def __bool__(self) -> bool: + return bool(self.packages) + + def __eq__(self, value: typing.Any) -> bool: + """Needed for tests""" + if not isinstance(value, PackageGroup): + return False + + return self.packages == value.packages and self.dists == value.dists + + @property + def downloadUrls(self) -> list[str]: + """List of download URLs""" + return [p.download_info.url for p in self.packages] + def getBundledPip() -> str: return os.path.join(os.path.dirname(rez_pip.data.__file__), "pip.pyz") def getPackages( - packageNames: typing.List[str], + packageNames: list[str], pip: str, pythonVersion: str, pythonExecutable: str, - requirements: typing.List[str], - constraints: typing.List[str], - extraArgs: typing.List[str], -) -> typing.List[PackageInfo]: - # python pip.pyz install -q requests --dry-run 
--ignore-installed --python-version 2.7 --only-binary=:all: --target /tmp/asd --report - + requirements: list[str], + constraints: list[str], + extraArgs: list[str], +) -> list[PackageInfo]: + rez_pip.plugins.getHook().prePipResolve( + packages=tuple(packageNames), requirements=tuple(requirements) + ) _fd, tmpFile = tempfile.mkstemp(prefix="pip-install-output", text=True) os.close(_fd) @@ -132,19 +224,21 @@ def getPackages( rawPackages = reportContent["install"] - packages: typing.List[PackageInfo] = [] + packages: list[PackageInfo] = [] for rawPackage in rawPackages: packageInfo = PackageInfo.from_dict(rawPackage) packages.append(packageInfo) + rez_pip.plugins.getHook().postPipResolve(packages=tuple(packages)) + return packages -def _readPipReport(reportPath: str) -> typing.Dict[str, typing.Any]: +def _readPipReport(reportPath: str) -> dict[str, typing.Any]: """ Retrieve the json report generated by pip as json dict object. """ - with open(reportPath, "r", encoding="utf-8") as reportFile: - reportContent: typing.Dict[typing.Any, typing.Any] = json.load(reportFile) + with open(reportPath, encoding="utf-8") as reportFile: + reportContent: dict[typing.Any, typing.Any] = json.load(reportFile) return reportContent diff --git a/src/rez_pip/plugins/PySide6.py b/src/rez_pip/plugins/PySide6.py new file mode 100644 index 0000000..6e7df9b --- /dev/null +++ b/src/rez_pip/plugins/PySide6.py @@ -0,0 +1,207 @@ +"""PySide6 plugin. 
+""" + +from __future__ import annotations + +import os +import typing +import logging +import platform + +import packaging.utils +import packaging.version +import packaging.specifiers +import packaging.requirements + +import rez_pip.pip +import rez_pip.patch +import rez_pip.plugins +import rez_pip.exceptions + +if typing.TYPE_CHECKING: + from rez_pip.compat import importlib_metadata + +_LOG = logging.getLogger(__name__) + + +@rez_pip.plugins.hookimpl +def prePipResolve( + packages: tuple[str], +) -> None: + """ + PySide6 was initially a single package that had shiboken as a dependency. + Starting from 6.3.0, the package was spit in 3, PySide6, PySide6-Essentials and + PySide6-Addons. + + So we need to intercept what the user installs and install all 3 packages together. + Not doing that would result in a broken install (eventually). + """ + pyside6Seen = False + variantsSeens = [] + + for package in packages: + req = packaging.requirements.Requirement(package) + name = packaging.utils.canonicalize_name(req.name) + + if name == "pyside6": + pyside6Seen = True + elif name in ["pyside6-essentials", "pyside6-addons"]: + variantsSeens.append(req.name) + + if variantsSeens and not pyside6Seen: + variants = " and ".join(variantsSeens) + verb = "was" if len(variantsSeens) == 1 else "were" + raise rez_pip.exceptions.RezPipError( + f"{variants} {verb} requested but PySide6 was not. You must explicitly request PySide6 in addition to {variants}." + ) + + +@rez_pip.plugins.hookimpl +def postPipResolve(packages: tuple[rez_pip.pip.PackageInfo]) -> None: + """ + This hook is implemented out of extra caution. We really don't want PySide6-Addons + or PySide6-Essentials to be installed without PySide6. + + In this case, we cover cases where a user requests a package X and that package + depends on PySide6-Addons or PySide6-Essentials. 
+ """ + pyside6Seen = False + variantsSeens = [] + + for package in packages: + name = packaging.utils.canonicalize_name(package.name) + if name == "pyside6": + pyside6Seen = True + elif name in ["pyside6-essentials", "pyside6-addons"]: + variantsSeens.append(package.name) + + if variantsSeens and not pyside6Seen: + variants = " and ".join(variantsSeens) + verb = "is" if len(variantsSeens) == 1 else "are" + raise rez_pip.exceptions.RezPipError( + f"{variants} {verb} part of the resolved packages but PySide6 was not. Dependencies and or you must explicitly request PySide6 in addition to {variants}." + ) + + +@rez_pip.plugins.hookimpl +def groupPackages( + packages: list[rez_pip.pip.PackageInfo], +) -> list[rez_pip.pip.PackageGroup[rez_pip.pip.PackageInfo]]: + data = [] + for package in packages[:]: + if packaging.utils.canonicalize_name(package.name) in [ + "pyside6", + "pyside6-addons", + "pyside6-essentials", + ]: + data.append(package) + packages.remove(package) + + return [rez_pip.pip.PackageGroup[rez_pip.pip.PackageInfo](tuple(data))] + + +@rez_pip.plugins.hookimpl +def patches(dist: importlib_metadata.Distribution, path: str) -> list[str]: + if dist.name != "PySide6" or platform.system() != "Windows": + return [] + + patches: list[str] = [] + + # To generate the patches: + # 1. run srcipts/get_pyside6_files.py + # 2. Look at the output and get the different diffs. + # 3. Generate a patch for each version range where there is a diff: + # 3.1 cp patches/data/6.3.0/__init__.py patches/data/6.3.0/__init__.py.new + # 3.2 vim patches/data/6.3.0/__init__.py.new + # 3.3 diff --unified patches/data/6.3.0/__init__.py patches/data/6.3.0/__init__.py.new > src/rez_pip/data/patches/pyside6_6_3_0_win_dll_path.patch + # 3.4 Edit src/rez_pip/data/patches/pyside6_6_3_0_win_dll_path.patch to make sure the paths + # are good in the patch header. Also make sure CRLF (line ending) is kept as-is. 
+ # + # I think that it's important that the paths in the look like "python/PySide6/..." + # so that it matches the file layout... + version = packaging.version.Version(dist.version) + if version in packaging.specifiers.SpecifierSet(">=6.0.0,<6.1.0"): + patches.append( + os.path.join( + rez_pip.patch.getBuiltinPatchesDir(), "pyside6_6_0_0_win_dll_path.patch" + ) + ) + + elif version in packaging.specifiers.SpecifierSet(">=6.1.0,<6.2.4"): + patches.append( + os.path.join( + rez_pip.patch.getBuiltinPatchesDir(), "pyside6_6_1_0_win_dll_path.patch" + ) + ) + + elif version in packaging.specifiers.SpecifierSet(">=6.2.4,<6.3.0"): + patches.append( + os.path.join( + rez_pip.patch.getBuiltinPatchesDir(), "pyside6_6_2_4_win_dll_path.patch" + ) + ) + + elif version in packaging.specifiers.SpecifierSet(">=6.3.0,<6.7.3"): + patches.append( + os.path.join( + rez_pip.patch.getBuiltinPatchesDir(), "pyside6_6_3_0_win_dll_path.patch" + ) + ) + + elif version in packaging.specifiers.SpecifierSet(">=6.7.3,<6.8.1"): + patches.append( + os.path.join( + rez_pip.patch.getBuiltinPatchesDir(), "pyside6_6_7_3_win_dll_path.patch" + ) + ) + elif version in packaging.specifiers.SpecifierSet(">=6.8.1"): + patches.append( + os.path.join( + rez_pip.patch.getBuiltinPatchesDir(), "pyside6_6_8_1_win_dll_path.patch" + ) + ) + return patches + + +@rez_pip.plugins.hookimpl +def cleanup( + dist: importlib_metadata.Distribution, path: str +) -> list[rez_pip.plugins.CleanupAction]: + actions: list[rez_pip.plugins.CleanupAction] = [] + + if packaging.utils.canonicalize_name(dist.name) not in [ + "pyside6", + "pyside6-addons", + "pyside6-essentials", + ]: + return actions + + # Remove shiboken6 from PySide6 packages... + # PySide6 >=6.3, <6.6.2 were shipping some shiboken6 folders by mistake. + # Not removing these extra folders would stop python from being able to import + # the correct shiboken (that lives in a separate rez package). 
+ actions.extend( + [ + rez_pip.plugins.CleanupAction( + "remove", os.path.join(path, "python", "shiboken6") + ), + rez_pip.plugins.CleanupAction( + "remove", os.path.join(path, "python", "shiboken6_generator") + ), + ] + ) + + if packaging.utils.canonicalize_name(dist.name) in [ + "pyside6-addons", + "pyside6-essentials", + ]: + # Because we patch __init__.py, we need to make sure that + # PySide6-Addons and PySide6-Essentials' _init__.py won't + # overrite our patched __init__.py. + actions.append( + rez_pip.plugins.CleanupAction( + "remove", os.path.join(path, "python", "PySide6", "__init__.py") + ) + ) + + return actions diff --git a/src/rez_pip/plugins/__init__.py b/src/rez_pip/plugins/__init__.py new file mode 100644 index 0000000..1fb9ec3 --- /dev/null +++ b/src/rez_pip/plugins/__init__.py @@ -0,0 +1,210 @@ +"""Plugin system.""" + +from __future__ import annotations + +import typing +import logging +import pkgutil +import functools +import importlib +import dataclasses +import collections.abc + +import pluggy +import rez.package_maker + +if typing.TYPE_CHECKING: + import rez_pip.pip + import rez_pip.compat + +__all__ = [ + "hookimpl", +] + + +def __dir__() -> list[str]: + return __all__ + + +_LOG = logging.getLogger(__name__) + +F = typing.TypeVar("F", bound=typing.Callable[..., typing.Any]) +hookspec = typing.cast(typing.Callable[[F], F], pluggy.HookspecMarker("rez-pip")) +hookimpl = typing.cast(typing.Callable[[F], F], pluggy.HookimplMarker("rez-pip")) + + +@dataclasses.dataclass(frozen=True) +class CleanupAction: + """ + Cleanup hook action. If you want to do any cleanup from a cleanup hook + you need to return this from the :func:`cleanup` hook. + """ + + #: Operation to perform. + op: typing.Literal["remove"] + + #: Path on which to perform the operation. 
+ path: str + + +class PluginSpec: + @hookspec + def prePipResolve( + self, + packages: tuple[str, ...], # Immutable + requirements: tuple[str, ...], # Immutable + ) -> None: + """ + The pre-pip resolve hook allows a plugin to run some checks *before* resolving the + requested packages using pip. The hook **must** not modify the content of the + arguments passed to it. + + Some use cases are allowing or disallowing the installation of some packages. + + :param packages: List of packages requested by the user. + :param requirements: List of `requirements files `_ if any. + """ + ... + + @hookspec + def postPipResolve( + self, + packages: tuple[rez_pip.pip.PackageInfo, ...], # Immutable + ) -> None: + """ + The post-pip resolve hook allows a plugin to run some checks *after* resolving the + requested packages using pip. The hook **must** not modify the content of the + arguments passed to it. + + Some use cases are allowing or disallowing the installation of some packages. + + :param packages: List of resolved packages. + """ + ... + + @hookspec + def groupPackages( # type: ignore[empty-body] + self, + packages: collections.abc.MutableSequence[rez_pip.pip.PackageInfo], + ) -> collections.abc.Sequence[ + rez_pip.pip.PackageGroup[rez_pip.pip.DownloadedArtifact] + ]: + """ + Merge packages into groups of packages. The name and version of the first package + in the group will be used as the name and version for the rez package. + + The hook **must** pop grouped packages out of the "packages" variable. + + :param packages: List of resolved packages. + :returns: A list of package groups. + """ + ... + + @hookspec + def cleanup( # type: ignore[empty-body] + self, dist: rez_pip.compat.importlib_metadata.Distribution, path: str + ) -> collections.abc.Sequence[CleanupAction]: + """ + Cleanup a package post-installation. Do not delete any files/directories from this hook. + Return the list of actions you want to perform and let rez-pip perform them. 
+ + :param dist: Python distribution. + :param path: Root path of the rez variant. + """ + ... + + @hookspec + def patches( # type: ignore[empty-body] + self, dist: rez_pip.compat.importlib_metadata.Distribution, path: str + ) -> collections.abc.Sequence[str]: + """ + Provide paths to patches to be applied on the source code of a package. + + :param dist: Python distribution. + :param path: Root path of the installed content. + """ + # TODO: This will alter files (obviously) and change their hashes. + # This could be a problem to verify the integrity of the package. + # https://packaging.python.org/en/latest/specifications/recording-installed-packages/#the-record-file + ... + + @hookspec + def metadata(self, package: rez.package_maker.PackageMaker) -> None: + """ + Modify/inject metadata in the rez package. The plugin is expected to modify + "package" in place. + + :param package: An insatnce of :class:`rez.package_maker.PackageMaker`. + """ + ... + + +def before( + hookName: str, + hookImpls: collections.abc.Sequence[pluggy.HookImpl], + kwargs: collections.abc.Mapping[str, typing.Any], +) -> None: + """Function that will be called before each hook.""" + _LOG.debug("Calling the %r hooks", hookName) + + +def after( + outcome: pluggy.Result[typing.Any], + hookName: str, + hookImpls: collections.abc.Sequence[pluggy.HookImpl], + kwargs: collections.abc.Mapping[str, typing.Any], +) -> None: + """Function that will be called after each hook.""" + _LOG.debug("Called the %r hooks", hookName) + + +@functools.lru_cache +def getManager() -> pluggy.PluginManager: + """ + Returns the plugin manager. The return value will be cached on first call + and the cached value will be return in subsequent calls. 
+ """ + manager = pluggy.PluginManager("rez-pip") + if _LOG.getEffectiveLevel() <= logging.DEBUG: + manager.trace.root.setwriter(print) + manager.enable_tracing() + + manager.add_hookspecs(PluginSpec) + + # Register the builtin plugins + for module in pkgutil.iter_modules(__path__): + manager.register( + importlib.import_module(f"rez_pip.plugins.{module.name}"), + name=f"rez_pip.{module.name}", + ) + + manager.load_setuptools_entrypoints("rez-pip") + + manager.add_hookcall_monitoring(before, after) + return manager + + +def getHook() -> PluginSpec: + """ + Returns the hook attribute from the manager. This is allows + to have type hints at the caller sites. + + Inspired by https://stackoverflow.com/a/54695761. + """ + manager = getManager() + return typing.cast(PluginSpec, manager.hook) + + +def _getHookImplementations() -> dict[str, list[str]]: + manager = getManager() + + implementations = {} + for name, plugin in manager.list_name_plugin(): + hookcallers = manager.get_hookcallers(plugin) + + # hookcallers will never be None because we get the names from list_name_plugin. + # But it silences mypy. + assert hookcallers is not None + + implementations[name] = [caller.name for caller in hookcallers] + return implementations diff --git a/src/rez_pip/plugins/shiboken6.py b/src/rez_pip/plugins/shiboken6.py new file mode 100644 index 0000000..fb0d3f0 --- /dev/null +++ b/src/rez_pip/plugins/shiboken6.py @@ -0,0 +1,33 @@ +"""shiboken6 plugin. +""" + +from __future__ import annotations + +import os +import typing +import logging + +import packaging.utils + +import rez_pip.plugins + +if typing.TYPE_CHECKING: + from rez_pip.compat import importlib_metadata + +_LOG = logging.getLogger(__name__) + + +@rez_pip.plugins.hookimpl +def cleanup( + dist: importlib_metadata.Distribution, path: str +) -> list[rez_pip.plugins.CleanupAction]: + if packaging.utils.canonicalize_name(dist.name) != "shiboken6": + return [] + + # Remove PySide6 from shiboken6 packages... 
+ # shiboken6 >=6.3, <6.6.2 were shipping some PySide6 folders by mistake. + # Not removing these extra folders would stop python from being able to import + # the correct PySide6 (that lives in a separate rez package). + return [ + rez_pip.plugins.CleanupAction("remove", os.path.join(path, "python", "PySide6")) + ] diff --git a/src/rez_pip/rez.py b/src/rez_pip/rez.py index 11f5def..7cc7d5e 100644 --- a/src/rez_pip/rez.py +++ b/src/rez_pip/rez.py @@ -1,5 +1,6 @@ +from __future__ import annotations + import os -import sys import copy import shutil import typing @@ -7,11 +8,6 @@ import pathlib import itertools -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - import rez.config import rez.version import rez.packages @@ -20,29 +16,64 @@ import rez_pip.pip import rez_pip.utils +import rez_pip.plugins +from rez_pip.compat import importlib_metadata _LOG = logging.getLogger(__name__) def createPackage( - dist: importlib_metadata.Distribution, - isPure: bool, + packageGroup: rez_pip.pip.PackageGroup[rez_pip.pip.DownloadedArtifact], pythonVersion: rez.version.Version, - nameCasings: typing.List[str], installedWheelsDir: str, - wheelURL: str, - prefix: typing.Optional[str] = None, + prefix: str | None = None, release: bool = False, ) -> None: - _LOG.info(f"Creating rez package for {dist.name}") - name = rez_pip.utils.pythontDistributionNameToRez(dist.name) - version = rez_pip.utils.pythonDistributionVersionToRez(dist.version) + _LOG.info( + "Creating rez package for {0}".format( + " + ".join(dist.name for dist in packageGroup.dists) + ) + ) - requirements = rez_pip.utils.getRezRequirements(dist, pythonVersion, isPure, []) + rezNames = [ + rez_pip.utils.pythontDistributionNameToRez(dist.name) + for dist in packageGroup.dists + ] + + name = rezNames[0] + version = rez_pip.utils.pythonDistributionVersionToRez( + packageGroup.dists[0].version + ) - requires = requirements.requires - variant_requires = 
requirements.variant_requires - metadata = requirements.metadata + requires = [] + variant_requires = [] + metadata: dict[str, typing.Any] = {} + isPure = True + for dist in packageGroup.dists: + requirements = rez_pip.utils.getRezRequirements(dist, pythonVersion, []) + if not metadata: + # For now we only use the metadata from the first package. Far from ideal... + metadata = requirements.metadata + + # TODO: Remove grouped packages (PySide-Addons, etc) + requires += [ + require + for require in requirements.requires + if require not in requires + # Check that the rez requirement isn't in the group name since it would be + # an invalid requirement (because we merge them). + and rez.version.Requirement(require).name not in rezNames[1:] + ] + variant_requires += [ + require + for require in requirements.variant_requires + if require not in variant_requires + # Check that the rez requirement isn't in the group name since it would be + # an invalid requirement (because we merge them). + and rez.version.Requirement(require).name not in rezNames[1:] + ] + if isPure: + isPure = metadata["is_pure_python"] if prefix: packagesPath = prefix @@ -63,21 +94,30 @@ def make_root(variant: rez.packages.Variant, path: str) -> None: _LOG.info( rf"Installing {variant.qualified_package_name} \[{formattedRequirements}]" ) - if not dist.files: - raise RuntimeError( - f"{dist.name} package has no files registered! Something is wrong maybe?" - ) - - wheelsDirAbsolute = pathlib.Path(installedWheelsDir).resolve() - for src in dist.files: - srcAbsolute = src.locate().resolve() - dest = os.path.join(path, srcAbsolute.relative_to(wheelsDirAbsolute)) - if not os.path.exists(os.path.dirname(dest)): - os.makedirs(os.path.dirname(dest)) - - _LOG.debug(f"Copying {str(srcAbsolute)!r} to {str(dest)!r}") - shutil.copyfile(srcAbsolute, dest) - shutil.copystat(srcAbsolute, dest) + for dist in packageGroup.dists: + if not dist.files: + raise RuntimeError( + f"{dist.name} package has no files registered! 
Something is wrong maybe?" + ) + + wheelsDirAbsolute = pathlib.Path(installedWheelsDir).resolve() + for src in dist.files: + srcAbsolute: pathlib.Path = typing.cast( + pathlib.Path, src.locate() + ).resolve() + dest = os.path.join( + path, + os.path.sep.join( + srcAbsolute.relative_to(wheelsDirAbsolute).parts[1:] + ), + ) + # print(dest) + if not os.path.exists(os.path.dirname(dest)): + os.makedirs(os.path.dirname(dest)) + + _LOG.debug(f"Copying {str(srcAbsolute)!r} to {str(dest)!r}") + shutil.copyfile(srcAbsolute, dest) + shutil.copystat(srcAbsolute, dest) with rez.package_maker.make_package( name, packagesPath, make_root=make_root, skip_existing=True, warn_on_skip=False @@ -113,8 +153,8 @@ def make_root(variant: rez.packages.Variant, path: str) -> None: pkg.pip = { "name": dist.name, "version": dist.version, - "is_pure_python": metadata["is_pure_python"], - "wheel_url": wheelURL, + "is_pure_python": isPure, + "wheel_urls": packageGroup.downloadUrls, "rez_pip_version": importlib_metadata.version("rez-pip"), } @@ -126,6 +166,8 @@ def make_root(variant: rez.packages.Variant, path: str) -> None: pkg.pip["metadata"] = remainingMetadata + rez_pip.plugins.getHook().metadata(package=pkg) + _LOG.info( f"[bold]Created {len(pkg.installed_variants)} variants and skipped {len(pkg.skipped_variants)}" ) @@ -133,47 +175,47 @@ def make_root(variant: rez.packages.Variant, path: str) -> None: def _convertMetadata( dist: importlib_metadata.Distribution, -) -> typing.Tuple[typing.Dict[str, typing.Any], typing.Dict[str, typing.Any]]: - metadata = {} +) -> tuple[dict[str, typing.Any], dict[str, typing.Any]]: + metadata: dict[str, typing.Any] = {} originalMetadata = copy.deepcopy(dist.metadata.json) del originalMetadata["metadata_version"] del originalMetadata["name"] del originalMetadata["version"] # https://packaging.python.org/en/latest/specifications/core-metadata/#summary - if dist.metadata["Summary"]: + if "Summary" in dist.metadata: metadata["summary"] = dist.metadata["Summary"] 
del originalMetadata["summary"] # https://packaging.python.org/en/latest/specifications/core-metadata/#description - if dist.metadata["Description"]: + if "Description" in dist.metadata: metadata["description"] = dist.metadata["Description"] del originalMetadata["description"] authors = [] # https://packaging.python.org/en/latest/specifications/core-metadata/#author - author = dist.metadata["Author"] - if author: - authors.append(author) + if "Author" in dist.metadata: + authors.append(dist.metadata["Author"]) del originalMetadata["author"] # https://packaging.python.org/en/latest/specifications/core-metadata/#author-email - authorEmail = dist.metadata["Author-email"] - if authorEmail: - authors.extend([email.strip() for email in authorEmail.split(",")]) + if "Author-email" in dist.metadata: + authors.extend( + [email.strip() for email in dist.metadata["Author-email"].split(",")] + ) del originalMetadata["author_email"] # https://packaging.python.org/en/latest/specifications/core-metadata/#maintainer - maintainer = dist.metadata["Maintainer"] - if maintainer: - authors.append(maintainer) + if "Maintainer" in dist.metadata: + authors.append(dist.metadata["Maintainer"]) del originalMetadata["maintainer"] # https://packaging.python.org/en/latest/specifications/core-metadata/#maintainer-email - maintainerEmail = dist.metadata["Maintainer-email"] - if maintainerEmail: - authors.extend([email.strip() for email in maintainerEmail.split(",")]) + if "Maintainer-email" in dist.metadata: + authors.extend( + [email.strip() for email in dist.metadata["Maintainer-email"].split(",")] + ) del originalMetadata["maintainer_email"] if authors: @@ -181,7 +223,7 @@ def _convertMetadata( # https://packaging.python.org/en/latest/specifications/core-metadata/#license # Prefer the License field and fallback to classifiers if one is present. 
- if dist.metadata["License"]: + if "License" in dist.metadata: metadata["license"] = dist.metadata["License"] del originalMetadata["license"] else: @@ -199,22 +241,22 @@ def _convertMetadata( helpLinks = [] # https://packaging.python.org/en/latest/specifications/core-metadata/#home-page - if dist.metadata["Home-page"]: + if "Home-page" in dist.metadata: helpLinks.append(["Home-page", dist.metadata["Home-page"]]) del originalMetadata["home_page"] # https://packaging.python.org/en/latest/specifications/core-metadata/#project-url-multiple-use - if dist.metadata["Project-URL"]: + if "Project-URL" in dist.metadata: urls = [ url.strip() - for value in dist.metadata.get_all("Project-URL") + for value in dist.metadata.get_all("Project-URL", failobj=[]) for url in value.split(",") ] helpLinks.extend([list(entry) for entry in zip(urls[::2], urls[1::2])]) del originalMetadata["project_url"] # https://packaging.python.org/en/latest/specifications/core-metadata/#download-url - if dist.metadata["Download-URL"]: + if "Download-URL" in dist.metadata: helpLinks.append(["Download-URL", dist.metadata["Download-URL"]]) del originalMetadata["download_url"] @@ -225,8 +267,8 @@ def _convertMetadata( def getPythonExecutables( - range_: typing.Optional[str], packageFamily: str = "python" -) -> typing.Dict[str, pathlib.Path]: + range_: str | None, packageFamily: str = "python" +) -> dict[str, pathlib.Path]: """ Get the available python executable from rez packages. @@ -241,7 +283,7 @@ def getPythonExecutables( key=lambda x: x.version, ) - packages: typing.List[rez.packages.Package] + packages: list[rez.packages.Package] if range_ == "latest": packages = [list(all_packages)[-1]] else: @@ -257,7 +299,7 @@ def getPythonExecutables( # Note that "pkgs" is already in the right order since all_packages is sorted. 
packages = [pkgs[-1] for pkgs in groups] - pythons: typing.Dict[str, pathlib.Path] = {} + pythons: dict[str, pathlib.Path] = {} for package in packages: resolvedContext = rez.resolved_context.ResolvedContext( [f"{package.name}=={package.version}"] diff --git a/src/rez_pip/utils.py b/src/rez_pip/utils.py index 641dd01..0a0af56 100644 --- a/src/rez_pip/utils.py +++ b/src/rez_pip/utils.py @@ -1,27 +1,32 @@ -import sys +from __future__ import annotations + import typing import logging import dataclasses -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - import rez.system import rez.version +import rich.console import packaging.version import packaging.specifiers import packaging.requirements +import rez_pip.install + +if typing.TYPE_CHECKING: + from rez_pip.compat import importlib_metadata + _LOG = logging.getLogger(__name__) +CONSOLE = rich.console.Console() + + @dataclasses.dataclass class RequirementsDict: - requires: typing.List[str] - variant_requires: typing.List[str] - metadata: typing.Dict[str, typing.Any] + requires: list[str] + variant_requires: list[str] + metadata: dict[str, typing.Any] def pythontDistributionNameToRez(name: str) -> str: @@ -281,12 +286,12 @@ def pythonReqToRezReq( class CustomPyPackagingRequirement(packaging.requirements.Requirement): - conditional_extras: typing.Optional[typing.Set[str]] + conditional_extras: set[str] | None def normalizeRequirement( - requirement: typing.Union[str, typing.Dict[typing.Any, typing.Any]] -) -> typing.List[CustomPyPackagingRequirement]: + requirement: str | dict[typing.Any, typing.Any] +) -> list[CustomPyPackagingRequirement]: """Normalize a package requirement. 
Requirements from distlib packages can be a mix of string- or dict- based @@ -319,8 +324,8 @@ def normalizeRequirement( def reconstruct( req: CustomPyPackagingRequirement, - marker_str: typing.Optional[str] = None, - conditional_extras: typing.Union[typing.Set[str], None] = None, + marker_str: str | None = None, + conditional_extras: set[str] | None = None, ) -> CustomPyPackagingRequirement: new_req_str = req.name @@ -341,7 +346,7 @@ def reconstruct( # So only metadata that are of version 2.0 will be in dict. The other versions # (1.0, 1.1, 1.2, 2.1) will be strings. if isinstance(requirement, dict): - result: typing.List[CustomPyPackagingRequirement] = [] + result: list[CustomPyPackagingRequirement] = [] requires = requirement["requires"] extra = requirement.get("extra") marker_str = requirement.get("environment") @@ -373,12 +378,12 @@ def reconstruct( return [req] # Parse conditional extras out of marker - conditional_extras: typing.Set[str] = set() + conditional_extras: set[str] = set() marker_str = marker_str.replace(" and ", " \nand ") marker_str = marker_str.replace(" or ", " \nor ") lines = marker_str.split("\n") lines = [x.strip() for x in lines] - new_marker_lines: typing.List[str] = [] + new_marker_lines: list[str] = [] for line in lines: if "extra" in line.split(): @@ -402,7 +407,7 @@ def reconstruct( return [new_req] -def convertMarker(marker: str) -> typing.List[str]: +def convertMarker(marker: str) -> list[str]: """Get the system requirements that an environment marker introduces. 
Consider: @@ -451,7 +456,7 @@ def convertMarker(marker: str) -> typing.List[str]: "python_full_version": [_py], # PEP-0508 } - sys_requires: typing.Set[str] = set() + sys_requires: set[str] = set() # note: packaging lib already delimits with whitespace marker_parts = marker.split() @@ -466,8 +471,7 @@ def convertMarker(marker: str) -> typing.List[str]: def getRezRequirements( installedDist: importlib_metadata.Distribution, pythonVersion: rez.version.Version, - isPure: bool, - nameCasings: typing.Optional[typing.List[str]] = None, + nameCasings: list[str] | None = None, ) -> RequirementsDict: """Get requirements of the given dist, in rez-compatible format. @@ -504,19 +508,20 @@ def getRezRequirements( :returns: See example above. """ _system = rez.system.System() - result_requires: typing.List[str] = [] - result_variant_requires: typing.List[str] = [] + result_requires: list[str] = [] + result_variant_requires: list[str] = [] # create cased names lookup - name_mapping = dict((x.lower(), x) for x in (nameCasings or [])) + name_mapping = {x.lower(): x for x in (nameCasings or [])} # requirements such as platform, arch, os, and python - sys_requires: typing.Set[str] = set() + sys_requires: set[str] = set() # entry_points scripts are platform and arch specific executables generated by # python build frontends during install has_entry_points_scripts = bool(installedDist.entry_points) + isPure = rez_pip.install.isWheelPure(installedDist) # assume package is platform- and arch- specific if it isn't pure python if not isPure or has_entry_points_scripts: sys_requires.update(["platform", "arch"]) @@ -591,7 +596,7 @@ def getRezRequirements( result_requires.append(rez_req) # prefix variant with system requirements - sys_variant_requires: typing.List[str] = [] + sys_variant_requires: list[str] = [] if "platform" in sys_requires: sys_variant_requires.append(f"platform-{_system.platform}") diff --git a/tests/conftest.py b/tests/conftest.py index b6c9a80..221af08 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import time import typing @@ -11,10 +13,13 @@ import pytest import rez.config +import rez.system import rez.packages import rez.package_bind import rez.package_maker +import rez_pip.utils + from . import utils DATA_ROOT_DIR = os.path.join(os.path.dirname(__file__), "data") @@ -35,8 +40,16 @@ def pytest_runtest_makereport(item: pytest.Item, call): item.stash.setdefault(phaseReportKey, {})[rep.when] = rep +@pytest.fixture(scope="function", autouse=True) +def patchRichConsole(monkeypatch: pytest.MonkeyPatch): + """Patch the rich console so that it doesn't wrap long lines""" + monkeypatch.setattr(rez_pip.utils.CONSOLE, "width", 1000) + + @pytest.fixture(scope="session") -def index(tmpdir_factory: pytest.TempdirFactory) -> utils.PyPIIndex: +def index( + tmpdir_factory: pytest.TempdirFactory, printer_session: typing.Callable[[str], None] +) -> utils.PyPIIndex: """Build PyPI Index and return the path""" srcPackages = os.path.join(DATA_ROOT_DIR, "src_packages") @@ -45,7 +58,9 @@ def index(tmpdir_factory: pytest.TempdirFactory) -> utils.PyPIIndex: for pkg in os.listdir(srcPackages): dest = indexPath.mkdir(pkg) - utils.buildPackage(pkg, os.fspath(dest)) + printer_session(f"Building {pkg!r}...") + wheel = utils.buildPackage(pkg, os.fspath(dest)) + printer_session(f"Built {pkg!r} at {wheel!r}") return utils.PyPIIndex(pathlib.Path(indexPath.strpath)) @@ -134,6 +149,13 @@ def hardenRezConfig(tmp_path_factory: pytest.TempPathFactory): yield +@pytest.fixture(scope="function", autouse=True) +def resetRez(): + """Reset rez caches to make sure we don't leak anything between tests""" + yield + rez.system.system.clear_caches() + + @pytest.fixture(scope="session") def rezRepo() -> typing.Generator[str, None, None]: path = os.path.join(DATA_ROOT_DIR, "rez_repo") @@ -191,7 +213,13 @@ def downloadPythonVersion( pytest.param( # Nuget doesn't have 3.7.16 "3.7.9" if platform.system() == 
"Windows" else "3.7.16", - marks=pytest.mark.py37, + marks=[ + pytest.mark.py3, + pytest.mark.skipif( + platform.processor() == "arm" and platform.system() == "Darwin", + reason="Python 3.7 is not compatible with Apple Silicon", + ), + ], ), pytest.param( # Nuget doesn't have 3.9.16 diff --git a/tests/constraints.txt b/tests/constraints.txt deleted file mode 100644 index 44f6b7b..0000000 --- a/tests/constraints.txt +++ /dev/null @@ -1,5 +0,0 @@ -# markdown-it-py 3.0.0 dropped support for Python 3.7 -# Mypy complains about that. So constraints to less than 3. -markdown-it-py<3 -# 3.9 dropped support for python 3.7 -aiohttp<3.9.0 diff --git a/tests/plugins/__init__.py b/tests/plugins/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/plugins/test_plugins.py b/tests/plugins/test_plugins.py new file mode 100644 index 0000000..5149597 --- /dev/null +++ b/tests/plugins/test_plugins.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +import pluggy + +import rez_pip.plugins + + +def test_getManager(): + assert isinstance(rez_pip.plugins.getManager(), pluggy.PluginManager) + + +def test_getHook(): + assert isinstance(rez_pip.plugins.getHook(), pluggy.HookRelay) + + +def test_getHookImplementations(): + implementations = rez_pip.plugins._getHookImplementations() + assert implementations == { + "rez_pip.PySide6": [ + "cleanup", + "groupPackages", + "patches", + "postPipResolve", + "prePipResolve", + ], + "rez_pip.shiboken6": ["cleanup"], + } diff --git a/tests/plugins/test_pyside6.py b/tests/plugins/test_pyside6.py new file mode 100644 index 0000000..cf97219 --- /dev/null +++ b/tests/plugins/test_pyside6.py @@ -0,0 +1,234 @@ +from __future__ import annotations + +import os +import pathlib +import unittest.mock + +import pytest + +import rez_pip.pip +import rez_pip.plugins +import rez_pip.exceptions + +from . 
import utils + + +@pytest.fixture(scope="module", autouse=True) +def setupPluginManager(): + yield utils.initializePluginManager("pyside6") + + +@pytest.mark.parametrize( + "packages", + [ + ("asd",), + ("pyside6",), + ("PysiDe6",), + ("pyside6", "pyside6-addons"), + ("pyside6", "pyside6-essentials"), + ("pyside6", "pyside6-essentials", "pyside6-addons"), + ("pyside6", "pyside6-addons", "asdasdad"), + ], +) +def test_prePipResolve_noop(packages: tuple[str, ...]): + rez_pip.plugins.getHook().prePipResolve(packages=packages) + + +@pytest.mark.parametrize("packages", [("pyside6-addons",), ("PysiDe6_essentials",)]) +def test_prePipResolve_raises(packages: tuple[str, ...]): + with pytest.raises(rez_pip.exceptions.RezPipError): + rez_pip.plugins.getHook().prePipResolve(packages=packages) + + +def fakePackage(name: str, **kwargs) -> unittest.mock.Mock: + value = unittest.mock.MagicMock() + value.configure_mock(name=name, **kwargs) + return value + + +@pytest.mark.parametrize( + "packages", + [ + (fakePackage("asd"),), + (fakePackage("pyside6"),), + (fakePackage("PysiDe6"),), + (fakePackage("pyside6"), fakePackage("pyside6-addons")), + (fakePackage("pyside6"), fakePackage("pyside6-essentials")), + ( + fakePackage("pyside6"), + fakePackage("pyside6-essentials"), + fakePackage("pyside6-addons"), + ), + ( + fakePackage("pyside6"), + fakePackage("pyside6-addons"), + fakePackage("asdasdad"), + ), + ], +) +def test_postPipResolve_noop(packages: tuple[str, ...]): + rez_pip.plugins.getHook().postPipResolve(packages=packages) + + +@pytest.mark.parametrize( + "packages", + [ + (fakePackage("pyside6-addons"),), + (fakePackage("PysiDe6_essentials"),), + (fakePackage("PysiDe6_essentials"), fakePackage("asd")), + ], +) +def test_postPipResolve_raises(packages: tuple[str, ...]): + with pytest.raises(rez_pip.exceptions.RezPipError): + rez_pip.plugins.getHook().postPipResolve(packages=packages) + + +@pytest.mark.parametrize( + "packages", + [[fakePackage("asd")]], +) +def 
test_groupPackages_noop(packages: list[str]): + assert rez_pip.plugins.getHook().groupPackages(packages=packages) == [ + [rez_pip.pip.PackageGroup(tuple())] + ] + + +class FakePackageInfo: + def __init__(self, name: str, version: str): + self.name = name + self.version = version + + def __eq__(self, value): + return self.name == value.name and self.version == value.version + + +@pytest.mark.parametrize( + "packages,expectedGroups", + [ + [ + [fakePackage("pyside6", version="1")], + [[rez_pip.pip.PackageGroup((FakePackageInfo("pyside6", "1"),))]], + ], + [ + [ + fakePackage("pyside6", version="1"), + fakePackage("pyside6_addons", version="1"), + ], + [ + [ + rez_pip.pip.PackageGroup( + ( + FakePackageInfo("pyside6", "1"), + FakePackageInfo("pyside6_addons", "1"), + ) + ) + ] + ], + ], + [ + [ + fakePackage("pyside6", version="1"), + fakePackage("pyside6_essentials", version="1"), + ], + [ + [ + rez_pip.pip.PackageGroup( + ( + FakePackageInfo("pyside6", "1"), + FakePackageInfo("pyside6_essentials", "1"), + ) + ) + ] + ], + ], + [ + [ + fakePackage("pyside6", version="1"), + fakePackage("pyside6_essentials", version="1"), + fakePackage("pyside6-Addons", version="1"), + ], + [ + [ + rez_pip.pip.PackageGroup( + ( + FakePackageInfo("pyside6", "1"), + FakePackageInfo("pyside6_essentials", "1"), + FakePackageInfo("pyside6-Addons", "1"), + ) + ) + ] + ], + ], + [ + [ + fakePackage("pyside6", version="1"), + fakePackage("asdasd", version=2), + fakePackage("pyside6_essentials", version="1"), + fakePackage("pyside6-Addons", version="1"), + ], + [ + [ + rez_pip.pip.PackageGroup( + ( + FakePackageInfo("pyside6", "1"), + FakePackageInfo("pyside6_essentials", "1"), + FakePackageInfo("pyside6-Addons", "1"), + ) + ) + ] + ], + ], + ], +) +def test_groupPackages( + packages: list[str], expectedGroups: list[rez_pip.pip.PackageGroup] +): + data = rez_pip.plugins.getHook().groupPackages(packages=packages) + assert data == expectedGroups + + +@pytest.mark.parametrize("package", 
[fakePackage("asd")]) +def test_cleanup_noop(package, tmp_path: pathlib.Path): + (tmp_path / "python" / "shiboken6").mkdir(parents=True) + (tmp_path / "python" / "shiboken6_generator").mkdir(parents=True) + + rez_pip.plugins.getHook().cleanup(dist=package, path=tmp_path) + + assert (tmp_path / "python" / "shiboken6").exists() + assert (tmp_path / "python" / "shiboken6_generator").exists() + + +@pytest.mark.parametrize( + "package,expectedPaths", + [ + [fakePackage("pyside6"), ["shiboken6", "shiboken6_generator"]], + [ + fakePackage("pyside6_essentials"), + [ + "shiboken6", + "shiboken6_generator", + os.path.join("PySide6", "__init__.py"), + ], + ], + [ + fakePackage("PySiDe6-AddoNs"), + [ + "shiboken6", + "shiboken6_generator", + os.path.join("PySide6", "__init__.py"), + ], + ], + ], +) +def test_cleanup(package, expectedPaths: list[str], tmp_path: pathlib.Path): + actions = rez_pip.plugins.getHook().cleanup(dist=package, path=tmp_path) + + expectedActions = [] + for path in expectedPaths: + expectedActions.append( + rez_pip.plugins.CleanupAction( + "remove", + str(tmp_path / "python" / path), + ) + ) + assert actions == [[], expectedActions] diff --git a/tests/plugins/test_shiboken6.py b/tests/plugins/test_shiboken6.py new file mode 100644 index 0000000..17cf041 --- /dev/null +++ b/tests/plugins/test_shiboken6.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +import pathlib +import unittest.mock + +import pytest + +import rez_pip.pip +import rez_pip.plugins +import rez_pip.exceptions + +from . 
import utils + + +@pytest.fixture(scope="module", autouse=True) +def setupPluginManager(): + yield utils.initializePluginManager("shiboken6") + + +def fakePackage(name: str, **kwargs) -> unittest.mock.Mock: + value = unittest.mock.MagicMock() + value.configure_mock(name=name, **kwargs) + return value + + +@pytest.mark.parametrize("package", [fakePackage("asd")]) +def test_cleanup_noop(package, tmp_path: pathlib.Path): + (tmp_path / "python" / "PySide6").mkdir(parents=True) + + rez_pip.plugins.getHook().cleanup(dist=package, path=tmp_path) + + assert (tmp_path / "python" / "PySide6").exists() + + +@pytest.mark.parametrize( + "package", + [ + fakePackage("shiboken6"), + fakePackage("shiboken6_essentials"), + fakePackage("ShIbOkEn6-AddoNs"), + ], +) +def test_cleanup(package, tmp_path: pathlib.Path): + (tmp_path / "python" / "PySide6").mkdir(parents=True) + + rez_pip.plugins.getHook().cleanup(dist=package, path=tmp_path) + + assert not (tmp_path / "python" / "shiboken6").exists() diff --git a/tests/plugins/utils.py b/tests/plugins/utils.py new file mode 100644 index 0000000..84f7917 --- /dev/null +++ b/tests/plugins/utils.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +import rez_pip.plugins + + +def initializePluginManager(name: str): + """Initialize a plugin manager and clear the cache before exiting the function + + :param name: Name of the plugin to load. 
+ """ + manager = rez_pip.plugins.getManager() + for name, plugin in manager.list_name_plugin(): + if not name.startswith(f"rez-pip.plugins.{name}"): + manager.unregister(plugin) + + try: + yield manager + finally: + del manager + rez_pip.plugins.getManager.cache_clear() diff --git a/tests/requirements.txt b/tests/requirements.txt index 9b93af5..417fc3b 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -2,9 +2,6 @@ pytest pytest-cov pytest-print pypiserver -# Needed because of the use of async mocks which were fully added in 3.8 -mock; python_version < "3.8" -types-mock; python_version < "3.8" build hatchling git+https://github.com/conda/conda-package-handling@2.1.0; platform_system != "Windows" diff --git a/tests/sitecustomize.py b/tests/sitecustomize.py new file mode 100644 index 0000000..24dde22 --- /dev/null +++ b/tests/sitecustomize.py @@ -0,0 +1,5 @@ +"""Needed to corectly track coverage in subprocesses from the integration tests""" + +import coverage + +coverage.process_startup() diff --git a/tests/test_cli.py b/tests/test_cli.py index ada7b07..7a60d70 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import sys import logging @@ -6,19 +8,14 @@ import subprocess import unittest.mock -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - import pytest -import rich.console import packaging.version import rez_pip.cli import rez_pip.pip import rez_pip.rez import rez_pip.exceptions +from rez_pip.compat import importlib_metadata def test_parseArgs_empty(): @@ -26,6 +23,7 @@ def test_parseArgs_empty(): assert vars(args) == { "constraint": None, "keep_tmp_dirs": False, + "list_plugins": False, "log_level": "info", "packages": [], "pip": rez_pip.pip.getBundledPip(), @@ -45,6 +43,7 @@ def test_parseArgs_packages(packages): assert vars(args) == { "constraint": None, "keep_tmp_dirs": False, + "list_plugins": False, 
"log_level": "info", "packages": packages, "pip": rez_pip.pip.getBundledPip(), @@ -64,6 +63,7 @@ def test_parseArgs_no_package_with_requirements(files): assert vars(args) == { "constraint": None, "keep_tmp_dirs": False, + "list_plugins": False, "log_level": "info", "packages": [], "pip": rez_pip.pip.getBundledPip(), @@ -82,6 +82,7 @@ def test_parseArgs_constraints(): assert vars(args) == { "constraint": ["asd", "adasdasd"], "keep_tmp_dirs": False, + "list_plugins": False, "log_level": "info", "packages": [], "pip": rez_pip.pip.getBundledPip(), @@ -102,6 +103,7 @@ def test_parseArgs_pipArgs(): assert vars(args) == { "constraint": None, "keep_tmp_dirs": False, + "list_plugins": False, "log_level": "info", "packages": [], "pip": rez_pip.pip.getBundledPip(), @@ -318,10 +320,10 @@ def test_debug(capsys: pytest.CaptureFixture, monkeypatch: pytest.MonkeyPatch): unittest.mock.Mock(stdout="mocked pip config list"), ), ) as mocked: - rez_pip.cli._debug( - argparse.Namespace(pip=None, python_version="2.7+"), - console=rich.console.Console(), + monkeypatch.setattr( + sys, "argv", ["rez-pip", "--debug-info", "asd", "--python-version", "2.7+"] ) + assert rez_pip.cli.run() == 0 assert mocked.call_args_list == [ unittest.mock.call( @@ -366,3 +368,19 @@ def test_debug(capsys: pytest.CaptureFixture, monkeypatch: pytest.MonkeyPatch): """ ) + + +def test_list_plugins(monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture): + monkeypatch.setattr(sys, "argv", ["rez-pip", "--list-plugins"]) + + assert rez_pip.cli.run() == 0 + + output = capsys.readouterr().out + output = "\n".join(map(str.strip, output.split("\n"))) + assert ( + output + == """Name Hooks +rez_pip.PySide6 cleanup, groupPackages, patches, postPipResolve, prePipResolve +rez_pip.shiboken6 cleanup +""" + ) diff --git a/tests/test_download.py b/tests/test_download.py index cf8530e..32b690a 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -1,104 +1,175 @@ +from __future__ import annotations + import 
os -import sys import typing import hashlib import pathlib - -if sys.version_info[:2] < (3, 8): - import mock -else: - from unittest import mock - -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata +import unittest.mock import pytest import aiohttp import rez_pip.pip import rez_pip.download +from rez_pip.compat import importlib_metadata @pytest.fixture(scope="module", autouse=True) def rezPipVersion(): - with mock.patch.object(importlib_metadata, "version", return_value="1.2.3.4.5"): + with unittest.mock.patch.object( + importlib_metadata, "version", return_value="1.2.3.4.5" + ): yield +class Package: + def __init__(self, name: str, content: str, local: bool): + self.name = name + self.content = content + self.local = local + + +class Group: + def __init__(self, packages: typing.List[Package]): + self.packages = packages + + def getPackage(self, name: str) -> Package: + for package in self.packages: + if package.name == name: + return package + raise KeyError(name) + + @pytest.mark.parametrize( - "packages", + "groups", [ - {"package-a": "package-a data"}, - {"package-a": "package-a data", "package-b": "package-b data"}, + [Group([Package("package-a", "package-a data", False)])], + [ + Group([Package("package-a", "package-a data", False)]), + Group([Package("package-b", "package-b data", False)]), + ], ], - ids=["single-package", "multiple-packages"], + ids=["one-group-with-one-package", "multiple-groups-with-one-package"], ) -def test_download(packages: typing.Dict[str, str], tmp_path: pathlib.Path): +def test_download(groups: typing.List[Group], tmp_path: pathlib.Path): sideEffects = tuple() - for content in packages.values(): - mockedContent = mock.MagicMock() - mockedContent.return_value.__aiter__.return_value = [ - [ - content.encode("utf-8"), - None, + for group in groups: + for package in group.packages: + mockedContent = unittest.mock.MagicMock() + 
mockedContent.return_value.__aiter__.return_value = [ + [ + package.content.encode("utf-8"), + None, + ] ] - ] - sideEffects += ( - mock.Mock( - headers={"content-length": 100}, - status=200, - content=mock.Mock(iter_chunks=mockedContent), - ), - ) + sideEffects += ( + unittest.mock.Mock( + headers={"content-length": 100}, + status=200, + content=unittest.mock.Mock(iter_chunks=mockedContent), + ), + ) - mockedGet = mock.AsyncMock() + mockedGet = unittest.mock.AsyncMock() mockedGet.__aenter__.side_effect = sideEffects - with mock.patch.object(aiohttp.ClientSession, "get") as mocked: + with unittest.mock.patch.object(aiohttp.ClientSession, "get") as mocked: mocked.return_value = mockedGet - wheels = rez_pip.download.downloadPackages( - [ - rez_pip.pip.PackageInfo( - metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), - download_info=rez_pip.pip.DownloadInfo( - url=f"https://example.com/{package}.whl", - archive_info=rez_pip.pip.ArchiveInfo("hash", {}), - ), - is_direct=True, - requested=True, + _groups = [] + for group in groups: + infos = [] + for package in group.packages: + infos.append( + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata( + name=package.name, version="1.0.0" + ), + download_info=rez_pip.pip.DownloadInfo( + url=f"https://example.com/{package.name}.whl", + archive_info=rez_pip.pip.ArchiveInfo("hash", {}), + ), + is_direct=True, + requested=True, + ) ) - for package in packages - ], - os.fspath(tmp_path), - ) + _groups.append(rez_pip.pip.PackageGroup(infos)) + + new_groups = rez_pip.download.downloadPackages(_groups, os.fspath(tmp_path)) - assert sorted(wheels) == sorted( - [os.fspath(tmp_path / f"{package}.whl") for package in packages] + assert len(new_groups) == len(groups) + assert sum(len(group.packages) for group in new_groups) == sum( + len(group.packages) for group in groups ) - for wheel in wheels: - with open(wheel, "r") as fd: - content = fd.read() - assert packages[os.path.basename(wheel).split(".")[0]] == content + 
wheelsMapping = { + package.name: package.path for group in new_groups for package in group.packages + } + + for group in groups: + for package in group.packages: + with open(wheelsMapping[package.name]) as fd: + content = fd.read() + assert content == package.content assert mocked.call_args_list == [ - mock.call( - f"https://example.com/{package}.whl", + unittest.mock.call( + f"https://example.com/{package.name}.whl", headers={ "Content-Type": "application/octet-stream", "User-Agent": "rez-pip/1.2.3.4.5", }, ) - for package in packages + for group in groups + for package in group.packages + ] + + +def test_download_skip_local(tmp_path: pathlib.Path): + """Test that wheels are not downloaded if they are local wheels""" + groups = [ + rez_pip.pip.PackageGroup[rez_pip.pip.PackageInfo]( + [ + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata(name="package-a", version="1.0.0"), + download_info=rez_pip.pip.DownloadInfo( + url="file:///example.com/package-a", + archive_info=rez_pip.pip.ArchiveInfo("hash-a", {}), + ), + is_direct=True, + requested=True, + ) + ] + ) ] + mockedGet = unittest.mock.AsyncMock() + + with unittest.mock.patch.object(aiohttp.ClientSession, "get") as mocked: + mocked.return_value = mockedGet + wheels = rez_pip.download.downloadPackages(groups, os.fspath(tmp_path)) + + assert not mocked.called + data = rez_pip.pip.PackageGroup( + ( + rez_pip.pip.DownloadedArtifact.from_dict( + { + **groups[0].packages[0].to_dict(), + "_localPath": os.path.join(os.fspath(tmp_path), "package-a"), + } + ), + ) + ) + assert wheels == [data] + def test_download_multiple_packages_with_failure(tmp_path: pathlib.Path): - mockedContent = mock.MagicMock() + """ + Test that a failure in one package does not prevent other + packages from being downloaded + """ + mockedContent = unittest.mock.MagicMock() mockedContent.return_value.__aiter__.return_value = [ [ b"package-a data", @@ -106,66 +177,74 @@ def test_download_multiple_packages_with_failure(tmp_path: pathlib.Path): 
] ] - mockedGet = mock.AsyncMock() + mockedGet = unittest.mock.AsyncMock() mockedGet.__aenter__.side_effect = ( - mock.Mock( + unittest.mock.Mock( headers={"content-length": 100}, status=200, - content=mock.Mock(iter_chunks=mockedContent), + content=unittest.mock.Mock(iter_chunks=mockedContent), ), - mock.Mock( + unittest.mock.Mock( headers={"content-length": 100}, status=400, reason="Expected to fail", request_info={"key": "here"}, - content=mock.Mock(iter_chunks=mockedContent), + content=unittest.mock.Mock(iter_chunks=mockedContent), ), ) - with mock.patch.object(aiohttp.ClientSession, "get") as mocked: + with unittest.mock.patch.object(aiohttp.ClientSession, "get") as mocked: mocked.return_value = mockedGet with pytest.raises(RuntimeError): rez_pip.download.downloadPackages( [ - rez_pip.pip.PackageInfo( - metadata=rez_pip.pip.Metadata( - name="package-a", version="1.0.0" - ), - download_info=rez_pip.pip.DownloadInfo( - url="https://example.com/package-a", - archive_info=rez_pip.pip.ArchiveInfo("hash-a", {}), - ), - is_direct=True, - requested=True, + rez_pip.pip.PackageGroup( + [ + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata( + name="package-a", version="1.0.0" + ), + download_info=rez_pip.pip.DownloadInfo( + url="https://example.com/package-a", + archive_info=rez_pip.pip.ArchiveInfo("hash-a", {}), + ), + is_direct=True, + requested=True, + ) + ] ), - rez_pip.pip.PackageInfo( - metadata=rez_pip.pip.Metadata( - name="package-b", version="1.0.0" - ), - download_info=rez_pip.pip.DownloadInfo( - url="https://example.com/package-b", - archive_info=rez_pip.pip.ArchiveInfo("hash-b", {}), - ), - is_direct=True, - requested=True, + rez_pip.pip.PackageGroup( + [ + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata( + name="package-b", version="1.0.0" + ), + download_info=rez_pip.pip.DownloadInfo( + url="https://example.com/package-b", + archive_info=rez_pip.pip.ArchiveInfo("hash-b", {}), + ), + is_direct=True, + requested=True, + ) + ] ), ], 
os.fspath(tmp_path), ) # Check that package-a was downloaded even if even if package-b failed. - with open(tmp_path / "package-a", "r") as fd: + with open(tmp_path / "package-a") as fd: assert fd.read() == "package-a data" assert mocked.call_args_list == [ - mock.call( + unittest.mock.call( "https://example.com/package-a", headers={ "Content-Type": "application/octet-stream", "User-Agent": "rez-pip/1.2.3.4.5", }, ), - mock.call( + unittest.mock.call( "https://example.com/package-b", headers={ "Content-Type": "application/octet-stream", @@ -178,7 +257,7 @@ def test_download_multiple_packages_with_failure(tmp_path: pathlib.Path): def test_download_reuse_if_same_hash(tmp_path: pathlib.Path): """Test that wheels are re-used if the sha256 matches""" sideEffects = tuple() - packages = [] + groups = [] for package in ["package-a", "package-b"]: content = f"{package} data".encode("utf-8") @@ -186,21 +265,25 @@ def test_download_reuse_if_same_hash(tmp_path: pathlib.Path): hash = hashlib.new("sha256") hash.update(content) - packages.append( - rez_pip.pip.PackageInfo( - metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), - download_info=rez_pip.pip.DownloadInfo( - url=f"https://example.com/{package}.whl", - archive_info=rez_pip.pip.ArchiveInfo( - "hash-a", {"sha256": hash.hexdigest()} - ), - ), - is_direct=True, - requested=True, + groups.append( + rez_pip.pip.PackageGroup( + [ + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), + download_info=rez_pip.pip.DownloadInfo( + url=f"https://example.com/{package}.whl", + archive_info=rez_pip.pip.ArchiveInfo( + "hash-a", {"sha256": hash.hexdigest()} + ), + ), + is_direct=True, + requested=True, + ) + ] ) ) - mockedContent = mock.MagicMock() + mockedContent = unittest.mock.MagicMock() mockedContent.return_value.__aiter__.return_value = [ [ content, @@ -209,30 +292,30 @@ def test_download_reuse_if_same_hash(tmp_path: pathlib.Path): ] sideEffects += ( - mock.Mock( + unittest.mock.Mock( 
headers={"content-length": 100}, status=200, - content=mock.Mock(iter_chunks=mockedContent), + content=unittest.mock.Mock(iter_chunks=mockedContent), ), ) - mockedGet1 = mock.AsyncMock() + mockedGet1 = unittest.mock.AsyncMock() mockedGet1.__aenter__.side_effect = sideEffects - with mock.patch.object(aiohttp.ClientSession, "get") as mocked: + with unittest.mock.patch.object(aiohttp.ClientSession, "get") as mocked: mocked.return_value = mockedGet1 - rez_pip.download.downloadPackages(packages, str(tmp_path)) + rez_pip.download.downloadPackages(groups, str(tmp_path)) assert mocked.call_args_list == [ - mock.call( + unittest.mock.call( "https://example.com/package-a.whl", headers={ "Content-Type": "application/octet-stream", "User-Agent": "rez-pip/1.2.3.4.5", }, ), - mock.call( + unittest.mock.call( "https://example.com/package-b.whl", headers={ "Content-Type": "application/octet-stream", @@ -241,7 +324,7 @@ def test_download_reuse_if_same_hash(tmp_path: pathlib.Path): ), ] - packages = [] + groups = [] # package-b will be re-used for package in ["package-c", "package-b"]: content = f"{package} data".encode("utf-8") @@ -249,21 +332,25 @@ def test_download_reuse_if_same_hash(tmp_path: pathlib.Path): hash = hashlib.new("sha256") hash.update(content) - packages.append( - rez_pip.pip.PackageInfo( - metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), - download_info=rez_pip.pip.DownloadInfo( - url=f"https://example.com/{package}.whl", - archive_info=rez_pip.pip.ArchiveInfo( - "hash-a", {"sha256": hash.hexdigest()} - ), - ), - is_direct=True, - requested=True, + groups.append( + rez_pip.pip.PackageGroup( + [ + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), + download_info=rez_pip.pip.DownloadInfo( + url=f"https://example.com/{package}.whl", + archive_info=rez_pip.pip.ArchiveInfo( + "hash-a", {"sha256": hash.hexdigest()} + ), + ), + is_direct=True, + requested=True, + ) + ] ) ) - mockedContent = mock.MagicMock() + 
mockedContent = unittest.mock.MagicMock() mockedContent.return_value.__aiter__.return_value = [ [ content, @@ -272,23 +359,23 @@ def test_download_reuse_if_same_hash(tmp_path: pathlib.Path): ] sideEffects += ( - mock.Mock( + unittest.mock.Mock( headers={"content-length": 100}, status=200, - content=mock.Mock(iter_chunks=mockedContent), + content=unittest.mock.Mock(iter_chunks=mockedContent), ), ) - mockedGet2 = mock.AsyncMock() + mockedGet2 = unittest.mock.AsyncMock() mockedGet2.__aenter__.side_effect = sideEffects - with mock.patch.object(aiohttp.ClientSession, "get") as mocked: + with unittest.mock.patch.object(aiohttp.ClientSession, "get") as mocked: mocked.return_value = mockedGet2 - wheels = rez_pip.download.downloadPackages(packages, str(tmp_path)) + wheels = rez_pip.download.downloadPackages(groups, str(tmp_path)) assert mocked.call_args_list == [ - mock.call( + unittest.mock.call( "https://example.com/package-c.whl", headers={ "Content-Type": "application/octet-stream", @@ -297,15 +384,15 @@ def test_download_reuse_if_same_hash(tmp_path: pathlib.Path): ), ] - assert sorted(wheels) == [ - os.fspath(tmp_path / f"{package}.whl") for package in ["package-b", "package-c"] - ] + assert len(wheels) == 2 + assert len(wheels[0].packages) == 1 + assert len(wheels[1].packages) == 1 def test_download_redownload_if_hash_changes(tmp_path: pathlib.Path): - """Test that wheels are re-used if the sha256 matches""" + """Test that wheels are re-downloaded if the sha256 changes""" sideEffects = tuple() - packages = [] + groups = [] for package in ["package-a", "package-b"]: content = f"{package} data".encode("utf-8") @@ -313,21 +400,25 @@ def test_download_redownload_if_hash_changes(tmp_path: pathlib.Path): hash = hashlib.new("sha256") hash.update(content) - packages.append( - rez_pip.pip.PackageInfo( - metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), - download_info=rez_pip.pip.DownloadInfo( - url=f"https://example.com/{package}.whl", - 
archive_info=rez_pip.pip.ArchiveInfo( - "hash-a", {"sha256": hash.hexdigest()} - ), - ), - is_direct=True, - requested=True, + groups.append( + rez_pip.pip.PackageGroup( + [ + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), + download_info=rez_pip.pip.DownloadInfo( + url=f"https://example.com/{package}.whl", + archive_info=rez_pip.pip.ArchiveInfo( + "hash-a", {"sha256": hash.hexdigest()} + ), + ), + is_direct=True, + requested=True, + ) + ] ) ) - mockedContent = mock.MagicMock() + mockedContent = unittest.mock.MagicMock() mockedContent.return_value.__aiter__.return_value = [ [ content, @@ -336,30 +427,30 @@ def test_download_redownload_if_hash_changes(tmp_path: pathlib.Path): ] sideEffects += ( - mock.Mock( + unittest.mock.Mock( headers={"content-length": 100}, status=200, - content=mock.Mock(iter_chunks=mockedContent), + content=unittest.mock.Mock(iter_chunks=mockedContent), ), ) - mockedGet1 = mock.AsyncMock() + mockedGet1 = unittest.mock.AsyncMock() mockedGet1.__aenter__.side_effect = sideEffects - with mock.patch.object(aiohttp.ClientSession, "get") as mocked: + with unittest.mock.patch.object(aiohttp.ClientSession, "get") as mocked: mocked.return_value = mockedGet1 - rez_pip.download.downloadPackages(packages, str(tmp_path)) + rez_pip.download.downloadPackages(groups, str(tmp_path)) assert mocked.call_args_list == [ - mock.call( + unittest.mock.call( "https://example.com/package-a.whl", headers={ "Content-Type": "application/octet-stream", "User-Agent": "rez-pip/1.2.3.4.5", }, ), - mock.call( + unittest.mock.call( "https://example.com/package-b.whl", headers={ "Content-Type": "application/octet-stream", @@ -368,30 +459,33 @@ def test_download_redownload_if_hash_changes(tmp_path: pathlib.Path): ), ] - packages = [] - # package-b will be re-used + groups = [] for package in ["package-a", "package-b"]: content = f"{package} data".encode("utf-8") - packages.append( - rez_pip.pip.PackageInfo( - 
metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), - download_info=rez_pip.pip.DownloadInfo( - url=f"https://example.com/{package}.whl", - archive_info=rez_pip.pip.ArchiveInfo( - # - # Bad sha256. This will trigger a new download - # - "hash-a", - {"sha256": "asd"}, - ), - ), - is_direct=True, - requested=True, + groups.append( + rez_pip.pip.PackageGroup( + [ + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata(name=package, version="1.0.0"), + download_info=rez_pip.pip.DownloadInfo( + url=f"https://example.com/{package}.whl", + archive_info=rez_pip.pip.ArchiveInfo( + # + # Bad sha256. This will trigger a new download + # + "hash-a", + {"sha256": "asd"}, + ), + ), + is_direct=True, + requested=True, + ) + ] ) ) - mockedContent = mock.MagicMock() + mockedContent = unittest.mock.MagicMock() mockedContent.return_value.__aiter__.return_value = [ [ content, @@ -400,30 +494,30 @@ def test_download_redownload_if_hash_changes(tmp_path: pathlib.Path): ] sideEffects += ( - mock.Mock( + unittest.mock.Mock( headers={"content-length": 100}, status=200, - content=mock.Mock(iter_chunks=mockedContent), + content=unittest.mock.Mock(iter_chunks=mockedContent), ), ) - mockedGet2 = mock.AsyncMock() + mockedGet2 = unittest.mock.AsyncMock() mockedGet2.__aenter__.side_effect = sideEffects - with mock.patch.object(aiohttp.ClientSession, "get") as mocked: + with unittest.mock.patch.object(aiohttp.ClientSession, "get") as mocked: mocked.return_value = mockedGet2 - wheels = rez_pip.download.downloadPackages(packages, str(tmp_path)) + wheels = rez_pip.download.downloadPackages(groups, str(tmp_path)) assert mocked.call_args_list == [ - mock.call( + unittest.mock.call( "https://example.com/package-a.whl", headers={ "Content-Type": "application/octet-stream", "User-Agent": "rez-pip/1.2.3.4.5", }, ), - mock.call( + unittest.mock.call( "https://example.com/package-b.whl", headers={ "Content-Type": "application/octet-stream", @@ -432,6 +526,6 @@ def 
test_download_redownload_if_hash_changes(tmp_path: pathlib.Path): ), ] - assert sorted(wheels) == [ - os.fspath(tmp_path / f"{package}.whl") for package in ["package-a", "package-b"] - ] + assert len(wheels) == 2 + assert len(wheels[0].packages) == 1 + assert len(wheels[1].packages) == 1 diff --git a/tests/test_install.py b/tests/test_install.py index 45803eb..04b5dc8 100644 --- a/tests/test_install.py +++ b/tests/test_install.py @@ -1,15 +1,17 @@ +from __future__ import annotations + import os -import sys -import glob import pathlib import platform import subprocess import pytest +import rez.rex import installer.utils import rez_pip.pip import rez_pip.install +from rez_pip.compat import importlib_metadata from . import utils @@ -60,18 +62,19 @@ def test_console_scripts( assert os.path.exists(consoleScript), f"{consoleScript!r} does not exists!" + def injectEnvVars(executor: rez.rex.RexExecutor): + executor.env.PYTHONPATH.prepend(os.fspath(installPath / "python")) + code, stdout, _ = ctx.execute_shell( command=[consoleScript], block=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - parent_environ={ - "PYTHONPATH": os.fspath(installPath / "python"), - "SYSTEMROOT": os.environ.get("SYSTEMROOT", ""), - }, text=True, + actions_callback=injectEnvVars, ) + assert code == 0 # Use dirnames to avoid having to deal woth python vs python2 or python vs python3 assert os.path.dirname(stdout) == os.path.dirname( executable diff --git a/tests/test_integration.py b/tests/test_integration.py index fa0243c..ed29b1d 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -1,5 +1,9 @@ +from __future__ import annotations + import os import re +import sys +import pathlib import platform import subprocess @@ -58,3 +62,73 @@ def test_python_packages(pythonRezPackage: str, rezRepo: str): assert code == 0 assert stdout.decode("utf-8").strip().lower() == expectedPath.lower() + + +@pytest.mark.parametrize( + "packagesToInstall,imports", [[["PySide6"], ["PySide6"]]], 
ids=["PySide6"] +) +def test_installs( + pythonRezPackage: str, + rezRepo: str, + packagesToInstall: list[str], + imports: list[str], + tmp_path: pathlib.Path, + capsys: pytest.CaptureFixture, +): + """End to end integration test""" + command = [ + sys.executable, + "-m", + "rez_pip", + *packagesToInstall, + "--prefix", + os.fspath(tmp_path), + "--python-version", + pythonRezPackage, + ] + + env = { + "REZ_PACKAGES_PATH": os.pathsep.join([rezRepo, os.fspath(tmp_path)]), + "REZ_DISABLE_HOME_CONFIG": "1", + "COVERAGE_PROCESS_START": os.path.join( + os.path.dirname(os.path.dirname(__file__)), ".coveragerc" + ), + "PYTHONPATH": os.path.dirname(__file__), + } + if platform.system() == "Windows": + # https://stackoverflow.com/a/64706392 + env["SYSTEMROOT"] = os.environ["SYSTEMROOT"] + # Needed for getpass.getuser to work on Windows. + env["USERNAME"] = os.environ["USERNAME"] + + with capsys.disabled(): + subprocess.check_call(command, env=env) + + ctx = rez.resolved_context.ResolvedContext( + packagesToInstall + [f"python-{pythonRezPackage}"], + package_paths=[rezRepo, os.fspath(tmp_path)], + ) + assert ctx.status == rez.resolved_context.ResolverStatus.solved + + code, stdout, stderr = ctx.execute_shell( + command=[ + "python", + "-c", + f"import {','.join(imports)}; [print(i.__path__[0]) for i in [{','.join(imports)}]]", + ], + block=True, + text=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + print(stdout) + print("****") + print(stderr) + assert code == 0 + + for path in stdout.strip().split("\n"): + assert path.lower().startswith( + os.fspath(tmp_path).lower() + ), f"{path!r} does not start with {os.fspath(tmp_path)!r}" + + assert not stderr diff --git a/tests/test_pip.py b/tests/test_pip.py index e525091..c5f238b 100644 --- a/tests/test_pip.py +++ b/tests/test_pip.py @@ -1,20 +1,83 @@ +from __future__ import annotations + import os import re import sys import uuid -import typing import pathlib import subprocess -import rich import pytest import 
rez_pip.pip +import rez_pip.utils import rez_pip.exceptions from . import utils +@pytest.mark.parametrize( + "url,shouldDownload", + [ + ( + "https://pypi.org/packages/package_a/package_a-1.0.0-py2.py3-none-any.whl", + True, + ), + ("file:///tmp/asd.whl", False), + ], +) +def test_PackageInfo(url: str, shouldDownload: bool): + info = rez_pip.pip.PackageInfo( + rez_pip.pip.DownloadInfo( + url, + rez_pip.pip.ArchiveInfo( + "sha256=", + {"sha256": ""}, + ), + ), + False, + True, + rez_pip.pip.Metadata( + "1.0.0", + "package_a", + ), + ) + + assert info.name == "package_a" + assert info.version == "1.0.0" + assert info.isDownloadRequired() == shouldDownload + + +@pytest.mark.parametrize( + "url", + [ + "https://pypi.org/packages/package_a/package_a-1.0.0-py2.py3-none-any.whl", + "file:///tmp/package_a-1.0.0-py2.py3-none-any.whl", + ], +) +def test_DownloadedArtifact(url: str): + info = rez_pip.pip.DownloadedArtifact( + rez_pip.pip.DownloadInfo( + url, + rez_pip.pip.ArchiveInfo( + "sha256=", + {"sha256": ""}, + ), + ), + False, + True, + rez_pip.pip.Metadata( + "1.0.0", + "package_a", + ), + "/tmp/package_a-1.0.0-py2.py3-none-any.whl", + ) + + assert info.name == "package_a" + assert info.version == "1.0.0" + assert info.path == "/tmp/package_a-1.0.0-py2.py3-none-any.whl" + + def test_getBundledPip(): """Test that the bundled pip exists and can be executed""" assert os.path.exists(rez_pip.pip.getBundledPip()) @@ -77,8 +140,8 @@ def test_getBundledPip(): ids=["package_a", "package_a+console_scripts"], ) def test_getPackages_no_deps( - packages: typing.List[str], - expectedPackages: typing.List[rez_pip.pip.PackageInfo], + packages: list[str], + expectedPackages: list[rez_pip.pip.PackageInfo], pythonRezPackage: str, rezRepo: str, pypi: str, @@ -170,8 +233,8 @@ def test_getPackages_error( ], ) - with rich.get_console().capture() as capture: - rich.get_console().print(exc.value, soft_wrap=True) + with rez_pip.utils.CONSOLE.capture() as capture: + 
rez_pip.utils.CONSOLE.print(exc.value, soft_wrap=True) match = re.match( r"rez_pip\.exceptions\.PipError: Failed to run pip command\: '.*'", diff --git a/tests/test_rez.py b/tests/test_rez.py index d24bc97..f21ac8f 100644 --- a/tests/test_rez.py +++ b/tests/test_rez.py @@ -1,5 +1,6 @@ +from __future__ import annotations + import os -import sys import stat import typing import pathlib @@ -12,13 +13,10 @@ import rez.packages import rez.package_repository -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - +import rez_pip.pip import rez_pip.rez import rez_pip.utils +from rez_pip.compat import importlib_metadata def test_createPackage(monkeypatch: pytest.MonkeyPatch, tmp_path: pathlib.Path): @@ -62,16 +60,28 @@ def make_file(path: str) -> importlib_metadata.PackagePath: metadata={"is_pure_python": False}, ) + packageGroup = rez_pip.pip.PackageGroup( + [ + rez_pip.pip.PackageInfo( + metadata=rez_pip.pip.Metadata(name="package-a", version="1.0.0.post0"), + download_info=rez_pip.pip.DownloadInfo( + url=f"http://localhost/asd", + archive_info=rez_pip.pip.ArchiveInfo("hash", {}), + ), + is_direct=True, + requested=True, + ) + ] + ) + packageGroup.dists = [dist] + with unittest.mock.patch.object( rez_pip.utils, "getRezRequirements", return_value=expectedRequirements ): rez_pip.rez.createPackage( - dist, - False, + packageGroup, rez.version.Version("3.7.0"), - [], source, - "http://localhost/asd", prefix=repo, ) @@ -84,14 +94,14 @@ def make_file(path: str) -> importlib_metadata.PackagePath: "name": dist.name, "version": dist.version, "is_pure_python": False, - "wheel_url": "http://localhost/asd", + "wheel_urls": ["http://localhost/asd"], "rez_pip_version": importlib_metadata.version("rez-pip"), "metadata": {}, } def test_convertMetadata_nothing_to_convert(monkeypatch: pytest.MonkeyPatch): - dist = importlib_metadata.Distribution() + dist = importlib_metadata.Distribution.at("asd") monkeypatch.setattr( dist, 
"read_text", @@ -232,7 +242,7 @@ def test_convertMetadata( expectedRemaining, monkeypatch: pytest.MonkeyPatch, ): - dist = importlib_metadata.Distribution() + dist = importlib_metadata.Distribution.at("asd") monkeypatch.setattr( dist, "read_text", @@ -337,12 +347,12 @@ def test_convertMetadata( ) def test_getPythonExecutables( monkeypatch: pytest.MonkeyPatch, - availableVersions: typing.List[str], - range_: typing.Optional[str], - executables: typing.List[str], - expectedExecutables: typing.Dict[str, str], + availableVersions: list[str], + range_: str | None, + executables: list[str], + expectedExecutables: dict[str, str], ) -> None: - repoData: typing.Dict[str, typing.Dict[str, typing.Dict[str, str]]] = {"python": {}} + repoData: dict[str, dict[str, dict[str, str]]] = {"python": {}} for version in availableVersions: repoData["python"][version] = {"version": version} @@ -387,7 +397,7 @@ def test_getPythonExecutables_isolation( escapedPath = os.fspath(packagePath).replace("\\", "\\\\") # Create a fake python-1.0.0 package - repoData: typing.Dict[str, typing.Dict[str, typing.Dict[str, str]]] = { + repoData: dict[str, dict[str, dict[str, str]]] = { "python": { "1.0.0": { "version": "1.0.0", diff --git a/tests/test_utils.py b/tests/test_utils.py index 6733eea..f3c70cb 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing import pytest @@ -135,7 +137,7 @@ def test_packaging_req_to_rez_req(pythonReq: str, rezReq: str): ['python_full_version == "3.7.4"', ["python"]], ], ) -def test_convertMarker(marker: str, expected: typing.List[str]): +def test_convertMarker(marker: str, expected: list[str]): assert rez_pip.utils.convertMarker(marker) == expected @@ -229,9 +231,9 @@ def test_convertMarker(marker: str, expected: typing.List[str]): ], ) def test_normalizeRequirement( - requirement: typing.Union[str, typing.Dict[str, typing.Any]], - expected: typing.List[packaging.requirements.Requirement], - 
conditional_extras: typing.List[typing.Optional[typing.Set[str]]], + requirement: str | dict[str, typing.Any], + expected: list[packaging.requirements.Requirement], + conditional_extras: list[set[str] | None], ): result = rez_pip.utils.normalizeRequirement(requirement) assert [str(req) for req in result] == [str(req) for req in expected] diff --git a/tests/utils.py b/tests/utils.py index b4e2a5a..95a51a3 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import sys import glob