From 336a6f4b66b9f2fcafde558eff5b98758d8c4826 Mon Sep 17 00:00:00 2001 From: Suprithvarma1 Date: Fri, 22 Dec 2023 15:25:59 +0530 Subject: [PATCH 1/9] updated readme --- README.md | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/README.md b/README.md index e6cd15b6fe..7c1f91ad43 100644 --- a/README.md +++ b/README.md @@ -327,6 +327,55 @@ You can read more about [pip's implementation of vcs support here](https://pip.p Launching subshell in virtual environment. Type 'exit' or 'Ctrl+D' to return. $ ▯ + +### PURPOSE AND ADVANTAGES OF PIPENV + +To understand the problems that Pipenv solves, it's useful to show how Python package management has evolved. + +Take yourself back to the first Python iteration. We had Python, but there was no clean way to install packages. + +Then came Easy Install, a package that installs other Python packages with relative ease. But it came with a catch: it wasn't easy to uninstall packages that were no longer needed. + +Enter pip, which most Python users are familiar with. pip lets us install and uninstall packages. We could specify versions, run pip freeze > requirements.txt to output a list of installed packages to a text file, and use that same text file to install everything an app needed with pip install -r requirements.txt. + +But pip didn't include a way to isolate packages from each other. We might work on apps that use different versions of the same libraries, so we needed a way to enable that. + + +Pipenv aims to solve several problems. +First, the problem of needing the pip library for package installation, plus a library for creating a virtual environment, plus a library for managing virtual environments, plus all the commands associated with those libraries. That's a lot to manage. 
Pipenv ships with package management and virtual environment support, so you can use one tool to install, uninstall, track, and document your dependencies and to create, use, and organize your virtual environments. When you start a project with it, Pipenv will automatically create a virtual environment for that project if you aren't already using one. + +Pipenv accomplishes this dependency management by abandoning the requirements.txt norm and trading it for a new document called a Pipfile. When you install a library with Pipenv, a Pipfile for your project is automatically updated with the details of that installation, including version information and possibly the Git repository location, file path, and other information. + +Second, Pipenv wants to make it easier to manage complex interdependencies. + +Using Pipenv, which gives you Pipfile, lets you avoid these problems by managing dependencies for different environments for you. This command will install the main project dependencies: + + pipenv install + +Adding the --dev flag will install the dev/testing requirements: + + pipenv install --dev +To generate a Pipfile.lock file, run: + +pipenv lock + +You can also run Python scripts with Pipenv. To run a top-level Python script called hello.py, run: + +pipenv run python hello.py + +And you will see your expected result in the console. + +To start a shell, run: + +pipenv shell + +If you would like to convert a project that currently uses a requirements.txt file to use Pipenv, install Pipenv and run: + +pipenv install -r requirements.txt + +This will create a Pipfile and install the specified requirements. 
+ + Documentation --------------- From d5564b082aa715a85e0037acc770321dc6abb359 Mon Sep 17 00:00:00 2001 From: Oz Tiram Date: Thu, 4 Jan 2024 14:56:56 +0100 Subject: [PATCH 2/9] Bump vendored pipdeptree Also update the patch we apply Signed-off-by: Oz Tiram --- pipenv/vendor/pipdeptree/__init__.py | 1082 ----------------- pipenv/vendor/pipdeptree/__main__.py | 46 +- pipenv/vendor/pipdeptree/_cli.py | 152 +++ pipenv/vendor/pipdeptree/_discovery.py | 37 + pipenv/vendor/pipdeptree/_models/__init__.py | 11 + pipenv/vendor/pipdeptree/_models/dag.py | 246 ++++ pipenv/vendor/pipdeptree/_models/package.py | 228 ++++ pipenv/vendor/pipdeptree/_non_host.py | 52 + pipenv/vendor/pipdeptree/_render/__init__.py | 38 + pipenv/vendor/pipdeptree/_render/graphviz.py | 116 ++ pipenv/vendor/pipdeptree/_render/json.py | 30 + pipenv/vendor/pipdeptree/_render/json_tree.py | 57 + pipenv/vendor/pipdeptree/_render/mermaid.py | 112 ++ pipenv/vendor/pipdeptree/_render/text.py | 146 +++ pipenv/vendor/pipdeptree/_validate.py | 123 ++ pipenv/vendor/pipdeptree/py.typed | 0 pipenv/vendor/pipdeptree/version.py | 16 +- pipenv/vendor/vendor.txt | 2 +- .../vendor/pipdeptree-update-pip-import.patch | 18 +- 19 files changed, 1421 insertions(+), 1091 deletions(-) create mode 100644 pipenv/vendor/pipdeptree/_cli.py create mode 100644 pipenv/vendor/pipdeptree/_discovery.py create mode 100644 pipenv/vendor/pipdeptree/_models/__init__.py create mode 100644 pipenv/vendor/pipdeptree/_models/dag.py create mode 100644 pipenv/vendor/pipdeptree/_models/package.py create mode 100644 pipenv/vendor/pipdeptree/_non_host.py create mode 100644 pipenv/vendor/pipdeptree/_render/__init__.py create mode 100644 pipenv/vendor/pipdeptree/_render/graphviz.py create mode 100644 pipenv/vendor/pipdeptree/_render/json.py create mode 100644 pipenv/vendor/pipdeptree/_render/json_tree.py create mode 100644 pipenv/vendor/pipdeptree/_render/mermaid.py create mode 100644 pipenv/vendor/pipdeptree/_render/text.py create mode 100644 
pipenv/vendor/pipdeptree/_validate.py create mode 100644 pipenv/vendor/pipdeptree/py.typed diff --git a/pipenv/vendor/pipdeptree/__init__.py b/pipenv/vendor/pipdeptree/__init__.py index cc26012f51..e69de29bb2 100644 --- a/pipenv/vendor/pipdeptree/__init__.py +++ b/pipenv/vendor/pipdeptree/__init__.py @@ -1,1082 +0,0 @@ -import argparse -import fnmatch -import inspect -import json -import os -import shutil -import subprocess -import sys -import tempfile -from collections import defaultdict, deque -from collections.abc import Mapping -from importlib import import_module -from itertools import chain -from textwrap import dedent - -from pipenv.patched.pip._vendor import pkg_resources - -from .version import version as __version__ - -try: - from pipenv.patched.pip._internal.operations.freeze import FrozenRequirement -except ImportError: - from pipenv.patched.pip import FrozenRequirement - - -def sorted_tree(tree): - """ - Sorts the dict representation of the tree. The root packages as well as the intermediate packages are sorted in the - alphabetical order of the package names. 
- - :param dict tree: the pkg dependency tree obtained by calling `construct_tree` function - :returns: sorted tree - :rtype: dict - """ - return {k: sorted(v) for k, v in sorted(tree.items())} - - -def guess_version(pkg_key, default="?"): - """Guess the version of a pkg when pip doesn't provide it - - :param str pkg_key: key of the package - :param str default: default version to return if unable to find - :returns: version - :rtype: string - """ - try: - if sys.version_info >= (3, 8): # pragma: >=3.8 cover - import importlib.metadata as importlib_metadata - else: # pragma: <3.8 cover - import importlib_metadata - return importlib_metadata.version(pkg_key) - except ImportError: - pass - # Avoid AssertionError with setuptools, see https://github.com/tox-dev/pipdeptree/issues/162 - if pkg_key in {"setuptools"}: - return default - try: - m = import_module(pkg_key) - except ImportError: - return default - else: - v = getattr(m, "__version__", default) - if inspect.ismodule(v): - return getattr(v, "__version__", default) - else: - return v - - -def frozen_req_from_dist(dist): - # The `pipenv.patched.pip._internal.metadata` modules were introduced in 21.1.1 - # and the `pipenv.patched.pip._internal.operations.freeze.FrozenRequirement` - # class now expects dist to be a subclass of - # `pipenv.patched.pip._internal.metadata.BaseDistribution`, however the - # `pipenv.patched.pip._internal.utils.misc.get_installed_distributions` continues - # to return objects of type - # pipenv.patched.pip._vendor.pkg_resources.DistInfoDistribution. - # - # This is a hacky backward compatible (with older versions of pip) - # fix. - try: - from pipenv.patched.pip._internal import metadata - except ImportError: - pass - else: - dist = metadata.pkg_resources.Distribution(dist) - - try: - return FrozenRequirement.from_dist(dist) - except TypeError: - return FrozenRequirement.from_dist(dist, []) - - -class Package: - """ - Abstract class for wrappers around objects that pip returns. 
This class needs to be subclassed with implementations - for `render_as_root` and `render_as_branch` methods. - """ - - def __init__(self, obj): - self._obj = obj - self.project_name = obj.project_name - self.key = obj.key - - def render_as_root(self, frozen): # noqa: U100 - return NotImplementedError - - def render_as_branch(self, frozen): # noqa: U100 - return NotImplementedError - - def render(self, parent=None, frozen=False): - if not parent: - return self.render_as_root(frozen) - else: - return self.render_as_branch(frozen) - - @staticmethod - def frozen_repr(obj): - fr = frozen_req_from_dist(obj) - return str(fr).strip() - - def __getattr__(self, key): - return getattr(self._obj, key) - - def __repr__(self): - return f'<{self.__class__.__name__}("{self.key}")>' - - def __lt__(self, rhs): - return self.key < rhs.key - - -class DistPackage(Package): - """ - Wrapper class for pkg_resources.Distribution instances - - :param obj: pkg_resources.Distribution to wrap over - :param req: optional ReqPackage object to associate this DistPackage with. This is useful for displaying the tree - in reverse - """ - - def __init__(self, obj, req=None): - super().__init__(obj) - self.version_spec = None - self.req = req - - def render_as_root(self, frozen): - if not frozen: - return f"{self.project_name}=={self.version}" - else: - return self.__class__.frozen_repr(self._obj) - - def render_as_branch(self, frozen): - assert self.req is not None - if not frozen: - parent_ver_spec = self.req.version_spec - parent_str = self.req.project_name - if parent_ver_spec: - parent_str += parent_ver_spec - return f"{self.project_name}=={self.version} [requires: {parent_str}]" - else: - return self.render_as_root(frozen) - - def as_requirement(self): - """Return a ReqPackage representation of this DistPackage""" - return ReqPackage(self._obj.as_requirement(), dist=self) - - def as_parent_of(self, req): - """ - Return a DistPackage instance associated to a requirement. 
This association is necessary for reversing the - PackageDAG. - - If `req` is None, and the `req` attribute of the current instance is also None, then the same instance will be - returned. - - :param ReqPackage req: the requirement to associate with - :returns: DistPackage instance - """ - if req is None and self.req is None: - return self - return self.__class__(self._obj, req) - - def as_dict(self): - return {"key": self.key, "package_name": self.project_name, "installed_version": self.version} - - -class ReqPackage(Package): - """ - Wrapper class for Requirements instance - - :param obj: The `Requirements` instance to wrap over - :param dist: optional `pkg_resources.Distribution` instance for this requirement - """ - - UNKNOWN_VERSION = "?" - - def __init__(self, obj, dist=None): - super().__init__(obj) - self.dist = dist - - @property - def version_spec(self): - specs = sorted(self._obj.specs, reverse=True) # `reverse` makes '>' prior to '<' - return ",".join(["".join(sp) for sp in specs]) if specs else None - - @property - def installed_version(self): - if not self.dist: - return guess_version(self.key, self.UNKNOWN_VERSION) - return self.dist.version - - @property - def is_missing(self): - return self.installed_version == self.UNKNOWN_VERSION - - def is_conflicting(self): - """If installed version conflicts with required version""" - # unknown installed version is also considered conflicting - if self.installed_version == self.UNKNOWN_VERSION: - return True - ver_spec = self.version_spec if self.version_spec else "" - req_version_str = f"{self.project_name}{ver_spec}" - req_obj = pkg_resources.Requirement.parse(req_version_str) - return self.installed_version not in req_obj - - def render_as_root(self, frozen): - if not frozen: - return f"{self.project_name}=={self.installed_version}" - elif self.dist: - return self.__class__.frozen_repr(self.dist._obj) - else: - return self.project_name - - def render_as_branch(self, frozen): - if not frozen: - req_ver = 
self.version_spec if self.version_spec else "Any" - return f"{self.project_name} [required: {req_ver}, installed: {self.installed_version}]" - else: - return self.render_as_root(frozen) - - def as_dict(self): - return { - "key": self.key, - "package_name": self.project_name, - "installed_version": self.installed_version, - "required_version": self.version_spec, - } - - -class PackageDAG(Mapping): - """ - Representation of Package dependencies as directed acyclic graph using a dict (Mapping) as the underlying - datastructure. - - The nodes and their relationships (edges) are internally stored using a map as follows, - - {a: [b, c], - b: [d], - c: [d, e], - d: [e], - e: [], - f: [b], - g: [e, f]} - - Here, node `a` has 2 children nodes `b` and `c`. Consider edge direction from `a` -> `b` and `a` -> `c` - respectively. - - A node is expected to be an instance of a subclass of `Package`. The keys are must be of class `DistPackage` and - each item in values must be of class `ReqPackage`. (See also ReversedPackageDAG where the key and value types are - interchanged). - """ - - @classmethod - def from_pkgs(cls, pkgs): - pkgs = [DistPackage(p) for p in pkgs] - idx = {p.key: p for p in pkgs} - m = {p: [ReqPackage(r, idx.get(r.key)) for r in p.requires()] for p in pkgs} - return cls(m) - - def __init__(self, m): - """Initialize the PackageDAG object - - :param dict m: dict of node objects (refer class docstring) - :returns: None - :rtype: NoneType - - """ - self._obj = m - self._index = {p.key: p for p in list(self._obj)} - - def get_node_as_parent(self, node_key): - """ - Get the node from the keys of the dict representing the DAG. - - This method is useful if the dict representing the DAG contains different kind of objects in keys and values. - Use this method to look up a node obj as a parent (from the keys of the dict) given a node key. 
- - :param node_key: identifier corresponding to key attr of node obj - :returns: node obj (as present in the keys of the dict) - :rtype: Object - """ - try: - return self._index[node_key] - except KeyError: - return None - - def get_children(self, node_key): - """ - Get child nodes for a node by its key - - :param str node_key: key of the node to get children of - :returns: list of child nodes - :rtype: ReqPackage[] - """ - node = self.get_node_as_parent(node_key) - return self._obj[node] if node else [] - - def filter(self, include, exclude): - """ - Filters nodes in a graph by given parameters - - If a node is included, then all it's children are also included. - - :param set include: set of node keys to include (or None) - :param set exclude: set of node keys to exclude (or None) - :returns: filtered version of the graph - :rtype: PackageDAG - """ - # If neither of the filters are specified, short circuit - if include is None and exclude is None: - return self - - # Note: In following comparisons, we use lower cased values so - # that user may specify `key` or `project_name`. As per the - # documentation, `key` is simply - # `project_name.lower()`. 
Refer: - # https://setuptools.readthedocs.io/en/latest/pkg_resources.html#distribution-objects - if include: - include = {s.lower() for s in include} - if exclude: - exclude = {s.lower() for s in exclude} - else: - exclude = set() - - # Check for mutual exclusion of show_only and exclude sets - # after normalizing the values to lowercase - if include and exclude: - assert not (include & exclude) - - # Traverse the graph in a depth first manner and filter the - # nodes according to `show_only` and `exclude` sets - stack = deque() - m = {} - seen = set() - for node in self._obj.keys(): - if any(fnmatch.fnmatch(node.key, e) for e in exclude): - continue - if include is None or any(fnmatch.fnmatch(node.key, i) for i in include): - stack.append(node) - while True: - if len(stack) > 0: - n = stack.pop() - cldn = [c for c in self._obj[n] if not any(fnmatch.fnmatch(c.key, e) for e in exclude)] - m[n] = cldn - seen.add(n.key) - for c in cldn: - if c.key not in seen: - cld_node = self.get_node_as_parent(c.key) - if cld_node: - stack.append(cld_node) - else: - # It means there's no root node corresponding to the child node i.e. - # a dependency is missing - continue - else: - break - - return self.__class__(m) - - def reverse(self): - """ - Reverse the DAG, or turn it upside-down. - - In other words, the directions of edges of the nodes in the DAG will be reversed. - - Note that this function purely works on the nodes in the graph. This implies that to perform a combination of - filtering and reversing, the order in which `filter` and `reverse` methods should be applied is important. For - e.g., if reverse is called on a filtered graph, then only the filtered nodes and it's children will be - considered when reversing. On the other hand, if filter is called on reversed DAG, then the definition of - "child" nodes is as per the reversed DAG. 
- - :returns: DAG in the reversed form - :rtype: ReversedPackageDAG - """ - m = defaultdict(list) - child_keys = {r.key for r in chain.from_iterable(self._obj.values())} - for k, vs in self._obj.items(): - for v in vs: - # if v is already added to the dict, then ensure that - # we are using the same object. This check is required - # as we're using array mutation - try: - node = [p for p in m.keys() if p.key == v.key][0] - except IndexError: - node = v - m[node].append(k.as_parent_of(v)) - if k.key not in child_keys: - m[k.as_requirement()] = [] - return ReversedPackageDAG(dict(m)) - - def sort(self): - """ - Return sorted tree in which the underlying _obj dict is an dict, sorted alphabetically by the keys. - - :returns: Instance of same class with dict - """ - return self.__class__(sorted_tree(self._obj)) - - # Methods required by the abstract base class Mapping - def __getitem__(self, *args): - return self._obj.get(*args) - - def __iter__(self): - return self._obj.__iter__() - - def __len__(self): - return len(self._obj) - - -class ReversedPackageDAG(PackageDAG): - """Representation of Package dependencies in the reverse order. - - Similar to it's super class `PackageDAG`, the underlying datastructure is a dict, but here the keys are expected to - be of type `ReqPackage` and each item in the values of type `DistPackage`. - - Typically, this object will be obtained by calling `PackageDAG.reverse`. 
- """ - - def reverse(self): - """ - Reverse the already reversed DAG to get the PackageDAG again - - :returns: reverse of the reversed DAG - :rtype: PackageDAG - """ - m = defaultdict(list) - child_keys = {r.key for r in chain.from_iterable(self._obj.values())} - for k, vs in self._obj.items(): - for v in vs: - try: - node = [p for p in m.keys() if p.key == v.key][0] - except IndexError: - node = v.as_parent_of(None) - m[node].append(k) - if k.key not in child_keys: - m[k.dist] = [] - return PackageDAG(dict(m)) - - -def render_text(tree, list_all=True, frozen=False): - """Print tree as text on console - - :param dict tree: the package tree - :param bool list_all: whether to list all the pgks at the root level or only those that are the sub-dependencies - :param bool frozen: show the names of the pkgs in the output that's favourable to pip --freeze - :returns: None - """ - tree = tree.sort() - nodes = tree.keys() - branch_keys = {r.key for r in chain.from_iterable(tree.values())} - - if not list_all: - nodes = [p for p in nodes if p.key not in branch_keys] - - if sys.stdout.encoding.lower() in ("utf-8", "utf-16", "utf-32"): - _render_text_with_unicode(tree, nodes, frozen) - else: - _render_text_without_unicode(tree, nodes, frozen) - - -def _render_text_with_unicode(tree, nodes, frozen): - use_bullets = not frozen - - def aux( - node, - parent=None, - indent=0, - cur_chain=None, - prefix="", - depth=0, - has_grand_parent=False, - is_last_child=False, - parent_is_last_child=False, - ): - cur_chain = cur_chain or [] - node_str = node.render(parent, frozen) - next_prefix = "" - next_indent = indent + 2 - - if parent: - bullet = "├── " - if is_last_child: - bullet = "└── " - - line_char = "│" - if not use_bullets: - line_char = "" - # Add 2 spaces so direct dependencies to a project are indented - bullet = " " - - if has_grand_parent: - next_indent -= 1 - if parent_is_last_child: - offset = 0 if len(line_char) == 1 else 1 - prefix += " " * (indent + 1 - offset - depth) 
- else: - prefix += line_char + " " * (indent - depth) - # Without this extra space, bullets will point to the space just before the project name - prefix += " " if use_bullets else "" - next_prefix = prefix - node_str = prefix + bullet + node_str - result = [node_str] - - children = tree.get_children(node.key) - children_strings = [ - aux( - c, - node, - indent=next_indent, - cur_chain=cur_chain + [c.project_name], - prefix=next_prefix, - depth=depth + 1, - has_grand_parent=parent is not None, - is_last_child=c is children[-1], - parent_is_last_child=is_last_child, - ) - for c in children - if c.project_name not in cur_chain - ] - - result += list(chain.from_iterable(children_strings)) - return result - - lines = chain.from_iterable([aux(p) for p in nodes]) - print("\n".join(lines)) - - -def _render_text_without_unicode(tree, nodes, frozen): - use_bullets = not frozen - - def aux(node, parent=None, indent=0, cur_chain=None): - cur_chain = cur_chain or [] - node_str = node.render(parent, frozen) - if parent: - prefix = " " * indent + ("- " if use_bullets else "") - node_str = prefix + node_str - result = [node_str] - children = [ - aux(c, node, indent=indent + 2, cur_chain=cur_chain + [c.project_name]) - for c in tree.get_children(node.key) - if c.project_name not in cur_chain - ] - result += list(chain.from_iterable(children)) - return result - - lines = chain.from_iterable([aux(p) for p in nodes]) - print("\n".join(lines)) - - -def render_json(tree, indent): - """ - Converts the tree into a flat json representation. - - The json repr will be a list of hashes, each hash having 2 fields: - - package - - dependencies: list of dependencies - - :param dict tree: dependency tree - :param int indent: no. 
of spaces to indent json - :returns: json representation of the tree - :rtype: str - """ - tree = tree.sort() - return json.dumps( - [{"package": k.as_dict(), "dependencies": [v.as_dict() for v in vs]} for k, vs in tree.items()], indent=indent - ) - - -def render_json_tree(tree, indent): - """ - Converts the tree into a nested json representation. - - The json repr will be a list of hashes, each hash having the following fields: - - - package_name - - key - - required_version - - installed_version - - dependencies: list of dependencies - - :param dict tree: dependency tree - :param int indent: no. of spaces to indent json - :returns: json representation of the tree - :rtype: str - """ - tree = tree.sort() - branch_keys = {r.key for r in chain.from_iterable(tree.values())} - nodes = [p for p in tree.keys() if p.key not in branch_keys] - - def aux(node, parent=None, cur_chain=None): - if cur_chain is None: - cur_chain = [node.project_name] - - d = node.as_dict() - if parent: - d["required_version"] = node.version_spec if node.version_spec else "Any" - else: - d["required_version"] = d["installed_version"] - - d["dependencies"] = [ - aux(c, parent=node, cur_chain=cur_chain + [c.project_name]) - for c in tree.get_children(node.key) - if c.project_name not in cur_chain - ] - - return d - - return json.dumps([aux(p) for p in nodes], indent=indent) - - -def render_mermaid(tree) -> str: - """Produce a Mermaid flowchart from the dependency graph. - - :param dict tree: dependency graph - """ - # List of reserved keywords in Mermaid that cannot be used as node names. 
- # See: https://github.com/mermaid-js/mermaid/issues/4182#issuecomment-1454787806 - reserved_ids: set[str] = { - "C4Component", - "C4Container", - "C4Deployment", - "C4Dynamic", - "_blank", - "_parent", - "_self", - "_top", - "call", - "class", - "classDef", - "click", - "end", - "flowchart", - "flowchart-v2", - "graph", - "interpolate", - "linkStyle", - "style", - "subgraph", - } - node_ids_map: dict[str:str] = {} - - def mermaid_id(key: str) -> str: - """Returns a valid Mermaid node ID from a string.""" - # If we have already seen this key, return the canonical ID. - canonical_id = node_ids_map.get(key) - if canonical_id is not None: - return canonical_id - # If the key is not a reserved keyword, return it as is, and update the map. - if key not in reserved_ids: - node_ids_map[key] = key - return key - # If the key is a reserved keyword, append a number to it. - number = 0 - while True: - new_id = f"{key}_{number}" - if new_id not in node_ids_map: - node_ids_map[key] = new_id - return new_id - number += 1 - - # Use a sets to avoid duplicate entries. 
- nodes: set[str] = set() - edges: set[str] = set() - - if isinstance(tree, ReversedPackageDAG): - for package, reverse_dependencies in tree.items(): - package_label = "\\n".join( - (package.project_name, "(missing)" if package.is_missing else package.installed_version) - ) - package_key = mermaid_id(package.key) - nodes.add(f'{package_key}["{package_label}"]') - for reverse_dependency in reverse_dependencies: - edge_label = reverse_dependency.req.version_spec or "any" - reverse_dependency_key = mermaid_id(reverse_dependency.key) - edges.add(f'{package_key} -- "{edge_label}" --> {reverse_dependency_key}') - else: - for package, dependencies in tree.items(): - package_label = "\\n".join((package.project_name, package.version)) - package_key = mermaid_id(package.key) - nodes.add(f'{package_key}["{package_label}"]') - for dependency in dependencies: - edge_label = dependency.version_spec or "any" - dependency_key = mermaid_id(dependency.key) - if dependency.is_missing: - dependency_label = f"{dependency.project_name}\\n(missing)" - nodes.add(f'{dependency_key}["{dependency_label}"]:::missing') - edges.add(f"{package_key} -.-> {dependency_key}") - else: - edges.add(f'{package_key} -- "{edge_label}" --> {dependency_key}') - - # Produce the Mermaid Markdown. - indent = " " * 4 - output = dedent( - f"""\ - flowchart TD - {indent}classDef missing stroke-dasharray: 5 - """ - ) - # Sort the nodes and edges to make the output deterministic. - output += indent - output += f"\n{indent}".join(node for node in sorted(nodes)) - output += "\n" + indent - output += f"\n{indent}".join(edge for edge in sorted(edges)) - output += "\n" - return output - - -def dump_graphviz(tree, output_format="dot", is_reverse=False): - """Output dependency graph as one of the supported GraphViz output formats. 
- - :param dict tree: dependency graph - :param string output_format: output format - :param bool is_reverse: reverse or not - :returns: representation of tree in the specified output format - :rtype: str or binary representation depending on the output format - - """ - try: - from graphviz import Digraph - except ImportError: - print("graphviz is not available, but necessary for the output " "option. Please install it.", file=sys.stderr) - sys.exit(1) - - try: - from graphviz import parameters - except ImportError: - from graphviz import backend - - valid_formats = backend.FORMATS - print( - "Deprecation warning! Please upgrade graphviz to version >=0.18.0 " - "Support for older versions will be removed in upcoming release", - file=sys.stderr, - ) - else: - valid_formats = parameters.FORMATS - - if output_format not in valid_formats: - print(f"{output_format} is not a supported output format.", file=sys.stderr) - print(f"Supported formats are: {', '.join(sorted(valid_formats))}", file=sys.stderr) - sys.exit(1) - - graph = Digraph(format=output_format) - - if not is_reverse: - for pkg, deps in tree.items(): - pkg_label = f"{pkg.project_name}\\n{pkg.version}" - graph.node(pkg.key, label=pkg_label) - for dep in deps: - edge_label = dep.version_spec or "any" - if dep.is_missing: - dep_label = f"{dep.project_name}\\n(missing)" - graph.node(dep.key, label=dep_label, style="dashed") - graph.edge(pkg.key, dep.key, style="dashed") - else: - graph.edge(pkg.key, dep.key, label=edge_label) - else: - for dep, parents in tree.items(): - dep_label = f"{dep.project_name}\\n{dep.installed_version}" - graph.node(dep.key, label=dep_label) - for parent in parents: - # req reference of the dep associated with this - # particular parent package - req_ref = parent.req - edge_label = req_ref.version_spec or "any" - graph.edge(dep.key, parent.key, label=edge_label) - - # Allow output of dot format, even if GraphViz isn't installed. 
- if output_format == "dot": - # Emulates graphviz.dot.Dot.__iter__() to force the sorting of graph.body. - # Fixes https://github.com/tox-dev/pipdeptree/issues/188 - # That way we can guarantee the output of the dot format is deterministic - # and stable. - return "".join([tuple(graph)[0]] + sorted(graph.body) + [graph._tail]) - - # As it's unknown if the selected output format is binary or not, try to - # decode it as UTF8 and only print it out in binary if that's not possible. - try: - return graph.pipe().decode("utf-8") - except UnicodeDecodeError: - return graph.pipe() - - -def print_graphviz(dump_output): - """ - Dump the data generated by GraphViz to stdout. - - :param dump_output: The output from dump_graphviz - """ - if hasattr(dump_output, "encode"): - print(dump_output) - else: - with os.fdopen(sys.stdout.fileno(), "wb") as bytestream: - bytestream.write(dump_output) - - -def conflicting_deps(tree): - """ - Returns dependencies which are not present or conflict with the requirements of other packages. - - e.g. will warn if pkg1 requires pkg2==2.0 and pkg2==1.0 is installed - - :param tree: the requirements tree (dict) - :returns: dict of DistPackage -> list of unsatisfied/unknown ReqPackage - :rtype: dict - """ - conflicting = defaultdict(list) - for p, rs in tree.items(): - for req in rs: - if req.is_conflicting(): - conflicting[p].append(req) - return conflicting - - -def render_conflicts_text(conflicts): - if conflicts: - print("Warning!!! 
Possibly conflicting dependencies found:", file=sys.stderr) - # Enforce alphabetical order when listing conflicts - pkgs = sorted(conflicts.keys()) - for p in pkgs: - pkg = p.render_as_root(False) - print(f"* {pkg}", file=sys.stderr) - for req in conflicts[p]: - req_str = req.render_as_branch(False) - print(f" - {req_str}", file=sys.stderr) - - -def cyclic_deps(tree): - """ - Return cyclic dependencies as list of tuples - - :param PackageDAG tree: package tree/dag - :returns: list of tuples representing cyclic dependencies - :rtype: list - """ - index = {p.key: {r.key for r in rs} for p, rs in tree.items()} - cyclic = [] - for p, rs in tree.items(): - for r in rs: - if p.key in index.get(r.key, []): - p_as_dep_of_r = [x for x in tree.get(tree.get_node_as_parent(r.key)) if x.key == p.key][0] - cyclic.append((p, r, p_as_dep_of_r)) - return cyclic - - -def render_cycles_text(cycles): - if cycles: - print("Warning!! Cyclic dependencies found:", file=sys.stderr) - # List in alphabetical order of the dependency that's cycling - # (2nd item in the tuple) - cycles = sorted(cycles, key=lambda xs: xs[1].key) - for a, b, c in cycles: - print(f"* {a.project_name} => {b.project_name} => {c.project_name}", file=sys.stderr) - - -def get_parser(): - parser = argparse.ArgumentParser(description="Dependency tree of the installed python packages") - parser.add_argument("-v", "--version", action="version", version=f"{__version__}") - parser.add_argument("-f", "--freeze", action="store_true", help="Print names so as to write freeze files") - parser.add_argument( - "--python", - default=sys.executable, - help="Python to use to look for packages in it (default: where" " installed)", - ) - parser.add_argument("-a", "--all", action="store_true", help="list all deps at top level") - parser.add_argument( - "-l", - "--local-only", - action="store_true", - help="If in a virtualenv that has global access " "do not show globally installed packages", - ) - parser.add_argument("-u", "--user-only", 
action="store_true", help="Only show installations in the user site dir") - parser.add_argument( - "-w", - "--warn", - action="store", - dest="warn", - nargs="?", - default="suppress", - choices=("silence", "suppress", "fail"), - help=( - 'Warning control. "suppress" will show warnings ' - "but return 0 whether or not they are present. " - '"silence" will not show warnings at all and ' - 'always return 0. "fail" will show warnings and ' - "return 1 if any are present. The default is " - '"suppress".' - ), - ) - parser.add_argument( - "-r", - "--reverse", - action="store_true", - default=False, - help=( - "Shows the dependency tree in the reverse fashion " - "ie. the sub-dependencies are listed with the " - "list of packages that need them under them." - ), - ) - parser.add_argument( - "-p", - "--packages", - help=( - "Comma separated list of select packages to show in the output. " - "Wildcards are supported, like 'somepackage.*'. " - "If set, --all will be ignored." - ), - ) - parser.add_argument( - "-e", - "--exclude", - help=( - "Comma separated list of select packages to exclude from the output. " - "Wildcards are supported, like 'somepackage.*'. " - "If set, --all will be ignored." - ), - metavar="PACKAGES", - ) - parser.add_argument( - "-j", - "--json", - action="store_true", - default=False, - help=( - "Display dependency tree as json. This will yield " - '"raw" output that may be used by external tools. ' - "This option overrides all other options." - ), - ) - parser.add_argument( - "--json-tree", - action="store_true", - default=False, - help=( - "Display dependency tree as json which is nested " - "the same way as the plain text output printed by default. " - "This option overrides all other options (except --json)." - ), - ) - parser.add_argument( - "--mermaid", - action="store_true", - default=False, - help=("Display dependency tree as a Mermaid graph. 
" "This option overrides all other options."), - ) - parser.add_argument( - "--graph-output", - dest="output_format", - help=( - "Print a dependency graph in the specified output " - "format. Available are all formats supported by " - "GraphViz, e.g.: dot, jpeg, pdf, png, svg" - ), - ) - return parser - - -def _get_args(): - parser = get_parser() - return parser.parse_args() - - -def handle_non_host_target(args): - of_python = os.path.abspath(args.python) - # if target is not current python re-invoke it under the actual host - if of_python != os.path.abspath(sys.executable): - # there's no way to guarantee that graphviz is available, so refuse - if args.output_format: - print("graphviz functionality is not supported when querying" " non-host python", file=sys.stderr) - raise SystemExit(1) - argv = sys.argv[1:] # remove current python executable - for py_at, value in enumerate(argv): - if value == "--python": - del argv[py_at] - del argv[py_at] - elif value.startswith("--python"): - del argv[py_at] - - main_file = inspect.getsourcefile(sys.modules[__name__]) - with tempfile.TemporaryDirectory() as project: - dest = os.path.join(project, "pipdeptree") - shutil.copytree(os.path.dirname(main_file), dest) - # invoke from an empty folder to avoid cwd altering sys.path - env = os.environ.copy() - env["PYTHONPATH"] = project - cmd = [of_python, "-m", "pipdeptree"] - cmd.extend(argv) - return subprocess.call(cmd, cwd=project, env=env) - return None - - -def get_installed_distributions(local_only=False, user_only=False): - try: - from pipenv.patched.pip._internal.metadata import pkg_resources - except ImportError: - # For backward compatibility with python ver. 
2.7 and pip - # version 20.3.4 (the latest pip version that works with python - # version 2.7) - from pipenv.patched.pip._internal.utils import misc - - return misc.get_installed_distributions(local_only=local_only, user_only=user_only) - else: - dists = pkg_resources.Environment.from_paths(None).iter_installed_distributions( - local_only=local_only, skip=(), user_only=user_only - ) - return [d._dist for d in dists] - - -def main(): - args = _get_args() - result = handle_non_host_target(args) - if result is not None: - return result - - pkgs = get_installed_distributions(local_only=args.local_only, user_only=args.user_only) - - tree = PackageDAG.from_pkgs(pkgs) - - is_text_output = not any([args.json, args.json_tree, args.output_format]) - - return_code = 0 - - # Before any reversing or filtering, show warnings to console - # about possibly conflicting or cyclic deps if found and warnings - # are enabled (i.e. only if output is to be printed to console) - if is_text_output and args.warn != "silence": - conflicts = conflicting_deps(tree) - if conflicts: - render_conflicts_text(conflicts) - print("-" * 72, file=sys.stderr) - - cycles = cyclic_deps(tree) - if cycles: - render_cycles_text(cycles) - print("-" * 72, file=sys.stderr) - - if args.warn == "fail" and (conflicts or cycles): - return_code = 1 - - # Reverse the tree (if applicable) before filtering, thus ensuring - # that the filter will be applied on ReverseTree - if args.reverse: - tree = tree.reverse() - - show_only = set(args.packages.split(",")) if args.packages else None - exclude = set(args.exclude.split(",")) if args.exclude else None - - if show_only is not None or exclude is not None: - tree = tree.filter(show_only, exclude) - - if args.json: - print(render_json(tree, indent=4)) - elif args.json_tree: - print(render_json_tree(tree, indent=4)) - elif args.mermaid: - print(render_mermaid(tree)) - elif args.output_format: - output = dump_graphviz(tree, output_format=args.output_format, 
is_reverse=args.reverse) - print_graphviz(output) - else: - render_text(tree, args.all, args.freeze) - - return return_code diff --git a/pipenv/vendor/pipdeptree/__main__.py b/pipenv/vendor/pipdeptree/__main__.py index 6c94a67a10..21661a8d1f 100644 --- a/pipenv/vendor/pipdeptree/__main__.py +++ b/pipenv/vendor/pipdeptree/__main__.py @@ -1,5 +1,17 @@ +"""The main entry point used for CLI.""" +from __future__ import annotations + import os import sys +from typing import Sequence + +from pipenv.vendor.pipdeptree._cli import get_options +from pipenv.vendor.pipdeptree._discovery import get_installed_distributions +from pipenv.vendor.pipdeptree._models import PackageDAG +from pipenv.vendor.pipdeptree._non_host import handle_non_host_target +from pipenv.vendor.pipdeptree._render import render +from pipenv.vendor.pipdeptree._validate import validate + pardir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # for finding pipdeptree itself @@ -8,7 +20,39 @@ sys.path.append(os.path.dirname(os.path.dirname(pardir))) -from pipenv.vendor.pipdeptree import main +def main(args: Sequence[str] | None = None) -> None | int: + """CLI - The main function called as entry point.""" + options = get_options(args) + result = handle_non_host_target(options) + if result is not None: + return result + + pkgs = get_installed_distributions(local_only=options.local_only, user_only=options.user_only) + tree = PackageDAG.from_pkgs(pkgs) + is_text_output = not any([options.json, options.json_tree, options.output_format]) + + return_code = validate(options, is_text_output, tree) + + # Reverse the tree (if applicable) before filtering, thus ensuring, that the filter will be applied on ReverseTree + if options.reverse: + tree = tree.reverse() + + show_only = options.packages.split(",") if options.packages else None + exclude = set(options.exclude.split(",")) if options.exclude else None + + if show_only is not None or exclude is not None: + try: + tree = tree.filter_nodes(show_only, 
exclude) + except ValueError as e: + if options.warn in ("suppress", "fail"): + print(e, file=sys.stderr) # noqa: T201 + return_code |= 1 if options.warn == "fail" else 0 + return return_code + + render(options, tree) + + return return_code + if __name__ == "__main__": sys.exit(main()) diff --git a/pipenv/vendor/pipdeptree/_cli.py b/pipenv/vendor/pipdeptree/_cli.py new file mode 100644 index 0000000000..9dd1d3754e --- /dev/null +++ b/pipenv/vendor/pipdeptree/_cli.py @@ -0,0 +1,152 @@ +from __future__ import annotations + +import sys +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace +from typing import TYPE_CHECKING, Sequence, cast + +from .version import __version__ + +if TYPE_CHECKING: + from typing import Literal + + +class Options(Namespace): + freeze: bool + python: str + all: bool # noqa: A003 + local_only: bool + user_only: bool + warn: Literal["silence", "suppress", "fail"] + reverse: bool + packages: str + exclude: str + json: bool + json_tree: bool + mermaid: bool + output_format: str | None + depth: float + encoding: str + + +class _Formatter(ArgumentDefaultsHelpFormatter): + def __init__(self, prog: str) -> None: + super().__init__(prog, max_help_position=22, width=240) + + +def build_parser() -> ArgumentParser: + parser = ArgumentParser(description="Dependency tree of the installed python packages", formatter_class=_Formatter) + parser.add_argument("-v", "--version", action="version", version=f"{__version__}") + parser.add_argument( + "-w", + "--warn", + action="store", + dest="warn", + nargs="?", + default="suppress", + choices=("silence", "suppress", "fail"), + help=( + "warning control: suppress will show warnings but return 0 whether or not they are present; silence will " + "not show warnings at all and always return 0; fail will show warnings and return 1 if any are present" + ), + ) + parser.add_argument( + "-r", + "--reverse", + action="store_true", + default=False, + help=( + "render the dependency tree in the 
reverse fashion ie. the sub-dependencies are listed with the list of " + "packages that need them under them" + ), + ) + + select = parser.add_argument_group(title="select", description="choose what to render") + select.add_argument("--python", default=sys.executable, help="Python interpreter to inspect") + select.add_argument( + "-p", + "--packages", + help="comma separated list of packages to show - wildcards are supported, like 'somepackage.*'", + metavar="P", + ) + select.add_argument( + "-e", + "--exclude", + help="comma separated list of packages to not show - wildcards are supported, like 'somepackage.*'. " + "(cannot combine with -p or -a)", + metavar="P", + ) + select.add_argument("-a", "--all", action="store_true", help="list all deps at top level") + + scope = select.add_mutually_exclusive_group() + scope.add_argument( + "-l", + "--local-only", + action="store_true", + help="if in a virtualenv that has global access do not show globally installed packages", + ) + scope.add_argument("-u", "--user-only", action="store_true", help="only show installations in the user site dir") + + render = parser.add_argument_group( + title="render", + description="choose how to render the dependency tree (by default will use text mode)", + ) + render.add_argument("-f", "--freeze", action="store_true", help="print names so as to write freeze files") + render.add_argument( + "--encoding", + dest="encoding_type", + default=sys.stdout.encoding, + help="the encoding to use when writing to the output", + metavar="E", + ) + render.add_argument( + "-d", + "--depth", + type=lambda x: int(x) if x.isdigit() and (int(x) >= 0) else parser.error("Depth must be a number that is >= 0"), + default=float("inf"), + help="limit the depth of the tree (text render only)", + metavar="D", + ) + + render_type = render.add_mutually_exclusive_group() + render_type.add_argument( + "-j", + "--json", + action="store_true", + default=False, + help="raw JSON - this will yield output that may be used by 
external tools", + ) + render_type.add_argument( + "--json-tree", + action="store_true", + default=False, + help="nested JSON - mimics the text format layout", + ) + render_type.add_argument( + "--mermaid", + action="store_true", + default=False, + help="https://mermaid.js.org flow diagram", + ) + render_type.add_argument( + "--graph-output", + metavar="FMT", + dest="output_format", + help="Graphviz rendering with the value being the graphviz output e.g.: dot, jpeg, pdf, png, svg", + ) + return parser + + +def get_options(args: Sequence[str] | None) -> Options: + parser = build_parser() + parsed_args = parser.parse_args(args) + + if parsed_args.exclude and (parsed_args.all or parsed_args.packages): + return parser.error("cannot use --exclude with --packages or --all") + + return cast(Options, parsed_args) + + +__all__ = [ + "get_options", + "Options", +] diff --git a/pipenv/vendor/pipdeptree/_discovery.py b/pipenv/vendor/pipdeptree/_discovery.py new file mode 100644 index 0000000000..8763371e8d --- /dev/null +++ b/pipenv/vendor/pipdeptree/_discovery.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pipenv.patched.pip._vendor.pkg_resources import DistInfoDistribution + + +def get_installed_distributions( + local_only: bool = False, # noqa: FBT001, FBT002 + user_only: bool = False, # noqa: FBT001, FBT002 +) -> list[DistInfoDistribution]: + try: + from pipenv.patched.pip._internal.metadata import pkg_resources + except ImportError: + # For backward compatibility with python ver. 
2.7 and pip + # version 20.3.4 (the latest pip version that works with python + # version 2.7) + from pipenv.patched.pip._internal.utils import misc + + return misc.get_installed_distributions( # type: ignore[no-any-return,attr-defined] + local_only=local_only, + user_only=user_only, + ) + + else: + dists = pkg_resources.Environment.from_paths(None).iter_installed_distributions( + local_only=local_only, + skip=(), + user_only=user_only, + ) + return [d._dist for d in dists] # type: ignore[attr-defined] # noqa: SLF001 + + +__all__ = [ + "get_installed_distributions", +] diff --git a/pipenv/vendor/pipdeptree/_models/__init__.py b/pipenv/vendor/pipdeptree/_models/__init__.py new file mode 100644 index 0000000000..a5128301fc --- /dev/null +++ b/pipenv/vendor/pipdeptree/_models/__init__.py @@ -0,0 +1,11 @@ +from __future__ import annotations + +from .dag import PackageDAG, ReversedPackageDAG +from .package import DistPackage, ReqPackage + +__all__ = [ + "ReqPackage", + "DistPackage", + "PackageDAG", + "ReversedPackageDAG", +] diff --git a/pipenv/vendor/pipdeptree/_models/dag.py b/pipenv/vendor/pipdeptree/_models/dag.py new file mode 100644 index 0000000000..f3d22539c5 --- /dev/null +++ b/pipenv/vendor/pipdeptree/_models/dag.py @@ -0,0 +1,246 @@ +from __future__ import annotations + +from collections import defaultdict, deque +from fnmatch import fnmatch +from itertools import chain +from typing import TYPE_CHECKING, Iterator, List, Mapping + +if TYPE_CHECKING: + from pipenv.patched.pip._vendor.pkg_resources import DistInfoDistribution + + +from .package import DistPackage, ReqPackage + + +class PackageDAG(Mapping[DistPackage, List[ReqPackage]]): + """Representation of Package dependencies as directed acyclic graph using a dict as the underlying datastructure. 
+ + The nodes and their relationships (edges) are internally stored using a map as follows, + + {a: [b, c], + b: [d], + c: [d, e], + d: [e], + e: [], + f: [b], + g: [e, f]} + + Here, node `a` has 2 children nodes `b` and `c`. Consider edge direction from `a` -> `b` and `a` -> `c` + respectively. + + A node is expected to be an instance of a subclass of `Package`. The keys are must be of class `DistPackage` and + each item in values must be of class `ReqPackage`. (See also ReversedPackageDAG where the key and value types are + interchanged). + + """ + + @classmethod + def from_pkgs(cls, pkgs: list[DistInfoDistribution]) -> PackageDAG: + dist_pkgs = [DistPackage(p) for p in pkgs] + idx = {p.key: p for p in dist_pkgs} + m: dict[DistPackage, list[ReqPackage]] = {} + for p in dist_pkgs: + reqs = [] + for r in p.requires(): + d = idx.get(r.key) + # pip's _vendor.packaging.requirements.Requirement uses the exact casing of a dependency's name found in + # a project's build config, which is not ideal when rendering. + # See https://github.com/tox-dev/pipdeptree/issues/242 + r.project_name = d.project_name if d is not None else r.project_name + pkg = ReqPackage(r, d) + reqs.append(pkg) + m[p] = reqs + + return cls(m) + + def __init__(self, m: dict[DistPackage, list[ReqPackage]]) -> None: + """Initialize the PackageDAG object. + + :param dict m: dict of node objects (refer class docstring) + :returns: None + :rtype: NoneType + + """ + self._obj: dict[DistPackage, list[ReqPackage]] = m + self._index: dict[str, DistPackage] = {p.key: p for p in list(self._obj)} + + def get_node_as_parent(self, node_key: str) -> DistPackage | None: + """Get the node from the keys of the dict representing the DAG. + + This method is useful if the dict representing the DAG contains different kind of objects in keys and values. + Use this method to look up a node obj as a parent (from the keys of the dict) given a node key. 
+ + :param node_key: identifier corresponding to key attr of node obj + :returns: node obj (as present in the keys of the dict) + + """ + try: + return self._index[node_key] + except KeyError: + return None + + def get_children(self, node_key: str) -> list[ReqPackage]: + """Get child nodes for a node by its key. + + :param node_key: key of the node to get children of + :returns: child nodes + + """ + node = self.get_node_as_parent(node_key) + return self._obj[node] if node else [] + + def filter_nodes(self, include: list[str] | None, exclude: set[str] | None) -> PackageDAG: # noqa: C901, PLR0912 + """Filter nodes in a graph by given parameters. + + If a node is included, then all it's children are also included. + + :param include: list of node keys to include (or None) + :param exclude: set of node keys to exclude (or None) + :raises ValueError: If include has node keys that do not exist in the graph + :returns: filtered version of the graph + + """ + # If neither of the filters are specified, short circuit + if include is None and exclude is None: + return self + + # Note: In following comparisons, we use lower cased values so + # that user may specify `key` or `project_name`. As per the + # documentation, `key` is simply + # `project_name.lower()`. 
Refer: + # https://setuptools.readthedocs.io/en/latest/pkg_resources.html#distribution-objects + include_with_casing_preserved: list[str] = [] + if include: + include_with_casing_preserved = include + include = [s.lower() for s in include] + exclude = {s.lower() for s in exclude} if exclude else set() + + # Check for mutual exclusion of show_only and exclude sets + # after normalizing the values to lowercase + if include and exclude: + assert not (set(include) & exclude) + + # Traverse the graph in a depth first manner and filter the + # nodes according to `show_only` and `exclude` sets + stack: deque[DistPackage] = deque() + m: dict[DistPackage, list[ReqPackage]] = {} + seen = set() + matched_includes: set[str] = set() + for node in self._obj: + if any(fnmatch(node.key, e) for e in exclude): + continue + if include is None: + stack.append(node) + else: + should_append = False + for i in include: + if fnmatch(node.key, i): + # Add all patterns that match with the node key. Otherwise if we break, patterns like py* or + # pytest* (which both should match "pytest") may cause one pattern to be missed and will + # raise an error + matched_includes.add(i) + should_append = True + if should_append: + stack.append(node) + + while stack: + n = stack.pop() + cldn = [c for c in self._obj[n] if not any(fnmatch(c.key, e) for e in exclude)] + m[n] = cldn + seen.add(n.key) + for c in cldn: + if c.key not in seen: + cld_node = self.get_node_as_parent(c.key) + if cld_node: + stack.append(cld_node) + else: + # It means there's no root node corresponding to the child node i.e. + # a dependency is missing + continue + + non_existent_includes = [i for i in include_with_casing_preserved if i.lower() not in matched_includes] + if non_existent_includes: + raise ValueError("No packages matched using the following patterns: " + ", ".join(non_existent_includes)) + + return self.__class__(m) + + def reverse(self) -> ReversedPackageDAG: + """Reverse the DAG, or turn it upside-down. 
+ + In other words, the directions of edges of the nodes in the DAG will be reversed. + + Note that this function purely works on the nodes in the graph. This implies that to perform a combination of + filtering and reversing, the order in which `filter` and `reverse` methods should be applied is important. For + e.g., if reverse is called on a filtered graph, then only the filtered nodes and it's children will be + considered when reversing. On the other hand, if filter is called on reversed DAG, then the definition of + "child" nodes is as per the reversed DAG. + + :returns: DAG in the reversed form + + """ + m: defaultdict[ReqPackage, list[DistPackage]] = defaultdict(list) + child_keys = {r.key for r in chain.from_iterable(self._obj.values())} + for k, vs in self._obj.items(): + for v in vs: + # if v is already added to the dict, then ensure that + # we are using the same object. This check is required + # as we're using array mutation + node: ReqPackage = next((p for p in m if p.key == v.key), v) + m[node].append(k.as_parent_of(v)) + if k.key not in child_keys: + m[k.as_requirement()] = [] + return ReversedPackageDAG(dict(m)) # type: ignore[arg-type] + + def sort(self) -> PackageDAG: + """Return sorted tree in which the underlying _obj dict is an dict, sorted alphabetically by the keys. + + :returns: Instance of same class with dict + + """ + return self.__class__({k: sorted(v) for k, v in sorted(self._obj.items())}) + + # Methods required by the abstract base class Mapping + def __getitem__(self, arg: DistPackage) -> list[ReqPackage] | None: # type: ignore[override] + return self._obj.get(arg) + + def __iter__(self) -> Iterator[DistPackage]: + return self._obj.__iter__() + + def __len__(self) -> int: + return len(self._obj) + + +class ReversedPackageDAG(PackageDAG): + """Representation of Package dependencies in the reverse order. 
+ + Similar to it's super class `PackageDAG`, the underlying datastructure is a dict, but here the keys are expected to + be of type `ReqPackage` and each item in the values of type `DistPackage`. + + Typically, this object will be obtained by calling `PackageDAG.reverse`. + + """ + + def reverse(self) -> PackageDAG: # type: ignore[override] + """Reverse the already reversed DAG to get the PackageDAG again. + + :returns: reverse of the reversed DAG + + """ + m: defaultdict[DistPackage, list[ReqPackage]] = defaultdict(list) + child_keys = {r.key for r in chain.from_iterable(self._obj.values())} + for k, vs in self._obj.items(): + for v in vs: + assert isinstance(v, DistPackage) + node = next((p for p in m if p.key == v.key), v.as_parent_of(None)) + m[node].append(k) # type: ignore[arg-type] + if k.key not in child_keys: + assert isinstance(k, ReqPackage) + assert k.dist is not None + m[k.dist] = [] + return PackageDAG(dict(m)) + + +__all__ = [ + "PackageDAG", + "ReversedPackageDAG", +] diff --git a/pipenv/vendor/pipdeptree/_models/package.py b/pipenv/vendor/pipdeptree/_models/package.py new file mode 100644 index 0000000000..5d64cf0efe --- /dev/null +++ b/pipenv/vendor/pipdeptree/_models/package.py @@ -0,0 +1,228 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from importlib import import_module +from importlib.metadata import PackageNotFoundError, version +from inspect import ismodule +from typing import TYPE_CHECKING + +from pipenv.patched.pip._vendor.pkg_resources import Requirement + +if TYPE_CHECKING: + from pipenv.patched.pip._internal.metadata import BaseDistribution + from pipenv.patched.pip._vendor.pkg_resources import DistInfoDistribution + + +class Package(ABC): + """Abstract class for wrappers around objects that pip returns.""" + + def __init__(self, obj: DistInfoDistribution) -> None: + self._obj: DistInfoDistribution = obj + + @property + def key(self) -> str: + return self._obj.key # type: ignore[no-any-return] + + 
@property + def project_name(self) -> str: + return self._obj.project_name # type: ignore[no-any-return] + + @abstractmethod + def render_as_root(self, *, frozen: bool) -> str: + raise NotImplementedError + + @abstractmethod + def render_as_branch(self, *, frozen: bool) -> str: + raise NotImplementedError + + @abstractmethod + def as_dict(self) -> dict[str, str | None]: + raise NotImplementedError + + @property + def version_spec(self) -> None | str: + return None + + def render( + self, + parent: DistPackage | ReqPackage | None = None, + *, + frozen: bool = False, + ) -> str: + render = self.render_as_branch if parent else self.render_as_root + return render(frozen=frozen) + + @staticmethod + def as_frozen_repr(obj: DistInfoDistribution) -> str: + # The `pipenv.patched.pip._internal.metadata` modules were introduced in 21.1.1 + # and the `pipenv.patched.pip._internal.operations.freeze.FrozenRequirement` + # class now expects dist to be a subclass of + # `pipenv.patched.pip._internal.metadata.BaseDistribution`, however the + # `pipenv.patched.pip._internal.utils.misc.get_installed_distributions` continues + # to return objects of type + # pipenv.patched.pip._vendor.pkg_resources.DistInfoDistribution. + # + # This is a hacky backward compatible (with older versions of pip) fix. 
+ try: + from pipenv.patched.pip._internal.operations.freeze import FrozenRequirement + except ImportError: + from pipenv.patched.pip import FrozenRequirement # type: ignore[attr-defined, no-redef] + + try: + from pipenv.patched.pip._internal import metadata + except ImportError: + our_dist: BaseDistribution = obj # type: ignore[assignment] + else: + our_dist = metadata.pkg_resources.Distribution(obj) + + try: + fr = FrozenRequirement.from_dist(our_dist) + except TypeError: + fr = FrozenRequirement.from_dist(our_dist, []) # type: ignore[call-arg] + return str(fr).strip() + + def __repr__(self) -> str: + return f'<{self.__class__.__name__}("{self.key}")>' + + def __lt__(self, rhs: Package) -> bool: + return self.key < rhs.key + + +class DistPackage(Package): + """Wrapper class for pkg_resources.Distribution instances. + + :param obj: pkg_resources.Distribution to wrap over + :param req: optional ReqPackage object to associate this DistPackage with. This is useful for displaying the tree in + reverse + + """ + + def __init__(self, obj: DistInfoDistribution, req: ReqPackage | None = None) -> None: + super().__init__(obj) + self.req = req + + def requires(self) -> list[Requirement]: + return self._obj.requires() # type: ignore[no-untyped-call,no-any-return] + + @property + def version(self) -> str: + return self._obj.version # type: ignore[no-any-return] + + def render_as_root(self, *, frozen: bool) -> str: + if not frozen: + return f"{self.project_name}=={self.version}" + return self.as_frozen_repr(self._obj) + + def render_as_branch(self, *, frozen: bool) -> str: + assert self.req is not None + if not frozen: + parent_ver_spec = self.req.version_spec + parent_str = self.req.project_name + if parent_ver_spec: + parent_str += parent_ver_spec + return f"{self.project_name}=={self.version} [requires: {parent_str}]" + return self.render_as_root(frozen=frozen) + + def as_requirement(self) -> ReqPackage: + """Return a ReqPackage representation of this DistPackage.""" + 
return ReqPackage(self._obj.as_requirement(), dist=self) # type: ignore[no-untyped-call] + + def as_parent_of(self, req: ReqPackage | None) -> DistPackage: + """Return a DistPackage instance associated to a requirement. + + This association is necessary for reversing the PackageDAG. + If `req` is None, and the `req` attribute of the current instance is also None, then the same instance will be + returned. + + :param ReqPackage req: the requirement to associate with + :returns: DistPackage instance + + """ + if req is None and self.req is None: + return self + return self.__class__(self._obj, req) + + def as_dict(self) -> dict[str, str | None]: + return {"key": self.key, "package_name": self.project_name, "installed_version": self.version} + + +class ReqPackage(Package): + """Wrapper class for Requirements instance. + + :param obj: The `Requirements` instance to wrap over + :param dist: optional `pkg_resources.Distribution` instance for this requirement + + """ + + UNKNOWN_VERSION = "?" + + def __init__(self, obj: Requirement, dist: DistPackage | None = None) -> None: + super().__init__(obj) + self.dist = dist + + def render_as_root(self, *, frozen: bool) -> str: + if not frozen: + return f"{self.project_name}=={self.installed_version}" + if self.dist: + return self.as_frozen_repr(self.dist._obj) # noqa: SLF001 + return self.project_name + + def render_as_branch(self, *, frozen: bool) -> str: + if not frozen: + req_ver = self.version_spec if self.version_spec else "Any" + return f"{self.project_name} [required: {req_ver}, installed: {self.installed_version}]" + return self.render_as_root(frozen=frozen) + + @property + def version_spec(self) -> str | None: + specs = sorted(self._obj.specs, reverse=True) # `reverse` makes '>' prior to '<' + return ",".join(["".join(sp) for sp in specs]) if specs else None + + @property + def installed_version(self) -> str: + if not self.dist: + try: + return version(self.key) + except PackageNotFoundError: + pass + # Avoid 
AssertionError with setuptools, see https://github.com/tox-dev/pipdeptree/issues/162 + if self.key in {"setuptools"}: + return self.UNKNOWN_VERSION + try: + m = import_module(self.key) + except ImportError: + return self.UNKNOWN_VERSION + else: + v = getattr(m, "__version__", self.UNKNOWN_VERSION) + if ismodule(v): + return getattr(v, "__version__", self.UNKNOWN_VERSION) + return v + return self.dist.version + + @property + def is_missing(self) -> bool: + return self.installed_version == self.UNKNOWN_VERSION + + def is_conflicting(self) -> bool: + """If installed version conflicts with required version.""" + # unknown installed version is also considered conflicting + if self.installed_version == self.UNKNOWN_VERSION: + return True + ver_spec = self.version_spec if self.version_spec else "" + req_version_str = f"{self.project_name}{ver_spec}" + req_obj = Requirement.parse(req_version_str) # type: ignore[no-untyped-call] + return self.installed_version not in req_obj + + def as_dict(self) -> dict[str, str | None]: + return { + "key": self.key, + "package_name": self.project_name, + "installed_version": self.installed_version, + "required_version": self.version_spec, + } + + +__all__ = [ + "DistPackage", + "ReqPackage", +] diff --git a/pipenv/vendor/pipdeptree/_non_host.py b/pipenv/vendor/pipdeptree/_non_host.py new file mode 100644 index 0000000000..4a2e543dec --- /dev/null +++ b/pipenv/vendor/pipdeptree/_non_host.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import os +import sys +from inspect import getsourcefile +from pathlib import Path +from shutil import copytree +from subprocess import call +from tempfile import TemporaryDirectory +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._cli import Options + + +def handle_non_host_target(args: Options) -> int | None: + # if target is not current python re-invoke it under the actual host + py_path = Path(args.python).absolute() + if py_path != Path(sys.executable).absolute(): + # there's no 
way to guarantee that graphviz is available, so refuse + if args.output_format: + print( # noqa: T201 + "graphviz functionality is not supported when querying non-host python", + file=sys.stderr, + ) + raise SystemExit(1) + argv = sys.argv[1:] # remove current python executable + for py_at, value in enumerate(argv): + if value == "--python": + del argv[py_at] + del argv[py_at] + elif value.startswith("--python"): + del argv[py_at] + + src = getsourcefile(sys.modules[__name__]) + assert src is not None + our_root = Path(src).parent + + with TemporaryDirectory() as project: + dest = Path(project) + copytree(our_root, dest / "pipdeptree") + # invoke from an empty folder to avoid cwd altering sys.path + env = os.environ.copy() + env["PYTHONPATH"] = project + cmd = [str(py_path), "-m", "pipdeptree", *argv] + return call(cmd, cwd=project, env=env) # noqa: S603 + return None + + +__all__ = [ + "handle_non_host_target", +] diff --git a/pipenv/vendor/pipdeptree/_render/__init__.py b/pipenv/vendor/pipdeptree/_render/__init__.py new file mode 100644 index 0000000000..d60c8f4a4c --- /dev/null +++ b/pipenv/vendor/pipdeptree/_render/__init__.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from .graphviz import render_graphviz +from .json import render_json +from .json_tree import render_json_tree +from .mermaid import render_mermaid +from .text import render_text + +if TYPE_CHECKING: + from pipenv.vendor.pipdeptree._cli import Options + from pipenv.vendor.pipdeptree._models import PackageDAG + + +def render(options: Options, tree: PackageDAG) -> None: + if options.json: + print(render_json(tree)) # noqa: T201 + elif options.json_tree: + print(render_json_tree(tree)) # noqa: T201 + elif options.mermaid: + print(render_mermaid(tree)) # noqa: T201 + elif options.output_format: + assert options.output_format is not None + render_graphviz(tree, output_format=options.output_format, reverse=options.reverse) + else: + render_text( + tree, + 
max_depth=options.depth, + encoding=options.encoding_type, + list_all=options.all, + frozen=options.freeze, + ) + + +__all__ = [ + "render", +] diff --git a/pipenv/vendor/pipdeptree/_render/graphviz.py b/pipenv/vendor/pipdeptree/_render/graphviz.py new file mode 100644 index 0000000000..2c0b8ba9e6 --- /dev/null +++ b/pipenv/vendor/pipdeptree/_render/graphviz.py @@ -0,0 +1,116 @@ +from __future__ import annotations + +import os +import sys +from typing import TYPE_CHECKING + +from pipenv.vendor.pipdeptree._models import DistPackage, ReqPackage + +if TYPE_CHECKING: + from pipenv.vendor.pipdeptree._models import PackageDAG + + +def dump_graphviz( # noqa: C901, PLR0912 + tree: PackageDAG, + output_format: str = "dot", + is_reverse: bool = False, # noqa: FBT001, FBT002 +) -> str | bytes: + """Output dependency graph as one of the supported GraphViz output formats. + + :param dict tree: dependency graph + :param string output_format: output format + :param bool is_reverse: reverse or not + :returns: representation of tree in the specified output format + :rtype: str or binary representation depending on the output format + + """ + try: + from graphviz import Digraph + except ImportError as exc: + print( # noqa: T201 + "graphviz is not available, but necessary for the output option. Please install it.", + file=sys.stderr, + ) + raise SystemExit(1) from exc + + try: + from graphviz import parameters + except ImportError: + from graphviz import backend + + valid_formats = backend.FORMATS + print( # noqa: T201 + "Deprecation warning! 
Please upgrade graphviz to version >=0.18.0 " + "Support for older versions will be removed in upcoming release", + file=sys.stderr, + ) + else: + valid_formats = parameters.FORMATS + + if output_format not in valid_formats: + print(f"{output_format} is not a supported output format.", file=sys.stderr) # noqa: T201 + print(f"Supported formats are: {', '.join(sorted(valid_formats))}", file=sys.stderr) # noqa: T201 + raise SystemExit(1) + + graph = Digraph(format=output_format) + + if is_reverse: + for dep_rev, parents in tree.items(): + assert isinstance(dep_rev, ReqPackage) + dep_label = f"{dep_rev.project_name}\\n{dep_rev.installed_version}" + graph.node(dep_rev.key, label=dep_label) + for parent in parents: + # req reference of the dep associated with this particular parent package + assert isinstance(parent, DistPackage) + edge_label = (parent.req.version_spec if parent.req is not None else None) or "any" + graph.edge(dep_rev.key, parent.key, label=edge_label) + else: + for pkg, deps in tree.items(): + pkg_label = f"{pkg.project_name}\\n{pkg.version}" + graph.node(pkg.key, label=pkg_label) + for dep in deps: + edge_label = dep.version_spec or "any" + if dep.is_missing: + dep_label = f"{dep.project_name}\\n(missing)" + graph.node(dep.key, label=dep_label, style="dashed") + graph.edge(pkg.key, dep.key, style="dashed") + else: + graph.edge(pkg.key, dep.key, label=edge_label) + + # Allow output of dot format, even if GraphViz isn't installed. + if output_format == "dot": + # Emulates graphviz.dot.Dot.__iter__() to force the sorting of graph.body. + # Fixes https://github.com/tox-dev/pipdeptree/issues/188 + # That way we can guarantee the output of the dot format is deterministic + # and stable. + return "".join([next(iter(graph)), *sorted(graph.body), graph._tail]) # noqa: SLF001 + + # As it's unknown if the selected output format is binary or not, try to + # decode it as UTF8 and only print it out in binary if that's not possible. 
+ try: + return graph.pipe().decode("utf-8") # type: ignore[no-any-return] + except UnicodeDecodeError: + return graph.pipe() # type: ignore[no-any-return] + + +def print_graphviz(dump_output: str | bytes) -> None: + """Dump the data generated by GraphViz to stdout. + + :param dump_output: The output from dump_graphviz + + """ + if hasattr(dump_output, "encode"): + print(dump_output) # noqa: T201 + else: + with os.fdopen(sys.stdout.fileno(), "wb") as bytestream: + bytestream.write(dump_output) + + +def render_graphviz(tree: PackageDAG, *, output_format: str, reverse: bool) -> None: + output = dump_graphviz(tree, output_format=output_format, is_reverse=reverse) + print_graphviz(output) + + +__all__ = [ + "render_graphviz", +] diff --git a/pipenv/vendor/pipdeptree/_render/json.py b/pipenv/vendor/pipdeptree/_render/json.py new file mode 100644 index 0000000000..c5d48601ac --- /dev/null +++ b/pipenv/vendor/pipdeptree/_render/json.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +import json +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pipenv.vendor.pipdeptree._models import PackageDAG + + +def render_json(tree: PackageDAG) -> str: + """Convert the tree into a flat json representation. 
+ + The json repr will be a list of hashes, each hash having 2 fields: + - package + - dependencies: list of dependencies + + :param tree: dependency tree + :returns: JSON representation of the tree + + """ + tree = tree.sort() + return json.dumps( + [{"package": k.as_dict(), "dependencies": [v.as_dict() for v in vs]} for k, vs in tree.items()], + indent=4, + ) + + +__all__ = [ + "render_json", +] diff --git a/pipenv/vendor/pipdeptree/_render/json_tree.py b/pipenv/vendor/pipdeptree/_render/json_tree.py new file mode 100644 index 0000000000..bd9a1393cb --- /dev/null +++ b/pipenv/vendor/pipdeptree/_render/json_tree.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +import json +from itertools import chain +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from pipenv.vendor.pipdeptree._models import DistPackage, PackageDAG, ReqPackage + + +def render_json_tree(tree: PackageDAG) -> str: + """Convert the tree into a nested json representation. + + The json repr will be a list of hashes, each hash having the following fields: + + - package_name + - key + - required_version + - installed_version + - dependencies: list of dependencies + + :param tree: dependency tree + :returns: json representation of the tree + + """ + tree = tree.sort() + branch_keys = {r.key for r in chain.from_iterable(tree.values())} + nodes = [p for p in tree if p.key not in branch_keys] + + def aux( + node: DistPackage | ReqPackage, + parent: DistPackage | ReqPackage | None = None, + cur_chain: list[str] | None = None, + ) -> dict[str, Any]: + if cur_chain is None: + cur_chain = [node.project_name] + + d: dict[str, str | list[Any] | None] = node.as_dict() # type: ignore[assignment] + if parent: + d["required_version"] = node.version_spec if node.version_spec else "Any" + else: + d["required_version"] = d["installed_version"] + + d["dependencies"] = [ + aux(c, parent=node, cur_chain=[*cur_chain, c.project_name]) + for c in tree.get_children(node.key) + if c.project_name not in 
cur_chain + ] + + return d + + return json.dumps([aux(p) for p in nodes], indent=4) + + +__all__ = [ + "render_json_tree", +] diff --git a/pipenv/vendor/pipdeptree/_render/mermaid.py b/pipenv/vendor/pipdeptree/_render/mermaid.py new file mode 100644 index 0000000000..4b77f28c96 --- /dev/null +++ b/pipenv/vendor/pipdeptree/_render/mermaid.py @@ -0,0 +1,112 @@ +from __future__ import annotations + +import itertools as it +from typing import TYPE_CHECKING, Final + +from pipenv.vendor.pipdeptree._models import DistPackage, ReqPackage, ReversedPackageDAG + +if TYPE_CHECKING: + from pipenv.vendor.pipdeptree._models import PackageDAG + +_RESERVED_IDS: Final[frozenset[str]] = frozenset( + [ + "C4Component", + "C4Container", + "C4Deployment", + "C4Dynamic", + "_blank", + "_parent", + "_self", + "_top", + "call", + "class", + "classDef", + "click", + "end", + "flowchart", + "flowchart-v2", + "graph", + "interpolate", + "linkStyle", + "style", + "subgraph", + ], +) + + +def render_mermaid(tree: PackageDAG) -> str: # noqa: C901 + """Produce a Mermaid flowchart from the dependency graph. + + :param tree: dependency graph + + """ + # List of reserved keywords in Mermaid that cannot be used as node names. + # See: https://github.com/mermaid-js/mermaid/issues/4182#issuecomment-1454787806 + + node_ids_map: dict[str, str] = {} + + def mermaid_id(key: str) -> str: + """Return a valid Mermaid node ID from a string.""" + # If we have already seen this key, return the canonical ID. + canonical_id = node_ids_map.get(key) + if canonical_id is not None: + return canonical_id + # If the key is not a reserved keyword, return it as is, and update the map. + if key not in _RESERVED_IDS: + node_ids_map[key] = key + return key + # If the key is a reserved keyword, append a number to it. + for number in it.count(): + new_id = f"{key}_{number}" + if new_id not in node_ids_map: + node_ids_map[key] = new_id + return new_id + raise NotImplementedError + + # Use a sets to avoid duplicate entries. 
+ nodes: set[str] = set() + edges: set[str] = set() + + if isinstance(tree, ReversedPackageDAG): + for package, reverse_dependencies in tree.items(): + assert isinstance(package, ReqPackage) + package_label = "\\n".join( + (package.project_name, "(missing)" if package.is_missing else package.installed_version), + ) + package_key = mermaid_id(package.key) + nodes.add(f'{package_key}["{package_label}"]') + for reverse_dependency in reverse_dependencies: + assert isinstance(reverse_dependency, DistPackage) + edge_label = ( + reverse_dependency.req.version_spec if reverse_dependency.req is not None else None + ) or "any" + reverse_dependency_key = mermaid_id(reverse_dependency.key) + edges.add(f'{package_key} -- "{edge_label}" --> {reverse_dependency_key}') + else: + for package, dependencies in tree.items(): + package_label = f"{package.project_name}\\n{package.version}" + package_key = mermaid_id(package.key) + nodes.add(f'{package_key}["{package_label}"]') + for dependency in dependencies: + edge_label = dependency.version_spec or "any" + dependency_key = mermaid_id(dependency.key) + if dependency.is_missing: + dependency_label = f"{dependency.project_name}\\n(missing)" + nodes.add(f'{dependency_key}["{dependency_label}"]:::missing') + edges.add(f"{package_key} -.-> {dependency_key}") + else: + edges.add(f'{package_key} -- "{edge_label}" --> {dependency_key}') + + # Produce the Mermaid Markdown. 
+ lines = [ + "flowchart TD", + "classDef missing stroke-dasharray: 5", + *sorted(nodes), + *sorted(edges), + ] + return "".join(f"{' ' if i else ''}{line}\n" for i, line in enumerate(lines)) + + +__all__ = [ + "render_mermaid", +] diff --git a/pipenv/vendor/pipdeptree/_render/text.py b/pipenv/vendor/pipdeptree/_render/text.py new file mode 100644 index 0000000000..1d4365b863 --- /dev/null +++ b/pipenv/vendor/pipdeptree/_render/text.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +from itertools import chain +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from pipenv.vendor.pipdeptree._models import DistPackage, PackageDAG, ReqPackage + + +def render_text( + tree: PackageDAG, + *, + max_depth: float, + encoding: str, + list_all: bool = True, + frozen: bool = False, +) -> None: + """Print tree as text on console. + + :param tree: the package tree + :param list_all: whether to list all the pgks at the root level or only those that are the sub-dependencies + :param frozen: show the names of the pkgs in the output that's favourable to pip --freeze + :returns: None + + """ + tree = tree.sort() + nodes = list(tree.keys()) + branch_keys = {r.key for r in chain.from_iterable(tree.values())} + + if not list_all: + nodes = [p for p in nodes if p.key not in branch_keys] + + if encoding in ("utf-8", "utf-16", "utf-32"): + _render_text_with_unicode(tree, nodes, max_depth, frozen) + else: + _render_text_without_unicode(tree, nodes, max_depth, frozen) + + +def _render_text_with_unicode( + tree: PackageDAG, + nodes: list[DistPackage], + max_depth: float, + frozen: bool, # noqa: FBT001 +) -> None: + use_bullets = not frozen + + def aux( # noqa: PLR0913 + node: DistPackage | ReqPackage, + parent: DistPackage | ReqPackage | None = None, + indent: int = 0, + cur_chain: list[str] | None = None, + prefix: str = "", + depth: int = 0, + has_grand_parent: bool = False, # noqa: FBT001, FBT002 + is_last_child: bool = False, # noqa: FBT001, FBT002 + 
parent_is_last_child: bool = False, # noqa: FBT001, FBT002 + ) -> list[Any]: + cur_chain = cur_chain or [] + node_str = node.render(parent, frozen=frozen) + next_prefix = "" + next_indent = indent + 2 + + if parent: + bullet = "├── " + if is_last_child: + bullet = "└── " + + line_char = "│" + if not use_bullets: + line_char = "" + # Add 2 spaces so direct dependencies to a project are indented + bullet = " " + + if has_grand_parent: + next_indent -= 1 + if parent_is_last_child: + offset = 0 if len(line_char) == 1 else 1 + prefix += " " * (indent + 1 - offset - depth) + else: + prefix += line_char + " " * (indent - depth) + # Without this extra space, bullets will point to the space just before the project name + prefix += " " if use_bullets else "" + next_prefix = prefix + node_str = prefix + bullet + node_str + result = [node_str] + + children = tree.get_children(node.key) + children_strings = [ + aux( + c, + node, + indent=next_indent, + cur_chain=[*cur_chain, c.project_name], + prefix=next_prefix, + depth=depth + 1, + has_grand_parent=parent is not None, + is_last_child=c is children[-1], + parent_is_last_child=is_last_child, + ) + for c in children + if c.project_name not in cur_chain and depth + 1 <= max_depth + ] + + result += list(chain.from_iterable(children_strings)) + return result + + lines = chain.from_iterable([aux(p) for p in nodes]) + print("\n".join(lines)) # noqa: T201 + + +def _render_text_without_unicode( + tree: PackageDAG, + nodes: list[DistPackage], + max_depth: float, + frozen: bool, # noqa: FBT001 +) -> None: + use_bullets = not frozen + + def aux( + node: DistPackage | ReqPackage, + parent: DistPackage | ReqPackage | None = None, + indent: int = 0, + cur_chain: list[str] | None = None, + depth: int = 0, + ) -> list[Any]: + cur_chain = cur_chain or [] + node_str = node.render(parent, frozen=frozen) + if parent: + prefix = " " * indent + ("- " if use_bullets else "") + node_str = prefix + node_str + result = [node_str] + children = [ + aux(c, 
node, indent=indent + 2, cur_chain=[*cur_chain, c.project_name], depth=depth + 1) + for c in tree.get_children(node.key) + if c.project_name not in cur_chain and depth + 1 <= max_depth + ] + result += list(chain.from_iterable(children)) + return result + + lines = chain.from_iterable([aux(p) for p in nodes]) + print("\n".join(lines)) # noqa: T201 + + +__all__ = [ + "render_text", +] diff --git a/pipenv/vendor/pipdeptree/_validate.py b/pipenv/vendor/pipdeptree/_validate.py new file mode 100644 index 0000000000..ffce300acf --- /dev/null +++ b/pipenv/vendor/pipdeptree/_validate.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +import sys +from collections import defaultdict +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pipenv.vendor.pipdeptree._models.package import Package + + from ._cli import Options + from ._models import DistPackage, PackageDAG, ReqPackage + + +def validate(args: Options, is_text_output: bool, tree: PackageDAG) -> int: # noqa: FBT001 + # Before any reversing or filtering, show warnings to console, about possibly conflicting or cyclic deps if found + # and warnings are enabled (i.e. only if output is to be printed to console) + if is_text_output and args.warn != "silence": + conflicts = conflicting_deps(tree) + if conflicts: + render_conflicts_text(conflicts) + print("-" * 72, file=sys.stderr) # noqa: T201 + + cycles = cyclic_deps(tree) + if cycles: + render_cycles_text(cycles) + print("-" * 72, file=sys.stderr) # noqa: T201 + + if args.warn == "fail" and (conflicts or cycles): + return 1 + return 0 + + +def conflicting_deps(tree: PackageDAG) -> dict[DistPackage, list[ReqPackage]]: + """Return dependencies which are not present or conflict with the requirements of other packages. + + e.g. 
will warn if pkg1 requires pkg2==2.0 and pkg2==1.0 is installed + + :param tree: the requirements tree (dict) + :returns: dict of DistPackage -> list of unsatisfied/unknown ReqPackage + :rtype: dict + + """ + conflicting = defaultdict(list) + for package, requires in tree.items(): + for req in requires: + if req.is_conflicting(): + conflicting[package].append(req) + return conflicting + + +def render_conflicts_text(conflicts: dict[DistPackage, list[ReqPackage]]) -> None: + if conflicts: + print("Warning!!! Possibly conflicting dependencies found:", file=sys.stderr) # noqa: T201 + # Enforce alphabetical order when listing conflicts + pkgs = sorted(conflicts.keys()) + for p in pkgs: + pkg = p.render_as_root(frozen=False) + print(f"* {pkg}", file=sys.stderr) # noqa: T201 + for req in conflicts[p]: + req_str = req.render_as_branch(frozen=False) + print(f" - {req_str}", file=sys.stderr) # noqa: T201 + + +def cyclic_deps(tree: PackageDAG) -> list[list[Package]]: + """Return cyclic dependencies as list of lists. + + :param tree: package tree/dag + :returns: list of lists, where each list represents a cycle + + """ + + def dfs(root: DistPackage, current: Package, visited: set[str], cdeps: list[Package]) -> bool: + if current.key not in visited: + visited.add(current.key) + current_dist = tree.get_node_as_parent(current.key) + if not current_dist: + return False + + reqs = tree.get(current_dist) + if not reqs: + return False + + for req in reqs: + if dfs(root, req, visited, cdeps): + cdeps.append(current) + return True + elif current.key == root.key: + cdeps.append(current) + return True + return False + + cycles: list[list[Package]] = [] + + for p in tree: + cdeps: list[Package] = [] + visited: set[str] = set() + if dfs(p, p, visited, cdeps): + cdeps.reverse() + cycles.append(cdeps) + + return cycles + + +def render_cycles_text(cycles: list[list[Package]]) -> None: + if cycles: + print("Warning!! 
Cyclic dependencies found:", file=sys.stderr) # noqa: T201 + # List in alphabetical order the dependency that caused the cycle (i.e. the second-to-last Package element) + cycles = sorted(cycles, key=lambda c: c[len(c) - 2].key) + for cycle in cycles: + print("*", end=" ", file=sys.stderr) # noqa: T201 + + size = len(cycle) - 1 + for idx, pkg in enumerate(cycle): + if idx == size: + print(f"{pkg.project_name}", end="", file=sys.stderr) # noqa: T201 + else: + print(f"{pkg.project_name} =>", end=" ", file=sys.stderr) # noqa: T201 + print(file=sys.stderr) # noqa: T201 + + +__all__ = [ + "validate", +] diff --git a/pipenv/vendor/pipdeptree/py.typed b/pipenv/vendor/pipdeptree/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/vendor/pipdeptree/version.py b/pipenv/vendor/pipdeptree/version.py index dcef104cc3..3c2d0cfd1a 100644 --- a/pipenv/vendor/pipdeptree/version.py +++ b/pipenv/vendor/pipdeptree/version.py @@ -1,4 +1,16 @@ # file generated by setuptools_scm # don't change, don't track in version control -__version__ = version = '2.8.0' -__version_tuple__ = version_tuple = (2, 8, 0) +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple, Union + VERSION_TUPLE = Tuple[Union[int, str], ...] 
+else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '2.13.1' +__version_tuple__ = version_tuple = (2, 13, 1) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index e4d86094c8..395ac25a94 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -5,7 +5,7 @@ dparse==0.6.3 markupsafe==2.1.2 pep517==0.13.0 pexpect==4.8.0 -pipdeptree==2.8.0 +pipdeptree==2.13.1 plette==0.4.4 ptyprocess==0.7.0 pydantic==1.10.10 diff --git a/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch b/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch index e123d133fa..5d438aa1ee 100644 --- a/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch +++ b/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch @@ -1,10 +1,18 @@ diff --git a/pipenv/vendor/pipdeptree/__main__.py b/pipenv/vendor/pipdeptree/__main__.py -index 85cca3c..a002019 100644 +index cb48791..d1c24f5 100644 --- a/pipenv/vendor/pipdeptree/__main__.py +++ b/pipenv/vendor/pipdeptree/__main__.py -@@ -1,5 +1,13 @@ +@@ -1,6 +1,7 @@ + """The main entry point used for CLI.""" + from __future__ import annotations + +import os import sys + from typing import Sequence + +@@ -12,6 +13,13 @@ from pipdeptree._render import render + from pipenv.vendor.pipdeptree._validate import validate + +pardir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +# for finding pipdeptree itself @@ -13,6 +21,6 @@ index 85cca3c..a002019 100644 +sys.path.append(os.path.dirname(os.path.dirname(pardir))) + + - from pipenv.vendor.pipdeptree import main - - if __name__ == "__main__": + def main(args: Sequence[str] | None = None) -> None | int: + """CLI - The main function called as entry point.""" + options = get_options(args) From b1e69d07afc4f94c6148f4ec65834e341a67b10d Mon Sep 17 00:00:00 2001 From: Oz Tiram Date: Thu, 4 Jan 2024 14:59:33 +0100 Subject: [PATCH 3/9] Fix 
import path of PackageDAG pipdeptree moved it in the recent versions. Signed-off-by: Oz Tiram --- pipenv/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index 26975ce0dc..7e0206adec 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -678,7 +678,7 @@ def _get_requirements_for_package(cls, node, key_tree, parent=None, chain=None): def get_package_requirements(self, pkg=None): from itertools import chain - from pipenv.vendor.pipdeptree import PackageDAG + from pipenv.vendor.pipdeptree._models import PackageDAG flatten = chain.from_iterable From 4e45725fc8ad0e2815ed6122ebc3a921983dad9e Mon Sep 17 00:00:00 2001 From: Oz Tiram Date: Thu, 4 Jan 2024 15:06:33 +0100 Subject: [PATCH 4/9] Vendoring: add snippet for bumping pipdeptree Signed-off-by: Oz Tiram --- news/6055.vendor.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/6055.vendor.rst diff --git a/news/6055.vendor.rst b/news/6055.vendor.rst new file mode 100644 index 0000000000..79316fec96 --- /dev/null +++ b/news/6055.vendor.rst @@ -0,0 +1 @@ +Bump version of pipdeptree to 0.13.2 From c87379046f73a9ec2acf008af410518498264980 Mon Sep 17 00:00:00 2001 From: Oz Tiram Date: Thu, 4 Jan 2024 21:23:25 +0100 Subject: [PATCH 5/9] Fix patch applied at vendoring to pipdeptree Signed-off-by: Oz Tiram --- pipenv/vendor/pipdeptree/__main__.py | 13 ++++++------- .../vendor/pipdeptree-update-pip-import.patch | 15 +++++---------- 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/pipenv/vendor/pipdeptree/__main__.py b/pipenv/vendor/pipdeptree/__main__.py index 21661a8d1f..6669ee9733 100644 --- a/pipenv/vendor/pipdeptree/__main__.py +++ b/pipenv/vendor/pipdeptree/__main__.py @@ -5,6 +5,12 @@ import sys from typing import Sequence +pardir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +# for finding pipdeptree itself +sys.path.append(pardir) +# for finding stuff in vendor and patched 
+sys.path.append(os.path.dirname(os.path.dirname(pardir))) + from pipenv.vendor.pipdeptree._cli import get_options from pipenv.vendor.pipdeptree._discovery import get_installed_distributions from pipenv.vendor.pipdeptree._models import PackageDAG @@ -13,13 +19,6 @@ from pipenv.vendor.pipdeptree._validate import validate -pardir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -# for finding pipdeptree itself -sys.path.append(pardir) -# for finding stuff in vendor and patched -sys.path.append(os.path.dirname(os.path.dirname(pardir))) - - def main(args: Sequence[str] | None = None) -> None | int: """CLI - The main function called as entry point.""" options = get_options(args) diff --git a/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch b/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch index 5d438aa1ee..b8d195743c 100644 --- a/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch +++ b/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch @@ -1,8 +1,8 @@ diff --git a/pipenv/vendor/pipdeptree/__main__.py b/pipenv/vendor/pipdeptree/__main__.py -index cb48791..d1c24f5 100644 +index cb48791..77ebab7 100644 --- a/pipenv/vendor/pipdeptree/__main__.py +++ b/pipenv/vendor/pipdeptree/__main__.py -@@ -1,6 +1,7 @@ +@@ -1,9 +1,16 @@ """The main entry point used for CLI.""" from __future__ import annotations @@ -10,17 +10,12 @@ index cb48791..d1c24f5 100644 import sys from typing import Sequence -@@ -12,6 +13,13 @@ from pipdeptree._render import render - from pipenv.vendor.pipdeptree._validate import validate - - +pardir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +# for finding pipdeptree itself +sys.path.append(pardir) +# for finding stuff in vendor and patched +sys.path.append(os.path.dirname(os.path.dirname(pardir))) + -+ - def main(args: Sequence[str] | None = None) -> None | int: - """CLI - The main function called as entry point.""" - options = get_options(args) + from 
pipenv.vendor.pipdeptree._cli import get_options + from pipenv.vendor.pipdeptree._discovery import get_installed_distributions + from pipenv.vendor.pipdeptree._models import PackageDAG From abefb159138a9409bdddb72e86377a0c75d76905 Mon Sep 17 00:00:00 2001 From: Oz Tiram Date: Thu, 4 Jan 2024 21:57:14 +0100 Subject: [PATCH 6/9] Fix tests for reverse depency graph pyyaml is now returned as PyYAML. Signed-off-by: Oz Tiram --- tests/integration/test_cli.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index c31fa40e62..df760e49b5 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -119,20 +119,28 @@ def test_pipenv_graph_reverse(pipenv_instance_private_pypi): for dep_name, dep_constraint in requests_dependency: pat = fr'{dep_name}==[\d.]+' - dep_match = re.search(pat, output, flags=re.MULTILINE) + dep_match = re.search(pat, + output, + flags=re.MULTILINE | re.IGNORECASE) assert dep_match is not None, f'{pat} not found in {output}' # openpyxl should be indented if dep_name == 'openpyxl': - openpyxl_dep = re.search(r'^openpyxl', output, flags=re.MULTILINE) + openpyxl_dep = re.search(r'^openpyxl', + output, + flags=re.MULTILINE | re.IGNORECASE) assert openpyxl_dep is None, f'openpyxl should not appear at beginning of lines in {output}' assert 'openpyxl==2.5.4 [requires: et-xmlfile]' in output else: - dep_match = re.search(fr'^[ -]*{dep_name}==[\d.]+$', output, flags=re.MULTILINE) + dep_match = re.search(fr'^[ -]*{dep_name}==[\d.]+$', + output, + flags=re.MULTILINE | re.IGNORECASE) assert dep_match is not None, f'{dep_name} not found at beginning of line in {output}' - dep_requests_match = re.search(fr'└── tablib==0.13.0 \[requires: {dep_constraint}', output, flags=re.MULTILINE) + dep_requests_match = re.search(fr'└── tablib==0.13.0 \[requires: {dep_constraint}', + output, + flags=re.MULTILINE | re.IGNORECASE) assert dep_requests_match is not 
None, f'constraint {dep_constraint} not found in {output}' assert dep_requests_match.start() > dep_match.start() From dc261212c845d3f5e33b472ba11008b07cb8ea19 Mon Sep 17 00:00:00 2001 From: Aleksandr Mangin Date: Fri, 5 Jan 2024 14:07:39 +0100 Subject: [PATCH 7/9] fixed a bug with locking packages with uncanonical names --- news/6056.bugfix.rst | 1 + .../resolution/resolvelib/factory.py | 3 ++- tests/integration/test_lock.py | 27 +++++++++++++++++++ 3 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 news/6056.bugfix.rst diff --git a/news/6056.bugfix.rst b/news/6056.bugfix.rst new file mode 100644 index 0000000000..8ff134e3c6 --- /dev/null +++ b/news/6056.bugfix.rst @@ -0,0 +1 @@ +Fix a bug with locking projects that contains packages with non canonical names from private indexes diff --git a/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py b/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py index fb8274a59b..250006ad6a 100644 --- a/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py +++ b/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py @@ -247,6 +247,7 @@ def _iter_found_candidates( # Hopefully the Project model can correct this mismatch in the future. 
template = ireqs[0] assert template.req, "Candidates found on index must be PEP 508" + project_name = template.req.name name = canonicalize_name(template.req.name) extras: FrozenSet[str] = frozenset() @@ -282,7 +283,7 @@ def _get_installed_candidate() -> Optional[Candidate]: def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]: result = self._finder.find_best_candidate( - project_name=name, + project_name=project_name, specifier=specifier, hashes=hashes, ) diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index 0259f6ba13..99f5cd559c 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -285,6 +285,33 @@ def test_private_index_lock_requirements(pipenv_instance_private_pypi): assert c.returncode == 0 +@pytest.mark.lock +@pytest.mark.index +@pytest.mark.install # private indexes need to be uncached for resolution +@pytest.mark.requirements +@pytest.mark.needs_internet +def test_private_index_lock_requirements_for_not_canonical_package(pipenv_instance_private_pypi): + with pipenv_instance_private_pypi() as p: + with open(p.pipfile_path, 'w') as f: + contents = """ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[[source]] +url = "https://test.pypi.org/simple" +verify_ssl = true +name = "testpypi" + +[packages] +pipenv_test_private_package = {version = "*", index = "testpypi"} + """.strip() + f.write(contents) + c = p.pipenv('lock') + assert c.returncode == 0 + + @pytest.mark.index @pytest.mark.install def test_lock_updated_source(pipenv_instance_private_pypi): From 463d9c8999caa75ec13d5187073d3cbf39345d97 Mon Sep 17 00:00:00 2001 From: Aleksandr Mangin Date: Mon, 8 Jan 2024 13:08:19 +0100 Subject: [PATCH 8/9] built index mapping using canonical package names instead of raw package names --- .../resolution/resolvelib/factory.py | 3 +-- pipenv/utils/resolver.py | 20 ++++++++++++++----- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git 
a/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py b/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py index 250006ad6a..fb8274a59b 100644 --- a/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py +++ b/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py @@ -247,7 +247,6 @@ def _iter_found_candidates( # Hopefully the Project model can correct this mismatch in the future. template = ireqs[0] assert template.req, "Candidates found on index must be PEP 508" - project_name = template.req.name name = canonicalize_name(template.req.name) extras: FrozenSet[str] = frozenset() @@ -283,7 +282,7 @@ def _get_installed_candidate() -> Optional[Candidate]: def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]: result = self._finder.find_best_candidate( - project_name=project_name, + project_name=name, specifier=specifier, hashes=hashes, ) diff --git a/pipenv/utils/resolver.py b/pipenv/utils/resolver.py index 858f7047c1..a8c313e5ca 100644 --- a/pipenv/utils/resolver.py +++ b/pipenv/utils/resolver.py @@ -25,6 +25,7 @@ from pipenv.patched.pip._internal.req.req_install import InstallRequirement from pipenv.patched.pip._internal.utils.temp_dir import global_tempdir_manager from pipenv.patched.pip._vendor import pkg_resources, rich +from pipenv.patched.pip._vendor.packaging.utils import canonicalize_name from pipenv.project import Project from pipenv.utils.fileutils import create_tracked_tempdir from pipenv.utils.requirements import normalize_name @@ -200,6 +201,7 @@ def create( for package_name, dep in deps.items(): # Build up the index and markers lookups if not dep: continue + canonical_package_name = canonicalize_name(package_name) is_constraint = True install_req, _ = expansive_install_req_from_line(dep, expand_env=True) original_deps[package_name] = dep @@ -210,14 +212,18 @@ def create( pipfile_entries[package_name] = pipfile_entry if isinstance(pipfile_entry, dict): if packages[package_name].get("index"): - 
index_lookup[package_name] = packages[package_name].get("index") + index_lookup[canonical_package_name] = packages[package_name].get( + "index" + ) if packages[package_name].get("skip_resolver"): is_constraint = False skipped[package_name] = dep elif index: - index_lookup[package_name] = index + index_lookup[canonical_package_name] = index else: - index_lookup[package_name] = project.get_default_index()["name"] + index_lookup[canonical_package_name] = project.get_default_index()[ + "name" + ] if install_req.markers: markers_lookup[package_name] = install_req.markers if is_constraint: @@ -546,9 +552,13 @@ def collect_hashes(self, ireq): return set() sources = self.sources # Enforce index restrictions - if ireq.name in self.index_lookup: + canonical_ireq_name = canonicalize_name(ireq.name) + if canonical_ireq_name in self.index_lookup: sources = list( - filter(lambda s: s.get("name") == self.index_lookup[ireq.name], sources) + filter( + lambda s: s.get("name") == self.index_lookup[canonical_ireq_name], + sources, + ) ) source = sources[0] if len(sources) else None if source: From cdaaa3084809ed48a7e7f684838d25f77978a1a0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Jan 2024 19:55:59 +0000 Subject: [PATCH 9/9] Bump jinja2 from 3.1.2 to 3.1.3 Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.2 to 3.1.3. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.2...3.1.3) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 899fc02ad6..4d4584ed82 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -6,7 +6,7 @@ click==8.0.3 docutils==0.17.1 idna==3.4 imagesize==1.4.1 -Jinja2==3.1.2 +Jinja2==3.1.3 MarkupSafe==2.1.2 myst-parser[linkify]==0.18.1 -e .