pipdeptree-2.30.0/0000775000175000017510000000000015105004414013260 5ustar nileshnileshpipdeptree-2.30.0/tox.toml0000664000175000017510000000413115105004414014766 0ustar nileshnileshrequires = [ "tox>=4.31", "tox-uv>=1.28.1" ] env_list = [ "fix", "3.14", "3.13", "3.12", "3.11", "3.10", "type", "pkg_meta", ] skip_missing_interpreters = true [env_run_base] description = "run the unit tests with pytest under {base_python}" package = "wheel" wheel_build_env = ".pkg" extras = [ "graphviz", "test" ] pass_env = [ "DIFF_AGAINST", "PYTEST_*" ] set_env.COVERAGE_FILE = "{work_dir}/.coverage.{env_name}" commands = [ [ "python", "-m", "pytest", "{tty:--color=yes}", { replace = "posargs", extend = true, default = [ "--cov", "{env_site_packages_dir}{/}pipdeptree", "--cov", "{tox_root}{/}tests", "--cov-config=pyproject.toml", "--no-cov-on-fail", "--cov-report", "term-missing:skip-covered", "--cov-context=test", "--cov-report", "html:{env_tmp_dir}{/}htmlcov", "--cov-report", "xml:{work_dir}{/}coverage.{env_name}.xml", "--junitxml", "{work_dir}{/}junit.{env_name}.xml", "tests", ] }, ], [ "diff-cover", "--compare-branch", "{env:DIFF_AGAINST:origin/main}", "{work_dir}{/}coverage.{env_name}.xml", "--fail-under", "100", ], ] [env.fix] description = "format the code base to adhere to our styles, and complain about what we cannot do automatically" skip_install = true deps = [ "pre-commit-uv>=4.1.3" ] commands = [ [ "pre-commit", "run", "--all-files", "--show-diff-on-failure" ] ] [env.type] description = "run type check on code base" deps = [ "mypy==1.11.2" ] commands = [ [ "mypy", "src" ], [ "mypy", "tests" ] ] [env.pkg_meta] description = "check that the long description is valid" skip_install = true deps = [ "check-wheel-contents>=0.6", "twine>=5.1.1", "uv>=0.4.10" ] commands = [ [ "uv", "build", "--sdist", "--wheel", "--out-dir", "{env_tmp_dir}", "." 
], [ "twine", "check", "{env_tmp_dir}{/}*" ], [ "check-wheel-contents", "--no-config", "{env_tmp_dir}" ], ] [env.dev] description = "generate a DEV environment" package = "editable" commands = [ [ "uv", "pip", "tree" ], [ "python", "-c", "import sys; print(sys.executable)" ] ] pipdeptree-2.30.0/tests/0000775000175000017510000000000015105004414014422 5ustar nileshnileshpipdeptree-2.30.0/tests/test_warning.py0000664000175000017510000000216415105004414017503 0ustar nileshnileshfrom __future__ import annotations import pytest from pipdeptree._warning import WarningPrinter, WarningType @pytest.mark.parametrize( ("warning", "expected_type"), [ ("silence", WarningType.SILENCE), ("suppress", WarningType.SUPPRESS), ("fail", WarningType.FAIL), ], ) def test_warning_type_from_str_normal(warning: str, expected_type: WarningType) -> None: warning_type = WarningType.from_str(warning) assert expected_type == warning_type def test_warning_type_from_str_invalid_warning() -> None: with pytest.raises(ValueError, match="Unknown WarningType string value provided"): WarningType.from_str("non-existent-warning-type") def test_warning_printer_print_single_line(capsys: pytest.CaptureFixture[str]) -> None: # Use WarningType.FAIL so that we can be able to test to see if WarningPrinter remembers it has warned before. 
warning_printer = WarningPrinter(WarningType.FAIL) warning_printer.print_single_line("test") assert warning_printer.has_warned_with_failure() out, err = capsys.readouterr() assert len(out) == 0 assert err == "test\n" pipdeptree-2.30.0/tests/test_validate.py0000664000175000017510000001314715105004414017632 0ustar nileshnileshfrom __future__ import annotations from typing import TYPE_CHECKING import pytest from pipdeptree._models import PackageDAG from pipdeptree._validate import conflicting_deps, cyclic_deps, render_conflicts_text, render_cycles_text, validate if TYPE_CHECKING: from collections.abc import Callable, Iterator from unittest.mock import Mock from tests.our_types import MockGraph @pytest.mark.parametrize( ("mpkgs", "expected_keys", "expected_output"), [ pytest.param( { ("a", "1.0.1"): [("b", [(">=", "2.0.0")])], ("b", "2.3.0"): [("a", [(">=", "1.0.1")])], ("c", "4.5.0"): [("d", [("==", "2.0")])], ("d", "2.0"): [], }, [["a", "b", "a"], ["b", "a", "b"]], ["* b => a => b", "* a => b => a"], id="depth-of-2", ), pytest.param( { ("a", "1.0.1"): [("b", [(">=", "2.0.0")]), ("e", [("==", "2.0")])], ("b", "2.3.0"): [("c", [(">=", "4.5.0")])], ("c", "4.5.0"): [("d", [("==", "1.0.1")])], ("d", "1.0.0"): [("a", [("==", "1.0.1")]), ("e", [("==", "2.0")])], ("e", "2.0"): [], }, [ ["b", "c", "d", "a", "b"], ["c", "d", "a", "b", "c"], ["d", "a", "b", "c", "d"], ["a", "b", "c", "d", "a"], ], [ "* b => c => d => a => b", "* c => d => a => b => c", "* d => a => b => c => d", "* a => b => c => d => a", ], id="depth-greater-than-2", ), pytest.param( {("a", "1.0.1"): [("b", [(">=", "2.0.0")])], ("b", "2.0.0"): []}, [], [], id="no-cycle", ), pytest.param( { ("a", "1.0.1"): [("b", [(">=", "2.0.0")])], }, [], [], id="dependency-not-installed", ), pytest.param({("a", "1.0.1"): []}, [], [], id="no-dependencies"), ], ) def test_cyclic_deps( capsys: pytest.CaptureFixture[str], mpkgs: MockGraph, expected_keys: list[list[str]], expected_output: list[str], mock_pkgs: 
Callable[[MockGraph], Iterator[Mock]], ) -> None: tree = PackageDAG.from_pkgs(list(mock_pkgs(mpkgs))) result = cyclic_deps(tree) result_keys = [[dep.key for dep in deps] for deps in result] assert sorted(expected_keys) == sorted(result_keys) render_cycles_text(result) captured = capsys.readouterr() assert "\n".join(expected_output).strip() == captured.err.strip() @pytest.mark.parametrize( ("mpkgs", "expected_keys", "expected_output"), [ ( {("a", "1.0.1"): [("b", [(">=", "2.3.0")])], ("b", "1.9.1"): []}, {"a": ["b"]}, [ "* a==1.0.1", " - b [required: >=2.3.0, installed: 1.9.1]", ], ), ( {("a", "1.0.1"): [("c", [(">=", "9.4.1")])], ("b", "2.3.0"): [("c", [(">=", "7.0")])], ("c", "8.0.1"): []}, {"a": ["c"]}, [ "* a==1.0.1", " - c [required: >=9.4.1, installed: 8.0.1]", ], ), ( {("a", "1.0.1"): [("c", [(">=", "9.4.1")])], ("b", "2.3.0"): [("c", [(">=", "9.4.0")])]}, {"a": ["c"], "b": ["c"]}, [ "* a==1.0.1", " - c [required: >=9.4.1, installed: ?]", "* b==2.3.0", " - c [required: >=9.4.0, installed: ?]", ], ), ( {("a", "1.0.1"): [("c", [(">=", "9.4.1")])], ("b", "2.3.0"): [("c", [(">=", "7.0")])], ("c", "9.4.1"): []}, {}, [], ), ], ) def test_conflicting_deps( capsys: pytest.CaptureFixture[str], mpkgs: MockGraph, expected_keys: dict[str, list[str]], expected_output: list[str], mock_pkgs: Callable[[MockGraph], Iterator[Mock]], ) -> None: tree = PackageDAG.from_pkgs(list(mock_pkgs(mpkgs))) result = conflicting_deps(tree) result_keys = {k.key: [v.key for v in vs] for k, vs in result.items()} assert expected_keys == result_keys render_conflicts_text(result) captured = capsys.readouterr() assert "\n".join(expected_output).strip() == captured.err.strip() @pytest.mark.parametrize( ("mpkgs", "expected_output"), [ ( {("a", "1.0.1"): [("b", [(">=", "2.3.0")])], ("b", "1.9.1"): []}, [ "Warning!!! 
Possibly conflicting dependencies found:", "* a==1.0.1", " - b [required: >=2.3.0, installed: 1.9.1]", "------------------------------------------------------------------------", ], ), ( { ("a", "1.0.1"): [("b", [(">=", "2.0.0")])], ("b", "2.3.0"): [("a", [(">=", "1.0.1")])], ("c", "4.5.0"): [], }, [ "Warning!!! Cyclic dependencies found:", "* b => a => b", "* a => b => a", "------------------------------------------------------------------------", ], ), ], ) def test_validate( capsys: pytest.CaptureFixture[str], mock_pkgs: Callable[[MockGraph], Iterator[Mock]], mpkgs: MockGraph, expected_output: list[str], ) -> None: tree = PackageDAG.from_pkgs(list(mock_pkgs(mpkgs))) validate(tree) out, err = capsys.readouterr() assert len(out) == 0 assert "\n".join(expected_output).strip() == err.strip() pipdeptree-2.30.0/tests/test_pipdeptree.py0000664000175000017510000000414415105004414020177 0ustar nileshnileshfrom __future__ import annotations import sys from importlib import metadata from subprocess import CompletedProcess, check_call # noqa: S404 from typing import TYPE_CHECKING import pytest from pipdeptree.__main__ import main if TYPE_CHECKING: from pathlib import Path from pytest_mock import MockFixture def test_main() -> None: check_call([sys.executable, "-m", "pipdeptree", "--help"]) def test_console_script() -> None: try: dist = metadata.distribution("pipdeptree") except Exception as e: # noqa: BLE001 # pragma: no cover pytest.fail(f"Unexpected error when retrieving pipdeptree metadata: {e}") entry_points = dist.entry_points assert len(entry_points) == 1 if sys.version_info >= (3, 11): # pragma: >=3.11 entry_point = entry_points["pipdeptree"] else: entry_point = entry_points[0] try: pipdeptree = entry_point.load() except Exception as e: # noqa: BLE001 # pragma: no cover pytest.fail(f"Unexpected error: {e}") with pytest.raises(SystemExit, match="0"): pipdeptree(["", "--help"]) def test_main_log_resolved(tmp_path: Path, mocker: MockFixture, capsys: 
pytest.CaptureFixture[str]) -> None: mocker.patch("sys.argv", ["", "--python", "auto"]) mocker.patch("pipdeptree.__main__.detect_active_interpreter", return_value=str(tmp_path)) mock_subprocess_run = mocker.patch("subprocess.run") valid_sys_path = str([str(tmp_path)]) mock_subprocess_run.return_value = CompletedProcess( args=["python", "-c", "import sys; print(sys.path)"], returncode=0, stdout=valid_sys_path, stderr="", ) main() captured = capsys.readouterr() assert captured.err.startswith(f"(resolved python: {tmp_path!s}") def test_main_include_and_exclude_overlap(mocker: MockFixture, capsys: pytest.CaptureFixture[str]) -> None: cmd = ["", "--packages", "a,b,c", "--exclude", "a"] mocker.patch("pipdeptree.__main__.sys.argv", cmd) main() captured = capsys.readouterr() assert "Cannot have --packages and --exclude contain the same entries" in captured.err pipdeptree-2.30.0/tests/test_non_host.py0000664000175000017510000001265315105004414017671 0ustar nileshnileshfrom __future__ import annotations import sys from platform import python_implementation from typing import TYPE_CHECKING from unittest.mock import Mock import pytest import virtualenv from pipdeptree.__main__ import main if TYPE_CHECKING: from pathlib import Path from pytest_mock import MockerFixture @pytest.fixture(scope="session") def expected_venv_pkgs() -> frozenset[str]: implementation = python_implementation() if implementation == "CPython": # pragma: cpython cover expected = {"pip", "setuptools"} elif implementation == "PyPy": # pragma: pypy cover expected = {"cffi", "greenlet", "pip", "hpy", "setuptools"} else: # pragma: no cover raise ValueError(implementation) if sys.version_info >= (3, 12): # pragma: >=3.12 cover expected -= {"setuptools"} return frozenset(expected) @pytest.mark.parametrize("args_joined", [True, False]) def test_custom_interpreter( tmp_path: Path, mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch, capfd: pytest.CaptureFixture[str], args_joined: bool, expected_venv_pkgs: 
frozenset[str], ) -> None: # Delete $PYTHONPATH so that it cannot be passed to the custom interpreter process (since we don't know what # distribution metadata to expect when it's used). monkeypatch.delenv("PYTHONPATH", False) monkeypatch.chdir(tmp_path) result = virtualenv.cli_run([str(tmp_path / "venv"), "--activators", ""]) py = str(result.creator.exe.relative_to(tmp_path)) cmd = ["", f"--python={result.creator.exe}"] if args_joined else ["", "--python", py] cmd += ["--all", "--depth", "0"] mocker.patch("pipdeptree._discovery.sys.argv", cmd) main() out, _ = capfd.readouterr() found = {i.split("==")[0] for i in out.splitlines()} assert expected_venv_pkgs == found, out def test_custom_interpreter_with_local_only( tmp_path: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str], ) -> None: venv_path = str(tmp_path / "venv") result = virtualenv.cli_run([venv_path, "--system-site-packages", "--activators", ""]) cmd = ["", f"--python={result.creator.exe}", "--local-only"] mocker.patch("pipdeptree._discovery.sys.prefix", venv_path) mocker.patch("pipdeptree._discovery.sys.argv", cmd) main() out, _ = capfd.readouterr() found = {i.split("==")[0] for i in out.splitlines()} expected = {"pip", "setuptools"} if sys.version_info >= (3, 12): # pragma: >=3.12 cover expected -= {"setuptools"} assert expected == found, out def test_custom_interpreter_with_user_only( tmp_path: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str] ) -> None: # ensures there is no output when --user-only and --python are passed venv_path = str(tmp_path / "venv") result = virtualenv.cli_run([venv_path, "--activators", ""]) cmd = ["", f"--python={result.creator.exe}", "--user-only"] mocker.patch("pipdeptree.__main__.sys.argv", cmd) main() out, err = capfd.readouterr() assert not err # Here we expect 1 element because print() adds a newline. 
found = out.splitlines() assert len(found) == 1 assert not found[0] def test_custom_interpreter_with_user_only_and_system_site_pkgs_enabled( tmp_path: Path, fake_dist: Path, mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch, capfd: pytest.CaptureFixture[str], ) -> None: # ensures that we provide user site metadata when --user-only and --python are passed and the custom interpreter has # system site packages enabled # Make a fake user site directory since we don't know what to expect from the real one. fake_user_site = str(fake_dist.parent) mocker.patch("pipdeptree._discovery.site.getusersitepackages", Mock(return_value=fake_user_site)) # Create a temporary virtual environment. venv_path = str(tmp_path / "venv") result = virtualenv.cli_run([venv_path, "--activators", ""]) # Use $PYTHONPATH to add the fake user site into the custom interpreter's environment so that it will include it in # its sys.path. monkeypatch.setenv("PYTHONPATH", str(fake_user_site)) cmd = ["", f"--python={result.creator.exe}", "--user-only"] mocker.patch("pipdeptree.__main__.sys.argv", cmd) main() out, err = capfd.readouterr() assert not err found = {i.split("==")[0] for i in out.splitlines()} expected = {"bar"} assert expected == found def test_custom_interpreter_ensure_pythonpath_envar_is_honored( tmp_path: Path, mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch, capfd: pytest.CaptureFixture[str], expected_venv_pkgs: frozenset[str], ) -> None: # ensures that we honor $PYTHONPATH when passing it to the custom interpreter process venv_path = str(tmp_path / "venv") result = virtualenv.cli_run([venv_path, "--activators", ""]) another_path = tmp_path / "another-path" fake_dist = another_path / "foo-1.2.3.dist-info" fake_dist.mkdir(parents=True) fake_metadata = fake_dist / "METADATA" with fake_metadata.open("w") as f: f.write("Metadata-Version: 2.3\nName: foo\nVersion: 1.2.3\n") cmd = ["", f"--python={result.creator.exe}", "--all", "--depth", "0"] 
mocker.patch("pipdeptree._discovery.sys.argv", cmd) monkeypatch.setenv("PYTHONPATH", str(another_path)) main() out, _ = capfd.readouterr() found = {i.split("==")[0] for i in out.splitlines()} assert {*expected_venv_pkgs, "foo"} == found, out pipdeptree-2.30.0/tests/test_freeze.py0000664000175000017510000000052515105004414017315 0ustar nileshnileshfrom __future__ import annotations from unittest.mock import Mock from pipdeptree._freeze import dist_to_frozen_repr def test_dist_to_frozen_repr() -> None: foo = Mock(metadata={"Name": "foo"}, version="20.4.1") foo.read_text = Mock(return_value=None) expected = "foo==20.4.1" assert dist_to_frozen_repr(foo) == expected pipdeptree-2.30.0/tests/test_discovery.py0000664000175000017510000001610615105004414020046 0ustar nileshnileshfrom __future__ import annotations import site import sys from pathlib import Path from typing import TYPE_CHECKING from unittest.mock import Mock import virtualenv from pipdeptree.__main__ import main from pipdeptree._discovery import get_installed_distributions if TYPE_CHECKING: import pytest from pytest_mock import MockerFixture def test_local_only(tmp_path: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str]) -> None: venv_path = str(tmp_path / "venv") result = virtualenv.cli_run([venv_path, "--activators", ""]) venv_site_packages = site.getsitepackages([venv_path]) fake_dist = Path(venv_site_packages[0]) / "foo-1.2.5.dist-info" fake_dist.mkdir() fake_metadata = Path(fake_dist) / "METADATA" with fake_metadata.open("w") as f: f.write("Metadata-Version: 2.3\nName: foo\nVersion: 1.2.5\n") cmd = [str(result.creator.exe.parent / "python3"), "--local-only"] mocker.patch("pipdeptree._discovery.sys.prefix", venv_path) sys_path = sys.path.copy() mock_path = sys_path + venv_site_packages mocker.patch("pipdeptree._discovery.sys.path", mock_path) mocker.patch("pipdeptree._discovery.sys.argv", cmd) main() out, _ = capfd.readouterr() found = {i.split("==")[0] for i in out.splitlines()} expected = 
{"foo", "pip", "setuptools"} if sys.version_info >= (3, 12): expected -= {"setuptools"} # pragma: no cover assert found == expected def test_user_only(fake_dist: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str]) -> None: # Make a fake user site. fake_user_site = str(fake_dist.parent) mocker.patch("pipdeptree._discovery.site.getusersitepackages", Mock(return_value=fake_user_site)) # Add fake user site directory into a fake sys.path (normal environments will have the user site in sys.path). fake_sys_path = [*sys.path, fake_user_site] mocker.patch("pipdeptree._discovery.sys.path", fake_sys_path) cmd = ["", "--user-only"] mocker.patch("pipdeptree.__main__.sys.argv", cmd) main() out, err = capfd.readouterr() assert not err found = {i.split("==")[0] for i in out.splitlines()} expected = {"bar"} assert found == expected def test_user_only_when_in_virtual_env( tmp_path: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str] ) -> None: # ensures that we follow `pip list` by not outputting anything when --user-only is set and pipdeptree is running in # a virtual environment # Create a virtual environment and mock sys.path to point to the venv's site packages. venv_path = str(tmp_path / "venv") virtualenv.cli_run([venv_path, "--activators", ""]) venv_site_packages = site.getsitepackages([venv_path]) mocker.patch("pipdeptree._discovery.sys.path", venv_site_packages) mocker.patch("pipdeptree._discovery.sys.prefix", venv_path) cmd = ["", "--user-only"] mocker.patch("pipdeptree.__main__.sys.argv", cmd) main() out, err = capfd.readouterr() assert not err # Here we expect 1 element because print() adds a newline. 
found = out.splitlines() assert len(found) == 1 assert not found[0] def test_user_only_when_in_virtual_env_and_system_site_pkgs_enabled( tmp_path: Path, fake_dist: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str] ) -> None: # ensures that we provide user site metadata when --user-only is set and we're in a virtual env with system site # packages enabled # Make a fake user site directory since we don't know what to expect from the real one. fake_user_site = str(fake_dist.parent) mocker.patch("pipdeptree._discovery.site.getusersitepackages", Mock(return_value=fake_user_site)) # Create a temporary virtual environment. Add the fake user site to path (since user site packages should normally # be there). venv_path = str(tmp_path / "venv") virtualenv.cli_run([venv_path, "--system-site-packages", "--activators", ""]) venv_site_packages = site.getsitepackages([venv_path]) mock_path = sys.path + venv_site_packages + [fake_user_site] mocker.patch("pipdeptree._discovery.sys.path", mock_path) mocker.patch("pipdeptree._discovery.sys.prefix", venv_path) cmd = ["", "--user-only"] mocker.patch("pipdeptree.__main__.sys.argv", cmd) main() out, err = capfd.readouterr() assert not err found = {i.split("==")[0] for i in out.splitlines()} expected = {"bar"} assert found == expected def test_interpreter_query_failure(mocker: MockerFixture, capfd: pytest.CaptureFixture[str]) -> None: cmd = ["", "--python", "i-dont-exist"] mocker.patch("pipdeptree._discovery.sys.argv", cmd) main() _, err = capfd.readouterr() assert err.startswith("Failed to query custom interpreter") def test_duplicate_metadata(mocker: MockerFixture, capfd: pytest.CaptureFixture[str]) -> None: mocker.patch( "pipdeptree._discovery.distributions", Mock( return_value=[ Mock(metadata={"Name": "foo"}, version="1.2.5", locate_file=Mock(return_value="/path/1")), Mock(metadata={"Name": "foo"}, version="5.9.0", locate_file=Mock(return_value="/path/2")), ] ), ) dists = get_installed_distributions() assert len(dists) == 
1 # we expect it to use the first distribution found assert dists[0].version == "1.2.5" _, err = capfd.readouterr() expected = ( 'Warning!!! Duplicate package metadata found:\n"/path/2"\n foo 5.9.0 ' ' (using 1.2.5, "/path/1")\nNOTE: This warning isn\'t a failure warning.\n---------------------------------' "---------------------------------------\n" ) assert err == expected def test_invalid_metadata( mocker: MockerFixture, capfd: pytest.CaptureFixture[str], fake_dist_with_invalid_metadata: Path ) -> None: fake_site_dir = str(fake_dist_with_invalid_metadata.parent) mocked_sys_path = [fake_site_dir] mocker.patch("pipdeptree._discovery.sys.path", mocked_sys_path) dists = get_installed_distributions() assert len(dists) == 0 out, err = capfd.readouterr() assert not out assert err == ( "Warning!!! Missing or invalid metadata found in the following site dirs:\n" f"{fake_site_dir}\n" "------------------------------------------------------------------------\n" ) def test_paths(fake_dist: Path) -> None: fake_site_dir = str(fake_dist.parent) mocked_path = [fake_site_dir] dists = get_installed_distributions(supplied_paths=mocked_path) assert len(dists) == 1 assert dists[0].metadata["Name"] == "bar" def test_paths_when_in_virtual_env(tmp_path: Path, fake_dist: Path) -> None: # tests to ensure that we use only the user-supplied path, not paths in the virtual env fake_site_dir = str(fake_dist.parent) mocked_path = [fake_site_dir] venv_path = str(tmp_path / "venv") s = virtualenv.cli_run([venv_path, "--activators", ""]) dists = get_installed_distributions(interpreter=str(s.creator.exe), supplied_paths=mocked_path) assert len(dists) == 1 assert dists[0].metadata["Name"] == "bar" pipdeptree-2.30.0/tests/test_detect_env.py0000664000175000017510000000352615105004414020161 0ustar nileshnileshfrom __future__ import annotations from pathlib import Path from subprocess import CompletedProcess # noqa: S404 from typing import TYPE_CHECKING import pytest from pipdeptree._detect_env import 
detect_active_interpreter if TYPE_CHECKING: from pytest_mock import MockFixture @pytest.mark.parametrize(("env_var"), ["VIRTUAL_ENV", "CONDA_PREFIX"]) def test_detect_active_interpreter_using_env_vars(tmp_path: Path, mocker: MockFixture, env_var: str) -> None: mocker.patch("pipdeptree._detect_env.os.environ", {env_var: str(tmp_path)}) mocker.patch("pipdeptree._detect_env.Path.exists", return_value=True) actual_path = detect_active_interpreter() assert actual_path.startswith(str(tmp_path)) def test_detect_active_interpreter_poetry(tmp_path: Path, mocker: MockFixture) -> None: faked_result = CompletedProcess("", 0, stdout=str(tmp_path)) mocker.patch("pipdeptree._detect_env.subprocess.run", return_value=faked_result) mocker.patch("pipdeptree._detect_env.os.environ", {}) actual_path = detect_active_interpreter() assert str(tmp_path) == actual_path def test_detect_active_interpreter_non_supported_python_implementation( tmp_path: Path, mocker: MockFixture, ) -> None: mocker.patch("pipdeptree._detect_env.os.environ", {"VIRTUAL_ENV": str(tmp_path)}) mocker.patch("pipdeptree._detect_env.Path.exists", return_value=True) mocker.patch("pipdeptree._detect_env.platform.python_implementation", return_value="NotSupportedPythonImpl") with pytest.raises(SystemExit): detect_active_interpreter() def test_detect_active_interpreter_non_existent_path( mocker: MockFixture, ) -> None: fake_path = str(Path(*("i", "dont", "exist"))) mocker.patch("pipdeptree._detect_env.os.environ", {"VIRTUAL_ENV": fake_path}) with pytest.raises(SystemExit): detect_active_interpreter() pipdeptree-2.30.0/tests/test_cli.py0000664000175000017510000000754115105004414016611 0ustar nileshnileshfrom __future__ import annotations import pytest from pipdeptree._cli import build_parser, get_options def test_get_options_default() -> None: get_options([]) @pytest.mark.parametrize("flag", ["-j", "--json"]) def test_get_options_json(flag: str) -> None: options = get_options([flag]) assert options.json assert 
options.output_format == "json" def test_get_options_json_tree() -> None: options = get_options(["--json-tree"]) assert options.json_tree assert not options.json assert options.output_format == "json-tree" def test_get_options_mermaid() -> None: options = get_options(["--mermaid"]) assert options.mermaid assert options.output_format == "mermaid" def test_get_options_pdf() -> None: options = get_options(["--graph-output", "pdf"]) assert options.graphviz_format == "pdf" assert options.output_format == "graphviz-pdf" def test_get_options_svg() -> None: options = get_options(["--graph-output", "svg"]) assert options.graphviz_format == "svg" assert options.output_format == "graphviz-svg" @pytest.mark.parametrize(("fmt"), ["freeze", "json", "json-tree", "mermaid", "graphviz-png"]) def test_get_options_output_format(fmt: str) -> None: options = get_options(["-o", fmt]) assert options.output_format == fmt def test_get_options_output_format_that_does_not_exist(capsys: pytest.CaptureFixture[str]) -> None: with pytest.raises(SystemExit, match="2"): get_options(["-o", "i-dont-exist"]) out, err = capsys.readouterr() assert not out assert 'i-dont-exist" is not a known output format.' 
in err def test_get_options_license_and_freeze_together_not_supported(capsys: pytest.CaptureFixture[str]) -> None: with pytest.raises(SystemExit, match="2"): get_options(["--license", "--freeze"]) out, err = capsys.readouterr() assert not out assert "cannot use --license with --freeze" in err @pytest.mark.parametrize( "args", [ pytest.param(["--path", "/random/path", "--local-only"], id="path-with-local"), pytest.param(["--path", "/random/path", "--user-only"], id="path-with-user"), ], ) def test_get_options_path_with_either_local_or_user_not_supported( args: list[str], capsys: pytest.CaptureFixture[str] ) -> None: with pytest.raises(SystemExit, match="2"): get_options(args) out, err = capsys.readouterr() assert not out assert "cannot use --path with --user-only or --local-only" in err def test_get_options_exclude_dependencies_without_exclude(capsys: pytest.CaptureFixture[str]) -> None: with pytest.raises(SystemExit, match="2"): get_options(["--exclude-dependencies"]) out, err = capsys.readouterr() assert not out assert "must use --exclude-dependencies with --exclude" in err @pytest.mark.parametrize( ("should_be_error", "depth_arg", "expected_value"), [ (True, ["-d", "-1"], None), (True, ["--depth", "string"], None), (False, ["-d", "0"], 0), (False, ["--depth", "8"], 8), (False, [], float("inf")), ], ) def test_parser_depth(should_be_error: bool, depth_arg: list[str], expected_value: float | None) -> None: parser = build_parser() if should_be_error: with pytest.raises(SystemExit): parser.parse_args(depth_arg) else: args = parser.parse_args(depth_arg) assert args.depth == expected_value @pytest.mark.parametrize( "warning", [ "silence", "suppress", "fail", ], ) def test_parse_warn_option_normal(warning: str) -> None: options = get_options(["-w", warning]) assert options.warn == warning options = get_options(["--warn", warning]) assert options.warn == warning def test_parse_warn_option_invalid() -> None: with pytest.raises(SystemExit, match="2"): 
get_options(["--warn", "non-existent-warning-type"]) pipdeptree-2.30.0/tests/render/0000775000175000017510000000000015105004414015701 5ustar nileshnileshpipdeptree-2.30.0/tests/render/test_text.py0000664000175000017510000005361415105004414020307 0ustar nileshnileshfrom __future__ import annotations from typing import TYPE_CHECKING import pytest from pipdeptree._models import PackageDAG from pipdeptree._models.package import Package from pipdeptree._render.text import render_text if TYPE_CHECKING: from collections.abc import Callable, Iterator from unittest.mock import Mock from tests.our_types import MockGraph @pytest.mark.parametrize( ("list_all", "reverse", "unicode", "expected_output"), [ ( True, False, True, [ "a==3.4.0", "├── b [required: >=2.0.0, installed: 2.3.1]", "│ └── d [required: >=2.30,<2.42, installed: 2.35]", "│ └── e [required: >=0.9.0, installed: 0.12.1]", "└── c [required: >=5.7.1, installed: 5.10.0]", " ├── d [required: >=2.30, installed: 2.35]", " │ └── e [required: >=0.9.0, installed: 0.12.1]", " └── e [required: >=0.12.1, installed: 0.12.1]", "b==2.3.1", "└── d [required: >=2.30,<2.42, installed: 2.35]", " └── e [required: >=0.9.0, installed: 0.12.1]", "c==5.10.0", "├── d [required: >=2.30, installed: 2.35]", "│ └── e [required: >=0.9.0, installed: 0.12.1]", "└── e [required: >=0.12.1, installed: 0.12.1]", "d==2.35", "└── e [required: >=0.9.0, installed: 0.12.1]", "e==0.12.1", "f==3.1", "└── b [required: >=2.1.0, installed: 2.3.1]", " └── d [required: >=2.30,<2.42, installed: 2.35]", " └── e [required: >=0.9.0, installed: 0.12.1]", "g==6.8.3rc1", "├── e [required: >=0.9.0, installed: 0.12.1]", "└── f [required: >=3.0.0, installed: 3.1]", " └── b [required: >=2.1.0, installed: 2.3.1]", " └── d [required: >=2.30,<2.42, installed: 2.35]", " └── e [required: >=0.9.0, installed: 0.12.1]", ], ), ( True, True, True, [ "a==3.4.0", "b==2.3.1", "├── a==3.4.0 [requires: b>=2.0.0]", "└── f==3.1 [requires: b>=2.1.0]", " └── g==6.8.3rc1 [requires: 
f>=3.0.0]", "c==5.10.0", "└── a==3.4.0 [requires: c>=5.7.1]", "d==2.35", "├── b==2.3.1 [requires: d>=2.30,<2.42]", "│ ├── a==3.4.0 [requires: b>=2.0.0]", "│ └── f==3.1 [requires: b>=2.1.0]", "│ └── g==6.8.3rc1 [requires: f>=3.0.0]", "└── c==5.10.0 [requires: d>=2.30]", " └── a==3.4.0 [requires: c>=5.7.1]", "e==0.12.1", "├── c==5.10.0 [requires: e>=0.12.1]", "│ └── a==3.4.0 [requires: c>=5.7.1]", "├── d==2.35 [requires: e>=0.9.0]", "│ ├── b==2.3.1 [requires: d>=2.30,<2.42]", "│ │ ├── a==3.4.0 [requires: b>=2.0.0]", "│ │ └── f==3.1 [requires: b>=2.1.0]", "│ │ └── g==6.8.3rc1 [requires: f>=3.0.0]", "│ └── c==5.10.0 [requires: d>=2.30]", "│ └── a==3.4.0 [requires: c>=5.7.1]", "└── g==6.8.3rc1 [requires: e>=0.9.0]", "f==3.1", "└── g==6.8.3rc1 [requires: f>=3.0.0]", "g==6.8.3rc1", ], ), ( False, False, True, [ "a==3.4.0", "├── b [required: >=2.0.0, installed: 2.3.1]", "│ └── d [required: >=2.30,<2.42, installed: 2.35]", "│ └── e [required: >=0.9.0, installed: 0.12.1]", "└── c [required: >=5.7.1, installed: 5.10.0]", " ├── d [required: >=2.30, installed: 2.35]", " │ └── e [required: >=0.9.0, installed: 0.12.1]", " └── e [required: >=0.12.1, installed: 0.12.1]", "g==6.8.3rc1", "├── e [required: >=0.9.0, installed: 0.12.1]", "└── f [required: >=3.0.0, installed: 3.1]", " └── b [required: >=2.1.0, installed: 2.3.1]", " └── d [required: >=2.30,<2.42, installed: 2.35]", " └── e [required: >=0.9.0, installed: 0.12.1]", ], ), ( False, True, True, [ "e==0.12.1", "├── c==5.10.0 [requires: e>=0.12.1]", "│ └── a==3.4.0 [requires: c>=5.7.1]", "├── d==2.35 [requires: e>=0.9.0]", "│ ├── b==2.3.1 [requires: d>=2.30,<2.42]", "│ │ ├── a==3.4.0 [requires: b>=2.0.0]", "│ │ └── f==3.1 [requires: b>=2.1.0]", "│ │ └── g==6.8.3rc1 [requires: f>=3.0.0]", "│ └── c==5.10.0 [requires: d>=2.30]", "│ └── a==3.4.0 [requires: c>=5.7.1]", "└── g==6.8.3rc1 [requires: e>=0.9.0]", ], ), ( True, False, False, [ "a==3.4.0", " - b [required: >=2.0.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, 
installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", " - c [required: >=5.7.1, installed: 5.10.0]", " - d [required: >=2.30, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", " - e [required: >=0.12.1, installed: 0.12.1]", "b==2.3.1", " - d [required: >=2.30,<2.42, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", "c==5.10.0", " - d [required: >=2.30, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", " - e [required: >=0.12.1, installed: 0.12.1]", "d==2.35", " - e [required: >=0.9.0, installed: 0.12.1]", "e==0.12.1", "f==3.1", " - b [required: >=2.1.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", "g==6.8.3rc1", " - e [required: >=0.9.0, installed: 0.12.1]", " - f [required: >=3.0.0, installed: 3.1]", " - b [required: >=2.1.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", ], ), ( True, True, False, [ "a==3.4.0", "b==2.3.1", " - a==3.4.0 [requires: b>=2.0.0]", " - f==3.1 [requires: b>=2.1.0]", " - g==6.8.3rc1 [requires: f>=3.0.0]", "c==5.10.0", " - a==3.4.0 [requires: c>=5.7.1]", "d==2.35", " - b==2.3.1 [requires: d>=2.30,<2.42]", " - a==3.4.0 [requires: b>=2.0.0]", " - f==3.1 [requires: b>=2.1.0]", " - g==6.8.3rc1 [requires: f>=3.0.0]", " - c==5.10.0 [requires: d>=2.30]", " - a==3.4.0 [requires: c>=5.7.1]", "e==0.12.1", " - c==5.10.0 [requires: e>=0.12.1]", " - a==3.4.0 [requires: c>=5.7.1]", " - d==2.35 [requires: e>=0.9.0]", " - b==2.3.1 [requires: d>=2.30,<2.42]", " - a==3.4.0 [requires: b>=2.0.0]", " - f==3.1 [requires: b>=2.1.0]", " - g==6.8.3rc1 [requires: f>=3.0.0]", " - c==5.10.0 [requires: d>=2.30]", " - a==3.4.0 [requires: c>=5.7.1]", " - g==6.8.3rc1 [requires: e>=0.9.0]", "f==3.1", " - g==6.8.3rc1 [requires: f>=3.0.0]", "g==6.8.3rc1", ], ), ( False, False, False, [ "a==3.4.0", " - b [required: >=2.0.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, 
installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", " - c [required: >=5.7.1, installed: 5.10.0]", " - d [required: >=2.30, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", " - e [required: >=0.12.1, installed: 0.12.1]", "g==6.8.3rc1", " - e [required: >=0.9.0, installed: 0.12.1]", " - f [required: >=3.0.0, installed: 3.1]", " - b [required: >=2.1.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", ], ), ( False, True, False, [ "e==0.12.1", " - c==5.10.0 [requires: e>=0.12.1]", " - a==3.4.0 [requires: c>=5.7.1]", " - d==2.35 [requires: e>=0.9.0]", " - b==2.3.1 [requires: d>=2.30,<2.42]", " - a==3.4.0 [requires: b>=2.0.0]", " - f==3.1 [requires: b>=2.1.0]", " - g==6.8.3rc1 [requires: f>=3.0.0]", " - c==5.10.0 [requires: d>=2.30]", " - a==3.4.0 [requires: c>=5.7.1]", " - g==6.8.3rc1 [requires: e>=0.9.0]", ], ), ], ) def test_render_text( example_dag: PackageDAG, capsys: pytest.CaptureFixture[str], list_all: bool, reverse: bool, unicode: bool, expected_output: list[str], ) -> None: tree = example_dag.reverse() if reverse else example_dag encoding = "utf-8" if unicode else "ascii" render_text(tree, max_depth=float("inf"), encoding=encoding, list_all=list_all) captured = capsys.readouterr() assert "\n".join(expected_output).strip() == captured.out.strip() @pytest.mark.parametrize( ("unicode", "level", "expected_output"), [ ( True, 0, [ "a==3.4.0", "b==2.3.1", "c==5.10.0", "d==2.35", "e==0.12.1", "f==3.1", "g==6.8.3rc1", ], ), ( False, 0, [ "a==3.4.0", "b==2.3.1", "c==5.10.0", "d==2.35", "e==0.12.1", "f==3.1", "g==6.8.3rc1", ], ), ( True, 2, [ "a==3.4.0", "├── b [required: >=2.0.0, installed: 2.3.1]", "│ └── d [required: >=2.30,<2.42, installed: 2.35]", "└── c [required: >=5.7.1, installed: 5.10.0]", " ├── d [required: >=2.30, installed: 2.35]", " └── e [required: >=0.12.1, installed: 0.12.1]", "b==2.3.1", "└── d [required: >=2.30,<2.42, installed: 2.35]", " └── e 
[required: >=0.9.0, installed: 0.12.1]", "c==5.10.0", "├── d [required: >=2.30, installed: 2.35]", "│ └── e [required: >=0.9.0, installed: 0.12.1]", "└── e [required: >=0.12.1, installed: 0.12.1]", "d==2.35", "└── e [required: >=0.9.0, installed: 0.12.1]", "e==0.12.1", "f==3.1", "└── b [required: >=2.1.0, installed: 2.3.1]", " └── d [required: >=2.30,<2.42, installed: 2.35]", "g==6.8.3rc1", "├── e [required: >=0.9.0, installed: 0.12.1]", "└── f [required: >=3.0.0, installed: 3.1]", " └── b [required: >=2.1.0, installed: 2.3.1]", ], ), ( False, 2, [ "a==3.4.0", " - b [required: >=2.0.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, installed: 2.35]", " - c [required: >=5.7.1, installed: 5.10.0]", " - d [required: >=2.30, installed: 2.35]", " - e [required: >=0.12.1, installed: 0.12.1]", "b==2.3.1", " - d [required: >=2.30,<2.42, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", "c==5.10.0", " - d [required: >=2.30, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", " - e [required: >=0.12.1, installed: 0.12.1]", "d==2.35", " - e [required: >=0.9.0, installed: 0.12.1]", "e==0.12.1", "f==3.1", " - b [required: >=2.1.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, installed: 2.35]", "g==6.8.3rc1", " - e [required: >=0.9.0, installed: 0.12.1]", " - f [required: >=3.0.0, installed: 3.1]", " - b [required: >=2.1.0, installed: 2.3.1]", ], ), ], ) def test_render_text_given_depth( capsys: pytest.CaptureFixture[str], unicode: str, level: int, expected_output: list[str], example_dag: PackageDAG, ) -> None: render_text(example_dag, max_depth=level, encoding="utf-8" if unicode else "ascii") captured = capsys.readouterr() assert "\n".join(expected_output).strip() == captured.out.strip() @pytest.mark.parametrize( ("level", "encoding", "expected_output"), [ ( 0, "utf-8", [ "a==3.4.0", "b==2.3.1", "c==5.10.0", "d==2.35", "e==0.12.1", "f==3.1", "g==6.8.3rc1", ], ), ( 2, "utf-8", [ "a==3.4.0", "├── b [required: >=2.0.0, installed: 2.3.1]", "│ 
└── d [required: >=2.30,<2.42, installed: 2.35]", "└── c [required: >=5.7.1, installed: 5.10.0]", " ├── d [required: >=2.30, installed: 2.35]", " └── e [required: >=0.12.1, installed: 0.12.1]", "b==2.3.1", "└── d [required: >=2.30,<2.42, installed: 2.35]", " └── e [required: >=0.9.0, installed: 0.12.1]", "c==5.10.0", "├── d [required: >=2.30, installed: 2.35]", "│ └── e [required: >=0.9.0, installed: 0.12.1]", "└── e [required: >=0.12.1, installed: 0.12.1]", "d==2.35", "└── e [required: >=0.9.0, installed: 0.12.1]", "e==0.12.1", "f==3.1", "└── b [required: >=2.1.0, installed: 2.3.1]", " └── d [required: >=2.30,<2.42, installed: 2.35]", "g==6.8.3rc1", "├── e [required: >=0.9.0, installed: 0.12.1]", "└── f [required: >=3.0.0, installed: 3.1]", " └── b [required: >=2.1.0, installed: 2.3.1]", ], ), ( 2, "ascii", [ "a==3.4.0", " - b [required: >=2.0.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, installed: 2.35]", " - c [required: >=5.7.1, installed: 5.10.0]", " - d [required: >=2.30, installed: 2.35]", " - e [required: >=0.12.1, installed: 0.12.1]", "b==2.3.1", " - d [required: >=2.30,<2.42, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", "c==5.10.0", " - d [required: >=2.30, installed: 2.35]", " - e [required: >=0.9.0, installed: 0.12.1]", " - e [required: >=0.12.1, installed: 0.12.1]", "d==2.35", " - e [required: >=0.9.0, installed: 0.12.1]", "e==0.12.1", "f==3.1", " - b [required: >=2.1.0, installed: 2.3.1]", " - d [required: >=2.30,<2.42, installed: 2.35]", "g==6.8.3rc1", " - e [required: >=0.9.0, installed: 0.12.1]", " - f [required: >=3.0.0, installed: 3.1]", " - b [required: >=2.1.0, installed: 2.3.1]", ], ), ], ) def test_render_text_encoding( capsys: pytest.CaptureFixture[str], level: int, encoding: str, expected_output: list[str], example_dag: PackageDAG, ) -> None: render_text(example_dag, max_depth=level, encoding=encoding, list_all=True) captured = capsys.readouterr() assert "\n".join(expected_output).strip() == 
captured.out.strip() def test_render_text_list_all_and_packages_options_used( capsys: pytest.CaptureFixture[str], mock_pkgs: Callable[[MockGraph], Iterator[Mock]], ) -> None: graph: MockGraph = { ("examplePy", "1.2.3"): [("hellopy", [(">=", "2.0.0")]), ("worldpy", [(">=", "0.0.2")])], ("HelloPy", "2.0.0"): [], ("worldpy", "0.0.2"): [], ("anotherpy", "0.1.2"): [("hellopy", [(">=", "2.0.0")])], ("YetAnotherPy", "3.1.2"): [], } package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) # NOTE: Mimicking the --packages option being used here. package_dag = package_dag.filter_nodes(["examplePy"], None) render_text(package_dag, max_depth=float("inf"), encoding="utf-8", list_all=True) captured = capsys.readouterr() expected_output = [ "examplePy==1.2.3", "├── HelloPy [required: >=2.0.0, installed: 2.0.0]", "└── worldpy [required: >=0.0.2, installed: 0.0.2]", "HelloPy==2.0.0", "worldpy==0.0.2", ] assert "\n".join(expected_output).strip() == captured.out.strip() @pytest.mark.parametrize( ("encoding", "expected_output"), [ ( "utf-8", [ "a==3.4.0 (TEST)", "└── c [required: ==1.0.0, installed: 1.0.0]", "b==2.3.1 (TEST)", "c==1.0.0 (TEST)", ], ), ( "ascii", [ "a==3.4.0 (TEST)", " - c [required: ==1.0.0, installed: 1.0.0]", "b==2.3.1 (TEST)", "c==1.0.0 (TEST)", ], ), ], ) def test_render_text_with_license_info( encoding: str, expected_output: str, mock_pkgs: Callable[[MockGraph], Iterator[Mock]], capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch, ) -> None: graph: MockGraph = { ("a", "3.4.0"): [("c", [("==", "1.0.0")])], ("b", "2.3.1"): [], ("c", "1.0.0"): [], } dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) monkeypatch.setattr(Package, "licenses", lambda _: "(TEST)") render_text(dag, max_depth=float("inf"), encoding=encoding, include_license=True) captured = capsys.readouterr() assert "\n".join(expected_output).strip() == captured.out.strip() @pytest.mark.parametrize( ("encoding", "expected_output"), [ ( "utf-8", [ "a==3.4.0 (TEST)", "b==2.3.1 (TEST)", 
"└── a==3.4.0 [requires: b==2.3.1]", "c==1.0.0 (TEST)", "├── a==3.4.0 [requires: c==1.0.0]", "└── b==2.3.1 [requires: c==1.0.0]", " └── a==3.4.0 [requires: b==2.3.1]", ], ), ( "ascii", [ "a==3.4.0 (TEST)", "b==2.3.1 (TEST)", " - a==3.4.0 [requires: b==2.3.1]", "c==1.0.0 (TEST)", " - a==3.4.0 [requires: c==1.0.0]", " - b==2.3.1 [requires: c==1.0.0]", " - a==3.4.0 [requires: b==2.3.1]", ], ), ], ) def test_render_text_with_license_info_and_reversed_tree( encoding: str, expected_output: str, mock_pkgs: Callable[[MockGraph], Iterator[Mock]], capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch, ) -> None: graph: MockGraph = { ("a", "3.4.0"): [("b", [("==", "2.3.1")]), ("c", [("==", "1.0.0")])], ("b", "2.3.1"): [("c", [("==", "1.0.0")])], ("c", "1.0.0"): [], } dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) dag = dag.reverse() monkeypatch.setattr(Package, "licenses", lambda _: "(TEST)") render_text(dag, max_depth=float("inf"), encoding=encoding, include_license=True) captured = capsys.readouterr() assert "\n".join(expected_output).strip() == captured.out.strip() pipdeptree-2.30.0/tests/render/test_render.py0000664000175000017510000000373615105004414020602 0ustar nileshnileshfrom __future__ import annotations from math import inf from typing import TYPE_CHECKING from unittest.mock import ANY import pytest from pipdeptree.__main__ import main if TYPE_CHECKING: from pytest_mock import MockerFixture @pytest.mark.parametrize("option", [["--json"], ["--output", "json"]]) def test_json_routing(option: list[str], mocker: MockerFixture) -> None: render = mocker.patch("pipdeptree._render.render_json") main(option) render.assert_called_once_with(ANY) @pytest.mark.parametrize("option", [["--json-tree"], ["--output", "json-tree"]]) def test_json_tree_routing(option: list[str], mocker: MockerFixture) -> None: render = mocker.patch("pipdeptree._render.render_json_tree") main(option) render.assert_called_once_with(ANY) @pytest.mark.parametrize("option", 
[["--mermaid"], ["--output", "mermaid"]]) def test_mermaid_routing(option: list[str], mocker: MockerFixture) -> None: render = mocker.patch("pipdeptree._render.render_mermaid") main(option) render.assert_called_once_with(ANY) @pytest.mark.parametrize("option", [["--graph-output", "dot"], ["--output", "graphviz-dot"]]) def test_grahpviz_routing(option: list[str], mocker: MockerFixture) -> None: render = mocker.patch("pipdeptree._render.render_graphviz") main(option) render.assert_called_once_with(ANY, output_format="dot", reverse=False) @pytest.mark.parametrize("option", [[], ["--output", "text"]]) def test_text_routing(option: list[str], mocker: MockerFixture) -> None: render = mocker.patch("pipdeptree._render.render_text") main(option) render.assert_called_once_with(ANY, encoding="utf-8", max_depth=inf, list_all=False, include_license=False) @pytest.mark.parametrize("option", [["--freeze"], ["--output", "freeze"]]) def test_freeze_routing(option: list[str], mocker: MockerFixture) -> None: render = mocker.patch("pipdeptree._render.render_freeze") main(option) render.assert_called_once_with(ANY, max_depth=inf, list_all=False) pipdeptree-2.30.0/tests/render/test_mermaid.py0000664000175000017510000000755715105004414020746 0ustar nileshnileshfrom __future__ import annotations from textwrap import dedent, indent from typing import TYPE_CHECKING from pipdeptree._models import PackageDAG from pipdeptree._render.mermaid import render_mermaid if TYPE_CHECKING: from collections.abc import Callable, Iterator from unittest.mock import Mock import pytest from tests.our_types import MockGraph def test_render_mermaid( example_dag: PackageDAG, randomized_example_dag: PackageDAG, capsys: pytest.CaptureFixture[str] ) -> None: """Check both the sorted and randomized package tree produces the same sorted Mermaid output. Rendering a reverse dependency tree should produce the same set of nodes. Edges should have the same version spec label, but be resorted after swapping node positions. 
`See how this renders `_. """ nodes = dedent( """\ flowchart TD classDef missing stroke-dasharray: 5 a["a\\n3.4.0"] b["b\\n2.3.1"] c["c\\n5.10.0"] d["d\\n2.35"] e["e\\n0.12.1"] f["f\\n3.1"] g["g\\n6.8.3rc1"] """, ) dependency_edges = indent( dedent( """\ a -- ">=2.0.0" --> b a -- ">=5.7.1" --> c b -- ">=2.30,<2.42" --> d c -- ">=0.12.1" --> e c -- ">=2.30" --> d d -- ">=0.9.0" --> e f -- ">=2.1.0" --> b g -- ">=0.9.0" --> e g -- ">=3.0.0" --> f """, ), " " * 4, ).rstrip() reverse_dependency_edges = indent( dedent( """\ b -- ">=2.0.0" --> a b -- ">=2.1.0" --> f c -- ">=5.7.1" --> a d -- ">=2.30" --> c d -- ">=2.30,<2.42" --> b e -- ">=0.12.1" --> c e -- ">=0.9.0" --> d e -- ">=0.9.0" --> g f -- ">=3.0.0" --> g """, ), " " * 4, ).rstrip() for package_tree in (example_dag, randomized_example_dag): render_mermaid(package_tree) output = capsys.readouterr() assert output.out.rstrip() == nodes + dependency_edges render_mermaid(package_tree.reverse()) output = capsys.readouterr() assert output.out.rstrip() == nodes + reverse_dependency_edges def test_mermaid_reserved_ids( mock_pkgs: Callable[[MockGraph], Iterator[Mock]], capsys: pytest.CaptureFixture[str] ) -> None: graph = {("click", "3.4.0"): [("click-extra", [(">=", "2.0.0")])]} package_tree = PackageDAG.from_pkgs(list(mock_pkgs(graph))) render_mermaid(package_tree) output = capsys.readouterr() assert ( output.out.rstrip() == dedent( """\ flowchart TD classDef missing stroke-dasharray: 5 click-extra["click-extra\\n(missing)"]:::missing click_0["click\\n3.4.0"] click_0 -.-> click-extra """, ).rstrip() ) pipdeptree-2.30.0/tests/render/test_json_tree.py0000664000175000017510000000204015105004414021276 0ustar nileshnileshfrom __future__ import annotations from typing import TYPE_CHECKING import pytest from pipdeptree._models.dag import PackageDAG from pipdeptree._render.json_tree import render_json_tree if TYPE_CHECKING: from collections.abc import Callable, Iterator from unittest.mock import Mock from tests.our_types 
import MockGraph @pytest.mark.parametrize( ("version_spec_tuple", "expected_version_spec"), [ pytest.param((), "Any"), pytest.param((">=", "2.0.0"), ">=2.0.0"), ], ) def test_json_tree_given_req_package_with_version_spec( mock_pkgs: Callable[[MockGraph], Iterator[Mock]], version_spec_tuple: tuple[str, str], expected_version_spec: str, capsys: pytest.CaptureFixture[str], ) -> None: graph: MockGraph = { ("a", "1.2.3"): [("b", [version_spec_tuple])], ("b", "2.2.0"): [], } package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) render_json_tree(package_dag) captured = capsys.readouterr() assert captured.out.find(expected_version_spec) != -1 pipdeptree-2.30.0/tests/render/test_json.py0000664000175000017510000000266515105004414020274 0ustar nileshnileshfrom __future__ import annotations from textwrap import dedent from typing import TYPE_CHECKING from pipdeptree._models.dag import PackageDAG from pipdeptree._render.json import render_json if TYPE_CHECKING: from collections.abc import Callable, Iterator from unittest.mock import Mock import pytest from tests.our_types import MockGraph def test_render_json(mock_pkgs: Callable[[MockGraph], Iterator[Mock]], capsys: pytest.CaptureFixture[str]) -> None: graph: MockGraph = { ("a", "1.2.3"): [("b", [(">=", "4.0.0")])], ("b", "4.5.6"): [], } expected_output = dedent("""\ [ { "package": { "key": "a", "package_name": "a", "installed_version": "1.2.3" }, "dependencies": [ { "key": "b", "package_name": "b", "installed_version": "4.5.6", "required_version": ">=4.0.0" } ] }, { "package": { "key": "b", "package_name": "b", "installed_version": "4.5.6" }, "dependencies": [] } ] """) package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) render_json(package_dag) output = capsys.readouterr() assert output.out == expected_output pipdeptree-2.30.0/tests/render/test_graphviz.py0000664000175000017510000000432315105004414021146 0ustar nileshnileshfrom __future__ import annotations import sys from textwrap import dedent from typing import 
TYPE_CHECKING import pytest from pipdeptree._render.graphviz import dump_graphviz, print_graphviz if TYPE_CHECKING: from pathlib import Path from pytest_mock import MockerFixture from pipdeptree._models import PackageDAG def test_render_dot( capsys: pytest.CaptureFixture[str], example_dag: PackageDAG, randomized_example_dag: PackageDAG, ) -> None: # Check both the sorted and randomized package tree produces the same sorted graphviz output. for package_tree in (example_dag, randomized_example_dag): output = dump_graphviz(package_tree, output_format="dot") print_graphviz(output) out, _ = capsys.readouterr() assert out == dedent( """\ digraph { \ta -> b [label=">=2.0.0"] \ta -> c [label=">=5.7.1"] \ta [label="a\\n3.4.0"] \tb -> d [label=">=2.30,<2.42"] \tb [label="b\\n2.3.1"] \tc -> d [label=">=2.30"] \tc -> e [label=">=0.12.1"] \tc [label="c\\n5.10.0"] \td -> e [label=">=0.9.0"] \td [label="d\\n2.35"] \te [label="e\\n0.12.1"] \tf -> b [label=">=2.1.0"] \tf [label="f\\n3.1"] \tg -> e [label=">=0.9.0"] \tg -> f [label=">=3.0.0"] \tg [label="g\\n6.8.3rc1"] } """, ) def test_render_pdf(tmp_path: Path, mocker: MockerFixture, example_dag: PackageDAG) -> None: output = dump_graphviz(example_dag, output_format="pdf") res = tmp_path / "file" with pytest.raises(OSError, match="Bad file"): # noqa: PT012, SIM117 # because we reopen the file with res.open("wb") as buf: mocker.patch.object(sys, "stdout", buf) print_graphviz(output) assert res.read_bytes()[:4] == b"%PDF" def test_render_svg(capsys: pytest.CaptureFixture[str], example_dag: PackageDAG) -> None: output = dump_graphviz(example_dag, output_format="svg") print_graphviz(output) out, _ = capsys.readouterr() assert out.startswith("") pipdeptree-2.30.0/tests/render/test_freeze.py0000664000175000017510000001012615105004414020572 0ustar nileshnileshfrom __future__ import annotations from math import inf from typing import TYPE_CHECKING from unittest.mock import PropertyMock import pytest from pipdeptree._freeze import 
PipBaseDistributionAdapter from pipdeptree._render.freeze import render_freeze if TYPE_CHECKING: from pipdeptree._models.dag import PackageDAG @pytest.fixture def patch_pip_adapter(monkeypatch: pytest.MonkeyPatch) -> None: """ Patches `PipBaseDistributionAdapter` such that `editable` returns `False` and `direct_url` returns `None`. This will have the pip API always return a frozen req in the "name==version" format. """ monkeypatch.setattr(PipBaseDistributionAdapter, "editable", PropertyMock(return_value=False)) monkeypatch.setattr(PipBaseDistributionAdapter, "direct_url", PropertyMock(return_value=None)) @pytest.mark.parametrize( ("list_all", "expected_output"), [ ( True, [ "a==3.4.0", " b==2.3.1", " d==2.35", " e==0.12.1", " c==5.10.0", " d==2.35", " e==0.12.1", " e==0.12.1", "b==2.3.1", " d==2.35", " e==0.12.1", "c==5.10.0", " d==2.35", " e==0.12.1", " e==0.12.1", "d==2.35", " e==0.12.1", "e==0.12.1", "f==3.1", " b==2.3.1", " d==2.35", " e==0.12.1", "g==6.8.3rc1", " e==0.12.1", " f==3.1", " b==2.3.1", " d==2.35", " e==0.12.1", ], ), ( False, [ "a==3.4.0", " b==2.3.1", " d==2.35", " e==0.12.1", " c==5.10.0", " d==2.35", " e==0.12.1", " e==0.12.1", "g==6.8.3rc1", " e==0.12.1", " f==3.1", " b==2.3.1", " d==2.35", " e==0.12.1", ], ), ], ) @pytest.mark.usefixtures("patch_pip_adapter") def test_render_freeze( example_dag: PackageDAG, capsys: pytest.CaptureFixture[str], list_all: bool, expected_output: list[str], ) -> None: render_freeze(example_dag, max_depth=inf, list_all=list_all) captured = capsys.readouterr() assert "\n".join(expected_output).strip() == captured.out.strip() @pytest.mark.parametrize( ("depth", "expected_output"), [ ( 0, [ "a==3.4.0", "b==2.3.1", "c==5.10.0", "d==2.35", "e==0.12.1", "f==3.1", "g==6.8.3rc1", ], ), ( 2, [ "a==3.4.0", " b==2.3.1", " d==2.35", " c==5.10.0", " d==2.35", " e==0.12.1", "b==2.3.1", " d==2.35", " e==0.12.1", "c==5.10.0", " d==2.35", " e==0.12.1", " e==0.12.1", "d==2.35", " e==0.12.1", "e==0.12.1", "f==3.1", " b==2.3.1", " 
d==2.35", "g==6.8.3rc1", " e==0.12.1", " f==3.1", " b==2.3.1", ], ), ], ) @pytest.mark.usefixtures("patch_pip_adapter") def test_render_freeze_given_depth( example_dag: PackageDAG, capsys: pytest.CaptureFixture[str], depth: int, expected_output: list[str], ) -> None: render_freeze(example_dag, max_depth=depth) captured = capsys.readouterr() assert "\n".join(expected_output).strip() == captured.out.strip() pipdeptree-2.30.0/tests/our_types.py0000664000175000017510000000024315105004414017024 0ustar nileshnileshfrom __future__ import annotations MockGraph = dict[tuple[str, str], list[tuple[str, list[tuple[str, str]]]]] # pragma: no cover __all__ = [ "MockGraph", ] pipdeptree-2.30.0/tests/conftest.py0000664000175000017510000000645715105004414016635 0ustar nileshnileshfrom __future__ import annotations import locale from pathlib import Path from random import shuffle from typing import TYPE_CHECKING from unittest.mock import Mock import pytest from pipdeptree._models import PackageDAG if TYPE_CHECKING: from collections.abc import Callable, Iterator from tests.our_types import MockGraph @pytest.fixture(scope="session") def mock_pkgs() -> Callable[[MockGraph], Iterator[Mock]]: def func(simple_graph: MockGraph) -> Iterator[Mock]: for node, children in simple_graph.items(): nk, nv = node m = Mock(metadata={"Name": nk}, version=nv) reqs = [] for ck, cv in children: r = ck for item in cv: if item: rs, rv = item r = r + rs + rv if item != cv[-1]: r += "," reqs.append(r) m.requires = reqs yield m return func @pytest.fixture def example_dag(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> PackageDAG: packages: MockGraph = { ("a", "3.4.0"): [("b", [(">=", "2.0.0")]), ("c", [(">=", "5.7.1")])], ("b", "2.3.1"): [("d", [(">=", "2.30"), ("<", "2.42")])], ("c", "5.10.0"): [("d", [(">=", "2.30")]), ("e", [(">=", "0.12.1")])], ("d", "2.35"): [("e", [(">=", "0.9.0")])], ("e", "0.12.1"): [], ("f", "3.1"): [("b", [(">=", "2.1.0")])], ("g", "6.8.3rc1"): [("e", [(">=", "0.9.0")]), ("f", 
[(">=", "3.0.0")])], } return PackageDAG.from_pkgs(list(mock_pkgs(packages))) @pytest.fixture def randomized_example_dag(example_dag: PackageDAG) -> PackageDAG: """Returns a copy of the package tree fixture with dependencies in randomized order.""" # Extract the dependency graph from the package tree and randomize it. randomized_graph = {} randomized_nodes = list(example_dag._obj.keys()) # noqa: SLF001 shuffle(randomized_nodes) for node in randomized_nodes: edges = example_dag._obj[node].copy() # noqa: SLF001 shuffle(edges) randomized_graph[node] = edges assert set(randomized_graph) == set(example_dag._obj) # noqa: SLF001 # Create a randomized package tree. randomized_dag = PackageDAG(randomized_graph) assert len(example_dag) == len(randomized_dag) return randomized_dag @pytest.fixture def fake_dist(tmp_path: Path) -> Path: """Creates a fake site package (that you get using Path.parent) and a fake dist-info called bar-2.4.5.dist-info.""" fake_site_pkgs = tmp_path / "site-packages" fake_dist_path = fake_site_pkgs / "bar-2.4.5.dist-info" fake_dist_path.mkdir(parents=True) fake_metadata = Path(fake_dist_path) / "METADATA" with fake_metadata.open("w", encoding=locale.getpreferredencoding(False)) as f: f.write("Metadata-Version: 2.3\nName: bar\nVersion: 2.4.5\n") return fake_dist_path @pytest.fixture def fake_dist_with_invalid_metadata(tmp_path: Path) -> Path: "Similar to `fake_dist()`, but creates an invalid METADATA file." 
fake_site_pkgs = tmp_path / "site-packages" fake_dist_path = fake_site_pkgs / "bar-2.4.5.dist-info" fake_dist_path.mkdir(parents=True) fake_metadata = Path(fake_dist_path) / "METADATA" fake_metadata.touch() return fake_dist_path pipdeptree-2.30.0/tests/_models/0000775000175000017510000000000015105004414016044 5ustar nileshnileshpipdeptree-2.30.0/tests/_models/test_package.py0000664000175000017510000002045015105004414021051 0ustar nileshnileshfrom __future__ import annotations from importlib.metadata import PackageNotFoundError from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock, Mock import pytest from packaging.specifiers import SpecifierSet from pipdeptree._models import DistPackage, ReqPackage from pipdeptree._models.package import Package if TYPE_CHECKING: from pytest_mock import MockerFixture def sort_map_values(m: dict[str, Any]) -> dict[str, Any]: return {k: sorted(v) for k, v in m.items()} def test_guess_version_setuptools(mocker: MockerFixture) -> None: mocker.patch("pipdeptree._models.package.version", side_effect=PackageNotFoundError) r = MagicMock() r.name = "setuptools" result = ReqPackage(r).installed_version assert result == "?" 
def test_package_as_frozen_repr(mocker: MockerFixture) -> None: foo = Mock(metadata={"Name": "foo"}, version="1.2.3") dp = DistPackage(foo) expected = "test" mocker.patch("pipdeptree._models.package.dist_to_frozen_repr", Mock(return_value=expected)) assert Package.as_frozen_repr(dp.unwrap()) == expected def test_dist_package_requires() -> None: foo = Mock( metadata={"Name": "foo"}, requires=["bar", "baz >=2.7.2"], ) dp = DistPackage(foo) reqs = list(dp.requires()) assert len(reqs) == 2 def test_dist_package_requires_with_environment_markers_that_eval_to_false() -> None: foo = Mock( metadata={"Name": "foo"}, requires=['foo ; sys_platform == "NoexistOS"', "bar >=2.7.2 ; extra == 'testing'"], ) dp = DistPackage(foo) reqs = list(dp.requires()) assert len(reqs) == 0 def test_dist_package_render_as_root() -> None: foo = Mock(metadata={"Name": "foo"}, version="20.4.1") dp = DistPackage(foo) assert dp.render_as_root(frozen=False) == "foo==20.4.1" def test_dist_package_render_as_branch() -> None: foo = Mock(metadata={"Name": "foo"}, version="20.4.1") bar = Mock(metadata={"Name": "bar"}, version="4.1.0") bar_req = MagicMock(version="4.1.0", specifier=[">=4.0"]) bar_req.name = "bar" rp = ReqPackage(bar_req, dist=bar) dp = DistPackage(foo).as_parent_of(rp) assert dp.render_as_branch(frozen=False) == "foo==20.4.1 [requires: bar>=4.0]" def test_dist_package_render_as_root_with_frozen(mocker: MockerFixture) -> None: foo = Mock(metadata={"Name": "foo"}, version="1.2.3") dp = DistPackage(foo) expected = "test" mocker.patch("pipdeptree._models.package.dist_to_frozen_repr", Mock(return_value=expected)) assert dp.render_as_root(frozen=True) == expected def test_dist_package_as_parent_of() -> None: foo = Mock(metadata={"Name": "foo"}, version="20.4.1") dp = DistPackage(foo) assert dp.req is None bar = Mock(metadata={"Name": "bar"}, version="4.1.0") bar_req = MagicMock(version="4.1.0", specifier=[">=4.0"]) bar_req.name = "bar" rp = ReqPackage(bar_req, dist=bar) dp1 = dp.as_parent_of(rp) 
assert dp1._obj == dp._obj # noqa: SLF001 assert dp1.req is rp dp2 = dp.as_parent_of(None) assert dp2 is dp def test_dist_package_as_dict() -> None: foo = Mock(metadata={"Name": "foo"}, version="1.3.2b1") dp = DistPackage(foo) result = dp.as_dict() expected = {"key": "foo", "package_name": "foo", "installed_version": "1.3.2b1"} assert expected == result @pytest.mark.parametrize( ("mocked_metadata", "expected_output"), [ pytest.param( Mock(__getitem__=Mock(return_value=None), get_all=Mock(return_value=[])), Package.UNKNOWN_LICENSE_STR, id="no-license", ), pytest.param( Mock( __getitem__=Mock(return_value=None), get_all=Mock( return_value=[ "License :: OSI Approved :: GNU General Public License v2 (GPLv2)", "Operating System :: OS Independent", ] ), ), "(GNU General Public License v2 (GPLv2))", id="one-license-with-one-non-license", ), pytest.param( Mock( __getitem__=Mock(return_value=None), get_all=Mock( return_value=[ "License :: OSI Approved :: GNU General Public License v2 (GPLv2)", "License :: OSI Approved :: Apache Software License", ] ), ), "(GNU General Public License v2 (GPLv2), Apache Software License)", id="more-than-one-license", ), pytest.param( Mock(__getitem__=Mock(return_value="MIT"), get_all=Mock(return_value=[])), "(MIT)", id="license-expression", ), pytest.param( Mock( __getitem__=Mock(return_value="MIT"), get_all=Mock( return_value=[ "License :: OSI Approved :: MIT License", ] ), ), "(MIT)", id="license-expression-with-license-classifier", ), ], ) def test_dist_package_licenses(mocked_metadata: Mock, expected_output: str, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setattr("pipdeptree._models.package.metadata", Mock(return_value=mocked_metadata)) dist = DistPackage(Mock(metadata={"Name": "a"})) licenses_str = dist.licenses() assert licenses_str == expected_output def test_dist_package_licenses_importlib_cant_find_package(monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setattr("pipdeptree._models.package.metadata", 
Mock(side_effect=PackageNotFoundError())) dist = DistPackage(Mock(metadata={"Name": "a"})) licenses_str = dist.licenses() assert licenses_str == Package.UNKNOWN_LICENSE_STR def test_dist_package_key_pep503_normalized() -> None: foobar = Mock(metadata={"Name": "foo.bar"}, version="20.4.1") dp = DistPackage(foobar) assert dp.key == "foo-bar" def test_req_package_key_pep503_normalized() -> None: bar_req = MagicMock(specifier=[">=4.0"]) bar_req.name = "bar.bar-bar-bar" rp = ReqPackage(bar_req) assert rp.key == "bar-bar-bar-bar" def test_req_package_render_as_root() -> None: bar = Mock(metadata={"Name": "bar"}, version="4.1.0") bar_req = MagicMock(specifier=[">=4.0"]) bar_req.name = "bar" rp = ReqPackage(bar_req, dist=bar) assert rp.render_as_root(frozen=False) == "bar==4.1.0" def test_req_package_render_as_root_with_frozen(mocker: MockerFixture) -> None: bar = Mock(metadata={"Name": "bar"}, version="4.1.0") dp = DistPackage(bar) bar_req = MagicMock(specifier=[">=4.0"]) bar_req.name = "bar" rp = ReqPackage(bar_req, dp) expected = "test" mocker.patch("pipdeptree._models.package.dist_to_frozen_repr", Mock(return_value=expected)) assert rp.render_as_root(frozen=True) == expected def test_req_package_render_as_branch() -> None: bar = Mock(metadata={"Name": "bar"}, version="4.1.0") bar_req = MagicMock(specifier=[">=4.0"]) bar_req.name = "bar" rp = ReqPackage(bar_req, dist=bar) assert rp.render_as_branch(frozen=False) == "bar [required: >=4.0, installed: 4.1.0]" def test_req_package_is_conflicting_handle_dev_versions() -> None: # ensure that we can handle development versions when detecting conflicts # see https://github.com/tox-dev/pipdeptree/issues/393 bar = Mock(metadata={"Name": "bar"}, version="1.2.3.dev0") bar_req = MagicMock(specifier=SpecifierSet(">1.2.0")) bar_req.name = "bar" rp = ReqPackage(bar_req, dist=bar) assert not rp.is_conflicting() def test_req_package_as_dict() -> None: bar = Mock(metadata={"Name": "bar"}, version="4.1.0") bar_req = 
MagicMock(specifier=[">=4.0"]) bar_req.name = "bar" rp = ReqPackage(bar_req, dist=bar) result = rp.as_dict() expected = {"key": "bar", "package_name": "bar", "installed_version": "4.1.0", "required_version": ">=4.0"} assert expected == result def test_req_package_as_dict_with_no_version_spec() -> None: bar = Mock(key="bar", version="4.1.0") bar_req = MagicMock(specifier=[]) bar_req.name = "bar" rp = ReqPackage(bar_req, dist=bar) result = rp.as_dict() expected = {"key": "bar", "package_name": "bar", "installed_version": "4.1.0", "required_version": "Any"} assert expected == result pipdeptree-2.30.0/tests/_models/test_dag.py0000664000175000017510000002154315105004414020215 0ustar nileshnileshfrom __future__ import annotations from itertools import chain from typing import TYPE_CHECKING, Any import pytest from pipdeptree._models import DistPackage, PackageDAG, ReqPackage, ReversedPackageDAG from pipdeptree._models.dag import IncludeExcludeOverlapError, IncludePatternNotFoundError if TYPE_CHECKING: from collections.abc import Callable, Iterator from unittest.mock import Mock from tests.our_types import MockGraph def test_package_dag_get_node_as_parent(example_dag: PackageDAG) -> None: node = example_dag.get_node_as_parent("b") assert node is not None assert node.key == "b" node = example_dag.get_node_as_parent("c") assert node is not None assert node.key == "c" @pytest.fixture(scope="session") def t_fnmatch(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> PackageDAG: graph: MockGraph = { ("a-a", "1"): [("a-b", []), ("a-c", [])], ("a-b", "1"): [("a-c", [])], ("b-a", "1"): [("b-b", [])], ("b-b", "1"): [("a-b", [])], } return PackageDAG.from_pkgs(list(mock_pkgs(graph))) def dag_to_dict(g: PackageDAG) -> dict[str, list[str]]: return {k.key: [v.key for v in vs] for k, vs in g._obj.items()} # noqa: SLF001 def test_package_dag_filter_fnmatch_include_a(t_fnmatch: PackageDAG) -> None: # test include for a-*in the result we got only a-* nodes graph = 
def test_package_dag_filter_fnmatch_include_b(t_fnmatch: PackageDAG) -> None:
    # Including "b-*" pulls in a-b (and its child a-c) because b-b depends on it,
    # but a-a stays out since nothing in the b-* subtree reaches it.
    graph = dag_to_dict(t_fnmatch.filter_nodes(["b-*"], None))
    assert graph == {"b-a": ["b-b"], "b-b": ["a-b"], "a-b": ["a-c"]}


def test_package_dag_filter_fnmatch_exclude_c(t_fnmatch: PackageDAG) -> None:
    # Excluding "b-*" leaves only the a-* nodes.
    graph = dag_to_dict(t_fnmatch.filter_nodes(None, {"b-*"}))
    assert graph == {"a-a": ["a-b", "a-c"], "a-b": ["a-c"]}


def test_package_dag_filter_fnmatch_exclude_a(t_fnmatch: PackageDAG) -> None:
    # Excluding "a-*" leaves only the b-* nodes.
    graph = dag_to_dict(t_fnmatch.filter_nodes(None, {"a-*"}))
    assert graph == {"b-a": ["b-b"], "b-b": []}


def test_package_dag_filter_include_exclude_normal(t_fnmatch: PackageDAG) -> None:
    graph = dag_to_dict(t_fnmatch.filter_nodes(["a-*"], {"a-a"}))
    assert graph == {"a-b": ["a-c"]}


def test_package_dag_filter_include_exclude_overlap(t_fnmatch: PackageDAG) -> None:
    # A package that is both included and excluded is a user error.
    with pytest.raises(IncludeExcludeOverlapError):
        t_fnmatch.filter_nodes(["a-a", "a-b"], {"a-b"})


def test_package_dag_filter_include_nonexistent_packages(t_fnmatch: PackageDAG) -> None:
    with pytest.raises(IncludePatternNotFoundError, match="No packages matched using the following patterns: x, y, z"):
        t_fnmatch.filter_nodes(["x", "y", "z"], None)


def test_package_dag_filter_packages_uses_pep503normalize(
    mock_pkgs: Callable[[MockGraph], Iterator[Mock]],
) -> None:
    graph: MockGraph = {
        ("Pie.Pie", "1"): [],
    }
    pkgs = PackageDAG.from_pkgs(list(mock_pkgs(graph)))

    # Both include and exclude patterns must match against normalized keys.
    pkgs = pkgs.filter_nodes(["Pie.Pie"], None)
    assert len(pkgs) == 1
    assert pkgs.get_node_as_parent("pie-pie") is not None

    pkgs = pkgs.filter_nodes(None, {"Pie.Pie"})
    assert len(pkgs) == 0


@pytest.mark.parametrize(
    ("graph", "exclude", "expected"),
    [
        pytest.param(
            {
                ("a", "1.0.0"): [("b", []), ("c", []), ("d", []), ("e", [])],
                ("b", "2.0.0"): [("x", []), ("y", []), ("z", [])],
                ("c", "3.0.0"): [("d", [])],
                ("d", "4.0.0"): [],
                ("e", "5.0.0"): [("y", [])],
                ("f", "6.0.0"): [("d", [])],  # Make sure that "d" is not removed since "f" is not excluded
                ("g", "7.0.0"): [],
                ("x", "8.0.0"): [],
                ("y", "9.0.0"): [],
                ("z", "10.0.0"): [],
            },
            {"a"},
            {"d", "f", "g"},
            id="with-dependencies",
        ),
        pytest.param(
            {
                ("acorn", "1.0.0"): [("b", []), ("c", []), ("d", []), ("e", [])],
                ("b", "2.0.0"): [("x", []), ("y", []), ("z", [])],
                ("c", "3.0.0"): [("d", [])],
                ("d", "4.0.0"): [],
                ("e", "5.0.0"): [("y", [])],
                ("f", "6.0.0"): [("d", [])],
                ("g", "7.0.0"): [],
                ("x", "8.0.0"): [],
                ("y", "9.0.0"): [],
                ("z", "10.0.0"): [],
            },
            {"a*", "g"},
            {"d", "f"},
            id="with-multiple-excludes-and-with-patterns",
        ),
        pytest.param(
            {
                ("b", "2.0.0"): [("x", []), ("y", []), ("z", [])],
                ("x", "8.0.0"): [],
                ("y", "9.0.0"): [],
                ("z", "10.0.0"): [],
            },
            {"dontexist"},
            {"b", "x", "y", "z"},
            id="with-non-existent-exclude",
        ),
        pytest.param(
            {
                ("a", "1.0.0"): [("b", [])],
                ("b", "2.0.0"): [("c", [])],
                ("c", "3.0.0"): [("d", [])],
                ("d", "4.0.0"): [("e", [])],
                ("e", "5.0.0"): [("f", [])],
                ("f", "6.0.0"): [("g", [])],
                ("g", "7.0.0"): [("x", [])],
                ("x", "8.0.0"): [("y", [])],
                ("y", "9.0.0"): [("z", [])],
                ("z", "10.0.0"): [],
            },
            {"c"},
            {"a", "b"},
            id="with-package-having-large-depth",
        ),
    ],
)
def test_package_dag_filter_packages_given_exclude_dependencies(
    mock_pkgs: Callable[[MockGraph], Iterator[Mock]], graph: MockGraph, exclude: set[str], expected: list[str]
) -> None:
    pkgs = PackageDAG.from_pkgs(list(mock_pkgs(graph)))
    pkgs = pkgs.filter_nodes(None, exclude, exclude_deps=True)
    assert len(pkgs) == len(expected)
    assert all(p.key in expected for p in pkgs)


def test_package_dag_reverse(example_dag: PackageDAG) -> None:
    def sort_map_values(m: dict[str, Any]) -> dict[str, Any]:
        return {k: sorted(v) for k, v in m.items()}

    reversed_dag = example_dag.reverse()
    expected = {"a": [], "b": ["a", "f"], "c": ["a"], "d": ["b", "c"], "e": ["c", "d", "g"], "f": ["g"], "g": []}
    assert isinstance(reversed_dag, ReversedPackageDAG)
    assert sort_map_values(expected) == sort_map_values(dag_to_dict(reversed_dag))
    assert all(isinstance(k, ReqPackage) for k in reversed_dag)
    assert all(isinstance(v, DistPackage) for v in chain.from_iterable(reversed_dag.values()))

    # Reversing a ReversedPackageDAG must round-trip back to a PackageDAG.
    expected = {"a": ["b", "c"], "b": ["d"], "c": ["d", "e"], "d": ["e"], "e": [], "f": ["b"], "g": ["e", "f"]}
    round_tripped = reversed_dag.reverse()
    assert isinstance(round_tripped, PackageDAG)
    assert sort_map_values(expected) == sort_map_values(dag_to_dict(round_tripped))
    assert all(isinstance(k, DistPackage) for k in round_tripped)
    assert all(isinstance(v, ReqPackage) for v in chain.from_iterable(round_tripped.values()))


def test_package_dag_from_pkgs(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> None:
    # when pip's _vendor.packaging.requirements.Requirement's requires() gives a lowercased package name but the actual
    # package name in PyPI is mixed case, expect the mixed case version
    graph: MockGraph = {
        ("examplePy", "1.2.3"): [("hellopy", [(">=", "2.0.0")])],
        ("HelloPy", "2.2.0"): [],
    }
    package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph)))
    children = package_dag.get_children("examplepy")
    assert len(children) == 1
    assert children[0].project_name == "HelloPy"


def test_package_dag_from_pkgs_uses_pep503normalize(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> None:
    # ensure that requirement gets matched with a dists even when it's key needs pep503 normalization to match
    graph: MockGraph = {
        ("parent-package", "1.2.3"): [("flufl.lock", [(">=", "2.0.0")])],
        ("flufl-lock", "2.2.0"): [],
    }
    package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph)))
    children = package_dag.get_children("parent-package")
    assert children[0].dist
    assert children[0].key == "flufl-lock"


def test_package_from_pkgs_given_invalid_requirements(
    mock_pkgs: Callable[[MockGraph], Iterator[Mock]], capfd: pytest.CaptureFixture[str]
) -> None:
    graph: MockGraph = {
        ("a-package", "1.2.3"): [("BAD**requirement", [(">=", "2.0.0")])],
    }
    package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph)))
    assert len(package_dag) == 1

    out, err = capfd.readouterr()
    assert not out
    assert err == (
        "Warning!!! Invalid requirement strings found for the following distributions:\na-package\n  "
        'Skipping "BAD**requirement>=2.0.0"\n------------------------------------------------------------------------\n'
    )
{ ("a-package", "1.2.3"): [("BAD**requirement", [(">=", "2.0.0")])], } package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) assert len(package_dag) == 1 out, err = capfd.readouterr() assert not out assert err == ( "Warning!!! Invalid requirement strings found for the following distributions:\na-package\n " 'Skipping "BAD**requirement>=2.0.0"\n------------------------------------------------------------------------\n' ) pipdeptree-2.30.0/tests/__init__.py0000664000175000017510000000000015105004414016521 0ustar nileshnileshpipdeptree-2.30.0/src/0000775000175000017510000000000015105004414014047 5ustar nileshnileshpipdeptree-2.30.0/src/pipdeptree/0000775000175000017510000000000015105004414016210 5ustar nileshnileshpipdeptree-2.30.0/src/pipdeptree/py.typed0000664000175000017510000000000015105004414017675 0ustar nileshnileshpipdeptree-2.30.0/src/pipdeptree/_warning.py0000664000175000017510000000477115105004414020377 0ustar nileshnileshfrom __future__ import annotations import sys from enum import Enum from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable class WarningType(Enum): FAIL = "fail" SILENCE = "silence" SUPPRESS = "suppress" @classmethod def from_str(cls, string: str) -> WarningType: if string == "silence": return WarningType.SILENCE if string == "suppress": return WarningType.SUPPRESS if string == "fail": return WarningType.FAIL msg = "Unknown WarningType string value provided" raise ValueError(msg) class WarningPrinter: """Non-thread safe class that handles printing warning logic.""" def __init__(self, warning_type: WarningType = WarningType.SUPPRESS) -> None: self._warning_type = warning_type self._has_warned = False @property def warning_type(self) -> WarningType: return self._warning_type @warning_type.setter def warning_type(self, new_warning_type: WarningType) -> None: self._warning_type = new_warning_type def should_warn(self) -> bool: return self._warning_type != WarningType.SILENCE def 
has_warned_with_failure(self) -> bool: return self._has_warned and self.warning_type == WarningType.FAIL def print_single_line(self, line: str) -> None: self._has_warned = True print(line, file=sys.stderr) # noqa: T201 def print_multi_line(self, summary: str, print_func: Callable[[], None], ignore_fail: bool = False) -> None: # noqa: FBT001, FBT002 """ Print a multi-line warning, delegating most of the printing logic to the caller. :param summary: a summary of the warning :param print_func: a callback that the caller passes that performs most of the multi-line printing :param ignore_fail: if True, this warning won't be a fail when `self.warning_type == WarningType.FAIL` """ print(f"Warning!!! {summary}:", file=sys.stderr) # noqa: T201 print_func() if ignore_fail: print("NOTE: This warning isn't a failure warning.", file=sys.stderr) # noqa: T201 else: self._has_warned = True print("-" * 72, file=sys.stderr) # noqa: T201 _shared_warning_printer = WarningPrinter() def get_warning_printer() -> WarningPrinter: """Shared warning printer, representing a module-level singleton object.""" return _shared_warning_printer __all__ = ["WarningPrinter", "get_warning_printer"] pipdeptree-2.30.0/src/pipdeptree/_validate.py0000664000175000017510000000752515105004414020523 0ustar nileshnileshfrom __future__ import annotations import sys from collections import defaultdict from typing import TYPE_CHECKING from pipdeptree._warning import get_warning_printer if TYPE_CHECKING: from pipdeptree._models.package import Package from ._models import DistPackage, PackageDAG, ReqPackage def validate(tree: PackageDAG) -> None: # Before any reversing or filtering, show warnings to console, about possibly conflicting or cyclic deps if found # and warnings are enabled (i.e. 
def validate(tree: PackageDAG) -> None:
    """
    Emit console warnings about possibly conflicting or cyclic dependencies.

    Runs before any reversing or filtering, and only when warnings are enabled
    (i.e. only if output is to be printed to console).
    """
    warning_printer = get_warning_printer()
    if not warning_printer.should_warn():
        return

    conflicts = conflicting_deps(tree)
    if conflicts:
        warning_printer.print_multi_line(
            "Possibly conflicting dependencies found", lambda: render_conflicts_text(conflicts)
        )

    cycles = cyclic_deps(tree)
    if cycles:
        warning_printer.print_multi_line("Cyclic dependencies found", lambda: render_cycles_text(cycles))


def conflicting_deps(tree: PackageDAG) -> dict[DistPackage, list[ReqPackage]]:
    """
    Return dependencies which are not present or conflict with the requirements of other packages.

    e.g. will warn if pkg1 requires pkg2==2.0 and pkg2==1.0 is installed

    :param tree: the requirements tree (dict)
    :returns: dict of DistPackage -> list of unsatisfied/unknown ReqPackage
    :rtype: dict
    """
    conflicting = defaultdict(list)
    for package, requires in tree.items():
        bad_reqs = [req for req in requires if req.is_conflicting()]
        if bad_reqs:
            conflicting[package].extend(bad_reqs)
    return conflicting


def render_conflicts_text(conflicts: dict[DistPackage, list[ReqPackage]]) -> None:
    """Print each conflicting package and its offending requirements to stderr."""
    # Enforce alphabetical order when listing conflicts
    for pkg in sorted(conflicts.keys()):
        print(f"* {pkg.render_as_root(frozen=False)}", file=sys.stderr)  # noqa: T201
        for req in conflicts[pkg]:
            print(f" - {req.render_as_branch(frozen=False)}", file=sys.stderr)  # noqa: T201


def cyclic_deps(tree: PackageDAG) -> list[list[Package]]:
    """
    Return cyclic dependencies as list of lists.

    :param tree: package tree/dag
    :returns: list of lists, where each list represents a cycle
    """

    def dfs(root: DistPackage, current: Package, seen: set[str], path: list[Package]) -> bool:
        # Walk depth-first from `current`; append the cycle members to `path`
        # (in reverse order) once a path back to `root` is found.
        if current.key in seen:
            # Revisiting the root means we closed a cycle; any other repeat is ignored.
            if current.key == root.key:
                path.append(current)
                return True
            return False
        seen.add(current.key)
        dist_node = tree.get_node_as_parent(current.key)
        if not dist_node:
            return False
        children = tree.get(dist_node)
        if not children:
            return False
        for child in children:
            if dfs(root, child, seen, path):
                path.append(current)
                return True
        return False

    cycles: list[list[Package]] = []
    for pkg in tree:
        path: list[Package] = []
        seen: set[str] = set()
        if dfs(pkg, pkg, seen, path):
            path.reverse()
            cycles.append(path)
    return cycles


def render_cycles_text(cycles: list[list[Package]]) -> None:
    """Print each dependency cycle on one stderr line, e.g. ``* a => b => a``."""
    # List in alphabetical order the dependency that caused the cycle (i.e. the second-to-last Package element)
    for cycle in sorted(cycles, key=lambda c: c[-2].key):
        rendered = " => ".join(pkg.project_name for pkg in cycle)
        print(f"* {rendered}", file=sys.stderr)  # noqa: T201


__all__ = [
    "validate",
]
def render_text(
    tree: PackageDAG,
    *,
    max_depth: float,
    encoding: str,
    list_all: bool = True,
    include_license: bool = False,
) -> None:
    """
    Print tree as text on console.

    :param tree: the package tree
    :param max_depth: the maximum depth of the dependency tree
    :param encoding: encoding to use (use "utf-8", "utf-16", "utf-32" for unicode or anything else for legacy output)
    :param list_all: whether to list all the pkgs at the root level or only those that are the sub-dependencies
    :param include_license: provide license information
    :returns: None
    """
    nodes = get_top_level_nodes(tree, list_all=list_all)

    use_unicode = encoding in {"utf-8", "utf-16", "utf-32"}
    if use_unicode:
        _render_text_with_unicode(tree, nodes, max_depth, include_license)
    else:
        _render_text_without_unicode(tree, nodes, max_depth, include_license)


def get_top_level_nodes(tree: PackageDAG, *, list_all: bool) -> list[DistPackage]:
    """
    Get a list of nodes that will appear at the first depth of the dependency tree.

    :param tree: the package tree
    :param list_all: whether to list all the pkgs at the root level or only those that are the sub-dependencies
    """
    tree = tree.sort()
    roots = list(tree.keys())
    if not list_all:
        # Drop every package that appears as someone else's dependency.
        branch_keys = {r.key for r in chain.from_iterable(tree.values())}
        roots = [p for p in roots if p.key not in branch_keys]
    return roots


def _render_text_with_unicode(
    tree: PackageDAG,
    nodes: list[DistPackage],
    max_depth: float,
    include_license: bool,  # noqa: FBT001
) -> None:
    # Recursively build the rendered lines, threading through the box-drawing
    # prefix state ("│", "├──", "└──") needed for each depth level.
    def walk(  # noqa: PLR0913, PLR0917
        node: DistPackage | ReqPackage,
        parent: DistPackage | ReqPackage | None = None,
        indent: int = 0,
        cur_chain: list[str] | None = None,
        prefix: str = "",
        depth: int = 0,
        has_grand_parent: bool = False,  # noqa: FBT001, FBT002
        is_last_child: bool = False,  # noqa: FBT001, FBT002
        parent_is_last_child: bool = False,  # noqa: FBT001, FBT002
    ) -> list[Any]:
        cur_chain = cur_chain or []
        node_str = node.render(parent, frozen=False)
        next_prefix = ""
        next_indent = indent + 2

        if parent:
            bullet = "└── " if is_last_child else "├── "
            if has_grand_parent:
                next_indent -= 1
                if parent_is_last_child:
                    prefix += " " * (indent + 1 - depth)
                else:
                    prefix += "│" + " " * (indent - depth)
                # Without this extra space, bullets will point to the space just before the project name
                prefix += " "
            next_prefix = prefix
            node_str = prefix + bullet + node_str
        elif include_license:
            node_str += " " + node.licenses()

        result = [node_str]

        children = tree.get_children(node.key)
        rendered_children = [
            walk(
                child,
                node,
                indent=next_indent,
                cur_chain=[*cur_chain, child.project_name],
                prefix=next_prefix,
                depth=depth + 1,
                has_grand_parent=parent is not None,
                is_last_child=child is children[-1],
                parent_is_last_child=is_last_child,
            )
            for child in children
            if child.project_name not in cur_chain and depth + 1 <= max_depth
        ]
        result += list(chain.from_iterable(rendered_children))
        return result

    lines = chain.from_iterable([walk(p) for p in nodes])
    print("\n".join(lines))  # noqa: T201


def _render_text_without_unicode(
    tree: PackageDAG,
    nodes: list[DistPackage],
    max_depth: float,
    include_license: bool,  # noqa: FBT001
) -> None:
    # Legacy output: plain "- " bullets indented two spaces per level.
    def walk(
        node: DistPackage | ReqPackage,
        parent: DistPackage | ReqPackage | None = None,
        indent: int = 0,
        cur_chain: list[str] | None = None,
        depth: int = 0,
    ) -> list[Any]:
        cur_chain = cur_chain or []
        node_str = node.render(parent, frozen=False)
        if parent:
            node_str = " " * indent + "- " + node_str
        elif include_license:
            node_str += " " + node.licenses()

        result = [node_str]
        rendered_children = [
            walk(child, node, indent=indent + 2, cur_chain=[*cur_chain, child.project_name], depth=depth + 1)
            for child in tree.get_children(node.key)
            if child.project_name not in cur_chain and depth + 1 <= max_depth
        ]
        result += list(chain.from_iterable(rendered_children))
        return result

    lines = chain.from_iterable([walk(p) for p in nodes])
    print("\n".join(lines))  # noqa: T201


__all__ = ["get_top_level_nodes", "render_text"]
# List of reserved keywords in Mermaid that cannot be used as node names.
# See: https://github.com/mermaid-js/mermaid/issues/4182#issuecomment-1454787806
_RESERVED_IDS: Final[frozenset[str]] = frozenset(
    [
        "C4Component",
        "C4Container",
        "C4Deployment",
        "C4Dynamic",
        "_blank",
        "_parent",
        "_self",
        "_top",
        "call",
        "class",
        "classDef",
        "click",
        "end",
        "flowchart",
        "flowchart-v2",
        "graph",
        "interpolate",
        "linkStyle",
        "style",
        "subgraph",
    ],
)


def render_mermaid(tree: PackageDAG) -> None:  # noqa: C901
    """
    Produce a Mermaid flowchart from the dependency graph.

    :param tree: dependency graph
    """
    node_ids_map: dict[str, str] = {}

    def mermaid_id(key: str) -> str:
        """Return a valid Mermaid node ID from a string."""
        # If we have already seen this key, return the canonical ID.
        known = node_ids_map.get(key)
        if known is not None:
            return known
        # If the key is not a reserved keyword, return it as is, and update the map.
        if key not in _RESERVED_IDS:
            node_ids_map[key] = key
            return key
        # If the key is a reserved keyword, append a number to it.
        for number in it.count():
            candidate = f"{key}_{number}"
            if candidate not in node_ids_map:
                node_ids_map[key] = candidate
                return candidate
        raise NotImplementedError

    # Use a sets to avoid duplicate entries.
    nodes: set[str] = set()
    edges: set[str] = set()

    if isinstance(tree, ReversedPackageDAG):
        # Reverse view: edges run from a requirement to the packages that need it.
        for package, reverse_dependencies in tree.items():
            assert isinstance(package, ReqPackage)
            package_label = "\\n".join(
                (package.project_name, "(missing)" if package.is_missing else package.installed_version),
            )
            package_key = mermaid_id(package.key)
            nodes.add(f'{package_key}["{package_label}"]')
            for reverse_dependency in reverse_dependencies:
                assert isinstance(reverse_dependency, DistPackage)
                edge_label = (
                    reverse_dependency.req.version_spec if reverse_dependency.req is not None else None
                ) or "any"
                reverse_dependency_key = mermaid_id(reverse_dependency.key)
                edges.add(f'{package_key} -- "{edge_label}" --> {reverse_dependency_key}')
    else:
        for package, dependencies in tree.items():
            package_label = f"{package.project_name}\\n{package.version}"
            package_key = mermaid_id(package.key)
            nodes.add(f'{package_key}["{package_label}"]')
            for dependency in dependencies:
                edge_label = dependency.version_spec or "any"
                dependency_key = mermaid_id(dependency.key)
                if dependency.is_missing:
                    # Missing dependencies get a dashed node + dotted edge.
                    dependency_label = f"{dependency.project_name}\\n(missing)"
                    nodes.add(f'{dependency_key}["{dependency_label}"]:::missing')
                    edges.add(f"{package_key} -.-> {dependency_key}")
                else:
                    edges.add(f'{package_key} -- "{edge_label}" --> {dependency_key}')

    # Produce the Mermaid Markdown.
    lines = [
        "flowchart TD",
        "classDef missing stroke-dasharray: 5",
        *sorted(nodes),
        *sorted(edges),
    ]
    # Indent everything except the "flowchart TD" header line.
    print("".join(f"{'    ' if i else ''}{line}\n" for i, line in enumerate(lines)))  # noqa: T201


def render_json_tree(tree: PackageDAG) -> None:
    """
    Convert the tree into a nested json representation.

    The json repr will be a list of hashes, each hash having the following fields:

    - package_name
    - key
    - required_version
    - installed_version
    - dependencies: list of dependencies

    :param tree: dependency tree
    :returns: json representation of the tree
    """
    tree = tree.sort()
    branch_keys = {r.key for r in chain.from_iterable(tree.values())}
    top_level = [p for p in tree if p.key not in branch_keys]

    def as_json_node(
        node: DistPackage | ReqPackage,
        parent: DistPackage | ReqPackage | None = None,
        cur_chain: list[str] | None = None,
    ) -> dict[str, Any]:
        if cur_chain is None:
            cur_chain = [node.project_name]

        d: dict[str, str | list[Any] | None] = node.as_dict()  # type: ignore[assignment]
        if parent:
            d["required_version"] = node.version_spec if isinstance(node, ReqPackage) and node.version_spec else "Any"
        else:
            # Root nodes have no requirement; mirror the installed version.
            d["required_version"] = d["installed_version"]

        d["dependencies"] = [
            as_json_node(c, parent=node, cur_chain=[*cur_chain, c.project_name])
            for c in tree.get_children(node.key)
            if c.project_name not in cur_chain  # guard against dependency cycles
        ]
        return d

    print(json.dumps([as_json_node(p) for p in top_level], indent=4))  # noqa: T201
def render_json(tree: PackageDAG) -> None:
    """
    Convert the tree into a flat json representation.

    The json repr will be a list of hashes, each hash having 2 fields:

    - package
    - dependencies: list of dependencies

    :param tree: dependency tree
    :returns: JSON representation of the tree
    """
    tree = tree.sort()
    as_list = [{"package": k.as_dict(), "dependencies": [v.as_dict() for v in vs]} for k, vs in tree.items()]
    print(json.dumps(as_list, indent=4))  # noqa: T201


def dump_graphviz(  # noqa: C901
    tree: PackageDAG,
    output_format: str = "dot",
    is_reverse: bool = False,  # noqa: FBT001, FBT002
) -> str | bytes:
    """
    Output dependency graph as one of the supported GraphViz output formats.

    :param dict tree: dependency graph
    :param string output_format: output format
    :param bool is_reverse: reverse or not
    :returns: representation of tree in the specified output format
    :rtype: str or binary representation depending on the output format
    """
    try:
        from graphviz import Digraph  # noqa: PLC0415
    except ImportError as exc:
        print(  # noqa: T201
            "graphviz is not available, but necessary for the output option. Please install it.",
            file=sys.stderr,
        )
        raise SystemExit(1) from exc

    from graphviz import parameters  # noqa: PLC0415

    valid_formats = parameters.FORMATS
    if output_format not in valid_formats:
        print(f"{output_format} is not a supported output format.", file=sys.stderr)  # noqa: T201
        print(f"Supported formats are: {', '.join(sorted(valid_formats))}", file=sys.stderr)  # noqa: T201
        raise SystemExit(1)

    graph = Digraph(format=output_format)

    if is_reverse:
        for dep_rev, parents in tree.items():
            assert isinstance(dep_rev, ReqPackage)
            graph.node(dep_rev.key, label=f"{dep_rev.project_name}\\n{dep_rev.installed_version}")
            for parent in parents:
                # req reference of the dep associated with this particular parent package
                assert isinstance(parent, DistPackage)
                edge_label = (parent.req.version_spec if parent.req is not None else None) or "any"
                graph.edge(dep_rev.key, parent.key, label=edge_label)
    else:
        for pkg, deps in tree.items():
            graph.node(pkg.key, label=f"{pkg.project_name}\\n{pkg.version}")
            for dep in deps:
                edge_label = dep.version_spec or "any"
                if dep.is_missing:
                    # Missing dependencies are rendered dashed, with no version label.
                    graph.node(dep.key, label=f"{dep.project_name}\\n(missing)", style="dashed")
                    graph.edge(pkg.key, dep.key, style="dashed")
                else:
                    graph.edge(pkg.key, dep.key, label=edge_label)

    # Allow output of dot format, even if GraphViz isn't installed.
    if output_format == "dot":
        # Emulates graphviz.dot.Dot.__iter__() to force the sorting of graph.body.
        # Fixes https://github.com/tox-dev/pipdeptree/issues/188
        # That way we can guarantee the output of the dot format is deterministic
        # and stable.
        return "".join([next(iter(graph)), *sorted(graph.body), graph._tail])  # noqa: SLF001

    # As it's unknown if the selected output format is binary or not, try to
    # decode it as UTF8 and only print it out in binary if that's not possible.
    try:
        return graph.pipe().decode("utf-8")  # type: ignore[no-any-return]
    except UnicodeDecodeError:
        return graph.pipe()  # type: ignore[no-any-return]


def print_graphviz(dump_output: str | bytes) -> None:
    """
    Dump the data generated by GraphViz to stdout.

    :param dump_output: The output from dump_graphviz
    """
    if hasattr(dump_output, "encode"):
        # Text output can go straight through print().
        print(dump_output)  # noqa: T201
    else:
        # Binary output must bypass the text layer of stdout.
        with os.fdopen(sys.stdout.fileno(), "wb") as bytestream:
            bytestream.write(dump_output)


def render_graphviz(tree: PackageDAG, *, output_format: str, reverse: bool) -> None:
    """Render the tree via GraphViz in the requested format and print it."""
    print_graphviz(dump_graphviz(tree, output_format=output_format, is_reverse=reverse))


def render_freeze(tree: PackageDAG, *, max_depth: float, list_all: bool = True) -> None:
    """Render the tree in ``pip freeze``-like format, indented by depth."""
    nodes = get_top_level_nodes(tree, list_all=list_all)

    def walk(
        node: DistPackage | ReqPackage,
        parent: DistPackage | ReqPackage | None = None,
        indent: int = 0,
        cur_chain: list[str] | None = None,
        depth: int = 0,
    ) -> list[Any]:
        cur_chain = cur_chain or []
        node_str = node.render(parent, frozen=True)
        if parent:
            node_str = " " * indent + node_str

        result = [node_str]
        rendered_children = [
            walk(child, node, indent=indent + 2, cur_chain=[*cur_chain, child.project_name], depth=depth + 1)
            for child in tree.get_children(node.key)
            if child.project_name not in cur_chain and depth + 1 <= max_depth
        ]
        result += list(chain.from_iterable(rendered_children))
        return result

    lines = chain.from_iterable([walk(p) for p in nodes])
    print("\n".join(lines))  # noqa: T201


__all__ = [
    "render_freeze",
]
pipdeptree-2.30.0/src/pipdeptree/_render/__init__.py0000664000175000017510000000224015105004414021735 0ustar nileshnileshfrom __future__ import annotations from typing import TYPE_CHECKING from .freeze import render_freeze from .graphviz import render_graphviz from .json import render_json from .json_tree import render_json_tree from .mermaid import render_mermaid from .text import render_text if TYPE_CHECKING: from pipdeptree._cli import Options from pipdeptree._models import PackageDAG def render(options: Options, tree: PackageDAG) -> None: output_format = options.output_format if output_format == "json": render_json(tree) elif output_format == "json-tree": render_json_tree(tree) elif output_format == "mermaid": render_mermaid(tree) elif output_format == "freeze": render_freeze(tree, max_depth=options.depth, list_all=options.all) elif output_format.startswith("graphviz-"): render_graphviz(tree, output_format=output_format[len("graphviz-") :], reverse=options.reverse) else: render_text( tree, max_depth=options.depth, encoding=options.encoding, list_all=options.all, include_license=options.license, ) __all__ = [ "render", ] pipdeptree-2.30.0/src/pipdeptree/_models/0000775000175000017510000000000015105004414017632 5ustar nileshnileshpipdeptree-2.30.0/src/pipdeptree/_models/package.py0000664000175000017510000002051515105004414021602 0ustar nileshnileshfrom __future__ import annotations from abc import ABC, abstractmethod from importlib import import_module from importlib.metadata import Distribution, PackageNotFoundError, metadata, version from inspect import ismodule from typing import TYPE_CHECKING from packaging.requirements import InvalidRequirement, Requirement from packaging.utils import canonicalize_name from pipdeptree._freeze import dist_to_frozen_repr if TYPE_CHECKING: from collections.abc import Iterator from importlib.metadata import Distribution class InvalidRequirementError(ValueError): """ An invalid requirement string was found. 
When raising an exception, this should provide just the problem requirement string. """ class Package(ABC): """Abstract class for wrappers around objects that pip returns.""" UNKNOWN_LICENSE_STR = "(Unknown license)" def __init__(self, project_name: str) -> None: self.project_name = project_name self.key = canonicalize_name(project_name) def licenses(self) -> str: try: dist_metadata = metadata(self.key) except PackageNotFoundError: return self.UNKNOWN_LICENSE_STR if license_str := dist_metadata[("License-Expression")]: return f"({license_str})" license_strs: list[str] = [] classifiers = dist_metadata.get_all("Classifier", []) for classifier in classifiers: line = str(classifier) if line.startswith("License"): license_str = line.rsplit(":: ", 1)[-1] license_strs.append(license_str) if len(license_strs) == 0: return self.UNKNOWN_LICENSE_STR return f"({', '.join(license_strs)})" @abstractmethod def render_as_root(self, *, frozen: bool) -> str: raise NotImplementedError @abstractmethod def render_as_branch(self, *, frozen: bool) -> str: raise NotImplementedError @abstractmethod def as_dict(self) -> dict[str, str]: raise NotImplementedError def render( self, parent: DistPackage | ReqPackage | None = None, *, frozen: bool = False, ) -> str: render = self.render_as_branch if parent else self.render_as_root return render(frozen=frozen) @staticmethod def as_frozen_repr(dist: Distribution) -> str: return dist_to_frozen_repr(dist) def __repr__(self) -> str: return f'<{self.__class__.__name__}("{self.key}")>' def __lt__(self, rhs: Package) -> bool: return self.key < rhs.key class DistPackage(Package): """ Wrapper class for importlib.metadata.Distribution instances. :param obj: importlib.metadata.Distribution to wrap over :param req: optional ReqPackage object to associate this DistPackage with. 
class DistPackage(Package):
    """
    Wrapper class for importlib.metadata.Distribution instances.

    :param obj: importlib.metadata.Distribution to wrap over
    :param req: optional ReqPackage object to associate this DistPackage with. This is useful for displaying the tree
        in reverse
    """

    def __init__(self, obj: Distribution, req: ReqPackage | None = None) -> None:
        super().__init__(obj.metadata["Name"])
        self._obj = obj
        self.req = req

    def requires(self) -> Iterator[Requirement]:
        """
        Return an iterator of the distribution's required dependencies.

        :raises InvalidRequirementError: If the metadata contains invalid requirement strings.
        """
        for raw_req in self._obj.requires or []:
            try:
                req = Requirement(raw_req)
            except InvalidRequirement:
                raise InvalidRequirementError(raw_req) from None
            # Make sure that we're either dealing with a dependency that has no environment markers or does but
            # are evaluated True against the existing environment (if it's False, it means they cannot be
            # installed). "extra" markers are always evaluated False here which is what we want when retrieving
            # only required dependencies.
            if not req.marker or req.marker.evaluate():
                yield req

    @property
    def version(self) -> str:
        return self._obj.version

    def unwrap(self) -> Distribution:
        """Exposes the internal `importlib.metadata.Distribution` object."""
        return self._obj

    def render_as_root(self, *, frozen: bool) -> str:
        if frozen:
            return self.as_frozen_repr(self._obj)
        return f"{self.project_name}=={self.version}"

    def render_as_branch(self, *, frozen: bool) -> str:
        assert self.req is not None
        if frozen:
            return self.render_as_root(frozen=frozen)
        parent_str = self.req.project_name
        parent_ver_spec = self.req.version_spec
        if parent_ver_spec:
            parent_str += parent_ver_spec
        return f"{self.project_name}=={self.version} [requires: {parent_str}]"

    def as_requirement(self) -> ReqPackage:
        """Return a ReqPackage representation of this DistPackage."""
        return ReqPackage(Requirement(f"{self.project_name}=={self.version}"), dist=self)

    def as_parent_of(self, req: ReqPackage | None) -> DistPackage:
        """
        Return a DistPackage instance associated to a requirement.

        This association is necessary for reversing the PackageDAG. If `req` is None, and the `req` attribute of the
        current instance is also None, then the same instance will be returned.

        :param ReqPackage req: the requirement to associate with
        :returns: DistPackage instance
        """
        if req is None and self.req is None:
            return self
        return self.__class__(self._obj, req)

    def as_dict(self) -> dict[str, str]:
        return {"key": self.key, "package_name": self.project_name, "installed_version": self.version}


class ReqPackage(Package):
    """
    Wrapper class for Requirement instance.

    :param obj: The `Requirement` instance to wrap over
    :param dist: optional `importlib.metadata.Distribution` instance for this requirement
    """

    # Sentinel rendered when the installed version cannot be determined.
    UNKNOWN_VERSION = "?"

    def __init__(self, obj: Requirement, dist: DistPackage | None = None) -> None:
        super().__init__(obj.name)
        self._obj = obj
        self.dist = dist

    def render_as_root(self, *, frozen: bool) -> str:
        if not frozen:
            return f"{self.project_name}=={self.installed_version}"
        if self.dist:
            return self.as_frozen_repr(self.dist.unwrap())
        return self.project_name

    def render_as_branch(self, *, frozen: bool) -> str:
        if frozen:
            return self.render_as_root(frozen=frozen)
        req_ver = self.version_spec or "Any"
        return f"{self.project_name} [required: {req_ver}, installed: {self.installed_version}]"

    @property
    def version_spec(self) -> str | None:
        # `reverse` makes '>' prior to '<'
        specs = sorted(map(str, self._obj.specifier), reverse=True)
        return ",".join(specs) if specs else None

    @property
    def installed_version(self) -> str:
        if self.dist:
            return self.dist.version
        try:
            return version(self.key)
        except PackageNotFoundError:
            pass
        # Avoid AssertionError with setuptools, see https://github.com/tox-dev/pipdeptree/issues/162
        if self.key == "setuptools":
            return self.UNKNOWN_VERSION
        # Last resort: import the module and read __version__ off it.
        try:
            module = import_module(self.key)
        except ImportError:
            return self.UNKNOWN_VERSION
        ver = getattr(module, "__version__", self.UNKNOWN_VERSION)
        if ismodule(ver):
            # Some packages expose a `__version__` submodule rather than a string.
            return getattr(ver, "__version__", self.UNKNOWN_VERSION)
        return ver

    def is_conflicting(self) -> bool:
        """If installed version conflicts with required version."""
        # unknown installed version is also considered conflicting
        if self.is_missing:
            return True
        return not self._obj.specifier.contains(self.installed_version, prereleases=True)

    @property
    def is_missing(self) -> bool:
        return self.installed_version == self.UNKNOWN_VERSION

    def as_dict(self) -> dict[str, str]:
        return {
            "key": self.key,
            "package_name": self.project_name,
            "installed_version": self.installed_version,
            "required_version": self.version_spec if self.version_spec is not None else "Any",
        }


__all__ = [
    "DistPackage",
    "ReqPackage",
]


class IncludeExcludeOverlapError(Exception):
    """Include and exclude sets passed as input violate mutual exclusivity requirement."""


class IncludePatternNotFoundError(Exception):
    """Include patterns weren't found when filtering a `PackageDAG`."""


def render_invalid_reqs_text(dist_name_to_invalid_reqs_dict: dict[str, list[str]]) -> None:
    """Print, per distribution, each invalid requirement string that was skipped."""
    for dist_name, invalid_reqs in dist_name_to_invalid_reqs_dict.items():
        print(dist_name, file=sys.stderr)  # noqa: T201
        for invalid_req in invalid_reqs:
            print(f'  Skipping "{invalid_req}"', file=sys.stderr)  # noqa: T201
The nodes and their relationships (edges) are internally stored using a map as follows, {a: [b, c], b: [d], c: [d, e], d: [e], e: [], f: [b], g: [e, f]} Here, node `a` has 2 children nodes `b` and `c`. Consider edge direction from `a` -> `b` and `a` -> `c` respectively. A node is expected to be an instance of a subclass of `Package`. The keys are must be of class `DistPackage` and each item in values must be of class `ReqPackage`. (See also ReversedPackageDAG where the key and value types are interchanged). """ @classmethod def from_pkgs(cls, pkgs: list[Distribution]) -> PackageDAG: warning_printer = get_warning_printer() dist_pkgs = [DistPackage(p) for p in pkgs] idx = {p.key: p for p in dist_pkgs} m: dict[DistPackage, list[ReqPackage]] = {} dist_name_to_invalid_reqs_dict: dict[str, list[str]] = {} for p in dist_pkgs: reqs = [] requires_iterator = p.requires() while True: try: req = next(requires_iterator) except InvalidRequirementError as err: # We can't work with invalid requirement strings. Let's warn the user about them. if warning_printer.should_warn(): dist_name_to_invalid_reqs_dict.setdefault(p.project_name, []).append(str(err)) continue except StopIteration: break d = idx.get(canonicalize_name(req.name)) # Distribution.requires only returns the name of requirements in the metadata file, which may not be the # same as the name in PyPI. We should try to retain the original package names for requirements. 
# See https://github.com/tox-dev/pipdeptree/issues/242 req.name = d.project_name if d is not None else req.name pkg = ReqPackage(req, d) reqs.append(pkg) m[p] = reqs should_print_warning = warning_printer.should_warn() and dist_name_to_invalid_reqs_dict if should_print_warning: warning_printer.print_multi_line( "Invalid requirement strings found for the following distributions", lambda: render_invalid_reqs_text(dist_name_to_invalid_reqs_dict), ) return cls(m) def __init__(self, m: dict[DistPackage, list[ReqPackage]]) -> None: """ Initialize the PackageDAG object. :param dict m: dict of node objects (refer class docstring) :returns: None :rtype: NoneType """ self._obj: dict[DistPackage, list[ReqPackage]] = m self._index: dict[str, DistPackage] = {p.key: p for p in list(self._obj)} def get_node_as_parent(self, node_key: str) -> DistPackage | None: """ Get the node from the keys of the dict representing the DAG. This method is useful if the dict representing the DAG contains different kind of objects in keys and values. Use this method to look up a node obj as a parent (from the keys of the dict) given a node key. :param node_key: identifier corresponding to key attr of node obj :returns: node obj (as present in the keys of the dict) """ try: return self._index[node_key] except KeyError: return None def get_children(self, node_key: str) -> list[ReqPackage]: """ Get child nodes for a node by its key. :param node_key: key of the node to get children of :returns: child nodes """ node = self.get_node_as_parent(node_key) return self._obj[node] if node else [] def filter_nodes( # noqa: C901, PLR0912 self, include: list[str] | None, exclude: set[str] | None, exclude_deps: bool = False, # noqa: FBT001, FBT002 ) -> PackageDAG: """ Filter nodes in a graph by given parameters. If a node is included, then all it's children are also included. 
:param include: list of node keys to include (or None) :param exclude: set of node keys to exclude (or None) :raises IncludeExcludeOverlapError: if include and exclude contains the same elements :raises IncludePatternNotFoundError: if include has patterns that do not match anything in the graph :returns: filtered version of the graph """ # If neither of the filters are specified, short circuit if include is None and exclude is None: return self include_with_casing_preserved: list[str] = [] if include: include_with_casing_preserved = include include = [canonicalize_name(i) for i in include] exclude = {canonicalize_name(s) for s in exclude} if exclude else set() # Check for mutual exclusion of include and exclude sets # after normalizing the values to lowercase if include and exclude and (set(include) & exclude): raise IncludeExcludeOverlapError if exclude_deps: exclude = self._build_exclusion_set_with_dependencies(exclude) # Filter nodes that are explicitly included/excluded stack: deque[DistPackage] = deque() m: dict[DistPackage, list[ReqPackage]] = {} seen = set() matched_includes: set[str] = set() for node in self._obj: if should_exclude_node(node.key, exclude): continue if include is None: stack.append(node) else: should_append = False for i in include: if fnmatch(node.key, i): # Add all patterns that match with the node key. 
Otherwise if we break, patterns like py* or # pytest* (which both should match "pytest") may cause one pattern to be missed and will # raise an error matched_includes.add(i) should_append = True if should_append: stack.append(node) # Perform DFS on the explicitly included nodes so that we can also include their dependencies, if applicable while stack: n = stack.pop() cldn = [c for c in self._obj[n] if not should_exclude_node(c.key, exclude)] m[n] = cldn seen.add(n.key) for c in cldn: if c.key not in seen: cld_node = self.get_node_as_parent(c.key) if cld_node: stack.append(cld_node) else: # It means there's no root node corresponding to the child node i.e. # a dependency is missing continue non_existent_includes = [ i for i in include_with_casing_preserved if canonicalize_name(i) not in matched_includes ] if non_existent_includes: raise IncludePatternNotFoundError( "No packages matched using the following patterns: " + ", ".join(non_existent_includes) ) return self.__class__(m) def _build_exclusion_set_with_dependencies(self, old_exclude: set[str]) -> set[str]: """ Build a new exclusion set using the fnmatch patterns in `old_exclude` to also grab dependencies. Note that it will actually resolve the patterns in old_exclude to actual nodes and use that result instead of keeping the patterns. 
""" # First, resolve old_exclude to actual nodes in the graph as old_exclude may instead contain patterns that are # used by fnmatch (or the exclusion may not even exist in the graph) resolved_exclude: set[str] = set() resolved_exclude.update(node.key for node in self._obj if should_exclude_node(node.key, old_exclude)) # Find all possible candidate nodes for exclusion using DFS candidates: set[str] = set() stack = list(resolved_exclude) while stack: candidate = stack.pop() if candidate not in candidates: candidates.add(candidate) stack.extend(dep.key for dep in self.get_children(candidate)) # Build a reverse graph to know the dependents of a candidate node reverse_graph = self.reverse() # Precompute number of dependents for each candidate dependents_count: defaultdict[str, int] = defaultdict(int) for node in candidates: dependents_count[node] += len(reverse_graph.get_children(node)) new_exclude = set() # Determine what nodes should actually be excluded # Use the resolved exclude set as a starting point as these nodes are explicitly excluded queue = deque(resolved_exclude) while queue: node = queue.popleft() new_exclude.add(node) for child in self.get_children(node): child_key = child.key dependents_count[child_key] -= 1 # If all dependents of child are excluded, it itself is now eligible for exclusion # If this branch is never reached, this means there is a dependant that is outside the exclusion set # that needs child # # We also don't want to add child nodes that are in the resolved exclude set, as they are explicitly # excluded and have either already been processed or are in the queue awaiting processing if ( dependents_count[child_key] == 0 and child_key not in new_exclude and child_key not in resolved_exclude ): queue.append(child_key) return new_exclude def reverse(self) -> ReversedPackageDAG: """ Reverse the DAG, or turn it upside-down. In other words, the directions of edges of the nodes in the DAG will be reversed. 
Note that this function purely works on the nodes in the graph. This implies that to perform a combination of filtering and reversing, the order in which `filter` and `reverse` methods should be applied is important. For e.g., if reverse is called on a filtered graph, then only the filtered nodes and it's children will be considered when reversing. On the other hand, if filter is called on reversed DAG, then the definition of "child" nodes is as per the reversed DAG. :returns: DAG in the reversed form """ m: defaultdict[ReqPackage, list[DistPackage]] = defaultdict(list) child_keys = {r.key for r in chain.from_iterable(self._obj.values())} for k, vs in self._obj.items(): for v in vs: # if v is already added to the dict, then ensure that # we are using the same object. This check is required # as we're using array mutation node: ReqPackage = next((p for p in m if p.key == v.key), v) m[node].append(k.as_parent_of(v)) if k.key not in child_keys: m[k.as_requirement()] = [] return ReversedPackageDAG(dict(m)) # type: ignore[arg-type] def sort(self) -> PackageDAG: """ Return sorted tree in which the underlying _obj dict is an dict, sorted alphabetically by the keys. :returns: Instance of same class with dict """ return self.__class__({k: sorted(v) for k, v in sorted(self._obj.items())}) # Methods required by the abstract base class Mapping def __getitem__(self, arg: DistPackage) -> list[ReqPackage] | None: # type: ignore[override] return self._obj.get(arg) def __iter__(self) -> Iterator[DistPackage]: return self._obj.__iter__() def __len__(self) -> int: return len(self._obj) def should_exclude_node(key: str, exclude: set[str]) -> bool: return any(fnmatch(key, e) for e in exclude) class ReversedPackageDAG(PackageDAG): """ Representation of Package dependencies in the reverse order. Similar to it's super class `PackageDAG`, the underlying datastructure is a dict, but here the keys are expected to be of type `ReqPackage` and each item in the values of type `DistPackage`. 
Typically, this object will be obtained by calling `PackageDAG.reverse`. """ def reverse(self) -> PackageDAG: # type: ignore[override] """ Reverse the already reversed DAG to get the PackageDAG again. :returns: reverse of the reversed DAG """ m: defaultdict[DistPackage, list[ReqPackage]] = defaultdict(list) child_keys = {r.key for r in chain.from_iterable(self._obj.values())} for k, vs in self._obj.items(): for v in vs: assert isinstance(v, DistPackage) node = next((p for p in m if p.key == v.key), v.as_parent_of(None)) m[node].append(k) if k.key not in child_keys: assert isinstance(k, ReqPackage) assert k.dist is not None m[k.dist] = [] return PackageDAG(dict(m)) __all__ = [ "PackageDAG", "ReversedPackageDAG", ] pipdeptree-2.30.0/src/pipdeptree/_models/__init__.py0000664000175000017510000000034115105004414021741 0ustar nileshnileshfrom __future__ import annotations from .dag import PackageDAG, ReversedPackageDAG from .package import DistPackage, ReqPackage __all__ = [ "DistPackage", "PackageDAG", "ReqPackage", "ReversedPackageDAG", ] pipdeptree-2.30.0/src/pipdeptree/_freeze.py0000664000175000017510000000547215105004414020211 0ustar nileshnileshfrom __future__ import annotations import locale from json import JSONDecodeError from pathlib import Path from typing import TYPE_CHECKING, Any from pip._internal.models.direct_url import ( DirectUrl, # noqa: PLC2701 DirectUrlValidationError, # noqa: PLC2701 ) from pip._internal.utils.egg_link import egg_link_path_from_sys_path # noqa: PLC2701 from pip._vendor.packaging.version import Version # noqa: PLC2701 if TYPE_CHECKING: from importlib.metadata import Distribution def dist_to_frozen_repr(dist: Distribution) -> str: """Return the frozen requirement repr of a `importlib.metadata.Distribution` object.""" from pip._internal.operations.freeze import FrozenRequirement # noqa: PLC0415, PLC2701 adapter = PipBaseDistributionAdapter(dist) fr = FrozenRequirement.from_dist(adapter) # type: ignore[arg-type] return str(fr).strip() 
class PipBaseDistributionAdapter: """ An adapter class for pip's `pip._internal.metadata.BaseDistribution` abstract class. It essentially wraps over an importlib.metadata.Distribution object and provides just enough fields/methods found in pip's `BaseDistribution` so that we can use `pip._internal.operations.freeze.FrozenRequirement.from_dist()`. :param dist: Represents an `importlib.metadata.Distribution` object. """ DIRECT_URL_METADATA_NAME = "direct_url.json" def __init__(self, dist: Distribution) -> None: self._dist = dist self._raw_name = dist.metadata["Name"] self._version = Version(dist.version) @property def raw_name(self) -> str | Any: return self._raw_name @property def version(self) -> Version: return self._version @property def editable(self) -> bool: return self.editable_project_location is not None @property def direct_url(self) -> DirectUrl | None: result = None json_str = self._dist.read_text(self.DIRECT_URL_METADATA_NAME) try: if json_str: result = DirectUrl.from_json(json_str) except ( UnicodeDecodeError, JSONDecodeError, DirectUrlValidationError, ): return result return result @property def editable_project_location(self) -> str | None: direct_url = self.direct_url if direct_url and direct_url.is_local_editable(): from pip._internal.utils.urls import url_to_path # noqa: PLC2701, PLC0415 return url_to_path(direct_url.url) result = None egg_link_path = egg_link_path_from_sys_path(self.raw_name) if egg_link_path: with Path(egg_link_path).open("r", encoding=locale.getpreferredencoding(False)) as f: # noqa: FBT003 result = f.readline().rstrip() return result __all__ = ["dist_to_frozen_repr"] pipdeptree-2.30.0/src/pipdeptree/_discovery.py0000664000175000017510000001416515105004414020737 0ustar nileshnileshfrom __future__ import annotations import ast import site import subprocess # noqa: S404 import sys from importlib.metadata import Distribution, distributions from pathlib import Path from typing import TYPE_CHECKING from packaging.utils import 
canonicalize_name

from pipdeptree._warning import get_warning_printer

if TYPE_CHECKING:
    from collections.abc import Iterable


class InterpreterQueryError(Exception):
    """A problem occurred while trying to query a custom interpreter."""


def get_installed_distributions(
    interpreter: str = sys.executable or "",
    supplied_paths: list[str] | None = None,
    local_only: bool = False,  # noqa: FBT001, FBT002
    user_only: bool = False,  # noqa: FBT001, FBT002
) -> list[Distribution]:
    """
    Return the distributions installed in the interpreter's environment.

    :raises InterpreterQueryError: If a failure occurred while querying the interpreter.
    """
    # sys.path is what importlib.metadata.PathDistribution and pip consult by default.
    search_paths = supplied_paths or sys.path

    # See https://docs.python.org/3/library/venv.html#how-venvs-work for more details.
    in_venv = sys.prefix != sys.base_prefix

    querying_other_interpreter = not supplied_paths and (
        Path(interpreter).absolute() != Path(sys.executable).absolute()
    )
    if querying_other_interpreter:
        search_paths = query_interpreter_for_paths(interpreter, local_only=local_only)
    elif local_only and in_venv:
        search_paths = [p for p in search_paths if p.startswith(sys.prefix)]

    if user_only:
        search_paths = [p for p in search_paths if p.startswith(site.getusersitepackages())]

    return filter_valid_distributions(distributions(path=search_paths))


def query_interpreter_for_paths(interpreter: str, *, local_only: bool = False) -> list[str]:
    """
    Query an interpreter for paths containing distribution metadata.

    :raises InterpreterQueryError: If a failure occurred while querying the interpreter.
    """
    # We ask the interpreter directly for its own `sys.path`. If both --python and --local-only are
    # given, only keep metadata paths that live under the interpreter's prefix.
    query = (
        "import sys; print([p for p in sys.path if p.startswith(sys.prefix)])"
        if local_only
        else "import sys; print(sys.path)"
    )
    try:
        completed = subprocess.run(  # noqa: S603
            [interpreter, "-c", query],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
            check=True,
            text=True,
        )
        return ast.literal_eval(completed.stdout)  # type: ignore[no-any-return]
    except Exception as e:
        raise InterpreterQueryError(str(e)) from e


def filter_valid_distributions(iterable_dists: Iterable[Distribution]) -> list[Distribution]:
    warning_printer = get_warning_printer()

    # Since importlib.metadata.distributions() can return duplicate packages, we need to handle this. pip's approach is
    # to keep track of each package metadata it finds, and if it encounters one again it will simply just ignore it. We
    # take it one step further and warn the user that there are duplicate packages in their environment.
    # See https://github.com/pypa/pip/blob/7c49d06ea4be4635561f16a524e3842817d1169a/src/pip/_internal/metadata/importlib/_envs.py#L34
    seen_dists: dict[str, Distribution] = {}
    first_seen_to_already_seen_dists_dict: dict[Distribution, list[Distribution]] = {}

    # We also need to handle invalid metadata, though we can't get paths to invalid distribution metadata directly since
    # importlib doesn't expose an API for it. We do have the directory they reside in, so let's use that.
site_dir_with_invalid_metadata: set[str] = set() dists = [] for dist in iterable_dists: if not has_valid_metadata(dist): site_dir = str(dist.locate_file("")) site_dir_with_invalid_metadata.add(site_dir) continue normalized_name = canonicalize_name(dist.metadata["Name"]) if normalized_name not in seen_dists: seen_dists[normalized_name] = dist dists.append(dist) continue if warning_printer.should_warn(): already_seen_dists = first_seen_to_already_seen_dists_dict.setdefault(seen_dists[normalized_name], []) already_seen_dists.append(dist) if warning_printer.should_warn(): if site_dir_with_invalid_metadata: warning_printer.print_multi_line( "Missing or invalid metadata found in the following site dirs", lambda: render_invalid_metadata_text(site_dir_with_invalid_metadata), ) if first_seen_to_already_seen_dists_dict: warning_printer.print_multi_line( "Duplicate package metadata found", lambda: render_duplicated_dist_metadata_text(first_seen_to_already_seen_dists_dict), ignore_fail=True, ) return dists def has_valid_metadata(dist: Distribution) -> bool: return "Name" in dist.metadata def render_invalid_metadata_text(site_dirs_with_invalid_metadata: set[str]) -> None: for site_dir in site_dirs_with_invalid_metadata: print(site_dir, file=sys.stderr) # noqa: T201 FirstSeenWithDistsPair = tuple[Distribution, Distribution] def render_duplicated_dist_metadata_text( first_seen_to_already_seen_dists_dict: dict[Distribution, list[Distribution]], ) -> None: entries_to_pairs_dict: dict[str, list[FirstSeenWithDistsPair]] = {} for first_seen, dists in first_seen_to_already_seen_dists_dict.items(): for dist in dists: entry = str(dist.locate_file("")) dist_list = entries_to_pairs_dict.setdefault(entry, []) dist_list.append((first_seen, dist)) for entry, pairs in entries_to_pairs_dict.items(): print(f'"{entry}"', file=sys.stderr) # noqa: T201 for first_seen, dist in pairs: print( # noqa: T201 ( f" {dist.metadata['Name']:<32} {dist.version:<16} (using {first_seen.version}," f' 
"{first_seen.locate_file("")}")' ), file=sys.stderr, ) __all__ = [ "get_installed_distributions", ] pipdeptree-2.30.0/src/pipdeptree/_detect_env.py0000664000175000017510000000605715105004414021051 0ustar nileshnileshfrom __future__ import annotations import os import platform import subprocess # noqa: S404 import sys from pathlib import Path from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable def detect_active_interpreter() -> str: """ Attempt to detect a venv, virtualenv, poetry, or conda environment by looking for certain markers. If it fails to find any, it will fail with a message. """ detection_funcs: list[Callable[[], Path | None]] = [ detect_venv_or_virtualenv_interpreter, detect_conda_env_interpreter, detect_poetry_env_interpreter, ] for detect in detection_funcs: path = detect() if not path: continue if not path.exists(): break return str(path) print("Unable to detect virtual environment.", file=sys.stderr) # noqa: T201 raise SystemExit(1) def detect_venv_or_virtualenv_interpreter() -> Path | None: # Both virtualenv and venv set this environment variable. env_var = os.environ.get("VIRTUAL_ENV") if not env_var: return None path = Path(env_var) path /= determine_bin_dir() file_name = determine_interpreter_file_name() return path / file_name if file_name else None def determine_bin_dir() -> str: return "Scripts" if os.name == "nt" else "bin" def detect_conda_env_interpreter() -> Path | None: # Env var mentioned in https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#saving-environment-variables. env_var = os.environ.get("CONDA_PREFIX") if not env_var: return None path = Path(env_var) # On POSIX systems, conda adds the python executable to the /bin directory. On Windows, it resides in the parent # directory of /bin (i.e. the root directory). # See https://docs.anaconda.com/free/working-with-conda/configurations/python-path/#examples. 
if os.name == "posix": # pragma: posix cover path /= "bin" file_name = determine_interpreter_file_name() return path / file_name if file_name else None def detect_poetry_env_interpreter() -> Path | None: # poetry doesn't expose an environment variable like other implementations, so we instead use its CLI to snatch the # active interpreter. # See https://python-poetry.org/docs/managing-environments/#displaying-the-environment-information. try: result = subprocess.run( ("poetry", "env", "info", "--executable"), check=True, text=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, ) except Exception: # noqa: BLE001 return None return Path(result.stdout.strip()) def determine_interpreter_file_name() -> str | None: impl_name_to_file_name_dict = {"CPython": "python", "PyPy": "pypy"} name = impl_name_to_file_name_dict.get(platform.python_implementation()) if not name: return None if os.name == "nt": # pragma: nt cover return name + ".exe" return name __all__ = ["detect_active_interpreter"] pipdeptree-2.30.0/src/pipdeptree/_cli.py0000664000175000017510000001615515105004414017500 0ustar nileshnileshfrom __future__ import annotations import sys from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, ArgumentTypeError, Namespace from typing import TYPE_CHECKING, cast from .version import __version__ if TYPE_CHECKING: from collections.abc import Sequence class Options(Namespace): freeze: bool python: str path: list[str] all: bool local_only: bool user_only: bool warn: str reverse: bool packages: str exclude: str exclude_dependencies: bool json: bool json_tree: bool mermaid: bool graphviz_format: str | None output_format: str depth: float encoding: str license: bool # NOTE: graphviz-* has been intentionally left out. Users of this var should handle it separately. 
ALLOWED_RENDER_FORMATS = ["freeze", "json", "json-tree", "mermaid", "text"] class _Formatter(ArgumentDefaultsHelpFormatter): def __init__(self, prog: str) -> None: super().__init__(prog, max_help_position=22, width=240) def build_parser() -> ArgumentParser: parser = ArgumentParser(description="Dependency tree of the installed python packages", formatter_class=_Formatter) parser.add_argument("-v", "--version", action="version", version=f"{__version__}") parser.add_argument( "-w", "--warn", dest="warn", type=str, choices=["silence", "suppress", "fail"], default="suppress", help=( "warning control: suppress will show warnings but return 0 whether or not they are present; silence will " "not show warnings at all and always return 0; fail will show warnings and return 1 if any are present" ), ) select = parser.add_argument_group(title="select", description="choose what to render") select.add_argument( "--python", default=sys.executable, help=( 'Python interpreter to inspect. With "auto", it attempts to detect your virtual environment and fails if' " it can't." ), ) select.add_argument( "--path", help="passes a path used to restrict where packages should be looked for (can be used multiple times)", action="append", ) select.add_argument( "-p", "--packages", help="comma separated list of packages to show - wildcards are supported, like 'somepackage.*'", metavar="P", ) select.add_argument( "-e", "--exclude", help="comma separated list of packages to not show - wildcards are supported, like 'somepackage.*'. 
" "(cannot combine with -p or -a)", metavar="P", ) select.add_argument( "--exclude-dependencies", help="used along with --exclude to also exclude dependencies of packages", action="store_true", ) scope = select.add_mutually_exclusive_group() scope.add_argument( "-l", "--local-only", action="store_true", help="if in a virtualenv that has global access do not show globally installed packages", ) scope.add_argument("-u", "--user-only", action="store_true", help="only show installations in the user site dir") render = parser.add_argument_group( title="render", description="choose how to render the dependency tree", ) render.add_argument( "-f", "--freeze", action="store_true", help="(Deprecated, use -o) print names so as to write freeze files" ) render.add_argument( "--encoding", dest="encoding", default=sys.stdout.encoding, help="the encoding to use when writing to the output", metavar="E", ) render.add_argument( "-a", "--all", action="store_true", help="list all deps at top level (text and freeze render only)" ) render.add_argument( "-d", "--depth", type=lambda x: int(x) if x.isdigit() and (int(x) >= 0) else parser.error("Depth must be a number that is >= 0"), default=float("inf"), help="limit the depth of the tree (text and freeze render only)", metavar="D", ) render.add_argument( "-r", "--reverse", action="store_true", default=False, help=( "render the dependency tree in the reverse fashion ie. 
the sub-dependencies are listed with the list of " "packages that need them under them" ), ) render.add_argument( "--license", action="store_true", help="list the license(s) of a package (text render only)", ) render_type = render.add_mutually_exclusive_group() render_type.add_argument( "-j", "--json", action="store_true", default=False, help="(Deprecated, use -o) raw JSON - this will yield output that may be used by external tools", ) render_type.add_argument( "--json-tree", action="store_true", default=False, help="(Deprecated, use -o) nested JSON - mimics the text format layout", ) render_type.add_argument( "--mermaid", action="store_true", default=False, help="(Deprecated, use -o) https://mermaid.js.org flow diagram", ) render_type.add_argument( "--graph-output", metavar="FMT", dest="graphviz_format", help="(Deprecated, use -o) Graphviz rendering with the value being the graphviz output e.g.:\ dot, jpeg, pdf, png, svg", ) render_type.add_argument( "-o", "--output", metavar="FMT", dest="output_format", type=_validate_output_format, default="text", help=f"specify how to render the tree; supported formats: {', '.join(ALLOWED_RENDER_FORMATS)}, or graphviz-*\ (e.g. 
graphviz-png, graphviz-dot)", ) return parser def get_options(args: Sequence[str] | None) -> Options: parser = build_parser() parsed_args = parser.parse_args(args) options = cast("Options", parsed_args) options.output_format = _handle_legacy_render_options(options) if options.exclude_dependencies and not options.exclude: return parser.error("must use --exclude-dependencies with --exclude") if options.license and options.freeze: return parser.error("cannot use --license with --freeze") if options.path and (options.local_only or options.user_only): return parser.error("cannot use --path with --user-only or --local-only") return options def _handle_legacy_render_options(options: Options) -> str: if options.freeze: return "freeze" if options.json: return "json" if options.json_tree: return "json-tree" if options.mermaid: return "mermaid" if options.graphviz_format: return f"graphviz-{options.graphviz_format}" return options.output_format def _validate_output_format(value: str) -> str: if value in ALLOWED_RENDER_FORMATS: return value if value.startswith("graphviz-"): return value msg = f'"{value}" is not a known output format. 
Must be one of {", ".join(ALLOWED_RENDER_FORMATS)}, or graphviz-*' raise ArgumentTypeError(msg) __all__ = [ "Options", "get_options", ] pipdeptree-2.30.0/src/pipdeptree/__main__.py0000664000175000017510000000577215105004414020315 0ustar nileshnilesh"""The main entry point used for CLI.""" from __future__ import annotations import sys from typing import TYPE_CHECKING from pipdeptree._cli import Options, get_options from pipdeptree._detect_env import detect_active_interpreter from pipdeptree._discovery import InterpreterQueryError, get_installed_distributions from pipdeptree._models import PackageDAG from pipdeptree._models.dag import IncludeExcludeOverlapError, IncludePatternNotFoundError from pipdeptree._render import render from pipdeptree._validate import validate from pipdeptree._warning import WarningPrinter, WarningType, get_warning_printer if TYPE_CHECKING: from collections.abc import Sequence def main(args: Sequence[str] | None = None) -> int | None: """CLI - The main function called as entry point.""" options = get_options(args) # Warnings are only enabled when using text output. 
if not _is_text_output(options): options.warn = "silence" warning_printer = get_warning_printer() warning_printer.warning_type = WarningType.from_str(options.warn) if options.python == "auto": resolved_path = detect_active_interpreter() options.python = resolved_path print(f"(resolved python: {resolved_path})", file=sys.stderr) # noqa: T201 try: pkgs = get_installed_distributions( interpreter=options.python, supplied_paths=options.path or None, local_only=options.local_only, user_only=options.user_only, ) except InterpreterQueryError as e: print(f"Failed to query custom interpreter: {e}", file=sys.stderr) # noqa: T201 return 1 tree = PackageDAG.from_pkgs(pkgs) validate(tree) # Reverse the tree (if applicable) before filtering, thus ensuring, that the filter will be applied on ReverseTree if options.reverse: tree = tree.reverse() include = options.packages.split(",") if options.packages else None exclude = set(options.exclude.split(",")) if options.exclude else None if include is not None or exclude is not None: try: tree = tree.filter_nodes(include, exclude, exclude_deps=options.exclude_dependencies) except IncludeExcludeOverlapError: print("Cannot have --packages and --exclude contain the same entries", file=sys.stderr) # noqa: T201 return 1 except IncludePatternNotFoundError as e: if warning_printer.should_warn(): warning_printer.print_single_line(str(e)) return _determine_return_code(warning_printer) render(options, tree) return _determine_return_code(warning_printer) def _is_text_output(options: Options) -> bool: if any([options.json, options.json_tree, options.graphviz_format, options.mermaid]): return False return options.output_format in {"freeze", "text"} def _determine_return_code(warning_printer: WarningPrinter) -> int: return 1 if warning_printer.has_warned_with_failure() else 0 if __name__ == "__main__": sys.exit(main()) pipdeptree-2.30.0/src/pipdeptree/__init__.py0000664000175000017510000000000015105004414020307 0ustar 
nileshnileshpipdeptree-2.30.0/pyproject.toml0000664000175000017510000000764415105004414016207 0ustar nileshnilesh[build-system] build-backend = "hatchling.build" requires = [ "hatch-vcs>=0.5", "hatchling>=1.27", ] [project] name = "pipdeptree" description = "Command line utility to show dependency tree of packages." readme = "README.md" keywords = [ "application", "cache", "directory", "log", "user", ] license = "MIT" license-files = [ "LICENSE", ] maintainers = [ { name = "Bernát Gábor", email = "gaborjbernat@gmail.com" }, { name = "Vineet Naik", email = "naikvin@gmail.com" }, ] requires-python = ">=3.10" classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", ] dynamic = [ "version", ] dependencies = [ "packaging>=25", "pip>=25.2", ] optional-dependencies.graphviz = [ "graphviz>=0.21", ] optional-dependencies.test = [ "covdefaults>=2.3", "diff-cover>=9.7.1", "pytest>=8.4.2", "pytest-cov>=7", "pytest-mock>=3.15.1", "virtualenv<21,>=20.34", ] urls.Changelog = "https://github.com/tox-dev/pipdeptree/releases" urls.Documentation = "https://github.com/tox-dev/pipdeptree/blob/main/README.md#pipdeptree" urls.Homepage = "https://github.com/tox-dev/pipdeptree" urls.Source = "https://github.com/tox-dev/pipdeptree" urls.Tracker = "https://github.com/tox-dev/pipdeptree/issues" scripts.pipdeptree = "pipdeptree.__main__:main" [tool.hatch] build.hooks.vcs.version-file = "src/pipdeptree/version.py" version.source = "vcs" [tool.ruff] line-length = 120 format.preview = true format.docstring-code-line-length = 100 format.docstring-code-format = true lint.select = [ "ALL", ] lint.ignore = [ 
"A005", # Don't care about shadowing builtin modules "ANN401", # Dynamically typed expressions (typing.Any) are disallowed in "COM812", # Conflict with formatter "CPY", # No copyright statements "D104", # Missing docstring in public package "D203", # `one-blank-line-before-class` (D203) and `no-blank-line-before-class` (D211) are incompatible "D212", # `multi-line-summary-first-line` (D212) and `multi-line-summary-second-line` (D213) are incompatible "DOC201", # TODO: Read the comment for DOC501 "DOC402", # TODO: Read the comment for DOC501 "DOC501", # TODO: Remove this once ruff supports Sphinx-style doc-strings; see https://github.com/astral-sh/ruff/issues/12434 "INP001", # no implicit namespace "ISC001", # Conflict with formatter "S101", # asserts allowed "S104", # Possible binding to all interface ] lint.per-file-ignores."tests/**/*.py" = [ "D", # don"t care about documentation in tests "FBT", # don"t care about booleans as positional arguments in tests "PLC2701", # Private import in tests "PLR0913", # any number of arguments in tests "PLR0917", # any number of arguments in tests "PLR2004", # Magic value used in comparison, consider replacing with a constant variable "S603", # `subprocess` call: check for execution of untrusted input ] lint.isort = { known-first-party = [ "pipdeptree", ], required-imports = [ "from __future__ import annotations", ] } lint.preview = true [tool.codespell] builtin = "clear,usage,en-GB_to_en-US" count = true quiet-level = 3 ignore-words-list = "master" [tool.pyproject-fmt] max_supported_python = "3.14" [tool.coverage] html.show_contexts = true html.skip_covered = false paths.source = [ "src", ".tox/*/lib/python*/site-packages", "*/src", ] run.parallel = true run.plugins = [ "covdefaults", ] report.fail_under = 88 subtract_omit = "*/__main__.py" [tool.mypy] show_error_codes = true strict = true overrides = [ { module = [ "graphviz.*", "virtualenv.*", ], ignore_missing_imports = true }, ] 
pipdeptree-2.30.0/README.md0000664000175000017510000003207215105004414014543 0ustar nileshnilesh# pipdeptree [![PyPI](https://img.shields.io/pypi/v/pipdeptree)](https://pypi.org/project/pipdeptree/) [![Supported Python versions](https://img.shields.io/pypi/pyversions/pipdeptree.svg)](https://pypi.org/project/pipdeptree/) [![Downloads](https://static.pepy.tech/badge/pipdeptree/month)](https://pepy.tech/project/pipdeptree) [![check](https://github.com/tox-dev/pipdeptree/actions/workflows/check.yaml/badge.svg)](https://github.com/tox-dev/pipdeptree/actions/workflows/check.yaml) [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/tox-dev/pipdeptree/main.svg)](https://results.pre-commit.ci/latest/github/tox-dev/pipdeptree/main) `pipdeptree` is a command line utility for displaying the installed python packages in form of a dependency tree. It works for packages installed globally on a machine as well as in a virtualenv. Since `pip freeze` shows all dependencies as a flat list, finding out which are the top level packages and which packages do they depend on requires some effort. It\'s also tedious to resolve conflicting dependencies that could have been installed because older version of `pip` didn\'t have true dependency resolution[^1]. `pipdeptree` can help here by identifying conflicting dependencies installed in the environment. To some extent, `pipdeptree` is inspired by the `lein deps :tree` command of [Leiningen](http://leiningen.org/). ## Installation ```bash pip install pipdeptree ``` ## Running in virtualenvs `New in ver. 2.0.0` If you want to run pipdeptree in the context of a particular virtualenv, you can specify the `--python` option. Note that this capability has been recently added in version `2.0.0`. Alternatively, you may also install pipdeptree inside the virtualenv and then run it from there. 
As of version `2.21.0`, you may also pass `--python auto`, where it will attempt to detect your virtual environment and grab the interpreter from there. It will fail if it is unable to detect one. ## Usage and examples To give you a brief idea, here is the output of `pipdeptree` compared with `pip freeze`: ```bash $ pip freeze Flask==0.10.1 itsdangerous==0.24 Jinja2==2.11.2 -e git+git@github.com:naiquevin/lookupy.git@cdbe30c160e1c29802df75e145ea4ad903c05386#egg=Lookupy MarkupSafe==0.22 pipdeptree @ file:///private/tmp/pipdeptree-2.0.0b1-py3-none-any.whl Werkzeug==0.11.2 ``` And now see what `pipdeptree` outputs, ```bash $ pipdeptree Warning!!! Possibly conflicting dependencies found: * Jinja2==2.11.2 - MarkupSafe [required: >=0.23, installed: 0.22] ------------------------------------------------------------------------ Flask==0.10.1 - itsdangerous [required: >=0.21, installed: 0.24] - Jinja2 [required: >=2.4, installed: 2.11.2] - MarkupSafe [required: >=0.23, installed: 0.22] - Werkzeug [required: >=0.7, installed: 0.11.2] Lookupy==0.1 pipdeptree==2.0.0b1 - pip [required: >=6.0.0, installed: 20.1.1] setuptools==47.1.1 wheel==0.34.2 ``` ## Is it possible to find out why a particular package is installed? `New in ver. 0.5.0` Yes, there\'s a `--reverse` (or simply `-r`) flag for this. To find out which packages depend on a particular package(s), it can be combined with `--packages` option as follows: ```bash $ pipdeptree --reverse --packages itsdangerous,MarkupSafe Warning!!! Possibly conflicting dependencies found: * Jinja2==2.11.2 - MarkupSafe [required: >=0.23, installed: 0.22] ------------------------------------------------------------------------ itsdangerous==0.24 - Flask==0.10.1 [requires: itsdangerous>=0.21] MarkupSafe==0.22 - Jinja2==2.11.2 [requires: MarkupSafe>=0.23] - Flask==0.10.1 [requires: Jinja2>=2.4] ``` ## What\'s with the warning about conflicting dependencies? 
As seen in the above output, `pipdeptree` by default warns about possible conflicting dependencies. Any package that\'s specified as a dependency of multiple packages with different versions is considered as a conflicting dependency. Conflicting dependencies are possible if older version of pip\<=20.2 ([without the new resolver](https://github.com/pypa/pip/issues/988)[^2]) was ever used to install dependencies at some point. The warning is printed to stderr instead of stdout and it can be completely silenced by specifying the `-w silence` or `--warn silence` option. On the other hand, it can be made mode strict with `--warn fail`, in which case the command will not only print the warnings to stderr but also exit with a non-zero status code. This is useful if you want to fit this tool into your CI pipeline. **Note**: The `--warn` option is added in version `0.6.0`. If you are using an older version, use `--nowarn` flag to silence the warnings. ## Warnings about circular dependencies In case any of the packages have circular dependencies (eg. package A depends on package B and package B depends on package A), then `pipdeptree` will print warnings about that as well. ```bash $ pipdeptree --exclude pip,pipdeptree,setuptools,wheel Warning!!! Cyclic dependencies found: - CircularDependencyA => CircularDependencyB => CircularDependencyA - CircularDependencyB => CircularDependencyA => CircularDependencyB ------------------------------------------------------------------------ wsgiref==0.1.2 argparse==1.2.1 ``` Similar to the warnings about conflicting dependencies, these too are printed to stderr and can be controlled using the `--warn` option. In the above example, you can also see `--exclude` option which is the opposite of `--packages` ie. these packages will be excluded from the output. ## Using pipdeptree to write requirements.txt file If you wish to track only top level packages in your `requirements.txt` file, it\'s possible by grep-ing[^3]. 
only the top-level lines from the output, ```bash $ pipdeptree --warn silence | grep -E '^\w+' Flask==0.10.1 gnureadline==8.0.0 Lookupy==0.1 pipdeptree==2.0.0b1 setuptools==47.1.1 wheel==0.34.2 ``` There is a problem here though - The output doesn\'t mention anything about `Lookupy` being installed as an _editable_ package (refer to the output of `pip freeze` above) and information about its source is lost. To fix this, `pipdeptree` must be run with a `-f` or `--freeze` flag. ```bash $ pipdeptree -f --warn silence | grep -E '^[a-zA-Z0-9\-]+' Flask==0.10.1 gnureadline==8.0.0 -e git+git@github.com:naiquevin/lookupy.git@cdbe30c160e1c29802df75e145ea4ad903c05386#egg=Lookupy pipdeptree @ file:///private/tmp/pipdeptree-2.0.0b1-py3-none-any.whl setuptools==47.1.1 wheel==0.34.2 $ pipdeptree -f --warn silence | grep -E '^[a-zA-Z0-9\-]+' > requirements.txt ``` The freeze flag will not prefix child dependencies with hyphens, so you could dump the entire output of `pipdeptree -f` to the requirements.txt file thus making it human-friendly (due to indentations) as well as pip-friendly. ```bash $ pipdeptree -f | tee locked-requirements.txt Flask==0.10.1 itsdangerous==0.24 Jinja2==2.11.2 MarkupSafe==0.23 Werkzeug==0.11.2 gnureadline==8.0.0 -e git+git@github.com:naiquevin/lookupy.git@cdbe30c160e1c29802df75e145ea4ad903c05386#egg=Lookupy pipdeptree @ file:///private/tmp/pipdeptree-2.0.0b1-py3-none-any.whl pip==20.1.1 setuptools==47.1.1 wheel==0.34.2 ``` On confirming that there are no conflicting dependencies, you can even treat this as a \"lock file\" where all packages, including the transient dependencies will be pinned to their currently installed versions. Note that the `locked-requirements.txt` file could end up with duplicate entries. 
Although `pip install` wouldn\'t complain about that, you can avoid duplicate lines (at the cost of losing indentation) as follows, ```bash $ pipdeptree -f | sed 's/ //g' | sort -u > locked-requirements.txt ``` ## Using pipdeptree with external tools `New in ver. 0.5.0` It\'s also possible to have `pipdeptree` output json representation of the dependency tree so that it may be used as input to other external tools. ```bash $ pipdeptree --json ``` Note that `--json` will output a flat list of all packages with their immediate dependencies. This is not very useful in itself. To obtain nested json, use `--json-tree` `New in ver. 0.11.0` ```bash $ pipdeptree --json-tree ``` ## Visualizing the dependency graph The dependency graph can also be visualized using [GraphViz](http://www.graphviz.org/): ```bash $ pipdeptree --graph-output dot > dependencies.dot $ pipdeptree --graph-output pdf > dependencies.pdf $ pipdeptree --graph-output png > dependencies.png $ pipdeptree --graph-output svg > dependencies.svg ``` Note that `graphviz` is an optional dependency that's required only if you want to use `--graph-output`. Since version `2.0.0b1`, `--package` and `--reverse` flags are supported for all output formats ie. text, json, json-tree and graph. In earlier versions, `--json`, `--json-tree` and `--graph-output` options override `--package` and `--reverse`. 
## Usage ```text % pipdeptree --help usage: pipdeptree [-h] [-v] [-w {silence,suppress,fail}] [--python PYTHON] [--path PATH] [-p P] [-e P] [--exclude-dependencies] [-l | -u] [-f] [--encoding E] [-a] [-d D] [-r] [--license] [-j | --json-tree | --mermaid | --graph-output FMT | -o FMT] Dependency tree of the installed python packages options: -h, --help show this help message and exit -v, --version show program's version number and exit -w {silence,suppress,fail}, --warn {silence,suppress,fail} warning control: suppress will show warnings but return 0 whether or not they are present; silence will not show warnings at all and always return 0; fail will show warnings and return 1 if any are present (default: suppress) select: choose what to render --python PYTHON Python interpreter to inspect. With "auto", it attempts to detect your virtual environment and fails if it can't. (default: /usr/local/bin/python) --path PATH passes a path used to restrict where packages should be looked for (can be used multiple times) (default: None) -p P, --packages P comma separated list of packages to show - wildcards are supported, like 'somepackage.*' (default: None) -e P, --exclude P comma separated list of packages to not show - wildcards are supported, like 'somepackage.*'. 
(cannot combine with -p or -a) (default: None) --exclude-dependencies used along with --exclude to also exclude dependencies of packages (default: False) -l, --local-only if in a virtualenv that has global access do not show globally installed packages (default: False) -u, --user-only only show installations in the user site dir (default: False) render: choose how to render the dependency tree -f, --freeze (Deprecated, use -o) print names so as to write freeze files (default: False) --encoding E the encoding to use when writing to the output (default: utf-8) -a, --all list all deps at top level (text and freeze render only) (default: False) -d D, --depth D limit the depth of the tree (text and freeze render only) (default: inf) -r, --reverse render the dependency tree in the reverse fashion ie. the sub-dependencies are listed with the list of packages that need them under them (default: False) --license list the license(s) of a package (text render only) (default: False) -j, --json (Deprecated, use -o) raw JSON - this will yield output that may be used by external tools (default: False) --json-tree (Deprecated, use -o) nested JSON - mimics the text format layout (default: False) --mermaid (Deprecated, use -o) https://mermaid.js.org flow diagram (default: False) --graph-output FMT (Deprecated, use -o) Graphviz rendering with the value being the graphviz output e.g.: dot, jpeg, pdf, png, svg (default: None) -o FMT, --output FMT specify how to render the tree; supported formats: freeze, json, json-tree, mermaid, text, or graphviz-* (e.g. graphviz-png, graphviz-dot) (default: text) ``` ## Known issues 1. `pipdeptree` relies on the internal API of `pip`. I fully understand that it\'s a bad idea but it mostly works! On rare occasions, it breaks when a new version of `pip` is out with backward incompatible changes in internal API. So beware if you are using this tool in environments in which `pip` version is unpinned, specially automation or CD/CI pipelines. 
## Limitations & Alternatives `pipdeptree` merely looks at the installed packages in the current environment using pip, constructs the tree, then outputs it in the specified format. If you want to generate the dependency tree without installing the packages, then you need a dependency resolver. You might want to check alternatives such as [pipgrip](https://github.com/ddelange/pipgrip) or [poetry](https://github.com/python-poetry/poetry). ## License MIT (See [LICENSE](./LICENSE)) ## Footnotes [^1]: pip version 20.3 has been released in Nov 2020 with the dependency resolver \<\>\_ [^2]: pip version 20.3 has been released in Nov 2020 with the dependency resolver \<\>\_ [^3]: If you are on windows (powershell) you can run `pipdeptree --warn silence | Select-String -Pattern '^\w+'` instead of grep pipdeptree-2.30.0/LICENSE0000664000175000017510000000205015105004414014262 0ustar nileshnileshCopyright (c) The pipdeptree developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
pipdeptree-2.30.0/.pre-commit-config.yaml0000664000175000017510000000222015105004414017535 0ustar nileshnileshrepos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v6.0.0 hooks: - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/python-jsonschema/check-jsonschema rev: 0.34.1 hooks: - id: check-github-workflows args: ["--verbose"] - repo: https://github.com/codespell-project/codespell rev: v2.4.1 hooks: - id: codespell additional_dependencies: ["tomli>=2.3"] - repo: https://github.com/tox-dev/tox-toml-fmt rev: "v1.2.0" hooks: - id: tox-toml-fmt - repo: https://github.com/tox-dev/pyproject-fmt rev: "v2.11.1" hooks: - id: pyproject-fmt - repo: https://github.com/astral-sh/ruff-pre-commit rev: "v0.14.4" hooks: - id: ruff-format - id: ruff-check args: ["--fix", "--unsafe-fixes", "--exit-non-zero-on-fix"] - repo: https://github.com/rbubley/mirrors-prettier rev: "v3.6.2" hooks: - id: prettier additional_dependencies: - prettier@3.6.2 - "@prettier/plugin-xml@3.4.2" - repo: meta hooks: - id: check-hooks-apply - id: check-useless-excludes pipdeptree-2.30.0/.gitignore0000664000175000017510000000007015105004414015245 0ustar nileshnilesh*.pyc *.egg-info dist/ .tox/ /src/pipdeptree/version.py