Print diff report when generating Python lockfiles. (pantsbuild#17347)
Enable with `--diff` (or `--diff-include-unchanged`). Only support for Python/PEX lockfiles is currently implemented.
kaos authored Jan 31, 2023
1 parent 7117bad commit 6b2c302
Showing 15 changed files with 681 additions and 20 deletions.
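
The report is opt-in: pass `--diff` when running the `generate-lockfiles` goal for a one-off report, or set `diff = true` under `[generate-lockfiles]` in pants.toml (as the first changed file below does for the Pants repository itself) to print it on every run. Judging by its name, `--diff-include-unchanged` additionally lists packages whose pinned versions did not change.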
1 change: 1 addition & 0 deletions pants.toml
@@ -212,6 +212,7 @@ template_by_globs = "@build-support/preambles/config.yaml"

[generate-lockfiles]
custom_command = "build-support/bin/generate_all_lockfiles.sh"
diff = true

[jvm]
default_resolve = "jvm_testprojects"
14 changes: 13 additions & 1 deletion src/python/pants/backend/python/goals/lockfile.py
@@ -15,6 +15,7 @@
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import PythonRequirementResolveField, PythonRequirementsField
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.lockfile_diff import _generate_python_lockfile_diff
from pants.backend.python.util_rules.lockfile_metadata import PythonLockfileMetadata
from pants.backend.python.util_rules.pex_cli import PexCliProcess
from pants.backend.python.util_rules.pex_requirements import ( # noqa: F401
@@ -72,6 +73,7 @@ def from_tool(
interpreter_constraints=InterpreterConstraints(),
resolve_name=subsystem.options_scope,
lockfile_dest=subsystem.lockfile,
diff=False,
)
return cls(
requirements=FrozenOrderedSet((*subsystem.all_requirements, *extra_requirements)),
@@ -82,6 +84,7 @@ def from_tool(
),
resolve_name=subsystem.options_scope,
lockfile_dest=subsystem.lockfile,
diff=False,
)

@property
@@ -218,7 +221,15 @@ async def generate_lockfile(
final_lockfile_digest = await Get(
Digest, CreateDigest([FileContent(req.lockfile_dest, lockfile_with_header)])
)
return GenerateLockfileResult(final_lockfile_digest, req.resolve_name, req.lockfile_dest)

if req.diff:
diff = await _generate_python_lockfile_diff(
final_lockfile_digest, req.resolve_name, req.lockfile_dest
)
else:
diff = None

return GenerateLockfileResult(final_lockfile_digest, req.resolve_name, req.lockfile_dest, diff)


class RequestedPythonUserResolveNames(RequestedUserResolveNames):
@@ -266,6 +277,7 @@ async def setup_user_lockfile_requests(
),
resolve_name=resolve,
lockfile_dest=python_setup.resolves[resolve],
diff=False,
)
for resolve in requested
)
3 changes: 3 additions & 0 deletions src/python/pants/backend/python/goals/lockfile_test.py
@@ -54,6 +54,7 @@ def _generate(
interpreter_constraints=InterpreterConstraints(),
resolve_name="test",
lockfile_dest="test.lock",
diff=False,
)
],
)
@@ -240,11 +241,13 @@ def test_multiple_resolves() -> None:
),
resolve_name="a",
lockfile_dest="a.lock",
diff=False,
),
GeneratePythonLockfile(
requirements=FrozenOrderedSet(["b"]),
interpreter_constraints=InterpreterConstraints(["==3.7.*"]),
resolve_name="b",
lockfile_dest="b.lock",
diff=False,
),
}
2 changes: 2 additions & 0 deletions src/python/pants/backend/python/typecheck/mypy/subsystem.py
@@ -402,6 +402,7 @@ async def setup_mypy_extra_type_stubs_lockfile(
interpreter_constraints=InterpreterConstraints(),
resolve_name=request.resolve_name,
lockfile_dest=mypy.extra_type_stubs_lockfile,
diff=False,
)

# While MyPy will run in partitions, we need a set of constraints that works with every
@@ -427,6 +428,7 @@ async def setup_mypy_extra_type_stubs_lockfile(
interpreter_constraints=interpreter_constraints,
resolve_name=request.resolve_name,
lockfile_dest=mypy.extra_type_stubs_lockfile,
diff=False,
)


119 changes: 119 additions & 0 deletions src/python/pants/backend/python/util_rules/lockfile_diff.py
@@ -0,0 +1,119 @@
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import itertools
import json
import logging
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Mapping

from packaging.version import parse

if TYPE_CHECKING:
# We seem to get a version of `packaging` that doesn't have `LegacyVersion` when running
# pytest..
from packaging.version import LegacyVersion, Version

from pants.backend.python.util_rules.pex_requirements import (
LoadedLockfile,
LoadedLockfileRequest,
Lockfile,
LockfileContent,
)
from pants.base.exceptions import EngineError
from pants.core.goals.generate_lockfiles import LockfileDiff, LockfilePackages, PackageName
from pants.engine.fs import Digest, DigestContents
from pants.engine.rules import Get, rule_helper
from pants.util.frozendict import FrozenDict

logger = logging.getLogger(__name__)


@dataclass(frozen=True, order=True)
class PythonRequirementVersion:
_parsed: LegacyVersion | Version

@classmethod
def parse(cls, version: str) -> PythonRequirementVersion:
return cls(parse(version))

def __str__(self) -> str:
return str(self._parsed)

def __getattr__(self, key: str) -> Any:
return getattr(self._parsed, key)


def _pex_lockfile_requirements(
lockfile_data: Mapping[str, Any] | None, path: str | None = None
) -> LockfilePackages:
if not lockfile_data:
return LockfilePackages({})

try:
# Setup generators
locked_resolves = (
(
(PackageName(r["project_name"]), PythonRequirementVersion.parse(r["version"]))
for r in resolve["locked_requirements"]
)
for resolve in lockfile_data["locked_resolves"]
)
requirements = dict(itertools.chain.from_iterable(locked_resolves))
except KeyError as e:
if path:
logger.warning(f"{path}: Failed to parse lockfile: {e}")

requirements = {}

return LockfilePackages(requirements)


@rule_helper
async def _parse_lockfile(lockfile: Lockfile | LockfileContent) -> FrozenDict[str, Any] | None:
try:
loaded = await Get(
LoadedLockfile,
LoadedLockfileRequest(lockfile),
)
fc = await Get(DigestContents, Digest, loaded.lockfile_digest)
parsed_lockfile = json.loads(fc[0].content)
return FrozenDict.deep_freeze(parsed_lockfile)
except EngineError:
# May fail in case the file doesn't exist, which is expected when parsing the "old" lockfile
# the first time a new lockfile is generated.
return None
except json.JSONDecodeError as e:
file_path = (
lockfile.file_path if isinstance(lockfile, Lockfile) else lockfile.file_content.path
)
logger.debug(f"{file_path}: Failed to parse lockfile contents: {e}")
return None


@rule_helper
async def _generate_python_lockfile_diff(
digest: Digest, resolve_name: str, path: str
) -> LockfileDiff:
new_content = await Get(DigestContents, Digest, digest)
new = await _parse_lockfile(
LockfileContent(
file_content=next(c for c in new_content if c.path == path),
resolve_name=resolve_name,
)
)
old = await _parse_lockfile(
Lockfile(
file_path=path,
file_path_description_of_origin="generated lockfile",
resolve_name=resolve_name,
)
)
return LockfileDiff.create(
path=path,
resolve_name=resolve_name,
old=_pex_lockfile_requirements(old),
new=_pex_lockfile_requirements(new, path),
)
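
The diff report itself is assembled by `LockfileDiff.create`, imported above from `pants.core.goals.generate_lockfiles` and not among the files shown in this view. As a rough illustration of the data this module feeds into it, the standalone sketch below (not Pants code; the helper names `packages` and `summarize` and the example inputs are made up) flattens the same PEX lockfile JSON shape that `_pex_lockfile_requirements` reads — `locked_resolves[*].locked_requirements[*]` entries carrying `project_name` and `version` — into name-to-version mappings and prints added, removed, and re-pinned packages. Versions are compared with `packaging.version.parse`, mirroring `PythonRequirementVersion`, so that for example 1.10 sorts after 1.9.

# A standalone sketch, not Pants code: hypothetical helpers showing the kind of
# comparison LockfileDiff.create presumably performs on the package mappings
# produced by _pex_lockfile_requirements above.
from __future__ import annotations

import json

from packaging.version import parse


def packages(lockfile_json: str) -> dict[str, str]:
    # Flatten a PEX lockfile into {project_name: version}, reading the same
    # locked_resolves -> locked_requirements structure used in lockfile_diff.py.
    data = json.loads(lockfile_json)
    return {
        req["project_name"]: req["version"]
        for resolve in data.get("locked_resolves", [])
        for req in resolve["locked_requirements"]
    }


def summarize(old: dict[str, str], new: dict[str, str]) -> None:
    # Report added, removed, and re-pinned packages between two lockfile snapshots.
    for name in sorted(new.keys() - old.keys()):
        print(f"+  {name} {new[name]}")
    for name in sorted(old.keys() - new.keys()):
        print(f"-  {name} {old[name]}")
    for name in sorted(old.keys() & new.keys()):
        if old[name] != new[name]:
            direction = "upgraded" if parse(new[name]) > parse(old[name]) else "downgraded"
            print(f"   {name} {direction}: {old[name]} -> {new[name]}")


old = packages('{"locked_resolves": [{"locked_requirements": '
               '[{"project_name": "ansicolors", "version": "1.1.8"}]}]}')
new = packages('{"locked_resolves": [{"locked_requirements": '
               '[{"project_name": "ansicolors", "version": "1.1.8"}, '
               '{"project_name": "requests", "version": "2.28.2"}]}]}')
summarize(old, new)  # prints: +  requests 2.28.2

In the actual rule, the old mapping comes from `_parse_lockfile`, which returns `None` when the previous lockfile is missing or unparsable; `_pex_lockfile_requirements` turns that into an empty package set, so the first time a lockfile is generated every package presumably shows up as added.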
(The remaining 10 changed files in this commit are not shown here.)
