Use PipRequirement for only_binary and no_binary lockfile metadata (pantsbuild#16559)

Closes pantsbuild#16527.

Also adds `description_of_origin` to several lockfile-related places using `PipRequirement.parse()`.
Eric-Arellano authored Aug 18, 2022
1 parent dc63ac0 commit dc99037
Showing 8 changed files with 132 additions and 62 deletions.
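The pattern this commit threads through the lockfile code paths is that `PipRequirement.parse()` is now given a `description_of_origin`, so a malformed requirement string can be reported along with where it came from. A minimal sketch with a made-up requirement string; the origin text matches the one added for `[python].no_binary` in the diffs below:

```python
from pants.backend.python.pip_requirement import PipRequirement

# "psycopg2==2.9.3" is a made-up example requirement; the keyword argument is
# the one this commit adds at each parse site.
req = PipRequirement.parse(
    "psycopg2==2.9.3",
    description_of_origin="the option `[python].no_binary`",
)
```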
9 changes: 7 additions & 2 deletions src/python/pants/backend/python/goals/lockfile.py
@@ -300,8 +300,13 @@ async def generate_lockfile(
initial_lockfile_digest_contents = await Get(DigestContents, Digest, result.output_digest)
metadata = PythonLockfileMetadata.new(
valid_for_interpreter_constraints=req.interpreter_constraints,
# TODO(#12314) Improve error message on `Requirement.parse`
requirements={PipRequirement.parse(i) for i in req.requirements},
requirements={
PipRequirement.parse(
i,
description_of_origin=f"the lockfile {req.lockfile_dest} for the resolve {req.resolve_name}",
)
for i in req.requirements
},
indexes=set(pip_args_setup.resolve_config.indexes),
find_links=set(pip_args_setup.resolve_config.find_links),
manylinux=pip_args_setup.resolve_config.manylinux,
59 changes: 45 additions & 14 deletions src/python/pants/backend/python/subsystems/setup.py
@@ -8,6 +8,7 @@
import os
from typing import Iterable, List, Optional, TypeVar, cast

from pants.backend.python.pip_requirement import PipRequirement
from pants.core.goals.generate_lockfiles import UnrecognizedResolveNamesError
from pants.option.option_types import (
BoolOption,
@@ -673,7 +674,7 @@ def resolves_to_constraints_file(
@memoized_method
def resolves_to_no_binary(
self, all_python_tool_resolve_names: tuple[str, ...]
) -> dict[str, list[str]]:
) -> dict[str, list[PipRequirement]]:
if self.no_binary:
if self._resolves_to_no_binary:
raise ValueError(
@@ -687,20 +688,35 @@ def resolves_to_no_binary(
"""
)
)
no_binary_opt = [
PipRequirement.parse(v, description_of_origin="the option `[python].no_binary`")
for v in self.no_binary
]
return {
resolve: list(self.no_binary)
resolve: no_binary_opt
for resolve in {*self.resolves, *all_python_tool_resolve_names}
}
return self._resolves_to_option_helper(
self._resolves_to_no_binary,
"resolves_to_no_binary",
all_python_tool_resolve_names,
)
return {
resolve: [
PipRequirement.parse(
v,
description_of_origin=(
f"the option `[python].resolves_to_no_binary` for the resolve {resolve}"
),
)
for v in vals
]
for resolve, vals in self._resolves_to_option_helper(
self._resolves_to_no_binary,
"resolves_to_no_binary",
all_python_tool_resolve_names,
).items()
}

@memoized_method
def resolves_to_only_binary(
self, all_python_tool_resolve_names: tuple[str, ...]
) -> dict[str, list[str]]:
) -> dict[str, list[PipRequirement]]:
if self.only_binary:
if self._resolves_to_only_binary:
raise ValueError(
@@ -714,15 +730,30 @@
"""
)
)
only_binary_opt = [
PipRequirement.parse(v, description_of_origin="the option `[python].only_binary`")
for v in self.only_binary
]
return {
resolve: list(self.only_binary)
resolve: only_binary_opt
for resolve in {*self.resolves, *all_python_tool_resolve_names}
}
return self._resolves_to_option_helper(
self._resolves_to_only_binary,
"resolves_to_only_binary",
all_python_tool_resolve_names,
)
return {
resolve: [
PipRequirement.parse(
v,
description_of_origin=(
f"the option `[python].resolves_to_only_binary` for the resolve {resolve}"
),
)
for v in vals
]
for resolve, vals in self._resolves_to_option_helper(
self._resolves_to_only_binary,
"resolves_to_only_binary",
all_python_tool_resolve_names,
).items()
}

def resolve_all_constraints_was_set_explicitly(self) -> bool:
return not self.options.is_default("resolve_all_constraints")
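The net effect of the `setup.py` change: `resolves_to_no_binary` and `resolves_to_only_binary` now return `dict[str, list[PipRequirement]]` rather than `dict[str, list[str]]`. A standalone sketch of the conversion the new dict comprehension performs, using made-up resolve and project names:

```python
from __future__ import annotations

from pants.backend.python.pip_requirement import PipRequirement

# Made-up input: what a user might set in `[python].resolves_to_no_binary`.
raw: dict[str, list[str]] = {"python-default": ["psycopg2"], "mypy": ["mypy-extensions"]}

# Each string is parsed, and the origin names the option and resolve it came from,
# mirroring the comprehension added above.
parsed: dict[str, list[PipRequirement]] = {
    resolve: [
        PipRequirement.parse(
            value,
            description_of_origin=(
                f"the option `[python].resolves_to_no_binary` for the resolve {resolve}"
            ),
        )
        for value in values
    ]
    for resolve, values in raw.items()
}
```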
21 changes: 15 additions & 6 deletions src/python/pants/backend/python/subsystems/setup_test.py
@@ -5,6 +5,7 @@

import pytest

from pants.backend.python.pip_requirement import PipRequirement
from pants.backend.python.subsystems.setup import PythonSetup
from pants.core.goals.generate_lockfiles import UnrecognizedResolveNamesError
from pants.testutil.option_util import create_subsystem
@@ -44,7 +45,7 @@ def create(resolves_to_constraints_file: dict[str, str]) -> dict[str, str]:
def test_resolves_to_no_binary_and_only_binary() -> None:
def create(
resolves_to_projects: dict[str, list[str]], deprecated_options: list[str] | None = None
) -> dict[str, list[str]]:
) -> dict[str, list[PipRequirement]]:
subsystem = create_subsystem(
PythonSetup,
resolves={"a": "a.lock"},
@@ -62,15 +63,23 @@ def create(
assert only_binary == no_binary
return only_binary

assert create({"a": ["p1"], "tool1": ["p2"]}) == {"a": ["p1"], "tool1": ["p2"]}
p1_req = PipRequirement.parse("p1")
assert create({"a": ["p1"], "tool1": ["p2"]}) == {
"a": [p1_req],
"tool1": [PipRequirement.parse("p2")],
}
assert create({"__default__": ["p1"], "tool2": ["override"]}) == {
"a": ["p1"],
"tool1": ["p1"],
"tool2": ["override"],
"a": [p1_req],
"tool1": [p1_req],
"tool2": [PipRequirement.parse("override")],
}
with pytest.raises(UnrecognizedResolveNamesError):
create({"fake": []})

assert create({}, deprecated_options=["p1"]) == {"a": ["p1"], "tool1": ["p1"], "tool2": ["p1"]}
assert create({}, deprecated_options=["p1"]) == {
"a": [p1_req],
"tool1": [p1_req],
"tool2": [p1_req],
}
with pytest.raises(ValueError):
create({"a": ["p1"]}, deprecated_options=["p2"])
52 changes: 34 additions & 18 deletions src/python/pants/backend/python/util_rules/lockfile_metadata.py
@@ -48,8 +48,8 @@ def new(
find_links: set[str],
manylinux: str | None,
requirement_constraints: set[PipRequirement],
only_binary: set[str],
no_binary: set[str],
only_binary: set[PipRequirement],
no_binary: set[PipRequirement],
) -> PythonLockfileMetadata:
"""Call the most recent version of the `LockfileMetadata` class to construct a concrete
instance.
@@ -91,8 +91,8 @@ def is_valid_for(
find_links: Iterable[str],
manylinux: str | None,
requirement_constraints: Iterable[PipRequirement],
only_binary: Iterable[str],
no_binary: Iterable[str],
only_binary: Iterable[PipRequirement],
no_binary: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
"""Returns Truthy if this `PythonLockfileMetadata` can be used in the current execution
context."""
@@ -140,8 +140,8 @@ def is_valid_for(
find_links: Iterable[str],
manylinux: str | None,
requirement_constraints: Iterable[PipRequirement],
only_binary: Iterable[str],
no_binary: Iterable[str],
only_binary: Iterable[PipRequirement],
no_binary: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
failure_reasons: set[InvalidPythonLockfileReason] = set()

@@ -182,7 +182,9 @@ def _from_json_dict(
requirements = metadata(
"generated_with_requirements",
Set[PipRequirement],
lambda l: {PipRequirement.parse(i) for i in l},
lambda l: {
PipRequirement.parse(i, description_of_origin=lockfile_description) for i in l
},
)
interpreter_constraints = metadata(
"valid_for_interpreter_constraints", InterpreterConstraints, InterpreterConstraints
@@ -210,8 +212,8 @@ def is_valid_for(
find_links: Iterable[str],
manylinux: str | None,
requirement_constraints: Iterable[PipRequirement],
only_binary: Iterable[str],
no_binary: Iterable[str],
only_binary: Iterable[PipRequirement],
no_binary: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
failure_reasons = set()

@@ -240,8 +242,8 @@ class PythonLockfileMetadataV3(PythonLockfileMetadataV2):
find_links: set[str]
manylinux: str | None
requirement_constraints: set[PipRequirement]
only_binary: set[str]
no_binary: set[str]
only_binary: set[PipRequirement]
no_binary: set[PipRequirement]

@classmethod
def _from_json_dict(
Expand All @@ -258,10 +260,24 @@ def _from_json_dict(
requirement_constraints = metadata(
"requirement_constraints",
Set[PipRequirement],
lambda l: {PipRequirement.parse(i) for i in l},
lambda l: {
PipRequirement.parse(i, description_of_origin=lockfile_description) for i in l
},
)
only_binary = metadata(
"only_binary",
Set[PipRequirement],
lambda l: {
PipRequirement.parse(i, description_of_origin=lockfile_description) for i in l
},
)
no_binary = metadata(
"no_binary",
Set[PipRequirement],
lambda l: {
PipRequirement.parse(i, description_of_origin=lockfile_description) for i in l
},
)
only_binary = metadata("only_binary", Set[str], lambda l: set(l))
no_binary = metadata("no_binary", Set[str], lambda l: set(l))

return PythonLockfileMetadataV3(
valid_for_interpreter_constraints=v2_metadata.valid_for_interpreter_constraints,
Expand All @@ -282,8 +298,8 @@ def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
"find_links": sorted(instance.find_links),
"manylinux": instance.manylinux,
"requirement_constraints": sorted(str(i) for i in instance.requirement_constraints),
"only_binary": sorted(instance.only_binary),
"no_binary": sorted(instance.no_binary),
"only_binary": sorted(str(i) for i in instance.only_binary),
"no_binary": sorted(str(i) for i in instance.no_binary),
}

def is_valid_for(
Expand All @@ -298,8 +314,8 @@ def is_valid_for(
find_links: Iterable[str],
manylinux: str | None,
requirement_constraints: Iterable[PipRequirement],
only_binary: Iterable[str],
no_binary: Iterable[str],
only_binary: Iterable[PipRequirement],
no_binary: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
failure_reasons = (
super()
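In `PythonLockfileMetadataV3`, `only_binary` and `no_binary` are now sets of `PipRequirement` that are written to the lockfile header as sorted strings and re-parsed on load. A small sketch of that round trip; the project name and origin text are illustrative, and it assumes `str()` of a `PipRequirement` yields a string `parse()` accepts again:

```python
from pants.backend.python.pip_requirement import PipRequirement

# Made-up project name. Serialization mirrors additional_header_attrs();
# deserialization mirrors the new _from_json_dict() parsing.
no_binary = {PipRequirement.parse("psycopg2")}
header_value = sorted(str(r) for r in no_binary)  # what goes into the lockfile header
reloaded = {
    PipRequirement.parse(s, description_of_origin="the lockfile header (illustrative)")
    for s in header_value
}
```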
src/python/pants/backend/python/util_rules/lockfile_metadata_test.py
@@ -36,8 +36,8 @@ def test_metadata_header_round_trip() -> None:
find_links={"find-links"},
manylinux="manylinux2014",
requirement_constraints={PipRequirement.parse("constraint")},
only_binary={"bdist"},
no_binary={"sdist"},
only_binary={PipRequirement.parse("bdist")},
no_binary={PipRequirement.parse("sdist")},
)
serialized_lockfile = input_metadata.add_header_to_lockfile(
b"req1==1.0", regenerate_command="./pants lock", delimeter="#"
@@ -99,8 +99,8 @@ def line_by_line(b: bytes) -> list[bytes]:
find_links={"find-links"},
manylinux=None,
requirement_constraints={PipRequirement.parse("constraint")},
only_binary={"bdist"},
no_binary={"sdist"},
only_binary={PipRequirement.parse("bdist")},
no_binary={PipRequirement.parse("sdist")},
)
result = metadata.add_header_to_lockfile(
input_lockfile, regenerate_command="./pants lock", delimeter="#"
@@ -298,8 +298,8 @@ def test_is_valid_for_v3_metadata(is_tool: bool) -> None:
find_links={"find-links"},
manylinux=None,
requirement_constraints={PipRequirement.parse("c1")},
only_binary={"bdist"},
no_binary={"sdist"},
only_binary={PipRequirement.parse("bdist")},
no_binary={PipRequirement.parse("sdist")},
).is_valid_for(
is_tool=is_tool,
expected_invalidation_digest="",
@@ -310,8 +310,8 @@ def test_is_valid_for_v3_metadata(is_tool: bool) -> None:
find_links={"different-find-links"},
manylinux="manylinux2014",
requirement_constraints={PipRequirement.parse("c2")},
only_binary={"not-bdist"},
no_binary={"not-sdist"},
only_binary={PipRequirement.parse("not-bdist")},
no_binary={PipRequirement.parse("not-sdist")},
)
assert result.failure_reasons == {
InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH,
12 changes: 6 additions & 6 deletions src/python/pants/backend/python/util_rules/pex_requirements.py
@@ -302,8 +302,8 @@ class ResolvePexConfig:
find_links: tuple[str, ...]
manylinux: str | None
constraints_file: ResolvePexConstraintsFile | None
only_binary: tuple[str, ...]
no_binary: tuple[str, ...]
only_binary: FrozenOrderedSet[PipRequirement]
no_binary: FrozenOrderedSet[PipRequirement]

def indexes_and_find_links_and_manylinux_pex_args(self) -> Iterator[str]:
# NB: In setting `--no-pypi`, we rely on the default value of `[python-repos].indexes`
@@ -350,8 +350,8 @@ async def determine_resolve_pex_config(
find_links=python_repos.repos,
manylinux=python_setup.manylinux,
constraints_file=None,
no_binary=(),
only_binary=(),
no_binary=FrozenOrderedSet(),
only_binary=FrozenOrderedSet(),
)

all_python_tool_resolve_names = tuple(
@@ -416,8 +416,8 @@ async def determine_resolve_pex_config(
find_links=python_repos.repos,
manylinux=python_setup.manylinux,
constraints_file=constraints_file,
no_binary=tuple(no_binary),
only_binary=tuple(only_binary),
no_binary=FrozenOrderedSet(no_binary),
only_binary=FrozenOrderedSet(only_binary),
)


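`ResolvePexConfig.only_binary` and `no_binary` switch from tuples of strings to `FrozenOrderedSet[PipRequirement]`. A minimal sketch of constructing those values, assuming `FrozenOrderedSet` comes from `pants.util.ordered_set` (project names are made up):

```python
from __future__ import annotations

from pants.backend.python.pip_requirement import PipRequirement
from pants.util.ordered_set import FrozenOrderedSet

# Empty defaults, as used when no resolve-specific config applies.
no_binary: FrozenOrderedSet[PipRequirement] = FrozenOrderedSet()
only_binary: FrozenOrderedSet[PipRequirement] = FrozenOrderedSet()

# Populated values, as built from the per-resolve options.
no_binary = FrozenOrderedSet([PipRequirement.parse("psycopg2")])
only_binary = FrozenOrderedSet([PipRequirement.parse("mypy-extensions")])
```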
src/python/pants/backend/python/util_rules/pex_requirements_test.py
@@ -35,8 +35,8 @@
find_links={"find-links"},
manylinux=None,
requirement_constraints={PipRequirement.parse("abc")},
only_binary={"bdist"},
no_binary={"sdist"},
only_binary={PipRequirement.parse("bdist")},
no_binary={PipRequirement.parse("sdist")},
)


@@ -149,8 +149,12 @@ def test_validate_tool_lockfiles(
{PipRequirement.parse("xyz" if invalid_constraints_file else "abc")}
),
),
no_binary=("not-sdist" if invalid_no_binary else "sdist",),
only_binary=("not-bdist" if invalid_only_binary else "bdist",),
no_binary=FrozenOrderedSet(
[PipRequirement.parse("not-sdist" if invalid_no_binary else "sdist")]
),
only_binary=FrozenOrderedSet(
[PipRequirement.parse("not-bdist" if invalid_only_binary else "bdist")]
),
),
)

@@ -273,8 +277,8 @@ def test_validate_user_lockfiles(
{PipRequirement.parse("xyz" if invalid_constraints_file else "abc")}
),
),
no_binary=("not-sdist" if invalid_no_binary else "sdist",),
only_binary=("not-bdist" if invalid_only_binary else "bdist",),
no_binary=FrozenOrderedSet(
[PipRequirement.parse("not-sdist" if invalid_no_binary else "sdist")]
),
only_binary=FrozenOrderedSet(
[PipRequirement.parse("not-bdist" if invalid_only_binary else "bdist")]
),
),
)
