diff --git a/src/python/pants/backend/project_info/filedeps_test.py b/src/python/pants/backend/project_info/filedeps_test.py index 0db92748f5f..42368a8b8ab 100644 --- a/src/python/pants/backend/project_info/filedeps_test.py +++ b/src/python/pants/backend/project_info/filedeps_test.py @@ -3,10 +3,12 @@ from typing import List, Optional, Set +import pytest + from pants.backend.codegen.protobuf.target_types import ProtobufLibrary from pants.backend.project_info import filedeps from pants.engine.target import Dependencies, Sources, Target -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner class MockTarget(Target): @@ -14,164 +16,181 @@ class MockTarget(Target): core_fields = (Sources, Dependencies) -class FiledepsTest(TestBase): - @classmethod - def rules(cls): - return (*super().rules(), *filedeps.rules()) - - @classmethod - def target_types(cls): - return [MockTarget, ProtobufLibrary] - - def setup_target( - self, - path: str, - *, - sources: Optional[List[str]] = None, - dependencies: Optional[List[str]] = None, - ) -> None: - if sources: - self.create_files(path, sources) - self.add_to_build_file( - path, - f"tgt(sources={sources or []}, dependencies={dependencies or []})", - ) - - def assert_filedeps( - self, - *, - targets: List[str], - expected: Set[str], - transitive: bool = False, - globs: bool = False, - ) -> None: - args = [] - if globs: - args.append("--filedeps-globs") - if transitive: - args.append("--filedeps-transitive") - result = self.run_goal_rule(filedeps.Filedeps, args=(*args, *targets)) - assert result.stdout.splitlines() == sorted(expected) - - def test_no_target(self) -> None: - self.assert_filedeps(targets=[], expected=set()) - - def test_one_target_no_source(self) -> None: - self.setup_target("some/target") - self.assert_filedeps(targets=["some/target"], expected={"some/target/BUILD"}) - - def test_one_target_one_source(self) -> None: - self.setup_target("some/target", sources=["file.py"]) - 
self.assert_filedeps( - targets=["some/target"], expected={"some/target/BUILD", "some/target/file.py"} - ) - - def test_one_target_multiple_source(self) -> None: - self.setup_target("some/target", sources=["file1.py", "file2.py"]) - self.assert_filedeps( - targets=["some/target"], - expected={"some/target/BUILD", "some/target/file1.py", "some/target/file2.py"}, - ) - - def test_one_target_no_source_one_dep(self) -> None: - self.setup_target("dep/target", sources=["file.py"]) - self.setup_target("some/target", dependencies=["dep/target"]) - self.assert_filedeps(targets=["some/target"], expected={"some/target/BUILD"}) - self.assert_filedeps( - targets=["some/target"], - transitive=True, - expected={"some/target/BUILD", "dep/target/BUILD", "dep/target/file.py"}, - ) - - def test_one_target_one_source_with_dep(self) -> None: - self.setup_target("dep/target", sources=["file.py"]) - self.setup_target("some/target", sources=["file.py"], dependencies=["dep/target"]) - direct_files = {"some/target/BUILD", "some/target/file.py"} - self.assert_filedeps(targets=["some/target"], expected=direct_files) - self.assert_filedeps( - targets=["some/target"], - transitive=True, - expected={ - *direct_files, - "dep/target/BUILD", - "dep/target/file.py", - }, - ) - - def test_multiple_targets_one_source(self) -> None: - self.setup_target("some/target", sources=["file.py"]) - self.setup_target("other/target", sources=["file.py"]) - self.assert_filedeps( - targets=["some/target", "other/target"], - expected={ - "some/target/BUILD", - "some/target/file.py", - "other/target/BUILD", - "other/target/file.py", - }, - ) - - def test_multiple_targets_one_source_with_dep(self) -> None: - self.setup_target("dep1/target", sources=["file.py"]) - self.setup_target("dep2/target", sources=["file.py"]) - self.setup_target("some/target", sources=["file.py"], dependencies=["dep1/target"]) - self.setup_target("other/target", sources=["file.py"], dependencies=["dep2/target"]) - direct_files = { - 
"some/target/BUILD", - "some/target/file.py", - "other/target/BUILD", - "other/target/file.py", - } - self.assert_filedeps( - targets=["some/target", "other/target"], - expected=direct_files, - ) - self.assert_filedeps( - targets=["some/target", "other/target"], - transitive=True, - expected={ - *direct_files, - "dep1/target/BUILD", - "dep1/target/file.py", - "dep2/target/BUILD", - "dep2/target/file.py", - }, - ) - - def test_multiple_targets_one_source_overlapping(self) -> None: - self.setup_target("dep/target", sources=["file.py"]) - self.setup_target("some/target", sources=["file.py"], dependencies=["dep/target"]) - self.setup_target("other/target", sources=["file.py"], dependencies=["dep/target"]) - direct_files = { +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner(rules=filedeps.rules(), target_types=[MockTarget, ProtobufLibrary]) + + +def setup_target( + rule_runner: RuleRunner, + path: str, + *, + sources: Optional[List[str]] = None, + dependencies: Optional[List[str]] = None, +) -> None: + if sources: + rule_runner.create_files(path, sources) + rule_runner.add_to_build_file( + path, + f"tgt(sources={sources or []}, dependencies={dependencies or []})", + ) + + +def assert_filedeps( + rule_runner: RuleRunner, + *, + targets: List[str], + expected: Set[str], + transitive: bool = False, + globs: bool = False, +) -> None: + args = [] + if globs: + args.append("--filedeps-globs") + if transitive: + args.append("--filedeps-transitive") + result = rule_runner.run_goal_rule(filedeps.Filedeps, args=(*args, *targets)) + assert result.stdout.splitlines() == sorted(expected) + + +def test_no_target(rule_runner: RuleRunner) -> None: + assert_filedeps(rule_runner, targets=[], expected=set()) + + +def test_one_target_no_source(rule_runner: RuleRunner) -> None: + setup_target(rule_runner, "some/target") + assert_filedeps(rule_runner, targets=["some/target"], expected={"some/target/BUILD"}) + + +def test_one_target_one_source(rule_runner: RuleRunner) -> 
None: + setup_target(rule_runner, "some/target", sources=["file.py"]) + assert_filedeps( + rule_runner, targets=["some/target"], expected={"some/target/BUILD", "some/target/file.py"} + ) + + +def test_one_target_multiple_source(rule_runner: RuleRunner) -> None: + setup_target(rule_runner, "some/target", sources=["file1.py", "file2.py"]) + assert_filedeps( + rule_runner, + targets=["some/target"], + expected={"some/target/BUILD", "some/target/file1.py", "some/target/file2.py"}, + ) + + +def test_one_target_no_source_one_dep(rule_runner: RuleRunner) -> None: + setup_target(rule_runner, "dep/target", sources=["file.py"]) + setup_target(rule_runner, "some/target", dependencies=["dep/target"]) + assert_filedeps(rule_runner, targets=["some/target"], expected={"some/target/BUILD"}) + assert_filedeps( + rule_runner, + targets=["some/target"], + transitive=True, + expected={"some/target/BUILD", "dep/target/BUILD", "dep/target/file.py"}, + ) + + +def test_one_target_one_source_with_dep(rule_runner: RuleRunner) -> None: + setup_target(rule_runner, "dep/target", sources=["file.py"]) + setup_target(rule_runner, "some/target", sources=["file.py"], dependencies=["dep/target"]) + direct_files = {"some/target/BUILD", "some/target/file.py"} + assert_filedeps(rule_runner, targets=["some/target"], expected=direct_files) + assert_filedeps( + rule_runner, + targets=["some/target"], + transitive=True, + expected={ + *direct_files, + "dep/target/BUILD", + "dep/target/file.py", + }, + ) + + +def test_multiple_targets_one_source(rule_runner: RuleRunner) -> None: + setup_target(rule_runner, "some/target", sources=["file.py"]) + setup_target(rule_runner, "other/target", sources=["file.py"]) + assert_filedeps( + rule_runner, + targets=["some/target", "other/target"], + expected={ "some/target/BUILD", "some/target/file.py", "other/target/BUILD", "other/target/file.py", - } - self.assert_filedeps(targets=["some/target", "other/target"], expected=direct_files) - self.assert_filedeps( - 
targets=["some/target", "other/target"], - transitive=True, - expected={*direct_files, "dep/target/BUILD", "dep/target/file.py"}, - ) - - def test_globs(self) -> None: - self.create_files("some/target", ["test1.py", "test2.py"]) - self.add_to_build_file("some/target", target="tgt(sources=['test*.py'])") - self.assert_filedeps( - targets=["some/target"], - expected={"some/target/BUILD", "some/target/test*.py"}, - globs=True, - ) - - def test_build_with_file_ext(self) -> None: - self.create_file("some/target/BUILD.ext", contents="tgt()") - self.assert_filedeps(targets=["some/target"], expected={"some/target/BUILD.ext"}) - - def test_codegen_targets_use_protocol_files(self) -> None: - # That is, don't output generated files. - self.create_file("some/target/f.proto") - self.add_to_build_file("some/target", "protobuf_library()") - self.assert_filedeps( - targets=["some/target"], expected={"some/target/BUILD", "some/target/f.proto"} - ) + }, + ) + + +def test_multiple_targets_one_source_with_dep(rule_runner: RuleRunner) -> None: + setup_target(rule_runner, "dep1/target", sources=["file.py"]) + setup_target(rule_runner, "dep2/target", sources=["file.py"]) + setup_target(rule_runner, "some/target", sources=["file.py"], dependencies=["dep1/target"]) + setup_target(rule_runner, "other/target", sources=["file.py"], dependencies=["dep2/target"]) + direct_files = { + "some/target/BUILD", + "some/target/file.py", + "other/target/BUILD", + "other/target/file.py", + } + assert_filedeps( + rule_runner, + targets=["some/target", "other/target"], + expected=direct_files, + ) + assert_filedeps( + rule_runner, + targets=["some/target", "other/target"], + transitive=True, + expected={ + *direct_files, + "dep1/target/BUILD", + "dep1/target/file.py", + "dep2/target/BUILD", + "dep2/target/file.py", + }, + ) + + +def test_multiple_targets_one_source_overlapping(rule_runner: RuleRunner) -> None: + setup_target(rule_runner, "dep/target", sources=["file.py"]) + setup_target(rule_runner, 
"some/target", sources=["file.py"], dependencies=["dep/target"]) + setup_target(rule_runner, "other/target", sources=["file.py"], dependencies=["dep/target"]) + direct_files = { + "some/target/BUILD", + "some/target/file.py", + "other/target/BUILD", + "other/target/file.py", + } + assert_filedeps(rule_runner, targets=["some/target", "other/target"], expected=direct_files) + assert_filedeps( + rule_runner, + targets=["some/target", "other/target"], + transitive=True, + expected={*direct_files, "dep/target/BUILD", "dep/target/file.py"}, + ) + + +def test_globs(rule_runner: RuleRunner) -> None: + rule_runner.create_files("some/target", ["test1.py", "test2.py"]) + rule_runner.add_to_build_file("some/target", target="tgt(sources=['test*.py'])") + assert_filedeps( + rule_runner, + targets=["some/target"], + expected={"some/target/BUILD", "some/target/test*.py"}, + globs=True, + ) + + +def test_build_with_file_ext(rule_runner: RuleRunner) -> None: + rule_runner.create_file("some/target/BUILD.ext", contents="tgt()") + assert_filedeps(rule_runner, targets=["some/target"], expected={"some/target/BUILD.ext"}) + + +def test_codegen_targets_use_protocol_files(rule_runner: RuleRunner) -> None: + # That is, don't output generated files. 
+ rule_runner.create_file("some/target/f.proto") + rule_runner.add_to_build_file("some/target", "protobuf_library()") + assert_filedeps( + rule_runner, targets=["some/target"], expected={"some/target/BUILD", "some/target/f.proto"} + ) diff --git a/src/python/pants/backend/project_info/list_roots_test.py b/src/python/pants/backend/project_info/list_roots_test.py index d6eaa2d4b70..5d9b3273b00 100644 --- a/src/python/pants/backend/project_info/list_roots_test.py +++ b/src/python/pants/backend/project_info/list_roots_test.py @@ -3,49 +3,56 @@ from typing import List, Optional +import pytest + from pants.backend.project_info import list_roots from pants.backend.project_info.list_roots import Roots -from pants.testutil.test_base import TestBase - - -class RootsTest(TestBase): - @classmethod - def rules(cls): - return [*super().rules(), *list_roots.rules()] - - def assert_roots( - self, - configured: List[str], - *, - marker_files: Optional[List[str]] = None, - expected: Optional[List[str]] = None, - ) -> None: - result = self.run_goal_rule( - Roots, - args=[ - f"--source-root-patterns={configured}", - f"--source-marker-filenames={marker_files or []}", - ], - ) - assert result.stdout.splitlines() == sorted(expected or configured) - - def test_single_source_root(self) -> None: - self.create_dir("fakeroot") - self.assert_roots(["fakeroot"]) - - def test_multiple_source_roots(self) -> None: - self.create_dir("fakerootA") - self.create_dir("fakerootB") - self.assert_roots(["fakerootA", "fakerootB"]) - - def test_buildroot_is_source_root(self) -> None: - self.assert_roots(["/"], expected=["."]) - - def test_marker_file(self) -> None: - self.create_file("fakerootA/SOURCE_ROOT") - self.create_file("fakerootB/setup.py") - self.assert_roots( - configured=[], - marker_files=["SOURCE_ROOT", "setup.py"], - expected=["fakerootA", "fakerootB"], - ) +from pants.testutil.rule_runner import RuleRunner + + +@pytest.fixture +def rule_runner() -> RuleRunner: + return 
RuleRunner(rules=list_roots.rules()) + + +def assert_roots( + rule_runner: RuleRunner, + configured: List[str], + *, + marker_files: Optional[List[str]] = None, + expected: Optional[List[str]] = None, +) -> None: + result = rule_runner.run_goal_rule( + Roots, + args=[ + f"--source-root-patterns={configured}", + f"--source-marker-filenames={marker_files or []}", + ], + ) + assert result.stdout.splitlines() == sorted(expected or configured) + + +def test_single_source_root(rule_runner: RuleRunner) -> None: + rule_runner.create_dir("fakeroot") + assert_roots(rule_runner, ["fakeroot"]) + + +def test_multiple_source_roots(rule_runner: RuleRunner) -> None: + rule_runner.create_dir("fakerootA") + rule_runner.create_dir("fakerootB") + assert_roots(rule_runner, ["fakerootA", "fakerootB"]) + + +def test_buildroot_is_source_root(rule_runner: RuleRunner) -> None: + assert_roots(rule_runner, ["/"], expected=["."]) + + +def test_marker_file(rule_runner: RuleRunner) -> None: + rule_runner.create_file("fakerootA/SOURCE_ROOT") + rule_runner.create_file("fakerootB/setup.py") + assert_roots( + rule_runner, + configured=[], + marker_files=["SOURCE_ROOT", "setup.py"], + expected=["fakerootA", "fakerootB"], + ) diff --git a/src/python/pants/backend/python/pants_requirement_test.py b/src/python/pants/backend/python/pants_requirement_test.py index ff6594dcada..d224e68e396 100644 --- a/src/python/pants/backend/python/pants_requirement_test.py +++ b/src/python/pants/backend/python/pants_requirement_test.py @@ -11,85 +11,94 @@ PythonRequirementsField, ) from pants.base.build_environment import pants_version -from pants.build_graph.build_file_aliases import BuildFileAliases from pants.engine.addresses import Address from pants.engine.internals.scheduler import ExecutionError from pants.engine.target import WrappedTarget from pants.testutil.option_util import create_options_bootstrapper -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner from 
pants.util.frozendict import FrozenDict -class PantsRequirementTest(TestBase): - @classmethod - def alias_groups(cls): - return BuildFileAliases( - context_aware_object_factories={PantsRequirement.alias: PantsRequirement}, - ) +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + target_types=[PythonRequirementLibrary], + context_aware_object_factories={PantsRequirement.alias: PantsRequirement}, + ) - @classmethod - def target_types(cls): - return [PythonRequirementLibrary] - - def assert_pants_requirement( - self, - build_file_entry: str, - *, - expected_target_name: str, - expected_dist: str = "pantsbuild.pants", - expected_module: str = "pants", - ) -> None: - self.add_to_build_file("3rdparty/python", f"{build_file_entry}\n") - target = self.request_product( - WrappedTarget, - [ - Address("3rdparty/python", target_name=expected_target_name), - create_options_bootstrapper(), - ], - ).target - assert isinstance(target, PythonRequirementLibrary) - assert target[PythonRequirementsField].value == ( - Requirement.parse(f"{expected_dist}=={pants_version()}"), - ) - assert target[ModuleMappingField].value == FrozenDict({expected_dist: (expected_module,)}) - def test_target_name(self) -> None: - self.assert_pants_requirement("pants_requirement()", expected_target_name="python") - self.assert_pants_requirement( - "pants_requirement(name='pantsbuild.pants')", expected_target_name="pantsbuild.pants" - ) +def assert_pants_requirement( + rule_runner: RuleRunner, + build_file_entry: str, + *, + expected_target_name: str, + expected_dist: str = "pantsbuild.pants", + expected_module: str = "pants", +) -> None: + rule_runner.add_to_build_file("3rdparty/python", f"{build_file_entry}\n") + target = rule_runner.request_product( + WrappedTarget, + [ + Address("3rdparty/python", target_name=expected_target_name), + create_options_bootstrapper(), + ], + ).target + assert isinstance(target, PythonRequirementLibrary) + assert target[PythonRequirementsField].value == ( 
+ Requirement.parse(f"{expected_dist}=={pants_version()}"), + ) + assert target[ModuleMappingField].value == FrozenDict({expected_dist: (expected_module,)}) - def test_dist(self) -> None: - self.assert_pants_requirement( - "pants_requirement(dist='pantsbuild.pants')", expected_target_name="pantsbuild.pants" - ) - def test_contrib(self) -> None: - dist = "pantsbuild.pants.contrib.bob" - module = "pants.contrib.bob" - self.assert_pants_requirement( - f"pants_requirement(dist='{dist}')", - expected_target_name=dist, - expected_dist=dist, - expected_module=module, - ) - self.assert_pants_requirement( - f"pants_requirement(name='bob', dist='{dist}')", - expected_target_name="bob", - expected_dist=dist, - expected_module=module, - ) +def test_target_name(rule_runner: RuleRunner) -> None: + assert_pants_requirement(rule_runner, "pants_requirement()", expected_target_name="python") + assert_pants_requirement( + rule_runner, + "pants_requirement(name='pantsbuild.pants')", + expected_target_name="pantsbuild.pants", + ) + - def test_bad_dist(self) -> None: - with pytest.raises(ExecutionError): - self.assert_pants_requirement( - "pants_requirement(name='jane', dist='pantsbuild.pantsish')", - expected_target_name="jane", - ) - - def test_modules_override(self) -> None: - self.assert_pants_requirement( - "pants_requirement(dist='pantsbuild.pants', modules=['fake'])", - expected_target_name="pantsbuild.pants", - expected_module="fake", +def test_dist(rule_runner: RuleRunner) -> None: + assert_pants_requirement( + rule_runner, + "pants_requirement(dist='pantsbuild.pants')", + expected_target_name="pantsbuild.pants", + ) + + +def test_contrib(rule_runner: RuleRunner) -> None: + dist = "pantsbuild.pants.contrib.bob" + module = "pants.contrib.bob" + assert_pants_requirement( + rule_runner, + f"pants_requirement(dist='{dist}')", + expected_target_name=dist, + expected_dist=dist, + expected_module=module, + ) + assert_pants_requirement( + rule_runner, + f"pants_requirement(name='bob', 
dist='{dist}')", + expected_target_name="bob", + expected_dist=dist, + expected_module=module, + ) + + +def test_bad_dist(rule_runner: RuleRunner) -> None: + with pytest.raises(ExecutionError): + assert_pants_requirement( + rule_runner, + "pants_requirement(name='jane', dist='pantsbuild.pantsish')", + expected_target_name="jane", ) + + +def test_modules_override(rule_runner: RuleRunner) -> None: + assert_pants_requirement( + rule_runner, + "pants_requirement(dist='pantsbuild.pants', modules=['fake'])", + expected_target_name="pantsbuild.pants", + expected_module="fake", + ) diff --git a/src/python/pants/backend/python/pipenv_requirements_test.py b/src/python/pants/backend/python/pipenv_requirements_test.py index 806a7a3511e..d7f2a8cad43 100644 --- a/src/python/pants/backend/python/pipenv_requirements_test.py +++ b/src/python/pants/backend/python/pipenv_requirements_test.py @@ -5,108 +5,98 @@ from textwrap import dedent from typing import Iterable +import pytest from pkg_resources import Requirement from pants.backend.python.pipenv_requirements import PipenvRequirements from pants.backend.python.target_types import PythonRequirementLibrary, PythonRequirementsFile from pants.base.specs import AddressSpecs, DescendantAddresses, FilesystemSpecs, Specs -from pants.build_graph.build_file_aliases import BuildFileAliases from pants.engine.addresses import Address from pants.engine.rules import QueryRule from pants.engine.target import Targets from pants.option.options_bootstrapper import OptionsBootstrapper from pants.testutil.option_util import create_options_bootstrapper -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner -class PipenvRequirementsTest(TestBase): - @classmethod - def alias_groups(cls): - return BuildFileAliases( - context_aware_object_factories={"pipenv_requirements": PipenvRequirements}, - ) +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[QueryRule(Targets, 
(OptionsBootstrapper, Specs))], + target_types=[PythonRequirementLibrary, PythonRequirementsFile], + context_aware_object_factories={"pipenv_requirements": PipenvRequirements}, + ) - @classmethod - def rules(cls): - return ( - *super().rules(), - QueryRule(Targets, (OptionsBootstrapper, Specs)), - ) - @classmethod - def target_types(cls): - return [PythonRequirementLibrary, PythonRequirementsFile] +def assert_pipenv_requirements( + rule_runner: RuleRunner, + build_file_entry: str, + pipfile_lock: dict, + *, + expected_file_dep: PythonRequirementsFile, + expected_targets: Iterable[PythonRequirementLibrary], + pipfile_lock_relpath: str = "Pipfile.lock", +) -> None: + rule_runner.add_to_build_file("", f"{build_file_entry}\n") + rule_runner.create_file(pipfile_lock_relpath, dumps(pipfile_lock)) + targets = rule_runner.request_product( + Targets, + [ + Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([])), + create_options_bootstrapper(), + ], + ) - def assert_pipenv_requirements( - self, - build_file_entry: str, - pipfile_lock: dict, - *, - expected_file_dep: PythonRequirementsFile, - expected_targets: Iterable[PythonRequirementLibrary], - pipfile_lock_relpath: str = "Pipfile.lock", - ) -> None: - self.add_to_build_file("", f"{build_file_entry}\n") - self.create_file(pipfile_lock_relpath, dumps(pipfile_lock)) - targets = self.request_product( - Targets, - [ - Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([])), - create_options_bootstrapper(), - ], - ) + assert {expected_file_dep, *expected_targets} == set(targets) - assert {expected_file_dep, *expected_targets} == set(targets) - def test_pipfile_lock(self) -> None: - """This tests that we correctly create a new python_requirement_library for each entry in a - Pipfile.lock file. +def test_pipfile_lock(rule_runner: RuleRunner) -> None: + """This tests that we correctly create a new python_requirement_library for each entry in a + Pipfile.lock file. 
- Edge cases: - * Develop and Default requirements are used - * If a module_mapping is given, and the project is in the map, we copy over a subset of the mapping to the created target. - """ - - self.assert_pipenv_requirements( - "pipenv_requirements(module_mapping={'ansicolors': ['colors']})", - { - "default": { - "ansicolors": {"version": ">=1.18.0"}, + Edge cases: + * Develop and Default requirements are used + * If a module_mapping is given, and the project is in the map, we copy over a subset of the + mapping to the created target. + """ + assert_pipenv_requirements( + rule_runner, + "pipenv_requirements(module_mapping={'ansicolors': ['colors']})", + { + "default": {"ansicolors": {"version": ">=1.18.0"}}, + "develop": {"cachetools": {"markers": "python_version ~= '3.5'", "version": "==4.1.1"}}, + }, + expected_file_dep=PythonRequirementsFile( + {"sources": ["Pipfile.lock"]}, address=Address("", target_name="Pipfile.lock") + ), + expected_targets=[ + PythonRequirementLibrary( + { + "requirements": [Requirement.parse("ansicolors>=1.18.0")], + "dependencies": [":Pipfile.lock"], + "module_mapping": {"ansicolors": ["colors"]}, }, - "develop": { - "cachetools": {"markers": "python_version ~= '3.5'", "version": "==4.1.1"}, + address=Address("", target_name="ansicolors"), + ), + PythonRequirementLibrary( + { + "requirements": [ + Requirement.parse("cachetools==4.1.1;python_version ~= '3.5'") + ], + "dependencies": [":Pipfile.lock"], }, - }, - expected_file_dep=PythonRequirementsFile( - {"sources": ["Pipfile.lock"]}, address=Address("", target_name="Pipfile.lock") + address=Address("", target_name="cachetools"), ), - expected_targets=[ - PythonRequirementLibrary( - { - "requirements": [Requirement.parse("ansicolors>=1.18.0")], - "dependencies": [":Pipfile.lock"], - "module_mapping": {"ansicolors": ["colors"]}, - }, - address=Address("", target_name="ansicolors"), - ), - PythonRequirementLibrary( - { - "requirements": [ - 
Requirement.parse("cachetools==4.1.1;python_version ~= '3.5'") - ], - "dependencies": [":Pipfile.lock"], - }, - address=Address("", target_name="cachetools"), - ), - ], - ) + ], + ) - def test_supply_python_requirements_file(self) -> None: - """This tests that we can supply our own `_python_requirements_file`.""" - self.assert_pipenv_requirements( - dedent( - """ +def test_supply_python_requirements_file(rule_runner: RuleRunner) -> None: + """This tests that we can supply our own `_python_requirements_file`.""" + assert_pipenv_requirements( + rule_runner, + dedent( + """ pipenv_requirements( requirements_relpath='custom/pipfile/Pipfile.lock', pipfile_target='//:custom_pipfile_target' @@ -117,24 +107,20 @@ def test_supply_python_requirements_file(self) -> None: sources=['custom/pipfile/Pipfile.lock'] ) """ - ), - { - "default": { - "ansicolors": {"version": ">=1.18.0"}, + ), + {"default": {"ansicolors": {"version": ">=1.18.0"}}}, + expected_file_dep=PythonRequirementsFile( + {"sources": ["custom/pipfile/Pipfile.lock"]}, + address=Address("", target_name="custom_pipfile_target"), + ), + expected_targets=[ + PythonRequirementLibrary( + { + "requirements": [Requirement.parse("ansicolors>=1.18.0")], + "dependencies": ["//:custom_pipfile_target"], }, - }, - expected_file_dep=PythonRequirementsFile( - {"sources": ["custom/pipfile/Pipfile.lock"]}, - address=Address("", target_name="custom_pipfile_target"), + address=Address("", target_name="ansicolors"), ), - expected_targets=[ - PythonRequirementLibrary( - { - "requirements": [Requirement.parse("ansicolors>=1.18.0")], - "dependencies": ["//:custom_pipfile_target"], - }, - address=Address("", target_name="ansicolors"), - ), - ], - pipfile_lock_relpath="custom/pipfile/Pipfile.lock", - ) + ], + pipfile_lock_relpath="custom/pipfile/Pipfile.lock", + ) diff --git a/src/python/pants/backend/python/python_requirements_test.py b/src/python/pants/backend/python/python_requirements_test.py index 52bd66af1c2..0e5d39f7993 100644 --- 
a/src/python/pants/backend/python/python_requirements_test.py +++ b/src/python/pants/backend/python/python_requirements_test.py @@ -10,135 +10,131 @@ from pants.backend.python.python_requirements import PythonRequirements from pants.backend.python.target_types import PythonRequirementLibrary, PythonRequirementsFile from pants.base.specs import AddressSpecs, DescendantAddresses, FilesystemSpecs, Specs -from pants.build_graph.build_file_aliases import BuildFileAliases from pants.engine.addresses import Address from pants.engine.internals.scheduler import ExecutionError from pants.engine.rules import QueryRule from pants.engine.target import Targets from pants.option.options_bootstrapper import OptionsBootstrapper from pants.testutil.option_util import create_options_bootstrapper -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner -class PantsRequirementTest(TestBase): - @classmethod - def alias_groups(cls): - return BuildFileAliases( - context_aware_object_factories={"python_requirements": PythonRequirements}, - ) +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[QueryRule(Targets, (OptionsBootstrapper, Specs))], + target_types=[PythonRequirementLibrary, PythonRequirementsFile], + context_aware_object_factories={"python_requirements": PythonRequirements}, + ) - @classmethod - def rules(cls): - return ( - *super().rules(), - QueryRule(Targets, (OptionsBootstrapper, Specs)), - ) - @classmethod - def target_types(cls): - return [PythonRequirementLibrary, PythonRequirementsFile] +def assert_python_requirements( + rule_runner: RuleRunner, + build_file_entry: str, + requirements_txt: str, + *, + expected_file_dep: PythonRequirementsFile, + expected_targets: Iterable[PythonRequirementLibrary], + requirements_txt_relpath: str = "requirements.txt", +) -> None: + rule_runner.add_to_build_file("", f"{build_file_entry}\n") + rule_runner.create_file(requirements_txt_relpath, requirements_txt) + targets = 
rule_runner.request_product( + Targets, + [ + Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([])), + create_options_bootstrapper(), + ], + ) + assert {expected_file_dep, *expected_targets} == set(targets) - def assert_python_requirements( - self, - build_file_entry: str, - requirements_txt: str, - *, - expected_file_dep: PythonRequirementsFile, - expected_targets: Iterable[PythonRequirementLibrary], - requirements_txt_relpath: str = "requirements.txt", - ) -> None: - self.add_to_build_file("", f"{build_file_entry}\n") - self.create_file(requirements_txt_relpath, requirements_txt) - targets = self.request_product( - Targets, - [ - Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([])), - create_options_bootstrapper(), - ], - ) - assert {expected_file_dep, *expected_targets} == set(targets) - def test_requirements_txt(self) -> None: - """This tests that we correctly create a new python_requirement_library for each entry in a - requirements.txt file. +def test_requirements_txt(rule_runner: RuleRunner) -> None: + """This tests that we correctly create a new python_requirement_library for each entry in a + requirements.txt file. - Some edge cases: - * We ignore comments and options (values that start with `--`). - * If a module_mapping is given, and the project is in the map, we copy over a subset of the - mapping to the created target. - * Projects get normalized thanks to Requirement.parse(). - """ - self.assert_python_requirements( - "python_requirements(module_mapping={'ansicolors': ['colors']})", - dedent( - """\ - # Comment. - --find-links=https://duckduckgo.com - ansicolors>=1.18.0 - Django==3.2 ; python_version>'3' - Un-Normalized-PROJECT # Inline comment. - """ + Some edge cases: + * We ignore comments and options (values that start with `--`). + * If a module_mapping is given, and the project is in the map, we copy over a subset of the + mapping to the created target. + * Projects get normalized thanks to Requirement.parse(). 
+ """ + assert_python_requirements( + rule_runner, + "python_requirements(module_mapping={'ansicolors': ['colors']})", + dedent( + """\ + # Comment. + --find-links=https://duckduckgo.com + ansicolors>=1.18.0 + Django==3.2 ; python_version>'3' + Un-Normalized-PROJECT # Inline comment. + """ + ), + expected_file_dep=PythonRequirementsFile( + {"sources": ["requirements.txt"]}, + address=Address("", target_name="requirements.txt"), + ), + expected_targets=[ + PythonRequirementLibrary( + { + "dependencies": [":requirements.txt"], + "requirements": [Requirement.parse("ansicolors>=1.18.0")], + "module_mapping": {"ansicolors": ["colors"]}, + }, + address=Address("", target_name="ansicolors"), + ), + PythonRequirementLibrary( + { + "dependencies": [":requirements.txt"], + "requirements": [Requirement.parse("Django==3.2 ; python_version>'3'")], + }, + address=Address("", target_name="Django"), ), - expected_file_dep=PythonRequirementsFile( - {"sources": ["requirements.txt"]}, - address=Address("", target_name="requirements.txt"), + PythonRequirementLibrary( + { + "dependencies": [":requirements.txt"], + "requirements": [Requirement.parse("Un_Normalized_PROJECT")], + }, + address=Address("", target_name="Un-Normalized-PROJECT"), ), - expected_targets=[ - PythonRequirementLibrary( - { - "dependencies": [":requirements.txt"], - "requirements": [Requirement.parse("ansicolors>=1.18.0")], - "module_mapping": {"ansicolors": ["colors"]}, - }, - address=Address("", target_name="ansicolors"), - ), - PythonRequirementLibrary( - { - "dependencies": [":requirements.txt"], - "requirements": [Requirement.parse("Django==3.2 ; python_version>'3'")], - }, - address=Address("", target_name="Django"), - ), - PythonRequirementLibrary( - { - "dependencies": [":requirements.txt"], - "requirements": [Requirement.parse("Un_Normalized_PROJECT")], - }, - address=Address("", target_name="Un-Normalized-PROJECT"), - ), - ], + ], + ) + + +def test_invalid_req(rule_runner: RuleRunner) -> None: + """Test 
that we give a nice error message.""" + with pytest.raises(ExecutionError) as exc: + assert_python_requirements( + rule_runner, + "python_requirements()", + "\n\nNot A Valid Req == 3.7", + expected_file_dep=PythonRequirementsFile({}, address=Address("doesnt_matter")), + expected_targets=[], ) + assert ( + "Invalid requirement in requirements.txt at line 3 due to value 'Not A Valid Req == " + "3.7'." + ) in str(exc.value) - def test_invalid_req(self) -> None: - """Test that we give a nice error message.""" - with pytest.raises(ExecutionError) as exc: - self.assert_python_requirements( - "python_requirements()", - "\n\nNot A Valid Req == 3.7", - expected_file_dep=PythonRequirementsFile({}, address=Address("doesnt_matter")), - expected_targets=[], - ) - assert ( - "Invalid requirement in requirements.txt at line 3 due to value 'Not A Valid Req == " - "3.7'." - ) in str(exc.value) - def test_relpath_override(self) -> None: - self.assert_python_requirements( - "python_requirements(requirements_relpath='subdir/requirements.txt')", - "ansicolors>=1.18.0", - requirements_txt_relpath="subdir/requirements.txt", - expected_file_dep=PythonRequirementsFile( - {"sources": ["subdir/requirements.txt"]}, - address=Address("", target_name="subdir/requirements.txt"), +def test_relpath_override(rule_runner: RuleRunner) -> None: + assert_python_requirements( + rule_runner, + "python_requirements(requirements_relpath='subdir/requirements.txt')", + "ansicolors>=1.18.0", + requirements_txt_relpath="subdir/requirements.txt", + expected_file_dep=PythonRequirementsFile( + {"sources": ["subdir/requirements.txt"]}, + address=Address("", target_name="subdir/requirements.txt"), + ), + expected_targets=[ + PythonRequirementLibrary( + { + "dependencies": [":subdir/requirements.txt"], + "requirements": [Requirement.parse("ansicolors>=1.18.0")], + }, + address=Address("", target_name="ansicolors"), ), - expected_targets=[ - PythonRequirementLibrary( - { - "dependencies": 
[":subdir/requirements.txt"], - "requirements": [Requirement.parse("ansicolors>=1.18.0")], - }, - address=Address("", target_name="ansicolors"), - ), - ], - ) + ], + ) diff --git a/src/python/pants/core/util_rules/archive_test.py b/src/python/pants/core/util_rules/archive_test.py index 5d89c27af75..2438f3cddb2 100644 --- a/src/python/pants/core/util_rules/archive_test.py +++ b/src/python/pants/core/util_rules/archive_test.py @@ -5,83 +5,66 @@ import zipfile from io import BytesIO +import pytest + from pants.core.util_rules.archive import ExtractedDigest, MaybeExtractable from pants.core.util_rules.archive import rules as archive_rules from pants.engine.fs import DigestContents, FileContent from pants.engine.rules import QueryRule -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner + + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner(rules=[*archive_rules(), QueryRule(ExtractedDigest, (MaybeExtractable,))]) + + +FILES = {"foo": b"bar", "hello/world": b"Hello, World!"} +EXPECTED_DIGEST_CONTENTS = DigestContents( + [FileContent(name, content) for name, content in FILES.items()] +) -class ArchiveTest(TestBase): +@pytest.mark.parametrize("compression", [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED]) +def test_extract_zip(rule_runner: RuleRunner, compression: int) -> None: + io = BytesIO() + with zipfile.ZipFile(io, "w", compression=compression) as zf: + for name, content in FILES.items(): + zf.writestr(name, content) + io.flush() + input_snapshot = rule_runner.make_snapshot({"test.zip": io.getvalue()}) + extracted_digest = rule_runner.request_product( + ExtractedDigest, [MaybeExtractable(input_snapshot.digest)] + ) + + digest_contents = rule_runner.request_product(DigestContents, [extracted_digest.digest]) + assert digest_contents == EXPECTED_DIGEST_CONTENTS + + +@pytest.mark.parametrize("compression", ["", "gz", "bz2", "xz"]) +def test_extract_tar(rule_runner: RuleRunner, compression: str) -> None: + io = 
BytesIO() + mode = f"w:{compression}" if compression else "w" + with tarfile.open(mode=mode, fileobj=io) as tf: + for name, content in FILES.items(): + tarinfo = tarfile.TarInfo(name) + tarinfo.size = len(content) + tf.addfile(tarinfo, BytesIO(content)) + ext = f"tar.{compression}" if compression else "tar" + input_snapshot = rule_runner.make_snapshot({f"test.{ext}": io.getvalue()}) + extracted_digest = rule_runner.request_product( + ExtractedDigest, [MaybeExtractable(input_snapshot.digest)] + ) + + digest_contents = rule_runner.request_product(DigestContents, [extracted_digest.digest]) + assert digest_contents == EXPECTED_DIGEST_CONTENTS - files = {"foo": b"bar", "hello/world": b"Hello, World!"} - expected_digest_contents = DigestContents( - [FileContent(name, content) for name, content in files.items()] +def test_non_archive(rule_runner: RuleRunner) -> None: + input_snapshot = rule_runner.make_snapshot({"test.sh": b"# A shell script"}) + extracted_digest = rule_runner.request_product( + ExtractedDigest, [MaybeExtractable(input_snapshot.digest)] ) - @classmethod - def rules(cls): - return (*super().rules(), *archive_rules(), QueryRule(ExtractedDigest, (MaybeExtractable,))) - - # TODO: Figure out a way to run these tests without a TestBase subclass, and use - # pytest.mark.parametrize. 
- def _do_test_extract_zip(self, compression) -> None: - io = BytesIO() - with zipfile.ZipFile(io, "w", compression=compression) as zf: - for name, content in self.files.items(): - zf.writestr(name, content) - io.flush() - input_snapshot = self.make_snapshot({"test.zip": io.getvalue()}) - extracted_digest = self.request_product( - ExtractedDigest, [MaybeExtractable(input_snapshot.digest)] - ) - - digest_contents = self.request_product(DigestContents, [extracted_digest.digest]) - assert self.expected_digest_contents == digest_contents - - def test_extract_zip_stored(self) -> None: - self._do_test_extract_zip(zipfile.ZIP_STORED) - - def test_extract_zip_deflated(self) -> None: - self._do_test_extract_zip(zipfile.ZIP_DEFLATED) - - # TODO: Figure out a way to run these tests without a TestBase subclass, and use - # pytest.mark.parametrize. - def _do_test_extract_tar(self, compression) -> None: - io = BytesIO() - mode = f"w:{compression}" if compression else "w" - with tarfile.open(mode=mode, fileobj=io) as tf: - for name, content in self.files.items(): - tarinfo = tarfile.TarInfo(name) - tarinfo.size = len(content) - tf.addfile(tarinfo, BytesIO(content)) - ext = f"tar.{compression}" if compression else "tar" - input_snapshot = self.make_snapshot({f"test.{ext}": io.getvalue()}) - extracted_digest = self.request_product( - ExtractedDigest, [MaybeExtractable(input_snapshot.digest)] - ) - - digest_contents = self.request_product(DigestContents, [extracted_digest.digest]) - assert self.expected_digest_contents == digest_contents - - def test_extract_tar(self) -> None: - self._do_test_extract_tar("") - - def test_extract_tar_gz(self) -> None: - self._do_test_extract_tar("gz") - - def test_extract_tar_bz2(self) -> None: - self._do_test_extract_tar("bz2") - - def test_extract_tar_xz(self) -> None: - self._do_test_extract_tar("xz") - - def test_non_archive(self) -> None: - input_snapshot = self.make_snapshot({"test.sh": b"# A shell script"}) - extracted_digest = 
self.request_product( - ExtractedDigest, [MaybeExtractable(input_snapshot.digest)] - ) - - digest_contents = self.request_product(DigestContents, [extracted_digest.digest]) - assert DigestContents([FileContent("test.sh", b"# A shell script")]) == digest_contents + digest_contents = rule_runner.request_product(DigestContents, [extracted_digest.digest]) + assert DigestContents([FileContent("test.sh", b"# A shell script")]) == digest_contents diff --git a/src/python/pants/core/util_rules/distdir_test.py b/src/python/pants/core/util_rules/distdir_test.py index 2971e7f8d71..b6e5c9802b0 100644 --- a/src/python/pants/core/util_rules/distdir_test.py +++ b/src/python/pants/core/util_rules/distdir_test.py @@ -8,7 +8,7 @@ from pants.core.util_rules.distdir import DistDir, InvalidDistDir, validate_distdir -def test_distdir(): +def test_distdir() -> None: buildroot = Path("/buildroot") assert DistDir(relpath=Path("dist")) == validate_distdir(Path("dist"), buildroot) assert DistDir(relpath=Path("dist")) == validate_distdir(Path("/buildroot/dist"), buildroot) diff --git a/src/python/pants/core/util_rules/filter_empty_sources_test.py b/src/python/pants/core/util_rules/filter_empty_sources_test.py index 1119438095b..c78430f6915 100644 --- a/src/python/pants/core/util_rules/filter_empty_sources_test.py +++ b/src/python/pants/core/util_rules/filter_empty_sources_test.py @@ -3,6 +3,8 @@ from dataclasses import dataclass +import pytest + from pants.core.util_rules.filter_empty_sources import ( FieldSetsWithSources, FieldSetsWithSourcesRequest, @@ -15,67 +17,67 @@ from pants.engine.target import FieldSet, Sources, Tags, Target from pants.option.options_bootstrapper import OptionsBootstrapper from pants.testutil.option_util import create_options_bootstrapper -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner -class FilterEmptySourcesTest(TestBase): - @classmethod - def rules(cls): - return ( - *super().rules(), +@pytest.fixture +def 
rule_runner() -> RuleRunner: + return RuleRunner( + rules=[ *filter_empty_sources_rules(), QueryRule(FieldSetsWithSources, (FieldSetsWithSourcesRequest, OptionsBootstrapper)), QueryRule(TargetsWithSources, (TargetsWithSourcesRequest, OptionsBootstrapper)), - ) + ] + ) + + +def test_filter_field_sets(rule_runner: RuleRunner) -> None: + @dataclass(frozen=True) + class MockFieldSet(FieldSet): + sources: Sources + # Another field to demo that we will preserve the whole FieldSet data structure. + tags: Tags - def test_filter_field_sets(self) -> None: - @dataclass(frozen=True) - class MockFieldSet(FieldSet): - sources: Sources - # Another field to demo that we will preserve the whole FieldSet data structure. - tags: Tags + rule_runner.create_file("f1.txt") + valid_addr = Address("", target_name="valid") + valid_field_set = MockFieldSet( + valid_addr, Sources(["f1.txt"], address=valid_addr), Tags(None, address=valid_addr) + ) - self.create_file("f1.txt") - valid_addr = Address("", target_name="valid") - valid_field_set = MockFieldSet( - valid_addr, Sources(["f1.txt"], address=valid_addr), Tags(None, address=valid_addr) - ) + empty_addr = Address("", target_name="empty") + empty_field_set = MockFieldSet( + empty_addr, Sources(None, address=empty_addr), Tags(None, address=empty_addr) + ) - empty_addr = Address("", target_name="empty") - empty_field_set = MockFieldSet( - empty_addr, Sources(None, address=empty_addr), Tags(None, address=empty_addr) - ) + result = rule_runner.request_product( + FieldSetsWithSources, + [ + FieldSetsWithSourcesRequest([valid_field_set, empty_field_set]), + create_options_bootstrapper(), + ], + ) + assert tuple(result) == (valid_field_set,) - result = self.request_product( - FieldSetsWithSources, - [ - FieldSetsWithSourcesRequest([valid_field_set, empty_field_set]), - create_options_bootstrapper(), - ], - ) - assert tuple(result) == (valid_field_set,) - def test_filter_targets(self) -> None: - class MockTarget(Target): - alias = "target" - 
core_fields = (Sources,) +def test_filter_targets(rule_runner: RuleRunner) -> None: + class MockTarget(Target): + alias = "target" + core_fields = (Sources,) - class MockTargetWithNoSourcesField(Target): - alias = "no_sources" - core_fields = () + class MockTargetWithNoSourcesField(Target): + alias = "no_sources" + core_fields = () - self.create_file("f1.txt") - valid_tgt = MockTarget( - {Sources.alias: ["f1.txt"]}, address=Address("", target_name="valid") - ) - empty_tgt = MockTarget({}, address=Address("", target_name="empty")) - invalid_tgt = MockTargetWithNoSourcesField({}, address=Address("", target_name="invalid")) + rule_runner.create_file("f1.txt") + valid_tgt = MockTarget({Sources.alias: ["f1.txt"]}, address=Address("", target_name="valid")) + empty_tgt = MockTarget({}, address=Address("", target_name="empty")) + invalid_tgt = MockTargetWithNoSourcesField({}, address=Address("", target_name="invalid")) - result = self.request_product( - TargetsWithSources, - [ - TargetsWithSourcesRequest([valid_tgt, empty_tgt, invalid_tgt]), - create_options_bootstrapper(), - ], - ) - assert tuple(result) == (valid_tgt,) + result = rule_runner.request_product( + TargetsWithSources, + [ + TargetsWithSourcesRequest([valid_tgt, empty_tgt, invalid_tgt]), + create_options_bootstrapper(), + ], + ) + assert tuple(result) == (valid_tgt,) diff --git a/src/python/pants/core/util_rules/source_files_test.py b/src/python/pants/core/util_rules/source_files_test.py index 4a24cf73001..a4643f8fa0e 100644 --- a/src/python/pants/core/util_rules/source_files_test.py +++ b/src/python/pants/core/util_rules/source_files_test.py @@ -2,9 +2,12 @@ # Licensed under the Apache License, Version 2.0 (see LICENSE). 
import itertools +from functools import partial from pathlib import PurePath from typing import Iterable, List, NamedTuple, Type +import pytest + from pants.core.target_types import FilesSources from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest from pants.core.util_rules.source_files import rules as source_files_rules @@ -13,7 +16,17 @@ from pants.engine.target import Sources as SourcesField from pants.option.options_bootstrapper import OptionsBootstrapper from pants.testutil.option_util import create_options_bootstrapper -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner + + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[ + *source_files_rules(), + QueryRule(SourceFiles, (SourceFilesRequest, OptionsBootstrapper)), + ], + ) class TargetSources(NamedTuple): @@ -30,70 +43,67 @@ def full_paths(self) -> List[str]: SOURCES3 = TargetSources("src/java", ["j1.java", "j2.java"]) -class SourceFilesTest(TestBase): - @classmethod - def rules(cls): - return ( - *super().rules(), - *source_files_rules(), - QueryRule(SourceFiles, (SourceFilesRequest, OptionsBootstrapper)), - ) - - def mock_sources_field( - self, - sources: TargetSources, - *, - include_sources: bool = True, - sources_field_cls: Type[SourcesField] = SourcesField, - ) -> SourcesField: - sources_field = sources_field_cls( - sources.source_files if include_sources else [], - address=Address.parse(f"{sources.source_root}:lib"), - ) - self.create_files(path=sources.source_root, files=sources.source_files) - return sources_field - - def assert_sources_resolved( - self, - sources_fields: Iterable[SourcesField], - *, - expected: Iterable[TargetSources], - expected_unrooted: Iterable[str] = (), - ) -> None: - result = self.request_product( - SourceFiles, - [SourceFilesRequest(sources_fields), create_options_bootstrapper()], - ) - assert list(result.snapshot.files) == sorted( - 
set(itertools.chain.from_iterable(sources.full_paths for sources in expected)) - ) - assert list(result.unrooted_files) == sorted(expected_unrooted) - - def test_address_specs(self) -> None: - sources_field1 = self.mock_sources_field(SOURCES1) - sources_field2 = self.mock_sources_field(SOURCES2) - sources_field3 = self.mock_sources_field(SOURCES3) - sources_field4 = self.mock_sources_field(SOURCES1) - - self.assert_sources_resolved([sources_field1], expected=[SOURCES1]) - self.assert_sources_resolved([sources_field2], expected=[SOURCES2]) - self.assert_sources_resolved([sources_field3], expected=[SOURCES3]) - self.assert_sources_resolved([sources_field4], expected=[SOURCES1]) - - # NB: sources_field1 and sources_field4 refer to the same files. We should be able to - # handle this gracefully. - self.assert_sources_resolved( - [sources_field1, sources_field2, sources_field3, sources_field4], - expected=[SOURCES1, SOURCES2, SOURCES3], - ) - - def test_file_sources(self) -> None: - sources = TargetSources("src/python", ["README.md"]) - field = self.mock_sources_field(sources, sources_field_cls=FilesSources) - self.assert_sources_resolved( - [field], expected=[sources], expected_unrooted=sources.full_paths - ) - - def test_gracefully_handle_no_sources(self) -> None: - sources_field = self.mock_sources_field(SOURCES1, include_sources=False) - self.assert_sources_resolved([sources_field], expected=[]) +def mock_sources_field( + rule_runner: RuleRunner, + sources: TargetSources, + *, + include_sources: bool = True, + sources_field_cls: Type[SourcesField] = SourcesField, +) -> SourcesField: + sources_field = sources_field_cls( + sources.source_files if include_sources else [], + address=Address.parse(f"{sources.source_root}:lib"), + ) + rule_runner.create_files(path=sources.source_root, files=sources.source_files) + return sources_field + + +def assert_sources_resolved( + rule_runner: RuleRunner, + sources_fields: Iterable[SourcesField], + *, + expected: 
Iterable[TargetSources], + expected_unrooted: Iterable[str] = (), +) -> None: + result = rule_runner.request_product( + SourceFiles, + [SourceFilesRequest(sources_fields), create_options_bootstrapper()], + ) + assert list(result.snapshot.files) == sorted( + set(itertools.chain.from_iterable(sources.full_paths for sources in expected)) + ) + assert list(result.unrooted_files) == sorted(expected_unrooted) + + +def test_address_specs(rule_runner: RuleRunner) -> None: + mock_sources = partial(mock_sources_field, rule_runner) + sources_field1 = mock_sources(SOURCES1) + sources_field2 = mock_sources(SOURCES2) + sources_field3 = mock_sources(SOURCES3) + sources_field4 = mock_sources(SOURCES1) + + assert_sources = partial(assert_sources_resolved, rule_runner) + assert_sources([sources_field1], expected=[SOURCES1]) + assert_sources([sources_field2], expected=[SOURCES2]) + assert_sources([sources_field3], expected=[SOURCES3]) + assert_sources([sources_field4], expected=[SOURCES1]) + + # NB: sources_field1 and sources_field4 refer to the same files. We should be able to + # handle this gracefully. 
+ assert_sources( + [sources_field1, sources_field2, sources_field3, sources_field4], + expected=[SOURCES1, SOURCES2, SOURCES3], + ) + + +def test_file_sources(rule_runner: RuleRunner) -> None: + sources = TargetSources("src/python", ["README.md"]) + field = mock_sources_field(rule_runner, sources, sources_field_cls=FilesSources) + assert_sources_resolved( + rule_runner, [field], expected=[sources], expected_unrooted=sources.full_paths + ) + + +def test_gracefully_handle_no_sources(rule_runner: RuleRunner) -> None: + sources_field = mock_sources_field(rule_runner, SOURCES1, include_sources=False) + assert_sources_resolved(rule_runner, [sources_field], expected=[]) diff --git a/src/python/pants/core/util_rules/stripped_source_files_test.py b/src/python/pants/core/util_rules/stripped_source_files_test.py index 4b0fd182aef..a8fec4e15b1 100644 --- a/src/python/pants/core/util_rules/stripped_source_files_test.py +++ b/src/python/pants/core/util_rules/stripped_source_files_test.py @@ -14,101 +14,103 @@ from pants.engine.rules import QueryRule from pants.option.options_bootstrapper import OptionsBootstrapper from pants.testutil.option_util import create_options_bootstrapper -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner -class StrippedSourceFilesTest(TestBase): - @classmethod - def rules(cls): - return ( - *super().rules(), +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[ *stripped_source_files.rules(), QueryRule(StrippedSourceFiles, (SourceFiles, OptionsBootstrapper)), - ) + ] + ) + - def get_stripped_files( - self, - request: SourceFiles, +def get_stripped_files( + rule_runner: RuleRunner, + request: SourceFiles, + *, + args: Optional[List[str]] = None, +) -> List[str]: + args = args or [] + has_source_root_patterns = False + for arg in args: + if arg.startswith("--source-root-patterns"): + has_source_root_patterns = True + break + if not has_source_root_patterns: + source_root_patterns 
= ["src/python", "src/java", "tests/python"] + args.append(f"--source-root-patterns={json.dumps(source_root_patterns)}") + result = rule_runner.request_product( + StrippedSourceFiles, + [request, create_options_bootstrapper(args=args)], + ) + return list(result.snapshot.files) + + +def test_strip_snapshot(rule_runner: RuleRunner) -> None: + def get_stripped_files_for_snapshot( + paths: List[str], *, args: Optional[List[str]] = None, ) -> List[str]: - args = args or [] - has_source_root_patterns = False - for arg in args: - if arg.startswith("--source-root-patterns"): - has_source_root_patterns = True - break - if not has_source_root_patterns: - source_root_patterns = ["src/python", "src/java", "tests/python"] - args.append(f"--source-root-patterns={json.dumps(source_root_patterns)}") - result = self.request_product( - StrippedSourceFiles, - [request, create_options_bootstrapper(args=args)], - ) - return list(result.snapshot.files) - - def test_strip_snapshot(self) -> None: - def get_stripped_files_for_snapshot( - paths: List[str], - *, - args: Optional[List[str]] = None, - ) -> List[str]: - input_snapshot = self.make_snapshot_of_empty_files(paths) - request = SourceFiles(input_snapshot, ()) - return self.get_stripped_files(request, args=args) - - # Normal source roots - assert get_stripped_files_for_snapshot(["src/python/project/example.py"]) == [ - "project/example.py" - ] - assert ( - get_stripped_files_for_snapshot( - ["src/python/project/example.py"], - ) - == ["project/example.py"] + input_snapshot = rule_runner.make_snapshot_of_empty_files(paths) + request = SourceFiles(input_snapshot, ()) + return get_stripped_files(rule_runner, request, args=args) + + # Normal source roots + assert get_stripped_files_for_snapshot(["src/python/project/example.py"]) == [ + "project/example.py" + ] + assert ( + get_stripped_files_for_snapshot( + ["src/python/project/example.py"], ) + == ["project/example.py"] + ) - assert 
get_stripped_files_for_snapshot(["src/java/com/project/example.java"]) == [ - "com/project/example.java" - ] - assert get_stripped_files_for_snapshot(["tests/python/project_test/example.py"]) == [ - "project_test/example.py" - ] + assert get_stripped_files_for_snapshot(["src/java/com/project/example.java"]) == [ + "com/project/example.java" + ] + assert get_stripped_files_for_snapshot(["tests/python/project_test/example.py"]) == [ + "project_test/example.py" + ] - # Unrecognized source root - unrecognized_source_root = "no-source-root/example.txt" - with pytest.raises(ExecutionError) as exc: - get_stripped_files_for_snapshot([unrecognized_source_root]) - assert f"NoSourceRootError: No source root found for `{unrecognized_source_root}`." in str( - exc.value - ) + # Unrecognized source root + unrecognized_source_root = "no-source-root/example.txt" + with pytest.raises(ExecutionError) as exc: + get_stripped_files_for_snapshot([unrecognized_source_root]) + assert f"NoSourceRootError: No source root found for `{unrecognized_source_root}`." in str( + exc.value + ) - # Support for multiple source roots - file_names = ["src/python/project/example.py", "src/java/com/project/example.java"] - assert get_stripped_files_for_snapshot(file_names) == [ - "com/project/example.java", - "project/example.py", - ] + # Support for multiple source roots + file_names = ["src/python/project/example.py", "src/java/com/project/example.java"] + assert get_stripped_files_for_snapshot(file_names) == [ + "com/project/example.java", + "project/example.py", + ] - # Test a source root at the repo root. We have performance optimizations for this case - # because there is nothing to strip. - source_root_config = [f"--source-root-patterns={json.dumps(['/'])}"] + # Test a source root at the repo root. We have performance optimizations for this case + # because there is nothing to strip. 
+ source_root_config = [f"--source-root-patterns={json.dumps(['/'])}"] - assert ( - get_stripped_files_for_snapshot( - ["project/f1.py", "project/f2.py"], - args=source_root_config, - ) - == ["project/f1.py", "project/f2.py"] + assert ( + get_stripped_files_for_snapshot( + ["project/f1.py", "project/f2.py"], + args=source_root_config, ) + == ["project/f1.py", "project/f2.py"] + ) - assert ( - get_stripped_files_for_snapshot( - ["dir1/f.py", "dir2/f.py"], - args=source_root_config, - ) - == ["dir1/f.py", "dir2/f.py"] + assert ( + get_stripped_files_for_snapshot( + ["dir1/f.py", "dir2/f.py"], + args=source_root_config, ) + == ["dir1/f.py", "dir2/f.py"] + ) - # Gracefully handle an empty snapshot - assert self.get_stripped_files(SourceFiles(EMPTY_SNAPSHOT, ())) == [] + # Gracefully handle an empty snapshot + assert get_stripped_files(rule_runner, SourceFiles(EMPTY_SNAPSHOT, ())) == [] diff --git a/src/python/pants/engine/internals/build_files_test.py b/src/python/pants/engine/internals/build_files_test.py index 3f4fd514864..8e65f0df9ee 100644 --- a/src/python/pants/engine/internals/build_files_test.py +++ b/src/python/pants/engine/internals/build_files_test.py @@ -40,7 +40,7 @@ from pants.option.options_bootstrapper import OptionsBootstrapper from pants.testutil.engine_util import MockGet, run_rule from pants.testutil.option_util import create_options_bootstrapper, create_subsystem -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner from pants.util.frozendict import FrozenDict @@ -123,288 +123,312 @@ class MockTgt(Target): core_fields = (Dependencies, Sources, Tags) -class BuildFileIntegrationTest(TestBase): - @classmethod - def target_types(cls): - return [MockTgt] +def test_resolve_address() -> None: + rule_runner = RuleRunner(rules=[QueryRule(Address, (AddressInput,))]) - @classmethod - def rules(cls): - return ( - *super().rules(), - QueryRule(Address, (AddressInput,)), - QueryRule(TargetAdaptor, (Address, 
OptionsBootstrapper)), - QueryRule(BuildFileAddress, (Address, OptionsBootstrapper)), - QueryRule(AddressesWithOrigins, (AddressSpecs, OptionsBootstrapper)), - ) + def assert_is_expected(address_input: AddressInput, expected: Address) -> None: + assert rule_runner.request_product(Address, [address_input]) == expected - def test_resolve_address(self) -> None: - def assert_is_expected(address_input: AddressInput, expected: Address) -> None: - assert self.request_product(Address, [address_input]) == expected + rule_runner.create_file("a/b/c.txt") + assert_is_expected( + AddressInput("a/b/c.txt"), Address("a/b", target_name=None, relative_file_path="c.txt") + ) + assert_is_expected( + AddressInput("a/b"), Address("a/b", target_name=None, relative_file_path=None) + ) - self.create_file("a/b/c.txt") - assert_is_expected( - AddressInput("a/b/c.txt"), Address("a/b", target_name=None, relative_file_path="c.txt") - ) - assert_is_expected( - AddressInput("a/b"), Address("a/b", target_name=None, relative_file_path=None) - ) + assert_is_expected(AddressInput("a/b", target_component="c"), Address("a/b", target_name="c")) + assert_is_expected( + AddressInput("a/b/c.txt", target_component="c"), + Address("a/b", relative_file_path="c.txt", target_name="c"), + ) - assert_is_expected( - AddressInput("a/b", target_component="c"), Address("a/b", target_name="c") - ) - assert_is_expected( - AddressInput("a/b/c.txt", target_component="c"), - Address("a/b", relative_file_path="c.txt", target_name="c"), - ) + # Top-level addresses will not have a path_component, unless they are a file address. + rule_runner.create_file("f.txt") + assert_is_expected( + AddressInput("f.txt", target_component="original"), + Address("", relative_file_path="f.txt", target_name="original"), + ) + assert_is_expected(AddressInput("", target_component="t"), Address("", target_name="t")) - # Top-level addresses will not have a path_component, unless they are a file address. 
- self.create_file("f.txt") - assert_is_expected( - AddressInput("f.txt", target_component="original"), - Address("", relative_file_path="f.txt", target_name="original"), - ) - assert_is_expected(AddressInput("", target_component="t"), Address("", target_name="t")) + with pytest.raises(ExecutionError) as exc: + rule_runner.request_product(Address, [AddressInput("a/b/fake")]) + assert "'a/b/fake' does not exist on disk" in str(exc.value) - with pytest.raises(ExecutionError) as exc: - self.request_product(Address, [AddressInput("a/b/fake")]) - assert "'a/b/fake' does not exist on disk" in str(exc.value) - - def test_target_adaptor_parsed_correctly(self) -> None: - self.add_to_build_file( - "helloworld", - dedent( - """\ - mock_tgt( - fake_field=42, - dependencies=[ - # Because we don't follow dependencies or even parse dependencies, this - # self-cycle should be fine. - "helloworld", - ":sibling", - "helloworld/util", - "helloworld/util:tests", - ], - ) - """ - ), - ) - addr = Address("helloworld") - target_adaptor = self.request_product(TargetAdaptor, [addr, create_options_bootstrapper()]) - assert target_adaptor.name == "helloworld" - assert target_adaptor.type_alias == "mock_tgt" - assert target_adaptor.kwargs["dependencies"] == [ - "helloworld", - ":sibling", - "helloworld/util", - "helloworld/util:tests", - ] - # NB: TargetAdaptors do not validate what fields are valid. The Target API should error - # when encountering this, but it's fine at this stage. - assert target_adaptor.kwargs["fake_field"] == 42 - - def test_target_adaptor_not_found(self) -> None: - bootstrapper = create_options_bootstrapper() - with pytest.raises(ExecutionError) as exc: - self.request_product(TargetAdaptor, [Address("helloworld"), bootstrapper]) - assert "Directory \\'helloworld\\' does not contain any BUILD files" in str(exc) - self.add_to_build_file("helloworld", "mock_tgt(name='other_tgt')") - expected_rx_str = re.escape( - "'helloworld' was not found in namespace 'helloworld'. 
Did you mean one of:\n :other_tgt" - ) - with pytest.raises(ExecutionError, match=expected_rx_str): - self.request_product(TargetAdaptor, [Address("helloworld"), bootstrapper]) - - def test_build_file_address(self) -> None: - self.create_file("helloworld/BUILD.ext", "mock_tgt()") - bootstrapper = create_options_bootstrapper() - - def assert_bfa_resolved(address: Address) -> None: - expected_bfa = BuildFileAddress(rel_path="helloworld/BUILD.ext", address=address) - bfa = self.request_product(BuildFileAddress, [address, bootstrapper]) - assert bfa == expected_bfa - - assert_bfa_resolved(Address("helloworld")) - # File addresses should use their base target to find the BUILD file. - assert_bfa_resolved(Address("helloworld", relative_file_path="f.txt")) - - def resolve_address_specs( - self, specs: Iterable[AddressSpec], bootstrapper: Optional[OptionsBootstrapper] = None - ) -> Set[AddressWithOrigin]: - result = self.request_product( - AddressesWithOrigins, - [ - AddressSpecs(specs, filter_by_global_options=True), - bootstrapper or create_options_bootstrapper(), - ], - ) - return set(result) +@pytest.fixture +def target_adaptor_rule_runner() -> RuleRunner: + return RuleRunner( + rules=[QueryRule(TargetAdaptor, (Address, OptionsBootstrapper))], target_types=[MockTgt] + ) - def test_address_specs_deduplication(self) -> None: - """When multiple specs cover the same address, we should deduplicate to one single - AddressWithOrigin. - We should use the most specific origin spec possible, such as AddressLiteralSpec > - SiblingAddresses. - """ - self.create_file("demo/f.txt") - self.add_to_build_file("demo", "mock_tgt(sources=['f.txt'])") - # We also include a file address to ensure that that is included in the result. 
- specs = [ - AddressLiteralSpec("demo", "demo"), - AddressLiteralSpec("demo/f.txt", "demo"), - SiblingAddresses("demo"), - DescendantAddresses("demo"), - AscendantAddresses("demo"), - ] - assert self.resolve_address_specs(specs) == { - AddressWithOrigin(Address("demo"), AddressLiteralSpec("demo", "demo")), - AddressWithOrigin( - Address("demo", relative_file_path="f.txt"), - AddressLiteralSpec("demo/f.txt", "demo"), - ), - } - - def test_address_specs_filter_by_tag(self) -> None: - self.create_file("demo/f.txt") - self.add_to_build_file( - "demo", - dedent( - """\ - mock_tgt(name="a", sources=["f.txt"]) - mock_tgt(name="b", sources=["f.txt"], tags=["integration"]) - mock_tgt(name="c", sources=["f.txt"], tags=["ignore"]) - """ - ), - ) - bootstrapper = create_options_bootstrapper(args=["--tag=+integration"]) - - assert self.resolve_address_specs( - [SiblingAddresses("demo")], bootstrapper=bootstrapper - ) == {AddressWithOrigin(Address("demo", target_name="b"), SiblingAddresses("demo"))} - - # The same filtering should work when given literal addresses, including file addresses. - # For file addresses, we look up the `tags` field of the original base target. 
- literals_result = self.resolve_address_specs( - [ - AddressLiteralSpec("demo", "a"), - AddressLiteralSpec("demo", "b"), - AddressLiteralSpec("demo", "c"), - AddressLiteralSpec("demo/f.txt", "a"), - AddressLiteralSpec("demo/f.txt", "b"), - AddressLiteralSpec("demo/f.txt", "c"), - ], - bootstrapper=bootstrapper, - ) - assert literals_result == { - AddressWithOrigin( - Address("demo", relative_file_path="f.txt", target_name="b"), - AddressLiteralSpec("demo/f.txt", "b"), - ), - AddressWithOrigin(Address("demo", target_name="b"), AddressLiteralSpec("demo", "b")), - } - - def test_address_specs_filter_by_exclude_pattern(self) -> None: - self.create_file("demo/f.txt") - self.add_to_build_file( - "demo", - dedent( - """\ - mock_tgt(name="exclude_me", sources=["f.txt"]) - mock_tgt(name="not_me", sources=["f.txt"]) - """ - ), +def test_target_adaptor_parsed_correctly(target_adaptor_rule_runner: RuleRunner) -> None: + target_adaptor_rule_runner.add_to_build_file( + "helloworld", + dedent( + """\ + mock_tgt( + fake_field=42, + dependencies=[ + # Because we don't follow dependencies or even parse dependencies, this + # self-cycle should be fine. + "helloworld", + ":sibling", + "helloworld/util", + "helloworld/util:tests", + ], + ) + """ + ), + ) + addr = Address("helloworld") + target_adaptor = target_adaptor_rule_runner.request_product( + TargetAdaptor, [addr, create_options_bootstrapper()] + ) + assert target_adaptor.name == "helloworld" + assert target_adaptor.type_alias == "mock_tgt" + assert target_adaptor.kwargs["dependencies"] == [ + "helloworld", + ":sibling", + "helloworld/util", + "helloworld/util:tests", + ] + # NB: TargetAdaptors do not validate what fields are valid. The Target API should error + # when encountering this, but it's fine at this stage. 
+ assert target_adaptor.kwargs["fake_field"] == 42 + + +def test_target_adaptor_not_found(target_adaptor_rule_runner: RuleRunner) -> None: + bootstrapper = create_options_bootstrapper() + with pytest.raises(ExecutionError) as exc: + target_adaptor_rule_runner.request_product( + TargetAdaptor, [Address("helloworld"), bootstrapper] ) - bootstrapper = create_options_bootstrapper(args=["--exclude-target-regexp=exclude_me.*"]) - - assert self.resolve_address_specs( - [SiblingAddresses("demo")], bootstrapper=bootstrapper - ) == {AddressWithOrigin(Address("demo", target_name="not_me"), SiblingAddresses("demo"))} - - # The same filtering should work when given literal addresses, including file addresses. - # The filtering will operate against the normalized Address.spec. - literals_result = self.resolve_address_specs( - [ - AddressLiteralSpec("demo", "exclude_me"), - AddressLiteralSpec("demo", "not_me"), - AddressLiteralSpec("demo/f.txt", "exclude_me"), - AddressLiteralSpec("demo/f.txt", "not_me"), - ], - bootstrapper=bootstrapper, + assert "Directory \\'helloworld\\' does not contain any BUILD files" in str(exc) + + target_adaptor_rule_runner.add_to_build_file("helloworld", "mock_tgt(name='other_tgt')") + expected_rx_str = re.escape( + "'helloworld' was not found in namespace 'helloworld'. 
Did you mean one of:\n :other_tgt" + ) + with pytest.raises(ExecutionError, match=expected_rx_str): + target_adaptor_rule_runner.request_product( + TargetAdaptor, [Address("helloworld"), bootstrapper] ) - assert literals_result == { - AddressWithOrigin( - Address("demo", relative_file_path="f.txt", target_name="not_me"), - AddressLiteralSpec("demo/f.txt", "not_me"), - ), - AddressWithOrigin( - Address("demo", target_name="not_me"), AddressLiteralSpec("demo", "not_me") - ), - } - def test_address_specs_do_not_exist(self) -> None: - self.create_file("real/f.txt") - self.add_to_build_file("real", "mock_tgt(sources=['f.txt'])") - self.add_to_build_file("empty", "# empty") +def test_build_file_address() -> None: + rule_runner = RuleRunner( + rules=[QueryRule(BuildFileAddress, (Address, OptionsBootstrapper))], target_types=[MockTgt] + ) + rule_runner.create_file("helloworld/BUILD.ext", "mock_tgt()") + bootstrapper = create_options_bootstrapper() - def assert_resolve_error(specs: Iterable[AddressSpec], *, expected: str) -> None: - with pytest.raises(ExecutionError) as exc: - self.resolve_address_specs(specs) - assert expected in str(exc.value) + def assert_bfa_resolved(address: Address) -> None: + expected_bfa = BuildFileAddress(rel_path="helloworld/BUILD.ext", address=address) + bfa = rule_runner.request_product(BuildFileAddress, [address, bootstrapper]) + assert bfa == expected_bfa - # Literal addresses require both a BUILD file to exist and for a target to be resolved. 
- assert_resolve_error( - [AddressLiteralSpec("fake", "tgt")], expected="'fake' does not exist on disk" - ) - assert_resolve_error( - [AddressLiteralSpec("fake/f.txt", "tgt")], - expected="'fake/f.txt' does not exist on disk", - ) - did_you_mean = ResolveError.did_you_mean( - bad_name="fake_tgt", known_names=["real"], namespace="real" - ) - assert_resolve_error([AddressLiteralSpec("real", "fake_tgt")], expected=str(did_you_mean)) - assert_resolve_error( - [AddressLiteralSpec("real/f.txt", "fake_tgt")], expected=str(did_you_mean) - ) + assert_bfa_resolved(Address("helloworld")) + # File addresses should use their base target to find the BUILD file. + assert_bfa_resolved(Address("helloworld", relative_file_path="f.txt")) - # SiblingAddresses require the BUILD file to exist, but are okay if no targets are resolved. - assert_resolve_error( - [SiblingAddresses("fake")], - expected=( - "'fake' does not contain any BUILD files, but 'fake:' expected matching targets " - "there." - ), - ) - assert not self.resolve_address_specs([SiblingAddresses("empty")]) - # DescendantAddresses requires at least one match, even if BUILD files exist. - assert_resolve_error( - [DescendantAddresses("fake"), DescendantAddresses("empty")], - expected="Address spec 'fake::' does not match any targets", - ) +@pytest.fixture +def address_specs_rule_runner() -> RuleRunner: + return RuleRunner( + rules=[QueryRule(AddressesWithOrigins, (AddressSpecs, OptionsBootstrapper))], + target_types=[MockTgt], + ) - # AscendantAddresses does not require any matches or BUILD files. 
- assert not self.resolve_address_specs( - [AscendantAddresses("fake"), AscendantAddresses("empty")] - ) - def test_address_specs_file_does_not_belong_to_target(self) -> None: - """Even if a file's address file exists and target exist, we should validate that the file - actually belongs to that target.""" - self.create_file("demo/f.txt") - self.add_to_build_file( - "demo", - dedent( - """\ - mock_tgt(name='owner', sources=['f.txt']) - mock_tgt(name='not_owner') - """ - ), - ) +def resolve_address_specs( + rule_runner: RuleRunner, + specs: Iterable[AddressSpec], + bootstrapper: Optional[OptionsBootstrapper] = None, +) -> Set[AddressWithOrigin]: + result = rule_runner.request_product( + AddressesWithOrigins, + [ + AddressSpecs(specs, filter_by_global_options=True), + bootstrapper or create_options_bootstrapper(), + ], + ) + return set(result) + + +def test_address_specs_deduplication(address_specs_rule_runner: RuleRunner) -> None: + """When multiple specs cover the same address, we should deduplicate to one single + AddressWithOrigin. + + We should use the most specific origin spec possible, such as AddressLiteralSpec > + SiblingAddresses. + """ + address_specs_rule_runner.create_file("demo/f.txt") + address_specs_rule_runner.add_to_build_file("demo", "mock_tgt(sources=['f.txt'])") + # We also include a file address to ensure that that is included in the result. 
+ specs = [ + AddressLiteralSpec("demo", "demo"), + AddressLiteralSpec("demo/f.txt", "demo"), + SiblingAddresses("demo"), + DescendantAddresses("demo"), + AscendantAddresses("demo"), + ] + assert resolve_address_specs(address_specs_rule_runner, specs) == { + AddressWithOrigin(Address("demo"), AddressLiteralSpec("demo", "demo")), + AddressWithOrigin( + Address("demo", relative_file_path="f.txt"), + AddressLiteralSpec("demo/f.txt", "demo"), + ), + } + + +def test_address_specs_filter_by_tag(address_specs_rule_runner: RuleRunner) -> None: + address_specs_rule_runner.create_file("demo/f.txt") + address_specs_rule_runner.add_to_build_file( + "demo", + dedent( + """\ + mock_tgt(name="a", sources=["f.txt"]) + mock_tgt(name="b", sources=["f.txt"], tags=["integration"]) + mock_tgt(name="c", sources=["f.txt"], tags=["ignore"]) + """ + ), + ) + bootstrapper = create_options_bootstrapper(args=["--tag=+integration"]) + + assert resolve_address_specs( + address_specs_rule_runner, [SiblingAddresses("demo")], bootstrapper=bootstrapper + ) == {AddressWithOrigin(Address("demo", target_name="b"), SiblingAddresses("demo"))} + + # The same filtering should work when given literal addresses, including file addresses. + # For file addresses, we look up the `tags` field of the original base target. 
+ literals_result = resolve_address_specs( + address_specs_rule_runner, + [ + AddressLiteralSpec("demo", "a"), + AddressLiteralSpec("demo", "b"), + AddressLiteralSpec("demo", "c"), + AddressLiteralSpec("demo/f.txt", "a"), + AddressLiteralSpec("demo/f.txt", "b"), + AddressLiteralSpec("demo/f.txt", "c"), + ], + bootstrapper=bootstrapper, + ) + assert literals_result == { + AddressWithOrigin( + Address("demo", relative_file_path="f.txt", target_name="b"), + AddressLiteralSpec("demo/f.txt", "b"), + ), + AddressWithOrigin(Address("demo", target_name="b"), AddressLiteralSpec("demo", "b")), + } + + +def test_address_specs_filter_by_exclude_pattern(address_specs_rule_runner: RuleRunner) -> None: + address_specs_rule_runner.create_file("demo/f.txt") + address_specs_rule_runner.add_to_build_file( + "demo", + dedent( + """\ + mock_tgt(name="exclude_me", sources=["f.txt"]) + mock_tgt(name="not_me", sources=["f.txt"]) + """ + ), + ) + bootstrapper = create_options_bootstrapper(args=["--exclude-target-regexp=exclude_me.*"]) + + assert resolve_address_specs( + address_specs_rule_runner, [SiblingAddresses("demo")], bootstrapper=bootstrapper + ) == {AddressWithOrigin(Address("demo", target_name="not_me"), SiblingAddresses("demo"))} + + # The same filtering should work when given literal addresses, including file addresses. + # The filtering will operate against the normalized Address.spec. 
+ literals_result = resolve_address_specs( + address_specs_rule_runner, + [ + AddressLiteralSpec("demo", "exclude_me"), + AddressLiteralSpec("demo", "not_me"), + AddressLiteralSpec("demo/f.txt", "exclude_me"), + AddressLiteralSpec("demo/f.txt", "not_me"), + ], + bootstrapper=bootstrapper, + ) + assert literals_result == { + AddressWithOrigin( + Address("demo", relative_file_path="f.txt", target_name="not_me"), + AddressLiteralSpec("demo/f.txt", "not_me"), + ), + AddressWithOrigin( + Address("demo", target_name="not_me"), AddressLiteralSpec("demo", "not_me") + ), + } + + +def test_address_specs_do_not_exist(address_specs_rule_runner: RuleRunner) -> None: + address_specs_rule_runner.create_file("real/f.txt") + address_specs_rule_runner.add_to_build_file("real", "mock_tgt(sources=['f.txt'])") + address_specs_rule_runner.add_to_build_file("empty", "# empty") + + def assert_resolve_error(specs: Iterable[AddressSpec], *, expected: str) -> None: with pytest.raises(ExecutionError) as exc: - self.resolve_address_specs([AddressLiteralSpec("demo/f.txt", "not_owner")]) - assert "does not match a file demo/f.txt" in str(exc.value) + resolve_address_specs(address_specs_rule_runner, specs) + assert expected in str(exc.value) + + # Literal addresses require both a BUILD file to exist and for a target to be resolved. + assert_resolve_error( + [AddressLiteralSpec("fake", "tgt")], expected="'fake' does not exist on disk" + ) + assert_resolve_error( + [AddressLiteralSpec("fake/f.txt", "tgt")], + expected="'fake/f.txt' does not exist on disk", + ) + did_you_mean = ResolveError.did_you_mean( + bad_name="fake_tgt", known_names=["real"], namespace="real" + ) + assert_resolve_error([AddressLiteralSpec("real", "fake_tgt")], expected=str(did_you_mean)) + assert_resolve_error([AddressLiteralSpec("real/f.txt", "fake_tgt")], expected=str(did_you_mean)) + + # SiblingAddresses require the BUILD file to exist, but are okay if no targets are resolved. 
+ assert_resolve_error( + [SiblingAddresses("fake")], + expected=( + "'fake' does not contain any BUILD files, but 'fake:' expected matching targets " + "there." + ), + ) + assert not resolve_address_specs(address_specs_rule_runner, [SiblingAddresses("empty")]) + + # DescendantAddresses requires at least one match, even if BUILD files exist. + assert_resolve_error( + [DescendantAddresses("fake"), DescendantAddresses("empty")], + expected="Address spec 'fake::' does not match any targets", + ) + + # AscendantAddresses does not require any matches or BUILD files. + assert not resolve_address_specs( + address_specs_rule_runner, [AscendantAddresses("fake"), AscendantAddresses("empty")] + ) + + +def test_address_specs_file_does_not_belong_to_target( + address_specs_rule_runner: RuleRunner, +) -> None: + """Even if a file's address file exists and target exist, we should validate that the file + actually belongs to that target.""" + address_specs_rule_runner.create_file("demo/f.txt") + address_specs_rule_runner.add_to_build_file( + "demo", + dedent( + """\ + mock_tgt(name='owner', sources=['f.txt']) + mock_tgt(name='not_owner') + """ + ), + ) + + with pytest.raises(ExecutionError) as exc: + resolve_address_specs( + address_specs_rule_runner, [AddressLiteralSpec("demo/f.txt", "not_owner")] + ) + assert "does not match a file demo/f.txt" in str(exc.value) diff --git a/src/python/pants/engine/internals/uuid_test.py b/src/python/pants/engine/internals/uuid_test.py index f57cfafa2fa..7f5d77d8cd3 100644 --- a/src/python/pants/engine/internals/uuid_test.py +++ b/src/python/pants/engine/internals/uuid_test.py @@ -3,23 +3,26 @@ from uuid import UUID +import pytest + from pants.engine.internals.uuid import UUIDRequest from pants.engine.internals.uuid import rules as uuid_rules from pants.engine.rules import QueryRule -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner + + +@pytest.fixture +def rule_runner() -> RuleRunner: + return 
RuleRunner(rules=[*uuid_rules(), QueryRule(UUID, (UUIDRequest,))]) -class UUIDTest(TestBase): - @classmethod - def rules(cls): - return (*super().rules(), *uuid_rules(), QueryRule(UUID, (UUIDRequest,))) +def test_distinct_uuids(rule_runner: RuleRunner) -> None: + uuid1 = rule_runner.request_product(UUID, [UUIDRequest()]) + uuid2 = rule_runner.request_product(UUID, [UUIDRequest()]) + assert uuid1 != uuid2 - def test_distinct_uuids(self): - uuid1 = self.request_product(UUID, [UUIDRequest()]) - uuid2 = self.request_product(UUID, [UUIDRequest()]) - assert uuid1 != uuid2 - def test_identical_uuids(self): - uuid1 = self.request_product(UUID, [UUIDRequest(randomizer=0.0)]) - uuid2 = self.request_product(UUID, [UUIDRequest(randomizer=0.0)]) - assert uuid1 == uuid2 +def test_identical_uuids(rule_runner: RuleRunner) -> None: + uuid1 = rule_runner.request_product(UUID, [UUIDRequest(randomizer=0.0)]) + uuid2 = rule_runner.request_product(UUID, [UUIDRequest(randomizer=0.0)]) + assert uuid1 == uuid2 diff --git a/src/python/pants/engine/platform_test.py b/src/python/pants/engine/platform_test.py index 928ea42ccb8..65fb62f8224 100644 --- a/src/python/pants/engine/platform_test.py +++ b/src/python/pants/engine/platform_test.py @@ -4,19 +4,15 @@ from pants.engine.platform import Platform from pants.engine.process import FallibleProcessResultWithPlatform, Process from pants.engine.rules import QueryRule -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner -class PlatformTest(TestBase): - @classmethod - def rules(cls): - return (*super().rules(), QueryRule(FallibleProcessResultWithPlatform, (Process,))) - - def test_platform_on_local_epr_result(self) -> None: - this_platform = Platform.current - process = Process( - argv=("/bin/echo", "test"), description="Run some program that will exit cleanly." 
- ) - result = self.request_product(FallibleProcessResultWithPlatform, [process]) - assert result.exit_code == 0 - assert result.platform == this_platform +def test_platform_on_local_epr_result() -> None: + rule_runner = RuleRunner(rules=[QueryRule(FallibleProcessResultWithPlatform, (Process,))]) + this_platform = Platform.current + process = Process( + argv=("/bin/echo", "test"), description="Run some program that will exit cleanly." + ) + result = rule_runner.request_product(FallibleProcessResultWithPlatform, [process]) + assert result.exit_code == 0 + assert result.platform == this_platform diff --git a/src/python/pants/source/filespec_test.py b/src/python/pants/source/filespec_test.py index 1cc54b28dd5..e03f871f5c7 100644 --- a/src/python/pants/source/filespec_test.py +++ b/src/python/pants/source/filespec_test.py @@ -3,80 +3,81 @@ from typing import Tuple +import pytest + from pants.engine.fs import PathGlobs, Snapshot from pants.source.filespec import matches_filespec -from pants.testutil.test_base import TestBase - - -class FilespecTest(TestBase): - def assert_rule_match( - self, glob: str, paths: Tuple[str, ...], *, should_match: bool = True - ) -> None: - # Confirm in-memory behavior. - matched_filespec = matches_filespec({"includes": [glob]}, paths=paths) - if should_match: - assert matched_filespec == paths - else: - assert not matched_filespec - - # Confirm on-disk behavior. 
- for expected_match in paths: - if expected_match.endswith("/"): - self.create_dir(expected_match) - else: - self.create_file(expected_match) - snapshot = self.request_product(Snapshot, [PathGlobs([glob])]) - if should_match: - assert sorted(paths) == sorted(snapshot.files) - else: - assert not snapshot.files - - def test_matches_single_star_0(self) -> None: - self.assert_rule_match("a/b/*/f.py", ("a/b/c/f.py", "a/b/q/f.py")) - - def test_matches_single_star_0_neg(self) -> None: - self.assert_rule_match("a/b/*/f.py", ("a/b/c/d/f.py", "a/b/f.py"), should_match=False) - - def test_matches_single_star_1(self) -> None: - self.assert_rule_match("foo/bar/*", ("foo/bar/baz", "foo/bar/bar")) - - def test_matches_single_star_2(self) -> None: - self.assert_rule_match("*/bar/b*", ("foo/bar/baz", "foo/bar/bar")) - - def test_matches_single_star_2_neg(self) -> None: - self.assert_rule_match( - "*/bar/b*", ("foo/koo/bar/baz", "foo/bar/bar/zoo"), should_match=False - ) - - def test_matches_single_star_3(self) -> None: - self.assert_rule_match("*/[be]*/b*", ("foo/bar/baz", "foo/bar/bar")) +from pants.testutil.rule_runner import RuleRunner - def test_matches_single_star_4(self) -> None: - self.assert_rule_match("foo*/bar", ("foofighters/bar", "foofighters.venv/bar")) - def test_matches_single_star_4_neg(self) -> None: - self.assert_rule_match("foo*/bar", ("foofighters/baz/bar",), should_match=False) +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner() - def test_matches_double_star_0(self) -> None: - self.assert_rule_match("**", ("a/b/c", "b")) - def test_matches_double_star_1(self) -> None: - self.assert_rule_match("a/**/f", ("a/f", "a/b/c/d/e/f")) +def assert_rule_match( + rule_runner: RuleRunner, glob: str, paths: Tuple[str, ...], *, should_match: bool +) -> None: + # Confirm in-memory behavior. 
+ matched_filespec = matches_filespec({"includes": [glob]}, paths=paths) + if should_match: + assert matched_filespec == paths + else: + assert not matched_filespec - def test_matches_double_star_2(self) -> None: - self.assert_rule_match("a/b/**", ("a/b/d", "a/b/c/d/e/f")) - - def test_matches_double_star_2_neg(self) -> None: - self.assert_rule_match("a/b/**", ("a/b",), should_match=False) - - def test_matches_dots(self) -> None: - self.assert_rule_match(".*", (".dots", ".dips")) - - def test_matches_dots_relative(self) -> None: - self.assert_rule_match("./*.py", ("f.py", "g.py")) - - def test_matches_dots_neg(self) -> None: - self.assert_rule_match( + # Confirm on-disk behavior. + for expected_match in paths: + if expected_match.endswith("/"): + rule_runner.create_dir(expected_match) + else: + rule_runner.create_file(expected_match) + snapshot = rule_runner.request_product(Snapshot, [PathGlobs([glob])]) + if should_match: + assert sorted(paths) == sorted(snapshot.files) + else: + assert not snapshot.files + + +@pytest.mark.parametrize( + "glob,paths", + [ + # Single stars. + ("a/b/*/f.py", ("a/b/c/f.py", "a/b/q/f.py")), + ("foo/bar/*", ("foo/bar/baz", "foo/bar/bar")), + ("*/bar/b*", ("foo/bar/baz", "foo/bar/bar")), + ("*/[be]*/b*", ("foo/bar/baz", "foo/bar/bar")), + ("foo*/bar", ("foofighters/bar", "foofighters.venv/bar")), + # Double stars. + ("**", ("a/b/c", "b")), + ("a/**/f", ("a/f", "a/b/c/d/e/f")), + ("a/b/**", ("a/b/d", "a/b/c/d/e/f")), + # Dots. + (".*", (".dots", ".dips")), + ("./*.py", ("f.py", "g.py")), + # Dirs. + ("dist/", ("dist",)), + ("build-support/*.venv/", ("build-support/blah.venv", "build-support/rbt.venv")), + # Literals. + ("a", ("a",)), + ("a/b/c", ("a/b/c",)), + ("a/b/c.py", ("a/b/c.py",)), + ], +) +def test_valid_matches(rule_runner: RuleRunner, glob: str, paths: Tuple[str, ...]) -> None: + assert_rule_match(rule_runner, glob, paths, should_match=True) + + +@pytest.mark.parametrize( + "glob,paths", + [ + # Single stars. 
+ ("a/b/*/f.py", ("a/b/c/d/f.py", "a/b/f.py")), + ("*/bar/b*", ("foo/koo/bar/baz", "foo/bar/bar/zoo")), + ("foo*/bar", ("foofighters/baz/bar",)), + # Double stars. + ("a/b/**", ("a/b",)), + # Dots. + ( ".*", ( "b", @@ -85,34 +86,11 @@ def test_matches_dots_neg(self) -> None: "all/nested/.dot", ".some/hidden/nested/dir/file.py", ), - should_match=False, - ) - - def test_matches_dirs(self) -> None: - self.assert_rule_match("dist/", ("dist",)) - - def test_matches_dirs_neg(self) -> None: - self.assert_rule_match( - "dist/", ("not_dist", "cdist", "dist.py", "dist/dist"), should_match=False - ) - - def test_matches_dirs_dots(self) -> None: - self.assert_rule_match( - "build-support/*.venv/", ("build-support/blah.venv", "build-support/rbt.venv") - ) - - def test_matches_dirs_dots_neg(self) -> None: - self.assert_rule_match( - "build-support/*.venv/", - ("build-support/rbt.venv.but_actually_a_file",), - should_match=False, - ) - - def test_matches_literals(self) -> None: - self.assert_rule_match("a", ("a",)) - - def test_matches_literal_dir(self) -> None: - self.assert_rule_match("a/b/c", ("a/b/c",)) - - def test_matches_literal_file(self) -> None: - self.assert_rule_match("a/b/c.py", ("a/b/c.py",)) + ), + # Dirs. 
+ ("dist/", ("not_dist", "cdist", "dist.py", "dist/dist")), + ("build-support/*.venv/", ("build-support/rbt.venv.but_actually_a_file",)), + ], +) +def test_invalid_matches(rule_runner: RuleRunner, glob: str, paths: Tuple[str, ...]) -> None: + assert_rule_match(rule_runner, glob, paths, should_match=False) diff --git a/src/python/pants/source/source_root_test.py b/src/python/pants/source/source_root_test.py index c9bbab6fa18..ce68347de88 100644 --- a/src/python/pants/source/source_root_test.py +++ b/src/python/pants/source/source_root_test.py @@ -23,7 +23,7 @@ from pants.source.source_root import rules as source_root_rules from pants.testutil.engine_util import MockGet, run_rule from pants.testutil.option_util import create_options_bootstrapper, create_subsystem -from pants.testutil.test_base import TestBase +from pants.testutil.rule_runner import RuleRunner def _find_root( @@ -194,122 +194,118 @@ def find_root(path): assert "project2/src/python" == find_root("project2/src/python/baz/qux.py") -class AllRootsTest(TestBase): - def test_all_roots(self) -> None: +def test_all_roots() -> None: + dirs = ( + "contrib/go/examples/src/go/src", + "src/java", + "src/python", + "src/python/subdir/src/python", # We allow source roots under source roots. + "src/kotlin", + "my/project/src/java", + "src/example/java", + "src/example/python", + "fixed/root/jvm", + ) - dirs = ( + source_root_config = create_subsystem( + SourceRootConfig, + root_patterns=[ + "src/*", + "src/example/*", "contrib/go/examples/src/go/src", - "src/java", - "src/python", - "src/python/subdir/src/python", # We allow source roots under source roots. - "src/kotlin", - "my/project/src/java", - "src/example/java", - "src/example/python", + # Dir does not exist, should not be listed as a root. 
+ "java", "fixed/root/jvm", - ) + ], + marker_filenames=[], + ) - source_root_config = create_subsystem( - SourceRootConfig, - root_patterns=[ - "src/*", - "src/example/*", - "contrib/go/examples/src/go/src", - # Dir does not exist, should not be listed as a root. - "java", - "fixed/root/jvm", - ], - marker_filenames=[], - ) + # This function mocks out reading real directories off the file system. + def provider_rule(_: PathGlobs) -> Snapshot: + return Snapshot(Digest("abcdef", 10), (), dirs) + + def source_root_mock_rule(req: SourceRootRequest) -> OptionalSourceRoot: + for d in dirs: + if str(req.path).startswith(d): + return OptionalSourceRoot(SourceRoot(str(req.path))) + return OptionalSourceRoot(None) + + output = run_rule( + all_roots, + rule_args=[source_root_config], + mock_gets=[ + MockGet(product_type=Snapshot, subject_type=PathGlobs, mock=provider_rule), + MockGet( + product_type=OptionalSourceRoot, + subject_type=SourceRootRequest, + mock=source_root_mock_rule, + ), + ], + ) - # This function mocks out reading real directories off the file system. 
- def provider_rule(_: PathGlobs) -> Snapshot: - return Snapshot(Digest("abcdef", 10), (), dirs) - - def source_root_mock_rule(req: SourceRootRequest) -> OptionalSourceRoot: - for d in dirs: - if str(req.path).startswith(d): - return OptionalSourceRoot(SourceRoot(str(req.path))) - return OptionalSourceRoot(None) - - output = run_rule( - all_roots, - rule_args=[source_root_config], - mock_gets=[ - MockGet(product_type=Snapshot, subject_type=PathGlobs, mock=provider_rule), - MockGet( - product_type=OptionalSourceRoot, - subject_type=SourceRootRequest, - mock=source_root_mock_rule, - ), - ], - ) + assert { + SourceRoot("contrib/go/examples/src/go/src"), + SourceRoot("src/java"), + SourceRoot("src/python"), + SourceRoot("src/python/subdir/src/python"), + SourceRoot("src/kotlin"), + SourceRoot("src/example/java"), + SourceRoot("src/example/python"), + SourceRoot("my/project/src/java"), + SourceRoot("fixed/root/jvm"), + } == set(output) - assert { - SourceRoot("contrib/go/examples/src/go/src"), - SourceRoot("src/java"), - SourceRoot("src/python"), - SourceRoot("src/python/subdir/src/python"), - SourceRoot("src/kotlin"), - SourceRoot("src/example/java"), - SourceRoot("src/example/python"), - SourceRoot("my/project/src/java"), - SourceRoot("fixed/root/jvm"), - } == set(output) - - def test_all_roots_with_root_at_buildroot(self) -> None: - source_root_config = create_subsystem( - SourceRootConfig, - root_patterns=["/"], - marker_filenames=[], - ) - # This function mocks out reading real directories off the file system - def provider_rule(_: PathGlobs) -> Snapshot: - dirs = ("foo",) # A python package at the buildroot. 
- return Snapshot(Digest("abcdef", 10), (), dirs) - - output = run_rule( - all_roots, - rule_args=[source_root_config], - mock_gets=[ - MockGet(product_type=Snapshot, subject_type=PathGlobs, mock=provider_rule), - MockGet( - product_type=OptionalSourceRoot, - subject_type=SourceRootRequest, - mock=lambda req: OptionalSourceRoot(SourceRoot(".")), - ), - ], - ) - assert {SourceRoot(".")} == set(output) +def test_all_roots_with_root_at_buildroot() -> None: + source_root_config = create_subsystem( + SourceRootConfig, + root_patterns=["/"], + marker_filenames=[], + ) + + # This function mocks out reading real directories off the file system + def provider_rule(_: PathGlobs) -> Snapshot: + dirs = ("foo",) # A python package at the buildroot. + return Snapshot(Digest("abcdef", 10), (), dirs) + + output = run_rule( + all_roots, + rule_args=[source_root_config], + mock_gets=[ + MockGet(product_type=Snapshot, subject_type=PathGlobs, mock=provider_rule), + MockGet( + product_type=OptionalSourceRoot, + subject_type=SourceRootRequest, + mock=lambda req: OptionalSourceRoot(SourceRoot(".")), + ), + ], + ) + assert {SourceRoot(".")} == set(output) -class SourceRootsRequestTest(TestBase): - @classmethod - def rules(cls): - return [ - *super().rules(), +def test_source_roots_request() -> None: + rule_runner = RuleRunner( + rules=[ *source_root_rules(), QueryRule(SourceRootsResult, (SourceRootsRequest, OptionsBootstrapper)), ] - - def test_source_roots_request(self) -> None: - req = SourceRootsRequest( - files=(PurePath("src/python/foo/bar.py"), PurePath("tests/python/foo/bar_test.py")), - dirs=(PurePath("src/python/foo"), PurePath("src/python/baz/qux")), - ) - res = self.request_product( - SourceRootsResult, - [ - req, - create_options_bootstrapper( - args=["--source-root-patterns=['src/python','tests/python']"] - ), - ], - ) - assert { - PurePath("src/python/foo/bar.py"): SourceRoot("src/python"), - PurePath("tests/python/foo/bar_test.py"): SourceRoot("tests/python"), - 
PurePath("src/python/foo"): SourceRoot("src/python"), - PurePath("src/python/baz/qux"): SourceRoot("src/python"), - } == dict(res.path_to_root) + ) + req = SourceRootsRequest( + files=(PurePath("src/python/foo/bar.py"), PurePath("tests/python/foo/bar_test.py")), + dirs=(PurePath("src/python/foo"), PurePath("src/python/baz/qux")), + ) + res = rule_runner.request_product( + SourceRootsResult, + [ + req, + create_options_bootstrapper( + args=["--source-root-patterns=['src/python','tests/python']"] + ), + ], + ) + assert { + PurePath("src/python/foo/bar.py"): SourceRoot("src/python"), + PurePath("tests/python/foo/bar_test.py"): SourceRoot("tests/python"), + PurePath("src/python/foo"): SourceRoot("src/python"), + PurePath("src/python/baz/qux"): SourceRoot("src/python"), + } == dict(res.path_to_root) diff --git a/src/python/pants/testutil/BUILD b/src/python/pants/testutil/BUILD index 1f53d44ffb1..187b17b8cf5 100644 --- a/src/python/pants/testutil/BUILD +++ b/src/python/pants/testutil/BUILD @@ -20,6 +20,7 @@ python_distribution( python_library( sources=[ "*.py", + "!rule_runner.py", "!test_base.py", "!pants_integration_test.py", "!pants_run_integration_test.py", @@ -28,8 +29,8 @@ python_library( ) python_library( - name="test_base", - sources=["test_base.py"], + name="rule_runner", + sources=["rule_runner.py", "test_base.py"], dependencies=["//:build_root"], ) diff --git a/src/python/pants/testutil/rule_runner.py b/src/python/pants/testutil/rule_runner.py new file mode 100644 index 00000000000..4cb03699c2d --- /dev/null +++ b/src/python/pants/testutil/rule_runner.py @@ -0,0 +1,241 @@ +# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ +import os +from dataclasses import dataclass +from io import StringIO +from pathlib import PurePath +from tempfile import mkdtemp +from typing import Any, Dict, Iterable, Mapping, Optional, Type, TypeVar, Union, cast + +from pants.base.build_root import BuildRoot +from pants.base.specs_parser import SpecsParser +from pants.build_graph.build_configuration import BuildConfiguration +from pants.build_graph.build_file_aliases import BuildFileAliases +from pants.engine.addresses import Address +from pants.engine.console import Console +from pants.engine.fs import PathGlobs, PathGlobsAndRoot, Snapshot, Workspace +from pants.engine.goal import Goal +from pants.engine.internals.native import Native +from pants.engine.internals.scheduler import SchedulerSession +from pants.engine.internals.selectors import Params +from pants.engine.process import InteractiveRunner +from pants.engine.rules import QueryRule, Rule +from pants.engine.target import Target, WrappedTarget +from pants.init.engine_initializer import EngineInitializer +from pants.option.global_options import ExecutionOptions, GlobalOptions +from pants.option.options_bootstrapper import OptionsBootstrapper +from pants.source import source_root +from pants.testutil.option_util import create_options_bootstrapper +from pants.util.collections import assert_single_element +from pants.util.contextutil import temporary_dir +from pants.util.dirutil import recursive_dirname, safe_file_dump, safe_mkdir, safe_open +from pants.util.meta import frozen_after_init +from pants.util.ordered_set import FrozenOrderedSet + +_P = TypeVar("_P") + + +@dataclass(frozen=True) +class GoalRuleResult: + exit_code: int + stdout: str + stderr: str + + @staticmethod + def noop() -> "GoalRuleResult": + return GoalRuleResult(0, stdout="", stderr="") + + +@frozen_after_init +@dataclass(unsafe_hash=True) +class RuleRunner: + build_root: str + build_config: BuildConfiguration + scheduler: SchedulerSession + + def __init__( + self, + *, + rules: 
Optional[Iterable] = None, + target_types: Optional[Iterable[Type[Target]]] = None, + objects: Optional[Dict[str, Any]] = None, + context_aware_object_factories: Optional[Dict[str, Any]] = None, + ) -> None: + self.build_root = os.path.realpath(mkdtemp(suffix="_BUILD_ROOT")) + safe_mkdir(self.build_root, clean=True) + safe_mkdir(self.pants_workdir) + BuildRoot().path = self.build_root + + # TODO: Redesign rule registration for tests to be more ergonomic and to make this less + # special-cased. + all_rules = ( + *(rules or ()), + *source_root.rules(), + QueryRule(WrappedTarget, (Address, OptionsBootstrapper)), + ) + build_config_builder = BuildConfiguration.Builder() + build_config_builder.register_aliases( + BuildFileAliases( + objects=objects, context_aware_object_factories=context_aware_object_factories + ) + ) + build_config_builder.register_rules(all_rules) + build_config_builder.register_target_types(target_types or ()) + self.build_config = build_config_builder.create() + + options_bootstrapper = OptionsBootstrapper.create( + env={}, args=["--pants-config-files=[]"], allow_pantsrc=False + ) + global_options = options_bootstrapper.bootstrap_options.for_global_scope() + local_store_dir = global_options.local_store_dir + local_execution_root_dir = global_options.local_execution_root_dir + named_caches_dir = global_options.named_caches_dir + + graph_session = EngineInitializer.setup_graph_extended( + pants_ignore_patterns=[], + use_gitignore=False, + local_store_dir=local_store_dir, + local_execution_root_dir=local_execution_root_dir, + named_caches_dir=named_caches_dir, + native=Native(), + options_bootstrapper=options_bootstrapper, + build_root=self.build_root, + build_configuration=self.build_config, + execution_options=ExecutionOptions.from_bootstrap_options(global_options), + ).new_session(build_id="buildid_for_test", should_report_workunits=True) + self.scheduler = graph_session.scheduler_session + + @property + def pants_workdir(self) -> str: + return 
os.path.join(self.build_root, ".pants.d") + + @property + def rules(self) -> FrozenOrderedSet[Rule]: + return self.build_config.rules + + @property + def target_types(self) -> FrozenOrderedSet[Type[Target]]: + return self.build_config.target_types + + def request_product(self, product_type: Type[_P], subjects: Iterable[Any]) -> _P: + result = assert_single_element( + self.scheduler.product_request(product_type, [Params(*subjects)]) + ) + return cast(_P, result) + + def run_goal_rule( + self, + goal: Type[Goal], + *, + global_args: Optional[Iterable[str]] = None, + args: Optional[Iterable[str]] = None, + env: Optional[Mapping[str, str]] = None, + ) -> GoalRuleResult: + options_bootstrapper = create_options_bootstrapper( + args=(*(global_args or []), goal.name, *(args or [])), + env=env, + ) + + raw_specs = options_bootstrapper.get_full_options( + [*GlobalOptions.known_scope_infos(), *goal.subsystem_cls.known_scope_infos()] + ).specs + specs = SpecsParser(self.build_root).parse_specs(raw_specs) + + stdout, stderr = StringIO(), StringIO() + console = Console(stdout=stdout, stderr=stderr) + + exit_code = self.scheduler.run_goal_rule( + goal, + Params( + specs, + console, + options_bootstrapper, + Workspace(self.scheduler), + InteractiveRunner(self.scheduler), + ), + ) + + console.flush() + return GoalRuleResult(exit_code, stdout.getvalue(), stderr.getvalue()) + + def _invalidate_for(self, *relpaths): + """Invalidates all files from the relpath, recursively up to the root. + + Many python operations implicitly create parent directories, so we assume that touching a + file located below directories that do not currently exist will result in their creation. + """ + files = {f for relpath in relpaths for f in recursive_dirname(relpath)} + return self.scheduler.invalidate_files(files) + + def create_dir(self, relpath: str) -> str: + """Creates a directory under the buildroot. + + :API: public + + relpath: The relative path to the directory from the build root. 
+        """
+        path = os.path.join(self.build_root, relpath)
+        safe_mkdir(path)
+        self._invalidate_for(relpath)
+        return path
+
+    def create_file(self, relpath: str, contents: str = "", mode: str = "w") -> str:
+        """Writes to a file under the buildroot.
+
+        :API: public
+
+        relpath: The relative path to the file from the build root.
+        contents: A string containing the contents of the file - '' by default.
+        mode: The mode to write to the file in - over-write by default.
+        """
+        path = os.path.join(self.build_root, relpath)
+        with safe_open(path, mode=mode) as fp:
+            fp.write(contents)
+        self._invalidate_for(relpath)
+        return path
+
+    def create_files(self, path: str, files: Iterable[str]) -> None:
+        """Writes to a file under the buildroot with contents same as file name.
+
+        :API: public
+
+        path: The relative path to the file from the build root.
+        files: List of file names.
+        """
+        for f in files:
+            self.create_file(os.path.join(path, f), contents=f)
+
+    def add_to_build_file(self, relpath: Union[str, PurePath], target: str) -> str:
+        """Adds the given target specification to the BUILD file at relpath.
+
+        :API: public
+
+        relpath: The relative path to the BUILD file from the build root.
+        target: A string containing the target definition as it would appear in a BUILD file.
+        """
+        build_path = (
+            relpath if PurePath(relpath).name.startswith("BUILD") else PurePath(relpath, "BUILD")
+        )
+        return self.create_file(str(build_path), target, mode="a")
+
+    def make_snapshot(self, files: Dict[str, Union[str, bytes]]) -> Snapshot:
+        """Makes a snapshot from a map of file name to file content."""
+        with temporary_dir() as temp_dir:
+            for file_name, content in files.items():
+                mode = "wb" if isinstance(content, bytes) else "w"
+                safe_file_dump(os.path.join(temp_dir, file_name), content, mode=mode)
+            return cast(
+                Snapshot,
+                self.scheduler.capture_snapshots((PathGlobsAndRoot(PathGlobs(("**",)), temp_dir),))[
+                    0
+                ],
+            )
+
+    def make_snapshot_of_empty_files(self, files: Iterable[str]) -> Snapshot:
+        """Makes a snapshot with empty content for each file.
+
+        This is a convenience around `RuleRunner.make_snapshot`, which allows specifying the content
+        for each file.
+        """
+        return self.make_snapshot({fp: "" for fp in files})
diff --git a/src/python/pants/testutil/test_base.py b/src/python/pants/testutil/test_base.py
index 43ccbadb945..966fe12291a 100644
--- a/src/python/pants/testutil/test_base.py
+++ b/src/python/pants/testutil/test_base.py
@@ -6,7 +6,6 @@
 import unittest
 from abc import ABC, ABCMeta, abstractmethod
 from contextlib import contextmanager
-from dataclasses import dataclass
 from io import StringIO
 from pathlib import PurePath
 from tempfile import mkdtemp
@@ -25,6 +24,7 @@
 )
 from pants.base.build_root import BuildRoot
+from pants.base.deprecated import warn_or_error
 from pants.base.specs_parser import SpecsParser
 from pants.build_graph.build_configuration import BuildConfiguration
 from pants.build_graph.build_file_aliases import BuildFileAliases
@@ -45,6 +45,7 @@
 from pants.source import source_root
 from pants.testutil.engine_util import Params
 from pants.testutil.option_util import create_options_bootstrapper
+from pants.testutil.rule_runner import GoalRuleResult as GoalRuleResult
 from pants.util.collections import
assert_single_element from pants.util.contextutil import temporary_dir from pants.util.dirutil import ( @@ -89,17 +90,6 @@ def add_test(cls, method_name, method): setattr(cls, method_name, method) -@dataclass(frozen=True) -class GoalRuleResult: - exit_code: int - stdout: str - stderr: str - - @staticmethod - def noop() -> "GoalRuleResult": - return GoalRuleResult(0, stdout="", stderr="") - - class TestBase(unittest.TestCase, metaclass=ABCMeta): """A baseclass useful for tests that run rules with a temporary build root. @@ -257,6 +247,18 @@ def setUp(self): BuildRoot().path = self.build_root self.addCleanup(BuildRoot().reset) + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + warn_or_error( + removal_version="2.1.0.dev0", + deprecated_entity_description="pants.testutil.test_base.TestBase", + hint=( + "Use `pants.testutil.rule_runner.RuleRunner` instead, which uses a Pytest fixture " + "style. See https://www.pantsbuild.org/v2.0/docs/rules-api-testing." + ), + ) + def _reset_engine(self): if self._scheduler is not None: self._scheduler.invalidate_all_files() diff --git a/testprojects/pants-plugins/tests/python/test_pants_plugin/test_pants_plugin_pants_requirement.py b/testprojects/pants-plugins/tests/python/test_pants_plugin/test_pants_plugin_pants_requirement.py index ab76d552fb3..e2bf75378f3 100644 --- a/testprojects/pants-plugins/tests/python/test_pants_plugin/test_pants_plugin_pants_requirement.py +++ b/testprojects/pants-plugins/tests/python/test_pants_plugin/test_pants_plugin_pants_requirement.py @@ -3,9 +3,7 @@ from pants.base.build_environment import pants_version from pants.version import VERSION as _VERSION -from pants.testutil.test_base import TestBase -class PantsPluginPantsRequirementTest(TestBase): - def test_version(self): - self.assertEqual(pants_version(), _VERSION) +def test_version() -> None: + assert pants_version() == _VERSION