Skip to content

Commit

Permalink
Refactor pants_run_integration_test.py and pex_build_util.py (pantsbuild#8180)

Browse files Browse the repository at this point in the history

Both these files are very important to understanding how Pytest works with V1 and how we run integration tests. While reading them to understand what's going on, I made these changes:

* Replace lambdas with full `def` so that we can use intermediate variables and type hints
* Use `pathlib.Path`
* Use f-strings
* Replace datatype() with dataclasses
* Fix `BUILD` entries
* Add type hints (can't be type checked yet)
  • Loading branch information
Eric-Arellano authored Aug 20, 2019
1 parent fe6a2d3 commit fb3ba3f
Show file tree
Hide file tree
Showing 5 changed files with 133 additions and 128 deletions.
1 change: 1 addition & 0 deletions 3rdparty/python/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ beautifulsoup4>=4.6.0,<4.7
cffi==1.11.5
contextlib2==0.5.5
coverage>=4.5,<4.6
dataclasses==0.6
docutils==0.14
fasteners==0.14.1
Markdown==2.1.1
Expand Down
110 changes: 53 additions & 57 deletions src/python/pants/backend/python/subsystems/pex_build_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
import logging
import os
from collections import defaultdict
from pathlib import Path
from typing import Callable, Sequence, Set

from pex.fetcher import Fetcher
from pex.pex_builder import PEXBuilder
Expand All @@ -22,39 +24,40 @@
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.build_graph.files import Files
from pants.build_graph.target import Target
from pants.subsystem.subsystem import Subsystem
from pants.util.collections import assert_single_element
from pants.util.contextutil import temporary_file


def is_python_target(tgt: Target) -> bool:
  """Return True if `tgt` is a V1 python target type handled by this pex-building pipeline.

  We'd like to take all PythonTarget subclasses, but currently PythonThriftLibrary and
  PythonAntlrLibrary extend PythonTarget, and until we fix that (which we can't do until
  we remove the old python pipeline entirely) we want to ignore those target types here.
  """
  return isinstance(tgt, (PythonLibrary, PythonTests, PythonBinary))


def has_python_sources(tgt: Target) -> bool:
  """Return True if `tgt` is a python target that owns at least one source file."""
  return is_python_target(tgt) and tgt.has_sources()


def is_local_python_dist(tgt: Target) -> bool:
  """Return True if `tgt` is a `PythonDistribution` (i.e. a `python_dist()`) target."""
  return isinstance(tgt, PythonDistribution)


def has_resources(tgt: Target) -> bool:
  """Return True if `tgt` is a `Files` target that owns at least one source file."""
  return isinstance(tgt, Files) and tgt.has_sources()


def has_python_requirements(tgt: Target) -> bool:
  """Return True if `tgt` declares third-party python requirements."""
  return isinstance(tgt, PythonRequirementLibrary)


def always_uses_default_python_platform(tgt: Target) -> bool:
  """Return True if `tgt` is always built for the default python platform.

  NOTE(review): grounded only in the isinstance check here — python_tests() targets are
  treated as never declaring an explicit platform; confirm against callers.
  """
  return isinstance(tgt, PythonTests)


def may_have_explicit_python_platform(tgt: Target) -> bool:
  """Return True if `tgt` is a target type that may declare explicit python platforms.

  NOTE(review): per the isinstance check, only python_binary() targets qualify — verify
  against `targets_by_platform`, which consumes this predicate.
  """
  return isinstance(tgt, PythonBinary)


Expand All @@ -76,7 +79,7 @@ def targets_by_platform(targets, python_setup):
return dict(explicit_platform_settings)


def identify_missing_init_files(sources):
def identify_missing_init_files(sources: Sequence[str]) -> Set[str]:
"""Return the list of paths that would need to be added to ensure that every package has
an __init__.py. """
packages = set()
Expand All @@ -92,39 +95,29 @@ def identify_missing_init_files(sources):
return {os.path.join(package, '__init__.py') for package in packages} - set(sources)


def _create_source_dumper(builder: PEXBuilder, tgt: Target) -> Callable[[str], None]:
  """Return a closure that copies one of `tgt`'s files (given as a path relative to the
  build root) into the PEXBuilder chroot, as either a resource or a source.

  :param builder: the PEXBuilder being populated; may be mutated (zip_safe flag).
  :param tgt: the target whose files will be dumped.
  :return: a function taking a build-root-relative path and adding it to the chroot.
  """
  buildroot = get_buildroot()

  def get_chroot_path(relpath: str) -> str:
    if type(tgt) == Files:
      # Loose `Files`, as opposed to `Resources` or `PythonTarget`s, have no (implied) package
      # structure and so we chroot them relative to the build root so that they can be accessed
      # via the normal Python filesystem APIs just as they would be accessed outside the
      # chrooted environment. NB: This requires we mark the pex as not zip safe so
      # these `Files` can still be accessed in the context of a built pex distribution.
      builder.info.zip_safe = False
      return relpath
    # Everything else is chrooted relative to its source root so the package structure holds.
    return str(Path(relpath).relative_to(tgt.target_base))

  def dump_source(relpath: str) -> None:
    source_path = str(Path(buildroot) / relpath)
    dest_path = get_chroot_path(relpath)
    if has_resources(tgt):
      builder.add_resource(filename=source_path, env_filename=dest_path)
    else:
      builder.add_source(filename=source_path, env_filename=dest_path)

  return dump_source


class PexBuilderWrapper:
Expand All @@ -150,7 +143,7 @@ def subsystem_dependencies(cls):
@classmethod
def create(cls, builder, log=None):
options = cls.global_instance().get_options()
setuptools_requirement = 'setuptools=={}'.format(options.setuptools_version)
setuptools_requirement = f'setuptools=={options.setuptools_version}'

log = log or logging.getLogger(__name__)

Expand Down Expand Up @@ -216,15 +209,15 @@ def extract_single_dist_for_current_platform(self, reqs, dist_key):
))
except (StopIteration, ValueError) as e:
raise self.SingleDistExtractionError(
"Exactly one dist was expected to match name {} in requirements {}: {}"
.format(dist_key, reqs, e))
f"Exactly one dist was expected to match name {dist_key} in requirements {reqs}: {e!r}"
)
return matched_dist

def _resolve_distributions_by_platform(self, reqs, platforms):
deduped_reqs = OrderedSet(reqs)
find_links = OrderedSet()
for req in deduped_reqs:
self._log.debug(' Dumping requirement: {}'.format(req))
self._log.debug(f' Dumping requirement: {req}')
self._builder.add_requirement(str(req.requirement))
if req.repository:
find_links.add(req.repository)
Expand All @@ -249,7 +242,7 @@ def add_resolved_requirements(self, reqs, platforms=None):
for platform, dists in distributions.items():
for dist in dists:
if dist.location not in locations:
self._log.debug(' Dumping distribution: .../{}'.format(os.path.basename(dist.location)))
self._log.debug(f' Dumping distribution: .../{os.path.basename(dist.location)}')
self.add_distribution(dist)
locations.add(dist.location)

Expand Down Expand Up @@ -291,24 +284,26 @@ def _resolve_multi(self, interpreter, requirements, platforms, find_links):

return distributions

def add_sources_from(self, tgt: Target) -> None:
  """Dump all of `tgt`'s sources (or resources) into the pex chroot.

  :raises TaskError: if `tgt` still uses old-style (pre-`resources()`) resource specs.
  :raises OSError: re-raised (after logging) if a file cannot be copied into the chroot.
  """
  dump_source = _create_source_dumper(self._builder, tgt)
  self._log.debug(f'  Dumping sources: {tgt}')
  for relpath in tgt.sources_relative_to_buildroot():
    try:
      dump_source(relpath)
    except OSError:
      # Log which file/target failed before propagating, since the raw OSError lacks context.
      self._log.error(f'Failed to copy {relpath} for target {tgt.address.spec}')
      raise

  if (getattr(tgt, '_resource_target_specs', None) or
      getattr(tgt, '_synthetic_resources_target', None)):
    # No one should be on old-style resources any more. And if they are,
    # switching to the new python pipeline will be a great opportunity to fix that.
    raise TaskError(
      f'Old-style resources not supported for target {tgt.address.spec}. Depend on resources() '
      'targets instead.'
    )

def _prepare_inits(self):
def _prepare_inits(self) -> Set[str]:
chroot = self._builder.chroot()
sources = chroot.get('source') | chroot.get('resource')
missing_init_files = identify_missing_init_files(sources)
Expand All @@ -317,20 +312,21 @@ def _prepare_inits(self):
ns_package.write(b'__import__("pkg_resources").declare_namespace(__name__)')
ns_package.flush()
for missing_init_file in missing_init_files:
self._builder.add_source(ns_package.name, missing_init_file)
self._builder.add_source(filename=ns_package.name, env_filename=missing_init_file)
return missing_init_files

def set_emit_warnings(self, emit_warnings) -> None:
  # Propagate the flag into the underlying PEXBuilder's PexInfo so it is baked into the
  # built pex. NOTE(review): presumably a bool — confirm against the subsystem option.
  self._builder.info.emit_warnings = emit_warnings

def freeze(self) -> None:
  """Finalize the pex under construction; idempotent (subsequent calls are no-ops).

  Before freezing, `_prepare_inits` adds an `__init__.py` for any package missing one.
  Those generated inits call `pkg_resources.declare_namespace`, so setuptools is resolved
  into the pex if it is not already among the resolved distributions.
  """
  if self._frozen:
    return
  if self._prepare_inits():
    dist = self._distributions.get('setuptools')
    if not dist:
      self.add_resolved_requirements([self._setuptools_requirement])
  self._builder.freeze(bytecode_compile=False)
  self._frozen = True

def set_entry_point(self, entry_point) -> None:
  # Delegate to the underlying PEXBuilder; `entry_point` is a pex-style entry point
  # (e.g. 'module' or 'module:function' — see the pex PEXBuilder API).
  self._builder.set_entry_point(entry_point)
Expand Down
6 changes: 5 additions & 1 deletion tests/python/pants_test/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -29,16 +29,20 @@ python_library(
],
dependencies = [
'3rdparty/python:ansicolors',
'3rdparty/python:dataclasses',
'build-support/regexes',
'src/python/pants/base:build_environment',
'src/python/pants/base:build_file',
'src/python/pants/base:exiter',
'src/python/pants/fs',
'src/python/pants/subsystem',
'src/python/pants/util:contextutil',
'src/python/pants/util:dirutil',
'src/python/pants/util:objects',
'src/python/pants/util:osutil',
'src/python/pants/util:process_handler',
'src/python/pants/util:strutil',
'tests/python/pants_test/testutils:file_test_util',
'tests/python/pants_test/testutils:pexrc_util',
]
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,12 +67,12 @@ def basic_binary(self, config):
pants_run.workdir,
'compile/rsc/current/testprojects.src.scala.org.pantsbuild.testproject.mutual.mutual/current/zinc',
'classes/org/pantsbuild/testproject/mutual/A.class')
self.assertIsFile(zinc_compiled_classfile)
self.assert_is_file(zinc_compiled_classfile)
rsc_header_jar = os.path.join(
pants_run.workdir,
'compile/rsc/current/testprojects.src.scala.org.pantsbuild.testproject.mutual.mutual/current/rsc',
'm.jar')
self.assertIsFile(rsc_header_jar)
self.assert_is_file(rsc_header_jar)

@_for_all_supported_execution_environments
def executing_multi_target_binary(self, config):
Expand Down
Loading

0 comments on commit fb3ba3f

Please sign in to comment.