Commit 114eb36

[internal] lint/style fixes related to flake8-comprehensions plugin (pantsbuild#14476)

[ci skip-rust]
asherf authored Feb 14, 2022
1 parent fbc13ad commit 114eb36
Showing 14 changed files with 33 additions and 38 deletions.
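All 14 files receive the same family of mechanical rewrites suggested by the flake8-comprehensions plugin: a generator, comprehension, or literal passed to set(), list(), tuple(), or sorted() is replaced by the equivalent comprehension, literal, or bare iterable. A minimal sketch of the recurring patterns (rule codes per the flake8-comprehensions documentation; the names here are illustrative, not from the diff):

items = ["b", "a", "b"]

# C400/C401: a generator passed to list()/set() becomes a comprehension.
assert list(x.upper() for x in items) == [x.upper() for x in items]
assert set(x.upper() for x in items) == {x.upper() for x in items}

# C416: an identity comprehension becomes a plain constructor call.
assert [x for x in items] == list(items)

# C414: an inner list() inside sorted() is redundant; sorted() takes any iterable.
assert sorted(list(items)) == sorted(items)

Each rewrite below is behavior-preserving; the comprehension forms simply skip an intermediate list or a needless pass through a constructor.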
2 changes: 1 addition & 1 deletion build-support/bin/generate_user_list.py
@@ -111,7 +111,7 @@ class OrgPair:
 
 
 def main():
-    orgs = sorted(list(_orgs), key=lambda x: x.name.lower())
+    orgs = sorted(_orgs, key=lambda x: x.name.lower())
     # Ensure an even number of cells, leaving one to render blankly if necessary.
     if len(orgs) % 2 == 1:
         orgs.append(Org("", "", ""))
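A note on this first fix (C414, unnecessary list() call within sorted()): sorted() already copies its argument into a new list before sorting, so the inner list() only built a throwaway copy. Sketch with a stand-in Org type (hypothetical, mirroring the name used above):

from dataclasses import dataclass

@dataclass(frozen=True)
class Org:
    name: str

_orgs = {Org("Beta"), Org("alpha")}
by_name = sorted(_orgs, key=lambda x: x.name.lower())
assert [o.name for o in by_name] == ["alpha", "Beta"]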
4 changes: 2 additions & 2 deletions build-support/migration-support/convert_source_to_sources.py
@@ -13,13 +13,13 @@
 
 
 def main() -> None:
     args = create_parser().parse_args()
-    build_files: Set[Path] = set(
+    build_files: Set[Path] = {
         fp
         for folder in args.folders
         for fp in [*folder.rglob("BUILD"), *folder.rglob("BUILD.*")]
         # Check that it really is a BUILD file
         if fp.is_file() and fp.stem == "BUILD"
-    )
+    }
     updates: Dict[Path, List[str]] = {}
     for build in build_files:
         possibly_new_build = maybe_rewrite_build(build)
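This one is C401 (unnecessary generator, rewrite as a set comprehension): the braces build the set directly instead of feeding a generator to the set() constructor. The identical fix appears in the next file. A reduced sketch with hypothetical paths; the is_file() check from the real code is dropped so the sketch runs without touching disk:

from pathlib import Path

candidates = [Path("src/BUILD"), Path("src/BUILD.tools"), Path("docs/README.md")]
# .stem drops only the last suffix, so BUILD.tools still has stem "BUILD".
build_files = {fp for fp in candidates if fp.stem == "BUILD"}
assert build_files == set(fp for fp in candidates if fp.stem == "BUILD")
assert build_files == {Path("src/BUILD"), Path("src/BUILD.tools")}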
4 changes: 2 additions & 2 deletions build-support/migration-support/fix_deprecated_globs_usage.py
@@ -23,13 +23,13 @@
 
 
 def main() -> None:
     args = create_parser().parse_args()
-    build_files: Set[Path] = set(
+    build_files: Set[Path] = {
         fp
         for folder in args.folders
         for fp in [*folder.rglob("BUILD"), *folder.rglob("BUILD.*")]
         # Check that it really is a BUILD file
         if fp.is_file() and fp.stem == "BUILD"
-    )
+    }
     updates: Dict[Path, List[str]] = {}
     for build in build_files:
         try:
8 changes: 5 additions & 3 deletions src/python/pants/backend/codegen/avro/tailor.py
@@ -34,9 +34,11 @@ async def find_putative_targets(
         Get(Paths, PathGlobs, req.search_paths.path_globs("*.avpr")),
         Get(Paths, PathGlobs, req.search_paths.path_globs("*.avdl")),
     )
-    unowned_avro_files = set(
-        [*all_avsc_files.files, *all_avpr_files.files, *all_avdl_files.files]
-    ) - set(all_owned_sources)
+    unowned_avro_files = {
+        *all_avsc_files.files,
+        *all_avpr_files.files,
+        *all_avdl_files.files,
+    } - set(all_owned_sources)
     pts = [
         PutativeTarget.for_target_type(
             AvroSourcesGeneratorTarget,
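Here the change is C405-style: unpacking the three tuples straight into a set literal, rather than splicing them into a list that set() then consumes. A sketch with placeholder file names:

all_avsc = ("schema.avsc",)
all_avpr = ("protocol.avpr",)
all_avdl = ("idl.avdl", "schema.avsc")
all_owned = ["schema.avsc"]

unowned = {*all_avsc, *all_avpr, *all_avdl} - set(all_owned)
assert unowned == {"protocol.avpr", "idl.avdl"}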
4 changes: 2 additions & 2 deletions src/python/pants/backend/python/goals/coverage_py.py
@@ -276,7 +276,7 @@ def _validate_and_update_config(
             f"file {config_path}"
         )
     coverage_config.set("run", "relative_files", "True")
-    omit_elements = [em for em in run_section.get("omit", "").split("\n")] or ["\n"]
+    omit_elements = list(run_section.get("omit", "").split("\n")) or ["\n"]
     if "pytest.pex/*" not in omit_elements:
         omit_elements.append("pytest.pex/*")
     run_section["omit"] = "\n".join(omit_elements)
@@ -402,7 +402,7 @@ async def merge_coverage_data(
         "coverage": {
             "run": {
                 "relative_files": True,
-                "source": list(source_root.path for source_root in source_roots),
+                "source": [source_root.path for source_root in source_roots],
             }
         }
     }
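Both hunks in this file remove a needless pass through a comprehension or constructor: [em for em in xs] is an identity copy (C416), and list(<generator>) is C400. Incidentally, str.split() already returns a list, so in the first hunk even the remaining list() call is only a defensive copy:

run_omit = "foo/*\nbar/*"
omit_elements = list(run_omit.split("\n")) or ["\n"]
assert omit_elements == ["foo/*", "bar/*"]
assert omit_elements == run_omit.split("\n")  # split() already yields a list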
@@ -79,7 +79,7 @@ def __call__(
 
         requirements = {**lock_info.get("default", {}), **lock_info.get("develop", {})}
         for req, info in requirements.items():
-            extras = [x for x in info.get("extras", [])]
+            extras = list(info.get("extras", []))
            extras_str = f"[{','.join(extras)}]" if extras else ""
            req_str = f"{req}{extras_str}{info.get('version','')}"
            if info.get("markers"):
10 changes: 4 additions & 6 deletions src/python/pants/backend/scala/target_types.py
@@ -186,12 +186,10 @@ class ScalaSourceTarget(Target):
 
 
 class ScalaSourcesGeneratorSourcesField(ScalaGeneratorSourcesField):
-    default = tuple(
-        (
-            "*.scala",
-            *(f"!{pat}" for pat in (ScalaJunitTestsGeneratorSourcesField.default)),
-            *(f"!{pat}" for pat in (ScalatestTestsGeneratorSourcesField.default)),
-        )
+    default = (
+        "*.scala",
+        *(f"!{pat}" for pat in (ScalaJunitTestsGeneratorSourcesField.default)),
+        *(f"!{pat}" for pat in (ScalatestTestsGeneratorSourcesField.default)),
     )
 
 
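tuple() wrapped around a tuple display is a no-op (flake8-comprehensions flags this redundant-wrapper family as C409), so the parenthesized literal with * unpacking stands on its own. Reduced sketch with placeholder default values, since the real fields' defaults aren't shown in this diff:

junit_defaults = ("*Test.scala",)  # hypothetical stand-in values
scalatest_defaults = ("*Spec.scala",)

default = (
    "*.scala",
    *(f"!{pat}" for pat in junit_defaults),
    *(f"!{pat}" for pat in scalatest_defaults),
)
assert default == ("*.scala", "!*Test.scala", "!*Spec.scala")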
14 changes: 5 additions & 9 deletions src/python/pants/base/hash_utils.py
@@ -85,20 +85,16 @@ def default(self, o):
             # We disallow OrderedSet (although it is not a stdlib collection) for the same reasons as
             # OrderedDict above.
             if isinstance(o, OrderedSet):
-                raise TypeError(
-                    "{cls} does not support OrderedSet inputs: {val!r}.".format(
-                        cls=type(self).__name__, val=o
-                    )
-                )
+                raise TypeError(f"{type(self).__name__} does not support OrderedSet inputs: {o!r}.")
             # Set order is arbitrary in python 3.6 and 3.7, so we need to keep this sorted() call.
             return sorted(self.default(i) for i in o)
         if isinstance(o, Iterable) and not isinstance(o, (bytes, list, str)):
-            return list(self.default(i) for i in o)
+            return [self.default(i) for i in o]
         logger.debug(
-            "Our custom json encoder {} is trying to hash a primitive type, but has gone through"
+            f"Our custom json encoder {type(self).__name__} is trying to hash a primitive type, but has gone through"
             "checking every other registered type class before. These checks are expensive,"
-            "so you should consider registering the type {} within"
-            "this function ({}.default)".format(type(self).__name__, type(o), type(self).__name__)
+            f"so you should consider registering the type {type(o)} within"
+            f"this function ({type(self).__name__}.default)"
         )
         return o
 
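Besides the C400 fix (list(<generator>) becomes a list comprehension), this hunk folds the str.format() calls into f-strings, which keeps each interpolated value next to the text describing it. The equivalence, in miniature, with a hypothetical class name:

class Encoder:
    def complain(self, o: object) -> str:
        return f"{type(self).__name__} does not support OrderedSet inputs: {o!r}."

assert Encoder().complain({1}) == "Encoder does not support OrderedSet inputs: {1}."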
2 changes: 1 addition & 1 deletion src/python/pants/engine/collection_test.py
@@ -23,7 +23,7 @@ def test_collection_contains() -> None:
 def test_collection_iteration() -> None:
     c1 = Collection([1, 2])
     assert list(iter(c1)) == [1, 2]
-    assert [x for x in c1] == [1, 2]
+    assert list(c1) == [1, 2]
 
 
 def test_collection_length() -> None:
2 changes: 1 addition & 1 deletion src/python/pants/engine/internals/engine_test.py
@@ -361,7 +361,7 @@ def test_streaming_workunits_parent_id_and_rule_metadata(self, tmp_path: Path) -
         # Because of the artificial delay in rule_one, it should have time to be reported as
         # started but not yet finished.
         started = list(itertools.chain.from_iterable(tracker.started_workunit_chunks))
-        assert len(list(item for item in started if item["name"] == "canonical_rule_one")) > 0
+        assert len([item for item in started if item["name"] == "canonical_rule_one"]) > 0
 
         assert {item["name"] for item in tracker.finished_workunit_chunks[1]} == {
             "canonical_rule_one"
2 changes: 1 addition & 1 deletion src/python/pants/engine/internals/graph_test.py
@@ -941,7 +941,7 @@ def assert_generated(
         }
     )
     parametrizations = rule_runner.request(_TargetParametrizations, [address])
-    assert expected == set(t for t in parametrizations.parametrizations.values())
+    assert expected == set(parametrizations.parametrizations.values())
 
 
 def test_generate_multiple(generated_targets_rule_runner: RuleRunner) -> None:
2 changes: 1 addition & 1 deletion src/python/pants/engine/internals/scheduler_test_base.py
@@ -60,7 +60,7 @@ def execute(self, scheduler, product, *subjects):
             with temporary_file_path(cleanup=False, suffix=".dot") as dot_file:
                 scheduler.visualize_graph_to_file(dot_file)
                 raise ValueError(f"At least one root failed: {throws}. Visualized as {dot_file}")
-        return list(state.value for _, state in returns)
+        return [state.value for _, state in returns]
 
     def execute_expecting_one_result(self, scheduler, product, subject):
         request = scheduler.execution_request([product], [subject])
11 changes: 6 additions & 5 deletions src/python/pants/jvm/resolve/coursier_fetch_filter_test.py
@@ -84,8 +84,9 @@ def test_filter_transitive_includes_transitive_deps(lockfile: CoursierResolvedLo
 
 def filter(coordinate, lockfile, transitive) -> Sequence[Coordinate]:
     key = CoursierResolveKey("example", "example.json", EMPTY_DIGEST)
-    if transitive:
-        root, deps = lockfile.dependencies(key, coordinate)
-    else:
-        root, deps = lockfile.direct_dependencies(key, coordinate)
-    return list(i.coord for i in (root, *deps))
+    root, deps = (
+        lockfile.dependencies(key, coordinate)
+        if transitive
+        else lockfile.direct_dependencies(key, coordinate)
+    )
+    return [i.coord for i in (root, *deps)]
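This is the one hunk that goes beyond a one-line comprehension fix: the if/else statement collapses into a conditional expression feeding a single assignment, and the trailing list(<generator>) becomes a list comprehension (C400). The shape of the refactor, with stand-in lookup functions in place of the lockfile methods:

from typing import List, Tuple

def full(key: str) -> Tuple[str, List[str]]:
    return "root", ["a", "b"]

def direct(key: str) -> Tuple[str, List[str]]:
    return "root", ["a"]

def coords(key: str, transitive: bool) -> List[str]:
    root, deps = full(key) if transitive else direct(key)
    return [item.upper() for item in (root, *deps)]

assert coords("example", transitive=True) == ["ROOT", "A", "B"]
assert coords("example", transitive=False) == ["ROOT", "A"]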
4 changes: 1 addition & 3 deletions src/python/pants/util/collections_test.py
@@ -94,9 +94,7 @@ def test_partition_sequentially(size_target: int) -> None:
     # item becomes a boundary) buckets in the output.
 
     def partitioned_buckets(items: list[str]) -> set[tuple[str, ...]]:
-        return set(
-            tuple(p) for p in partition_sequentially(items, key=str, size_target=size_target)
-        )
+        return {tuple(p) for p in partition_sequentially(items, key=str, size_target=size_target)}
 
     # We start with base items containing every other element from a sorted sequence.
     all_items = sorted((f"item{i}" for i in range(0, 1024)))
