From e39b6add28c10803d2380749ab589cd7afeffa41 Mon Sep 17 00:00:00 2001 From: Fillipe Goulart Date: Mon, 26 Apr 2021 17:49:47 -0300 Subject: [PATCH] Add black formatter (#15) * Add black formatter to dependencies * Document contributing changes Add formatter check in CI tests * Run black formatter * Limit line length to 79 characters * Add flake8 to docs --- .github/workflows/python-app.yml | 3 + docs/quickstart.md | 40 +++++ loggibud/v1/baselines/run_task1.py | 4 +- loggibud/v1/baselines/shared/ortools.py | 16 +- .../task1/kmeans_aggregation_ortools.py | 17 +- .../task1/kmeans_partition_ortools.py | 8 +- loggibud/v1/baselines/task1/lkh_3.py | 5 +- loggibud/v1/baselines/task2/kmeans_greedy.py | 44 +++-- loggibud/v1/baselines/task2/qrp_sweep.py | 8 +- loggibud/v1/data_conversion.py | 18 +- loggibud/v1/distances.py | 17 +- loggibud/v1/eval/task1.py | 8 +- loggibud/v1/instance_generation/generate.py | 3 +- loggibud/v1/instance_generation/generators.py | 21 ++- .../v1/instance_generation/preprocessing.py | 8 +- loggibud/v1/types.py | 4 +- poetry.lock | 156 +++++++++++++++++- pyproject.toml | 4 + 18 files changed, 329 insertions(+), 55 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 72cc0b5..9ced7ee 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -37,6 +37,9 @@ jobs: #- name: Check type hints with mypy #run: | #poetry run mypy --ignore-missing-imports . + - name: Check whether black formatter was run + run: | + poetry run black --check . - name: Run tests and check coverage run: | poetry run pytest tests/ diff --git a/docs/quickstart.md b/docs/quickstart.md index 4c72ab7..9f5267d 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -165,3 +165,43 @@ poetry run python -m loggibud.v1.eval.task1 \ --instance tests/results/cvrp-instances/train/rj-0-cvrp-0.json \ --solution results/rj-0-cvrp-0.json ``` + + +## Contributing + +First of all, thanks for your interest in the project. If you find a bug or have any questions, feel free to open an issue. + +If you prefer to contribute code or documentation, first fork the repository, make the appropriate changes and open a pull request to our `master` branch. + +Note that we use Python Poetry to manage dependencies, so if you need to add, remove or update any dependency, make sure to use the proper [`poetry`](https://python-poetry.org/docs/) commands so that the changes are recorded in the `pyproject.toml` and `poetry.lock` files. + +Moreover, before opening a pull request, make sure the following have been taken care of: + +- The `black` formatter was run: +```bash +poetry run black . +``` + +- The code is conformant with `flake8`: +```bash +poetry run flake8 . +``` + +- The tests are still passing: +```bash +poetry run pytest tests/ +``` + +### Note to Windows users + +In some cases, Windows uses CRLF as the end-of-line character instead of LF, which is the norm on Unix-based systems. This erroneously makes git think that a whole file was changed when it is saved on different operating systems.
+ +To alleviate this issue, we recommend that Windows users do one of the following: + +- When installing Git for Windows, choose the option "Checkout Windows-style, commit Unix-style line endings" [(see this StackOverflow answer)](https://stackoverflow.com/questions/1889559/git-diff-to-ignore-m) + +- If Git is already installed, run the following in the LoggiBUD repository before making any commit: + +```bash +git config core.whitespace cr-at-eol +``` diff --git a/loggibud/v1/baselines/run_task1.py b/loggibud/v1/baselines/run_task1.py index 258679c..4a483ed 100644 --- a/loggibud/v1/baselines/run_task1.py +++ b/loggibud/v1/baselines/run_task1.py @@ -27,7 +27,9 @@ # Load method from path. module = importlib.import_module(args.module) method = getattr(module, args.method) - params_class = getattr(module, args.params_class) if args.params_class else None + params_class = ( + getattr(module, args.params_class) if args.params_class else None + ) # Load instance and heuristic params. path = Path(args.instances) diff --git a/loggibud/v1/baselines/shared/ortools.py b/loggibud/v1/baselines/shared/ortools.py index b7810a9..7b3ba69 100644 --- a/loggibud/v1/baselines/shared/ortools.py +++ b/loggibud/v1/baselines/shared/ortools.py @@ -68,13 +68,17 @@ def solve( model = pywrapcp.RoutingModel(manager) # Unwrap the size index for every point. - sizes = np.array([0] + [d.size for d in instance.deliveries], dtype=np.int32) + sizes = np.array( + [0] + [d.size for d in instance.deliveries], dtype=np.int32 + ) def capacity_callback(src): src = manager.IndexToNode(src) return sizes[src] - capacity_callback_index = model.RegisterUnaryTransitCallback(capacity_callback) + capacity_callback_index = model.RegisterUnaryTransitCallback( + capacity_callback + ) model.AddDimension( capacity_callback_index, 0, instance.vehicle_capacity, True, "Capacity" ) @@ -84,7 +88,9 @@ def capacity_callback(src): # Compute the distance matrix between points.
logger.info("Computing distance matrix.") - distance_matrix = (calculate_distance_matrix_m(locations) * 10).astype(np.int32) + distance_matrix = (calculate_distance_matrix_m(locations) * 10).astype( + np.int32 + ) def distance_callback(src, dst): x = manager.IndexToNode(src) @@ -97,7 +103,9 @@ def distance_callback(src, dst): search_parameters = pywrapcp.DefaultRoutingSearchParameters() search_parameters.first_solution_strategy = params.first_solution_strategy - search_parameters.local_search_metaheuristic = params.local_search_metaheuristic + search_parameters.local_search_metaheuristic = ( + params.local_search_metaheuristic + ) if params.solution_limit: search_parameters.solution_limit = params.solution_limit diff --git a/loggibud/v1/baselines/task1/kmeans_aggregation_ortools.py b/loggibud/v1/baselines/task1/kmeans_aggregation_ortools.py index d22e83e..638ed2d 100644 --- a/loggibud/v1/baselines/task1/kmeans_aggregation_ortools.py +++ b/loggibud/v1/baselines/task1/kmeans_aggregation_ortools.py @@ -21,7 +21,12 @@ import numpy as np from sklearn.cluster import MiniBatchKMeans -from loggibud.v1.types import CVRPInstance, CVRPSolution, CVRPSolutionVehicle, Delivery +from loggibud.v1.types import ( + CVRPInstance, + CVRPSolution, + CVRPSolutionVehicle, + Delivery, +) from ..shared.ortools import solve_cvrp as ortools_solve, ORToolsParams @@ -64,7 +69,9 @@ def solve( logger.info(f"Clustering instance into {num_clusters} subinstances") clustering = MiniBatchKMeans(num_clusters, random_state=params.seed) - points = np.array([[d.point.lng, d.point.lat] for d in instance.deliveries]) + points = np.array( + [[d.point.lng, d.point.lat] for d in instance.deliveries] + ) clusters = clustering.fit_predict(points) delivery_array = np.array(instance.deliveries) @@ -123,7 +130,11 @@ def aggregate_deliveries(idx, deliveries): deliveries=[ d for v in solve_cluster( - [d for groups in v.deliveries for d in subsolutions[int(groups.id)]] + [ + d + for groups in v.deliveries + for d in subsolutions[int(groups.id)] + ] ) for d in v ], diff --git a/loggibud/v1/baselines/task1/kmeans_partition_ortools.py b/loggibud/v1/baselines/task1/kmeans_partition_ortools.py index d098cec..6109f17 100644 --- a/loggibud/v1/baselines/task1/kmeans_partition_ortools.py +++ b/loggibud/v1/baselines/task1/kmeans_partition_ortools.py @@ -59,13 +59,17 @@ def solve( num_deliveries = len(instance.deliveries) num_clusters = int( params.fixed_num_clusters - or np.ceil(num_deliveries / (params.variable_num_clusters or num_deliveries)) + or np.ceil( + num_deliveries / (params.variable_num_clusters or num_deliveries) + ) ) logger.info(f"Clustering instance into {num_clusters} subinstances") clustering = KMeans(num_clusters, random_state=params.seed) - points = np.array([[d.point.lng, d.point.lat] for d in instance.deliveries]) + points = np.array( + [[d.point.lng, d.point.lat] for d in instance.deliveries] + ) clusters = clustering.fit_predict(points) delivery_array = np.array(instance.deliveries) diff --git a/loggibud/v1/baselines/task1/lkh_3.py b/loggibud/v1/baselines/task1/lkh_3.py index b80a715..1a7a9d1 100644 --- a/loggibud/v1/baselines/task1/lkh_3.py +++ b/loggibud/v1/baselines/task1/lkh_3.py @@ -16,7 +16,10 @@ import numpy as np from loggibud.v1.types import ( - CVRPInstance, CVRPSolution, CVRPSolutionVehicle, JSONDataclassMixin + CVRPInstance, + CVRPSolution, + CVRPSolutionVehicle, + JSONDataclassMixin, ) from loggibud.v1.data_conversion import to_tsplib diff --git a/loggibud/v1/baselines/task2/kmeans_greedy.py 
b/loggibud/v1/baselines/task2/kmeans_greedy.py index 0e5a4a6..8c8129a 100644 --- a/loggibud/v1/baselines/task2/kmeans_greedy.py +++ b/loggibud/v1/baselines/task2/kmeans_greedy.py @@ -16,7 +16,12 @@ from sklearn.cluster import KMeans from tqdm import tqdm -from loggibud.v1.types import Delivery, CVRPInstance, CVRPSolution, CVRPSolutionVehicle +from loggibud.v1.types import ( + Delivery, + CVRPInstance, + CVRPSolution, + CVRPSolutionVehicle, +) from loggibud.v1.baselines.shared.ortools import ( solve as ortools_solve, ORToolsParams, @@ -67,7 +72,9 @@ def pretrain( num_deliveries = len(points) num_clusters = int( params.fixed_num_clusters - or np.ceil(num_deliveries / (params.variable_num_clusters or num_deliveries)) + or np.ceil( + num_deliveries / (params.variable_num_clusters or num_deliveries) + ) ) logger.info(f"Clustering instance into {num_clusters} subinstances") @@ -80,13 +87,17 @@ def pretrain( ) -def finetune(model: KMeansGreedyModel, instance: CVRPInstance) -> KMeansGreedyModel: +def finetune( + model: KMeansGreedyModel, instance: CVRPInstance +) -> KMeansGreedyModel: """Prepare the model for one particular instance.""" return KMeansGreedyModel( params=model.params, clustering=model.clustering, - cluster_subsolutions={i: [] for i in range(model.clustering.n_clusters)}, + cluster_subsolutions={ + i: [] for i in range(model.clustering.n_clusters) + }, # Just fill some random instance. subinstance=instance, ) @@ -95,12 +106,17 @@ def finetune(model: KMeansGreedyModel, instance: CVRPInstance) -> KMeansGreedyMo def route(model: KMeansGreedyModel, delivery: Delivery) -> KMeansGreedyModel: """Route a single delivery using the model instance.""" - cluster = model.clustering.predict([[delivery.point.lng, delivery.point.lat]])[0] + cluster = model.clustering.predict( + [[delivery.point.lng, delivery.point.lat]] + )[0] subsolution = model.cluster_subsolutions[cluster] def is_feasible(route): - return route.occupation + delivery.size < model.subinstance.vehicle_capacity + return ( + route.occupation + delivery.size + < model.subinstance.vehicle_capacity + ) # TODO: We could make this method faster by using a route size table, but seems a bit # overkill since it's not a bottleneck. @@ -114,7 +130,9 @@ def is_feasible(route): route_idx, route = max(feasible_routes, key=lambda v: v[1].occupation) else: - route = CVRPSolutionVehicle(origin=model.subinstance.origin, deliveries=[]) + route = CVRPSolutionVehicle( + origin=model.subinstance.origin, deliveries=[] + ) subsolution.append(route) route_idx = len(subsolution) - 1 @@ -146,7 +164,9 @@ def finish(instance: CVRPInstance, model: KMeansGreedyModel) -> CVRPSolution: return CVRPSolution( name=instance.name, - vehicles=[v for subsolution in subsolutions for v in subsolution.vehicles], + vehicles=[ + v for subsolution in subsolutions for v in subsolution.vehicles + ], ) @@ -179,11 +199,15 @@ def solve_instance( # Load instance and heuristic params. 
eval_path = Path(args.eval_instances) eval_path_dir = eval_path if eval_path.is_dir() else eval_path.parent - eval_files = [eval_path] if eval_path.is_file() else list(eval_path.iterdir()) + eval_files = ( + [eval_path] if eval_path.is_file() else list(eval_path.iterdir()) + ) train_path = Path(args.train_instances) train_path_dir = train_path if train_path.is_dir() else train_path.parent - train_files = [train_path] if train_path.is_file() else list(train_path.iterdir()) + train_files = ( + [train_path] if train_path.is_file() else list(train_path.iterdir()) + ) # params = params_class.from_file(args.params) if args.params else None diff --git a/loggibud/v1/baselines/task2/qrp_sweep.py b/loggibud/v1/baselines/task2/qrp_sweep.py index d64dce1..03aacac 100644 --- a/loggibud/v1/baselines/task2/qrp_sweep.py +++ b/loggibud/v1/baselines/task2/qrp_sweep.py @@ -49,7 +49,10 @@ from tqdm import tqdm from loggibud.v1.types import ( - Delivery, CVRPInstance, CVRPSolution, CVRPSolutionVehicle + Delivery, + CVRPInstance, + CVRPSolution, + CVRPSolutionVehicle, ) from loggibud.v1.baselines.shared.ortools import ( solve as ortools_solve, @@ -99,8 +102,7 @@ def predict(self, delivery: Delivery) -> int: """ point_translated = ( - np.array([delivery.point.lng, delivery.point.lat]) - - self.center + np.array([delivery.point.lng, delivery.point.lat]) - self.center ) angle = np.arctan2(point_translated[1], point_translated[0]) diff --git a/loggibud/v1/data_conversion.py b/loggibud/v1/data_conversion.py index 0708975..776b213 100644 --- a/loggibud/v1/data_conversion.py +++ b/loggibud/v1/data_conversion.py @@ -53,10 +53,7 @@ def to_tsplib( tspfile += "\n" # Demand section - tspfile += ( - "DEMAND_SECTION\n" - "1 0\n" - ) + tspfile += "DEMAND_SECTION\n" "1 0\n" tspfile += "\n".join( f"{i} {delivery.size}" for i, delivery in enumerate(instance.deliveries, start=2) @@ -64,25 +61,22 @@ def to_tsplib( tspfile += "\n" # Depot section: ensure node 1 is the depot (-1 to terminate the list) - tspfile += ( - "DEPOT_SECTION\n" - "1\n" - "-1\n" - ) + tspfile += "DEPOT_SECTION\n" "1\n" "-1\n" # Edge section: # Compute distance matrix locations = [instance.origin] + [ delivery.point for delivery in instance.deliveries ] - distance_matrix = ( - calculate_distance_matrix_m(locations) * 10 - ).astype(np.int32) + distance_matrix = (calculate_distance_matrix_m(locations) * 10).astype( + np.int32 + ) tspfile += "EDGE_WEIGHT_SECTION\n" def print_row(row): return " ".join(str(el) for el in row) + tspfile += "\n".join(print_row(row) for row in distance_matrix) if not file_name: diff --git a/loggibud/v1/distances.py b/loggibud/v1/distances.py index bebbdf0..b002344 100644 --- a/loggibud/v1/distances.py +++ b/loggibud/v1/distances.py @@ -24,7 +24,9 @@ def calculate_distance_matrix_m( if len(points) < 2: return 0 - coords_uri = ";".join(["{},{}".format(point.lng, point.lat) for point in points]) + coords_uri = ";".join( + ["{},{}".format(point.lng, point.lat) for point in points] + ) response = requests.get( f"{config.host}/table/v1/driving/{coords_uri}?annotations=distance", @@ -44,7 +46,9 @@ def calculate_route_distance_m( if len(points) < 2: return 0 - coords_uri = ";".join("{},{}".format(point.lng, point.lat) for point in points) + coords_uri = ";".join( + "{},{}".format(point.lng, point.lat) for point in points + ) response = requests.get( f"{config.host}/route/v1/driving/{coords_uri}?annotations=distance&continue_straight=false", @@ -57,7 +61,7 @@ def calculate_route_distance_m( def calculate_distance_matrix_great_circle_m( - 
points: Iterable[Point] + points: Iterable[Point], ) -> np.ndarray: """Distance matrix using the Great Circle distance This is an Euclidean-like distance but on spheres [1]. In this case it is @@ -88,16 +92,17 @@ def calculate_distance_matrix_great_circle_m( delta_sigma = np.arctan2( np.sqrt( - (np.cos(phi2) * np.sin(delta_lambda))**2 + (np.cos(phi2) * np.sin(delta_lambda)) ** 2 + ( np.cos(phi1) * np.sin(phi2) - np.sin(phi1) * np.cos(phi2) * np.cos(delta_lambda) - )**2 + ) + ** 2 ), ( np.sin(phi1) * np.sin(phi2) + np.cos(phi1) * np.cos(phi2) * np.cos(delta_lambda) - ) + ), ) return EARTH_RADIUS_METERS * delta_sigma diff --git a/loggibud/v1/eval/task1.py b/loggibud/v1/eval/task1.py index 9d97006..bb1b271 100644 --- a/loggibud/v1/eval/task1.py +++ b/loggibud/v1/eval/task1.py @@ -12,7 +12,9 @@ def evaluate_solution(instance: CVRPInstance, solution: CVRPSolution): assert solution_demands == set(instance.deliveries) # Check if max capacity is respected. - max_capacity = max(sum(d.size for d in v.deliveries) for v in solution.vehicles) + max_capacity = max( + sum(d.size for d in v.deliveries) for v in solution.vehicles + ) assert max_capacity <= instance.vehicle_capacity # Check if maximum number of origins is consistent. @@ -60,6 +62,8 @@ def evaluate_solution(instance: CVRPInstance, solution: CVRPSolution): stems = instances.keys() - results = [evaluate_solution(instances[stem], solutions[stem]) for stem in stems] + results = [ + evaluate_solution(instances[stem], solutions[stem]) for stem in stems + ] print(sum(results)) diff --git a/loggibud/v1/instance_generation/generate.py b/loggibud/v1/instance_generation/generate.py index b6aec18..cad9353 100644 --- a/loggibud/v1/instance_generation/generate.py +++ b/loggibud/v1/instance_generation/generate.py @@ -44,7 +44,6 @@ max_hubs=2, save_to="./data/delivery-instances-1.0", ), - } @@ -83,4 +82,4 @@ cvrp_config = CVRP_CONFIGS.get(instance) if cvrp_config: - generate_cvrp_subinstances(cvrp_config, delivery_result) \ No newline at end of file + generate_cvrp_subinstances(cvrp_config, delivery_result) diff --git a/loggibud/v1/instance_generation/generators.py b/loggibud/v1/instance_generation/generators.py index 90d4a0c..8f13e34 100644 --- a/loggibud/v1/instance_generation/generators.py +++ b/loggibud/v1/instance_generation/generators.py @@ -88,7 +88,9 @@ def new_point(polygon): while True: # Generate using a uniform distribution inside the bounding box. minx, miny, maxx, maxy = polygon.bounds - p = ShapelyPoint(random.uniform(minx, maxx), random.uniform(miny, maxy)) + p = ShapelyPoint( + random.uniform(minx, maxx), random.uniform(miny, maxy) + ) # If is contained, return. if polygon.contains(p): @@ -199,7 +201,9 @@ def generate_cvrp_subinstances( # Compute the number of deliveries in every cluster. cluster_weights = Counter(clusters) - demands = np.array([cluster_weights[i] for i in range(config.num_clusters)]) + demands = np.array( + [cluster_weights[i] for i in range(config.num_clusters)] + ) # Compute the street distance between points. logger.info("Computing distances between clusters.") @@ -219,7 +223,8 @@ def generate_cvrp_subinstances( # Map every cluster into a hub. 
hub_allocations = { - i: [j for j, a in enumerate(row) if a] for i, row in enumerate(allocations) + i: [j for j, a in enumerate(row) if a] + for i, row in enumerate(allocations) } def aggregate_subinstances(instance): @@ -233,14 +238,20 @@ def aggregate_subinstances(instance): cluster_deliveries = { key: [d for _, d in group] for key, group in itertools.groupby( - sorted(zip(cluster_index, instance.deliveries), key=lambda v: v[0]), + sorted( + zip(cluster_index, instance.deliveries), key=lambda v: v[0] + ), key=lambda v: v[0], ) } # Aggregate clusters into subinstances according to the hub assignment. subinstance_deliveries = [ - [d for cluster in clusters for d in cluster_deliveries.get(cluster, [])] + [ + d + for cluster in clusters + for d in cluster_deliveries.get(cluster, []) + ] for hub_cluster, clusters in hub_allocations.items() if clusters ] diff --git a/loggibud/v1/instance_generation/preprocessing.py b/loggibud/v1/instance_generation/preprocessing.py index f177958..3fa8630 100644 --- a/loggibud/v1/instance_generation/preprocessing.py +++ b/loggibud/v1/instance_generation/preprocessing.py @@ -70,7 +70,9 @@ def int_or_zero(s): ) # Sector code to string. - census_income_df["code_tract"] = census_income_df.Cod_setor.apply(lambda s: str(s)) + census_income_df["code_tract"] = census_income_df.Cod_setor.apply( + lambda s: str(s) + ) # Total income (V002) to int removing empty fields. census_income_df["total_income"] = census_income_df.V002.apply(int_or_zero) @@ -94,7 +96,9 @@ def prepare_census_data(instance_name): census_geo_df = load_geodata_per_sector(INSTANCE_UF[instance_name]) census_income_df = load_income_per_sector(INSTANCE_UF[instance_name]) - tract_df = pd.merge(left=census_geo_df, right=census_income_df, on="code_tract") + tract_df = pd.merge( + left=census_geo_df, right=census_income_df, on="code_tract" + ) municipalities = MUNICIPALITIES[instance_name] tract_df = tract_df[tract_df.name_muni.str.lower().isin(municipalities)] diff --git a/loggibud/v1/types.py b/loggibud/v1/types.py index 5a881f1..e3113f2 100644 --- a/loggibud/v1/types.py +++ b/loggibud/v1/types.py @@ -99,7 +99,9 @@ class CVRPSolutionVehicle: @property def circuit(self) -> List[Point]: - return [self.origin] + [d.point for d in self.deliveries] + [self.origin] + return ( + [self.origin] + [d.point for d in self.deliveries] + [self.origin] + ) @property def occupation(self) -> int: diff --git a/poetry.lock b/poetry.lock index 3840649..998b1a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -9,6 +9,14 @@ python-versions = "*" [package.dependencies] six = "*" +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "appnope" version = "0.1.2" @@ -47,6 +55,29 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "black" +version = "21.4b0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +appdirs = "*" +click = ">=7.1.2" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.6,<1" +regex = ">=2020.1.8" +toml = ">=0.10.1" +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] +python2 = ["typed-ast (>=1.4.2)"] + [[package]] name = "branca" version = "0.4.2" @@ -380,6 +411,14 @@ six = "*" testing = ["pytest", "coverage", "astroid (>=1.5.3,<1.6.0)", "pylint (>=1.7.2,<1.8.0)", "astroid (>=2.0)", "pylint (>=2.3.1,<2.4.0)"] yaml = ["PyYAML (>=5.1.0)"] +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "numpy" version = "1.20.2" @@ -439,6 +478,14 @@ python-versions = ">=3.6" qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["docopt", "pytest (<6.0.0)"] +[[package]] +name = "pathspec" +version = "0.8.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "pexpect" version = "4.8.0" @@ -609,6 +656,14 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "regex" +version = "2021.4.4" +description = "Alternative regular expression module, to replace re." +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "requests" version = "2.25.1" @@ -733,6 +788,14 @@ ipython-genutils = "*" [package.extras] test = ["pytest"] +[[package]] +name = "typed-ast" +version = "1.4.3" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "typing-extensions" version = "3.7.4.3" @@ -776,13 +839,17 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [metadata] lock-version = "1.1" python-versions = ">=3.7.1" -content-hash = "5b16d0d182ffd27ef735dd3dbccad6666ac70db8c3e83e64635e9bd9e278b59b" +content-hash = "9f1402ddaadebecc27aac9de8da19d638a3ffbee5b0d28705e09610df58b1bcf" [metadata.files] absl-py = [ {file = "absl-py-0.12.0.tar.gz", hash = "sha256:b44f68984a5ceb2607d135a615999b93924c771238a63920d17d3387b0d229d5"}, {file = "absl_py-0.12.0-py3-none-any.whl", hash = "sha256:afe94e3c751ff81aad55d33ab6e630390da32780110b5af72ae81ecff8418d9e"}, ] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] appnope = [ {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, @@ -799,6 +866,10 @@ backcall = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] +black = [ + {file = "black-21.4b0-py3-none-any.whl", hash = 
"sha256:2db7040bbbbaa46247bfcc05c6efdebd7ebe50c1c3ca745ca6e0f6776438c96c"}, + {file = "black-21.4b0.tar.gz", hash = "sha256:915d916c48646dbe8040d5265cff7111421a60a3dfe7f7e07273176a57c24a34"}, +] branca = [ {file = "branca-0.4.2-py3-none-any.whl", hash = "sha256:62c2e777f074fc1830cd40ba9e650beb941861075980babafead8d97856b1a4b"}, {file = "branca-0.4.2.tar.gz", hash = "sha256:c111453617b17ab2bda60a4cd71787d6f2b59c85cdf71ab160a737606ac66c31"}, @@ -993,6 +1064,10 @@ munch = [ {file = "munch-2.5.0-py2.py3-none-any.whl", hash = "sha256:6f44af89a2ce4ed04ff8de41f70b226b984db10a91dcc7b9ac2efc1c77022fdd"}, {file = "munch-2.5.0.tar.gz", hash = "sha256:2d735f6f24d4dba3417fa448cae40c6e896ec1fdab6cdb5e6510999758a4dbd2"}, ] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] numpy = [ {file = "numpy-1.20.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e9459f40244bb02b2f14f6af0cd0732791d72232bbb0dc4bab57ef88e75f6935"}, {file = "numpy-1.20.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a8e6859913ec8eeef3dbe9aed3bf475347642d1cdd6217c30f28dee8903528e6"}, @@ -1059,6 +1134,10 @@ parso = [ {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, ] +pathspec = [ + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, +] pexpect = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, @@ -1164,6 +1243,49 @@ pytz = [ {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, ] +regex = [ + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = 
"regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = 
"regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, +] requests = [ {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, @@ -1259,6 +1381,38 @@ traitlets = [ {file = "traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, ] +typed-ast = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = 
"typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, +] typing-extensions = [ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, diff --git a/pyproject.toml b/pyproject.toml index d66082e..e5f9273 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,11 @@ mock = "^4.0.3" ipdb = "^0.13.7" pytest-cov = "^2.11.1" flake8 = "^3.9.0" +black = "^21.4b0" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +[tool.black] +line-length = 79