Skip to content

Commit

Permalink
Add and Run precommit hooks
Browse files Browse the repository at this point in the history
  • Loading branch information
leavauchier committed Dec 21, 2023
1 parent cb78adc commit 6d987e8
Show file tree
Hide file tree
Showing 13 changed files with 80 additions and 48 deletions.
2 changes: 2 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[flake8]
max-line-length = 119
16 changes: 16 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
repos:
- repo: https://github.com/ambv/black
rev: 23.12.0
hooks:
- id: black
language_version: python3.11
- repo: https://github.com/pycqa/flake8
rev: 6.1.0
hooks:
- id: flake8
- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
- id: isort
name: isort (python)
args: ["--profile", "black"]
49 changes: 33 additions & 16 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -6,41 +6,58 @@
.ONESHELL:
SHELL = /bin/bash

deploy: check
twine upload dist/*

check: dist/ign-pdal-tool*.tar.gz
twine check dist/*
##############################
# Install
##############################

dist/ign-pdal-tool*.tar.gz:
python -m build
mamba-env-create:
mamba env create -n pdaltools -f environment.yml

build: clean
python -m build
mamba-env-update:
mamba env update -n pdaltools -f environment.yml

install:
pip install -e .


##############################
# Dev/Contrib tools
##############################

testing:
python -m pytest ./test -s --log-cli-level DEBUG -m "not geoportail"

testing_full:
python -m pytest ./test -s --log-cli-level DEBUG

install-precommit:
pre-commit install


##############################
# Build/deploy pip lib
##############################

deploy: check
twine upload dist/*

check: dist/ign-pdal-tool*.tar.gz
twine check dist/*

dist/ign-pdal-tool*.tar.gz:
python -m build

build: clean
python -m build

clean:
rm -rf tmp
rm -rf ign_pdal_tools.egg-info
rm -rf dist

mamba-env-create:
mamba env create -n pdaltools -f environment.yml

mamba-env-update:
mamba env update -n pdaltools -f environment.yml


##############################
# Docker
# Build/deploy Docker image
##############################

PROJECT_NAME=ignimagelidar/ign-pdal-tools
Expand Down
8 changes: 5 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,18 +62,20 @@ Every time the code is changed, think of updating the version file: [pdaltools/_

Please log your changes in [CHANGELOG.md](CHANGELOG.md)

Before committing your changes, run the pre-commit hooks. They can be installed so that they run automatically with `make install-precommit`

## Tests

Create the conda environment: `./script/createCondaEnv.sh`
Create the conda environment: `make mamba-env-create`

Run unit tests: `./script/test.sh`
Run unit tests: `make testing`

## Pip package

To generate a pip package and deploy it on pypi, use the [Makefile](Makefile) at the root of the repo:

* `make build`: build the library
* `make install`: instal the library in an editable way (`pip -e`)
* `make install`: install the library in an editable way (`pip -e`)
* `make deploy` : deploy it on pypi

## Docker image
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def compute_count_one_file(filepath: str, attribute: str = "Classification") ->
# Try to prettify the value by converting it to an integer (e.g. for Classification, which
# returns values such as 1.0000 instead of 1 or 1.)
counts = Counter({str(int(float(value))): int(count) for value, count in split_counts})
except ValueError as e:
except ValueError:
# in case value is not a number, float(value) returns a ValueError
# fallback: use the raw value
counts = Counter({value: int(count) for value, count in split_counts})
Expand Down
6 changes: 3 additions & 3 deletions pdaltools/las_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def las_info_pipeline(filename: str, spatial_ref: str = "EPSG:2154"):
# Extract metadata
metadata = pipeline.metadata

if type(metadata) == str:
if isinstance(metadata, str):
metadata = json.loads(metadata)

return metadata["metadata"]["filters.info"]
Expand Down Expand Up @@ -94,10 +94,10 @@ def las_get_xy_bounds(filename: str, buffer_width: int = 0, spatial_ref: str = "
metadata = las_info_metadata(filename)
bounds_dict = metadata["bounds"]

except RuntimeError as e:
except RuntimeError:
metadata = las_info_pipeline(filename, spatial_ref)
bounds_dict = metadata["bbox"]
if type(metadata) == str:
if isinstance(metadata, str):
metadata = json.loads(metadata)
# Export bounds (maxx, maxy, minx and miny), then create a buffer of buffer_width m
_x.append(float((bounds_dict["minx"]) - buffer_width)) # coordinate minX
Expand Down
7 changes: 2 additions & 5 deletions test/count_occurences/test_count_occurences_for_attribute.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,7 @@
import shutil
from collections import Counter

import pytest

from pdaltools.count_occurences.count_occurences_for_attribute import \
compute_count
from pdaltools.count_occurences.count_occurences_for_attribute import compute_count

test_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # File is in subdirectory
tmp_path = os.path.join(test_path, "tmp")
Expand Down Expand Up @@ -51,7 +48,7 @@ def test_count_by_attribute_values_with_json():


def test_count_by_attribute_values_one_file():
count = compute_count([single_input_file], attribute, output_file)
compute_count([single_input_file], attribute, output_file)
with open(counts_single_json, "r") as f:
expected = Counter(json.load(f))
with open(output_file, "r") as f:
Expand Down
2 changes: 0 additions & 2 deletions test/count_occurences/test_merge_occurences_counts.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@
import shutil
from collections import Counter

import pytest

from pdaltools.count_occurences.merge_occurences_counts import merge_counts

test_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # File is in subdirectory
Expand Down
5 changes: 2 additions & 3 deletions test/test_las_add_buffer.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

import laspy
import numpy as np
import pytest

from pdaltools.las_add_buffer import create_las_with_buffer

Expand Down Expand Up @@ -35,7 +34,7 @@ def setup_module(module):
os.mkdir(tmp_path)


## Utils functions
# Utils functions
def get_nb_points(path):
"""Get number of points in a las file"""
with laspy.open(path) as f:
Expand All @@ -53,7 +52,7 @@ def get_2d_bounding_box(path):
return mins[:2], maxs[:2]


## Tests
# Tests
def test_create_las_with_buffer():
buffer_width = 10
create_las_with_buffer(
Expand Down
5 changes: 2 additions & 3 deletions test/test_las_clip.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

import laspy
import numpy as np
import pytest

from pdaltools.las_clip import las_crop

Expand Down Expand Up @@ -34,7 +33,7 @@ def setup_module(module):
os.mkdir(tmp_path)


## Utils functions
# Utils functions
def get_nb_points(path):
"""Get number of points in a las file"""
with laspy.open(path) as f:
Expand All @@ -52,7 +51,7 @@ def get_2d_bounding_box(path):
return mins[:2], maxs[:2]


## Tests
# Tests
def test_las_crop():
bounds = ([expected_out_mins[0], expected_out_maxs[0]], [expected_out_mins[1], expected_out_maxs[1]])
las_crop(input_file, output_file, bounds)
Expand Down
6 changes: 2 additions & 4 deletions test/test_las_merge.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
import logging
import os
import shutil

import laspy
import numpy as np
import pytest

from pdaltools.las_merge import las_merge

Expand Down Expand Up @@ -34,7 +32,7 @@
# os.mkdir(tmp_path)


## Utils functions
# Utils functions
def get_nb_points(path):
"""Get number of points in a las file"""
with laspy.open(path) as f:
Expand All @@ -52,7 +50,7 @@ def get_2d_bounding_box(path):
return mins[:2], maxs[:2]


## Tests
# Tests
def test_las_merge():
las_merge(input_dir, input_file, output_file, tile_width=tile_width, tile_coord_scale=tile_coord_scale)

Expand Down
11 changes: 7 additions & 4 deletions test/test_replace_attribute_in_las.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,14 @@

import pytest

from pdaltools.count_occurences.count_occurences_for_attribute import \
compute_count_one_file
from pdaltools.count_occurences.count_occurences_for_attribute import (
compute_count_one_file,
)
from pdaltools.replace_attribute_in_las import (
parse_replacement_map_from_path_or_json_string, replace_values,
replace_values_clean)
parse_replacement_map_from_path_or_json_string,
replace_values,
replace_values_clean,
)
from pdaltools.standardize_format import get_writer_parameters

test_path = os.path.dirname(os.path.abspath(__file__))
Expand Down
9 changes: 5 additions & 4 deletions test/test_standardize_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,7 @@
import pdal
import pytest

from pdaltools.standardize_format import (exec_las2las, rewrite_with_pdal,
standardize)
from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize

# Note: tile 77050_627760 is cropped to simulate missing data in neighbors during merge
test_path = os.path.dirname(os.path.abspath(__file__))
Expand Down Expand Up @@ -46,7 +45,7 @@ def _test_standardize_format_one_params_set(params):
metadata = json_info["summary"]["metadata"][1]
else:
metadata = json_info["summary"]["metadata"]
assert metadata["compressed"] == True
assert metadata["compressed"] is True
assert metadata["minor_version"] == 4
assert metadata["global_encoding"] == 17
assert metadata["dataformat_id"] == params["dataformat_id"]
Expand Down Expand Up @@ -90,7 +89,9 @@ def test_exec_las2las_error():

def test_standardize_does_NOT_produce_any_warning_with_Lasinfo():
# bad file on the store (44 Mo)
# input_file = "/var/data/store-lidarhd/developpement/standaLAS/demo_standardization/Semis_2022_0584_6880_LA93_IGN69.laz"
# input_file = (
# "/var/data/store-lidarhd/developpement/standaLAS/demo_standardization/Semis_2022_0584_6880_LA93_IGN69.laz"
# )

input_file = os.path.join(test_path, "data/classified_laz/test_data_77050_627755_LA93_IGN69.laz")
output_file = os.path.join(tmp_path, "test_standardize_produce_no_warning_with_lasinfo.las")
Expand Down

0 comments on commit 6d987e8

Please sign in to comment.