Commit

Merge pull request iobis#160 from ayushanand18/fix-logging-print
[fix] uses `logging` instead of `print`
pieterprovoost authored Sep 11, 2024
2 parents 92b9883 + a46cc06 commit 165fd1b
Showing 4 changed files with 31 additions and 23 deletions.
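Because the progress output now goes through the standard `logging` module instead of bare `print` calls, downstream code can tune or silence it with the usual logging API. A minimal sketch of a user-side script (the script itself is hypothetical; it only relies on the module-level logger added in `pyobis/obisutils.py` below, whose `__name__`-based name places it under the `pyobis` hierarchy):

```python
# Hypothetical user script: silence pyobis' INFO-level progress lines.
import logging

from pyobis import occurrences

# Raise the level for the whole "pyobis" logger hierarchy before querying.
logging.getLogger("pyobis").setLevel(logging.WARNING)

query = occurrences.search(scientificname="Mola mola", size=20)
query.execute()  # runs without the "Fetching: [...]" log records
```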
README.md (2 changes: 1 addition & 1 deletion)
@@ -68,7 +68,7 @@ python -m pip install -e .
# test your installation
python -m pytest
# test and generate a coverage report
-python -m pytest -rxs --cov=pyobis tests
+python -m pytest -rxs --cov=pyobis ./pyobis --vcr-record=none
```

## Documentation
pyobis/checklist/checklist.py (12 changes: 5 additions & 7 deletions)
@@ -10,6 +10,7 @@
build_api_url,
handle_arrint,
handle_arrstr,
+logger,
obis_baseurl,
obis_GET,
)
@@ -55,17 +56,14 @@ def execute(self):
pass

# fetch first 10 records, and print number of estimated records
-print(f"Estimated records: {out['total']}")
-print(
+logger.info(f"Estimated records: {out['total']}")
+logger.info(
"{}[{}{}] {}".format(
"Fetching: ",
"█" * int(len(out["results"]) * 100 / out["total"]),
"." * (100 - int(len(out["results"]) * 100 / out["total"])),
len(out["results"]),
),
-end="\r",
-file=sys.stdout,
-flush=True,
)
# now paginate until the response is null
while True:
@@ -79,7 +77,7 @@
break
out["results"] += res["results"]
# print the progress bar
-print(
+logger.info(
"{}[{}{}] {}".format(
"Fetching: ",
"█" * int(len(out["results"]) * 100 / out["total"]),
@@ -94,7 +92,7 @@
# continue to fetch next 5000 records
i += 5000
# print actual number of fetched records
-print(f"\nFetched {len(out['results'])} records.")
+logger.info(f"\nFetched {len(out['results'])} records.")
else:
out = obis_GET(
self.__url,
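The progress indicator that checklist.py now routes through `logger.info` is a fixed 100-character bar whose filled portion tracks fetched versus total records. A standalone sketch of the same string construction (the `progress_line` helper is illustrative only, not part of the package):

```python
def progress_line(fetched: int, total: int) -> str:
    # One "█" per completed percent, "." for the remainder, then the count so far,
    # mirroring the "{}[{}{}] {}".format(...) call in the hunks above.
    filled = int(fetched * 100 / total)
    return "{}[{}{}] {}".format("Fetching: ", "█" * filled, "." * (100 - filled), fetched)


# progress_line(2500, 10000) yields a bar with 25 filled blocks and 75 dots, ending in "2500"
```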
pyobis/obisutils.py (10 changes: 10 additions & 0 deletions)
@@ -2,12 +2,22 @@
Utility functions for internal use across various modules.
"""

+import logging
from urllib.parse import urlencode

import requests

obis_baseurl = "https://api.obis.org/v3/"

+# export logger, and setup basic configurations
+logger = logging.getLogger(__name__)
+logging.basicConfig(
+encoding="utf-8",
+level=logging.INFO,
+format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+datefmt="%Y-%m-%d %H:%M:%S",
+)
+

class NoResultException(Exception):
"""
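With this hunk, `pyobis/obisutils.py` owns both the shared `logger` and the one-time `basicConfig` call, and the other modules simply import the logger from it. A small sketch of that pattern (the calling code is hypothetical; the record layout follows the `format` and `datefmt` strings above):

```python
# Inside another pyobis module (as the checklist and occurrences hunks do):
from pyobis.obisutils import logger

logger.info("Estimated records: 1200")
# Emitted roughly as:
# 2024-09-11 12:00:00 - pyobis.obisutils - INFO - Estimated records: 1200
```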
pyobis/occurrences/occurrences.py (30 changes: 15 additions & 15 deletions)
@@ -3,7 +3,6 @@
"""

import json
-import sys
import warnings
from time import time
from urllib.parse import urlencode
@@ -15,6 +14,7 @@
build_api_url,
handle_arrint,
handle_arrstr,
+logger,
obis_baseurl,
obis_GET,
)
@@ -71,10 +71,10 @@ def __init__(self, url, args, isSearch, hasMapper, isKML):
# the total time can be estimated easily although this might not be accurate
# for larger than 100k records, because the network download takes
# even smaller fraction of the total round-trip time
-print(
-f"{self.__total_records} to be fetched. Estimated time = ",
-(ending_time - starting_time) * 0.995,
-f" {(ending_time - starting_time) * 0.005 * self.__total_records:.0f} ",
+logger.info(
+f"{self.__total_records} to be fetched. Estimated time ="
+f"{(ending_time - starting_time) * 0.995}"
+f"{(ending_time - starting_time) * 0.005 * self.__total_records:.0f} "
"seconds",
)
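For scale, the estimate logged above multiplies the timed initial request (`ending_time - starting_time`) by `0.005 * total_records`; a quick worked example with made-up numbers:

```python
# Made-up numbers, only to illustrate the arithmetic used in __init__ above.
probe_seconds = 0.4  # ending_time - starting_time for the initial request
total_records = 120_000
estimate = probe_seconds * 0.005 * total_records  # 240.0, i.e. about four minutes
```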

@@ -115,17 +115,14 @@ def execute(self, **kwargs):
if "id" not in outdf.columns:
break
self.__args["size"] = 10000
-print(
+logger.info(
"{}[{}{}] {}/{}".format(
"Fetching: ",
"█" * int((i - 1) * 100 / size),
"." * (100 - int((i + 1) * 100 / size)),
i,
size,
),
-end="\r",
-file=sys.stdout,
-flush=True,
)
res = obis_GET(
self.__url,
@@ -146,11 +143,14 @@
self.__args["size"] = size % 10000
# we have already fetched records as a set of 5000 records each time,
# now we need to get remaining records from the total
-print(
-"{}[{}{}] {}/{}".format("Fetching: ", "█" * 100, "." * 0, size, size),
-end="\r",
-file=sys.stdout,
-flush=True,
+logger.info(
+"{}[{}{}] {}/{}".format(
+"Fetching: ",
+"\u2588" * 100,
+"." * 0,
+size,
+size,
+),
)
res = obis_GET(
self.__url,
@@ -162,7 +162,7 @@
[outdf.infer_objects(), pd.DataFrame(res["results"]).infer_objects()],
ignore_index=True,
)
-print(f"\nFetched {size} records.")
+logger.info(f"Fetched {size} records.")

if mof and self.__total_records > 0:
mofNormalized = pd.json_normalize(
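Taken together, the `execute()` hunks fetch `size` records in pages of at most 10,000, log one progress line per page, and concatenate the pages into a single DataFrame. A condensed, hypothetical sketch of that loop (`get_page` and `fetch_all` are stand-ins, not pyobis API):

```python
import logging

import pandas as pd

logger = logging.getLogger(__name__)


def fetch_all(get_page, size, page_size=10_000):
    """Fetch `size` records in pages and return them as one DataFrame."""
    frames, fetched = [], 0
    while fetched < size:
        results = get_page(limit=min(page_size, size - fetched), offset=fetched)
        if not results:  # stop early if the API returns nothing
            break
        frames.append(pd.DataFrame(results))
        fetched += len(results)
        logger.info(f"Fetching: {fetched}/{size}")
    logger.info(f"Fetched {fetched} records.")
    return pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()
```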
