forked from langchain-ai/langchain
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: packages_yml_get_downloads.py
71 lines (54 loc) · 1.83 KB
/
packages_yml_get_downloads.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
from datetime import datetime, timedelta, timezone
from pathlib import Path
import requests
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
yaml = YAML()
PACKAGE_YML = Path(__file__).parents[2] / "libs" / "packages.yml"
def _get_downloads(p: dict) -> int:
    """Fetch the last-month download count for a package from pypistats.

    Args:
        p: A package mapping containing at least a ``"name"`` key.

    Returns:
        The ``last_month`` download count reported by the pypistats API.

    Raises:
        requests.HTTPError: If the API responds with an error status.
        requests.Timeout: If the request exceeds the timeout.
    """
    url = f"https://pypistats.org/api/packages/{p['name']}/recent?period=month"
    # Fix: the original call had no timeout, so a stalled endpoint would
    # hang this script indefinitely. 30s is generous for a small JSON API.
    r = requests.get(url, timeout=30)
    r.raise_for_status()
    return r.json()["data"]["last_month"]
# Reference timestamps for the refresh window: entries whose counts were
# updated within the last 24 hours are treated as fresh and skipped below.
current_datetime = datetime.now(timezone.utc)
yesterday = current_datetime - timedelta(days=1)
# Load the package registry; ruamel's round-trip loader preserves comments
# and formatting so the file can be rewritten without losing them.
with open(PACKAGE_YML) as f:
    data = yaml.load(f)
def _reorder_keys(p):
    """Return a CommentedMap copy of ``p`` with keys in canonical order.

    Keys absent from ``p`` are simply omitted. Raises ``ValueError`` if
    ``p`` carries any key outside the known set.
    """
    key_order = [
        "name",
        "name_title",
        "path",
        "repo",
        "type",
        "provider_page",
        "js",
        "downloads",
        "downloads_updated_at",
    ]
    # Reject unknown keys up front so schema drift fails loudly.
    unexpected = set(p.keys()) - set(key_order)
    if unexpected:
        raise ValueError(f"Unexpected keys: {unexpected}")
    return CommentedMap((key, p[key]) for key in key_order if key in p)
# Normalize key order on every package entry before refreshing counts.
data["packages"] = [_reorder_keys(p) for p in data["packages"]]
# Track names seen so far to reject duplicate registry entries.
seen = set()
for p in data["packages"]:
    if p["name"] in seen:
        raise ValueError(f"Duplicate package: {p['name']}")
    seen.add(p["name"])
    # Parse the last-refresh timestamp, if any; new entries have none.
    downloads_updated_at_str = p.get("downloads_updated_at")
    downloads_updated_at = (
        datetime.fromisoformat(downloads_updated_at_str)
        if downloads_updated_at_str
        else None
    )
    # Skip entries refreshed within the last 24 hours.
    # NOTE(review): this comparison assumes the stored timestamp is
    # timezone-aware (the write below emits a UTC offset); a naive
    # timestamp would raise TypeError here — confirm no hand-edited
    # entries lack an offset.
    if downloads_updated_at is not None and downloads_updated_at > yesterday:
        print(f"done: {p['name']}: {p['downloads']}")
        continue
    p["downloads"] = _get_downloads(p)
    p["downloads_updated_at"] = current_datetime.isoformat()
    # Checkpoint after every package so progress survives a crash or an
    # API failure partway through the list.
    with open(PACKAGE_YML, "w") as f:
        yaml.dump(data, f)
    print(f"{p['name']}: {p['downloads']}")
# Final write covers the case where every entry was fresh (loop never
# checkpointed) so the reordered keys still get persisted.
with open(PACKAGE_YML, "w") as f:
    yaml.dump(data, f)