s3-cache-cleanup.py
#!/usr/bin/env python3
# Deletes all objects in an S3 bucket that are older than a given number of days.
# Used for cleaning up the custom GitHub Actions cache.
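#
# A typical invocation might look like the following (the values shown are
# placeholders, not real credentials or endpoints):
#
#   AWS_BUCKET_NAME=my-cache-bucket \
#   AWS_ACCESS_KEY_ID=<access key id> \
#   AWS_SECRET_ACCESS_KEY=<secret key> \
#   AWS_ENDPOINT=https://<s3-compatible-endpoint> \
#   ./s3-cache-cleanup.py
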
import boto3
import datetime
import os

# Number of days to retain objects; the bucket name, credentials, and endpoint
# are read from the environment.
days_to_retain = 7
bucket_name = os.environ['AWS_BUCKET_NAME']
access_key = os.environ['AWS_ACCESS_KEY_ID']
secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
endpoint = os.environ['AWS_ENDPOINT']

# Create a connection to the S3 service
s3 = boto3.resource('s3',
    endpoint_url=endpoint,
    aws_access_key_id=access_key,
    aws_secret_access_key=secret_key,
    region_name='auto',
)
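# Note: endpoint_url points boto3 at an S3-compatible service rather than at
# AWS directly; region_name='auto' is what providers such as Cloudflare R2
# accept, whereas a plain AWS S3 bucket would normally use a real region name.
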
bucket = s3.Bucket(bucket_name)

# Calculate the retention cutoff as a timezone-aware UTC datetime, since the
# last_modified timestamps returned by S3 are timezone-aware.
cutoff_date = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=days_to_retain)
print(f'Deleting all objects in bucket {bucket_name} older than {cutoff_date}...')
total_count = 0
deleted_count = 0

for obj in bucket.objects.all():
    total_count += 1
    if obj.last_modified < cutoff_date:
        print(f'Deleting {obj.key}...')
        obj.delete()
        deleted_count += 1

print(f'Complete! Deleted {deleted_count} objects out of a total of {total_count}.')
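
# Note: each obj.delete() above issues one DELETE request. For large buckets a
# batched variant could remove up to 1000 keys per request via the bucket's
# delete_objects() call; a minimal sketch (not used by this script) might be:
#
#   batch = []
#   for obj in bucket.objects.all():
#       if obj.last_modified < cutoff_date:
#           batch.append({'Key': obj.key})
#       if len(batch) == 1000:
#           bucket.delete_objects(Delete={'Objects': batch, 'Quiet': True})
#           batch = []
#   if batch:
#       bucket.delete_objects(Delete={'Objects': batch, 'Quiet': True})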