@@ -1,12 +1,13 @@
#!/usr/bin/env python2.7
+# pylint: disable=C0301
from __future__ import absolute_import, unicode_literals, print_function, division

from sys import argv
from os import environ, stat, chdir, remove as _delete_file
-from os.path import isfile, dirname, basename, abspath, realpath, expandvars
+from os.path import dirname, basename, abspath, realpath, expandvars
from hashlib import sha256
from subprocess import check_call as run
-from json import load
+from json import load, dump as save
from contextlib import contextmanager
from datetime import datetime

@@ -16,7 +17,7 @@


CONFIG_FILE = './S3Cachefile.json'
-NEED_TO_UPLOAD_MARKER = '.need-to-upload'
+UPLOAD_TODO_FILE = './S3CacheTodo.json'
BYTES_PER_MB = 1024 * 1024

@@ -29,6 +30,24 @@ def timer():
    print("\tDone. Took", int(elapsed.total_seconds()), "second(s).")


+@contextmanager
+def todo_file(writeback=True):
+    try:
+        with open(UPLOAD_TODO_FILE, 'rt') as json_file:
+            todo = load(json_file)
+    except (IOError, OSError, ValueError):
+        todo = {}
+
+    yield todo
+
+    if writeback:
+        try:
+            with open(UPLOAD_TODO_FILE, 'wt') as json_file:
+                save(todo, json_file)
+        except (OSError, IOError) as save_err:
+            print("Error saving {}:".format(UPLOAD_TODO_FILE), save_err)
+
+
def _sha256_of_file(filename):
    hasher = sha256()
    with open(filename, 'rb') as input_file:
@@ -45,6 +64,21 @@ def _delete_file_quietly(filename):
        pass


+def mark_needs_uploading(cache_name):
+    with todo_file() as todo:
+        todo[cache_name] = True
+
+
+def mark_uploaded(cache_name):
+    with todo_file() as todo:
+        todo.pop(cache_name, None)
+
+
+def need_to_upload(cache_name):
+    with todo_file(writeback=False) as todo:
+        return todo.get(cache_name, False)
+
+
def _tarball_size(directory):
    kib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(kib)
@@ -67,14 +101,13 @@ def _extract_tarball(directory):


def download(directory):
-    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
+    mark_uploaded(cache_name)  # reset
    try:
        print("Downloading {} tarball from S3...".format(cache_name))
        with timer():
            key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
-        open(NEED_TO_UPLOAD_MARKER, 'a').close()
-        print(err)
+        mark_needs_uploading(cache_name)
        raise SystemExit("Cached {} download failed!".format(cache_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
@@ -87,7 +120,7 @@ def upload(directory):
    with timer():
        key.set_contents_from_filename(_tarball_filename_for(directory))
    print("{} cache successfully updated.".format(cache_name))
-    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
+    mark_uploaded(cache_name)


if __name__ == '__main__':
@@ -135,7 +168,7 @@ def upload(directory):
    if mode == 'download':
        download(directory)
    elif mode == 'upload':
-        if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
+        if need_to_upload(cache_name):
            upload(directory)
        else:
            print("No need to upload anything.")