forked from docker-archive/docker-registry
s3.py · 61 lines (47 loc) · 1.62 KB
import gevent.monkey
gevent.monkey.patch_all()

import cStringIO as StringIO
import logging

import boto.s3.connection
import boto.s3.key

import cache_lru

from boto_base import BotoStorage

logger = logging.getLogger(__name__)


class S3Storage(BotoStorage):

    def __init__(self, config):
        BotoStorage.__init__(self, config)

    def makeConnection(self):
        # Open an S3 connection using the credentials from the registry config.
        return boto.s3.connection.S3Connection(
            self._config.s3_access_key,
            self._config.s3_secret_key,
            is_secure=(self._config.s3_secure is True))

    def makeKey(self, path):
        return boto.s3.key.Key(self._boto_bucket, path)

    @cache_lru.put
    def put_content(self, path, content):
        # Store a small blob in a single request; the decorator caches the write.
        path = self._init_path(path)
        key = self.makeKey(path)
        key.set_contents_from_string(
            content, encrypt_key=(self._config.s3_encrypt is True))
        return path

    def stream_write(self, path, fp):
        # The minimum part size for an S3 multipart upload is 5MB.
        buffer_size = 5 * 1024 * 1024
        if self.buffer_size > buffer_size:
            buffer_size = self.buffer_size
        path = self._init_path(path)
        mp = self._boto_bucket.initiate_multipart_upload(
            path, encrypt_key=(self._config.s3_encrypt is True))
        num_part = 1
        try:
            # Upload the stream in fixed-size parts until it is exhausted.
            while True:
                buf = fp.read(buffer_size)
                if not buf:
                    break
                io = StringIO.StringIO(buf)
                mp.upload_part_from_file(io, num_part)
                num_part += 1
                io.close()
        except IOError:
            pass
        mp.complete_upload()
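
For reference, a minimal usage sketch. The config object below is hypothetical: this file only shows that it exposes s3_access_key, s3_secret_key, s3_secure and s3_encrypt; the bucket name, _boto_bucket and buffer_size are set up by BotoStorage in boto_base, which is not shown here.

# Hypothetical usage sketch -- attribute names beyond the s3_* settings seen
# above are assumptions, not confirmed by this file.
class FakeConfig(object):
    s3_access_key = 'AKIA...'
    s3_secret_key = '...'
    s3_bucket = 'my-registry-bucket'   # assumed to be read by BotoStorage
    s3_secure = True
    s3_encrypt = False

store = S3Storage(FakeConfig())

# Small payloads go through put_content (single PUT, LRU-cached).
store.put_content('images/abc123/json', '{"id": "abc123"}')

# Large layers go through stream_write (S3 multipart upload, 5MB parts).
with open('layer.tar', 'rb') as fp:
    store.stream_write('images/abc123/layer', fp)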