Fix message generation on 5TB size limit
Fixes aws#1842
JordonPhillips committed Mar 21, 2016
1 parent 6f65ad7 commit 8a38e3c
Showing 2 changed files with 24 additions and 2 deletions.
2 changes: 1 addition & 1 deletion awscli/customizations/s3/s3handler.py
@@ -184,7 +184,7 @@ def _enqueue_tasks(self, files):
             if too_large and filename.operation_name == 'upload':
                 warning_message = "File exceeds s3 upload limit of 5 TB."
                 warning = create_warning(relative_path(filename.src),
-                                         message=warning_message)
+                                         warning_message)
                 self.result_queue.put(warning)
             # Warn and skip over glacier incompatible tasks.
             elif not self.params.get('force_glacier_transfer') and \
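
A note on the root cause, as a sketch rather than something taken from the commit itself: create_warning() in awscli.customizations.s3.utils evidently does not accept a keyword argument named message, so building the over-5 TB warning raised a TypeError instead of emitting a warning; passing the text positionally avoids depending on the parameter's name. The parameter name error_message and the file name below are assumptions for illustration only -- see awscli/customizations/s3/utils.py for the real signature.

# Hypothetical stand-in for awscli.customizations.s3.utils.create_warning;
# the parameter name `error_message` is an assumption, not taken from the commit.
def create_warning(path, error_message):
    return "warning: %s %s" % (path, error_message)

warning_message = "File exceeds s3 upload limit of 5 TB."

# Old call site: the keyword name does not match the parameter name, so the
# attempt to generate the warning itself fails with a TypeError.
try:
    create_warning("big-file.bin", message=warning_message)
except TypeError as exc:
    print(exc)

# New call site: pass the message positionally, which works regardless of
# what the second parameter happens to be called.
print(create_warning("big-file.bin", warning_message))
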
24 changes: 23 additions & 1 deletion tests/unit/customizations/s3/test_s3handler.py
@@ -25,7 +25,7 @@
 from awscli.customizations.s3.fileinfo import FileInfo
 from awscli.customizations.s3.tasks import CreateMultipartUploadTask, \
     UploadPartTask, CreateLocalFileTask
-from awscli.customizations.s3.utils import MAX_PARTS
+from awscli.customizations.s3.utils import MAX_PARTS, MAX_UPLOAD_SIZE
 from awscli.customizations.s3.transferconfig import RuntimeConfig
 from tests.unit.customizations.s3 import make_loc_files, clean_loc_files, \
     MockStdIn, S3HandlerBaseTest
@@ -195,6 +195,28 @@ def test_upload_fail(self):
         stdout, stderr, rc = self.run_s3_handler(self.s3_handler, tasks)
         self.assertEqual(rc.num_tasks_failed, 1)
 
+    def test_max_size_limit(self):
+        """
+        This test verifies that we're warning on file uploads which are
+        greater than the max upload size (5TB currently).
+        """
+        tasks = [FileInfo(
+            src=self.loc_files[0],
+            dest=self.bucket + '/test1.txt',
+            compare_key=None,
+            src_type='local',
+            dest_type='s3',
+            operation_name='upload',
+            size=MAX_UPLOAD_SIZE+1,
+            last_update=None,
+            client=self.client
+        )]
+        self.parsed_responses = []
+        _, _, rc = self.run_s3_handler(self.s3_handler, tasks)
+        # The task should *warn*, not fail
+        self.assertEqual(rc.num_tasks_failed, 0)
+        self.assertEqual(rc.num_tasks_warned, 1)
+
     def test_multi_upload(self):
         """
         This test only checks that the multipart upload process works.
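
For context on the test's choice of size=MAX_UPLOAD_SIZE+1, here is a minimal sketch of the size check the handler presumably performs, assuming MAX_UPLOAD_SIZE is 5 TiB expressed in bytes. The constant's authoritative value and the actual comparison live in awscli/customizations/s3/utils.py and s3handler.py; is_too_large below is a hypothetical helper, not AWS CLI code.

# Assumed value: 5 TiB in bytes; the real constant is MAX_UPLOAD_SIZE in
# awscli/customizations/s3/utils.py.
MAX_UPLOAD_SIZE = 5 * 1024 ** 4  # 5,497,558,138,880 bytes

def is_too_large(size):
    # Hypothetical helper mirroring the handler's check: only files strictly
    # larger than the limit should trigger the warning.
    return size > MAX_UPLOAD_SIZE

assert not is_too_large(MAX_UPLOAD_SIZE)     # exactly at the limit: no warning
assert is_too_large(MAX_UPLOAD_SIZE + 1)     # one byte over: warned, as in the test
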
