path: root/contrib/python/s3transfer/py2/tests/unit/test_s3transfer.py
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import tempfile
import shutil
import socket
from __tests__ import unittest
from contextlib import closing

import mock
from botocore.compat import six
from concurrent import futures

from s3transfer.exceptions import RetriesExceededError
from s3transfer.exceptions import S3UploadFailedError
from s3transfer import ReadFileChunk, StreamReaderProgress
from s3transfer import S3Transfer
from s3transfer import OSUtils, TransferConfig
from s3transfer import MultipartDownloader, MultipartUploader
from s3transfer import ShutdownQueue
from s3transfer import QueueShutdownError
from s3transfer import random_file_extension
from s3transfer import disable_upload_callbacks, enable_upload_callbacks


class InMemoryOSLayer(OSUtils):
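    # An in-memory stand-in for OSUtils: "files" live in the filemap dict so
    # the tests below never have to touch the real filesystem.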
    def __init__(self, filemap):
        self.filemap = filemap

    def get_file_size(self, filename):
        return len(self.filemap[filename])

    def open_file_chunk_reader(self, filename, start_byte, size, callback):
        return closing(six.BytesIO(self.filemap[filename]))

    def open(self, filename, mode):
        if 'wb' in mode:
            fileobj = six.BytesIO()
            self.filemap[filename] = fileobj
            return closing(fileobj)
        else:
            return closing(self.filemap[filename])

    def remove_file(self, filename):
        if filename in self.filemap:
            del self.filemap[filename]

    def rename_file(self, current_filename, new_filename):
        if current_filename in self.filemap:
            self.filemap[new_filename] = self.filemap.pop(
                current_filename)


class SequentialExecutor(object):
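    # Stand-in for a concurrent.futures executor that runs every submitted
    # piece of work synchronously in the calling thread, which keeps the
    # tests deterministic and easy to assert against.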
    def __init__(self, max_workers):
        pass

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        pass

    # The real map() interface actually takes *args, but we specifically do
    # _not_ use this interface.
    def map(self, function, args):
        results = []
        for arg in args:
            results.append(function(arg))
        return results

    def submit(self, function):
        future = futures.Future()
        future.set_result(function())
        return future


class TestOSUtils(unittest.TestCase):
    def setUp(self):
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def test_get_file_size(self):
        with mock.patch('os.path.getsize') as m:
            OSUtils().get_file_size('myfile')
            m.assert_called_with('myfile')

    def test_open_file_chunk_reader(self):
        with mock.patch('s3transfer.ReadFileChunk') as m:
            OSUtils().open_file_chunk_reader('myfile', 0, 100, None)
            m.from_filename.assert_called_with('myfile', 0, 100,
                                               None, enable_callback=False)

    def test_open_file(self):
        fileobj = OSUtils().open(os.path.join(self.tempdir, 'foo'), 'w')
        self.assertTrue(hasattr(fileobj, 'write'))

    def test_remove_file_ignores_errors(self):
        with mock.patch('os.remove') as remove:
            remove.side_effect = OSError('fake error')
            OSUtils().remove_file('foo')
        remove.assert_called_with('foo')

    def test_remove_file_proxies_remove_file(self):
        with mock.patch('os.remove') as remove:
            OSUtils().remove_file('foo')
            remove.assert_called_with('foo')

    def test_rename_file(self):
        with mock.patch('s3transfer.compat.rename_file') as rename_file:
            OSUtils().rename_file('foo', 'newfoo')
            rename_file.assert_called_with('foo', 'newfoo')


class TestReadFileChunk(unittest.TestCase):
    def setUp(self):
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def test_read_entire_chunk(self):
        filename = os.path.join(self.tempdir, 'foo')
        with open(filename, 'wb') as f:
            f.write(b'onetwothreefourfivesixseveneightnineten')
        chunk = ReadFileChunk.from_filename(
            filename, start_byte=0, chunk_size=3)
        self.assertEqual(chunk.read(), b'one')
        self.assertEqual(chunk.read(), b'')

    def test_read_with_amount_size(self):
        filename = os.path.join(self.tempdir, 'foo')
        with open(filename, 'wb') as f:
            f.write(b'onetwothreefourfivesixseveneightnineten')
        chunk = ReadFileChunk.from_filename(
            filename, start_byte=11, chunk_size=4)
        self.assertEqual(chunk.read(1), b'f')
        self.assertEqual(chunk.read(1), b'o')
        self.assertEqual(chunk.read(1), b'u')
        self.assertEqual(chunk.read(1), b'r')
        self.assertEqual(chunk.read(1), b'')

    def test_reset_stream_emulation(self):
        filename = os.path.join(self.tempdir, 'foo')
        with open(filename, 'wb') as f:
            f.write(b'onetwothreefourfivesixseveneightnineten')
        chunk = ReadFileChunk.from_filename(
            filename, start_byte=11, chunk_size=4)
        self.assertEqual(chunk.read(), b'four')
        chunk.seek(0)
        self.assertEqual(chunk.read(), b'four')

    def test_read_past_end_of_file(self):
        filename = os.path.join(self.tempdir, 'foo')
        with open(filename, 'wb') as f:
            f.write(b'onetwothreefourfivesixseveneightnineten')
        chunk = ReadFileChunk.from_filename(
            filename, start_byte=36, chunk_size=100000)
        self.assertEqual(chunk.read(), b'ten')
        self.assertEqual(chunk.read(), b'')
        self.assertEqual(len(chunk), 3)

    def test_tell_and_seek(self):
        filename = os.path.join(self.tempdir, 'foo')
        with open(filename, 'wb') as f:
            f.write(b'onetwothreefourfivesixseveneightnineten')
        chunk = ReadFileChunk.from_filename(
            filename, start_byte=36, chunk_size=100000)
        self.assertEqual(chunk.tell(), 0)
        self.assertEqual(chunk.read(), b'ten')
        self.assertEqual(chunk.tell(), 3)
        chunk.seek(0)
        self.assertEqual(chunk.tell(), 0)

    def test_file_chunk_supports_context_manager(self):
        filename = os.path.join(self.tempdir, 'foo')
        with open(filename, 'wb') as f:
            f.write(b'abc')
        with ReadFileChunk.from_filename(filename,
                                         start_byte=0,
                                         chunk_size=2) as chunk:
            val = chunk.read()
            self.assertEqual(val, b'ab')

    def test_iter_is_always_empty(self):
        # This tests the workaround for the httplib bug (see
        # the source for more info).
        filename = os.path.join(self.tempdir, 'foo')
        open(filename, 'wb').close()
        chunk = ReadFileChunk.from_filename(
            filename, start_byte=0, chunk_size=10)
        self.assertEqual(list(chunk), [])


class TestReadFileChunkWithCallback(TestReadFileChunk):
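    # Inherits (and re-runs) every test from TestReadFileChunk, then adds
    # tests that exercise the progress-callback behavior.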
    def setUp(self):
        super(TestReadFileChunkWithCallback, self).setUp()
        self.filename = os.path.join(self.tempdir, 'foo')
        with open(self.filename, 'wb') as f:
            f.write(b'abc')
        self.amounts_seen = []

    def callback(self, amount):
        self.amounts_seen.append(amount)

    def test_callback_is_invoked_on_read(self):
        chunk = ReadFileChunk.from_filename(
            self.filename, start_byte=0, chunk_size=3, callback=self.callback)
        chunk.read(1)
        chunk.read(1)
        chunk.read(1)
        self.assertEqual(self.amounts_seen, [1, 1, 1])

    def test_callback_can_be_disabled(self):
        chunk = ReadFileChunk.from_filename(
            self.filename, start_byte=0, chunk_size=3, callback=self.callback)
        chunk.disable_callback()
        # Now reading from the ReadFileChunk should not invoke
        # the callback.
        chunk.read()
        self.assertEqual(self.amounts_seen, [])

    def test_callback_will_also_be_triggered_by_seek(self):
        chunk = ReadFileChunk.from_filename(
            self.filename, start_byte=0, chunk_size=3, callback=self.callback)
        chunk.read(2)
        chunk.seek(0)
        chunk.read(2)
        chunk.seek(1)
        chunk.read(2)
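        # Reads report the number of bytes consumed; backwards seeks report
        # the (negative) distance moved, hence the -2 and -1 entries below.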
        self.assertEqual(self.amounts_seen, [2, -2, 2, -1, 2])


class TestStreamReaderProgress(unittest.TestCase):

    def test_proxies_to_wrapped_stream(self):
        original_stream = six.StringIO('foobarbaz')
        wrapped = StreamReaderProgress(original_stream)
        self.assertEqual(wrapped.read(), 'foobarbaz')

    def test_callback_invoked(self):
        amounts_seen = []

        def callback(amount):
            amounts_seen.append(amount)

        original_stream = six.StringIO('foobarbaz')
        wrapped = StreamReaderProgress(original_stream, callback)
        self.assertEqual(wrapped.read(), 'foobarbaz')
        self.assertEqual(amounts_seen, [9])


class TestMultipartUploader(unittest.TestCase):
    def test_multipart_upload_uses_correct_client_calls(self):
        client = mock.Mock()
        uploader = MultipartUploader(
            client, TransferConfig(),
            InMemoryOSLayer({'filename': b'foobar'}), SequentialExecutor)
        client.create_multipart_upload.return_value = {'UploadId': 'upload_id'}
        client.upload_part.return_value = {'ETag': 'first'}

        uploader.upload_file('filename', 'bucket', 'key', None, {})

        # We need to check both the sequence of calls (create/upload/complete)
        # as well as the params passed between the calls, including
        # 1. The upload_id was plumbed through
        # 2. The collected etags were added to the complete call.
        client.create_multipart_upload.assert_called_with(
            Bucket='bucket', Key='key')
        # The 6-byte body fits in a single part, so upload_part is called
        # once with PartNumber=1.
        client.upload_part.assert_called_with(
            Body=mock.ANY, Bucket='bucket',
            UploadId='upload_id', Key='key', PartNumber=1)
        client.complete_multipart_upload.assert_called_with(
            MultipartUpload={'Parts': [{'PartNumber': 1, 'ETag': 'first'}]},
            Bucket='bucket',
            UploadId='upload_id',
            Key='key')

    def test_multipart_upload_injects_proper_kwargs(self):
        client = mock.Mock()
        uploader = MultipartUploader(
            client, TransferConfig(),
            InMemoryOSLayer({'filename': b'foobar'}), SequentialExecutor)
        client.create_multipart_upload.return_value = {'UploadId': 'upload_id'}
        client.upload_part.return_value = {'ETag': 'first'}

        extra_args = {
            'SSECustomerKey': 'fakekey',
            'SSECustomerAlgorithm': 'AES256',
            'StorageClass': 'REDUCED_REDUNDANCY'
        }
        uploader.upload_file('filename', 'bucket', 'key', None, extra_args)

        client.create_multipart_upload.assert_called_with(
            Bucket='bucket', Key='key',
            # The initial call should inject all the storage class params.
            SSECustomerKey='fakekey',
            SSECustomerAlgorithm='AES256',
            StorageClass='REDUCED_REDUNDANCY')
        client.upload_part.assert_called_with(
            Body=mock.ANY, Bucket='bucket',
            UploadId='upload_id', Key='key', PartNumber=1,
            # We only have to forward certain **extra_args in subsequent
            # UploadPart calls.
            SSECustomerKey='fakekey',
            SSECustomerAlgorithm='AES256',
        )
        client.complete_multipart_upload.assert_called_with(
            MultipartUpload={'Parts': [{'PartNumber': 1, 'ETag': 'first'}]},
            Bucket='bucket',
            UploadId='upload_id',
            Key='key')

    def test_multipart_upload_is_aborted_on_error(self):
        # If the create_multipart_upload succeeds and any upload_part
        # fails, then abort_multipart_upload will be called.
        client = mock.Mock()
        uploader = MultipartUploader(
            client, TransferConfig(),
            InMemoryOSLayer({'filename': b'foobar'}), SequentialExecutor)
        client.create_multipart_upload.return_value = {'UploadId': 'upload_id'}
        client.upload_part.side_effect = Exception(
            "Some kind of error occurred.")

        with self.assertRaises(S3UploadFailedError):
            uploader.upload_file('filename', 'bucket', 'key', None, {})

        client.abort_multipart_upload.assert_called_with(
            Bucket='bucket', Key='key', UploadId='upload_id')


class TestMultipartDownloader(unittest.TestCase):

    maxDiff = None

    def test_multipart_download_uses_correct_client_calls(self):
        client = mock.Mock()
        response_body = b'foobarbaz'
        client.get_object.return_value = {'Body': six.BytesIO(response_body)}

        downloader = MultipartDownloader(client, TransferConfig(),
                                         InMemoryOSLayer({}),
                                         SequentialExecutor)
        downloader.download_file('bucket', 'key', 'filename',
                                 len(response_body), {})

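        # With the default chunksize the 9-byte body fits in one part, so the
        # download issues a single ranged GetObject with an open-ended Range.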
        client.get_object.assert_called_with(
            Range='bytes=0-',
            Bucket='bucket',
            Key='key'
        )

    def test_multipart_download_with_multiple_parts(self):
        client = mock.Mock()
        response_body = b'foobarbaz'
        client.get_object.return_value = {'Body': six.BytesIO(response_body)}
        # We're testing with a multipart threshold of 4 bytes and a
        # chunksize of 4 bytes.  Given b'foobarbaz', this should result in
        # 3 calls.  In python slices this would be: r[0:4], r[4:8], r[8:9].
        # But the Range params will be slightly different because HTTP
        # ranges are inclusive.
        config = TransferConfig(multipart_threshold=4,
                                multipart_chunksize=4)

        downloader = MultipartDownloader(client, config,
                                         InMemoryOSLayer({}),
                                         SequentialExecutor)
        downloader.download_file('bucket', 'key', 'filename',
                                 len(response_body), {})

        # We're storing these in **extra because the assertEqual
        # below is really about verifying we have the correct value
        # for the Range param.
        extra = {'Bucket': 'bucket', 'Key': 'key'}
        self.assertEqual(client.get_object.call_args_list,
                         # Note these are inclusive ranges.
                         [mock.call(Range='bytes=0-3', **extra),
                          mock.call(Range='bytes=4-7', **extra),
                          mock.call(Range='bytes=8-', **extra)])

    def test_retry_on_failures_from_stream_reads(self):
        # If we get an exception during a call to the response body's .read()
        # method, we should retry the request.
        client = mock.Mock()
        response_body = b'foobarbaz'
        stream_with_errors = mock.Mock()
        stream_with_errors.read.side_effect = [
            socket.error("fake error"),
            response_body
        ]
        client.get_object.return_value = {'Body': stream_with_errors}
        config = TransferConfig(multipart_threshold=4,
                                multipart_chunksize=4)

        downloader = MultipartDownloader(client, config,
                                         InMemoryOSLayer({}),
                                         SequentialExecutor)
        downloader.download_file('bucket', 'key', 'filename',
                                 len(response_body), {})

        # We're storing these in **extra because the assertEqual
        # below is really about verifying we have the correct value
        # for the Range param.
        extra = {'Bucket': 'bucket', 'Key': 'key'}
        self.assertEqual(client.get_object.call_args_list,
                         # The first call to range=0-3 fails because of the
                         # side_effect above where we make the .read() raise a
                         # socket.error.
                         # The second call to range=0-3 then succeeds.
                         [mock.call(Range='bytes=0-3', **extra),
                          mock.call(Range='bytes=0-3', **extra),
                          mock.call(Range='bytes=4-7', **extra),
                          mock.call(Range='bytes=8-', **extra)])

    def test_exception_raised_on_exceeded_retries(self):
        client = mock.Mock()
        response_body = b'foobarbaz'
        stream_with_errors = mock.Mock()
        stream_with_errors.read.side_effect = socket.error("fake error")
        client.get_object.return_value = {'Body': stream_with_errors}
        config = TransferConfig(multipart_threshold=4,
                                multipart_chunksize=4)

        downloader = MultipartDownloader(client, config,
                                         InMemoryOSLayer({}),
                                         SequentialExecutor)
        with self.assertRaises(RetriesExceededError):
            downloader.download_file('bucket', 'key', 'filename',
                                     len(response_body), {})

    def test_io_thread_failure_triggers_shutdown(self):
        client = mock.Mock()
        response_body = b'foobarbaz'
        client.get_object.return_value = {'Body': six.BytesIO(response_body)}
        os_layer = mock.Mock()
        mock_fileobj = mock.MagicMock()
        mock_fileobj.__enter__.return_value = mock_fileobj
        mock_fileobj.write.side_effect = Exception("fake IO error")
        os_layer.open.return_value = mock_fileobj

        downloader = MultipartDownloader(client, TransferConfig(),
                                         os_layer, SequentialExecutor)
        # We're verifying that the exception raised from the IO future
        # propagates back up via download_file().
        with self.assertRaisesRegexp(Exception, "fake IO error"):
            downloader.download_file('bucket', 'key', 'filename',
                                     len(response_body), {})

    def test_download_futures_fail_triggers_shutdown(self):
        class FailedDownloadParts(SequentialExecutor):
            def __init__(self, max_workers):
                self.is_first = True

            def submit(self, function):
                future = futures.Future()
                if self.is_first:
                    # This is the download_parts_thread.
                    future.set_exception(
                        Exception("fake download parts error"))
                    self.is_first = False
                return future

        client = mock.Mock()
        response_body = b'foobarbaz'
        client.get_object.return_value = {'Body': six.BytesIO(response_body)}

        downloader = MultipartDownloader(client, TransferConfig(),
                                         InMemoryOSLayer({}),
                                         FailedDownloadParts)
        with self.assertRaisesRegexp(Exception, "fake download parts error"):
            downloader.download_file('bucket', 'key', 'filename',
                                     len(response_body), {})


class TestS3Transfer(unittest.TestCase):
    def setUp(self):
        self.client = mock.Mock()
        self.random_file_patch = mock.patch(
            's3transfer.random_file_extension')
        self.random_file = self.random_file_patch.start()
        self.random_file.return_value = 'RANDOM'
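        # Downloads are first written to '<filename>.<random extension>';
        # pinning the extension to 'RANDOM' makes that temp name predictable
        # in the assertions below.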

    def tearDown(self):
        self.random_file_patch.stop()

    def test_callback_handlers_register_on_put_item(self):
        osutil = InMemoryOSLayer({'smallfile': b'foobar'})
        transfer = S3Transfer(self.client, osutil=osutil)
        transfer.upload_file('smallfile', 'bucket', 'key')
        events = self.client.meta.events
        events.register_first.assert_called_with(
            'request-created.s3',
            disable_upload_callbacks,
            unique_id='s3upload-callback-disable',
        )
        events.register_last.assert_called_with(
            'request-created.s3',
            enable_upload_callbacks,
            unique_id='s3upload-callback-enable',
        )

    def test_upload_below_multipart_threshold_uses_put_object(self):
        fake_files = {
            'smallfile': b'foobar',
        }
        osutil = InMemoryOSLayer(fake_files)
        transfer = S3Transfer(self.client, osutil=osutil)
        transfer.upload_file('smallfile', 'bucket', 'key')
        self.client.put_object.assert_called_with(
            Bucket='bucket', Key='key', Body=mock.ANY
        )

    def test_extra_args_on_uploaded_passed_to_api_call(self):
        extra_args = {'ACL': 'public-read'}
        fake_files = {
            'smallfile': b'hello world'
        }
        osutil = InMemoryOSLayer(fake_files)
        transfer = S3Transfer(self.client, osutil=osutil)
        transfer.upload_file('smallfile', 'bucket', 'key',
                             extra_args=extra_args)
        self.client.put_object.assert_called_with(
            Bucket='bucket', Key='key', Body=mock.ANY,
            ACL='public-read'
        )

    def test_uses_multipart_upload_when_over_threshold(self):
        with mock.patch('s3transfer.MultipartUploader') as uploader:
            fake_files = {
                'smallfile': b'foobar',
            }
            osutil = InMemoryOSLayer(fake_files)
            config = TransferConfig(multipart_threshold=2,
                                    multipart_chunksize=2)
            transfer = S3Transfer(self.client, osutil=osutil, config=config)
            transfer.upload_file('smallfile', 'bucket', 'key')

            uploader.return_value.upload_file.assert_called_with(
                'smallfile', 'bucket', 'key', None, {})

    def test_uses_multipart_download_when_over_threshold(self):
        with mock.patch('s3transfer.MultipartDownloader') as downloader:
            osutil = InMemoryOSLayer({})
            over_multipart_threshold = 100 * 1024 * 1024
            transfer = S3Transfer(self.client, osutil=osutil)
            callback = mock.sentinel.CALLBACK
            self.client.head_object.return_value = {
                'ContentLength': over_multipart_threshold,
            }
            transfer.download_file('bucket', 'key', 'filename',
                                   callback=callback)

            downloader.return_value.download_file.assert_called_with(
                # Note how we're downloading to a temporary random file.
                'bucket', 'key', 'filename.RANDOM', over_multipart_threshold,
                {}, callback)

    def test_download_file_with_invalid_extra_args(self):
        below_threshold = 20
        osutil = InMemoryOSLayer({})
        transfer = S3Transfer(self.client, osutil=osutil)
        self.client.head_object.return_value = {
            'ContentLength': below_threshold}
        with self.assertRaises(ValueError):
            transfer.download_file('bucket', 'key', '/tmp/smallfile',
                                   extra_args={'BadValue': 'foo'})

    def test_upload_file_with_invalid_extra_args(self):
        osutil = InMemoryOSLayer({})
        transfer = S3Transfer(self.client, osutil=osutil)
        bad_args = {"WebsiteRedirectLocation": "/foo"}
        with self.assertRaises(ValueError):
            transfer.upload_file('bucket', 'key', '/tmp/smallfile',
                                 extra_args=bad_args)

    def test_download_file_forwards_extra_args(self):
        extra_args = {
            'SSECustomerKey': 'foo',
            'SSECustomerAlgorithm': 'AES256',
        }
        below_threshold = 20
        osutil = InMemoryOSLayer({'smallfile': b'hello world'})
        transfer = S3Transfer(self.client, osutil=osutil)
        self.client.head_object.return_value = {
            'ContentLength': below_threshold}
        self.client.get_object.return_value = {
            'Body': six.BytesIO(b'foobar')
        }
        transfer.download_file('bucket', 'key', '/tmp/smallfile',
                               extra_args=extra_args)

        # Note that we need to invoke both the HeadObject call
        # and the GetObject call with the extra_args.
        # This is necessary: trying to HeadObject an SSE object
        # will return a 400 if you don't provide the required
        # params.
        self.client.get_object.assert_called_with(
            Bucket='bucket', Key='key', SSECustomerAlgorithm='AES256',
            SSECustomerKey='foo')

    def test_get_object_stream_is_retried_and_succeeds(self):
        below_threshold = 20
        osutil = InMemoryOSLayer({'smallfile': b'hello world'})
        transfer = S3Transfer(self.client, osutil=osutil)
        self.client.head_object.return_value = {
            'ContentLength': below_threshold}
        self.client.get_object.side_effect = [
            # First request fails.
            socket.error("fake error"),
            # Second succeeds.
            {'Body': six.BytesIO(b'foobar')}
        ]
        transfer.download_file('bucket', 'key', '/tmp/smallfile')

        self.assertEqual(self.client.get_object.call_count, 2)

    def test_get_object_stream_uses_all_retries_and_errors_out(self):
        below_threshold = 20
        osutil = InMemoryOSLayer({})
        transfer = S3Transfer(self.client, osutil=osutil)
        self.client.head_object.return_value = {
            'ContentLength': below_threshold}
        # Here we're raising an exception every single time, which
        # will exhaust our retry count and propagate a
        # RetriesExceededError.
        self.client.get_object.side_effect = socket.error("fake error")
        with self.assertRaises(RetriesExceededError):
            transfer.download_file('bucket', 'key', 'smallfile')

        self.assertEqual(self.client.get_object.call_count, 5)
        # We should have also cleaned up the in progress file
        # we were downloading to.
        self.assertEqual(osutil.filemap, {})

    def test_download_below_multipart_threshold(self):
        below_threshold = 20
        osutil = InMemoryOSLayer({'smallfile': b'hello world'})
        transfer = S3Transfer(self.client, osutil=osutil)
        self.client.head_object.return_value = {
            'ContentLength': below_threshold}
        self.client.get_object.return_value = {
            'Body': six.BytesIO(b'foobar')
        }
        transfer.download_file('bucket', 'key', 'smallfile')

        self.client.get_object.assert_called_with(Bucket='bucket', Key='key')

    def test_can_create_with_just_client(self):
        transfer = S3Transfer(client=mock.Mock())
        self.assertIsInstance(transfer, S3Transfer)


class TestShutdownQueue(unittest.TestCase):
    def test_handles_normal_put_get_requests(self):
        q = ShutdownQueue()
        q.put('foo')
        self.assertEqual(q.get(), 'foo')

    def test_put_raises_error_on_shutdown(self):
        q = ShutdownQueue()
        q.trigger_shutdown()
        with self.assertRaises(QueueShutdownError):
            q.put('foo')


class TestRandomFileExtension(unittest.TestCase):
    def test_has_proper_length(self):
        self.assertEqual(
            len(random_file_extension(num_digits=4)), 4)


class TestCallbackHandlers(unittest.TestCase):
    def setUp(self):
        self.request = mock.Mock()

    def test_disable_request_on_put_object(self):
        disable_upload_callbacks(self.request,
                                 'PutObject')
        self.request.body.disable_callback.assert_called_with()

    def test_disable_request_on_upload_part(self):
        disable_upload_callbacks(self.request,
                                 'UploadPart')
        self.request.body.disable_callback.assert_called_with()

    def test_enable_object_on_put_object(self):
        enable_upload_callbacks(self.request,
                                 'PutObject')
        self.request.body.enable_callback.assert_called_with()

    def test_enable_object_on_upload_part(self):
        enable_upload_callbacks(self.request,
                                 'UploadPart')
        self.request.body.enable_callback.assert_called_with()

    def test_dont_disable_if_missing_interface(self):
        del self.request.body.disable_callback
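        # Deleting the attribute from the mock makes hasattr() report it as
        # missing, so the handler should skip the callback entirely.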
        disable_upload_callbacks(self.request,
                                 'PutObject')
        self.assertEqual(self.request.body.method_calls, [])

    def test_dont_enable_if_missing_interface(self):
        del self.request.body.enable_callback
        enable_upload_callbacks(self.request,
                                'PutObject')
        self.assertEqual(self.request.body.method_calls, [])

    def test_dont_disable_if_wrong_operation(self):
        disable_upload_callbacks(self.request,
                                 'OtherOperation')
        self.assertFalse(
            self.request.body.disable_callback.called)

    def test_dont_enable_if_wrong_operation(self):
        enable_upload_callbacks(self.request,
                                'OtherOperation')
        self.assertFalse(
            self.request.body.enable_callback.called)