示例#1
0
class MultipartUploadTestCase(TestCase):
    """Tests for ``MultipartUpload``: the plain success path, recovery from
    transient part-handler and completion errors, retry exhaustion, and
    throughput accounting.

    Each test swaps the upload's retry-strategy clock for a ``FakeClock`` so
    retry scheduling is deterministic and observable through ``clock.calls``.
    """

    def setUp(self):
        super(MultipartUploadTestCase, self).setUp()
        self.log = FakeLog()
        self.clock = FakeClock()

    def _makeUpload(self, client, parts_generator, part_handler, counter, d):
        """Build a ``MultipartUpload`` wired to this test's fake clock/log."""
        upload = MultipartUpload(client, None, parts_generator, part_handler,
            counter, d, self.log)
        upload.retry_strategy.clock = self.clock
        return upload

    def test_str(self):
        """``str()`` of an upload reports the upload id from its
        initiation response."""
        upload = MultipartUpload(None, None, None, None, None, Deferred())
        upload.retry_strategy.clock = self.clock
        r = MultipartInitiationResponse('', '', '1234567890')
        upload.init_response = r
        self.assertEqual(str(upload), "MultipartUpload upload_id=1234567890")

    def _assertPartsCompleted(self, parts_generator, part_handler, received,
                              task, client):
        """Assert that all ten parts were generated, handled and reported
        (in order), that the task carries both initiation and completion
        responses, and that the client saw exactly one init call.
        """
        # Parts are ten runs of a repeated letter, numbered 1..10.
        expected = [('aaaaaaaaaa', 1),
                    ('bbbbbbbbbb', 2),
                    ('cccccccccc', 3),
                    ('dddddddddd', 4),
                    ('eeeeeeeeee', 5),
                    ('ffffffffff', 6),
                    ('gggggggggg', 7),
                    ('hhhhhhhhhh', 8),
                    ('iiiiiiiiii', 9),
                    ('jjjjjjjjjj', 10)]
        self.assertEqual(parts_generator.generated, expected)
        self.assertEqual(part_handler.handled, expected)
        # The on_part_generated observer receives (md5-hex, part number).
        expected = [
            ('e09c80c42fda55f9d992e59ca6b3307d', 1),
            ('82136b4240d6ce4ea7d03e51469a393b', 2),
            ('e9b3390206d8dfc5ffc9b09284c0bbde', 3),
            ('a9e49c7aefe022f0a8540361cce7575c', 4),
            ('c5ba867d9056b7cecf87f8ce88af90f8', 5),
            ('66952c6203ae23242590c0061675234d', 6),
            ('fdc68ea4cf2763996cf215451b291c63', 7),
            ('70270ca63a3de2d8905a9181a0245e58', 8),
            ('d98bcb28df1de541e95a6722c5e983ea', 9),
            ('c71a8da22bf4053760a604897627474c', 10)]
        self.assertEqual(received, expected)
        self.assertIsInstance(task.init_response,
            MultipartInitiationResponse)
        self.assertIsInstance(task.completion_response,
            MultipartCompletionResponse)
        self.assertEqual(client.calls, [
            ('init_multipart_upload', 'mybucket',
             'mykey', '', {}, {'acl': 'public-read'})])

    def test_upload(self):
        """A plain upload completes and fires its deferred with the task."""
        client = FakeS3Client()
        parts_generator = DummyPartsGenerator()
        part_handler = DummyPartHandler()
        counter = PartsTransferredCounter('?')
        d = Deferred()
        received = []
        amz_headers = {'acl': 'public-read'}
        upload = self._makeUpload(client, parts_generator, part_handler,
                                  counter, d)

        def check(task):
            self.assertIdentical(task, upload)
            self._assertPartsCompleted(parts_generator, part_handler,
                                       received, task, client)

        d.addCallback(check)
        upload.on_part_generated = received.append
        upload.upload('mybucket', 'mykey', '', {}, amz_headers)
        return d

    def test_upload_error_recovery(self):
        """Transient part-handler errors are retried (3 scheduled retries
        per part => 30 clock calls) and the upload still completes."""
        client = FakeS3Client()
        parts_generator = DummyPartsGenerator()
        part_handler = ErroringPartHandler()
        counter = PartsTransferredCounter('?')
        d = Deferred()
        received = []
        amz_headers = {'acl': 'public-read'}
        upload = self._makeUpload(client, parts_generator, part_handler,
                                  counter, d)

        def check(task):
            self.flushLoggedErrors()
            self.assertEqual(len(self.clock.calls), 30)
            self.assertIdentical(task, upload)
            self._assertPartsCompleted(parts_generator, part_handler,
                                       received, task, client)

        d.addCallback(check)
        upload.on_part_generated = received.append
        upload.upload('mybucket', 'mykey', '', {}, amz_headers)
        return d

    def test_upload_error_timeout_finally(self):
        """A part handler that keeps failing (100 errors) exhausts the
        retry budget (110 clock calls) and the upload fails."""
        client = FakeS3Client()
        parts_generator = DummyPartsGenerator()
        part_handler = ErroringPartHandler(100)
        counter = PartsTransferredCounter('?')
        d = Deferred()
        received = []
        amz_headers = {'acl': 'public-read'}
        upload = self._makeUpload(client, parts_generator, part_handler,
                                  counter, d)
        upload.on_part_generated = received.append
        upload.upload('mybucket', 'mykey', '', {}, amz_headers)

        def eb(why):
            self.flushLoggedErrors()
            self.assertEqual(len(self.clock.calls), 110)
            return why

        d.addErrback(eb)
        return self.assertFailure(d, ValueError)

    def test_upload_on_complete_error_recovery(self):
        """Transient completion errors are retried (3 clock calls) and the
        upload still completes."""
        client = ErroringFakeS3Client()
        parts_generator = DummyPartsGenerator()
        part_handler = DummyPartHandler()
        counter = PartsTransferredCounter('?')
        d = Deferred()
        received = []
        amz_headers = {'acl': 'public-read'}
        upload = self._makeUpload(client, parts_generator, part_handler,
                                  counter, d)

        def check(task):
            self.flushLoggedErrors()
            self.assertEqual(len(self.clock.calls), 3)
            self.assertIdentical(task, upload)
            self._assertPartsCompleted(parts_generator, part_handler,
                                       received, task, client)

        d.addCallback(check)
        upload.on_part_generated = received.append
        upload.upload('mybucket', 'mykey', '', {}, amz_headers)
        return d

    def test_upload_on_complete_error_timeout_finally(self):
        """A completion call that keeps failing (100 errors) exhausts the
        retry budget (11 clock calls) and the upload fails."""
        client = ErroringFakeS3Client(100)
        parts_generator = DummyPartsGenerator()
        part_handler = DummyPartHandler()
        counter = PartsTransferredCounter('?')
        d = Deferred()
        received = []
        amz_headers = {'acl': 'public-read'}
        upload = self._makeUpload(client, parts_generator, part_handler,
                                  counter, d)
        upload.on_part_generated = received.append
        upload.upload('mybucket', 'mykey', '', {}, amz_headers)

        def eb(why):
            self.flushLoggedErrors()
            self.assertEqual(len(self.clock.calls), 11)
            return why

        d.addErrback(eb)
        return self.assertFailure(d, ValueError)

    def test_upload_with_throughput_counter(self):
        """A successful upload records its transferred bytes (100) in the
        attached throughput counter."""
        client = FakeS3Client()
        parts_generator = DummyPartsGenerator()
        part_handler = DummyPartHandler()
        counter = PartsTransferredCounter('?')
        stats = SlidingStats(self.clock.seconds(), size=100)
        throughput_counter = ThroughputCounter(clock=self.clock, stats=stats)
        d = Deferred()
        received = []
        amz_headers = {'acl': 'public-read'}
        upload = self._makeUpload(client, parts_generator, part_handler,
                                  counter, d)
        upload.throughput_counter = throughput_counter

        def check(task):
            self.assertIdentical(task, upload)
            self._assertPartsCompleted(parts_generator, part_handler,
                                       received, task, client)
            self.assertEqual(throughput_counter.read()[-1], (0, 100))

        d.addCallback(check)
        upload.on_part_generated = received.append
        upload.upload('mybucket', 'mykey', '', {}, amz_headers)
        return d

    def test_upload_with_throughput_counter_and_error(self):
        """A failed upload records no throughput even after the full retry
        budget (110 clock calls) is spent."""
        client = FakeS3Client()
        parts_generator = DummyPartsGenerator()
        part_handler = ErroringPartHandler(100)
        counter = PartsTransferredCounter('?')
        stats = SlidingStats(self.clock.seconds(), size=100)
        throughput_counter = ThroughputCounter(clock=self.clock, stats=stats)
        d = Deferred()
        received = []
        amz_headers = {'acl': 'public-read'}
        upload = self._makeUpload(client, parts_generator, part_handler,
                                  counter, d)
        upload.throughput_counter = throughput_counter
        upload.on_part_generated = received.append

        def eb(why):
            self.flushLoggedErrors()
            self.assertEqual(len(self.clock.calls), 110)
            self.assertEqual(throughput_counter.read()[-1], (0, 0))
            return why

        upload.upload('mybucket', 'mykey', '', {}, amz_headers)
        d.addErrback(eb)
        return self.assertFailure(d, ValueError)
示例#2
0
 def setUp(self):
     """Give every test a fresh fake clock and a fresh fake log."""
     super(MultipartUploadTestCase, self).setUp()
     self.clock = FakeClock()
     self.log = FakeLog()
示例#3
0
 def test_initialization(self):
     """A new ``ThroughputCounter``'s stats window is anchored at the
     clock's current time (here advanced to 200)."""
     clock = FakeClock()
     clock.tick(200)
     counter = ThroughputCounter(clock)
     # The newest slot's timestamp should match the clock at creation.
     last = counter.stats.slots[-1][0]
     # assertEquals is a deprecated alias (removed in Python 3.12);
     # use assertEqual.
     self.assertEqual(last, 200)