Example #1
    def test_load_items_from_stdin(self):
        command = ['ddb', 'put', 'mytable', '-']
        expected_params = {
            'TableName': 'mytable',
            'ReturnConsumedCapacity': 'NONE',
            'Item': {
                "foo": {
                    "S": "bar"
                }
            },
        }
        with capture_input(b'{foo: bar}'):
            self.assert_params_for_cmd(command, expected_params, expected_rc=0)
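
All of the examples feed data to the code under test through capture_input. A minimal sketch of such a helper, assuming it simply swaps sys.stdin for an object whose buffer attribute is an in-memory byte stream (the real helper in awscli's test utilities may differ in detail):

import contextlib
import io
from unittest import mock


@contextlib.contextmanager
def capture_input(input_bytes=b''):
    # The code under test reads binary data from sys.stdin.buffer, so expose
    # the given bytes there via a stand-in stdin object.
    fake_stdin = mock.Mock()
    fake_stdin.buffer = io.BytesIO(input_bytes)
    with mock.patch('sys.stdin', fake_stdin):
        yield fake_stdin

Inside a "with capture_input(b'foobar'):" block, anything that reads sys.stdin.buffer sees exactly those bytes.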
Example #2
    def test_upload_modifies_chunksize_if_too_high(self):
        config = runtime_config(multipart_chunksize=6 * (1024 ** 3))
        handler = S3TransferStreamHandler(
            self.session, self.params, runtime_config=config,
            manager=self.transfer_manager)
        file = FileInfo('-', 'foo-bucket/bar.txt', is_stream=True,
                        operation_name='upload')

        with capture_input(b'foobar'):
            handler.call([file])

        chunksize = handler.config.multipart_chunksize
        self.assert_chunk_size_in_range(chunksize)
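
The 6 GiB chunk size configured here is larger than S3 allows for a single part, so the handler is expected to pull it back into range (which assert_chunk_size_in_range then checks). A rough sketch of that kind of adjustment, with the usual S3 part-size limits written in as assumptions rather than read from the handler:

# Assumed S3 part-size limits: 5 MiB minimum, 5 GiB maximum.
MIN_PART_SIZE = 5 * (1024 ** 2)
MAX_PART_SIZE = 5 * (1024 ** 3)


def clamp_chunksize(chunksize):
    # Pull an out-of-range chunk size back inside the allowed window.
    return max(MIN_PART_SIZE, min(chunksize, MAX_PART_SIZE))


assert clamp_chunksize(6 * (1024 ** 3)) == MAX_PART_SIZE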
Example #3
    def test_upload_swallows_exceptions(self):
        handler = S3TransferStreamHandler(
            self.session, self.params, manager=self.transfer_manager)
        file = FileInfo('-', 'foo-bucket/bar.txt', is_stream=True,
                        operation_name='upload')

        self.transfer_future.result.side_effect = Exception()

        with capture_input(b'foobar'):
            response = handler.call([file])

        self.assertEqual(response.num_tasks_failed, 1)
        self.assertEqual(response.num_tasks_warned, 0)
Example #4
    def test_upload_stream(self):
        handler = S3TransferStreamHandler(
            self.session, self.params, manager=self.transfer_manager)
        file = FileInfo('-', 'foo-bucket/bar.txt', is_stream=True,
                        operation_name='upload')

        with capture_input(b'foobar'):
            response = handler.call([file])

        self.assertEqual(response.num_tasks_failed, 0)
        self.assertEqual(response.num_tasks_warned, 0)

        upload_args = self.transfer_manager.upload.call_args[1]
        self.assertEqual(upload_args['bucket'], 'foo-bucket')
        self.assertEqual(upload_args['key'], 'bar.txt')
Example #5
    def test_upload_stream_with_expected_size(self):
        expected_size = 6
        self.params['expected_size'] = expected_size
        handler = S3TransferStreamHandler(
            self.session, self.params, manager=self.transfer_manager)
        file = FileInfo('-', 'foo-bucket/bar.txt', is_stream=True,
                        operation_name='upload')

        with capture_input(b'foobar'):
            handler.call([file])

        # Assert that there is a subscriber.
        call_args = self.transfer_manager.upload.call_args[1]
        subscribers = call_args.get('subscribers', [])
        self.assertTrue(len(subscribers) == 1)

        # Make sure that subscriber is the right kind
        subscriber = subscribers[0]
        self.assertIsInstance(subscriber, ProvideSizeSubscriber)

        # Validate that the size on the subscriber is the expected size
        self.assertEqual(subscriber.size, expected_size)
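
A stream read from stdin cannot be seeked to measure its length, so the caller has to supply the size up front (on the command line this is what the --expected-size option of aws s3 cp is for). A hypothetical illustration of the behaviour the test asserts, assuming ProvideSizeSubscriber lives in awscli.customizations.s3.utils (the module Example #6 references) and takes the size as its only constructor argument:

from unittest import mock

from awscli.customizations.s3.utils import ProvideSizeSubscriber

# Hypothetical glue mirroring what the handler is expected to do: when an
# expected size is known, queue the upload with a subscriber carrying it.
params = {'expected_size': 6}
manager = mock.Mock()

subscribers = []
if params.get('expected_size') is not None:
    subscribers.append(ProvideSizeSubscriber(params['expected_size']))
manager.upload(bucket='foo-bucket', key='bar.txt', subscribers=subscribers)

queued = manager.upload.call_args[1]['subscribers'][0]
assert isinstance(queued, ProvideSizeSubscriber)
assert queued.size == 6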
Example #6
    def test_upload_modifies_chunksize_for_max_parts_if_size_known(self):
        expected_size = 6 * (1024 ** 3)
        max_parts = awscli.customizations.s3.utils.MAX_PARTS

        # Set the chunksize to end up with way more than the max parts.
        chunksize = int((expected_size / (max_parts * 2)) + 1)
        self.params['expected_size'] = expected_size
        config = runtime_config(multipart_chunksize=chunksize)
        handler = S3TransferStreamHandler(
            self.session, self.params, runtime_config=config,
            manager=self.transfer_manager)
        file = FileInfo('-', 'foo-bucket/bar.txt', is_stream=True,
                        operation_name='upload')

        with capture_input(b'foobar'):
            handler.call([file])

        # The chunksize should at least be large enough to fit within max parts
        minimum_chunksize = int(expected_size / max_parts)
        actual_chunksize = handler.config.multipart_chunksize
        self.assert_chunk_size_in_range(
            actual_chunksize, minimum=minimum_chunksize)
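
Working through the numbers here (assuming MAX_PARTS is 10,000, the S3 limit on parts per multipart upload): the deliberately bad chunk size comes out to roughly 322 KB, which would split a 6 GiB stream into about 20,000 parts, so the handler has to raise it to at least expected_size / MAX_PARTS, about 644 KB.

MAX_PARTS = 10000  # assumed value of awscli.customizations.s3.utils.MAX_PARTS
expected_size = 6 * (1024 ** 3)                             # 6,442,450,944 bytes

bad_chunksize = int((expected_size / (MAX_PARTS * 2)) + 1)  # 322,123 bytes
parts_needed = expected_size / bad_chunksize                # ~20,000 parts, over the limit
minimum_chunksize = int(expected_size / MAX_PARTS)          # 644,245 bytes

assert parts_needed > MAX_PARTS
assert minimum_chunksize > bad_chunksize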