Example #1
 def test_download(self):
     # Create file info objects to perform download.
     tasks = []
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(FileInfo(
             src=self.s3_files[i], src_type='s3',
             dest=self.loc_files[i], dest_type='local',
             last_update=time, operation_name='download',
             size=0, client=self.client))
     self.parsed_responses = [
         {'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
          'Body': six.BytesIO(b'This is a test.')},
         {'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
          'Body': six.BytesIO(b'This is a test.')},
     ]
     ref_calls = [
         ('GetObject', {'Bucket': self.bucket, 'Key': 'text1.txt'}),
         ('GetObject',
          {'Bucket': self.bucket, 'Key': 'another_directory/text2.txt'}),
     ]
     # Perform the download.
     self.assert_operations_for_s3_handler(self.s3_handler, tasks,
                                           ref_calls)
     # Confirm that the files now exist.
     for filename in self.loc_files:
         self.assertTrue(os.path.exists(filename))
     # Ensure the contents are as expected.
     with open(self.loc_files[0], 'rb') as f:
         self.assertEqual(f.read(), b'This is a test.')
     with open(self.loc_files[1], 'rb') as f:
         self.assertEqual(f.read(), b'This is a test.')
Example #2
 def test_validates_valid_log_files(self):
     key_provider, digest_provider, validator = create_scenario(
         ['gap', 'link', 'link'],
         [[self._logs[2]], [], [self._logs[0], self._logs[1]]])
     self.parsed_responses = [
         {
             'LocationConstraint': ''
         },
         {
             'Body': six.BytesIO(_gz_compress(self._logs[0]['_raw_value']))
         },
         {
             'Body': six.BytesIO(_gz_compress(self._logs[1]['_raw_value']))
         },
         {
             'Body': six.BytesIO(_gz_compress(self._logs[2]['_raw_value']))
         },
     ]
     _setup_mock_traverser(self._mock_traverser, key_provider,
                           digest_provider, validator)
     stdout, stderr, rc = self.run_cmd(
         "cloudtrail validate-logs --trail-arn %s --start-time %s --verbose"
         % (TEST_TRAIL_ARN, START_TIME_ARG), 0)
     self.assertIn('s3://1/key1', stdout)
     self.assertIn('s3://1/key2', stdout)
     self.assertIn('s3://1/key3', stdout)
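These CloudTrail validate-logs tests build their stub S3 bodies with a _gz_compress helper that is not shown in the excerpt. A minimal sketch of the round trip it presumably performs, with six.BytesIO standing in for a file on disk (the encode step is an assumption, since the tests also pass plain strings):

import gzip
import six

def _gz_compress(data):
    # Assumed helper: gzip the payload into an in-memory buffer.
    if not isinstance(data, bytes):
        data = data.encode('utf-8')
    out = six.BytesIO()
    with gzip.GzipFile(fileobj=out, mode='wb') as f:
        f.write(data)
    return out.getvalue()

def _gz_decompress(blob):
    # Inverse: read the gzip stream back out of memory.
    with gzip.GzipFile(fileobj=six.BytesIO(blob), mode='rb') as f:
        return f.read()

assert _gz_decompress(_gz_compress('{"foo": "bar"}')) == b'{"foo": "bar"}'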
Example #3
    def test_ranged_download(self):
        cmdline = '%s s3://mybucket/mykey %s --request-payer' % (
            self.prefix, self.files.rootdir)

        self.parsed_responses = [{
            "ContentLength": 10 * (1024**2),
            "LastModified": "00:00:00Z"
        }, {
            'ETag': '"foo-1"',
            'Body': six.BytesIO(b'foo')
        }, {
            'ETag': '"foo-1"',
            'Body': six.BytesIO(b'foo')
        }]

        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called([
            ('HeadObject', {
                'Bucket': 'mybucket',
                'Key': 'mykey',
                'RequestPayer': 'requester',
            }),
            ('GetObject', {
                'Bucket': 'mybucket',
                'Key': 'mykey',
                'RequestPayer': 'requester',
                'Range': mock.ANY,
            }),
            ('GetObject', {
                'Bucket': 'mybucket',
                'Key': 'mykey',
                'RequestPayer': 'requester',
                'Range': mock.ANY,
            }),
        ])
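The Range values above are asserted with mock.ANY, which compares equal to anything; the test pins the operation names and fixed parameters while leaving the exact byte ranges chosen by the downloader unconstrained. A minimal illustration:

from unittest import mock

# mock.ANY matches any value, so only the key and the fixed values are checked.
assert {'Range': 'bytes=0-5242879'} == {'Range': mock.ANY}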
Example #4
 def test_ensures_digest_has_proper_metadata(self):
     out = six.BytesIO()
     with gzip.GzipFile(fileobj=out, mode="wb") as f:
         f.write('{"foo":"bar"}'.encode())
     gzipped_data = out.getvalue()
     s3_client = Mock()
     s3_client.get_object.return_value = {
         'Body': six.BytesIO(gzipped_data),
         'Metadata': {}}
     provider = self._get_mock_provider(s3_client)
     with self.assertRaises(DigestSignatureError):
         provider.fetch_digest('bucket', 'key')
Example #5
 def test_ensures_digests_can_be_json_parsed(self):
     json_str = '{{{'
     out = six.BytesIO()
     with gzip.GzipFile(fileobj=out, mode="wb") as f:
         f.write(json_str.encode())
     gzipped_data = out.getvalue()
     s3_client = Mock()
     s3_client.get_object.return_value = {
         'Body': six.BytesIO(gzipped_data),
         'Metadata': {'signature': 'abc', 'signature-algorithm': 'SHA256'}}
     provider = self._get_mock_provider(s3_client)
     with self.assertRaises(InvalidDigestFormat):
         provider.fetch_digest('bucket', 'key')
Example #6
 def test_fetches_digests(self):
     json_str = '{"foo":"bar"}'
     out = six.BytesIO()
     with gzip.GzipFile(fileobj=out, mode="wb") as f:
         f.write(json_str.encode())
     gzipped_data = out.getvalue()
     s3_client = Mock()
     s3_client.get_object.return_value = {
         'Body': six.BytesIO(gzipped_data),
         'Metadata': {'signature': 'abc', 'signature-algorithm': 'SHA256'}}
     provider = self._get_mock_provider(s3_client)
     result = provider.fetch_digest('bucket', 'key')
     self.assertEqual({'foo': 'bar', '_signature': 'abc',
                       '_signature_algorithm': 'SHA256'}, result[0])
     self.assertEqual(json_str.encode(), result[1])
Example #7
 def _multipart_upload_to_s3(self, params, bundle, size_remaining):
     create_response = self.s3.create_multipart_upload(Bucket=params.bucket,
                                                       Key=params.key)
     upload_id = create_response['UploadId']
     try:
         part_num = 1
         multipart_list = []
         bundle.seek(0)
         while size_remaining > 0:
             data = bundle.read(MULTIPART_LIMIT)
             upload_response = self.s3.upload_part(Bucket=params.bucket,
                                                   Key=params.key,
                                                   UploadId=upload_id,
                                                   PartNumber=part_num,
                                                   Body=six.BytesIO(data))
             multipart_list.append({
                 'PartNumber': part_num,
                 'ETag': upload_response['ETag']
             })
             part_num += 1
             size_remaining -= len(data)
         return self.s3.complete_multipart_upload(
             Bucket=params.bucket,
             Key=params.key,
             UploadId=upload_id,
             MultipartUpload={'Parts': multipart_list})
     except (ServerError, ClientError) as e:
         self.s3.abort_multipart_upload(Bucket=params.bucket,
                                        Key=params.key,
                                        UploadId=upload_id)
         raise e
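Each chunk read from the bundle is re-wrapped in a fresh six.BytesIO so that upload_part receives a seekable file-like body. A standalone sketch of just the chunking loop (the 5 MiB limit is an assumption; the real MULTIPART_LIMIT is defined elsewhere in the module):

import six

MULTIPART_LIMIT = 5 * 1024 * 1024  # assumed part size; S3's minimum for non-final parts

def iter_parts(bundle, size_remaining):
    # Mirrors the loop above: yields (part_number, seekable body) pairs.
    bundle.seek(0)
    part_num = 1
    while size_remaining > 0:
        data = bundle.read(MULTIPART_LIMIT)
        yield part_num, six.BytesIO(data)
        part_num += 1
        size_remaining -= len(data)

parts = list(iter_parts(six.BytesIO(b'x' * 12000000), 12000000))
assert len(parts) == 3  # 12 MB splits into two full parts plus a remainder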
Example #8
    def test_streaming_download(self):
        command = "s3 cp s3://bucket/streaming.txt -"
        self.parsed_responses = [{
            "AcceptRanges": "bytes",
            "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
            "ContentLength": 4,
            "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
            "Metadata": {},
            "ContentType": "binary/octet-stream"
        }, {
            "AcceptRanges": "bytes",
            "Metadata": {},
            "ContentType": "binary/octet-stream",
            "ContentLength": 4,
            "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
            "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
            "Body": six.BytesIO(b'foo\n')
        }]

        stdout, stderr, rc = self.run_cmd(command)
        self.assertEqual(stdout, 'foo\n')

        # Ensures no extra operations were called
        self.assertEqual(len(self.operations_called), 2)
        ops = [op[0].name for op in self.operations_called]
        expected_ops = ['HeadObject', 'GetObject']
        self.assertEqual(ops, expected_ops)
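When the destination is -, the CLI streams the GetObject body to stdout instead of writing a file. A minimal sketch of that pattern, with the response dict standing in for a parsed GetObject result:

import sys
import six

response = {'Body': six.BytesIO(b'foo\n')}  # stand-in for a GetObject response
body = response['Body']
for chunk in iter(lambda: body.read(1024), b''):
    sys.stdout.write(chunk.decode('utf-8'))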
Example #9
    def setUp(self):
        self.session = FakeSession({'config_file': 'myconfigfile'})
        self.subscribe = CloudTrailSubscribe(self.session)

        self.subscribe.iam = Mock()
        self.subscribe.iam.GetUser = Mock(
            return_value={'User': {
                'Arn': '::::123:456'
            }})

        self.subscribe.s3 = Mock()
        self.subscribe.s3.endpoint = Mock()
        self.subscribe.s3.endpoint.region_name = 'us-east-1'
        policy_template = six.BytesIO(six.b(u'{"Statement": []}'))
        self.subscribe.s3.GetObject = Mock(
            return_value={'Body': policy_template})
        self.subscribe.s3.ListBuckets = Mock(
            return_value={'Buckets': [{
                'Name': 'test2'
            }]})

        self.subscribe.sns = Mock()
        self.subscribe.sns.endpoint = Mock()
        self.subscribe.sns.endpoint.region_name = 'us-east-1'
        self.subscribe.sns.ListTopics = Mock(
            return_value={'Topics': [{
                'TopicArn': ':test2'
            }]})
        self.subscribe.sns.CreateTopic = Mock(return_value={'TopicArn': 'foo'})
        self.subscribe.sns.GetTopicAttributes = Mock(
            return_value={'Attributes': {
                'Policy': '{"Statement": []}'
            }})
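Example #10 below appears to be the same fixture ported to the newer botocore client interface: snake_case methods (get_user, get_object, list_topics) and client.meta.region_name replace the CamelCase operations and .endpoint attribute used here, while the six.BytesIO policy template stays the same.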
Example #10
    def setUp(self):
        self.session = FakeSession({'config_file': 'myconfigfile'})
        self.subscribe = CloudTrailSubscribe(self.session)
        self.subscribe.region_name = 'us-east-1'

        self.subscribe.iam = Mock()
        self.subscribe.iam.get_user = Mock(
            return_value={'User': {
                'Arn': '::::123:456'
            }})

        self.subscribe.s3 = Mock()
        self.subscribe.s3.meta.region_name = 'us-east-1'
        policy_template = six.BytesIO(six.b(u'{"Statement": []}'))
        self.subscribe.s3.get_object = Mock(
            return_value={'Body': policy_template})
        self.subscribe.s3.head_bucket.return_value = {}

        self.subscribe.sns = Mock()
        self.subscribe.sns.meta.region_name = 'us-east-1'
        self.subscribe.sns.list_topics = Mock(
            return_value={'Topics': [{
                'TopicArn': ':test2'
            }]})
        self.subscribe.sns.create_topic = Mock(
            return_value={'TopicArn': 'foo'})
        self.subscribe.sns.get_topic_attributes = Mock(
            return_value={'Attributes': {
                'Policy': '{"Statement": []}'
            }})
Example #11
 def test_recursive_download(self):
     cmdline = '%s s3://mybucket/ %s --request-payer --recursive' % (
         self.prefix, self.files.rootdir)
     self.parsed_responses = [
         {
             'Contents': [
                 {
                     'Key': 'mykey',
                     'LastModified': '00:00:00Z',
                     'Size': 100
                 },
             ],
             'CommonPrefixes': []
         },
         {
             'ETag': '"foo-1"',
             'Body': six.BytesIO(b'foo')
         },
     ]
     self.run_cmd(cmdline, expected_rc=0)
     self.assert_operations_called([
         ('ListObjectsV2', {
             'Bucket': 'mybucket',
             'Prefix': '',
             'EncodingType': 'url',
             'RequestPayer': 'requester',
         }),
         ('GetObject', {
             'Bucket': 'mybucket',
             'Key': 'mykey',
             'RequestPayer': 'requester',
         }),
     ])
Example #12
 def _multipart_upload_to_s3(self, parsed_args, bundle, size_remaining):
     create_response = self.s3.CreateMultipartUpload(
         bucket=parsed_args.bucket, key=parsed_args.key)
     upload_id = create_response['UploadId']
     try:
         part_num = 1
         multipart_list = []
         bundle.seek(0)
         while size_remaining > 0:
             data = bundle.read(MULTIPART_LIMIT)
             upload_response = self.s3.UploadPart(bucket=parsed_args.bucket,
                                                  key=parsed_args.key,
                                                  upload_id=upload_id,
                                                  part_number=part_num,
                                                  body=six.BytesIO(data))
             multipart_list.append({
                 'PartNumber': part_num,
                 'ETag': upload_response['ETag']
             })
             part_num += 1
             size_remaining -= len(data)
         return self.s3.CompleteMultipartUpload(
             bucket=parsed_args.bucket,
             key=parsed_args.key,
             upload_id=upload_id,
             multipart_upload={'Parts': multipart_list})
     except Exception as e:
         self.s3.AbortMultipartUpload(bucket=parsed_args.bucket,
                                      key=parsed_args.key,
                                      upload_id=upload_id)
         raise e
Example #13
 def test_fails_when_digest_metadata_is_missing(self):
     key = MockDigestProvider([]).get_key_at_position(1)
     self.parsed_responses = [
         {
             'LocationConstraint': ''
         },
         {
             'Contents': [{
                 'Key': key
             }]
         },
         {
             'Body': six.BytesIO(_gz_compress(self._logs[0]['_raw_value'])),
             'Metadata': {}
         },
     ]
     s3_client_provider = S3ClientProvider(self.driver.session, 'us-east-1')
     digest_provider = DigestProvider(s3_client_provider, TEST_ACCOUNT_ID,
                                      'foo', 'us-east-1')
     key_provider = Mock()
     key_provider.get_public_keys.return_value = {
         'a': {
             'Value': VALID_TEST_KEY
         }
     }
     _setup_mock_traverser(self._mock_traverser, key_provider,
                           digest_provider, Mock())
     stdout, stderr, rc = self.run_cmd(
         ("cloudtrail validate-logs --trail-arn %s --start-time %s "
          "--region us-east-1") % (TEST_TRAIL_ARN, START_TIME_ARG), 1)
     self.assertIn(
         'Digest file\ts3://1/%s\tINVALID: signature verification failed' %
         key, stderr)
Example #14
 def test_follows_trails_when_bucket_changes(self):
     self.parsed_responses = [
         {'LocationConstraint': 'us-east-1'},
         {'Body': six.BytesIO(_gz_compress(self._logs[0]['_raw_value']))},
         {'LocationConstraint': 'us-west-2'},
         {'LocationConstraint': 'eu-west-1'}
     ]
     key_provider, digest_provider, validator = create_scenario(
         ['gap', 'bucket_change', 'link', 'bucket_change', 'link'],
         [[], [self._logs[0]], [], [], []])
     _setup_mock_traverser(self._mock_traverser, key_provider,
                           digest_provider, validator)
     stdout, stderr, rc = self.run_cmd(
         ("cloudtrail validate-logs --trail-arn %s --start-time %s "
          "--region us-east-1 --verbose")
         % (TEST_TRAIL_ARN, START_TIME_ARG), 0)
     self.assertIn('Digest file\ts3://3/%s\tvalid'
                   % digest_provider.digests[0], stdout)
     self.assertIn('Digest file\ts3://2/%s\tvalid'
                   % digest_provider.digests[1], stdout)
     self.assertIn('Digest file\ts3://2/%s\tvalid'
                   % digest_provider.digests[2], stdout)
     self.assertIn('Digest file\ts3://1/%s\tvalid'
                   % digest_provider.digests[3], stdout)
     self.assertIn('Digest file\ts3://1/%s\tvalid'
                   % digest_provider.digests[4], stdout)
Example #15
    def test_download_move_with_request_payer(self):
        cmdline = '%s s3://mybucket/mykey %s --request-payer' % (
            self.prefix, self.files.rootdir)

        self.parsed_responses = [
            # Response for HeadObject
            {
                "ContentLength": 100,
                "LastModified": "00:00:00Z"
            },
            # Response for GetObject
            {
                'ETag': '"foo-1"',
                'Body': six.BytesIO(b'foo')
            },
            # Response for DeleteObject
            {}
        ]

        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called([
            ('HeadObject', {
                'Bucket': 'mybucket',
                'Key': 'mykey',
                'RequestPayer': 'requester',
            }),
            ('GetObject', {
                'Bucket': 'mybucket',
                'Key': 'mykey',
                'RequestPayer': 'requester',
            }),
            ('DeleteObject', {
                'Bucket': 'mybucket',
                'Key': 'mykey',
                'RequestPayer': 'requester',
            }),
        ])
Example #16
 def test_glacier_sync_with_force_glacier(self):
     self.parsed_responses = [
         {
             'Contents': [
                 {
                     'Key': 'foo/bar.txt',
                     'ContentLength': '100',
                     'LastModified': '00:00:00Z',
                     'StorageClass': 'GLACIER',
                     'Size': 100
                 },
             ],
             'CommonPrefixes': []
         },
         {
             'ETag': '"foo-1"',
             'Body': six.BytesIO(b'foo')
         },
     ]
     cmdline = '%s s3://bucket/foo %s --force-glacier-transfer' % (
         self.prefix, self.files.rootdir)
     self.run_cmd(cmdline, expected_rc=0)
     self.assertEqual(len(self.operations_called), 2,
                      self.operations_called)
     self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
     self.assertEqual(self.operations_called[1][0].name, 'GetObject')
Example #17
 def __init__(self, input_bytes=b''):
     input_data = six.BytesIO(input_bytes)
     if six.PY3:
         mock_object = Mock()
         mock_object.buffer = input_data
     else:
         mock_object = input_data
     self._patch = patch('sys.stdin', mock_object)
Example #18
 def test_ensures_digest_can_be_gzip_inflated(self):
     s3_client = Mock()
     s3_client.get_object.return_value = {
         'Body': six.BytesIO('foo'.encode()),
         'Metadata': {}}
     provider = self._get_mock_provider(s3_client)
     with self.assertRaises(InvalidDigestFormat):
         provider.fetch_digest('bucket', 'key')
Example #19
 def setUp(self):
     super(TestGetGameSessionLog, self).setUp()
     self.files = FileCreator()
     self.filename = os.path.join(self.files.rootdir, 'myfile')
     self.urlopen_patch = mock.patch(
         'awscli.customizations.gamelift.getlog.urlopen')
     self.contents = b'My Contents'
     self.urlopen_mock = self.urlopen_patch.start()
     self.urlopen_mock.return_value = six.BytesIO(self.contents)
Example #20
@contextlib.contextmanager
def capture_input(input_bytes=b''):
    input_data = six.BytesIO(input_bytes)
    if six.PY3:
        mock_object = mock.Mock()
        mock_object.buffer = input_data
    else:
        mock_object = input_data

    with mock.patch('sys.stdin', mock_object):
        yield input_data
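A usage sketch for the context manager above (the payload is an assumption); on Python 3 the bytes are served through sys.stdin.buffer, matching how binary stdin is normally read:

import sys
import six

with capture_input(b'hello'):
    data = sys.stdin.buffer.read() if six.PY3 else sys.stdin.read()
assert data == b'hello'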
Example #21
 def setUp(self):
     self.tempdir = tempfile.mkdtemp()
     self.filename = os.path.join(self.tempdir, 'dir1', 'dir2', 'foo.txt')
     etag = md5()
     etag.update(b'foobar')
     etag = etag.hexdigest()
     self.response_data = {
         'Body': six.BytesIO(b'foobar'),
         'ETag': '"%s"' % etag,
     }
     self.last_update = datetime.now()
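The fixture derives the ETag from the body because, for single-part objects, S3's ETag is the quoted MD5 hex digest of the payload. A quick check of that relationship:

from hashlib import md5

# For a non-multipart object, the ETag is the quoted MD5 of the bytes.
assert '"%s"' % md5(b'foobar').hexdigest() == '"3858f62230ac3c915f300c664312c63f"'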
Example #22
 def test_cp_fails_with_utime_errors_but_continues(self):
     full_path = self.files.create_file('foo.txt', '')
     cmdline = '%s s3://bucket/key.txt %s' % (self.prefix, full_path)
     self.parsed_responses = [
         {"ContentLength": "100", "LastModified": "00:00:00Z"},
         {'ETag': '"foo-1"', 'Body': six.BytesIO(b'foo')}
     ]
     with mock.patch('os.utime') as mock_utime:
         mock_utime.side_effect = OSError(1, '')
         _, err, _ = self.run_cmd(cmdline, expected_rc=2)
         self.assertIn('attempting to modify the utime', err)
Example #23
 def test_operations_used_in_download_file(self):
     self.parsed_responses = [
         {"ContentLength": "100", "LastModified": "00:00:00Z"},
         {'ETag': '"foo-1"', 'Body': six.BytesIO(b'foo')},
     ]
     cmdline = '%s s3://bucket/key.txt %s' % (self.prefix,
                                              self.files.rootdir)
     self.run_cmd(cmdline, expected_rc=0)
     # The only operations we should have called are HeadObject/GetObject.
     self.assertEqual(len(self.operations_called), 2, self.operations_called)
     self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
     self.assertEqual(self.operations_called[1][0].name, 'GetObject')
Example #24
 def _pull_from_stream(self, amount_requested):
     """
     This function pulls data from stdin until it hits the amount
     requested or there is no more left to pull in from stdin.  The
     function wraps the data into a ``BytesIO`` object that is returned
     along with a boolean telling whether the amount requested is
     the amount returned.
     """
     stream_filein = sys.stdin
     if six.PY3:
         stream_filein = sys.stdin.buffer
     payload = stream_filein.read(amount_requested)
     payload_file = six.BytesIO(payload)
     return payload_file, len(payload) == amount_requested
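A standalone sketch of the same contract, exercised against an in-memory stream instead of stdin (the function name here is illustrative):

import six

def pull_from_stream(stream, amount_requested):
    # Read up to the requested amount; report whether it was all available.
    payload = stream.read(amount_requested)
    return six.BytesIO(payload), len(payload) == amount_requested

stream = six.BytesIO(b'0123456789')
chunk, full = pull_from_stream(stream, 4)
assert full and chunk.read() == b'0123'
chunk, full = pull_from_stream(stream, 100)  # only six bytes remain
assert not full and chunk.read() == b'456789'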
Example #25
 def test_fails_and_warns_when_log_hash_is_invalid(self):
     key_provider, digest_provider, validator = create_scenario(
         ['gap'], [[self._logs[0]]])
     self.parsed_responses = [
         {'LocationConstraint': ''},
         {'Body': six.BytesIO(_gz_compress('does not match'))}
     ]
     _setup_mock_traverser(self._mock_traverser, key_provider,
                           digest_provider, validator)
     stdout, stderr, rc = self.run_cmd(
         ("cloudtrail validate-logs --trail-arn %s --start-time "
          "--region us-east-1 '%s'") % (TEST_TRAIL_ARN, START_TIME_ARG), 1)
     self.assertIn(
         'Log file\ts3://1/key1\tINVALID: hash value doesn\'t match', stderr)
Example #26
 def test_sync_to_non_existant_directory(self):
     key = 'foo.txt'
     non_existant_directory = os.path.join(self.files.rootdir, 'fakedir')
     cmdline = '%s s3://bucket/ %s' % (self.prefix, non_existant_directory)
     self.parsed_responses = [
         {"CommonPrefixes": [], "Contents": [
             {"Key": key, "Size": 3,
              "LastModified": "2014-01-09T20:45:49.000Z"}]},
         {'ETag': '"c8afdb36c52cf4727836669019e69222-"',
          'Body': six.BytesIO(b'foo')}
     ]
     self.run_cmd(cmdline, expected_rc=0)
     # Make sure the file now exists.
     self.assertTrue(
         os.path.exists(os.path.join(non_existant_directory, key)))
Example #27
    def test_cp_with_sse_c_copy_source_fileb(self):
        self.parsed_responses = [{
            "AcceptRanges": "bytes",
            "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
            "ContentLength": 4,
            "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
            "Metadata": {},
            "ContentType": "binary/octet-stream"
        }, {
            "AcceptRanges": "bytes",
            "Metadata": {},
            "ContentType": "binary/octet-stream",
            "ContentLength": 4,
            "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
            "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
            "Body": six.BytesIO(b'foo\n')
        }, {}]

        file_path = self.files.create_file('foo.txt', '')
        key_path = self.files.create_file('foo.key', '')
        with open(key_path, 'wb') as f:
            f.write(b'K\xc9G\xe1\xf9&\xee\xd1\x03\xf3\xd4\x10\x18o9E\xc2\xaeD'
                    b'\x89(\x18\xea\xda\xf6\x81\xc3\xd2\x9d\\\xa8\xe6')
        cmdline = ('%s s3://bucket-one/key.txt s3://bucket/key.txt '
                   '--sse-c-copy-source --sse-c-copy-source-key fileb://%s' %
                   (self.prefix, key_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')

        expected_args = {
            'Key': 'key.txt',
            'Bucket': 'bucket',
            'ContentType': 'text/plain',
            'CopySource': 'bucket-one/key.txt',
            'CopySourceSSECustomerAlgorithm': 'AES256',
            'CopySourceSSECustomerKey':
                'S8lH4fkm7tED89QQGG85RcKuRIkoGOra9oHD0p1cqOY=',
            'CopySourceSSECustomerKeyMD5': 'mL8/mshNgBObhAC1j5BOLw==',
        }
        self.assertDictEqual(self.operations_called[1][1], expected_args)
Example #28
    def setUp(self):
        self.create_client_patch = mock.patch(
            'botocore.session.Session.create_client')
        self.mock_create_client = self.create_client_patch.start()
        self.session = get_session()

        self.client = mock.Mock()
        self.mock_create_client.return_value = self.client

        self.cmd = GetGameSessionLogCommand(self.session)

        self.contents = b'mycontents'
        self.file_creator = FileCreator()
        self.urlopen_patch = mock.patch(
            'awscli.customizations.gamelift.getlog.urlopen')
        self.urlopen_mock = self.urlopen_patch.start()
        self.urlopen_mock.return_value = six.BytesIO(self.contents)
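Returning a six.BytesIO from the patched urlopen works because callers only rely on the read() interface of the response object. A self-contained sketch of the same trick (the URL and patch target are assumptions):

import urllib.request
from unittest import mock
import six

with mock.patch('urllib.request.urlopen') as urlopen_mock:
    urlopen_mock.return_value = six.BytesIO(b'mycontents')
    body = urllib.request.urlopen('https://example.com/log').read()
assert body == b'mycontents'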
Example #29
 def test_get_media_streaming_output(self):
     cmdline = (
         'kinesis-video-media get-media --stream-name test-stream '
         '--start-selector StartSelectorType=EARLIEST %s'
     )
     self.parsed_response = {
         'ContentType': 'video/webm',
         'Payload': six.BytesIO(b'testbody')
     }
     outpath = self.files.full_path('outfile')
     params = {
         'StartSelector': {'StartSelectorType': 'EARLIEST'},
         'StreamName': 'test-stream'
     }
     self.assert_params_for_cmd(cmdline % outpath, params)
     with open(outpath, 'rb') as outfile:
         self.assertEqual(outfile.read(), b'testbody')
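The get-media Payload is a streaming body just like S3's Body; saving it to disk is a chunked copy. A minimal sketch, with the response dict standing in for a parsed get-media result:

import six

response = {'Payload': six.BytesIO(b'testbody')}  # stand-in for get_media output
with open('outfile', 'wb') as f:
    for chunk in iter(lambda: response['Payload'].read(8192), b''):
        f.write(chunk)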
Example #30
    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        self.filename = os.path.join(self.tempdir, 'dir1', 'dir2', 'foo.txt')
        etag = '3858f62230ac3c915f300c664312c63f'
        self.response_data = {
            'Body': six.BytesIO(b'foobar'),
            'ETag': '"%s"' % etag,
        }
        self.last_update = datetime.now()

        # Setup MD5 patches
        self.md5_object = mock.Mock()
        self.md5_object.hexdigest.return_value = etag
        md5_builder = mock.Mock(return_value=self.md5_object)
        self.md5_patch = mock.patch('hashlib.md5', md5_builder)
        self.md5_patch.start()
        self._md5_available_patch = None
        self.set_md5_available()