def test_too_long(self):
    # Wrap a 9-byte stream but tell HashingInput to expect only 8 bytes;
    # reading past the declared size must 422 even if the hash matches.
    payload = b'123456789'
    wrapped = HashingInput(
        BytesIO(payload), 8, lambda: md5(usedforsecurity=False),
        md5(payload, usedforsecurity=False).hexdigest())
    self.assertEqual(b'1234', wrapped.read(4))
    self.assertEqual(b'56', wrapped.read(2))
    # even though the hash matches, there was more data than we expected
    with self.assertRaises(swob.HTTPException) as raised:
        wrapped.read(3)
    self.assertEqual('422 Unprocessable Entity', raised.exception.status)
    # the error causes us to close the input
    self.assertTrue(wrapped._input.closed)
def test_multi(self):
    """set_multi/get_multi round-trip through a mocked memcache server,
    checking the stored timeout for default, short, and >30-day TTLs."""
    memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
                                             logger=self.logger)
    mock = MockMemcached()
    memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
        [(mock, mock)] * 2)
    memcache_client.set_multi(
        {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key')
    self.assertEqual(
        memcache_client.get_multi(('some_key2', 'some_key1'), 'multi_key'),
        [[4, 5, 6], [1, 2, 3]])
    for key in (b'some_key1', b'some_key2'):
        key = md5(key, usedforsecurity=False).hexdigest().encode('ascii')
        self.assertIn(key, mock.cache)
        _junk, cache_timeout, _junk = mock.cache[key]
        self.assertEqual(cache_timeout, b'0')

    # an explicit short TTL is stored verbatim
    memcache_client.set_multi(
        {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key',
        time=20)
    for key in (b'some_key1', b'some_key2'):
        key = md5(key, usedforsecurity=False).hexdigest().encode('ascii')
        _junk, cache_timeout, _junk = mock.cache[key]
        self.assertEqual(cache_timeout, b'20')

    # a long TTL is stored as an absolute expiry timestamp.
    # NOTE: renamed from the misleading ``fortydays`` -- the value is
    # actually fifty days, matching ``fiftydays`` in test_incr_w_timeout.
    fiftydays = 50 * 24 * 60 * 60
    esttimeout = time.time() + fiftydays
    memcache_client.set_multi(
        {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key',
        time=fiftydays)
    for key in (b'some_key1', b'some_key2'):
        key = md5(key, usedforsecurity=False).hexdigest().encode('ascii')
        _junk, cache_timeout, _junk = mock.cache[key]
        self.assertAlmostEqual(float(cache_timeout), esttimeout, delta=1)
    self.assertEqual(memcache_client.get_multi(
        ('some_key2', 'some_key1', 'not_exists'), 'multi_key'),
        [[4, 5, 6], [1, 2, 3], None])

    # Now let's simulate a lost connection and make sure we don't get
    # the index out of range stack trace when it does
    mock_stderr = six.StringIO()
    not_expected = "IndexError: list index out of range"
    with patch("sys.stderr", mock_stderr):
        mock.read_return_empty_str = True
        self.assertEqual(memcache_client.get_multi(
            ('some_key2', 'some_key1', 'not_exists'), 'multi_key'),
            None)
        self.assertFalse(not_expected in mock_stderr.getvalue())
def test_good(self):
    # Happy path: declared size and hash both match the wrapped stream.
    payload = b'123456789'
    wrapped = HashingInput(
        BytesIO(payload), 9, lambda: md5(usedforsecurity=False),
        md5(payload, usedforsecurity=False).hexdigest())
    self.assertEqual(b'1234', wrapped.read(4))
    self.assertEqual(b'56', wrapped.read(2))
    # trying to read past the end gets us whatever's left
    self.assertEqual(b'789', wrapped.read(4))
    # can continue trying to read -- but it'll be empty
    self.assertEqual(b'', wrapped.read(2))
    self.assertFalse(wrapped._input.closed)
    wrapped.close()
    self.assertTrue(wrapped._input.closed)
def md5hash(key):
    """Return the hex md5 of *key* as ASCII bytes, utf-8 encoding text
    keys first (with surrogateescape on py3)."""
    if not isinstance(key, bytes):
        # py2 has no surrogateescape handler; py3 uses it so keys that
        # came through WSGI-style decoding round-trip cleanly
        errors = 'strict' if six.PY2 else 'surrogateescape'
        key = key.encode('utf-8', errors=errors)
    return md5(key, usedforsecurity=False).hexdigest().encode('ascii')
def write_diskfile(df, timestamp, data=b'test data', frag_index=None,
                   commit=True, legacy_durable=False, extra_metadata=None):
    # Helper method to write some data and metadata to a diskfile.
    # Optionally do not commit the diskfile, or commit but using a legacy
    # durable file
    #
    # :param df: the DiskFile to write to
    # :param timestamp: a Timestamp; its .internal form is used for the
    #     X-Timestamp metadata and the legacy .durable file name
    # :param data: object body bytes
    # :param frag_index: if not None, recorded as the EC fragment index
    # :param commit: whether to make the diskfile durable at all
    # :param legacy_durable: when committing, write an empty .durable
    #     marker file instead of calling writer.commit()
    # :param extra_metadata: optional dict merged into the base metadata
    # :returns: the metadata dict that was written
    with df.create() as writer:
        writer.write(data)
        metadata = {
            'ETag': md5(data, usedforsecurity=False).hexdigest(),
            'X-Timestamp': timestamp.internal,
            'Content-Length': str(len(data)),
        }
        if extra_metadata:
            metadata.update(extra_metadata)
        if frag_index is not None:
            metadata['X-Object-Sysmeta-Ec-Frag-Index'] = str(frag_index)
        writer.put(metadata)
        if commit and legacy_durable:
            # simulate legacy .durable file creation
            durable_file = os.path.join(df._datadir,
                                        timestamp.internal + '.durable')
            with open(durable_file, 'wb'):
                pass
        elif commit:
            writer.commit(timestamp)
        # else: don't make it durable
    return metadata
def _test_put_no_body(self, use_content_length=False,
                      use_transfer_encoding=False, string_to_md5=b''):
    # Helper: PUT ?acl with an unreadable body must fail with
    # MissingSecurityHeader for the absent x-amz-acl header.
    content_md5 = base64.b64encode(
        md5(string_to_md5, usedforsecurity=False).digest()).strip()
    with UnreadableInput(self) as fake_input:
        req = Request.blank(
            '/bucket?acl',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': fake_input},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header(),
                     'Content-MD5': content_md5},
            body='')
        if not use_content_length:
            req.environ.pop('CONTENT_LENGTH')
        if use_transfer_encoding:
            req.environ['HTTP_TRANSFER_ENCODING'] = 'chunked'
        status, headers, body = self.call_s3api(req)
        self.assertEqual('400 Bad Request', status)
        self.assertEqual('MissingSecurityHeader', self._get_error_code(body))
        self.assertEqual('Your request was missing a required header.',
                         self._get_error_message(body))
        self.assertIn(b'<MissingHeaderName>x-amz-acl</MissingHeaderName>',
                      body)
def test_direct_put_container(self):
    # direct_put_container with an explicit Content-Length should pass
    # our headers through and send the body verbatim.
    payload = b'Let us begin with a quick introduction'
    sent_headers = {'x-foo': 'bar',
                    'Content-Length': str(len(payload)),
                    'Content-Type': 'application/json',
                    'User-Agent': 'my UA'}
    with mocked_http_conn(204) as conn:
        resp = direct_client.direct_put_container(
            self.node, self.part, self.account, self.container,
            contents=payload, headers=sent_headers)
        self.assertEqual(conn.host, self.node['ip'])
        self.assertEqual(conn.port, self.node['port'])
        self.assertEqual(conn.method, 'PUT')
        self.assertEqual(conn.path, self.container_path)
        self.assertEqual(conn.req_headers['Content-Length'],
                         str(len(payload)))
        self.assertEqual(conn.req_headers['Content-Type'],
                         'application/json')
        self.assertEqual(conn.req_headers['User-Agent'], 'my UA')
        self.assertTrue('x-timestamp' in conn.req_headers)
        self.assertEqual('bar', conn.req_headers.get('x-foo'))
        # the connection hashes what was sent so we can verify the body
        self.assertEqual(
            md5(payload, usedforsecurity=False).hexdigest(),
            conn.etag.hexdigest())
        self.assertIsNone(resp)
def test_direct_put_container_chunked(self):
    # Without a Content-Length header the PUT should fall back to
    # chunked transfer-encoding.
    payload = b'Let us begin with a quick introduction'
    sent_headers = {'x-foo': 'bar', 'Content-Type': 'application/json'}
    with mocked_http_conn(204) as conn:
        resp = direct_client.direct_put_container(
            self.node, self.part, self.account, self.container,
            contents=payload, headers=sent_headers)
        self.assertEqual(conn.host, self.node['ip'])
        self.assertEqual(conn.port, self.node['port'])
        self.assertEqual(conn.method, 'PUT')
        self.assertEqual(conn.path, self.container_path)
        self.assertEqual(conn.req_headers['Transfer-Encoding'], 'chunked')
        self.assertEqual(conn.req_headers['Content-Type'],
                         'application/json')
        self.assertTrue('x-timestamp' in conn.req_headers)
        self.assertEqual('bar', conn.req_headers.get('x-foo'))
        self.assertNotIn('Content-Length', conn.req_headers)
        # the wire format is a single chunk followed by the terminator
        expected_sent = b'%0x\r\n%s\r\n0\r\n\r\n' % (len(payload), payload)
        self.assertEqual(
            md5(expected_sent, usedforsecurity=False).hexdigest(),
            conn.etag.hexdigest())
        self.assertIsNone(resp)
def test_get_versioned_object(self):
    # Upload three versions of one object, then verify list_object_versions,
    # get_object and head_object all agree on etags and version ids.
    etags = []
    obj_name = self.create_name('versioned-obj')
    for i in range(3):
        obj_data = self.create_name('some-data-%s' % i).encode('ascii')
        # TODO: pull etag from response instead
        # newest version goes first so etags[0] is always the latest
        etags.insert(0, md5(obj_data, usedforsecurity=False).hexdigest())
        self.client.upload_fileobj(
            six.BytesIO(obj_data), self.bucket_name, obj_name)
    resp = self.client.list_object_versions(Bucket=self.bucket_name)
    objs = resp.get('Versions', [])
    versions = []
    for obj in objs:
        # LastModified/Owner/VersionId vary per run; strip them out and
        # remember the version ids (newest first) for later lookups
        obj.pop('LastModified')
        obj.pop('Owner')
        versions.append(obj.pop('VersionId'))
    self.assertEqual([{
        'ETag': '"%s"' % etags[0],
        'IsLatest': True,
        'Key': obj_name,
        'Size': len(obj_data),
        'StorageClass': 'STANDARD',
    }, {
        'ETag': '"%s"' % etags[1],
        'IsLatest': False,
        'Key': obj_name,
        'Size': len(obj_data),
        'StorageClass': 'STANDARD',
    }, {
        'ETag': '"%s"' % etags[2],
        'IsLatest': False,
        'Key': obj_name,
        'Size': len(obj_data),
        'StorageClass': 'STANDARD',
    }], objs)
    # un-versioned get_object returns IsLatest
    resp = self.client.get_object(Bucket=self.bucket_name, Key=obj_name)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('"%s"' % etags[0], resp['ETag'])
    # but you can get any object by version
    for i, version in enumerate(versions):
        resp = self.client.get_object(
            Bucket=self.bucket_name, Key=obj_name, VersionId=version)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual('"%s"' % etags[i], resp['ETag'])
    # and head_object works about the same
    resp = self.client.head_object(Bucket=self.bucket_name, Key=obj_name)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('"%s"' % etags[0], resp['ETag'])
    self.assertEqual(versions[0], resp['VersionId'])
    for version, etag in zip(versions, etags):
        resp = self.client.head_object(
            Bucket=self.bucket_name, Key=obj_name, VersionId=version)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual(version, resp['VersionId'])
        self.assertEqual('"%s"' % etag, resp['ETag'])
def _test_object_multi_DELETE(self, account):
    # Helper: POST a two-key multi-delete (second key 404s in swift)
    # under the given account and return the s3api response.
    self.keys = ['Key1', 'Key2']
    self.swift.register(
        'DELETE', '/v1/AUTH_test/bucket/%s' % self.keys[0],
        swob.HTTPNoContent, {}, None)
    self.swift.register(
        'DELETE', '/v1/AUTH_test/bucket/%s' % self.keys[1],
        swob.HTTPNotFound, {}, None)

    elem = Element('Delete')
    for key in self.keys:
        key_elem = SubElement(elem, 'Object')
        SubElement(key_elem, 'Key').text = key
    body = tostring(elem, use_s3ns=False)
    digest = md5(body, usedforsecurity=False).digest()
    content_md5 = base64.b64encode(digest).strip()

    req = Request.blank(
        '/bucket?delete',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'Authorization': 'AWS %s:hmac' % account,
                 'Date': self.get_date_header(),
                 'Content-MD5': content_md5},
        body=body)
    req.date = datetime.now()
    req.content_type = 'text/plain'
    return self.call_s3api(req)
def test_object_multi_DELETE_lots_of_keys(self):
    # Delete the maximum allowed number of keys (with very long names)
    # in one request; every one should come back Deleted.
    elem = Element('Delete')
    for i in range(self.s3api.conf.max_multi_delete_objects):
        status = swob.HTTPOk if i % 2 else swob.HTTPNotFound
        name = 'x' * 1000 + str(i)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/%s' % name,
                            status, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/%s' % name,
                            swob.HTTPNoContent, {}, None)
        key_elem = SubElement(elem, 'Object')
        SubElement(key_elem, 'Key').text = name
    body = tostring(elem, use_s3ns=False)
    content_md5 = base64.b64encode(
        md5(body, usedforsecurity=False).digest()).strip()

    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header(),
                                 'Content-MD5': content_md5},
                        body=body)
    status, headers, body = self.call_s3api(req)
    self.assertEqual('200 OK', status)

    elem = fromstring(body)
    self.assertEqual(self.s3api.conf.max_multi_delete_objects,
                     len(elem.findall('Deleted')))
def test_object_multi_DELETE_quiet(self):
    # With Quiet=true the response must not list successful deletes.
    for key, resp_class in (('Key1', swob.HTTPNoContent),
                            ('Key2', swob.HTTPNotFound)):
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/%s' % key,
                            resp_class, {}, None)

    elem = Element('Delete')
    SubElement(elem, 'Quiet').text = 'true'
    for key in ['Key1', 'Key2']:
        key_elem = SubElement(elem, 'Object')
        SubElement(key_elem, 'Key').text = key
    body = tostring(elem, use_s3ns=False)
    content_md5 = base64.b64encode(
        md5(body, usedforsecurity=False).digest()).strip()

    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header(),
                                 'Content-MD5': content_md5},
                        body=body)
    status, headers, body = self.call_s3api(req)
    self.assertEqual('200', status.split()[0])

    elem = fromstring(body)
    self.assertEqual(0, len(elem.findall('Deleted')))
def test_set_get_json(self):
    # Round-trip JSON-serializable values through MemcacheRing and check
    # the raw (flags, timeout, value) tuples the mock server stores.
    memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
                                             logger=self.logger)
    mock = MockMemcached()
    memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
        [(mock, mock)] * 2)
    # keys are stored under their md5 hexdigest
    cache_key = md5(b'some_key',
                    usedforsecurity=False).hexdigest().encode('ascii')
    memcache_client.set('some_key', [1, 2, 3])
    self.assertEqual(memcache_client.get('some_key'), [1, 2, 3])
    # See JSON_FLAG
    self.assertEqual(mock.cache, {cache_key: (b'2', b'0', b'[1, 2, 3]')})
    memcache_client.set('some_key', [4, 5, 6])
    self.assertEqual(memcache_client.get('some_key'), [4, 5, 6])
    self.assertEqual(mock.cache, {cache_key: (b'2', b'0', b'[4, 5, 6]')})
    memcache_client.set('some_key', ['simple str', 'utf8 str éà'])
    # As per http://wiki.openstack.org/encoding,
    # we should expect to have unicode
    self.assertEqual(
        memcache_client.get('some_key'), ['simple str', u'utf8 str éà'])
    self.assertEqual(mock.cache, {cache_key: (
        b'2', b'0', b'["simple str", "utf8 str \\u00e9\\u00e0"]')})
    # an explicit short timeout is stored verbatim
    memcache_client.set('some_key', [1, 2, 3], time=20)
    self.assertEqual(mock.cache, {cache_key: (b'2', b'20', b'[1, 2, 3]')})
    # a long timeout is stored as an absolute expiry time
    sixtydays = 60 * 24 * 60 * 60
    esttimeout = time.time() + sixtydays
    memcache_client.set('some_key', [1, 2, 3], time=sixtydays)
    _junk, cache_timeout, _junk = mock.cache[cache_key]
    self.assertAlmostEqual(float(cache_timeout), esttimeout, delta=1)
def getheaders(self):
    # Build the fake response headers for this mocked connection.
    # NOTE(review): ``kwargs`` and ``container_ts_iter`` are captured
    # from an enclosing scope not visible in this block.
    etag = self.etag
    if not etag:
        if isinstance(self.body, bytes):
            etag = ('"' + md5(
                self.body, usedforsecurity=False).hexdigest() + '"')
        else:
            # presumably the md5 of b'\n' -- TODO confirm
            etag = '"68b329da9893e34099c7d8ad5cb9c940"'

    am_slow, _junk = self.get_slow()
    headers = HeaderKeyDict({
        'content-length': len(self.body),
        'content-type': 'x-application/test',
        'x-timestamp': self.timestamp,
        'x-backend-timestamp': self.timestamp,
        'last-modified': self.timestamp,
        'x-object-meta-test': 'testing',
        'x-delete-at': '9876543210',
        'etag': etag,
        'x-works': 'yes',
    })
    # only successful responses carry the container count
    if self.status // 100 == 2:
        headers['x-account-container-count'] = \
            kwargs.get('count', 12345)
    if not self.timestamp:
        # when timestamp is None, HeaderKeyDict raises KeyError
        headers.pop('x-timestamp', None)
    try:
        if next(container_ts_iter) is False:
            headers['x-container-timestamp'] = '1'
    except StopIteration:
        pass
    # caller-supplied headers win over the defaults above
    headers.update(self.headers)
    return headers.items()
def direct_get(self, node, part, require_durable=True, extra_headers=None):
    # Fetch the object straight from one storage node; returns the
    # response headers and the md5 hexdigest of the streamed body.
    req_headers = {'X-Backend-Storage-Policy-Index': int(self.policy)}
    if extra_headers:
        req_headers.update(extra_headers)
    if not require_durable:
        req_headers['X-Backend-Fragment-Preferences'] = json.dumps([])
    # node dict has unicode values so utf8 decode our path parts too in
    # case they have non-ascii characters
    path_parts = (self.account, self.container_name, self.object_name)
    if six.PY2:
        acc, con, obj = (p.decode('utf8') for p in path_parts)
    else:
        acc, con, obj = path_parts
    headers, data = direct_client.direct_get_object(
        node, part, acc, con, obj, headers=req_headers,
        resp_chunk_size=64 * 2 ** 20)
    checksum = md5(usedforsecurity=False)
    for chunk in data:
        checksum.update(chunk)
    return headers, checksum.hexdigest()
def test_incr_w_timeout(self):
    # incr should honor an explicit timeout, turn a long timeout into an
    # absolute time, and not reset an existing key's timeout.
    memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
                                             logger=self.logger)
    server = MockMemcached()
    memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
        [(server, server)] * 2)
    hashed_key = md5(b'some_key',
                     usedforsecurity=False).hexdigest().encode('ascii')

    memcache_client.incr('some_key', delta=5, time=55)
    self.assertEqual(b'5', memcache_client.get('some_key'))
    self.assertEqual({hashed_key: (b'0', b'55', b'5')}, server.cache)

    memcache_client.delete('some_key')
    self.assertIsNone(memcache_client.get('some_key'))

    fiftydays = 50 * 24 * 60 * 60
    esttimeout = time.time() + fiftydays
    memcache_client.incr('some_key', delta=5, time=fiftydays)
    self.assertEqual(b'5', memcache_client.get('some_key'))
    _junk, stored_timeout, _junk = server.cache[hashed_key]
    self.assertAlmostEqual(float(stored_timeout), esttimeout, delta=1)

    memcache_client.delete('some_key')
    self.assertIsNone(memcache_client.get('some_key'))

    memcache_client.incr('some_key', delta=5)
    self.assertEqual(b'5', memcache_client.get('some_key'))
    self.assertEqual({hashed_key: (b'0', b'0', b'5')}, server.cache)

    # a second incr with a time doesn't change the stored timeout
    memcache_client.incr('some_key', delta=5, time=55)
    self.assertEqual(b'10', memcache_client.get('some_key'))
    self.assertEqual({hashed_key: (b'0', b'0', b'10')}, server.cache)
def test_object_multi_DELETE(self):
    # Multi-object DELETE covering: a plain object, a missing object,
    # an SLO, and an SLO carrying an s3api etag (deleted async).
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                        swob.HTTPNotFound, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key3',
                        swob.HTTPOk,
                        {'x-static-large-object': 'True'}, None)
    slo_delete_resp = {
        'Number Not Found': 0,
        'Response Status': '200 OK',
        'Errors': [],
        'Response Body': '',
        'Number Deleted': 8
    }
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key3',
                        swob.HTTPOk, {}, json.dumps(slo_delete_resp))
    self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key4',
                        swob.HTTPOk,
                        {'x-static-large-object': 'True',
                         'x-object-sysmeta-s3api-etag': 'some-etag'},
                        None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key4',
                        swob.HTTPNoContent, {}, None)
    elem = Element('Delete')
    for key in ['Key1', 'Key2', 'Key3', 'Key4']:
        obj = SubElement(elem, 'Object')
        SubElement(obj, 'Key').text = key
    body = tostring(elem, use_s3ns=False)
    content_md5 = base64.b64encode(
        md5(body, usedforsecurity=False).digest()).strip()
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Content-Type': 'multipart/form-data',
                                 'Date': self.get_date_header(),
                                 'Content-MD5': content_md5},
                        body=body)
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '200')
    elem = fromstring(body)
    # missing objects still count as Deleted, so all four succeed
    self.assertEqual(len(elem.findall('Deleted')), 4)
    self.assertEqual(len(elem.findall('Error')), 0)
    # every key is HEADed first; SLOs get multipart-manifest=delete
    self.assertEqual(self.swift.calls, [
        ('HEAD', '/v1/AUTH_test/bucket'),
        ('HEAD', '/v1/AUTH_test/bucket/Key1?symlink=get'),
        ('DELETE', '/v1/AUTH_test/bucket/Key1'),
        ('HEAD', '/v1/AUTH_test/bucket/Key2?symlink=get'),
        ('DELETE', '/v1/AUTH_test/bucket/Key2'),
        ('HEAD', '/v1/AUTH_test/bucket/Key3?symlink=get'),
        ('DELETE', '/v1/AUTH_test/bucket/Key3?multipart-manifest=delete'),
        ('HEAD', '/v1/AUTH_test/bucket/Key4?symlink=get'),
        ('DELETE',
         '/v1/AUTH_test/bucket/Key4?async=on&multipart-manifest=delete'),
    ])
def test_object_multi_DELETE_versioned_suspended(self):
    # Multi-object DELETE mixing explicit version-id deletes (Key1 ok,
    # Key2 missing) with a plain un-versioned delete (Key3).
    self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    t1 = next(self.ts)
    key1 = '/v1/AUTH_test/bucket/Key1' + \
        '?symlink=get&version-id=%s' % t1.normal
    self.swift.register('HEAD', key1, swob.HTTPOk, {}, None)
    self.swift.register('DELETE', key1, swob.HTTPNoContent, {}, None)
    t2 = next(self.ts)
    key2 = '/v1/AUTH_test/bucket/Key2' + \
        '?symlink=get&version-id=%s' % t2.normal
    self.swift.register('HEAD', key2, swob.HTTPNotFound, {}, None)
    self.swift.register('DELETE', key2, swob.HTTPNotFound, {}, None)
    key3 = '/v1/AUTH_test/bucket/Key3'
    self.swift.register('HEAD', key3, swob.HTTPOk, {}, None)
    self.swift.register('DELETE', key3, swob.HTTPNoContent, {}, None)
    elem = Element('Delete')
    items = (
        ('Key1', t1),
        ('Key2', t2),
        ('Key3', None),
    )
    for key, ts in items:
        obj = SubElement(elem, 'Object')
        SubElement(obj, 'Key').text = key
        if ts:
            SubElement(obj, 'VersionId').text = ts.normal
    body = tostring(elem, use_s3ns=False)
    content_md5 = base64.b64encode(
        md5(body, usedforsecurity=False).digest()).strip()
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={
                            'Authorization': 'AWS test:tester:hmac',
                            'Date': self.get_date_header(),
                            'Content-MD5': content_md5
                        },
                        body=body)
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '200')
    elem = fromstring(body)
    # all three report Deleted -- the 404 on Key2 still counts
    self.assertEqual(len(elem.findall('Deleted')), 3)
    # version-id'd keys keep their query string on HEAD and DELETE
    self.assertEqual(self.swift.calls, [
        ('HEAD', '/v1/AUTH_test/bucket'),
        ('HEAD', '/v1/AUTH_test/bucket/Key1'
         '?symlink=get&version-id=%s' % t1.normal),
        ('DELETE', '/v1/AUTH_test/bucket/Key1'
         '?symlink=get&version-id=%s' % t1.normal),
        ('HEAD', '/v1/AUTH_test/bucket/Key2'
         '?symlink=get&version-id=%s' % t2.normal),
        ('DELETE', '/v1/AUTH_test/bucket/Key2'
         '?symlink=get&version-id=%s' % t2.normal),
        ('HEAD', '/v1/AUTH_test/bucket/Key3?symlink=get'),
        ('DELETE', '/v1/AUTH_test/bucket/Key3'),
    ])
def test_list_objects(self):
    # Upload 3 objects x 3 versions each, then compare unversioned
    # listings (list_objects / list_objects_v2) against the versioned one.
    etags = defaultdict(list)
    for i in range(3):
        obj_name = self.create_name('versioned-obj')
        # NOTE(review): this inner loop shadows the outer ``i``; the
        # outer index is unused, so it's harmless but worth renaming.
        for i in range(3):
            obj_data = self.create_name('some-data-%s' % i).encode('ascii')
            # newest version first, so obj_etags[0] is the latest
            etags[obj_name].insert(0, md5(
                obj_data, usedforsecurity=False).hexdigest())
            self.client.upload_fileobj(
                six.BytesIO(obj_data), self.bucket_name, obj_name)
    # both unversioned list_objects responses are similar
    expected = []
    for name, obj_etags in sorted(etags.items()):
        expected.append({
            'ETag': '"%s"' % obj_etags[0],
            'Key': name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        })
    resp = self.client.list_objects(Bucket=self.bucket_name)
    objs = resp.get('Contents', [])
    for obj in objs:
        obj.pop('LastModified')
        # one difference seems to be the Owner key
        self.assertEqual({'DisplayName', 'ID'},
                         set(obj.pop('Owner').keys()))
    self.assertEqual(expected, objs)
    resp = self.client.list_objects_v2(Bucket=self.bucket_name)
    objs = resp.get('Contents', [])
    for obj in objs:
        obj.pop('LastModified')
    self.assertEqual(expected, objs)
    # versioned listings has something for everyone
    expected = []
    for name, obj_etags in sorted(etags.items()):
        is_latest = True
        for etag in obj_etags:
            expected.append({
                'ETag': '"%s"' % etag,
                'IsLatest': is_latest,
                'Key': name,
                'Size': len(obj_data),
                'StorageClass': 'STANDARD',
            })
            is_latest = False
    resp = self.client.list_object_versions(Bucket=self.bucket_name)
    objs = resp.get('Versions', [])
    versions = []
    for obj in objs:
        obj.pop('LastModified')
        obj.pop('Owner')
        versions.append(obj.pop('VersionId'))
    self.assertEqual(expected, objs)
def proxy_get(self):
    # GET the object through the proxy; return the response headers and
    # an md5 hexdigest of the streamed body.
    headers, body = client.get_object(
        self.url, self.token, self.container_name, self.object_name,
        resp_chunk_size=64 * 2 ** 10)
    checksum = md5(usedforsecurity=False)
    for chunk in body:
        checksum.update(chunk)
    return headers, checksum.hexdigest()
def hash_mod(self, name, divisor):
    """
    :param name: a task object name
    :param divisor: a divisor number
    :return: an integer to decide which expirer is assigned to the task
    """
    key = name if isinstance(name, bytes) else name.encode('utf8')
    # md5 is only used for shuffling mod
    digest = md5(key, usedforsecurity=False).hexdigest()
    return int(digest, 16) % divisor
def test_put_object_expect(self):
    # A PUT with Expect: 100-continue should behave like a normal PUT.
    obj = 'object'
    content = b'abcdefghij'
    expected_etag = md5(content, usedforsecurity=False).hexdigest()
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, obj, {'Expect': '100-continue'}, content)
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def test_bad_hash(self):
    # A sha256 hexdigest can never equal the expected md5 hexdigest, so
    # finishing the read must raise a 422 and close the input.
    payload = b'123456789'
    wrapped = HashingInput(
        BytesIO(payload), 9, hashlib.sha256,
        md5(payload, usedforsecurity=False).hexdigest())
    self.assertEqual(b'1234', wrapped.read(4))
    self.assertEqual(b'5678', wrapped.read(4))
    with self.assertRaises(swob.HTTPException) as raised:
        wrapped.read(4)
    self.assertEqual('422 Unprocessable Entity', raised.exception.status)
    self.assertTrue(wrapped._input.closed)
def test_put_object_storage_class(self):
    # PUT with an explicit STANDARD storage class should succeed.
    obj = 'object'
    content = b'abcdefghij'
    expected_etag = md5(content, usedforsecurity=False).hexdigest()
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, obj,
        {'X-Amz-Storage-Class': 'STANDARD'}, content)
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def get_object(self, container_name, object_name):
    # Download the object and return the md5 hexdigest of its body.
    headers, body = client.get_object(
        self.url, self.token, container_name, object_name,
        resp_chunk_size=64 * 2 ** 10)
    checksum = md5(usedforsecurity=False)
    for chunk in body:
        checksum.update(chunk)
    return checksum.hexdigest()
def test_copy_object(self):
    # Copying defaults to the latest version of the source, but honors
    # an explicit VersionId in the CopySource dict.
    etags = []
    obj_name = self.create_name('versioned-obj')
    for i in range(3):
        obj_data = self.create_name('some-data-%s' % i).encode('ascii')
        # newest version first, so etags[0] tracks the latest upload
        etags.insert(0, md5(
            obj_data, usedforsecurity=False).hexdigest())
        self.client.upload_fileobj(
            six.BytesIO(obj_data), self.bucket_name, obj_name)
    resp = self.client.list_object_versions(Bucket=self.bucket_name)
    objs = resp.get('Versions', [])
    versions = []
    for obj in objs:
        versions.append(obj.pop('VersionId'))
    # CopySource can just be Bucket/Key string
    first_target = self.create_name('target-obj1')
    copy_resp = self.client.copy_object(
        Bucket=self.bucket_name, Key=first_target,
        CopySource='%s/%s' % (self.bucket_name, obj_name))
    self.assertEqual(versions[0], copy_resp['CopySourceVersionId'])
    # and you'll just get the most recent version
    resp = self.client.head_object(Bucket=self.bucket_name,
                                   Key=first_target)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('"%s"' % etags[0], resp['ETag'])
    # or you can be more explicit
    explicit_target = self.create_name('target-%s' % versions[0])
    copy_source = {'Bucket': self.bucket_name, 'Key': obj_name,
                   'VersionId': versions[0]}
    copy_resp = self.client.copy_object(
        Bucket=self.bucket_name, Key=explicit_target,
        CopySource=copy_source)
    self.assertEqual(versions[0], copy_resp['CopySourceVersionId'])
    # and you still get the same thing
    resp = self.client.head_object(Bucket=self.bucket_name,
                                   Key=explicit_target)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('"%s"' % etags[0], resp['ETag'])
    # but you can also copy from a specific version
    version_target = self.create_name('target-%s' % versions[2])
    copy_source['VersionId'] = versions[2]
    copy_resp = self.client.copy_object(
        Bucket=self.bucket_name, Key=version_target,
        CopySource=copy_source)
    self.assertEqual(versions[2], copy_resp['CopySourceVersionId'])
    resp = self.client.head_object(Bucket=self.bucket_name,
                                   Key=version_target)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('"%s"' % etags[2], resp['ETag'])
def write_part(i):
    # Upload one zero-padded 1 MiB+ segment and record its manifest
    # entry (path/etag/size) for the SLO manifest.
    part_body = b'VERIFY%0.2d' % i + b'\x00' * 1048576
    part_name = self.get_object_name('manifest_part_%0.2d' % i)
    entry = {
        "path": "/%s/%s" % (self.container_name, part_name),
        "etag": md5(part_body, usedforsecurity=False).hexdigest(),
        "size_bytes": len(part_body),
    }
    self.brain.client.put_object(self.container_name, part_name,
                                 {}, part_body)
    manifest_data.append(entry)
def test_multi_delete(self):
    # delete() with a server_key must target the server that key hashes
    # to, leaving a same-named key on the other server untouched.
    memcache_client = memcached.MemcacheRing(
        ['1.2.3.4:11211', '1.2.3.5:11211'], logger=self.logger)
    mock1 = MockMemcached()
    mock2 = MockMemcached()
    memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
        [(mock1, mock1)] * 2)
    memcache_client._client_cache['1.2.3.5:11211'] = MockedMemcachePool(
        [(mock2, mock2)] * 2)
    # MemcacheRing will put 'some_key0' on server 1.2.3.5:11211 and
    # 'some_key1' and 'multi_key' on '1.2.3.4:11211'
    memcache_client.set_multi(
        {
            'some_key0': [1, 2, 3],
            'some_key1': [4, 5, 6]
        }, 'multi_key')
    self.assertEqual(
        memcache_client.get_multi(('some_key1', 'some_key0'), 'multi_key'),
        [[4, 5, 6], [1, 2, 3]])
    for key in (b'some_key0', b'some_key1'):
        key = md5(key, usedforsecurity=False).hexdigest().encode('ascii')
        self.assertIn(key, mock1.cache)
        _junk, cache_timeout, _junk = mock1.cache[key]
        self.assertEqual(cache_timeout, b'0')
    # a plain set() of 'some_key0' lands on the other server (mock2)
    memcache_client.set('some_key0', [7, 8, 9])
    self.assertEqual(memcache_client.get('some_key0'), [7, 8, 9])
    key = md5(b'some_key0',
              usedforsecurity=False).hexdigest().encode('ascii')
    self.assertIn(key, mock2.cache)
    # Delete 'some_key0' with server_key='multi_key'
    memcache_client.delete('some_key0', server_key='multi_key')
    self.assertEqual(
        memcache_client.get_multi(('some_key0', 'some_key1'), 'multi_key'),
        [None, [4, 5, 6]])
    # 'some_key0' have to be available on 1.2.3.5:11211
    self.assertEqual(memcache_client.get('some_key0'), [7, 8, 9])
    self.assertIn(key, mock2.cache)
def test_put_object_content_type(self):
    # An explicit Content-Type on PUT must be preserved and returned
    # on a subsequent HEAD.
    obj = 'object'
    content = b'abcdefghij'
    expected_etag = md5(content, usedforsecurity=False).hexdigest()
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, obj, {'Content-Type': 'text/plain'}, content)
    self.assertEqual(200, status)
    status, headers, body = \
        self.conn.make_request('HEAD', self.bucket, obj)
    self.assertEqual('text/plain', headers['content-type'])
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def compute_md5sum(cls, data):
    """Return the md5 hexdigest of *data*, read in 4 KiB chunks.

    *data* may be raw bytes (wrapped in a BytesIO internally) or a
    file-like object; the stream is rewound to offset 0 afterwards.
    """
    stream = io.BytesIO(data) if isinstance(data, bytes) else data
    digest = md5(usedforsecurity=False)
    for chunk in iter(lambda: stream.read(4096), b''):
        digest.update(chunk)
    stream.seek(0)
    return digest.hexdigest()