def setup_objects(self):
    self.objects = (('rose', '2011-01-05T02:19:14.275290', 0, 303),
                    ('viola', '2011-01-05T02:19:14.275290', 0, 3909),
                    ('lily', '2011-01-05T02:19:14.275290', 0, 3909),
                    ('with space', '2011-01-05T02:19:14.275290', 0, 390),
                    ('with%20space', '2011-01-05T02:19:14.275290', 0, 390))
    objects = map(
        lambda item: {'name': str(item[0]), 'last_modified': str(item[1]),
                      'hash': str(item[2]), 'bytes': str(item[3])},
        list(self.objects))
    object_list = json.dumps(objects)
    self.prefixes = ['rose', 'viola', 'lily']
    object_list_subdir = []
    for p in self.prefixes:
        object_list_subdir.append({"subdir": p})
    self.swift.register('HEAD', '/v1/AUTH_test/junk', swob.HTTPNoContent,
                        {}, None)
    self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
                        {}, None)
    self.swift.register('GET', '/v1/AUTH_test/junk', swob.HTTPOk, {},
                        object_list)
    self.swift.register('GET', '/v1/AUTH_test/junk_subdir', swob.HTTPOk,
                        {}, json.dumps(object_list_subdir))
def test_v2_obj_response(self):
    req = Request.blank('/endpoints/v2/a/c/o1')
    resp = req.get_response(self.list_endpoints)
    expected = {
        'endpoints': ["http://10.1.1.1:6000/sdb1/1/a/c/o1",
                      "http://10.1.2.2:6000/sdd1/1/a/c/o1"],
        'headers': {'X-Backend-Storage-Policy-Index': "0"},
    }
    self.assertEqual(resp.body, json.dumps(expected))
    for policy in POLICIES:
        patch_path = 'swift.common.middleware.list_endpoints' \
            '.get_container_info'
        mock_get_container_info = lambda *args, **kwargs: \
            {'storage_policy': int(policy)}
        with mock.patch(patch_path, mock_get_container_info):
            resp = req.get_response(self.list_endpoints)
            part, nodes = policy.object_ring.get_nodes('a', 'c', 'o1')
            [node.update({'part': part}) for node in nodes]
            path = 'http://%(ip)s:%(port)s/%(device)s/%(part)s/a/c/o1'
            expected = {
                'headers': {
                    'X-Backend-Storage-Policy-Index': str(int(policy))},
                'endpoints': [path % node for node in nodes],
            }
            self.assertEqual(resp.body, json.dumps(expected))
def test_parse_input(self):
    self.assertRaises(HTTPException, slo.parse_input, "some non json")
    data = json.dumps([{"path": "/cont/object",
                        "etag": "etagoftheobjecitsegment",
                        "size_bytes": 100}])
    self.assertEquals("/cont/object", slo.parse_input(data)[0]["path"])
    bad_data = json.dumps([{"path": "/cont/object", "size_bytes": 100}])
    self.assertRaises(HTTPException, slo.parse_input, bad_data)
def test_parse_input(self):
    self.assertRaises(HTTPException, slo.parse_input, 'some non json')
    data = json.dumps(
        [{'path': '/cont/object', 'etag': 'etagoftheobjecitsegment',
          'size_bytes': 100}])
    self.assertEquals('/cont/object', slo.parse_input(data)[0]['path'])
    bad_data = json.dumps([{'path': '/cont/object', 'size_bytes': 100}])
    self.assertRaises(HTTPException, slo.parse_input, bad_data)
def getAll(self):
    """
    Dump everything, used for debugging.
    """
    obj_data = [record.oRecordData for record in
                self.conn.query("SELECT FROM Metadata")]
    # Assumed class names for the container and account queries,
    # mirroring the object query above; these names are illustrative.
    con_data = [record.oRecordData for record in
                self.conn.query("SELECT FROM ContainerMetadata")]
    acc_data = [record.oRecordData for record in
                self.conn.query("SELECT FROM AccountMetadata")]
    return ''.join([
        json.dumps(obj_data), "\n\n",
        json.dumps(con_data), "\n\n",
        json.dumps(acc_data)
    ])
def get_response_body(data_format, data_dict, error_list):
    """
    Returns a properly formatted response body according to format.

    Handles json and xml, otherwise will return text/plain.

    Note: xml response does not include xml declaration.

    :params data_format: resulting format
    :params data_dict: generated data about results.
    :params error_list: list of quoted filenames that failed
    """
    if data_format == "application/json":
        data_dict["Errors"] = error_list
        return json.dumps(data_dict)
    if data_format and data_format.endswith("/xml"):
        output = "<delete>\n"
        for key in sorted(data_dict.keys()):
            xml_key = key.replace(" ", "_").lower()
            output += "<%s>%s</%s>\n" % (xml_key, data_dict[key], xml_key)
        output += "<errors>\n"
        output += "\n".join(
            ["<object>"
             "<name>%s</name><status>%s</status>"
             "</object>" % (saxutils.escape(name), status)
             for name, status in error_list])
        output += "</errors>\n</delete>\n"
        return output
    output = ""
    for key in sorted(data_dict.keys()):
        output += "%s: %s\n" % (key, data_dict[key])
    output += "Errors:\n"
    output += "\n".join(["%s, %s" % (name, status)
                         for name, status in error_list])
    return output
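# Usage sketch for get_response_body() above; the stats and error values
# are illustrative, not taken from any real request.
sample_stats = {'Number Deleted': 2, 'Response Status': '200 OK'}
sample_errors = [('/v1/a/c/obj1', '404 Not Found')]
print(get_response_body('text/plain', sample_stats, sample_errors))
# Number Deleted: 2
# Response Status: 200 OK
# Errors:
# /v1/a/c/obj1, 404 Not Found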
def verify_request(self, req, env):
    try:
        version, account, container, obj = split_path(
            req.path_info, minsegs=1, maxsegs=4, rest_with_last=True)
    except ValueError:
        self.logger.increment('errors')
        return jresponse('-1', 'not found', req, 404)
    token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN'))
    verify_flag = False
    if token:
        user_info = self.get_cache_user_info(env, token)
        if user_info:
            tenant = 'AUTH_' + user_info.replace('@', '').replace('.', '')
            if account != tenant:
                self.logger.increment('unauthorized')
                verify_flag = False
            else:
                verify_flag = True
        else:
            self.logger.increment('unauthorized')
            verify_flag = False
    else:
        self.logger.increment('unauthorized')
        verify_flag = False
    oauth_data_list = json.dumps({'verify_flag': str(verify_flag).lower()})
    return Response(body=oauth_data_list, request=req)
def _reclaim(self, conn, timestamp):
    """
    Removes any empty metadata values older than the timestamp using the
    given database connection. This function will not call commit on the
    conn, but will instead return True if the database needs committing.
    This function was created as a worker to limit transactions and
    commits from other related functions.

    :param conn: Database connection to reclaim metadata within.
    :param timestamp: Empty metadata items last updated before this
                      timestamp will be removed.
    :returns: True if conn.commit() should be called
    """
    try:
        md = conn.execute('SELECT metadata FROM %s_stat' %
                          self.db_type).fetchone()[0]
        if md:
            md = json.loads(md)
            keys_to_delete = []
            for key, (value, value_timestamp) in md.iteritems():
                if value == '' and value_timestamp < timestamp:
                    keys_to_delete.append(key)
            if keys_to_delete:
                for key in keys_to_delete:
                    del md[key]
                conn.execute('UPDATE %s_stat SET metadata = ?' %
                             self.db_type, (json.dumps(md),))
                return True
    except sqlite3.OperationalError as err:
        if 'no such column: metadata' not in str(err):
            raise
    return False
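# Illustrative sketch (not part of the broker code) of the metadata
# structure _reclaim() walks: each key maps to a (value, timestamp) pair,
# and empty values stamped before the reclaim point are dropped.
md = {'X-Container-Meta-Color': ('', '0000000001.00000'),
      'X-Container-Meta-Size': ('big', '0000000002.00000')}
reclaim_before = '0000000005.00000'
md = dict((k, v) for k, v in md.items()
          if not (v[0] == '' and v[1] < reclaim_before))
# only X-Container-Meta-Size survives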
def test_handle_multipart_put_check_data_bad(self):
    bad_data = json.dumps(
        [{"path": "/checktest/a_1", "etag": "a", "size_bytes": "2"},
         {"path": "/checktest/badreq", "etag": "a", "size_bytes": "1"},
         {"path": "/checktest/b_2", "etag": "not-b", "size_bytes": "2"},
         {"path": "/checktest/slob", "etag": "not-slob",
          "size_bytes": "2"}])
    req = Request.blank(
        "/v1/AUTH_test/checktest/man?multipart-manifest=put",
        environ={"REQUEST_METHOD": "PUT"},
        headers={"Accept": "application/json"},
        body=bad_data)
    status, headers, body = self.call_slo(req)
    self.assertEquals(self.app.call_count, 4)
    errors = json.loads(body)["Errors"]
    self.assertEquals(len(errors), 5)
    self.assertEquals(errors[0][0], "/checktest/a_1")
    self.assertEquals(errors[0][1], "Size Mismatch")
    self.assertEquals(errors[1][0], "/checktest/badreq")
    self.assertEquals(errors[1][1], "400 Bad Request")
    self.assertEquals(errors[2][0], "/checktest/b_2")
    self.assertEquals(errors[2][1], "Etag Mismatch")
    self.assertEquals(errors[3][0], "/checktest/slob")
    self.assertEquals(errors[3][1], "Size Mismatch")
    self.assertEquals(errors[4][0], "/checktest/slob")
    self.assertEquals(errors[4][1], "Etag Mismatch")
def get_response_body(self, data_format, data_dict, error_list):
    """
    Returns a properly formatted response body according to format.

    :params data_format: resulting format
    :params data_dict: generated data about results.
    :params error_list: list of quoted filenames that failed
    """
    if data_format == "text/plain":
        output = ""
        for key in sorted(data_dict.keys()):
            output += "%s: %s\n" % (key, data_dict[key])
        output += "Errors:\n"
        output += "\n".join(["%s, %s" % (name, status)
                             for name, status in error_list])
        return output
    if data_format == "application/json":
        data_dict["Errors"] = error_list
        return json.dumps(data_dict)
    if data_format.endswith("/xml"):
        output = '<?xml version="1.0" encoding="UTF-8"?>\n<delete>\n'
        for key in sorted(data_dict.keys()):
            xml_key = key.replace(" ", "_").lower()
            output += "<%s>%s</%s>\n" % (xml_key, data_dict[key], xml_key)
        output += "<errors>\n"
        output += "\n".join(
            ["<object>"
             "<name>%s</name><status>%s</status>"
             "</object>" % (saxutils.escape(name), status)
             for name, status in error_list])
        output += "</errors>\n</delete>\n"
        return output
    raise HTTPNotAcceptable("Invalid output type")
def get_response_body(data_format, data_dict, error_list):
    """
    Returns a properly formatted response body according to format.

    :params data_format: resulting format
    :params data_dict: generated data about results.
    :params error_list: list of quoted filenames that failed
    """
    if data_format == 'text/plain':
        output = ''
        for key in sorted(data_dict.keys()):
            output += '%s: %s\n' % (key, data_dict[key])
        output += 'Errors:\n'
        output += '\n'.join(
            ['%s, %s' % (name, status) for name, status in error_list])
        return output
    if data_format == 'application/json':
        data_dict['Errors'] = error_list
        return json.dumps(data_dict)
    if data_format.endswith('/xml'):
        output = '<?xml version="1.0" encoding="UTF-8"?>\n<delete>\n'
        for key in sorted(data_dict.keys()):
            xml_key = key.replace(' ', '_').lower()
            output += '<%s>%s</%s>\n' % (xml_key, data_dict[key], xml_key)
        output += '<errors>\n'
        output += '\n'.join(
            ['<object>'
             '<name>%s</name><status>%s</status>'
             '</object>' % (saxutils.escape(name), status)
             for name, status in error_list])
        output += '</errors>\n</delete>\n'
        return output
    raise HTTPNotAcceptable('Invalid output type')
def test_container_usage_error_key(self):
    self.conf['quota'] = json.dumps({
        'container_count': {'default': 5, 'L10': 10},
        'object_count': {'default': 5, 'L10': 10},
        'container_usage': {'default': 5, 'L10': 10}
    })
    qa = quota.filter_factory(self.conf)(FakeApp())
    req = Request.blank('/v1/a2/c/obj')
    req.method = 'PUT'
    req.environ['CONTENT_LENGTH'] = 1024
    # no memcached
    resp = req.get_response(qa)
    self.assertEquals(resp.status_int, 201)
    # no cache
    req.environ['swift.cache'] = FakeMemcache()
    resp = req.get_response(qa)
    self.assertEquals(resp.status_int, 201)
    # cached
    resp = req.get_response(qa)
    self.assertEquals(resp.status_int, 201)
def test_handle_multipart_put_check_data_bad(self):
    bad_data = json.dumps(
        [{"path": "/c/a_1", "etag": "a", "size_bytes": "1"},
         {"path": "/c/a_2", "etag": "a", "size_bytes": "1"},
         {"path": "/d/b_2", "etag": "b", "size_bytes": "2"}])
    req = Request.blank(
        "/test_good/A/c/man?multipart-manifest=put",
        environ={"REQUEST_METHOD": "PUT"},
        headers={"Accept": "application/json"},
        body=bad_data)
    try:
        self.slo.handle_multipart_put(req)
    except HTTPException, e:
        self.assertEquals(self.app.calls, 3)
        data = json.loads(e.body)
        errors = data["Errors"]
        self.assertEquals(errors[0][0], "/test_good/A/c/a_1")
        self.assertEquals(errors[0][1], "Size Mismatch")
        self.assertEquals(errors[2][1], "400 Bad Request")
        self.assertEquals(errors[-1][0], "/test_good/A/d/b_2")
        self.assertEquals(errors[-1][1], "Etag Mismatch")
    else:
        # the PUT of a bad manifest must raise
        self.assert_(False)
def _handle_sync_request(self, broker, remote_info):
    """
    Update metadata, timestamps, sync points.
    """
    with self.debug_timing('info'):
        try:
            info = self._get_synced_replication_info(broker, remote_info)
        except (Exception, Timeout) as e:
            if 'no such table' in str(e):
                self.logger.error(_("Quarantining DB %s"), broker)
                quarantine_db(broker.db_file, broker.db_type)
                return HTTPNotFound()
            raise
    if remote_info['metadata']:
        with self.debug_timing('update_metadata'):
            broker.update_metadata(remote_info['metadata'])
    sync_timestamps = ('created_at', 'put_timestamp', 'delete_timestamp')
    if any(info[ts] != remote_info[ts] for ts in sync_timestamps):
        with self.debug_timing('merge_timestamps'):
            broker.merge_timestamps(*(remote_info[ts]
                                      for ts in sync_timestamps))
    with self.debug_timing('get_sync'):
        info['point'] = broker.get_sync(remote_info['id'])
    if remote_info['hash'] == info['hash'] and \
            info['point'] < remote_info['point']:
        with self.debug_timing('merge_syncs'):
            translate = {
                'remote_id': 'id',
                'sync_point': 'point',
            }
            data = dict((k, remote_info[v]) for k, v in translate.items())
            broker.merge_syncs([data])
            info['point'] = remote_info['point']
    return Response(json.dumps(info))
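# Standalone sketch of the sync-point translation used above, with sample
# values; the real code reads these fields from the remote DB's info dict.
remote_info = {'id': 'remote-db-uuid', 'point': 42}
translate = {'remote_id': 'id', 'sync_point': 'point'}
data = dict((k, remote_info[v]) for k, v in translate.items())
# data == {'remote_id': 'remote-db-uuid', 'sync_point': 42}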
def object_update(self, req, broker, name, timestamp):
    metadata = json.dumps(dict([val for val in req.headers.iteritems()
                                if is_user_meta('object', val[0])]))
    broker.put_object(name, timestamp, int(req.headers['x-size']),
                      req.headers['x-content-type'],
                      req.headers['x-etag'], metadata=metadata)
def get_response_body(data_format, data_dict, error_list):
    """
    Returns a properly formatted response body according to format.

    Handles json and xml, otherwise will return text/plain.

    Note: xml response does not include xml declaration.

    :params data_format: resulting format
    :params data_dict: generated data about results.
    :params error_list: list of quoted filenames that failed
    """
    if data_format == 'application/json':
        data_dict['Errors'] = error_list
        return json.dumps(data_dict)
    if data_format and data_format.endswith('/xml'):
        output = '<delete>\n'
        for key in sorted(data_dict):
            xml_key = key.replace(' ', '_').lower()
            output += '<%s>%s</%s>\n' % (xml_key, data_dict[key], xml_key)
        output += '<errors>\n'
        output += '\n'.join(
            ['<object>'
             '<name>%s</name><status>%s</status>'
             '</object>' % (saxutils.escape(name), status)
             for name, status in error_list])
        output += '</errors>\n</delete>\n'
        return output
    output = ''
    for key in sorted(data_dict):
        output += '%s: %s\n' % (key, data_dict[key])
    output += 'Errors:\n'
    output += '\n'.join(
        ['%s, %s' % (name, status) for name, status in error_list])
    return output
def test_handle_multipart_put_check_data_bad(self):
    bad_data = json.dumps(
        [{'path': '/c/a_1', 'etag': 'a', 'size_bytes': '1'},
         {'path': '/c/a_2', 'etag': 'a', 'size_bytes': '1'},
         {'path': '/d/b_2', 'etag': 'b', 'size_bytes': '2'},
         {'path': '/d/slob', 'etag': 'a', 'size_bytes': '2'}])
    req = Request.blank(
        '/test_good/A/c/man?multipart-manifest=put',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'Accept': 'application/json'},
        body=bad_data)
    try:
        self.slo.handle_multipart_put(req, fake_start_response)
    except HTTPException as e:
        self.assertEquals(self.app.calls, 4)
        data = json.loads(e.body)
        errors = data['Errors']
        self.assertEquals(errors[0][0], '/c/a_1')
        self.assertEquals(errors[0][1], 'Size Mismatch')
        self.assertEquals(errors[2][0], '/c/a_2')
        self.assertEquals(errors[2][1], '400 Bad Request')
        self.assertEquals(errors[4][0], '/d/b_2')
        self.assertEquals(errors[4][1], 'Etag Mismatch')
        self.assertEquals(errors[-1][0], '/d/slob')
        self.assertEquals(errors[-1][1], 'Etag Mismatch')
    else:
        self.assert_(False)
def jresponse(status, msg, req, status_int, headers=None, statusstr='',
              param=None):
    msg = msg.lower()
    data = {'status': str(status), 'msg': str(msg)}
    if param:
        data.update(param)
    container_list = json.dumps(data)
    if headers:
        ret = Response(body=container_list, request=req, headers=headers)
    else:
        ret = Response(body=container_list, request=req)
    ret.content_type = 'application/json'
    ret.charset = 'utf-8'
    ret.status_int = status_int
    if statusstr:
        ret.status = statusstr
    if status != '0' and req.method == 'PUT':
        pass
        # Disabled debug logging for failed PUTs:
        # syslog.syslog(syslog.LOG_ERR, 'jresponse: status: ' +
        #               str(status_int) + ' path: ' + str(req.path) +
        #               ' msg: ' + str(msg) + ' tx_id: ' +
        #               req.environ.get('swift.trans_id', ''))
        # print 'jresponse: status: ' + str(status_int) + ' path: ' + \
        #     str(req.path) + ' msg: ' + str(msg) + ' tx_id: ' + \
        #     req.environ.get('swift.trans_id', '') + ' method: ' + \
        #     str(req.method) + ' headers: ' + str(req.headers) + \
        #     ' params: ' + str(req.GET)
        # traceback.print_stack()
        # syslog.syslog(syslog.LOG_ERR,
        #               'jresponse: ' + str(traceback.extract_stack()))
        # syslog.syslog(syslog.LOG_ERR,
        #               'jresponse: ' + str(traceback.print_stack()))
    return ret
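# Sketch of the JSON body jresponse() builds for a failed request; the
# status and message values are illustrative.
import json
body = json.dumps({'status': '-1', 'msg': 'not found'})
# e.g. '{"status": "-1", "msg": "not found"}' (key order may vary)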
def set_multi(self, mapping, server_key, serialize=True, timeout=0):
    """
    Sets multiple key/value pairs in memcache.

    :param mapping: dictionary of keys and values to be set in memcache
    :param server_key: key to use in determining which server in the
                       ring is used
    :param serialize: if True, value is serialized with JSON before
                      sending to memcache, or with pickle if configured
                      to use pickle instead of JSON (to avoid cache
                      poisoning)
    :param timeout: ttl for memcache
    """
    server_key = md5hash(server_key)
    timeout = sanitize_timeout(timeout)
    msg = ''
    for key, value in mapping.iteritems():
        key = md5hash(key)
        flags = 0
        if serialize and self._allow_pickle:
            value = pickle.dumps(value, PICKLE_PROTOCOL)
            flags |= PICKLE_FLAG
        elif serialize:
            value = json.dumps(value)
            flags |= JSON_FLAG
        msg += ('set %s %d %d %s noreply\r\n%s\r\n' %
                (key, flags, timeout, len(value), value))
    for (server, fp, sock) in self._get_conns(server_key):
        try:
            sock.sendall(msg)
            self._return_conn(server, fp, sock)
            return
        except Exception, e:
            self._exception_occurred(server, e)
def set(self, key, value, serialize=True, timeout=0):
    """
    Set a key/value pair in memcache

    :param key: key
    :param value: value
    :param serialize: if True, value is serialized with JSON before
                      sending to memcache, or with pickle if configured
                      to use pickle instead of JSON (to avoid cache
                      poisoning)
    :param timeout: ttl in memcache
    """
    key = md5hash(key)
    timeout = sanitize_timeout(timeout)
    flags = 0
    if serialize and self._allow_pickle:
        value = pickle.dumps(value, PICKLE_PROTOCOL)
        flags |= PICKLE_FLAG
    elif serialize:
        value = json.dumps(value)
        flags |= JSON_FLAG
    for (server, fp, sock) in self._get_conns(key):
        try:
            sock.sendall('set %s %d %d %s noreply\r\n%s\r\n' %
                         (key, flags, timeout, len(value), value))
            self._return_conn(server, fp, sock)
            return
        except Exception, e:
            self._exception_occurred(server, e)
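# Sketch of the memcached "set" command the method above sends for a
# JSON-serialized value. The key is shown unhashed for readability (the
# real code md5-hashes it), and JSON_FLAG = 2 is an assumed value for the
# module's flag constant.
import json
JSON_FLAG = 2  # assumed flag bit
key, timeout = 'mykey', 0
value = json.dumps({'a': 1})
msg = 'set %s %d %d %s noreply\r\n%s\r\n' % (key, JSON_FLAG, timeout,
                                             len(value), value)
# msg == 'set mykey 2 0 8 noreply\r\n{"a": 1}\r\n'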
def uploadObj(self, a, c, o):
    metaListO = [self.getTestObjDict(a, c, o)]
    reqO = Request.blank(
        '/', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'},
        headers={'user-agent': 'object_crawler'},
        body=json.dumps(metaListO))
    respO = reqO.get_response(self.controller)
    self.assert_(respO.status.startswith('204'))
def getAll(self):
    """
    Dump everything
    """
    self.conn.row_factory = dict_factory
    cur = self.conn.cursor()
    cur.execute("SELECT * FROM object_metadata")
    obj_data = cur.fetchall()
    cur.execute("SELECT * FROM container_metadata")
    con_data = cur.fetchall()
    cur.execute("SELECT * FROM account_metadata")
    acc_data = cur.fetchall()
    return ''.join([
        json.dumps(obj_data), "\n\n",
        json.dumps(con_data), "\n\n",
        json.dumps(acc_data)
    ])
def generate_s3acl_environ(account, swift, owner):

    def gen_grant(permission):
        # generate Grant with a grantee named by "permission"
        account_name = '%s:%s' % (account, permission.lower())
        return Grant(User(account_name), permission)

    grants = map(gen_grant, PERMISSIONS)
    container_headers = _gen_test_headers(owner, grants)
    object_headers = _gen_test_headers(owner, grants, 'object')
    object_body = 'hello'
    object_headers['Content-Length'] = len(object_body)

    # TEST method is used to resolve a tenant name
    swift.register('TEST', '/v1/AUTH_test', swob.HTTPMethodNotAllowed,
                   {}, None)
    swift.register('TEST', '/v1/AUTH_X', swob.HTTPMethodNotAllowed,
                   {}, None)

    # for bucket
    swift.register('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
                   container_headers, None)
    swift.register('HEAD', '/v1/AUTH_test/bucket+segments',
                   swob.HTTPNoContent, container_headers, None)
    swift.register('PUT', '/v1/AUTH_test/bucket', swob.HTTPCreated,
                   {}, None)
    swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
                   container_headers, json.dumps([]))
    swift.register('POST', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
                   {}, None)
    swift.register('DELETE', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
                   {}, None)

    # necessary for canned-acl tests
    public_headers = _gen_test_headers(owner, [Grant(AllUsers(), 'READ')])
    swift.register('GET', '/v1/AUTH_test/public', swob.HTTPNoContent,
                   public_headers, json.dumps([]))
    authenticated_headers = _gen_test_headers(
        owner, [Grant(AuthenticatedUsers(), 'READ')], 'bucket')
    swift.register('GET', '/v1/AUTH_test/authenticated',
                   swob.HTTPNoContent, authenticated_headers,
                   json.dumps([]))

    # for object
    swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPOk,
                   object_headers, None)
def create_bucket_list_json(buckets):
    """
    Create a JSON body from a bucket list.

    :param buckets: a list of tuples (or lists) consisting of elements
                    ordered as name, count, bytes
    """
    bucket_list = [{'name': item[0], 'count': item[1], 'bytes': item[2]}
                   for item in buckets]
    return json.dumps(bucket_list)
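# Usage sketch for create_bucket_list_json() with illustrative buckets:
buckets = [('photos', 2, 1024), ('logs', 10, 4096)]
create_bucket_list_json(buckets)
# '[{"name": "photos", "count": 2, "bytes": 1024},
#   {"name": "logs", "count": 10, "bytes": 4096}]'  (key order may vary)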
def uploadAcc(self, a):
    metaListA = [self.getTestAccDict(a)]
    reqA = Request.blank(
        '/', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'},
        headers={'user-agent': 'account_crawler'},
        body=json.dumps(metaListA))
    respA = reqA.get_response(self.controller)
    self.assert_(respA.status.startswith('204'))
def uploadCon(self, a, c):
    metaListC = [self.getTestConDict(a, c)]
    reqC = Request.blank(
        '/', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'},
        headers={'user-agent': 'container_crawler'},
        body=json.dumps(metaListC))
    respC = reqC.get_response(self.controller)
    self.assert_(respC.status.startswith('204'))
def setup_objects(self):
    self.objects = (('rose', '2011-01-05T02:19:14.275290', 0, 303),
                    ('viola', '2011-01-05T02:19:14.275290', '0', 3909),
                    ('lily', '2011-01-05T02:19:14.275290', '0', '3909'),
                    ('mu', '2011-01-05T02:19:14.275290',
                     'md5-of-the-manifest; s3_etag=0', '3909'),
                    ('with space', '2011-01-05T02:19:14.275290', 0, 390),
                    ('with%20space', '2011-01-05T02:19:14.275290', 0,
                     390))
    objects = [{'name': str(item[0]), 'last_modified': str(item[1]),
                'hash': str(item[2]), 'bytes': str(item[3])}
               for item in self.objects]
    object_list = json.dumps(objects)
    self.prefixes = ['rose', 'viola', 'lily']
    object_list_subdir = [{"subdir": p} for p in self.prefixes]

    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments',
                        swob.HTTPNoContent, {}, json.dumps([]))
    for name, _, _, _ in self.objects:
        self.swift.register(
            'DELETE', '/v1/AUTH_test/bucket+segments/' + name,
            swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register(
        'GET',
        '/v1/AUTH_test/bucket+segments?format=json&marker=with%2520space',
        swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
        json.dumps([]))
    self.swift.register(
        'GET', '/v1/AUTH_test/bucket+segments?format=json&marker=',
        swob.HTTPOk, {'Content-Type': 'application/json'}, object_list)
    self.swift.register(
        'HEAD', '/v1/AUTH_test/junk', swob.HTTPNoContent, {}, None)
    self.swift.register(
        'HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound, {}, None)
    self.swift.register(
        'GET', '/v1/AUTH_test/junk', swob.HTTPOk,
        {'Content-Type': 'application/json'}, object_list)
    self.swift.register(
        'GET',
        '/v1/AUTH_test/junk?delimiter=a&format=json&limit=3&marker=viola',
        swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
        json.dumps(objects[2:]))
    self.swift.register(
        'GET', '/v1/AUTH_test/junk-subdir', swob.HTTPOk,
        {'Content-Type': 'application/json; charset=utf-8'},
        json.dumps(object_list_subdir))
    self.swift.register(
        'GET', '/v1/AUTH_test/subdirs?delimiter=/&format=json&limit=3',
        swob.HTTPOk, {}, json.dumps([
            {'subdir': 'nothing/'},
            {'subdir': 'but/'},
            {'subdir': 'subdirs/'},
        ]))
def account_listing_response(account, req, response_content_type,
                             broker=None, limit='', marker='',
                             end_marker='', prefix='', delimiter='',
                             reverse=False):
    """
    This is an exact copy of swift.account.utils.account_listing_response()
    except for one difference, i.e. this method passes
    response_content_type to the broker.list_containers_iter() method.
    """
    if broker is None:
        broker = FakeAccountBroker()
    resp_headers = get_response_headers(broker)
    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter,
                                               response_content_type,
                                               reverse)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, put_tstamp, is_subdir) \
                in account_list:
            if is_subdir:
                data.append({'subdir': name})
            else:
                data.append(
                    {'name': name, 'count': object_count,
                     'bytes': bytes_used,
                     'last_modified': Timestamp(put_tstamp).isoformat})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, put_tstamp, is_subdir) \
                in account_list:
            if is_subdir:
                output_list.append(
                    '<subdir name=%s />' % saxutils.quoteattr(name))
            else:
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes><last_modified>%s' \
                       '</last_modified></container>' % \
                       (saxutils.escape(name), object_count, bytes_used,
                        Timestamp(put_tstamp).isoformat)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        if not account_list:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in account_list) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def GETorHEAD(self, req):
    """
    Handler for HTTP GET/HEAD requests to /info.

    Should return a WSGI-style callable (such as swob.Response).

    :param req: swob.Request object
    """
    if not self.expose_info:
        return HTTPForbidden(request=req)
    admin_request = False
    sig = req.params.get('swiftinfo_sig', '')
    expires = req.params.get('swiftinfo_expires', '')
    if sig != '' or expires != '':
        admin_request = True
        if not self.admin_key:
            return HTTPForbidden(request=req)
        try:
            expires = int(expires)
        except ValueError:
            return HTTPUnauthorized(request=req)
        if expires < time():
            return HTTPUnauthorized(request=req)
        valid_sigs = []
        for method in self.allowed_hmac_methods[req.method]:
            valid_sigs.append(get_hmac(method, '/info', expires,
                                       self.admin_key))
        # While it's true that any() will short-circuit, this doesn't
        # affect the timing-attack resistance since the only way this will
        # short-circuit is when a valid signature is passed in.
        is_valid_hmac = any(streq_const_time(valid_sig, sig)
                            for valid_sig in valid_sigs)
        if not is_valid_hmac:
            return HTTPUnauthorized(request=req)
    headers = {}
    if 'Origin' in req.headers:
        headers['Access-Control-Allow-Origin'] = req.headers['Origin']
        headers['Access-Control-Expose-Headers'] = ', '.join(
            ['x-trans-id'])
    # json.dumps() converts the dict into a JSON-formatted string
    info = json.dumps(get_swift_info(
        admin=admin_request,
        disallowed_sections=self.disallowed_sections))
    return HTTPOk(request=req,
                  headers=headers,
                  body=info,
                  content_type='application/json; charset=UTF-8')
def __call__(self, env, start_response):
    request = Request(env)
    if not request.path.startswith(self.endpoints_path):
        return self.app(env, start_response)
    if request.method != 'GET':
        return HTTPMethodNotAllowed(
            req=request, headers={"Allow": "GET"})(env, start_response)
    try:
        clean_path = request.path[len(self.endpoints_path) - 1:]
        account, container, obj = \
            split_path(clean_path, 1, 3, True)
    except ValueError:
        return HTTPBadRequest('No account specified')(env, start_response)
    if account is not None:
        account = unquote(account)
    if container is not None:
        container = unquote(container)
    if obj is not None:
        obj = unquote(obj)
    if obj is not None:
        partition, nodes = self.object_ring.get_nodes(
            account, container, obj)
        endpoint_template = 'http://{ip}:{port}/{device}/{partition}/' + \
                            '{account}/{container}/{obj}'
    elif container is not None:
        partition, nodes = self.container_ring.get_nodes(
            account, container)
        endpoint_template = 'http://{ip}:{port}/{device}/{partition}/' + \
                            '{account}/{container}'
    else:
        partition, nodes = self.account_ring.get_nodes(account)
        endpoint_template = 'http://{ip}:{port}/{device}/{partition}/' + \
                            '{account}'
    endpoints = []
    for node in nodes:
        endpoint = endpoint_template.format(
            ip=node['ip'], port=node['port'], device=node['device'],
            partition=partition, account=quote(account),
            container=quote(container or ''), obj=quote(obj or ''))
        endpoints.append(endpoint)
    return Response(json.dumps(endpoints),
                    content_type='application/json')(env, start_response)
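# Sketch of the endpoint formatting performed above, with an illustrative
# ring node; the real node dicts come from the account/container/object
# rings.
node = {'ip': '10.1.1.1', 'port': 6000, 'device': 'sdb1'}
endpoint_template = ('http://{ip}:{port}/{device}/{partition}/'
                     '{account}/{container}/{obj}')
endpoint = endpoint_template.format(
    ip=node['ip'], port=node['port'], device=node['device'],
    partition=1, account='a', container='c', obj='o')
# endpoint == 'http://10.1.1.1:6000/sdb1/1/a/c/o'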
def setup_objects(self):
    self.objects = (('lily', '2011-01-05T02:19:14.275290', '0', '3909'),
                    ('rose', '2011-01-05T02:19:14.275290', 0, 303),
                    ('viola', '2011-01-05T02:19:14.275290', '0', 3909),
                    (u'lily-\u062a', '2011-01-05T02:19:14.275290', 0,
                     390),
                    ('mu', '2011-01-05T02:19:14.275290',
                     'md5-of-the-manifest; s3_etag=0', '3909'),
                    ('with space', '2011-01-05T02:19:14.275290', 0, 390),
                    ('with%20space', '2011-01-05T02:19:14.275290', 0,
                     390))
    objects = [{'name': item[0], 'last_modified': str(item[1]),
                'hash': str(item[2]), 'bytes': str(item[3])}
               for item in self.objects]
    object_list = json.dumps(objects)
    self.prefixes = ['rose', 'viola', 'lily']
    object_list_subdir = [{"subdir": p} for p in self.prefixes]

    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments',
                        swob.HTTPNoContent, {}, json.dumps([]))
    for name, _, _, _ in self.objects:
        self.swift.register(
            'DELETE',
            '/v1/AUTH_test/bucket+segments/' + name.encode('utf-8'),
            swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register(
        'GET',
        '/v1/AUTH_test/bucket+segments?format=json&marker=with%2520space',
        swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
        json.dumps([]))
    self.swift.register(
        'GET', '/v1/AUTH_test/bucket+segments?format=json&marker=',
        swob.HTTPOk, {'Content-Type': 'application/json'}, object_list)
    self.swift.register('HEAD', '/v1/AUTH_test/junk',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_test/nojunk',
                        swob.HTTPNotFound, {}, None)
    self.swift.register('GET', '/v1/AUTH_test/junk', swob.HTTPOk,
                        {'Content-Type': 'application/json'},
                        object_list)
    self.swift.register(
        'GET',
        '/v1/AUTH_test/junk?delimiter=a&format=json&limit=3&marker=viola',
        swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
        json.dumps(objects[2:]))
    self.swift.register(
        'GET', '/v1/AUTH_test/junk-subdir', swob.HTTPOk,
        {'Content-Type': 'application/json; charset=utf-8'},
        json.dumps(object_list_subdir))
    self.swift.register(
        'GET', '/v1/AUTH_test/subdirs?delimiter=/&format=json&limit=3',
        swob.HTTPOk, {}, json.dumps([
            {'subdir': 'nothing/'},
            {'subdir': 'but/'},
            {'subdir': 'subdirs/'},
        ]))
            data = []
            for (name, created_at, size, content_type, etag) in \
                    container_list:
                if content_type is None:
                    data.append({"subdir": name})
                else:
                    created_at = datetime.utcfromtimestamp(
                        float(created_at)).isoformat()
                    # python isoformat() doesn't include msecs when zero
                    if len(created_at) < \
                            len("1970-01-01T00:00:00.000000"):
                        created_at += ".000000"
                    content_type, size = \
                        self.derive_content_type_metadata(
                            content_type, size)
                    data.append({'last_modified': created_at,
                                 'bytes': size,
                                 'content_type': content_type,
                                 'hash': etag,
                                 'name': name})
            container_list = json.dumps(data)
        elif out_content_type.endswith('/xml'):
            xml_output = []
            for (name, created_at, size, content_type, etag) in \
                    container_list:
                # escape name and format date here
                name = saxutils.escape(name)
                created_at = datetime.utcfromtimestamp(
                    float(created_at)).isoformat()
                # python isoformat() doesn't include msecs when zero
                if len(created_at) < len("1970-01-01T00:00:00.000000"):
                    created_at += ".000000"
                if content_type is None:
                    xml_output.append('<subdir name="%s"><name>%s</name>'
                                      '</subdir>' % (name, name))
                else:
                    content_type, size = \
                        self.derive_content_type_metadata(
def audit_all_objects(self, mode='once'):
    self.logger.info(_('Begin object audit "%s" mode (%s)') %
                     (mode, self.auditor_type))
    begin = reported = time.time()
    self.total_bytes_processed = 0
    self.total_files_processed = 0
    total_quarantines = 0
    total_errors = 0
    time_auditing = 0
    all_locs = audit_location_generator(self.devices,
                                        object_server.DATADIR, '.data',
                                        mount_check=self.mount_check,
                                        logger=self.logger)
    for path, device, partition in all_locs:
        loop_time = time.time()
        self.failsafe_object_audit(path, device, partition)
        self.logger.timing_since('timing', loop_time)
        self.files_running_time = ratelimit_sleep(
            self.files_running_time, self.max_files_per_second)
        self.total_files_processed += 1
        now = time.time()
        if now - reported >= self.log_time:
            self.logger.info(_(
                'Object audit (%(type)s). '
                'Since %(start_time)s: Locally: %(passes)d passed, '
                '%(quars)d quarantined, %(errors)d errors '
                'files/sec: %(frate).2f , bytes/sec: %(brate).2f, '
                'Total time: %(total).2f, Auditing time: %(audit).2f, '
                'Rate: %(audit_rate).2f') % {
                    'type': self.auditor_type,
                    'start_time': time.ctime(reported),
                    'passes': self.passes,
                    'quars': self.quarantines,
                    'errors': self.errors,
                    'frate': self.passes / (now - reported),
                    'brate': self.bytes_processed / (now - reported),
                    'total': (now - begin),
                    'audit': time_auditing,
                    'audit_rate': time_auditing / (now - begin)})
            dump_recon_cache(
                {'object_auditor_stats_%s' % self.auditor_type: {
                    'errors': self.errors,
                    'passes': self.passes,
                    'quarantined': self.quarantines,
                    'bytes_processed': self.bytes_processed,
                    'start_time': reported,
                    'audit_time': time_auditing}},
                self.rcache, self.logger)
            reported = now
            total_quarantines += self.quarantines
            total_errors += self.errors
            self.passes = 0
            self.quarantines = 0
            self.errors = 0
            self.bytes_processed = 0
        time_auditing += (now - loop_time)
    # Avoid divide by zero during very short runs
    elapsed = (time.time() - begin) or 0.000001
    self.logger.info(_(
        'Object audit (%(type)s) "%(mode)s" mode '
        'completed: %(elapsed).02fs. Total quarantined: %(quars)d, '
        'Total errors: %(errors)d, Total files/sec: %(frate).2f , '
        'Total bytes/sec: %(brate).2f, Auditing time: %(audit).2f, '
        'Rate: %(audit_rate).2f') % {
            'type': self.auditor_type,
            'mode': mode,
            'elapsed': elapsed,
            'quars': total_quarantines,
            'errors': total_errors,
            'frate': self.total_files_processed / elapsed,
            'brate': self.total_bytes_processed / elapsed,
            'audit': time_auditing,
            'audit_rate': time_auditing / elapsed})
    if self.stats_sizes:
        self.logger.info(_('Object audit stats: %s') %
                         json.dumps(self.stats_buckets))
def test_bucket_GET_with_versions(self):
    versioned_objects = [
        {'name': '004rose/2', 'hash': '0', 'bytes': '0',
         'last_modified': '2010-03-01T17:09:51.510928',
         'content_type': DELETE_MARKER_CONTENT_TYPE},
        {'name': '004rose/1', 'hash': '1234', 'bytes': '6',
         'last_modified': '2010-03-01T17:09:50.510928'},
    ]
    self.swift.register('GET', '/v1/AUTH_test/junk+versioning',
                        swob.HTTPOk, {}, json.dumps(versioned_objects))
    for obj in self.objects:
        if obj[0] == 'rose':
            headers = {'X-Object-Sysmeta-Version-Id': '3'}
        else:
            headers = {}
        self.swift.register('HEAD', '/v1/AUTH_test/junk/%s' % obj[0],
                            swob.HTTPOk, headers, None)
    req = Request.blank('/junk?versions',
                        environ={'REQUEST_METHOD': 'GET'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    status, headers, body = self.call_swift3(req)
    self.assertEqual(status.split()[0], '200')
    elem = fromstring(body, 'ListVersionsResult')
    self.assertEqual(elem.find('./Name').text, 'junk')
    delete_markers = elem.findall('./DeleteMarker')
    self.assertEqual(len(delete_markers), 1)
    self.assertEqual(delete_markers[0].find('./IsLatest').text, 'false')
    self.assertEqual(delete_markers[0].find('./VersionId').text, '2')
    self.assertEqual(delete_markers[0].find('./Key').text, 'rose')
    versions = elem.findall('./Version')
    self.assertEqual(len(versions), len(self.objects) + 1)
    self.assertEqual(versions[2].find('./IsLatest').text, 'false')
    self.assertEqual(versions[2].find('./VersionId').text, '1')
    # Test that version id is retrieved from sysmeta
    self.assertEqual(versions[1].find('./VersionId').text, '3')
    self.assertEqual(versions[2].find('./Key').text, 'rose')

    # with max keys
    req = Request.blank('/junk?versions&max-keys=3',
                        environ={'REQUEST_METHOD': 'GET'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    status, headers, body = self.call_swift3(req)
    self.assertEqual(status.split()[0], '200')
    elem = fromstring(body, 'ListVersionsResult')
    self.assertEqual(elem.find('./MaxKeys').text, '3')
    self.assertEqual(elem.find('./IsTruncated').text, 'true')
    delete_markers = elem.findall('./DeleteMarker')
    self.assertEqual(len(delete_markers), 1)
    versions = elem.findall('./Version')
    self.assertEqual(len(versions), 2)
    self.assertEqual([v.find('./Key').text for v in versions],
                     ['lily', 'rose'])
    self.assertEqual([v.find('./VersionId').text for v in versions],
                     ['null', '3'])
    self.assertEqual([v.find('./IsLatest').text for v in versions],
                     ['true', 'true'])

    # with key-marker
    req = Request.blank('/junk?versions&max-keys=2&key-marker=rose',
                        environ={'REQUEST_METHOD': 'GET'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    status, headers, body = self.call_swift3(req)
    self.assertEqual(status.split()[0], '200')
    elem = fromstring(body, 'ListVersionsResult')
    self.assertEqual(elem.find('./MaxKeys').text, '2')
    self.assertEqual(elem.find('./IsTruncated').text, 'true')
    delete_markers = elem.findall('./DeleteMarker')
    self.assertEqual(len(delete_markers), 1)
    self.assertEqual(delete_markers[0].find('./Key').text, 'rose')
    self.assertEqual(delete_markers[0].find('./VersionId').text, '2')
    versions = elem.findall('./Version')
    self.assertEqual(len(versions), 1)
    self.assertEqual(versions[0].find('./Key').text, 'rose')
    self.assertEqual(versions[0].find('./VersionId').text, '3')
    self.assertEqual(versions[0].find('./IsLatest').text, 'true')

    # with key-marker and version-id-marker
    req = Request.blank(
        '/junk?versions&key-marker=rose&version-id-marker=2',
        environ={'REQUEST_METHOD': 'GET'},
        headers={'Authorization': 'AWS test:tester:hmac',
                 'Date': self.get_date_header()})
    status, headers, body = self.call_swift3(req)
    self.assertEqual(status.split()[0], '200')
    elem = fromstring(body, 'ListVersionsResult')
    self.assertEqual(elem.find('./IsTruncated').text, 'false')
    delete_markers = elem.findall('./DeleteMarker')
    self.assertEqual(len(delete_markers), 0)
    versions = elem.findall('./Version')
    self.assertEqual(len(versions), len(self.objects) - 1)
    self.assertEqual(versions[0].find('./Key').text, 'rose')
    self.assertEqual(versions[0].find('./VersionId').text, '1')
    self.assertEqual(versions[0].find('./IsLatest').text, 'false')

    # with key-marker and non-existent version-id-marker
    req = Request.blank(
        '/junk?versions&key-marker=rose&version-id-marker=x',
        environ={'REQUEST_METHOD': 'GET'},
        headers={'Authorization': 'AWS test:tester:hmac',
                 'Date': self.get_date_header()})
    status, headers, body = self.call_swift3(req)
    self.assertEqual(status.split()[0], '200')
    elem = fromstring(body, 'ListVersionsResult')
    self.assertEqual(elem.find('./Name').text, 'junk')
    delete_markers = elem.findall('./DeleteMarker')
    self.assertEqual(len(delete_markers), 0)
    versions = elem.findall('./Version')
    self.assertEqual(len(versions), len(self.objects) - 2)
    self.assertEqual(versions[0].find('./Key').text, 'viola')
    self.assertEqual(versions[0].find('./IsLatest').text, 'true')
    self.assertEqual(versions[0].find('./VersionId').text, 'null')
def account_listing_response(account, req, response_content_type,
                             info=None, listing=None,
                             s3_buckets_only=False):
    now = time.time()
    if info is None:
        info = {'containers': 0,
                'objects': 0,
                'bytes': 0,
                'metadata': {},
                'ctime': Timestamp(now).internal}
    if listing is None:
        listing = []
    elif listing and len(listing[0]) < 5:
        # oio-sds < 4.2 does not return mtime
        listing = [x + [now] for x in listing]
    resp_headers = get_response_headers(info)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, is_subdir, mtime) in listing:
            if is_subdir:
                if not s3_buckets_only:
                    data.append({'subdir': name})
            else:
                data.append({'name': name, 'count': object_count,
                             'bytes': bytes_used,
                             'last_modified': Timestamp(mtime).isoformat})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, is_subdir, mtime) in listing:
            if is_subdir:
                if not s3_buckets_only:
                    output_list.append('<subdir name=%s />' %
                                       saxutils.quoteattr(name))
            else:
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes><last_modified>%s' \
                       '</last_modified></container>' % \
                       (saxutils.escape(name), object_count, bytes_used,
                        Timestamp(mtime).isoformat)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        if not listing:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in listing) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def setUp(self):
    super(TestSloDeleteManifest, self).setUp()
    _submanifest_data = json.dumps(
        [{'name': '/deltest/b_2', 'hash': 'a', 'bytes': '1'},
         {'name': '/deltest/c_3', 'hash': 'b', 'bytes': '2'}])
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/man_404',
        swob.HTTPNotFound, {}, None)
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/man',
        swob.HTTPOk, {'Content-Type': 'application/json',
                      'X-Static-Large-Object': 'true'},
        json.dumps([{'name': '/deltest/gone', 'hash': 'a', 'bytes': '1'},
                    {'name': '/deltest/b_2', 'hash': 'b', 'bytes': '2'}]))
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/man',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/man-all-there',
        swob.HTTPOk, {'Content-Type': 'application/json',
                      'X-Static-Large-Object': 'true'},
        json.dumps([{'name': '/deltest/b_2', 'hash': 'a', 'bytes': '1'},
                    {'name': '/deltest/c_3', 'hash': 'b', 'bytes': '2'}]))
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/gone',
        swob.HTTPNotFound, {}, None)
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/a_1',
        swob.HTTPOk, {'Content-Length': '1'}, 'a')
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/a_1',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/b_2',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/c_3',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/d_3',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/manifest-with-submanifest',
        swob.HTTPOk, {'Content-Type': 'application/json',
                      'X-Static-Large-Object': 'true'},
        json.dumps([{'name': '/deltest/a_1', 'hash': 'a', 'bytes': '1'},
                    {'name': '/deltest/submanifest', 'sub_slo': True,
                     'hash': 'submanifest-etag',
                     'bytes': len(_submanifest_data)},
                    {'name': '/deltest/d_3', 'hash': 'd', 'bytes': '3'}]))
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/manifest-with-submanifest',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/submanifest',
        swob.HTTPOk, {'Content-Type': 'application/json',
                      'X-Static-Large-Object': 'true'},
        _submanifest_data)
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/submanifest',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/manifest-missing-submanifest',
        swob.HTTPOk, {'Content-Type': 'application/json',
                      'X-Static-Large-Object': 'true'},
        json.dumps([{'name': '/deltest/a_1', 'hash': 'a', 'bytes': '1'},
                    {'name': '/deltest/missing-submanifest',
                     'hash': 'a', 'bytes': '2', 'sub_slo': True},
                    {'name': '/deltest/d_3', 'hash': 'd', 'bytes': '3'}]))
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/manifest-missing-submanifest',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/missing-submanifest',
        swob.HTTPNotFound, {}, None)
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/manifest-badjson',
        swob.HTTPOk, {'Content-Type': 'application/json',
                      'X-Static-Large-Object': 'true'},
        "[not {json (at ++++all")
    self.app.register(
        'GET', '/v1/AUTH_test/deltest/manifest-with-unauth-segment',
        swob.HTTPOk, {'Content-Type': 'application/json',
                      'X-Static-Large-Object': 'true'},
        json.dumps([{'name': '/deltest/a_1', 'hash': 'a', 'bytes': '1'},
                    {'name': '/deltest-unauth/q_17',
                     'hash': '11', 'bytes': '17'}]))
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest/manifest-with-unauth-segment',
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'DELETE', '/v1/AUTH_test/deltest-unauth/q_17',
        swob.HTTPUnauthorized, {}, None)
        headers={'X-Static-Large-Object': 'True'},
        body=good_data)(env, start_response)


test_xml_data = '''<?xml version="1.0" encoding="UTF-8"?>
<static_large_object>
<object_segment>
<path>/cont/object</path>
<etag>etagoftheobjectsegment</etag>
<size_bytes>100</size_bytes>
</object_segment>
</static_large_object>
'''
test_json_data = json.dumps([{'path': '/cont/object',
                              'etag': 'etagoftheobjectsegment',
                              'size_bytes': 100}])


def fake_start_response(*args, **kwargs):
    pass


class TestStaticLargeObject(unittest.TestCase):

    def setUp(self):
        self.app = FakeApp()
        self.slo = slo.filter_factory({})(self.app)
        self.slo.min_segment_size = 1

    def tearDown(self):
def handle_container_listing(self, env, start_response):
    # This code may be clearer by using Request(env).get_response()
    # instead of self._app_call(env)
    api_vers, account, container_name = split_path(
        env['PATH_INFO'], 3, 3, True)
    sub_env = env.copy()
    orig_container = get_unversioned_container(container_name)
    if orig_container != container_name:
        # Check that container_name is actually the versioning
        # container for orig_container
        sub_env['PATH_INFO'] = '/%s/%s/%s' % (api_vers, account,
                                              orig_container)
        info = get_container_info(sub_env, self.app, swift_source='VW')
        vers_loc = info.get('sysmeta', {}).get('versions-location')
        # Sometimes we receive versioned listing requests whereas
        # versioning is not enabled (vers_loc is None or empty).
        if vers_loc and vers_loc != container_name:
            # The container specified in the request ends with the
            # versioning suffix, but user has asked the versions to
            # be saved elsewhere, thus we will consider this as a
            # regular listing request.
            orig_container = container_name

    if orig_container != container_name:
        qs = parse_qs(sub_env.get('QUERY_STRING', ''))
        if 'marker' in qs:
            marker, _ = swift3_split_object_name_version(qs['marker'][0])
            qs['marker'] = [marker]
        if 'prefix' in qs:
            prefix, _ = swift3_split_object_name_version(qs['prefix'][0])
            qs['prefix'] = prefix
        qs['format'] = 'json'
        sub_env['QUERY_STRING'] = urlencode(qs, True)
        sub_env['oio.query'] = {'versions': True}

    resp = super(OioVersionedWritesContext,
                 self).handle_container_request(sub_env,
                                                lambda x, y, z: None)

    if orig_container != container_name and \
            self._response_status == '200 OK':
        with closing_if_possible(resp):
            versioned_objects = json.loads("".join(resp))

        # Discard the latest version of each object, because it is
        # not supposed to appear in the versioning container.
        # Also keep object prefixes as some of them may be shadowed
        # from the "main" container.
        latest = dict()
        subdirs = []
        for obj in versioned_objects:
            if 'subdir' in obj:
                subdirs.append(obj)
                continue
            ver = int(obj.get('version', '0'))
            # An integer is always strictly greater than None
            if ver > latest.get(obj['name']):
                latest[obj['name']] = ver
        versioned_objects = [
            obj for obj in versioned_objects
            if 'subdir' not in obj
            and (int(obj.get('version', '0')) != latest[obj['name']]
                 or is_deleted(obj))
        ]
        for obj in versioned_objects:
            obj['name'] = swift3_versioned_object_name(
                obj['name'], obj.get('version', ''))
        versioned_objects += subdirs

        resp = json.dumps(versioned_objects)
        self._response_headers = [x for x in self._response_headers
                                  if x[0] != 'Content-Length']
        self._response_headers.append(('Content-Length',
                                       str(len(resp))))

    start_response(self._response_status,
                   self._response_headers,
                   self._response_exc_info)
    return resp
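# Standalone sketch of the latest-version filtering above, on a sample
# listing; the newest version of each object is dropped because it lives
# in the main container, not the versioning container. An explicit -1
# default replaces the py2-only None comparison for portability.
listing = [{'name': 'o', 'version': '3'},
           {'name': 'o', 'version': '2'},
           {'name': 'o', 'version': '1'}]
latest = {}
for obj in listing:
    ver = int(obj.get('version', '0'))
    if ver > latest.get(obj['name'], -1):
        latest[obj['name']] = ver
older = [o for o in listing
         if int(o.get('version', '0')) != latest[o['name']]]
# older keeps versions 2 and 1; version 3 is discarded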
def test_handle_multipart_put_bad_data(self):
    bad_data = json.dumps([{'path': '/cont/object',
                            'etag': 'etagoftheobj',
                            'size_bytes': 'lala'}])
    req = Request.blank(
        '/test_good/AUTH_test/c/man?multipart-manifest=put',
        environ={'REQUEST_METHOD': 'PUT'}, body=bad_data)
    self.assertRaises(HTTPException, self.slo.handle_multipart_put,
                      req, fake_start_response)

    for bad_data in [
            json.dumps([{'path': '/cont', 'etag': 'etagoftheobj',
                         'size_bytes': 100}]),
            json.dumps('asdf'), json.dumps(None), json.dumps(5),
            'not json', '1234', None, '', json.dumps({'path': None}),
            json.dumps([{'path': '/c/o', 'etag': None,
                         'size_bytes': 12}]),
            json.dumps([{'path': '/c/o', 'etag': 'asdf',
                         'size_bytes': 'sd'}]),
            json.dumps([{'path': 12, 'etag': 'etagoftheobj',
                         'size_bytes': 100}]),
            json.dumps([{'path': u'/cont/object\u2661',
                         'etag': 'etagoftheobj', 'size_bytes': 100}]),
            json.dumps([{'path': 12, 'size_bytes': 100}]),
            json.dumps([{'path': 12, 'size_bytes': 100}]),
            json.dumps([{'path': None, 'etag': 'etagoftheobj',
                         'size_bytes': 100}])]:
        req = Request.blank(
            '/test_good/AUTH_test/c/man?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT'}, body=bad_data)
        self.assertRaises(HTTPException, self.slo.handle_multipart_put,
                          req, fake_start_response)
def __call__(self, env, start_response):
    self.calls += 1
    if env['PATH_INFO'] == '/':
        return Response(status=200, body='passed')(env, start_response)
    if env['PATH_INFO'].startswith('/test_good/'):
        j, v, a, cont, obj = env['PATH_INFO'].split('/')
        if obj == 'a_2':
            return Response(status=400)(env, start_response)
        cont_len = 100
        if obj == 'small_object':
            cont_len = 10
        headers = {'etag': 'etagoftheobjectsegment',
                   'Content-Length': cont_len}
        if obj == 'slob':
            headers['X-Static-Large-Object'] = 'true'
        return Response(status=200, headers=headers)(env, start_response)
    if env['PATH_INFO'].startswith('/test_good_check/'):
        j, v, a, cont, obj = env['PATH_INFO'].split('/')
        etag, size = obj.split('_')
        last_mod = 'Fri, 01 Feb 2012 20:38:36 GMT'
        if obj == 'a_1':
            last_mod = ''
        return Response(
            status=200,
            headers={'etag': etag, 'Last-Modified': last_mod,
                     'Content-Length': size})(env, start_response)
    if env['PATH_INFO'].startswith('/test_get/'):
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        return Response(
            status=200,
            headers={'X-Static-Large-Object': 'True',
                     'Content-Type': 'html;swift_bytes=55'},
            body=good_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_get_broke_json/'):
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=good_data[:-5])(env, start_response)
    if env['PATH_INFO'].startswith('/test_get_bad_json/'):
        bad_data = json.dumps(
            [{'name': '/c/a_1', 'something': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'bytes': '2'}])
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=bad_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_get_not_slo/'):
        return Response(status=200, body='lalala')(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_404/'):
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        if env['PATH_INFO'].endswith('/c/man_404'):
            return Response(status=404)(env, start_response)
        if env['PATH_INFO'].endswith('/c/a_1'):
            return Response(status=404)(env, start_response)
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=good_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete/'):
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=good_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_nested/'):
        nested_data = json.dumps(
            [{'name': '/b/b_2', 'hash': 'a', 'bytes': '1'},
             {'name': '/c/c_3', 'hash': 'b', 'bytes': '2'}])
        good_data = json.dumps(
            [{'name': '/a/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/a/sub_nest', 'hash': 'a', 'sub_slo': True,
              'bytes': len(nested_data)},
             {'name': '/d/d_3', 'hash': 'b', 'bytes': '2'}])
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        if 'sub_nest' in env['PATH_INFO']:
            return Response(status=200,
                            headers={'X-Static-Large-Object': 'True'},
                            body=nested_data)(env, start_response)
        else:
            return Response(status=200,
                            headers={'X-Static-Large-Object': 'True'},
                            body=good_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_nested_404/'):
        good_data = json.dumps(
            [{'name': '/a/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/a/sub_nest', 'hash': 'a', 'bytes': '2',
              'sub_slo': True},
             {'name': '/d/d_3', 'hash': 'b', 'bytes': '3'}])
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        if 'sub_nest' in env['PATH_INFO']:
            return Response(status=404)(env, start_response)
        else:
            return Response(status=200,
                            headers={'X-Static-Large-Object': 'True'},
                            body=good_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_bad_json/'):
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body='bad json')(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_bad_man/'):
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        return Response(status=200, body='')(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_401/'):
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        if env['PATH_INFO'].endswith('/d/b_2'):
            return Response(status=401)(env, start_response)
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=good_data)(env, start_response)
def SERVICES(self, req):
    body = {}
    attr_list = []
    body['min_base_api_version'] = "v1"
    body['max_base_api_version'] = "v1"
    body['search_provider'] = "HP(UCSC)"
    body['search_enabled'] = "true"
    body['min_search_api_version'] = "v1"
    body['max_search_api_version'] = self.version
    body['freshness_complete'] = "false"
    body['freshness_partial'] = "false"
    body['complex_boolean_expr'] = "true"
    body['attr_list'] = attr_list
    # Account, container, and object attributes. Every attribute is
    # sortable, so only the name and data type vary; disabled attributes
    # are kept as comments.
    searchable_attrs = [
        # Account attributes
        ("account_uri", "string"),
        ("account_name", "string"),
        ("account_tenant_id", "string"),
        ("account_first_use_time", "date"),
        ("account_last_modified_time", "date"),
        ("account_last_changed_time", "date"),
        ("account_delete_time", "date"),
        ("account_last_activity_time", "date"),
        ("account_container_count", "numeric"),
        ("account_object_count", "numeric"),
        ("account_bytes_used", "numeric"),
        # Container attributes
        ("container_uri", "string"),
        ("container_name", "string"),
        ("container_account_name", "string"),
        ("container_create_time", "date"),
        ("container_last_modified_time", "date"),
        ("container_last_changed_time", "date"),
        ("container_delete_time", "date"),
        ("container_last_activity_time", "date"),
        ("container_read_permissions", "string"),
        ("container_write_permissions", "string"),
        ("container_sync_to", "string"),
        ("container_sync_key", "string"),
        # ("container_versions_location", "string"),
        ("container_object_count", "numeric"),
        ("container_bytes_used", "numeric"),
        # Object attributes
        ("object_uri", "string"),
        ("object_name", "string"),
        ("object_account_name", "string"),
        ("object_container_name", "string"),
        ("object_location", "string"),
        ("object_uri_create_time", "date"),
        ("object_last_modified_time", "date"),
        ("object_last_changed_time", "date"),
        ("object_delete_time", "date"),
        ("object_last_activity_time", "date"),
        ("object_etag_hash", "string"),
        ("object_content_type", "string"),
        ("object_content_length", "numeric"),
        ("object_content_encoding", "string"),
        ("object_content_disposition", "string"),
        ("object_content_language", "string"),
        # ("object_cache_control", "string"),
        ("object_delete_at", "date"),
        # ("object_manifest_type", "numeric"),
        # ("object_manifest", "string"),
        # Object CORS attributes (disabled):
        # ("object_access_control_allow_origin", "string"),
        # ("object_access_control_allow_credentials", "string"),
        # ("object_access_control_expose_headers", "string"),
        # ("object_access_control_max_age", "string"),
        # ("object_access_control_allow_methods", "string"),
        # ("object_access_control_allow_headers", "string"),
        # ("object_origin", "string"),
        # ("object_access_control_request_method", "string"),
        # ("object_access_control_request_headers", "string"),
    ]
    for attr_name, data_type in searchable_attrs:
        attr_list.append({
            "attr_name": attr_name,
            "data_type": data_type,
            "sortable": "true"
        })
    body = json.dumps(body, indent=4, separators=(',', ' : '))
    return Response(request=req, body=body, content_type="json")
def handle_multipart_put(self, req):
    """
    Will handle the PUT of a SLO manifest.
    HEADs every object in the manifest to check that it is valid; if so,
    saves a manifest generated from the user input.

    :param req: a swob.Request with an obj in path
    :raises: HttpException on errors
    """
    try:
        vrs, account, container, obj = req.split_path(1, 4, True)
    except ValueError:
        return self.app
    if req.content_length > self.max_manifest_size:
        raise HTTPRequestEntityTooLarge(
            "Manifest File > %d bytes" % self.max_manifest_size)
    if req.headers.get('X-Copy-From'):
        raise HTTPMethodNotAllowed(
            'Multipart Manifest PUTs cannot be Copy requests')
    if req.content_length is None and \
            req.headers.get('transfer-encoding', '').lower() != 'chunked':
        raise HTTPLengthRequired(request=req)
    parsed_data = parse_input(req.body_file.read(self.max_manifest_size))
    problem_segments = []
    if len(parsed_data) > self.max_manifest_segments:
        raise HTTPRequestEntityTooLarge(
            'Number segments must be <= %d' % self.max_manifest_segments)
    total_size = 0
    out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
    if not out_content_type:
        out_content_type = 'text/plain'
    data_for_storage = []
    for index, seg_dict in enumerate(parsed_data):
        obj_path = '/'.join(
            ['', vrs, account, seg_dict['path'].lstrip('/')])
        try:
            seg_size = int(seg_dict['size_bytes'])
        except (ValueError, TypeError):
            raise HTTPBadRequest('Invalid Manifest File')
        if seg_size < self.min_segment_size and \
                (index == 0 or index < len(parsed_data) - 1):
            raise HTTPBadRequest(
                'Each segment, except the last, must be larger than '
                '%d bytes.' % self.min_segment_size)

        new_env = req.environ.copy()
        if isinstance(obj_path, unicode):
            obj_path = obj_path.encode('utf-8')
        new_env['PATH_INFO'] = obj_path
        new_env['REQUEST_METHOD'] = 'HEAD'
        new_env['swift.source'] = 'SLO'
        del(new_env['wsgi.input'])
        del(new_env['QUERY_STRING'])
        new_env['CONTENT_LENGTH'] = 0
        new_env['HTTP_USER_AGENT'] = \
            '%s MultipartPUT' % req.environ.get('HTTP_USER_AGENT')
        head_seg_resp = \
            Request.blank(obj_path, new_env).get_response(self.app)
        if head_seg_resp.is_success:
            total_size += seg_size
            if seg_size != head_seg_resp.content_length:
                problem_segments.append([quote(obj_path),
                                         'Size Mismatch'])
            if seg_dict['etag'] != head_seg_resp.etag:
                problem_segments.append([quote(obj_path),
                                         'Etag Mismatch'])
            if head_seg_resp.last_modified:
                last_modified = head_seg_resp.last_modified
            else:
                # shouldn't happen
                last_modified = datetime.now()
            last_modified_formatted = \
                last_modified.strftime('%Y-%m-%dT%H:%M:%S.%f')
            seg_data = {'name': '/' + seg_dict['path'].lstrip('/'),
                        'bytes': seg_size,
                        'hash': seg_dict['etag'],
                        'content_type': head_seg_resp.content_type,
                        'last_modified': last_modified_formatted}
            if config_true_value(
                    head_seg_resp.headers.get('X-Static-Large-Object')):
                seg_data['sub_slo'] = True
            data_for_storage.append(seg_data)
        else:
            problem_segments.append([quote(obj_path),
                                     head_seg_resp.status])
    if problem_segments:
        resp_body = get_response_body(
            out_content_type, {}, problem_segments)
        raise HTTPBadRequest(resp_body, content_type=out_content_type)
    env = req.environ
    if not env.get('CONTENT_TYPE'):
        guessed_type, _junk = mimetypes.guess_type(req.path_info)
        env['CONTENT_TYPE'] = guessed_type or 'application/octet-stream'
    env['swift.content_type_overriden'] = True
    env['CONTENT_TYPE'] += ";swift_bytes=%d" % total_size
    env['HTTP_X_STATIC_LARGE_OBJECT'] = 'True'
    json_data = json.dumps(data_for_storage)
    env['CONTENT_LENGTH'] = str(len(json_data))
    env['wsgi.input'] = StringIO(json_data)
    return self.app
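On the wire, the user-supplied manifest this handler validates is just a JSON array of segment descriptors. A minimal sketch (the container, paths, etags, and sizes below are made up):

import json

# Hypothetical two-segment manifest; etags and sizes are illustrative.
manifest = json.dumps([
    {'path': '/segments/part-000',
     'etag': 'a7982ef2a2a8bd62093ba92f930c2d35', 'size_bytes': 1048576},
    {'path': '/segments/part-001',
     'etag': '0f343b0931126a20f133d67c2b018a3b', 'size_bytes': 524288},
])
# PUT it against the manifest object with ?multipart-manifest=put, e.g.
#   curl -X PUT -H "X-Auth-Token: $TOKEN" -d "$manifest" \
#       "$STORAGE_URL/cont/big-obj?multipart-manifest=put"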
def output_json(metaList):
    """
    Converts the list of dicts into a JSON format
    """
    return json.dumps(metaList, indent=4, separators=(',', ' : '))
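The non-default separators keep the indent=4 layout but put a space on both sides of each colon; a quick interactive check:

>>> print output_json([{'account': 'AUTH_test'}])
[
    {
        "account" : "AUTH_test"
    }
]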
def response_iter():
    # NB: XML requires that the XML declaration, if present, be at the
    # very start of the document. Clients *will* call us out on not
    # being valid XML if we pass through whitespace before it.
    # Track whether we've sent anything yet so we can yield out that
    # declaration *first*
    yielded_anything = False
    try:
        try:
            # TODO: add support for versioning
            put_resp = req.get_response(
                self.app, 'PUT', body=json.dumps(manifest),
                query={'multipart-manifest': 'put',
                       'heartbeat': 'on'},
                headers=headers)
            if put_resp.status_int == 202:
                body = []
                put_resp.fix_conditional_response()
                for chunk in put_resp.response_iter:
                    if not chunk.strip():
                        if time.time() - start_time < 10:
                            # Include some grace period to keep
                            # ceph-s3tests happy
                            continue
                        if not yielded_anything:
                            yield (b'<?xml version="1.0" '
                                   b'encoding="UTF-8"?>\n')
                        yielded_anything = True
                        yield chunk
                        continue
                    body.append(chunk)
                body = json.loads(b''.join(body))
                if body['Response Status'] != '201 Created':
                    for seg, err in body['Errors']:
                        if err == too_small_message:
                            raise EntityTooSmall()
                        elif err in ('Etag Mismatch', '404 Not Found'):
                            raise InvalidPart(upload_id=upload_id)
                    raise InvalidRequest(
                        status=body['Response Status'],
                        msg='\n'.join(': '.join(err)
                                      for err in body['Errors']))
        except BadSwiftRequest as e:
            msg = str(e)
            if too_small_message in msg:
                raise EntityTooSmall(msg)
            elif ', Etag Mismatch' in msg:
                raise InvalidPart(upload_id=upload_id)
            elif ', 404 Not Found' in msg:
                raise InvalidPart(upload_id=upload_id)
            else:
                raise

        # clean up the multipart-upload record
        obj = '%s/%s' % (req.object_name, upload_id)
        try:
            req.get_response(self.app, 'DELETE', container, obj)
        except NoSuchKey:
            # We know that this existed long enough for us to HEAD
            pass

        result_elem = Element('CompleteMultipartUploadResult')

        # NOTE: boto with sig v4 appends port to HTTP_HOST value at
        # the request header when the port is non default value and it
        # makes req.host_url like as http://localhost:8080:8080/path
        # that obviously invalid. Probably it should be resolved at
        # swift.common.swob though, tentatively we are parsing and
        # reconstructing the correct host_url info here.
        # in detail, https://github.com/boto/boto/pull/3513
        parsed_url = urlparse(req.host_url)
        host_url = '%s://%s' % (parsed_url.scheme, parsed_url.hostname)
        # Why are we doing our own port parsing? Because py3 decided
        # to start raising ValueErrors on access after parsing such
        # an invalid port
        netloc = parsed_url.netloc.split('@')[-1].split(']')[-1]
        if ':' in netloc:
            port = netloc.split(':', 2)[1]
            host_url += ':%s' % port

        SubElement(result_elem, 'Location').text = host_url + req.path
        SubElement(result_elem, 'Bucket').text = req.container_name
        SubElement(result_elem, 'Key').text = req.object_name
        SubElement(result_elem, 'ETag').text = '"%s"' % s3_etag
        resp.headers.pop('ETag', None)
        if yielded_anything:
            yield b'\n'
        yield tostring(result_elem,
                       xml_declaration=not yielded_anything)
    except ErrorResponse as err_resp:
        if yielded_anything:
            err_resp.xml_declaration = False
            yield b'\n'
        else:
            # Oh good, we can still change HTTP status code, too!
            resp.status = err_resp.status
        for chunk in err_resp({}, lambda *a: None):
            yield chunk
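Consumers of the heartbeating SLO PUT see the inverse of this logic: whitespace-only chunks are keepalives emitted while the manifest is still being validated, and the JSON result document arrives only at the end. A minimal client-side sketch under that assumption:

import json

def read_heartbeat_response(resp_iter):
    """Collect the JSON result from a heartbeating 202 SLO PUT.

    Whitespace-only chunks are keepalives; everything else is part of
    the final JSON document.
    """
    payload = []
    for chunk in resp_iter:
        if not chunk.strip():
            continue  # keepalive chunk
        payload.append(chunk)
    return json.loads(b''.join(payload))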
def POST(self, req):
    """
    Handles Complete Multipart Upload.
    """
    upload_id = req.params['uploadId']
    resp = _get_upload_info(req, self.app, upload_id)
    headers = {}
    for key, val in resp.headers.iteritems():
        _key = key.lower()
        if _key.startswith('x-amz-meta-'):
            headers['x-object-meta-' + _key[11:]] = val
        elif _key == 'content-type':
            headers['Content-Type'] = val

    # Query for the objects in the segments area to make sure it
    # completed
    query = {
        'format': 'json',
        'prefix': '%s/%s/' % (req.object_name, upload_id),
        'delimiter': '/',
    }

    container = req.container_name + MULTIUPLOAD_SUFFIX
    resp = req.get_response(self.app, 'GET', container, '', query=query)
    objinfo = json.loads(resp.body)
    objtable = dict((o['name'],
                     {'path': '/'.join(['', container, o['name']]),
                      'etag': o['hash'],
                      'size_bytes': o['bytes']}) for o in objinfo)

    manifest = []
    previous_number = 0
    try:
        xml = req.xml(MAX_COMPLETE_UPLOAD_BODY_SIZE)
        complete_elem = fromstring(xml, 'CompleteMultipartUpload')
        for part_elem in complete_elem.iterchildren('Part'):
            part_number = int(part_elem.find('./PartNumber').text)

            if part_number <= previous_number:
                raise InvalidPartOrder(upload_id=upload_id)
            previous_number = part_number

            etag = part_elem.find('./ETag').text
            if len(etag) >= 2 and etag[0] == '"' and etag[-1] == '"':
                # strip double quotes
                etag = etag[1:-1]

            info = objtable.get("%s/%s/%s" % (req.object_name, upload_id,
                                              part_number))
            if info is None or info['etag'] != etag:
                raise InvalidPart(upload_id=upload_id,
                                  part_number=part_number)
            manifest.append(info)
    except (XMLSyntaxError, DocumentInvalid):
        raise MalformedXML()
    except ErrorResponse:
        raise
    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        LOGGER.error(e)
        raise exc_type, exc_value, exc_traceback

    # Following swift commit 7f636a5, zero-byte segments aren't allowed,
    # even as the final segment
    if int(info['size_bytes']) == 0:
        manifest.pop()
        # Ordinarily, we just let SLO check segment sizes. However, we
        # just popped off a zero-byte segment; if there was a second
        # zero-byte segment and it was at the end, it would succeed on
        # Swift < 2.6.0 and fail on newer Swift. It seems reasonable
        # that it should always fail.
        if manifest and int(manifest[-1]['size_bytes']) == 0:
            raise EntityTooSmall()

    try:
        # TODO: add support for versioning
        if manifest:
            resp = req.get_response(self.app, 'PUT',
                                    body=json.dumps(manifest),
                                    query={'multipart-manifest': 'put'},
                                    headers=headers)
        else:
            # the upload must have consisted of a single zero-length
            # part; just write it directly
            resp = req.get_response(self.app, 'PUT', body='',
                                    headers=headers)
    except BadSwiftRequest as e:
        msg = str(e)
        msg_pre_260 = 'Each segment, except the last, must be at least '
        # see https://github.com/openstack/swift/commit/c0866ce
        msg_260 = ('too small; each segment, except the last, must be '
                   'at least ')
        # see https://github.com/openstack/swift/commit/7f636a5
        msg_post_260 = 'too small; each segment must be at least 1 byte'
        if msg.startswith(msg_pre_260) or \
                msg_260 in msg or msg_post_260 in msg:
            # FIXME: AWS S3 allows a smaller object than 5 MB if there
            # is only one part. Use a COPY request to copy the part
            # object from the segments container instead.
            raise EntityTooSmall(msg)
        else:
            raise

    if int(info['size_bytes']) == 0:
        # clean up the zero-byte segment
        empty_seg_cont, empty_seg_name = info['path'].split('/', 2)[1:]
        req.get_response(self.app, 'DELETE',
                         container=empty_seg_cont, obj=empty_seg_name)

    # clean up the multipart-upload record
    obj = '%s/%s' % (req.object_name, upload_id)
    req.get_response(self.app, 'DELETE', container, obj)

    result_elem = Element('CompleteMultipartUploadResult')

    # NOTE: boto with sig v4 appends port to HTTP_HOST value at the
    # request header when the port is non default value and it makes
    # req.host_url like as http://localhost:8080:8080/path
    # that obviously invalid. Probably it should be resolved at
    # swift.common.swob though, tentatively we are parsing and
    # reconstructing the correct host_url info here.
    # in detail, https://github.com/boto/boto/pull/3513
    parsed_url = urlparse(req.host_url)
    host_url = '%s://%s' % (parsed_url.scheme, parsed_url.hostname)
    if parsed_url.port:
        host_url += ':%s' % parsed_url.port

    SubElement(result_elem, 'Location').text = host_url + req.path
    SubElement(result_elem, 'Bucket').text = req.container_name
    SubElement(result_elem, 'Key').text = req.object_name
    SubElement(result_elem, 'ETag').text = resp.etag

    resp.body = tostring(result_elem)
    resp.status = 200
    resp.content_type = "application/xml"

    return resp
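For context, the CompleteMultipartUpload document this handler parses carries one Part element per uploaded segment, and part numbers must be strictly increasing (the values below are illustrative):

<CompleteMultipartUpload>
  <Part>
    <PartNumber>1</PartNumber>
    <ETag>"7dfa07a8e59ddbcd1dc84d4c4f82aea1"</ETag>
  </Part>
  <Part>
    <PartNumber>2</PartNumber>
    <ETag>"fba9dede5f27731c9771645a39863328"</ETag>
  </Part>
</CompleteMultipartUpload>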
def test_admin(self):
    if tf.skip3:
        raise SkipTest

    def get_listing(url, token, parsed, conn):
        conn.request('GET', '%s/%s' % (parsed.path, self.container),
                     '', {'X-Auth-Token': token})
        return check_response(conn)

    def post_account(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    def get(url, token, parsed, conn, name):
        conn.request('GET', '%s/%s/%s' % (parsed.path, self.container,
                                          name), '',
                     {'X-Auth-Token': token})
        return check_response(conn)

    def put(url, token, parsed, conn, name):
        conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
                                          name), 'test',
                     {'X-Auth-Token': token})
        return check_response(conn)

    def delete(url, token, parsed, conn, name):
        conn.request('DELETE', '%s/%s/%s' % (parsed.path, self.container,
                                             name), '',
                     {'X-Auth-Token': token})
        return check_response(conn)

    # cannot list objects
    resp = retry(get_listing, use_account=3)
    resp.read()
    self.assertEquals(resp.status, 403)

    # cannot get object
    resp = retry(get, self.obj, use_account=3)
    resp.read()
    self.assertEquals(resp.status, 403)

    # grant admin access
    acl_user = tf.swift_test_user[2]
    acl = {'admin': [acl_user]}
    headers = {'x-account-access-control': json.dumps(acl)}
    resp = retry(post_account, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)

    # can list objects
    resp = retry(get_listing, use_account=3)
    listing = resp.read()
    self.assertEquals(resp.status, 200)
    self.assert_(self.obj in listing)

    # can get object
    resp = retry(get, self.obj, use_account=3)
    body = resp.read()
    self.assertEquals(resp.status, 200)
    self.assertEquals(body, 'test')

    # can put an object
    obj_name = str(uuid4())
    resp = retry(put, obj_name, use_account=3)
    body = resp.read()
    self.assertEquals(resp.status, 201)

    # can delete an object
    resp = retry(delete, self.obj, use_account=3)
    body = resp.read()
    self.assertEquals(resp.status, 204)

    # sanity with account1
    resp = retry(get_listing, use_account=3)
    listing = resp.read()
    self.assertEquals(resp.status, 200)
    self.assert_(obj_name in listing)
    self.assert_(self.obj not in listing)
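The header granted above uses Swift's V2 account ACL format: a JSON object mapping an access level to a list of account:user names. A fuller sketch of the same header (user names are illustrative):

import json

acl = {
    'admin': ['account2:user2'],       # full control, including ACLs
    'read-write': ['account3:user3'],  # may create and modify objects
    'read-only': ['account4:user4'],   # may list and read only
}
headers = {'x-account-access-control': json.dumps(acl)}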
def response_iter():
    # NB: XML requires that the XML declaration, if present, be at the
    # very start of the document. Clients *will* call us out on not
    # being valid XML if we pass through whitespace before it.
    # Track whether we've sent anything yet so we can yield out that
    # declaration *first*
    yielded_anything = False
    try:
        try:
            # TODO: add support for versioning
            put_resp = req.get_response(
                self.app, 'PUT', body=json.dumps(manifest),
                query={'multipart-manifest': 'put',
                       'heartbeat': 'on'},
                headers=headers)
            if put_resp.status_int == 202:
                body = []
                put_resp.fix_conditional_response()
                for chunk in put_resp.response_iter:
                    if not chunk.strip():
                        if time.time() - start_time < 10:
                            # Include some grace period to keep
                            # ceph-s3tests happy
                            continue
                        if not yielded_anything:
                            yield (b'<?xml version="1.0" '
                                   b'encoding="UTF-8"?>\n')
                        yielded_anything = True
                        yield chunk
                        continue
                    body.append(chunk)
                body = json.loads(b''.join(body))
                if body['Response Status'] != '201 Created':
                    for seg, err in body['Errors']:
                        if err == too_small_message:
                            raise EntityTooSmall()
                        elif err in ('Etag Mismatch', '404 Not Found'):
                            raise InvalidPart(upload_id=upload_id)
                    raise InvalidRequest(
                        status=body['Response Status'],
                        msg='\n'.join(': '.join(err)
                                      for err in body['Errors']))
        except BadSwiftRequest as e:
            msg = str(e)
            if too_small_message in msg:
                raise EntityTooSmall(msg)
            elif ', Etag Mismatch' in msg:
                raise InvalidPart(upload_id=upload_id)
            elif ', 404 Not Found' in msg:
                raise InvalidPart(upload_id=upload_id)
            else:
                raise

        # clean up the multipart-upload record
        obj = '%s/%s' % (req.object_name, upload_id)
        try:
            req.get_response(self.app, 'DELETE', container, obj)
        except NoSuchKey:
            # The important thing is that we wrote out a tombstone to
            # make sure the marker got cleaned up. If it's already
            # gone (e.g., because of concurrent completes or a retried
            # complete), so much the better.
            pass

        yield _make_complete_body(req, s3_etag, yielded_anything)
    except ErrorResponse as err_resp:
        if yielded_anything:
            err_resp.xml_declaration = False
            yield b'\n'
        else:
            # Oh good, we can still change HTTP status code, too!
            resp.status = err_resp.status
        for chunk in err_resp({}, lambda *a: None):
            yield chunk
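This later revision factors the XML result construction out of the generator into a _make_complete_body helper, which is not shown in this excerpt. A plausible reconstruction from the inline version earlier (the boto host_url workaround is elided; the real helper may differ):

def _make_complete_body(req, s3_etag, yielded_anything):
    # Assumed shape, mirroring the inline result construction above.
    result_elem = Element('CompleteMultipartUploadResult')
    SubElement(result_elem, 'Location').text = req.host_url + req.path
    SubElement(result_elem, 'Bucket').text = req.container_name
    SubElement(result_elem, 'Key').text = req.object_name
    SubElement(result_elem, 'ETag').text = '"%s"' % s3_etag
    body = []
    if yielded_anything:
        # heartbeats already started the document, so no declaration
        body.append(b'\n')
    body.append(tostring(result_elem,
                         xml_declaration=not yielded_anything))
    return b''.join(body)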
def v1_format_response(self, req, endpoints, **kwargs):
    return Response(json.dumps(endpoints),
                    content_type='application/json')
def test_recon_get_version(self):
    req = Request.blank('/recon/version',
                        environ={'REQUEST_METHOD': 'GET'})
    resp = self.app(req.environ, start_response)
    self.assertEquals(resp, [json.dumps({'version': swiftver})])
def GET(self, req):
    """Handle HTTP GET request."""
    drive, part, account, container, obj = split_and_validate_path(
        req, 4, 5, True)
    path = get_param(req, 'path')
    prefix = get_param(req, 'prefix')
    delimiter = get_param(req, 'delimiter')
    if delimiter and (len(delimiter) > 1 or ord(delimiter) > 254):
        # delimiters can be made more flexible later
        return HTTPPreconditionFailed(body='Bad delimiter')
    marker = get_param(req, 'marker', '')
    end_marker = get_param(req, 'end_marker')
    limit = CONTAINER_LISTING_LIMIT
    given_limit = get_param(req, 'limit')
    if given_limit and given_limit.isdigit():
        limit = int(given_limit)
        if limit > CONTAINER_LISTING_LIMIT:
            return HTTPPreconditionFailed(
                request=req,
                body='Maximum limit is %d' % CONTAINER_LISTING_LIMIT)
    out_content_type = get_listing_content_type(req)
    if self.mount_check and not check_mount(self.root, drive):
        return HTTPInsufficientStorage(drive=drive, request=req)
    broker = self._get_container_broker(drive, part, account, container,
                                        pending_timeout=0.1,
                                        stale_reads_ok=True)
    if broker.is_deleted():
        return HTTPNotFound(request=req)
    info = broker.get_info()
    resp_headers = {
        'X-Container-Object-Count': info['object_count'],
        'X-Container-Bytes-Used': info['bytes_used'],
        'X-Timestamp': info['created_at'],
        'X-PUT-Timestamp': info['put_timestamp'],
    }
    for key, (value, timestamp) in broker.metadata.iteritems():
        if value and (key.lower() in self.save_headers or
                      key.lower().startswith('x-container-meta-')):
            resp_headers[key] = value
    ret = Response(request=req, headers=resp_headers,
                   content_type=out_content_type, charset='utf-8')
    container_list = broker.list_objects_iter(limit, marker, end_marker,
                                              prefix, delimiter, path)
    if out_content_type == 'application/json':
        ret.body = json.dumps([self.update_data_record(record)
                               for record in container_list])
    elif out_content_type.endswith('/xml'):
        doc = Element('container', name=container.decode('utf-8'))
        for obj in container_list:
            record = self.update_data_record(obj)
            if 'subdir' in record:
                name = record['subdir'].decode('utf-8')
                sub = SubElement(doc, 'subdir', name=name)
                SubElement(sub, 'name').text = name
            else:
                obj_element = SubElement(doc, 'object')
                for field in ["name", "hash", "bytes", "content_type",
                              "last_modified"]:
                    SubElement(obj_element, field).text = str(
                        record.pop(field)).decode('utf-8')
                for field in sorted(record):
                    SubElement(obj_element, field).text = str(
                        record[field]).decode('utf-8')
        ret.body = tostring(doc, encoding='UTF-8').replace(
            "<?xml version='1.0' encoding='UTF-8'?>",
            '<?xml version="1.0" encoding="UTF-8"?>', 1)
    else:
        if not container_list:
            return HTTPNoContent(request=req, headers=resp_headers)
        ret.body = '\n'.join(rec[0] for rec in container_list) + '\n'
    return ret
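The three listing formats differ only in envelope, not data. Sketches of what a client sees for each negotiated content type (object names and values are illustrative):

text/plain, one object name per line:

    rose
    viola

application/json, one record per object:

    [{"name": "rose", "hash": "0", "bytes": 303,
      "content_type": "application/octet-stream",
      "last_modified": "2011-01-05T02:19:14.275290"}, ...]

application/xml, the same records as <object> elements:

    <?xml version="1.0" encoding="UTF-8"?>
    <container name="junk"><object><name>rose</name>...</object></container>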
def setup_objects(self):
    self.objects = (('rose', '2011-01-05T02:19:14.275290', 0, 303),
                    ('viola', '2011-01-05T02:19:14.275290', '0', 3909),
                    ('lily', '2011-01-05T02:19:14.275290', '0', '3909'),
                    ('with space', '2011-01-05T02:19:14.275290', 0, 390),
                    ('with%20space', '2011-01-05T02:19:14.275290',
                     0, 390))

    objects = map(
        lambda item: {'name': str(item[0]), 'last_modified': str(item[1]),
                      'hash': str(item[2]), 'bytes': str(item[3])},
        list(self.objects))
    object_list = json.dumps(objects)

    self.prefixes = ['rose', 'viola', 'lily']
    object_list_subdir = []
    for p in self.prefixes:
        object_list_subdir.append({"subdir": p})

    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments',
                        swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/rose',
                        swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/viola',
                        swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/lily',
                        swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/with'
                        ' space', swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/with%20'
                        'space', swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register(
        'GET', '/v1/AUTH_test/bucket+segments?format=json'
        '&marker=with%2520space', swob.HTTPOk, {}, json.dumps([]))
    self.swift.register(
        'GET', '/v1/AUTH_test/bucket+segments?format=json'
        '&marker=', swob.HTTPOk, {}, object_list)
    self.swift.register('HEAD', '/v1/AUTH_test/junk',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_test/nojunk',
                        swob.HTTPNotFound, {}, None)
    self.swift.register('GET', '/v1/AUTH_test/junk',
                        swob.HTTPOk, {}, object_list)
    self.swift.register(
        'GET',
        '/v1/AUTH_test/junk?delimiter=a&format=json&limit=3&marker=viola',
        swob.HTTPOk, {}, json.dumps(objects[2:]))
    self.swift.register('GET', '/v1/AUTH_test/junk-subdir',
                        swob.HTTPOk, {}, json.dumps(object_list_subdir))
    self.swift.register(
        'GET', '/v1/AUTH_test/subdirs?delimiter=/&format=json&limit=3',
        swob.HTTPOk, {}, json.dumps([
            {'subdir': 'nothing/'},
            {'subdir': 'but/'},
            {'subdir': 'subdirs/'},
        ]))
def account_listing_response(account, req, response_content_type,
                             broker=None, limit='', marker='',
                             end_marker='', prefix='', delimiter='',
                             reverse=False):
    """
    This is an exact copy of swift.account.utils.account_listing_response()
    except for one difference, i.e. this method passes
    response_content_type to the broker.list_containers_iter() method.
    """
    if broker is None:
        broker = FakeAccountBroker()
    resp_headers = get_response_headers(broker)
    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter,
                                               response_content_type,
                                               reverse)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, put_tstamp,
                is_subdir) in account_list:
            if is_subdir:
                data.append({'subdir': name})
            else:
                data.append(
                    {'name': name, 'count': object_count,
                     'bytes': bytes_used,
                     'last_modified': Timestamp(put_tstamp).isoformat})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, put_tstamp,
                is_subdir) in account_list:
            if is_subdir:
                output_list.append('<subdir name=%s />' %
                                   saxutils.quoteattr(name))
            else:
                item = ('<container><name>%s</name><count>%s</count>'
                        '<bytes>%s</bytes>'
                        '<last_modified>%s</last_modified></container>' %
                        (saxutils.escape(name), object_count, bytes_used,
                         Timestamp(put_tstamp).isoformat))
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        if not account_list:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in account_list) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
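The JSON branch above yields entries shaped like these (values illustrative); note that subdir markers carry only the one key:

[
    {"subdir": "photos/"},
    {"name": "docs", "count": 2, "bytes": 9287,
     "last_modified": "2013-11-19T20:08:13.283452"}
]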
def audit_all_objects(self, mode='once', device_dirs=None):
    description = ''
    if device_dirs:
        device_dir_str = ','.join(sorted(device_dirs))
        description = _(' - %s') % device_dir_str
    self.logger.info(_('Begin object audit "%s" mode (%s%s)') %
                     (mode, self.auditor_type, description))
    begin = reported = time.time()
    self.total_bytes_processed = 0
    self.total_files_processed = 0
    total_quarantines = 0
    total_errors = 0
    time_auditing = 0
    all_locs = self.diskfile_mgr.object_audit_location_generator(
        device_dirs=device_dirs)
    for location in all_locs:
        loop_time = time.time()
        self.failsafe_object_audit(location)
        self.logger.timing_since('timing', loop_time)
        self.files_running_time = ratelimit_sleep(
            self.files_running_time, self.max_files_per_second)
        self.total_files_processed += 1
        now = time.time()
        if now - reported >= self.log_time:
            self.logger.info(_(
                'Object audit (%(type)s). '
                'Since %(start_time)s: Locally: %(passes)d passed, '
                '%(quars)d quarantined, %(errors)d errors '
                'files/sec: %(frate).2f , bytes/sec: %(brate).2f, '
                'Total time: %(total).2f, Auditing time: %(audit).2f, '
                'Rate: %(audit_rate).2f') % {
                    'type': '%s%s' % (self.auditor_type, description),
                    'start_time': time.ctime(reported),
                    'passes': self.passes, 'quars': self.quarantines,
                    'errors': self.errors,
                    'frate': self.passes / (now - reported),
                    'brate': self.bytes_processed / (now - reported),
                    'total': (now - begin), 'audit': time_auditing,
                    'audit_rate': time_auditing / (now - begin)})
            cache_entry = self.create_recon_nested_dict(
                'object_auditor_stats_%s' % (self.auditor_type),
                device_dirs,
                {'errors': self.errors, 'passes': self.passes,
                 'quarantined': self.quarantines,
                 'bytes_processed': self.bytes_processed,
                 'start_time': reported, 'audit_time': time_auditing})
            dump_recon_cache(cache_entry, self.rcache, self.logger)
            reported = now
            total_quarantines += self.quarantines
            total_errors += self.errors
            self.passes = 0
            self.quarantines = 0
            self.errors = 0
            self.bytes_processed = 0
        time_auditing += (now - loop_time)
    # Avoid divide by zero during very short runs
    elapsed = (time.time() - begin) or 0.000001
    self.logger.info(_(
        'Object audit (%(type)s) "%(mode)s" mode '
        'completed: %(elapsed).02fs. Total quarantined: %(quars)d, '
        'Total errors: %(errors)d, Total files/sec: %(frate).2f, '
        'Total bytes/sec: %(brate).2f, Auditing time: %(audit).2f, '
        'Rate: %(audit_rate).2f') % {
            'type': '%s%s' % (self.auditor_type, description),
            'mode': mode, 'elapsed': elapsed,
            'quars': total_quarantines + self.quarantines,
            'errors': total_errors + self.errors,
            'frate': self.total_files_processed / elapsed,
            'brate': self.total_bytes_processed / elapsed,
            'audit': time_auditing,
            'audit_rate': time_auditing / elapsed})
    # Clear recon cache entry if device_dirs is set
    if device_dirs:
        cache_entry = self.create_recon_nested_dict(
            'object_auditor_stats_%s' % (self.auditor_type),
            device_dirs, {})
        dump_recon_cache(cache_entry, self.rcache, self.logger)
    if self.stats_sizes:
        self.logger.info(
            _('Object audit stats: %s') % json.dumps(self.stats_buckets))
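Each periodic dump above lands in the recon cache as a nested dict. Assuming create_recon_nested_dict keys the stats by the joined device list (an assumption; that helper is not shown in this excerpt), the cache file would contain something like this, with illustrative values:

{
    "object_auditor_stats_ALL": {
        "sdb1,sdc1": {
            "errors": 0,
            "passes": 1523,
            "quarantined": 1,
            "bytes_processed": 986217,
            "start_time": 1388437440.0,
            "audit_time": 74.2
        }
    }
}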
def setUp(self):
    super(TestSwift3MultiUpload, self).setUp()

    segment_bucket = '/v1/AUTH_test/bucket+segments'
    self.etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
    last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT'
    put_headers = {'etag': self.etag, 'last-modified': last_modified}

    objects = map(lambda item: {'name': item[0], 'last_modified': item[1],
                                'hash': item[2], 'bytes': item[3]},
                  objects_template)
    object_list = json.dumps(objects)

    self.swift.register('PUT', '/v1/AUTH_test/bucket+segments',
                        swob.HTTPAccepted, {}, None)
    self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
                        object_list)
    self.swift.register('HEAD', segment_bucket + '/object/X',
                        swob.HTTPOk, {'x-object-meta-foo': 'bar'}, None)
    self.swift.register('PUT', segment_bucket + '/object/X',
                        swob.HTTPCreated, {}, None)
    self.swift.register('DELETE', segment_bucket + '/object/X',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('GET', segment_bucket + '/object/invalid',
                        swob.HTTPNotFound, {}, None)
    self.swift.register('PUT', segment_bucket + '/object/X/1',
                        swob.HTTPCreated, put_headers, None)
    self.swift.register('DELETE', segment_bucket + '/object/X/1',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('DELETE', segment_bucket + '/object/X/2',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('HEAD', segment_bucket + '/object/Y',
                        swob.HTTPOk, {}, None)
    self.swift.register('PUT', segment_bucket + '/object/Y',
                        swob.HTTPCreated, {}, None)
    self.swift.register('DELETE', segment_bucket + '/object/Y',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('PUT', segment_bucket + '/object/Y/1',
                        swob.HTTPCreated, {}, None)
    self.swift.register('DELETE', segment_bucket + '/object/Y/1',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('DELETE', segment_bucket + '/object/Y/2',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('HEAD', segment_bucket + '/object2/Z',
                        swob.HTTPOk, {}, None)
    self.swift.register('PUT', segment_bucket + '/object2/Z',
                        swob.HTTPCreated, {}, None)
    self.swift.register('DELETE', segment_bucket + '/object2/Z',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('PUT', segment_bucket + '/object2/Z/1',
                        swob.HTTPCreated, {}, None)
    self.swift.register('DELETE', segment_bucket + '/object2/Z/1',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('DELETE', segment_bucket + '/object2/Z/2',
                        swob.HTTPNoContent, {}, None)
if not out_content_type:
    return HTTPNotAcceptable(request=req)
account_list = broker.list_containers_iter(limit, marker, end_marker,
                                           prefix, delimiter)
if out_content_type == 'application/json':
    data = []
    for (name, object_count, bytes_used, is_subdir) in account_list:
        if is_subdir:
            data.append({'subdir': name})
        else:
            data.append({'name': name, 'count': object_count,
                         'bytes': bytes_used})
    account_list = json.dumps(data)
elif out_content_type.endswith('/xml'):
    output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                   '<account name="%s">' % account]
    for (name, object_count, bytes_used, is_subdir) in account_list:
        name = saxutils.escape(name)
        if is_subdir:
            output_list.append('<subdir name="%s" />' % name)
        else:
            item = '<container><name>%s</name><count>%s</count>' \
                   '<bytes>%s</bytes></container>' % \
                   (name, object_count, bytes_used)
            output_list.append(item)
    output_list.append('</account>')
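Rendered, the XML branch emits a document like this one (names and counts are illustrative):

<?xml version="1.0" encoding="UTF-8"?>
<account name="AUTH_test">
<subdir name="photos_" />
<container><name>docs</name><count>2</count><bytes>9287</bytes></container>
</account>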
def POST(self, req):
    """
    Handles Complete Multipart Upload.
    """
    upload_id = req.params['uploadId']
    resp = _get_upload_info(req, self.app, upload_id)
    headers = {}
    for key, val in resp.headers.items():
        _key = key.lower()
        if _key.startswith('x-amz-meta-'):
            headers['x-object-meta-' + _key[11:]] = val

    hct_header = sysmeta_header('object', 'has-content-type')
    if resp.sysmeta_headers.get(hct_header) == 'yes':
        content_type = resp.sysmeta_headers.get(
            sysmeta_header('object', 'content-type'))
    elif hct_header in resp.sysmeta_headers:
        # has-content-type is present but false, so no content type was
        # set on initial upload. In that case, we won't set one on our
        # PUT request. Swift will end up guessing one based on the
        # object name.
        content_type = None
    else:
        content_type = resp.headers.get('Content-Type')

    if content_type:
        headers['Content-Type'] = content_type

    # Query for the objects in the segments area to make sure it
    # completed
    query = {
        'format': 'json',
        'prefix': '%s/%s/' % (req.object_name, upload_id),
        'delimiter': '/',
    }

    container = req.container_name + MULTIUPLOAD_SUFFIX
    resp = req.get_response(self.app, 'GET', container, '', query=query)
    objinfo = json.loads(resp.body)
    objtable = dict((o['name'],
                     {'path': '/'.join(['', container, o['name']]),
                      'etag': o['hash'],
                      'size_bytes': o['bytes']}) for o in objinfo)

    s3_etag_hasher = md5()
    manifest = []
    previous_number = 0
    try:
        xml = req.xml(MAX_COMPLETE_UPLOAD_BODY_SIZE)
        if not xml:
            raise InvalidRequest(msg='You must specify at least one part')

        complete_elem = fromstring(
            xml, 'CompleteMultipartUpload', self.logger)
        for part_elem in complete_elem.iterchildren('Part'):
            part_number = int(part_elem.find('./PartNumber').text)

            if part_number <= previous_number:
                raise InvalidPartOrder(upload_id=upload_id)
            previous_number = part_number

            etag = part_elem.find('./ETag').text
            if len(etag) >= 2 and etag[0] == '"' and etag[-1] == '"':
                # strip double quotes
                etag = etag[1:-1]

            info = objtable.get("%s/%s/%s" % (req.object_name, upload_id,
                                              part_number))
            if info is None or info['etag'] != etag:
                raise InvalidPart(upload_id=upload_id,
                                  part_number=part_number)

            s3_etag_hasher.update(etag.decode('hex'))
            info['size_bytes'] = int(info['size_bytes'])
            manifest.append(info)
    except (XMLSyntaxError, DocumentInvalid):
        raise MalformedXML()
    except ErrorResponse:
        raise
    except Exception as e:
        self.logger.error(e)
        raise

    s3_etag = '%s-%d' % (s3_etag_hasher.hexdigest(), len(manifest))
    headers[sysmeta_header('object', 'etag')] = s3_etag
    # Leave base header value blank; SLO will populate
    c_etag = '; s3_etag=%s' % s3_etag
    headers['X-Object-Sysmeta-Container-Update-Override-Etag'] = c_etag

    # Check the size of each segment except the last and make sure they
    # are all more than the minimum upload chunk size
    for info in manifest[:-1]:
        if info['size_bytes'] < self.conf.min_segment_size:
            raise EntityTooSmall()

    try:
        # TODO: add support for versioning
        if manifest:
            resp = req.get_response(self.app, 'PUT',
                                    body=json.dumps(manifest),
                                    query={'multipart-manifest': 'put'},
                                    headers=headers)
        else:
            # the upload must have consisted of a single zero-length
            # part; just write it directly
            resp = req.get_response(self.app, 'PUT', body='',
                                    headers=headers)
    except BadSwiftRequest as e:
        msg = str(e)
        expected_msg = 'too small; each segment must be at least 1 byte'
        if expected_msg in msg:
            # FIXME: AWS S3 allows a smaller object than 5 MB if there
            # is only one part. Use a COPY request to copy the part
            # object from the segments container instead.
            raise EntityTooSmall(msg)
        else:
            raise

    # clean up the multipart-upload record
    obj = '%s/%s' % (req.object_name, upload_id)
    try:
        req.get_response(self.app, 'DELETE', container, obj)
    except NoSuchKey:
        # We know that this existed long enough for us to HEAD
        pass

    result_elem = Element('CompleteMultipartUploadResult')

    # NOTE: boto with sig v4 appends port to HTTP_HOST value at the
    # request header when the port is non default value and it makes
    # req.host_url like as http://localhost:8080:8080/path
    # that obviously invalid. Probably it should be resolved at
    # swift.common.swob though, tentatively we are parsing and
    # reconstructing the correct host_url info here.
    # in detail, https://github.com/boto/boto/pull/3513
    parsed_url = urlparse(req.host_url)
    host_url = '%s://%s' % (parsed_url.scheme, parsed_url.hostname)
    if parsed_url.port:
        host_url += ':%s' % parsed_url.port

    SubElement(result_elem, 'Location').text = host_url + req.path
    SubElement(result_elem, 'Bucket').text = req.container_name
    SubElement(result_elem, 'Key').text = req.object_name
    SubElement(result_elem, 'ETag').text = '"%s"' % s3_etag
    del resp.headers['ETag']

    resp.body = tostring(result_elem)
    resp.status = 200
    resp.content_type = "application/xml"

    return resp
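The multipart ETag assembled above follows the S3 convention: MD5 over the concatenation of the binary part MD5s, suffixed with '-' and the part count. Restated standalone (Python 2, matching the .decode('hex') usage above):

from hashlib import md5

def s3_multipart_etag(part_etags):
    """Compute an S3-style multipart ETag from hex part ETags."""
    hasher = md5()
    for etag in part_etags:
        hasher.update(etag.decode('hex'))  # hex digest -> raw bytes
    return '%s-%d' % (hasher.hexdigest(), len(part_etags))

# e.g. s3_multipart_etag(['7dfa07a8e59ddbcd1dc84d4c4f82aea1'] * 2)
# returns a 32-hex-character digest followed by '-2'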