def get_response_body(data_format, data_dict, error_list):
    """
    Returns a properly formatted response body according to format. Handles
    json and xml, otherwise will return text/plain. Note: xml response does not
    include xml declaration.
    :params data_format: resulting format
    :params data_dict: generated data about results.
    :params error_list: list of quoted filenames that failed
    """
    if data_format == 'application/json':
        data_dict['Errors'] = error_list
        return json.dumps(data_dict)
    if data_format and data_format.endswith('/xml'):
        output = '<delete>\n'
        for key in sorted(data_dict):
            xml_key = key.replace(' ', '_').lower()
            output += '<%s>%s</%s>\n' % (xml_key, data_dict[key], xml_key)
        output += '<errors>\n'
        output += '\n'.join([
            '<object>'
            '<name>%s</name><status>%s</status>'
            '</object>' % (saxutils.escape(name), status)
            for name, status in error_list
        ])
        output += '</errors>\n</delete>\n'
        return output

    output = ''
    for key in sorted(data_dict):
        output += '%s: %s\n' % (key, data_dict[key])
    output += 'Errors:\n'
    output += '\n'.join(
        ['%s, %s' % (name, status) for name, status in error_list])
    return output
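
# A minimal usage sketch (not part of the original example), assuming json and
# xml.sax.saxutils are imported as get_response_body() above requires; the
# dict keys and error tuples here are purely illustrative.
errors = [('/cont/obj1', '403 Forbidden'), ('/cont/obj2', 'Size Mismatch')]
stats = {'Number Deleted': 2, 'Response Status': '400 Bad Request'}
print get_response_body('application/json', dict(stats), errors)  # JSON body
print get_response_body('application/xml', dict(stats), errors)   # <delete> XML
print get_response_body('text/plain', dict(stats), errors)        # key: value lines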
Example #2
    def set_multi(self,
                  mapping,
                  server_key,
                  serialize=True,
                  timeout=0,
                  time=0,
                  min_compress_len=0):
        """
        Sets multiple key/value pairs in memcache.

        :param mapping: dictonary of keys and values to be set in memcache
        :param servery_key: key to use in determining which server in the ring
                            is used
        :param serialize: if True, value is serialized with JSON before sending
                          to memcache, or with pickle if configured to use
                          pickle instead of JSON (to avoid cache poisoning)
        :param timeout: ttl for memcache. This parameter is now deprecated, it
                        will be removed in next release of OpenStack, use time
                        parameter instead in the future
        :time: equalvent to timeout, this parameter is added to keep the
               signature compatible with python-memcached interface. This
               implementation will take this value and sign it to parameter
               timeout
        :min_compress_len: minimum compress length, this parameter was added
                           to keep the signature compatible with
                           python-memcached interface. This implementation
                           ignores it
        """
        if timeout:
            logging.warn("parameter timeout has been deprecated, use time")

        server_key = md5hash(server_key)
        timeout = sanitize_timeout(time or timeout)
        msg = ''
        for key, value in mapping.iteritems():
            key = md5hash(key)
            flags = 0
            if serialize and self._allow_pickle:
                value = pickle.dumps(value, PICKLE_PROTOCOL)
                flags |= PICKLE_FLAG
            elif serialize:
                value = json.dumps(value)
                flags |= JSON_FLAG
            msg += ('set %s %d %d %s\r\n%s\r\n' %
                    (key, flags, timeout, len(value), value))
        for (server, fp, sock) in self._get_conns(server_key):
            try:
                with Timeout(self._io_timeout):
                    sock.sendall(msg)
                    # Wait for the set to complete
                    for _ in range(len(mapping)):
                        fp.readline()
                    self._return_conn(server, fp, sock)
                    return
            except (Exception, Timeout) as e:
                self._exception_occurred(server, e, sock=sock, fp=fp)
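
# Hedged usage sketch (not part of the original example): 'cache' is assumed
# to be an already-configured instance of the memcache ring class that
# set_multi() above belongs to; key names and the TTL are illustrative.
def cache_container_statuses(cache):
    cache.set_multi(
        {'cont1': {'status': 201}, 'cont2': {'status': 404}},
        'account/AUTH_test',   # server_key: selects the server in the ring
        time=300)              # prefer time=; timeout= is deprecated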
Example #3
    def GETorHEAD(self, req):
        """Handler for HTTP GET/HEAD requests."""
        """
        Handles requests to /info
        Should return a WSGI-style callable (such as swob.Response).

        :param req: swob.Request object
        """
        if not self.expose_info:
            return HTTPForbidden(request=req)

        admin_request = False
        sig = req.params.get('swiftinfo_sig', '')
        expires = req.params.get('swiftinfo_expires', '')

        if sig != '' or expires != '':
            admin_request = True
            if not self.admin_key:
                return HTTPForbidden(request=req)
            try:
                expires = int(expires)
            except ValueError:
                return HTTPUnauthorized(request=req)
            if expires < time():
                return HTTPUnauthorized(request=req)

            valid_sigs = []
            for method in self.allowed_hmac_methods[req.method]:
                valid_sigs.append(
                    get_hmac(method, '/info', expires, self.admin_key))

            # While it's true that any() will short-circuit, this doesn't
            # affect the timing-attack resistance since the only way this will
            # short-circuit is when a valid signature is passed in.
            is_valid_hmac = any(
                streq_const_time(valid_sig, sig) for valid_sig in valid_sigs)
            if not is_valid_hmac:
                return HTTPUnauthorized(request=req)

        headers = {}
        if 'Origin' in req.headers:
            headers['Access-Control-Allow-Origin'] = req.headers['Origin']
            headers['Access-Control-Expose-Headers'] = ', '.join(
                ['x-trans-id'])

        info = json.dumps(
            get_osd_info(admin=admin_request,
                         disallowed_sections=self.disallowed_sections))

        return HTTPOk(request=req,
                      headers=headers,
                      body=info,
                      content_type='application/json; charset=UTF-8')
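
# Sketch (not part of the original example) of how an admin client could build
# a request that GETorHEAD() above accepts; it reuses the get_hmac helper the
# handler itself calls and assumes `from time import time`, matching the
# handler's own expiry check.
from time import time

def signed_info_path(admin_key, method='GET', ttl=60):
    # Hypothetical helper: returns '/info?swiftinfo_expires=...&swiftinfo_sig=...'
    expires = int(time()) + ttl
    sig = get_hmac(method, '/info', expires, admin_key)
    return '/info?swiftinfo_expires=%d&swiftinfo_sig=%s' % (expires, sig)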
Example #4
    def set(self,
            key,
            value,
            serialize=True,
            timeout=0,
            time=0,
            min_compress_len=0):
        """
        Set a key/value pair in memcache

        :param key: key
        :param value: value
        :param serialize: if True, value is serialized with JSON before sending
                          to memcache, or with pickle if configured to use
                          pickle instead of JSON (to avoid cache poisoning)
        :param timeout: ttl in memcache, this parameter is now deprecated. It
                        will be removed in next release of OpenStack,
                        use time parameter instead in the future
        :time: equivalent to timeout, this parameter is added to keep the
               signature compatible with python-memcached interface. This
               implementation will take this value and sign it to the
               parameter timeout
        :min_compress_len: minimum compress length, this parameter was added
                           to keep the signature compatible with
                           python-memcached interface. This implementation
                           ignores it.
        """
        key = md5hash(key)
        if timeout:
            logging.warn("parameter timeout has been deprecated, use time")
        timeout = sanitize_timeout(time or timeout)
        flags = 0
        if serialize and self._allow_pickle:
            value = pickle.dumps(value, PICKLE_PROTOCOL)
            flags |= PICKLE_FLAG
        elif serialize:
            value = json.dumps(value)
            flags |= JSON_FLAG
        for (server, fp, sock) in self._get_conns(key):
            try:
                with Timeout(self._io_timeout):
                    sock.sendall('set %s %d %d %s\r\n%s\r\n' %
                                 (key, flags, timeout, len(value), value))
                    # Wait for the set to complete
                    fp.readline()
                    self._return_conn(server, fp, sock)
                    return
            except (Exception, Timeout) as e:
                self._exception_occurred(server, e, sock=sock, fp=fp)
Example #5
def format_acl_v2(acl_dict):
    """
    Returns a version-2 Swift ACL JSON string.

    HTTP headers for Version 2 ACLs have the following form:
      Header-Name: {"arbitrary":"json","encoded":"string"}

    JSON will be forced ASCII (containing six-char \uNNNN sequences rather
    than UTF-8; UTF-8 is valid JSON but clients vary in their support for
    UTF-8 headers), and without extraneous whitespace.

    Advantages over V1: forward compatibility (new keys don't cause parsing
    exceptions); Unicode support; no reserved words (you can have a user
    named .rlistings if you want).

    :param acl_dict: dict of arbitrary data to put in the ACL; see specific
                     auth systems such as tempauth for supported values
    :returns: a JSON string which encodes the ACL
    """
    return json.dumps(acl_dict, ensure_ascii=True, separators=(',', ':'),
                      sort_keys=True)
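
# Usage example (not part of the original source): the output is deterministic
# because of sort_keys=True and the compact separators.
example_acl = format_acl_v2({'admin': ['alice'], 'read-only': ['bob', 'carol']})
assert example_acl == '{"admin":["alice"],"read-only":["bob","carol"]}'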
Example #6
class AccountServiceInterface:
    """An interface class between account service and library."""
    account_lib_obj = libaccountLib.AccountLibraryImpl()
    asyn_helper = AsyncLibraryHelper(account_lib_obj)

    @classmethod
    def start_event_wait_func(cls, logger):
        logger.info('wait_event called!')
        AccountServiceInterface.asyn_helper.wait_event()

    @classmethod
    def stop_event_wait_func(cls, logger):
        logger.info('wait_event_stop called!')
        AccountServiceInterface.asyn_helper.wait_event_stop()

    @classmethod
    def get_metadata(cls, req, logger):
        """
        Creates a metadata dictionary from the request object and
        container stat, if present.

        :param req: Request object
        :returns: metadata dictionary
        """
        try:
            new_meta = {}
            metadata = {}
            # get metadata from request headers
            metadata.update((key.lower(), value)
                            for key, value in req.headers.iteritems()
                            if key.lower() in HEADERS
                            or is_sys_or_user_meta('account', key))
            for key, value in metadata.iteritems():
                if key == 'x-account-read':
                    new_meta.update({'r-': value})
                elif key == 'x-account-write':
                    new_meta.update({'w-': value})
                else:
                    ser_key = key.split('-')[2]
                    if ser_key == 'meta':
                        new_key = '%s-%s' % ('m', key.split('-', 3)[-1])
                        new_meta.update({new_key: value})
                    elif ser_key == 'sysmeta':
                        new_key = '%s-%s' % ('sm', key.split('-', 3)[-1])
                        new_meta.update({new_key: value})
                    else:
                        logger.debug('Expected metadata not found')
            return new_meta
        except Exception as err:
            logger.error(
                'get_metadata failed, close failure: %(exc)s : %(stack)s',
                {
                    'exc': err,
                    'stack': ''.join(traceback.format_stack())
                })
            raise err

    @classmethod
    def create_AccountStat_object(cls, info):
        """An interface to create an object of AccountStat, from info map"""
        if 'account' not in info:
            info['account'] = '-1'
        if 'created_at' not in info:
            info['created_at'] = '0'
        if 'put_timestamp' not in info:
            info['put_timestamp'] = '0'
        if 'delete_timestamp' not in info:
            info['delete_timestamp'] = '0'
        if 'container_count' not in info:
            info['container_count'] = 0
        if 'object_count' not in info:
            info['object_count'] = 0
        if 'bytes_used' not in info:
            info['bytes_used'] = 0
        if 'hash' not in info:
            info['hash'] = '-1'
        if 'id' not in info:
            info['id'] = '-1'
        if 'status' not in info:
            info['status'] = '-1'
        if 'status_changed_at' not in info:
            info['status_changed_at'] = '0'
        if 'metadata' not in info:
            info['metadata'] = {}
        return libaccountLib.AccountStat(
            info['account'],
            info['created_at'],
            normalize_timestamp(info['put_timestamp']),
            normalize_timestamp(info['delete_timestamp']),
            info['container_count'],
            info['object_count'],
            info['bytes_used'],
            info['hash'],
            info['id'],
            info['status'],
            info['status_changed_at'],
            info['metadata'])

    @classmethod
    def create_container_record(cls, name, hash, info, deleted):
        """An interface to create an object of ContainerRecord, from info map"""
        if 'x-object-count' not in info:
            info['x-object-count'] = 0
        if not info['x-put-timestamp']:
            info['x-put-timestamp'] = '0'
        if not info['x-delete-timestamp']:
            info['x-delete-timestamp'] = '0'
        return libaccountLib.ContainerRecord(
            0, name, hash,
            normalize_timestamp(str(info['x-put-timestamp'])),
            normalize_timestamp(str(info['x-delete-timestamp'])),
            long(info['x-object-count']),
            long(info['x-bytes-used']), deleted)

    @classmethod
    def create_container_record_for_updater(cls, name, hash, info):
        """An interface to create an object of ContainerRecord for account \
           updater, from info map
        """
        if 'x-object-count' not in info:
            info['x-object-count'] = 0
        if not info['put_timestamp']:
            info['put_timestamp'] = '0'
        if not info['delete_timestamp']:
            info['delete_timestamp'] = '0'
        return libaccountLib.ContainerRecord(
            0, name, hash,
            normalize_timestamp(str(info['put_timestamp'])),
            normalize_timestamp(str(info['delete_timestamp'])),
            long(info['object_count']),
            long(info['bytes_used']),
            info['deleted'])

    @classmethod
    def list_account(cls, temp_path, account_path, out_content_type, req,
                     limit, marker, end_marker, prefix, delimiter, logger):
        """An interface to list the containers in account."""
        logger.debug("Get account stats for path: %s" % account_path)
        container_record_list = []
        resp = libaccountLib.AccountStatWithStatus()
        AccountServiceInterface.__get_account_stat(resp, temp_path,
                                                   account_path, logger)
        logger.info("Account library responded with: %s for get_account_stat \
            in GET" % resp.get_return_status())
        if resp.get_return_status() == INFO_FILE_OPERATION_SUCCESS:
            resp_headers = {
                'X-Account-Container-Count':
                    resp.get_account_stat().get_container_count(),
                'X-Account-Object-Count':
                    resp.get_account_stat().get_object_count(),
                'X-Account-Bytes-Used':
                    resp.get_account_stat().get_bytes_used(),
                'X-Timestamp': resp.get_account_stat().get_created_at(),
                'X-PUT-Timestamp':
                    resp.get_account_stat().get_put_timestamp()
            }
            modified_meta = {}
            for key, value in \
                    resp.get_account_stat().get_metadata().iteritems():
                if key == 'r-':
                    modified_meta.update({'x-account-read': value})
                elif key == 'w-':
                    modified_meta.update({'x-account-write': value})
                else:
                    ser_key = key.split('-')[0]
                    if ser_key == 'm':
                        key = 'x-account-meta-' + key.split('-', 1)[1]
                    modified_meta.update({key: value})

            resp_headers.update(
                (key, value) for (key, value) in modified_meta.iteritems())

            if limit:
                logger.debug("Calling list_container")
                resp = libaccountLib.ListContainerWithStatus()
                try:
                    AccountServiceInterface.asyn_helper.call(
                        "list_container", resp, account_path, limit, marker,
                        end_marker, prefix, delimiter)
                except Exception as err:
                    logger.error(
                        ('list_container for %(ac_dir)s failed, '
                         'close failure: %(exc)s : %(stack)s'),
                        {'ac_dir': account_path, 'exc': err,
                         'stack': ''.join(traceback.format_stack())})
                    raise err
                logger.info("Account library responded with: %s for "
                            "list_container in GET" % resp.get_return_status())
                if resp.get_return_status() == INFO_FILE_OPERATION_SUCCESS:
                    container_record_list = resp.get_container_record()
                    if delimiter:
                        container_list_new = []
                        for obj in container_record_list:
                            name = obj.get_name()
                            if prefix:
                                match = re.match("^" + prefix + ".*", name)
                                if match:
                                    replace = re.sub("^" + prefix, '', name)
                                    replace = replace.split(delimiter)
                                    if len(replace) >= 2:
                                        obj.set_name(prefix + replace[0] +
                                                     delimiter)
                                        if (marker != obj.get_name() or
                                                marker > obj.get_name()):
                                            container_list_new.append(
                                                (obj, (0, 1)[delimiter in obj.get_name() and
                                                             obj.get_name().endswith(delimiter)]))
                                    else:
                                        obj.set_name(prefix + replace[0])
                                        if (marker != obj.get_name() or
                                                marker > obj.get_name()):
                                            container_list_new.append((obj, 0))
                            else:
                                replace = name.split(delimiter)
                                if len(replace) >= 2:
                                    obj.set_name(replace[0] + delimiter)
                                if (marker != obj.get_name() or
                                        marker > obj.get_name()):
                                    container_list_new.append(
                                        (obj, (0, 1)[delimiter in obj.get_name() and
                                                     obj.get_name().endswith(delimiter)]))
                        container_record_list = container_list_new
                    else:
                        container_record_list = [
                            (record, 0) for record in container_record_list
                        ]
        if resp.get_return_status() == INFO_FILE_OPERATION_SUCCESS:
            logger.debug("Populating container list")
        elif resp.get_return_status() == INFO_FILE_NOT_FOUND:
            return HTTPNotFound(request=req, charset='utf-8')
        elif resp.get_return_status() == INFO_FILE_DELETED:
            headers = {'X-Account-Status': 'Deleted'}
            return HTTPNotFound(request=req,
                                headers=headers,
                                charset='utf-8',
                                body='')
        else:
            return HTTPInternalServerError(request=req)
        if out_content_type == 'application/json':
            data = []
            for (container_record, is_subdir) in container_record_list:
                if is_subdir:
                    data.append({'subdir': container_record.get_name()})
                else:
                    data.append({
                        'name':
                        container_record.get_name(),
                        'created_at':
                        time.strftime(
                            "%a, %d %b %Y %H:%M:%S GMT",
                            time.gmtime(
                                float(container_record.get_put_timestamp()))),
                        'count':
                        container_record.get_object_count(),
                        'bytes':
                        container_record.get_bytes_used()
                    })
            container_record_list = json.dumps(data)
        elif out_content_type.endswith('/xml'):
            # Directly use req.path to get the account name.
            output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                           '<account name=%s>' %
                           saxutils.quoteattr(req.path.split('/')[3])]
            for (container_record, is_subdir) in container_record_list:
                if is_subdir:
                    output_list.append(
                        '<subdir name=%s />' %
                        saxutils.quoteattr(container_record.get_name()))
                else:
                    item = ('<container><name>%s</name>'
                            '<created_at>%s</created_at><count>%s</count>'
                            '<bytes>%s</bytes></container>' %
                            (saxutils.escape(container_record.get_name()),
                             time.strftime(
                                 "%a, %d %b %Y %H:%M:%S GMT",
                                 time.gmtime(float(
                                     container_record.get_put_timestamp()))),
                             container_record.get_object_count(),
                             container_record.get_bytes_used()))
                    output_list.append(item)
            output_list.append('</account>')
            container_record_list = '\n'.join(output_list)
        else:
            if not container_record_list:
                logger.debug("No any container in account")
                resp = HTTPNoContent(request=req, headers=resp_headers)
                resp.content_type = out_content_type
                resp.charset = 'utf-8'
                return resp
            container_record_list = '\n'.join(
                container_record.get_name()
                for (container_record, is_subdir) in container_record_list
            ) + '\n'
        ret = HTTPOk(body=container_record_list,
                     request=req,
                     headers=resp_headers)
        ret.content_type = out_content_type
        ret.charset = 'utf-8'
        return ret
Example #7
    def __get_cont_list(self, path, container, req):
        """
        Helper function to get the list of objects in a container.

        :param path: hashed account/container path
        :param container: container name
        :param req: HTTP Request object
        """
        ret = ''
        marker = get_param(req, 'marker', '')
        end_marker = get_param(req, 'end_marker', '')
        limit = get_param(req, 'limit', str(CONTAINER_LISTING_LIMIT))
        if limit:
            limit = int(limit)
        else:
            limit = CONTAINER_LISTING_LIMIT
        prefix = get_param(req, 'prefix', '')
        delimiter = get_param(req, 'delimiter', '')
        out_content_type = get_listing_content_type(req)
        # get list of objects
        container_list = []
        if limit > 0:
            self.logger.debug('Called list container interface of library')
            list_obj = ListObjectWithStatus()
            self.asyn_helper.call("list_container", path, list_obj, limit,
                                  marker, end_marker, prefix, delimiter)
            status = list_obj.get_return_status()
            self.logger.info(('Status from container library comes '
                              'out to be: %(status)s'),
                             {'status': status})
            if status != OsdExceptionCode.OSD_OPERATION_SUCCESS:
                return status
            container_list = list_obj.object_record
            self.logger.debug('Got container list')
        # modify the list for delimiter
        if delimiter:
            container_list_new = []
            for obj in container_list:
                name = obj.get_name()
                if prefix:
                    match = re.match("^" + prefix + ".*", name)
                    if match:
                        replace = re.sub("^" + prefix, '', name)
                        replace = replace.split(delimiter)
                        if len(replace) >= 2:
                            obj.set_name(prefix + replace[0] + delimiter)
                        else:
                            obj.set_name(prefix + replace[0])
                else:
                    replace = name.split(delimiter)
                    if len(replace) >= 2:
                        obj.set_name(replace[0] + delimiter)
                if (delimiter in obj.get_name() and
                        obj.get_name().endswith(delimiter)):
                    obj.content_type = "None"
                if marker != obj.get_name() or marker > obj.get_name():
                    container_list_new.append(obj)
            container_list = container_list_new
        # Get body of response
        if out_content_type == 'application/json':
            ret = json.dumps([self.__update_data_record(record)
                              for record in container_list])
        elif out_content_type.endswith('/xml'):
            doc = Element('container', name=container.decode('utf-8'))
            for obj in container_list:
                record = self.__update_data_record(obj)
                if 'subdir' in record:
                    name = record['subdir'].decode('utf-8')
                    sub = SubElement(doc, 'subdir', name=name)
                    SubElement(sub, 'name').text = name
                else:
                    obj_element = SubElement(doc, 'object')
                    for field in ["name", "hash", "bytes", "content_type",
                                  "last_modified"]:
                        SubElement(obj_element, field).text = str(
                            record.pop(field)).decode('utf-8')
                    for field in sorted(record):
                        SubElement(obj_element, field).text = str(
                            record[field]).decode('utf-8')
            ret = tostring(doc, encoding='UTF-8').replace(
                "<?xml version='1.0' encoding='UTF-8'?>",
                '<?xml version="1.0" encoding="UTF-8"?>', 1)
        else:
            if not container_list:
                self.logger.debug('No object list found!')
                return ret
            ret = '%s\n' % ('\n'.join([obj.get_name()
                                       for obj in container_list]))
        return ret
Example #8
    def handle_multipart_put(self, req, start_response):
        """
        Will handle the PUT of a SLO manifest.
        Heads every object in manifest to check if is valid and if so will
        save a manifest generated from the user input. Uses WSGIContext to
        call self and start_response and returns a WSGI iterator.

        :params req: a swob.Request with an obj in path
        :raises: HttpException on errors
        """
        try:
            vrs, account, container, obj = req.split_path(1, 4, True)
            self.logger.info("Received manifest file: %s for upload" %
                             (req.path))
        except ValueError:
            return self.app(req.environ, start_response)
        if req.content_length > self.max_manifest_size:
            raise HTTPRequestEntityTooLarge("Manifest File > %d bytes" %
                                            self.max_manifest_size)
        if req.headers.get('X-Copy-From'):
            raise HTTPMethodNotAllowed(
                'Multipart Manifest PUTs cannot be COPY requests')
        if req.content_length is None and \
                req.headers.get('transfer-encoding', '').lower() != 'chunked':
            raise HTTPLengthRequired(request=req)
        parsed_data = parse_input(req.body_file.read(self.max_manifest_size))
        problem_segments = []

        if len(parsed_data) > self.max_manifest_segments:
            raise HTTPRequestEntityTooLarge(
                'Number of segments must be <= %d' %
                self.max_manifest_segments)
        total_size = 0
        out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
        if not out_content_type:
            out_content_type = 'text/plain'
        data_for_storage = []
        slo_etag = md5()
        for index, seg_dict in enumerate(parsed_data):
            obj_name = seg_dict['path']
            if isinstance(obj_name, unicode):
                obj_name = obj_name.encode('utf-8')
            obj_path = '/'.join(['', vrs, account, obj_name.lstrip('/')])
            try:
                seg_size = int(seg_dict['size_bytes'])
            except (ValueError, TypeError):
                raise HTTPBadRequest('Invalid Manifest File')
            if seg_size < self.min_segment_size and \
                    (index == 0 or index < len(parsed_data) - 1):
                raise HTTPBadRequest(
                    'Each segment, except the last, must be at least '
                    '%d bytes.' % self.min_segment_size)

            new_env = req.environ.copy()
            new_env['PATH_INFO'] = obj_path
            new_env['REQUEST_METHOD'] = 'HEAD'
            new_env['swift.source'] = 'SLO'
            del (new_env['wsgi.input'])
            del (new_env['QUERY_STRING'])
            new_env['CONTENT_LENGTH'] = 0
            new_env['HTTP_USER_AGENT'] = \
                '%s MultipartPUT' % req.environ.get('HTTP_USER_AGENT')
            head_seg_resp = \
                Request.blank(obj_path, new_env).get_response(self)
            if head_seg_resp.is_success:
                total_size += seg_size
                if seg_size != head_seg_resp.content_length:
                    problem_segments.append([quote(obj_name), 'Size Mismatch'])
                if seg_dict['etag'] == head_seg_resp.etag:
                    slo_etag.update(seg_dict['etag'])
                else:
                    problem_segments.append([quote(obj_name), 'Etag Mismatch'])
                if head_seg_resp.last_modified:
                    last_modified = head_seg_resp.last_modified
                else:
                    # shouldn't happen
                    last_modified = datetime.now()

                last_modified_formatted = \
                    last_modified.strftime('%Y-%m-%dT%H:%M:%S.%f')
                seg_data = {
                    'name': '/' + seg_dict['path'].lstrip('/'),
                    'bytes': seg_size,
                    'hash': seg_dict['etag'],
                    'content_type': head_seg_resp.content_type,
                    'last_modified': last_modified_formatted
                }
                if config_true_value(
                        head_seg_resp.headers.get('X-Static-Large-Object')):
                    seg_data['sub_slo'] = True
                data_for_storage.append(seg_data)

            else:
                problem_segments.append(
                    [quote(obj_name), head_seg_resp.status])
        if problem_segments:
            resp_body = get_response_body(out_content_type, {},
                                          problem_segments)
            raise HTTPBadRequest(resp_body, content_type=out_content_type)
        env = req.environ

        if not env.get('CONTENT_TYPE'):
            guessed_type, _junk = mimetypes.guess_type(req.path_info)
            env['CONTENT_TYPE'] = guessed_type or 'application/octet-stream'
        env['swift.content_type_overridden'] = True
        env['CONTENT_TYPE'] += ";swift_bytes=%d" % total_size
        env['HTTP_X_STATIC_LARGE_OBJECT'] = 'True'
        json_data = json.dumps(data_for_storage)
        env['CONTENT_LENGTH'] = str(len(json_data))
        env['wsgi.input'] = StringIO(json_data)

        slo_put_context = SloPutContext(self, slo_etag)
        return slo_put_context.handle_slo_put(req, start_response)
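
# For context (not part of the original example): a sketch of the manifest
# body that parse_input() above is expected to yield, i.e. a JSON list of
# segment dicts carrying the 'path', 'etag' and 'size_bytes' keys read in the
# loop above; the paths, etags and sizes are illustrative.
example_manifest = json.dumps([
    {'path': '/segments/report/part-00',
     'etag': '0123456789abcdef0123456789abcdef',
     'size_bytes': 2097152},
    {'path': '/segments/report/part-01',
     'etag': 'fedcba9876543210fedcba9876543210',
     'size_bytes': 1048576},
])
# Such a body is typically PUT to the manifest object's path with the
# multipart-manifest=put query parameter so that this middleware handles it.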
Example #9
def account_listing_response(account,
                             req,
                             response_content_type,
                             broker=None,
                             limit='',
                             marker='',
                             end_marker='',
                             prefix='',
                             delimiter=''):
    if broker is None:
        broker = FakeAccountBroker()

    info = broker.get_info()
    resp_headers = {
        'X-Account-Container-Count': info['container_count'],
        'X-Account-Object-Count': info['object_count'],
        'X-Account-Bytes-Used': info['bytes_used'],
        'X-Timestamp': info['created_at'],
        'X-PUT-Timestamp': info['put_timestamp']
    }
    resp_headers.update(
        (key, value)
        for key, (value, timestamp) in broker.metadata.iteritems()
        if value != '')

    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, is_subdir) in account_list:
            if is_subdir:
                data.append({'subdir': name})
            else:
                data.append({
                    'name': name,
                    'count': object_count,
                    'bytes': bytes_used
                })
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = [
            '<?xml version="1.0" encoding="UTF-8"?>',
            '<account name=%s>' % saxutils.quoteattr(account)
        ]
        for (name, object_count, bytes_used, is_subdir) in account_list:
            if is_subdir:
                output_list.append('<subdir name=%s />' %
                                   saxutils.quoteattr(name))
            else:
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes></container>' % \
                       (saxutils.escape(name), object_count, bytes_used)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        if not account_list:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in account_list) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
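
# Hedged usage sketch (not part of the original source): with broker=None the
# function above falls back to FakeAccountBroker, so an empty listing can be
# rendered from just a request object. Request is assumed to be
# swift.common.swob.Request, the request class used throughout these examples.
def empty_account_listing(account='AUTH_test'):
    req = Request.blank('/v1/%s' % account)
    # Body is the JSON listing ('[]' when the fake broker reports no
    # containers); headers carry the X-Account-* stats from broker.get_info().
    return account_listing_response(account, req, 'application/json')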