Example #1
    def PUT(self, req):
        """
        Handle PUT Object and PUT Object (Copy) request
        """
        # set X-Timestamp here so oss2swift can reuse it in the copy response body
        req_timestamp = OssTimestamp.now()
        expireDay = ''
        createDate = ''
        data = req.body
        do_crc64 = crcmod.mkCrcFun(0x142F0E1EBA9EA3693L,
                                   initCrc=0L,
                                   xorOut=0xffffffffffffffffL,
                                   rev=True)
        crcValue = do_crc64(data)

        req.headers['X-Timestamp'] = req_timestamp.internal
        req.headers['x-object-meta-object-type'] = 'Normal'
        req.headers['x-object-meta-hash-crc64ecma'] = str(crcValue)

        if all(h in req.headers
               for h in ('x-oss-copy-source', 'x-oss-copy-source-range')):
            raise InvalidArgument('x-oss-copy-source-range',
                                  req.headers['x-oss-copy-source-range'],
                                  'Illegal copy header')
        req.check_copy_source(self.app)
        bucket_headers = req.get_container_info(self.app)
        expireDay, createDate = self._parse_lifecycle(bucket_headers,
                                                      req.object_name)
        if expireDay != '':
            try:
                days = int(expireDay)
                expire_sc = str(
                    int(days * 2 * 3600 + float(req_timestamp.internal)))
                req.headers['X-Delete-At'] = expire_sc
            except ValueError:
                raise InvalidArgument('X-Delete-At', expireDay)
        elif createDate != '':
            try:
                unix_time = to_unixtime(createDate, '%Y-%m-%dT%H:%M:%S.000Z')
                if unix_time > int(req_timestamp):
                    req.headers['X-Object-Meta-ValidDate'] = unix_time
            except (ValueError, TypeError):
                raise InvalidArgument('X-Object-Meta-ValidDate', createDate)
        resp = req.get_response(self.app)

        if 'x-oss-copy-source' in req.headers:
            resp.append_copy_resp_body(req.controller_name,
                                       req_timestamp.ossxmlformat)

            # delete object metadata from response
            for key in list(resp.headers.keys()):
                if key.startswith('x-oss-meta-'):
                    del resp.headers[key]

        resp.status = HTTP_OK
        resp.headers['x-oss-hash-crc64ecma'] = str(crcValue)
        return resp
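
The x-oss-hash-crc64ecma value above comes from crcmod configured with the CRC-64/ECMA-182 parameters. A minimal client-side sketch of the same computation (assuming Python 2 and the crcmod package), which a caller could use to verify the header returned by the middleware:

import crcmod

# Same polynomial and parameters as the handler above (CRC-64/ECMA-182).
do_crc64 = crcmod.mkCrcFun(0x142F0E1EBA9EA3693L,
                           initCrc=0L,
                           xorOut=0xffffffffffffffffL,
                           rev=True)

body = 'example object payload'   # hypothetical request body
print do_crc64(body)              # same value the handler stores for this body
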
Example #2
    def get_validated_param(self, param, default, limit=MAX_32BIT_INT):
        value = default
        if param in self.params:
            try:
                param_value = int(self.params[param])
                if value < param_value:
                    value = param_value
                if value < 0:
                    err_msg = 'Argument %s must be an integer between 0 and' \
                              ' %d' % (param, MAX_32BIT_INT)
                    raise InvalidArgument(param, self.params[param], err_msg)

                if value > MAX_32BIT_INT:
                    # reject it explicitly, because int() may quietly build a
                    # long instance or a 64-bit integer instead of overflowing.
                    raise ValueError()

                if limit < value:
                    value = limit

            except ValueError:
                err_msg = 'Provided %s not an integer or within ' \
                          'integer range' % param
                raise InvalidArgument(param, self.params[param], err_msg)

        return value
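
A short usage sketch (the default and limit values are hypothetical) of how this validator behaves for a listing-style query parameter:

# Assuming a default of 1000 and a hard limit of 1000:
#   ?max-parts=5000 -> raised to 5000, then clamped back to the limit -> 1000
#   ?max-parts=-1   -> the default (1000) is kept, since only larger values replace it
#   ?max-parts=abc  -> int() raises ValueError, so InvalidArgument is raised
maxparts = req.get_validated_param('max-parts', 1000, limit=1000)
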
Example #3
    def _validate_headers(self):
        if 'CONTENT_LENGTH' in self.environ:
            try:
                if self.content_length < 0:
                    raise InvalidArgument('Content-Length',
                                          self.content_length)
            except (ValueError, TypeError):
                raise InvalidArgument('Content-Length',
                                      self.environ['CONTENT_LENGTH'])

        self._validate_dates()

        if 'Content-MD5' in self.headers:
            value = self.headers['Content-MD5']
            if not re.match('^[A-Za-z0-9+/]+={0,2}$', value):
                # Non-base64-alphabet characters in value.
                raise InvalidDigest(content_md5=value)
            try:
                self.headers['ETag'] = value.decode('base64').encode('hex')
            except Exception:
                raise InvalidDigest(content_md5=value)

            if len(self.headers['ETag']) != 32:
                raise InvalidDigest(content_md5=value)

        if self.method == 'PUT' and any(
                h in self.headers
                for h in ('If-Match', 'If-None-Match', 'If-Modified-Since',
                          'If-Unmodified-Since')):
            raise OssNotImplemented(
                'Conditional object PUTs are not supported.')

        if 'X-Oss-Copy-Source' in self.headers:
            try:
                check_path_header(self, 'X-Oss-Copy-Source', 2, '')
            except swob.HTTPException:
                msg = 'Copy Source must mention the source bucket and key: ' \
                      'sourcebucket/sourcekey'
                raise InvalidArgument('x-oss-copy-source',
                                      self.headers['X-Oss-Copy-Source'], msg)

        if 'x-oss-metadata-directive' in self.headers:
            value = self.headers['x-oss-metadata-directive']
            if value not in ('COPY', 'REPLACE'):
                err_msg = 'Unknown metadata directive.'
                raise InvalidArgument('x-oss-metadata-directive', value,
                                      err_msg)

        if 'x-oss-storage-class' in self.headers:
            # Only STANDARD is supported now.
            if self.headers['x-oss-storage-class'] != 'STANDARD':
                raise InvalidStorageClass()

        if 'x-oss-mfa' in self.headers:
            raise OssNotImplemented('MFA Delete is not supported.')

        if 'x-oss-server-side-encryption' in self.headers:
            raise OssNotImplemented('Server-side encryption is not supported.')
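
The Content-MD5 branch converts the client-supplied base64 digest into the 32-character hex form that Swift uses as the ETag. A small Python 2 sketch (payload invented) showing how the two encodings line up:

import base64
import hashlib

body = 'example payload'                                    # hypothetical body
content_md5 = base64.b64encode(hashlib.md5(body).digest())  # what a client sends
etag = content_md5.decode('base64').encode('hex')           # what the middleware stores
assert etag == hashlib.md5(body).hexdigest()
assert len(etag) == 32
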
Example #4
    def to_swift_req(self,
                     method,
                     container,
                     obj,
                     query=None,
                     body=None,
                     headers=None):
        sw_req = super(OssAclRequest,
                       self).to_swift_req(method, container, obj, query, body,
                                          headers)
        if self.account:
            sw_req.environ['swift_owner'] = True  # needed to set ACL
            sw_req.environ['swift.authorize_override'] = True
            sw_req.environ['swift.authorize'] = lambda req: None
        if 'HTTP_X_CONTAINER_SYSMETA_OSS2SWIFT_ACL' in sw_req.environ:
            oss_acl = sw_req.environ['HTTP_X_CONTAINER_SYSMETA_OSS2SWIFT_ACL']
            if sw_req.query_string:
                sw_req.query_string = ''
            if oss_acl == '[]':
                oss_acl = 'private'
            try:
                translated_acl = swift_acl_translate(oss_acl)
            except ACLError:
                raise InvalidArgument('x-oss-acl', oss_acl)

            for header, acl in translated_acl:
                sw_req.headers[header] = acl
        return sw_req
Example #5
    def PUT(self, req):
        """
        Handle PUT Bucket Referer request
        """
        xml = req.xml(MAX_PUT_BUCKET_REFERER_SIZE)
        if xml:
            # check referer
            try:
                elem = fromstring(xml, 'RefererConfiguration')
                allow_empty_referer = elem.find('AllowEmptyReferer').text
                if allow_empty_referer not in ('true', 'false'):
                    raise InvalidArgument('AllowEmptyReferer',
                                          allow_empty_referer)
                referer_list = elem.find('RefererList')
                swift_referers = []
                for referer in referer_list.findall('Referer'):
                    swift_referers.append(referer.text)
                if len(swift_referers) == 0:
                    req.headers['X-Container-Read'] = ' '
                else:
                    req.headers['X-Container-Read'] = \
                        '.r:' + ','.join(get_real_url(swift_referers))
            except (XMLSyntaxError, DocumentInvalid):
                raise MalformedXML()
            except Exception as e:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                LOGGER.error(e)
                raise exc_type, exc_value, exc_traceback
        resp = req.get_response(self.app)
        resp.status = HTTP_OK
        return resp
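
For reference, a request body this handler would accept (hostnames invented); the element names are exactly the ones it reads: RefererConfiguration, AllowEmptyReferer, RefererList and Referer.

sample_referer_xml = """
<RefererConfiguration>
  <AllowEmptyReferer>true</AllowEmptyReferer>
  <RefererList>
    <Referer>http://www.example.com</Referer>
    <Referer>http://*.example.net</Referer>
  </RefererList>
</RefererConfiguration>
"""
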
Example #6
def handle_acl_header(req):

    oss_acl = req.environ['HTTP_X_OSS_ACL']
    del req.environ['HTTP_X_OSS_ACL']
    if req.query_string:
        req.query_string = ''

    try:
        translated_acl = swift_acl_translate(oss_acl)
    except ACLError:
        raise InvalidArgument('x-oss-acl', oss_acl)

    for header, acl in translated_acl:
        req.headers[header] = acl
Example #7
    def PUT(self, req):
        """
        Handle PUT Bucket CORS request
        """
        xml = req.xml(MAX_PUT_BUCKET_CORERULE_SIZE)
        if xml:
            # check CORS rules
            try:
                elem = fromstring(xml, 'CORSConfiguration')
                for core_rule in elem.findall('CORSRule'):
                    allowed_origins = _find_all_tags(core_rule, 'AllowedOrigin')
                    allowed_methods = _find_all_tags(core_rule, 'AllowedMethod')
                    allowed_headers = _find_all_tags(core_rule, 'AllowedHeader')
                    expose_headers = _find_all_tags(core_rule, 'ExposeHeader')
                    max_age_seconds = None
                    if core_rule.find('MaxAgeSeconds') is not None:
                        max_age_seconds = core_rule.find('MaxAgeSeconds').text
                    req.headers['X-Container-Meta-Access-Control-Allow-Origin'] = \
                        _list_str(allowed_origins)
                    req.headers['X-Container-Meta-Access-Control-Allow-Methods'] = \
                        _list_str(allowed_methods)
                    req.headers['X-Container-Meta-Access-Control-Allow-Headers'] = \
                        _list_str(allowed_headers)
                    req.headers['X-Container-Meta-Access-Control-Expose-Headers'] = \
                        _list_str(expose_headers)
                    if max_age_seconds is not None:
                        req.headers['X-Container-Meta-Access-Control-Max-Age'] = \
                            max_age_seconds
            except (XMLSyntaxError, DocumentInvalid):
                raise MalformedXML()
            except Exception as e:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                LOGGER.error(e)
                raise exc_type, exc_value, exc_traceback
        resp = req.get_response(self.app)

        resp.status = HTTP_OK

        return resp
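
Likewise, a hypothetical CORS configuration body; the tag names match what the loop above copies into the X-Container-Meta-Access-Control-* headers:

sample_cors_xml = """
<CORSConfiguration>
  <CORSRule>
    <AllowedOrigin>http://www.example.com</AllowedOrigin>
    <AllowedMethod>GET</AllowedMethod>
    <AllowedMethod>PUT</AllowedMethod>
    <AllowedHeader>Authorization</AllowedHeader>
    <ExposeHeader>x-oss-request-id</ExposeHeader>
    <MaxAgeSeconds>300</MaxAgeSeconds>
  </CORSRule>
</CORSConfiguration>
"""
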
Example #8
    def PUT(self, req):
        """
        Handles Upload Part and Upload Part Copy.
        """
        if 'uploadId' not in req.params:
            raise InvalidArgument('ResourceType', 'partNumber',
                                  'Unexpected query string parameter')

        try:
            part_number = int(req.params['partNumber'])
            if part_number < 1 or CONF.max_upload_part_num < part_number:
                raise Exception()
        except Exception:
            err_msg = 'Part number must be an integer between 1 and %d,' \
                      ' inclusive' % CONF.max_upload_part_num
            raise InvalidArgument('partNumber', req.params['partNumber'],
                                  err_msg)
        data = req.body
        upload_id = req.params['uploadId']
        _check_upload_info(req, self.app, upload_id)

        req.container_name += MULTIUPLOAD_SUFFIX
        req.object_name = '%s/%s/%d' % (req.object_name, upload_id,
                                        part_number)

        req_timestamp = OssTimestamp.now()
        req.headers['X-Timestamp'] = req_timestamp.internal
        source_resp = req.check_copy_source(self.app)
        
        if 'x-oss-copy-source' in req.headers and \
                'x-oss-copy-source-range' in req.headers:
            rng = req.headers['x-oss-copy-source-range']
  
            header_valid = True
            try:
                rng_obj = Range(rng)
                if len(rng_obj.ranges) != 1:
                    header_valid = False
            except ValueError:
                header_valid = False
            if not header_valid:
                err_msg = ('The x-oss-copy-source-range value must be of the '
                           'form bytes=first-last where first and last are '
                           'the zero-based offsets of the first and last '
                           'bytes to copy')
                raise InvalidArgument('x-oss-source-range', rng, err_msg)
  
            source_size = int(source_resp.headers['Content-Length'])
            if not rng_obj.ranges_for_length(source_size):
                err_msg = ('Range specified is not valid for source object '
                           'of size: %s' % source_size)
                raise InvalidArgument('x-oss-source-range', rng, err_msg)
  
            req.headers['range'] = rng
            del req.headers['x-oss-copy-source-range']
            
        resp = req.get_response(self.app)
         
        do_crc64 = crcmod.mkCrcFun(0x142F0E1EBA9EA3693L,
                                   initCrc=0L,
                                   xorOut=0xffffffffffffffffL,
                                   rev=True)
        if 'x-oss-copy-source' in req.headers:
            resp.append_copy_resp_body(req.controller_name,
                                       req_timestamp.ossxmlformat)
        resp.status = 200
        resp.headers['x-oss-hash-crc64ecma'] = str(do_crc64(data))
        return resp
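
The copy-source-range check leans on Swift's swob.Range parser. An isolated sketch of the same acceptance test (Python 2, header values invented):

from swift.common.swob import Range

rng_obj = Range('bytes=0-9')
print len(rng_obj.ranges)                    # 1 -> a single range, accepted
print bool(rng_obj.ranges_for_length(100))   # True -> satisfiable for a 100-byte source

rng_obj = Range('bytes=0-4,10-14')
print len(rng_obj.ranges)                    # 2 -> rejected, only one range is allowed
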
Example #9
    def GET(self, req):
        """
        Handles List Parts.
        """
        def filter_part_num_marker(o):
            try:
                num = int(os.path.basename(o['name']))
                return num > part_num_marker
            except ValueError:
                return False

        encoding_type = req.params.get('encoding-type')
        if encoding_type is not None and encoding_type != 'url':
            err_msg = 'Invalid Encoding Method specified in Request'
            raise InvalidArgument('encoding-type', encoding_type, err_msg)

        upload_id = req.params['uploadId']
        _check_upload_info(req, self.app, upload_id)

        maxparts = req.get_validated_param(
            'max-parts', DEFAULT_MAX_PARTS_LISTING, CONF.max_parts_listing)
        part_num_marker = req.get_validated_param(
            'part-number-marker', 0)

        query = {
            'format': 'json',
            'limit': maxparts + 1,
            'prefix': '%s/%s/' % (req.object_name, upload_id),
            'delimiter': '/'
        }

        container = req.container_name + MULTIUPLOAD_SUFFIX
        resp = req.get_response(self.app, container=container, obj='',
                                query=query)
        objects = json.loads(resp.body)

        last_part = 0

        # If the caller requested a list starting at a specific part number,
        # construct a sub-set of the object list.
        objList = filter(filter_part_num_marker, objects)

        # pylint: disable-msg=E1103
        objList.sort(key=lambda o: int(o['name'].split('/')[-1]))

        if len(objList) > maxparts:
            objList = objList[:maxparts]
            truncated = True
        else:
            truncated = False
        # TODO: We have to retrieve the object list again when truncated is
        # True and some objects were filtered out by invalid names, because
        # there may not be enough objects left to satisfy the maxparts limit.

        if objList:
            o = objList[-1]
            last_part = os.path.basename(o['name'])

        result_elem = Element('ListPartsResult')
        SubElement(result_elem, 'Bucket').text = req.container_name
        SubElement(result_elem, 'Key').text = req.object_name
        SubElement(result_elem, 'UploadId').text = upload_id

        initiator_elem = SubElement(result_elem, 'Initiator')
        SubElement(initiator_elem, 'ID').text = req.user_id
        SubElement(initiator_elem, 'DisplayName').text = req.user_id
        owner_elem = SubElement(result_elem, 'Owner')
        SubElement(owner_elem, 'ID').text = req.user_id
        SubElement(owner_elem, 'DisplayName').text = req.user_id

        SubElement(result_elem, 'StorageClass').text = 'STANDARD'
        SubElement(result_elem, 'PartNumberMarker').text = str(part_num_marker)
        SubElement(result_elem, 'NextPartNumberMarker').text = str(last_part)
        SubElement(result_elem, 'MaxParts').text = str(maxparts)
        if 'encoding-type' in req.params:
            SubElement(result_elem, 'EncodingType').text = \
                req.params['encoding-type']
        SubElement(result_elem, 'IsTruncated').text = \
            'true' if truncated else 'false'

        for i in objList:
            part_elem = SubElement(result_elem, 'Part')
            SubElement(part_elem, 'PartNumber').text = i['name'].split('/')[-1]
            SubElement(part_elem, 'LastModified').text = \
                i['last_modified'][:-6] + '000Z'
            SubElement(part_elem, 'ETag').text = '"%s"' % i['hash']
            SubElement(part_elem, 'Size').text = str(i['bytes'])

        body = tostring(result_elem, encoding_type=encoding_type)

        return HTTPOk(body=body, content_type='application/xml')
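
The listing relies on the segment naming convention used by the Upload Part handler: each part lives in the MULTIUPLOAD_SUFFIX container under <object>/<upload-id>/<part-number>, so the part number is simply the basename of the segment name. A tiny sketch with invented names:

import os

names = ['photo.jpg/0123456789abcdef/1',
         'photo.jpg/0123456789abcdef/3',
         'photo.jpg/0123456789abcdef/2']
print sorted(int(os.path.basename(n)) for n in names)   # [1, 2, 3]
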
Example #10
    def GET(self, req):
        """
        Handles List Multipart Uploads
        """

        def separate_uploads(uploads, prefix, delimiter):
            """
            separate_uploads splits uploads into non_delimited_uploads
            (a subset of uploads) and common_prefixes according to the
            specified delimiter. non_delimited_uploads is a list of uploads
            whose keys do not contain the delimiter after the prefix.
            common_prefixes is a set of prefixes ending at the specified
            delimiter; note that each prefix in common_prefixes includes
            the delimiter itself.

            e.g. if the '/' delimiter is specified and uploads consists of
            ['foo', 'foo/bar'], this function returns (['foo'], ['foo/']).

            :param uploads: A list of upload dictionaries
            :param prefix: The prefix reserved on the upload path (the
                           delimiter is only searched for after the prefix)
            :param delimiter: The delimiter used to split the path of each
                              upload

            :return (non_delimited_uploads, common_prefixes)
            """
            (prefix, delimiter) = \
                utf8encode(prefix, delimiter)
            non_delimited_uploads = []
            common_prefixes = set()
            for upload in uploads:
                key = upload['key']
                end = key.find(delimiter, len(prefix))
                if end >= 0:
                    common_prefix = key[:end + len(delimiter)]
                    common_prefixes.add(common_prefix)
                else:
                    non_delimited_uploads.append(upload)
            return non_delimited_uploads, sorted(common_prefixes)

        encoding_type = req.params.get('encoding-type')
        if encoding_type is not None and encoding_type != 'url':
            err_msg = 'Invalid Encoding Method specified in Request'
            raise InvalidArgument('encoding-type', encoding_type, err_msg)

        keymarker = req.params.get('key-marker', '')
        uploadid = req.params.get('upload-id-marker', '')
        maxuploads = req.get_validated_param(
            'max-uploads', DEFAULT_MAX_UPLOADS, DEFAULT_MAX_UPLOADS)

        query = {
            'format': 'json',
            'limit': maxuploads + 1,
        }

        if uploadid and keymarker:
            query.update({'marker': '%s/%s' % (keymarker, uploadid)})
        elif keymarker:
            query.update({'marker': '%s/~' % (keymarker)})
        if 'prefix' in req.params:
            query.update({'prefix': req.params['prefix']})

        container = req.container_name + MULTIUPLOAD_SUFFIX
        try:
            resp = req.get_response(self.app, container=container, query=query)
            objects = json.loads(resp.body)
        except NoSuchBucket:
            # Assume NoSuchBucket as no uploads
            objects = []

        def object_to_upload(object_info):
            obj, upid = object_info['name'].rsplit('/', 1)
            obj_dict = {'key': obj,
                        'upload_id': upid,
                        'last_modified': object_info['last_modified']}
            return obj_dict

        # uploads is a list of dicts: {key, upload_id, last_modified}
        # Note that the pattern matcher drops whole segment objects such as
        # object_name/upload_id/1.
        pattern = re.compile('/[0-9]+$')
        uploads = [object_to_upload(obj) for obj in objects if
                   pattern.search(obj.get('name', '')) is None]

        prefixes = []
        if 'delimiter' in req.params:
            prefix = req.params.get('prefix', '')
            delimiter = req.params['delimiter']
            uploads, prefixes = \
                separate_uploads(uploads, prefix, delimiter)

        if len(uploads) > maxuploads:
            uploads = uploads[:maxuploads]
            truncated = True
        else:
            truncated = False

        nextkeymarker = ''
        nextuploadmarker = ''
        if len(uploads) > 1:
            nextuploadmarker = uploads[-1]['upload_id']
            nextkeymarker = uploads[-1]['key']

        result_elem = Element('ListMultipartUploadsResult')
        SubElement(result_elem, 'Bucket').text = req.container_name
        SubElement(result_elem, 'KeyMarker').text = keymarker
        SubElement(result_elem, 'UploadIdMarker').text = uploadid
        SubElement(result_elem, 'NextKeyMarker').text = nextkeymarker
        SubElement(result_elem, 'NextUploadIdMarker').text = nextuploadmarker
        if 'delimiter' in req.params:
            SubElement(result_elem, 'Delimiter').text = \
                req.params['delimiter']
        if 'prefix' in req.params:
            SubElement(result_elem, 'Prefix').text = req.params['prefix']
        SubElement(result_elem, 'MaxUploads').text = str(maxuploads)
        if encoding_type is not None:
            SubElement(result_elem, 'EncodingType').text = encoding_type
        SubElement(result_elem, 'IsTruncated').text = \
            'true' if truncated else 'false'

        # TODO: don't show uploads which are initiated before this bucket is
        # created.
        for u in uploads:
            upload_elem = SubElement(result_elem, 'Upload')
            SubElement(upload_elem, 'Key').text = u['key']
            SubElement(upload_elem, 'UploadId').text = u['upload_id']
            initiator_elem = SubElement(upload_elem, 'Initiator')
            SubElement(initiator_elem, 'ID').text = req.user_id
            SubElement(initiator_elem, 'DisplayName').text = req.user_id
            owner_elem = SubElement(upload_elem, 'Owner')
            SubElement(owner_elem, 'ID').text = req.user_id
            SubElement(owner_elem, 'DisplayName').text = req.user_id
            SubElement(upload_elem, 'StorageClass').text = 'STANDARD'
            SubElement(upload_elem, 'Initiated').text = \
                u['last_modified'][:-6] + '000Z'

        for p in prefixes:
            elem = SubElement(result_elem, 'CommonPrefixes')
            SubElement(elem, 'Prefix').text = p

        body = tostring(result_elem, encoding_type=encoding_type)

        return HTTPOk(body=body, content_type='application/xml')
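
separate_uploads is nested inside GET, but its behavior is easiest to see with the docstring's own example written out (upload dicts invented, as if the helper were callable on its own):

uploads = [{'key': 'foo', 'upload_id': 'X',
            'last_modified': '2016-01-01T00:00:00.000000'},
           {'key': 'foo/bar', 'upload_id': 'Y',
            'last_modified': '2016-01-01T00:00:00.000000'}]
non_delimited, prefixes = separate_uploads(uploads, prefix='', delimiter='/')
# non_delimited -> [the 'foo' upload only]; prefixes -> ['foo/']
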
Example #11
    def GET(self, req):
        """
        Handle GET Bucket (List Objects) request
        """

        max_keys = req.get_validated_param('max-keys', CONF.max_bucket_listing)
        # TODO: Separate max_bucket_listing and default_bucket_listing
        tag_max_keys = max_keys
        max_keys = min(max_keys, CONF.max_bucket_listing)

        encoding_type = req.params.get('encoding-type')
        if encoding_type is not None and encoding_type != 'url':
            err_msg = 'Invalid Encoding Method specified in Request'
            raise InvalidArgument('encoding-type', encoding_type, err_msg)

        query = {
            'format': 'json',
            'limit': max_keys + 1,
        }
        if 'marker' in req.params:
            query.update({'marker': req.params['marker']})
        if 'prefix' in req.params:
            query.update({'prefix': req.params['prefix']})
        if 'delimiter' in req.params:
            query.update({'delimiter': req.params['delimiter']})

        resp = req.get_response(self.app, query=query)
        if resp.headers.get('x-oss-website-redirect'):
            index = resp.headers['x-oss-web-index']
            resp = req.get_response(self.app, obj=index)
            return HTTPOk(body=resp.body, content_type='application/xml')
        objects = json.loads(resp.body)

        elem = Element('ListBucketResult')
        SubElement(elem, 'Name').text = req.container_name
        SubElement(elem, 'Prefix').text = req.params.get('prefix')
        SubElement(elem, 'Marker').text = req.params.get('marker')

        # To decide whether the listing is truncated, check whether the
        # (max_keys + 1)-th element exists in Swift.
        is_truncated = max_keys > 0 and len(objects) > max_keys
        objects = objects[:max_keys]

        if is_truncated and 'delimiter' in req.params:
            if 'name' in objects[-1]:
                SubElement(elem, 'NextMarker').text = \
                    objects[-1]['name']
            if 'subdir' in objects[-1]:
                SubElement(elem, 'NextMarker').text = \
                    objects[-1]['subdir']

        SubElement(elem, 'MaxKeys').text = str(tag_max_keys)

        if 'delimiter' in req.params:
            SubElement(elem, 'Delimiter').text = req.params['delimiter']

        if encoding_type is not None:
            SubElement(elem, 'EncodingType').text = encoding_type

        SubElement(elem, 'IsTruncated').text = \
            'true' if is_truncated else 'false'

        for o in objects:
            if 'subdir' not in o:
                contents = SubElement(elem, 'Contents')
                SubElement(contents, 'Key').text = o['name']
                SubElement(contents, 'LastModified').text = \
                    o['last_modified'][:-6] + '000Z'
                SubElement(contents, 'ETag').text = '"%s"' % o['hash']
                SubElement(contents, 'Size').text = str(o['bytes'])
                owner = SubElement(contents, 'Owner')
                SubElement(owner, 'ID').text = req.user_id
                SubElement(owner, 'DisplayName').text = req.user_id
                SubElement(contents, 'StorageClass').text = 'STANDARD'
                SubElement(contents, 'Type').text = 'Normal'

        for o in objects:
            if 'subdir' in o:
                common_prefixes = SubElement(elem, 'CommonPrefixes')
                SubElement(common_prefixes, 'Prefix').text = o['subdir']

        body = tostring(elem, encoding_type=encoding_type)

        return HTTPOk(body=body, content_type='application/xml')
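
For orientation, a hypothetical ListBucketResult body this handler could produce for one object and one common prefix (all values invented, element order as built above):

sample_listing = """
<ListBucketResult>
  <Name>mybucket</Name>
  <Prefix>photos/</Prefix>
  <Marker></Marker>
  <MaxKeys>100</MaxKeys>
  <Delimiter>/</Delimiter>
  <IsTruncated>false</IsTruncated>
  <Contents>
    <Key>photos/cat.jpg</Key>
    <LastModified>2016-01-01T00:00:00.000Z</LastModified>
    <ETag>"0123456789abcdef0123456789abcdef"</ETag>
    <Size>1024</Size>
    <Owner>
      <ID>test:tester</ID>
      <DisplayName>test:tester</DisplayName>
    </Owner>
    <StorageClass>STANDARD</StorageClass>
    <Type>Normal</Type>
  </Contents>
  <CommonPrefixes>
    <Prefix>photos/albums/</Prefix>
  </CommonPrefixes>
</ListBucketResult>
"""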