Example #1
def verify_uploaded_data_amount(context, type_name, data_amount=None):
    """Verify if user can upload data based on his quota limits.

    :param context: user context
    :param type_name: name of artifact type
    :param data_amount: number of bytes user wants to upload. Value None means
     that user hasn't specified data amount. In this case don't raise an
     exception, but just return the amount of data he is able to upload.
    :return: number of bytes user can upload if data_amount isn't specified
    """
    global_limit = CONF.max_uploaded_data
    type_limit = getattr(CONF, 'artifact_type:' + type_name).max_uploaded_data

    # update limits if they were reassigned for the project
    project_id = context.project_id
    quotas = list_quotas(project_id).get(project_id, {})
    if 'max_uploaded_data' in quotas:
        global_limit = quotas['max_uploaded_data']
    if 'max_uploaded_data:' + type_name in quotas:
        type_limit = quotas['max_uploaded_data:' + type_name]

    session = api.get_session()
    res = -1

    if global_limit != -1:
        # the total amount of data already uploaded
        whole_number = api.calculate_uploaded_data(context, session)
        if data_amount is None:
            res = global_limit - whole_number
        elif whole_number + data_amount > global_limit:
            msg = _("Can't upload %(data_amount)d byte(s) because of global "
                    "quota limit: %(global_limit)d. "
                    "You have %(whole_number)d bytes uploaded.") % {
                        'data_amount': data_amount,
                        'global_limit': global_limit,
                        'whole_number': whole_number
                    }
            raise exception.RequestEntityTooLarge(msg)

    if type_limit != -1:
        # the amount of data already uploaded for this specific type
        type_number = api.calculate_uploaded_data(context, session, type_name)
        if data_amount is None:
            available = type_limit - type_number
            res = available if res == -1 else min(res, available)
        elif type_number + data_amount > type_limit:
            msg = _("Can't upload %(data_amount)d byte(s) because of "
                    "quota limit for artifact type '%(type_name)s': "
                    "%(type_limit)d. You have %(type_number)d bytes "
                    "uploaded for this type.") % {
                        'data_amount': data_amount,
                        'type_name': type_name,
                        'type_limit': type_limit,
                        'type_number': type_number
                    }
            raise exception.RequestEntityTooLarge(msg)
    return res
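
A minimal sketch of how a caller might use this helper, assuming it lives in Glare's quota module; the import path and the wrapper function below are illustrative assumptions, only verify_uploaded_data_amount comes from the code above.

# Hypothetical caller; the import path and function name are assumptions.
from glare import quota


def bytes_allowed_for_upload(context, type_name, content_length=None):
    """Return how many bytes this upload may consume.

    Raises RequestEntityTooLarge if content_length is given and breaks a quota.
    """
    if content_length is not None:
        # Raises if the declared size exceeds the global or per-type quota.
        quota.verify_uploaded_data_amount(context, type_name, content_length)
        return content_length
    # With no declared size, ask for the remaining allowance; -1 = unlimited.
    return quota.verify_uploaded_data_amount(context, type_name)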
Example #2
    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            # Stop streaming as soon as the byte limit is exceeded.
            if self.bytes_read > self.limit:
                raise exception.RequestEntityTooLarge()
            yield chunk
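
The wrapper class this method belongs to is not shown. Below is a self-contained sketch of the same pattern, with a hypothetical class name and a plain ValueError standing in for exception.RequestEntityTooLarge.

# Illustrative sketch only; the class name and ValueError are assumptions.
class LimitingIterator(object):
    """Yield chunks from an iterable and fail once 'limit' bytes are passed."""

    def __init__(self, data, limit):
        self.data = data          # any iterable of byte chunks
        self.limit = limit        # maximum number of bytes to allow
        self.bytes_read = 0

    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            if self.bytes_read > self.limit:
                raise ValueError("request entity too large")
            yield chunk


# Consuming more than 'limit' bytes raises mid-iteration.
reader = LimitingIterator([b'a' * 4, b'b' * 4, b'c' * 4], limit=8)
consumed = b''
try:
    for piece in reader:
        consumed += piece
except ValueError:
    pass  # only the first two chunks (8 bytes) were accepted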
Example #3
    def read(self, length=None):
        """Return the requested amount of bytes, fetching the next chunk of
        the underlying iterator when needed.

        This is replaced with cooperative_read in __init__ if the underlying
        fd already supports read().
        """
        if length is None:
            if len(self.buffer) - self.position > 0:
                # if no length specified but some data exists in buffer,
                # return that data and clear the buffer
                result = self.buffer[self.position:]
                self.buffer = b''
                self.position = 0
                return bytes(result)
            else:
                # otherwise read the next chunk from the underlying iterator
                # and return it as a whole. Reset the buffer, as subsequent
                # calls may specify the length
                try:
                    if self.iterator is None:
                        self.iterator = self.__iter__()
                    return next(self.iterator)
                except StopIteration:
                    return b''
                finally:
                    self.buffer = b''
                    self.position = 0
        else:
            result = bytearray()
            while len(result) < length:
                if self.position < len(self.buffer):
                    to_read = length - len(result)
                    chunk = self.buffer[self.position:self.position + to_read]
                    result.extend(chunk)

                    # This check is here to prevent potential OOM issues if
                    # this code is called with an unreasonably high read size.
                    # Currently it is only called from the HTTP clients of the
                    # Glare backend stores, which use httplib for data
                    # streaming with a read size hardcoded to 8K, so this
                    # check should never fire. Regardless, the check is still
                    # worth making, as the code may be reused somewhere else.
                    if len(result) >= MAX_COOP_READER_BUFFER_SIZE:
                        raise exception.RequestEntityTooLarge()
                    self.position += len(chunk)
                else:
                    try:
                        if self.iterator is None:
                            self.iterator = self.__iter__()
                        self.buffer = next(self.iterator)
                        self.position = 0
                    except StopIteration:
                        self.buffer = b''
                        self.position = 0
                        return bytes(result)
            return bytes(result)
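
A typical calling pattern for a reader with this read() contract (read(n) returns at most n bytes and an empty value once the data is exhausted); the drain helper below is illustrative, and io.BytesIO is used as a stand-in reader.

import io


def drain(reader, chunk_size=8192):
    """Read a buffered reader to exhaustion in fixed-size chunks."""
    pieces = []
    while True:
        chunk = reader.read(chunk_size)
        if not chunk:          # b'' signals the end of the data
            break
        pieces.append(chunk)
    return b''.join(pieces)


assert drain(io.BytesIO(b'x' * 20000)) == b'x' * 20000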
Example #4
    def read(self, i):
        result = self.data.read(i)
        len_result = len(result)
        self.bytes_read += len_result
        if len_result:
            self.sha1.update(result)
            self.sha256.update(result)
        if self.bytes_read > self.limit:
            raise exception.RequestEntityTooLarge()
        return result
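
The attributes used here (self.data, self.sha1, self.sha256, self.limit) come from the wrapper's constructor, which is not part of the snippet. Below is a self-contained sketch of such a wrapper, with hypothetical naming and ValueError standing in for the Glare exception.

import hashlib
import io


class ChecksummingLimitedReader(object):
    """Wrap a file-like object, hashing what is read and enforcing a limit.

    Illustrative reconstruction only; the class and ValueError are assumptions.
    """

    def __init__(self, data, limit):
        self.data = data
        self.limit = limit
        self.bytes_read = 0
        self.sha1 = hashlib.sha1()
        self.sha256 = hashlib.sha256()

    def read(self, i):
        result = self.data.read(i)
        len_result = len(result)
        self.bytes_read += len_result
        if len_result:
            self.sha1.update(result)
            self.sha256.update(result)
        if self.bytes_read > self.limit:
            raise ValueError("request entity too large")
        return result


reader = ChecksummingLimitedReader(io.BytesIO(b'hello'), limit=10)
data = reader.read(5)
print(reader.sha1.hexdigest())   # checksum of everything read so far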
Example #5
    def read(self, length=None):
        res = self.data.read() if length is None else self.data.read(length)
        len_result = len(res)
        self.bytes_read += len_result
        if len_result:
            self.md5.update(res)
            self.sha1.update(res)
            self.sha256.update(res)
        if self.bytes_read > self.limit:
            message = _("The server is refusing to process a request because"
                        " the request entity is larger than the server is"
                        " willing or able to process - %s bytes.") % self.limit
            raise exception.RequestEntityTooLarge(message=message)
        return res
Example #6
def read_data(flobj, limit=16777216):
    """Read data into memory from the file-like object.

    :param flobj: file-like object that contains data
    :param limit: max file size that can be read into memory
    :return: bytes read from the object
    """
    bytes_read = 0
    data = b''
    for chunk in flobj:
        bytes_read += len(chunk)
        if bytes_read > limit:
            raise exception.RequestEntityTooLarge()
        data += chunk
    return data
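
A short usage sketch, assumed to run in the same module as read_data so that exception.RequestEntityTooLarge is already in scope.

import io

print(read_data(io.BytesIO(b'abc' * 10)))        # 30 bytes, under the default limit

try:
    read_data(io.BytesIO(b'x' * 32), limit=16)   # 32 bytes against a 16-byte limit
except exception.RequestEntityTooLarge:
    print("rejected: more than 16 bytes")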
Example #7
    def pre_upload_hook(cls, context, af, field_name, blob_key, fd):
        flobj = io.BytesIO(fd.read(cls.MAX_BLOB_SIZE))

        # Raise an exception if anything is left in the stream
        if fd.read(1):
            msg = ("The file you are trying to upload is too big. "
                   "The system upper limit is %s.") % cls.MAX_BLOB_SIZE
            raise exception.RequestEntityTooLarge(msg)

        zip_ref = zipfile.ZipFile(flobj, 'r')

        file_utils.unpack_zip_archive_to_artifact_folder(
            context, af, zip_ref, 'content')

        flobj.seek(0)
        return flobj
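
For reference, the kind of payload this hook expects can be built with the stdlib zipfile module; the snippet below only illustrates how an in-memory zip archive like the one wrapped in flobj is constructed and inspected.

import io
import zipfile

# Build an in-memory zip archive similar to what the hook receives via 'fd'.
payload = io.BytesIO()
with zipfile.ZipFile(payload, 'w', zipfile.ZIP_DEFLATED) as zf:
    zf.writestr('content/hello.txt', 'hello world')
payload.seek(0)

# The hook wraps the uploaded bytes in BytesIO and opens them the same way.
with zipfile.ZipFile(payload, 'r') as zip_ref:
    print(zip_ref.namelist())    # ['content/hello.txt']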
Example #8
    def _calculate_allowed_space(self,
                                 context,
                                 af,
                                 field_name,
                                 content_length=None,
                                 blob_key=None):
        """Calculate the maximum amount of data user can upload to the blob."""
        # As a default we take the maximum blob size
        blob_name = self._generate_blob_name(field_name, blob_key)

        max_blob_size = af.get_max_blob_size(field_name)

        if blob_key is not None:
            # For folders we also compare it with the maximum folder size
            blobs_dict = getattr(af, field_name)
            overall_folder_size = sum(blob["size"]
                                      for blob in blobs_dict.values()
                                      if blob["size"] is not None)
            available_folder_space = af.get_max_folder_size(
                field_name) - overall_folder_size  # always non-negative
            max_blob_size = min(max_blob_size, available_folder_space)

        # check quotas
        quota_size = quota.verify_uploaded_data_amount(context,
                                                       af.get_type_name(),
                                                       content_length)

        if content_length is None:
            # If no content_length was provided we have to allocate all the
            # allowed space for the blob: the minimum of the max blob size
            # and the available quota limit. -1 means the user has no upload
            # limits.
            size = max_blob_size if quota_size == -1 else min(
                max_blob_size, quota_size)
        else:
            if content_length > max_blob_size:
                msg = _("Can't upload %(content_length)d bytes of data to "
                        "blob %(blob_name)s. Its max allowed size is "
                        "%(max_blob_size)d") % {
                            'content_length': content_length,
                            'blob_name': blob_name,
                            'max_blob_size': max_blob_size
                        }
                raise exception.RequestEntityTooLarge(msg)
            size = content_length

        return size
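
The size arithmetic at the end of this method can be isolated into a small pure function; the helper below is only an illustration of the -1 "no quota" sentinel logic, with ValueError in place of the Glare exception.

def allowed_blob_size(max_blob_size, quota_size, content_length=None):
    """Reproduce the size decision above; quota_size == -1 means no limit."""
    if content_length is None:
        # Allocate everything the caller may use: the smaller of the blob's
        # own maximum and the remaining quota (unless the quota is unlimited).
        return max_blob_size if quota_size == -1 else min(max_blob_size,
                                                          quota_size)
    if content_length > max_blob_size:
        raise ValueError("content_length exceeds the max allowed blob size")
    return content_length


assert allowed_blob_size(100, -1) == 100        # no quota: full blob size
assert allowed_blob_size(100, 40) == 40         # quota caps the allocation
assert allowed_blob_size(100, 40, 30) == 30     # explicit length accepted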
Example #9
    def read(self, i):
        result = self.data.read(i)
        self.bytes_read += len(result)
        if self.bytes_read > self.limit:
            raise exception.RequestEntityTooLarge()
        return result