Example #1
def save_blob_to_store(blob_id,
                       blob,
                       context,
                       max_size,
                       store_type=None,
                       verifier=None):
    """Save file to specified store type and return location info to the user

    :param store_type: type of the store, None means save to default store.
    :param blob_id: id of artifact
    :param blob: blob file iterator
    :param context: user context
    :param verifier: signature verifier
    :return: tuple of values: (location_uri, size, checksums)
    """
    data = utils.LimitingReader(utils.CooperativeReader(blob), max_size)
    (location, size, md5checksum,
     metadata) = backend.add_to_backend(CONF, blob_id, data, 0, store_type,
                                        context, verifier)
    checksums = {
        "md5": md5checksum,
        "sha1": data.sha1.hexdigest(),
        "sha256": data.sha256.hexdigest()
    }
    return location, size, checksums
Example #2
def save_blob_to_store(blob_id,
                       blob,
                       context,
                       max_size,
                       store_type=None,
                       verifier=None):
    """Save file to specified store type and return location info to the user.

    :param store_type: type of the store, None means save to default store.
    :param blob_id: id of blob
    :param blob: blob file iterator
    :param context: user context
    :param verifier: signature verifier
    :return: tuple of values: (location_uri, size, checksums)
    """
    data = utils.LimitingReader(utils.CooperativeReader(blob), max_size)

    LOG.debug('Start uploading blob %s.', blob_id)
    if store_type == 'database':
        location = database_api.add_to_backend(blob_id, data, context,
                                               verifier)
    else:
        (location, size, md5checksum,
         __) = backend.add_to_backend(CONF, blob_id, data, 0, store_type,
                                      context, verifier)
    LOG.debug('Uploading of blob %s is finished.', blob_id)

    checksums = {
        "md5": data.md5.hexdigest(),
        "sha1": data.sha1.hexdigest(),
        "sha256": data.sha256.hexdigest()
    }
    return location, data.bytes_read, checksums
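
For context, here is a minimal caller sketch for save_blob_to_store. The upload_artifact_data helper, the file object and the 100 MiB size cap are assumptions introduced only to show how the returned (location, size, checksums) tuple might be consumed; they are not part of the code above.

def upload_artifact_data(blob_id, file_obj, context):
    # Hypothetical caller: delegate streaming, size limiting and
    # checksumming to save_blob_to_store.
    location, size, checksums = save_blob_to_store(
        blob_id, file_obj, context,
        max_size=100 * 1024 * 1024,  # assumed 100 MiB cap
        store_type=None)  # None means the default configured store
    return {'location': location, 'size': size, 'md5': checksums['md5']}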
Example #3
    def test_cooperative_reader_no_read_method(self):
        BYTES = 1024
        stream = [b'*'] * BYTES
        reader = utils.CooperativeReader(stream)
        bytes_read = 0
        byte = reader.read()
        while len(byte) != 0:
            bytes_read += 1
            byte = reader.read()

        self.assertEqual(BYTES, bytes_read)

        # some data may be left in the buffer
        reader = utils.CooperativeReader(stream)
        reader.buffer = 'some data'
        buffer_string = reader.read()
        self.assertEqual('some data', buffer_string)
Example #4
    def test_cooperative_reader_no_read_method_buffer_size(self):
        # Decrease buffer size to 1000 bytes to test its overflow
        with mock.patch('glare.common.utils.MAX_COOP_READER_BUFFER_SIZE',
                        1000):
            BYTES = 1024
            stream = [b'*'] * BYTES
            reader = utils.CooperativeReader(stream)
            # Reading 1001 bytes to the buffer leads to 413 error
            self.assertRaises(exc.RequestEntityTooLarge, reader.read, 1001)
Example #5
    def test_cooperative_reader_of_iterator_stop_iteration_err(self):
        """Ensure cooperative reader supports iterator backends too"""
        reader = utils.CooperativeReader([l * 3 for l in ''])
        chunks = []
        while True:
            chunks.append(reader.read(3))
            if chunks[-1] == b'':
                break
        meat = b''.join(chunks)
        self.assertEqual(b'', meat)
Example #6
    def test_cooperative_reader_iterator(self):
        """Ensure cooperative reader class accesses all bytes of file"""
        BYTES = 1024
        bytes_read = 0
        with tempfile.TemporaryFile('w+') as tmp_fd:
            tmp_fd.write('*' * BYTES)
            tmp_fd.seek(0)
            for chunk in utils.CooperativeReader(tmp_fd):
                bytes_read += len(chunk)

        self.assertEqual(BYTES, bytes_read)
Example #7
    def test_cooperative_reader_of_iterator(self):
        """Ensure cooperative reader supports iterator backends too"""
        data = b'abcdefgh'
        data_list = [data[i:i + 1] * 3 for i in range(len(data))]
        reader = utils.CooperativeReader(data_list)
        chunks = []
        while True:
            chunks.append(reader.read(3))
            if chunks[-1] == b'':
                break
        meat = b''.join(chunks)
        self.assertEqual(b'aaabbbcccdddeeefffggghhh', meat)
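
The iterator tests above pin down the read() contract of utils.CooperativeReader when it wraps an iterable of byte chunks: read(n) returns at most n bytes, leftover bytes stay buffered for the next call, and b'' signals exhaustion. The class below is a simplified, self-contained sketch of that contract written for illustration; it is not Glare's actual implementation.

class SimpleChunkReader(object):
    """Simplified sketch of the read() behaviour exercised above."""

    def __init__(self, chunk_iter):
        self.iterator = iter(chunk_iter)
        self.buffer = b''

    def read(self, size=None):
        # Refill the buffer until the request can be satisfied or the
        # underlying iterator is exhausted.
        while size is None or len(self.buffer) < size:
            try:
                self.buffer += next(self.iterator)
            except StopIteration:
                break
        if size is None:
            data, self.buffer = self.buffer, b''
        else:
            data, self.buffer = self.buffer[:size], self.buffer[size:]
        return data

Reading SimpleChunkReader([b'aaa', b'bbb']) in three-byte chunks yields b'aaa', b'bbb' and finally b'', mirroring test_cooperative_reader_of_iterator.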
Example #8
    def _test_reader_chunked(self, chunk_size, read_size, max_iterations=5):
        generator = self._create_generator(chunk_size, max_iterations)
        reader = utils.CooperativeReader(generator)
        result = bytearray()
        while True:
            data = reader.read(read_size)
            if len(data) == 0:
                break
            self.assertLessEqual(len(data), read_size)
            result += data
        expected = (b'a' * chunk_size + b'b' * chunk_size + b'c' * chunk_size +
                    b'a' * chunk_size + b'b' * chunk_size)
        self.assertEqual(expected, bytes(result))
Example #9
    def test_cooperative_reader_explicit_read(self):
        BYTES = 1024
        bytes_read = 0
        with tempfile.TemporaryFile('w+') as tmp_fd:
            tmp_fd.write('*' * BYTES)
            tmp_fd.seek(0)
            reader = utils.CooperativeReader(tmp_fd)
            byte = reader.read(1)
            while len(byte) != 0:
                bytes_read += 1
                byte = reader.read(1)

        self.assertEqual(BYTES, bytes_read)
Example #10
def save_blobs_to_store(blobs,
                        context,
                        max_size,
                        store_type=None,
                        verifier=None):
    """Save several files to specified store.

    :param store_type: type of the store, None means save to default store.
    :param blobs: list of tuples (blob_data_id, data)
    :param context: user context
    :param verifier: signature verifier
    :return: dict {blob_data_id: (location_uri, size, checksums)}
    """
    # wrap data in CooperativeReader
    blobs = [(blob_data_id,
              utils.LimitingReader(utils.CooperativeReader(data), max_size))
             for (blob_data_id, data) in blobs]

    if store_type == 'database':
        locations = database_api.add_to_backend_batch(blobs, context, verifier)
    else:
        locations = []
        for blob_data_id, data in blobs:
            (location, __, __,
             __) = backend.add_to_backend(CONF, blob_data_id, data, 0,
                                          store_type, context, verifier)
            locations.append(location)

    # combine location, size and checksums together
    res = {}
    for i in range(len(locations)):
        data = blobs[i][1]
        checksums = {
            "md5": data.md5.hexdigest(),
            "sha1": data.sha1.hexdigest(),
            "sha256": data.sha256.hexdigest()
        }
        res[blobs[i][0]] = (locations[i], data.bytes_read, checksums)

    return res
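
To round out the batch variant, a hypothetical caller of save_blobs_to_store is sketched below; the blob ids, in-memory payloads and 1 MiB size cap are assumptions made for illustration.

import io

def upload_two_blobs(context):
    # Hypothetical data: two in-memory payloads keyed by blob_data_id.
    blobs = [('blob-1', io.BytesIO(b'first payload')),
             ('blob-2', io.BytesIO(b'second payload'))]
    results = save_blobs_to_store(
        blobs, context,
        max_size=1024 * 1024,   # assumed 1 MiB cap
        store_type='database')  # or None for the default store
    for blob_data_id, (location, size, checksums) in results.items():
        print(blob_data_id, location, size, checksums['sha256'])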