Example #1
    def add(self, image_id, image_file, image_size, context=None):
        """
        Stores an image file with supplied identifier to the backend
        storage system and returns a tuple containing information
        about the stored image.

        :param image_id: The opaque image identifier
        :param image_file: The image data to write, as a file-like object
        :param image_size: The size of the image data to write, in bytes

        :retval tuple of URL in backing store, bytes written, checksum
                and a dictionary with storage system specific information
        :raises `glance.store.exceptions.Duplicate` if the image already
                existed

        :note By default, the backend writes the image data to a file
              `/<DATADIR>/<ID>`, where <DATADIR> is the value of
              the filesystem_store_datadir configuration option and <ID>
              is the supplied image ID.
        """

        datadir = self._find_best_datadir(image_size)
        filepath = os.path.join(datadir, str(image_id))

        if os.path.exists(filepath):
            raise exceptions.Duplicate(image=filepath)

        checksum = hashlib.md5()
        bytes_written = 0
        try:
            with open(filepath, 'wb') as f:
                for buf in utils.chunkreadable(image_file,
                                               ChunkedFile.CHUNKSIZE):
                    bytes_written += len(buf)
                    checksum.update(buf)
                    f.write(buf)
        except IOError as e:
            # A permission error means the file was never created, so there
            # is nothing partial to clean up; for any other I/O error remove
            # the partially written data before mapping the errno below.
            if e.errno != errno.EACCES:
                self._delete_partial(filepath, image_id)
            errors = {errno.EFBIG: exceptions.StorageFull(),
                      errno.ENOSPC: exceptions.StorageFull(),
                      errno.EACCES: exceptions.StorageWriteDenied()}
            raise errors.get(e.errno, e)
        except Exception:
            with excutils.save_and_reraise_exception():
                self._delete_partial(filepath, image_id)

        checksum_hex = checksum.hexdigest()
        metadata = self._get_metadata()

        LOG.debug(_("Wrote %(bytes_written)d bytes to %(filepath)s with "
                    "checksum %(checksum_hex)s"),
                  {'bytes_written': bytes_written,
                   'filepath': filepath,
                   'checksum_hex': checksum_hex})
        return ('file://%s' % filepath, bytes_written, checksum_hex, metadata)
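The core of Example #1 is the chunked copy loop: read fixed-size buffers from the incoming file-like object, update an incremental MD5, and count bytes as they are written. Below is a minimal standalone sketch of that pattern; the helper name, the 64 KiB chunk size, and the plain read() loop (in place of Glance's utils.chunkreadable()) are illustrative assumptions, not part of the store API.

import hashlib


def copy_with_checksum(image_file, filepath, chunksize=64 * 1024):
    # Illustrative stand-in for the driver's write loop: copy the stream
    # to disk in chunks while computing an incremental MD5 and byte count.
    checksum = hashlib.md5()
    bytes_written = 0
    with open(filepath, 'wb') as f:
        while True:
            buf = image_file.read(chunksize)
            if not buf:
                break
            bytes_written += len(buf)
            checksum.update(buf)
            f.write(buf)
    return bytes_written, checksum.hexdigest()


# Example usage with an in-memory stream standing in for the request body:
# import io
# copy_with_checksum(io.BytesIO(b'some image data'), '/tmp/img')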
Example #2
    def add(self, image_id, image_file, image_size, context=None):
        """
        Stores an image file with supplied identifier to the backend
        storage system and returns a tuple containing information
        about the stored image.

        :param image_id: The opaque image identifier
        :param image_file: The image data to write, as a file-like object
        :param image_size: The size of the image data to write, in bytes

        :retval tuple of URL in backing store, bytes written, checksum
                and a dictionary with storage system specific information
        :raises `glance.store.exceptions.Duplicate` if the image already
                existed
        """
        checksum = hashlib.md5()
        image_name = str(image_id)
        with rados.Rados(conffile=self.conf_file, rados_id=self.user) as conn:
            fsid = None
            if hasattr(conn, 'get_fsid'):
                fsid = conn.get_fsid()
            with conn.open_ioctx(self.pool) as ioctx:
                order = int(math.log(self.chunk_size, 2))
                LOG.debug('creating image %s with order %d and size %d',
                          image_name, order, image_size)
                if image_size == 0:
                    LOG.warning(_("since image size is zero we will be doing "
                                  "resize-before-write for each chunk which "
                                  "will be considerably slower than normal"))

                try:
                    loc = self._create_image(fsid, ioctx, image_name,
                                             image_size, order)
                except rbd.ImageExists:
                    msg = _('RBD image %s already exists') % image_id
                    raise exceptions.Duplicate(message=msg)
                try:
                    with rbd.Image(ioctx, image_name) as image:
                        bytes_written = 0
                        offset = 0
                        chunks = utils.chunkreadable(image_file,
                                                     self.chunk_size)
                        for chunk in chunks:
                            # If the image size provided is zero we need to do
                            # a resize for the amount we are writing. This will
                            # be slower so setting a higher chunk size may
                            # speed things up a bit.
                            if image_size == 0:
                                chunk_length = len(chunk)
                                length = offset + chunk_length
                                bytes_written += chunk_length
                                LOG.debug(_("resizing image to %s KiB") %
                                          (length / 1024))
                                image.resize(length)
                            LOG.debug(_("writing chunk at offset %s") %
                                      (offset))
                            offset += image.write(chunk, offset)
                            checksum.update(chunk)
                        if loc.snapshot:
                            image.create_snap(loc.snapshot)
                            image.protect_snap(loc.snapshot)
                except Exception:
                    # Delete the image if one was created, then re-raise
                    # with the original traceback intact.
                    try:
                        self._delete_image(loc.image, loc.snapshot)
                    except exceptions.NotFound:
                        pass

                    raise

        # Make sure we send back the image size whether provided or inferred.
        if image_size == 0:
            image_size = bytes_written

        return (loc.get_uri(), image_size, checksum.hexdigest(), {})
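Example #2's slow path is the resize-before-write loop that runs when image_size is zero: each chunk grows the RBD image just enough before being written at the current offset, and the running offset becomes the inferred size. The sketch below isolates that loop with a toy in-memory stand-in for the RBD image; FakeImage and write_with_resize are illustrative assumptions, not part of glance_store or librbd.

import math

chunk_size = 8 * 1024 * 1024          # illustrative; mirrors an 8 MiB rbd_store_chunk_size
order = int(math.log(chunk_size, 2))  # RBD expresses the object size as 2 ** order, as above


class FakeImage(object):
    """Toy stand-in exposing only the resize()/write() calls used above."""

    def __init__(self):
        self.buf = bytearray()

    def resize(self, length):
        # Grow or truncate the backing buffer, like rbd.Image.resize().
        del self.buf[length:]
        self.buf.extend(b'\0' * (length - len(self.buf)))

    def write(self, data, offset):
        self.buf[offset:offset + len(data)] = data
        return len(data)


def write_with_resize(chunks, image):
    """Mimic the resize-before-write loop used when the size is unknown."""
    offset = 0
    for chunk in chunks:
        image.resize(offset + len(chunk))    # grow just enough for this chunk
        offset += image.write(chunk, offset)
    return offset


# bytes_written ends up as the inferred image size, as in the driver.
bytes_written = write_with_resize([b'a' * 10, b'bbb'], FakeImage())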
Example #3
    def add(self, image_id, image_file, image_size, context=None):
        """
        Stores an image file with supplied identifier to the backend
        storage system and returns a tuple containing information
        about the stored image.

        :param image_id: The opaque image identifier
        :param image_file: The image data to write, as a file-like object
        :param image_size: The size of the image data to write, in bytes

        :retval tuple of URL in backing store, bytes written, checksum
                and a dictionary with storage system specific information
        :raises `glance.store.exceptions.Duplicate` if the image already
                existed

        S3 writes the image data using the scheme:
            s3://<ACCESS_KEY>:<SECRET_KEY>@<S3_HOST>/<BUCKET>/<ID>
        where:
            <ACCESS_KEY> = ``s3_store_access_key``
            <SECRET_KEY> = ``s3_store_secret_key``
            <S3_HOST> = ``s3_store_host``
            <BUCKET> = ``s3_store_bucket``
            <ID> = The id of the image being added
        """
        # Deferred import so the module can be loaded without boto installed.
        from boto.s3.connection import S3Connection

        loc = StoreLocation({'scheme': self.scheme,
                             'bucket': self.bucket,
                             'key': image_id,
                             's3serviceurl': self.full_s3_host,
                             'accesskey': self.access_key,
                             'secretkey': self.secret_key})

        uformat = self.conf.glance_store.s3_store_bucket_url_format
        calling_format = get_calling_format(s3_store_bucket_url_format=uformat)

        s3_conn = S3Connection(loc.accesskey, loc.secretkey,
                               host=loc.s3serviceurl,
                               is_secure=(loc.scheme == 's3+https'),
                               calling_format=calling_format)

        create_bucket_if_missing(self.bucket, s3_conn)

        bucket_obj = get_bucket(s3_conn, self.bucket)
        obj_name = str(image_id)

        def _sanitize(uri):
            return re.sub('//.*:.*@',
                          '//s3_store_access_key:s3_store_secret_key@',
                          uri)

        key = bucket_obj.get_key(obj_name)
        if key and key.exists():
            raise exceptions.Duplicate(message=_("S3 already has an image at "
                                                 "location %s") %
                                       _sanitize(loc.get_uri()))

        msg = _("Adding image object to S3 using (s3_host=%(s3_host)s, "
                "access_key=%(access_key)s, bucket=%(bucket)s, "
                "key=%(obj_name)s)") % ({'s3_host': self.s3_host,
                                         'access_key': self.access_key,
                                         'bucket': self.bucket,
                                         'obj_name': obj_name})
        LOG.debug(msg)

        key = bucket_obj.new_key(obj_name)

        # We need to wrap image_file, which is a reference to the
        # webob.Request.body_file, with a seekable file-like object,
        # otherwise the call to set_contents_from_file() will die
        # with an error complaining that the input object has no
        # 'seek' method. We might want to call
        # webob.Request.make_body_seekable(), but unfortunately that
        # method copies the entire image into memory, which triggers
        # LP Bug #818292. So, here we write a temporary file in as
        # memory-efficient a manner as possible and then supply the
        # temporary file to S3. We also take this opportunity to
        # calculate the image checksum while writing the tempfile, so
        # we don't need to call key.compute_md5().
        msg = _("Writing request body file to temporary file "
                "for %s") % _sanitize(loc.get_uri())
        LOG.debug(msg)

        tmpdir = self.s3_store_object_buffer_dir
        temp_file = tempfile.NamedTemporaryFile(dir=tmpdir)
        checksum = hashlib.md5()
        for chunk in utils.chunkreadable(image_file, self.CHUNKSIZE):
            checksum.update(chunk)
            temp_file.write(chunk)
        temp_file.flush()

        msg = (_("Uploading temporary file to S3 for %s") %
               _sanitize(loc.get_uri()))
        LOG.debug(msg)

        # OK, now upload the data into the key. Rewind the temporary file
        # rather than re-opening it by name so we don't leak a file handle.
        temp_file.seek(0)
        key.set_contents_from_file(temp_file, replace=False)
        size = key.size
        checksum_hex = checksum.hexdigest()

        LOG.debug(_("Wrote %(size)d bytes to S3 key named %(obj_name)s "
                    "with checksum %(checksum_hex)s"),
                  {'size': size, 'obj_name': obj_name,
                   'checksum_hex': checksum_hex})

        return (loc.get_uri(), size, checksum_hex, {})
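Two reusable pieces of Example #3 are the credential-masking regex applied before logging a location URI and the spool-to-tempfile-while-checksumming step that makes the non-seekable request body acceptable to the upload API. A standalone sketch of both follows; the function names, the 64 KiB chunk size, and the plain read() loop are illustrative assumptions rather than the driver's own helpers.

import hashlib
import re
import tempfile


def sanitize_uri(uri):
    # Mask the credentials in an s3://<access>:<secret>@<host>/... URI
    # so it can be logged safely.
    return re.sub('//.*:.*@', '//s3_store_access_key:s3_store_secret_key@', uri)


def buffer_to_tempfile(image_file, chunksize=64 * 1024, tmpdir=None):
    # Spool a non-seekable stream to disk while computing its MD5, so the
    # data can later be handed to an upload API that requires seek().
    checksum = hashlib.md5()
    temp_file = tempfile.NamedTemporaryFile(dir=tmpdir)
    while True:
        chunk = image_file.read(chunksize)
        if not chunk:
            break
        checksum.update(chunk)
        temp_file.write(chunk)
    temp_file.flush()
    temp_file.seek(0)
    return temp_file, checksum.hexdigest()


# sanitize_uri('s3+https://AKIA123:sekrit@s3.example.com/bucket/obj')
# -> 's3+https://s3_store_access_key:s3_store_secret_key@s3.example.com/bucket/obj'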