Example #1
    def _create_scoped_lock(self,
                            context,
                            type_name,
                            name,
                            version,
                            owner,
                            visibility='private'):
        """Create scoped lock for artifact."""
        # validate that artifact doesn't exist for the scope
        filters = [('name', 'eq:' + name), ('version', 'eq:' + version)]
        if visibility == 'public':
            filters.extend([('visibility', 'public')])
        elif visibility == 'private':
            filters.extend([('owner', 'eq:' + owner),
                            ('visibility', 'private')])

        scope_id = "%s:%s:%s" % (type_name, name, version)
        if visibility != 'public':
            scope_id += ':%s' % owner
        lock = self.lock_engine.acquire(context, scope_id)

        try:
            if self.list(context, type_name, filters).get("total_count") > 0:
                msg = _("Artifact with this name and version is already "
                        "exists for this scope.")
                raise exception.Conflict(msg)
        except Exception:
            with excutils.save_and_reraise_exception(logger=LOG):
                self.lock_engine.release(lock)

        return lock
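
A minimal usage sketch (not from the source; the surrounding create() logic and the
argument values are assumed): the returned lock is released explicitly once the
scoped operation completes.

    lock = self._create_scoped_lock(context, 'images', 'my_art', '1.0.0',
                                    context.project_id, visibility='private')
    try:
        pass  # create the artifact for this (type, name, version, owner) scope
    finally:
        self.lock_engine.release(lock)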
Example #2
def create_lock(context, lock_key, session):
    """Try to create lock record."""
    try:
        lock = models.ArtifactLock()
        lock.id = lock_key
        lock.save(session=session)
        return lock.id
    except (sqlalchemy.exc.IntegrityError, db_exception.DBDuplicateEntry):
        msg = _("Cannot lock an item with key %s. "
                "Lock already acquired by other request") % lock_key
        raise exception.Conflict(msg)
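
A minimal sketch (not from the source) of how a caller might drive create_lock;
the session factory and the delete_lock counterpart are assumed helper names.

    session = get_session()                           # assumed session factory
    lock_key = 'sample_artifact:my_art:1.0.0'
    lock_id = create_lock(context, lock_key, session)  # raises Conflict if held
    try:
        pass  # perform the operation protected by the lock
    finally:
        delete_lock(context, lock_id, session)        # assumed counterpart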
Example #3
    def create_lock(self, context, lock_key):
        global DATA
        item_lock = DATA['locks'].get(lock_key)
        if item_lock:
            msg = _("Cannot lock an item with key %s. "
                    "Lock already acquired by another request.") % lock_key
            raise glare_exc.Conflict(msg)
            # TODO(kairat) Log user data in the log so we can identify who
            # acquired the lock
        else:
            DATA['locks'][lock_key] = lock_key
            return lock_key
Example #4
File: api.py  Project: Fedosin/glare
def create_lock(context, lock_key, session):
    try:
        session.query(models.ArtifactLock).filter_by(id=lock_key).one()
    except orm.exc.NoResultFound:
        lock = models.ArtifactLock()
        lock.id = lock_key
        lock.save(session=session)
        return lock.id

    msg = _("Cannot lock an item with key %s. "
            "Lock already acquired by other request") % lock_key
    raise exception.Conflict(msg)
Example #5
def create_lock(context, lock_key, session):
    """Try to create lock record."""
    with session.begin():
        existing = session.query(models.ArtifactLock).get(lock_key)
        if existing is None:
            try:
                lock = models.ArtifactLock()
                lock.id = lock_key
                lock.save(session=session)
                return lock.id
            except (sqlalchemy.exc.IntegrityError,
                    db_exception.DBDuplicateEntry):
                msg = _("Cannot lock an item with key %s. "
                        "Lock already acquired by other request") % lock_key
                raise exception.Conflict(msg)
        else:
            if timeutils.is_older_than(existing.acquired_at, 5):
                existing.acquired_at = timeutils.utcnow()
                existing.save(session)
                return existing.id
            else:
                msg = _("Cannot lock an item with key %s. "
                        "Lock already acquired by other request") % lock_key
                raise exception.Conflict(msg)
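
Unlike the previous variants, this one treats a lock row older than five seconds
as stale and re-acquires it in place. A small illustration (assumed values) of the
oslo.utils check that drives that decision:

    import datetime
    from oslo_utils import timeutils

    acquired_at = timeutils.utcnow() - datetime.timedelta(seconds=10)
    timeutils.is_older_than(acquired_at, 5)           # True -> lock is taken over
    timeutils.is_older_than(timeutils.utcnow(), 5)    # False -> Conflict is raised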
Example #6
def create_or_update(context, artifact_id, values, session):
    with session.begin():
        _drop_protected_attrs(models.Artifact, values)
        if artifact_id is None:
            # create new artifact
            artifact = models.Artifact()
            artifact.id = values.pop('id')
        else:
            # update the existing artifact
            artifact = _get(context, None, artifact_id, session)

        if 'version' in values:
            values['version'] = semver_db.parse(values['version'])

        if 'tags' in values:
            tags = values.pop('tags')
            artifact.tags = _do_tags(artifact, tags)

        if 'properties' in values:
            properties = values.pop('properties', {})
            artifact.properties = _do_properties(artifact, properties)

        if 'blobs' in values:
            blobs = values.pop('blobs')
            artifact.blobs = _do_blobs(artifact, blobs)

        artifact.updated_at = timeutils.utcnow()
        if 'status' in values:
            if session.query(exists().where(
                    and_(models.ArtifactBlob.status == 'saving',
                         models.ArtifactBlob.artifact_id ==
                         artifact_id))).one()[0]:
                raise exception.Conflict(
                    "You cannot change artifact status if it has "
                    "uploading blobs.")
            if values['status'] == 'active':
                artifact.activated_at = timeutils.utcnow()
        artifact.update(values)

        artifact.save(session=session)
        LOG.debug("Response from the database was received.")

        return artifact.to_dict()
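
A minimal sketch (field values assumed, including the initial 'drafted' status) of
creating a new artifact row and later activating it with create_or_update:

    values = {'id': uuidutils.generate_uuid(), 'name': 'my_art',
              'version': '1.0.0', 'status': 'drafted'}
    af = create_or_update(context, None, values, session)

    # later: flipping the status raises Conflict while any blob is still 'saving'
    create_or_update(context, af['id'], {'status': 'active'}, session)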
Example #7
    def download_blob(self,
                      context,
                      type_name,
                      artifact_id,
                      field_name,
                      blob_key=None):
        """Download binary data from Glare Artifact.

        :param context: user context
        :param type_name: name of artifact type
        :param artifact_id: id of the artifact to be updated
        :param field_name: name of blob or blob dict field
        :param blob_key: if field_name is blob dict it specifies key
         in this dict
        :return: file iterator for requested file
        """
        download_from_any_artifact = False
        if policy.authorize("artifact:download_from_any_artifact", {},
                            context,
                            do_raise=False):
            download_from_any_artifact = True

        af = self._show_artifact(context,
                                 type_name,
                                 artifact_id,
                                 read_only=True,
                                 get_any_artifact=download_from_any_artifact)

        if not download_from_any_artifact:
            policy.authorize("artifact:download", af.to_dict(), context)

        blob_name = self._generate_blob_name(field_name, blob_key)

        if af.status == 'deleted':
            msg = _("Cannot download data when artifact is deleted")
            raise exception.Forbidden(message=msg)

        blob = self._get_blob_info(af, field_name, blob_key)
        if blob is None:
            msg = _("No data found for blob %s") % blob_name
            raise exception.NotFound(message=msg)
        if blob['status'] != 'active':
            msg = _("%s is not ready for download") % blob_name
            raise exception.Conflict(message=msg)

        af.pre_download_hook(context, af, field_name, blob_key)

        meta = {
            'md5': blob.get('md5'),
            'sha1': blob.get('sha1'),
            'sha256': blob.get('sha256'),
            'external': blob.get('external')
        }
        if blob['external']:
            data = {'url': blob['url']}
        else:
            data = store_api.load_from_store(uri=blob['url'], context=context)
            meta['size'] = blob.get('size')
            meta['content_type'] = blob.get('content_type')

        try:
            # call download hook in the end
            data = af.post_download_hook(context, af, field_name, blob_key,
                                         data)
        except exception.GlareException:
            raise
        except Exception as e:
            raise exception.BadRequest(message=str(e))

        return data, meta
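
A minimal consumption sketch (engine object, type and field names assumed): the
second return value tells the caller whether the data is an external URL or a
chunk iterator read from the store.

    data, meta = engine.download_blob(context, 'images', artifact_id, 'image')
    if meta['external']:
        url = data['url']            # external blob: only the stored URL is returned
    else:
        with open('/tmp/image.bin', 'wb') as f:
            for chunk in data:       # internal blob: iterate over store chunks
                f.write(chunk)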
Example #8
    def upload_blob(self,
                    context,
                    type_name,
                    artifact_id,
                    field_name,
                    fd,
                    content_type,
                    content_length=None,
                    blob_key=None):
        """Upload Artifact blob.

        :param context: user context
        :param type_name: name of artifact type
        :param artifact_id: id of the artifact to be updated
        :param field_name: name of blob or blob dict field
        :param fd: file descriptor that Glare uses to upload the file
        :param content_type: data content-type
        :param content_length: amount of data user wants to upload
        :param blob_key: if field_name is blob dict it specifies key
         in this dictionary
        :return: dict representation of updated artifact
        """
        blob_name = self._generate_blob_name(field_name, blob_key)
        blob_id = uuidutils.generate_uuid()
        blob_info = {
            'url': None,
            'size': None,
            'md5': None,
            'sha1': None,
            'sha256': None,
            'id': blob_id,
            'status': 'saving',
            'external': False,
            'content_type': content_type
        }

        # Step 1. Initialize blob
        lock_key = "%s:%s" % (type_name, artifact_id)
        with self.lock_engine.acquire(context, lock_key):
            af = self._show_artifact(context, type_name, artifact_id)
            action_name = "artifact:upload"
            policy.authorize(action_name, af.to_dict(), context)

            # create an empty blob instance in db with 'saving' status
            existing_blob = self._get_blob_info(af, field_name, blob_key)
            existing_blob_status = existing_blob.get("status")\
                if existing_blob else None
            if existing_blob_status == "saving":
                msg = _("Blob %(blob)s already exists for artifact and it"
                        "is in %(status) %(af)s") % {
                            'blob': field_name,
                            'af': af.id,
                            'status': existing_blob_status
                        }
                raise exception.Conflict(message=msg)
            utils.validate_change_allowed(af, field_name)
            blob_info['size'] = self._calculate_allowed_space(
                context, af, field_name, content_length, blob_key)

            af = self._save_blob_info(context, af, field_name, blob_key,
                                      blob_info)

        LOG.debug(
            "Parameters validation for artifact %(artifact)s blob "
            "upload passed for blob %(blob_name)s. "
            "Start blob uploading to backend.", {
                'artifact': af.id,
                'blob_name': blob_name
            })

        # Step 2. Call pre_upload_hook and upload data to the store
        try:
            try:
                # call upload hook first
                if hasattr(af, 'validate_upload'):
                    LOG.warning("Method 'validate_upload' was deprecated. "
                                "Please use 'pre_upload_hook' instead.")
                    fd, path = tpool.execute(af.validate_upload, context, af,
                                             field_name, fd)
                else:
                    fd = tpool.execute(af.pre_upload_hook, context, af,
                                       field_name, blob_key, fd)
            except exception.GlareException:
                raise
            except Exception as e:
                raise exception.BadRequest(message=str(e))

            default_store = getattr(CONF,
                                    'artifact_type:' + type_name).default_store
            # use global parameter if default store isn't set per artifact type
            if default_store is None:
                default_store = CONF.glance_store.default_store

            location_uri, size, checksums = store_api.save_blob_to_store(
                blob_id,
                fd,
                context,
                blob_info['size'],
                store_type=default_store)
            blob_info.update({
                'url': location_uri,
                'status': 'active',
                'size': size
            })
            blob_info.update(checksums)
        except Exception as e:
            # if upload failed remove blob from db and storage
            with excutils.save_and_reraise_exception(logger=LOG):
                LOG.error("Exception occurred: %s", e)
                self._save_blob_info(context, af, field_name, blob_key, None)

        LOG.info(
            "Successfully finished blob uploading for artifact "
            "%(artifact)s blob field %(blob)s.", {
                'artifact': af.id,
                'blob': blob_name
            })

        # Step 3. Change blob status to 'active'
        with self.lock_engine.acquire(context, lock_key):
            af = af.show(context, artifact_id)
            af = self._save_blob_info(context, af, field_name, blob_key,
                                      blob_info)

        af.post_upload_hook(context, af, field_name, blob_key)

        Notifier.notify(context, action_name, af)
        return af.to_dict()
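
A minimal invocation sketch (engine object, type and field names assumed);
content_length may be left as None when the size is not known in advance.

    import os

    path = '/tmp/image.qcow2'
    with open(path, 'rb') as fd:
        af_dict = engine.upload_blob(
            context, 'images', artifact_id, 'image', fd,
            content_type='application/octet-stream',
            content_length=os.path.getsize(path))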
Example #9
    def add_blob_location(self,
                          context,
                          type_name,
                          artifact_id,
                          field_name,
                          location,
                          blob_meta,
                          blob_key=None):
        """Add external/internal location to blob.

        :param context: user context
        :param type_name: name of artifact type
        :param artifact_id: id of the artifact to be updated
        :param field_name: name of blob or blob dict field
        :param location: blob url
        :param blob_meta: dictionary containing blob metadata like md5 checksum
        :param blob_key: if field_name is blob dict it specifies key
         in this dict
        :return: dict representation of updated artifact
        """
        blob_name = self._generate_blob_name(field_name, blob_key)

        location_type = blob_meta.pop('location_type', 'external')

        if location_type == 'external':
            action_name = 'artifact:set_location'
        elif location_type == 'internal':
            scheme = urlparse.urlparse(location).scheme
            if scheme in store_api.RESTRICTED_URI_SCHEMES:
                msg = _("Forbidden to set internal locations with "
                        "scheme '%s'") % scheme
                raise exception.Forbidden(msg)
            if scheme not in store_api.get_known_schemes():
                msg = _("Unknown scheme '%s'") % scheme
                raise exception.BadRequest(msg)
            action_name = 'artifact:set_internal_location'
        else:
            msg = _("Invalid location type: %s") % location_type
            raise exception.BadRequest(msg)

        blob = {
            'url': location,
            'size': None,
            'md5': blob_meta.get("md5"),
            'sha1': blob_meta.get("sha1"),
            'id': uuidutils.generate_uuid(),
            'sha256': blob_meta.get("sha256"),
            'status': 'active',
            'external': location_type == 'external',
            'content_type': None
        }

        lock_key = "%s:%s" % (type_name, artifact_id)
        with self.lock_engine.acquire(context, lock_key):
            af = self._show_artifact(context, type_name, artifact_id)
            policy.authorize(action_name, af.to_dict(), context)
            if self._get_blob_info(af, field_name, blob_key):
                msg = _("Blob %(blob)s already exists for artifact "
                        "%(af)s") % {
                            'blob': field_name,
                            'af': af.id
                        }
                raise exception.Conflict(message=msg)
            utils.validate_change_allowed(af, field_name)
            af.pre_add_location_hook(context, af, field_name, location,
                                     blob_key)
            af = self._save_blob_info(context, af, field_name, blob_key, blob)

        LOG.info(
            "External location %(location)s has been created "
            "successfully for artifact %(artifact)s blob %(blob)s", {
                'location': location,
                'artifact': af.id,
                'blob': blob_name
            })

        af.post_add_location_hook(context, af, field_name, blob_key)
        Notifier.notify(context, action_name, af)
        return af.to_dict()
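
A minimal invocation sketch (engine object, type and field names assumed) for
registering an external URL instead of uploading data; checksums and the
location type travel in blob_meta:

    blob_meta = {'md5': '35d83e8eedfbdb87ff97d1f2761f8ebf',
                 'sha1': None,
                 'sha256': None,
                 'location_type': 'external'}
    af_dict = engine.add_blob_location(
        context, 'images', artifact_id, 'image',
        'https://example.com/image.qcow2', blob_meta)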