def run_child(self):
    """Fork one worker process and register it in ``self.children``.

    The child serves requests via ``run_server()`` and exits cleanly on
    SIGHUP (stops accepting, closes its socket, lets in-flight requests
    finish); the parent only records the new pid.
    """
    def child_hup(*args):
        """Shuts down child processes, existing requests are handled."""
        signal.signal(signal.SIGHUP, signal.SIG_IGN)
        eventlet.wsgi.is_accepting = False
        self.sock.close()

    pid = os.fork()
    if pid == 0:
        # Child: install its own HUP handler, restore default TERM.
        signal.signal(signal.SIGHUP, child_hup)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        # ignore the interrupt signal to avoid a race whereby
        # a child worker receives the signal before the parent
        # and is respawned unnecessarily as a result
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        # The child has no need to stash the unwrapped
        # socket, and the reference prevents a clean
        # exit on sighup
        self._sock = None
        self.run_server()
        LOG.info(_LI('Child %d exiting normally'), os.getpid())
        # self.pool.waitall() is now called in wsgi's server so
        # it's safe to exit here
        sys.exit(0)
    else:
        # Parent: track the worker so it can be reaped/respawned.
        LOG.info(_LI('Started child %s'), pid)
        self.children.add(pid)
def _remove_children(self, pid):
    """Forget a finished worker *pid*.

    Removes the pid from the live children set first, then from the
    stale (pre-reload) set; logs a warning when the pid is unknown.

    :param pid: process id returned by ``os.wait()``
    """
    if pid in self.children:
        self.children.remove(pid)
        LOG.info(_LI('Removed dead child %s'), pid)
    elif pid in self.stale_children:
        self.stale_children.remove(pid)
        LOG.info(_LI('Removed stale child %s'), pid)
    else:
        # Fix: use the non-deprecated ``warning`` alias and lazy
        # %-style args, consistent with the LOG.info calls above
        # (the original eagerly interpolated with ``%``).
        LOG.warning(_LW('Unrecognised child %s'), pid)
def release(self, lock):
    """Release a previously acquired artifact lock.

    A lock whose ``lock_id`` is None means no real lock was taken
    (see ``acquire``), so releasing it is a no-op.

    :param lock: Lock object returned by ``acquire``
    """
    if lock.lock_id is None:
        return
    self.lock_api.delete_lock(lock.context, lock.lock_id)
    LOG.info(_LI("Lock %(lock_id)s released for lock_key %(key)s"),
             {'lock_id': lock.lock_id, 'key': lock.lock_key})
def activate(cls, context, af, values):
    """Activate Artifact and make it available for users

    :param context: User Context
    :param af: current Artifact definition in Glare
    :param values: requested updates; must be exactly
        ``{'status': <ACTIVE>}``
    :return: definition of activated Artifact
    """
    # The only change allowed here is the status flip itself.
    if values != {'status': cls.STATUS.ACTIVE}:
        raise exception.BadRequest(
            _("Only {'status': %s} is allowed in a request "
              "for activation.") % cls.STATUS.ACTIVE)

    # Every field flagged required_on_activate must already be set.
    for field_name, field_type in six.iteritems(af.fields):
        if (field_type.required_on_activate
                and getattr(af, field_name) is None):
            raise exception.BadRequest(
                _("'%s' attribute must be set before "
                  "activation") % field_name)

    cls.validate_activate(context, af)

    # Only queued artifacts may transition to active.
    if af.status != cls.STATUS.QUEUED:
        raise exception.InvalidStatusTransition(
            orig=af.status, new=cls.STATUS.ACTIVE)

    LOG.info(_LI("Parameters validation for artifact %(artifact)s "
                 "activate passed for request %(request)s."),
             {'artifact': af.id, 'request': context.request_id})
    active_af = cls.db_api.update(context, af.id, values)
    return cls._init_artifact(context, active_af)
def publish(cls, context, af, values):
    """Make Artifact available for everyone

    :param context: user context
    :param af: definition of published Artifact
    :return: definition of active Artifact
    """
    # Only the visibility flip itself is permitted in the request.
    if values != {'visibility': 'public'}:
        raise exception.BadRequest(
            _("Only {'visibility': 'public'} is allowed in a request "
              "for artifact publish."))

    scope = cls._get_versioning_scope(context, values, af)
    with cls.lock_engine.acquire(context, scope):
        if af.status != cls.STATUS.ACTIVE:
            raise exception.BadRequest(
                _("Cannot publish non-active artifact"))
        # A public artifact must not clash with existing public
        # artifacts of the same name/version.
        cls._validate_versioning(context, af.name, af.version,
                                 is_public=True)
        cls.validate_publish(context, af)
        LOG.info(_LI("Parameters validation for artifact %(artifact)s "
                     "publish passed for request %(request)s."),
                 {'artifact': af.id, 'request': context.request_id})
        af = cls.db_api.update(context, af.id, values)
        return cls._init_artifact(context, af)
def update(cls, context, af, values): """Update Artifact in Glare repo :param context: user Context :param af: current definition of Artifact in Glare :param values: list of changes for artifact :return: definition of updated Artifact """ # reset all changes of artifact to reuse them after update af.obj_reset_changes() scope = cls._get_versioning_scope(context, values, af) with cls.lock_engine.acquire(context, scope): # validate version if 'name' in values or 'version' in values: new_name = values.get('name') or af.name new_version = values.get('version') or af.version cls._validate_versioning(context, new_name, new_version) # validate other values cls._validate_update_allowed(context, af, list(values)) cls._validate_input_values(context, values) # apply values to the artifact. if all changes applied then update # values in db or raise an exception in other case. for key, value in six.iteritems(values): setattr(af, key, value) LOG.info(_LI("Parameters validation for artifact %(artifact)s " "update passed for request %(request)s."), {'artifact': af.id, 'request': context.request_id}) updated_af = cls.db_api.update( context, af.id, af.obj_changes_to_primitive()) return cls._init_artifact(context, updated_af)
def add_blob_dict_location(cls, context, af, field_name, blob_key,
                           location):
    """Register an external blob location inside a blob-dict field.

    A placeholder blob in SAVING state is persisted first; the location
    is then validated against the store, and on success the blob entry
    is flipped to ACTIVE with its size/checksum/content-type filled in.
    On any failure the dict entry is removed again before re-raising.

    :param context: user context
    :param af: current Artifact definition
    :param field_name: name of the blob-dict field
    :param blob_key: key inside the blob dict
    :param location: external blob url
    :return: updated Artifact definition in Glare
    """
    cls._validate_upload_allowed(context, af, field_name, blob_key)
    # Placeholder entry: no data yet, marked external.
    blob = {'url': None, 'size': None, 'checksum': None,
            'status': BlobStatus.SAVING, 'external': True,
            'content_type': ""}
    blob_dict_attr = getattr(af, field_name)
    blob_dict_attr[blob_key] = blob
    # Persist the placeholder; keep the DB's view of the dict.
    blob_dict_attr = cls.db_api.update(
        context, af.id, {field_name: blob_dict_attr})[field_name]
    try:
        # validate blob location and get size with checksum
        size, checksum, content_type = store_api.get_location_info(
            location, context, cls._get_max_blob_size(field_name))
        blob = blob_dict_attr[blob_key]
        blob['size'] = size
        blob['status'] = BlobStatus.ACTIVE
        blob['checksum'] = checksum
        blob['content_type'] = content_type
        blob_dict_attr[blob_key] = blob
        updated_af = cls.db_api.update(
            context, af.id, {field_name: blob_dict_attr})
        LOG.info(
            _LI("External location %(location)s validated successfully "
                "for artifact %(artifact)s blob dict %(blob)s with key "
                "%(key)s"),
            {'location': location, 'artifact': af.id,
             'blob': field_name, 'key': blob_key})
        return cls._init_artifact(context, updated_af)
    except Exception:
        # Roll back the placeholder entry, then re-raise.
        with excutils.save_and_reraise_exception(logger=LOG):
            del blob_dict_attr[blob_key]
            cls.db_api.update(context, af.id,
                              {field_name: blob_dict_attr})
def deactivate(cls, context, af, values):
    """Deny Artifact downloading due to security concerns

    If user uploaded suspicious Artifact then Cloud Admins(or other
    users - it depends on policy configurations) can deny Artifact
    download by users by making Artifact de-activated. After additional
    investigation Artifact can be re-activated or deleted from Glare.

    :param context: user context
    :param af: Artifact definition in Glare
    :param values: requested updates; must be exactly
        ``{'status': <DEACTIVATED>}``
    :return: definition of de-activated Artifact
    """
    if values != {'status': cls.STATUS.DEACTIVATED}:
        msg = _("Only {'status': %s} is allowed in a request "
                "for deactivation.") % cls.STATUS.DEACTIVATED
        raise exception.BadRequest(msg)

    if af.status != cls.STATUS.ACTIVE:
        # Fix: report DEACTIVATED as the requested target status —
        # the original passed ACTIVE, which misreported the attempted
        # transition in the error (compare with ``reactivate``).
        raise exception.InvalidStatusTransition(
            orig=af.status, new=cls.STATUS.DEACTIVATED)

    LOG.info(_LI("Parameters validation for artifact %(artifact)s "
                 "deactivate passed for request %(request)s."),
             {'artifact': af.id, 'request': context.request_id})
    af = cls.db_api.update(context, af.id, values)
    return cls._init_artifact(context, af)
def create(cls, context, values):
    """Create new Artifact in Glare repo

    :param context: user context
    :param values: Dict with specified artifact properties
    :return: definition of create Artifact
    """
    # Guard clause: the raise makes the original 'else' nesting
    # unnecessary (flat is better than nested).
    if context.tenant is None or context.read_only:
        msg = _("It's forbidden to anonymous users to create artifacts.")
        raise exception.Forbidden(msg)

    with cls.lock_engine.acquire(
            context, cls._get_versioning_scope(context, values)):
        # Default the version if the caller did not supply one.
        ver = values.setdefault('version', cls.DEFAULT_ARTIFACT_VERSION)
        cls._validate_versioning(context, values.get('name'), ver)
        # validate other values
        cls._validate_input_values(context, values)
        # validate visibility
        if 'visibility' in values:
            msg = _("visibility is not allowed in a request "
                    "for artifact create.")
            raise exception.BadRequest(msg)
        values['id'] = str(uuid.uuid4())
        values['owner'] = context.tenant
        values['created_at'] = timeutils.utcnow()
        values['updated_at'] = values['created_at']
        af = cls._init_artifact(context, values)
        LOG.info(_LI("Parameters validation for artifact creation "
                     "passed for request %s."), context.request_id)
        af_vals = cls.db_api.create(context,
                                    af.obj_changes_to_primitive())
        return cls._init_artifact(context, af_vals)
def _single_run(self, application, sock):
    """Start a WSGI server in a new green thread."""
    LOG.info(_LI("Starting single process server"))
    server_options = {
        'custom_pool': self.pool,
        'log': self._logger,
        'debug': False,
        'keepalive': CONF.http_keepalive,
        'socket_timeout': self.client_socket_timeout,
    }
    eventlet.wsgi.server(sock, application, **server_options)
def delete(cls, context, af):
    """Delete Artifact and all blobs from Glare.

    Blobs are first all marked PENDING_DELETE in one DB update; unless
    delayed deletion is configured, the blob data is then removed from
    the store one by one before the artifact record itself is deleted.

    :param context: user context
    :param af: definition of artifact targeted to delete
    :raises Forbidden: public artifact deleted by a non-admin
    :raises Conflict: some blob is already being deleted
    """
    if af.visibility == 'public' and not context.is_admin:
        msg = _("Only admins are allowed to delete public images")
        raise exception.Forbidden(msg)

    # marking all blobs as pending delete
    blobs = {}
    for name, field in six.iteritems(af.fields):
        if cls.is_blob(name):
            b = getattr(af, name)
            if b:
                if b['status'] == BlobStatus.PENDING_DELETE:
                    msg = _('Blob %(name)s is already deleting '
                            'for artifact %(id)s') % {'name': name,
                                                      'id': af.id}
                    raise exception.Conflict(msg)
                else:
                    b['status'] = BlobStatus.PENDING_DELETE
                    blobs[name] = b
        elif cls.is_blob_dict(name):
            bd = getattr(af, name)
            if bd:
                for key, b in six.iteritems(bd):
                    if b['status'] == BlobStatus.PENDING_DELETE:
                        msg = _('Blob %(name)s is already deleting '
                                'for artifact %(id)s') % {'name': name,
                                                          'id': af.id}
                        raise exception.Conflict(msg)
                    else:
                        b['status'] = BlobStatus.PENDING_DELETE
                        blobs[name] = bd

    if blobs:
        # Fix: '%(blobs)' was missing its 's' conversion character,
        # which makes %-formatting raise ValueError when this debug
        # record is actually emitted.
        LOG.debug("Marked all blobs %(blobs)s for artifact %(artifact)s "
                  "as pending delete. Start blobs delete.",
                  {'blobs': blobs, 'artifact': af.id})
        cls.db_api.update(context, af.id, blobs)
        # delete blobs one by one
        if not CONF.delayed_blob_delete:
            for name, blob in six.iteritems(blobs):
                if cls.is_blob(name):
                    store_api.delete_blob(blob['url'], context=context)
                    cls.db_api.update(context, af.id, {name: None})
                elif cls.is_blob_dict(name):
                    # Work on a copy so the dict shrinks in the DB as
                    # each key's data is removed from the store.
                    upd_blob = deepcopy(blob)
                    for key, val in six.iteritems(blob):
                        store_api.delete_blob(val['url'],
                                              context=context)
                        del upd_blob[key]
                        cls.db_api.update(context, af.id,
                                          {name: upd_blob})
            LOG.info(_LI("Blobs successfully deleted for artifact %s"),
                     af.id)

    # delete artifact itself
    cls.db_api.delete(context, af.id)
def log_decorator(self, req, *args, **kwargs):
    """Call the wrapped API method, logging before and after success."""
    method_name = f.__name__
    request_id = req.context.request_id
    LOG.debug("Request %(request_id)s for %(api_method)s successfully "
              "deserialized. Pass request parameters to Engine",
              {'request_id': request_id, 'api_method': method_name})
    result = f(self, req, *args, **kwargs)
    LOG.info(_LI(
        "Request %(request_id)s for artifact %(api_method)s "
        "successfully executed."),
        {'request_id': request_id, 'api_method': method_name})
    return result
def _verify_and_respawn_children(self, pid, status):
    """Decide whether to respawn a worker after *pid* exited.

    A worker that exited with a non-zero status is treated as
    unrecoverable and is not restarted; once no workers remain the
    server loop is stopped. Otherwise a replacement child is forked
    while the worker count is below the configured number.

    :param pid: pid of the reaped child
    :param status: raw exit status from ``os.wait()``
    """
    if len(self.stale_children) == 0:
        LOG.debug('No stale children')

    if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
        # Fix: pass ``pid`` lazily as a logging argument instead of
        # eagerly interpolating with ``%``, consistent with every
        # other log call in this class.
        LOG.error(_LE('Not respawning child %d, cannot '
                      'recover from termination'), pid)
        if not self.children and not self.stale_children:
            LOG.info(_LI('All workers have terminated. Exiting'))
            self.running = False
    else:
        if len(self.children) < get_num_workers():
            self.run_child()
def acquire(self, context, lock_key):
    """Acquire lock to update whole artifact

    Acquire lock to update artifact. If there is some other lock for the
    same artifact then raise Conflict Error.

    :param context: user context
    :param lock_key: lock key
    :return: lock definition
    """
    lock_id = None
    # Overlong or missing keys are not lockable; return a no-op lock.
    if lock_key is not None and len(lock_key) < self.MAX_LOCK_LENGTH:
        lock_id = self.lock_api.create_lock(context, lock_key)
        LOG.info(_LI("Lock %(lock_id)s acquired for lock_key "
                     "%(lock_key)s"),
                 {'lock_id': lock_id, 'lock_key': lock_key})
    else:
        LOG.info(_LI("No lock for lock_key %s"), lock_key)
    return Lock(context, lock_id, lock_key, self.release)
def start_wsgi(self):
    """Launch the WSGI service: single-process or pre-forked workers."""
    workers = get_num_workers()
    if workers == 0:
        # Useful for profiling, test, debug etc.
        self.pool = self.create_pool()
        self.pool.spawn_n(self._single_run, self.application, self.sock)
        return

    # Multi-worker mode: wire parent signal handlers, then fork
    # children up to the configured count.
    LOG.info(_LI("Starting %d workers"), workers)
    signal.signal(signal.SIGTERM, self.kill_children)
    signal.signal(signal.SIGINT, self.kill_children)
    signal.signal(signal.SIGHUP, self.hup)
    while len(self.children) < workers:
        self.run_child()
def log_decorator(self, req, *args, **kwargs):
    """Wrap *f* with debug/info logging around a successful call."""
    log_args = {'request_id': req.context.request_id,
                'api_method': f.__name__}
    LOG.debug("Request %(request_id)s for %(api_method)s successfully "
              "deserialized. Pass request parameters to Engine",
              log_args)
    result = f(self, req, *args, **kwargs)
    LOG.info(_LI("Request %(request_id)s for artifact %(api_method)s "
                 "successfully executed."), log_args)
    return result
def wait_on_children(self):
    """Reap worker processes until the server is told to stop.

    Runs ``os.wait()`` in a loop, removing and possibly respawning
    children as they exit. Interrupted waits are retried, Ctrl-C
    breaks out, and SIGHUP triggers a reload. On exit the listening
    socket is shut down and closed.
    """
    while self.running:
        try:
            pid, status = os.wait()
            # Only act on real terminations (not stop/continue).
            if os.WIFEXITED(status) or os.WIFSIGNALED(status):
                self._remove_children(pid)
                self._verify_and_respawn_children(pid, status)
        except OSError as err:
            # EINTR: wait interrupted by a signal; ECHILD: no
            # children left — both are expected, keep looping.
            if err.errno not in (errno.EINTR, errno.ECHILD):
                raise
        except KeyboardInterrupt:
            LOG.info(_LI('Caught keyboard interrupt. Exiting.'))
            break
        except glare_exc.SIGHUPInterrupt:
            self.reload()
            continue
    eventlet.greenio.shutdown_safe(self.sock)
    self.sock.close()
    LOG.debug('Exited')
def upload_blob_dict(cls, context, af, field_name, blob_key, fd,
                     content_type):
    """Upload binary object as artifact property

    :param context: user context
    :param af: current Artifact definition
    :param blob_key: name of blob key in dict
    :param fd: file descriptor that Glare uses to upload the file
    :param field_name: name of blob dict field
    :param content_type: data content-type
    :return: updated Artifact definition in Glare
    """
    fd = cls.validate_upload(context, af, field_name, fd)
    cls._validate_upload_allowed(context, af, field_name, blob_key)

    LOG.debug("Parameters validation for artifact %(artifact)s blob "
              "upload passed for blob dict %(blob)s with key %(key)s. "
              "Start blob uploading to backend.",
              {'artifact': af.id, 'blob': field_name, 'key': blob_key})

    # Persist a placeholder blob in SAVING state before streaming data.
    blob = {'url': None, 'size': None, 'checksum': None,
            'status': BlobStatus.SAVING, 'external': False,
            'content_type': content_type}
    blob_dict_attr = getattr(af, field_name)
    blob_dict_attr[blob_key] = blob
    cls.db_api.update(context, af.id, {field_name: blob_dict_attr})
    blob_id = getattr(af, field_name)[blob_key]['id']
    try:
        location_uri, size, checksum = store_api.save_blob_to_store(
            blob_id, fd, context, cls._get_max_blob_size(field_name))
        blob.update({'url': location_uri,
                     'status': BlobStatus.ACTIVE,
                     'size': size,
                     'checksum': checksum})
        af_values = cls.db_api.update(
            context, af.id, {field_name: blob_dict_attr})
        # Fix: the original format string ended "with key." and never
        # referenced %(key)s, so the 'key' argument was silently
        # dropped from the log message.
        LOG.info(_LI("Successfully finished blob upload for artifact "
                     "%(artifact)s blob dict field %(blob)s with key "
                     "%(key)s."),
                 {'artifact': af.id, 'blob': field_name,
                  'key': blob_key})
        return cls._init_artifact(context, af_values)
    except Exception:
        # Roll the placeholder entry back out of the dict, re-raise.
        with excutils.save_and_reraise_exception(logger=LOG):
            del blob_dict_attr[blob_key]
            cls.db_api.update(context, af.id,
                              {field_name: blob_dict_attr})
def reactivate(cls, context, af, values):
    """Make Artifact active after de-activation

    :param context: user context
    :param af: definition of de-activated Artifact
    :return: definition of active Artifact
    """
    # validate that came to artifact as updates
    if values != {'status': cls.STATUS.ACTIVE}:
        raise exception.BadRequest(
            _("Only {'status': %s} is allowed in a request "
              "for reactivation.") % cls.STATUS.ACTIVE)

    # Reactivation is only meaningful from the DEACTIVATED state.
    if af.status != cls.STATUS.DEACTIVATED:
        raise exception.InvalidStatusTransition(
            orig=af.status, new=cls.STATUS.ACTIVE)

    LOG.info(_LI("Parameters validation for artifact %(artifact)s "
                 "reactivate passed for request %(request)s."),
             {'artifact': af.id, 'request': context.request_id})
    reactivated_af = cls.db_api.update(context, af.id, values)
    return cls._init_artifact(context, reactivated_af)
def add_blob_location(cls, context, af, field_name, location):
    """Upload binary object as artifact property

    :param context: user context
    :param af: current Artifact definition
    :param field_name: name of blob field
    :param location: blob url
    :return: updated Artifact definition in Glare
    """
    cls._validate_upload_allowed(context, af, field_name)
    LOG.debug("Parameters validation for artifact %(artifact)s location "
              "passed for blob %(blob)s. Start location check for artifact"
              ".", {'artifact': af.id, 'blob': field_name})

    # Record a placeholder blob in SAVING state before validating.
    placeholder = {'url': None, 'size': None, 'checksum': None,
                   'status': BlobStatus.SAVING, 'external': True,
                   'content_type': ""}
    setattr(af, field_name, placeholder)
    blob = cls.db_api.update(
        context, af.id,
        {field_name: getattr(af, field_name)})[field_name]
    try:
        # validate blob location and get size with checksum
        size, checksum, content_type = store_api.get_location_info(
            location, context, cls._get_max_blob_size(field_name))
        blob['size'] = size
        blob['status'] = BlobStatus.ACTIVE
        blob['checksum'] = checksum
        blob['content_type'] = content_type
        setattr(af, field_name, blob)
        updated_af = cls.db_api.update(
            context, af.id, {field_name: getattr(af, field_name)})
        LOG.info(
            _LI("External location %(location)s validated successfully "
                "for artifact %(artifact)s blob %(blob)s"),
            {'location': location, 'artifact': af.id,
             'blob': field_name})
        return cls._init_artifact(context, updated_af)
    except Exception:
        # On failure empty the blob field again, then re-raise.
        with excutils.save_and_reraise_exception(logger=LOG):
            cls.db_api.update(context, af.id, {field_name: None})