def put(self, env):
    """Update an environment.

    :param env: Required. Environment structure to update
    """
    acl.enforce('environments:update', context.ctx())

    if not env.name:
        raise exceptions.InputException(
            'Name of the environment is not provided.'
        )

    LOG.debug("Update environment [name=%s, env=%s]", env.name, cut(env))

    # Validate the raw request body (minus the immutable name field)
    # against the set of fields an update is allowed to carry.
    body_fields = json.loads(wsme_pecan.pecan.request.body.decode())
    body_fields.pop('name')

    self._validate_environment(
        body_fields, ['description', 'variables', 'scope'])

    update_env = rest_utils.rest_retry_on_db_error(db_api.update_environment)

    return resources.Environment.from_db_model(
        update_env(env.name, env.to_dict()))
def put(self, member_id, member_info):
    """Sets the status for a resource member."""
    acl.enforce('members:update', context.ctx())

    LOG.debug(
        "Update resource member status. [resource_id=%s, "
        "member_id=%s, member_info=%s].",
        self.resource_id, member_id, member_info
    )

    if not member_info.status:
        raise exc.WorkflowException("Status must be provided.")

    # Retry transient DB failures when persisting the status change.
    update = rest_utils.rest_retry_on_db_error(db_api.update_resource_member)

    return resources.Member.from_db_model(
        update(
            self.resource_id,
            self.type,
            member_id,
            {'status': member_info.status}
        )
    )
def delete(self, name):
    """Delete cron trigger.

    :param name: Name of the cron trigger to delete.
    """
    acl.enforce('cron_triggers:delete', context.ctx())

    # Use lazy %-style logging arguments instead of eager string
    # interpolation so the message is only rendered when INFO is enabled.
    LOG.info("Delete cron trigger [name=%s]", name)

    db_api.delete_cron_trigger(name)
def delete(self, id):
    """Delete the specified Execution.

    :param id: UUID of the workflow execution to remove.
    """
    ctx = context.ctx()
    acl.enforce('executions:delete', ctx)

    LOG.info("Delete execution [id=%s]", id)

    return db_api.delete_workflow_execution(id)
def put(self, identifier=None, namespace=''):
    """Update one or more workflows.

    :param identifier: Optional. If provided, it's UUID of a workflow.
        Only one workflow can be updated with identifier param.
    :param namespace: Optional. If provided, it's the namespace of the
        workflow/workflows. Currently, namespace cannot be changed.

    The text is allowed to have definitions of multiple workflows.
    In such case, they all will be updated.
    """
    acl.enforce('workflows:update', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    resources.Workflow.validate_scope(scope)

    # Publishing a workflow is a separate privilege.
    if scope == 'public':
        acl.enforce('workflows:publicize', context.ctx())

    LOG.debug("Update workflow(s) [definition=%s]", definition)

    updated = rest_utils.rest_retry_on_db_error(workflows.update_workflows)(
        definition,
        scope=scope,
        identifier=identifier,
        namespace=namespace
    )

    wf_resources = [resources.Workflow.from_db_model(m) for m in updated]

    if identifier:
        return wf_resources[0].to_json()

    return resources.Workflows(workflows=wf_resources).to_json()
def post(self, member_info):
    """Shares the resource to a new member."""
    acl.enforce('members:create', context.ctx())

    LOG.info(
        "Share resource to a member. [resource_id=%s, "
        "resource_type=%s, member_info=%s].",
        self.resource_id, self.type, member_info
    )

    if not member_info.member_id:
        msg = "Member id must be provided."
        raise exc.WorkflowException(msg)

    # Only privately-scoped workflows may be shared.
    wf_def = db_api.get_workflow_definition(self.resource_id)

    if wf_def.scope != 'private':
        msg = "Only private resource could be shared."
        raise exc.WorkflowException(msg)

    member_values = {
        'resource_id': self.resource_id,
        'resource_type': self.type,
        'member_id': member_info.member_id,
        'status': 'pending'
    }

    created = db_api.create_resource_member(member_values)

    return resources.Member.from_dict(created.to_dict())
def post(self, namespace=''):
    """Create a new workbook.

    :param namespace: Optional. The namespace to create the workbook
        in. Workbooks with the same name can be added to a given
        project if they are in two different namespaces.
    """
    acl.enforce('workbooks:create', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    # If "skip_validation" is present in the query string parameters
    # then workflow language validation will be disabled.
    validate = 'skip_validation' not in pecan.request.GET

    resources.Workbook.validate_scope(scope)

    LOG.debug("Create workbook [definition=%s]", definition)

    stored = rest_utils.rest_retry_on_db_error(workbooks.create_workbook_v2)(
        definition,
        namespace=namespace,
        scope=scope,
        validate=validate
    )

    pecan.response.status = 201

    return resources.Workbook.from_db_model(stored).to_json()
def post(self):
    """Create a new action.

    NOTE: This text is allowed to have definitions of multiple actions.
    In this case they all will be created.
    """
    acl.enforce('actions:create', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    pecan.response.status = 201

    if scope not in resources.SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (resources.SCOPE_TYPES.values, scope))

    LOG.info("Create action(s) [definition=%s]", definition)

    # All action definitions are persisted within one transaction.
    with db_api.transaction():
        stored = actions.create_actions(definition, scope=scope)

    action_list = [
        resources.Action.from_dict(db_act.to_dict()) for db_act in stored
    ]

    return resources.Actions(actions=action_list).to_json()
def delete(self, id):
    """Delete the specified action_execution.

    :param id: UUID of action execution to delete
    """
    acl.enforce('action_executions:delete', context.ctx())

    LOG.debug("Delete action_execution [id=%s]", id)

    # Deletion is disabled by default; the operator must switch it on
    # explicitly in the service configuration.
    if not cfg.CONF.api.allow_action_execution_deletion:
        raise exc.NotAllowedException(
            "Action execution deletion is not allowed.")

    with db_api.transaction():
        ax = db_api.get_action_execution(id)

        if ax.task_execution_id:
            raise exc.NotAllowedException(
                "Only ad-hoc action execution can be deleted.")

        if not states.is_completed(ax.state):
            raise exc.NotAllowedException(
                "Only completed action execution can be deleted.")

        return db_api.delete_action_execution(id)
def post(self, namespace=''):
    """Create a new workflow.

    :param namespace: Optional. The namespace to create the workflow
        in. Workflows with the same name can be added to a given
        project if they are in two different namespaces.

    The text is allowed to have definitions of multiple workflows.
    In such case, they all will be created.
    """
    acl.enforce('workflows:create', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    pecan.response.status = 201

    resources.Workflow.validate_scope(scope)

    # Making a workflow public requires an extra privilege.
    if scope == 'public':
        acl.enforce('workflows:publicize', context.ctx())

    LOG.debug("Create workflow(s) [definition=%s]", definition)

    create = rest_utils.rest_retry_on_db_error(workflows.create_workflows)
    stored = create(definition, scope=scope, namespace=namespace)

    return resources.Workflows(
        workflows=[resources.Workflow.from_db_model(m) for m in stored]
    ).to_json()
def post(self):
    """Create a new action.

    NOTE: This text is allowed to have definitions of multiple actions.
    In this case they all will be created.
    """
    acl.enforce('actions:create', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    pecan.response.status = 201

    resources.Action.validate_scope(scope)

    if scope == 'public':
        acl.enforce('actions:publicize', context.ctx())

    LOG.debug("Create action(s) [definition=%s]", definition)

    def _persist_actions():
        # All definitions are stored atomically within one transaction.
        with db_api.transaction():
            return actions.create_actions(definition, scope=scope)

    stored = rest_utils.rest_retry_on_db_error(_persist_actions)()

    return resources.Actions(
        actions=[resources.Action.from_db_model(m) for m in stored]
    ).to_json()
def post(self, member_info):
    """Shares the resource to a new member."""
    acl.enforce('members:create', context.ctx())

    LOG.info(
        "Share resource to a member. [resource_id=%s, "
        "resource_type=%s, member_info=%s].",
        self.resource_id, self.type, member_info)

    if not member_info.member_id:
        raise exc.WorkflowException("Member id must be provided.")

    with db_api.transaction():
        # Sharing is only meaningful for private workflows.
        wf_def = db_api.get_workflow_definition(self.resource_id)

        if wf_def.scope != 'private':
            raise exc.WorkflowException(
                "Only private resource could be shared.")

        created = db_api.create_resource_member({
            'resource_id': self.resource_id,
            'resource_type': self.type,
            'member_id': member_info.member_id,
            'status': 'pending'
        })

    return resources.Member.from_db_model(created)
def put(self, identifier=None):
    """Update one or more workflows.

    :param identifier: Optional. If provided, it's UUID of a workflow.
        Only one workflow can be updated with identifier param.

    The text is allowed to have definitions of multiple workflows.
    In this case they all will be updated.
    """
    acl.enforce('workflows:update', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    if scope not in SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (SCOPE_TYPES.values, scope)
        )

    # Lazy logging args instead of eager %-interpolation so the
    # (potentially large) definition is only rendered when INFO is on.
    LOG.info("Update workflow(s) [definition=%s]", definition)

    db_wfs = workflows.update_workflows(
        definition,
        scope=scope,
        identifier=identifier
    )

    models_dicts = [db_wf.to_dict() for db_wf in db_wfs]
    workflow_list = [Workflow.from_dict(wf) for wf in models_dicts]

    return (workflow_list[0].to_json() if identifier
            else Workflows(workflows=workflow_list).to_json())
def post(self):
    """Create a new workflow.

    NOTE: The text is allowed to have definitions of multiple workflows.
    In this case they all will be created.
    """
    acl.enforce('workflows:create', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    pecan.response.status = 201

    if scope not in resources.SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (resources.SCOPE_TYPES.values, scope)
        )

    # Lazy logging args instead of eager %-interpolation.
    LOG.info("Create workflow(s) [definition=%s]", definition)

    db_wfs = workflows.create_workflows(definition, scope=scope)
    models_dicts = [db_wf.to_dict() for db_wf in db_wfs]

    workflow_list = [
        resources.Workflow.from_dict(wf) for wf in models_dicts
    ]

    return resources.Workflows(workflows=workflow_list).to_json()
def delete(self, name):
    """Delete the named environment.

    :param name: Name of the environment to delete.
    """
    acl.enforce('environments:delete', context.ctx())

    # Lazy logging args instead of eager %-interpolation.
    LOG.info("Delete environment [name=%s]", name)

    db_api.delete_environment(name)
def post(self, name, scope='private', namespace=''):
    """Creates new code sources.

    :param name: Code source name (i.e. the name of the module).
    :param scope: Optional. Scope (private or public).
    :param namespace: Optional. The namespace to create the code
        sources in.
    """
    acl.enforce('code_sources:create', context.ctx())

    # The module source is the raw request body text.
    src_text = pecan.request.text

    LOG.debug('Creating code source [names=%s, scope=%s, namespace=%s]',
              name, scope, namespace)

    create = rest_utils.rest_retry_on_db_error(db_api.create_code_source)

    created = create({
        'name': name,
        'content': src_text,
        'namespace': namespace,
        'scope': scope,
        'version': 1,
    })

    pecan.response.status = 201

    return resources.CodeSource.from_db_model(created).to_json()
def get(self, id):
    """Return the specified Execution.

    :param id: UUID of the workflow execution to fetch.
    """
    acl.enforce("executions:get", context.ctx())

    # Lazy logging args instead of eager %-interpolation.
    LOG.info("Fetch execution [id=%s]", id)

    return resources.Execution.from_dict(
        db_api.get_workflow_execution(id).to_dict())
def get_all(self, marker=None, limit=None, sort_keys='created_at',
            sort_dirs='asc', fields='', name=None, description=None,
            variables=None, scope=None, created_at=None, updated_at=None):
    """Return all environments.

    Where project_id is the same as the requester or
    project_id is different but the scope is public.

    :param marker: Optional. Pagination marker for large data sets.
    :param limit: Optional. Maximum number of resources to return in a
        single result. Default value is None for backward compatibility.
    :param sort_keys: Optional. Columns to sort results by.
        Default: created_at, which is backward compatible.
    :param sort_dirs: Optional. Directions to sort corresponding to
        sort_keys, "asc" or "desc" can be chosen. Default: desc. The
        length of sort_dirs can be equal or less than that of sort_keys.
    :param fields: Optional. A specified list of fields of the resource
        to be returned. 'id' will be included automatically in fields
        if it's provided, since it will be used when constructing
        'next' link.
    :param name: Optional. Keep only resources with a specific name.
    :param description: Optional. Keep only resources with a specific
        description.
    :param variables: Optional. Keep only resources with specific
        variables.
    :param scope: Optional. Keep only resources with a specific scope.
    :param created_at: Optional. Keep only resources created at a
        specific time and date.
    :param updated_at: Optional. Keep only resources with specific
        latest update time and date.
    """
    acl.enforce('environments:list', context.ctx())

    filters = rest_utils.filters_to_dict(
        created_at=created_at,
        name=name,
        updated_at=updated_at,
        description=description,
        variables=variables,
        scope=scope
    )

    # Reconstructed log statement: the message is one string with lazy
    # logging arguments (the original literal was corrupted/split).
    LOG.info("Fetch environments. marker=%s, limit=%s, sort_keys=%s, "
             "sort_dirs=%s, filters=%s",
             marker, limit, sort_keys, sort_dirs, filters)

    return rest_utils.get_all(
        resources.Environments,
        resources.Environment,
        db_api.get_environments,
        db_api.get_environment,
        marker=marker,
        limit=limit,
        sort_keys=sort_keys,
        sort_dirs=sort_dirs,
        fields=fields,
        **filters
    )
def put(self, identifier=None):
    """Update one or more actions.

    NOTE: This text is allowed to have definitions of multiple actions.
    In this case they all will be updated.
    """
    acl.enforce('actions:update', context.ctx())

    definition = pecan.request.text

    LOG.info("Update action(s) [definition=%s]", definition)

    scope = pecan.request.GET.get('scope', 'private')

    if scope not in resources.SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (resources.SCOPE_TYPES.values, scope)
        )

    # All updates happen atomically within one transaction.
    with db_api.transaction():
        updated = actions.update_actions(
            definition,
            scope=scope,
            identifier=identifier
        )

    action_list = [
        resources.Action.from_dict(db_act.to_dict()) for db_act in updated
    ]

    return resources.Actions(actions=action_list).to_json()
def post(self, cron_trigger):
    """Creates a new cron trigger.

    :param cron_trigger: Required. Cron trigger structure.
    """
    acl.enforce('cron_triggers:create', context.ctx())

    LOG.debug('Create cron trigger: %s', cron_trigger)

    spec = cron_trigger.to_dict()

    create = rest_utils.rest_retry_on_db_error(triggers.create_cron_trigger)

    created = create(
        name=spec['name'],
        workflow_name=spec.get('workflow_name'),
        workflow_input=spec.get('workflow_input'),
        workflow_params=spec.get('workflow_params'),
        pattern=spec.get('pattern'),
        first_time=spec.get('first_execution_time'),
        count=spec.get('remaining_executions'),
        workflow_id=spec.get('workflow_id')
    )

    return resources.CronTrigger.from_db_model(created)
def put(self, member_id, member_info):
    """Sets the status for a resource member."""
    acl.enforce('members:update', context.ctx())

    LOG.debug(
        "Update resource member status. [resource_id=%s, "
        "member_id=%s, member_info=%s].",
        self.resource_id, member_id, member_info
    )

    if not member_info.status:
        msg = "Status must be provided."
        raise exc.WorkflowException(msg)

    new_values = {'status': member_info.status}

    # The update is retried on transient DB errors.
    updated = rest_utils.rest_retry_on_db_error(
        db_api.update_resource_member
    )(self.resource_id, self.type, member_id, new_values)

    return resources.Member.from_db_model(updated)
def post(self, action_ex):
    """Create new action_execution.

    :param action_ex: Action to execute
    """
    acl.enforce('action_executions:create', context.ctx())

    LOG.debug("Create action_execution [action_execution=%s]", action_ex)

    if not action_ex.name:
        raise exc.InputException(
            "Please provide at least action name to run action."
        )

    run_params = action_ex.params or {}

    # Hand the ad-hoc action over to the engine for execution.
    values = rpc.get_engine_client().start_action(
        action_ex.name,
        action_ex.input or {},
        description=action_ex.description or None,
        namespace=action_ex.workflow_namespace or '',
        **run_params
    )

    return resources.ActionExecution.from_dict(values)
def delete(self, identifier):
    """Delete a workflow.

    :param identifier: Name or UUID of the workflow to delete.
    """
    acl.enforce('workflows:delete', context.ctx())

    # Lazy logging args instead of eager %-interpolation.
    LOG.info("Delete workflow [identifier=%s]", identifier)

    with db_api.transaction():
        db_api.delete_workflow_definition(identifier)
def post(self, action_ex):
    """Create new action_execution.

    :param action_ex: Action to execute
    """
    acl.enforce('action_executions:create', context.ctx())

    LOG.debug("Create action_execution [action_execution=%s]", action_ex)

    name = action_ex.name

    if not name:
        raise exc.InputException(
            "Please provide at least action name to run action."
        )

    engine = rpc.get_engine_client()

    # Delegate the ad-hoc action run to the engine.
    values = engine.start_action(
        name,
        action_ex.input or {},
        description=action_ex.description or None,
        **(action_ex.params or {})
    )

    return resources.ActionExecution.from_dict(values)
def put(self, id, event_trigger):
    """Updates an existing event trigger.

    The exchange, topic and event can not be updated. The right way to
    change them is to delete the event trigger first, then create a new
    event trigger with new params.
    """
    acl.enforce('event_triggers:update', auth_ctx.ctx())

    values = event_trigger.to_dict()

    # Reject any attempt to modify the immutable fields.
    for field in UPDATE_NOT_ALLOWED:
        if values.get(field):
            raise exc.EventTriggerException(
                "Can not update fields %s of event trigger." %
                UPDATE_NOT_ALLOWED)

    LOG.debug('Update event trigger: [id=%s, values=%s]', id, values)

    def _do_update():
        with db_api.transaction():
            # ensure that event trigger exists
            db_api.get_event_trigger(id)

            return triggers.update_event_trigger(id, values)

    db_model = rest_utils.rest_retry_on_db_error(_do_update)()

    return resources.EventTrigger.from_db_model(db_model)
def delete(self, id):
    """Delete the specified action_execution.

    :param id: UUID of action execution to delete
    """
    acl.enforce('action_executions:delete', context.ctx())

    LOG.debug("Delete action_execution [id=%s]", id)

    # The operator must explicitly enable deletion in the config.
    if not cfg.CONF.api.allow_action_execution_deletion:
        raise exc.NotAllowedException(
            "Action execution deletion is not allowed."
        )

    with db_api.transaction():
        target = db_api.get_action_execution(id)

        # Action executions spawned by a task belong to their workflow
        # and can't be removed individually.
        if target.task_execution_id:
            raise exc.NotAllowedException(
                "Only ad-hoc action execution can be deleted."
            )

        if not states.is_completed(target.state):
            raise exc.NotAllowedException(
                "Only completed action execution can be deleted."
            )

        return db_api.delete_action_execution(id)
def delete(self, identifier):
    """Delete a workflow.

    :param identifier: Name or UUID of the workflow to delete.
    """
    acl.enforce('workflows:delete', context.ctx())

    # Pass identifier as a lazy logging argument rather than
    # %-formatting the string eagerly.
    LOG.info("Delete workflow [identifier=%s]", identifier)

    with db_api.transaction():
        db_api.delete_workflow_definition(identifier)
def get_all(self, marker=None, limit=None, sort_keys='created_at',
            sort_dirs='asc', fields='', all_projects=False, **filters):
    """Return all event triggers."""
    acl.enforce('event_triggers:list', auth_ctx.ctx())

    # Listing across all projects is a separate privilege.
    if all_projects:
        acl.enforce('event_triggers:list:all_projects', auth_ctx.ctx())

    LOG.debug(
        "Fetch event triggers. marker=%s, limit=%s, sort_keys=%s, "
        "sort_dirs=%s, fields=%s, all_projects=%s, filters=%s",
        marker, limit, sort_keys, sort_dirs, fields, all_projects, filters
    )

    list_kwargs = dict(
        resource_function=None,
        marker=marker,
        limit=limit,
        sort_keys=sort_keys,
        sort_dirs=sort_dirs,
        fields=fields,
        all_projects=all_projects,
    )

    return rest_utils.get_all(
        resources.EventTriggers,
        resources.EventTrigger,
        db_api.get_event_triggers,
        db_api.get_event_trigger,
        **list_kwargs,
        **filters
    )
def post(self, namespace=''):
    """Create a new workflow.

    :param namespace: Optional. The namespace to create the workflow
        in. Workflows with the same name can be added to a given
        project if they are in two different namespaces.

    The text is allowed to have definitions of multiple workflows.
    In such case, they all will be created.
    """
    acl.enforce('workflows:create', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    pecan.response.status = 201

    resources.Workflow.validate_scope(scope)

    if scope == 'public':
        # Publishing workflows requires a dedicated privilege.
        acl.enforce('workflows:publicize', context.ctx())

    LOG.debug("Create workflow(s) [definition=%s]", definition)

    db_wfs = rest_utils.rest_retry_on_db_error(workflows.create_workflows)(
        definition, scope=scope, namespace=namespace)

    wf_list = []

    for db_wf in db_wfs:
        wf_list.append(resources.Workflow.from_db_model(db_wf))

    return resources.Workflows(workflows=wf_list).to_json()
def post(self, event_trigger):
    """Creates a new event trigger."""
    acl.enforce('event_triggers:create', auth_ctx.ctx())

    spec = event_trigger.to_dict()

    # Every mandatory creation parameter must carry a truthy value.
    provided = {k for k in spec if spec[k]}

    if CREATE_MANDATORY - provided:
        raise exc.EventTriggerException(
            "Params %s must be provided for creating event trigger." %
            CREATE_MANDATORY
        )

    if spec.get('scope') == 'public':
        acl.enforce('event_triggers:create:public', auth_ctx.ctx())

    LOG.debug('Create event trigger: %s', spec)

    created = rest_utils.rest_retry_on_db_error(
        triggers.create_event_trigger
    )(
        name=spec.get('name', ''),
        exchange=spec.get('exchange'),
        topic=spec.get('topic'),
        event=spec.get('event'),
        workflow_id=spec.get('workflow_id'),
        scope=spec.get('scope'),
        workflow_input=spec.get('workflow_input'),
        workflow_params=spec.get('workflow_params'),
    )

    return resources.EventTrigger.from_db_model(created)
def put(self, identifier=None):
    """Update one or more actions.

    NOTE: This text is allowed to have definitions of multiple actions.
    In this case they all will be updated.
    """
    acl.enforce('actions:update', context.ctx())

    definition = pecan.request.text

    LOG.info("Update action(s) [definition=%s]", definition)

    scope = pecan.request.GET.get('scope', 'private')

    if scope not in resources.SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (resources.SCOPE_TYPES.values, scope))

    # Persist all updated definitions atomically.
    with db_api.transaction():
        db_acts = actions.update_actions(
            definition, scope=scope, identifier=identifier)

    dicts = (m.to_dict() for m in db_acts)
    action_list = [resources.Action.from_dict(d) for d in dicts]

    return resources.Actions(actions=action_list).to_json()
def put(self, id, event_trigger):
    """Updates an existing event trigger.

    The exchange, topic and event can not be updated. The right way to
    change them is to delete the event trigger first, then create a new
    event trigger with new params.
    """
    acl.enforce('event_triggers:update', auth_ctx.ctx())

    values = event_trigger.to_dict()

    # Immutable fields must not be present with a value in the body.
    if any(values.get(f) for f in UPDATE_NOT_ALLOWED):
        raise exc.EventTriggerException(
            "Can not update fields %s of event trigger." %
            UPDATE_NOT_ALLOWED
        )

    LOG.debug('Update event trigger: [id=%s, values=%s]', id, values)

    @rest_utils.rest_retry_on_db_error
    def _apply_update():
        with db_api.transaction():
            # ensure that event trigger exists
            db_api.get_event_trigger(id)

            return triggers.update_event_trigger(id, values)

    return resources.EventTrigger.from_db_model(_apply_update())
def post(self, namespace=''):
    """Create a new workbook.

    :param namespace: Optional. The namespace to create the workbook
        in. Workbooks with the same name can be added to a given
        project if they are in two different namespaces.
    """
    acl.enforce('workbooks:create', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    resources.Workbook.validate_scope(scope)

    LOG.debug("Create workbook [definition=%s]", definition)

    create = rest_utils.rest_retry_on_db_error(workbooks.create_workbook_v2)
    wb_db = create(definition, namespace=namespace, scope=scope)

    pecan.response.status = 201

    return resources.Workbook.from_db_model(wb_db).to_json()
def get_all(self):
    """Return all services."""
    acl.enforce('services:list', context.ctx())

    LOG.info("Fetch services.")

    # The service registry lives in the coordination backend (tooz);
    # without one configured there is nothing to report.
    if not cfg.CONF.coordination.backend_url:
        raise exc.CoordinationException("Service API is not supported.")

    service_coordinator = coordination.get_service_coordinator()

    if not service_coordinator.is_active():
        raise exc.CoordinationException(
            "Failed to connect to coordination backend."
        )

    services_list = []
    # One coordination group per launchable service type, named
    # '<launch_option>_group'.
    service_group = ['%s_group' % i for i in launch.LAUNCH_OPTIONS]

    try:
        for group in service_group:
            members = service_coordinator.get_members(group)

            services_list.extend(
                [Service.from_dict({'type': group, 'name': member})
                 for member in members]
            )
    except tooz.coordination.ToozError as e:
        # In the scenario of network interruption or manually shutdown
        # connection shutdown, ToozError will be raised.
        raise exc.CoordinationException(
            "Failed to get service members from coordination backend. %s"
            % six.text_type(e)
        )

    return Services(services=services_list)
def delete(self, id, force=False):
    """Delete the specified Execution.

    :param id: UUID of execution to delete.
    :param force: Optional. Force the deletion of unfinished executions.
                  Default: false. While the api is backward compatible
                  the behaviour is not the same. The new default is the
                  safer option
    """
    acl.enforce('executions:delete', context.ctx())

    LOG.debug("Delete execution [id=%s]", id)

    if not force:
        # Fetch only the state column to avoid loading the whole
        # (potentially large) execution object.
        state = db_api.get_workflow_execution(
            id, fields=(db_models.WorkflowExecution.state, ))[0]

        if not states.is_completed(state):
            raise exc.NotAllowedException(
                "Only completed executions can be deleted. "
                "Use --force to override this. "
                "Execution {} is in {} state".format(id, state))

    # Retry on transient DB errors since deletion may race with the
    # engine updating the same execution.
    return rest_utils.rest_retry_on_db_error(
        db_api.delete_workflow_execution)(id)
def post(self, wf_ex):
    """Create a new Execution.

    :param wf_ex: Execution object with input content.
    """
    acl.enforce('executions:create', context.ctx())

    # Lazy logging args instead of eager %-interpolation so the
    # execution object is only rendered when INFO logging is enabled.
    LOG.info('Create execution [execution=%s]', wf_ex)

    engine = rpc.get_engine_client()
    exec_dict = wf_ex.to_dict()

    if not (exec_dict.get('workflow_id')
            or exec_dict.get('workflow_name')):
        raise exc.WorkflowException(
            "Workflow ID or workflow name must be provided. Workflow ID is"
            " recommended."
        )

    result = engine.start_workflow(
        exec_dict.get('workflow_id', exec_dict.get('workflow_name')),
        exec_dict.get('input'),
        exec_dict.get('description', ''),
        **exec_dict.get('params') or {}
    )

    return Execution.from_dict(result)
def put(self, namespace=''):
    """Update a workbook.

    :param namespace: Optional. Namespace of workbook to update.
    """
    acl.enforce('workbooks:update', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    # If "skip_validation" is present in the query string parameters
    # then workflow language validation will be disabled.
    validate = 'skip_validation' not in pecan.request.GET

    resources.Workbook.validate_scope(scope)

    LOG.debug("Update workbook [definition=%s]", definition)

    update = rest_utils.rest_retry_on_db_error(workbooks.update_workbook_v2)

    wb_db = update(
        definition,
        namespace=namespace,
        scope=scope,
        validate=validate
    )

    return resources.Workbook.from_db_model(wb_db).to_json()
def put(self, id, action_ex):
    """Update the specified action_execution.

    :param id: UUID of the action execution to complete.
    :param action_ex: Action execution carrying the final state/output.
    """
    acl.enforce('action_executions:update', context.ctx())

    # Lazy logging args instead of eager %-interpolation.
    LOG.info(
        "Update action_execution [id=%s, action_execution=%s]",
        id, action_ex
    )

    output = action_ex.output

    if action_ex.state == states.SUCCESS:
        result = wf_utils.Result(data=output)
    elif action_ex.state == states.ERROR:
        if not output:
            output = 'Unknown error'

        result = wf_utils.Result(error=output)
    else:
        # Fixed typo in the error message ("on of" -> "one of").
        raise exc.InvalidResultException(
            "Error. Expected one of %s, actual: %s" %
            ([states.SUCCESS, states.ERROR], action_ex.state)
        )

    values = rpc.get_engine_client().on_action_complete(id, result)

    return resources.ActionExecution.from_dict(values)
def post(self):
    """Create a new workflow.

    NOTE: The text is allowed to have definitions of multiple workflows.
    In this case they all will be created.
    """
    acl.enforce('workflows:create', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    pecan.response.status = 201

    if scope not in resources.SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (resources.SCOPE_TYPES.values, scope))

    # Lazy logging args instead of eager %-interpolation so the
    # (potentially large) definition is only rendered when INFO is on.
    LOG.info("Create workflow(s) [definition=%s]", definition)

    db_wfs = workflows.create_workflows(definition, scope=scope)
    models_dicts = [db_wf.to_dict() for db_wf in db_wfs]

    workflow_list = [
        resources.Workflow.from_dict(wf) for wf in models_dicts
    ]

    return resources.Workflows(workflows=workflow_list).to_json()
def get(self, task_execution_id, action_ex_id):
    """Return the specified action_execution.

    :param task_execution_id: UUID of the owning task execution
        (used for URL routing; the lookup itself only needs the
        action execution id).
    :param action_ex_id: UUID of the action execution to fetch.
    """
    acl.enforce('action_executions:get', context.ctx())

    # Lazy logging args instead of eager %-interpolation.
    LOG.info("Fetch action_execution [id=%s]", action_ex_id)

    return _get_action_execution(action_ex_id)
def put(self, identifier=None):
    """Update one or more workflows.

    :param identifier: Optional. If provided, it's UUID of a workflow.
        Only one workflow can be updated with identifier param.

    The text is allowed to have definitions of multiple workflows.
    In this case they all will be updated.
    """
    acl.enforce('workflows:update', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    if scope not in resources.SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (resources.SCOPE_TYPES.values, scope))

    # Lazy logging args instead of eager %-interpolation.
    LOG.info("Update workflow(s) [definition=%s]", definition)

    db_wfs = workflows.update_workflows(definition,
                                        scope=scope,
                                        identifier=identifier)

    models_dicts = [db_wf.to_dict() for db_wf in db_wfs]

    workflow_list = [
        resources.Workflow.from_dict(wf) for wf in models_dicts
    ]

    return (workflow_list[0].to_json() if identifier
            else resources.Workflows(workflows=workflow_list).to_json())
def delete(self, name):
    """Delete the named workbook.

    :param name: Name of the workbook to delete.
    """
    acl.enforce('workbooks:delete', context.ctx())

    # Lazy logging args instead of eager %-interpolation.
    LOG.info("Delete workbook [name=%s]", name)

    db_api.delete_workbook(name)
def post(self, event_trigger):
    """Creates a new event trigger."""
    acl.enforce('event_triggers:create', auth_ctx.ctx())

    values = event_trigger.to_dict()

    # Every mandatory creation parameter must carry a truthy value.
    input_keys = [k for k in values if values[k]]
    missing = CREATE_MANDATORY - set(input_keys)

    if missing:
        raise exc.EventTriggerException(
            "Params %s must be provided for creating event trigger." %
            CREATE_MANDATORY)

    if values.get('scope') == 'public':
        acl.enforce('event_triggers:create:public', auth_ctx.ctx())

    LOG.debug('Create event trigger: %s', values)

    create = rest_utils.rest_retry_on_db_error(triggers.create_event_trigger)

    db_model = create(
        name=values.get('name', ''),
        exchange=values.get('exchange'),
        topic=values.get('topic'),
        event=values.get('event'),
        workflow_id=values.get('workflow_id'),
        scope=values.get('scope'),
        workflow_input=values.get('workflow_input'),
        workflow_params=values.get('workflow_params'),
    )

    return resources.EventTrigger.from_db_model(db_model)
def put(self, id, action_ex):
    """Update the specified action_execution.

    :param id: UUID of action execution to update
    :param action_ex: Action execution for update
    """
    acl.enforce('action_executions:update', context.ctx())

    LOG.debug("Update action_execution [id=%s, action_execution=%s]",
              id, action_ex)

    if action_ex.state not in SUPPORTED_TRANSITION_STATES:
        raise exc.InvalidResultException(
            "Error. Expected one of %s, actual: %s" %
            (SUPPORTED_TRANSITION_STATES, action_ex.state))

    # A terminal state (SUCCESS/ERROR/CANCELLED) is reported to the
    # engine as a completion with a corresponding Result object.
    if states.is_completed(action_ex.state):
        output = action_ex.output

        if action_ex.state == states.SUCCESS:
            result = ml_actions.Result(data=output)
        elif action_ex.state == states.ERROR:
            if not output:
                output = 'Unknown error'

            result = ml_actions.Result(error=output)
        elif action_ex.state == states.CANCELLED:
            result = ml_actions.Result(cancel=True)

        values = rpc.get_engine_client().on_action_complete(id, result)

    # Non-terminal transitions (PAUSED/RUNNING) only update the state.
    # The guard above guarantees exactly one of the two branches runs,
    # so 'values' is always bound before the return.
    if action_ex.state in [states.PAUSED, states.RUNNING]:
        state = action_ex.state
        values = rpc.get_engine_client().on_action_update(id, state)

    return resources.ActionExecution.from_dict(values)
def delete(self, id, force=False):
    """Delete the specified Execution.

    :param id: UUID of execution to delete.
    :param force: Optional. Force the deletion of unfinished executions.
                  Default: false. While the api is backward compatible
                  the behaviour is not the same. The new default is the
                  safer option
    """
    acl.enforce('executions:delete', context.ctx())

    LOG.debug("Delete execution [id=%s]", id)

    if not force:
        # Only the state column is needed for the completeness check.
        wanted = (db_models.WorkflowExecution.state,)
        state = db_api.get_workflow_execution(id, fields=wanted)[0]

        if not states.is_completed(state):
            msg = ("Only completed executions can be deleted. "
                   "Use --force to override this. "
                   "Execution {} is in {} state".format(id, state))
            raise exc.NotAllowedException(msg)

    delete_fn = rest_utils.rest_retry_on_db_error(
        db_api.delete_workflow_execution)

    return delete_fn(id)
def put(self, id, event_trigger):
    """Updates an existing event trigger.

    The exchange, topic and event can not be updated. The right way to
    change them is to delete the event trigger first, then create a new
    event trigger with new params.

    :param id: UUID of the event trigger to update.
    :param event_trigger: Event trigger structure with new values.
    """
    acl.enforce('event_trigger:update', auth_ctx.ctx())

    values = event_trigger.to_dict()

    # Reject any attempt to change an immutable field.
    for field in UPDATE_NOT_ALLOWED:
        if values.get(field, None):
            raise exc.EventTriggerException(
                "Can not update fields %s of event trigger." %
                UPDATE_NOT_ALLOWED
            )

    LOG.info('Update event trigger: [id=%s, values=%s]', id, values)

    with db_api.transaction():
        db_api.ensure_event_trigger_exists(id)

        db_model = triggers.update_event_trigger(id, values)

    # Build the resource directly from the DB model (from_db_model) for
    # consistency with the other event trigger endpoints, instead of the
    # indirect from_dict(db_model.to_dict()) round trip.
    return resources.EventTrigger.from_db_model(db_model)
def put(self, id, action_ex):
    """Update the specified action_execution.

    :param id: UUID of the action execution to update.
    :param action_ex: Action execution with target state and output.
    """
    acl.enforce('action_executions:update', context.ctx())

    # Lazy %-style arguments: the message is only formatted if the
    # record is actually emitted.
    LOG.info(
        "Update action_execution [id=%s, action_execution=%s]",
        id, action_ex
    )

    output = action_ex.output

    if action_ex.state == states.SUCCESS:
        result = wf_utils.Result(data=output)
    elif action_ex.state == states.ERROR:
        if not output:
            output = 'Unknown error'

        result = wf_utils.Result(error=output)
    else:
        # Fixed typo in the user-facing message ("on of" -> "one of").
        raise exc.InvalidResultException(
            "Error. Expected one of %s, actual: %s" %
            ([states.SUCCESS, states.ERROR], action_ex.state)
        )

    values = rpc.get_engine_client().on_action_complete(id, result)

    return resources.ActionExecution.from_dict(values)
def post(self, event_trigger):
    """Creates a new event trigger.

    :param event_trigger: Event trigger structure to create.
    """
    acl.enforce('event_trigger:create', auth_ctx.ctx())

    values = event_trigger.to_dict()
    input_keys = [k for k in values if values[k]]

    # All mandatory creation params must be present and non-empty.
    if CREATE_MANDATORY - set(input_keys):
        raise exc.EventTriggerException(
            "Params %s must be provided for creating event trigger." %
            CREATE_MANDATORY
        )

    LOG.info('Create event trigger: %s', values)

    db_model = triggers.create_event_trigger(
        values.get('name', ''),
        values.get('exchange'),
        values.get('topic'),
        values.get('event'),
        values.get('workflow_id'),
        workflow_input=values.get('workflow_input'),
        workflow_params=values.get('workflow_params'),
    )

    # Build the resource directly from the DB model (from_db_model) for
    # consistency with the other event trigger endpoints, instead of the
    # indirect from_dict(db_model.to_dict()) round trip.
    return resources.EventTrigger.from_db_model(db_model)
def post(self, event_trigger):
    """Create a new event trigger."""
    acl.enforce('event_trigger:create', auth_ctx.ctx())

    values = event_trigger.to_dict()

    # Only keys carrying truthy values count as provided.
    provided_keys = {key for key in values if values[key]}

    if CREATE_MANDATORY - provided_keys:
        raise exc.EventTriggerException(
            "Params %s must be provided for creating event trigger." %
            CREATE_MANDATORY)

    LOG.info('Create event trigger: %s', values)

    db_model = triggers.create_event_trigger(
        values.get('name', ''),
        values.get('exchange'),
        values.get('topic'),
        values.get('event'),
        values.get('workflow_id'),
        workflow_input=values.get('workflow_input'),
        workflow_params=values.get('workflow_params'),
    )

    return resources.EventTrigger.from_db_model(db_model)
def put(self, id, event_trigger):
    """Update an existing event trigger.

    The exchange, topic and event can not be updated. The right way to
    change them is to delete the event trigger first, then create a new
    event trigger with new params.
    """
    acl.enforce('event_trigger:update', auth_ctx.ctx())

    values = event_trigger.to_dict()

    # A truthy value on any immutable field means the client attempted
    # to change it.
    touched_immutable = [
        f for f in UPDATE_NOT_ALLOWED if values.get(f, None)
    ]

    if touched_immutable:
        raise exc.EventTriggerException(
            "Can not update fields %s of event trigger." %
            UPDATE_NOT_ALLOWED)

    LOG.info('Update event trigger: [id=%s, values=%s]', id, values)

    with db_api.transaction():
        db_api.ensure_event_trigger_exists(id)

        db_model = triggers.update_event_trigger(id, values)

    return resources.EventTrigger.from_db_model(db_model)
def delete(self, id):
    """Delete the specified Execution."""
    acl.enforce('executions:delete', context.ctx())

    LOG.info("Delete execution [id=%s]", id)

    # Propagate whatever the DB layer returns to the caller.
    result = db_api.delete_workflow_execution(id)

    return result
def get_all(self, marker=None, limit=None, sort_keys='created_at',
            sort_dirs='asc', fields='', **filters):
    """Return all event triggers."""
    acl.enforce('event_trigger:list', auth_ctx.ctx())

    LOG.info(
        "Fetch event triggers. marker=%s, limit=%s, sort_keys=%s, "
        "sort_dirs=%s, fields=%s, filters=%s",
        marker, limit, sort_keys, sort_dirs, fields, filters)

    # Delegate pagination, sorting and filtering to the shared helper.
    list_kwargs = dict(
        marker=marker,
        limit=limit,
        sort_keys=sort_keys,
        sort_dirs=sort_dirs,
        fields=fields,
    )
    list_kwargs.update(filters)

    return rest_utils.get_all(
        resources.EventTriggers,
        resources.EventTrigger,
        db_api.get_event_triggers,
        db_api.get_event_trigger,
        resource_function=None,
        **list_kwargs
    )
def put(self, identifier=None):
    """Update one or more actions.

    :param identifier: Optional. If provided, it's UUID or name of an
        action. Only one action can be updated with identifier param.

    NOTE: This text is allowed to have definitions of multiple actions.
    In this case they all will be updated.
    """
    acl.enforce('actions:update', context.ctx())

    definition = pecan.request.text

    LOG.debug("Update action(s) [definition=%s]", definition)

    scope = pecan.request.GET.get('scope', 'private')

    resources.Action.validate_scope(scope)

    @rest_utils.rest_retry_on_db_error
    def _update_actions():
        # A single transaction so a failure rolls back the whole batch.
        with db_api.transaction():
            return actions.update_actions(
                definition,
                scope=scope,
                identifier=identifier
            )

    updated = [
        resources.Action.from_db_model(db_act)
        for db_act in _update_actions()
    ]

    return resources.Actions(actions=updated).to_json()
def delete(self, name):
    """Delete the named workbook.

    :param name: Name of the workbook to delete.
    """
    acl.enforce('workbooks:delete', context.ctx())

    # Use lazy %-style log arguments instead of eager string formatting,
    # consistent with the other endpoints in this module.
    LOG.info("Delete workbook [name=%s]", name)

    db_api.delete_workbook(name)
def post(self): """Create a new action. NOTE: This text is allowed to have definitions of multiple actions. In this case they all will be created. """ acl.enforce('actions:create', context.ctx()) definition = pecan.request.text scope = pecan.request.GET.get('scope', 'private') pecan.response.status = 201 resources.Action.validate_scope(scope) LOG.debug("Create action(s) [definition=%s]", definition) @rest_utils.rest_retry_on_db_error def _create_action_definitions(): with db_api.transaction(): return actions.create_actions(definition, scope=scope) db_acts = _create_action_definitions() action_list = [ resources.Action.from_db_model(db_act) for db_act in db_acts ] return resources.Actions(actions=action_list).to_json()
def put(self, id, task):
    """Update the specified task execution.

    Only one kind of update is supported: moving a task that is in ERROR
    back to RUNNING, i.e. asking the engine to rerun it (optionally
    without resetting a with-items task).

    :param id: Task execution ID.
    :param task: Task execution object.
    """
    acl.enforce('tasks:update', context.ctx())

    LOG.debug("Update task execution [id=%s, task=%s]", id, task)

    @rest_utils.rest_retry_on_db_error
    def _read_task_params(id, task):
        # Read everything needed for validation inside one transaction
        # (retried as a whole on transient DB errors) so the values are
        # mutually consistent.
        with db_api.transaction():
            task_ex = db_api.get_task_execution(id)
            task_spec = spec_parser.get_task_spec(task_ex.spec)
            task_name = task.name or None
            reset = task.reset
            env = task.env or None

            # If the client supplied a task name it must match the
            # stored execution.
            if task_name and task_name != task_ex.name:
                raise exc.WorkflowException('Task name does not match.')

            wf_ex = db_api.get_workflow_execution(
                task_ex.workflow_execution_id)

            return env, reset, task_ex, task_spec, wf_ex

    env, reset, task_ex, task_spec, wf_ex = _read_task_params(id, task)

    wf_name = task.workflow_name or None

    # Optional consistency check against the owning workflow execution.
    if wf_name and wf_name != wf_ex.name:
        raise exc.WorkflowException('Workflow name does not match.')

    # The only accepted target state is RUNNING (rerun).
    if task.state != states.RUNNING:
        raise exc.WorkflowException(
            'Invalid task state. '
            'Only updating task to rerun is supported.')

    # Rerun is only allowed from ERROR.
    if task_ex.state != states.ERROR:
        raise exc.WorkflowException(
            'The current task execution must be in ERROR for rerun.'
            ' Only updating task to rerun is supported.')

    # reset=False is only meaningful for with-items tasks.
    if not task_spec.get_with_items() and not reset:
        raise exc.WorkflowException(
            'Only with-items task has the option to not reset.')

    # Hand the rerun over to the engine; it updates the DB state.
    rpc.get_engine_client().rerun_workflow(task_ex.id, reset=reset, env=env)

    @rest_utils.rest_retry_on_db_error
    def _retrieve_task():
        # Re-read the task after the rerun request to return its fresh
        # state (including the result).
        with db_api.transaction():
            task_ex = db_api.get_task_execution(id)

            return _get_task_resource_with_result(task_ex)

    return _retrieve_task()
def get_all(self): """Return all services.""" acl.enforce('services:list', context.ctx()) LOG.info("Fetch services.") if not cfg.CONF.coordination.backend_url: raise exc.CoordinationException("Service API is not supported.") service_coordinator = coordination.get_service_coordinator() if not service_coordinator.is_active(): raise exc.CoordinationException( "Failed to connect to coordination backend.") services_list = [] service_group = ['%s_group' % i for i in launch.LAUNCH_OPTIONS] try: for group in service_group: members = service_coordinator.get_members(group) services_list.extend([ resources.Service.from_dict({ 'type': group, 'name': member }) for member in members ]) except tooz.coordination.ToozError as e: # In the scenario of network interruption or manually shutdown # connection shutdown, ToozError will be raised. raise exc.CoordinationException( "Failed to get service members from coordination backend. %s" % six.text_type(e)) return resources.Services(services=services_list)
def post(self): """Create a new action. NOTE: This text is allowed to have definitions of multiple actions. In this case they all will be created. """ acl.enforce('actions:create', context.ctx()) definition = pecan.request.text scope = pecan.request.GET.get('scope', 'private') pecan.response.status = 201 if scope not in SCOPE_TYPES.values: raise exc.InvalidModelException( "Scope must be one of the following: %s; actual: " "%s" % (SCOPE_TYPES.values, scope) ) LOG.info("Create action(s) [definition=%s]" % definition) db_acts = actions.create_actions(definition, scope=scope) models_dicts = [db_act.to_dict() for db_act in db_acts] action_list = [Action.from_dict(act) for act in models_dicts] return Actions(actions=action_list).to_json()
def get_all(self, marker=None, limit=None, sort_keys='created_at',
            sort_dirs='asc', fields='', created_at=None, definition=None,
            name=None, scope=None, tags=None, updated_at=None,
            namespace=None):
    """Return a list of workbooks.

    :param marker: Optional. Pagination marker for large data sets.
    :param limit: Optional. Maximum number of resources to return in a
        single result. Default value is None for backward compatibility.
    :param sort_keys: Optional. Columns to sort results by.
        Default: created_at.
    :param sort_dirs: Optional. Directions to sort corresponding to
        sort_keys, "asc" or "desc" can be chosen. Default: asc.
    :param fields: Optional. A specified list of fields of the resource
        to be returned. 'id' will be included automatically in fields if
        it's provided, since it will be used when constructing 'next'
        link.
    :param name: Optional. Keep only resources with a specific name.
    :param definition: Optional. Keep only resources with a specific
        definition.
    :param tags: Optional. Keep only resources containing specific tags.
    :param scope: Optional. Keep only resources with a specific scope.
    :param created_at: Optional. Keep only resources created at a
        specific time and date.
    :param updated_at: Optional. Keep only resources with specific
        latest update time and date.
    :param namespace: Optional. Keep only resources with specific
        namespace.
    """
    acl.enforce('workbooks:list', context.ctx())

    # Translate raw request parameters into DB-level filters.
    filters = filter_utils.create_filters_from_request_params(
        created_at=created_at,
        definition=definition,
        name=name,
        scope=scope,
        tags=tags,
        updated_at=updated_at,
        namespace=namespace
    )

    LOG.debug("Fetch workbooks. marker=%s, limit=%s, sort_keys=%s, "
              "sort_dirs=%s, fields=%s, filters=%s", marker, limit,
              sort_keys, sort_dirs, fields, filters)

    # Merge pagination settings with the filters and delegate to the
    # shared listing helper.
    list_kwargs = dict(
        marker=marker,
        limit=limit,
        sort_keys=sort_keys,
        sort_dirs=sort_dirs,
        fields=fields,
    )
    list_kwargs.update(filters)

    return rest_utils.get_all(
        resources.Workbooks,
        resources.Workbook,
        db_api.get_workbooks,
        db_api.get_workbook,
        **list_kwargs
    )
def get(self, name):
    """Return the named action.

    :param name: Name of the action definition to fetch.
    """
    acl.enforce('actions:get', context.ctx())

    # Use lazy %-style log arguments instead of eager string formatting,
    # consistent with the other endpoints in this module.
    LOG.info("Fetch action [name=%s]", name)

    db_model = db_api.get_action_definition(name)

    return Action.from_dict(db_model.to_dict())