class Flux(Component):
    """Flux component: deploys the API bundle and, on startup, registers
    entity registrations, operation endpoints, and the reassign task."""

    api = MeshServer.deploy(bundles=[API])

    docket = MeshDependency('docket')
    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')
    truss = MeshDependency('truss')

    @onstartup(service='flux')
    def startup_flux(self):
        GENERATED_BY.put()

        base_url = self.flux.url
        for registration in ENTITY_REGISTRATIONS:
            registration.url = base_url
            registration.put()

        # Each operation registers itself and reports (subject, endpoint);
        # collect all of them under a single executor record.
        endpoints = dict(operation().register()
            for operation in OPERATIONS.itervalues())
        Executor(id='flux', endpoints=endpoints).put()

        REASSIGN_REQUEST_ASSIGNEE.set_http_task(
            self.flux.prepare('flux/1.0/request', 'task', None,
                {'task': 'reassign-request-assignee'},
                preparation={'injections': ['event']}))
        REASSIGN_REQUEST_ASSIGNEE.put()
class Operation(Unit, OperationMixin):
    """A workflow operation."""

    docket = MeshDependency('docket')
    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')

    process = Process

    def register(self):
        """Publish this operation's resource and return (id, endpoint)."""
        from flux.bundles import API

        # Bind the operation resource and publish this operation's record.
        resource = bind(API, 'flux/1.0/operation')
        resource(**self.operation).put()

        endpoint = self.flux.prepare(*self.endpoint,
            preparation={'type': 'http'})
        return self.id, endpoint
class Narrative(Component):
    """Narrative component: deploys the API and schedules the daily and
    purge-entries platoon tasks on startup."""

    api = MeshServer.deploy(bundles=[API], path='/')

    narrative = MeshDependency('narrative')
    platoon = MeshDependency('platoon')

    @onstartup()
    def bootstrap_platoon_tasks(self):
        # Defer startup until platoon is reachable.
        if not self.platoon.ping():
            raise TemporaryStartupError()

        DAILY.put()

        PURGE_ENTRIES.set_http_task(
            self.narrative.prepare('narrative/1.0/entry', 'task', None,
                {'task': 'purge-entries'}))
        PURGE_ENTRIES.put()
class Operation(Unit, OperationMixin):
    """A workflow operation."""

    docket = MeshDependency('docket')
    docket_entity = MeshDependency('docket.entity')
    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')

    process = Process

    # Subclasses are expected to provide these.
    id = None
    endpoint = None

    def register(self):
        """Publish this operation's resource and return (id, endpoint)."""
        # NOTE(review): bind() is given the string 'flux.API' here, while
        # the sibling implementation binds the imported API bundle object —
        # confirm bind() accepts a dotted-path string.
        resource = bind('flux.API', 'flux/1.0/operation')
        resource(**self.operation).put()

        endpoint = self.flux.prepare(*self.endpoint,
            preparation={'type': 'http'})
        return self.id, endpoint
class ExecutionController(ModelController):
    """A step execution controller"""

    model = WorkflowExecutionModel
    resource = Execution
    schema = SchemaDependency('flux')
    version = (1, 0)

    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')

    def update(self, request, response, subject, data):
        session = self.schema.session
        outcome = subject.update(session, **data)
        if outcome == 'abort':
            subject.initiate_abort(session)
            # Queue the abort-run task only after the transaction commits.
            session.call_after_commit(
                ScheduledTask.queue_http_task, 'abort-run',
                self.flux.prepare('flux/1.0/execution', 'task', None,
                    {'task': 'abort-run', 'id': subject.id}))
        session.commit()
        response({'id': subject.id})

    def task(self, request, response, subject, data):
        session = self.schema.session
        if 'id' in data:
            try:
                subject = self.model.load(session, id=data['id'],
                    lockmode='update')
            except NoResultFound:
                return

        if data['task'] == 'abort-run':
            subject.run.abort_executions(session)
            session.commit()
class Docket(Component):
    """Docket component: bootstraps the registries and drives the staged
    docket startup (yield for dependents, restart, then synchronize)."""

    api = MeshServer.deploy(bundles=BUNDLES)
    schema = SchemaDependency('docket')

    archetype_registry = Dependency(ArchetypeRegistry)
    entity_registry = Dependency(EntityRegistry)

    docket = MeshDependency('docket')
    platoon = MeshDependency('platoon')

    @onstartup()
    def bootstrap(self):
        self.entity_registry.bootstrap()
        self.archetype_registry.bootstrap()
        self.api.server.configure_endpoints()
        self.schema.purge()

    @onstartup(service='docket')
    def startup_docket(self):
        EVERY_SIX_HOURS.put()

        SYNC_ALL_ENTITIES.set_http_task(
            self.docket.prepare('docket/1.0/entity', 'task', None,
                {'task': 'synchronize-all-entities'}))
        SYNC_ALL_ENTITIES.put()

        self.entity_registry.subscribe_to_changes()
        # Yield until dependent services are up before finishing startup.
        return {'status': 'yielding', 'stage': 'dependents-ready'}

    @onstartup(service='docket', stage='dependents-ready')
    def restart_when_dependents_ready(self):
        current_runtime().reload()
        return {'status': 'restarting', 'stage': 'docket-ready'}

    @onstartup(service='docket', stage='docket-ready')
    def finish_docket_startup(self):
        self.entity_registry.synchronize_entities()
        return {'status': 'ready'}
class Flux(Component):
    """Flux component: deploys the API bundle and registers entity
    registrations and operation endpoints on startup."""

    api = MeshServer.deploy(bundles=[API])

    docket = MeshDependency('docket')
    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')
    truss = MeshDependency('truss')

    @onstartup(service='flux')
    def startup_flux(self):
        GENERATED_BY.put()

        flux_url = self.flux.url
        for registration in ENTITY_REGISTRATIONS:
            registration.url = flux_url
            registration.put()

        # Register every operation and publish the resulting endpoints
        # under a single executor record.
        endpoints = {}
        for operation in OPERATIONS.itervalues():
            subject, endpoint = operation().register()
            endpoints[subject] = endpoint
        Executor(id='flux', endpoints=endpoints).put()
class ExecutionController(ModelController):
    """A step execution controller"""

    model = WorkflowExecutionModel
    resource = Execution
    schema = SchemaDependency('flux')
    version = (1, 0)

    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')

    def update(self, request, response, subject, data):
        session = self.schema.session
        requested_status = data.pop('status')
        if requested_status == 'aborted' and subject.is_active:
            subject.abort(session)
            session.commit()
            # Propagate the abort to the owning run.
            self.flux.execute('flux/1.0/run', 'update', subject.run_id,
                {'status': 'aborted'})
        response({'id': subject.id})
class QueueManager(Unit):
    """The queue manager."""

    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')
    schema = SchemaDependency('flux')

    def bootstrap(self):
        """Register a queue for every persisted operation."""
        session = self.schema.session
        for operation in session.query(Operation):
            self._register_queue(operation)

    def initiate(self, operation, tag, input=None, id=None, timeout=None):
        """Create a process on the operation's queue.

        ``input`` and ``id`` shadow builtins but are caller-visible keyword
        parameters, so the names are kept for compatibility.
        """
        params = {'queue_id': operation.queue_id, 'tag': tag}
        if id is not None:
            params['id'] = id
        if input is not None:
            params['input'] = input
        if timeout is not None:
            params['timeout'] = timeout
        Process.create(**params)

    def register(self, operation):
        self._register_queue(operation)

    def _register_queue(self, operation):
        # Queue callbacks post back to this operation's process endpoint.
        endpoint = self.flux.prepare('flux/1.0/operation', 'process',
            operation.id, preparation={'type': 'http'})
        Queue(id=operation.queue_id, subject=operation.id,
            name=operation.name, endpoint=endpoint).put()
class RunController(ModelController):
    """Controller for workflow runs: creation, abort handling, scheduled
    task callbacks, and completion notification."""

    resource = RunResource
    version = (1, 0)
    model = Run
    mapping = 'id workflow_id name status parameters started ended'
    schema = SchemaDependency('flux')

    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')

    @support_returning
    def create(self, request, response, subject, data):
        session = self.schema.session
        subject = self.model.create(session, **data)
        session.commit()

        # Kick off the run asynchronously via a scheduled task.
        ScheduledTask.queue_http_task(
            'initiate-run',
            self.flux.prepare('flux/1.0/run', 'task', None,
                {'task': 'initiate-run', 'id': subject.id}))

        notify = data.get('notify')
        if notify:
            # Subscribe a notification task to this run's completion event.
            SubscribedTask.queue_http_task(
                'run-completion',
                self.flux.prepare('flux/1.0/run', 'task', None,
                    {'task': 'run-completion', 'id': subject.id,
                        'notify': notify}),
                topic='run:completed',
                aspects={'id': subject.id})
        return subject

    @support_returning
    def update(self, request, response, subject, data):
        session = self.schema.session
        requested_status = data.pop('status')
        if requested_status == 'aborted' and subject.is_active:
            subject.initiate_abort(session)
            session.commit()
            ScheduledTask.queue_http_task(
                'abort-run',
                self.flux.prepare('flux/1.0/run', 'task', None,
                    {'task': 'abort-executions', 'id': subject.id}))
        return subject

    def task(self, request, response, subject, data):
        session = self.schema.session
        if 'id' in data:
            try:
                subject = self.model.load(session, id=data['id'],
                    lockmode='update')
            except NoResultFound:
                return

        task = data['task']
        if task == 'initiate-run':
            subject.initiate(session)
            session.commit()
        elif task == 'abort-executions':
            subject.abort_executions(session)
            session.commit()
        elif task == 'run-completion':
            self._send_completion_email(subject, data)

    def _annotate_resource(self, request, model, resource, data):
        if not data:
            return
        include = data.get('include')
        if include and 'executions' in include:
            attrs = ('id', 'execution_id', 'ancestor_id', 'step', 'name',
                'status', 'started', 'ended')
            resource['executions'] = [
                execution.extract_dict(attrs=attrs)
                for execution in model.executions.all()]

    def _send_completion_email(self, subject, data):
        # 'notify' is a comma-separated recipient list supplied at creation.
        recipients = [{'to': data['notify'].split(',')}]
        email_subject = 'Workflow run "%s" completed' % subject.name
        body = 'The workflow run "%s" completed and is available for review.' % subject.name
        Message.create(recipients=recipients, subject=email_subject, body=body)
class WorkflowController(ModelController):
    """Controller for workflow resources, including mule-script workflows
    (archive upload, deploy/undeploy, and endpoint extraction)."""

    resource = WorkflowResource
    version = (1, 0)
    model = Workflow
    mapping = 'id name designation is_service specification modified type'
    schema = SchemaDependency('flux')
    uploads = Dependency(UploadManager)

    flux = MeshDependency('flux')
    docket_entity = MeshDependency('docket.entity')

    @support_returning
    def create(self, request, response, subject, data):
        """Create a workflow; a mule workflow also deploys its archive.

        Raises OperationError on duplicate names/urls or deploy failure.
        """
        if 'type' in data and data['type'] == 'mule':
            # Check for a duplicate workflow name before doing deploy work.
            session = self.schema.session
            if session.query(Workflow).filter(
                    Workflow.name == data['name']).count():
                raise OperationError(token='duplicate-workflow-name')

            # Mule workflows carry a no-op yaml specification.
            data['specification'] = MULE_DUMMY_SPEC

            if 'mule_extensions' not in data:
                # Derive the extension information from the mule archive.
                data['mule_extensions'] = {}
                if not data['filepath']:
                    raise OperationError(token='mule-script-upload-required')

                # Strip the upload uuid prefix (assumed fixed length of 37),
                # e.g. 25dc766a-7eb6-4ed6-abc6-2f57fbfbc294_helloworld.zip
                short_filepath = data['filepath'][37:]
                data['mule_extensions']['packageurl'] = ExternalUrl.create(
                    path='/download/mule-flows/%s' % short_filepath).url

                try:
                    filepath = self.uploads.find(data.pop('filepath'))
                except ValueError:
                    raise OperationError(token='mule-script-invalid-upload')

                endpointurl, readmeurl = self._extract_zipfile(filepath)

                # Reject duplicate packageurl, endpointurl and readmeurl.
                if session.query(WorkflowMule).filter_by(
                        packageurl=data['mule_extensions']['packageurl']).count():
                    raise OperationError(
                        token='mule-script-duplicate-packageurl')
                if session.query(WorkflowMule).filter_by(
                        endpointurl=endpointurl).count():
                    raise OperationError(
                        token='mule-script-duplicate-endpointurl')
                if readmeurl.strip() and session.query(WorkflowMule).filter_by(
                        readmeurl=readmeurl).count():
                    raise OperationError(
                        token='mule-script-duplicate-readmeurl')

                data['mule_extensions']['endpointurl'] = endpointurl
                data['mule_extensions']['readmeurl'] = readmeurl
                self._deploy_mulescript(data['name'], filepath)

        session = self.schema.session
        subject = self.model.create(session, **data)
        try:
            session.commit()
        except IntegrityError:
            raise OperationError(token='duplicate-workflow-name')
        return subject

    def delete(self, request, response, subject, data):
        """Delete a workflow, its completed runs, and its mule deployment."""
        workflow_name = subject.name

        # A workflow referenced by policies cannot be deleted.
        policies = subject.policies
        if policies:  # idiom fix: was `if len(policies):`
            log('info',
                'workflow %s (%s) cannot be deleted as it is associated with policies %s',
                subject.id, workflow_name, policies)
            raise OperationError(token='cannot-delete-inuse-workflow')

        # Refuse deletion while any run is still in an active status.
        session = self.schema.session
        if session.query(Run).filter(
                Run.workflow_id == subject.id,
                Run.status.in_(ACTIVE_RUN_STATUSES.split(' '))).count():
            log('info',
                'workflow %s (%s) cannot be deleted as it has run with uncompleted status of either %s',
                subject.id, workflow_name, ACTIVE_RUN_STATUSES)
            raise OperationError(token='cannot-delete-uncompleted-workflow')

        # Delete the completed runs through the docket entity facade.
        for run in session.query(Run).filter_by(workflow_id=subject.id).all():
            run_entity = self.docket_entity.bind(
                'docket.entity/1.0/flux/1.0/run')
            run_entity.get(run.id).destroy()

        # Capture mule extension urls before the row disappears.
        if subject.type == 'mule':
            mule_extensions = subject.mule_extensions.extract_dict(
                'packageurl endpointurl readmeurl')

        super(WorkflowController, self).delete(request, response, subject,
            data)
        self._create_change_event(subject)

        if subject.type == 'mule':
            packageurl = mule_extensions['packageurl']
            package = packageurl.split('/')[-1]  # mule app name
            readmeurl = mule_extensions['readmeurl']
            readme = readmeurl.split('/')[-1] if readmeurl else ''
            self._undeploy_mulescript(workflow_name, package, readme)

    def generate(self, request, response, subject, data):
        """Generate a linear workflow specification from an operation list."""
        name = data['name']
        description = data.get('description', '')
        operations = data['operations']

        specification = {
            'name': name,
            'entry': 'step:0',
        }
        layout = data.get('layout')
        schema = data.get('schema')
        if layout:
            specification['layout'] = layout
        if schema:
            specification['schema'] = schema

        steps = {}
        step_name = None
        for i, op in enumerate(operations):
            new_step_name = 'step:%s' % i
            new_step = {
                'operation': op['operation'],
                'parameters': op['run_params'],
                'description': op.get('description'),
            }
            if step_name:
                # Chain the previous step into this one.
                steps[step_name]['postoperation'] = [{
                    'actions': [{
                        'action': 'execute-step',
                        'step': new_step_name,
                        'parameters': op.get('step_params'),
                    }],
                    'terminal': False,
                }]
            step_name = new_step_name
            steps[step_name] = new_step

        specification['steps'] = steps
        specification = WorkflowEngine.schema.serialize(specification,
            format='yaml')
        response({'name': name, 'specification': specification,
            'description': description})

    @support_returning
    def update(self, request, response, subject, data):
        """Update a workflow; a mule workflow's type/service/extensions are
        immutable."""
        if not data:
            return subject
        if subject.type == 'mule':
            # Idiom fix: pop with a default instead of membership checks.
            data.pop('type', None)  # no update of workflow type
            data.pop('is_service', None)  # no update of workflow is_service
            data.pop('mule_extensions', None)  # no update of mule extensions

        session = self.schema.session
        changed = subject.update(session, **data)
        try:
            session.commit()
        except IntegrityError:
            raise OperationError(token='duplicate-workflow-name')

        if changed:
            self._create_change_event(subject)
        return subject

    def _annotate_resource(self, request, model, resource, data):
        if model.type == 'mule':
            resource['mule_extensions'] = model.mule_extensions.extract_dict(
                'packageurl endpointurl readmeurl')

        include = data and data.get('include')
        if not include:
            return

        if 'form' in include:
            schema = model.workflow.schema
            layout = model.workflow.layout
            form = {}
            if layout:
                form['layout'] = layout
            if schema:
                form['schema'] = schema
            resource['form'] = form or None
        if 'specification' in include:
            resource['specification'] = model.specification
        if 'policies' in include:
            resource['policies'] = model.policies

    def _create_change_event(self, subject):
        # Best-effort event; failure to publish must not fail the request.
        try:
            Event.create(topic='workflow:changed', aspects={'id': subject.id})
        except Exception:
            log('exception', 'failed to fire workflow:changed event')

    def _deploy_mulescript(self, name, filepath):
        """POST the mule archive to the deploy endpoint; raise on failure."""
        url = MULE_DEPLOY_URL
        log('info', 'Deploying Mule script %s by endpoint URL = %s', name,
            url)

        request = urllib2.Request(url)
        request.add_header('Content-Type', 'application/json')
        conn = None
        try:
            conn = urllib2.urlopen(request,
                json.dumps({'name': name, 'filepath': filepath}))
            log('info',
                'Response code of deploying mule script (name: %s) is %s',
                name, conn.getcode())
        except urllib2.HTTPError as e:
            log('info',
                'HTTPError Response code of deploying (name: %s) is %s',
                name, e.code)
            raise OperationError(token='mule-script-deploy-failed')
        finally:
            if conn is not None:  # idiom fix: was `conn != None`
                conn.close()

    def _undeploy_mulescript(self, name, package, readme):
        """POST an undeploy request; failures are logged, not raised."""
        url = MULE_UNDEPLOY_URL
        log('info', 'UnDeploying Mule script %s by endpoint URL = %s', name,
            url)

        request = urllib2.Request(url)
        request.add_header('Content-Type', 'application/json')
        conn = None
        try:
            conn = urllib2.urlopen(request,
                json.dumps({'name': name, 'package': package,
                    'readme': readme}))
            log('info',
                'Response code of undeploying mule script (name: %s) is %s',
                name, conn.getcode())
        except urllib2.HTTPError as e:
            log('exception',
                'HTTPError Response code of undeploying (name: %s) is %s, failed to undeploy mule script',
                name, e.code)
        finally:
            if conn is not None:  # idiom fix: was `conn != None`
                conn.close()

    def _extract_zipfile(self, filepath):
        """Extract (endpointurl, readmeurl) from a mule zip archive.

        Raises OperationError if the archive is not a zip, or if no
        http:listener path can be found in a top-level xml file.
        """
        import zipfile
        from xml.dom import minidom

        endpointurl = ''
        readmeurl = ''
        if zipfile.is_zipfile(filepath):
            with zipfile.ZipFile(filepath, 'r') as f:
                # Only consider files at the archive root (no '/').
                for comp_file in f.namelist():
                    if comp_file.endswith('xml') and '/' not in comp_file:
                        # Read the "path" attribute of the http:listener.
                        cfp = f.open(comp_file, 'r')
                        xmldoc = minidom.parse(cfp)
                        httplistener = xmldoc.getElementsByTagName(
                            'http:listener')
                        if httplistener:
                            urlpath = httplistener[0].getAttribute('path')
                            if urlpath:
                                if urlpath.startswith('/'):
                                    urlpath = urlpath[1:]
                                endpointurl = (MULE_ENDPOINT_URL_PREFIX
                                    + urlpath)
                            else:
                                raise OperationError(
                                    token='mule-script-missing-http-path')
                        else:
                            raise OperationError(
                                token='mule-script-missing-endpoint')
                    if comp_file.endswith(MULE_README_EXT) \
                            and '/' not in comp_file:
                        readmeurl = ExternalUrl.create(
                            path='/download/mule-flows/%s' % comp_file).url
        else:
            log('info', 'Unable to unzip file %s', filepath)
            raise OperationError(token='mule-script-bad-zipfile')

        if not endpointurl.strip():
            raise OperationError(token='mule-script-missing-endpoint')
        return endpointurl, readmeurl
class OperationController(ModelController):
    """Controller for workflow operations: registration, process callbacks
    from queues, and test-operation task handling."""

    resource = OperationResource
    version = (1, 0)
    model = Operation
    mapping = 'id name phase description schema parameters'
    schema = SchemaDependency('flux')
    manager = Dependency(QueueManager)

    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')

    def create(self, request, response, subject, data):
        session = self.schema.session
        subject = self.model.create(session, **data)
        self.manager.register(subject)
        session.commit()
        response({'id': subject.id})

    def operation(self, request, response, subject, data):
        operation = OPERATIONS.get(data['subject'])
        if operation:
            operation().execute(self.schema.session, response, data)
        else:
            # Consistency fix: every other OperationError in these
            # controllers is raised with the token= keyword.
            raise OperationError(token='invalid-operation')

    def process(self, request, response, subject, data):
        """Apply a queue status callback to the matching execution."""
        session = self.schema.session
        try:
            execution = WorkflowExecution.load(session, id=data['id'],
                lockmode='update')
        except NoResultFound:
            return  # todo: address exception properly

        status = data['status']
        if status in ('aborted', 'completed', 'failed', 'timedout'):
            execution.process(session, status, data.get('output'))
        elif status == 'executing':
            execution.update_progress(session, data.get('progress'))
        session.commit()

    def task(self, request, response, subject, data):
        if data['task'] == 'complete-test-operation':
            TestOperation().complete(self.schema.session, data)

    def update(self, request, response, subject, data):
        if not data:
            return response({'id': subject.id})

        session = self.schema.session
        subject.update(session, **data)
        # Re-register so the queue reflects any changed operation fields.
        self.manager.register(subject)
        session.commit()
        response({'id': subject.id})

    def _annotate_resource(self, request, model, resource, data):
        resource['outcomes'] = {}
        for name, outcome in model.outcomes.iteritems():
            resource['outcomes'][name] = outcome.extract_dict(
                exclude='id operation_id name')
class BastionDependency(Unit):
    """Declares the security (bastion) mesh dependency."""

    bastion = MeshDependency('security')
class DocketDependency(Unit):
    """Declares the docket entity and enamel concept mesh dependencies."""

    docket_entity = MeshDependency('docket.entity')
    enamel_concept = MeshDependency('enamel.concept')
class EntityRegistry(Unit):
    """The entity registry."""

    docket = MeshDependency('docket')
    platoon = MeshDependency('platoon')
    schema = SchemaDependency('docket')

    def __init__(self):
        self.proxies = {}
        self.annotator = Annotator(self.proxies)
        self.models = {}

    def bootstrap(self):
        """Construct a model per registration and mount the entity API."""
        from docket.bundles import ENTITY_API

        session = self.schema.session
        registrations = session.query(Registration).options(
            undefer('specification'))
        for registration in registrations:
            model = self._construct_model(registration)
            self.models[registration.id] = model
            self.annotator.process(registration, model)

        session.commit()
        ENTITY_API.attach(self.annotator.generate_mounts())

    def get_proxy(self, id, version):
        return self.proxies['%s:%s' % (id, version)]

    def subscribe_to_changes(self):
        session = self.schema.session
        for registration in session.query(Registration):
            if registration.change_event:
                self._subscribe_to_changes(registration)

    def synchronize_entities(self):
        Entity.synchronize_entities(self, self.schema.session)

    def unregister(self, registration):
        # Drop the registration's backing table if it exists.
        table = self._construct_table(registration)
        if self.schema.table_exists(table):
            self.schema.drop_table(table)

    def _construct_model(self, registration):
        # One field per cached attribute, in deterministic (sorted) order,
        # plus the entity foreign key as primary key.
        attrs = {'entity_id': ForeignKey('entity.id', nullable=False,
            primary_key=True)}
        for name, attr in sorted(registration.cached_attributes.iteritems()):
            attrs[name] = attr.contribute_field()

        tablename = self._prepare_tablename(registration.id)
        model = self.schema.construct_model(Entity, tablename, attrs,
            polymorphic_identity=registration.id)

        registration.annotate(model)
        self.schema.create_or_update_table(model.__table__)
        return model

    def _construct_table(self, registration):
        metadata = MetaData()
        entities = Table('entity', metadata,
            Text(name='id', nullable=False, primary_key=True))

        tablename = self._prepare_tablename(registration.id)
        table = Table(tablename, metadata,
            ForeignKey(name='entity_id', column=entities.c.id,
                type_=TextType(), nullable=False, primary_key=True))
        for name, attr in sorted(registration.cached_attributes.iteritems()):
            table.append_column(attr.contribute_field())
        return table

    def _prepare_tablename(self, id):
        # e.g. 'some:id' -> 'entity_some_id'; only [a-z_] survive.
        normalized = id.lower().replace(':', '_')
        return 'entity_' + re.sub(r'[^a-z_]', '', normalized).strip('_')

    def _subscribe_to_changes(self, registration):
        task = self.docket.prepare('docket/1.0/entity', 'task', None,
            {'task': 'synchronize-changed-entity'})
        task['injections'] = ['event']
        SubscribedTask(id=nsuniqid(TASK_UUID_NAMESPACE, registration.id),
            tag='%s changes' % registration.id,
            topic=registration.change_event,
            task=SubscribedTask.prepare_http_task(task)).put()
class RequestController(ModelController):
    """Controller for requests: creation, task dispatch, status transitions
    and the events/scheduled tasks they trigger."""

    resource = RequestResource
    version = (1, 0)
    model = Request
    mapping = (
        'id name status originator assignee creator '
        'slot_order claimed completed'
    )
    schema = SchemaDependency('flux')

    docket_entity = MeshDependency('docket.entity')
    flux = MeshDependency('flux')
    platoon = MeshDependency('platoon')

    @support_returning
    def create(self, request, response, subject, data):
        session = self.schema.session
        message = data.pop('message', None)
        subject = self.model.create(session, **data)
        try:
            session.flush()
        except IntegrityError:
            raise OperationError(token='duplicate-request-name')

        if message:
            Message.create(session, subject.id, **message)
        session.commit()

        if subject.status == 'pending':
            self._queue_request_task('initiate-request', subject.id)
        return subject

    def operation(self, request, response, subject, data):
        operation = OPERATIONS.get(data['subject'])
        if operation:
            operation().execute(self.schema.session, response, data)
        else:
            # Consistency fix: raise with the token= keyword like every
            # other OperationError in these controllers.
            raise OperationError(token='invalid-subject')

    def task(self, request, response, subject, data):
        session = self.schema.session
        if 'id' in data:
            try:
                subject = self.model.load(session, id=data['id'],
                    lockmode='update')
            except NoResultFound:
                return

        task = data['task']
        if task == 'initiate-request':
            if not subject.initiate(session):
                subject.status = 'failed'
            else:
                try:
                    Event.create(topic='request:changed',
                        aspects={'id': subject.id})
                except Exception:
                    # NOTE(review): the log text says 'request:started'
                    # while the topic fired is 'request:changed' —
                    # preserved as-is pending confirmation.
                    log('exception', 'failed to fire request:started event')
            session.commit()
            if subject.status == 'failed':
                self._fire_completed_event(subject)
        elif task == 'cancel-request':
            subject.cancel(session)
            session.commit()
        elif task == 'decline-request':
            subject.decline(session)
            session.commit()
        elif task == 'complete-request-operation':
            CreateRequest().complete(session, data)
        elif task == 'reassign-request-assignee':
            event = data.get('event')
            if event:
                Request.reassign_assignee(session, event['id'])
                session.commit()

    @support_returning
    def update(self, request, response, subject, data):
        if not data:
            return subject

        session = self.schema.session
        message = data.get('message')
        new_status = subject.update(session, self.docket_entity, **data)
        session.flush()
        if message:
            Message.create(session, subject.id, **message)
        session.commit()

        if new_status:
            Event.create(topic='request:changed', aspects={'id': subject.id})
            if new_status == 'pending':
                self._queue_request_task('initiate-request', subject.id)
            elif new_status == 'completed':
                self._fire_completed_event(subject)
            elif new_status == 'canceled':
                self._queue_request_task('cancel-request', subject.id)
                self._fire_completed_event(subject)
            elif new_status == 'declined':
                self._queue_request_task('decline-request', subject.id)
                self._fire_completed_event(subject)
        return subject

    def _annotate_resource(self, http_request, model, resource, data):
        resource['attachments'] = attachments = []
        for attachment in model.attachments:
            attachments.append(
                attachment.extract_dict('token title attachment'))

        resource['slots'] = slots = {}
        for key, value in model.slots.iteritems():
            slots[key] = value.extract_dict('title slot')

        resource['products'] = products = {}
        for key, value in model.products.iteritems():
            products[key] = value.extract_dict('title product')

        include = None
        if data and 'include' in data:
            include = data['include']
        if not include:
            return

        if 'template' in include:
            template = model.template
            if template:
                resource['template'] = template.template
        if 'form' in include:
            resource['form'] = model.generate_form()
        if 'entities' in include:
            resource['entities'] = model.generate_entities()
        if 'messages' in include:
            resource['messages'] = [
                msg.extract_dict('id author occurrence message')
                for msg in model.messages
            ]

    def _fire_completed_event(self, subject):
        # Best-effort: event delivery failures must not fail the request.
        try:
            Event.create(topic='request:completed',
                aspects={'id': subject.id})
        except Exception:
            log('exception', 'failed to fire request:completed event')

    def _queue_request_task(self, task, id):
        # Queue an http task against this request's task endpoint.
        ScheduledTask.queue_http_task(task,
            self.flux.prepare('flux/1.0/request', 'task', None,
                {'task': task, 'id': id}))
class PackageController(BaseEntityController):
    """Controller for packages: creation, deployment tasks and updates."""

    resource = PackageResource
    version = (1, 0)
    model = Package
    registry = Dependency(EntityRegistry)
    schema = SchemaDependency('docket')
    docket = MeshDependency('docket')
    mapping = 'id name designation description created modified package status'

    def create(self, request, response, subject, data):
        session = self.schema.session
        subject = self.model.create(session, **data)
        log('info', 'create request for package %s', subject.id)
        try:
            session.commit()
        except IntegrityError:
            raise OperationError(token='duplicate-package')

        if subject.status == 'deploying':
            # Deploy asynchronously via a scheduled http task.
            ScheduledTask.queue_http_task('deploy-package',
                self.docket.prepare('docket/1.0/package', 'task', None,
                    {'task': 'deploy-package', 'id': subject.id}))

        response({'id': subject.id})
        return

    def task(self, request, response, subject, data):
        log('info', 'task request to %s', data['task'])
        registry = self.registry
        session = self.schema.session
        if 'id' in data:
            try:
                subject = self.model.load(session, id=data['id'],
                    lockmode='update')
            except NoResultFound:
                return

        task = data['task']
        if task == 'deploy-package':
            subject.deploy(registry, session, method='create')
        elif task == 'update-package':
            subject.deploy(registry, session, method='update')

        if subject.status == 'invalid':
            # Discard any partial deployment work but keep (re-apply) the
            # invalid marker so the failed state is persisted by the commit.
            session.rollback()
            subject.status = 'invalid'
        session.commit()
        return

    def update(self, request, response, subject, data):
        log('info', 'update request for package %s', subject.id)
        session = self.schema.session
        if not data:
            return response({'id': subject.id})

        # NOTE(review): sibling controllers call subject.update(session,
        # **data); confirm Package.update really takes a plain dict.
        subject.update(data)
        session.commit()

        if subject.status == 'deploying':
            ScheduledTask.queue_http_task('update-package',
                self.docket.prepare('docket/1.0/package', 'task', None,
                    {'task': 'update-package', 'id': subject.id}))
        response({'id': subject.id})
class DocketDependency(Unit):
    """Declares the docket entity mesh dependency."""

    docket_entity = MeshDependency('docket.entity')