class BatchGroup(Resource):
    """
    Batch group assigned to a child task of a batch task.

    The group is derived from the batching criteria that were supplied
    when the parent task was started.
    """
    # Both fields are populated by the server and never written by clients.
    value = StringField(read_only=True)
    fields = DictField(read_only=True)

    def __str__(self):
        # Batch groups carry no stable identifier, so the textual form
        # is a fixed tag.
        return '<Batch group>'
class BatchGroup(Resource):
    """
    Batch group for a batch task. Represents the group that is assigned
    to the child task from the batching criteria that was used when the
    task was started.
    """
    # Server-populated, read-only fields.
    value = StringField(read_only=True)
    fields = DictField(read_only=True)

    def __str__(self):
        # Bug fix: this was previously a @staticmethod taking only
        # **kwargs. str(instance) invokes type(obj).__str__(obj), passing
        # the instance positionally, which raised TypeError. A regular
        # instance method restores the expected behavior. six.text_type
        # is redundant on Python 3 (the file already uses plain
        # str/f-strings elsewhere).
        return '<Batch group>'
class VolumeObject(Resource):
    """
    A single file (object) entry inside a specific volume.

    Instances are returned by volume listings; every attribute is
    populated by the server.
    """
    href = StringField(read_only=True)
    location = StringField(read_only=True)
    volume = StringField(read_only=True)
    type = StringField(read_only=True)
    metadata = DictField(read_only=True)

    def __str__(self):
        return f'<VolumeObject: location={self.location}>'
class App(Resource):
    """
    Central resource for managing apps.
    """
    _URL = {
        'query': '/apps',
        'get': '/apps/{id}',
        'get_revision': '/apps/{id}/{revision}',
        'create_revision': '/apps/{id}/{revision}/raw',
        'copy': '/apps/{id}/actions/copy',
        'sync': '/apps/{id}/actions/sync',
        'raw': '/apps/{id}/raw'
    }
    _CONTENT_TYPE = {
        AppRawFormat.JSON: 'application/json',
        AppRawFormat.YAML: 'application/yaml'
    }

    href = HrefField()
    _id = StringField(read_only=True, name='id')
    project = StringField(read_only=True)
    name = StringField(read_only=True)
    revision = IntegerField(read_only=True)
    raw = DictField(read_only=False)

    @property
    def id(self):
        """App identifier with any trailing numeric revision stripped."""
        _id, _rev = self._id.rsplit('/', 1)
        # Fix: raw string. '\d' inside a plain literal is an invalid
        # escape sequence (DeprecationWarning today, slated to become an
        # error in a future Python release).
        if re.match(r'^\d*$', _rev):
            return _id
        else:
            return self._id

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        # f-string for consistency with the other resources in this file;
        # output is identical to the previous six.text_type/format form.
        return f'<App: id={self.id} rev={self.revision}>'

    @classmethod
    def query(cls, project=None, visibility=None, q=None, id=None,
              offset=None, limit=None, api=None):
        """
        Query (List) apps.
        :param project: Source project.
        :param visibility: private|public for private or public apps.
        :param q: List containing search terms.
        :param id: List contains app ids. Fetch apps with specific ids.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        if project:
            project = Transform.to_project(project)
        api = api or cls._API
        # Py3 zero-argument super(), matching the style used by the other
        # resources in this file.
        return super()._query(
            url=cls._URL['query'], project=project, visibility=visibility,
            q=q, id=id, offset=offset, limit=limit, api=api
        )

    @classmethod
    def get_revision(cls, id, revision, api=None):
        """
        Get app revision.
        :param id: App identifier.
        :param revision: App revision.
        :param api: Api instance.
        :return: App object.
        """
        api = api if api else cls._API
        extra = {
            'resource': cls.__name__,
            'query': {
                'id': id,
                'revision': revision
            }
        }
        logger.info('Get revision', extra=extra)
        app = api.get(url=cls._URL['get_revision'].format(
            id=id, revision=revision)).json()
        return App(api=api, **app)

    @classmethod
    def install_app(cls, id, raw, api=None, raw_format=None):
        """
        Installs an app.
        :param id: App identifier.
        :param raw: Raw cwl data.
        :param api: Api instance.
        :param raw_format: Format of raw app data being sent, json by default.
        :return: App object.
        """
        api = api if api else cls._API
        raw_format = raw_format.lower() if raw_format else AppRawFormat.JSON
        extra = {'resource': cls.__name__, 'query': {'id': id, 'data': raw}}
        logger.info('Installing app', extra=extra)

        # Set content type for raw app data; reject unknown formats early.
        if raw_format not in cls._CONTENT_TYPE:
            raise SbgError(
                'Unsupported raw data format: "{}".'.format(raw_format))
        headers = {'Content-Type': cls._CONTENT_TYPE[raw_format]}

        app = api.post(
            url=cls._URL['raw'].format(id=id),
            data=raw,
            headers=headers,
        ).json()
        # Fetch the full app wrapper for the freshly installed revision.
        app_wrapper = api.get(
            url=cls._URL['get'].format(id=app['sbg:id'])).json()
        return App(api=api, **app_wrapper)

    @classmethod
    def create_revision(cls, id, revision, raw, api=None):
        """
        Create a new app revision.
        :param id: App identifier.
        :param revision: App revision.
        :param raw: Raw cwl object.
        :param api: Api instance.
        :return: App object.
        """
        api = api if api else cls._API
        extra = {'resource': cls.__name__, 'query': {'id': id, 'data': raw}}
        logger.info('Creating app revision', extra=extra)

        app = api.post(url=cls._URL['create_revision'].format(
            id=id, revision=revision), data=raw).json()
        app_wrapper = api.get(
            url=cls._URL['get'].format(id=app['sbg:id'])).json()
        return App(api=api, **app_wrapper)

    def copy(self, project, name=None, strategy=None):
        """
        Copies the current app.
        :param project: Destination project.
        :param name: Destination app name.
        :param strategy: App copy strategy.
        :return: Copied App object.

        :Copy strategies:
        clone           copy all revisions and continue getting updates from
                        the original app (default method when the key is
                        omitted)
        direct          copy only the latest revision and get the updates from
                        this point on
        clone_direct    copy the app like the direct strategy, but keep all
                        revisions
        transient       copy only the latest revision and continue getting
                        updates from the original app
        """
        strategy = strategy or AppCopyStrategy.CLONE
        project = Transform.to_project(project)
        data = {'project': project, 'strategy': strategy}
        if name:
            data['name'] = name
        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'data': data
            }
        }
        logger.info('Copying app', extra=extra)
        app = self._api.post(url=self._URL['copy'].format(id=self.id),
                             data=data).json()
        return App(api=self._api, **app)

    def sync(self):
        """
        Syncs the parent app changes with the current app instance.
        :return: Synced App object.
        """
        app = self._api.post(url=self._URL['sync'].format(id=self.id)).json()
        return App(api=self._api, **app)
class AutomationRun(Resource):
    """
    Central resource for managing automation runs.
    """
    _URL = {
        'query': '/automation/runs',
        'get': '/automation/runs/{id}',
        'actions': '/automation/runs/{id}/actions/{action}',
        'state': '/automation/runs/{id}/state',
    }

    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    name = StringField(read_only=False)
    automation = CompoundField(Automation, read_only=True)
    package = CompoundField(AutomationPackage, read_only=True)
    inputs = DictField(read_only=False)
    outputs = DictField(read_only=True)
    settings = DictField(read_only=False)
    created_on = DateTimeField(read_only=True)
    start_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    resumed_from = StringField(read_only=True)
    created_by = StringField(read_only=True)
    status = StringField(read_only=True)
    message = StringField(read_only=True)
    execution_details = DictField(read_only=True)
    memory_limit = IntegerField(read_only=False)
    project_id = StringField(read_only=True)

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<AutomationRun: id={self.id}>'

    @classmethod
    def query(cls, automation=None, package=None, status=None, name=None,
              created_by=None, created_from=None, created_to=None,
              project_id=None, order_by=None, order=None, offset=None,
              limit=None, api=None):
        """
        Query (List) automation runs.
        :param name: Automation run name
        :param automation: Automation template
        :param package: Package
        :param status: Run status
        :param created_by: Username of user that created the run
        :param order_by: Property by which to order results
        :param order: Ascending or descending ("asc" or "desc")
        :param created_from: Date the run is created after
        :param created_to: Date the run is created before
        :param project_id: Id of project if Automation run is project based
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        if automation:
            automation = Transform.to_automation(automation)
        if package:
            package = Transform.to_automation_package(package)
        api = api or cls._API
        return super()._query(
            url=cls._URL['query'],
            name=name,
            automation_id=automation,
            package_id=package,
            status=status,
            created_by=created_by,
            created_from=created_from,
            created_to=created_to,
            project_id=project_id,
            order_by=order_by,
            order=order,
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls, package, inputs=None, settings=None, resume_from=None,
               name=None, secret_settings=None, memory_limit=None, api=None):
        """
        Create and start a new run.
        :param package: Automation package id
        :param inputs: Input dictionary
        :param settings: Settings override dictionary
        :param resume_from: Run to resume from
        :param name: Automation run name
        :param secret_settings: dict to override secret_settings from
            automation template
        :param memory_limit: Memory limit in MB.
        :param api: sevenbridges Api instance
        :return: AutomationRun object
        """
        package = Transform.to_automation_package(package)

        data = {'package': package}
        # The API requires the inputs key to be present even when empty.
        data['inputs'] = inputs if inputs else dict()
        if settings:
            data['settings'] = settings
        if resume_from:
            data['resume_from'] = resume_from
        if name:
            data['name'] = name
        if secret_settings:
            data['secret_settings'] = secret_settings
        if memory_limit:
            data['memory_limit'] = memory_limit

        api = api or cls._API
        automation_run = api.post(
            url=cls._URL['query'],
            data=data,
        ).json()
        return AutomationRun(api=api, **automation_run)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the automation run on the server.
        :param inplace Apply edits on the current instance or get a new one.
        :return: Automation run instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving automation run', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            return AutomationRun(api=self._api, **data)
        else:
            raise ResourceNotModified()

    @classmethod
    def rerun(cls, id, package=None, inputs=None, settings=None,
              resume_from=None, name=None, secret_settings=None, merge=True,
              api=None):
        """
        Create and start rerun of existing automation.
        :param id: Automation id to rerun
        :param package: Automation package id
        :param inputs: Input dictionary
        :param settings: Settings override dictionary
        :param resume_from: Run to resume from
        :param name: Automation run name
        :param secret_settings: dict to override secret_settings from
            automation template
        :param merge: merge settings and inputs of run
        :param api: sevenbridges Api instance
        :return: AutomationRun object
        """
        data = {'merge': merge}
        if package:
            data['package'] = package
        if inputs:
            data['inputs'] = inputs
        if settings:
            data['settings'] = settings
        if resume_from:
            data['resume_from'] = resume_from
        if name:
            data['name'] = name
        if secret_settings:
            data['secret_settings'] = secret_settings

        api = api or cls._API
        # Bug fix: the payload built above was previously discarded — the
        # POST was issued without data=data, so every rerun override
        # (package, inputs, settings, merge, ...) was silently ignored.
        automation_run = api.post(
            url=cls._URL['actions'].format(
                id=id, action=AutomationRunActions.RERUN),
            data=data,
        ).json()
        return AutomationRun(api=api, **automation_run)

    def stop(self, api=None):
        """
        Stop automation run.
        :param api: sevenbridges Api instance.
        :return: AutomationRun object
        """
        api = api or self._API
        return api.post(url=self._URL['actions'].format(
            id=self.id, action=AutomationRunActions.STOP)).content

    def get_log_file(self, api=None):
        """
        Retrieve automation run log.
        :param api: sevenbridges Api instance
        :return: Log string
        """
        api = api or self._API
        log_file_data = self.execution_details.get('log_file')
        return File(api=api, **log_file_data) if log_file_data else None

    def get_state(self, api=None):
        """
        Retrieve automation run state.
        :param api: sevenbridges Api instance
        :return: State file json contents as string
        """
        api = api or self._API
        return api.get(self._URL['state'].format(id=self.id)).json()
class Automation(Resource):
    """
    Central resource for managing automations.
    """
    # noinspection PyProtectedMember
    _URL = {
        'query': '/automation/automations',
        'get': '/automation/automations/{id}',
        'member': AutomationMember._URL['get'],
        'members': AutomationMember._URL['query'],
        'packages': AutomationPackage._URL['query'],
        'archive': '/automation/automations/{automation_id}/actions/archive',
        'restore': '/automation/automations/{automation_id}/actions/restore'
    }

    href = HrefField(read_only=True)
    id = UuidField(read_only=True)
    name = StringField(read_only=False)
    description = StringField(read_only=False)
    billing_group = UuidField(read_only=False)
    owner = StringField(read_only=True)
    created_by = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_by = StringField(read_only=True)
    modified_on = DateTimeField(read_only=False)
    archived = BooleanField(read_only=True)
    secret_settings = DictField(read_only=False)
    memory_limit = IntegerField(read_only=False)
    project_based = BooleanField(read_only=False)

    def __eq__(self, other):
        # Identity short-circuit, then compare by server-assigned id.
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<Automation: id={self.id} name={self.name}>'

    @classmethod
    def query(cls, name=None, include_archived=False, project_based=None,
              offset=None, limit=None, api=None):
        """
        Query (List) automations.
        :param name: Automation name.
        :param include_archived: Include archived automations also.
        :param project_based: Search project based automations.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        api = api or cls._API
        return super()._query(
            url=cls._URL['query'],
            name=name,
            include_archived=include_archived,
            project_based=project_based,
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls, name, description=None, billing_group=None,
               secret_settings=None, project_based=None, memory_limit=None,
               api=None):
        """
        Create a automation template.
        :param name: Automation name.
        :param billing_group: Automation billing group.
        :param description: Automation description.
        :param secret_settings: Automation settings.
        :param project_based: Create project based automation template.
        :param memory_limit: Memory limit in MB.
        :param api: Api instance.
        :return: Automation object.
        """
        api = api if api else cls._API

        if name is None:
            raise SbgError('Automation name is required!')

        data = {
            'name': name,
        }
        # Optional fields are only sent when truthy.
        if billing_group:
            data['billing_group'] = Transform.to_billing_group(billing_group)
        if description:
            data['description'] = description
        if secret_settings:
            data['secret_settings'] = secret_settings
        if project_based:
            data['project_based'] = project_based
        if memory_limit:
            data['memory_limit'] = memory_limit

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating automation template', extra=extra)
        automation_data = api.post(url=cls._URL['query'], data=data).json()
        return Automation(api=api, **automation_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the automation template on the server.
        :param inplace Apply edits on the current instance or get a new one.
        :return: Automation instance.
        :raises ResourceNotModified: when there are no local modifications.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving automation template', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            return Automation(api=self._api, **data)
        else:
            raise ResourceNotModified()

    @inplace_reload
    def archive(self):
        """
        Archive automation.
        :return: Automation instance.
        """
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Archive automation', extra=extra)
        automation_data = self._api.post(url=self._URL['archive'].format(
            automation_id=self.id)).json()
        return Automation(api=self._api, **automation_data)

    @inplace_reload
    def restore(self):
        """
        Restore archived automation.
        :return: Automation instance.
        """
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Restore archived automation', extra=extra)
        automation_data = self._api.post(url=self._URL['restore'].format(
            automation_id=self.id)).json()
        return Automation(api=self._api, **automation_data)

    def get_packages(self, offset=None, limit=None, api=None):
        """
        Return list of packages that belong to this automation.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage collection
        """
        api = api or self._API
        return AutomationPackage.query(
            automation=self.id, offset=offset, limit=limit, api=api
        )

    @classmethod
    def get_package(cls, package, api=None):
        """
        Return specified automation package.
        :param package: Automation Package Id.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage object
        """
        package_id = Transform.to_automation_package(package)
        api = api or cls._API
        return AutomationPackage.get(id=package_id, api=api)

    def add_package(self, version, file_path, schema, file_name=None,
                    retry_count=RequestParameters.DEFAULT_RETRY_COUNT,
                    timeout=RequestParameters.DEFAULT_TIMEOUT,
                    part_size=None, api=None):
        """
        Add a code package to automation template.
        :param version: The code package version.
        :param file_path: Path to the code package file to be uploaded.
        :param schema: IO schema for main step of execution.
        :param part_size: Size of upload part in bytes.
        :param file_name: Optional file name.
        :param retry_count: Upload retry count.
        :param timeout: Timeout for s3/google session.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage
        :raises SbgError: if version or file_path is missing.
        """
        api = api or self._API
        if version is None:
            raise SbgError('Code package version is required!')
        if file_path is None:
            raise SbgError('Code package file path is required!')

        # Multipart upload the code package:
        upload = CodePackageUpload(
            file_path, self.id, api=api, part_size=part_size,
            file_name=file_name, retry_count=retry_count, timeout=timeout
        )
        upload.start()
        upload.wait()
        package_file = upload.result()

        # Create the automation package:
        return AutomationPackage.create(
            self.id, version=version, location=package_file.id,
            schema=schema, api=api
        )

    def get_member(self, username, api=None):
        """
        Return specified automation member.
        :param username: Member username.
        :param api: sevenbridges Api instance.
        :return: AutomationMember object
        """
        member = Transform.to_automation_member(username)
        api = api or self._API
        return AutomationMember.get(id=member, automation=self.id, api=api)

    def get_members(self, offset=None, limit=None, api=None):
        """
        Return list of automation members.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance.
        :return: AutomationMember collection
        """
        api = api or self._API
        return AutomationMember.query(
            automation=self.id, offset=offset, limit=limit, api=api
        )

    def add_member(self, user, permissions, api=None):
        """
        Add member to the automation.
        :param user: Member username.
        :param permissions: Member permissions.
        :param api: sevenbridges Api instance.
        :return: AutomationMember object
        """
        api = api or self._API
        return AutomationMember.add(
            automation=self.id, user=user, permissions=permissions, api=api
        )

    def remove_member(self, user, api=None):
        """
        Remove a member from the automation.
        :param user: Member username.
        :param api: sevenbridges Api instance.
        :return: None
        """
        api = api or self._API
        AutomationMember.remove(automation=self.id, user=user, api=api)

    def get_runs(self, package=None, status=None, name=None, created_by=None,
                 created_from=None, created_to=None, project_id=None,
                 order_by=None, order=None, offset=None, limit=None,
                 api=None):
        """
        Query automation runs that belong to this automation.
        :param package: Package id.
        :param status: Run status.
        :param name: Automation run name.
        :param created_by: Username of member that created the run.
        :param created_from: Date the run was created after.
        :param created_to: Date the run was created before.
        :param project_id: Search runs by project id, if run is project based.
        :param order_by: Property by which to order results.
        :param order: Ascending or Descending ("asc" or "desc").
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance.
        :return: AutomationRun collection
        """
        api = api or self._API
        return AutomationRun.query(
            automation=self.id, package=package, status=status, name=name,
            created_by=created_by, created_from=created_from,
            created_to=created_to, project_id=project_id, order_by=order_by,
            order=order, offset=offset, limit=limit, api=api
        )
class AutomationPackage(Resource):
    """
    Central resource for managing automation packages.
    """
    _URL = {
        'query': '/automation/automations/{automation_id}/packages',
        'get': '/automation/packages/{id}',
        'archive': "/automation/automations/{automation_id}"
                   "/packages/{id}/actions/archive",
        'restore': "/automation/automations/{automation_id}"
                   "/packages/{id}/actions/restore",
    }

    id = StringField(read_only=True)
    automation = UuidField(read_only=True)
    version = StringField(read_only=True)
    location = StringField(read_only=True)
    schema = DictField(read_only=True)
    created_by = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    archived = BooleanField(read_only=True)
    custom_url = StringField(read_only=False)
    memory_limit = IntegerField(read_only=False)

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<AutomationPackage: id={self.id}>'

    @classmethod
    def query(cls, automation, offset=None, limit=None, api=None):
        """
        Query (List) automation packages.
        :param automation: Automation id.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        automation_id = Transform.to_automation(automation)
        api = api or cls._API
        return super()._query(
            url=cls._URL['query'].format(automation_id=automation_id),
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls, automation, version, location, schema,
               memory_limit=None, api=None):
        """
        Create a code package.
        :param automation: Automation id.
        :param version: The code package version.
        :param location: File ID of the uploaded code package.
        :param schema: IO schema for main step of execution.
        :param memory_limit: Memory limit in MB.
        :param api: Api instance.
        :return: AutomationPackage object.
        :raises SbgError: if version, location or schema is missing.
        """
        automation_id = Transform.to_automation(automation)
        api = api if api else cls._API

        if version is None:
            raise SbgError('Code package version is required!')
        if location is None:
            raise SbgError('Code package location is required!')
        if schema is None:
            raise SbgError('Schema is required!')

        data = {
            'version': version,
            'location': location,
            'schema': schema,
            'memory_limit': memory_limit,
        }
        extra = {'resource': cls.__name__, 'query': data}
        package_data = api.post(
            cls._URL['query'].format(automation_id=automation_id),
            data=data).json()
        logger.info(
            'Add code package to automation with id %s',
            automation_id, extra=extra
        )
        return AutomationPackage(api=api, **package_data)

    @inplace_reload
    def archive(self):
        """
        Archive package.
        :return: AutomationPackage object.
        """
        automation_id = Transform.to_automation(self.automation)
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Archive automation package', extra=extra)
        package_data = self._api.post(url=self._URL['archive'].format(
            automation_id=automation_id, id=self.id)).json()
        return AutomationPackage(api=self._api, **package_data)

    @inplace_reload
    def restore(self):
        """
        Restore archived package.
        :return: AutomationPackage object.
        """
        automation_id = Transform.to_automation(self.automation)
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Restore archived automation package', extra=extra)
        package_data = self._api.post(url=self._URL['restore'].format(
            automation_id=automation_id, id=self.id)).json()
        return AutomationPackage(api=self._api, **package_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the automation package on the server.
        :param inplace Apply edits on the current instance or get a new one.
        :return: AutomationPackage instance.
        :raises ResourceNotModified: when there are no local modifications.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving automation package', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            return AutomationPackage(api=self._api, **data)
        else:
            raise ResourceNotModified()
class Task(Resource):
    """
    Central resource for managing tasks.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details"
    }

    href = HrefField()
    id = UuidField()
    name = StringField()
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField()
    app = StringField()
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    created_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    batch = BooleanField(read_only=False)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)
    use_interruptible_instances = BooleanField()

    def __str__(self):
        return six.text_type('<Task: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def query(cls, project=None, status=None, batch=None, parent=None,
              created_from=None, created_to=None, started_from=None,
              started_to=None, ended_from=None, ended_to=None, offset=None,
              limit=None, api=None):
        """
        Query (List) tasks. Date parameters may be both strings and python
        date objects.
        :param project: Target project. optional.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param ended_to: All tasks that ended until this date.
        :param ended_from: All tasks that ended from this date.
        :param started_to: All tasks that were started until this date.
        :param started_from: All tasks that were started from this date.
        :param created_to: All tasks that were created until this date.
        :param created_from: All tasks that were created from this date.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        # Dates may be strings or date objects; normalize all of them.
        if created_from:
            created_from = Transform.to_datestring(created_from)
        if created_to:
            created_to = Transform.to_datestring(created_to)
        if started_from:
            started_from = Transform.to_datestring(started_from)
        if started_to:
            started_to = Transform.to_datestring(started_to)
        if ended_from:
            ended_from = Transform.to_datestring(ended_from)
        if ended_to:
            ended_to = Transform.to_datestring(ended_to)

        return super(Task, cls)._query(
            url=cls._URL['query'], project=project, status=status,
            batch=batch, parent=parent, created_from=created_from,
            created_to=created_to, started_from=started_from,
            started_to=started_to, ended_from=ended_from, ended_to=ended_to,
            offset=offset, limit=limit, fields='_all', api=api
        )

    @classmethod
    def create(cls, name, project, app, revision=None, batch_input=None,
               batch_by=None, inputs=None, description=None, run=False,
               disable_batch=False, interruptible=True, api=None):
        """
        Creates a task on server.
        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param disable_batch: If True disables batching of a batch task.
        :param interruptible: If True interruptible instance will be used.
        :param api: Api instance.
        :return: Task object.
        :raises: TaskValidationError if validation Fails.
        :raises: SbgError if any exception occurs during request.
        """
        task_data = {}
        params = {}

        project = Transform.to_project(project)
        app_id = Transform.to_app(app)

        # Pin the app revision: either the explicit one, or the revision
        # carried by an App instance when one was passed in.
        if revision:
            app_id = app_id + "/" + six.text_type(revision)
        else:
            if isinstance(app, App):
                app_id = app_id + "/" + six.text_type(app.revision)

        task_inputs = {
            'inputs': Task._serialize_inputs(inputs) if inputs else {}
        }

        if batch_input and batch_by:
            task_data['batch_input'] = batch_input
            task_data['batch_by'] = batch_by
            if disable_batch:
                params.update({'batch': False})

        task_meta = {
            'name': name,
            'project': project,
            'app': app_id,
            'description': description,
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)
        task_data['use_interruptible_instances'] = interruptible

        if run:
            params.update({'action': 'run'})

        api = api if api else cls._API
        created_task = api.post(cls._URL['query'], data=task_data,
                                params=params).json()
        if run and 'errors' in created_task:
            if bool(created_task['errors']):
                raise TaskValidationError(
                    'Unable to run task! Task contains errors.',
                    task=Task(api=api, **created_task))
        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task.
        :param inplace Apply action on the current object or return a new one.
        :return: Task object.
        """
        extra = {'resource': self.__class__.__name__,
                 'query': {'id': self.id}}
        logger.info('Aborting task', extra=extra)
        task_data = self._api.post(url=self._URL['abort'].format(
            id=self.id)).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, interruptible=True, inplace=True):
        """
        Run task.
        :param batch if False batching will be disabled.
        :param interruptible: If true interruptible instance will be used.
        :param inplace Apply action on the current object or return a new one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False
        params['use_interruptible_instances'] = interruptible
        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'batch': batch
            }
        }
        logger.info('Running task', extra=extra)
        task_data = self._api.post(url=self._URL['run'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the task on the server.
        :param inplace Apply edits on the current instance or get a new one.
        :return: Task instance.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            task_request_data = {}
            # Inputs need custom File serialization before being sent.
            inputs = modified_data.pop('inputs', None)
            task_request_data.update(modified_data)
            if inputs:
                task_request_data['inputs'] = self._serialize_inputs(inputs)
            extra = {
                'resource': self.__class__.__name__,
                'query': {
                    'id': self.id,
                    'data': task_request_data
                }
            }
            logger.info('Saving task', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=task_request_data).json()
            task = Task(api=self._api, **data)
            return task

    @staticmethod
    def _serialize_inputs(inputs):
        """Serialize task input dictionary"""
        serialized_inputs = {}
        for input_id, input_value in inputs.items():
            if isinstance(input_value, list):
                serialized_list = Task._serialize_input_list(input_value)
                serialized_inputs[input_id] = serialized_list
            else:
                if isinstance(input_value, File):
                    input_value = Task._to_api_file_format(input_value)
                serialized_inputs[input_id] = input_value
        return serialized_inputs

    @staticmethod
    def _serialize_input_list(input_value):
        """Recursively serialize task input list"""
        input_list = []
        for item in input_value:
            if isinstance(item, list):
                input_list.append(Task._serialize_input_list(item))
            else:
                if isinstance(item, File):
                    item = Task._to_api_file_format(item)
                input_list.append(item)
        return input_list

    @staticmethod
    def _to_api_file_format(_file):
        """Convert a File resource into the API's file-reference dict."""
        api_file = {'class': 'File', 'path': _file.id}
        if _file.name:
            api_file['name'] = _file.name
        return api_file

    def get_execution_details(self):
        """
        Retrieves execution details for a task.
        :return: Execution details instance.
        """
        extra = {'resource': self.__class__.__name__,
                 'query': {'id': self.id}}
        logger.info('Get execution details', extra=extra)
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self):
        """
        Retrieves batch child tasks for this task if its a batch task.
        :return: Collection instance.
        :raises SbgError: if task is not a batch task.
        """
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(parent=self.id, api=self._api)
class Export(Resource):
    """
    Central resource for managing exports.
    """
    _URL = {
        'query': '/storage/exports',
        'get': '/storage/exports/{id}',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    # Raw 'source' payload; exposed as a File through the `source` property.
    _source = DictField(name='source', read_only=True)
    destination = CompoundField(VolumeFile, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    # Raw 'result' payload; exposed as a File through the `result` property.
    _result = DictField(name='result', read_only=True)
    properties = CompoundField(VolumeProperties, read_only=True)

    def __str__(self):
        return six.text_type('<Export: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        # Consistent with the sibling Import resource: equal when the other
        # object is of the same class and carries the same id.
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def source(self):
        """Source file of the export, or None if not present."""
        try:
            return File(id=self._source['file'], api=self._api)
        except TypeError:
            # _source is None (field missing from the server response).
            return None

    @property
    def result(self):
        """Resulting file of the export, or None if not present."""
        try:
            return File(id=self._result['id'], api=self._api)
        except TypeError:
            return None

    @classmethod
    def submit_export(cls, file, volume, location, properties=None,
                      overwrite=False, api=None):
        """
        Submit new export job.
        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If true it will overwrite file if exists.
        :param api: Api Instance.
        :return: Export object.
        """
        data = {}
        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)
        destination = {'volume': volume, 'location': location}
        source = {'file': file}
        if properties:
            data['properties'] = properties
        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite
        api = api if api else cls._API
        # Log the submission, consistent with Import.submit_import.
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting export', extra=extra)
        _export = api.post(cls._URL['query'], data=data).json()
        return Export(api=api, **_export)

    @classmethod
    def query(cls, project=None, volume=None, state=None, offset=None,
              limit=None, api=None):
        """
        Query (List) exports.
        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional export state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if project:
            project = Transform.to_project(project)
        if volume:
            volume = Transform.to_volume(volume)
        return super(Export, cls)._query(url=cls._URL['query'],
                                         project=project, volume=volume,
                                         state=state, offset=offset,
                                         limit=limit, fields='_all', api=api)
class Import(Resource):
    """
    Central resource for managing imports.
    """
    _URL = {
        'query': '/storage/imports',
        'get': '/storage/imports/{id}',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    source = CompoundField(VolumeFile, read_only=True)
    destination = CompoundField(ImportDestination, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    # Raw 'result' payload; exposed as a File through the `result` property.
    _result = DictField(name='result', read_only=True)

    def __str__(self):
        return six.text_type('<Import: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        # Equal when the other object is of the same class and same id.
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def result(self):
        """Imported file, or None if the import has no result yet."""
        try:
            return File(id=self._result['id'], api=self._api)
        except TypeError:
            # _result is None (field missing from the server response).
            return None

    @classmethod
    def submit_import(cls, volume, location, project, name=None,
                      overwrite=False, properties=None, api=None):
        """
        Submits new import job.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param project: Project identifier.
        :param name: Optional file name.
        :param overwrite: If true it will overwrite file if exists.
        :param properties: Properties dictionary.
        :param api: Api instance.
        :return: Import object.
        """
        data = {}
        volume = Transform.to_volume(volume)
        project = Transform.to_project(project)
        source = {
            'volume': volume,
            'location': location
        }
        destination = {
            'project': project
        }
        if name:
            destination['name'] = name
        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite
        if properties:
            data['properties'] = properties
        api = api if api else cls._API
        extra = {
            'resource': cls.__name__,
            'query': data
        }
        logger.info('Submitting import', extra=extra)
        _import = api.post(cls._URL['query'], data=data).json()
        return Import(api=api, **_import)

    @classmethod
    def query(cls, project=None, volume=None, state=None, offset=None,
              limit=None, api=None):
        """
        Query (List) imports.
        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional import state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if project:
            project = Transform.to_project(project)
        if volume:
            volume = Transform.to_volume(volume)
        return super(Import, cls)._query(
            url=cls._URL['query'],
            project=project,
            volume=volume,
            state=state,
            fields='_all',
            offset=offset,
            limit=limit,
            api=api
        )
class AutomationRun(Resource):
    """
    Central resource for managing automation runs.
    """
    _URL = {
        'query': '/automation/runs',
        'get': '/automation/runs/{id}',
        'actions': '/automation/runs/{id}/actions/{action}',
        'state': '/automation/runs/{id}/state',
    }

    href = HrefField()
    id = StringField(read_only=True)
    name = StringField(read_only=True)
    automation = CompoundField(Automation, read_only=True)
    package = CompoundField(AutomationPackage, read_only=True)
    inputs = DictField()
    settings = DictField()
    created_on = DateTimeField(read_only=True)
    start_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    resumed_from = StringField(read_only=True)
    created_by = StringField(read_only=True)
    status = StringField(read_only=True)
    message = StringField(read_only=True)
    execution_details = DictField(read_only=True)

    def __eq__(self, other):
        # Runs are equal when they are the same type and share the same id.
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return six.text_type('<AutomationRun: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls, automation=None, package=None, status=None, name=None,
              created_by=None, created_from=None, created_to=None,
              order_by=None, order=None, offset=None, limit=None, api=None):
        """
        Query (List) automation runs.
        :param name: Automation run name
        :param automation: Automation template
        :param package: Package
        :param status: Run status
        :param created_by: Username of user that created the run
        :param order_by: Property by which to order results
        :param order: Ascending or descending ("asc" or "desc")
        :param created_from: Date the run is created after
        :param created_to: Date the run is created before
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        # Accept both resource objects and plain string identifiers.
        if automation:
            automation = Transform.to_automation(automation)
        if package:
            package = Transform.to_automation_package(package)
        api = api or cls._API
        return super(AutomationRun, cls)._query(
            url=cls._URL['query'],
            name=name,
            automation=automation,
            package=package,
            status=status,
            created_by=created_by,
            created_from=created_from,
            created_to=created_to,
            order_by=order_by,
            order=order,
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls, package, inputs=None, settings=None, resume_from=None,
               name=None, secret_settings=None, api=None):
        """
        Create and start a new run.
        :param package: Automation package id
        :param inputs: Input dictionary
        :param settings: Settings override dictionary
        :param resume_from: Run to resume from
        :param name: Automation run name
        :param secret_settings: dict to override secret_settings from
            automation template
        :param api: sevenbridges Api instance
        :return: AutomationRun object
        """
        package = Transform.to_automation_package(package)
        data = {'package': package}
        # Only include optional fields that were actually provided.
        if inputs:
            data['inputs'] = inputs
        if settings:
            data['settings'] = settings
        if resume_from:
            data['resume_from'] = resume_from
        if name:
            data['name'] = name
        if secret_settings:
            data['secret_settings'] = secret_settings
        api = api or cls._API
        automation_run = api.post(
            url=cls._URL['query'],
            data=data,
        ).json()
        return AutomationRun(api=api, **automation_run)

    def stop(self, api=None):
        """
        Stop automation run.
        :param api: sevenbridges Api instance.
        :return: AutomationRun object
        """
        api = api or self._API
        # Returns the raw response body of the stop action.
        return api.post(url=self._URL['actions'].format(
            id=self.id, action=AutomationRunActions.STOP)).content

    def get_log_file(self, api=None):
        """
        Retrieve automation run log.
        :param api: sevenbridges Api instance
        :return: Log string
        """
        api = api or self._API
        log_file_data = self.execution_details.get('log_file')
        # No log file exists until the run has produced one.
        return File(api=api, **log_file_data) if log_file_data else None

    def get_state(self, api=None):
        """
        Retrieve automation run state.
        :param api: sevenbridges Api instance
        :return: State file json contents as string
        """
        api = api or self._API
        return api.get(self._URL['state'].format(id=self.id)).json()
class Export(Resource):
    """
    Central resource for managing exports.
    """
    _URL = {
        'query': '/storage/exports',
        'get': '/storage/exports/{id}',
        'bulk_get': '/bulk/storage/exports/get',
        'bulk_create': '/bulk/storage/exports/create',
    }

    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    # Raw 'source' payload; exposed as a File through the `source` property.
    _source = DictField(name='source', read_only=True)
    destination = CompoundField(VolumeFile, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    # Raw 'result' payload; exposed as a File through the `result` property.
    _result = DictField(name='result', read_only=True)
    properties = CompoundField(VolumeProperties, read_only=True)

    def __str__(self):
        return f'<Export: id={self.id}>'

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @property
    def source(self):
        """Source file of the export, or None if not present."""
        try:
            return File(id=self._source['file'], api=self._api)
        except TypeError:
            # _source is None (field missing from the server response).
            return None

    @property
    def result(self):
        """Resulting file of the export, or None if not present."""
        try:
            return File(api=self._api, **self._result)
        except TypeError:
            return None

    @classmethod
    def submit_export(cls, file, volume, location, properties=None,
                      overwrite=False, copy_only=False, api=None):
        """
        Submit new export job.
        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If true it will overwrite file if exists
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api Instance.
        :return: Export object.
        """
        data = {}
        params = {}
        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)
        destination = {'volume': volume, 'location': location}
        source = {'file': file}
        if properties:
            data['properties'] = properties
        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting export', extra=extra)
        api = api if api else cls._API
        # 'copy_only' is only sent when requested so the default request
        # stays identical to the plain (non-copy) submission.
        if copy_only:
            params['copy_only'] = True
            _export = api.post(
                cls._URL['query'], data=data, params=params).json()
        else:
            _export = api.post(cls._URL['query'], data=data).json()
        return Export(api=api, **_export)

    @classmethod
    def query(cls, volume=None, state=None, offset=None, limit=None,
              api=None):
        """
        Query (List) exports.
        :param volume: Optional volume identifier.
        :param state: Optional export state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if volume:
            volume = Transform.to_volume(volume)
        return super()._query(url=cls._URL['query'], volume=volume,
                              state=state, offset=offset, limit=limit,
                              fields='_all', api=api)

    @classmethod
    def bulk_get(cls, exports, api=None):
        """
        Retrieve exports in bulk.
        :param exports: Exports to be retrieved.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        """
        api = api or cls._API
        export_ids = [Transform.to_export(export) for export in exports]
        data = {'export_ids': export_ids}
        response = api.post(url=cls._URL['bulk_get'], data=data)
        return ExportBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_submit(cls, exports, copy_only=False, api=None):
        """
        Create exports in bulk.
        :param exports: List of dicts describing a wanted export.
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        :raises SbgError: when `exports` is empty or missing.
        """
        if not exports:
            raise SbgError('Exports are required')
        api = api or cls._API
        items = []
        for export in exports:
            file_ = Transform.to_file(export.get('file'))
            volume = Transform.to_volume(export.get('volume'))
            location = Transform.to_location(export.get('location'))
            properties = export.get('properties', {})
            overwrite = export.get('overwrite', False)
            item = {
                'source': {
                    'file': file_
                },
                'destination': {
                    'volume': volume,
                    'location': location
                },
                'properties': properties,
                'overwrite': overwrite
            }
            items.append(item)
        data = {'items': items}
        params = {'copy_only': copy_only}
        response = api.post(url=cls._URL['bulk_create'], params=params,
                            data=data)
        return ExportBulkRecord.parse_records(response=response, api=api)
class Task(Resource):
    """
    Central resource for managing tasks.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details"
    }

    href = HrefField()
    id = UuidField()
    name = StringField()
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField()
    app = StringField()
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    batch = BooleanField(read_only=True)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    end_time = DateTimeField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)

    def __str__(self):
        return six.text_type('<Task: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls, project=None, status=None, batch=None,
              parent=None, offset=None, limit=None, api=None):
        """
        Query (List) tasks
        :param project: Target project. optional.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        return super(Task, cls)._query(url=cls._URL['query'], project=project,
                                       status=status, batch=batch,
                                       parent=parent, offset=offset,
                                       limit=limit, fields='_all', api=api)

    @classmethod
    def create(cls, name, project, app, revision=None, batch_input=None,
               batch_by=None, inputs=None, description=None, run=False,
               api=None):
        """
        Creates a task on server.
        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param api: Api instance.
        :return: Task object.
        :raises: TaskValidationError if validation fails.
        :raises: SbgError if any exception occurs during request.
        """
        task_data = {}
        project = Transform.to_project(project)
        app = Transform.to_app(app)
        if revision:
            app = app + "/" + six.text_type(revision)

        # Serialize inputs, converting File objects (both top-level and
        # inside lists) to the API file representation. `inputs` defaults
        # to None, so guard against it before iterating.
        task_inputs = {'inputs': {}}
        for input_id, input_value in (inputs or {}).items():
            if isinstance(input_value, File):
                # Use the shared serializer so single-file inputs keep
                # their 'name', consistent with files inside lists.
                task_inputs['inputs'][input_id] = (
                    Task._to_api_file_format(input_value))
            elif isinstance(input_value, list):
                task_inputs['inputs'][input_id] = [
                    Task._to_api_file_format(item)
                    if isinstance(item, File) else item
                    for item in input_value
                ]
            else:
                task_inputs['inputs'][input_id] = input_value

        if batch_input:
            task_data['batch_input'] = batch_input
        if batch_by:
            task_data['batch_by'] = batch_by
        task_meta = {
            'name': name,
            'project': project,
            'app': app,
            'description': description
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)
        params = {'action': 'run'} if run else {}
        api = api if api else cls._API
        created_task = api.post(cls._URL['query'], data=task_data,
                                params=params).json()
        if run and 'errors' in created_task:
            if bool(created_task['errors']):
                raise TaskValidationError(
                    'Unable to run task! Task contains errors.',
                    task=Task(api=api, **created_task))
        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task
        :param inplace Apply action on the current object or return a new one.
        :return: Task object.
        """
        task_data = self._api.post(url=self._URL['abort'].format(
            id=self.id)).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, inplace=True):
        """
        Run task
        :param batch if False batching will be disabled.
        :param inplace Apply action on the current object or return a new one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False
        task_data = self._api.post(url=self._URL['run'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the task on the server.
        :param inplace Apply edits on the current instance or get a new one.
        :return: Task instance.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            task_request_data = {}
            inputs = modified_data.pop('inputs', None)
            task_request_data.update(modified_data)
            if inputs:
                task_request_data['inputs'] = {}
                for input_id, input_value in inputs.items():
                    if isinstance(input_value, File):
                        in_file = Task._to_api_file_format(input_value)
                        task_request_data['inputs'][input_id] = in_file
                    elif isinstance(input_value, list):
                        # Serialize in a single pass so the original item
                        # order is preserved (the previous implementation
                        # moved all File items to the end of the list).
                        task_request_data['inputs'][input_id] = [
                            Task._to_api_file_format(item)
                            if isinstance(item, File) else item
                            for item in input_value
                        ]
                    else:
                        task_request_data['inputs'][input_id] = input_value
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=task_request_data).json()
            task = Task(api=self._api, **data)
            return task

    @staticmethod
    def _to_api_file_format(_file):
        # API expects files as {'class': 'File', 'path': <id>[, 'name': ..]}.
        api_file = {'class': 'File', 'path': _file.id}
        if _file.name:
            api_file['name'] = _file.name
        return api_file

    def get_execution_details(self):
        """
        Retrieves execution details for a task.
        :return: Execution details instance.
        """
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self):
        """
        Retrieves batch child tasks for this task if its a batch task.
        :return: Collection instance.
        :raises SbError if task is not a batch task.
        """
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(parent=self.id, api=self._api)
class Import(Resource):
    """
    Central resource for managing imports.
    """
    _URL = {
        'query': '/storage/imports',
        'get': '/storage/imports/{id}',
        'bulk_get': '/bulk/storage/imports/get',
        'bulk_create': '/bulk/storage/imports/create',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    preserve_folder_structure = BooleanField(read_only=True)
    source = CompoundField(VolumeFile, read_only=True)
    destination = CompoundField(ImportDestination, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    # Raw 'result' payload; exposed as a File through the `result` property.
    _result = DictField(name='result', read_only=True)

    def __str__(self):
        return six.text_type('<Import: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def result(self):
        """Imported file, or None when the import has no result yet."""
        try:
            return File(api=self._api, **self._result)
        except TypeError:
            # _result is None (field missing from the server response).
            return None

    @classmethod
    def submit_import(cls, volume, location, project=None, name=None,
                      overwrite=False, properties=None, parent=None,
                      preserve_folder_structure=True, api=None):
        """
        Submits new import job.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param project: Project identifier.
        :param name: Optional file name.
        :param overwrite: If true it will overwrite file if exists.
        :param properties: Properties dictionary.
        :param parent: The ID of the target folder to which the item should
            be imported. Should not be used together with project.
        :param preserve_folder_structure: Whether to keep the exact source
            folder structure. The default value is true if the item being
            imported is a folder. Should not be used if you are importing
            a file.
        :param api: Api instance.
        :return: Import object.
        :raises SbgError: when both or neither of project/parent are given.
        """
        data = {}
        volume = Transform.to_volume(volume)
        # Exactly one of project/parent selects the import destination.
        if project and parent:
            raise SbgError(
                'Project and parent identifiers are mutually exclusive')
        elif project:
            project = Transform.to_project(project)
            destination = {'project': project}
        elif parent:
            parent = Transform.to_file(parent)
            destination = {'parent': parent}
        else:
            raise SbgError('Project or parent identifier is required.')
        source = {'volume': volume, 'location': location}
        if name:
            destination['name'] = name
        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite
        # Only sent when disabled; the server default is to preserve.
        if not preserve_folder_structure:
            data['preserve_folder_structure'] = preserve_folder_structure
        if properties:
            data['properties'] = properties
        api = api if api else cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting import', extra=extra)
        _import = api.post(cls._URL['query'], data=data).json()
        return Import(api=api, **_import)

    @classmethod
    def query(cls, project=None, volume=None, state=None, offset=None,
              limit=None, api=None):
        """
        Query (List) imports.
        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional import state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if project:
            project = Transform.to_project(project)
        if volume:
            volume = Transform.to_volume(volume)
        return super(Import, cls)._query(url=cls._URL['query'],
                                         project=project, volume=volume,
                                         state=state, fields='_all',
                                         offset=offset, limit=limit, api=api)

    @classmethod
    def bulk_get(cls, imports, api=None):
        """
        Retrieve imports in bulk
        :param imports: Imports to be retrieved.
        :param api: Api instance.
        :return: List of ImportBulkRecord objects.
        """
        api = api or cls._API
        import_ids = [Transform.to_import(import_) for import_ in imports]
        data = {'import_ids': import_ids}
        response = api.post(url=cls._URL['bulk_get'], data=data)
        return ImportBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_submit(cls, imports, api=None):
        """
        Submit imports in bulk
        :param imports: List of dicts describing the imports to be submitted.
        :param api: Api instance.
        :return: List of ImportBulkRecord objects.
        :raises SbgError: when `imports` is empty or an item is invalid.
        """
        if not imports:
            raise SbgError('Imports are required')
        api = api or cls._API
        items = []
        for import_ in imports:
            project = import_.get('project')
            parent = import_.get('parent')
            # Same destination rules as submit_import.
            if project and parent:
                raise SbgError(
                    'Project and parent identifiers are mutually exclusive')
            elif project:
                destination = {'project': Transform.to_project(project)}
            elif parent:
                destination = {'parent': Transform.to_file(parent)}
            else:
                raise SbgError('Project or parent identifier is required.')
            volume = Transform.to_volume(import_.get('volume'))
            location = Transform.to_location(import_.get('location'))
            name = import_.get('name', None)
            overwrite = import_.get('overwrite', False)
            if name:
                destination['name'] = name
            items.append({
                'source': {
                    'volume': volume,
                    'location': location
                },
                'destination': destination,
                'overwrite': overwrite
            })
        data = {'items': items}
        response = api.post(url=cls._URL['bulk_create'], data=data)
        return ImportBulkRecord.parse_records(response=response, api=api)
class Task(Resource):
    """
    Central resource for managing tasks.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'clone': '/tasks/{id}/actions/clone',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details",
        'bulk_get': '/bulk/tasks/get',
    }

    href = HrefField(read_only=True)
    id = UuidField(read_only=True)
    name = StringField(read_only=False)
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField(read_only=False)
    app = StringField(read_only=False)
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    created_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    batch = BooleanField(read_only=False)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)
    execution_settings = DictField(read_only=True)
    use_interruptible_instances = BooleanField(read_only=False)

    def __str__(self):
        return f'<Task: id={self.id}>'

    def __eq__(self, other):
        # Tasks are equal when they are the same type and share the same id.
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @classmethod
    def query(cls, project=None, status=None, batch=None,
              parent=None, created_from=None, created_to=None,
              started_from=None, started_to=None, ended_from=None,
              ended_to=None, offset=None, limit=None, order_by=None,
              order=None, api=None):
        """
        Query (List) tasks. Date parameters may be both strings and
        python date objects.
        :param project: Target project. optional.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param ended_to: All tasks that ended until this date.
        :param ended_from: All tasks that ended from this date.
        :param started_to: All tasks that were started until this date.
        :param started_from: All tasks that were started from this date.
        :param created_to: All tasks that were created until this date.
        :param created_from: All tasks that were created from this date.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param order_by: Property to order by.
        :param order: Ascending or descending ordering.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        # Normalize identifiers and all date filters into API strings.
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        if created_from:
            created_from = Transform.to_datestring(created_from)
        if created_to:
            created_to = Transform.to_datestring(created_to)
        if started_from:
            started_from = Transform.to_datestring(started_from)
        if started_to:
            started_to = Transform.to_datestring(started_to)
        if ended_from:
            ended_from = Transform.to_datestring(ended_from)
        if ended_to:
            ended_to = Transform.to_datestring(ended_to)
        return super()._query(
            url=cls._URL['query'], project=project, status=status,
            batch=batch, parent=parent, created_from=created_from,
            created_to=created_to, started_from=started_from,
            started_to=started_to, ended_from=ended_from,
            ended_to=ended_to, offset=offset, limit=limit,
            order_by=order_by, order=order, fields='_all', api=api
        )

    @classmethod
    def create(cls, name, project, app, revision=None, batch_input=None,
               batch_by=None, inputs=None, description=None, run=False,
               disable_batch=False, interruptible=None,
               execution_settings=None, api=None):
        """
        Creates a task on server.
        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param disable_batch: If True disables batching of a batch task.
        :param interruptible: If True interruptible instance will be used.
        :param execution_settings: Execution settings for the task.
        :param api: Api instance.
        :return: Task object.
        :raises: TaskValidationError if validation Fails.
        :raises: SbgError if any exception occurs during request.
        """
        task_data = {}
        params = {}
        project = Transform.to_project(project)
        app_id = Transform.to_app(app)
        if revision:
            app_id = f'{app_id}/{revision}'
        else:
            # No explicit revision given: pin to the App object's own
            # revision when a full App resource was passed in.
            if isinstance(app, App):
                app_id = f'{app_id}/{app.revision}'
        task_inputs = {
            'inputs': Task._serialize_inputs(inputs) if inputs else {}
        }
        if batch_input and batch_by:
            task_data['batch_input'] = batch_input
            task_data['batch_by'] = batch_by
            # disable_batch only has an effect on batch tasks.
            if disable_batch:
                params.update({'batch': False})
        task_meta = {
            'name': name,
            'project': project,
            'app': app_id,
            'description': description,
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)
        if interruptible is not None:
            task_data['use_interruptible_instances'] = interruptible
        if execution_settings:
            task_data.update({'execution_settings': execution_settings})
        if run:
            params.update({'action': 'run'})
        api = api if api else cls._API
        created_task = api.post(cls._URL['query'], data=task_data,
                                params=params).json()
        # A task created with run=True may come back with validation errors
        # instead of actually starting.
        if run and 'errors' in created_task and created_task['errors']:
            raise TaskValidationError(
                'Unable to run task! Task contains errors.',
                task=Task(api=api, **created_task)
            )
        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task
        :param inplace Apply action on the current object or return a new one.
        :return: Task object.
        """
        extra = {'resource': type(self).__name__, 'query': {'id': self.id}}
        logger.info('Aborting task', extra=extra)
        task_data = self._api.post(url=self._URL['abort'].format(
            id=self.id)).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, interruptible=None, inplace=True):
        """
        Run task
        :param batch if False batching will be disabled.
        :param interruptible: If true interruptible instance will be used.
        :param inplace Apply action on the current object or return a new one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False
        if interruptible is not None:
            params['use_interruptible_instances'] = interruptible
        extra = {
            'resource': type(self).__name__,
            'query': {'id': self.id, 'batch': batch}
        }
        logger.info('Running task', extra=extra)
        task_data = self._api.post(url=self._URL['run'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    def clone(self, run=True):
        """
        Clone task
        :param run: run task after cloning
        :return: Task object.
        """
        params = {}
        if run:
            params.update({'action': 'run'})
        extra = {
            'resource': type(self).__name__,
            'query': {'id': self.id, 'run': run}
        }
        logger.info('Cloning task', extra=extra)
        task_data = self._api.post(url=self._URL['clone'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the task on the server.
        :param inplace Apply edits on the current instance or get a new one.
        :return: Task instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            task_request_data = {}
            # Inputs and execution settings need special serialization and
            # are handled separately from the other modified fields.
            inputs = modified_data.pop('inputs', None)
            execution_settings = modified_data.pop('execution_settings', None)
            task_request_data.update(modified_data)
            if inputs:
                task_request_data['inputs'] = self._serialize_inputs(inputs)
            if execution_settings:
                task_request_data['execution_settings'] = (
                    self._serialize_execution_settings(execution_settings))
            extra = {
                'resource': type(self).__name__,
                'query': {'id': self.id, 'data': task_request_data}
            }
            logger.info('Saving task', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=task_request_data).json()
            task = Task(api=self._api, **data)
            return task

    def _serialize_execution_settings(self, execution_settings):
        # Merge the override with the task's current settings: any key
        # missing from the override falls back to the existing value.
        instance_type = execution_settings.get(
            'instance_type',
            self.execution_settings.get('instance_type', None)
        )
        max_parallel_instances = execution_settings.get(
            'max_parallel_instances',
            self.execution_settings.get('max_parallel_instances', None)
        )
        use_memoization = execution_settings.get(
            'use_memoization',
            self.execution_settings.get('use_memoization', None)
        )
        serialized_es_mapping = {
            'instance_type': instance_type,
            'max_parallel_instances': max_parallel_instances,
            'use_memoization': use_memoization
        }
        # Drop keys that resolved to None so they are not sent to the API.
        serialized_es = dict()
        for key, value in serialized_es_mapping.items():
            if value is not None:
                serialized_es[key] = value
        return serialized_es

    @staticmethod
    def _serialize_inputs(input_value):
        """
        Recursively serialises input dictionary.
        :param input_value: input dictionary to serialize
        :return: serialized input dictionary
        """
        if isinstance(input_value, list):
            return_value = []
            for elem in input_value:
                return_value.append(Task._serialize_inputs(elem))
        elif isinstance(input_value, dict):
            return_value = {}
            for key in input_value:
                return_value[key] = Task._serialize_inputs(input_value[key])
        elif isinstance(input_value, File):
            # Files (and folders) are converted to the API file format.
            return_value = Task._to_api_file_format(input_value)
        else:
            return_value = input_value
        return return_value

    @staticmethod
    def _to_api_file_format(_file):
        return {
            'class': (FileApiFormats.FOLDER if _file.is_folder()
                      else FileApiFormats.FILE),
            'path': _file.id
        }

    def get_execution_details(self):
        """
        Retrieves execution details for a task.
        :return: Execution details instance.
        """
        extra = {'resource': type(self).__name__, 'query': {'id': self.id}}
        logger.info('Get execution details', extra=extra)
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self, status=None, created_from=None,
                           created_to=None, started_from=None,
                           started_to=None, ended_from=None, ended_to=None,
                           order_by=None, order=None, offset=None, limit=None,
                           api=None):
        """
        Retrieves batch child tasks for this task if its a batch task.
        Accepts the same filtering/ordering/pagination parameters as `query`.
        :return: Collection instance.
        :raises SbError if task is not a batch task.
        """
        api = api or self._api
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(
            parent=self.id, status=status, created_from=created_from,
            created_to=created_to, started_from=started_from,
            started_to=started_to, ended_from=ended_from, ended_to=ended_to,
            order_by=order_by, order=order, offset=offset, limit=limit,
            api=api,
        )

    @classmethod
    def bulk_get(cls, tasks, api=None):
        """
        Retrieve tasks with specified ids in bulk
        :param tasks: Tasks to be retrieved.
        :param api: Api instance.
        :return: List of TaskBulkRecord objects.
        """
        api = api or cls._API
        task_ids = [Transform.to_task(task) for task in tasks]
        data = {'task_ids': task_ids}
        logger.debug('Getting tasks in bulk.')
        response = api.post(url=cls._URL['bulk_get'], data=data)
        return TaskBulkRecord.parse_records(response=response, api=api)

    def wait(self=None, period=10, callback=None, *args, **kwargs):
        """Wait until task is complete
        :param period: Time in seconds between reloads
        :param callback: Function to call after the task has finished,
            arguments and keyword arguments can be provided for it
        :return: Return value of provided callback function or None if a
            callback function was not provided
        """
        # NOTE(review): `self=None` makes `self` formally optional, but an
        # unbound call would fail on `self.status` — presumably always
        # invoked as a bound method; confirm before changing the signature.
        while self.status not in TaskStatus.terminal_states:
            self.reload()
            time.sleep(period)
        if callback:
            return callback(*args, **kwargs)
class App(Resource):
    """
    Central resource for managing apps.
    """
    _URL = {
        'query': '/apps',
        'get': '/apps/{id}',
        'get_revision': '/apps/{id}/{revision}',
        'create_revision': '/apps/{id}/{revision}/raw',
        'copy': '/apps/{id}/actions/copy',
        'raw': '/apps/{id}/raw'
    }

    href = HrefField()
    _id = StringField(read_only=True, name='id')
    project = StringField(read_only=True)
    name = StringField(read_only=True)
    revision = IntegerField(read_only=True)
    raw = DictField(read_only=False)

    @property
    def id(self):
        """
        App identifier with any trailing numeric revision segment removed.

        Server-side ids look like ``<project>/<app-name>/<revision>``; when
        the last path segment is purely numeric it is stripped so the id is
        stable across revisions, otherwise the raw id is returned unchanged.
        """
        _id, _rev = self._id.rsplit('/', 1)
        # Raw string literal fixes the invalid '\d' escape sequence
        # (SyntaxWarning on modern Python); the pattern is unchanged.
        if re.match(r'^\d*$', _rev):
            return _id
        else:
            return self._id

    def __str__(self):
        return six.text_type('<App: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls, project=None, visibility=None, offset=None, limit=None,
              api=None):
        """
        Query (List) apps.
        :param project: Source project.
        :param visibility: private|public for private or public apps.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        if project:
            project = Transform.to_project(project)
        api = api or cls._API
        return super(App, cls)._query(url=cls._URL['query'], project=project,
                                      visibility=visibility, offset=offset,
                                      limit=limit, api=api)

    @classmethod
    def get_revision(cls, id, revision, api=None):
        """
        Get app revision.
        :param id: App identifier.
        :param revision: App revision
        :param api: Api instance.
        :return: App object.
        """
        api = api if api else cls._API
        app = api.get(url=cls._URL['get_revision'].format(
            id=id, revision=revision)).json()
        return App(api=api, **app)

    @classmethod
    def install_app(cls, id, raw, api=None):
        """
        Installs an app.
        :param id: App identifier.
        :param raw: Raw cwl data.
        :param api: Api instance.
        :return: App object.
        """
        api = api if api else cls._API
        app = api.post(url=cls._URL['raw'].format(id=id), data=raw).json()
        # The raw endpoint returns only the CWL document; fetch the full
        # app wrapper using the server-assigned 'sbg:id'.
        app_wrapper = api.get(url=cls._URL['get'].format(
            id=app['sbg:id'])).json()
        return App(api=api, **app_wrapper)

    @classmethod
    def create_revision(cls, id, revision, raw, api=None):
        """
        Create a new app revision.
        :param id: App identifier.
        :param revision: App revision.
        :param raw: Raw cwl object.
        :param api: Api instance.
        :return: App object.
        """
        api = api if api else cls._API
        app = api.post(url=cls._URL['create_revision'].format(
            id=id, revision=revision), data=raw).json()
        # As with install_app, re-fetch the full wrapper for the new
        # revision using the server-assigned 'sbg:id'.
        app_wrapper = api.get(
            url=cls._URL['get'].format(id=app['sbg:id'])).json()
        return App(api=api, **app_wrapper)

    def copy(self, project, name=None):
        """
        Copies the current app.
        :param project: Destination project.
        :param name: Destination app name.
        :return: Copied App object.
        """
        project = Transform.to_project(project)
        data = {
            'project': project
        }
        if name:
            data['name'] = name
        app = self._api.post(url=self._URL['copy'].format(id=self.id),
                             data=data).json()
        return App(api=self._api, **app)