class ExecutionStatus(Resource):
    """
    Task execution status resource.

    Contains information about the number of completed task steps,
    total number of task steps, current execution message and
    information regarding computation limits. In case of a batch task
    it also contains the number of queued, running, completed, failed
    and aborted child tasks.
    """
    # Step-level progress of the execution.
    steps_completed = IntegerField(read_only=True)
    steps_total = IntegerField(read_only=True)
    # Human-readable status message and its machine-readable code.
    message = StringField(read_only=True)
    message_code = StringField(read_only=True)
    # Child-task counters (per the class docstring, populated for
    # batch tasks).
    queued = IntegerField(read_only=True)
    running = IntegerField(read_only=True)
    completed = IntegerField(read_only=True)
    failed = IntegerField(read_only=True)
    aborted = IntegerField(read_only=True)
    # Limit-related flags (presumably indicate why execution is held
    # back — system/account computation limits, instance
    # initialization; verify against platform API docs).
    system_limit = BooleanField(read_only=True)
    account_limit = BooleanField(read_only=True)
    instance_init = BooleanField(read_only=True)
    # Timing breakdown reported by the server
    # (units not specified in this file — verify against API docs).
    queued_duration = IntegerField(read_only=True)
    running_duration = IntegerField(read_only=True)
    execution_duration = IntegerField(read_only=True)
    duration = IntegerField(read_only=True)

    def __str__(self):
        return '<ExecutionStatus>'
class BillingGroup(Resource):
    """
    Central resource for managing billing groups.
    """
    _URL = {'query': '/billing/groups', 'get': '/billing/groups/{id}'}

    href = HrefField()
    id = UuidField()
    owner = StringField(read_only=True)
    name = StringField(read_only=True)
    type = StringField(read_only=True)
    pending = BooleanField(read_only=True)
    disabled = BooleanField(read_only=False)
    balance = CompoundField(Price, read_only=True)

    def __str__(self):
        return six.text_type('<BillingGroup: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        # Fix: the previous ``hasattr(other, '__class__')`` guard was dead
        # code — every Python object has ``__class__`` — so it is removed.
        # Equality requires the exact same class and equal ids.
        # NOTE(review): defining __eq__ without __hash__ leaves instances
        # unhashable on Python 3; the sibling Task class shares this trait.
        if self.__class__ != other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def query(cls, offset=None, limit=None, api=None):
        """
        Query (List) billing groups.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        return super(BillingGroup, cls)._query(
            url=cls._URL['query'], offset=offset, limit=limit,
            fields='_all', api=api
        )

    def breakdown(self):
        """
        Get BillingGroupBreakdown for the current billing group.
        """
        return BillingGroupBreakdown.get(self.id, self._api)
class Invoice(Resource):
    """
    Central resource for managing invoices.
    """
    _URL = {'query': '/billing/invoices', 'get': '/billing/invoices/{id}'}

    href = HrefField()
    id = StringField(read_only=True)
    pending = BooleanField(read_only=True)
    analysis_costs = CompoundField(Price, read_only=True)
    storage_costs = CompoundField(Price, read_only=True)
    total = CompoundField(Price, read_only=True)
    invoice_period = CompoundField(InvoicePeriod, read_only=True)

    def __str__(self):
        return six.text_type('<Invoice: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls, offset=None, limit=None, api=None):
        """
        Query (List) invoices.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        return super(Invoice, cls)._query(
            url=cls._URL['query'],
            offset=offset,
            limit=limit,
            fields='_all',
            api=api,
        )
class Permissions(Resource):
    """
    Permissions resource contains member permissions
    in regards to the project.
    """
    write = BooleanField()
    read = BooleanField()
    copy = BooleanField()
    execute = BooleanField()
    admin = BooleanField()

    def __str__(self):
        # Template kept identical to the legacy format so the string
        # representation is unchanged.
        template = (
            '<Permissions: write={write}, read={read}, copy={copy},'
            ' execute={execute}, admin={admin}>'
        )
        return six.text_type(template.format(
            write=self.write,
            read=self.read,
            copy=self.copy,
            execute=self.execute,
            admin=self.admin,
        ))
class BillingGroupStorageBreakdown(Resource):
    """
    Storage cost breakdown entries for a billing group.
    """
    _URL = {'query': '/billing/groups/{id}/breakdown/storage'}

    project_name = StringField(read_only=True)
    project_created_by = StringField(read_only=True)
    location = StringField(read_only=True)
    active = CompoundField(Measurement, read_only=True)
    archived = CompoundField(Measurement, read_only=True)
    project_locked = BooleanField(read_only=True)

    @classmethod
    def query(cls, bg_id, api=None, date_from=None, date_to=None,
              invoice_id=None, fields=None, offset=0, limit=50):
        """
        Query (List) billing group storage breakdown.
        Date parameters must be strings in format MM-DD-YYYY.
        :param bg_id: Billing Group ID.
        :param api: Api instance.
        :param date_from: Include all storage transactions charged after
            and including date_from.
        :param date_to: Include all storage transactions charged before
            and including date_to.
        :param invoice_id: Invoice identifier.
        :param fields: Fields query parameter forwarded to the API.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        api = api if api else cls._API
        url = cls._URL['query'].format(id=bg_id)
        return super(BillingGroupStorageBreakdown, cls)._query(
            url=url,
            offset=offset,
            limit=limit,
            date_from=date_from,
            date_to=date_to,
            invoice_id=invoice_id,
            fields=fields,
            api=api,
        )

    def __str__(self):
        return '<BillingGroupStorageBreakdown>'
class BillingGroupAnalysisBreakdown(Resource):
    """
    Analysis cost breakdown entries for a billing group.
    """
    _URL = {'query': '/billing/groups/{id}/breakdown/analysis'}

    project_name = StringField(read_only=True)
    analysis_app_name = StringField(read_only=True)
    analysis_name = StringField(read_only=True)
    analysis_type = StringField(read_only=True)
    analysis_id = UuidField(read_only=True)
    ran_by = StringField(read_only=True)
    analysis_status = StringField(read_only=True)
    analysis_cost = CompoundField(AnalysisCost, read_only=True)
    refunded_amount = FloatField(read_only=True)
    time_started = DateTimeField(read_only=True)
    time_finished = DateTimeField(read_only=True)
    project_locked = BooleanField(read_only=True)

    @classmethod
    def query(cls, bg_id, api=None, date_from=None, date_to=None,
              invoice_id=None, fields=None, offset=0, limit=50):
        """
        Query (List) billing group analysis breakdown.
        Date parameters must be strings in format MM-DD-YYYY.
        :param bg_id: Billing Group ID.
        :param api: Api instance.
        :param date_from: Include all analysis transactions charged after
            and including date_from.
        :param date_to: Include all analysis transactions charged before
            and including date_to.
        :param invoice_id: Invoice identifier.
        :param fields: Fields query parameter forwarded to the API.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        api = api if api else cls._API
        url = cls._URL['query'].format(id=bg_id)
        return super(BillingGroupAnalysisBreakdown, cls)._query(
            url=url,
            offset=offset,
            limit=limit,
            date_from=date_from,
            date_to=date_to,
            invoice_id=invoice_id,
            fields=fields,
            api=api,
        )

    def __str__(self):
        return '<BillingGroupAnalysisBreakdown>'
class Job(Resource):
    """
    Job resource contains information for a single executed node
    in the analysis.
    """
    name = StringField(read_only=True)
    # Wall-clock start and end timestamps of the job.
    start_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    status = StringField(read_only=True)
    # Command line the job executed.
    command_line = StringField(read_only=True)
    # Presumably set when the platform re-ran this job — verify.
    retried = BooleanField(read_only=True)
    # Compute instance, docker and log details for this job.
    instance = CompoundField(Instance, read_only=True)
    docker = CompoundField(JobDocker, read_only=True)
    logs = CompoundField(Logs, read_only=True)

    def __str__(self):
        return f'<Job: name={self.name}, status={self.status}>'
class Automation(Resource):
    """
    Central resource for managing automations.

    Provides querying and creation of automation templates, archiving
    and restoring, and access to related packages, members and runs.
    """
    # noinspection PyProtectedMember
    _URL = {
        'query': '/automation/automations',
        'get': '/automation/automations/{id}',
        'member': AutomationMember._URL['get'],
        'members': AutomationMember._URL['query'],
        'packages': AutomationPackage._URL['query'],
        'archive': '/automation/automations/{automation_id}/actions/archive',
        'restore': '/automation/automations/{automation_id}/actions/restore'
    }

    href = HrefField(read_only=True)
    id = UuidField(read_only=True)
    name = StringField(read_only=False)
    description = StringField(read_only=False)
    billing_group = UuidField(read_only=False)
    owner = StringField(read_only=True)
    created_by = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_by = StringField(read_only=True)
    modified_on = DateTimeField(read_only=False)
    archived = BooleanField(read_only=True)
    secret_settings = DictField(read_only=False)
    memory_limit = IntegerField(read_only=False)
    project_based = BooleanField(read_only=False)

    def __eq__(self, other):
        # Equal when other is the exact same class and ids match.
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<Automation: id={self.id} name={self.name}>'

    @classmethod
    def query(cls, name=None, include_archived=False, project_based=None,
              offset=None, limit=None, api=None):
        """
        Query (List) automations.
        :param name: Automation name.
        :param include_archived: Include archived automations also.
        :param project_based: Search project based automations.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        return super()._query(
            url=cls._URL['query'],
            name=name,
            include_archived=include_archived,
            project_based=project_based,
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls, name, description=None, billing_group=None,
               secret_settings=None, project_based=None,
               memory_limit=None, api=None):
        """
        Create an automation template.
        :param name: Automation name (required).
        :param billing_group: Automation billing group.
        :param description: Automation description.
        :param secret_settings: Automation settings.
        :param project_based: Create project based automation template.
        :param memory_limit: Memory limit in MB.
        :param api: Api instance.
        :return: Automation object.
        :raises SbgError: if name is None.
        """
        api = api if api else cls._API

        if name is None:
            raise SbgError('Automation name is required!')

        data = {
            'name': name,
        }
        # Only send optional fields the caller actually provided.
        if billing_group:
            data['billing_group'] = Transform.to_billing_group(billing_group)
        if description:
            data['description'] = description
        if secret_settings:
            data['secret_settings'] = secret_settings
        if project_based:
            data['project_based'] = project_based
        if memory_limit:
            data['memory_limit'] = memory_limit

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating automation template', extra=extra)
        automation_data = api.post(url=cls._URL['query'], data=data).json()
        return Automation(api=api, **automation_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modifications to the automation template on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Automation instance.
        :raises ResourceNotModified: if there are no local modifications.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving automation template', extra=extra)
            data = self._api.patch(
                url=self._URL['get'].format(id=self.id),
                data=modified_data
            ).json()
            return Automation(api=self._api, **data)
        else:
            raise ResourceNotModified()

    @inplace_reload
    def archive(self):
        """
        Archive automation.
        :return: Automation instance.
        """
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Archive automation', extra=extra)
        automation_data = self._api.post(
            url=self._URL['archive'].format(automation_id=self.id)
        ).json()
        return Automation(api=self._api, **automation_data)

    @inplace_reload
    def restore(self):
        """
        Restore archived automation.
        :return: Automation instance.
        """
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Restore archived automation', extra=extra)
        automation_data = self._api.post(
            url=self._URL['restore'].format(automation_id=self.id)
        ).json()
        return Automation(api=self._api, **automation_data)

    def get_packages(self, offset=None, limit=None, api=None):
        """
        Return list of packages that belong to this automation.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage collection.
        """
        api = api or self._API
        return AutomationPackage.query(
            automation=self.id, offset=offset, limit=limit, api=api
        )

    @classmethod
    def get_package(cls, package, api=None):
        """
        Return specified automation package.
        :param package: Automation Package Id.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage object.
        """
        package_id = Transform.to_automation_package(package)
        api = api or cls._API
        return AutomationPackage.get(id=package_id, api=api)

    def add_package(self, version, file_path, schema, file_name=None,
                    retry_count=RequestParameters.DEFAULT_RETRY_COUNT,
                    timeout=RequestParameters.DEFAULT_TIMEOUT,
                    part_size=None, api=None):
        """
        Add a code package to automation template.
        :param version: The code package version.
        :param file_path: Path to the code package file to be uploaded.
        :param schema: IO schema for main step of execution.
        :param part_size: Size of upload part in bytes.
        :param file_name: Optional file name.
        :param retry_count: Upload retry count.
        :param timeout: Timeout for s3/google session.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage.
        :raises SbgError: if version or file_path is None.
        """
        api = api or self._API
        if version is None:
            raise SbgError('Code package version is required!')
        if file_path is None:
            raise SbgError('Code package file path is required!')

        # Multipart upload the code package; wait() blocks until the
        # upload finishes before the package is registered.
        upload = CodePackageUpload(
            file_path, self.id, api=api, part_size=part_size,
            file_name=file_name, retry_count=retry_count, timeout=timeout
        )
        upload.start()
        upload.wait()
        package_file = upload.result()

        # Create the automation package pointing at the uploaded file.
        return AutomationPackage.create(
            self.id, version=version, location=package_file.id,
            schema=schema, api=api
        )

    def get_member(self, username, api=None):
        """
        Return specified automation member.
        :param username: Member username.
        :param api: sevenbridges Api instance.
        :return: AutomationMember object.
        """
        member = Transform.to_automation_member(username)
        api = api or self._API
        return AutomationMember.get(id=member, automation=self.id, api=api)

    def get_members(self, offset=None, limit=None, api=None):
        """
        Return list of automation members.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance.
        :return: AutomationMember collection.
        """
        api = api or self._API
        return AutomationMember.query(
            automation=self.id, offset=offset, limit=limit, api=api
        )

    def add_member(self, user, permissions, api=None):
        """
        Add member to the automation.
        :param user: Member username.
        :param permissions: Member permissions.
        :param api: sevenbridges Api instance.
        :return: AutomationMember object.
        """
        api = api or self._API
        return AutomationMember.add(
            automation=self.id, user=user, permissions=permissions, api=api
        )

    def remove_member(self, user, api=None):
        """
        Remove a member from the automation.
        :param user: Member username.
        :param api: sevenbridges Api instance.
        :return: None.
        """
        api = api or self._API
        AutomationMember.remove(automation=self.id, user=user, api=api)

    def get_runs(self, package=None, status=None, name=None, created_by=None,
                 created_from=None, created_to=None, project_id=None,
                 order_by=None, order=None, offset=None, limit=None,
                 api=None):
        """
        Query automation runs that belong to this automation.
        :param package: Package id.
        :param status: Run status.
        :param name: Automation run name.
        :param created_by: Username of member that created the run.
        :param created_from: Date the run was created after.
        :param created_to: Date the run was created before.
        :param project_id: Search runs by project id, if run is
            project based.
        :param order_by: Property by which to order results.
        :param order: Ascending or Descending ("asc" or "desc").
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance.
        :return: AutomationRun collection.
        """
        api = api or self._API
        return AutomationRun.query(
            automation=self.id, package=package, status=status, name=name,
            created_by=created_by, created_from=created_from,
            created_to=created_to, project_id=project_id, order_by=order_by,
            order=order, offset=offset, limit=limit, api=api
        )
class BillingGroup(Resource):
    """
    Central resource for managing billing groups.
    """
    _URL = {'query': '/billing/groups', 'get': '/billing/groups/{id}'}

    href = HrefField(read_only=True)
    id = UuidField(read_only=True)
    owner = StringField(read_only=True)
    name = StringField(read_only=True)
    type = StringField(read_only=True)
    pending = BooleanField(read_only=True)
    disabled = BooleanField(read_only=False)
    balance = CompoundField(Price, read_only=True)

    def __str__(self):
        return f'<BillingGroup: id={self.id}>'

    def __eq__(self, other):
        # Equal when other is the exact same class and ids match.
        if type(self) is not type(other):
            return False
        return self is other or self.id == other.id

    @classmethod
    def query(cls, offset=None, limit=None, api=None):
        """
        Query (List) billing groups.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api if api else cls._API
        return super()._query(
            url=cls._URL['query'],
            offset=offset,
            limit=limit,
            fields='_all',
            api=api,
        )

    def breakdown(self):
        """
        Get BillingGroupBreakdown for the current billing group.
        """
        return BillingGroupBreakdown.get(self.id, self._api)

    def analysis_breakdown(self, date_from=None, date_to=None,
                           invoice_id=None, fields=None, offset=0, limit=50):
        """
        Get BillingGroupAnalysisBreakdown for the current billing group.
        """
        return BillingGroupAnalysisBreakdown.query(
            self.id, self._api, date_from=date_from, date_to=date_to,
            invoice_id=invoice_id, fields=fields, offset=offset, limit=limit,
        )

    def storage_breakdown(self, date_from=None, date_to=None,
                          invoice_id=None, fields=None, offset=0, limit=50):
        """
        Get BillingGroupStorageBreakdown for the current billing group.
        """
        return BillingGroupStorageBreakdown.query(
            self.id, self._api, date_from=date_from, date_to=date_to,
            invoice_id=invoice_id, fields=fields, offset=offset, limit=limit,
        )

    def egress_breakdown(self, date_from=None, date_to=None,
                         invoice_id=None, fields=None, offset=0, limit=50):
        """
        Get BillingGroupEgressBreakdown for the current billing group.
        """
        return BillingGroupEgressBreakdown.query(
            self.id, self._api, date_from=date_from, date_to=date_to,
            invoice_id=invoice_id, fields=fields, offset=offset, limit=limit,
        )
class AutomationPackage(Resource):
    """
    Central resource for managing automation packages.
    """
    _URL = {
        'query': '/automation/automations/{automation_id}/packages',
        'get': '/automation/packages/{id}',
        'archive': "/automation/automations/{automation_id}"
                   "/packages/{id}/actions/archive",
        'restore': "/automation/automations/{automation_id}"
                   "/packages/{id}/actions/restore",
    }

    id = StringField(read_only=True)
    automation = UuidField(read_only=True)
    version = StringField(read_only=True)
    location = StringField(read_only=True)
    schema = DictField(read_only=True)
    created_by = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    archived = BooleanField(read_only=True)
    custom_url = StringField(read_only=False)
    memory_limit = IntegerField(read_only=False)

    def __eq__(self, other):
        # Equal when other is the exact same class and ids match.
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<AutomationPackage: id={self.id}>'

    @classmethod
    def query(cls, automation, offset=None, limit=None, api=None):
        """
        Query (List) automation packages.
        :param automation: Automation id.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        automation_id = Transform.to_automation(automation)

        api = api or cls._API
        return super()._query(
            url=cls._URL['query'].format(automation_id=automation_id),
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls, automation, version, location, schema,
               memory_limit=None, api=None):
        """
        Create a code package.
        :param automation: Automation id.
        :param version: The code package version.
        :param location: File ID of the uploaded code package.
        :param schema: IO schema for main step of execution.
        :param memory_limit: Memory limit in MB.
        :param api: Api instance.
        :return: AutomationPackage object.
        :raises SbgError: if version, location or schema is None.
        """
        automation_id = Transform.to_automation(automation)

        api = api if api else cls._API

        if version is None:
            raise SbgError('Code package version is required!')

        if location is None:
            raise SbgError('Code package location is required!')

        if schema is None:
            raise SbgError('Schema is required!')

        data = {
            'version': version,
            'location': location,
            'schema': schema,
            'memory_limit': memory_limit,
        }

        extra = {'resource': cls.__name__, 'query': data}
        package_data = api.post(
            cls._URL['query'].format(automation_id=automation_id),
            data=data
        ).json()
        logger.info(
            'Add code package to automation with id %s',
            automation_id, extra=extra
        )
        return AutomationPackage(api=api, **package_data)

    @inplace_reload
    def archive(self):
        """
        Archive package.
        :return: AutomationPackage object.
        """
        automation_id = Transform.to_automation(self.automation)
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Archive automation package', extra=extra)
        package_data = self._api.post(
            url=self._URL['archive'].format(
                automation_id=automation_id, id=self.id
            )
        ).json()
        return AutomationPackage(api=self._api, **package_data)

    @inplace_reload
    def restore(self):
        """
        Restore archived package.
        :return: AutomationPackage object.
        """
        automation_id = Transform.to_automation(self.automation)
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Restore archived automation package', extra=extra)
        package_data = self._api.post(
            url=self._URL['restore'].format(
                automation_id=automation_id, id=self.id
            )
        ).json()
        return AutomationPackage(api=self._api, **package_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modifications to the automation package on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: AutomationPackage instance.
        :raises ResourceNotModified: if there are no local modifications.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving automation package', extra=extra)
            data = self._api.patch(
                url=self._URL['get'].format(id=self.id),
                data=modified_data
            ).json()
            return AutomationPackage(api=self._api, **data)
        else:
            raise ResourceNotModified()
class Task(Resource):
    """
    Central resource for managing tasks.

    Supports querying, creation, running, aborting and saving of
    tasks, plus input serialization for the platform API.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details"
    }

    href = HrefField()
    id = UuidField()
    name = StringField()
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField()
    app = StringField()
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    created_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    # Batch-task related fields.
    batch = BooleanField(read_only=False)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)
    use_interruptible_instances = BooleanField()

    def __str__(self):
        return six.text_type('<Task: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        # NOTE(review): every object has ``__class__`` so the first
        # guard can never fail; equality effectively compares exact
        # class and id.
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def query(cls, project=None, status=None, batch=None,
              parent=None, created_from=None, created_to=None,
              started_from=None, started_to=None, ended_from=None,
              ended_to=None, offset=None, limit=None, api=None):
        """
        Query (List) tasks. Date parameters may be both strings and
        python date objects.
        :param project: Target project. optional.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param ended_to: All tasks that ended until this date.
        :param ended_from: All tasks that ended from this date.
        :param started_to: All tasks that were started until this date.
        :param started_from: All tasks that were started from this date.
        :param created_to: All tasks that were created until this date.
        :param created_from: All tasks that were created from this date.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        # Normalize identifiers and dates into API-friendly strings.
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        if created_from:
            created_from = Transform.to_datestring(created_from)
        if created_to:
            created_to = Transform.to_datestring(created_to)
        if started_from:
            started_from = Transform.to_datestring(started_from)
        if started_to:
            started_to = Transform.to_datestring(started_to)
        if ended_from:
            ended_from = Transform.to_datestring(ended_from)
        if ended_to:
            ended_to = Transform.to_datestring(ended_to)

        return super(Task, cls)._query(
            url=cls._URL['query'], project=project, status=status,
            batch=batch, parent=parent, created_from=created_from,
            created_to=created_to, started_from=started_from,
            started_to=started_to, ended_from=ended_from,
            ended_to=ended_to, offset=offset, limit=limit,
            fields='_all', api=api
        )

    @classmethod
    def create(cls, name, project, app, revision=None, batch_input=None,
               batch_by=None, inputs=None, description=None, run=False,
               disable_batch=False, interruptible=True, api=None):
        """
        Creates a task on server.
        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param disable_batch: If True disables batching of a batch task.
        :param interruptible: If True interruptible instance will be used.
        :param api: Api instance.
        :return: Task object.
        :raises: TaskValidationError if validation Fails.
        :raises: SbgError if any exception occurs during request.
        """
        task_data = {}
        params = {}

        project = Transform.to_project(project)
        app_id = Transform.to_app(app)

        # Pin an explicit revision if given; otherwise fall back to the
        # revision carried by an App object (plain string ids keep the
        # server default).
        if revision:
            app_id = app_id + "/" + six.text_type(revision)
        else:
            if isinstance(app, App):
                app_id = app_id + "/" + six.text_type(app.revision)

        task_inputs = {
            'inputs': Task._serialize_inputs(inputs) if inputs else {}
        }

        if batch_input and batch_by:
            task_data['batch_input'] = batch_input
            task_data['batch_by'] = batch_by
            if disable_batch:
                params.update({'batch': False})

        task_meta = {
            'name': name,
            'project': project,
            'app': app_id,
            'description': description,
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)
        task_data['use_interruptible_instances'] = interruptible

        if run:
            params.update({'action': 'run'})

        api = api if api else cls._API
        created_task = api.post(
            cls._URL['query'], data=task_data, params=params
        ).json()
        # When asked to run immediately, surface server-side validation
        # errors instead of returning a broken task silently.
        if run and 'errors' in created_task:
            if bool(created_task['errors']):
                raise TaskValidationError(
                    'Unable to run task! Task contains errors.',
                    task=Task(api=api, **created_task)
                )
        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task.
        :param inplace: Apply action on the current object or return
            a new one.
        :return: Task object.
        """
        extra = {
            'resource': self.__class__.__name__,
            'query': {'id': self.id}
        }
        logger.info('Aborting task', extra=extra)
        task_data = self._api.post(
            url=self._URL['abort'].format(id=self.id)
        ).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, interruptible=True, inplace=True):
        """
        Run task.
        :param batch: If False batching will be disabled.
        :param interruptible: If True interruptible instance will be used.
        :param inplace: Apply action on the current object or return
            a new one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False
        params['use_interruptible_instances'] = interruptible
        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'batch': batch
            }
        }
        logger.info('Running task', extra=extra)
        task_data = self._api.post(
            url=self._URL['run'].format(id=self.id), params=params
        ).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modifications to the task on the server.
        :param inplace: Apply edits on the current instance or get
            a new one.
        :return: Task instance.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            task_request_data = {}
            # Inputs need extra serialization (File objects -> API
            # file mappings) before the PATCH request.
            inputs = modified_data.pop('inputs', None)
            task_request_data.update(modified_data)
            if inputs:
                task_request_data['inputs'] = self._serialize_inputs(inputs)
            extra = {
                'resource': self.__class__.__name__,
                'query': {
                    'id': self.id,
                    'data': task_request_data
                }
            }
            logger.info('Saving task', extra=extra)
            data = self._api.patch(
                url=self._URL['get'].format(id=self.id),
                data=task_request_data
            ).json()
            task = Task(api=self._api, **data)
            return task

    @staticmethod
    def _serialize_inputs(inputs):
        """Serialize task input dictionary, converting File instances
        into the API's file mapping format (lists handled recursively)."""
        serialized_inputs = {}
        for input_id, input_value in inputs.items():
            if isinstance(input_value, list):
                serialized_list = Task._serialize_input_list(input_value)
                serialized_inputs[input_id] = serialized_list
            else:
                if isinstance(input_value, File):
                    input_value = Task._to_api_file_format(input_value)
                serialized_inputs[input_id] = input_value
        return serialized_inputs

    @staticmethod
    def _serialize_input_list(input_value):
        """Recursively serialize task input list."""
        input_list = []
        for item in input_value:
            if isinstance(item, list):
                input_list.append(Task._serialize_input_list(item))
            else:
                if isinstance(item, File):
                    item = Task._to_api_file_format(item)
                input_list.append(item)
        return input_list

    @staticmethod
    def _to_api_file_format(_file):
        # Convert a File resource into the API's CWL-style file mapping;
        # name is only included when present.
        api_file = {'class': 'File', 'path': _file.id}
        if _file.name:
            api_file['name'] = _file.name
        return api_file

    def get_execution_details(self):
        """
        Retrieves execution details for a task.
        :return: Execution details instance.
        """
        extra = {
            'resource': self.__class__.__name__,
            'query': {'id': self.id}
        }
        logger.info('Get execution details', extra=extra)
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)
        ).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self):
        """
        Retrieves batch child tasks for this task if its a batch task.
        :return: Collection instance.
        :raises SbgError: if task is not a batch task.
        """
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(parent=self.id, api=self._api)
class Task(Resource):
    """
    Central resource for managing tasks.

    Supports querying, creation, execution control (run/abort/clone),
    modification, bulk retrieval and waiting on task completion.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'clone': '/tasks/{id}/actions/clone',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details",
        'bulk_get': '/bulk/tasks/get',
    }

    href = HrefField(read_only=True)
    id = UuidField(read_only=True)
    name = StringField(read_only=False)
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField(read_only=False)
    app = StringField(read_only=False)
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    created_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    batch = BooleanField(read_only=False)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)
    execution_settings = DictField(read_only=True)
    use_interruptible_instances = BooleanField(read_only=False)

    def __str__(self):
        return f'<Task: id={self.id}>'

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @classmethod
    def query(cls, project=None, status=None, batch=None,
              parent=None, created_from=None, created_to=None,
              started_from=None, started_to=None, ended_from=None,
              ended_to=None, offset=None, limit=None, order_by=None,
              order=None, api=None):
        """
        Query (List) tasks. Date parameters may be both strings and python
        date objects.

        :param project: Target project. optional.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param ended_to: All tasks that ended until this date.
        :param ended_from: All tasks that ended from this date.
        :param started_to: All tasks that were started until this date.
        :param started_from: All tasks that were started from this date.
        :param created_to: All tasks that were created until this date.
        :param created_from: All tasks that were created from this date.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param order_by: Property to order by.
        :param order: Ascending or descending ordering.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        # Normalize identifiers and dates to the string forms the API expects.
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        if created_from:
            created_from = Transform.to_datestring(created_from)
        if created_to:
            created_to = Transform.to_datestring(created_to)
        if started_from:
            started_from = Transform.to_datestring(started_from)
        if started_to:
            started_to = Transform.to_datestring(started_to)
        if ended_from:
            ended_from = Transform.to_datestring(ended_from)
        if ended_to:
            ended_to = Transform.to_datestring(ended_to)

        return super()._query(
            url=cls._URL['query'], project=project, status=status,
            batch=batch, parent=parent, created_from=created_from,
            created_to=created_to, started_from=started_from,
            started_to=started_to, ended_from=ended_from, ended_to=ended_to,
            offset=offset, limit=limit, order_by=order_by, order=order,
            fields='_all', api=api
        )

    @classmethod
    def create(cls, name, project, app, revision=None, batch_input=None,
               batch_by=None, inputs=None, description=None, run=False,
               disable_batch=False, interruptible=None,
               execution_settings=None, api=None):
        """
        Creates a task on server.

        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param disable_batch: If True disables batching of a batch task.
        :param interruptible: If True interruptible instance will be used.
        :param execution_settings: Execution settings for the task.
        :param api: Api instance.
        :return: Task object.
        :raises TaskValidationError: if validation fails.
        :raises SbgError: if any exception occurs during request.
        """
        task_data = {}
        params = {}

        project = Transform.to_project(project)
        app_id = Transform.to_app(app)
        # Pin the app revision: an explicit argument wins; otherwise fall
        # back to the revision carried by an App object, if one was given.
        if revision:
            app_id = f'{app_id}/{revision}'
        else:
            if isinstance(app, App):
                app_id = f'{app_id}/{app.revision}'

        task_inputs = {
            'inputs': Task._serialize_inputs(inputs) if inputs else {}
        }

        if batch_input and batch_by:
            task_data['batch_input'] = batch_input
            task_data['batch_by'] = batch_by
            # Disabling batching only makes sense for a batch task.
            if disable_batch:
                params.update({'batch': False})

        task_meta = {
            'name': name,
            'project': project,
            'app': app_id,
            'description': description,
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)

        if interruptible is not None:
            task_data['use_interruptible_instances'] = interruptible
        if execution_settings:
            task_data.update({'execution_settings': execution_settings})
        if run:
            params.update({'action': 'run'})

        api = api if api else cls._API
        created_task = api.post(cls._URL['query'], data=task_data,
                                params=params).json()
        # When asked to run immediately, surface server-side validation
        # errors instead of returning a task that silently never started.
        if run and 'errors' in created_task and created_task['errors']:
            raise TaskValidationError(
                'Unable to run task! Task contains errors.',
                task=Task(api=api, **created_task)
            )
        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task.

        :param inplace: Apply action on the current object or return a new
            one.
        :return: Task object.
        """
        extra = {'resource': type(self).__name__, 'query': {'id': self.id}}
        logger.info('Aborting task', extra=extra)
        task_data = self._api.post(
            url=self._URL['abort'].format(id=self.id)).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, interruptible=None, inplace=True):
        """
        Run task.

        :param batch: If False batching will be disabled.
        :param interruptible: If True interruptible instance will be used.
        :param inplace: Apply action on the current object or return a new
            one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False
        if interruptible is not None:
            params['use_interruptible_instances'] = interruptible
        extra = {
            'resource': type(self).__name__,
            'query': {'id': self.id, 'batch': batch}
        }
        logger.info('Running task', extra=extra)
        task_data = self._api.post(url=self._URL['run'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    def clone(self, run=True):
        """
        Clone task.

        :param run: Run task after cloning.
        :return: Task object.
        """
        params = {}
        if run:
            params.update({'action': 'run'})
        extra = {
            'resource': type(self).__name__,
            'query': {'id': self.id, 'run': run}
        }
        logger.info('Cloning task', extra=extra)
        task_data = self._api.post(url=self._URL['clone'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the task on the server.

        Only fields that were actually modified are sent; inputs and
        execution settings get serialized into API format first. If nothing
        was modified, no request is made and None is returned.

        :param inplace: Apply edits on the current instance or get a new one.
        :return: Task instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            task_request_data = {}
            inputs = modified_data.pop('inputs', None)
            execution_settings = modified_data.pop('execution_settings', None)
            task_request_data.update(modified_data)
            if inputs:
                task_request_data['inputs'] = self._serialize_inputs(inputs)
            if execution_settings:
                task_request_data['execution_settings'] = (
                    self._serialize_execution_settings(execution_settings)
                )
            extra = {
                'resource': type(self).__name__,
                'query': {'id': self.id, 'data': task_request_data}
            }
            logger.info('Saving task', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=task_request_data).json()
            task = Task(api=self._api, **data)
            return task

    def _serialize_execution_settings(self, execution_settings):
        """Merge new execution settings over the task's current ones,
        dropping keys that remain unset."""
        merged = {
            key: execution_settings.get(
                key, self.execution_settings.get(key, None))
            for key in (
                'instance_type', 'max_parallel_instances', 'use_memoization')
        }
        return {key: value for key, value in merged.items()
                if value is not None}

    @staticmethod
    def _serialize_inputs(input_value):
        """
        Recursively serialize an input value into API format.

        Lists and dicts are walked recursively; File objects are converted
        to the API file/folder representation; everything else is passed
        through unchanged.

        :param input_value: Input value to serialize.
        :return: Serialized input value.
        """
        if isinstance(input_value, list):
            return [Task._serialize_inputs(item) for item in input_value]
        if isinstance(input_value, dict):
            return {
                key: Task._serialize_inputs(value)
                for key, value in input_value.items()
            }
        if isinstance(input_value, File):
            return Task._to_api_file_format(input_value)
        return input_value

    @staticmethod
    def _to_api_file_format(_file):
        """Convert a File object to the API's file/folder dict format."""
        return {
            'class': (FileApiFormats.FOLDER if _file.is_folder()
                      else FileApiFormats.FILE),
            'path': _file.id
        }

    def get_execution_details(self):
        """
        Retrieves execution details for a task.

        :return: Execution details instance.
        """
        extra = {'resource': type(self).__name__, 'query': {'id': self.id}}
        logger.info('Get execution details', extra=extra)
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self, status=None, created_from=None,
                           created_to=None, started_from=None,
                           started_to=None, ended_from=None, ended_to=None,
                           order_by=None, order=None, offset=None, limit=None,
                           api=None):
        """
        Retrieves batch child tasks for this task if it is a batch task.

        :return: Collection instance.
        :raises SbgError: if task is not a batch task.
        """
        api = api or self._api
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(
            parent=self.id, status=status, created_from=created_from,
            created_to=created_to, started_from=started_from,
            started_to=started_to, ended_from=ended_from, ended_to=ended_to,
            order_by=order_by, order=order, offset=offset, limit=limit,
            api=api,
        )

    @classmethod
    def bulk_get(cls, tasks, api=None):
        """
        Retrieve tasks with specified ids in bulk.

        :param tasks: Tasks to be retrieved.
        :param api: Api instance.
        :return: List of TaskBulkRecord objects.
        """
        api = api or cls._API
        task_ids = [Transform.to_task(task) for task in tasks]
        data = {'task_ids': task_ids}
        logger.debug('Getting tasks in bulk.')
        response = api.post(url=cls._URL['bulk_get'], data=data)
        return TaskBulkRecord.parse_records(response=response, api=api)

    def wait(self, period=10, callback=None, *args, **kwargs):
        """Wait until task is complete.

        :param period: Time in seconds between reloads.
        :param callback: Function to call after the task has finished,
            arguments and keyword arguments can be provided for it.
        :return: Return value of provided callback function or None if a
            callback function was not provided.
        """
        # Fix: the original signature was `wait(self=None, ...)` — a default
        # for `self` is meaningless on a bound method and masked misuse.
        # Sleep before reloading so the loop exits as soon as a reload
        # observes a terminal state, instead of sleeping one extra period.
        while self.status not in TaskStatus.terminal_states:
            time.sleep(period)
            self.reload()
        if callback:
            return callback(*args, **kwargs)
class Export(Resource):
    """
    Central resource for managing exports.

    An export job copies a file from the Platform into a connected
    storage volume.
    """
    _URL = {
        'query': '/storage/exports',
        'get': '/storage/exports/{id}',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    _source = DictField(name='source', read_only=True)
    destination = CompoundField(VolumeFile, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    _result = DictField(name='result', read_only=True)
    properties = CompoundField(VolumeProperties, read_only=True)

    def __str__(self):
        return f'<Export: id={self.id}>'

    @property
    def source(self):
        """Source File of this export, or None when not available."""
        try:
            return File(id=self._source['file'], api=self._api)
        except TypeError:
            # _source has not been populated yet.
            return None

    @property
    def result(self):
        """Resulting File of this export, or None when not available."""
        try:
            return File(id=self._result['id'], api=self._api)
        except TypeError:
            # _result has not been populated yet.
            return None

    @classmethod
    def submit_export(cls, file, volume, location, properties=None,
                      overwrite=False, api=None):
        """
        Submit new export job.

        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If True it will overwrite file if exists.
        :param api: Api instance.
        :return: Export object.
        """
        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)

        payload = {
            'source': {'file': file},
            'destination': {'volume': volume, 'location': location},
            'overwrite': overwrite,
        }
        if properties:
            payload['properties'] = properties

        api = api or cls._API
        export_data = api.post(cls._URL['query'], data=payload).json()
        return Export(api=api, **export_data)

    @classmethod
    def query(cls, project=None, volume=None, state=None, offset=None,
              limit=None, api=None):
        """
        Query (List) exports.

        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional export state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        project = Transform.to_project(project) if project else project
        volume = Transform.to_volume(volume) if volume else volume
        return super(Export, cls)._query(
            url=cls._URL['query'], project=project, volume=volume,
            state=state, offset=offset, limit=limit, fields='_all', api=api
        )
class Volume(Resource):
    """
    Central resource for managing volumes.

    Supports creating S3/Google/OSS volumes, browsing volume contents,
    listing related import/export jobs and managing volume members.
    """
    _URL = {
        'query': '/storage/volumes',
        'get': '/storage/volumes/{id}',
        'delete': '/storage/volumes/{id}',
        'list': '/storage/volumes/{id}/list',
        'object': '/storage/volumes/{id}/object',
        'member': '/storage/volumes/{id}/members/{username}',
        'members_query': '/storage/volumes/{id}/members',
    }

    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    name = StringField(read_only=False)
    description = StringField(read_only=False)
    access_mode = StringField(read_only=False)
    service = CompoundField(VolumeService, read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_on = DateTimeField(read_only=True)
    active = BooleanField(read_only=True)

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<Volume: id={self.id}>'

    @classmethod
    def query(cls, offset=None, limit=None, api=None):
        """
        Query (List) volumes.

        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        return super()._query(url=cls._URL['query'], offset=offset,
                              limit=limit, fields='_all', api=api)

    @classmethod
    def create_s3_volume(cls, name, bucket, access_key_id, secret_access_key,
                         access_mode, description=None, prefix=None,
                         properties=None, api=None):
        """
        Create s3 volume.

        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param access_key_id: Amazon access key identifier.
        :param secret_access_key: Amazon secret access key.
        :param access_mode: Access Mode.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        service = {
            'type': VolumeType.S3,
            'bucket': bucket,
            'credentials': {
                'access_key_id': access_key_id,
                'secret_access_key': secret_access_key
            }
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        data = {'name': name, 'service': service, 'access_mode': access_mode}
        if description:
            data['description'] = description
        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating s3 volume', extra=extra)
        response = api.post(url=cls._URL['query'], data=data).json()
        return Volume(api=api, **response)

    @classmethod
    def create_google_volume(cls, name, bucket, client_email, private_key,
                             access_mode, description=None, prefix=None,
                             properties=None, api=None):
        """
        Create google volume.

        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param client_email: Google client email.
        :param private_key: Google client private key.
        :param access_mode: Access Mode.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        service = {
            'type': VolumeType.GOOGLE,
            'bucket': bucket,
            'credentials': {
                'client_email': client_email,
                'private_key': private_key
            }
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        data = {'name': name, 'service': service, 'access_mode': access_mode}
        if description:
            data['description'] = description
        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating google volume', extra=extra)
        response = api.post(url=cls._URL['query'], data=data).json()
        return Volume(api=api, **response)

    @classmethod
    def create_oss_volume(cls, name, bucket, endpoint, access_key_id,
                          secret_access_key, access_mode, description=None,
                          prefix=None, properties=None, api=None):
        """
        Create oss volume.

        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param access_key_id: Access key identifier.
        :param secret_access_key: Secret access key.
        :param access_mode: Access Mode.
        :param endpoint: Volume Endpoint.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        service = {
            'type': VolumeType.OSS,
            'bucket': bucket,
            'endpoint': endpoint,
            'credentials': {
                'access_key_id': access_key_id,
                'secret_access_key': secret_access_key
            }
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        data = {'name': name, 'service': service, 'access_mode': access_mode}
        if description:
            data['description'] = description
        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating oss volume', extra=extra)
        response = api.post(url=cls._URL['query'], data=data).json()
        return Volume(api=api, **response)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the volume on the server.

        :param inplace: Apply edits on the current instance or get a new one.
        :return: Volume instance.
        :raises ResourceNotModified: if no fields were modified.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {'id': self.id, 'modified_data': modified_data}
            }
            logger.info('Saving volume', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            volume = Volume(api=self._api, **data)
            return volume
        else:
            raise ResourceNotModified()

    def list(self, prefix=None, limit=None):
        """
        List contents of the volume.

        :param prefix: Optional volume location prefix to list from.
        :param limit: Pagination limit.
        :return: VolumeCollection object.
        """
        params = {}
        if prefix:
            params['prefix'] = prefix
        if limit:
            params['limit'] = limit
        data = self._api.get(url=self._URL['list'].format(id=self.id),
                             params=params).json()
        href = data['href']
        links = [VolumeLink(**link) for link in data['links']]
        objects = [
            VolumeObject(api=self._api, **item) for item in data['items']
        ]
        # Loop variable renamed so it no longer shadows the `prefix`
        # parameter (the original needed a `noqa` for this).
        prefixes = [
            VolumePrefix(api=self._api, **prefix_item)
            for prefix_item in data['prefixes']
        ]
        return VolumeCollection(href=href, items=objects, links=links,
                                prefixes=prefixes, api=self._api)

    def get_volume_object_info(self, location):
        """
        Fetches information about single volume object - usually file.

        :param location: Object location.
        :return: VolumeObject instance.
        """
        param = {'location': location}
        data = self._api.get(url=self._URL['object'].format(id=self.id),
                             params=param).json()
        return VolumeObject(api=self._api, **data)

    def get_imports(self, project=None, state=None, offset=None, limit=None):
        """
        Fetches imports for this volume.

        :param project: Optional project identifier.
        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.imports.query(volume=self, project=project,
                                       state=state, offset=offset,
                                       limit=limit)

    def get_exports(self, state=None, offset=None, limit=None):
        """
        Fetches exports for this volume.

        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.exports.query(volume=self, state=state,
                                       offset=offset, limit=limit)

    def get_members(self, offset=None, limit=None):
        """
        Retrieves volume members.

        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        extra = {'resource': type(self).__name__, 'query': {'id': self.id}}
        logger.info('Get volume members', extra=extra)
        response = self._api.get(
            url=self._URL['members_query'].format(id=self.id),
            params={'offset': offset, 'limit': limit})
        data = response.json()
        total = response.headers['x-total-matching-query']
        members = [Member(api=self._api, **member) for member in
                   data['items']]
        links = [Link(**link) for link in data['links']]
        href = data['href']
        return Collection(resource=Member, href=href, total=total,
                          items=members, links=links, api=self._api)

    @staticmethod
    def _clean_permissions(permissions):
        """Return a copy of a permissions dict without the 'execute' key.

        'execute' is not a valid volume permission. Working on a copy keeps
        the caller's dictionary intact (the original code mutated it).
        """
        return {
            key: value for key, value in permissions.items()
            if key != 'execute'
        }

    def add_member(self, user, permissions):
        """
        Add a member to the volume.

        :param user: Member username.
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        user = Transform.to_user(user)
        data = {'username': user, 'type': 'USER'}
        if isinstance(permissions, dict):
            data['permissions'] = self._clean_permissions(permissions)
        extra = {
            'resource': type(self).__name__,
            'query': {'id': self.id, 'data': data}
        }
        logger.info('Adding volume member', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def add_member_team(self, team, permissions):
        """
        Add a member (team) to a volume.

        :param team: Team object or team identifier.
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        team = Transform.to_team(team)
        data = {'username': team, 'type': 'TEAM'}
        if isinstance(permissions, dict):
            data['permissions'] = self._clean_permissions(permissions)
        extra = {
            'resource': type(self).__name__,
            'query': {'id': self.id, 'data': data}
        }
        logger.info('Adding volume team member', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def add_member_division(self, division, permissions):
        """
        Add a member (division) to a volume.

        :param division: Division object or division identifier.
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        division = Transform.to_division(division)
        data = {'username': division, 'type': 'DIVISION'}
        if isinstance(permissions, dict):
            data['permissions'] = self._clean_permissions(permissions)
        extra = {
            'resource': type(self).__name__,
            'query': {'id': self.id, 'data': data}
        }
        logger.info('Adding volume division member', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def get_member(self, username, api=None):
        """
        Fetches information about a single volume member.

        :param username: Member name.
        :param api: Api instance.
        :return: Member object.
        """
        # Use the instance api (self._api) for consistency with every other
        # method; the original reached for the class-level self._API here.
        api = api if api else self._api
        response = api.get(
            url=self._URL['member'].format(id=self.id, username=username))
        data = response.json()
        return Member(api=api, **data)

    def remove_member(self, user):
        """
        Remove member from the volume.

        :param user: User to be removed.
        """
        username = Transform.to_user(user)
        extra = {
            'resource': type(self).__name__,
            'query': {'id': self.id, 'user': user}
        }
        logger.info('Removing volume member', extra=extra)
        self._api.delete(
            url=self._URL['member'].format(id=self.id, username=username))
class Volume(Resource):
    """
    Central resource for managing volumes.
    """
    _URL = {
        'query': '/storage/volumes',
        'get': '/storage/volumes/{id}',
        'delete': '/storage/volumes/{id}',
    }

    href = HrefField()
    id = StringField(read_only=True)
    name = StringField(read_only=False)
    description = StringField(read_only=False)
    access_mode = StringField(read_only=False)
    service = CompoundField(VolumeService, read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_on = DateTimeField(read_only=True)
    active = BooleanField(read_only=True)

    def __str__(self):
        return f'<Volume: id={self.id}>'

    @classmethod
    def query(cls, offset=None, limit=None, api=None):
        """
        Query (List) volumes.

        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        return super(Volume, cls)._query(
            url=cls._URL['query'], offset=offset, limit=limit,
            fields='_all', api=api
        )

    @classmethod
    def create_s3_volume(cls, name, bucket, access_key_id, secret_access_key,
                         access_mode, description=None, prefix=None,
                         properties=None, api=None):
        """
        Create s3 volume.

        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param access_key_id: Amazon access key identifier.
        :param secret_access_key: Amazon secret access key.
        :param access_mode: Access Mode.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        credentials = {
            'access_key_id': access_key_id,
            'secret_access_key': secret_access_key,
        }
        service = {
            'type': VolumeType.S3,
            'bucket': bucket,
            'credentials': credentials,
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        payload = {
            'name': name,
            'service': service,
            'access_mode': access_mode,
        }
        if description:
            payload['description'] = description

        api = api or cls._API
        volume_data = api.post(url=cls._URL['query'], data=payload).json()
        return Volume(api=api, **volume_data)

    @classmethod
    def create_google_volume(cls, name, bucket, client_email, private_key,
                             access_mode, description=None, prefix=None,
                             properties=None, api=None):
        """
        Create google volume.

        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param client_email: Google client email.
        :param private_key: Google client private key.
        :param access_mode: Access Mode.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        credentials = {
            'client_email': client_email,
            'private_key': private_key,
        }
        service = {
            'type': VolumeType.GOOGLE,
            'bucket': bucket,
            'credentials': credentials,
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        payload = {
            'name': name,
            'service': service,
            'access_mode': access_mode,
        }
        if description:
            payload['description'] = description

        api = api or cls._API
        volume_data = api.post(url=cls._URL['query'], data=payload).json()
        return Volume(api=api, **volume_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the volume on the server.

        :param inplace: Apply edits on the current instance or get a new one.
        :return: Volume instance.
        :raises ResourceNotModified: if no fields were modified.
        """
        modified = self._modified_data()
        # Guard clause: nothing to persist means the call was a no-op error.
        if not modified:
            raise ResourceNotModified()
        data = self._api.patch(url=self._URL['get'].format(id=self.id),
                               data=modified).json()
        return Volume(api=self._api, **data)

    def get_imports(self, project=None, state=None, offset=None, limit=None):
        """
        Fetches imports for this volume.

        :param project: Optional project identifier.
        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.imports.query(
            volume=self, project=project, state=state,
            offset=offset, limit=limit
        )

    def get_exports(self, state=None, offset=None, limit=None):
        """
        Fetches exports for this volume.

        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.exports.query(
            volume=self, state=state, offset=offset, limit=limit
        )
class Import(Resource):
    """
    Central resource for managing imports.

    An import job copies a file from a connected storage volume into
    a project on the Platform.
    """
    _URL = {
        'query': '/storage/imports',
        'get': '/storage/imports/{id}',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    source = CompoundField(VolumeFile, read_only=True)
    destination = CompoundField(ImportDestination, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    _result = DictField(name='result', read_only=True)

    def __str__(self):
        return f'<Import: id={self.id}>'

    def __eq__(self, other):
        same_class = (
            hasattr(other, '__class__')
            and self.__class__ == other.__class__
        )
        if not same_class:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def result(self):
        """Resulting File of this import, or None when not available."""
        try:
            return File(id=self._result['id'], api=self._api)
        except TypeError:
            # _result has not been populated yet.
            return None

    @classmethod
    def submit_import(cls, volume, location, project, name=None,
                      overwrite=False, properties=None, api=None):
        """
        Submits new import job.

        :param volume: Volume identifier.
        :param location: Volume location.
        :param project: Project identifier.
        :param name: Optional file name.
        :param overwrite: If True it will overwrite file if exists.
        :param properties: Properties dictionary.
        :param api: Api instance.
        :return: Import object.
        """
        volume = Transform.to_volume(volume)
        project = Transform.to_project(project)

        destination = {'project': project}
        if name:
            destination['name'] = name

        payload = {
            'source': {'volume': volume, 'location': location},
            'destination': destination,
            'overwrite': overwrite,
        }
        if properties:
            payload['properties'] = properties

        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': payload}
        logger.info('Submitting import', extra=extra)
        import_data = api.post(cls._URL['query'], data=payload).json()
        return Import(api=api, **import_data)

    @classmethod
    def query(cls, project=None, volume=None, state=None, offset=None,
              limit=None, api=None):
        """
        Query (List) imports.

        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional import state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        project = Transform.to_project(project) if project else project
        volume = Transform.to_volume(volume) if volume else volume
        return super(Import, cls)._query(
            url=cls._URL['query'], project=project, volume=volume,
            state=state, fields='_all', offset=offset, limit=limit, api=api
        )
class Import(Resource):
    """
    Central resource for managing imports from volumes, including bulk
    retrieval and bulk submission of import jobs.
    """
    _URL = {
        'query': '/storage/imports',
        'get': '/storage/imports/{id}',
        'bulk_get': '/bulk/storage/imports/get',
        'bulk_create': '/bulk/storage/imports/create',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    preserve_folder_structure = BooleanField(read_only=True)
    source = CompoundField(VolumeFile, read_only=True)
    destination = CompoundField(ImportDestination, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    # Raw 'result' payload from the server; exposed via the `result` property.
    _result = DictField(name='result', read_only=True)

    def __str__(self):
        return six.text_type('<Import: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def result(self):
        """Imported File, or None while the import has no result yet."""
        try:
            return File(api=self._api, **self._result)
        except TypeError:
            # _result is None until the import finishes successfully.
            return None

    @classmethod
    def submit_import(cls, volume, location, project=None, name=None,
                      overwrite=False, properties=None, parent=None,
                      preserve_folder_structure=True, api=None):
        """
        Submits new import job.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param project: Project identifier.
        :param name: Optional file name.
        :param overwrite: If true it will overwrite file if exists.
        :param properties: Properties dictionary.
        :param parent: The ID of the target folder to which the item should be
            imported. Should not be used together with project.
        :param preserve_folder_structure: Whether to keep the exact source
            folder structure. The default value is true if the item being
            imported is a folder. Should not be used if you are importing
            a file.
        :param api: Api instance.
        :return: Import object.
        """
        data = {}
        volume = Transform.to_volume(volume)

        # Destination must be identified by exactly one of project / parent.
        if project and parent:
            raise SbgError(
                'Project and parent identifiers are mutually exclusive')
        elif project:
            project = Transform.to_project(project)
            destination = {'project': project}
        elif parent:
            parent = Transform.to_file(parent)
            destination = {'parent': parent}
        else:
            raise SbgError('Project or parent identifier is required.')

        source = {'volume': volume, 'location': location}
        if name:
            destination['name'] = name

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite
        # Only sent when disabled; server default is to preserve structure.
        if not preserve_folder_structure:
            data['preserve_folder_structure'] = preserve_folder_structure
        if properties:
            data['properties'] = properties

        api = api if api else cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting import', extra=extra)
        _import = api.post(cls._URL['query'], data=data).json()
        return Import(api=api, **_import)

    @classmethod
    def query(cls, project=None, volume=None, state=None, offset=None,
              limit=None, api=None):
        """
        Query (List) imports.
        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional import state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if project:
            project = Transform.to_project(project)
        if volume:
            volume = Transform.to_volume(volume)
        return super(Import, cls)._query(url=cls._URL['query'],
                                         project=project, volume=volume,
                                         state=state, fields='_all',
                                         offset=offset, limit=limit, api=api)

    @classmethod
    def bulk_get(cls, imports, api=None):
        """
        Retrieve imports in bulk
        :param imports: Imports to be retrieved.
        :param api: Api instance.
        :return: List of ImportBulkRecord objects.
        """
        api = api or cls._API
        import_ids = [Transform.to_import(import_) for import_ in imports]
        data = {'import_ids': import_ids}
        response = api.post(url=cls._URL['bulk_get'], data=data)
        return ImportBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_submit(cls, imports, api=None):
        """
        Submit imports in bulk
        :param imports: List of dicts describing a wanted import.
        :param api: Api instance.
        :return: List of ImportBulkRecord objects.
        """
        if not imports:
            raise SbgError('Imports are required')

        api = api or cls._API
        items = []
        for import_ in imports:
            project = import_.get('project')
            parent = import_.get('parent')

            # Same destination rules as submit_import.
            if project and parent:
                raise SbgError(
                    'Project and parent identifiers are mutually exclusive')
            elif project:
                destination = {'project': Transform.to_project(project)}
            elif parent:
                destination = {'parent': Transform.to_file(parent)}
            else:
                raise SbgError('Project or parent identifier is required.')

            volume = Transform.to_volume(import_.get('volume'))
            location = Transform.to_location(import_.get('location'))
            name = import_.get('name', None)
            overwrite = import_.get('overwrite', False)

            if name:
                destination['name'] = name

            items.append({
                'source': {
                    'volume': volume,
                    'location': location
                },
                'destination': destination,
                'overwrite': overwrite
            })

        data = {'items': items}
        response = api.post(url=cls._URL['bulk_create'], data=data)
        return ImportBulkRecord.parse_records(response=response, api=api)
class Export(Resource):
    """
    Central resource for managing exports of files to volumes, including
    bulk retrieval and bulk submission of export jobs.
    """
    _URL = {
        'query': '/storage/exports',
        'get': '/storage/exports/{id}',
        'bulk_get': '/bulk/storage/exports/get',
        'bulk_create': '/bulk/storage/exports/create',
    }

    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    # Raw 'source' payload from the server; exposed via the `source` property.
    _source = DictField(name='source', read_only=True)
    destination = CompoundField(VolumeFile, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    # Raw 'result' payload from the server; exposed via the `result` property.
    _result = DictField(name='result', read_only=True)
    properties = CompoundField(VolumeProperties, read_only=True)

    def __str__(self):
        return f'<Export: id={self.id}>'

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @property
    def source(self):
        """File being exported, or None if the source is not available."""
        try:
            return File(id=self._source['file'], api=self._api)
        except (TypeError, KeyError):
            # TypeError: _source is None.
            # KeyError: source payload present but has no 'file' entry.
            return None

    @property
    def result(self):
        """Exported File, or None while the export has no result yet."""
        try:
            return File(api=self._api, **self._result)
        except TypeError:
            # _result is None until the export finishes successfully.
            return None

    @classmethod
    def submit_export(cls, file, volume, location, properties=None,
                      overwrite=False, copy_only=False, api=None):
        """
        Submit new export job.
        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If true it will overwrite file if exists
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api Instance.
        :return: Export object.
        """
        data = {}
        params = {}

        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)
        destination = {'volume': volume, 'location': location}
        source = {'file': file}

        if properties:
            data['properties'] = properties

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting export', extra=extra)

        api = api if api else cls._API
        # copy_only is sent as a query param, and only when enabled.
        if copy_only:
            params['copy_only'] = True
            _export = api.post(
                cls._URL['query'], data=data, params=params).json()
        else:
            _export = api.post(cls._URL['query'], data=data).json()

        return Export(api=api, **_export)

    @classmethod
    def query(cls, volume=None, state=None, offset=None,
              limit=None, api=None):
        """
        Query (List) exports.
        :param volume: Optional volume identifier.
        :param state: Optional export state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if volume:
            volume = Transform.to_volume(volume)
        return super()._query(url=cls._URL['query'], volume=volume,
                              state=state, offset=offset, limit=limit,
                              fields='_all', api=api)

    @classmethod
    def bulk_get(cls, exports, api=None):
        """
        Retrieve exports in bulk.
        :param exports: Exports to be retrieved.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        """
        api = api or cls._API
        export_ids = [Transform.to_export(export) for export in exports]
        data = {'export_ids': export_ids}
        response = api.post(url=cls._URL['bulk_get'], data=data)
        return ExportBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_submit(cls, exports, copy_only=False, api=None):
        """
        Create exports in bulk.
        :param exports: List of dicts describing a wanted export.
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        """
        if not exports:
            raise SbgError('Exports are required')

        api = api or cls._API
        items = []
        for export in exports:
            file_ = Transform.to_file(export.get('file'))
            volume = Transform.to_volume(export.get('volume'))
            location = Transform.to_location(export.get('location'))
            properties = export.get('properties', {})
            overwrite = export.get('overwrite', False)

            item = {
                'source': {
                    'file': file_
                },
                'destination': {
                    'volume': volume,
                    'location': location
                },
                'properties': properties,
                'overwrite': overwrite
            }
            items.append(item)

        data = {'items': items}
        params = {'copy_only': copy_only}
        response = api.post(
            url=cls._URL['bulk_create'], params=params, data=data)
        return ExportBulkRecord.parse_records(response=response, api=api)
class Task(Resource):
    """
    Central resource for managing tasks.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details"
    }

    href = HrefField()
    id = UuidField()
    name = StringField()
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField()
    app = StringField()
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    batch = BooleanField(read_only=True)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    end_time = DateTimeField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)

    def __str__(self):
        return six.text_type('<Task: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls, project=None, status=None, batch=None,
              parent=None, offset=None, limit=None, api=None):
        """
        Query (List) tasks
        :param project: Target project. optional.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        return super(Task, cls)._query(url=cls._URL['query'], project=project,
                                       status=status, batch=batch,
                                       parent=parent, offset=offset,
                                       limit=limit, fields='_all', api=api)

    @classmethod
    def create(cls, name, project, app, revision=None, batch_input=None,
               batch_by=None, inputs=None, description=None, run=False,
               api=None):
        """
        Creates a task on server.
        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param api: Api instance.
        :return: Task object.
        :raises: TaskValidationError if validation Fails.
        :raises: SbgError if any exception occurs during request.
        """
        task_data = {}
        project = Transform.to_project(project)
        app = Transform.to_app(app)
        # 'is not None' so that revision 0 is not silently dropped
        # (a bare truthiness test would skip it).
        if revision is not None:
            app = app + "/" + six.text_type(revision)

        # Guard against the documented default: inputs=None must behave
        # like an empty input map instead of raising TypeError.
        task_inputs = {'inputs': {}}
        for key, value in (inputs or {}).items():
            if isinstance(value, File):
                # NOTE(review): unlike save(), single-file inputs here do not
                # include the file name; preserved as-is to keep the request
                # payload unchanged.
                task_inputs['inputs'][key] = {
                    'class': 'File',
                    'path': value.id,
                }
            elif isinstance(value, list):
                # Reuse the shared formatter for file items; pass other
                # items through untouched, preserving list order.
                task_inputs['inputs'][key] = [
                    Task._to_api_file_format(item)
                    if isinstance(item, File) else item
                    for item in value
                ]
            else:
                task_inputs['inputs'][key] = value

        if batch_input:
            task_data['batch_input'] = batch_input
        if batch_by:
            task_data['batch_by'] = batch_by

        task_meta = {
            'name': name,
            'project': project,
            'app': app,
            'description': description
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)

        params = {'action': 'run'} if run else {}
        api = api if api else cls._API
        created_task = api.post(cls._URL['query'], data=task_data,
                                params=params).json()
        if run and created_task.get('errors'):
            raise TaskValidationError(
                'Unable to run task! Task contains errors.',
                task=Task(api=api, **created_task))
        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task
        :param inplace: Apply action on the current object or return a new
            one.
        :return: Task object.
        """
        task_data = self._api.post(
            url=self._URL['abort'].format(id=self.id)).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, inplace=True):
        """
        Run task
        :param batch: if False batching will be disabled.
        :param inplace: Apply action on the current object or return a new
            one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False
        task_data = self._api.post(url=self._URL['run'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the task on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Task instance, or None when there is nothing to save.
        """
        modified_data = self._modified_data()
        # Nothing modified -> no request is made and None is returned
        # (handled by the inplace_reload decorator).
        if not modified_data:
            return

        task_request_data = {}
        inputs = modified_data.pop('inputs', None)
        task_request_data.update(modified_data)
        if inputs:
            task_request_data['inputs'] = {}
            for input_id, input_value in inputs.items():
                if isinstance(input_value, File):
                    in_file = Task._to_api_file_format(input_value)
                    task_request_data['inputs'][input_id] = in_file
                elif isinstance(input_value, list):
                    # Convert file items in place, keeping the original list
                    # order (previously non-file items were moved to the
                    # front of the list).
                    task_request_data['inputs'][input_id] = [
                        Task._to_api_file_format(item)
                        if isinstance(item, File) else item
                        for item in input_value
                    ]
                else:
                    task_request_data['inputs'][input_id] = input_value

        data = self._api.patch(url=self._URL['get'].format(id=self.id),
                               data=task_request_data).json()
        return Task(api=self._api, **data)

    @staticmethod
    def _to_api_file_format(_file):
        """Serialize a File into the API's file-input dictionary form."""
        api_file = {'class': 'File', 'path': _file.id}
        if _file.name:
            api_file['name'] = _file.name
        return api_file

    def get_execution_details(self):
        """
        Retrieves execution details for a task.
        :return: Execution details instance.
        """
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self):
        """
        Retrieves batch child tasks for this task if its a batch task.
        :return: Collection instance.
        :raises SbgError: if task is not a batch task.
        """
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(parent=self.id, api=self._api)