Example 1
class InvoicePeriod(Resource):
    """
    Invoice period resource contains datetime information about the invoice.
    It has from and to fields which represent the interval period for this
    invoice.
    """
    from_ = DateTimeField(name='from', read_only=True)
    to = DateTimeField(read_only=True)

    def __str__(self):
        return f'<InvoicePeriod: from={self.from_}, to={self.to}>'
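A minimal usage sketch for reading these fields. The api.invoices accessor and the invoice_period field name are assumptions not shown above; the URL and token are placeholders.

import sevenbridges as sbg

# Assumed client setup; url and token are placeholders.
api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Assumption: invoices are listed via api.invoices and each invoice exposes
# this resource under an invoice_period compound field.
for invoice in api.invoices.query():
    period = invoice.invoice_period
    # Both fields are read-only datetimes parsed by the SDK.
    print(f'{invoice.id}: {period.from_} -> {period.to}')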
Example 2
class TaskBreakdown(Resource):
    """
    Task breakdown resource contains information regarding
    billing group analysis breakdown costs.
    """
    href = HrefField(read_only=True)
    runner_username = StringField(read_only=True)
    time_started = DateTimeField(read_only=True)
    time_finished = DateTimeField(read_only=True)
    task_cost = CompoundField(Price, read_only=True)

    def __str__(self):
        return f'<TaskBreakdown: href={self.href}>'
Example 3
class ExecutionDetails(Resource):
    """
    Task execution details.
    """
    href = HrefField()
    start_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    status = StringField(read_only=True)
    message = StringField(read_only=True)
    jobs = CompoundListField(Job, read_only=True)

    def __str__(self):
        return six.text_type('<Execution Details>')
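A short usage sketch, assuming execution details are fetched from a task via get_execution_details() (that accessor is an assumption; only the resource fields are shown above). Identifiers are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Assumption: tasks are fetched by id and expose get_execution_details().
task = api.tasks.get(id='<task-id>')
details = task.get_execution_details()

print(details.status, details.message)
# Each job describes one executed node of the analysis.
for job in details.jobs:
    print(f'{job.name}: {job.status} ({job.start_time} -> {job.end_time})')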
Example 4
class Job(Resource):
    """
    Job resource contains information for a single executed node
    in the analysis.
    """
    name = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    status = StringField(read_only=True)
    command_line = StringField(read_only=True)
    instance = CompoundField(Instance, read_only=True)
    logs = CompoundField(Log, read_only=True)

    def __str__(self):
        return six.text_type('<Job: name={name}, status={status}>'.format(
            name=self.name, status=self.status))
Example 5
class BillingGroupAnalysisBreakdown(Resource):
    _URL = {
        'query': '/billing/groups/{id}/breakdown/analysis'
    }

    project_name = StringField(read_only=True)
    analysis_app_name = StringField(read_only=True)
    analysis_name = StringField(read_only=True)
    analysis_type = StringField(read_only=True)
    analysis_id = UuidField(read_only=True)
    ran_by = StringField(read_only=True)
    analysis_status = StringField(read_only=True)
    analysis_cost = CompoundField(AnalysisCost, read_only=True)
    refunded_amount = FloatField(read_only=True)
    time_started = DateTimeField(read_only=True)
    time_finished = DateTimeField(read_only=True)
    project_locked = BooleanField(read_only=True)

    @classmethod
    def query(cls, bg_id, api=None, date_from=None, date_to=None,
              invoice_id=None, fields=None, offset=0, limit=50):
        """
        Query (List) billing group analysis breakdown. Date parameters must be
        strings in the format MM-DD-YYYY.

        :param bg_id: Billing group identifier.
        :param date_from: Include all analysis transactions charged on or
         after date_from.
        :param date_to: Include all analysis transactions charged on or
         before date_to.
        :param invoice_id: Invoice identifier.
        :param fields: Subset of resource fields to include in the response.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API

        return super(BillingGroupAnalysisBreakdown, cls)._query(
            url=cls._URL['query'].format(id=bg_id), offset=offset, limit=limit,
            date_from=date_from, date_to=date_to, invoice_id=invoice_id,
            fields=fields, api=api
        )

    def __str__(self):
        return '<BillingGroupAnalysisBreakdown>'
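A usage sketch built directly on the query classmethod above; the billing group id is a placeholder and the dates follow the documented MM-DD-YYYY format.

import sevenbridges as sbg
from sevenbridges import BillingGroupAnalysisBreakdown  # assumed importable

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

breakdown = BillingGroupAnalysisBreakdown.query(
    bg_id='<billing-group-id>',
    date_from='01-01-2023',
    date_to='01-31-2023',
    limit=25,
    api=api,
)
for item in breakdown:
    print(item.analysis_name, item.analysis_status, item.analysis_cost)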
Example 6
class Job(Resource):
    """
    Job resource contains information for a single executed node
    in the analysis.
    """
    name = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    status = StringField(read_only=True)
    command_line = StringField(read_only=True)
    retried = BooleanField(read_only=True)
    instance = CompoundField(Instance, read_only=True)
    docker = CompoundField(JobDocker, read_only=True)
    logs = CompoundField(Logs, read_only=True)

    def __str__(self):
        return f'<Job: name={self.name}, status={self.status}>'
Example 7
class Rate(Resource):
    """
    Rate resource.
    """
    limit = IntegerField(read_only=True)
    remaining = IntegerField(read_only=True)
    reset = DateTimeField(read_only=True)

    def __str__(self):
        return f'<Rate: limit={self.limit}, remaining={self.remaining}>'
Example 8
class Rate(Resource):
    """
    Rate resource.
    """
    limit = IntegerField(read_only=True)
    remaining = IntegerField(read_only=True)
    reset = DateTimeField()

    def __str__(self):
        return six.text_type('<Rate: limit={limit}, remaining={rem}>'.format(
            limit=self.limit, rem=self.remaining))
Example 9
class Volume(Resource):
    """
    Central resource for managing volumes.
    """
    _URL = {
        'query': '/storage/volumes',
        'get': '/storage/volumes/{id}',
        'delete': '/storage/volumes/{id}',
        'list': '/storage/volumes/{id}/list',
        'object': '/storage/volumes/{id}/object',
        'member': '/storage/volumes/{id}/members/{username}',
        'members_query': '/storage/volumes/{id}/members',
    }

    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    name = StringField(read_only=False)
    description = StringField(read_only=False)
    access_mode = StringField(read_only=False)
    service = CompoundField(VolumeService, read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_on = DateTimeField(read_only=True)
    active = BooleanField(read_only=True)

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<Volume: id={self.id}>'

    @classmethod
    def query(cls, offset=None, limit=None, api=None):
        """
        Query (List) volumes.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        return super()._query(url=cls._URL['query'],
                              offset=offset,
                              limit=limit,
                              fields='_all',
                              api=api)

    @classmethod
    def create_s3_volume(cls,
                         name,
                         bucket,
                         access_key_id,
                         secret_access_key,
                         access_mode,
                         description=None,
                         prefix=None,
                         properties=None,
                         api=None):
        """
        Create s3 volume.
        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param access_key_id: Amazon access key identifier.
        :param secret_access_key: Amazon secret access key.
        :param access_mode: Access Mode.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        service = {
            'type': VolumeType.S3,
            'bucket': bucket,
            'credentials': {
                'access_key_id': access_key_id,
                'secret_access_key': secret_access_key
            }
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        data = {'name': name, 'service': service, 'access_mode': access_mode}
        if description:
            data['description'] = description
        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating s3 volume', extra=extra)
        response = api.post(url=cls._URL['query'], data=data).json()
        return Volume(api=api, **response)

    @classmethod
    def create_google_volume(cls,
                             name,
                             bucket,
                             client_email,
                             private_key,
                             access_mode,
                             description=None,
                             prefix=None,
                             properties=None,
                             api=None):
        """
        Create google volume.
        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param client_email: Google client email.
        :param private_key: Google client private key.
        :param access_mode: Access Mode.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        service = {
            'type': VolumeType.GOOGLE,
            'bucket': bucket,
            'credentials': {
                'client_email': client_email,
                'private_key': private_key
            }
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        data = {'name': name, 'service': service, 'access_mode': access_mode}
        if description:
            data['description'] = description
        api = api or cls._API

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating google volume', extra=extra)
        response = api.post(url=cls._URL['query'], data=data).json()
        return Volume(api=api, **response)

    @classmethod
    def create_oss_volume(cls,
                          name,
                          bucket,
                          endpoint,
                          access_key_id,
                          secret_access_key,
                          access_mode,
                          description=None,
                          prefix=None,
                          properties=None,
                          api=None):
        """
        Create oss volume.
        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param access_key_id: Access key identifier.
        :param secret_access_key: Secret access key.
        :param access_mode: Access Mode.
        :param endpoint: Volume Endpoint.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        service = {
            'type': VolumeType.OSS,
            'bucket': bucket,
            'endpoint': endpoint,
            'credentials': {
                'access_key_id': access_key_id,
                'secret_access_key': secret_access_key
            }
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        data = {'name': name, 'service': service, 'access_mode': access_mode}
        if description:
            data['description'] = description
        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating oss volume', extra=extra)
        response = api.post(url=cls._URL['query'], data=data).json()
        return Volume(api=api, **response)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modifications to the volume on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Volume instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving volume', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            volume = Volume(api=self._api, **data)
            return volume
        else:
            raise ResourceNotModified()

    def list(self, prefix=None, limit=None):
        """
        Lists volume contents.
        :param prefix: Optional prefix to filter objects by location.
        :param limit: Pagination limit.
        :return: VolumeCollection object.
        """
        params = {}
        if prefix:
            params['prefix'] = prefix
        if limit:
            params['limit'] = limit

        data = self._api.get(url=self._URL['list'].format(id=self.id),
                             params=params).json()

        href = data['href']
        links = [VolumeLink(**link) for link in data['links']]

        objects = [
            VolumeObject(api=self._api, **item) for item in data['items']
        ]
        prefixes = [
            VolumePrefix(api=self._api, **prefix) for prefix in  # noqa: F812
            data['prefixes']
        ]
        return VolumeCollection(href=href,
                                items=objects,
                                links=links,
                                prefixes=prefixes,
                                api=self._api)

    def get_volume_object_info(self, location):
        """
        Fetches information about a single volume object, usually a file.
        :param location: Volume object location.
        :return: VolumeObject object.
        """
        param = {'location': location}
        data = self._api.get(url=self._URL['object'].format(id=self.id),
                             params=param).json()
        return VolumeObject(api=self._api, **data)

    def get_imports(self, project=None, state=None, offset=None, limit=None):
        """
        Fetches imports for this volume.
        :param project: Optional project identifier.
        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.imports.query(volume=self,
                                       project=project,
                                       state=state,
                                       offset=offset,
                                       limit=limit)

    def get_exports(self, state=None, offset=None, limit=None):
        """
        Fetches exports for this volume.
        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.exports.query(volume=self,
                                       state=state,
                                       offset=offset,
                                       limit=limit)

    def get_members(self, offset=None, limit=None):
        """
        Retrieves volume members.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        extra = {'resource': type(self).__name__, 'query': {'id': self.id}}
        logger.info('Get volume members', extra=extra)
        response = self._api.get(
            url=self._URL['members_query'].format(id=self.id),
            params={
                'offset': offset,
                'limit': limit
            })
        data = response.json()
        total = response.headers['x-total-matching-query']
        members = [Member(api=self._api, **member) for member in data['items']]
        links = [Link(**link) for link in data['links']]
        href = data['href']
        return Collection(resource=Member,
                          href=href,
                          total=total,
                          items=members,
                          links=links,
                          api=self._api)

    def add_member(self, user, permissions):
        """
        Add a member to the volume.
        :param user:  Member username
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        user = Transform.to_user(user)
        data = {'username': user, 'type': 'USER'}

        if 'execute' in permissions:
            permissions.pop('execute')

        if isinstance(permissions, dict):
            data.update({'permissions': permissions})

        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
                'data': data,
            }
        }
        logger.info('Adding volume member', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def add_member_team(self, team, permissions):
        """
        Add a member (team) to a volume.
        :param team: Team object or team identifier.
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        team = Transform.to_team(team)
        data = {'username': team, 'type': 'TEAM'}

        if 'execute' in permissions:
            permissions.pop('execute')

        if isinstance(permissions, dict):
            data.update({'permissions': permissions})

        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
                'data': data,
            }
        }
        logger.info('Adding volume team member', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def add_member_division(self, division, permissions):
        """
        Add a member (division) to a volume.
        :param division: Division object or division identifier.
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        division = Transform.to_division(division)

        if 'execute' in permissions:
            permissions.pop('execute')

        data = {'username': division, 'type': 'DIVISION'}
        if isinstance(permissions, dict):
            data.update({'permissions': permissions})

        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
                'data': data,
            }
        }
        logger.info('Adding volume division member', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def get_member(self, username, api=None):
        """
        Fetches information about a single volume member
        :param username: Member name
        :param api: Api instance
        :return: Member object
        """
        api = api if api else self._API

        response = api.get(url=self._URL['member'].format(id=self.id,
                                                          username=username), )
        data = response.json()
        return Member(api=api, **data)

    def remove_member(self, user):
        """
        Remove member from the volume.
        :param user: User to be removed.
        """
        username = Transform.to_user(user)
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
                'user': user,
            }
        }
        logger.info('Removing volume member', extra=extra)
        self._api.delete(
            url=self._URL['member'].format(id=self.id, username=username))
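A usage sketch combining the classmethods and instance methods above; bucket, credentials, and usernames are placeholders, and the permission keys are an assumption based on the platform's usual read/write/copy/admin model.

import sevenbridges as sbg
from sevenbridges import Volume  # assumed importable from the package root

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Register an S3 bucket as a read-write volume (see create_s3_volume above).
volume = Volume.create_s3_volume(
    name='my-s3-volume',
    bucket='<bucket-name>',
    access_key_id='<aws-access-key-id>',
    secret_access_key='<aws-secret-access-key>',
    access_mode='RW',
    prefix='datasets/',
    api=api,
)

# Browse the volume contents under a prefix.
for obj in volume.list(prefix='datasets/2023/'):
    print(obj.location)

# Rename the volume and persist the change.
volume.name = 'renamed-s3-volume'
volume.save()

# Share the volume with another user (permission keys assumed).
volume.add_member('<username>', permissions={'read': True, 'copy': True})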
Example 10
class Marker(Resource):
    _URL = {
        'query': '/genome/markers',
        'get': '/genome/markers/{id}',
        'delete': '/genome/markers/{id}'
    }

    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    file = StringField(read_only=True)
    name = StringField(read_only=False)
    chromosome = StringField(read_only=False)
    position = CompoundField(MarkerPosition, read_only=False)
    created_time = DateTimeField(read_only=True)
    created_by = StringField(read_only=True)

    def __str__(self):
        return f'<Marker: id={self.id}>'

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @classmethod
    def query(cls, file, offset=None, limit=None, api=None):
        """
        Queries genome markers on a file.
        :param file: Genome file, usually a BAM file.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api if api else cls._API

        file = Transform.to_file(file)
        return super()._query(url=cls._URL['query'],
                              offset=offset,
                              limit=limit,
                              file=file,
                              fields='_all',
                              api=api)

    @classmethod
    def create(cls, file, name, position, chromosome, private=True, api=None):
        """
        Create a marker on a file.
        :param file: File object or identifier.
        :param name: Marker name.
        :param position: Marker position object.
        :param chromosome: Chromosome number.
        :param private: Whether the marker is private or public.
        :param api: Api instance.
        :return: Marker object.
        """
        api = api if api else cls._API

        file = Transform.to_file(file)
        data = {
            'file': file,
            'name': name,
            'position': position,
            'chromosome': chromosome,
            'private': private
        }

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating marker', extra=extra)
        marker_data = api.post(url=cls._URL['query'], data=data).json()
        return Marker(api=api, **marker_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modifications to the marker on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Marker instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving marker', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            marker = Marker(api=self._api, **data)
            return marker
        else:
            raise ResourceNotModified()
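A usage sketch for the classmethods above; the file id is a placeholder and the position payload is assumed to be a dict with start/end keys matching the MarkerPosition compound field.

import sevenbridges as sbg
from sevenbridges import Marker  # assumed importable from the package root

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

bam_file = api.files.get(id='<bam-file-id>')

# Create a private marker on the file (see Marker.create above).
marker = Marker.create(
    file=bam_file,
    name='variant-of-interest',
    position={'start': 12345, 'end': 12400},  # assumed MarkerPosition shape
    chromosome='7',
    api=api,
)

# List all markers on the file.
for m in Marker.query(file=bam_file, api=api):
    print(m.id, m.name, m.chromosome)

# Rename the marker and persist the change.
marker.name = 'reviewed-variant'
marker.save()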
Example 11
class Export(Resource):
    """
    Central resource for managing exports.
    """
    _URL = {
        'query': '/storage/exports',
        'get': '/storage/exports/{id}',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    _source = DictField(name='source', read_only=True)
    destination = CompoundField(VolumeFile, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    _result = DictField(name='result', read_only=True)
    properties = CompoundField(VolumeProperties, read_only=True)

    def __str__(self):
        return six.text_type('<Export: id={id}>'.format(id=self.id))

    @property
    def source(self):
        try:
            return File(id=self._source['file'], api=self._api)
        except TypeError:
            return None

    @property
    def result(self):
        try:
            return File(id=self._result['id'], api=self._api)
        except TypeError:
            return None

    @classmethod
    def submit_export(cls,
                      file,
                      volume,
                      location,
                      properties=None,
                      overwrite=False,
                      api=None):
        """
        Submit new export job.
        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If true, the file will be overwritten if it exists.
        :param api: Api Instance.
        :return: Export object.
        """
        data = {}
        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)
        destination = {'volume': volume, 'location': location}
        source = {'file': file}
        if properties:
            data['properties'] = properties

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        api = api if api else cls._API
        _export = api.post(cls._URL['query'], data=data).json()
        return Export(api=api, **_export)

    @classmethod
    def query(cls,
              project=None,
              volume=None,
              state=None,
              offset=None,
              limit=None,
              api=None):
        """
        Query (List) exports.
        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional export state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API

        if project:
            project = Transform.to_project(project)
        if volume:
            volume = Transform.to_volume(volume)

        return super(Export, cls)._query(url=cls._URL['query'],
                                         project=project,
                                         volume=volume,
                                         state=state,
                                         offset=offset,
                                         limit=limit,
                                         fields='_all',
                                         api=api)
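A usage sketch for submitting and listing exports with the classmethods above; identifiers are placeholders and the state value is an assumption.

import sevenbridges as sbg
from sevenbridges import Export  # assumed importable from the package root

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

result_file = api.files.get(id='<file-id>')

# Push a platform file to a registered volume (see submit_export above).
export = Export.submit_export(
    file=result_file,
    volume='<volume-id>',
    location='results/output.bam',
    overwrite=True,
    api=api,
)
print(export.id, export.state)

# List finished exports for that volume ('COMPLETED' is an assumed state).
for item in Export.query(volume='<volume-id>', state='COMPLETED', api=api):
    print(item.id, item.state)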
Example 12
class File(Resource):
    """
    Central resource for managing files.
    """
    _URL = {
        'query': '/files',
        'get': '/files/{id}',
        'delete': '/files/{id}',
        'copy': '/files/{id}/actions/copy',
        'download_info': '/files/{id}/download_info',
        'metadata': '/files/{id}/metadata'
    }

    href = HrefField()
    id = StringField()
    name = StringField(read_only=False)
    size = IntegerField(read_only=True)
    project = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_on = DateTimeField(read_only=True)
    origin = CompoundField(FileOrigin)
    metadata = CompoundField(Metadata, read_only=False)

    def __str__(self):
        return six.text_type('<File: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls,
              project,
              names=None,
              metadata=None,
              origin=None,
              offset=None,
              limit=None,
              api=None):
        """
        Query (List) files.
        :param project: Project identifier.
        :param names: List of file names.
        :param metadata: Metadata query dict.
        :param origin: Origin query dict.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API

        project = Transform.to_project(project)
        query_params = {}

        if names and isinstance(names, list):
            query_params['name'] = names

        metadata_params = {}
        if metadata and isinstance(metadata, dict):
            for k, v in metadata.items():
                metadata_params['metadata.' + k] = metadata[k]

        query_params.update(metadata_params)

        origin_params = {}
        if origin and isinstance(origin, dict):
            for k, v in origin.items():
                origin_params['origin.' + k] = origin[k]

        query_params.update(origin_params)

        return super(File, cls)._query(api=api,
                                       url=cls._URL['query'],
                                       project=project,
                                       offset=offset,
                                       limit=limit,
                                       **query_params)

    def copy(self, project, name=None):
        """
        Copies the current file.
        :param project: Destination project.
        :param name: Destination file name.
        :return: Copied File object.
        """
        project = Transform.to_project(project)
        data = {'project': project}
        if name:
            data['name'] = name
        new_file = self._api.post(url=self._URL['copy'].format(id=self.id),
                                  data=data).json()
        return File(api=self._api, **new_file)

    def download_info(self):
        """
        Fetches download information containing the file URL
        that can be used to download the file.
        :return: Download info object.
        """
        info = self._api.get(url=self._URL['download_info'].format(id=self.id))
        return DownloadInfo(api=self._api, **info.json())

    def download(self,
                 path,
                 retry=5,
                 timeout=10,
                 chunk_size=67108864,
                 wait=True):
        """
        Downloads the file.
        With wait=True (the default), the download is started and this call
        blocks until it completes. With wait=False, a download handle is
        returned and the download does not start until its .start() method
        is invoked.
        :param path: Full path to the new file.
        :param retry: Number of retries if an error occurs during download.
        :param timeout: Timeout for HTTP requests.
        :param chunk_size: Chunk size in bytes.
        :param wait: If True, wait for the download to complete.
        :return: Download handle (only when wait is False).
        """
        info = self.download_info()
        download = Download(url=info.url,
                            file_path=path,
                            retry=retry,
                            timeout=timeout,
                            chunk_size=chunk_size,
                            api=self._api)
        if wait:
            download.start()
            download.wait()
        else:
            return download

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modifications to the file on the server.
        :param inplace: Apply edits to the current instance or get a new one.
        :return: File instance.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            if 'metadata' in modified_data:
                self._api.patch(url=self._URL['metadata'].format(id=self.id),
                                data=modified_data['metadata'])
                self.metadata.dirty = {}
                return self.get(id=self.id)

            else:
                data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                       data=modified_data).json()
                file = File(api=self._api, **data)
                return file

    def stream(self, part_size=32 * PartSize.KB):
        """
        Creates an iterator which can be used to stream the file content.
        :param part_size: Size of each part in bytes. Defaults to 32KB.
        :return: Iterator over the file content.
        """
        download_info = self.download_info()
        response = self._api.get(url=download_info.url,
                                 stream=True,
                                 append_base=False)
        for part in response.iter_content(part_size):
            yield part
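A usage sketch tying the query, metadata, copy, and download methods above together; project names, metadata keys, and paths are placeholders.

import sevenbridges as sbg
from sevenbridges import File  # assumed importable from the package root

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Find files in a project by metadata (see File.query above).
files = File.query(
    project='<username>/<project-name>',
    metadata={'sample_id': 'Sample_1'},
    limit=10,
    api=api,
)

for f in files:
    # Update metadata on the server; save() patches only modified fields.
    f.metadata['quality_checked'] = 'true'
    f.save()

    # Copy the file into another project under a new name.
    f.copy(project='<username>/<other-project>', name=f'copy-{f.name}')

    # Download the file; with wait=True this call blocks until it finishes.
    f.download(path=f'/tmp/{f.name}', wait=True)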
Example 13
class AsyncJob(Resource):
    """
    Central resource for managing async jobs
    """

    _URL = {
        'list_file_jobs': '/async/files',
        'get_file_copy_job': '/async/files/copy/{id}',
        'get_file_delete_job': '/async/files/delete/{id}',
        'bulk_copy_files': '/async/files/copy',
        'bulk_delete_files': '/async/files/delete',
        'get_file_move_job': '/async/files/move/{id}',
        'bulk_move_files': '/async/files/move',
    }

    id = StringField(read_only=True)
    type = StringField(read_only=True)
    state = StringField(read_only=True)
    result = BasicListField(read_only=True)
    total_files = IntegerField(read_only=True)
    failed_files = IntegerField(read_only=True)
    completed_files = IntegerField(read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)

    def __str__(self):
        return six.text_type('<AsyncJob: type={type} id={id}>'.format(
            id=self.id, type=self.type))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def get_file_copy_job(cls, id, api=None):
        """
        Retrieve file copy async job
        :param id: Async job identifier
        :param api: Api instance
        :return: AsyncJob object
        """
        id = Transform.to_async_job(id)

        api = api if api else cls._API
        async_job = api.get(url=cls._URL['get_file_copy_job'].format(
            id=id)).json()
        return AsyncJob(api=api, **async_job)

    @classmethod
    def get_file_move_job(cls, id, api=None):
        """
        Retrieve file move async job
        :param id: Async job identifier
        :param api: Api instance
        :return: AsyncJob object
        """
        id = Transform.to_async_job(id)

        api = api if api else cls._API
        async_job = api.get(url=cls._URL['get_file_move_job'].format(
            id=id)).json()
        return AsyncJob(api=api, **async_job)

    @classmethod
    def get_file_delete_job(cls, id, api=None):
        """
        Retrieve file delete async job
        :param id: Async job identifier
        :param api: Api instance
        :return: AsyncJob object
        """
        id = Transform.to_async_job(id)

        api = api if api else cls._API
        async_job = api.get(url=cls._URL['get_file_delete_job'].format(
            id=id)).json()
        return AsyncJob(api=api, **async_job)

    def get_result(self, api=None):
        """
        Get async job result in bulk format.
        :param api: Api instance.
        :return: List of AsyncFileBulkRecord objects.
        """
        api = api or self._API
        if not self.result:
            return []
        return AsyncFileBulkRecord.parse_records(result=self.result, api=api)

    @classmethod
    def list_file_jobs(cls, offset=None, limit=None, api=None):
        """Query ( List ) async jobs
        :param offset: Pagination offset
        :param limit: Pagination limit
        :param api: Api instance
        :return: Collection object
        """
        api = api or cls._API
        return super(AsyncJob, cls)._query(
            api=api,
            url=cls._URL['list_file_jobs'],
            offset=offset,
            limit=limit,
        )

    @classmethod
    def file_bulk_copy(cls, files, api=None):
        """
        Copy files in bulk.
        :param files: List of file copy request items.
        :param api: Api instance.
        :return: AsyncJob object.
        """
        api = api or cls._API
        data = {'items': files}
        logger.info('Submitting async job for copying files in bulk')
        response = api.post(url=cls._URL['bulk_copy_files'], data=data).json()
        return AsyncJob(api=api, **response)

    @classmethod
    def file_bulk_move(cls, files, api=None):
        """
        Move files in bulk.
        :param files: List of file move request items.
        :param api: Api instance.
        :return: AsyncJob object.
        """
        api = api or cls._API
        data = {'items': files}
        logger.info('Submitting async job for moving files in bulk')
        response = api.post(url=cls._URL['bulk_move_files'], data=data).json()
        return AsyncJob(api=api, **response)

    @classmethod
    def file_bulk_delete(cls, files, api=None):
        """
        Delete files in bulk.
        :param files: List of file delete request items.
        :param api: Api instance.
        :return: AsyncJob object.
        """
        api = api or cls._API
        data = {'items': files}
        logger.info('Submitting async job for deleting files in bulk')
        response = api.post(url=cls._URL['bulk_delete_files'],
                            data=data).json()
        return AsyncJob(api=api, **response)
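A usage sketch for bulk copy with the classmethods above. The exact shape of each item is an assumption (file id plus destination project and optional new name); the method itself only forwards the list as items.

import sevenbridges as sbg
from sevenbridges import AsyncJob  # assumed importable from the package root

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Assumed item shape: file id, destination project, optional new name.
items = [
    {'file': '<file-id-1>', 'project': '<username>/<project>', 'name': 'a.bam'},
    {'file': '<file-id-2>', 'project': '<username>/<project>'},
]

job = AsyncJob.file_bulk_copy(files=items, api=api)
print(job.id, job.state)

# Re-fetch the job later and inspect per-file results.
job = AsyncJob.get_file_copy_job(id=job.id, api=api)
print(job.completed_files, 'of', job.total_files, 'copied')
for record in job.get_result(api=api):
    print(record)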
Example 14
class Import(Resource):
    """
    Central resource for managing imports.
    """
    _URL = {
        'query': '/storage/imports',
        'get': '/storage/imports/{id}',
        'bulk_get': '/bulk/storage/imports/get',
        'bulk_create': '/bulk/storage/imports/create',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    preserve_folder_structure = BooleanField(read_only=True)
    source = CompoundField(VolumeFile, read_only=True)
    destination = CompoundField(ImportDestination, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    _result = DictField(name='result', read_only=True)

    def __str__(self):
        return six.text_type('<Import: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def result(self):
        try:
            return File(api=self._api, **self._result)
        except TypeError:
            return None

    @classmethod
    def submit_import(cls,
                      volume,
                      location,
                      project=None,
                      name=None,
                      overwrite=False,
                      properties=None,
                      parent=None,
                      preserve_folder_structure=True,
                      api=None):
        """
        Submits new import job.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param project: Project identifier.
        :param name: Optional file name.
        :param overwrite: If true, the file will be overwritten if it exists.
        :param properties: Properties dictionary.
        :param parent: The ID of the target folder to which the item should be
            imported. Should not be used together with project.
        :param preserve_folder_structure: Whether to keep the exact source
            folder structure. The default value is true if the item being
            imported is a folder. Should not be used if you are importing
            a file.
        :param api: Api instance.
        :return: Import object.
        """
        data = {}
        volume = Transform.to_volume(volume)

        if project and parent:
            raise SbgError(
                'Project and parent identifiers are mutually exclusive')
        elif project:
            project = Transform.to_project(project)
            destination = {'project': project}
        elif parent:
            parent = Transform.to_file(parent)
            destination = {'parent': parent}
        else:
            raise SbgError('Project or parent identifier is required.')

        source = {'volume': volume, 'location': location}

        if name:
            destination['name'] = name

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        if not preserve_folder_structure:
            data['preserve_folder_structure'] = preserve_folder_structure

        if properties:
            data['properties'] = properties

        api = api if api else cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting import', extra=extra)
        _import = api.post(cls._URL['query'], data=data).json()
        return Import(api=api, **_import)

    @classmethod
    def query(cls,
              project=None,
              volume=None,
              state=None,
              offset=None,
              limit=None,
              api=None):
        """
        Query (List) imports.
        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional import state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API

        if project:
            project = Transform.to_project(project)
        if volume:
            volume = Transform.to_volume(volume)

        return super(Import, cls)._query(url=cls._URL['query'],
                                         project=project,
                                         volume=volume,
                                         state=state,
                                         fields='_all',
                                         offset=offset,
                                         limit=limit,
                                         api=api)

    @classmethod
    def bulk_get(cls, imports, api=None):
        """
        Retrieve imports in bulk
        :param imports: Imports to be retrieved.
        :param api: Api instance.
        :return: List of ImportBulkRecord objects.
        """
        api = api or cls._API
        import_ids = [Transform.to_import(import_) for import_ in imports]
        data = {'import_ids': import_ids}

        response = api.post(url=cls._URL['bulk_get'], data=data)
        return ImportBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_submit(cls, imports, api=None):
        """
        Submit imports in bulk
        :param imports: Imports to be submitted.
        :param api: Api instance.
        :return: List of ImportBulkRecord objects.
        """
        if not imports:
            raise SbgError('Imports are required')

        api = api or cls._API

        items = []
        for import_ in imports:
            project = import_.get('project')
            parent = import_.get('parent')

            if project and parent:
                raise SbgError(
                    'Project and parent identifiers are mutually exclusive')
            elif project:
                destination = {'project': Transform.to_project(project)}
            elif parent:
                destination = {'parent': Transform.to_file(parent)}
            else:
                raise SbgError('Project or parent identifier is required.')

            volume = Transform.to_volume(import_.get('volume'))
            location = Transform.to_location(import_.get('location'))
            name = import_.get('name', None)
            overwrite = import_.get('overwrite', False)

            if name:
                destination['name'] = name

            items.append({
                'source': {
                    'volume': volume,
                    'location': location
                },
                'destination': destination,
                'overwrite': overwrite
            })

        data = {'items': items}
        response = api.post(url=cls._URL['bulk_create'], data=data)
        return ImportBulkRecord.parse_records(response=response, api=api)
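A usage sketch for single and bulk imports; identifiers are placeholders, and the bulk item keys mirror the ones read in bulk_submit above (volume, location, project or parent, optional name, overwrite).

import sevenbridges as sbg
from sevenbridges import Import  # assumed importable from the package root

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Import a single object from a volume into a project.
imp = Import.submit_import(
    volume='<volume-id>',
    location='datasets/sample1.fastq',
    project='<username>/<project-name>',
    overwrite=True,
    api=api,
)
print(imp.id, imp.state)

# Import several objects at once; keys mirror bulk_submit above.
records = Import.bulk_submit(
    imports=[
        {'volume': '<volume-id>', 'location': 'datasets/sample2.fastq',
         'project': '<username>/<project-name>'},
        {'volume': '<volume-id>', 'location': 'datasets/sample3.fastq',
         'project': '<username>/<project-name>', 'name': 's3.fastq'},
    ],
    api=api,
)
for record in records:
    print(record)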
Example 15
class Project(Resource):
    """
    Central resource for managing projects.
    """
    _URL = {
        'query': '/projects/{owner}',
        'create': '/projects',
        'get': '/projects/{id}',
        'delete': '/projects/{id}',
        'member': '/projects/{id}/members/{username}',
        'members_query': '/projects/{id}/members',
        'apps': '/apps',
        'files': '/files',
        'tasks': '/tasks'
    }
    href = HrefField()
    id = StringField(read_only=True)
    name = StringField(read_only=False)
    billing_group = UuidField(read_only=False)
    description = StringField(read_only=False)
    type = StringField(read_only=False, max_length=2)
    tags = BasicListField(read_only=False)
    settings = CompoundField(Settings, read_only=False)
    root_folder = StringField(read_only=True)
    created_by = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_on = DateTimeField(read_only=True)

    def __str__(self):
        return six.text_type('<Project: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def query(cls, owner=None, name=None, offset=None, limit=None, api=None):
        """
        Query (List) projects
        :param owner: Owner username.
        :param name: Project name
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api if api else cls._API
        query_params = {}
        if owner:
            url = cls._URL['query'].format(owner=owner)
        else:
            url = cls._URL['query'].format(owner='')
        if name:
            query_params['name'] = name
        return super(Project, cls)._query(url=url,
                                          offset=offset,
                                          limit=limit,
                                          fields='_all',
                                          api=api,
                                          **query_params)

    @classmethod
    def create(cls,
               name,
               billing_group=None,
               description=None,
               tags=None,
               settings=None,
               api=None):
        """
        Create a project.
        :param name:  Project name.
        :param billing_group: Project billing group.
        :param description:  Project description.
        :param tags: Project tags.
        :param settings: Project settings.
        :param api: Api instance.
        :return: Project object.
        """
        api = api if api else cls._API

        if name is None:
            raise SbgError('Project name is required!')

        data = {
            'name': name,
        }

        if billing_group:
            data['billing_group'] = Transform.to_billing_group(billing_group)

        if description:
            data['description'] = description
        if tags:
            data['tags'] = tags

        if settings:
            data['settings'] = settings

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating project', extra=extra)
        project_data = api.post(url=cls._URL['create'], data=data).json()
        return Project(api=api, **project_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modifications to the project on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Project instance.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            extra = {
                'resource': self.__class__.__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving project', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            project = Project(api=self._api, **data)
            return project
        else:
            raise ResourceNotModified()

    def get_members(self, offset=None, limit=None):
        """
        Retrieves project members.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        extra = {'resource': self.__class__.__name__, 'query': {'id': self.id}}
        logger.info('Get members', extra=extra)
        response = self._api.get(
            url=self._URL['members_query'].format(id=self.id),
            params={
                'offset': offset,
                'limit': limit
            })
        data = response.json()
        total = response.headers['x-total-matching-query']
        members = [Member(api=self._api, **member) for member in data['items']]
        links = [Link(**link) for link in data['links']]
        href = data['href']
        return Collection(resource=Member,
                          href=href,
                          total=total,
                          items=members,
                          links=links,
                          api=self._api)

    def add_member(self, user, permissions):
        """
        Add a member to the project.
        :param user:  Member username
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        user = Transform.to_user(user)
        data = {'username': user, 'type': 'USER'}
        if isinstance(permissions, dict):
            data.update({'permissions': permissions})

        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'data': data,
            }
        }
        logger.info('Adding member using username', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def add_member_team(self, team, permissions):
        """
        Add a member (team) to a project.
        :param team: Team object or team identifier.
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        team = Transform.to_team(team)
        data = {'id': team, 'type': 'TEAM'}
        if isinstance(permissions, dict):
            data.update({'permissions': permissions})

        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'data': data,
            }
        }
        logger.info('Adding team member using team id', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def add_member_division(self, division, permissions):
        """
        Add a member (division) to a project.
        :param division: Division object or division identifier.
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        division = Transform.to_division(division)
        data = {'id': division, 'type': 'DIVISION'}
        if isinstance(permissions, dict):
            data.update({'permissions': permissions})

        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'data': data,
            }
        }
        logger.info('Adding team member using division id', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def add_member_email(self, email, permissions=None):
        """
        Add a member to the project using member email.
        :param email: Member email.
        :param permissions: Permissions dictionary.
        :return: Member object.
        """
        data = {'email': email}

        if isinstance(permissions, dict):
            data.update({'permissions': permissions})

        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'data': data,
            }
        }
        logger.info('Adding member using email', extra=extra)
        response = self._api.post(
            url=self._URL['members_query'].format(id=self.id), data=data)
        member_data = response.json()
        return Member(api=self._api, **member_data)

    def get_member(self, username, api=None):
        """
        Fetches information about a single project member
        :param username: Member name
        :param api: Api instance
        :return: Member object
        """
        api = api if api else self._API

        response = api.get(url=self._URL['member'].format(id=self.id,
                                                          username=username), )
        data = response.json()
        return Member(api=api, **data)

    def remove_member(self, user):
        """
        Remove member from the project.
        :param user: User to be removed.
        """
        username = Transform.to_user(user)
        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'user': user,
            }
        }
        logger.info('Removing member', extra=extra)
        self._api.delete(
            url=self._URL['member'].format(id=self.id, username=username))

    def get_files(self, offset=None, limit=None):
        """
        Retrieves files in this project.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        params = {'project': self.id, 'offset': offset, 'limit': limit}
        return self._api.files.query(api=self._api, **params)

    def add_files(self, files):
        """
        Adds files to this project.
        :param files: List of files or a Collection object.
        """
        for file in files:
            file.copy(project=self.id)

    def get_apps(self, offset=None, limit=None):
        """
        Retrieves apps in this project.
        :param offset:  Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        params = {'project': self.id, 'offset': offset, 'limit': limit}
        return self._api.apps.query(api=self._api, **params)

    def get_tasks(self, status=None, offset=None, limit=None):
        """
        Retrieves tasks in this project.
        :param status: Optional task status.
        :param offset:  Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        params = {'project': self.id, 'offset': offset, 'limit': limit}
        if status:
            params['status'] = status
        return self._api.tasks.query(api=self._api, **params)

    def get_imports(self, volume=None, state=None, offset=None, limit=None):
        """
        Fetches imports for this project.
        :param volume: Optional volume identifier.
        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.imports.query(project=self.id,
                                       volume=volume,
                                       state=state,
                                       offset=offset,
                                       limit=limit)

    def get_exports(self, volume=None, state=None, offset=None, limit=None):
        """
        Fetches exports for this project.
        :param volume: Optional volume identifier.
        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.exports.query(project=self.id,
                                       volume=volume,
                                       state=state,
                                       offset=offset,
                                       limit=limit)

    def create_task(self,
                    name,
                    app,
                    revision=None,
                    batch_input=None,
                    batch_by=None,
                    inputs=None,
                    description=None,
                    run=False,
                    disable_batch=False,
                    interruptible=True,
                    execution_settings=None):
        """
        Creates a task for this project.

        :param name: Task name.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param disable_batch: True if you want to disable batching.
        :param interruptible: True if you want to use interruptible instances.
        :param execution_settings: Execution settings for the task.
        :return: Task object.
        """
        return self._api.tasks.create(name=name,
                                      project=self,
                                      app=app,
                                      revision=revision,
                                      batch_input=batch_input,
                                      batch_by=batch_by,
                                      inputs=inputs,
                                      description=description,
                                      run=run,
                                      disable_batch=disable_batch,
                                      interruptible=interruptible,
                                      execution_settings=execution_settings)
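
A minimal usage sketch for the project helpers above, assuming a configured
sevenbridges Api instance; the token, project id, app id and input port name
are placeholders.

# Usage sketch (assumptions: valid token, existing project and app ids).
import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')
project = api.projects.get(id='my-username/my-project')  # placeholder id

files = project.get_files(limit=10)               # Collection of File objects
completed = project.get_tasks(status='COMPLETED')

draft = project.create_task(
    name='example-run',
    app='my-username/my-project/my-app',          # placeholder CWL app id
    inputs={'reads': next(iter(files))},          # 'reads' is a placeholder port
    run=False,                                    # keep as a draft; run it later
)
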
Example No. 16
class Export(Resource):
    """
    Central resource for managing exports.
    """
    _URL = {
        'query': '/storage/exports',
        'get': '/storage/exports/{id}',
        'bulk_get': '/bulk/storage/exports/get',
        'bulk_create': '/bulk/storage/exports/create',
    }

    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    _source = DictField(name='source', read_only=True)
    destination = CompoundField(VolumeFile, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    _result = DictField(name='result', read_only=True)
    properties = CompoundField(VolumeProperties, read_only=True)

    def __str__(self):
        return f'<Export: id={self.id}>'

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @property
    def source(self):
        try:
            return File(id=self._source['file'], api=self._api)
        except TypeError:
            return None

    @property
    def result(self):
        try:
            return File(api=self._api, **self._result)
        except TypeError:
            return None

    @classmethod
    def submit_export(cls,
                      file,
                      volume,
                      location,
                      properties=None,
                      overwrite=False,
                      copy_only=False,
                      api=None):
        """
        Submit new export job.
        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If True, overwrite the destination file if it exists.
        :param copy_only: If True, files are kept in the SevenBridges bucket.
        :param api: Api Instance.
        :return: Export object.
        """
        data = {}
        params = {}

        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)
        destination = {'volume': volume, 'location': location}
        source = {'file': file}
        if properties:
            data['properties'] = properties

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting export', extra=extra)

        api = api if api else cls._API
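        # copy_only travels as a query parameter, not in the request body;
        # when set, the exported file is also kept in the platform bucket.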
        if copy_only:
            params['copy_only'] = True
            _export = api.post(cls._URL['query'], data=data,
                               params=params).json()
        else:
            _export = api.post(cls._URL['query'], data=data).json()

        return Export(api=api, **_export)

    @classmethod
    def query(cls, volume=None, state=None, offset=None, limit=None, api=None):
        """
        Query (List) exports.
        :param volume: Optional volume identifier.
        :param state: Optional export state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API

        if volume:
            volume = Transform.to_volume(volume)

        return super()._query(url=cls._URL['query'],
                              volume=volume,
                              state=state,
                              offset=offset,
                              limit=limit,
                              fields='_all',
                              api=api)

    @classmethod
    def bulk_get(cls, exports, api=None):
        """
        Retrieve exports in bulk.
        :param exports: Exports to be retrieved.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        """
        api = api or cls._API
        export_ids = [Transform.to_export(export) for export in exports]
        data = {'export_ids': export_ids}

        response = api.post(url=cls._URL['bulk_get'], data=data)
        return ExportBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_submit(cls, exports, copy_only=False, api=None):
        """
        Create exports in bulk.
        :param exports: List of dicts describing a wanted export.
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        """
        if not exports:
            raise SbgError('Exports are required')

        api = api or cls._API

        items = []
        for export in exports:
            file_ = Transform.to_file(export.get('file'))
            volume = Transform.to_volume(export.get('volume'))
            location = Transform.to_location(export.get('location'))
            properties = export.get('properties', {})
            overwrite = export.get('overwrite', False)

            item = {
                'source': {
                    'file': file_
                },
                'destination': {
                    'volume': volume,
                    'location': location
                },
                'properties': properties,
                'overwrite': overwrite
            }

            items.append(item)

        data = {'items': items}
        params = {'copy_only': copy_only}

        response = api.post(url=cls._URL['bulk_create'],
                            params=params,
                            data=data)
        return ExportBulkRecord.parse_records(response=response, api=api)
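
A minimal usage sketch for the Export resource above, assuming a configured
sevenbridges Api instance; the token, file id, volume id and locations are
placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

# Single export; the file may be a File object or its string id.
export = Export.submit_export(
    file='<FILE_ID>',
    volume='my-username/my-volume',
    location='exports/sample.bam',
    overwrite=True,
    api=api,
)
print(export.state)

# Bulk export; each dict mirrors the keys read by bulk_submit().
records = Export.bulk_submit(
    exports=[{
        'file': '<FILE_ID>',
        'volume': 'my-username/my-volume',
        'location': 'exports/sample.bam',
        'overwrite': True,
    }],
    api=api,
)
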
Example No. 17
class Task(Resource):
    """
    Central resource for managing tasks.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details"
    }

    href = HrefField()
    id = UuidField()
    name = StringField()
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField()
    app = StringField()
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    batch = BooleanField(read_only=True)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    end_time = DateTimeField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)

    def __str__(self):
        return six.text_type('<Task: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls,
              project=None,
              status=None,
              batch=None,
              parent=None,
              offset=None,
              limit=None,
              api=None):
        """
        Query (List) tasks.
        :param project: Optional target project.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        return super(Task, cls)._query(url=cls._URL['query'],
                                       project=project,
                                       status=status,
                                       batch=batch,
                                       parent=parent,
                                       offset=offset,
                                       limit=limit,
                                       fields='_all',
                                       api=api)

    @classmethod
    def create(cls,
               name,
               project,
               app,
               revision=None,
               batch_input=None,
               batch_by=None,
               inputs=None,
               description=None,
               run=False,
               api=None):
        """
        Creates a task on server.
        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param api: Api instance.
        :return: Task object.
        :raises: TaskValidationError if validation fails.
        :raises: SbgError if any exception occurs during request.
        """
        task_data = {}

        project = Transform.to_project(project)
        app = Transform.to_app(app)
        if revision:
            app = app + "/" + six.text_type(revision)

        task_inputs = {'inputs': {}}
        for k, v in (inputs or {}).items():
            if isinstance(v, File):
                input = {
                    'class': 'File',
                    'path': v.id,
                }
                task_inputs['inputs'][k] = input
            elif isinstance(v, list):
                input_list = []
                for inp in v:
                    if isinstance(inp, File):
                        input = {
                            'class': 'File',
                            'path': inp.id,
                        }
                        if inp.name:
                            input['name'] = inp.name
                        input_list.append(input)

                    else:
                        input_list.append(inp)
                task_inputs['inputs'][k] = input_list
            else:
                task_inputs['inputs'][k] = v

        if batch_input:
            task_data['batch_input'] = batch_input

        if batch_by:
            task_data['batch_by'] = batch_by

        task_meta = {
            'name': name,
            'project': project,
            'app': app,
            'description': description
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)

        params = {'action': 'run'} if run else {}
        api = api if api else cls._API
        created_task = api.post(cls._URL['query'],
                                data=task_data,
                                params=params).json()
        if run and 'errors' in created_task:
            if bool(created_task['errors']):
                raise TaskValidationError(
                    'Unable to run task! Task contains errors.',
                    task=Task(api=api, **created_task))

        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task
        :param inplace: Apply action on the current object or return a new one.
        :return: Task object.
        """
        task_data = self._api.post(url=self._URL['abort'].format(
            id=self.id)).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, inplace=True):
        """
        Run task
        :param batch: If False batching will be disabled.
        :param inplace: Apply action on the current object or return a new one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False
        task_data = self._api.post(url=self._URL['run'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the task on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Task instance.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            task_request_data = {}
            inputs = modified_data.pop('inputs', None)
            task_request_data.update(modified_data)
            if inputs:
                task_request_data['inputs'] = {}
                for input_id, input_value in inputs.items():
                    if isinstance(input_value, File):
                        in_file = Task._to_api_file_format(input_value)
                        task_request_data['inputs'][input_id] = in_file
                    elif isinstance(input_value, list):
                        in_list = [
                            item for item in input_value
                            if not isinstance(item, File)
                        ]
                        in_list.extend([
                            Task._to_api_file_format(item)
                            for item in input_value if isinstance(item, File)
                        ])

                        task_request_data['inputs'][input_id] = in_list
                    else:
                        task_request_data['inputs'][input_id] = input_value
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=task_request_data).json()
            task = Task(api=self._api, **data)
            return task

    @staticmethod
    def _to_api_file_format(_file):
        api_file = {'class': 'File', 'path': _file.id}
        if _file.name:
            api_file['name'] = _file.name
        return api_file

    def get_execution_details(self):
        """
        Retrieves execution details for a task.
        :return: Execution details instance.
        """
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self):
        """
        Retrieves batch child tasks for this task if it is a batch task.
        :return: Collection instance.
        :raises: SbgError if the task is not a batch task.
        """
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(parent=self.id, api=self._api)
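
A minimal usage sketch for the Task resource above, assuming a configured
sevenbridges Api instance; the token, project, app, file id and port names
are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

# Create a draft task with one File input and one scalar input.
task = Task.create(
    name='alignment-run',
    project='my-username/my-project',
    app='my-username/my-project/bwa-mem',                # placeholder app id
    inputs={'reads': api.files.get(id='<FILE_ID>'), 'threads': 4},
    run=False,
    api=api,
)

task = task.run()                        # start execution
details = task.get_execution_details()   # jobs, statuses and logs
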
Example No. 18
class File(Resource):
    """
    Central resource for managing files.
    """
    _URL = {
        'query': '/files',
        'get': '/files/{id}',
        'delete': '/files/{id}',
        'copy': '/files/{id}/actions/copy',
        'download_info': '/files/{id}/download_info',
        'metadata': '/files/{id}/metadata',
        'tags': '/files/{id}/tags'
    }

    href = HrefField()
    id = StringField()
    name = StringField(read_only=False)
    size = IntegerField(read_only=True)
    project = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_on = DateTimeField(read_only=True)
    origin = CompoundField(FileOrigin, read_only=True)
    storage = CompoundField(FileStorage, read_only=True)
    metadata = CompoundField(Metadata, read_only=False)
    tags = BasicListField(read_only=False)

    def __str__(self):
        return six.text_type('<File: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls, project, names=None, metadata=None, origin=None,
              offset=None, limit=None, api=None):
        """
        Query (List) files.
        :param project: Project id
        :param names: Name list
        :param metadata: Metadata query dict
        :param origin: Origin query dict
        :param offset: Pagination offset
        :param limit: Pagination limit
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API

        project = Transform.to_project(project)
        query_params = {}

        if names and isinstance(names, list):
            query_params['name'] = names

        metadata_params = {}
        if metadata and isinstance(metadata, dict):
            for k, v in metadata.items():
                metadata_params['metadata.' + k] = v

        query_params.update(metadata_params)

        origin_params = {}
        if origin and isinstance(origin, dict):
            for k, v in origin.items():
                origin_params['origin.' + k] = v

        query_params.update(origin_params)

        return super(File, cls)._query(
            api=api, url=cls._URL['query'], project=project, offset=offset,
            limit=limit, fields='_all', **query_params
        )

    @classmethod
    def upload(cls, path, project, file_name=None, overwrite=False, retry=5,
               timeout=10, part_size=PartSize.UPLOAD_MINIMUM_PART_SIZE,
               wait=True, api=None):

        """
        Uploads a file using multipart upload and returns an upload handle
        if the wait parameter is set to False. If wait is set to True it
        will block until the upload is completed.

        :param path: File path on local disc.
        :param project: Project identifier
        :param file_name: Optional file name.
        :param overwrite: If true will overwrite the file on the server.
        :param retry:  Number of retries if error occurs during upload.
        :param timeout:  Timeout for http requests.
        :param part_size:  Part size in bytes.
        :param wait:  If true will wait for upload to complete.
        :param api: Api instance.
        """

        api = api or cls._API
        project = Transform.to_project(project)
        upload = Upload(
            path, project, file_name=file_name, overwrite=overwrite,
            retry_count=retry, timeout=timeout, part_size=part_size, api=api
        )
        if wait:
            upload.start()
            upload.wait()
            return upload
        else:
            return upload

    def copy(self, project, name=None):
        """
        Copies the current file.
        :param project: Destination project.
        :param name: Destination file name.
        :return: Copied File object.
        """
        project = Transform.to_project(project)
        data = {
            'project': project
        }
        if name:
            data['name'] = name
        new_file = self._api.post(url=self._URL['copy'].format(id=self.id),
                                  data=data).json()
        return File(api=self._api, **new_file)

    def download_info(self):
        """
        Fetches download information containing file url
        that can be used to download file.
        :return: Download info object.
        """
        info = self._api.get(url=self._URL['download_info'].format(id=self.id))
        return DownloadInfo(api=self._api, **info.json())

    def download(self, path, retry=5, timeout=10,
                 chunk_size=PartSize.DOWNLOAD_MINIMUM_PART_SIZE, wait=True,
                 overwrite=False):
        """
        Downloads the file. If wait is False a download handle is returned and
        the transfer does not start until its .start() method is invoked; if
        wait is True the download runs to completion before returning.
        :param path: Full path to the new file.
        :param retry: Number of retries if error occurs during download.
        :param timeout: Timeout for http requests.
        :param chunk_size: Chunk size in bytes.
        :param wait: If True will wait for download to complete.
        :param overwrite: If True silently overwrite an existing file,
         otherwise LocalFileAlreadyExists is raised.
        :return: Download handle if wait is False.
        """

        if not overwrite and os.path.exists(path):
            raise LocalFileAlreadyExists(message=path)

        info = self.download_info()
        download = Download(
            url=info.url, file_path=path, retry_count=retry, timeout=timeout,
            part_size=chunk_size, api=self._api
        )
        if wait:
            download.start()
            download.wait()
        else:
            return download

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the file on the server.
        :param inplace: Apply edits to the current instance or get a new one.
        :return: File instance.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            # If metadata is to be set
            if 'metadata' in modified_data:
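                # _method is set to 'PUT' when metadata was assigned wholesale
                # (file.metadata = {...}); overwrite with PUT in that case,
                # otherwise PATCH merges only the modified keys.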
                try:
                    _ = self._method
                    self._api.put(
                        url=self._URL['metadata'].format(id=self.id),
                        data=modified_data['metadata']
                    )
                except AttributeError:
                    self._api.patch(
                        url=self._URL['metadata'].format(id=self.id),
                        data=modified_data['metadata']
                    )
                modified_data.pop('metadata')
            if 'tags' in modified_data:
                self._api.put(
                    url=self._URL['tags'].format(id=self.id),
                    data=modified_data['tags']
                )
                modified_data.pop('tags')
            # Change everything else
            if bool(modified_data):
                self._api.patch(
                    url=self._URL['get'].format(id=self.id), data=modified_data
                )
        else:
            raise ResourceNotModified()

        return self.reload()

    def stream(self, part_size=32 * PartSize.KB):
        """
        Creates an iterator which can be used to stream the file content.
        :param part_size: Size of the part in bytes. Default 32KB
        :return Iterator
        """
        download_info = self.download_info()
        response = self._api.get(
            url=download_info.url, stream=True, append_base=False
        )
        for part in response.iter_content(part_size):
            yield part

    # noinspection PyAttributeOutsideInit
    def reload(self):
        """
        Refreshes the file with the data from the server.
        """
        try:
            data = self._api.get(self.href, append_base=False).json()
            resource = File(api=self._api, **data)
        except Exception:
            try:
                data = self._api.get(
                    self._URL['get'].format(id=self.id)).json()
                resource = File(api=self._api, **data)
            except Exception:
                raise SbgError('Resource can not be refreshed!')

        self._data = resource._data
        self._dirty = resource._dirty

        # If file.metadata = value was executed
        # file object will have attribute _method='PUT', which tells us
        # to force overwrite of metadata on the server. This is metadata
        # specific. Once we reload the resource we delete the attribute
        # _method from the instance.
        try:
            delattr(self, '_method')
        except AttributeError:
            pass
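
A minimal usage sketch for the File resource above, assuming a configured
sevenbridges Api instance and that Upload.result() returns the uploaded File;
the token, project id and local paths are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

# Upload blocks until completion because wait=True (the default).
upload = File.upload('/tmp/sample.fastq', project='my-username/my-project',
                     overwrite=True, api=api)
uploaded = upload.result()               # the uploaded File object

# Download to a local path, or stream the content part by part.
uploaded.download('/tmp/sample_copy.fastq', overwrite=True)
with open('/tmp/streamed.fastq', 'wb') as fp:
    for part in uploaded.stream():
        fp.write(part)
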
Example No. 19
class Automation(Resource):
    """
    Central resource for managing automations.
    """
    # noinspection PyProtectedMember
    _URL = {
        'query': '/automation/automations',
        'get': '/automation/automations/{id}',
        'member': AutomationMember._URL['get'],
        'members': AutomationMember._URL['query'],
        'packages': AutomationPackage._URL['query'],
        'archive': '/automation/automations/{automation_id}/actions/archive',
        'restore': '/automation/automations/{automation_id}/actions/restore'
    }

    href = HrefField(read_only=True)
    id = UuidField(read_only=True)
    name = StringField(read_only=False)
    description = StringField(read_only=False)
    billing_group = UuidField(read_only=False)
    owner = StringField(read_only=True)
    created_by = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_by = StringField(read_only=True)
    modified_on = DateTimeField(read_only=False)
    archived = BooleanField(read_only=True)
    secret_settings = DictField(read_only=False)
    memory_limit = IntegerField(read_only=False)
    project_based = BooleanField(read_only=False)

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<Automation: id={self.id} name={self.name}>'

    @classmethod
    def query(cls,
              name=None,
              include_archived=False,
              project_based=None,
              offset=None,
              limit=None,
              api=None):
        """
        Query (List) automations.
        :param name: Automation name.
        :param include_archived: Also include archived automations.
        :param project_based: Search for project-based automations.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """

        api = api or cls._API
        return super()._query(
            url=cls._URL['query'],
            name=name,
            include_archived=include_archived,
            project_based=project_based,
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls,
               name,
               description=None,
               billing_group=None,
               secret_settings=None,
               project_based=None,
               memory_limit=None,
               api=None):
        """
        Create an automation template.
        :param name:  Automation name.
        :param billing_group: Automation billing group.
        :param description:  Automation description.
        :param secret_settings: Automation settings.
        :param project_based: Create project based automation template.
        :param memory_limit: Memory limit in MB.
        :param api: Api instance.
        :return: Automation object.
        """
        api = api if api else cls._API

        if name is None:
            raise SbgError('Automation name is required!')

        data = {
            'name': name,
        }

        if billing_group:
            data['billing_group'] = Transform.to_billing_group(billing_group)

        if description:
            data['description'] = description
        if secret_settings:
            data['secret_settings'] = secret_settings
        if project_based:
            data['project_based'] = project_based
        if memory_limit:
            data['memory_limit'] = memory_limit

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating automation template', extra=extra)
        automation_data = api.post(url=cls._URL['query'], data=data).json()
        return Automation(api=api, **automation_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the automation template on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Automation instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving automation template', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            return Automation(api=self._api, **data)

        else:
            raise ResourceNotModified()

    @inplace_reload
    def archive(self):
        """
        Archive automation
        :return: Automation instance.
        """
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Archive automation', extra=extra)

        automation_data = self._api.post(url=self._URL['archive'].format(
            automation_id=self.id)).json()
        return Automation(api=self._api, **automation_data)

    @inplace_reload
    def restore(self):
        """
        Restore archived automation
        :return: Automation instance.
        """
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Restore archived automation', extra=extra)

        automation_data = self._api.post(url=self._URL['restore'].format(
            automation_id=self.id)).json()
        return Automation(api=self._api, **automation_data)

    def get_packages(self, offset=None, limit=None, api=None):
        """
        Return list of packages that belong to this automation
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage collection
        """
        api = api or self._API
        return AutomationPackage.query(automation=self.id,
                                       offset=offset,
                                       limit=limit,
                                       api=api)

    @classmethod
    def get_package(cls, package, api=None):
        """
        Return a specified automation package.
        :param package: Automation package id.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage object.
        """
        package_id = Transform.to_automation_package(package)
        api = api or cls._API
        return AutomationPackage.get(id=package_id, api=api)

    def add_package(self,
                    version,
                    file_path,
                    schema,
                    file_name=None,
                    retry_count=RequestParameters.DEFAULT_RETRY_COUNT,
                    timeout=RequestParameters.DEFAULT_TIMEOUT,
                    part_size=None,
                    api=None):
        """
        Add a code package to automation template.
        :param version: The code package version.
        :param file_path: Path to the code package file to be uploaded.
        :param schema: IO schema for main step of execution.
        :param part_size: Size of upload part in bytes.
        :param file_name: Optional file name.
        :param retry_count: Upload retry count.
        :param timeout: Timeout for s3/google session.
        :param api: sevenbridges Api instance.
        :return: AutomationPackage
        """
        api = api or self._API
        if version is None:
            raise SbgError('Code package version is required!')

        if file_path is None:
            raise SbgError('Code package file path is required!')

        # Multipart upload the code package:
        upload = CodePackageUpload(file_path,
                                   self.id,
                                   api=api,
                                   part_size=part_size,
                                   file_name=file_name,
                                   retry_count=retry_count,
                                   timeout=timeout)
        upload.start()
        upload.wait()
        package_file = upload.result()

        # Create the automation package:
        return AutomationPackage.create(self.id,
                                        version=version,
                                        location=package_file.id,
                                        schema=schema,
                                        api=api)

    def get_member(self, username, api=None):
        """
        Return specified automation member
        :param username: Member username
        :param api: sevenbridges Api instance.
        :return: AutomationMember object
        """
        member = Transform.to_automation_member(username)
        api = api or self._API
        return AutomationMember.get(id=member, automation=self.id, api=api)

    def get_members(self, offset=None, limit=None, api=None):
        """
        Return list of automation members
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance.
        :return: AutomationMember collection
        """
        api = api or self._API
        return AutomationMember.query(automation=self.id,
                                      offset=offset,
                                      limit=limit,
                                      api=api)

    def add_member(self, user, permissions, api=None):
        """
        Add member to the automation
        :param user: Member username
        :param permissions: Member permissions
        :param api: sevenbridges Api instance
        :return: AutomationMember object
        """
        api = api or self._API
        return AutomationMember.add(automation=self.id,
                                    user=user,
                                    permissions=permissions,
                                    api=api)

    def remove_member(self, user, api=None):
        """
        Remove a member from the automation
        :param user: Member username
        :param api: sevenbridges Api instance
        :return: None
        """
        api = api or self._API
        AutomationMember.remove(automation=self.id, user=user, api=api)

    def get_runs(self,
                 package=None,
                 status=None,
                 name=None,
                 created_by=None,
                 created_from=None,
                 created_to=None,
                 project_id=None,
                 order_by=None,
                 order=None,
                 offset=None,
                 limit=None,
                 api=None):
        """
        Query automation runs that belong to this automation
        :param package: Package id
        :param status: Run status
        :param name: Automation run name
        :param created_by: Username of member that created the run
        :param created_from: Date the run was created after
        :param created_to: Date the run was created before
        :param project_id: Search runs by project id, if run is project based
        :param order_by: Property by which to order results
        :param order: Ascending or Descending ("asc" or "desc")
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: sevenbridges Api instance
        :return: AutomationRun collection
        """
        api = api or self._API
        return AutomationRun.query(automation=self.id,
                                   package=package,
                                   status=status,
                                   name=name,
                                   created_by=created_by,
                                   created_from=created_from,
                                   created_to=created_to,
                                   project_id=project_id,
                                   order_by=order_by,
                                   order=order,
                                   offset=offset,
                                   limit=limit,
                                   api=api)
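
A minimal usage sketch for the Automation resource above, assuming a
configured sevenbridges Api instance; the token, billing group id, package
path, IO schema and permission flags are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

automation = Automation.create(
    name='qc-automation',
    billing_group='<BILLING_GROUP_ID>',
    memory_limit=2048,
    api=api,
)

# Upload a code package and register it under a version label.
package = automation.add_package(
    version='0.1.0',
    file_path='/tmp/qc_package.zip',
    schema={'inputs': [], 'outputs': []},          # placeholder IO schema
    api=api,
)

automation.add_member(
    user='collaborator-username',
    permissions={'read': True, 'execute': True},   # placeholder permission set
)
runs = automation.get_runs(status='FINISHED', limit=10)
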
Example No. 20
class Task(Resource):
    """
    Central resource for managing tasks.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details"
    }

    href = HrefField()
    id = UuidField()
    name = StringField()
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField()
    app = StringField()
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    created_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    batch = BooleanField(read_only=False)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)
    use_interruptible_instances = BooleanField()

    def __str__(self):
        return six.text_type('<Task: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def query(cls,
              project=None,
              status=None,
              batch=None,
              parent=None,
              created_from=None,
              created_to=None,
              started_from=None,
              started_to=None,
              ended_from=None,
              ended_to=None,
              offset=None,
              limit=None,
              api=None):
        """
        Query (List) tasks. Date parameters may be either strings or Python
        date objects.
        :param project: Optional target project.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param ended_to: All tasks that ended until this date.
        :param ended_from: All tasks that ended from this date.
        :param started_to: All tasks that were started until this date.
        :param started_from: All tasks that were started from this date.
        :param created_to: All tasks that were created until this date.
        :param created_from: All tasks that were created from this date.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        if created_from:
            created_from = Transform.to_datestring(created_from)
        if created_to:
            created_to = Transform.to_datestring(created_to)
        if started_from:
            started_from = Transform.to_datestring(started_from)
        if started_to:
            started_to = Transform.to_datestring(started_to)
        if ended_from:
            ended_from = Transform.to_datestring(ended_from)
        if ended_to:
            ended_to = Transform.to_datestring(ended_to)

        return super(Task, cls)._query(url=cls._URL['query'],
                                       project=project,
                                       status=status,
                                       batch=batch,
                                       parent=parent,
                                       created_from=created_from,
                                       created_to=created_to,
                                       started_from=started_from,
                                       started_to=started_to,
                                       ended_from=ended_from,
                                       ended_to=ended_to,
                                       offset=offset,
                                       limit=limit,
                                       fields='_all',
                                       api=api)

    @classmethod
    def create(cls,
               name,
               project,
               app,
               revision=None,
               batch_input=None,
               batch_by=None,
               inputs=None,
               description=None,
               run=False,
               disable_batch=False,
               interruptible=True,
               api=None):
        """
        Creates a task on server.
        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param disable_batch: If True disables batching of a batch task.
        :param interruptible: If True an interruptible instance will be used.
        :param api: Api instance.
        :return: Task object.
        :raises: TaskValidationError if validation fails.
        :raises: SbgError if any exception occurs during request.
        """
        task_data = {}
        params = {}

        project = Transform.to_project(project)

        app_id = Transform.to_app(app)

        if revision:
            app_id = app_id + "/" + six.text_type(revision)
        else:
            if isinstance(app, App):
                app_id = app_id + "/" + six.text_type(app.revision)

        task_inputs = {
            'inputs': Task._serialize_inputs(inputs) if inputs else {}
        }

        if batch_input and batch_by:
            task_data['batch_input'] = batch_input
            task_data['batch_by'] = batch_by
            if disable_batch:
                params.update({'batch': False})

        task_meta = {
            'name': name,
            'project': project,
            'app': app_id,
            'description': description,
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)

        task_data['use_interruptible_instances'] = interruptible

        if run:
            params.update({'action': 'run'})

        api = api if api else cls._API
        created_task = api.post(cls._URL['query'],
                                data=task_data,
                                params=params).json()
        if run and 'errors' in created_task:
            if bool(created_task['errors']):
                raise TaskValidationError(
                    'Unable to run task! Task contains errors.',
                    task=Task(api=api, **created_task))

        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task
        :param inplace: Apply action on the current object or return a new one.
        :return: Task object.
        """
        extra = {'resource': self.__class__.__name__, 'query': {'id': self.id}}
        logger.info('Aborting task', extra=extra)
        task_data = self._api.post(url=self._URL['abort'].format(
            id=self.id)).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, interruptible=True, inplace=True):
        """
        Run task
        :param batch: If False batching will be disabled.
        :param interruptible: If True an interruptible instance will be used.
        :param inplace: Apply action on the current object or return a new one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False

        params['use_interruptible_instances'] = interruptible
        extra = {
            'resource': self.__class__.__name__,
            'query': {
                'id': self.id,
                'batch': batch
            }
        }
        logger.info('Running task', extra=extra)
        task_data = self._api.post(url=self._URL['run'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the task on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Task instance.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            task_request_data = {}
            inputs = modified_data.pop('inputs', None)
            task_request_data.update(modified_data)
            if inputs:
                task_request_data['inputs'] = self._serialize_inputs(inputs)
            extra = {
                'resource': self.__class__.__name__,
                'query': {
                    'id': self.id,
                    'data': task_request_data
                }
            }
            logger.info('Saving task', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=task_request_data).json()
            task = Task(api=self._api, **data)
            return task

    @staticmethod
    def _serialize_inputs(inputs):
        """Serialize task input dictionary"""
        serialized_inputs = {}
        for input_id, input_value in inputs.items():
            if isinstance(input_value, list):
                serialized_list = Task._serialize_input_list(input_value)
                serialized_inputs[input_id] = serialized_list
            else:
                if isinstance(input_value, File):
                    input_value = Task._to_api_file_format(input_value)
                serialized_inputs[input_id] = input_value
        return serialized_inputs

    @staticmethod
    def _serialize_input_list(input_value):
        """Recursively serialize task input list"""
        input_list = []
        for item in input_value:
            if isinstance(item, list):
                input_list.append(Task._serialize_input_list(item))
            else:
                if isinstance(item, File):
                    item = Task._to_api_file_format(item)
                input_list.append(item)
        return input_list

    @staticmethod
    def _to_api_file_format(_file):
        api_file = {'class': 'File', 'path': _file.id}
        if _file.name:
            api_file['name'] = _file.name
        return api_file

    def get_execution_details(self):
        """
        Retrieves execution details for a task.
        :return: Execution details instance.
        """
        extra = {'resource': self.__class__.__name__, 'query': {'id': self.id}}
        logger.info('Get execution details', extra=extra)
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self):
        """
        Retrieves batch child tasks for this task if it is a batch task.
        :return: Collection instance.
        :raises: SbgError if the task is not a batch task.
        """
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(parent=self.id, api=self._api)
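
A minimal usage sketch for the date-filtered Task.query above, assuming a
configured sevenbridges Api instance; the token and project id are
placeholders.

from datetime import date, timedelta

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

# Date filters accept strings or Python date objects; Transform.to_datestring
# normalizes them before the request is sent.
week_ago = date.today() - timedelta(days=7)
recent = Task.query(
    project='my-username/my-project',
    status='COMPLETED',
    created_from=week_ago,
    limit=25,
    api=api,
)
for task in recent:
    print(task.id, task.status)
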
Example No. 21
class Task(Resource):
    """
    Central resource for managing tasks.
    """
    _URL = {
        'query': '/tasks',
        'get': '/tasks/{id}',
        'delete': '/tasks/{id}',
        'run': '/tasks/{id}/actions/run',
        'clone': '/tasks/{id}/actions/clone',
        'abort': '/tasks/{id}/actions/abort',
        'execution_details': "/tasks/{id}/execution_details",
        'bulk_get': '/bulk/tasks/get',
    }

    href = HrefField(read_only=True)
    id = UuidField(read_only=True)
    name = StringField(read_only=False)
    status = StringField(read_only=True)
    description = StringField(read_only=False)
    project = StringField(read_only=False)
    app = StringField(read_only=False)
    type = StringField(read_only=True)
    created_by = StringField(read_only=True)
    executed_by = StringField(read_only=True)
    start_time = DateTimeField(read_only=True)
    created_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    batch = BooleanField(read_only=False)
    batch_by = CompoundField(BatchBy, read_only=False)
    batch_group = CompoundField(BatchGroup, read_only=True)
    batch_input = StringField(read_only=False)
    parent = StringField(read_only=True)
    execution_status = CompoundField(ExecutionStatus, read_only=True)
    errors = DictField(read_only=True)
    warnings = DictField(read_only=True)
    price = CompoundField(Price, read_only=True)
    inputs = CompoundField(Input, read_only=False)
    outputs = CompoundField(Output, read_only=True)
    execution_settings = DictField(read_only=True)
    use_interruptible_instances = BooleanField(read_only=False)

    def __str__(self):
        return f'<Task: id={self.id}>'

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @classmethod
    def query(cls,
              project=None,
              status=None,
              batch=None,
              parent=None,
              created_from=None,
              created_to=None,
              started_from=None,
              started_to=None,
              ended_from=None,
              ended_to=None,
              offset=None,
              limit=None,
              order_by=None,
              order=None,
              api=None):
        """
        Query (List) tasks. Date parameters may be either strings or Python
        date objects.
        :param project: Optional target project.
        :param status: Task status.
        :param batch: Only batch tasks.
        :param parent: Parent batch task identifier.
        :param ended_to: All tasks that ended until this date.
        :param ended_from: All tasks that ended from this date.
        :param started_to: All tasks that were started until this date.
        :param started_from: All tasks that were started from this date.
        :param created_to: All tasks that were created until this date.
        :param created_from: All tasks that were created from this date.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param order_by: Property to order by.
        :param order: Ascending or descending ordering.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        if parent:
            parent = Transform.to_task(parent)
        if project:
            project = Transform.to_project(project)
        if created_from:
            created_from = Transform.to_datestring(created_from)
        if created_to:
            created_to = Transform.to_datestring(created_to)
        if started_from:
            started_from = Transform.to_datestring(started_from)
        if started_to:
            started_to = Transform.to_datestring(started_to)
        if ended_from:
            ended_from = Transform.to_datestring(ended_from)
        if ended_to:
            ended_to = Transform.to_datestring(ended_to)

        return super()._query(url=cls._URL['query'],
                              project=project,
                              status=status,
                              batch=batch,
                              parent=parent,
                              created_from=created_from,
                              created_to=created_to,
                              started_from=started_from,
                              started_to=started_to,
                              ended_from=ended_from,
                              ended_to=ended_to,
                              offset=offset,
                              limit=limit,
                              order_by=order_by,
                              order=order,
                              fields='_all',
                              api=api)

    @classmethod
    def create(cls,
               name,
               project,
               app,
               revision=None,
               batch_input=None,
               batch_by=None,
               inputs=None,
               description=None,
               run=False,
               disable_batch=False,
               interruptible=None,
               execution_settings=None,
               api=None):
        """
        Creates a task on server.
        :param name: Task name.
        :param project: Project identifier.
        :param app: CWL app identifier.
        :param revision: CWL app revision.
        :param batch_input: Batch input.
        :param batch_by: Batch criteria.
        :param inputs: Input map.
        :param description: Task description.
        :param run: True if you want to run a task upon creation.
        :param disable_batch: If True disables batching of a batch task.
        :param interruptible: If True an interruptible instance will be used.
        :param execution_settings: Execution settings for the task.
        :param api: Api instance.
        :return: Task object.
        :raises: TaskValidationError if validation fails.
        :raises: SbgError if any exception occurs during request.
        """
        task_data = {}
        params = {}
        project = Transform.to_project(project)

        app_id = Transform.to_app(app)

        if revision:
            app_id = f'{app_id}/{revision}'
        else:
            if isinstance(app, App):
                app_id = f'{app_id}/{app.revision}'

        task_inputs = {
            'inputs': Task._serialize_inputs(inputs) if inputs else {}
        }

        if batch_input and batch_by:
            task_data['batch_input'] = batch_input
            task_data['batch_by'] = batch_by
            if disable_batch:
                params.update({'batch': False})

        task_meta = {
            'name': name,
            'project': project,
            'app': app_id,
            'description': description,
        }
        task_data.update(task_meta)
        task_data.update(task_inputs)

        if interruptible is not None:
            task_data['use_interruptible_instances'] = interruptible

        if execution_settings:
            task_data.update({'execution_settings': execution_settings})

        if run:
            params.update({'action': 'run'})

        api = api if api else cls._API
        created_task = api.post(cls._URL['query'],
                                data=task_data,
                                params=params).json()
        if run and 'errors' in created_task and created_task['errors']:
            raise TaskValidationError(
                'Unable to run task! Task contains errors.',
                task=Task(api=api, **created_task))

        return Task(api=api, **created_task)

    @inplace_reload
    def abort(self, inplace=True):
        """
        Abort task
        :param inplace: Apply action on the current object or return a new one.
        :return: Task object.
        """
        extra = {'resource': type(self).__name__, 'query': {'id': self.id}}
        logger.info('Aborting task', extra=extra)
        task_data = self._api.post(url=self._URL['abort'].format(
            id=self.id)).json()
        return Task(api=self._api, **task_data)

    @inplace_reload
    def run(self, batch=True, interruptible=None, inplace=True):
        """
        Run task
        :param batch: If False batching will be disabled.
        :param interruptible: If True an interruptible instance will be used.
        :param inplace: Apply action on the current object or return a new one.
        :return: Task object.
        """
        params = {}
        if not batch:
            params['batch'] = False
        if interruptible is not None:
            params['use_interruptible_instances'] = interruptible
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
                'batch': batch
            }
        }
        logger.info('Running task', extra=extra)
        task_data = self._api.post(url=self._URL['run'].format(id=self.id),
                                   params=params).json()
        return Task(api=self._api, **task_data)

    def clone(self, run=True):
        """
        Clone the task.
        :param run: If True, run the task after cloning.
        :return: Task object.
        """
        params = {}
        if run:
            params.update({'action': 'run'})

        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
                'run': run
            }
        }
        logger.info('Cloning task', extra=extra)
        task_data = self._api.post(url=self._URL['clone'].format(id=self.id),
                                   params=params).json()

        return Task(api=self._api, **task_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the task on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Task instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            task_request_data = {}
            inputs = modified_data.pop('inputs', None)
            execution_settings = modified_data.pop('execution_settings', None)
            task_request_data.update(modified_data)

            if inputs:
                task_request_data['inputs'] = self._serialize_inputs(inputs)

            if execution_settings:
                task_request_data['execution_settings'] = (
                    self._serialize_execution_settings(execution_settings))

            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'data': task_request_data
                }
            }
            logger.info('Saving task', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=task_request_data).json()
            task = Task(api=self._api, **data)
            return task

    def _serialize_execution_settings(self, execution_settings):
        instance_type = execution_settings.get(
            'instance_type',
            self.execution_settings.get('instance_type', None))
        max_parallel_instances = execution_settings.get(
            'max_parallel_instances',
            self.execution_settings.get('max_parallel_instances', None))
        use_memoization = execution_settings.get(
            'use_memoization',
            self.execution_settings.get('use_memoization', None))
        serialized_es_mapping = {
            'instance_type': instance_type,
            'max_parallel_instances': max_parallel_instances,
            'use_memoization': use_memoization
        }
        serialized_es = dict()
        for key, value in serialized_es_mapping.items():
            if value is not None:
                serialized_es[key] = value

        return serialized_es

    @staticmethod
    def _serialize_inputs(input_value):
        """
        Recursively serializes the input dictionary.
        :param input_value: input dictionary to serialize
        :return: serialized input dictionary
        """
        if isinstance(input_value, list):
            return_value = []
            for elem in input_value:
                return_value.append(Task._serialize_inputs(elem))
        elif isinstance(input_value, dict):
            return_value = {}
            for key in input_value:
                return_value[key] = Task._serialize_inputs(input_value[key])
        elif isinstance(input_value, File):
            return_value = Task._to_api_file_format(input_value)
        else:
            return_value = input_value
        return return_value

    @staticmethod
    def _to_api_file_format(_file):
        return {
            'class': (FileApiFormats.FOLDER
                      if _file.is_folder() else FileApiFormats.FILE),
            'path': _file.id
        }

    def get_execution_details(self):
        """
        Retrieves execution details for a task.
        :return: Execution details instance.
        """
        extra = {'resource': type(self).__name__, 'query': {'id': self.id}}
        logger.info('Get execution details', extra=extra)
        data = self._api.get(
            self._URL['execution_details'].format(id=self.id)).json()
        return ExecutionDetails(api=self._api, **data)

    def get_batch_children(self,
                           status=None,
                           created_from=None,
                           created_to=None,
                           started_from=None,
                           started_to=None,
                           ended_from=None,
                           ended_to=None,
                           order_by=None,
                           order=None,
                           offset=None,
                           limit=None,
                           api=None):
        """
        Retrieves batch child tasks for this task if it is a batch task.
        Accepts the same filtering, ordering and pagination parameters as
        query().
        :return: Collection instance.
        :raises: SbgError if the task is not a batch task.
        """
        api = api or self._api
        if not self.batch:
            raise SbgError("This task is not a batch task.")
        return self.query(
            parent=self.id,
            status=status,
            created_from=created_from,
            created_to=created_to,
            started_from=started_from,
            started_to=started_to,
            ended_from=ended_from,
            ended_to=ended_to,
            order_by=order_by,
            order=order,
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def bulk_get(cls, tasks, api=None):
        """
        Retrieve tasks with specified ids in bulk
        :param tasks: Tasks to be retrieved.
        :param api: Api instance.
        :return: List of TaskBulkRecord objects.
        """
        api = api or cls._API
        task_ids = [Transform.to_task(task) for task in tasks]
        data = {'task_ids': task_ids}

        logger.debug('Getting tasks in bulk.')
        response = api.post(url=cls._URL['bulk_get'], data=data)
        return TaskBulkRecord.parse_records(response=response, api=api)

    def wait(self, period=10, callback=None, *args, **kwargs):
        """Wait until task is complete
        :param period: Time in seconds between reloads
        :param callback: Function to call after the task has finished,
            arguments and keyword arguments can be provided for it
        :return: Return value of provided callback function or None if a
            callback function was not provided
        """
        while self.status not in TaskStatus.terminal_states:
            self.reload()
            time.sleep(period)

        if callback:
            return callback(*args, **kwargs)
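
The snippet above covers task creation, execution control and monitoring. Below is a minimal usage sketch of that flow; the API endpoint, token, project, app and input identifiers are placeholders, and it assumes Api and Task are the classes exported by the sevenbridges package.

import sevenbridges as sbg

# Placeholder endpoint and token -- replace with real credentials.
api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Create a draft task; 'min_read_length' is a placeholder input port name.
task = sbg.Task.create(
    name='example-run',
    project='my-division/my-project',
    app='my-division/my-project/my-app',
    inputs={'min_read_length': 35},
    run=False,
    api=api,
)

# Start the task and block until it reaches a terminal state.
task.run()
task.wait(period=30)

# Inspect per-job execution details once the task has finished.
details = task.get_execution_details()
for job in details.jobs or []:
    print(job.name, job.status)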
Example No. 22
class File(Resource):
    """
    Central resource for managing files.
    """
    FOLDER_TYPE = 'folder'

    _URL = {
        'query': '/files',
        'scroll': '/files/scroll',
        'get': '/files/{id}',
        'delete': '/files/{id}',
        'copy': '/files/{id}/actions/copy',
        'download_info': '/files/{id}/download_info',
        'metadata': '/files/{id}/metadata',
        'tags': '/files/{id}/tags',

        'bulk_get': '/bulk/files/get',
        'bulk_delete': '/bulk/files/delete',
        'bulk_update': '/bulk/files/update',
        'bulk_edit': '/bulk/files/edit',

        'create_folder': '/files',
        'list_folder': '/files/{id}/list',
        'scroll_folder': '/files/{id}/scroll',
        'copy_to_folder': '/files/{file_id}/actions/copy',
        'move_to_folder': '/files/{file_id}/actions/move',
    }

    href = HrefField()
    id = StringField(read_only=True)
    type = StringField(read_only=True)
    name = StringField()
    size = IntegerField(read_only=True)
    parent = StringField(read_only=True)
    project = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_on = DateTimeField(read_only=True)
    origin = CompoundField(FileOrigin, read_only=True)
    storage = CompoundField(FileStorage, read_only=True)
    metadata = CompoundField(Metadata)
    tags = BasicListField()
    _secondary_files = BasicListField(name='_secondary_files')

    def __str__(self):
        return six.text_type('<File: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    def is_folder(self):
        return self.type.lower() == self.FOLDER_TYPE

    @property
    def secondary_files(self):
        if self._secondary_files:
            return [
                File(api=self._api, **data)
                for data in self._secondary_files
            ]

    @classmethod
    def query(cls, project=None, names=None, metadata=None, origin=None,
              tags=None, offset=None, limit=None, dataset=None,
              api=None, parent=None, cont_token=None):
        """
        Query (List) files. Requires a project, dataset or parent.
        :param project: Project id
        :param names: Name list
        :param metadata: Metadata query dict
        :param origin: Origin query dict
        :param tags: List of tags to filter on
        :param offset: Pagination offset
        :param limit: Pagination limit
        :param dataset: Dataset id
        :param api: Api instance.
        :param parent: Folder id or File object with type folder
        :param cont_token: Pagination continuation token
        :return: Collection object.
        """

        if cont_token and offset:
            raise SbgError(
                'Offset and continuation token parameters '
                'are mutually exclusive.'
            )

        if cont_token and metadata:
            raise SbgError(
                'Metadata filtering cannot be combined '
                'with continuation token pagination.'
            )

        api = api or cls._API

        query_params = {}

        if project:
            project = Transform.to_project(project)
            query_params['project'] = project

        if dataset:
            dataset = Transform.to_dataset(dataset)
            query_params['dataset'] = dataset

        if parent:
            query_params['parent'] = Transform.to_file(parent)

        if not (project or dataset or parent):
            raise SbgError('Project, dataset or parent must be provided.')

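        # Fewer than two Nones among project, parent and dataset means at
        # least two of them were supplied, which is not allowed.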
        if [project, parent, dataset].count(None) < 2:
            raise SbgError(
                'Only one out of project, parent or dataset must be provided.'
            )

        if names is not None and isinstance(names, list):
            if len(names) == 0:
                names.append("")
            query_params['name'] = names

        metadata_params = {}
        if metadata and isinstance(metadata, dict):
            for k, v in metadata.items():
                metadata_params['metadata.' + k] = v

        if tags:
            query_params['tag'] = tags

        query_params.update(metadata_params)

        origin_params = {}
        if origin and isinstance(origin, dict):
            for k, v in origin.items():
                origin_params['origin.' + k] = v

        query_params.update(origin_params)

        return super(File, cls)._query(
            api=api, url=cls._URL['scroll' if cont_token else 'query'],
            token=cont_token, offset=offset,
            limit=limit, fields='_all', **query_params
        )

    @classmethod
    def upload(cls, path, project=None, parent=None, file_name=None,
               overwrite=False, retry=5, timeout=60, part_size=None, wait=True,
               api=None):
        """
        Uploads a file using multipart upload and returns an upload handle.
        If wait is set to True the call blocks until the upload is
        completed; otherwise the upload does not start until the handle's
        .start() method is invoked.

        :param path: File path on local disc.
        :param project: Project identifier
        :param parent: Parent folder identifier
        :param file_name: Optional file name.
        :param overwrite: If true will overwrite the file on the server.
        :param retry:  Number of retries if error occurs during upload.
        :param timeout:  Timeout for http requests.
        :param part_size:  Part size in bytes.
        :param wait:  If true will wait for upload to complete.
        :param api: Api instance.
        """

        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': {
            'path': path,
            'project': project,
            'file_name': file_name,
            'overwrite': overwrite,
            'retry': retry,
            'timeout': timeout,
            'part_size': part_size,
            'wait': wait,
        }}
        logger.info('Uploading file', extra=extra)

        if not project and not parent:
            raise SbgError('A project or parent identifier is required.')

        if project and parent:
            raise SbgError(
                'Project and parent identifiers are mutually exclusive.'
            )

        if project:
            project = Transform.to_project(project)

        if parent:
            parent = Transform.to_file(parent)

        upload = Upload(
            file_path=path, project=project, parent=parent,
            file_name=file_name, overwrite=overwrite, retry_count=retry,
            timeout=timeout, part_size=part_size, api=api
        )
        if wait:
            upload.start()
            upload.wait()
        return upload

    def copy(self, project, name=None):
        """
        Copies the current file.
        :param project: Destination project.
        :param name: Destination file name.
        :return: Copied File object.
        """
        project = Transform.to_project(project)
        data = {
            'project': project
        }
        if name:
            data['name'] = name
        extra = {'resource': self.__class__.__name__, 'query': {
            'id': self.id,
            'data': data
        }}
        logger.info('Copying file', extra=extra)
        new_file = self._api.post(url=self._URL['copy'].format(id=self.id),
                                  data=data).json()
        return File(api=self._api, **new_file)

    def download_info(self):
        """
        Fetches download information containing file url
        that can be used to download file.
        :return: Download info object.
        """
        info = self._api.get(url=self._URL['download_info'].format(id=self.id))
        return DownloadInfo(api=self._api, **info.json())

    def download(self, path, retry=5, timeout=10,
                 chunk_size=PartSize.DOWNLOAD_MINIMUM_PART_SIZE, wait=True,
                 overwrite=False):
        """
        Downloads the file and returns a download handle. If wait is False,
        the download does not start until the handle's .start() method is
        invoked.
        :param path: Full path to the new file.
        :param retry:  Number of retries if error occurs during download.
        :param timeout:  Timeout for http requests.
        :param chunk_size:  Chunk size in bytes.
        :param wait: If true will wait for download to complete.
        :param overwrite: If True will silently overwrite existing file.
        :return: Download handle.
        """

        if not overwrite and os.path.exists(path):
            raise LocalFileAlreadyExists(message=path)

        extra = {'resource': self.__class__.__name__, 'query': {
            'id': self.id,
            'path': path,
            'overwrite': overwrite,
            'retry': retry,
            'timeout': timeout,
            'chunk_size': chunk_size,
            'wait': wait,
        }}
        logger.info('Downloading file', extra=extra)
        info = self.download_info()
        download = Download(
            url=info.url, file_path=path, retry_count=retry, timeout=timeout,
            part_size=chunk_size, api=self._api
        )
        if wait:
            download.start()
            download.wait()
        return download

    @inplace_reload
    def save(self, inplace=True, silent=False):
        """
        Saves all modifications to the file on the server. By default this
        method raises an error if you are trying to save an instance that
        was not changed. Set the silent param to True to disable this
        behaviour.
        :param inplace: Apply edits to the current instance or get a new one.
        :param silent: If True, no exception is raised when the file was not
            modified.
        :raises: ResourceNotModified
        :return: File instance.
        """
        modified_data = self._modified_data()
        if silent or bool(modified_data):
            # If metadata is to be set
            if 'metadata' in modified_data:
                if hasattr(self, '_overwrite_metadata'):
                    self._api.put(
                        url=self._URL['metadata'].format(id=self.id),
                        data=modified_data['metadata']
                    )
                    delattr(self, '_overwrite_metadata')
                else:
                    self._api.patch(
                        url=self._URL['metadata'].format(id=self.id),
                        data=modified_data['metadata']
                    )
                modified_data.pop('metadata')
            if 'tags' in modified_data:
                self._api.put(
                    url=self._URL['tags'].format(id=self.id),
                    data=modified_data['tags']
                )
                modified_data.pop('tags')
            # Change everything else
            if bool(modified_data):
                self._api.patch(
                    url=self._URL['get'].format(id=self.id), data=modified_data
                )
        else:
            raise ResourceNotModified()

        return self.reload()

    def stream(self, part_size=32 * PartSize.KB):
        """
        Creates an iterator which can be used to stream the file content.
        :param part_size: Size of the part in bytes. Default 32KB
        :return: Iterator
        """
        download_info = self.download_info()
        response = self._api.get(
            url=download_info.url, stream=True, append_base=False
        )
        for part in response.iter_content(part_size):
            yield part

    # noinspection PyAttributeOutsideInit
    def reload(self):
        """
        Refreshes the file with the data from the server.
        """
        try:
            data = self._api.get(self.href, append_base=False).json()
            resource = File(api=self._api, **data)
        except Exception:
            try:
                data = self._api.get(
                    self._URL['get'].format(id=self.id)).json()
                resource = File(api=self._api, **data)
            except Exception as e:
                raise SbgError(
                    'Resource can not be refreshed due to an error: {}'
                    .format(six.text_type(e))
                )

        self._data = resource._data
        self._dirty = resource._dirty
        self.update_old()

        # If file.metadata = value was executed
        # file object will have attribute _overwrite_metadata=True,
        # which tells us to force overwrite of metadata on the server.
        # This is metadata specific. Once we reload the resource we delete the
        # attribute _overwrite_metadata from the instance.
        try:
            delattr(self, '_overwrite_metadata')
        except AttributeError:
            pass

        return self

    def content(self, path=None, overwrite=True, encoding='utf-8'):
        """
        Downloads the file to the specified path or to a temporary file
        and reads the file content into memory.
        Should not be used on very large files.

        :param path: Path for the file download. If omitted, a temporary
            file is used.
        :param overwrite: Overwrite the file if it exists locally.
        :param encoding: File encoding, UTF-8 by default.
        :return: File content.
        """
        if path:
            self.download(wait=True, path=path, overwrite=overwrite)
            with io.open(path, 'r', encoding=encoding) as fp:
                return fp.read()

        with tempfile.NamedTemporaryFile() as tmpfile:
            self.download(wait=True, path=tmpfile.name, overwrite=overwrite)
            with io.open(tmpfile.name, 'r', encoding=encoding) as fp:
                return fp.read()

    @classmethod
    def bulk_get(cls, files, api=None):
        """
        Retrieve files with specified ids in bulk
        :param files: Files to be retrieved.
        :param api: Api instance.
        :return: List of FileBulkRecord objects.
        """
        api = api or cls._API
        file_ids = [Transform.to_file(file_) for file_ in files]
        data = {'file_ids': file_ids}

        logger.debug('Getting files in bulk.')
        response = api.post(url=cls._URL['bulk_get'], data=data)
        return FileBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_delete(cls, files, api=None):
        """
        Delete files with specified ids in bulk
        :param files: Files to be deleted.
        :param api: Api instance.
        :return: List of FileBulkRecord objects.
        """
        api = api or cls._API
        file_ids = [Transform.to_file(file_) for file_ in files]
        data = {'file_ids': file_ids}

        logger.debug('Deleting files in bulk.')
        response = api.post(url=cls._URL['bulk_delete'], data=data)
        return FileBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_update(cls, files, api=None):
        """
        This call updates the details for multiple specified files.
        Use this call to set new information for the files, thus replacing
        all existing information and erasing omitted parameters. For each
        of the specified files, the call sets a new name, new tags and
        metadata.
        :param files: List of file instances.
        :param api: Api instance.
        :return: List of FileBulkRecord objects.
        """
        if not files:
            raise SbgError('Files are required.')

        api = api or cls._API
        data = {
            'items': [
                {
                    'id': file_.id,
                    'name': file_.name,
                    'tags': file_.tags,
                    'metadata': file_.metadata,
                }
                for file_ in files
            ]
        }

        logger.debug('Updating files in bulk.')
        response = api.post(url=cls._URL['bulk_update'], data=data)
        return FileBulkRecord.parse_records(response=response, api=api)

    @classmethod
    def bulk_edit(cls, files, api=None):
        """
        This call edits the details for multiple specified files.
        Use this call to modify the existing information for the files
        or add new information while preserving omitted parameters.
        For each of the specified files, the call edits its name, tags
        and metadata.
        :param files: List of file instances.
        :param api: Api instance.
        :return: List of FileBulkRecord objects.
        """
        if not files:
            raise SbgError('Files are required.')

        api = api or cls._API
        data = {
            'items': [
                {
                    'id': file_.id,
                    'name': file_.name,
                    'tags': file_.tags,
                    'metadata': file_.metadata,
                }
                for file_ in files
            ]
        }

        logger.debug('Editing files in bulk.')
        response = api.post(url=cls._URL['bulk_edit'], data=data)
        return FileBulkRecord.parse_records(response=response, api=api)

    def list_files(self, offset=None, limit=None, api=None, cont_token=None):
        """List files in a folder
        :param api: Api instance
        :param offset: Pagination offset
        :param limit: Pagination limit
        :param cont_token: Pagination continuation token
        :return: List of files
        """

        if cont_token and offset:
            raise SbgError(
                'Offset and continuation token parameters '
                'are mutually exclusive.'
            )

        api = api or self._API

        if not self.is_folder():
            raise SbgError('{name} is not a folder'.format(name=self.name))

        url = self._URL[
            'scroll_folder' if cont_token else 'list_folder'
        ].format(id=self.id)

        return super(File, self.__class__)._query(
            api=api, url=url, token=cont_token, offset=offset,
            limit=limit, fields='_all'
        )

    @classmethod
    def create_folder(cls, name, parent=None, project=None,
                      api=None):
        """Create a new folder
        :param name: Folder name
        :param parent: Parent folder
        :param project: Project to create folder in
        :param api: Api instance
        :return: New folder
        """
        api = api or cls._API

        data = {
            'name': name,
            'type': cls.FOLDER_TYPE
        }

        if not parent and not project:
            raise SbgError('Parent or project must be provided')

        if parent and project:
            raise SbgError(
                'Providing both "parent" and "project" is not allowed'
            )

        if parent:
            data['parent'] = Transform.to_file(file_=parent)

        if project:
            data['project'] = Transform.to_project(project=project)

        response = api.post(url=cls._URL['create_folder'], data=data).json()
        return cls(api=api, **response)

    def copy_to_folder(self, parent, name=None, api=None):
        """Copy file to folder
        :param parent: Folder to copy file to
        :param name: New file name
        :param api: Api instance
        :return: New file instance
        """
        api = api or self._API

        if self.is_folder():
            raise SbgError('Copying folders is not supported')

        data = {
            'parent': Transform.to_file(parent)
        }

        if name:
            data['name'] = name

        response = api.post(
            url=self._URL['copy_to_folder'].format(file_id=self.id),
            data=data
        ).json()
        return File(api=api, **response)

    def move_to_folder(self, parent, name=None, api=None):
        """Move file to folder
        :param parent: Folder to move file to
        :param name: New file name
        :param api: Api instance
        :return: New file instance
        """
        api = api or self._API

        if self.is_folder():
            raise SbgError('Moving folders is not supported')

        data = {
            'parent': Transform.to_file(parent)
        }

        if name:
            data['name'] = name

        response = api.post(
            url=self._URL['move_to_folder'].format(file_id=self.id),
            data=data
        ).json()
        return File(api=api, **response)
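
A short sketch of the file workflows defined above: upload, query, metadata editing and folder operations. Paths, project id and file names are placeholders, and it assumes File is the class exported by the sevenbridges package.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')
project_id = 'my-division/my-project'  # placeholder

# Upload a local file; with wait=True (the default) this call blocks.
sbg.File.upload('/tmp/sample.fastq', project=project_id, api=api)

# Create a destination folder inside the project.
folder = sbg.File.create_folder('results', project=project_id, api=api)

# Query by name, edit metadata and tags, then copy into the folder.
for f in sbg.File.query(project=project_id, names=['sample.fastq'], api=api):
    f.metadata['sample_id'] = 'S1'
    f.tags = ['example']
    f.save()
    copied = f.copy_to_folder(parent=folder, name='sample-copy.fastq')
    print(copied.id)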
Example No. 23
class AutomationRun(Resource):
    """
    Central resource for managing automation runs.
    """

    _URL = {
        'query': '/automation/runs',
        'get': '/automation/runs/{id}',
        'actions': '/automation/runs/{id}/actions/{action}',
        'state': '/automation/runs/{id}/state',
    }

    href = HrefField()
    id = StringField(read_only=True)
    name = StringField(read_only=True)
    automation = CompoundField(Automation, read_only=True)
    package = CompoundField(AutomationPackage, read_only=True)
    inputs = DictField()
    settings = DictField()
    created_on = DateTimeField(read_only=True)
    start_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    resumed_from = StringField(read_only=True)
    created_by = StringField(read_only=True)
    status = StringField(read_only=True)
    message = StringField(read_only=True)
    execution_details = DictField(read_only=True)

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return six.text_type('<AutomationRun: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls,
              automation=None,
              package=None,
              status=None,
              name=None,
              created_by=None,
              created_from=None,
              created_to=None,
              order_by=None,
              order=None,
              offset=None,
              limit=None,
              api=None):
        """
        Query (List) automation runs.
        :param name: Automation run name
        :param automation: Automation template
        :param package: Package
        :param status: Run status
        :param created_by: Username of user that created the run
        :param order_by: Property by which to order results
        :param order: Ascending or descending ("asc" or "desc")
        :param created_from: Date the run is created after
        :param created_to: Date the run is created before
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        if automation:
            automation = Transform.to_automation(automation)

        if package:
            package = Transform.to_automation_package(package)

        api = api or cls._API
        return super(AutomationRun, cls)._query(
            url=cls._URL['query'],
            name=name,
            automation=automation,
            package=package,
            status=status,
            created_by=created_by,
            created_from=created_from,
            created_to=created_to,
            order_by=order_by,
            order=order,
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls,
               package,
               inputs=None,
               settings=None,
               resume_from=None,
               name=None,
               secret_settings=None,
               api=None):
        """
        Create and start a new run.
        :param package: Automation package id
        :param inputs: Input dictionary
        :param settings: Settings override dictionary
        :param resume_from: Run to resume from
        :param name: Automation run name
        :param secret_settings: dict to override secret_settings from
        automation template
        :param api: sevenbridges Api instance
        :return: AutomationRun object
        """
        package = Transform.to_automation_package(package)

        data = {'package': package}
        if inputs:
            data['inputs'] = inputs
        if settings:
            data['settings'] = settings
        if resume_from:
            data['resume_from'] = resume_from
        if name:
            data['name'] = name
        if secret_settings:
            data['secret_settings'] = secret_settings

        api = api or cls._API
        automation_run = api.post(
            url=cls._URL['query'],
            data=data,
        ).json()
        return AutomationRun(api=api, **automation_run)

    def stop(self, api=None):
        """
        Stop automation run.
        :param api: sevenbridges Api instance.
        :return: Response content.
        """
        api = api or self._API

        return api.post(url=self._URL['actions'].format(
            id=self.id, action=AutomationRunActions.STOP)).content

    def get_log_file(self, api=None):
        """
        Retrieve automation run log.
        :param api: sevenbridges Api instance
        :return: Log string
        """
        api = api or self._API
        log_file_data = self.execution_details.get('log_file')
        return File(api=api, **log_file_data) if log_file_data else None

    def get_state(self, api=None):
        """
        Retrieve automation run state.
        :param api: sevenbridges Api instance
        :return: State file JSON contents as a dict.
        """
        api = api or self._API
        return api.get(self._URL['state'].format(id=self.id)).json()
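
A hedged sketch of starting and monitoring an automation run with the resource above. The package id and input names are placeholders, and it assumes AutomationRun is exported by the sevenbridges package.

import time

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Start a run from an existing code package (placeholder id and inputs).
run = sbg.AutomationRun.create(
    package='<automation-package-id>',
    inputs={'project_name': 'demo'},
    name='example automation run',
    api=api,
)

# end_time is assumed to be populated only once the run has finished.
while not run.end_time:
    time.sleep(30)
    run.reload()

print(run.status, run.message)
print(run.get_state())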
Example No. 24
class AutomationPackage(Resource):
    """
    Central resource for managing automation packages.
    """
    _URL = {
        'query': '/automation/automations/{automation_id}/packages',
        'get': '/automation/packages/{id}',
        'archive': '/automation/automations/{automation_id}'
                   '/packages/{id}/actions/archive',
        'restore': '/automation/automations/{automation_id}'
                   '/packages/{id}/actions/restore',
    }

    id = StringField(read_only=True)
    automation = UuidField(read_only=True)
    version = StringField(read_only=True)
    location = StringField(read_only=True)
    schema = DictField(read_only=True)
    created_by = StringField(read_only=True)
    created_on = DateTimeField(read_only=True)
    archived = BooleanField(read_only=True)
    custom_url = StringField(read_only=False)
    memory_limit = IntegerField(read_only=False)

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<AutomationPackage: id={self.id}>'

    @classmethod
    def query(cls, automation, offset=None, limit=None, api=None):
        """
        Query (List) automation packages.
        :param automation: Automation id.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        automation_id = Transform.to_automation(automation)

        api = api or cls._API
        return super()._query(
            url=cls._URL['query'].format(automation_id=automation_id),
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls,
               automation,
               version,
               location,
               schema,
               memory_limit=None,
               api=None):
        """
        Create a code package.
        :param automation: Automation id.
        :param version: The code package version.
        :param location: File ID of the uploaded code package.
        :param schema: IO schema for the main step of execution.
        :param memory_limit: Memory limit in MB.
        :param api: Api instance.
        :return: AutomationPackage object.
        """
        automation_id = Transform.to_automation(automation)

        api = api if api else cls._API

        if version is None:
            raise SbgError('Code package version is required!')

        if location is None:
            raise SbgError('Code package location is required!')

        if schema is None:
            raise SbgError('Schema is required!')

        data = {
            'version': version,
            'location': location,
            'schema': schema,
            'memory_limit': memory_limit,
        }

        extra = {'resource': cls.__name__, 'query': data}

        package_data = api.post(
            cls._URL['query'].format(automation_id=automation_id),
            data=data).json()
        logger.info('Add code package to automation with id %s',
                    automation_id,
                    extra=extra)
        return AutomationPackage(api=api, **package_data)

    @inplace_reload
    def archive(self):
        """
        Archive package
        :return: AutomationPackage object.
        """
        automation_id = Transform.to_automation(self.automation)

        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Archive automation package', extra=extra)

        package_data = self._api.post(url=self._URL['archive'].format(
            automation_id=automation_id, id=self.id)).json()
        return AutomationPackage(api=self._api, **package_data)

    @inplace_reload
    def restore(self):
        """
        Restore archived package
        :return: AutomationPackage object.
        """
        automation_id = Transform.to_automation(self.automation)
        extra = {
            'resource': type(self).__name__,
            'query': {
                'id': self.id,
            }
        }
        logger.info('Restore archived automation package', extra=extra)

        package_data = self._api.post(url=self._URL['restore'].format(
            automation_id=automation_id, id=self.id)).json()
        return AutomationPackage(api=self._api, **package_data)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the automation package on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: AutomationPackage instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving automation package', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            return AutomationPackage(api=self._api, **data)

        else:
            raise ResourceNotModified()
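
A minimal sketch for registering and managing a code package, assuming AutomationPackage is exported by the sevenbridges package; the automation id, file id and schema shape are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Register a new code package version (placeholder identifiers; the schema
# shape shown here is only an assumed minimal example).
package = sbg.AutomationPackage.create(
    automation='<automation-id>',
    version='0.1.0',
    location='<uploaded-code-package-file-id>',
    schema={'inputs': [], 'outputs': []},
    memory_limit=2048,
    api=api,
)
print(package.id, package.version)

# Archive the package, then restore it.
package.archive()
package.restore()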
Example No. 25
class Import(Resource):
    """
    Central resource for managing imports.
    """
    _URL = {
        'query': '/storage/imports',
        'get': '/storage/imports/{id}',
    }

    href = HrefField()
    id = StringField(read_only=True)
    state = StringField(read_only=True)
    source = CompoundField(VolumeFile, read_only=True)
    destination = CompoundField(ImportDestination, read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)
    overwrite = BooleanField(read_only=True)
    error = CompoundField(Error, read_only=True)
    _result = DictField(name='result', read_only=True)

    def __str__(self):
        return six.text_type('<Import: id={id}>'.format(id=self.id))

    def __eq__(self, other):
        if not hasattr(other, '__class__'):
            return False
        if not self.__class__ == other.__class__:
            return False
        return self is other or self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def result(self):
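        # _result is None until the import completes; subscripting None
        # raises TypeError, which is translated into returning None here.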
        try:
            return File(id=self._result['id'], api=self._api)
        except TypeError:
            return None

    @classmethod
    def submit_import(cls, volume, location, project, name=None,
                      overwrite=False, properties=None, api=None):

        """
        Submits new import job.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param project: Project identifier.
        :param name: Optional file name.
        :param overwrite: If True, the file will be overwritten if it exists.
        :param properties: Properties dictionary.
        :param api: Api instance.
        :return: Import object.
        """
        data = {}
        volume = Transform.to_volume(volume)
        project = Transform.to_project(project)
        source = {
            'volume': volume,
            'location': location
        }
        destination = {
            'project': project
        }
        if name:
            destination['name'] = name

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        if properties:
            data['properties'] = properties

        api = api if api else cls._API
        extra = {
            'resource': cls.__name__,
            'query': data
        }
        logger.info('Submitting import', extra=extra)
        _import = api.post(cls._URL['query'], data=data).json()
        return Import(api=api, **_import)

    @classmethod
    def query(cls, project=None, volume=None, state=None, offset=None,
              limit=None, api=None):

        """
        Query (List) imports.
        :param project: Optional project identifier.
        :param volume: Optional volume identifier.
        :param state: Optional import state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API

        if project:
            project = Transform.to_project(project)
        if volume:
            volume = Transform.to_volume(volume)

        return super(Import, cls)._query(
            url=cls._URL['query'], project=project, volume=volume, state=state,
            fields='_all', offset=offset, limit=limit, api=api
        )
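
A hedged sketch of importing a file from an attached volume into a project using the resource above; the volume, location and project identifiers are placeholders, and the terminal state strings are assumptions.

import time

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Submit an import job from a volume location into a project.
imp = sbg.Import.submit_import(
    volume='my-division/my-volume',
    location='incoming/sample.fastq',
    project='my-division/my-project',
    overwrite=True,
    api=api,
)

# Poll until the import reaches a terminal state (assumed state names).
while imp.state not in ('COMPLETED', 'FAILED'):
    time.sleep(10)
    imp.reload()

if imp.state == 'COMPLETED':
    print('Imported file id:', imp.result.id)
else:
    print('Import failed:', imp.error)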
Example No. 26
class AutomationRun(Resource):
    """
    Central resource for managing automation runs.
    """

    _URL = {
        'query': '/automation/runs',
        'get': '/automation/runs/{id}',
        'actions': '/automation/runs/{id}/actions/{action}',
        'state': '/automation/runs/{id}/state',
    }

    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    name = StringField(read_only=False)
    automation = CompoundField(Automation, read_only=True)
    package = CompoundField(AutomationPackage, read_only=True)
    inputs = DictField(read_only=False)
    outputs = DictField(read_only=True)
    settings = DictField(read_only=False)
    created_on = DateTimeField(read_only=True)
    start_time = DateTimeField(read_only=True)
    end_time = DateTimeField(read_only=True)
    resumed_from = StringField(read_only=True)
    created_by = StringField(read_only=True)
    status = StringField(read_only=True)
    message = StringField(read_only=True)
    execution_details = DictField(read_only=True)
    memory_limit = IntegerField(read_only=False)
    project_id = StringField(read_only=True)

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    def __str__(self):
        return f'<AutomationRun: id={self.id}>'

    @classmethod
    def query(cls,
              automation=None,
              package=None,
              status=None,
              name=None,
              created_by=None,
              created_from=None,
              created_to=None,
              project_id=None,
              order_by=None,
              order=None,
              offset=None,
              limit=None,
              api=None):
        """
        Query (List) automation runs.
        :param name: Automation run name
        :param automation: Automation template
        :param package: Package
        :param status: Run status
        :param created_by: Username of user that created the run
        :param order_by: Property by which to order results
        :param order: Ascending or descending ("asc" or "desc")
        :param created_from: Date the run is created after
        :param created_to: Date the run is created before
        :param project_id: Id of project if Automation run is project based
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: collection object
        """
        if automation:
            automation = Transform.to_automation(automation)

        if package:
            package = Transform.to_automation_package(package)

        api = api or cls._API
        return super()._query(
            url=cls._URL['query'],
            name=name,
            automation_id=automation,
            package_id=package,
            status=status,
            created_by=created_by,
            created_from=created_from,
            created_to=created_to,
            project_id=project_id,
            order_by=order_by,
            order=order,
            offset=offset,
            limit=limit,
            api=api,
        )

    @classmethod
    def create(cls,
               package,
               inputs=None,
               settings=None,
               resume_from=None,
               name=None,
               secret_settings=None,
               memory_limit=None,
               api=None):
        """
        Create and start a new run.
        :param package: Automation package id
        :param inputs: Input dictionary
        :param settings: Settings override dictionary
        :param resume_from: Run to resume from
        :param name: Automation run name
        :param secret_settings: dict to override secret_settings from
        automation template
        :param memory_limit: Memory limit in MB.
        :param api: sevenbridges Api instance
        :return: AutomationRun object
        """
        package = Transform.to_automation_package(package)

        data = {'package': package}
        if inputs:
            data['inputs'] = inputs
        else:
            data['inputs'] = dict()
        if settings:
            data['settings'] = settings
        if resume_from:
            data['resume_from'] = resume_from
        if name:
            data['name'] = name
        if secret_settings:
            data['secret_settings'] = secret_settings
        if memory_limit:
            data['memory_limit'] = memory_limit

        api = api or cls._API
        automation_run = api.post(
            url=cls._URL['query'],
            data=data,
        ).json()
        return AutomationRun(api=api, **automation_run)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the automation run on the server.
        :param inplace: Apply edits on the current instance or get a new one.
        :return: Automation run instance.
        """
        modified_data = self._modified_data()
        if modified_data:
            extra = {
                'resource': type(self).__name__,
                'query': {
                    'id': self.id,
                    'modified_data': modified_data
                }
            }
            logger.info('Saving automation run', extra=extra)
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            return AutomationRun(api=self._api, **data)
        else:
            raise ResourceNotModified()

    @classmethod
    def rerun(cls,
              id,
              package=None,
              inputs=None,
              settings=None,
              resume_from=None,
              name=None,
              secret_settings=None,
              merge=True,
              api=None):
        """
        Create and start a rerun of an existing automation run.
        :param id: Automation run id to rerun
        :param package: Automation package id
        :param inputs: Input dictionary
        :param settings: Settings override dictionary
        :param resume_from: Run to resume from
        :param name: Automation run name
        :param secret_settings: dict to override secret_settings from
        automation template
        :param merge: merge settings and inputs of run
        :param api: sevenbridges Api instance
        :return: AutomationRun object
        """
        data = {'merge': merge}
        if package:
            data['package'] = package
        if inputs:
            data['inputs'] = inputs
        if settings:
            data['settings'] = settings
        if resume_from:
            data['resume_from'] = resume_from
        if name:
            data['name'] = name
        if secret_settings:
            data['secret_settings'] = secret_settings

        api = api or cls._API
        automation_run = api.post(
            url=cls._URL['actions'].format(
                id=id, action=AutomationRunActions.RERUN
            ),
            data=data,
        ).json()
        return AutomationRun(api=api, **automation_run)

    def stop(self, api=None):
        """
        Stop automation run.
        :param api: sevenbridges Api instance.
        :return: Response content.
        """
        api = api or self._API

        return api.post(url=self._URL['actions'].format(
            id=self.id, action=AutomationRunActions.STOP)).content

    def get_log_file(self, api=None):
        """
        Retrieve automation run log.
        :param api: sevenbridges Api instance
        :return: Log string
        """
        api = api or self._API
        log_file_data = self.execution_details.get('log_file')
        return File(api=api, **log_file_data) if log_file_data else None

    def get_state(self, api=None):
        """
        Retrieve automation run state.
        :param api: sevenbridges Api instance
        :return: State file JSON contents as a dict.
        """
        api = api or self._API
        return api.get(self._URL['state'].format(id=self.id)).json()
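
A short sketch of the extra capabilities in this variant of the resource: a memory limit on creation, renaming via save(), and rerunning an existing run. Identifiers and inputs are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Start a run with an explicit memory limit in MB (placeholder package id).
run = sbg.AutomationRun.create(
    package='<automation-package-id>',
    inputs={'project_name': 'demo'},
    memory_limit=4096,
    api=api,
)

# Rename the run; save() sends only the modified fields.
run.name = 'nightly-demo-run'
run.save()

# Rerun it later, merging the original inputs and settings.
rerun = sbg.AutomationRun.rerun(id=run.id, merge=True, api=api)
print(rerun.id, rerun.status)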
Example No. 27
class Volume(Resource):
    """
    Central resource for managing volumes.
    """
    _URL = {
        'query': '/storage/volumes',
        'get': '/storage/volumes/{id}',
        'delete': '/storage/volumes/{id}',
    }

    href = HrefField()
    id = StringField(read_only=True)
    name = StringField(read_only=False)
    description = StringField(read_only=False)
    access_mode = StringField(read_only=False)
    service = CompoundField(VolumeService, read_only=True)
    created_on = DateTimeField(read_only=True)
    modified_on = DateTimeField(read_only=True)
    active = BooleanField(read_only=True)

    def __str__(self):
        return six.text_type('<Volume: id={id}>'.format(id=self.id))

    @classmethod
    def query(cls, offset=None, limit=None, api=None):
        """
        Query (List) volumes.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api or cls._API
        return super(Volume, cls)._query(url=cls._URL['query'],
                                         offset=offset,
                                         limit=limit,
                                         fields='_all',
                                         api=api)

    @classmethod
    def create_s3_volume(cls,
                         name,
                         bucket,
                         access_key_id,
                         secret_access_key,
                         access_mode,
                         description=None,
                         prefix=None,
                         properties=None,
                         api=None):
        """
        Create s3 volume.
        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param access_key_id: Amazon access key identifier.
        :param secret_access_key: Amazon secret access key.
        :param access_mode: Access Mode.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        service = {
            'type': VolumeType.S3,
            'bucket': bucket,
            'credentials': {
                'access_key_id': access_key_id,
                'secret_access_key': secret_access_key
            }
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        data = {'name': name, 'service': service, 'access_mode': access_mode}
        if description:
            data['description'] = description
        api = api or cls._API
        response = api.post(url=cls._URL['query'], data=data).json()
        return Volume(api=api, **response)

    @classmethod
    def create_google_volume(cls,
                             name,
                             bucket,
                             client_email,
                             private_key,
                             access_mode,
                             description=None,
                             prefix=None,
                             properties=None,
                             api=None):
        """
        Create Google Cloud Storage volume.
        :param name: Volume name.
        :param bucket: Referenced bucket.
        :param client_email: Google client email.
        :param private_key: Google client private key.
        :param access_mode: Access Mode.
        :param description: Volume description.
        :param prefix: Volume prefix.
        :param properties: Volume properties.
        :param api: Api instance.
        :return: Volume object.
        """
        service = {
            'type': VolumeType.GOOGLE,
            'bucket': bucket,
            'credentials': {
                'client_email': client_email,
                'private_key': private_key
            }
        }
        if prefix:
            service['prefix'] = prefix
        if properties:
            service['properties'] = properties

        data = {'name': name, 'service': service, 'access_mode': access_mode}
        if description:
            data['description'] = description
        api = api or cls._API
        response = api.post(url=cls._URL['query'], data=data).json()
        return Volume(api=api, **response)

    @inplace_reload
    def save(self, inplace=True):
        """
        Saves all modification to the volume on the server.
        """
        modified_data = self._modified_data()
        if bool(modified_data):
            data = self._api.patch(url=self._URL['get'].format(id=self.id),
                                   data=modified_data).json()
            volume = Volume(api=self._api, **data)
            return volume
        else:
            raise ResourceNotModified()

    def get_imports(self, project=None, state=None, offset=None, limit=None):
        """
        Fetches imports for this volume.
        :param project: Optional project identifier.
        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.imports.query(volume=self,
                                       project=project,
                                       state=state,
                                       offset=offset,
                                       limit=limit)

    def get_exports(self, state=None, offset=None, limit=None):
        """
        Fetches exports for this volume.
        :param state: Optional state.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :return: Collection object.
        """
        return self._api.exports.query(volume=self,
                                       state=state,
                                       offset=offset,
                                       limit=limit)
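
A hedged sketch tying the volume resource to the import workflow shown earlier: attach an S3 bucket as a read-only volume, then list transfers that reference it. Credentials and bucket names are placeholders, and the 'RO' access mode string is an assumption.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<auth-token>')

# Attach an S3 bucket as a read-only volume (placeholder credentials).
volume = sbg.Volume.create_s3_volume(
    name='my_s3_volume',
    bucket='my-bucket',
    access_key_id='<aws-access-key-id>',
    secret_access_key='<aws-secret-access-key>',
    access_mode='RO',
    description='Read-only S3 volume',
    api=api,
)

# Update the description and persist the change.
volume.description = 'Read-only S3 volume (raw data)'
volume.save()

# List recent imports and exports that used this volume.
for imp in volume.get_imports(limit=10):
    print(imp.id, imp.state)
for exp in volume.get_exports(limit=10):
    print(exp.id, exp.state)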