Example 1
    def create_folder(cls, name, parent=None, project=None,
                      api=None):
        """Create a new folder
        :param name: Folder name
        :param parent: Parent folder
        :param project: Project to create folder in
        :param api: Api instance
        :return: New folder
        """
        api = api or cls._API

        data = {
            'name': name,
            'type': cls.FOLDER_TYPE
        }

        if not parent and not project:
            raise SbgError('Parent or project must be provided')

        if parent and project:
            raise SbgError(
                'Providing both "parent" and "project" is not allowed'
            )

        if parent:
            data['parent'] = Transform.to_file(file_=parent)

        if project:
            data['project'] = Transform.to_project(project=project)

        response = api.post(url=cls._URL['create_folder'], data=data).json()
        return cls(api=api, **response)
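Below is a minimal usage sketch for the folder-creation call above. It assumes the method is exposed on the standard sevenbridges-python Api client as api.files.create_folder; the token, project id and folder names are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

# Create a top-level folder in a project (exactly one of project/parent may be given).
raw_data = api.files.create_folder(name='raw_data', project='my-username/my-project')

# Create a nested folder by passing the parent folder instead of a project.
fastq = api.files.create_folder(name='fastq', parent=raw_data)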
Example 2
    def create(cls, file, name, position, chromosome, private=True, api=None):
        """
        Create a marker on a file.
        :param file: File object or identifier.
        :param name: Marker name.
        :param position: Marker position object.
        :param chromosome: Chromosome number.
        :param private: Whether the marker is private or public.
        :param api: Api instance.
        :return: Marker object.
        """
        api = api if api else cls._API

        file = Transform.to_file(file)
        data = {
            'file': file,
            'name': name,
            'position': position,
            'chromosome': chromosome,
            'private': private
        }

        extra = {
            'resource': cls.__name__,
            'query': data
        }
        logger.info('Creating marker', extra=extra)
        marker_data = api.post(url=cls._URL['query'], data=data).json()
        return Marker(api=api, **marker_data)
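A hedged usage sketch for the marker-creation call, assuming markers are exposed on the client as api.markers and that the position argument accepts a simple start/end mapping; the file id, coordinates and names are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')
bam_file = api.files.get('<file-id>')  # markers are typically placed on BAM files

marker = api.markers.create(
    file=bam_file,
    name='region_of_interest',
    position={'start': 1200, 'end': 1450},  # assumed start/end mapping
    chromosome=1,
    private=True,
)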
Example 3
    def create(cls, file, name, position, chromosome, private=True, api=None):
        """
        Create a marker on a file.
        :param file: File object or identifier.
        :param name: Marker name.
        :param position: Marker position object.
        :param chromosome: Chromosome number.
        :param private: Whether the marker is private or public.
        :param api: Api instance.
        :return: Marker object.
        """
        api = api if api else cls._API

        file = Transform.to_file(file)
        data = {
            'file': file,
            'name': name,
            'position': position,
            'chromosome': chromosome,
            'private': private
        }

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Creating marker', extra=extra)
        marker_data = api.post(url=cls._URL['query'], data=data).json()
        return Marker(api=api, **marker_data)
Example 4
    def submit_export(cls,
                      file,
                      volume,
                      location,
                      properties=None,
                      overwrite=False,
                      api=None):
        """
        Submit new export job.
        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If True, the file will be overwritten if it already exists.
        :param api: Api Instance.
        :return: Export object.
        """
        data = {}
        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)
        destination = {'volume': volume, 'location': location}
        source = {'file': file}
        if properties:
            data['properties'] = properties

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        api = api if api else cls._API
        _export = api.post(cls._URL['query'], data=data).json()
        return Export(api=api, **_export)
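A short sketch of submitting a single export through the client, assuming the call is reachable as api.exports.submit_export and that a volume is already attached to the account; the ids and the location path are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

file_to_export = api.files.get('<file-id>')
export = api.exports.submit_export(
    file=file_to_export,
    volume='my-username/my-volume',  # volume identifier or Volume object
    location='exports/output.bam',   # destination path on the volume
    overwrite=True,
)
# export.reload() can later be used to poll the job state.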
Example 6
    def upload(cls, path, project=None, parent=None, file_name=None,
               overwrite=False, retry=5, timeout=10,
               part_size=PartSize.UPLOAD_MINIMUM_PART_SIZE, wait=True,
               api=None):
        """
        Uploads a file using multipart upload and returns an upload handle
        if the wait parameter is set to False. If wait is set to True it
        will block until the upload is completed.

        :param path: File path on local disc.
        :param project: Project identifier
        :param parent: Parent folder identifier
        :param file_name: Optional file name.
        :param overwrite: If true will overwrite the file on the server.
        :param retry:  Number of retries if error occurs during upload.
        :param timeout:  Timeout for http requests.
        :param part_size:  Part size in bytes.
        :param wait:  If true will wait for upload to complete.
        :param api: Api instance.
        """

        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': {
            'path': path,
            'project': project,
            'file_name': file_name,
            'overwrite': overwrite,
            'retry': retry,
            'timeout': timeout,
            'part_size': part_size,
            'wait': wait,
        }}
        logger.info('Uploading file', extra=extra)

        if not project and not parent:
            raise SbgError('A project or parent identifier is required.')

        if project and parent:
            raise SbgError(
                'Project and parent identifiers are mutually exclusive.'
            )

        if project:
            project = Transform.to_project(project)

        if parent:
            parent = Transform.to_file(parent)

        upload = Upload(
            file_path=path, project=project, parent=parent,
            file_name=file_name, overwrite=overwrite, retry_count=retry,
            timeout=timeout, part_size=part_size, api=api
        )
        if wait:
            upload.start()
            upload.wait()
            return upload
        else:
            return upload
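A sketch of both the blocking and the non-blocking form of the upload call, assuming it is exposed as api.files.upload; the local paths, project id and folder id are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

# Blocking upload into a project (wait=True is the default).
uploaded = api.files.upload(path='/data/sample.fastq.gz',
                            project='my-username/my-project')

# Non-blocking upload into a folder: the handle is started and awaited manually.
handle = api.files.upload(path='/data/sample2.fastq.gz',
                          parent='<folder-id>',
                          wait=False)
handle.start()
handle.wait()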
Example 7
    def upload(cls, path, project=None, parent=None, file_name=None,
               overwrite=False, retry=5, timeout=60, part_size=None, wait=True,
               api=None):
        """
        Uploads a file using multipart upload and returns an upload handle
        if the wait parameter is set to False. If wait is set to True it
        will block until the upload is completed.

        :param path: File path on local disc.
        :param project: Project identifier
        :param parent: Parent folder identifier
        :param file_name: Optional file name.
        :param overwrite: If true will overwrite the file on the server.
        :param retry:  Number of retries if error occurs during upload.
        :param timeout:  Timeout for http requests.
        :param part_size:  Part size in bytes.
        :param wait:  If true will wait for upload to complete.
        :param api: Api instance.
        """

        api = api or cls._API
        extra = {'resource': cls.__name__, 'query': {
            'path': path,
            'project': project,
            'file_name': file_name,
            'overwrite': overwrite,
            'retry': retry,
            'timeout': timeout,
            'part_size': part_size,
            'wait': wait,
        }}
        logger.info('Uploading file', extra=extra)

        if not project and not parent:
            raise SbgError('A project or parent identifier is required.')

        if project and parent:
            raise SbgError(
                'Project and parent identifiers are mutually exclusive.'
            )

        if project:
            project = Transform.to_project(project)

        if parent:
            parent = Transform.to_file(parent)

        upload = Upload(
            file_path=path, project=project, parent=parent,
            file_name=file_name, overwrite=overwrite, retry_count=retry,
            timeout=timeout, part_size=part_size, api=api
        )
        if wait:
            upload.start()
            upload.wait()
            return upload
        else:
            return upload
Example 8
    def bulk_submit(cls, imports, api=None):
        """
        Submit imports in bulk
        :param imports: List of dicts, each describing an import to submit.
        :param api: Api instance.
        :return: List of ImportBulkRecord objects.
        """
        if not imports:
            raise SbgError('Imports are required')

        api = api or cls._API

        items = []
        for import_ in imports:
            project = import_.get('project')
            parent = import_.get('parent')

            if project and parent:
                raise SbgError(
                    'Project and parent identifiers are mutually exclusive')
            elif project:
                destination = {'project': Transform.to_project(project)}
            elif parent:
                destination = {'parent': Transform.to_file(parent)}
            else:
                raise SbgError('Project or parent identifier is required.')

            volume = Transform.to_volume(import_.get('volume'))
            location = Transform.to_location(import_.get('location'))
            name = import_.get('name', None)
            overwrite = import_.get('overwrite', False)
            autorename = import_.get('autorename', None)
            preserve_folder_structure = import_.get(
                'preserve_folder_structure', None)

            if name:
                destination['name'] = name

            import_config = {
                'source': {
                    'volume': volume,
                    'location': location
                },
                'destination': destination,
                'overwrite': overwrite,
            }
            if autorename is not None:
                import_config['autorename'] = autorename
            if preserve_folder_structure is not None:
                import_config['preserve_folder_structure'] = (
                    preserve_folder_structure)
            items.append(import_config)

        data = {'items': items}
        response = api.post(url=cls._URL['bulk_create'], data=data)
        return ImportBulkRecord.parse_records(response=response, api=api)
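A sketch of a bulk import submission, assuming the call is reachable as api.imports.bulk_submit; the volume, locations, project id and folder id are placeholders. Each dict uses the keys read by the method above: volume, location, exactly one of project or parent, plus optional name, overwrite, autorename and preserve_folder_structure.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

imports = [
    {'volume': 'my-username/my-volume',
     'location': 'incoming/sample1.fastq.gz',
     'project': 'my-username/my-project',
     'overwrite': False},
    {'volume': 'my-username/my-volume',
     'location': 'incoming/sample2.fastq.gz',
     'parent': '<folder-id>',            # project and parent are mutually exclusive
     'name': 'sample2_renamed.fastq.gz'},
]
records = api.imports.bulk_submit(imports=imports)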
Example 9
    def bulk_copy_files(cls, files, destination_project, api=None):
        """
        Bulk copy of files.
        :param files: List containing files to be copied.
        :param destination_project: Destination project.
        :param api: Api instance.
        :return: MultiStatus copy result.
        """
        api = api if api else cls._API
        files = [Transform.to_file(file) for file in files]
        data = {'project': destination_project, 'file_ids': files}
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Performing bulk copy', extra=extra)
        return api.post(url=cls._URL['bulk_copy'], data=data).json()
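A sketch of the bulk copy call, assuming it is exposed on the actions resource as api.actions.bulk_copy_files; the file ids and destination project are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

result = api.actions.bulk_copy_files(
    files=['<file-id-1>', '<file-id-2>'],
    destination_project='my-username/other-project',
)
# The raw multi-status JSON describes the outcome for each requested file.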
Example 10
    def bulk_delete(cls, files, api=None):
        """
        Delete files with specified ids in bulk
        :param files: Files to be deleted.
        :param api: Api instance.
        :return: List of FileBulkRecord objects.
        """
        api = api or cls._API
        file_ids = [Transform.to_file(file_) for file_ in files]
        data = {'file_ids': file_ids}

        logger.debug('Deleting files in bulk.')
        response = api.post(url=cls._URL['bulk_delete'], data=data)
        return FileBulkRecord.parse_records(response=response, api=api)
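A sketch of deleting several files at once, assuming the call is exposed as api.files.bulk_delete and accepts File objects or plain ids; the ids are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

records = api.files.bulk_delete(files=['<file-id-1>', '<file-id-2>'])
for record in records:
    # Each FileBulkRecord reports success or failure for one file.
    print(record)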
Example 11
    def bulk_delete(cls, files, api=None):
        """
        Delete files with specified ids in bulk
        :param files: Files to be deleted.
        :param api: Api instance.
        :return: List of FileBulkRecord objects.
        """
        api = api or cls._API
        file_ids = [Transform.to_file(file_) for file_ in files]
        data = {'file_ids': file_ids}

        logger.info('Deleting files in bulk.')
        response = api.post(url=cls._URL['bulk_delete'], data=data)
        return FileBulkRecord.parse_records(response=response, api=api)
Example 12
    def submit_export(cls, file, volume, location, properties=None,
                      overwrite=False, copy_only=False, api=None):

        """
        Submit new export job.
        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If True, the file will be overwritten if it already exists.
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api Instance.
        :return: Export object.
        """
        data = {}
        params = {}

        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)
        destination = {
            'volume': volume,
            'location': location
        }
        source = {
            'file': file
        }
        if properties:
            data['properties'] = properties

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        extra = {
            'resource': cls.__name__,
            'query': data
        }
        logger.info('Submitting export', extra=extra)

        api = api if api else cls._API
        if copy_only:
            params['copy_only'] = True
            _export = api.post(
                cls._URL['query'], data=data, params=params).json()
        else:
            _export = api.post(
                cls._URL['query'], data=data).json()

        return Export(api=api, **_export)
Example 13
    def query(cls, file, offset=None, limit=None, api=None):
        """
        Queries genome markers on a file.
        :param file: Genome file, usually a BAM file.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api if api else cls._API

        file = Transform.to_file(file)
        return super(Marker, cls)._query(
            url=cls._URL['query'], offset=offset, limit=limit,
            file=file, fields='_all', api=api
        )
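A sketch of listing markers on a file, assuming the query is exposed as api.markers.query; the file id is a placeholder.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

bam_file = api.files.get('<file-id>')
markers = api.markers.query(file=bam_file, limit=50)
for marker in markers:
    print(marker.name)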
Example 14
    def submit_export(cls,
                      file,
                      volume,
                      location,
                      properties=None,
                      overwrite=False,
                      copy_only=False,
                      api=None):
        """
        Submit new export job.
        :param file: File to be exported.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param properties: Properties dictionary.
        :param overwrite: If True, the file will be overwritten if it already exists.
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api Instance.
        :return: Export object.
        """
        data = {}
        params = {}

        volume = Transform.to_volume(volume)
        file = Transform.to_file(file)
        destination = {'volume': volume, 'location': location}
        source = {'file': file}
        if properties:
            data['properties'] = properties

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting export', extra=extra)

        api = api if api else cls._API
        if copy_only:
            params['copy_only'] = True
            _export = api.post(cls._URL['query'], data=data,
                               params=params).json()
        else:
            _export = api.post(cls._URL['query'], data=data).json()

        return Export(api=api, **_export)
Example 15
    def query(cls, file, offset=None, limit=None, api=None):
        """
        Queries genome markers on a file.
        :param file: Genome file, usually a BAM file.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api if api else cls._API

        file = Transform.to_file(file)
        return super()._query(url=cls._URL['query'],
                              offset=offset,
                              limit=limit,
                              file=file,
                              fields='_all',
                              api=api)
Example 16
    def bulk_submit(cls, exports, copy_only=False, api=None):
        """
        Create exports in bulk.
        :param exports: Exports to be submitted in bulk.
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        """
        if not exports:
            raise SbgError('Exports are required')

        api = api or cls._API

        items = []
        for export in exports:
            file_ = Transform.to_file(export.get('file'))
            volume = Transform.to_volume(export.get('volume'))
            location = Transform.to_location(export.get('location'))
            properties = export.get('properties', {})
            overwrite = export.get('overwrite', False)

            item = {
                'source': {
                    'file': file_
                },
                'destination': {
                    'volume': volume,
                    'location': location
                },
                'properties': properties,
                'overwrite': overwrite
            }

            items.append(item)

        data = {'items': items}
        params = {'copy_only': copy_only}

        response = api.post(
            url=cls._URL['bulk_create'], params=params, data=data
        )
        return ExportBulkRecord.parse_records(response=response, api=api)
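A sketch of submitting exports in bulk, assuming the call is exposed as api.exports.bulk_submit; the file ids, volume and locations are placeholders. Each dict uses the keys the method reads above: file, volume, location, and optional properties and overwrite.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

exports = [
    {'file': '<file-id-1>', 'volume': 'my-username/my-volume',
     'location': 'exports/sample1.bam', 'overwrite': True},
    {'file': '<file-id-2>', 'volume': 'my-username/my-volume',
     'location': 'exports/sample2.bam'},
]
records = api.exports.bulk_submit(exports=exports, copy_only=False)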
Example 17
    def bulk_submit(cls, exports, copy_only=False, api=None):
        """
        Create exports in bulk.
        :param exports: List of dicts, each describing an export to submit.
        :param copy_only: If true files are kept on SevenBridges bucket.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        """
        if not exports:
            raise SbgError('Exports are required')

        api = api or cls._API

        items = []
        for export in exports:
            file_ = Transform.to_file(export.get('file'))
            volume = Transform.to_volume(export.get('volume'))
            location = Transform.to_location(export.get('location'))
            properties = export.get('properties', {})
            overwrite = export.get('overwrite', False)

            item = {
                'source': {
                    'file': file_
                },
                'destination': {
                    'volume': volume,
                    'location': location
                },
                'properties': properties,
                'overwrite': overwrite
            }

            items.append(item)

        data = {'items': items}
        params = {'copy_only': copy_only}

        response = api.post(url=cls._URL['bulk_create'],
                            params=params,
                            data=data)
        return ExportBulkRecord.parse_records(response=response, api=api)
Example 18
    def bulk_copy_files(cls, files, destination_project, api=None):
        """
        Bulk copy of files.
        :param files: List containing files to be copied.
        :param destination_project: Destination project.
        :param api: Api instance.
        :return: MultiStatus copy result.
        """
        api = api if api else cls._API
        files = [Transform.to_file(file) for file in files]
        data = {
            'project': destination_project,
            'file_ids': files
        }
        extra = {
            'resource': cls.__name__,
            'query': data
        }
        logger.info('Performing bulk copy', extra=extra)
        return api.post(url=cls._URL['bulk_copy'], data=data).json()
Example 19
    def move_to_folder(self, parent, name=None, api=None):
        """Move file to folder
        :param parent: Folder to move file to
        :param name: New file name
        :param api: Api instance
        :return: New file instance
        """
        api = api or self._API

        if self.is_folder():
            raise SbgError('Moving folders is not supported')

        data = {'parent': Transform.to_file(parent)}

        if name:
            data['name'] = name

        response = api.post(
            url=self._URL['move_to_folder'].format(file_id=self.id),
            data=data).json()
        return File(api=api, **response)
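A sketch of moving a file into a folder, assuming File instances obtained from the client expose move_to_folder as shown above; the ids and the new name are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

file_ = api.files.get('<file-id>')
folder = api.files.get('<folder-id>')

moved = file_.move_to_folder(parent=folder, name='renamed.fastq.gz')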
Example 20
    def bulk_submit(cls, exports, api=None):
        """
        Create exports in bulk.
        :param exports: Exports to be submitted in bulk.
        :param api: Api instance.
        :return: list of ExportBulkRecord objects.
        """
        if not exports:
            raise SbgError('Exports are required')

        api = api or cls._API

        items = []
        for export in exports:
            file_ = Transform.to_file(export.get('file'))
            volume = Transform.to_volume(export.get('volume'))
            location = Transform.to_location(export.get('location'))
            properties = export.get('properties', {})
            overwrite = export.get('overwrite', False)

            item = {
                'source': {
                    'file': file_
                },
                'destination': {
                    'volume': volume,
                    'location': location
                },
                'properties': properties,
                'overwrite': overwrite
            }

            items.append(item)

        data = {'items': items}

        response = api.post(url=cls._URL['bulk_create'], data=data)
        return ExportBulkRecord.parse_records(response=response, api=api)
Example 21
    def move_to_folder(self, parent, name=None, api=None):
        """Move file to folder
        :param parent: Folder to move file to
        :param name: New file name
        :param api: Api instance
        :return: New file instance
        """
        api = api or self._API

        if self.is_folder():
            raise SbgError('Moving folders is not supported')

        data = {
            'parent': Transform.to_file(parent)
        }

        if name:
            data['name'] = name

        response = api.post(
            url=self._URL['move_to_folder'].format(file_id=self.id),
            data=data
        ).json()
        return File(api=api, **response)
Example 22
def test_transform_file_invalid_values(file):
    with pytest.raises(SbgError):
        Transform.to_file(file)
Example 23
def test_transform_file(file):
    Transform.to_file(file)
Example 24
    def query(cls, project=None, names=None, metadata=None, origin=None,
              tags=None, offset=None, limit=None, dataset=None,
              api=None, parent=None, cont_token=None):
        """
        Query (list) files; requires a project, dataset or parent folder.
        :param project: Project id
        :param names: Name list
        :param metadata: Metadata query dict
        :param origin: Origin query dict
        :param tags: List of tags to filter on
        :param offset: Pagination offset
        :param limit: Pagination limit
        :param dataset: Dataset id
        :param api: Api instance.
        :param parent: Folder id or File object with type folder
        :param cont_token: Pagination continuation token
        :return: Collection object.
        """

        if cont_token and offset:
            raise SbgError(
                'Offset and continuation token parameters '
                'are mutually exclusive.'
            )

        if cont_token and metadata:
            raise SbgError(
                'Metadata filtering cannot be combined '
                'with continuation token pagination.'
            )

        api = api or cls._API

        query_params = {}

        if project:
            project = Transform.to_project(project)
            query_params['project'] = project

        if dataset:
            dataset = Transform.to_dataset(dataset)
            query_params['dataset'] = dataset

        if parent:
            query_params['parent'] = Transform.to_file(parent)

        if not (project or dataset or parent):
            raise SbgError('Project, dataset or parent must be provided.')

        if [project, parent, dataset].count(None) < 2:
            raise SbgError(
                'Only one out of project, parent or dataset must be provided.'
            )

        if names is not None and isinstance(names, list):
            if len(names) == 0:
                names.append("")
            query_params['name'] = names

        metadata_params = {}
        if metadata and isinstance(metadata, dict):
            for k, v in metadata.items():
                metadata_params['metadata.' + k] = metadata[k]

        if tags:
            query_params['tag'] = tags

        query_params.update(metadata_params)

        origin_params = {}
        if origin and isinstance(origin, dict):
            for k, v in origin.items():
                origin_params['origin.' + k] = origin[k]

        query_params.update(origin_params)

        return super(File, cls)._query(
            api=api, url=cls._URL['scroll' if cont_token else 'query'],
            token=cont_token, offset=offset,
            limit=limit, fields='_all', **query_params
        )
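A sketch of the file query, assuming it is exposed as api.files.query; the project id, metadata key and tag are placeholders. Exactly one of project, dataset or parent must be supplied.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

files = api.files.query(
    project='my-username/my-project',
    metadata={'sample_id': 'S1'},  # sent as metadata.sample_id=S1
    tags=['qc-passed'],
    limit=100,
)
for f in files:
    print(f.id, f.name)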
Example 25
    def query(cls, project=None, names=None, metadata=None, origin=None,
              tags=None, offset=None, limit=None, dataset=None, api=None,
              parent=None):
        """
        Query (list) files; requires a project, dataset or parent folder.
        :param project: Project id
        :param names: Name list
        :param metadata: Metadata query dict
        :param origin: Origin query dict
        :param tags: List of tags to filter on
        :param offset: Pagination offset
        :param limit: Pagination limit
        :param dataset: Dataset id
        :param api: Api instance.
        :param parent: Folder id or File object with type folder
        :return: Collection object.
        """
        api = api or cls._API

        query_params = {}

        if project:
            project = Transform.to_project(project)
            query_params['project'] = project

        if dataset:
            dataset = Transform.to_dataset(dataset)
            query_params['dataset'] = dataset

        if parent:
            query_params['parent'] = Transform.to_file(parent)

        if not (project or dataset or parent):
            raise SbgError('Project, dataset or parent must be provided.')

        if [project, parent, dataset].count(None) < 2:
            raise SbgError(
                'Only one out of project, parent or dataset must be provided.'
            )

        if names is not None and isinstance(names, list):
            if len(names) == 0:
                names.append("")
            query_params['name'] = names

        metadata_params = {}
        if metadata and isinstance(metadata, dict):
            for k, v in metadata.items():
                metadata_params['metadata.' + k] = metadata[k]

        if tags:
            query_params['tag'] = tags

        query_params.update(metadata_params)

        origin_params = {}
        if origin and isinstance(origin, dict):
            for k, v in origin.items():
                origin_params['origin.' + k] = origin[k]

        query_params.update(origin_params)

        return super(File, cls)._query(
            api=api, url=cls._URL['query'], offset=offset,
            limit=limit, fields='_all', **query_params
        )
Example 28
    def query(cls,
              project=None,
              names=None,
              metadata=None,
              origin=None,
              tags=None,
              offset=None,
              limit=None,
              dataset=None,
              api=None,
              parent=None):
        """
        Query (list) files; requires a project, dataset or parent folder.
        :param project: Project id
        :param names: Name list
        :param metadata: Metadata query dict
        :param origin: Origin query dict
        :param tags: List of tags to filter on
        :param offset: Pagination offset
        :param limit: Pagination limit
        :param dataset: Dataset id
        :param api: Api instance.
        :param parent: Folder id or File object with type folder
        :return: Collection object.
        """
        api = api or cls._API

        query_params = {}

        if project:
            project = Transform.to_project(project)
            query_params['project'] = project

        if dataset:
            dataset = Transform.to_dataset(dataset)
            query_params['dataset'] = dataset

        if parent:
            query_params['parent'] = Transform.to_file(parent)

        if not (project or dataset or parent):
            raise SbgError('Project, dataset or parent must be provided!')

        if names is not None and isinstance(names, list):
            if len(names) == 0:
                names.append("")
            query_params['name'] = names

        metadata_params = {}
        if metadata and isinstance(metadata, dict):
            for k, v in metadata.items():
                metadata_params['metadata.' + k] = metadata[k]

        if tags:
            query_params['tag'] = tags

        query_params.update(metadata_params)

        origin_params = {}
        if origin and isinstance(origin, dict):
            for k, v in origin.items():
                origin_params['origin.' + k] = origin[k]

        query_params.update(origin_params)

        return super(File, cls)._query(api=api,
                                       url=cls._URL['query'],
                                       offset=offset,
                                       limit=limit,
                                       fields='_all',
                                       **query_params)
Example 29
    def submit_import(cls,
                      volume,
                      location,
                      project=None,
                      name=None,
                      overwrite=False,
                      properties=None,
                      parent=None,
                      preserve_folder_structure=True,
                      api=None):
        """
        Submits new import job.
        :param volume: Volume identifier.
        :param location: Volume location.
        :param project: Project identifier.
        :param name: Optional file name.
        :param overwrite: If True, the file will be overwritten if it already exists.
        :param properties: Properties dictionary.
        :param parent: The ID of the target folder to which the item should be
            imported. Should not be used together with project.
        :param preserve_folder_structure: Whether to keep the exact source
            folder structure. The default value is true if the item being
            imported is a folder. Should not be used if you are importing
            a file.
        :param api: Api instance.
        :return: Import object.
        """
        data = {}
        volume = Transform.to_volume(volume)

        if project and parent:
            raise SbgError(
                'Project and parent identifiers are mutually exclusive')
        elif project:
            project = Transform.to_project(project)
            destination = {'project': project}
        elif parent:
            parent = Transform.to_file(parent)
            destination = {'parent': parent}
        else:
            raise SbgError('Project or parent identifier is required.')

        source = {'volume': volume, 'location': location}

        if name:
            destination['name'] = name

        data['source'] = source
        data['destination'] = destination
        data['overwrite'] = overwrite

        if not preserve_folder_structure:
            data['preserve_folder_structure'] = preserve_folder_structure

        if properties:
            data['properties'] = properties

        api = api if api else cls._API
        extra = {'resource': cls.__name__, 'query': data}
        logger.info('Submitting import', extra=extra)
        _import = api.post(cls._URL['query'], data=data).json()
        return Import(api=api, **_import)
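A sketch of submitting a single import, assuming the call is exposed as api.imports.submit_import; the volume, location and project are placeholders.

import sevenbridges as sbg

api = sbg.Api(url='https://api.sbgenomics.com/v2', token='<AUTH_TOKEN>')

import_job = api.imports.submit_import(
    volume='my-username/my-volume',
    location='incoming/sample.fastq.gz',
    project='my-username/my-project',  # or parent='<folder-id>', not both
    overwrite=False,
)
# import_job.reload() can be used to poll until the job finishes.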