def __create_dataset_collection(self, trans, history, payload, **kwd):
    """Create a new HDCA in ``history`` (or copy an existing one) and return its element-view dict."""
    source = kwd.get("source", "new_collection")
    service = trans.app.dataset_collections_service
    if source == "new_collection":
        # Translate the API payload into manager-level arguments and build the collection.
        params = api_payload_to_create_params(payload)
        instance = service.create(trans, parent=history, **params)
    elif source == "hdca":
        encoded_id = payload.get('content', None)
        if encoded_id is None:
            raise exceptions.RequestParameterMissingException("'content' id of target to copy is missing")
        instance = service.copy(
            trans=trans,
            parent=history,
            source="hdca",
            encoded_source_id=encoded_id,
        )
    else:
        raise exceptions.RequestParameterInvalidException("Invalid 'source' parameter in request %s" % source)
    return self.__collection_dict(trans, instance, view="element")
def create(self, trans, payload, **kwd):
    """
    * POST /api/dataset_collections:
        create a new dataset collection instance.

    :type  payload: dict
    :param payload: (optional) dictionary structure containing:
        * collection_type: dataset collection type to create.
        * instance_type:   Instance type - 'history' or 'library'.
        * name:            the new dataset collection's name
        * datasets:        object describing datasets for collection

    :rtype: dict
    :returns: element view of new dataset collection
    """
    # TODO: Error handling...
    create_params = api_payload_to_create_params(payload)
    instance_type = payload.pop("instance_type", "history")
    if instance_type == "history":
        history_id = payload.get('history_id')
        history_id = self.decode_id(history_id)
        # Ownership check: collections may only be created in the user's own histories.
        history = self.history_manager.get_owned(history_id, trans.user, current_history=trans.history)
        create_params["parent"] = history
    elif instance_type == "library":
        folder_id = payload.get('folder_id')
        library_folder = self.get_library_folder(trans, folder_id, check_accessible=True)
        self.check_user_can_add_to_library_item(trans, library_folder, check_accessible=False)
        create_params["parent"] = library_folder
    else:
        # BUG FIX: the status code must be set on the response object
        # (trans.response.status), as done everywhere else in this file;
        # `trans.status` set a dead attribute and the 501 was never sent.
        trans.response.status = 501
        return
    dataset_collection_instance = self.__service(trans).create(trans=trans, **create_params)
    return dictify_dataset_collection_instance(dataset_collection_instance, security=trans.security, parent=create_params["parent"])
def __create_dataset_collection(self, trans, history, payload, **kwd):
    """Create or copy a dataset collection in ``history`` and serialize it for the API response."""
    # 'source' may arrive either as a query parameter or inside the JSON body.
    source = kwd.get("source", payload.get("source", "new_collection"))
    service = trans.app.dataset_collections_service
    if source == "hdca":
        target = payload.get('content', None)
        if target is None:
            raise exceptions.RequestParameterMissingException("'content' id of target to copy is missing")
        instance = service.copy(trans=trans, parent=history, source="hdca", encoded_source_id=target)
    elif source == "new_collection":
        instance = service.create(trans, parent=history, **api_payload_to_create_params(payload))
    else:
        message = "Invalid 'source' parameter in request %s" % source
        raise exceptions.RequestParameterInvalidException(message)
    # Honor caller-requested serialization (keys/view) via the secondary serializer.
    if 'view' in kwd or 'keys' in kwd:
        serialization_params = self._parse_serialization_params(kwd, 'detailed')
        return self.hdca_serializer.serialize_to_view(instance, user=trans.user, trans=trans, **serialization_params)
    return self.__collection_dict(trans, instance, view="element")
def create(self, trans, payload, **kwd):
    """
    * POST /api/dataset_collections:
        create a new dataset collection instance.

    :type  payload: dict
    :param payload: (optional) dictionary structure containing:
        * collection_type: dataset collection type to create.
        * instance_type:   Instance type - 'history' or 'library'.
        * name:            the new dataset collection's name
        * datasets:        object describing datasets for collection

    :rtype: dict
    :returns: element view of new dataset collection
    """
    # TODO: Error handling...
    create_params = api_payload_to_create_params(payload)
    instance_type = payload.pop("instance_type", "history")
    if instance_type == "history":
        history_id = payload.get('history_id')
        history_id = decode_id(self.app, history_id)
        # Ownership check: collections may only be created in the user's own histories.
        history = self.history_manager.get_owned(history_id, trans.user, current_history=trans.history)
        create_params["parent"] = history
    elif instance_type == "library":
        folder_id = payload.get('folder_id')
        library_folder = self.get_library_folder(trans, folder_id, check_accessible=True)
        self.check_user_can_add_to_library_item(trans, library_folder, check_accessible=False)
        create_params["parent"] = library_folder
    else:
        # BUG FIX: the status code must be set on the response object
        # (trans.response.status), as done everywhere else in this file;
        # `trans.status` set a dead attribute and the 501 was never sent.
        trans.response.status = 501
        return
    dataset_collection_instance = self.__service(trans).create(trans=trans, **create_params)
    return dictify_dataset_collection_instance(dataset_collection_instance, security=trans.security, parent=create_params["parent"])
def __create_dataset_collection(self, trans, history, payload, **kwd):
    """Build a new dataset collection in ``history``, or clone one from an existing HDCA.

    The result is serialized with the secondary serializer when the caller
    asked for specific ``keys`` or a ``view``; otherwise the default
    element-view dictionary is returned.
    """
    requested_source = kwd.get("source", payload.get("source", "new_collection"))
    collections_service = trans.app.dataset_collections_service
    if requested_source == "new_collection":
        creation_kwds = api_payload_to_create_params(payload)
        hdca = collections_service.create(trans, parent=history, **creation_kwds)
    elif requested_source == "hdca":
        source_id = payload.get('content', None)
        if source_id is None:
            raise exceptions.RequestParameterMissingException("'content' id of target to copy is missing")
        hdca = collections_service.copy(trans=trans, parent=history, source="hdca", encoded_source_id=source_id)
    else:
        raise exceptions.RequestParameterInvalidException("Invalid 'source' parameter in request %s" % requested_source)
    use_secondary_serializer = 'view' in kwd or 'keys' in kwd
    if use_secondary_serializer:
        # Caller asked for explicit keys/view -- delegate to the HDCA serializer.
        return self.hdca_serializer.serialize_to_view(hdca, user=trans.user, trans=trans, **self._parse_serialization_params(kwd, 'detailed'))
    return self.__collection_dict(trans, hdca, view="element")
def create(self, trans: ProvidesHistoryContext, payload: CreateNewCollectionPayload) -> HDCADetailed:
    """
    Create a new dataset collection instance.

    :param payload: details of the collection to create: collection_type,
        instance_type ('history' or 'library'), name, and the datasets /
        element identifiers making up the collection.
    :returns: element view of the new dataset collection
    """
    # TODO: Error handling...
    create_params = api_payload_to_create_params(payload.dict(exclude_unset=True))
    instance_type = payload.instance_type
    if instance_type == DatasetCollectionInstanceType.history:
        if payload.history_id is None:
            raise exceptions.RequestParameterInvalidException("Parameter history_id is required.")
        decoded_history_id = self.decode_id(payload.history_id)
        target_history = self.history_manager.get_owned(decoded_history_id, trans.user, current_history=trans.history)
        # The manager needs the history both as the parent container and as the
        # explicit 'history' keyword.
        create_params["parent"] = target_history
        create_params["history"] = target_history
    elif instance_type == DatasetCollectionInstanceType.library:
        folder = self.get_library_folder(trans, payload.folder_id, check_accessible=True)
        self.check_user_can_add_to_library_item(trans, folder, check_accessible=False)
        create_params["parent"] = folder
    else:
        raise exceptions.RequestParameterInvalidException()
    instance = self.collection_manager.create(trans=trans, **create_params)
    return dictify_dataset_collection_instance(
        instance,
        security=trans.security,
        url_builder=trans.url_builder,
        parent=create_params["parent"],
    )
def __create_dataset_collection(self, trans, history, payload, **kwd):
    """Create an HDCA from the payload, or copy an existing HDCA, returning its element view."""
    source = kwd.get("source", "new_collection")
    if source not in ("new_collection", "hdca"):
        message = "Invalid 'source' parameter in request %s" % source
        raise exceptions.RequestParameterInvalidException(message)
    service = trans.app.dataset_collections_service
    if source == "new_collection":
        collection = service.create(trans, parent=history, **api_payload_to_create_params(payload))
    else:
        # source == "hdca": copy an existing collection identified by its encoded id.
        content = payload.get("content", None)
        if content is None:
            raise exceptions.RequestParameterMissingException("'content' id of target to copy is missing")
        collection = service.copy(trans=trans, parent=history, source="hdca", encoded_source_id=content)
    return self.__collection_dict(trans, collection, view="element")
def create(self, trans, library_id, payload, **kwd):
    """
    create( self, trans, library_id, payload, **kwd )
    * POST /api/libraries/{library_id}/contents:
        create a new library file or folder

    To copy an HDA into a library send ``create_type`` of 'file' and
    the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``).

    To copy an HDCA into a library send ``create_type`` of 'file' and
    the HDCA's encoded id in ``from_hdca_id`` (and optionally ``ldda_message``).

    :type  library_id: str
    :param library_id: the encoded id of the library where to create the new item
    :type  payload: dict
    :param payload: dictionary structure containing:
        * folder_id: the encoded id of the parent folder of the new item
        * create_type: the type of item to create ('file', 'folder' or 'collection')
        * from_hda_id: (optional, only if create_type is 'file') the encoded id of an accessible HDA to copy into the library
        * ldda_message: (optional) the new message attribute of the LDDA created
        * extended_metadata: (optional) sub-dictionary containing any extended metadata to associate with the item
        * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths'
        * server_dir: (optional, only if upload_option is 'upload_directory') relative path of the subdirectory of Galaxy ``library_import_dir`` to upload. All and only the files (i.e. no subdirectories) contained in the specified directory will be uploaded.
        * filesystem_paths: (optional, only if upload_option is 'upload_paths' and the user is an admin) file paths on the Galaxy server to upload to the library, one file per line
        * link_data_only: (optional, only when upload_option is 'upload_directory' or 'upload_paths') either 'copy_files' (default) or 'link_to_files'. Setting to 'link_to_files' symlinks instead of copying the files
        * name: (optional, only if create_type is 'folder') name of the folder to create
        * description: (optional, only if create_type is 'folder') description of the folder to create
        * tag_using_filename: (optional) create tags on datasets using the file's original name

    :returns: a dictionary describing the new item unless ``from_hdca_id`` is supplied,
              in that case a list of such dictionaries is returned.
    :rtype: object
    """
    # Validate the two required parameters up front; errors are reported as
    # plain strings with a 400 status (legacy API convention in this module).
    if 'create_type' not in payload:
        trans.response.status = 400
        return "Missing required 'create_type' parameter."
    else:
        create_type = payload.pop('create_type')
    if create_type not in ('file', 'folder', 'collection'):
        trans.response.status = 400
        return "Invalid value for 'create_type' parameter ( %s ) specified." % create_type
    if 'folder_id' not in payload:
        trans.response.status = 400
        return "Missing required 'folder_id' parameter."
    else:
        folder_id = payload.pop('folder_id')
    # folder_id may carry a type prefix; split it into (class_name, raw id).
    class_name, folder_id = self.__decode_library_content_id(folder_id)
    try:
        # security is checked in the downstream controller
        parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False)
    except Exception as e:
        # NOTE(review): exceptions here are reported as a bare string body
        # without setting an error status -- presumably intentional legacy
        # behavior; confirm before tightening.
        return str(e)
    # The rest of the security happens in the library_common controller.
    real_folder_id = trans.security.encode_id(parent.id)
    # are we copying an HDA to the library folder?
    #   we'll need the id and any message to attach, then branch to that private function
    from_hda_id, from_hdca_id, ldda_message = (payload.pop('from_hda_id', None), payload.pop('from_hdca_id', None), payload.pop('ldda_message', ''))
    if create_type == 'file':
        if from_hda_id:
            return self._copy_hda_to_library_folder(trans, self.hda_manager, self.decode_id(from_hda_id), real_folder_id, ldda_message)
        if from_hdca_id:
            return self._copy_hdca_to_library_folder(trans, self.hda_manager, self.decode_id(from_hdca_id), real_folder_id, ldda_message)
    # check for extended metadata, store it and pop it out of the param
    # otherwise sanitize_param will have a fit
    ex_meta_payload = payload.pop('extended_metadata', None)
    # Now create the desired content object, either file or folder.
    if create_type == 'file':
        # Delegate the actual upload to the legacy library_common controller.
        status, output = trans.webapp.controllers['library_common'].upload_library_dataset(trans, 'api', library_id, real_folder_id, **payload)
    elif create_type == 'folder':
        status, output = trans.webapp.controllers['library_common'].create_folder(trans, 'api', real_folder_id, library_id, **payload)
    elif create_type == 'collection':
        # Not delegating to library_common, so need to check access to parent
        # folder here.
        self.check_user_can_add_to_library_item(trans, parent, check_accessible=True)
        create_params = api_payload_to_create_params(payload)
        create_params['parent'] = parent
        service = trans.app.dataset_collections_service
        dataset_collection_instance = service.create(**create_params)
        # Collections return early with a one-element list (see :returns:).
        return [dictify_dataset_collection_instance(dataset_collection_instance, security=trans.security, parent=parent)]
    if status != 200:
        # Propagate the delegated controller's error status and message body.
        trans.response.status = status
        return output
    else:
        rval = []
        for v in output.values():
            if ex_meta_payload is not None:
                # If there is extended metadata, store it, attach it to the dataset, and index it
                ex_meta = ExtendedMetadata(ex_meta_payload)
                trans.sa_session.add(ex_meta)
                v.extended_metadata = ex_meta
                trans.sa_session.add(v)
                trans.sa_session.flush()
                for path, value in self._scan_json_block(ex_meta_payload):
                    meta_i = ExtendedMetadataIndex(ex_meta, path, value)
                    trans.sa_session.add(meta_i)
                trans.sa_session.flush()
            # Exact-type check (not isinstance) on LDDAs before reporting the
            # library dataset -- NOTE(review): presumably deliberate; confirm.
            if type(v) == trans.app.model.LibraryDatasetDatasetAssociation:
                v = v.library_dataset
            encoded_id = trans.security.encode_id(v.id)
            if create_type == 'folder':
                # Folder ids are namespaced with an 'F' prefix in this API.
                encoded_id = 'F' + encoded_id
            rval.append(dict(id=encoded_id, name=v.name, url=url_for('library_content', library_id=library_id, id=encoded_id)))
        return rval
def create(self, trans, library_id, payload, **kwd):
    """
    create( self, trans, library_id, payload, **kwd )
    * POST /api/libraries/{library_id}/contents:
        create a new library file or folder

    To copy an HDA into a library send ``create_type`` of 'file' and
    the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``).

    To copy an HDCA into a library send ``create_type`` of 'file' and
    the HDCA's encoded id in ``from_hdca_id`` (and optionally ``ldda_message``).

    :type  library_id: str
    :param library_id: the encoded id of the library where to create the new item
    :type  payload: dict
    :param payload: dictionary structure containing:
        * folder_id: the encoded id of the parent folder of the new item
        * create_type: the type of item to create ('file', 'folder' or 'collection')
        * from_hda_id: (optional, only if create_type is 'file') the encoded id of an accessible HDA to copy into the library
        * ldda_message: (optional) the new message attribute of the LDDA created
        * extended_metadata: (optional) sub-dictionary containing any extended metadata to associate with the item
        * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths'
        * server_dir: (optional, only if upload_option is 'upload_directory') relative path of the subdirectory of Galaxy ``library_import_dir`` to upload. All and only the files (i.e. no subdirectories) contained in the specified directory will be uploaded.
        * filesystem_paths: (optional, only if upload_option is 'upload_paths' and the user is an admin) file paths on the Galaxy server to upload to the library, one file per line
        * link_data_only: (optional, only when upload_option is 'upload_directory' or 'upload_paths') either 'copy_files' (default) or 'link_to_files'. Setting to 'link_to_files' symlinks instead of copying the files
        * name: (optional, only if create_type is 'folder') name of the folder to create
        * description: (optional, only if create_type is 'folder') description of the folder to create
        * tag_using_filename: (optional) create tags on datasets using the file's original name

    :returns: a dictionary describing the new item unless ``from_hdca_id`` is supplied,
              in that case a list of such dictionaries is returned.
    :rtype: object
    """
    # Required-parameter validation; errors are returned as plain strings
    # with trans.response.status set (legacy API convention in this module).
    if 'create_type' not in payload:
        trans.response.status = 400
        return "Missing required 'create_type' parameter."
    else:
        create_type = payload.pop('create_type')
    if create_type not in ('file', 'folder', 'collection'):
        trans.response.status = 400
        return "Invalid value for 'create_type' parameter ( %s ) specified." % create_type
    if 'folder_id' not in payload:
        trans.response.status = 400
        return "Missing required 'folder_id' parameter."
    else:
        folder_id = payload.pop('folder_id')
    # Strip the content-type prefix off the encoded folder id.
    class_name, folder_id = self.__decode_library_content_id(folder_id)
    try:
        # security is checked in the downstream controller
        parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False)
    except Exception as e:
        # NOTE(review): error is returned as a bare string without an error
        # status -- looks like intentional legacy behavior; confirm.
        return str(e)
    # The rest of the security happens in the library_common controller.
    real_folder_id = trans.security.encode_id(parent.id)
    # are we copying an HDA to the library folder?
    #   we'll need the id and any message to attach, then branch to that private function
    from_hda_id, from_hdca_id, ldda_message = (payload.pop('from_hda_id', None), payload.pop('from_hdca_id', None), payload.pop('ldda_message', ''))
    if create_type == 'file':
        # HDA/HDCA copy short-circuits the rest of the creation flow.
        if from_hda_id:
            return self._copy_hda_to_library_folder(trans, self.hda_manager, self.decode_id(from_hda_id), real_folder_id, ldda_message)
        if from_hdca_id:
            return self._copy_hdca_to_library_folder(trans, self.hda_manager, self.decode_id(from_hdca_id), real_folder_id, ldda_message)
    # check for extended metadata, store it and pop it out of the param
    # otherwise sanitize_param will have a fit
    ex_meta_payload = payload.pop('extended_metadata', None)
    # Now create the desired content object, either file or folder.
    if create_type == 'file':
        status, output = trans.webapp.controllers['library_common'].upload_library_dataset(trans, 'api', library_id, real_folder_id, **payload)
    elif create_type == 'folder':
        status, output = trans.webapp.controllers['library_common'].create_folder(trans, 'api', real_folder_id, library_id, **payload)
    elif create_type == 'collection':
        # Not delegating to library_common, so need to check access to parent
        # folder here.
        self.check_user_can_add_to_library_item(trans, parent, check_accessible=True)
        create_params = api_payload_to_create_params(payload)
        create_params['parent'] = parent
        service = trans.app.dataset_collections_service
        dataset_collection_instance = service.create(**create_params)
        # Collection creation returns early with a one-element list.
        return [dictify_dataset_collection_instance(dataset_collection_instance, security=trans.security, parent=parent)]
    if status != 200:
        # Bubble up the delegated controller's status code and message.
        trans.response.status = status
        return output
    else:
        rval = []
        for v in output.values():
            if ex_meta_payload is not None:
                # If there is extended metadata, store it, attach it to the dataset, and index it
                ex_meta = ExtendedMetadata(ex_meta_payload)
                trans.sa_session.add(ex_meta)
                v.extended_metadata = ex_meta
                trans.sa_session.add(v)
                trans.sa_session.flush()
                for path, value in self._scan_json_block(ex_meta_payload):
                    meta_i = ExtendedMetadataIndex(ex_meta, path, value)
                    trans.sa_session.add(meta_i)
                trans.sa_session.flush()
            # Exact-type check (not isinstance) before dereferencing the LDDA's
            # library dataset -- NOTE(review): presumably deliberate; confirm.
            if type(v) == trans.app.model.LibraryDatasetDatasetAssociation:
                v = v.library_dataset
            encoded_id = trans.security.encode_id(v.id)
            if create_type == 'folder':
                # Folder ids carry an 'F' prefix in this API's id scheme.
                encoded_id = 'F' + encoded_id
            rval.append(dict(id=encoded_id, name=v.name, url=url_for('library_content', library_id=library_id, id=encoded_id)))
        return rval
def __create_dataset_collection(self, trans, history, payload, **kwd):
    """Create an HDCA in a history from a list of element identifiers.

    :param history: history the new hdca should be added to
    :type history: History
    :param payload: dictionary structure containing:
        * collection_type: type (and depth) of the new collection
        * element_identifiers: list of elements that should be in the new
          collection; each element is a dict with ``name``, ``src``
          ('hda' / 'ldda' / 'new_collection') and ``id`` keys, and may be
          nested depending on the collection_type
        * name: name of the collection
        * hide_source_items: whether to mark the original hdas as hidden
    :type payload: dict
    :param kwd: may carry ``source`` ('new_collection' to build from the
        payload, 'hdca' to copy an existing collection, also accepted inside
        the payload itself) and optional serialization controls
        ('view' / 'keys').

    :returns: dataset collection information
    :rtype: dict
    :raises: RequestParameterInvalidException, RequestParameterMissingException
    """
    source = kwd.get("source", payload.get("source", "new_collection"))

    def convert_lddas(element_identifiers):
        # Recursively rewrite 'ldda' element identifiers into 'hda' ones,
        # since history collections cannot reference library items directly.
        for ei in element_identifiers:
            src = ei.get("src")
            if src == "ldda":
                # Convert lddas to hdas since there is no direct representation of library items in history.
                hda = self.__create_hda_from_ldda(trans, ei['id'], history)
                ei["id"] = trans.security.encode_id(hda.id)
                ei["src"] = "hda"
            elif src == "new_collection" and "element_identifiers" in ei:
                # Nested collection definition: descend into its elements.
                convert_lddas(ei["element_identifiers"])

    service = trans.app.dataset_collections_service
    if source == "new_collection":
        create_params = api_payload_to_create_params(payload)
        # Mutates the payload's element identifiers in place before creation.
        convert_lddas(payload.get("element_identifiers", []))
        dataset_collection_instance = service.create(
            trans,
            parent=history,
            **create_params
        )
    elif source == "hdca":
        content = payload.get('content', None)
        if content is None:
            raise exceptions.RequestParameterMissingException("'content' id of target to copy is missing")
        copy_elements = payload.get('copy_elements', False)
        dataset_collection_instance = service.copy(
            trans=trans,
            parent=history,
            source="hdca",
            encoded_source_id=content,
            copy_elements=copy_elements,
        )
    else:
        message = "Invalid 'source' parameter in request %s" % source
        raise exceptions.RequestParameterInvalidException(message)
    # if the consumer specified keys or view, use the secondary serializer
    if 'view' in kwd or 'keys' in kwd:
        return self.hdca_serializer.serialize_to_view(dataset_collection_instance, user=trans.user, trans=trans, **self._parse_serialization_params(kwd, 'detailed'))
    return self.__collection_dict(trans, dataset_collection_instance, view="element")
# Now create the desired content object, either file or folder. if create_type == 'file': status, output = trans.webapp.controllers[ 'library_common'].upload_library_dataset( trans, 'api', library_id, real_folder_id, **payload) elif create_type == 'folder': status, output = trans.webapp.controllers[ 'library_common'].create_folder(trans, 'api', real_folder_id, library_id, **payload) elif create_type == 'collection': # Not delegating to library_common, so need to check access to parent # folder here. self.check_user_can_add_to_library_item(trans, parent, check_accessible=True) create_params = api_payload_to_create_params(payload) create_params['parent'] = parent service = trans.app.dataset_collections_service dataset_collection_instance = service.create(**create_params) return [ dictify_dataset_collection_instance( dataset_collection_instance, security=trans.security, parent=parent) ] if status != 200: trans.response.status = status return output else: rval = [] for k, v in output.items():
def __create_dataset_collection(self, trans, history, payload, **kwd):
    """Create an HDCA in a history from the list of element identifiers.

    :param history: history the new hdca should be added to
    :type history: History
    :param payload: dictionary structure containing:
        * collection_type: type (and depth) of the new collection
        * element_identifiers: list of elements that should be in the new
          collection; each element is a dict with ``name``, ``src``
          ('hda' / 'ldda'), ``id`` and optional ``tags`` keys, and may be
          nested depending on the collection_type
        * name: name of the collection
        * hide_source_items: whether to mark the original hdas as hidden
        * copy_elements: whether to copy HDAs when creating the collection
    :type payload: dict
    :param kwd: may carry ``source`` ('new_collection' to build from the
        payload, 'hdca' to copy an existing collection, also accepted inside
        the payload itself) and optional serialization controls
        ('view' / 'keys').

    :returns: dataset collection information
    :rtype: dict
    :raises: RequestParameterInvalidException, RequestParameterMissingException
    """
    source = kwd.get("source", payload.get("source", "new_collection"))
    service = trans.app.dataset_collections_service
    if source == "new_collection":
        create_params = api_payload_to_create_params(payload)
        dataset_collection_instance = service.create(
            trans,
            parent=history,
            **create_params
        )
    elif source == "hdca":
        content = payload.get('content', None)
        if content is None:
            raise exceptions.RequestParameterMissingException("'content' id of target to copy is missing")
        copy_elements = payload.get('copy_elements', False)
        dataset_collection_instance = service.copy(
            trans=trans,
            parent=history,
            source="hdca",
            encoded_source_id=content,
            copy_elements=copy_elements,
        )
    else:
        message = "Invalid 'source' parameter in request %s" % source
        raise exceptions.RequestParameterInvalidException(message)
    # if the consumer specified keys or view, use the secondary serializer
    if 'view' in kwd or 'keys' in kwd:
        return self.hdca_serializer.serialize_to_view(dataset_collection_instance, user=trans.user, trans=trans, **self._parse_serialization_params(kwd, 'detailed'))
    return self.__collection_dict(trans, dataset_collection_instance, view="element")
# NOTE(review): fragment of an older library-contents `create` method -- it
# begins mid-function at a `return` and ends mid-statement; names such as
# trans, from_hda_id, library_id, real_folder_id, ldda_message, create_type,
# payload and parent are bound earlier in the enclosing function.
return self._copy_hda_to_library_folder(trans, from_hda_id, library_id, real_folder_id, ldda_message)
# check for extended metadata, store it and pop it out of the param
# otherwise sanitize_param will have a fit
ex_meta_payload = payload.pop('extended_metadata', None)
# Now create the desired content object, either file or folder.
if create_type == 'file':
    # Delegate file upload to the legacy library_common controller.
    status, output = trans.webapp.controllers['library_common'].upload_library_dataset(trans, 'api', library_id, real_folder_id, **payload)
elif create_type == 'folder':
    status, output = trans.webapp.controllers['library_common'].create_folder(trans, 'api', real_folder_id, library_id, **payload)
elif create_type == 'collection':
    # Not delegating to library_common, so need to check access to parent
    # folder here.
    self.check_user_can_add_to_library_item(trans, parent, check_accessible=True)
    create_params = api_payload_to_create_params(payload)
    create_params['parent'] = parent
    service = trans.app.dataset_collections_service
    dataset_collection_instance = service.create(**create_params)
    # Collection creation returns early with a one-element list.
    return [dictify_dataset_collection_instance(dataset_collection_instance, security=trans.security, parent=parent)]
if status != 200:
    # Propagate the delegated controller's error status and body.
    trans.response.status = status
    return output
else:
    rval = []
    for v in output.values():
        if ex_meta_payload is not None:
            # If there is extended metadata, store it, attach it to the dataset, and index it
            ex_meta = ExtendedMetadata(ex_meta_payload)
            trans.sa_session.add(ex_meta)
            # NOTE(review): fragment ends here, mid loop body.
            v.extended_metadata = ex_meta