def _upload_dataset(self, trans, library_id, folder_id, replace_dataset=None, **kwd):
    # Set up the traditional tool state/params
    cntrller = 'api'
    tool_id = 'upload1'
    message = None
    file_type = kwd.get('file_type')
    try:
        upload_common.validate_datatype_extension(datatypes_registry=trans.app.datatypes_registry, ext=file_type)
    except RequestParameterInvalidException as e:
        return (400, util.unicodify(e))
    tool = trans.app.toolbox.get_tool(tool_id)
    state = tool.new_state(trans)
    populate_state(trans, tool.inputs, kwd, state.inputs)
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.items():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    # Library-specific params
    server_dir = kwd.get('server_dir', '')
    upload_option = kwd.get('upload_option', 'upload_file')
    response_code = 200
    if upload_option == 'upload_directory':
        full_dir, import_dir_desc = validate_server_directory_upload(trans, server_dir)
        message = 'Select a directory'
    elif upload_option == 'upload_paths':
        # Library API already checked this - following check isn't actually needed.
        validate_path_upload(trans)
    # Some error handling should be added to this method.
    try:
        # FIXME: instead of passing params here ( which have been processed by util.Params() ), the original kwd
        # should be passed so that complex objects that may have been included in the initial request remain.
        library_bunch = upload_common.handle_library_params(trans, kwd, folder_id, replace_dataset)
    except Exception:
        response_code = 500
        message = "Unable to parse upload parameters, please report this error."
    # Proceed with (mostly) regular upload processing if we're still errorless
    if response_code == 200:
        if upload_option == 'upload_file':
            tool_params = upload_common.persist_uploads(tool_params, trans)
            uploaded_datasets = upload_common.get_uploaded_datasets(trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch)
        elif upload_option == 'upload_directory':
            uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets(trans, kwd, full_dir, import_dir_desc, library_bunch, response_code, message)
        elif upload_option == 'upload_paths':
            uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(trans, kwd, library_bunch, response_code, message)
        if upload_option == 'upload_file' and not uploaded_datasets:
            response_code = 400
            message = 'Select a file, enter a URL or enter text'
    if response_code != 200:
        return (response_code, message)
    json_file_path = upload_common.create_paramfile(trans, uploaded_datasets)
    data_list = [ud.data for ud in uploaded_datasets]
    job_params = {}
    job_params['link_data_only'] = json.dumps(kwd.get('link_data_only', 'copy_files'))
    job_params['uuid'] = json.dumps(kwd.get('uuid', None))
    job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params)
    trans.sa_session.add(job)
    trans.sa_session.flush()
    return output
def _upload_dataset(self, trans, library_id, folder_id, replace_dataset=None, **kwd):
    # Set up the traditional tool state/params
    cntrller = 'api'
    tool_id = 'upload1'
    message = None
    tool = trans.app.toolbox.get_tool(tool_id)
    state = tool.new_state(trans)
    populate_state(trans, tool.inputs, kwd, state.inputs)
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.items():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    # Library-specific params
    server_dir = kwd.get('server_dir', '')
    upload_option = kwd.get('upload_option', 'upload_file')
    response_code = 200
    if upload_option == 'upload_directory':
        full_dir, import_dir_desc = validate_server_directory_upload(trans, server_dir)
        message = 'Select a directory'
    elif upload_option == 'upload_paths':
        # Library API already checked this - following check isn't actually needed.
        validate_path_upload(trans)
    # Some error handling should be added to this method.
    try:
        # FIXME: instead of passing params here ( which have been processed by util.Params() ), the original kwd
        # should be passed so that complex objects that may have been included in the initial request remain.
        library_bunch = upload_common.handle_library_params(trans, kwd, folder_id, replace_dataset)
    except Exception:
        response_code = 500
        message = "Unable to parse upload parameters, please report this error."
    # Proceed with (mostly) regular upload processing if we're still errorless
    if response_code == 200:
        precreated_datasets = upload_common.get_precreated_datasets(trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=cntrller)
        if upload_option == 'upload_file':
            tool_params = upload_common.persist_uploads(tool_params, trans)
            uploaded_datasets = upload_common.get_uploaded_datasets(trans, cntrller, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch)
        elif upload_option == 'upload_directory':
            uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets(trans, kwd, full_dir, import_dir_desc, library_bunch, response_code, message)
        elif upload_option == 'upload_paths':
            uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(trans, kwd, library_bunch, response_code, message)
        upload_common.cleanup_unused_precreated_datasets(precreated_datasets)
        if upload_option == 'upload_file' and not uploaded_datasets:
            response_code = 400
            message = 'Select a file, enter a URL or enter text'
    if response_code != 200:
        return (response_code, message)
    json_file_path = upload_common.create_paramfile(trans, uploaded_datasets)
    data_list = [ud.data for ud in uploaded_datasets]
    job_params = {}
    job_params['link_data_only'] = json.dumps(kwd.get('link_data_only', 'copy_files'))
    job_params['uuid'] = json.dumps(kwd.get('uuid', None))
    job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params)
    trans.sa_session.add(job)
    trans.sa_session.flush()
    return output
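# --- Illustrative note (not Galaxy source): both _upload_dataset variants above
# return either the created outputs on success or a (response_code, message)
# tuple on failure, so a caller has to branch on the return type. The helper
# name below is hypothetical; `trans` is assumed from the controller context.
def _handle_upload_result(trans, result):
    if isinstance(result, tuple):
        # e.g. (400, 'Select a file, enter a URL or enter text')
        response_code, message = result
        trans.response.status = response_code
        return {'error': message}
    # on success the return value is the output datasets from create_job()
    return result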
def create_dataset( name ):
    ud = Bunch( name=name, file_type=None, dbkey=None )
    if nonfile_params.get( 'folder_id', False ):
        replace_id = nonfile_params.get( 'replace_id', None )
        if replace_id not in [ None, 'None' ]:
            replace_dataset = trans.sa_session.query( l.LibraryDataset ).get( int( replace_id ) )
        else:
            replace_dataset = None
        library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
    else:
        library_bunch = None
    return upload_common.new_upload( trans, ud, library_bunch=library_bunch, state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )
def create_dataset( name ):
    ud = Bunch( name=name, file_type=None, dbkey=None )
    if nonfile_params.get( 'folder_id', False ):
        replace_id = nonfile_params.get( 'replace_id', None )
        if replace_id not in [ None, 'None' ]:
            replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( replace_id ) )
        else:
            replace_dataset = None
        # FIXME: instead of passing params here ( which have been processed by util.Params() ), the original kwd
        # should be passed so that complex objects that may have been included in the initial request remain.
        library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
    else:
        library_bunch = None
    return upload_common.new_upload( trans, cntrller, ud, library_bunch=library_bunch, state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )
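# --- Illustrative note (not Galaxy source): the two create_dataset variants
# above differ in how replace_id is interpreted. The older one treats it as a
# plain integer database id; the newer one expects an id encoded by Galaxy's
# id cipher and decodes it first. The id values below are placeholders.
replace_dataset_old = trans.sa_session.query( l.LibraryDataset ).get( int( '42' ) )
replace_dataset_new = trans.sa_session.query( trans.app.model.LibraryDataset ).get(
    trans.security.decode_id( 'f2db41e1fa331b3e' ) )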
def load( self, trans, **kwd ):
    """
    load( self, trans, **kwd ):
    * POST /api/libraries/datasets

    Load dataset from the given source into the library.
    Source can be:
        user directory - root folder specified in galaxy.ini as "$user_library_import_dir"
            example path: path/to/galaxy/$user_library_import_dir/[email protected]/{user can browse everything here}
            the folder with the user login has to be created beforehand
        (admin)import directory - root folder specified in galaxy ini as "$library_import_dir"
            example path: path/to/galaxy/$library_import_dir/{admin can browse everything here}
        (admin)any absolute or relative path - option allowed with "allow_library_path_paste" in galaxy.ini

    :param  encoded_folder_id:  the encoded id of the folder to import dataset(s) to
    :type   encoded_folder_id:  an encoded id string
    :param  source:             source the datasets should be loaded from
    :type   source:             str
    :param  link_data:          flag whether to link the dataset to data or copy it to Galaxy, defaults to copy
                                while linking is set to True all symlinks will be resolved _once_
    :type   link_data:          bool
    :param  preserve_dirs:      flag whether to preserve the directory structure when importing dir
                                if False only datasets will be imported
    :type   preserve_dirs:      bool
    :param  file_type:          file type of the loaded datasets, defaults to 'auto' (autodetect)
    :type   file_type:          str
    :param  dbkey:              dbkey of the loaded genome, defaults to '?' (unknown)
    :type   dbkey:              str

    :returns:   dict containing information about the created upload job
    :rtype:     dictionary
    """
    kwd[ 'space_to_tab' ] = 'False'
    kwd[ 'to_posix_lines' ] = 'True'
    kwd[ 'dbkey' ] = kwd.get( 'dbkey', '?' )
    kwd[ 'file_type' ] = kwd.get( 'file_type', 'auto' )
    kwd[ 'link_data_only' ] = 'link_to_files' if util.string_as_bool( kwd.get( 'link_data', False ) ) else 'copy_files'
    encoded_folder_id = kwd.get( 'encoded_folder_id', None )
    if encoded_folder_id is not None:
        folder_id = self.folder_manager.cut_and_decode( trans, encoded_folder_id )
    else:
        raise exceptions.RequestParameterMissingException( 'The required attribute encoded_folder_id is missing.' )
    path = kwd.get( 'path', None )
    if path is None:
        raise exceptions.RequestParameterMissingException( 'The required attribute path is missing.' )
    folder = self.folder_manager.get( trans, folder_id )
    source = kwd.get( 'source', None )
    if source not in [ 'userdir_file', 'userdir_folder', 'importdir_file', 'importdir_folder', 'admin_path' ]:
        raise exceptions.RequestParameterMissingException( 'You have to specify "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder". ' )
    if source in [ 'importdir_file', 'importdir_folder' ]:
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException( 'Only admins can import from importdir.' )
        if not trans.app.config.library_import_dir:
            raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow admins to import into library from importdir.' )
        import_base_dir = trans.app.config.library_import_dir
        path = os.path.join( import_base_dir, path )
    if source in [ 'userdir_file', 'userdir_folder' ]:
        user_login = trans.user.email
        user_base_dir = trans.app.config.user_library_import_dir
        if user_base_dir is None:
            raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow upload from user directories.' )
        full_dir = os.path.join( user_base_dir, user_login )
        if not path.lower().startswith( full_dir.lower() ):
            path = os.path.join( full_dir, path )
        if not os.path.exists( path ):
            raise exceptions.RequestParameterInvalidException( 'Given path does not exist on the host.' )
        if not self.folder_manager.can_add_item( trans, folder ):
            raise exceptions.InsufficientPermissionsException( 'You do not have proper permission to add items to the given folder.' )
    if source == 'admin_path':
        if not trans.app.config.allow_library_path_paste:
            raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow admins to import into library from path.' )
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException( 'Only admins can import from path.' )
    # Set up the traditional tool state/params
    tool_id = 'upload1'
    tool = trans.app.toolbox.get_tool( tool_id )
    state = tool.new_state( trans )
    tool.update_state( trans, tool.inputs_by_page[ 0 ], state.inputs, kwd )
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input in tool.inputs.itervalues():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append( input )
    library_bunch = upload_common.handle_library_params( trans, {}, trans.security.encode_id( folder.id ) )
    abspath_datasets = []
    kwd[ 'filesystem_paths' ] = path
    if source in [ 'importdir_folder' ]:
        kwd[ 'filesystem_paths' ] = os.path.join( import_base_dir, path )
    params = util.Params( kwd )
    # user wants to import one file only
    if source in [ "userdir_file", "importdir_file" ]:
        file = os.path.abspath( path )
        abspath_datasets.append( trans.webapp.controllers[ 'library_common' ].make_library_uploaded_dataset(
            trans, 'api', params, os.path.basename( file ), file, 'server_dir', library_bunch ) )
    # user wants to import whole folder
    if source == "userdir_folder":
        uploaded_datasets_bunch = trans.webapp.controllers[ 'library_common' ].get_path_paste_uploaded_datasets(
            trans, 'api', params, library_bunch, 200, '' )
        uploaded_datasets = uploaded_datasets_bunch[ 0 ]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound( 'Given folder does not contain any datasets.' )
        for ud in uploaded_datasets:
            ud.path = os.path.abspath( ud.path )
            abspath_datasets.append( ud )
    # user wants to import from path
    if source in [ "admin_path", "importdir_folder" ]:
        # validate the path is within root
        uploaded_datasets_bunch = trans.webapp.controllers[ 'library_common' ].get_path_paste_uploaded_datasets(
            trans, 'api', params, library_bunch, 200, '' )
        uploaded_datasets = uploaded_datasets_bunch[ 0 ]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound( 'Given folder does not contain any datasets.' )
        for ud in uploaded_datasets:
            ud.path = os.path.abspath( ud.path )
            abspath_datasets.append( ud )
    json_file_path = upload_common.create_paramfile( trans, abspath_datasets )
    data_list = [ ud.data for ud in abspath_datasets ]
    job, output = upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=folder )
    # HACK: Prevent outputs_to_working_directory from overwriting inputs when "linking"
    job.add_parameter( 'link_data_only', dumps( kwd.get( 'link_data_only', 'copy_files' ) ) )
    job.add_parameter( 'uuid', dumps( kwd.get( 'uuid', None ) ) )
    trans.sa_session.add( job )
    trans.sa_session.flush()
    job_dict = job.to_dict()
    job_dict[ 'id' ] = trans.security.encode_id( job_dict[ 'id' ] )
    return job_dict
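# --- Hypothetical client-side usage of the POST /api/libraries/datasets
# endpoint documented in the docstring above, via the `requests` library.
# The Galaxy URL, API key, encoded folder id and path are placeholders.
import requests

payload = {
    'key': 'YOUR_API_KEY',             # placeholder API key
    'encoded_folder_id': 'Fdb8a4c21',  # placeholder encoded folder id
    'source': 'userdir_file',          # one of the documented source values
    'path': 'subdir/reads.fastq',      # relative to the user's import dir
    'file_type': 'fastqsanger',
    'dbkey': 'hg38',
    'link_data': False,
}
response = requests.post('https://galaxy.example.org/api/libraries/datasets', data=payload)
print(response.json())  # dict describing the created upload job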
def load(self, trans, **kwd):
    """
    Load dataset from the given source into the library.

    :param  encoded_folder_id:  the encoded id of the folder to import dataset to
    :type   encoded_folder_id:  an encoded id string
    :param  source:             source of the dataset to be loaded
    :type   source:             str
    :param  link_data:          flag whether to link the dataset to data or copy it to Galaxy
    :type   link_data:          bool
    :param  preserve_dirs:      flag whether to preserve directory structure when importing dir
    :type   preserve_dirs:      bool
    """
    kwd['space_to_tab'] = 'False'
    kwd['to_posix_lines'] = 'True'
    kwd['dbkey'] = kwd.get('dbkey', '?')
    kwd['file_type'] = kwd.get('file_type', 'auto')
    kwd['link_data_only'] = 'link_to_files' if util.string_as_bool(kwd.get('link_data', False)) else 'copy_files'
    encoded_folder_id = kwd.get('encoded_folder_id', None)
    if encoded_folder_id is not None:
        folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
    else:
        raise exceptions.RequestParameterMissingException('The required attribute encoded_folder_id is missing.')
    path = kwd.get('path', None)
    if path is None:
        raise exceptions.RequestParameterMissingException('The required attribute path is missing.')
    folder = self.folder_manager.get(trans, folder_id)
    source = kwd.get('source', None)
    if source not in ['userdir_file', 'userdir_folder', 'importdir_file', 'importdir_folder', 'admin_path']:
        raise exceptions.RequestParameterMissingException('You have to specify "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder". ')
    if source in ['importdir_file', 'importdir_folder']:
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException('Only admins can import from importdir.')
        if not trans.app.config.library_import_dir:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from importdir.')
        user_base_dir = trans.app.config.library_import_dir
    if source in ['userdir_file', 'userdir_folder']:
        user_login = trans.user.email
        user_base_dir = trans.app.config.user_library_import_dir
        if user_base_dir is None:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from user directories.')
        full_dir = os.path.join(user_base_dir, user_login)
        # path_to_root_import_folder = None
        if not path.lower().startswith(full_dir.lower()):
            # path_to_root_import_folder = path
            path = os.path.join(full_dir, path)
        if not os.path.exists(path):
            raise exceptions.RequestParameterInvalidException('Given path does not exist on the host.')
        if not self.folder_manager.can_add_item(trans, folder):
            raise exceptions.InsufficientPermissionsException('You do not have proper permission to add items to the given folder.')
    if source == 'admin_path':
        if not trans.app.config.allow_library_path_paste:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from path.')
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException('Only admins can import from path.')
    # Set up the traditional tool state/params
    tool_id = 'upload1'
    tool = trans.app.toolbox.get_tool(tool_id)
    state = tool.new_state(trans)
    tool.update_state(trans, tool.inputs_by_page[0], state.inputs, kwd)
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.iteritems():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    library_bunch = upload_common.handle_library_params(trans, {}, trans.security.encode_id(folder.id))
    abspath_datasets = []
    kwd['filesystem_paths'] = path
    params = util.Params(kwd)
    # user wants to import one file only
    if source == "userdir_file":
        file = os.path.abspath(path)
        abspath_datasets.append(
            trans.webapp.controllers['library_common'].make_library_uploaded_dataset(
                trans, 'api', params, os.path.basename(file), file, 'server_dir', library_bunch))
    # user wants to import whole folder
    if source == "userdir_folder":
        uploaded_datasets_bunch = trans.webapp.controllers['library_common'].get_path_paste_uploaded_datasets(
            trans, 'api', params, library_bunch, 200, '')
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
        for ud in uploaded_datasets:
            ud.path = os.path.abspath(ud.path)
            abspath_datasets.append(ud)
    # user wants to import from path (admins only)
    if source == "admin_path":
        # validate the path is within root
        uploaded_datasets_bunch = trans.webapp.controllers['library_common'].get_path_paste_uploaded_datasets(
            trans, 'api', params, library_bunch, 200, '')
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
        for ud in uploaded_datasets:
            ud.path = os.path.abspath(ud.path)
            abspath_datasets.append(ud)
    json_file_path = upload_common.create_paramfile(trans, abspath_datasets)
    data_list = [ud.data for ud in abspath_datasets]
    job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=folder)
    # HACK: Prevent outputs_to_working_directory from overwriting inputs when "linking"
    job.add_parameter('link_data_only', dumps(kwd.get('link_data_only', 'copy_files')))
    job.add_parameter('uuid', dumps(kwd.get('uuid', None)))
    trans.sa_session.add(job)
    trans.sa_session.flush()
    job_dict = job.to_dict()
    job_dict['id'] = trans.security.encode_id(job_dict['id'])
    return job_dict
def load( self, trans, **kwd ):
    """
    Load dataset from the given source into the library.

    :param  encoded_folder_id:  the encoded id of the folder to import dataset to
    :type   encoded_folder_id:  an encoded id string
    :param  source:             source of the dataset to be loaded
    :type   source:             str
    :param  link_data:          flag whether to link the dataset to data or copy it to Galaxy
    :type   link_data:          bool
    :param  preserve_dirs:      flag whether to preserve directory structure when importing dir
    :type   preserve_dirs:      bool
    """
    kwd[ 'space_to_tab' ] = 'False'
    kwd[ 'to_posix_lines' ] = 'True'
    kwd[ 'dbkey' ] = kwd.get( 'dbkey', '?' )
    kwd[ 'file_type' ] = kwd.get( 'file_type', 'auto' )
    kwd[ 'link_data_only' ] = 'link_to_files' if util.string_as_bool( kwd.get( 'link_data', False ) ) else 'copy_files'
    encoded_folder_id = kwd.get( 'encoded_folder_id', None )
    if encoded_folder_id is not None:
        folder_id = self.folder_manager.cut_and_decode( trans, encoded_folder_id )
    else:
        raise exceptions.RequestParameterMissingException( 'The required attribute encoded_folder_id is missing.' )
    path = kwd.get( 'path', None )
    if path is None:
        raise exceptions.RequestParameterMissingException( 'The required attribute path is missing.' )
    folder = self.folder_manager.get( trans, folder_id )
    source = kwd.get( 'source', None )
    if source not in [ 'userdir_file', 'userdir_folder', 'admin_path' ]:
        raise exceptions.RequestParameterMissingException( 'You have to specify "source" parameter. Possible values are "userdir_file", "userdir_folder" and "admin_path". ' )
    if source in [ 'userdir_file', 'userdir_folder' ]:
        user_login = trans.user.email
        user_base_dir = trans.app.config.user_library_import_dir
        if user_base_dir is None:
            raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow upload from user directories.' )
        full_dir = os.path.join( user_base_dir, user_login )
        # path_to_root_import_folder = None
        if not path.lower().startswith( full_dir.lower() ):
            # path_to_root_import_folder = path
            path = os.path.join( full_dir, path )
        if not os.path.exists( path ):
            raise exceptions.RequestParameterInvalidException( 'Given path does not exist on the host.' )
        if not self.folder_manager.can_add_item( trans, folder ):
            raise exceptions.InsufficientPermissionsException( 'You do not have proper permission to add items to the given folder.' )
    if source == 'admin_path':
        if not trans.app.config.allow_library_path_paste:
            raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow admins to import into library from path.' )
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException( 'Only admins can import from path.' )
    # Set up the traditional tool state/params
    tool_id = 'upload1'
    tool = trans.app.toolbox.get_tool( tool_id )
    state = tool.new_state( trans )
    tool.update_state( trans, tool.inputs_by_page[ 0 ], state.inputs, kwd )
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.iteritems():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append( input )
    library_bunch = upload_common.handle_library_params( trans, {}, trans.security.encode_id( folder.id ) )
    abspath_datasets = []
    kwd[ 'filesystem_paths' ] = path
    params = util.Params( kwd )
    # user wants to import one file only
    if source == "userdir_file":
        file = os.path.abspath( path )
        abspath_datasets.append( trans.webapp.controllers[ 'library_common' ].make_library_uploaded_dataset(
            trans, 'api', params, os.path.basename( file ), file, 'server_dir', library_bunch ) )
    # user wants to import whole folder
    if source == "userdir_folder":
        uploaded_datasets_bunch = trans.webapp.controllers[ 'library_common' ].get_path_paste_uploaded_datasets(
            trans, 'api', params, library_bunch, 200, '' )
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound( 'Given folder does not contain any datasets.' )
        for ud in uploaded_datasets:
            ud.path = os.path.abspath( ud.path )
            abspath_datasets.append( ud )
    # user wants to import from path (admins only)
    if source == "admin_path":
        # validate the path is within root
        uploaded_datasets_bunch = trans.webapp.controllers[ 'library_common' ].get_path_paste_uploaded_datasets(
            trans, 'api', params, library_bunch, 200, '' )
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound( 'Given folder does not contain any datasets.' )
        for ud in uploaded_datasets:
            ud.path = os.path.abspath( ud.path )
            abspath_datasets.append( ud )
    json_file_path = upload_common.create_paramfile( trans, abspath_datasets )
    data_list = [ ud.data for ud in abspath_datasets ]
    job, output = upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=folder )
    # HACK: Prevent outputs_to_working_directory from overwriting inputs when "linking"
    job.add_parameter( 'link_data_only', dumps( kwd.get( 'link_data_only', 'copy_files' ) ) )
    job.add_parameter( 'uuid', dumps( kwd.get( 'uuid', None ) ) )
    trans.sa_session.add( job )
    trans.sa_session.flush()
    job_dict = job.to_dict()
    job_dict[ 'id' ] = trans.security.encode_id( job_dict[ 'id' ] )
    return job_dict
def load(self, trans, payload=None, **kwd):
    """
    Load dataset(s) from the given source into the library.

    * POST /api/libraries/datasets

    :param  payload: dictionary structure containing:
        :param  encoded_folder_id:      the encoded id of the folder to import dataset(s) to
        :type   encoded_folder_id:      an encoded id string
        :param  source:                 source the datasets should be loaded from
                Source can be:
                    user directory - root folder specified in galaxy.ini as "$user_library_import_dir"
                        example path: path/to/galaxy/$user_library_import_dir/[email protected]/{user can browse everything here}
                        the folder with the user login has to be created beforehand
                    (admin)import directory - root folder specified in galaxy ini as "$library_import_dir"
                        example path: path/to/galaxy/$library_import_dir/{admin can browse everything here}
                    (admin)any absolute or relative path - option allowed with "allow_library_path_paste" in galaxy.ini
        :type   source:                 str
        :param  link_data:              flag whether to link the dataset to data or copy it to Galaxy, defaults to copy
                                        while linking is set to True all symlinks will be resolved _once_
        :type   link_data:              bool
        :param  preserve_dirs:          flag whether to preserve the directory structure when importing dir
                                        if False only datasets will be imported
        :type   preserve_dirs:          bool
        :param  file_type:              file type of the loaded datasets, defaults to 'auto' (autodetect)
        :type   file_type:              str
        :param  dbkey:                  dbkey of the loaded genome, defaults to '?' (unknown)
        :type   dbkey:                  str
        :param  tag_using_filenames:    flag whether to generate dataset tags from filenames
        :type   tag_using_filenames:    bool
    :type   dictionary

    :returns:   dict containing information about the created upload job
    :rtype:     dictionary

    :raises: RequestParameterMissingException, AdminRequiredException, ConfigDoesNotAllowException, RequestParameterInvalidException
        InsufficientPermissionsException, ObjectNotFound
    """
    if payload:
        kwd.update(payload)
    kwd['space_to_tab'] = False
    kwd['to_posix_lines'] = True
    kwd['dbkey'] = kwd.get('dbkey', '?')
    kwd['file_type'] = kwd.get('file_type', 'auto')
    kwd['link_data_only'] = 'link_to_files' if util.string_as_bool(kwd.get('link_data', False)) else 'copy_files'
    kwd['tag_using_filenames'] = util.string_as_bool(kwd.get('tag_using_filenames', None))
    encoded_folder_id = kwd.get('encoded_folder_id', None)
    if encoded_folder_id is not None:
        folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
    else:
        raise exceptions.RequestParameterMissingException('The required attribute encoded_folder_id is missing.')
    path = kwd.get('path', None)
    if path is None:
        raise exceptions.RequestParameterMissingException('The required attribute path is missing.')
    folder = self.folder_manager.get(trans, folder_id)
    source = kwd.get('source', None)
    if source not in ['userdir_file', 'userdir_folder', 'importdir_file', 'importdir_folder', 'admin_path']:
        raise exceptions.RequestParameterMissingException('You have to specify "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder". ')
    elif source in ['importdir_file', 'importdir_folder']:
        if not trans.user_is_admin():
            raise exceptions.AdminRequiredException('Only admins can import from importdir.')
        if not trans.app.config.library_import_dir:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from importdir.')
        import_base_dir = trans.app.config.library_import_dir
        if not safe_relpath(path):
            # admins shouldn't be able to explicitly specify a path outside server_dir, but symlinks are allowed.
            # the reasoning here is that galaxy admins may not have direct filesystem access or can only access
            # library_import_dir via FTP (which cannot create symlinks), and may rely on sysadmins to set up the
            # import directory. if they have filesystem access, all bets are off.
            raise exceptions.RequestParameterInvalidException('The given path is invalid.')
        path = os.path.join(import_base_dir, path)
    elif source in ['userdir_file', 'userdir_folder']:
        unsafe = None
        user_login = trans.user.email
        user_base_dir = trans.app.config.user_library_import_dir
        if user_base_dir is None:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from user directories.')
        full_dir = os.path.join(user_base_dir, user_login)
        if not safe_contains(full_dir, path, whitelist=trans.app.config.user_library_import_symlink_whitelist):
            # the path is a symlink outside the user dir
            path = os.path.join(full_dir, path)
            log.error('User attempted to import a path that resolves to a path outside of their import dir: %s -> %s', path, os.path.realpath(path))
            raise exceptions.RequestParameterInvalidException('The given path is invalid.')
        path = os.path.join(full_dir, path)
        for unsafe in unsafe_walk(path, whitelist=[full_dir] + trans.app.config.user_library_import_symlink_whitelist):
            # the path is a dir and contains files that symlink outside the user dir
            log.error('User attempted to import a directory containing a path that resolves to a path outside of their import dir: %s -> %s', unsafe, os.path.realpath(unsafe))
        if unsafe:
            raise exceptions.RequestParameterInvalidException('The given path is invalid.')
        if not os.path.exists(path):
            raise exceptions.RequestParameterInvalidException('Given path does not exist on the host.')
        if not self.folder_manager.can_add_item(trans, folder):
            raise exceptions.InsufficientPermissionsException('You do not have proper permission to add items to the given folder.')
    elif source == 'admin_path':
        if not trans.app.config.allow_library_path_paste:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from path.')
        if not trans.user_is_admin():
            raise exceptions.AdminRequiredException('Only admins can import from path.')
    # Set up the traditional tool state/params
    tool_id = 'upload1'
    tool = trans.app.toolbox.get_tool(tool_id)
    state = tool.new_state(trans)
    populate_state(trans, tool.inputs, kwd, state.inputs)
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input in tool.inputs.itervalues():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    library_bunch = upload_common.handle_library_params(trans, {}, trans.security.encode_id(folder.id))
    abspath_datasets = []
    kwd['filesystem_paths'] = path
    if source in ['importdir_folder']:
        kwd['filesystem_paths'] = os.path.join(import_base_dir, path)
    # user wants to import one file only
    elif source in ["userdir_file", "importdir_file"]:
        file = os.path.abspath(path)
        abspath_datasets.append(trans.webapp.controllers['library_common'].make_library_uploaded_dataset(
            trans, 'api', kwd, os.path.basename(file), file, 'server_dir', library_bunch))
    # user wants to import whole folder
    elif source == "userdir_folder":
        uploaded_datasets_bunch = trans.webapp.controllers['library_common'].get_path_paste_uploaded_datasets(
            trans, 'api', kwd, library_bunch, 200, '')
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
        for ud in uploaded_datasets:
            ud.path = os.path.abspath(ud.path)
            abspath_datasets.append(ud)
    # user wants to import from path
    if source in ["admin_path", "importdir_folder"]:
        # validate the path is within root
        uploaded_datasets_bunch = trans.webapp.controllers['library_common'].get_path_paste_uploaded_datasets(
            trans, 'api', kwd, library_bunch, 200, '')
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
        for ud in uploaded_datasets:
            ud.path = os.path.abspath(ud.path)
            abspath_datasets.append(ud)
    json_file_path = upload_common.create_paramfile(trans, abspath_datasets)
    data_list = [ud.data for ud in abspath_datasets]
    job_params = {}
    job_params['link_data_only'] = dumps(kwd.get('link_data_only', 'copy_files'))
    job_params['uuid'] = dumps(kwd.get('uuid', None))
    job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=folder, job_params=job_params)
    trans.sa_session.add(job)
    trans.sa_session.flush()
    job_dict = job.to_dict()
    job_dict['id'] = trans.security.encode_id(job_dict['id'])
    return job_dict
def load(self, trans, payload=None, **kwd):
    """
    POST /api/libraries/datasets

    Load dataset(s) from the given source into the library.

    :param  payload: dictionary structure containing:
        :param  encoded_folder_id:      the encoded id of the folder to import dataset(s) to
        :type   encoded_folder_id:      an encoded id string
        :param  source:                 source the datasets should be loaded from. Source can be:
            - user directory
                root folder specified in galaxy.ini as "$user_library_import_dir"
                example path: path/to/galaxy/$user_library_import_dir/[email protected]/{user can browse everything here}
                the folder with the user login has to be created beforehand
            - (admin)import directory
                root folder specified in galaxy ini as "$library_import_dir"
                example path: path/to/galaxy/$library_import_dir/{admin can browse everything here}
            - (admin)any absolute or relative path
                option allowed with "allow_library_path_paste" in galaxy.ini
        :type   source:                 str
        :param  link_data:              flag whether to link the dataset to data or copy it to Galaxy, defaults to copy
                                        while linking is set to True all symlinks will be resolved _once_
        :type   link_data:              bool
        :param  preserve_dirs:          flag whether to preserve the directory structure when importing dir
                                        if False only datasets will be imported
        :type   preserve_dirs:          bool
        :param  file_type:              file type of the loaded datasets, defaults to 'auto' (autodetect)
        :type   file_type:              str
        :param  dbkey:                  dbkey of the loaded genome, defaults to '?' (unknown)
        :type   dbkey:                  str
        :param  tag_using_filenames:    flag whether to generate dataset tags from filenames
        :type   tag_using_filenames:    bool
    :type   dictionary

    :returns:   dict containing information about the created upload job
    :rtype:     dictionary

    :raises: RequestParameterMissingException, AdminRequiredException, ConfigDoesNotAllowException, RequestParameterInvalidException
        InsufficientPermissionsException, ObjectNotFound
    """
    if payload:
        kwd.update(payload)
    kwd['space_to_tab'] = False
    kwd['to_posix_lines'] = True
    kwd['dbkey'] = kwd.get('dbkey', '?')
    kwd['file_type'] = kwd.get('file_type', 'auto')
    kwd['link_data_only'] = 'link_to_files' if util.string_as_bool(kwd.get('link_data', False)) else 'copy_files'
    kwd['tag_using_filenames'] = util.string_as_bool(kwd.get('tag_using_filenames', None))
    encoded_folder_id = kwd.get('encoded_folder_id', None)
    if encoded_folder_id is not None:
        folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
    else:
        raise exceptions.RequestParameterMissingException('The required attribute encoded_folder_id is missing.')
    path = kwd.get('path', None)
    if path is None:
        raise exceptions.RequestParameterMissingException('The required attribute path is missing.')
    if not isinstance(path, str):
        raise exceptions.RequestParameterInvalidException('The required attribute path is not String.')
    folder = self.folder_manager.get(trans, folder_id)
    source = kwd.get('source', None)
    if source not in ['userdir_file', 'userdir_folder', 'importdir_file', 'importdir_folder', 'admin_path']:
        raise exceptions.RequestParameterMissingException('You have to specify "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder". ')
    elif source in ['importdir_file', 'importdir_folder']:
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException('Only admins can import from importdir.')
        if not trans.app.config.library_import_dir:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from importdir.')
        import_base_dir = trans.app.config.library_import_dir
        if not safe_relpath(path):
            # admins shouldn't be able to explicitly specify a path outside server_dir, but symlinks are allowed.
            # the reasoning here is that galaxy admins may not have direct filesystem access or can only access
            # library_import_dir via FTP (which cannot create symlinks), and may rely on sysadmins to set up the
            # import directory. if they have filesystem access, all bets are off.
            raise exceptions.RequestParameterInvalidException('The given path is invalid.')
        path = os.path.join(import_base_dir, path)
    elif source in ['userdir_file', 'userdir_folder']:
        username = trans.user.username if trans.app.config.user_library_import_check_permissions else None
        user_login = trans.user.email
        user_base_dir = trans.app.config.user_library_import_dir
        if user_base_dir is None:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from user directories.')
        full_dir = os.path.join(user_base_dir, user_login)
        if not safe_contains(full_dir, path, allowlist=trans.app.config.user_library_import_symlink_allowlist):
            # the path is a symlink outside the user dir
            path = os.path.join(full_dir, path)
            log.error('User attempted to import a path that resolves to a path outside of their import dir: %s -> %s', path, os.path.realpath(path))
            raise exceptions.RequestParameterInvalidException('The given path is invalid.')
        if trans.app.config.user_library_import_check_permissions and not full_path_permission_for_user(full_dir, path, username):
            log.error('User attempted to import a path that resolves to a path outside of their import dir: '
                      '%s -> %s and cannot be read by them.', path, os.path.realpath(path))
            raise exceptions.RequestParameterInvalidException('The given path is invalid.')
        path = os.path.join(full_dir, path)
        if unsafe_walk(path, allowlist=[full_dir] + trans.app.config.user_library_import_symlink_allowlist, username=username):
            # the path is a dir and contains files that symlink outside the user dir
            error = 'User attempted to import a path that resolves to a path outside of their import dir: {} -> {}'.format(
                path, os.path.realpath(path))
            if trans.app.config.user_library_import_check_permissions:
                error += ' or is not readable for them.'
            log.error(error)
            raise exceptions.RequestParameterInvalidException('The given path is invalid.')
        if not os.path.exists(path):
            raise exceptions.RequestParameterInvalidException('Given path does not exist on the host.')
        if not self.folder_manager.can_add_item(trans, folder):
            raise exceptions.InsufficientPermissionsException('You do not have proper permission to add items to the given folder.')
    elif source == 'admin_path':
        if not trans.app.config.allow_library_path_paste:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from path.')
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException('Only admins can import from path.')
    # Set up the traditional tool state/params
    tool_id = 'upload1'
    tool = trans.app.toolbox.get_tool(tool_id)
    state = tool.new_state(trans)
    populate_state(trans, tool.inputs, kwd, state.inputs)
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input in tool.inputs.values():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    library_bunch = upload_common.handle_library_params(trans, {}, trans.security.encode_id(folder.id))
    abspath_datasets = []
    kwd['filesystem_paths'] = path
    if source in ['importdir_folder']:
        kwd['filesystem_paths'] = os.path.join(import_base_dir, path)
    # user wants to import one file only
    elif source in ["userdir_file", "importdir_file"]:
        file = os.path.abspath(path)
        abspath_datasets.append(self._make_library_uploaded_dataset(
            trans, kwd, os.path.basename(file), file, 'server_dir', library_bunch))
    # user wants to import whole folder
    elif source == "userdir_folder":
        uploaded_datasets_bunch = self._get_path_paste_uploaded_datasets(
            trans, kwd, library_bunch, 200, '')
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
        for ud in uploaded_datasets:
            ud.path = os.path.abspath(ud.path)
            abspath_datasets.append(ud)
    # user wants to import from path
    if source in ["admin_path", "importdir_folder"]:
        # validate the path is within root
        uploaded_datasets_bunch = self._get_path_paste_uploaded_datasets(
            trans, kwd, library_bunch, 200, '')
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
        for ud in uploaded_datasets:
            ud.path = os.path.abspath(ud.path)
            abspath_datasets.append(ud)
    json_file_path = upload_common.create_paramfile(trans, abspath_datasets)
    data_list = [ud.data for ud in abspath_datasets]
    job_params = {}
    job_params['link_data_only'] = dumps(kwd.get('link_data_only', 'copy_files'))
    job_params['uuid'] = dumps(kwd.get('uuid', None))
    job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=folder, job_params=job_params)
    trans.app.job_manager.enqueue(job, tool=tool)
    job_dict = job.to_dict()
    job_dict['id'] = trans.security.encode_id(job_dict['id'])
    return job_dict
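# --- Illustrative sketch (not Galaxy's galaxy.util.path implementation) of the
# idea behind the safe_contains/unsafe_walk guards above: resolve symlinks and
# '..' segments, then verify the real path still lives under the real base dir.
import os

def is_contained(base, path):
    real_base = os.path.realpath(base)
    real_path = os.path.realpath(os.path.join(base, path))
    return real_path == real_base or real_path.startswith(real_base + os.sep)

# A symlink 'evil' -> '/etc' inside the user dir would fail this check:
# is_contained('/srv/import/[email protected]', 'evil/passwd') -> False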
def upload_dataset( self, trans, controller, library_id, folder_id, replace_dataset=None, **kwd ):
    # Set up the traditional tool state/params
    tool_id = 'upload1'
    tool = trans.app.toolbox.tools_by_id[ tool_id ]
    state = tool.new_state( trans )
    errors = tool.update_state( trans, tool.inputs_by_page[0], state.inputs, kwd )
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.iteritems():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append( input )
    # Library-specific params
    params = util.Params( kwd )  # is this filetoolparam safe?
    library_bunch = upload_common.handle_library_params( trans, params, folder_id, replace_dataset )
    msg = util.restore_text( params.get( 'msg', '' ) )
    messagetype = params.get( 'messagetype', 'done' )
    server_dir = util.restore_text( params.get( 'server_dir', '' ) )
    if replace_dataset not in [ None, 'None' ]:
        replace_id = replace_dataset.id
    else:
        replace_id = None
    upload_option = params.get( 'upload_option', 'upload_file' )
    err_redirect = False
    if upload_option == 'upload_directory':
        if server_dir in [ None, 'None', '' ]:
            err_redirect = True
        if controller == 'library_admin':
            import_dir = trans.app.config.library_import_dir
            import_dir_desc = 'library_import_dir'
            full_dir = os.path.join( import_dir, server_dir )
        else:
            import_dir = trans.app.config.user_library_import_dir
            import_dir_desc = 'user_library_import_dir'
            if server_dir == trans.user.email:
                full_dir = os.path.join( import_dir, server_dir )
            else:
                full_dir = os.path.join( import_dir, trans.user.email, server_dir )
        if import_dir:
            msg = 'Select a directory'
        else:
            msg = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
    # Proceed with (mostly) regular upload processing
    precreated_datasets = upload_common.get_precreated_datasets( trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=controller )
    if upload_option == 'upload_file':
        tool_params = upload_common.persist_uploads( tool_params )
        uploaded_datasets = upload_common.get_uploaded_datasets( trans, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch )
    elif upload_option == 'upload_directory':
        uploaded_datasets, err_redirect, msg = self.get_server_dir_uploaded_datasets( trans, params, full_dir, import_dir_desc, library_bunch, err_redirect, msg )
    elif upload_option == 'upload_paths':
        uploaded_datasets, err_redirect, msg = self.get_path_paste_uploaded_datasets( trans, params, library_bunch, err_redirect, msg )
    upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
    if upload_option == 'upload_file' and not uploaded_datasets:
        msg = 'Select a file, enter a URL or enter text'
        err_redirect = True
    if err_redirect:
        trans.response.send_redirect( web.url_for( controller=controller,
                                                   action='upload_library_dataset',
                                                   library_id=library_id,
                                                   folder_id=folder_id,
                                                   replace_id=replace_id,
                                                   upload_option=upload_option,
                                                   msg=util.sanitize_text( msg ),
                                                   messagetype='error' ) )
    json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
    data_list = [ ud.data for ud in uploaded_datasets ]
    return upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder )
def load(self, trans, payload=None, **kwd):
    """
    * POST /api/libraries/datasets

    Load dataset from the given source into the library.
    Source can be:
        user directory - root folder specified in galaxy.ini as "$user_library_import_dir"
            example path: path/to/galaxy/$user_library_import_dir/[email protected]/{user can browse everything here}
            the folder with the user login has to be created beforehand
        (admin)import directory - root folder specified in galaxy ini as "$library_import_dir"
            example path: path/to/galaxy/$library_import_dir/{admin can browse everything here}
        (admin)any absolute or relative path - option allowed with "allow_library_path_paste" in galaxy.ini

    :param  payload: dictionary structure containing:
        :param  encoded_folder_id:  the encoded id of the folder to import dataset(s) to
        :type   encoded_folder_id:  an encoded id string
        :param  source:             source the datasets should be loaded from
        :type   source:             str
        :param  link_data:          flag whether to link the dataset to data or copy it to Galaxy, defaults to copy
                                    while linking is set to True all symlinks will be resolved _once_
        :type   link_data:          bool
        :param  preserve_dirs:      flag whether to preserve the directory structure when importing dir
                                    if False only datasets will be imported
        :type   preserve_dirs:      bool
        :param  file_type:          file type of the loaded datasets, defaults to 'auto' (autodetect)
        :type   file_type:          str
        :param  dbkey:              dbkey of the loaded genome, defaults to '?' (unknown)
        :type   dbkey:              str
    :type   dictionary

    :returns:   dict containing information about the created upload job
    :rtype:     dictionary

    :raises: RequestParameterMissingException, AdminRequiredException, ConfigDoesNotAllowException, RequestParameterInvalidException
        InsufficientPermissionsException, ObjectNotFound
    """
    if payload:
        kwd.update(payload)
    kwd['space_to_tab'] = False
    kwd['to_posix_lines'] = True
    kwd['dbkey'] = kwd.get('dbkey', '?')
    kwd['file_type'] = kwd.get('file_type', 'auto')
    kwd['link_data_only'] = 'link_to_files' if util.string_as_bool(kwd.get('link_data', False)) else 'copy_files'
    encoded_folder_id = kwd.get('encoded_folder_id', None)
    if encoded_folder_id is not None:
        folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
    else:
        raise exceptions.RequestParameterMissingException('The required attribute encoded_folder_id is missing.')
    path = kwd.get('path', None)
    if path is None:
        raise exceptions.RequestParameterMissingException('The required attribute path is missing.')
    folder = self.folder_manager.get(trans, folder_id)
    source = kwd.get('source', None)
    if source not in ['userdir_file', 'userdir_folder', 'importdir_file', 'importdir_folder', 'admin_path']:
        raise exceptions.RequestParameterMissingException('You have to specify "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder". ')
    if source in ['importdir_file', 'importdir_folder']:
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException('Only admins can import from importdir.')
        if not trans.app.config.library_import_dir:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from importdir.')
        import_base_dir = trans.app.config.library_import_dir
        path = os.path.join(import_base_dir, path)
    if source in ['userdir_file', 'userdir_folder']:
        user_login = trans.user.email
        user_base_dir = trans.app.config.user_library_import_dir
        if user_base_dir is None:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from user directories.')
        full_dir = os.path.join(user_base_dir, user_login)
        if not path.lower().startswith(full_dir.lower()):
            path = os.path.join(full_dir, path)
        if not os.path.exists(path):
            raise exceptions.RequestParameterInvalidException('Given path does not exist on the host.')
        if not self.folder_manager.can_add_item(trans, folder):
            raise exceptions.InsufficientPermissionsException('You do not have proper permission to add items to the given folder.')
    if source == 'admin_path':
        if not trans.app.config.allow_library_path_paste:
            raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from path.')
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException('Only admins can import from path.')
    # Set up the traditional tool state/params
    tool_id = 'upload1'
    tool = trans.app.toolbox.get_tool(tool_id)
    state = tool.new_state(trans)
    tool.populate_state(trans, tool.inputs, kwd, state.inputs)
    tool_params = state.inputs
    dataset_upload_inputs = []
    for input in tool.inputs.itervalues():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    library_bunch = upload_common.handle_library_params(trans, {}, trans.security.encode_id(folder.id))
    abspath_datasets = []
    kwd['filesystem_paths'] = path
    if source in ['importdir_folder']:
        kwd['filesystem_paths'] = os.path.join(import_base_dir, path)
    # user wants to import one file only
    if source in ["userdir_file", "importdir_file"]:
        file = os.path.abspath(path)
        abspath_datasets.append(trans.webapp.controllers['library_common'].make_library_uploaded_dataset(
            trans, 'api', kwd, os.path.basename(file), file, 'server_dir', library_bunch))
    # user wants to import whole folder
    if source == "userdir_folder":
        uploaded_datasets_bunch = trans.webapp.controllers['library_common'].get_path_paste_uploaded_datasets(
            trans, 'api', kwd, library_bunch, 200, '')
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
        for ud in uploaded_datasets:
            ud.path = os.path.abspath(ud.path)
            abspath_datasets.append(ud)
    # user wants to import from path
    if source in ["admin_path", "importdir_folder"]:
        # validate the path is within root
        uploaded_datasets_bunch = trans.webapp.controllers['library_common'].get_path_paste_uploaded_datasets(
            trans, 'api', kwd, library_bunch, 200, '')
        uploaded_datasets = uploaded_datasets_bunch[0]
        if uploaded_datasets is None:
            raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
        for ud in uploaded_datasets:
            ud.path = os.path.abspath(ud.path)
            abspath_datasets.append(ud)
    json_file_path = upload_common.create_paramfile(trans, abspath_datasets)
    data_list = [ud.data for ud in abspath_datasets]
    job_params = {}
    job_params['link_data_only'] = dumps(kwd.get('link_data_only', 'copy_files'))
    job_params['uuid'] = dumps(kwd.get('uuid', None))
    job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=folder, job_params=job_params)
    trans.sa_session.add(job)
    trans.sa_session.flush()
    job_dict = job.to_dict()
    job_dict['id'] = trans.security.encode_id(job_dict['id'])
    return job_dict