def _upload_dataset(self, trans, library_id, folder_id, replace_dataset=None, **kwd):
    """Run the traditional ``upload1`` tool to load dataset(s) into a library folder.

    Drives one of three upload modes selected by ``kwd['upload_option']``:
    ``upload_file`` (default), ``upload_directory`` (admin server dir), or
    ``upload_paths`` (admin filesystem paths).

    :param trans: the current transaction (provides app, toolbox, sa_session).
    :param library_id: target library id (unused here; kept for caller compatibility).
    :param folder_id: target folder id, passed to ``handle_library_params``.
    :param replace_dataset: existing library dataset to replace, or None.
    :param kwd: remaining request parameters (file_type, server_dir,
        upload_option, link_data_only, uuid, and tool state inputs).
    :returns: the created job's output datasets on success, or an
        ``(http_status_code, message)`` tuple on error.
    """
    # Set up the traditional tool state/params
    cntrller = 'api'
    tool_id = 'upload1'
    message = None
    file_type = kwd.get('file_type')
    # Reject unknown datatype extensions up front with a 400 rather than
    # letting the upload tool fail later.
    try:
        upload_common.validate_datatype_extension(datatypes_registry=trans.app.datatypes_registry, ext=file_type)
    except RequestParameterInvalidException as e:
        return (400, util.unicodify(e))
    # Build the upload tool's state from the raw request parameters.
    tool = trans.app.toolbox.get_tool(tool_id)
    state = tool.new_state(trans)
    populate_state(trans, tool.inputs, kwd, state.inputs)
    tool_params = state.inputs
    # Collect the tool inputs that accept uploaded datasets.
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.items():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    # Library-specific params
    server_dir = kwd.get('server_dir', '')
    upload_option = kwd.get('upload_option', 'upload_file')
    response_code = 200
    if upload_option == 'upload_directory':
        # Raises on a missing/invalid server directory; full_dir is used below.
        full_dir, import_dir_desc = validate_server_directory_upload(trans, server_dir)
        message = 'Select a directory'
    elif upload_option == 'upload_paths':
        # Library API already checked this - following check isn't actually needed.
        validate_path_upload(trans)
    # Some error handling should be added to this method.
    try:
        # FIXME: instead of passing params here ( which have been processed by util.Params(), the original kwd
        # should be passed so that complex objects that may have been included in the initial request remain.
        library_bunch = upload_common.handle_library_params(trans, kwd, folder_id, replace_dataset)
    except Exception:
        response_code = 500
        message = "Unable to parse upload parameters, please report this error."
    # Proceed with (mostly) regular upload processing if we're still errorless
    if response_code == 200:
        if upload_option == 'upload_file':
            tool_params = upload_common.persist_uploads(tool_params, trans)
            uploaded_datasets = upload_common.get_uploaded_datasets(trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch)
        elif upload_option == 'upload_directory':
            # Helper may downgrade response_code/message on partial failure.
            uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets(trans, kwd, full_dir, import_dir_desc, library_bunch, response_code, message)
        elif upload_option == 'upload_paths':
            uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(trans, kwd, library_bunch, response_code, message)
        # An 'upload_file' request with nothing attached is a client error.
        if upload_option == 'upload_file' and not uploaded_datasets:
            response_code = 400
            message = 'Select a file, enter a URL or enter text'
    if response_code != 200:
        return (response_code, message)
    # Persist the upload description to a param file and create the job.
    json_file_path = upload_common.create_paramfile(trans, uploaded_datasets)
    data_list = [ud.data for ud in uploaded_datasets]
    job_params = {}
    # Values are JSON-encoded because they travel through job parameters.
    job_params['link_data_only'] = json.dumps(kwd.get('link_data_only', 'copy_files'))
    job_params['uuid'] = json.dumps(kwd.get('uuid', None))
    job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params)
    trans.sa_session.add(job)
    trans.sa_session.flush()
    return output
def check_src(item):
    """Validate one upload-target item and normalize it in place.

    Depending on ``item["src"]`` this rewrites the item to a plain ``path``
    source where possible (``file://`` URLs, ``server_dir``, ``ftp_import``),
    enforces admin-only path uploads, and validates URLs.

    :param item: mutable dict describing one upload source; mutated in place.
    :raises RequestParameterInvalidException: on any invalid or disallowed
        combination of parameters.
    """
    if "object_id" in item:
        raise RequestParameterInvalidException(
            "object_id not allowed to appear in the request.")
    validate_datatype_extension(
        datatypes_registry=trans.app.datatypes_registry, ext=item.get('ext'))
    # Normalize file:// URLs into paths.
    if item["src"] == "url":
        if "url" not in item:
            raise RequestParameterInvalidException(
                "src specified as 'url' but 'url' not specified")
        url = item["url"]
        if url.startswith("file://"):
            item["src"] = "path"
            item["path"] = url[len("file://"):]
            del item["url"]
    # in_place is decided server-side (below); clients may not set it.
    if "in_place" in item:
        raise RequestParameterInvalidException(
            "in_place cannot be set in the upload request")
    src = item["src"]
    # Check link_data_only can only be set for certain src types and certain elements_from types.
    _handle_invalid_link_data_only_elements_type(item)
    if src not in ["path", "server_dir"]:
        _handle_invalid_link_data_only_type(item)
    elements_from = item.get("elements_from", None)
    if elements_from and elements_from not in ELEMENTS_FROM_TYPE:
        raise RequestParameterInvalidException(
            "Invalid elements_from/items_from found in request")
    if src == "path" or (src == "url" and item["url"].startswith("file:")):
        # Validate is admin, leave alone.
        validate_path_upload(trans)
    elif src == "server_dir":
        # Validate and replace with path definition.
        server_dir = item["server_dir"]
        full_path, _ = validate_server_directory_upload(trans, server_dir)
        item["src"] = "path"
        item["path"] = full_path
    elif src == "ftp_import":
        ftp_path = item["ftp_path"]
        full_path = None
        # It'd be nice if this can be de-duplicated with what is in parameters/grouping.py.
        user_ftp_dir = trans.user_ftp_dir
        is_directory = False
        assert not os.path.islink(
            user_ftp_dir), "User FTP directory cannot be a symbolic link"
        # Locate ftp_path as a (non-symlink) file or directory anywhere under
        # the user's FTP directory.
        for (dirpath, dirnames, filenames) in os.walk(user_ftp_dir):
            for filename in filenames:
                if ftp_path == filename:
                    path = relpath(os.path.join(dirpath, filename), user_ftp_dir)
                    if not os.path.islink(os.path.join(dirpath, filename)):
                        full_path = os.path.abspath(
                            os.path.join(user_ftp_dir, path))
                    break
            for dirname in dirnames:
                if ftp_path == dirname:
                    path = relpath(os.path.join(dirpath, dirname), user_ftp_dir)
                    if not os.path.islink(os.path.join(dirpath, dirname)):
                        full_path = os.path.abspath(
                            os.path.join(user_ftp_dir, path))
                        is_directory = True
                    break
        if is_directory:
            # If the target is a directory - make sure no entries under it are
            # symbolic links. BUGFIX: the previous code (a) inverted the islink
            # test, rejecting directories with *no* symlinks, (b) reused the
            # stale `filename` variable in the dirname loop, and (c) only
            # checked entries whose name equaled ftp_path.
            for (dirpath, dirnames, filenames) in os.walk(full_path):
                for entry in filenames + dirnames:
                    if os.path.islink(os.path.join(dirpath, entry)):
                        full_path = False
                        break
                if full_path is False:
                    break
        if not full_path:
            raise RequestParameterInvalidException(
                "Failed to find referenced ftp_path or symbolic link was encountered"
            )
        item["src"] = "path"
        item["path"] = full_path
        item["purge_source"] = purge_ftp_source
    elif src == "url":
        url = item["url"]
        looks_like_url = False
        for url_prefix in ["http://", "https://", "ftp://", "ftps://"]:
            if url.startswith(url_prefix):
                looks_like_url = True
                break
        # Fall back to configured file sources for non-standard URI schemes.
        if not looks_like_url and trans.app.file_sources.looks_like_uri(url):
            looks_like_url = True
        if not looks_like_url:
            raise RequestParameterInvalidException(
                "Invalid URL [%s] found in src definition." % url)
        # SSRF protection: reject URLs resolving to disallowed IPs.
        validate_url(url, trans.app.config.fetch_url_allowlist_ips)
        item["in_place"] = run_as_real_user
    elif src == "files":
        item["in_place"] = run_as_real_user
    # Small disagreement with traditional uploads - we purge less by default since whether purging
    # happens varies based on upload options in non-obvious ways.
    # https://github.com/galaxyproject/galaxy/issues/5361
    if "purge_source" not in item:
        item["purge_source"] = False