def download():
    """Serve a file (or prepare a zip of a directory) for download.

    Reads JSON metadata from the "meta" form field (job_id, run_id, path,
    send_file). The path must lie inside a download-allowed base dir.
    Returns a JSON response with collected data and front-end messages.
    """
    messages = []
    data = {}
    try:
        login_required()
        req_meta = json_loads(request.form.get("meta"))
        job_id = req_meta["job_id"]
        run_id = req_meta["run_id"]
        path = req_meta["path"]
        send_file_requested = req_meta["send_file"]
        assert path != "" and os.path.exists(path), \
            "Path does not exist or you have no permission to enter it."
        path = normalize_path(path)
        allowed_dirs = get_allowed_base_dirs(
            job_id=job_id,
            run_id=run_id,
            allow_input=False,
            allow_upload=False,
            allow_download=True
        )
        # reject paths that are not inside one of the allowed base dirs:
        matched_base_dir = check_if_path_in_dirs(path, allowed_dirs)
        assert matched_base_dir is not None, \
            "Path does not exist or you have no permission to enter it."
        if os.path.isdir(path):
            # directories are packaged into a zip archive for download
            data["zip_path"] = zip_dir(path)
        if send_file_requested:
            # NOTE(review): `attachment_filename` was renamed to
            # `download_name` in Flask 2.0 — confirm the pinned Flask version
            return send_from_directory(
                os.path.dirname(path),
                os.path.basename(path),
                attachment_filename=os.path.basename(path),
                as_attachment=True
            )
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def upload_file():
    """Receive an uploaded file and store it inside an upload-allowed dir.

    Reads JSON metadata from the "meta" form field (dir_path, optional
    job_name, access_token) and the file itself from request.files["file"].
    Returns a JSON response with the stored file path and messages.
    """
    messages = []
    data = {}
    try:
        # parse request metadata:
        meta = json_loads(request.form.get("meta"))
        dir_path = meta["dir_path"]
        job_name = meta.get("job_name")
        access_token = meta["access_token"]
        login_required(access_token=access_token)
        assert 'file' in request.files, 'No file received.'
        uploaded = request.files['file']
        assert uploaded.filename != '', "No file specified."
        # sanitize the client-supplied filename before joining paths:
        filename = secure_filename(uploaded.filename)
        # verify the target dir lies inside an upload-allowed base dir:
        allowed_dirs = get_allowed_base_dirs(
            job_name=job_name,
            allow_input=False,
            allow_upload=True,
            allow_download=False
        )
        assert dir_path != "", \
            "Path does not exist or you have no permission to enter it."
        dir_path = normalize_path(dir_path)
        assert os.path.exists(dir_path) and \
            os.path.isdir(dir_path) and \
            check_if_path_in_dirs(dir_path, allowed_dirs) is not None, \
            "Path does not exist or you have no permission to enter it."
        dest_path = os.path.join(dir_path, filename)
        uploaded.save(dest_path)
        data["file_path"] = dest_path
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": "Successfully uploaded file."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def import_wf_by_path_or_url():
    """Import a workflow document, either from a URL or a local path.

    Reads a JSON body with access_token, wf_path, import_name, and the
    optional flags is_url and wf_type. Local paths must point to a file
    inside an allowed base dir; URLs are validated instead.
    Returns a JSON response with data and front-end messages.
    """
    messages = []
    data = []
    try:
        req = request.get_json()
        access_token = req["access_token"]
        login_required(access_token=access_token)
        wf_path = req["wf_path"]
        is_url = req.get("is_url")
        import_name = req["import_name"]
        wf_type = req.get("wf_type")
        if is_url:
            validate_url(wf_path)
        else:
            # local paths must resolve to a file within an allowed base dir:
            allowed_dirs = get_allowed_base_dirs(
                allow_input=True,
                allow_upload=True,
                allow_download=False,
                include_tmp_dir=True
            )
            assert os.path.isfile(wf_path) and \
                check_if_path_in_dirs(wf_path, allowed_dirs) is not None, \
                "Path does not exist or you have no permission to enter it."
        import_wf_(wf_path=wf_path, name=import_name, wf_type=wf_type)
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": import_name + " successfully imported."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def create_job(job_name, username, job_param_sheet=None, run_inputs=None,
               wf_target=None, validate_uris=True, search_paths=False,
               search_subdirs=False, search_dir=None, sheet_format="xlsx"):
    """Create a job: stage inputs, resolve the workflow, register in the DB.

    Either a job_param_sheet (from which run YAMLs are generated) or a list
    of run_inputs plus a wf_target document must be provided.

    Args:
        job_name: name of the job to create.
        username: owner of the job (recorded in the database).
        job_param_sheet: optional path to a parameter sheet.
        run_inputs: optional list of pre-made run YAML files.
        wf_target: workflow document; an existing path, or a name looked up
            via get_path("wf", ...). If None, it is read from the sheet.
        validate_uris / search_paths / search_subdirs / search_dir:
            forwarded to make_runs().
        sheet_format: file extension of the parameter sheet (default "xlsx").

    Raises:
        AssertionError: on invalid argument combinations or a wf_target
            outside the allowed base dirs.
    """
    assert not (
        job_param_sheet is None and (run_inputs is None or wf_target is None)
    ), (
        "You have to either provide a job_param_sheet or a list of "
        "run_inputs plus a wf_target document"
    )
    runs_yaml_dir = get_path("runs_yaml_dir", job_name=job_name)
    if job_param_sheet is not None:
        # FIX: always stage the param sheet when one is provided. Previously
        # this only happened when wf_target was None, so providing both a
        # sheet and a wf_target raised a NameError at make_runs() below.
        job_param_sheet_dest_path = get_path("job_param_sheet",
                                             job_name=job_name,
                                             param_sheet_format=sheet_format)
        copyfile(job_param_sheet, job_param_sheet_dest_path)
        if wf_target is None:
            # read the workflow name out of the sheet's metadata:
            wf_target = get_job_templ_info(
                "metadata", job_templ_path=job_param_sheet_dest_path
            )["workflow_name"]
    wf_type = get_workflow_type_from_file_ext(wf_target)
    # make directories:
    make_job_dir_tree(job_name)
    # make run yamls:
    if job_param_sheet is not None:
        assert not (search_paths and search_dir is None), \
            "search_paths was set to True but no search dir has been defined."
        make_runs(
            sheet_file=job_param_sheet_dest_path,
            wf_type=wf_type,
            output_basename="",
            output_dir=runs_yaml_dir,
            validate_uris=validate_uris,
            search_paths=search_paths,
            search_subdirs=search_subdirs,
            allow_remote_uri=app.config["INPUT_SOURCES"]["URL"],
            allow_local_path=app.config["INPUT_SOURCES"]["local_file_system"],
            input_dir=search_dir)
    else:
        for run_input in run_inputs:
            copy(run_input, runs_yaml_dir)
    # get run names from the produced yaml files:
    run_yamls = fetch_files_in_dir(dir_path=runs_yaml_dir,
                                   file_exts=["yaml"],
                                   ignore_subdirs=True)
    run_names = [r["file_nameroot"] for r in run_yamls]
    # if wf_target is an existing path it must lie inside an allowed base
    # dir; otherwise it is looked up in the workflow dir:
    if os.path.exists(wf_target):
        wf_target = os.path.abspath(wf_target)
        allowed_dirs = get_allowed_base_dirs(job_name=job_name,
                                             allow_input=True,
                                             allow_upload=False,
                                             allow_download=False)
        # (typo fixed in message: "exit" -> "exist")
        assert check_if_path_in_dirs(wf_target, allowed_dirs) is not None, \
            "The provided wf_target file does not exist or you have no permission to access it."
    else:
        wf_target = get_path("wf", wf_target=wf_target)
    # copy wf_target document into the job dir:
    copyfile(wf_target, get_path("job_wf", job_name=job_name,
                                 wf_type=wf_type))
    # make output directories:
    for run_name in run_names:
        run_out_dir = get_path("run_out_dir", job_name, run_name)
        if not os.path.exists(run_out_dir):
            os.mkdir(run_out_dir)
    # register job and runs in the database:
    _ = job_manager.create_job(job_name=job_name, username=username,
                               wf_target=wf_target)
    job_manager.create_runs(run_names=run_names, job_name=job_name)
def browse_dir():
    """List a directory's content after validating browse permissions.

    Reads a JSON body with the target path, filter options, permission
    flags, and optional job/run scoping. If the requested path is invalid
    and on_error_return_base_dir_items is set, falls back to listing a
    base dir instead of failing. Returns a JSON response with the listed
    items, the resolved dir, the matched base dir, and messages.
    """
    messages = []
    data = {}
    try:
        login_required()
        req = request.get_json()
        path = remove_non_printable_characters(req["path"])
        ignore_files = req["ignore_files"]
        file_exts = req["file_exts"]
        show_only_hits = req["show_only_hits"]
        get_parent_dir = req["get_parent_dir"]
        allow_input = req["allow_input"]
        allow_upload = req["allow_upload"]
        allow_download = req["allow_download"]
        default_base_dir = req.get("default_base_dir")
        job_id = req.get("job_id")
        run_id = req.get("run_id")
        on_error_return_base_dir_items = req["on_error_return_base_dir_items"]
        fixed_base_dir = req.get("fixed_base_dir")
        fixed_base_dir_name = req.get("fixed_base_dir_name",
                                      "FIXED_BASE_DIR")
        include_tmp_dir = req.get("include_tmp_dir", False)
        data["allowed_dirs"] = get_allowed_base_dirs(
            job_id=job_id,
            run_id=run_id,
            allow_input=allow_input,
            allow_upload=allow_upload,
            allow_download=allow_download,
            include_tmp_dir=include_tmp_dir)
        if fixed_base_dir is not None:
            # restrict browsing to a single pre-selected base dir
            # (FIX: check_if_path_in_dirs was previously evaluated twice
            # with identical arguments; hoisted into one call)
            fixed_key = check_if_path_in_dirs(fixed_base_dir,
                                              data["allowed_dirs"])
            assert fixed_key is not None, "Fixed base dir is not allowed."
            data["allowed_dirs"] = {
                fixed_base_dir_name: {
                    "path": fixed_base_dir,
                    "mode": data["allowed_dirs"][fixed_key]["mode"]
                }
            }
        try:
            assert path != "" and os.path.exists(path), \
                "Path does not exist or you have no permission to enter it."
            path = normalize_path(path)
            # browse the containing dir when a file (or the parent) is asked:
            if get_parent_dir or not os.path.isdir(path):
                path = os.path.dirname(path)
            data["base_dir"] = check_if_path_in_dirs(path,
                                                     data["allowed_dirs"])
            assert data["base_dir"] is not None, \
                "Path does not exist or you have no permission to enter it."
            data["items"] = browse_dir_(path, ignore_files, file_exts,
                                        show_only_hits)
            data["dir"] = path
        except AssertionError:
            if on_error_return_base_dir_items:
                # fall back to listing a base dir instead of failing:
                if default_base_dir is not None and \
                        default_base_dir in data["allowed_dirs"]:
                    data["base_dir"] = default_base_dir
                else:
                    data["base_dir"] = list(data["allowed_dirs"].keys())[0]
                path = data["allowed_dirs"][data["base_dir"]]["path"]
                data["dir"] = path
                data["items"] = browse_dir_(path, ignore_files, file_exts,
                                            show_only_hits)
            else:
                # FIX: re-raise the original exception (previously it was
                # re-wrapped via AssertionError(str(e)), losing the traceback)
                raise
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def create_job(job_id, job_param_sheet=None, run_inputs=None, wf_target=None,
               validate_paths=True, search_paths=False, search_subdirs=False,
               search_dir=None, sheet_format="xlsx"):
    """Create a job: stage inputs, resolve the workflow, make output dirs.

    Either a job_param_sheet (from which run YAMLs are generated) or a list
    of run_inputs plus a wf_target document must be provided.

    Args:
        job_id: identifier of the job to create.
        job_param_sheet: optional path to a parameter sheet.
        run_inputs: optional list of pre-made run YAML files.
        wf_target: workflow document; an existing path, or a name looked up
            via get_path("wf", ...). If None, it is read from the sheet.
        validate_paths / search_paths / search_subdirs / search_dir:
            forwarded to make_runs().
        sheet_format: file extension of the parameter sheet (default "xlsx").

    Raises:
        AssertionError: on invalid argument combinations or a wf_target
            outside the allowed base dirs.
    """
    assert not (
        job_param_sheet is None and (run_inputs is None or wf_target is None)
    ), (
        "You have to either provide a job_param_sheet or a list of "
        "run_inputs plus a wf_target document"
    )
    runs_yaml_dir = get_path("runs_yaml_dir", job_id=job_id)
    if job_param_sheet is not None:
        # FIX: always stage the param sheet when one is provided. Previously
        # this only happened when wf_target was None, so providing both a
        # sheet and a wf_target raised a NameError at make_runs() below.
        job_param_sheet_dest_path = get_path("job_param_sheet", job_id=job_id,
                                             param_sheet_format=sheet_format)
        copyfile(job_param_sheet, job_param_sheet_dest_path)
        if wf_target is None:
            # read the workflow name out of the sheet's metadata:
            wf_target = get_job_templ_info(
                "metadata", job_templ_path=job_param_sheet_dest_path
            )["workflow_name"]
    wf_type = get_workflow_type_from_file_ext(wf_target)
    # make directories:
    make_job_dir_tree(job_id)
    # make run yamls:
    if job_param_sheet is not None:
        assert not (search_paths and search_dir is None), \
            "search_paths was set to True but no search dir has been defined."
        make_runs(sheet_file=job_param_sheet_dest_path,
                  wf_type=wf_type,
                  output_basename="",
                  output_dir=runs_yaml_dir,
                  validate_paths=validate_paths,
                  search_paths=search_paths,
                  search_subdirs=search_subdirs,
                  input_dir=search_dir)
    else:
        for run_input in run_inputs:
            copy(run_input, runs_yaml_dir)
    # if wf_target is an existing path it must lie inside an allowed base
    # dir; otherwise it is looked up in the workflow dir:
    if os.path.exists(wf_target):
        wf_target = os.path.abspath(wf_target)
        allowed_dirs = get_allowed_base_dirs(job_id=job_id,
                                             allow_input=True,
                                             allow_upload=False,
                                             allow_download=False)
        # (typo fixed in message: "exit" -> "exist")
        assert check_if_path_in_dirs(wf_target, allowed_dirs) is not None, \
            "The provided wf_target file does not exist or you have no permission to access it."
    else:
        wf_target = get_path("wf", wf_target=wf_target)
    # copy wf_target document into the job dir:
    copyfile(wf_target, get_path("job_wf", job_id=job_id, wf_type=wf_type))
    # make output directories:
    run_ids = get_run_ids(job_id)
    for run_id in run_ids:
        run_out_dir = get_path("run_out_dir", job_id, run_id)
        if not os.path.exists(run_out_dir):
            os.mkdir(run_out_dir)