def get_run_list():
    """Return the sorted run names of a job.

    Expects a JSON body with "access_token", "username", and "job_name".
    Responds with {"data": {"run_names": [...]}, "messages": [...]}.
    """
    messages = []
    data = {}
    try:
        data_req = request.get_json()
        access_token = data_req["access_token"]
        username = data_req["username"]
        login_required(access_token=access_token, username=username)
        job_name = data_req["job_name"]
        run_names = job_manager.get_run_names(job_name)
        run_names.sort()
        data["run_names"] = run_names
        if not run_names:
            messages.append({
                "type": "info",
                "text": "No runs available in this job."
            })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(
            e,
            # fixed typos in the user-facing message: "unkown"/"occured"
            alt_err_message="An unknown error occurred reading the execution directory",
            return_front_end_message=True
        ))
    return jsonify({"data": data, "messages": messages})
def change_password():
    """Change a local user's password; the client is logged out on success."""
    messages = []
    data = {"success": False}
    try:
        validate_local_login_enabled()
        req = request.get_json()
        access_token = req["access_token"]
        username = req["username"]
        old_password = req["old_password"]
        new_password = req["new_password"]
        new_rep_password = req["new_rep_password"]
        login_required(access_token=access_token, username=username)
        db_connector.user_manager.change_password(
            username, old_password, new_password, new_rep_password
        )
        data = {"success": True}
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": "Successfully changed password. You will be logged out."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def get_param_form_sheet():
    """Serve a previously generated parameter form sheet for download."""
    messages = []
    data = {}
    try:
        req = request.args.to_dict()
        login_required(access_token=req["access_token"])
        job_name = req["job_name"]
        temp_dir = os.path.join(app.config["TEMP_DIR"], req["temp_dir_name"])
        # exactly one matching sheet must exist in the temp dir:
        hits = fetch_files_in_dir(
            dir_path=temp_dir,
            file_exts=allowed_extensions_by_type["spreadsheet"],
            search_string=job_name,
            return_abspaths=True
        )
        assert len(hits) == 1, \
            "The requested file does not exist or you have no permission access it"
        sheet_path = hits[0]["file_abspath"]
        download_name = job_name + "_inputs" + os.path.splitext(sheet_path)[1]
        return send_from_directory(
            os.path.dirname(sheet_path),
            os.path.basename(sheet_path),
            attachment_filename=download_name,
            as_attachment=True
        )
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def import_wf_by_trs_uri():
    """Import a workflow referenced by a GA4GH TRS URI."""
    messages = []
    data = []
    try:
        req = request.get_json()
        access_token = req["access_token"]
        login_required(access_token=access_token)
        import_name = req["import_name"]
        import_worflow_by_trs(
            uri=req["trs_uri"],
            name=import_name,
            access_token=access_token
        )
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": import_name + " successfully imported."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def modify_or_delete_users():
    """Admin endpoint: delete users or set their status/level in bulk."""
    messages = []
    data = []

    def _success(text):
        # shared shape for all success notifications
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": text
        })

    try:
        validate_local_login_enabled()
        req = request.get_json()
        login_required(access_token=req["access_token"], admin=True)
        action = req["action"]
        user_selection = req["user_selection"]
        value = req["value"]
        joined_users = ", ".join(user_selection)
        if action == "delete":
            for username in user_selection:
                user_manager.delete(username)
            _success("Successfully deleted users: \"" + joined_users + "\"")
        if action == "set_status":
            for user in user_selection:
                change_user_status_or_level(
                    get_user_by_username(user).id, new_status=value)
            _success("Successfully set status on users: \"" + joined_users + "\"")
        if action == "set_level":
            for user in user_selection:
                change_user_status_or_level(
                    get_user_by_username(user).id, new_level=value)
            _success("Successfully set level on users: \"" + joined_users + "\"")
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def upload_file():
    """Receive a multipart file upload and store it in an allowed directory."""
    messages = []
    data = {}
    try:
        # metadata travels as a JSON string in the "meta" form field:
        meta = json_loads(request.form.get("meta"))
        dir_path = meta["dir_path"]
        job_name = meta.get("job_name")
        login_required(access_token=meta["access_token"])
        assert 'file' in request.files, 'No file received.'
        import_file = request.files['file']
        assert import_file.filename != '', "No file specified."
        filename = secure_filename(import_file.filename)
        # the target directory must be inside an upload-enabled base dir:
        allowed_dirs = get_allowed_base_dirs(
            job_name=job_name,
            allow_input=False,
            allow_upload=True,
            allow_download=False
        )
        assert dir_path != "", "Path does not exist or you have no permission to enter it."
        dir_path = normalize_path(dir_path)
        dir_is_valid = (
            os.path.exists(dir_path)
            and os.path.isdir(dir_path)
            and check_if_path_in_dirs(dir_path, allowed_dirs) is not None
        )
        assert dir_is_valid, "Path does not exist or you have no permission to enter it."
        import_filepath = os.path.join(dir_path, filename)
        import_file.save(import_filepath)
        data["file_path"] = import_filepath
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": "Successfully uploaded file."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def upload_cwl_zip():
    """Upload a zipped CWL workflow and extract it into a fresh temp dir.

    On success, data["temp_dir"] holds the extraction directory for
    follow-up import requests.
    """
    messages = []
    data = {}
    try:
        metadata = json_loads(request.form.get("meta"))
        login_required(access_token=metadata["access_token"])
        assert 'file' in request.files, 'No file received.'
        import_wf_file = request.files['file']
        assert import_wf_file.filename != '', "No file specified."
        # bug fix: the message listed CWL extensions although the check is
        # for zip files
        assert is_allowed_file(import_wf_file.filename, type="zip"), (
            "Wrong file type. Only files with following extensions are allowed: "
            + ", ".join(allowed_extensions_by_type["zip"]))
        # save the upload, then extract it into a second temp dir:
        import_wf_filename = secure_filename(import_wf_file.filename)
        temp_upload_dir = make_temp_dir()
        imported_filepath = os.path.join(temp_upload_dir, import_wf_filename)
        import_wf_file.save(imported_filepath)
        temp_extract_dir = make_temp_dir()
        unzip_dir(imported_filepath, temp_extract_dir)
        try:
            # best-effort cleanup of the raw upload
            rmtree(temp_upload_dir)
        except Exception:
            pass
        data["temp_dir"] = temp_extract_dir
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": import_wf_file.filename
                    + " was successfully uploaded and extracted."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def get_job_templ_list():
    """Return the list of job templates for already imported CWL documents."""
    messages = []
    templates = []
    try:
        req = request.get_json()
        login_required(access_token=req["access_token"])
        templates = get_job_templates()
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": templates, "messages": messages})
def get_all_users_info():
    """Admin endpoint: return info on all registered users."""
    messages = []
    data = []
    try:
        validate_local_login_enabled()
        req = request.get_json()
        login_required(access_token=req["access_token"], admin=True)
        data = get_all_users_info_()
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def start_exec():
    """Start execution of the given runs of a job.

    Expects JSON with access_token, username, job_name, run_names,
    exec_profile, and optionally parallel_exec (a user-requested cap on
    parallel executions).
    """
    messages = []
    try:
        data_req = request.get_json()
        access_token = data_req["access_token"]
        username = data_req["username"]
        login_required(access_token=access_token, username=username)
        # fix: access_token was redundantly re-read here after login_required
        job_name = data_req["job_name"]
        run_names = sorted(data_req["run_names"])
        exec_profile_name = data_req["exec_profile"]
        max_parrallel_exec_user_def = (
            int(data_req["parallel_exec"])
            if "parallel_exec" in data_req else None
        )
        started_runs, already_running_runs = exec_runs(
            job_name,
            run_names,
            exec_profile_name,
            username=username,
            max_parrallel_exec_user_def=max_parrallel_exec_user_def,
            access_token=access_token
        )
        if len(started_runs) > 0:
            messages.append({
                "time": get_time_string(),
                "type": "success",
                "text": "Successfully started execution for runs: "
                        + ", ".join(started_runs)
            })
        if len(already_running_runs) > 0:
            messages.append({
                "time": get_time_string(),
                "type": "warning",
                "text": "Following runs are already running or have already finished: "
                        + ", ".join(already_running_runs)
                        + ". To restart them, reset them first."
            })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": {}, "messages": messages})
def get_job_list():
    """List jobs for the requesting user plus the available exec profiles.

    Returns {"data": {"exec_profiles", "exec_profile_params", "jobs"},
    "messages": [...]}.
    """
    messages = []
    # defined up-front so the response can always be built, even when the
    # try block fails before the loop (was a NameError before)
    job_info = []
    try:
        data_req = request.get_json()
        access_token = data_req["access_token"]
        username = data_req["username"]
        login_required(access_token=access_token, username=username)
        for job in job_manager.get_jobs_info_for_user(username):
            #! should be changed once workflows are integrated into the database
            job["wf_name"] = os.path.basename(job["wf_target"])
            job_info.append(job)
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    # bug fix: this clause was missing, so both appends sat under
    # AssertionError and unknown errors propagated unhandled
    except Exception as e:
        messages.append(handle_unknown_error(
            e,
            alt_err_message="An unknown error occurred reading the execution directory",
            return_front_end_message=True
        ))
    # get exec profile names and their client-relevant parameters:
    exec_profiles = app.config["EXEC_PROFILES"]
    exec_profile_names = list(exec_profiles.keys())
    exec_profile_params = {}
    for name in exec_profile_names:
        profile = exec_profiles[name]
        exec_profile_params[name] = {
            key: profile[key]
            for key in (
                "workflow_type",
                "max_retries",
                "max_parallel_exec",
                "enable_queueing",
                "allow_user_decrease_max_parallel_exec",
            )
        }
    return jsonify({
        "data": {
            "exec_profiles": exec_profile_names,
            "exec_profile_params": exec_profile_params,
            "jobs": job_info
        },
        "messages": messages
    })
def download():
    """Send a file (or zip a directory) from a download-enabled location."""
    messages = []
    data = {}
    try:
        req = json_loads(request.form.get("meta"))
        login_required(access_token=req["access_token"])
        job_name = req["job_name"]
        run_name = req["run_name"]
        path = req["path"]
        send_file = req["send_file"]
        assert path != "" and os.path.exists(path), \
            "Path does not exist or you have no permission to enter it."
        path = normalize_path(path)
        allowed_dirs = get_allowed_base_dirs(
            job_name=job_name,
            run_name=run_name,
            allow_input=False,
            allow_upload=False,
            allow_download=True
        )
        base_dir = check_if_path_in_dirs(path, allowed_dirs)
        assert base_dir is not None, \
            "Path does not exist or you have no permission to enter it."
        if os.path.isdir(path):
            # directories are zipped; the client fetches the archive separately
            data["zip_path"] = zip_dir(path)
        if send_file:
            return send_from_directory(
                os.path.dirname(path),
                os.path.basename(path),
                attachment_filename=os.path.basename(path),
                as_attachment=True
            )
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def get_run_status():
    """Return status info for the requested runs of a job.

    Expects JSON with access_token, job_name, and run_names; run_names may
    be None, in which case an empty data dict is returned.
    """
    messages = []
    data = {}
    try:
        data_req = request.get_json()
        login_required(access_token=data_req["access_token"])
        if data_req["run_names"] is not None:
            data = get_runs_info(data_req["job_name"], data_req["run_names"])
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(
            e,
            # fixed typos in the user-facing message: "unkown"/"occured"
            alt_err_message="An unknown error occurred reading the execution directory",
            return_front_end_message=True
        ))
    return jsonify({"data": data, "messages": messages})
def get_param_values():
    """Compute parameter values and configs for a job template in memory."""
    messages = []
    data = {}
    try:
        req = request.get_json()
        login_required(access_token=req["access_token"])
        wf_target = req["wf_target"]
        param_values, configs = gen_form_sheet(
            output_file_path=None,  # no sheet written; values returned directly
            template_config_file_path=get_path("job_templ", wf_target=wf_target),
            has_multiple_runs=req["batch_mode"],
            run_names=req["run_names"],
            param_is_run_specific=req["param_modes"],
            show_please_fill=True,
            metadata={"workflow_name": wf_target}
        )
        data = {"param_values": param_values, "configs": configs}
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def list_avail_wfs_in_janis():
    """Upload a Janis file and report the workflow names defined inside it."""
    messages = []
    data = []
    try:
        meta = json_loads(request.form.get("meta"))
        login_required(access_token=meta["access_token"])
        # persist the upload so the janis helper can read it from disk:
        assert 'wf_file' in request.files, 'No file received.'
        wf_file = request.files['wf_file']
        assert wf_file.filename != '', "No file specified."
        temp_dir = make_temp_dir()
        wf_filepath = os.path.join(temp_dir, secure_filename(wf_file.filename))
        wf_file.save(wf_filepath)
        avail_wfs = list_workflows_in_janis_file(
            file=wf_filepath, only_return_name=True)
        try:
            # best-effort temp cleanup
            rmtree(temp_dir)
        except Exception:
            pass
        assert len(avail_wfs) > 0, \
            "No workflow definition could be found in the provided Janis file."
        data = {"avail_wfs": avail_wfs}
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def delete_job():
    """Delete a job, terminating and cleaning up its runs first."""
    messages = []
    data = {}

    def _error(text):
        # shared shape for all error notifications
        messages.append({
            "time": get_time_string(),
            "type": "error",
            "text": text
        })

    try:
        req = request.get_json()
        login_required(access_token=req["access_token"])
        job_name = req["job_name"]
        results = delete_job_by_name(job_name)
        status = results["status"]
        if status == "success":
            pass  # nothing to report
        elif status == "failed run termination":
            if len(results["could_not_be_terminated"]) > 0:
                _error("Following runs could not be terminated: "
                       + ", ".join(results["could_not_be_terminated"]))
            if len(results["could_not_be_cleaned"]) > 0:
                _error("Following runs could not be cleaned: "
                       + ", ".join(results["could_not_be_cleaned"]))
        else:
            _error("Could not delete job dir for \"" + job_name + "\".")
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def import_wf_by_path_or_url():
    """Import a workflow from a server-side path or a URL.

    Local paths must reside inside an input-enabled base dir; URLs are
    validated before import.
    """
    messages = []
    data = []
    try:
        data_req = request.get_json()
        access_token = data_req["access_token"]
        login_required(access_token=access_token)
        wf_path = data_req["wf_path"]
        is_url = data_req.get("is_url")
        import_name = data_req["import_name"]
        wf_type = data_req.get("wf_type")
        if is_url:
            validate_url(wf_path)
        else:
            allowed_dirs = get_allowed_base_dirs(allow_input=True,
                                                 allow_upload=False,
                                                 allow_download=False,
                                                 include_tmp_dir=False)
            # security fix: the message previously leaked the internal
            # allowed-dirs listing to the client (debug leftover)
            assert os.path.isfile(wf_path) and \
                check_if_path_in_dirs(wf_path, allowed_dirs) is not None, \
                "Path does not exist or you have no permission to enter it."
        import_wf_(wf_path=wf_path, name=import_name, wf_type=wf_type)
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": import_name + " successfully imported."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def generate_param_form_sheet():
    """Generate a parameter form sheet from client data; return its link."""
    messages = []
    data = {}
    try:
        req = request.get_json()
        access_token = req["access_token"]
        login_required(access_token=access_token)
        job_name = req["job_name"]
        wf_target = req["wf_target"]
        sheet_format = req["sheet_format"]
        # the sheet lands in a fresh temp dir; cleanup happens later
        temp_dir = make_temp_dir()
        output_file_path = os.path.join(
            temp_dir, f"{job_name}_inputs.{sheet_format}")
        gen_form_sheet(
            output_file_path=output_file_path,
            template_config_file_path=get_path("job_templ", wf_target=wf_target),
            has_multiple_runs=req["batch_mode"],
            run_names=req["run_names"],
            param_is_run_specific=req["param_modes"],
            show_please_fill=True,
            metadata={"workflow_name": wf_target}
        )
        data["get_form_sheet_href"] = url_for(
            "get_param_form_sheet",
            job_name=job_name,
            temp_dir_name=os.path.basename(temp_dir),
            access_token=access_token  # TODO: token in URL, should be changed
        )
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def terminate_runs():
    """Terminate, reset, or delete the given runs of a job."""
    messages = []
    data = {}

    def _report(msg_type, prefix, runs):
        messages.append({
            "time": get_time_string(),
            "type": msg_type,
            "text": prefix + ", ".join(runs)
        })

    try:
        req = request.get_json()
        login_required(access_token=req["access_token"])
        job_name = req["job_name"]
        run_names = sorted(req["run_names"])
        mode = req["mode"]  # one of terminate, reset, delete
        succeeded, could_not_be_terminated, could_not_be_cleaned = \
            terminate_runs_by_name(job_name, run_names, mode)
        if len(succeeded) > 0:
            _report("success",
                    "Successfully terminated/reset/deleted runs: ",
                    succeeded)
        if len(could_not_be_terminated) > 0:
            _report("warning",
                    "Following runs could not be terminated: ",
                    could_not_be_terminated)
        if len(could_not_be_cleaned) > 0:
            _report("warning",
                    "Following runs could not be cleaned: ",
                    could_not_be_cleaned)
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def get_run_details():
    """Return the log and input YAML contents for a single run."""
    messages = []
    data = {}
    try:
        data_req = request.get_json()
        login_required(access_token=data_req["access_token"])
        # fix: request.get_json() was redundantly called a second time here
        job_name = data_req["job_name"]
        run_name = data_req["run_name"]
        data = {
            "log": read_run_log(job_name, run_name),
            "yaml": read_run_input(job_name, run_name)
        }
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def get_job_templ_config_info():
    """Return each parameter with its default mode (global / job specific)
    plus the template metadata for a given workflow target."""
    messages = []
    param_config_info = []
    template_metadata = []
    try:
        req = request.get_json()
        login_required(access_token=req["access_token"])
        wf_target = req["wf_target"]
        param_config_info = get_job_templ_info("config", wf_target)
        template_metadata = get_job_templ_info("metadata", wf_target)
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({
        "data": {
            "params": param_config_info,
            "templ_meta": template_metadata,
        },
        "messages": messages
    })
def browse_dir():
    """Browse a directory within the allowed base dirs.

    Optionally pins browsing to a fixed base dir; on an invalid path it
    can fall back to listing a base dir instead of erroring
    (on_error_return_base_dir_items).
    """
    messages = []
    data = {}
    try:
        data_req = request.get_json()
        access_token = data_req["access_token"]
        login_required(access_token=access_token)
        path = remove_non_printable_characters(data_req["path"])
        ignore_files = data_req["ignore_files"]
        file_exts = data_req["file_exts"]
        show_only_hits = data_req["show_only_hits"]
        get_parent_dir = data_req["get_parent_dir"]
        allow_input = data_req["allow_input"]
        allow_upload = data_req["allow_upload"]
        allow_download = data_req["allow_download"]
        on_error_return_base_dir_items = data_req["on_error_return_base_dir_items"]
        # optional request fields:
        default_base_dir = data_req.get("default_base_dir")
        job_name = data_req.get("job_name")
        run_name = data_req.get("run_name")
        fixed_base_dir = data_req.get("fixed_base_dir")
        fixed_base_dir_name = data_req.get("fixed_base_dir_name", "FIXED_BASE_DIR")
        include_tmp_dir = data_req.get("include_tmp_dir", False)
        data["allowed_dirs"] = get_allowed_base_dirs(
            job_name=job_name,
            run_name=run_name,
            allow_input=allow_input,
            allow_upload=allow_upload,
            allow_download=allow_download,
            include_tmp_dir=include_tmp_dir
        )
        if fixed_base_dir is not None:
            # restrict browsing to the single requested base dir:
            assert check_if_path_in_dirs(fixed_base_dir, data["allowed_dirs"]) is not None, \
                "Fixed base dir is not allowed."
            data["allowed_dirs"] = {
                fixed_base_dir_name: {
                    "path": fixed_base_dir,
                    "mode": data["allowed_dirs"][
                        check_if_path_in_dirs(fixed_base_dir, data["allowed_dirs"])
                    ]["mode"]
                }
            }
        try:
            assert path != "" and os.path.exists(path), \
                "Path does not exist or you have no permission to enter it."
            path = normalize_path(path)
            if get_parent_dir or not os.path.isdir(path):
                path = os.path.dirname(path)
            data["base_dir"] = check_if_path_in_dirs(path, data["allowed_dirs"])
            assert data["base_dir"] is not None, \
                "Path does not exist or you have no permission to enter it."
            data["items"] = browse_dir_(path, ignore_files, file_exts, show_only_hits)
            data["dir"] = path
        except AssertionError as e:
            if on_error_return_base_dir_items:
                # fall back to listing a base dir instead of failing:
                if (default_base_dir is not None) \
                        and default_base_dir in data["allowed_dirs"].keys():
                    data["base_dir"] = default_base_dir
                else:
                    data["base_dir"] = list(data["allowed_dirs"].keys())[0]
                path = data["allowed_dirs"][data["base_dir"]]["path"]
                data["dir"] = path
                data["items"] = browse_dir_(path, ignore_files, file_exts, show_only_hits)
            else:
                # fix: was `raise AssertionError(str(e))`, which discarded
                # the original exception and its traceback
                raise
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def upload_wf():
    """Upload a workflow file (plus optional imports zip) and import it."""
    messages = []
    data = []
    try:
        meta = json_loads(request.form.get("meta"))
        login_required(access_token=meta["access_token"])
        wf_type = meta.get("wf_type")
        # the translate/name options are only relevant for janis scripts:
        translate_to_cwl = meta.get("translate_to_cwl", True)
        translate_to_wdl = meta.get("translate_to_wdl", True)
        wf_name_in_script = meta.get("wf_name_in_script")
        # store the workflow file in a fresh temp dir:
        assert 'wf_file' in request.files, 'No file received.'
        import_wf_file = request.files['wf_file']
        assert import_wf_file.filename != '', "No file specified."
        import_wf_filename = secure_filename(import_wf_file.filename)
        temp_dir = make_temp_dir()
        imported_wf_filepath = os.path.join(temp_dir, import_wf_filename)
        import_wf_file.save(imported_wf_filepath)
        # store imports.zip alongside it, if the client sent one:
        wf_imports_zip_filepath = None
        if 'wf_imports_zip' in request.files.keys():
            wf_imports_zip_filepath = os.path.join(temp_dir, "imports.zip")
            request.files['wf_imports_zip'].save(wf_imports_zip_filepath)
        # the import name defaults to the uploaded filename:
        if "import_name" in meta.keys() and meta["import_name"] != "":
            import_name = secure_filename(meta["import_name"])
        else:
            import_name = import_wf_filename
        import_wf_(
            wf_path=imported_wf_filepath,
            name=os.path.splitext(import_name)[0],
            wf_type=wf_type,
            wf_imports_zip_path=wf_imports_zip_filepath,
            translate_to_cwl=translate_to_cwl,
            translate_to_wdl=translate_to_wdl,
            wf_name_in_script=wf_name_in_script
        )
        try:
            # best-effort temp cleanup
            rmtree(temp_dir)
        except Exception:
            pass
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": import_wf_file.filename + " successfully imported."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def create_job_from_param_values():
    """Create a job from parameter values submitted by the client.

    The values are first written to an xlsx param sheet (with URI/path
    validation), then the job is created from that sheet. The temp dir
    holding the sheet is removed afterwards (best effort).
    """
    messages = []
    data = []
    temp_dir = make_temp_dir()
    try:
        data_req = request.get_json()
        access_token = data_req["access_token"]
        username = data_req["username"]
        login_required(access_token=access_token, username=username)
        param_values = data_req["param_values"]
        param_configs = data_req["param_configs"]
        wf_target = data_req["wf_target"]
        job_name = data_req["job_name"]
        import_filepath = os.path.join(temp_dir, "param_sheet.xlsx")
        validate_uris = data_req["validate_uris"]
        search_paths = data_req["search_paths"]
        search_dir = os.path.abspath(
            remove_non_printable_characters(data_req["search_dir"]))
        include_subdirs_for_searching = data_req[
            "include_subdirs_for_searching"]
        if search_paths:
            # the search dir must exist before paths can be resolved in it:
            assert os.path.isdir(search_dir), (
                "The specified search dir \"" + search_dir +
                "\" does not exist or is not a directory.")
        try:
            # fix: removed leftover debug print() that dumped the whole
            # request payload to stdout
            generate_xls_from_param_values(
                param_values=param_values,
                configs=param_configs,
                output_file=import_filepath,
                validate_uris=validate_uris,
                search_paths=search_paths,
                search_subdirs=include_subdirs_for_searching,
                allow_remote_uri=app.config["INPUT_SOURCES"]["URL"],
                allow_local_path=app.config["INPUT_SOURCES"]
                ["local_file_system"],
                input_dir=search_dir,
                metadata={"workflow_name": wf_target})
        except AssertionError as e:
            raise AssertionError(
                "The provided form failed validation: {}".format(str(e)))
        # create job:
        make_job_dir_tree(job_name)
        create_job_(job_name=job_name,
                    username=username,
                    job_param_sheet=import_filepath,
                    validate_uris=validate_uris,
                    search_paths=search_paths,
                    search_subdirs=include_subdirs_for_searching,
                    search_dir=search_dir,
                    sheet_format="xlsx")
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    if len(messages) == 0:
        messages.append({
            "type": "success",
            "text": f"Job {job_name} was successfully created. Please head over to \"Job Execution and Results\""
        })
    try:
        rmtree(temp_dir)
    except Exception:
        pass
    return jsonify({"data": data, "messages": messages})
def create_job_from_param_form_sheet():
    """Create a job from an uploaded, filled-in parameter form sheet."""
    messages = []
    data = []
    temp_dir = make_temp_dir()
    try:
        meta = json_loads(request.form.get("meta"))
        username = meta["username"]
        login_required(access_token=meta["access_token"], username=username)
        assert 'file' in request.files, 'No file received.'
        sheet_upload = request.files['file']
        assert sheet_upload.filename != '', "No file specified."
        assert is_allowed_file(sheet_upload.filename, type="spreadsheet"), \
            "Wrong file type. Only files with following extensions are allowed: " + \
            ", ".join(allowed_extensions_by_type["spreadsheet"])
        sheet_format = os.path.splitext(
            sheet_upload.filename)[1].strip(".").lower()
        job_name = meta["job_name"]
        sheet_path = os.path.join(temp_dir, f"param_sheet.{sheet_format}")
        sheet_upload.save(sheet_path)
        validate_uris = meta["validate_uris"]
        search_paths = meta["search_paths"]
        search_dir = os.path.abspath(
            remove_non_printable_characters(meta["search_dir"]))
        search_subdirs = meta["include_subdirs_for_searching"]
        if search_paths:
            # the search dir must exist before paths can be resolved in it:
            assert os.path.isdir(search_dir), (
                "The specified search dir \"" + search_dir
                + "\" does not exist or is not a directory.")
        # validate the uploaded form sheet before creating anything:
        validation_result = only_validate_xls(
            sheet_file=sheet_path,
            validate_uris=validate_uris,
            search_paths=search_paths,
            search_subdirs=search_subdirs,
            allow_remote_uri=app.config["INPUT_SOURCES"]["URL"],
            allow_local_path=app.config["INPUT_SOURCES"]["local_file_system"],
            input_dir=search_dir)
        assert validation_result == "VALID", \
            "The provided form failed validation: {}".format(validation_result)
        # create job:
        make_job_dir_tree(job_name)
        create_job_(job_name=job_name,
                    username=username,
                    job_param_sheet=sheet_path,
                    validate_uris=validate_uris,
                    search_paths=search_paths,
                    search_subdirs=search_subdirs,
                    search_dir=search_dir,
                    sheet_format=sheet_format)
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    if len(messages) == 0:
        messages.append({
            "type": "success",
            "text": f"Job {job_name} was successfully created. Please head over to \"Job Execution and Results\""
        })
    try:
        rmtree(temp_dir)
    except Exception:
        pass
    return jsonify({"data": data, "messages": messages})