def upload_cwl_zip():
    """Accept an uploaded zip archive of a CWL workflow and extract it.

    Expects a multipart form with a "meta" JSON field carrying the
    access_token and a "file" part holding the zip archive. On success,
    data["temp_dir"] points at the extraction directory (which the
    client is expected to reference in a follow-up request); errors are
    reported via the messages list of the JSON response.
    """
    messages = []
    data = {}
    try:
        metadata = json_loads(request.form.get("meta"))
        access_token = metadata["access_token"]
        login_required(access_token=access_token)
        assert 'file' in request.files, 'No file received.'
        import_wf_file = request.files['file']
        assert import_wf_file.filename != '', "No file specified."
        # BUG FIX: the check uses type="zip", so the error message must
        # list the zip extensions, not the CWL ones.
        assert is_allowed_file(import_wf_file.filename, type="zip"), (
            "Wrong file type. Only files with following extensions are allowed: " +
            ", ".join(allowed_extensions_by_type["zip"]))
        # save the uploaded archive to a temporary location:
        import_wf_filename = secure_filename(import_wf_file.filename)
        temp_upload_dir = make_temp_dir()
        imported_filepath = os.path.join(temp_upload_dir, import_wf_filename)
        import_wf_file.save(imported_filepath)
        # extract into a second temp dir that is handed back to the client:
        temp_extract_dir = make_temp_dir()
        unzip_dir(imported_filepath, temp_extract_dir)
        # best-effort cleanup of the raw upload; the extraction dir must
        # survive this request:
        try:
            rmtree(temp_upload_dir)
        except Exception:
            pass
        data["temp_dir"] = temp_extract_dir
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": import_wf_file.filename +
                    " was successfully uploaded and extracted."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def upload_wf():
    """Import a single uploaded workflow file.

    Expects the workflow in the "file" part of a multipart form and a
    "meta" JSON field that may carry optional "wf_type" and
    "import_name" entries. Returns a JSON payload with a messages list
    describing the outcome.
    """
    messages = []
    data = []
    temp_dir = None
    try:
        login_required()
        assert 'file' in request.files, 'No file received.'
        import_file = request.files['file']
        assert import_file.filename != '', "No file specified."
        metadata = json_loads(request.form.get("meta"))
        import_filename = secure_filename(import_file.filename)
        # optional metadata field; None means "let the importer decide":
        wf_type = metadata.get("wf_type")
        temp_dir = make_temp_dir()
        imported_filepath = os.path.join(temp_dir, import_filename)
        import_file.save(imported_filepath)
        # an explicit, non-empty import name overrides the filename:
        import_name = secure_filename(metadata["import_name"]) \
            if metadata.get("import_name") \
            else import_filename
        import_wf_(wf_path=imported_filepath,
                   name=import_name,
                   wf_type=wf_type)
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": import_file.filename + " successfully imported."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    finally:
        # BUG FIX: the temp dir is now removed even when the import
        # fails (previously it leaked on any exception).
        if temp_dir is not None:
            try:
                rmtree(temp_dir)
            except Exception:
                pass
    return jsonify({"data": data, "messages": messages})
def list_avail_wfs_in_janis():
    """List the workflow names defined inside an uploaded Janis file.

    Expects the file in the "wf_file" part of a multipart form and a
    "meta" JSON field carrying the access_token. On success,
    data["avail_wfs"] holds the discovered workflow names.
    """
    messages = []
    data = []
    temp_dir = None
    try:
        metadata = json_loads(request.form.get("meta"))
        access_token = metadata["access_token"]
        login_required(access_token=access_token)
        # save the file to a temp dir so Janis can inspect it:
        assert 'wf_file' in request.files, 'No file received.'
        import_wf_file = request.files['wf_file']
        assert import_wf_file.filename != '', "No file specified."
        import_wf_filename = secure_filename(import_wf_file.filename)
        temp_dir = make_temp_dir()
        imported_wf_filepath = os.path.join(temp_dir, import_wf_filename)
        import_wf_file.save(imported_wf_filepath)
        # inspect the file for workflow definitions:
        avail_wfs = list_workflows_in_janis_file(
            file=imported_wf_filepath,
            only_return_name=True
        )
        assert len(avail_wfs) > 0, \
            "No workflow definition could be found in the provided Janis file."
        data = {"avail_wfs": avail_wfs}
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    finally:
        # BUG FIX: the temp dir is now removed even when inspection
        # raises (previously it leaked on any exception).
        if temp_dir is not None:
            try:
                rmtree(temp_dir)
            except Exception:
                pass
    return jsonify({"data": data, "messages": messages})
def generate_param_form_sheet():
    """Generate a parameter form sheet from the client's JSON request
    and return a download link for it."""
    messages = []
    data = {}
    try:
        req = request.get_json()
        access_token = req["access_token"]
        login_required(access_token=access_token)
        sheet_format = req["sheet_format"]
        job_name = req["job_name"]
        wf_target = req["wf_target"]
        param_modes = req["param_modes"]
        run_names = req["run_names"]
        batch_mode = req["batch_mode"]
        # this temp dir intentionally outlives the request: the download
        # endpoint reads from it (it still needs to be cleaned up later)
        temp_dir = make_temp_dir()
        temp_dir_name = os.path.basename(temp_dir)
        output_file_path = os.path.join(
            temp_dir, f"{job_name}_inputs.{sheet_format}"
        )
        gen_form_sheet(
            output_file_path=output_file_path,
            template_config_file_path=get_path("job_templ", wf_target=wf_target),
            has_multiple_runs=batch_mode,
            run_names=run_names,
            param_is_run_specific=param_modes,
            show_please_fill=True,
            metadata={"workflow_name": wf_target}
        )
        data["get_form_sheet_href"] = url_for(
            "get_param_form_sheet",
            job_name=job_name,
            temp_dir_name=temp_dir_name,
            access_token=access_token  ## should be changed
        )
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    return jsonify({"data": data, "messages": messages})
def create_job_from_param_values():
    """Create a job from parameter values submitted as JSON.

    The values are validated (optionally checking URIs and searching
    the given directory for referenced files), written to a temporary
    xlsx parameter sheet, and the job is created from that sheet.
    """
    messages = []
    data = []
    temp_dir = make_temp_dir()
    try:
        data_req = request.get_json()
        access_token = data_req["access_token"]
        username = data_req["username"]
        login_required(access_token=access_token, username=username)
        param_values = data_req["param_values"]
        param_configs = data_req["param_configs"]
        wf_target = data_req["wf_target"]
        job_name = data_req["job_name"]
        import_filepath = os.path.join(temp_dir, "param_sheet.xlsx")
        validate_uris = data_req["validate_uris"]
        search_paths = data_req["search_paths"]
        search_dir = os.path.abspath(
            remove_non_printable_characters(data_req["search_dir"]))
        include_subdirs_for_searching = data_req[
            "include_subdirs_for_searching"]
        if search_paths:
            # test if search dir exists:
            assert os.path.isdir(search_dir), (
                "The specified search dir \"" + search_dir +
                "\" does not exist or is not a directory.")
        # BUG FIX: removed a leftover debug print() that dumped the whole
        # request payload (including user-provided paths) to stdout.
        try:
            generate_xls_from_param_values(
                param_values=param_values,
                configs=param_configs,
                output_file=import_filepath,
                validate_uris=validate_uris,
                search_paths=search_paths,
                search_subdirs=include_subdirs_for_searching,
                allow_remote_uri=app.config["INPUT_SOURCES"]["URL"],
                allow_local_path=app.config["INPUT_SOURCES"]["local_file_system"],
                input_dir=search_dir,
                metadata={"workflow_name": wf_target})
        except AssertionError as e:
            # re-raise with a user-facing prefix so it is handled as a
            # known error by the outer except:
            raise AssertionError(
                "The provided form failed validation: {}".format(str(e)))
        # create job:
        make_job_dir_tree(job_name)
        create_job_(job_name=job_name,
                    username=username,
                    job_param_sheet=import_filepath,
                    validate_uris=validate_uris,
                    search_paths=search_paths,
                    search_subdirs=include_subdirs_for_searching,
                    search_dir=search_dir,
                    sheet_format="xlsx")
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    if len(messages) == 0:
        messages.append({
            "type": "success",
            "text": f"Job {job_name} was successfully created. Please head over to \"Job Execution and Results\""
        })
    try:
        rmtree(temp_dir)
    except Exception:
        pass
    return jsonify({"data": data, "messages": messages})
def create_job_from_param_form_sheet():
    """Create a job from an uploaded, filled-in parameter form sheet.

    The sheet is validated first; only a sheet that passes validation
    is turned into a job. The temp dir holding the upload is removed
    regardless of outcome.
    """
    messages = []
    data = []
    temp_dir = make_temp_dir()
    try:
        meta = json_loads(request.form.get("meta"))
        token = meta["access_token"]
        user = meta["username"]
        login_required(access_token=token, username=user)
        assert 'file' in request.files, 'No file received.'
        sheet_upload = request.files['file']
        assert sheet_upload.filename != '', "No file specified."
        assert is_allowed_file(sheet_upload.filename, type="spreadsheet"), \
            "Wrong file type. Only files with following extensions are allowed: " + \
            ", ".join(allowed_extensions_by_type["spreadsheet"])
        # the sheet format is derived from the upload's file extension:
        sheet_format = os.path.splitext(
            sheet_upload.filename)[1].strip(".").lower()
        job_name = meta["job_name"]
        sheet_path = os.path.join(temp_dir, f"param_sheet.{sheet_format}")
        sheet_upload.save(sheet_path)
        validate_uris = meta["validate_uris"]
        search_paths = meta["search_paths"]
        search_dir = os.path.abspath(
            remove_non_printable_characters(meta["search_dir"]))
        search_subdirs = meta["include_subdirs_for_searching"]
        if search_paths:
            # the search dir must exist before paths can be resolved:
            assert os.path.isdir(search_dir), (
                "The specified search dir \"" + search_dir +
                "\" does not exist or is not a directory.")
        # validate the uploaded form sheet:
        validation_result = only_validate_xls(
            sheet_file=sheet_path,
            validate_uris=validate_uris,
            search_paths=search_paths,
            search_subdirs=search_subdirs,
            allow_remote_uri=app.config["INPUT_SOURCES"]["URL"],
            allow_local_path=app.config["INPUT_SOURCES"]["local_file_system"],
            input_dir=search_dir)
        assert validation_result == "VALID", \
            "The provided form failed validation: {}".format(validation_result)
        # only a valid sheet gets this far; build the job:
        make_job_dir_tree(job_name)
        create_job_(job_name=job_name,
                    username=user,
                    job_param_sheet=sheet_path,
                    validate_uris=validate_uris,
                    search_paths=search_paths,
                    search_subdirs=search_subdirs,
                    search_dir=search_dir,
                    sheet_format=sheet_format)
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    if len(messages) == 0:
        messages.append({
            "type": "success",
            "text": f"Job {job_name} was successfully created. Please head over to \"Job Execution and Results\""
        })
    try:
        rmtree(temp_dir)
    except Exception:
        pass
    return jsonify({"data": data, "messages": messages})
def upload_wf():
    """Import an uploaded workflow file, optionally with an imports.zip.

    Options come from the "meta" JSON form field: wf_type, the
    Janis-only flags translate_to_cwl / translate_to_wdl (default True),
    wf_name_in_script, and an optional import_name that overrides the
    filename as import name.

    NOTE(review): another ``upload_wf`` is defined earlier in this
    module; this later definition shadows it at module level — confirm
    which one the route decorators actually bind.
    """
    messages = []
    data = []
    temp_dir = None
    try:
        metadata = json_loads(request.form.get("meta"))
        access_token = metadata["access_token"]
        login_required(access_token=access_token)
        # optional metadata fields (Janis-specific flags default to True):
        wf_type = metadata.get("wf_type")
        translate_to_cwl = metadata.get("translate_to_cwl", True)
        translate_to_wdl = metadata.get("translate_to_wdl", True)
        wf_name_in_script = metadata.get("wf_name_in_script")
        # save the workflow file to a temp dir:
        assert 'wf_file' in request.files, 'No file received.'
        import_wf_file = request.files['wf_file']
        assert import_wf_file.filename != '', "No file specified."
        import_wf_filename = secure_filename(import_wf_file.filename)
        temp_dir = make_temp_dir()
        imported_wf_filepath = os.path.join(temp_dir, import_wf_filename)
        import_wf_file.save(imported_wf_filepath)
        # if provided, save the accompanying imports.zip next to it:
        wf_imports_zip_filepath = None
        if 'wf_imports_zip' in request.files.keys():
            wf_imports_zip_file = request.files['wf_imports_zip']
            wf_imports_zip_filepath = os.path.join(temp_dir, "imports.zip")
            wf_imports_zip_file.save(wf_imports_zip_filepath)
        # an explicit, non-empty import name overrides the filename:
        import_name = secure_filename(metadata["import_name"]) \
            if metadata.get("import_name") \
            else import_wf_filename
        import_wf_(wf_path=imported_wf_filepath,
                   name=os.path.splitext(import_name)[0],
                   wf_type=wf_type,
                   wf_imports_zip_path=wf_imports_zip_filepath,
                   translate_to_cwl=translate_to_cwl,
                   translate_to_wdl=translate_to_wdl,
                   wf_name_in_script=wf_name_in_script)
        messages.append({
            "time": get_time_string(),
            "type": "success",
            "text": import_wf_file.filename + " successfully imported."
        })
    except AssertionError as e:
        messages.append(handle_known_error(e, return_front_end_message=True))
    except Exception as e:
        messages.append(handle_unknown_error(e, return_front_end_message=True))
    finally:
        # BUG FIX: the temp dir is now removed even when the import
        # fails (previously it leaked on any exception).
        if temp_dir is not None:
            try:
                rmtree(temp_dir)
            except Exception:
                pass
    return jsonify({"data": data, "messages": messages})