def pipelines_json(project_uuid, pipeline_uuid):
    """Read (GET) or persist (POST) a pipeline's ``.orchest`` definition file.

    Args:
        project_uuid: UUID of the project the pipeline belongs to.
        pipeline_uuid: UUID of the pipeline whose definition is handled.

    Query args ``job_uuid`` and ``pipeline_run_uuid`` (both optional) are
    forwarded to the path helpers so job/run specific copies of the file
    can be addressed.

    Returns:
        A Flask JSON response; 400 on a missing POST payload, 404 when the
        file does not exist on GET.
    """
    pipeline_json_path = get_pipeline_path(
        pipeline_uuid,
        project_uuid,
        request.args.get("job_uuid"),
        request.args.get("pipeline_run_uuid"),
    )

    if request.method == "POST":
        pipeline_directory = get_pipeline_directory(
            pipeline_uuid,
            project_uuid,
            request.args.get("job_uuid"),
            request.args.get("pipeline_run_uuid"),
        )

        # Guard against a missing form field: json.loads(None) would raise
        # TypeError and surface as an HTTP 500 instead of a client error.
        raw_pipeline_json = request.form.get("pipeline_json")
        if raw_pipeline_json is None:
            return (
                jsonify(
                    {
                        "success": False,
                        "reason": "No pipeline_json was provided.",
                    }
                ),
                400,
            )

        # Parse JSON.
        pipeline_json = json.loads(raw_pipeline_json)

        # Side effect: for each Notebook in the pipeline.json set the
        # correct kernel.
        pipeline_set_notebook_kernels(
            pipeline_json, pipeline_directory, project_uuid
        )

        with open(pipeline_json_path, "w") as json_file:
            json.dump(pipeline_json, json_file, indent=4, sort_keys=True)

        # Analytics call.
        send_anonymized_pipeline_definition(app, pipeline_json)

        return jsonify({"message": "Successfully saved pipeline."})

    elif request.method == "GET":
        if not os.path.isfile(pipeline_json_path):
            return (
                jsonify(
                    {
                        "success": False,
                        "reason": ".orchest file doesn't exist at location %s"
                        % pipeline_json_path,
                    }
                ),
                404,
            )
        else:
            with open(pipeline_json_path, "r") as json_file:
                pipeline_json = json.load(json_file)

            # json.dumps because the front end expects it as a
            # string.
            return jsonify(
                {"success": True, "pipeline_json": json.dumps(pipeline_json)}
            )

    # Fallback for any other HTTP method the route might allow.
    return ""
def pipelines_json(project_uuid, pipeline_uuid):
    """Read (GET) or persist (POST) a pipeline's ``.orchest`` definition file.

    On POST the submitted definition is parsed, the step files referenced
    by it are created, Notebook kernels are synchronized, and the file is
    written back sorted/indented. On GET the file is returned as a JSON
    string, back-filling a ``parameters`` key for old pipelines.

    Args:
        project_uuid: UUID of the project the pipeline belongs to.
        pipeline_uuid: UUID of the pipeline whose definition is handled.

    Returns:
        A Flask JSON response; 400 on a missing POST payload, 404 when the
        file does not exist on GET.
    """
    pipeline_json_path = get_pipeline_path(
        pipeline_uuid,
        project_uuid,
        request.args.get("job_uuid"),
        request.args.get("pipeline_run_uuid"),
    )

    if request.method == "POST":
        pipeline_directory = get_pipeline_directory(
            pipeline_uuid,
            project_uuid,
            request.args.get("job_uuid"),
            request.args.get("pipeline_run_uuid"),
        )

        # Guard against a missing form field: json.loads(None) would raise
        # TypeError and surface as an HTTP 500 instead of a client error.
        raw_pipeline_json = request.form.get("pipeline_json")
        if raw_pipeline_json is None:
            return (
                jsonify(
                    {
                        "success": False,
                        "reason": "No pipeline_json was provided.",
                    }
                ),
                400,
            )

        # Parse JSON.
        pipeline_json = json.loads(raw_pipeline_json)

        # First create all files part of pipeline_json definition
        # TODO: consider removing other files (no way to do this
        # reliably, special case might be rename).
        create_pipeline_files(pipeline_json, pipeline_directory, project_uuid)

        # Side effect: for each Notebook in the pipeline.json set the
        # correct kernel.
        pipeline_set_notebook_kernels(
            pipeline_json, pipeline_directory, project_uuid
        )

        with open(pipeline_json_path, "w") as json_file:
            json.dump(pipeline_json, json_file, indent=4, sort_keys=True)

        # Analytics call.
        send_anonymized_pipeline_definition(app, pipeline_json)

        return jsonify({"message": "Successfully saved pipeline."})

    elif request.method == "GET":
        if not os.path.isfile(pipeline_json_path):
            return (
                jsonify(
                    {
                        "success": False,
                        "reason": ".orchest file doesn't exist at location %s"
                        % pipeline_json_path,
                    }
                ),
                404,
            )
        else:
            with open(pipeline_json_path) as json_file:
                pipeline_json = json.load(json_file)

            # Take care of old pipelines with no defined params.
            if "parameters" not in pipeline_json:
                pipeline_json["parameters"] = {}

            # json.dumps because the front end expects it as a
            # string.
            return jsonify(
                {"success": True, "pipeline_json": json.dumps(pipeline_json)}
            )

    # Fallback for any other HTTP method the route might allow.
    return ""
def pipelines_json(project_uuid, pipeline_uuid):
    """Read (GET) or persist (POST) a pipeline's ``.orchest`` definition file.

    On POST the submitted definition is validated (step paths must stay
    inside the project or data directory, ``check_pipeline_correctness``
    must pass), Notebook kernels are synchronized, the file is rewritten
    with sorted keys, a rename is propagated to the orchest-api, and an
    analytics event is sent. On GET the definition is returned as a JSON
    string.

    Args:
        project_uuid: UUID of the project the pipeline belongs to.
        pipeline_uuid: UUID of the pipeline whose definition is handled.

    Returns:
        A Flask JSON response; 400 on an invalid/missing POST payload or
        bad Notebook metadata, 404 when the file does not exist on GET.

    Raises:
        app_error.OutOfAllowedDirectoryError: if a step's file path is
            neither inside the project nor the data directory.
    """
    if request.method == "POST":
        # POST deliberately passes job_uuid=None: saving only targets the
        # interactive pipeline, never a job's snapshot.
        pipeline_json_path = get_pipeline_path(
            pipeline_uuid,
            project_uuid,
            None,
            request.args.get("pipeline_run_uuid"),
        )
        pipeline_directory = get_pipeline_directory(
            pipeline_uuid,
            project_uuid,
            None,
            request.args.get("pipeline_run_uuid"),
        )

        # Guard against a missing form field: json.loads(None) would raise
        # TypeError and surface as an HTTP 500 instead of a client error.
        raw_pipeline_json = request.form.get("pipeline_json")
        if raw_pipeline_json is None:
            return (
                jsonify(
                    {
                        "success": False,
                        "reason": "No pipeline_json was provided.",
                    }
                ),
                400,
            )

        # Parse JSON.
        pipeline_json = json.loads(raw_pipeline_json)

        # Normalize relative paths.
        for step in pipeline_json["steps"].values():
            is_project_file = is_valid_pipeline_relative_path(
                project_uuid, pipeline_uuid, step["file_path"]
            )
            is_data_file = is_valid_data_path(step["file_path"])

            if not (is_project_file or is_data_file):
                raise app_error.OutOfAllowedDirectoryError(
                    "File is neither in the project, nor in the data directory."
                )

            # Absolute paths (leading "/") point into the data directory
            # and are left untouched.
            if not step["file_path"].startswith("/"):
                step["file_path"] = normalize_project_relative_path(
                    step["file_path"]
                )

        errors = check_pipeline_correctness(pipeline_json)
        if errors:
            # A dict's default iteration already yields its keys, so no
            # comprehension is needed to list the offending fields.
            return (
                jsonify(
                    {
                        "success": False,
                        "reason": f"Invalid value: {', '.join(errors)}.",
                    }
                ),
                400,
            )

        # Side effect: for each Notebook in the pipeline.json set the
        # correct kernel.
        try:
            pipeline_set_notebook_kernels(
                pipeline_json, pipeline_directory, project_uuid
            )
        except KeyError:
            msg = {
                "success": False,
                "reason": "Invalid Notebook metadata structure.",
            }
            return jsonify(msg), 400

        # NOTE(review): this read assumes the .orchest file already exists
        # for a POST; a missing file raises FileNotFoundError (HTTP 500).
        # Confirm whether creation is guaranteed upstream.
        with open(pipeline_json_path, "r") as json_file:
            old_pipeline_json = json.load(json_file)

        # Save the pipeline JSON again to make sure its keys are
        # sorted.
        with open(pipeline_json_path, "w") as json_file:
            json.dump(pipeline_json, json_file, indent=4, sort_keys=True)

        # Propagate a pipeline rename to the orchest-api so both stores
        # stay consistent.
        if old_pipeline_json["name"] != pipeline_json["name"]:
            resp = requests.put(
                (
                    f'http://{current_app.config["ORCHEST_API_ADDRESS"]}'
                    f"/api/pipelines/{project_uuid}/{pipeline_uuid}"
                ),
                json={"name": pipeline_json["name"]},
            )
            if resp.status_code != 200:
                return (
                    jsonify({"message": "Failed to PUT name to orchest-api."}),
                    resp.status_code,
                )

        # Analytics call.
        analytics.send_event(
            app,
            analytics.Event.PIPELINE_SAVE,
            {"pipeline_definition": pipeline_json},
        )

        return jsonify(
            {"success": True, "message": "Successfully saved pipeline."}
        )

    elif request.method == "GET":
        pipeline_json_path = get_pipeline_path(
            pipeline_uuid,
            project_uuid,
            request.args.get("job_uuid"),
            request.args.get("pipeline_run_uuid"),
        )

        if not os.path.isfile(pipeline_json_path):
            return (
                jsonify(
                    {
                        "success": False,
                        "reason": ".orchest file doesn't exist at location "
                        + pipeline_json_path,
                    }
                ),
                404,
            )
        else:
            pipeline_json = get_pipeline_json(pipeline_uuid, project_uuid)

            # json.dumps because the front end expects it as a string.
            return jsonify(
                {"success": True, "pipeline_json": json.dumps(pipeline_json)}
            )