def get(self, id):
    """Get information on workflow by workflow id"""
    # Scope the lookup to the caller's JWT groups so users can only
    # see workflows belonging to a workgroup they are a member of.
    query = (
        db.session.query(WorkflowExecution)
        .filter(WorkflowExecution.fargateTaskArn == id)
        .filter(WorkflowExecution.workgroup.in_(get_jwt_groups()))
    )
    try:
        return query.one()
    except sqlalchemy.orm.exc.NoResultFound:
        # Not found and not-authorized are indistinguishable by design.
        abort(404)
def get(self):
    """Returns the various api keys"""
    # Only hand back keys for workgroups the user actually belongs to:
    # the intersection of configured workgroups and the JWT's groups.
    configured = set(current_app.config["WORKGROUPS"].keys())
    member_of = set(get_jwt_groups())
    keys = {}
    for group in configured & member_of:
        keys[group] = current_app.config["WORKGROUPS"][group]["API_KEY"]
    return jsonify({"WORKGROUP_API_KEY": keys})
def get(self, args):
    """List all workflows"""
    # Base query: one row per workflow execution, left-joined against
    # per-workflow task counts (submitted / running / completed) derived
    # from task_execution events.  count(cond OR NULL) counts only rows
    # where cond is true.
    sql = [
        """
        SELECT
            w."fargateTaskArn",
            w."fargateCreatedAt",
            w."nextflowRunName",
            w."fargateLastStatus" as runnerTaskStatus,
            w."nextflowLastEvent" as nextflowLastEvent,
            w."nextflowMetadata"->'workflow'->'manifest' as manifest,
            w."cacheTaskArn",
            w."username",
            w."workgroup",
            task_counts."submitted_task_count",
            task_counts."running_task_count",
            task_counts."completed_task_count"
        FROM workflow_execution as w
        LEFT JOIN (
            SELECT
                t."fargateTaskArn",
                count(t."taskLastEvent" = 'process_submitted' OR NULL) submitted_task_count,
                count(t."taskLastEvent" = 'process_started' OR NULL) running_task_count,
                count(t."taskLastEvent" = 'process_completed' OR NULL) completed_task_count
            FROM task_execution as t
            GROUP BY t."fargateTaskArn"
        ) as task_counts
        ON task_counts."fargateTaskArn" = w."fargateTaskArn"
        """
    ]
    # Authorization filter is always present: restrict results to the
    # caller's JWT groups.  All filters use bound parameters (no string
    # interpolation of user input into the SQL).
    where_statements = ['w."workgroup" = any(:workgroup_list)']
    where_args = {"workgroup_list": get_jwt_groups()}
    # Optional filter: by username; the sentinel "me" resolves to the
    # JWT identity of the caller.
    if "username" in args:
        where_statements += ['w."username" = :username']
        if args["username"] == "me":
            where_args["username"] = get_jwt_identity()
        else:
            where_args["username"] = args["username"]
    # Optional filter: narrow to a single workgroup (ANDed with the
    # group-membership filter above).
    if "workgroup" in args:
        where_statements += ['w."workgroup" = :workgroup']
        where_args["workgroup"] = args["workgroup"]
    # Optional filter: by last nextflow lifecycle event (status).
    if "status" in args:
        where_statements += ['w."nextflowLastEvent" = :status']
        where_args["status"] = args["status"]
    # NOTE(review): where_statements always has at least the workgroup
    # filter, so this condition is always true; kept for safety.
    if len(where_statements) > 0:
        sql += ["WHERE"]
        sql.extend([" AND ".join(where_statements)])
    # Newest workflows first.
    sql += ['ORDER BY w."fargateCreatedAt" DESC;']
    res = db.session.execute("\n".join(sql), where_args)
    res = [dict(row) for row in res]
    return jsonify(res)
def get(self, id):
    """Get top level workflow logs"""
    # Look up the workflow, restricted to the caller's JWT groups;
    # a miss (or a workflow outside those groups) is a 404.
    try:
        workflow = (
            db.session.query(WorkflowExecution)
            .filter(WorkflowExecution.fargateTaskArn == id)
            .filter(WorkflowExecution.workgroup.in_(get_jwt_groups()))
            .one()
        )
    except sqlalchemy.orm.exc.NoResultFound:
        abort(404)
    # Fetch the most recent events first from the runner's log stream.
    return logs_client.get_log_events(
        logGroupName=workflow.fargateLogGroupName,
        logStreamName=workflow.fargateLogStreamName,
        startFromHead=False,
    )
def get(self):
    """Return the caller's identity and the workgroups they belong to.

    The response contains the JWT username plus, for each workgroup that
    is both configured in the app and present in the caller's JWT groups,
    a descriptor with display name and nextflow launch defaults.
    """
    defined = current_app.config["WORKGROUPS"]
    # Only expose workgroups the user is actually a member of.
    member_groups = set(defined.keys()) & set(get_jwt_groups())
    # Idiomatic list comprehension (replaces map(lambda ...)); config
    # lookup hoisted so each group is dereferenced once.
    workgroups = [
        {
            "display_name": defined[g].get("DISPLAY_NAME", g),
            "name": g,
            "default_work_dir": defined[g].get("NEXTFLOW_S3_WORK_DIR", ""),
            "default_profile": defined[g].get("NEXTFLOW_DEFAULT_PROFILE", ""),
        }
        for g in member_groups
    ]
    return jsonify({
        "username": get_jwt_identity(),
        "workgroups": workgroups,
    })
def post(self, args):
    """Submit new workflow for execution.

    Launch path is resolved in order:
      * web launch: a ``workgroup`` the JWT user belongs to, or
      * headless launch: a valid ``api_key`` mapped to a workgroup.
    The workflow source is one of uploaded files, a git url, or an s3
    url.  A Fargate task is started via ECS and the execution is
    recorded in the database.

    Returns the new WorkflowExecution row, or a (json, status) error
    tuple on failure.
    """
    # 0. define execution environment variables
    if args.get("workgroup") in current_app.config["WORKGROUPS"]:
        # user-initiated launch via web interface
        WORKGROUP = args["workgroup"]
        if WORKGROUP not in get_jwt_groups():
            return jsonify({"error": "User is not part of group"}), 401
        else:
            env = current_app.config["WORKGROUPS"][WORKGROUP]
            username = get_jwt_identity()
    elif validate_api_key(args.get("api_key")):
        # headless or API-driven launch
        WORKGROUP = get_workgroup_from_api_key(args["api_key"])
        env = current_app.config["WORKGROUPS"][WORKGROUP]
        username = "******"
    else:
        return jsonify(
            {"error": "Must specify a valid workgroup or api_key in POST"}), 500

    nextflow_options = ["-with-trace"]
    additional_env_vars = []
    uuid_key = self._generate_key()
    workflow_loc = None
    config_loc = None
    params_file_loc = None

    if ("nextflow_workflow" in args) and ("nextflow_config" in args):
        # 1a. If a workflow and config file was uploaded, stage both to
        # S3 under a uuid-derived prefix for the runner to download.
        workflow_loc = "%s/%s/%s/main.nf" % (env["NEXTFLOW_S3_SCRIPTS"],
                                             uuid_key[0:2], uuid_key)
        config_loc = "%s/%s/%s/nextflow.config" % (
            env["NEXTFLOW_S3_SCRIPTS"], uuid_key[0:2], uuid_key)
        try:
            self._upload_to_s3(workflow_loc, args["nextflow_workflow"])
            self._upload_to_s3(config_loc, args["nextflow_config"])
        except botocore.exceptions.ClientError:
            return jsonify({"error": "unable to save scripts"}), 500
        execution_type = "FILES"
        command = ["runner.sh", workflow_loc, config_loc]
    elif ("git_url" in args):
        # 1b. Or, if a git url is provided
        execution_type = "GIT_URL"
        command = [
            "runner.sh", args["git_url"],
            args.get("git_hash", "master")
        ]
    elif ("s3_url" in args):
        # 1c. Or, a s3 url
        execution_type = "S3_URL"
        command = ["runner.sh", args["s3_url"]]
    else:
        # Fixed: removed debug print(args) — it dumped the raw request
        # payload (which may include an api_key) to stdout/logs.
        return jsonify({"error": "Invalid nextflow commands"}), 500

    if args.get("nextflow_params", "") != "":
        # upload params_file to S3 if provided. Runner.sh downloads this.
        params_file_loc = "%s/%s/%s/params.json" % (
            env["NEXTFLOW_S3_SCRIPTS"], uuid_key[0:2], uuid_key)
        try:
            self._upload_to_s3(params_file_loc, args["nextflow_params"])
        except botocore.exceptions.ClientError:
            return jsonify({"error": "unable to save params file."}), 500
        nextflow_options.append("-params-file params.json")
        additional_env_vars.append({
            "name": "NF_PARAMS_FILE",
            "value": params_file_loc
        })

    if args.get("nextflow_profile", "") != "":
        nextflow_options.append("-profile " + args["nextflow_profile"])
    if args.get("nextflow_workdir", "") != "":
        nextflow_options.append("-work-dir " + args["nextflow_workdir"])

    if args.get("resume_fargate_task_arn", "") != "":
        # resume from prior nextflow execution
        resume_fargate_task_arn = args["resume_fargate_task_arn"]
        # ensure this arn was run as part of current group
        try:
            w = db.session.query(WorkflowExecution)\
                .filter(WorkflowExecution.fargateTaskArn==resume_fargate_task_arn).one()
        except sqlalchemy.orm.exc.NoResultFound:
            abort(404)
        if w.workgroup != WORKGROUP:
            return jsonify({
                "error":
                "You can only resume from workflows in the same workgroup"
            }), 401
        nextflow_options.append("-resume")
        additional_env_vars.append({
            "name": "NF_SESSION_CACHE_ARN",
            "value": resume_fargate_task_arn
        })
    else:
        resume_fargate_task_arn = ""

    try:
        res = ecs_client.run_task(
            cluster=env["ECS_CLUSTER"],
            taskDefinition=current_app.config["NEXTFLOW_TASK_DEFINITION"],
            overrides={
                "taskRoleArn": env["IAM_TASK_ROLE_ARN"],
                "containerOverrides": [{
                    "name": "nextflow",
                    "command": command,
                    "environment": [{
                        "name": "EXECUTION_TYPE",
                        "value": execution_type
                    }, {
                        "name": "API_ENDPOINT",
                        "value": current_app.config["API_ENDPOINT"]
                    }, {
                        "name": "API_KEY",
                        "value": env["API_KEY"]
                    }, {
                        "name": "NEXTFLOW_OPTIONS",
                        "value": " ".join(nextflow_options)
                    }, {
                        "name": "NF_SESSION_CACHE_DIR",
                        "value": env["NEXTFLOW_S3_SESSION_CACHE"]
                    }, *additional_env_vars],
                }]
            },
            launchType="FARGATE",
            networkConfiguration={
                "awsvpcConfiguration": {
                    "subnets": env["ECS_SUBNETS"],
                    "assignPublicIp": "ENABLED"
                },
            },
        )
    except botocore.exceptions.ClientError as e:
        # Fixed: a ClientError instance is not JSON-serializable, so
        # jsonify({... "msg": e}) raised TypeError inside the error
        # handler itself; serialize it to a string instead.
        return jsonify({"error": "unable to launch job", "msg": str(e)}), 500

    taskArn = res['tasks'][0]['taskArn'].split(":task/")[1]

    # save to database -- must serialize the date to string first
    infoJson = res['tasks'][0].copy()
    infoJson['createdAt'] = str(infoJson['createdAt'])

    launchMetadataJson = {
        "execution_type": execution_type,  # FILES | GIT_URL
        "execution_source": "WEB",  # TODO: could be lambda, api, etc?
        "git_url": args.get("git_url", None),
        "git_hash": args.get("git_hash", None),
        "nextflow_profile": args.get("nextflow_profile", None),
        "params_loc": params_file_loc,
        "workflow_loc": workflow_loc,
        "config_loc": config_loc,
    }

    e = WorkflowExecution(
        fargateTaskArn=taskArn,
        fargateCreatedAt=res['tasks'][0]['createdAt'],
        fargateLastStatus=res['tasks'][0]['lastStatus'],
        fargateMetadata=infoJson,
        fargateLogGroupName='/ecs/nextflow-runner',
        fargateLogStreamName='ecs/nextflow/%s' % taskArn,
        cacheTaskArn=resume_fargate_task_arn,
        username=username,
        workgroup=WORKGROUP,
        launchMetadata=launchMetadataJson,
    )
    db.session.add(e)
    db.session.commit()
    return e