def update_global(global_var):
    """Update a global variable; allowed for its creator or role-authorized users.

    The JSON body supplies the new fields plus ``access_level``, which selects
    how permissions are rewritten: 0 -> internal-user role only, 1 -> creator
    defaults, 2 -> caller-supplied role/permission list. The value is
    re-encrypted with the configured key before persisting.

    Returns the updated dump with 200, a uniqueness problem response on
    constraint violation, or (None, 403) when unauthorized.
    """
    username = get_jwt_claims().get('username', None)
    curr_user_id = (db.session.query(User).filter(User.username == username).first()).id
    data = request.get_json()
    global_id = data["id_"]
    new_permissions = data['permissions']
    access_level = data['access_level']
    to_update = auth_check(global_id, "update", "global_variables")
    if (global_var.creator == curr_user_id) or to_update:
        # Consistency fix: use a single if/elif chain (access_level is one
        # value; the old `if access_level == 1` could never overlap with 0,
        # but the mixed chain read as a bug and diverged from update_workflow).
        if access_level == 0:
            auth_check(global_id, "update", "global_variables",
                       updated_roles=[{"role": 1,
                                       "permissions": ["delete", "execute", "read", "update"]}])
        elif access_level == 1:
            default_permissions("global_variables", global_id, data=data)
        elif access_level == 2:
            auth_check(global_id, "update", "global_variables",
                       updated_roles=new_permissions)
        try:
            key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
            data['value'] = fernet_encrypt(key, data['value'])
            global_variable_schema.load(data, instance=global_var)
            current_app.running_context.execution_db.session.commit()
            return global_variable_schema.dump(global_var), HTTPStatus.OK
        except (IntegrityError, StatementError):
            current_app.running_context.execution_db.session.rollback()
            return unique_constraint_problem("global_variable", "update", data["name"])
    else:
        return None, HTTPStatus.FORBIDDEN
def update_workflow(workflow):
    """Apply a JSON update to *workflow*, including rename and permissions.

    Returns the updated dump with 200 on success, a problem response on
    validation or uniqueness failure, or (None, 403) when the caller lacks
    update rights on the workflow.
    """
    payload = request.get_json()
    current_name = workflow.name
    requested_name = payload['name']
    requested_roles = payload['permissions']
    if not auth_check(current_name, "update", "workflows"):
        return None, HTTPStatus.FORBIDDEN
    auth_check(current_name, "update", "workflows",
               new_name=requested_name, updated_roles=requested_roles)
    session = current_app.running_context.execution_db.session
    try:
        workflow_schema.load(payload, instance=workflow)
        session.commit()
        current_app.logger.info(
            f"Updated workflow {workflow.name} ({workflow.id_})")
        return workflow_schema.dump(workflow), HTTPStatus.OK
    except ValidationError as e:
        session.rollback()
        return improper_json_problem('workflow', 'update', workflow.id_, e.messages)
    except IntegrityError:
        # ToDo: Make sure this fires on duplicate
        session.rollback()
        return unique_constraint_problem('workflow', 'update', workflow.id_)
def update_global(global_var):
    """Update a global variable for a role-authorized caller.

    New permissions from the body are applied when supplied; otherwise the
    defaults are restored. The value is re-encrypted before persisting.
    Returns (None, 403) when the caller lacks update rights.
    """
    payload = request.get_json()
    target_id = payload["id_"]
    requested_roles = payload['permissions']
    if not auth_check(target_id, "update", "global_variables"):
        return None, HTTPStatus.FORBIDDEN
    if requested_roles:
        auth_check(target_id, "update", "global_variables",
                   updated_roles=requested_roles)
    else:
        default_permissions("global_variables", target_id)
    session = current_app.running_context.execution_db.session
    try:
        with open(config.ENCRYPTION_KEY_PATH, 'rb') as key_file:
            payload['value'] = fernet_encrypt(key_file.read(), payload['value'])
        global_variable_schema.load(payload, instance=global_var)
        session.commit()
        return global_variable_schema.dump(global_var), HTTPStatus.OK
    except (IntegrityError, StatementError):
        session.rollback()
        return unique_constraint_problem("global_variable", "update", payload["name"])
def update_workflow(workflow):
    """Update a workflow; allowed for its creator or role-authorized users.

    The JSON body carries the new workflow fields plus ``access_level``,
    which selects how permissions are rewritten: 0 -> internal-user role
    only, 1 -> creator defaults, 2 -> caller-supplied role list.

    Returns the updated dump with 200, a problem response on validation or
    uniqueness failure, or (None, 403) when unauthorized.
    """
    username = get_jwt_claims().get('username', None)
    curr_user_id = (db.session.query(User).filter(
        User.username == username).first()).id
    data = request.get_json()
    new_permissions = data['permissions']
    access_level = data['access_level']
    to_update = auth_check(str(workflow.id_), "update", "workflows")
    if (workflow.creator == curr_user_id) or to_update:
        if access_level == 0:
            auth_check(str(workflow.id_), "update", "workflows",
                       updated_roles=[{"role": 1,
                                       "permissions": ["delete", "execute", "read", "update"]}])
        elif access_level == 1:
            default_permissions("workflows", str(workflow.id_), data=data)
        elif access_level == 2:
            auth_check(str(workflow.id_), "update", "workflows",
                       updated_roles=new_permissions)
        try:
            workflow_schema.load(data, instance=workflow)
            current_app.running_context.execution_db.session.commit()
            current_app.logger.info(
                f"Updated workflow {workflow.name} ({workflow.id_})")
            return workflow_schema.dump(workflow), HTTPStatus.OK
        except ValidationError as e:
            current_app.running_context.execution_db.session.rollback()
            return improper_json_problem('workflow', 'update', workflow.id_, e.messages)
        except IntegrityError:
            # ToDo: Make sure this fires on duplicate
            current_app.running_context.execution_db.session.rollback()
            return unique_constraint_problem('workflow', 'update', workflow.id_)
    else:
        return None, HTTPStatus.FORBIDDEN
def copy_workflow(workflow, permissions, workflow_name=None):
    """Create a copy of *workflow* with fresh ids and the given permissions.

    Returns the new workflow dump with 201 on success, (None, 403) when the
    caller lacks update rights on the source, or a uniqueness problem
    response when the copy's name collides.
    """
    source_json = workflow_schema.dump(workflow)
    copy_json = deepcopy(source_json)
    if not auth_check(copy_json["name"], "update", "workflows"):
        return None, HTTPStatus.FORBIDDEN
    regenerate_workflow_ids(copy_json)
    copy_json['name'] = workflow_name if workflow_name else source_json.get("name")
    update_permissions("workflows", copy_json['name'], permissions)
    session = current_app.running_context.execution_db.session
    try:
        new_workflow = workflow_schema.load(copy_json)
        session.add(new_workflow)
        session.commit()
        current_app.logger.info(
            f" Workflow {workflow.id_} copied to {new_workflow.id_}")
        return workflow_schema.dump(new_workflow), HTTPStatus.CREATED
    except IntegrityError:
        session.rollback()
        current_app.logger.error(
            f" Could not copy workflow {copy_json['name']}. Unique constraint failed"
        )
        return unique_constraint_problem('workflow', 'copy', copy_json['name'])
def copy_workflow(workflow, permissions, workflow_name=None, creator=None):
    """Copy *workflow* under fresh ids, optionally renaming and re-owning it.

    Permissions on the copy follow the workflow's ``access_level``:
    0 -> internal-user role only, 1 -> creator defaults, 2 -> the supplied
    *permissions* list. Returns (None, 403) when the caller neither has
    update rights on the source nor is its creator.
    """
    old_json = workflow_schema.dump(workflow)
    workflow_json = deepcopy(old_json)
    update_check = auth_check(workflow_json["id_"], "update", "workflows")
    if (not update_check) and (workflow_json['creator'] != creator):
        return None, HTTPStatus.FORBIDDEN
    regenerate_workflow_ids(workflow_json)
    if workflow_name:
        workflow_json['name'] = workflow_name
    else:
        workflow_json['name'] = old_json.get("name")
    workflow_json['creator'] = creator
    access_level = workflow_json['access_level']
    # Consistency fix: single if/elif chain (access_level is one value; the
    # old bare `if access_level == 1` read as a bug and diverged from the
    # equivalent chain in update_workflow).
    if access_level == 0:
        update_permissions("workflows", workflow_json['id_'],
                           new_permissions=[{"role": 1,
                                             "permissions": ["delete", "execute", "read", "update"]}],
                           creator=creator)
    elif access_level == 1:
        default_permissions("workflows", workflow_json['id_'],
                            data=workflow_json, creator=creator)
    elif access_level == 2:
        update_permissions("workflows", workflow_json['id_'],
                           new_permissions=permissions, creator=creator)
    try:
        new_workflow = workflow_schema.load(workflow_json)
        current_app.running_context.execution_db.session.add(new_workflow)
        current_app.running_context.execution_db.session.commit()
        current_app.logger.info(
            f" Workflow {workflow.id_} copied to {new_workflow.id_}")
        return workflow_schema.dump(new_workflow), HTTPStatus.CREATED
    except IntegrityError:
        current_app.running_context.execution_db.session.rollback()
        current_app.logger.error(
            f" Could not copy workflow {workflow_json['name']}. Unique constraint failed"
        )
        return unique_constraint_problem('workflow', 'copy', workflow_json['name'])
def read_all_workflows():
    """Return every workflow the caller may read, ordered by name."""
    all_workflows = current_app.running_context.execution_db.session.query(
        Workflow).order_by(Workflow.name).all()
    visible = []
    for wf in all_workflows:
        if auth_check(wf.name, "read", "workflows"):
            # NOTE(review): dump result is discarded and the model object
            # itself is appended — confirm whether the serialized form was
            # intended to be returned instead.
            workflow_schema.dump(wf)
            visible.append(wf)
    return visible, HTTPStatus.OK
def delete_global(global_var):
    """Delete *global_var* when the caller has delete rights; else 403."""
    if not auth_check(str(global_var.id_), "delete", "global_variables"):
        return None, HTTPStatus.FORBIDDEN
    session = current_app.running_context.execution_db.session
    session.delete(global_var)
    current_app.logger.info(f"Global_variable removed {global_var.name}")
    session.commit()
    return None, HTTPStatus.NO_CONTENT
def delete_workflow(workflow):
    """Delete *workflow* when the caller has delete rights; else 403."""
    if not auth_check(workflow.name, "delete", "workflows"):
        return None, HTTPStatus.FORBIDDEN
    session = current_app.running_context.execution_db.session
    session.delete(workflow)
    current_app.logger.info(
        f"Removed workflow {workflow.name} ({workflow.id_})")
    session.commit()
    return None, HTTPStatus.NO_CONTENT
def delete_workflow(workflow):
    """Delete *workflow* when the caller is its creator or has delete rights."""
    claimed_username = get_jwt_claims().get('username', None)
    requester_id = db.session.query(User).filter(
        User.username == claimed_username).first().id
    allowed = auth_check(str(workflow.id_), "delete", "workflows")
    if workflow.creator != requester_id and not allowed:
        return None, HTTPStatus.FORBIDDEN
    session = current_app.running_context.execution_db.session
    session.delete(workflow)
    current_app.logger.info(
        f"Removed workflow {workflow.name} ({workflow.id_})")
    session.commit()
    return None, HTTPStatus.NO_CONTENT
def read_all_workflows():
    """List workflows (ordered by name) visible to the requesting user.

    A workflow is visible when the requester created it or holds read
    rights on it.
    """
    claimed_username = get_jwt_claims().get('username', None)
    requester_id = db.session.query(User).filter(
        User.username == claimed_username).first().id
    all_workflows = current_app.running_context.execution_db.session.query(
        Workflow).order_by(Workflow.name).all()
    visible = []
    for wf in all_workflows:
        readable = auth_check(str(wf.id_), "read", "workflows")
        if wf.creator == requester_id or readable:
            # NOTE(review): dump result is discarded and the model object
            # itself is appended — confirm whether the serialized form was
            # intended to be returned instead.
            workflow_schema.dump(wf)
            visible.append(wf)
    return visible, HTTPStatus.OK
def delete_global(global_var):
    """Delete *global_var* when the caller is its creator or has delete rights."""
    claimed_username = get_jwt_claims().get('username', None)
    requester_id = db.session.query(User).filter(
        User.username == claimed_username).first().id
    allowed = auth_check(str(global_var.id_), "delete", "global_variables")
    if global_var.creator != requester_id and not allowed:
        return None, HTTPStatus.FORBIDDEN
    session = current_app.running_context.execution_db.session
    session.delete(global_var)
    current_app.logger.info(f"Global_variable removed {global_var.name}")
    session.commit()
    return None, HTTPStatus.NO_CONTENT
def get_workflow_status(execution):
    """Return the status dump for one execution, or 403 if not visible.

    Visible when the requester created the underlying workflow or holds
    read rights on it.
    """
    claimed_username = get_jwt_claims().get('username', None)
    requester_id = db.session.query(User).filter(
        User.username == claimed_username).first().id
    status_json = workflow_status_schema.dump(execution)
    wf_id = str(status_json['workflow_id'])
    readable = auth_check(wf_id, "read", "workflows")
    owner = creator_check(wf_id, "workflows")
    if owner == requester_id or readable:
        return status_json, HTTPStatus.OK
    return None, HTTPStatus.FORBIDDEN
def get_all_workflow_status():
    """List workflow statuses (ordered by name) the requesting user may see.

    A status is visible when the requester created the underlying workflow
    or holds read rights on it.
    """
    claimed_username = get_jwt_claims().get('username', None)
    requester_id = db.session.query(User).filter(
        User.username == claimed_username).first().id
    statuses = current_app.running_context.execution_db.session.query(
        WorkflowStatus).order_by(WorkflowStatus.name).all()
    visible = []
    for status in statuses:
        owner = creator_check(str(status.workflow_id), "workflows")
        readable = auth_check(str(status.workflow_id), "read", "workflows")
        if owner == requester_id or readable:
            visible.append(status)
    return visible, HTTPStatus.OK
def read_global(global_var):
    """Return *global_var* as JSON; value decrypted unless to_decrypt=false."""
    if not auth_check(str(global_var.id_), "read", "global_variables"):
        return None, HTTPStatus.FORBIDDEN
    global_json = global_variable_schema.dump(global_var)
    if request.args.get('to_decrypt') == "false":
        return jsonify(global_json), HTTPStatus.OK
    with open(config.ENCRYPTION_KEY_PATH, 'rb') as key_file:
        decrypted = fernet_decrypt(key_file.read(), global_json['value'])
    return jsonify(decrypted), HTTPStatus.OK
def read_global(global_var):
    """Return *global_var* as JSON when the caller is its creator or a reader.

    The stored value is decrypted unless the query arg to_decrypt == "false".
    """
    claimed_username = get_jwt_claims().get('username', None)
    requester_id = db.session.query(User).filter(
        User.username == claimed_username).first().id
    allowed = auth_check(str(global_var.id_), "read", "global_variables")
    if global_var.creator != requester_id and not allowed:
        return None, HTTPStatus.FORBIDDEN
    global_json = global_variable_schema.dump(global_var)
    if request.args.get('to_decrypt') == "false":
        return jsonify(global_json), HTTPStatus.OK
    key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
    return jsonify(fernet_decrypt(key, global_json['value'])), HTTPStatus.OK
def read_all_globals():
    """List globals (ordered by name) visible to the requesting user.

    Values are decrypted unless the query arg to_decrypt == "false", in
    which case the full encrypted list is returned as-is.
    """
    claimed_username = get_jwt_claims().get('username', None)
    requester_id = db.session.query(User).filter(
        User.username == claimed_username).first().id
    key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
    all_globals = current_app.running_context.execution_db.session.query(
        GlobalVariable).order_by(GlobalVariable.name).all()
    if request.args.get('to_decrypt') == "false":
        # NOTE(review): the encrypted passthrough applies no per-item
        # permission filtering — matches existing behavior; confirm intended.
        return all_globals, HTTPStatus.OK
    visible = []
    for global_var in all_globals:
        readable = auth_check(str(global_var.id_), "read", "global_variables")
        if global_var.creator == requester_id or readable:
            # Copy so the decrypted value never touches the session-tracked model.
            decrypted_var = deepcopy(global_var)
            decrypted_var.value = fernet_decrypt(key, global_var.value)
            visible.append(decrypted_var)
    return visible, HTTPStatus.OK
def read_all_globals():
    """List readable globals by name; values decrypted unless to_decrypt=false."""
    with open(config.ENCRYPTION_KEY_PATH, 'rb') as key_file:
        key = key_file.read()
    all_globals = current_app.running_context.execution_db.session.query(
        GlobalVariable).order_by(GlobalVariable.name).all()
    if request.args.get('to_decrypt') == "false":
        return all_globals, HTTPStatus.OK
    visible = []
    for global_var in all_globals:
        if auth_check(str(global_var.id_), "read", "global_variables"):
            # Copy so the decrypted value never touches the session-tracked model.
            decrypted_var = deepcopy(global_var)
            decrypted_var.value = fernet_decrypt(key, global_var.value)
            visible.append(decrypted_var)
    return visible, HTTPStatus.OK
def read_workflow(workflow):
    """Return the workflow dump, or a JSON file download when mode=export."""
    if not auth_check(workflow.name, "read", "workflows"):
        return None, HTTPStatus.FORBIDDEN
    workflow_json = workflow_schema.dump(workflow)
    if request.args.get('mode') != "export":
        return workflow_json, HTTPStatus.OK
    buffer = BytesIO()
    buffer.write(
        json.dumps(workflow_json, sort_keys=True, indent=4,
                   separators=(',', ': ')).encode('utf-8'))
    buffer.seek(0)
    # NOTE(review): attachment_filename is the pre-Flask-2.0 keyword
    # (renamed download_name) — confirm the pinned Flask still accepts it.
    return send_file(buffer,
                     attachment_filename=workflow.name + '.json',
                     as_attachment=True), HTTPStatus.OK
def read_workflow(workflow):
    """Return the workflow dump (or a JSON download when mode=export).

    Visible when the requester created the workflow or holds read rights.
    """
    claimed_username = get_jwt_claims().get('username', None)
    requester_id = db.session.query(User).filter(
        User.username == claimed_username).first().id
    readable = auth_check(str(workflow.id_), "read", "workflows")
    if workflow.creator != requester_id and not readable:
        return None, HTTPStatus.FORBIDDEN
    workflow_json = workflow_schema.dump(workflow)
    if request.args.get('mode') != "export":
        return workflow_json, HTTPStatus.OK
    buffer = BytesIO()
    buffer.write(
        json.dumps(workflow_json, sort_keys=True, indent=4,
                   separators=(',', ': ')).encode('utf-8'))
    buffer.seek(0)
    return send_file(buffer,
                     attachment_filename=workflow.name + '.json',
                     as_attachment=True), HTTPStatus.OK
def control_workflow(execution):
    """Abort or trigger a running workflow execution.

    The JSON body's ``status`` selects the action: ``'abort'`` moves the
    execution out of the pending Redis set and publishes a control message;
    ``'trigger'`` validates the trigger and pushes ``trigger_data`` onto
    that trigger's Redis stream. Allowed for the workflow's creator or any
    role with execute rights on the workflow.
    """
    data = request.get_json()
    status = data['status']
    workflow = workflow_getter(execution.workflow_id)
    # The resource factory returns the WorkflowStatus model but we want the string of the execution ID
    execution_id = str(execution.execution_id)
    to_execute = auth_check(str(workflow.id_), "execute", "workflows")
    username = get_jwt_claims().get('username', None)
    curr_user_id = (db.session.query(User).filter(
        User.username == username).first()).id
    # TODO: add in pause/resume here. Workers need to store and recover state for this
    if (workflow.creator == curr_user_id) or to_execute:
        if status == 'abort':
            logger.info(
                f"User '{get_jwt_claims().get('username', None)}' aborting workflow: {execution_id}"
            )
            message = {
                "execution_id": execution_id,
                "status": status,
                "workflow": workflow_schema.dumps(workflow)
            }
            # Move the execution from the pending set into the aborting set,
            # then publish the abort message for the workers to pick up.
            current_app.running_context.cache.smove(
                static.REDIS_PENDING_WORKFLOWS,
                static.REDIS_ABORTING_WORKFLOWS, execution_id)
            current_app.running_context.cache.xadd(
                static.REDIS_WORKFLOW_CONTROL, message)
            return None, HTTPStatus.NO_CONTENT
        elif status == 'trigger':
            # Triggers are only accepted while the execution is still live.
            if execution.status not in (StatusEnum.PENDING,
                                        StatusEnum.EXECUTING,
                                        StatusEnum.AWAITING_DATA):
                return invalid_input_problem(
                    "workflow",
                    "trigger",
                    execution_id,
                    errors=[
                        "Workflow must be in a running state to accept triggers."
                    ])
            trigger_id = data.get('trigger_id')
            if not trigger_id:
                return invalid_input_problem(
                    "workflow",
                    "trigger",
                    execution_id,
                    errors=[
                        "ID of the trigger must be specified in trigger_id."
                    ])
            # The requested trigger must belong to this workflow.
            seen = False
            for trigger in workflow.triggers:
                if str(trigger.id_) == trigger_id:
                    seen = True
            if not seen:
                return invalid_input_problem(
                    "workflow",
                    "trigger",
                    execution_id,
                    errors=[
                        f"trigger_id {trigger_id} was not found in this workflow."
                    ])
            trigger_stream = f"{execution_id}-{trigger_id}:triggers"
            # A non-empty stream means this trigger already received data.
            try:
                info = current_app.running_context.cache.xinfo_stream(
                    trigger_stream)
                stream_length = info["length"]
            except Exception:
                # xinfo_stream raises when the stream does not exist yet;
                # treat that as "no data received so far".
                stream_length = 0
            if stream_length > 0:
                return invalid_input_problem(
                    "workflow",
                    "trigger",
                    execution_id,
                    errors=[f"This trigger has already received data."])
            trigger_data = data.get('trigger_data')
            logger.info(
                f"User '{get_jwt_claims().get('username', None)}' triggering workflow: {execution_id} at trigger "
                f"{trigger_id} with data {trigger_data}")
            current_app.running_context.cache.xadd(
                trigger_stream,
                {execution_id: message_dumps({"trigger_data": trigger_data})})
            return jsonify({"trigger_stream": trigger_stream}), HTTPStatus.OK
        # NOTE(review): an authorized request with any other status falls
        # through and returns None implicitly — confirm that is intended.
    else:
        return None, HTTPStatus.FORBIDDEN
def execute_workflow():
    """Schedule a workflow for execution, applying optional overrides.

    The JSON body supplies ``workflow_id`` plus optional ``execution_id``,
    ``start`` (starting action/trigger override), ``workflow_variables``,
    and ``parameters`` overrides for the starting action.

    Returns 202 with the execution id on success, a problem response on
    missing/invalid input, or (None, 403) when the caller may not execute
    the workflow.
    """
    data = request.get_json()
    workflow_id = data.get("workflow_id")
    execution_id = data.get("execution_id", None)
    workflow = workflow_getter(workflow_id)  # ToDo: should this go under a path param so we can use the decorator
    # Bug fix: existence must be checked before workflow.name is read for
    # auth_check, otherwise a missing workflow raised AttributeError instead
    # of returning the 404 problem response.
    if not workflow:
        return dne_problem("workflow", "execute", workflow_id)
    if not auth_check(workflow.name, "execute", "workflows"):
        return None, HTTPStatus.FORBIDDEN
    if not workflow.is_valid:
        return invalid_input_problem("workflow", "execute", workflow.id_,
                                     errors=workflow.errors)
    workflow = workflow_schema.dump(workflow)
    actions_by_id = {a['id_']: a for a in workflow["actions"]}
    triggers_by_id = {t['id_']: t for t in workflow["triggers"]}
    # TODO: Add validation to all overrides
    if "start" in data:
        if data["start"] in actions_by_id or data["start"] in triggers_by_id:
            workflow["start"] = data["start"]
        else:
            # Bug fix: workflow is a dict after the dump above, so the old
            # attribute access workflow.id_ raised AttributeError here.
            return invalid_input_problem(
                "workflow", "execute", workflow["id_"],
                errors=[
                    "Start override must be an action or a trigger in this workflow."
                ])
    if "workflow_variables" in workflow and "workflow_variables" in data:
        # TODO: change these on the db model to be keyed by ID
        # Get workflow variables keyed by ID
        current_wvs = {wv['id_']: wv for wv in workflow["workflow_variables"]}
        new_wvs = {wv['id_']: wv for wv in data["workflow_variables"]}
        # Update workflow variables with new values, ignore ids that didn't already exist
        override_wvs = {
            id_: new_wvs[id_] if id_ in new_wvs else current_wvs[id_]
            for id_ in current_wvs
        }
        workflow["workflow_variables"] = list(override_wvs.values())
    if "parameters" in data:
        start_id = data.get("start", workflow["start"])
        if start_id in actions_by_id:
            parameters_by_name = {
                p["name"]: p
                for p in actions_by_id[start_id]["parameters"]
            }
            for parameter in data["parameters"]:
                parameters_by_name[parameter["name"]] = parameter
            actions_by_id[start_id]["parameters"] = list(
                parameters_by_name.values())
            workflow["actions"] = list(actions_by_id.values())
        else:
            # Bug fix: same dict-vs-model access as the start override above.
            return invalid_input_problem(
                "workflow", "execute", workflow["id_"],
                errors=[
                    "Cannot override starting parameters for anything but an action."
                ])
    try:
        execution_id = execute_workflow_helper(workflow_id, execution_id, workflow)
        return jsonify({'execution_id': execution_id}), HTTPStatus.ACCEPTED
    except ValidationError as e:
        current_app.running_context.execution_db.session.rollback()
        return improper_json_problem('workflow_status', 'create',
                                     workflow['name'], e.messages)