async def list_files(app_name, version):
    """Return the paths of all files stored in MinIO for app_name/version,
    relative to that version's directory."""
    client = Minio(config.MINIO,
                   access_key=config.get_from_file(config.MINIO_ACCESS_KEY_PATH),
                   secret_key=config.get_from_file(config.MINIO_SECRET_KEY_PATH),
                   secure=False)
    paths = []
    for entry in client.list_objects("apps-bucket", recursive=True):
        source = Path(entry.object_name)
        # Object names look like <prefix>/<app_name>/<version>/...
        if source.parts[1] != app_name:
            continue
        base = f"{source.parts[0]}/{app_name}/{version}"
        paths.append(str(source.relative_to(base)))
    return paths
def __init__(self):
    """Create the async (motor) and blocking (pymongo) Mongo clients, then initialize the DB."""
    # Async client for event-loop code paths; shares credentials/host with the sync client.
    # NOTE(review): default port 27016 is one off from Mongo's conventional 27017 — confirm intentional.
    self.async_client = motor.motor_asyncio.AsyncIOMotorClient(
        username=config.DB_USERNAME,
        password=config.get_from_file(config.MONGO_KEY_PATH),
        host=config.MONGO_HOST,
        port=config.get_int("MONGO_PORT", 27016))
    # Blocking client for synchronous code paths.
    self.reg_client = pymongo.MongoClient(
        username=config.DB_USERNAME,
        password=config.get_from_file(config.MONGO_KEY_PATH),
        host=config.MONGO_HOST,
        port=config.get_int("MONGO_PORT", 27016))
    self.init_db()
async def build_image(app_name, version):
    """Copy an app's source from MinIO into ./temp_apps, build its Docker image, and push it.

    Returns:
        (success: bool, message: str)
    """
    tag_name = f"{static.APP_PREFIX}_{app_name}"
    repo = f"{config.DOCKER_REGISTRY}/{tag_name}:{version}"
    try:
        pathlib.Path(f"./temp_apps/{app_name}/{version}/src").mkdir(parents=True, exist_ok=True)
    except Exception:
        # Was print(e); route failures through the module logger like the rest of this file.
        logger.exception("Failed to create temp build directory")
    minio_client = Minio(config.MINIO,
                         access_key=config.get_from_file(config.MINIO_ACCESS_KEY_PATH),
                         secret_key=config.get_from_file(config.MINIO_SECRET_KEY_PATH),
                         secure=False)
    objects = minio_client.list_objects("apps-bucket", recursive=True)
    for obj in objects:
        size = obj.size
        p_src = Path(obj.object_name)
        # Object names look like <prefix>/<app_name>/...; copy only this app's files.
        if p_src.parts[1] == app_name:
            hold = str(p_src)
            p_dst = hold[hold.find(app_name):]
            p_dst = Path("temp_apps") / p_dst
            os.makedirs(p_dst.parent, exist_ok=True)
            data = minio_client.get_object('apps-bucket', hold)
            with open(p_dst, 'wb+') as file_data:
                for d in data.stream(size):
                    file_data.write(d)
    logger.setLevel("DEBUG")
    docker_logger.setLevel("DEBUG")
    async with connect_to_aiodocker() as docker_client:
        context_dir = f"./temp_apps/{app_name}/{version}/"
        with docker_context(Path(context_dir)) as context:
            logger.info("Sending image to be built")
            dockerfile = "./Dockerfile"
            try:
                log_stream = await docker_client.images.build(fileobj=context, tag=repo, rm=True,
                                                              forcerm=True, pull=True, stream=True,
                                                              path_dockerfile=dockerfile,
                                                              encoding="application/x-tar")
                logger.info("Docker image building")
                await stream_docker_log(log_stream)
                logger.info("Docker image Built")
                success = await push_image(docker_client, repo)
                if success:
                    return True, "Successfully built and pushed image"
                else:
                    return False, "Failed to push image"
            except Exception as e:
                return False, str(e)
def update_global(global_var):
    """Update a Global Variable (Flask handler): re-apply its permission model,
    re-encrypt its value, and persist it.

    Returns (payload, HTTPStatus); FORBIDDEN when the caller is neither creator nor authorized.
    """
    username = get_jwt_claims().get('username', None)
    curr_user_id = (db.session.query(User).filter(User.username == username).first()).id
    data = request.get_json()
    global_id = data["id_"]
    new_permissions = data['permissions']
    access_level = data['access_level']
    to_update = auth_check(global_id, "update", "global_variables")
    if (global_var.creator == curr_user_id) or to_update:
        # Access levels: 0 = creator only, 1 = default/everyone, 2 = caller-specified roles.
        # (Second branch was a stray `if`; made `elif` to match create_global's chain.)
        if access_level == 0:
            auth_check(global_id, "update", "global_variables",
                       updated_roles=[{"role": 1, "permissions": ["delete", "execute", "read", "update"]}])
        elif access_level == 1:
            default_permissions("global_variables", global_id, data=data)
        elif access_level == 2:
            auth_check(global_id, "update", "global_variables", updated_roles=new_permissions)
        try:
            # NOTE(review): other call sites read this key with mode 'rb' — confirm text mode is intended here.
            key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
            data['value'] = fernet_encrypt(key, data['value'])
            global_variable_schema.load(data, instance=global_var)
            current_app.running_context.execution_db.session.commit()
            return global_variable_schema.dump(global_var), HTTPStatus.OK
        except (IntegrityError, StatementError):
            current_app.running_context.execution_db.session.rollback()
            return unique_constraint_problem("global_variable", "update", data["name"])
    else:
        return None, HTTPStatus.FORBIDDEN
class FlaskConfig(object):
    """Flask / SQLAlchemy / JWT configuration; secrets are read from key files at import time."""
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_DATABASE_URI = format_db_path(
        config.DB_TYPE, config.SERVER_DB_NAME, config.DB_USERNAME,
        config.get_from_file(config.POSTGRES_KEY_PATH), config.DB_HOST)
    JWT_BLACKLIST_ENABLED = True
    # Only refresh tokens are checked against the blacklist.
    JWT_BLACKLIST_TOKEN_CHECKS = ['refresh']
    JWT_TOKEN_LOCATION = 'headers'
    JWT_BLACKLIST_PRUNE_FREQUENCY = 1000
    MAX_STREAM_RESULTS_SIZE_KB = 156
    ITEMS_PER_PAGE = 20
    SECRET_KEY = config.get_from_file(config.ENCRYPTION_KEY_PATH)
async def read_all_globals(request: Request, to_decrypt: str = False,
                           global_col: AsyncIOMotorCollection = Depends(get_mongo_c),
                           page: int = 1):
    """
    Returns a list of all Global Variables currently loaded in WALKOFF.
    Pagination is currently not supported.
    """
    walkoff_db = get_mongo_d(request)
    curr_user_id = await get_jwt_identity(request)
    # Pagination is currently not supported: any page past the first is empty.
    if page > 1:
        return []
    key = config.get_from_file(config.ENCRYPTION_KEY_PATH, mode='rb')
    query = await mongo_helpers.get_all_items(global_col, GlobalVariable)
    ret = []
    # NOTE(review): default is bool False although annotated str, so omitting to_decrypt
    # falls through to the decrypting (per-variable auth-checked) branch; "false" is only
    # matched when passed explicitly — confirm this is the intended default.
    if to_decrypt == "false":
        # Raw (still-encrypted) documents, no per-variable auth filtering.
        return query
    else:
        for global_var in query:
            to_read = await auth_check(global_var, curr_user_id, "read", walkoff_db)
            if to_read:
                # Decrypt a copy so the stored document stays encrypted.
                temp_var = deepcopy(global_var)
                temp_var.value = fernet_decrypt(key, global_var.value)
                ret.append(temp_var)
        return ret
async def create_global(
        request: Request, new_global: GlobalVariable,
        global_col: AsyncIOMotorCollection = Depends(get_mongo_c)):
    """
    Creates a new Global Variable in WALKOFF and returns it.

    Applies the requested access level as a concrete permission set, encrypts the
    value, and stores the document. Raises UniquenessException on any storage error.
    """
    walkoff_db = get_mongo_d(request)
    curr_user_id = await get_jwt_identity(request)
    permissions = new_global.permissions
    access_level = permissions.access_level
    if access_level == AccessLevel.CREATOR_ONLY:
        new_global.permissions = await creator_only_permissions(curr_user_id)
    elif access_level == AccessLevel.EVERYONE:
        new_global.permissions = await default_permissions(
            curr_user_id, walkoff_db, "global_variables")
    elif access_level == AccessLevel.ROLE_BASED:
        await append_super_and_internal(new_global.permissions)
        new_global.permissions.creator = curr_user_id
    try:
        key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
        try:
            # Key file may hold text; Fernet needs base64-encoded bytes.
            key = base64.b64encode(key.encode('utf-8'))
        except AttributeError:
            # Already bytes (no .encode) — use as-is. (Was a bare except with a no-op `key = key`.)
            pass
        new_global.value = fernet_encrypt(key, new_global.value)
        return await mongo_helpers.create_item(global_col, GlobalVariable, new_global)
    except Exception as e:
        logger.info(e)
        raise UniquenessException("global_variable", "create", new_global.name)
async def update_global(request: Request, updated_global: GlobalVariable, global_var: UUID,
                        global_col: AsyncIOMotorCollection = Depends(get_mongo_c)):
    """
    Updates a specific Global Variable (fetched by id) and returns it.

    Raises DoesNotExistException if the id is unknown and UnauthorizedException
    if the caller may not update it.
    """
    walkoff_db = get_mongo_d(request)
    caller_id = await get_jwt_identity(request)

    old_global = await mongo_helpers.get_item(global_col, GlobalVariable, global_var)
    if not old_global:
        raise DoesNotExistException("update", "Global Variable", global_var)
    if not await auth_check(old_global, caller_id, "update", walkoff_db):
        raise UnauthorizedException("update data for", "Global Variable", old_global.name)

    # Translate the requested access level into a concrete permission set.
    level = updated_global.permissions.access_level
    if level == AccessLevel.CREATOR_ONLY:
        updated_global.permissions = await creator_only_permissions(caller_id)
    elif level == AccessLevel.EVERYONE:
        updated_global.permissions = await default_permissions(caller_id, walkoff_db,
                                                               "global_variables")
    elif level == AccessLevel.ROLE_BASED:
        await append_super_and_internal(updated_global.permissions)
        updated_global.permissions.creator = caller_id

    # Re-encrypt the (new) value before storing.
    key = config.get_from_file(config.ENCRYPTION_KEY_PATH, mode='rb')
    updated_global.value = fernet_encrypt(key, updated_global.value)
    return await mongo_helpers.update_item(global_col, GlobalVariable, old_global.id_,
                                           updated_global)
async def read_global(
        request: Request, global_var: UUID, to_decrypt: str = "false",
        global_col: AsyncIOMotorCollection = Depends(get_mongo_c)):
    """
    Returns the Global Variable for the specified id.

    With to_decrypt == "false" the stored (encrypted) value is returned; otherwise
    it is decrypted first. Raises UnauthorizedException if the caller may not read it.
    """
    walkoff_db = get_mongo_d(request)
    curr_user_id = await get_jwt_identity(request)
    global_variable = await mongo_helpers.get_item(global_col, GlobalVariable, global_var)
    to_read = await auth_check(global_variable, curr_user_id, "read", walkoff_db)
    if to_read:
        if to_decrypt == "false":
            return global_variable.value
        else:
            key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
            try:
                # Key file may hold text; Fernet needs base64-encoded bytes.
                key = base64.b64encode(key.encode('utf-8'))
            except AttributeError:
                # Already bytes (no .encode) — use as-is. (Was a bare except with a no-op `key = key`.)
                pass
            return fernet_decrypt(key, global_variable.value)
    else:
        raise UnauthorizedException("read data for", "Global Variable", global_variable.name)
async def read_all_globals(
        request: Request, to_decrypt: str = False,
        global_col: AsyncIOMotorCollection = Depends(get_mongo_c)):
    """
    Returns a list of all Global Variables currently loaded in WALKOFF.

    With to_decrypt == "false" the raw (encrypted) documents are returned; otherwise
    each variable the caller may read is returned with its value decrypted.
    """
    walkoff_db = get_mongo_d(request)
    curr_user_id = await get_jwt_identity(request)
    key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
    try:
        # Key file may hold text; Fernet needs base64-encoded bytes.
        key = base64.b64encode(key.encode('utf-8'))
    except AttributeError:
        # Already bytes (no .encode) — use as-is. (Was a bare except with a no-op `key = key`.)
        pass
    query = await mongo_helpers.get_all_items(global_col, GlobalVariable)
    ret = []
    # NOTE(review): default is bool False although annotated str, so omitting to_decrypt
    # decrypts by default; "false" only matches when passed explicitly — confirm intended.
    if to_decrypt == "false":
        return query
    else:
        for global_var in query:
            to_read = await auth_check(global_var, curr_user_id, "read", walkoff_db)
            if to_read:
                # Decrypt a copy so the stored document stays encrypted.
                temp_var = deepcopy(global_var)
                temp_var.value = fernet_decrypt(key, global_var.value)
                ret.append(temp_var)
        return ret
def create_user():
    """Bootstrap the server DB: create it if missing, seed default roles, and ensure built-in users exist."""
    from api_gateway.serverdb import add_user, User, Role, initialize_default_resources_admin, \
        initialize_default_resources_internal_user, \
        initialize_default_resources_workflow_developer, \
        initialize_default_resources_app_developer, \
        initialize_default_resources_workflow_operator, initialize_default_resources_super_admin
    from sqlalchemy_utils import database_exists, create_database

    # Create the database and all tables on first run.
    if not database_exists(db.engine.url):
        create_database(db.engine.url)
    db.create_all()

    # Setup internal, super_admin, admin workflow_developer, and workflow_operator roles
    initialize_default_resources_internal_user()
    initialize_default_resources_super_admin()
    initialize_default_resources_admin()
    initialize_default_resources_app_developer()
    initialize_default_resources_workflow_developer()
    initialize_default_resources_workflow_operator()

    # Setup internal user: its password is the shared internal service key.
    internal_role = Role.query.filter_by(id=1).first()
    internal_user = User.query.filter_by(username="******").first()
    if not internal_user:
        key = config.get_from_file(config.INTERNAL_KEY_PATH)
        # NOTE(review): internal_role above is id=1 but the user is created with roles=[2] — confirm.
        add_user(username='******', password=key, roles=[2])
    elif internal_role not in internal_user.roles:
        internal_user.roles.append(internal_role)

    # Setup Super Admin user
    super_admin_role = Role.query.filter_by(id=2).first()
    super_admin_user = User.query.filter_by(username="******").first()
    if not super_admin_user:
        add_user(username='******', password='******', roles=[2])
    elif super_admin_role not in super_admin_user.roles:
        super_admin_user.roles.append(super_admin_role)

    # Setup Admin user
    admin_role = Role.query.filter_by(id=3).first()
    admin_user = User.query.filter_by(username="******").first()
    if not admin_user:
        add_user(username='******', password='******', roles=[3])
    elif admin_role not in admin_user.roles:
        admin_user.roles.append(admin_role)

    db.session.commit()
def validate_global(self, data, **kwargs):
    """If a JSON schema accompanies the variable, validate the decrypted value against it.

    Raises MarshmallowValidationError when the schema is invalid or the value fails it.
    """
    if "schema" not in data:
        return
    try:
        key = config.get_from_file(config.ENCRYPTION_KEY_PATH, 'rb')
        decrypted = fernet_decrypt(key, data['value'])
        Draft4Validator(data['schema']['schema']).validate(decrypted)
    except (SchemaError, JSONSchemaValidationError):
        raise MarshmallowValidationError(
            f"Global variable did not validate with provided schema: "
            f"{data['schema']['schema']}")
async def connect_to_aioredis_pool(redis_uri) -> aioredis.Redis:
    """Yield an auto-reconnecting Redis client bound to a connection pool, closing it on exit."""
    pool = await aioredis.create_redis_pool(
        redis_uri, password=config.get_from_file(config.REDIS_KEY_PATH))
    try:
        yield pool
    finally:
        # Gracefully drain and close the pool even if the caller raised.
        pool.close()
        await pool.wait_closed()
        logger.info("Redis connection pool closed.")
async def push_to_minio():
    """Ensure the "apps-bucket" bucket exists, then upload every file from `p` into it."""
    minio_client = Minio(config.MINIO,
                         access_key=config.get_from_file(config.MINIO_ACCESS_KEY_PATH),
                         secret_key=config.get_from_file(config.MINIO_SECRET_KEY_PATH),
                         secure=False)
    bucket_exists = False
    try:
        buckets = minio_client.list_buckets()
        for bucket in buckets:
            if bucket.name == "apps-bucket":
                bucket_exists = True
    except Exception as e:
        # Any listing failure is treated as "bucket missing"; it gets created below.
        logger.info("Bucket doesn't exist.")
    if not bucket_exists:
        minio_client.make_bucket("apps-bucket", location="us-east-1")
    # NOTE(review): `p` is not defined in this function — presumably a module-level
    # iterable of Path objects (e.g. a glob over the apps tree); verify.
    files_to_upload = [x for x in p if x.is_file()]
    for file in files_to_upload:
        path_to_file = str(file)
        with open(path_to_file, "rb") as file_data:
            # MinIO needs the exact byte length up front.
            file_stat = os.stat(path_to_file)
            minio_client.put_object("apps-bucket", path_to_file, file_data, file_stat.st_size)
def create_global():
    """Create a Global Variable (Flask handler): apply the requested permission model,
    encrypt the value, and persist it. Returns (payload, HTTPStatus)."""
    data = request.get_json()
    global_id = data['id_']
    username = get_jwt_claims().get('username', None)
    curr_user = db.session.query(User).filter(
        User.username == username).first()
    data.update({'creator': curr_user.id})
    new_permissions = data['permissions']
    access_level = data['access_level']
    # creator only
    if access_level == 0:
        update_permissions("global_variables", global_id,
                           new_permissions=[{
                               "role": 1,
                               "permissions": ["delete", "execute", "read", "update"]
                           }],
                           creator=curr_user.id)
    # default permissions
    elif access_level == 1:
        default_permissions("global_variables", global_id, data=data, creator=curr_user.id)
    # user-specified permissions
    elif access_level == 2:
        update_permissions("global_variables", global_id, new_permissions=new_permissions,
                           creator=curr_user.id)
    try:
        key = config.get_from_file(config.ENCRYPTION_KEY_PATH, 'rb')
        # Only the encrypted value is ever stored.
        data['value'] = fernet_encrypt(key, data['value'])
        global_variable = global_variable_schema.load(data)
        current_app.running_context.execution_db.session.add(global_variable)
        current_app.running_context.execution_db.session.commit()
        return global_variable_schema.dump(global_variable), HTTPStatus.CREATED
    except IntegrityError:
        # Unique-constraint collision (duplicate name) — roll back and report.
        current_app.running_context.execution_db.session.rollback()
        return unique_constraint_problem("global_variable", "create", data["name"])
async def update_file(app_name, version, path, file_data, file_size):
    """Replace (or create) one app file in MinIO.

    Returns (True, stat-string) on success or (False, error-string) on failure.
    """
    client = Minio(config.MINIO,
                   access_key=config.get_from_file(config.MINIO_ACCESS_KEY_PATH),
                   secret_key=config.get_from_file(config.MINIO_SECRET_KEY_PATH),
                   secure=False)
    abs_path = f"apps/{app_name}/{version}/{path}"

    # Probe for an existing object; stat raises when it is absent.
    exists = False
    try:
        client.stat_object("apps-bucket", abs_path)
        exists = True
    except Exception:
        logger.info("File does not exist, creating a new one.")
    if exists:
        client.remove_object("apps-bucket", abs_path)
        logger.info("File exists, removing it before creating a new one.")

    stream = io.BytesIO(file_data)
    try:
        client.put_object("apps-bucket", abs_path, stream, file_size)
        stat_result = client.stat_object("apps-bucket", abs_path)
        return True, str(stat_result)
    except Exception as e:
        return False, str(e)
async def save_file(app_name, version):
    """Copy all of an app's files from the MinIO "apps-bucket" into the local ./apps tree."""
    temp = []
    minio_client = Minio(config.MINIO,
                         access_key=config.get_from_file(config.MINIO_ACCESS_KEY_PATH),
                         secret_key=config.get_from_file(config.MINIO_SECRET_KEY_PATH),
                         secure=False)
    objects = minio_client.list_objects("apps-bucket", recursive=True)
    for obj in objects:
        size = obj.size
        p_src = Path(obj.object_name)
        # Object names look like <prefix>/<app_name>/...; copy only this app's files.
        if p_src.parts[1] == app_name:
            hold = str(p_src)
            p_dst = hold[hold.find(app_name):]
            p_dst = Path("apps") / p_dst
            os.makedirs(p_dst.parent, exist_ok=True)
            data = minio_client.get_object('apps-bucket', hold)
            with open(str(p_dst), 'wb+') as file_data:
                for d in data.stream(size):
                    file_data.write(d)
            # Match the copied file's ownership to the app's requirements.txt.
            # NOTE(review): assumes requirements.txt already exists locally when other
            # files are copied — confirm object ordering guarantees this.
            owner_id = stat(f"apps/{app_name}/{version}/requirements.txt").st_uid
            group_id = stat(f"apps/{app_name}/{version}/requirements.txt").st_gid
            os.chown(p_dst, owner_id, group_id)
    return True
def read_global(global_var):
    """Return a Global Variable (decrypted unless ?to_decrypt=false) when the caller
    is its creator or passes the read auth check; FORBIDDEN otherwise."""
    username = get_jwt_claims().get('username', None)
    curr_user_id = (db.session.query(User).filter(User.username == username).first()).id
    allowed = auth_check(str(global_var.id_), "read", "global_variables")
    if global_var.creator != curr_user_id and not allowed:
        return None, HTTPStatus.FORBIDDEN
    global_json = global_variable_schema.dump(global_var)
    if request.args.get('to_decrypt') == "false":
        # Caller asked for the raw, still-encrypted value.
        return jsonify(global_json), HTTPStatus.OK
    key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
    return jsonify(fernet_decrypt(key, global_json['value'])), HTTPStatus.OK
def __init__(self):
    """Create the execution-DB engine and scoped session, then build all ORM tables.

    The model imports below register every table on Base.metadata before create_all().
    """
    # All of these imports are necessary
    from api_gateway.executiondb.returns import ReturnApi
    from api_gateway.executiondb.parameter import Parameter, ParameterApi
    from api_gateway.executiondb.action import Action, ActionApi
    from api_gateway.executiondb.appapi import AppApi
    from api_gateway.executiondb.branch import Branch
    from api_gateway.executiondb.condition import Condition
    from api_gateway.executiondb.transform import Transform
    # from api_gateway.executiondb.trigger import Trigger
    from api_gateway.executiondb.global_variable import GlobalVariable
    from api_gateway.executiondb.workflow_variable import WorkflowVariable
    from api_gateway.executiondb.workflow import Workflow
    from api_gateway.executiondb.workflowresults import WorkflowStatus, NodeStatus

    ExecutionDatabase.db_type = config.DB_TYPE
    if 'sqlite' in config.DB_TYPE:
        # SQLite: allow cross-thread use of the single connection; no pooling.
        self.engine = create_engine(
            format_db_path(config.DB_TYPE, config.EXECUTION_DB_NAME),
            connect_args={'check_same_thread': False}, poolclass=NullPool)
    else:
        self.engine = create_engine(format_db_path(
            config.DB_TYPE, config.EXECUTION_DB_NAME, config.DB_USERNAME,
            config.get_from_file(config.POSTGRES_KEY_PATH), config.DB_HOST),
            poolclass=NullPool, isolation_level="AUTOCOMMIT")

    if not database_exists(self.engine.url):
        try:
            create_database(self.engine.url)
        except IntegrityError as e:
            # Another process may have created the database concurrently; ignore.
            pass

    self.connection = self.engine.connect()
    self.transaction = self.connection.begin()
    session = sessionmaker()
    session.configure(bind=self.engine)
    # Thread-local session registry bound to this engine.
    self.session = scoped_session(session)
    Base.metadata.bind = self.engine
    Base.metadata.create_all(self.engine)
async def get_walkoff_auth_header(session, token=None, timeout=5 * 60):
    """Obtain a fresh WALKOFF access token, logging in first when no refresh token is supplied.

    Returns ({"Authorization": "Bearer <access>"}, refresh_token) so callers can reuse the token.
    """
    url = config.API_URI.rstrip('/') + '/walkoff/api'
    logger.debug("Attempting to refresh WALKOFF JWT")
    if token is None:
        # First call: authenticate with the internal service key to get a refresh token.
        key = config.get_from_file(config.INTERNAL_KEY_PATH)
        async with session.post(url + "/auth/login",
                                json={"username": config.WALKOFF_USERNAME, "password": key},
                                timeout=timeout) as resp:
            resp_json = await resp.json()
            token = resp_json["refresh_token"]
            logger.debug("Successfully logged into WALKOFF")
    headers = {"Authorization": f"Bearer {token}"}
    # Exchange the refresh token for a new access token.
    async with session.post(url + "/auth/refresh", headers=headers, timeout=timeout) as resp:
        resp_json = await resp.json()
        access_token = resp_json["access_token"]
        logger.debug("Successfully refreshed WALKOFF JWT")
    return {"Authorization": f"Bearer {access_token}"}, token
def read_all_globals():
    """List all Global Variables ordered by name; unless ?to_decrypt=false, return
    decrypted copies of those the caller may read."""
    username = get_jwt_claims().get('username', None)
    curr_user_id = (db.session.query(User).filter(User.username == username).first()).id
    key = config.get_from_file(config.ENCRYPTION_KEY_PATH)
    query = current_app.running_context.execution_db.session.query(GlobalVariable).order_by(GlobalVariable.name).all()
    if request.args.get('to_decrypt') == "false":
        # Raw, still-encrypted rows.
        return query, HTTPStatus.OK
    readable = []
    for gv in query:
        to_read = auth_check(str(gv.id_), "read", "global_variables")
        if gv.creator == curr_user_id or to_read:
            # Decrypt a copy so the persisted row stays encrypted.
            clone = deepcopy(gv)
            clone.value = fernet_decrypt(key, gv.value)
            readable.append(clone)
    return readable, HTTPStatus.OK
async def load_secret_key():
    """Read and return the server encryption key from its key file."""
    return config.get_from_file(config.ENCRYPTION_KEY_PATH)
async def execute_action(self, action: Action):
    """ Execute an action, and push its result to Redis. """
    # TODO: Is there a better way to do this?
    # Tag the logger's stream so log lines carry this action's execution/workflow ids.
    self.logger.handlers[0].stream.execution_id = action.execution_id
    self.logger.handlers[0].stream.workflow_id = action.workflow_id
    self.logger.debug(
        f"Attempting execution of: {action.label}-{action.execution_id}")
    self.current_execution_id = action.execution_id
    self.current_workflow_id = action.workflow_id
    # Results are published to a per-execution Redis stream.
    results_stream = f"{action.execution_id}:results"
    if hasattr(self, action.name):
        # Tell everyone we started execution
        action.started_at = datetime.datetime.now()
        start_action_msg = NodeStatusMessage.executing_from_node(
            action, action.execution_id, started_at=action.started_at)
        await self.redis.xadd(
            results_stream, {action.execution_id: message_dumps(start_action_msg)})
        try:
            func = getattr(self, action.name, None)
            if callable(func):
                if len(action.parameters) < 1:
                    result = await func()
                else:
                    # Build the kwargs, decrypting any GLOBAL-variant parameter.
                    params = {}
                    for p in action.parameters:
                        if p.variant == ParameterVariant.GLOBAL:
                            key = config.get_from_file(
                                config.ENCRYPTION_KEY_PATH, 'rb')
                            params[p.name] = fernet_decrypt(key, p.value)
                        else:
                            params[p.name] = p.value
                    result = await func(**params)
                action_result = NodeStatusMessage.success_from_node(
                    action, action.execution_id, result=result,
                    started_at=action.started_at)
                self.logger.debug(
                    f"Executed {action.label}-{action.execution_id} "
                    f"with result: {result}")
            else:
                # Attribute exists but is not callable (e.g. a plain attribute shadowing the name).
                self.logger.error(
                    f"App {self.__class__.__name__}.{action.name} is not callable"
                )
                action_result = NodeStatusMessage.failure_from_node(
                    action, action.execution_id, result="Action not callable",
                    started_at=action.started_at)
        except Exception as e:
            # The action itself raised: report the failure instead of crashing the worker.
            self.logger.exception(
                f"Failed to execute {action.label}-{action.execution_id}")
            action_result = NodeStatusMessage.failure_from_node(
                action, action.execution_id, result=repr(e),
                started_at=action.started_at)
    else:
        self.logger.error(
            f"App {self.__class__.__name__} has no method {action.name}")
        action_result = NodeStatusMessage.failure_from_node(
            action, action.execution_id, result="Action does not exist",
            started_at=action.started_at)
    # Always publish the final status, whatever branch produced it.
    await self.redis.xadd(
        results_stream, {action.execution_id: message_dumps(action_result)})
async def get_file(app_name, version, path):
    """Fetch one app file from the MinIO "apps-bucket" and return its raw bytes."""
    client = Minio(config.MINIO,
                   access_key=config.get_from_file(config.MINIO_ACCESS_KEY_PATH),
                   secret_key=config.get_from_file(config.MINIO_SECRET_KEY_PATH),
                   secure=False)
    object_name = f"apps/{app_name}/{version}/{path}"
    response = client.get_object('apps-bucket', object_name)
    return response.read()
# Deterministic names for all generated constraints and indexes.
naming_convention = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}
Base.metadata = MetaData(naming_convention=naming_convention)

if 'sqlite' in config.DB_TYPE:
    # SQLite: allow cross-thread use of the single connection; no pooling.
    engine = create_engine(format_db_path(config.DB_TYPE, config.EXECUTION_DB_NAME),
                           connect_args={'check_same_thread': False}, poolclass=NullPool)
else:
    engine = create_engine(
        format_db_path(config.DB_TYPE, config.EXECUTION_DB_NAME, config.DB_USERNAME,
                       config.get_from_file(config.POSTGRES_KEY_PATH), config.DB_HOST),
        poolclass=NullPool, isolation_level="AUTOCOMMIT")

if not database_exists(engine.url):
    try:
        create_database(engine.url)
    except IntegrityError as e:
        # Another process may have created the database concurrently; ignore.
        pass

connection = engine.connect()
transaction = connection.begin()
session = sessionmaker()
session.configure(bind=engine)
# NOTE(review): `session` is rebound from the factory to the scoped registry,
# shadowing the sessionmaker — confirm nothing else needs the raw factory.
session = scoped_session(session)
} class DefaultUserUUID(Enum): INTERNAL_USER = preset_uuid("internal_user") SUPER_ADMIN = preset_uuid("sadmin_user") ADMIN = preset_uuid("admin_user") DefaultUserUUIDS = [v.value for v in DefaultUserUUID.__members__.values()] default_users = { "internal_user": { "id_": DefaultUserUUID.INTERNAL_USER.value, "username": "******", "password": config.get_from_file(config.INTERNAL_KEY_PATH), "hashed": False, "roles": [DefaultRoleUUID.INTERNAL_USER.value] }, "super_admin": { "id_": DefaultUserUUID.SUPER_ADMIN.value, "username": "******", "password": "******", "hashed": False, "roles": [DefaultRoleUUID.SUPER_ADMIN.value] }, "admin": { "id_": DefaultUserUUID.ADMIN.value, "username": "******", "password": "******", "hashed": False,