def make_flask():
    """Build and configure the Flask application.

    Sets a per-process random secret key, caps request bodies at 8 MiB,
    relaxes trailing-slash matching, honours reverse-proxy headers, wires
    up the database (when a ``database`` section is configured) and every
    configured API, installs a JSON 422 error handler, and enables CORS.
    """
    app = Flask(__name__)
    app.config["SECRET_KEY"] = str(uuid.uuid4())
    app.config["MAX_CONTENT_LENGTH"] = 8 * 1024 * 1024
    app.url_map.strict_slashes = False
    app.wsgi_app = ProxyFix(app.wsgi_app)

    if "database" in ConfigManager.get_config():
        config_database(app, ConfigManager.get_config_value("database"))

    # Register every configured API on this app.
    for api_config in ConfigManager.get_config_value("apis").values():
        create_api(api_config).init_app(app)

    # pylint: disable=unused-variable
    @app.errorhandler(422)
    def handle_error(err):
        # webargs/marshmallow validation errors carry their payload in ``err.data``.
        messages = err.data.get("messages", ["Invalid request."])
        headers = err.data.get("headers", None)
        if headers:
            return jsonify({"errors": messages}), err.code, headers
        return jsonify({"errors": messages}), err.code

    CORS(app)
    return app
def get_instance(cls, use_flask=True, flask_app=None):
    """Return the process-wide Celery singleton, creating it on first use.

    Args:
        cls: class holding the cached singleton (classmethod receiver).
        use_flask: when True, bind task execution to a Flask app context.
        flask_app: optional pre-built Flask app; when omitted, one is
            created via ``make_flask`` the first time it is needed.

    Returns:
        The cached ``Celery`` instance.
    """
    if not cls.__instance:
        name = flask_app.import_name if flask_app else __name__
        broker_url = make_url(
            ConfigManager.get_config_value("celery", "broker"))
        results_backend_url = make_url(
            ConfigManager.get_config_value("celery", "results_backend"))
        celery = Celery(
            name,
            broker=broker_url,
            backend=results_backend_url,
        )
        if use_flask:
            # (dropped redundant ``use_flask and`` — already inside the branch)
            if not cls.__flask_app:
                from app.app_factory import make_flask
                flask_app = flask_app or make_flask()
                flask_app.config.update(
                    CELERY_BROKER_URL=broker_url,
                    CELERY_RESULT_BACKEND=results_backend_url,
                )
                cls.__flask_app = flask_app
            celery.conf.update(cls.__flask_app.config)

            # Capture the app in a local: the original closed over
            # ``flask_app``, which is None whenever the cached app is
            # reused without one being passed in, so tasks crashed on
            # ``None.app_context()``. Referencing ``cls.__flask_app``
            # inside the nested class would be name-mangled against
            # ``ContextTask``, hence the local binding.
            app = cls.__flask_app

            class ContextTask(celery.Task):
                """Task subclass that runs inside the Flask app context."""

                def __call__(self, *args, **kwargs):
                    with app.app_context():
                        return self.run(*args, **kwargs)

            celery.Task = ContextTask
        cls.__instance = celery
    return cls.__instance
def make_redis(config=None):
    """Create a ``StrictRedis`` client.

    Falls back to the ``cache.redis`` config section when *config* is falsy.
    """
    config = config or ConfigManager.get_config_value("cache", "redis")
    connection = {key: config[key] for key in ("host", "port", "db", "password")}
    return redis.StrictRedis(**connection)
def init_client(cls, config=None):
    """Initialise the class-level Redis client and the types-mapping key.

    Args:
        cls: class receiving the cached client (classmethod receiver).
        config: optional redis connection mapping with ``host``, ``port``,
            ``db`` and ``password`` keys; defaults to the ``cache.redis``
            config section. The original default argument evaluated
            ``ConfigManager.get_config_value`` once, at import time —
            the lookup is now deferred to call time.
    """
    if config is None:
        config = ConfigManager.get_config_value("cache", "redis")
    # Single assignment (the original assigned ``cls.__client`` twice).
    cls.__client = redis.StrictRedis(
        host=config["host"],
        port=config["port"],
        db=config["db"],
        password=config["password"])
    cls.__types_mapping = cls.__name__ + "__types_mapping"
def make_flask():
    """Create a minimal Flask app.

    Configures a per-process random secret key, an 8 MiB request-body cap,
    and the database layer when a ``database`` section is configured.
    """
    flask_app = Flask(__name__)
    flask_app.config.update(
        SECRET_KEY=str(uuid.uuid4()),
        MAX_CONTENT_LENGTH=8 * 1024 * 1024,
    )
    settings = ConfigManager.get_config()
    if "database" in settings:
        config_database(flask_app, ConfigManager.get_config_value("database"))
    return flask_app
def get_broker(cls):
    """Lazily build and cache the Redis broker for the events stream."""
    if not cls.__broker:
        cfg = ConfigManager.get_config_value("events-stream", "broker")
        cls.__broker = redis.StrictRedis(
            host=cfg["host"],
            port=cfg["port"],
            db=cfg["db"],
            password=cfg["password"],
        )
    return cls.__broker
def exists(self, key):
    """Return True when *key* is present in the configured S3 bucket.

    Uses a HEAD request: a 404 from S3 means "absent"; any other
    ``ClientError`` (permissions, throttling, ...) is propagated.
    """
    # NOTE: despite the original local name ``bucket``, this attribute is
    # used as an S3 *client* (``head_object`` is a client-level call).
    client = self.__client
    config = ConfigManager.get_config_value("aws", "s3")
    try:
        # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.head_object
        client.head_object(Bucket=config["bucket_name"], Key=key)
    except ClientError as client_error:
        if client_error.response["Error"]["Code"] == "404":
            return False
        # Bare ``raise`` preserves the original traceback; re-raising the
        # bound name (as the original did) resets the raise location.
        raise
    return True
def make_es(retries=30, db_config=None):
    """Connect to Elasticsearch, retrying every 5 s until it is reachable.

    Args:
        retries: number of attempts before giving up (a negative value
            retries indefinitely, matching the original ``!= 0`` loop).
        db_config: optional mapping with a ``hosts`` entry; defaults to the
            ``database.elasticsearch`` config section, now resolved at call
            time (the original default argument ran the config lookup once,
            when the module was imported).

    Returns:
        The initialised ``Elasticsearch`` client, or ``None`` when all
        retries are exhausted (unchanged from the original behaviour).
    """
    import time

    if db_config is None:
        db_config = ConfigManager.get_config_value("database", "elasticsearch")
    while retries != 0:
        try:
            es = Elasticsearch(db_config["hosts"])
            init_es(es)
            return es
        # Narrowed from a bare ``except`` so Ctrl-C / SystemExit still work.
        except Exception:
            # Announce before sleeping so the operator sees progress immediately.
            print(
                f"Elasticsearch is not ready... Retrying in 5s (retries: {retries})"
            )
            time.sleep(5)
            retries -= 1
    return None
import os

from utils.configmanager import ConfigManager


def write_config_dict(config, filename, header="[default]"):
    """Write *config* as ``key = value`` lines under *header* to *filename*.

    The original built the line list via a side-effect list comprehension
    and ``content += header + "\\n"`` (which extends a list character by
    character); output is byte-identical here. The creation message now
    actually interpolates the filename (the original f-string had no
    placeholder).
    """
    lines = [header + "\n"]
    lines.extend(f"{k} = {v}\n" for k, v in config.items())
    with open(filename, "w") as f:
        print(f"Creating file: {filename}")
        f.writelines(lines)


def create_config_file(config, filename):
    """Ensure *filename*'s directory exists, then write the config file."""
    directory = os.path.dirname(filename)
    # Guard against a bare filename: os.makedirs("") raises.
    if directory and not os.path.exists(directory):
        print(f"Creating directory: {directory}")
        os.makedirs(directory)
    write_config_dict(config, filename)


if __name__ == "__main__":
    # Each config section carries its own destination under the "path" key.
    credentials_config = ConfigManager.get_config_value("aws", "credentials")
    general_config = ConfigManager.get_config_value("aws", "general")
    create_config_file(credentials_config, credentials_config.pop("path"))
    create_config_file(general_config, general_config.pop("path"))
def _get_bucket(self, config=None):
    """Lazily resolve and cache the S3 bucket resource."""
    if not self.__bucket:
        settings = config or ConfigManager.get_config_value("aws", "s3")
        self.__bucket = boto3.resource("s3").Bucket(settings["bucket_name"])
    return self.__bucket
# pylint: disable=import-error
# pylint: disable=no-name-in-module
import os
from importlib import import_module

from utils.configmanager import ConfigManager

# Launch the configured number of worker tasks per event consumer and
# record the resulting task ids for later inspection.
consumers = ConfigManager.get_config_value("event_consumers")
task_ids = []
for consumer in consumers.values():
    module = import_module(consumer["consumer_module"])
    consumer_task = getattr(module, consumer["consumer_task"])
    print(f"Starting {consumer['workers']} workers...")
    for index in range(consumer["workers"]):
        group = consumer["consumer_group"]
        try:
            worker_id = os.environ["HOSTNAME"] + "-" + str(index)
            task = consumer_task.delay(group, consumer_id=worker_id)
        except TypeError:
            # Task signature without ``consumer_id`` support.
            task = consumer_task.delay(group)
        print(f"Consumer started for {group}")
        task_ids.append(f'{task}')

with open(f'{os.environ["HOME"]}/tasks_ids', 'w') as f:
    f.write(','.join(task_ids))
import pytest
import boto3

from services.storage import S3
from utils.configmanager import ConfigManager

# Integration-test module for the S3 storage service.
TEST_CONFIG = "/config/receipts/test_config.yml"
FIXTURES_PATH = "/receipts/src/tests/library/services/data/"
# Every object created by these tests lives under this key prefix.
PREFIX = "storage-tests/"
filename_1 = "small.jpg"

# Resolved once at import time from the test config file.
s3_config = ConfigManager.get_config_value("aws", "s3", TEST_CONFIG)


def setup_module(module):
    # Start from an empty prefix so earlier runs cannot leak state in.
    clean_all_files(s3_config, PREFIX)


def teardown_module(module):
    # Cleanup deliberately disabled so test artefacts can be inspected
    # in the bucket after a run.
    pass  # clean_all_files(s3_config, PREFIX)


def clean_all_files(config, prefix):
    # Bulk-delete every object under *prefix* in the configured bucket.
    print(
        f'\nDeleting all files in bucket: \'{config["bucket_name"]}\' with prefix: \'{prefix}\''
    )
    bucket = boto3.resource("s3").Bucket(config["bucket_name"])
    bucket.objects.filter(Prefix=prefix).delete()


# NOTE(review): the decorated fixture function continues beyond this chunk.
@pytest.fixture
def __init__(self, config=None):
    """Bind the instance to an S3 bucket.

    Args:
        config: optional S3 config mapping; defaults to the ``aws.s3``
            config section. The original default argument evaluated
            ``ConfigManager.get_config_value`` once, at import time —
            the lookup is now deferred to call time.
    """
    if config is None:
        config = ConfigManager.get_config_value("aws", "s3")
    self.__bucket = self._get_bucket(config)
def __init__(self):
    """Create the DAO backed by the mongo ``transaction`` collection."""
    mongo_config = ConfigManager.get_config_value("database", "mongo")
    database = make_db(mongo_config)
    self._dao = MongoDAO(database["transaction"])
# pylint: disable=import-error
# pylint: disable=no-name-in-module
from importlib import import_module

from sqlalchemy.ext.declarative import declarative_base

from utils.celery import CeleryManager
from utils.common import make_url
from utils.configmanager import ConfigManager

# The active backend is the first (only) key of the ``database`` config
# section; its module exposes the shared ``db`` and ``ma`` objects.
config = ConfigManager.get_config_value("database")
module_name = "database." + next(iter(config.keys()))
module = import_module(module_name)
db = module.db
ma = module.ma

CONSTRAINT_ERROR_MSG = "constraint_value can't be None when IBase.__force_constraint__ is True"


class IBase(db.Model):
    """Abstract base model with optional mandatory-filter semantics."""

    __abstract__ = True

    @classmethod
    def get_all(cls, constraint_value=None):
        """Return all rows of the model.

        When the model sets ``__force_constraint__``, *constraint_value*
        is a required mapping of column filters and omitting it raises
        ``ValueError``.
        """
        if not cls.__force_constraint__:
            return cls.query.all()
        if constraint_value is None:
            raise ValueError(CONSTRAINT_ERROR_MSG)
        return cls.query.filter_by(**constraint_value).all()
def __init__(self, config=None):
    """Set up the S3 bucket resource and a low-level boto3 S3 client.

    Falls back to the ``aws.s3`` config section when *config* is falsy.
    """
    if not config:
        config = ConfigManager.get_config_value("aws", "s3")
    self.__resource = self._get_bucket(config)
    self.__client = boto3.client("s3")
from re import compile  # NOTE(review): shadows the builtin ``compile``
from json import dumps, loads
from requests import post, get
from flask import Response, request

from utils.configmanager import ConfigManager
from utils.rediscache import make_redis

# ORY Hydra OAuth2 settings resolved from the ``ory`` config section.
conf = ConfigManager.get_config_value('ory')
hydra_config = conf['oauth2']['hydra']
oauth2_client = conf['oauth2']['client']
HYDRA_HOST = hydra_config['host']
HYDRA_PUBLIC_PORT = hydra_config['public_port']
HYDRA_ADMIN_PORT = hydra_config['admin_port']


def __token_introspection(access_token):
    # Ask Hydra's admin API whether *access_token* is active; returns the
    # HTTP status code together with the decoded JSON body.
    # NOTE(review): ``verify=False`` disables TLS certificate checks —
    # confirm this call only ever targets trusted internal traffic.
    data = {'token': access_token}
    headers = {'X-Forwarded-Proto': 'https'}
    response = post(f'{HYDRA_HOST}:{HYDRA_ADMIN_PORT}/oauth2/introspect',
                    headers=headers,
                    data=data,
                    verify=False)
    return response.status_code, response.json()


def __userinfo(access_token):
    # Build forwarding headers carrying the bearer token plus the caller's
    # original headers.
    # NOTE(review): this function continues beyond the visible chunk.
    headers = {'Authorization': f'Bearer {access_token}', **request.headers}
def __init__(self):
    """Open the dataset mongo database and bind its two DAOs."""
    dataset_config = ConfigManager.get_config_value("database", "mongo-dataset")
    self._db = make_db(dataset_config)
    self._doc_dao = MongoDAO(self._db["documents"])
    self._info_dao = MongoDAO(self._db["dataset"])
def make_db(db_config=None):
    """Return the configured Mongo database handle.

    Args:
        db_config: optional mongo config mapping with a ``db`` key;
            defaults to the ``database.mongo`` config section. The lookup
            now happens per call — the original default argument evaluated
            ``ConfigManager.get_config_value`` once, at import time.
    """
    if db_config is None:
        db_config = ConfigManager.get_config_value("database", "mongo")
    # ``connect=False`` defers the actual connection until first use.
    client = MongoClient(make_url(db_config, include_db=False), connect=False)
    return client[db_config["db"]]
def init_es(es, indices=None):
    """Create any missing Elasticsearch indices.

    Args:
        es: Elasticsearch client (only ``es.indices`` is used).
        indices: iterable of index names; defaults to the
            ``search.indices`` config section, resolved at call time
            instead of at import (the original default argument ran the
            config lookup once, when the module was imported).
    """
    if indices is None:
        indices = ConfigManager.get_config_value("search", "indices")
    for index in indices:
        if not es.indices.exists(index):
            es.indices.create(index)