def CrateTranslatorInstance():
    """
    Yield a ready-to-use CrateTranslator.

    Connection host/port come from the CRATE_HOST / CRATE_PORT env vars
    (defaults: 'crate' / 4200); the database name is fixed to 'ngsi-tsdb'.
    The translator is opened as a context manager so it is closed when the
    generator is finalized.
    """
    reader = EnvReader(log=logging.getLogger(__name__).info)
    host = reader.read(StrVar('CRATE_HOST', 'crate'))
    port = reader.read(IntVar('CRATE_PORT', 4200))
    with CrateTranslator(host, port, "ngsi-tsdb") as trans:
        yield trans
def read_env(self, env: dict = os.environ):
    """
    Populate Crate connection settings from environment variables.

    Each setting falls back to the value already stored on the instance
    when the corresponding env var is absent. The password is read with
    mask_value=True so it is not echoed in logs.
    """
    reader = EnvReader(env, log=logging.getLogger(__name__).debug)
    self.host = reader.read(StrVar(CRATE_HOST_ENV_VAR, self.host))
    self.port = reader.read(IntVar(CRATE_PORT_ENV_VAR, self.port))
    self.db_user = reader.read(StrVar(CRATE_DB_USER_ENV_VAR, self.db_user))
    self.db_pass = reader.read(
        StrVar(CRATE_DB_PASS_ENV_VAR, self.db_pass, mask_value=True))
    # Back-off factor for the retry interval between consecutive
    # connection attempts.
    self.backoff_factor = reader.read(FloatVar('CRATE_BACKOFF_FACTOR', 0.0))
    self.active_shards = reader.read(
        StrVar('CRATE_WAIT_ACTIVE_SHARDS', '1'))
class GeoCodingEnvReader:
    """
    Helper class to encapsulate the reading of geo-coding env vars.
    """

    def __init__(self):
        # Log reads at info level through this module's logger.
        log_fn = logging.getLogger(__name__).info
        self.env = EnvReader(log=log_fn)

    def use_geocoding(self) -> bool:
        """Whether geo-coding is enabled (USE_GEOCODING, default False)."""
        return self.env.read(BoolVar('USE_GEOCODING', False))

    def redis_host(self) -> MaybeString:
        """Redis host name, or None when REDIS_HOST is not set."""
        return self.env.read(StrVar('REDIS_HOST', None))

    def redis_port(self) -> int:
        """Redis port number (REDIS_PORT, default 6379)."""
        return self.env.read(IntVar('REDIS_PORT', 6379))
def log():
    """
    Configure root logging from the LOGLEVEL env var (default INFO) and
    return this module's logger.

    The format includes timestamp with milliseconds, level, logger name,
    message, and the thread/process IDs.
    """
    env = EnvReader(log=logging.getLogger(__name__).info)
    level = env.read(StrVar('LOGLEVEL', 'INFO')).upper()
    fmt = ('%(asctime)s.%(msecs)03d '
           '%(levelname)s:%(name)s:%(message)s '
           'Thread ID: [%(thread)d] Process ID: [%(process)d]')
    logging.basicConfig(level=level,
                        format=fmt,
                        datefmt='%Y-%m-%d %I:%M:%S')
    return logging.getLogger(__name__)
class CacheEnvReader:
    """
    Helper class to encapsulate the reading of cache (Redis) env vars.
    """
    # NOTE: the original docstring said "geo-coding env vars", copied from
    # GeoCodingEnvReader; this class actually reads cache settings.

    def __init__(self):
        self.env = EnvReader(log=logging.getLogger(__name__).debug)

    def redis_host(self) -> MaybeString:
        """Redis host name, or None when REDIS_HOST is not set."""
        return self.env.read(StrVar('REDIS_HOST', None))

    def redis_port(self) -> int:
        """Redis port number (REDIS_PORT, default 6379)."""
        return self.env.read(IntVar('REDIS_PORT', 6379))

    def default_ttl(self) -> int:
        """Cache entry time-to-live in seconds (DEFAULT_CACHE_TTL, default 60)."""
        return self.env.read(IntVar('DEFAULT_CACHE_TTL', 60))

    def cache_queries(self) -> bool:
        """Whether query caching is enabled (CACHE_QUERIES, default False)."""
        return self.env.read(BoolVar('CACHE_QUERIES', False))
def default_backend() -> MaybeString:
    """
    Resolve the default time-series DB backend.

    Precedence: QL_DEFAULT_DB env var, then the 'default-backend' entry of
    the YAML config file pointed at by QL_CONFIG_ENV_VAR, then Crate.

    :return: the backend identifier to use when no tenant override applies.
    """
    # Call log() once and reuse the logger: the original called log() for
    # each reader, re-running logging.basicConfig and re-reading LOGLEVEL
    # on every call.
    logger = log()
    cfg_reader = YamlReader(log=logger.debug)
    env_reader = EnvReader(log=logger.debug)

    config = cfg_reader.from_env_file(QL_CONFIG_ENV_VAR, defaults={})
    config_backend = maybe_string_match(config, 'default-backend')
    env_backend = env_reader.read(StrVar(QL_DEFAULT_DB_ENV_VAR, None))
    return env_backend or config_backend or CRATE_BACKEND
def lookup_backend(fiware_service: str) -> MaybeString:
    """
    Resolve the DB backend for a given FIWARE tenant.

    Precedence: tenant-specific 'backend' from the YAML config, then the
    QL_DEFAULT_DB env var, then the config file's 'default-backend', then
    Crate — mirroring default_backend().

    :param fiware_service: the tenant (FIWARE service) name.
    :return: the backend identifier to use for this tenant.
    """
    # Single log() call reused for both readers (the original called log()
    # per reader, re-running basicConfig, and mixed info/debug log fns).
    logger = log()
    cfg_reader = YamlReader(log=logger.debug)
    env_reader = EnvReader(log=logger.debug)

    config = cfg_reader.from_env_file(QL_CONFIG_ENV_VAR, defaults={})
    tenant_backend = maybe_string_match(config, 'tenants', fiware_service,
                                        'backend')
    default_backend = maybe_string_match(config, 'default-backend')
    # Read the env var with no default: the original defaulted it to
    # CRATE_BACKEND, which made the config file's 'default-backend' entry
    # unreachable in the fallback chain below.
    env_backend = env_reader.read(StrVar(QL_DEFAULT_DB_ENV_VAR, None))
    return tenant_backend or env_backend or default_backend or CRATE_BACKEND
def db_con_factory(t: DbType) -> Any:
    """
    Build a DB connection for the requested backend type.

    :param t: the backend to connect to.
    :return: a Crate client connection, a pg8000 connection with
        autocommit enabled, or None for an unrecognized type.
    """
    if t is DbType.CRATE:
        env = EnvReader()
        host = env.read(StrVar('CRATE_HOST', 'localhost'))
        port = env.read(IntVar('CRATE_PORT', 4200))
        return client.connect([f"{host}:{port}"], error_trace=True)

    if t is DbType.TIMESCALE:
        cfg = PostgresConnectionData()
        cfg.read_env()
        # QuestionMark parameter style for the pg8000 driver.
        pg8000.paramstyle = "qmark"
        conn = pg8000.connect(host=cfg.host,
                              port=cfg.port,
                              database=cfg.db_name,
                              user=cfg.db_user,
                              password=cfg.db_pass)
        conn.autocommit = True
        return conn

    return None
def setup():
    """
    Set the root logger's level from the LOGLEVEL env var (default INFO)
    and attach the default handler to it.
    """
    root = logging.getLogger()
    reader = EnvReader(log=root.debug)
    level = reader.read(StrVar('LOGLEVEL', 'INFO')).upper()
    root.setLevel(level)
    root.addHandler(default_handler)
def log():
    """
    Apply a basic logging configuration at the level given by the
    LOGLEVEL env var (default INFO) and return this module's logger.
    """
    env = EnvReader(log=logging.getLogger(__name__).info)
    level = env.read(StrVar('LOGLEVEL', 'INFO')).upper()
    logging.basicConfig(level=level)
    return logging.getLogger(__name__)