def CrateTranslatorInstance():
    r = EnvReader(log=logging.getLogger(__name__).info)
    db_host = r.read(StrVar('CRATE_HOST', 'crate'))
    db_port = r.read(IntVar('CRATE_PORT', 4200))
    db_name = "ngsi-tsdb"
    with CrateTranslator(db_host, db_port, db_name) as trans:
        yield trans
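# Usage sketch (an assumption, not part of the original module): since
# CrateTranslatorInstance() is a generator that yields exactly one translator
# from inside a `with` block, one way to consume it is to wrap it as a
# context manager so the translator is released when the caller's block exits.
from contextlib import contextmanager

crate_translator = contextmanager(CrateTranslatorInstance)

def example_query():
    with crate_translator() as trans:
        # `trans` is the CrateTranslator yielded above; use it here.
        pass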
def read_env(self, env: dict = os.environ):
    r = EnvReader(env, log=logging.getLogger(__name__).debug)
    self.host = r.read(StrVar(CRATE_HOST_ENV_VAR, self.host))
    self.port = r.read(IntVar(CRATE_PORT_ENV_VAR, self.port))
    self.db_user = r.read(StrVar(CRATE_DB_USER_ENV_VAR, self.db_user))
    self.db_pass = r.read(
        StrVar(CRATE_DB_PASS_ENV_VAR, self.db_pass, mask_value=True))
    # Back-off factor controlling the interval between consecutive retry
    # attempts.
    self.backoff_factor = r.read(FloatVar('CRATE_BACKOFF_FACTOR', 0.0))
    self.active_shards = r.read(StrVar('CRATE_WAIT_ACTIVE_SHARDS', '1'))
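# Usage sketch (the class name below is an assumption for illustration): a
# connection-data object defining this read_env() picks up its settings from
# the process environment, keeping the current attribute values as defaults
# when a variable is not set.
import os

os.environ['CRATE_BACKOFF_FACTOR'] = '0.5'      # retry back-off in seconds
os.environ['CRATE_WAIT_ACTIVE_SHARDS'] = 'all'

cfg = CrateConnectionData()   # assumed config class exposing read_env()
cfg.read_env()                # reads the CRATE_* variables into the instance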
def db_con_factory(t: DbType) -> Any:
    if t is DbType.CRATE:
        r = EnvReader()
        host = r.read(StrVar('CRATE_HOST', 'localhost'))
        port = r.read(IntVar('CRATE_PORT', 4200))
        return client.connect([f"{host}:{port}"], error_trace=True)
    if t is DbType.TIMESCALE:
        cfg = PostgresConnectionData()
        cfg.read_env()
        pg8000.paramstyle = "qmark"
        cx = pg8000.connect(host=cfg.host, port=cfg.port,
                            database=cfg.db_name,
                            user=cfg.db_user, password=cfg.db_pass)
        cx.autocommit = True
        return cx
    return None
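# Minimal sketch of the DbType enum assumed by db_con_factory (the real
# definition lives elsewhere in the code base; this is only an illustration):
from enum import Enum

class DbType(Enum):
    CRATE = 'crate'
    TIMESCALE = 'timescale'

# e.g. open a CrateDB connection using CRATE_HOST/CRATE_PORT from the
# environment:
# conn = db_con_factory(DbType.CRATE)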
def default_ttl(self) -> int:
    return self.env.read(IntVar('DEFAULT_CACHE_TTL', 60))
def redis_port(self) -> int:
    return self.env.read(IntVar('REDIS_PORT', 6379))
def redis_port(self) -> int:
    return self.env.read(IntVar(REDIS_PORT_ENV_VAR, 6379))
def default_ttl(self) -> int:
    return self.env.read(IntVar(DEFAULT_CACHE_TTL_ENV_VAR, 60))
def default_limit(self) -> int:
    fallback_limit = 10000
    var = IntVar(DEFAULT_LIMIT_VAR, default_value=fallback_limit)
    return self.store.safe_read(var)
def default_limit(self) -> int:
    var = IntVar(DEFAULT_LIMIT_VAR, default_value=FALLBACK_LIMIT)
    return self.store.safe_read(var)
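# NOTE (assumption about the reader API): safe_read, unlike read, is expected
# to fall back to the variable's default_value instead of raising when the
# environment value is missing or cannot be parsed, so default_limit() should
# always return an int.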
""" return 'default' def queue_names() -> [str]: """ :return: the name of the RQ queues to use for executing tasks. """ return [default_queue_name()] # NOTE. Multiple task queues. # For now we're just using one queue for all tasks but going forward we could # e.g. use a separate queue for each task type to prioritise execution. MAX_RETRIES_VAR = IntVar('WQ_MAX_RETRIES', 0) def max_retries() -> int: """ :return: how many times a failed task should be retried. """ return EnvReader().safe_read(MAX_RETRIES_VAR) def retry_intervals() -> [int]: """ Build a growing sequence of exponentially spaced out intervals at which to retry failed tasks. Each value is in seconds and in total there are `max_retries()` values.