def sync_sleeper(seconds: int):
    """Sleep *seconds* synchronously, then report how long we slept.

    Unlike :func:`async_sleeper` this is a plain blocking call, so the
    whole interpreter (event loop included) stalls while it waits.
    """
    # Short random tag so interleaved log lines can be matched up.
    tag = secrets.token_urlsafe(5)
    log.info(f'Start sync sleep for ({tag}) for {seconds}')
    sleep(seconds)
    log.info(f'Ending sync sleep for ({tag}) for {seconds}')
    return f'I slept for {seconds} seconds'
async def async_sleeper(seconds: int):
    """Sleep *seconds* without blocking, then report how long we slept.

    The await yields control back to the event loop, so other tasks keep
    running while this one waits.
    """
    # Short random tag so interleaved log lines can be matched up.
    tag = secrets.token_urlsafe(5)
    log.info(f'Start async sleep for ({tag}) for {seconds}')
    await async_sleep(seconds)
    log.info(f'Ending async sleep for ({tag}) for {seconds}')
    return f'I slept for {seconds} seconds'
async def connect_to_mongo():
    """Create the global Motor client and verify the server is reachable."""
    log.info("Connecting to mongodb database..")
    timeout_ms = 1000
    db.client = AsyncIOMotorClient(
        config.OPTIONAL_COMPONENTS.MONGODB.URL,
        socketTimeoutMS=timeout_ms,
        connectTimeoutMS=timeout_ms,
        serverSelectionTimeoutMS=timeout_ms,
    )
    # server_info() round-trips to the server, so an unreachable database
    # raises here instead of failing later on first use.
    log.debug(await db.client.server_info())
def _preload_drivers(self):
    """Instantiate a driver for every configured optional component.

    Iterates ``config.OPTIONAL_COMPONENTS``; entries with ``LOAD: no``
    are skipped. The configured ``DRIVER`` name is looked up in
    ``self.drivers``, the instance is registered in
    ``self.optional_components`` under the lower-cased component name,
    and then yielded so the caller can finish the connect step.

    Yields:
        The freshly created driver instance for each component.

    Raises:
        Exception: if the configured DRIVER name is unknown.
    """
    for name, values in config.OPTIONAL_COMPONENTS.items():
        # Removed leftover debug statements (`| log` pipe and a stray
        # `{'test': 123} | log`) that were tacked onto this line.
        name = name.lower()
        load = values.get('LOAD', 'auto')
        if load == 'no':
            continue
        drivername = values.get('DRIVER')
        try:
            driver = self.drivers[drivername]
        except KeyError as err:
            raise Exception(
                f'Invalid driver specified ({drivername}), no way to handle it'
            ) from err
        driverinstance = driver(opts=values.get('OPTS', {}), load=load)
        driverinstance.pm = self
        self.optional_components[name] = driverinstance
        log.info(
            f'Connecting to {name} with driver {drivername}, using {driverinstance.opts}'
        )
        yield driverinstance
async def close_mongo_connection():
    """Shut down the global mongodb client created by connect_to_mongo()."""
    log.info("Closing mongodb connection")
    db.client.close()
async def connect_to_walrus():
    """Create the global walrus redis handle and verify connectivity.

    Raises if the redis server is unreachable.
    """
    # Fixed typo in log message ("Connectiong" -> "Connecting").
    log.info("Connecting to redis using walrus")
    redis.walrus = walruslib.Database.from_url(config.OPTIONAL_COMPONENTS.REDIS.URL)
    # client_id() performs a round-trip, so an unreachable server raises here.
    redis.walrus.client_id()
async def connect_to_aioredis():
    """Create the global aioredis connection pool.

    Raises if the redis server is unreachable (create_redis_pool
    establishes the connection).
    """
    # Fixed typo in log message ("Connectiong" -> "Connecting").
    log.info("Connecting to redis using aioredis")
    redis.aioredis = await aioredislib.create_redis_pool(
        config.OPTIONAL_COMPONENTS.REDIS.URL
    )
def _get_plugindata():
    """Discover, filter and import plugins; return what was found.

    Plugins are imported from multiple paths with these rules:

    * First with a unique name wins
    * There are multiple matchers, that ALL must return true. They return
      true if they are NOT set, or if they match "$plugin_path / $plugin_name"

        * PLUGIN_WHITELIST_RE (regex)
        * PLUGIN_WHITELIST_LIST
        * PLUGIN_WHITELIST_TAGS
        * not in PLUGIN_BLACKLIST_LIST
        * not in PLUGIN_BLACKLIST_RE
        * not in PLUGIN_BLACKLIST_TAGS

    Returns:
        dict with keys:
          - 'plugins_to_load': mapping plugin-type -> list of plugins
          - 'task_candidates': module names that ship a tasks.py
          - 'routers': module-level ``router`` objects found in plugins
    """
    PLUGIN_WHITELIST_RE = re.compile(config.PLUGIN_WHITELIST_RE)
    PLUGIN_BLACKLIST_RE = re.compile(config.PLUGIN_BLACKLIST_RE)
    PLUGIN_WHITELIST_TAGS = set(config.PLUGIN_WHITELIST_TAGS)
    PLUGIN_BLACKLIST_TAGS = set(config.PLUGIN_BLACKLIST_TAGS)
    # PLUGIN_PATHS may be a single string or a list; normalize to a list
    # and always append the default plugin directory.
    PLUGIN_PATHS = unique(
        [config.PLUGIN_PATHS]
        if isinstance(config.PLUGIN_PATHS, str)
        else config.PLUGIN_PATHS) + ['/data/opa/plugins']

    log.info('Plugin loading settings:'
             f' plugin-paths: {PLUGIN_PATHS}\n'
             f' whitelist-regex: {PLUGIN_WHITELIST_RE}\n'
             f' whitelist-list: {config.PLUGIN_WHITELIST_LIST}\n'
             f' whitelist-tags: {config.PLUGIN_WHITELIST_TAGS}\n'
             f' blacklist-list: {config.PLUGIN_BLACKLIST_LIST}\n'
             f' blacklist-regex: {PLUGIN_BLACKLIST_RE}\n'
             f' blacklist-tags: {PLUGIN_BLACKLIST_TAGS}\n')

    # Plugin paths must be importable for import_module() below.
    sys_paths = sys.path + PLUGIN_PATHS
    sys.path = unique(sys_paths)

    plugins_to_load = defaultdict(list)
    task_candidates = []
    routers = []
    for plugin in pkgutil.iter_modules(PLUGIN_PATHS):
        allow_match = os.path.join(plugin.module_finder.path, plugin.name)
        tasks_candidate = False
        # Packages keep their metadata inside the package directory;
        # single-file plugins use a "<name>-meta.json" sibling file.
        if plugin.ispkg:
            metafile = os.path.join(allow_match, 'meta.json')
            if os.path.exists(os.path.join(allow_match, 'tasks.py')):
                tasks_candidate = True
        else:
            metafile = f'{allow_match}-meta.json'

        log.debug('')
        log.debug(f'Checking if we should load "{allow_match}"')
        if os.path.exists(metafile):
            log.debug(f'Found metafile @ {metafile}')
            # Context manager closes the handle promptly; the original
            # json.load(open(...)) leaked it until garbage collection.
            with open(metafile, 'r') as fh:
                metadata = json.load(fh)
        else:
            log.debug(
                f'Metafile @ {metafile} does not exist, using empty metadata')
            metadata = {}
        log.debug(f'Metadata: {metadata}')

        # Each configured matcher contributes one boolean; all must pass.
        # Unset matchers are simply absent from the dict (all([]) is True).
        load_checks = {}
        if config.PLUGIN_WHITELIST_LIST:
            load_checks['PLUGIN_WHITELIST_LIST'] = (
                allow_match in config.PLUGIN_WHITELIST_LIST)
        if PLUGIN_WHITELIST_RE.pattern:
            load_checks['PLUGIN_WHITELIST_RE'] = bool(
                PLUGIN_WHITELIST_RE.match(allow_match))
        if PLUGIN_WHITELIST_TAGS:
            load_checks['PLUGIN_WHITELIST_TAGS'] = bool(
                PLUGIN_WHITELIST_TAGS & set(metadata.get('tags', [])))
        if config.PLUGIN_BLACKLIST_LIST:
            load_checks['PLUGIN_BLACKLIST_LIST'] = (
                allow_match not in config.PLUGIN_BLACKLIST_LIST)
        if PLUGIN_BLACKLIST_RE.pattern:
            load_checks['PLUGIN_BLACKLIST_RE'] = not bool(
                PLUGIN_BLACKLIST_RE.match(allow_match))
        if PLUGIN_BLACKLIST_TAGS:
            load_checks['PLUGIN_BLACKLIST_TAGS'] = not bool(
                PLUGIN_BLACKLIST_TAGS & set(metadata.get('tags', [])))

        load = all(load_checks.values())
        log.debug(f'Load-checks: {load_checks}, overall({load})')
        if not load:
            continue

        log.info(f'Loading plugin: {plugin.name}')
        # NOTE: import_module eagerly imports all .py files in the package,
        # which is problematic e.g. for tasks modules: importing tasks from
        # an __init__ still needs get_component('celery') at the top of
        # tasks.py to mark functions as celery tasks, and that component is
        # not ready yet at this point. Lazy loading would be nicer, but the
        # hooking system has many dependencies on eager loading, so modules
        # need to be loaded here.
        mod = import_module(plugin.name)
        if tasks_candidate:
            task_candidates.append(plugin.name)
        defined_plugins = get_defined_plugins(mod)
        for pt in ['hook-definitions', 'hooks', 'drivers', 'setup']:
            plugins_to_load[pt] += defined_plugins[pt]
        if hasattr(mod, 'router'):
            routers.append(mod.router)

    return {
        'plugins_to_load': plugins_to_load,
        'task_candidates': task_candidates,
        'routers': routers,
    }