def receiver_callback(self, bot, update):
    """Spawn an async job that handles *update* for the bot.

    Ensures the calling thread has a usable event loop and an aiojobs
    scheduler, then runs ``self.interactor_callback(update)`` as a
    supervised job on that scheduler.

    Args:
        bot: Bot instance passed by the dispatcher (unused here).
        update: Incoming update to hand to ``interactor_callback``.
    """
    try:
        event_loop = asyncio.get_event_loop()
    except RuntimeError:
        # No event loop in this (non-main) thread yet: create one and a
        # fresh aiojobs scheduler bound to it. This replaces the previous
        # hand-rolled policy setup that reached into the private
        # asyncio.unix_events._UnixSelectorEventLoop API.
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        self.aiojobs_scheduler = event_loop.run_until_complete(
            aiojobs.create_scheduler()
        )
    # Spawn the handler as a job and wait for the spawn itself to complete.
    event_loop.run_until_complete(
        self.aiojobs_scheduler.spawn(self.interactor_callback(update))
    )
def main(host: Optional[str], port: Optional[int], path: Optional[str],
         start_streams: bool, config_files: List[str],
         api_file: Optional[str]):
    """Bootstrap the engine server and all configured apps, then serve.

    ``config_files[0]`` is the engine/server config; every remaining entry
    is an app config. Blocks in ``web.run_app`` until shutdown.
    """
    loop = asyncio.get_event_loop()
    # Shared scheduler that supervises background jobs started by the apps.
    scheduler = loop.run_until_complete(aiojobs.create_scheduler())
    logger.info("Loading engine config file=%s...", config_files[0])  # type: ignore
    server_config = _load_engine_config(config_files[0])
    # Server must be up before apps/APIs are registered against it.
    loop.run_until_complete(start_server(server_config))
    if server_config.auth.domain:
        auth_info_default['domain'] = server_config.auth.domain
    if api_file is not None:
        api.load_api_file(api_file)
    api.register_server_config(server_config)
    apps_config = []
    # Load each app config and attach the shared server config to it.
    for config_file in config_files[1:]:
        logger.info(__name__, f"Loading app config file={config_file}...")
        config = _load_app_config(config_file)
        config.server = server_config
        apps_config.append(config)
    api.register_apps(apps_config)
    api.enable_swagger(server_config, web_server)
    for config in apps_config:
        loop.run_until_complete(start_app(config, scheduler, start_streams))
    logger.debug(__name__, "Performing forced garbage collection...")
    # Reclaim memory from config loading before entering the serve loop.
    gc.collect()
    web.run_app(web_server, path=path, port=port, host=host)
def __init__(self, loop, start_event=None):
    """Remember the loop/start event and build the bounded job scheduler."""
    self.loop = loop
    self.start_event = start_event
    # Scheduler capacity is capped by the class-level SCHEDULER_LIMIT.
    scheduler_coro = aiojobs.create_scheduler(limit=self.SCHEDULER_LIMIT)
    self.scheduler = self.loop.run_until_complete(scheduler_coro)
def init_api(self):
    """Initialise the API: create the job scheduler and start the heartbeat.

    The scheduler's close timeout is one heartbeat interval plus two
    seconds so an in-flight heartbeat can finish before teardown.
    """
    self.log.info('initializing')
    self.log.info('creating job scheduler')  # fixed typo: was 'scheudler'
    self.scheduler = self.loop.run_until_complete(
        aiojobs.create_scheduler(close_timeout=self.heartbeat_interval + 2))
    self.log.info('registering')
    self.log.info('starting heartbeat task')
    # Fire-and-forget: spawn the heartbeat as a supervised aiojobs job.
    asyncio.ensure_future(self.scheduler.spawn(self.heartbeat()))
def test_manager_instance(loop, dbi, tmpdir):
    """Yield a running files Manager backed by a fresh aiojobs scheduler."""
    files_dir = str(tmpdir.mkdir("files"))
    watch_dir = str(tmpdir.mkdir("watch"))
    pool = concurrent.futures.ThreadPoolExecutor()
    manager = virtool.files.Manager(pool, dbi, files_dir, watch_dir)
    job_scheduler = loop.run_until_complete(aiojobs.create_scheduler())
    loop.run_until_complete(job_scheduler.spawn(manager.run()))
    yield manager
    # Teardown: closing the scheduler also cancels the spawned manager job.
    loop.run_until_complete(job_scheduler.close())
async def maker(**kwargs):
    """Create a scheduler, record it for later cleanup, and return it."""
    scheduler = await create_scheduler(**kwargs)
    schedulers.append(scheduler)
    return scheduler
def get_scheduler(self):
    """Create and return an aiojobs scheduler on the current event loop.

    Returns:
        The scheduler produced by ``aiojobs.create_scheduler()``.
    """
    loop = asyncio.get_event_loop()
    # run_until_complete accepts a coroutine directly; the previous
    # asyncio.ensure_future wrapper was redundant.
    return loop.run_until_complete(aiojobs.create_scheduler())