def __init__(self, working_dir: str, zone_id: str, transform_id: str):
    super(BaseTransform, self).__init__()
    self.working_dir = working_dir
    self.zone_id = zone_id
    self.transform_id = transform_id
    self.kernel = None

    # Read the setting once and resolve storage/cache locations from its templates
    setting = config.get_setting(working_dir)
    self.warehouse_endpoint = setting['warehouse_endpoint']
    warehouse_dir = Path(working_dir, setting['warehouse_dir']).as_posix()
    zone_storage_dir = setting['zone_storage_dir'].format(
        warehouse_dir=warehouse_dir,
        zone_id=zone_id,
    )
    self.transform_storage_dir = setting['transform_storage_dir'].format(
        zone_storage_dir=zone_storage_dir, transform_id=transform_id)
    self.transform_cache_dir = setting['transform_cache_dir'].format(
        zone_storage_dir=zone_storage_dir, transform_id=transform_id)

    # Ensure the parent dir exists before opening the LevelDB cache
    Path(self.transform_cache_dir).parent.mkdir(parents=True, exist_ok=True)
    self.transform_cache_db = plyvel.DB(self.transform_cache_dir,
                                        create_if_missing=True)

    self.logger = get_child_logger('aggregator.transform')

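# A standalone sketch of how the directory templates above expand; the
# working dir ('.') and the zone/transform IDs ('public-icon',
# 'stake_history') are hypothetical, and the templates are the defaults
# asserted by the test at the bottom of this file:
from pathlib import Path

working_dir = '.'
warehouse_dir = Path(working_dir, '.chainalytic/chainalytic_warehouse').as_posix()
zone_storage_dir = '{warehouse_dir}/{zone_id}_storage'.format(
    warehouse_dir=warehouse_dir, zone_id='public-icon')
transform_cache_dir = '{zone_storage_dir}/{transform_id}_cache'.format(
    zone_storage_dir=zone_storage_dir, transform_id='stake_history')
assert transform_cache_dir == (
    '.chainalytic/chainalytic_warehouse/public-icon_storage/stake_history_cache')
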
def _run_server(endpoint, working_dir, zone_id):
    global _PROVIDER
    global _LOGGER

    config.get_setting(working_dir)
    _LOGGER = create_logger('provider', zone_id)
    rpc_server.set_logger(_LOGGER)
    _PROVIDER = Provider(working_dir, zone_id)
    _LOGGER.info(f'Provider endpoint: {endpoint}')
    _LOGGER.info(f'Provider zone ID: {zone_id}')

    host, port = endpoint.split(':')
    port = int(port)

    async def handle(request):
        # Avoid shadowing the aiohttp request object with its own body text
        request_text = await request.text()
        response = await dispatch(request_text)
        if response.wanted:
            return web.json_response(response.deserialized(),
                                     status=response.http_status)
        else:
            return web.Response()

    # Allow cross-origin JSON-RPC POSTs on '/'
    app = web.Application()
    cors = aiohttp_cors.setup(app, defaults={"*": aiohttp_cors.ResourceOptions()})
    resource = cors.add(app.router.add_resource("/"))
    cors.add(resource.add_route("POST", handle))

    # Pass the parsed host as well; previously it was extracted but never used
    web.run_app(app, host=host, port=port)  # blocks until the server exits
    _LOGGER.info('Exited Provider')

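# A minimal client sketch for the Provider server above, assuming it
# listens on localhost:5530 (the default provider_endpoint asserted by the
# test below); 'some_method' is a placeholder, since the RPC methods are
# registered elsewhere in the codebase:
import asyncio
import aiohttp

async def call_provider(endpoint: str, method: str, params: dict) -> dict:
    payload = {'jsonrpc': '2.0', 'method': method, 'params': params, 'id': 1}
    async with aiohttp.ClientSession() as session:
        async with session.post(f'http://{endpoint}/', json=payload) as resp:
            return await resp.json()

# result = asyncio.get_event_loop().run_until_complete(
#     call_provider('localhost:5530', 'some_method', {}))
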
def load_config(self):
    if not config.check_user_config(self.working_dir):
        raise Exception('User config not found, please init config first')

    config.set_working_dir(self.working_dir)
    setting = config.get_setting()  # read once instead of four times
    self.upstream_endpoint = setting['upstream_endpoint']
    self.aggregator_endpoint = setting['aggregator_endpoint']
    self.warehouse_endpoint = setting['warehouse_endpoint']
    self.provider_endpoint = setting['provider_endpoint']

def __init__(self, working_dir: str, zone_id: str):
    super(BaseStorage, self).__init__()
    self.working_dir = working_dir
    self.zone_id = zone_id

    setting = config.get_setting(working_dir)
    self.warehouse_dir = Path(working_dir, setting['warehouse_dir']).as_posix()
    self.zone_storage_dir = setting['zone_storage_dir'].format(
        warehouse_dir=self.warehouse_dir,
        zone_id=zone_id,
    )

    # One storage dir per transform registered for this zone
    transforms = zone_manager.load_zone(
        zone_id, working_dir)['aggregator']['transform_registry']
    self.transform_storage_dirs = {
        tid: setting['transform_storage_dir'].format(
            zone_storage_dir=self.zone_storage_dir, transform_id=tid)
        for tid in transforms
    }

    # Set up storage DB for all transforms
    for p in self.transform_storage_dirs.values():
        Path(p).parent.mkdir(parents=True, exist_ok=True)
    self.transform_storage_dbs = {
        tid: plyvel.DB(self.transform_storage_dirs[tid], create_if_missing=True)
        for tid in transforms
    }

    self.logger = get_child_logger('warehouse.storage')

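# A minimal plyvel sketch matching the per-transform DBs opened above;
# the path is a stand-in for one entry of self.transform_storage_dirs,
# and the key/value pair is hypothetical:
import plyvel

db = plyvel.DB('/tmp/example_transform_storage', create_if_missing=True)
db.put(b'1000000', b'{"total_staking": "123.45"}')  # LevelDB stores bytes only
assert db.get(b'1000000') == b'{"total_staking": "123.45"}'
db.close()
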
def _run_server(endpoint, working_dir, zone_id):
    global _UPSTREAM
    global _LOGGER

    config.get_setting(working_dir)
    _LOGGER = create_logger('upstream', zone_id)
    rpc_server.set_logger(_LOGGER)
    _UPSTREAM = Upstream(working_dir, zone_id)
    _LOGGER.info(f'Upstream endpoint: {endpoint}')
    _LOGGER.info(f'Upstream zone ID: {zone_id}')

    host, port = endpoint.split(':')
    start_server = websockets.serve(main_dispatcher, host, int(port))
    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()  # blocks until the loop is stopped
    _LOGGER.info('Exited Upstream')

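# A minimal client sketch for the websocket servers in this file; the
# endpoint is the default upstream_endpoint asserted by the test below,
# and the payload format is an assumption, since it is defined by
# main_dispatcher elsewhere in the codebase:
import asyncio
import websockets

async def call_upstream(endpoint: str, message: str) -> str:
    async with websockets.connect(f'ws://{endpoint}') as ws:
        await ws.send(message)
        return await ws.recv()

# reply = asyncio.get_event_loop().run_until_complete(
#     call_upstream('localhost:5500', '{"jsonrpc": "2.0", "method": "ping", "id": 1}'))
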
def _run_server(endpoint, working_dir, zone_id):
    global _WAREHOUSE
    global _LOGGER

    config.get_setting(working_dir)
    _LOGGER = create_logger('warehouse', zone_id)
    rpc_server.set_logger(_LOGGER)
    _WAREHOUSE = Warehouse(working_dir, zone_id)
    _LOGGER.info(f'Warehouse endpoint: {endpoint}')
    _LOGGER.info(f'Warehouse zone ID: {zone_id}')

    host, port = endpoint.split(':')
    start_server = websockets.serve(main_dispatcher, host, int(port))
    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()  # blocks until the loop is stopped
    _LOGGER.info('Exited Warehouse')

def __init__(self, working_dir: str, zone_id: str):
    super(BaseCollator, self).__init__()
    self.working_dir = working_dir
    self.zone_id = zone_id
    self.warehouse_endpoint = config.get_setting(
        working_dir)['warehouse_endpoint']
    self.logger = get_child_logger('provider.collator')

def __init__(self, working_dir: str, zone_id: str):
    super(BaseKernel, self).__init__()
    self.working_dir = working_dir
    self.zone_id = zone_id
    self.transforms = {}
    self.chain_registry = config.get_chain_registry(working_dir)
    self.warehouse_endpoint = config.get_setting(
        working_dir)['warehouse_endpoint']
    self.logger = get_child_logger('aggregator.kernel')

def __init__(self, working_dir: str, zone_id: str):
    super(Warehouse, self).__init__()
    self.working_dir = working_dir
    self.zone_id = zone_id
    config.set_working_dir(working_dir)
    self.setting = config.get_setting(working_dir)
    self.chain_registry = config.get_chain_registry(working_dir)

    # Load the zone's warehouse modules and instantiate its Storage
    mods = zone_manager.load_zone(self.zone_id, working_dir)['warehouse']
    self.storage = mods['storage'].Storage(working_dir, zone_id)

def _run_server(endpoint, working_dir, zone_id):
    global _AGGREGATOR
    global _LOGGER

    config.get_setting(working_dir)
    _LOGGER = create_logger('aggregator', zone_id)
    rpc_server.set_logger(_LOGGER)
    _AGGREGATOR = Aggregator(working_dir, zone_id)
    _LOGGER.info(f'Aggregator endpoint: {endpoint}')
    _LOGGER.info(f'Aggregator zone ID: {zone_id}')

    host, port = endpoint.split(':')
    loop = asyncio.get_event_loop()
    loop.run_until_complete(initialize())  # finish init before serving
    loop.create_task(fetch_data())         # background task; starts with the loop
    start_server = websockets.serve(main_dispatcher, host, int(port))
    loop.run_until_complete(start_server)
    loop.run_forever()
    _LOGGER.info('Exited Aggregator')

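# Note the startup ordering above: initialize() runs to completion before
# the server is bound, while fetch_data() is only scheduled and begins
# executing once run_forever() drives the loop. A standalone sketch of
# that create_task scheduling behavior:
import asyncio

async def background_job():
    print('background job started')  # only runs once the loop is running

loop = asyncio.get_event_loop()
loop.create_task(background_job())         # scheduled, not yet started
loop.run_until_complete(asyncio.sleep(0))  # step the loop; the task starts
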
def __init__(self, working_dir: str, zone_id: str):
    super(Provider, self).__init__()
    self.working_dir = working_dir
    self.zone_id = zone_id
    config.set_working_dir(working_dir)
    self.setting = config.get_setting(working_dir)
    self.chain_registry = config.get_chain_registry(working_dir)

    # Load the zone's provider modules and wire the collator into the API bundle
    mods = zone_manager.load_zone(self.zone_id, working_dir)['provider']
    self.collator = mods['collator'].Collator(working_dir, zone_id)
    self.api_bundle = mods['api_bundle'].ApiBundle(working_dir, zone_id)
    self.api_bundle.set_collator(self.collator)

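# From the call sites in this file, zone_manager.load_zone() returns a
# nested dict of loaded zone modules, roughly shaped as follows (inferred
# from the usages above, not an authoritative schema):
#
#     {
#         'aggregator': {'transform_registry': {...}},
#         'warehouse': {'storage': <module with Storage>},
#         'provider': {'collator': <module with Collator>,
#                      'api_bundle': <module with ApiBundle>},
#     }
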
def test_setting_value(setup_chainalytic_config):
    setting = config.get_setting()
    assert setting

    valid_keys = [
        'upstream_endpoint',
        'aggregator_endpoint',
        'warehouse_endpoint',
        'provider_endpoint',
        'warehouse_dir',
        'zone_storage_dir',
        'transform_storage_dir',
        'transform_cache_dir',
    ]
    for k in valid_keys:
        assert k in setting

    assert setting['upstream_endpoint'] == 'localhost:5500'
    assert setting['aggregator_endpoint'] == 'localhost:5510'
    assert setting['warehouse_endpoint'] == 'localhost:5520'
    assert setting['provider_endpoint'] == 'localhost:5530'
    assert setting['warehouse_dir'] == '.chainalytic/chainalytic_warehouse'
    assert setting['zone_storage_dir'] == '{warehouse_dir}/{zone_id}_storage'
    assert setting['transform_storage_dir'] == '{zone_storage_dir}/{transform_id}_storage'
    assert setting['transform_cache_dir'] == '{zone_storage_dir}/{transform_id}_cache'

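# One possible shape for the setup_chainalytic_config fixture used above
# (a sketch only; the real fixture lives in the test suite's conftest.py,
# and config.init_user_config is a hypothetical helper name, not confirmed
# by this file):
import pytest

@pytest.fixture
def setup_chainalytic_config(tmp_path):
    working_dir = tmp_path.as_posix()
    config.init_user_config(working_dir)  # hypothetical: writes default config
    config.set_working_dir(working_dir)   # lets get_setting() run with no args
    yield working_dir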