async def start(self, blockchain_node: 'BlockchainNode', extraconf=None):
    """Start a hub (wallet server) process pointed at *blockchain_node*.

    Builds the server configuration in a fresh temporary DB directory,
    publishes it through environment variables (all values must be
    strings for ``os.environ``), then constructs and starts the Server.

    :param blockchain_node: node whose ``rpc_url`` the hub will daemon-poll
    :param extraconf: optional dict of extra/overriding config entries
    """
    self.data_path = tempfile.mkdtemp()
    conf = {
        'DESCRIPTION': '',
        'PAYMENT_ADDRESS': '',
        'DAILY_FEE': '0',
        'DB_DIRECTORY': self.data_path,
        'DAEMON_URL': blockchain_node.rpc_url,
        'REORG_LIMIT': '100',
        'HOST': self.hostname,
        'TCP_PORT': str(self.port),
        'UDP_PORT': str(self.udp_port),
        'SESSION_TIMEOUT': str(self.session_timeout),
        'MAX_QUERY_WORKERS': '0',
        'INDIVIDUAL_TAG_INDEXES': '',
        # Fix: stringify like the other ports — os.environ.update() raises
        # TypeError for non-string values, so an int rpc_port would crash here.
        'RPC_PORT': str(self.rpc_port),
        'ES_INDEX_PREFIX': self.index_name,
        'ES_MODE': 'writer',
    }
    if extraconf:
        conf.update(extraconf)
    # TODO: don't use os.environ
    os.environ.update(conf)
    self.server = Server(Env(self.coin_class))
    # Speed up mempool/prefetcher polling for test responsiveness.
    self.server.bp.mempool.refresh_secs = self.server.bp.prefetcher.polling_delay = 0.5
    await self.server.start()
async def make_es_index():
    """Create/verify the ElasticSearch search index, always releasing the client."""
    env = Env(LBC)
    search_index = SearchIndex(
        '', elastic_host=env.elastic_host, elastic_port=env.elastic_port)
    try:
        return await search_index.start()
    finally:
        search_index.stop()
def run_elastic_sync():
    """CLI entry point: sync the on-disk leveldb claim database into ElasticSearch."""
    logging.basicConfig(level=logging.INFO)
    for noisy_logger in ('aiohttp', 'elasticsearch'):
        logging.getLogger(noisy_logger).setLevel(logging.WARNING)
    logging.info('lbry.server starting')

    parser = argparse.ArgumentParser(prog="lbry-hub-elastic-sync")
    parser.add_argument("-c", "--clients", type=int, default=32)
    parser.add_argument("-f", "--force", default=False, action='store_true')
    Env.contribute_to_arg_parser(parser)
    args = parser.parse_args()
    env = Env.from_arg_parser(args)

    # Nothing to do if the leveldb database has never been created.
    if not os.path.exists(os.path.join(args.db_dir, 'lbry-leveldb')):
        logging.info("DB path doesnt exist, nothing to sync to ES")
        return

    asyncio.run(
        make_es_index_and_run_sync(env, clients=args.clients, force=args.force))
async def consume(producer, index_name):
    """Bulk-insert documents from *producer* into ES, then refresh *index_name*.

    The client connection is always closed, even if the bulk insert fails.
    """
    env = Env(LBC)
    logging.info("ES sync host: %s:%i", env.elastic_host, env.elastic_port)
    client = AsyncElasticsearch(
        [{'host': env.elastic_host, 'port': env.elastic_port}])
    try:
        await async_bulk(client, producer, request_timeout=120)
        await client.indices.refresh(index=index_name)
    finally:
        await client.close()
def main():
    """Run the hub server for the coin selected via the --spvserver argument."""
    args = get_argument_parser().parse_args()
    coin_class = get_coin_class(args.spvserver)
    logging.basicConfig(level=logging.INFO)
    logging.info('lbry.server starting')
    try:
        # Server/Env construction stays inside the try so config errors are
        # reported the same way as runtime failures.
        Server(Env(coin_class)).run()
    except Exception:
        traceback.print_exc()
        logging.critical('lbry.server terminated abnormally')
    else:
        logging.info('lbry.server terminated normally')
async def make_es_index():
    """Start the ES search index, dropping and recreating it on a version mismatch.

    The index client is always stopped, whichever path is taken.
    """
    env = Env(LBC)
    search_index = SearchIndex(
        '', elastic_host=env.elastic_host, elastic_port=env.elastic_port)
    try:
        try:
            return await search_index.start()
        except IndexVersionMismatch as err:
            logging.info(
                "dropping ES search index (version %s) for upgrade to version %s",
                err.got_version, err.expected_version)
            await search_index.delete_index()
            return await search_index.start()
    finally:
        search_index.stop()
def main():
    """CLI entry point: parse arguments, configure logging, run the hub server."""
    args = get_argument_parser().parse_args()
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(levelname)-4s %(name)s:%(lineno)d: %(message)s")
    logging.info('lbry.server starting')
    for noisy_logger in ('aiohttp', 'elasticsearch'):
        logging.getLogger(noisy_logger).setLevel(logging.WARNING)
    try:
        Server(Env.from_arg_parser(args)).run()
    except Exception:
        traceback.print_exc()
        logging.critical('lbry.server terminated abnormally')
    else:
        logging.info('lbry.server terminated normally')
def get_argument_parser():
    """Build the lbry-hub argument parser, with Env's standard options attached."""
    arg_parser = argparse.ArgumentParser(prog="lbry-hub")
    Env.contribute_to_arg_parser(arg_parser)
    return arg_parser