def _shot(self):
    """Run one polling cycle.

    Loads the node/proxy lists published via file IPC, polls them in
    batches of NODES_EACH_THREAD worker threads, persists the results
    through the shared session, and republishes per-node details.
    """
    poll = file_ipc.read_poll()
    nodes = _load_from(RedisNodeStatus, poll['nodes'])
    proxies = _load_from(ProxyStatus, poll['proxies'])
    # commit because `get_by` may create new nodes
    # to reattach session they must be persisted
    commit_session()

    all_nodes = nodes + proxies
    # Shuffle so the same hosts don't always land in the same batch.
    random.shuffle(all_nodes)
    pollers = [
        Poller(all_nodes[i:i + NODES_EACH_THREAD], self.algalon_client)
        for i in xrange(0, len(all_nodes), NODES_EACH_THREAD)
    ]
    for p in pollers:
        p.start()

    time.sleep(self.interval)
    for p in pollers:
        p.join()

    # Collect results only after every poller thread has finished.
    for p in pollers:
        for n in p.nodes:
            n.add_to_db()
    save_polling_stat(nodes, proxies)
    commit_session()
    logging.debug('Total %d nodes, %d proxies', len(nodes), len(proxies))

    try:
        file_ipc.write_details({n.addr: n.details for n in nodes},
                               {p.addr: p.details for p in proxies})
    except StandardError as e:
        # Best effort: failing to publish details must not kill the daemon.
        logging.exception(e)
def run(interval, algalon_client, app):
    """Daemon entry point: poll Redis nodes and proxies forever.

    Starts the background cluster TaskPoller, then loops: read the
    current node/proxy lists from file IPC, poll them in batches of
    NODES_EACH_THREAD threads every `interval` seconds, and republish
    per-node details.
    """
    # Imported here to avoid import cycles at module load time.
    from daemonutils import node_polling
    from daemonutils.cluster_task import TaskPoller
    TaskPoller(app, interval).start()

    NODES_EACH_THREAD = 20
    with handlers.base.app.app_context():
        while True:
            poll = file_ipc.read_poll()
            nodes = _load_from(node_polling.RedisNodeStatus, poll['nodes'])
            proxies = _load_from(node_polling.ProxyStatus, poll['proxies'])
            # Commit so freshly created status rows are persisted before
            # the poller threads touch them.
            models.base.db.session.commit()

            all_nodes = nodes + proxies
            # Shuffle so the same hosts don't always land in the same batch.
            random.shuffle(all_nodes)
            pollers = [
                Poller(all_nodes[i:i + NODES_EACH_THREAD], algalon_client)
                for i in xrange(0, len(all_nodes), NODES_EACH_THREAD)
            ]
            for p in pollers:
                p.start()

            time.sleep(interval)
            for p in pollers:
                p.join()
            logging.info('Total %d nodes, %d proxies',
                         len(nodes), len(proxies))

            try:
                file_ipc.write_details({n.addr: n.details for n in nodes},
                                       {p.addr: p.details for p in proxies})
            except StandardError as e:
                # Best effort: a failed publish must not stop the loop.
                logging.exception(e)
def _shot(self):
    """Run one polling cycle.

    Loads the node/proxy lists published via file IPC, polls them in
    batches of NODES_EACH_THREAD worker threads, persists the results
    through the SQLAlchemy session, and republishes per-node details.
    """
    poll = file_ipc.read_poll()
    nodes = _load_from(RedisNodeStatus, poll['nodes'])
    proxies = _load_from(ProxyStatus, poll['proxies'])
    # commit because `get_by` may create new nodes
    # to reattach session they must be persisted
    db.session.commit()

    all_nodes = nodes + proxies
    # Shuffle so the same hosts don't always land in the same batch.
    random.shuffle(all_nodes)
    pollers = [
        Poller(all_nodes[i:i + NODES_EACH_THREAD], self.algalon_client)
        for i in xrange(0, len(all_nodes), NODES_EACH_THREAD)
    ]
    for p in pollers:
        p.start()

    time.sleep(self.interval)
    for p in pollers:
        p.join()

    # Collect results only after every poller thread has finished.
    for p in pollers:
        for n in p.nodes:
            n.add_to_db()
    save_polling_stat(nodes, proxies)
    db.session.commit()
    logging.debug('Total %d nodes, %d proxies', len(nodes), len(proxies))

    try:
        file_ipc.write_details({n.addr: n.details for n in nodes},
                               {p.addr: p.details for p in proxies})
    except StandardError as e:
        # Best effort: failing to publish details must not kill the daemon.
        logging.exception(e)
def run(interval, algalon_client, app):
    """Daemon entry point: poll Redis nodes and proxies forever.

    Starts the background cluster TaskPoller, then loops: read the
    current node/proxy lists from file IPC, poll them in batches of
    NODES_EACH_THREAD threads every `interval` seconds, and republish
    per-node details.
    """
    # Imported here to avoid import cycles at module load time.
    from daemonutils import node_polling
    from daemonutils.cluster_task import TaskPoller
    TaskPoller(app, interval).start()

    NODES_EACH_THREAD = 20
    with handlers.base.app.app_context():
        while True:
            poll = file_ipc.read_poll()
            nodes = _load_from(node_polling.RedisNodeStatus, poll['nodes'])
            proxies = _load_from(node_polling.ProxyStatus, poll['proxies'])
            # Commit so freshly created status rows are persisted before
            # the poller threads touch them.
            models.base.db.session.commit()

            all_nodes = nodes + proxies
            # Shuffle so the same hosts don't always land in the same batch.
            random.shuffle(all_nodes)
            pollers = [
                Poller(all_nodes[i:i + NODES_EACH_THREAD], algalon_client)
                for i in xrange(0, len(all_nodes), NODES_EACH_THREAD)
            ]
            for p in pollers:
                p.start()

            time.sleep(interval)
            for p in pollers:
                p.join()
            logging.info('Total %d nodes, %d proxies',
                         len(nodes), len(proxies))

            try:
                file_ipc.write_details({n.addr: n.details for n in nodes},
                                       {p.addr: p.details for p in proxies})
            except StandardError as e:
                # Best effort: a failed publish must not stop the loop.
                logging.exception(e)
def write_polling_details(self, redis_details, proxy_details):
    """Publish polling details through the file IPC channel.

    Thin delegate to file_ipc.write_details; both arguments are passed
    through unchanged (presumably addr -> details mappings — confirm
    against callers).
    """
    file_ipc.write_details(redis_details, proxy_details)