def main():
    """Stream r/all comments and enqueue the id of every comment that mentions a book."""
    comment_queue = RedisQueue('reddit-book-stream', host=REDIS_HOST, port=REDIS_PORT)
    reddit = praw.Reddit(
        user_agent=USER_AGENT,
        client_id=CLIENT_ID,
        client_secret=CLIENT_SECRET,
        username=USERNAME,
        password=PASSWORD,
    )
    # pause_after < 0 makes the stream yield None instead of blocking between
    # batches, which allows faster streaming.
    for comment in reddit.subreddit('all').stream.comments(pause_after=-1):
        # The stream yields None when pause_after trips; skip those and any
        # comment whose body does not mention a book.
        if not comment or not mentions_book(comment.body):
            continue
        comment_queue.put(comment.id)
        print(comment.id)
        print(f'reddit.com/api/info?id=t1_{comment.id}')
class SerialWorker:
    """Bridges two Redis queues to an RS-485 serial line.

    Commands are popped from the down queue and written to /dev/ttyS0; any
    reply read back is pushed onto the up queue. The EN_485 GPIO pin is driven
    HIGH while transmitting and LOW to receive (presumably the driver-enable
    line of an RS-485 transceiver -- TODO confirm against the board schematic).
    """

    def __init__(self):
        # Alternation flag: when the command queue is empty, default commands
        # are sent in a strict DEFAULT_COMMAND / DEFAULT_COMMAND2 rotation.
        self.trigger = False
        self.result_queue = RedisQueue(Config.UP_QUEUE_NAME)
        self.command_queue = RedisQueue(Config.DOWN_QUEUE_NAME)
        self.port = serial.Serial("/dev/ttyS0", 9600,
                                  parity=serial.PARITY_NONE,
                                  stopbits=serial.STOPBITS_ONE,
                                  bytesize=serial.EIGHTBITS,
                                  timeout=Config.SERIAL_WAIT)
        # NOTE(review): __init__ never returns -- start() loops forever.
        self.start()

    def start(self):
        # Poll the queue and exchange one command/response per cycle.
        while True:
            self.executeTask()
            time.sleep(Config.SERIAL_CYC)

    def executeTask(self):
        # Enable the transmitter before writing.
        GPIO.output(Config.EN_485, GPIO.HIGH)
        command = self.command_queue.get_nowait()
        if not command:
            # Queue empty: alternate between the two default commands.
            self.trigger = not self.trigger
            if self.trigger:
                command = DEFAULT_COMMAND
            else:
                command = DEFAULT_COMMAND2
        print 'write to 485 %s' % command
        command = CommandHelper.toWriteable(command)
        self.port.write(command)
        # Busy-wait until the UART output buffer drains, then switch the
        # transceiver back to receive mode.
        while self.port.out_waiting > 0:
            time.sleep(0.01)
        GPIO.output(Config.EN_485, GPIO.LOW)
        # readall() blocks up to the port timeout (Config.SERIAL_WAIT).
        result = self.port.readall()
        if result:
            result = CommandHelper.toReadable(result)
            print 'receive from 485 %s' % result
            self.result_queue.put(result)
class A3CActorThread(object):
    """One A3C actor: rolls out up to LOCAL_T_MAX steps in its game
    environment, computes discounted returns and TD errors, and pushes the
    pickled training tuples onto a Redis queue for an asynchronous learner.
    """

    def __init__(self, thread_index, global_network):
        self.thread_index = thread_index
        # NOTE(review): the "local" network is the global network itself here,
        # not a per-thread copy -- confirm this is intended.
        self.local_network = global_network
        self.game_state = GameState()
        self.local_t = 0

        # for log
        self.episode_reward = 0.0
        self.episode_start_time = 0.0
        self.prev_local_t = 0

        self.rq = RedisQueue(REDIS_QUEUE_NAME)
        return

    def choose_action(self, policy_output):
        """Sample an action index from the policy distribution, with an
        epsilon-greedy random action taken with RANDOM_ACTION_PROBILITY."""
        if random.random() < RANDOM_ACTION_PROBILITY:
            return random.randint(0, ACTION_DIM - 1)

        # Roulette-wheel sampling over the (unnormalized) cumulative sums.
        values = []
        sum = 0.0
        for rate in policy_output:
            sum += rate
            values.append(sum)

        r = random.random() * sum
        for i in range(len(values)):
            if values[i] >= r:
                return i
        # Fallback for floating-point edge cases where r exceeds every bucket.
        return len(values) - 1

    def _record_log(self, sess, global_t, summary_writer, summary_op,
                    reward_input, reward, time_input, living_time):
        # Write a single TensorBoard summary point for this episode.
        summary_str = sess.run(summary_op, feed_dict={
            reward_input: reward,
            time_input: living_time
        })
        summary_writer.add_summary(summary_str, global_t)
        return

    def process(self, sess, global_t, summary_writer, summary_op,
                reward_input, time_input):
        """Run one rollout of at most LOCAL_T_MAX steps and enqueue training
        data. Returns the number of local steps actually executed."""
        states = []
        actions = []
        rewards = []
        values = []

        terminal_end = False

        # reduce the influence of socket connecting time
        if self.episode_start_time == 0.0:
            self.episode_start_time = timestamp()

        start_local_t = self.local_t

        for i in range(LOCAL_T_MAX):
            policy_, value_ = self.local_network.run_policy_and_value(
                sess, self.game_state.s_t)
            if self.thread_index == 0 and self.local_t % 1000 == 0:
                print 'policy=', policy_
                print 'value=', value_

            action_id = self.choose_action(policy_)

            states.append(self.game_state.s_t)
            actions.append(action_id)
            values.append(value_)

            self.game_state.process(action_id)
            reward = self.game_state.reward
            terminal = self.game_state.terminal

            self.episode_reward += reward
            rewards.append(reward)
            self.local_t += 1

            # s_t1 -> s_t
            self.game_state.update()

            if terminal:
                terminal_end = True
                episode_end_time = timestamp()
                living_time = episode_end_time - self.episode_start_time

                self._record_log(sess, global_t, summary_writer, summary_op,
                                 reward_input, self.episode_reward,
                                 time_input, living_time)
                # Python-2 print statement: the %-formatting applies to the
                # parenthesized string expression, so this prints as intended.
                print("global_t=%d / reward=%.2f / living_time=%.4f") % (
                    global_t, self.episode_reward, living_time)

                # reset variables
                self.episode_reward = 0.0
                self.episode_start_time = episode_end_time
                self.game_state.reset()
                if USE_LSTM:
                    self.local_network.reset_lstm_state()
                break

            # log -- periodic progress summary for long-running episodes
            if self.local_t % 2000 == 0:
                living_time = timestamp() - self.episode_start_time
                self._record_log(sess, global_t, summary_writer, summary_op,
                                 reward_input, self.episode_reward,
                                 time_input, living_time)

        # -----------end of batch (LOCAL_T_MAX)--------------------
        # Bootstrap R from the value estimate unless the episode terminated.
        R = 0.0
        if not terminal_end:
            R = self.local_network.run_value(sess, self.game_state.s_t)
        # print ('global_t: %d, R: %f') % (global_t, R)

        # Walk the rollout backwards to accumulate discounted returns.
        states.reverse()
        actions.reverse()
        rewards.reverse()
        values.reverse()

        batch_state = []
        batch_action = []
        batch_td = []
        batch_R = []

        for (ai, ri, si, Vi) in zip(actions, rewards, states, values):
            R = ri + GAMMA * R
            td = R - Vi
            action = np.zeros([ACTION_DIM])
            action[ai] = 1

            batch_state.append(si)
            batch_action.append(action)
            batch_td.append(td)
            batch_R.append(R)

            # put in into redis queue for asychronously train
            data = cPickle.dumps((si, action, td, R))
            self.rq.put(data)

        diff_local_t = self.local_t - start_local_t
        return diff_local_t
#!/usr/bin/env python # coding:utf-8 # Copyright (C) dirlt from redis_queue import RedisQueue command_queue = RedisQueue('command') command_queue.put('trigger')
class Mocker(Thread):
    """Background thread that fabricates smart-meter readings and pushes them
    onto the 'normal' Redis queue every 10 seconds, so the rest of the
    pipeline can be exercised without real hardware."""

    def __init__(self, stop_event):
        super().__init__()
        self.energy_data_queue = RedisQueue('normal')
        self.stop_event = stop_event
        self.default_message = self.get_default_message()
        # Running meter totals start from a random baseline so consecutive
        # runs produce different-looking histories.
        self.total_usage = random.randint(1000, 5000)
        self.total_redelivery = random.randint(1000, 5000)
        self.total_solar = random.randint(1000, 5000)
        self.total_gas = random.randint(1000, 5000)

    def get_default_message(self):
        """Load the message template from default_message.json, or exit.

        FIX: narrowed the bare ``except:`` to (OSError, ValueError) -- the
        file-access and JSON-decode failures this is meant to handle -- so
        that KeyboardInterrupt/SystemExit and unrelated bugs are no longer
        silently converted into an exit. json.JSONDecodeError is a subclass
        of ValueError.
        """
        try:
            with open('default_message.json') as default_message_file:
                default_message = json.load(default_message_file)
        except (OSError, ValueError):
            print(
                'Something went wrong when trying to open default_message.json'
            )
            sys.exit()
        return default_message

    def run(self):
        # Emit one mock reading every 10 seconds until asked to stop.
        while not self.stop_event.is_set():
            message = self.build_mock_data()
            print(message)
            self.energy_data_queue.put(json.dumps(message))
            time.sleep(10)

    def build_mock_data(self):
        """Return a fresh mock reading based on a copy of the template."""
        message = copy.deepcopy(self.default_message)
        energy_data = self.generate_mock_data(message)
        return energy_data

    def generate_mock_data(self, message):
        """Fill *message* with randomized usage/solar/redelivery values and
        advance the running totals. Mutates and returns *message*."""
        message["mode"] = "1"
        # ~60% of readings simulate net consumption, the rest net production.
        random_decider = random.randint(0, 10000)
        if random_decider < 6000:
            usage = random.randint(0, 2500)
            self.total_usage = self.total_usage + int(usage / 100)
            solar = random.randint(0, 2000)
            redelivery = 0
        else:
            usage = 0
            solar = random.randint(0, 4000)
            # Redelivery can never exceed what the panels produce.
            redelivery = random.randint(0, solar)
            self.total_redelivery = self.total_redelivery + int(
                redelivery / 100)
        self.total_solar = self.total_solar + int(solar / 100)
        self.total_gas = self.total_gas + int(random.randint(0, 110) / 100)
        message['unix_timestamp'] = int(time.time())
        message["usage_now"] = usage
        message["redelivery_now"] = redelivery
        message["solar_now"] = solar
        message["usage_total_high"] = self.total_usage
        message["redelivery_total_high"] = self.total_redelivery
        message["usage_total_low"] = self.total_usage
        message["redelivery_total_low"] = self.total_redelivery
        message["solar_total"] = self.total_solar
        message["usage_gas_now"] = 0
        message["usage_gas_total"] = self.total_gas
        return message
def push_to_queue(queue_name, items):
    """Enqueue every element of *items* onto the Redis queue *queue_name*."""
    target = RedisQueue(queue_name)
    for entry in items:
        target.put(entry)
class Reader(threading.Thread):
    """Reads DSMR telegrams from the P1 serial port, augments each reading
    with data from a solar inverter HTTP API, and publishes the combined
    message on the 'normal' Redis queue."""

    def __init__(self, status_queue, config, stop_event):
        super().__init__()
        self.energy_data_queue = RedisQueue('normal')
        self.status_queue = status_queue
        self.reader = self.init_reader()
        self.solar_ip = config['solar_ip']
        self.solar_url = self.solar_ip + config['solar_url']
        self.stop_event = stop_event
        self.console_mode = True if config["console_mode"] == "true" else False

    def init_reader(self):
        # Hard-coded device/spec: the meter speaks DSMR v4 on /dev/ttyUSB0.
        serial_reader = SerialReader(
            device="/dev/ttyUSB0",
            serial_settings=SERIAL_SETTINGS_V4,
            telegram_specification=telegram_specifications.V4)
        return serial_reader

    def run(self):
        self.send_message_to_listeners(Status.RUNNING,
                                       description='Reader has been started')
        self.read()

    def read(self):
        """Consume telegrams until the stop event is set, publishing each
        extracted reading to the energy data queue."""
        for telegram in self.reader.read():
            energy_data = self.extract_data_from_telegram(telegram)
            if self.console_mode:
                self.send_message_to_listeners(Status.RUNNING,
                                               description=energy_data)
            self.energy_data_queue.put(json.dumps(energy_data))
            if self.stop_event.is_set():
                break
        self.send_message_to_listeners(Status.STOPPED,
                                       description='Reader has been stopped')

    def extract_data_from_telegram(self, telegram):
        """Map a DSMR telegram (plus the current solar reading) into the
        flat dict format the rest of the pipeline expects. Telegram values
        are in kW/kWh, hence the * 1000 to W/Wh."""
        solar = self.read_solar()
        data = {
            'unix_timestamp': int(time.time()),
            'mode': str(telegram[obis_references.ELECTRICITY_ACTIVE_TARIFF].value),
            'usage_now': str(telegram[obis_references.CURRENT_ELECTRICITY_USAGE].value * 1000),
            'redelivery_now': str(telegram[obis_references.CURRENT_ELECTRICITY_DELIVERY].value * 1000),
            'solar_now': solar['now'],
            'usage_total_high': str(telegram[obis_references.ELECTRICITY_USED_TARIFF_2].value * 1000),
            'redelivery_total_high': str(telegram[obis_references.ELECTRICITY_DELIVERED_TARIFF_2].value * 1000),
            'usage_total_low': str(telegram[obis_references.ELECTRICITY_USED_TARIFF_1].value * 1000),
            'redelivery_total_low': str(telegram[obis_references.ELECTRICITY_DELIVERED_TARIFF_1].value * 1000),
            'solar_total': solar['total'],
            'usage_gas_now': "0",
            'usage_gas_total': str(telegram[obis_references.HOURLY_GAS_METER_READING].value * 1000)
        }
        return data

    def read_solar(self, retry=False):
        """Fetch the current/total production from the inverter API.

        Returns zeros when no inverter is configured or the API is
        unreachable; retries once on unexpected errors before reporting.
        """
        solar = {"now": 0, "total": 0}
        # FIX: was `str(self.solar_ip) is ""` -- an identity comparison with a
        # string literal that only worked through CPython interning (and is a
        # SyntaxWarning on Python 3.8+). Compare by value instead.
        if not str(self.solar_ip):
            return solar
        try:
            # Throttle: the inverter API misbehaves when polled too quickly
            # (presumably -- the 0.85s figure predates this review; confirm).
            time.sleep(0.85)
            solar_data = requests.get(url=self.solar_url, timeout=2).json()
            solar['now'] = solar_data['Body']['Data']['PAC']['Value']
            solar['total'] = solar_data['Body']['Data']['TOTAL_ENERGY'][
                'Value']
            return solar
        except requests.exceptions.ConnectTimeout:
            return solar
        except Exception:
            if not retry:
                solar = self.read_solar(True)
            self.send_message_to_listeners(
                Status.RUNNING, Error.SOLAR_API,
                'Could not read data from solar api: {}'.format(
                    self.solar_url))
            return solar

    def send_message_to_listeners(self, status, error=None, description=None):
        """Publish a status message on the status queue, tagging it with this
        thread's identity and optional error/description fields."""
        message = dict()
        message["thread"] = Thread.READER
        message["status"] = status
        if error is not None:
            message["error"] = error
        # FIX: previously tested `message is not None` (always true), which
        # attached a None description unconditionally; the intent is to add
        # the key only when a description was given.
        if description is not None:
            message["description"] = description
        self.status_queue.put(message)
class Zhihu_crawler():
    """Scrapes a Zhihu user's profile page, prints/stores the extracted
    fields, and enqueues the user's followees' profile URLs on a Redis queue
    for breadth-first crawling. (Python 2 code; cookies are a hard-coded
    captured session.)"""

    def __init__(self, url):
        # Shared work queue of profile URLs still to crawl.
        self.queue = RedisQueue('zhihu', host='localhost', port=6379, db=0)
        self.url = url
        self.headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",
                        "Host": "www.zhihu.com",
                        "Refer": "www.zhihu.com",
                        "Accept-Language": "zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4",
                        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
                        "Accept-Encoding": "gzip, deflate, sdch, br",
                        "Cache-Control": "max-age=0",
                        "Connection": "keep-alive"}
        # Session cookies (NOTE: "_zap" appears twice; the second literal wins
        # in a dict display, so only the later value is ever sent).
        self.cookies = {"_zap": "aaf2a75d-0a1b-4863-b8a0-23ff0f4a9002",
                        "_za": "e73a8db5-0824-4c36-b6a2-7a5378a046f7",
                        "udid": '"AFAAY31blAmPTta9QIqu7S6lUdEK97RWDgg=|1457941793"',
                        "d_c0": '"AGBAzqyTowmPTpYh7UrYZSjcr43LFX006Tw=|1461248461"',
                        "_zap": "267bc327-098d-4d7c-85cb-3cfd13cd2e8e",
                        "q_c1": "3b3a3dccecf1499ea32a0b2da9be35ec|1470149980000|1445741536000",
                        "_xsrf": "8a812fd7745e54a8e8ab4ed815fa9001",
                        "l_cap_id": '"YzQ3YzNhNzUxZjBlNDAzNTgwM2FhNzdlODI5NjAxZjY=|1472298711|d67a5a1c7e5fb41cfe2715e389c74ebc6132007d"',
                        "cap_id": '"ZGQwYTE0MTM3ODk0NDUzOGFkM2RiNGYxYTNmYTc1YTM=|1472298711|8fd9f406e4786a9ca56227b61e7c6a2a5c0f4b42"',
                        "login": '******',
                        "n_c": '1',
                        "s-t": "autocomplete",
                        "s-q": "volley%2Cretrofit%2Cokhttp",
                        "s-i": "1",
                        "sid": "6vahoruo",
                        "a_t": '"2.0AEAAukjbcgoXAAAATjPpVwBAALpI23IKAGBAzqyTowkXAAAAYQJVTfYL6VcAoZ3PJyuvTIR4Yl3RS9B_tCnMwHxnX7iDfjl2Ve7xk-Nk6RdV68h4_A=="',
                        "z_c0": "Mi4wQUVBQXVramJjZ29BWUVET3JKT2pDUmNBQUFCaEFsVk45Z3ZwVndDaG5jOG5LNjlNaEhoaVhkRkwwSC0wS2N6QWZB|1472308814|21bb41cc3844239f4582374fc850ced4a5e8c564",
                        "__utma": "51854390.226515891.1472287250.1472298703.1472307196.4",
                        "__utmc": "51854390",
                        "__utmz": "51854390.1472296126.2.2.utmcsr=google|utmccn=(organic)|utmcmd=organic|utmctr=(not%20provided)",
                        "__utmv": "51854390.100--|2=registration_date=20160827=1^3=entry_date=20151025=1"}

    def send_request(self):
        """Fetch the user's followees page and hand it to the parser."""
        # URL of the followees page
        followees_url = self.url + '/followees'
        session = requests.session()
        # NOTE(review): proxies are set on `session`, but the request below
        # goes through `requests.get`, so they are never used -- confirm.
        session.proxies = {
            "http": "http://124.88.67.17.251:8685",
            "https": "http://223.67.136.218:8920",
        }
        # Issue the request.
        # (Comment said "avoid HTTPS certificate verification", but
        # verify=True actually *enables* verification.)
        r = requests.get(followees_url, cookies=self.cookies,
                         headers=self.headers, verify=True)
        try:
            r.raise_for_status()
        except requests.HTTPError as e:
            print e.message + ' HttpError'
        except requests.ConnectionError as e:
            print e.message
        content = r.text
        if r.status_code == requests.codes.ok:
            self.parse_users_content(content)
            print "requests success!"

    # Return the first extracted value, or '' when the XPath matched nothing.
    def judge_data_have(self, name, datas):
        if datas:
            #print datas[0]
            return datas[0]
        else:
            #print name + " not exist!"
            return ''

    # Parse the profile fields out of the followees page HTML.
    def parse_users_content(self, html_source):
        # Initialize the profile fields we want to extract.
        self.user_name = ''
        self.user_gender = ''
        self.user_location = ''
        self.user_followees = ''
        self.user_followers = ''
        self.user_be_agreed = ''
        self.user_be_thanked = ''
        self.user_education_school = ''
        self.user_education_subject = ''
        self.user_employment = ''
        self.user_employment_extra = ''
        self.user_intro = ''
        self.followees_urls = ''
        tree = etree.HTML(html_source)
        self.user_name = self.judge_data_have("姓名", tree.xpath('//a[@class = "name"]/text()'))
        self.user_location = self.judge_data_have("位置", tree.xpath('//span[@class = "location item"]/@title'))
        # Gender is encoded in the icon's CSS class (contains 'female'/'male').
        self.user_gender = self.judge_data_have("性别", tree.xpath('//span[@class = "item gender"]/i/@class'))
        if self.user_gender:
            if 'female' in self.user_gender:
                self.user_gender = 'female'
            elif 'male' in self.user_gender:
                self.user_gender = 'male'
        # Sidebar <strong> elements hold followee/follower counts in order.
        followees = tree.xpath('//div[@class = "zu-main-sidebar"]//strong/text()')
        if followees:
            self.user_followees = tree.xpath('//div[@class = "zu-main-sidebar"]//strong/text()')[0]
            self.user_followers = tree.xpath('//div[@class = "zu-main-sidebar"]//strong/text()')[1]
        # Header info list holds upvote ("agreed") and thank counts in order.
        stats = tree.xpath('//div[@class = "zm-profile-header-info-list"]//strong/text()')
        if stats:
            self.user_be_agreed = tree.xpath('//div[@class = "zm-profile-header-info-list"]//strong/text()')[0]
            self.user_be_thanked = tree.xpath('//div[@class = "zm-profile-header-info-list"]//strong/text()')[1]
        self.user_education_school = self.judge_data_have("学校", tree.xpath('//span[@class = "education item"]/a/@title'))
        self.user_education_subject = self.judge_data_have("学科", tree.xpath('//span[@class = "education-extra item"]/a/@title'))
        self.user_employment = self.judge_data_have("公司", tree.xpath('//span[@class = "employment item"]/@title'))
        self.user_employment_extra = self.judge_data_have("公司", tree.xpath('//span[@class = "position item"]/@title'))
        self.user_intro = self.judge_data_have("简介", tree.xpath('//div[@class = "bio ellipsis"]/@title'))
        # Enqueue followee profile URLs for further crawling.
        self.followees_urls = tree.xpath('//a[@class = "zg-link author-link"]/@href')
        for url in self.followees_urls:
            #url = url.replace("https", "http")
            self.queue.put(url)
        self.print_data_out()

    # Print the collected profile, then persist it.
    def print_data_out(self):
        print "*" * 60
        print "用户名:%s".decode('utf-8') % self.user_name
        print "用户性别:%s".decode('utf-8') % self.user_gender
        print "用户地址:%s".decode('utf-8') % self.user_location
        print "被同意:%s".decode('utf-8') % self.user_be_agreed
        print "被感谢:%s".decode('utf-8') % self.user_be_thanked
        print "被关注:%s".decode('utf-8') % self.user_followers
        print "关注了:%s".decode('utf-8') % self.user_followees
        print "工作:%s/%s".decode('utf-8') % (self.user_employment, self.user_employment_extra)
        print "教育:%s/%s".decode('utf-8') % (self.user_education_school, self.user_education_subject)
        print "用户信息:%s".decode('utf-8') % self.user_intro
        print "*" * 60
        self.save_in_mongodb()

    # Store the profile in MongoDB (Zhihu_User_Data is a MongoEngine-style
    # document class defined elsewhere -- TODO confirm).
    def save_in_mongodb(self):
        new_data = Zhihu_User_Data(
            user_name=self.user_name,
            user_gender=self.user_gender,
            user_location=self.user_location,
            user_followees=self.user_followees,
            user_followers=self.user_followers,
            user_be_agreed=self.user_be_agreed,
            user_be_thanked=self.user_be_thanked,
            user_education_school=self.user_education_school,
            user_education_subject=self.user_education_subject,
            user_employment=self.user_employment,
            user_employment_extra=self.user_employment_extra,
            user_intro=self.user_intro,
            followees_urls=self.followees_urls
        )
        new_data.save()

    # Expose the URL queue so a driver loop can pull the next profile.
    def get_queue(self):
        return self.queue
class SocketWorker:
    """Maintains a websocket connection to HOST and bridges it to two Redis
    queues: inbound 'message' payloads go to the command (down) queue, and
    results popped from the up queue are sent back over the socket. The
    device identifies itself with the Raspberry Pi CPU serial number."""

    def __init__(self):
        self.command_queue = RedisQueue(Config.DOWN_QUEUE_NAME)
        self.result_queue = RedisQueue(Config.UP_QUEUE_NAME)
        self.socket = websocket.WebSocketApp(HOST,
                                             on_open=self.on_open,
                                             on_message=self.on_message,
                                             on_error=self.on_error,
                                             on_close=self.on_close)
        # Reconnect loop: run_forever returns/raises on disconnect, so retry
        # every 5 seconds indefinitely. __init__ never returns.
        while True:
            try:
                self.socket.run_forever(ping_interval=100)
            except:
                pass
            time.sleep(5)

    def on_open(self):
        # Identify this device to the server, then start the result-pump
        # thread that forwards queued results over the socket.
        print 'socket connected'
        self.socket.send(
            json.dumps({
                'type': 'verify',
                'data': 'device',
                'id': self.getSerial()
            }))
        thread.start_new_thread(self.start, ())

    def on_error(self, error):
        print 'socket error %s' % error

    def on_message(self, message):
        # Messages are JSON; malformed payloads are logged and dropped.
        print 'socket get message %s' % message
        try:
            message = json.loads(message)
            self.handle_message(message)
        except Exception as e:
            print e
            print 'message parse fail'

    def handle_message(self, message):
        if message['type'] == 'message':
            # Server command: hand off to the serial worker via Redis.
            self.command_queue.put(message['data'])
        elif message['type'] == 'unverified':
            # Server rejected our identity; re-send the verify handshake.
            self.socket.send(
                json.dumps({
                    'type': 'verify',
                    'data': 'device',
                    'id': self.getSerial()
                }))

    def on_close(self):
        print 'socket close'

    def start(self):
        # Result pump: poll the up queue twice a second, forever.
        while True:
            self.execTask()
            time.sleep(0.5)

    def execTask(self):
        result = self.result_queue.get_nowait()
        if result:
            print 'socket send result %s' % result
            self.socket.send(json.dumps({'type': 'message', 'data': result}))

    def getSerial(self):
        """Return the Raspberry Pi CPU serial from /proc/cpuinfo, or a
        zero/error placeholder when unavailable."""
        cpuserial = "0000000000000000"
        try:
            f = open('/proc/cpuinfo', 'r')
            for line in f:
                if line[0:6] == 'Serial':
                    cpuserial = line[10:26]
            f.close()
        except:
            cpuserial = "ERROR000000000"
        return cpuserial
# -*- coding:utf-8 -*-
from redis_queue import RedisQueue
import time

# Producer demo: push six items onto the 'rq' queue, one per second,
# logging each enqueue with a timestamp.
q = RedisQueue('rq')  # create the queue named 'rq'
for item in list("abcdef"):
    q.put(item)
    print("input.py: data {} enqueue {}".format(item, time.strftime("%c")))
    time.sleep(1)
class Sender(threading.Thread):
    """Drains energy readings from the 'retry' and 'normal' Redis queues and
    POSTs them in batches to the API; failed batches are re-queued on the
    retry queue. The retry queue always takes priority."""

    def __init__(self, status_queue, stop_event, config):
        super(Sender, self).__init__()
        self.normal_data_queue = RedisQueue('normal')
        self.retry_data_queue = RedisQueue('retry')
        self.status_queue = status_queue
        self.stop_event = stop_event
        self.base_url = config["api_url"]
        self.key = config["key"]
        self.store_energy_url = self.base_url + "/v2/energy"
        self.backup_file = "backup"
        self.console_mode = True if config["console_mode"] == "true" else False
        self.connected = False

    def run(self):
        self.send_message_to_listeners(Status.RUNNING,
                                       description="Sender has been started")
        while not self.stop_event.is_set():
            if not self.connected:
                self.connect_to_api()
            # NOTE(review): while connected but idle this inner loop only
            # re-checks the queues every second and does not observe
            # stop_event -- confirm whether that is intentional.
            while self.connected:
                retry_data = self.read_messages_from_retry_queue()
                if len(retry_data) > 0:
                    self.send_data_to_api(retry_data)
                    break
                normal_data = self.read_messages_from_normal_queue()
                if len(normal_data) > 0:
                    self.send_data_to_api(normal_data)
                    break
                time.sleep(1)
            time.sleep(5)
        self.send_message_to_listeners(
            Status.STOPPED, description="Sender has been terminated")

    def _drain_queue(self, data_queue):
        """Pop pending messages from *data_queue* and JSON-decode them.

        Stops when the queue is empty or just after the batch exceeds 30
        items (so at most 31 per call, matching the original cutoff).
        Factored out of the two identical read_messages_* bodies.
        """
        batch = []
        while not data_queue.empty():
            raw_message = data_queue.get()
            batch.append(json.loads(raw_message.decode('utf-8')))
            if len(batch) > 30:
                break
        return batch

    def read_messages_from_retry_queue(self):
        """Return the next batch of previously-failed messages."""
        return self._drain_queue(self.retry_data_queue)

    def read_messages_from_normal_queue(self):
        """Return the next batch of fresh meter readings."""
        return self._drain_queue(self.normal_data_queue)

    def connect_to_api(self):
        """Probe the API base URL and update self.connected accordingly.

        FIX: removed the redundant second assignment (`self.connected` was
        set from the status code and then set to True again inside an
        identical check); behavior is unchanged.
        """
        try:
            response = requests.get(self.base_url)
            self.connected = response.status_code == requests.codes.ok
            if self.connected:
                self.send_message_to_listeners(
                    Status.RUNNING,
                    description="Connected to server running on {}".format(
                        self.base_url))
        except requests.exceptions.ConnectionError as e:
            self.connected = False
            self.send_message_to_listeners(Status.RUNNING,
                                           Error.SERVER_UNREACHABLE,
                                           "Could not connect to the server")

    def send_data_to_api(self, messages):
        """POST a batch to the store endpoint; on connection failure the
        whole batch is pushed back onto the retry queue. An unauthorized
        response stops the whole sender."""
        headers = {
            'Content-type': 'application/json',
            'Accept': 'application/json'
        }
        try:
            response = requests.post(self.store_energy_url,
                                     data=json.dumps({
                                         'data': messages,
                                         "rpi_key": self.key
                                     }),
                                     headers=headers)
            if response.status_code == requests.codes.created:
                if self.console_mode:
                    self.send_message_to_listeners(
                        Status.RUNNING,
                        description="Succesfully stored energy data")
                return
            if response.status_code == requests.codes.unauthorized:
                self.send_message_to_listeners(
                    Status.STOPPED, Error.UNAUTHORIZED,
                    "Could not authorize with given key")
                self.stop_event.set()
        except requests.exceptions.ConnectionError as e:
            self.send_message_to_listeners(Status.RUNNING,
                                           Error.SERVER_UNREACHABLE,
                                           "Could not reach the server")
            self.connected = False
            for message in messages:
                self.retry_data_queue.put(json.dumps(message))

    def send_message_to_listeners(self, status, error=None, description=None):
        """Publish a status message on the status queue, tagged with this
        thread's identity and optional error/description fields."""
        message = dict()
        message["thread"] = Thread.SENDER
        message["status"] = status
        if error is not None:
            message["error"] = error
        # FIX: previously tested `message is not None` (always true), which
        # attached a None description unconditionally; add the key only when
        # a description was given.
        if description is not None:
            message["description"] = description
        self.status_queue.put(message)
class MyTaskSet(CountResults):
    """Database load-test task set: hammers a `films` table with SELECT /
    INSERT / UPDATE statements while tasks are available on a Redis queue,
    then prints a summary table of timings collected by the CountResults
    base class."""

    def __init__(self, time_execution_in_sec, chart_title, slave, *args, **kwargs):
        super(MyTaskSet, self).__init__(time_execution_in_sec, chart_title,
                                        slave, *args, **kwargs)
        self.running = True
        self.slave = slave
        # Last inserted film code; used by write() to update the same row.
        self.code = None
        self.queue_chart = RedisQueue(name="data_chart", namespace="data_chart")
        self.queue_tasks = RedisQueue(name="data_tasks", namespace="data_tasks")
        self.chart = ReportCharts(time_execution_in_sec, chart_title, self.slave)
        # self.config presumably comes from CountResults -- TODO confirm.
        self.db = create_engine(self.config["database"]["db_string"])

    def purge_queues(self):
        self.queue_chart.purge()
        self.queue_tasks.purge()
        # NOTE(review): self.queue_data is not assigned in this class;
        # presumably created by CountResults -- confirm, otherwise this is
        # an AttributeError waiting to happen.
        self.queue_data.purge()

    def set_tasks(self):
        # Feed the task queue as fast as possible until on_finish() clears
        # self.running; workers consume one "heartbeat" per iteration.
        while self.running:
            self.queue_tasks.put("heartbeat")

    def vacuum(self):
        # Vacuum the films table; if it does not exist yet (InternalError),
        # create it from the local table definition instead.
        try:
            self.db.execute("vacuum analyze films;")
            self.db.execute("vacuum films;")
        except InternalError:
            from table import Films
            films = Films()
            films.metadata.create_all(bind=self.db)
        return

    def run(self, thread=0):
        self.chart.update_chart(self.queue_chart, 5, "thread", data=1)
        # One read+write round-trip per task pulled from the queue; errors
        # are counted rather than raised so the run continues.
        while self.running and self.queue_tasks.get():
            try:
                self.read()
                self.write()
            except Exception as e:
                self.RESPONSE_TIME_AVERAGE["errors"] += 1

    def read(self):
        # NOTE(review): the format string has no placeholders, so the two
        # .format() arguments are computed and discarded -- the query is
        # always a full-table SELECT.
        self.db.execute("SELECT * FROM films;".format(
            str(uuid.uuid4())[-5:], random.randint(0, self.LIMIT * 100)))

    def write(self):
        # INSERTS
        # NOTE(review): values are interpolated into SQL with str.format;
        # fine for these generated uuid/int values, but parameterized
        # queries would be safer if inputs ever change.
        self.code = str(uuid.uuid4())[-5:]
        self.db.execute(
            "INSERT into films (code, title, did, kind) VALUES('{}', 'test', {}, 't');"
            .format(  # noqa
                self.code, random.randint(0, self.LIMIT * 100)))
        # UPDATES
        new_code = str(uuid.uuid4())[-5:]
        self.db.execute("UPDATE films set code='{}' where code='{}';".format(
            new_code, self.code))

    def on_finish(self):
        # Stop producers/workers, wait for the pending queues to drain, then
        # (master only) print the summary and hard-exit the process.
        self.running = False
        time.sleep(5)
        print("Getting time here to wait all queue get empty")
        while self.queue_data.qsize() > 0 or self.queue_chart.qsize() > 0:
            print("Waiting finishing all pendents query")
            time.sleep(1)
        if not self.slave:
            table = PrettyTable([
                "Item", "Total", "Average Execution (sec)", "Total Errors",
                "Total Executed (sec)"
            ])
            table.add_row([
                "INSERTS", self.RESPONSE_TIME_AVERAGE["count"]["insert"],
                self.RESPONSE_TIME_AVERAGE["average"]["insert"], "", ""
            ])
            table.add_row([
                "UPDATES", self.RESPONSE_TIME_AVERAGE["count"]["update"],
                self.RESPONSE_TIME_AVERAGE["average"]["update"], "", ""
            ])
            table.add_row([
                "SELECTS", self.RESPONSE_TIME_AVERAGE["count"]["select"],
                self.RESPONSE_TIME_AVERAGE["average"]["select"], "", ""
            ])
            table.add_row([
                "", "", "", self.RESPONSE_TIME_AVERAGE["errors"],
                "Finished execution after {} seconds".format(self.TIMING)
            ])
            print(table)
            while self.queue_data.qsize() > 0 or self.queue_chart.qsize() > 0:
                print("Waiting finishing all pendents query")
                time.sleep(1)
            self.purge_queues()
            print("Finished! See http://localhost:9111/ to full report")
            # os._exit skips interpreter cleanup; presumably chosen to kill
            # lingering worker threads -- confirm before changing.
            os._exit(0)