def render(self, screen):
    # If the view isn't sticky (i.e. the user scrolled away from the latest records),
    # make sure we have enough records to draw.
    if not self.start_sticky:
        # Pull from the db if we don't have enough records to display without crashing.
        end = min(len(self.display_stamps), self.element_count + 1)
        if len(self.display_stamps) < end + self.scroll_offset + 1:
            # Pull records.
            d = Dao()
            timestamps = d.get_n_timestamp_records_starting_from(
                self.total_stamps, interval=self.DB_PULL_INTERVAL)
            self.total_stamps += self.DB_PULL_INTERVAL
            stamps: List[Stamp] = []
            # Convert the raw records into stamps.
            for timestamp in timestamps:
                if timestamp.ms <= 0:
                    self.dead_counter += 1
                stamps.append(
                    Stamp(timestamp.receiver_readable, timestamp.ms,
                          offset=self.label_offset_y))
            self.display_stamps.extend(stamps)
            # If we still don't have enough records, undo the user's last scroll step.
            if len(self.display_stamps) < end + self.scroll_offset + 1:
                if self.scroll_offset > self.STEP_UP:
                    self.scroll_offset -= self.STEP_UP
                else:
                    self.scroll_offset = 0
    self.draw_axes(screen)
    self.draw_misc(screen)
    self.draw_stamps(screen, self.GREEN, self.MARINE)
def find_the_most_suitable_answer(keywords):
    """Score every stored answer against the given keywords and return
    the id of the best-scoring one."""
    dao = Dao()
    answers = dao.get_dictionary()
    # One score slot per possible answer id.
    value_list = {i: 0.0 for i in range(500)}
    for answer in answers:
        # Count how many of the given keywords appear in this answer's keyword list.
        for keyword in keywords:
            if answers[answer].count(keyword) == 1:
                value_list[answer] += 1
        # Normalize by the answer's keyword count so keyword-heavy answers don't dominate.
        if len(answers[answer]) != 0:
            value_list[answer] /= len(answers[answer])
    # Return the answer id with the highest score.
    return max(value_list.items(), key=operator.itemgetter(1))[0]
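A hedged usage sketch: the keyword pipeline here mirrors the lemmatization in add_to_bd_type_two further down, so the two stay consistent; the sample question string and the surrounding glue are illustrative, not from the project.

# Illustrative driver (assumes the tokenizer/spellchecker modules used below).
morph = pymorphy2.MorphAnalyzer()
question = "где находится главный корпус"  # made-up sample Russian query
keywords = [morph.parse(spellchecker.correct(w))[0].normal_form
            for w in tokenizer.tokenize_ru(question)]
answer_id = find_the_most_suitable_answer(keywords)
print(Dao().get_answer(answer_id)[0])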
def parse_available_stock(self, response):
    data = json.loads(response.body)
    product_name = response.meta['product_name']
    start_datetime = self.crawler.stats.get_stats(self)['start_time']  # in UTC
    # start_time is UTC, so convert with calendar.timegm;
    # time.mktime would wrongly treat the tuple as local time.
    start_time_epoch = timegm(start_datetime.timetuple())
    Path('logs').mkdir(parents=True, exist_ok=True)
    logfile = ('logs/' + self.name + '_'
               + strftime("%Y-%m-%d_%H:%M:%S_UTC", start_datetime.timetuple()) + '.log')

    # NOTE(sdsmith): These are totally out of the blue numbers
    avail_status_to_quantity = {'OUT_OF_STOCK': 0, 'LIMITED': 5, 'AVAILABLE': 30}

    with open(logfile, 'a') as f:
        for loc in data['info']:
            location = loc['displayName'] + ', ' + loc['intersection']
            quantity = avail_status_to_quantity[loc['availabilityStatus']]
            price = float(loc['sellPrice'])
            msg = '{}: {} - price ${}, availability {}\n'.format(
                product_name, location, price, loc['availabilityStatus'])
            store_id = response.meta['db']['store_id']
            is_change = Dao.record_latest_product_stock(
                start_time_epoch, product_name, store_id, location, quantity, price)
            if is_change:
                slack.send_message(msg)
            f.write(msg)

    # len() instead of a loop counter, so an empty result doesn't crash the status line.
    status_msg = '{}: found {} locations, saved in {}'.format(
        product_name, len(data['info']), logfile)
    self.log(status_msg)
    slack.send_health_message(status_msg)
def start_requests(self):
    store_name = 'walmart'
    slack.send_health_message('Starting Walmart check...')
    store_id = Dao.get_store_id(store_name)
    # TODO(sdsmith): only do the loc call if it has changed!
    yield scrapy.Request(url=self._loc_url('L7T1X4'),
                         callback=self.parse_loc,
                         meta={'db': {'store_id': store_id}})
def add_to_bd_type_two(filename, lines_count, start_id):
    row = start_id
    dao = Dao(True)
    # Build the analyzer once; constructing it per word is very slow.
    morph = pymorphy2.MorphAnalyzer()
    # `with` replaces try/finally and also avoids a NameError if open() itself fails.
    with open(filename, 'r', encoding="UTF-8") as file:
        # Records come in groups of three lines: keywords, answer, blank separator.
        for _ in range(0, lines_count, 3):
            keys = file.readline()
            answer = file.readline()
            file.readline()  # skip the blank separator line
            lst = tokenizer.tokenize_ru(keys)
            keywords = []
            for word in lst:
                checked = spellchecker.correct(word)
                lemma = morph.parse(checked)[0].normal_form
                keywords.append(lemma)
            for keyword in keywords:
                sql = ("insert into hack.`key`(text, answer_id) values ('"
                       + keyword + "', " + str(row) + ");")
                dao.execute(sql)
            sql = ("insert into answer(id, text) values ("
                   + str(row) + ", '" + answer + "')")
            dao.execute(sql)
            row += 1
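The concatenated SQL above is open to injection (an answer containing a quote would also break the insert). A sketch of a parameterized variant, assuming Dao.execute could forward a params tuple to the underlying cursor; only the single-argument execute(sql) form appears in this code, so that signature is an assumption:

# Hypothetical parameterized form (MySQL-style %s placeholders, as in PyMySQL).
# Assumes Dao.execute(sql, params) forwards params to cursor.execute.
dao.execute("insert into hack.`key`(text, answer_id) values (%s, %s)",
            (keyword, row))
dao.execute("insert into answer(id, text) values (%s, %s)", (row, answer))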
from db.dao import Dao

dao = Dao()
print(dao.execute("select * from hack.`answer`;"))
def get_user_response(msg):
    message = str(msg)
    dao = Dao()
    num = bot.get_response(message)
    # Fetch once instead of hitting the db twice for the same answer.
    answer = dao.get_answer(num)[0]
    print(answer)
    return answer
from db.dao import Dao

dao = Dao()
print(dao.get_dictionary())
class StabilityTester:
    UPPER_LIMIT = 1000  # upper limit in ms
    SLEEP_TIME = 1  # sleep time between calls in seconds; ideally matches the upper limit

    def __init__(self, src_addr: str):
        with open("servers.json", 'r', encoding="utf-8") as file:
            # servers.json holds two parallel lists: host names and readable names.
            data: List[List[str]] = load(file)
        self.servers: List = data[0]
        self.servers_readable: List = data[1]
        # Validate the parallel lists before using them.
        if len(self.servers) != len(self.servers_readable):
            raise ServerHostNameMismatchException()
        self.local_history = [0] * len(self.servers)
        self.db = Dao()
        self.interface = src_addr

    def ping_forever(self, scene):
        while True:
            self.loop_servers(scene)

    def ping_with_event(self, evt: Event, scene: PingScene):
        while not evt.is_set():
            self.loop_servers(scene)

    def ping_with_event_counter(self, evt: Event, c, scene: PingScene):
        for _ in range(c):
            if evt.is_set():
                break
            self.loop_servers(scene)

    def loop_servers(self, scene: PingScene):
        tts = StabilityTester.SLEEP_TIME
        for i in range(len(self.servers)):
            try:
                ms = int(self.ping_server(self.servers[i]))
                ping_in_s = ms / 1000
                # Sleep only for the remainder of the interval.
                time_left = tts - ping_in_s if ping_in_s < tts else 0
                self.db.timestamp(ms, StabilityTester.UPPER_LIMIT, self.servers[i],
                                  self.servers_readable[i], self.interface)
                scene.add_stamp(self.servers_readable[i], ms)
                print(f"Server Name: {self.servers_readable[i]}\n"
                      f"\tReplied in: {ms}ms\n"
                      f"\tPing Variation: {ms - self.local_history[i]}ms")
                self.local_history[i] = ms
                sleep(time_left)
            except Timeout:
                # A zero ms reading marks a timed-out sample.
                self.db.timestamp(0, StabilityTester.UPPER_LIMIT, self.servers[i],
                                  self.servers_readable[i], self.interface, True)
                scene.add_stamp(self.servers_readable[i], 0)
                print(f"Server Name: {self.servers_readable[i]}\n"
                      f"\t\tConnection timed out.")
            except PingError as pe:
                scene.add_stamp(self.servers_readable[i], 0)
                print(f"Encountered unexpected PingError {pe} "
                      f"when pinging {self.servers_readable[i]}")
                sleep(0.1)
            except Exception:
                # Silently back off if our adapter dies or something; we do not care.
                sleep(tts)

    def ping_server(self, server: str):
        return ping(server, src_addr=self.interface, unit='ms',
                    timeout=StabilityTester.SLEEP_TIME)
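A minimal driver sketch for StabilityTester, assuming a PingScene instance has already been constructed elsewhere (its constructor is not shown here) and that 192.0.2.10 stands in for a real local interface address:

from threading import Event, Thread

stop_evt = Event()
tester = StabilityTester("192.0.2.10")  # placeholder source address
worker = Thread(target=tester.ping_with_event, args=(stop_evt, scene), daemon=True)
worker.start()
# ... run the UI / render loop; on shutdown:
stop_evt.set()
worker.join()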
handler.parse_sys_args(argv)
handler.post_processing(Sniffer.get_interface_list(), Sniffer.get_interface_ip_list())

# If the handler collected any exceptions, report them and bail with an error code.
if handler.exceptions:
    for e in handler.exceptions:
        print(f"{e}\n")
    exit(1)

any_special_flag = handler.RECORDS_FLAG or handler.PICKLE_FOUND or handler.SAVE_FOUND
any_output_flag = handler.PICKLE_FOUND or handler.SAVE_FOUND

if handler.RECORDS_FLAG:
    # The records flag prints the number of unique records for the given
    # MM/DD/HH/MM/SS (or YYYY/MM/DD/HH/MM, if given) and then exits.
    tsc, pkc = get_record_count(Dao(), handler.RECORDS_DATE, handler.RECORDS_TYPE)
    print(f"Found {tsc} timestamps and {pkc} sniff records in the given date.")
    exit(0)

if any_output_flag:
    generator = Generator()
    if handler.PICKLE_FOUND:
        # If the cmd handler found any pickles as an argument, open them.
        # noinspection PyUnboundLocalVariable
        generator.open_saved_pickles(handler.pickles)
    if handler.SAVE_FOUND:
        # Generate the requested output.
        # noinspection PyUnboundLocalVariable
        generator.start_new_pass(handler.OUTPUT_PATH, handler.ANON_FLAG)

d = Dao()
#!/usr/bin/env python3
import sys

from scrapy.crawler import CrawlerProcess

from hyper_scraper.spiders.walmart_spider import WalmartNintendoSwitchSpider
from db.dao import Dao
from notifs import slack

if __name__ == '__main__':
    Dao.setup_db()
    if len(sys.argv) > 1:
        if sys.argv[1] == 'stock':
            s = Dao.products_in_stock()
            slack.send_message(s)
            print(s)
            sys.exit(0)
        else:
            print('Usage: main.py [stock]')
            sys.exit(1)

    process = CrawlerProcess()
    process.crawl(WalmartNintendoSwitchSpider)
    process.start()
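For reference, this gives the script two entry points: python main.py starts the Walmart crawl, and python main.py stock posts the currently recorded stock to Slack and exits.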