def test_getFilesList(self):
    """Exercise getCompetitionsFilesList with a missing and a real path."""
    # A nonexistent directory must yield an empty list.
    missing = DBManager().getCompetitionsFilesList('../Data')
    self.assertEqual(missing, [], "Should be empty list []")
    # A real data directory must yield at least one entry.
    found = DBManager().getCompetitionsFilesList('../DataRaw/2009-2010')
    self.assertNotEqual(found, [], "Should not be an empty list")
    # Every returned entry must look like an HTML file.
    for entry in found:
        self.assertNotEqual(entry.find('.html'), -1, entry + " should be a html")
def __init__(self):
    """Create the maintenance and reference tables and seed initial data."""
    manager = DBManager()
    manager.openConnection()
    manager.initDB()
    # Per-flat maintenance payment records.
    self.tableName = "maintenance"
    self.tableCommand = '''CREATE TABLE IF NOT EXISTS maintenance (FLATNO TEXT PRIMARY KEY NOT NULL, OCCUPANT_NAME TEXT, DATE TEXT, AMT_PAID REAL NOT NULL, AMT_DUE REAL NOT NULL, MODE_OF_PAYMENT TEXT NOT NULL)'''
    manager.createTable(self.tableName, self.tableCommand)
    # Society-wide reference amounts and the payment due date.
    self.tableName = "reference"
    self.tableCommand = '''CREATE TABLE IF NOT EXISTS reference (ID INT PRIMARY KEY NOT NULL, MAINT_AMT REAL NOT NULL, LATE_FEE_AMT REAL NOT NULL, LAST_DATE TEXT NOT NULL )'''
    manager.createTable(self.tableName, self.tableCommand)
    self.populateDB(manager)
    manager.closeConnection()
def openConn(self, conname):
    """Open (or focus) a database connection tab for the named configuration.

    If a tab for ``conname`` is already open, simply switch to it.
    Otherwise read the saved configuration, build a DBManager, test the
    connection, and on success create and select a new tab.
    """
    self.currentConname = conname
    if conname in self.openConNameList:
        # Connection already open: just activate its tab.
        self.currentDBIndex = self.openConNameList.index(conname)
        self.tabWidget.setCurrentIndex(self.currentDBIndex)
    else:
        conf = self.conf.cfg_get(conname)
        try:
            self.currentDB = DBManager(conf['conname'], conf['hostname'],
                                       conf['port'], conf['user'],
                                       conf['password'])
        except Exception:
            # FIX: narrowed from a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt.
            self.conn.closeDialog()
            QMessageBox.warning(self, '提醒', '配置参数有误')
            return
        re = self.currentDB.testConnect()
        if re != 'Success':
            self.conn.closeDialog()
            QMessageBox.information(self, '链接失败', re)
            return
        dbList = self.currentDB.showDBs()
        # Initialise the UI for the new connection.
        self.setupBaseUi(dbList)
        index = len(self.openDBClassList)
        self.currentDBIndex = index
        self.openDBClassList.append(self.currentDB)
        self.openConNameList.append(conname)
        self.tabWidget.setTabText(index, conname)
        self.tabWidget.setCurrentIndex(index)
def retrieve_from_host(local_fqdn, remote_fqdn, direction):
    """Fetch the host-level CES policy for the given FQDN pair and direction."""
    # Short-lived connection to the CES policy database.
    policy_db = DBManager("127.0.0.1", "root", "take5", "CES_Policies")
    policy_db.connect()
    result = policy_db.retrieve_from_host(local_fqdn, remote_fqdn, direction)
    policy_db.deconnect()
    return result
def retrieve_from_fqdn(reply):
    """Fetch the firewall policy matching the given FQDN reply."""
    # Short-lived connection to the firewall policy database.
    fw_db = DBManager("127.0.0.1", "root", "take5", "Firewall_Policies")
    fw_db.connect()
    result = fw_db.retrieve_from_fqdn(str(reply))
    fw_db.deconnect()
    return result
def retrieve_from_CES(transport_protocol, link_alias, direction, ces_fqdn):
    """Fetch the CES-level policy for the given transport/link/direction/FQDN."""
    # Short-lived connection to the CES policy database.
    policy_db = DBManager("127.0.0.1", "root", "take5", "CES_Policies")
    policy_db.connect()
    result = policy_db.retrieve_from_ces(transport_protocol, link_alias,
                                         direction, ces_fqdn)
    policy_db.deconnect()
    return result
def __init__(self):
    """Set up the database managers and the FIFO queues used by the workers."""
    self.dbmanager = DBManager()
    self.taskdbmanager = TaskDBManager()
    # Work queues; their exact roles are defined by the consumer methods
    # elsewhere in this class -- presumably current work, main backlog and
    # two "guess" pools (TODO confirm against the consumers).
    self.current = Queue.Queue()
    self.q = Queue.Queue()
    self.guess = Queue.Queue()
    self.guess2 = Queue.Queue()
def main():
    """Fetch the current seat-presence snapshot and persist each seat."""
    presence_json = NetworkManager().getPresence()
    db = DBManager()
    for seat in json.loads(presence_json):
        db.create_seat(seat)
def __init__(self):
    """Wire up the manager objects used by the analysis pipeline."""
    self.__db_manager = DBManager()
    self.__helper = GeneralHelpers()
    self.__plot_manager = PlotManager()
    self.__import_manager = ImportManager()
    self.__feature_manager = FeatureManager()
    # Years of data covered by this analysis.
    self.years = ("2012", "2013", "2014", "2015")
def retrieve_from_msisdn(reply):
    """Fetch the firewall policy for an MSISDN and dump it to data.yaml."""
    fw_db = DBManager("127.0.0.1", "root", "take5", "Firewall_Policies")
    fw_db.connect()
    result = fw_db.retrieve_from_msisdn(str(reply))
    # Persist the retrieved policy for later inspection.
    with open('data.yaml', 'w') as outfile:
        yaml.dump(result, outfile, default_flow_style=False)
    fw_db.deconnect()
    return result
def test(self):
    """End-to-end clustering experiment: for each dataset, build the merged
    DB, run bmf / word2vec / random / kmeans clusterings, then compare
    marginal and MAP inference quality against the original DB."""
    #self.merge()
    #self.compress()
    #return
    embedding_size = 100
    for CLUSTER_MIN_SIZE in range(4, 19, 2):
        for dsname in ['webkb', 'er']:
            mln = MLN(dsname)
            db = DBManager(dsname, mln)
            print('merge db dom sizes:')
            dom_obj_map = db.get_dom_objs_map(mln, db.merge_db_file)
            cf = common_f()
            #cf.delete_files(mln.pickle_location)
            #cf.remove_irrelevant_atoms()
            # Vary the embedding size per iteration, wrapping below 1000.
            embedding_size += 100
            embedding_size = embedding_size % 1000
            db.set_atoms()
            bmf = bmf_cluster(dsname)
            bmf.cluster(db, 1, mln.pdm, dom_obj_map)
            print('original db dom sizes(after compression):')
            orig_dom_objs_map = db.get_dom_objs_map(mln, mln.orig_db_file)
            # NOTE(review): this clobbers the outer loop variable, so
            # word2vec always runs with min cluster size 10 regardless of
            # the range(4, 19, 2) sweep -- confirm this is intentional.
            CLUSTER_MIN_SIZE = 10
            w2v = word2vec(dsname, db, CLUSTER_MIN_SIZE, embedding_size)
            print('w2v cluster dom sizes:')
            w2v_dom_objs_map = db.get_dom_objs_map(mln, w2v.w2v__cluster_db_file)
            cr = cf.calculate_cr(orig_dom_objs_map, w2v_dom_objs_map)
            print('cr : ' + str(cr))
            rc = random_cluster(dsname)
            rc.generate_random_db(db, w2v.pred_atoms_reduced_numbers, mln, w2v_dom_objs_map)
            print('random cluster dom sizes')
            db.get_dom_objs_map(mln, mln.random__cluster_db_file)
            kmc = kmeans_cluster(dsname)
            kmc.cluster(db, str(cr), mln.pdm, w2v_dom_objs_map, mln.dom_pred_map)
            print('kmeans cluster dom sizes:')
            kmeans_dom_objs_map = db.get_dom_objs_map(mln, kmc.kmeans__cluster_db_file)
            mln.create_magician_mln()
            #magician(dsname,mln)
            tuffy(dsname)
            # Collect the original->cluster meta maps for every method.
            orig_meta_map = {}
            orig_meta_map['bmf'] = bmf.bmf_orig_meta_map
            orig_meta_map['w2v'] = w2v.w2v_orig_meta_map
            orig_meta_map['random'] = rc.rand_orig_meta_map
            orig_meta_map['kmeans'] = kmc.kmeans_orig_meta_map
            print('Dataset : ' + dsname + '; CR : ' + str(cr))
            p = performance(dsname, embedding_size)
            p.compare_marginal(mln, orig_meta_map, cr)
            p.compare_map(mln, orig_meta_map, cr)
            # NOTE(review): only the first dataset runs per outer iteration
            # because of this break -- confirm 'er' is meant to be skipped.
            break
def e90post_thread_grab(url, postsdb='e90post_posts', debug=True):
    """Scrape all posts of one e90post thread, following "next" page links.

    Collects (TimeOfPost, PosterID, PostID, ThreadID, PostCountInThread,
    Link) per post and harvests each poster's profile as a side effect.
    Rows are printed in debug mode, otherwise inserted via DBManager.
    """
    bimm = e90post()
    next_page = xmlTree(url)
    if not debug:
        udb = DBManager()
    while True:
        try:
            next = next_page.xpath(bimm.next_link)[0]
        except IndexError:
            # No "next" link: this is the last page of the thread.
            next = None
        posts = next_page.xpath(bimm.postdata)
        for t in posts:
            threaddata = []
            # Rows without a poster id carry no post data -- skip them.
            if t.xpath(bimm.posterid):
                threaddata.append(('TimeOfPost', bimm.convert_to_valid_date(bimm.last_activity(t.xpath(bimm.timeofpost)[0].text_content().strip()))))
                threaddata.append(('PosterID', bimm.poster_id(t.xpath(bimm.posterid)[0])))
                threaddata.append(('PostID', bimm.post_id(t.xpath(bimm.postid)[0])))
                threaddata.append(('ThreadID', bimm.thread_id(t.xpath(bimm.post_thread)[0])))
                try:
                    threaddata.append(('PostCountInThread', t.xpath(bimm.postcount)[0].text_content()))
                except AttributeError:
                    # Presumably the xpath result is already a plain string
                    # on some pages -- TODO confirm.
                    threaddata.append(('PostCountInThread', t.xpath(bimm.postcount)[0]))
                threaddata.append(('Link', t.xpath(bimm.postlink)[0]))
                e90post_user_grab(bimm.domain + t.xpath(bimm.posterid)[0], 'e90post_users')
                if not debug:
                    udb.insert_into_table(postsdb, threaddata)
                else:
                    print threaddata
        if next:
            next_page = xmlTree(bimm.domain + next + alltime)
        elif not next:
            break
def mazda3_grab(threaddb='mazda3_threadas', debug=True): #pdb.set_trace() bmw = mazda3() main_page = xmlTree(bmw.domain) lvl1 = main_page.xpath(bmw.linklist) if not debug: udb = DBManager() for lvl1_link in lvl1: next_page = xmlTree(lvl1_link) count = 0 pos = lvl1_link.find('board=') + len('board=') part = lvl1_link[:pos] fid = lvl1_link[pos:] ifid = fid.split(r'.') try: lastpage = int( next_page.xpath(bmw.last_page_threads)[0].text_content()) except IndexError: lastpage = 1 while count < lastpage: #threads_list = next_page.xpath(bmw.threaddata) threads = next_page.xpath(bmw.threads_list) tread_data = next_page.xpath(bmw.threaddata) for j in tread_data: #pass threaddata = [] if j.xpath(bmw.description): threaddata.append(('Description', j.xpath( bmw.description)[0].text_content().encode('utf-8'))) #print j.xpath(bmw.description)[0].text_content().encode('utf-8') threaddata.append( ('Views', bmw.parse_stats(j.xpath( bmw.stats)[0].text_content())['Views'])) threaddata.append( ('Views', bmw.parse_stats(j.xpath( bmw.stats)[0].text_content())['Replies'])) # print bmw.parse_stats(j.xpath(bmw.stats)[0].text_content()) #print j.xpath(bmw.views)[0].text_content() threaddata.append(('Link', j.xpath(bmw.link)[0])) # print j.xpath(bmw.link)[0] mazda3_thread_grab(bmw.thread_id(j.xpath(bmw.link)[0])) if not debug: udb.insert_into_table(threaddb, threaddata) #pass else: print threaddata count += 1 print count print part + addto(ifid, count) next_page = xmlTree(part + addto(ifid, count)) if not debug: udb.close()
def task_manager(self, clientsock):
    """Continuously pull tasks from self.task_queue and dispatch each one
    over `clientsock` via send_and_receive.

    NOTE(review): the `while True` loop blocks forever on task_queue.get(),
    so the close/log lines after it are unreachable as written; the
    commented-out `while not self.task_queue.empty()` hints at the original
    intent -- confirm which behaviour is wanted.
    """
    # NOTE(review): `db` is never used in this method -- possibly kept for
    # a connection side effect of DBManager(); confirm before removing.
    db = DBManager()
    while True:
        #while not self.task_queue.empty():
        task = self.task_queue.get()
        self.send_and_receive(task, clientsock)
    clientsock.sendall(json.dumps({"action": "close"}))
    log.info("task is empty")
def __init__(self, **kwargs):
    """Build the screen manager with its login and timetable screens."""
    super().__init__(**kwargs)
    self.dbManager = DBManager()
    # LOGIN_SCREEN is added first, so it becomes the initially shown screen.
    self.loginScreen = LoginScreen(name="LOGIN_SCREEN")
    self.add_widget(self.loginScreen)
    self.timetableScreen = TimetableScreen(name="TIMETABLE_SCREEN")
    self.add_widget(self.timetableScreen)
def send_and_receive(self, task_dict, clientsock):
    """Send one task to a worker over `clientsock` and process its report.

    On success, stores the receipt data and queues a continuation task.
    On the first failure, forks two guess tasks (date +1 / -1); later
    failures keep probing in the guessed direction until fail_cnt
    exceeds 5, at which point the task is dropped.
    """
    db = DBManager()
    try:
        action = {"action": "solve", "task": task_dict}
        clientsock.sendall(json.dumps(action))
        log.info("task send {}".format(task_dict))
        task_report_json = clientsock.recv(65536)
    except socket.error as e:
        log.error("shit happened {}".format(e))
        time.sleep(60)
        # FIX: previously execution fell through after the sleep and
        # crashed with NameError on the unbound task_report_json.
        # Requeue the task so it is retried, and stop this round.
        self.task_queue.put(task_dict)
        return
    task_report = json.loads(task_report_json)
    log.debug(task_report)
    query_result = task_report['result']
    # some is success
    if query_result['success'] > 0:
        db.StoreData(task_report['receipt'])
        task = task_report['task'].copy()
        task['fail_cnt'] = 0
        task['receipt'] = self._modify_receipt_num(
            query_result['lastSuccessReceipt'], task['direction'])
        self.task_queue.put(task)
    # nothing is success(error at first)
    else:
        if task_report['task']['fail_cnt'] == 0:
            # First failure: fork two guesses, one per date direction.
            origin_task = task_report['task'].copy()
            task = task_report['task'].copy()
            task['fail_cnt'] += 1
            task['date_guess'] = 1
            task['date'] = self._modify_date(origin_task['date'], 1)
            task['receipt'] = self._modify_receipt_num(
                query_result['lastSuccessReceipt'], task['direction'])
            self.task_queue.put(task)
            task = task_report['task'].copy()
            task['fail_cnt'] += 1
            task['date_guess'] = -1
            task['date'] = self._modify_date(origin_task['date'], -1)
            task['receipt'] = self._modify_receipt_num(
                query_result['lastSuccessReceipt'], task['direction'])
            self.task_queue.put(task)
        elif task_report['task']['fail_cnt'] > 5:
            # Too many consecutive failures: give up on this task.
            log.debug('a task was terminated due to fail_cnt limit exceed')
            return
        else:
            # Keep probing in the previously guessed date direction.
            origin_task = task_report['task'].copy()
            task = task_report['task'].copy()
            task['fail_cnt'] += 1
            task['date'] = self._modify_date(origin_task['date'],
                                             1 * origin_task['date_guess'])
            self.task_queue.put(task)
def testReadLongFile(self):
    """Parse a long log file and verify the extracted error/stuck counts."""
    self.pylog.tratar("./20181017intra.txt")
    result = self.pylog.result
    # Dump the parsed result as a JS variable for manual inspection.
    with open("Output.txt", "w") as text_file:
        text_file.write("var vjson=" + json.dumps(result))
    assert(len(result.keys()) == 2)
    assert(len(result["wl11_error"]) == 20)
    assert(len(result["wl11_stuck"]) == 37)
    assert(result["wl11_stuck"][36]["stuck_info"]["thread num"] == "53")
    # Persist the parsed structure into a scratch SQLite database.
    db = DBManager("test2.db")
    db.addElements(result)
    db.close()
def audiworld_grab(threaddb='audiworld_threads', debug=True):
    """Crawl every AudiWorld forum section and record thread metadata.

    Walks each top-level forum link, paging via the "next" link until it
    disappears.  In debug mode rows are printed; otherwise they are
    inserted into `threaddb` through DBManager.
    """
    bmw = audiworld()
    main_page = xmlTree(bmw.domain)
    lvl1 = main_page.xpath(bmw.linklist)
    if not debug:
        udb = DBManager()
    for lvl1_link in lvl1:
        next_page = xmlTree(bmw.domain + lvl1_link + alltime)
        while True:
            try:
                next = next_page.xpath(bmw.next_link)[0]
            except IndexError:
                # No "next" link: this is the last page of the section.
                next = None
            threads = next_page.xpath(bmw.threads_list)
            tread_data = next_page.xpath(bmw.threaddata)
            for j in tread_data:
                threaddata = []
                if j.xpath(bmw.description):
                    threaddata.append(('Description', j.xpath(bmw.description)[0].text_content().encode('utf-8')))
                    threaddata.append(('Replies', j.xpath(bmw.replies)[0].text_content()))
                    threaddata.append(('Views', j.xpath(bmw.views)[0].text_content()))
                    threaddata.append(('Link', j.xpath(bmw.link)[0]))
                    threaddata.append(('ThreadID', bmw.thread_id(j.xpath(bmw.link)[0])))
                    # Recurse into the thread to harvest its posts.
                    audiworld_thread_grab(bmw.domain + j.xpath(bmw.link)[0])
                    if not debug:
                        udb.insert_into_table(threaddb, threaddata)
                    else:
                        print threaddata
            # NOTE(review): leftover debugging loop -- confirm it can go.
            for k in threads:
                pass
                print k
            if next:
                next_page = xmlTree(bmw.domain + next + alltime)
            elif not next:
                break
    if not debug:
        udb.close()
def priuschat_thread_grab(url, postsdb='priuschat_posts', debug=False):
    """Scrape all posts of one PriusChat thread, following "next" pages.

    Collects (TimeOfPost, PosterID, PostID, PostCountInThread, Link) per
    post and harvests each poster's profile as a side effect.  Rows are
    printed in debug mode, otherwise inserted via DBManager.
    """
    bimm = priuschat()
    next_page = xmlTree(url)
    if not debug:
        udb = DBManager()
    while True:
        try:
            next = next_page.xpath(bimm.next_link)[0]
            print next
        except IndexError:
            # No "next" link: this is the last page of the thread.
            next = None
        posts = next_page.xpath(bimm.postdata)
        for t in posts:
            # Rows without a poster id carry no post data -- skip them.
            if t.xpath(bimm.posterid):
                threaddata = []
                threaddata.append(('TimeOfPost', bimm.convert_to_valid_date(bimm.last_activity(t.xpath(bimm.timeofpost)[0].text_content().strip()))))
                threaddata.append(('PosterID', bimm.poster_id(t.xpath(bimm.posterid)[0])))
                threaddata.append(('PostID', bimm.post_id(t.xpath(bimm.postid)[0])))
                try:
                    threaddata.append(('PostCountInThread', t.xpath(bimm.postcount)[0].text_content()))
                except AttributeError:
                    # Presumably the xpath result is already a plain string
                    # on some pages -- TODO confirm.
                    threaddata.append(('PostCountInThread', t.xpath(bimm.postcount)[0]))
                threaddata.append(('Link', t.xpath(bimm.postlink)[0]))
                priuschat_user_grab(t.xpath(bimm.posterid)[0], 'priuschat_users')
                if not debug:
                    udb.insert_into_table(postsdb, threaddata)
                else:
                    print threaddata
        if next:
            next_page = xmlTree(next)
        elif not next:
            if not debug:
                udb.close()
            break
def HandleServerStart(self):
    """Initialise persistence, the default room and user bookkeeping."""
    # Connect to the MongoDB (test mode).
    self.database = DBManager(host="localhost", port=27017, testing=True)
    logger.info("Server started in address %s:%d", *self.server.address)
    # The built-in admin account owns the default room.
    self.admin_user = User(name="admin", user="******")
    self.room_manager = RoomManager()
    self.room_manager.CreateRoom("default", self.admin_user)
    self.logged_users = Connect.SafeList()
def CheckTaskDB(self):
    """Poll the task DB once a minute, queueing records not already known."""
    task_db = TaskDBManager()
    known = DBManager()
    while True:
        time.sleep(60)
        rows = task_db.GetData()
        task_db.Clear()
        for row in rows:
            # Normalise the two text fields to plain ASCII.
            record = (row[0].encode('ascii', 'ignore'),
                      row[1].encode('ascii', 'ignore'),
                      row[2], row[3])
            if not known.Findid(record[0]):
                self.q.put(record)
def vwvortex_user_grab(url, userdb='vwvortex_users', debug=True):
    """Scrape one VWVortex user-profile page into (field, value) tuples.

    Each optional profile field (location, cars, interests, post count,
    last activity, join date, handle, occupation, link) is appended only
    when present on the page.  In debug mode the row is printed,
    otherwise it is inserted into `userdb` via DBManager.
    """
    bimm = vwvortex()
    main_page = xmlTree(url)
    userdata = []
    if main_page:
        if not debug:
            udb = DBManager()
        if len(main_page.xpath(bimm.location)) > 0:
            userdata.append(('Location', main_page.xpath(bimm.location)[0].text_content().strip()))
        if len(main_page.xpath(bimm.cars)) > 0:
            userdata.append(('Cars', main_page.xpath(bimm.cars)[0].text_content()))
        if len(main_page.xpath(bimm.interests)) > 0:
            userdata.append(('Interests', main_page.xpath(bimm.interests)[0].text_content()))
        if len(main_page.xpath(bimm.noposts)) > 0:
            userdata.append(('TotalPosts', bimm.total_posts(main_page.xpath(bimm.noposts)[0].text_content())))
        if len(main_page.xpath(bimm.lastac)) > 0:
            userdata.append(('LastActivity', bimm.convert_to_valid_date(bimm.last_activity(main_page.xpath(bimm.lastac)[0].text_content()))))
        if len(main_page.xpath(bimm.joindate)) > 0:
            userdata.append(('JoinDate', bimm.convert_to_valid_date(bimm.join_date(main_page.xpath(bimm.joindate)[0].text_content()))))
        #if len(main_page.xpath(bimm.ppday))>0:
        #    print bimm.p_p_day(main_page.xpath(bimm.ppday)[0].text_content())
        if len(main_page.xpath(bimm.handle)) > 0:
            userdata.append(('Handle', bimm.get_handle(main_page.xpath(bimm.handle)[0])))
        #if len(main_page.xpath(bimm.bio))>0:
        #    print main_page.xpath(bimm.bio)[0].text_content()
        if len(main_page.xpath(bimm.occupation)) > 0:
            userdata.append(('Occupation', main_page.xpath(bimm.occupation)[0].text_content()))
        if len(main_page.xpath(bimm.ulink)) > 0:
            userdata.append(('Link', main_page.xpath(bimm.ulink)[0].text_content()))
        #if len(main_page.xpath(bimm.name))>0:
        #    print bimm.get_name_email(main_page.xpath(bimm.name)[0])
        if not debug:
            udb.insert_into_table(userdb, userdata)
            udb.close()
        else:
            print userdata
    else:
        # Profile page failed to load -- nothing to record.
        pass
def __init__(self):
    """Create a default, pending user backed by the users_records collection."""
    self.db = DBManager("mongodb", 27017)
    self.db.setDB("usersDB")
    self.db.setCollection("users_records")
    # Placeholder identity fields; real values are assigned later.
    self.name = "Bruce Wayne"
    self.pwdhash = ""
    self.email = "*****@*****.**"
    self.creation_date = None
    self.update_date = None
    # Account starts unauthenticated, with no role, awaiting approval.
    self.status = "pending"
    self.authenticated = False
    self.role = None
def vwvortex_grab(threaddb='vwvortex_threads', debug=True):
    """Crawl VWVortex: two levels of forum links, then every thread page.

    For each thread row, records Description, Replies, Views and Link and
    recurses into the thread for its posts.  In debug mode rows are
    printed; otherwise inserted into `threaddb` via DBManager.
    """
    bmw = vwvortex()
    main_page = xmlTree(bmw.domain)
    lvl1 = main_page.xpath(bmw.linklist_lvl1)
    if not debug:
        udb = DBManager()
    for lvl1_link in lvl1:
        second_lvl_page = xmlTree(bmw.domain + lvl1_link)
        lvl2 = second_lvl_page.xpath(bmw.linklist_lvl2)
        for lvl2_link in lvl2:
            next_page = xmlTree(bmw.domain + lvl2_link + alltime)
            while True:
                try:
                    next = next_page.xpath(bmw.next_link)[0]
                except IndexError:
                    # No "next" link: last page of this sub-forum.
                    next = None
                threads = next_page.xpath(bmw.threads_list)
                tread_data = next_page.xpath(bmw.threaddata)
                for j in tread_data:
                    if j.xpath(bmw.description):
                        threaddata = []
                        threaddata.append(('Description', j.xpath(bmw.description)[0].text_content().encode('utf-8')))
                        threaddata.append(('Replies', j.xpath(bmw.replies)[0].text_content()))
                        threaddata.append(('Views', bmw.parse_views(j.xpath(bmw.views)[0].text_content())['Views']))
                        threaddata.append(('Link', j.xpath(bmw.link)[0].encode('utf-8')))
                        # Recurse into the thread to harvest its posts.
                        vwvortex_thread_grab(bmw.domain + j.xpath(bmw.link)[0].encode('utf-8'))
                        if not debug:
                            udb.insert_into_table(threaddb, threaddata)
                        else:
                            print threaddata
                if next:
                    next_page = xmlTree(bmw.domain + next)
                elif not next:
                    break
    if not debug:
        udb.close()
def mazda3_thread_grab(url, postsdb='mazda3_posts', debug=True):
    """Scrape every post page of one Mazda3 forum thread.

    Pages are rebuilt around the thread's 'topic=' id via addto().
    Each post yields (TimeOfPost, PosterID, PostID, Link,
    PostCountInThread, ThreadID) tuples; the poster's profile is
    harvested as a side effect.  Rows are printed in debug mode,
    otherwise inserted into `postsdb` via DBManager.
    """
    bimm = mazda3()
    next_page = xmlTree(url)
    lvl1 = next_page.xpath(bimm.postdata)
    count = 0
    # Split the URL around 'topic=' so page URLs can be rebuilt.
    pos = url.find('topic=') + len('topic=')
    part = url[:pos]
    fid = url[pos:]
    ifid = fid.split(r'.')
    if not debug:
        udb = DBManager()
    try:
        lastpage = int(next_page.xpath(bimm.last_page_threads)[0].text_content())
    except IndexError:
        # No pager element: the thread has a single page.
        lastpage = 1
    while count < lastpage:
        posts = next_page.xpath(bimm.postdata)
        for pst in posts:
            threaddata = []
            threaddata.append(('TimeOfPost', bimm.convert_to_valid_date(bimm.last_activity(bimm.time_of_post(pst.xpath(bimm.timeofpost)[0].text_content())))))
            threaddata.append(('PosterID', bimm.poster_id(pst.xpath(bimm.posterid)[0])))
            threaddata.append(('PostID', bimm.post_id(pst.xpath(bimm.postid)[0])))
            threaddata.append(('Link', pst.xpath(bimm.postlink)[0]))
            threaddata.append(('PostCountInThread', bimm.post_count(pst.xpath(bimm.postcount)[0].text_content())))
            threaddata.append(('ThreadID', bimm.thread_id(pst.xpath(bimm.post_thread)[0])))
            # Also harvest the poster's profile page.
            mazda3_user_grab(pst.xpath(bimm.posterid)[0], 'mazda3_users')
            if not debug:
                udb.insert_into_table(postsdb, threaddata)
            else:
                print threaddata
        count += 1
        next_page = xmlTree(part + addto(ifid, count))
    if not debug:
        udb.close()
def __init__(self):
    """
    Constructor method.

    Wires up the database, helper and preprocessing managers used when
    importing tweets.  Takes no arguments; the tweet-id file path
    mentioned previously is supplied to the import call itself, not
    to this constructor.

    :return: ImportManager instance
    """
    self.__db_manager = DBManager()
    self.__helper = GeneralHelpers()
    self.__preprocess_manager = PreprocessManager()
    # Accumulates tweet -> class mappings gathered during import.
    self.__tweets_classes_dictionary = {}

    # magic numbers
    self.__components_in_a_line = 2           # expected fields per input line
    self.__max_num_of_tweets_at_once = 100    # batch-size limit per request
def testConnect(self):
    """Validate the connection form fields and attempt a test connection.

    Shows a warning dialog when required fields are missing or the
    configuration is invalid; otherwise reports the result of
    DBManager.testConnect().
    """
    conname = self.lineEdit_conname.text()
    hostname = self.lineEdit_hostname.text()
    port = self.lineEdit_port.text()
    user = self.lineEdit_user.text()
    password = self.lineEdit_password.text()
    # Hostname and user are mandatory.
    if hostname == '' or user == '':
        QMessageBox.warning(self, '链接测试', '主机名,用户名不能为空')
        return
    try:
        newDB = DBManager(conname, hostname, port, user, password)
    except Exception:
        # FIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        QMessageBox.warning(self, '链接测试', '配置参数有误!')
    else:
        QMessageBox.information(self, '链接测试', newDB.testConnect())
def __init__(self, **kwargs):
    """Build the screen manager: login, subjects and professors screens."""
    super().__init__(**kwargs)
    # The DB manager calls back into this widget once login succeeds.
    self.dbManager = DBManager(loginCallback=self.onLoggedIn)
    # LOGIN_SCREEN is added first, so it becomes the initially shown screen.
    self.loginScreen = LoginScreen(name="LOGIN_SCREEN")
    self.add_widget(self.loginScreen)
    self.subjectsScreen = SubjectsScreen(name="SUBJECTS_SCREEN")
    self.add_widget(self.subjectsScreen)
    self.professorsScreen = ProfessorsScreen(name="PROFESSORS_SCREEN")
    self.add_widget(self.professorsScreen)
def __init__(self):
    """Load config, ping every configured host, then send the report.

    Exits immediately when the host list is effectively empty.
    """
    self.config = ConfigManager()
    self.db = DBManager()
    self.report = Report(self.config, self.db)
    # FIX: the original compared against the literals '' and 1 with `is`;
    # identity comparison with literals is implementation-defined (and a
    # SyntaxWarning since Python 3.8).  Use equality instead.
    if self.config.hostsToPing[0] == '' and len(self.config.hostsToPing) == 1:
        print("No hosts to ping")
        sys.exit()
    # Perform pings
    for host in self.config.hostsToPing:
        self.ping(host)
    # self.db.PrintResultsTable()  # Used for testing
    self.report.SendReport()  # Send report if needed
def check_task_db(self):
    """Poll the task DB once a minute and enqueue each row as a task dict."""
    pending = TaskDBManager()
    # NOTE: kept from the original even though unused below.
    db = DBManager()
    while True:
        rows = pending.GetData()
        pending.Clear()
        for row in rows:
            self.task_queue.put({
                'receipt': row[0].encode('ascii', 'ignore'),
                'date': row[1].encode('ascii', 'ignore'),
                'date_guess': row[2],
                'direction': row[3],
                'distance': row[4],
                'fail_cnt': row[5],
            })
        time.sleep(60)