def validateLocations(self):
    """Delete session-table location rows whose timestamp or coordinates are invalid."""
    # Refresh self.locations from this node's session table.
    self.updateLocations()
    # A row is doomed when its timestamp is in the future OR its lat/lon fail
    # validation (validLoc is only consulted when the timestamp is fine,
    # matching the original if/elif short-circuit).
    locToDel = [
        loc['lID']
        for loc in self.locations
        if not self.validTime(loc['time'])
        or not self.validLoc(loc['lat'], loc['lon'])
    ]
    # Only open a DB connection when there is actually something to delete.
    if locToDel:
        db = DBManager()
        db.deleteLocByLID(self.sessionTblName, locToDel)
        db.close()
def namotoring_user_grab(url, userdb = 'namotoring_users', debug = False):
    """Scrape one namotoring (vBulletin) user-profile page and store the fields.

    Logs in via xmlTree_w_login, pulls each profile field by XPath when present,
    and inserts the (field, value) tuples into *userdb* — unless debug is True,
    in which case the collected data is printed instead.
    """
    bimm = namotoring()
    # Authenticated fetch; credentials are hard-coded test accounts.
    main_page = xmlTree_w_login(url,'vb', 'somebody', 'anybody')
    userdata = []
    if main_page:
        if not debug:
            udb = DBManager()
        # Each block below: only append a field if its XPath matched something.
        if len(main_page.xpath(bimm.location))>0:
            userdata.append(('Location',bimm.get_location(main_page.xpath(bimm.location)[0].text_content().strip())))
        if len(main_page.xpath(bimm.interests))>0:
            # NOTE(review): unlike every other field this stores the raw list of
            # matched elements, not .text_content() — possibly a bug; confirm.
            userdata.append(('Interests',main_page.xpath(bimm.interests)))
        if len(main_page.xpath(bimm.noposts))>0:
            # total_posts apparently returns (total, per_day) — TODO confirm.
            userdata.append(('TotalPosts',bimm.total_posts(main_page.xpath(bimm.noposts)[0].text_content())[0]))
            userdata.append(('PostsPerDay',bimm.total_posts(main_page.xpath(bimm.noposts)[0].text_content())[1]))
        if len(main_page.xpath(bimm.lastac))>0:
            userdata.append(('LastActivity',bimm.convert_to_valid_date(bimm.last_activity(main_page.xpath(bimm.lastac)[0].text_content()))))
        if len(main_page.xpath(bimm.joindate))>0:
            userdata.append(('JoinDate',bimm.convert_to_valid_date(bimm.join_date(main_page.xpath(bimm.joindate)[0].text_content()))))
        if len(main_page.xpath(bimm.handle))>0:
            userdata.append(('Handle',main_page.xpath(bimm.handle)[0].text_content().strip()))
        if len(main_page.xpath(bimm.occupation))>0:
            # The occupation node may already be plain text (no .text_content()).
            try:
                userdata.append(('Occupation',main_page.xpath(bimm.occupation)[0].text_content()))
            except AttributeError:
                userdata.append(('Occupation',main_page.xpath(bimm.occupation)[0]))
        if len(main_page.xpath(bimm.ulink))>0:
            userdata.append(('Link',main_page.xpath(bimm.ulink)[0].text_content()))
        if len(main_page.xpath(bimm.name))>0:
            userdata.append(('Name',bimm.get_name_email(main_page.xpath(bimm.name)[0])))
        if len(main_page.xpath(bimm.plus_fb))>0:
            userdata.append(('PositiveFeedback', main_page.xpath(bimm.plus_fb)[0].text_content().strip()))
        if len(main_page.xpath(bimm.minus_fb))>0:
            # NOTE(review): presumably diff() subtracts positive from negative
            # feedback; also assumes plus_fb matched whenever minus_fb did.
            userdata.append(('NegativeFeedback', diff(main_page.xpath(bimm.minus_fb)[0].text_content().strip(),
                main_page.xpath(bimm.plus_fb)[0].text_content().strip())))
        if not debug:
            udb.insert_into_table(userdb, userdata)
            udb.close()
        else:
            print userdata
    else:
        # Page failed to load: nothing to do.
        pass
class GoldenFishServicer(golden_fish_pb2_grpc.GoldenFishServicer):
    """gRPC servicer for the GoldenFish service.

    Configures the shared 'MainLogger' (file + stdout handlers) and owns the
    DBManager used to persist registrations.
    """

    def __init__(self):
        self.logger = logging.getLogger('MainLogger')
        self.logger.setLevel(logging.DEBUG)
        if not os.path.exists('logs'):
            os.makedirs('logs')
        # One log file per server start, timestamped.
        log_path = 'logs/server_{:%Y-%m-%d %H-%M-%S}.log'.format(
            datetime.now())
        formatter = logging.Formatter(
            '%(asctime)s | %(name)-10s | %(levelname)-8s | %(lineno)04d | %(message)s'
        )
        # BUGFIX: 'MainLogger' is a process-global logger, so creating a
        # second servicer used to attach a second pair of handlers and
        # duplicate every log line. Only attach handlers once.
        if not self.logger.handlers:
            fh = logging.FileHandler(log_path)
            fh.setFormatter(formatter)
            fh.setLevel(self.logger.level)
            console = logging.StreamHandler(sys.stdout)
            console.setFormatter(formatter)
            self.logger.addHandler(fh)
            self.logger.addHandler(console)
        self.logger.info('GoldenFish Servicer initializing')
        self.db_manager = DBManager(self.logger)

    def RegisterValve(self, request, context):
        """Register a valve from the request.

        Returns Status(code=0) on success; on a sqlite3 error the gRPC
        context is marked INTERNAL and Status(code=-1) carries the message.
        """
        self.logger.info(
            f'Received request to register valve with id {request.id}')
        v = Valve(request.id, request.description)
        try:
            self.db_manager.register_valve(v)
        except sqlite3.Error as ex:
            self.logger.exception(ex)
            context.set_details(str(ex))
            context.set_code(grpc.StatusCode.INTERNAL)
            return golden_fish_pb2.Status(code=-1, details=str(ex))
        return golden_fish_pb2.Status(code=0, details="OK")
def get_session_results(url, race_country, session_name, year):
    """Return results for one session, serving from the DB cache when it
    matches *year*, otherwise scraping and re-caching them."""
    # A cached entry is only reused when it was stored for the same season.
    entry = DBManager.get_session_results_entry(race_country, session_name)
    if entry:
        cached = entry[0][0]
        if cached and cached['year'] == year:
            print(session_name + " results obtained from cache")
            return cached
    # Cache miss or stale: pick the scraper matching the session type
    # ('fp*' = free practice, 'q*' = qualifying, anything else = race).
    if session_name.startswith('fp'):
        scraped = Scraper.scrape_practice_results(url)
    elif session_name[0] == 'q':
        scraped = Scraper.scrape_qualifying_results(url)
    else:
        scraped = Scraper.scrape_race_results(url)
    # Tag with the season so future cache checks can validate freshness.
    scraped['year'] = year
    # Refresh the cached entry in the database.
    DBManager.update_session_results_entry(race_country, session_name,
                                           scraped)
    print("Showtimes obtained from website")
    return scraped
def openConn(self, conname):
    """Open (or switch to) the database connection named *conname*.

    If the connection is already open, just focus its tab; otherwise build a
    DBManager from the saved config, test it, and set up a new tab for it.
    Shows a message box and aborts on config or connection errors.
    """
    self.currentConname = conname
    if conname in self.openConNameList:
        # Already open: switch to the existing tab.
        self.currentDBIndex = self.openConNameList.index(conname)
        self.tabWidget.setCurrentIndex(self.currentDBIndex)
    else:
        conf = self.conf.cfg_get(conname)
        try:
            self.currentDB = DBManager(conf['conname'], conf['hostname'],
                                       conf['port'], conf['user'],
                                       conf['password'])
        # BUGFIX: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; only trap real errors.
        except Exception:
            self.conn.closeDialog()
            QMessageBox.warning(self, '提醒', '配置参数有误')
            return
        re = self.currentDB.testConnect()
        if re != 'Success':
            self.conn.closeDialog()
            QMessageBox.information(self, '链接失败', re)
            return
        dbList = self.currentDB.showDBs()
        # Initialize the UI for the newly opened connection.
        self.setupBaseUi(dbList)
        index = len(self.openDBClassList)
        self.currentDBIndex = index
        self.openDBClassList.append(self.currentDB)
        self.openConNameList.append(conname)
        self.tabWidget.setTabText(index, conname)
        self.tabWidget.setCurrentIndex(index)
def compressLocations(self):
    """Collapse runs of consecutive identical locations in the session table.

    For each run of rows reporting the same location, the interior rows are
    deleted so only the run's first and last rows survive.
    """
    #get updated location data from this nodes session table
    self.updateLocations()
    locToDel = list()
    Start = 0  # index of the row anchoring the current run
    End = 0    # index of the most recent row matched into the current run
    for index, location in enumerate(self.locations):
        # The first row always anchors the first run; nothing to compare yet.
        if index == 0:
            continue
        lID = location['lID']
        if self.sameLoc(location, self.locations[Start]):
            # Same place as the run anchor: tentatively mark for deletion.
            locToDel.append(lID)
            End = index
        else:
            # Run ended. If it spanned more than one row, un-mark the run's
            # final row so both endpoints of the run are kept.
            if not Start == End:
                locToDel.pop()
            Start = index
            End = index
    # NOTE(review): if the list ends mid-run, the final row of that last run
    # is never un-marked (the pop only happens when a new run starts), so the
    # trailing endpoint gets deleted — confirm this asymmetry is intended.
    if locToDel:
        #open a connection to the DB
        db = DBManager()
        #delete the locations that were marked
        db.deleteLocByLID(self.sessionTblName, locToDel)
        db.close()
def get_showtimes(season, url, race_country):
    """Return showtimes for *race_country*, preferring a cached copy from the
    current *season*; otherwise scrape, tag with the season and re-cache."""
    entry = DBManager.get_showtimes_entry(race_country)
    # Cached data is valid only when stored for the current season.
    if entry:
        cached = entry[0][0]
        if cached and cached['year'] == season:
            print("Showtimes obtained from cache")
            return cached
    # Scrape showtimes from the website.
    showtimes = Scraper.scrape_showtimes(season, url)
    if showtimes == {}:
        # Session already over: nothing to cache.
        print("Showtimes unavailable as session has elapsed")
        return showtimes
    # Tag with the season and refresh the cache.
    showtimes['year'] = season
    DBManager.update_showtimes_entry(race_country, showtimes)
    print("Showtimes obtained from website")
    return showtimes
class IniciarCaja(wx.Dialog):
    """Small wx dialog for opening the cash register ("Inicio de caja"):
    one amount field plus an OK button that records the opening transaction."""

    def __init__(self, parent, id, title):
        wx.Dialog.__init__(self, parent, id, title, size=(180,90))
        self.DBM = DBManager()
        vbox = wx.BoxSizer(wx.VERTICAL)
        hbox1 = wx.BoxSizer(wx.HORIZONTAL)
        font = wx.Font(16, wx.NORMAL, wx.NORMAL, wx.BOLD)
        # NOTE: this local `title` widget shadows the `title` parameter above.
        title = wx.StaticText(self, -1, "Inicio de caja")
        title.SetFont( font )
        hbox2 = wx.BoxSizer(wx.HORIZONTAL)
        # Amount entered by the cashier.
        self.money = wx.TextCtrl(self, -1, "")
        image = wx.Image('green-ok.gif', wx.BITMAP_TYPE_ANY).ConvertToBitmap()
        btn_ini = wx.BitmapButton(self, id=-1, bitmap=image, size=(30,30))
        vbox.Add( hbox1 )
        vbox.Add( hbox2 )
        hbox1.Add( title )
        hbox2.Add( self.money )
        hbox2.Add( btn_ini )
        self.Bind(wx.EVT_BUTTON, self.OnInit, btn_ini)
        self.SetSizer(vbox)
        self.Show(True)

    def OnInit( self, evt ):
        """Record the register-opening amount and close the dialog."""
        dinero = self.money.GetValue()
        this_time = datetime.datetime.now().strftime("%m/%d/%y")
        # NOTE(review): the numeric arguments (1, 1, 19, 1, 1, 0, 0) are
        # opaque here — presumably product/user/category ids for the opening
        # transaction; verify against DBManager.addProductTrans.
        self.DBM.addProductTrans( this_time, dinero, 1, 1, 19, 1, 1, 0, 0)
        self.Close()
def audiworld_user_grab(url, userdb = 'audiworld_users', debug = True):
    """Scrape one AudiWorld user-profile page and store the fields.

    Pulls each profile field by XPath when present and inserts the
    (field, value) tuples into *userdb*; with debug=True the collected data
    is printed instead of stored.
    """
    bimm = audiworld()
    main_page = xmlTree(url)
    if main_page:
        userdata = []
        if not debug:
            udb = DBManager()
        # Each block below: only append a field if its XPath matched something.
        if len(main_page.xpath(bimm.location))>0:
            userdata.append(('Location',main_page.xpath(bimm.location)[0].text_content()))
        if len(main_page.xpath(bimm.interests))>0:
            userdata.append(('Interests',main_page.xpath(bimm.interests)[0].text_content()))
        if len(main_page.xpath(bimm.noposts))>0:
            userdata.append(('TotalPosts',bimm.total_posts(main_page.xpath(bimm.noposts)[0].text_content())))
        if len(main_page.xpath(bimm.lastac))>0:
            userdata.append(('LastActivity',bimm.convert_to_valid_date(bimm.last_activity(main_page.xpath(bimm.lastac)[0].text_content()))))
        if len(main_page.xpath(bimm.joindate))>0:
            userdata.append(('JoinDate',bimm.convert_to_valid_date(bimm.join_date(main_page.xpath(bimm.joindate)[0].text_content()))))
        if len(main_page.xpath(bimm.ppday))>0:
            userdata.append(('PostsPerDay',bimm.p_p_day(main_page.xpath(bimm.ppday)[0].text_content())))
        if len(main_page.xpath(bimm.cars))>0:
            userdata.append(('Cars',bimm.get_cars(main_page.xpath(bimm.cars)[0])))
        if len(main_page.xpath(bimm.handle))>0:
            userdata.append(('Handle',main_page.xpath(bimm.handle)[0].text_content().strip()))
        if len(main_page.xpath(bimm.bio))>0:
            userdata.append(('Biography',main_page.xpath(bimm.bio)[0].text_content()))
        if len(main_page.xpath(bimm.occupation))>0:
            userdata.append(('Occupation',main_page.xpath(bimm.occupation)[0].text_content()))
        if len(main_page.xpath(bimm.ulink))>0:
            userdata.append(('Link',main_page.xpath(bimm.ulink)[0].text_content()))
        if not debug:
            udb.insert_into_table(userdb, userdata)
            udb.close() #@IndentOk
        else:
            print userdata
    else:
        # Page failed to load: nothing to do.
        pass
def __init__(self):
    """Wire up the collaborating manager/helper objects and the seasons covered."""
    # Collaborators, constructed eagerly in the original order.
    self.__db_manager = DBManager()
    self.__helper = GeneralHelpers()
    self.__plot_manager = PlotManager()
    self.__import_manager = ImportManager()
    self.__feature_manager = FeatureManager()
    # Seasons covered by the analysis: "2012" through "2015".
    self.years = tuple(str(year) for year in range(2012, 2016))
def main():
    """Fetch the current seat-presence snapshot and persist each seat."""
    # Network first (matching the original side-effect order), then DB.
    network = NetworkManager()
    seats = json.loads(network.getPresence())
    db_manager = DBManager()
    for seat in seats:
        db_manager.create_seat(seat)
def updatekladr(self, listspr):
    """Backfill kladr.SOCRBASE infisCode values from the given mappings.

    Each mapping in *listspr* maps an infis code to a SOCRNAME; codes that
    are already recorded are skipped, the rest are written with an UPDATE.
    """
    # NOTE(review): values are interpolated directly into the SQL text —
    # injection-prone if the mappings ever carry untrusted input.
    for spr in listspr:
        for row in spr:
            existing = DBManager.executeSql(u"""select KOD_T_ST from kladr.SOCRBASE where SOCRNAME='{0}' and infisCode='{1}'""".format(spr[row],row))
            # Only update rows whose infisCode is not yet recorded.
            if not len(existing) > 0:
                DBManager.executeSqlNonQuery(u"""update kladr.SOCRBASE set infisCode='{1}' where SOCRNAME='{0}'""".format(spr[row],row))
def getdata(self):
    """(Re)create the ExportR23PN_proc stored procedure and return its output."""
    # Drop any stale copy before re-creating it from the configured definition.
    DBManager.executeSqlNonQuery(u"""drop procedure if exists `ExportR23PN_proc`;""")
    DBManager.executeSqlNonQuery(ConnectionsManager.samsonconn['export_proc'])
    return DBManager.executeproc()
def updateLocations(self): db = DBManager() #get this nodes location data self.locations = db.getLocations(self.sessionTblName, self.nID) db.close() #if this node has no locations or is not in a sessions, stop the housekeeping threads if self.locations is None: self.keepRunning = False
def test(self):
    """Experiment driver: for each dataset, build the MLN/DB, run BMF,
    word2vec, random and k-means clustering, then compare inference quality
    of each compressed DB against the original via tuffy."""
    #self.merge()
    #self.compress()
    #return
    embedding_size = 100
    # NOTE(review): CLUSTER_MIN_SIZE is reassigned to 10 inside the body, so
    # this outer loop effectively only varies embedding_size — confirm intent.
    for CLUSTER_MIN_SIZE in range(4,19,2):
        for dsname in ['webkb','er']:
            mln = MLN(dsname)
            db = DBManager(dsname,mln)
            print('merge db dom sizes:')
            dom_obj_map = db.get_dom_objs_map(mln,db.merge_db_file)
            cf = common_f()
            #cf.delete_files(mln.pickle_location)
            #cf.remove_irrelevant_atoms()
            # Cycle the embedding size through 100..900 in steps of 100.
            embedding_size += 100
            embedding_size = embedding_size%1000
            db.set_atoms()
            # Boolean matrix factorization clustering pass.
            bmf = bmf_cluster(dsname)
            bmf.cluster(db,1,mln.pdm,dom_obj_map)
            print('original db dom sizes(after compression):')
            orig_dom_objs_map = db.get_dom_objs_map(mln,mln.orig_db_file)
            CLUSTER_MIN_SIZE = 10
            # word2vec-based clustering and its compression ratio vs original.
            w2v = word2vec(dsname,db,CLUSTER_MIN_SIZE,embedding_size)
            print('w2v cluster dom sizes:')
            w2v_dom_objs_map = db.get_dom_objs_map(mln,w2v.w2v__cluster_db_file)
            cr = cf.calculate_cr(orig_dom_objs_map,w2v_dom_objs_map)
            print('cr : ' + str(cr))
            # Random-cluster baseline matched to the w2v reduction.
            rc = random_cluster(dsname)
            rc.generate_random_db(db,w2v.pred_atoms_reduced_numbers,mln,w2v_dom_objs_map)
            print('random cluster dom sizes')
            db.get_dom_objs_map(mln,mln.random__cluster_db_file)
            # k-means baseline at the same compression ratio.
            kmc = kmeans_cluster(dsname)
            kmc.cluster(db,str(cr),mln.pdm,w2v_dom_objs_map,mln.dom_pred_map)
            print('kmeans cluster dom sizes:')
            kmeans_dom_objs_map = db.get_dom_objs_map(mln,kmc.kmeans__cluster_db_file)
            mln.create_magician_mln()
            #magician(dsname,mln)
            tuffy(dsname)
            # Map each method's clustered DB back to the original for scoring.
            orig_meta_map = {}
            orig_meta_map['bmf'] = bmf.bmf_orig_meta_map
            orig_meta_map['w2v'] = w2v.w2v_orig_meta_map
            orig_meta_map['random'] = rc.rand_orig_meta_map
            orig_meta_map['kmeans'] = kmc.kmeans_orig_meta_map
            print('Dataset : ' + dsname + '; CR : ' + str(cr))
            p = performance(dsname,embedding_size)
            p.compare_marginal(mln,orig_meta_map,cr)
            p.compare_map(mln,orig_meta_map,cr)
            # NOTE(review): this break means only the first dataset ('webkb')
            # is ever processed per outer iteration — confirm intent.
            break
def e90post_thread_grab(url, postsdb='e90post_posts', debug=True):
    """Scrape every post from an e90post thread, following 'next' links.

    Each post yields (field, value) tuples inserted into *postsdb* (or printed
    when debug=True); each poster's profile is also scraped via
    e90post_user_grab.
    """
    bimm = e90post()
    next_page = xmlTree(url)
    if not debug:
        udb = DBManager()
    # NOTE(review): unlike the sibling grabbers, udb is never closed here.
    while True:
        # `next` (shadows the builtin) holds the next-page link, or None on
        # the last page.
        try:
            next = next_page.xpath(bimm.next_link)[0]
        except IndexError:
            next = None
        posts = next_page.xpath(bimm.postdata)
        for t in posts:
            threaddata = []
            # Only rows that carry a poster id are real posts.
            if t.xpath(bimm.posterid):
                threaddata.append(
                    ('TimeOfPost', bimm.convert_to_valid_date(
                        bimm.last_activity(
                            t.xpath(
                                bimm.timeofpost)[0].text_content().strip()))))
                threaddata.append(
                    ('PosterID', bimm.poster_id(t.xpath(bimm.posterid)[0])))
                threaddata.append(
                    ('PostID', bimm.post_id(t.xpath(bimm.postid)[0])))
                threaddata.append(
                    ('ThreadID', bimm.thread_id(t.xpath(bimm.post_thread)[0])))
                # The post-count node may already be plain text.
                try:
                    threaddata.append(
                        ('PostCountInThread',
                         t.xpath(bimm.postcount)[0].text_content()))
                except AttributeError:
                    threaddata.append(
                        ('PostCountInThread', t.xpath(bimm.postcount)[0]))
                threaddata.append(('Link', t.xpath(bimm.postlink)[0]))
                # Also scrape the poster's profile page.
                e90post_user_grab(bimm.domain + t.xpath(bimm.posterid)[0],
                                  'e90post_users')
                if not debug:
                    udb.insert_into_table(postsdb, threaddata)
                else:
                    print threaddata
        if next:
            next_page = xmlTree(bimm.domain + next + alltime)
        elif not next:
            break
def priuschat_user_grab(url, userdb='priuschat_users', debug = False):
    """Scrape one PriusChat user-profile page and store the fields.

    Pulls each profile field by XPath when present and inserts the
    (field, value) tuples into *userdb*; with debug=True the collected data
    is printed instead of stored.
    """
    bimm = priuschat()
    main_page = xmlTree(url)
    userdata = []
    if main_page:
        if not debug:
            udb = DBManager()
        # Each block below: only append a field if its XPath matched something.
        if len(main_page.xpath(bimm.location))>0:
            userdata.append(('Location',main_page.xpath(bimm.location)[0].text_content().strip()))
        if len(main_page.xpath(bimm.interests))>0:
            userdata.append(('Interests',main_page.xpath(bimm.interests)[0].text_content()))
        if len(main_page.xpath(bimm.noposts))>0:
            userdata.append(('TotalPosts',bimm.total_posts(main_page.xpath(bimm.noposts)[0].text_content())))
        if len(main_page.xpath(bimm.lastac))>0:
            userdata.append(('LastActivity',bimm.convert_to_valid_date(bimm.last_activity(main_page.xpath(bimm.lastac)[0].text_content()))))
        if len(main_page.xpath(bimm.joindate))>0:
            userdata.append(('JoinDate', bimm.convert_to_valid_date(bimm.join_date(main_page.xpath(bimm.joindate)[0].text_content()))))
        if len(main_page.xpath(bimm.ppday))>0:
            userdata.append(('PostsPerDay', bimm.p_p_day(main_page.xpath(bimm.ppday)[0].text_content())))
        if len(main_page.xpath(bimm.cars))>0:
            userdata.append(('Cars',main_page.xpath(bimm.cars)[0].text_content()))
        if len(main_page.xpath(bimm.handle))>0:
            userdata.append(('Handle',main_page.xpath(bimm.handle)[0].text_content()))
        if len(main_page.xpath(bimm.occupation))>0:
            userdata.append(('Occupation',main_page.xpath(bimm.occupation)[0].text_content()))
        if len(main_page.xpath(bimm.ulink))>0:
            userdata.append(('Link', main_page.xpath(bimm.ulink)[0].text_content()))
        if not debug:
            udb.insert_into_table(userdb, userdata)
            udb.close()
        else:
            print userdata
    else:
        # Page failed to load: nothing to do.
        pass
def test_getFilesList(self):
    """getCompetitionsFilesList: unknown path -> [], real path -> html files only."""
    # Path not found
    missing = DBManager().getCompetitionsFilesList('../Data')
    self.assertEqual(missing, [], "Should be empty list []")
    # Have a result
    found = DBManager().getCompetitionsFilesList('../DataRaw/2009-2010')
    self.assertNotEqual(found, [], "Should not be an empty list")
    # All the entries are files of html type
    for name in found:
        self.assertNotEqual(name.find('.html'), -1, name + " should be a html")
def send_and_receive(self, task_dict, clientsock):
    """Send one crawl task over *clientsock*, read back the report and
    schedule follow-up tasks.

    Re-queueing policy based on the client's report:
      * some receipts succeeded -> store them and continue from the last
        successful receipt with the fail counter reset;
      * nothing succeeded on a fresh task (fail_cnt == 0) -> fork two date
        guesses, one day later and one day earlier;
      * more than 5 consecutive failures -> drop the task;
      * otherwise -> keep probing in the previously guessed direction.
    """
    db = DBManager()
    try:
        action = {"action": "solve", "task": task_dict}
        clientsock.sendall(json.dumps(action))
        log.info("task send {}".format(task_dict))
        task_report_json = clientsock.recv(65536)
    except socket.error as e:
        log.error("shit happened {}".format(e))
        time.sleep(60)
        # BUGFIX: previously execution fell through and raised NameError on
        # the unbound task_report_json; abort this exchange instead.
        return
    task_report = json.loads(task_report_json)
    log.debug(task_report)
    query_result = task_report['result']
    if query_result['success'] > 0:
        # At least one receipt worked: persist and continue the scan with the
        # fail counter reset.
        db.StoreData(task_report['receipt'])
        task = task_report['task'].copy()
        task['fail_cnt'] = 0
        task['receipt'] = self._modify_receipt_num(
            query_result['lastSuccessReceipt'], task['direction'])
        self.task_queue.put(task)
    # Nothing succeeded (error at first attempt).
    elif task_report['task']['fail_cnt'] == 0:
        # First failure: fork two date guesses (one day later, one earlier).
        origin_task = task_report['task']
        for guess in (1, -1):
            task = origin_task.copy()
            task['fail_cnt'] += 1
            task['date_guess'] = guess
            task['date'] = self._modify_date(origin_task['date'], guess)
            task['receipt'] = self._modify_receipt_num(
                query_result['lastSuccessReceipt'], task['direction'])
            self.task_queue.put(task)
    elif task_report['task']['fail_cnt'] > 5:
        log.debug('a task was terminated due to fail_cnt limit exceed')
        return
    else:
        # Keep probing in the previously guessed direction.
        task = task_report['task'].copy()
        task['fail_cnt'] += 1
        task['date'] = self._modify_date(task['date'],
                                         1 * task['date_guess'])
        self.task_queue.put(task)
def get_schedule_from_api():
    """Fetch the season schedule from the API, attach track images, cache it
    in the database and return it."""
    raw = requests.get(apiUrl + ".json").json()
    # Attach one image per track before caching.
    schedule = add_images_to_schedule(raw)
    # Refresh the cached copy in the database.
    DBManager.update_schedule_entry(schedule)
    print("Updated season schedule for new season from API")
    return schedule
def CheckTaskDB(self):
    """Poll the task DB once a minute and enqueue tasks not already known."""
    task_db = TaskDBManager()
    db = DBManager()
    while True:
        time.sleep(60)
        rows = task_db.GetData()
        task_db.Clear()
        for row in rows:
            # Normalise the two text fields to plain ASCII before queueing.
            item = (row[0].encode('ascii', 'ignore'),
                    row[1].encode('ascii', 'ignore'),
                    row[2],
                    row[3])
            if not db.Findid(item[0]):
                self.q.put(item)
def createNodes(self):
    """Instantiate a NodeHandler for every session node not seen before."""
    # Fetch the node rows for this session.
    db = DBManager()
    nodesData = db.getNodes(self.tblName)
    db.close()
    for nodeData in nodesData:
        devID = nodeData['devID']
        # Only create a handler once per device.
        if devID not in self.nodesDict:
            self.nodes.append(NodeHandler(nodeData))
            # False marks the node as not started yet.
            self.nodesDict[devID] = False
def HandleServerStart(self):
    """Initialise server-wide state once the server is up: DB connection,
    admin user, room manager with a default room, and the logged-users list."""
    # Connect to the MongoDB.
    self.database = DBManager(host="localhost", port=27017, testing=True)
    host_port = self.server.address
    logger.info("Server started in address %s:%d", *host_port)
    self.admin_user = User(name="admin", user="******")
    # The default room is owned by the admin user.
    self.room_manager = RoomManager()
    self.room_manager.CreateRoom("default", self.admin_user)
    self.logged_users = Connect.SafeList()
def get_standings_from_api(old_standings_json):
    """Fetch driver and constructor standings from the API, stamp an expiry
    date (the next race), cache and return them.

    Falls back to *old_standings_json* unchanged when the API has not yet
    advanced past the round already cached.
    """
    # Obtaining drivers json from API
    new_standings_json = {}
    drivers_standings = requests.get(apiUrl + '/driverStandings.json')
    driver_json = drivers_standings.json()
    new_standings_json["driver_standings"] = driver_json
    # Checking if API standings are updated after race has ended
    if old_standings_json is not None:
        old_round = int(old_standings_json["driver_standings"]["MRData"]
                        ["StandingsTable"]["StandingsLists"][0]["round"])
        new_round = int(new_standings_json["driver_standings"]["MRData"]
                        ["StandingsTable"]["StandingsLists"][0]["round"])
        # If API standings are not yet updated, return old standings data with
        # the same expiry so this check will be conducted again next time
        if old_round == new_round:
            print("API standings are not yet updated. Using old cached data.")
            return old_standings_json
    # Obtaining Constructors json from API
    constructor_standings = requests.get(apiUrl + '/constructorStandings.json')
    constructor_json = constructor_standings.json()
    new_standings_json["constructor_standings"] = constructor_json
    # Adding expiry date to standings json file to aid caching: the standings
    # stay valid until the next race in the schedule.
    schedule_json = json.loads(get_schedule().data)
    races_json = schedule_json["MRData"]["RaceTable"]["Races"]
    curr_date = datetime.utcnow()
    for race in races_json:
        # Obtain race date and time (schedule times are UTC "Z" timestamps).
        race_date_raw = race["date"] + "T" + race["time"]
        race_date = datetime.strptime(race_date_raw, '%Y-%m-%dT%H:%M:%SZ')
        # The first race that has not yet started becomes the expiry date.
        if curr_date < race_date:
            new_standings_json["expiryDate"] = race_date.strftime(
                '%Y-%m-%dT%H:%M:%SZ')
            break
    # Update cached standings file in database
    DBManager.update_standings_entry(new_standings_json)
    print("Updated standings from API")
    return new_standings_json
def __init__(self):
    """Bind to the users collection and initialise a blank, pending record."""
    # Mongo connection bound to usersDB / users_records.
    self.db = DBManager("mongodb", 27017)
    self.db.setDB("usersDB")
    self.db.setCollection("users_records")
    # Default profile fields for a freshly created record.
    self.name = "Bruce Wayne"
    self.pwdhash = ""
    self.email = "*****@*****.**"
    self.creation_date = None
    self.update_date = None
    self.status = "pending"
    self.authenticated = False
    self.role = None
def vwvortex_grab(threaddb = 'vwvortex_threads', debug = True):
    """Crawl the vwvortex forum: walk two levels of board links, then page
    through each board's thread list, storing thread metadata in *threaddb*
    (or printing it when debug=True) and descending into each thread."""
    bmw = vwvortex()
    main_page = xmlTree(bmw.domain)
    lvl1 = main_page.xpath(bmw.linklist_lvl1)
    if not debug:
        udb = DBManager()
    for lvl1_link in lvl1:
        second_lvl_page = xmlTree(bmw.domain+lvl1_link)
        lvl2 = second_lvl_page.xpath(bmw.linklist_lvl2)
        for lvl2_link in lvl2:
            next_page = xmlTree(bmw.domain+lvl2_link+alltime)
            while True:
                # `next` (shadows the builtin) holds the next-page link, or
                # None on the last page.
                try:
                    next = next_page.xpath(bmw.next_link)[0]
                except IndexError:
                    next = None
                # NOTE(review): `threads` is fetched but never used here.
                threads = next_page.xpath(bmw.threads_list)
                tread_data = next_page.xpath(bmw.threaddata)
                for j in tread_data:
                    # Rows with a description are real thread entries.
                    if j.xpath(bmw.description):
                        threaddata = []
                        threaddata.append(('Description', j.xpath(bmw.description)[0].text_content().encode('utf-8')))
                        threaddata.append(('Replies', j.xpath(bmw.replies)[0].text_content()))
                        threaddata.append(('Views', bmw.parse_views(j.xpath(bmw.views)[0].text_content())['Views']))
                        threaddata.append(('Link', j.xpath(bmw.link)[0].encode('utf-8')))
                        # Descend into the thread itself.
                        vwvortex_thread_grab(bmw.domain+j.xpath(bmw.link)[0].encode('utf-8'))
                        if not debug:
                            udb.insert_into_table(threaddb, threaddata)
                        else:
                            print threaddata
                if next:
                    next_page = xmlTree(bmw.domain+next)
                elif not next:
                    break
    if not debug:
        udb.close()
def testConnect(self):
    """Read the connection form fields and show the result of a test connect.

    Validates that hostname and user are non-empty, then attempts to build a
    DBManager and reports the outcome via message boxes.
    """
    conname = self.lineEdit_conname.text()
    hostname = self.lineEdit_hostname.text()
    port = self.lineEdit_port.text()
    user = self.lineEdit_user.text()
    password = self.lineEdit_password.text()
    if hostname == '' or user == '':
        QMessageBox.warning(self, '链接测试', '主机名,用户名不能为空')
        return
    try:
        newDB = DBManager(conname, hostname, port, user, password)
    # BUGFIX: was a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt; only trap real errors.
    except Exception:
        QMessageBox.warning(self, '链接测试', '配置参数有误!')
    else:
        QMessageBox.information(self, '链接测试', newDB.testConnect())
def __init__(self):
    """
    Create an ImportManager with its DB/helper/preprocess collaborators.

    (An earlier docstring advertised a ``file_path_to_import`` parameter;
    the constructor takes no arguments.)

    :return: ImportManager instance
    """
    self.__db_manager = DBManager()
    self.__helper = GeneralHelpers()
    self.__preprocess_manager = PreprocessManager()
    # Maps tweets to their classes, filled during import.
    self.__tweets_classes_dictionary = {}
    # magic numbers
    self.__components_in_a_line = 2  # expected fields per input line
    # Batch size cap per request — presumably an API limit; verify.
    self.__max_num_of_tweets_at_once = 100
def feoa_thread_grab(url, postsdb = 'feoa_posts', debug = True):
    """Scrape every post from a FEOA thread, following 'next' links.

    Each post yields (field, value) tuples inserted into *postsdb* (or
    printed when debug=True); each poster's profile is also scraped via
    feoa_user_grab.
    """
    bimm = feoa()
    next_page = xmlTree(url)
    if not debug:
        udb = DBManager()
    while True:
        # `next` (shadows the builtin) holds the next-page link, or None on
        # the last page.
        try:
            next = next_page.xpath(bimm.next_link)[0]
        except IndexError:
            next = None
        posts = next_page.xpath(bimm.postdata)
        for t in posts:
            # Only rows that carry a poster id are real posts.
            if t.xpath(bimm.posterid):
                threaddata = []
                threaddata.append(('TimeOfPost',bimm.convert_to_valid_date(bimm.time_of_post(\
                    t.xpath(bimm.timeofpost)[0].text_content().strip()))))
                threaddata.append(('PosterID',bimm.poster_id(t.xpath(bimm.posterid)[0])))
                threaddata.append(('PostID',bimm.post_id(t.xpath(bimm.postid)[0])))
                threaddata.append(('ThreadID', bimm.thread_id(t.xpath(bimm.post_thread)[0])))
                # The post-count node may already be plain text; a missing
                # node is silently skipped.
                try:
                    threaddata.append(('PostCountInThread',t.xpath(bimm.postcount)[0].text_content()))
                except AttributeError:
                    threaddata.append(('PostCountInThread',t.xpath(bimm.postcount)[0]))
                except IndexError:
                    pass
                threaddata.append(('Link',t.xpath(bimm.postlink)[0] ))
                # Also scrape the poster's profile page.
                feoa_user_grab(bimm.domain+t.xpath(bimm.posterid)[0], 'feoa_users')
                if not debug:
                    udb.insert_into_table(postsdb, threaddata)
                else:
                    print threaddata
        if next:
            next_page = xmlTree(bimm.domain+next+alltime)
        elif not next:
            if not debug:
                udb.close()
            break
def mazda3_grab(threaddb = 'mazda3_threadas', debug =True): #pdb.set_trace() bmw = mazda3() main_page = xmlTree(bmw.domain) lvl1 = main_page.xpath(bmw.linklist) if not debug: udb = DBManager() for lvl1_link in lvl1: next_page = xmlTree(lvl1_link) count = 0 pos = lvl1_link.find('board=') + len('board=') part = lvl1_link[:pos] fid = lvl1_link[pos:] ifid = fid.split(r'.') try: lastpage = int(next_page.xpath(bmw.last_page_threads)[0].text_content()) except IndexError: lastpage = 1 while count<lastpage: #threads_list = next_page.xpath(bmw.threaddata) threads = next_page.xpath(bmw.threads_list) tread_data = next_page.xpath(bmw.threaddata) for j in tread_data: #pass threaddata = [] if j.xpath(bmw.description): threaddata.append(('Description',j.xpath(bmw.description)[0].text_content().encode('utf-8'))) #print j.xpath(bmw.description)[0].text_content().encode('utf-8') threaddata.append(('Views', bmw.parse_stats(j.xpath(bmw.stats)[0].text_content())['Views'])) threaddata.append(('Views', bmw.parse_stats(j.xpath(bmw.stats)[0].text_content())['Replies'])) # print bmw.parse_stats(j.xpath(bmw.stats)[0].text_content()) #print j.xpath(bmw.views)[0].text_content() threaddata.append(('Link', j.xpath(bmw.link)[0])) # print j.xpath(bmw.link)[0] mazda3_thread_grab(bmw.thread_id(j.xpath(bmw.link)[0])) if not debug: udb.insert_into_table(threaddb, threaddata) #pass else: print threaddata count += 1 print count print part+addto(ifid,count) next_page = xmlTree(part+addto(ifid,count)) if not debug: udb.close()
def audiworld_grab(threaddb = 'audiworld_threads', debug = True):
    """Crawl every AudiWorld board, paging through each thread list via the
    'next' link, storing thread metadata in *threaddb* (or printing it when
    debug=True) and descending into each thread."""
    bmw = audiworld()
    main_page = xmlTree(bmw.domain)
    lvl1 = main_page.xpath(bmw.linklist)
    if not debug:
        udb = DBManager()
    for lvl1_link in lvl1:
        next_page = xmlTree(bmw.domain+lvl1_link+alltime)
        while True:
            # `next` (shadows the builtin) holds the next-page link, or None
            # on the last page.
            try:
                next = next_page.xpath(bmw.next_link)[0]
            except IndexError:
                next = None
            threads = next_page.xpath(bmw.threads_list)
            tread_data = next_page.xpath(bmw.threaddata)
            for j in tread_data:
                threaddata = []
                # Rows with a description are real thread entries.
                if j.xpath(bmw.description):
                    threaddata.append(('Description',j.xpath(bmw.description)[0].text_content().encode('utf-8')))
                    threaddata.append(('Replies', j.xpath(bmw.replies)[0].text_content()))
                    threaddata.append(('Views', j.xpath(bmw.views)[0].text_content()))
                    threaddata.append(('Link', j.xpath(bmw.link)[0]))
                    threaddata.append(('ThreadID', bmw.thread_id(j.xpath(bmw.link)[0]) ))
                    # Descend into the thread itself.
                    audiworld_thread_grab(bmw.domain+j.xpath(bmw.link)[0])
                    if not debug:
                        udb.insert_into_table(threaddb, threaddata)
                    else:
                        print threaddata
            # NOTE(review): leftover debug loop — prints each thread element.
            for k in threads:
                pass
                print k
            if next:
                next_page = xmlTree(bmw.domain+next+alltime)
            elif not next:
                break
    if not debug:
        udb.close()
def __init__(self, parent, id, title):
    """Build the product-list dialog: list control, add/print/delete
    buttons, a DB handle, and the pub-sub hook used to refresh it."""
    wx.Dialog.__init__(self, parent, id, title)
    self.SetSize((800, 600))
    self.SetMinSize((800, 600))
    self.rows = []
    self.list_ctrl = wx.ListCtrl(self, style=wx.LC_REPORT)

    def make_button(image_path):
        # Load an image file and wrap it in a 24x24 bitmap button.
        bmp = wx.Image(image_path, wx.BITMAP_TYPE_ANY).ConvertToBitmap()
        return wx.BitmapButton(self, id=-1, bitmap=bmp, size=(24, 24))

    self.btn_add = make_button('images/add.png')
    self.btn_prt = make_button('images/red.gif')
    self.btn_del = make_button('images/delete.png')

    self.DBM = DBManager()
    self.__generateContent()
    Publisher().subscribe(self.__redefine, ("producto_seleccionado_sin_cancha"))
def __init__(self):
    """Load configuration, ping every configured host, then send the
    report if one is due. Exits early when no hosts are configured."""
    self.config = ConfigManager()
    self.db = DBManager()
    self.report = Report(self.config, self.db)
    # BUG FIX: the original compared with 'is' against the literals ''
    # and 1; identity comparison with literals is implementation-defined
    # in CPython and simply wrong semantically. Use equality.
    if self.config.hostsToPing[0] == '' and len(self.config.hostsToPing) == 1:
        print("No hosts to ping")
        sys.exit()
    # Perform pings
    for host in self.config.hostsToPing:
        self.ping(host)
    # self.db.PrintResultsTable()  # Used for testing
    self.report.SendReport()  # Send report if needed
def saveEventConfig(self):
    """Validate the configured dates, persist the event via DBManager,
    and advance the wizard to the directory-configuration step."""
    if (self.checkDates()):
        QMessageBox.about(self.EventConfig, "Success", "Dates are correct")
        # NOTE(review): the event *name* is read from 'EventEndTextbox',
        # which looks like a copy/paste slip — confirm against the .ui
        # file before changing the widget name here.
        self.name = self.EventConfig.findChild(
            QtWidgets.QTextEdit, 'EventEndTextbox').toPlainText()
        self.description = self.EventConfig.findChild(
            QtWidgets.QTextEdit, 'DescriptionTextbox').toPlainText()
        self.startDate = self.EventConfig.findChild(
            QtWidgets.QDateTimeEdit, 'EventConfigStartDate').dateTime().toString(
                "yyyy-MM-ddThh:mm:ss")
        self.endDate = self.EventConfig.findChild(
            QtWidgets.QDateTimeEdit, 'EventConfigEndDate').dateTime().toString(
                "yyyy-MM-ddThh:mm:ss")
        # Renamed from 'list' so the builtin is no longer shadowed.
        event_data = {
            "EventName": self.name,
            "EventDescription": self.description,
            "StartDate": self.startDate,
            "EndDate": self.endDate
        }
        self.id = DBManager.insert_event(event_data)
        print(self.endDate)
        self.moveToDirectConfig()
    else:
        QMessageBox.about(self.EventConfig, "Error",
                          "Start Date is bigger than end date")
def __init__( self, reportname, data, tipo ):
    """Prepare an A4 report document writing to `reportname`, keeping the
    input data, report type, a flowable list, logo path and a DB handle."""
    self.doc = SimpleDocTemplate(reportname, pagesize=A4)
    self.data = data
    self.tipo = tipo
    # Flowables accumulated here before the document is built.
    self.elements = []
    self.logo = 'logo.png'
    self.DBM = DBManager()
def getSMO(self):
    """Return (infisCode, shortName) for every organisation that appears
    as an insurer on at least one client policy."""
    sql = u"""select infisCode,shortName from Organisation where id in (select insurer_id from clientpolicy where insurer_id is not null group by insurer_id)"""
    return DBManager.executeSql(sql)
def __init__(self):
    """Create the DB managers and the four thread-safe work queues this
    object shuttles tasks through."""
    self.dbmanager = DBManager()
    self.taskdbmanager = TaskDBManager()
    # One FIFO queue per pipeline stage.
    for queue_name in ('current', 'q', 'guess', 'guess2'):
        setattr(self, queue_name, Queue.Queue())
def __init__(self, parent, id, title):
    """Build the 'Inicio de caja' dialog: a title row and a row with the
    opening-amount field plus a confirm button wired to OnInit."""
    wx.Dialog.__init__(self, parent, id, title, size=(180, 90))
    self.DBM = DBManager()

    heading_font = wx.Font(16, wx.NORMAL, wx.NORMAL, wx.BOLD)
    heading = wx.StaticText(self, -1, "Inicio de caja")
    heading.SetFont(heading_font)

    self.money = wx.TextCtrl(self, -1, "")
    ok_bitmap = wx.Image('green-ok.gif', wx.BITMAP_TYPE_ANY).ConvertToBitmap()
    ok_button = wx.BitmapButton(self, id=-1, bitmap=ok_bitmap, size=(30, 30))

    title_row = wx.BoxSizer(wx.HORIZONTAL)
    title_row.Add(heading)
    input_row = wx.BoxSizer(wx.HORIZONTAL)
    input_row.Add(self.money)
    input_row.Add(ok_button)

    layout = wx.BoxSizer(wx.VERTICAL)
    layout.Add(title_row)
    layout.Add(input_row)

    self.Bind(wx.EVT_BUTTON, self.OnInit, ok_button)
    self.SetSizer(layout)
    self.Show(True)
def priuschat_thread_grab(url, postsdb = 'priuschat_posts',debug = False):
    """Walk one priuschat thread page-by-page and record every post.

    For each post node it stores time, poster id, post id, in-thread post
    count and the post link, recurses into the poster's profile via
    priuschat_user_grab(), and (when debug is False) inserts one row per
    post into `postsdb`; with debug=True rows are printed instead.
    """
    bimm = priuschat()
    next_page = xmlTree(url)
    if not debug:
        udb = DBManager()
    while True:
        try:
            next = next_page.xpath(bimm.next_link)[0]
            print next
        except IndexError:
            # No 'next' link -> this is the final page of the thread.
            next = None
        posts = next_page.xpath(bimm.postdata)
        for t in posts:
            # Skip nodes with no poster id (e.g. ads / deleted posts).
            if t.xpath(bimm.posterid):
                threaddata = []
                threaddata.append(('TimeOfPost',bimm.convert_to_valid_date(bimm.last_activity(t.xpath(bimm.timeofpost)[0]\
                    .text_content().strip()))))
                threaddata.append(('PosterID',bimm.poster_id(t.xpath(bimm.posterid)[0])))
                threaddata.append(('PostID',bimm.post_id(t.xpath(bimm.postid)[0])))
                try:
                    threaddata.append(('PostCountInThread',t.xpath(bimm.postcount)[0].text_content()))
                except AttributeError:
                    # Some xpath results are plain strings, not elements,
                    # and have no text_content(); store the value as-is.
                    threaddata.append(('PostCountInThread',t.xpath(bimm.postcount)[0]))
                threaddata.append(('Link',t.xpath(bimm.postlink)[0]))
                # Also harvest the poster's profile.
                priuschat_user_grab(t.xpath(bimm.posterid)[0],'priuschat_users')
                if not debug:
                    udb.insert_into_table(postsdb, threaddata)
                else:
                    print threaddata
        if next:
            next_page = xmlTree(next)
        elif not next:
            # Done: close the connection (only opened when not debugging).
            if not debug:
                udb.close()
            break
def __generateContent( self ):
    """Fill the product list for the selected reservation and wire the
    add/delete buttons and layout.

    Looks up the reservation's account, lists its products with a running
    total row at the bottom, and falls back to a single 'Vacio' row when
    the account has no products.
    """
    self.DBM = DBManager()
    # Reservation -> account chain for the selected court/date/slot.
    id_reserva = self.DBM.getIDReservado( self.data['fecha'], self.data['id_cancha'], self.data['horario'] )
    id_cuenta_horario = self.DBM.getCuentaHorarioID( id_reserva )[0]
    productos = self.DBM.getProductosByCuenta( id_cuenta_horario )
    rows = []
    try:
        for row in productos:
            # Re-order DB columns to (codigo, marca, descripcion, precio, cantidad).
            rows.append( (row[3], row[4], row[5], row[1], row[2]) )
    except Exception:
        # BUG FIX: was a bare 'except:' (which also swallowed
        # KeyboardInterrupt/SystemExit). Keep the deliberate best-effort
        # fallback: show a placeholder row when there is nothing to list.
        rows.append( ('Vacio', '', '', '', '') )
    self.list_ctrl.InsertColumn(0, "Codigo")
    self.list_ctrl.InsertColumn(1, "Marca")
    self.list_ctrl.InsertColumn(2, "Descripcion")
    self.list_ctrl.InsertColumn(3, "Precio")
    self.list_ctrl.InsertColumn(4, "Cantidad")
    index = 0
    total = 0
    for row in rows:
        self.list_ctrl.InsertStringItem(index, row[0])
        self.list_ctrl.SetStringItem(index, 1, row[1])
        self.list_ctrl.SetStringItem(index, 2, row[2])
        self.list_ctrl.SetStringItem(index, 3, "$ %s" % str( row[3] ))
        self.list_ctrl.SetStringItem(index, 4, str( row[4] ))
        total += row[3] * row[4]
        # Zebra-stripe the rows for readability.
        if index % 2:
            self.list_ctrl.SetItemBackgroundColour(index, "white")
        else:
            self.list_ctrl.SetItemBackgroundColour(index, "gray")
        index += 1
    # Trailing TOTAL row, highlighted in red.
    self.list_ctrl.InsertStringItem(index, "TOTAL")
    self.list_ctrl.SetStringItem(index, 1, "")
    self.list_ctrl.SetStringItem(index, 2, "")
    self.list_ctrl.SetStringItem(index, 3, "")
    self.list_ctrl.SetStringItem(index, 4, "$ %s" % total)
    self.list_ctrl.SetItemBackgroundColour(index, "red")
    self.Bind(wx.EVT_BUTTON, self.onAdd, self.btn_add)
    self.Bind(wx.EVT_BUTTON, self.onDel, self.btn_del)
    sizer = wx.BoxSizer(wx.VERTICAL)
    sizer2 = wx.BoxSizer(wx.HORIZONTAL)
    sizer2.Add(self.btn_add, 0, wx.ALL, 1)
    sizer2.Add(self.btn_del, 0, wx.ALL, 1)
    sizer.Add(sizer2, 0, wx.ALL, 1)
    sizer.Add(self.list_ctrl, 1, wx.EXPAND)
    self.SetSizer(sizer)
    self.Show(True)
def retrieve_from_fqdn(reply):
    """Fetch firewall policies keyed by the given FQDN from the
    Firewall_Policies database and return the query result."""
    # NOTE(review): DB credentials are hard-coded here (and in the
    # sibling retrieve_* helpers) — consider moving them to config.
    db = DBManager("127.0.0.1", "root", "take5", "Firewall_Policies")
    db.connect()
    result = db.retrieve_from_fqdn(str(reply))
    db.deconnect()
    return result
def mazda3_user_grab(url, userdb = 'mazda3_users',debug = True):
    """Scrape one mazda3 forum user-profile page into `userdb`.

    Logs in, then collects whichever of the optional profile fields are
    present (location, cars, post counts, activity dates, handle,
    feedback) and inserts a single row; with debug=True the collected
    tuples are printed instead of stored.

    NOTE(review): forum credentials are hard-coded in the
    xmlTree_w_login() call.
    """
    bimm = mazda3()
    main_page = xmlTree_w_login(url, 'mazda', 'somebodyis', 'anybodyanybody')
    userdata = []
    # A failed login / fetch yields a falsy page -> do nothing.
    if main_page:
        if not debug:
            udb = DBManager()
        # Every field below is optional on a profile page, hence the
        # repeated len(...) > 0 guards.
        if len(main_page.xpath(bimm.location))>0:
            userdata.append(('Location', main_page.xpath(bimm.location)[0].text_content().strip()))
        if len(main_page.xpath(bimm.car_make))>0:
            # Car make and model year are concatenated into one field.
            userdata.append(('Cars', main_page.xpath(bimm.car_make)[0].text_content()\
                +main_page.xpath(bimm.car_year)[0].text_content()))
        if len(main_page.xpath(bimm.noposts))>0:
            userdata.append(('TotalPosts', bimm.total_posts(main_page.xpath(bimm.noposts)[0].text_content())))
        if len(main_page.xpath(bimm.lastac))>0:
            userdata.append(('LastActivity', bimm.convert_to_valid_date(bimm.last_activity(main_page.xpath(bimm.lastac)[0].text_content()))))
        if len(main_page.xpath(bimm.joindate))>0:
            userdata.append(('JoinDate', bimm.convert_to_valid_date(bimm.join_date(main_page.xpath(bimm.joindate)[0].text_content()))))
        if len(main_page.xpath(bimm.handle))>0:
            userdata.append(('Handle', bimm.get_handle(main_page.xpath(bimm.handle)[0])))
        if len(main_page.xpath(bimm.minus_fb))>0:
            userdata.append(('NegativeFeedback',
                main_page.xpath(bimm.minus_fb)[0].text_content().strip()))
        if len(main_page.xpath(bimm.plus_fb))>0:
            userdata.append(('PositiveFeedback',
                main_page.xpath(bimm.plus_fb)[0].text_content().strip()))
        if not debug:
            # One row per profile; connection is opened/closed per call.
            udb.insert_into_table(userdb, userdata)
            udb.close()
        else:
            print userdata
    else:
        pass
def retrieve_from_CES(transport_protocol, link_alias, direction, ces_fqdn):
    """Query the CES_Policies database for the CES-level policy matching
    the given transport protocol, link alias, direction and CES FQDN."""
    db = DBManager("127.0.0.1", "root", "take5", "CES_Policies")
    db.connect()
    result = db.retrieve_from_ces(transport_protocol, link_alias, direction, ces_fqdn)
    db.deconnect()
    return result
def retrieve_from_host(local_fqdn, remote_fqdn, direction):
    """Query the CES_Policies database for the host-level policy between
    `local_fqdn` and `remote_fqdn` in the given direction."""
    db = DBManager("127.0.0.1", "root", "take5", "CES_Policies")
    db.connect()
    result = db.retrieve_from_host(local_fqdn, remote_fqdn, direction)
    db.deconnect()
    return result
class Gasto(wx.Dialog):
    """Small dialog to register a cash outflow ('Egreso de Caja'):
    an amount, a concept and a button that persists the expense."""

    def __init__(self, parent, id, title):
        wx.Dialog.__init__(self, parent, id, title, size=(180, 130))
        self.DBM = DBManager()
        bold = wx.Font(14, wx.NORMAL, wx.NORMAL, wx.BOLD)

        root = wx.BoxSizer(wx.VERTICAL)

        # Header row with a separator line underneath.
        header = wx.BoxSizer(wx.HORIZONTAL)
        caption = wx.StaticText(self, label="Egreso de Caja")
        caption.SetFont(bold)
        header.Add(caption, 0, wx.EXPAND, 20)
        root.Add(header)
        root.Add(wx.StaticLine(self, -1, wx.Point(10, 30), wx.Size(380, -1)))

        # Amount row.
        monto_row = wx.BoxSizer(wx.HORIZONTAL)
        monto_label = wx.StaticText(self, label="Monto")
        monto_label.SetFont(bold)
        self.monto = wx.TextCtrl(self, -1, "")
        monto_row.Add(monto_label)
        monto_row.Add(self.monto)
        root.Add(monto_row)

        # Concept row.
        concepto_row = wx.BoxSizer(wx.HORIZONTAL)
        concepto_label = wx.StaticText(self, label="Concepto")
        concepto_label.SetFont(bold)
        self.descr = wx.TextCtrl(self, -1, "")
        concepto_row.Add(concepto_label)
        concepto_row.Add(self.descr)
        root.Add(concepto_row)

        confirm = wx.Button(self, label="Descontar")
        confirm.Bind(wx.EVT_BUTTON, self.generarEgreso)
        root.Add(confirm)

        self.SetSizer(root)
        self.Show()

    def generarEgreso(self, evt):
        """Persist the expense and dismiss the dialog."""
        self.DBM.generateEgreso(self.descr.GetValue(), self.monto.GetValue())
        self.Close()
        self.Destroy()
def openListEvent(self):
    """Open the event selected in the list: load its directory settings
    into the directory config, start ingestion, and close the picker."""
    if self.openEvent.eventList.currentRow() != -1:
        self.eventConfig.id = self.openEvent.eventList.currentItem().data(Qt.UserRole)
        query = DBManager.get_single_directory(self.eventConfig.id)
        # Copy the three watched-folder paths into the directory config.
        self.dirConfig.whiteFolder = query['whiteFolder']
        self.dirConfig.blueFolder = query['blueFolder']
        self.dirConfig.redFolder = query['redFolder']
        self.startIngestion()
        self.openEvent.close()
def run(self):
    """Worker loop: block on the alert queue forever, look up each
    alert's contact and dispatch it; alerts without a contactID are
    skipped, unknown contacts are logged and dropped."""
    while True:
        alert = self.queue.get()
        # Nothing to deliver to if the alert has no contact.
        if alert.contactID is None:
            continue
        try:
            db = DBManager()
            contact = db.findContact(cID=alert.contactID)
            db.close()
            # Send the alert using the provided communication types.
            self.sendAlert(contact, alert)
        except ContactError:
            self.logger.info(
                str(self.reporterID) + ": Contact with ID: %s was not found" % str(alert.contactID)
            )
def mazda3_thread_grab(url, postsdb = 'mazda3_posts', debug = True):
    """Walk one mazda3 thread page-by-page and record every post.

    Rebuilds page URLs around the 'topic=' parameter, stores per-post
    fields into `postsdb` (or prints them when debug is True), and
    recurses into each poster's profile via mazda3_user_grab().
    """
    bimm = mazda3()
    next_page = xmlTree(url)
    lvl1 = next_page.xpath(bimm.postdata)
    count = 0
    # Split the thread URL around 'topic=' so subsequent page URLs can be
    # rebuilt with addto() below.
    pos = url.find('topic=') + len('topic=')
    part = url[:pos]
    fid = url[pos:]
    ifid = fid.split(r'.')
    if not debug:
        udb = DBManager()
    try:
        lastpage = int(next_page.xpath(bimm.last_page_threads)[0].text_content())
    except IndexError:
        # No paginator -> single-page thread.
        lastpage = 1
    while count<lastpage:
        posts = next_page.xpath(bimm.postdata)
        for pst in posts:
            threaddata = []
            threaddata.append(('TimeOfPost',bimm.convert_to_valid_date(bimm.last_activity(bimm.time_of_post(pst.xpath(\
                bimm.timeofpost)[0].text_content())))))
            threaddata.append(('PosterID',bimm.poster_id(pst.xpath(bimm.posterid)[0])))
            threaddata.append(('PostID',bimm.post_id(pst.xpath(bimm.postid)[0])))
            threaddata.append(('Link',pst.xpath(bimm.postlink)[0]))
            threaddata.append(('PostCountInThread',bimm.post_count(pst.xpath(bimm.postcount)[0].text_content())))
            threaddata.append(('ThreadID', bimm.thread_id(pst.xpath(bimm.post_thread)[0])))
            # Also harvest the poster's profile.
            mazda3_user_grab(pst.xpath(bimm.posterid)[0],'mazda3_users')
            if not debug:
                udb.insert_into_table(postsdb, threaddata)
            else:
                print threaddata
        count += 1
        next_page = xmlTree(part+addto(ifid,count))
    if not debug:
        udb.close()
def load_joblist(self, refresh_jobstatus = False):
    """Load the data-fetch job list into the UI grid.

    When refresh_jobstatus is True, all job statuses are refreshed first
    (with a brief loading indicator). Rows are truncated to their first
    ten columns, and the job-output pane and grid selection are reset.
    """
    if refresh_jobstatus:
        self.hginterface.showLoading(msg='Refreshing data fetch list...')
        manager = JobManager.JobManager()
        manager.updateAllJobStatus()
        # Give the status update a moment to land before hiding the spinner.
        time.sleep(0.5)
        self.hginterface.hideLoading()
    db = DBManager()
    rows = [record[:10] for record in db.getJobList()]
    self.joblist_df = self.hginterface.setJobList(rows)
    self.hginterface.txt_joboutput.value = ''
    self.hginterface.txt_joboutout_title.value = 'Data fetch output'
    self.hginterface.grid_joblist._selected_rows = []
def __init__(self, parent):
    """Create the 500x300 main frame, open the database, and build the
    menu, toolbar and body widgets before wiring the event handlers."""
    wx.Frame.__init__(self, parent, -1)
    # Window chrome.
    self.SetSize((500, 300))
    self.SetTitle("titulo")
    self.Centre()
    self.Show()
    # Backing services.
    self.DBManager = DBManager()
    self.UserSetting = UserSetting()
    self.OpenDatabase()
    # UI construction (these methods also create fileMenuClose and
    # toolConnects, which are bound below).
    self.menu()
    self.toolbar()
    self.gui()
    # Connect Events
    self.Bind(wx.EVT_MENU, self.closeApp, id=self.fileMenuClose.GetId())
    self.Bind(wx.EVT_TOOL, self.connect, id=self.toolConnects.GetId())
def bimmerfest_thread_grab(url,postsdb='bimmerfest_posts', debug = True):
    """Walk one bimmerfest thread page-by-page and record every post.

    For each post node it stores time, poster id, post id, in-thread post
    count, link and thread id, recurses into the poster's profile via
    bimmerfest_user_grab(), and (when debug is False) inserts one row per
    post into `postsdb`; with debug=True rows are printed instead.
    """
    bimm = bimmerfest()
    next_page = xmlTree(url)
    if not debug:
        udb = DBManager()
    while True:
        try:
            next = next_page.xpath(bimm.next_link)[0]
        except IndexError:
            # No 'next' link -> last page of the thread.
            next = None
        posts = next_page.xpath(bimm.postdata)
        for t in posts:
            # Skip nodes with no poster id (e.g. ads / deleted posts).
            if t.xpath(bimm.posterid):
                threaddata = []
                threaddata.append(('TimeOfPost',bimm.convert_to_valid_date(bimm.last_activity(t.xpath(bimm.timeofpost)[0].text_content().strip()))))
                threaddata.append(('PosterID',bimm.poster_id(t.xpath(bimm.posterid)[0])))
                threaddata.append(('PostID',bimm.post_id(t.xpath(bimm.postid)[0])))
                threaddata.append(('PostCountInThread', t.xpath(bimm.postcount)[0].text_content()))
                threaddata.append(('Link',t.xpath(bimm.postlink)[0]))
                threaddata.append(('ThreadID',bimm.thread_id(t.xpath(bimm.post_thread)[0])))
                # Also harvest the poster's profile.
                bimmerfest_user_grab(bimm.domain+t.xpath(bimm.posterid)[0], 'bimmerfest_users')
                if not debug:
                    udb.insert_into_table(postsdb, threaddata)
                else:
                    print threaddata
        if next:
            next_page = xmlTree(bimm.domain+next+alltime)
        elif not next:
            # Last page: close the connection (only open when not
            # debugging) and stop.
            if not debug:
                udb.close()
            else:
                pass
            break
def task_manager(self, clientsock):
    """Pull tasks off self.task_queue forever and relay each one over
    `clientsock` via send_and_receive()."""
    # NOTE(review): `db` is created but never used in this method.
    db = DBManager()
    while True:
        #while not self.task_queue.empty():
        # Blocks until a task is available; the loop never exits on its own.
        task = self.task_queue.get()
        self.send_and_receive(task, clientsock)
    # NOTE(review): the two statements below are unreachable — the
    # 'while True' above never breaks. The commented-out 'while not
    # empty()' condition suggests they were meant to run when the queue
    # drained; confirm intent before restoring that behavior.
    clientsock.sendall(json.dumps({"action": "close"}))
    log.info("task is empty")
def retrieve_from_msisdn(reply):
    """Fetch firewall policies keyed by the given MSISDN, dump the
    result to data.yaml for inspection, and return it."""
    db = DBManager("127.0.0.1", "root", "take5", "Firewall_Policies")
    db.connect()
    result = db.retrieve_from_msisdn(str(reply))
    # Side effect: snapshot the policy set to a YAML file.
    with open('data.yaml', 'w') as outfile:
        yaml.dump(result, outfile, default_flow_style=False)
    db.deconnect()
    return result
def __init__(self, **kwargs):
    """Set up the screen manager: DB handle plus the login and timetable
    screens, registered in that order (login is shown first)."""
    super().__init__(**kwargs)
    self.dbManager = DBManager()
    # Screens are kept as attributes so other methods can switch to them.
    self.loginScreen = LoginScreen(name="LOGIN_SCREEN")
    self.add_widget(self.loginScreen)
    self.timetableScreen = TimetableScreen(name="TIMETABLE_SCREEN")
    self.add_widget(self.timetableScreen)
def __init__(self):
    """Configure the 'MainLogger' (timestamped file under logs/ plus
    stdout), then open the database manager."""
    self.logger = logging.getLogger('MainLogger')
    self.logger.setLevel(logging.DEBUG)

    # Make sure the log directory exists before attaching a FileHandler.
    if not os.path.exists('logs'):
        os.makedirs('logs')
    log_path = 'logs/server_{:%Y-%m-%d %H-%M-%S}.log'.format(datetime.now())

    formatter = logging.Formatter(
        '%(asctime)s | %(name)-10s | %(levelname)-8s | %(lineno)04d | %(message)s'
    )
    file_handler = logging.FileHandler(log_path)
    file_handler.setFormatter(formatter)
    file_handler.setLevel(self.logger.level)
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(formatter)

    self.logger.addHandler(file_handler)
    self.logger.addHandler(stream_handler)
    self.logger.info('GoldenFish Servicer initializing')
    self.db_manager = DBManager(self.logger)
def __init__(self, request, logger, common):
    """Initialise a traceroute worker for one request.

    Stores the request and the shared logger/common objects, opens a
    DB manager, and logs that the manager has started.
    """
    super(TracerouteManager, self).__init__()
    self.__request = request  # json request
    self.__raw = ''  # raw result for traceroute
    self.__logger = logger
    self.__common = common
    self.__dbmanager = DBManager(self.__common)
    # Cooperative-stop flag, presumably checked by the worker loop
    # elsewhere in this class.
    self.__stop = False
    self.loginfo("Traceroute Manager for " + str(self.__request) + " started.")
def getFSUUID(self,name_or_ip,_user="",cached=True):
    """
    Returns the UUID of a fileserver, which is used as key for
    server-entries in other tables. This does not silently update the
    Cache.

    Lookup order when cached=True: local in-memory cache (raw name, then
    canonical DNS name), then the DB cache (if enabled), and finally the
    live system via the VLDB DAO. A live-system result is stored in the
    local cache; returns None if the live lookup fails.
    """
    if cached :
        # local Cache first
        if name_or_ip in self.localCache["FSUUIDs"].keys() :
            return self.localCache["FSUUIDs"][name_or_ip]
        else :
            # Retry under the canonical DNS name.
            name_or_ip =self.getDNSInfo(name_or_ip)["names"][0]
            if name_or_ip in self.localCache["FSUUIDs"].keys() :
                return self.localCache["FSUUIDs"][name_or_ip]
        # then DB
        if self._CFG.DB_CACHE:
            # Imported lazily so the DB layer is only required when the
            # DB cache is enabled.
            from DBManager import DBManager
            from afs.model.FileServer import FileServer
            self.Logger.debug("looking up FSUUID in DB_Cache for serv=%s" % name_or_ip)
            DNSInfo=self.getDNSInfo(name_or_ip)
            thisDBManager=DBManager(self._CFG)
            fs=thisDBManager.getFromCacheByListElement(FileServer,FileServer.servernames_js,DNSInfo["names"][0])
            if fs != None :
                # store it in localCache
                self.localCache["FSUUIDs"][name_or_ip] = fs.uuid
                return fs.uuid
    # not found in local cache and not in DB Cache, get it from live-system
    from afs.dao.VLDbDAO import VLDbDAO
    self.Logger.debug("getFSUUID: called with %s" % name_or_ip)
    DNSInfo=self.getDNSInfo(name_or_ip)
    uuid=""
    _vlDAO=VLDbDAO()
    try :
        uuid=_vlDAO.getFsUUID(DNSInfo["names"][0],_user=_user,_cfg=self._CFG)
    except :
        # NOTE(review): bare except deliberately maps any live-system
        # failure to None; it also swallows KeyboardInterrupt — consider
        # narrowing to Exception.
        return None
    # store it in localCache
    self.localCache["FSUUIDs"][name_or_ip] = uuid
    return uuid
def getHostnameByFSUUID(self,uuid,_user="",cached=True) :
    """
    Returns the hostname of a fileserver by uuid.

    Lookup order when cached=True: local in-memory cache (reverse scan),
    then the DB cache (if enabled), and finally the live system via the
    VLDB DAO. A live-system result is canonicalised through DNS and
    stored in the local cache; raises LookupUtilError when no server
    with that uuid exists in the live system.
    """
    self.Logger.debug("called with %s, cached=%s" % (uuid,cached))
    self.Logger.debug("self._CFG=%s" % (self._CFG))
    if cached :
        # local Cache first: reverse lookup hostname -> uuid.
        for hn in self.localCache["FSUUIDs"] :
            if self.localCache["FSUUIDs"][hn] == uuid :
                return hn
        # then DB
        if self._CFG.DB_CACHE:
            # Imported lazily so the DB layer is only required when the
            # DB cache is enabled.
            from DBManager import DBManager
            from afs.model.FileServer import FileServer
            thisDBManager=DBManager(self._CFG)
            fs=thisDBManager.getFromCache(FileServer,uuid=uuid)
            self.Logger.debug("looking up hostname in DB_Cache for uuid=%s" % uuid)
            if fs != None :
                # Remember the mapping locally before returning.
                self.localCache["FSUUIDs"][fs.servernames[0]] = fs.uuid
                return fs.servernames[0]
    # not found in local cache and not in DB Cache, or cacheing disabled.
    # get it from live-system
    from afs.dao.VLDbDAO import VLDbDAO
    _vlDAO=VLDbDAO()
    name_or_ip=None
    # Linear scan of the full fileserver list for the matching uuid.
    for fs in _vlDAO.getFsServList(_cfg=self._CFG,_user="" ) :
        if fs['uuid'] == uuid :
            name_or_ip = fs['name_or_ip']
    if name_or_ip == None :
        raise LookupUtilError("No Server with uuid=%s registered in live-system" % uuid)
    # store it in localCache
    self.Logger.debug("getHostnameByFSUUID: got name_or_ip =%s from live-system" % name_or_ip)
    name_or_ip=self.getDNSInfo(name_or_ip)["names"][0]
    self.localCache["FSUUIDs"][name_or_ip] = uuid
    self.Logger.debug("returning: %s" % name_or_ip)
    return name_or_ip