def timeYrsecToDate(yrsec, year):
    """Convert seconds since the start of a year to a datetime object.

    Parameters
    ----------
    yrsec : int
        Seconds since the start of `year`.
    year : int
        Year in YYYY form.

    Returns
    -------
    my_date : datetime
        A python datetime object.

    Raises
    ------
    AssertionError
        If `yrsec` or `year` is not an int.

    Example
        my_date = utils.timeUtils.timeYrsecToDate(1205304,2012)

    Written by Sebastien, Jul. 2012
    Modified by ASR 20151120
    """
    from datetime import datetime
    from datetime import timedelta

    # Plain message strings here: the original passed logging.erorr(...)
    # (a typo'd AttributeError) / logging.error(...) as the assert message,
    # which logged eagerly on every call and supplied None as the message.
    assert isinstance(yrsec, int), 'yrsec must be of type int'
    assert isinstance(year, int), 'year must be of type int'
    return datetime(year, 1, 1) + timedelta(seconds=yrsec)
def timeYrsecToDate(yrsec, year):
    """Convert seconds since the start of a year to a datetime object.

    Parameters
    ----------
    yrsec : int
        Seconds since the start of `year`.
    year : int
        Year in YYYY form.

    Returns
    -------
    my_date : datetime
        A python datetime object.

    Raises
    ------
    AssertionError
        If `yrsec` or `year` is not an int.

    Example
        my_date = utils.timeUtils.timeYrsecToDate(1205304,2012)

    Written by Sebastien, Jul. 2012
    Modified by ASR 20151120
    """
    from datetime import datetime
    from datetime import timedelta

    # Plain message strings here: the original passed logging.erorr(...)
    # (a typo'd AttributeError) / logging.error(...) as the assert message,
    # which logged eagerly on every call and supplied None as the message.
    assert isinstance(yrsec, int), 'yrsec must be of type int'
    assert isinstance(year, int), 'year must be of type int'
    return datetime(year, 1, 1) + timedelta(seconds=yrsec)
def watchdog(ns):
    """Probe name server `ns` with an ANY query for DNS_QUERY.

    Returns True when at least one answer record came back; False on an
    empty answer, a DNS timeout, or any other error (errors are logged,
    never raised).
    """
    try:
        logging.info('Sending DNS query: %s', ns)
        query = dns.message.make_query(DNS_QUERY, dns.rdatatype.ANY)
        result = dns.query.udp(query, ns, DNS_TIMEOUT)
        for rr in result.answer:
            logging.info('%s', rr)
        if not result.answer:
            logging.error('Host not found')
        else:
            logging.info('%s is healthy', SERVICE)
        return result.answer != []
    except dns.exception.Timeout:
        logging.error('DNS Server Timeout')
    except Exception:
        # Fixed: was logging.erorr('Exception: {}'.sys.exc_info()[1]) — a
        # typo'd logger method plus a broken format call (str has no
        # attribute 'sys'); also narrowed the bare except.
        logging.error('Exception: %s', sys.exc_info()[1])
    return False
def create_csv():
    """Fetch India COVID state data and write it to a dated CSV file.

    The CSV is named ``csv_<dd-mm-YYYY>.csv`` for today's date. Request
    errors are logged rather than raised.
    """
    try:
        api_response = requests.get(
            'https://api.covidindiatracker.com/state_data.json')
        covid_data = api_response.json()
        statewise_dataframe = data_to_dataframe(covid_data)
        csv_name = 'csv_' + datetime.datetime.strftime(
            datetime.datetime.today(), '%d-%m-%Y') + '.csv'
        statewise_dataframe.to_csv(csv_name)
    except requests.exceptions.HTTPError as errh:
        # Fixed: extra logging args need a %s placeholder, otherwise the
        # exception text never reaches the log message.
        logging.error("Http Error: %s", errh)
    except requests.exceptions.ConnectionError as errc:
        # Fixed typo: logging.erorr -> logging.error.
        logging.error("Error Connecting: %s", errc)
    except requests.exceptions.Timeout as errt:
        logging.error("Timeout Error: %s", errt)
    except requests.exceptions.RequestException as err:
        logging.error("OOps: Something Else %s", err)
def new_folder (self, fname, ftype=None, storeid=None):
    """Create a contacts folder named `fname` in a message store.

    Only Folder.CONTACT_t folders are supported; returns the new
    BBContactsFolder, or None for unsupported folder types. When
    `storeid` is given that store is used, otherwise the default one.
    """
    logging.debug('bb:new_folder(): fname: %s; ftype: %s', fname, ftype)

    if not ftype:
        ftype = Folder.CONTACT_t
    if ftype != Folder.CONTACT_t:
        # Fixed typo: logging.erorr -> logging.error (was an
        # AttributeError at runtime).
        logging.error('Only Contact Groups are supported at this time.')
        return None

    if storeid:
        ms = self.get_msgstore(storeid)
    else:
        ms = self.get_def_msgstore()

    f = BBContactsFolder(self, fname, ms)
    ms.add_folder(f)
    return f
def new_folder(self, fname, ftype=None, storeid=None):
    """Create a contacts folder named `fname` in a message store.

    Only Folder.CONTACT_t folders are supported; returns the new
    BBContactsFolder, or None for unsupported folder types. When
    `storeid` is given that store is used, otherwise the default one.
    """
    logging.debug('bb:new_folder(): fname: %s; ftype: %s', fname, ftype)

    if not ftype:
        ftype = Folder.CONTACT_t
    if ftype != Folder.CONTACT_t:
        # Fixed typo: logging.erorr -> logging.error (was an
        # AttributeError at runtime).
        logging.error('Only Contact Groups are supported at this time.')
        return None

    if storeid:
        ms = self.get_msgstore(storeid)
    else:
        ms = self.get_def_msgstore()

    f = BBContactsFolder(self, fname, ms)
    ms.add_folder(f)
    return f
def new_folder(self, fname, ftype=None, storeid=None):
    """See the documentation in class PIMDB.

    It appears like CardDAV allows multiple 'root' addressbooks, and
    multiple folders in each addressbook. In order to support this we
    will have to provide a storeid to distinguish the various root
    addressbooks. For now new_folder will only create the default root
    addressbook which is the first entry in the adbkhomeset property.
    """
    logging.debug("bb:new_folder(): fname: %s; ftype: %s", fname, ftype)

    if not ftype:
        ftype = Folder.CONTACT_t
    if ftype != Folder.CONTACT_t:
        # Fixed typo: logging.erorr -> logging.error (was an
        # AttributeError at runtime).
        logging.error("Only Contact Groups are supported at this time.")
        return None

    root = self.get_def_root_folder_path()
    resource = URL(os.path.join(root, fname))
    # Fixed: the makeAddressBook result was previously assigned but
    # discarded; return it so callers can inspect the new addressbook
    # (previously the method always returned None here).
    ret = self.get_account().session.makeAddressBook(resource)
    return ret
def new_folder(self, fname, ftype=None, storeid=None):
    """See the documentation in class PIMDB.

    It appears like CardDAV allows multiple 'root' addressbooks, and
    multiple folders in each addressbook. In order to support this we
    will have to provide a storeid to distinguish the various root
    addressbooks. For now new_folder will only create the default root
    addressbook which is the first entry in the adbkhomeset property.
    """
    logging.debug('bb:new_folder(): fname: %s; ftype: %s', fname, ftype)

    if not ftype:
        ftype = Folder.CONTACT_t
    if ftype != Folder.CONTACT_t:
        # Fixed typo: logging.erorr -> logging.error (was an
        # AttributeError at runtime).
        logging.error('Only Contact Groups are supported at this time.')
        return None

    root = self.get_def_root_folder_path()
    resource = URL(os.path.join(root, fname))
    # Fixed: the makeAddressBook result was previously assigned but
    # discarded; return it so callers can inspect the new addressbook
    # (previously the method always returned None here).
    ret = self.get_account().session.makeAddressBook(resource)
    return ret
def new_folder(self, fname, ftype=None, storeid=None):
    """Create a new Google Contacts group named `fname`.

    Only Folder.CONTACT_t folders are supported. Returns the new
    group's ID string, or None on failure or for unsupported types.
    """
    if not ftype:
        ftype = Folder.CONTACT_t
    if ftype != Folder.CONTACT_t:
        # Fixed typo: logging.erorr -> logging.error (was an
        # AttributeError at runtime).
        logging.error('Only Contact Groups are supported at this time.')
        return None

    gn = gdata.data.Name(name=fname)
    new_group = gdata.contacts.data.GroupEntry(name=gn)
    new_group.title = atom.data.Title(text=fname)

    entry = self.get_gdc().create_group(new_group)
    if entry:
        logging.info('Successfully created group. ID: %s', entry.id.text)
        f = GCContactsFolder(self, entry.id.text, gn, entry)
        self.add_contacts_folder(f)
        return entry.id.text
    else:
        logging.error('Could not create Group \'%s\'', gn)
        return None
def new_folder (self, fname, ftype=None, storeid=None):
    """Create a new Google Contacts group named `fname`.

    Only Folder.CONTACT_t folders are supported. Returns the new
    group's ID string, or None on failure or for unsupported types.
    """
    if not ftype:
        ftype = Folder.CONTACT_t
    if ftype != Folder.CONTACT_t:
        # Fixed typo: logging.erorr -> logging.error (was an
        # AttributeError at runtime).
        logging.error('Only Contact Groups are supported at this time.')
        return None

    gn = gdata.data.Name(name=fname)
    new_group = gdata.contacts.data.GroupEntry(name=gn)
    new_group.title = atom.data.Title(text=fname)

    entry = self.get_gdc().create_group(new_group)
    if entry:
        logging.info('Successfully created group. ID: %s', entry.id.text)
        f = GCContactsFolder(self, entry.id.text, gn, entry)
        self.add_contacts_folder(f)
        return entry.id.text
    else:
        logging.error('Could not create Group \'%s\'', gn)
        return None
def close_consent(self):
    """Close cookie consent modal

    :Raises:
     - TimeoutException: If dismiss button couldn't be found in time
    """
    logging.info("Closing cookie consent")
    # go to home page
    self.driver.get("https://www.gg.pl/")
    try:
        # find dismiss button and try to click it
        dismiss_button = self.wait.until(
            EC.element_to_be_clickable(
                (By.CSS_SELECTOR, ".subscribe_button")))
        dismiss_button.click()
        logging.debug("Clicked dismiss button")
        logging.info("Closed cookie consent")
    except TimeoutException as e:
        # Fixed typo: logging.erorr -> logging.error. The typo raised an
        # AttributeError here, so callers saw that instead of the
        # documented TimeoutException re-raise.
        logging.error("Didn't find dismiss button on website")
        raise e
def populateFromDatabase(self):
    """
    Populate this object with data from the database.

    Loads the base request row, then the seed titles, then the rec
    rows, in that order.

    Raises
    ------
    RequestIdError
        If no request row with this object's id exists.
    RequestLoadDataError
        If any of the three queries fails.
    """
    # Query to get basic Request data
    getDataQuery = """SELECT * FROM {reqtable} WHERE id=%(id)s""".format(
        reqtable=config.req_logtable)
    # Query to get the seeds
    getSeedsQuery = """SELECT * FROM {reqseedstable} WHERE id=%(id)s""".format(
        reqseedstable=config.req_seedstable)
    # Query to get recs
    getRecsQuery = """SELECT * FROM {reqrecstable} WHERE id=%(id)s""".format(
        reqrecstable=config.req_recstable)

    dbCursor = self.dbConn.cursor()
    try:
        dbCursor.execute(getDataQuery, {'id': self.id})
        row = dbCursor.fetchone()
        dbCursor.fetchall()  # flush cursor
        if not row:
            sys.stderr.write(
                "SBot Error: failed to find request with id {id} in the database\n"
                .format(id=self.id))
            raise RequestIdError
        self.lang = row['lang']
        self.username = unicode(row['username'], 'utf-8', errors='strict')
        self.page = unicode(row['page'], 'utf-8', errors='strict')
        self.revId = row['revid']
        self.seedSource = row['seed_source']
        self.startTime = row['start_time']
        self.endTime = row['end_time']
        self.status = row['status']
        templates = unicode(row['templates'], 'utf-8', errors='strict')
        self.templates = templates.split(",")
    except MySQLdb.Error as e:
        logging.error("unable to update with request data from database")
        # Fixed typo: logging.erorr -> logging.error (the typo raised an
        # AttributeError that masked the real MySQL error).
        logging.error("MySQL error {d}: {s}".format(d=e.args[0],
                                                    s=e.args[1]))
        raise RequestLoadDataError

    # We got request data, look for seeds...
    try:
        dbCursor.execute(getSeedsQuery, {'id': self.id})
        for row in dbCursor.fetchall():
            seedTitle = unicode(row['title'], 'utf-8', errors='strict')
            self.seeds.append(seedTitle)
    except MySQLdb.Error as e:
        logging.error("unable to update with seed data from database")
        logging.error("MySQL error {d}: {s}\n".format(d=e.args[0],
                                                      s=e.args[1]))
        raise RequestLoadDataError

    # ...and look for recs
    try:
        dbCursor.execute(getRecsQuery, {'id': self.id})
        for row in dbCursor.fetchall():
            recTitle = unicode(row['title'], 'utf-8', errors='strict')
            self.recs[recTitle] = {
                'title': recTitle,
                'cat': row['category'],
                'rank': row['rank'],
                'source': row['rec_source'],
                'rec_rank': row['rec_rank'],
                'popcount': row['popcount'],
                'popularity': row['popularity'],
                'quality': row['quality'],
                'assessedclass': row['assessed_class'],
                'predictedclass': row['predicted_class']
            }
    except MySQLdb.Error as e:
        logging.error("unable to update with rec data from database")
        logging.error("MySQL error {d}: {s}".format(d=e.args[0],
                                                    s=e.args[1]))
        raise RequestLoadDataError

    # OK, done
    return
def run_ordering(paths, args):
    """Order the oriented contig graph with an ILP and write the result.

    Parameters
    ----------
    paths.orient_file : file
    paths.order_file : file
    """
    # load the bundle graph.
    BG = nx.read_gpickle(paths.bundle_file)

    # load the oriented graph.
    DG = nx.read_gpickle(paths.orient_file)

    # check it: every node needs an orientation, every edge a state.
    for n in DG.nodes():
        if DG.node[n]['orien'] == -1:
            # Fixed typo: logging.erorr -> logging.error (and the
            # misspelled message).
            logging.error("orientation not set")
    for p, q in DG.edges():
        if DG[p][q]['state'] == -1:
            logging.error("state not set")

    # solve the ILP.
    logging.info("solving ILP")
    ILP = order.OrderIlp("log.txt", "err.txt")

    # loop over each component.
    SG = nx.DiGraph()
    shorties = list()
    for subg in nx.weakly_connected_component_subgraphs(DG):
        # check for linear path: tiny/linear components are batched later.
        deg_list = [len(subg.neighbors(x)) for x in subg.nodes()]
        if max(deg_list) == 1 or len(subg.nodes()) < 3:
            shorties += subg.nodes()
            continue
        # solve the order otherwise.
        logging.info("solving order: %s" % len(subg.nodes()))
        ILP.load("card", subg)
        tmp = ILP.solve()
        # combine it.
        SG = nx.union(SG, tmp)
        ILP.clear()

    # solve shorties in one batch.
    subg = DG.subgraph(shorties)
    logging.info("solving shorties: %s" % len(subg.nodes()))
    ILP.load("card", subg)
    tmp = ILP.solve()
    SG = nx.union(SG, tmp)
    ILP.clear()

    # ensure node degree is low.
    logging.info("sanity check.")
    deg_list = [len(SG.neighbors(x)) for x in SG.nodes()]
    if max(deg_list) > 2:
        logging.error("is not a path")
        sys.exit(1)

    # remove cycles.
    logging.info("computing cycles")
    for subg in nx.weakly_connected_component_subgraphs(SG):
        logging.info("comp size: %i" % len(subg.nodes()))
        for cycle in nx.simple_cycles(subg):
            # find weakest edge.
            weakest = None
            weight = 99999
            for p, q in subg.subgraph(cycle).edges():
                s = DG[p][q]['state']
                w = BG[p][q]['bcnts'][s]
                if w < weight:
                    weight = w
                    weakest = p, q
            # remove weakest. Fixed: the original removed (p, q) — the
            # last edge visited — instead of the weakest edge found.
            SG.remove_edge(*weakest)

    # ensure its a DAG.
    if nx.is_directed_acyclic_graph(SG) == False:
        logging.error("not a DAG?")
        sys.exit(1)

    # write to disk.
    nx.write_gpickle(SG, paths.order_file)
#List servers in the db if (options.servers != None): #ensure db is there if (not os.path.exists('lssecfixes.sqlite')): loggign.error( "\n**ERROR: You must load data prior to running a report\n") parser.print_help() sys.exit(1) list_servers_in_db() #List servers in the db if (options.advisories != None): #ensure db is there if (not os.path.exists('lssecfixes.sqlite')): logging.erorr( "\n**ERROR: You must load data prior to running a report\n") parser.print_help() sys.exit(1) list_advisories_in_db() if (options.load == None and options.report == None and options.advisories == None and options.servers == None): logging.error("\n**ERROR: You must specify to load or report\n") parser.print_help() sys.exit(1) #Load Data if (options.load != None): #print "HERE" #create db
def compute_distance(paths, args):
    """Estimate gap distances for ordered edges and write the gap graph.

    Parameters
    ----------
    paths.order_file : file
    paths.edge_file : file
    paths.gap_file : file
    """
    # load the graphs.
    BG = nx.read_gpickle(paths.bundle_file)
    EG = nx.read_gpickle(paths.edge_file)
    DG = nx.read_gpickle(paths.order_file)

    # check it: every node needs an orientation, every edge a state.
    for n in DG.nodes():
        if DG.node[n]['orien'] == -1:
            # Fixed typo: logging.erorr -> logging.error (and the
            # misspelled message).
            logging.error("orientation not set")
    for p, q in DG.edges():
        if DG[p][q]['state'] == -1:
            logging.error("state not set")

    # loop over each edge.
    missing_cnt = 0
    missing_list = list()
    for p, q in DG.edges():
        # simplify states: 0/3 collapse to one class, 1/2 to the other.
        if DG[p][q]['state'] == 0 or DG[p][q]['state'] == 3:
            stype = 0
        else:
            stype = 1

        # get compatible edges.
        glist = list()
        for e in EG[p][q]:
            if EG[p][q][e]['state'] == 0 or EG[p][q][e]['state'] == 3:
                ttype = 0
            else:
                ttype = 1
            # add edge if compatible.
            if stype == ttype:
                glist.append(e)

        # sanity check this: with no compatible edges fall back to all
        # of them and remember the pair for removal.
        if len(glist) == 0:
            glist = list()
            for e in EG[p][q]:
                glist.append(e)
            missing_cnt += 1
            missing_list.append((p, q))

        # compute average distance over the chosen edges.
        tmp = list()
        for e in glist:
            tmp.append(EG[p][q][e]['dist'])
        avg = np.average(np.array(tmp))

        # save it.
        DG[p][q]['dist'] = avg

    # remove missing.
    logging.warning("removing edges with no support: %i" % missing_cnt)
    DG.remove_edges_from(missing_list)

    # write to disk.
    nx.write_gpickle(DG, paths.gap_file)
    logging.info("missing good edges: %i" % missing_cnt)
def populateFromDatabase(self):
    """
    Populate this object with data from the database.

    Loads the base request row, then the seed titles, then the rec
    rows, in that order.

    Raises
    ------
    RequestIdError
        If no request row with this object's id exists.
    RequestLoadDataError
        If any of the three queries fails.
    """
    # Query to get basic Request data
    getDataQuery = """SELECT * FROM {reqtable} WHERE id=%(id)s""".format(reqtable=config.req_logtable)
    # Query to get the seeds
    getSeedsQuery = """SELECT * FROM {reqseedstable} WHERE id=%(id)s""".format(reqseedstable=config.req_seedstable)
    # Query to get recs
    getRecsQuery = """SELECT * FROM {reqrecstable} WHERE id=%(id)s""".format(reqrecstable=config.req_recstable)

    dbCursor = self.dbConn.cursor()
    try:
        dbCursor.execute(getDataQuery, {'id': self.id})
        row = dbCursor.fetchone()
        dbCursor.fetchall()  # flush cursor
        if not row:
            sys.stderr.write("SBot Error: failed to find request with id {id} in the database\n".format(id=self.id))
            raise RequestIdError
        self.lang = row['lang']
        self.username = unicode(row['username'], 'utf-8', errors='strict')
        self.page = unicode(row['page'], 'utf-8', errors='strict')
        self.revId = row['revid']
        self.seedSource = row['seed_source']
        self.startTime = row['start_time']
        self.endTime = row['end_time']
        self.status = row['status']
        templates = unicode(row['templates'], 'utf-8', errors='strict')
        self.templates = templates.split(",")
    except MySQLdb.Error as e:
        logging.error("unable to update with request data from database")
        # Fixed typo: logging.erorr -> logging.error (the typo raised an
        # AttributeError that masked the real MySQL error).
        logging.error("MySQL error {d}: {s}".format(d=e.args[0], s=e.args[1]))
        raise RequestLoadDataError

    # We got request data, look for seeds...
    try:
        dbCursor.execute(getSeedsQuery, {'id': self.id})
        for row in dbCursor.fetchall():
            seedTitle = unicode(row['title'], 'utf-8', errors='strict')
            self.seeds.append(seedTitle)
    except MySQLdb.Error as e:
        logging.error("unable to update with seed data from database")
        logging.error("MySQL error {d}: {s}\n".format(d=e.args[0], s=e.args[1]))
        raise RequestLoadDataError

    # ...and look for recs
    try:
        dbCursor.execute(getRecsQuery, {'id': self.id})
        for row in dbCursor.fetchall():
            recTitle = unicode(row['title'], 'utf-8', errors='strict')
            self.recs[recTitle] = {'title': recTitle,
                                   'cat': row['category'],
                                   'rank': row['rank'],
                                   'source': row['rec_source'],
                                   'rec_rank': row['rec_rank'],
                                   'popcount': row['popcount'],
                                   'popularity': row['popularity'],
                                   'quality': row['quality'],
                                   'assessedclass': row['assessed_class'],
                                   'predictedclass': row['predicted_class']}
    except MySQLdb.Error as e:
        logging.error("unable to update with rec data from database")
        logging.error("MySQL error {d}: {s}".format(d=e.args[0], s=e.args[1]))
        raise RequestLoadDataError

    # OK, done
    return