def main():
    """Keep a DigitalOcean DNS record in sync with this host's public IP.

    Reads configuration from the environment, fetches the current public
    IP, then updates (or creates) the matching DNS record via the
    DigitalOcean API. All failures are logged rather than raised.
    """
    domain = os.getenv('DOMAIN_NAME')
    subdomain = os.getenv('SUBDOMAIN_NAME')
    record_type = os.getenv('RTYPE')
    ttl = os.getenv('TTL')

    configLogging()

    try:
        current_ip = getMyIpAddress()
        if not current_ip:
            # No IP could be determined; nothing sensible to push upstream.
            return
    except Exception as err:
        logError('Unexpected error: {}'.format(err))
        return

    api = DigitalOceanApi(os.getenv('DIGITAL_OCEAN_API_URL'),
                          os.getenv('DIGITAL_OCEAN_TOKEN'))
    try:
        record = api.getRecord(domain, subdomain, record_type)
        if record['data'] == current_ip:
            logging.info('Already up to date!')
            return
        api.updateRecord(domain, record, current_ip, record_type, ttl)
    except NoRecord:
        # First run for this subdomain: create the record instead.
        api.createRecord(domain, subdomain, current_ip, record_type, ttl)
    except Exception as err:
        logError('Unexpected error: {}'.format(err))
def request(self, *args, **kwargs):
    """Issue the HTTP request, log the final URL, and return the JSON body.

    On an HTTP error status, tries to surface the server-provided
    "error" message via a user notification; otherwise re-raises.
    """
    response = super(InvidiousSession, self).request(*args, **kwargs)
    log("request.url: {}".format(response.url))
    try:
        response.raise_for_status()
    except Exception as err:
        msg = None
        try:
            msg = response.json().get("error")
        except Exception:
            # Body was not JSON (or had no "error" key); fall through.
            pass
        if msg:
            logError("session: error processing request [{}]".format(msg))
            return notify(msg, icon=iconError)
        raise err
    else:
        return response.json()
def initHandler(self, data):
    """Resolve and attach the request handler for *data*.

    Looks up a handler class (and optional URI) from ``url_pattern``,
    instantiates it — splitting the URI into path and query args when
    present — and tracks it with a weakref so ``onHandlerDone`` fires
    when the handler is garbage collected. Closes the connection when
    no handler matches.
    """
    handlerClass, uri = url_pattern.getHandlerFromData(data)
    if not handlerClass:
        utils.logError(msgs.CAN_NOT_FIND_HANDLER % data)
        self.close()
        # Bug fix: previously execution fell through here and called
        # None as a constructor below, raising TypeError after the
        # connection had already been closed.
        return
    if uri:
        uriInfo = urlparse.urlsplit(uri)
        path = uriInfo[2]
        req = urlparse.parse_qs(uriInfo[3])
        handler = handlerClass(self, path=path, req=req, uri=uri)
    else:
        handler = handlerClass(self)
    weakR = weakref.ref(handler, onHandlerDone)
    nowCls.add(weakR)
    self.handler = handler
def getKeys(self):
    '''
    Return a list of all of the Tile (key)s currently held in the
    repository.

    Wraps any iteration failure in an AppError identifying the last
    key seen.
    '''
    keys = []
    try:
        for key, _value in self.iteritems():
            keys.append(key)
    except Exception as e:
        import traceback, utils
        utils.logError(traceback.format_exc())
        raise AppError(utils.timestampStr(), 'GraphRepository', \
            'Iterating repository, at key %s:' % (key), e)
    return keys
def loadEdgeGraphForTile(self, thisTile):
    '''
    Build the adjacency-dict graph for the given tile.

    A graph G is defined so:
        G = { nodeA: {nodeB: cost_A-B, nodeC: cost_A-C},
              nodeB: {nodeA: cost_B-A},
              nodeC: {nodeC: cost_C-A} }
    where each cost is an instance of DataStructures.Edge.
    The graph created here will be a subclass of:
        - dict
        - gis.Tile
    '''
    try:
        graph = {}
        for line in self._getStringList(thisTile):
            edge = self._createEdgeFromLine(line)
            # No separate reverse cost/Edge is created here (memory
            # saving): both directions share the same Edge instance.
            graph.setdefault(edge.sourceNode, {})[edge.targetNode] = edge
            graph.setdefault(edge.targetNode, {})[edge.sourceNode] = edge
    except (AWSConnectionError, Exception) as e:
        import traceback, utils
        utils.logError(traceback.format_exc())
        raise AppError(utils.timestampStr(), 'DataStore', \
            'loadEdgeGraphForTile', e)
    return graph
def _getStringList(self, thisTile):
    """
    Return the lines of self.filePath whose centroid lies in *thisTile*.

    Each '|'-separated line carries a centroid WKT string in column 8,
    e.g. "POINT(0.2343 0.2332432)"; a line matches when the integer
    floor of both centroid coordinates equals the tile's (x, y).

    Raises AppError (wrapping the IOError) when the file cannot be
    opened or read.
    """
    lstLines = []
    try:
        # Bug fix: use a context manager so the file handle is released
        # even when a parse error escapes -- the original only closed it
        # on the fully successful path.
        with open(self.filePath, "r") as fs:
            if self.hasHeader:
                fs.readline()
            for thisLine in fs:
                cols = thisLine.split("|")
                centroidWKT = cols[8]
                tmpList = centroidWKT.replace("POINT(", "").replace(")", "").split(" ")
                X = float(tmpList[0])
                Y = float(tmpList[1])
                # Integer "floor" of the x & y co-ordinates picks the tile.
                xVal = int(math.floor(X))
                yVal = int(math.floor(Y))
                if Tile(xVal, yVal) == thisTile:
                    lstLines.append(thisLine)
    except IOError as e:
        import traceback, utils
        utils.logError(traceback.format_exc())
        raise AppError(utils.timestampStr(), "FileDataStore", "open/read file", e)
    return lstLines
def loadEdgeGraphForTile(self, thisTile):
    """
    Build and return the edge graph for *thisTile*.

    A graph G is defined so:
        G = { nodeA: {nodeB: cost_A-B, nodeC: cost_A-C},
              nodeB: {nodeA: cost_B-A},
              nodeC: {nodeC: cost_C-A} }
    where each cost value is an instance of DataStructures.Edge.
    The graph created here will be a subclass of:
        - dict
        - gis.Tile
    """
    graph = {}
    try:
        for raw in self._getStringList(thisTile):
            edge = self._createEdgeFromLine(raw)
            # Reverse costs/Edges are deliberately not built as distinct
            # objects (memory saving); both directions reference the
            # same Edge instance.
            forward = graph.setdefault(edge.sourceNode, {})
            forward[edge.targetNode] = edge
            backward = graph.setdefault(edge.targetNode, {})
            backward[edge.sourceNode] = edge
    except (AWSConnectionError, Exception) as e:
        import traceback, utils
        utils.logError(traceback.format_exc())
        raise AppError(utils.timestampStr(), "DataStore", "loadEdgeGraphForTile", e)
    return graph
def _getStringList(self, thisTile):
    '''
    Search for lines in the file that are in Tile thisTile; return a
    list of those matched lines.

    Each '|'-separated line holds a centroid WKT in column 8, like
    "POINT(0.2343 0.2332432)"; a line matches when the floor of both
    centroid coordinates equals the tile's (x, y).

    Raises AppError (wrapping the IOError) if the file cannot be
    opened or read.
    '''
    lstLines = []
    try:
        # Bug fix: a context manager guarantees the handle is closed
        # even if parsing raises -- previously fs.close() only ran on
        # the fully successful path.
        with open(self.filePath, 'r') as fs:
            if self.hasHeader:
                fs.readline()
            for thisLine in fs:
                cols = thisLine.split('|')
                centroidWKT = cols[8]
                tmpList = centroidWKT.replace('POINT(', '').replace(')', '').split(' ')
                X = float(tmpList[0])
                Y = float(tmpList[1])
                # Find the integer "floor" value of x & y co-ordinates.
                xVal = int(math.floor(X))
                yVal = int(math.floor(Y))
                if Tile(xVal, yVal) == thisTile:
                    lstLines.append(thisLine)
    except IOError as e:
        import traceback, utils
        utils.logError(traceback.format_exc())
        raise AppError(utils.timestampStr(), 'FileDataStore', 'open/read file', e)
    return lstLines
def _getStringList(self, thisTile):
    """
    Fetch the edge lines for *thisTile* from S3.

    Opens the bucket named by ``self.bucketPrefix``, reads the object
    keyed by the tile's ID, and returns its non-empty lines.

    Raises AppError when the bucket cannot be opened or the key cannot
    be read.
    """
    conn = self._getS3Connection()
    try:
        try:
            bucketRef = conn.get_bucket(self.bucketPrefix)
        except (AWSConnectionError, Exception) as e:
            import traceback, utils
            utils.logError(traceback.format_exc())
            # Bug fix: the original interpolated a never-assigned keyID
            # here, always logging "Opening key None".
            errStr = "Opening bucket %s" % (self.bucketPrefix)
            raise AppError(utils.timestampStr(), "DataStore", errStr, e)

        tileID = thisTile.getID()
        try:
            keyRef = bucketRef.get_key(tileID)
            strEdges = keyRef.get_contents_as_string()
        except (AWSConnectionError, Exception) as e:
            import traceback, utils
            utils.logError(traceback.format_exc())
            errStr = "Reading bucket key: %s" % (tileID)
            raise AppError(utils.timestampStr(), "DataStore", errStr, e)
    finally:
        # Bug fix: close the connection on error paths too, not only
        # after a fully successful read.
        conn.close()

    # Bug fix: the original pop() loop returned lines in reverse file
    # order; preserve file order (like the file-backed variant) and
    # drop empty lines. Unused listLen/count counters removed.
    return [line for line in strEdges.split("\n") if line]
def _getStringList(self, thisTile):
    '''
    Fetch the edge lines for thisTile from S3.

    Opens the bucket named by self.bucketPrefix, reads the object keyed
    by the tile's ID, and returns its non-empty lines.

    Raises AppError when the bucket cannot be opened or the key cannot
    be read.
    '''
    conn = self._getS3Connection()
    try:
        try:
            bucketRef = conn.get_bucket(self.bucketPrefix)
        except (AWSConnectionError, Exception) as e:
            import traceback, utils
            utils.logError(traceback.format_exc())
            # Bug fix: the original interpolated a never-assigned keyID
            # here, always logging "Opening key None".
            errStr = 'Opening bucket %s' % (self.bucketPrefix)
            raise AppError(utils.timestampStr(), 'DataStore', errStr, e)

        tileID = thisTile.getID()
        try:
            keyRef = bucketRef.get_key(tileID)
            strEdges = keyRef.get_contents_as_string()
        except (AWSConnectionError, Exception) as e:
            import traceback, utils
            utils.logError(traceback.format_exc())
            errStr = 'Reading bucket key: %s' % (tileID)
            raise AppError(utils.timestampStr(), 'DataStore', errStr, e)
    finally:
        # Bug fix: close the connection on error paths too, not only
        # after a fully successful read.
        conn.close()

    # Bug fix: the original pop() loop returned lines in reverse file
    # order; preserve file order and drop empty lines. The unused
    # listLen/count counters are gone.
    return [line for line in strEdges.split('\n') if line]
def __getitem__(self, key):
    '''
    Look up a tile, accepting a Tile OBJECT as well as a string key,
    and increment the access-frequency counter for that tile.

    Raises AppError (wrapping the KeyError) when the tile is absent.
    '''
    keyStr = str(key)  # key might be a Tile object
    try:
        val = dict.__getitem__(self, keyStr)
    except KeyError as e:
        import traceback, utils
        utils.logError(traceback.format_exc())
        raise AppError(utils.timestampStr(), 'GraphRepository', \
            'Tile with key: %s not found in GraphRepository' % (keyStr), e)
    self.accessFrequency[keyStr] = self.accessFrequency.get(keyStr, 0) + 1
    return val
def _getS3Connection(self):
    '''
    Create and return an S3 connection using the AWS credentials read
    from the environment.

    Raises AppError when either credential variable is missing or the
    connection attempt fails.
    '''
    try:
        AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
    except KeyError:
        raise AppError(utils.timestampStr(), 'DataStore', "AWS_ACCESS_KEY_ID not set", '')
    try:
        AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
    except KeyError:
        raise AppError(utils.timestampStr(), 'DataStore', "AWS_SECRET_ACCESS_KEY not set", '')
    try:
        conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    except (AWSConnectionError, Exception) as e:
        import traceback, utils
        utils.logError(traceback.format_exc())
        raise AppError(utils.timestampStr(), 'DataStore', 'creating AWS connection', e)
    return conn
def _getS3Connection(self):
    """
    Build an S3 connection from the AWS credentials in the environment.

    Raises AppError when AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY is
    unset, or when the connection attempt fails.
    """
    AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
    if AWS_ACCESS_KEY_ID is None:
        raise AppError(utils.timestampStr(), "DataStore", "AWS_ACCESS_KEY_ID not set", "")
    AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
    if AWS_SECRET_ACCESS_KEY is None:
        raise AppError(utils.timestampStr(), "DataStore", "AWS_SECRET_ACCESS_KEY not set", "")
    try:
        return S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    except (AWSConnectionError, Exception) as e:
        import traceback, utils
        utils.logError(traceback.format_exc())
        raise AppError(utils.timestampStr(), "DataStore", "creating AWS connection", e)
for article in articlelist: page = article.find("a")['href'] print("P> {} T> {}".format(pagenumber, page)) # ALREADY SAVED if page in dataset.page.unique(): jumped += 1 print("Already saved article: ", page) continue browser.get(page) data = browser.page_source soup = bs(data, "html.parser") if "Page not found" in browser.title: utils.logError(page, "REQUEST:", "snopes") continue date = soup.find("meta", {"property": "DC.date.issued"})['content'] date = parse(date).strftime('%Y/%m/%d') content = soup.find("div", {"class": "entry-content article-text"}) # CONTENT if (content is None): jumped += 1 print("Content is None: ", page) utils.logError(page, "CONTENT:", "snopes") continue # CONTENT IS NONE if (content.find("div", {"class": "claim"}) is None):
# Scratch / demo script (Python 2: note the bare `print` statement and
# old-style `raise Exception, msg` syntax below).
# Today's date as a Unix timestamp (float).
print time.mktime(datetime.date.today().timetuple())
print(dict(a=1, b=2))
# Class vs. instance attribute dictionaries.
t1 = test1()
print(test1.__dict__)
print(t1.__dict__)
# The built-in type objects for list, tuple and dict literals.
print(type([]))
print(type(()))
print(type({}))
# Smoke-test every utils logging helper at each level.
utils.logDebug("abc")
utils.logInfo("abc")
utils.logWarning("abc")
utils.logError("abc")
utils.logCritical("abc")
utils.logException("abc")
# logException is expected to capture the active exception's traceback.
try:
    raise Exception, 'this is a exception'
except:
    utils.logException('sss')
# print("---".join(dict(a=1,b=2).keys()))
# print(dict(a=1,b=2).values())
l1 = []
l2 = []
l1.append([1, 2, 3])
def error(self, msg):
    """Log *msg* as an error, tagged with the concrete class's name."""
    className = self.__class__.__name__
    utils.logError(msg, className)
soup = bs(browser.page_source, "html.parser") try: meta = soup.find("p", {"class": "statement__meta"}).text data = browser.page_source sidebar = bs(data, "html.parser").find("div", {"class": "widget__content"}) tags = [e.text for e in sidebar.findAll("p")[3].findAll("a")] sources = [ e.a for e in sidebar.div.findAll("p") if (e.a is not None) and (e.a.has_attr('href')) ] source_list = [a['href'] for e in sources] except: utils.logError(page, "REACT ERROR:", "politifact") continue claim_source_url, claim_source_domain = utils.fix_source(page, sources, meta=meta) date = parse(sidebar.p.text.split(":")[1].split(" at ") [0]).strftime('%Y/%m/%d') if claim_source_url is None: utils.logError(page, "NO SOURCE:", "politifact") else: # CREATE ENTRY entry = [ page, claim, claim_label, tags, source_list, claim_source_domain, claim_source_url, date
def geocode (txtLocation):
    '''
    Pass txtLocation to the Nominatum geocoder API.
    All being well, return a dict object with structure
    {place_name = a, longitude=b, latitude = c }

    Every failure (connection, handler setup, XML parse) is wrapped in
    an AppError identifying which stage failed and the search string.
    '''
    resultDict = None
    try:
        # Nominatim wants URL-escaped queries; only spaces are escaped here.
        txtLocationFmt = txtLocation.replace ( ' ', '%20' )
        URL = "http://nominatim.openstreetmap.org/"+\
            "search?q=%s&format=xml" %(txtLocationFmt)
        try:
            response = urllib2.urlopen (URL)
        except Exception as e:
            import traceback, utils
            utils.logError ( traceback.format_exc() )
            raise AppError (utils.timestampStr (), 'Geocoder.geocode', \
                'Connecting to nomatum site for search: %s' %(txtLocation), e )
        parser = make_parser()
        # Decode the body using the charset declared in the HTTP headers
        # (Python 2 httplib message API).
        encoding = response.headers.getparam('charset')
        # FirstMatchParser fills this dict in place with the first result.
        resultDict = {'lon':None, 'lat':None, 'display_name': None}
        XMLText = response.read().decode(encoding)
        response.close()
        try:
            parser.setContentHandler(FirstMatchParser (resultDict) )
        except Exception as e:
            import traceback, utils
            utils.logError ( traceback.format_exc() )
            raise AppError (utils.timestampStr (), 'Geocoder.geocode', \
                'Seting content handler for search: %s' %(txtLocation), e )
        # SAX wants a byte stream; re-encode the decoded text as UTF-8.
        abytes = io.BytesIO(XMLText.encode('utf8'))
        try:
            parser.parse ( abytes )
        except Exception as e:
            print "here %s" %(e)
            import traceback, utils
            utils.logError ( traceback.format_exc() )
            raise AppError (utils.timestampStr (), 'Geocoder.geocode', \
                'Doing parse for search string : %s' %(txtLocation), e )
    except Exception as e:
        # Catch-all: also re-wraps the AppErrors raised above.
        import traceback, utils
        utils.logError ( traceback.format_exc() )
        raise AppError (utils.timestampStr (), 'Geocoder.geocode', \
            'General error for search : %s' %(txtLocation), e )
    return resultDict