def get_tweets(messages, verbosity):
    """Fetch the authenticated account's home timeline from Twitter and load
    up to 5 randomly-chosen tweets into the "tweets" statistic list.

    messages: list of status strings; appended to in place and returned.
    verbosity: at > 1 a summary line is appended to *messages*.
    Returns the (possibly extended) *messages* list.
    """
    # Connect to Twitter API with OAuth (credentials come from settings)
    consumer = oauth.Consumer(key=settings.TWITTER_API_KEY,
                              secret=settings.TWITTER_API_SECRET)
    token = oauth.Token(key=settings.TWITTER_ACCESS_TOKEN,
                        secret=settings.TWITTER_ACCESS_TOKEN_SECRET)
    client = oauth.Client(consumer, token)

    # Get list of recent tweets
    url = "https://api.twitter.com/1.1/statuses/home_timeline.json"
    resp, content = client.request(url, method="GET")
    data = json.loads(content)
    tweets = [{"user": d["user"]["name"],
               # NOTE(review): "******" prefix looks like a redacted "@" — confirm intent
               "tuser": "******" + d["user"]["screen_name"],
               "tweet": d["text"]} for d in data]

    # Load 5 random tweets into widget
    random.shuffle(tweets)
    clear_statistic_list("tweets", "nsw:rt", "tweets")
    stored = 0
    for t in tweets[:5]:
        stored += 1  # 1-based sort order, and an accurate count for the log line
        add_statistic_list_item("tweets", "nsw:rt", "tweets", t["tweet"],
                                sort_order=stored, label=t["tuser"])
    if verbosity > 1:
        # Bug fix: previously reported sort_order, which over-counted by one
        # when fewer than 5 tweets were stored (and said "1" for zero tweets).
        messages.append("Stored %d tweets" % stored)
    return messages
def update_fire_danger(loader, verbosity=0):
    """Fetch today's fire-danger ratings from the NSW RFS XML feed and
    rebuild the "fire" statistic lists (main and expansion), highest
    rating first.

    loader: unused here; kept for the common loader-function signature.
    verbosity: unused here; kept for the common loader-function signature.
    Returns a (currently always empty) list of status messages.
    """
    messages = []

    # Feed district names that need shortening/renaming for display.
    region_renames = {
        "Greater Sydney Region": "Greater Sydney",
        "Illawarra/Shoalhaven": "Illawarra",
        "Upper Central West Plains": "Upper C.W. Plains",
        "Lower Central West Plains": "Lower C.W. Plains",
    }

    # Fetch and parse the feed; close the connection even if parsing fails
    # (the original leaked the connection).
    http = httplib.HTTPConnection("www.rfs.nsw.gov.au")
    try:
        http.request("GET", "http://www.rfs.nsw.gov.au/feeds/fdrToban.xml")
        xml = ET.parse(http.getresponse())
    finally:
        http.close()

    expand_ratings = []
    for district in xml.getroot():
        region = None
        rating = None
        fireban = None
        for elem in district:
            if elem.tag == 'Name':
                region = elem.text
            elif elem.tag == 'DangerLevelToday':
                rating = elem.text
            elif elem.tag == 'FirebanToday':
                fireban = (elem.text == 'Yes')
        if rating is None:
            # District with no rating published today — skip it.
            continue
        if region == "ACT":
            # Not a NSW region; excluded from the NSW widget.
            continue
        region = region_renames.get(region, region)
        expand_ratings.append(FireDanger(region, rating, fireban))

    # Worst (highest) ratings first; FireDanger defines its own ordering.
    expand_ratings.sort(reverse=True)

    clear_statistic_list("fire", "nsw:day", "rating_list_main")
    clear_statistic_list("fire", "nsw:day", "rating_list_expansion")
    # Sort orders spaced by 10 to leave room for manual insertions.
    sort_order = 10
    for fd in expand_ratings:
        add_statistic_list_item("fire", "nsw:day", "rating_list_main",
                                fd.rating(), sort_order,
                                label=fd.region, traffic_light_code=fd.tlc())
        add_statistic_list_item("fire", "nsw:day", "rating_list_expansion",
                                fd.rating(), sort_order,
                                label=fd.region, traffic_light_code=fd.tlc())
        sort_order += 10
    return messages
def get_airdata(loader, messages, verbosity=0):
    """Refresh the air-pollution region statistic lists from the NSW
    air-quality website report page.

    loader: unused here; kept for the common loader-function signature.
    messages: list of status strings, shared with the HTML parser.
    verbosity: at >= 3 an extra status line is appended.
    Returns the parser's messages list.
    """
    conn = httplib.HTTPConnection("airquality.environment.nsw.gov.au")
    conn.request(
        "GET",
        "http://airquality.environment.nsw.gov.au/aquisnetnswphp/getPage.php?reportid=25")
    response = conn.getresponse()

    # Empty every list the parser is about to repopulate.
    for list_name in ("regions", "region_1"):
        for frequency in ("syd:rt", "nsw:rt"):
            clear_statistic_list("air_pollution", frequency, list_name)

    # The parser writes the new statistics as a side effect of feed().
    html_parser = AirPollutionHtmlParser(messages, verbosity)
    html_parser.feed(response.read())
    conn.close()

    if verbosity >= 3:
        messages.append("Updated air pollution ratings from website")
    return html_parser.messages
# NOTE(review): this is a byte-for-byte duplicate of the get_airdata defined
# earlier in this file; being the later definition, it silently shadows the
# first at import time. One of the two should be deleted — confirm which.
def get_airdata(loader, messages, verbosity=0):
    """Refresh the air-pollution region statistic lists from the NSW
    air-quality website report page.

    loader: unused here; kept for the common loader-function signature.
    messages: list of status strings, shared with the HTML parser.
    verbosity: at >= 3 an extra status line is appended.
    Returns the parser's messages list.
    """
    http = httplib.HTTPConnection("airquality.environment.nsw.gov.au")
    http.request(
        "GET", "http://airquality.environment.nsw.gov.au/aquisnetnswphp/getPage.php?reportid=25"
    )
    resp = http.getresponse()
    # Clear every list the parser repopulates, for both the Sydney and
    # NSW real-time frequencies.
    clear_statistic_list("air_pollution", "syd:rt", "regions")
    clear_statistic_list("air_pollution", "nsw:rt", "regions")
    clear_statistic_list("air_pollution", "syd:rt", "region_1")
    clear_statistic_list("air_pollution", "nsw:rt", "region_1")
    # The parser stores the new statistics as a side effect of feed().
    parser = AirPollutionHtmlParser(messages, verbosity)
    parser.feed(resp.read())
    http.close()
    if verbosity >= 3:
        messages.append("Updated air pollution ratings from website")
    return parser.messages