def weather(place):
    """
    @param place: Location to look up (free-text, URL-quoted before sending)
    @summary: Fetches current conditions for a place from World Weather Online
              and formats them as a one-line UTF-8 string, or None on failure.
    """
    try:
        response = urllib2.urlopen(
            "http://free.worldweatheronline.com/feed/weather.ashx?format=xml&fx=no&extra=localObsTime&key=%s&q=%s"
            % (config["app-id"], urllib.quote(place)))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        current = soup.find("current_condition")
        query = soup.find("request")
        # Each value may be split across several text nodes, hence the joins
        return (
            "%s (%s): %s at %sC, %s%% humidity, %skmph winds" % (
                "".join(query.find("query").findAll(text=True)),
                "".join(current.find("localobsdatetime").findAll(text=True)),
                "".join(current.find("weatherdesc").findAll(text=True)).strip(),
                "".join(current.find("temp_c").findAll(text=True)),
                "".join(current.find("humidity").findAll(text=True)),
                "".join(current.find("windspeedkmph").findAll(text=True)),
            )
        ).encode("utf-8")
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def weather(place):
    '''
    @param place: Location to look up (free-text, URL-quoted before sending)
    @summary: Fetches current conditions for a place from World Weather Online
              and formats them as a one-line UTF-8 string, or None on failure.
    '''
    try:
        response = urllib2.urlopen(
            'http://free.worldweatheronline.com/feed/weather.ashx?format=xml&fx=no&extra=localObsTime&key=%s&q=%s'
            % (config['app-id'], urllib.quote(place)))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        current = soup.find('current_condition')
        query = soup.find('request')
        # Each value may be split across several text nodes, hence the joins
        return (
            '%s (%s): %s at %sC, %s%% humidity, %skmph winds' % (
                ''.join(query.find('query').findAll(text=True)),
                ''.join(current.find('localobsdatetime').findAll(text=True)),
                ''.join(current.find('weatherdesc').findAll(text=True)).strip(),
                ''.join(current.find('temp_c').findAll(text=True)),
                ''.join(current.find('humidity').findAll(text=True)),
                ''.join(current.find('windspeedkmph').findAll(text=True)),
            )
        ).encode('utf-8')
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def translate(msg):
    '''
    @param msg: Message to translate
    @summary: Translates a query into English using Google Translate (scrapes
              the result page for the span with id="result").
    @attention: TODO - the text to translate is placed in the URL fragment
        (after '#'); fragments are normally not sent to the server, so the
        fetched page may not contain the translation. Verify against the
        live endpoint.
    '''
    try:
        req = urllib2.Request('http://translate.google.com/#auto|en|%s.' %
                              urllib.quote(msg))
        # Spoof a browser user agent; the default urllib2 agent is blocked
        req.add_header(
            'User-Agent',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.52 Safari/536.5'
        )
        response = urllib2.urlopen(req)
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        Log.write(page)
        soup = BeautifulSoup(page)
        trans = ''.join(
            soup.find('span', attrs={
                'id': 'result'
            }).findAll(text=True))
        return ("%s -> %s" % (msg, trans)).encode('utf-8')
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def google_forecast(place, num=3):
    """
    @param place: Location to look up (free-text, URL-quoted before sending)
    @param num: Maximum number of daily forecasts to include (default 3)
    @summary: Fetches a multi-day forecast for a place from the Google Weather
              API and formats it as a one-line UTF-8 string, or None on failure.
    """
    try:
        response = urllib2.urlopen("http://www.google.com/ig/api?weather=%s" %
                                   urllib.quote(place))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        forecasts = soup.findAll("forecast_conditions")
        r = []
        for f in forecasts[:num]:
            # low/high go through to_celcius — presumably Fahrenheit input;
            # confirm against the helper's definition
            r.append(
                "%s on %s %dC-%dC" % (
                    f.find("condition")["data"],
                    f.find("day_of_week")["data"],
                    to_celcius(f.find("low")["data"]),
                    to_celcius(f.find("high")["data"]),
                )
            )
        return ("%s: %s" % (soup.find("forecast_information").find("city")["data"],
                            " | ".join(r))).encode("utf-8")
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def forecast(place, num=3):
    '''
    @param place: Location to look up (free-text, URL-quoted before sending)
    @param num: Number of forecast days to request and include (default 3)
    @summary: Fetches a multi-day forecast for a place from World Weather
              Online and formats it as a one-line UTF-8 string, or None on
              failure.
    '''
    try:
        response = urllib2.urlopen(
            'http://free.worldweatheronline.com/feed/weather.ashx?format=xml&num_of_days=%d&key=%s&q=%s'
            % (num, config['app-id'], urllib.quote(place)))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        forecasts = soup.findAll('weather')
        query = soup.find('request')
        r = []
        for f in forecasts[:num]:
            # Each value may be split across several text nodes, hence the joins
            r.append(
                '%s on %s [%sC-%sC], %skmph winds' % (
                    ''.join(f.find('weatherdesc').findAll(text=True)).strip(),
                    ''.join(f.find('date').findAll(text=True)),
                    ''.join(f.find('tempminc').findAll(text=True)),
                    ''.join(f.find('tempmaxc').findAll(text=True)),
                    ''.join(f.find('windspeedkmph').findAll(text=True)),
                ))
        return ('%s: %s' % (''.join(query.find('query').findAll(text=True)),
                            ' | '.join(r))).encode('utf-8')
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def google_forecast(place, num=3):
    '''
    @param place: Location to look up (free-text, URL-quoted before sending)
    @param num: Maximum number of daily forecasts to include (default 3)
    @summary: Fetches a multi-day forecast for a place from the Google Weather
              API and formats it as a one-line UTF-8 string, or None on failure.
    '''
    try:
        response = urllib2.urlopen('http://www.google.com/ig/api?weather=%s' %
                                   urllib.quote(place))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        forecasts = soup.findAll('forecast_conditions')
        r = []
        for f in forecasts[:num]:
            # low/high go through to_celcius — presumably Fahrenheit input;
            # confirm against the helper's definition
            r.append(
                '%s on %s %dC-%dC' %
                (f.find('condition')['data'], f.find('day_of_week')['data'],
                 to_celcius(f.find('low')['data']),
                 to_celcius(f.find('high')['data'])))
        return ('%s: %s' %
                (soup.find('forecast_information').find('city')['data'],
                 ' | '.join(r))).encode('utf-8')
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def urbandefine(term, num=1):
    '''
    @param term: Term for searching
    @param num: Return the (n)th result (1-based; default: first entry)
    @summary: Performs a urban dictionary search and returns the selected
              definition plus its example, as a UTF-8 string, or None on
              failure (including fewer than num results).
    '''
    try:
        response = urllib2.urlopen(
            'http://www.urbandictionary.com/define.php?term=%s' %
            urllib.quote(term))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        # Entries are <td class="text" id="entry_<number>"> cells inside the
        # entries table; raw string so \d is a regex class, not an escape
        items = soup.find('table', attrs={
            'id': 'entries'
        }).findAll('td', attrs={
            'class': 'text',
            'id': re.compile(r'entry_\d+')
        })
        item = items[num - 1]
        define = htmlx.unescape(''.join(
            item.find('div', attrs={
                'class': 'definition'
            }).findAll(text=True)))
        example = htmlx.unescape(''.join(
            item.find('div', attrs={
                'class': 'example'
            }).findAll(text=True)))
        if len(example):
            example = ", Eg: " + example
        return ("%s: %s%s" % (term, define, example)).encode('utf-8')
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def description(url):
    '''
    @param url: The url to resolve
    @summary: Fetches the meta-description of an url
    '''
    status, ctype, url = visit(url)
    # Guard clauses for the non-fetchable cases
    if url is None:
        return None
    if status == 302:
        return 'Redirection loop detected for url %s' % url
    if status != 200:
        return 'Status Code %s : url %s' % (status, url)
    try:
        if not ctype.startswith('text/'):
            return 'Preview not available for content type %s : [%s]' % (
                ctype, min_url(url))
        handle = urllib2.urlopen(url)
        body = handle.read()
        handle.close()
        # Match the description meta tag case-insensitively
        meta = BeautifulSoup(body).find(
            'meta', {'name': re.compile('description', re.I)})
        return 'Description %s : [%s]' % (htmlx.unescape(meta['content']),
                                          min_url(url))
    except Exception:
        Log.error()
        return None
def description(url):
    '''
    @param url: The url to resolve
    @summary: Fetches the meta-description of an url
    '''
    status, ctype, url = visit(url)
    # Bail out early on anything we cannot or should not fetch
    if url is None:
        return None
    if status == 302:
        return 'Redirection loop detected for url %s' % url
    if status != 200:
        return 'Status Code %s : url %s' % (status, url)
    try:
        if not ctype.startswith('text/'):
            return 'Preview not available for content type %s : [%s]' % (
                ctype, min_url(url))
        conn = urllib2.urlopen(url)
        markup = conn.read()
        conn.close()
        # The name attribute is matched case-insensitively
        tag = BeautifulSoup(markup).find(
            'meta', {'name': re.compile('description', re.I)})
        return 'Description %s : [%s]' % (htmlx.unescape(tag['content']),
                                          min_url(url))
    except Exception:
        Log.error()
        return None
def forecast(place, num=3):
    """
    @param place: Location to look up (free-text, URL-quoted before sending)
    @param num: Number of forecast days to request and include (default 3)
    @summary: Fetches a multi-day forecast for a place from World Weather
              Online and formats it as a one-line UTF-8 string, or None on
              failure.
    """
    try:
        response = urllib2.urlopen(
            "http://free.worldweatheronline.com/feed/weather.ashx?format=xml&num_of_days=%d&key=%s&q=%s"
            % (num, config["app-id"], urllib.quote(place)))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        forecasts = soup.findAll("weather")
        query = soup.find("request")
        r = []
        for f in forecasts[:num]:
            # Each value may be split across several text nodes, hence the joins
            r.append(
                "%s on %s [%sC-%sC], %skmph winds" % (
                    "".join(f.find("weatherdesc").findAll(text=True)).strip(),
                    "".join(f.find("date").findAll(text=True)),
                    "".join(f.find("tempminc").findAll(text=True)),
                    "".join(f.find("tempmaxc").findAll(text=True)),
                    "".join(f.find("windspeedkmph").findAll(text=True)),
                )
            )
        return ("%s: %s" % ("".join(query.find("query").findAll(text=True)),
                            " | ".join(r))).encode("utf-8")
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def google_weather(place):
    '''
    @param place: Location to look up (free-text, URL-quoted before sending)
    @summary: Fetches current conditions for a place from the Google Weather
              API and formats them as a one-line UTF-8 string, or None on
              failure.
    '''
    try:
        response = urllib2.urlopen('http://www.google.com/ig/api?weather=%s' %
                                   urllib.quote(place))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        current = soup.find('current_conditions')
        return (
            '%s: %s at %sC, %s, %s' %
            (soup.find('forecast_information').find('city')['data'],
             current.find('condition')['data'],
             current.find('temp_c')['data'],
             current.find('humidity')['data'],
             current.find('wind_condition')['data'])).encode('utf-8')
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def google_weather(place):
    """
    @param place: Location to look up (free-text, URL-quoted before sending)
    @summary: Fetches current conditions for a place from the Google Weather
              API and formats them as a one-line UTF-8 string, or None on
              failure.
    """
    try:
        response = urllib2.urlopen("http://www.google.com/ig/api?weather=%s" %
                                   urllib.quote(place))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        current = soup.find("current_conditions")
        return (
            "%s: %s at %sC, %s, %s" % (
                soup.find("forecast_information").find("city")["data"],
                current.find("condition")["data"],
                current.find("temp_c")["data"],
                current.find("humidity")["data"],
                current.find("wind_condition")["data"],
            )
        ).encode("utf-8")
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def title(url, only_title=False):
    '''
    @param url: The url to resolve
    @summary: Fetches the title of an url
    '''
    status, ctype, url = visit(url)
    # Guard clauses for the non-fetchable cases
    if url is None:
        return None
    if status == 302:
        return 'Redirection loop detected for url %s' % url
    if status != 200:
        return 'Status Code %s : url %s' % (status, url)
    try:
        if not ctype.startswith('text/'):
            return 'Title not available for content type %s : url %s' % (
                ctype, min_url(url))
        # Fast title search: pull the page down in 512-byte Range requests
        # and stop as soon as the <title> element is complete.
        chunk = 512
        offset = 0
        data = ''
        found = None
        while True:
            req = urllib2.Request(url)
            req.headers['Range'] = 'bytes=%s-%s' % (offset, offset + chunk - 1)
            response = urllib2.urlopen(req)
            data += response.read()
            response.close()
            found = BeautifulSoup(data).find('title')
            offset += chunk
            # Full body (200), or partial (206) with a closed <title> tag
            # (closed == it has a following sibling node)
            if response.code == 200 or (response.code == 206 and found
                                        and found.nextSibling):
                break
        text = htmlx.unescape(u''.join(
            found.findAll(text=True))).encode('utf-8')
        if only_title:
            return 'Title: %s' % text
        return '%s : [%s]' % (text, min_url(url))
    except Exception:
        Log.error()
        return None
def translate(msg):
    '''
    @param msg: Message to translate
    @summary: Translates a query into English using Google Translate (scrapes
              the result page for the span with id="result").
    @attention: TODO - the text to translate is placed in the URL fragment
        (after '#'); fragments are normally not sent to the server, so the
        fetched page may not contain the translation. Verify against the
        live endpoint.
    '''
    try:
        req = urllib2.Request('http://translate.google.com/#auto|en|%s.' %
                              urllib.quote(msg))
        # Spoof a browser user agent; the default urllib2 agent is blocked
        req.add_header(
            'User-Agent',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.52 Safari/536.5')
        response = urllib2.urlopen(req)
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        Log.write(page)
        soup = BeautifulSoup(page)
        trans = ''.join(
            soup.find('span', attrs={'id': 'result'}).findAll(text=True))
        return ("%s -> %s" % (msg, trans)).encode('utf-8')
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None
def title(url, only_title=False):
    '''
    @param url: The url to resolve
    @summary: Fetches the title of an url
    '''
    status, ctype, url = visit(url)
    # Bail out early on anything we cannot or should not fetch
    if url is None:
        return None
    if status == 302:
        return 'Redirection loop detected for url %s' % url
    if status != 200:
        return 'Status Code %s : url %s' % (status, url)
    try:
        if not ctype.startswith('text/'):
            return 'Title not available for content type %s : url %s' % (
                ctype, min_url(url))
        # Fast title search: request 512-byte ranges until <title> closes,
        # avoiding a full page download.
        step = 512
        start = 0
        fetched = ''
        tag = None
        while True:
            req = urllib2.Request(url)
            req.headers['Range'] = 'bytes=%s-%s' % (start, start + step - 1)
            resp = urllib2.urlopen(req)
            fetched += resp.read()
            resp.close()
            tag = BeautifulSoup(fetched).find('title')
            start += step
            # Whole body returned (200), or partial (206) with a complete
            # <title> element (a following sibling means it was closed)
            if resp.code == 200 or (resp.code == 206 and tag
                                    and tag.nextSibling):
                break
        title_text = htmlx.unescape(u''.join(
            tag.findAll(text=True))).encode('utf-8')
        if only_title:
            return 'Title: %s' % title_text
        return '%s : [%s]' % (title_text, min_url(url))
    except Exception:
        Log.error()
        return None
def iplocate(ip):
    '''
    @param ip: The IP address
    @summary: Performs a IP lookup and obtains the location of the user
    '''
    try:
        conn = urllib2.urlopen(
            'http://api.ipinfodb.com/v3/ip-city/?key=%s&format=xml&ip=%s' %
            (config['app-id'], urllib.quote(ip)))
        body = conn.read()
        conn.close()
        reply = BeautifulSoup(body).find('response')
        # Only an OK status carries usable coordinates
        if reply.find('statuscode').find(text=True) != "OK":
            return None
        lat = str((reply.find('latitude').find(text=True)))
        lon = str(reply.find('longitude').find(text=True))
        return '%s belongs to %s' % (ip, geo(lat, lon))
    except Exception:
        Log.error()
        return None
def urbandefine(term, num=1):
    '''
    @param term: Term for searching
    @param num: Return the (n)th result (1-based; default: first entry)
    @summary: Performs a urban dictionary search and returns the selected
              definition plus its example, as a UTF-8 string, or None on
              failure (including fewer than num results).
    '''
    try:
        response = urllib2.urlopen(
            'http://www.urbandictionary.com/define.php?term=%s' %
            urllib.quote(term))
        try:
            page = response.read()
        finally:
            # Close even if read() raises, so the socket is never leaked
            response.close()
        soup = BeautifulSoup(page)
        # Entries are <td class="text" id="entry_<number>"> cells inside the
        # entries table; raw string so \d is a regex class, not an escape
        items = soup.find('table', attrs={'id': 'entries'}).findAll(
            'td', attrs={'class': 'text', 'id': re.compile(r'entry_\d+')})
        item = items[num - 1]
        define = htmlx.unescape(''.join(
            item.find('div', attrs={'class': 'definition'}).findAll(text=True)))
        example = htmlx.unescape(''.join(
            item.find('div', attrs={'class': 'example'}).findAll(text=True)))
        if len(example):
            example = ", Eg: " + example
        return ("%s: %s%s" % (term, define, example)).encode('utf-8')
    except Exception:
        # Best-effort: log and report "no result" on any network/parse error
        Log.error()
        return None