def gis_search(search, tags=None):
    """Search Google Images for `search` and post a random result.

    Returns None if the search was recently cached or nothing was found,
    an error string if Google keeps failing, otherwise the result of
    safe_post() on a randomly chosen image url.

    NOTE(review): this function is defined twice in this file; the later
    copy shadows this one. One of the definitions should be deleted.
    """
    if cache_search(search):
        return
    params = {
        "v": "1.0",
        "start": "1",  # this can be incremented for more results.
        "q": search,
    }
    url = "https://ajax.googleapis.com/ajax/services/search/images?%s" % (
        urllib.urlencode(params))
    (code, content, resp) = get_page(url)
    if code != 200:
        # google just fails randomly sometimes. let's just blindly retry
        # once.
        (code, content, resp) = get_page(url)
        if code != 200:
            return "google said %s" % str(resp)
    results = json.loads(content)
    try:
        images = [
            i['unescapedUrl'] for i in results['responseData']['results']
        ]
    except (KeyError, TypeError):
        # responseData can be null (quota errors) or missing keys; was a
        # bare except that would also hide unrelated bugs.
        images = None
    if images is not None and len(images) > 0:
        return safe_post(random.choice(images), caption=search,
                         post_type="photo", tags=tags)
def gis_search(search, tags=None):
    """Search Google Images for `search` and post a random result.

    Returns None if the search was recently cached or nothing was found,
    an error string if Google keeps failing, otherwise the result of
    safe_post() on a randomly chosen image url.

    NOTE(review): duplicate definition — an identical gis_search appears
    earlier in this file; this copy shadows it. Delete one of them.
    """
    if cache_search(search):
        return
    params = {
        "v": "1.0",
        "start": "1",  # this can be incremented for more results.
        "q": search,
    }
    url = "https://ajax.googleapis.com/ajax/services/search/images?%s" % (
        urllib.urlencode(params))
    (code, content, resp) = get_page(url)
    if code != 200:
        # google just fails randomly sometimes. let's just blindly retry
        # once.
        (code, content, resp) = get_page(url)
        if code != 200:
            return "google said %s" % str(resp)
    results = json.loads(content)
    try:
        images = [i['unescapedUrl']
                  for i in results['responseData']['results']]
    except (KeyError, TypeError):
        # responseData can be null (quota errors) or missing keys; was a
        # bare except that would also hide unrelated bugs.
        images = None
    if images is not None and len(images) > 0:
        return safe_post(random.choice(images), caption=search,
                         post_type="photo", tags=tags)
def post_to_imgur(url, title=None): imgur_url = None pwds = Passwords() client_id = pwds.getPassword('imgur.clientId') if client_id is None: return imgur_url headers = {} headers['Authorization'] = "Client-ID %s" % client_id params = {} params['image'] = url if title is not None: params['title'] = title params['description'] = title code, content, resp = get_page('https://api.imgur.com/3/image', params, headers) if code == 200: results = json.loads(content) try: imgur_url = results["data"]["link"] except: print "Invalid imgur response", content pass elif code is not None: print "imgur said: %s" % code else: print "get_page returned None?" return imgur_url
def extract_article_text(url): """Uses a web service to extract the text of an article.""" pwds = Passwords() token = pwds.getPassword('apibot.token') params = {} params['token'] = token params['url'] = url params['timeout'] = 20000 # timeout in ms summary_url = 'http://www.diffbot.com/api/article?%s' % urllib.urlencode( params) code, content, resp = util.get_page(summary_url, max_size=512 * 1024) if code != 200: print "Got %s requesting %s" % (str(code), summary_url) return None try: results = json.loads(content) except: print code, content, resp return "The summarizer doesn't like this." if "title" in results: if "text" in results: return "%s\f%s" % (results["title"], results["text"]) else: return results["title"] elif "text" in results: return results["text"] else: print results return None
def shorten_url(url): """Uses a web service to shorten a long url.""" short_url = None pwds = Passwords() token = pwds.getPassword('bitly.token') if random.random() < 0.01: url = random.choice(random_urls) params = { "access_token": token, "longUrl": url, "domain": "j.mp", # bit.ly and bitly.com are also options. } shortener = 'https://api-ssl.bitly.com/v3/shorten?%s' % urllib.urlencode( params) (code, content, resp) = util.get_page(shortener) url = None if code == 200: try: results = json.loads(content) except: print "error loading json from", shortener, content try: url = results["data"]["url"] except: print "unexpected json response from", shortener, results else: print shortener, "returned", code, content return url
def extract_article_text(url): """Uses a web service to extract the text of an article.""" pwds = Passwords() token = pwds.getPassword('apibot.token') params = {} params['token'] = token params['url'] = url params['timeout'] = 20000 # timeout in ms summary_url = 'http://www.diffbot.com/api/article?%s' % urllib.urlencode( params) code, content, resp = util.get_page(summary_url, max_size=512*1024) if code != 200: print "Got %s requesting %s" % (str(code), summary_url) return None try: results = json.loads(content) except: print code, content, resp return "The summarizer doesn't like this." if "title" in results: if "text" in results: return "%s\f%s" % (results["title"], results["text"]) else: return results["title"] elif "text" in results: return results["text"] else: print results return None
def describe_url(url): """Gives a textual description of the content of url.""" code, content, resp = util.get_page(url) if code != 200: print "I tried to look at %s but it told me %s." % (url, str(code)) return if resp.headers["content-type"].startswith("text/html"): return extract_article_text(url) else: msg = None try: msg = "%s, %s bytes" % (resp.headers["content-type"], resp.headers["content-length"]) except KeyError: print "Missing headers for %s" % url return msg