def search(service, query, page=1):
    """Search *service* for *query* and collect results across all pages.

    Args:
        service: key into the module-level ``urls`` mapping.
        query: search string, sent as the ``q`` parameter (100 results per page).
        page: 1-based page number to start fetching from.

    Returns:
        A list with the ``results`` entries of every fetched page.

    Raises:
        UnknownServiceException: if *service* has no configured URLs.
        ServiceFailedException: if the service answers with a status >= 300.
    """
    if service not in urls:
        raise UnknownServiceException(service)
    updates = []
    # Iterate pages instead of recursing: deep result sets would otherwise
    # risk hitting the interpreter's recursion limit.
    while True:
        options = {
            'q': query,
            'rpp': 100,
            'page': page,
        }
        url_parts = {
            'query': urllib.urlencode(options),
            'url': urls[service]['search'],
        }
        res = UrlOpener().open("{url}?{query}".format(**url_parts))
        if res.getcode() >= 300:
            msg = "Unable to fetch: %i" % res.getcode()
            raise ServiceFailedException(msg)
        raw = json.load(res)
        updates.extend(raw['results'])
        # A page shorter than results_per_page is the last one.
        if raw['results_per_page'] != len(raw['results']):
            return updates
        page += 1
def api_call(service, method, options, tries=3): if service not in urls: raise UnknownServiceException(service) url_parts = { 'query': urllib.urlencode(options), 'base_url': urls[service]['api'], 'method': method, } res = UrlOpener().open("{base_url}{method}.json?{query}".format(**url_parts)) # watch rate limit (twitter only) ratelimit = re.search("X-RateLimit-Remaining: ([0-9]+)", str(res.info())) if ratelimit != None: print "remaining API-calls: %s" % ratelimit.group(1) if res.getcode() < 300: return json.load(res) else: if tries > 1 and res.getcode() >= 500: print "ERROR while fetching, retrying" return api_call(service, method, options, tries-1) else: msg = "Unable to fetch: %i" % res.getcode() raise ServiceFailedException(msg)