def get_raw_gist(self, gistID):
    url = "https://raw.github.com/gist/%s" % (gistID)
    try:
        return get(url).read()
    except (URLError, HTTPError) as e:
        log.exception('Failed to access URL %s : %s' % (url, e))
        return ''
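# The snippets here rely on a get() helper and a module-level `log` that are
# defined elsewhere in their projects. A minimal stand-in for get() is
# sketched below, assuming it is just a thin urllib wrapper that returns an
# object with .read() and raises urllib.error exceptions; the real helper may
# differ (the check() snippet further down calls it with extra positional
# arguments that this sketch does not model).
from urllib.request import Request, urlopen

def get(url, timeout=10):
    # Plain GET; the caller is expected to call .read() on the response.
    return urlopen(Request(url, headers={'User-Agent': 'snippet-fetch'}),
                   timeout=timeout)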
def check(url, path):
    """A HEAD request to URL. If HEAD is not allowed, we try GET."""
    try:
        get(url, timeout=10)
    except HTTPError as e:
        if e.code == 405:
            try:
                get(url, path, 'GET', True)
            except URLError as e:
                print(' ' + yellow(e.reason), url)
                print(white(' -- ' + path))
        else:
            print(' ' + red(e.code), url)
            print(white(' -- ' + path))
    except URLError as e:
        print(' ' + yellow(e.reason), url)
        print(white(' -- ' + path))
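# check() also assumes ANSI color helpers (yellow, red, white). A minimal
# sketch of what such helpers could look like; the escape codes and helper
# names here are assumptions, not taken from the original project.
def _ansi(code, text):
    return '\033[%dm%s\033[0m' % (code, text)

def yellow(text): return _ansi(33, str(text))
def red(text):    return _ansi(31, str(text))
def white(text):  return _ansi(37, str(text))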
def tweet(header, body=None):
    """Easy embedding of Tweets. The Twitter oEmbed API is rate-limited,
    hence we are caching the response per configuration to `.cache/`."""

    oembed = 'https://api.twitter.com/1/statuses/oembed.json'

    args = list(map(str.strip, re.split(r'\s+', header)))
    params = Struct(url=args.pop(0))
    for arg in args:
        k, v = list(map(str.strip, arg.split('=')))
        if k and v:
            v = v.strip('\'')
            params[k] = v

    try:
        with io.open(join(core.cache.cache_dir, 'tweets'), 'rb') as fp:
            cache = pickle.load(fp)
    except (IOError, pickle.PickleError):
        cache = {}

    if params in cache:
        body = cache[params]
    else:
        try:
            body = json.loads(
                requests.get(oembed + '?' + urlencode(params)).read())['html']
        except (requests.HTTPError, requests.URLError):
            log.exception('unable to fetch tweet')
            body = "Tweet could not be fetched"
        except (ValueError, KeyError):
            log.exception('could not parse response')
            body = "Tweet could not be processed"
        else:
            cache[params] = body
            try:
                with io.open(join(core.cache.cache_dir, 'tweets'), 'wb') as fp:
                    pickle.dump(cache, fp, pickle.HIGHEST_PROTOCOL)
            except (IOError, pickle.PickleError):
                log.exception('uncaught exception during pickle.dump')

    return "<div class='embed tweet'>%s</div>" % body
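# Standalone illustration of how tweet() turns its header string into oEmbed
# query parameters: the first whitespace-separated token is the tweet URL and
# every remaining token is a key=value option. A plain dict stands in for the
# project's Struct type, and parse_header() is a hypothetical name used only
# for this sketch.
import re

def parse_header(header):
    args = [s.strip() for s in re.split(r'\s+', header.strip())]
    params = {'url': args.pop(0)}
    for arg in args:
        k, _, v = arg.partition('=')
        if k and v:
            params[k.strip()] = v.strip().strip("'")
    return params

# parse_header("https://twitter.com/jack/status/20 maxwidth=400 hide_media='true'")
# -> {'url': 'https://twitter.com/jack/status/20', 'maxwidth': '400', 'hide_media': 'true'}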
def get_raw_gist(self, gistID):
    url = "https://raw.github.com/gist/%s/" % (gistID)
    return get(url).read()
def get_raw_gist_with_filename(self, gistID, filename):
    url = "https://raw.github.com/gist/%s/%s" % (gistID, filename)
    return get(url).read()
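# Standalone sketch of the same gist fetch using only the standard library,
# with the error handling from the first snippet folded in. Note that
# raw.github.com/gist/<id> is an older URL form; current gists are generally
# served from gist.githubusercontent.com. fetch_raw_gist() is a hypothetical
# name for this sketch, not part of the original code.
from urllib.request import urlopen
from urllib.error import URLError, HTTPError

def fetch_raw_gist(gist_id, filename=''):
    url = "https://raw.github.com/gist/%s/%s" % (gist_id, filename)
    try:
        return urlopen(url, timeout=10).read().decode('utf-8')
    except (HTTPError, URLError):
        return ''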