def add(self, channel, listoftxt):
    """Append *listoftxt* to the cached output queue of *channel*.

    The queue is stored under key ``outcache-<channel>`` via the
    module-level get/set cache helpers, with a 3600 second TTL.
    """
    key = "outcache-" + str(channel).lower()
    # An empty/missing cache entry starts a fresh list.
    queue = get(key) or []
    queue.extend(listoftxt)
    set(key, queue, 3600)
def get_tinyurl(url):
    """Grab the tinyurl variants for *url*.

    Checks the 'tinyurl' cache namespace first (entries are stored
    JSON-encoded); otherwise POSTs the url to the service configured in
    plugcfg.url and scrapes the resulting page.

    Returns a list of shortened-url strings, or None on failure.
    """
    res = get(url, namespace='tinyurl')
    logging.debug('tinyurl - cache - %s' % str(res))
    if res and res[0] == '[':
        # cached value is a JSON-encoded list
        return json.loads(res)
    postarray = [
        ('submit', 'submit'),
        ('url', url),
    ]
    postdata = urllib.parse.urlencode(postarray)
    req = urllib.request.Request(url=plugcfg.url, data=bytes(postdata, "utf-8"))
    req.add_header('User-agent', useragent())
    try:
        res = urllib.request.urlopen(req).readlines()
    except urllib.error.HTTPError as e:
        # FIX: HTTPError is a subclass of URLError, so it must be caught
        # first — the original order made this branch unreachable.
        logging.warning('tinyurl - %s - HTTP error: %s' % (url, str(e)))
        return
    except urllib.error.URLError as e:
        logging.warning('tinyurl - %s - URLError: %s' % (url, str(e)))
        return
    except Exception as ex:
        if "DownloadError" in str(ex):
            # FIX: original referenced str(e) here, but 'e' is unbound in
            # this branch and would raise NameError.
            logging.warning('tinyurl - %s - DownloadError: %s' % (url, str(ex)))
        else:
            handle_exception()
        return
    urls = []
    for line in res:
        l = str(line, "utf-8")
        if l.startswith('<blockquote><b>'):
            # strip markup and the trailing "[Open ...]" link text
            urls.append(striphtml(l.strip()).split('[Open')[0])
    if len(urls) == 3:
        # the page's first match is boilerplate, not a shortened url
        urls.pop(0)
    set(url, json.dumps(urls), namespace='tinyurl')
    return urls
def get(self, channel):
    """Pop one item from *channel*'s outcache.

    Returns a ``(txt, size)`` tuple where *txt* is the popped item (or
    None when the queue is empty) and *size* is the number of items left.
    """
    key = "outcache-" + str(channel).lower()
    # the cache helper 'get' is shadowed by this method's name
    global get
    data = get(key)
    txt = None
    if data:
        try:
            txt = data.pop(0)
            set(key, data, 3600)
        except (KeyError, IndexError):
            txt = None
    size = len(data) if data else 0
    return (txt, size)
def get_tinyurl(url):
    """Grab the tinyurl variants for *url*.

    Raises URLNotEnabled when the url subsystem is disabled. Checks the
    'tinyurl' cache namespace first (entries are stored JSON-encoded);
    otherwise POSTs the url to *posturl* and scrapes the resulting page.

    Returns a list of shortened-url strings, or None on failure.
    """
    from tl.utils.url import enabled
    if not enabled:
        raise URLNotEnabled
    res = get(url, namespace="tinyurl")
    logging.debug("tinyurl - cache - %s" % str(res))
    if res and res[0] == "[":
        # cached value is a JSON-encoded list
        return json.loads(res)
    postarray = [("submit", "submit"), ("url", url)]
    postdata = urllib.parse.urlencode(postarray)
    postbytes = bytes(postdata, "utf-8")
    req = urllib.request.Request(url=posturl, data=postbytes)
    req.add_header("User-agent", useragent())
    try:
        res = urllib.request.urlopen(req).readlines()
    except urllib.error.HTTPError as e:
        # FIX: HTTPError is a subclass of URLError, so it must be caught
        # first — the original order made this branch unreachable.
        logging.warning("tinyurl - %s - HTTP error: %s" % (url, str(e)))
        return
    except urllib.error.URLError as e:
        logging.warning("tinyurl - %s - URLError: %s" % (url, str(e)))
        return
    except Exception as ex:
        if "DownloadError" in str(ex):
            # FIX: original referenced str(e) here, but 'e' is unbound in
            # this branch and would raise NameError.
            logging.warning("tinyurl - %s - DownloadError: %s" % (url, str(ex)))
        else:
            handle_exception()
        return
    urls = []
    for line in res:
        bline = str(line, "utf-8")
        if bline.startswith("<blockquote><b>"):
            # strip markup and the trailing "[Open ...]" link text
            urls.append(striphtml(bline.strip()).split("[Open")[0])
    if len(urls) == 3:
        # the page's first match is boilerplate, not a shortened url
        urls.pop(0)
    set(url, json.dumps(urls), namespace="tinyurl")
    return urls
def copy(self, channel):
    """Return *channel*'s whole outcache list without consuming it."""
    # the cache helper 'get' lives at module level
    global get
    key = "outcache-" + str(channel).lower()
    return get(key)