def _story(self, data):
    """Normalize a Digg story payload into a Gwibber message dict."""
    m = {}
    m["mid"] = str(data["id"])
    m["service"] = "digg"
    m["account"] = self.account["id"]
    m["time"] = data["submit_date"]
    m["text"] = data["title"] + "\n" + data["description"]
    user = data["friends"]["users"][0]
    m["sender"] = {}
    m["sender"]["nick"] = user["name"]
    m["sender"]["id"] = user["name"]
    m["sender"]["image"] = user["icon"]
    m["sender"]["url"] = "http://digg.com/users/%s" % user["name"]
    m["sender"]["is_me"] = user["name"] == self.account["username"]
    if user.get("fullname", 0):
        m["sender"]["name"] = user["fullname"]
    m["url"] = data["link"]
    m["likes"] = {"count": data["diggs"]}
    # NOTE: the original pre-seeded m["html"]/m["content"] with a
    # "<b>%(title)s</b><br />..." template, but both were unconditionally
    # overwritten by the linkify() results below — dead stores, removed.
    m["html"] = util.linkify(m["text"],
        ((util.PARSE_HASH, '#<a class="hash" href="%s#search?q=\\1">\\1</a>' % URL_PREFIX),
         (util.PARSE_NICK, '@<a class="nick" href="%s/\\1">\\1</a>' % URL_PREFIX)))
    m["content"] = util.linkify(m["text"],
        ((util.PARSE_HASH, '#<a class="hash" href="gwibber:/tag?acct=%s&query=\\1">\\1</a>' % m["account"]),
         (util.PARSE_NICK, '@<a class="nick" href="gwibber:/user?acct=%s&name=\\1">\\1</a>' % m["account"])))
    return m
def _common(self, data):
    """Normalize a StatusNet status dict into a Gwibber message dict.

    Returns a (possibly partial) dict; failures are logged, not raised.
    """
    m = {}
    try:
        m["mid"] = str(data["id"])
        m["service"] = "statusnet"
        m["account"] = self.account["id"]
        m["time"] = util.parsetime(data["created_at"])
        m["source"] = data.get("source", False)
        m["text"] = data["text"]
        m["to_me"] = ("@%s" % self.account["username"]) in data["text"]
        m["html"] = util.linkify(m["text"],
            ((util.PARSE_HASH, '#<a class="hash" href="%s#search?q=\\1">\\1</a>' % self.url_prefix),
             (util.PARSE_NICK, '@<a class="nick" href="%s/\\1">\\1</a>' % self.url_prefix)))
        m["content"] = util.linkify(m["text"],
            ((util.PARSE_HASH, '#<a class="hash" href="gwibber:/tag?acct=%s&query=\\1">\\1</a>' % m["account"]),
             (util.PARSE_NICK, '@<a class="nick" href="gwibber:/user?acct=%s&name=\\1">\\1</a>' % m["account"])))
        images = []
        if data.get("attachments", 0):
            for a in data["attachments"]:
                mime = a.get("mimetype", "")
                if mime and mime.startswith("image") and a.get("url", 0):
                    images.append({"src": a["url"], "url": a["url"]})
        images.extend(util.imgpreview(m["text"]))
        if images:
            m["images"] = images
    except Exception:
        # Narrowed from a bare "except:" which also swallowed
        # KeyboardInterrupt/SystemExit.
        log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data)
    return m
def on_bus_message(self, source, author, message, msg_type):
    """Relay a message from another bridge endpoint into this channel."""
    if self == source:
        return
    if msg_type == MsgType.TEXT:
        msg = "[{}] <b>{}:</b> {}".format(source.prefix, author, linkify(message))
    elif msg_type == MsgType.ACTION:
        msg = "[{}] <i><b>{}</b> {}</i>".format(source.prefix, author, linkify(message))
    elif msg_type == MsgType.NICK:
        msg = "[{}] <i><b>{}</b> is now known as <b>{}</b></i>".format(
            source.prefix, author, message)
    elif msg_type == MsgType.JOIN:
        if not self.enable_joinparts:
            return
        msg = "[{}] <b>{}</b> has joined <b>{}</b>".format(
            source.prefix, author, message)
    elif msg_type == MsgType.PART:
        if not self.enable_joinparts:
            return
        msg = "[{}] <b>{}</b> has left <b>{}</b>".format(
            source.prefix, author, message)
    else:
        # Fix: an unrecognized msg_type previously left `msg` unbound and
        # raised UnboundLocalError below; ignore unknown types instead.
        return
    if not self.enable_prefixes:
        # Strip the leading "[prefix] " (prefix plus "[", "]" and a space).
        msg = msg[len(source.prefix) + 3:]
    self.send_text_message(self.channels[self.channel_id], msg)
def _common(self, data):
    """Normalize a Twitter status dict into a Gwibber message dict.

    Returns a (possibly partial) dict; failures are logged, not raised.
    """
    m = {}
    try:
        m["mid"] = str(data["id"])
        m["service"] = "twitter"
        m["account"] = self.account["id"]
        m["time"] = util.parsetime(data["created_at"])
        m["text"] = unescape(data["text"])
        m["to_me"] = ("@%s" % self.account["username"]) in data["text"]
        m["html"] = util.linkify(data["text"],
            ((util.PARSE_HASH, '#<a class="hash" href="%s#search?q=\\1">\\1</a>' % URL_PREFIX),
             (util.PARSE_NICK, '@<a class="nick" href="%s/\\1">\\1</a>' % URL_PREFIX)), escape=False)
        m["content"] = util.linkify(data["text"],
            ((util.PARSE_HASH, '#<a class="hash" href="gwibber:/tag?acct=%s&query=\\1">\\1</a>' % m["account"]),
             (util.PARSE_NICK, '@<a class="nick" href="gwibber:/user?acct=%s&name=\\1">\\1</a>' % m["account"])), escape=False)
        images = util.imgpreview(m["text"])
        if images:
            m["images"] = images
    except Exception:
        # Narrowed from a bare "except:" which also swallowed
        # KeyboardInterrupt/SystemExit.
        log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data)
    return m
def _common(self, data):
    """Normalize a Twitter status dict into a Gwibber message dict.

    Returns a (possibly partial) dict; failures are logged, not raised.
    """
    m = {}
    try:
        m["mid"] = str(data["id"])
        m["service"] = "twitter"
        m["account"] = self.account["id"]
        m["time"] = util.parsetime(data["created_at"])
        m["text"] = unescape(data["text"])
        m["to_me"] = ("@%s" % self.account["username"]) in data["text"]
        m["html"] = util.linkify(
            data["text"],
            ((util.PARSE_HASH, '#<a class="hash" href="%s#search?q=\\1">\\1</a>' % URL_PREFIX),
             (util.PARSE_NICK, '@<a class="nick" href="%s/\\1">\\1</a>' % URL_PREFIX)),
            escape=False)
        m["content"] = util.linkify(
            data["text"],
            ((util.PARSE_HASH, '#<a class="hash" href="gwibber:/tag?acct=%s&query=\\1">\\1</a>' % m["account"]),
             (util.PARSE_NICK, '@<a class="nick" href="gwibber:/user?acct=%s&name=\\1">\\1</a>' % m["account"])),
            escape=False)
        images = util.imgpreview(m["text"])
        if images:
            m["images"] = images
    except Exception:
        # Narrowed from a bare "except:" which also swallowed
        # KeyboardInterrupt/SystemExit.
        log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data)
    return m
def error(self, error, html=None, status=400, data=None, mail=False):
    """Log the error, set the HTTP status, write the body, optionally mail it."""
    logging.info(error, exc_info=True)
    self.response.set_status(status)
    if html:
        body = html
    else:
        body = util.linkify(error)
    self.response.write(body)
    if mail:
        self.mail_me('[Returned HTTP %s to client]\n\n%s' % (status, body))
def test_linkify(self):
    """Run util.linkify over every fixture pair from src/tests/testnet.py."""
    for url, expected in linkify_test_strings:
        actual = util.linkify(url)
        self.expect_equal(actual, expected)
def _message(self, data, profiles):
    """Normalize a Facebook stream post into a Gwibber message dict.

    profiles maps Facebook user ids to profile data; it resolves the
    sender, wall-post targets, and comment authors.
    """
    m = {}
    m["mid"] = str(data["post_id"])
    m["service"] = "facebook"
    m["account"] = self.account["id"]
    m["time"] = int(mx.DateTime.DateTimeFrom(int(data.get("updated_time", data["created_time"]))).gmtime())
    m["url"] = data["permalink"]
    # Fix: posts without a "message" key previously raised KeyError here,
    # even though the branch below already guards with data.get().
    m["to_me"] = ("@%s" % self.account["username"]) in data.get("message", "")
    if data.get("attribution", 0):
        m["source"] = util.strip_urls(data["attribution"]).replace("via ", "")
    if data.get("message", "").strip():
        m["text"] = data["message"]
        m["html"] = util.linkify(data["message"])
        m["content"] = m["html"]
    else:
        m["text"] = ""
        m["html"] = ""
        m["content"] = ""
    if data.get("actor_id", 0) in profiles:
        m["sender"] = self._sender(profiles[data["actor_id"]])
        # Handle target for wall posts with a specific recipient
        if data.get("target_id", 0) in profiles:
            m["sender"]["name"] += u" \u25b8 %s" % (profiles[data["target_id"]]['name'])
    if data.get("likes", {}).get("count", None):
        m["likes"] = {
            "count": data["likes"]["count"],
            "url": data["likes"]["href"],
        }
    if data.get("comments", 0):
        m["comments"] = []
        for item in data["comments"]["comment_list"]:
            if item["fromid"] in profiles:
                m["comments"].append({
                    "text": item["text"],
                    "time": int(mx.DateTime.DateTimeFrom(int(item["time"])).gmtime()),
                    "sender": self._sender(profiles[item["fromid"]]),
                })
    if data.get("attachment", 0):
        if data["attachment"].get("name", 0):
            m["content"] += "<p><b>%s</b></p>" % data["attachment"]["name"]
        if data["attachment"].get("description", 0):
            m["content"] += "<p>%s</p>" % data["attachment"]["description"]
        m["images"] = []
        for a in data["attachment"].get("media", []):
            if a["type"] in ["photo", "video", "link"]:
                if a.get("src", 0):
                    # Relative media paths are served from facebook.com.
                    if a["src"].startswith("/"):
                        a["src"] = "http://facebook.com" + a["src"]
                    m["images"].append({"src": a["src"], "url": a["href"]})
    return m
def error(self, error, html=None, status=400, data=None, mail=False):
    """Log a warning, set the response status, and emit the error body."""
    logging.warning(error, exc_info=True)
    self.response.set_status(status)
    body = html if html else util.linkify(error)
    self.response.write(body)
    if not mail:
        return
    self.mail_me(body)
def handle_command(self, origin, message):
    """Dispatch a bot command (.list / .topic / .prefixes) and reply to origin."""
    args = message.split(' ')
    if args[0] == '.list':
        # Fix: a missing room argument previously raised IndexError.
        users = self.cmd.get_users(args[1]) if len(args) > 1 else None
        if users:
            msg = '<b>Users:</b> ' + ', '.join(users)
        else:
            msg = 'Please enter the prefix of the room to get the user list of.'
    elif args[0] == '.topic':
        topic = self.cmd.get_topic(args[1]) if len(args) > 1 else None
        if topic:
            msg = '<b>Topic:</b> ' + linkify(topic)
        elif topic == "":
            # Fix: this branch was unreachable before ("elif topic:" can never
            # be true when topic == "" is the only remaining truthy-ish case).
            msg = 'That room has no topic.'
        else:
            msg = 'Please enter the prefix of the room to get the topic of.'
    elif args[0] == '.prefixes' or args[0] == '.prefix':
        msg = self.cmd.get_prefixes()
    else:
        # Fix: unknown commands previously left `msg` unbound and raised
        # UnboundLocalError on send; ignore them instead.
        return
    self.send_text_message(origin, msg)
def test_linkify_pretty(self):
    """linkify(pretty=True) shortens displayed link text to max_length."""
    lp = lambda url: util.linkify(url, pretty=True, max_length=6)
    self.assertEqual('', lp(''))
    self.assertEqual('asdf qwert', lp('asdf qwert'))
    # Fix: assertEquals is a long-deprecated alias of assertEqual.
    self.assertEqual('x <a href="http://foo">foo</a> y', lp('x http://foo y'))
    self.assertEqual('x <a href="http://www.foo/baz/baj">foo/ba...</a> y',
                     lp('x http://www.foo/baz/baj y'))
def error(self, error, html=None, status=400, data=None, report=False, **kwargs):
    """Log the error with a stack trace, write it to the response, optionally report."""
    logging.info(error, stack_info=True)
    self.response.set_status(status)
    error = html or util.linkify(error)
    self.response.write(error)
    if report:
        self.report_error(error)
def error(self, error, html=None, status=400, data=None, log_exception=True, mail=True):
    """Log (optionally with the current exception), write the error, optionally mail it."""
    exc = sys.exc_info() if log_exception else None
    logging.error(error, exc_info=exc)
    self.response.set_status(status)
    body = util.linkify(html or error)
    self.response.write(body)
    if mail:
        self.mail_me(body)
def get(self):
    """URL parameters:
      start_time: float, seconds since the epoch
      key: string that should appear in the first app log
    """
    start_time = util.get_required_param(self, 'start_time')
    if not util.is_float(start_time):
        self.abort(400, "Couldn't convert start_time to float: %r" % start_time)
    start_time = float(start_time)

    key = util.get_required_param(self, 'key')
    if not util.is_base64(key):
        self.abort(400, 'key is not base64: %r' % key)
    key = urllib.unquote(key)

    # the propagate task logs the poll task's URL, which includes the source
    # entity key as a query param. exclude that with this heuristic.
    key_re = re.compile('[^=]' + key)

    self.response.headers['Content-Type'] = 'text/html; charset=utf-8'

    # Scan a two-minute window of request logs for one whose first app-log
    # lines contain the key, then render that request's logs as HTML.
    offset = None
    for log in logservice.fetch(start_time=start_time, end_time=start_time + 120,
                                offset=offset, include_app_logs=True,
                                version_ids=['2', '3', '4', '5', '6', '7']):
        # Only look at the first few app-log lines of each request.
        first_lines = '\n'.join([
            line.message.decode('utf-8')
            for line in log.app_logs[:min(10, len(log.app_logs))]
        ])
        if log.app_logs and key_re.search(first_lines):
            # found it! render and return
            self.response.out.write("""\
<html>
<body style="font-family: monospace; white-space: pre">
""")
            self.response.out.write(sanitize(log.combined))
            self.response.out.write('<br /><br />')
            for a in log.app_logs:
                msg = a.message.decode('utf-8')
                # don't sanitize poll task URLs since they have a key= query param
                msg = linkify_datastore_keys(
                    util.linkify(
                        cgi.escape(msg if msg.startswith(
                            'Created by this poll:') else sanitize(msg))))
                self.response.out.write(
                    '%s %s %s<br />' % (datetime.datetime.utcfromtimestamp(a.time),
                                        LEVELS[a.level], msg.replace('\n', '<br />')))
            self.response.out.write('</body>\n</html>')
            return
        # NOTE(review): offset is updated but never re-passed into a new
        # fetch() call here — presumably fetch() paginates internally; confirm.
        offset = log.offset

    self.response.out.write('No log found!')
def test_linkify_pretty(self):
    """linkify(pretty=True) shortens displayed link text to max_length."""
    lp = lambda url: util.linkify(url, pretty=True, max_length=6)
    self.assertEqual('', lp(''))
    self.assertEqual('asdf qwert', lp('asdf qwert'))
    # Fix: assertEquals is a long-deprecated alias of assertEqual.
    self.assertEqual('x <a href="http://foo.co">foo.co</a> y',
                     lp('x http://foo.co y'))
    self.assertEqual('x <a href="http://www.foo.ly/baz/baj">foo.ly...</a> y',
                     lp('x http://www.foo.ly/baz/baj y'))
    self.assertEqual('x <a href="http://foo.co/bar?baz=baj#biff">foo.co...</a> y',
                     lp('x http://foo.co/bar?baz=baj#biff y'))
def _message(self, data):
    """Normalize a FriendFeed entry into a Gwibber message dict."""
    m = {
        "mid": data["id"],
        "service": "friendfeed",
        "account": self.account["id"],
        "time": util.parsetime(data["published"]),
        "source": data.get("via", {}).get("name", None),
        "text": data["title"],
        "html": util.linkify(data["title"]),
        "content": util.linkify(data["title"]),
        "url": data["link"],
        "sender": self._sender(data["user"]),
    }
    if data.get("service", 0):
        m["origin"] = {
            "id": data["service"]["id"],
            "name": data["service"]["name"],
            "icon": data["service"]["iconUrl"],
            "url": data["service"]["profileUrl"],
        }
    if data.get("likes", 0):
        m["likes"] = {"count": len(data["likes"])}
    if data.get("comments", 0):
        m["comments"] = []
        # Only surface the three most recent comments.
        for item in data["comments"][-3:]:
            m["comments"].append({
                "text": item["body"],
                "time": util.parsetime(item["date"]),
                "sender": self._sender(item["user"]),
            })
    # Fix: entries with no "media" key previously raised KeyError here —
    # every other optional field in this payload is accessed via .get().
    for i in data.get("media", []):
        if i.get("thumbnails", 0):
            m["images"] = []
            for t in i["thumbnails"]:
                m["images"].append({"src": t["url"], "url": i["link"]})
    if data.get("geo", 0):
        m["location"] = data["geo"]
    return m
def __save_layouts(self):
    # Cycle through every screen layout with ":layout next", dumping each
    # one to the save dir, until we arrive back at the layout we started on.
    homelayout, homelayoutname = self.get_layout_number()
    layoutname = homelayoutname
    if homelayout == -1:
        out("No layouts to save. Create layouts with \":layout new\"")
        return False
    currentlayout = homelayout
    # loop_exit_allowed lets the loop body run at least once before the
    # "back at home layout" exit condition can fire.
    loop_exit_allowed = False
    while currentlayout != homelayout or not loop_exit_allowed:
        loop_exit_allowed = True
        sys.stdout.write("%s(%s); " % (currentlayout, layoutname))
        # Dump the layout and its window layout, then advance to the next one.
        self.command_at(False, 'eval \'layout dump \"%s\"\' \'dumpscreen layout \"%s\"\' \'layout next\'' % (
            os.path.join(self.basedir, self.savedir, "layout_" + currentlayout + "_" + layoutname),
            os.path.join(self.basedir, self.savedir, "winlayout_" + currentlayout + "_" + layoutname)))
        currentlayout, layoutname = self.get_layout_number()
    # Mark the home layout's dump as the most recent one.
    linkify(os.path.join(self.basedir, self.savedir),
            "layout_" + homelayout + "_" + homelayoutname, "last_layout")
    return True
def _common(self, data):
    """Normalize an Identi.ca status dict into a Gwibber message dict.

    Returns a (possibly partial) dict; failures are logged, not raised.
    """
    m = {}
    try:
        m["mid"] = str(data["id"])
        m["service"] = "identica"
        m["account"] = self.account["id"]
        m["time"] = util.parsetime(data["created_at"])
        m["source"] = data.get("source", False)
        m["text"] = data["text"]
        m["to_me"] = ("@%s" % self.account["username"]) in data["text"]
        m["html"] = util.linkify(
            m["text"],
            ((util.PARSE_HASH, '#<a class="hash" href="%s#search?q=\\1">\\1</a>' % URL_PREFIX),
             (util.PARSE_NICK, '@<a class="nick" href="%s/\\1">\\1</a>' % URL_PREFIX)))
        m["content"] = util.linkify(
            m["text"],
            ((util.PARSE_HASH, '#<a class="hash" href="gwibber:/tag?acct=%s&query=\\1">\\1</a>' % m["account"]),
             (util.PARSE_NICK, '@<a class="nick" href="gwibber:/user?acct=%s&name=\\1">\\1</a>' % m["account"])))
        images = []
        if data.get("attachments", 0):
            for a in data["attachments"]:
                mime = a.get("mimetype", "")
                if mime and mime.startswith("image") and a.get("url", 0):
                    images.append({"src": a["url"], "url": a["url"]})
        images.extend(util.imgpreview(m["text"]))
        if images:
            m["images"] = images
    except Exception:
        # Narrowed from a bare "except:" which also swallowed
        # KeyboardInterrupt/SystemExit.
        log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data)
    return m
def __save_layouts(self):
    # Write a screen command file that selects and dumps every non-excluded
    # layout, source it, then clean up and symlink the home layout dump.
    (homelayout, homelayoutname) = self.get_layout_number()
    findir = sc.datadir
    if homelayout == -1:
        sys.stderr.write("No layouts to save.\n")
        return False
    path_layout = os.path.join(findir, "load_layout")
    oflayout = open(path_layout, "w")
    ex_lay = []  # layouts skipped because they matched self.excluded_layouts
    for lay in sc.gen_layout_info(self, sc.dumpscreen_layout_info(self)):
        try:
            num = lay[0]
            title = lay[1]
        except:
            # layouts may have no title
            title = ""
        if self.excluded_layouts and (num in self.excluded_layouts or title in self.excluded_layouts):
            ex_lay.append(lay)
        else:
            sys.stdout.write("%s(%s); " % (num, title))
            # One select/dump/dumpscreen stanza per layout.
            oflayout.write(
                """layout select %s
layout dump \"%s\"
dumpscreen layout \"%s\"
"""
                % (num, os.path.join(findir, "layout_" + num), os.path.join(findir, "winlayout_" + num))
            )
    # Return to the layout that was active when we started.
    oflayout.write("layout select %s\n" % homelayout)
    oflayout.close()
    self.source(path_layout)
    util.remove(path_layout)
    linkify(findir, "layout_" + homelayout, "last_layout")
    if ex_lay:
        sys.stdout.write(
            """
Excluded layouts: %s"""
            % str(ex_lay)
        )
    out("")
    return True
def _story(self, data):
    """Normalize a Digg story payload into a Gwibber message dict."""
    m = {}
    m["mid"] = str(data["id"])
    m["service"] = "digg"
    m["account"] = self.account["id"]
    m["time"] = data["submit_date"]
    m["text"] = data["title"] + "\n" + data["description"]
    user = data["friends"]["users"][0]
    m["sender"] = {}
    m["sender"]["nick"] = user["name"]
    m["sender"]["id"] = user["name"]
    m["sender"]["image"] = user["icon"]
    m["sender"]["url"] = "http://digg.com/users/%s" % user["name"]
    m["sender"]["is_me"] = user["name"] == self.account["username"]
    if user.get("fullname", 0):
        m["sender"]["name"] = user["fullname"]
    m["url"] = data["link"]
    m["likes"] = {"count": data["diggs"]}
    # NOTE: the original pre-seeded m["html"]/m["content"] with a
    # "<b>%(title)s</b><br />..." template, but both were unconditionally
    # overwritten by the linkify() results below — dead stores, removed.
    m["html"] = util.linkify(
        m["text"],
        (
            (util.PARSE_HASH, '#<a class="hash" href="%s#search?q=\\1">\\1</a>' % URL_PREFIX),
            (util.PARSE_NICK, '@<a class="nick" href="%s/\\1">\\1</a>' % URL_PREFIX),
        ),
    )
    m["content"] = util.linkify(
        m["text"],
        (
            (util.PARSE_HASH, '#<a class="hash" href="gwibber:/tag?acct=%s&query=\\1">\\1</a>' % m["account"]),
            (util.PARSE_NICK, '@<a class="nick" href="gwibber:/user?acct=%s&name=\\1">\\1</a>' % m["account"]),
        ),
    )
    return m
def test_linkify(self):
    """linkify() must leave plain/already-linked text alone and anchor bare URLs."""
    # Inputs linkify must return unchanged.
    for unchanged in (
            '',
            'x.c',
            'x.computer',
            'asdf qwert',
            'X <a class="x" href="http://foo.com" >xyz</a> Y',
            '<a href="http://foo.com" class="x">xyz</a> Y',
            "X <a href='http://foo.com' />",
            'asdf <a href="http://foo.com">foo</a> qwert ',
            # only a-z0-9 allowed in domain names
            u'http://aÇb.com'):
        self.assertEqual(unchanged, util.linkify(unchanged))
    # (expected, input) pairs where linkify should add anchors.
    for expected, input in (
            ('<a href="http://foo.com">http://foo.com</a>', 'http://foo.com'),
            ('<a href="http://foo.com/">http://foo.com/</a>', 'http://foo.com/'),
            ('<a href="http://foo.com/y">http://foo.com/y</a>', 'http://foo.com/y'),
            ('<a href="http://www.foo">www.foo</a>', 'www.foo'),
            ('<a href="http://www.foo:80">www.foo:80</a>', 'www.foo:80'),
            ('<a href="http://www.foo:80/x">www.foo:80/x</a>', 'www.foo:80/x'),
            ('asdf <a href="http://foo.com">http://foo.com</a> qwert <a class="x" href="http://foo.com" >xyz</a>',
             'asdf http://foo.com qwert <a class="x" href="http://foo.com" >xyz</a>'),
            ('asdf <a href="http://t.co/asdf">http://t.co/asdf</a> qwert',
             'asdf http://t.co/asdf qwert'),
            ('<a href="http://foo.co/?bar&baz">http://foo.co/?bar&baz</a>',
             'http://foo.co/?bar&baz'),
            ('<a href="http://www.foo.com">www.foo.com</a>', 'www.foo.com'),
            ('a <a href="http://www.foo.com">www.foo.com</a> b', 'a www.foo.com b'),
            ('asdf <a href="http://foo.com">foo</a> qwert '
             '<a href="http://www.bar.com">www.bar.com</a>',
             'asdf <a href="http://foo.com">foo</a> qwert www.bar.com'),
            # https://github.com/snarfed/bridgy/issues/325#issuecomment-67923098
            ('<a href="https://github.com/pfefferle/wordpress-indieweb-press-this">https://github.com/pfefferle/wordpress-indieweb-press-this</a> >',
             'https://github.com/pfefferle/wordpress-indieweb-press-this >'),
            ('interesting how twitter auto-links it <a href="http://example.com/a_link_(with_parens)">http://example.com/a_link_(with_parens)</a> vs. '
             '(<a href="http://example.com/a_link_without">http://example.com/a_link_without</a>)',
             'interesting how twitter auto-links it http://example.com/a_link_(with_parens) vs. (http://example.com/a_link_without)'),
            ('links separated by punctuation <a href="http://foo.com">http://foo.com</a>, <a href="http://bar.com/">http://bar.com/</a>; <a href="http://baz.com/?s=query">http://baz.com/?s=query</a>; did it work?',
             'links separated by punctuation http://foo.com, http://bar.com/; http://baz.com/?s=query; did it work?'),
            ):
        self.assertEqual(expected, util.linkify(input))
def error(self, error, html=None, status=400, data=None, report=False, **kwargs):
    """Log a publish error, flash it to the user, and optionally report it."""
    logging.info(f'publish: {error}')
    body = html if html else util.linkify(error)
    flash(f'{body}')
    if report:
        self.report_error(body, status=status)
def error(self, error, html=None, status=400, data=None, report=False, **kwargs):
    """Log the error, surface it via self.messages, and optionally report it."""
    logging.info(error)
    body = html or util.linkify(error)
    self.messages.add('%s' % body)
    if report:
        self.report_error(body)
def error(self, error, html=None, status=400, data=None, mail=False, **kwargs):
    """Log the error, surface it via self.messages, and optionally mail it to me."""
    logging.info(error, exc_info=True)
    if html:
        error = html
    else:
        error = util.linkify(error)
    self.messages.add('%s' % error)
    if mail:
        self.mail_me(error)
def __save_layouts(self):
    """Dump every non-excluded screen layout via a generated command file,
    then restore the originally selected layout.

    Returns True on success, False when there is nothing to save.
    """
    (homelayout, homelayoutname) = self.get_layout_number()
    findir = sc.datadir
    if homelayout == -1:
        sys.stderr.write("No layouts to save.\n")
        return False
    path_layout = os.path.join(findir, "load_layout")
    ex_lay = []  # layouts skipped because they matched self.excluded_layouts
    # Fix: use a context manager so the command file is closed even if a
    # write fails (the original leaked the handle on error).
    with open(path_layout, "w") as oflayout:
        for lay in sc.gen_layout_info(self, sc.dumpscreen_layout_info(self)):
            try:
                num = lay[0]
                title = lay[1]
            except Exception:  # narrowed from a bare except; layouts may lack a title
                title = ""
            if self.excluded_layouts and (num in self.excluded_layouts or title in self.excluded_layouts):
                ex_lay.append(lay)
            else:
                sys.stdout.write("%s(%s); " % (num, title))
                oflayout.write('''layout select %s
layout dump \"%s\"
dumpscreen layout \"%s\"
''' % (num, os.path.join(findir, "layout_" + num), os.path.join(findir, "winlayout_" + num)))
        # Return to the layout that was active when we started.
        oflayout.write('layout select %s\n' % homelayout)
    self.source(path_layout)
    util.remove(path_layout)
    linkify(findir, "layout_" + homelayout, "last_layout")
    if ex_lay:
        sys.stdout.write("""
Excluded layouts: %s""" % str(ex_lay))
    out("")
    return True
def test_linkify(self):
    """linkify() must leave plain/already-linked text alone and anchor bare URLs."""
    # Inputs linkify must return unchanged.
    for unchanged in (
            '',
            'x.com',
            'asdf qwert',
            'X <a class="x" href="http://foo.com" >xyz</a> Y',
            '<a href="http://foo.com" class="x">xyz</a> Y',
            "X <a href='http//foo.com' />",
            'asdf <a href="http://foo.com">foo</a> qwert '):
        self.assertEqual(unchanged, util.linkify(unchanged))
    # (expected, input) pairs where linkify should add anchors.
    for expected, input in (
            ('<a href="http://foo.com">http://foo.com</a>', 'http://foo.com'),
            ('<a href="http://foo.com/">http://foo.com/</a>', 'http://foo.com/'),
            ('<a href="http://foo.com/y">http://foo.com/y</a>', 'http://foo.com/y'),
            ('<a href="http://www.foo">www.foo</a>', 'www.foo'),
            ('<a href="http://www.foo:80">www.foo:80</a>', 'www.foo:80'),
            ('<a href="http://www.foo:80/x">www.foo:80/x</a>', 'www.foo:80/x'),
            ('asdf <a href="http://foo.com">http://foo.com</a> qwert <a class="x" href="http://foo.com" >xyz</a>',
             'asdf http://foo.com qwert <a class="x" href="http://foo.com" >xyz</a>'),
            ('asdf <a href="http://t.co/asdf">http://t.co/asdf</a> qwert',
             'asdf http://t.co/asdf qwert'),
            ('<a href="http://foo?bar&baz">http://foo?bar&baz</a>',
             'http://foo?bar&baz'),
            ('<a href="http://www.foo.com">www.foo.com</a>', 'www.foo.com'),
            ('a <a href="http://www.foo.com">www.foo.com</a> b', 'a www.foo.com b'),
            ('asdf <a href="http://foo.com">foo</a> qwert '
             '<a href="http://www.bar.com">www.bar.com</a>',
             'asdf <a href="http://foo.com">foo</a> qwert www.bar.com'),
            (u'<a href="http://aÇb">http://aÇb</a>',  # unicode char
             u'http://aÇb'),
            # https://github.com/snarfed/bridgy/issues/325#issuecomment-67923098
            ('<a href="https://github.com/pfefferle/wordpress-indieweb-press-this">https://github.com/pfefferle/wordpress-indieweb-press-this</a> >',
             'https://github.com/pfefferle/wordpress-indieweb-press-this >'),
            ('interesting how twitter auto-links it <a href="http://example.com/a_link_(with_parens)">http://example.com/a_link_(with_parens)</a> vs. '
             '(<a href="http://example.com/a_link_without">http://example.com/a_link_without</a>)',
             'interesting how twitter auto-links it http://example.com/a_link_(with_parens) vs. (http://example.com/a_link_without)'),
            ('links separated by punctuation <a href="http://foo.com">http://foo.com</a>, <a href="http://bar.com/">http://bar.com/</a>; <a href="http://baz.com/?s=query">http://baz.com/?s=query</a>; did it work?',
             'links separated by punctuation http://foo.com, http://bar.com/; http://baz.com/?s=query; did it work?'),
            ):
        self.assertEqual(expected, util.linkify(input))
def post(self):
    """Kick off the Mastodon OAuth flow for the user-supplied instance."""
    feature = self.request.get('feature')
    scopes = PUBLISH_SCOPES if feature == 'publish' else LISTEN_SCOPES
    start_cls = util.oauth_starter(StartHandler).to('/mastodon/callback', scopes=scopes)
    start = start_cls(self.request, self.response)
    instance = util.get_required_param(self, 'instance')
    try:
        self.redirect(start.redirect_url(instance=instance))
    except ValueError as e:
        # Bad or unreachable instance hostname: tell the user and bounce back.
        logging.warning('Bad Mastodon instance', exc_info=True)
        self.messages.add(util.linkify(unicode(e), pretty=True))
        return self.redirect(self.request.path)
def redirect_url(self, *args, **kwargs):
    """Build the Mastodon OAuth redirect URL for the requested instance."""
    features = (request.form.get('feature') or '').split(',')
    wanted_scopes = PUBLISH_SCOPES if 'publish' in features else LISTEN_SCOPES
    starter = util.oauth_starter(StartBase)('/mastodon/callback', scopes=wanted_scopes)
    try:
        return starter.redirect_url(*args, instance=request.form['instance'], **kwargs)
    except ValueError as e:
        # Bad or unreachable instance hostname: flash and bounce back.
        logger.warning('Bad Mastodon instance', exc_info=True)
        flash(util.linkify(str(e), pretty=True))
        redirect(request.path)
def get(self):
    """URL parameters:
      start_time: float, seconds since the epoch
      key: string that should appear in the first app log
    """
    start_time = util.get_required_param(self, 'start_time')
    if not util.is_float(start_time):
        self.abort(400, "Couldn't convert start_time to float: %r" % start_time)
    start_time = float(start_time)

    key = util.get_required_param(self, 'key')
    if not util.is_base64(key):
        self.abort(400, 'key is not base64: %r' % key)
    key = urllib.unquote(key)

    # the propagate task logs the poll task's URL, which includes the source
    # entity key as a query param. exclude that with this heuristic.
    key_re = re.compile('[^=]' + key)

    self.response.headers['Content-Type'] = 'text/html; charset=utf-8'

    # Scan a two-minute window of request logs for one whose first app-log
    # lines contain the key, then render that request's logs as HTML.
    offset = None
    for log in logservice.fetch(start_time=start_time, end_time=start_time + 120,
                                offset=offset, include_app_logs=True,
                                version_ids=['2', '3', '4', '5', '6', '7']):
        # Only look at the first few app-log lines of each request.
        first_lines = '\n'.join([line.message.decode('utf-8') for line in
                                 log.app_logs[:min(10, len(log.app_logs))]])
        if log.app_logs and key_re.search(first_lines):
            # found it! render and return
            self.response.out.write("""\
<html>
<body style="font-family: monospace; white-space: pre">
""")
            self.response.out.write(sanitize(log.combined))
            self.response.out.write('<br /><br />')
            for a in log.app_logs:
                msg = a.message.decode('utf-8')
                # don't sanitize poll task URLs since they have a key= query param
                msg = linkify_datastore_keys(util.linkify(cgi.escape(
                    msg if msg.startswith('Created by this poll:') else sanitize(msg))))
                self.response.out.write('%s %s %s<br />' %
                                        (datetime.datetime.utcfromtimestamp(a.time),
                                         LEVELS[a.level], msg.replace('\n', '<br />')))
            self.response.out.write('</body>\n</html>')
            return
        # NOTE(review): offset is updated but never re-passed into a new
        # fetch() call here — presumably fetch() paginates internally; confirm.
        offset = log.offset

    self.response.out.write('No log found!')
def at_linkified_text(self):
    """Return self.text linkified, with @-mentions wrapped via namelink."""
    chunks = (piece for piece in at_someone.split(self.text) if piece)
    return u''.join(namelink(linkify(chunk)) for chunk in chunks)
def d_linkified_text(self):
    """Return self.text with bare URLs converted to anchor tags."""
    return linkify(self.text)
def error(self, error, html=None, status=400, data=None, mail=False):
    """Log the error, surface it via self.messages, and mail it when asked
    (or always, temporarily, for GitHub sources)."""
    logging.info(error, exc_info=True)
    body = html if html else util.linkify(error)
    self.messages.add('%s' % body)
    if mail or isinstance(self.source, GitHub):  # temporary!
        self.mail_me(body)
def old_twitter_linkify(s):
    """Linkify URLs, @-mentions, and hashtags in tweet text s."""
    text = preserve_newlines(s)
    parts = filter(None, at_someone.split(text))
    linked = ''.join(namelink(linkify(part)) for part in parts)
    return hashtag_linkify(linked)
def error(self, error, html=None, status=400, data=None, mail=False):
    """Log a warning, surface the error via self.messages, optionally mail it."""
    logging.warning(error, exc_info=True)
    body = util.linkify(error) if not html else html
    self.messages.add('%s' % body)
    if mail:
        self.mail_me(body)
def GetInfo(contact, showprofile=False, showhide=True, overflow_hidden=True):  #showicon=True):
    # Build the infobox HTML shown for a buddy-list contact: service icon,
    # alias, account name, online/idle/away times, status, and profile.
    # NOTE(review): reconstructed from collapsed source — whitespace inside
    # the CSS/HTML template strings is layout-insignificant but may not match
    # the original byte-for-byte.
    css = '''\
table{ table-layout: fixed; }
body{ word-wrap: break-word; %s }
div{ overflow: hidden; }
''' % ('overflow: hidden' if overflow_hidden else '') + LINK_CSS + skin.get_css()
    no_icon_path = skin.get('BuddiesPanel.BuddyIcons.NoIcon').path.url()
    constanttop = u'''\
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<style>
%(css)s
</style>
<script type="text/javascript">
/* used to replace missing or BMP buddy icons with the default digsby one */
function imgError(img) {
img.onerror = "";
img.src = "%(no_icon_path)s";
}
</script>
</head>
<body><div id="content"><TABLE WIDTH=100%% cellpadding=0 border=0><TR><TD valign=top>
''' % dict(css=css, no_icon_path=no_icon_path)
    constantmiddle = u'</TD><TD width="68" valign="top" align="center">'
    constantbottom = u'</TD></TR></TABLE></div></body></html>'
    s = contact.serviceicon
    if contact.service == 'digsby' and getattr(contact, 'iswidget', False):
        s = 'widget'
    ico = skin_get('serviceicons.' + s) if isinstance(s, basestring) else s
    # Absolutely-positioned 16x16 service icon in the top-left corner.
    servico = u''.join([u'<div style="white-space: nowrap; position: relative;"><div style = "position: absolute; left: 0px; top: 0px;">',
                        GenBitmapHTML(ico.path.url(), 16, 16),
                        u'</div>'])
    alias = contact.alias
    name = u''.join([u'<div style="white-space: normal; overflow: hidden; word-wrap: break-word; min-height: 20; margin: 0 0 0 20;">',
                     FontTagify(escape(alias), 'header'),
                     u'</div></div>'])
    if s == 'widget':
        # Web-widget visitors get a reduced infobox: location, IP, time on page.
        location = GetLocationFromIP(contact)  #odict
        moreinfo = u''
        if location:
            moreinfo = u''.join(['<div style="white-space: nowrap; width: 100%;">',
                                 TitleHTML(_(u'Location:')),
                                 BodyHTML(', '.join(location.values())),
                                 '</div>'])
        ip = u''.join(['<div style="white-space: nowrap; width: 100%;">',
                       TitleHTML(_(u'IP Address:')),
                       '<a href="http://www.geoiptool.com/en/?IP=%s">' % contact.ip,
                       BodyHTML(contact.ip),
                       '</a>',
                       '</div>'])
        time_ = u''.join(['<div style="white-space: nowrap;">',
                          TitleHTML(_(u'Time on Page:')),
                          DetailHTML(GenTimedString(contact.online_time)),
                          '</div>'])
        html = u''.join([constanttop, servico, name, time_, ip, moreinfo, constantbottom])
        return html
    nicename = contact.nice_name
    if nicename != alias:
        # Show the raw account name only when it differs from the alias.
        username = u''.join(['<div style="white-space: nowrap;">',
                             TitleHTML(protocols[contact.service].username_desc + u':'),
                             BodyHTML(nicename),
                             '</div>'])
    else:
        username = ''
    profile = ProfileHTML(contact.pretty_profile) if showprofile else u''
    times = ''
    if contact.service in ('icq', 'aim') and contact.online_time:
        times = u''.join([TitleHTML(_(u'Online:')),
                          DetailHTML(GenTimedString(contact.online_time))])
    idle_since = contact.idle
    # idle may be True (idle, unknown since-when) or a timestamp.
    if contact.service in ('icq', 'aim', 'yahoo') and idle_since and idle_since is not True:
        times += (u''.join([TitleHTML((' ' if times else '') + _(u'Idle:')),
                            DetailHTML(GenTimedString(idle_since))]))
    away_since = getattr(contact, 'away_updated', None)
    if getattr(contact, 'away', False) and away_since:
        times += (u''.join([TitleHTML(_((' ' if times else '') + _(u'Away:'))),
                            DetailHTML(GenTimedString(away_since))]))
    if times:
        times = '<div>%s</div>' % times
    if contact.status_orb == 'unknown' or contact.service not in JABBER_SERVICES:
        status = u''.join(['<div style="white-space: nowrap;">',
                           TitleHTML(_(u'Status:')),
                           BodyHTML((_('{status} + Idle').format(status = contact.sightly_status)
                                     if contact.status == u'away' and contact.idle
                                     else contact.sightly_status)),
                           '</div>'])
    else:
        status = JabberStatusMagic(contact)
    statusmsg = getattr(contact, '_infobox_status_message', contact.status_message)
    import hooks
    if statusmsg is not None:
        statusmsg = hooks.reduce('digsby.status.tagging.strip_tag', statusmsg, impl='text')
    if not statusmsg or contact.service in JABBER_SERVICES:
        statusmsg = ''
    else:
        if contact.service not in ('aim', 'icq'):
            statusmsg = BodyHTML(statusmsg)
        statusmsg = u''.join((separatorshort(), statusmsg))
    # Right-hand column: status orb, buddy icon, and the show/hide profile link.
    icon = ''.join([constantmiddle,
                    GenStatusIconHTML(contact),
                    GenBuddyIconHTML(contact),
                    LinkHTML(u'profile',
                             (_(u'Hide Profile') if showprofile else _(u'Show Profile')) if showhide else '',
                             nowrap = True)])
    html = u''.join([constanttop, servico, name, username, times, status, statusmsg, icon, profile, constantbottom])
    return linkify(html)
def __save_screen(self):
    """Snapshot the live GNU screen session into self.basedir/self.savedir.

    Drives the attached screen process with ``command_at`` (``screen -X``-style
    commands): dumps the window list, hardcopies every window's scrollback,
    then walks the dumped ``winlist`` file to record, per window, its group
    membership, its TTY's process tree (from procfs), and special handling for
    the "primer" helper and vim instances.  Finally prunes scrollback files and
    windows the user excluded, and symlinks ``last_win``.

    Side effects only: writes files under the save directory, prints progress
    to stdout via ``sys.stdout.write``/``out``.  Returns None.

    NOTE(review): assumes ``self.excluded``/``self.scroll`` are containers
    (lists of window ids/titles) when truthy — confirm against caller.
    """
    errors=[]
    homewindow=self.homewindow
    # Maps: group-id -> member window ids / member group ids.
    group_wins={}
    group_groups={}
    excluded_wins=[]
    excluded_groups=[]
    scroll_wins=[]
    scroll_groups=[]
    cwin=-1
    ctty=None
    cppids={}  # NOTE(review): never used in this method — candidate for removal
    rollback=None,None,None
    ctime=self.time()
    findir=os.path.join(self.basedir,self.savedir)
    #sc_cwd=self.command_at(True,'hardcopydir')
    #print(sc_cwd)
    # Ask screen to dump the window list and per-window state files, then
    # hardcopy every window's scrollback into the save directory.
    self.command_at(False, 'at \# dumpscreen window %s'%os.path.join(self.basedir,self.savedir,"winlist"))
    self.command_at(False, 'at \# dumpscreen window %s -F'%os.path.join(self.basedir,self.savedir))
    self.command_at(False, 'hardcopydir %s'%os.path.join(self.basedir,self.savedir))
    self.command_at(False, 'at \# hardcopy -h')
    # Point hardcopydir back at the home directory afterwards.
    self.command_at(False, 'hardcopydir \"%s\"'%self.homedir)
    # should be modified to properly restore hardcopydir(:dumpscreen settings)
    try:
        # Probe that the winlist dump actually landed on disk.
        f=open(os.path.join(findir,"winlist"),'r')
        f.close()
    except:
        # Retry the dump once if the file is not there yet (screen runs the
        # commands asynchronously).
        self.command_at(False, 'at \# dumpscreen window %s'%os.path.join(self.basedir,self.savedir,"winlist"))
    # "mru" records the most-recently-used window order, space separated.
    fmru = open(os.path.join(findir,"mru"),"w")
    for line in open(os.path.join(findir,"winlist"),'r'):
        # winlist line: "<id> <group-id> <tty> [<title>]"; title is optional.
        try:
            id,cgroupid,ctty,ctitle = line.strip().split(' ',3)
        except:
            id,cgroupid,ctty= line.strip().split(' ')
            ctitle=None
        cwin=id
        fmru.write("%s "%cwin)
        # The tty field's first character encodes the window type:
        # 'z' zombie, 'g' group, 't' telnet, otherwise a real tty path.
        if(ctty[0]=='z'):
            # zombie
            continue
        if(ctty[0]=="g"):
            # group
            ctype="group"
            cpids = None
            cpids_data=None
            # NOTE(review): group->parent registration happens only when an
            # exclusion/scroll filter is active, and can run twice when both
            # are set (duplicate entry in group_groups) — confirm intended.
            if self.excluded:
                if cwin in self.excluded or ctitle in self.excluded:
                    excluded_groups.append(cwin)
                try:
                    group_groups[cgroupid]+=[cwin]
                except:
                    group_groups[cgroupid]=[cwin]
            if self.scroll:
                if cwin in self.scroll or ctitle in self.scroll:
                    scroll_groups.append(cwin)
                try:
                    group_groups[cgroupid]+=[cwin]
                except:
                    group_groups[cgroupid]=[cwin]
        else:
            # Regular (non-group) window: record exclusion/scroll matches,
            # otherwise file it under its parent group.
            if self.excluded:
                if cwin in self.excluded or ctitle in self.excluded:
                    excluded_wins.append(cwin)
                else:
                    try:
                        group_wins[cgroupid]+=[cwin]
                    except:
                        group_wins[cgroupid]=[cwin]
            if self.scroll:
                if cwin in self.scroll or ctitle in self.scroll:
                    scroll_wins.append(cwin)
                else:
                    try:
                        group_wins[cgroupid]+=[cwin]
                    except:
                        group_wins[cgroupid]=[cwin]
            if(ctty[0]=="t"):
                # telnet
                ctype="telnet"
                cpids = None
                cpids_data=None
            else:
                ctype="basic"
                # get sorted pids in window
                cpids=sc.get_tty_pids(ctty)
                cpids_data=[]
                ncpids=[]
                for pid in cpids:
                    try:
                        # pidinfo is (cwd, exe, cmdline, ...) per sc.get_pid_info;
                        # append a blacklist flag as an extra tuple element.
                        pidinfo=sc.get_pid_info(pid)
                        (exehead,exetail)=os.path.split(pidinfo[1])
                        if exetail in self.blacklist:
                            blacklist=True
                        else:
                            blacklist=False
                        cpids_data.append(pidinfo+tuple([blacklist]))
                        ncpids.append(pid)
                    except:
                        errors.append('%s PID %s: Unable to access. No permission or no procfs.'%(cwin,pid))
                # Keep only the pids we could actually inspect.
                cpids=ncpids
                if(cpids):
                    for i,pid in enumerate(cpids):
                        if(cpids_data[i][3]):
                            text="BLACKLISTED"
                        else:
                            text=""
                        # cmdline is NUL-separated; strip any "-c/-ic <primer...>"
                        # argument pair so the saved command line does not
                        # re-trigger the primer on restore.
                        l=cpids_data[i][2].split('\0')
                        jremove=[]
                        wprev=False
                        for j,w in enumerate(l):
                            if w == '-ic' or w == '-c':
                                wprev=True
                            elif wprev:
                                if w.startswith(self.primer):
                                    jremove+=j,j-1
                                wprev=False
                        if jremove:
                            s=[]
                            for j,w in enumerate(l):
                                if j not in jremove:
                                    s.append(w)
                            newdata=(cpids_data[i][0],cpids_data[i][1],"\0".join(["%s"%v for v in s]),cpids_data[i][3])
                            cpids_data[i]=newdata
                        #out('%s pid = %s: cwd = %s; exe = %s; cmdline = %s' % (text,pid, cpids_data[i][0], cpids_data[i][1], cpids_data[i][2]))
                        vim_name=str(None)
                        args=cpids_data[i][2].split('\0')
                        if args[0].endswith(self.primer_base) and args[1]=='-p':
                            # The window is running the primer helper: derive
                            # rollback state from its command line instead.
                            sys.stdout.write('(primer)')
                            rollback=self.__rollback(cpids_data[i][2])
                            #out(str(rollback))
                        elif args[0] in self.vim_names and self.bVim:
                            # Running vim: save its session, then drop any
                            # "-S <file>" / "-i <file>" pairs from the saved
                            # command line (they will be re-added on restore).
                            sys.stdout.write('(vim)')
                            vim_name=self.__save_vim(id)
                            nargs=[]
                            rmarg=False
                            for arg in args:
                                if rmarg:
                                    # Skip the value following -S/-i.
                                    rmarg=False
                                    pass
                                elif arg in ('-S','-i'):
                                    rmarg=True
                                else:
                                    nargs.append(arg)
                            args=nargs
                            newdata=(cpids_data[i][0],cpids_data[i][1],"\0".join(["%s"%v for v in args]),cpids_data[i][3])
                            cpids_data[i]=newdata
                        # Extend each record with the vim session name (or 'None').
                        cpids_data[i]=(cpids_data[i][0],cpids_data[i][1],cpids_data[i][2],cpids_data[i][3],vim_name)
        # NOTE(review): scrollback_filename is computed but never used here.
        scrollback_filename=os.path.join(self.basedir,self.savedir,"hardcopy."+id)
        sys.stdout.write("%s %s | "%(cwin,ctype))
        # Persist this window's state; __save_win returns a list of error strings.
        errors+=self.__save_win(id,ctype,cpids_data,ctime,rollback)
        rollback=None,None,None
    out('')
    fmru.close()
    util.remove(os.path.join(findir,"winlist"))
    # remove ignored scrollbacks
    if 'all' in self.scroll:
        # Truncate every hardcopy file (open in 'w' empties it).
        for f in glob.glob(os.path.join(self.basedir, self.savedir, "hardcopy.*")):
            open(f,'w')
    elif self.scroll:
        # Transitively expand scroll-excluded groups into their subgroups…
        scroll_groups_tmp=[]
        while scroll_groups:
            sgroup=scroll_groups.pop()
            if sgroup not in scroll_groups_tmp:
                scroll_groups_tmp.append(sgroup)
                try:
                    ngroups = group_groups[sgroup]
                    if ngroups:
                        for g in ngroups:
                            scroll_groups.append(g)
                except:
                    pass
        scroll_groups = scroll_groups_tmp
        out('Scrollback excluded groups: %s'%str(scroll_groups))
        # …then into their member windows, and delete those hardcopies.
        for sgroup in scroll_groups:
            scroll_wins.append(sgroup)
            try:
                for w in group_wins[sgroup]:
                    scroll_wins.append(w)
            except:
                pass
        out('All scrollback excluded windows: %s'%str(scroll_wins))
        for w in scroll_wins:
            util.remove(os.path.join(self.basedir, self.savedir, "hardcopy.%s"%w))
    # remove ignored windows
    if self.excluded:
        # Same transitive group expansion as above, for fully excluded windows.
        excluded_groups_tmp=[]
        while excluded_groups:
            egroup=excluded_groups.pop()
            if egroup not in excluded_groups_tmp:
                excluded_groups_tmp.append(egroup)
                try:
                    ngroups = group_groups[egroup]
                    if ngroups:
                        for g in ngroups:
                            excluded_groups.append(g)
                except:
                    pass
        excluded_groups = excluded_groups_tmp
        out('Excluded groups: %s'%str(excluded_groups))
        for egroup in excluded_groups:
            excluded_wins.append(egroup)
            try:
                for w in group_wins[egroup]:
                    excluded_wins.append(w)
            except:
                pass
        out('All excluded windows: %s'%str(excluded_wins))
        # Delete every saved artifact (state file, hardcopy, vim sessions)
        # belonging to an excluded window.
        bpath1 = os.path.join(self.basedir, self.savedir, "win_")
        bpath2 = os.path.join(self.basedir, self.savedir, "hardcopy.")
        bpath3 = os.path.join(self.basedir, self.savedir, "vim_W")
        for win in excluded_wins:
            util.remove(bpath1+win)
            util.remove(bpath2+win)
            for f in glob.glob(bpath3+win+'_*'):
                util.remove(f)
    # Mark the home (currently focused) window so restore can return to it.
    # NOTE(review): this module-level linkify presumably creates a symlink —
    # it is unrelated to the HTML linkify seen elsewhere in this file; confirm.
    linkify(os.path.join(self.basedir,self.savedir),"win_"+homewindow,"last_win")
    if errors:
        out('Errors:')
        for error in errors:
            out(error)
    out('\nSaved: '+str(ctime))