class RateLimit(Processor):
    """Flood protection: track per-identity message rates and complain
    (in public) or swallow the event (in private) when the limit is hit."""

    # Very low priority so rate limiting runs before all other processors.
    priority = -1000
    event_types = (u'message', u'action', u'notice')

    limit_time = IntOption('limit_time',
            'Time period over which to measure messages', 10)
    limit_messages = IntOption(
            'limit_messages',
            'Number of messages to allow during the time period', 5)

    # identity -> list of event timestamps within the measurement window.
    # NOTE(review): class-level mutable dict, shared across instances —
    # presumably intentional for a singleton processor; confirm.
    messages = {}

    @handler
    def ratelimit(self, event):
        """Record this event's timestamp; if the sender exceeded the limit,
        grumble publicly or mark the event processed (private)."""
        if event.identity not in self.messages:
            self.messages[event.identity] = [event.time]
        else:
            self.messages[event.identity].append(event.time)
            # Drop timestamps that have aged out of the window.
            # (Python 2 filter() returns a list.)
            self.messages[event.identity] = filter(
                lambda x: event.time - x < timedelta(seconds=self.limit_time),
                self.messages[event.identity])

        if len(self.messages[event.identity]) > self.limit_messages:
            if event.public:
                event.addresponse(u'Geez, give me some time to think!',
                                  address=False)
            else:
                # In private, silently stop further processing instead.
                event.processed = True
class SourceFactory(client.DeferredClientFactory,
                    protocol.ReconnectingClientFactory, IbidSourceFactory):
    """Jabber/XMPP source: connects the bot to an XMPP server via a
    custom connector that handles SRV lookup and SSL/TLS."""

    auth = ('implicit',)
    supports = ('multiline',)

    jid_str = Option('jid', 'Jabber ID')
    server = Option('server', 'Server hostname (defaults to SRV lookup, '
                              'falling back to JID domain)')
    # BUG FIX: help string was missing its closing parenthesis.
    port = IntOption('port', 'Server port number (defaults to SRV lookup, '
                             'falling back to 5222/5223)')
    ssl = BoolOption('ssl', 'Use SSL instead of automatic TLS')
    password = Option('password', 'Jabber password')
    nick = Option('nick', 'Nick for chatrooms', ibid.config['botname'])
    rooms = ListOption('rooms', 'Chatrooms to autojoin', [])
    accept_domains = ListOption('accept_domains',
            'Only accept messages from these domains', [])
    max_public_message_length = IntOption('max_public_message_length',
            'Maximum length of public messages', 512)

    def __init__(self, name):
        IbidSourceFactory.__init__(self, name)
        self.log = logging.getLogger('source.%s' % name)
        client.DeferredClientFactory.__init__(self, JID(self.jid_str),
                                              self.password)
        bot = JabberBot()
        self.addHandler(bot)
        bot.setHandlerParent(self)

    def setServiceParent(self, service):
        # The custom connector performs the SRV lookup / SSL handling;
        # the `service` argument is unused here.
        c = IbidXMPPClientConnector(reactor, self.authenticator.jid.host,
                                    self, self.server, self.port, self.ssl)
        c.connect()

    def connect(self):
        return self.setServiceParent(None)

    def disconnect(self):
        self.stopTrying()
        self.stopFactory()
        self.proto.xmlstream.transport.loseConnection()
        return True

    def join(self, room):
        return self.proto.join(room)

    def leave(self, room):
        return self.proto.leave(room)

    def url(self):
        return u'xmpp://%s' % (self.jid_str,)

    def logging_name(self, identity):
        # Log by bare JID: strip the XMPP resource suffix.
        return identity.split('/')[0]

    def truncation_point(self, response, event=None):
        # Only chatroom (public) messages are subject to the length limit.
        if response.get('target', None) in self.proto.rooms:
            return self.max_public_message_length
        return None
class Coffee(Processor):
    """Virtual coffee pot: brew a pot per channel and announce when ready."""

    usage = u'coffee (on|please)'
    features = ('coffee', )

    # (source, channel) -> list of nicks who get a cup from the current pot.
    pots = {}

    time = IntOption('coffee_time', u'Brewing time in seconds', 240)
    cups = IntOption('coffee_cups', u'Maximum number of cups', 4)

    def coffee_announce(self, event):
        # Called back by the dispatcher when brewing time is up.
        event.addresponse(u"Coffee's ready for %s!",
                human_join(self.pots[(event.source, event.channel)]))
        del self.pots[(event.source, event.channel)]

    @match(r'^coffee\s+on$')
    def coffee_on(self, event):
        """Start a pot brewing, unless one is already on in this channel."""
        if (event.source, event.channel) in self.pots:
            if len(self.pots[(event.source, event.channel)]) >= self.cups:
                event.addresponse(
                    u"There's already a pot on, and it's all reserved")
            elif event.sender['nick'] in self.pots[(event.source, event.channel)]:
                event.addresponse(u"You already have a pot on the go")
            else:
                event.addresponse(
                    u"There's already a pot on. If you ask nicely, maybe you can have a cup"
                )
            return

        self.pots[(event.source, event.channel)] = [event.sender['nick']]
        # Announce readiness after the configured brewing time.
        ibid.dispatcher.call_later(self.time, self.coffee_announce, event)

        event.addresponse(choice((
            u'puts the kettle on',
            u'starts grinding coffee',
            u'flips the salt-timer',
            u'washes some mugs',
        )), action=True)

    @match(r'^coffee\s+(?:please|pls)$')
    def coffee_accept(self, event):
        """Reserve a cup from the pot currently brewing in this channel."""
        if (event.source, event.channel) not in self.pots:
            event.addresponse(u"There isn't a pot on")
        elif len(self.pots[(event.source, event.channel)]) >= self.cups:
            event.addresponse(u"Sorry, there aren't any more cups left")
        elif event.sender['nick'] in self.pots[(event.source, event.channel)]:
            event.addresponse(
                u"Now now, we don't want anyone getting caffeine overdoses")
        else:
            self.pots[(event.source, event.channel)].append(event.sender['nick'])
            event.addresponse(True)
class SourceFactory(IbidSourceFactory):
    """SILC source: connects via a SilcBot client pumped by a TimerService."""

    auth = ('implicit', )
    supports = ('action', 'topic')

    server = Option('server', 'Server hostname')
    port = IntOption('port', 'Server port number', 706)
    nick = Option('nick', 'Nick', ibid.config['botname'])
    channels = ListOption('channels', 'Channels to autojoin', [])
    realname = Option('realname', 'Real Name', ibid.config['botname'])
    public_key = Option('public_key', 'Filename of public key', 'silc.pub')
    private_key = Option('private_key', 'Filename of private key', 'silc.prv')
    max_public_message_length = IntOption('max_public_message_length',
            'Maximum length of public messages', 512)

    def __init__(self, name):
        IbidSourceFactory.__init__(self, name)
        self.log = logging.getLogger('source.%s' % self.name)
        pub = join(ibid.options['base'], self.public_key)
        prv = join(ibid.options['base'], self.private_key)
        # Generate a key pair on first run; load the existing pair otherwise.
        if not exists(pub) and not exists(prv):
            keys = create_key_pair(pub, prv, passphrase='')
        else:
            keys = load_key_pair(pub, prv, passphrase='')
        self.client = SilcBot(keys, self.nick, self.nick, self.realname, self)

    def run_one(self):
        # Pump the SILC client's event loop once (driven by the timer below).
        self.client.run_one()

    def setServiceParent(self, service):
        self.s = internet.TimerService(0.2, self.run_one)
        if service is None:
            self.s.startService()
        else:
            self.s.setServiceParent(service)

    def disconnect(self):
        self.client.disconnect()
        return True

    def url(self):
        return u'silc://%s@%s:%s' % (self.nick, self.server, self.port)

    def logging_name(self, identity):
        return self.client.logging_name(identity)

    def truncation_point(self, response, event=None):
        # Only channel (public) messages are subject to the length limit.
        if response.get('target', None) in self.client.channels:
            return self.max_public_message_length
        return None
class SourceFactory(protocol.ServerFactory, IbidSourceFactory):
    """Telnet source: listens on a TCP port and speaks TelnetProtocol."""

    protocol = TelnetProtocol
    supports = ('multiline',)

    port = IntOption('port', 'Port to listen on', 3000)

    def __init__(self, name, *args, **kwargs):
        #protocol.ServerFactory.__init__(self, *args, **kwargs)
        IbidSourceFactory.__init__(self, name)
        self.log = logging.getLogger('source.%s' % name)

    def setServiceParent(self, service=None):
        # Without a parent service, listen directly on the reactor.
        if not service:
            self.listener = reactor.listenTCP(self.port, self)
            return
        self.listener = internet.TCPServer(self.port,
                                           self).setServiceParent(service)
        return self.listener

    def connect(self):
        return self.setServiceParent(None)

    def disconnect(self):
        self.listener.stopListening()
        return True
class CounterStrike(Processor):
    """Query a Counter-Strike server (old-style UDP query protocol) for
    the current player list."""

    usage = u'cs players | who is playing cs'
    feature = ('gameservers', )
    autoload = False

    cs_host = Option('cs_host', 'CS server hostname / IP', '127.0.0.1')
    cs_port = IntOption('cs_port', 'CS server port', 27015)

    @match(
        r'^(?:(?:cs|counter[\s-]*strike)\s+players|who(?:\'s|\s+is)\s+(?:playing|on)\s+(?:cs|counter[\s-]*strike))$'
    )
    def cs_players(self, event):
        server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # 'details' query: server info, including player counts.
        server.sendto('\xFF\xFF\xFF\xFFdetails', (self.cs_host, self.cs_port))
        server.settimeout(5)
        data = server.recv(16384)
        assert data.startswith('\xFF\xFF\xFF\xFFm')
        data = data[5:]
        address, hostname, map, mod, modname, details = data.split('\x00', 5)
        details = details[:5]  # We don't care about the rest
        clientcount, clientmax, protocol, type, os = struct.unpack(
            '<3Bcc', details)
        if clientcount == 0:
            event.addresponse(u'Nobody. Everyone must have lives...')
            return
        # 'players' query: one record per connected player.
        server.sendto('\xFF\xFF\xFF\xFFplayers',
                      (self.cs_host, self.cs_port))
        data = server.recv(16384)
        assert data.startswith('\xFF\xFF\xFF\xFF')
        data = data[6:]
        players = []
        while data:
            player = {}
            data = data[1:]  # skip the player-index byte
            player['nickname'], data = data.split('\x00', 1)
            player['fragtotal'] = struct.unpack('<i', data[:4])[0]
            # Skip 8 bytes: the 4-byte frag count read above plus
            # (presumably) a 4-byte connect-time float — TODO confirm.
            data = data[8:]
            players.append(player)
        players.sort(key=lambda x: x['fragtotal'], reverse=True)
        event.addresponse(
            u'There are %(clients)i/%(clientmax)s players playing %(map)s: %(players)s', {
                'clients': clientcount,
                'clientmax': clientmax,
                'map': map,
                'players': human_join(u'%s (%i)' % (p['nickname'], p['fragtotal'])
                                      for p in players),
            })
class Deliver(Processor):
    """Deliver waiting memos to a user when they next say something."""

    feature = ('memo',)
    addressed = False
    processed = True

    public_limit = IntOption('public_limit', 'Maximum number of memos to read '
                             'out in public (flood-protection)', 2)

    @handler
    def deliver(self, event):
        # Negative cache of identities known to have no waiting memos.
        if event.identity in nomemos_cache:
            return
        memos = get_memos(event)

        if len(memos) > self.public_limit and event.public:
            # Too many to read out in public: notify once, then remember.
            if event.identity not in notified_overlimit_cache:
                public = [True for memo in memos if not memo.private]
                message = u'By the way, you have a pile of memos waiting for ' \
                          u'you, too many to read out in public. PM me'
                if public:
                    event.addresponse(u'%s: ' + message, event.sender['nick'])
                else:
                    event.addresponse(message,
                                      target=event.sender['connection'])
                notified_overlimit_cache.add(event.identity)
            return

        for memo in memos:
            # Don't deliver if the user just sent a memo to themself
            if 'memo' in event and event.memo == memo.id:
                continue
            if memo.private:
                # Private memos go directly to the sender's connection.
                message = u'By the way, %(sender)s on %(source)s told me ' \
                          u'"%(message)s" %(ago)s ago' % {
                              'sender': memo.sender.identity,
                              'source': memo.sender.source,
                              'message': memo.memo,
                              'ago': ago(event.time - memo.time),
                          }
                event.addresponse(message, target=event.sender['connection'])
            else:
                event.addresponse(u'By the way, %(sender)s on %(source)s '
                                  u'told me "%(message)s" %(ago)s ago', {
                                      'sender': memo.sender.identity,
                                      'source': memo.sender.source,
                                      'message': memo.memo,
                                      'ago': ago(event.time - memo.time),
                                  })
            memo.delivered = True
            event.session.save_or_update(memo)
            event.session.commit()
            log.info(u"Delivered memo %s to %s (%s)",
                     memo.id, event.identity, event.sender['connection'])

        if 'memo' not in event:
            nomemos_cache.add(event.identity)
class DrawImage(Processor):
    """Render an image from a URL as ASCII art using libcaca's img2txt.

    NOTE(review): the visible portion of draw() ends after fetching the
    URL — the rendering half of the method appears to be elsewhere/cut;
    confirm against the full file.
    """

    usage = u'draw <url> [in colour] [width <width>] [height <height>]'
    feature = ('draw-aa',)

    max_filesize = IntOption('max_filesize', 'Only request this many KiB', 200)
    def_height = IntOption('def_height', 'Default height for libaa output', 10)
    max_width = IntOption('max_width', 'Maximum width for ascii output', 60)
    max_height = IntOption('max_height', 'Maximum height for ascii output', 15)
    font_width = IntOption('font_width', 'Font width assumed for output', 6)
    font_height = IntOption('font_height', 'Font height assumed for output', 10)
    img2txt_bin = Option('img2txt_bin', 'libcaca img2txt binary to use',
                         'img2txt')

    def setup(self):
        if not file_in_path(self.img2txt_bin):
            raise Exception('Cannot locate img2txt executable')

    # NOTE(review): 'w(?:idth)?' accepts the abbreviation 'w' but
    # 'h(?:eight)' requires the full word — asymmetry looks accidental;
    # confirm before changing the pattern.
    @match(r'^draw\s+(\S+\.\S+)(\s+in\s+colou?r)?(?:\s+w(?:idth)?\s+(\d+))?(?:\s+h(?:eight)\s+(\d+))?$')
    def draw(self, event, url, colour, width, height):
        # Normalise the URL: default scheme and path.
        if not urlparse(url).netloc:
            url = 'http://' + url
        if urlparse(url).scheme == 'file':
            # Refuse local file access.
            event.addresponse(u'Are you trying to haxor me?')
            return
        if not urlparse(url).path:
            url += '/'
        try:
            f = urlopen(url_to_bytestring(url))
        except HTTPError, e:
            event.addresponse(u'Sorry, error fetching URL: %s',
                              BaseHTTPRequestHandler.responses[e.code][0])
            return
        except URLError:
            event.addresponse(u'Sorry, error fetching URL')
            return
class ExchangeAction(Processor):
    """React to '/me gives <bot> <object>' actions by putting the object
    in the bucket."""

    features = ('bucket', )
    event_types = (u'action', )
    addressed = False

    bucket_size = IntOption('bucket_size',
            "The maximum number of objects in the bucket", 5)

    @match(r'(?:gives|hands) {chunk} ' + object_pat)
    def give(self, event, addressee, determiner, object):
        # Only react when the object was given to one of the bot's names.
        if addressee in ibid.config.plugins['core']['names']:
            return exchange(event, determiner, object, self.bucket_size)
class SourceFactory(IbidSourceFactory):
    """Perspective-broker source: exposes an IbidRoot over Twisted PB."""

    supports = ('multiline', )

    port = IntOption('port', 'Port number to listen on', 8789)

    def setServiceParent(self, service):
        factory = pb.PBServerFactory(IbidRoot(self.name))
        # Without a parent service, listen directly on the reactor.
        if not service:
            reactor.listenTCP(self.port, factory)
            return
        return internet.TCPServer(self.port,
                                  factory).setServiceParent(service)
class Nmap(Processor):
    """Run nmap port scans on behalf of authorised users."""

    usage = u"""port scan <hostname>
    net scan <network>/<prefix>"""
    feature = ('nmap', )
    permission = 'nmap'

    min_prefix = IntOption('min_prefix',
                           'Minimum network prefix that may be scanned', 24)

    def setup(self):
        if not file_in_path('nmap'):
            raise Exception("Cannot locate nmap executable")

    @match(r'^(?:port\s+scan|nmap)\s+([0-9a-z.-]+)$')
    @authorise()
    def host_scan(self, event, host):
        """Resolve the host and run 'nmap --open -n', reporting open ports."""
        try:
            ip = gethostbyname(host)
        except gaierror, e:
            event.addresponse(unicode(e.args[1]))
            return

        # Refuse to scan the loopback interface of the bot's own host.
        if ip.startswith('127.'):
            event.addresponse(
                u"I'm not allowed to inspect my host's internal interface.")
            return

        output, error, code = get_process_output(
            ['nmap', '--open', '-n', host])

        ports = []
        gotports = False
        for line in output.splitlines():
            if gotports:
                # The port table ends at the first blank line.
                if not line.split():
                    break
                port, state, service = line.split()
                ports.append('%s (%s)' % (port, service))
            else:
                if line.startswith('Note: Host seems down.'):
                    event.addresponse(u'That host seems to be down')
                    return
                # The port table starts after the 'PORT ...' header line.
                if line.startswith('PORT'):
                    gotports = True

        if ports:
            event.addresponse(human_join(ports))
        else:
            event.addresponse(u'No open ports detected')
class SourceFactory(IbidSourceFactory):
    """Clock source: dispatches a u'clock' event every `step` seconds."""

    step = IntOption('step', 'Timer interval in seconds', 1)

    def tick(self):
        # Fire a clock event into the dispatcher on every timer tick.
        ibid.dispatcher.dispatch(Event(self.name, u'clock'))

    def setServiceParent(self, service):
        timer = internet.TimerService(self.step, self.tick)
        self.s = timer
        if service is not None:
            timer.setServiceParent(service)
        else:
            timer.startService()

    def disconnect(self):
        self.s.stopService()
        return True
class Bnet(Processor):
    """Report who is playing on a battle.net (bnet) server by scraping
    its plain-text admin console."""

    usage = u'dota players | who is playing dota'
    feature = ('gameservers', )
    autoload = False

    bnet_host = Option('bnet_host', 'Bnet server hostname / IP', '127.0.0.1')
    bnet_port = IntOption('bnet_port', 'Bnet server port', 6112)
    bnet_user = Option('bnet_user', 'Bnet username', 'guest')
    bnet_pass = Option('bnet_pass', 'Bnet password', 'guest')

    def bnet_players(self, gametype):
        """Log into the console, run /con, and return the sorted list of
        nicks connected with the given game type (e.g. 'W3XP')."""
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((self.bnet_host, self.bnet_port))
        s.settimeout(5)
        # \x03 selects the plain-text console protocol, then credentials,
        # the connections listing, and quit.
        s.send('\03%s\n%s\n/con\n/quit\n' % (self.bnet_user, self.bnet_pass))
        out = ""
        while True:
            line = s.recv(1024)
            if line == "":
                break
            out += line
        s.close()

        player_re = re.compile(
            r'^1018 INFO "\s+bnet\s+%s\s+"(\S+?)"?\s+\d+\s+' % gametype)
        users = [player_re.match(line).group(1)
                 for line in out.splitlines() if player_re.match(line)]
        users.sort()
        return users

    @match(
        r'^(?:dota\s+players|who(?:\'s|\s+is)\s+(?:playing\s+dota|on\s+bnet))$'
    )
    def dota_players(self, event):
        users = self.bnet_players('W3XP')
        if users:
            event.addresponse(u'The battlefield contains %s',
                              human_join(users))
        else:
            # BUG FIX: message read "must have a lives" — now matches the
            # equivalent CounterStrike message.
            event.addresponse(u'Nobody. Everyone must have lives...')
class SourceFactory(IbidSourceFactory):
    """Campfire source: connects through a CampfireBot streaming client."""

    auth = ('implicit', )
    supports = ('action', 'multiline', 'topic')

    subdomain = Option('subdomain', 'Campfire subdomain')
    secure = BoolOption('secure', 'Use https (paid accounts only)', False)
    token = Option('token', 'Campfire token')
    rooms = ListOption('rooms', 'Rooms to join', [])
    keepalive_timeout = IntOption(
            'keepalive_timeout', 'Stream keepalive timeout. '
            'Campfire sends a keepalive every <5 seconds', 30)

    def __init__(self, name):
        super(SourceFactory, self).__init__(name)
        self.log = logging.getLogger('source.%s' % self.name)
        self.client = CampfireBot(self)

    def setServiceParent(self, service):
        # The streaming client manages its own connection; `service` unused.
        self.client.connect()

    def disconnect(self):
        self.client.disconnect()
        return True

    def url(self):
        protocol = self.secure and 'https' or 'http'
        return '%s://%s.campfirenow.com/' % (protocol, self.subdomain)

    def send(self, response):
        return self.client.send(response)

    def join(self, room_name):
        return self.client.join(room_name)

    def leave(self, room_name):
        return self.client.leave(room_name)

    def truncation_point(self, response, event=None):
        # No message-length truncation for Campfire.
        return None
class Notify(Processor):
    """When a user comes online, tell them how many memos are waiting."""

    features = ('memo', )
    event_types = (u'state', )
    addressed = False
    processed = True

    public_limit = IntOption(
            'public_limit', 'Maximum number of memos to read '
            'out in public (flood-protection)', 2)

    @handler
    def state(self, event):
        if event.state != 'online':
            return
        # Negative cache of identities known to have no waiting memos.
        if event.identity in nomemos_cache:
            return
        memos = get_memos(event)
        if len(memos) > self.public_limit:
            event.addresponse(
                u'You have %s messages, too many for me to tell you in public,'
                u' so ask me in private.', len(memos),
                target=event.sender['connection'], address=False)
        elif len(memos) > 0:
            event.addresponse(plural(
                len(memos), u'You have %(memo_count)d message. '
                u'Would you like to read it now?',
                u'You have %(memo_count)d messages. '
                u'Would you like to read them now?'),
                {'memo_count': len(memos)},
                target=event.sender['connection'])
        else:
            nomemos_cache.add(event.identity)
class SourceFactory(IbidSourceFactory):
    """HTTP source: serves a small web UI plus XML-RPC and SOAP endpoints."""

    port = IntOption('port', 'Port number to listen on', 8080)
    myurl = Option('url', 'URL to advertise')

    def __init__(self, name):
        IbidSourceFactory.__init__(self, name)
        # Resource tree: / (index), /message, /static, /RPC2 (XML-RPC),
        # /SOAP.
        root = Plugin(name)
        root.putChild('', Index(name))
        root.putChild('message', Message(name))
        root.putChild('static',
                      static.File(locate_resource('ibid', 'static')))
        root.putChild('RPC2', XMLRPC())
        root.putChild('SOAP', SOAP())
        self.site = server.Site(root)

    def setServiceParent(self, service):
        if service:
            return internet.TCPServer(self.port,
                                      self.site).setServiceParent(service)
        else:
            reactor.listenTCP(self.port, self.site)

    def url(self):
        return self.myurl
class SourceFactory(ShellFactory, IbidSourceFactory):
    """Manhole-style shell source: password-protected interactive console."""

    port = IntOption('port', 'Port number to listen on', 9898)
    username = Option('username', 'Login Username', 'admin')
    password = Option('password', 'Login Password', 'admin')

    def __init__(self, name):
        ShellFactory.__init__(self)
        IbidSourceFactory.__init__(self, name)
        # NOTE(review): presumably redundant if IbidSourceFactory.__init__
        # already sets self.name — confirm before removing.
        self.name = name

    def setServiceParent(self, service=None):
        if service:
            self.listener = internet.TCPServer(self.port,
                                               self).setServiceParent(service)
            return self.listener
        else:
            self.listener = reactor.listenTCP(self.port, self)

    def connect(self):
        return self.setServiceParent(None)

    def disconnect(self):
        self.listener.stopListening()
        return True
class ICECast(Processor):
    """Report what an ICECast stream is playing, and periodically announce
    song changes on configured channels."""

    usage = u"what's playing [on <stream>]?"
    feature = ('icecast', )

    interval = IntOption('interval',
            'Interval between checking for song changes', 60)
    # NOTE(review): values are actually dicts with 'url' (and optionally
    # 'source'/'channel'/'topic') keys, not bare URLs — see usage below.
    streams = DictOption(
        'streams',
        'Dictionary of Stream names to base URL (include trailing /)', {})

    last_checked = None
    # stream name -> last announced song title.
    last_songs = {}

    def scrape_status(self, stream):
        """Scrape the stream's status.xsl page into a dict of label -> value
        (e.g. 'Current Song', 'Stream Description')."""
        tree = get_html_parse_tree(self.streams[stream]['url'] + 'status.xsl',
                                   treetype='etree')
        main_table = tree.findall('.//table')[2]
        status = {}
        for row in main_table.findall('.//tr'):
            key, value = [x.text for x in row.findall('td')]
            status[key[:-1]] = value  # strip the trailing ':' from the label
        return status

    @match(r'^what(?:\'|\s+i)s\s+playing(?:\s+on\s+(.+))?$')
    def playing(self, event, stream):
        if not event.get('addressed', False):
            return
        if len(self.streams) == 0:
            event.addresponse(u"Sorry, I don't know about any streams")
            return
        elif stream is None and len(self.streams) == 1:
            stream = self.streams.keys()[0]
        elif stream is not None and stream not in self.streams:
            # Fall back to a case-insensitive match on stream names.
            for name in self.streams.iterkeys():
                if name.lower() == stream.lower():
                    stream = name
                    break
            else:
                stream = None
        if stream is None:
            event.addresponse(
                u'Sorry, I only know about the following streams, '
                u'please choose one: %s', human_join(self.streams.keys()))
            return
        try:
            status = self.scrape_status(stream)
            event.addresponse(
                u'Currently Playing on %(stream)s: '
                u'%(song)s - %(description)s (Listeners: %(listeners)s)', {
                    'stream': stream,
                    'song': status['Current Song'],
                    'description': status['Stream Description'],
                    'listeners': status['Current Listeners'],
                })
        except HTTPError:
            event.addresponse(
                u'The stream must be down, back to the MP3 collection for you')

    @periodic(config_key='interval')
    def check(self, event):
        for name, stream in self.streams.iteritems():
            # Only announce on streams configured with a source + channel.
            if 'source' in stream and 'channel' in stream:
                log.debug(u'Probing %s', name)
                status = self.scrape_status(name)
                if self.last_songs.get(name, '') != status['Current Song']:
                    self.last_songs[name] = status['Current Song']
                    # BUG FIX: the message contained a mis-encoded
                    # (mojibake) character between "Playing" and "on".
                    event.addresponse(
                        u'Now Playing on %(stream)s: '
                        u'%(song)s - %(description)s '
                        u'(Listeners: %(listeners)s)', {
                            'stream': name,
                            'song': status['Current Song'],
                            'description': status['Stream Description'],
                            'listeners': status['Current Listeners'],
                        },
                        source=stream['source'],
                        target=stream['channel'],
                        topic=(stream.get('topic', 'False').lower()
                               in ('yes', 'true')),
                        address=False,
                    )
class SourceFactory(IbidSourceFactory, smtp.SMTPFactory):
    """Email source: accepts messages over SMTP and sends replies through
    a relay host."""

    supports = ('multiline',)

    port = IntOption('port', 'Port number to listen on', 10025)
    address = Option('address',
            'Email address to accept messages for and send from',
            'ibid@localhost')
    accept = ListOption('accept', 'Email addresses to accept messages for',
                        [])
    relayhost = Option('relayhost',
                       'SMTP server to relay outgoing messages to')

    def __init__(self, name):
        IbidSourceFactory.__init__(self, name)
        self.log = logging.getLogger('source.%s' % name)
        self.delivery = IbidDelivery(self)

    def buildProtocol(self, addr):
        p = smtp.SMTPFactory.buildProtocol(self, addr)
        p.delivery = self.delivery
        return p

    def setServiceParent(self, service):
        self.service = service
        if service:
            internet.TCPServer(self.port, self).setServiceParent(service)
        else:
            reactor.listenTCP(self.port, self)

    def url(self):
        return u'mailto:%s' % (self.address,)

    def respond(self, event):
        """Group the event's responses by target address and send one
        combined email per recipient."""
        messages = {}
        for response in event.responses:
            if response['target'] not in messages:
                messages[response['target']] = response
            else:
                messages[response['target']]['reply'] += '\n' + response['reply']

        for message in messages.values():
            # NOTE(review): checks lowercase 'subject' but sets 'Subject' —
            # confirm which key responses actually carry.
            if 'subject' not in message:
                message['Subject'] = 'Re: ' + event['subject']
            # BUG FIX: these headers were being set on `response` (the
            # stale loop variable from the grouping loop above) instead of
            # on the message actually being sent.
            if 'message-id' in event.headers:
                message['In-Reply-To'] = event.headers['message-id']
                if 'references' in event.headers:
                    message['References'] = \
                        '%(references)s %(message-id)s' % event.headers
                elif 'in-reply-to' in event.headers:
                    message['References'] = \
                        '%(in-reply-to)s %(message-id)s' % event.headers
                else:
                    message['References'] = '%(message-id)s' % event.headers
            self.send(message)

    def send(self, response):
        """Build RFC 822-style headers + body from a response dict and
        relay it via self.relayhost."""
        message = response['reply']
        response['To'] = response['target']
        response['Date'] = smtp.rfc822date()
        if 'Subject' not in response:
            response['Subject'] = 'Message from %s' % ibid.config['botname']
        response['Content-Type'] = 'text/plain; charset=utf-8'
        # Internal bookkeeping keys must not leak into the email headers.
        del response['target']
        del response['source']
        del response['reply']

        body = ''
        for header, value in response.items():
            body += '%s: %s\n' % (header, value)
        body += '\n'
        body += message

        # relayhost may carry an explicit port as host:port.
        port = ':' in self.relayhost and int(self.relayhost.split(':')[1]) or 25
        smtp.sendmail(self.relayhost.split(':')[0], self.address,
                      response['To'], body.encode('utf-8'), port=port)
        self.log.debug(u"Sent email to %s: %s",
                       response['To'], response['Subject'])
class Bazaar(Processor, RPC):
    """Report and announce commits in configured Bazaar branches."""

    usage = u"""(last commit|commit <revno>) [to <repo>] [full]
    repositories"""
    features = ('bzr', )
    autoload = False

    repositories = DictOption('repositories',
                              'Dict of repositories names and URLs')
    interval = IntOption('interval',
                         'Interval inbetween checks for new revisions', 300)

    def __init__(self, name):
        self.log = logging.getLogger('plugins.bzr')
        Processor.__init__(self, name)
        RPC.__init__(self)

    def setup(self):
        # NOTE(review): branches are keyed by name.lower(), but check()
        # indexes with the original-case name — mixed-case repository
        # names would KeyError there; confirm.
        self.branches = {}
        must_monitor = False
        for name, repository in self.repositories.items():
            try:
                self.branches[name.lower()] = Branch.open(repository['url'])
            except NotBranchError:
                self.log.error(u'%s is not a branch', repository)
                continue
            if repository.get('poll', 'False').lower() in ('yes', 'true'):
                must_monitor = True
        # Disable the periodic poll entirely when no repo wants polling.
        self.check.im_func.disabled = not must_monitor
        if must_monitor:
            self.seen_revisions = {}

    @match(r'^(?:repos|repositories)$')
    def handle_repositories(self, event):
        repositories = self.branches.keys()
        if repositories:
            event.addresponse(u'I know about: %s',
                              human_join(sorted(repositories)))
        else:
            event.addresponse(u"I don't know about any repositories")

    def remote_committed(self, repository, start, end=None):
        # RPC entry point: announce commits start..end on the repo's channel.
        commits = self.get_commits(repository, start, end)
        repo = self.repositories[repository]
        for commit in commits:
            ibid.dispatcher.send({
                'reply': commit,
                'source': repo['source'],
                'target': repo['channel'],
            })
        return True

    @match(
        r'^(?:last\s+)?commit(?:\s+(\d+))?(?:(?:\s+to)?\s+(\S+?))?(\s+full)?$')
    def commit(self, event, revno, repository, full):
        revno = revno and int(revno) or None
        # NOTE(review): get_commits returns None for an unknown repository,
        # which would make the loop below raise TypeError — confirm intended.
        commits = self.get_commits(repository, revno, full=full)
        output = u''
        for commit in commits:
            if commit:
                output += commit.strip()
        event.addresponse(output, conflate=False)

    def get_commits(self, repository, start, end=None, full=None):
        """Format commits start..end for the named repo (or the most
        recently committed-to branch if repository is None/empty).
        Returns a list of formatted lines, newest first, or None when the
        named repository is unknown."""
        branch = None
        if repository:
            repository = repository.lower()
            if repository not in self.branches:
                return None
            branch = self.branches[repository]
        if not branch:
            if len(self.branches) == 1:
                (repository, branch) = self.branches.items()[0]
            else:
                # Default to the branch with the newest head commit.
                (repository, branch) = sorted(
                    self.branches.iteritems(), reverse=True,
                    key=lambda (k, v): v.repository.get_revision(
                        v.last_revision_info()[1]).timestamp)[0]
        if not start:
            start = branch.revision_id_to_revno(branch.last_revision())
        f = StringIO()
        log.show_log(branch, LogFormatter(f, repository, branch, full),
                     start_revision=start, end_revision=end or start)
        f.seek(0)
        commits = f.readlines()
        commits.reverse()
        return commits

    @handler
    def launchpad(self, event):
        # Triggered by Launchpad branch-notification emails (smtp source).
        if ibid.sources[event.source].type != 'smtp' \
                or 'X-Launchpad-Branch' not in event.headers:
            return
        event.processed = True
        if 'X-Launchpad-Branch' not in event.headers \
                or 'X-Launchpad-Branch-Revision-Number' not in event.headers:
            return
        for name, repository in self.repositories.iteritems():
            if (event.headers['X-Launchpad-Branch']
                    == repository.get('lp_branch', None)):
                self.remote_committed(
                    name,
                    int(event.headers['X-Launchpad-Branch-Revision-Number']))

    @periodic(config_key='interval')
    def check(self, event):
        for name, repo in self.repositories.iteritems():
            if repo.get('poll', 'False').lower() not in ('yes', 'true'):
                continue
            branch = self.branches[name]
            lastrev = branch.last_revision()
            if name not in self.seen_revisions:
                # First poll: just record the current tip, don't announce.
                self.seen_revisions[name] = lastrev
                continue
            if lastrev == self.seen_revisions[name]:
                continue
            try:
                commits = self.get_commits(name, None, False)
            except RevisionNotPresent:
                self.log.debug(
                    u"Got a RevisionNotPresent, hoping it won't be there next time..."
                )
                continue
            self.seen_revisions[name] = lastrev
            if commits:
                event.addresponse(unicode(commits[0].strip()),
                                  source=repo['source'],
                                  target=repo['channel'], address=False)
class Retrieve(Processor): usage = u"""latest [ <count> ] articles from <name> [ starting at <number> ] article ( <number> | /<pattern>/ ) from <name>""" feature = ('feeds', ) interval = IntOption('interval', 'Feed Poll interval (in seconds)', 300) @match(r'^(?:latest|last)\s+(?:(\d+)\s+)?articles\s+from\s+(.+?)' r'(?:\s+start(?:ing)?\s+(?:at\s+|from\s+)?(\d+))?$') def list(self, event, number, name, start): number = number and int(number) or 10 start = start and int(start) or 0 feed = event.session.query(Feed).filter_by(name=name).first() if not feed: event.addresponse(u"I don't know about the %s feed", name) return feed.update() if not feed.entries: event.addresponse(u"I can't find any articles in that feed") return articles = feed.entries[start:number + start] articles = [ u'%s: "%s"' % (feed.entries.index(entry) + 1, html2text_file(entry.title, None).strip()) for entry in articles ] event.addresponse(u', '.join(articles)) @match(r'^article\s+(?:(\d+)|/(.+?)/)\s+from\s+(.+?)$') def article(self, event, number, pattern, name): feed = event.session.query(Feed).filter_by(name=name).first() if not feed: event.addresponse(u"I don't know about the %s feed", name) return feed.update() if not feed.entries: event.addresponse(u"I can't access that feed") return article = None if number: if int(number) > len(feed.entries) or 1 > int(number): event.addresponse(u"That's old news dude") return article = feed.entries[int(number) - 1] else: pattern = re.compile(pattern, re.I) for entry in feed.entries: if pattern.search(entry.title): article = entry break if not article: event.addresponse(u'Are you making up news again?') return if 'summary' in article: summary = html2text_file(article.summary, None) else: if article.content[0].type in \ ('application/xhtml+xml', 'text/html'): summary = html2text_file(article.content[0].value, None) else: summary = article.content[0].value event.addresponse( u'"%(title)s" %(link)s : %(summary)s', { 'title': html2text_file(article.title, 
None).strip(), 'link': article.link, 'summary': summary, }) last_seen = {} @periodic(config_key='interval') def poll(self, event): feeds = event.session.query(Feed) \ .filter(Feed.source != None) \ .filter(Feed.target != None).all() for feed in feeds: try: feed.update(max_age=self.interval) except Exception, e: if isinstance(e, URLError): log.warning( u'Exception "%s" occured while polling ' u'feed %s from %s', e, feed, feed.url) else: log.exception( u'Exception "%s" occured while polling ' u'feed %s from %s', e, feed, feed.url) continue if not feed.entries: continue if feed.name not in self.last_seen: seen = {} for entry in feed.entries: id = entry.get('id', entry.title) seen[id] = entry.updated_parsed self.last_seen[feed.name] = seen continue old_seen = self.last_seen[feed.name] seen = {} for entry in reversed(feed.entries): id = entry.get('id', entry.title) seen[id] = entry.updated_parsed if entry.updated_parsed != old_seen.get(id): event.addresponse( u"%(status)s item in %(feed)s: %(title)s", { 'status': id in old_seen and u'Updated' or u'New', 'feed': feed.name, 'title': entry.title, }, source=feed.source, target=feed.target, adress=False) self.last_seen[feed.name] = seen
class Dict(Processor):
    """Dictionary definitions and spelling suggestions via a DICT server."""

    usage = u"""spell <word> [using <strategy>]
    define <word> [using <dictionary>]
    (dictionaries|strategies)
    (dictionary|strategy) <name>"""
    feature = ('dict',)

    server = Option('server', 'Dictionary server hostname', 'localhost')
    port = IntOption('port', 'Dictionary server port number', 2628)

    @staticmethod
    def reduce_suggestions(suggestions):
        "Remove duplicate suggestions and suffixes"
        output = []
        for s in suggestions:
            s = s.getword()
            if not s.startswith('-') and s not in output:
                output.append(s)
        return output

    @match(r'^(?:define|dict)\s+(.+?)(?:\s+using\s+(.+))?$')
    def define(self, event, word, dictionary):
        connection = Connection(self.server, self.port)
        # '*' means search all dictionaries.
        dictionary = dictionary is None and '*' or dictionary.lower()
        dictionaries = connection.getdbdescs().keys()
        if dictionary != '*' and dictionary not in dictionaries:
            event.addresponse(
                u"I'm afraid I don't have a dictionary of that name. I know about: %s",
                human_join(sorted(dictionaries)))
            return
        definitions = connection.define(dictionary, word.encode('utf-8'))
        if definitions:
            event.addresponse(u', '.join(d.getdefstr() for d in definitions))
        else:
            # No definition: offer Levenshtein-based spelling suggestions.
            suggestions = connection.match(dictionary, 'lev',
                                           word.encode('utf-8'))
            if suggestions:
                event.addresponse(
                    u"I don't know about %(word)s. Maybe you meant %(suggestions)s?", {
                        'word': word,
                        'suggestions': human_join(
                            self.reduce_suggestions(suggestions),
                            conjunction=u'or'),
                    })
            else:
                event.addresponse(
                    u"I don't have a definition for that. Is it even a word?")

    @match(r'^spell\s+(.+?)(?:\s+using\s+(.+))?$')
    def handle_spell(self, event, word, strategy):
        connection = Connection(self.server, self.port)
        word = word.encode('utf-8')
        strategies = connection.getstratdescs().keys()
        if connection.match('*', 'exact', word):
            event.addresponse(choice((
                u'That seems correct. Carry on',
                u'Looks good to me',
                u"Yup, that's a word all right",
                u'Yes, you *can* spell',
            )))
            return
        strategy = strategy is None and 'lev' or strategy.lower()
        if strategy not in strategies:
            event.addresponse(
                u"I'm afraid I don't know about such a strategy. I know about: %s",
                human_join(sorted(strategies)))
            # BUG FIX: previously fell through and queried the server with
            # the unknown strategy; bail out like define() does.
            return
        suggestions = connection.match('*', strategy, word)
        if suggestions:
            event.addresponse(u'Suggestions: %s', human_join(
                self.reduce_suggestions(suggestions), conjunction=u'or'))
        else:
            event.addresponse(
                u"That doesn't seem correct, but I can't find anything to suggest")

    @match(r'^dictionaries$')
    def handle_dictionaries(self, event):
        connection = Connection(self.server, self.port)
        dictionaries = connection.getdbdescs()
        event.addresponse(u'My Dictionaries: %s',
                          human_join(sorted(dictionaries.keys())))

    @match(r'^strater?gies$')
    def handle_strategies(self, event):
        connection = Connection(self.server, self.port)
        strategies = connection.getstratdescs()
        event.addresponse(u'My Strategies: %s',
                          human_join(sorted(strategies.keys())))

    @match(r'^dictionary\s+(.+?)$')
    def handle_dictionary(self, event, dictionary):
        connection = Connection(self.server, self.port)
        dictionaries = connection.getdbdescs()
        dictionary = dictionary.lower()
        if dictionary in dictionaries:
            event.addresponse(unicode(dictionaries[dictionary]))
        else:
            event.addresponse(u"I don't have that dictionary")

    @match(r'^strater?gy\s+(.+?)$')
    def handle_strategy(self, event, strategy):
        connection = Connection(self.server, self.port)
        strategies = connection.getstratdescs()
        strategy = strategy.lower()
        if strategy in strategies:
            event.addresponse(unicode(strategies[strategy]))
        else:
            event.addresponse(u"I don't have that strategy")
class Translate(Processor):
    """Translate text or web pages via the Google Translate service.

    NOTE(review): this class appears truncated in this view — translate()
    calls self._translate and the usage string advertises a 'translation
    chain' command, neither of which is visible here.
    """
    usage = u"""translate (<phrase>|<url>) [from <language>] [to <language>]
    translation chain <phrase> [from <language>] [to <language>]"""
    feature = ('translate',)

    api_key = Option('api_key', 'Your Google API Key (optional)', None)
    referer = Option('referer', 'The referer string to use (API searches)',
                     default_referer)
    dest_lang = Option('dest_lang',
                       'Destination language when none is specified', 'english')
    chain_length = IntOption('chain_length',
                             'Maximum length of translation chains', 10)

    # Canonical language name -> Google language code.
    lang_names = {'afrikaans':'af', 'albanian':'sq', 'arabic':'ar',
            'belarusian':'be', 'bulgarian':'bg', 'catalan':'ca',
            'chinese':'zh', 'chinese simplified':'zh-cn',
            'chinese traditional':'zh-tw', 'croatian':'hr', 'czech':'cs',
            'danish':'da', 'dutch':'nl', 'english':'en', 'estonian':'et',
            'filipino':'tl', 'finnish':'fi', 'french':'fr', 'galacian':'gl',
            'german':'de', 'greek':'el', 'hebrew':'iw', 'hindi':'hi',
            'hungarian':'hu', 'icelandic':'is', 'indonesian':'id',
            'irish':'ga', 'italian':'it', 'japanese':'ja', 'korean': 'ko',
            'latvian':'lv', 'lithuanian':'lt', 'macedonian':'mk',
            'malay':'ms', 'maltese':'mt', 'norwegian':'no', 'persian':'fa',
            'polish':'pl', 'portuguese':'pt', 'romanian':'ro',
            'russian': 'ru', 'serbian':'sr', 'slovak':'sk', 'slovenian':'sl',
            'spanish':'es', 'swahili':'sw', 'swedish':'sv', 'thai':'th',
            'turkish':'tr', 'ukrainian':'uk', 'uzbek': 'uz',
            'vietnamese':'vi', 'welsh':'cy', 'yiddish': 'yi',
            'haitian creole': 'ht'}

    # Aliases and alternate spellings accepted in addition to lang_names.
    alt_lang_names = {'simplified':'zh-CN', 'simplified chinese':'zh-CN',
            'traditional':'zh-TW', 'traditional chinese':'zh-TW',
            'bokmal':'no', 'norwegian bokmal':'no',
            u'bokm\N{LATIN SMALL LETTER A WITH RING ABOVE}l':'no',
            u'norwegian bokm\N{LATIN SMALL LETTER A WITH RING ABOVE}l': 'no',
            'farsi': 'fa', 'haitian': 'ht', 'kreyol': 'ht'}

    # Alternation of every accepted language name and code, interpolated
    # into the @match patterns below at class-definition time.
    LANG_REGEX = '|'.join(lang_names.keys() + lang_names.values()
                          + alt_lang_names.keys())

    @match(r'^(?:translation\s*)?languages$')
    def languages (self, event):
        """List the languages that can be translated between."""
        event.addresponse(human_join(sorted(self.lang_names.keys())))

    @match(r'^translate\s+(.*?)(?:\s+from\s+(' + LANG_REGEX + r'))?'
           r'(?:\s+(?:in)?to\s+(' + LANG_REGEX + r'))?$')
    def translate (self, event, text, src_lang, dest_lang):
        """Translate a phrase, or link to a translated version of a URL.

        src_lang/dest_lang are optional human names or codes; destination
        defaults to the configured dest_lang, source to auto-detect ('').
        """
        dest_lang = self.language_code(dest_lang or self.dest_lang)
        src_lang = self.language_code(src_lang or '')

        if is_url(text):
            # URLs are not translated inline; respond with a
            # translate.google.com wrapper link instead.
            if urlparse(text).scheme in ('', 'http'):
                url = url_to_bytestring(text)
                query = {'sl': src_lang, 'tl': dest_lang, 'u': url}
                event.addresponse(u'http://translate.google.com/translate?'
                                  + urlencode(query))
            else:
                event.addresponse(u'I can only translate HTTP pages')
            return

        try:
            # _translate is defined elsewhere in this class (not visible
            # here); index [0] presumably drops detected-language metadata
            # — TODO confirm.
            translated = self._translate(event, text, src_lang, dest_lang)[0]
            event.addresponse(translated)
        except TranslationException, e:
            event.addresponse(u"I couldn't translate that: %s.", unicode(e))
class Retrieve(Processor):
    """Fetch and display articles from configured RSS/Atom feeds, and
    periodically poll feeds for announcement to channels.

    NOTE(review): poll() is visibly truncated in this view — its trailing
    ``finally:`` block is cut off.
    """
    usage = u"""latest [ <count> ] ( articles | headlines ) from <name> [ starting at <number> ]
    article ( <number> | /<pattern>/ ) from <name>"""
    features = ('feeds',)

    interval = IntOption('interval',
                         'Feed Poll interval (in seconds)', 300)
    max_interval = IntOption('max_interval',
            'Maximum feed poll interval for broken feeds (in seconds)', 86400)
    backoff_ratio = FloatOption('backoff',
            'The slowdown ratio to back off from broken feeds', 2.0)

    @match(r'^(?:latest|last)\s+(?:(\d+)\s+)?(article|headline)(s)?\s+from\s+(.+?)'
           r'(?:\s+start(?:ing)?\s+(?:at\s+|from\s+)?(\d+))?$')
    def list(self, event, number, full, plurality, name, start):
        """List the latest articles (with summaries) or headlines of a feed.

        number defaults to 1 for the singular form, 10 for plural;
        start is a 0-based offset into the feed.
        """
        full = full == 'article'
        if number:
            number = int(number)
        elif not plurality:
            number = 1
        else:
            number = 10
        start = start and int(start) or 0

        feed = event.session.query(Feed).filter_by(name=name).first()
        if not feed:
            event.addresponse(u"I don't know about the %s feed", name)
            return

        feed.update()
        if not feed.entries:
            event.addresponse(u"I can't find any articles in that feed")
            return

        articles = feed.entries[start:number+start]
        entries = []
        for article in articles:
            if full:
                # Prefer the summary; fall back to the first content
                # element, converting HTML to text where applicable.
                if 'summary' in article:
                    summary = html2text_file(article.summary, None)
                else:
                    if article.content[0].type in \
                            ('application/xhtml+xml', 'text/html'):
                        summary = html2text_file(article.content[0].value,
                                                 None)
                    else:
                        summary = article.content[0].value
                # NOTE(review): numbers here index into the slice (restart
                # at 1 even with a start offset), while the headline branch
                # below numbers by position in the whole feed — inconsistent
                # when 'starting at' is used; confirm intent.
                entries.append(u'%(number)s: "%(title)s"%(link)s : %(summary)s' % {
                    'number': articles.index(article) + 1,
                    'title': html2text_file(article.title, None).strip(),
                    'link': get_link(article),
                    'summary': summary,
                })
            else:
                entries.append(u'%s: "%s"' % (
                    feed.entries.index(article) + 1,
                    html2text_file(article.title, None).strip()))
        event.addresponse(u', '.join(entries))

    @match(r'^article\s+(?:(\d+)|/(.+?)/)\s+from\s+(.+?)$')
    def article(self, event, number, pattern, name):
        """Show one article, selected by 1-based number or /regex/ on title."""
        feed = event.session.query(Feed).filter_by(name=name).first()
        if not feed:
            event.addresponse(u"I don't know about the %s feed", name)
            return

        feed.update()
        if not feed.entries:
            event.addresponse(u"I can't find any articles in that feed")
            return

        article = None
        if number:
            if int(number) > len(feed.entries) or 1 > int(number):
                event.addresponse(u"That's old news dude")
                return
            article = feed.entries[int(number) - 1]
        else:
            # Regex selection: first title that matches, case-insensitive.
            pattern = re.compile(pattern, re.I)
            for entry in feed.entries:
                if pattern.search(entry.title):
                    article = entry
                    break
            if not article:
                event.addresponse(u'Are you making up news again?')
                return

        if 'summary' in article:
            summary = html2text_file(article.summary, None)
        else:
            if article.content[0].type in \
                    ('application/xhtml+xml', 'text/html'):
                summary = html2text_file(article.content[0].value, None)
            else:
                summary = article.content[0].value

        event.addresponse(u'"%(title)s"%(link)s : %(summary)s', {
            'title': html2text_file(article.title, None).strip(),
            'link': get_link(article),
            'summary': summary,
        })

    # Per-feed state for the poller (shared class attribute).
    last_seen = {}

    @periodic(config_key='interval')
    def poll(self, event):
        """Poll all feeds with a source+target, backing off broken feeds.

        broken_feeds maps feed name -> (last exception, current interval,
        time since last fetch); intervals grow by backoff() up to
        max_interval. Truncated below: the finally: body is not visible
        in this view.
        """
        feeds = event.session.query(Feed) \
                .filter(Feed.source != None) \
                .filter(Feed.target != None).all()

        for feed in feeds:
            broken_lock.acquire()
            try:
                if feed.name in broken_feeds:
                    last_exc, interval, time_since_fetch = \
                            broken_feeds[feed.name]
                    time_since_fetch += self.interval
                    if time_since_fetch < interval:
                        # Still inside the back-off window; record the
                        # elapsed time and skip this feed.
                        broken_feeds[feed.name] = \
                                last_exc, interval, time_since_fetch
                        continue
                else:
                    last_exc = None
                    interval = time_since_fetch = self.interval

                try:
                    feed.update(max_age=time_since_fetch)
                except Exception, e:
                    # Only log when the exception type changed, to avoid
                    # repeating the same failure every poll.
                    if type(e) != type(last_exc):
                        if isinstance(e, URLError):
                            log.warning(u'Exception "%s" occured while polling '
                                        u'feed %s from %s', e, feed, feed.url)
                        else:
                            log.exception(u'Exception "%s" occured while polling '
                                          u'feed %s from %s', e, feed, feed.url)
                    broken_feeds[feed.name] = e, self.backoff(interval), 0
                    continue
                else:
                    # Fetch succeeded; the feed is no longer broken.
                    if feed.name in broken_feeds:
                        del broken_feeds[feed.name]
            finally:
class HTTP(Processor): usage = u"""(get|head) <url> is <domain> (up|down) tell me when <domain|url> is up""" features = ('http',) priority = -10 max_size = IntOption('max_size', u'Only request this many bytes', 2048) timeout = IntOption('timeout', u'Timeout for HTTP connections in seconds', 15) sites = DictOption('sites', u'Mapping of site names to domains', {}) redirect_limit = IntOption('redirect_limit', u'Maximum number of http redirects to follow', 5) whensitup_delay = IntOption('whensitup_delay', u'Initial delay between whensitup attempts in seconds', 60) whensitup_factor = FloatOption('whensitup_factor', u'Factor to mutliply subsequent delays by for whensitup', 1.03) whensitup_maxdelay = IntOption('whensitup_maxdelay', u'Maximum delay between whensitup attempts in seconds', 30*60) whensitup_maxperiod = FloatOption('whensitup_maxperiod', u'Maximum period after which to stop checking the url ' u'for whensitup in hours', 72) def _get_header(self, headers, name): for header in headers: if header[0] == name: return header[1] return None @match(r'^(get|head)\s+(\S+)$') def get(self, event, action, url): try: status, reason, data, headers = self._request(self._makeurl(url), action.upper()) reply = u'%s %s' % (status, reason) hops = 0 while 300 <= status < 400 and self._get_header(headers, 'location'): location = self._get_header(headers, 'location') if not location: break status, reason, data, headers = self._request(location, 'GET') if hops >= self.redirect_limit: reply += u' to %s' % location break hops += 1 reply += u' to %(location)s, which gets a ' \ u'%(status)d %(reason)s' % { u'location': location, u'status': status, u'reason': reason, } if action.upper() == 'GET': got_title = False content_type = self._get_header(headers, 'content-type') if content_type and (content_type.startswith('text/html') or content_type.startswith('application/xhtml+xml')): match = re.search(r'<title>(.*)<\/title>', data, re.I | re.DOTALL) if match: got_title = True reply += u' "%s"' 
% match.groups()[0].strip() if not got_title and content_type: reply += u' ' + content_type event.addresponse(reply) except HTTPException, e: event.addresponse(unicode(e))
class MemoryLog(Processor):
    """Debugging aid: periodically log process memory usage and object
    statistics to rotating log files. Disabled unless explicitly loaded
    and an interval is configured.
    """
    feature = ('memory', )
    autoload = False

    mem_filename = Option('mem_filename', 'Memory log filename',
                          'logs/memory.log')
    mem_interval = IntOption('mem_interval',
            'Interval between memory stat logging', 0)
    obj_filename = Option('obj_filename', 'Object Statistics log filename',
                          'logs/objstats.log')
    obj_interval = IntOption('obj_interval',
            'Interval between logging object statistics', 0)

    def setup(self):
        """Rotate old logs (gzipped, keeping 10 generations) and open
        fresh log files for whichever logs are enabled."""
        fns = []
        if self.mem_interval:
            fns.append(self.mem_filename)
        if self.obj_interval:
            fns.append(self.obj_filename)

        for filename in fns:
            # Drop the oldest generation, shift .9.gz..1.gz up by one.
            if os.path.isfile(filename + '.10.gz'):
                os.remove(filename + '.10.gz')
            for i in range(9, 0, -1):
                if os.path.isfile('%s.%i.gz' % (filename, i)):
                    os.rename('%s.%i.gz' % (filename, i),
                              '%s.%i.gz' % (filename, i + 1))
            # Compress the current log into .1.gz, preserving its mtime.
            if os.path.isfile(filename):
                o = gzip.open(filename + '.1.gz', 'wb')
                i = open(filename, 'rb')
                o.write(i.read())
                o.close()
                i.close()
                stat = os.stat(filename)
                os.utime(filename + '.1.gz', (stat.st_atime, stat.st_mtime))

        if self.mem_interval:
            self.mem_file = file(self.mem_filename, 'w+')
            self.mem_file.write('Ibid Memory Log v2: %s\n'
                                % ibid.config['botname'])
            self.mem_csv = csv.writer(self.mem_file)
            self.mem_last = datetime.utcnow()

        if self.obj_interval:
            self.obj_file = file(self.obj_filename, 'w+')
            self.obj_file.write('Ibid Object Log v1: %s\n'
                                % ibid.config['botname'])
            self.obj_last = datetime.utcnow()

    def process(self, event):
        """On every event, emit a log record if the configured interval
        has elapsed since the last one."""
        if self.mem_interval and event.time - self.mem_last >= \
                timedelta(seconds=self.mem_interval):
            self.mem_log()
            self.mem_last = event.time
        if self.obj_interval and event.time - self.obj_last >= \
                timedelta(seconds=self.obj_interval):
            self.obj_log()
            self.obj_last = event.time

    def mem_log(self):
        """Append one CSV row: timestamp, GC object count, VmSize, VmRSS."""
        status = get_memusage()
        # Collect first so the object count reflects live objects only.
        gc.collect()
        self.mem_csv.writerow((
            datetime.utcnow().isoformat(),
            len(gc.get_objects()),
            status['VmSize'],
            status['VmRSS'],
        ))
        self.mem_file.flush()

    def obj_log(self):
        """Append one line: timestamp + JSON dump of objgraph type stats."""
        self.obj_file.write('%s %s\n' % (datetime.utcnow().isoformat(),
                                         json.dumps(objgraph.typestats())))
        self.obj_file.flush()
class SourceFactory(protocol.ReconnectingClientFactory, IbidSourceFactory):
    """IRC source: a Twisted reconnecting client factory for one IRC
    server connection."""
    protocol = Ircbot

    auth = ('hostmask', 'nickserv')
    supports = ('action', 'notice', 'topic', 'channel key')

    port = IntOption('port', 'Server port number', 6667)
    ssl = BoolOption('ssl', 'Use SSL', False)
    server = Option('server', 'Server hostname')
    nick = Option('nick', 'IRC nick', ibid.config['botname'])
    realname = Option('realname', 'Full Name', ibid.config['botname'])
    password = Option('password', 'Connection password', None)
    username = Option('username', 'Local username', None)
    modes = Option('modes', 'User modes to set')
    channels = ListOption('channels', 'Channels to autojoin', [])
    ping_interval = FloatOption('ping_interval',
            'Seconds idle before sending a PING', 60)
    pong_timeout = FloatOption('pong_timeout',
            'Seconds to wait for PONG', 300)
    # ReconnectingClient uses this:
    maxDelay = IntOption('max_delay',
            'Max seconds to wait inbetween reconnects', 900)
    factor = FloatOption('delay_factor',
            'Factor to multiply delay inbetween reconnects by', 2)

    def __init__(self, name):
        IbidSourceFactory.__init__(self, name)
        # NickServ auth results keyed by ticket number; see auth_nickserv.
        self._auth = {}
        self._auth_ticket = 0
        self._auth_ticket_lock = Lock()
        self.log = logging.getLogger('source.%s' % self.name)

    def setServiceParent(self, service):
        """Attach to a Twisted service (or connect directly if service
        is None), over SSL or TCP as configured."""
        if self.ssl:
            sslctx = ssl.ClientContextFactory()
            if service:
                internet.SSLClient(self.server, self.port, self, sslctx
                        ).setServiceParent(service)
            else:
                reactor.connectSSL(self.server, self.port, self, sslctx)
        else:
            if service:
                internet.TCPClient(self.server, self.port, self
                        ).setServiceParent(service)
            else:
                reactor.connectTCP(self.server, self.port, self)

    def connect(self):
        return self.setServiceParent(None)

    def disconnect(self):
        # Stop the reconnection machinery before dropping the transport,
        # so we don't immediately reconnect.
        self.stopTrying()
        self.stopFactory()
        if hasattr(self, 'proto'):
            self.proto.transport.loseConnection()
        return True

    def join(self, channel, key=None):
        return self.proto.join(channel, key)

    def leave(self, channel):
        return self.proto.leave(channel)

    def change_nick(self, nick):
        return self.proto.setNick(nick.encode('utf-8'))

    def send(self, response):
        return self.proto.send(response)

    def logging_name(self, identity):
        """Strip the hostmask, leaving just the nick, for log filenames."""
        if identity is None:
            return u''
        return identity.split(u'!')[0]

    def truncation_point(self, response, event=None):
        """Longest message body (in bytes) that fits in one IRC line for
        this response's target and command."""
        target = response['target'].split('!')[0]
        raw_target = target.encode('utf-8')

        if hasattr(self.proto, 'hostmask'):
            hostmask_len = len(self.proto.hostmask)
        else:
            # Connection not fully up yet; assume a generous hostmask.
            hostmask_len = 50

        # max = 512 - len(':' + hostmask + ' ' + command + ' '
        #                 + target + ' :\r\n')
        cmds = {'notice': len('NOTICE'), 'topic': len('TOPIC'),
                'action': len('PRIVMSG\001ACTION \001')}
        for cmd, command_len in cmds.items():
            if response.get(cmd, False):
                break
        else:
            command_len = len('PRIVMSG')
        return 505 - command_len - len(raw_target) - hostmask_len

    def url(self):
        return u'irc://%s@%s:%s' % (self.nick, self.server, self.port)

    def auth_hostmask(self, event, credential=None):
        """True if the sender's connection string matches any stored
        hostmask credential (glob patterns) for this account."""
        for credential in event.session.query(Credential) \
                .filter_by(method=u'hostmask', account_id=event.account) \
                .filter(or_(Credential.source == event.source,
                            Credential.source == None)) \
                .all():
            if fnmatch(event.sender['connection'], credential.credential):
                return True

    def auth_nickserv(self, event, credential):
        """Ask NickServ (via the protocol, on the reactor thread) whether
        the sender is identified; block this plugin thread for the answer.
        """
        # Allocate a unique ticket so concurrent auth requests don't
        # clobber each other's results.
        self._auth_ticket_lock.acquire()
        self._auth_ticket += 1
        ticket = self._auth_ticket
        self._auth_ticket_lock.release()

        def callback(result):
            self._auth[ticket] = result

        reactor.callFromThread(self.proto.authenticate,
                               event.sender['nick'], callback)

        # We block in the plugin thread for up to this long, waiting for
        # NickServ to reply
        wait = 15
        for i in xrange(wait * 10):
            if ticket in self._auth:
                break
            sleep(0.1)

        if ticket in self._auth:
            result = self._auth[ticket]
            del self._auth[ticket]
            return result
class BuildBot(Processor, RPC):
    """Bridge to a BuildBot master: trigger rebuilds from chat and
    announce build results, via Twisted Perspective Broker."""
    usage = u'rebuild <branch> [ (revision|r) <number> ]'
    feature = ('buildbot', )
    autoload = False

    server = Option('server', 'Buildbot server hostname', 'localhost')
    status_port = IntOption('status_port', 'Buildbot server port number', 9988)
    change_port = IntOption('change_port', 'Buildbot server port number', 9989)
    source = Option('source', 'Source to send commit notifications to')
    channel = Option('channel', 'Channel to send commit notifications to')

    def __init__(self, name):
        Processor.__init__(self, name)
        RPC.__init__(self)

    def setup(self):
        """Connect to the master's status and change ports and subscribe
        to build events. The default buildbot client credentials are used
        — TODO confirm they match the master's configuration."""
        self.status = pb.PBClientFactory()
        reactor.connectTCP(self.server, self.status_port, self.status)
        d = self.status.login(
            credentials.UsernamePassword('statusClient', 'clientpw'))
        d.addCallback(self.store_root, 'status')
        d.addCallback(
            lambda root: root.callRemote('subscribe', 'builds', 0, self))
        d.addErrback(self.exception)

        self.change = pb.PBClientFactory()
        reactor.connectTCP(self.server, self.change_port, self.change)
        d = self.change.login(
            credentials.UsernamePassword('change', 'changepw'))
        d.addCallback(self.store_root, 'change')
        d.addErrback(self.exception)

    def remote_built(self, branch, revision, person, result):
        """Called remotely by the master; announce a finished build to the
        configured source/channel."""
        reply = u"Build %s of %s triggered by %s: %s" % (
                revision, branch, person, result)
        ibid.dispatcher.send({'reply': reply,
                              'source': self.source,
                              'target': self.channel})
        return True

    @match(r'^(?:re)?build\s+(.+?)(?:\s+(?:revision|r)?\s*(\d+))?$')
    def build(self, event, branch, revision):
        """Submit a synthetic change to the master to trigger a rebuild of
        branch (at revision, or -1 for latest)."""
        change = {
            'who': str(event.sender['nick']),
            'branch': str(branch),
            'files': [None],
            'revision': revision and str(revision) or '-1',
            'comments': 'Rebuild',
        }
        d = self.change_root.callRemote('addChange', change)
        d.addCallback(self.respond, event, True)
        d.addErrback(self.respond, event, False)
        event.processed = True

    def respond(self, rpc_response, event, result):
        # Callback/errback for addChange: report success or failure back
        # to the requesting channel.
        ibid.dispatcher.send({
            'reply': result and 'Okay'
                     or u"buildbot doesn't want to build :-(",
            'source': event.source,
            'target': event.channel})

    def store_root(self, root, type):
        # Saves the PB root object as self.status_root / self.change_root.
        setattr(self, '%s_root' % type, root)
        return root

    def exception(self, exception):
        print exception
        raise exception

    # Remaining remote_* methods satisfy the status-client interface the
    # master calls back into; most events are ignored.
    def remote_buildsetSubmitted(self, buildset):
        pass

    def remote_builderAdded(self, builderName, builder):
        pass

    def remote_builderChangedState(self, builderName, state, foo):
        pass

    def remote_buildStarted(self, builderName, build):
        print "Build %s started on %s" % (builderName, build)

    def remote_buildETAUpdate(self, build, ETA):
        pass

    def remote_stepStarted(self, build, step):
        pass

    def remote_stepTextChanged(self, build, step, text):
        pass

    def remote_stepText2Changed(self, build, step, text2):
        pass

    def remote_stepETAUpdate(self, build, step, ETA, expectations):
        pass

    def remote_logStarted(self, build, step, log):
        pass

    def remote_logChunk(self, build, step, log, channel, text):
        pass

    def remote_logFinished(self, build, step, log):
        pass

    def remote_stepFinished(self, build, step, results):
        pass

    def remote_buildFinished(self, builderName, build, results):
        print "Build %s finished on %s" % (builderName, build)

    def remote_builderRemoved(self, builderName):
        pass
class Set(Processor):
    """Track karma: adjust a subject's score when a message ends in a
    configured increase/decrease/neutral suffix."""
    usage = u'<subject>(++|--|==| ftw| ftl) [[reason]]'
    features = ('karma', )

    # Clashes with morse & math
    priority = 510
    permission = u'karma'

    increase = ListOption('increase',
            'Suffixes which indicate increased karma', ('++', ' ftw'))
    decrease = ListOption('decrease',
            'Suffixes which indicate decreased karma', ('--', ' ftl'))
    neutral = ListOption('neutral',
            'Suffixes which indicate neutral karma', ('==', ))
    reply = BoolOption('reply', 'Acknowledge karma changes', False)
    public = BoolOption('public', 'Only allow karma changes in public', True)
    ignore = ListOption('ignore', 'Karma subjects to silently ignore', ())
    importance = IntOption('importance',
            'Threshold for number of changes after'
            " which a karma won't be forgotten", 0)

    def setup(self):
        """Build the trigger regex from the configured suffix tokens and
        install it as this handler's match pattern."""
        # When not addressed, match karma changes in any text
        if self.addressed:
            matchpat = r'^(.+?)\s*(%s)\s*(?:[[{(]+\s*(.+?)\s*[\]})]+)?$'
        else:
            matchpat = r'(\S*\w\S*)(%s)(?:$|[\s,;\.\?!])'

        self.increase_reg = self.regex_tokens(self.increase)
        self.decrease_reg = self.regex_tokens(self.decrease)
        self.neutral_reg = self.regex_tokens(self.neutral)

        # Patch the compiled pattern onto the underlying function object,
        # where the dispatcher looks it up.
        self.set.im_func.pattern = re.compile(matchpat % '|'.join(
            self.increase_reg + self.decrease_reg + self.neutral_reg),
            re.I | re.UNICODE | re.DOTALL)

    def regex_tokens(self, tokens):
        """ Turn configured tokens into regex versions """
        return [re.escape(t).replace(r'\ ', r'\s+') for t in tokens]

    def match_operators(self, roperators, adjust):
        # True if adjust matches any of the token regexes.
        return any(re.match(r, adjust) for r in roperators)

    @handler
    @authorise(fallthrough=False)
    def set(self, event, subject, adjust, reason=None):
        """Apply a karma change to subject; reason defaults to the whole
        message."""
        if reason is None:
            reason = event['message']['clean']

        if self.public and not event.public:
            event.addresponse(u'Karma must be done in public')
            return

        if subject.lower() in self.ignore:
            return

        karma = event.session.query(Karma).filter_by(subject=subject).first()
        if not karma:
            karma = Karma(subject)

        if self.match_operators(self.increase_reg, adjust.lower()):
            # Self-karma is only blocked on increase (decreasing your own
            # karma is allowed).
            if subject.lower() == event.sender['nick'].lower():
                event.addresponse(u"You can't karma yourself!")
                return
            karma.changes += 1
            karma.value += 1
            change = u'Increased'
        elif self.match_operators(self.decrease_reg, adjust.lower()):
            karma.changes += 1
            karma.value -= 1
            change = u'Decreased'
        else:
            # Neutral (e.g. '==') counts as both an increase and a
            # decrease: two changes, no net value change.
            karma.changes += 2
            change = u'Increased and decreased'

        # Forget unimportant karma that has returned to zero.
        if karma.value == 0 and karma.changes <= self.importance:
            change = u'Forgotten (unimportant)'
            event.session.delete(karma)
        else:
            event.session.save_or_update(karma)
        event.session.commit()

        log.info(u"%s karma for '%s' by %s/%s (%s) because: %s",
                 change, subject, event.account, event.identity,
                 event.sender['connection'], reason)

        if self.reply:
            event.addresponse(
                u'%(subject)s now has %(value)s %(points)s of karma', {
                    'subject': subject,
                    'value': karma.value,
                    'points': plural(karma.value, "point", "points"),
                })
        else:
            event.processed = True
class Log(Processor): addressed = False processed = True event_types = (u'message', u'state', u'action', u'notice') priority = 1900 log = Option('log', 'Log file to log messages to. Can contain substitutions: source, channel, year, month, day', 'logs/%(year)d/%(month)02d/%(source)s/%(channel)s.log') timestamp_format = Option('timestamp_format', 'Format to substitute %(timestamp)s with', '%Y-%m-%d %H:%M:%S%z') date_utc = BoolOption('date_utc', 'Log with UTC timestamps', False) message_format = Option('message_format', 'Format string for messages', u'%(timestamp)s <%(sender_nick)s> %(message)s') action_format = Option('action_format', 'Format string for actions', u'%(timestamp)s * %(sender_nick)s %(message)s') notice_format = Option('notice_format', 'Format string for notices', u'%(timestamp)s -%(sender_nick)s- %(message)s') presence_format = Option('presence_format', 'Format string for presence events', u'%(timestamp)s %(sender_nick)s (%(sender_connection)s) is now %(state)s') rename_format = Option('rename_format', 'Format string for rename events', u'%(timestamp)s %(sender_nick)s (%(sender_connection)s) has renamed to %(new_nick)s') public_logs = ListOption('public_logs', u'List of source:channel globs for channels which should have public logs', []) public_mode = Option('public_mode', u'File Permissions mode for public channels, in octal', '644') private_mode = Option('private_mode', u'File Permissions mode for private chats, in octal', '640') dir_mode = Option('dir_mode', u'Directory Permissions mode, in octal', '755') fd_cache = IntOption('fd_cache', 'Number of log files to keep open.', 5) lock = Lock() logs = WeakValueDictionary() # Ensures that recently used FDs are still available in logs: recent_logs = [] def setup(self): sources = list(set(ibid.config.sources.keys()) | set(ibid.sources.keys())) for glob in self.public_logs: if u':' not in glob: log.warning(u"public_logs configuration values must follow the " u"format source:channel. 
\"%s\" doesn't contain a " u"colon.", glob) continue source_glob = glob.split(u':', 1)[0] if not fnmatch.filter(sources, source_glob): log.warning(u'public_logs includes "%s", but there is no ' u'configured source matching "%s"', glob, source_glob) def get_logfile(self, event): self.lock.acquire() try: when = event.time if not self.date_utc: when = when.replace(tzinfo=tzutc()).astimezone(tzlocal()) if event.channel is not None: channel = ibid.sources[event.source].logging_name(event.channel) else: channel = ibid.sources[event.source].logging_name(event.sender['id']) filename = self.log % { 'source': event.source.replace('/', '-'), 'channel': channel.replace('/', '-'), 'year': when.year, 'month': when.month, 'day': when.day, 'hour': when.hour, 'minute': when.minute, 'second': when.second, } filename = join(ibid.options['base'], expanduser(filename)) log = self.logs.get(filename, None) if log is None: try: makedirs(dirname(filename), int(self.dir_mode, 8)) except OSError, e: if e.errno != EEXIST: raise e log = open(filename, 'a') self.logs[filename] = log for glob in self.public_logs: if u':' not in glob: continue source_glob, channel_glob = glob.split(u':', 1) if (fnmatch.fnmatch(event.source, source_glob) and fnmatch.fnmatch(channel, channel_glob)): chmod(filename, int(self.public_mode, 8)) break else: chmod(filename, int(self.private_mode, 8)) else: