def _tweet(self, irc, msg, text, tweet=None):
    """Post *text* to Twitter, optionally as a reply to *tweet*.

    If *tweet* resolves to a status id and *text* does not already start
    with an @-mention, the replied-to user's screen name is prepended so
    Twitter threads the reply correctly.  Replies to IRC with the URL of
    the created status, or with a (German) error message on failure.
    """
    # Respect per-channel enable flag before doing anything.
    if not self._is_bot_enabled(msg, irc):
        return
    try:
        api = self._get_twitter_api(msg)
        if tweet:
            status_id = self._get_status_id(tweet)
            if status_id:
                if not text.startswith("@"):
                    # Prepend the original author's handle so the tweet is
                    # treated as a threaded reply by Twitter.
                    username = api.get_status(status_id).user.screen_name
                    text = "@{} {}".format(username, text)
                # 140 was the tweet length limit when this was written.
                message = utils.str.ellipsisify(text, 140)
                status = api.update_status(status=message,
                                           in_reply_to_status_id=status_id)
            else:
                irc.reply(
                    "Du musst mir schon einen Tweet geben, auf den sich der Unsinn beziehen soll."
                )
                return
        else:
            # Stand-alone tweet (no reply target).
            message = utils.str.ellipsisify(text, 140)
            status = api.update_status(status=message)
        irc.reply("https://twitter.com/{bot}/status/{status_id}".format(
            bot=self.registryValue("botNick", msg.args[0]),
            status_id=status.id))
    except tweepy.TweepError as e:
        log.error("Twitter.tweet: {}".format(repr(e)))
        irc.reply("Das hat nicht geklappt.")
def flevel(self, irc, msg, args, level):
    """
    Change your level

    Force-sets the caller's unit to *level* by granting just enough XP
    (one point past the threshold), then re-derives the level from XP and
    fires the level-up hook and an announcement.
    """
    dungeon = self.SpiffyWorld.get_dungeon_by_channel(GAME_CHANNEL)
    if dungeon is not None:
        user_id = self._get_user_id(irc, msg.prefix)
        unit = dungeon.get_unit_by_user_id(user_id)
        if unit is not None:
            int_level = int(level)
            # +1 so the XP is strictly above the threshold for the level.
            xp_for_level = self.unit_level.get_xp_for_level(int_level) + 1
            log.info("SpiffyRPG: setting xp for %s to %s (level %s)" %
                     (unit.get_name(), xp_for_level, int_level))
            unit.experience = xp_for_level
            # Derive the level from XP rather than trusting the input.
            unit.level = self.unit_level.get_level_by_xp(unit.experience)
            unit.on_unit_level()
            dungeon.announcer.unit_info(unit=unit, dungeon=dungeon, irc=irc)
    else:
        log.error("SpiffyRPG: could not find dungeon %s" % msg.args[0])
def sendEmail(self, irc, suser, duser, message):
    """Deliver *message* from *suser* to *duser* via Google Talk (XMPP).

    The target's XMPP address is looked up from the 'Users' section of
    xmpp.conf via an alias.  Return codes:
      0 - message sent
      1 - no alias/address configured for *duser*
      2 - could not connect to the XMPP server
      3 - authentication failed
    """
    config = ConfigParser.ConfigParser()
    config.read(os.path.join(conf.supybot.directories.conf(), 'xmpp.conf'))
    # log.info(str(user))
    if not config.has_section('Users'):
        config.add_section('Users')
    alias = self.aliasExists(duser, config)
    # log.info('Alias %s exists. Owner: %s' % (duser,alias))
    if alias:
        email = config.get('Users', alias)
    else:
        email = None
    if email is not None:
        # Config value may carry extra tokens; only the first is the JID.
        email = email.split(' ')[0]
        #subprocess.Popen(['python', '/.private/xmppScript/xmpp.py', '-t', email, '-m', message])
        # REPLACE - xmpp email id
        jid = xmpp.protocol.JID(self.registryValue('auth.username'))
        cl = xmpp.Client(jid.getDomain(), debug=[])
        connection = cl.connect(("talk.google.com", 5222))
        if connection:
            # REPLACE - xmpp password
            auth = cl.auth(jid.getNode(), self.registryValue('auth.password'),
                           resource=jid.getResource())
            if auth:
                # NOTE: 'id' shadows the builtin; kept for byte-compatibility.
                id = cl.send(xmpp.protocol.Message(email, message))
                cl.disconnect()
                log.info('%s successfully sent a message to %s: %s' %
                         (suser, duser, message))
                return 0
            else:
                log.error('XMPP: failed auth')
                return 3
        else:
            log.error('XMPP: could not connect')
            return 2
    else:
        return 1
def startServer():
    """Starts the HTTP server.  Shouldn't be called from other modules.

    Binds one SupyHTTPServer per configured non-empty IPv4/IPv6 host and
    serves each in its own thread; successfully started servers are
    collected in the module-level ``http_servers`` list.
    """
    global http_servers
    # Empty host strings are unset placeholders, not bindable addresses.
    addresses4 = [(4, (x, configGroup.port()))
                  for x in configGroup.hosts4() if x != '']
    addresses6 = [(6, (x, configGroup.port()))
                  for x in configGroup.hosts6() if x != '']
    http_servers = []
    for protocol, address in (addresses4 + addresses6):
        try:
            server = SupyHTTPServer(address, protocol, SupyHTTPRequestHandler)
        except OSError as e:
            # Bug fix: the format string had only two %s placeholders for
            # three arguments, so the exception text never reached the log.
            log.error(
                'Failed to start HTTP server with protocol %s at address '
                '%s: %s',
                protocol, address, e)
            if e.args[0] == 98:  # EADDRINUSE
                log.error(
                    'This means the port (and address) is already in use by an '
                    'other process. Either find the process using the port '
                    'and stop it, or change the port configured in '
                    'supybot.servers.http.port.')
            continue
        except Exception:
            # Bug fix: added the missing %s for the address argument, and
            # narrowed the bare except to Exception.
            log.exception(
                "Failed to start HTTP server with protocol %s at address %s",
                protocol, address)
            continue
        Thread(target=server.serve_forever, name='HTTP Server').start()
        http_servers.append(server)
        log.info('Starting HTTP server: %s' % str(server))
def raisedead(self, irc, msg, args, user, target):
    """
    Attempts to raise your target from the dead

    Looks up a dead unit named *target* in the current dungeon, applies
    the undead effect to it, announces the ritual, and records the unit
    on the caster's raised-unit list.
    """
    user_id = self._get_user_id(irc, msg.prefix)
    dungeon = self.SpiffyWorld.get_dungeon_by_channel(GAME_CHANNEL)
    if dungeon is not None:
        player = dungeon.get_unit_by_user_id(user_id)
        unit = dungeon.get_dead_unit_by_name(target)
        if unit is not None:
            undead_effect = self.SpiffyWorld.effects_collection.get_effect_undead()
            unit.apply_effect(undead_effect)
            dungeon.announcer.effect_raise_dead(player=player, unit=unit)
            dungeon.announcer.unit_info(unit=unit, dungeon=dungeon, irc=irc)
            player.add_raised_unit(unit=unit)
        else:
            # No dead unit by that name — flavor-text error to the caller.
            irc.error(
                "You attempt to perform the ritual, but something seems amiss.")
    else:
        log.error("Trying to raise dead but there is no dungeon")
def inventory(self, irc, msg, args, user):
    """
    Items in your inventory.

    Sends the caller's inventory via a private PlayerAnnouncer addressed
    to their nick.  Unregistered callers fall through with user_id None
    and get the flavor-text error below.
    """
    user_id = None
    try:
        user_id = self._get_user_id(irc, msg.prefix)
    except KeyError:
        # Unregistered hostmask; continue so the lookup below fails softly.
        log.error("SpiffyRPG: error getting user id for %s" % msg.prefix)
    dungeon = self.SpiffyWorld.get_dungeon_by_channel(GAME_CHANNEL)
    if dungeon is not None:
        player = dungeon.get_unit_by_user_id(user_id)
        if player is not None:
            # Inventory is whispered to the user, not shown in-channel.
            announcer = PlayerAnnouncer(irc=irc,
                                        destination=msg.nick,
                                        ircutils=ircutils,
                                        ircmsgs=ircmsgs)
            announcer.inventory(player=player, irc=irc)
        else:
            irc.error("Your bags explode, blanketing you in flames!")
def watch(self, irc, msg, args, username):
    """\"<username>\" Start reporting on edits for the given user.

    Validates the username by fetching the user's OSM changeset feed,
    then adds it to the in-memory watch list and persists the list to
    watchedusers.txt (one name per line).
    """
    baseUrl = "http://osm.org"
    if not username:
        irc.error('You forgot to give me a username.')
        return
    quoted_uname = username
    quoted_uname = urllib.quote(quoted_uname)
    try:
        # Fetch only to verify the user exists; body is not parsed here.
        xml = urllib2.urlopen('%s/user/%s/edits/feed' % (baseUrl,
                                                         quoted_uname))
    except urllib2.HTTPError as e:
        response = "Username %s was not found." % (username)
        irc.reply(response.encode('utf-8'))
        return
    except Exception as e:
        irc.error("Could not parse the user's changeset feed.")
        log.error(traceback.format_exc(e))
        return
    if username in self.watch_users:
        response = "We are already watching %s" % (username)
    else:
        self.watch_users.append(username)
        # Rewrite the whole persistence file with the updated list.
        with open('watchedusers.txt', 'w') as f:
            f.write("\n".join(self.watch_users) + "\n")
        response = "We are now watching %s" % (username)
    irc.reply(response.encode('utf-8'))
def dosearch(self, query, channel):
    """Search YouTube's Data API v3 for *query*.

    Returns the videoId of the first matching video, or None when the
    request or response parsing fails.  Sort order and safe-search mode
    come from per-channel registry values.
    """
    apikey = self.registryValue("developerKey")
    safe_search = self.registryValue("safeSearch", channel)
    sort_order = self.registryValue("sortOrder", channel)
    video_id = None
    opts = {
        "q": query,
        "part": "snippet",
        "maxResults": "1",
        "order": sort_order,
        "key": apikey,
        "safeSearch": safe_search,
        "type": "video",
    }
    api_url = "https://www.googleapis.com/youtube/v3/search?{0}".format(
        utils.web.urlencode(opts)
    )
    request = None
    try:
        log.debug("YouTube: requesting %s" % (api_url))
        request = utils.web.getUrl(api_url).decode()
        response = json.loads(request)
        video_id = response["items"][0]["id"]["videoId"]
    except Exception as e:
        # Bug fix: the old handler did request.content.decode(), but
        # `request` is already a decoded str (no .content attribute) and
        # is unbound when getUrl itself raises — so the handler crashed.
        log.error(
            "YouTube: Error retrieving data from API: %s"
            % (request if request is not None else str(e))
        )
    return video_id
def find_geolocation(self) -> None:
    """
    Finds the location and coordinates and sets the class attributes
    according to user's query.

    Sets self.location, self.region and self.coordinates from the
    weatherstack API response.  Raises LocationNotFound when the API
    reports an error or no usable location/region/country is returned.
    """
    payload = {
        "access_key": os.getenv("WS_API_KEY"),
        "query": self.query,
    }
    # https requires the paid tier, so we use http here.
    response: requests.Response = requests.get(
        "http://api.weatherstack.com/current", params=payload)
    response.raise_for_status()
    res_data: Dict[str, Any] = response.json()
    # weatherstack returns HTTP 200 even for API-level errors.
    if response.status_code == 200 and "error" in res_data:
        log.error("geolocation: %s", res_data["error"]["info"])
        raise LocationNotFound("Unable to find this location.")
    res_location: Dict[str, Union[str, float]] = res_data.get("location")
    # Bug fix: guard against a missing "location" object before attribute
    # access on None.
    if not res_location:
        raise LocationNotFound("Unable to find this location.")
    lat: str = res_location.get("lat")
    lon: str = res_location.get("lon")
    # Sets the location attributes, if region is not found, it will use the
    # country name. If no country name is found, it will then throw an
    # exception.  Bug fix: .strip() used to be called directly on
    # res_location.get("region"), raising AttributeError when the region
    # key was absent or None instead of falling back to the country.
    self.location: str = res_location.get("name")
    self.region: str = (res_location.get("region") or "").strip() \
        or res_location.get("country")
    if not self.region:
        raise LocationNotFound("Unable to find this location.")
    self.coordinates = f"{lat},{lon}"
def _loadFromConfig(self, name=None):
    """Rebuild self.relays and self.nickSubstitutions from registry values.

    'relays' entries are ' || '-separated records of five ' | '-separated
    fields; 'substitutes' entries have two fields (nick -> substitute).
    Malformed records are skipped silently; relay construction errors are
    logged and skipped.
    """
    self.relays = []
    for relay in self.registryValue('relays').split(' || '):
        # A trailing '|' means the last field is an empty string that the
        # split would otherwise drop; pad it back.
        if relay.endswith('|'):
            relay += ' '
        relay = relay.split(' | ')
        if not len(relay) == 5:
            continue
        try:
            # NOTE(review): the second pattern is compiled WITHOUT re.I,
            # unlike the first — other versions of this method use re.I on
            # both; confirm whether case-sensitivity here is intentional.
            self.relays.append(self.Relay(relay[0],
                                          relay[1],
                                          relay[2],
                                          relay[3],
                                          re.compile('^%s$' % relay[0], re.I),
                                          re.compile('^%s$' % relay[1]),
                                          re.compile(relay[4])))
        except:
            log.error('Failed adding relay: %r' % relay)
    self.nickSubstitutions = {}
    for substitute in self.registryValue('substitutes').split(' || '):
        if substitute.endswith('|'):
            substitute += ' '
        substitute = substitute.split(' | ')
        if not len(substitute) == 2:
            continue
        self.nickSubstitutions[substitute[0]] = substitute[1]
def check(self, irc, msg, args, domain):
    """<domain>. Checks if <domain> is available for purchase.

    Queries the Namecheap domains.check API; on availability, looks up
    cached 1-year registration pricing for the TLD from the local sqlite
    'pricing' table and replies with price and an affiliate purchase URL.
    """
    response = self.namecheap('namecheap.domains.check',
                              {'DomainList': domain})
    if response.get('Status') == "ERROR":
        for error in response[0]:
            log.error(error.text)
            irc.reply("Error! %s" % error.text)
    results = response.find(
        "{http://api.namecheap.com/xml.response}CommandResponse")
    if results is not None:
        for result in results:
            if result.attrib['Available'] == "true":
                db = dataset.connect("sqlite:///%s" % self.dbfile)
                tld = domain.split(".")[-1]
                # Pricing is pre-populated by the companion update command.
                prices = db['pricing'].find(tld=tld, category="register",
                                            years=1)
                no_prices = True
                for price in prices:
                    no_prices = False
                    purchase_url = "https://www.namecheap.com/domains/registration/results.aspx"
                    purchase_url += "?domain=%s&aff=%s" % (
                        domain, self.registryValue('affiliate_id'))
                    irc.reply("[%s] Available from %s for %s $%s (%s)" % (
                        domain, price['provider'], price['currency'],
                        price['price'], purchase_url))
                if no_prices:
                    irc.reply("[%s] Allegedly available (pricing info not found for %s)" % (domain, tld))
            else:
                irc.reply("[%s] Unavailable" % (result.attrib['Domain']))
def domian(self, irc, msg, args):
    """
    Returns information about the next episode of Domian

    Parses the weekly show feed; shows are assumed to run 01:00-02:00
    local time on their published date.  Picks the show currently on air,
    or the earliest upcoming one, and builds a (German) reply string.
    (Python 2 code: note the `except Exception, e` syntax.)
    """
    now = datetime.now()
    feed = feedparser.parse('http://nachtlager.de/go/de/feed/week')
    nextshow = None
    for show in feed.entries:
        # Show window is hard-coded to 01:00-02:00 on the published day.
        showStart = datetime.fromtimestamp(
            mktime(show.published_parsed)).replace(hour=1)
        showEnd = datetime.fromtimestamp(
            mktime(show.published_parsed)).replace(hour=2)
        show['showstart'] = showStart
        if showStart < now and showEnd > now:
            # Currently on air.
            nextshow = show
            nextshow['onair'] = True
        else:
            if showStart > now:
                if nextshow is None:
                    nextshow = show
                    nextshow['onair'] = False
                else:
                    # Keep the earliest upcoming show.
                    if showStart < nextshow['showstart']:
                        nextshow = show
                        nextshow['onair'] = False
    try:
        if nextshow['onair']:
            reply = u'Domian läuft gerade. (%s) - http://www.wdr.de/wdrlive/media/einslive.m3u' % nextshow.description
        else:
            starts_in = formatTimespan(
                int(mktime(nextshow['showstart'].timetuple()) - time()))
            reply = u'Nächste Sendung am %s (%s) - in %s' % (
                nextshow['showstart'].strftime('%d.%m.%Y um %H:%M'),
                nextshow.description, starts_in)
    except Exception, e:
        # nextshow is None when the feed had no usable entries.
        log.error('Domian: %s' % repr(e))
        reply = u'Noch keine Daten vorhanden!'
    # NOTE(review): `reply` is built but not sent here — presumably an
    # irc.reply(reply) follows outside this visible chunk; confirm.
def _createPrivmsg(self, channel, payload, commit, hidden=None):
    """Build the IRC privmsg announcing one commit.

    Tries to shorten the commit URL via ur1.ca, falling back to the full
    URL on any failure.  *hidden*, when set, is the count of additional
    commits elided from the announcement.
    """
    bold = ircutils.bold
    url = commit['url']
    # ur1.ca
    try:
        post_param = ur1ca.parameterize(url)
        answerfile = ur1ca.request(post_param)
        doc = ur1ca.retrievedoc(answerfile)
        answerfile.close()
        status, url2 = ur1ca.scrape(doc)
        if status:
            url = url2
    except Exception as e:
        # Shortening is best-effort; keep the original URL on failure.
        log.error('Cannot connect to ur1.ca: %s' % e)
    s = _('%s/%s (in %s): %s committed %s %s') % \
        (payload['repository']['owner']['name'],
         bold(payload['repository']['name']),
         bold(payload['ref'].split('/')[-1]),
         commit['author']['name'],
         bold(commit['message'].split('\n')[0]),
         url)
    if hidden is not None:
        s += _(' (+ %i hidden commits)') % hidden
    return ircmsgs.privmsg(channel, s.encode('utf8'))
def _createPrivmsg(self, channel, payload, commit, hidden=None):
    """Build the IRC privmsg announcing one commit.

    Tries to shorten the commit URL via git.io (reading the Location
    header of the redirect), falling back to the full URL on failure.
    Handles both Python 2 and Python 3 header APIs.
    """
    bold = ircutils.bold
    url = commit['url']
    try:
        data = urlencode({'url': url})
        if sys.version_info[0] >= 3:
            data = data.encode()
            f = utils.web.getUrlFd('http://git.io/', data=data)
            # git.io answers with a redirect; the short URL is in Location.
            url = list(
                filter(lambda x: x[0] == 'Location',
                       f.headers._headers))[0][1].strip()
        else:
            f = utils.web.getUrlFd('http://git.io/', data=data)
            url = filter(lambda x: x.startswith('Location: '),
                         f.headers.headers)[0].split(': ', 1)[1].strip()
    except Exception as e:
        # Shortening is best-effort; keep the original URL on failure.
        log.error('Cannot connect to git.io: %s' % e)
    s = _('%s/%s (in %s): %s committed %s %s') % \
        (payload['repository']['owner']['name'],
         bold(payload['repository']['name']),
         bold(payload['ref'].split('/')[-1]),
         commit['author']['name'],
         bold(commit['message'].split('\n')[0]),
         url)
    if hidden is not None:
        s += _(' (+ %i hidden commits)') % hidden
    if sys.version_info[0] < 3:
        s = s.encode('utf-8')
    return ircmsgs.privmsg(channel, s)
def poll(self):
    """Poll the game server once and announce joins/parts to channels.

    Compares the freshly polled player list against self.players, builds
    a combined 'X joined and Y parted' message per channel, and maintains
    self.utdelay as a cooldown counter for the onFirstJoinSay greeting.
    All errors are logged with a full traceback (Python 2 StringIO).
    """
    try:
        log.info("Polling for " + str(self) + " on channels " +
                 str(self.channels))
        response, players, scores, joined, parted = self.Poll()
        log.info(str(response))
        for channel in self.channels:
            log.info("Checking for " + channel)
            # Server went from empty to populated: optionally greet, but
            # only when the cooldown has fully expired.
            if len(self.players) == 0 and len(players) > 0 and len(str(self.parent.registryValue('onFirstJoinSay', channel)).strip()) > 0:
                if self.utdelay == 0:
                    self.irc.queueMsg(ircmsgs.privmsg(
                        channel,
                        self.parent.registryValue('onFirstJoinSay')))
            msgJoins = self.printJoins(joined, channel)
            msgParts = self.printParts(parted, channel)
            msg = msgJoins
            if len(msgParts) > 0:
                if len(msg) > 0:
                    msg += ' and '
                msg += msgParts
            log.info("Send to " + channel + " with msg: " + msg)
            if len(msg) > 0:
                self.irc.queueMsg(ircmsgs.privmsg(channel, msg))
        self.players = players
        if len(players) > 0:
            # Re-arm the greeting cooldown while anyone is online.
            self.utdelay = 6*30
        elif self.utdelay > 0:
            self.utdelay = self.utdelay - 1
    except:
        out = StringIO.StringIO()
        traceback.print_exc(file=out)
        log.error(out.getvalue())
def get_video_id_from_url(self, url, info):
    """
    Get YouTube video ID from URL

    Handles both short links (youtu.be/<id>, id is the first path
    segment) and regular links (the ?v= query parameter).  Logs and
    returns None when no ID can be extracted.
    """
    try:
        vid = ""
        if info.netloc == "youtu.be":
            # Short-link form: the ID is the first path segment.
            vid = info.path.split("/")[1]
        else:
            # Regular form: read the 'v' query parameter.
            query_params = dict(cgi.parse_qsl(info.query))
            vid = query_params.get("v", "")
        if vid:
            return vid
        log.error("SpiffyTitles: error getting video id from %s" % (url))
    except IndexError as e:
        log.error("SpiffyTitles: error getting video id from %s (%s)" %
                  (url, str(e)))
def initialize_imgur_client(self, channel):
    """
    Check if imgur client id or secret are set, and if so initialize
    imgur API client

    Lazy one-time setup: does nothing when self.imgur_client already
    exists, when the handler is disabled for *channel*, or when the
    credentials are incomplete.  Import and construction errors are
    logged, never raised.
    """
    if self.imgur_client is None:
        imgur_client_id = self.registryValue("imgurClientID")
        imgur_client_secret = self.registryValue("imgurClientSecret")
        imgur_handler_enabled = self.registryValue("imgurHandlerEnabled",
                                                   channel=channel)
        if imgur_handler_enabled and imgur_client_id and imgur_client_secret:
            log.debug("SpiffyTitles: enabling imgur handler")
            # Initialize API client
            try:
                # Imported lazily so the plugin loads without imgurpython.
                from imgurpython import ImgurClient
                from imgurpython.helpers.error import ImgurClientError
                try:
                    self.imgur_client = ImgurClient(imgur_client_id,
                                                    imgur_client_secret)
                except ImgurClientError as e:
                    log.error("SpiffyTitles: imgur client error: %s" %
                              (e.error_message))
            except ImportError as e:
                log.error("SpiffyTitles ImportError: %s" % str(e))
        else:
            log.debug("SpiffyTitles: imgur handler disabled or empty client id/secret")
def getPluginList(self):
    """Return the plugin directory names under self._path in the repo.

    Uses the legacy GitHub v2-style API: resolves the master branch's
    latest commit, navigates the path components to a tree hash, then
    lists that tree's subtrees (directories = plugins).  Returns [] when
    the path cannot be resolved.
    """
    latestCommit = self._query(
        'repos',
        'show/%s/%s/branches' % (
            self._username,
            self._reponame,
        )
    )['branches']['master']
    # Split the configured path into non-empty components.
    path = [x for x in self._path.split('/') if x != '']
    treeHash = self._navigate(latestCommit, path)
    if treeHash is None:
        log.error((
            'Cannot get plugins list from repository %s/%s '
            'at Github') % (self._username, self._reponame))
        return []
    nodes = self._query(
        'tree',
        'show/%s/%s/%s' % (
            self._username,
            self._reponame,
            treeHash,
        )
    )['tree']
    # Only subtrees (directories) are plugins; blobs are plain files.
    plugins = [x['name'] for x in nodes if x['type'] == 'tree']
    return plugins
def _createPrivmsg(self, channel, payload, commit, hidden=None):
    """Build the IRC privmsg announcing one commit.

    Near-duplicate of the other git.io variant in this codebase: shortens
    the commit URL via git.io's redirect Location header (py2/py3 header
    APIs differ), falling back to the full URL on any failure.
    """
    bold = ircutils.bold
    url = commit['url']
    try:
        data = urlencode({'url': url})
        if sys.version_info[0] >= 3:
            data = data.encode()
            f = utils.web.getUrlFd('http://git.io/', data=data)
            # Short URL arrives as the redirect's Location header.
            url = list(filter(lambda x:x[0] == 'Location',
                              f.headers._headers))[0][1].strip()
        else:
            f = utils.web.getUrlFd('http://git.io/', data=data)
            url = filter(lambda x:x.startswith('Location: '),
                         f.headers.headers)[0].split(': ', 1)[1].strip()
    except Exception as e:
        # Best-effort shortening; keep the long URL on failure.
        log.error('Cannot connect to git.io: %s' % e)
    s = _('%s/%s (in %s): %s committed %s %s') % \
        (payload['repository']['owner']['name'],
         bold(payload['repository']['name']),
         bold(payload['ref'].split('/')[-1]),
         commit['author']['name'],
         bold(commit['message'].split('\n')[0]),
         url)
    if hidden is not None:
        s += _(' (+ %i hidden commits)') % hidden
    if sys.version_info[0] < 3:
        s = s.encode('utf-8')
    return ircmsgs.privmsg(channel, s)
def getPluginList(self):
    """List plugin directory names in this GitHub repository.

    Queries the contents API for self._path and returns the names of all
    directory entries; returns [] (and logs) when the query fails.
    """
    listing = self._query(
        "repos",
        "%s/%s/contents%s" % (self._username, self._reponame, self._path))
    if listing is None:
        log.error(("Cannot get plugins list from repository %s/%s "
                   "at Github") % (self._username, self._reponame))
        return []
    # Directories under the path are the installable plugins.
    return [entry["name"] for entry in listing if entry["type"] == "dir"]
def __init__(self, irc):
    """Initialize the Twitter plugin.

    Optionally tweets a wake-up message at bot start, and registers
    config callbacks that drop cached API objects when the consumer or
    per-channel account credentials change (when the running Supybot
    supports registry callbacks).
    """
    self.__parent = super(Twitter, self)
    callbacks.Plugin.__init__(self, irc)
    # Cache of per-channel API objects, invalidated by the callbacks below.
    self._apis = {}
    self._died = False
    if world.starting:
        # Best-effort hello tweet on startup; failures are ignored.
        try:
            self._getApi().PostUpdate(_('I just woke up. :)'))
        except:
            pass
    self._runningAnnounces = []
    try:
        conf.supybot.plugins.Twitter.consumer.key.addCallback(
            self._dropApiObjects)
        conf.supybot.plugins.Twitter.consumer.secret.addCallback(
            self._dropApiObjects)
        conf.supybot.plugins.Twitter.accounts.channel.key.addCallback(
            self._dropApiObjects)
        conf.supybot.plugins.Twitter.accounts.channel.secret.addCallback(
            self._dropApiObjects)
        conf.supybot.plugins.Twitter.accounts.channel.api.addCallback(
            self._dropApiObjects)
    except registry.NonExistentRegistryEntry:
        log.error('Your version of Supybot is not compatible with '
                  'configuration hooks. So, Twitter won\'t be able '
                  'to apply changes to the consumer key/secret '
                  'and token key/secret unless you reload it.')
    # Short numeric ids handed out for referencing tweets in replies.
    self._shortids = {}
    self._current_shortid = 0
def _loadFromConfig(self, name=None):
    """Rebuild self.relays and self.nickSubstitutions from registry values.

    'relays' entries are ' || '-separated records of five ' | '-separated
    fields; both anchor patterns are compiled case-insensitively here.
    'substitutes' entries map a nick to its substitute.  Malformed
    records are skipped; relay construction failures are logged.
    """
    self.relays = []
    for relay in self.registryValue('relays').split(' || '):
        # A trailing '|' means the final field is empty; pad it so the
        # split still yields five fields.
        if relay.endswith('|'):
            relay += ' '
        relay = relay.split(' | ')
        if not len(relay) == 5:
            continue
        try:
            self.relays.append(
                self.Relay(relay[0],
                           relay[1],
                           relay[2],
                           relay[3],
                           re.compile('^%s$' % relay[0], re.I),
                           re.compile('^%s$' % relay[1], re.I),
                           re.compile(relay[4])))
        except:
            log.error('Failed adding relay: %r' % relay)
    self.nickSubstitutions = {}
    for substitute in self.registryValue('substitutes').split(' || '):
        if substitute.endswith('|'):
            substitute += ' '
        substitute = substitute.split(' | ')
        if not len(substitute) == 2:
            continue
        self.nickSubstitutions[substitute[0]] = substitute[1]
def get_status(tweet_id, token):
    """Fetch a tweet by id via the Twitter v2 API and format it.

    Returns a single-line 'author: text (relative time)' string, or None
    when the HTTP call fails, the id is unknown, or the response is
    missing expected fields.
    """
    headers = {'Authorization': 'Bearer %s' % token}
    # Twitter API wants value lists to be comma separated , but requests lib
    # urlencodes commas – so we build the URL by hand
    url = ('https://api.twitter.com/2/tweets/%s' % tweet_id
           + '?tweet.fields=created_at'
           + '&expansions=author_id&user.fields=username,verified')
    r = requests.get(url, headers=headers, timeout=TIMEOUT)
    if r.status_code != 200:
        log.error('twitter.get_status: call to API ' +
                  'unsuccesful, HTTP status code ' + str(r.status_code))
        return None
    payload = r.json()
    if 'errors' in payload:
        # Most likely there's no tweet with that id, so abort
        return None
    try:
        tweet = payload['data']['text'].strip()
        timestamp = payload['data']['created_at']
        author_data = payload['includes']['users'][0]
        username = author_data['username']
        name = author_data['name']
        verified = author_data['verified']
    except KeyError as e:
        log.error('twitter.get_status: %s' % repr(e))
        return None
    # replace linebreaks with Return symbol
    tweet = re.sub('\n+', ' ⏎ ', tweet)
    # Remove excess whitespace
    tweet = re.sub(' +', ' ', tweet)
    author = format_author(name, username, verified)
    when = humanize_time(timestamp)
    return '%s: %s (%s)' % (author, tweet, when)
def _query(self, function, **params):
    """Call the RfK HTTP API *function* with non-empty **params.

    Builds the request URL from registry values (queryURL, queryPass,
    optional httpProxy) and opens it.  Returns None on IOError.
    (Python 2 code: iteritems / `except IOError, e`.)
    """
    # filter out empty params
    params = {key: value for key, value in params.iteritems() if value}
    log.debug('RfK._query: %s' % repr(params))
    if self.registryValue('httpProxy'):
        opener = urllib2.build_opener(
            urllib2.ProxyHandler({'http': self.registryValue('httpProxy')}))
    else:
        opener = urllib2.build_opener()
    request_url = '%s%s?key=%s&%s' % (
        self.registryValue('queryURL'),
        function,
        self.registryValue('queryPass'),
        urllib.urlencode(params)
    )
    try:
        response = opener.open(request_url)
    except IOError, e:
        log.error('RfK._query: %s' % repr(e))
        return None
    # NOTE(review): `response` is opened but not returned/parsed in this
    # visible chunk — presumably the JSON decode follows beyond view.
def get_profile(user, token):
    """Fetch a user's profile via the Twitter v2 API and format it.

    Returns 'author: bio (N tweets, M followers)', or None when the HTTP
    call fails, the user is unknown, or expected fields are missing.
    """
    headers = {'Authorization': 'Bearer %s' % token}
    # Twitter API wants value lists to be comma separated , but requests lib
    # urlencodes commas – so we build the URL by hand
    url = 'https://api.twitter.com/2/users/by/username/' + user
    url += '?user.fields=description,public_metrics,verified'
    r = requests.get(url, headers=headers, timeout=TIMEOUT)
    if r.status_code != 200:
        log.error('twitter.get_profile: call to API ' +
                  'unsuccesful, HTTP status code ' + str(r.status_code))
        return None
    payload = r.json()
    if 'errors' in payload:
        # Most likely there's no profile with that name, so abort
        return None
    try:
        description = payload['data']['description']
        name = payload['data']['name']
        username = payload['data']['username']
        verified = payload['data']['verified']
        tweet_count = payload['data']['public_metrics']['tweet_count']
        followers_count = payload['data']['public_metrics']['followers_count']
    except KeyError as e:
        # Bug fix: the message said 'twitter.get_status' (copy-paste from
        # the sibling function), mislabeling the failing call site.
        log.error('twitter.get_profile: %s' % repr(e))
        return None
    # replace linebreaks with Return symbol
    description = re.sub('\n+', ' ⏎ ', description)
    # Remove excess whitespace
    description = re.sub(' +', ' ', description)
    author = format_author(name, username, verified)
    followers_count = humanize_count(followers_count)
    tweet_count = humanize_count(tweet_count)
    return '%s: %s (%s tweets, %s followers)' \
        % (author, description, tweet_count, followers_count)
def _translateQuery(self, function, parameters=None):
    """Call the translation API *function* with query *parameters*.

    Requires the appId registry value; honours an optional httpProxy.
    The service wraps its JSON in a BOM-like leading byte, hence the
    double decode with data[1:].  Returns the decoded response, or None
    on failure.
    """
    # Bug fix: the default used to be a shared mutable dict ({}).
    if parameters is None:
        parameters = {}
    if self.registryValue("appId") == "":
        log.error("Translate: Set your appId and restart the plugin")
        return
    log.debug("Translate.query: %s" % (repr(parameters)))
    if self.registryValue("httpProxy") != "":
        opener = urllib2.build_opener(
            urllib2.ProxyHandler({"http": self.registryValue("httpProxy")}))
    else:
        opener = urllib2.build_opener()
    response = opener.open(
        self.registryValue("queryURL")
        + function
        + "?"
        + "appId="
        + self.registryValue("appId")
        + "&"
        + urllib.urlencode(parameters)
    )
    try:
        data = json.loads(json.dumps(response.read()))
        # Skip the stray leading byte the service prepends to its JSON.
        data = json.loads(data[1:])
        log.debug("Translate.reply: %s" % repr(data))
        return data
    except Exception:
        log.error("Translate.query error")
        return None
def peak(self, irc, msg, args):
    """
    Return the global listener peak of RfK

    Formats the all-time concurrent-listener peak, localized to the
    configured timezone, including the show that was on air when the
    peak was reached (if the API reports one).
    (Python 2 code: note the `except Exception, e` syntax.)
    """
    try:
        listener_peak = self._query('listener_peak')['data']['listener_peak']
        if listener_peak:
            peak_value = listener_peak['peak_value']
            peak_time = listener_peak['peak_time']
            peak_time_tz = pytz.timezone(self.registryValue('timezone'))
            peak_time_format = dateutil.parser.parse(peak_time).astimezone(
                peak_time_tz).strftime('%d.%m.%Y %H:%M %Z')
            peak_time_delta = self._format_timedelta(peak_time)
            if 'peak_show' in listener_peak:
                reply = u'RfK listener peak: %s concurrent listener (reached during "%s" with %s on %s -- %s ago)' % (
                    peak_value,
                    listener_peak['peak_show']['show_name'],
                    self._format_djs(listener_peak['peak_show']),
                    peak_time_format,
                    peak_time_delta)
            else:
                reply = u'RfK listener peak: %s concurrent listener (reached on %s -- %s ago)' % (
                    peak_value,
                    peak_time_format,
                    peak_time_delta)
        # NOTE(review): if listener_peak is falsy, `reply` stays unbound —
        # the use of `reply` presumably follows beyond this visible chunk;
        # confirm there is an else/default there.
    except Exception, e:
        log.error('RfK.peak: %s' % repr(e))
        reply = self.reply_error
def update(self, irc, msg, args):
    """Update the namecheap pricing information.

    Fetches the full DOMAIN pricing tree from the Namecheap API, clears
    all previously cached Namecheap rows from the local sqlite 'pricing'
    table, and re-inserts one row per (tld, duration) with category,
    price and currency.  Replies with a per-category progress count.
    """
    irc.reply("This could take a second....")
    response = self.namecheap('namecheap.users.getPricing',
                              {'ProductType': 'DOMAIN'})
    if response.get('Status') == "ERROR":
        for error in response[0]:
            log.error(error.text)
            irc.reply("Error! %s" % error.text)
    results = response.find("./{http://api.namecheap.com/xml.response}CommandResponse/{http://api.namecheap.com/xml.response}UserGetPricingResult")
    db = dataset.connect("sqlite:///%s" % self.dbfile)
    pricing_table = db['pricing']
    # Full refresh: drop our old rows before re-inserting.
    pricing_table.delete(provider="Namecheap")
    categories = {}
    if results is not None:
        # XML nesting: product_type > category > product (tld) > duration.
        for product_type in results:
            for category in product_type:
                categories[category.attrib['Name']] = 0
                for product in category:
                    for duration in product:
                        pricing_table.insert(dict(
                            tld=product.attrib['Name'],
                            years=duration.attrib['Duration'],
                            category=category.attrib['Name'],
                            price=duration.attrib['Price'],
                            currency=duration.attrib['Currency'],
                            provider="Namecheap"))
                        categories[category.attrib['Name']] += 1
                irc.reply("Loaded category %s (%s bits of pricing infoz)" % (
                    category.attrib['Name'],
                    categories[category.attrib['Name']]))
    irc.reply("Done! Results: ")
def traffic(self, irc, msg, args): """ Return traffic information on all active relays """ try: active_relays = self._query('active_relays')['data']['active_relays'] if active_relays: slaves = [] for relay in active_relays['relays']: # type 0 -> Master # type 1 -> Slave if relay['relay_type'] == 0: master = 'master: %d kB/s' % (relay['relay_current_bandwidth'] / 8) elif relay['relay_type'] == 1: slaves.append('relay #%d: %d kB/s' % (relay['relay_id'], relay['relay_current_bandwidth'] / 8)) reply = u'%d kB/s ( %s | %s )' % (active_relays['total_bandwidth'] / 8, master, ' | '.join(slaves)) else: reply = u'No active relays found' except Exception, e: log.error('RfK.traffic: %s' % repr(e)) reply = self.reply_error
def reconnect(self, reset=True):
    """Tear down any existing connection and connect to the next server.

    When *reset* is true the IRC state machine is reset as well.  Honours
    the per-network SOCKS proxy setting (falling back to a direct
    connection when SocksiPy is missing) and binds to the configured
    vhost.  On socket errors a later reconnect is scheduled instead.
    (Python 2 code: note `except socket.error, e`.)
    """
    self.nextReconnectTime = None
    if self.connected:
        drivers.log.reconnect(self.irc.network)
        if self in self._instances:
            self._instances.remove(self)
        self.conn.shutdown(socket.SHUT_RDWR)
        self.conn.close()
        self.connected = False
    if reset:
        drivers.log.debug('Resetting %s.', self.irc)
        self.irc.reset()
    else:
        drivers.log.debug('Not resetting %s.', self.irc)
    server = self._getNextServer()
    drivers.log.connect(self.currentServer)
    try:
        socks_proxy = getattr(conf.supybot.networks, self.irc.network) \
            .socksproxy()
        try:
            # Import only to verify SocksiPy is available when a proxy is
            # configured; the socket helper does the actual proxying.
            if socks_proxy:
                import socks
        except ImportError:
            log.error('Cannot use socks proxy (SocksiPy not installed), '
                      'using direct connection instead.')
            socks_proxy = ''
        self.conn = utils.net.getSocket(server[0], socks_proxy)
        vhost = conf.supybot.protocols.irc.vhost()
        self.conn.bind((vhost, 0))
    except socket.error, e:
        drivers.log.connectError(self.currentServer, e)
        self.scheduleReconnect()
        return
def _translateQuery(self, function, parameters=None):
    """Call the translation API *function* with query *parameters*.

    Twin of the other _translateQuery in this codebase.  Requires the
    appId registry value; honours an optional httpProxy.  The service
    prepends a stray byte to its JSON, hence the data[1:] re-decode.
    Returns the decoded response, or None on failure.
    """
    # Bug fix: the default used to be a shared mutable dict ({}).
    if parameters is None:
        parameters = {}
    if (self.registryValue('appId') == ''):
        log.error('Translate: Set your appId and restart the plugin')
        return
    log.debug('Translate.query: %s' % (repr(parameters)))
    if (self.registryValue('httpProxy') != ''):
        opener = urllib2.build_opener(
            urllib2.ProxyHandler({'http': self.registryValue('httpProxy')}))
    else:
        opener = urllib2.build_opener()
    response = opener.open(self.registryValue('queryURL') + function + '?'
                           + 'appId=' + self.registryValue('appId') + '&'
                           + urllib.urlencode(parameters))
    try:
        data = json.loads(json.dumps(response.read()))
        # Skip the stray leading byte before the real JSON document.
        data = json.loads(data[1:])
        log.debug('Translate.reply: %s' % repr(data))
        return(data)
    except Exception:
        log.error('Translate.query error')
        return(None)
def youtubeSnarfer(self, irc, msg, match):
    """Announce title/duration/stats for YouTube links seen in a channel.

    Triggered by a URL regex match; queries the YouTube Data API for the
    video and replies (without nick prefix) with a formatted summary.
    Controlled per-channel by the youtubeSnarfer registry value.
    """
    channel = msg.args[0]
    if not irc.isChannel(channel):
        return
    if self.registryValue('youtubeSnarfer', channel):
        ytid = self._youtubeId(match.group(0))
        if ytid:
            try:
                apiReq = urlopen(self._api_url.format(ytid, self._api_key))
            except Exception:
                # Narrowed from a bare except; failures are non-fatal.
                log.error("Couldn't connect to Youtube's API.")
                apiReq = None
            if apiReq:
                # Bug fix: the response object was never closed, leaking
                # the underlying socket on every snarfed link.
                try:
                    if sys.version_info[0] < 3:
                        apiRes = apiReq.read()
                    else:
                        cntCharset = apiReq.headers.get_content_charset()
                        apiRes = apiReq.read().decode(cntCharset)
                finally:
                    apiReq.close()
                apiRes = json.loads(apiRes)
                if 'items' in apiRes:
                    vInfo = apiRes['items'][0]
                    vSnippet = vInfo['snippet']
                    vDetails = vInfo['contentDetails']
                    vStats = vInfo['statistics']
                    s = format("YouTube: \x02%s\x02", vSnippet['title'])
                    if 'duration' in vDetails:
                        # ISO-8601 duration -> "Dd HHmMMs"-style summary.
                        dur = str(parse_isoduration(vDetails['duration'])).split(':')
                        durstr = ''
                        if dur[0] != '0':
                            durstr += f'{dur[0]}d'
                        durstr += f'{dur[1]}m{dur[2]}s'
                        s += format(" - length: \x02%s\x02", durstr)
                    if 'viewCount' in vStats:
                        s += format(_(" - \x02%s\x02 views"),
                                    "{:,}".format(int(vStats['viewCount'])))
                    if 'likeCount' in vStats and 'dislikeCount' in vStats:
                        s += format(_(" - \x02%s\x02 likes / %s dislikes"),
                                    vStats['likeCount'],
                                    vStats['dislikeCount'])
                    if 'channelTitle' in vSnippet:
                        s += format(_(" - by \x02%s\x02"),
                                    vSnippet['channelTitle'])
                    if 'publishedAt' in vSnippet:
                        published_date = parser.parse(
                            vSnippet['publishedAt']).astimezone(tz.tzlocal())
                        s += format(_(" on \x02%s\x02"),
                                    str(published_date).split()[0])
                    irc.reply(s, prefixNick=False)
def _query_server(self):
    """Query Q3 server via pyquake3.

    Returns None on failure.  (Python 2 `except Exception, e` syntax.)
    NOTE(review): the success path also falls through returning None —
    presumably a `return server` or similar follows beyond this visible
    chunk; confirm against the full source.
    """
    server = PyQuake3(self.registryValue('queryURL'))
    try:
        server.update()
    except Exception, e:
        log.error('Quake.query_server: %s' % repr(e))
        return None
def onPayload(self, headers, payload):
    """Dispatch a GitHub webhook event to every subscribed channel.

    Resolves the repository's full name from the payload (three schema
    variants), finds the (network, channel) pairs subscribed to it, and
    announces the event.  For 'push' events, long merge pushes (>5
    commits whose last message starts with 'Merge ') are collapsed to
    the merge commit with a hidden-commit count.
    """
    if 'reply_env' not in ircmsgs.IrcMsg.__slots__:
        log.error("Got event payload from GitHub, but your version "
                  "of Supybot is not compatible with reply "
                  "environments, so, the GitHub plugin can't "
                  "announce it.")
    # Repo naming differs across GitHub payload versions.
    if 'full_name' in payload['repository']:
        repo = payload['repository']['full_name']
    elif 'name' in payload['repository']['owner']:
        repo = '%s/%s' % (payload['repository']['owner']['name'],
                          payload['repository']['name'])
    else:
        repo = '%s/%s' % (payload['repository']['owner']['login'],
                          payload['repository']['name'])
    event = headers['X-GitHub-Event']
    announces = self._load()
    repoAnnounces = []
    for (dbRepo, network, channel) in announces:
        if dbRepo == repo:
            repoAnnounces.append((network, channel))
    if len(repoAnnounces) == 0:
        log.info('Commit for repo %s not announced anywhere' % repo)
        return
    for (network, channel) in repoAnnounces:
        # Compatability with DBs without a network
        if network == '':
            # Pick whichever connected network has the channel joined.
            for irc in world.ircs:
                if channel in irc.state.channels:
                    break
        else:
            irc = world.getIrc(network)
            if not irc:
                log.warning('Received GitHub payload with announcing '
                            'enabled in %s on unloaded network %s.',
                            channel, network)
                return
        if channel not in irc.state.channels:
            # NOTE(review): this logs but does NOT skip the channel — the
            # announcement below still runs; confirm whether a `continue`
            # is missing here.
            log.info(('Cannot announce event for repo '
                      '%s in %s on %s because I\'m not in %s.') %
                     (repo, channel, irc.network, channel))
        if event == 'push':
            commits = payload['commits']
            if len(commits) == 0:
                log.warning('GitHub push hook called without any commit.')
            else:
                hidden = None
                last_commit = commits[-1]
                # Collapse big merge pushes to just the merge commit.
                if last_commit['message'].startswith('Merge ') and \
                        len(commits) > 5:
                    hidden = len(commits) + 1
                    commits = [last_commit]
                payload2 = dict(payload)
                for commit in commits:
                    payload2['__commit'] = commit
                    self._createPrivmsg(irc, channel, payload2, 'push',
                                        hidden)
        else:
            self._createPrivmsg(irc, channel, payload, event)
def upkeep():
    """Does upkeep (like flushing, garbage collection, etc.)

    Python 2-only code (sys.exc_clear, the `file` builtin, the `sre`
    module).  When daemonized, anything printed to the captured
    stdout/stderr StringIO objects is logged and the buffers cleared;
    flushers run unless the bot is still starting.  Returns the number
    of objects collected by gc.collect().
    """
    sys.exc_clear()  # Just in case, let's clear the exception info.
    if os.name == 'nt':
        try:
            import msvcrt
            msvcrt.heapmin()
        except ImportError:
            pass
        except IOError:  # Win98 sux0rs!
            pass
    if conf.daemonized:
        # If we're daemonized, sys.stdout has been replaced with a StringIO
        # object, so let's see if anything's been printed, and if so, let's
        # log.warning it (things shouldn't be printed, and we're more likely
        # to get bug reports if we make it a warning).
        assert not type(sys.stdout) == file, 'Not a StringIO object!'
        if not hasattr(sys.stdout, 'getvalue'):
            # Stupid twisted sometimes replaces our stdout with theirs,
            # because "The Twisted Way Is The Right Way" (ha!). So we're
            # stuck simply returning.
            log.warning('Expected cStringIO as stdout, got %r.', sys.stdout)
            return
        s = sys.stdout.getvalue()
        if s:
            log.warning('Printed to stdout after daemonization: %s', s)
            sys.stdout.reset()  # Seeks to 0.
            sys.stdout.truncate()  # Truncates to current offset.
        assert not type(sys.stderr) == file, 'Not a StringIO object!'
        s = sys.stderr.getvalue()
        if s:
            log.error('Printed to stderr after daemonization: %s', s)
            sys.stderr.reset()  # Seeks to 0.
            sys.stderr.truncate()  # Truncates to current offset.
    doFlush = conf.supybot.flush() and not starting
    if doFlush:
        flush()
        # This is so registry._cache gets filled.
        # This seems dumb, so we'll try not doing it anymore.
        #if registryFilename is not None:
        #    registry.open(registryFilename)
    if not dying:
        log.debug('Regexp cache size: %s', len(sre._cache))
        log.debug('Pattern cache size: %s', len(ircutils._patternCache))
        log.debug('HostmaskPatternEqual cache size: %s',
                  len(ircutils._hostmaskPatternEqualCache))
        #timestamp = log.timestamp()
        if doFlush:
            log.info('Flushers flushed and garbage collected.')
        else:
            log.info('Garbage collected.')
    collected = gc.collect()
    if gc.garbage:
        log.warning(
            'Noncollectable garbage (file this as a bug on SF.net): %s',
            gc.garbage)
    return collected
def upkeep():
    """Does upkeep (like flushing, garbage collection, etc.)"""
    # Python 2 API: clear stale exception state so its traceback frames
    # can be collected.
    sys.exc_clear() # Just in case, let's clear the exception info.
    if os.name == 'nt':
        # Windows-specific: compact the CRT heap if msvcrt is available.
        try:
            import msvcrt
            msvcrt.heapmin()
        except ImportError:
            pass
        except IOError: # Win98 sux0rs!
            pass
    if conf.daemonized:
        # If we're daemonized, sys.stdout has been replaced with a StringIO
        # object, so let's see if anything's been printed, and if so, let's
        # log.warning it (things shouldn't be printed, and we're more likely
        # to get bug reports if we make it a warning).
        assert not type(sys.stdout) == file, 'Not a StringIO object!'
        if not hasattr(sys.stdout, 'getvalue'):
            # Stupid twisted sometimes replaces our stdout with theirs, because
            # "The Twisted Way Is The Right Way" (ha!). So we're stuck simply
            # returning.
            log.warning('Expected cStringIO as stdout, got %r.', sys.stdout)
            return
        s = sys.stdout.getvalue()
        if s:
            log.warning('Printed to stdout after daemonization: %s', s)
            sys.stdout.reset() # Seeks to 0.
            sys.stdout.truncate() # Truncates to current offset.
        assert not type(sys.stderr) == file, 'Not a StringIO object!'
        s = sys.stderr.getvalue()
        if s:
            log.error('Printed to stderr after daemonization: %s', s)
            sys.stderr.reset() # Seeks to 0.
            sys.stderr.truncate() # Truncates to current offset.
    # Flush to disk only when configured and not while still starting up.
    doFlush = conf.supybot.flush() and not starting
    if doFlush:
        flush()
        # This is so registry._cache gets filled.
        # This seems dumb, so we'll try not doing it anymore.
        #if registryFilename is not None:
        #    registry.open(registryFilename)
    if not dying:
        # Debug visibility into internal cache growth.
        log.debug('Regexp cache size: %s', len(sre._cache))
        log.debug('Pattern cache size: %s', len(ircutils._patternCache))
        log.debug('HostmaskPatternEqual cache size: %s',
                  len(ircutils._hostmaskPatternEqualCache))
    #timestamp = log.timestamp()
    if doFlush:
        log.info('Flushers flushed and garbage collected.')
    else:
        log.info('Garbage collected.')
    collected = gc.collect()
    if gc.garbage:
        log.warning('Noncollectable garbage (file this as a bug on SF.net): %s',
                    gc.garbage)
    return collected
def last_edit(self, irc, msg, args, username):
    """<username>

    Shows information about the last edit for the given user."""
    baseUrl = "http://osm.org"
    if not username:
        irc.error('You forgot to give me a username.')
        return
    # URL-encode the username so names with spaces/unicode survive the path.
    # (Also drops the previous redundant two-step assignment.)
    quoted_uname = urllib.quote(username)
    try:
        req = urllib2.Request('%s/user/%s/edits/feed' % (baseUrl, quoted_uname),
                              headers={'User-Agent': userAgent})
        xml = urllib2.urlopen(req)
    except urllib2.HTTPError as e:
        irc.error('Username %s was not found.' % (username))
        return
    except Exception as e:
        irc.error("Could not parse the user's changeset feed.")
        # BUG FIX: traceback.format_exc() takes an optional *limit*, not an
        # exception object; passing `e` was wrong and could itself raise on
        # newer Pythons.
        log.error(traceback.format_exc())
        return
    tree = ElementTree.ElementTree(file=xml)
    first_entry = tree.find('{http://www.w3.org/2005/Atom}entry')
    if first_entry is None:
        irc.error("Looks like %s doesn't have any edits." % (username))
        return
    author = first_entry.findtext(
        '{http://www.w3.org/2005/Atom}author/{http://www.w3.org/2005/Atom}name'
    )
    timestamp = first_entry.findtext(
        '{http://www.w3.org/2005/Atom}updated')
    entry_id = first_entry.findtext('{http://www.w3.org/2005/Atom}id')
    if author != username:
        # It looks like there's a bug where the API will give back the most recent user's edit feed
        # instead of a 404
        irc.error('Unknown username. Was "%s" but asked for "%s"' %
                  (author, username))
        return
    # Strip off the word "Changeset " from the title to get the number
    changeset_id = entry_id[39:]
    updated = isoToDatetime(timestamp)
    response = "User %s last edited %s with changeset http://osm.org/changeset/%s" % (
        author, prettyDate(updated), changeset_id)
    irc.reply(response.encode('utf-8'))
def _add_players_from_channel(self, **kwargs):
    """Scan GAME_CHANNEL's nick list and add every registered user's
    player unit to the dungeon.

    Keyword args:
        new_player_nick: if given, return that player's unit as soon as
        it has been added to the dungeon.
    """
    nicks_in_channel = []
    ignore_nicks = self.registryValue("ignoreNicks")
    new_player_nick = None
    if "new_player_nick" in kwargs:
        new_player_nick = kwargs["new_player_nick"]
    if GAME_CHANNEL in self.irc.state.channels:
        nicks_in_channel = self.irc.state.channels[GAME_CHANNEL].users
    for nick in nicks_in_channel:
        # Skip empty, ignored, and our own nick.
        if not nick:
            continue
        if nick in ignore_nicks:
            continue
        # Skip bot nick
        if nick == self.irc.nick:
            continue
        unit_collection = self.SpiffyWorld.unit_collection
        try:
            hostmask = self.irc.state.nickToHostmask(nick)
        except KeyError:
            hostmask = None
        user_id = None
        if hostmask is None:
            continue
        # Map the hostmask to a Supybot user id; unregistered nicks are
        # logged and skipped below.
        try:
            user_id = ircdb.users.getUserId(hostmask)
        except KeyError:
            log.info("SpiffyRPG: %s is not registered." % nick)
        """
        Registered users only
        """
        if user_id is None:
            continue
        if user_id is not None:
            player = unit_collection.get_player_by_user_id(user_id)
            if player is not None:
                # Refresh the player's current nick and place them in the
                # game dungeon.
                player.nick = nick
                dungeon = self.SpiffyWorld.get_dungeon_by_channel(GAME_CHANNEL)
                dungeon.add_unit(player)
                if new_player_nick is not None:
                    if player.nick == new_player_nick:
                        return player
            else:
                log.error("No player with user_id %s" % user_id)
def translationparty(self, irc, msg, args, opts, text):
    """[--lang <language>[,...]] [--show <none|one|all>] [--max <int>] [--verbose] <text>

    Try to find equilibrium in back-and-forth translations of <text>.
    (Defaults: --lang ja --show none --max 50)"""
    # Option defaults.
    input_langs = ['ja']
    show = 'none'
    max_translations = 50
    announce = False
    if len(text) > 1000:
        irc.reply('The text to be translated cannot exceed 1000 characters. Your request contains %d characters' % (len(text)))
    else:
        # Parse supybot-style (option, argument) pairs.
        for (opt, arg) in opts:
            if opt == 'lang':
                input_langs = arg.split(',')
            if opt == 'max':
                # NOTE(review): arg is used as-is; presumably the command
                # wrapper already converted it to int — verify.
                max_translations = arg
            if opt == 'verbose':
                announce = True
            if opt == 'show':
                show = arg
        try:
            # Translation cycle always starts/ends in English; validate
            # each requested intermediate language first.
            langs = ['en']
            for l in input_langs:
                iso_code = validateLanguage(l)
                if iso_code == None:
                    raise TranslationError(value="Unknown language: %s" % (l))
                langs.append(iso_code)
            result = self._party(langs, text, max_translations)
            if announce:
                # Fewer iterations than the cap means the text converged.
                if len(result) < max_translations:
                    irc.reply("Equilibrium found!")
                else:
                    irc.reply("It is doubtful that this phrase will ever reach equilibrium.")
            texts = map(lambda x: x['text'], result)
            if show == 'all':
                irc.reply(" -> ".join(texts).encode('utf8'))
            elif show == 'one':
                irc.reply(" -> ".join((texts[0], texts[-1])).encode('utf8'))
            else:
                # STILL TOO VERBOSE
                #irc.reply(('%(text)s [%(iterations)d iterations]' % { 'iterations' : len(texts), 'text' : texts[-1] }).encode('utf8'))
                irc.reply(texts[-1].encode('utf8'))
        except TranslationError, e:
            irc.reply(e)
            log.error(str(e))
            if e.stack is not None:
                texts = map(lambda x: '[%s] %s' % (x['lang'], x['text']), e.stack)
                log.debug("Stack: %s" % (" -> ".join(texts).encode('utf8')))
            if e.url is not None:
                log.debug("Last URL: %s" % (e.url))
def createdb(self, irc: callbacks.NestedCommandsIrcProxy,
             msg: ircmsgs.IrcMsg, args: List[str]) -> None:
    """- takes no arguments.

    Creates a new user table.
    """
    # Delegate table creation to the User model and report the outcome;
    # a database failure is logged and reported to the caller instead of
    # propagating.
    try:
        outcome: str = User.create_tables()
    except DatabaseError as error:
        log.error(str(error))
        irc.reply("There was an error with the database. Check logs.",
                  prefixNick=False)
    else:
        irc.reply(outcome, prefixNick=False)
def getPluginList(self):
    """Return the names of plugin directories in the configured
    repository path on the configured branch."""
    endpoint = '%s/%s/contents%s' % (self._username, self._reponame,
                                     self._path)
    listing = self._query('repos', endpoint, args={'ref': self._branch})
    if listing is None:
        log.error(('Cannot get plugins list from repository %s/%s '
                   'at Github') % (self._username, self._reponame))
        return []
    # Only directory entries are plugins; plain files are ignored.
    return [entry['name'] for entry in listing if entry['type'] == 'dir']
def Query(self, queryId):
    # Send a single UDP query packet to the game server and return the
    # response payload (everything after the 5-byte echoed header).
    if not self.valid:
        log.error("Querying on an invalid server instance!")
        # NOTE(review): falls through and implicitly returns None here.
    else:
        self.conn = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Request header: "<IB" = little-endian uint32 magic 0x80 followed
        # by the one-byte query id.
        self.conn.sendto(struct.pack("<IB", 0x80, queryId),
                         (self.addr, self.port))
        # NOTE(review): blocking recvfrom with no timeout — a dead server
        # hangs this call; also the socket is never closed here (it is
        # kept on self.conn) — confirm it is reused/closed elsewhere.
        recv, addr = self.conn.recvfrom(500000)
        if recv[0:5] == struct.pack("<IB", 0x80, queryId):
            # Valid response: server echoed our header; payload follows.
            return recv[5:]
        else:
            log.info("Invalid response header")
            return ""
def finish(self):
    """Finalize the user being built: persist it via self.users, resolving
    hostmask collisions by clearing this user's hostmasks and retrying."""
    if self.u.name:
        try:
            self.users.setUser(self.u)
        except DuplicateHostmask:
            # BUG FIX: the format string has two %s placeholders but only
            # one argument was supplied, which breaks the log formatting;
            # pass the name for both placeholders.
            log.error("Hostmasks for %s collided with another user's. "
                      "Resetting hostmasks for %s.",
                      self.u.name, self.u.name)
            # Some might argue that this is arbitrary, and perhaps it is.
            # But we've got to do *something*, so we'll show some deference
            # to our lower-numbered users.
            self.u.hostmasks.clear()
            self.users.setUser(self.u)
    # Reset the class-level slot so the next creator starts fresh.
    IrcUserCreator.u = None
def getPluginList(self):
    """Return plugin directory names from the repository using the legacy
    GitHub v2 API (branch listing followed by a tree walk)."""
    branches = self._query(
        "repos", "show/%s/%s/branches" % (self._username, self._reponame)
    )["branches"]
    latestCommit = branches["master"]
    # Walk the configured sub-path down from the latest commit's tree.
    segments = [part for part in self._path.split("/") if part != ""]
    treeHash = self._navigate(latestCommit, segments)
    if treeHash is None:
        log.error(("Cannot get plugins list from repository %s/%s "
                   "at Github") % (self._username, self._reponame))
        return []
    tree = self._query(
        "tree", "show/%s/%s/%s" % (self._username, self._reponame, treeHash)
    )["tree"]
    # Every sub-tree (directory) at this level is a plugin.
    return [node["name"] for node in tree if node["type"] == "tree"]
def fspawn(self, irc, msg, args, unit_level, unit_type_name):
    """
    Spawns a NPC - <level> <zen master|hacker|troll>
    """
    # Guard clause: without a dungeon for this channel there is nothing
    # to spawn into.
    dungeon = self.SpiffyWorld.get_dungeon_by_channel(msg.args[0])
    if dungeon is None:
        log.error("SpiffyRPG: could not find dungeon %s" % msg.args[0])
        return
    unit_type_id = self._get_unit_type_id_by_name(unit_type_name)
    dungeon.spawn_unit(level=unit_level, unit_type_id=unit_type_id)
def display_format(self, format: int = 1) -> str:
    """
    Takes the data that was queried and formats it to display to the user.

    Args:
        format(optional): The format you want to display the weather with.
        e.g. imperial first or metric - F/C or C/F

    Returns:
        A formatted string to display of the current weather.
    """
    # NOTE(review): Dict[Union[str, float]] is a malformed annotation
    # (Dict takes two type parameters). Harmless at runtime because local
    # annotations are never evaluated, but it should be corrected.
    current: Dict[Union[str, float]] = self.data.get("current")
    forecast: Dict[str, List[Dict]] = self.data.get("daily")
    if not current or not forecast:
        log.error("JSON data does not have current or forecast keys")
        raise WeatherNotFound("Unable to find the weather at this time.")
    # Raw values from the API (assumed Fahrenheit / mph — units follow the
    # conversions below).
    temp: float = current.get("temp")
    feels: float = current.get("feels_like")
    wind_spd: float = current.get("wind_speed")
    forecast_high: float = forecast[0].get("temp").get("max")
    forecast_low: float = forecast[0].get("temp").get("min")
    # Format to display imperial or metric units first.
    # e.g. 1 = imperial, 2 = metric, default is imperial.
    if format == 1:
        temperature = f"{temp:.1f}F/{(temp - 32)/1.8:.1f}C"
        feels_like = f"{feels:.1f}F/{(feels - 32)/1.8:.1f}C"
        high = f"{forecast_high:.1f}F/{(forecast_high - 32)/1.8:.1f}C"
        low = f"{forecast_low:.1f}F/{(forecast_low - 32)/1.8:.1f}C"
        wind = f"{wind_spd:.1f}mph/{wind_spd * 1.609344:.1f}kph"
    else:
        temperature = f"{(temp - 32)/1.8:.1f}C/{temp:.1f}F"
        feels_like = f"{(feels - 32)/1.8:.1f}C/{feels:.1f}F"
        high = f"{(forecast_high - 32)/1.8:.1f}C/{forecast_high:.1f}F"
        low = f"{(forecast_low - 32)/1.8:.1f}C/{forecast_low:.1f}F"
        wind = f"{wind_spd * 1.609344:.1f}kph/{wind_spd:.1f}mph"
    place = f"{self.location}, {self.region}"
    condition: str = current.get("weather")[0].get(
        "description").capitalize()
    humidity = f"{current.get('humidity')}"
    wind_dir: str = self.format_directions(current.get("wind_deg"))
    summary: str = forecast[0].get("weather")[0].get(
        "description").capitalize()
    # \x02 is the IRC bold control code.
    display = (
        f"\x02{place}\x02 :: {condition} {temperature} (Humidity: {humidity}%) | \x02Feels like\x02: {feels_like} "
        f"| \x02Wind\x02: {wind_dir} at {wind} | \x02Today\x02: {summary}. High {high} - Low {low}"
    )
    return display
def __init__(self, irc):
    """Initialize the plugin and subscribe to configuration changes when
    the running Supybot supports registry callbacks."""
    self.__parent = super(Listener, self)
    self.__parent.__init__(irc)
    self.listenerThreads = []
    try:
        relays = conf.supybot.plugins.Listener.relays
        relays.addCallback(self._loadFromConfig)
    except registry.NonExistentRegistryEntry:
        # Older Supybot without config hooks: warn that live reloads via
        # the Config plugin won't work.
        log.error("Your version of Supybot is not compatible with "
                  "configuration hooks. So, Listener won't be able "
                  "to reload the configuration if you use the Config "
                  "plugin.")
    self._loadFromConfig()
def run(self):
    # One driver iteration: execute every scheduled event whose time has
    # arrived, in timestamp order.
    if len(drivers._drivers) == 1 and not world.testing:
        log.error('Schedule is the only remaining driver, '
                  'why do we continue to live?')
        time.sleep(1) # We're the only driver; let's pause to think.
    # self.schedule is a heap of (time, name) pairs.
    while self.schedule and self.schedule[0][0] < time.time():
        (t, name) = heapq.heappop(self.schedule)
        f = self.events[name]
        # Remove before calling so a periodic event can reschedule itself.
        del self.events[name]
        try:
            f()
        except Exception, e:
            # Never let one broken event kill the scheduler.
            log.exception('Uncaught exception in scheduled function:')
def open(self, filename): self.filename = filename reader = unpreserve.Reader(IrcUserCreator, self) try: self.noFlush = True try: reader.readFile(filename) self.noFlush = False self.flush() except EnvironmentError, e: log.error('Invalid user dictionary file, resetting to empty.') log.error('Exact error: %s', utils.exnToString(e)) except Exception, e: log.exception('Exact error:')
def finish(self):
    """Finalize the user under construction: persist it, handling hostmask
    collisions by clearing this user's hostmasks and saving again."""
    if self.u.name:
        try:
            self.users.setUser(self.u)
        except DuplicateHostmask:
            # BUG FIX: two %s placeholders but only one argument was
            # supplied, breaking the log call; supply the name twice.
            log.error(
                'Hostmasks for %s collided with another user\'s. '
                'Resetting hostmasks for %s.',
                self.u.name, self.u.name)
            # Some might argue that this is arbitrary, and perhaps it is.
            # But we've got to do *something*, so we'll show some deference
            # to our lower-numbered users.
            self.u.hostmasks.clear()
            self.users.setUser(self.u)
    # Clear the class-level slot for the next creator.
    IrcUserCreator.u = None
def _shortenUrl(self, url):
    """Shortens a long URL into a short one.

    Returns the TinyURL alias as a string, or None when the request fails
    (the failure is logged).
    """
    try:
        data = requests.get(
            "http://tinyurl.com/api-create.php?url={0}".format(url),
            timeout=5,
        )
    except requests.exceptions.RequestException as e:
        # FIX: HTTPError is a subclass of RequestException, so listing it
        # separately was redundant; one clause covers every requests
        # failure mode.
        log.error("Tweety: ERROR retrieving tiny url: {0}".format(e))
        return
    else:
        return data.content.decode()
def ratelimits(self, irc, msg, args):
    """
    Display current rate limits for your twitter API account.
    """
    # before we do anything, make sure we have a twitterApi object.
    if not self.twitterApi:
        irc.reply(
            "ERROR: Twitter is not authorized. Please check logs before running "
            "this command."
        )
        return
    # make API call.
    data = self.twitterApi.ApiCall(
        "application/rate_limit_status",
        parameters={"resources": "trends,search,statuses,users"},
    )
    try:
        data = json.loads(data)
    except (TypeError, ValueError):
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit and hid unrelated bugs; json.loads
        # failures raise ValueError (JSONDecodeError) or TypeError only.
        irc.reply("ERROR: Failed to lookup ratelimit data: {0}".format(data))
        return
    # parse data;
    data = data.get("resources")
    if not data:  # simple check if we have part of the json dict.
        irc.reply(
            "ERROR: Failed to fetch application rate limit status. Something could "
            "be wrong with Twitter."
        )
        log.error("Tweety: ERROR fetching rate limit data: {0}".format(data))
        return
    # dict of resources and how to parse. key=name, values are for the json dict.
    resources = {
        "trends": ["trends", "/trends/place"],
        "tsearch": ["search", "/search/tweets"],
        "twitter --id": ["statuses", "/statuses/show/:id"],
        "twitter --info": ["users", "/users/show/:id"],
        "twitter timeline": ["statuses", "/statuses/user_timeline"],
    }
    # now iterate through dict above.
    for resource in resources:
        rdict = resources[resource]  # get value.
        endpoint = data.get(rdict[0]).get(rdict[1])  # value[0], value[1]
        # Seconds until the window resets, rendered as "XmYs".
        minutes = "%sm%ss" % divmod(
            int(endpoint["reset"]) - int(time.time()), 60
        )  # math.
        output = "Reset in: {0} Remaining: {1}".format(
            minutes, endpoint["remaining"]
        )
        irc.reply("{0} :: {1}".format(self._bold(resource), output))
def __init__(self, irc):
    """Initialize the plugin, load the relay configuration, and (when the
    running Supybot supports it) register callbacks so configuration
    changes reload automatically."""
    self.__parent = super(LinkRelay, self)
    self.__parent.__init__(irc)
    self._loadFromConfig()
    self.ircstates = {}
    group = conf.supybot.plugins.LinkRelay
    try:
        group.substitutes.addCallback(self._loadFromConfig)
        group.relays.addCallback(self._loadFromConfig)
    except registry.NonExistentRegistryEntry:
        # Old Supybot without registry callbacks: edits made through the
        # Config plugin won't be picked up until a reload.
        log.error("Your version of Supybot is not compatible with "
                  "configuration hooks. So, LinkRelay won't be able "
                  "to reload the configuration if you use the Config "
                  "plugin.")
def open(self, filename):
    # Load the ignore list: one hostmask per line, optionally followed by
    # a float expiration timestamp.
    self.filename = filename
    fd = file(self.filename)
    for line in utils.file.nonCommentNonEmptyLines(fd):
        try:
            line = line.rstrip('\r\n')
            L = line.split()
            hostmask = L.pop(0)
            if L:
                # Remaining token is the expiration time (seconds since
                # the epoch); stored truncated to int.
                expiration = int(float(L.pop(0)))
            else:
                # No timestamp means the ignore never expires.
                expiration = 0
            self.add(hostmask, expiration)
        except Exception, e:
            # Skip malformed lines rather than aborting the whole load.
            log.error('Invalid line in ignores database: %q', line)
def login(self, irc, msg, args):
    """takes no arguments

    Logins to Undernet's X Service"""
    # Guard clause: this service only exists on UnderNet.
    if irc.state.supported.get("NETWORK", "") != "UnderNet":
        log.error("We're not on UnderNet, we can't use this.")
        irc.error("We're not on UnderNet, this is useless.")
        return
    # Both credentials must be configured before we attempt anything.
    username = self.registryValue("auth.username")
    password = self.registryValue("auth.password")
    if not (username and password):
        log.warning(
            "username and password not set, this plugin will not work")
        return
    log.info("Attempting login to XService")
    self._login(irc)
def reconnect(self, reset=True):
    # Tear down any existing connection and begin a fresh connection
    # attempt to the next configured server for this network.
    self._attempt += 1
    self.nextReconnectTime = None
    if self.connected:
        drivers.log.reconnect(self.irc.network)
        if self in self._instances:
            self._instances.remove(self)
        try:
            self.conn.shutdown(socket.SHUT_RDWR)
        except: # "Transport endpoint not connected"
            pass
        self.conn.close()
        self.connected = False
    if reset:
        drivers.log.debug('Resetting %s.', self.irc)
        self.irc.reset()
    else:
        drivers.log.debug('Not resetting %s.', self.irc)
    server = self._getNextServer()
    socks_proxy = getattr(conf.supybot.networks, self.irc.network) \
        .socksproxy()
    resolver = None
    try:
        # Import is only attempted when a proxy is configured, so a
        # missing SocksiPy degrades to a direct connection.
        if socks_proxy:
            import socks
    except ImportError:
        log.error('Cannot use socks proxy (SocksiPy not installed), '
                  'using direct connection instead.')
        socks_proxy = ''
    if socks_proxy:
        # With a SOCKS proxy the hostname is resolved by the proxy itself.
        address = server[0]
    else:
        try:
            address = utils.net.getAddressFromHostname(server[0],
                                                       attempt=self._attempt)
        except socket.gaierror as e:
            # DNS failure: log it and try again later.
            drivers.log.connectError(self.currentServer, e)
            self.scheduleReconnect()
            return
    drivers.log.connect(self.currentServer)
    try:
        self.conn = utils.net.getSocket(address, socks_proxy)
        # Bind to the configured vhost (port 0 = any local port).
        vhost = conf.supybot.protocols.irc.vhost()
        self.conn.bind((vhost, 0))
    except socket.error, e:
        drivers.log.connectError(self.currentServer, e)
        self.scheduleReconnect()
        return