def handler(self, data):
    # Collect every URL in the message; do nothing if there are none.
    links = self.pattern.findall(data.message.to_s())
    if not links:
        return
    for link in links:
        link = link[0]
        try:
            with closing(urlopen(link)) as req:
                # Only scrape textual responses; skip images, binaries, etc.
                if req.info().getmaintype() != 'text':
                    return
                content = req.read()
                # Prefer the page's <title>; otherwise fall back to a short,
                # quoted excerpt of the body.
                titles = self.title_pattern.findall(content)
                if titles:
                    title = titles[0]
                else:
                    title = '"' + content[:self.alt_title_len] + '..."'
                try:
                    data.target.message(
                        self.format.replace("^t^", unescape(title))
                                   .replace("^l^", req.geturl()))
                except UnicodeDecodeError:
                    data.target.message(
                        "[%sURLScraper%s] %sCould not decode title information!%s"
                        % (Escapes.GREEN, Escapes.BLACK, Escapes.AQUA, Escapes.BLACK))
        except (HTTPError, IOError):
            data.target.message(
                "[%sURLScraper%s] %sCould not fetch title information!%s"
                % (Escapes.GREEN, Escapes.BLACK, Escapes.AQUA, Escapes.BLACK))
    return
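# A minimal sketch of the setup the URLScraper handler above assumes. The
# attribute names are taken from the handler itself; the concrete regexes,
# output format, and excerpt length are illustrative assumptions, not the
# plugin's actual values. `unescape` and `Escapes` come from the bot's own
# utilities and are not reproduced here.
import re
from contextlib import closing            # used by the handler (Python 2)
from urllib2 import urlopen, HTTPError    # used by the handler (Python 2)

class URLScraper(object):

    def __init__(self):
        # URL matcher; the doubled group makes findall() return tuples,
        # which is why the handler does `link = link[0]`.
        self.pattern = re.compile(r'((https?://[^\s<>"]+))')
        # Grabs the contents of the first <title> element in the page.
        self.title_pattern = re.compile(r'<title[^>]*>(.*?)</title>',
                                        re.IGNORECASE | re.DOTALL)
        # ^t^ and ^l^ are replaced with the title and the resolved URL.
        self.format = "[URLScraper] ^t^ - ^l^"
        # How many characters of the body to quote when no <title> exists.
        self.alt_title_len = 64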
def handler(self, data):
    if data.message == (0, '@google'):
        try:
            query = " ".join(data.message[1:])
        except IndexError:
            data.respond_to_user("[" + self.prefix + "Search]: You must provide a term to search!")
            return
        if query in self.cache:
            # A cached (None, None) marks a query that previously returned nothing.
            if self.cache[query] == (None, None):
                data.respond_to_user("[" + self.prefix + "Search]: No results could be found. Please refine your search terms.")
                return
            title, url = self.cache[query]
            # display result
            data.respond_to_user("[" + self.prefix + "Search]: %s%s%s, %s"
                                 % (Escapes.BOLD, title, Escapes.BOLD, url))
            return
        data.respond_to_user("[" + self.prefix + "Search]: Searching...")
        target_url = self.url + urlencode({"v": "1.0", "q": query})
        response = json.loads(urlopen(target_url).read())
        if response['responseStatus'] != 200:
            code = response['responseStatus']
            error = response['responseDetails']
            data.respond_to_user("[" + self.prefix + "Search]: An error occurred while searching!")
            data.respond_to_user("[" + self.prefix + "Search]: Failed with code [" + str(code) + "]: " + error)
            return
        try:
            result = response['responseData']['results'][0]
            title = unescape(result['titleNoFormatting'])
            url = result['url']
        except Exception:
            # No usable result: cache the miss and let the recursive call report it.
            self.cache[query] = (None, None)
            return self.handler(data)
        self.cache[query] = (title, url)
        # re-call myself, so that there's not as much duplicate code.
        return self.handler(data)
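# A minimal sketch of the state the search handler above assumes on `self`.
# The "v": "1.0" parameter and the responseData/results JSON shape suggest the
# (long-deprecated) Google AJAX Web Search API, but the endpoint, prefix, and
# cache layout below are assumptions, not the plugin's actual configuration.
# `unescape` and `Escapes` come from the bot's own utilities.
import json                      # used by the handler above
from urllib import urlencode     # Python 2; used by the handler above
from urllib2 import urlopen      # Python 2; used by the handler above

class GoogleSearch(object):

    def __init__(self):
        # Base URL that the encoded query string is appended to.
        self.url = "http://ajax.googleapis.com/ajax/services/search/web?"
        # Rendered as "[<prefix>Search]: ..." in every response line.
        self.prefix = "Google"
        # query -> (title, url); (None, None) marks a query with no results.
        self.cache = {}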