Example #1
    def calc(self, expression):
        # Scrape Google's answer (the bold <h2 class="r"> element on the
        # results page) for the given expression.
        result = scrapemark.scrape("""
            <h2 class="r"><b>{{ answer }}</b></h2>
        """, url=GoogleAPI.GOOGLE_SEARCH_URL + urllib.urlencode({"q": expression}))

        if result:
            return result['answer']
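For reference, scrapemark.scrape returns a dict of the named {{ }} captures when the pattern matches, and something falsy otherwise (which is why the snippet guards with if result). A minimal sketch of the same capture, run against an inline HTML string via scrapemark's html argument instead of a live URL; the markup and value below are invented:

    # Minimal sketch: the {{ answer }} capture against an inline HTML string.
    # The HTML is made up; only scrapemark is required.
    import scrapemark

    html = '<html><body><h2 class="r"><b>4</b></h2></body></html>'
    result = scrapemark.scrape("""
        <h2 class="r"><b>{{ answer }}</b></h2>
    """, html=html)

    print result['answer']   # prints: 4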
Example #2
    def search(self, searchterms):
        # Scrape each result link and title from the <ol> of results inside
        # <div id="ires"> into a list of dicts named 'results'.
        result = scrapemark.scrape("""
            <div id="ires">
                <ol>
                {*
                    <a class="l" href="{{ [results].link }}">{{ [results].title }}</a>
                *}
                </ol>
            </div>
        """, url=GoogleAPI.GOOGLE_SEARCH_URL + urllib.urlencode({"q": searchterms}))

        if result:
            return result['results']
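Here the {* ... *} block repeats over the page, and each {{ [results].field }} capture appends to a list stored under 'results', so the caller gets back a list of dicts with 'link' and 'title' keys. A minimal sketch against a stripped-down inline HTML string (the links and titles are invented):

    # Minimal sketch: {* ... *} repetition with [results] list captures,
    # run against an inline HTML string instead of a live search URL.
    import scrapemark

    html = """
    <div id="ires">
        <ol>
            <a class="l" href="http://example.com/a">First hit</a>
            <a class="l" href="http://example.com/b">Second hit</a>
        </ol>
    </div>
    """
    result = scrapemark.scrape("""
        <div id="ires">
            <ol>
            {*
                <a class="l" href="{{ [results].link }}">{{ [results].title }}</a>
            *}
            </ol>
        </div>
    """, html=html)

    for hit in result['results']:
        print hit['link'], hit['title']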
Example #3
    def event_privmsg(self, irc):
        # Look for the first URL in the incoming message text
        match = self._re_url.search(irc.message.params)

        if not match:
            return

        url = match.group(0)

        # Rewrite hashbang (AJAX) URLs such as /#!/foo to plain /foo
        url = url.replace('/#!', '')

        res = urlparse.urlsplit(url)

        # Don't try to get the title for ftp etc
        if res.scheme not in ['http', 'https']:
            return

        # Encode the domain part to IDNA (punycode) if possible
        netloc = res.netloc
        try:
            hostname = res.hostname.encode('IDNA')

            # Add back the username and password, if any
            if '@' in netloc:
                usernpass, _ = netloc.rsplit('@', 1)
                netloc = usernpass + '@' + hostname
            else:
                netloc = hostname
        except Exception:
            # Keep the original netloc if the hostname cannot be encoded
            pass

        res = res._replace(fragment='', netloc=netloc)

        # Reassemble the parts
        url = urlparse.urlunsplit(res)
        self.event.dispatch_system_event('urlcatcherurl', [url, irc])

        Logger.info("Urlcatcher retrieving '%s'" % url)
        try:
            data = scrapemark.scrape("<title>{{title}}</title>", url=url)

            if not data:
                Logger.info("Urlcatcher received no title")
                return

            irc.reply('Title: %s' % data['title'])
        except Exception:
            Logger.warning("Urlcatcher error")
            raise
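The URL clean-up that happens before the title is fetched (rewriting hashbang URLs, IDNA-encoding the host, dropping the fragment) can be exercised on its own with just the Python 2 stdlib. A rough sketch on a made-up URL:

    # Rough sketch of the same clean-up steps on a made-up URL.
    import urlparse

    url = u'http://b\xfccher.example/#!/books?page=2'

    url = url.replace('/#!', '')              # drop the hashbang
    res = urlparse.urlsplit(url)

    netloc = res.hostname.encode('idna')      # u'b\xfccher.example' -> 'xn--bcher-kva.example'
    res = res._replace(fragment='', netloc=netloc)

    print urlparse.urlunsplit(res)            # http://xn--bcher-kva.example/books?page=2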