Code example #1
    def changed(self, entry):
        from gi.repository import Gtk, GObject, GLib, Soup

        text = misc.utf8(self.city_entry.get_text())
        if not text:
            return
        # TODO if the completion widget has a selection, return?  How do we
        # determine this?
        # Reuse the cached completion model for text we have already looked up.
        if text in self.geoname_cache:
            model = self.geoname_cache[text]
            self.city_entry.get_completion().set_model(model)
        else:
            model = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING,
                                  GObject.TYPE_STRING, GObject.TYPE_STRING,
                                  GObject.TYPE_STRING)

            if self.geoname_session is None:
                self.geoname_session = Soup.SessionAsync()
            url = _geoname_url % (quote(text), misc.get_release().version)
            message = Soup.Message.new('GET', url)
            message.request_headers.append('User-agent', 'Ubiquity/1.0')
            # Abort any in-flight lookup and drop its pending timeout so that
            # only the latest text results in a request.
            self.geoname_session.abort()
            if self.geoname_timeout_id is not None:
                GLib.source_remove(self.geoname_timeout_id)
            self.geoname_timeout_id = \
                GLib.timeout_add_seconds(2, self.geoname_timeout,
                                         (text, model))
            self.geoname_session.queue_message(message, self.geoname_cb,
                                               (text, model))
Code example #2
    def checkuri(self, uri, callback, **kwargs):
        # Lazily create one shared async session for all URI checks.
        if not self.soup:
            self.soup = Soup.SessionAsync()
        msg = Soup.Message.new("GET", uri)
        self.to_check.append(uri)
        kwargs['uri'] = uri
        # Queue the request; callback(session, message, kwargs) runs when it finishes.
        self.soup.queue_message(msg, callback, kwargs)
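Both fragments above build a Soup.Message, queue it on a Soup.SessionAsync, and rely on a callback plus a running main loop, neither of which is shown. A minimal self-contained sketch of that pattern, assuming libsoup 2.x and using a hypothetical URL and callback name, could look like this (the callback only inspects the status code):

import gi
gi.require_version('Soup', '2.4')
from gi.repository import GLib, Soup

# Illustrative sketch only; the URL and names below are not from the projects above.
session = Soup.SessionAsync()
loop = GLib.MainLoop()

def on_finished(session, message, user_data):
    # SoupSessionCallback signature: (session, message, user_data).
    # Invoked once the request completes or fails.
    print(user_data, message.status_code)
    loop.quit()

msg = Soup.Message.new('GET', 'http://example.com/')
session.queue_message(msg, on_finished, 'example request')
loop.run()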
Code example #3
File: downloader.py  Project: surajgillespie/sugar
def get_soup_session():
    global _session
    if _session is None:
        _session = Soup.SessionAsync()
        _session.set_property("timeout", 60)
        _session.set_property("idle-timeout", 60)
        _session.set_property("user-agent", "Sugar/%s" % config.version)
    return _session
Code example #4
def get_soup_session():
    global _session
    if _session is None:
        _session = Soup.SessionAsync()
        _session.set_property("timeout", 60)
        _session.set_property("idle-timeout", 60)
        _session.set_property("user-agent", "Sugar/%s" % config.version)
        # Route requests through the system default proxy resolver.
        _session.add_feature_by_type(Soup.ProxyResolverDefault)
    return _session
Code example #5
    def __init__(self, *args, **kwargs):
        """Initialize this instance."""
        super(WebClient, self).__init__(*args, **kwargs)
        # pylint: disable=E0611,F0401
        from gi.repository import Soup, SoupGNOME
        self.soup = Soup
        self.session = Soup.SessionAsync()
        # Resolve proxies via the GNOME settings and hook up HTTP auth handling.
        self.session.add_feature(SoupGNOME.ProxyResolverGNOME())
        self.session.connect("authenticate", self._on_authenticate)
Code example #6
import configparser
import os
import pickle
import re
import sys
import webbrowser
from collections import defaultdict
from html import escape, parser
from gi.repository import GLib, Gtk, Gdk, Notify, Soup
from xml.etree import ElementTree

CONFIG_PATH = os.path.expanduser('~/.config/grs')
CACHE_PATH = os.path.expanduser('~/.cache/grs')
CONFIG = configparser.ConfigParser()
CONFIG.read(CONFIG_PATH)
SESSION = Soup.SessionAsync()
CACHE = (pickle.load(open(CACHE_PATH, 'rb'))
         if os.path.exists(CACHE_PATH) else defaultdict(set))


class Article(object):
    def __init__(self, feed, tag):
        self.feed = feed
        title = tag.find(self.feed.namespace + 'title').text
        self.title = title.strip() if title else ''

        link_tag = tag.find(self.feed.namespace + 'link')
        self.link = (link_tag.attrib.get('href') or link_tag.text).strip()

        enclosure_tag = tag.find(self.feed.namespace + 'enclosure')
        if enclosure_tag is not None and 'url' in enclosure_tag.attrib:
Code example #7
import re

from gi.repository import Gtk
from gi.repository import Soup
from django.template import defaultfilters
import BeautifulSoup

import gpapers
from gpapers_info import __version__
from gpapers.logger import *
from gpapers.gPapers.models import Paper

active_threads = None

p_whitespace = re.compile('[\s]+')
p_doi = re.compile('doi *: *(10.[a-z0-9]+/[a-z0-9.]+)', re.IGNORECASE)

soup_session = Soup.SessionAsync()
#arXiv disallows requests if no user-agent is set
soup_session.set_property("user-agent", "gPapers/%s" % __version__)


def _decode_htmlentities(string):
    entity_re = re.compile("&(#?)(\d{1,5}|\w{1,8});")
    return entity_re.subn(_substitute_entity, string)[0]


def html_strip(s):
    if isinstance(s, BeautifulSoup.Tag):
        s = ''.join([html_strip(x) for x in s.contents])
    return _decode_htmlentities(
        p_whitespace.sub(' ',
                         str(s).replace('&nbsp;', ' ').strip()))