Example #1
def get_size_of_url(url):
    request = Request(url)
    request.add_header('Accept-Encoding', 'gzip')
    request.add_header('User-Agent', USER_AGENT)
    url_sock = urlopen(request)
    size = url_sock.headers.get('content-length')
    url_sock.close()
    return format_size(int(size)) if size else ''
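To try get_size_of_url (and the similar snippets below) outside of its original module, roughly the following setup is needed. This is only a sketch: USER_AGENT and format_size are placeholder stand-ins, not the project's real definitions, and the remaining helpers (get_encoding_from_socket, print_d, print_w, ...) are assumed to come from the surrounding codebase.

# Minimal setup assumed by these snippets (Python 3); placeholders only.
import gzip
import re
from io import BytesIO
from urllib.parse import urlencode
from urllib.request import Request, urlopen

USER_AGENT = "example-client/1.0"  # placeholder, not the project's value

def format_size(num_bytes):
    # simplistic stand-in for the project's human-readable size formatter
    for unit in ("B", "KB", "MB", "GB"):
        if num_bytes < 1024:
            return "%.1f %s" % (num_bytes, unit)
        num_bytes /= 1024.0
    return "%.1f TB" % num_bytes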
Example #2
def get_size_of_url(url):
    request = Request(url)
    request.add_header("Accept-Encoding", "gzip")
    request.add_header("User-Agent", USER_AGENT)
    url_sock = urlopen(request)
    size = url_sock.headers.get("content-length")
    url_sock.close()
    return format_size(int(size)) if size else ""
Example #3
def get_url(url, post=None, get=None):
    post_params = urlencode(post or {})
    get_params = urlencode(get or {})
    if get:
        get_params = '?' + get_params

    # add post, get data and headers
    url = '%s%s' % (url, get_params)
    if post_params:
        # POST data must be bytes when the request is opened with urlopen
        request = Request(url, post_params.encode("utf-8"))
    else:
        request = Request(url)

    # for discogs
    request.add_header('Accept-Encoding', 'gzip')
    request.add_header('User-Agent', USER_AGENT)

    url_sock = urlopen(request)
    enc = get_encoding_from_socket(url_sock)

    # unzip the response if needed
    data = url_sock.read()
    if url_sock.headers.get("content-encoding", "") == "gzip":
        data = gzip.GzipFile(fileobj=BytesIO(data)).read()
    url_sock.close()
    content_type = url_sock.headers.get('Content-Type', '').split(';', 1)[0]
    domain = re.compile(r'\w+://([^/]+)/').search(url).groups(0)[0]
    print_d("Got %s data from %s" % (content_type, domain))
    return (data if content_type.startswith('image') else data.decode(enc))
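For context, a hedged usage sketch of the get_url helper above; the URL and parameter names are invented for illustration and are not part of the original code.

# Hypothetical GET: the query string is url-encoded and appended to the URL;
# non-image responses come back as decoded text.
html = get_url("https://api.example.com/search", get={"q": "nevermind"})

# Hypothetical POST: form fields are url-encoded and sent in the request body.
reply = get_url("https://api.example.com/submit", post={"title": "test"})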
Example #4
def send_feedback(dsn, event_id, name, email, comment, timeout):
    """Send feedback, blocking.

    Args:
        dsn (str): The DSN
        event_id (str): The event ID this feedback should be attached to
        name (text_type): The user name
        email (text_type): The user email
        comment (text_type): The feedback text
        timeout (float): The timeout for this request
    Raises:
        SentryError: In case of timeout or other errors
    """

    name = text_type(name).encode("utf-8")
    email = text_type(email).encode("utf-8")
    comment = text_type(comment).encode("utf-8")

    data = urlencode([('name', name), ('email', email), ('comments', comment)])
    if not isinstance(data, bytes):
        # py3
        data = data.encode("utf-8")

    headers = {"Referer": "https://quodlibet.github.io"}
    params = urlencode([("dsn", dsn), ("eventId", event_id)])

    try:
        req = Request("https://sentry.io/api/embed/error-page/?" + params,
                      data=data,
                      headers=headers)

        urlopen(req, timeout=timeout).close()
    except EnvironmentError as e:
        raise SentryError(e)
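A possible call site for send_feedback, shown only as a sketch; the DSN and event ID below are placeholders, not real credentials.

# Hypothetical invocation; SentryError is raised on timeout or HTTP errors.
try:
    send_feedback("https://xxxx@sentry.io/1234", "0123456789abcdef",
                  "Jane Doe", "jane@example.com", "Crash on startup", 5.0)
except SentryError as e:
    print("feedback failed:", e)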
Example #5
def get_url(url, post=None, get=None):
    post_params = urlencode(post or {})
    get_params = urlencode(get or {})
    if get:
        get_params = '?' + get_params

    # add post, get data and headers
    url = '%s%s' % (url, get_params)
    if post_params:
        # POST data must be bytes when the request is opened with urlopen
        request = Request(url, post_params.encode("utf-8"))
    else:
        request = Request(url)

    # for discogs
    request.add_header('Accept-Encoding', 'gzip')
    request.add_header('User-Agent', USER_AGENT)

    url_sock = urlopen(request)
    enc = get_encoding_from_socket(url_sock)

    # unzip the response if needed
    data = url_sock.read()
    if url_sock.headers.get("content-encoding", "") == "gzip":
        data = gzip.GzipFile(fileobj=BytesIO(data)).read()
    url_sock.close()
    content_type = url_sock.headers.get('Content-Type', '').split(';', 1)[0]
    domain = re.compile(r'\w+://([^/]+)/').search(url).groups(0)[0]
    print_d("Got %s data from %s" % (content_type, domain))
    return (data if content_type.startswith('image')
            else data.decode(enc))
Example #6
def get_url(url, post=None, get=None):
    post_params = urlencode(post or {})
    get_params = urlencode(get or {})
    if get:
        get_params = "?" + get_params

    # add post, get data and headers
    url = "%s%s" % (url, get_params)
    if post_params:
        # POST data must be bytes when the request is opened with urlopen
        request = Request(url, post_params.encode("utf-8"))
    else:
        request = Request(url)

    # for discogs
    request.add_header("Accept-Encoding", "gzip")
    request.add_header("User-Agent", USER_AGENT)

    url_sock = urlopen(request)
    enc = get_encoding_from_socket(url_sock)

    # unzip the response if needed
    data = url_sock.read()
    if url_sock.headers.get("content-encoding", "") == "gzip":
        data = gzip.GzipFile(fileobj=BytesIO(data)).read()
    url_sock.close()

    return data, enc
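Unlike the variant in Example #3, this version returns the raw bytes together with the detected encoding. A short, hypothetical usage sketch (the URL is made up):

# Caller decodes the body itself; fall back to UTF-8 if no encoding was found.
data, enc = get_url("https://api.example.com/info")
text = data.decode(enc or "utf-8")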
Example #7
    def __process(self, results):
        req_data = []
        req_data.append(
            urlencode({
                "format": "json",
                "client": APP_KEY,
                "batch": "1",
            }))

        for i, result in enumerate(results):
            postfix = ".%d" % i
            req_data.append(
                urlencode({
                    "duration" + postfix: str(int(round(result.length))),
                    "fingerprint" + postfix: result.chromaprint,
                }))

        req_data.append("meta=releases+recordings+tracks+sources")

        urldata = "&".join(req_data)
        obj = BytesIO()
        gzip.GzipFile(fileobj=obj, mode="wb").write(urldata.encode())
        urldata = obj.getvalue()

        headers = {
            "Content-Encoding": "gzip",
            "Content-type": "application/x-www-form-urlencoded"
        }
        req = Request(self.URL, urldata, headers)

        releases = {}
        error = ""
        try:
            response = urlopen(req, timeout=self.TIMEOUT)
        except EnvironmentError as e:
            error = "urllib error: " + str(e)
        else:
            try:
                data = response.read()
                data = json.loads(data.decode())
            except ValueError as e:
                error = str(e)
            else:
                if data["status"] == "ok":
                    for result_data in data.get("fingerprints", []):
                        if "index" not in result_data:
                            continue
                        index = result_data["index"]
                        releases[index] = parse_acoustid_response(result_data)

        for i, result in enumerate(results):
            yield LookupResult(result, releases.get(str(i), []), error)
Example #8
    def __send(self, urldata):
        if self.__stopped:
            return

        gatekeeper.wait()

        self.__done += len(urldata)

        basedata = urlencode({
            "format": "xml",
            "client": APP_KEY,
            "user": get_api_key(),
        })

        urldata = "&".join([basedata] + list(map(urlencode, urldata)))
        obj = BytesIO()
        gzip.GzipFile(fileobj=obj, mode="wb").write(urldata.encode())
        urldata = obj.getvalue()

        headers = {
            "Content-Encoding": "gzip",
            "Content-type": "application/x-www-form-urlencoded"
        }
        req = Request(self.URL, urldata, headers)

        error = None
        try:
            response = urlopen(req, timeout=self.TIMEOUT)
        except EnvironmentError as e:
            error = "urllib error: " + str(e)
        else:
            xml = response.read()
            try:
                dom = parseString(xml)
            except Exception:
                error = "xml error"
            else:
                status = dom.getElementsByTagName("status")
                if (not status or not status[0].childNodes
                        or status[0].childNodes[0].nodeValue != "ok"):
                    error = "response status error"

        if error:
            print_w("[fingerprint] Submission failed: " + error)

        # emit progress
        self.__idle(self.__progress_cb,
                    float(self.__done) / len(self.__results))
Example #9
def get_encoding(url):
    request = Request(url)
    request.add_header('Accept-Encoding', 'gzip')
    request.add_header('User-Agent', USER_AGENT)
    url_sock = urlopen(request)
    return get_encoding_from_socket(url_sock)
Example #10
    def __set_async(self, url):
        """Manages various things:
        Fast switching of covers (aborting old HTTP requests),
        The image cache, etc."""

        self.current_job += 1
        job = self.current_job

        self.stop_loading = True
        while self.loading:
            time.sleep(0.05)
        self.stop_loading = False

        if job != self.current_job:
            return

        self.loading = True

        GLib.idle_add(self.button.set_sensitive, False)
        self.current_pixbuf = None

        pbloader = GdkPixbuf.PixbufLoader()
        pbloader.connect('closed', self.__close)

        # Look for cached images
        raw_data = None
        for entry in self.data_cache:
            if entry[0] == url:
                raw_data = entry[1]
                break

        if not raw_data:
            pbloader.connect('area-updated', self.__update)

            data_store = BytesIO()

            try:
                request = Request(url)
                request.add_header('User-Agent', USER_AGENT)
                url_sock = urlopen(request)
            except EnvironmentError:
                print_w(_("[albumart] HTTP Error: %s") % url)
            else:
                while not self.stop_loading:
                    tmp = url_sock.read(1024 * 10)
                    if not tmp:
                        break
                    pbloader.write(tmp)
                    data_store.write(tmp)

                url_sock.close()

                if not self.stop_loading:
                    raw_data = data_store.getvalue()

                    self.data_cache.insert(0, (url, raw_data))

                    while 1:
                        cache_sizes = [
                            len(data[1]) for data in self.data_cache
                        ]
                        if sum(cache_sizes) > self.max_cache_size:
                            del self.data_cache[-1]
                        else:
                            break

            data_store.close()
        else:
            # Sleep for fast switching of cached images
            time.sleep(0.05)
            if not self.stop_loading:
                pbloader.write(raw_data)

        try:
            pbloader.close()
        except GLib.GError:
            pass

        self.current_data = raw_data

        if not self.stop_loading:
            GLib.idle_add(self.button.set_sensitive, True)

        self.loading = False
Example #11
    def __set_async(self, url):
        """Manages various things:
        Fast switching of covers (aborting old HTTP requests),
        The image cache, etc."""

        self.current_job += 1
        job = self.current_job

        self.stop_loading = True
        while self.loading:
            time.sleep(0.05)
        self.stop_loading = False

        if job != self.current_job:
            return

        self.loading = True

        GLib.idle_add(self.button.set_sensitive, False)
        self.current_pixbuf = None

        pbloader = GdkPixbuf.PixbufLoader()
        pbloader.connect('closed', self.__close)

        # Look for cached images
        raw_data = None
        for entry in self.data_cache:
            if entry[0] == url:
                raw_data = entry[1]
                break

        if not raw_data:
            pbloader.connect('area-updated', self.__update)

            data_store = BytesIO()

            try:
                request = Request(url)
                request.add_header('User-Agent', USER_AGENT)
                url_sock = urlopen(request)
            except EnvironmentError:
                print_w(_("[albumart] HTTP Error: %s") % url)
            else:
                while not self.stop_loading:
                    tmp = url_sock.read(1024 * 10)
                    if not tmp:
                        break
                    pbloader.write(tmp)
                    data_store.write(tmp)

                url_sock.close()

                if not self.stop_loading:
                    raw_data = data_store.getvalue()

                    self.data_cache.insert(0, (url, raw_data))

                    while 1:
                        cache_sizes = [len(data[1]) for data in
                                       self.data_cache]
                        if sum(cache_sizes) > self.max_cache_size:
                            del self.data_cache[-1]
                        else:
                            break

            data_store.close()
        else:
            # Sleep for fast switching of cached images
            time.sleep(0.05)
            if not self.stop_loading:
                pbloader.write(raw_data)

        try:
            pbloader.close()
        except GLib.GError:
            pass

        self.current_data = raw_data

        if not self.stop_loading:
            GLib.idle_add(self.button.set_sensitive, True)

        self.loading = False
Example #12
def get_encoding(url):
    request = Request(url)
    request.add_header("Accept-Encoding", "gzip")
    request.add_header("User-Agent", USER_AGENT)
    url_sock = urlopen(request)
    return get_encoding_from_socket(url_sock)