Example #1
def add_station(uri):
    """Fetches the URI content and extracts IRFiles
    Returns None on error, else a possibly filled list of stations"""

    irfs = []

    if uri.lower().endswith(".pls") or uri.lower().endswith(".m3u"):
        if not re.match('^([^/:]+)://', uri):
            # Assume HTTP if no protocol given. See #2731
            uri = 'http://' + uri
            print_d("Assuming http: %s" % uri)
        try:
            sock = urlopen(uri)
        except EnvironmentError as err:
            err = "%s\n\nURL: %s" % (text_type(err), uri)
            print_d("Got %s from %s" % (err, uri))
            ErrorMessage(None, _("Unable to add station"), escape(err)).run()
            return None

        if uri.lower().endswith(".pls"):
            irfs = ParsePLS(sock)
        elif uri.lower().endswith(".m3u"):
            irfs = ParseM3U(sock)

        sock.close()
    else:
        try:
            irfs = [IRFile(uri)]
        except ValueError as err:
            ErrorMessage(None, _("Unable to add station"), err).run()

    return irfs
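A minimal usage sketch for the variant above, assuming it lives in the same module (so print_d and the error dialog are available). The helper name and the library.add() call are illustrative, not part of the original code; the point is that a caller has to distinguish the None error marker from an empty or non-empty list:

def add_stations_to_library(uris, library):
    """Hypothetical helper: collect everything add_station() returns,
    skipping URIs whose download failed outright."""
    added = []
    for uri in uris:
        irfs = add_station(uri)
        if irfs is None:
            # urlopen failed; the user already saw the error dialog
            continue
        if not irfs:
            print_d("No stations found in %s" % uri)
            continue
        added.extend(irfs)
    if added:
        library.add(added)  # assumes a SongLibrary-style add() method
    return added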
Example #2
def add_station(uri):
    """Fetches the URI content and extracts IRFiles"""

    irfs = []
    if isinstance(uri, unicode):
        uri = uri.encode('utf-8')

    if uri.lower().endswith(".pls") or uri.lower().endswith(".m3u"):
        try:
            sock = urlopen(uri)
        except EnvironmentError as e:
            encoding = util.get_locale_encoding()
            try:
                err = e.strerror.decode(encoding, 'replace')
            except (TypeError, AttributeError):
                err = e.strerror[1].decode(encoding, 'replace')
            qltk.ErrorMessage(None, _("Unable to add station"), err).run()
            return []

        if uri.lower().endswith(".pls"):
            irfs = ParsePLS(sock)
        elif uri.lower().endswith(".m3u"):
            irfs = ParseM3U(sock)

        sock.close()
    else:
        try:
            irfs = [IRFile(uri)]
        except ValueError as err:
            qltk.ErrorMessage(None, _("Unable to add station"), err).run()

    return irfs
Example #3
def add_station(uri):
    """Fetches the URI content and extracts IRFiles
    Returns None on error, else a possibly filled list of stations"""

    irfs = []

    if uri.lower().endswith(".pls") or uri.lower().endswith(".m3u"):
        try:
            sock = urlopen(uri)
        except EnvironmentError as err:
            err = text_type(err)
            print_d("Got %s from %s" % (uri, err))
            ErrorMessage(None, _("Unable to add station"), escape(err)).run()
            return None

        if uri.lower().endswith(".pls"):
            irfs = ParsePLS(sock)
        elif uri.lower().endswith(".m3u"):
            irfs = ParseM3U(sock)

        sock.close()
    else:
        try:
            irfs = [IRFile(uri)]
        except ValueError as err:
            ErrorMessage(None, _("Unable to add station"), err).run()

    return irfs
Example #4
def get_punctuation_mapping(regenerate=False):
    """This takes the unicode confusables set and extracts punctuation
    which looks similar to one or more ASCII punctuation.

    e.g. ＂ --> "  (the FULLWIDTH QUOTATION MARK maps to the ASCII quote)

    """

    if not regenerate:
        return _PUNCT_CONFUSABLES_CACHE

    h = urlopen("http://www.unicode.org/Public/security/9.0.0/confusables.txt")
    data = h.read()
    mapping = {}
    for line in data.decode("utf-8-sig").splitlines():
        line = line.strip()
        if not line:
            continue
        if line.startswith(u"#"):
            continue

        char, repls = line.split(";", 2)[:2]
        char = char.strip()
        repls = repls.split()
        to_uni = lambda x: unichr(int(x, 16))
        char = to_uni(char)
        repls = [to_uni(r) for r in repls]

        def is_ascii(char):
            try:
                char.encode("ascii")
            except UnicodeEncodeError:
                return False
            return True

        def is_punct(char):
            return unicodedata.category(char).startswith("P")

        if all(is_ascii(c) and is_punct(c) for c in repls) and char:
            repls = u"".join(repls)
            mapping[repls] = mapping.get(repls, u"") + char

    # if any of the equal chars is also ascii + punct we can replace
    # it as well
    for ascii_, uni in mapping.items():
        also_ascii = [c for c in uni if is_ascii(c) and is_punct(c)]
        for c in also_ascii:
            mapping[c] = uni.replace(c, u"")

    return mapping
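A sketch of how the returned mapping could be consumed. Its keys are ASCII punctuation strings and its values are the concatenated look-alike characters, so inverting the single-character entries gives a str.translate table; the helper below is illustrative and assumes the fullwidth quotation mark (U+FF02) ends up in the data:

def build_confusable_table(mapping):
    """Invert {ascii_punct: confusable_chars} into a str.translate table."""
    table = {}
    for ascii_, confusables in mapping.items():
        if len(ascii_) != 1:
            # multi-character ASCII replacements can't go into a translate table
            continue
        for confusable in confusables:
            table[ord(confusable)] = ascii_
    return table

# e.g. u"\uff02quoted\uff02".translate(build_confusable_table(mapping))
# would give u'"quoted"', assuming the fullwidth quote is in the mapping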
Example #5
def download_taglist(callback, cofuncid, step=1024 * 10):
    """Generator for loading the bz2 compressed tag list.

    Calls callback with the parsed station list or None in case of
    an error."""

    with Task(_("Internet Radio"), _("Downloading station list")) as task:
        if cofuncid:
            task.copool(cofuncid)

        try:
            response = urlopen(STATION_LIST_URL)
        except EnvironmentError:
            GLib.idle_add(callback, None)
            return

        try:
            size = int(response.info().get("content-length", 0))
        except ValueError:
            size = 0

        decomp = bz2.BZ2Decompressor()

        data = ""
        temp = ""
        read = 0
        while temp or not data:
            read += len(temp)

            if size:
                task.update(float(read) / size)
            else:
                task.pulse()
            yield True

            try:
                data += decomp.decompress(temp)
                temp = response.read(step)
            except (IOError, EOFError):
                data = None
                break
        response.close()

        yield True

        stations = None
        if data:
            stations = parse_taglist(data)

        GLib.idle_add(callback, stations)
Example #6
def download_taglist(callback, cofuncid, step=1024 * 10):
    """Generator for loading the bz2 compressed tag list.

    Calls callback with the parsed station list or None in case of
    an error."""

    with Task(_("Internet Radio"), _("Downloading station list")) as task:
        if cofuncid:
            task.copool(cofuncid)

        try:
            response = urlopen(STATION_LIST_URL)
        except EnvironmentError:
            GLib.idle_add(callback, None)
            return

        try:
            size = int(response.info().get("content-length", 0))
        except ValueError:
            size = 0

        decomp = bz2.BZ2Decompressor()

        data = b""
        temp = b""
        read = 0
        while temp or not data:
            read += len(temp)

            if size:
                task.update(float(read) / size)
            else:
                task.pulse()
            yield True

            try:
                data += decomp.decompress(temp)
                temp = response.read(step)
            except (IOError, EOFError):
                data = None
                break
        response.close()

        yield True

        stations = None
        if data:
            stations = parse_taglist(data)

        GLib.idle_add(callback, stations)
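download_taglist is a cooperative generator: it yields between chunked reads so a copool or idle handler can keep the UI responsive. A minimal sketch of driving it outside such a framework, with a hypothetical callback; the callback itself is only dispatched once a GLib main loop iterates:

def on_taglist(stations):
    # dispatched via GLib.idle_add once the download finishes (or fails)
    if stations is None:
        print_w("Station list download failed")
    else:
        print_d("Parsed %d stations" % len(stations))

def run_download_blocking():
    """Drive the generator to completion without a copool (sketch only)."""
    for _ in download_taglist(on_taglist, cofuncid=None):
        pass  # each iteration performs one chunked read/decompress step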
Example #7
 def _fetch_image(self, url):
     try:
         data = urlopen(url).read()
     except Exception as e:
         print_w("Couldn't read web image from %s (%s)" % (url, e))
         return None
     try:
         loader = GdkPixbuf.PixbufLoader()
     except GLib.GError as e:
         print_w("Couldn't create GdkPixbuf (%s)" % e)
     else:
         loader.write(data)
         loader.close()
         print_d("Got web image from %s" % url)
         return loader.get_pixbuf()
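A sketch of how such a helper might be called from its surrounding class, assuming the caller has a Gtk.Image to fill; the method name and thumbnail size are illustrative, while scale_simple() and set_from_pixbuf() are standard GdkPixbuf/Gtk API:

 def _set_cover(self, image_widget, url, size=48):
     """Hypothetical caller: fetch, scale and display a web image."""
     pixbuf = self._fetch_image(url)
     if pixbuf is None:
         return False
     scaled = pixbuf.scale_simple(size, size, GdkPixbuf.InterpType.BILINEAR)
     image_widget.set_from_pixbuf(scaled)
     return True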
Example #8
def fetch_versions(build_type, timeout=5.0):
    """Fetches the list of available releases and returns a list
    of version tuples. Sorted and oldest version first. The list
    might be empty. Also returns an URL to the download page.

    Args:
        build_type (text): the build type. e.g. "default" or "windows"
        timeout (float): timeout in seconds

    Thread safe.

    Raises UpdateError
    """

    # TODO: we currently don't depend on feedparser.. maybe we should?
    try:
        import feedparser
    except ImportError as error:
        raise UpdateError(error)

    try:
        content = urlopen(
            u"https://quodlibet.github.io/appcast/%s.rss" % build_type,
            timeout=timeout).read()
    except Exception as error:
        raise UpdateError(error)

    d = feedparser.parse(content)
    if d.bozo:
        raise UpdateError(d.bozo_exception)

    try:
        link = d.feed.link
        enclosures = [e for entry in d.entries for e in entry.enclosures]
    except AttributeError as error:
        raise UpdateError(error)

    try:
        versions = [parse_version(en.version) for en in enclosures]
    except ValueError as error:
        raise UpdateError(error)

    return sorted(versions), link
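A usage sketch built only on the documented return value (a sorted version list plus a link to the download page). CURRENT_VERSION is a placeholder for whatever version tuple the application reports; it is not defined by the original module:

CURRENT_VERSION = (4, 0, 0)  # placeholder, not part of the original code

def check_for_update(build_type="default"):
    """Return (newest_version, link) if a newer release exists, else None."""
    try:
        versions, link = fetch_versions(build_type)
    except UpdateError as e:
        print_w("Update check failed: %r" % e)
        return None
    if versions and versions[-1] > CURRENT_VERSION:
        return versions[-1], link
    return None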
Example #9
def get_decomps_mapping(regenerate=False):
    """This takes the decomps.txt file of the Unicode UCA and gives us a cases
    where a letter can be decomposed for collation and that mapping isn't in
    NFKD.
    """

    if not regenerate:
        return _UCA_DECOMPS_CACHE

    mapping = {}

    h = urlopen("http://unicode.org/Public/UCA/8.0.0/decomps.txt")
    for line in h.read().splitlines():
        if line.startswith("#"):
            continue

        to_uni = lambda x: unichr(int(x, 16))
        is_letter = lambda x: unicodedata.category(x) in ("Lu", "Ll", "Lt")

        cp, line = line.split(";", 1)
        tag, line = line.split(";", 1)
        decomp, line = line.split("#", 1)
        decomp = map(to_uni, decomp.strip().split())
        cp = to_uni(cp)

        if not is_letter(cp):
            continue

        decomp = filter(is_letter, decomp)
        simple = "".join(decomp)
        if not simple:
            continue

        # skip anything we get from normalization
        if unicodedata.normalize("NFKD", cp)[0] == simple:
            continue

        mapping[simple] = mapping.get(simple, "") + cp

    return mapping
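A sketch of consuming the result for search matching, assuming the data yields an entry such as mapping[u"O"] containing u"\u00d8" (Ø collates like O but has no NFKD decomposition). The helper name is illustrative:

import re

def build_variant_pattern(word, mapping):
    """Expand each letter into a character class of its UCA look-alikes."""
    parts = []
    for ch in word:
        variants = mapping.get(ch, u"")
        if variants:
            parts.append(u"[%s%s]" % (re.escape(ch), re.escape(variants)))
        else:
            parts.append(re.escape(ch))
    return u"".join(parts)

# re.match(build_variant_pattern(u"Oslo", mapping), u"\u00d8slo") would then
# succeed, assuming mapping[u"O"] contains u"\u00d8"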
Example #10
def add_station(uri):
    """Fetches the URI content and extracts IRFiles
    Returns None on error, else a possibly filled list of stations"""

    irfs = []
    if isinstance(uri, unicode):
        uri = uri.encode('utf-8')

    if uri.lower().endswith(".pls") or uri.lower().endswith(".m3u"):
        try:
            sock = urlopen(uri)
        except EnvironmentError as e:
            print_d("Got %s from %s" % (uri, e))
            encoding = util.get_locale_encoding()
            try:
                err = e.strerror.decode(encoding, 'replace')
            except TypeError:
                err = e.strerror[1].decode(encoding, 'replace')
            except AttributeError:
                # Give up and display the exception - may be useful HTTP info
                err = str(e)
            ErrorMessage(None, _("Unable to add station"), escape(err)).run()
            return None

        if uri.lower().endswith(".pls"):
            irfs = ParsePLS(sock)
        elif uri.lower().endswith(".m3u"):
            irfs = ParseM3U(sock)

        sock.close()
    else:
        try:
            irfs = [IRFile(uri)]
        except ValueError as err:
            ErrorMessage(None, _("Unable to add station"), err).run()

    return irfs
Example #11
 def test_urllib(self):
     if is_windows():
         # FIXME
         return
     urlopen(self.URI).close()
Example #12
 def test_urllib_default(self):
     for url in self.GOOD:
         urlopen(url).close()
     for url in self.BAD:
         with self.assertRaises(Exception):
             urlopen(url).close()
Example #13
 def test_urllib(self):
     for url in self.GOOD:
         urlopen(url, cafile=get_ca_file()).close()
     for url in self.BAD:
         with self.assertRaises(Exception):
             urlopen(url, cafile=get_ca_file()).close()