Example #1
    def download_to_file(url, file, name, download_queue, display=None):
        """Downloads a URL to a local file.

        Args:
            url: the source url
            file: the destination path
            name: the user-friendly name of the content
            download_queue: the download_queue overseeing this download
            display: (optional) the display to write status updates to
        """
        chunk_size = 1024
        chunk_size_label = "KB"

        response = Net.Get(url, stream=True)
        downloaded = 0
        # Stream the response to disk in chunks; the with block closes the
        # file even if the download is interrupted.
        with open(file, "wb") as handle:
            for chunk in response.iter_content(chunk_size=chunk_size):
                if display is not None:
                    status_str = "Downloading \"%s\": %d%s" % (
                        name, downloaded / chunk_size, chunk_size_label)
                    if download_queue.length > 1:
                        status_str += " (+%d downloads in queue)" % (
                            download_queue.length - 1)

                    display.change_status(status_str)
                if chunk:
                    handle.write(chunk)
                downloaded += len(chunk)

        if display is not None:
            display.change_status("Episode successfully downloaded.")
        download_queue.next()
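
Note that `Net.Get` itself is not shown in any of these examples. Judging from how it is used (arguments passed straight through, `requests.exceptions.RequestException` caught around it in Example #2, and an asynchronous sibling `Net.GGet` consumed by `grequests.imap` in Example #5), it plausibly wraps `requests.get` and `grequests.get`. A minimal sketch under exactly that assumption:

import grequests
import requests


class Net:
    """Hypothetical sketch; the real Net class is not shown in these
    examples and may differ."""

    @staticmethod
    def Get(*args, **kwargs):
        # Assumed to delegate directly to requests.get (synchronous).
        return requests.get(*args, **kwargs)

    @staticmethod
    def GGet(*args, **kwargs):
        # Assumed to build an unsent grequests request for async dispatch.
        return grequests.get(*args, **kwargs)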
Example #2
    def _download_feed(self):
        """Parses the feed at the provided url or file into _tree.

        This method checks whether the url is valid and that there is a
        parse-able XML document at the url, but it does not check that the
        document is an RSS feed, nor whether the feed has all necessary tags.

        Raises:
            FeedParseError: unable to parse text as an XML document
            FeedDownloadError: (only when retrieving feed using url) did not
                receive an acceptable status code, or an exception occurred
                when attempting to download the page
            FeedLoadError: (only when retrieving feed using file) a feed could
                not be found at the file, or an exception occurred when
                attempting to load the file
        """
        if self._url is not None:
            # handle feed from url
            try:
                response = Net.Get(self._url)
                if response.status_code == 200:
                    try:
                        self._tree = ElementTree.fromstring(response.text)
                    except ElementTree.ParseError:
                        raise FeedParseError(
                            "Unable to parse text as an XML document")
                else:
                    raise FeedDownloadError(
                        "Did not receive an acceptable status code while"
                        " downloading the page. Expected 200, got: "
                        + str(response.status_code))
            except requests.exceptions.RequestException:
                raise FeedDownloadError(
                    "An exception occurred when attempting to download the"
                    " page")
        elif self._file is not None:
            # handle feed from file
            try:
                with open(self._file) as file:
                    text = file.read()
            except IOError:
                raise FeedLoadError(
                    "An exception occurred when attempting to load the file")

            try:
                self._tree = ElementTree.fromstring(text)
            except ElementTree.ParseError:
                raise FeedParseError(
                    "Unable to parse text as an XML document")
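
The docstring above defines a clear error taxonomy for this method. A caller-side sketch of how the three exception types might be distinguished; only the exception names come from the example, the handling itself is hypothetical:

# Assuming `feed` is an instance of the class this method belongs to.
try:
    feed._download_feed()
except FeedParseError:
    print("Document retrieved, but it is not parse-able XML.")
except FeedDownloadError:
    print("Could not download the feed from its URL.")
except FeedLoadError:
    print("Could not load the feed from its file.")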
Example #3
def test_net_gget_uses_args(get):
    arg1 = 'arg1'
    arg2 = 'arg2'
    kwarg1 = 'kwarg1'
    kwarg2 = 'kwarg2'
    kwarg3 = 'kwarg3'

    Net.GGet(arg1, arg2, kwarg1=kwarg1, kwarg2=kwarg2, kwarg3=kwarg3)
    args, kwargs = get.call_args
    assert 'arg1' in args
    assert 'arg2' in args
    assert 'kwarg1' in kwargs
    assert 'kwarg2' in kwargs
    assert 'kwarg3' in kwargs
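
The `get` argument here is a pytest fixture that is not shown. Because the test inspects `get.call_args`, the fixture is presumably a mock standing in for the HTTP getter that `Net.GGet` delegates to. A minimal sketch of such a fixture, assuming the patch target is `requests.get` (the real fixture may patch a different path, such as `grequests.get`):

from unittest import mock

import pytest


@pytest.fixture
def get():
    # Hypothetical patch target; adjust to whatever Net actually calls.
    with mock.patch("requests.get") as mocked:
        yield mocked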
Example #4
    def download_to_file(url, file, name, download_queue, display=None):
        """Downloads a URL to a local file.

        :param url: the source url
        :param file: the destination path
        :param name: the user-friendly name of the content
        :param download_queue: the download_queue overseeing this download
        :param display: (optional) the display to write status updates to
        """
        chunk_size = 1024
        chunk_size_label = "KB"

        try:
            response = Net.Get(url, stream=True)
        except requests.exceptions.RequestException as e:
            if display is not None:
                display.change_status("RequestException: %s" % str(e))
            download_queue.next()
            return
        else:
            downloaded = 0
            # Stream the response to disk in chunks; the with block closes
            # the file even if the download is interrupted.
            with open(file, "wb") as handle:
                for chunk in response.iter_content(chunk_size=chunk_size):
                    if display is not None:
                        status_str = 'Downloading "%s": %d%s' % (
                            name, downloaded / chunk_size, chunk_size_label)
                        if download_queue.length > 1:
                            status_str += " (+%d downloads in queue)" % (
                                download_queue.length - 1)

                        display.change_status(status_str)
                    if chunk:
                        handle.write(chunk)
                    downloaded += len(chunk)

            if display is not None:
                display.change_status("Episode successfully downloaded.")
                display.menus_valid = False
        download_queue.next()
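
This variant of `download_to_file` only duck-types its collaborators: the display needs a `change_status()` method and a `menus_valid` attribute, and the queue needs `length` and `next()`. The following stand-ins are purely hypothetical, shown only to illustrate the expected interfaces; the URL and filename are placeholders:

class PrintDisplay:
    """Hypothetical display that prints status lines to stdout."""
    menus_valid = True

    def change_status(self, message):
        print(message)


class SingleItemQueue:
    """Hypothetical queue holding only the current download."""
    length = 1

    def next(self):
        pass  # nothing else queued


download_to_file("https://example.com/episode.mp3", "episode.mp3",
                 "Episode 1", SingleItemQueue(), display=PrintDisplay())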
Example #5
    def reload(self, display=None, feeds=None) -> None:
        """Reload feeds in the database.

        To preserve user metadata for episodes (such as played/marked status),
        we use Episode.replace_from() which "manually" copies such fields to
        the new downloaded episode. This is necessary because downloaded
        episodes are new Episode instances and we can't guarantee they have any
        of the same properties.

        Therefore, Episode.replace_from() _must_ be updated if any new user
        metadata fields are added.

        Also: to determine which episodes are the same in order to copy user
        metadata, we simply check whether the string representations of the
        two episodes match (usually the episodes' titles). This could cause
        issues if a feed has multiple episodes with the same title, although
        it does not require episode titles to be globally unique (that is,
        episodes with the same name in different feeds will never conflict).

        This method adheres to the max_episodes config parameter to limit the
        number of episodes saved per feed.

        Args:
            display: (optional) the display to write status updates to
            feeds: (optional) a list of feeds to reload. If not specified,
                all feeds in the database will be reloaded
        """
        if feeds is None:
            feeds = self.feeds()
        total_feeds = len(feeds)
        completed_feeds = 0

        reqs = []
        url_pairs = {}
        file_feeds = []
        # Create async requests for each URL feed. We also keep a map from
        # each feed's URL to the Feed object itself in order to access the
        # object when a request completes (since the response object is all
        # that we are given).
        # We also keep track of file-based feeds, which are handled afterwards.
        for feed in feeds:
            if feed.key.startswith("http"):
                url_pairs[feed.key] = feed
                reqs.append(Net.GGet(feed.key))
            else:
                file_feeds.append(feed)

        # handle each response as downloads complete asynchronously
        for response in grequests.imap(reqs, size=3):
            old_feed = url_pairs[response.request.url]
            new_feed = Feed(url=response.request.url, response=response)
            self._reload_feed_data(old_feed, new_feed)

            completed_feeds += 1
            if display is not None:
                display.change_status("Reloading feeds (%d/%d)" %
                                      (completed_feeds, total_feeds))

        # handle each file-based feed
        for old_feed in file_feeds:
            new_feed = Feed(file=old_feed.key)
            self._reload_feed_data(old_feed, new_feed)

            completed_feeds += 1
            if display is not None:
                display.change_status("Reloading feeds (%d/%d)" %
                                      (completed_feeds, total_feeds))

        if display is not None:
            display.change_status("Successfully reloaded %d feeds" %
                                  total_feeds)
            display.menus_valid = False
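
The comments in the middle of this method describe the asynchronous pattern it relies on: build a list of unsent requests, keep a map from each URL back to its owning object, then consume responses in completion order. A standalone sketch of that pattern using plain `grequests` (the URLs are placeholders):

import grequests

urls = [
    "https://example.com/feed1.xml",
    "https://example.com/feed2.xml",
    "https://example.com/feed3.xml",
]

# Map each URL back to an owning object, mirroring url_pairs above.
owners = {url: {"url": url} for url in urls}

reqs = [grequests.get(url) for url in urls]
# imap yields responses in completion order, with at most 3 in flight.
for response in grequests.imap(reqs, size=3):
    owner = owners[response.request.url]
    print(owner["url"], response.status_code)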
Example #6
def test_net_get_empty(get):
    Net.Get()
    assert get.called