def test_net_gget_uses_args(get):
    arg1 = "arg1"
    arg2 = "arg2"
    kwarg1 = "kwarg1"
    kwarg2 = "kwarg2"
    kwarg3 = "kwarg3"

    Net.GGet(arg1, arg2, kwarg1=kwarg1, kwarg2=kwarg2, kwarg3=kwarg3)

    args, kwargs = get.call_args
    assert "arg1" in args
    assert "arg2" in args
    assert "kwarg1" in kwargs
    assert "kwarg2" in kwargs
    assert "kwarg3" in kwargs
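# For context, a minimal sketch of the wrapper the tests above exercise,
# assuming Net.GGet simply forwards its arguments to grequests.get
# (hypothetical -- the real implementation may add headers, timeouts, etc.):
import grequests


class Net:
    @staticmethod
    def GGet(*args, **kwargs):
        """Create an asynchronous GET request, forwarding all arguments.

        The tests above patch grequests.get and assert that every
        positional and keyword argument reaches it unchanged.
        """
        return grequests.get(*args, **kwargs)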
def reload(self, display=None, feeds=None) -> None:
    """Reload feeds in the database.

    To preserve user metadata for episodes (such as played/marked status),
    we use Episode.replace_from(), which "manually" copies such fields to
    the newly downloaded episode. This is necessary because downloaded
    episodes are new Episode instances and we can't guarantee they have
    any of the same properties. Therefore, Episode.replace_from() _must_
    be updated whenever a new user metadata field is added.

    Also: to determine which episodes are the same when copying user
    metadata, we simply check whether the string representations of the
    two episodes match (usually the episodes' titles). This could cause
    issues if a feed has multiple episodes with the same title, although
    it does not require episode titles to be globally unique (that is,
    episodes with the same name in different feeds will never have
    issues).

    This method adheres to the max_episodes config parameter to limit the
    number of episodes saved per feed.

    Args:
        display: (optional) the display to write status updates to
        feeds: (optional) a list of feeds to reload. If not specified,
            all feeds in the database will be reloaded
    """
    if feeds is None:
        feeds = self.feeds()

    total_feeds = len(feeds)
    completed_feeds = 0

    reqs = []
    url_pairs = {}
    file_feeds = []

    # Create async requests for each URL feed. We also keep a map from
    # each feed's URL to the Feed object itself in order to access the
    # object when a request completes (since the response object is all
    # that we are given).
    # We also keep track of file-based feeds, which are handled afterward.
    for feed in feeds:
        if feed.key.startswith("http"):
            url_pairs[feed.key] = feed
            reqs.append(Net.GGet(feed.key))
        else:
            file_feeds.append(feed)

    # handle each response as downloads complete asynchronously
    for response in grequests.imap(reqs, size=3):
        if display is not None:
            display.change_status(
                "Reloading feeds (%d/%d)" % (completed_feeds, total_feeds))
        old_feed = url_pairs[response.request.url]
        new_feed = Feed(url=response.request.url, response=response)
        self._reload_feed_data(old_feed, new_feed)
        completed_feeds += 1

    # handle each file-based feed
    for old_feed in file_feeds:
        new_feed = Feed(file=old_feed.key)
        self._reload_feed_data(old_feed, new_feed)
        completed_feeds += 1
        if display is not None:
            display.change_status(
                "Reloading feeds (%d/%d)" % (completed_feeds, total_feeds))

    if display is not None:
        display.change_status(
            "Successfully reloaded %d feeds" % total_feeds)
        display.menus_valid = False
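# A minimal sketch of the Episode.replace_from() contract the docstring
# above relies on (the field names here are assumptions; the real Episode
# class may track different user metadata):
class Episode:
    def __init__(self, title: str) -> None:
        self.title = title
        # user metadata that must survive a reload (assumed fields)
        self.played = False
        self.marked = False

    def replace_from(self, other: "Episode") -> None:
        """Copy user metadata from another episode onto this one.

        As the reload() docstring warns, this method must be updated
        whenever a new user metadata field is added.
        """
        self.played = other.played
        self.marked = other.marked

    def __str__(self) -> str:
        # reload() matches old and new episodes by string representation,
        # typically the episode title
        return self.title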
def test_net_gget_empty(get):
    Net.GGet()

    assert get.called