def _save_logo(self):
    """Serve a local PNG fixture for ``self.URL`` and save it as the logo."""
    with responses.RequestsMock() as rsps, open(IMG_PATH1, 'rb') as img:
        rsps.add(
            responses.GET,
            self.URL,
            status=200,
            body=img,
            content_type='image/png',
        )
        CoverArt.save_podcast_logo(self.URL)
def test_save_empty_logo(self):
    """ Make sure that save_podcast_logo(None) does not fail """
    try:
        CoverArt.save_podcast_logo(None)
    # was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; Exception is the widest class a test should trap
    except Exception:
        self.fail('CoverArt.save_podcast_logo(None) should not raise '
                  'an exception')
def _save_logo(self):
    """Mock a successful image download for ``self.URL`` and store the cover."""
    with responses.RequestsMock() as rsps, open(IMG_PATH1, 'rb') as fixture:
        rsps.add(responses.GET, self.URL, status=200,
                 body=fixture, content_type='image/png')
        CoverArt.save_podcast_logo(self.URL)
def test_save_empty_logo(self):
    """ Make sure that save_podcast_logo(None) does not fail """
    try:
        CoverArt.save_podcast_logo(None)
    # narrowed from a bare `except:` so that SystemExit/KeyboardInterrupt
    # still propagate out of the test
    except Exception:
        self.fail(
            'CoverArt.save_podcast_logo(None) should not raise '
            'an exception'
        )
def test_exception_during_fetch(self):
    """A RequestException raised while downloading must not propagate."""
    with responses.RequestsMock() as rsps:
        err = requests.exceptions.RequestException('Fetching URL failed')
        rsps.add(responses.GET, self.URL, body=err)
        CoverArt.save_podcast_logo(self.URL)
def test_exception_during_fetch(self):
    """Saving a logo survives a network error during the fetch."""
    failure = requests.exceptions.RequestException('Fetching URL failed')
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.GET,
            self.URL,
            body=failure,
        )
        CoverArt.save_podcast_logo(self.URL)
def test_new_logo(self):
    """Re-fetching the same image keeps the cover; a new image replaces it."""
    with responses.RequestsMock() as rsps, \
            open(IMG_PATH1, 'rb') as first, \
            open(IMG_PATH1, 'rb') as second, \
            open(IMG_PATH2, 'rb') as third:
        # three queued responses: same image twice, then a different one
        for payload in (first, second, third):
            rsps.add(
                responses.GET,
                self.URL,
                status=200,
                body=payload,
                content_type='image/png',
            )

        # NOTE(review): result is unused; presumably kept for its side
        # effects (or as a smoke test that it does not raise) — confirm
        logo_url = get_logo_url(self.podcast, 32)

        # initial download
        CoverArt.save_podcast_logo(self.URL)
        response1 = self._fetch_cover(self.podcast)

        # identical image again -> served cover unchanged
        CoverArt.save_podcast_logo(self.URL)
        response2 = self._fetch_cover(self.podcast)
        self.assertEqual(list(response1.streaming_content),
                         list(response2.streaming_content))

        # different image -> served cover changes
        CoverArt.save_podcast_logo(self.URL)
        response3 = self._fetch_cover(self.podcast)
        self.assertNotEqual(list(response2.streaming_content),
                            list(response3.streaming_content))
def test_new_logo(self):
    """The stored cover stays stable for identical images and is
    replaced when the image at the URL changes."""
    mock = responses.RequestsMock()
    with mock as rsps, open(IMG_PATH1, 'rb') as img_a, open(
        IMG_PATH1, 'rb'
    ) as img_b, open(IMG_PATH2, 'rb') as img_c:
        rsps.add(responses.GET, self.URL, status=200,
                 body=img_a, content_type='image/png')
        rsps.add(responses.GET, self.URL, status=200,
                 body=img_b, content_type='image/png')
        rsps.add(responses.GET, self.URL, status=200,
                 body=img_c, content_type='image/png')

        # NOTE(review): unused binding; presumably exercised for side
        # effects only — confirm against get_logo_url
        logo_url = get_logo_url(self.podcast, 32)

        # first request
        CoverArt.save_podcast_logo(self.URL)
        cover_1 = self._fetch_cover(self.podcast)

        # stayed the same
        CoverArt.save_podcast_logo(self.URL)
        cover_2 = self._fetch_cover(self.podcast)
        self.assertEqual(
            list(cover_1.streaming_content), list(cover_2.streaming_content)
        )

        # changed
        CoverArt.save_podcast_logo(self.URL)
        cover_3 = self._fetch_cover(self.podcast)
        self.assertNotEqual(
            list(cover_2.streaming_content), list(cover_3.streaming_content)
        )
def _update_podcast(self, podcast, parsed, episode_updater, update_result):
    """Update a podcast according to new parser results.

    Copies parsed feed fields onto the podcast (keeping existing values
    when the parser returned nothing), follows feed relocations, adjusts
    the update interval, refreshes the logo and categories, and always
    saves the podcast to record the update time.
    """

    # we need that later to decide if we can "bump" a category
    prev_latest_episode_timestamp = podcast.latest_episode_timestamp

    # will later be used to see whether the index is outdated
    old_index_fields = get_index_fields(podcast)

    # `parsed value or old value`: a missing/empty parsed field never
    # clears an existing one
    podcast.title = parsed.get('title') or podcast.title
    podcast.description = parsed.get('description') or podcast.description
    podcast.subtitle = parsed.get('subtitle') or podcast.subtitle
    podcast.link = parsed.get('link') or podcast.link
    podcast.logo_url = parsed.get('logo') or podcast.logo_url
    podcast.author = to_maxlength(
        Podcast, 'author', parsed.get('author') or podcast.author)
    podcast.language = to_maxlength(
        Podcast, 'language', parsed.get('language') or podcast.language)
    podcast.content_types = (','.join(parsed.get('content_types')) or
                             podcast.content_types)
    # podcast.tags['feed'] = parsed.tags or podcast.tags.get('feed', [])
    podcast.common_episode_title = to_maxlength(
        Podcast,
        'common_episode_title',
        parsed.get('common_title') or podcast.common_episode_title,
    )
    podcast.new_location = parsed.get('new_location') or podcast.new_location
    podcast.flattr_url = to_maxlength(
        Podcast, 'flattr_url', parsed.get('flattr') or podcast.flattr_url)
    podcast.hub = parsed.get('hub') or podcast.hub
    podcast.license = parsed.get('license') or podcast.license
    podcast.max_episode_order = episode_updater.max_episode_order

    podcast.add_missing_urls(parsed.get('urls', []))

    if podcast.new_location:
        try:
            new_podcast = Podcast.objects.get(urls__url=podcast.new_location)
            # the new URL already belongs to another podcast: this one is
            # a stale duplicate and gets marked outdated instead of updated
            if new_podcast != podcast:
                self._mark_outdated(
                    podcast, 'redirected to different podcast',
                    episode_updater)
                return
        except Podcast.DoesNotExist:
            podcast.set_url(podcast.new_location)

    # latest episode timestamp
    episodes = Episode.objects.filter(
        podcast=podcast, released__isnull=False).order_by('released')

    # Update interval is based on intervals between episodes
    podcast.update_interval = episode_updater.get_update_interval(episodes)

    # factor is increased / decreased depending on whether the latest
    # update has returned episodes
    if update_result.episodes_added == 0:  # no episodes, incr factor
        newfactor = podcast.update_interval_factor * 1.2
        podcast.update_interval_factor = min(1000, newfactor)  # never above 1000
    # NOTE(review): exactly one new episode leaves the factor unchanged
    # (`> 1`, not `>= 1`) — confirm this asymmetry is intentional
    elif update_result.episodes_added > 1:  # new episodes, decr factor
        newfactor = podcast.update_interval_factor / 1.2
        podcast.update_interval_factor = max(1, newfactor)  # never below 1

    latest_episode = episodes.last()
    if latest_episode:
        podcast.latest_episode_timestamp = latest_episode.released

    # podcast.episode_count is not updated here on purpose. It is, instead,
    # continuously updated when creating new episodes in
    # EpisodeManager.get_or_create_for_url
    self._update_categories(podcast, prev_latest_episode_timestamp)

    # try to download the logo and reset logo_url to None on http errors
    found = CoverArt.save_podcast_logo(podcast.logo_url)
    if not found:
        podcast.logo_url = None

    # check if search index should be considered out of date
    new_index_fields = get_index_fields(podcast)
    if list(old_index_fields.items()) != list(new_index_fields.items()):
        podcast.search_index_uptodate = False

    # The podcast is always saved (not just when there are changes) because
    # we need to record the last update
    logger.info('Saving podcast.')
    podcast.last_update = datetime.utcnow()
    podcast.save()

    try:
        subscribe_at_hub(podcast)
    except SubscriptionError as se:
        # logger.warn is a deprecated alias of logger.warning
        logger.warning('subscribing to hub failed: %s', str(se))

    self.assign_slug(podcast)
    episode_updater.assign_missing_episode_slugs()
    update_related_podcasts.delay(podcast.pk)