Example #1
            if changed:
                print('updating podcast')
                podcast.last_update = datetime.utcnow()
                podcast.save()
            else:
                print('podcast not updated')


        except Exception as e:
            print(podcast.url)
            print('Exception:', e, file=sys.stderr)


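        # make sure the podcast and its episodes have URL slugs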
        assign_slug(podcast, PodcastSlug)
        assign_missing_episode_slugs(podcast)


def get_podcast_logo(podcast, feed):
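    """ determines the cover art for the podcast: a YouTube cover if the
    podcast links to YouTube, otherwise the feed's image, otherwise the
    existing logo_url """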
    cover_art = podcast.logo_url
    image = feed.feed.get('image', None)
    if image is not None:
        for key in ('href', 'url'):
            cover_art = getattr(image, key, None)
            if cover_art:
                break

    if podcast.link:
        yturl = youtube.get_real_cover(podcast.link)
        if yturl:
            cover_art = yturl
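
The `feed` argument above looks like a feedparser result (`feed.feed.get('image')`), so the href/url fallback can be exercised on its own. A minimal sketch, assuming the feedparser library and a hypothetical extract_cover_art() helper that is not part of the code above:

import feedparser

def extract_cover_art(feed_url, default=None):
    """ returns the feed's declared cover image URL, or `default` if the
    feed does not declare one (sketch only) """
    parsed = feedparser.parse(feed_url)
    image = parsed.feed.get('image', None)
    if image is None:
        return default
    for key in ('href', 'url'):
        value = getattr(image, key, None)
        if value:
            return value
    return default
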
Example #2
def _update_podcast(podcast, parsed, episodes, max_episode_order):
    """ updates a podcast according to new parser results """

    # we need that later to decide if we can "bump" a category
    prev_latest_episode_timestamp = podcast.latest_episode_timestamp

    podcast.title = parsed.get('title') or podcast.title
    podcast.description = parsed.get('description') or podcast.description
    podcast.subtitle = parsed.get('subtitle') or podcast.subtitle
    podcast.link = parsed.get('link') or podcast.link
    podcast.logo_url = parsed.get('logo') or podcast.logo_url
    podcast.author = to_maxlength(Podcast, 'author', parsed.get('author') or
                                  podcast.author)
    podcast.language = to_maxlength(Podcast, 'language',
                                    parsed.get('language') or podcast.language)
    podcast.content_types = (','.join(parsed.get('content_types', [])) or
                             podcast.content_types)
    #podcast.tags['feed'] = parsed.tags or podcast.tags.get('feed', [])
    podcast.common_episode_title = to_maxlength(
        Podcast,
        'common_episode_title',
        parsed.get('common_title') or podcast.common_episode_title)
    podcast.new_location = parsed.get('new_location') or podcast.new_location
    podcast.flattr_url = to_maxlength(Podcast, 'flattr_url',
                                      parsed.get('flattr') or
                                      podcast.flattr_url)
    podcast.hub = parsed.get('hub') or podcast.hub
    podcast.license = parsed.get('license') or podcast.license
    podcast.max_episode_order = max_episode_order

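    # register any feed URLs from the parser results that are not yet
    # associated with this podcast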
    podcast.add_missing_urls(parsed.get('urls', []))

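    # if the feed announced a new location, mark this podcast outdated when
    # another podcast already uses that URL; otherwise adopt the new URL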
    if podcast.new_location:
        try:
            new_podcast = Podcast.objects.get(urls__url=podcast.new_location)
            if new_podcast != podcast:
                _mark_outdated(podcast, 'redirected to different podcast')
                return
        except Podcast.DoesNotExist:
            podcast.set_url(podcast.new_location)

    # latest episode timestamp
    episodes = Episode.objects.filter(podcast=podcast,
                                      released__isnull=False)\
                              .order_by('released')

    podcast.update_interval = get_update_interval(episodes)

    latest_episode = episodes.last()
    if latest_episode:
        podcast.latest_episode_timestamp = latest_episode.released

    # podcast.episode_count is not updated here on purpose. It is, instead,
    # continuously updated when creating new episodes in
    # EpisodeManager.get_or_create_for_url

    _update_categories(podcast, prev_latest_episode_timestamp)

    # try to download the logo and reset logo_url to None on http errors
    found = _save_podcast_logo(podcast.logo_url)
    if not found:
        podcast.logo_url = None

    # The podcast is always saved (not just when there are changes) because
    # we need to record the last update
    logger.info('Saving podcast.')
    podcast.last_update = datetime.utcnow()
    podcast.save()

    try:
        subscribe_at_hub(podcast)
    except SubscriptionError as se:
        logger.warn('subscribing to hub failed: %s', str(se))

    if not podcast.slug:
        slug = PodcastSlug(podcast).get_slug()
        if slug:
            podcast.add_slug(slug)

    assign_missing_episode_slugs(podcast)
    update_related_podcasts.delay(podcast)
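
The to_maxlength() helper used above is not shown in these examples. A plausible sketch of its behaviour, assuming Podcast is a Django model and that the helper simply truncates values to the field's max_length (the real implementation may differ):

def to_maxlength(model, field_name, value):
    """ truncates `value` so it fits the max_length of model.field_name
    (sketch only, based on how the helper is called above) """
    if value is None:
        return None
    max_length = model._meta.get_field(field_name).max_length
    return value[:max_length]
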
Example #3
    def _update_podcast(self, podcast, parsed, episodes):
        """ updates a podcast according to new parser results """

        # we need that later to decide if we can "bump" a category
        prev_latest_episode_timestamp = podcast.latest_episode_timestamp

        old_json = copy.deepcopy(podcast.to_json())

        podcast.title = parsed.title or podcast.title
        podcast.urls = list(set(podcast.urls + parsed.urls))
        podcast.description = parsed.description or podcast.description
        podcast.subtitle = parsed.subtitle or podcast.subtitle
        podcast.link = parsed.link or podcast.link
        podcast.logo_url = parsed.logo or podcast.logo_url
        podcast.author = parsed.author or podcast.author
        podcast.language = parsed.language or podcast.language
        podcast.content_types = parsed.content_types or podcast.content_types
        podcast.tags['feed'] = parsed.tags or podcast.tags.get('feed', [])
        podcast.common_episode_title = parsed.common_title or podcast.common_episode_title
        podcast.new_location = parsed.new_location or podcast.new_location
        podcast.flattr_url = parsed.flattr or podcast.flattr_url
        podcast.hub = parsed.hub or podcast.hub
        podcast.license = parsed.license or podcast.license


        if podcast.new_location:
            new_podcast = podcast_for_url(podcast.new_location)
            if not new_podcast:
                podcast.urls.insert(0, podcast.new_location)

            elif new_podcast != podcast:
                self._mark_outdated(podcast, 'redirected to different podcast')
                return


        logger.info('Retrieved %d episodes in total', len(episodes))

        # latest episode timestamp
        eps = [e for e in episodes if e.released]
        eps = sorted(eps, key=lambda e: e.released)

        podcast.update_interval = get_update_interval(eps)

        if eps:
            podcast.latest_episode_timestamp = eps[-1].released

        podcast.episode_count = episode_count_for_podcast(podcast)


        self._update_categories(podcast, prev_latest_episode_timestamp)

        # try to download the logo and reset logo_url to None on http errors
        found = self._save_podcast_logo(podcast.logo_url)
        if not found:
            podcast.logo_url = None

        # The podcast is always saved (not just when there are changes) because
        # we need to record the last update
        logger.info('Saving podcast.')
        podcast.last_update = datetime.utcnow()
        podcast.save()


        try:
            subscribe_at_hub(podcast)
        except SubscriptionError as se:
            logger.warn('subscribing to hub failed: %s', str(se))

        assign_slug(podcast, PodcastSlug)
        assign_missing_episode_slugs(podcast)
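
Example #2 inlines the slug assignment that Examples #1 and #3 delegate to assign_slug(). A sketch consistent with that inline version; the helper's actual signature and behaviour in the codebase are assumptions:

def assign_slug(obj, slug_class):
    """ assigns a slug to `obj` if it does not have one yet (sketch only,
    mirroring the inline version in Example #2) """
    if obj.slug:
        return
    slug = slug_class(obj).get_slug()
    if slug:
        obj.add_slug(slug)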