def do_save_feed_creation_result(
    ctx: ActorContext,
    feed_creation_id: T.int,
    messages: T.list(T.str),
    feed: FeedSchema.optional,
):
    """Persist the outcome of a feed creation task.

    On failure, mark the creation ERROR and record a NOT_FOUND url mapping.
    On success, reuse or create the Feed, attach it to the user, record the
    url mappings, then ask the updater actor to fill in the feed content.
    """
    with transaction.atomic():
        feed_dict = feed
        try:
            feed_creation = FeedCreation.get_by_pk(feed_creation_id)
        except FeedCreation.DoesNotExist:
            LOG.warning(f'feed creation {feed_creation_id} not exists')
            return
        if feed_creation.status == FeedStatus.READY:
            LOG.info(f'feed creation {feed_creation_id} is ready')
            return
        feed_creation.message = '\n\n'.join(messages)
        feed_creation.dt_updated = timezone.now()
        if not feed_dict:
            # fetch failed: remember the url as NOT_FOUND so it is not retried
            feed_creation.status = FeedStatus.ERROR
            feed_creation.save()
            FeedUrlMap(source=feed_creation.url, target=FeedUrlMap.NOT_FOUND).save()
            return
        url = feed_dict['url']
        feed = Feed.get_first_by_url(url)
        if not feed:
            now = timezone.now()
            feed = Feed(
                url=url,
                status=FeedStatus.READY,
                reverse_url=reverse_url(url),
                dt_updated=now,
                dt_checked=now,
                dt_synced=now,
            )
            feed.save()
        feed_creation.status = FeedStatus.READY
        feed_creation.feed_id = feed.id
        feed_creation.save()
        user_feed = UserFeed.objects.filter(
            user_id=feed_creation.user_id, feed_id=feed.id).first()
        if user_feed:
            LOG.info('UserFeed#{} user_id={} feed_id={} already exists'.format(
                user_feed.id, feed_creation.user_id, feed.id))
        else:
            user_feed = UserFeed(
                user_id=feed_creation.user_id,
                feed_id=feed.id,
                is_from_bookmark=feed_creation.is_from_bookmark,
            )
            user_feed.save()
        # map both the requested url and the canonical feed url to the feed
        FeedUrlMap(source=feed_creation.url, target=feed.url).save()
        if feed.url != feed_creation.url:
            FeedUrlMap(source=feed.url, target=feed.url).save()
    # after the transaction commits, hand the fetched data to the updater actor
    ctx.hope('harbor_rss.update_feed', dict(
        feed_id=feed.id,
        feed=validate_feed_output(feed_dict),
    ))
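# A minimal sketch (not from this module) of how a fetcher actor might hand
# its result to the handler above, assuming the same ctx.hope(name, payload)
# messaging pattern the handler itself uses for 'harbor_rss.update_feed'.
# The actor name and the variables below are illustrative assumptions:
#
#   ctx.hope('harbor_rss.save_feed_creation_result', dict(
#       feed_creation_id=feed_creation_id,
#       messages=['fetch feed ok'],
#       feed=feed_dict,  # None when fetching failed
#   ))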
def _feed_merge_duplicate(found: list):
    """Merge groups of duplicate feeds.

    Each item of `found` is a list of feed ids; the first id is kept as the
    primary feed and the rest are merged into it.
    """
    for feed_ids in found:
        primary_id, *duplicates = feed_ids
        with transaction.atomic():
            primary = Feed.get_by_pk(primary_id)
            primary_info = f'#{primary.id} url={primary.url!r}'
            for feed_id in duplicates:
                other = Feed.get_by_pk(feed_id)
                other_info = f'#{other.id} url={other.url!r}'
                LOG.info('merge duplicate feed %s into %s', other_info, primary_info)
                # redirect the duplicate url to the primary before merging
                FeedUrlMap(source=other.url, target=primary.url).save()
                primary.merge(other)
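# Hypothetical input for _feed_merge_duplicate, with the shape inferred from
# the `primary_id, *duplicates` unpacking above; the ids are made up:
#
#   _feed_merge_duplicate([
#       [101, 202, 303],  # merge feeds 202 and 303 into feed 101
#       [404, 505],       # merge feed 505 into feed 404
#   ])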
def _import_feeds(self, imports: list):
    """Test helper: create feed creations from imports, then simulate the
    harbor side by saving a ready Feed, UserFeed and url mappings for each."""
    result = UnionFeed.create_by_imports(user_id=self._tester.id, imports=imports)
    for creation in result.feed_creations:
        creation: FeedCreation
        feed = Feed(
            title=creation.title,
            url=creation.url,
            status=FeedStatus.READY,
            dt_updated=timezone.now(),
        )
        feed.save()
        user_feed = UserFeed(
            user=self._tester,
            feed=feed,
            title=creation.title,
            group=creation.group,
            dt_updated=timezone.now(),
        )
        user_feed.save()
        FeedUrlMap(source=creation.url, target=feed.url).save()
        FeedUrlMap(source=creation.url + '.c', target=feed.url).save()
    return result
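# Hypothetical usage in a test case. The import item fields (title, group,
# url) are inferred from the attributes the helper reads off each
# FeedCreation, not confirmed by the source:
#
#   result = self._import_feeds([
#       dict(title='Example Blog', group='tech',
#            url='https://blog.example.com/feed.xml'),
#   ])
#   self.assertEqual(len(result.feed_creations), 1)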
def do_clean_feedurlmap_by_retention(ctx: ActorContext):
    num_rows = FeedUrlMap.delete_by_retention()
    LOG.info('delete {} outdated feedurlmap'.format(num_rows))