def test_duplicate_feeds(self):
    """Merging two duplicate feeds should leave both users subscribed to one feed.

    The two Feed rows are created via ``Feed(**data).save()`` instead of
    ``loaddata``/fixtures because the test needs to hit the ``save()``
    override, which fixture loading bypasses.
    """
    # Load the two duplicate feed definitions from the fixture file.
    # json.load(fp) replaces the roundabout json.loads(fp.read()).
    with open('apps/feed_import/fixtures/duplicate_feeds.json') as json_file:
        feed_data = json.load(json_file)

    feed_1 = Feed(**feed_data[0])
    feed_2 = Feed(**feed_data[1])
    feed_1.save()
    feed_2.save()

    # Subscriptions can be loaded normally; they don't need the save() hook.
    call_command('loaddata', 'apps/feed_import/fixtures/subscriptions.json')

    # Before the merge, each user points at a different feed.
    user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id
    user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id
    self.assertNotEqual(user_1_feed_subscription, user_2_feed_subscription)

    # Merge the duplicates; the return value (original feed id) is not needed,
    # so the previously unused local assignment was dropped.
    merge_feeds(user_1_feed_subscription, user_2_feed_subscription)

    # After the merge, both users must point at the same (surviving) feed.
    user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id
    user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id
    self.assertEqual(user_1_feed_subscription, user_2_feed_subscription)
def fetch_address_from_page(url, existing_feed=None):
    """Discover a feed address on the page at *url* and return a matching Feed.

    Parameters:
        url: page URL to scan with feedfinder for an advertised feed address.
        existing_feed: optional Feed to repoint at the discovered address.

    Returns:
        A Feed instance for the discovered address, or ``None`` when no feed
        address is found on the page, or when *existing_feed* is given but
        another Feed already owns the discovered address.
    """
    from apps.rss_feeds.models import Feed, DuplicateFeed

    feed_finder_url = feedfinder.feed(url)
    if not feed_finder_url:
        # No feed address discovered on the page; make the result explicit
        # instead of relying on an implicit fall-through.
        return None

    if existing_feed:
        if Feed.objects.filter(feed_address=feed_finder_url):
            # Another Feed already owns this address; don't create a duplicate.
            return None
        existing_feed.feed_address = feed_finder_url
        existing_feed.save()
        return existing_feed

    # The address may belong to a feed that was previously merged away;
    # follow the DuplicateFeed mapping back to the surviving feed.
    duplicate_feed = DuplicateFeed.objects.filter(duplicate_address=feed_finder_url)
    if duplicate_feed:
        # Previously written as `feed = [duplicate_feed[0].feed]` so a shared
        # `feed = feed[0]` unwrap could serve both branches; assign directly
        # for clarity.
        return duplicate_feed[0].feed

    candidates = Feed.objects.filter(feed_address=feed_finder_url)
    if candidates:
        return candidates[0]

    # Brand-new address: create, fetch once, and re-read in case update()
    # merged or replaced the row.
    feed = Feed(feed_address=feed_finder_url)
    feed.save()
    feed.update()
    return Feed.objects.get(pk=feed.pk)