def check_feed_address_for_feed_link(self):
    """Verify that ``self.feed_address`` points at an actual feed; if not,
    try to autodiscover a feed from the address and the site link.

    On discovering a new address, saves it on this feed and reschedules an
    update; if another Feed row already owns that address (IntegrityError
    on save), revives that row and merges this feed into it.

    Returns:
        bool: True when a (new) feed address was discovered, False when
        the current address is already a valid feed and the site link
        yields nothing different.
    """
    feed_address = None
    if not feedfinder.isFeed(self.feed_address):
        # Current address is not a feed: try autodiscovery on the
        # address itself, then fall back to the site link.
        feed_address = feedfinder.feed(self.feed_address)
        if not feed_address:
            feed_address = feedfinder.feed(self.feed_link)
    else:
        # Address already is a feed, but the site link may advertise a
        # different (preferred) one.
        feed_address_from_link = feedfinder.feed(self.feed_link)
        if feed_address_from_link != self.feed_address:
            feed_address = feed_address_from_link

    if feed_address:
        try:
            self.feed_address = feed_address
            self.next_scheduled_update = datetime.datetime.utcnow()
            self.has_feed_exception = False
            self.active = True
            self.save()
        except IntegrityError:
            # Another Feed row already uses this address: reactivate the
            # original and merge this (duplicate) feed into it.
            original_feed = Feed.objects.get(feed_address=feed_address)
            original_feed.has_feed_exception = False
            original_feed.active = True
            original_feed.save()
            merge_feeds(original_feed.pk, self.pk)

    # bool() replaces the original `not not` double negation.
    return bool(feed_address)
def get_feed_from_url(cls, url):
    """Return the Feed matching *url*, creating one when necessary.

    The URL is normalized first. Lookup follows recorded duplicate
    addresses; when the URL is not itself a feed, feed autodiscovery is
    attempted on the page. Returns None when no feed can be found or
    discovered.
    """

    def lookup(address):
        # Exact address match first, then fall back to known duplicates.
        found = cls.objects.filter(feed_address=address)
        if found:
            return found
        duplicates = DuplicateFeed.objects.filter(
            duplicate_address=address).order_by('pk')
        if duplicates:
            return [duplicates[0].feed]
        return found

    url = urlnorm.normalize(url)

    candidates = lookup(url)
    if candidates:
        return candidates[0]

    if feedfinder.isFeed(url):
        # The URL itself is a feed: create and populate it.
        created = cls.objects.create(feed_address=url)
        return created.update()

    # Not a feed: run autodiscovery on the page.
    discovered = feedfinder.feed(url)
    if not discovered:
        return None

    candidates = lookup(discovered)
    if candidates:
        return candidates[0]
    created = cls.objects.create(feed_address=discovered)
    return created.update()
def get_feed_from_url(cls, url):
    """Return the Feed matching *url*, creating one when necessary.

    The URL is normalized, looked up directly and via recorded duplicate
    addresses, and — when the URL is not itself a feed — autodiscovery is
    attempted on the page. Returns None when nothing is found.

    NOTE(review): this definition duplicates an earlier
    get_feed_from_url in this file; the later definition shadows the
    earlier one — confirm which copy is intended to survive.
    """
    feed = None

    def by_url(address):
        # Exact address match; fall back to known duplicate addresses.
        feed = cls.objects.filter(feed_address=address)
        if not feed:
            duplicate_feed = DuplicateFeed.objects.filter(
                duplicate_address=address).order_by('pk')
            if duplicate_feed:
                # Wrap in a list so callers can index [0] uniformly.
                feed = [duplicate_feed[0].feed]
        return feed

    url = urlnorm.normalize(url)
    feed = by_url(url)
    if feed:
        feed = feed[0]
    else:
        if feedfinder.isFeed(url):
            # URL is itself a feed: create and populate it.
            feed = cls.objects.create(feed_address=url)
            feed = feed.update()
        else:
            # Try autodiscovery on the page, then repeat the lookup.
            feed_finder_url = feedfinder.feed(url)
            if feed_finder_url:
                feed = by_url(feed_finder_url)
                if not feed:
                    feed = cls.objects.create(feed_address=feed_finder_url)
                    feed = feed.update()
                else:
                    feed = feed[0]
    return feed
def fetch_feeds(urls, n, output_path, options):
    # Normalize each URL in `urls` and hand it to fetch(); `n` and
    # `output_path` are passed through unchanged (presumably a fetch
    # count and a destination directory -- confirm against fetch()).
    #
    # NOTE(review): the option key is spelled 'quite' -- almost certainly
    # a typo for 'quiet', but callers supply the same key, so renaming it
    # must be coordinated with every call site.
    if not options['quite']:
        print 'fetch feeds'
    for url in urls:
        url = urlnorm.normalize(url)
        # NOTE(review): `True or ...` short-circuits and disables the
        # feedfinder.isFeed() validation, so the else branch is
        # unreachable -- looks like a debugging leftover; confirm intent
        # before re-enabling the check.
        if True or feedfinder.isFeed(url):
            fetch(url, n, output_path, options)
        else:
            print '%s is not a feed url' % url
def subscribe(user_id, url):
    """Subscribe *user_id* to the feed at address *url*.

    Returns a dict with:
        state   -- 'ok', 'invalid feed url', or 'duplication'
        feed    -- the feed record (present only when state is 'ok')
        stories -- latest stories for the feed (only when state is 'ok')
    """
    normalized = urlnorm.normalize(url)
    # Reject addresses that are not actual feeds before touching storage.
    if not feedfinder.isFeed(normalized):
        return {'state': 'invalid feed url'}

    feed, _feed_created = Feed.get_or_create(normalized)
    _sub, already_subscribed = Subscription.get_or_create(user_id, feed.feed_id)
    if already_subscribed:
        return {'state': 'duplication'}

    return {
        'state': 'ok',
        'feed': feed,
        'stories': get_stories_for_feed(feed.feed_id),
    }