def view_full_feed(request):
    """Render the full feed for a saved search owned by the requester.

    Looks up a SavedSearch by the ``id`` request parameter, parses its
    feed, and renders the item list; anyone who is not the search's
    owner is redirected to the saved-search landing page.
    """
    # NOTE(review): an identical ``view_full_feed`` is defined again
    # immediately below; the later definition wins at import time —
    # confirm which copy is intended and drop the other.
    # NOTE(review): ``request.REQUEST`` is deprecated (removed in
    # Django 1.9); ``request.GET`` is likely what is meant — confirm.
    saved_search = SavedSearch.objects.get(id=request.REQUEST.get('id'))
    is_pss = hasattr(saved_search, 'partnersavedsearch')
    if request.user != saved_search.user:
        return HttpResponseRedirect(reverse('saved_search_main'))
    feed_url = url_sort_options(saved_search.feed, saved_search.sort_by,
                                frequency=saved_search.frequency)
    try:
        items, count = parse_feed(feed_url, saved_search.frequency,
                                  saved_search.jobs_per_email)
    except HTTPError:
        # Feed endpoint unavailable; render the page with no items.
        items, count = None, 0
    start_date = date.today() + timedelta(
        get_interval_from_frequency(saved_search.frequency))
    context = {'search': saved_search,
               'items': items,
               'view_name': 'Saved Searches',
               'is_pss': is_pss,
               'start_date': start_date,
               'count': count}
    return render_to_response('mysearches/view_full_feed.html', context,
                              RequestContext(request))
def view_full_feed(request):
    """Display every item in a saved search's feed to its owner.

    The search is identified by the ``id`` request parameter; non-owners
    are sent back to the saved-search main page.
    """
    # NOTE(review): this re-defines ``view_full_feed`` from just above and
    # silently shadows it — confirm the duplicate is intentional.
    search_id = request.REQUEST.get('id')
    saved_search = SavedSearch.objects.get(id=search_id)
    is_pss = hasattr(saved_search, 'partnersavedsearch')
    if request.user == saved_search.user:
        url_of_feed = url_sort_options(saved_search.feed,
                                       saved_search.sort_by,
                                       frequency=saved_search.frequency)
        try:
            items, count = parse_feed(url_of_feed, saved_search.frequency,
                                      saved_search.jobs_per_email)
        except HTTPError:
            # Could not reach the feed; show an empty result page.
            items = None
            count = 0
        start_date = date.today() + timedelta(
            get_interval_from_frequency(saved_search.frequency))
        return render_to_response(
            'mysearches/view_full_feed.html',
            {'search': saved_search, 'items': items,
             'view_name': 'Saved Searches', 'is_pss': is_pss,
             'start_date': start_date, 'count': count},
            RequestContext(request))
    return HttpResponseRedirect(reverse('saved_search_main'))
def more_feed_results(request):
    """Return an additional page of feed items for infinite scroll.

    AJAX endpoint hit by the view_full_feed template when the user
    scrolls to the bottom of the page; expects ``feed``, ``sort_by``,
    ``frequency`` and ``offset`` GET parameters.
    """
    if not request.is_ajax():
        # Fix: the original fell through and returned None here, which
        # makes Django raise "view didn't return an HttpResponse".
        # Redirect direct (non-AJAX) visitors like the sibling view does.
        return HttpResponseRedirect(reverse('saved_search_main'))
    url_of_feed = url_sort_options(request.GET['feed'],
                                   request.GET['sort_by'],
                                   frequency=request.GET['frequency'])
    # parse_feed returns (items, count); only the items are rendered.
    items = parse_feed(url_of_feed, request.GET['frequency'],
                       offset=request.GET['offset'])[0]
    return render_to_response('mysearches/feed_page.html',
                              {'items': items},
                              RequestContext(request))
def test_parse_feed(self):
    """parse_feed returns (items, total) with fully populated items."""
    feed_url = 'http://www.my.jobs/feed/rss'
    # NOTE(review): the second element of each tuple is never used in
    # the loop body — confirm whether it was meant to be asserted on.
    for use_json, _count in [(True, 2), (False, 1)]:
        results = parse_feed(feed_url, use_json=use_json)
        # The second value is the feed's reported total, which may
        # exceed the number of items actually returned.
        self.assertEqual(results[1], len(results[0]))
        first = results[0][0]
        for field in ['pubdate', 'title', 'description', 'link']:
            self.assertTrue(first[field])
def get_feed_items(self, num_items=None):
    """Parse this saved search's feed and return its items.

    :param num_items: how many items to request; any falsy value falls
        back to ``self.jobs_per_email``.
    :return: the result of ``parse_feed`` for the constructed URL.
    """
    count = num_items or self.jobs_per_email
    feed_url = url_sort_options(self.feed, self.sort_by, self.frequency,
                                hasattr(self, 'partnersavedsearch'))
    feed_url = update_url_if_protected(feed_url, self.user)
    kwargs = {'feed_url': feed_url,
              'frequency': self.frequency,
              'num_items': count,
              'return_items': count,
              'last_sent': self.last_sent}
    # Partner saved searches pass ignore_dates — presumably this skips
    # date filtering inside parse_feed; confirm against its signature.
    if hasattr(self, 'partnersavedsearch'):
        kwargs['ignore_dates'] = True
    return parse_feed(**kwargs)
def test_parse_feed_with_count(self):
    """Requesting num_items=1 reports a count of exactly one item."""
    expected = 1
    items, count = parse_feed('http://www.my.jobs/feed/rss',
                              num_items=expected)
    self.assertEqual(count, expected)