def test_link_sort_order_is_maintained_hot(self):
    """Merged cache + reddit links must render in hot-sort order.

    Even-indexed fixture children are served from the (mocked) cache and
    odd-indexed children from the (mocked) reddit API; the rendered page
    must still contain every link, ordered by flock.hot() descending.
    """
    h = HTMLParser.HTMLParser()
    futuregarage_links = flock.parseRedditResponse(self.futuregarage_top)
    # Sort by created_utc first so hot-score ties resolve newest-first,
    # matching the ordering the app is expected to produce.
    futuregarage_links = sorted(futuregarage_links, reverse=True,
                                key=lambda l: l['created_utc'])
    futuregarage_links = sorted(futuregarage_links, reverse=True,
                                key=lambda l: flock.hot(l))
    # Even-indexed children come back from the cache.
    cache_value = {'data': {}}
    cache_value['data']['children'] = self.futuregarage_top['data']['children'][::2]
    cache_value = flock.parseRedditResponse(cache_value)
    def cache_side_effect(*args, **kwargs):
        # Only the futuregarage key is warm; every other subreddit misses.
        if args[0] == 'futuregarage+hot+week':
            return pickle.dumps(cache_value)
        return None
    flock.cache.get = mock.MagicMock(name='get')
    flock.cache.get.side_effect = cache_side_effect
    # Odd-indexed children come back from reddit itself.
    reddit_value = {'data': {}}
    reddit_value['data']['children'] = self.futuregarage_top['data']['children'][1::2]
    flock.getRedditResponse = mock.MagicMock(name='getRedditResponse',
                                             return_value=reddit_value)
    response = self.app.get('/?subreddits=futuregarage+futurebeats&sort=hot',
                            follow_redirects=True)
    unescaped_response = h.unescape(response.data)
    for child in futuregarage_links:
        self.assertIn(child['title'], unescaped_response)
        self.assertIn(child['url'], unescaped_response)
        self.assertIn(child['permalink'], unescaped_response)
    # Each adjacent pair must render in order: hotter link first.
    for first_child, second_child in zip(futuregarage_links, futuregarage_links[1:]):
        first_pos = unescaped_response.find(first_child['title'])
        second_pos = unescaped_response.find(second_child['title'])
        # BUG FIX: the failure message previously printed flock.top()
        # scores even though this test sorts by flock.hot(); %s instead
        # of %d so a float hot score is not silently truncated.
        self.assertLess(first_pos, second_pos,
                        '%s:%s, %s:%s' % (first_child['title'], flock.hot(first_child),
                                          second_child['title'], flock.hot(second_child)))
def test_parse_reddit_response(self):
    """parseRedditResponse must drop links not hosted on YouTube domains."""
    futuregarage_links = flock.parseRedditResponse(self.futuregarage_top)
    parsed_titles = [child['title'] for child in futuregarage_links]
    for child in self.futuregarage_top['data']['children']:
        child = child['data']
        # Idiom fix: "x not in y" instead of "not x in y" (same semantics).
        if 'youtube' not in child['domain'] and 'youtu.be' not in child['domain']:
            # Non-YouTube links must have been filtered out by the parser.
            self.assertNotIn(child['title'], parsed_titles)
def test_cache_is_heated_with_parsed_links(self):
    """On a cache miss, the app stores the pickled parsed links in the cache."""
    expected_links = flock.parseRedditResponse(self.futuregarage_top)
    # Empty cache, reddit responds with the fixture.
    flock.cache.set = mock.MagicMock(name='set')
    flock.cache.get = mock.MagicMock(name='get', return_value=None)
    flock.getRedditResponse = mock.MagicMock(
        name='getRedditResponse', return_value=self.futuregarage_top)
    self.app.get('/?subreddits=futuregarage', follow_redirects=True)
    # The cache must be warmed with the parsed (not raw) response.
    flock.cache.set.assert_called_with(
        'futuregarage+hot+week', pickle.dumps(expected_links))
def test_frontpage_hits_memcached_same_number_of_times_as_subreddits_with_names(self):
    """One cache.get per requested subreddit; reddit is never contacted on a full cache hit."""
    return_value = flock.parseRedditResponse(self.futuregarage_top)
    flock.getRedditResponse = mock.MagicMock(name='getRedditResponse', return_value=None)
    flock.cache.get = mock.MagicMock(name='get', return_value=pickle.dumps(return_value))
    response = self.app.get('/?subreddits=1+2+3+4+5', follow_redirects=True)
    # FIX: assertEquals is a deprecated alias; use assertEqual
    # (consistent with the other tests in this file).
    self.assertEqual(response.status_code, 200)
    self.assertFalse(flock.getRedditResponse.called)
    # One lookup per subreddit name, in request order.
    calls = [mock.call('1+hot+week'), mock.call('2+hot+week'),
             mock.call('3+hot+week'), mock.call('4+hot+week'),
             mock.call('5+hot+week')]
    flock.cache.get.assert_has_calls(calls)
def test_frontpage_subreddits_all_links_present(self):
    """Every parsed link's title, url and permalink must appear in the page."""
    parser = HTMLParser.HTMLParser()
    expected_links = flock.parseRedditResponse(self.futuregarage_top)
    # Serve the fixture JSON through the patched urlopen; cache is cold.
    fake_body = io.StringIO(json.dumps(self.futuregarage_top, ensure_ascii=False))
    flock.urllib2.urlopen = mock.MagicMock(return_value=fake_body)
    flock.cache.get = mock.MagicMock(name='get', return_value=None)
    response = self.app.get('/?subreddits=futuregarage',
                            content_type='text/html', follow_redirects=True)
    self.assertEqual(response.status_code, 200)
    page = parser.unescape(response.data)
    for link in expected_links:
        for field in ('title', 'url', 'permalink'):
            self.assertIn(link[field], page)
def test_cache_is_hit_after_cache_is_warmed(self):
    """A first request warms the cache; a second is served purely from it."""
    cached_links = flock.parseRedditResponse(self.futuregarage_top)
    # Phase 1 — cold cache: the app must fetch from reddit and store the result.
    flock.cache.set = mock.MagicMock(name='set')
    flock.cache.get = mock.MagicMock(name='get', return_value=None)
    flock.getRedditResponse = mock.MagicMock(
        name='getRedditResponse', return_value=self.futuregarage_top)
    self.app.get('/?subreddits=futuregarage', follow_redirects=True)
    flock.getRedditResponse.assert_called_with(['futuregarage'], 'hot', 'week', 100)
    flock.cache.set.assert_called_with('futuregarage+hot+week',
                                       pickle.dumps(cached_links))
    # Phase 2 — warm cache: no reddit call and no cache write may happen.
    flock.cache.get = mock.MagicMock(name='get',
                                     return_value=pickle.dumps(cached_links))
    flock.cache.set = mock.MagicMock(name='set')
    flock.getRedditResponse = mock.MagicMock(name='getRedditResponse')
    self.app.get('/?subreddits=futuregarage', follow_redirects=True)
    flock.cache.get.assert_called_with('futuregarage+hot+week')
    self.assertEqual(flock.getRedditResponse.call_count, 0)
    self.assertEqual(flock.cache.set.call_count, 0)