def get_tweets():
    """Fetch and filter tweets for every active Twitter auto-import resource.

    Returns:
        list: entries of the form ``[text, link, resource, http_code]`` where
        the link matches none of the source's exclusion substrings and the
        text contains the source's inclusion marker (the marker is stripped
        from the returned text).
    """
    dsp = []
    for src in AutoImportResource.objects.filter(type_res='twitter',
                                                 in_edit=False):
        resource = src.resource
        # Exclusion patterns are stored as a comma-separated string; `src.excl`
        # may be None/empty, and empty fragments are dropped.
        excl = [s for s in (src.excl or '').split(',') if s]
        for text, link, http_code in get_tweets_by_url(src.link):
            # any() short-circuits instead of building a throwaway list
            # just to test truthiness.
            excluded = any(fragment in link for fragment in excl)
            if not excluded and src.incl in text:
                tw_txt = text.replace(src.incl, '')
                dsp.append([tw_txt, link, resource, http_code])
    return dsp
def test_get_tweets(self):
    """Check get_tweets_by_url parses 19 (text, link, status) triples
    from the recorded fixture, each with HTTP 200 and an http(s) link.
    """
    test_name = 'fixture_test_import_news_test_get_tweets.txt'
    # Use patch as a context manager so the patch is always undone, even if
    # get_tweets_by_url raises — manual start()/stop() leaked the active
    # patch into subsequent tests on failure.
    with patch('digest.management.commands.urlopen') as urlopen_mock:
        urlopen_mock.return_value = MockResponse(read_fixture(test_name))
        tweets = get_tweets_by_url(self.res_twitter.link)
    self.assertEqual(len(tweets), 19)
    for x in tweets:
        self.assertEqual(len(x), 3)
        self.assertEqual(x[2], 200)  # fixture responses are all HTTP 200
        self.assertIn('http', x[1])
    return tweets
def get_tweets():
    """Collect parsed tweet entries from every active Twitter auto-import
    resource, delegating per-source parsing to ``_parse_tweets_data``.
    """
    active_sources = AutoImportResource.objects.filter(type_res='twitter',
                                                       in_edit=False)
    return [
        entry
        for source in active_sources
        for entry in _parse_tweets_data(get_tweets_by_url(source.link), source)
    ]