Example no. 1
 def crawl_post(self, time_interval=10):
     """Run the crawler over every stored Facebook group.

     Args:
         time_interval: polling interval forwarded to ``Crawler``
             (presumably minutes, per the original comment — confirm
             against Crawler's definition).
     """
     crawler = Crawler(time_interval)
     # One pass over all groups; Crawler handles its own scheduling.
     for carona_group in CaronaGroupModel.objects.all():
         crawler.retrieve_posts(carona_group.fb_group_id)
Example no. 2
    def test_crawler(self):
        """Crawl one known Facebook group and verify no parser errors were logged."""
        fb_group_id = '641749869191341'

        crawler = Crawler(time_interval=30)
        crawler.retrieve_posts(fb_group_id)

        ## test log
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use the canonical assertEqual.
        self.assertEqual(ParserErrorsModel.objects.all().count(), 0)