Example #1
def main():
    # Assumes project-local modules (crawler, parser, saver), the constants
    # DOMAIN and REGULARS, and save_to_db are available in this module.
    site_crawler = crawler.Crawler(DOMAIN)
    collection = site_crawler.crawl()
    print(collection.get_len())  # number of pages collected
    data_saver = saver.DatabaseWorker()
    for url, content in collection.pages_content():
        # Extract the elements matching the configured regular expressions.
        nodes = parser.get_elements(content, REGULARS)
        # Persist through both the database worker and the module-level helper.
        data_saver.save_item(url, nodes)
        save_to_db(url, nodes)
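Example #1 depends on project-local pieces (crawler.Crawler, parser.get_elements, saver.DatabaseWorker, DOMAIN, REGULARS) that are not shown here. The stand-ins below are purely illustrative, inferred only from the calls made above, and let the pipeline run end to end on canned data; they are not the project's real implementations.

# Illustrative stand-ins matching the interface Example #1 uses.
import re

DOMAIN = "http://example.com"
REGULARS = [r"<title>(.*?)</title>"]

class PageCollection:
    def __init__(self, pages):
        self._pages = pages  # dict of url -> raw HTML

    def get_len(self):
        return len(self._pages)

    def pages_content(self):
        return self._pages.items()

class Crawler:
    def __init__(self, domain):
        self.domain = domain

    def crawl(self):
        # A real crawler would fetch pages from self.domain; here a single
        # canned page is returned so the pipeline can be exercised.
        return PageCollection({self.domain: "<title>Example</title>"})

def get_elements(content, patterns):
    # Collect every match of every pattern found in the page content.
    return [m for p in patterns for m in re.findall(p, content)]

class DatabaseWorker:
    def save_item(self, url, nodes):
        print("saving", url, nodes)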
Example #2
def test_save_to_db(self):
    # Assumes setUp defined self.key, self.value, and self.test_db_client.
    save_to_db(self.key, self.value)
    self.assertEqual(self.test_db_client.get(self.key), self.value)
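Neither example shows save_to_db itself or the TestCase around Example #2's method. Below is a minimal, hypothetical sketch of both, assuming a key-value client with set/get methods; the InMemoryDBClient name, the setUp fixtures, and the module layout are illustrative, not taken from the original project.

# Hypothetical sketch only: a dict-backed key-value client, a save_to_db
# built on it, and a TestCase consistent with the assertion in Example #2.
import unittest

class InMemoryDBClient:
    def __init__(self):
        self._store = {}

    def set(self, key, value):
        self._store[key] = value

    def get(self, key):
        return self._store.get(key)

db_client = InMemoryDBClient()

def save_to_db(key, value):
    # Persist one key/value pair through the module-level client.
    db_client.set(key, value)

class SaveToDbTest(unittest.TestCase):
    def setUp(self):
        self.key = "http://example.com/page"
        self.value = ["node1", "node2"]
        self.test_db_client = db_client  # the client save_to_db writes to

    def test_save_to_db(self):
        save_to_db(self.key, self.value)
        self.assertEqual(self.test_db_client.get(self.key), self.value)

if __name__ == "__main__":
    unittest.main()

The test only checks that whatever save_to_db writes can be read back through the same client, so any storage backend exposing set/get in this way would satisfy it.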