Example #1
    def test_crawl(self):
        # Run the BFS crawler only up to num_nodes_before_crash nodes, with
        # checkpointing enabled, to simulate a crawl that is interrupted mid-way.
        num_nodes_before_crash = 2
        checkpoint_freq = 2
        webnetwork = TestWebCaller("test", self.network_edges)
        self.crawler = BFSNetworkCrawler(webnetwork, store_type=self.STORE_TYPE)
        self.crawler.crawl(seed_nodes=["11", "5"], max_nodes=num_nodes_before_crash, recover=True,
                           checkpoint_frequency=checkpoint_freq)

        # Only the nodes crawled before the simulated crash should have been visited so far.
        node_visit_order = webnetwork.get_visit_order()
        self.compare_values(node_visit_order, test_visit_order=self.correct_visit_order[:num_nodes_before_crash])
        self.crawler.close()

        # Re-create the crawler and resume from the saved checkpoint; the crawl
        # should continue from where the interrupted run left off.
        self.crawler = BFSNetworkCrawler(webnetwork, store_type=self.STORE_TYPE)
        self.crawler.crawl(seed_nodes=["11", "5"], max_nodes=10, recover=True, checkpoint_frequency=checkpoint_freq)

        node_visit_order = webnetwork.get_visit_order()
        print("Node visit order is", node_visit_order)
        self.compare_values(node_visit_order,
                            test_visit_order=self.correct_visit_order)
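
The test method above relies on fixture attributes (self.network_edges, self.correct_visit_order, self.STORE_TYPE) and a compare_values helper defined elsewhere in the test class. A minimal sketch of what such a setup might look like; the class name, edge list, and expected visit order are illustrative assumptions, not taken from the original suite:

import unittest

class BFSCrawlerTestSketch(unittest.TestCase):
    STORE_TYPE = "sqlite"

    def setUp(self):
        # Hypothetical fixture: directed edges of a small test network and the
        # order in which a BFS crawl seeded at ["11", "5"] would visit its nodes.
        self.network_edges = [("11", "5"), ("11", "3"), ("5", "7"), ("3", "9")]
        self.correct_visit_order = ["11", "5", "3", "7", "9"]

    def compare_values(self, node_visit_order, test_visit_order):
        # Assert that the crawler visited nodes in the expected order.
        self.assertEqual(list(node_visit_order), list(test_visit_order))
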
Example #2
    # Configure logging; numeric_loglevel is assumed to be resolved from the
    # settings (e.g. logging.INFO) earlier in the script.
    logging.basicConfig(filename="socintpy.log", level=numeric_loglevel)

    # Fetch api object using the settings from settings.py
    api_args = get_args(settings)
    api = get_api(api_args)
    """
    # If you want to pass a dictionary (say params_dict), use **params_dict
    result = api.get_data(user='******', method="user.getRecentTracks",
    max_results = 100)                
    print result
    """
    # NOTE: API-call error handling has changed -- if one call fails for a user,
    # the remaining API calls for that user are skipped. Verify this behaviour on
    # the next crawler run.
    # NOTE: checkpoint_frequency should stay at 1 until further notice.
    cmd_params = cmd_script.get_cmd_parameters(sys.argv)
    if cmd_params['mode'] == "fetch_data":
        # Set up the data crawl
        logging.info("STARTING FETCH_DATA CRAWL. STANDBY!")
        crawler = BFSNetworkCrawler(api, store_type="sqlite")

        #crawler = BFSNetworkCrawler(api, seed_nodes=None, store_type="basic_shelve", recover=True)
        # Start the data crawl
        crawler.crawl(seed_nodes=api.get_uniform_random_nodes(100), max_nodes=1000000, recover=True, checkpoint_frequency=1)

    elif cmd_params['mode'] == "retry_errors":
        nodes_with_error = cmd_script.get_nodes_with_error(logfile="socintpy_old.log")
        print nodes_with_error, len(nodes_with_error)
        logging.info("STARTING RETRY_ERRORS CRAWL. STANDBY!")
        crawler = FixedNodesCrawler(api, store_type="sqlite")
        nodes_stored, edges_stored = cmd_script.recover_num_items_stored(logfile="socinty_old.log")
        crawler.crawl(nodes_list=nodes_with_error, start_node_id=nodes_stored, start_edge_id=edges_stored)
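
As the commented-out snippet above notes, the keyword arguments to api.get_data can also be collected in a dictionary and passed with **params_dict. A small sketch of that pattern, reusing only the parameter names shown in the snippet (the username is a placeholder and error handling is omitted):

# Hypothetical usage: same call as in the commented-out snippet, but with the
# parameters gathered into a dictionary and unpacked at the call site.
params_dict = {"user": "some_user",
               "method": "user.getRecentTracks",
               "max_results": 100}
result = api.get_data(**params_dict)
print(result)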