Example #1
def do_crawl_exchange(config_file, log_file, exchange_adaptee,
                      kline_data_collection, market_depth_collection,
                      market_trade_collection, table_name_dict):
    # Re-initialize configuration and logging here: with the 'spawn' start
    # method the child process does not inherit the parent's state.
    Configuration.get_configuration(config_file)
    logger = Logger.get_logger(log_file)
    logger.debug("[%s]Log path: %s" % (os.getpid(), Logger.get_log_file()))
    try:
        logger.debug("==== do_crawl ===== %s" %
                     datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        configure_logging(install_root_handler=False)
        # Instantiate whichever exchange adaptee class was passed in
        adaptee = exchange_adaptee()
        pixiu_adapter = PixiuAdapter(adaptee)
        ExchangesSpider.pixiu_adapter = pixiu_adapter

        process = CrawlerProcess(get_project_settings())
        process.crawl(ExchangesSpider)
        process.start()  # blocks until the crawl finishes

        # Copy results into the Manager proxies so the parent process can
        # read them after this child exits
        for item in pixiu_adapter.kline_data_collection:
            kline_data_collection.append(item)

        for item in pixiu_adapter.market_depth_collection:
            market_depth_collection.append(item)

        for item in pixiu_adapter.market_trade_collection:
            market_trade_collection.append(item)

        table_name_dict[KLINE_TABLE_NAME] = pixiu_adapter.kline_table_name
        table_name_dict[TRADE_TABLE_NAME] = pixiu_adapter.trade_table_name

    except Exception as e:
        logger.error("Engine got exception: %s", e)
Example #2
def main_thread_run(self):
    try:
        # 'spawn' gives crawler child processes a clean interpreter state;
        # note that set_start_method() may only be called once per program
        mp.set_start_method('spawn')
        do_crawl(self.config.config_file, Logger.get_log_file())
        print("------> [main_thread_run]crawler done!!!")
    except Exception as e:
        self.logger.error("Engine got exception: %s", e)
        print("Engine got exception: %s" % e)
Example #3
        def do_something(sc):
            # Re-arm the scheduler first so the crawl repeats every
            # crawl_period seconds
            s.enter(self.config.crawl_period, 1, do_something, (sc,))
            # Manager proxies let the child process hand its results back
            mp_manager = mp.Manager()
            kline_data_collection = mp_manager.list()
            market_depth_collection = mp_manager.list()
            market_trade_collection = mp_manager.list()
            table_name_dict = mp_manager.dict()
            exchange_adaptee = self._exchange_adaptee
            process = mp.Process(target=do_crawl_exchange,
                                 args=(
                                     self.config.config_file,
                                     Logger.get_log_file(),
                                     exchange_adaptee,
                                     kline_data_collection,
                                     market_depth_collection,
                                     market_trade_collection,
                                     table_name_dict,
                                 ))
            self.logger.info(
                "[run] Start process: %s",
                datetime.datetime.now().strftime(
                    SQL_TIME_FORMAT_WITH_MILLISECOND))
            process.start()
            process.join()  # wait for the crawl to finish before reading results
            self.logger.info(
                "[run] End process: %s",
                datetime.datetime.now().strftime(
                    SQL_TIME_FORMAT_WITH_MILLISECOND))
            kline_table_name = table_name_dict[KLINE_TABLE_NAME]
            trade_table_name = table_name_dict[TRADE_TABLE_NAME]

            max_time_kline = self._get_max_time_kline(kline_table_name)

            filtered_kline_data_collection = KlineDataCollection.filter_kline_data(
                kline_data_collection, max_time_kline)

            kline_procedure_name = self._exchange_adaptee.get_kline_insert_procedure()
            self._save_data_into_database_by_procedure(
                kline_procedure_name, filtered_kline_data_collection)

            max_time_market_trade = self._get_max_time_market_trade(
                trade_table_name)
            filtered_market_trade_data_collection = MarketTradeDataCollection.filter_market_trade_data(
                market_trade_collection, max_time_market_trade)
            market_trade_procedure_name = self._exchange_adaptee.get_market_trade_insert_procedure()
            self._save_data_into_database_by_procedure(
                market_trade_procedure_name,
                filtered_market_trade_data_collection)

            # Note: market depth data is saved unfiltered, unlike kline and
            # trade data above
            market_depth_procedure_name = self._exchange_adaptee.get_market_depth_insert_procedure()
            self._save_data_into_database_by_procedure(
                market_depth_procedure_name, market_depth_collection)
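The s.enter(...) call at the top of do_something assumes an enclosing
sched.scheduler instance named s; by re-arming itself on every run, the
function turns the scheduler into a fixed-period crawl loop. A minimal sketch
of that pattern (PERIOD stands in for self.config.crawl_period):

import sched
import time

PERIOD = 5  # seconds; stands in for self.config.crawl_period
s = sched.scheduler(time.time, time.sleep)

def do_something(sc):
    # Re-arm first so the next run is scheduled even if the work below is slow
    sc.enter(PERIOD, 1, do_something, (sc,))
    print("periodic crawl at", time.strftime("%H:%M:%S"))

s.enter(0, 1, do_something, (s,))
s.run()  # blocks, invoking do_something every PERIOD seconds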
Example #4
def do_something(sc):
    s.enter(self.config.crawl_period, 1, do_something, (sc,))
    process = mp.Process(target=do_crawl,
                         args=(self.config.config_file, Logger.get_log_file()))
    process.start()
    time.sleep(self.config.crawl_period)
    # First terminate() sends SIGTERM; Scrapy logs "Received SIGTERM, shutting
    # down gracefully. Send again to force", so a second terminate() forces it
    process.terminate()
    # Scrapy then logs: INFO: Closing spider (shutdown)
    process.terminate()
    self.logger.error("Crawler is overtime, terminated pid is %d" % process.pid)
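Note that this version sleeps for the full crawl_period and then terminates
the child unconditionally, even when the crawl finished early. If only
genuinely overdue crawls should be killed, process.join(timeout) gives a
watchdog that escalates only when needed; a sketch (the timeout values are
assumptions):

import multiprocessing as mp
import time

def crawl():
    time.sleep(3)  # stand-in for the real crawl

if __name__ == "__main__":
    p = mp.Process(target=crawl)
    p.start()
    p.join(timeout=10)       # returns as soon as the child exits
    if p.is_alive():         # only true if the crawl is overdue
        p.terminate()        # SIGTERM; Scrapy shuts down gracefully
        p.join(timeout=5)
        if p.is_alive():
            p.kill()         # force-kill if graceful shutdown hangs
    print("exit code:", p.exitcode)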
Example #5
def main_thread_run(self):
    try:
        mp.set_start_method('spawn')
        queue = mp.Queue()
        do_crawl_bittrex(queue, self.config.config_file,
                         Logger.get_log_file())
        # pixiu_adapter = queue.get()
        # do_crawl_huobi(self.config.config_file, Logger.get_log_file())
        print("------> [main_thread_run]crawler done!!!")
    except Exception as e:
        self.logger.error("Engine got exception: %s", e)
        print("Engine got exception: %s" % e)
Example #6
def do_crawl(config_file, log_file):
    Configuration.get_configuration(config_file)
    logger = Logger.get_logger(log_file)
    logger.debug("[%s]Log path: %s" % (os.getpid(), Logger.get_log_file()))
    try:
        logger.debug("==== do_crawl ===== %s" % datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        configure_logging(install_root_handler=False)
        huobi_adaptee = HuobiAdaptee()
        pixiu_adapter = PixiuAdapter(huobi_adaptee)
        ExchangesSpider.pixiu_adapter = pixiu_adapter
        process = CrawlerProcess(get_project_settings())
        process.crawl(ExchangesSpider)
        process.start()  # blocks until the crawl finishes
        pixiu_adapter.save_trade_data_into_mysql()

    except Exception as e:
        logger.error("Engine got exception: %s", e)