def __downloadStocks(self, stocks, append=True, threads=5):
    """Queue stale stocks on a Crawler and run it.

    For each stock in *stocks*: skip it when its local record was updated
    less than a day ago, otherwise queue it for download — incrementally
    from its last update when one exists, else from ``self.history_start``.
    New symbols get a local Stock record first.

    Args:
        stocks: iterable of objects with ``symbol`` and ``name`` attributes.
        append: logged as the crawler mode ("append" vs "overwrite").
            NOTE(review): only used in the log message here — the crawler is
            never told about it; confirm Crawler picks the mode up elsewhere.
        threads: number of crawler worker threads.
    """
    crawler = Crawler(self.dbpath, threads)
    crawler.reset()
    # Collect log lines in a list and join once — repeated ``+=`` on a
    # string is quadratic over many stocks.
    symbol_parts = []
    for stock in stocks:
        start = self.history_start
        end = datetime.datetime.now()
        stock_l = self.sqlDAM.readStock(stock.symbol)
        if stock_l is None:
            # Unknown symbol: create the local record before queueing.
            stock_l = Stock(stock.symbol, stock.name, 0)
            self.sqlDAM.writeStock(stock_l)
        elif stock_l.lastUpdate is not None:
            if (end - stock_l.lastUpdate).days < 1:
                continue  # updated within the last day — nothing to fetch
            start = stock_l.lastUpdate  # incremental fetch from last update
        symbol_parts.append("%s - %s\n" % (stock_l.symbol, stock_l.name))
        crawler.addStock(stock_l, start, end)
    # commit to create local new stock objects
    self.sqlDAM.commit()
    if crawler.getRemainCount() > 0:
        # Lazy %-args: the message is only formatted if INFO is enabled.
        logger.info("All stocks to update(%d): \n%s",
                    crawler.getRemainCount(), "".join(symbol_parts))
        logger.info("starting crawler in %s mode with %d threads",
                    ("append" if append else "overwrite"), threads)
        crawler.start()
        crawler.poll()
    else:
        logger.info("no stock needs to update")
def reset():
    """Rate-limited reset: trigger Crawler.reset() at most once per
    ``call_reset_timeout`` seconds.

    Returns the remaining cool-down as a millisecond string (seconds
    value with "000" appended).
    """
    global call_reset_last
    elapsed = time.time() - call_reset_last
    if elapsed >= call_reset_timeout:
        # Cool-down expired: perform the real reset and restart the clock.
        Crawler.reset()
        call_reset_last = time.time()
        elapsed = 0
    return "%i000" % (call_reset_timeout - elapsed)