def __getAndSaveQuotes(self):
    '''Worker loop: drain self.stockQueue, fetching and persisting quotes per stock.

    Each queue item is a (stock, start, end) tuple. Successes are recorded in
    self.succeeded, failures in self.failed; the DB is committed periodically
    (every min(poolsize, 10) processed stocks) rather than per stock.
    KeyboardInterrupt is re-raised so the process can stop promptly.
    '''
    while not self.stockQueue.empty():
        # get_nowait is safe here: this worker only pulls what empty() just saw,
        # and task_done() below balances every successful get.
        item = self.stockQueue.get_nowait()
        stock = item[0]
        start = item[1]
        end = item[2]
        try:
            quotes = self.__getSaveOneStockQuotes(stock, start, end)
        except KeyboardInterrupt as excp:
            # user abort: record the stock as failed, then propagate to stop the loop
            logger.error("Interrupted while processing %s: %s" % (stock.symbol, excp))
            self.failed.append(stock)
            raise excp
        except BaseException as excp:
            # any other failure is logged and swallowed so remaining stocks still run
            logger.error("Error while processing %s: %s" % (stock.symbol, excp))
            #logger.debug(traceback.format_exc())
            self.failed.append(stock)
        else:
            logger.info("Success processed %s" % stock.symbol)
            self.sqlDAM.writeQuotes(stock.symbol, quotes)
            # cache the latest close and update-time on the stock row itself
            stock.price = quotes[-1].close
            stock.lastUpdate = datetime.datetime.now()
            self.sqlDAM.writeStock(stock)
            self.counter += 1
            # batch commits: every poolsize stocks, capped at every 10
            if 0 == self.counter % (self.poolsize if self.poolsize < 10 else 10):
                self.sqlDAM.commit()
                logger.info("Processed %d, remain %d." % (self.counter, self.stockQueue.qsize()))
            self.succeeded.append(stock)
        finally:
            # always mark the item done, even on failure, so queue.join() can return
            self.stockQueue.task_done()
def downloadAll(self, localOnly=False, append=True, threads=5):
    '''Download quotes for every known stock.

    The stock universe comes from the local DB when localOnly is set,
    otherwise from the eastmoney source; the actual work is delegated
    to __downloadStocks.
    '''
    if not localOnly:
        stocks = self.eastmoneyDAM.readAllStocks()
    else:
        logger.info("only update local stocks")
        stocks = self.sqlDAM.readAllStocks()
    self.__downloadStocks(stocks, append=append, threads=threads)
def __downloadStocks(self, stocks, append=True, threads=5):
    '''Queue every stock that needs refreshing onto a Crawler and run it.

    A stock is skipped when its local row was updated less than a day ago;
    otherwise its crawl window starts at history_start (new stocks) or at
    the last recorded update time.
    '''
    crawler = Crawler(self.dbpath, threads)
    crawler.reset()
    queued_labels = []
    for candidate in stocks:
        start = self.history_start
        end = datetime.datetime.now()
        local = self.sqlDAM.readStock(candidate.symbol)
        if local is None:
            # unseen symbol: create a placeholder row so quotes can attach to it
            local = Stock(candidate.symbol, candidate.name, 0)
            self.sqlDAM.writeStock(local)
        elif local.lastUpdate is not None:
            # refreshed within the last day -> nothing to do for this stock
            if (end - local.lastUpdate).days < 1:
                continue
            # otherwise only fetch the gap since the last update
            start = local.lastUpdate
        queued_labels.append("%s - %s\n" % (local.symbol, local.name))
        crawler.addStock(local, start, end)
    # commit to create local new stock objects
    self.sqlDAM.commit()
    symbol_str = "".join(queued_labels)
    if crawler.getRemainCount() > 0:
        logger.info("All stocks to update(%d): \n%s" % (crawler.getRemainCount(), symbol_str))
        logger.info("starting crawler in %s mode with %d threads" % (("append" if append else "overwrite"), threads))
        crawler.start()
        crawler.poll()
    else:
        logger.info("no stock needs to update")
def __getSaveOneStockQuotes(self, stock, start, end):
    '''Fetch historical quotes for one symbol, retrying transient failures.

    Tries up to MAX_TRY times with a 1-second pause between attempts.
    A 404 from the data source (symbol unknown) aborts immediately without
    retrying. Raises when all attempts fail; otherwise returns the quotes.

    Fix over previous version: the retry log line and the 1-second sleep are
    no longer emitted/executed after the *final* failed attempt — the old code
    logged "Retry in 1 second" and slept even though no retry followed,
    delaying the terminal raise.

    NOTE(review): raising bare BaseException is unidiomatic; kept as-is
    because existing callers catch BaseException.
    '''
    lastExcp = None
    failCount = 0
    quotes = None
    # try several times since the remote source may fail transiently
    while failCount < MAX_TRY:
        try:
            quotes = self.yahooDAM.readQuotes(stock.symbol, start, end)
        except BaseException as excp:
            failCount += 1
            lastExcp = excp
            # a 404 means the symbol does not exist — retrying cannot help
            if isinstance(excp, UfException) and excp.getCode() == Errors.NETWORK_404_ERROR:
                raise BaseException("Failed, stock %s not found" % stock.symbol)
            logger.warning("Failed, %s" % (excp))
            # only announce/sleep when another attempt actually follows
            if failCount < MAX_TRY:
                logger.info("Retry in 1 second")
                time.sleep(1)
        else:
            break
    if failCount >= MAX_TRY:
        raise BaseException("Can't retrieve historical data %s" % lastExcp)
    return quotes
if stock is None: print("stock %s is not found in database" % symbol) return -1 plot = StockPlot(stock) plot.setAdjusted(options.adjusted) plot.plot() elif command == "list": stocks = self.dataManager.loadAllStocks() print("List of stocks (symbol, name, price):") for stock in stocks: print("%s - %s - %d" % (stock.symbol, stock.name, stock.price)) elif command == "select": engine = SelectEngine() engine.globallMarketData = self.dataManager.loadGlobalMarketData() passed = [] for stock in self.dataManager.loadAllStocks(): _stock = self.dataManager.loadStockAndHistory(stock.symbol) if engine.select(_stock): passed.append(stock.symbol) print("Passed stocks:\n%s" % passed) else: parser.error("unrecognized command %s" % command) return -1 return 0 if __name__ == '__main__': ret = RufengFinance().main() logger.info("exiting...") exit(ret)
async def read_main():
    '''Log a greeting at debug and info levels, then return the hello payload.'''
    greeting = "Hello"
    logger.debug(greeting)
    logger.info(greeting)
    return {"msg": "Hello World"}