def doCall(self):
    """Poll trades and the order book for every tracked instrument.

    New trades (trade id greater than the last one seen) and fresher
    order-book tops (timestamp newer than the last update) are pushed
    onto the internal event queue.
    """
    for identifier in self.__identifiers:
        try:
            # Reverse so the oldest trade is enqueued first (the API
            # presumably returns newest-first — confirm against api docs).
            trade_list = api.get_trades(identifier)
            trade_list.reverse()

            for raw_trade in trade_list:
                trade = TradeBar(raw_trade)
                tid = trade.getTradeId()
                if tid > self.last_tid:
                    self.last_tid = tid
                    self.__queue.put((TradesAPIThread.ON_TRADE, {identifier: trade}))

            book = api.get_orderbook(identifier)
            if book['bids'] and book['asks']:
                top_ask = book['asks'][0]
                top_bid = book['bids'][0]
                update_ts = int(max(float(top_ask['timestamp']),
                                    float(top_bid['timestamp'])))
                if update_ts > self.last_orderbook_ts:
                    self.last_orderbook_ts = update_ts
                    payload = {
                        'bid': float(top_bid['price']),
                        'ask': float(top_ask['price'])
                    }
                    self.__queue.put((TradesAPIThread.ON_ORDER_BOOK_UPDATE, payload))
        except api.OkExError as e:
            logger.error(e)
def build_feed(instruments, fromYear, toYear, storage, frequency=bar.Frequency.DAY, timezone=None, skipErrors=False):
    """Build and load a :class:`mooquant.barfeed.mootdxfeed.Feed` using CSV files downloaded from Google Finance.

    CSV files are downloaded if they haven't been downloaded before.

    :param instruments: Instrument identifiers.
    :type instruments: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **mooquant.bar.Frequency.DAY** is currently supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`mooquant.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :rtype: :class:`mooquant.barfeed.mootdxfeed.Feed`.
    """
    logger = mooquant.logger.getLogger("mootdx")
    ret = mootdxfeed.Feed(frequency, timezone)

    if not os.path.exists(storage):
        logger.info("Creating {dirname} directory".format(dirname=storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for instrument in instruments:
            filePath = Path(storage)
            fileName = filePath / "{instrument}-{year}-mootdx.csv".format(
                instrument=instrument, year=year)

            if not os.path.exists(fileName):
                # BUG FIX: the message previously hard-coded "(unknown)" and
                # silently dropped the `filename` kwarg; interpolate the path.
                logger.info(
                    "Downloading {instrument} {year} to {filename}".format(
                        instrument=instrument, year=year, filename=fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(instrument, year, fileName)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e

            ret.addBarsFromCSV(instrument, fileName)

    return ret
def getNextBars(self):
    """Pop one event off the internal queue and return its bars payload.

    Returns None on queue timeout or when the dequeued event is not an
    ON_BARS event (the latter is logged as an error).
    """
    try:
        event_type, event_data = self.__queue.get(True, LiveFeed.QUEUE_TIMEOUT)
    except queue.Empty:
        return None

    if event_type == LiveFeedThread.ON_BARS:
        return event_data

    logger.error("Invalid event received: %s - %s" % (event_type, event_data))
    return None
def get_tushare_tick_data(self):
    """Poll tushare realtime quotes for all tracked identifiers.

    Each valid tick (per valid_tick_data) is appended to that
    identifier's tick data series. Any polling failure is logged and
    swallowed so the caller's polling loop keeps running.
    """
    try:
        df = ts.get_realtime_quotes(self._identifiers)
        for index, identifier in enumerate(self._identifiers):
            # BUG FIX: DataFrame.ix was deprecated in pandas 0.20 and removed
            # in pandas 1.0; .iloc gives the same positional row lookup.
            tick_info = df.iloc[index]
            if self.valid_tick_data(identifier, tick_info):
                # tushare use unicode type, another way is convert it to
                # int/float here. refer to build_bar
                self._tickDSDict[identifier].append(
                    tick_info.price, tick_info.volume,
                    tick_info.amount, tick_info.time)
    except Exception as e:
        logger.error("Tushare polling exception", exc_info=e)
def doCall(self):
    """Build one bar per instrument from its accumulated ticks and enqueue them.

    The bar close time is captured before scheduling the next close so
    all bars in this batch share the same end timestamp.
    """
    bar_close = self.__nextBarClose
    self.__updateNextBarClose()

    collected = {}
    for symbol in self._identifiers:
        try:
            tick_ds = self._tickDSDict[symbol]
            if not tick_ds.empty():
                collected[symbol] = build_bar(
                    to_market_datetime(bar_close), tick_ds)
        except Exception as e:
            logger.error(e)

    if collected:
        self.__queue.put((TushareBarFeedThread.ON_BARS, bar.Bars(collected)))
def _fill_today_bars(self):
    """Fetch today's ticks for every identifier and fill today's bars.

    Skips holidays and weekends entirely. Per-identifier download
    failures are logged and that identifier is simply omitted.
    """
    today = datetime.date.today().isoformat()

    if is_holiday(today):
        # do nothing if holiday
        return
    elif datetime.date.today().weekday() in [5, 6]:
        # BUG FIX: weekday() is Mon=0..Sun=6, so the weekend is {5, 6}.
        # The original checked [5, 0], which skipped Mondays and polled
        # on Sundays.
        return

    today_bars = {}
    for identifier in self.__identifiers:
        try:
            df = ts.get_today_ticks(identifier)
            today_bars[identifier] = get_bar_list(
                df, self.__frequency, None)
        except Exception as e:
            logger.error(e)

    self.__fill_bars(today_bars)
def serve(barFeed, strategyParameters, address, port, drivce='xml'):
    """Executes a server that will provide bars and strategy parameters for workers to use.

    :param drivce: backend server driver, either 'xml' or 'zmq'.
        (NOTE: the parameter name is a historical typo of "driver"; kept
        as-is for backward compatibility with keyword callers.)
    :param barFeed: The bar feed that each worker will use to backtest the strategy.
    :type barFeed: :class:`mooquant.barfeed.BarFeed`.
    :param strategyParameters: The set of parameters to use for backtesting.
        An iterable object where **each element is a tuple that holds parameter values**.
    :param address: The address to listen for incoming worker connections.
    :type address: string.
    :param port: The port to listen for incoming worker connections.
    :type port: int.
    :rtype: A :class:`Results` instance with the best results found or None if no results were obtained.
    """
    paramSource = base.ParameterSource(strategyParameters)
    resultSinc = base.ResultSinc()

    if drivce not in ('xml', 'zmq'):
        logger.error('drivce not found')
        # BUG FIX: `Execute` was an undefined name, so this path raised a
        # NameError instead of the intended error.
        raise Exception('drivce not found')

    # Import the requested RPC backend lazily so the unused one is never loaded.
    if drivce == 'xml':
        from mooquant.optimizer import xmlrpcserver as server
    elif drivce == 'zmq':
        from mooquant.optimizer import zmqrpcserver as server

    s = server.Server(paramSource, resultSinc, barFeed, address, port)
    logger.info("Starting server")
    s.serve()
    logger.info("Server finished")

    ret = None
    bestResult, bestParameters = resultSinc.getBest()

    if bestResult is not None:
        logger.info("Best final result {} with parameters {}".format(
            bestResult, bestParameters.args))
        ret = Results(bestParameters.args, bestResult)
    else:
        logger.error("No results. All jobs failed or no jobs were processed.")

    return ret
def __dispatchImpl(self, eventFilter):
    """Dequeue one event and dispatch it to the matching handler.

    Returns True when an event passing eventFilter was dispatched,
    False on timeout, on a filtered-out event, or on an unknown
    event type (which is logged as an error).
    """
    try:
        evt_type, evt_data = self.__queue.get(True, LiveFeed.QUEUE_TIMEOUT)
    except queue.Empty:
        return False

    # Drop events the caller did not ask for.
    if eventFilter is not None and evt_type not in eventFilter:
        return False

    if evt_type == TradesAPIThread.ON_TRADE:
        self.__onTrade(evt_data)
        return True

    if evt_type == TradesAPIThread.ON_ORDER_BOOK_UPDATE:
        self.__orderBookUpdateEvent.emit(evt_data)
        return True

    logger.error("Invalid event received to dispatch: %s - %s" % (evt_type, evt_data))
    return False
def get_trading_days(start_day, days):
    """Return the `days` most recent trading days strictly before start_day.

    Trading days are detected by membership in the 'sh' index history
    from tushare. The result is ordered oldest first. Returns [] if the
    history download fails.
    """
    try:
        hist = ts.get_hist_data('sh')
    except Exception as e:
        logger.error("Tushare get hist data exception", exc_info=e)
        return []

    # Walk backwards one calendar day at a time, keeping only days that
    # appear in the index history, until enough trading days are found.
    result = []
    offset = 1
    while len(result) < days:
        candidate = start_day - datetime.timedelta(days=offset)
        if candidate.date().isoformat() in hist.index:
            result.append(candidate)
        offset += 1

    result.reverse()  # oldest date is put to head
    return result
def doCall(self):
    """Request one bar per identifier from the Xignite realtime API and enqueue them.

    The bar end time is captured before scheduling the next close so the
    whole batch shares one end timestamp. Per-identifier API failures
    are logged and that identifier is omitted from the batch.
    """
    end_time = self.__nextBarClose
    self.__updateNextBarClose()

    collected = {}
    for identifier in self.__identifiers:
        try:
            logger.debug(
                "Requesting bars with precision {} and period {} for {}".
                format(self.__precision, self.__period, identifier))
            response = api.XigniteGlobalRealTime_GetBar(
                self.__apiToken, identifier, "Symbol", end_time,
                self.__precision, self.__period)
            collected[identifier] = build_bar(
                response["Bar"], identifier, self.__frequency)
        except api.XigniteError as e:
            logger.error(e)

    if collected:
        self.__queue.put((GetBarThread.ON_BARS, bar.Bars(collected)))
def build_feed(sourceCode, tableCodes, fromYear, toYear, storage,
               frequency=bar.Frequency.DAY, timezone=None, skipErrors=False,
               noAdjClose=False, authToken=None, columnNames=None,
               forceDownload=False):
    """Build and load a :class:`mooquant.barfeed.quandlfeed.Feed` using CSV files downloaded from Quandl.

    CSV files are downloaded if they haven't been downloaded before.

    :param sourceCode: The dataset source code.
    :type sourceCode: string.
    :param tableCodes: The dataset table codes.
    :type tableCodes: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars.
        Only **mooquant.bar.Frequency.DAY** or **mooquant.bar.Frequency.WEEK** are supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`mooquant.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :param noAdjClose: True if the instruments don't have adjusted close values.
    :type noAdjClose: boolean.
    :param authToken: Optional. An authentication token needed if you're doing more than 50 calls per day.
    :type authToken: string.
    :param columnNames: Optional. A dictionary to map column names. Valid key values are:

        * datetime
        * open
        * high
        * low
        * close
        * volume
        * adj_close

    :type columnNames: dict.
    :param forceDownload: True to re-download files even when a cached copy exists.
    :type forceDownload: boolean.
    :rtype: :class:`mooquant.barfeed.quandlfeed.Feed`.
    """
    logger = mooquant.logger.getLogger("quandl")
    ret = quandlfeed.Feed(frequency, timezone)

    if noAdjClose:
        ret.setNoAdjClose()

    # Additional column names.
    # FIX: avoid the shared mutable default argument ({}); behavior for
    # callers is unchanged.
    if columnNames is None:
        columnNames = {}
    for col, name in columnNames.items():
        ret.setColumnName(col, name)

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for tableCode in tableCodes:
            fileName = os.path.join(
                storage, "%s-%s-%d-quandl.csv" % (sourceCode, tableCode, year))

            if not os.path.exists(fileName) or forceDownload:
                logger.info("Downloading %s %d to %s" % (tableCode, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(sourceCode, tableCode, year,
                                            fileName, authToken)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(sourceCode, tableCode, year,
                                             fileName, authToken)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e

            ret.addBarsFromCSV(tableCode, fileName)

    return ret