Example #1
    def __init__(self, instrument, frequencies, maxLen=1000,
                 start=datetime.datetime(1988, 1, 1), pullDelay=30):
        if not isinstance(frequencies, list):
            frequencies = [frequencies]
        super(LiveBarFeed, self).__init__(frequencies, maxLen=maxLen)

        # Proactively register our instrument in case it is not found when
        # someone calls getDataSeries()
        for i in frequencies:
            self.registerDataSeries(instrument, i)

        self.__instrument = instrument
        self.__last_date = None
        self.__bars_buf = []
        self.__data_downloaded = False
        self.__start_date = start
        self.__pullDelay = pullDelay
        self.__nextRealtimeBars = {}
        self.__isRealTime = Frequency.REALTIME in frequencies
        for i in frequencies:
            if self.__isRealTime and i != Frequency.REALTIME and pullDelay > i:
                logger.error('pull delay is larger than minimum frequency.')
                assert False
            self.__nextRealtimeBars[i] = {
                'open': None,
                'high': None,
                'low': None,
                'close': None,
                'start' : None,
            }
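A minimal instantiation sketch for the constructor above; the instrument name, start date and frequency choices are hypothetical, and Frequency is assumed to be the same enumeration the class itself uses (it must define MINUTE and REALTIME).

import datetime

# Pull a quote every 30 seconds; aggregate into minute bars plus raw realtime bars.
feed = LiveBarFeed("AAPL", [Frequency.MINUTE, Frequency.REALTIME],
                   maxLen=1000,
                   start=datetime.datetime(2020, 1, 1),
                   pullDelay=30)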
Example #2
 def doCall(self):
     for identifier in self.__identifiers:
         try:
             trades = api.get_trades(identifier)
             trades.reverse()
             for barDict in trades:
                 bar = {}
                 trade = TradeBar(barDict)
                 bar[identifier] = trade
                 tid = trade.getTradeId()
                 if tid > self.last_tid:
                     self.last_tid = tid
                     self.__queue.put((TradesAPIThread.ON_TRADE, bar))
             orders = api.get_orderbook(identifier)
             if len(orders['bids']) and len(orders['asks']):
                 best_ask = orders['asks'][0]
                 best_bid = orders['bids'][0]
                 last_update = max(best_ask['timestamp'],
                                   best_bid['timestamp'])
                 if last_update > self.last_orderbook_ts:
                     self.last_orderbook_ts = last_update
                     self.__queue.put(
                         (TradesAPIThread.ON_ORDER_BOOK_UPDATE, {
                             'bid': float(best_bid['price']),
                             'ask': float(best_ask['price'])
                         }))
         except api.BitfinexError as e:
             logger.error(e)
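A sketch of how the events queued by doCall() might be consumed; the queue handle and timeout are assumptions, while the event types and payload shapes come from the code above.

import Queue  # Python 2 standard-library queue, as used elsewhere in these examples

def drain_events(event_queue):
    while True:
        try:
            eventType, eventData = event_queue.get(True, 0.5)
        except Queue.Empty:
            break
        if eventType == TradesAPIThread.ON_TRADE:
            # eventData maps identifier -> TradeBar
            for identifier, trade in eventData.items():
                print("trade %s: %s" % (identifier, trade.getTradeId()))
        elif eventType == TradesAPIThread.ON_ORDER_BOOK_UPDATE:
            print("bid=%s ask=%s" % (eventData['bid'], eventData['ask']))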
Example #3
def get_trading_days(start_day, days):
    try:
        # Fetch K-line (candlestick) data for the Shanghai Composite Index
        df = ts.get_hist_data('sh')
    except Exception as e:
        logger.error("Tushare get hist data exception", exc_info=e)
        return []
    def doCall(self):
        for identifier in self.__identifiers:
            try:
                trades = api.get_trades() #identifier
                trades.reverse()
                for barDict in trades:
                    bar = {}
                    trade = TradeBar(barDict)
                    bar[identifier] = trade
                    tid = trade.getTradeId()
                    if tid > self.last_tid:
                        self.last_tid = tid
                        self.__queue.put((
                            TradesAPIThread.ON_TRADE, bar
                        ))
                orders = api.get_orderbook(identifier)
                if len(orders['bids']) and len(orders['asks']):
                    best_ask = orders['asks'][0]
                    best_bid = orders['bids'][0]
                    # last_update = self.last_orderbook_ts + 1
                    # last_update = max(
                    #     best_ask['timestamp'], best_bid['timestamp']
                    # )
                    # if last_update > self.last_orderbook_ts:
                    #     self.last_orderbook_ts = last_update
                    self.__queue.put((
                        TradesAPIThread.ON_ORDER_BOOK_UPDATE,
                        {
                            'bid': float(best_bid[0]),
                            'ask': float(best_ask[0])
                        }
                    ))
            except api.MercadobitcoinError as e:
                logger.error(e)
Example #5
 def addBarsFromCSV(self, instrument='', period='D', timeFrom=None, timeTo=None):
     '''mid
     Add historical data for a symbol to this object.
     '''
     import pandas as pd
     import pyalgotrade.logger        
     dataCenter = self.dataCenter            
     logger = pyalgotrade.logger.getLogger("tushare")
 
     '''
     if not os.path.exists(storage):
         logger.info("Creating %s directory" % (storage))
         os.mkdir(storage)
     '''
     if not dataCenter.localStorage.exists(instrument, period):
         logger.info("Downloading %s from %s to %s" % (instrument, str(timeFrom),str(timeTo)))
         try:
             if period in self.dataCenter.localStorage.periods.keys():
                 if dataCenter.downloadAndStoreKDataByCode(code=instrument, timeFrom=timeFrom, timeTo=timeTo, period=period):
                     logger.info("Download succeeded.")
                 else:
                     logger.info("Download failed.")
             else:
                 raise Exception("Invalid period")
         except Exception as e:
             logger.error(str(e))
             raise e
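A hypothetical call to the method above, assuming a feed object already wired to a dataCenter with local storage; the instrument code and date range are placeholders.

import datetime

feed.addBarsFromCSV(instrument="600000", period="D",
                    timeFrom=datetime.datetime(2015, 1, 1),
                    timeTo=datetime.datetime(2015, 12, 31))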
Example #6
def build_feed(instruments, fromYear, toYear, storage, frequency=bar.Frequency.DAY, timezone=None, skipErrors=False):
    logger = pyalgotrade.logger.getLogger("yahoofinance")
    ret = yahoofeed.Feed(frequency, timezone)

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear+1):
        for instrument in instruments:
            fileName = os.path.join(storage, "%s-%d-yahoofinance.csv" % (instrument, year))
            if not os.path.exists(fileName):
                logger.info("Downloading %s %d to %s" % (instrument, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(instrument, year, fileName)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(instrument, year, fileName)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e
            ret.addBarsFromCSV(instrument, fileName)
    return ret
Example #7
    def dispatchImpl(self, eventFilter):
        try:
            eventType, eventData = self.__queue.get(True, Client.QUEUE_TIMEOUT)
            if eventFilter is not None and eventType not in eventFilter:
                return

            if eventType == WSClient.ON_TICKER:
                self.__tickerEvent.emit(eventData)
            elif eventType == WSClient.ON_TRADE:
                self.__tradeEvent.emit(eventData)
            elif eventType == WSClient.ON_USER_ORDER:
                self.__userOrderEvent.emit(eventData)
            elif eventType == WSClient.ON_RESULT:
                requestId, result = eventData
                logger.info("Result: %s - %s" % (requestId, result))
            elif eventType == WSClient.ON_REMARK:
                requestId, data = eventData
                logger.info("Remark: %s - %s" % (requestId, data))
            elif eventType == WSClient.ON_CONNECTED:
                self.__onConnected()
            elif eventType == WSClient.ON_DISCONNECTED:
                self.__onDisconnected()
            else:
                logger.error("Invalid event received to dispatch: %s - %s" % (eventType, eventData))
        except Queue.Empty:
            pass
Example #8
def serve(barFeed, strategyParameters, address, port, batchSize=200):
    """Executes a server that will provide bars and strategy parameters for workers to use.

    :param barFeed: The bar feed that each worker will use to backtest the strategy.
    :type barFeed: :class:`pyalgotrade.barfeed.BarFeed`.
    :param strategyParameters: The set of parameters to use for backtesting. An iterable object where **each element is a tuple that holds parameter values**.
    :param address: The address to listen for incoming worker connections.
    :type address: string.
    :param port: The port to listen for incoming worker connections.
    :type port: int.
    :param batchSize: The number of strategy executions that are delivered to each worker.
    :type batchSize: int.
    :rtype: A :class:`Results` instance with the best results found or None if no results were obtained.
    """

    paramSource = base.ParameterSource(strategyParameters)
    resultSinc = base.ResultSinc()
    s = xmlrpcserver.Server(paramSource, resultSinc, barFeed, address, port, batchSize=batchSize)
    logger.info("Starting server")
    s.serve()
    logger.info("Server finished")

    ret = None
    bestResult, bestParameters = resultSinc.getBest()
    if bestResult is not None:
        logger.info("Best final result %s with parameters %s" % (bestResult, bestParameters.args))
        ret = Results(bestParameters.args, bestResult)
    else:
        logger.error("No results. All jobs failed or no jobs were processed.")
    return ret
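A minimal sketch of driving serve(); the bar feed setup, instrument, CSV file, and parameter ranges are hypothetical, and the getParameters() accessor on the returned Results object is assumed. Only the serve() signature comes from the function above.

import itertools
from pyalgotrade.barfeed import quandlfeed

feed = quandlfeed.Feed()
feed.addBarsFromCSV("orcl", "WIKI-ORCL-2010-quandl.csv")  # hypothetical file

# Each element is a tuple of parameter values, as the docstring requires.
parameters = itertools.product(range(150, 251), range(5, 16))

results = serve(feed, parameters, "localhost", 5000, batchSize=200)
if results is not None:
    print("Best parameters: %s" % (results.getParameters(),))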
Example #9
    def dispatchImpl(self, eventFilter):
        ret = False
        try:
            eventType, eventData = self.__wsClient.getQueue().get(True, Client.QUEUE_TIMEOUT)
            if eventFilter is not None and eventType not in eventFilter:
                return False

            ret = True
            if eventType == WSClient.ON_TICKER:
                self.__tickerEvent.emit(eventData)
            elif eventType == WSClient.ON_WALLET:
                self.__walletEvent.emit(eventData)
            elif eventType == WSClient.ON_TRADE:
                self.__tradeEvent.emit(eventData)
            elif eventType == WSClient.ON_USER_ORDER:
                self.__userOrderEvent.emit(eventData)
            elif eventType == WSClient.ON_RESULT:
                ret = False
                requestId, data = eventData
                self.__onResult(requestId, data)
            elif eventType == WSClient.ON_REMARK:
                ret = False
                requestId, data = eventData
                self.__onRemark(requestId, data)
            elif eventType == WSClient.ON_CONNECTED:
                self.__onConnected()
            elif eventType == WSClient.ON_DISCONNECTED:
                self.__onDisconnected()
            else:
                ret = False
                logger.error("Invalid event received to dispatch: %s - %s" % (eventType, eventData))
        except Queue.Empty:
            pass
        return ret
def serve(barFeed, strategyParameters, address, port):
    """Executes a server that will provide bars and strategy parameters for workers to use.

    :param barFeed: The bar feed that each worker will use to backtest the strategy.
    :type barFeed: :class:`pyalgotrade.barfeed.BarFeed`.
    :param strategyParameters: The set of parameters to use for backtesting. An iterable object where **each element is a tuple that holds parameter values**.
    :param address: The address to listen for incoming worker connections.
    :type address: string.
    :param port: The port to listen for incoming worker connections.
    :type port: int.
    :rtype: A :class:`Results` instance with the best results found or None if no results were obtained.
    """

    paramSource = base.ParameterSource(strategyParameters)
    resultSinc = base.ResultSinc()
    s = xmlrpcserver.Server(paramSource, resultSinc, barFeed, address, port)
    logger.info("Starting server")
    s.serve()
    logger.info("Server finished")

    ret = None
    bestResult, bestParameters = resultSinc.getBest()
    if bestResult is not None:
        logger.info("Best final result %s with parameters %s" %
                    (bestResult, bestParameters.args))
        ret = Results(bestParameters.args, bestResult)
    else:
        logger.error("No results. All jobs failed or no jobs were processed.")
    return ret
Example #11
 def stop(self):
     try:
         if self.__wsClient is not None:
             logger.debug("Stopping websocket client")
             self.__wsClient.stopClient()
     except Exception as e:
         logger.error("Error stopping websocket client: %s" % e)
Example #12
 def stop(self):
     try:
         if self.__thread is not None and self.__thread.is_alive():
             logger.info("Shutting down client.")
             self.__stream.disconnect()
     except Exception as e:
         logger.error("Error disconnecting stream: %s." % (str(e)))
Example #13
    def doCall(self):
        barDict = {}
        try:
            a = time.time()
            response = ts.get_realtime_quotes(self.__identifiers)
            logger.info('response cost: %f' % (time.time() - a))
            if self.__last_response_time != response.iloc[-1]['time']:
                for identifier in self.__identifiers:
                    barDict[identifier] = build_bar(
                        response[response.code ==
                                 identifier].iloc[-1])  # convert the DataFrame row
            else:
                logger.info(
                    "bar is the same with previous bar at time %s,not refresh"
                    % response.iloc[-1]['time'])
            self.__last_response_time = response.iloc[-1]['time']

        except Exception:
            logger.error("tushare request timed out")
            import traceback
            traceback.print_exc()

        if len(barDict):
            bars = bar.Ticks(barDict)
            self.__queue.put((GetBarThread.ON_BARS, bars))
def build_feed(instruments,
               fromYear,
               toYear,
               storage,
               frequency=bar.Frequency.DAY,
               timezone=None,
               skipErrors=False):
    logger = pyalgotrade.logger.getLogger("yahoofinance")
    ret = yahoofeed.Feed(frequency, timezone)

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for instrument in instruments:
            fileName = os.path.join(
                storage, "%s-%d-yahoofinance.csv" % (instrument, year))
            if not os.path.exists(fileName):
                logger.info("Downloading %s %d to %s" %
                            (instrument, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(instrument, year, fileName)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(instrument, year, fileName)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e
            ret.addBarsFromCSV(instrument, fileName)
    return ret
Example #15
 def stop(self):
     try:
         if self.__thread is not None and self.__thread.is_alive():
             logger.info("Shutting down client.")
             self.__stream.disconnect()
     except Exception as e:
         logger.error("Error disconnecting stream: %s." % (str(e)))
Example #16
 def stop(self):
     try:
         self.__stopped = True
         if self.__thread is not None and self.__thread.is_alive():
             logger.info("Stopping websocket client.")
             self.__thread.stop()
     except Exception as e:
         logger.error("Error shutting down client: %s" % (str(e)))
Example #17
 def stop(self):
     try:
         self.__stopped = True
         if self.__thread is not None and self.__thread.is_alive():
             logger.info("Shutting down MtGox client.")
             self.__wsClient.stopClient()
     except Exception as e:
         logger.error("Error shutting down MtGox client: %s" % (str(e)))
Example #18
 def stop(self):
     try:
         self.__stopped = True
         if self.__thread is not None and self.__thread.is_alive():
             logger.info("Shutting down MtGox client.")
             self.__wsClient.stopClient()
     except Exception as e:
         logger.error("Error shutting down MtGox client: %s" % (str(e)))
Example #19
def main():
    fromYear = 2000
    toYear = 2012

    try:
        symbolsFile = os.path.join("..", "symbols", "merval.xml")
        symbolsxml.parse(symbolsFile, lambda stock: download_files_for_symbol(stock.getTicker(), fromYear, toYear))
    except Exception as e:
        logger.error(str(e))
Example #20
def build_feed(sourceCode, tableCodes, fromYear, toYear, storage, frequency=bar.Frequency.DAY, timezone=None, skipErrors=False, noAdjClose=False, authToken=None):
    """Build and load a :class:`pyalgotrade.barfeed.quandlfeed.Feed` using CSV files downloaded from Quandl.
    CSV files are downloaded if they haven't been downloaded before.

    :param sourceCode: The dataset source code.
    :type sourceCode: string.
    :param tableCodes: The dataset table codes.
    :type tableCodes: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **pyalgotrade.bar.Frequency.DAY** or **pyalgotrade.bar.Frequency.WEEK**
        are supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`pyalgotrade.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :param noAdjClose: True if the instruments don't have adjusted close values.
    :type noAdjClose: boolean.
    :param authToken: Optional. An authentication token needed if you're doing more than 50 calls per day.
    :type authToken: string.
    :rtype: :class:`pyalgotrade.barfeed.quandlfeed.Feed`.
    """

    logger = pyalgotrade.logger.getLogger("quandl")
    ret = quandlfeed.Feed(frequency, timezone)
    if noAdjClose:
        ret.setNoAdjClose()

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear+1):
        for tableCode in tableCodes:
            fileName = os.path.join(storage, "%s-%s-%d-quandl.csv" % (sourceCode, tableCode, year))
            if not os.path.exists(fileName):
                logger.info("Downloading %s %d to %s" % (tableCode, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(sourceCode, tableCode, year, fileName, authToken)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(sourceCode, tableCode, year, fileName, authToken)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e
            ret.addBarsFromCSV(tableCode, fileName)
    return ret
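A minimal usage sketch for the function above; the WIKI source code, ORCL table code, year range, and storage directory are placeholder values.

feed = build_feed("WIKI", ["ORCL"], 2010, 2012, "quandl-data", skipErrors=True)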
Example #21
def build_feed(instruments,
               fromYear,
               toYear,
               storage,
               frequency=bar.Frequency.DAY,
               timezone=None,
               skipErrors=False):
    """Build and load a :class:`pyalgotrade.barfeed.yahoofeed.Feed` using CSV files downloaded from Yahoo! Finance.
    CSV files are downloaded if they haven't been downloaded before.

    :param instruments: Instrument identifiers.
    :type instruments: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **pyalgotrade.bar.Frequency.DAY** or **pyalgotrade.bar.Frequency.WEEK**
        are supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`pyalgotrade.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :rtype: :class:`pyalgotrade.barfeed.yahoofeed.Feed`.
    """

    logger = pyalgotrade.logger.getLogger("yahoofinance")
    ret = yahoofeed.Feed(frequency, timezone)

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for instrument in instruments:
            fileName = os.path.join(
                storage, "%s-%d-yahoofinance.csv" % (instrument, year))
            if not os.path.exists(fileName):
                logger.info("Downloading %s %d to %s" %
                            (instrument, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(instrument, year, fileName)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(instrument, year, fileName)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e
            ret.addBarsFromCSV(instrument, fileName)
    return ret
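For comparison with the Quandl variant, a sketch of calling this Yahoo! Finance version; the instrument identifiers and storage directory are placeholders.

feed = build_feed(["orcl", "aapl"], 2010, 2012, "yahoo-data")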
Example #22
def main():
    try:
        writer = symbolsxml.Writer()
        for symbol in open("merval-symbols.txt", "r"):
            symbol = symbol.strip()
            process_symbol(writer, symbol)
        logger.info("Writing merval.xml")
        writer.write("merval.xml")
    except Exception as e:
        logger.error(str(e))
Example #23
def main():
    fromYear = 2000
    toYear = 2013

    try:
        symbolsFile = os.path.join("..", "symbols", "merval.xml")
        callback = lambda stock: download_files_for_symbol(stock.getTicker(), fromYear, toYear)
        symbolsxml.parse(symbolsFile, callback, callback)
    except Exception as e:
        logger.error(str(e))
Example #24
def build_feed(instruments,
               fromYear,
               toYear,
               storage,
               frequency=bar.Frequency.DAY,
               timezone=None,
               skipErrors=False,
               rowFilter=None):
    """Build and load a :class:`pyalgotrade.barfeed.googlefeed.Feed` using CSV files downloaded from Google Finance.
    CSV files are downloaded if they haven't been downloaded before.

    :param instruments: Instrument identifiers.
    :type instruments: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **pyalgotrade.bar.Frequency.DAY** is currently supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`pyalgotrade.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :rtype: :class:`pyalgotrade.barfeed.googlefeed.Feed`.
    """

    logger = pyalgotrade.logger.getLogger("googlefinance")
    ret = googlefeed.Feed(frequency, timezone)

    if not os.path.exists(storage):
        logger.info("Creating {dirname} directory".format(dirname=storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for instrument in instruments:
            fileName = os.path.join(
                storage, "{instrument}-{year}-googlefinance.csv".format(
                    instrument=instrument, year=year))
            if not os.path.exists(fileName):
                logger.info(
                    "Downloading {instrument} {year} to {filename}".format(
                        instrument=instrument, year=year, filename=fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(instrument, year, fileName)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e
            ret.addBarsFromCSV(instrument, fileName, rowFilter=rowFilter)
    return ret
Example #25
def build_feed(sourceCode, tableCodes, fromYear, toYear, storage, frequency=bar.Frequency.DAY, timezone=None, skipErrors=False, noAdjClose=False, authToken=None):
    """Build and load a :class:`pyalgotrade.barfeed.quandlfeed.Feed` using CSV files downloaded from Quandl.
    CSV files are downloaded if they haven't been downloaded before.

    :param sourceCode: The dataset source code.
    :type sourceCode: string.
    :param tableCodes: The dataset table codes.
    :type tableCodes: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **pyalgotrade.bar.Frequency.DAY** or **pyalgotrade.bar.Frequency.WEEK**
        are supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`pyalgotrade.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :param noAdjClose: True if the instruments don't have adjusted close values.
    :type noAdjClose: boolean.
    :param authToken: Optional. An authentication token needed if you're doing more than 50 calls per day.
    :type authToken: string.
    :rtype: :class:`pyalgotrade.barfeed.quandlfeed.Feed`.
    """

    logger = pyalgotrade.logger.getLogger("quandl")
    ret = quandlfeed.Feed(frequency, timezone)
    if noAdjClose:
        ret.setNoAdjClose()

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear+1):
        for tableCode in tableCodes:
            fileName = os.path.join(storage, "%s-%s-%d-quandl.csv" % (sourceCode, tableCode, year))
            if not os.path.exists(fileName):
                logger.info("Downloading %s %d to %s" % (tableCode, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(sourceCode, tableCode, year, fileName, authToken)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(sourceCode, tableCode, year, fileName, authToken)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e
            ret.addBarsFromCSV(tableCode, fileName)
    return ret
Example #26
 def generateBars(self):
     if self.__pullDelay > 0:
         time.sleep(self.__pullDelay)
     logger.info('get quote of {0}'.format(self.__instrument))
     tmp = quote(self.__instrument)
     if tmp is None:
         logger.error('failed to get {0} quote'.format(self.__instrument))
         return
     tmpVal = tmp.iloc[0]['close'].iloc[0]
     curTime = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
     tmp = bar.BasicBar(curTime,
                        tmpVal, tmpVal, tmpVal, tmpVal,
                        0, False, Frequency.REALTIME)
     for freq in self.__nextRealtimeBars.keys():
         if self.__nextRealtimeBars[freq]['open'] is None:
             self.__nextRealtimeBars[freq]['open'] = tmpVal
             self.__nextRealtimeBars[freq]['high'] = tmpVal
             self.__nextRealtimeBars[freq]['low'] = tmpVal
             self.__nextRealtimeBars[freq]['close'] = tmpVal
             if freq == Frequency.DAY:
                 curTime = curTime.replace(hour=0, minute=0, second=0, microsecond=0)
             elif freq == Frequency.HOUR:
                 curTime = curTime.replace(minute=0, second=0, microsecond=0)
             elif freq == Frequency.MINUTE:
                 curTime = curTime.replace(second=0, microsecond=0)
             elif freq != Frequency.REALTIME:
                 logger.error('{0} is not supported.'.format(freq))
                 assert False
             self.__nextRealtimeBars[freq]['start'] = curTime
         else:
             if tmpVal > self.__nextRealtimeBars[freq]['high']:
                 self.__nextRealtimeBars[freq]['high'] = tmpVal
             elif tmpVal < self.__nextRealtimeBars[freq]['low']:
                 self.__nextRealtimeBars[freq]['low'] = tmpVal
             deltaTime = curTime - self.__nextRealtimeBars[freq]['start']
             if (Frequency.MINUTE == freq and deltaTime.total_seconds() > 60 or
                     Frequency.DAY == freq and deltaTime.total_seconds() > 60*60*24 or
                     Frequency.HOUR == freq and deltaTime.total_seconds() > 60*60):
                 self.__nextRealtimeBars[freq]['close'] = tmpVal
                 row = self.__nextRealtimeBars[freq]
                 tmpbar = bar.BasicBar(curTime, row['open'], row['high'],
                                     row['low'], row['close'], 0, False,
                                     freq)
                 self.__bars_buf.append(tmpbar)
                 self.__nextRealtimeBars[freq] = {
                     'open': None,
                     'high': None,
                     'low': None,
                     'close': None,
                     'start' : None,
                 }
                 self.__bars_buf.append(tmp)
     if self.__isRealTime:
         self.__bars_buf.append(tmp)
     self.__bars_buf.sort(key=lambda i: i.getDateTime())
Example #27
def build_feed(instruments,
               fromYear,
               toYear,
               storage,
               frequency=bar.Frequency.DAY,
               skipErrors=False):
    """Build and load a :class:`pyalgotrade.barfeed.tusharefeed.Feed` using CSV files downloaded from Tushare Library.
    CSV files are downloaded if they haven't been downloaded before.

    :param instruments: Instrument identifiers.
    :type instruments: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **pyalgotrade.bar.Frequency.DAY** is currently supported.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :rtype: :class:`pyalgotrade.barfeed.tusharefeed.Feed`.
    """

    logger = pyalgotrade.logger.getLogger("tushare")
    ret = barfeed.Feed(frequency)

    if not os.path.exists(storage):
        logger.info("Creating {dirname} directory".format(dirname=storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for instrument in instruments:
            filePath = Path(storage)
            fileName = filePath / "{instrument}-{year}-tushare.csv".format(
                instrument=instrument, year=year)

            if not os.path.exists(fileName):
                logger.info(
                    "Downloading {instrument} {year} to {filename}".format(
                        instrument=instrument, year=year, filename=fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(instrument, year, fileName)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e

            ret.addBarsFromCSV(instrument, fileName)

    return ret
Example #28
    def doCall(self):
        endDateTime = self.__nextBarClose
        self.__updateNextBarClose()
        bar_dict = {}

        for identifier in self._identifiers:
            try:
                if not self._tickDSDict[identifier].empty():
                    bar_dict[identifier] = build_bar(to_market_datetime(endDateTime), self._tickDSDict[identifier])
            except Exception as e:
                logger.error(e)
Example #29
 def getNextBars(self):
     ret = None
     try:
         eventType, eventData = self.__queue.get(True, LiveFeed.QUEUE_TIMEOUT)
         if eventType == GetBarThread.ON_BARS:
             ret = eventData
         else:
             logger.error("Invalid event received: %s - %s" % (eventType, eventData))
     except queue.Empty:
         pass
     return ret
Example #30
 def getNextBars(self):
     ret = None
     try:
         eventType, eventData = self.__queue.get(True, LiveFeed.QUEUE_TIMEOUT)
         if eventType == GetBarThread.ON_BARS:
             ret = eventData
         else:
             logger.error("Invalid event received: %s - %s" % (eventType, eventData))
     except queue.Empty:
         pass
     return ret
Example #31
 def __onDisconnected(self):
     logger.error("Disconnection detected")
     if self.__enableReconnection:
         initialized = False
         while not self.__stopped and not initialized:
             logger.info("Reconnecting")
             initialized = self.__initializeClient()
             if not initialized:
                 time.sleep(5)
     else:
         self.__stopped = True
Example #32
 def __onDisconnected(self):
     logger.error("Disconnection detected")
     if self.__enableReconnection:
         initialized = False
         while not self.__stopped and not initialized:
             logger.info("Reconnecting")
             initialized = self.__initializeClient()
             if not initialized:
                 time.sleep(5)
     else:
         self.__stopped = True
Example #33
def download_trades_impl(currency, tid):
    url = "https://data.mtgox.com/api/1/BTC%s/trades?since=%d" % (currency.upper(), tid)

    f = urllib.urlopen(url)
    buff = f.read()
    if f.headers["Content-Type"].find("application/json") != 0:
        logger.error(buff)
        raise Exception("Failed to download data. Invalid Content-Type: %s" % (f.headers["Content-Type"]))
    response = json.loads(buff)
    if response["result"] != "success":
        raise Exception("Failed to download data. Result '%s'" % (response["result"]))
    return response
def main():
    try:
        htmlTree = get_html()
        table = find_table(htmlTree)
        if table is None:
            raise Exception("S&P 500 Component Stocks table not found")
        symbolsXML = parse_results(table)

        logger.info("Writing sp500.xml")
        symbolsXML.write("sp500.xml")
    except Exception as e:
        logger.error(str(e))
Example #35
    def doCall(self):
        endDateTime = self.__nextBarClose
        self.__updateNextBarClose()
        bar_dict = {}

        for identifier in self._identifiers:
            try:
                if not self._tickDSDict[identifier].empty():
                    bar_dict[identifier] = build_bar(
                        to_market_datetime(endDateTime),
                        self._tickDSDict[identifier])
            except Exception as e:
                logger.error(e)
Example #36
def main():
    parser = argparse.ArgumentParser(description="Quandl utility")

    parser.add_argument("--auth-token", required=False,
                        help="An authentication token needed if you're doing more than 50 calls per day")
    parser.add_argument("--source-code", required=True,
                        help="The dataset source code")
    parser.add_argument("--table-code", required=True,
                        help="The dataset table code")
    parser.add_argument("--from-year", required=True,
                        type=int, help="The first year to download")
    parser.add_argument("--to-year", required=True, type=int,
                        help="The last year to download")
    parser.add_argument("--storage", required=True,
                        help="The path were the files will be downloaded to")
    parser.add_argument("--force-download", action='store_true',
                        help="Force downloading even if the files exist")
    parser.add_argument("--ignore-errors", action='store_true',
                        help="True to keep on downloading files in case of errors")
    parser.add_argument("--frequency", default="daily", choices=[
                        "daily", "weekly"], help="The frequency of the bars. Only daily or weekly are supported")

    args = parser.parse_args()

    logger = pyalgotrade.logger.getLogger("quandl")

    if not os.path.exists(args.storage):
        logger.info("Creating %s directory" % (args.storage))
        os.mkdir(args.storage)

    for year in range(args.from_year, args.to_year + 1):
        fileName = os.path.join(args.storage, "%s-%s-%d-quandl.csv" %
                                (args.source_code, args.table_code, year))
        if not os.path.exists(fileName) or args.force_download:
            logger.info("Downloading %s %d to %s" %
                        (args.table_code, year, fileName))
            try:
                if args.frequency == "daily":
                    download_daily_bars(
                        args.source_code, args.table_code, year, fileName, args.auth_token)
                else:
                    assert args.frequency == "weekly", "Invalid frequency"
                    download_weekly_bars(
                        args.source_code, args.table_code, year, fileName, args.auth_token)
            except Exception as e:
                if args.ignore_errors:
                    logger.error(str(e))
                    continue
                else:
                    raise
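A hedged illustration of invoking this utility; the dataset codes and storage path are placeholders, and the sys.argv assignment simply mirrors the equivalent command line shown in the comment.

import sys

# Equivalent shell invocation (hypothetical codes and paths):
#   python quandl_tool.py --source-code WIKI --table-code ORCL \
#       --from-year 2010 --to-year 2012 --storage ./quandl-data --frequency daily
sys.argv = ["quandl_tool.py",
            "--source-code", "WIKI", "--table-code", "ORCL",
            "--from-year", "2010", "--to-year", "2012",
            "--storage", "./quandl-data", "--frequency", "daily"]
main()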
Example #37
def main():
    fromYear = 2000
    toYear = 2012

    try:
        # MERVAL config.
        symbolsFile = os.path.join("..", "symbols", "merval.xml")
        missingDataVerifierClass = MervalMissingDataVerifier

        stockCallback = lambda stock: process_symbol(stock.getTicker(), fromYear, toYear, missingDataVerifierClass)
        indexCallback = stockCallback
        symbolsxml.parse(symbolsFile, stockCallback, indexCallback)
    except Exception as e:
        logger.error(str(e))
Example #38
    def get_tushare_tick_data(self):
        try:
            df = ts.get_realtime_quotes(self._identifiers)

            for index, identifier in enumerate(self._identifiers):
                tick_info = df.ix[index]

                if self.valid_tick_data(identifier, tick_info):
                    # tushare returns unicode values; alternatively, convert them to int/float here (refer to build_bar).
                    self._tickDSDict[identifier].append(
                        tick_info.price, tick_info.volume, tick_info.amount,
                        tick_info.time)
        except Exception as e:
            logger.error("Tushare polling exception", exc_info=e)
Example #39
def main():
    try:
        writer = symbolsxml.Writer()
        for symbol in open("merval-symbols.txt", "r"):
            symbol = symbol.strip()
            process_symbol(writer, symbol)

        # Index
        writer.addIndex("^MERV", "Merval")

        logger.info("Writing merval.xml")
        writer.write("merval.xml")
    except Exception as e:
        logger.error(str(e))
Example #40
    def _fill_today_bars(self):
        today = datetime.date.today().isoformat()

        if is_holiday(today):  # do nothing if holiday
            return

        today_bars = {}
        for identifier in self.__identifiers:
            try:
                df = ts.get_today_ticks(identifier)
                today_bars[identifier] = get_bar_list(df, self.__frequency,
                                                      None)
            except Exception as e:
                logger.error(e)
Example #41
    def doCall(self):

        barDict = {}
        self.__updateNextBarClose()
        for identifier in self.__identifiers:
            try:
                response = ts.get_k_data(code=identifier, ktype=self.__precision)
                barDict[identifier] = build_bar(response.iloc[-1], self.__frequency)
            except Exception:
                logger.error("tushare request timed out")

        if len(barDict):
            bars = bar.Bars(barDict)
            self.__queue.put((GetBarThread.ON_BARS, bars))
Example #42
def download_trades_since(currency, tid, ignoreMultiCurrency, retries=3):
    logger.info("Downloading trades since %s." % (base.tid_to_datetime(tid)))
    # logger.info("Downloading trades since %d." % (tid))

    done = False
    while not done:
        try:
            response = download_trades_impl(currency, tid)
            done = True
        except Exception as e:
            if retries == 0:
                raise e
            else:
                logger.error("%s. Retrying..." % (e))
                retries -= 1