Example #1
    def serve(self):
        try:
            # Initialize instruments, bars and parameters.
            logger.info("Loading bars")
            loadedBars = []

            for dateTime, bars in self.__barFeed:
                loadedBars.append(bars)

            instruments = self.__barFeed.getRegisteredInstruments()

            self.__instrumentsAndBars = pickle.dumps(
                (list(instruments), loadedBars))
            self.__barsFreq = self.__barFeed.getFrequency()

            if self.__autoStopThread:
                self.__autoStopThread.start()

            logger.info("Waiting for workers")
            self.serve_forever()

            if self.__autoStopThread:
                self.__autoStopThread.join()
        finally:
            self.__forcedStop = True
Example #2
def build_feed(instruments,
               fromYear,
               toYear,
               storage,
               frequency=bar.Frequency.DAY,
               timezone=None,
               skipErrors=False):
    """Build and load a :class:`mooquant.barfeed.mootdxfeed.Feed` using CSV files downloaded from Google Finance.
    CSV files are downloaded if they haven't been downloaded before.

    :param instruments: Instrument identifiers.
    :type instruments: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **mooquant.bar.Frequency.DAY** is currently supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`mooquant.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :rtype: :class:`mooquant.barfeed.mootdxfeed.Feed`.
    """

    logger = mooquant.logger.getLogger("mootdx")
    ret = mootdxfeed.Feed(frequency, timezone)

    if not os.path.exists(storage):
        logger.info("Creating {dirname} directory".format(dirname=storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for instrument in instruments:
            filePath = Path(storage)
            fileName = filePath / "{instrument}-{year}-mootdx.csv".format(
                instrument=instrument, year=year)

            if not os.path.exists(fileName):
                logger.info(
                    "Downloading {instrument} {year} to {filename}".format(
                        instrument=instrument, year=year, filename=fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(instrument, year, fileName)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e

            ret.addBarsFromCSV(instrument, fileName)

    return ret
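
For reference, a minimal usage sketch of the helper above, assuming it is importable from the module where it is defined; the instrument codes and storage directory are placeholders.

from mooquant import bar

# Placeholder instrument codes and storage directory; adjust to your data.
feed = build_feed(
    instruments=["600036", "000001"],
    fromYear=2016,
    toYear=2017,
    storage="./mootdx-data",
    frequency=bar.Frequency.DAY,
    skipErrors=True,  # log and skip instruments whose download fails
)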
Example #3
    def run(self):
        logger.info("Thread started")
        while not self.__stopped:
            self.__wait()
            if not self.__stopped:
                try:
                    self.doCall()
                except Exception as e:
                    logger.critical("Unhandled exception", exc_info=e)
        logger.debug("Thread finished.")
Example #4
    def serve(self):
        try:
            # Initialize instruments, bars and parameters.
            logger.info("Loading bars")
            loadedBars = []

            for dateTime, bars in self.__barFeed:
                loadedBars.append(bars)

            instruments = self.__barFeed.getRegisteredInstruments()

            self.__instrumentsAndBars = pickle.dumps((list(instruments), loadedBars))
            self.__barsFreq = self.__barFeed.getFrequency()
        finally:
            pass
Example #5
    def __init__(self,
                 identifiers,
                 apiCallDelay=5,
                 maxLen=dataseries.DEFAULT_MAX_LEN):
        logger.info('Livefeed created')
        barfeed.BaseBarFeed.__init__(self, bar.Frequency.TRADE, maxLen)
        if not isinstance(identifiers, list):
            raise Exception("identifiers must be a list")

        self.__queue = queue.Queue()
        self.__orderBookUpdateEvent = observer.Event()
        self.__thread = TradesAPIThread(
            self.__queue, identifiers,
            datetime.timedelta(seconds=apiCallDelay))
        self.__bars = []
        for instrument in identifiers:
            self.registerInstrument(instrument)
Example #6
    def pushJobResults(self, jobId, result, parameters, workerName):
        jobId = pickle.loads(jobId.data)
        result = pickle.loads(result.data)
        parameters = pickle.loads(parameters.data)

        # Remove the job mapping.
        with self.__activeJobsLock:
            try:
                del self.__activeJobs[jobId]
            except KeyError:
                # The job's results were already submitted.
                return

        # Save the job with the best result so far.
        if result is not None and (self.__bestResult is None or result > self.__bestResult):
            logger.info("Best result so far {} with parameters {}".format(
                result, parameters))
            self.__bestResult = result

        self.__resultSinc.push(result, base.Parameters(*parameters))
Example #7
def serve(barFeed, strategyParameters, address, port, drivce='xml'):
    """Executes a server that will provide bars and strategy parameters for workers to use.

    :param barFeed: The bar feed that each worker will use to backtest the strategy.
    :type barFeed: :class:`mooquant.barfeed.BarFeed`.
    :param strategyParameters: The set of parameters to use for backtesting. An iterable object where **each element is a tuple that holds parameter values**.
    :param address: The address to listen for incoming worker connections.
    :type address: string.
    :param port: The port to listen for incoming worker connections.
    :type port: int.
    :param drivce: The RPC backend to use, either 'xml' or 'zmq'.
    :type drivce: string.
    :rtype: A :class:`Results` instance with the best results found or None if no results were obtained.
    """

    paramSource = base.ParameterSource(strategyParameters)
    resultSinc = base.ResultSinc()

    if drivce not in ('xml', 'zmq'):
        logger.error("Unsupported drivce '{}'".format(drivce))
        raise Exception("Unsupported drivce '{}'".format(drivce))

    if drivce == 'xml':
        from mooquant.optimizer import xmlrpcserver as server
    elif drivce == 'zmq':
        from mooquant.optimizer import zmqrpcserver as server

    s = server.Server(paramSource, resultSinc, barFeed, address, port)
    logger.info("Starting server")

    s.serve()
    logger.info("Server finished")

    ret = None
    bestResult, bestParameters = resultSinc.getBest()

    if bestResult is not None:
        logger.info("Best final result {} with parameters {}".format(
            bestResult, bestParameters.args))
        ret = Results(bestParameters.args, bestResult)
    else:
        logger.error("No results. All jobs failed or no jobs were processed.")

    return ret
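
A hedged sketch of how the function above can be driven; the parameter grid, the bar feed CSV path, and the port are illustrative placeholders.

import itertools

from mooquant import bar
from mooquant.barfeed import quandlfeed

# Illustrative parameter grid: each element is a tuple of strategy parameters.
parameters = itertools.product(range(150, 251), range(5, 16))

# The bar feed that workers will replay; the CSV path is a placeholder.
barFeed = quandlfeed.Feed(bar.Frequency.DAY)
barFeed.addBarsFromCSV("orcl", "WIKI-ORCL-2011-quandl.csv")

results = serve(barFeed, parameters, "localhost", 5000, drivce='xml')
if results is not None:
    # getResult()/getParameters() are the accessors assumed on Results.
    print("Best result:", results.getResult(), "with parameters", results.getParameters())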
Example #8
    def serve(self):
        logger.info("Waiting for workers")
        self.__zerorpc.run()
Example #9
def build_feed(sourceCode, tableCodes, fromYear, toYear, storage, frequency=bar.Frequency.DAY, timezone=None,
               skipErrors=False, noAdjClose=False, authToken=None, columnNames={}, forceDownload=False):
    """Build and load a :class:`mooquant.barfeed.quandlfeed.Feed` using CSV files downloaded from Quandl.
    CSV files are downloaded if they haven't been downloaded before.

    :param sourceCode: The dataset source code.
    :type sourceCode: string.
    :param tableCodes: The dataset table codes.
    :type tableCodes: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **mooquant.bar.Frequency.DAY** or **mooquant.bar.Frequency.WEEK**
        are supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`mooquant.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :param noAdjClose: True if the instruments don't have adjusted close values.
    :type noAdjClose: boolean.
    :param authToken: Optional. An authentication token needed if you're doing more than 50 calls per day.
    :type authToken: string.
    :param columnNames: Optional. A dictionary to map column names. Valid key values are:

        * datetime
        * open
        * high
        * low
        * close
        * volume
        * adj_close

    :type columnNames: dict.
    :param forceDownload: Optional. True to download the files again even if they were already downloaded.
    :type forceDownload: boolean.

    :rtype: :class:`mooquant.barfeed.quandlfeed.Feed`.
    """

    logger = mooquant.logger.getLogger("quandl")
    ret = quandlfeed.Feed(frequency, timezone)

    if noAdjClose:
        ret.setNoAdjClose()

    # Additional column names.
    for col, name in columnNames.items():
        ret.setColumnName(col, name)

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for tableCode in tableCodes:
            fileName = os.path.join(storage, "%s-%s-%d-quandl.csv" % (sourceCode, tableCode, year))

            if not os.path.exists(fileName) or forceDownload:
                logger.info("Downloading %s %d to %s" % (tableCode, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(sourceCode, tableCode, year, fileName, authToken)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(sourceCode, tableCode, year, fileName, authToken)
                    else:
                        raise Exception("Invalid frequency")
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        raise e

            ret.addBarsFromCSV(tableCode, fileName)

    return ret
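
As with the mootdx helper, a short usage sketch; the dataset codes, storage directory, and column mapping are placeholders, and an auth token may be needed depending on your Quandl usage.

from mooquant import bar

# Placeholder dataset and table codes; replace with the Quandl datasets you use.
feed = build_feed(
    sourceCode="WIKI",
    tableCodes=["ORCL"],
    fromYear=2011,
    toYear=2012,
    storage="./quandl-data",
    frequency=bar.Frequency.DAY,
    columnNames={"adj_close": "Adj. Close"},  # example mapping for the WIKI adjusted close column
    authToken=None,  # set your Quandl API key here if needed
)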