def serve(barFeed, strategyParameters, address, port):
    """Executes a server that will provide bars and strategy parameters for workers to use.

    :param barFeed: The bar feed that each worker will use to backtest the strategy.
    :type barFeed: :class:`pyalgotrade.barfeed.BarFeed`.
    :param strategyParameters: The set of parameters to use for backtesting. An iterable object where
        **each element is a tuple that holds parameter values**.
    :param address: The address to listen for incoming worker connections.
    :type address: string.
    :param port: The port to listen for incoming worker connections.
    :type port: int.
    :rtype: A :class:`Results` instance with the best results found or None if no results were obtained.
    """

    paramSource = base.ParameterSource(strategyParameters)
    resultSinc = base.ResultSinc()

    s = xmlrpcserver.Server(paramSource, resultSinc, barFeed, address, port)
    logger.info("Starting server")
    s.serve()
    logger.info("Server finished")

    ret = None
    bestResult, bestParameters = resultSinc.getBest()
    if bestResult is not None:
        logger.info("Best final result %s with parameters %s" % (bestResult, bestParameters.args))
        ret = Results(bestParameters.args, bestResult)
    else:
        logger.error("No results. All jobs failed or no jobs were processed.")
    return ret
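
# Illustrative sketch (not part of the original module): the helper below shows how
# serve() is typically driven from the machine that coordinates the optimization.
# The feed choice, CSV file, instrument name and parameter ranges are assumptions
# for demonstration, and it is assumed that the module's Results class exposes
# getParameters() and getResult() accessors.
def _example_serve():
    import itertools
    from pyalgotrade.barfeed import quandlfeed

    # Any pyalgotrade.barfeed.BarFeed works; a CSV-backed feed is used here.
    feed = quandlfeed.Feed()
    feed.addBarsFromCSV("orcl", "orcl-2000-quandl.csv")  # hypothetical data file

    # strategyParameters must be an iterable of tuples; itertools.product yields
    # one tuple per combination of the two hypothetical strategy parameters.
    parameters = itertools.product(range(5, 25), range(25, 50))

    results = serve(feed, parameters, "localhost", 5000)
    if results is not None:
        print("Best parameters:", results.getParameters())
        print("Best result:", results.getResult())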
def run_impl(strategyClass, barFeed, strategyParameters, workerCount=None, logLevel=logging.ERROR, resultSinc=None):
    assert workerCount is None or workerCount > 0
    if workerCount is None:
        workerCount = multiprocessing.cpu_count()

    ret = None
    workers = []
    port = find_port()
    if port is None:
        raise Exception("Failed to find a port to listen")

    # Build and start the server thread before the worker processes.
    # We'll manually stop the server once workers have finished.
    paramSource = base.ParameterSource(strategyParameters)
    if resultSinc is None:
        resultSinc = base.ResultSinc()

    # Create and start the server.
    logger.info("Starting server")
    srv = xmlrpcserver.Server(paramSource, resultSinc, barFeed, "localhost", port, False)
    serverThread = ServerThread(srv)
    serverThread.start()

    try:
        logger.info("Starting workers")
        # Build the worker processes.
        for i in range(workerCount):
            workers.append(multiprocessing.Process(target=worker_process, args=(strategyClass, port, logLevel)))

        # Start the workers.
        for process in workers:
            process.start()

        # Wait for all jobs to complete.
        while srv.jobsPending():
            time.sleep(1)
    finally:
        # Stop the workers.
        for process in workers:
            stop_process(process)

        # Stop the server and wait for it to finish.
        logger.info("Stopping server")
        srv.stop()
        serverThread.join()

    bestResult, bestParameters = resultSinc.getBest()
    if bestResult is not None:
        ret = server.Results(bestParameters.args, bestResult)
    return ret
def run(strategyClass, barFeed, strategyParameters, workerCount=None, logLevel=logging.ERROR):
    """Executes many instances of a strategy in parallel and finds the parameters that yield the best results.

    :param strategyClass: The strategy class.
    :param barFeed: The bar feed to use to backtest the strategy.
    :type barFeed: :class:`pyalgotrade.barfeed.BarFeed`.
    :param strategyParameters: The set of parameters to use for backtesting. An iterable object where
        **each element is a tuple that holds parameter values**.
    :param workerCount: The number of strategies to run in parallel. If None, then as many workers as CPUs are used.
    :type workerCount: int.
    :param logLevel: The log level. Defaults to **logging.ERROR**.
    :rtype: A :class:`Results` instance with the best results found.
    """

    assert workerCount is None or workerCount > 0
    if workerCount is None:
        workerCount = multiprocessing.cpu_count()

    ret = None
    workers = []
    port = find_port()
    if port is None:
        raise Exception("Failed to find a port to listen")

    # Build and start the server thread before the worker processes.
    # We'll manually stop the server once workers have finished.
    paramSource = base.ParameterSource(strategyParameters)
    resultSinc = base.ResultSinc()
    srv = xmlrpcserver.Server(paramSource, resultSinc, barFeed, "localhost", port, False)
    serverThread = ServerThread(srv)
    serverThread.start()

    try:
        # Build the worker processes.
        for i in range(workerCount):
            workers.append(multiprocessing.Process(target=worker_process, args=(strategyClass, port, logLevel)))

        logger.info("Executing workers")

        # Start the workers.
        for process in workers:
            process.start()

        # Wait for the workers to finish.
        for process in workers:
            wait_process(process)
        logger.info("All workers finished")
    finally:
        # Stop the server and wait for it to finish.
        srv.stop()
        serverThread.join()

    bestResult, bestParameters = resultSinc.getBest()
    if bestResult is not None:
        ret = server.Results(bestParameters.args, bestResult)
    return ret
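
# Illustrative sketch (not part of the original module): the helper below shows the
# typical local-optimizer call. MyStrategy, its import path, the CSV file and the
# parameter ranges are assumptions standing in for the user's own strategy and data;
# each parameter tuple is expected to be passed to the strategy constructor (after
# the bar feed) by the worker processes.
def _example_run():
    import itertools
    from pyalgotrade.barfeed import quandlfeed
    from mystrategy import MyStrategy  # hypothetical user strategy module

    feed = quandlfeed.Feed()
    feed.addBarsFromCSV("orcl", "orcl-2000-quandl.csv")  # hypothetical data file

    # One tuple per combination of the two hypothetical strategy parameters.
    parameters = itertools.product(range(5, 25), range(25, 50))

    results = run(MyStrategy, feed, parameters)
    if results is not None:
        print("Best parameters:", results.getParameters())
        print("Best result:", results.getResult())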