def test_cloud_optimize_displays_optimal_backtest_results(
        optimizer_config_manager_mock: mock.Mock, target: str,
        extremum: OptimizationExtremum) -> None:
    """The backtest reported as optimal must match the configured extremum."""
    create_fake_lean_cli_directory()

    fake_project = create_api_project(1, "My Project")

    # Two finished backtests: "1" is the one meeting the final flag, "2" is not
    fake_optimization = create_api_optimization()
    fake_optimization.backtests["1"] = create_api_optimization_backtest(1, True, True, True)
    fake_optimization.backtests["2"] = create_api_optimization_backtest(2, True, True, False)

    api_client = mock.Mock()
    api_client.projects.get_all.return_value = [fake_project]
    api_client.optimizations.estimate.return_value = QCOptimizationEstimate(estimateId="x",
                                                                            time=10,
                                                                            balance=1000)
    container.api_client.override(providers.Object(api_client))

    cloud_runner = mock.Mock()
    cloud_runner.run_optimization.return_value = fake_optimization
    container.cloud_runner.override(providers.Object(cloud_runner))

    optimizer_config_manager_mock.configure_target.return_value = OptimizationTarget(
        target=f"TotalPerformance.PortfolioStatistics.{target}",
        extremum=extremum,
    )

    result = CliRunner().invoke(lean, ["cloud", "optimize", "My Project"])

    assert result.exit_code == 0

    # Maximizing should select backtest 1, minimizing should select backtest 2
    expected_fragment = "id: 1" if extremum == OptimizationExtremum.Maximum else "id: 2"
    assert expected_fragment in result.output
def configure_target(self) -> OptimizationTarget:
    """Asks the user for the optimization target.

    :return: the chosen optimization target
    """
    # Offer every available target twice: once minimizing and once maximizing,
    # labeled like "<target label> (<extremum>)"
    extrema = [OptimizationExtremum.Minimum, OptimizationExtremum.Maximum]

    options = [
        Option(id=OptimizationTarget(target=target_id, extremum=extremum),
               label=f"{target_label} ({extremum})")
        for (target_id, target_label), extremum in itertools.product(self.available_targets, extrema)
    ]

    return self._logger.prompt_list("Select an optimization target", options)
def optimizer_config_manager_mock() -> mock.Mock:
    """A pytest fixture which mocks the optimizer config manager before every test."""
    config_manager = mock.Mock()

    # Borrow the real target list so tests operate on valid target names
    config_manager.available_targets = OptimizerConfigManager(mock.Mock()).available_targets

    config_manager.configure_strategy.return_value = \
        "QuantConnect.Optimizer.Strategies.GridSearchOptimizationStrategy"

    config_manager.configure_target.return_value = OptimizationTarget(
        target="TotalPerformance.PortfolioStatistics.SharpeRatio",
        extremum=OptimizationExtremum.Maximum)

    config_manager.configure_parameters.return_value = [
        OptimizationParameter(name="param1", min=1.0, max=10.0, step=0.5)
    ]

    # Built from a dict because "target-value" is not a valid keyword identifier
    config_manager.configure_constraints.return_value = [
        OptimizationConstraint(**{
            "target": "TotalPerformance.PortfolioStatistics.Drawdown",
            "operator": "less",
            "target-value": "0.25"
        })
    ]

    # configure_node() yields a (node type, parallel node count) tuple
    config_manager.configure_node.return_value = NodeType(name="O8-16",
                                                          ram=16,
                                                          cores=8,
                                                          price=0.6,
                                                          min_nodes=1,
                                                          max_nodes=6,
                                                          default_nodes=3), 3

    container.optimizer_config_manager.override(providers.Object(config_manager))

    return config_manager
def optimize(project: str,
             target: Optional[str],
             target_direction: Optional[str],
             parameter: List[Tuple[str, float, float, float]],
             constraint: List[str],
             node: Optional[str],
             parallel_nodes: Optional[int],
             name: Optional[str],
             push: bool) -> None:
    """Optimize a project in the cloud.

    PROJECT must be the name or id of the project to optimize.

    An interactive prompt will be shown to configure the optimizer.
    If --target is given the command runs in non-interactive mode.
    In this mode the CLI does not prompt for input and the following options become required:
    --target, --target-direction, --parameter, --node and --parallel-nodes.

    \b
    In non-interactive mode the --parameter option can be provided multiple times to configure multiple parameters:
    - --parameter <name> <min value> <max value> <step size>
    - --parameter my-first-parameter 1 10 0.5 --parameter my-second-parameter 20 30 5

    \b
    In non-interactive mode the --constraint option can be provided multiple times to configure multiple constraints:
    - --constraint "<statistic> <operator> <value>"
    - --constraint "Sharpe Ratio >= 0.5" --constraint "Drawdown < 0.25"

    If the project that has to be optimized has been pulled to the local drive
    with `lean cloud pull` it is possible to use the --push option
    to push local modifications to the cloud before running the optimization.
    """
    logger = container.logger()
    api_client = container.api_client()

    cloud_project_manager = container.cloud_project_manager()
    # Resolves the project by name/id; pushes local changes first when --push is set
    cloud_project = cloud_project_manager.get_cloud_project(project, push)

    if name is None:
        # No --name given: generate one for the optimization
        name = container.name_generator().generate_name()

    cloud_runner = container.cloud_runner()
    # The optimization needs a finished compile of the project to run against
    finished_compile = cloud_runner.compile_project(cloud_project)

    optimizer_config_manager = container.optimizer_config_manager()

    organization = api_client.organizations.get(cloud_project.organizationId)

    if target is not None:
        # Non-interactive mode: every optimizer setting must come from CLI options
        ensure_options(["target", "target_direction", "parameter", "node", "parallel_nodes"])

        optimization_strategy = "QuantConnect.Optimizer.Strategies.GridSearchOptimizationStrategy"
        optimization_target = OptimizationTarget(target=optimizer_config_manager.parse_target(target),
                                                 extremum=target_direction)
        optimization_parameters = optimizer_config_manager.parse_parameters(parameter)
        optimization_constraints = optimizer_config_manager.parse_constraints(constraint)

        # NOTE(review): available_nodes is not defined anywhere in this view —
        # presumably a module-level list of cloud node types; confirm it is in scope.
        # Resolve the --node option (a name) into the full node type object.
        node = next(n for n in available_nodes if n.name == node)
        if parallel_nodes < node.min_nodes:
            raise RuntimeError(f"The minimum number of parallel nodes for {node.name} is {node.min_nodes}")
        if parallel_nodes > node.max_nodes:
            raise RuntimeError(f"The maximum number of parallel nodes for {node.name} is {node.max_nodes}")

        _display_estimate(cloud_project, finished_compile, organization, name,
                          optimization_strategy, optimization_target,
                          optimization_parameters, optimization_constraints, node, parallel_nodes)
    else:
        # Interactive mode: prompt the user for each optimizer setting
        optimization_strategy = optimizer_config_manager.configure_strategy(cloud=True)
        optimization_target = optimizer_config_manager.configure_target()
        optimization_parameters = optimizer_config_manager.configure_parameters(cloud_project.parameters, cloud=True)
        optimization_constraints = optimizer_config_manager.configure_constraints()

        # Keep showing cost estimates until the user accepts a node configuration
        while True:
            node, parallel_nodes = optimizer_config_manager.configure_node()

            _display_estimate(cloud_project, finished_compile, organization, name,
                              optimization_strategy, optimization_target,
                              optimization_parameters, optimization_constraints, node, parallel_nodes)

            if click.confirm("Do you want to start the optimization on the selected node type?", default=True):
                break

    optimization = cloud_runner.run_optimization(cloud_project, finished_compile, name,
                                                 optimization_strategy, optimization_target,
                                                 optimization_parameters, optimization_constraints,
                                                 node.name, parallel_nodes)

    # Only successful backtests (exit code 0) which satisfy every configured
    # constraint are candidates for the optimal parameter combination
    backtests = optimization.backtests.values()
    backtests = [b for b in backtests if b.exitCode == 0]
    backtests = [b for b in backtests if _backtest_meets_constraints(b, optimization_constraints)]

    if len(backtests) == 0:
        logger.info("No optimal parameter combination found, no successful backtests meet all constraints")
        return

    # Sort the candidates by the target statistic; the chosen extremum decides
    # whether the best result is the largest or the smallest value
    optimal_backtest = sorted(backtests,
                              key=lambda backtest: _get_backtest_statistic(backtest, optimization_target.target),
                              reverse=optimization_target.extremum == OptimizationExtremum.Maximum)[0]

    parameters = ", ".join(f"{key}: {optimal_backtest.parameterSet[key]}" for key in optimal_backtest.parameterSet)
    logger.info(f"Optimal parameters: {parameters}")

    # Re-fetch the backtest from the API to get its full details/statistics
    optimal_backtest = api_client.backtests.get(cloud_project.projectId, optimal_backtest.id)
    logger.info(f"Optimal backtest id: {optimal_backtest.backtestId}")
    logger.info(f"Optimal backtest name: {optimal_backtest.name}")
    logger.info(f"Optimal backtest results:")
    logger.info(optimal_backtest.get_statistics_table())
def optimize(project: Path,
             output: Optional[Path],
             detach: bool,
             optimizer_config: Optional[Path],
             strategy: Optional[str],
             target: Optional[str],
             target_direction: Optional[str],
             parameter: List[Tuple[str, float, float, float]],
             constraint: List[str],
             release: bool,
             image: Optional[str],
             update: bool) -> None:
    """Optimize a project's parameters locally using Docker.

    \b
    If PROJECT is a directory, the algorithm in the main.py or Main.cs file inside it will be executed.
    If PROJECT is a file, the algorithm in the specified file will be executed.

    By default an interactive wizard is shown letting you configure the optimizer.
    If --optimizer-config or --strategy is given the command runs in non-interactive mode.
    In this mode the CLI does not prompt for input.

    \b
    The --optimizer-config option can be used to specify the configuration to run the optimizer with.
    When using the option it should point to a file like this (the algorithm-* properties should be omitted):
    https://github.com/QuantConnect/Lean/blob/master/Optimizer.Launcher/config.json

    If --strategy is given the optimizer configuration is read from the given options.
    In this case --strategy, --target, --target-direction and --parameter become required.

    \b
    In non-interactive mode the --parameter option can be provided multiple times to configure multiple parameters:
    - --parameter <name> <min value> <max value> <step size>
    - --parameter my-first-parameter 1 10 0.5 --parameter my-second-parameter 20 30 5

    \b
    In non-interactive mode the --constraint option can be provided multiple times to configure multiple constraints:
    - --constraint "<statistic> <operator> <value>"
    - --constraint "Sharpe Ratio >= 0.5" --constraint "Drawdown < 0.25"

    By default the official LEAN engine image is used.
    You can override this using the --image option.
    Alternatively you can set the default engine image for all commands using `lean config set engine-image <image>`.
    """
    project_manager = container.project_manager()
    algorithm_file = project_manager.find_algorithm_file(project)

    if output is None:
        # No --output given: default to a timestamped directory next to the algorithm
        output = algorithm_file.parent / "optimizations" / datetime.now().strftime("%Y-%m-%d_%H-%M-%S")

    optimizer_config_manager = container.optimizer_config_manager()
    config = None

    if optimizer_config is not None and strategy is not None:
        raise RuntimeError("--optimizer-config and --strategy are mutually exclusive")

    if optimizer_config is not None:
        # Config file mode: load the user's optimizer config (json5 allows comments)
        config = json5.loads(optimizer_config.read_text(encoding="utf-8"))

        # Remove keys which are configured in the Lean config
        for key in ["algorithm-type-name", "algorithm-language", "algorithm-location"]:
            config.pop(key, None)
    elif strategy is not None:
        # Non-interactive mode driven entirely by CLI options
        ensure_options(["strategy", "target", "target_direction", "parameter"])

        # "Grid Search" -> "...Strategies.GridSearchOptimizationStrategy"
        optimization_strategy = f"QuantConnect.Optimizer.Strategies.{strategy.replace(' ', '')}OptimizationStrategy"
        optimization_target = OptimizationTarget(target=optimizer_config_manager.parse_target(target),
                                                 extremum=target_direction)
        optimization_parameters = optimizer_config_manager.parse_parameters(parameter)
        optimization_constraints = optimizer_config_manager.parse_constraints(constraint)
    else:
        # Interactive wizard; requires the project to declare parameters to optimize
        project_config_manager = container.project_config_manager()
        project_config = project_config_manager.get_project_config(algorithm_file.parent)
        project_parameters = [QCParameter(key=k, value=v) for k, v in project_config.get("parameters", {}).items()]

        if len(project_parameters) == 0:
            raise MoreInfoError("The given project has no parameters to optimize",
                                "https://www.lean.io/docs/lean-cli/optimization/parameters")

        optimization_strategy = optimizer_config_manager.configure_strategy(cloud=False)
        optimization_target = optimizer_config_manager.configure_target()
        optimization_parameters = optimizer_config_manager.configure_parameters(project_parameters, cloud=False)
        optimization_constraints = optimizer_config_manager.configure_constraints()

    if config is None:
        # Build the optimizer config from the values gathered above
        # (the locals are always bound here because the --optimizer-config
        # branch is the only one that sets config)
        # noinspection PyUnboundLocalVariable
        config = {
            "optimization-strategy": optimization_strategy,
            "optimization-strategy-settings": {
                "$type": "QuantConnect.Optimizer.Strategies.StepBaseOptimizationStrategySettings, QuantConnect.Optimizer",
                "default-segment-amount": 10
            },
            "optimization-criterion": {
                "target": optimization_target.target,
                "extremum": optimization_target.extremum.value
            },
            "parameters": [parameter.dict() for parameter in optimization_parameters],
            "constraints": [constraint.dict(by_alias=True) for constraint in optimization_constraints]
        }

    # Settings required regardless of how the config was created
    config["optimizer-close-automatically"] = True
    config["results-destination-folder"] = "/Results"

    # Write the optimizer config next to the results so it can be mounted into the container
    config_path = output / "optimizer-config.json"
    config_path.parent.mkdir(parents=True, exist_ok=True)
    with config_path.open("w+", encoding="utf-8") as file:
        file.write(json.dumps(config, indent=4) + "\n")

    project_config_manager = container.project_config_manager()
    cli_config_manager = container.cli_config_manager()

    # --image takes precedence over the project-level engine image setting
    project_config = project_config_manager.get_project_config(algorithm_file.parent)
    engine_image = cli_config_manager.get_engine_image(image or project_config.get("engine-image", None))

    lean_config_manager = container.lean_config_manager()
    lean_config = lean_config_manager.get_complete_lean_config("backtesting", algorithm_file, None)

    if not output.exists():
        output.mkdir(parents=True)

    output_config_manager = container.output_config_manager()
    lean_config["algorithm-id"] = str(output_config_manager.get_optimization_id(output))
    lean_config["messaging-handler"] = "QuantConnect.Messaging.Messaging"

    lean_runner = container.lean_runner()
    run_options = lean_runner.get_basic_docker_config(lean_config, algorithm_file, output, None, release, detach)

    # Run the Optimizer.Launcher instead of the regular Lean launcher
    run_options["working_dir"] = "/Lean/Optimizer.Launcher/bin/Debug"
    run_options["commands"].append("dotnet QuantConnect.Optimizer.Launcher.dll")
    run_options["mounts"].append(
        Mount(target="/Lean/Optimizer.Launcher/bin/Debug/config.json",
              source=str(config_path),
              type="bind",
              read_only=True))

    container.update_manager().pull_docker_image_if_necessary(engine_image, update)

    # Snapshot the code used for this optimization alongside its results
    project_manager.copy_code(algorithm_file.parent, output / "code")

    success = container.docker_manager().run_image(engine_image, **run_options)

    logger = container.logger()
    cli_root_dir = container.lean_config_manager().get_cli_root_directory()
    relative_project_dir = project.relative_to(cli_root_dir)
    relative_output_dir = output.relative_to(cli_root_dir)

    if detach:
        # Leave the container (and its temp dirs) alive after the CLI exits
        temp_manager = container.temp_manager()
        temp_manager.delete_temporary_directories_when_done = False

        logger.info(f"Successfully started optimization for '{relative_project_dir}' in the '{run_options['name']}' container")
        logger.info(f"The output will be stored in '{relative_output_dir}'")
        logger.info("You can use Docker's own commands to manage the detached container")
    elif success:
        # Parse the optimizer's log to find the optimal parameter set and backtest id
        optimizer_logs = (output / "log.txt").read_text(encoding="utf-8")
        groups = re.findall(r"ParameterSet: \(([^)]+)\) backtestId '([^']+)'", optimizer_logs)

        if len(groups) > 0:
            optimal_parameters, optimal_id = groups[0]

            # Load the optimal backtest's result file and wrap it in a QCBacktest
            # solely so get_statistics_table() can render it (projectId etc. are dummies)
            optimal_results = json.loads((output / optimal_id / f"{optimal_id}.json").read_text(encoding="utf-8"))
            optimal_backtest = QCBacktest(backtestId=optimal_id,
                                          projectId=1,
                                          status="",
                                          name=optimal_id,
                                          created=datetime.now(),
                                          completed=True,
                                          progress=1.0,
                                          runtimeStatistics=optimal_results["RuntimeStatistics"],
                                          statistics=optimal_results["Statistics"])

            logger.info(f"Optimal parameters: {optimal_parameters.replace(':', ': ').replace(',', ', ')}")
            logger.info(f"Optimal backtest results:")
            logger.info(optimal_backtest.get_statistics_table())

        logger.info(f"Successfully optimized '{relative_project_dir}' and stored the output in '{relative_output_dir}'")
    else:
        raise RuntimeError(f"Something went wrong while running the optimization, the output is stored in '{relative_output_dir}'")