Example #1
def remove(project: Path, name: str, no_local: bool) -> None:
    """Remove a custom library from a project.

    PROJECT must be the path to the project directory.

    NAME must be the name of the NuGet package (for C# projects) or of the PyPI package (for Python projects) to remove.

    Custom C# libraries are removed from the project's .csproj file,
    which is then restored if dotnet is on your PATH and the --no-local flag has not been given.

    Custom Python libraries are removed from the project's requirements.txt file.

    \b
    C# example usage:
    $ lean library remove "My CSharp Project" Microsoft.ML

    \b
    Python example usage:
    $ lean library remove "My Python Project" tensorflow
    """
    project_config = container.project_config_manager().get_project_config(
        project)
    project_language = project_config.get("algorithm-language", None)

    if project_language is None:
        raise MoreInfoError(
            f"{project} is not a Lean CLI project",
            "https://www.lean.io/docs/lean-cli/tutorials/project-management#02-Creating-new-projects"
        )

    if project_language == "CSharp":
        _remove_csharp(project, name, no_local)
    else:
        _remove_python(project, name)
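
The _remove_python helper is not shown in this listing. A minimal sketch of what such a helper might do, assuming it simply filters the package out of the project's requirements.txt by a case-insensitive name match (the helper name, signature and matching rule are assumptions, not the CLI's confirmed implementation):

import re
from pathlib import Path


def _remove_python_sketch(project: Path, name: str) -> None:
    """Hypothetical sketch: drop a PyPI package from the project's requirements.txt."""
    requirements_path = project / "requirements.txt"
    lines = requirements_path.read_text(encoding="utf-8").splitlines()

    def package_name(requirement: str) -> str:
        # Take everything before the first version specifier, e.g. "tensorflow==2.5.0" -> "tensorflow"
        return re.split(r"[=<>~!\[; ]", requirement.strip(), maxsplit=1)[0].lower()

    kept = [line for line in lines if package_name(line) != name.lower()]
    requirements_path.write_text("\n".join(kept) + "\n", encoding="utf-8")
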
Example #2
def add(project: Path, name: str, version: Optional[str],
        no_local: bool) -> None:
    """Add a custom library to a project.

    PROJECT must be the path to the project.

    NAME must be the name of a NuGet package (for C# projects) or of a PyPI package (for Python projects).

    If --version is not given, the package is pinned to the latest compatible version.
    For C# projects, this is the latest available version.
    For Python projects, this is the latest version compatible with Python 3.6 (which is what the Docker images use).

    Custom C# libraries are added to your project's .csproj file,
    which is then restored if dotnet is on your PATH and the --no-local flag has not been given.

    Custom Python libraries are added to your project's requirements.txt file and are installed in your local Python
    environment so you get local autocomplete for the library. The last step can be skipped with the --no-local flag.

    \b
    C# example usage:
    $ lean library add "My CSharp Project" Microsoft.ML
    $ lean library add "My CSharp Project" Microsoft.ML --version 1.5.5

    \b
    Python example usage:
    $ lean library add "My Python Project" tensorflow
    $ lean library add "My Python Project" tensorflow --version 2.5.0
    """
    project_config = container.project_config_manager().get_project_config(
        project)
    project_language = project_config.get("algorithm-language", None)

    if project_language is None:
        raise MoreInfoError(
            f"{project} is not a Lean CLI project",
            "https://www.lean.io/docs/lean-cli/projects/project-management#02-Create-Projects"
        )

    if project_language == "CSharp":
        _add_csharp(project, name, version, no_local)
    else:
        _add_python(project, name, version, no_local)
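
Likewise, _add_python is not shown here. A minimal sketch under the assumption that it appends the requirement to requirements.txt and pip-installs it into the local environment unless --no-local was given; note that the real command resolves the latest compatible version when --version is omitted, which this sketch skips:

import subprocess
import sys
from pathlib import Path
from typing import Optional


def _add_python_sketch(project: Path, name: str, version: Optional[str], no_local: bool) -> None:
    """Hypothetical sketch: add a PyPI package to requirements.txt and optionally install it locally."""
    requirement = f"{name}=={version}" if version is not None else name

    requirements_path = project / "requirements.txt"
    existing = requirements_path.read_text(encoding="utf-8") if requirements_path.is_file() else ""
    prefix = existing.rstrip("\n") + "\n" if existing.strip() else ""
    requirements_path.write_text(prefix + requirement + "\n", encoding="utf-8")

    # Install into the local interpreter so autocomplete works, unless --no-local was given
    if not no_local:
        subprocess.run([sys.executable, "-m", "pip", "install", requirement], check=True)
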
Example #3
def push(project: Optional[Path]) -> None:
    """Push local projects to QuantConnect.

    This command overrides the content of cloud files with the content of their respective local counterparts.

    This command will not delete cloud files which don't have a local counterpart.
    """
    # Parse which projects need to be pushed
    if project is not None:
        project_config_manager = container.project_config_manager()
        if not project_config_manager.get_project_config(
                project).file.exists():
            raise RuntimeError(f"'{project}' is not a Lean project")

        projects_to_push = [project]
    else:
        projects_to_push = [
            p.parent for p in Path.cwd().rglob(PROJECT_CONFIG_FILE_NAME)
        ]

    push_manager = container.push_manager()
    push_manager.push_projects(projects_to_push)
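
The else-branch above discovers projects by recursively searching the working directory for the project config file. A standalone sketch of that discovery step (the value of PROJECT_CONFIG_FILE_NAME is assumed to be config.json here):

from pathlib import Path

PROJECT_CONFIG_FILE_NAME = "config.json"  # assumed value of the constant used above


def find_local_projects(root: Path) -> list:
    """Return every directory under root that contains a project config file."""
    return sorted({path.parent for path in root.rglob(PROJECT_CONFIG_FILE_NAME)})


# Example: list the projects that would be pushed when no project is given
for project_dir in find_local_projects(Path.cwd()):
    print(project_dir)
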
Example #4
def optimize(project: Path, output: Optional[Path],
             optimizer_config: Optional[Path], image: Optional[str],
             update: bool) -> None:
    """Optimize a project's parameters locally using Docker.

    \b
    If PROJECT is a directory, the algorithm in the main.py or Main.cs file inside it will be executed.
    If PROJECT is a file, the algorithm in the specified file will be executed.

    \b
    The --optimizer-config option can be used to specify the configuration to run the optimizer with.
    When using the option it should point to a file like this (the algorithm-* properties should be omitted):
    https://github.com/QuantConnect/Lean/blob/master/Optimizer.Launcher/config.json

    When --optimizer-config is not set, an interactive prompt will be shown to configure the optimizer.

    By default the official LEAN engine image is used.
    You can override this using the --image option.
    Alternatively you can set the default engine image for all commands using `lean config set engine-image <image>`.
    """
    project_manager = container.project_manager()
    algorithm_file = project_manager.find_algorithm_file(project)

    if output is None:
        output = algorithm_file.parent / "optimizations" / datetime.now().strftime("%Y-%m-%d_%H-%M-%S")

    if optimizer_config is None:
        project_config_manager = container.project_config_manager()
        project_config = project_config_manager.get_project_config(
            algorithm_file.parent)
        project_parameters = [
            QCParameter(key=k, value=v)
            for k, v in project_config.get("parameters", {}).items()
        ]

        if len(project_parameters) == 0:
            raise MoreInfoError(
                "The given project has no parameters to optimize",
                "https://www.lean.io/docs/lean-cli/tutorials/optimization/project-parameters"
            )

        optimizer_config_manager = container.optimizer_config_manager()
        optimization_strategy = optimizer_config_manager.configure_strategy(
            cloud=False)
        optimization_target = optimizer_config_manager.configure_target()
        optimization_parameters = optimizer_config_manager.configure_parameters(
            project_parameters, cloud=False)
        optimization_constraints = optimizer_config_manager.configure_constraints()

        config = {
            "optimization-strategy": optimization_strategy,
            "optimization-strategy-settings": {
                "$type": "QuantConnect.Optimizer.Strategies.StepBaseOptimizationStrategySettings, QuantConnect.Optimizer",
                "default-segment-amount": 10
            },
            "optimization-criterion": {
                "target": optimization_target.target,
                "extremum": optimization_target.extremum.value
            },
            "parameters": [parameter.dict() for parameter in optimization_parameters],
            "constraints": [constraint.dict(by_alias=True) for constraint in optimization_constraints]
        }
    else:
        config = json5.loads(optimizer_config.read_text(encoding="utf-8"))

        # Remove keys which are configured in the Lean config
        for key in [
                "algorithm-type-name", "algorithm-language",
                "algorithm-location"
        ]:
            config.pop(key, None)

    config["optimizer-close-automatically"] = True
    config["results-destination-folder"] = "/Results"

    config_path = output / "optimizer-config.json"
    config_path.parent.mkdir(parents=True, exist_ok=True)
    with config_path.open("w+", encoding="utf-8") as file:
        file.write(json.dumps(config, indent=4) + "\n")

    cli_config_manager = container.cli_config_manager()
    engine_image = cli_config_manager.get_engine_image(image)

    lean_config_manager = container.lean_config_manager()
    lean_config = lean_config_manager.get_complete_lean_config(
        "backtesting", algorithm_file, None, None)

    lean_runner = container.lean_runner()
    run_options = lean_runner.get_basic_docker_config(lean_config,
                                                      algorithm_file, output,
                                                      None)

    run_options["working_dir"] = "/Lean/Optimizer.Launcher/bin/Debug"
    run_options["commands"].append(
        "dotnet QuantConnect.Optimizer.Launcher.dll")
    run_options["mounts"].append(
        Mount(target="/Lean/Optimizer.Launcher/bin/Debug/config.json",
              source=str(config_path),
              type="bind",
              read_only=True))

    docker_manager = container.docker_manager()

    if update or not docker_manager.supports_dotnet_5(engine_image):
        docker_manager.pull_image(engine_image)

    success = docker_manager.run_image(engine_image, **run_options)

    cli_root_dir = container.lean_config_manager().get_cli_root_directory()
    relative_project_dir = project.relative_to(cli_root_dir)
    relative_output_dir = output.relative_to(cli_root_dir)

    if success:
        logger = container.logger()

        optimizer_logs = (output / "log.txt").read_text(encoding="utf-8")
        groups = re.findall(r"ParameterSet: \(([^)]+)\) backtestId '([^']+)'",
                            optimizer_logs)

        if len(groups) > 0:
            optimal_parameters, optimal_id = groups[0]

            optimal_results = json.loads(
                (output / optimal_id /
                 f"{optimal_id}.json").read_text(encoding="utf-8"))
            optimal_backtest = QCBacktest(
                backtestId=optimal_id,
                projectId=1,
                status="",
                name=optimal_id,
                created=datetime.now(),
                completed=True,
                progress=1.0,
                runtimeStatistics=optimal_results["RuntimeStatistics"],
                statistics=optimal_results["Statistics"])

            logger.info(
                f"Optimal parameters: {optimal_parameters.replace(':', ': ').replace(',', ', ')}"
            )
            logger.info(f"Optimal backtest results:")
            logger.info(optimal_backtest.get_statistics_table())

        logger.info(
            f"Successfully optimized '{relative_project_dir}' and stored the output in '{relative_output_dir}'"
        )
    else:
        raise RuntimeError(
            f"Something went wrong while running the optimization, the output is stored in '{relative_output_dir}'"
        )

    if str(engine_image) == DEFAULT_ENGINE_IMAGE and not update:
        update_manager = container.update_manager()
        update_manager.warn_if_docker_image_outdated(engine_image)
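
The optimal parameter set is recovered by scanning the optimizer log with the regular expression used above. A small standalone sketch with a hypothetical log line (only the regex comes from the command; the log excerpt is illustrative):

import re

# Hypothetical log excerpt; only the regex below is taken from the command above
optimizer_logs = "ParameterSet: (ema-fast:10,ema-slow:50) backtestId '1234567890'"

groups = re.findall(r"ParameterSet: \(([^)]+)\) backtestId '([^']+)'", optimizer_logs)
if groups:
    optimal_parameters, optimal_id = groups[0]
    print(optimal_parameters.replace(":", ": ").replace(",", ", "))  # ema-fast: 10, ema-slow: 50
    print(optimal_id)  # 1234567890
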
Example #5
def research(project: Path, port: int, data_provider: Optional[str],
             download_data: bool, data_purchase_limit: Optional[int],
             detach: bool, no_open: bool, image: Optional[str],
             update: bool) -> None:
    """Run a Jupyter Lab environment locally using Docker.

    By default the official LEAN research image is used.
    You can override this using the --image option.
    Alternatively you can set the default research image using `lean config set research-image <image>`.
    """
    project_manager = container.project_manager()
    algorithm_file = project_manager.find_algorithm_file(project)

    lean_config_manager = container.lean_config_manager()
    lean_config = lean_config_manager.get_complete_lean_config(
        "backtesting", algorithm_file, None)
    lean_config["composer-dll-directory"] = "/Lean/Launcher/bin/Debug"

    if download_data:
        data_provider = QuantConnectDataProvider.get_name()

    if data_provider is not None:
        data_provider = next(dp for dp in all_data_providers
                             if dp.get_name() == data_provider)
        data_provider.build(lean_config, container.logger()).configure(
            lean_config, "backtesting")

    lean_config_manager.configure_data_purchase_limit(lean_config,
                                                      data_purchase_limit)

    lean_runner = container.lean_runner()
    temp_manager = container.temp_manager()
    run_options = lean_runner.get_basic_docker_config(
        lean_config, algorithm_file, temp_manager.create_temporary_directory(),
        None, False, detach)

    # Mount the config in the notebooks directory as well
    local_config_path = next(m["Source"] for m in run_options["mounts"]
                             if m["Target"].endswith("config.json"))
    run_options["mounts"].append(
        Mount(target="/Lean/Launcher/bin/Debug/Notebooks/config.json",
              source=str(local_config_path),
              type="bind",
              read_only=True))

    # Jupyter Lab runs on port 8888, we expose it to the local port specified by the user
    run_options["ports"]["8888"] = str(port)

    # Open the browser as soon as Jupyter Lab has started
    if detach or not no_open:
        run_options["on_output"] = lambda chunk: _check_docker_output(
            chunk, port)

    # Give container an identifiable name when running it from the GUI
    if container.module_manager().is_module_installed(GUI_PRODUCT_INSTALL_ID):
        project_id = container.project_config_manager().get_local_id(
            algorithm_file.parent)
        run_options["name"] = f"lean_cli_gui_research_{project_id}"

    # Make Ctrl+C stop Jupyter Lab immediately
    run_options["stop_signal"] = "SIGKILL"

    # Mount the project to the notebooks directory
    run_options["volumes"][str(project)] = {
        "bind": "/Lean/Launcher/bin/Debug/Notebooks",
        "mode": "rw"
    }

    # Add references to all DLLs in QuantConnect.csx so custom C# libraries can be imported with using statements
    run_options["commands"].append(" && ".join([
        'find . -maxdepth 1 -iname "*.dll" | xargs -I _ echo \'#r "_"\' | cat - QuantConnect.csx > NewQuantConnect.csx',
        "mv NewQuantConnect.csx QuantConnect.csx"
    ]))

    # Allow notebooks to be embedded in iframes
    run_options["commands"].append("mkdir -p ~/.jupyter")
    run_options["commands"].append(
        'echo "c.NotebookApp.disable_check_xsrf = True\nc.NotebookApp.tornado_settings = {\'headers\': {\'Content-Security-Policy\': \'frame-ancestors self *\'}}" > ~/.jupyter/jupyter_notebook_config.py'
    )

    # Hide headers in notebooks
    run_options["commands"].append(
        "mkdir -p ~/.ipython/profile_default/static/custom")
    run_options["commands"].append(
        'echo "#header-container { display: none !important; }" > ~/.ipython/profile_default/static/custom/custom.css'
    )

    # Run the script that starts Jupyter Lab when all set up has been done
    run_options["commands"].append("./start.sh")

    project_config_manager = container.project_config_manager()
    cli_config_manager = container.cli_config_manager()

    project_config = project_config_manager.get_project_config(
        algorithm_file.parent)
    research_image = cli_config_manager.get_research_image(
        image or project_config.get("research-image", None))

    container.update_manager().pull_docker_image_if_necessary(
        research_image, update)

    try:
        container.docker_manager().run_image(research_image, **run_options)
    except APIError as error:
        msg = error.explanation
        if isinstance(msg, str) and any(m in msg.lower() for m in [
                "port is already allocated",
                "ports are not available",
                "an attempt was made to access a socket in a way forbidden by its access permissions"
        ]):
            raise RuntimeError(
                f"Port {port} is already in use, please specify a different port using --port <number>"
            )
        raise error

    if detach:
        temp_manager.delete_temporary_directories_when_done = False

        logger = container.logger()
        relative_project_dir = algorithm_file.parent.relative_to(
            lean_config_manager.get_cli_root_directory())

        logger.info(
            f"Successfully started Jupyter Lab environment for '{relative_project_dir}' in the '{run_options['name']}' container"
        )
        logger.info(
            "You can use Docker's own commands to manage the detached container"
        )
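
The _check_docker_output callback passed as on_output is not shown in this listing. A hypothetical sketch, assuming it opens the browser once Jupyter Lab reports that it is running; the exact string matched against the container output is an assumption, and the URL mirrors the on_run callback in the next example:

import webbrowser

_browser_opened = False


def _check_docker_output_sketch(chunk: str, port: int) -> None:
    """Hypothetical on_output callback: open the browser once Jupyter Lab reports it is running."""
    global _browser_opened
    # The exact message Jupyter Lab prints is an assumption here
    if not _browser_opened and "is running at" in chunk:
        _browser_opened = True
        webbrowser.open(f"http://localhost:{port}/")
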
Example #6
def research(project: Path, port: int, image: Optional[str], update: bool) -> None:
    """Run a Jupyter Lab environment locally using Docker.

    By default the official LEAN research image is used.
    You can override this using the --image option.
    Alternatively you can set the default research image using `lean config set research-image <image>`.
    """
    cli_config_manager = container.cli_config_manager()

    project_config_manager = container.project_config_manager()
    project_config = project_config_manager.get_project_config(project)

    # Copy the config to a temporary config file before we add some research-specific configuration to it
    config_path = container.temp_manager().create_temporary_directory() / "config.json"
    project_config.file = config_path

    project_config.set("composer-dll-directory", "/Lean/Launcher/bin/Debug")
    project_config.set("messaging-handler", "QuantConnect.Messaging.Messaging")
    project_config.set("job-queue-handler", "QuantConnect.Queues.JobQueue")
    project_config.set("api-handler", "QuantConnect.Api.Api")
    project_config.set("job-user-id", cli_config_manager.user_id.get_value("1"))
    project_config.set("api-access-token", cli_config_manager.api_token.get_value("default"))

    lean_config_manager = container.lean_config_manager()
    data_dir = lean_config_manager.get_data_directory()

    run_options = {
        "mounts": [
            Mount(target="/Lean/Launcher/bin/Debug/Notebooks/config.json",
                  source=str(config_path),
                  type="bind",
                  read_only=True)
        ],
        "volumes": {
            str(data_dir): {
                "bind": "/Lean/Launcher/Data",
                "mode": "rw"
            },
            str(project): {
                "bind": "/Lean/Launcher/bin/Debug/Notebooks",
                "mode": "rw"
            }
        },
        "ports": {
            "8888": str(port)
        },
        "on_run": lambda: webbrowser.open(f"http://localhost:{port}/")
    }

    cli_config_manager = container.cli_config_manager()
    research_image = cli_config_manager.get_research_image(image)

    docker_manager = container.docker_manager()

    if update or not docker_manager.supports_dotnet_5(research_image):
        docker_manager.pull_image(research_image)

    if str(research_image) == DEFAULT_RESEARCH_IMAGE and not update:
        update_manager = container.update_manager()
        update_manager.warn_if_docker_image_outdated(research_image)

    try:
        docker_manager.run_image(research_image, **run_options)
    except APIError as error:
        if "port is already allocated" in error.explanation:
            raise RuntimeError(f"Port {port} is already in use, please specify a different port using --port <number>")
        raise error
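
The mounts, volumes and ports in run_options are presumably handed to the Docker SDK by docker_manager.run_image. A minimal standalone sketch of the same bind-mount and port-mapping pattern using docker-py directly (the image tag and local paths are hypothetical):

import docker
from docker.types import Mount

client = docker.from_env()

# Bind-mount a config file read-only and expose Jupyter Lab's port, as done above
config_mount = Mount(target="/Lean/Launcher/bin/Debug/Notebooks/config.json",
                     source="/absolute/path/to/config.json",  # hypothetical local path
                     type="bind",
                     read_only=True)

client.containers.run("quantconnect/research:latest",  # hypothetical image tag
                      mounts=[config_mount],
                      ports={"8888": "8888"},
                      detach=True)
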
Example #7
def report(backtest_data_source_file: Path,
           live_data_source_file: Optional[Path],
           report_destination: Path,
           strategy_name: Optional[str],
           strategy_version: Optional[str],
           strategy_description: Optional[str],
           overwrite: bool,
           image: Optional[str],
           update: bool) -> None:
    """Generate a report of a backtest.

    This runs the LEAN Report Creator in Docker to generate a polished, professional-grade report of a backtest.

    The name, description, and version are optional and will be blank if not given.

    If the given backtest data source file is stored in a project directory (or one of its subdirectories, like the
    default <project>/backtests/<timestamp>), the default name is the name of the project directory and the default
    description is the description stored in the project's config.json file.

    By default the official LEAN engine image is used.
    You can override this using the --image option.
    Alternatively you can set the default engine image for all commands using `lean config set engine-image <image>`.
    """
    if report_destination.exists() and not overwrite:
        raise RuntimeError(f"{report_destination} already exists, use --overwrite to overwrite it")

    project_directory = _find_project_directory(backtest_data_source_file)

    if project_directory is not None:
        if strategy_name is None:
            strategy_name = project_directory.name

        if strategy_description is None:
            project_config_manager = container.project_config_manager()
            project_config = project_config_manager.get_project_config(project_directory)
            strategy_description = project_config.get("description", "")

    # The configuration given to the report creator
    # See https://github.com/QuantConnect/Lean/blob/master/Report/config.example.json
    report_config = {
        "data-folder": "/Lean/Data",
        "strategy-name": strategy_name or "",
        "strategy-version": strategy_version or "",
        "strategy-description": strategy_description or "",
        "live-data-source-file": "live-data-source-file.json" if live_data_source_file is not None else "",
        "backtest-data-source-file": "backtest-data-source-file.json",
        "report-destination": "/Results/report.html",

        "environment": "report",

        "log-handler": "QuantConnect.Logging.CompositeLogHandler",
        "messaging-handler": "QuantConnect.Messaging.Messaging",
        "job-queue-handler": "QuantConnect.Queues.JobQueue",
        "api-handler": "QuantConnect.Api.Api",
        "map-file-provider": "QuantConnect.Data.Auxiliary.LocalDiskMapFileProvider",
        "factor-file-provider": "QuantConnect.Data.Auxiliary.LocalDiskFactorFileProvider",
        "data-provider": "QuantConnect.Lean.Engine.DataFeeds.DefaultDataProvider",
        "alpha-handler": "QuantConnect.Lean.Engine.Alphas.DefaultAlphaHandler",
        "data-channel-provider": "DataChannelProvider",

        "environments": {
            "report": {
                "live-mode": False,

                "setup-handler": "QuantConnect.Lean.Engine.Setup.ConsoleSetupHandler",
                "result-handler": "QuantConnect.Lean.Engine.Results.BacktestingResultHandler",
                "data-feed-handler": "QuantConnect.Lean.Engine.DataFeeds.FileSystemDataFeed",
                "real-time-handler": "QuantConnect.Lean.Engine.RealTime.BacktestingRealTimeHandler",
                "history-provider": "QuantConnect.Lean.Engine.HistoricalData.SubscriptionDataReaderHistoryProvider",
                "transaction-handler": "QuantConnect.Lean.Engine.TransactionHandlers.BacktestingTransactionHandler"
            }
        }
    }

    output_dir = container.temp_manager().create_temporary_directory()

    config_path = output_dir / "config.json"
    with config_path.open("w+", encoding="utf-8") as file:
        json.dump(report_config, file)

    lean_config_manager = container.lean_config_manager()
    data_dir = lean_config_manager.get_data_directory()

    run_options: Dict[str, Any] = {
        "working_dir": "/Lean/Report/bin/Debug",
        "entrypoint": ["dotnet", "QuantConnect.Report.dll"],
        "mounts": [
            Mount(target="/Lean/Report/bin/Debug/config.json",
                  source=str(config_path),
                  type="bind",
                  read_only=True),
            Mount(target="/Lean/Report/bin/Debug/backtest-data-source-file.json",
                  source=str(backtest_data_source_file),
                  type="bind",
                  read_only=True)
        ],
        "volumes": {
            str(data_dir): {
                "bind": "/Lean/Data",
                "mode": "ro"
            },
            str(output_dir): {
                "bind": "/Results",
                "mode": "rw"
            }
        }
    }

    if live_data_source_file is not None:
        run_options["mounts"].append(Mount(target="/Lean/Report/bin/Debug/live-data-source-file.json",
                                           source=str(live_data_source_file),
                                           type="bind",
                                           read_only=True))

    cli_config_manager = container.cli_config_manager()
    engine_image = cli_config_manager.get_engine_image(image)

    docker_manager = container.docker_manager()

    if update or not docker_manager.supports_dotnet_5(engine_image):
        docker_manager.pull_image(engine_image)

    success = docker_manager.run_image(engine_image, **run_options)
    if not success:
        raise RuntimeError(
            "Something went wrong while running the LEAN Report Creator, see the logs above for more information")

    report_destination.parent.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(output_dir / "report.html", report_destination)

    logger = container.logger()
    logger.info(f"Successfully generated report to '{report_destination}'")

    if str(engine_image) == DEFAULT_ENGINE_IMAGE and not update:
        update_manager = container.update_manager()
        update_manager.warn_if_docker_image_outdated(engine_image)
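
The _find_project_directory helper is not shown in this listing. A hypothetical sketch, assuming it walks up from the backtest data source file until it reaches a directory containing the project's config.json (as described in the docstring above); the name-based check is an assumption:

from pathlib import Path
from typing import Optional


def _find_project_directory_sketch(backtest_file: Path) -> Optional[Path]:
    """Hypothetical sketch: walk up from the backtest file until a directory with a config.json is found."""
    for directory in backtest_file.resolve().parents:
        if (directory / "config.json").is_file():
            return directory
    return None
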
Example #8
def report(backtest_results: Optional[Path], live_results: Optional[Path],
           report_destination: Path, detach: bool,
           strategy_name: Optional[str], strategy_version: Optional[str],
           strategy_description: Optional[str], overwrite: bool,
           image: Optional[str], update: bool) -> None:
    """Generate a report of a backtest.

    This runs the LEAN Report Creator in Docker to generate a polished, professional-grade report of a backtest.

    If --backtest-results is not given, a report is generated for the most recent local backtest.

    The name, description, and version are optional and will be blank if not given.

    If the given backtest data source file is stored in a project directory (or one of its subdirectories, like the
    default <project>/backtests/<timestamp>), the default name is the name of the project directory and the default
    description is the description stored in the project's config.json file.

    By default the official LEAN engine image is used.
    You can override this using the --image option.
    Alternatively you can set the default engine image for all commands using `lean config set engine-image <image>`.
    """
    if report_destination.exists() and not overwrite:
        raise RuntimeError(
            f"{report_destination} already exists, use --overwrite to overwrite it"
        )

    if backtest_results is None:
        backtest_json_files = list(Path.cwd().rglob("backtests/*/*.json"))
        result_json_files = [
            f for f in backtest_json_files
            if not f.name.endswith("-order-events.json")
            and not f.name.endswith("alpha-results.json")
        ]

        if len(result_json_files) == 0:
            raise MoreInfoError(
                "Could not find a recent backtest result file, please use the --backtest-results option",
                "https://www.lean.io/docs/lean-cli/backtesting/report#02-Generate-a-Report"
            )

        backtest_results = sorted(result_json_files,
                                  key=lambda f: f.stat().st_mtime,
                                  reverse=True)[0]

    logger = container.logger()

    if live_results is None:
        logger.info(f"Generating a report from '{backtest_results}'")
    else:
        logger.info(
            f"Generating a report from '{backtest_results}' and '{live_results}'"
        )

    project_directory = _find_project_directory(backtest_results)

    if project_directory is not None:
        if strategy_name is None:
            strategy_name = project_directory.name

        if strategy_description is None:
            project_config_manager = container.project_config_manager()
            project_config = project_config_manager.get_project_config(
                project_directory)
            strategy_description = project_config.get("description", "")

    # The configuration given to the report creator
    # See https://github.com/QuantConnect/Lean/blob/master/Report/config.example.json
    report_config = {
        "data-folder": "/Lean/Data",
        "strategy-name": strategy_name or "",
        "strategy-version": strategy_version or "",
        "strategy-description": strategy_description or "",
        "live-data-source-file": "live-data-source-file.json" if live_results is not None else "",
        "backtest-data-source-file": "backtest-data-source-file.json",
        "report-destination": "/tmp/report.html",

        "environment": "report",

        "log-handler": "QuantConnect.Logging.CompositeLogHandler",
        "messaging-handler": "QuantConnect.Messaging.Messaging",
        "job-queue-handler": "QuantConnect.Queues.JobQueue",
        "api-handler": "QuantConnect.Api.Api",
        "map-file-provider": "QuantConnect.Data.Auxiliary.LocalDiskMapFileProvider",
        "factor-file-provider": "QuantConnect.Data.Auxiliary.LocalDiskFactorFileProvider",
        "data-provider": "QuantConnect.Lean.Engine.DataFeeds.DefaultDataProvider",
        "alpha-handler": "QuantConnect.Lean.Engine.Alphas.DefaultAlphaHandler",
        "data-channel-provider": "DataChannelProvider",

        "environments": {
            "report": {
                "live-mode": False,

                "setup-handler": "QuantConnect.Lean.Engine.Setup.ConsoleSetupHandler",
                "result-handler": "QuantConnect.Lean.Engine.Results.BacktestingResultHandler",
                "data-feed-handler": "QuantConnect.Lean.Engine.DataFeeds.FileSystemDataFeed",
                "real-time-handler": "QuantConnect.Lean.Engine.RealTime.BacktestingRealTimeHandler",
                "history-provider": "QuantConnect.Lean.Engine.HistoricalData.SubscriptionDataReaderHistoryProvider",
                "transaction-handler": "QuantConnect.Lean.Engine.TransactionHandlers.BacktestingTransactionHandler"
            }
        }
    }

    config_path = container.temp_manager().create_temporary_directory() / "config.json"
    with config_path.open("w+", encoding="utf-8") as file:
        json.dump(report_config, file)

    backtest_id = container.output_config_manager().get_backtest_id(
        backtest_results.parent)

    lean_config_manager = container.lean_config_manager()
    data_dir = lean_config_manager.get_data_directory()

    report_destination.parent.mkdir(parents=True, exist_ok=True)

    run_options: Dict[str, Any] = {
        "detach": detach,
        "name": f"lean_cli_report_{backtest_id}",
        "working_dir": "/Lean/Report/bin/Debug",
        "commands": [
            "dotnet QuantConnect.Report.dll",
            f'cp /tmp/report.html "/Output/{report_destination.name}"'
        ],
        "mounts": [
            Mount(target="/Lean/Report/bin/Debug/config.json",
                  source=str(config_path),
                  type="bind",
                  read_only=True),
            Mount(
                target="/Lean/Report/bin/Debug/backtest-data-source-file.json",
                source=str(backtest_results),
                type="bind",
                read_only=True)
        ],
        "volumes": {
            str(data_dir): {
                "bind": "/Lean/Data",
                "mode": "rw"
            },
            str(report_destination.parent): {
                "bind": "/Output",
                "mode": "rw"
            }
        }
    }

    if live_results is not None:
        run_options["mounts"].append(
            Mount(target="/Lean/Report/bin/Debug/live-data-source-file.json",
                  source=str(live_results),
                  type="bind",
                  read_only=True))

    cli_config_manager = container.cli_config_manager()
    engine_image_override = image

    if engine_image_override is None and project_directory is not None:
        project_config_manager = container.project_config_manager()
        project_config = project_config_manager.get_project_config(
            project_directory)
        engine_image_override = project_config.get("engine-image", None)

    engine_image = cli_config_manager.get_engine_image(engine_image_override)

    container.update_manager().pull_docker_image_if_necessary(
        engine_image, update)

    success = container.docker_manager().run_image(engine_image, **run_options)
    if not success:
        raise RuntimeError(
            "Something went wrong while running the LEAN Report Creator, see the logs above for more information"
        )

    if detach:
        temp_manager = container.temp_manager()
        temp_manager.delete_temporary_directories_when_done = False

        logger.info(
            f"Successfully started the report creator in the '{run_options['name']}' container"
        )
        logger.info(f"The report will be generated to '{report_destination}'")
        logger.info(
            "You can use Docker's own commands to manage the detached container"
        )
        return

    logger.info(f"Successfully generated report to '{report_destination}'")
Example #9
def backtest(project: Path, output: Optional[Path], detach: bool,
             debug: Optional[str], data_provider: Optional[str],
             download_data: bool, data_purchase_limit: Optional[int],
             release: bool, image: Optional[str], update: bool) -> None:
    """Backtest a project locally using Docker.

    \b
    If PROJECT is a directory, the algorithm in the main.py or Main.cs file inside it will be executed.
    If PROJECT is a file, the algorithm in the specified file will be executed.

    \b
    Go to the following url to learn how to debug backtests locally using the Lean CLI:
    https://www.lean.io/docs/lean-cli/tutorials/backtesting/debugging

    By default the official LEAN engine image is used.
    You can override this using the --image option.
    Alternatively you can set the default engine image for all commands using `lean config set engine-image <image>`.
    """
    project_manager = container.project_manager()
    algorithm_file = project_manager.find_algorithm_file(Path(project))
    lean_config_manager = container.lean_config_manager()

    if output is None:
        output = algorithm_file.parent / "backtests" / datetime.now().strftime(
            "%Y-%m-%d_%H-%M-%S")

    debugging_method = None
    if debug == "pycharm":
        debugging_method = DebuggingMethod.PyCharm
        _migrate_python_pycharm(algorithm_file.parent)
    elif debug == "ptvsd":
        debugging_method = DebuggingMethod.PTVSD
        _migrate_python_vscode(algorithm_file.parent)
    elif debug == "vsdbg":
        debugging_method = DebuggingMethod.VSDBG
        _migrate_csharp_vscode(algorithm_file.parent)
    elif debug == "rider":
        debugging_method = DebuggingMethod.Rider
        _migrate_csharp_rider(algorithm_file.parent)

    if debugging_method is not None and detach:
        raise RuntimeError(
            "Running a debugging session in a detached container is not supported"
        )

    if algorithm_file.name.endswith(".cs"):
        _migrate_csharp_csproj(algorithm_file.parent)

    lean_config = lean_config_manager.get_complete_lean_config(
        "backtesting", algorithm_file, debugging_method)

    if download_data:
        data_provider = QuantConnectDataProvider.get_name()

    if data_provider is not None:
        data_provider = next(dp for dp in all_data_providers
                             if dp.get_name() == data_provider)
        data_provider.build(lean_config, container.logger()).configure(
            lean_config, "backtesting")

    lean_config_manager.configure_data_purchase_limit(lean_config,
                                                      data_purchase_limit)

    cli_config_manager = container.cli_config_manager()
    project_config_manager = container.project_config_manager()

    project_config = project_config_manager.get_project_config(
        algorithm_file.parent)
    engine_image = cli_config_manager.get_engine_image(
        image or project_config.get("engine-image", None))

    container.update_manager().pull_docker_image_if_necessary(
        engine_image, update)

    if not output.exists():
        output.mkdir(parents=True)

    output_config_manager = container.output_config_manager()
    lean_config["algorithm-id"] = str(
        output_config_manager.get_backtest_id(output))

    lean_runner = container.lean_runner()
    lean_runner.run_lean(lean_config, "backtesting", algorithm_file, output,
                         engine_image, debugging_method, release, detach)
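
The if/elif chain mapping the --debug value to a DebuggingMethod and a migration helper could also be written as a lookup table. A sketch of that alternative, reusing only names that already appear in the example above (a drop-in refactor of those lines, not a standalone snippet):

# Sketch of a table-driven alternative to the if/elif chain above
DEBUG_METHODS = {
    "pycharm": (DebuggingMethod.PyCharm, _migrate_python_pycharm),
    "ptvsd": (DebuggingMethod.PTVSD, _migrate_python_vscode),
    "vsdbg": (DebuggingMethod.VSDBG, _migrate_csharp_vscode),
    "rider": (DebuggingMethod.Rider, _migrate_csharp_rider),
}

debugging_method = None
if debug in DEBUG_METHODS:
    debugging_method, migrate = DEBUG_METHODS[debug]
    migrate(algorithm_file.parent)
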
Example #10
def research(project: Path, port: int, image: Optional[str], update: bool) -> None:
    """Run a Jupyter Lab environment locally using Docker.

    By default the official LEAN research image is used.
    You can override this using the --image option.
    Alternatively you can set the default research image using `lean config set research-image <image>`.
    """
    cli_config_manager = container.cli_config_manager()

    project_config_manager = container.project_config_manager()
    project_config = project_config_manager.get_project_config(project)

    # Copy the config to a temporary config file before we add some research-specific configuration to it
    config_path = container.temp_manager().create_temporary_directory() / "config.json"
    project_config.file = config_path

    project_config.set("composer-dll-directory", "/Lean/Launcher/bin/Debug")
    project_config.set("messaging-handler", "QuantConnect.Messaging.Messaging")
    project_config.set("job-queue-handler", "QuantConnect.Queues.JobQueue")
    project_config.set("api-handler", "QuantConnect.Api.Api")
    project_config.set("job-user-id", cli_config_manager.user_id.get_value("1"))
    project_config.set("api-access-token", cli_config_manager.api_token.get_value("default"))

    lean_config_manager = container.lean_config_manager()
    data_dir = lean_config_manager.get_data_directory()

    run_options: Dict[str, Any] = {
        "commands": [],
        "environment": {},
        "mounts": [
            Mount(target="/Lean/Launcher/bin/Debug/Notebooks/config.json",
                  source=str(config_path),
                  type="bind",
                  read_only=True)
        ],
        "volumes": {
            str(data_dir): {
                "bind": "/Lean/Launcher/Data",
                "mode": "rw"
            }
        },
        "ports": {
            "8888": str(port)
        },
        "on_output": lambda chunk: _check_docker_output(chunk, port)
    }

    lean_runner = container.lean_runner()
    if project_config.get("algorithm-language", "Python") == "Python":
        lean_runner.set_up_python_options(project, "/Lean/Launcher/bin/Debug/Notebooks", run_options)
    else:
        lean_runner.set_up_csharp_options(project, run_options)
        run_options["volumes"][str(project)] = {
            "bind": "/Lean/Launcher/bin/Debug/Notebooks",
            "mode": "rw"
        }

    # Add references to all DLLs in QuantConnect.csx so custom C# libraries can be imported with using statements
    run_options["commands"].append(" && ".join([
        'find . -maxdepth 1 -iname "*.dll" | xargs -I _ echo \'#r "_"\' | cat - QuantConnect.csx > NewQuantConnect.csx',
        "mv NewQuantConnect.csx QuantConnect.csx"
    ]))

    # Run the script that starts Jupyter Lab when all set up has been done
    run_options["commands"].append("./start.sh")

    cli_config_manager = container.cli_config_manager()
    research_image = cli_config_manager.get_research_image(image)

    docker_manager = container.docker_manager()

    if update or not docker_manager.supports_dotnet_5(research_image):
        docker_manager.pull_image(research_image)

    if str(research_image) == DEFAULT_RESEARCH_IMAGE and not update:
        update_manager = container.update_manager()
        update_manager.warn_if_docker_image_outdated(research_image)

    try:
        docker_manager.run_image(research_image, **run_options)
    except APIError as error:
        msg = error.explanation
        if isinstance(msg, str) and "port is already allocated" in msg:
            raise RuntimeError(f"Port {port} is already in use, please specify a different port using --port <number>")
        raise error
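
The shell pipeline appended to run_options["commands"] prepends a '#r "<dll>"' line for every DLL in the working directory to QuantConnect.csx. A rough Python equivalent of that step, assuming it runs in the same working directory (the real command does this inside the container with find and xargs):

from pathlib import Path


def prepend_dll_references(workdir: Path) -> None:
    """Rough Python equivalent of the shell pipeline above: reference every DLL from QuantConnect.csx."""
    csx_path = workdir / "QuantConnect.csx"
    references = "".join(f'#r "{dll.name}"\n' for dll in sorted(workdir.glob("*.dll")))
    csx_path.write_text(references + csx_path.read_text(encoding="utf-8"), encoding="utf-8")
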
Example #11
def optimize(project: Path, output: Optional[Path], detach: bool,
             optimizer_config: Optional[Path], strategy: Optional[str],
             target: Optional[str], target_direction: Optional[str],
             parameter: List[Tuple[str, float, float,
                                   float]], constraint: List[str],
             release: bool, image: Optional[str], update: bool) -> None:
    """Optimize a project's parameters locally using Docker.

    \b
    If PROJECT is a directory, the algorithm in the main.py or Main.cs file inside it will be executed.
    If PROJECT is a file, the algorithm in the specified file will be executed.

    By default an interactive wizard is shown letting you configure the optimizer.
    If --optimizer-config or --strategy is given the command runs in non-interactive mode.
    In this mode the CLI does not prompt for input.

    \b
    The --optimizer-config option can be used to specify the configuration to run the optimizer with.
    When using the option it should point to a file like this (the algorithm-* properties should be omitted):
    https://github.com/QuantConnect/Lean/blob/master/Optimizer.Launcher/config.json

    If --strategy is given the optimizer configuration is read from the given options.
    In this case --strategy, --target, --target-direction and --parameter become required.

    \b
    In non-interactive mode the --parameter option can be provided multiple times to configure multiple parameters:
    - --parameter <name> <min value> <max value> <step size>
    - --parameter my-first-parameter 1 10 0.5 --parameter my-second-parameter 20 30 5

    \b
    In non-interactive mode the --constraint option can be provided multiple times to configure multiple constraints:
    - --constraint "<statistic> <operator> <value>"
    - --constraint "Sharpe Ratio >= 0.5" --constraint "Drawdown < 0.25"

    By default the official LEAN engine image is used.
    You can override this using the --image option.
    Alternatively you can set the default engine image for all commands using `lean config set engine-image <image>`.
    """
    project_manager = container.project_manager()
    algorithm_file = project_manager.find_algorithm_file(project)

    if output is None:
        output = algorithm_file.parent / "optimizations" / datetime.now().strftime("%Y-%m-%d_%H-%M-%S")

    optimizer_config_manager = container.optimizer_config_manager()
    config = None

    if optimizer_config is not None and strategy is not None:
        raise RuntimeError(
            "--optimizer-config and --strategy are mutually exclusive")

    if optimizer_config is not None:
        config = json5.loads(optimizer_config.read_text(encoding="utf-8"))

        # Remove keys which are configured in the Lean config
        for key in [
                "algorithm-type-name", "algorithm-language",
                "algorithm-location"
        ]:
            config.pop(key, None)
    elif strategy is not None:
        ensure_options(["strategy", "target", "target_direction", "parameter"])

        optimization_strategy = f"QuantConnect.Optimizer.Strategies.{strategy.replace(' ', '')}OptimizationStrategy"
        optimization_target = OptimizationTarget(
            target=optimizer_config_manager.parse_target(target),
            extremum=target_direction)
        optimization_parameters = optimizer_config_manager.parse_parameters(
            parameter)
        optimization_constraints = optimizer_config_manager.parse_constraints(
            constraint)
    else:
        project_config_manager = container.project_config_manager()
        project_config = project_config_manager.get_project_config(
            algorithm_file.parent)
        project_parameters = [
            QCParameter(key=k, value=v)
            for k, v in project_config.get("parameters", {}).items()
        ]

        if len(project_parameters) == 0:
            raise MoreInfoError(
                "The given project has no parameters to optimize",
                "https://www.lean.io/docs/lean-cli/optimization/parameters")

        optimization_strategy = optimizer_config_manager.configure_strategy(
            cloud=False)
        optimization_target = optimizer_config_manager.configure_target()
        optimization_parameters = optimizer_config_manager.configure_parameters(
            project_parameters, cloud=False)
        optimization_constraints = optimizer_config_manager.configure_constraints()

    if config is None:
        # noinspection PyUnboundLocalVariable
        config = {
            "optimization-strategy": optimization_strategy,
            "optimization-strategy-settings": {
                "$type": "QuantConnect.Optimizer.Strategies.StepBaseOptimizationStrategySettings, QuantConnect.Optimizer",
                "default-segment-amount": 10
            },
            "optimization-criterion": {
                "target": optimization_target.target,
                "extremum": optimization_target.extremum.value
            },
            "parameters": [parameter.dict() for parameter in optimization_parameters],
            "constraints": [constraint.dict(by_alias=True) for constraint in optimization_constraints]
        }

    config["optimizer-close-automatically"] = True
    config["results-destination-folder"] = "/Results"

    config_path = output / "optimizer-config.json"
    config_path.parent.mkdir(parents=True, exist_ok=True)
    with config_path.open("w+", encoding="utf-8") as file:
        file.write(json.dumps(config, indent=4) + "\n")

    project_config_manager = container.project_config_manager()
    cli_config_manager = container.cli_config_manager()

    project_config = project_config_manager.get_project_config(
        algorithm_file.parent)
    engine_image = cli_config_manager.get_engine_image(
        image or project_config.get("engine-image", None))

    lean_config_manager = container.lean_config_manager()
    lean_config = lean_config_manager.get_complete_lean_config(
        "backtesting", algorithm_file, None)

    if not output.exists():
        output.mkdir(parents=True)

    output_config_manager = container.output_config_manager()
    lean_config["algorithm-id"] = str(
        output_config_manager.get_optimization_id(output))
    lean_config["messaging-handler"] = "QuantConnect.Messaging.Messaging"

    lean_runner = container.lean_runner()
    run_options = lean_runner.get_basic_docker_config(lean_config,
                                                      algorithm_file, output,
                                                      None, release, detach)

    run_options["working_dir"] = "/Lean/Optimizer.Launcher/bin/Debug"
    run_options["commands"].append(
        "dotnet QuantConnect.Optimizer.Launcher.dll")
    run_options["mounts"].append(
        Mount(target="/Lean/Optimizer.Launcher/bin/Debug/config.json",
              source=str(config_path),
              type="bind",
              read_only=True))

    container.update_manager().pull_docker_image_if_necessary(
        engine_image, update)

    project_manager.copy_code(algorithm_file.parent, output / "code")

    success = container.docker_manager().run_image(engine_image, **run_options)

    logger = container.logger()
    cli_root_dir = container.lean_config_manager().get_cli_root_directory()
    relative_project_dir = project.relative_to(cli_root_dir)
    relative_output_dir = output.relative_to(cli_root_dir)

    if detach:
        temp_manager = container.temp_manager()
        temp_manager.delete_temporary_directories_when_done = False

        logger.info(
            f"Successfully started optimization for '{relative_project_dir}' in the '{run_options['name']}' container"
        )
        logger.info(f"The output will be stored in '{relative_output_dir}'")
        logger.info(
            "You can use Docker's own commands to manage the detached container"
        )
    elif success:
        optimizer_logs = (output / "log.txt").read_text(encoding="utf-8")
        groups = re.findall(r"ParameterSet: \(([^)]+)\) backtestId '([^']+)'",
                            optimizer_logs)

        if len(groups) > 0:
            optimal_parameters, optimal_id = groups[0]

            optimal_results = json.loads(
                (output / optimal_id /
                 f"{optimal_id}.json").read_text(encoding="utf-8"))
            optimal_backtest = QCBacktest(
                backtestId=optimal_id,
                projectId=1,
                status="",
                name=optimal_id,
                created=datetime.now(),
                completed=True,
                progress=1.0,
                runtimeStatistics=optimal_results["RuntimeStatistics"],
                statistics=optimal_results["Statistics"])

            logger.info(
                f"Optimal parameters: {optimal_parameters.replace(':', ': ').replace(',', ', ')}"
            )
            logger.info(f"Optimal backtest results:")
            logger.info(optimal_backtest.get_statistics_table())

        logger.info(
            f"Successfully optimized '{relative_project_dir}' and stored the output in '{relative_output_dir}'"
        )
    else:
        raise RuntimeError(
            f"Something went wrong while running the optimization, the output is stored in '{relative_output_dir}'"
        )