예제 #1
0
def backtest(project: str, name: Optional[str], push: bool, open_browser: bool) -> None:
    """Backtest a project in the cloud.

    PROJECT must be the name or id of the project to run a backtest for.

    If the project that has to be backtested has been pulled to the local drive
    with `lean cloud pull` it is possible to use the --push option to push local
    modifications to the cloud before running the backtest.
    """
    logger = container.logger()

    # Resolve the cloud project, optionally pushing local modifications first
    cloud_project = container.cloud_project_manager().get_cloud_project(project, push)

    # Fall back to a randomly generated name when the user did not provide one
    backtest_name = container.name_generator().generate_name() if name is None else name

    finished_backtest = container.cloud_runner().run_backtest(cloud_project, backtest_name)

    backtest_failed = finished_backtest.error is not None or finished_backtest.stacktrace is not None

    # The statistics table is only meaningful for successful backtests
    if not backtest_failed:
        logger.info(finished_backtest.get_statistics_table())

    logger.info(f"Backtest id: {finished_backtest.backtestId}")
    logger.info(f"Backtest name: {finished_backtest.name}")
    logger.info(f"Backtest url: {finished_backtest.get_url()}")

    if backtest_failed:
        # Prefer the stacktrace over the plain error message when both are set
        error = (finished_backtest.stacktrace or finished_backtest.error).strip()

        logger.error("An error occurred during this backtest:")
        logger.error(error)

        # Don't open the results in the browser if the error happened during initialization
        # In the browser the logs won't show these errors, you'll just get empty charts and empty logs
        if error.startswith("During the algorithm initialization, the following exception has occurred:"):
            open_browser = False

    if open_browser:
        webbrowser.open(finished_backtest.get_url())
예제 #2
0
def live(project: str, push: bool, open_browser: bool) -> None:
    """Start live trading for a project in the cloud.

    An interactive prompt will be shown to configure the deployment.

    PROJECT must be the name or the id of the project to start live trading for.

    If the project that has to be live traded has been pulled to the local drive
    with `lean cloud pull` it is possible to use the --push option to push local
    modifications to the cloud before starting live trading.
    """
    logger = container.logger()
    api_client = container.api_client()

    # Resolve the cloud project, optionally pushing local modifications first
    cloud_project_manager = container.cloud_project_manager()
    cloud_project = cloud_project_manager.get_cloud_project(project, push)

    # The project must compile successfully before it can be deployed live
    cloud_runner = container.cloud_runner()
    finished_compile = cloud_runner.compile_project(cloud_project)

    # All brokerages the user can deploy to
    brokerages = [
        PaperTradingBrokerage(),
        InteractiveBrokersBrokerage(),
        TradierBrokerage(),
        FXCMBrokerage(),
        OANDABrokerage(),
        BitfinexBrokerage(),
        CoinbaseProBrokerage()
    ]

    brokerage_options = [
        Option(id=brokerage, label=brokerage.name) for brokerage in brokerages
    ]

    brokerage: CloudBrokerage = logger.prompt_list("Select a brokerage",
                                                   brokerage_options)
    brokerage_settings = brokerage.get_settings(logger)
    price_data_handler = brokerage.get_price_data_handler()

    organization = api_client.accounts.get_organization()
    nodes = api_client.nodes.get_all(organization.organizationId)

    # A node that is already running an algorithm cannot host a new deployment
    live_nodes = [node for node in nodes.live if not node.busy]
    if len(live_nodes) == 0:
        raise RuntimeError(
            "You don't have any live nodes available, you can manage your nodes on https://www.quantconnect.com/terminal/#organization/resources"
        )

    # Bug fix: only offer the available (non-busy) nodes.
    # Previously this iterated over nodes.live, which also listed busy nodes
    # even though they cannot be used for a new deployment.
    node_options = [
        Option(id=node, label=f"{node.name} - {node.description}")
        for node in live_nodes
    ]
    node: QCNode = logger.prompt_list("Select a node", node_options)

    logger.info(
        "You can optionally request for your strategy to send notifications when it generates an order or emits an insight"
    )
    logger.info(
        "You can use any combination of email notifications, webhook notifications and SMS notifications"
    )
    notify_order_events = click.confirm(
        "Do you want to send notifications on order events?", default=False)
    notify_insights = click.confirm(
        "Do you want to send notifications on insights?", default=False)
    notify_methods = []

    if notify_order_events or notify_insights:
        # At least one notification method is required when notifications are enabled
        _log_notification_methods(notify_methods)
        notify_methods.append(_prompt_notification_method())

        # Keep adding methods until the user declines
        while True:
            _log_notification_methods(notify_methods)
            if not click.confirm(
                    "Do you want to add another notification method?",
                    default=False):
                break
            notify_methods.append(_prompt_notification_method())

    logger.info(
        "Automatic restarting uses best efforts to restart the algorithm if it fails due to a runtime error"
    )
    logger.info(
        "This can help improve its resilience to temporary errors such as a brokerage API disconnection"
    )
    automatic_redeploy = click.confirm(
        "Do you want to enable automatic algorithm restarting?", default=True)

    # Show a summary of the configured deployment before asking for confirmation
    logger.info(f"Brokerage: {brokerage.name}")
    logger.info(f"Project id: {cloud_project.projectId}")
    logger.info(f"Environment: {brokerage_settings['environment'].title()}")
    logger.info(f"Server name: {node.name}")
    logger.info(f"Server type: {node.sku}")
    logger.info(f"Data provider: {price_data_handler.replace('Handler', '')}")
    logger.info(f"LEAN version: {cloud_project.leanVersionId}")
    logger.info(
        f"Order event notifications: {'Yes' if notify_order_events else 'No'}")
    logger.info(f"Insight notifications: {'Yes' if notify_insights else 'No'}")
    if notify_order_events or notify_insights:
        _log_notification_methods(notify_methods)
    logger.info(
        f"Automatic algorithm restarting: {'Yes' if automatic_redeploy else 'No'}"
    )

    # abort=True makes click raise an Abort (exiting the CLI) when declined
    click.confirm(
        f"Are you sure you want to start live trading for project '{cloud_project.name}'?",
        default=False,
        abort=True)

    api_client.live.start(cloud_project.projectId, finished_compile.compileId,
                          node.id, brokerage_settings, price_data_handler,
                          automatic_redeploy, cloud_project.leanVersionId,
                          notify_order_events, notify_insights, notify_methods)

    # The live results page uses the #openLive fragment instead of #open
    live_url = cloud_project.get_url().replace("#open", "#openLive")
    logger.info(f"Live url: {live_url}")

    if open_browser:
        webbrowser.open(live_url)
예제 #3
0
def live(project: str, brokerage: str, ib_user_name: Optional[str],
         ib_account: Optional[str], ib_password: Optional[str],
         ib_data_feed: Optional[bool], tradier_account_id: Optional[str],
         tradier_access_token: Optional[str],
         tradier_environment: Optional[str], oanda_account_id: Optional[str],
         oanda_access_token: Optional[str], oanda_environment: Optional[str],
         bitfinex_api_key: Optional[str], bitfinex_api_secret: Optional[str],
         gdax_api_key: Optional[str], gdax_api_secret: Optional[str],
         gdax_passphrase: Optional[str], gdax_environment: Optional[str],
         binance_api_key: Optional[str], binance_api_secret: Optional[str],
         binance_environment: Optional[str], kraken_api_key: Optional[str],
         kraken_api_secret: Optional[str],
         kraken_verification_tier: Optional[str], ftx_api_key: Optional[str],
         ftx_api_secret: Optional[str], ftx_account_tier: Optional[str],
         ftx_exchange_name: Optional[str], zerodha_api_key: Optional[str],
         zerodha_access_token: Optional[str],
         zerodha_product_type: Optional[str],
         zerodha_trading_segment: Optional[str],
         samco_client_id: Optional[str], samco_client_password: Optional[str],
         samco_year_of_birth: Optional[str], samco_product_type: Optional[str],
         samco_trading_segment: Optional[str], node: str, auto_restart: bool,
         notify_order_events: Optional[bool], notify_insights: Optional[bool],
         notify_emails: Optional[str], notify_webhooks: Optional[str],
         notify_sms: Optional[str], push: bool, open_browser: bool) -> None:
    """Start live trading for a project in the cloud.

    PROJECT must be the name or the id of the project to start live trading for.

    By default an interactive wizard is shown letting you configure the deployment.
    If --brokerage is given the command runs in non-interactive mode.
    In this mode the CLI does not prompt for input or confirmation.
    In non-interactive mode the options specific to the given brokerage are required,
    as well as --node, --auto-restart, --notify-order-events and --notify-insights.
    """
    logger = container.logger()
    api_client = container.api_client()

    # Resolve the cloud project, optionally pushing local modifications first
    cloud_project_manager = container.cloud_project_manager()
    cloud_project = cloud_project_manager.get_cloud_project(project, push)

    # The project must compile successfully before it can be deployed live
    cloud_runner = container.cloud_runner()
    finished_compile = cloud_runner.compile_project(cloud_project)

    if brokerage is not None:
        # Non-interactive mode: these options are always required,
        # the brokerage-specific options are validated per branch below
        ensure_options([
            "brokerage", "node", "auto_restart", "notify_order_events",
            "notify_insights"
        ])

        brokerage_instance = None

        if brokerage == PaperTradingBrokerage.get_name():
            brokerage_instance = PaperTradingBrokerage()
        elif brokerage == InteractiveBrokersBrokerage.get_name():
            ensure_options(
                ["ib_user_name", "ib_account", "ib_password", "ib_data_feed"])
            brokerage_instance = InteractiveBrokersBrokerage(
                ib_user_name, ib_account, ib_password, ib_data_feed)
        elif brokerage == TradierBrokerage.get_name():
            ensure_options([
                "tradier_account_id", "tradier_access_token",
                "tradier_environment"
            ])
            brokerage_instance = TradierBrokerage(tradier_account_id,
                                                  tradier_access_token,
                                                  tradier_environment)
        elif brokerage == OANDABrokerage.get_name():
            ensure_options([
                "oanda_account_id", "oanda_access_token", "oanda_environment"
            ])
            brokerage_instance = OANDABrokerage(oanda_account_id,
                                                oanda_access_token,
                                                oanda_environment)
        elif brokerage == BitfinexBrokerage.get_name():
            ensure_options(["bitfinex_api_key", "bitfinex_api_secret"])
            brokerage_instance = BitfinexBrokerage(bitfinex_api_key,
                                                   bitfinex_api_secret)
        elif brokerage == CoinbaseProBrokerage.get_name():
            ensure_options([
                "gdax_api_key", "gdax_api_secret", "gdax_passphrase",
                "gdax_environment"
            ])
            brokerage_instance = CoinbaseProBrokerage(gdax_api_key,
                                                      gdax_api_secret,
                                                      gdax_passphrase,
                                                      gdax_environment)
        elif brokerage == BinanceBrokerage.get_name():
            ensure_options([
                "binance_api_key", "binance_api_secret", "binance_environment"
            ])
            brokerage_instance = BinanceBrokerage(binance_api_key,
                                                  binance_api_secret,
                                                  binance_environment)
        elif brokerage == KrakenBrokerage.get_name():
            ensure_options([
                "kraken_api_key", "kraken_api_secret",
                "kraken_verification_tier"
            ])
            brokerage_instance = KrakenBrokerage(kraken_api_key,
                                                 kraken_api_secret,
                                                 kraken_verification_tier)
        elif brokerage == FTXBrokerage.get_name():
            ensure_options([
                "ftx_api_key", "ftx_api_secret", "ftx_account_tier",
                "ftx_exchange_name"
            ])
            brokerage_instance = FTXBrokerage(ftx_api_key, ftx_api_secret,
                                              ftx_account_tier,
                                              ftx_exchange_name)
        elif brokerage == ZerodhaBrokerage.get_name():
            ensure_options([
                "zerodha_api_key", "zerodha_access_token",
                "zerodha_product_type", "zerodha_trading_segment"
            ])
            brokerage_instance = ZerodhaBrokerage(zerodha_api_key,
                                                  zerodha_access_token,
                                                  zerodha_product_type,
                                                  zerodha_trading_segment)
        elif brokerage == SamcoBrokerage.get_name():
            ensure_options([
                "samco_client_id", "samco_client_password",
                "samco_year_of_birth", "samco_product_type",
                "samco_trading_segment"
            ])
            brokerage_instance = SamcoBrokerage(samco_client_id,
                                                samco_client_password,
                                                samco_year_of_birth,
                                                samco_product_type,
                                                samco_trading_segment)

        # Bug fix: previously an unrecognized --brokerage value fell through
        # with brokerage_instance = None and crashed later with an
        # AttributeError on get_settings(); fail early with a clear message
        if brokerage_instance is None:
            raise RuntimeError(f"There is no brokerage named '{brokerage}'")

        # Look up the requested node by id or name and make sure it is free
        all_nodes = api_client.nodes.get_all(cloud_project.organizationId)
        live_node = next(
            (n for n in all_nodes.live if n.id == node or n.name == node),
            None)

        if live_node is None:
            raise RuntimeError(
                f"You have no live node with name or id '{node}'")

        if live_node.busy:
            raise RuntimeError(
                f"The live node named '{live_node.name}' is already in use by '{live_node.usedBy}'"
            )

        notify_methods = []

        # --notify-emails is a comma-separated list of "address:subject" pairs
        if notify_emails is not None:
            for config in notify_emails.split(","):
                address, subject = config.split(":")
                notify_methods.append(
                    QCEmailNotificationMethod(address=address,
                                              subject=subject))

        # --notify-webhooks is a comma-separated list of "url:key=value:..." entries
        if notify_webhooks is not None:
            for config in notify_webhooks.split(","):
                address, *headers = config.split(":")
                headers = {
                    header.split("=")[0]: header.split("=")[1]
                    for header in headers
                }
                notify_methods.append(
                    QCWebhookNotificationMethod(address=address,
                                                headers=headers))

        # --notify-sms is a comma-separated list of phone numbers
        if notify_sms is not None:
            for phoneNumber in notify_sms.split(","):
                notify_methods.append(
                    QCSMSNotificationMethod(phoneNumber=phoneNumber))
    else:
        # Interactive mode: let the wizard collect all deployment settings
        brokerage_instance = _configure_brokerage(logger)
        live_node = _configure_live_node(logger, api_client, cloud_project)
        notify_order_events, notify_insights, notify_methods = _configure_notifications(
            logger)
        auto_restart = _configure_auto_restart(logger)

    brokerage_settings = brokerage_instance.get_settings()
    price_data_handler = brokerage_instance.get_price_data_handler()

    # Show a summary of the configured deployment
    logger.info(f"Brokerage: {brokerage_instance.get_name()}")
    logger.info(f"Project id: {cloud_project.projectId}")
    logger.info(f"Environment: {brokerage_settings['environment'].title()}")
    logger.info(f"Server name: {live_node.name}")
    logger.info(f"Server type: {live_node.sku}")
    logger.info(f"Data provider: {price_data_handler.replace('Handler', '')}")
    logger.info(f"LEAN version: {cloud_project.leanVersionId}")
    logger.info(
        f"Order event notifications: {'Yes' if notify_order_events else 'No'}")
    logger.info(f"Insight notifications: {'Yes' if notify_insights else 'No'}")
    if notify_order_events or notify_insights:
        _log_notification_methods(notify_methods)
    logger.info(
        f"Automatic algorithm restarting: {'Yes' if auto_restart else 'No'}")

    # Only ask for confirmation in interactive mode
    if brokerage is None:
        click.confirm(
            f"Are you sure you want to start live trading for project '{cloud_project.name}'?",
            default=False,
            abort=True)

    live_algorithm = api_client.live.start(
        cloud_project.projectId, finished_compile.compileId, live_node.id,
        brokerage_settings, price_data_handler, auto_restart,
        cloud_project.leanVersionId, notify_order_events, notify_insights,
        notify_methods)

    logger.info(f"Live url: {live_algorithm.get_url()}")

    if open_browser:
        webbrowser.open(live_algorithm.get_url())
예제 #4
0
def optimize(project: str, target: Optional[str],
             target_direction: Optional[str],
             parameter: List[Tuple[str, float, float,
                                   float]], constraint: List[str],
             node: Optional[str], parallel_nodes: Optional[int],
             name: Optional[str], push: bool) -> None:
    """Optimize a project in the cloud.

    PROJECT must be the name or id of the project to optimize.

    An interactive prompt will be shown to configure the optimizer.
    If --target is given the command runs in non-interactive mode.
    In this mode the CLI does not prompt for input and the following options become required:
    --target, --target-direction, --parameter, --node and --parallel-nodes.

    \b
    In non-interactive mode the --parameter option can be provided multiple times to configure multiple parameters:
    - --parameter <name> <min value> <max value> <step size>
    - --parameter my-first-parameter 1 10 0.5 --parameter my-second-parameter 20 30 5

    \b
    In non-interactive mode the --constraint option can be provided multiple times to configure multiple constraints:
    - --constraint "<statistic> <operator> <value>"
    - --constraint "Sharpe Ratio >= 0.5" --constraint "Drawdown < 0.25"

    If the project that has to be optimized has been pulled to the local drive
    with `lean cloud pull` it is possible to use the --push option to push local
    modifications to the cloud before running the optimization.
    """
    logger = container.logger()
    api_client = container.api_client()

    # Resolve the cloud project, optionally pushing local modifications first
    cloud_project_manager = container.cloud_project_manager()
    cloud_project = cloud_project_manager.get_cloud_project(project, push)

    # Fall back to a randomly generated name when none was provided
    if name is None:
        name = container.name_generator().generate_name()

    # The project must compile successfully before it can be optimized
    cloud_runner = container.cloud_runner()
    finished_compile = cloud_runner.compile_project(cloud_project)

    optimizer_config_manager = container.optimizer_config_manager()
    organization = api_client.organizations.get(cloud_project.organizationId)

    if target is not None:
        # Non-interactive mode: all optimizer options must be given up front
        ensure_options([
            "target", "target_direction", "parameter", "node", "parallel_nodes"
        ])

        optimization_strategy = "QuantConnect.Optimizer.Strategies.GridSearchOptimizationStrategy"
        optimization_target = OptimizationTarget(
            target=optimizer_config_manager.parse_target(target),
            extremum=target_direction)
        optimization_parameters = optimizer_config_manager.parse_parameters(
            parameter)
        optimization_constraints = optimizer_config_manager.parse_constraints(
            constraint)

        # Bug fix: available_nodes was previously referenced without being
        # defined, causing a NameError on the non-interactive path; fetch the
        # supported node types from the optimizer config manager instead.
        # NOTE(review): assumes OptimizerConfigManager exposes available_nodes
        # -- confirm against the manager's definition.
        available_nodes = optimizer_config_manager.available_nodes
        node_name = node
        node = next((n for n in available_nodes if n.name == node_name), None)
        if node is None:
            # A bare next() would raise an opaque StopIteration here
            raise RuntimeError(
                f"There is no optimization node type named '{node_name}'")

        if parallel_nodes < node.min_nodes:
            raise RuntimeError(
                f"The minimum number of parallel nodes for {node.name} is {node.min_nodes}"
            )
        if parallel_nodes > node.max_nodes:
            raise RuntimeError(
                f"The maximum number of parallel nodes for {node.name} is {node.max_nodes}"
            )

        _display_estimate(cloud_project, finished_compile, organization, name,
                          optimization_strategy, optimization_target,
                          optimization_parameters, optimization_constraints,
                          node, parallel_nodes)
    else:
        # Interactive mode: let the wizard collect the optimizer configuration
        optimization_strategy = optimizer_config_manager.configure_strategy(
            cloud=True)
        optimization_target = optimizer_config_manager.configure_target()
        optimization_parameters = optimizer_config_manager.configure_parameters(
            cloud_project.parameters, cloud=True)
        optimization_constraints = optimizer_config_manager.configure_constraints(
        )

        # Keep prompting for a node type until the user accepts the estimate
        while True:
            node, parallel_nodes = optimizer_config_manager.configure_node()

            _display_estimate(cloud_project, finished_compile, organization,
                              name, optimization_strategy, optimization_target,
                              optimization_parameters,
                              optimization_constraints, node, parallel_nodes)

            if click.confirm(
                    "Do you want to start the optimization on the selected node type?",
                    default=True):
                break

    optimization = cloud_runner.run_optimization(
        cloud_project, finished_compile, name, optimization_strategy,
        optimization_target, optimization_parameters, optimization_constraints,
        node.name, parallel_nodes)

    # Keep only the successful backtests that satisfy all constraints
    backtests = optimization.backtests.values()
    backtests = [b for b in backtests if b.exitCode == 0]
    backtests = [
        b for b in backtests
        if _backtest_meets_constraints(b, optimization_constraints)
    ]

    if len(backtests) == 0:
        logger.info(
            "No optimal parameter combination found, no successful backtests meet all constraints"
        )
        return

    # The optimal backtest is the one with the best target statistic,
    # sorted descending when the target should be maximized
    optimal_backtest = sorted(backtests,
                              key=lambda backtest: _get_backtest_statistic(
                                  backtest, optimization_target.target),
                              reverse=optimization_target.extremum ==
                              OptimizationExtremum.Maximum)[0]

    parameters = ", ".join(f"{key}: {optimal_backtest.parameterSet[key]}"
                           for key in optimal_backtest.parameterSet)
    logger.info(f"Optimal parameters: {parameters}")

    # Re-fetch the backtest to get its full statistics
    optimal_backtest = api_client.backtests.get(cloud_project.projectId,
                                                optimal_backtest.id)

    logger.info(f"Optimal backtest id: {optimal_backtest.backtestId}")
    logger.info(f"Optimal backtest name: {optimal_backtest.name}")
    logger.info("Optimal backtest results:")
    logger.info(optimal_backtest.get_statistics_table())
예제 #5
0
def optimize(project: str, name: Optional[str], push: bool) -> None:
    """Optimize a project in the cloud.

    An interactive prompt will be shown to configure the optimizer.

    PROJECT must be the name or id of the project to optimize.

    If the project that has to be optimized has been pulled to the local drive
    with `lean cloud pull` it is possible to use the --push option to push local
    modifications to the cloud before running the optimization.
    """
    logger = container.logger()
    api_client = container.api_client()

    # Resolve the cloud project, optionally pushing local modifications first
    cloud_project = container.cloud_project_manager().get_cloud_project(project, push)

    # Fall back to a randomly generated name when the user did not provide one
    if name is None:
        name = container.name_generator().generate_name()

    cloud_runner = container.cloud_runner()
    finished_compile = cloud_runner.compile_project(cloud_project)

    # Interactively collect the full optimizer configuration
    config_manager = container.optimizer_config_manager()
    optimization_strategy = config_manager.configure_strategy(cloud=True)
    optimization_target = config_manager.configure_target()
    optimization_parameters = config_manager.configure_parameters(
        cloud_project.parameters, cloud=True)
    optimization_constraints = config_manager.configure_constraints()

    backtest_count = _calculate_backtest_count(optimization_parameters)

    organization = api_client.organizations.get(cloud_project.organizationId)

    # Keep prompting for a node type until the user accepts the cost estimate
    while True:
        node, parallel_nodes = config_manager.configure_node()

        estimate = api_client.optimizations.estimate(
            cloud_project.projectId, finished_compile.compileId, name,
            optimization_strategy, optimization_target,
            optimization_parameters, optimization_constraints, node.name,
            parallel_nodes)

        hours = _calculate_hours(estimate.time, backtest_count)
        batch_time = ceil((hours * 100) / parallel_nodes) / 100
        # Cost is rounded up to cents with a $0.01 minimum
        batch_cost = max(0.01, ceil(node.price * hours * 100) / 100)

        logger.info(f"Estimated number of backtests: {backtest_count:,}")
        logger.info(f"Estimated batch time: {_format_hours(batch_time)}")
        logger.info(f"Estimated batch cost: ${batch_cost:,.2f}")
        logger.info(
            f"Organization balance: {organization.credit.balance:,.0f} QCC (${organization.credit.balance / 100:,.2f})"
        )

        if click.confirm(
                "Do you want to start the optimization on the selected node type?",
                default=True):
            break

    optimization = cloud_runner.run_optimization(
        cloud_project, finished_compile, name, optimization_strategy,
        optimization_target, optimization_parameters, optimization_constraints,
        node.name, parallel_nodes)

    # Keep only the successful backtests that satisfy every constraint
    successful = [b for b in optimization.backtests.values() if b.exitCode == 0]
    eligible = [
        b for b in successful
        if _backtest_meets_constraints(b, optimization_constraints)
    ]

    if not eligible:
        logger.info(
            "No optimal parameter combination found, no successful backtests meet all constraints"
        )
        return

    # The optimal backtest has the best value for the target statistic;
    # sort descending when the target should be maximized
    maximize = optimization_target.extremum == OptimizationExtremum.Maximum
    optimal_backtest = sorted(
        eligible,
        key=lambda b: _get_backtest_statistic(b, optimization_target.target),
        reverse=maximize)[0]

    parameters = ", ".join(f"{key}: {optimal_backtest.parameterSet[key]}"
                           for key in optimal_backtest.parameterSet)
    logger.info(f"Optimal parameters: {parameters}")

    # Re-fetch the backtest to get its full statistics
    optimal_backtest = api_client.backtests.get(cloud_project.projectId,
                                                optimal_backtest.id)

    logger.info(f"Optimal backtest id: {optimal_backtest.backtestId}")
    logger.info(f"Optimal backtest name: {optimal_backtest.name}")
    logger.info(f"Optimal backtest results:")
    logger.info(optimal_backtest.get_statistics_table())