Code Example #1
File: wait.py Project: mwiater/dcos-e2e
def _wait_for_variant(cluster: Cluster) -> None:
    """
    Wait for a particular file to be available on the cluster.
    This means that the cluster variant can be determined.
    """
    if get_cluster_variant(cluster) is None:
        raise DCOSTimeoutError
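As excerpted, this helper makes a single check and raises immediately when
the variant is not yet known. The docstring implies the check is repeated
until the variant metadata is available; a minimal sketch of such a retry
wrapper, assuming the ``retrying`` package (the wrapper name and decorator
parameters are illustrative, not the project's own):

from retrying import retry

# Hypothetical wrapper: repeat the single-shot check every 10 seconds,
# giving up after 5 minutes. ``Cluster``, ``DCOSTimeoutError`` and
# ``_wait_for_variant`` are assumed to be in scope, as in the excerpt above.
@retry(
    retry_on_exception=lambda exc: isinstance(exc, DCOSTimeoutError),
    wait_fixed=10 * 1000,
    stop_max_delay=5 * 60 * 1000,
)
def wait_for_variant_with_retries(cluster: Cluster) -> None:
    _wait_for_variant(cluster=cluster)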
Code Example #2
File: wait.py Project: mwiater/dcos-e2e
def wait_for_dcos(
    cluster: Cluster,
    superuser_username: str,
    superuser_password: str,
    http_checks: bool,
    doctor_command_name: str,
    enable_spinner: bool,
) -> None:
    """
    Wait for DC/OS to start.

    Args:
        cluster: The cluster to wait for.
        superuser_username: If the cluster is a DC/OS Enterprise cluster, use
            this username to wait for DC/OS.
        superuser_password: If the cluster is a DC/OS Enterprise cluster, use
            this password to wait for DC/OS.
        http_checks: Whether or not to wait for checks which require an HTTP
            connection to the cluster.
        doctor_command_name: A ``doctor`` command to advise a user to use.
        enable_spinner: Whether to enable the spinner animation.
    """
    message = (
        'A cluster may take some time to be ready.\n'
        'The amount of time it takes to start a cluster depends on a variety '
        'of factors.\n'
        'If you are concerned that this is hanging, try '
        '"{doctor_command_name}" to diagnose common issues.').format(
            doctor_command_name=doctor_command_name)
    click.echo(message)

    no_login_message = ('If you cancel this command while it is running, '
                        'you may not be able to log in. '
                        'To resolve that, run this command again.')

    spinner = Halo(enabled=enable_spinner)
    spinner.start(text='Waiting for DC/OS variant')
    _wait_for_variant(cluster=cluster)
    dcos_variant = get_cluster_variant(cluster=cluster)
    spinner.succeed()
    if dcos_variant == DCOSVariant.OSS:
        click.echo(no_login_message)
    spinner.start(text='Waiting for DC/OS to start')
    try:
        if dcos_variant == DCOSVariant.ENTERPRISE:
            cluster.wait_for_dcos_ee(
                superuser_username=superuser_username,
                superuser_password=superuser_password,
                http_checks=http_checks,
            )
        else:
            cluster.wait_for_dcos_oss(http_checks=http_checks)
    except DCOSTimeoutError:
        spinner.fail(text='Waiting for DC/OS to start timed out.')
        sys.exit(1)

    spinner.succeed()
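A hypothetical invocation of ``wait_for_dcos``, for reference; the
``cluster`` object and every argument value below are assumptions, not
values from the project:

# All values illustrative; ``cluster`` is assumed to be a ``Cluster``.
wait_for_dcos(
    cluster=cluster,
    superuser_username='admin',
    superuser_password='admin-password',
    http_checks=True,
    doctor_command_name='minidcos docker doctor',
    enable_spinner=True,
)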
Code Example #3
File: inspect_cluster.py Project: rajcspsg/dcos-e2e
def show_cluster_details(
    cluster_id: str,
    cluster_representation: ClusterRepresentation,
) -> None:
    """
    Show details of a cluster for "inspect" views.

    Args:
        cluster_id: The ID of the cluster.
        cluster_representation: A representation of the cluster.
    """
    keys = {
        'masters': cluster_representation.masters,
        'agents': cluster_representation.agents,
        'public_agents': cluster_representation.public_agents,
    }

    nodes = {
        key: [
            cluster_representation.to_dict(node_representation=container)
            for container in representation
        ]
        for key, representation in keys.items()
    }

    cluster = cluster_representation.cluster
    dcos_variant = get_cluster_variant(cluster=cluster)
    variant_name = str(dcos_variant if dcos_variant else None)
    master = next(iter(cluster.masters))
    web_ui = 'http://' + str(master.public_ip_address)

    data = {
        'Cluster ID': cluster_id,
        'Web UI': web_ui,
        'Nodes': nodes,
        'SSH Default User': cluster_representation.ssh_default_user,
        'SSH key': str(cluster_representation.ssh_key_path),
        'DC/OS Variant': variant_name,
    }  # type: Dict[str, Any]
    click.echo(
        json.dumps(data, indent=4, separators=(',', ': '), sort_keys=True),
    )
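For reference, a hypothetical call and the shape of the JSON it prints;
all values below are illustrative (the keys and their ordering follow from
the ``data`` dict and ``sort_keys=True``):

# ``cluster_representation`` is assumed to exist; values are illustrative.
show_cluster_details(
    cluster_id='default',
    cluster_representation=cluster_representation,
)
# {
#     "Cluster ID": "default",
#     "DC/OS Variant": "DCOSVariant.OSS",
#     "Nodes": {
#         "agents": [],
#         "masters": ["..."],
#         "public_agents": []
#     },
#     "SSH Default User": "root",
#     "SSH key": "/path/to/id_rsa",
#     "Web UI": "http://172.17.0.2"
# }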
Code Example #4
def sync_code_to_masters(
    cluster: Cluster,
    dcos_checkout_dir: Path,
    sudo: bool,
) -> None:
    """
    Sync files from a DC/OS checkout to master nodes.

    This syncs integration test files and bootstrap files.

    This is not covered by automated tests, and it is non-trivial.

    In the following instructions, running a test might look like:

    `minidcos docker run --test-env pytest <test_filename>`

    The manual test cases we want to work are:
    * Sync a DC/OS Enterprise checkout and run a test - it should work.
    * Delete a test file, sync, try to run this test file - it should fail
      with "file not found".
    * Add a test file, sync, try to run this test file - it should work.
    * Add `assert False` to a test file, sync, and run this test file - it
      should fail.
    * Test bootstrap sync with no changes (a partial test that nothing
      breaks):
      - Sync
      - `minidcos docker run systemctl restart dcos-mesos-master`
      - `minidcos docker run journalctl -f -u dcos-mesos-master`
      - We expect to see no assertion error.
    * Test bootstrap sync with some changes:
      - Add `assert False` to
        `packages/bootstrap/extra/dcos_internal_utils/bootstrap.py`
      - Sync
      - `minidcos docker run systemctl restart dcos-mesos-master`
      - `minidcos docker run journalctl -f -u dcos-mesos-master`
      - We expect to see the assertion error.
    * Test sync DC/OS OSS tests to a DC/OS Enterprise cluster
      - Modify a DC/OS OSS checkout to include a new integration test file and
        a new file in "util".
      - Modify the DC/OS OSS checkout to remove an integration test.
      - Sync the DC/OS OSS checkout to a DC/OS Enterprise cluster.
      - Assert that the DC/OS Enterprise tests still exist.
      - Assert that the "open_source_tests" directory within the Enterprise
        cluster's test directory includes the new integration test.
      - Assert that the "util" directory in the Enterprise cluster's test
        directory includes the new file.
      - Assert that there is no "util" directory in the "open_source_tests"
        directory.
      - Assert that there is no "conftest.py" in the "open_source_tests"
        directory.
      - Assert that the removed integration test is not present in the
        "open_source_tests" directory.
      - Run a test from the "open_source_tests" directory.

    Args:
        cluster: The cluster to sync code to.
        dcos_checkout_dir: The path to a DC/OS (Enterprise) checkout to sync
            code from.
        sudo: Whether to use sudo for commands running on nodes.

    Raises:
        click.BadArgumentUsage: If ``DCOS_CHECKOUT_DIR`` is set to something
            that is not a checkout of a DC/OS repository.
    """
    local_packages = dcos_checkout_dir / 'packages'
    local_test_dir = local_packages / 'dcos-integration-test' / 'extra'
    if not local_test_dir.exists():
        message = (
            'DCOS_CHECKOUT_DIR must be set to the checkout of a DC/OS '
            'repository.\n'
            '"{local_test_dir}" does not exist.'
        ).format(local_test_dir=local_test_dir)
        raise click.BadArgumentUsage(message=message)

    dcos_checkout_dir_variant = _dcos_checkout_dir_variant(
        dcos_checkout_dir=dcos_checkout_dir,
    )

    test_tarstream = _tar_with_filter(
        path=local_test_dir,
        tar_filter=_cache_filter,
    )

    dcos_variant = get_cluster_variant(cluster=cluster)
    if dcos_variant is None:
        message = (
            'The DC/OS variant cannot yet be determined. '
            'Therefore, code cannot be synced to the cluster.'
        )
        click.echo(message, err=True)
        sys.exit(1)

    syncing_oss_to_ee = bool(
        dcos_variant == DCOSVariant.ENTERPRISE
        and dcos_checkout_dir_variant == DCOSVariant.OSS,
    )

    node_active_dir = Path('/opt/mesosphere/active')
    node_test_dir = node_active_dir / 'dcos-integration-test'

    if syncing_oss_to_ee:
        # This matches part of
        # https://github.com/mesosphere/dcos-enterprise/blob/master/packages/dcos-integration-test/ee.build
        for master in cluster.masters:
            master.run(
                args=['rm', '-rf', str(node_test_dir / 'util')],
                sudo=sudo,
            )

            # This makes an assumption that all tests are at the top level.
            master.run(
                args=[
                    'rm',
                    '-rf',
                    str(node_test_dir / 'open_source_tests' / '*.py'),
                ],
                # We use a wildcard character, `*`, so we need shell expansion.
                shell=True,
                sudo=sudo,
            )

            master.run(
                args=[
                    'mkdir',
                    '--parents',
                    str(node_test_dir / 'open_source_tests'),
                ],
                sudo=sudo,
            )

            _send_tarstream_to_node_and_extract(
                tarstream=test_tarstream,
                node=master,
                remote_path=node_test_dir / 'open_source_tests',
                sudo=sudo,
            )
            master.run(
                args=[
                    'rm',
                    '-rf',
                    str(node_test_dir / 'open_source_tests' / 'conftest.py'),
                ],
                sudo=sudo,
            )
            master.run(
                args=[
                    'mv',
                    str(node_test_dir / 'open_source_tests' / 'util'),
                    str(node_test_dir),
                ],
                sudo=sudo,
            )
    else:
        _sync_bootstrap_to_masters(
            cluster=cluster,
            dcos_checkout_dir=dcos_checkout_dir,
            sudo=sudo,
        )

        for master in cluster.masters:
            # This makes an assumption that all tests are at the top level.
            master.run(
                args=['rm', '-rf', str(node_test_dir / '*.py')],
                # We use a wildcard character, `*`, so we need shell expansion.
                shell=True,
                sudo=sudo,
            )
            _send_tarstream_to_node_and_extract(
                tarstream=test_tarstream,
                node=master,
                remote_path=node_test_dir,
                sudo=sudo,
            )