def send(self, action, install_method, num_errors):
        """Sends analytics track data to SegmentIO.

        action: string | preflight, deploy, or postflight
        install_method: string | gui, cli, or advanced
        num_errors: int | number of errors encountered during the action

        The variant (open or enterprise) is read from the BOOTSTRAP_VARIANT
        environment variable rather than passed as a parameter.
        """
        analytics.write_key = "51ybGTeFEFU1xo6u10XMDrr6kATFyRyh"

        # Set the customer key here rather than in __init__ since we want the most
        # up-to-date config; it may change between __init__ and this call.
        config = Config(CONFIG_PATH)
        customer_key = config.hacky_default_get('customer_key', None)

        # provider is always onprem when the cli installer is used
        provider = "onprem"
        # platform defaults to provider value, if not specified
        platform = config.hacky_default_get('platform', provider)

        analytics.track(user_id=customer_key, anonymous_id=self.uuid, event="installer", properties={
            "platform": platform,
            "provider": provider,
            "source": "installer",
            "variant": os.environ["BOOTSTRAP_VARIANT"],
            "install_id": self.uuid,
            "bootstrap_id": os.environ["BOOTSTRAP_ID"],
            "install_method": install_method,
            "action_name": action,
            "errors": num_errors,
            "customerKey": customer_key,
        })
        analytics.flush()
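For orientation, a minimal sketch of how this method might be driven, assuming a hypothetical wrapper object exposing send() and a uuid attribute (the wrapper name and all values are illustrative, not from the source):

import os
import uuid

# Hypothetical driver: send() reads these environment variables directly,
# so they must be set before the call.
os.environ['BOOTSTRAP_VARIANT'] = 'open'
os.environ['BOOTSTRAP_ID'] = 'example-bootstrap-id'

tracker = InstallerAnalytics()       # assumed wrapper defining send() above
tracker.uuid = str(uuid.uuid4())     # anonymous install id sent along
tracker.send(action='preflight', install_method='cli', num_errors=0)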
def test_configure(client):
    route = '/api/v1/configure'
    featured_methods = {
        'GET': [200, 'application/json'],
        # Should return a 400 if validation has errors, which this POST
        # triggers since ssh_port is not an integer.
        'POST': [400, 'application/json', '{"ssh_port": "asdf"}'],
        'PUT': [405, 'text/plain'],
        'DELETE': [405, 'text/plain'],
        'HEAD': [405, 'text/plain'],
        'TRACE': [405, 'text/plain'],
        'CONNECT': [405, 'text/plain'],
    }

    for method, expected in featured_methods.items():
        if method == 'POST':
            res = client.request(route,
                                 method=method,
                                 body=expected[2].encode('utf-8'),
                                 expect_errors=True)
        else:
            res = client.request(route, method=method, expect_errors=True)
        assert res.status_code == expected[0], '{}: {}'.format(
            method, expected)
        assert res.content_type == expected[1], '{}: {}'.format(
            method, expected)
        if expected[0] == 200:
            expected_config = Config('genconf/config.yaml').config
            # Add ui config parameters which are always set.
            # TODO(cmaloney): Make this unnecessary
            expected_config.update({'ssh_key': None, 'ip_detect_script': None})
            assert res.json == expected_config
Example #4
def do_configure(config_path=CONFIG_PATH):
    """Returns error code

    :param config_path: path to config.yaml
    :type config_path: string | CONFIG_PATH (genconf/config.yaml)
    """
    config = Config(config_path)

    validate_gen = config.do_validate(include_ssh=False)
    if len(validate_gen) > 0:
        for key, error in validate_gen.items():
            log.error('{}: {}'.format(key, error))
        return 1

    config_util.do_configure(config)
    return 0
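Since do_configure returns a process-style error code, a sketch of hypothetical CLI wiring (the __main__ guard is an assumption; only the return-code contract comes from the docstring above):

import sys

if __name__ == '__main__':
    # do_configure() returns 0 on success and 1 on validation failure,
    # so it maps directly onto a process exit status.
    sys.exit(do_configure())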
Example #5
def generate_node_upgrade_script(installed_cluster_version,
                                 config_path=CONFIG_PATH):

    if installed_cluster_version is None:
        print('Must provide the version of the cluster being upgraded from')
        return 1

    config = Config(config_path)
    try:
        gen_out = config_util.onprem_generate(config)
    except ValidationError as e:
        validation = normalize_config_validation_exception(e)
        print_messages(validation)
        return 1
    except ExhibitorTLSBootstrapError as e:
        log.error('Failed to bootstrap Exhibitor TLS')
        for i, error in enumerate(e.errors):
            log.error("{}: {}".format(i + 1, error))
        return 1

    config_util.make_serve_dir(gen_out)

    # generate the upgrade script
    upgrade.generate_node_upgrade_script(gen_out, installed_cluster_version)

    return 0
Example #7
def success(config: Config):
    """Returns the data for the /success/ endpoint.

    :param config: the installer configuration
    :type config: Config
    """
    master_ips = config.hacky_default_get('master_list', [])
    agent_ips = config.hacky_default_get('agent_list', [])

    code = 200
    msgs = {'success': "", 'master_count': 0, 'agent_count': 0}
    if not master_ips or not agent_ips:
        code = 400
        return msgs, code
    msgs['success'] = 'http://{}'.format(master_ips[0])
    msgs['master_count'] = len(master_ips)
    msgs['agent_count'] = len(agent_ips)
    return msgs, code
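A sketch of the (msgs, code) contract, using a hypothetical stub in place of Config; only hacky_default_get is exercised, and the stub class is an assumption for illustration:

class FakeConfig:
    """Hypothetical stand-in implementing the one method success() calls."""

    def __init__(self, data):
        self._data = data

    def hacky_default_get(self, key, default):
        return self._data.get(key, default)

msgs, code = success(FakeConfig({'master_list': ['10.0.0.1'],
                                 'agent_list': ['10.0.0.2']}))
assert code == 200 and msgs['success'] == 'http://10.0.0.1'

msgs, code = success(FakeConfig({}))  # no hosts configured yet -> 400
assert code == 400 and msgs['success'] == ''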
Example #8
def do_validate_config(args):
    log_warn_only()
    config = Config(dcos_installer.constants.CONFIG_PATH)
    validation_errors = config.do_validate(include_ssh=True)
    if validation_errors:
        print_validation_errors(validation_errors)
        return 1
    return 0
def success(request):
    """Return /success

    :param request: a web request object.
    :type request: request | None
    """
    log.info("Request for success made.")
    msgs, code = backend.success(Config(CONFIG_PATH))
    return web.json_response(msgs, status=code)
def test_set_superuser_password(tmpdir):
    """Test that --set-superuser-hash works"""

    with tmpdir.as_cwd():
        tmpdir.join('genconf').ensure(dir=True)

        # TODO(cmaloney): Add tests for the behavior around a non-existent config.yaml

        # Setting in a non-empty config.yaml which has no password set
        make_default_config_if_needed('genconf/config.yaml')
        assert 'superuser_password_hash' not in Config('genconf/config.yaml').config

        # Set the password
        subprocess.check_call(['dcos_installer', '--set-superuser-password', 'foo'], cwd=str(tmpdir))

        # Check that config.yaml has the password set
        config = Config('genconf/config.yaml')
        assert passlib.hash.sha512_crypt.verify('foo', config['superuser_password_hash'])
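For reference, a minimal sketch of producing a hash that this verification accepts with passlib (presumably what the --set-superuser-password flag does internally; that internal detail is an assumption):

import passlib.hash

# sha512_crypt.hash() produces a salted modular-crypt string that
# sha512_crypt.verify() accepts, matching the assertion in the test above.
hashed = passlib.hash.sha512_crypt.hash('foo')
assert passlib.hash.sha512_crypt.verify('foo', hashed)
assert hashed.startswith('$6$')  # the sha512-crypt scheme prefix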
Example #11
def test_accept_overrides_for_undefined_config_params(tmpdir):
    temp_config_path = tmpdir.strpath + '/config.yaml'
    param = ('fake_test_param_name', 'fake_test_param_value')
    make_default_config_if_needed(temp_config_path)
    messages = backend.create_config_from_post(post_data=dict([param]),
                                               config_path=temp_config_path)

    assert not messages, "unexpected validation error: {}".format(messages)
    assert Config(config_path=temp_config_path)[param[0]] == param[1]
Example #12
def create_config_from_post(post_data, config_path):
    """Returns error code and validation messages for only keys POSTed
    to the UI.

    :param config_path: path to config.yaml
    :type config_path: string | CONFIG_PATH (genconf/config.yaml)

    :param post_data: data from POST to UI
    :type post_data: dict | {}
    """
    log.info("Updating config with POST data.")

    # Make sure we aren't passed ssh_key the way the web installer used to; the web
    # installer should take care of its own wrapping / unwrapping.
    assert 'ssh_key' not in post_data
    assert 'ip_detect_script' not in post_data

    # Create a new configuration object, passing it the config.yaml path and the POSTed
    # dictionary. Add in "hidden config" we don't present in the config.yaml, and then
    # create a meta validation dictionary from the gen and ssh validation libs.
    # We do not use the already-built methods for this since those are used to read the
    # configuration off disk; here we need to validate the configuration overrides and
    # return the key and message for each POSTed parameter.
    config = Config(config_path)
    config.update(post_data)
    validation_messages = config.do_validate(include_ssh=True)

    # TODO(cmaloney): Return all errors to the UI so it can display / decide how
    # it wants to log (new parameter might cause an error with an old set key)
    # Return only the keys the UI POSTed, do not write config to disk if
    # validation fails.
    post_validation_errors = {
        key: validation_messages[key]
        for key in validation_messages if key in post_data
    }

    # If validation is successful, write the data to disk; otherwise, if
    # the keys POSTed failed validation, do not write to disk.
    if post_validation_errors:
        log.error("POSTed configuration has errors, not writing to disk.")
        for key, value in post_validation_errors.items():
            log.error('{}: {}'.format(key, value))
    else:
        log.debug("Success! POSTed configuration looks good, writing to disk.")
        config.update(post_data)
        config.write_config()

    return post_validation_errors
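A hedged usage sketch: validating one POSTed override against a default config file (paths are illustrative; make_default_config_if_needed is the helper used by the tests elsewhere in this listing):

import os
import tempfile

config_path = os.path.join(tempfile.mkdtemp(), 'config.yaml')
make_default_config_if_needed(config_path)

# Only errors for the POSTed keys come back; the file is rewritten
# only when this dict is empty.
errors = create_config_from_post(post_data={'ssh_port': 22},
                                 config_path=config_path)
if not errors:
    print('configuration written to {}'.format(config_path))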
def configure_status(request):
    """Return /configure/status

    :param request: a web request object.
    :type request: request | None
    """
    log.info("Request for configuration validation made.")
    code = 200
    messages = Config(CONFIG_PATH).do_validate(include_ssh=True)
    if messages:
        code = 400
    resp = web.json_response(messages, status=code)
    return resp
Example #15
def determine_config_type(config_path=CONFIG_PATH):
    """Returns the configuration type to the UI. One of either 'minimal' or
    'advanced'. 'advanced' blocks UI usage.

    :param config_path: path to config.yaml
    :type config_path: str | CONFIG_PATH (genconf/config.yaml)
    """
    # TODO(cmaloney): If the config has any arguments not in the set of possible parameters then
    # the config is always advanced.
    config = Config(config_path)
    adv_found = {}
    advanced_cluster_config = {
        "bootstrap_url": 'file:///opt/dcos_install_tmp',
        "docker_remove_delay": None,
        "exhibitor_storage_backend": 'static',
        "gc_delay": None,
        "master_discovery": 'static',
        "roles": None,
        "weights": None
    }
    for key, value in advanced_cluster_config.items():
        # Skip if the key isn't in config
        if key not in config:
            continue

        # A value of None means the key's presence alone makes this advanced
        # config; a string value means the config value must match exactly.
        if value is None:
            log.error('Advanced configuration found in config.yaml: {}: {}'.format(key, config[key]))
            adv_found[key] = config[key]
        elif value != config[key]:
            log.error('Advanced configuration found in config.yaml: {}: {}'.format(key, config[key]))
            adv_found[key] = config[key]

    if adv_found:
        message = (
            "Advanced configuration detected in {config_path} ({adv_found}).\nPlease backup "
            "or remove {config_path} to use the UI installer.".format(
                config_path=CONFIG_PATH,
                adv_found=adv_found,
            )
        )
        config_type = 'advanced'
    else:
        message = ''
        config_type = 'minimal'

    return {
        'message': message,
        'type': config_type
    }
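Sketch of the return contract (the values follow directly from the branches above; the default config_path resolution is from the signature):

result = determine_config_type()  # reads CONFIG_PATH by default
# result always has exactly two keys:
#   {'message': '', 'type': 'minimal'}            -- no advanced keys found
#   {'message': 'Advanced configuration detected in ...', 'type': 'advanced'}
if result['type'] == 'advanced':
    print(result['message'])  # tells the user to back up / remove config.yaml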
def configure(request):
    """Return /api/v1/configure

    :param request: a web request object.
    :type request: request | None
    """
    if request.method == 'POST':
        new_config = yield from request.json()

        # Save ssh_key, ip_detect as needed
        # TODO(cmaloney): make ssh_key derive from ssh_key_path so we can just set ssh_key and skip all this.
        new_config = extract_external(new_config, 'ssh_key', 'ssh_key_path',
                                      SSH_KEY_PATH, 0o600)
        # TODO(cmaloney): change this to ip_detect_contents removing the need for the remapping.
        new_config = extract_external(new_config, 'ip_detect_script',
                                      'ip_detect_path', IP_DETECT_PATH, 0o644)

        log.info('POST to configure: {}'.format(new_config))
        messages = backend.create_config_from_post(new_config, CONFIG_PATH)

        # Map back to DC/OS UI configuration parameters.
        # TODO(cmaloney): Remove the need to remap validation keys. The remapping makes errors show up
        # under the key of the user config chunk that caused them rather than under their own key, so
        # num_masters validation, for instance, shows up under master_list where the user would expect it.
        if "ssh_key_path" in messages:
            messages["ssh_key"] = messages["ssh_key_path"]

        if "ip_detect_contents" in messages:
            messages['ip_detect_path'] = messages['ip_detect_contents']

        if 'num_masters' in messages:
            messages['master_list'] = messages['num_masters']

        resp = web.json_response({}, status=200)
        if messages:
            resp = web.json_response(messages, status=400)

        return resp

    elif request.method == 'GET':
        config = Config(CONFIG_PATH).config
        # TODO(cmaloney): should exclude the value entirely if the file doesn't exist.
        config['ssh_key'] = try_read_file(SSH_KEY_PATH)
        config['ip_detect_script'] = try_read_file(IP_DETECT_PATH)
        resp = web.json_response(config)

    resp.headers['Content-Type'] = 'application/json'
    return resp
Example #18
def test_get_config(tmpdir):
    workspace = tmpdir.strpath
    temp_config_path = workspace + '/config.yaml'

    expected_data = {
        'cluster_name': 'DC/OS',
        'master_discovery': 'static',
        'exhibitor_storage_backend': 'static',
        'resolvers': ['8.8.8.8', '8.8.4.4'],
        'process_timeout': 10000,
        'bootstrap_url': 'file:///opt/dcos_install_tmp',
    }

    make_default_config_if_needed(temp_config_path)
    config = Config(temp_config_path)
    assert expected_data == config.config
def test_do_validate_config(tmpdir):
    # Create a temp config
    genconf_dir = tmpdir.join('genconf')
    genconf_dir.ensure(dir=True)
    temp_config_path = str(genconf_dir.join('config.yaml'))

    # Initialize with defaults
    make_default_config_if_needed(temp_config_path)

    expected_output = {
        'ip_detect_contents': 'ip-detect script `genconf/ip-detect` must exist',
        'ssh_user': '******',
        'master_list': 'Must set master_list, no way to calculate value.',
        'ssh_key_path': 'could not find ssh private key: genconf/ssh_key'
    }
    assert Config(config_path=temp_config_path).do_validate(include_ssh=True) == expected_output
Example #20
def do_configure(config_path=CONFIG_PATH):
    """Returns error code

    :param config_path: path to config.yaml
    :type config_path: string | CONFIG_PATH (genconf/config.yaml)
    """
    config = Config(config_path)

    validation = validate_gen(config)

    if not validation:
        return 1

    config_util.do_configure(config)

    return 0
Example #21
def do_configure(config_path=CONFIG_PATH):
    """Returns error code

    :param config_path: path to config.yaml
    :type config_path: string | CONFIG_PATH (genconf/config.yaml)
    """
    config = Config(config_path)

    try:
        gen_out = config_util.onprem_generate(config)
    except ValidationError as e:
        validation = normalize_config_validation_exception(e)
        print_messages(validation)
        return 1

    config_util.make_serve_dir(gen_out)

    return 0
Example #22
def generate_node_upgrade_script(installed_cluster_version,
                                 config_path=CONFIG_PATH):

    if installed_cluster_version is None:
        print('Must provide the version of the cluster being upgraded from')
        return 1

    config = Config(config_path)
    validation = validate_gen(config)
    if not validation:
        return 1
    gen_out = config_util.onprem_generate(config)
    config_util.make_serve_dir(gen_out)

    # generate the upgrade script
    upgrade.generate_node_upgrade_script(gen_out, installed_cluster_version)

    return 0
Example #23
def test_do_validate_config(tmpdir, monkeypatch):
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')

    # Create a temp config
    genconf_dir = tmpdir.join('genconf')
    genconf_dir.ensure(dir=True)
    temp_config_path = str(genconf_dir.join('config.yaml'))

    # Initialize with defaults
    make_default_config_if_needed(temp_config_path)

    create_fake_build_artifacts(tmpdir)
    expected_output = {
        'ip_detect_contents': 'ip-detect script `genconf/ip-detect` must exist',
        'master_list': 'Must set master_list, no way to calculate value.',
    }
    with tmpdir.as_cwd():
        assert Config(config_path='genconf/config.yaml').do_validate() == expected_output
Example #24
def generate_node_upgrade_script(installed_cluster_version, config_path=CONFIG_PATH):

    if installed_cluster_version is None:
        print('Must provide the version of the cluster being upgraded from')
        return 1

    config = Config(config_path)
    try:
        gen_out = config_util.onprem_generate(config)
    except ValidationError as e:
        validation = normalize_config_validation_exception(e)
        print_messages(validation)
        return 1

    config_util.make_serve_dir(gen_out)

    # generate the upgrade script
    upgrade.generate_node_upgrade_script(gen_out, installed_cluster_version)

    return 0
Example #25
def do_configure(config_path=CONFIG_PATH):
    """Returns error code

    :param config_path: path to config.yaml
    :type config_path: string | CONFIG_PATH (genconf/config.yaml)
    """
    config = Config(config_path)

    try:
        gen_out = config_util.onprem_generate(config)
    except ValidationError as e:
        validation = normalize_config_validation_exception(e)
        print_messages(validation)
        return 1
    except ExhibitorTLSBootstrapError as e:
        log.error('Failed to bootstrap Exhibitor TLS')
        for i, error in enumerate(e.errors):
            log.error("{}: {}".format(i + 1, error))
        return 1

    config_util.make_serve_dir(gen_out)

    return 0
Example #26
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """

    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    # This process is usually run from a Docker container where the default boto3
    # credential resolution may fail, so we allow passing these credentials explicitly.
    if 'aws_template_storage_access_key_id' in config:
        os.environ['AWS_ACCESS_KEY_ID'] = config['aws_template_storage_access_key_id']
    if 'aws_template_storage_secret_access_key' in config:
        os.environ['AWS_SECRET_ACCESS_KEY'] = config['aws_template_storage_secret_access_key']
    if 'aws_template_storage_region_name' in config:
        os.environ['AWS_DEFAULT_REGION'] = config['aws_template_storage_region_name']

    gen_config = config.as_gen_format()

    extra_sources = [
        gen.build_deploy.aws.aws_base_source, aws_advanced_source,
        gen.build_deploy.aws.groups['master'][1]
    ]

    sources, targets, _ = gen.get_dcosconfig_source_target_and_templates(
        gen_config, [], extra_sources)
    targets.append(get_aws_advanced_target())
    resolver = gen.internals.resolve_configuration(sources, targets)
    # TODO(cmaloney): kill this function and make the API return the structured
    # results as was always intended rather than the flattened / lossy format.
    # This will be an API-incompatible change. The messages format existed
    # specifically so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(resolver.status_dict)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky but a lot simpler than merging all the config flows into
    # one currently.
    # Get out the calculated arguments and manually move critical calculated ones to the gen_config
    # object.
    # NOTE: the copying across, as well as validation is guaranteed to succeed because we've already
    # done a validation run.
    full_config = {k: v.value for k, v in resolver.arguments.items()}

    # Calculate the config ID and values that depend on it.
    config_id = gen.get_config_id(full_config)
    reproducible_artifact_path = 'config_id/{}'.format(config_id)
    cloudformation_s3_url = '{}/config_id/{}'.format(
        full_config['bootstrap_url'], config_id)
    cloudformation_s3_url_full = '{}/cloudformation'.format(
        cloudformation_s3_url)

    # TODO(cmaloney): Switch to using the targets
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['package_ids'] = full_config['package_ids']
    gen_config['cloudformation_s3_url_full'] = cloudformation_s3_url_full

    # Convert the bootstrap_variant string we have back to the form used internally by
    # all the tooling (never an empty string; None means "no variant").
    bootstrap_variant = full_config['bootstrap_variant'] if full_config['bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(
            gen.build_deploy.aws.do_create(
                tag='dcos_generate_config.sh --aws-cloudformation',
                build_name='Custom',
                reproducible_artifact_path=reproducible_artifact_path,
                variant_arguments={bootstrap_variant: gen_config},
                commit=full_config['dcos_image_commit'],
                all_completes=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(
        release.make_bootstrap_artifacts(
            full_config['bootstrap_id'],
            json.loads(full_config['package_ids']),
            bootstrap_variant,
            'artifacts',
        ))

    for package_id in json.loads(full_config['package_ids']):
        package_filename = release.make_package_filename(package_id)
        artifacts.append({
            'reproducible_path': package_filename,
            'local_path': 'artifacts/' + package_filename,
        })

    # Upload all the artifacts to the config-id path, then print out the path
    # that should be used, as well as saving a local json file for easy
    # machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'], None,
        'config_id/' + config_id)

    storage_commands = repository.make_commands({
        'core_artifacts': [],
        'channel_artifacts': artifacts
    })

    cf_dir = GENCONF_DIR + '/cloudformation'
    log.warning("Writing local copies to {}".format(cf_dir))
    storage_provider = release.storage.local.LocalStorageProvider(cf_dir)
    release.apply_storage_commands({'local': storage_provider},
                                   storage_commands)

    log.warning("Generated templates locally available at %s",
                cf_dir + "/" + reproducible_artifact_path)
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=cloudformation_s3_url,
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key']
    )

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        cloudformation_s3_url))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0
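To make the path wiring concrete, a small sketch of how the config-id locations compose (the composition is copied from the function above; the bucket URL and id are invented):

bootstrap_url = 'https://example-bucket.s3.amazonaws.com/dcos'
config_id = 'abc123'  # normally computed via gen.get_config_id(full_config)

reproducible_artifact_path = 'config_id/{}'.format(config_id)
cloudformation_s3_url = '{}/config_id/{}'.format(bootstrap_url, config_id)
cloudformation_s3_url_full = '{}/cloudformation'.format(cloudformation_s3_url)

assert cloudformation_s3_url_full == (
    'https://example-bucket.s3.amazonaws.com/dcos/config_id/abc123/cloudformation')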
def action_action_name(request):
    """Return /action/<action_name>

    :param request: a web request object.
    :type request: request | None
    """
    global current_action
    action_name = request.match_info['action_name']

    # Update the global action
    json_state = read_json_state(action_name)
    current_action = action_name

    if request.method == 'GET':
        log.info('GET {}'.format(action_name))

        if json_state:
            return web.json_response(json_state)
        return web.json_response({})

    elif request.method == 'POST':
        log.info('POST {}'.format(action_name))
        action = action_map.get(action_name)
        # If the action name is preflight, attempt to run configuration
        # generation. If genconf fails, present the UI with a usable error
        # for the end-user
        if action_name == 'preflight':
            try:
                log.warning("GENERATING CONFIGURATION")
                backend.do_configure()
            except Exception:
                genconf_failure = {
                    "errors": "Configuration generation failed, please see command line for details"
                }
                return web.json_response(genconf_failure, status=400)

        params = yield from request.post()

        if json_state:
            if action_name == 'deploy' and 'retry' in params:
                if 'hosts' in json_state:
                    failed_hosts = []
                    for deploy_host, deploy_params in json_state['hosts'].items():
                        if deploy_params['host_status'] != 'success':
                            failed_hosts.append(Node(
                                deploy_host,
                                tags=deploy_params['tags'],
                                default_port=int(Config(CONFIG_PATH).hacky_default_get('ssh_port', 22))))
                    log.debug('failed hosts: {}'.format(failed_hosts))
                    if failed_hosts:
                        # asyncio.async() is deprecated (async is now a keyword);
                        # ensure_future is the equivalent modern spelling.
                        yield from asyncio.ensure_future(action(
                            Config(CONFIG_PATH),
                            state_json_dir=STATE_DIR,
                            hosts=failed_hosts,
                            try_remove_stale_dcos=True,
                            **params))
                        return web.json_response({
                            'status': 'retried',
                            'details': sorted('{}:{}'.format(node.ip, node.port)
                                              for node in failed_hosts)
                        })

            if action_name not in remove_on_done:
                return web.json_response({
                    'status': '{} was already executed, skipping'.format(action_name)
                })

            running = False
            for host, attributes in json_state['hosts'].items():
                if attributes['host_status'].lower() == 'running':
                    running = True

            log.debug('is action running: {}'.format(running))
            if running:
                return web.json_response(
                    {'status': '{} is running, skipping'.format(action_name)})
            else:
                unlink_state_file(action_name)

        yield from asyncio.ensure_future(action(
            Config(CONFIG_PATH),
            state_json_dir=STATE_DIR,
            options=options,
            **params))
        return web.json_response({'status': '{} started'.format(action_name)})
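The retry branch above implies a state file of roughly this shape; a sketch inferred only from the keys the handler reads (any field beyond those accesses is an assumption):

json_state = {
    'hosts': {
        '10.0.0.5': {
            'host_status': 'failed',    # anything but 'success' gets retried
            'tags': {'role': 'agent'},  # illustrative tag payload
        },
        '10.0.0.6': {
            'host_status': 'success',   # finished hosts are skipped
            'tags': {'role': 'master'},
        },
    },
}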
Example #28
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """

    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    aws_config_target = gen.ConfigTarget(aws_advanced_parameters)
    aws_config_target.add_entry(aws_advanced_entry, False)

    gen_config = config.as_gen_format()
    # TODO(cmaloney): this is hacky....
    del gen_config['provider']

    config_targets = [
        gen.get_dcosconfig_target_and_templates(gen_config, [])[0],
        aws_config_target]

    messages = gen.validate_config_for_targets(config_targets, gen_config)
    # TODO(cmaloney): kill this function and make the API return the structured
    # results as was always intended rather than the flattened / lossy format.
    # This will be an API-incompatible change. The messages format existed
    # specifically so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(messages)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky but a lot simpler than merging all the config flows into
    # one currently.
    # Get out the calculated arguments and manually move critical calculated ones to the gen_config
    # object.
    # NOTE: the copying across, as well as validation is guaranteed to succeed because we've already
    # done a validation run.
    full_config = gen.calculate_config_for_targets(config_targets, gen_config)
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['cloudformation_s3_url'] = full_config['cloudformation_s3_url']

    # Convert the bootstrap_variant string we have back to the form used internally by
    # all the tooling (never an empty string; None means "no variant").
    bootstrap_variant = full_config['bootstrap_variant'] if full_config['bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(gen.installer.aws.do_create(
            tag='dcos_generate_config.sh --aws-cloudformation',
            build_name='Custom',
            reproducible_artifact_path=full_config['reproducible_artifact_path'],
            variant_arguments={bootstrap_variant: gen_config},
            commit=full_config['dcos_image_commit'],
            all_bootstraps=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(release.make_bootstrap_artifacts(full_config['bootstrap_id'], bootstrap_variant, 'artifacts'))

    # Upload all the artifacts to the config-id path, then print out the path
    # that should be used, as well as saving a local json file for easy
    # machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'],
        None,
        'config_id/' + full_config['config_id'])

    storage_commands = repository.make_commands({'core_artifacts': [], 'channel_artifacts': artifacts})

    log.warning("Writing local copies to genconf/cloudformation")
    storage_provider = release.storage.local.LocalStorageProvider('genconf/cloudformation')
    release.apply_storage_commands({'local': storage_provider}, storage_commands)

    log.warning(
        "Generated templates locally available at %s",
        "genconf/cloudformation/" + full_config["reproducible_artifact_path"])
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=full_config['cloudformation_s3_url'],
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key'])

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        full_config['cloudformation_s3_url']))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0
Example #29
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """

    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    gen_config = config.as_gen_format()

    extra_sources = [
        gen.build_deploy.aws.aws_base_source,
        aws_advanced_source,
        gen.build_deploy.aws.groups['master'][1]]

    sources, targets, _ = gen.get_dcosconfig_source_target_and_templates(gen_config, [], extra_sources)
    targets.append(get_aws_advanced_target())
    resolver = gen.internals.resolve_configuration(sources, targets)
    # TODO(cmaloney): kill this function and make the API return the structured
    # results as was always intended rather than the flattened / lossy format.
    # This will be an API-incompatible change. The messages format existed
    # specifically so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(resolver.status_dict)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky but a lot simpler than merging all the config flows into
    # one currently.
    # Get out the calculated arguments and manually move critical calculated ones to the gen_config
    # object.
    # NOTE: the copying across, as well as validation is guaranteed to succeed because we've already
    # done a validation run.
    full_config = {k: v.value for k, v in resolver.arguments.items()}

    # TODO(cmaloney): Switch to using the targets
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['package_ids'] = full_config['package_ids']
    gen_config['cloudformation_s3_url_full'] = full_config['cloudformation_s3_url_full']

    # Convert the bootstrap_variant string we have back to the form used internally by
    # all the tooling (never an empty string; None means "no variant").
    bootstrap_variant = full_config['bootstrap_variant'] if full_config['bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(gen.build_deploy.aws.do_create(
            tag='dcos_generate_config.sh --aws-cloudformation',
            build_name='Custom',
            reproducible_artifact_path=full_config['reproducible_artifact_path'],
            variant_arguments={bootstrap_variant: gen_config},
            commit=full_config['dcos_image_commit'],
            all_completes=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(release.make_bootstrap_artifacts(
        full_config['bootstrap_id'],
        json.loads(full_config['package_ids']),
        bootstrap_variant,
        'artifacts',
    ))

    # Upload all the artifacts to the config-id path, then print out the path
    # that should be used, as well as saving a local json file for easy
    # machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'],
        None,
        'config_id/' + full_config['config_id'])

    storage_commands = repository.make_commands({'core_artifacts': [], 'channel_artifacts': artifacts})

    cf_dir = GENCONF_DIR + '/cloudformation'
    log.warning("Writing local copies to {}".format(cf_dir))
    storage_provider = release.storage.local.LocalStorageProvider(cf_dir)
    release.apply_storage_commands({'local': storage_provider}, storage_commands)

    log.warning(
        "Generated templates locally available at %s",
        cf_dir + "/" + full_config["reproducible_artifact_path"])
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=full_config['cloudformation_s3_url'],
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key'])

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        full_config['cloudformation_s3_url']))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0