Code Example #1
File: config.py  Project: dcos/dcos
    def do_validate(self, include_ssh):
        user_arguments = self.as_gen_format()
        extra_sources = [onprem_source]
        extra_targets = []
        if include_ssh:
            extra_sources.append(ssh.validate.source)
            extra_targets.append(ssh.validate.get_target())

        sources, targets, _ = gen.get_dcosconfig_source_target_and_templates(user_arguments, [], extra_sources)
        targets = targets + extra_targets

        resolver = gen.internals.resolve_configuration(sources, targets)
        # TODO(cmaloney): kill this function and make the API return the structured
        # results API as was always intended, rather than the flattened / lossy other
        # format. This will be an API-incompatible change. The messages format was
        # specifically designed so that there wouldn't be this sort of API incompatibility.
        return normalize_config_validation(resolver.status_dict)
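
A minimal usage sketch for the method above, assuming the surrounding Config class from dcos_installer's config.py and that normalize_config_validation returns a mapping of argument names to validation messages (both are assumptions, not confirmed by this snippet):

# Hypothetical usage -- the config path and the message shape are assumptions.
config = Config('genconf/config.yaml')
messages = config.do_validate(include_ssh=True)
for name, message in messages.items():
    print('{}: {}'.format(name, message))
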
Code Example #2
File: config.py  Project: tamarrow/dcos
def validate_onprem_dcos_config_contents(onprem_dcos_config_contents, num_masters, prevalidate_onprem_config):
    # TODO DCOS-14033: [gen.internals] Source validate functions are global only
    if prevalidate_onprem_config != 'true':
        return
    user_config = yaml.load(onprem_dcos_config_contents)
    # Use the default config in the installer
    config = yaml.load(dcos_installer.config.config_sample)
    config.update(user_config)
    # This field is required and auto-added by the installer, so add a dummy here
    if 'bootstrap_id' not in config:
        config['bootstrap_id'] = 'deadbeef'

    # dummy master list to pass validation
    config['master_list'] = [('10.0.0.' + str(i)) for i in range(int(num_masters))]

    # Build sources, targets, and templates, adding the ssh and bash onprem deploy sources
    sources, targets, templates = gen.get_dcosconfig_source_target_and_templates(
        gen.stringify_configuration(config), list(), [ssh.validate.source, gen.build_deploy.bash.onprem_source])

    # Copy the gen target from dcos_installer/config.py, but remove
    # 'ssh_key_path' from the target because the validate fn in ssh_source is
    # too strict, i.e. we cannot validate a key if we are going to generate one.
    # Furthermore, we cannot use the target ssh_key_path, as it would automatically
    # invoke the validate fn from ssh/validate.py. Luckily, we can instead use
    # the more idiomatic 'ssh_private_key_filename'.
    targets.append(Target({
        'ssh_user',
        'ssh_port',
        'master_list',
        'agent_list',
        'public_agent_list',
        'ssh_parallelism',
        'process_timeout'}))

    resolver = resolve_configuration(sources, targets)
    status = resolver.status_dict
    if status['status'] == 'errors':
        raise AssertionError(pretty_print_validate_error(status['errors'], status['unset']))
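
A minimal usage sketch for the function above (the YAML contents are made up for illustration; note that validation only runs when prevalidate_onprem_config is the string 'true', and an AssertionError is raised on failure):

# Hypothetical usage -- cluster values below are illustrative only.
user_yaml = '''
cluster_name: test-cluster
ssh_user: centos
'''
validate_onprem_dcos_config_contents(
    onprem_dcos_config_contents=user_yaml,
    num_masters=3,
    prevalidate_onprem_config='true')
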
Code Example #3
File: backend.py  Project: rlugojr/dcos
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """

    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    gen_config = config.as_gen_format()

    extra_sources = [
        gen.build_deploy.aws.aws_base_source,
        aws_advanced_source,
        gen.build_deploy.aws.groups['master'][1]]

    sources, targets, _ = gen.get_dcosconfig_source_target_and_templates(gen_config, [], extra_sources)
    targets.append(get_aws_advanced_target())
    resolver = gen.internals.resolve_configuration(sources, targets)
    # TODO(cmaloney): kill this function and make the API return the structured
    # results API as was always intended, rather than the flattened / lossy other
    # format. This will be an API-incompatible change. The messages format was
    # specifically designed so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(resolver.status_dict)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky, but a lot simpler than merging all the config flows
    # into one right now.
    # Pull the calculated arguments out of the resolver and manually move the critical ones onto
    # the gen_config object.
    # NOTE: the copying across, as well as the validation, is guaranteed to succeed because we've
    # already done a validation run.
    full_config = {k: v.value for k, v in resolver.arguments.items()}

    # TODO(cmaloney): Switch to using the targets
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['package_ids'] = full_config['package_ids']
    gen_config['cloudformation_s3_url_full'] = full_config['cloudformation_s3_url_full']

    # Convert the bootstrap_variant string back to the representation used internally by all
    # the tooling (never an empty string; None means "no variant")
    bootstrap_variant = full_config['bootstrap_variant'] if full_config['bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(gen.build_deploy.aws.do_create(
            tag='dcos_generate_config.sh --aws-cloudformation',
            build_name='Custom',
            reproducible_artifact_path=full_config['reproducible_artifact_path'],
            variant_arguments={bootstrap_variant: gen_config},
            commit=full_config['dcos_image_commit'],
            all_completes=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(release.make_bootstrap_artifacts(
        full_config['bootstrap_id'],
        json.loads(full_config['package_ids']),
        bootstrap_variant,
        'artifacts',
    ))

    # Upload all the artifacts to the config-id path, then print out the path that should be
    # used, as well as saving a local json file for easy machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'],
        None,
        'config_id/' + full_config['config_id'])

    storage_commands = repository.make_commands({'core_artifacts': [], 'channel_artifacts': artifacts})

    cf_dir = GENCONF_DIR + '/cloudformation'
    log.warning("Writing local copies to {}".format(cf_dir))
    storage_provider = release.storage.local.LocalStorageProvider(cf_dir)
    release.apply_storage_commands({'local': storage_provider}, storage_commands)

    log.warning(
        "Generated templates locally available at %s",
        cf_dir + "/" + full_config["reproducible_artifact_path"])
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=full_config['cloudformation_s3_url'],
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key'])

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        full_config['cloudformation_s3_url']))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0
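
A minimal sketch of how a CLI entry point might consume the returned error code (hypothetical wiring, not part of backend.py):

# Hypothetical wiring: exit the process with the function's error code.
import sys

sys.exit(do_aws_cf_configure())
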
Code Example #4
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """

    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    # This process is usually run from a docker container, where the default boto3 credential
    # method may fail; as such, we allow passing these creds explicitly
    if 'aws_template_storage_access_key_id' in config:
        os.environ['AWS_ACCESS_KEY_ID'] = config[
            'aws_template_storage_access_key_id']
    if 'aws_template_storage_secret_access_key' in config:
        os.environ['AWS_SECRET_ACCESS_KEY'] = config[
            'aws_template_storage_secret_access_key']
    if 'aws_template_storage_region_name' in config:
        os.environ['AWS_DEFAULT_REGION'] = config[
            'aws_template_storage_region_name']

    gen_config = config.as_gen_format()

    extra_sources = [
        gen.build_deploy.aws.aws_base_source, aws_advanced_source,
        gen.build_deploy.aws.groups['master'][1]
    ]

    sources, targets, _ = gen.get_dcosconfig_source_target_and_templates(
        gen_config, [], extra_sources)
    targets.append(get_aws_advanced_target())
    resolver = gen.internals.resolve_configuration(sources, targets)
    # TODO(cmaloney): kill this function and make the API return the structured
    # results API as was always intended, rather than the flattened / lossy other
    # format. This will be an API-incompatible change. The messages format was
    # specifically designed so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(resolver.status_dict)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky, but a lot simpler than merging all the config flows
    # into one right now.
    # Pull the calculated arguments out of the resolver and manually move the critical ones onto
    # the gen_config object.
    # NOTE: the copying across, as well as the validation, is guaranteed to succeed because we've
    # already done a validation run.
    full_config = {k: v.value for k, v in resolver.arguments.items()}

    # TODO(cmaloney): Switch to using the targets
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['package_ids'] = full_config['package_ids']
    gen_config['cloudformation_s3_url_full'] = full_config[
        'cloudformation_s3_url_full']

    # Convert the bootstrap_variant string back to the representation used internally by all
    # the tooling (never an empty string; None means "no variant")
    bootstrap_variant = full_config['bootstrap_variant'] if full_config[
        'bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(
            gen.build_deploy.aws.do_create(
                tag='dcos_generate_config.sh --aws-cloudformation',
                build_name='Custom',
                reproducible_artifact_path=full_config[
                    'reproducible_artifact_path'],
                variant_arguments={bootstrap_variant: gen_config},
                commit=full_config['dcos_image_commit'],
                all_completes=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(
        release.make_bootstrap_artifacts(
            full_config['bootstrap_id'],
            json.loads(full_config['package_ids']),
            bootstrap_variant,
            'artifacts',
        ))

    for package_id in json.loads(full_config['package_ids']):
        package_filename = release.make_package_filename(package_id)
        artifacts.append({
            'reproducible_path': package_filename,
            'local_path': 'artifacts/' + package_filename,
        })

    # Upload all the artifacts to the config-id path, then print out the path that should be
    # used, as well as saving a local json file for easy machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'], None,
        'config_id/' + full_config['config_id'])

    storage_commands = repository.make_commands({
        'core_artifacts': [],
        'channel_artifacts': artifacts
    })

    cf_dir = GENCONF_DIR + '/cloudformation'
    log.warning("Writing local copies to {}".format(cf_dir))
    storage_provider = release.storage.local.LocalStorageProvider(cf_dir)
    release.apply_storage_commands({'local': storage_provider},
                                   storage_commands)

    log.warning("Generated templates locally available at %s",
                cf_dir + "/" + full_config["reproducible_artifact_path"])
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=full_config['cloudformation_s3_url'],
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key']
    )

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        full_config['cloudformation_s3_url']))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0
Code Example #5
File: backend.py  Project: zouyee/dcos
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """

    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    gen_config = config.as_gen_format()
    # TODO(cmaloney): this is hacky....
    del gen_config['provider']

    sources, targets, _ = gen.get_dcosconfig_source_target_and_templates(
        gen_config, [])
    sources.append(aws_advanced_source)
    targets.append(aws_advanced_target)
    messages = gen.internals.validate_configuration(sources, targets,
                                                    gen_config)
    # TODO(cmaloney): kill this function and make the API return the structured
    # results API as was always intended, rather than the flattened / lossy other
    # format. This will be an API-incompatible change. The messages format was
    # specifically designed so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(messages)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky, but a lot simpler than merging all the config flows
    # into one right now.
    # Pull the calculated arguments out of the resolved configuration and manually move the
    # critical ones onto the gen_config object.
    # NOTE: the copying across, as well as the validation, is guaranteed to succeed because we've
    # already done a validation run.
    full_config = gen.internals.resolve_configuration(sources, targets,
                                                      gen_config)
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['cloudformation_s3_url'] = full_config['cloudformation_s3_url']

    # Convert the bootstrap_variant string back to the representation used internally by all
    # the tooling (never an empty string; None means "no variant")
    bootstrap_variant = full_config['bootstrap_variant'] if full_config[
        'bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(
            gen.installer.aws.do_create(
                tag='dcos_generate_config.sh --aws-cloudformation',
                build_name='Custom',
                reproducible_artifact_path=full_config[
                    'reproducible_artifact_path'],
                variant_arguments={bootstrap_variant: gen_config},
                commit=full_config['dcos_image_commit'],
                all_bootstraps=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(
        release.make_bootstrap_artifacts(full_config['bootstrap_id'],
                                         bootstrap_variant, 'artifacts'))

    # Upload all the artifacts to the config-id path, then print out the path that should be
    # used, as well as saving a local json file for easy machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'], None,
        'config_id/' + full_config['config_id'])

    storage_commands = repository.make_commands({
        'core_artifacts': [],
        'channel_artifacts': artifacts
    })

    log.warning("Writing local copies to genconf/cloudformation")
    storage_provider = release.storage.local.LocalStorageProvider(
        'genconf/cloudformation')
    release.apply_storage_commands({'local': storage_provider},
                                   storage_commands)

    log.warning(
        "Generated templates locally available at %s",
        "genconf/cloudformation/" + full_config["reproducible_artifact_path"])
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=full_config['cloudformation_s3_url'],
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key']
    )

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        full_config['cloudformation_s3_url']))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0