import json
import logging
import os

# NOTE: names such as Config, CONFIG_PATH, GENCONF_DIR, gen, release,
# aws_advanced_source, get_aws_advanced_target, normalize_config_validation,
# and print_messages are provided by the surrounding dcos installer modules
# and are not defined in this excerpt.
log = logging.getLogger(__name__)


# Variant built on gen.internals.resolve_configuration; artifacts are uploaded
# via release storage commands.
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """
    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    gen_config = config.as_gen_format()

    extra_sources = [
        gen.build_deploy.aws.aws_base_source,
        aws_advanced_source,
        gen.build_deploy.aws.groups['master'][1]]

    sources, targets, _ = gen.get_dcosconfig_source_target_and_templates(gen_config, [], extra_sources)
    targets.append(get_aws_advanced_target())
    resolver = gen.internals.resolve_configuration(sources, targets)

    # TODO(cmaloney): kill this function and make the API return the structured
    # results as was always intended rather than the flattened / lossy other
    # format. This will be an API incompatible change. The messages format was
    # specifically so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(resolver.status_dict)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky, but a lot simpler than merging all the config flows
    # into one currently.
    # Get out the calculated arguments and manually move the critical calculated ones onto the
    # gen_config object.
    # NOTE: the copying across, as well as the validation, is guaranteed to succeed because we've
    # already done a validation run.
    full_config = {k: v.value for k, v in resolver.arguments.items()}

    # TODO(cmaloney): Switch to using the targets
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['package_ids'] = full_config['package_ids']
    gen_config['cloudformation_s3_url_full'] = full_config['cloudformation_s3_url_full']

    # Convert the bootstrap_variant string we have back to a bootstrap_id as used internally by
    # all the tooling (never has an empty string; uses None to say "no variant").
    bootstrap_variant = full_config['bootstrap_variant'] if full_config['bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(gen.build_deploy.aws.do_create(
            tag='dcos_generate_config.sh --aws-cloudformation',
            build_name='Custom',
            reproducible_artifact_path=full_config['reproducible_artifact_path'],
            variant_arguments={bootstrap_variant: gen_config},
            commit=full_config['dcos_image_commit'],
            all_completes=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(release.make_bootstrap_artifacts(
        full_config['bootstrap_id'],
        json.loads(full_config['package_ids']),
        bootstrap_variant,
        'artifacts',
    ))

    # Upload all the artifacts to the config-id path, then print out the path that should be
    # used and save a local JSON file for easy machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'],
        None,
        'config_id/' + full_config['config_id'])
    storage_commands = repository.make_commands({'core_artifacts': [], 'channel_artifacts': artifacts})

    cf_dir = GENCONF_DIR + '/cloudformation'
    log.warning("Writing local copies to {}".format(cf_dir))
    storage_provider = release.storage.local.LocalStorageProvider(cf_dir)
    release.apply_storage_commands({'local': storage_provider}, storage_commands)

    log.warning(
        "Generated templates locally available at %s",
        cf_dir + "/" + full_config["reproducible_artifact_path"])
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=full_config['cloudformation_s3_url'],
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key'])

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        full_config['cloudformation_s3_url']))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0
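
# For reference, a sketch of the config.yaml storage settings this function
# reads. The key names come from the lookups above; the values are
# illustrative placeholders, not defaults:
#
#   aws_template_storage_bucket: my-templates-bucket
#   aws_template_storage_bucket_path: dcos/templates
#   aws_template_storage_region_name: us-west-2
#   aws_template_storage_access_key_id: <access key id>
#   aws_template_storage_secret_access_key: <secret access key>
#   aws_template_upload: 'true'
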
# Variant that additionally exports AWS credentials for boto3 and derives the
# config-id based paths itself rather than reading them from the resolved
# configuration.
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """
    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    # This process is usually run from a docker container, where the default boto3 credential
    # resolution may fail, so we allow passing these credentials explicitly.
    if 'aws_template_storage_access_key_id' in config:
        os.environ['AWS_ACCESS_KEY_ID'] = config['aws_template_storage_access_key_id']
    if 'aws_template_storage_secret_access_key' in config:
        os.environ['AWS_SECRET_ACCESS_KEY'] = config['aws_template_storage_secret_access_key']
    if 'aws_template_storage_region_name' in config:
        os.environ['AWS_DEFAULT_REGION'] = config['aws_template_storage_region_name']

    gen_config = config.as_gen_format()

    extra_sources = [
        gen.build_deploy.aws.aws_base_source,
        aws_advanced_source,
        gen.build_deploy.aws.groups['master'][1]]

    sources, targets, _ = gen.get_dcosconfig_source_target_and_templates(gen_config, [], extra_sources)
    targets.append(get_aws_advanced_target())
    resolver = gen.internals.resolve_configuration(sources, targets)

    # TODO(cmaloney): kill this function and make the API return the structured
    # results as was always intended rather than the flattened / lossy other
    # format. This will be an API incompatible change. The messages format was
    # specifically so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(resolver.status_dict)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky, but a lot simpler than merging all the config flows
    # into one currently.
    # Get out the calculated arguments and manually move the critical calculated ones onto the
    # gen_config object.
    # NOTE: the copying across, as well as the validation, is guaranteed to succeed because we've
    # already done a validation run.
    full_config = {k: v.value for k, v in resolver.arguments.items()}

    # Calculate the config ID and the values that depend on it.
    config_id = gen.get_config_id(full_config)
    reproducible_artifact_path = 'config_id/{}'.format(config_id)
    cloudformation_s3_url = '{}/config_id/{}'.format(full_config['bootstrap_url'], config_id)
    cloudformation_s3_url_full = '{}/cloudformation'.format(cloudformation_s3_url)

    # TODO(cmaloney): Switch to using the targets
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['package_ids'] = full_config['package_ids']
    gen_config['cloudformation_s3_url_full'] = cloudformation_s3_url_full

    # Convert the bootstrap_variant string we have back to a bootstrap_id as used internally by
    # all the tooling (never has an empty string; uses None to say "no variant").
    bootstrap_variant = full_config['bootstrap_variant'] if full_config['bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(gen.build_deploy.aws.do_create(
            tag='dcos_generate_config.sh --aws-cloudformation',
            build_name='Custom',
            reproducible_artifact_path=reproducible_artifact_path,
            variant_arguments={bootstrap_variant: gen_config},
            commit=full_config['dcos_image_commit'],
            all_completes=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(release.make_bootstrap_artifacts(
        full_config['bootstrap_id'],
        json.loads(full_config['package_ids']),
        bootstrap_variant,
        'artifacts',
    ))

    # Include the package artifacts themselves alongside the bootstrap artifacts.
    for package_id in json.loads(full_config['package_ids']):
        package_filename = release.make_package_filename(package_id)
        artifacts.append({
            'reproducible_path': package_filename,
            'local_path': 'artifacts/' + package_filename,
        })

    # Upload all the artifacts to the config-id path, then print out the path that should be
    # used and save a local JSON file for easy machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'],
        None,
        'config_id/' + config_id)
    storage_commands = repository.make_commands({'core_artifacts': [], 'channel_artifacts': artifacts})

    cf_dir = GENCONF_DIR + '/cloudformation'
    log.warning("Writing local copies to {}".format(cf_dir))
    storage_provider = release.storage.local.LocalStorageProvider(cf_dir)
    release.apply_storage_commands({'local': storage_provider}, storage_commands)

    log.warning(
        "Generated templates locally available at %s",
        cf_dir + "/" + reproducible_artifact_path)
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=cloudformation_s3_url,
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key'])

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        cloudformation_s3_url))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0
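
# To illustrate the config-id path derivation in the variant above: with a
# hypothetical bootstrap_url of 'https://example.com/dcos' and a config_id of
# 'abc123', the calculated locations would be:
#
#   reproducible_artifact_path  -> 'config_id/abc123'
#   cloudformation_s3_url       -> 'https://example.com/dcos/config_id/abc123'
#   cloudformation_s3_url_full  -> 'https://example.com/dcos/config_id/abc123/cloudformation'
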
# Variant built on the gen.ConfigTarget / validate_config_for_targets API,
# writing templates to a hard-coded genconf/cloudformation directory.
def do_aws_cf_configure():
    """Returns error code

    Generates AWS templates using a custom config.yaml
    """
    # TODO(cmaloney): Move to Config class introduced in https://github.com/dcos/dcos/pull/623
    config = Config(CONFIG_PATH)

    aws_config_target = gen.ConfigTarget(aws_advanced_parameters)
    aws_config_target.add_entry(aws_advanced_entry, False)

    gen_config = config.as_gen_format()
    # TODO(cmaloney): this is hacky....
    del gen_config['provider']

    config_targets = [
        gen.get_dcosconfig_target_and_templates(gen_config, [])[0],
        aws_config_target]

    messages = gen.validate_config_for_targets(config_targets, gen_config)
    # TODO(cmaloney): kill this function and make the API return the structured
    # results as was always intended rather than the flattened / lossy other
    # format. This will be an API incompatible change. The messages format was
    # specifically so that there wouldn't be this sort of API incompatibility.
    messages = normalize_config_validation(messages)
    if messages:
        print_messages(messages)
        return 1

    # TODO(cmaloney): This is really hacky, but a lot simpler than merging all the config flows
    # into one currently.
    # Get out the calculated arguments and manually move the critical calculated ones onto the
    # gen_config object.
    # NOTE: the copying across, as well as the validation, is guaranteed to succeed because we've
    # already done a validation run.
    full_config = gen.calculate_config_for_targets(config_targets, gen_config)
    gen_config['bootstrap_url'] = full_config['bootstrap_url']
    gen_config['provider'] = full_config['provider']
    gen_config['bootstrap_id'] = full_config['bootstrap_id']
    gen_config['cloudformation_s3_url'] = full_config['cloudformation_s3_url']

    # Convert the bootstrap_variant string we have back to a bootstrap_id as used internally by
    # all the tooling (never has an empty string; uses None to say "no variant").
    bootstrap_variant = full_config['bootstrap_variant'] if full_config['bootstrap_variant'] else None

    artifacts = list()
    for built_resource in list(gen.installer.aws.do_create(
            tag='dcos_generate_config.sh --aws-cloudformation',
            build_name='Custom',
            reproducible_artifact_path=full_config['reproducible_artifact_path'],
            variant_arguments={bootstrap_variant: gen_config},
            commit=full_config['dcos_image_commit'],
            all_bootstraps=None)):
        artifacts += release.built_resource_to_artifacts(built_resource)

    artifacts += list(release.make_bootstrap_artifacts(full_config['bootstrap_id'], bootstrap_variant, 'artifacts'))

    # Upload all the artifacts to the config-id path, then print out the path that should be
    # used and save a local JSON file for easy machine access / processing.
    repository = release.Repository(
        full_config['aws_template_storage_bucket_path'],
        None,
        'config_id/' + full_config['config_id'])
    storage_commands = repository.make_commands({'core_artifacts': [], 'channel_artifacts': artifacts})

    log.warning("Writing local copies to genconf/cloudformation")
    storage_provider = release.storage.local.LocalStorageProvider('genconf/cloudformation')
    release.apply_storage_commands({'local': storage_provider}, storage_commands)

    log.warning(
        "Generated templates locally available at %s",
        "genconf/cloudformation/" + full_config["reproducible_artifact_path"])
    # TODO(cmaloney): Print where the user can find the files locally

    if full_config['aws_template_upload'] == 'false':
        return 0

    storage_provider = release.storage.aws.S3StorageProvider(
        bucket=full_config['aws_template_storage_bucket'],
        object_prefix=None,
        download_url=full_config['cloudformation_s3_url'],
        region_name=full_config['aws_template_storage_region_name'],
        access_key_id=full_config['aws_template_storage_access_key_id'],
        secret_access_key=full_config['aws_template_storage_secret_access_key'])

    log.warning("Uploading to AWS")
    release.apply_storage_commands({'aws': storage_provider}, storage_commands)
    log.warning("AWS CloudFormation templates now available at: {}".format(
        full_config['cloudformation_s3_url']))

    # TODO(cmaloney): Print where the user can find the files in AWS
    # TODO(cmaloney): Dump out a JSON with machine paths to make scripting easier.
    return 0
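
# A minimal invocation sketch: run the configure step and exit with its return
# code. This main guard is illustrative only, not part of the installer; there
# the function is dispatched from the CLI (see the '--aws-cloudformation' tag
# passed to do_create above).
if __name__ == '__main__':
    import sys

    logging.basicConfig(level=logging.WARNING)
    sys.exit(do_aws_cf_configure())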