def aws_cf_configure(config, tmpdir, monkeypatch):
    """Run the AWS CloudFormation configure step against *config*.

    Sets the ``BOOTSTRAP_VARIANT`` env var, stages the given config and fake
    build artifacts under *tmpdir*, and invokes the backend from inside that
    directory, returning its exit status.
    """
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
    create_config(config, tmpdir)
    create_fake_build_artifacts(tmpdir)
    with tmpdir.as_cwd():
        return backend.do_aws_cf_configure()
def test_do_aws_cf_configure_valid_storage_config(config_aws, tmpdir, monkeypatch):
    """End-to-end check that ``do_aws_cf_configure`` succeeds with a valid S3 storage config.

    Creates a uniquely named bucket in the configured region, renders the
    storage config template against it, and asserts the configure step
    exits 0.  The bucket (and any objects written to it) is cleaned up even
    when the test body fails.
    """
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
    session = gen.build_deploy.aws.get_test_session(config_aws)
    s3 = session.resource('s3')
    bucket = str(uuid.uuid4())
    s3_bucket = s3.Bucket(bucket)
    s3_bucket.create(CreateBucketConfiguration={
        'LocationConstraint': config_aws['region_name']
    })
    try:
        config_str = valid_storage_config.format(
            key_id=config_aws["access_key_id"],
            bucket=bucket,
            access_key=config_aws["secret_access_key"])
        create_config(config_str, tmpdir)
        create_fake_build_artifacts(tmpdir)
        with tmpdir.as_cwd():
            assert backend.do_aws_cf_configure() == 0
        # TODO: add an assertion that the config that was resolved inside
        # do_aws_cf_configure ended up with the correct region where the
        # above testing bucket was created.
    finally:
        objects = [{'Key': o.key} for o in s3_bucket.objects.all()]
        # S3 rejects delete_objects() with an empty 'Objects' list
        # (MalformedXML), which would mask the real test failure — only
        # call it when something was actually uploaded.
        if objects:
            s3_bucket.delete_objects(Delete={'Objects': objects})
        s3_bucket.delete()
def test_do_aws_configure(release_config_aws, tmpdir, monkeypatch):
    """The AWS CF configure step exits 0 for the baseline AWS config."""
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
    create_config(aws_base_config, tmpdir)
    create_fake_build_artifacts(tmpdir)
    with tmpdir.as_cwd():
        assert backend.do_aws_cf_configure() == 0
def aws_cf_configure(config, tmpdir, monkeypatch):
    """Stage *config* plus fake build artifacts in *tmpdir*, run the AWS CF
    configure step there, and return its exit status."""
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
    create_config(config, tmpdir)
    create_fake_build_artifacts(tmpdir)
    with tmpdir.as_cwd():
        exit_code = backend.do_aws_cf_configure()
    return exit_code
def test_do_aws_configure(release_config_aws, tmpdir, monkeypatch):
    """Smoke-test ``backend.do_aws_cf_configure`` against the base AWS config."""
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
    create_config(aws_base_config, tmpdir)
    create_fake_build_artifacts(tmpdir)
    with tmpdir.as_cwd():
        result = backend.do_aws_cf_configure()
    assert result == 0
def test_do_aws_configure(tmpdir, monkeypatch):
    """Run ``do_aws_cf_configure`` against a hand-built config and artifact tree.

    Lays out ``genconf/config.yaml`` plus the bootstrap/complete artifact
    files the backend resolves via ``BOOTSTRAP_VARIANT``, then asserts the
    configure step exits 0.
    """
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')

    # Installer config.
    genconf_dir = tmpdir.join('genconf')
    genconf_dir.ensure(dir=True)
    genconf_dir.join('config.yaml').write(aws_base_config)

    # Fake bootstrap artifacts, keyed by the bootstrap id '12345'.
    artifact_dir = tmpdir.join('artifacts/bootstrap')
    artifact_dir.ensure(dir=True)
    artifact_dir.join('12345.bootstrap.tar.xz').write("compressed_bootstrap_contents")
    artifact_dir.join('12345.active.json').write("['a-package']")
    artifact_dir.join('test_variant.bootstrap.latest').write("12345")
    tmpdir.join('artifacts/complete/test_variant.complete.latest.json').write(
        '{"complete": "contents"}', ensure=True)

    with tmpdir.as_cwd():
        assert backend.do_aws_cf_configure() == 0
def aws_cf_configure(s3_bucket_name, config, config_aws, tmpdir, monkeypatch):
    """Run the AWS CF configure step with an S3 bucket provisioned for it.

    Creates *s3_bucket_name* in the configured region, stages *config* and
    fake build artifacts under *tmpdir*, runs the backend from inside that
    directory, and returns its exit status.  The bucket and its contents are
    always deleted, even if staging or the configure step raises.
    """
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
    session = gen.build_deploy.aws.get_test_session(config_aws)
    s3 = session.resource('s3')
    s3_bucket = s3.Bucket(s3_bucket_name)
    s3_bucket.create(CreateBucketConfiguration={'LocationConstraint': config_aws['region_name']})
    try:
        # Staging must happen inside the try: previously a failure in
        # create_config/create_fake_build_artifacts leaked the bucket.
        create_config(config, tmpdir)
        create_fake_build_artifacts(tmpdir)
        with tmpdir.as_cwd():
            return backend.do_aws_cf_configure()
    finally:
        objects = [{'Key': o.key} for o in s3_bucket.objects.all()]
        # delete_objects() rejects an empty 'Objects' list, so skip the
        # call when nothing was uploaded.
        if objects:
            s3_bucket.delete_objects(Delete={'Objects': objects})
        s3_bucket.delete()
def aws_cf_configure(s3_bucket_name, config, config_aws, tmpdir, monkeypatch):
    """Provision *s3_bucket_name*, stage *config* in *tmpdir*, and run
    ``backend.do_aws_cf_configure`` from there, returning its exit status.

    Cleanup of the bucket (objects first, then the bucket itself) is
    guaranteed by the ``finally`` block.
    """
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
    session = gen.build_deploy.aws.get_test_session(config_aws)
    s3 = session.resource('s3')
    s3_bucket = s3.Bucket(s3_bucket_name)
    s3_bucket.create(CreateBucketConfiguration={
        'LocationConstraint': config_aws['region_name']
    })
    try:
        # Moved inside the try so a staging failure cannot leak the bucket.
        create_config(config, tmpdir)
        create_fake_build_artifacts(tmpdir)
        with tmpdir.as_cwd():
            return backend.do_aws_cf_configure()
    finally:
        objects = [{'Key': o.key} for o in s3_bucket.objects.all()]
        # An empty 'Objects' list makes delete_objects() raise; only call
        # it when the run actually wrote objects.
        if objects:
            s3_bucket.delete_objects(Delete={'Objects': objects})
        s3_bucket.delete()
def test_do_aws_cf_configure_valid_storage_config(config_aws, tmpdir, monkeypatch):
    """``do_aws_cf_configure`` exits 0 when given a valid S3 storage config.

    Creates a uniquely named bucket, renders the storage config template
    against it, stages config + fake artifacts, and asserts exit status 0.
    The bucket is always cleaned up.
    """
    monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
    session = gen.installer.aws.get_test_session(config_aws)
    s3 = session.resource('s3')
    bucket = str(uuid.uuid4())
    s3_bucket = s3.Bucket(bucket)
    s3_bucket.create(CreateBucketConfiguration={'LocationConstraint': config_aws['region_name']})
    try:
        config_str = valid_storage_config.format(
            key_id=config_aws["access_key_id"],
            bucket=bucket,
            access_key=config_aws["secret_access_key"])
        # Bug fix: the rendered config was previously passed to
        # create_fake_build_artifacts(config_str, tmpdir) and never written
        # as the installer config at all.  Write the config first, then
        # fake the artifacts (matching the other variant of this test).
        create_config(config_str, tmpdir)
        create_fake_build_artifacts(tmpdir)
        with tmpdir.as_cwd():
            assert backend.do_aws_cf_configure() == 0
        # TODO: add an assertion that the config that was resolved inside
        # do_aws_cf_configure ended up with the correct region where the
        # above testing bucket was created.
    finally:
        objects = [{'Key': o.key} for o in s3_bucket.objects.all()]
        # delete_objects() fails on an empty 'Objects' list; guard it so
        # cleanup cannot mask the real test failure.
        if objects:
            s3_bucket.delete_objects(Delete={'Objects': objects})
        s3_bucket.delete()
dispatch_dict_simple = { 'version': (do_version, None, 'Print the DC/OS version'), 'web': ( dcos_installer.async_server.start, 'Starting DC/OS installer in web mode', 'Run the web interface'), 'genconf': ( lambda args: backend.do_configure(), 'EXECUTING CONFIGURATION GENERATION', 'Execute the configuration generation (genconf).'), 'validate-config': ( do_validate_config, 'VALIDATING CONFIGURATION', 'Validate the configuration for executing --genconf and deploy arguments in config.yaml'), 'aws-cloudformation': ( lambda args: backend.do_aws_cf_configure(), 'EXECUTING AWS CLOUD FORMATION TEMPLATE GENERATION', 'Generate AWS Advanced AWS CloudFormation templates using the provided config') } dispatch_dict_aio = { 'preflight': ( action_lib.run_preflight, 'EXECUTING_PREFLIGHT', 'Execute the preflight checks on a series of nodes.'), 'install-prereqs': ( action_lib.install_prereqs, 'EXECUTING INSTALL PREREQUISITES', 'Execute the preflight checks on a series of nodes.'), 'deploy': ( action_lib.install_dcos,