def test_purge_bluegreen(get_blue_green_apps, cloud_connections,
                         get_autoscaling_group_and_processes_to_suspend,
                         suspend_autoscaling_group_processes,
                         flush_instances_update_autoscale, load_balancing):
    """Purgebluegreen on the offline (green) app must suspend its autoscale
    processes, flush its instances, re-register its LBs and destroy the
    temporary load balancer.

    All parameters are patched mocks injected by decorators outside this view.
    """
    # Set up mocks and variables
    green_app = get_test_application(name="test-app-green", _id='id_green',
                                     autoscale={'name': 'autoscale-green'})
    blue_app = get_test_application(name="test-app-blue", _id='id_blue',
                                    autoscale={'name': 'autoscale-blue'})
    connection_mock = MagicMock()
    connection_pool = MagicMock()
    # cloud_connections.get(...)(...) returns the pool; pool.get_connection() the connection
    cloud_connections.get.return_value.return_value = connection_pool
    connection_pool.get_connection.return_value = connection_mock
    worker = MagicMock()
    worker.app = green_app
    worker.log_file = LOG_FILE

    def assert_done(status, message=None):
        # The command must complete with a "done" status
        assert status == "done", "Status is {} and not done : {}".format(
            status, message)

    worker.update_status = assert_done

    # blue app is online and green app is offline
    get_blue_green_apps.return_value = (blue_app, green_app)
    load_balancing.get_lb_manager.return_value.list_lbs_from_autoscale.return_value = [
        'bgtmp-id_green'
    ]
    get_autoscaling_group_and_processes_to_suspend.return_value = (
        'autoscale-green', ['suspend_process'])

    # Launching command
    swap_cmd = Purgebluegreen(worker)
    swap_cmd.execute()

    # Check that everything has gone as planned
    assert get_blue_green_apps.called == 1
    get_autoscaling_group_and_processes_to_suspend.assert_called_once_with(
        connection_mock, green_app, LOG_FILE)
    suspend_autoscaling_group_processes.assert_called_once_with(
        connection_mock, 'autoscale-green', ['suspend_process'], LOG_FILE)
    flush_instances_update_autoscale.assert_called_once_with(
        connection_mock, connection_pool, green_app, LOG_FILE)
    load_balancing.get_lb_manager.return_value.register_lbs_into_autoscale.assert_called_once_with(
        'autoscale-green', ['bgtmp-id_green'], None, LOG_FILE)
    load_balancing.get_lb_manager.return_value.destroy_lb.assert_called_once_with(
        'bgtmp-id_green', LOG_FILE)
def test_purge_launch_configuration():
    """purge_launch_configuration deletes stale launch configurations and
    tolerates AWS reporting one of them as still in use (ResourceInUse)."""
    cloud_connection = MagicMock()
    connection = MagicMock()
    cloud_connection.get_connection.return_value = connection
    connection.get_paginator.return_value = get_aws_data_paginator(
        "autoscaling--describe-launch-configurations")

    in_use_lc = "launchconfig.test.eu-west-1.webfront.test-app.test3"

    def delete_lc(LaunchConfigurationName=None):
        # Simulate AWS refusing to delete a launch configuration still in use
        if LaunchConfigurationName == in_use_lc:
            raise ClientError({'Error': {'Code': 'ResourceInUse'}}, "message")

    connection.delete_launch_configuration.side_effect = delete_lc

    ret = purge_launch_configuration(cloud_connection, get_test_application(), 1)

    assert ret
    assert connection.delete_launch_configuration.call_count == 4
    expected_deletes = [
        call(LaunchConfigurationName="launchconfig.test.eu-west-1.webfront.test-app.test1"),
        call(LaunchConfigurationName="launchconfig.test.eu-west-1.webfront.test-app.test3"),
        call(LaunchConfigurationName="launchconfig.test.eu-west-1.webfront.test-app.test4"),
        call(LaunchConfigurationName="launchconfig.test.eu-west-1.webfront.test-app.test5"),
    ]
    connection.delete_launch_configuration.assert_has_calls(expected_deletes,
                                                            any_order=True)
def test_purge(lxd_client_cls):
    """With ami_retention=3 and six existing LXD images, purge_old_images
    must delete exactly three of them."""
    # Application context
    app = get_test_application()
    job = {
        "_id": "test_job_id",
        "app_id": "test_app_id",
        "command": "buildimage",
        "instance_type": "test_instance_type",
        "options": [False],  # Do not skip bootstrap
    }
    test_config = get_test_config(ami_retention=3)

    # Mocks: the same image mock is listed six times, so every delete()
    # call accumulates on that single mock
    lxd_client = mock.MagicMock()
    lxd_client_cls.return_value = lxd_client
    lxd_image = mock.MagicMock()
    lxd_client.images.all.return_value = [lxd_image] * 6

    # Purge images
    with mock.patch('ghost_tools.config', new=test_config):
        builder = LXDImageBuilder(app, job, None, LOG_FILE, test_config)
        builder.purge_old_images()

    # Test: 6 images minus a retention of 3 -> 3 deletions
    assert lxd_image.delete.call_count == 3
def test_buildimage_ami_error_with_lxd(awsimagebuilder_mock,
                                       lxdimagebuilder_mock,
                                       lxd_is_available_mock):
    """ Test build AWS AMI is failed """
    # Worker wired to a test application; update_status must report "failed"
    worker = MagicMock()
    worker.app = get_test_application()
    worker.log_file = LOG_FILE

    def expect_failed_status(status, message=None):
        assert status == "failed", "Status is {} and not failed: {}".format(
            status, message)

    worker.update_status = expect_failed_status
    lxd_is_available_mock.return_value = True

    # Run the command with the AWS builder signalling an error
    cmd = Buildimage(worker)
    cmd._update_app_ami = void
    cmd._update_container_source = void
    cmd._aws_image_builder.start_builder.return_value = "ERROR", "ERROR"
    cmd.execute()

    # Both builders must have been instantiated exactly once
    assert awsimagebuilder_mock.call_count == 1
    assert lxdimagebuilder_mock.call_count == 1
def test_buildimage_lxd(AWSImageBuilder_mock, LXDImageBuilder_mock,
                        lxd_is_available_mock):
    """ Test LXD Image Build """
    # Application configured with an LXC source container image
    app = get_test_application()
    app['build_infos']['source_container_image'] = 'dummy_lxc_source_image'

    worker = MagicMock()
    worker.app = app
    worker.log_file = LOG_FILE

    def expect_done_status(status, message=None):
        assert status == "done", "Status is {} and not done : {}".format(
            status, message)

    worker.update_status = expect_done_status
    lxd_is_available_mock.return_value = True

    # Run the command with a successful AWS builder result
    cmd = Buildimage(worker)
    cmd._update_app_ami = void
    cmd._update_container_source = void
    cmd._aws_image_builder.start_builder.return_value = "ami_id", "ami_name"
    cmd.execute()

    # Both the AWS and the LXD builders must have been instantiated
    assert AWSImageBuilder_mock.call_count == 1
    assert LXDImageBuilder_mock.call_count == 1
def test_buildimage_ami(awsimagebuilder_mock, lxdimagebuilder_mock,
                        lxd_is_available_mock):
    """ Test AWS AMI basic ok """
    worker = MagicMock()
    worker.app = get_test_application()
    worker.log_file = LOG_FILE

    def expect_done_status(status, message=None):
        assert status == "done", "Status is {} and not done : {}".format(
            status, message)

    worker.update_status = expect_done_status
    # LXD is unavailable: only the AWS path should run
    lxd_is_available_mock.return_value = False

    cmd = Buildimage(worker)
    cmd._update_app_ami = void
    cmd._aws_image_builder.start_builder.return_value = "ami_id", "ami_name"
    cmd.execute()

    # AWS builder used once; LXD builder never touched
    assert awsimagebuilder_mock.call_count == 1
    assert lxdimagebuilder_mock.call_count == 0
def test_executescript_cmd_deep( cloud_connections, get_ghost_env_variables, HostDeploymentManager, ): """ Test standard execution flow (with Serial option), deep dive on _exec_script flow """ # Set up mocks and variables test_app = get_test_application() connection_mock = MagicMock() connection_pool = MagicMock() cloud_connections.get.return_value.return_value = connection_pool connection_pool.get_connection.return_value = connection_mock worker = MagicMock() worker.app = test_app worker.log_file = LOG_FILE worker._config = {'enable_executescript_command': 'true'} worker.job = { 'options': [ get_dummy_bash_script(True), '', 'serial', '1by1', ], '_id': '42', 'user': '******' } get_ghost_env_variables.return_value = {} def assert_done(status, message=None): assert status == "done", "Status is {} and not done : {}".format( status, message) worker.update_status = assert_done # Launching command cmd = Executescript(worker) cmd.execute() # Check that everything has gone as planned assert get_ghost_env_variables.called == 1 assert HostDeploymentManager.called == 1 get_ghost_env_variables.assert_called_once_with(test_app, None, worker.job['user']) HostDeploymentManager.assert_called_once_with( connection_pool, test_app, None, LOG_FILE, test_app['safe-deployment'], 'serial', 'executescript', { 'script': get_dummy_bash_script(), 'context_path': '/tmp', 'sudoer_uid': 0, 'jobid': worker.job['_id'], 'env_vars': {}, })
def test_command_createinstance_available():
    """createinstance.is_available() is truthy only when the app has an 'ami'."""
    app = get_test_application(name="test-app", _id='id')
    # With an AMI configured, the command is available.
    # Truthiness assertion instead of `== True` (PEP 8 E712).
    ret = createinstance.is_available(app)
    assert ret
    # Without an AMI the command must be unavailable
    del app['ami']
    ret = createinstance.is_available(app)
    assert not ret
def test_command_updateautoscaling_available():
    """updateautoscaling.is_available() requires a non-empty autoscale name."""
    app = get_test_application(name="test-app", _id='id')
    # Truthiness assertion instead of `== True` (PEP 8 E712)
    ret = updateautoscaling.is_available(app)
    assert ret
    # Empty autoscale name -> unavailable
    app['autoscale']['name'] = ''
    ret = updateautoscaling.is_available(app)
    assert not ret
    # Missing autoscale key entirely -> unavailable
    del app['autoscale']
    ret = updateautoscaling.is_available(app)
    assert not ret
def test_command_purge_bg_available(ghost_has_blue_green_enabled):
    """purgebluegreen.is_available() needs blue/green enabled globally and,
    when an app is given, blue/green configured on that app."""
    ghost_has_blue_green_enabled.return_value = True
    app = get_test_application(name="test-app", _id='id')
    # No app given: availability follows the global flag.
    # Truthiness assertions instead of `== True/False` (PEP 8 E712).
    ret = purgebluegreen.is_available(None)
    assert ret
    # App without blue_green configuration is not eligible
    ret = purgebluegreen.is_available(app)
    assert not ret
    # App with blue_green configuration becomes eligible
    app['blue_green'] = {'color': 'blue'}
    ret = purgebluegreen.is_available(app)
    assert ret
    # Global flag disabled turns the command off entirely
    ghost_has_blue_green_enabled.return_value = False
    ret = purgebluegreen.is_available(None)
    assert not ret
def test_executescript_cmd(cloud_connections): """ Test standard execution flow (with Serial option) """ # Set up mocks and variables test_app = get_test_application() connection_mock = MagicMock() connection_pool = MagicMock() cloud_connections.get.return_value.return_value = connection_pool connection_pool.get_connection.return_value = connection_mock worker = MagicMock() worker.app = test_app worker.log_file = LOG_FILE worker._config = {'enable_executescript_command': 'true'} worker.job = { 'options': [ get_dummy_bash_script(True), '', 'serial', '1by1', ], '_id': '42', 'user': '******' } def assert_done(status, message=None): assert status == "done", "Status is {} and not done : {}".format( status, message) worker.update_status = assert_done def assert_exec(script, module_name, fabric_execution_strategy, safe_deployment_strategy): assert fabric_execution_strategy == 'serial' and safe_deployment_strategy == '1by1',\ "fabric_execution_strategy {} or safe_deployment_strategy {} is not valid".format(fabric_execution_strategy, safe_deployment_strategy) # Launching command cmd = Executescript(worker) cmd._exec_script = assert_exec cmd.execute()
def test_executescript_cmd_abort_disabled():
    """ Test missing mandatory options """
    # Worker without job options or config: the command must abort
    worker = MagicMock()
    worker.app = get_test_application()
    worker.log_file = LOG_FILE

    def expect_aborted_status(status, message=None):
        assert status == "aborted", "Status is {} and not done : {}".format(
            status, message)

    worker.update_status = expect_aborted_status

    # Launching command
    Executescript(worker).execute()
def test_executescript_cmd_single_host(cloud_connections):
    """ Test basic flow with 'single' Host option """
    # Cloud connection mocks
    connection_mock = MagicMock()
    connection_pool = MagicMock()
    cloud_connections.get.return_value.return_value = connection_pool
    connection_pool.get_connection.return_value = connection_mock

    worker = MagicMock()
    worker.app = get_test_application()
    worker.log_file = LOG_FILE
    worker._config = {'enable_executescript_command': 'true'}
    # job options: [script, module, 'single' mode, target host IP]
    worker.job = {
        'options': [
            get_dummy_bash_script(True),
            '',
            'single',
            '10.0.0.1',
        ],
        '_id': '42',
        'user': '******'
    }

    def expect_done_status(status, message=None):
        assert status == "done", "Status is {} and not done : {}".format(
            status, message)

    worker.update_status = expect_done_status

    def check_single_host(script, module_name, single_host_ip):
        # The requested host IP must be forwarded unchanged
        assert single_host_ip == '10.0.0.1', "Single_host_ip {} is not valid".format(
            single_host_ip)

    # Launching command with the single-host executor stubbed out
    cmd = Executescript(worker)
    cmd._exec_script_single_host = check_single_host
    cmd.execute()
def test_build_image(lxd_client_cls):
    """End-to-end LXDImageBuilder.start_builder() check: container creation
    config, profile devices, hook/provisioner executions and publish call.

    lxd_client_cls is a patched mock of the pylxd client class.
    """
    # Application context
    app = get_test_application()
    job = {
        "_id": "test_job_id",
        "app_id": "test_app_id",
        "command": "buildimage",
        "instance_type": "test_instance_type",
        "options": [False]  # Do not skip bootstrap
    }
    test_config = get_test_config(
        features_provisioners={
            'ansible': {
                'git_revision': 'master',
                'git_repo': 'my_ansible_repo',
                'base_playbook_file': 'tests/provisioners_data/base_playbook.yml',
                'base_playbook_requirements_file': 'tests/provisioners_data/base_requirements.yml',
            }
        })

    # Mocks
    lxd_client = mock.MagicMock()
    lxd_client_cls.return_value = lxd_client
    lxd_containers_mock = lxd_client.containers
    lxd_profiles_mock = lxd_client.profiles
    lxd_container_mock = mock.MagicMock()
    lxd_client.containers.create.return_value = lxd_container_mock
    # Every container.execute() call reports success (exit code 0)
    valid_execution_mock = mock.MagicMock()
    valid_execution_mock.exit_code = 0
    valid_execution_mock.stdout = 'STDOUT'
    valid_execution_mock.stderr = 'STDERR'
    lxd_container_mock.execute.return_value = valid_execution_mock
    lxd_client.images.all.return_value = []
    venv_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                            '.tox/py27')
    venv_bin_dir = os.path.join(venv_dir, 'bin')

    # Build image
    with mock.patch('ghost_tools.config', new=test_config):
        image_builder = LXDImageBuilder(app, job, None, LOG_FILE, test_config)
        image_builder.set_source_hooks('/source-hook-path')
        image_builder.start_builder()

    # Test
    container_name = image_builder._container_name
    assert container_name.startswith("ami-test-eu-west-1-webfront-test-app-")
    # Expected pylxd container creation payload
    expected_container_config = {
        'source': {
            "type": "image",
            "protocol": "lxd",
            "mode": "pull",
            "fingerprint": "lxd-container-image-test",
            "server": "http://lxd-image-endpoint:1234",
        },
        'config': {
            "security.privileged": 'True'
        },
        'ephemeral': False,
        'name': container_name,
        'profiles': ["default", container_name],
    }
    lxd_containers_mock.create.assert_called_once_with(
        expected_container_config, wait=True)
    lxd_container_mock.start.assert_called_once_with(wait=True)
    lxd_container_mock.stop.assert_called_once_with(wait=True)
    # Pre/post buildimage hooks must run inside the container
    lxd_container_mock.execute.assert_any_call(
        ["sh", "/ghost/hook-pre_buildimage"],
        image_builder._get_ghost_env_vars())
    lxd_container_mock.execute.assert_any_call(
        ["sh", "/ghost/hook-post_buildimage"],
        image_builder._get_ghost_env_vars())
    # Ansible and Salt provisioners must both be invoked
    lxd_container_mock.execute.assert_any_call([
        os.path.join(venv_bin_dir, "ansible-playbook"), "-i", "localhost,",
        "--connection=local", "/srv/ansible/main.yml", "-v"
    ])
    lxd_container_mock.execute.assert_any_call([
        "salt-call", "state.highstate", "--file-root=/srv/salt/salt",
        "--pillar-root=/srv/salt/pillar", "--local", "-l", "info"
    ])
    # Disk devices mounted into the build container via its LXD profile
    expected_devices_config = {
        'venv': {
            'path': venv_dir,
            'source': venv_dir,
            'type': 'disk',
        },
        'salt': {
            'path': '/srv/salt',
            'source': "/tmp/ghost-features-provisioner/salt-test_job_id",
            'type': 'disk'
        },
        'ansible': {
            'path': '/srv/ansible',
            'source': "/tmp/ghost-features-provisioner/ansible-test_job_id",
            'type': 'disk'
        },
        'hooks': {
            'path': "/ghost",
            'source': "/source-hook-path",
            'type': 'disk'
        },
    }
    lxd_profiles_mock.create.assert_called_once_with(
        container_name, devices=expected_devices_config)
    lxd_container_mock.publish.assert_called_once_with(wait=True)
def test_prepare_bluegreen(get_blue_green_apps, cloud_connections,
                           get_autoscaling_group_and_processes_to_suspend,
                           suspend_autoscaling_group_processes,
                           resume_autoscaling_group_processes,
                           update_auto_scale, load_balancing):
    """Preparebluegreen on the offline (green) app must suspend/resume its
    autoscale processes, copy the online LB into a temporary one and register
    it into the green autoscale group.

    All parameters are patched mocks injected by decorators outside this view.
    """
    # Set up mocks and variables
    green_app = get_test_application(name="test-app-green", _id='id_green',
                                     autoscale={'name': 'autoscale-green'})
    blue_app = get_test_application(name="test-app-blue", _id='id_blue',
                                    autoscale={'name': 'autoscale-blue'})
    connection_mock = MagicMock()
    connection_pool = MagicMock()
    cloud_connections.get.return_value.return_value = connection_pool
    connection_pool.get_connection.return_value = connection_mock
    worker = MagicMock()
    worker.app = green_app
    worker.log_file = LOG_FILE

    def assert_done(status, message=None):
        assert status == "done", "Status is {} and not done : {}".format(
            status, message)

    worker.update_status = assert_done

    # blue app is online and green app is offline
    get_blue_green_apps.return_value = (blue_app, green_app)
    load_balancing.get_lb_manager.return_value.list_lbs_from_autoscale.return_value = [
        'elb_online'
    ]

    def get_asgapts_behavior(as_conn, app, log_file):
        # Return per-app autoscale group name and processes to suspend
        if app == green_app:
            return 'autoscale-green', ['suspend_process']
        if app == blue_app:
            return 'autoscale-blue', []
        raise Exception(
            "get_autoscaling_group_and_processes_to_suspend : application in parameter is not correct"
        )

    get_autoscaling_group_and_processes_to_suspend.side_effect = get_asgapts_behavior

    # Launching command
    swap_cmd = Preparebluegreen(worker)
    swap_cmd.execute()

    # Check that everything has gone as planned
    load_balancing.get_lb_manager.assert_called_once_with(
        connection_pool, "eu-west-1", 'elb')
    assert get_blue_green_apps.called == 1
    suspend_autoscaling_group_processes.assert_called_once_with(
        connection_mock, 'autoscale-green', ['suspend_process'], LOG_FILE)
    resume_autoscaling_group_processes.assert_called_once_with(
        connection_mock, 'autoscale-green', ['suspend_process'], LOG_FILE)
    update_auto_scale.assert_called_once_with(connection_pool, green_app,
                                              None, LOG_FILE,
                                              update_as_params=True)
    # The online LB is copied into a temporary, tagged blue/green LB
    load_balancing.get_lb_manager.return_value.copy_lb.assert_called_once_with(
        'bgtmp-id_green', 'elb_online', {
            'bluegreen-temporary': 'true',
            'app_id': 'id_green'
        }, LOG_FILE)
    load_balancing.get_lb_manager.return_value.register_lbs_into_autoscale.assert_called_once_with(
        'autoscale-green', [], ['bgtmp-id_green'], LOG_FILE)
def test_build_image_ansible(packer_run_packer_cmd, gcall,
                             provisioner_get_local_repo_path):
    """AWSImageBuilder with an ansible-only provisioner: checks the generated
    ansible requirements/playbook files and the full packer JSON config.

    Fix: use yaml.safe_load instead of yaml.load — calling yaml.load without
    an explicit Loader is deprecated and unsafe; these are plain YAML files.
    """
    # Application context
    app = get_test_application()
    job = {
        "_id": "test_job_id",
        "app_id": "test_app_id",
        "command": "buildimage",
        "instance_type": "test_instance_type",
        "options": [False]  # Do not skip bootstrap
    }
    test_config = get_test_config(
        features_provisioners={
            'ansible': {
                'git_revision': 'master',
                'git_repo': 'my_ansible_repo',
                'base_playbook_file': 'tests/provisioners_data/base_playbook.yml',
                'base_playbook_requirements_file': 'tests/provisioners_data/base_requirements.yml',
            }
        })
    # Keep only the ansible provisioner for this scenario
    del test_config['features_provisioners']['salt']

    # Mocks
    packer_run_packer_cmd.return_value = (0, "something:test_ami_id")
    tmp_dir = tempfile.mkdtemp()
    venv_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                            '.tox/py27/bin/')
    shutil.copyfile(
        os.path.join(os.path.dirname(__file__), 'provisioners_data',
                     'requirements.yml'),
        os.path.join(tmp_dir, 'requirements.yml'))
    provisioner_get_local_repo_path.return_value = tmp_dir

    # Build image
    with mock.patch('ghost_tools.config', new=test_config):
        image_builder = AWSImageBuilder(app, job, None, LOG_FILE, test_config)
        ami_id, ami_name = image_builder.start_builder()

    # Test
    assert ami_id == "test_ami_id"
    assert ami_name.startswith("ami.test.eu-west-1.webfront.test-app.")
    gcall.assert_called_once_with(
        "{0}ansible-galaxy install -r {1}/requirement_app.yml -p {1}/roles".
        format(venv_dir, tmp_dir), 'Ansible - ansible-galaxy command',
        LOG_FILE)
    with open(os.path.join(PACKER_JSON_PATH, job['_id'], 'aws_builder.json'),
              'r') as f:
        # Verify generated ansible files
        with open(os.path.join(tmp_dir, 'requirement_app.yml'), 'r') as f2:
            requirement_app = yaml.safe_load(f2)
            assert requirement_app == [{
                "src": "base-role-src",
                "version": "base-role-version"
            }, {
                "name": "feature-ansible",
                "scm": "test-scm",
                "src": "test-src",
                "version": "test-version"
            }]
        with open(os.path.join(tmp_dir, 'main.yml'), 'r') as f3:
            playbook = yaml.safe_load(f3)
            assert playbook == [{
                "name": "Base playbook",
                "hosts": "all",
                "roles": ['ansible-base-role']
            }, {
                "name": "Ghost application features",
                "hosts": "all",
                "roles": [{
                    "role": "feature-ansible",
                    "feature-property": "property"
                }]
            }]
        # Verify packer config
        packer_config = json.load(f)
        packer_config_reference = {
            "provisioners": [{
                "type": "shell",
                "environment_vars": [
                    "EMPTY_ENV=",
                    "GHOST_APP=test-app",
                    "GHOST_ENV=test",
                    "GHOST_ENV_COLOR=",
                    "GHOST_ROLE=webfront",
                ],
                "script": "/ghost/test-app/test/webfront/hook-pre_buildimage"
            }, {
                "type": "ansible",
                "playbook_file": os.path.join(tmp_dir, "main.yml"),
                "ansible_env_vars": [
                    "ANSIBLE_HOST_KEY_CHECKING=False", "ANSIBLE_FORCE_COLOR=1",
                    "PYTHONUNBUFFERED=1",
                    "ANSIBLE_ROLES_PATH={}".format(tmp_dir)
                ],
                "user": "******",
                "command": os.path.join(venv_dir, "ansible-playbook"),
                "extra_arguments": ['-v'],
            }, {
                "type": "shell",
                "environment_vars": [
                    "EMPTY_ENV=",
                    "GHOST_APP=test-app",
                    "GHOST_ENV=test",
                    "GHOST_ENV_COLOR=",
                    "GHOST_ROLE=webfront",
                ],
                "script": "/ghost/test-app/test/webfront/hook-post_buildimage"
            }],
            "builders": [{
                "ami_block_device_mappings": [],
                "launch_block_device_mappings": [],
                "source_ami": "ami-source",
                "tags": {
                    "Name": "ec2.name.test",
                    "tag-name": "tag-value",
                },
                "subnet_id": "subnet-test",
                "ssh_username": "******",
                "ssh_interface": "private_ip",
                "region": "eu-west-1",
                "security_group_ids": ["sg-test"],
                "ami_name": ami_name,
                "iam_instance_profile": "iam.profile.test",
                "instance_type": "test_instance_type",
                "associate_public_ip_address": True,
                "vpc_id": "vpc-test",
                "type": "amazon-ebs",
                "ssh_pty": True
            }]
        }
        assert packer_config == packer_config_reference
def test_executescript_cmd_single_host_deep( get_ghost_env_variables, get_ec2_instance, cloud_connections, launch_executescript, ): """ Test _exec_script_single_host internal call when using option "single" Host """ # Set up mocks and variables test_app = get_test_application() connection_mock = MagicMock() connection_pool = MagicMock() cloud_connections.get.return_value.return_value = connection_pool connection_pool.get_connection.return_value = connection_mock worker = MagicMock() worker.app = test_app worker.log_file = LOG_FILE worker._config = {'enable_executescript_command': 'true'} worker.job = { 'options': [ get_dummy_bash_script(True), '', 'single', '10.0.0.1', ], '_id': '42', 'user': '******' } def assert_done(status, message=None): assert status == "done", "Status is {} and not done : {}".format( status, message) worker.update_status = assert_done get_ec2_instance.return_value = type( 'X', (object, ), { 'id': 'myid', 'vpc_id': test_app['vpc_id'], 'private_ip_address': '10.0.0.1', 'tags': { 'app': test_app['name'], 'env': test_app['env'], 'role': test_app['role'], }, })() get_ghost_env_variables.return_value = {} # Launching command cmd = Executescript(worker) cmd.execute() # Check that everything has gone as planned assert get_ec2_instance.called == 1 assert launch_executescript.called == 1 assert get_ghost_env_variables.called == 1 get_ghost_env_variables.assert_called_once_with(test_app, None, worker.job['user']) get_ec2_instance.assert_called_once_with( connection_pool, test_app['region'], { 'private-ip-address': '10.0.0.1', 'vpc-id': test_app['vpc_id'], }) launch_executescript.assert_called_once_with(test_app, get_dummy_bash_script(), '/tmp', 0, '42', ['10.0.0.1'], 'serial', LOG_FILE, {})
def test_build_image_custom_envvars(packer_run_packer_cmd,
                                    provisioner_get_local_repo_path):
    """AWSImageBuilder must propagate custom app env vars (including
    non-ASCII values) into the shell provisioners of the packer config."""
    # Application context: two custom env vars, one with accented unicode
    app = get_test_application(env_vars=[{
        "var_value": u"hello world !",
        "var_key": "TESTVAR"
    }, {
        "var_value": u"ên français avec des accents! héhé Ã@¤.",
        "var_key": "C_USTom2"
    }])
    job = {
        "_id": "test_job_id",
        "app_id": "test_app_id",
        "command": "buildimage",
        "instance_type": "test_instance_type",
        "options": [False]  # Do not skip bootstrap
    }
    test_config = get_test_config()

    # Mocks
    packer_run_packer_cmd.return_value = (0, "something:test_ami_id")
    tmp_dir = tempfile.mkdtemp()
    provisioner_get_local_repo_path.return_value = tmp_dir

    # Build image
    with mock.patch('ghost_tools.config', new=test_config):
        image_builder = AWSImageBuilder(app, job, None, LOG_FILE, test_config)
        ami_id, ami_name = image_builder.start_builder()

    # Test
    assert ami_id == "test_ami_id"
    assert ami_name.startswith("ami.test.eu-west-1.webfront.test-app.")
    with open(os.path.join(PACKER_JSON_PATH, job['_id'], 'aws_builder.json'),
              'r') as f:
        # Verify packer config
        packer_config = json.load(f)
        packer_config_reference = {
            "provisioners": [{
                "type": "shell",
                # Env vars are sorted; unicode values survive JSON round-trip
                "environment_vars": [
                    u"C_USTom2=\u00ean fran\u00e7ais avec des accents! h\u00e9h\u00e9 \u00c3@\u00a4.",
                    "GHOST_APP=test-app", "GHOST_ENV=test",
                    "GHOST_ENV_COLOR=", "GHOST_ROLE=webfront",
                    "TESTVAR=hello world !"
                ],
                "script": "/ghost/test-app/test/webfront/hook-pre_buildimage"
            }, {
                "skip_bootstrap": False,
                "log_level": "info",
                "local_state_tree": os.path.join(tmp_dir, 'salt'),
                "local_pillar_roots": os.path.join(tmp_dir, 'pillar'),
                "type": "salt-masterless"
            }, {
                "type": "shell",
                "environment_vars": [
                    u"C_USTom2=\u00ean fran\u00e7ais avec des accents! h\u00e9h\u00e9 \u00c3@\u00a4.",
                    "GHOST_APP=test-app", "GHOST_ENV=test",
                    "GHOST_ENV_COLOR=", "GHOST_ROLE=webfront",
                    "TESTVAR=hello world !"
                ],
                "script": "/ghost/test-app/test/webfront/hook-post_buildimage"
            }, {
                "inline": [
                    "sudo rm -rf /srv/salt || echo 'Salt - no cleanup salt'",
                    "sudo rm -rf /srv/pillar || echo 'Salt - no cleanup pillar'"
                ],
                "type": "shell"
            }],
            "builders": [{
                "ami_block_device_mappings": [],
                "launch_block_device_mappings": [],
                "source_ami": "ami-source",
                "tags": {
                    "Name": "ec2.name.test",
                    "tag-name": "tag-value",
                },
                "subnet_id": "subnet-test",
                "ssh_username": "******",
                "ssh_interface": "private_ip",
                "region": "eu-west-1",
                "security_group_ids": ["sg-test"],
                "ami_name": ami_name,
                "iam_instance_profile": "iam.profile.test",
                "instance_type": "test_instance_type",
                "associate_public_ip_address": True,
                "vpc_id": "vpc-test",
                "type": "amazon-ebs",
                "ssh_pty": True
            }]
        }
        assert packer_config == packer_config_reference
def test_swap_bluegreen_clb(get_blue_green_apps, cloud_connections,
                            get_autoscaling_group_and_processes_to_suspend,
                            suspend_autoscaling_group_processes,
                            resume_autoscaling_group_processes,
                            load_balancing):
    """Swapbluegreen over classic ELBs must swap instance registrations
    between the online and temporary LBs and flip each app's
    blue_green['is_online'] flag.

    Fixes: truthiness assertions instead of `== True/False` (PEP 8 E712),
    boolean check of Mock.called instead of `== 1`, and named `any_order`
    arguments to assert_has_calls for readability. Behaviour under test is
    unchanged.
    """
    # Set up mocks and variables
    green_app = get_test_application(name="test-app-green", _id='id_green',
                                     autoscale={'name': 'autoscale-green'},
                                     blue_green={
                                         "is_online": True,
                                         "color": "green"
                                     })
    blue_app = get_test_application(name="test-app-blue", _id='id_blue',
                                    autoscale={'name': 'autoscale-blue'},
                                    blue_green={
                                        "is_online": False,
                                        "color": "blue"
                                    })
    connection_mock = MagicMock()
    connection_pool = MagicMock()
    cloud_connections.get.return_value.return_value = connection_pool
    connection_pool.get_connection.return_value = connection_mock
    worker = MagicMock()
    worker.app = green_app
    worker.log_file = LOG_FILE
    load_balancing.LB_TYPE_AWS_CLB = 'aws_clb'

    def assert_done(status, message=None):
        assert status == "done", "Status is {} and not done : {}".format(
            status, message)

    worker.update_status = assert_done

    # blue app is online and green app is offline
    get_blue_green_apps.return_value = (blue_app, green_app)

    def get_isag_behavior(as_group, log_file):
        # Instance status per autoscale group, keyed by LB name
        if as_group == 'autoscale-green':
            return {'elb_temp': {'instance_green1': 'inservice'}}
        if as_group == 'autoscale-blue':
            return {'elb_online': {'instance_blue1': 'inservice'}}
        raise Exception(
            "get_elb_instance_status_autoscaling_group : auto scale parameter is not correct"
        )

    load_balancing.get_lb_manager.return_value.get_instances_status_from_autoscale.side_effect = get_isag_behavior

    def get_asgapts_behavior(as_conn, app, log_file):
        # Autoscale group name and processes to suspend, per app
        if app == green_app:
            return 'autoscale-green', ['suspend_process']
        if app == blue_app:
            return 'autoscale-blue', []
        raise Exception(
            "get_autoscaling_group_and_processes_to_suspend : application in parameter is not correct"
        )

    get_autoscaling_group_and_processes_to_suspend.side_effect = get_asgapts_behavior

    # Launching command
    swap_cmd = Swapbluegreen(worker)
    swap_cmd.execute()

    # Check that everything has gone as planned
    load_balancing.get_lb_manager.assert_called_once_with(
        connection_pool, "eu-west-1", 'elb')
    assert get_blue_green_apps.called
    # After the swap, blue is offline and green is online
    assert not blue_app.get('blue_green').get('is_online')
    assert green_app.get('blue_green').get('is_online')
    assert suspend_autoscaling_group_processes.call_count == 2
    suspend_autoscaling_group_processes.assert_has_calls([
        call(connection_mock, 'autoscale-green', ['suspend_process'],
             LOG_FILE),
        call(connection_mock, 'autoscale-blue', [], LOG_FILE)
    ], any_order=True)
    assert resume_autoscaling_group_processes.call_count == 2
    resume_autoscaling_group_processes.assert_has_calls([
        call(connection_mock, 'autoscale-green', ['suspend_process'],
             LOG_FILE),
        call(connection_mock, 'autoscale-blue', [], LOG_FILE)
    ], any_order=True)
    # Registrations are crossed: green instances to the online LB first
    assert load_balancing.get_lb_manager.return_value.register_all_instances_to_lbs.call_count == 2
    load_balancing.get_lb_manager.return_value.register_all_instances_to_lbs.assert_has_calls(
        [
            call(['elb_online'], {'elb_temp': {
                'instance_green1': 'inservice'
            }}, LOG_FILE),
            call(['elb_temp'], {'elb_online': {
                'instance_blue1': 'inservice'
            }}, LOG_FILE)
        ], any_order=False)
    assert load_balancing.get_lb_manager.return_value.deregister_all_instances_from_lbs.call_count == 2
    load_balancing.get_lb_manager.return_value.deregister_all_instances_from_lbs.assert_has_calls(
        [
            call({'elb_online': {
                'instance_blue1': 'inservice'
            }}, LOG_FILE),
            call({'elb_temp': {
                'instance_green1': 'inservice'
            }}, LOG_FILE)
        ], any_order=False)