def test_pull_repo_https_credentials(self, mock_check_output, mock_check_call):
  """
  Tests pull_repo by mocking subprocess.check_output to return git https credentials.

  Args:
    mock_check_output: MagicMock, returns valid git responses in order of being called,
      with the repo coming from the ef_site_config.py

  Returns:
    None

  Raises:
    AssertionError if any of the assert checks fail
  """
  mock_check_output.side_effect = [
    "origin\thttps://" + EFConfig.EF_REPO + ".git",
    EFConfig.EF_REPO_BRANCH
  ]
  try:
    ef_utils.pull_repo()
  except RuntimeError as exception:
    self.fail("Exception occurred during test_pull_repo_https_credentials: " + exception.message)
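
# The @patch decorators for these test methods are not shown in this excerpt. Below is a minimal
# sketch of the likely wiring; the patch target strings are assumptions and depend on how
# ef_utils imports check_output/check_call. Decorators apply bottom-up, so the check_output patch
# fills the first mock argument and check_call the second, and each element of side_effect is
# consumed by one successive check_output call: first the remote listing, then the current branch.
import unittest

from mock import patch  # Python 2 codebase; use unittest.mock on Python 3

import ef_utils
from ef_config import EFConfig  # assumed import path for EFConfig


class TestPullRepoSketch(unittest.TestCase):
  @patch('ef_utils.check_call')    # assumed patch target
  @patch('ef_utils.check_output')  # assumed patch target
  def test_pull_repo_https_sketch(self, mock_check_output, mock_check_call):
    mock_check_output.side_effect = [
      "origin\thttps://" + EFConfig.EF_REPO + ".git",  # first call: git remotes
      EFConfig.EF_REPO_BRANCH                          # second call: current branch
    ]
    ef_utils.pull_repo()
    self.assertTrue(mock_check_call.called)  # the pull itself goes through check_call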
def main():
  # Fetch args and load context
  context = handle_args_and_set_context(sys.argv[1:])

  # Refresh from repo if necessary and possible (gets don't need service registry, sets do)
  if (context.rollback or context.value) and not (context.devel or getenv("JENKINS_URL", False)):
    print("Refreshing repo")
    try:
      pull_repo()
    except RuntimeError as error:
      fail("Error checking or pulling repo", error)

  # Sign on to AWS and create clients
  if context.whereami in ["ec2"]:
    # Always use instance credentials in EC2. One day we'll have "lambda" in there too, so use "in" w/ list
    aws_session_alias = None
  else:
    # Otherwise use local user credential matching the account alias
    aws_session_alias = context.account_alias

  # Make AWS clients
  try:
    context.set_aws_clients(create_aws_clients(EFConfig.DEFAULT_REGION, aws_session_alias, "ec2", "s3", "sts"))
  except RuntimeError:
    fail("Exception creating AWS client in region {} with aws account alias {} (None=instance credentials)"
         .format(EFConfig.DEFAULT_REGION, aws_session_alias))

  # Instantiate a versionresolver - we'll use some of its methods
  context._versionresolver = EFVersionResolver(context.aws_client())

  # Carry out the requested action
  if context.get:
    cmd_get(context)
  elif context.history:
    cmd_history(context)
  elif context.rollback:
    cmd_rollback(context)
  elif context.show:
    cmd_show(context)
  elif context.value:
    cmd_set(context)
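
# create_aws_clients() is not shown in this excerpt. Below is a minimal sketch of what main()
# relies on, assuming it returns a dict of boto3 clients keyed by service name plus the session
# itself under "SESSION" (as the ef-generate main() further down suggests), with profile=None
# meaning default/instance credentials; the real helper presumably wraps credential failures in
# the RuntimeError caught above.
import boto3


def create_aws_clients(region, profile, *client_names):
  """Sketch: build one boto3 client per name in `client_names`, in `region`.

  profile=None uses the default (instance) credentials; otherwise `profile` is a named
  profile in ~/.aws/credentials matching the account alias.
  """
  if profile is None:
    session = boto3.Session(region_name=region)
  else:
    session = boto3.Session(region_name=region, profile_name=profile)
  clients = {"SESSION": session}  # keep the session for callers that need boto3 resources
  for name in client_names:
    clients[name] = session.client(name)
  return clients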
def test_pull_repo_incorrect_repo(self, mock_check_output):
  """
  Tests pull_repo to see if it throws an exception when the supplied repo doesn't match
  the one in ef_site_config.py

  Args:
    mock_check_output: MagicMock, returns git responses with non matching repo names

  Returns:
    None

  Raises:
    AssertionError if any of the assert checks fail
  """
  mock_check_output.side_effect = [
    "[email protected]:company/wrong_repo.git "
    "[email protected]:company/wrong_repo.git"
  ]
  with self.assertRaises(RuntimeError) as exception:
    ef_utils.pull_repo()
  self.assertIn("Must be in", exception.exception.message)
def test_pull_repo_incorrect_branch(self, mock_check_output):
  """
  Tests pull_repo to see if it throws an error when the mocked check_output states it's on
  a branch other than the one specified in ef_site_config.py

  Args:
    mock_check_output: MagicMock, returns some valid git responses, with the repo coming from
      the ef_site_config.py, and then a non matching branch name

  Returns:
    None

  Raises:
    AssertionError if any of the assert checks fail
  """
  mock_check_output.side_effect = [
    "user@" + EFConfig.EF_REPO.replace("/", ":", 1) + ".git",
    "wrong_branch"
  ]
  with self.assertRaises(RuntimeError) as exception:
    ef_utils.pull_repo()
  self.assertIn("Must be on branch:", exception.exception.message)
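
# Taken together, the three tests above pin down the contract of ef_utils.pull_repo(): the git
# remotes must include EFConfig.EF_REPO (in https form, or in user@host:path ssh form with the
# first "/" replaced by ":"), otherwise it raises RuntimeError("Must be in ..."); the current
# branch must equal EFConfig.EF_REPO_BRANCH, otherwise RuntimeError("Must be on branch: ...");
# and only then does it pull. Below is a rough sketch of that behavior inferred from the mocks
# and assertions - the exact git commands are assumptions, not the actual implementation.
import subprocess

from ef_config import EFConfig  # assumed import path for the site config


def pull_repo():
  """Sketch of the behavior the tests above expect from ef_utils.pull_repo()."""
  # First mocked check_output call: the remote listing (e.g. `git remote -v`)
  remotes = subprocess.check_output(["git", "remote", "-v"])
  ssh_form = EFConfig.EF_REPO.replace("/", ":", 1)
  if EFConfig.EF_REPO not in remotes and ssh_form not in remotes:
    raise RuntimeError("Must be in repo: {}. Remotes were: {}".format(EFConfig.EF_REPO, remotes))
  # Second mocked check_output call: the current branch name
  branch = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
  if branch != EFConfig.EF_REPO_BRANCH:
    raise RuntimeError("Must be on branch: {}. Current branch: {}".format(EFConfig.EF_REPO_BRANCH, branch))
  # Only then pull; this is what check_call is mocked for in the https-credentials test
  subprocess.check_call(["git", "pull", "-q", "origin", EFConfig.EF_REPO_BRANCH])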
def main():
  context = handle_args_and_set_context(sys.argv[1:])

  # Argument sanity checks and contextual messages
  if context.commit and context.changeset:
    fail("Cannot use --changeset and --commit together")
  if context.changeset:
    print("=== CHANGESET ===\nCreating changeset only. See AWS GUI for changeset\n=== CHANGESET ===")
  elif not context.commit:
    print("=== DRY RUN ===\nValidation only. Use --commit to push template to CF\n=== DRY RUN ===")

  service_name = basename(splitext(context.template_file)[0])
  template_file_dir = dirname(context.template_file)
  # Parameter file may not exist, but compute the name it would have if it did
  parameter_file_dir = template_file_dir + "/../parameters"
  parameter_file = parameter_file_dir + "/" + service_name + ".parameters." + context.env_full + ".json"

  # If running in EC2, use instance credentials (i.e. profile = None)
  # otherwise, use local credentials with profile name in .aws/credentials == account alias name
  if context.whereami == "ec2":
    profile = None
  else:
    profile = context.account_alias

  # Get service registry and refresh repo if appropriate
  try:
    if not (context.devel or getenv("JENKINS_URL", False)):
      pull_repo()
    else:
      print("not refreshing repo because --devel was set or running on Jenkins")
  except Exception as error:
    fail("Error: ", error)

  # Service must exist in service registry
  if context.service_registry.service_record(service_name) is None:
    fail("service: {} not found in service registry: {}".format(service_name, context.service_registry.filespec))
  if context.env_full not in context.service_registry.valid_envs(service_name):
    fail("Invalid environment: {} for service_name: {}\nValid environments are: {}"
         .format(context.env_full, service_name, ", ".join(context.service_registry.valid_envs(service_name))))

  if context.verbose:
    print("service_name: {}".format(service_name))
    print("env: {}".format(context.env))
    print("env_full: {}".format(context.env_full))
    print("env_short: {}".format(context.env_short))
    print("template_file: {}".format(context.template_file))
    print("parameter_file: {}".format(parameter_file))
    if profile:
      print("profile: {}".format(profile))
    print("whereami: {}".format(context.whereami))
    print("service type: {}".format(context.service_registry.service_record(service_name)["type"]))

  template = resolve_template(
    template=context.template_file,
    profile=profile,
    env=context.env,
    region=EFConfig.DEFAULT_REGION,
    service=service_name,
    verbose=context.verbose
  )

  # Create clients - if accessing by role, profile should be None
  try:
    clients = create_aws_clients(EFConfig.DEFAULT_REGION, profile, "cloudformation")
  except RuntimeError as error:
    fail("Exception creating clients in region {} with profile {}".format(EFConfig.DEFAULT_REGION, profile), error)

  stack_name = context.env + "-" + service_name
  try:
    stack_exists = clients["cloudformation"].describe_stacks(StackName=stack_name)
  except botocore.exceptions.ClientError:
    stack_exists = False

  # Load parameters from file
  if isfile(parameter_file):
    parameters_template = resolve_template(
      template=parameter_file,
      profile=profile,
      env=context.env,
      region=EFConfig.DEFAULT_REGION,
      service=service_name,
      verbose=context.verbose
    )
    try:
      parameters = json.loads(parameters_template)
    except ValueError as error:
      fail("JSON error in parameter file: {}".format(parameter_file), error)
  else:
    parameters = []

  # Validate rendered template before trying the stack operation
  if context.verbose:
    print("Validating template")
  try:
    clients["cloudformation"].validate_template(TemplateBody=template)
  except botocore.exceptions.ClientError as error:
    fail("Template did not pass validation", error)

  print("Template passed validation")

  # DO IT
  try:
    if context.changeset:
      print("Creating changeset: {}".format(stack_name))
      clients["cloudformation"].create_change_set(
        StackName=stack_name,
        TemplateBody=template,
        Parameters=parameters,
        Capabilities=['CAPABILITY_IAM'],
        ChangeSetName=stack_name,
        ClientToken=stack_name
      )
    elif context.commit:
      if stack_exists:
        print("Updating stack: {}".format(stack_name))
        clients["cloudformation"].update_stack(
          StackName=stack_name,
          TemplateBody=template,
          Parameters=parameters,
          Capabilities=['CAPABILITY_IAM']
        )
      else:
        print("Creating stack: {}".format(stack_name))
        clients["cloudformation"].create_stack(
          StackName=stack_name,
          TemplateBody=template,
          Parameters=parameters,
          Capabilities=['CAPABILITY_IAM']
        )
      if context.poll_status:
        while True:
          stack_status = clients["cloudformation"].describe_stacks(StackName=stack_name)["Stacks"][0]["StackStatus"]
          if context.verbose:
            print("{}".format(stack_status))
          if stack_status.endswith('ROLLBACK_COMPLETE'):
            print("Stack went into rollback with status: {}".format(stack_status))
            sys.exit(1)
          elif re.match(r".*_COMPLETE(?!.)", stack_status) is not None:
            break
          elif re.match(r".*_FAILED(?!.)", stack_status) is not None:
            print("Stack failed with status: {}".format(stack_status))
            sys.exit(1)
          elif re.match(r".*_IN_PROGRESS(?!.)", stack_status) is not None:
            time.sleep(EFConfig.EF_CF_POLL_PERIOD)
    run_plugins(context, clients)
  except botocore.exceptions.ClientError as error:
    if error.response["Error"]["Message"] == "No updates are to be performed.":
      # Don't fail when there is no update to the stack
      print("No updates are to be performed.")
    else:
      fail("Error occurred when creating or updating stack", error)
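
# The parameter file, when present, must render (via resolve_template) to JSON that json.loads
# turns directly into the Parameters argument CloudFormation expects: a list of
# ParameterKey/ParameterValue pairs, which is why the no-file fallback above is an empty list.
# Illustrative example only (the keys and values are made up); this is what
# .../parameters/<service>.parameters.<env_full>.json should decode to:
EXAMPLE_PARAMETERS = [
  {"ParameterKey": "InstanceType", "ParameterValue": "t2.micro"},
  {"ParameterKey": "Env", "ParameterValue": "staging"}
]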
def main():
  global CONTEXT, CLIENTS, AWS_RESOLVER

  CONTEXT = handle_args_and_set_context(sys.argv[1:])

  if not (CONTEXT.devel or getenv("JENKINS_URL", False)):
    try:
      pull_repo()
    except RuntimeError as error:
      fail("Error checking or pulling repo", error)
  else:
    print("Not refreshing repo because --devel was set or running on Jenkins")

  # sign on to AWS and create clients and get account ID
  try:
    # If running in EC2, always use instance credentials. One day we'll have "lambda" in there too, so use "in" w/ list
    if CONTEXT.whereami == "ec2":
      CLIENTS = create_aws_clients(EFConfig.DEFAULT_REGION, None, "ec2", "iam", "kms")
      CONTEXT.account_id = str(json.loads(http_get_metadata('iam/info'))["InstanceProfileArn"].split(":")[4])
    else:
      # Otherwise, we use local user creds based on the account alias
      CLIENTS = create_aws_clients(EFConfig.DEFAULT_REGION, CONTEXT.account_alias, "ec2", "iam", "kms")
      CONTEXT.account_id = CLIENTS["SESSION"].resource('iam').CurrentUser().arn.split(':')[4]
  except RuntimeError:
    fail("Exception creating AWS clients in region {} with profile {}".format(
      EFConfig.DEFAULT_REGION, CONTEXT.account_alias))

  # Instantiate an AWSResolver to lookup AWS resources
  AWS_RESOLVER = EFAwsResolver(CLIENTS)

  # Show where we're working
  if not CONTEXT.commit:
    print("=== DRY RUN ===\nUse --commit to create roles and security groups\n=== DRY RUN ===")
  print("env: {}".format(CONTEXT.env))
  print("env_full: {}".format(CONTEXT.env_full))
  print("env_short: {}".format(CONTEXT.env_short))
  print("aws account profile: {}".format(CONTEXT.account_alias))
  print("aws account number: {}".format(CONTEXT.account_id))

  # Step through all services in the service registry
  for CONTEXT.service in CONTEXT.service_registry.iter_services():
    service_name = CONTEXT.service[0]
    target_name = "{}-{}".format(CONTEXT.env, service_name)
    sr_entry = CONTEXT.service[1]
    service_type = sr_entry['type']
    print_if_verbose("service: {} in env: {}".format(service_name, CONTEXT.env))

    # Is this service_type handled by this tool?
    if service_type not in SUPPORTED_SERVICE_TYPES:
      print_if_verbose("unsupported service type: {}".format(service_type))
      continue
    # Is the env valid for this service?
    if CONTEXT.env_full not in CONTEXT.service_registry.valid_envs(service_name):
      print_if_verbose("env: {} not valid for service {}".format(CONTEXT.env_full, service_name))
      continue
    # Is the service_type allowed in 'global'?
    if CONTEXT.env == "global" and service_type not in GLOBAL_SERVICE_TYPES:
      print_if_verbose("env: {} not valid for service type {}".format(CONTEXT.env, service_type))
      continue

    # 1. CONDITIONALLY MAKE ROLE AND/OR INSTANCE PROFILE FOR THE SERVICE
    # If service gets a role, create with either a custom or default AssumeRole policy document
    conditionally_create_role(target_name, sr_entry)
    # Instance profiles and security groups are not allowed in the global scope
    if CONTEXT.env != "global":
      conditionally_create_profile(target_name, service_type)
      # 2. SECURITY GROUP(S) FOR THE SERVICE : only some types of services get security groups
      conditionally_create_security_groups(CONTEXT.env, service_name, service_type)
    # 3. KMS KEY FOR THE SERVICE : only some types of services get kms keys
    conditionally_create_kms_key(target_name, service_type)
    # 4. INLINE SERVICE'S POLICIES INTO ROLE
    # only eligible service types with "policies" sections in the service registry get policies
    conditionally_inline_policies(target_name, sr_entry)

  # 5. Execute plugins
  run_plugins(context_obj=CONTEXT, boto3_clients=CLIENTS)

  print("Exit: success")
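
# http_get_metadata() is not shown in this excerpt. The in-EC2 account-ID lookup above relies on
# the EC2 instance metadata service: meta-data/iam/info returns a JSON document whose
# InstanceProfileArn field carries the account number as the fifth ":"-separated element of the
# ARN. A rough sketch under those assumptions (IMDSv1-style, no token handling; the signature is
# a guess, not the actual helper):
import urllib2  # Python 2 codebase; use urllib.request on Python 3

METADATA_BASE = "http://169.254.169.254/latest/meta-data/"


def http_get_metadata(path, timeout=1):
  """Sketch: fetch an instance metadata path such as 'iam/info'."""
  return urllib2.urlopen(METADATA_BASE + path, timeout=timeout).read()

# e.g. "arn:aws:iam::123456789012:instance-profile/some-role".split(":")[4] == "123456789012"
# account_id = json.loads(http_get_metadata("iam/info"))["InstanceProfileArn"].split(":")[4]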