def create_cfn():
    """Load data.yaml, convert it to JSON, and print the AccessLogsBucket resource."""
    # Use a context manager so the handle is closed even on error
    # (the original opened the file and never closed it).
    with open(r'data.yaml') as template_file:
        template_str = template_file.read()
    template = json.loads(to_json(template_str))
    # NOTE(review): assumes the template declares an "AccessLogsBucket"
    # resource -- raises KeyError otherwise; confirm against data.yaml.
    accessLogsBucket = template["Resources"]["AccessLogsBucket"]
    print(accessLogsBucket)
def test_to_json_with_yaml(self):
    """to_json should convert the fixture YAML into the expected JSON document."""
    converted = cfn_flip.to_json(self.input_yaml)
    self.assertDictEqual(json.loads(converted), self.parsed_json)
def test_to_json_with_yaml(self):
    """Verify to_json turns the sample YAML into the expected parsed JSON."""
    result = json.loads(cfn_flip.to_json(self.input_yaml))
    self.assertDictEqual(result, self.parsed_json)
def get_template(stack, change_set=None):
    """Fetch a stack's template body, optionally for a specific change set.

    :param stack: stack name or ARN
    :param change_set: optional change set name/ARN
    :return: the template as a dict (string bodies are converted via cfn_flip)
    """
    if change_set:
        # BUG FIX: the original passed the undefined name `changeset`,
        # raising NameError whenever a change set was supplied.
        template = cfn.get_template(StackName=stack, ChangeSetName=change_set)['TemplateBody']
    else:
        template = cfn.get_template(StackName=stack)['TemplateBody']
    # get_template may return either a parsed dict or raw template text.
    if isinstance(template, str):
        template = json.loads(to_json(template))
    return template
def test_generate_service_with_new_alb(self, mock_region_service, mock_get_account_id, mock_build_config):
    """Generated service template with a newly-created ALB must match the
    checked-in expected template.

    The three mock_* parameters are injected by patch decorators on the
    class/method (not visible here) -- TODO confirm decorator order matches.
    """
    environment = 'staging'
    application_name = 'dummy'
    # Service configuration is fully mocked; get_config returns the literal
    # config dict below (HTTP interface with create_new ALB + autoscaling).
    mock_service_configuration = MagicMock(spec=ServiceConfiguration, service_name=application_name, environment=environment)
    mock_service_configuration.get_config.return_value = {
        "cloudlift_version": 'test-version',
        "notifications_arn": "some",
        "ecr_repo": {"name": "test-service-repo"},
        "services": {
            "Dummy": {
                "memory_reservation": Decimal(1000),
                "secrets_name": "something",
                "command": None,
                "http_interface": {
                    "internal": False,
                    # create_new => template should provision its own ALB
                    "alb": {
                        "create_new": True,
                        "target_5xx_error_threshold": 10
                    },
                    "container_port": Decimal(7003),
                    "restrict_access_to": ["0.0.0.0/0"],
                    "health_check_path": "/elb-check"
                },
                "autoscaling": {
                    "max_capacity": 10,
                    "min_capacity": 5,
                    "request_count_per_target": {
                        "target_value": 10,
                        "scale_in_cool_down_seconds": 120,
                        "scale_out_cool_down_seconds": 60
                    }
                },
                'log_configuration': {'LogDriver': 'json-file', 'Options': {'max-size': '10m', 'max-file': '3'}},
                'ulimits': [{'name': 'core', 'soft_limit': 0, 'hard_limit': 0, }],
            },
        }
    }
    mock_build_config.side_effect = mock_build_config_impl
    mock_get_account_id.return_value = "12537612"
    mock_region_service.get_region_for_environment.return_value = "us-west-2"
    mock_region_service.get_ssl_certification_for_environment.return_value = "certificateARN1234"
    template_generator = ServiceTemplateGenerator(mock_service_configuration, self._get_env_stack(), './test/templates/test_env.sample', "12537612.dkr.ecr.us-west-2.amazonaws.com/test-service-repo:1.1.1", desired_counts={"Dummy": 100, "DummyRunSidekiqsh": 199})
    generated_template = template_generator.generate_service()
    template_file_path = os.path.join(os.path.dirname(__file__), '../templates/expected_service_with_new_alb_template.yml')
    # Both sides are normalised through to_json so YAML formatting
    # differences do not cause spurious failures.
    with(open(template_file_path)) as expected_template_file:
        self.assert_template(to_json(''.join(expected_template_file.readlines())), to_json(generated_template))
def test_to_json_with_json(self):
    """to_json should accept JSON input unchanged (all JSON is valid YAML)."""
    converted = cfn_flip.to_json(self.input_json)
    self.assertDictEqual(json.loads(converted), self.parsed_json)
def generate(f):
    """Generate output from a CloudFormation template file (YAML or JSON).

    :param f: an open file object with a ``name`` attribute (e.g. a click File)
    :raises Exception: if the file extension is not yaml/yml/json
    """
    extension = f.name.split(".").pop()
    # BUG FIX: read the file text once up front. The original passed the
    # file object itself to to_json / json.loads; json.loads requires a
    # string, so the JSON branch always raised TypeError.
    contents = f.read()
    if extension in ["yaml", "yml"]:
        j = to_json(contents)
    elif extension in ["json"]:
        j = contents
    else:
        raise Exception("{}: not a valid file extension".format(extension))
    template = json.loads(j)
    # Output name: the input filename without its extension.
    result = core.generate(template, ".".join(f.name.split(".")[0:-1]))
    click.echo(result)
def test_to_json_with_json(self):
    """JSON input should round-trip through to_json (JSON is a YAML subset)."""
    round_tripped = json.loads(cfn_flip.to_json(self.input_json))
    self.assertDictEqual(round_tripped, self.parsed_json)
def test_flip_to_json_with_multi_level_getatt(self):
    """Multi-level !GetAtt short form should expand to the full attribute list."""
    yaml_source = "!GetAtt 'First.Second.Third'\n"
    converted = cfn_flip.to_json(yaml_source, clean_up=True)
    self.assertEqual({"Fn::GetAtt": ["First", "Second", "Third"]}, json.loads(converted))
def from_yaml(yaml):
    """Convert a YAML template body to JSON and wrap it in a response dict.

    :param yaml: template text (YAML)
    :return: HTTP-style response dict, or the user_error() response on failure
    """
    try:
        # Renamed local (was `json`, shadowing the json module).
        body = cfn_flip.to_json(yaml)
    except Exception as e:
        # BUG FIX: exceptions have no `.message` attribute on Python 3;
        # str(e) is the portable way to get the error text.
        return user_error(str(e))
    return {
        "headers": {
            "Content-Type": CONTENT_TYPE_JSON,
        },
        "body": body,
    }
def cli(template, verbose=False, validate=False, version=False):
    """Inspect (and optionally validate) a CloudFormation template.

    :param template: open file object for the template (JSON or YAML)
    :param verbose: include mappings and extra detail in the report
    :param validate: also validate the template via the AWS API
    :param version: print greeting and exit immediately
    """
    click.echo(_greeter(), err=True)
    if version:
        sys.exit(0)
    print("{}: {}".format(
        crayons.white("Inspecting template", bold=True),
        crayons.blue(template.name)))
    # From here on `template` is the raw text, not the file object.
    template = template.read()
    # NOTE(review): this first parse is redundant -- its result `t` is
    # always overwritten by the to_json pass below (JSON is valid YAML).
    try:
        t = json.loads(template)
    except Exception as e:
        pass
    try:
        json_template = to_json(template)
        t = json.loads(json_template)
    except Exception as e:
        click.echo(
            "Could not determine the input format or format not sane: {}".format(e),
            err=True)
        sys.exit(1)
    if 'Description' in t:
        print("{}: {}".format(
            crayons.white("Description", bold=True),
            crayons.white("{}".format(t['Description']), bold=False)))
    if 'Parameters' in t:
        _pprint_parameters(t['Parameters'], verbose=verbose)
    # Mappings are only shown in verbose mode.
    if 'Mappings' in t and verbose:
        _pprint_mappings(t['Mappings'])
    if 'Conditions' in t:
        _pprint_conditions(t['Conditions'], verbose=verbose)
    if 'Resources' in t:
        _pprint_resources(t['Resources'], verbose=verbose)
    if 'Outputs' in t:
        _pprint_outputs(t['Outputs'], verbose=verbose)
    if validate:
        # 51200 bytes is the AWS TemplateBody size limit for ValidateTemplate.
        if len(template) > 51200:
            click.echo(
                crayons.red("Can't validate the template AWS - template size exceeds 51200 bytes"),
                err=True)
            sys.exit(1)
        try:
            # _boto_validate is expected to return (ok: bool, detail) --
            # TODO confirm against its definition.
            result = _boto_validate(template)
            # NOTE(review): `== True` comparison; `if result[0]:` is idiomatic.
            if result[0] == True:
                print(crayons.cyan("Yay ! template is valid", bold=True))
            else:
                print(crayons.cyan(":(, template is not valid: {}".format(result[1]), bold=True))
        except Exception as e:
            click.echo(crayons.red("Problem with boto3 connection, {}".format(e)), err=True)
            sys.exit(1)
    sys.exit(0)
def generate(self):
    """Build an IAM policy from a local template file or a deployed stack.

    Loads the template (file takes precedence over stack name), collects
    permissions per resource, and returns the policy as pretty-printed JSON.

    :raises InvalidTemplate: template cannot be parsed / stack not retrievable
    :raises InvalidArguments: no input given, or template lacks Resources
    """
    if self.input_file:
        try:
            with open(self.input_file, "r", encoding="utf-8") as f:
                self.template = json.loads(to_json(f.read()))
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are no longer swallowed and re-labelled.
        except Exception:
            raise InvalidTemplate("Invalid template (could not parse)")
    elif self.stack_name:
        try:
            # 'Processed' returns the template after transforms are applied.
            template_body = self.cfnclient.get_template(
                StackName=self.stack_name,
                TemplateStage='Processed')['TemplateBody']
            self.template = json.loads(to_json(template_body))
        except Exception:
            raise InvalidTemplate("Could not retrieve remote stack")
    else:
        raise InvalidArguments("No template provided")
    if "Resources" not in self.template:
        raise InvalidArguments("Resources not in template")
    for resname, res in self.template["Resources"].items():
        self.get_permissions(resname, res)
    policy = self.permissions.generate(self.consolidate_policy)
    if len(self.skipped_types) > 0:
        sys.stderr.write(
            "WARNING: Skipped the following types: {}\n".format(", ".join(
                sorted(set(self.skipped_types)))))
    # 10240 characters is the IAM managed-policy size limit.
    if len(json.dumps(policy, separators=(',', ': '))) > 10240:
        sys.stderr.write(
            "WARNING: The generated policy size is greater than the maximum 10240 character limit\n"
        )
    sys.stderr.flush()
    return json.dumps(policy, indent=4, separators=(',', ': '))
def test_getatt_from_yaml():
    """Both !GetAtt short form and the long form should become Fn::GetAtt lists."""
    source = """
    - !GetAtt foo.bar
    - Fn::GetAtt: [foo, bar]
    """
    expected = [
        {"Fn::GetAtt": ["foo", "bar"]},
        {"Fn::GetAtt": ["foo", "bar"]},
    ]
    # The conversion must be identical with and without clean-up.
    for clean in (False, True):
        assert load_json(cfn_flip.to_json(source, clean_up=clean)) == expected
def test_getatt_from_yaml(self):
    """Both !GetAtt short form and the long form should become Fn::GetAtt lists."""
    source = """
    - !GetAtt foo.bar
    - Fn::GetAtt: [foo, bar]
    """
    expected = [
        {"Fn::GetAtt": ["foo", "bar"]},
        {"Fn::GetAtt": ["foo", "bar"]},
    ]
    # The conversion must be identical with and without clean-up.
    for clean in (False, True):
        self.assertEqual(expected, json.loads(cfn_flip.to_json(source, clean_up=clean)))
def test_flip_to_json_with_multi_level_getatt(self):
    """A three-part !GetAtt must convert into a three-element attribute list."""
    converted = cfn_flip.to_json("!GetAtt 'First.Second.Third'\n", clean_up=True)
    want = {"Fn::GetAtt": ["First", "Second", "Third"]}
    self.assertEqual(want, json.loads(converted))
def _load_template(branch):
    """Load the branch's pipeline CloudFormation template as a dict.

    :param branch: git branch name used to locate the checked-out repo in /tmp
    :raises SystemError: if the file cannot be read
    :raises RuntimeError: if the file parses to an empty document
    """
    logger.info("Loading template")
    directory = f'/tmp/bananas-as-a-service-{branch}/infrastructure'
    template = f'{directory}/cloudformation-pipeline.yml'
    try:
        with open(template) as yaml_file:
            # BUG FIX: to_json expects the template text; the original
            # passed the open file object itself.
            data = json.loads(to_json(yaml_file.read()))
            if not data:
                raise RuntimeError(f"YAML file: {template} is empty")
    # NOTE: FileNotFoundError is a subclass of IOError/OSError, so the
    # tuple is redundant but kept for the original contract.
    except (IOError, FileNotFoundError) as err:
        raise SystemError(f"{err}")
    else:
        return data
def create_stack():
    """Create the `ansibleStack1` CloudFormation stack from cfn-template.yml."""
    # file must be in the same dir as the script
    template_file_location = 'cfn-template.yml'
    stack_name = 'ansibleStack1'
    # Read the whole YAML template and convert it to JSON text.
    with open(template_file_location, 'r') as content_file:
        # BUG FIX: to_json expects the template text; the original passed
        # the open file object itself.
        content = to_json(content_file.read())
    cloud_formation_client = boto3.client('cloudformation')
    print("Creating {}".format(stack_name))
    # Return value intentionally ignored by callers of the original too.
    cloud_formation_client.create_stack(StackName=stack_name, TemplateBody=content)
def yaml_to_json(self, yaml_file):
    """
    takes the yaml file path and converts the returns the converted
    JSON object (None when the file cannot be parsed)
    """
    template_json = None
    with open(yaml_file) as yml_file:
        try:
            template_json = json.loads(to_json(yml_file.read()))
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are not silently swallowed.
        except Exception:
            file_name = yaml_file.split("/")[-1]
            logger.error(
                "Failed to load yaml file, please check yaml file contains correct content: %s",
                file_name)
    return template_json
def convert_json_or_yaml_to_dict(file_content):
    """Parse *file_content* as JSON, falling back to YAML; None on failure."""
    # Fast path: the content is already valid JSON.
    try:
        return json.loads(file_content)
    except ValueError:
        pass
    # Fall back: treat the content as YAML and convert it to JSON first.
    try:
        return json.loads(to_json(file_content))
    except yaml.YAMLError:
        logger.exception("Could not convert YAML to JSON template")
    except ValueError:
        logger.exception("Could not parse JSON template")
    return None
def parse(self, file_path):
    """Parse the YAML template at *file_path* into a dict.

    :param file_path: path to the template file
    :return: parsed template dict, or None when parsing fails
    """
    template_json = None
    with open(file_path) as scanned_file:
        try:
            template_json = json.loads(to_json(scanned_file.read()))
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are not silently swallowed.
        except Exception:
            file_name = file_path.split("/")[-1]
            logger.error(
                "\t\t ERROR: please check yaml file contains correct content: %s",
                file_name)
    return template_json
def get_list_of_resources(file_name):
    """ considers only yaml format or yaml string """
    # Accept either a path to a template file or the template text itself.
    if os.path.exists(file_name):
        with open(file_name) as f:
            template_text = f.read()
    else:
        template_text = file_name
    parsed_template = json.loads(to_json(template_text))
    # Collect the distinct resource types declared in the template.
    return {resource['Type'] for resource in parsed_template["Resources"].values()}
def change_set_info(stack, changeset):
    """Summarise a CloudFormation change set against the deployed stack.

    :param stack: stack name or ARN
    :param changeset: change set name or ARN
    :return: dict with keys 'raw' (API response), 'processed' (parameters
        plus per-change summary), 'orig'/'new' (Resources sections), and
        'diffs' (resource_diffs output)
    """
    change_set = cfn.describe_change_set(StackName=stack, ChangeSetName=changeset)
    original_template_body = cfn.get_template(StackName=stack)['TemplateBody']
    change_set_template_body = cfn.get_template(StackName=stack, ChangeSetName=changeset)['TemplateBody']
    # get_template may return raw template text or an already-parsed dict;
    # normalise both bodies to dicts.
    if isinstance(original_template_body, str):
        original_template = json.loads(to_json(original_template_body))
    else:
        original_template = dict(original_template_body)
    if isinstance(change_set_template_body, str):
        change_set_template = json.loads(to_json(change_set_template_body))
    else:
        change_set_template = dict(change_set_template_body)
    # An empty/falsy original template means a brand-new stack: no resources.
    if original_template:
        orig_resources = original_template['Resources']
    else:
        orig_resources = {}
    new_resources = change_set_template['Resources']
    diffs = resource_diffs(orig_resources, new_resources)
    parameters = {item['ParameterKey']:item['ParameterValue'] for item in change_set['Parameters']}
    # Flatten each ResourceChange entry; optional fields default to None.
    set_details = {
        'parameters': parameters,
        'changes': list(map(lambda x: {
            'Action': x['ResourceChange']['Action'],
            'LogicalResourceId': x['ResourceChange'].get('LogicalResourceId', None),
            'PhysicalResourceId': x['ResourceChange'].get('PhysicalResourceId', None),
            'Replacement': x['ResourceChange'].get('Replacement', None),
            'ResourceType': x['ResourceChange']['ResourceType'],
            'Scope': x['ResourceChange']['Scope'],
            'Details': parse_reasons(x['ResourceChange']['Details'], parameters)
        }, change_set['Changes']))
    }
    set_info = {
        'raw': change_set,
        'processed': set_details,
        'orig': orig_resources,
        'new': new_resources,
        'diffs': diffs
    }
    return set_info
def file_to_dict(filename, data):
    """Converts JSON file to dict

    :param filename: filename
    :param data: string
    :return: dict object
    """
    try:
        # First attempt: treat the content as YAML/CFN and convert via
        # to_json; on any failure fall back to parsing it as plain JSON.
        try:
            return json.loads(to_json(data))
        except Exception:
            return json.loads(data)
    except Exception as error:
        logger.error("Failed to parse s3 file {}, error: {}".format(filename, str(error)))
        raise ValueError("Unable to load JSON file {} error: {}".format(filename, str(error)))
def file_to_dict(filename, data):
    """Converts JSON file to dict

    :param filename: filename
    :param data: string
    :return: dict object
    """
    try:
        try:
            # Preferred path: normalise YAML/CFN syntax through to_json.
            converted = to_json(data)
            return json.loads(converted)
        except Exception:
            # Fallback: the payload may already be plain JSON.
            return json.loads(data)
    except Exception as error:
        err_text = str(error)
        logger.error("Failed to parse s3 file {}, error: {}".format(filename, err_text))
        raise ValueError("Unable to load JSON file {} error: {}".format(filename, err_text))
def _gitlab_import(self):
    """Fetch the template file from GitLab and return it as a dict."""
    logging.info('Import {} from Gitlab.'.format(self.title))
    # Configuration values, with defaults for branch/path and an
    # optional OAuth token.
    project_id = self.get_value('Project')
    branch = self.get_value('Branch', default=BRANCH_DEFAULT)
    path = self.get_value('Path', default=TEMPLATE_NAME_DEFAULT)
    token = self.get_value('OAuthToken', default='')
    gl = gitlab.Gitlab('https://gitlab.com', private_token=token, api_version=4)
    gl.auth()
    # Distinct names for the project id and the resolved project object
    # (the original reused one variable for both).
    gitlab_project = gl.projects.get(project_id)
    raw_template = gitlab_project.files.raw(file_path=path, ref=branch)
    return json.loads(to_json(raw_template))
def _codecommit_import(self):
    """Fetch the template file from CodeCommit and return it as a dict.

    Reads Repo/Branch/Path from the resource configuration; Branch and
    Path fall back to module defaults.
    """
    logging.info('Import {} from Codecommit.'.format(self.title))
    repo = self.get_value('Repo')
    branch = self.get_value('Branch', default=BRANCH_DEFAULT)
    path = self.get_value('Path', default=TEMPLATE_NAME_DEFAULT)
    # isinstance is the idiomatic type check; `type(x) == str` rejects
    # str subclasses and is flagged by linters.
    assert isinstance(repo, str), ASSERT_MESSAGE.format('Repo', self.title)
    assert isinstance(branch, str), ASSERT_MESSAGE.format('Branch', self.title)
    assert isinstance(path, str), ASSERT_MESSAGE.format('Path', self.title)
    response = cm.get_file(repositoryName=repo, commitSpecifier=branch, filePath=path)
    template = json.loads(to_json(response['fileContent']))
    return template
def test_flip_to_json_with_datetimes():
    """YAML dates and datetimes should serialise as ISO-8601 strings."""
    tricky_data = """
    a date: 2017-03-02
    a datetime: 2017-03-02 19:52:00
    """
    expected = {
        "a date": "2017-03-02",
        "a datetime": "2017-03-02T19:52:00",
    }
    assert load_json(cfn_flip.to_json(tricky_data)) == expected
def test_generate_service_for_ecs_with_custom_roles(self, mock_region_service, mock_get_account_id, mock_build_config):
    """Custom task/execution role ARNs from the config must flow through to
    the generated task definition.

    The three mock_* parameters are injected by patch decorators on the
    class/method (not visible here) -- TODO confirm decorator order matches.
    """
    environment = 'staging'
    application_name = 'dummy'
    mock_service_configuration = MagicMock(spec=ServiceConfiguration, service_name=application_name, environment=environment)
    # Minimal service config: no http_interface, but explicit custom
    # task_role_arn / task_execution_role_arn values to assert on below.
    mock_service_configuration.get_config.return_value = {
        "cloudlift_version": 'test-version',
        "notifications_arn": "some",
        "ecr_repo": {"name": "main-repo", "assume_role_arn": "arn1234", "account_id": "1234"},
        "services": {
            "Dummy": {
                "memory_reservation": Decimal(1000),
                "secrets_name": "something",
                "command": None,
                "task_role_arn": "TASK_ARN",
                "task_execution_role_arn": "TASK_EXECUTION_ARN"
            },
        }
    }
    mock_build_config.side_effect = mock_build_config_impl
    mock_get_account_id.return_value = "12537612"
    mock_region_service.get_region_for_environment.return_value = "us-west-2"
    mock_region_service.get_ssl_certification_for_environment.return_value = "certificateARN1234"
    template_generator = ServiceTemplateGenerator(mock_service_configuration, self._get_env_stack(), './test/templates/test_env.sample', "12537612.dkr.ecr.us-west-2.amazonaws.com/test-service-repo:1.1.1", desired_counts={"Dummy": 1})
    generated_template = template_generator.generate_service()
    # `load` here presumably parses the YAML into a list of documents --
    # TODO confirm which loader is imported at module level.
    loaded_template = load(to_json(generated_template))
    self.assertGreaterEqual(len(loaded_template), 1, "no template generated")
    generated = loaded_template[0]
    # The roles themselves must still exist as resources...
    assert "DummyRole" in generated['Resources']
    assert "DummyTaskExecutionRole" in generated['Resources']
    assert "ECSServiceRole" in generated['Resources']
    # ...but the task definition must reference the custom ARNs, not them.
    td = generated['Resources']['DummyTaskDefinition']
    assert td['Properties']['TaskRoleArn'] == 'TASK_ARN'
    assert td['Properties']['ExecutionRoleArn'] == 'TASK_EXECUTION_ARN'
def test_generate_tcp_service(self, mock_region_service, mock_get_account_id, mock_build_config):
    """Generated TCP-service template must match the checked-in expected
    template.

    The three mock_* parameters are injected by patch decorators on the
    class/method (not visible here) -- TODO confirm decorator order matches.
    """
    environment = 'staging'
    application_name = 'dummy'
    mock_service_configuration = MagicMock(spec=ServiceConfiguration, service_name=application_name, environment=environment)
    # TCP service config fixture is provided by a shared helper.
    mock_service_configuration.get_config.return_value = mocked_tcp_service_config()
    mock_build_config.side_effect = mock_build_config_impl
    mock_get_account_id.return_value = "12537612"
    mock_region_service.get_region_for_environment.return_value = "us-west-2"
    template_generator = ServiceTemplateGenerator(mock_service_configuration, self._get_env_stack(), './test/templates/test_env.sample', "12537612.dkr.ecr.us-west-2.amazonaws.com/test-service-repo:1.1.1", desired_counts={"FreeradiusServer": 100})
    generated_template = template_generator.generate_service()
    template_file_path = os.path.join(os.path.dirname(__file__), '../templates/expected_tcp_service_template.yml')
    # Both sides are normalised through to_json so YAML formatting
    # differences do not cause spurious failures.
    with(open(template_file_path)) as expected_template_file:
        self.assert_template(to_json(''.join(expected_template_file.readlines())), to_json(generated_template))
def is_template_file(self, file_path):
    """
    check for valid template file for parse cloudformation template

    Returns True only when the file parses and declares
    AWSTemplateFormatVersion.
    """
    # The original also checked `len(file_path.split(".")) > 0`, which is
    # always true (split never returns an empty list) -- removed.
    if file_path.split(".")[-1] in ["json", "yaml"]:
        template_json = None
        if file_path.endswith(".yaml") and exists_file(file_path):
            with open(file_path) as yml_file:
                try:
                    template_json = json.loads(to_json(yml_file.read()))
                # FIX: narrowed from a bare `except:` so KeyboardInterrupt
                # and SystemExit are not swallowed; parse failures still
                # fall through to return False.
                except Exception:
                    pass
        elif file_path.endswith(".json"):
            template_json = json_from_file(file_path)
        if template_json and "AWSTemplateFormatVersion" in template_json:
            return True
    return False
def test_flip_to_json_with_datetimes(self):
    """
    Test that the json encoder correctly handles dates and datetimes
    """
    # FIX: removed `from datetime import date, datetime, time` -- none of
    # those names were used in this test.
    tricky_data = """
    a date: 2017-03-02
    a datetime: 2017-03-02 19:52:00
    """
    actual = cfn_flip.to_json(tricky_data)
    parsed_actual = json.loads(actual)
    self.assertDictEqual(parsed_actual, {
        "a date": "2017-03-02",
        "a datetime": "2017-03-02T19:52:00",
    })
def test_flip_to_json_with_datetimes(self):
    """
    Test that the json encoder correctly handles dates and datetimes
    """
    # FIX: removed `from datetime import date, datetime, time` -- none of
    # those names were used in this test.
    tricky_data = """
    a date: 2017-03-02
    a datetime: 2017-03-02 19:52:00
    """
    actual = cfn_flip.to_json(tricky_data)
    parsed_actual = json.loads(actual)
    self.assertDictEqual(parsed_actual, {
        "a date": "2017-03-02",
        "a datetime": "2017-03-02T19:52:00",
    })
def _s3_import(self):
    """Download the template object from S3 and parse it into a dict."""
    logging.info('Import {} from S3.'.format(self.title))
    bucket = self.get_value('Bucket')
    key = self.get_value('Key')
    assert type(bucket) == str, ASSERT_MESSAGE.format('Bucket', self.title)
    assert type(key) == str, ASSERT_MESSAGE.format('Key', self.title)
    assert self.aws_cfn_request_id, 'Request Id is None.'
    # Stage the object under a per-request path; slashes in the key are
    # flattened so it stays a single file name.
    local_path = '/tmp/' + self.aws_cfn_request_id + '/' + key.replace('/', '_')
    logging.info('Save file in {}, from s3://{}/{}.'.format(
        local_path, bucket, key))
    with open(local_path, 'wb') as download_target:
        s3.download_fileobj(bucket, key, download_target)
    # Re-open in text mode and convert the YAML/JSON body to a dict.
    with open(local_path) as template_file:
        return json.loads(to_json(template_file.read()))
def test_flip_to_json_with_condition(self):
    """
    Test that the Condition key is correctly converted
    """
    source = """
    MyAndCondition: !And
      - !Equals ["sg-mysggroup", !Ref "ASecurityGroup"]
      - !Condition SomeOtherCondition
    """
    want = {
        "MyAndCondition": {
            "Fn::And": [
                {"Fn::Equals": ["sg-mysggroup", {"Ref": "ASecurityGroup"}]},
                {"Condition": "SomeOtherCondition"}
            ]
        }
    }
    converted = cfn_flip.to_json(source, clean_up=True)
    self.assertEqual(want, json.loads(converted))
def readCloudformationTemplate(path_to_file):
    """Read and parse a CloudFormation YAML template.

    :param path_to_file: path to the template file
    :return: parsed template dict, or False when the file cannot be read
        (preserves the original contract: a parse failure exits the process)
    """
    template = None
    try:
        with open(path_to_file, "r") as file:
            raw_yaml = file.read()
    # FIX: narrowed from a bare `except:`; sys.exc_info() still reports
    # the active exception inside the handler.
    except Exception:
        etype, evalue, etb = sys.exc_info()
        logger.error(
            "Could not read config file %s. Exception: %s, Error: %s." %
            (path_to_file, etype, evalue))
        return False
    try:
        # We convert the raw yaml to json to prevent unknown tag errors for unknown tags, e.g. "!Ref"
        raw_json = cfn_flip.to_json(raw_yaml)
        template = json.loads(raw_json)
    except Exception:
        etype, evalue, etb = sys.exc_info()
        logger.error(
            "Could not parse config file %s. Exception: %s, Error: %s." %
            (path_to_file, etype, evalue))
        sys.exit(1)
    return template
def test_flip_to_json_with_condition():
    """
    Test that the Condition key is correctly converted
    """
    source = """
    MyAndCondition: !And
      - !Equals ["sg-mysggroup", !Ref "ASecurityGroup"]
      - !Condition SomeOtherCondition
    """
    want = {
        "MyAndCondition": {
            "Fn::And": [
                {"Fn::Equals": ["sg-mysggroup", {"Ref": "ASecurityGroup"}]},
                {"Condition": "SomeOtherCondition"}
            ]
        }
    }
    assert load_json(cfn_flip.to_json(source, clean_up=True)) == want
def filter_iam_repos(repo):
    """Return (repo name, ci-policies) for a repo whose root .ci.yml/.ci.yaml
    declares ci-policies; otherwise return False. Archived repos are skipped.
    """
    if repo.archived:
        return False
    # get files in root
    contents = repo.get_contents("")
    for content in contents:
        # search for tags.y(a)ml file
        if content.path not in (".ci.yml", ".ci.yaml"):
            continue
        # open contents
        tags = cfn_flip.to_json(content.decoded_content)
        tags = json.loads(tags)
        # FIX: the original called recursive_get twice for the same key;
        # hoist the lookup into a variable.
        policies = recursive_get(tags, "ci-policies")
        if policies:
            return repo.name, policies
        return False
    return False