def test_named_object_hooks(mocker):
    """Hooks registered for a named object (a provider) can rewrite its attrs."""
    TFObject.reset()

    def aws_hook(attrs):
        # Return a modified copy rather than mutating the caller's dict.
        updated = attrs.copy()
        updated["region"] = "us-east-1"
        return updated

    hook_mock = mocker.MagicMock(side_effect=aws_hook)
    Provider.add_hook("aws", hook_mock)

    Provider("aws", alias="aws1")
    output = TFObject.compile()

    # The hook must have been invoked exactly once with the raw attrs.
    assert hook_mock.mock_calls == [mocker.call({"alias": "aws1"})]

    # since providers use DuplicateKey as their key we need to json dump to
    # compare equality here
    expected = {"provider": {"aws": {"alias": "aws1", "region": "us-east-1"}}}
    assert json.dumps(output) == json.dumps(expected)
def test_duplicate_key_collisions():
    """When creating two different types of Provider objects they would collide
    with each other during recursive update because they had the same __hash__
    value.

    The fix was use the hash() value of the key being provide PLUS our
    incremented ID.  This test covers that by making sure that all of the
    expected providers are in our compiled output.
    """
    TFObject.reset()

    Provider("aws", alias="aws1")
    Provider("aws", alias="aws2")
    Provider("mysql", alias="mysql1")
    Provider("mysql", alias="mysql2")

    assert len(Provider._instances) == 4

    compiled = Provider.compile()

    # Every provider instance must survive compilation with its own alias.
    seen = sorted(entry["alias"] for entry in six.itervalues(compiled["provider"]))
    assert seen == ["aws1", "aws2", "mysql1", "mysql2"]
def test_tf_type():
    """A Resource subclass compiles under the 'resource' section.

    NOTE(review): a later definition with the same name exists in this file
    and shadows this one at import time; both bodies are behaviorally
    identical, so no coverage is lost — consider deduplicating.
    """
    TFObject.reset()

    class TestResource(Resource):
        pass

    TestResource("res1", "foo", attr="value")

    expected = {"resource": {"res1": {"foo": {"attr": "value"}}}}
    assert TFObject.compile() == expected
def test_provider_duplicate_json_output():
    """Two providers of the same type serialize as duplicate JSON keys.

    Renamed from ``test_provider``: a later function in this file also
    defines ``test_provider``, and Python keeps only the last binding, so
    under the original name this test was never collected or run by pytest.
    The duplicate-"mysql"-key assertion here is unique (it relies on the
    DuplicateKey behavior of provider keys) and must actually execute.
    """
    TFObject.reset()

    Provider("mysql", host="db-wordpress")
    Provider("mysql", host="db-finpro")

    result = json.dumps(TFObject.compile(), sort_keys=True)
    # Duplicate "mysql" keys are intentional — provider keys are DuplicateKey
    # instances, so both definitions survive serialization.
    desired = '{"provider": {"mysql": {"host": "db-wordpress"}, "mysql": {"host": "db-finpro"}}}'
    assert result == desired
def test_compile():
    """Resources and variables all land in one compiled document.

    NOTE(review): a later definition with the same name exists in this file
    and shadows this one at import time; both bodies are behaviorally
    identical, so no coverage is lost — consider deduplicating.
    """
    TFObject.reset()

    Resource("res1", "foo", attr="value")
    Resource("res1", "bar", attr="other")
    Variable("var1", default="value")

    expected = {
        "resource": {
            "res1": {
                "foo": {"attr": "value"},
                "bar": {"attr": "other"},
            },
        },
        "variable": {
            "var1": {"default": "value"},
        },
    }
    assert TFObject.compile() == expected
def test_provider():
    """Multiple providers of the same type each keep their own attributes."""
    TFObject.reset()

    Provider("mysql", host="db-wordpress")
    Provider("mysql", host="db-finpro")

    compiled = TFObject.compile()

    # Collect every provider's host; order of iteration is unspecified, so
    # sort before comparing.
    seen = sorted(entry["host"] for entry in six.itervalues(compiled["provider"]))
    assert seen == ["db-finpro", "db-wordpress"]
def test_tf_type():
    """A Resource subclass compiles under the 'resource' section."""
    TFObject.reset()

    class TestResource(Resource):
        pass

    TestResource('res1', 'foo', attr='value')

    expected = {'resource': {'res1': {'foo': {'attr': 'value'}}}}
    assert TFObject.compile() == expected
def test_typed_object_hooks(mocker):
    """Ensure that our hooks work as expected and can modify resource data
    during compilation."""
    TFObject.reset()

    def attr_always_true(object_id, object_attrs):
        """attr_always_true is a contrived hook that ensures `attr` of
        `some_type` resources is always True"""
        result = object_attrs.copy()
        for key in result:
            if key == "attr":
                result[key] = True
        return result

    # Make our hook a mock so we can assert on calls
    hook_mock = mocker.MagicMock(side_effect=attr_always_true)

    # Add the hook for resources
    Resource.add_hook("some_type", hook_mock)

    Resource("some_type", "some_id", attr=True)
    Resource("some_type", "other_id", attr=False)

    compiled = TFObject.compile()

    # The hook runs once per resource of the registered type, receiving the
    # resource id plus its raw attrs.
    assert hook_mock.mock_calls == [
        mocker.call("some_id", {"attr": True}),
        mocker.call("other_id", {"attr": False}),
    ]

    # Both resources end up with attr forced to True by the hook.
    expected = {
        "resource": {
            "some_type": {
                "some_id": {"attr": True},
                "other_id": {"attr": True},
            }
        }
    }
    assert compiled == expected
def test_compile():
    """Resources and variables all land in one compiled document."""
    TFObject.reset()

    Resource('res1', 'foo', attr='value')
    Resource('res1', 'bar', attr='other')
    Variable('var1', default='value')

    expected = {
        'resource': {
            'res1': {
                'foo': {'attr': 'value'},
                'bar': {'attr': 'other'},
            },
        },
        'variable': {
            'var1': {'default': 'value'},
        },
    }
    assert TFObject.compile() == expected
def generate_(project, environment): """.""" # Load the stackvars. stackvars = StackVars(project, environment=environment, var_dir=Path.cwd(),) stackvars.load() # Import the libraries. library = Path("library") for lib in library.glob("**/*.py"): # Import the libs. logger.debug(f'Importing library "{lib}".') spec = importlib.util.spec_from_file_location(lib.stem, lib) module = importlib.util.module_from_spec(spec) sys.modules[lib.stem] = module spec.loader.exec_module(module) # Get the project stacks. s = Path("stacks") for stack_ in s.glob(f"{project}/**/*.tf.py"): # Import the stack. logger.debug(f'Importing stack "{stack_}".') spec = importlib.util.spec_from_file_location( stack_.name.replace("".join(stack_.suffixes), ""), stack_ ) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) # Render it. render = getattr(module, "render") render(stackvars) # Compile them. tf_json = json.dumps(TFObject.compile(), indent=4, sort_keys=True) # Prepare the output file. p = Path(f"generated/{project}") if environment: p = p / environment p.mkdir(parents=True, exist_ok=True) p = p / "main.tf.json" # Save the rendered stack to the file. p.write_text(tf_json)
def reset_tfobject():
    """Clear all registered TFObject instances (global compile state)."""
    TFObject.reset()