def test_init_terraform(self):
    """
    Configure and initialize the backend.
    """
    workflow.delete_files("**/*.json", "*.zip")
    self.pretf.init()

def test_disabled(self):
    delete_files("src/*", "out.zip")

    # Use a different configuration that has enabled = false.
    # It will not create an archive.
    Path("src/one.txt").write_text("a")
    outputs = self.apply("v2")
    assert not outputs["output_md5"]
    assert not outputs["search_results"]
    assert not os.path.exists("out.zip")

def test_create(self):
    workflow.delete_files("*.json")

    with self.create("one.tf.json"):
        one = yield variable.one(default=True)
        yield output.one(value=one)

    self.tf.init()
    outputs = self.tf.apply()
    assert outputs == {"one": True}

def test_create(self):
    workflow.delete_files("*.json")

    with self.create("one.tf.json"):
        one = yield block("variable", "one", {"default": True})
        yield block("output", "one", {"value": one})

    self.tf.init()
    outputs = self.tf.apply()
    assert outputs == {"one": True}

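# Both versions of test_create above should generate equivalent JSON for
# one.tf.json: the first uses the attribute-style variable/output helpers,
# the second uses the generic block() function. A sketch of the expected
# file contents (the exact rendering of the variable reference is an
# assumption, not taken from this repo):
#
#   {
#       "variable": {"one": {"default": true}},
#       "output": {"one": {"value": "${var.one}"}}
#   }
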
def test_timestamps(self):
    delete_files("src/*", "out.zip")

    # Create an archive.
    Path("src/one.txt").write_text("a")
    when = time.mktime(datetime.datetime(2017, 7, 7, 7, 7, 7).timetuple())
    os.utime("src/one.txt", (when, when))
    outputs = self.apply("v1")
    first_output_md5 = outputs["output_md5"]
    assert os.path.exists("out.zip")
    delete_files("out.zip")

    # Change timestamps without affecting the hash.
    when = time.mktime(datetime.datetime(2012, 2, 2, 2, 2, 2).timetuple())
    os.utime("src/one.txt", (when, when))
    outputs = self.apply("v1")
    second_output_md5 = outputs["output_md5"]
    assert second_output_md5 == first_output_md5
    assert os.path.exists("out.zip")

def test_permissions(self):
    delete_files("src/*", "out.zip")

    # Create an archive.
    Path("src/one.txt").write_text("a")
    os.chmod("src/one.txt", 0o644)
    outputs = self.apply("v1")
    first_output_md5 = outputs["output_md5"]
    assert os.path.exists("out.zip")
    delete_files("out.zip")

    # Change group/other write permissions without affecting the hash.
    os.chmod("src/one.txt", 0o666)
    outputs = self.apply("v1")
    second_output_md5 = outputs["output_md5"]
    assert second_output_md5 == first_output_md5
    assert os.path.exists("out.zip")
    delete_files("out.zip")

    # Change the executable permission and affect the hash.
    os.chmod("src/one.txt", 0o744)
    outputs = self.apply("v1")
    third_output_md5 = outputs["output_md5"]
    assert third_output_md5 != first_output_md5
    assert os.path.exists("out.zip")

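# Taken together, test_timestamps and test_permissions demonstrate that the
# archive hash is content-based: modification times and group/other
# permission bits do not change the hash, while the owner execute bit does.
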
def test_changes(self):
    delete_files("src/*", "out.zip")

    # Create an archive.
    Path("src/one.txt").write_text("a")
    outputs = self.apply("v1")
    assert outputs["search_results"] == ["one.txt"]
    first_output_md5 = outputs["output_md5"]
    assert os.path.exists("out.zip")
    delete_files("out.zip")

    # Add a second file and affect the hash and search results.
    Path("src/two.txt").write_text("b")
    outputs = self.apply("v1")
    assert outputs["search_results"] == ["one.txt", "two.txt"]
    second_output_md5 = outputs["output_md5"]
    assert first_output_md5 != second_output_md5
    assert os.path.exists("out.zip")
    delete_files("out.zip")

    # Change the file contents and affect the hash.
    Path("src/two.txt").write_text("c")
    outputs = self.apply("v1")
    third_output_md5 = outputs["output_md5"]
    assert third_output_md5 != second_output_md5
    assert os.path.exists("out.zip")

def pretf_workflow(root_module=None):
    if os.environ.get("GITHUB_COMMENT"):
        return github_action(os.environ["GITHUB_COMMENT"])

    # Check that the working directory contains a terraform.tfvars file;
    # those are the only directories set up to run Terraform. An error
    # is displayed when running from the wrong directory.
    workflow.require_files("terraform.tfvars")

    # Clean up files and links from previous failed executions.
    workflow.delete_files()
    workflow.delete_links()

    # Link these files into the working directory,
    # as they are used by all stacks and environments.
    created = workflow.link_files("*.tf", "*.tf.py", "*.tfvars", "*.tfvars.py")

    # Link this root module into the working directory. The details must
    # be passed in by the pretf.workflow.py files in the stack directories
    # (different stacks can specify different modules).
    if root_module:
        created += workflow.link_module(**root_module)

    # Create *.tf.json and *.tfvars.json files
    # from *.tf.py and *.tfvars.py files.
    created += workflow.create_files()

    # Execute Terraform, raising an exception if it fails.
    proc = workflow.execute_terraform()

    # If it got this far, then it was successful.
    # Clean up all of the files that were created.
    workflow.clean_files(created)

    # Return the execution result.
    return proc
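
# A minimal sketch (module names and paths below are hypothetical, not taken
# from this repo) of how a stack directory's pretf.workflow.py might delegate
# to the shared pretf_workflow() hook above, passing the root module details
# mentioned in the comments:

def pretf_workflow():
    # Hypothetical import of the shared hook defined above.
    from shared_workflow import pretf_workflow as shared_pretf_workflow

    # The dict is passed through as keyword arguments to
    # workflow.link_module(); "source" is an assumed parameter name.
    return shared_pretf_workflow(root_module={"source": "../../modules/example"})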