def main():
    directory = sys.argv[1]
    combined_hcl = combine_hcl(directory)
    hcl_obj = hcl.loads(combined_hcl)
    for mod in get_modules(hcl_obj):
        all_hcl_obj = dict(hcl_obj)
        all_hcl_obj.update(hcl.loads(combine_hcl(directory + mod)))
        all_resources = get_resources(all_hcl_obj)
        print(check_if_allowed_service(all_resources))
        print(check_policies(all_resources))
def test_parser_bytes(hcl_fname, invalid, export_comments):
    with open(join(PARSE_FIXTURE_DIR, hcl_fname), 'rb') as fp:
        input = fp.read()

    print(input)

    if not invalid:
        hcl.loads(input, export_comments)
    else:
        with pytest.raises(ValueError):
            hcl.loads(input, export_comments)
def test_parser_str(hcl_fname, invalid):
    with open(join(PARSE_FIXTURE_DIR, hcl_fname), 'r') as fp:
        input = fp.read()

    print(input)

    if not invalid:
        hcl.loads(input)
    else:
        with pytest.raises(ValueError):
            hcl.loads(input)
def test_stack_parser(hcl_fname, invalid):
    with open(os.path.join(FIXTURE_DIR, hcl_fname), 'rb') as fp:
        print(f'hcl_fname is {hcl_fname}')
        inp = fp.read()
    if hcl_fname not in ['common-render.hcl']:
        if not invalid:
            output = StackParser(hcl.loads(inp)).stack
            pprint(output)
        else:
            with pytest.raises(ValueError):
                StackParser(hcl.loads(inp))
def get_local_template(tf_template_file, tf_variable_files):
    """Retrieves the TF template and variable file(s) as python dictionaries"""
    # parse the files -- hcl.load() dually supports both JSON and HCL formats.
    # If the HCL contains heredoc notation and the file has CRLF encodings, it
    # will fail. See: https://github.com/virtuald/pyhcl/issues/25
    template_str = tf_template_file.read()  # load the entire file into a string
    hcl.loads(template_str)  # test parse / raise error on parse failure

    # read the variable file(s), updating/overwriting variables_dict as we go
    variables_dict = {}
    if tf_variable_files:
        for variable_file in tf_variable_files:
            variables_dict.update(hcl.load(variable_file)['variable'])
    return [template_str, {'variable': variables_dict}]
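# A hedged aside on the dual-format behavior noted above: pyhcl parses both
# native HCL and its JSON equivalent into the same dict shape, which is why a
# single hcl.loads() call can validate either form. (Values are made up.)
import hcl

as_hcl = hcl.loads('variable "region" { default = "us-east-1" }')
as_json = hcl.loads('{"variable": {"region": {"default": "us-east-1"}}}')
assert as_hcl == as_json == {'variable': {'region': {'default': 'us-east-1'}}}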
def parse_terraform_directory(self, path):
    terraform_string = ""
    for directory, subdirectories, files in os.walk(path):
        for file in files:
            if file.endswith(".tf"):
                with open(os.path.join(directory, file)) as fp:
                    new_terraform = fp.read()
                    try:
                        hcl.loads(new_terraform)
                    except ValueError as e:
                        raise TerraformSyntaxException(
                            "Invalid terraform configuration in {0}\n{1}".format(
                                os.path.join(directory, file), e))
                    terraform_string += new_terraform
    terraform = hcl.loads(terraform_string)
    return terraform
def get_full_configuration(self):
    """Get full backend configuration."""
    if not self.config_file:
        return self._raw_config
    result = hcl.loads(self.config_file.read_text())
    result.update(self._raw_config)
    return result
def get_rule_mode(self, remote):  # pragma: no cover
    """
    Calls the GitHub API and determines if the specified rule has different modes.

    If no modes are available, returns None.
    If modes are available, returns "remediate".
    """
    try:  # pragma: no cover
        repo = self.github_client.get_repo(remote)
    except github.GithubException:  # pragma: no cover
        LOGGER.warning("No remote resource found at github.com/%s", remote)
        return None
    try:
        contents = repo.get_contents("variables.tf")
        content_string = base64.b64decode(contents.content).decode("ascii")
        content_dict = hcl.loads(content_string)
        if "mode" in content_dict["variables"]:
            LOGGER.debug("Found 'mode' variable in %s", remote)
            return "remediate"
        return None
    except Exception as exception:  # pylint: disable=broad-except
        LOGGER.warning(
            "Something went wrong when trying to determine mode for %s",
            remote,
        )
        LOGGER.exception(exception)
        return None
def parse_tf_file_for_variables(path: Path) -> List[dict]:
    """
    This is a really bad parser for *.tf and *.tfvars files, with the only
    goal being to parse variable definitions and variable values.

    From https://www.hashicorp.com/blog/terraform-0-12-reliable-json-syntax

        In future versions of Terraform, we will also support native tooling
        to convert HCL to JSON and JSON to HCL cleanly (including comments).

    When that happens, consider replacing this code with calls to that.
    """
    blocks = []
    for block_string in parse_tf_file_for_block_strings(path):
        if block_string.strip().startswith("variable "):
            try:
                parsed = hcl.loads(block_string)
            except ValueError:
                log.bad(f"error parsing {path}")
                print(block_string)
                raise
            else:
                blocks.append(parsed)
    return blocks
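# For reference, a sketch (using a made-up block) of what hcl.loads returns
# for one of the "variable" block strings this parser extracts:
import hcl

block = '''
variable "instance_type" {
  description = "EC2 instance type"
  default     = "t3.micro"
}
'''
parsed = hcl.loads(block)
assert parsed == {'variable': {'instance_type': {
    'description': 'EC2 instance type', 'default': 't3.micro'}}}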
def write_rules() -> object:
    """
    (Over)write a set of rules
    :return: JSON of success message or error message
    """
    db = Db()
    body = request.get_json()
    if 'rule_set' not in body:
        return jsonify({"error": "rule_set is a required attribute"})
    rule_set = body['rule_set']
    rules = None
    try:
        rules = hcl.loads(rule_set)
    except Exception as e:
        return jsonify({"error": "%s%s" % (str(e), rule_set)})
    for ruleName, ruleDef in rules['rule'].items():
        dbRule = db.Rules(
            name=ruleName,
            source=ruleDef['source']
        )
        if 'mandatory' in ruleDef:
            db.Rules.objects(name=ruleName).update_one(
                source=ruleDef['source'],
                mandatory=ruleDef['mandatory'],
                upsert=True,
                write_concern=None)
        else:
            db.Rules.objects(name=ruleName).update_one(
                source=ruleDef['source'],
                upsert=True,
                write_concern=None)
    return jsonify({"success": "Written successfully"})
def get_full_configuration(self) -> Dict[str, str]:
    """Get full backend configuration."""
    if not self.config_file:
        return self.data.dict(exclude_none=True)
    result = cast(Dict[str, str], hcl.loads(self.config_file.read_text()))
    result.update(self.data.dict(exclude_none=True))
    return result
def _parse_aws_providers(rendered):
    blocks = []
    buffer = []
    for line in rendered.splitlines():
        stripped_line = line.strip()
        if blocks:
            buffer.append(line)
            if stripped_line == '}':
                blocks.pop()
                if not blocks:
                    try:
                        parsed = hcl.loads('\n'.join(buffer))
                    except ValueError:
                        log.bad('Error parsing AWS provider:\n{}', '\n'.join(buffer))
                        raise
                    else:
                        yield parsed['provider']['aws']
                    buffer.clear()
            else:
                match = re.match(r'^([a-z_]+)\s+\{$', stripped_line)
                if match:
                    blocks.append(match.group(1))
        else:
            match = re.match(r'^(provider\s+"aws")\s+\{$', stripped_line)
            if match:
                blocks.append(match.group(1))
                buffer.append(line)
def read_config_file(file):
    """Read files into objects."""
    file_extension = file.split('.')[-1]
    if not os.path.exists(file):
        raise FileNotFoundError
    logger.debug(
        'Using "{}" as input file and "{}" as file extension'.format(
            file, file_extension))
    if file_extension == 'json':
        with open(file) as f:
            config = json.load(f, object_pairs_hook=OrderedDict)
        return config
    elif file_extension in ('yaml', 'yml', 'nukirc'):
        with open(file, encoding='utf-8') as f:
            config = yaml.safe_load(f)
        return config
    elif file_extension == 'hcl':
        with open(file) as f:
            config = hcl.loads(f.read())
        return config
    else:
        raise ValueError(
            'Unable to parse file {}. Error: Unsupported extension '
            '(json/yaml/hcl only)'.format(file))  # noqa
def get_policy_command():
    name = demisto.args()['name']
    res = get_policy(name)
    if not res or 'rules' not in res:
        return_error('Policy not found')
    rules = hcl.loads(res['rules'])
    mapped_rules = [{
        'Path': k,
        'Capabilities': v['capabilities']
    } for k, v in rules.get('path', {}).items()]
    mapped_policy = {'Name': res['name'], 'Rule': mapped_rules}
    demisto.results({
        'Type': entryTypes['note'],
        'Contents': res,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown(
            'HashiCorp Vault Policy - ' + name, mapped_rules, removeNull=True),
        'EntryContext': {
            'HashiCorp.Policy(val.Name===obj.Name)': createContext(
                mapped_policy, removeNull=True)
        }
    })
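# A hedged illustration of the policy shape the mapping above assumes: pyhcl
# turns each Vault `path "..."` block into a dict entry keyed by the path,
# carrying its capabilities list. (Policy text is made up.)
import hcl

policy = hcl.loads('''
path "secret/*" {
  capabilities = ["read", "list"]
}
''')
assert policy['path']['secret/*']['capabilities'] == ['read', 'list']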
def ask_stack_modules(self):
    self.use_stack_modules = self.choice_question(
        'Do you want to use a generic stack?\n', ['y', 'n'])
    if self.use_stack_modules == 'y':
        # stack_options = os.listdir(self.stacks_dir).remove('common.hcl')
        # That won't work for testing, but something like it should be built
        # to qualify a list of possible stacks.
        stack_options = [
            'basic-p-rep', 'decoupled-p-rep', 'data-science',
            'data-engineering-hadoop'
        ]
        self.stack_type = self.choice_question(
            'What type of stack are you building?\n', stack_options)
        # TODO: Perhaps qualify the options first or allow for alternative input
        self.stack_template = str(self.stack_type) + '.hcl'
        self.get_and_write_raw_content(['hooks', 'stacks', self.stack_template])
        stack_str = self.stack_env.get_template(
            self.stack_template).render(self.common_dict)
        try:
            self.common_modules = hcl.loads(stack_str)
            parsed_stack = StackParser(self.common_modules).stack
            self.stack[self.r]['modules'].update(parsed_stack['modules'])
            self.stack[self.r]['region_inputs'].update(
                parsed_stack['region_inputs'])
            self.stack['env_inputs'].update(parsed_stack['env_inputs'])
        except Exception:
            err_msg = 'Could not read common modules, invalid format'
            print(err_msg)
            raise ValueError(err_msg)
def decode_rules_as_hcl_string(rules_as_hcl):
    """
    Converts the given HCL (string) representation of rules into a list of
    rule domain models.
    :param rules_as_hcl: the HCL (string) representation of a collection of rules
    :return: the equivalent domain model to the given rules
    """
    rules_as_hcl = to_text(rules_as_hcl)
    rules_as_json = hcl.loads(rules_as_hcl)
    return decode_rules_as_json(rules_as_json)
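# For reference, a sketch (with a made-up Consul ACL rule) of the intermediate
# dict that decode_rules_as_json receives from hcl.loads:
import hcl

rules_as_json = hcl.loads('''
key "foo/" {
  policy = "write"
}
''')
assert rules_as_json == {'key': {'foo/': {'policy': 'write'}}}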
def parse_hcl_file(path_to_hcl_file: str) -> dict:
    """Take a path to an HCL file and return its contents as a valid python dict.

    Arguments:
        path_to_hcl_file -- the path of the hcl file

    Returns:
        config_file_dict -- dict of the file
    """
    return hcl.loads(read_file(path_to_hcl_file))
def test_render_service_vpc(tpl_fname, invalid, version, tmpdir):
    with open(os.path.join(FIXTURE_DIR, tpl_fname), 'rb') as fp:
        print(f'\n\ntpl_fname is {tpl_fname}\n\n')
        if not invalid:
            inp = fp.read()
            tg = TerragruntGenerator(
                debug=True, terraform_version=version, headless=True)
            tg.templates_dir = FIXTURE_DIR
            tg.stack = SINGLE_STACK
            tg.ask_terragrunt_version()
            p = tmpdir.mkdir("sub")
            os.chdir(p)
            tg.make_all()
            # print(os.listdir("ap-northeast-1/ebs"))
        else:
            with pytest.raises((ValueError, UndefinedError)):
                hcl.loads(fp.read())
def check_parsed_file(self, require_remote_state_block=True):
    # This function makes sure that self.out_string contains a legit HCL file
    # with a remote state config.
    obj = hcl.loads(self.out_string)
    debug(obj)
    try:
        d = obj["remote_state"]
    except KeyError:
        if require_remote_state_block:
            return "No remote_state block found."
    return True
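# A hedged sketch of an out_string that would pass the check above; the
# remote_state contents are made up:
import hcl

obj = hcl.loads('''
remote_state {
  backend = "s3"
}
''')
assert "remote_state" in obj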
def parse_environment_variable_for_variables(name: str, value: str) -> dict:
    # name[7:] strips the "TF_VAR_" prefix, leaving the variable name itself.
    cleaned = clean_block_string(f"{name[7:]} = {value}")
    try:
        return hcl.loads(cleaned)
    except ValueError as error:
        print(file=sys.stderr)
        log.bad("Error parsing:")
        print(value, file=sys.stderr)
        log.bad(f"Raising: {error.__class__.__name__}")
        raise
def parse_terraform_directory(self, path):
    # It looks like we are repeating ourselves. This is done to first process
    # the initial directory and then gather the details for the modules.
    # A future modification may DRY this up by separating it into different
    # functions. Alas, I have no more time to work on this.
    terraform_string = ""
    for directory, subdirectories, files in os.walk(path):
        for file in files:
            if file.endswith(".tf"):
                new_terraform = self.read_terraform_file(
                    os.path.join(directory, file))
                try:
                    hcl.loads(new_terraform)
                except ValueError as e:
                    raise TerraformSyntaxException(
                        "Invalid terraform configuration in {0}\n{1}".format(
                            os.path.join(directory, file), e))
                modules_to_process = self.check_terraform_for_modules(
                    new_terraform)
                terraform_string += new_terraform
                if modules_to_process is not None:
                    for module_directory in modules_to_process:
                        for module_dir, module_subdirs, module_files in os.walk(
                                module_directory):
                            for module_file in module_files:
                                if module_file.endswith(".tf"):
                                    try:
                                        module_terraform = self.read_terraform_file(
                                            os.path.join(module_dir, module_file))
                                        hcl.loads(module_terraform)
                                    except ValueError as e:
                                        raise TerraformSyntaxException(
                                            "Invalid terraform configuration in {0}\n{1}".format(
                                                os.path.join(module_dir, module_file), e))
                                    terraform_string += module_terraform
    terraform = hcl.loads(terraform_string)
    return terraform
def __init__(self, wfile):
    """Loads the workflow as a dict from the `.workflow` file."""
    super(HCLWorkflow, self).__init__()
    self.wf_fmt = "hcl"
    if not os.path.exists(wfile):
        # wfile is not a path on disk; try to parse it as an HCL string
        self.wf_dict = hcl.loads(wfile)
    else:
        with open(wfile) as fp:
            self.wf_dict = hcl.load(fp)
    if 'action' in self.wf_dict:
        self.wf_dict['steps'] = self.wf_dict.pop('action')
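# A hedged sketch of the `action` -> `steps` normalization above, using a
# minimal (made-up) workflow in the old HCL-based GitHub Actions format:
import hcl

wf = hcl.loads('''
action "build" {
  uses = "./ci"
}
''')
wf['steps'] = wf.pop('action')
assert wf == {'steps': {'build': {'uses': './ci'}}}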
def parse_hcl_value(hcl_string):
    if str(hcl_string).startswith(('${', '"${')):
        # Wrap the interpolation in a dummy assignment so pyhcl can parse it.
        snippet = "key = \"{}\"".format(str(hcl_string).replace("'", "\"").lower())
        try:
            parsed = loads(snippet)
            return eval(parsed['key'].replace('${', '{'))
        except ValueError:
            return hcl_string
    if type(hcl_string) is dict:
        hcl_string = {k.lower(): v for k, v in hcl_string.items()}
    return hcl_string
def load_rules_for_token(module, consul_api, token):
    try:
        rules = Rules()
        info = consul_api.acl.info(token)
        if info and info['Rules']:
            rule_set = hcl.loads(to_ascii(info['Rules']))
            for rule_type in rule_set:
                for pattern, policy in rule_set[rule_type].items():
                    rules.add_rule(rule_type, Rule(pattern, policy['policy']))
        return rules
    except Exception as e:
        module.fail_json(
            msg="Could not load rule list from retrieved rule data %s, %s" % (
                token, e))
def get_policy(self, name, parse=False):
    """
    GET /sys/policy/<name>
    """
    try:
        policy = self._get('/v1/sys/policy/{0}'.format(name)).json()['rules']
        if parse:
            if not has_hcl_parser:
                raise ImportError('pyhcl is required for policy parsing')
            policy = hcl.loads(policy)
        return policy
    except exceptions.InvalidPath:
        return None
def get_active_vault_addresses(self):
    vault_addresses = []
    for config_path in self.config_paths:
        config_hcl = load_config_file(config_path)
        config = hcl.loads(config_hcl)
        try:
            vault_address = 'https://{addr}'.format(
                addr=config['listener']['tcp']['address'])
        except KeyError as error:
            logger.debug(
                'Unable to find explicit Vault address in config file '
                '{path}: {err}'.format(path=config_path, err=error))
            vault_address = 'https://127.0.0.1:8200'
            logger.debug('Using default address: {addr}'.format(addr=vault_address))
        vault_addresses.append(vault_address)
    return vault_addresses
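# For reference, a minimal (made-up) Vault server config illustrating the
# config['listener']['tcp']['address'] lookup above:
import hcl

config = hcl.loads('''
listener "tcp" {
  address     = "0.0.0.0:8200"
  tls_disable = 1
}
''')
assert config['listener']['tcp']['address'] == '0.0.0.0:8200'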
def write_rule(ruleName: str) -> object:
    """
    (Over)write a single rule
    :param ruleName: Rule Name
    :return: JSON of success message or error message
    """
    db = Db()
    body = request.get_json()
    if 'rule' not in body:
        return jsonify({"error": "rule is a required attribute"})
    pol = body['rule']
    rule = None
    try:
        rule = hcl.loads(pol)
    except Exception as e:
        return jsonify({"error": str(e)})
    if 'rule' not in rule:
        return jsonify({"error": "Invalid json"})
    if ruleName not in rule['rule']:
        return jsonify({"error": "rule name mismatch with provided hcl and url"})
    if 'source' not in rule['rule'][ruleName]:
        return jsonify({"error": "source is a required attribute"})
    dbRule = db.Rules(
        name=ruleName,
        source=rule['rule'][ruleName]['source']
    )
    if 'mandatory' in rule['rule'][ruleName]:
        db.Rules.objects(name=ruleName).update_one(
            source=rule['rule'][ruleName]['source'],
            mandatory=rule['rule'][ruleName]['mandatory'],
            upsert=True,
            write_concern=None)
    else:
        db.Rules.objects(name=ruleName).update_one(
            source=rule['rule'][ruleName]['source'],
            upsert=True,
            write_concern=None)
    return jsonify({"success": "Written successfully"})
def _get_tf_min_capacities(*, account_name, role_arn, cluster_name):
    """
    Ask the Terraform: what's the min these services should ever be warmed to?
    """
    tfstate_location = _get_tfstate_location(
        account_name=account_name, cluster_name=cluster_name)
    s3_client = get_aws_client("s3", role_arn=role_arn)
    tfstate = hcl.loads(get_s3_object(s3_client, **tfstate_location))
    min_capacities = _find_autoscaling_config(tfstate)
    return {
        name: capacity
        for name, capacity in min_capacities.items()
        if name.startswith(f"service/{cluster_name}/")
    }
def parse_apply_outputs(stdout: str) -> dict:
    try:
        outputs_string = stdout.split("Outputs:\n", 1)[1]
    except IndexError:
        return {}
    cleaned = clean_block_string(outputs_string)
    if not cleaned:
        return {}
    try:
        return hcl.loads(cleaned)
    except ValueError as error:
        print(file=sys.stderr)
        log.bad("Error parsing:")
        print(stdout, file=sys.stderr)
        log.bad(f"Raising: {error.__class__.__name__}")
        raise
def ask_common_modules(self):
    self.use_common_modules = self.choice_question(
        'Would you like to use common modules', ['y', 'n'])
    if self.use_common_modules == 'y':
        try:
            self.get_and_write_raw_content(['hooks', 'stacks', 'common.hcl'])
            common_str = self.stack_env.get_template(
                self.common_template).render(self.common_dict)
            self.common_modules = hcl.loads(common_str)
            parsed_stack = StackParser(self.common_modules).stack
            self.stack[self.r]['modules'].update(parsed_stack['modules'])
            self.stack[self.r]['region_inputs'].update(
                parsed_stack['region_inputs'])
            self.stack['env_inputs'].update(parsed_stack['env_inputs'])
        except Exception:
            err_msg = 'Could not read common modules, invalid format'
            print(err_msg)
            raise ValueError(err_msg)