Example #1
def main(file_name, branch_name):
    session = boto3.Session(profile_name=os.environ.get("AWS_PROFILE"))
    client = session.client("ssm")
    file_path = os.path.normpath(os.getcwd()).split(os.sep)
    dir_name = f"{file_path[-2]}/{file_path[-1]}"
    fname_no_ext = os.path.splitext(os.path.basename(file_name))[0]
    repo = Repo(os.path.abspath(os.path.join(__file__, "../..")))
    repo_name = repo.remotes.origin.url.split(".git")[0].split("/")[-1]
    if branch_name is None:
        branch_name = repo.head.ref
    repo_branch = f"{repo_name}/{branch_name}"
    tags = [{
        "Key": "terraform",
        "Value": "false"
    }, {
        "Key": "appname",
        "Value": file_path[-1]
    }, {
        "Key": "environment",
        "Value": fname_no_ext
    }]
    with open(file_name, "r") as f:
        params = hcl2.load(f)
        for param_name, param_value in params.items():
            full_param_name = (
                f"/tfvars/{fname_no_ext}/{repo_branch}/{dir_name}/{param_name}"
            )
            try:
                output = client.get_parameter(Name=full_param_name)
                if output["Parameter"]["Value"] != json.dumps(param_value[0]):
                    client.put_parameter(Name=full_param_name,
                                         Value=json.dumps(param_value[0]),
                                         Overwrite=True)
                    print(full_param_name, "Found and updated")
                else:
                    print(full_param_name, "Found and matched")
            except ClientError:
                with open("variables.tf", "r") as file:
                    vars = hcl2.load(file)
                    for var in [
                            v for v in vars["variable"] if param_name in v
                    ]:
                        var_desc = var[param_name]["description"][0]
                client.put_parameter(Name=full_param_name,
                                     Value=json.dumps(param_value[0]),
                                     Description=var_desc,
                                     Type="String",
                                     Tags=tags)
                print(full_param_name, "Not found and created")
Example #2
def _load_or_die_quietly(file: os.PathLike,
                         parsing_errors: Dict,
                         clean_definitions: bool = True) -> Optional[Mapping]:
    """
Load JSON or HCL, depending on filename.
    :return: None if the file can't be loaded
    """

    file_path = os.fspath(file)
    file_name = os.path.basename(file_path)

    try:
        logging.debug(f"Parsing {file_path}")
        with open(file, "r") as f:
            if file_name.endswith(".json"):
                return json.load(f)
            else:
                raw_data = hcl2.load(f)
                non_malformed_definitions = _validate_malformed_definitions(
                    raw_data)
                if clean_definitions:
                    return _clean_bad_definitions(non_malformed_definitions)
                else:
                    return non_malformed_definitions
    except Exception as e:
        logging.debug(f'failed while parsing file {file_path}', exc_info=e)
        parsing_errors[file_path] = e
        return None
Example #3
def get_auto_vars(root_directory: str) -> Dict[str, Set[Variable]]:
    """
    Recursively scan a directory and find all variables that are being exposed via tfvars files
    :param root_directory: directory where to start search
    :return:
    """
    auto_vars: Dict[str, Set[Variable]] = defaultdict(set)
    # pylint: disable=unused-variable
    for current_dir, dirs, files in os.walk(root_directory, followlinks=True):
        if '.terraform' in current_dir:
            continue

        vars_files = [
            file_name for file_name in files if file_name.endswith('.tfvars')
        ]
        for file_name in vars_files:
            with open(current_dir + '/' + file_name, 'r') as file:
                variables = hcl2.load(file)
                for key, value in variables.items():
                    # python-hcl2 always returns a list for each variable value, in case
                    # a variable is defined multiple times in the same file.
                    # We just use the last appearance of a variable.
                    value = value[-1]
                    auto_vars[os.path.join(current_dir, file_name)].add(
                        Variable(key, _make_hashable(value)))

    return dict(auto_vars)
Example #4
def parse_tf_module(tf_module):
    """Parse all tf files in directory."""
    tf_objects = []
    for file in glob.glob(f"{tf_module}/*.tf"):
        with open(file, "r", encoding="utf8") as handle:
            tf_objects.append(hcl2.load(handle))
    return tf_objects
Example #5
    def _load_test_files(self):
        """Recursively parse all files in a directory"""
        # pylint: disable=unused-variable
        for current_dir, dirs, files in os.walk("terraform-config"):
            dir_prefix = os.path.commonpath([HCL2_DIR, current_dir])
            relative_current_dir = current_dir.replace(dir_prefix, '')
            current_out_dir = os.path.join(JSON_DIR, relative_current_dir)
            for file_name in files:
                file_path = os.path.join(current_dir, file_name)
                json_file_path = os.path.join(current_out_dir, file_name)
                json_file_path = os.path.splitext(json_file_path)[0] + '.json'

                with self.subTest(msg=file_path):
                    with open(file_path, 'r') as hcl2_file, \
                            open(json_file_path, 'r') as json_file:
                        try:
                            hcl2_dict = hcl2.load(hcl2_file)
                        except Exception as ex:
                            raise RuntimeError(
                                f"failed to tokenize terraform in `{file_path}`"
                            ) from ex
                        json_dict = json.load(json_file)

                        self.assertDictEqual(hcl2_dict, json_dict,
                                             f"failed comparing {file_path}")
Example #6
    def __init__(self, tf_folder: str, tf_glob="*.tf"):
        self.current = -1
        self._vars: Dict[str, Var] = {}
        variable_key = "variable"
        description_key = "description"
        default_key = "default"
        for tf in pathlib.Path(tf_folder).glob(tf_glob):
            with tf.open("r") as f:
                try:
                    data = hcl2.load(f)
                except Exception as e:
                    continue
                if not data or variable_key not in data:
                    continue
                try:
                    _ = (e for e in data[variable_key])
                except TypeError as e:
                    continue
                for r in data[variable_key]:
                    for k, v in r.items():
                        var_name = k
                        var_help = ""
                        var_default = None
                        if description_key in v:
                            var_help = v[description_key][0]
                        if default_key in v:
                            var_default = v[default_key][0]
                        self._vars[var_name] = Var(var_name, var_default, var_help)
Example #7
    def __init__(self, directory=None, settings=None):
        self.settings = settings if settings else {}

        # handle the root module first...
        self.directory = directory if directory else os.getcwd()
        #print(self.directory)
        self.config_str = ''
        iterator = iglob(self.directory + '/*.tf')
        for fname in iterator:
            with open(fname, 'r', encoding='utf-8') as f:
                self.config_str += f.read() + ' '

        config_io = io.StringIO(self.config_str)
        self.config = hcl2.load(config_io)

        # then any submodules it may contain, skipping any remote modules for
        # the time being.
        self.modules = {}
        if 'module' in self.config:
            for item in self.config['module']:
                for name, mod in item.items():
                    if 'source' not in mod:
                        continue
                    source = mod['source'][0]
                    # '//' used to refer to a subdirectory in a git repo
                    if re.match(r'.*\/\/.*', source):
                        continue
                    # '@' should only appear in ssh urls
                    elif re.match(r'.*\@.*', source):
                        continue
                    # 'github.com' special behavior.
                    elif re.match(r'github\.com.*', source):
                        continue
                    # points to new TFE module registry
                    elif re.match(r'app\.terraform\.io', source):
                        continue
                    # bitbucket public and private repos
                    elif re.match(r'bitbucket\.org.*', source):
                        continue
                    # git::https or git::ssh sources
                    elif re.match(r'^git::', source):
                        continue
                    # git:// sources
                    elif re.match(r'^git:\/\/', source):
                        continue
                    # Generic Mercurial repos
                    elif re.match(r'^hg::', source):
                        continue
                    # Public Terraform Module Registry
                    elif re.match(
                            r'^[a-zA-Z0-9\-_]+\/[a-zA-Z0-9\-_]+\/[a-zA-Z0-9\-_]+',
                            source):
                        continue
                    # AWS S3 buckets
                    elif re.match(r's3.*\.amazonaws\.com', source):
                        continue
                    # fixme path join. eek.
                    self.modules[name] = Terraform(directory=self.directory +
                                                   '/' + source,
                                                   settings=mod)
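The module-walking pattern above (and in Examples #8, #15 and #24 below) relies on how hcl2.load represents module blocks. A hedged sketch with an invented module name and source:

import io
import hcl2

snippet = io.StringIO('''
module "vpc" {
  source = "./modules/vpc"
}
''')
config = hcl2.load(snippet)
# Roughly {'module': [{'vpc': {'source': ['./modules/vpc']}}]} on older
# python-hcl2 releases, hence the mod['source'][0] indexing above.
for item in config.get('module', []):
    for name, mod in item.items():
        print(name, mod['source'][0])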
Example #8
File: parser.py  Project: wybczu/checkov
    def hcl2(self, directory, tf_definitions={}, parsing_errors={}):
        modules_scan = []

        for file in os.listdir(directory):
            if file.endswith(".tf"):
                tf_file = os.path.join(directory, file)
                if tf_file not in tf_definitions.keys():
                    try:
                        with open(tf_file, 'r') as f:
                            f.seek(0)
                            tf_definition = hcl2.load(f)
                            tf_definitions[tf_file] = tf_definition
                            # TODO move from here
                            # tf_definitions = context_registry.enrich_context(tf_file, tf_definitions)

                            for modules in tf_definition.get("module", []):
                                for module in modules.values():
                                    relative_path = module['source'][0]
                                    abs_path = os.path.join(
                                        directory, relative_path)
                                    modules_scan.append(abs_path)
                    except Exception as e:
                        self.logger.debug('failed while parsing file %s' %
                                          tf_file,
                                          exc_info=e)
                        parsing_errors[tf_file] = e
        for m in modules_scan:
            if path.exists(m):
                self.hcl2(directory=m, tf_definitions=tf_definitions)
Example #9
def get_variables():
    variables = {}
    for file in glob.glob("**/variables.tf", recursive=True):
        with open(file, "r") as f:
            data = hcl2.load(f)
            for var in data['variable']:
                name = list(var.keys())[0]
                if name not in variables.keys():
                    variables[name] = {"definitions": []}
                if "description" in list(var[name].keys()):
                    v_description = var[name]["description"][0]
                else:
                    v_description = "NULL"
                if "type" in list(var[name].keys()):
                    v_type = var[name]["type"][0]
                else:
                    v_type = "NULL"
                variables[name]["definitions"].append({
                    "type": v_type,
                    "description": v_description,
                    "used_in": "/".join(file.split("/")[1:-1]),
                })
    return variables
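For orientation, a hedged sketch of the 'variable' structure that get_variables() (and Examples #6 and #22) iterate over; the block content is invented:

import io
import hcl2

snippet = io.StringIO('''
variable "region" {
  description = "AWS region"
  default     = "us-east-1"
}
''')
parsed = hcl2.load(snippet)
# Each block becomes a one-key dict inside the top-level 'variable' list, roughly
# {'variable': [{'region': {'description': ['AWS region'], 'default': ['us-east-1']}}]}
# on older python-hcl2 releases.
for var in parsed['variable']:
    print(list(var.keys())[0])  # -> region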
Example #10
    def hcl2(self, directory, tf_definitions={}, parsing_errors={}):
        modules_scan = []

        for file in os.listdir(directory):
            if file.endswith(".tf"):
                tf_file = os.path.join(directory, file)
                if tf_file not in tf_definitions.keys():
                    try:
                        with(open(tf_file, 'r')) as file:
                            file.seek(0)
                            tf_definition = hcl2.load(file)
                            for resource_type in tf_definition.get('resource', []):
                                for resource in resource_type.values():
                                    for named_resource in resource.values():
                                        for dynamic_block in named_resource.get('dynamic', []):
                                            for dynamic_field_name, dynamic_field_value in dynamic_block.items():
                                                named_resource[dynamic_field_name] = dynamic_field_value['for_each']
                            tf_definitions[tf_file] = tf_definition
                            # TODO move from here
                            # tf_defenitions = context_registry.enrich_context(tf_file,tf_defenitions)

                            for modules in tf_definition.get("module", []):
                                for module in modules.values():
                                    relative_path = module['source'][0]
                                    abs_path = os.path.join(directory, relative_path)
                                    modules_scan.append(abs_path)
                    except Exception as e:
                        self.logger.debug('failed while parsing file %s' % tf_file, exc_info=e)
                        parsing_errors[tf_file] = e
        for m in modules_scan:
            if path.exists(m):
                self.hcl2(directory=m, tf_definitions=tf_definitions)
Example #11
def hcl2json(f):
    if f not in JSON_CACHE:
        try:
            with(open(f, 'r')) as file:
                JSON_CACHE[f] = hcl2.load(file)
        except UnexpectedToken as e:
            LOG.error("Error parsing {}: {}".format(f, e))
            sys.exit(1)
    return JSON_CACHE[f]
Example #12
    def get_variable_list(self):
        results = list()

        for tf_file in pathlib.Path(self.working_dir).glob("*.tf"):
            with open(tf_file, "r") as fp:
                terraform_file_dict = hcl2.load(fp)
                results += terraform_file_dict["variable"] if "variable" in terraform_file_dict else list()

        return list(map(lambda d: next(iter(d)), results))
Example #13
def export(name):
    with open(metrics_file, 'r') as file:
        metrics = hcl.load(file)['metric']
        data = request.json
        start = int(data['start'])
        end = int(data['end'])
        bucket_name = data['bucket_name']

        path = generate_reports(metrics, bucket_name, name, start, end)
        return f'reports generated at {path}', 201
Example #14
    def test_module_double_slash_cleanup(self):
        with open(os.path.join(os.path.dirname(__file__), 'resources', 'double_slash.tf')) as f:
            tf = hcl2.load(f)
        non_malformed_definitions = validate_malformed_definitions(tf)
        definitions = {
            '/mock/path/to.tf': clean_bad_definitions(non_malformed_definitions)
        }
        module, _ = Parser().parse_hcl_module_from_tf_definitions(definitions, '', 'terraform')
        print(module)
        self.assertEqual(1, len(module.blocks))
        self.assertEqual('ingress.annotations.kubernetes\\.io/ingress\\.class', module.blocks[0].attributes['set.name'])
Example #15
def tf_module_recursive(file_path):
    directory = file_path if file_path else os.getcwd()
    config_str = ""
    iterator = iglob(directory + "/*.tf")
    for fname in iterator:
        with open(fname, "r", encoding="utf-8") as f:
            config_str += f.read() + " "
    config_io = io.StringIO(config_str)
    config = hcl2.load(config_io)
    modules = {k: v for k, v in config.items() if k == "module"}
    return modules
Example #16
    def parse_file(self, file, tf_definitions={}, parsing_errors={}):
        if file.endswith(".tf"):
            try:
                with open(file, 'r') as tf_file:
                    tf_file.seek(0)
                    tf_definition = hcl2.load(tf_file)
                    tf_definitions[file] = tf_definition
            except Exception as e:
                self.logger.debug('failed while parsing file %s' % file, exc_info=e)
                parsing_errors[file] = e
Example #17
def _parse_tf_definitions(tf_file):
    with(open(tf_file, 'r')) as file:
        file.seek(0)
        tf_definition = hcl2.load(file)
        for resource_type in tf_definition.get('resource', []):
            for resource in resource_type.values():
                for named_resource in resource.values():
                    for dynamic_block in named_resource.get('dynamic', []):
                        for dynamic_field_name, dynamic_field_value in dynamic_block.items():
                            named_resource[dynamic_field_name] = dynamic_field_value['for_each']
    return tf_definition
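A hypothetical illustration of the dynamic-block flattening above; the security-group HCL and the temporary file are invented for the sketch, and hcl2 is assumed to be imported since the snippet above omits its imports:

import tempfile

snippet = '''
resource "aws_security_group" "example" {
  dynamic "ingress" {
    for_each = var.ports
    content {
      from_port = ingress.value
    }
  }
}
'''
with tempfile.NamedTemporaryFile("w", suffix=".tf", delete=False) as tmp:
    tmp.write(snippet)
parsed = _parse_tf_definitions(tmp.name)
# After flattening, the resource body carries an "ingress" key holding the
# dynamic block's for_each expression alongside the original "dynamic" entry.
print(parsed["resource"][0]["aws_security_group"]["example"])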
Example #18
    def parse_file(self, filename, fail_on_error=True):
        self.filename = filename
        try:
            with open(filename, "r") as f:
                return self.parse(hcl2.load(f), fail_on_error=fail_on_error)

        except ValidationError as e:
            with open(filename, 'r', encoding='utf8') as f:
                config = f.read()
            handle_parse_exception(e, config,
                                   'Error parsing hcl file ' + filename)
Example #19
def parse_variable_files(variable_files: List[str]) -> Dict[str, str]:
    """
    Convenience function for parsing variable files and returning the variables as a dictionary
    :param variable_files: List of file paths to variable files. Later files override values from earlier ones.
    :return: A dictionary representing the contents of those variable files.
    """
    variables: Dict = {}

    for variable_file in variable_files:
        with open(variable_file, encoding='utf-8') as var_file:
            variables.update(hcl2.load(var_file).items())

    return variables
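Hypothetical call with placeholder file names; values from the second file override the first. (Example #21 below is a variant of the same helper that also unwraps python-hcl2's single-element lists.)

merged = parse_variable_files(["common.tfvars", "prod.tfvars"])
print(merged)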
Example #20
def _parse_backend_config_for_file(file_path: str) -> Optional[BackendsConfig]:
    with open(file_path) as tf_file:
        try:
            configs: Dict[str, List] = hcl2.load(tf_file)

            terraform_config_blocks: List[Dict] = configs.get('terraform', [])
            for terraform_config in terraform_config_blocks:
                if 'backend' in terraform_config:
                    return jsons.load(terraform_config['backend'][0], BackendsConfig, strict=True)
            return None
        except Exception:
            print('Error while parsing file %s' % file_path)
            raise
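A hedged sketch of the terraform/backend structure this function looks for; the bucket and key are placeholders, and BackendsConfig is assumed to be the project's own jsons-compatible dataclass:

import io
import hcl2

snippet = io.StringIO('''
terraform {
  backend "s3" {
    bucket = "my-state-bucket"
    key    = "prod/terraform.tfstate"
  }
}
''')
parsed = hcl2.load(snippet)
# Roughly {'terraform': [{'backend': [{'s3': {'bucket': [...], 'key': [...]}}]}]},
# which is why the code reads terraform_config['backend'][0].
print(parsed['terraform'][0]['backend'][0])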
Example #21
def parse_variable_files(variable_files: List[str]) -> Dict[str, str]:
    """
    Convenience function for parsing variable files and returning the variables as a dictionary
    :param variable_files: List of file paths to variable files. Later files override values from earlier ones.
    :return: A dictionary representing the contents of those variable files.
    """
    variables: Dict = {}

    for variable_file in variable_files:
        with open(variable_file) as var_file:
            flat_vars = {key: values[0] for key, values in hcl2.load(var_file).items()}
            variables.update(flat_vars)

    return variables
Example #22
def get_nondefault_variables_for_file(file_path: str) -> Set[str]:
    """
    Find all variables missing default values that are declared in a terraform file
    :param file_path: a terraform file
    :return: Set of variable names declared in the file
    """
    variables = set()
    with open(file_path, 'r', encoding='utf-8') as file:
        tf_info = hcl2.load(file)
        for variable in tf_info.get('variable', []):
            for variable_name, var_config in variable.items():
                if not var_config.get('default'):
                    variables.add(variable_name)

    return variables
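Hypothetical usage with a placeholder path:

missing = get_nondefault_variables_for_file("modules/network/variables.tf")
print(missing)  # e.g. {'vpc_id', 'subnet_ids'}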
Example #23
def validate_file(filename):
    try:
        with (open(filename, 'r')) as file:
            tf = hcl2.load(file)
    except lark.exceptions.UnexpectedToken as e:
        return [
            parliament.finding.Finding("Failed to parse file", str(e),
                                       filename)
        ]

    findings = []

    # Validate data.aws_iam_policy_document
    for policy_document in filter(
            lambda x: x.get('aws_iam_policy_document', None),
            tf.get('data', [])):
        iam_statements = []

        for policy_name, policy in policy_document[
                'aws_iam_policy_document'].items():
            if 'statement' in policy:
                for statement_data in policy['statement']:
                    # Don't check assume role policies; these will have spurious findings for
                    # "Statement contains neither Resource nor NotResource"
                    actions = statement_data.get('actions')[0]
                    if actions in (['sts:AssumeRole'], ['sts:AssumeRoleWithSAML']):
                        continue

                    iam_statements.append(
                        mock_iam_statement_from_tf(statement_data))

        policy_string = policy_template.format(
            iam_statements=json.dumps(iam_statements))
        findings += parliament.analyze_policy_string(policy_string).findings

    # Validate resource.aws_iam_policy
    for policy_resource in filter(lambda x: x.get('aws_iam_policy', None),
                                  tf.get('resource', [])):
        for policy_name, policy in policy_resource['aws_iam_policy'].items():
            try:
                policy_string = policy['policy'][0]
                policy_string = policy_string.replace('\\"', '"')
            except KeyError:
                continue
            findings += parliament.analyze_policy_string(
                policy_string).findings
    return findings
Example #24
def _get_modules_for_file(directory: str,
                          file_name: str) -> Tuple[str, Set[str]]:
    """
    Get the modules used in a terraform file
    :param directory: Directory where the file is in
    :param file_name: Name of file
    :return:
    """
    modules = set()
    with open(directory + '/' + file_name, 'r') as file:
        tf_info = hcl2.load(file)
        for module in tf_info.get('module', []):
            for module_config in module.values():
                modules.add(os.path.normpath(module_config['source'][0]))

    return directory, modules
Example #25
    def provider_names(self):
        """ Extract only the providers used by a definition """
        result = set(self._providers.keys())
        if self._tf_version_major >= 13:
            version_path = PurePath(self._target) / "versions.tf"
            if Path(version_path).exists():
                with open(version_path, "r") as reader:
                    vinfo = hcl2.load(reader)
                    tf_element = vinfo.get("terraform", [None]).pop()
                    if tf_element:
                        rp_element = tf_element.get("required_providers", [None]).pop()
                        if rp_element:
                            required_providers = set(rp_element.keys())
                            result = result.intersection(required_providers)
        return result
Example #26
    def _read_tf_module_config(self) -> dict:
        tf_module_config = {}

        # Get all of the (non-nested) terraform files in the current module.
        tf_files = [
            entry for entry in os.scandir(self.module_dir_path)
            if entry.path.endswith(".tf")
        ]

        # Read and parse each terraform file.
        for tf_file in tf_files:
            with open(tf_file) as f:
                tf_file_config = hcl2.load(f)
            tf_module_config[tf_file.name] = tf_file_config

        return tf_module_config
Example #27
def parse_envs_file(envs_file):
    """
    The envs.tf file contains metadata that required to build an
    environment for a specific workspace
    :param envs_file:
    :return: list of workspaces and workspace data dict
    """
    with (open(envs_file, 'r')) as env_file:
        env_dict = hcl2.load(env_file)
    workspaces_dict = env_dict['variable'][0]['envs']['default']

    # setup the workspace/account to display and prompt user to select one to build
    workspaces = []
    for workspace in workspaces_dict:
        for key, val in workspace.items():
            workspaces.append(key + "|" + val['account_id'] + "|" +
                              val['account'])
    return workspaces, workspaces_dict
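A hedged sketch of the envs.tf shape parse_envs_file() assumes, matching its access path env_dict['variable'][0]['envs']['default']; the workspace name and account values are invented:

import io
import hcl2

snippet = io.StringIO('''
variable "envs" {
  default = [
    {
      dev = {
        account_id = "111111111111",
        account    = "sandbox"
      }
    }
  ]
}
''')
parsed = hcl2.load(snippet)
print(parsed['variable'][0]['envs'])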
Example #28
def hcl_to_json(file_path):
    """
    converts the hcl file to json file
    """
    json_data = {}
    try:
        with open(file_path, 'r') as fp:
            json_data = hcl2.load(fp)
            json_data = remove_list_from_values(json_data)
    except Exception:
        try:
            with open(file_path, 'r') as fp:
                json_data = hcl.load(fp)
        except Exception as e:
            logger.error(
                "Failed to convert hcl to json data, file: %s , error: %s",
                file_path, str(e))

    return json_data
Example #29
    def __init__(self, directory=None, settings=None):
        self.settings = settings if settings else {}

        # handle the root module first...
        self.directory = directory if directory else os.getcwd()
        #print(self.directory)
        
        self.config = {}
        iterator = iglob( self.directory + '/*.tf')
        for fname in iterator:
            with open(fname, 'r', encoding='utf-8') as f:
                try:
                    raw = io.StringIO(f.read())
                    parsed = hcl.load(raw)
                    self.config.update(parsed)
                except Exception as e:
                    raise RuntimeError('Exception occurred while parsing ' + fname) from e


        # then any submodules it may contain
        self.modules = {}
        if 'module' in self.config:
            for name, mod in [(k, v) for x in self.config['module'] for (k, v) in x.items()]:
                if 'source' not in mod:
                    continue
                source = mod['source'][0]
                
                path = os.path.join(self.directory, source)
                if os.path.exists(path):
                    # local module
                    self.modules[name] = Terraform(directory=path, settings=mod)
                else:
                    # remote module
                    # Since terraform must be init'd before use, we can
                    # assume remote modules have been downloaded to .terraform/modules
                    path = os.path.join(os.getcwd(), '.terraform', 'modules', name)
                    
                    # Get the subdir if any
                    match = re.match(r'.*(\/\/.*)(?!:)', source)
                    if re.match(r'.*\/(\/.*)(?!:)', source):
                        path = os.path.join(path, match.groups()[0])
                    
                    self.modules[name] = Terraform(directory=path, settings=mod)
Example #30
def _load_or_die_quietly(file: os.PathLike, parsing_errors: Dict) -> Optional[Mapping]:
    """
Load JSON or HCL, depending on filename.
    :return: None if the file can't be loaded
    """

    file_path = os.fspath(file)
    file_name = os.path.basename(file_path)

    try:
        with open(file, "r") as f:
            if file_name.endswith(".json"):
                return _clean_bad_definitions(json.load(f))
            else:
                return _clean_bad_definitions(hcl2.load(f))
    except Exception as e:
        LOGGER.debug(f'failed while parsing file {file}', exc_info=e)
        parsing_errors[file_path] = e
        return None
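Hypothetical usage of the helper above; the path is a placeholder, and LOGGER and _clean_bad_definitions are assumed to exist in the surrounding module:

errors = {}
definition = _load_or_die_quietly("main.tf", errors)
if definition is None:
    print("parsing failed:", errors)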