Example #1
def generate_vars_file():
    tf_default_vars = dict()
    tf_vars = dict()

    with open("variables.tf", 'r') as stream:
        tf_default_vars = hcl.load(stream)

    ssh_user = tf_default_vars['variable']['ssh_user']['default']
    private_key = tf_default_vars['variable']['ssh_key']['default']
    cluster_prefix = tf_default_vars['variable']['cluster_prefix']['default']

    with open("terraform.tfvars", 'r') as stream:
        tf_vars = hcl.load(stream)

    if 'cluster_prefix' in tf_vars:
        cluster_prefix = tf_vars['cluster_prefix']
    if 'ssh_key' in tf_vars:
        private_key = tf_vars['ssh_key']
    if 'ssh_user' in tf_vars:
        ssh_user = tf_vars['ssh_user']

    data = {
        "edge_host": "{}-edge-000".format(cluster_prefix),
        "edge_ip": "{{ hostvars.get(edge_host)[\"ansible_host\"] }}",
        "ssh_user": ssh_user,
        "private_key": private_key
    }

    vars_file = 'playbooks/group_vars/all'

    with open(vars_file, 'w') as fh:
        fh.write(yaml.dump(data, default_flow_style=False))
Example #2
def _parse_tfvars(tfvars=None, tfdir=None):
    'Parse vars and tfvars files and return variables.'
    logging.info('parsing tf variables')
    result = {}
    try:
        with open(os.path.join(tfdir, 'variables.tf')) as f:
            result = {
                k: v.get('default')
                for k, v in hcl.load(f)['variable'].items()
            }
        if tfvars:
            with open(os.path.join(tfdir, tfvars)) as f:
                result.update(hcl.load(f))
        else:
            logging.info('no tfvars file used')
    except (KeyError, ValueError) as e:
        raise Error(f'Wrong variable files syntax: {e}')
    except (IOError, OSError) as e:
        raise Error(f'Cannot open variable files: {e}')
    for k, v in result.items():
        if k == 'post_bootstrap_config':
            continue
        if v is None:
            raise Error(f'Terraform variable {k} not set.')
    return result
Example #3
def remove_stale_tf_config(path, backend_tfvars_file):
    """Ensure TF is ready for init.

    If deploying a TF module to multiple regions (or any scenario requiring
    multiple backend configs), switching the backend will cause TF to
    compare the old and new backends. This will frequently cause an access
    error as the creds/role for the new backend won't always have access to
    the old one.

    This method compares the defined & initialized backend configs and
    trashes the terraform directory if they're out of sync.
    """
    terraform_dir = os.path.join(path, '.terraform')
    tfstate_filepath = os.path.join(terraform_dir, 'terraform.tfstate')
    if os.path.isfile(tfstate_filepath):
        LOGGER.debug('Comparing previous & desired Terraform backend '
                     'configs')
        with open(tfstate_filepath, 'r') as fco:
            state_config = hcl.load(fco)

        if state_config.get('backend') and state_config['backend'].get(
                'config'):  # noqa
            backend_tfvars_filepath = os.path.join(path, backend_tfvars_file)
            with open(backend_tfvars_filepath, 'r') as fco:
                backend_config = hcl.load(fco)
            if any(state_config['backend']['config'][key] != value
                   for (key, value) in viewitems(backend_config)):
                LOGGER.info(
                    "Desired and previously initialized TF "
                    "backend config is out of sync; trashing "
                    "local TF state directory %s", terrform_dir)
                send2trash(terrform_dir)
Example #4
def as_text():
    dephell = {}
    for root, dirs, files in os.walk(args.directory):
        if "terragrunt-cache" in root:
            continue
        for name in files:
            if name == "terraform.tfvars":
                tf_filename = os.path.join(root, name)
                with open(tf_filename, "r") as fp:
                    obj = hcl.load(fp)
                    try:
                        dephell[root] = []
                        dependencies = obj["terragrunt"]["dependencies"][
                            "paths"]
                        for dep in dependencies:
                            dephell[root].append(
                                os.path.join(args.directory, dep[3:]))
                    except KeyError:
                        pass

    success = False
    try:
        for k in dephell.keys():
            for d in dephell[k]:
                if k in dephell[d]:
                    print("Circular dep found between %s and %s" % (k, d))
                    success = True

    except KeyError as e:
        print(
            "%s :: has possible dependencies problem: module %s looks for %s" %
            (e, k, d))
    return success
Example #5
    def __init__(self, wfile):
        # Read and parse the workflow file.
        with open(wfile, 'r') as fp:
            self.parsed_workflow = hcl.load(fp)
            fp.seek(0)
            self.workflow_content = fp.readlines()
            self.workflow_path = wfile
Example #6
    def __init__(self, wfile, workspace, quiet, debug, dry_run):
        wfile = pu.find_default_wfile(wfile)

        with open(wfile, 'r') as fp:
            self.wf = hcl.load(fp)

        self.workspace = workspace
        self.debug = debug
        if debug:
            self.quiet = False
        else:
            self.quiet = quiet
        self.dry_run = dry_run

        self.actions_cache_path = os.path.join('/', 'tmp', 'actions')
        self.validate_syntax()
        self.check_secrets()
        self.normalize()
        self.complete_graph()

        self.env = {
            'GITHUB_WORKSPACE': self.workspace,
            'GITHUB_WORKFLOW': self.wf['name'],
            'GITHUB_ACTOR': 'popper',
            'GITHUB_REPOSITORY': '{}/{}'.format(scm.get_user(),
                                                scm.get_name()),
            'GITHUB_EVENT_NAME': self.wf['on'],
            'GITHUB_EVENT_PATH': '/{}/{}'.format(self.workspace,
                                                 'workflow/event.json'),
            'GITHUB_SHA': scm.get_sha(),
            'GITHUB_REF': scm.get_ref()
        }

        for e in dict(self.env):
            self.env.update({e.replace('GITHUB_', 'POPPER_'): self.env[e]})
Example #7
def prep_workspace_switch(module_path, backend_options, env_name, env_region,
                          env_vars):
    """Clean terraform directory and run init if necessary.

    Creating a new workspace after a previous workspace has been created with
    a defined 'key' will result in the new workspace retaining the same key.
    Additionally, existing workspaces will not show up in a `tf workspace
    list` if they have a different custom key than the previously
    initialized workspace.

    This function will check for a custom key and re-init.
    """
    terraform_dir = os.path.join(module_path, '.terraform')
    backend_filepath = os.path.join(module_path,
                                    backend_options.get('filename'))
    if os.path.isdir(terraform_dir) and (backend_options.get('config')
                                         or os.path.isfile(backend_filepath)):
        if backend_options.get('config'):
            state_config = backend_options.get('config')
        else:
            with open(backend_filepath, 'r') as stream:
                state_config = hcl.load(stream)
        if 'key' in state_config:
            LOGGER.info("Backend config defines a custom state key, "
                        "which Terraform will not respect when listing/"
                        "switching workspaces. Deleting the current "
                        ".terraform directory to ensure the key is used.")
            send2trash(terraform_dir)
            LOGGER.info(".terraform directory removed; proceeding with "
                        "init...")
            run_terraform_init(module_path=module_path,
                               backend_options=backend_options,
                               env_name=env_name,
                               env_region=env_region,
                               env_vars=env_vars)
Example #8
def tf_to_json(input_file, output_file):

    with open(input_file, 'r') as fp:
        json_out = hcl.load(fp)

    with open(output_file, 'w') as outfile:
        json.dump(json_out, outfile, indent=4)
Example #9
    def __init__(self, terraform_path=None, terragrunt_path=None):
        self.terragrunt_v = None
        self.terraform_v = None

        conf_file = "{}/config.hcl".format(self.conf_dir)
        if os.path.isfile(conf_file):
            with open(conf_file, 'r') as fp:
                self.conf = hcl.load(fp)
        else:
            self.conf = {}

        try:
            self.bin_dir = os.path.expanduser(self.conf['bin_dir'])
        except KeyError:
            pass
        if terraform_path is None:
            terraform_path = "{}/terraform".format(self.bin_dir)
            if not os.path.isdir(self.bin_dir):
                os.makedirs(self.bin_dir)

        self.terraform_path = terraform_path

        if terragrunt_path is None:
            terragrunt_path = "{}/terragrunt".format(self.bin_dir)
            if not os.path.isdir(self.bin_dir):
                os.makedirs(self.bin_dir)

        self.terragrunt_path = terragrunt_path

        if not os.path.isdir(self.conf_dir):
            os.makedirs(self.conf_dir)
Example #10
def find_min_required(path):
    """Inspect terraform files and find minimum version."""
    found_min_required = ''
    for filename in glob.glob(os.path.join(path, '*.tf')):
        with open(filename, 'r') as stream:
            tf_config = hcl.load(stream)
            if tf_config.get('terraform', {}).get('required_version'):
                found_min_required = tf_config.get('terraform',
                                                   {}).get('required_version')
                break

    if found_min_required:
        if re.match(r'^!=.+', found_min_required):
            LOGGER.error('Min required Terraform version is a negation (%s) '
                         '- unable to determine required version',
                         found_min_required)
            sys.exit(1)
        else:
            found_min_required = re.search(r'[0-9]*\.[0-9]*(?:\.[0-9]*)?',
                                           found_min_required).group(0)
            LOGGER.debug("Detected minimum terraform version is %s",
                         found_min_required)
            return found_min_required
    LOGGER.error('Terraform version specified as min-required, but unable to '
                 'find a specified version requirement in this module\'s tf '
                 'files')
    sys.exit(1)
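As a quick illustration (not part of the original function), the snippet below shows what the version-extraction regex used above returns for a few representative constraint strings:

import re

# Illustrative constraint strings only; the pattern keeps the leading
# numeric version and drops the comparison operator.
for constraint in ('>= 0.12.6', '~> 0.11', '0.13.5'):
    print(re.search(r'[0-9]*\.[0-9]*(?:\.[0-9]*)?', constraint).group(0))
# prints: 0.12.6, 0.11, 0.13.5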
Example #11
def change_mem_and_cpu():
    """
    Increase CPUs and memory for nodes
    """
    worker_num_cpus = config.ENV_DATA.get('worker_num_cpus')
    master_num_cpus = config.ENV_DATA.get('master_num_cpus')
    worker_memory = config.ENV_DATA.get('compute_memory')
    master_memory = config.ENV_DATA.get('master_memory')
    if (
            worker_num_cpus
            or master_num_cpus
            or master_memory
            or worker_memory
    ):
        with open(constants.VSPHERE_MAIN, 'r') as fd:
            obj = hcl.load(fd)
            if worker_num_cpus:
                obj['module']['compute']['num_cpu'] = worker_num_cpus
            if master_num_cpus:
                obj['module']['control_plane']['num_cpu'] = master_num_cpus
            if worker_memory:
                obj['module']['compute']['memory'] = worker_memory
            if master_memory:
                obj['module']['control_plane']['memory'] = master_memory
        # Dump data to json file since hcl module
        # doesn't support dumping of data in HCL format
        dump_data_to_json(obj, f"{constants.VSPHERE_MAIN}.json")
        os.rename(constants.VSPHERE_MAIN, f"{constants.VSPHERE_MAIN}.backup")
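The dump_data_to_json helper referenced in the comment above is not shown in this example. A minimal sketch, assuming it simply writes the parsed object out with the standard json module (Terraform also accepts *.tf.json files), could look like this:

import json

def dump_data_to_json(data, json_file):
    """Hypothetical helper: serialize the parsed HCL object to a JSON file."""
    with open(json_file, 'w') as fd:
        json.dump(data, fd, indent=4)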
Example #12
def pytest_load_initial_conftests(early_config, parser, args):
    # TODO: find a better way to do this
    dir = ''
    for arg in args:
        m = re.match(r'--terraform-dir=(.*)', arg)
        if m:
            dir = m.groups()[0]

    # Collect the *.tf files from the directory given by --terraform-dir
    tf_files = glob.glob('{terraform_dir}/*.tf'.format(terraform_dir=dir))

    # Extract the resource "hogehoge" "fugafuga" {} definitions from the
    # directory given by --terraform-dir
    resources = defaultdict(lambda: [])
    for tf_file in tf_files:
        # Open the .tf file written in HCL
        with open(tf_file, 'r') as f:
            obj = dot(hcl.load(f))

        # Only process the resource section
        if 'resource' in list(obj.keys()):
            terraform_resource = obj.resource
            for resource_key, resource in terraform_resource.items():
                for name, param in resource.items():
                    resources[resource_key].append(name)

    for resource, names in resources.items():
        print(resource, names)
        for name in names:
            inject_terraform_fixture(resource, name)

        inject_terraform_class(resource, names)
Example #13
def find_unused_vars(tfdir):
    all_vars = {}
    for root, dirs, files in os.walk(tfdir):
        if not any(_file.endswith('.tf') for _file in files):
            continue
        for _file in os.listdir(root):
            if _file.endswith('.tf'):
                tf_vars = set()
                with open(os.path.join(root, _file)) as terraform:
                    _tf = hcl.load(terraform)
                    try:
                        tf_vars.update(_tf["variable"].keys())
                    except KeyError:
                        tf_vars.update([])
                for var in tf_vars:
                    all_vars[var] = f"{root}/{_file}"

        for _file in os.listdir(root):
            if _file.endswith('.tf'):
                _tf = open(os.path.join(root, _file)).read()
                for _var in list(all_vars):
                    if f"var.{_var}" in _tf:
                        del all_vars[_var]
        for _var, _path in all_vars.items():
            print(f'var.{_var} in {_path}')
Example #14
File: config.py Project: xxlatgh/qb
def load_config():
    if os.path.exists(CUSTOM_CONFIG):
        with open(CUSTOM_CONFIG) as f:
            return hcl.load(f)
    elif os.path.exists(DEFAULT_CONFIG):
        with open(DEFAULT_CONFIG) as f:
            return hcl.load(f)
    elif os.path.exists('/ssd-c/qanta/qb/qanta.hcl'):
        with open('/ssd-c/qanta/qb/qanta.hcl') as f:
            return hcl.load(f)
    elif os.path.exists('/ssd-c/qanta/qb/qanta-defaults.hcl'):
        with open('/ssd-c/qanta/qb/qanta-defaults.hcl') as f:
            return hcl.load(f)
    else:
        raise ValueError(
            'Qanta HCL configuration could not be found in qanta-defaults.hcl or qanta.hcl')
Example #15
def verify_vars_exists(var_file, non_interactive=False):
    missing_vars = []
    with open(var_file, 'r') as variables:
        tf_vars = hcl.load(variables)
        for variable in tf_vars["variable"]:
            expected = "TF_VAR_{}".format(variable)
            if (not os.environ.get(expected)
                    and expected != "TF_VAR_environment"
                    and "default" not in tf_vars["variable"][variable]):
                missing_vars.append(expected)

    if missing_vars:
        exceptions = set(['TF_VAR_dev_api_key', 'TF_VAR_subnet_cidrs'])
        # EXCEPTION: because this is created after the first-run non-interactively
        if exceptions.intersection(missing_vars):
            print('[SKIP missing var] allowing {} to be empty'.format(
                exceptions))
        elif non_interactive:
            exit("{}Terraform needs ENVIRONMENT VARIABLES to be set {}".format(
                bcolors.FAIL, missing_vars))
        else:
            for missing in missing_vars:
                os.environ[missing] = query_answer(
                    "Enter value for {}: ".format(missing))
Example #16
def get_module_paths(terraform_module_file):
    with open(terraform_module_file, 'r') as fp:
        modules_tf = hcl.load(fp)
        return [
            source['source'].replace("./", "")
            for source in modules_tf["module"].values()
        ]
Example #17
    def _get_vars(self, tfvars_file):
        try:
            with open(tfvars_file, 'r') as fp:
                obj = hcl.load(fp)

                return obj
        except Exception:
            return {}
Example #18
def _load(path):
    try:
        with open(path) as settings_file:
            return hcl.load(settings_file)
    except IOError as error:
        errors.string_exit('Error reading settings: {}'.format(error))
    except Exception as error:
        errors.string_exit('Error parsing settings: {}'.format(error))
Example #19
def parse_variables_tf(variables_tf):
    variables = {}  # name => file name
    try:
        with open(variables_tf, 'r') as f:
            variables = hcl.load(f)['variable']
    except FileNotFoundError:
        pass  # KOPS generated terraform doesn't have a variables.tf file
    return variables
Example #20
    def __init__(self, directory=None, settings=None):
        self.settings = settings if settings else {}

        # handle the root module first...
        self.directory = directory if directory else os.getcwd()
        #print(self.directory)
        self.config_str = ''
        iterator = iglob(self.directory + '/*.tf')
        for fname in iterator:
            with open(fname, 'r', encoding='utf-8') as f:
                self.config_str += f.read() + ' '
        config_io = io.StringIO(self.config_str)
        self.config = hcl.load(config_io)

        # then any submodules it may contain, skipping any remote modules for
        # the time being.
        self.modules = {}
        if 'module' in self.config:
            for name, mod in self.config['module'].items():
                if 'source' not in mod:
                    continue
                source = mod['source']
                # '//' used to refer to a subdirectory in a git repo
                if re.match(r'.*\/\/.*', source):
                    continue
                # '@' should only appear in ssh urls
                elif re.match(r'.*\@.*', source):
                    continue
                # 'github.com' special behavior.
                elif re.match(r'github\.com.*', source):
                    continue
                # points to new TFE module registry
                elif re.match(r'app\.terraform\.io', source):
                    continue
                # bitbucket public and private repos
                elif re.match(r'bitbucket\.org.*', source):
                    continue
                # git::https or git::ssh sources
                elif re.match(r'^git::', source):
                    continue
                # git:// sources
                elif re.match(r'^git:\/\/', source):
                    continue
                # Generic Mercurial repos
                elif re.match(r'^hg::', source):
                    continue
                # Public Terraform Module Registry
                elif re.match(
                        r'^[a-zA-Z0-9\-_]+\/[a-zA-Z0-9\-_]+\/[a-zA-Z0-9\-_]+',
                        source):
                    continue
                # AWS S3 buckets
                elif re.match(r's3.*\.amazonaws\.com', source):
                    continue
                # fixme path join. eek.
                self.modules[name] = Terraform(directory=self.directory + '/' +
                                               source,
                                               settings=mod)
Example #21
def load_variables(filenames):
    """Load terraform variables"""

    variables = {}

    for terrafile in glob.glob('./*.tf'):
        with open(terrafile) as fh:
            data = hcl.load(fh)
            for key, value in data.get('variable', {}).items():
                if 'default' in value:
                    variables.update({key: value['default']})

    for varfile in filenames:
        with open(varfile) as fh:
            data = hcl.load(fh)
            variables.update(data)

    return variables
Example #22
def load_tfvars_file(tfvar_file):

    try:
        with open(tfvar_file, 'r') as fp:
            obj = hcl.load(fp)
        return obj['delegated_account_ids_map']
    except Exception as e:
        print(f'There was an error opening file: {e}')
        raise
Example #23
def digest_hcl_file(hcl_path: str) -> str:
    with open(hcl_path, 'r') as fp:
        obj = hcl.load(fp)
    as_json_normalized = io.StringIO()
    json.dump(obj=obj,
              fp=as_json_normalized,
              sort_keys=True,
              separators=(',', ':'))
    return __blake2_hexdigest(as_json_normalized.getvalue())
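The __blake2_hexdigest helper called at the end is not included in the example. A minimal sketch, assuming it hashes the normalized JSON string with BLAKE2 from Python's standard hashlib, might be:

import hashlib

def __blake2_hexdigest(text: str) -> str:
    """Hypothetical helper: return the BLAKE2b hex digest of the given text."""
    return hashlib.blake2b(text.encode('utf-8')).hexdigest()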
Example #24
    def __init__(self, directory=None):
        config_str = ''
        iterator = iglob(directory + '/*.tf') if directory else iglob('*.tf')
        for fname in iterator:
            with open(fname, 'r', encoding='utf-8') as f:
                config_str += f.read() + ' '
        config_io = io.StringIO(config_str)

        self.config = hcl.load(config_io)
Example #25
    def __init__(self) -> None:
        """Parse the terraform.tfvars config file and make sure it contains every variable.

        Raises:
            InvalidConfigError: If any variable is defined in variables.tf but not terraform.tfvars.
        """
        with open(CONFIG_FILE) as f:
            self._config = hcl.load(f)  # Dict[str, Union[int, str]]

        with open(VARIABLES_FILE) as f:
            variable_names = hcl.load(f)['variable'].keys()

        for variable in variable_names:
            # Verify that the variable is defined.
            if variable not in self._config:
                raise InvalidConfigError(
                    'variable "{}" is not defined in {}'.format(variable, CONFIG_FILE)
                )
Example #26
def loadConfig(path):
    name = '.loopci.hcl'
    if name not in os.listdir(path):
        raise Exception('{0} is not found'.format(name))
    with open(os.path.join(path, name), 'r') as f:
        obj = hcl.load(f)
    print(obj)
    return obj
Example #27
    def __init__(self, config_file):
        """Parse config and setup boto3.

        Args:
            config_file: [String] path to the terraform.tfvars configuration file.
        """
        with open(config_file) as f:
            self._config = hcl.load(f)

        boto3.setup_default_session(region_name=self._config['aws_region'])
Example #28
def import_tfvars(config, tfvars_pathname):
    """Import the tfvars"""
    config['tfvars_pathname'] = tfvars_pathname
    if check_exists('file', tfvars_pathname, False):
        try:
            with open(tfvars_pathname, 'r') as fp:
                config['tfvars'] = hcl.load(fp)
        except Exception:
            config['tfvars'] = {}
    else:
        config['tfvars'] = {}
Example #29
def tfvars_to_dictionnary(tfvars_file):
    """
  Load a tfvars file and return a python dictionnary
  """
    try:
        with open(tfvars_file, 'r') as tfvars:
            return load(tfvars)
    except FileNotFoundError as error:
        raise ResolverError(error)
    except ValueError as error:
        raise ResolverError("Loading tfvars file : {}".format(error))
Example #30
    def get_terraform_config_json(terraform_var_file):

        try:
            with open(terraform_var_file, 'r') as f:
                tf_config_json = hcl.load(f)
            return tf_config_json
        except IOError as e:
            # print(e)
            print("ERROR: The terraform variables file " +
                  f"'{terraform_var_file}' was not found.")
            exit(1)
Example #31
    def __init__(self, workflow_file):

        # Reads the workflow file.
        with open(workflow_file, 'r') as fp:
            self.workflow = hcl.load(fp)
            fp.seek(0)
            self.workflow_content = fp.readlines()

        self.validate_syntax()
        self.normalize()
        self.complete_graph()
Example #32
def get_spot_ids():
    with open('terraform.tfstate') as f:
        state = hcl.load(f)

    resources = state['modules'][0]['resources']

    if 'aws_spot_instance_request.qanta' in resources:
        return [resources['aws_spot_instance_request.qanta']['primary']['id']]
    elif 'aws_spot_instance_request.qanta.0' in resources:
        instances = [r for r in resources if 'aws_spot_instance_request.qanta' in r]
        return [resources[r]['primary']['id'] for r in instances]
    else:
        raise ValueError('No matching instances found')
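For context, here is a tiny, made-up fragment of the legacy (pre-0.12) terraform.tfstate layout that get_spot_ids walks; only the keys the function actually reads are shown, and the ID value is illustrative:

state = {
    'modules': [{
        'resources': {
            # a single spot request; with count > 1 the keys become
            # 'aws_spot_instance_request.qanta.0', '.1', and so on
            'aws_spot_instance_request.qanta': {
                'primary': {'id': 'sir-example01'}
            }
        }
    }]
}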
Example #33
def test_decoder(hcl_fname, json_fname, struct):
    
    with open(join(FIXTURE_DIR, hcl_fname), 'r') as fp:
        hcl_json = hcl.load(fp)
    
    assert json_fname is not None or struct is not None

    if json_fname is not None:
        with open(join(FIXTURE_DIR, json_fname), 'r') as fp:
            good_json = json.load(fp)
        
        assert hcl_json == good_json
    
    if struct is not None:
        assert hcl_json == struct
Example #34
    def __init__(self, directory=None, settings=None):
        self.settings = settings if settings else {}

        # handle the root module first...
        self.directory = directory if directory else os.getcwd()
        #print(self.directory)
        self.config_str = ''
        iterator = iglob( self.directory + '/*.tf')
        for fname in iterator:
            with open(fname, 'r', encoding='utf-8') as f:
                self.config_str += f.read() + ' '
        config_io = io.StringIO(self.config_str)
        self.config = hcl.load(config_io)

        # then any submodules it may contain, skipping any remote modules for
        # the time being.
        self.modules = {}
        if 'module' in self.config:
            for name, mod in self.config['module'].items():
                if 'source' not in mod:
                    continue
                source = mod['source']
                # '//' used to refer to a subdirectory in a git repo
                if re.match(r'.*\/\/.*', source):
                    continue
                # '@' should only appear in ssh urls
                elif re.match(r'.*\@.*', source):
                    continue
                # 'github.com' special behavior.
                elif re.match(r'github\.com.*', source):
                    continue
                # points to module registry.
                elif re.match(r'hashicorp.*', source):
                    continue
                # fixme path join. eek.
                self.modules[name] = Terraform(
                    directory=self.directory + '/' + source, settings=mod)
Example #35
    def terraform_config(self):
        if self._terraform_config is None:
            with open(self.dynamodb_tf) as hcl_file:
                self._terraform_config = hcl.load(hcl_file)
        return self._terraform_config