Example #1
def delete_bastion(plateform):
    tf = Terraform(working_dir='terraform/layer-bastion')
    code, _, _ = tf.cmd("workspace select " + plateform['name'],
                        capture_output=False,
                        no_color=IsNotFlagged,
                        skip_plan=IsNotFlagged)
    code, _, _ = tf.destroy(
        var={
            'region': plateform['region'],
            'remote_bucket': plateform['remote-bucket'],
            'prefix_bucket': plateform['prefix-bucket'],
            'gcp-project': plateform['gcp-project'],
            'instance_type': plateform['infrastructure']['bastion']['instance-type'],
            'instance_image': plateform['infrastructure']['bastion']['image'],
        },
        capture_output=False,
        no_color=IsNotFlagged,
        skip_plan=IsNotFlagged,
        auto_approve=True)

    if code != 0:
        raise Exception("error in Terraform layer-data")
Example #2
def destroy(pathToInf):

    terra = Terraform(pathToInf)

    return_code, stdout, stderr = terra.destroy()

    return return_code
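
Each python_terraform command method (init, plan, apply, destroy, cmd, ...) returns a (return_code, stdout, stderr) tuple, which is why this helper only forwards the first element. A hypothetical caller (the path is illustrative) might check it like this:

# Illustrative caller for the destroy() helper above.
return_code = destroy('/path/to/infrastructure')
if return_code != 0:
    raise RuntimeError('terraform destroy failed')
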
Example #3
def delete_base(plateform):
    tf = Terraform(working_dir='terraform/layer-base')
    code, _, _ = tf.cmd("workspace select " + plateform['name'],
                        capture_output=False,
                        no_color=IsNotFlagged,
                        skip_plan=IsNotFlagged)
    code, _, _ = tf.destroy(
        var={
            'region': plateform['region'],
            'remote_bucket': plateform['remote-bucket'],
            'prefix_bucket': plateform['prefix-bucket'],
            'gcp-project': plateform['gcp-project'],
            'range-ip': plateform['infrastructure']['range-ip'],
            'range-ip-pod': plateform['infrastructure']['range-ip-pod'],
            'range-ip-svc': plateform['infrastructure']['range-ip-svc'],
            'range-plateform': plateform['infrastructure']['range-plateform'],
            'allowed-ips': plateform['ips_whitelist'],
            'env': plateform['type']
        },
        capture_output=False,
        no_color=IsNotFlagged,
        skip_plan=IsNotFlagged,
        auto_approve=True)
    if code != 0:
        raise Exception("error in Terraform layer-base")
Example #4
def create_project():
    tf = Terraform(working_dir='terraform/layer-project')
    code, _, _ = tf.apply(capture_output=False,
                          no_color=IsNotFlagged,
                          skip_plan=IsNotFlagged)
    if code != 0:
        raise Exception("error in Terraform layer-project")
Example #5
def main(username, password):
    #get_default_cli().invoke(['login', "--use-device-code"], out_file=sys.stdout)
    #
    # Destroy Infrastructure
    #
    tfstate_file = 'terraform.tfstate'
    tfstate_files = ['terraform.tfstate', 'terraform.tfstate.backup']

    rg_name1 = None  # set only if the WebInDeploy state file exists
    fpath = './WebInDeploy/' + tfstate_file
    if os.path.isfile(fpath):
        tf = Terraform(working_dir='./WebInDeploy')
        rg_name = tf.output('RG_Name')
        rg_name1 = tf.output('Attacker_RG_Name')
        delete_rg_cmd = 'group delete --name ' + rg_name + ' --yes'
        az_cli(delete_rg_cmd)
    #
    # Delete state files WebInDeploy
    #
    delete_state_files('./WebInDeploy/', tfstate_files)

    fpath = './WebInBootstrap/' + tfstate_file
    if os.path.isfile(fpath) and rg_name1:
        delete_rg_cmd = 'group delete --name ' + rg_name1 + ' --yes'
        az_cli(delete_rg_cmd)
    #
    # Delete state files WebInBootstrap
    #
    delete_state_files('./WebInBootstrap/', tfstate_files)

    #
    # Delete state files WebInFWConf
    #
    delete_state_files('./WebInFWConf/', tfstate_files)
Example #6
 def init(self, working_dir):
     self.tf = Terraform(working_dir)
     self.tf.cmd(
         "init -backend-config=bucket=" + self.bucket_component_state +
         " -backend-config=region=" + self.region,
         capture_output=True,
         no_color=IsNotFlagged)
Example #7
 def deploy(self):
     os.makedirs('.overcloud', exist_ok=True)
     with open('.overcloud/plan.tf.json', 'w') as planfile:
         json.dump(self.tf.to_dict(), planfile, indent=2)
     cli = TfCli(working_dir='.overcloud')
     cli.init(capture_output=False)
     cli.apply(capture_output=False)
Example #8
 def test_destroy(self):
     tf = Terraform(working_dir=current_path,
                    variables={"test_var": "test"})
     tf.init("var_to_output")
     ret, out, err = tf.destroy("var_to_output")
     assert ret == 0
     assert "Destroy complete! Resources: 0 destroyed." in out
Example #9
def terraform_apply(env_data, tf: Terraform):

    retry_count = 0
    return_code = 0
    while retry_count < 5:
        logger.debug("Try {}".format(retry_count))
        return_code, stdout, stderr = tf.apply(skip_plan=True,
                                               var_file=env_data,
                                               capture_output=True)
        logger.debug('Terraform apply return code is {}'.format(return_code))
        logger.debug('Terraform apply stdout is {}'.format(stdout))
        logger.debug("Terraform apply stderr is {}".format(stderr))
        retry_count += 1
        if return_code == 0:
            break
        time.sleep(30)

    if return_code == 0:
        show_return_code, tf_state, stdout = tf.show(json=True)
        logger.debug(
            'Terraform show return code is {}'.format(show_return_code))
        logger.debug('Terraform show stdout is {}'.format(stdout))
        tf_outputs = tf.output()
        for output_value in tf_outputs:
            logger.debug('Terraform output value is {}'.format(output_value))
    else:
        # TODO get output for errors
        tf_state = {}
        tf_outputs = {}
        traceback.print_stack()
    return {
        "tf_return_code": return_code,
        "tf_outputs": tf_outputs,
        "tf_state": tf_state
    }
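
A hypothetical driver for terraform_apply, wired up the same way Example #30 further down wires terraform_destroy (both paths below are assumptions, not taken from the original code):

# Sketch only: pass a .tfvars file path and a configured Terraform object,
# then inspect the dict that terraform_apply() returns.
tf = Terraform(working_dir='/app/terraform/folder_creation')
result = terraform_apply('/app/terraform/input.tfvars', tf)
if result['tf_return_code'] != 0:
    logger.error('terraform apply failed after retries')
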
Example #10
class Terraform:
    def __init__(self, init):
        from python_terraform import Terraform as PythonTerraform

        self.terraform = PythonTerraform(working_dir='terraform')
        Path(self.working_dir).mkdir(exist_ok=True)
        if init:
            return_code, _, err = self.terraform.init(
                dir_or_plan=self.working_dir)
            if return_code != 0:
                raise CwsCommandError(err)

    @property
    def working_dir(self):
        return self.terraform.working_dir

    def init(self):
        return_code, _, err = self.terraform.init()
        if return_code != 0:
            raise CwsCommandError(err)

    def apply(self, workspace, targets):
        self.select_workspace(workspace)
        return_code, _, err = self.terraform.apply(target=targets,
                                                   skip_plan=True,
                                                   input=False,
                                                   raise_on_error=False,
                                                   parallelism=1)
        if return_code != 0:
            raise CwsCommandError(err)

    def destroy(self, workspace, targets):
        self.select_workspace(workspace)
        return_code, _, err = self.terraform.destroy(target=targets)
        if return_code != 0:
            raise CwsCommandError(err)

    def output(self):
        self.select_workspace("default")
        values = self.terraform.output(capture_output=True)
        return {key: value['value']
                for key, value in values.items()} if values else "{}"

    def workspace_list(self):
        self.select_workspace("default")
        return_code, out, err = self.terraform.cmd('workspace', 'list')
        if return_code != 0:
            raise CwsCommandError(err)
        values = out[1:].translate(str.maketrans('', '', ' \t\r')).split('\n')
        return filter(None, values)

    def select_workspace(self, workspace):
        return_code, out, err = self.terraform.workspace('select', workspace)
        if workspace != 'default' and return_code != 0:
            _, out, err = self.terraform.workspace('new',
                                                   workspace,
                                                   raise_on_error=True)
        if not (Path(self.working_dir) / '.terraform').exists():
            self.terraform.init(input=False, raise_on_error=True)
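
A hypothetical caller of this wrapper (not part of the original snippet; the workspace name is made up) could look like this:

# Illustrative use of the wrapper class above.
tf = Terraform(init=True)           # triggers terraform init in the ./terraform working dir
tf.apply('staging', targets=None)   # selects/creates the workspace, then applies
print(tf.output())                  # outputs are always read from the default workspace
tf.destroy('staging', targets=None)
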
Example #11
 def __init__(self, working_dir):
     logging.info("TF FOLDER %s ", working_dir)
     self.working_dir = working_dir
     self.var_file_path = os.path.join(working_dir, self.VAR_FILE)
     self.tf = Terraform(working_dir=working_dir,
                         state="terraform.tfstate",
                         var_file=self.VAR_FILE)
     self.init_tf()
Example #12
 def __init__(self, config):
     """Creates a new BaseAction given a StackStorm config object (kwargs works too)
     Also stores the Terraform class from python_terraform in a class variable
     :param config: StackStorm configuration object for the pack
     :returns: a new BaseAction
     """
     super(TerraformBaseAction, self).__init__(config)
     self.terraform = Terraform()
Example #13
    def __init__(self, working_dir: str, terraform_init: bool = True):
        log.info("TF FOLDER %s ", working_dir)
        self.working_dir = working_dir
        self.var_file_path = os.path.join(working_dir, self.VAR_FILE)
        self.tf = Terraform(working_dir=working_dir, state=self.STATE_FILE, var_file=self.VAR_FILE)

        if terraform_init:
            self.init_tf()
Example #14
def terraform_output(setup_terraform,
                     terraform_bin_path) -> Dict[str, Dict[str, str]]:
    tf = Terraform(working_dir=full_path,
                   terraform_bin_path=terraform_bin_path)
    outputs = tf.output()
    if outputs is not None:
        return outputs

    raise Exception("Cannot retrieve the outputs")
Example #15
 def wrapper(workspace_name, create=True, delete=True, *args, **kwargs):
     tf = Terraform(working_dir=current_path)
     tf.init()
     if create:
         tf.create_workspace(workspace_name, *args, **kwargs)
     yield tf
     if delete:
         tf.set_workspace("default")
         tf.delete_workspace(workspace_name)
Example #16
 def terraform_init(self, init_spec):
     name = init_spec['name']
     wd = init_spec['wd']
     tf = Terraform(working_dir=wd)
     return_code, stdout, stderr = tf.init()
     error = self.check_output(name, return_code, stdout, stderr)
     if error:
         return name, None
     return name, tf
Example #17
 def test_apply(self, folder, variables, var_files, expected_output,
                options):
     tf = Terraform(working_dir=current_path,
                    variables=variables,
                    var_file=var_files)
     tf.init(folder)
     ret, out, err = tf.apply(folder, **options)
     assert ret == 0
     assert expected_output in out.replace("\n", "").replace(" ", "")
     assert err == ""
Example #18
 def destroy_terraform(self, cluster):
     if not self.working_path:
         self.working_path = create_terrafrom_working_dir(cluster_name=cluster)
     t = Terraform(working_dir=self.working_path)
     p, _, _ = t.destroy('./', synchronous=False, no_color=IsNotFlagged, refresh=True)
     for i in p.stdout:
         print(i.decode())
     _, err = p.communicate()
     print(err.decode())
     return p.returncode == 0
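
Note that synchronous=False makes python_terraform return the still-running subprocess as the first tuple element instead of a return code, which is what lets the loop above stream terraform's output line by line; Example #24 below uses the same pattern for apply.
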
Example #19
 def terraform_init(self, init_spec):
     name = init_spec['name']
     wd = init_spec['wd']
     tf = Terraform(working_dir=wd)
     return_code, stdout, stderr = tf.init()
     error = self.check_output(name, 'init', return_code, stdout, stderr)
     if error:
         raise TerraformCommandError(
             return_code, 'init', out=stdout, err=stderr)
     return name, tf
Example #20
def run_terraform(directory, terraform_vars, target_module):
    terraform = Terraform(directory)
    terraform.init(from_module=target_module)

    with open(directory + "terraform.tfvars.json", "w") as fh_:
        fh_.write(json.dumps(terraform_vars))

    ret_code, stdout, stderr = (terraform.apply(auto_approve=True,
                                                capture_output=False,
                                                raise_on_error=True))
Example #21
def get_terraform_outputs() -> dict:
    tf = Terraform(working_dir='./WebInDeploy')
    rc, out, err = tf.cmd('output', '-json')

    if rc == 0:
        try:
            return json.loads(out)
        except ValueError:
            print('Could not parse terraform outputs!')
            return dict()

    return dict()
Example #22
def main():

    albDns = ''
    nlbDns = ''
    fwMgt = ''

    # Set run_plan to True if you wish to run terraform plan before destroy
    run_plan = False
    deployment_status = {}
    kwargs = {"auto-approve": True}

    #
    # Destroy Infrastructure
    #
    tf = Terraform(working_dir='./waf_conf')

    if run_plan:
        print('Calling tf.plan')
        tf.plan(capture_output=False)

    return_code1, stdout, stderr = tf.cmd('destroy',
                                          capture_output=False,
                                          **kwargs)
    #return_code1 =0
    print('Got return code {}'.format(return_code1))

    if return_code1 != 0:
        logger.info("Failed to destroy waf_conf")
        exit()
    else:
        logger.info("Destroyed waf_conf")

    tf = Terraform(working_dir='./WebInDeploy')

    if run_plan:
        print('Calling tf.plan')
        tf.plan(capture_output=False)

    return_code1, stdout, stderr = tf.cmd('destroy',
                                          capture_output=False,
                                          **kwargs)
    #return_code1 =0
    print('Got return code {}'.format(return_code1))

    if return_code1 != 0:
        logger.info("Failed to destroy WebInDeploy")
        deployment_status = {'WebInDeploy': 'Fail'}
        exit()
    else:
        logger.info("Destroyed WebInDeploy")
        deployment_status = {'WebInDeploy': 'Success'}
        exit()
Example #23
    def __init__(self, configuration, terraform_workspace):
        log.info("Preparing terraform deployment")
        log.debug("Using workspace: {}".format(terraform_workspace))

        self._backend_provider = get_backend_provider(configuration,
                                                      terraform_workspace)
        self._controller = Terraform(
            working_dir=terraform_workspace,
            variables=configuration["terraform"]["parameters"])

        self._controller.init(capture_output=False, force_copy=IsFlagged)
Example #24
 def apply_terraform(self, cluster, hosts_dict):
     if not self.working_path:
         self.working_path = create_terrafrom_working_dir(cluster_name=cluster.name)
     generate_terraform_file(self.working_path, self.cloud_config_path, cluster.plan.mixed_vars, hosts_dict)
     self.init_terraform()
     t = Terraform(working_dir=self.working_path)
     p, _, _ = t.apply('./', refresh=True, skip_plan=True, no_color=IsNotFlagged, synchronous=False)
     for i in p.stdout:
         print(i.decode())
     _, err = p.communicate()
     print(err.decode())
     return p.returncode == 0
Example #25
def create_terraform_stack(cluster_name, tf_vars, dir_path, state_path):
    hostname = re.sub(r"[^a-zA-Z0-9]+", '-', cluster_name).lower()
    tf_vars['hostname'] = hostname
    state_file = "{}/{}.tfstate".format(state_path, tf_vars['cluster_uuid'])

    tf_vars_file = create_tf_vars_file(state_path, tf_vars)
    tf = Terraform(dir_path)
    return_code, stdout, stderr = tf.get(capture_output=False)
    return_code, stdout, stderr = tf.init(capture_output=False)
    return_code, stdout, stderr = tf.apply(var_file=tf_vars_file, skip_plan=True, auto_approve=IsFlagged,
                                           capture_output=False, state=state_file)
    return return_code, stdout, stderr
Example #26
def delete_kubernetes(plateform):
    tf = Terraform(working_dir='terraform/layer-kubernetes')
    code, _, _ = tf.cmd("workspace select " + plateform['name'],
                        capture_output=False,
                        no_color=IsNotFlagged,
                        skip_plan=IsNotFlagged)

    ip_1 = '10.0.0.1/32'
    ip_2 = '10.0.0.1/32'
    ip_3 = '10.0.0.1/32'
    ip_4 = '10.0.0.1/32'
    ip_5 = '10.0.0.1/32'

    code, _, _ = tf.destroy(
        var={
            'region': plateform['region'],
            'remote_bucket': plateform['remote-bucket'],
            'prefix_bucket': plateform['prefix-bucket'],
            'gcp-project': plateform['gcp-project'],
            'k8s-version': plateform['infrastructure']['gke']['version'],
            'preemptible': plateform['infrastructure']['gke']['preemptible'],
            'instance-type': plateform['infrastructure']['gke']['instance-type'],
            'white-ip-1': ip_1,
            'white-ip-2': ip_2,
            'white-ip-3': ip_3,
            'white-ip-4': ip_4,
            'white-ip-5': ip_5,
            'min_node': plateform['infrastructure']['gke']['min'],
            'max_node': plateform['infrastructure']['gke']['max'],
            'range_ip_master': plateform['infrastructure']['range-ip-master']
        },
        capture_output=False,
        no_color=IsNotFlagged,
        skip_plan=IsNotFlagged,
        auto_approve=True)

    if code != 0:
        raise Exception("error in Terraform layer-kubernetes")
Example #27
def check_config():
    defined_variables = []

    with open('configurations.json') as json_file:
        data = json.load(json_file)
    for machine in data["machines"]:
        if machine["name"] == "terraform":
            continue
        print(
            "\n",
            machine["name"],
        )
        if machine.get("variables") is None:
            print("MISSING VARIABLES KEY")
            return 1
        for _, value in machine["variables"].items():
            defined_variables += value

        if machine["variables"].get("api") is not None:
            missing_endpoints = []
            for api in machine["variables"]["api"]:
                if data.get("api_endpoints") is None or \
                  data["api_endpoints"].get(api) is None:
                    missing_endpoints.append(api)
            if missing_endpoints:
                print("MISSING ENDPOINTS FOR API VARIABLES: ",
                      missing_endpoints)
                return 1

        tf = Terraform(working_dir=machine["name"])
        code, err, log = tf.plan()
        if code:
            if log.find("No configuration files") != -1:
                print("NO TERRAFORM FILE FOUND, PLEASE CREATE ONE")
                return 1
            if log.find("No value for required variable"):
                variables = get_variables_from_log(log)
                missing_variables = []
                for var in variables:
                    if var not in defined_variables:
                        missing_variables.append(var)
                if missing_variables:
                    print(
                        "THERE ARE MISSING VARIABLES, WHICH ARE DEFINED IN %s, "
                        "PLEASE SPECIFY THEM IN config.json" % machine["name"])
                    print("MISSING VARIABLES: ", missing_variables)
                    return 1
            else:
                print("UNKNOWN ERROR")
                return 1
    return 0
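
check_config() returns 1 on the first problem it finds and 0 when everything validates, so a hypothetical command-line entry point (not part of the original) could be:

# Illustrative entry point; assumes `import sys` at the top of the script.
if __name__ == '__main__':
    sys.exit(check_config())
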
Example #28
def delete_terraform_stack(cluster_uuid, project_id, dir_path, state_path, project_deleted):
    state_file = "{}/{}.tfstate".format(state_path, cluster_uuid)
    tf_vars_file = "{}/vars.tf".format(state_path)
    tf = Terraform(dir_path)
    return_code, stdout, stderr = tf.get(capture_output=False)
    return_code, stdout, stderr = tf.init(capture_output=False)
    return_code, stdout, stderr = tf.destroy(var_file=tf_vars_file, auto_approve=IsFlagged, capture_output=False,
                                             state=state_file)

    shutil.rmtree(state_path)
    if project_deleted:
        shutil.rmtree("{}/{}".format(dir_path, project_id))

    return return_code, stdout, stderr
Example #29
def apply():
    tf = Terraform(working_dir='./terraform')
    parser = SafeConfigParser()
    config = os.path.expanduser('~/.aws/config')
    parser.read(config)
    if not parser.has_section('profile pollexy'):
        print "You need to run 'pollexy credentials configure'"
        return
    region = parser.get('profile pollexy', 'region')
    print 'Applying environment . . . '
    code, stdout, stderr = tf.apply(var={'aws_region': region})
    if (stderr):
        print stderr
    else:
        print stdout
Example #30
def delete_folder(folder):
    tf_data = dict()
    folder_name = folder.get("id")

    # variables not used on delete
    tf_data['parent_folder_id'] = NOT_USED_ON_DESTROY
    tf_data['random_element'] = NOT_USED_ON_DESTROY
    tf_data['region'] = NOT_USED_ON_DESTROY
    tf_data['region_zone'] = NOT_USED_ON_DESTROY
    tf_data['tb_discriminator'] = NOT_USED_ON_DESTROY
    tf_data['folder_name'] = NOT_USED_ON_DESTROY

    ec_config = config.read_config_map()
    tf_data['billing_account'] = ec_config['billing_account']
    tb_discriminator = ec_config['tb_discriminator']
    tf_data['tb_discriminator'] = tb_discriminator

    env_data = '/app/terraform/input.tfvars'

    backend_prefix = get_folder_backend_prefix(folder_name, tb_discriminator)
    terraform_state_bucket = ec_config['terraform_state_bucket']
    terraform_source_path = '/app/terraform/folder_creation'

    tf = Terraform(working_dir=terraform_source_path, variables=tf_data)
    terraform_init(backend_prefix, terraform_state_bucket, tf)

    return terraform_destroy(env_data, tf)
Example #31
def init(stackname, context):
    working_dir = join(TERRAFORM_DIR, stackname) # ll: ./.cfn/terraform/project--prod/
    terraform = Terraform(working_dir=working_dir)
    with _open(stackname, 'backend', mode='w') as fp:
        fp.write(json.dumps({
            'terraform': {
                'backend': {
                    's3': {
                        'bucket': BUILDER_BUCKET,
                        'key': 'terraform/%s.tfstate' % stackname,
                        'region': BUILDER_REGION,
                    },
                },
            },
        }))
    with _open(stackname, 'providers', mode='w') as fp:
        # TODO: possibly remove unused providers
        # Terraform already prunes them when running, but would
        # simplify the .cfn/terraform/$stackname/ files
        # TODO: use TerraformTemplate?
        providers = {
            'provider': {
                'fastly': {
                    # exact version constraint
                    'version': "= %s" % PROVIDER_FASTLY_VERSION,
                    'api_key': "${data.%s.%s.data[\"api_key\"]}" % (DATA_TYPE_VAULT_GENERIC_SECRET, DATA_NAME_VAULT_FASTLY_API_KEY),
                },
                'aws': {
                    'version': "= %s" % '2.3.0',
                    'region': context['aws']['region'],
                },
                'google': {
                    'version': "= %s" % '1.20.0',
                    'region': 'us-east4',
                    'credentials': "${data.%s.%s.data[\"credentials\"]}" % (DATA_TYPE_VAULT_GENERIC_SECRET, DATA_NAME_VAULT_GCP_API_KEY),
                },
                'vault': {
                    'address': context['vault']['address'],
                    # exact version constraint
                    'version': "= %s" % PROVIDER_VAULT_VERSION,
                },
            },
            'data': {
                DATA_TYPE_VAULT_GENERIC_SECRET: {
                    # TODO: this should not be used unless Fastly is involved
                    DATA_NAME_VAULT_FASTLY_API_KEY: {
                        'path': VAULT_PATH_FASTLY,
                    },
                    # TODO: this should not be used unless GCP is involved
                    DATA_NAME_VAULT_GCP_API_KEY: {
                        'path': VAULT_PATH_GCP,
                    },
                },
            },
        }
        if context.get('eks'):
            providers['provider']['kubernetes'] = {
                'version': "= %s" % '1.5.2',
                'host': '${data.aws_eks_cluster.main.endpoint}',
                'cluster_ca_certificate': '${base64decode(data.aws_eks_cluster.main.certificate_authority.0.data)}',
                'token': '${data.aws_eks_cluster_auth.main.token}',
                'load_config_file': False,
            }
            providers['data']['aws_eks_cluster'] = {
                'main': {
                    'name': '${aws_eks_cluster.main.name}',
                },
            }
            providers['data']['aws_eks_cluster_auth'] = {
                'main': {
                    'name': '${aws_eks_cluster.main.name}',
                },
            }
            if context['eks']['helm']:
                providers['provider']['helm'] = {
                    'version': '= 0.9.0',
                    'service_account': '${kubernetes_cluster_role_binding.tiller.subject.0.name}',
                    'kubernetes': {
                        'host': '${data.aws_eks_cluster.main.endpoint}',
                        'cluster_ca_certificate': '${base64decode(data.aws_eks_cluster.main.certificate_authority.0.data)}',
                        'token': '${data.aws_eks_cluster_auth.main.token}',
                        'load_config_file': False,
                    },
                }
        fp.write(json.dumps(providers))
    terraform.init(input=False, capture_output=False, raise_on_error=True)
    return terraform
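
init() returns the configured Terraform handle, so a hypothetical follow-up step (not in the original module) could apply the stack it just initialised; the context values below are placeholders for whatever the builder project normally passes in:

# Sketch only: a minimal context with the keys init() actually reads.
context = {
    'aws': {'region': 'us-east-1'},                      # placeholder
    'vault': {'address': 'https://vault.example.org'},   # placeholder
}
terraform = init('project--prod', context)
terraform.apply(skip_plan=True, input=False, capture_output=False,
                raise_on_error=True)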