Example #1
0
def cli_mfa_add_token():
    """ Adds an MFA token to be used with role assumption.
        Tokens will be saved in a .ndt subdirectory in the user's home directory.
        If a token with the same name already exists, it will not be overwritten
        unless --force is given."""
    parser = get_parser()
    parser.add_argument("token_name",
                        help="Name for the token. Use this to refer to the token later with " +
                        "the assume-role command.")
    parser.add_argument("-i", "--interactive", help="Ask for token details interactively.",
                        action="store_true")
    parser.add_argument("-a", "--token_arn", help="ARN identifier for the token.")
    parser.add_argument("-s", "--token_secret", help="Token secret.")
    parser.add_argument("-f", "--force", help="Force an overwrite if the token already exists.",
                        action="store_true")
    argcomplete.autocomplete(parser)
    args = parser.parse_args()
    if args.interactive:
        # Interactive mode: read the secret from the terminal and print two
        # consecutive codes so the user can sync the token with AWS.
        args.token_secret = _to_bytes(input("Enter token secret: "))
        code_1 = mfa_generate_code_with_secret(args.token_secret)
        print("First sync code: " + code_1)
        print("Waiting to generate second sync code. This could take 30 seconds...")
        code_2 = mfa_generate_code_with_secret(args.token_secret)
        # Poll until the code window rolls over and a fresh code appears.
        while code_1 == code_2:
            time.sleep(5)
            code_2 = mfa_generate_code_with_secret(args.token_secret)
        print("Second sync code: " + code_2)
        args.token_arn = _to_str(input("Enter token ARN: "))
    elif not args.token_secret:
        # Non-interactive use requires the secret up front; the ARN stays optional.
        parser.error("Token secret is required.")
    try:
        mfa_add_token(args)
    except ValueError as error:
        # Surface token-store failures (presumably e.g. a duplicate name
        # without --force - confirm in mfa_add_token) as argparse errors.
        parser.error(error)
Example #2
0
def type_guess(val):
    """Best-effort conversion of a string into the Python literal it spells.

    Returns None for falsy input, the evaluated literal when *val* parses as
    one (numbers, booleans, None, strings, tuples, lists, dicts, sets), and
    otherwise the input coerced to str.
    """
    if not val:
        return None
    try:
        return literal_eval(val)
    except (ValueError, SyntaxError, TypeError, MemoryError, RecursionError):
        # These are the failures ast.literal_eval documents for non-literal
        # input.  The original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit, which must propagate.
        return _to_str(val)
Example #3
0
def print_profile_expiry(profile):
    """Print shell statements that export the session expiry of *profile*
    both as an epoch-seconds variable and as the raw expiry string."""
    env_suffix = re.sub("[^A-Z0-9]", "_", profile.upper())
    epoc_var = "AWS_SESSION_EXPIRATION_EPOC_" + env_suffix
    expiry_var = "AWS_SESSION_EXPIRATION_" + env_suffix
    expiry = read_profile_expiry(profile)
    epoc_secs = _epoc_secs(parse(expiry).replace(tzinfo=tzutc()))
    print(epoc_var + "=" + _to_str(epoc_secs))
    print(expiry_var + "=" + expiry)
    print("export " + expiry_var + " " + epoc_var + ";")
Example #4
0
def print_profile(profile_name, params):
    """Print shell variable assignments for every key of *profile_name*'s
    profile, collecting each exported name into *params* (mutated in place)
    and finishing with a single `export` statement for all of them."""
    env_suffix = re.sub("[^A-Z0-9]", "_", profile_name.upper())
    for key, value in list(get_profile(profile_name).items()):
        env_name = key.upper()
        if key in ("aws_session_expiration", "aws_expiration"):
            # Also expose the expiry as epoch seconds for cheap comparisons.
            parsed_expiry = parse(value)
            print("AWS_SESSION_EXPIRATION_EPOC_" + env_suffix + "=\"" +
                  _to_str(_epoc_secs(parsed_expiry)) + "\"")
            params.append("AWS_SESSION_EXPIRATION_EPOC_" + env_suffix)
        params.append(env_name)
        # Strip one layer of surrounding quotes before re-quoting for the shell.
        if value.startswith("\""):
            value = value[1:-1]
        print(env_name + "=\"" + value + "\"")
    print("export " + " ".join(params) + ";")
Example #5
0
def enable_profile(profile_type, profile):
    """Print the shell commands needed to activate *profile*.

    profile_type: one of "iam", "azure", "adfs" or "ndt".  Output is meant
    to be eval'd by the calling shell; the only side effect is printing.
    The expiry-check logic was duplicated verbatim between the azure/adfs
    and ndt branches and is now shared via the helpers below.
    """
    profile = re.sub("[^a-zA-Z0-9_-]", "_", profile)
    safe_profile = re.sub("[^A-Z0-9]", "_", profile.upper())
    env_key = "AWS_SESSION_EXPIRATION_EPOC_" + safe_profile
    if profile_type == "iam":
        _print_profile_switch(profile)
    elif profile_type == "azure" or profile_type == "adfs":
        _print_profile_switch(profile)
        if _cached_expiry_epoc(profile, env_key) < _epoc_secs(datetime.now(tzutc())):
            _print_unset_stale_expiry(env_key)
            if profile_type == "azure":
                profile_data = get_profile(profile)
                gui_mode = ""
                if "azure_login_mode" in profile_data and profile_data[
                        "azure_login_mode"] == "gui":
                    gui_mode = " --mode=gui"
                print("aws-azure-login --profile " + profile + gui_mode +
                      " --no-prompt")
            else:
                print("adfs-aws-login --profile " + profile + " --no-prompt")
        elif env_key not in os.environ:
            print_profile_expiry(profile)
    elif profile_type == "ndt":
        if _cached_expiry_epoc(profile, env_key) < _epoc_secs(datetime.now(tzutc())):
            _print_unset_stale_expiry(env_key)
            if not _print_ndt_assume_role(profile):
                # No origin profile configured - nothing more can be done,
                # and (matching the original) the profile switch is skipped.
                return
        elif env_key not in os.environ:
            print_profile_expiry(profile)
        _print_profile_switch(profile)


def _cached_expiry_epoc(profile, env_key):
    # Prefer the expiry cached in the environment; fall back to the expiry
    # stored with the profile (treated as UTC), in epoch seconds.
    if env_key in os.environ:
        return int(os.environ[env_key])
    return _epoc_secs(parse(read_profile_expiry(profile)).replace(tzinfo=tzutc()))


def _print_unset_stale_expiry(env_key):
    # Tell the shell to drop a stale cached expiry variable, if it was set.
    if env_key in os.environ:
        print("unset " + env_key + ";")


def _print_ndt_assume_role(profile):
    # Re-enable the origin profile, then print the ndt assume-role command
    # for *profile*.  Returns False when no origin profile is configured.
    profile_data = get_profile(profile)
    if "ndt_origin_profile" not in profile_data:
        return False
    origin_profile = profile_data["ndt_origin_profile"]
    origin_profile_data = get_profile(origin_profile)
    if "azure_tenant_id" in origin_profile_data:
        origin_type = "azure"
    elif "adfs_login_url" in origin_profile_data:
        origin_type = "adfs"
    else:
        origin_type = "iam"
    enable_profile(origin_type, origin_profile)

    command = ["ndt", "assume-role"]
    if "ndt_mfa_token" in profile_data:
        command.append("-t")
        command.append(profile_data["ndt_mfa_token"])
    if "ndt_default_duration_hours" in profile_data:
        command.append("-d")
        # Duration flag is expressed in minutes.
        duration = _to_str(
            int(profile_data["ndt_default_duration_hours"]) * 60)
        command.append(duration)
    command.append("-p")
    command.append(profile)
    command.append(profile_data["ndt_role_arn"])
    print(" ".join(command))
    return True
def _preprocess_template(data, root, basefile, path, templateParams):
    """Recursively expand preprocessor extensions in a template fragment.

    Handles Fn::ImportFile, Fn::ImportYaml, Fn::Merge, StackRef, TFRef,
    Encrypt, SsmRef, ProductAmi and OwnerNamedAmi nodes, expanding template
    variables as it goes.  May return a different object than *data* (e.g.
    a resolved scalar), so callers must use the return value.  Import and
    merge errors are reported by printing and setting the module-global
    gotImportErrors flag rather than raising.

    Fixes vs. the previous revision: the TFRef error path referenced the
    undefined name stack_var (NameError); the SsmRef branch made a redundant
    unused ssm().get_parameter() call; the optional-import cleanup deleted
    dict keys while iterating (RuntimeError on Python 3); the imported yaml
    file handle is now closed deterministically.
    """
    # Refresh parameters from the root document before (and during) expansion.
    param_refresh_callback = lambda: templateParams.update(_get_params(root, basefile))
    param_refresh_callback()
    global gotImportErrors
    if isinstance(data, OrderedDict):
        if 'Fn::ImportFile' in data:
            # Replace the node with a Fn::Join of the imported script contents.
            val = data['Fn::ImportFile']
            file = expand_vars(val, templateParams, None, [])
            script_import = resolve_file(file, basefile)
            if script_import:
                params = OrderedDict(list(templateParams.items()))
                params.update(data)
                data.clear()
                contents = expand_only_double_paranthesis_params(import_script(script_import), params, None, [])
                data['Fn::Join'] = ["", contents]
            else:
                print("ERROR: " + val + ": Can't import file \"" + val +
                      "\" - file not found on include paths or relative to " +
                      basefile)
                gotImportErrors = True
        elif 'Fn::ImportYaml' in data:
            # Inline another yaml document, optionally filtered with jmespath.
            val = data['Fn::ImportYaml']
            jmespath = None
            if "jmespath" in data and data["jmespath"]:
                jmespath = data["jmespath"]
            file = expand_vars(val, templateParams, None, [])
            yaml_file = resolve_file(file, basefile)
            del data['Fn::ImportYaml']
            if yaml_file:
                with open(yaml_file) as yaml_fd:
                    contents = yaml_load(yaml_fd)
                params = OrderedDict(list(templateParams.items()))
                params.update(data)
                contents = expand_vars(contents, params, None, [])
                data['Fn::ImportYaml'] = OrderedDict()
                data['Fn::ImportYaml']['Result'] = contents
                param_refresh_callback()
                # Keep expanding until the contents reach a fixed point.
                while True:
                    expanded_result = expand_vars(contents, templateParams, None, [])
                    if expanded_result == contents:
                        break
                    else:
                        contents.clear()
                        contents.update(expanded_result)
                        param_refresh_callback()
                data.clear()
                if isinstance(contents, OrderedDict):
                    for k, val in list(contents.items()):
                        data[k] = _preprocess_template(val, root, yaml_file, path +
                                                       k + "_", templateParams)
                elif isinstance(contents, list):
                    data = contents
                    for i in range(0, len(data)):
                        data[i] = _preprocess_template(data[i], root, yaml_file,
                                                       path + str(i) + "_", templateParams)
                else:
                    print("ERROR: " + path + ": Can't import yaml file \"" +
                          yaml_file + "\" that isn't an associative array or" +
                          " a list in file " + basefile)
                    gotImportErrors = True
                if jmespath:
                    data = search(jmespath, data)
            else:
                if not ('optional' in data and data['optional']):
                    print("ERROR: " + val + ": Can't import file \"" + val +
                          "\" - file not found on include paths or relative to " +
                          basefile)
                    gotImportErrors = True
                else:
                    # Optional import not found: empty the node.  (Previously
                    # deleted keys while iterating the dict.)
                    data.clear()
            if data and "optional" in data:
                del data["optional"]
            data = _preprocess_template(data, root, yaml_file, path, templateParams)
        elif 'Fn::Merge' in data:
            # Normalize to {Source: [...], Result: ...} and merge one source
            # entry per pass, recursing until the source list is exhausted.
            merge_list = data['Fn::Merge']['Source'] if 'Source' in data['Fn::Merge'] else data['Fn::Merge']
            result = data['Fn::Merge']['Result'] if 'Result' in data['Fn::Merge'] else OrderedDict()
            data['Fn::Merge'] = OrderedDict([('Source', merge_list), ('Result', result)])
            if not isinstance(merge_list, list):
                print("ERROR: " + path + ": Fn::Merge must associate to a list in file " + basefile)
                gotImportErrors = True
                return data
            merge = _preprocess_template(expand_vars(merge_list.pop(0), templateParams, None, []), root, basefile,
                                         path + "/", templateParams)
            if not result:
                result = merge
                data['Fn::Merge'] = OrderedDict([('Source', merge_list), ('Result', result)])
            elif not isinstance(merge, type(result)):
                print("ERROR: " + path + ": First Fn::Merge entries " +
                        "were of type " + str(type(result)) + ", but the following entry was not: \n" + \
                        json.dumps(merge, indent=2) + "\nIn file " + basefile)
                gotImportErrors = True
            elif isinstance(merge, OrderedDict):
                result.update(merge)
            elif isinstance(merge, list):
                result.extend(merge)
            else:
                print("ERROR: " + path + ": Unsupported " + str(type(merge)))
                gotImportErrors = True
            param_refresh_callback()
            # Expand the merged result to a fixed point.
            while True:
                expanded_result = expand_vars(result, templateParams, None, [])
                if expanded_result == result:
                    break
                else:
                    result.clear()
                    result.update(expanded_result)
                    param_refresh_callback()
            if not merge_list:
                del data['Fn::Merge']
                return result
            else:
                return _preprocess_template(data, root, basefile, path + "/", templateParams)
        elif 'StackRef' in data:
            # Replace the node with a value looked up from another stack.
            stack_var = expand_vars(data['StackRef'], templateParams, None, [])
            stack_var = _check_refs(stack_var, basefile,
                                    path + "StackRef_", templateParams,
                                    True)
            data.clear()
            stack_value = _resolve_stackref_from_dict(stack_var)
            if not stack_value:
                raise StackRefUnresolved("Did not find value for: " + stack_var['paramName'] + \
                                        " in stack " + stack_var['region'] + "." + stack_var['stackName'])
            param_refresh_callback()
            return stack_value
        elif 'TFRef' in data:
            # Replace the node with a value looked up from terraform state.
            tf_var = expand_vars(data['TFRef'], templateParams, None, [])
            tf_var = _check_refs(tf_var, basefile,
                                 path + "TFRef_", templateParams, True)
            data.clear()
            tf_value = _resolve_tfref_from_dict(tf_var)
            if not tf_value:
                # Fixed: previously referenced the undefined name stack_var
                # here, raising NameError instead of TFRefUnresolved.
                ref = tf_var['paramName'] if 'paramName' in tf_var else tf_var['jmespath']
                raise TFRefUnresolved("Did not find value for: " + ref + \
                                    " in terraform component " + tf_var['component'] + "." + tf_var['terraform'])
            param_refresh_callback()
            return tf_value
        elif 'Encrypt' in data and 'value' in data['Encrypt']:
            # Encrypt the resolved value; the remaining keys of the Encrypt
            # node are passed to Vault as keyword configuration.
            to_encrypt = data['Encrypt']['value']
            enc_conf = data['Encrypt']
            del enc_conf['value']
            vault = Vault(**enc_conf)
            resolved_value = _preprocess_template(to_encrypt, root, basefile, path + "Encrypt_", templateParams)
            if not isinstance(resolved_value, six.string_types):
                raise EncryptException("Encrypted value needs to be a string")
            return b64encode(vault.direct_encrypt(resolved_value))
        elif 'Ref' in data:
            # Tag plain Refs with their source file for later checking.
            data['__source'] = basefile
        elif 'SsmRef' in data:
            ssm_key = expand_vars(data['SsmRef'], templateParams, None, [])
            # Fixed: dropped a redundant ssm().get_parameter() call whose
            # result was unused - _resolve_ssm_parameter fetches the value.
            return _resolve_ssm_parameter(ssm_key)
        elif 'ProductAmi' in data:
            product_code = expand_vars(data['ProductAmi'], templateParams, None, [])
            return _resolve_product_ami(product_code)
        elif 'OwnerNamedAmi' in data:
            owner_named = expand_vars(data['OwnerNamedAmi'], templateParams, None, [])
            if "owner" in owner_named and "name" in owner_named:
                return _resolve_onwer_named_ami(owner_named["owner"], owner_named["name"])
        else:
            # Plain mapping: process Parameters first so later expansions can
            # use them, then recurse into every other key.
            if 'Parameters' in data:
                data['Parameters'] = _preprocess_template(data['Parameters'], root, basefile, path + "Parameters_",
                                                          templateParams)
                param_refresh_callback()
            for k, val in list(data.items()):
                if k != 'Parameters':
                    data[k] = expand_vars(_preprocess_template(val, root, basefile, path + _to_str(k) + "_", templateParams), templateParams, None, [])
    elif isinstance(data, list):
        for i in range(0, len(data)):
            data[i] = _preprocess_template(data[i], root, basefile, path + str(i) + "_", templateParams)
    return data
def _process_value(value, used_params):
    """Expand a single parameter value: strip one layer of quotes, expand
    variables, then resolve external reference prefixes (StackRef:, TFRef:,
    Encrypt:, YamlRef:, SsmRef:, ProductAmi:, OwnerNamedAmi:).

    NOTE(review): the resolution steps below run in order on the same
    value, so a resolved value could itself match a later prefix check -
    presumably intentional; confirm before reordering.
    """
    value = value.strip()
    # Drop one layer of matching surrounding quotes.
    if (value.startswith("\"") and value.endswith("\"")) or (value.startswith("'") and value.endswith("'")):
        value = value[1:-1]
    value = expand_vars(value, used_params, None, [])
    # Don't go into external refs if:
    #   a) resolving base variables like REGION and paramEnvId
    #   b) resolving basic variables used in terraform backend configuration
    if  "DO_NOT_RESOLVE_EXTERNAL_REFS" not in os.environ and "TF_INIT_OUTPUT" not in os.environ:
        # Inline yaml referencing another CloudFormation stack's output.
        if value.strip().startswith("StackRef:"):
            stackref_doc = yaml_load(_to_str(value))
            stack_value = _resolve_stackref_from_dict(stackref_doc['StackRef'])
            if stack_value:
                value = stack_value
        # Inline yaml referencing a terraform output.
        if value.strip().startswith("TFRef:"):
            tfref_doc = yaml_load(_to_str(value))
            tf_value = _resolve_tfref_from_dict(tfref_doc['TFRef'])
            if tf_value:
                value = tf_value
        # Inline yaml asking for the value to be vault-encrypted; remaining
        # keys of the Encrypt node configure Vault.  The value itself is
        # processed recursively before encryption.
        if value.strip().startswith("Encrypt:"):
            enc_doc = yaml_load(_to_str(value))
            enc_conf = enc_doc["Encrypt"]
            if isinstance(enc_conf, OrderedDict):
                to_encrypt = yaml_save(enc_conf["value"])
            else:
                to_encrypt = enc_conf["value"]
            value = _process_value(to_encrypt, used_params)
            del enc_conf["value"]
            vault = Vault(**enc_conf)
            value = b64encode(vault.direct_encrypt(value))
        # Inline yaml selecting a jmespath result from another yaml file.
        if value.strip().startswith("YamlRef:"):
            yamlref_doc = yaml_load(_to_str(value))
            if "file" in yamlref_doc["YamlRef"] and "jmespath" in yamlref_doc["YamlRef"]:
                yaml_file = yamlref_doc["YamlRef"]["file"]
                contents = yaml_load(open(yaml_file))
                value = search(yamlref_doc["YamlRef"]["jmespath"], contents)
                if value:
                    value = expand_vars(value, used_params, None, [])
        # Inline yaml referencing an SSM parameter.
        if value.strip().startswith("SsmRef:"):
            ssmref_doc = yaml_load(_to_str(value))
            if "SsmRef" in ssmref_doc:
                ssm_key = ssmref_doc["SsmRef"]
                ssm_value = _resolve_ssm_parameter(ssm_key)
                if ssm_value:
                    value = ssm_value
        # Inline yaml resolving an AMI id by marketplace product code.
        if value.strip().startswith("ProductAmi:"):
            product_doc = yaml_load(_to_str(value))
            if "ProductAmi" in product_doc:
                product_code = product_doc["ProductAmi"]
                product_ami = _resolve_product_ami(product_code)
                if product_ami:
                    value = product_ami
        # Inline yaml resolving an AMI id by owner account and image name.
        if value.strip().startswith("OwnerNamedAmi:"):
            product_doc = yaml_load(_to_str(value))
            if "OwnerNamedAmi" in product_doc and "owner" in product_doc["OwnerNamedAmi"] and "name" in product_doc["OwnerNamedAmi"]:
                owner = product_doc["OwnerNamedAmi"]["owner"]
                name = product_doc["OwnerNamedAmi"]["name"]
                owner_ami = _resolve_onwer_named_ami(owner, name)
                if owner_ami:
                    value = owner_ami
    return value
# Module-level caches for resolved external references - presumably keyed by
# reference identity so each stack/terraform/SSM/AMI lookup is performed only
# once per run (populated by the _resolve_* helpers; confirm against them).
stacks = dict()
terraforms = dict()
parameters = dict()
ssm_params = dict()
product_amis = dict()
owner_amis = dict()
# Key prefix used to identify CloudFormation::Init config file entries.
CFG_PREFIX = "AWS::CloudFormation::Init_config_files_"

############################################################################
# _THE_ yaml & json deserialize/serialize functions

# Fallback representer: any object type yaml.SafeDumper has no registered
# representer for is serialized as its string form instead of raising.
yaml.SafeDumper.yaml_representers[None] = lambda self, data: \
    yaml.representer.SafeRepresenter.represent_str(
        self,
        _to_str(data),
    )

# Cache for sourced parameters; None until populated elsewhere in the module.
SOURCED_PARAMS = None


def run_command(command):
    """Run *command* (a list of argv strings) and return its stdout as text.

    Args:
        command: argv list passed to subprocess.Popen (no shell involved).

    Returns:
        The command's captured standard output.

    Raises:
        Exception: if the command exits non-zero.  The captured stderr is
            included in the message (previously it was discarded, losing
            the diagnostic).
    """
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, universal_newlines=True)
    stdout, stderr = proc.communicate()
    if proc.returncode:
        raise Exception("Failed to run " + str(command) + ": " + stderr)
    return stdout

def _resolve_stackref_from_dict(stack_var):
    if "region" in stack_var and "stackName" in stack_var and "paramName" in stack_var: