Example 1
def decrypt_vault(filename,
                  vault_password=None,
                  vault_password_file=None,
                  vault_prompt=False):
    """
    filename: name of the encrypted file that needs to be decrypted.
    vault_password: key that will decrypt the vault.
    vault_password_file: file containing key that will decrypt the vault.
    vault_prompt: Force vault to prompt for a password if everything else fails.
    """

    loader = DataLoader()
    if vault_password:
        vault_secret = [([], VaultSecret(vault_password.encode()))]
    elif vault_password_file:
        vault_secret = CLI.setup_vault_secrets(loader=loader,
                                               vault_ids=[vault_password_file])
    else:
        vault_secret = CLI.setup_vault_secrets(loader=loader,
                                               vault_ids=[],
                                               auto_prompt=vault_prompt)

    vault = VaultLib(vault_secret)

    with open(filename) as f:
        unencrypted_yaml = vault.decrypt(f.read())
        unencrypted_yaml = yaml.safe_load(unencrypted_yaml)
        return unencrypted_yaml
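
A hypothetical call to the helper above; the vault file path, password, and key name are placeholders, and the Ansible imports used by decrypt_vault (DataLoader, VaultSecret, VaultLib, CLI, yaml) are assumed to be in scope.

secrets = decrypt_vault('group_vars/all/vault.yml',
                        vault_password='example-password')
print(secrets['db_password'])  # placeholder key from the decrypted YAML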
Example 2
def do_unvault(vault, secret, vaultid='filter_default'):

    if not isinstance(secret, (string_types, binary_type, Undefined)):
        raise AnsibleFilterTypeError(
            "Secret passed is required to be as string, instead we got: %s" %
            type(secret))

    if not isinstance(
            vault,
        (string_types, binary_type, AnsibleVaultEncryptedUnicode, Undefined)):
        raise AnsibleFilterTypeError(
            "Vault should be in the form of a string, instead we got: %s" %
            type(vault))

    data = ''
    vs = VaultSecret(to_bytes(secret))
    vl = VaultLib([(vaultid, vs)])
    if isinstance(vault, AnsibleVaultEncryptedUnicode):
        vault.vault = vl
        data = vault.data
    elif is_encrypted(vault):
        try:
            data = vl.decrypt(vault)
        except UndefinedError:
            raise
        except Exception as e:
            raise AnsibleFilterError("Unable to decrypt: %s" % to_native(e),
                                     orig_exc=e)
    else:
        data = vault

    return to_native(data)
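
A minimal sketch of calling do_unvault() directly rather than through the Jinja2 'unvault' filter it backs; the password and plaintext are placeholders, and the module-level imports used above (VaultLib, VaultSecret, etc.) are assumed to be available.

vl = VaultLib([('filter_default', VaultSecret(b'example-password'))])
ciphertext = vl.encrypt('hello world')             # bytes starting with $ANSIBLE_VAULT
print(do_unvault(ciphertext, 'example-password'))  # -> 'hello world'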
Example 3
def secrets(environment, ref, diff):
    environment_path = os.path.join('deployment', 'environments', environment)
    secrets_path = os.path.join(environment_path, 'group_vars', 'all',
                                'secrets.yml')

    with open('.vault_pass', 'rb') as f:
        password = f.read().strip()

    vault = VaultLib(password)

    secrets = [read_git(ref, secrets_path)]
    if diff is not None:
        secrets.append(read_git(diff, secrets_path))

    # Decrypt the ciphertext
    plaintext = [vault.decrypt(x).decode('utf-8') for x in secrets]

    if diff is None:
        click.echo(plaintext[0])
    else:
        click.echo(''.join(
            difflib.unified_diff(plaintext[1].splitlines(True),
                                 plaintext[0].splitlines(True),
                                 fromfile=':'.join((diff, secrets_path)),
                                 tofile=':'.join((ref, secrets_path)))))
Example 4
 def read(self):
     vault = VaultLib([(DEFAULT_VAULT_ID_MATCH,
                        VaultSecret(self.keyphrase.encode('utf-8')))])
     with open(self.path) as f:
         ciphered = f.read()
     cleartext = vault.decrypt(ciphered)
     self.data = yaml.safe_load(cleartext)
Example 5
class Vault(object):
    """Read and write data using the Ansible vault."""
    def __init__(self, password):
        """Create a vault."""
        self.password = password
        try:
            from ansible.parsing.vault import VaultSecret
            from ansible.module_utils._text import to_bytes
            pass_bytes = to_bytes(password, encoding='utf-8', errors='strict')
            secrets = [('password', VaultSecret(_bytes=pass_bytes))]
            # pylint: disable=unexpected-keyword-arg, no-value-for-parameter
            self.vault = VaultLib(secrets=secrets)
        except ImportError:
            self.vault = VaultLib(password)

    def dump(self, data, stream=None):
        """Encrypt data and print stdout or write to stream.

        :param data: The information to be encrypted
        :param stream: If not None the location to write the encrypted data to.
        :returns: If stream is None then the encrypted bytes otherwise None.
        """
        encrypted = self.vault.encrypt(data)
        if stream:
            stream.write(encrypted)
        else:
            return encrypted

    def load(self, stream):
        """Read vault steam and return python object.

        :param stream: The stream to read data from
        :returns: The decrypted data
        """
        return self.vault.decrypt(stream)

    def dump_as_yaml(self, obj, stream=None):
        """Convert object to yaml and encrypt the data.

        :param obj: Python object to convert to yaml
        :param stream: If not None the location to write the encrypted data to.
        :returns: If stream is None then the encrypted bytes otherwise None.
        """
        data = yaml.dump(obj,
                         allow_unicode=True,
                         default_flow_style=False,
                         Dumper=AnsibleDumper)
        return self.dump(data, stream)

    def dump_as_yaml_to_tempfile(self, obj):
        """Convert object to yaml and encrypt the data.

        :param obj: Python object to convert to yaml
        :returns: The filepath to write data
        """
        with tempfile.NamedTemporaryFile(delete=False,
                                         suffix='.yaml') as data_temp:
            self.dump_as_yaml(obj, data_temp)
        data_temp.close()
        return data_temp.name
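
A brief usage sketch for the Vault wrapper above, assuming a recent Ansible with VaultSecret available; the password and payload are placeholders. Note that load() returns the decrypted YAML text as bytes rather than a parsed object.

v = Vault('example-password')
encrypted = v.dump_as_yaml({'api_key': 'hunter2'})  # encrypted vault bytes
print(v.load(encrypted))                            # b'api_key: hunter2\n'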
Example 6
class Vault(object):
    def __init__(self, password):
        self._ansible_ver = _ansible_ver

        self.secret = password.encode('utf-8')
        self.vaultLib = VaultLib(self._make_secrets(self.secret))

    def _make_secrets(self, secret):
        if self._ansible_ver < 2.4:
            return secret

        from ansible.constants import DEFAULT_VAULT_ID_MATCH #@UnresolvedImport
        from ansible.parsing.vault import VaultSecret
        return [(DEFAULT_VAULT_ID_MATCH, VaultSecret(secret))]
    
    def encrypt(self, text):
        return self.vaultLib.encrypt(text)

    def encryptedFile2String(self, fileName):
        data = file2String(fileName)
        was_encrypted = False
        if is_encrypted(data):
            was_encrypted = True
            data = self.vaultLib.decrypt(data)
        return data, was_encrypted
            
    def stringToEncryptedFile(self, data, filename):
        data = self.encrypt(data)
        stream = open(filename, 'w')
        stream.write(data)
        stream.close()

        
Example 7
    def __init__(self):
        """
        This constructor function provides variables from the input files.
        """
        # Retrieving secret variables from config_secrets.json file
        encrypted_file = open("config_secrets.json")
        key = getpass("Enter key for encrypted variables:")

        # Configuring key and decrypting encrypted variables
        config_vault = VaultLib([(DEFAULT_VAULT_ID_MATCH,
                                  VaultSecret(key.encode('utf-8')))])
        config = json.loads(config_vault.decrypt(encrypted_file.read()))

        # Closing opened encrypted file
        encrypted_file.close()

        # Retrieving variables from user_input.json file
        with open("user_input.json", "r") as json_fp:
            input_json_data = json.load(json_fp)

        self.username = config["CONSOLE_USERNAME"]
        self.password = config["CONSOLE_PASSWORD"]

        self.controller_IP = input_json_data["CONTROLLER_IP"]
        self.cluster_name = input_json_data["CLUSTER_NAME"]
        self.expand_shrink = input_json_data["EXPAND_OR_SHRINK"]
        self.host_ips = input_json_data["HOST_IP's"]
        self.host_role = input_json_data["HOST_ROLE"]
Example 8
class Vault(object):
    '''R/W an ansible-vault yaml file'''
    def __init__(self, password):
        self._ansible_ver = _ansible_ver

        self.secret = password.encode('utf-8')
        self.vault = VaultLib(self._make_secrets(self.secret))

    def _make_secrets(self, secret):
        if self._ansible_ver < 2.4:
            return secret

        from ansible.constants import DEFAULT_VAULT_ID_MATCH
        from ansible.parsing.vault import VaultSecret
        return [(DEFAULT_VAULT_ID_MATCH, VaultSecret(secret))]

    def load(self, stream):
        '''read vault stream and return python object'''
        return yaml.safe_load(self.vault.decrypt(stream))

    def dump(self, data, stream=None):
        '''encrypt data and return it, or write to stream'''
        yaml_text = yaml.dump(data,
                              default_flow_style=False,
                              allow_unicode=True)
        encrypted = self.vault.encrypt(yaml_text)
        if stream:
            stream.write(encrypted)
        else:
            return encrypted
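
A round-trip sketch for this helper (placeholder password and data); unlike the previous wrapper, load() parses the decrypted text back into a Python object.

v = Vault('example-password')
blob = v.dump({'region': 'us-east-1'})          # encrypted vault text
assert v.load(blob) == {'region': 'us-east-1'}  # decrypted and YAML-parsed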
Example 9
    def __init__(self):
        """
        This constructor function provides encrypted as well as normal variables from the input files.
        """
        # Retrieving secret variables from config_secrets.json file
        encrypted_file = open("config_secrets.json")
        key = getpass("Enter key for encrypted variables:")

        # Configuring key and decrypting encrypted variables
        config_vault = VaultLib([(DEFAULT_VAULT_ID_MATCH, VaultSecret(key.encode('utf-8')))])
        config = json.loads(config_vault.decrypt(encrypted_file.read()))

        # Closing opened encrypted file 
        encrypted_file.close()

        # Retrieving all user inputs from the file into a variable
        with open ("userinput.json", "r") as json_fp:
            input_json_data = json.load(json_fp)

        self.username = config["OPENSHIFT_USERNAME"]
        self.password = config["OPENSHIFT_PASSWORD"]
        self.oc_host = ":".join([input_json_data["OPENSHIFT_DOMAIN"], input_json_data["OPENSHIFT_PORT"]])
        self.operator_list = input_json_data["OPENSHIFT_OPERATOR_LIST"]
        self.channel_list = input_json_data["OPERATOR_CHANNEL_LIST"]
        self.source_list = input_json_data["OPERATOR_SOURCE_LIST"]
        self.install_plan_approval_list = input_json_data["OPERATOR_INSTALL_PLAN"]
        self.namespaces = input_json_data["OPENSHIFT_PROJECT_NAME"]
        self.oc_path = input_json_data["OPENSHIFT_CLIENT_PATH"]
        self.storage_class_name = input_json_data["OPENSHIFT_STORAGE_CLASS_NAME"]
Example 10
def open_vault(vaultfile=''):
    vault_pw = os.environ.get('VAULT_PW')
    if not vault_pw:
        vault_pw = getpass.getpass('Vault Password: ')
    vault = VaultLib(vault_pw)
    vault_secret = vault.decrypt(open(vaultfile).read())

    return yaml.load(vault_secret, Loader=yaml.SafeLoader)
Example 11
 def test_encrypt_decrypt_aes256(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     v.cipher_name = 'AES256'
     enc_data = v.encrypt("foobar")
     dec_data = v.decrypt(enc_data)
     assert enc_data != "foobar", "encryption failed"
     assert dec_data == "foobar", "decryption failed"
Example 12
 def test_encrypt_decrypt_aes(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     v.cipher_name = 'AES'
     enc_data = v.encrypt("foobar")
     dec_data = v.decrypt(enc_data)
     assert enc_data != "foobar", "encryption failed"
     assert dec_data == "foobar", "decryption failed"           
Example 13
 def test_encrypt_decrypt_aes(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     v.cipher_name = u'AES'
     # AES encryption code has been removed, so this is old output for
     # AES-encrypted 'foobar' with password 'ansible'.
     enc_data = b'$ANSIBLE_VAULT;1.1;AES\n53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3\nfe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e\n786a5a15efeb787e1958cbdd480d076c\n'
     dec_data = v.decrypt(enc_data)
     assert dec_data == b"foobar", "decryption failed"
Example 14
 def test_encrypt_decrypt_aes(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     v.cipher_name = u'AES'
     # AES encryption code has been removed, so this is old output for
     # AES-encrypted 'foobar' with password 'ansible'.
     enc_data = '$ANSIBLE_VAULT;1.1;AES\n53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3\nfe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e\n786a5a15efeb787e1958cbdd480d076c\n'
     dec_data = v.decrypt(enc_data)
     assert dec_data == "foobar", "decryption failed"
Example 15
 def test_decrypt_decrypted(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     data = "ansible"
     error_hit = False
     try:
         dec_data = v.decrypt(data)
     except errors.AnsibleError as e:
         error_hit = True
     assert error_hit, "No error was thrown when trying to decrypt data without a header"
Example 16
 def test_decrypt_decrypted(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     data = "ansible"
     error_hit = False
     try:
         dec_data = v.decrypt(data)
     except errors.AnsibleError as e:
         error_hit = True
     assert error_hit, "No error was thrown when trying to decrypt data without a header"
Example 17
def get_key_value(key):
    vault_password = open("/tmp/.vaultpwd").readlines()[0].rstrip('\n')
    data = open("keychain.yml").read()

    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        data = vault.decrypt(data)
        ydata = yaml.load(data)
        return ydata['aws'][key]
    else:
        return None
Example 18
def get_key_value(key):
    vault_password = open("/tmp/.vaultpwd").readlines()[0].rstrip('\n')
    data = open("keychain.yml").read()

    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        data = vault.decrypt(data)
        ydata = yaml.load(data)
        return ydata['aws'][key]
    else:
        return None
Example 19
    def read(self):
        """
        Read vault data as a Python dictionary.

        """
        with open(expanduser(self.vault_file), "rb") as vault_file:
            encrypted = vault_file.read()

        vault_lib = VaultLib(self.secrets.items())
        plaintext = vault_lib.decrypt(encrypted, filename=self.vault_file)
        return load(plaintext, Loader=SafeLoader)
Example 20
 def test_decrypt_decrypted(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     data = "ansible"
     error_hit = False
     try:
         dec_data = v.decrypt(data)
     except errors.AnsibleError as e:
         error_hit = True
     assert error_hit, "No error was thrown when trying to decrypt data without a header"
Example 21
def load_yaml(yaml_file, vault_secret=None):
    """
    Load a YAML file into a python dictionary.

    The YAML file can be fully encrypted by Ansible-Vault or can contain
    multiple inline Ansible-Vault encrypted values. Ansible Vault
    encryption is ideal to store passwords or encrypt the entire file
    with sensitive data if required.
    """
    vault = VaultLib()

    if vault_secret:
        secret_file = get_file_vault_secret(filename=vault_secret, loader=DataLoader())
        secret_file.load()
        vault.secrets = [('default', secret_file)]

    # YAML ENV VAR
    # name: !ENV env_var('FOO')/bar
    yaml.add_implicit_resolver("!ENV", ENV_VAR_PATTERN)
    yaml.add_constructor('!ENV', env_var_constructor)

    data = None
    if os.path.isfile(yaml_file):
        with open(yaml_file, 'r') as stream:
            # Render environment variables using jinja templates
            contents = stream.read()
            template = Template(contents)
            stream = StringIO(template.render(env_var=os.environ))
            try:
                if is_encrypted_file(stream):
                    file_data = stream.read()
                    data = yaml.load(vault.decrypt(file_data, None))
                else:
                    file_data = stream.read()
                    data = yaml.load(file_data, Loader=yaml.Loader)

                    '''
                    Commenting code below for posterity. We are not using ansible functionality but yaml load should 
                    follow the same code path regardless of the file encryption state.
                    '''
                    #loader = AnsibleLoader(stream, None, vault.secrets)
                    #try:
                    #    data = loader.get_single_data()
                    #except Exception as exc:
                    #    raise Exception(f'Error when loading YAML config at {yaml_file} {exc}') from exc
                    #finally:
                    #    loader.dispose()
            except yaml.YAMLError as exc:
                raise Exception(f'Error when loading YAML config at {yaml_file} {exc}') from exc
    else:
        LOGGER.debug('No file at %s', yaml_file)

    return data
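
A hypothetical call to load_yaml() above; both paths are placeholders, and vault_secret points at a file containing the vault password.

config = load_yaml('config.yml', vault_secret='~/.vault_pass.txt')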
Example 22
    def test_encrypt_decrypt_aes256_bad_hmac(self):
        # FIXME This test isn't working quite yet.
        raise SkipTest

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        v = VaultLib('test-vault-password')
        v.cipher_name = 'AES256'
        # plaintext = "Setec Astronomy"
        enc_data = '''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''
        b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
        b_data = v._split_header(b_data)
        foo = binascii.unhexlify(b_data)
        lines = foo.splitlines()
        # line 0 is salt, line 1 is hmac, line 2+ is ciphertext
        b_salt = lines[0]
        b_hmac = lines[1]
        b_ciphertext_data = b'\n'.join(lines[2:])

        b_ciphertext = binascii.unhexlify(b_ciphertext_data)
        # b_orig_ciphertext = b_ciphertext[:]

        # now muck with the text
        # b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
        # b_munged_ciphertext = b_ciphertext
        # assert b_orig_ciphertext != b_munged_ciphertext

        b_ciphertext_data = binascii.hexlify(b_ciphertext)
        b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
        # reformat
        b_invalid_ciphertext = v._format_output(b_payload)

        # assert we throw an error
        v.decrypt(b_invalid_ciphertext)
Example 23
def uncrypted(data, script):
    import imp
    from ansible.parsing.vault import VaultLib

    vault_pass = imp.load_source('vault_pass', script)
    password = vault_pass.get_password()

    vault = VaultLib(password)
    uncrypt = vault.decrypt(data)

    loader = DataLoader()
    result = loader.load(data=uncrypt)

    return result
Example 24
def main(request):
    if request.method == 'POST':
        form_posted = VaultForm(request.POST)
        if form_posted.is_valid():
            vault = VaultLib([
                (DEFAULT_VAULT_ID_MATCH,
                 VaultSecret(form_posted.cleaned_data['password'].encode()))
            ])

            if form_posted.cleaned_data[
                    'yaml'] and not form_posted.cleaned_data['vault']:
                vault = vault.encrypt(
                    form_posted.cleaned_data['yaml']).decode()
                form = VaultForm(
                    initial={
                        'password': form_posted.cleaned_data['password'],
                        'yaml': form_posted.cleaned_data['yaml'],
                        'vault': vault
                    })

                action = 'chiffrement'

            elif form_posted.cleaned_data[
                    'vault'] and not form_posted.cleaned_data['yaml']:
                vault_stringio = io.StringIO(form_posted.cleaned_data['vault'])
                yaml = vault.decrypt(vault_stringio.read()).decode()
                form = VaultForm(
                    initial={
                        'password': form_posted.cleaned_data['password'],
                        'yaml': yaml,
                        'vault': form_posted.cleaned_data['vault']
                    })

                action = 'déchiffrement'

            else:
                form = VaultForm(
                    initial={
                        'password': form_posted.cleaned_data['password'],
                        'yaml': form_posted.cleaned_data['yaml'],
                        'vault': form_posted.cleaned_data['vault']
                    })

                action = 'vous attendez quoi ?'

    else:
        action = ''
        form = VaultForm()

    return render(request, 'main.html', {'form': form, 'action': action})
Example 25
    def __init__(self):
        """
        This constructor function provides encrypted as well as normal variables from the input files.
        """
        try:
            # Retrieving secret variables from config_secrets.json file
            encrypted_file = open("config_secrets.json")
            key = getpass("Enter key for encrypted variables:")

            # Configuring key and decrypting encrypted variables
            config_vault = VaultLib([(DEFAULT_VAULT_ID_MATCH,
                                      VaultSecret(key.encode('utf-8')))])
            config = json.loads(config_vault.decrypt(encrypted_file.read()))

            # Closing opened encrypted file
            encrypted_file.close()

            # Retrieving all user input files from file into a variable
            with open("userinput.json", "r") as json_fp:
                input_json_data = json.load(json_fp)

            self.username = config["OPENSHIFT_USERNAME"]
            self.password = config["OPENSHIFT_PASSWORD"]
            self.oc_host = ":".join([
                input_json_data["OPENSHIFT_DOMAIN"],
                input_json_data["OPENSHIFT_PORT"]
            ])
            self.oc_path = input_json_data["OPENSHIFT_CLIENT_PATH"]
            self.local_storage_namespace = input_json_data[
                "LOCAL_STORAGE_NAMESPACE"]
            self.ocs_namespace = input_json_data[
                "OPENSHIFT_CONTAINER_STORAGE_NAMESPACE"]
            self.ocs_fs_storage = input_json_data[
                "OPENSHIFT_CONTAINER_STORAGE_FILESYSTEM_STORAGE"]
            self.ocs_block_storage = input_json_data[
                "OPENSHIFT_CONTAINER_STORAGE_BLOCK_STORAGE"]
            self.local_storage_version = input_json_data[
                "LOCAL_STORAGE_VERSION"]
            self.ocs_version = input_json_data[
                "OPENSHIFT_CONTAINER_STORAGE_VERSION"]
            self.ocp_worker_nodes = input_json_data[
                "OPENSHIFT_CONTAINER_PLATFORM_WORKER_NODES"]
            self.ocs_block_volume = input_json_data[
                "OPENSHIFT_CONTAINER_STORAGE_BLOCK_VOLUME"]
        except Exception as run_err:
            print(
                "config.py - Config: Entered WRONG password and exception is: {}"
                .format(run_err))
            sys.exit()
Example 26
def load_yaml(yaml_file, vault_secret=None):
    """
    Load a YAML file into a python dictionary.

    The YAML file can be fully encrypted by Ansible-Vault or can contain
    multiple inline Ansible-Vault encrypted values. Ansible Vault
    encryption is ideal to store passwords or encrypt the entire file
    with sensitive data if required.
    """
    vault = VaultLib()

    if vault_secret:
        secret_file = get_file_vault_secret(filename=vault_secret,
                                            loader=DataLoader())
        secret_file.load()
        vault.secrets = [('default', secret_file)]

    data = None
    if os.path.isfile(yaml_file):
        with open(yaml_file, 'r', encoding='utf-8') as stream:
            # Render environment variables using jinja templates
            contents = stream.read()
            template = Template(contents)
            stream = StringIO(template.render(env_var=os.environ))
            try:
                if is_encrypted_file(stream):
                    file_data = stream.read()
                    data = yaml.load(vault.decrypt(file_data, None))
                else:
                    loader = AnsibleLoader(stream, None, vault.secrets)
                    try:
                        data = loader.get_single_data()
                    except Exception as exc:
                        raise Exception(
                            f'Error when loading YAML config at {yaml_file} {exc}'
                        ) from exc
                    finally:
                        loader.dispose()
            except yaml.YAMLError as exc:
                raise Exception(
                    f'Error when loading YAML config at {yaml_file} {exc}'
                ) from exc
    else:
        LOGGER.debug('No file at %s', yaml_file)

    if isinstance(data, AnsibleMapping):
        data = dict(data)

    return data
Example 27
    def test_encrypt_decrypt_aes256_existing_vault(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        v = VaultLib('test-vault-password')
        v.cipher_name = 'AES256'
        plaintext = b"Setec Astronomy"
        enc_data = '''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''

        dec_data = v.decrypt(enc_data)
        assert dec_data == plaintext, "decryption failed"
Example 28
    def test_rekey_migration(self):
        """
        Skip testing rekeying files if we don't have access to AES, KDF or
        Counter, or we are running on python3 since VaultAES hasn't been backported.
        """
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or sys.version > '3':
            raise SkipTest

        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = VaultEditor(None, "ansible", v10_file.name)

        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.rekey_file('ansible2')
        except errors.AnsibleError:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        self.assertFalse(error_hit, "error rekeying 1.0 file to 1.1")

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib("ansible2")
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError as e:
            error_hit = True

        os.unlink(v10_file.name)

        self.assertEqual(
            vl.cipher_name, "AES256",
            "wrong cipher name set after rekey: %s" % vl.cipher_name)
        self.assertFalse(error_hit, "error decrypting migrated 1.0 file")
        self.assertEqual(
            dec_data.strip(), "foo",
            "incorrect decryption of rekeyed/migrated file: %s" % dec_data)
Example 29
def load_vaulted_pass(vault_password=None,
                      vault_password_file=None,
                      vault_prompt=False):
    loader = DataLoader()
    if vault_password:
        vault_secret = [([], VaultSecret(vault_password.encode()))]
    elif vault_password_file:
        vault_secret = CLI.setup_vault_secrets(loader=loader,
                                               vault_ids=[vault_password_file])
    else:
        vault_secret = CLI.setup_vault_secrets(loader=loader,
                                               vault_ids=[],
                                               auto_prompt=vault_prompt)
    vault = VaultLib(vault_secret)
    unencrypted_yaml = vault.decrypt(open("vaulted_password").read())
    unencrypted_yaml = yaml.safe_load(unencrypted_yaml)
    return unencrypted_yaml["password"]
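
A hypothetical call to load_vaulted_pass() above; it reads a file literally named 'vaulted_password' from the working directory, and the password-file path shown is a placeholder.

password = load_vaulted_pass(vault_password_file='~/.vault_pass.txt')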
Example 30
    def load_json(self):
        """
        This function loads the json file
        """
        try:
            # Retrieving secret variables from config_secrets.json file
            encrypted_file = open(self.json_file)
            vault_pass = self.key
            # Configuring key and decrypting encrypted variables
            config_vault = VaultLib([(DEFAULT_VAULT_ID_MATCH, VaultSecret(vault_pass.encode('utf-8')))])
            self.data = json.loads(config_vault.decrypt(encrypted_file.read()))

            # Closing opened encrypted file 
            encrypted_file.close() 
        except:
            print("Unexpected error:", sys.exc_info()[0])
            raise
Example 31
def load_yaml(yaml_file, vault_secret=None):
    '''
    Load a YAML file into a python dictionary.

    The YAML file can be fully encrypted by Ansible-Vault or can contain
    multiple inline Ansible-Vault encrypted values. Ansible Vault
    encryption is ideal to store passwords or encrypt the entire file
    with sensitive data if required.
    '''
    vault = VaultLib()

    if vault_secret:
        secret_file = get_file_vault_secret(filename=vault_secret,
                                            loader=DataLoader())
        secret_file.load()
        vault.secrets = [('default', secret_file)]

    data = None
    if os.path.isfile(yaml_file):
        with open(yaml_file, 'r') as stream:
            try:
                if is_encrypted_file(stream):
                    file_data = stream.read()
                    data = yaml.load(vault.decrypt(file_data, None))
                else:
                    loader = AnsibleLoader(stream, None, vault.secrets)
                    try:
                        data = loader.get_single_data()
                    except Exception as exc:
                        raise Exception(
                            "Error when loading YAML config at {} {}".format(
                                yaml_file, exc))
                    finally:
                        loader.dispose()
            except yaml.YAMLError as exc:
                raise Exception(
                    "Error when loading YAML config at {} {}".format(
                        yaml_file, exc))
    else:
        logger.debug("No file at {}".format(yaml_file))

    return data
Example 32
    def test_rekey_migration(self):
        """
        Skip testing rekeying files if we don't have access to AES, KDF or
        Counter, or we are running on python3 since VaultAES hasn't been backported.
        """
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or sys.version > '3':
            raise SkipTest

        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = VaultEditor(None, "ansible", v10_file.name)

        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.rekey_file('ansible2')
        except errors.AnsibleError as e:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit == False, "error rekeying 1.0 file to 1.1"

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib("ansible2")
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError as e:
            error_hit = True

        os.unlink(v10_file.name)

        assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name
        assert error_hit == False, "error decrypting migrated 1.0 file"
        assert dec_data.strip() == "foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
Example 33
    def test_rekey_migration(self):
        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = self._vault_editor(self._secrets("ansible"))

        # make sure the password functions for the cipher
        error_hit = False
        new_secrets = self._secrets("ansible2")
        try:
            ve.rekey_file(v10_file.name,
                          vault.match_encrypt_secret(new_secrets)[1])
        except errors.AnsibleError:
            raise
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit is False, "error rekeying 1.0 file to 1.1"

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib(new_secrets)
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError:
            raise
            error_hit = True

        os.unlink(v10_file.name)

        self.assertIn(b'AES256', fdata,
                      'AES256 was not found in vault file %s' % to_text(fdata))
        assert error_hit is False, "error decrypting migrated 1.0 file"
        assert dec_data.strip(
        ) == b"foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
Example 34
    def test_rekey_migration(self):
        # Skip testing rekeying files if we don't have access to AES, KDF or Counter.
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest

        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = VaultEditor("ansible")

        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.rekey_file(v10_file.name, 'ansible2')
        except errors.AnsibleError:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit is False, "error rekeying 1.0 file to 1.1"

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib("ansible2")
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError:
            error_hit = True

        os.unlink(v10_file.name)

        assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name
        assert error_hit is False, "error decrypting migrated 1.0 file"
        assert dec_data.strip(
        ) == b"foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
Example 35
class Vault(object):
    '''R/W an ansible-vault yaml file'''

    def __init__(self, password):
        self._ansible_ver = _ansible_ver

        self.secret = password.encode('utf-8')
        self.vault = VaultLib(self._make_secrets(self.secret))

    def _make_secrets(self, secret):
        if self._ansible_ver < 2.4:
            return secret

        from ansible.constants import DEFAULT_VAULT_ID_MATCH
        from ansible.parsing.vault import VaultSecret
        return [(DEFAULT_VAULT_ID_MATCH, VaultSecret(secret))]

    def load_raw(self, stream):
        """Read vault stream and return raw data."""
        return self.vault.decrypt(stream)

    def dump_raw(self, text, stream=None):
        """Encrypt raw data and write to stream."""
        encrypted = self.vault.encrypt(text)
        if stream:
            stream.write(encrypted)
        else:
            return encrypted

    def load(self, stream):
        """Read vault steam and return python object."""
        return yaml.safe_load(self.load_raw(stream))

    def dump(self, data, stream=None):
        """Encrypt data and print stdout or write to stream."""
        yaml_text = yaml.dump(
            data,
            default_flow_style=False,
            allow_unicode=True)
        return self.dump_raw(yaml_text, stream=stream)
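
A short usage sketch for this variant (placeholder password and data); the *_raw methods skip the YAML round-trip and work on plain text.

v = Vault('example-password')
raw = v.dump_raw('plain text')           # encrypted vault bytes
assert v.load_raw(raw) == b'plain text'  # decrypted bytes, no YAML parsing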
Example 36
def parse_file(filename, vault_enc, vault_pass):
    if json.loads(vault_enc.lower()):
        vault_pass = vault_pass.encode('utf-8')
        vault = VaultLib(_make_secrets(vault_pass))
        cred_str = vault.decrypt(open(filename).read())
    else:
        cred_str = open(filename, "r").read()
    try:
        out = json.loads(cred_str)
    except Exception as e:
        try:
            out = yaml.load(cred_str)
        except Exception as e:
            try:
                config = ConfigDict()
                f = open(filename)
                config.readfp(f)
                out = config.as_dict()
                f.close()
            except Exception as e:
                module.fail_json(msg="Error: {0} ".format(str(e)))
    return out
Example 37
def parse_file(filename, vault_enc, vault_pass):
    if json.loads(vault_enc.lower()):
        vault_pass = vault_pass.encode('utf-8')
        vault = VaultLib(_make_secrets(vault_pass))
        cred_str = vault.decrypt(open(filename).read())
    else:
        cred_str = open(filename, "r").read()
    try:
        out = json.loads(cred_str)
    except Exception as e:
        try:
            out = yaml.load(cred_str)
        except Exception as e:
            try:
                config = ConfigDict()
                f = open(filename)
                config.readfp(f)
                out = config.as_dict()
                f.close()
            except Exception as e:
                module.fail_json(msg="Error: {0} ".format(str(e)))
    return out
Example 38
    def test_rekey_migration(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest

        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(v10_data)

        ve = VaultEditor(None, "ansible", v10_file.name)

        # make sure the password functions for the cipher
        error_hit = False
        try:        
            ve.rekey_file('ansible2')
        except errors.AnsibleError as e:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit == False, "error rekeying 1.0 file to 1.1"            

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib("ansible2")
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError as e:
            error_hit = True

        os.unlink(v10_file.name)

        assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name
        assert error_hit == False, "error decrypting migrated 1.0 file"            
        assert dec_data.strip() == "foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
Example 39
    def test_rekey_migration(self):
        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = self._vault_editor(self._secrets("ansible"))

        # make sure the password functions for the cipher
        error_hit = False
        new_secrets = self._secrets("ansible2")
        try:
            ve.rekey_file(v10_file.name, vault.match_encrypt_secret(new_secrets)[1])
        except errors.AnsibleError:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit is False, "error rekeying 1.0 file to 1.1"

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib(new_secrets)
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError:
            error_hit = True

        os.unlink(v10_file.name)

        self.assertIn(b'AES256', fdata, 'AES256 was not found in vault file %s' % to_text(fdata))
        assert error_hit is False, "error decrypting migrated 1.0 file"
        assert dec_data.strip() == b"foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
Example 40
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='******')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault_password = vault_password
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string. 
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            try:
                # if loading JSON failed for any reason, we go ahead
                # and try to parse it as YAML instead
                return self._safe_load(data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            return self._FILE_CACHE[file_name]

        # read the file contents and load the data structure from them
        (file_data, show_content) = self._get_file_contents(file_name)
        parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

        # cache the file contents for next time
        self._FILE_CACHE[file_name] = parsed_data

        return parsed_data

    def path_exists(self, path):
        return os.path.exists(path)

    def is_directory(self, path):
        return os.path.isdir(path)

    def is_file(self, path):
        return os.path.isfile(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            loader.dispose()

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleParserError("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occured while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.set_position_info(file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = basedir

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)

        if given.startswith("/"):
            return os.path.abspath(given)
        elif given.startswith("~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            return os.path.abspath(os.path.join(self._basedir, given))

    def path_dwim_relative(self, role_path, dirname, source):
        ''' find one file in a directory one level up in a dir named dirname relative to current '''

        basedir = os.path.dirname(role_path)
        if os.path.islink(basedir):
            basedir = unfrackpath(basedir)
            template2 = os.path.join(basedir, dirname, source)
        else:
            template2 = os.path.join(basedir, '..', dirname, source)

        source1 = os.path.join(role_path, dirname, source)
        if os.path.exists(source1):
            return source1

        cur_basedir = self._basedir
        self.set_basedir(basedir)
        source2 = self.path_dwim(template2)
        if os.path.exists(source2):
            self.set_basedir(cur_basedir)
            return source2

        obvious_local_path = self.path_dwim(source)
        if os.path.exists(obvious_local_path):
            self.set_basedir(cur_basedir)
            return obvious_local_path

        self.set_basedir(cur_basedir)
        return source2 # which does not exist
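
A usage sketch mirroring the class docstring above; this DataLoader targets the pre-2.4 VaultLib(password=...) API, and the file path is a placeholder.

dl = DataLoader(vault_password='example-password')
group_vars = dl.load_from_file('group_vars/all.yml')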
Example 41
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        content = self._task.args.get('content', None)
        dest    = self._task.args.get('dest', None)
        raw     = boolean(self._task.args.get('raw', 'no'))
        force   = boolean(self._task.args.get('force', 'yes'))
        faf     = self._task.first_available_file
        remote_src = boolean(self._task.args.get('remote_src', False))

        if (source is None and content is None and faf is None) or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result
        elif (source is not None or faf is not None) and content is not None:
            result['failed'] = True
            result['msg'] = "src and content are mutually exclusive"
            return result
        elif content is not None and dest is not None and dest.endswith("/"):
            result['failed'] = True
            result['msg'] = "dest must be a file if content is defined"
            return result

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if isinstance(content, dict) or isinstance(content, list):
                    content_tempfile = self._create_content_tempfile(json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception as err:
                result['failed'] = True
                result['msg'] = "could not write content temp file: %s" % err
                return result

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        elif faf:
            source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
            if source is None:
                result['failed'] = True
                result['msg'] = "could not find src in first_available_file list"
                return result

        elif remote_src:
            result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars, delete_remote_tmp=False))
            return result

        else:
            if self._task._role is not None:
                source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
            else:
                source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)

        # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
        source_files = []

        # If source is a directory populate our list else source is a file and translate it to a tuple.
        if os.path.isdir(source):
            # Get the amount of spaces to remove to get the relative path.
            if source_trailing_slash:
                sz = len(source)
            else:
                sz = len(source.rsplit('/', 1)[0]) + 1

            # Walk the directory and append the file tuples to source_files.
            for base_path, sub_folders, files in os.walk(source):
                for file in files:
                    full_path = os.path.join(base_path, file)
                    rel_path = full_path[sz:]
                    if rel_path.startswith('/'):
                        rel_path = rel_path[1:]
                    source_files.append((full_path, rel_path))

            # If it's recursive copy, destination is always a dir,
            # explicitly mark it so (note - copy module relies on this).
            if not self._connection._shell.path_has_trailing_slash(dest):
                dest = self._connection._shell.join_path(dest, '')
        else:
            source_files.append((source, os.path.basename(source)))

        changed = False
        module_return = dict(changed=False)

        # A register for if we executed a module.
        # Used to cut down on command calls when not recursive.
        module_executed = False

        # Tell _execute_module to delete the file if there is one file.
        delete_remote_tmp = (len(source_files) == 1)

        # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
        if not delete_remote_tmp:
            if tmp is None or "-tmp-" not in tmp:
                tmp = self._make_tmp_path()

        # expand any user home dir specifier
        dest = self._remote_expand_user(dest)

        vault = VaultLib(password=self._loader._vault_password)
        diffs = []
        for source_full, source_rel in source_files:
            
            vault_temp_file = None
            data = None

            try:
                data = open(source_full).read()
            except IOError:
                raise errors.AnsibleError("file could not read: %s" % source_full)

            if vault.is_encrypted(data):
                # if the file is encrypted and no password was specified,
                # the decrypt call would throw an error, but we check first
                # since the decrypt function doesn't know the file name
                if self._loader._vault_password is None:
                    raise errors.AnsibleError("A vault password must be specified to decrypt %s" % source_full)
                    
                data = vault.decrypt(data)
                # Make a temp file
                vault_temp_file = self._create_content_tempfile(data)
                source_full = vault_temp_file
                
            # Generate a hash of the local file.
            local_checksum = checksum(source_full)

            # If local_checksum is not defined we can't find the file so we should fail out.
            if local_checksum is None:
                result['failed'] = True
                result['msg'] = "could not find src=%s" % source_full
                return result

            # This is kind of optimization - if user told us destination is
            # dir, do path manipulation right away, otherwise we still check
            # for dest being a dir via remote call below.
            if self._connection._shell.path_has_trailing_slash(dest):
                dest_file = self._connection._shell.join_path(dest, source_rel)
            else:
                dest_file = self._connection._shell.join_path(dest)

            # Attempt to get the remote checksum
            remote_checksum = self._remote_checksum(dest_file, all_vars=task_vars)

            if remote_checksum == '3':
                # The remote_checksum was executed on a directory.
                if content is not None:
                    # If source was defined as content remove the temporary file and fail out.
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    result['failed'] = True
                    result['msg'] = "can not use content with a dir as dest"
                    return result
                else:
                    # Append the relative source location to the destination and retry remote_checksum
                    dest_file = self._connection._shell.join_path(dest, source_rel)
                    remote_checksum = self._remote_checksum(dest_file, all_vars=task_vars)

            if remote_checksum != '1' and not force:
                # remote_file does not exist so continue to next iteration.
                continue

            if local_checksum != remote_checksum:
                # The checksums don't match and we will change or error out.
                changed = True

                # Create a tmp path if missing only if this is not recursive.
                # If this is recursive we already have a tmp path.
                if delete_remote_tmp:
                    if tmp is None or "-tmp-" not in tmp:
                        tmp = self._make_tmp_path()

                if self._play_context.diff and not raw:
                    diffs.append(self._get_diff_data(dest_file, source_full, task_vars))

                if self._play_context.check_mode:
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    changed = True
                    module_return = dict(changed=True)
                    continue

                # Define a remote directory that we will copy the file to.
                tmp_src = self._connection._shell.join_path(tmp, 'source')

                if not raw:
                    self._connection.put_file(source_full, tmp_src)
                else:
                    self._connection.put_file(source_full, dest_file)

                # We have copied the file remotely and no longer require our content_tempfile
                self._remove_tempfile_if_content_defined(content, content_tempfile)

                # Remove the vault tempfile if we have one
                if vault_temp_file:
                    os.remove(vault_temp_file)
                    vault_temp_file = None

                # fix file permissions when the copy is done as a different user
                if self._play_context.become and self._play_context.become_user != 'root':
                    self._remote_chmod('a+r', tmp_src)

                if raw:
                    # Continue to next iteration if raw is defined.
                    continue

                # Run the copy module

                # src and dest here come after original and override them
                # we pass dest only to make sure it includes trailing slash in case of recursive copy
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=tmp_src,
                        dest=dest,
                        original_basename=source_rel,
                    )
                )

                module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            else:
                # no need to transfer the file, already correct hash, but still need to call
                # the file module in case we want to change attributes
                self._remove_tempfile_if_content_defined(content, content_tempfile)

                # Remove the vault tempfile if we have one
                if vault_temp_file:
                    os.remove(vault_temp_file)
                    vault_temp_file = None
                    
                if raw:
                    # Continue to next iteration if raw is defined.
                    self._remove_tmp_path(tmp)
                    continue

                # Build temporary module_args.
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=source_rel,
                        dest=dest,
                        original_basename=source_rel
                    )
                )

                # Execute the file module.
                module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            if not module_return.get('checksum'):
                module_return['checksum'] = local_checksum
            if module_return.get('failed'):
                result.update(module_return)
                return result
            if module_return.get('changed'):
                changed = True

            # the file module returns the file path as 'path', but
            # the copy module uses 'dest', so add it if it's not there
            if 'path' in module_return and 'dest' not in module_return:
                module_return['dest'] = module_return['path']

        # Delete tmp path if we were recursive or if we did not execute a module.
        if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) or (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed):
            self._remove_tmp_path(tmp)

        if module_executed and len(source_files) == 1:
            result.update(module_return)
        else:
            result.update(dict(dest=dest, src=source, changed=changed))

        if diffs:
            result['diff'] = diffs

        return result
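
The copy flow above hinges on comparing checksums: Ansible's checksum() helper is the SHA-1 hex digest of the file contents, while the remote side reports sentinel strings for special cases ('1' when the remote file is missing, '3' when the destination is a directory, as the branches above show). A minimal local sketch of that convention follows; the helper name and paths are illustrative and not part of the plugin.

import hashlib

def local_sha1_checksum(path):
    """SHA-1 hex digest of a file's contents, mirroring what checksum() returns for local files."""
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()

# hypothetical usage mirroring the plugin's decision:
# if remote_checksum == '1' or local_sha1_checksum(source_full) != remote_checksum:
#     ...  # transfer the file with put_file() and run the copy module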
Example no. 42
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()

        # initialize the vault stuff with an empty password
        self.set_vault_password(None)

    def set_vault_password(self, vault_password):
        self._vault_password = vault_password
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            # if loading JSON failed for any reason, we go ahead
            # and try to parse it as YAML instead

            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                new_data = text_type(data)
            else:
                new_data = data
            try:
                new_data = self._safe_load(new_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos
            return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (file_data, show_content) = self._get_file_contents(file_name)
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        # return a deep copy here, so the cache is not affected
        return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(file_name, 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False

            data = to_unicode(data, errors='strict')
            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_unicode(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_unicode(given, errors='strict')

        if given.startswith(u"/"):
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            basedir = to_unicode(self._basedir, errors='strict')
            return os.path.abspath(os.path.join(basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        isrole = False

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith('/'):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))

            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path,'main.yml'), errors='strict')) \
                or os.path.exists(to_bytes(os.path.join(path,'tasks/main.yml'), errors='strict')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname,source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='strict')):
                break

        return candidate

    def read_vault_password_file(self, vault_password_file):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(to_bytes(os.path.expanduser(vault_password_file), errors='strict'))
        if not os.path.exists(to_bytes(this_path, errors='strict')):
            raise AnsibleFileNotFound("The vault password file %s was not found" % this_path)

        if self.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e))
            stdout, stderr = p.communicate()
            self.set_vault_password(stdout.strip('\r\n'))
        else:
            try:
                f = open(this_path, "rb")
                self.set_vault_password(f.read().strip())
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))
Example no. 43
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        self.set_vault_password(None)

    def set_vault_password(self, vault_password):
        self._vault_password = vault_password
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''
        new_data = None
        try:
            # we first try to load this data as JSON
            new_data = json.loads(data)
        except:
            # must not be JSON, let the rest try
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data
            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos

        return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (file_data, show_content) = self._get_file_contents(file_name)
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        # return a deep copy here, so the cache is not affected
        return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name, self._vault_password)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass  # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        b_file_name = to_bytes(file_name)
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                if is_encrypted(data):
                    data = self._vault.decrypt(data, filename=b_file_name)
                    show_content = False

            data = to_text(data, errors='surrogate_or_strict')
            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_text(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_text(given, errors='surrogate_or_strict')

        if given.startswith(u"/"):
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            basedir = to_text(self._basedir, errors='surrogate_or_strict')
            return os.path.abspath(os.path.join(basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        isrole = False

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith(os.path.sep):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path,'main.yml'), errors='surrogate_or_strict')) \
                    or os.path.exists(to_bytes(os.path.join(path,'tasks/main.yml'), errors='surrogate_or_strict')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname,source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
                break

        return candidate

    def path_dwim_relative_stack(self, paths, dirname, source):
        '''
        find one file in first path in stack taking roles into account and adding play basedir as fallback

        :arg paths: A list of text strings which are the paths to look for the filename in.
        :arg dirname: A text string representing a directory.  The directory
            is prepended to the source to form the path to search for.
        :arg source: A text string which is the filename to search for
        :rtype: A text string
        :returns: An absolute path to the filename ``source``
        '''
        b_dirname = to_bytes(dirname)
        b_source = to_bytes(source)

        result = None
        if source is None:
            display.warning('Invalid request to find a file that matches a "null" value')
        elif source and (source.startswith('~') or source.startswith(os.path.sep)):
            # path is absolute, no relative needed, check existence and return source
            test_path = unfrackpath(b_source)
            if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
                result = test_path
        else:
            search = []
            for path in paths:
                upath = unfrackpath(path)
                b_upath = to_bytes(upath, errors='surrogate_or_strict')
                b_mydir = os.path.dirname(b_upath)

                # if path is in role and 'tasks' not there already, add it into the search
                if b_upath.endswith(b'tasks') and os.path.exists(os.path.join(b_upath, b'main.yml')) \
                        or os.path.exists(os.path.join(b_upath, b'tasks/main.yml')) \
                        or os.path.exists(os.path.join(b_mydir, b'tasks/main.yml')):
                    if b_mydir.endswith(b'tasks'):
                        search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
                        search.append(os.path.join(b_mydir, b_source))
                    else:
                        # don't add dirname if user already is using it in source
                        if b_source.split(b'/')[0] == b_dirname:
                            search.append(os.path.join(b_upath, b_source))
                        else:
                            search.append(os.path.join(b_upath, b_dirname, b_source))
                        search.append(os.path.join(b_upath, b'tasks', b_source))
                elif b_dirname not in b_source.split(b'/'):
                    # don't add dirname if user already is using it in source
                    search.append(os.path.join(b_upath, b_dirname, b_source))
                    search.append(os.path.join(b_upath, b_source))

            # always append basedir as last resort
            search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
            search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

            display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
            for b_candidate in search:
                display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
                if os.path.exists(b_candidate):
                    result = to_text(b_candidate)
                    break

        return result

    def read_vault_password_file(self, vault_password_file):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(to_bytes(os.path.expanduser(vault_password_file), errors='surrogate_or_strict'))
        if not os.path.exists(to_bytes(this_path, errors='surrogate_or_strict')):
            raise AnsibleFileNotFound("The vault password file %s was not found" % this_path)

        if self.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError("Problem running vault password script %s (%s)."
                        " If this is not a script, remove the executable bit from the file." % (' '.join(this_path), to_native(e)))
            stdout, stderr = p.communicate()
            self.set_vault_password(stdout.strip('\r\n'))
        else:
            try:
                f = open(this_path, "rb")
                self.set_vault_password(f.read().strip())
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))

        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % to_native(file_path))

        if not self._vault:
            self._vault = VaultLib(password="")

        real_path = self.path_dwim(file_path)

        try:
            with open(to_bytes(real_path), 'rb') as f:
                if is_encrypted_file(f):
                    # if the file is encrypted and no password was specified,
                    # the decrypt call would throw an error, but we check first
                    # since the decrypt function doesn't know the file name
                    data = f.read()
                    if not self._vault_password:
                        raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)

                    data = self._vault.decrypt(data, filename=real_path)
                    # Make a temp file
                    real_path = self._create_content_tempfile(data)
                    self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)))

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except:
                pass  # TODO: this should at least warn
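
This variant adds get_real_file()/cleanup_tmp_file() for callers that need a plain file on disk rather than parsed data. A small sketch, with placeholder paths and password:

loader = DataLoader()
loader.set_vault_password('hypothetical-password')
real_path = loader.get_real_file('files/secret.pem')   # a decrypted tempfile if the source was vaulted
try:
    with open(real_path, 'rb') as f:
        key_material = f.read()
finally:
    loader.cleanup_tmp_file(real_path)                  # removes the tempfile; a no-op for plain files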
Example no. 44
class TestVaultLib(unittest.TestCase):
    def setUp(self):
        self.v = VaultLib('test-vault-password')

    def test_encrypt(self):
        plaintext = u'Some text to encrypt in a café'
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_encrypt_bytes(self):

        plaintext = to_bytes(u'Some text to encrypt in a café')
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_is_encrypted(self):
        self.assertFalse(self.v.is_encrypted(b"foobar"), msg="encryption check on plaintext yielded false positive")
        b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        self.assertTrue(self.v.is_encrypted(b_data), msg="encryption check on headered text failed")

    def test_format_output(self):
        self.v.cipher_name = "TEST"
        b_ciphertext = b"ansible"
        b_vaulttext = self.v._format_output(b_ciphertext)
        b_lines = b_vaulttext.split(b'\n')
        self.assertGreater(len(b_lines), 1, msg="failed to properly add header")

        b_header = b_lines[0]
        self.assertTrue(b_header.endswith(b';TEST'), msg="header does not end with cipher name")

        b_header_parts = b_header.split(b';')
        self.assertEqual(len(b_header_parts), 3, msg="header has the wrong number of parts")
        self.assertEqual(b_header_parts[0], b'$ANSIBLE_VAULT', msg="header does not start with $ANSIBLE_VAULT")
        self.assertEqual(b_header_parts[1], self.v.b_version, msg="header version is incorrect")
        self.assertEqual(b_header_parts[2], b'TEST', msg="header does not end with cipher name")

    def test_split_header(self):
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
        b_ciphertext = self.v._split_header(b_vaulttext)
        b_lines = b_ciphertext.split(b'\n')
        self.assertEqual(b_lines[0], b"ansible", msg="Payload was not properly split from the header")
        self.assertEqual(self.v.cipher_name, u'TEST', msg="cipher name was not properly set")
        self.assertEqual(self.v.b_version, b"9.9", msg="version was not properly set")

    def test_encrypt_decrypt_aes(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        self.v.b_password = b'ansible'
        # AES encryption code has been removed, so this is old output for
        # AES-encrypted 'foobar' with password 'ansible'.
        b_vaulttext = b'''$ANSIBLE_VAULT;1.1;AES
53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3
fe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e
786a5a15efeb787e1958cbdd480d076c
'''
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        plaintext = u"foobar"
        b_vaulttext = self.v.encrypt(plaintext)
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertNotEqual(b_vaulttext, b"foobar", msg="encryption failed")
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256_existing_vault(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        b_orig_plaintext = b"Setec Astronomy"
        vaulttext = u'''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''

        b_plaintext = self.v.decrypt(vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

        b_vaulttext = to_bytes(vaulttext, encoding='ascii', errors='strict')
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

    def test_encrypt_decrypt_aes256_bad_hmac(self):
        # FIXME This test isn't working quite yet.
        raise SkipTest

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = 'AES256'
        # plaintext = "Setec Astronomy"
        enc_data = '''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''
        b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
        b_data = self.v._split_header(b_data)
        foo = binascii.unhexlify(b_data)
        lines = foo.splitlines()
        # line 0 is salt, line 1 is hmac, line 2+ is ciphertext
        b_salt = lines[0]
        b_hmac = lines[1]
        b_ciphertext_data = b'\n'.join(lines[2:])

        b_ciphertext = binascii.unhexlify(b_ciphertext_data)
        # b_orig_ciphertext = b_ciphertext[:]

        # now muck with the text
        # b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
        # b_munged_ciphertext = b_ciphertext
        # assert b_orig_ciphertext != b_munged_ciphertext

        b_ciphertext_data = binascii.hexlify(b_ciphertext)
        b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
        # reformat
        b_invalid_ciphertext = self.v._format_output(b_payload)

        # assert we throw an error
        self.v.decrypt(b_invalid_ciphertext)

    def test_encrypt_encrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        vaulttext = to_text(b_vaulttext, errors='strict')
        self.assertRaises(errors.AnsibleError, self.v.encrypt, b_vaulttext)
        self.assertRaises(errors.AnsibleError, self.v.encrypt, vaulttext)

    def test_decrypt_decrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, plaintext)

        b_plaintext = b"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, b_plaintext)

    def test_cipher_not_set(self):
        # not setting the cipher should default to AES256
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.v.encrypt(plaintext)
        self.assertEqual(self.v.cipher_name, "AES256")
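
The tests above exercise the older single-password VaultLib API; a round-trip sketch in that style (password and plaintext are placeholders, and the crypto backends checked by the tests must be available):

v = VaultLib('test-vault-password')
b_vaulttext = v.encrypt(u'Setec Astronomy')           # bytes, starting with b'$ANSIBLE_VAULT;1.1;AES256\n'
assert v.is_encrypted(b_vaulttext)
assert v.decrypt(b_vaulttext) == b'Setec Astronomy'   # decrypt() returns bytes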
Example no. 45
class DataLoader:

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        # TODO: replace with a ref to something that can get the password
        #       a creds/auth provider
        # self.set_vault_password(None)
        self._vaults = {}
        self._vault = VaultLib()
        self.set_vault_secrets(None)

    # TODO: since we can query vault_secrets late, we could provide this to DataLoader init
    def set_vault_secrets(self, vault_secrets):
        self._vault.secrets = vault_secrets

    def load(self, data, file_name='<string>', show_content=True):
        '''Backwards compat for now'''
        return from_yaml(data, file_name, show_content, self._vault.secrets)

    def load_from_file(self, file_name, cache=True, unsafe=False):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)
        display.debug("Loading data from %s" % file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if cache and file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (b_file_data, show_content) = self._get_file_contents(file_name)

            file_data = to_text(b_file_data, errors='surrogate_or_strict')
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        if unsafe:
            return parsed_data
        else:
            # return a deep copy here, so the cache is not affected
            return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _decrypt_if_vault_data(self, b_vault_data, b_file_name=None):
        '''Decrypt b_vault_data if encrypted and return b_data and the show_content flag'''

        if not is_encrypted(b_vault_data):
            show_content = True
            return b_vault_data, show_content

        b_ciphertext, b_version, cipher_name, vault_id = parse_vaulttext_envelope(b_vault_data)
        b_data = self._vault.decrypt(b_vault_data, filename=b_file_name)

        show_content = False
        return b_data, show_content

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name

        If the contents are vault-encrypted, it will decrypt them and return
        the decrypted data

        :arg file_name: The name of the file to read.  If this is a relative
            path, it will be expanded relative to the basedir
        :raises AnsibleFileNotFound: if the file_name does not refer to a file
        :raises AnsibleParserError: if we were unable to read the file
        :return: Returns a byte string of the file contents
        '''
        if not file_name or not isinstance(file_name, (binary_type, text_type)):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        b_file_name = to_bytes(self.path_dwim(file_name))
        # This is what we really want but have to fix unittests to make it pass
        # if not os.path.exists(b_file_name) or not os.path.isfile(b_file_name):
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound("Unable to retrieve file contents", file_name=file_name)

        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                return self._decrypt_if_vault_data(data, b_file_name)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)), orig_exc=e)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_text(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_text(given, errors='surrogate_or_strict')

        if given.startswith(to_text(os.path.sep)) or given.startswith(u'~'):
            path = given
        else:
            basedir = to_text(self._basedir, errors='surrogate_or_strict')
            path = os.path.join(basedir, given)

        return unfrackpath(path, follow=False)

    def _is_role(self, path):
        ''' imperfect role detection, roles are still valid w/o tasks|meta/main.yml|yaml|etc '''

        b_path = to_bytes(path, errors='surrogate_or_strict')
        b_upath = to_bytes(unfrackpath(path, follow=False), errors='surrogate_or_strict')

        for b_finddir in (b'meta', b'tasks'):
            for b_suffix in (b'.yml', b'.yaml', b''):
                b_main = b'main%s' % (b_suffix)
                b_tasked = os.path.join(b_finddir, b_main)

                if (
                    RE_TASKS.search(path) and
                    os.path.exists(os.path.join(b_path, b_main)) or
                    os.path.exists(os.path.join(b_upath, b_tasked)) or
                    os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked))
                ):
                    return True
        return False

    def path_dwim_relative(self, path, dirname, source, is_role=False):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        source = to_text(source, errors='surrogate_or_strict')

        # I have full path, nothing else needs to be looked at
        if source.startswith(to_text(os.path.sep)) or source.startswith(u'~'):
            search.append(unfrackpath(source, follow=False))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path, follow=False)

            # not told if role, but detect if it is a role and if so make sure you get correct base path
            if not is_role:
                is_role = self._is_role(path)

            if is_role and RE_TASKS.search(path):
                basedir = unfrackpath(os.path.dirname(path), follow=False)

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(unfrackpath(os.path.join(basedir, dirname, source), follow=False))
            self.set_basedir(cur_basedir)

            if is_role and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(unfrackpath(os.path.join(basedir, 'tasks', source), follow=False))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(unfrackpath(os.path.join(dirname, source), follow=False))

            # try to create absolute path for loader basedir
            search.append(unfrackpath(os.path.join(basedir, source), follow=False))

            # try to create absolute path for  dirname + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))

            # try to create absolute path for filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
                break

        return candidate

    def path_dwim_relative_stack(self, paths, dirname, source, is_role=False):
        '''
        find one file in first path in stack taking roles into account and adding play basedir as fallback

        :arg paths: A list of text strings which are the paths to look for the filename in.
        :arg dirname: A text string representing a directory.  The directory
            is prepended to the source to form the path to search for.
        :arg source: A text string which is the filename to search for
        :rtype: A text string
        :returns: An absolute path to the filename ``source`` if found
        :raises: An AnsibleFileNotFound Exception if the file cannot be found in any of the search paths
        '''
        b_dirname = to_bytes(dirname)
        b_source = to_bytes(source)

        result = None
        search = []
        if source is None:
            display.warning('Invalid request to find a file that matches a "null" value')
        elif source and (source.startswith('~') or source.startswith(os.path.sep)):
            # path is absolute, no relative needed, check existence and return source
            test_path = unfrackpath(b_source, follow=False)
            if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
                result = test_path
        else:
            display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
            for path in paths:
                upath = unfrackpath(path, follow=False)
                b_upath = to_bytes(upath, errors='surrogate_or_strict')
                b_mydir = os.path.dirname(b_upath)

                # if path is in role and 'tasks' not there already, add it into the search
                if (is_role or self._is_role(path)) and b_mydir.endswith(b'tasks'):
                    search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
                    search.append(os.path.join(b_mydir, b_source))
                else:
                    # don't add dirname if user already is using it in source
                    if b_source.split(b'/')[0] != b_dirname:
                        search.append(os.path.join(b_upath, b_dirname, b_source))
                    search.append(os.path.join(b_upath, b_source))

            # always append basedir as last resort
            # don't add dirname if user already is using it in source
            if b_source.split(b'/')[0] != b_dirname:
                search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
            search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

            display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
            for b_candidate in search:
                display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
                if os.path.exists(b_candidate):
                    result = to_text(b_candidate)
                    break

        if result is None:
            raise AnsibleFileNotFound(file_name=source, paths=[to_text(p) for p in search])

        return result

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path, decrypt=True):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, (binary_type, text_type)):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))

        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound(file_name=file_path)

        real_path = self.path_dwim(file_path)

        try:
            if decrypt:
                with open(to_bytes(real_path), 'rb') as f:
                    # Limit how much of the file is read since we do not know
                    # whether this is a vault file and therefore it could be very
                    # large.
                    if is_encrypted_file(f, count=len(b_HEADER)):
                        # if the file is encrypted and no password was specified,
                        # the decrypt call would throw an error, but we check first
                        # since the decrypt function doesn't know the file name
                        data = f.read()
                        if not self._vault.secrets:
                            raise AnsibleParserError("A vault password or secret must be specified to decrypt %s" % to_native(file_path))

                        data = self._vault.decrypt(data, filename=real_path)
                        # Make a temp file
                        real_path = self._create_content_tempfile(data)
                        self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)), orig_exc=e)

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except Exception as e:
                display.warning("Unable to cleanup temp files: %s" % to_native(e))

    def find_vars_files(self, path, name, extensions=None, allow_dir=True):
        """
        Find vars files in a given path with specified name. This will find
        files in a dir named <name>/ or a file called <name> ending in known
        extensions.
        """

        b_path = to_bytes(os.path.join(path, name))
        found = []

        if extensions is None:
            # Look for file with no extension first to find dir before file
            extensions = [''] + C.YAML_FILENAME_EXTENSIONS
        # add valid extensions to name
        for ext in extensions:

            if '.' in ext:
                full_path = b_path + to_bytes(ext)
            elif ext:
                full_path = b'.'.join([b_path, to_bytes(ext)])
            else:
                full_path = b_path

            if self.path_exists(full_path):
                if self.is_directory(full_path):
                    if allow_dir:
                        found.extend(self._get_dir_vars_files(to_text(full_path), extensions))
                    else:
                        continue  # dir found but not allowed; try the next extension
                else:
                    found.append(full_path)
                break
        return found

    def _get_dir_vars_files(self, path, extensions):
        found = []
        for spath in sorted(self.list_directory(path)):
            if not spath.startswith(u'.') and not spath.endswith(u'~'):  # skip hidden and backups

                ext = os.path.splitext(spath)[-1]
                full_spath = os.path.join(path, spath)

                if self.is_directory(full_spath) and not ext:  # recursive search if dir
                    found.extend(self._get_dir_vars_files(full_spath, extensions))
                elif self.is_file(full_spath) and (not ext or to_text(ext) in extensions):
                    # only consider files with valid extensions or no extension
                    found.append(full_spath)

        return found
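
Unlike the earlier examples, this DataLoader takes vault secrets rather than a single password. A minimal sketch of wiring it up follows; the import location, the 'default' vault id string, and the password are assumptions for illustration only:

from ansible.parsing.vault import VaultSecret   # assumed import location

loader = DataLoader()
# secrets are (vault_id, VaultSecret) pairs; 'default' is just a placeholder id here
loader.set_vault_secrets([('default', VaultSecret(b'hypothetical-password'))])
data = loader.load_from_file('group_vars/all/secrets.yml')   # decrypted via _decrypt_if_vault_data()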