Example #1
 def test_split_header(self):
     v = VaultLib("ansible")
     data = "$ANSIBLE_VAULT;9.9;TEST\nansible"
     rdata = v._split_header(data)
     lines = rdata.split("\n")
     assert lines[0] == "ansible"
     assert v.cipher_name == "TEST", "cipher name was not set"
     assert v.version == "9.9"
Example #2
 def test_split_header(self):
     v = VaultLib('ansible')
     data = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
     rdata = v._split_header(data)
     lines = rdata.split(b'\n')
     assert lines[0] == b"ansible"
     assert v.cipher_name == 'TEST', "cipher name was not set"
     assert v.b_version == "9.9"
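
Both examples above exercise _split_header, which strips the $ANSIBLE_VAULT;<version>;<cipher> envelope line from the payload. As a rough illustration only (a hypothetical standalone helper, not VaultLib's actual implementation), the parsing these tests rely on can be sketched like this:

def split_vault_envelope(b_vaulttext):
    # first line is the envelope: b"$ANSIBLE_VAULT;<version>;<cipher>"
    b_lines = b_vaulttext.split(b'\n')
    b_version, b_cipher_name = b_lines[0].split(b';')[1:3]
    # everything after the header line is the (usually hex-encoded) payload
    return b_version, b_cipher_name, b'\n'.join(b_lines[1:])

version, cipher, payload = split_vault_envelope(b"$ANSIBLE_VAULT;9.9;TEST\nansible")
assert (version, cipher, payload) == (b"9.9", b"TEST", b"ansible")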
Example #3
 def test_encrypt_decrypt_aes256(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     v.cipher_name = 'AES256'
     enc_data = v.encrypt("foobar")
     dec_data = v.decrypt(enc_data)
     assert enc_data != "foobar", "encryption failed"
     assert dec_data == "foobar", "decryption failed"
Example #4
 def test_encrypt_decrypt_aes(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     v.cipher_name = u'AES'
     # AES encryption code has been removed, so this is old output for
     # AES-encrypted 'foobar' with password 'ansible'.
     enc_data = '$ANSIBLE_VAULT;1.1;AES\n53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3\nfe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e\n786a5a15efeb787e1958cbdd480d076c\n'
     dec_data = v.decrypt(enc_data)
     assert dec_data == "foobar", "decryption failed"
Example #5
def get_key_value(key):
    vault_password = open("/tmp/.vaultpwd").readlines()[0].rstrip('\n')
    data = open("keychain.yml").read()

    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        data = vault.decrypt(data)
        ydata = yaml.load(data)
        return ydata['aws'][key]
    else:
        return None
Example #6
 def test_decrypt_decrypted(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     data = "ansible"
     error_hit = False
     try:
         dec_data = v.decrypt(data)
     except errors.AnsibleError as e:
         error_hit = True
     assert error_hit, "No error was thrown when trying to decrypt data without a header"
Example #7
 def test_encrypt_encrypted(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     v.cipher_name = 'AES'
     data = "$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(six.b("ansible"))
     error_hit = False
     try:
         enc_data = v.encrypt(data)
     except errors.AnsibleError as e:
         error_hit = True
     assert error_hit, "No error was thrown when trying to encrypt data with a header"
Example #8
 def test_cipher_not_set(self):
     # not setting the cipher should default to AES256
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     data = "ansible"
     error_hit = False
     try:
         enc_data = v.encrypt(data)
     except errors.AnsibleError as e:
         error_hit = True
     assert not error_hit, "An error was thrown when trying to encrypt data without the cipher set"
     assert v.cipher_name == "AES256", "cipher name is not set to AES256: %s" % v.cipher_name
Example #9
 def test_add_header(self):
     v = VaultLib("ansible")
     v.cipher_name = "TEST"
     sensitive_data = "ansible"
     data = v._add_header(sensitive_data)
     lines = data.split("\n")
     assert len(lines) > 1, "failed to properly add header"
     header = lines[0]
     assert header.endswith(";TEST"), "header does not end with cipher name"
     header_parts = header.split(";")
     assert len(header_parts) == 3, "header has the wrong number of parts"
     assert header_parts[0] == "$ANSIBLE_VAULT", "header does not start with $ANSIBLE_VAULT"
     assert header_parts[1] == v.version, "header version is incorrect"
     assert header_parts[2] == "TEST", "header does not end with cipher name"
Example #10
 def test_format_output(self):
     v = VaultLib('ansible')
     v.cipher_name = "TEST"
     sensitive_data = "ansible"
     data = v._format_output(sensitive_data)
     lines = data.split(b'\n')
     assert len(lines) > 1, "failed to properly add header"
     header = to_unicode(lines[0])
     assert header.endswith(';TEST'), "header does not end with cipher name"
     header_parts = header.split(';')
     assert len(header_parts) == 3, "header has the wrong number of parts"
     assert header_parts[0] == '$ANSIBLE_VAULT', "header does not start with $ANSIBLE_VAULT"
     assert header_parts[1] == v.b_version, "header version is incorrect"
     assert header_parts[2] == 'TEST', "header does not end with cipher name"
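
Examples #9 and #10 test the inverse operation: prepending the envelope header to the ciphertext. A minimal sketch of such a formatter (a hypothetical helper shaped only by what the assertions check, not the real _format_output):

def format_vault_output(b_ciphertext, b_version=b'1.1', cipher_name=u'TEST'):
    # build b"$ANSIBLE_VAULT;<version>;<cipher>\n<ciphertext>\n"
    b_header = b';'.join([b'$ANSIBLE_VAULT', b_version, cipher_name.encode('utf-8')])
    return b_header + b'\n' + b_ciphertext + b'\n'

b_out = format_vault_output(b'ansible')
assert b_out.split(b'\n')[0] == b'$ANSIBLE_VAULT;1.1;TEST'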
Example #11
    def test_rekey_migration(self):
        """
        Skip testing rekeying files if we don't have access to AES, KDF or
        Counter, or we are running on python3 since VaultAES hasn't been backported.
        """
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or sys.version > '3':
            raise SkipTest

        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = VaultEditor(None, "ansible", v10_file.name)

        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.rekey_file('ansible2')
        except errors.AnsibleError as e:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit == False, "error rekeying 1.0 file to 1.1"

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib("ansible2")
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError as e:
            error_hit = True

        os.unlink(v10_file.name)

        assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name
        assert error_hit == False, "error decrypting migrated 1.0 file"
        assert dec_data.strip() == "foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
Example #12
    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        # TODO: replace with a ref to something that can get the password
        #       a creds/auth provider
        # self.set_vault_password(None)
        self._vaults = {}
        self._vault = VaultLib()
        self.set_vault_secrets(None)
Example #13
File: api.py  Project: jptomo/ansible-vault
class Vault(object):
    '''R/W an ansible-vault yaml file'''

    def __init__(self, password):
        self._ansible_ver = _ansible_ver

        self.secret = password.encode('utf-8')
        self.vault = VaultLib(self._make_secrets(self.secret))

    def _make_secrets(self, secret):
        if self._ansible_ver < 2.4:
            return secret

        from ansible.constants import DEFAULT_VAULT_ID_MATCH
        from ansible.parsing.vault import VaultSecret
        return [(DEFAULT_VAULT_ID_MATCH, VaultSecret(secret))]

    def load_raw(self, stream):
        """Read vault stream and return raw data."""
        return self.vault.decrypt(stream)

    def dump_raw(self, text, stream=None):
        """Encrypt raw data and write to stream."""
        encrypted = self.vault.encrypt(text)
        if stream:
            stream.write(encrypted)
        else:
            return encrypted

    def load(self, stream):
        """Read vault steam and return python object."""
        return yaml.safe_load(self.load_raw(stream))

    def dump(self, data, stream=None):
        """Encrypt data and print stdout or write to stream."""
        yaml_text = yaml.dump(
            data,
            default_flow_style=False,
            allow_unicode=True)
        return self.dump_raw(yaml_text, stream=stream)
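
A short usage sketch for the wrapper above; the password, file path and key name are placeholders:

vault = Vault('my-vault-password')

# decrypt an existing vault file into a python object
with open('group_vars/all/vault.yml') as f:
    secrets = vault.load(f.read())

# encrypt a dict; without a stream, dump() returns the encrypted text
encrypted = vault.dump({'db_password': 'changeme'})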
Example #14
def parse_file(filename, vault_enc, vault_pass):
    if json.loads(vault_enc.lower()):
        vault_pass = vault_pass.encode('utf-8')
        vault = VaultLib(_make_secrets(vault_pass))
        cred_str = vault.decrypt(open(filename).read())
    else:
        cred_str = open(filename, "r").read()
    try:
        out = json.loads(cred_str)
    except Exception as e:
        try:
            out = yaml.load(cred_str)
        except Exception as e:
            try:
                config = ConfigDict()
                f = open(filename)
                config.readfp(f)
                out = config.as_dict()
                f.close()
            except Exception as e:
                module.fail_json(msg="Error: {0} ".format(str(e)))
    return out
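
parse_file depends on a module-level _make_secrets helper that is not part of this snippet; presumably it mirrors the method of the same name in Example #13. A hedged sketch, assuming an _ansible_ver variable holding the installed Ansible version is available:

def _make_secrets(secret):
    # Ansible < 2.4 VaultLib takes the raw password bytes directly;
    # newer versions expect a list of (vault_id, VaultSecret) tuples.
    if _ansible_ver < 2.4:
        return secret
    from ansible.constants import DEFAULT_VAULT_ID_MATCH
    from ansible.parsing.vault import VaultSecret
    return [(DEFAULT_VAULT_ID_MATCH, VaultSecret(secret))]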
Example #15
    def test_rekey_migration(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest

        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(v10_data)

        ve = VaultEditor(None, "ansible", v10_file.name)

        # make sure the password functions for the cipher
        error_hit = False
        try:        
            ve.rekey_file('ansible2')
        except errors.AnsibleError as e:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit == False, "error rekeying 1.0 file to 1.1"            

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib("ansible2")
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError as e:
            error_hit = True

        os.unlink(v10_file.name)

        assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name
        assert error_hit == False, "error decrypting migrated 1.0 file"            
        assert dec_data.strip() == "foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
Example #16
    def test_rekey_migration(self):
        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = self._vault_editor(self._secrets("ansible"))

        # make sure the password functions for the cipher
        error_hit = False
        new_secrets = self._secrets("ansible2")
        try:
            ve.rekey_file(v10_file.name, vault.match_encrypt_secret(new_secrets)[1])
        except errors.AnsibleError:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit is False, "error rekeying 1.0 file to 1.1"

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib(new_secrets)
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError:
            error_hit = True

        os.unlink(v10_file.name)

        self.assertIn(b'AES256', fdata, 'AES256 was not found in vault file %s' % to_text(fdata))
        assert error_hit is False, "error decrypting migrated 1.0 file"
        assert dec_data.strip() == b"foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
Example #17
    def get_real_file(self, file_path):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))

        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % to_native(file_path))

        if not self._vault:
            self._vault = VaultLib(password="")

        real_path = self.path_dwim(file_path)

        try:
            with open(to_bytes(real_path), 'rb') as f:
                # Limit how much of the file is read since we do not know
                # whether this is a vault file and therefore it could be very
                # large.
                if is_encrypted_file(f, count=len(b_HEADER)):
                    # if the file is encrypted and no password was specified,
                    # the decrypt call would throw an error, but we check first
                    # since the decrypt function doesn't know the file name
                    data = f.read()
                    if not self._vault_password:
                        raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)

                    data = self._vault.decrypt(data, filename=real_path)
                    # Make a temp file
                    real_path = self._create_content_tempfile(data)
                    self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)))
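
A typical call pattern for get_real_file, assuming loader is an already-configured DataLoader and the path is a placeholder; the temporary decrypted copy should be removed via the loader's cleanup helper (see cleanup_tmp_file in Example #22) once consumed:

real_path = loader.get_real_file('group_vars/all/vault.yml')
try:
    with open(real_path, 'rb') as f:
        content = f.read()
finally:
    loader.cleanup_tmp_file(real_path)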
Example #18
    def get_real_file(self, file_path):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_path))

        if not self.path_exists(file_path) or not self.is_file(file_path):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_path)

        if not self._vault:
            self._vault = VaultLib(password="")

        real_path = self.path_dwim(file_path)

        try:
            with open(to_bytes(real_path), 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    # if the file is encrypted and no password was specified,
                    # the decrypt call would throw an error, but we check first
                    # since the decrypt function doesn't know the file name
                    if not self._vault_password:
                        raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)

                    data = self._vault.decrypt(data)
                    # Make a temp file
                    real_path = self._create_content_tempfile(data)
                    self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (real_path, str(e)))
Example #19
    def create(self):
        try:
            print('')
            new_file = self.args.file
            if new_file is None:
                new_file = input('File to create: ')
            if os.path.exists(os.path.join(self.args.vault_path, new_file)):
                eprint('This file already exists')
                sys.exit(2)

            plugin_name = self.args.plugin
            if plugin_name is None:
                plugin_name = input('Keyring plugin name to use [' +
                                    ', '.join(list_plugins()) + ']: ')
            plugin = self.get_plugin_instance(plugin_name)
            id = plugin.generate_id(self.args.plugin_vars)

            print('New ID to use: ' + id)
            print('')
            stdin_pass = self.args.stdin_pass
            if not stdin_pass and sys.stdin.isatty():
                password = getpass.getpass('New password: ')
                password_confirm = getpass.getpass('Confirm password: ')
                if password != password_confirm:
                    print('Passwords mismatch')
                    sys.exit(2)
            else:
                password = sys.stdin.read()

            password = password.strip()
            if password == '':
                print('Your password is empty !')
                sys.exit(2)
        except KeyboardInterrupt:
            print('')
            sys.exit(0)

        try:
            new_version = plugin.set_password(id, password)
            id = plugin.append_id_version(new_version)

            vault_metadata = get_metadata(self.args.vault_path)
            vault_metadata['vault_ids'].append({
                METADATA_ID_KEY: id,
                METADATA_PLUGIN_KEY: plugin_name,
                METADATA_VAULT_FILES: [new_file]
            })
            write_metadata(vault_metadata, self.args.vault_path)

            VaultLib = get_vault_lib()
            vault_api = VaultLib(_make_secrets(password))
            with open(os.path.join(self.args.vault_path, new_file),
                      'w') as stream:
                encrypted = vault_api.encrypt('---')
                stream.write(encrypted)
        except Exception as e:
            eprint(e)
            if self.args.verbose:
                import traceback
                traceback.print_exc()
            sys.exit(2)
Example #20
 def set_vault_password(self, vault_password):
     self._vault_password = vault_password
     self._vault = VaultLib(password=vault_password)
Example #21
File: __init__.py  Project: jinnko/ansible
 def __init__(self, vault_password=None):
     self._basedir = '.'
     self._vault = VaultLib(password=vault_password)
Example #22
class DataLoader:

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        # TODO: replace with a ref to something that can get the password
        #       a creds/auth provider
        # self.set_vault_password(None)
        self._vaults = {}
        self._vault = VaultLib()
        self.set_vault_secrets(None)

    # TODO: since we can query vault_secrets late, we could provide this to DataLoader init
    def set_vault_secrets(self, vault_secrets):
        self._vault.secrets = vault_secrets

    def load(self, data, file_name='<string>', show_content=True):
        '''Backwards compat for now'''
        return from_yaml(data, file_name, show_content, self._vault.secrets)

    def load_from_file(self, file_name, cache=True, unsafe=False):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)
        display.debug("Loading data from %s" % file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if cache and file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (b_file_data, show_content) = self._get_file_contents(file_name)

            file_data = to_text(b_file_data, errors='surrogate_or_strict')
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        if unsafe:
            return parsed_data
        else:
            # return a deep copy here, so the cache is not affected
            return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _decrypt_if_vault_data(self, b_vault_data, b_file_name=None):
        '''Decrypt b_vault_data if encrypted and return b_data and the show_content flag'''

        if not is_encrypted(b_vault_data):
            show_content = True
            return b_vault_data, show_content

        b_ciphertext, b_version, cipher_name, vault_id = parse_vaulttext_envelope(b_vault_data)
        b_data = self._vault.decrypt(b_vault_data, filename=b_file_name)

        show_content = False
        return b_data, show_content

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name

        If the contents are vault-encrypted, it will decrypt them and return
        the decrypted data

        :arg file_name: The name of the file to read.  If this is a relative
            path, it will be expanded relative to the basedir
        :raises AnsibleFileNotFound: if the file_name does not refer to a file
        :raises AnsibleParserError: if we were unable to read the file
        :return: Returns a byte string of the file contents
        '''
        if not file_name or not isinstance(file_name, (binary_type, text_type)):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        b_file_name = to_bytes(self.path_dwim(file_name))
        # This is what we really want but have to fix unittests to make it pass
        # if not os.path.exists(b_file_name) or not os.path.isfile(b_file_name):
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound("Unable to retrieve file contents", file_name=file_name)

        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                return self._decrypt_if_vault_data(data, b_file_name)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)), orig_exc=e)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_text(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_text(given, errors='surrogate_or_strict')

        if given.startswith(to_text(os.path.sep)) or given.startswith(u'~'):
            path = given
        else:
            basedir = to_text(self._basedir, errors='surrogate_or_strict')
            path = os.path.join(basedir, given)

        return unfrackpath(path, follow=False)

    def _is_role(self, path):
        ''' imperfect role detection, roles are still valid w/o tasks|meta/main.yml|yaml|etc '''

        b_path = to_bytes(path, errors='surrogate_or_strict')
        b_upath = to_bytes(unfrackpath(path, follow=False), errors='surrogate_or_strict')

        for b_finddir in (b'meta', b'tasks'):
            for b_suffix in (b'.yml', b'.yaml', b''):
                b_main = b'main%s' % (b_suffix)
                b_tasked = os.path.join(b_finddir, b_main)

                if (
                    RE_TASKS.search(path) and
                    os.path.exists(os.path.join(b_path, b_main)) or
                    os.path.exists(os.path.join(b_upath, b_tasked)) or
                    os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked))
                ):
                    return True
        return False

    def path_dwim_relative(self, path, dirname, source, is_role=False):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        source = to_text(source, errors='surrogate_or_strict')

        # I have full path, nothing else needs to be looked at
        if source.startswith(to_text(os.path.sep)) or source.startswith(u'~'):
            search.append(unfrackpath(source, follow=False))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path, follow=False)

            # not told if role, but detect if it is a role and if so make sure you get correct base path
            if not is_role:
                is_role = self._is_role(path)

            if is_role and RE_TASKS.search(path):
                basedir = unfrackpath(os.path.dirname(path), follow=False)

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(unfrackpath(os.path.join(basedir, dirname, source), follow=False))
            self.set_basedir(cur_basedir)

            if is_role and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(unfrackpath(os.path.join(basedir, 'tasks', source), follow=False))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(unfrackpath(os.path.join(dirname, source), follow=False))

            # try to create absolute path for loader basedir
            search.append(unfrackpath(os.path.join(basedir, source), follow=False))

            # try to create absolute path for  dirname + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))

            # try to create absolute path for filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
                break

        return candidate

    def path_dwim_relative_stack(self, paths, dirname, source, is_role=False):
        '''
        find one file in first path in stack taking roles into account and adding play basedir as fallback

        :arg paths: A list of text strings which are the paths to look for the filename in.
        :arg dirname: A text string representing a directory.  The directory
            is prepended to the source to form the path to search for.
        :arg source: A text string which is the filename to search for
        :rtype: A text string
        :returns: An absolute path to the filename ``source`` if found
        :raises: An AnsibleFileNotFound exception if the file is not found in any of the search paths
        '''
        b_dirname = to_bytes(dirname)
        b_source = to_bytes(source)

        result = None
        search = []
        if source is None:
            display.warning('Invalid request to find a file that matches a "null" value')
        elif source and (source.startswith('~') or source.startswith(os.path.sep)):
            # path is absolute, no relative needed, check existence and return source
            test_path = unfrackpath(b_source, follow=False)
            if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
                result = test_path
        else:
            display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
            for path in paths:
                upath = unfrackpath(path, follow=False)
                b_upath = to_bytes(upath, errors='surrogate_or_strict')
                b_mydir = os.path.dirname(b_upath)

                # if path is in role and 'tasks' not there already, add it into the search
                if (is_role or self._is_role(path)) and b_mydir.endswith(b'tasks'):
                    search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
                    search.append(os.path.join(b_mydir, b_source))
                else:
                    # don't add dirname if user already is using it in source
                    if b_source.split(b'/')[0] != b_dirname:
                        search.append(os.path.join(b_upath, b_dirname, b_source))
                    search.append(os.path.join(b_upath, b_source))

            # always append basedir as last resort
            # don't add dirname if user already is using it in source
            if b_source.split(b'/')[0] != b_dirname:
                search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
            search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

            display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
            for b_candidate in search:
                display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
                if os.path.exists(b_candidate):
                    result = to_text(b_candidate)
                    break

        if result is None:
            raise AnsibleFileNotFound(file_name=source, paths=[to_text(p) for p in search])

        return result

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path, decrypt=True):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, (binary_type, text_type)):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))

        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound(file_name=file_path)

        real_path = self.path_dwim(file_path)

        try:
            if decrypt:
                with open(to_bytes(real_path), 'rb') as f:
                    # Limit how much of the file is read since we do not know
                    # whether this is a vault file and therefore it could be very
                    # large.
                    if is_encrypted_file(f, count=len(b_HEADER)):
                        # if the file is encrypted and no password was specified,
                        # the decrypt call would throw an error, but we check first
                        # since the decrypt function doesn't know the file name
                        data = f.read()
                        if not self._vault.secrets:
                            raise AnsibleParserError("A vault password or secret must be specified to decrypt %s" % to_native(file_path))

                        data = self._vault.decrypt(data, filename=real_path)
                        # Make a temp file
                        real_path = self._create_content_tempfile(data)
                        self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)), orig_exc=e)

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except Exception as e:
                display.warning("Unable to cleanup temp files: %s" % to_native(e))

    def find_vars_files(self, path, name, extensions=None, allow_dir=True):
        """
        Find vars files in a given path with specified name. This will find
        files in a dir named <name>/ or a file called <name> ending in known
        extensions.
        """

        b_path = to_bytes(os.path.join(path, name))
        found = []

        if extensions is None:
            # Look for file with no extension first to find dir before file
            extensions = [''] + C.YAML_FILENAME_EXTENSIONS
        # add valid extensions to name
        for ext in extensions:

            if '.' in ext:
                full_path = b_path + to_bytes(ext)
            elif ext:
                full_path = b'.'.join([b_path, to_bytes(ext)])
            else:
                full_path = b_path

            if self.path_exists(full_path):
                if self.is_directory(full_path):
                    if allow_dir:
                        found.extend(self._get_dir_vars_files(to_text(full_path), extensions))
                    else:
                        continue
                else:
                    found.append(full_path)
                break
        return found

    def _get_dir_vars_files(self, path, extensions):
        found = []
        for spath in sorted(self.list_directory(path)):
            if not spath.startswith(u'.') and not spath.endswith(u'~'):  # skip hidden and backups

                ext = os.path.splitext(spath)[-1]
                full_spath = os.path.join(path, spath)

                if self.is_directory(full_spath) and not ext:  # recursive search if dir
                    found.extend(self._get_dir_vars_files(full_spath, extensions))
                elif self.is_file(full_spath) and (not ext or to_text(ext) in extensions):
                    # only consider files with valid extensions or no extension
                    found.append(full_spath)

        return found
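
Since this DataLoader no longer takes a password in its constructor, callers hand it a list of secrets after construction. A minimal usage sketch (password value and path are placeholders) using the Ansible 2.4+ vault API:

from ansible.constants import DEFAULT_VAULT_ID_MATCH
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import VaultSecret

loader = DataLoader()
loader.set_vault_secrets([(DEFAULT_VAULT_ID_MATCH, VaultSecret(b'example-password'))])
data = loader.load_from_file('group_vars/all/vault.yml')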
Example #23
class TestVaultLib(unittest.TestCase):
    def setUp(self):
        self.v = VaultLib('test-vault-password')

    def test_encrypt(self):
        plaintext = u'Some text to encrypt in a café'
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_encrypt_bytes(self):

        plaintext = to_bytes(u'Some text to encrypt in a café')
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_is_encrypted(self):
        self.assertFalse(self.v.is_encrypted(b"foobar"), msg="encryption check on plaintext yielded false positive")
        b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        self.assertTrue(self.v.is_encrypted(b_data), msg="encryption check on headered text failed")

    def test_format_output(self):
        self.v.cipher_name = "TEST"
        b_ciphertext = b"ansible"
        b_vaulttext = self.v._format_output(b_ciphertext)
        b_lines = b_vaulttext.split(b'\n')
        self.assertGreater(len(b_lines), 1, msg="failed to properly add header")

        b_header = b_lines[0]
        self.assertTrue(b_header.endswith(b';TEST'), msg="header does not end with cipher name")

        b_header_parts = b_header.split(b';')
        self.assertEqual(len(b_header_parts), 3, msg="header has the wrong number of parts")
        self.assertEqual(b_header_parts[0], b'$ANSIBLE_VAULT', msg="header does not start with $ANSIBLE_VAULT")
        self.assertEqual(b_header_parts[1], self.v.b_version, msg="header version is incorrect")
        self.assertEqual(b_header_parts[2], b'TEST', msg="header does not end with cipher name")

    def test_split_header(self):
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
        b_ciphertext = self.v._split_header(b_vaulttext)
        b_lines = b_ciphertext.split(b'\n')
        self.assertEqual(b_lines[0], b"ansible", msg="Payload was not properly split from the header")
        self.assertEqual(self.v.cipher_name, u'TEST', msg="cipher name was not properly set")
        self.assertEqual(self.v.b_version, b"9.9", msg="version was not properly set")

    def test_encrypt_decrypt_aes(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        self.v.b_password = b'ansible'
        # AES encryption code has been removed, so this is old output for
        # AES-encrypted 'foobar' with password 'ansible'.
        b_vaulttext = b'''$ANSIBLE_VAULT;1.1;AES
53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3
fe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e
786a5a15efeb787e1958cbdd480d076c
'''
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        plaintext = u"foobar"
        b_vaulttext = self.v.encrypt(plaintext)
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertNotEqual(b_vaulttext, b"foobar", msg="encryption failed")
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256_existing_vault(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        b_orig_plaintext = b"Setec Astronomy"
        vaulttext = u'''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''

        b_plaintext = self.v.decrypt(vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

        b_vaulttext = to_bytes(vaulttext, encoding='ascii', errors='strict')
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

    def test_encrypt_decrypt_aes256_bad_hmac(self):
        # FIXME This test isn't working quite yet.
        raise SkipTest

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = 'AES256'
        # plaintext = "Setec Astronomy"
        enc_data = '''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''
        b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
        b_data = self.v._split_header(b_data)
        foo = binascii.unhexlify(b_data)
        lines = foo.splitlines()
        # line 0 is salt, line 1 is hmac, line 2+ is ciphertext
        b_salt = lines[0]
        b_hmac = lines[1]
        b_ciphertext_data = b'\n'.join(lines[2:])

        b_ciphertext = binascii.unhexlify(b_ciphertext_data)
        # b_orig_ciphertext = b_ciphertext[:]

        # now muck with the text
        # b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
        # b_munged_ciphertext = b_ciphertext
        # assert b_orig_ciphertext != b_munged_ciphertext

        b_ciphertext_data = binascii.hexlify(b_ciphertext)
        b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
        # reformat
        b_invalid_ciphertext = self.v._format_output(b_payload)

        # assert we throw an error
        self.v.decrypt(b_invalid_ciphertext)

    def test_encrypt_encrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        vaulttext = to_text(b_vaulttext, errors='strict')
        self.assertRaises(errors.AnsibleError, self.v.encrypt, b_vaulttext)
        self.assertRaises(errors.AnsibleError, self.v.encrypt, vaulttext)

    def test_decrypt_decrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, plaintext)

        b_plaintext = b"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, b_plaintext)

    def test_cipher_not_set(self):
        # not setting the cipher should default to AES256
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.v.encrypt(plaintext)
        self.assertEqual(self.v.cipher_name, "AES256")
Example #24
 def test_decrypt_decrypted(self):
     if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
         raise SkipTest
     v = VaultLib('ansible')
     data = "ansible"
     self.assertRaises(errors.AnsibleError, v.decrypt, data)
Example #25
class DataLoader():
    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='foo')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''
    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault_password = vault_password
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string. 
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            # if loading JSON failed for any reason, we go ahead
            # and try to parse it as YAML instead

            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                new_data = unicode(data)
            else:
                new_data = data
            try:
                new_data = self._safe_load(new_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos
            return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            return self._FILE_CACHE[file_name]

        # read the file contents and load the data structure from them
        (file_data, show_content) = self._get_file_contents(file_name)
        parsed_data = self.load(data=file_data,
                                file_name=file_name,
                                show_content=show_content)

        # cache the file contents for next time
        self._FILE_CACHE[file_name] = parsed_data

        return parsed_data

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(path)

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(path)

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(path)

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            loader.dispose()

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, basestring):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleParserError(
                "the file_name '%s' does not exist, or is not readable" %
                file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1,
                                   yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR,
                                 obj=err_obj,
                                 show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_unicode(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)

        if given.startswith("/"):
            return os.path.abspath(given)
        elif given.startswith("~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            return os.path.abspath(os.path.join(self._basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        ''' find one file in a role/playbook dirs with/without dirname subdir '''

        search = []
        isrole = False

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith('/'):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))

            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if path.endswith('tasks') and os.path.exists(os.path.join(path,'main.yml')) \
                or os.path.exists(os.path.join(path,'tasks/main.yml')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(
                self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(
                    self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(candidate):
                break

        return candidate
Example #26
 def set_vault_password(self, b_vault_password):
     self._b_vault_password = b_vault_password
     self._vault = VaultLib(b_password=b_vault_password)
Example #27
class DataLoader:

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        self.set_vault_password(None)

    def set_vault_password(self, b_vault_password):
        self._b_vault_password = b_vault_password
        self._vault = VaultLib(b_password=b_vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''
        new_data = None

        # YAML parser will take JSON as it is a subset.
        if isinstance(data, AnsibleUnicode):
            # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
            # they are unable to cope with our subclass.
            # Unwrap and re-wrap the unicode so we can keep track of line
            # numbers
            in_data = text_type(data)
        else:
            in_data = data

        try:
            # we first try to load this data as JSON
            new_data = json.loads(data)
        except:
            # must not be JSON, let the rest try
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data
            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos

        return new_data

    def load_from_file(self, file_name, cache=True, unsafe=False):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)
        display.debug("Loading data from %s" % file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if cache and file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (b_file_data, show_content) = self._get_file_contents(file_name)

            file_data = to_text(b_file_data, errors='surrogate_or_strict')
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        if unsafe:
            return parsed_data
        else:
            # return a deep copy here, so the cache is not affected
            return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name, self._b_vault_password)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass  # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        b_file_name = to_bytes(file_name)
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound("the file named '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                if is_encrypted(data):
                    data = self._vault.decrypt(data, filename=b_file_name)
                    show_content = False

            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)), orig_exc=e)

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content, orig_exc=yaml_exc)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_text(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_text(given, errors='surrogate_or_strict')

        if given.startswith(u"/"):
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            basedir = to_text(self._basedir, errors='surrogate_or_strict')
            return os.path.abspath(os.path.join(basedir, given))

    def _is_role(self, path):
        ''' imperfect role detection, roles are still valid w/o main.yml/yaml/etc '''

        isit = False
        b_path = to_bytes(path, errors='surrogate_or_strict')
        b_upath = to_bytes(unfrackpath(path), errors='surrogate_or_strict')

        for suffix in (b'.yml', b'.yaml', b''):
            b_main = b'main%s' % (suffix)
            b_tasked = b'tasks/%s' % (b_main)

            if (
                b_path.endswith(b'tasks') and
                os.path.exists(os.path.join(b_path, b_main)) or
                os.path.exists(os.path.join(b_upath, b_tasked)) or
                os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked))
            ):
                isit = True
                break

        return isit

    def path_dwim_relative(self, path, dirname, source, is_role=False):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith(os.path.sep):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path)

            # not told if role, but detect if it is a role and if so make sure you get correct base path
            if not is_role:
                is_role = self._is_role(path)

            if is_role and path.endswith('tasks'):
                basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if is_role and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
                break

        return candidate

    def path_dwim_relative_stack(self, paths, dirname, source, is_role=False):
        '''
        find one file in first path in stack taking roles into account and adding play basedir as fallback

        :arg paths: A list of text strings which are the paths to look for the filename in.
        :arg dirname: A text string representing a directory.  The directory
            is prepended to the source to form the path to search for.
        :arg source: A text string which is the filename to search for
        :rtype: A text string
        :returns: An absolute path to the filename ``source``
        '''
        b_dirname = to_bytes(dirname)
        b_source = to_bytes(source)

        result = None
        if source is None:
            display.warning('Invalid request to find a file that matches a "null" value')
        elif source and (source.startswith('~') or source.startswith(os.path.sep)):
            # path is absolute, no relative needed, check existence and return source
            test_path = unfrackpath(b_source)
            if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
                result = test_path
        else:
            search = []
            display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
            for path in paths:
                upath = unfrackpath(path)
                b_upath = to_bytes(upath, errors='surrogate_or_strict')
                b_mydir = os.path.dirname(b_upath)

                # FIXME: this detection fails with non main.yml roles
                # if path is in role and 'tasks' not there already, add it into the search
                if is_role or self._is_role(path):
                    if b_mydir.endswith(b'tasks'):
                        search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
                        search.append(os.path.join(b_mydir, b_source))
                    else:
                        # don't add dirname if user already is using it in source
                        if b_source.split(b'/')[0] != b_dirname:
                            search.append(os.path.join(b_upath, b_dirname, b_source))
                        search.append(os.path.join(b_upath, b_source))

                elif b_dirname not in b_source.split(b'/'):
                    # don't add dirname if user already is using it in source
                    if b_source.split(b'/')[0] != b_dirname:
                        search.append(os.path.join(b_upath, b_dirname, b_source))
                    search.append(os.path.join(b_upath, b_source))

            # always append basedir as last resort
            # don't add dirname if user already is using it in source
            if b_source.split(b'/')[0] != b_dirname:
                search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
            search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

            display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
            for b_candidate in search:
                display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
                if os.path.exists(b_candidate):
                    result = to_text(b_candidate)
                    break

        return result

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path, decrypt=True):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))

        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound("the file named '%s' does not exist, or is not accessible" % to_native(file_path))

        if not self._vault:
            self._vault = VaultLib(b_password="")

        real_path = self.path_dwim(file_path)

        try:
            if decrypt:
                with open(to_bytes(real_path), 'rb') as f:
                    # Limit how much of the file is read since we do not know
                    # whether this is a vault file and therefore it could be very
                    # large.
                    if is_encrypted_file(f, count=len(b_HEADER)):
                        # if the file is encrypted and no password was specified,
                        # the decrypt call would throw an error, but we check first
                        # since the decrypt function doesn't know the file name
                        data = f.read()
                        if not self._b_vault_password:
                            raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)

                        data = self._vault.decrypt(data, filename=real_path)
                        # Make a temp file
                        real_path = self._create_content_tempfile(data)
                        self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)), orig_exc=e)

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except Exception as e:
                display.warning("Unable to cleanup temp files: %s" % to_native(e))
Example #28
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        content = self._task.args.get('content', None)
        dest    = self._task.args.get('dest', None)
        raw     = boolean(self._task.args.get('raw', 'no'))
        force   = boolean(self._task.args.get('force', 'yes'))
        faf     = self._task.first_available_file
        remote_src = boolean(self._task.args.get('remote_src', False))

        if (source is None and content is None and faf is None) or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result
        elif (source is not None or faf is not None) and content is not None:
            result['failed'] = True
            result['msg'] = "src and content are mutually exclusive"
            return result
        elif content is not None and dest is not None and dest.endswith("/"):
            result['failed'] = True
            result['msg'] = "dest must be a file if content is defined"
            return result

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if isinstance(content, dict) or isinstance(content, list):
                    content_tempfile = self._create_content_tempfile(json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception as err:
                result['failed'] = True
                result['msg'] = "could not write content temp file: %s" % err
                return result

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        elif faf:
            source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
            if source is None:
                result['failed'] = True
                result['msg'] = "could not find src in first_available_file list"
                return result

        elif remote_src:
            result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars, delete_remote_tmp=False))
            return result

        else:
            if self._task._role is not None:
                source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
            else:
                source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)

        # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
        source_files = []

        # If source is a directory populate our list else source is a file and translate it to a tuple.
        if os.path.isdir(source):
            # Get the number of leading characters to strip in order to get the relative path.
            if source_trailing_slash:
                sz = len(source)
            else:
                sz = len(source.rsplit('/', 1)[0]) + 1

            # Walk the directory and append the file tuples to source_files.
            for base_path, sub_folders, files in os.walk(source):
                for file in files:
                    full_path = os.path.join(base_path, file)
                    rel_path = full_path[sz:]
                    if rel_path.startswith('/'):
                        rel_path = rel_path[1:]
                    source_files.append((full_path, rel_path))

            # If it's recursive copy, destination is always a dir,
            # explicitly mark it so (note - copy module relies on this).
            if not self._connection._shell.path_has_trailing_slash(dest):
                dest = self._connection._shell.join_path(dest, '')
        else:
            source_files.append((source, os.path.basename(source)))

        changed = False
        module_return = dict(changed=False)

        # A register for if we executed a module.
        # Used to cut down on command calls when not recursive.
        module_executed = False

        # Tell _execute_module to delete the file if there is one file.
        delete_remote_tmp = (len(source_files) == 1)

        # If this is a recursive copy, create a shared tmp path up front, since the one _execute_module would create comes too late to share.
        if not delete_remote_tmp:
            if tmp is None or "-tmp-" not in tmp:
                tmp = self._make_tmp_path()

        # expand any user home dir specifier
        dest = self._remote_expand_user(dest)

        vault = VaultLib(password=self._loader._vault_password)
        diffs = []
        for source_full, source_rel in source_files:
            
            vault_temp_file = None
            data = None

            try:
                data = open(source_full).read()
            except IOError:
                raise errors.AnsibleError("file could not read: %s" % source_full)

            if vault.is_encrypted(data):
                # if the file is encrypted and no password was specified,
                # the decrypt call would throw an error, but we check first
                # since the decrypt function doesn't know the file name
                if self._loader._vault_password is None:
                    raise errors.AnsibleError("A vault password must be specified to decrypt %s" % source_full)
                    
                data = vault.decrypt(data)
                # Make a temp file
                vault_temp_file = self._create_content_tempfile(data)
                source_full = vault_temp_file
                
            # Generate a hash of the local file.
            local_checksum = checksum(source_full)

            # If local_checksum is not defined we can't find the file so we should fail out.
            if local_checksum is None:
                result['failed'] = True
                result['msg'] = "could not find src=%s" % source_full
                return result

            # This is a small optimization: if the user told us the destination
            # is a directory, do the path manipulation right away; otherwise we
            # still check whether dest is a directory via a remote call below.
            if self._connection._shell.path_has_trailing_slash(dest):
                dest_file = self._connection._shell.join_path(dest, source_rel)
            else:
                dest_file = self._connection._shell.join_path(dest)

            # Attempt to get the remote checksum
            remote_checksum = self._remote_checksum(dest_file, all_vars=task_vars)

            if remote_checksum == '3':
                # The remote_checksum was executed on a directory.
                if content is not None:
                    # If source was defined as content remove the temporary file and fail out.
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    result['failed'] = True
                    result['msg'] = "can not use content with a dir as dest"
                    return result
                else:
                    # Append the relative source location to the destination and retry remote_checksum
                    dest_file = self._connection._shell.join_path(dest, source_rel)
                    remote_checksum = self._remote_checksum(dest_file, all_vars=task_vars)

            if remote_checksum != '1' and not force:
                # remote file exists and force is not set, so skip to the next iteration.
                continue

            if local_checksum != remote_checksum:
                # The checksums don't match and we will change or error out.
                changed = True

                # Create a tmp path if missing only if this is not recursive.
                # If this is recursive we already have a tmp path.
                if delete_remote_tmp:
                    if tmp is None or "-tmp-" not in tmp:
                        tmp = self._make_tmp_path()

                if self._play_context.diff and not raw:
                    diffs.append(self._get_diff_data(dest_file, source_full, task_vars))

                if self._play_context.check_mode:
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    changed = True
                    module_return = dict(changed=True)
                    continue

                # Define a remote directory that we will copy the file to.
                tmp_src = self._connection._shell.join_path(tmp, 'source')

                if not raw:
                    self._connection.put_file(source_full, tmp_src)
                else:
                    self._connection.put_file(source_full, dest_file)

                # We have copied the file remotely and no longer require our content_tempfile
                self._remove_tempfile_if_content_defined(content, content_tempfile)

                # Remove the vault tempfile if we have one
                if vault_temp_file:
                    os.remove(vault_temp_file)
                    vault_temp_file = None

                # fix file permissions when the copy is done as a different user
                if self._play_context.become and self._play_context.become_user != 'root':
                    self._remote_chmod('a+r', tmp_src)

                if raw:
                    # Continue to next iteration if raw is defined.
                    continue

                # Run the copy module

                # src and dest here come after original and override them
                # we pass dest only to make sure it includes trailing slash in case of recursive copy
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=tmp_src,
                        dest=dest,
                        original_basename=source_rel,
                    )
                )

                module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            else:
                # no need to transfer the file, already correct hash, but still need to call
                # the file module in case we want to change attributes
                self._remove_tempfile_if_content_defined(content, content_tempfile)

                # Remove the vault tempfile if we have one
                if vault_temp_file:
                    os.remove(vault_temp_file)
                    vault_temp_file = None
                    
                if raw:
                    # Continue to next iteration if raw is defined.
                    self._remove_tmp_path(tmp)
                    continue

                # Build temporary module_args.
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=source_rel,
                        dest=dest,
                        original_basename=source_rel
                    )
                )

                # Execute the file module.
                module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            if not module_return.get('checksum'):
                module_return['checksum'] = local_checksum
            if module_return.get('failed'):
                result.update(module_return)
                return result
            if module_return.get('changed'):
                changed = True

            # the file module returns the file path as 'path', but
            # the copy module uses 'dest', so add it if it's not there
            if 'path' in module_return and 'dest' not in module_return:
                module_return['dest'] = module_return['path']

        # Delete tmp path if we were recursive or if we did not execute a module.
        if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) or (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed):
            self._remove_tmp_path(tmp)

        if module_executed and len(source_files) == 1:
            result.update(module_return)
        else:
            result.update(dict(dest=dest, src=source, changed=changed))

        if diffs:
            result['diff'] = diffs

        return result
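A condensed, standalone sketch of just the vault handling inside the copy loop above. It assumes the same legacy VaultLib(password=...) interface and the errors module already imported by this example; decrypted_copy_source is a hypothetical helper name and the paths are illustrative.

import os
import tempfile

def decrypted_copy_source(source_full, vault_password):
    """Return a readable plaintext path for source_full and, if one was
    created, the temp file path the caller must remove afterwards."""
    vault = VaultLib(password=vault_password)
    with open(source_full) as f:
        data = f.read()

    if not vault.is_encrypted(data):
        return source_full, None

    if vault_password is None:
        raise errors.AnsibleError("A vault password must be specified to decrypt %s" % source_full)

    plaintext = vault.decrypt(data)
    fd, temp_path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmp:
        tmp.write(plaintext if isinstance(plaintext, bytes) else plaintext.encode('utf-8'))
    return temp_path, temp_path

# Usage mirrors the loop body: copy from the returned path, then
#   if vault_temp_file: os.remove(vault_temp_file)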
Example #29
 def __init__(self, vault_password=None):
     self._basedir = '.'
     self._vault = VaultLib(password=vault_password)
Example #30
class DataLoader():
    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='******')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    _FILE_CACHE = dict()

    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string. 
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            try:
                # if loading JSON failed for any reason, we go ahead
                # and try to parse it as YAML instead
                return self._safe_load(data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            return self._FILE_CACHE[file_name]

        # read the file contents and load the data structure from them
        (file_data, show_content) = self._get_file_contents(file_name)
        parsed_data = self.load(data=file_data,
                                file_name=file_name,
                                show_content=show_content)

        # cache the file contents for next time
        self._FILE_CACHE[file_name] = parsed_data

        return parsed_data

    def path_exists(self, path):
        return os.path.exists(path)

    def is_directory(self, path):
        return os.path.isdir(path)

    def is_file(self, path):
        return os.path.isfile(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            loader.dispose()

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleParserError(
                "the file_name '%s' does not exist, or is not readable" %
                file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.set_position_info(file_name,
                                      yaml_exc.problem_mark.line + 1,
                                      yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR,
                                 obj=err_obj,
                                 show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = basedir

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)

        if given.startswith("/"):
            return os.path.abspath(given)
        elif given.startswith("~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            return os.path.abspath(os.path.join(self._basedir, given))
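The path_dwim resolution above can be summarized with a short illustration; the basedir and paths are hypothetical.

dl = DataLoader()
dl.set_basedir('/srv/playbooks')

dl.path_dwim('/etc/hosts')        # absolute paths are returned as-is (normalized)
dl.path_dwim('~/inventory.ini')   # '~' expands to the user's home directory
dl.path_dwim('roles/common')      # relative paths resolve under the basedir:
                                  # '/srv/playbooks/roles/common'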
Example #31
 def __init__(self, password):
     self.password = password
     self.vault = VaultLib(password)
Example #32
class Vault(object):
    """ R/W an ansible-vault file """
    def __init__(self, password):
        self.password = password
        self.vault = VaultLib(password)

    def load(self, stream):
        """ Read vault steam and return python object
        :param stream: The stream to read data from
        :returns: The decrypted data
        """
        return self.vault.decrypt(stream)

    def load_secure_file(self, secure_file):
        """ Read vault secured file and return python object
        :param secure_file: The file to read data from
        :returns: The decrpted data
        """
        return self.load(open(secure_file).read())

    def load_as_json(self, secure_file):
        """ Read vault secured file and return json decoded object
        :param secure_file: The file to read data from as json
        :returns: The JSON data
        """
        return json.loads(self.load_secure_file(secure_file).decode('UTF-8'))

    def dump(self, data, stream=None):
        """ Encrypt data and print stdout or write to stream
        :param data: The information to be encrypted
        :param stream: If not None the location to write the encrypted data to.
        :returns: If stream is None then the encrypted bytes otherwise None.
        """
        encrypted = self.vault.encrypt(data)
        if stream:
            stream.write(encrypted)
        else:
            return encrypted

    def dump_as_json(self, obj, stream=None):
        """ Convert object to json and encrypt the data.
        :param obj: Python object to convert to json
        :param stream: If not None the location to write the encrypted data to.
        :returns: If stream is None then the encrypted bytes otherwise None.
        """
        data = json.dumps(obj, separators=(',', ': '))
        return self.dump(data, stream)

    def dump_as_json_to_file(self, obj, file_path):
        """ Convert object to json and encrypt the data.
        :param obj: Python object to convert to json
        :param file_path: The file to write data to via temp file
        """
        tempdir = gettempdir()
        tempfilename = 'tmp_' + str(uuid.uuid4())
        temppath = os.path.join(tempdir, tempfilename)
        with open(temppath, 'wb') as data_temp:
            self.dump_as_json(obj, data_temp)
        data_temp.close()
        move(temppath, os.path.abspath(file_path))

    def dump_as_yaml(self, obj, stream=None):
        """ Convert object to yaml and encrypt the data.
        :param obj: Python object to convert to yaml
        :param stream: If not None the location to write the encrypted data to.
        :returns: If stream is None then the encrypted bytes otherwise None.
        """
        data = yaml.dump(obj, default_flow_style=False)
        return self.dump(data, stream)

    def dump_as_yaml_to_file(self, obj, file_path):
        """ Convert object to yaml and encrypt the data.
        :param obj: Python object to convert to yaml
        :param file_path: The file to write data to via temp file
        """
        tempdir = gettempdir()
        tempfilename = 'tmp_' + str(uuid.uuid4())
        temppath = os.path.join(tempdir, tempfilename)
        with open(temppath, 'wb') as data_temp:
            self.dump_as_yaml(obj, data_temp)
        data_temp.close()
        move(temppath, os.path.abspath(file_path))
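A minimal usage sketch for the Vault wrapper above; the password, file name, and data are illustrative.

vault = Vault('example-vault-password')

# Encrypt a dict as YAML and write it to disk (via a temp file + move).
vault.dump_as_yaml_to_file({'db_password': 'changeme'}, 'secrets.yml')

# Read the decrypted bytes back, or use load_as_json for JSON payloads.
plaintext = vault.load_secure_file('secrets.yml')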
Example #33
    def encrypt(self, to_encrypt):
        """Encrypts a scalar value using ansible-vault"""

        return VaultLib().encrypt(to_encrypt, self.generate_secrets())
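The method above depends on a generate_secrets() helper from its own class. For reference, a self-contained round trip with the same secrets-based VaultLib API used elsewhere in these examples; the password value is illustrative.

from ansible.constants import DEFAULT_VAULT_ID_MATCH
from ansible.parsing.vault import VaultLib, VaultSecret

secret = VaultSecret(b'example-password')
vault = VaultLib([(DEFAULT_VAULT_ID_MATCH, secret)])

b_vaulttext = vault.encrypt(u'some scalar value', secret)   # bytes, starts with $ANSIBLE_VAULT;1.1;AES256
assert vault.decrypt(b_vaulttext) == b'some scalar value'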
Example #34
class TestVaultLib(unittest.TestCase):
    def setUp(self):
        self.v = VaultLib('test-vault-password')

    def test_encrypt(self):
        plaintext = u'Some text to encrypt in a café'
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_encrypt_bytes(self):

        plaintext = to_bytes(u'Some text to encrypt in a café')
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_is_encrypted(self):
        self.assertFalse(self.v.is_encrypted(b"foobar"), msg="encryption check on plaintext yielded false positive")
        b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        self.assertTrue(self.v.is_encrypted(b_data), msg="encryption check on headered text failed")

    def test_format_output(self):
        self.v.cipher_name = "TEST"
        b_ciphertext = b"ansible"
        b_vaulttext = self.v._format_output(b_ciphertext)
        b_lines = b_vaulttext.split(b'\n')
        self.assertGreater(len(b_lines), 1, msg="failed to properly add header")

        b_header = b_lines[0]
        self.assertTrue(b_header.endswith(b';TEST'), msg="header does not end with cipher name")

        b_header_parts = b_header.split(b';')
        self.assertEqual(len(b_header_parts), 3, msg="header has the wrong number of parts")
        self.assertEqual(b_header_parts[0], b'$ANSIBLE_VAULT', msg="header does not start with $ANSIBLE_VAULT")
        self.assertEqual(b_header_parts[1], self.v.b_version, msg="header version is incorrect")
        self.assertEqual(b_header_parts[2], b'TEST', msg="header does not end with cipher name")

    def test_split_header(self):
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
        b_ciphertext = self.v._split_header(b_vaulttext)
        b_lines = b_ciphertext.split(b'\n')
        self.assertEqual(b_lines[0], b"ansible", msg="Payload was not properly split from the header")
        self.assertEqual(self.v.cipher_name, u'TEST', msg="cipher name was not properly set")
        self.assertEqual(self.v.b_version, b"9.9", msg="version was not properly set")

    def test_encrypt_decrypt_aes(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        self.v.b_password = b'ansible'
        # AES encryption code has been removed, so this is old output for
        # AES-encrypted 'foobar' with password 'ansible'.
        b_vaulttext = b'''$ANSIBLE_VAULT;1.1;AES
53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3
fe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e
786a5a15efeb787e1958cbdd480d076c
'''
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        plaintext = u"foobar"
        b_vaulttext = self.v.encrypt(plaintext)
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertNotEqual(b_vaulttext, b"foobar", msg="encryption failed")
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256_existing_vault(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        b_orig_plaintext = b"Setec Astronomy"
        vaulttext = u'''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''

        b_plaintext = self.v.decrypt(vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

        b_vaulttext = to_bytes(vaulttext, encoding='ascii', errors='strict')
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

    def test_encrypt_decrypt_aes256_bad_hmac(self):
        # FIXME This test isn't working quite yet.
        raise SkipTest

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = 'AES256'
        # plaintext = "Setec Astronomy"
        enc_data = '''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''
        b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
        b_data = self.v._split_header(b_data)
        foo = binascii.unhexlify(b_data)
        lines = foo.splitlines()
        # line 0 is salt, line 1 is hmac, line 2+ is ciphertext
        b_salt = lines[0]
        b_hmac = lines[1]
        b_ciphertext_data = b'\n'.join(lines[2:])

        b_ciphertext = binascii.unhexlify(b_ciphertext_data)
        # b_orig_ciphertext = b_ciphertext[:]

        # now muck with the text
        # b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
        # b_munged_ciphertext = b_ciphertext
        # assert b_orig_ciphertext != b_munged_ciphertext

        b_ciphertext_data = binascii.hexlify(b_ciphertext)
        b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
        # reformat
        b_invalid_ciphertext = self.v._format_output(b_payload)

        # assert we throw an error
        self.v.decrypt(b_invalid_ciphertext)

    def test_encrypt_encrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        vaulttext = to_text(b_vaulttext, errors='strict')
        self.assertRaises(errors.AnsibleError, self.v.encrypt, b_vaulttext)
        self.assertRaises(errors.AnsibleError, self.v.encrypt, vaulttext)

    def test_decrypt_decrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, plaintext)

        b_plaintext = b"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, b_plaintext)

    def test_cipher_not_set(self):
        # not setting the cipher should default to AES256
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.v.encrypt(plaintext)
        self.assertEqual(self.v.cipher_name, "AES256")
Example #35
class VaultpwCLI(CLI):
    '''
    CLI utility to store or fetch vault-encrypted passwords kept in YAML
    inventory files (see the "store" and "show" actions below).
    '''

    VALID_ACTIONS = frozenset(("show", "store"))

    def __init__(self, args):
        super().__init__(args)

        self.encrypt_secret = None
        self.encrypt_vault_id = None

    def set_action(self):
        super().set_action()

        if self.action == "show":
            self.parser.set_usage(
                "usage: %prog show /path/to/example_password.yml")
        elif self.action == "store":
            self.parser.set_usage(
                "usage: %prog store /path/to/example_password.yml [-c command]"
            )
            self.parser.add_option('-c',
                                   '--command',
                                   dest='password_command',
                                   action='store',
                                   type='string',
                                   help="command to run to obtain a password")
            self.parser.add_option(
                '--encrypt-vault-id',
                default=[],
                dest='encrypt_vault_id',
                action='store',
                type='string',
                help=
                'the vault id used to encrypt (required if more than one vault-id is provided)'
            )

    def init_parser(self):
        super().init_parser(
            usage="usage: %%prog [%s] [options] /path/to/example_password.yml"
            % "|".join(sorted(self.VALID_ACTIONS)),
            desc=
            "utility to store or fetch vault-encrypted passwords in YAML inventory files",
            epilog=
            "\nSee '%s <command> --help' for more information on a specific command.\n\n"
            % os.path.basename(sys.argv[0]))
        opt_help.add_vault_options(self.parser)

        self.set_action()

    def post_process_args(self, options, args):
        options, args = super().post_process_args(options, args)
        self.validate_conflicts(options,
                                vault_opts=True,
                                vault_rekey_opts=False)

        display.verbosity = options.verbosity

        if options.vault_ids:
            for vault_id in options.vault_ids:
                if u';' in vault_id:
                    raise AnsibleOptionsError(
                        "Invalid character ';' found in vault id: %s" %
                        vault_id)

        return options, args

    def run(self):
        super().run()
        self.loader = DataLoader()

        vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST + list(
            context.CLIARGS['vault_ids'])
        vault_secrets = self.setup_vault_secrets(
            self.loader,
            vault_ids=vault_ids,
            vault_password_files=list(context.CLIARGS['vault_password_files']),
            ask_vault_pass=context.CLIARGS['ask_vault_pass'])

        if not vault_secrets:
            raise AnsibleOptionsError(
                "A vault password is required to use ansible-vault")

        encrypt_vault_id = context.CLIARGS.get(
            'encrypt_vault_id') or C.DEFAULT_VAULT_ENCRYPT_IDENTITY
        if len(vault_secrets) > 1 and not encrypt_vault_id:
            raise AnsibleOptionsError(
                "Use '--encrypt-vault-id id' to choose one of the following vault ids to use for encryption: %s"
                % ','.join([x[0] for x in vault_secrets]))

        encrypt_secret = match_encrypt_secret(
            vault_secrets, encrypt_vault_id=encrypt_vault_id)

        self.encrypt_vault_id = encrypt_secret[0]
        self.encrypt_secret = encrypt_secret[1]

        self.loader.set_vault_secrets(vault_secrets)

        self.vault = VaultLib(vault_secrets)

        if len(context.CLIARGS['args']) != 1:
            raise AnsibleOptionsError(
                "Exactly one inventory file must be specified")

        self.file = os.path.expanduser(context.CLIARGS['args'][0])

        old_umask = os.umask(0o077)

        self.execute()

        os.umask(old_umask)

    def execute_store(self):
        '''
        Takes the path to an inventory file such as
        inventory/group_vars/tag_Cluster_xxx/secrets/example_password.yml and
        overwrites the file with an assignment of "example_password: password"
        in vault-encrypted YAML format. The password is obtained by prompting
        the user or, if a command is specified, by running the command and
        reading stdout.
        '''

        b_plaintext = b''

        command = context.CLIARGS['password_command']
        if command:
            try:
                pw = subprocess.run(command, capture_output=True)
                if pw.returncode != 0:
                    raise Exception('non-zero exit code: %s' % pw.returncode)
                b_plaintext = pw.stdout.strip()
            except Exception as e:
                print("ERROR: password command failed: %s" % str(e),
                      file=sys.stderr)
                sys.exit(-1)
        else:
            b_plaintext = to_bytes(display.prompt("Password: "******"ERROR: cannot encrypt password: %s" % str(e),
                  file=sys.stderr)
            sys.exit(-1)

        name = os.path.basename(self.file).replace('.yml', '')

        lines = []
        lines.append("%s: !vault |\n" % name)
        for l in to_text(b_ciphertext).splitlines():
            lines.append("    %s\n" % l)

        try:
            fh = open(self.file, 'wb')
            fh.write(to_bytes(''.join(lines)))
            fh.close()
        except Exception as e:
            print("ERROR: cannot write output to %s: %s" % (self.file, str(e)),
                  file=sys.stderr)
            sys.exit(-1)

    def execute_show(self):
        '''
        Takes the path to an inventory file such as
        inventory/group_vars/tag_Cluster_xxx/secrets/example_password.yml and
        prints the password defined therein. The file must contain a variable
        assignment of the form "example_password: password"; either the whole
        file is vault-encrypted, or only the password is.
        '''

        if not os.path.exists(self.file):
            print("ERROR: inventory file does not exist: %s" % self.file,
                  file=sys.stderr)
            sys.exit(-1)

        try:
            name = os.path.basename(self.file).replace('.yml', '')
            y = self.loader.load_from_file(self.file)
            print(y[name])
        except Exception as e:
            print("ERROR: cannot show password from %s: %s" %
                  (self.file, str(e)),
                  file=sys.stderr)
            sys.exit(-1)
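For reference, the small formatting step that execute_store performs, factored into a standalone function; format_vaulted_var is a hypothetical name and it assumes to_text is imported as in the CLI above.

def format_vaulted_var(name, b_ciphertext):
    """Render a vault ciphertext as a 'name: !vault |' YAML block."""
    lines = ["%s: !vault |\n" % name]
    for line in to_text(b_ciphertext).splitlines():
        lines.append("    %s\n" % line)
    return ''.join(lines)

# e.g. open(path, 'wb').write(to_bytes(format_vaulted_var('example_password', b_ciphertext)))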
Example #36
        exit(0)

    try:
        # Reading Composer Security Configurations from the input file
        composer_security_configuration_file = open(
            composer_security_configuration_file_path, encoding='utf-8')
        security_configurations = json.load(
            composer_security_configuration_file)

        # oneview_credentials_file = open(oneview_credentials_file_path, encoding='utf-8')
        # oneview_config = json.load(oneview_credentials_file)

        # Reading OneView configurations from the input file
        key = getpass("Enter the key to decrypt OneView credentials file : ")
        oneview_credentials_key = VaultLib([
            (DEFAULT_VAULT_ID_MATCH, VaultSecret(key.encode('utf-8')))
        ])
        oneview_credentials_file = open(oneview_credentials_file_path)
        oneview_config = json.loads(
            oneview_credentials_key.decrypt(oneview_credentials_file.read()))

        # Validating input Composer Security Configurations
        inputs_state = validate_inputs(security_configurations)
        if not inputs_state:
            print("Failed: Input format is invalid.")
            exit(0)

        # Defining HPE OneView API endpoints
        api_endpoints = {
            "allowSshAccess": "/rest/appliance/ssh-access",
            "GlobalSettings": "/rest/logindomains/global-settings",
Example #37
    if vaultfile is None:
        vaultfile = args.vault_file
    if not os.path.isfile(vaultfile):
        sys.stderr.write(
            "No ansible vault found in %s. Either create one or set the environment variable VAULTFILE.\n"
            % vaultfile)
        sys.exit(3)

    vaultpass = os.environ.get("VAULTPASS", None)
    if vaultpass is None:
        sys.stderr.write(
            "Set the VAULTPASS environment variable to unlock the ansible vault.\n"
        )
        sys.exit(3)

    vault = VaultLib([(DEFAULT_VAULT_ID_MATCH,
                       VaultSecret(vaultpass.encode('utf-8')))])
    try:
        content = vault.decrypt(open(vaultfile).read())
    except AnsibleVaultError:
        sys.stderr.write("Invalid vault password, could not decrypt vault.\n")
        sys.exit(3)
    data = yaml.load(content, Loader=yaml.CLoader)

    cmd = ["terraform", args.action]

    for key, value in data.items():
        cmd.append("--var '{}={}'".format(key, value))

    cmd += options

    runcmd = ' '.join(cmd)
Example #38
File: __init__.py Project: dataxu/ansible
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='******')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault_password = vault_password
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string. 
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            try:
                # if loading JSON failed for any reason, we go ahead
                # and try to parse it as YAML instead
                return self._safe_load(data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            return self._FILE_CACHE[file_name]

        # read the file contents and load the data structure from them
        (file_data, show_content) = self._get_file_contents(file_name)
        parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

        # cache the file contents for next time
        self._FILE_CACHE[file_name] = parsed_data

        return parsed_data

    def path_exists(self, path):
        return os.path.exists(path)

    def is_directory(self, path):
        return os.path.isdir(path)

    def is_file(self, path):
        return os.path.isfile(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            loader.dispose()

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleParserError("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occured while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.set_position_info(file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = basedir

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)

        if given.startswith("/"):
            return os.path.abspath(given)
        elif given.startswith("~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            return os.path.abspath(os.path.join(self._basedir, given))

    def path_dwim_relative(self, role_path, dirname, source):
        ''' find one file in a directory one level up in a dir named dirname relative to current '''

        basedir = os.path.dirname(role_path)
        if os.path.islink(basedir):
            basedir = unfrackpath(basedir)
            template2 = os.path.join(basedir, dirname, source)
        else:
            template2 = os.path.join(basedir, '..', dirname, source)

        source1 = os.path.join(role_path, dirname, source)
        if os.path.exists(source1):
            return source1

        cur_basedir = self._basedir
        self.set_basedir(basedir)
        source2 = self.path_dwim(template2)
        if os.path.exists(source2):
            self.set_basedir(cur_basedir)
            return source2

        obvious_local_path = self.path_dwim(source)
        if os.path.exists(obvious_local_path):
            self.set_basedir(cur_basedir)
            return obvious_local_path

        self.set_basedir(cur_basedir)
        return source2 # which does not exist
Example #39
    # Opening input files
    kickstart_files = {
        "rhel7_master": "kickstart_files/ks_rhel7.cfg",
        "rhel7_worker": "kickstart_files/ks_rhel7_worker.cfg",
        "esxi67": "kickstart_files/ks_esxi67.cfg"
    }

    config_path = 'input_files/config.json'
    servers_path = 'input_files/server_details.json'

    # Checking whether the input files exist
    if os.path.exists(config_path) and os.path.exists(servers_path):
        # Enter decryption key to decrypt input files
        key = getpass("Enter Key:")
        try:
            config_vault = VaultLib([(DEFAULT_VAULT_ID_MATCH,
                                      VaultSecret(key.encode('utf-8')))])
            # Opening config file
            configuration_file = open(config_path)
            # Decrypting config.json file
            config = json.loads(config_vault.decrypt(
                configuration_file.read()))
            server_vault = VaultLib([(DEFAULT_VAULT_ID_MATCH,
                                      VaultSecret(key.encode('utf-8')))])
            # Opening server details file
            server_input_file = open(servers_path)
            # Decrypting server_details.json file
            servers = json.loads(server_vault.decrypt(
                server_input_file.read()))

        except Exception as e:
            print(
Example #40
 def setUp(self):
     self.v = VaultLib('test-vault-password')
Example #41
 def __init__(self, file_name=None, vault_secrets=None):
     self._ansible_file_name = file_name
     super(AnsibleConstructor, self).__init__()
     self._vaults = {}
     self.vault_secrets = vault_secrets or []
     self._vaults['default'] = VaultLib(secrets=self.vault_secrets)
Example #42
    def test_encrypt(self):
        v = VaultLib(password='******')
        plaintext = u'Some text to encrypt.'
        ciphertext = v.encrypt(plaintext)

        self.assertIsInstance(ciphertext, (bytes, str))
Example #43
0
File: t2.py Project: infra-ops/cloud-ops
from ansible.constants import DEFAULT_VAULT_ID_MATCH
from ansible.parsing.vault import VaultLib
from ansible.parsing.vault import VaultSecret

vault = VaultLib([(DEFAULT_VAULT_ID_MATCH, VaultSecret(b'tower@123'))])
print(vault.decrypt(
    open(
        '/home/nik/Desktop/git-repo/cloud-ops/python-ops/rest-api/json-outs/env-1.json'
    ).read()))
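
A slightly safer variant of the same script prompts for the password instead of hard-coding it (sketch only; the file path is the one from the example above):

from getpass import getpass

from ansible.constants import DEFAULT_VAULT_ID_MATCH
from ansible.parsing.vault import VaultLib, VaultSecret

password = getpass('Vault password: ').encode('utf-8')
vault = VaultLib([(DEFAULT_VAULT_ID_MATCH, VaultSecret(password))])

with open('/home/nik/Desktop/git-repo/cloud-ops/python-ops/rest-api/json-outs/env-1.json') as f:
    print(vault.decrypt(f.read()).decode('utf-8'))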
Example #44
0
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        self.set_vault_password(None)

    def set_vault_password(self, vault_password):
        self._vault_password = vault_password
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''
        new_data = None
        try:
            # we first try to load this data as JSON
            new_data = json.loads(data)
        except:
            # must not be JSON, let the rest try
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data
            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos

        return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (file_data, show_content) = self._get_file_contents(file_name)
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        # return a deep copy here, so the cache is not affected
        return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name, self._vault_password)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass  # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        b_file_name = to_bytes(file_name)
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                if is_encrypted(data):
                    data = self._vault.decrypt(data, filename=b_file_name)
                    show_content = False

            data = to_text(data, errors='surrogate_or_strict')
            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_text(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_text(given, errors='surrogate_or_strict')

        if given.startswith(u"/"):
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            basedir = to_text(self._basedir, errors='surrogate_or_strict')
            return os.path.abspath(os.path.join(basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        isrole = False

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith(os.path.sep):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path,'main.yml'), errors='surrogate_or_strict')) \
                    or os.path.exists(to_bytes(os.path.join(path,'tasks/main.yml'), errors='surrogate_or_strict')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname,source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
                break

        return candidate

    def path_dwim_relative_stack(self, paths, dirname, source):
        '''
        find one file in first path in stack taking roles into account and adding play basedir as fallback

        :arg paths: A list of text strings which are the paths to look for the filename in.
        :arg dirname: A text string representing a directory.  The directory
            is prepended to the source to form the path to search for.
        :arg source: A text string which is the filename to search for
        :rtype: A text string
        :returns: An absolute path to the filename ``source``
        '''
        b_dirname = to_bytes(dirname)
        b_source = to_bytes(source)

        result = None
        if source is None:
            display.warning('Invalid request to find a file that matches a "null" value')
        elif source and (source.startswith('~') or source.startswith(os.path.sep)):
            # path is absolute, no relative needed, check existence and return source
            test_path = unfrackpath(b_source)
            if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
                result = test_path
        else:
            search = []
            for path in paths:
                upath = unfrackpath(path)
                b_upath = to_bytes(upath, errors='surrogate_or_strict')
                b_mydir = os.path.dirname(b_upath)

                # if path is in role and 'tasks' not there already, add it into the search
                if b_upath.endswith(b'tasks') and os.path.exists(os.path.join(b_upath, b'main.yml')) \
                        or os.path.exists(os.path.join(b_upath, b'tasks/main.yml')) \
                        or os.path.exists(os.path.join(b_mydir, b'tasks/main.yml')):
                    if b_mydir.endswith(b'tasks'):
                        search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
                        search.append(os.path.join(b_mydir, b_source))
                    else:
                        # don't add dirname if user already is using it in source
                        if b_source.split(b'/')[0] == b_dirname:
                            search.append(os.path.join(b_upath, b_source))
                        else:
                            search.append(os.path.join(b_upath, b_dirname, b_source))
                        search.append(os.path.join(b_upath, b'tasks', b_source))
                elif b_dirname not in b_source.split(b'/'):
                    # don't add dirname if user already is using it in source
                    search.append(os.path.join(b_upath, b_dirname, b_source))
                    search.append(os.path.join(b_upath, b_source))

            # always append basedir as last resort
            search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
            search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

            display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
            for b_candidate in search:
                display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
                if os.path.exists(b_candidate):
                    result = to_text(b_candidate)
                    break

        return result

    def read_vault_password_file(self, vault_password_file):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(to_bytes(os.path.expanduser(vault_password_file), errors='surrogate_or_strict'))
        if not os.path.exists(to_bytes(this_path, errors='surrogate_or_strict')):
            raise AnsibleFileNotFound("The vault password file %s was not found" % this_path)

        if self.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError("Problem running vault password script %s (%s)."
                        " If this is not a script, remove the executable bit from the file." % (' '.join(this_path), to_native(e)))
            stdout, stderr = p.communicate()
            self.set_vault_password(stdout.strip('\r\n'))
        else:
            try:
                f = open(this_path, "rb")
                self.set_vault_password(f.read().strip())
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))

        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % to_native(file_path))

        if not self._vault:
            self._vault = VaultLib(password="")

        real_path = self.path_dwim(file_path)

        try:
            with open(to_bytes(real_path), 'rb') as f:
                if is_encrypted_file(f):
                    # if the file is encrypted and no password was specified,
                    # the decrypt call would throw an error, but we check first
                    # since the decrypt function doesn't know the file name
                    data = f.read()
                    if not self._vault_password:
                        raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)

                    data = self._vault.decrypt(data, filename=real_path)
                    # Make a temp file
                    real_path = self._create_content_tempfile(data)
                    self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)))

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except:
                pass  # TODO: this should at least warn
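
Using the class above follows the pattern from its docstring; a minimal sketch against this older, password-based API (paths and password are made up):

# Sketch for the pre-2.4 DataLoader shown above.
loader = DataLoader()
loader.set_vault_password('example-password')  # enables decryption of vaulted files
loader.set_basedir('/path/to/playbook')        # relative paths resolve from here

vars_data = loader.load_from_file('group_vars/all.yml')  # decrypted if vault-encrypted
inline = loader.load('{"region": "us-east-1"}')          # JSON or YAML strings also work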
Example #45
0
class DataLoader():
    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''
    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        self.set_vault_password(None)

    def set_vault_password(self, vault_password):
        self._vault_password = vault_password
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''
        new_data = None
        try:
            # we first try to load this data as JSON
            new_data = json.loads(data)
        except:
            # must not be JSON, let the rest try
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data
            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos

        return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (file_data, show_content) = self._get_file_contents(file_name)
            parsed_data = self.load(data=file_data,
                                    file_name=file_name,
                                    show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        # return a deep copy here, so the cache is not affected
        return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path,
                                       errors='strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass  # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        b_file_name = to_bytes(file_name)
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound(
                "the file_name '%s' does not exist, or is not readable" %
                file_name)

        show_content = True
        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data, filename=b_file_name)
                    show_content = False

            data = to_unicode(data, errors='strict')
            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1,
                                   yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR,
                                 obj=err_obj,
                                 show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_unicode(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_unicode(given, errors='strict')

        if given.startswith(u"/"):
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            basedir = to_unicode(self._basedir, errors='strict')
            return os.path.abspath(os.path.join(basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        isrole = False

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith('/'):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))

            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path,'main.yml'), errors='strict')) \
                or os.path.exists(to_bytes(os.path.join(path,'tasks/main.yml'), errors='strict')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(
                self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(
                    self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='strict')):
                break

        return candidate

    def read_vault_password_file(self, vault_password_file):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(
            to_bytes(os.path.expanduser(vault_password_file), errors='strict'))
        if not os.path.exists(to_bytes(this_path, errors='strict')):
            raise AnsibleFileNotFound(
                "The vault password file %s was not found" % this_path)

        if self.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError(
                    "Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file."
                    % (this_path, e))
            stdout, stderr = p.communicate()
            self.set_vault_password(stdout.strip('\r\n'))
        else:
            try:
                f = open(this_path, "rb")
                self.set_vault_password(f.read().strip())
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError(
                    "Could not read vault password file %s: %s" %
                    (this_path, e))

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_path))

        if not self.path_exists(file_path) or not self.is_file(file_path):
            raise AnsibleFileNotFound(
                "the file_name '%s' does not exist, or is not readable" %
                file_path)

        if not self._vault:
            self._vault = VaultLib(password="")

        real_path = self.path_dwim(file_path)

        try:
            with open(to_bytes(real_path), 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    # if the file is encrypted and no password was specified,
                    # the decrypt call would throw an error, but we check first
                    # since the decrypt function doesn't know the file name
                    if not self._vault_password:
                        raise AnsibleParserError(
                            "A vault password must be specified to decrypt %s"
                            % file_path)

                    data = self._vault.decrypt(data, filename=real_path)
                    # Make a temp file
                    real_path = self._create_content_tempfile(data)
                    self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (real_path, str(e)))

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except:
                pass  #TODO: this should at least warn
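
The read_vault_password_file branches above (plain file vs. executable script) can be exercised on their own; a standalone sketch of the same logic without the Ansible error types:

import os
import subprocess

def read_vault_password(path):
    # Sketch only: mirrors read_vault_password_file above using the stdlib.
    path = os.path.realpath(os.path.expanduser(path))
    if not os.path.exists(path):
        raise FileNotFoundError("vault password file %s was not found" % path)

    if os.access(path, os.X_OK):
        # Executable: run it and take the password from stdout (stderr is left
        # alone so the script can prompt the user, as in the original).
        stdout, _ = subprocess.Popen(path, stdout=subprocess.PIPE).communicate()
        return stdout.decode('utf-8').strip()

    # Plain file: the password is the stripped file contents.
    with open(path, 'rb') as f:
        return f.read().decode('utf-8').strip()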
Example #46
0
File: api.py Project: jptomo/ansible-vault
    def __init__(self, password):
        self._ansible_ver = _ansible_ver

        self.secret = password.encode('utf-8')
        self.vault = VaultLib(self._make_secrets(self.secret))
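
_make_secrets is not shown in this snippet; presumably it wraps the raw password bytes into the (vault_id, VaultSecret) list that VaultLib expects. A hedged guess at its shape:

from ansible.constants import DEFAULT_VAULT_ID_MATCH
from ansible.parsing.vault import VaultSecret

def _make_secrets(secret):
    # Assumption: roughly what the missing helper does in this wrapper library.
    return [(DEFAULT_VAULT_ID_MATCH, VaultSecret(secret))]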
Example #47
0
 def set_vault_password(self, vault_password):
     self._vault_password = vault_password
     self._vault = VaultLib(password=vault_password)
Example #48
0
File: ajson.py Project: yijxiang/ansible-1
 def set_secrets(cls, secrets):
     cls._vaults['default'] = VaultLib(secrets=secrets)
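
The classmethod above stores one shared VaultLib in a class-level registry, so every instance of the JSON encoder/decoder sees the same secrets. A minimal illustration of that pattern (not the real ajson.py):

from ansible.constants import DEFAULT_VAULT_ID_MATCH
from ansible.parsing.vault import VaultLib, VaultSecret

class VaultAwareCodec:
    _vaults = {}  # shared across all instances, like the registry above

    @classmethod
    def set_secrets(cls, secrets):
        cls._vaults['default'] = VaultLib(secrets=secrets)

    def decrypt(self, vaulttext):
        return self._vaults['default'].decrypt(vaulttext)

VaultAwareCodec.set_secrets([(DEFAULT_VAULT_ID_MATCH, VaultSecret(b'example-password'))])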
Example #49
0
class DataLoader:
    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''
    def __init__(self):

        self._basedir = '.'

        # NOTE: not effective with forks as the main copy does not get updated.
        # avoids rereading files
        self._FILE_CACHE = dict()

        # NOTE: not thread safe, also issues with forks not returning data to main proc
        #       so they need to be cleaned independently. See WorkerProcess for example.
        # used to keep track of temp files for cleaning
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        # TODO: replace with a ref to something that can get the password
        #       a creds/auth provider
        # self.set_vault_password(None)
        self._vaults = {}
        self._vault = VaultLib()
        self.set_vault_secrets(None)

    # TODO: since we can query vault_secrets late, we could provide this to DataLoader init
    def set_vault_secrets(self, vault_secrets):
        self._vault.secrets = vault_secrets

    def load(self, data, file_name='<string>', show_content=True):
        '''Backwards compat for now'''
        return from_yaml(data, file_name, show_content, self._vault.secrets)

    def load_from_file(self, file_name, cache=True, unsafe=False):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)
        display.debug("Loading data from %s" % file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if cache and file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (b_file_data, show_content) = self._get_file_contents(file_name)

            file_data = to_text(b_file_data, errors='surrogate_or_strict')
            parsed_data = self.load(data=file_data,
                                    file_name=file_name,
                                    show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        if unsafe:
            return parsed_data
        else:
            # return a deep copy here, so the cache is not affected
            return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(
            path, errors='surrogate_or_strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _decrypt_if_vault_data(self, b_vault_data, b_file_name=None):
        '''Decrypt b_vault_data if encrypted and return b_data and the show_content flag'''

        if not is_encrypted(b_vault_data):
            show_content = True
            return b_vault_data, show_content

        b_ciphertext, b_version, cipher_name, vault_id = parse_vaulttext_envelope(
            b_vault_data)
        b_data = self._vault.decrypt(b_vault_data, filename=b_file_name)

        show_content = False
        return b_data, show_content

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name

        If the contents are vault-encrypted, it will decrypt them and return
        the decrypted data

        :arg file_name: The name of the file to read.  If this is a relative
            path, it will be expanded relative to the basedir
        :raises AnsibleFileNotFound: if the file_name does not refer to a file
        :raises AnsibleParserError: if we were unable to read the file
        :return: Returns a byte string of the file contents
        '''
        if not file_name or not isinstance(file_name,
                                           (binary_type, text_type)):
            raise AnsibleParserError("Invalid filename: '%s'" %
                                     to_native(file_name))

        b_file_name = to_bytes(self.path_dwim(file_name))
        # This is what we really want but have to fix unittests to make it pass
        # if not os.path.exists(b_file_name) or not os.path.isfile(b_file_name):
        if not self.path_exists(b_file_name):
            raise AnsibleFileNotFound("Unable to retrieve file contents",
                                      file_name=file_name)

        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                return self._decrypt_if_vault_data(data, b_file_name)
        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (file_name, to_native(e)),
                orig_exc=e)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_text(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_text(given, errors='surrogate_or_strict')

        if given.startswith(to_text(os.path.sep)) or given.startswith(u'~'):
            path = given
        else:
            basedir = to_text(self._basedir, errors='surrogate_or_strict')
            path = os.path.join(basedir, given)

        return unfrackpath(path, follow=False)

    def _is_role(self, path):
        ''' imperfect role detection, roles are still valid w/o tasks|meta/main.yml|yaml|etc '''

        b_path = to_bytes(path, errors='surrogate_or_strict')
        b_upath = to_bytes(unfrackpath(path, follow=False),
                           errors='surrogate_or_strict')

        for b_finddir in (b'meta', b'tasks'):
            for b_suffix in (b'.yml', b'.yaml', b''):
                b_main = b'main%s' % (b_suffix)
                b_tasked = os.path.join(b_finddir, b_main)

                if (RE_TASKS.search(path)
                        and os.path.exists(os.path.join(b_path, b_main))
                        or os.path.exists(os.path.join(b_upath, b_tasked))
                        or os.path.exists(
                            os.path.join(os.path.dirname(b_path), b_tasked))):
                    return True
        return False

    def path_dwim_relative(self, path, dirname, source, is_role=False):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        source = to_text(source, errors='surrogate_or_strict')

        # I have full path, nothing else needs to be looked at
        if source.startswith(to_text(os.path.sep)) or source.startswith(u'~'):
            search.append(unfrackpath(source, follow=False))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path, follow=False)

            # not told if role, but detect if it is a role and if so make sure you get correct base path
            if not is_role:
                is_role = self._is_role(path)

            if is_role and RE_TASKS.search(path):
                basedir = unfrackpath(os.path.dirname(path), follow=False)

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(
                unfrackpath(os.path.join(basedir, dirname, source),
                            follow=False))
            self.set_basedir(cur_basedir)

            if is_role and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(
                    unfrackpath(os.path.join(basedir, 'tasks', source),
                                follow=False))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(
                unfrackpath(os.path.join(dirname, source), follow=False))

            # try to create absolute path for loader basedir
            search.append(
                unfrackpath(os.path.join(basedir, source), follow=False))

            # try to create absolute path for  dirname + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))

            # try to create absolute path for filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate,
                                       errors='surrogate_or_strict')):
                break

        return candidate

    def path_dwim_relative_stack(self, paths, dirname, source, is_role=False):
        '''
        find one file in first path in stack taking roles into account and adding play basedir as fallback

        :arg paths: A list of text strings which are the paths to look for the filename in.
        :arg dirname: A text string representing a directory.  The directory
            is prepended to the source to form the path to search for.
        :arg source: A text string which is the filename to search for
        :rtype: A text string
        :returns: An absolute path to the filename ``source`` if found
        :raises: An AnsibleFileNotFound Exception if the file is not found in any of the search paths
        '''
        b_dirname = to_bytes(dirname)
        b_source = to_bytes(source)

        result = None
        search = []
        if source is None:
            display.warning(
                'Invalid request to find a file that matches a "null" value')
        elif source and (source.startswith('~')
                         or source.startswith(os.path.sep)):
            # path is absolute, no relative needed, check existence and return source
            test_path = unfrackpath(b_source, follow=False)
            if os.path.exists(to_bytes(test_path,
                                       errors='surrogate_or_strict')):
                result = test_path
        else:
            display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
            for path in paths:
                upath = unfrackpath(path, follow=False)
                b_upath = to_bytes(upath, errors='surrogate_or_strict')
                b_pb_base_dir = os.path.dirname(b_upath)

                # if path is in role and 'tasks' not there already, add it into the search
                if (is_role or self._is_role(path)
                    ) and b_pb_base_dir.endswith(b'/tasks'):
                    search.append(
                        os.path.join(os.path.dirname(b_pb_base_dir), b_dirname,
                                     b_source))
                    search.append(os.path.join(b_pb_base_dir, b_source))
                else:
                    # don't add dirname if user already is using it in source
                    if b_source.split(b'/')[0] != b_dirname:
                        search.append(
                            os.path.join(b_upath, b_dirname, b_source))
                    search.append(os.path.join(b_upath, b_source))

            # always append basedir as last resort
            # don't add dirname if user already is using it in source
            if b_source.split(b'/')[0] != b_dirname:
                search.append(
                    os.path.join(to_bytes(self.get_basedir()), b_dirname,
                                 b_source))
            search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

            display.debug(u'search_path:\n\t%s' %
                          to_text(b'\n\t'.join(search)))
            for b_candidate in search:
                display.vvvvv(u'looking for "%s" at "%s"' %
                              (source, to_text(b_candidate)))
                if os.path.exists(b_candidate):
                    result = to_text(b_candidate)
                    break

        if result is None:
            raise AnsibleFileNotFound(file_name=source,
                                      paths=[to_text(p) for p in search])

        return result

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path, decrypt=True):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path,
                                           (binary_type, text_type)):
            raise AnsibleParserError("Invalid filename: '%s'" %
                                     to_native(file_path))

        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound(file_name=file_path)

        real_path = self.path_dwim(file_path)

        try:
            if decrypt:
                with open(to_bytes(real_path), 'rb') as f:
                    # Limit how much of the file is read since we do not know
                    # whether this is a vault file and therefore it could be very
                    # large.
                    if is_encrypted_file(f, count=len(b_HEADER)):
                        # if the file is encrypted and no password was specified,
                        # the decrypt call would throw an error, but we check first
                        # since the decrypt function doesn't know the file name
                        data = f.read()
                        if not self._vault.secrets:
                            raise AnsibleParserError(
                                "A vault password or secret must be specified to decrypt %s"
                                % to_native(file_path))

                        data = self._vault.decrypt(data, filename=real_path)
                        # Make a temp file
                        real_path = self._create_content_tempfile(data)
                        self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (to_native(real_path), to_native(e)),
                orig_exc=e)

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        """
        Removes all temporary files that DataLoader has created
        NOTE: not thread safe; forks also need special handling, see __init__ for details.
        """
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except Exception as e:
                display.warning("Unable to cleanup temp files: %s" %
                                to_text(e))

    def find_vars_files(self, path, name, extensions=None, allow_dir=True):
        """
        Find vars files in a given path with specified name. This will find
        files in a dir named <name>/ or a file called <name> ending in known
        extensions.
        """

        b_path = to_bytes(os.path.join(path, name))
        found = []

        if extensions is None:
            # Look for file with no extension first to find dir before file
            extensions = [''] + C.YAML_FILENAME_EXTENSIONS
        # add valid extensions to name
        for ext in extensions:

            if '.' in ext:
                full_path = b_path + to_bytes(ext)
            elif ext:
                full_path = b'.'.join([b_path, to_bytes(ext)])
            else:
                full_path = b_path

            if self.path_exists(full_path):
                if self.is_directory(full_path):
                    if allow_dir:
                        found.extend(
                            self._get_dir_vars_files(to_text(full_path),
                                                     extensions))
                    else:
                        continue
                else:
                    found.append(full_path)
                break
        return found

    def _get_dir_vars_files(self, path, extensions):
        found = []
        for spath in sorted(self.list_directory(path)):
            if not spath.startswith(u'.') and not spath.endswith(
                    u'~'):  # skip hidden and backups

                ext = os.path.splitext(spath)[-1]
                full_spath = os.path.join(path, spath)

                if self.is_directory(
                        full_spath) and not ext:  # recursive search if dir
                    found.extend(
                        self._get_dir_vars_files(full_spath, extensions))
                elif self.is_file(full_spath) and (not ext or to_text(ext)
                                                   in extensions):
                    # only consider files with valid extensions or no extension
                    found.append(full_spath)

        return found
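
With this secrets-based DataLoader the vault password is no longer set directly on the loader; a short usage sketch (paths and password are made up):

from ansible.constants import DEFAULT_VAULT_ID_MATCH
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import VaultSecret

loader = DataLoader()
loader.set_vault_secrets([(DEFAULT_VAULT_ID_MATCH, VaultSecret(b'example-password'))])
loader.set_basedir('/path/to/playbook')

data = loader.load_from_file('group_vars/all.yml')   # decrypted transparently if vaulted
pem_path = loader.get_real_file('files/secret.pem')  # temp plaintext copy if vaulted
loader.cleanup_all_tmp_files()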
Example #50
0
 def setUp(self):
     self.v = VaultLib('test-vault-password')
Example #51
0
    def run(self):
        super(VaultCLI, self).run()
        loader = DataLoader()

        # set default restrictive umask
        old_umask = os.umask(0o077)

        vault_ids = self.options.vault_ids

        # there are 3 types of actions, those that just 'read' (decrypt, view) and only
        # need to ask for a password once, and those that 'write' (create, encrypt) that
        # ask for a new password and confirm it, and 'read/write (rekey) that asks for the
        # old password, then asks for a new one and confirms it.

        default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST
        vault_ids = default_vault_ids + vault_ids

        # TODO: instead of prompting for these before, we could let VaultEditor
        #       call a callback when it needs it.
        if self.action in ['decrypt', 'view', 'rekey', 'edit']:
            vault_secrets = self.setup_vault_secrets(
                loader,
                vault_ids=vault_ids,
                vault_password_files=self.options.vault_password_files,
                ask_vault_pass=self.options.ask_vault_pass)
            if not vault_secrets:
                raise AnsibleOptionsError(
                    "A vault password is required to use Ansible's Vault")

        if self.action in ['encrypt', 'encrypt_string', 'create']:

            encrypt_vault_id = None
            # no --encrypt-vault-id self.options.encrypt_vault_id for 'edit'
            if self.action not in ['edit']:
                encrypt_vault_id = self.options.encrypt_vault_id or C.DEFAULT_VAULT_ENCRYPT_IDENTITY

            vault_secrets = None
            vault_secrets = \
                self.setup_vault_secrets(loader,
                                         vault_ids=vault_ids,
                                         vault_password_files=self.options.vault_password_files,
                                         ask_vault_pass=self.options.ask_vault_pass,
                                         create_new_password=True)

            if len(vault_secrets) > 1 and not encrypt_vault_id:
                raise AnsibleOptionsError(
                    "The vault-ids %s are available to encrypt. Specify the vault-id to encrypt with --encrypt-vault-id"
                    % ','.join([x[0] for x in vault_secrets]))

            if not vault_secrets:
                raise AnsibleOptionsError(
                    "A vault password is required to use Ansible's Vault")

            encrypt_secret = match_encrypt_secret(
                vault_secrets, encrypt_vault_id=encrypt_vault_id)

            # only one secret for encrypt for now, use the first vault_id and use its first secret
            # TODO: exception if more than one?
            self.encrypt_vault_id = encrypt_secret[0]
            self.encrypt_secret = encrypt_secret[1]

        if self.action in ['rekey']:
            encrypt_vault_id = self.options.encrypt_vault_id or C.DEFAULT_VAULT_ENCRYPT_IDENTITY
            # print('encrypt_vault_id: %s' % encrypt_vault_id)
            # print('default_encrypt_vault_id: %s' % default_encrypt_vault_id)

            # new_vault_ids should only ever be one item, from
            # load the default vault ids if we are using encrypt-vault-id
            new_vault_ids = []
            if encrypt_vault_id:
                new_vault_ids = default_vault_ids
            if self.options.new_vault_id:
                new_vault_ids.append(self.options.new_vault_id)

            new_vault_password_files = []
            if self.options.new_vault_password_file:
                new_vault_password_files.append(
                    self.options.new_vault_password_file)

            new_vault_secrets = \
                self.setup_vault_secrets(loader,
                                         vault_ids=new_vault_ids,
                                         vault_password_files=new_vault_password_files,
                                         ask_vault_pass=self.options.ask_vault_pass,
                                         create_new_password=True)

            if not new_vault_secrets:
                raise AnsibleOptionsError(
                    "A new vault password is required to use Ansible's Vault rekey"
                )

            # There is only one new_vault_id currently and one new_vault_secret, or we
            # use the id specified in --encrypt-vault-id
            new_encrypt_secret = match_encrypt_secret(
                new_vault_secrets, encrypt_vault_id=encrypt_vault_id)

            self.new_encrypt_vault_id = new_encrypt_secret[0]
            self.new_encrypt_secret = new_encrypt_secret[1]

        loader.set_vault_secrets(vault_secrets)

        # FIXME: do we need to create VaultEditor here? its not reused
        vault = VaultLib(vault_secrets)
        self.editor = VaultEditor(vault)

        self.execute()

        # and restore umask
        os.umask(old_umask)
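
The encrypt branch above boils down to: collect the available secrets, pick the one matching --encrypt-vault-id, and hand both to VaultLib. A condensed programmatic sketch of that flow (the secrets are made up and error handling is omitted):

from ansible.parsing.vault import VaultLib, VaultSecret, match_encrypt_secret

# Normally assembled by setup_vault_secrets() from vault ids, password files and prompts.
vault_secrets = [('dev', VaultSecret(b'dev-password')),
                 ('prod', VaultSecret(b'prod-password'))]

# Choose the secret to encrypt with (here: the 'prod' vault id).
encrypt_vault_id, encrypt_secret = match_encrypt_secret(vault_secrets,
                                                        encrypt_vault_id='prod')

vault = VaultLib(vault_secrets)
vaulttext = vault.encrypt('super secret', secret=encrypt_secret,
                          vault_id=encrypt_vault_id)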
Example #52
0
    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault_password = vault_password
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)
Example #53
0
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()

        # initialize the vault stuff with an empty password
        self.set_vault_password(None)

    def set_vault_password(self, vault_password):
        self._vault_password = vault_password
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            # if loading JSON failed for any reason, we go ahead
            # and try to parse it as YAML instead

            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                new_data = text_type(data)
            else:
                new_data = data
            try:
                new_data = self._safe_load(new_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos
            return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (file_data, show_content) = self._get_file_contents(file_name)
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        # return a deep copy here, so the cache is not affected
        return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(file_name, 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False

            data = to_unicode(data, errors='strict')
            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_unicode(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_unicode(given, errors='strict')

        if given.startswith(u"/"):
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            basedir = to_unicode(self._basedir, errors='strict')
            return os.path.abspath(os.path.join(basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        isrole = False

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith('/'):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))

            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path,'main.yml'), errors='strict')) \
                or os.path.exists(to_bytes(os.path.join(path,'tasks/main.yml'), errors='strict')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname,source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='strict')):
                break

        return candidate
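
    # Illustrative search order (hypothetical paths) for a call such as
    # path_dwim_relative('/plays/roles/web/tasks', 'templates', 'foo.j2'):
    #   1. /plays/roles/web/tasks/templates/foo.j2    (path + dirname + source)
    #   2. /plays/roles/web/templates/foo.j2          (role base + dirname + source)
    #   3. /plays/roles/web/tasks/foo.j2              (role tasks dir + source)
    #   4. <loader basedir>/templates/foo.j2          (basedir + dirname + source)
    #   5. /plays/roles/web/foo.j2                    (role base + source)
    #   6. <loader basedir>/foo.j2                    (basedir + source)
    # The first candidate that exists wins; if none exist, the last one is returned.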

    def read_vault_password_file(self, vault_password_file):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(to_bytes(os.path.expanduser(vault_password_file), errors='strict'))
        if not os.path.exists(to_bytes(this_path, errors='strict')):
            raise AnsibleFileNotFound("The vault password file %s was not found" % this_path)

        if self.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e))
            stdout, stderr = p.communicate()
            self.set_vault_password(stdout.strip('\r\n'))
        else:
            try:
                f = open(this_path, "rb")
                self.set_vault_password(f.read().strip())
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))
예제 #54
0
 def test_is_encrypted(self):
     v = VaultLib(None)
     assert not v.is_encrypted(u"foobar"), "encryption check on plaintext failed"
     data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
     assert v.is_encrypted(data), "encryption check on headered text failed"
예제 #55
0
 def test_is_encrypted_bytes(self):
     v = VaultLib(None)
     assert not v.is_encrypted(
         b"foobar"), "encryption check on plaintext failed"
     data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" + hexlify(b"ansible")
     assert v.is_encrypted(data), "encryption check on headered text failed"
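Both checks above key only on the $ANSIBLE_VAULT;<version>;<cipher> envelope header, not on the payload. A minimal sketch (hypothetical header values; the real ansible.parsing.vault import path is assumed):

from binascii import hexlify
from ansible.parsing.vault import VaultLib

header = b"$ANSIBLE_VAULT;9.9;TEST"
payload = hexlify(b"ansible")             # any hex body is enough for the check
data = header + b"\n" + payload

v = VaultLib(None)
assert v.is_encrypted(data)               # header present -> treated as vaulted
assert not v.is_encrypted(b"plaintext")   # no header -> plaintext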
예제 #56
0
    def run(self):
        super(VaultCLI, self).run()
        loader = DataLoader()

        # set default restrictive umask
        old_umask = os.umask(0o077)

        vault_ids = self.options.vault_ids

        # there are 3 types of actions: those that just 'read' (decrypt, view) and only
        # need to ask for a password once; those that 'write' (create, encrypt) and ask
        # for a new password and confirm it; and 'read/write' (rekey), which asks for the
        # old password, then asks for a new one and confirms it.

        default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST
        vault_ids = default_vault_ids + vault_ids

        # TODO: instead of prompting for these before, we could let VaultEditor
        #       call a callback when it needs it.
        if self.action in ['decrypt', 'view', 'rekey']:
            vault_secrets = self.setup_vault_secrets(
                loader,
                vault_ids=vault_ids,
                vault_password_files=self.options.vault_password_files,
                ask_vault_pass=self.options.ask_vault_pass)
            if not vault_secrets:
                raise AnsibleOptionsError(
                    "A vault password is required to use Ansible's Vault")

        if self.action in ['encrypt', 'encrypt_string', 'create', 'edit']:
            if len(vault_ids) > 1:
                raise AnsibleOptionsError(
                    "Only one --vault-id can be used for encryption")

            vault_secrets = None
            vault_secrets = \
                self.setup_vault_secrets(loader,
                                         vault_ids=vault_ids,
                                         vault_password_files=self.options.vault_password_files,
                                         ask_vault_pass=self.options.ask_vault_pass,
                                         create_new_password=True)
            if not vault_secrets:
                raise AnsibleOptionsError(
                    "A vault password is required to use Ansible's Vault")

            encrypt_secret = match_encrypt_secret(vault_secrets)
            # only one secret for encrypt for now, use the first vault_id and use its first secret
            # self.encrypt_vault_id = list(vault_secrets.keys())[0]
            # self.encrypt_secret = vault_secrets[self.encrypt_vault_id][0]
            self.encrypt_vault_id = encrypt_secret[0]
            self.encrypt_secret = encrypt_secret[1]

        if self.action in ['rekey']:
            new_vault_ids = []
            if self.options.new_vault_id:
                new_vault_ids.append(self.options.new_vault_id)

            new_vault_secrets = \
                self.setup_vault_secrets(loader,
                                         vault_ids=new_vault_ids,
                                         vault_password_files=self.options.new_vault_password_files,
                                         ask_vault_pass=self.options.ask_vault_pass,
                                         create_new_password=True)

            if not new_vault_secrets:
                raise AnsibleOptionsError(
                    "A new vault password is required to use Ansible's Vault rekey"
                )

            # There is only one new_vault_id currently and one new_vault_secret
            new_encrypt_secret = match_encrypt_secret(new_vault_secrets)

            self.new_encrypt_vault_id = new_encrypt_secret[0]
            self.new_encrypt_secret = new_encrypt_secret[1]

        loader.set_vault_secrets(vault_secrets)

        # FIXME: do we need to create VaultEditor here? it's not reused
        vault = VaultLib(vault_secrets)
        self.editor = VaultEditor(vault)

        self.execute()

        # and restore umask
        os.umask(old_umask)
예제 #57
0
 def _vault_editor(self, vault_secrets=None):
     if vault_secrets is None:
         vault_secrets = self._secrets(self.vault_password)
     return VaultEditor(VaultLib(vault_secrets))
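A hedged sketch of how such a test helper is typically used; the encrypt_file/decrypt_file names and signatures are assumed from the 2.4-era VaultEditor and may differ across versions.

from ansible.parsing.vault import match_encrypt_secret

ve = self._vault_editor()
secret = match_encrypt_secret(self._secrets(self.vault_password))[1]
ve.encrypt_file('/tmp/vaulted.yml', secret)   # assumed signature: in-place encryption
ve.decrypt_file('/tmp/vaulted.yml')           # assumed signature: back to plaintext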
예제 #58
0
File: __init__.py Project: dataxu/ansible
    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault_password = vault_password
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)
예제 #59
0
 def __init__(self, file_name=None, vault_password=None):
     self._vault_password = vault_password
     self._ansible_file_name = file_name
     super(AnsibleConstructor, self).__init__()
     self._vaults = {}
     self._vaults['default'] = VaultLib(password=self._vault_password)
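The per-vault-id VaultLib objects above are consumed when YAML is loaded through AnsibleLoader, which is built on this constructor. A rough sketch using only calls already shown in the DataLoader example (the file name is hypothetical, and some versions also accept a vault password or secrets argument on AnsibleLoader):

from ansible.parsing.yaml.loader import AnsibleLoader

with open('group_vars/all.yml') as stream:
    loader = AnsibleLoader(stream, 'group_vars/all.yml')
    try:
        # vaulted values are routed through the constructor's VaultLib when a password was supplied
        data = loader.get_single_data()
    finally:
        loader.dispose()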