Example #1
 def test_is_encrypted(self):
     v = VaultLib(None)
     assert not v.is_encrypted(
         "foobar".encode('utf-8')), "encryption check on plaintext failed"
     data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
     assert v.is_encrypted(
         data.encode('utf-8')), "encryption check on headered text failed"
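The test above only checks for the $ANSIBLE_VAULT header. A minimal sketch of the check-then-decrypt pattern the later examples build on, assuming the pre-2.4 VaultLib API used throughout this page (the constructor takes the password; is_encrypted()/decrypt() take the raw file contents) and the Ansible 2.x import path; the helper name and file handling are illustrative:

from ansible.parsing.vault import VaultLib

def read_possibly_vaulted(path, vault_password):
    # read the raw contents; is_encrypted() only inspects the header line
    with open(path, 'rb') as f:
        data = f.read()

    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        # decrypt() raises an AnsibleError when the password does not match
        data = vault.decrypt(data)
    return data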
Example #2
    def _run(self, tmp=None, task_vars=None):
        err = self.check_args(self.ARGS, self._task.args)
        if err:
            return err

        getarg = lambda n, d=None: self._task.args.get(n, d)

        dest = getarg('dest')
        content = getarg('content')
        encrypt = getarg('encrypt', False)
        if isinstance(encrypt, basestring):
            encrypt = encrypt in ('yes', 'true', '1')

        if encrypt:
            password = self._task._loader._vault_password
            vault = VaultLib(password=password)
            if vault.is_encrypted(content):
                return fail(E_ALREADYCRYPT)
            content = vault.encrypt(content)

        root = self._loader.get_basedir()
        if self._task._role is not None:
            root = self._task._role._role_path

        outpath = os.path.join(root, dest)
        parent = os.path.dirname(outpath)
        if not os.path.exists(parent):
            os.makedirs(parent)

        with open(outpath, 'wb') as f:
            f.write(content)

        return dict(path=outpath)
Example #3
def get_key_value(key):
    vault_password = open("/tmp/.vaultpwd").readlines()[0].rstrip('\n')
    data = open("keychain.yml").read()

    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        data = vault.decrypt(data)
        ydata = yaml.load(data)
        return ydata['aws'][key]
    else:
        return None
Example #4
def get_key_value(key):
    vault_password = open("/tmp/.vaultpwd").readlines()[0].rstrip('\n')
    data = open("keychain.yml").read()

    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        data = vault.decrypt(data)
        ydata = yaml.load(data)
        return ydata['aws'][key]
    else:
        return None
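A slightly more defensive version of the same helper; a sketch that keeps the behaviour above (None when the file is not vault-encrypted) and assumes a PyYAML release where safe_load is available:

def get_key_value(key):
    with open("/tmp/.vaultpwd") as f:
        vault_password = f.readline().rstrip('\n')
    with open("keychain.yml") as f:
        data = f.read()

    vault = VaultLib(password=vault_password)
    if not vault.is_encrypted(data):
        return None

    # safe_load avoids constructing arbitrary Python objects from the YAML
    ydata = yaml.safe_load(vault.decrypt(data))
    return ydata['aws'][key]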
Example #5
class AnsibleAdapter20(AnsibleAdapterBase):
    def init(self):
        self.vault_editor = VaultEditor(self.vault_password)
        self.vault_lib = VaultLib(self.vault_password)

    def is_encrypted_vault(self, filename):
        return self.vault_lib.is_encrypted(utils.read_file_contents(filename))

    def encrypt_vault(self, filename):
        self.vault_editor.encrypt_file(filename)

    def decrypt_vault(self, filename):
        self.vault_editor.decrypt_file(filename)

    def vault_plaintext(self, filename):
        return self.vault_editor.plaintext(filename)
Example #6
class DataLoader():
    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='******')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    _FILE_CACHE = dict()

    def __init__(self, vault_password=None):
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string. 
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            try:
                # if loading JSON failed for any reason, we go ahead
                # and try to parse it as YAML instead
                return self._safe_load(data)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            return self._FILE_CACHE[file_name]

        # read the file contents and load the data structure from them
        (file_data, show_content) = self._get_file_contents(file_name)
        parsed_data = self.load(data=file_data,
                                file_name=file_name,
                                show_content=show_content)

        # cache the file contents for next time
        self._FILE_CACHE[file_name] = parsed_data

        return parsed_data

    def _safe_load(self, stream):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''
        return load(stream, AnsibleLoader)

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not os.path.exists(file_name) or not os.path.isfile(file_name):
            raise AnsibleParserError(
                "the file_name '%s' does not exist, or is not readable" %
                file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occured while trying to read the file '%s': %s" %
                (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.set_position_info(file_name,
                                      yaml_exc.problem_mark.line + 1,
                                      yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR,
                                 obj=err_obj,
                                 show_content=show_content)
Example #7
class DataLoader():
    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='******')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''
    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault_password = vault_password
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string. 
        '''

        #print("in load, data is: %s (%s)" % (data, type(data)))
        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            try:
                # if loading JSON failed for any reason, we go ahead
                # and try to parse it as YAML instead
                return self._safe_load(data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            return self._FILE_CACHE[file_name]

        # read the file contents and load the data structure from them
        (file_data, show_content) = self._get_file_contents(file_name)
        parsed_data = self.load(data=file_data,
                                file_name=file_name,
                                show_content=show_content)

        # cache the file contents for next time
        self._FILE_CACHE[file_name] = parsed_data

        return parsed_data

    def path_exists(self, path):
        return os.path.exists(path)

    def is_directory(self, path):
        return os.path.isdir(path)

    def is_file(self, path):
        return os.path.isfile(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        #print("stream is: %s" % stream)
        #print("file name is: %s" % file_name)
        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            loader.dispose()

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleParserError(
                "the file_name '%s' does not exist, or is not readable" %
                file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occured while trying to read the file '%s': %s" %
                (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.set_position_info(file_name,
                                      yaml_exc.problem_mark.line + 1,
                                      yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR,
                                 obj=err_obj,
                                 show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = basedir

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)

        if given.startswith("/"):
            return os.path.abspath(given)
        elif given.startswith("~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            return os.path.abspath(os.path.join(self._basedir, given))

    def path_dwim_relative(self, role_path, dirname, source):
        ''' find one file in a directory one level up in a dir named dirname relative to current '''

        basedir = os.path.dirname(role_path)
        if os.path.islink(basedir):
            # FIXME: implement unfrackpath
            #basedir = unfrackpath(basedir)
            template2 = os.path.join(basedir, dirname, source)
        else:
            template2 = os.path.join(basedir, '..', dirname, source)

        source1 = os.path.join(role_path, dirname, source)
        if os.path.exists(source1):
            return source1

        cur_basedir = self._basedir
        self.set_basedir(basedir)
        source2 = self.path_dwim(template2)
        if os.path.exists(source2):
            self.set_basedir(cur_basedir)
            return source2

        obvious_local_path = self.path_dwim(source)
        if os.path.exists(obvious_local_path):
            self.set_basedir(cur_basedir)
            return obvious_local_path

        self.set_basedir(cur_basedir)
        return source2  # which does not exist
Example #8
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='******')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault_password = vault_password
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string. 
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            try:
                # if loading JSON failed for any reason, we go ahead
                # and try to parse it as YAML instead
                return self._safe_load(data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            return self._FILE_CACHE[file_name]

        # read the file contents and load the data structure from them
        (file_data, show_content) = self._get_file_contents(file_name)
        parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

        # cache the file contents for next time
        self._FILE_CACHE[file_name] = parsed_data

        return parsed_data

    def path_exists(self, path):
        return os.path.exists(path)

    def is_directory(self, path):
        return os.path.isdir(path)

    def is_file(self, path):
        return os.path.isfile(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            loader.dispose()

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleParserError("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occured while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.set_position_info(file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = basedir

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)

        if given.startswith("/"):
            return os.path.abspath(given)
        elif given.startswith("~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            return os.path.abspath(os.path.join(self._basedir, given))

    def path_dwim_relative(self, role_path, dirname, source):
        ''' find one file in a directory one level up in a dir named dirname relative to current '''

        basedir = os.path.dirname(role_path)
        if os.path.islink(basedir):
            basedir = unfrackpath(basedir)
            template2 = os.path.join(basedir, dirname, source)
        else:
            template2 = os.path.join(basedir, '..', dirname, source)

        source1 = os.path.join(role_path, dirname, source)
        if os.path.exists(source1):
            return source1

        cur_basedir = self._basedir
        self.set_basedir(basedir)
        source2 = self.path_dwim(template2)
        if os.path.exists(source2):
            self.set_basedir(cur_basedir)
            return source2

        obvious_local_path = self.path_dwim(source)
        if os.path.exists(obvious_local_path):
            self.set_basedir(cur_basedir)
            return obvious_local_path

        self.set_basedir(cur_basedir)
        return source2 # which does not exist
Example #9
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='******')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self, vault_password=None):
        self._basedir = '.'
        self._vault_password = vault_password
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string. 
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        except:
            # if loading JSON failed for any reason, we go ahead
            # and try to parse it as YAML instead

            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                new_data = unicode(data)
            else:
                new_data = data
            try:
                new_data = self._safe_load(new_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos
            return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (file_data, show_content) = self._get_file_contents(file_name)
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        # return a deep copy here, so the cache is not affected
        return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(path)

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(path)

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(path)

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            loader.dispose()

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, basestring):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleParserError("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_unicode(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)

        if given.startswith("/"):
            return os.path.abspath(given)
        elif given.startswith("~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            return os.path.abspath(os.path.join(self._basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        ''' find one file in a role/playbook dirs with/without dirname subdir '''

        search = []
        isrole = False

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith('/'):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))

            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if path.endswith('tasks') and os.path.exists(os.path.join(path,'main.yml')) \
                or os.path.exists(os.path.join(path,'tasks/main.yml')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname,source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(candidate):
                break

        return candidate
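path_dwim_relative() above searches the role/play location first and then falls back towards the loader basedir; an illustrative call against this DataLoader variant (the paths are made up):

loader = DataLoader(vault_password=None)
loader.set_basedir('/srv/playbooks')

# look for roles/web/templates/nginx.conf.j2 first, then fall back along
# the search list built inside path_dwim_relative()
found = loader.path_dwim_relative('/srv/playbooks/roles/web/tasks',
                                  'templates', 'nginx.conf.j2')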
Example #10
 def test_is_encrypted(self):
     v = VaultLib(None)
     assert not v.is_encrypted(u"foobar"), "encryption check on plaintext failed"
     data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
     assert v.is_encrypted(data), "encryption check on headered text failed"
Example #11
class DataLoader():
    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''
    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        self.set_vault_password(None)

    def set_vault_password(self, vault_password):
        self._vault_password = vault_password
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''
        new_data = None
        try:
            # we first try to load this data as JSON
            new_data = json.loads(data)
        except:
            # must not be JSON, let the rest try
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data
            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos

        return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (file_data, show_content) = self._get_file_contents(file_name)
            parsed_data = self.load(data=file_data,
                                    file_name=file_name,
                                    show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        # return a deep copy here, so the cache is not affected
        return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path,
                                       errors='strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass  # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        b_file_name = to_bytes(file_name)
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound(
                "the file_name '%s' does not exist, or is not readable" %
                file_name)

        show_content = True
        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data, filename=b_file_name)
                    show_content = False

            data = to_unicode(data, errors='strict')
            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1,
                                   yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR,
                                 obj=err_obj,
                                 show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_unicode(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_unicode(given, errors='strict')

        if given.startswith(u"/"):
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            basedir = to_unicode(self._basedir, errors='strict')
            return os.path.abspath(os.path.join(basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        isrole = False

        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith(os.path.sep):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path,'main.yml'), errors='strict')) \
                or os.path.exists(to_bytes(os.path.join(path,'tasks/main.yml'), errors='strict')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(
                self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(
                    self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='strict')):
                break

        return candidate

    def path_dwim_relative_stack(self, paths, dirname, source):
        '''
        find one file in first path in stack taking roles into account and adding play basedir as fallback
        '''
        result = None
        if source.startswith('~') or source.startswith(os.path.sep):
            # path is absolute, no relative needed, check existence and return source
            test_path = to_bytes(unfrackpath(source), errors='strict')
            if os.path.exists(test_path):
                result = test_path
        else:
            search = []
            for path in paths:
                upath = unfrackpath(path)
                mydir = os.path.dirname(upath)

                # if path is in role and 'tasks' not there already, add it into the search
                if upath.endswith('tasks') and os.path.exists(to_bytes(os.path.join(upath,'main.yml'), errors='strict')) \
                    or os.path.exists(to_bytes(os.path.join(upath,'tasks/main.yml'), errors='strict')) \
                    or os.path.exists(to_bytes(os.path.join(os.path.dirname(upath),'tasks/main.yml'), errors='strict')):
                    if mydir.endswith('tasks'):
                        search.append(
                            os.path.join(os.path.dirname(mydir), dirname,
                                         source))
                        search.append(os.path.join(mydir, source))
                    else:
                        search.append(os.path.join(upath, dirname, source))
                        search.append(os.path.join(upath, 'tasks', source))
                elif dirname not in source.split('/'):
                    # don't add dirname if user already is using it in source
                    search.append(os.path.join(upath, dirname, source))
                    search.append(os.path.join(upath, source))

            # always append basedir as last resort
            search.append(os.path.join(self.get_basedir(), dirname, source))
            search.append(os.path.join(self.get_basedir(), source))

            display.debug('search_path:\n\t' + '\n\t'.join(search))
            for candidate in search:
                display.vvvvv('looking for "%s" at "%s"' % (source, candidate))
                if os.path.exists(to_bytes(candidate, errors='strict')):
                    result = candidate
                    break

        return result

    def read_vault_password_file(self, vault_password_file):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(
            to_bytes(os.path.expanduser(vault_password_file), errors='strict'))
        if not os.path.exists(to_bytes(this_path, errors='strict')):
            raise AnsibleFileNotFound(
                "The vault password file %s was not found" % this_path)

        if self.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError(
                    "Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file."
                    % (this_path, e))
            stdout, stderr = p.communicate()
            self.set_vault_password(stdout.strip('\r\n'))
        else:
            try:
                f = open(this_path, "rb")
                self.set_vault_password(f.read().strip())
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError(
                    "Could not read vault password file %s: %s" %
                    (this_path, e))

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleaned up in the destructor
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_path))

        if not self.path_exists(file_path) or not self.is_file(file_path):
            raise AnsibleFileNotFound(
                "the file_name '%s' does not exist, or is not readable" %
                file_path)

        if not self._vault:
            self._vault = VaultLib(password="")

        real_path = self.path_dwim(file_path)

        try:
            with open(to_bytes(real_path), 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    # if the file is encrypted and no password was specified,
                    # the decrypt call would throw an error, but we check first
                    # since the decrypt function doesn't know the file name
                    if not self._vault_password:
                        raise AnsibleParserError(
                            "A vault password must be specified to decrypt %s"
                            % file_path)

                    data = self._vault.decrypt(data, filename=real_path)
                    # Make a temp file
                    real_path = self._create_content_tempfile(data)
                    self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "an error occurred while trying to read the file '%s': %s" %
                (real_path, str(e)))

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except:
                pass  #TODO: this should at least warn
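get_real_file() and cleanup_tmp_file() from the example above are meant to be used as a pair; a short usage sketch (the path and password are illustrative):

loader = DataLoader()
loader.set_vault_password('example-password')

# returns the resolved path for plaintext files, or the path of a
# decrypted temporary copy for vault-encrypted ones
real_path = loader.get_real_file('group_vars/all/secrets.yml')
try:
    with open(real_path) as f:
        print(f.read())
finally:
    # a no-op for paths that are not tracked temporary files
    loader.cleanup_tmp_file(real_path)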
Example #12
 def test_is_encrypted_bytes(self):
     v = VaultLib(None)
     assert not v.is_encrypted(b"foobar"), "encryption check on plaintext failed"
     data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" + hexlify(b"ansible")
     assert v.is_encrypted(data), "encryption check on headered text failed"
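In the versions these tests target, is_encrypted() accepts either text or bytes and only inspects the header, which is why both snippets above pass; a small sketch of that equivalence (the payload after the header is a dummy):

v = VaultLib(None)
assert v.is_encrypted(u"$ANSIBLE_VAULT;1.1;AES256\n6162")
assert v.is_encrypted(b"$ANSIBLE_VAULT;1.1;AES256\n6162")
assert not v.is_encrypted(u"plain: yaml")
assert not v.is_encrypted(b"plain: yaml")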
Example #13
class TestVaultLib(unittest.TestCase):
    def setUp(self):
        self.v = VaultLib('test-vault-password')

    def test_encrypt(self):
        plaintext = u'Some text to encrypt in a café'
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_encrypt_bytes(self):

        plaintext = to_bytes(u'Some text to encrypt in a café')
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_is_encrypted(self):
        self.assertFalse(
            self.v.is_encrypted(b"foobar"),
            msg="encryption check on plaintext yielded false positive")
        b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        self.assertTrue(self.v.is_encrypted(b_data),
                        msg="encryption check on headered text failed")

    def test_format_output(self):
        self.v.cipher_name = "TEST"
        b_ciphertext = b"ansible"
        b_vaulttext = self.v._format_output(b_ciphertext)
        b_lines = b_vaulttext.split(b'\n')
        self.assertGreater(len(b_lines),
                           1,
                           msg="failed to properly add header")

        b_header = b_lines[0]
        self.assertTrue(b_header.endswith(b';TEST'),
                        msg="header does not end with cipher name")

        b_header_parts = b_header.split(b';')
        self.assertEqual(len(b_header_parts),
                         3,
                         msg="header has the wrong number of parts")
        self.assertEqual(b_header_parts[0],
                         b'$ANSIBLE_VAULT',
                         msg="header does not start with $ANSIBLE_VAULT")
        self.assertEqual(b_header_parts[1],
                         self.v.b_version,
                         msg="header version is incorrect")
        self.assertEqual(b_header_parts[2],
                         b'TEST',
                         msg="header does not end with cipher name")

    def test_split_header(self):
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
        b_ciphertext = self.v._split_header(b_vaulttext)
        b_lines = b_ciphertext.split(b'\n')
        self.assertEqual(b_lines[0],
                         b"ansible",
                         msg="Payload was not properly split from the header")
        self.assertEqual(self.v.cipher_name,
                         u'TEST',
                         msg="cipher name was not properly set")
        self.assertEqual(self.v.b_version,
                         b"9.9",
                         msg="version was not properly set")

    def test_encrypt_decrypt_aes(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        self.v.b_password = b'ansible'
        # AES encryption code has been removed, so this is old output for
        # AES-encrypted 'foobar' with password 'ansible'.
        b_vaulttext = b'''$ANSIBLE_VAULT;1.1;AES
53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3
fe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e
786a5a15efeb787e1958cbdd480d076c
'''
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        plaintext = u"foobar"
        b_vaulttext = self.v.encrypt(plaintext)
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertNotEqual(b_vaulttext, b"foobar", msg="encryption failed")
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256_existing_vault(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        b_orig_plaintext = b"Setec Astronomy"
        vaulttext = u'''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''

        b_plaintext = self.v.decrypt(vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

        b_vaulttext = to_bytes(vaulttext, encoding='ascii', errors='strict')
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext,
                         b_orig_plaintext,
                         msg="decryption failed")

    def test_encrypt_decrypt_aes256_bad_hmac(self):
        # FIXME This test isn't working quite yet.
        raise SkipTest

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = 'AES256'
        # plaintext = "Setec Astronomy"
        enc_data = '''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''
        b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
        b_data = self.v._split_header(b_data)
        foo = binascii.unhexlify(b_data)
        lines = foo.splitlines()
        # line 0 is salt, line 1 is hmac, line 2+ is ciphertext
        b_salt = lines[0]
        b_hmac = lines[1]
        b_ciphertext_data = b'\n'.join(lines[2:])

        b_ciphertext = binascii.unhexlify(b_ciphertext_data)
        # b_orig_ciphertext = b_ciphertext[:]

        # now muck with the text
        # b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
        # b_munged_ciphertext = b_ciphertext
        # assert b_orig_ciphertext != b_munged_ciphertext

        b_ciphertext_data = binascii.hexlify(b_ciphertext)
        b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
        # reformat
        b_invalid_ciphertext = self.v._format_output(b_payload)

        # assert we throw an error
        self.v.decrypt(b_invalid_ciphertext)

    def test_encrypt_encrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        vaulttext = to_text(b_vaulttext, errors='strict')
        self.assertRaises(errors.AnsibleError, self.v.encrypt, b_vaulttext)
        self.assertRaises(errors.AnsibleError, self.v.encrypt, vaulttext)

    def test_decrypt_decrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, plaintext)

        b_plaintext = b"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, b_plaintext)

    def test_cipher_not_set(self):
        # not setting the cipher should default to AES256
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.v.encrypt(plaintext)
        self.assertEqual(self.v.cipher_name, "AES256")
Example #14
class TestVaultLib(unittest.TestCase):
    def setUp(self):
        self.v = VaultLib('test-vault-password')

    def test_encrypt(self):
        plaintext = u'Some text to encrypt in a café'
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_encrypt_bytes(self):

        plaintext = to_bytes(u'Some text to encrypt in a café')
        b_vaulttext = self.v.encrypt(plaintext)

        self.assertIsInstance(b_vaulttext, six.binary_type)

        b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
        self.assertEqual(b_vaulttext[:len(b_header)], b_header)

    def test_is_encrypted(self):
        self.assertFalse(self.v.is_encrypted(b"foobar"), msg="encryption check on plaintext yielded false positive")
        b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        self.assertTrue(self.v.is_encrypted(b_data), msg="encryption check on headered text failed")

    def test_format_output(self):
        self.v.cipher_name = "TEST"
        b_ciphertext = b"ansible"
        b_vaulttext = self.v._format_output(b_ciphertext)
        b_lines = b_vaulttext.split(b'\n')
        self.assertGreater(len(b_lines), 1, msg="failed to properly add header")

        b_header = b_lines[0]
        self.assertTrue(b_header.endswith(b';TEST'), msg="header does not end with cipher name")

        b_header_parts = b_header.split(b';')
        self.assertEqual(len(b_header_parts), 3, msg="header has the wrong number of parts")
        self.assertEqual(b_header_parts[0], b'$ANSIBLE_VAULT', msg="header does not start with $ANSIBLE_VAULT")
        self.assertEqual(b_header_parts[1], self.v.b_version, msg="header version is incorrect")
        self.assertEqual(b_header_parts[2], b'TEST', msg="header does not end with cipher name")

    def test_split_header(self):
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
        b_ciphertext = self.v._split_header(b_vaulttext)
        b_lines = b_ciphertext.split(b'\n')
        self.assertEqual(b_lines[0], b"ansible", msg="Payload was not properly split from the header")
        self.assertEqual(self.v.cipher_name, u'TEST', msg="cipher name was not properly set")
        self.assertEqual(self.v.b_version, b"9.9", msg="version was not properly set")

    def test_encrypt_decrypt_aes(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        self.v.b_password = b'ansible'
        # AES encryption code has been removed, so this is old output for
        # AES-encrypted 'foobar' with password 'ansible'.
        b_vaulttext = b'''$ANSIBLE_VAULT;1.1;AES
53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3
fe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e
786a5a15efeb787e1958cbdd480d076c
'''
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        plaintext = u"foobar"
        b_vaulttext = self.v.encrypt(plaintext)
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertNotEqual(b_vaulttext, b"foobar", msg="encryption failed")
        self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")

    def test_encrypt_decrypt_aes256_existing_vault(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES256'
        b_orig_plaintext = b"Setec Astronomy"
        vaulttext = u'''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''

        b_plaintext = self.v.decrypt(vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

        b_vaulttext = to_bytes(vaulttext, encoding='ascii', errors='strict')
        b_plaintext = self.v.decrypt(b_vaulttext)
        self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")

    def test_encrypt_decrypt_aes256_bad_hmac(self):
        # FIXME This test isn't working quite yet.
        raise SkipTest

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = 'AES256'
        # plaintext = "Setec Astronomy"
        enc_data = '''$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138'''
        b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
        b_data = self.v._split_header(b_data)
        b_unhexed = binascii.unhexlify(b_data)
        lines = b_unhexed.splitlines()
        # line 0 is salt, line 1 is hmac, line 2+ is ciphertext
        b_salt = lines[0]
        b_hmac = lines[1]
        b_ciphertext_data = b'\n'.join(lines[2:])

        b_ciphertext = binascii.unhexlify(b_ciphertext_data)
        # b_orig_ciphertext = b_ciphertext[:]

        # now muck with the text
        # b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
        # b_munged_ciphertext = b_ciphertext
        # assert b_orig_ciphertext != b_munged_ciphertext

        b_ciphertext_data = binascii.hexlify(b_ciphertext)
        b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
        # reformat
        b_invalid_ciphertext = self.v._format_output(b_payload)

        # assert we throw an error
        self.v.decrypt(b_invalid_ciphertext)

    def test_encrypt_encrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        self.v.cipher_name = u'AES'
        b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        vaulttext = to_text(b_vaulttext, errors='strict')
        self.assertRaises(errors.AnsibleError, self.v.encrypt, b_vaulttext)
        self.assertRaises(errors.AnsibleError, self.v.encrypt, vaulttext)

    def test_decrypt_decrypted(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, plaintext)

        b_plaintext = b"ansible"
        self.assertRaises(errors.AnsibleError, self.v.decrypt, b_plaintext)

    def test_cipher_not_set(self):
        # not setting the cipher should default to AES256
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        plaintext = u"ansible"
        self.v.encrypt(plaintext)
        self.assertEqual(self.v.cipher_name, "AES256")
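
For reference, the vault envelope exercised by these tests is a single header line of the form $ANSIBLE_VAULT;<version>;<cipher-name> followed by hex-encoded payload lines. The snippet below is a minimal round-trip sketch against the same 2.x-era VaultLib API used above (password passed to the constructor, bytes returned by decrypt); the password and plaintext values are illustrative only.

# Minimal sketch, assuming the ansible.parsing.vault module path of Ansible 2.x.
from ansible.parsing.vault import VaultLib

v = VaultLib(password='ansible')            # illustrative password
b_vaulttext = v.encrypt(u'foobar')          # cipher defaults to AES256 when not set
assert v.is_encrypted(b_vaulttext)          # header check only, no decryption attempted
assert v.decrypt(b_vaulttext) == b'foobar'  # decrypt returns bytes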
Example #15
0
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        self.set_vault_password(None)

    def set_vault_password(self, vault_password):
        self._vault_password = vault_password
        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''
        new_data = None
        try:
            # we first try to load this data as JSON
            new_data = json.loads(data)
        except:
            # must not be JSON, let the rest try
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data
            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos

        return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML.  '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (file_data, show_content) = self._get_file_contents(file_name)
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        # return a deep copy here, so the cache is not affected
        return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='strict'))

    def is_file(self, path):
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='strict')) or path == os.devnull

    def is_directory(self, path):
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='strict'))

    def list_directory(self, path):
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(file_name, 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False

            data = to_unicode(data, errors='strict')
            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_unicode(basedir)

    def path_dwim(self, given):
        '''
        Make relative paths work the way users expect: absolute paths and
        paths starting with '~' are expanded as-is, everything else is
        resolved relative to the current basedir.
        '''

        given = unquote(given)
        given = to_unicode(given, errors='strict')

        if given.startswith(u"/"):
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            basedir = to_unicode(self._basedir, errors='strict')
            return os.path.abspath(os.path.join(basedir, given))

    def path_dwim_relative(self, path, dirname, source):
        '''
        Find a file in either a role or playbook directory, with or without
        an explicitly named dirname subdirectory (e.g. 'templates', 'files', 'vars').

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        isrole = False

        # an absolute or home-relative path was given, so nothing else needs to be searched
        if source.startswith('~') or source.startswith('/'):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))

            basedir = unfrackpath(path)

            # is it a role and if so make sure you get correct base path
            if (path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path, 'main.yml'), errors='strict'))) \
                    or os.path.exists(to_bytes(os.path.join(path, 'tasks/main.yml'), errors='strict')):
                isrole = True
                if path.endswith('tasks'):
                    basedir = unfrackpath(os.path.dirname(path))

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)

            if isrole and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname,source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))

            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))

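        # return the first candidate that exists; if none exist, the last entry
        # in the search list is returned unchanged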
        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='strict')):
                break

        return candidate

    def read_vault_password_file(self, vault_password_file):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(to_bytes(os.path.expanduser(vault_password_file), errors='strict'))
        if not os.path.exists(to_bytes(this_path, errors='strict')):
            raise AnsibleFileNotFound("The vault password file %s was not found" % this_path)

        if self.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e))
            stdout, stderr = p.communicate()
            self.set_vault_password(stdout.strip('\r\n'))
        else:
            try:
                f = open(this_path, "rb")
                self.set_vault_password(f.read().strip())
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleanup in the destructor
        """

        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_path))

        if not self.path_exists(file_path) or not self.is_file(file_path):
            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_path)

        if not self._vault:
            self._vault = VaultLib(password="")

        real_path = self.path_dwim(file_path)

        try:
            with open(to_bytes(real_path), 'rb') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    # if the file is encrypted and no password was specified,
                    # the decrypt call would throw an error, but we check first
                    # since the decrypt function doesn't know the file name
                    if not self._vault_password:
                        raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)

                    data = self._vault.decrypt(data)
                    # Make a temp file
                    real_path = self._create_content_tempfile(data)
                    self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (real_path, str(e)))

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        # iterate over a copy, since cleanup_tmp_file() removes entries from the set
        for f in list(self._tempfiles):
            try:
                self.cleanup_tmp_file(f)
            except:
                pass  # TODO: this should at least warn
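
A short usage sketch tying the DataLoader pieces above together; the file paths below are hypothetical and the API is the 2.0-era one shown in this example (no constructor arguments, vault password set after construction).

# Illustrative only: all paths are hypothetical.
loader = DataLoader()
loader.set_basedir('/srv/playbooks')
loader.read_vault_password_file('~/.vault_pass.txt')    # sets the vault password internally

# load() parses a JSON or YAML string; load_from_file() reads, decrypts if needed, and caches
data = loader.load('{"region": "us-east-1"}')
secrets = loader.load_from_file('group_vars/all/vault.yml')

# get_real_file() returns a decrypted tempfile path for vault-encrypted files
path = loader.get_real_file('files/secret.pem')
try:
    contents = open(path, 'rb').read()
finally:
    loader.cleanup_tmp_file(path)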
Example #16
0
    def run(self, tmp=None, task_vars=None):
        ''' handler for file transfer operations '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        source  = self._task.args.get('src', None)
        content = self._task.args.get('content', None)
        dest    = self._task.args.get('dest', None)
        raw     = boolean(self._task.args.get('raw', 'no'))
        force   = boolean(self._task.args.get('force', 'yes'))
        faf     = self._task.first_available_file
        remote_src = boolean(self._task.args.get('remote_src', False))

        if (source is None and content is None and faf is None) or dest is None:
            result['failed'] = True
            result['msg'] = "src (or content) and dest are required"
            return result
        elif (source is not None or faf is not None) and content is not None:
            result['failed'] = True
            result['msg'] = "src and content are mutually exclusive"
            return result
        elif content is not None and dest is not None and dest.endswith("/"):
            result['failed'] = True
            result['msg'] = "dest must be a file if content is defined"
            return result

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if isinstance(content, dict) or isinstance(content, list):
                    content_tempfile = self._create_content_tempfile(json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception as err:
                result['failed'] = True
                result['msg'] = "could not write content temp file: %s" % err
                return result

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        elif faf:
            source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
            if source is None:
                result['failed'] = True
                result['msg'] = "could not find src in first_available_file list"
                return result

        elif remote_src:
            result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars, delete_remote_tmp=False))
            return result

        else:
            if self._task._role is not None:
                source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
            else:
                source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)

        # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
        source_files = []

        # If source is a directory populate our list else source is a file and translate it to a tuple.
        if os.path.isdir(source):
            # Get the number of characters to strip to get the relative path.
            if source_trailing_slash:
                sz = len(source)
            else:
                sz = len(source.rsplit('/', 1)[0]) + 1

            # Walk the directory and append the file tuples to source_files.
            for base_path, sub_folders, files in os.walk(source):
                for file in files:
                    full_path = os.path.join(base_path, file)
                    rel_path = full_path[sz:]
                    if rel_path.startswith('/'):
                        rel_path = rel_path[1:]
                    source_files.append((full_path, rel_path))

            # If it's recursive copy, destination is always a dir,
            # explicitly mark it so (note - copy module relies on this).
            if not self._connection._shell.path_has_trailing_slash(dest):
                dest = self._connection._shell.join_path(dest, '')
        else:
            source_files.append((source, os.path.basename(source)))

        changed = False
        module_return = dict(changed=False)

        # A register for if we executed a module.
        # Used to cut down on command calls when not recursive.
        module_executed = False

        # Tell _execute_module to delete the file if there is one file.
        delete_remote_tmp = (len(source_files) == 1)

        # If this is a recursive action, create a tmp path that we can share, as the one _execute_module creates is too late.
        if not delete_remote_tmp:
            if tmp is None or "-tmp-" not in tmp:
                tmp = self._make_tmp_path()

        # expand any user home dir specifier
        dest = self._remote_expand_user(dest)

        vault = VaultLib(password=self._loader._vault_password)
        diffs = []
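        # each source file is checked for vault encryption below; encrypted sources are
        # decrypted to a local tempfile before checksumming and transfer, and that tempfile
        # is removed once the file has been handled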
        for source_full, source_rel in source_files:

            vault_temp_file = None
            data = None

            try:
                data = open(source_full).read()
            except IOError:
                raise errors.AnsibleError("file could not read: %s" % source_full)

            if vault.is_encrypted(data):
                # if the file is encrypted and no password was specified,
                # the decrypt call would throw an error, but we check first
                # since the decrypt function doesn't know the file name
                if self._loader._vault_password is None:
                    raise errors.AnsibleError("A vault password must be specified to decrypt %s" % source_full)
                    
                data = vault.decrypt(data)
                # Make a temp file
                vault_temp_file = self._create_content_tempfile(data)
                source_full = vault_temp_file;
                
            # Generate a hash of the local file.
            local_checksum = checksum(source_full)

            # If local_checksum is not defined we can't find the file so we should fail out.
            if local_checksum is None:
                result['failed'] = True
                result['msg'] = "could not find src=%s" % source_full
                return result

            # This is a kind of optimization - if the user told us the destination is
            # a dir, do the path manipulation right away, otherwise we still check
            # for dest being a dir via the remote call below.
            if self._connection._shell.path_has_trailing_slash(dest):
                dest_file = self._connection._shell.join_path(dest, source_rel)
            else:
                dest_file = self._connection._shell.join_path(dest)

            # Attempt to get the remote checksum
            remote_checksum = self._remote_checksum(dest_file, all_vars=task_vars)

            if remote_checksum == '3':
                # The remote_checksum was executed on a directory.
                if content is not None:
                    # If source was defined as content remove the temporary file and fail out.
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    result['failed'] = True
                    result['msg'] = "can not use content with a dir as dest"
                    return result
                else:
                    # Append the relative source location to the destination and retry remote_checksum
                    dest_file = self._connection._shell.join_path(dest, source_rel)
                    remote_checksum = self._remote_checksum(dest_file, all_vars=task_vars)

            if remote_checksum != '1' and not force:
                # remote file already exists and force is off, so skip to the next iteration.
                continue

            if local_checksum != remote_checksum:
                # The checksums don't match and we will change or error out.
                changed = True

                # Create a tmp path if missing only if this is not recursive.
                # If this is recursive we already have a tmp path.
                if delete_remote_tmp:
                    if tmp is None or "-tmp-" not in tmp:
                        tmp = self._make_tmp_path()

                if self._play_context.diff and not raw:
                    diffs.append(self._get_diff_data(dest_file, source_full, task_vars))

                if self._play_context.check_mode:
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    changed = True
                    module_return = dict(changed=True)
                    continue

                # Define a remote directory that we will copy the file to.
                tmp_src = self._connection._shell.join_path(tmp, 'source')

                if not raw:
                    self._connection.put_file(source_full, tmp_src)
                else:
                    self._connection.put_file(source_full, dest_file)

                # We have copied the file remotely and no longer require our content_tempfile
                self._remove_tempfile_if_content_defined(content, content_tempfile)

                # Remove the vault tempfile if we have one
                if vault_temp_file:
                    os.remove(vault_temp_file)
                    vault_temp_file = None

                # fix file permissions when the copy is done as a different user
                if self._play_context.become and self._play_context.become_user != 'root':
                    self._remote_chmod('a+r', tmp_src)

                if raw:
                    # Continue to next iteration if raw is defined.
                    continue

                # Run the copy module

                # src and dest here come after original and override them
                # we pass dest only to make sure it includes trailing slash in case of recursive copy
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=tmp_src,
                        dest=dest,
                        original_basename=source_rel,
                    )
                )

                module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            else:
                # no need to transfer the file, already correct hash, but still need to call
                # the file module in case we want to change attributes
                self._remove_tempfile_if_content_defined(content, content_tempfile)

                # Remove the vault tempfile if we have one
                if vault_temp_file:
                    os.remove(vault_temp_file)
                    vault_temp_file = None

                if raw:
                    # Continue to next iteration if raw is defined.
                    self._remove_tmp_path(tmp)
                    continue

                # Build temporary module_args.
                new_module_args = self._task.args.copy()
                new_module_args.update(
                    dict(
                        src=source_rel,
                        dest=dest,
                        original_basename=source_rel
                    )
                )

                # Execute the file module.
                module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            if not module_return.get('checksum'):
                module_return['checksum'] = local_checksum
            if module_return.get('failed'):
                result.update(module_return)
                return result
            if module_return.get('changed'):
                changed = True

            # the file module returns the file path as 'path', but
            # the copy module uses 'dest', so add it if it's not there
            if 'path' in module_return and 'dest' not in module_return:
                module_return['dest'] = module_return['path']

        # Delete tmp path if we were recursive or if we did not execute a module.
        if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) or (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed):
            self._remove_tmp_path(tmp)

        if module_executed and len(source_files) == 1:
            result.update(module_return)
        else:
            result.update(dict(dest=dest, src=source, changed=changed))

        if diffs:
            result['diff'] = diffs

        return result
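
The vault handling inside the loop above follows a decrypt-to-tempfile pattern that can be isolated for reuse. The helper below is a hypothetical sketch, not part of the original action plugin; it assumes the same VaultLib API and the errors.AnsibleError / _create_content_tempfile-style environment used in the code above.

# Hypothetical helper distilling the decrypt-to-tempfile pattern used in the loop above.
def decrypt_source_if_needed(source_full, vault, vault_password, create_tempfile):
    """Return (path_to_use, tempfile_or_None); the caller removes the tempfile when done."""
    data = open(source_full).read()
    if not vault.is_encrypted(data):
        return source_full, None
    if vault_password is None:
        raise errors.AnsibleError("A vault password must be specified to decrypt %s" % source_full)
    decrypted = vault.decrypt(data)
    tmp_path = create_tempfile(decrypted)
    return tmp_path, tmp_path

In the plugin above this would be called with vault, self._loader._vault_password and self._create_content_tempfile, and the returned tempfile removed after the checksum/transfer step.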