コード例 #1
0
class TestDataLoader(unittest.TestCase):
    """Verify DataLoader parses JSON and YAML and rejects invalid documents."""

    def setUp(self):
        self._loader = DataLoader()

    def tearDown(self):
        pass

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, mock_contents):
        # JSON is a strict subset of YAML, so the loader handles it directly.
        mock_contents.return_value = ("""{"a": 1, "b": 2, "c": 3}""", True)
        parsed = self._loader.load_from_file('dummy_json.txt')
        self.assertEqual(parsed, {'a': 1, 'b': 2, 'c': 3})

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, mock_contents):
        mock_contents.return_value = ("""
        a: 1
        b: 2
        c: 3
        """, True)
        parsed = self._loader.load_from_file('dummy_yaml.txt')
        self.assertEqual(parsed, {'a': 1, 'b': 2, 'c': 3})

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, mock_contents):
        # Not valid YAML: the loader must raise rather than return junk.
        mock_contents.return_value = ("""
        TEXT:
            ***
               NOT VALID
        """, True)
        with self.assertRaises(AnsibleParserError):
            self._loader.load_from_file('dummy_yaml_bad.txt')
コード例 #2
0
class TestDataLoader(unittest.TestCase):
    """DataLoader parsing tests: JSON, YAML, and an invalid document."""

    def setUp(self):
        self._loader = DataLoader()

    def tearDown(self):
        pass

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, fake_contents):
        fake_contents.return_value = ("""{"a": 1, "b": 2, "c": 3}""", True)
        self.assertEqual(
            self._loader.load_from_file('dummy_json.txt'),
            dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, fake_contents):
        fake_contents.return_value = ("""
        a: 1
        b: 2
        c: 3
        """, True)
        self.assertEqual(
            self._loader.load_from_file('dummy_yaml.txt'),
            dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, fake_contents):
        # Malformed YAML must surface as an AnsibleParserError.
        fake_contents.return_value = ("""
        TEXT:
            ***
               NOT VALID
        """, True)
        self.assertRaises(
            AnsibleParserError,
            self._loader.load_from_file,
            'dummy_yaml_bad.txt')
コード例 #3
0
def boilerplate_module(modfile, args, interpreter, check, destfile):
    """Simulate what ansible does with new-style modules.

    :param modfile: path to the module source file
    :param args: module arguments - '@file', inline '{...}' YAML, or key=value
    :param interpreter: optional 'ansible_python_interpreter=/path' override
    :param check: when true, run the module in check mode
    :param destfile: where to save the generated module source
    :returns: tuple (generated_path, module_name, module_style)
    """
    loader = DataLoader()

    complex_args = {}
    if args.startswith("@"):
        # Argument is a YAML file (JSON is a subset of YAML)
        complex_args = utils_vars.combine_vars(complex_args,
                                               loader.load_from_file(args[1:]))
        args = ''
    elif args.startswith("{"):
        # Argument is an inline YAML document (not a file)
        complex_args = utils_vars.combine_vars(complex_args, loader.load(args))
        args = ''

    if args:
        # Whatever remains is a key=value argument string.
        parsed_args = parse_kv(args)
        complex_args = utils_vars.combine_vars(complex_args, parsed_args)

    task_vars = {}
    if interpreter:
        if '=' not in interpreter:
            # Fix: the original embedded a backslash line-continuation inside
            # the string literal, which injected a run of spaces into the
            # message (and had a 'must by' typo).
            print("interpreter must be in the form of "
                  "ansible_python_interpreter=/usr/bin/python")
            sys.exit(1)
        # maxsplit=1 so an '=' inside the interpreter path is preserved.
        interpreter_type, interpreter_path = interpreter.split('=', 1)
        if not interpreter_type.startswith('ansible_'):
            interpreter_type = 'ansible_%s' % interpreter_type
        if not interpreter_type.endswith('_interpreter'):
            interpreter_type = '%s_interpreter' % interpreter_type
        task_vars[interpreter_type] = interpreter_path

    if check:
        complex_args['_ansible_check_mode'] = True

    modname = os.path.splitext(os.path.basename(modfile))[0]
    (module_data, module_style, shebang) = module_common.modify_module(
        modname,
        modfile,
        complex_args,
        task_vars=task_vars
    )

    # ziploader-built modules embed this marker in the generated source.
    if module_style == 'new' \
       and 'ZIPLOADER_WRAPPER = True' in module_data:
        module_style = 'ziploader'

    modfile2_path = os.path.expanduser(destfile)
    # Fix: same embedded-continuation whitespace problem as above.
    print("* including generated source, if any, saving to: %s" % modfile2_path)
    if module_style not in ('ziploader', 'old'):
        print("* this may offset any line numbers in tracebacks/debuggers!")
    # Fix: close the output file deterministically instead of relying on GC.
    with open(modfile2_path, 'w') as generated:
        generated.write(module_data)

    return (modfile2_path, modname, module_style)
コード例 #4
0
class TestDataLoaderWithVault(unittest.TestCase):
    """Check that vault-encrypted files are transparently decrypted."""

    def setUp(self):
        self._loader = DataLoader()
        self._loader.set_vault_password('ansible')

    def tearDown(self):
        pass

    @patch.multiple(DataLoader, path_exists=lambda s, x: True, is_file=lambda s, x: True)
    def test_parse_from_vault_1_1_file(self):
        # Payload encrypted with vault format 1.1 and the password 'ansible'.
        vaulted_data = """$ANSIBLE_VAULT;1.1;AES256
33343734386261666161626433386662623039356366656637303939306563376130623138626165
6436333766346533353463636566313332623130383662340a393835656134633665333861393331
37666233346464636263636530626332623035633135363732623332313534306438393366323966
3135306561356164310a343937653834643433343734653137383339323330626437313562306630
3035
"""
        # 'open' lives in a different module on py2 vs py3.
        builtins_name = 'builtins' if PY3 else '__builtin__'

        with patch(builtins_name + '.open', mock_open(read_data=vaulted_data.encode('utf-8'))):
            self.assertEqual(
                self._loader.load_from_file('dummy_vault.txt'),
                dict(foo='bar'))
コード例 #5
0
class TestDataLoaderWithVault(unittest.TestCase):
    """Ensure a vault 1.1 encrypted file decrypts to its plaintext mapping."""

    def setUp(self):
        self._loader = DataLoader()
        self._loader.set_vault_password('ansible')

    def tearDown(self):
        pass

    @patch.multiple(DataLoader, path_exists=lambda s, x: True, is_file=lambda s, x: True)
    def test_parse_from_vault_1_1_file(self):
        vaulted_data = """$ANSIBLE_VAULT;1.1;AES256
33343734386261666161626433386662623039356366656637303939306563376130623138626165
6436333766346533353463636566313332623130383662340a393835656134633665333861393331
37666233346464636263636530626332623035633135363732623332313534306438393366323966
3135306561356164310a343937653834643433343734653137383339323330626437313562306630
3035
"""
        # Patch the builtin open so the loader "reads" our encrypted payload.
        if PY3:
            patch_target = 'builtins.open'
        else:
            patch_target = '__builtin__.open'

        fake_open = mock_open(read_data=vaulted_data.encode('utf-8'))
        with patch(patch_target, fake_open):
            output = self._loader.load_from_file('dummy_vault.txt')
            self.assertEqual(output, dict(foo='bar'))
コード例 #6
0
 def __init__(self, filepath, rolename):
     """Load an ansible file two ways: via DataLoader for its semantic
     content, and via ruamel.yaml round-trip mode for a representation
     that preserves comments/quotes, capturing any header/footer text
     around the YAML body for later re-emission."""
     self.filepath = filepath
     dl = DataLoader()
     # Semantic view of the file (plain Python objects).
     self.ans_data = dl.load_from_file(filepath)
     if self.ans_data is None:
         raise LSRException(f"file is empty {filepath}")
     self.file_type = get_file_type(self.ans_data)
     self.rolename = rolename
     # Raw text is needed for header/footer extraction and the ruamel load.
     buf = open(filepath).read()
     self.ruamel_yaml = YAML(typ="rt")  # "rt" = round-trip (comment-preserving) mode
     # Text before the YAML body, if any.
     match = re.search(LSRFileTransformerBase.HEADER_RE, buf)
     if match:
         self.header = match.group(1)
     else:
         self.header = ""
     # Text after the YAML body, if any.
     match = re.search(LSRFileTransformerBase.FOOTER_RE, buf)
     if match:
         self.footer = match.group(1) + "\n"
     else:
         self.footer = ""
     # Output formatting: block style, keep original quoting, avoid wrapping.
     self.ruamel_yaml.default_flow_style = False
     self.ruamel_yaml.preserve_quotes = True
     self.ruamel_yaml.width = 1024
     self.ruamel_data = self.ruamel_yaml.load(buf)
     self.ruamel_yaml.indent(mapping=2, sequence=4, offset=2)
     self.outputfile = None
     self.outputstream = sys.stdout
コード例 #7
0
ファイル: module_utils.py プロジェクト: jjpryor/linchpin
def boilerplate_module(modfile, args, interpreter, check, destfile):
    """Simulate what ansible does with new-style modules.

    :param modfile: path to the module source file
    :param args: module arguments - '@file', inline '{...}' YAML, or key=value
    :param interpreter: optional 'ansible_python_interpreter=/path' override
    :param check: when true, run the module in check mode
    :param destfile: where to save the generated module source
    :returns: tuple (generated_path, module_name, module_style)
    """
    loader = DataLoader()

    complex_args = {}
    if args.startswith("@"):
        # Argument is a YAML file (JSON is a subset of YAML)
        complex_args = utils_vars.combine_vars(complex_args,
                                               loader.load_from_file(args[1:]))
        args = ''
    elif args.startswith("{"):
        # Argument is an inline YAML document (not a file)
        complex_args = utils_vars.combine_vars(complex_args, loader.load(args))
        args = ''

    if args:
        # Whatever remains is a key=value argument string.
        parsed_args = parse_kv(args)
        complex_args = utils_vars.combine_vars(complex_args, parsed_args)

    task_vars = {}
    if interpreter:
        if '=' not in interpreter:
            # Fix: the original continuation-inside-string injected a run of
            # spaces into the printed message (and had a 'must by' typo).
            print("interpreter must be in the form of "
                  "ansible_python_interpreter=/usr/bin/python")
            sys.exit(1)
        # maxsplit=1 so an '=' inside the interpreter path is preserved.
        interpreter_type, interpreter_path = interpreter.split('=', 1)
        if not interpreter_type.startswith('ansible_'):
            interpreter_type = 'ansible_%s' % interpreter_type
        if not interpreter_type.endswith('_interpreter'):
            interpreter_type = '%s_interpreter' % interpreter_type
        task_vars[interpreter_type] = interpreter_path

    if check:
        complex_args['_ansible_check_mode'] = True

    modname = os.path.splitext(os.path.basename(modfile))[0]
    (module_data, module_style,
     shebang) = module_common.modify_module(modname,
                                            modfile,
                                            complex_args,
                                            task_vars=task_vars)

    # ziploader-built modules embed this marker in the generated source.
    if module_style == 'new' \
       and 'ZIPLOADER_WRAPPER = True' in module_data:
        module_style = 'ziploader'

    modfile2_path = os.path.expanduser(destfile)
    # Fix: same embedded-continuation whitespace problem as above.
    print("* including generated source, if any, saving to: %s" % modfile2_path)
    if module_style not in ('ziploader', 'old'):
        print("* this may offset any line numbers in tracebacks/debuggers!")
    # Fix: close the output file deterministically instead of relying on GC.
    with open(modfile2_path, 'w') as generated:
        generated.write(module_data)

    return (modfile2_path, modname, module_style)
コード例 #8
0
    def main(self, path):
        """Return data loaded from <data_dir>/<path>.yaml (preferred) or
        <path>.yml, or an empty dict when neither exists or the file is empty."""
        loader = DataLoader()
        full_path = "%s/%s" % (self.conf['data_dir'], path)

        ds = {}
        # .yaml takes priority over .yml, matching the original lookup order.
        for candidate in ("%s.yaml" % full_path, "%s.yml" % full_path):
            if os.path.isfile(candidate):
                ds = loader.load_from_file(candidate)
                break

        # An empty file loads as None; normalise to {}.
        return {} if ds is None else ds
コード例 #9
0
class TestDataLoader(unittest.TestCase):
    """DataLoader parsing behaviour: JSON, YAML, invalid input, tab errors."""

    def setUp(self):
        self._loader = DataLoader()

    def tearDown(self):
        pass

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, stub_contents):
        stub_contents.return_value = (b"""{"a": 1, "b": 2, "c": 3}""", True)
        loaded = self._loader.load_from_file('dummy_json.txt')
        self.assertEqual(loaded, {'a': 1, 'b': 2, 'c': 3})

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, stub_contents):
        stub_contents.return_value = (b"""
        a: 1
        b: 2
        c: 3
        """, True)
        loaded = self._loader.load_from_file('dummy_yaml.txt')
        self.assertEqual(loaded, {'a': 1, 'b': 2, 'c': 3})

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, stub_contents):
        # Malformed YAML must raise instead of returning partial data.
        stub_contents.return_value = (b"""
        TEXT:
            ***
               NOT VALID
        """, True)
        with self.assertRaises(AnsibleParserError):
            self._loader.load_from_file('dummy_yaml_bad.txt')

    @patch('ansible.errors.AnsibleError._get_error_lines_from_file')
    @patch.object(DataLoader, '_get_file_contents')
    def test_tab_error(self, stub_contents, stub_error_lines):
        # A tab used for indentation should yield the dedicated tab hint.
        stub_contents.return_value = (
            u"""---\nhosts: localhost\nvars:\n  foo: bar\n\tblip: baz""", True)
        stub_error_lines.return_value = ('''\tblip: baz''', '''..foo: bar''')
        with self.assertRaises(AnsibleParserError) as cm:
            self._loader.load_from_file('dummy_yaml_text.txt')
        message = str(cm.exception)
        self.assertIn(yaml_strings.YAML_COMMON_LEADING_TAB_ERROR, message)
        self.assertIn('foo: bar', message)
コード例 #10
0
    def get_task(self, playbook_name):
        """Load a playbook and return the task list of its *last* role.

        Bug fix: ``roles_data`` was referenced after the loops even when the
        playbook defined no roles, raising NameError. It now defaults to
        None, which is returned in that case.
        """
        # DataLoader takes care of finding and reading yaml, json and ini files.
        loader = DataLoader()

        # NOTE(review): paths are hard-coded to one user's home directory —
        # consider making the base directory configurable.
        playbook_data = loader.load_from_file(
            f"/home/gilmar/ansible/{playbook_name}.yml")

        print(playbook_data)

        roles_data = None
        for play in playbook_data:
            if 'roles' in play:
                for role in play['roles']:
                    # Each iteration overwrites the previous value; only the
                    # last role's tasks are returned (original behaviour).
                    roles_data = loader.load_from_file(
                        f"/home/gilmar/ansible/roles/{role}/tasks/main.yml")

        return roles_data
コード例 #11
0
ファイル: utils.py プロジェクト: hasiotis/vmbuilder
def load_vars(fname):
    """Return variables loaded from *fname*, or {} when it does not exist.

    Dispatches to the Ansible 2 DataLoader API when available, otherwise
    falls back to the Ansible 1.x utils helper.
    """
    if not os.path.isfile(fname):
        return {}

    if HAS_ANSIBLE2:
        loader = DataLoader()
        return loader.load_from_file(file_name=fname)
    # Ansible 1.x fallback; renamed local so it no longer shadows the
    # builtin ``vars``.
    initial_vars = {}
    return ansible.utils.load_vars(fname, initial_vars)
コード例 #12
0
class TestDataLoader(unittest.TestCase):
    """Parsing tests for DataLoader, including the leading-tab diagnostic."""

    def setUp(self):
        self._loader = DataLoader()

    def tearDown(self):
        pass

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, fake_read):
        fake_read.return_value = (b"""{"a": 1, "b": 2, "c": 3}""", True)
        self.assertEqual(self._loader.load_from_file('dummy_json.txt'),
                         dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, fake_read):
        fake_read.return_value = (b"""
        a: 1
        b: 2
        c: 3
        """, True)
        self.assertEqual(self._loader.load_from_file('dummy_yaml.txt'),
                         dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, fake_read):
        fake_read.return_value = (b"""
        TEXT:
            ***
               NOT VALID
        """, True)
        self.assertRaises(AnsibleParserError,
                          self._loader.load_from_file, 'dummy_yaml_bad.txt')

    @patch('ansible.errors.AnsibleError._get_error_lines_from_file')
    @patch.object(DataLoader, '_get_file_contents')
    def test_tab_error(self, fake_read, fake_error_lines):
        # A leading tab in YAML should surface the common tab-error hint.
        fake_read.return_value = (
            u"""---\nhosts: localhost\nvars:\n  foo: bar\n\tblip: baz""", True)
        fake_error_lines.return_value = ('''\tblip: baz''', '''..foo: bar''')
        with self.assertRaises(AnsibleParserError) as cm:
            self._loader.load_from_file('dummy_yaml_text.txt')
        self.assertIn(yaml_strings.YAML_COMMON_LEADING_TAB_ERROR,
                      str(cm.exception))
        self.assertIn('foo: bar', str(cm.exception))
コード例 #13
0
ファイル: ndmtk.py プロジェクト: sebashmh/ndmtk
 def _load_auth_secrets(self, hosts=None, secrets=None):
     """Return the 'credentials' list from the first readable vault file.

     Each entry in *secrets* is a (vault_file, password_source) pair.

     Fixes: mutable default arguments replaced with None (shared-state
     pitfall), and the bare ``except:`` narrowed to ``except Exception``
     so KeyboardInterrupt/SystemExit are no longer swallowed.
     """
     for _safe, _lockpick in (secrets or []):
         try:
             _safe_loader = DataLoader()
             try:
                 # Pre-2.4 vault API.
                 _safe_lockpick = CLI.read_vault_password_file(
                     _lockpick, loader=_safe_loader)
                 _safe_loader.set_vault_password(_safe_lockpick)
                 _safe_contents = _safe_loader.load_from_file(_safe)
             except Exception:
                 # Ansible >= 2.4 secrets API.
                 _safe_lockpick = CLI.setup_vault_secrets(
                     _safe_loader, [_lockpick])
                 _safe_contents = _safe_loader.load_from_file(_safe)
             if 'credentials' not in _safe_contents:
                 return dict()
             return _safe_contents['credentials']
         except Exception as e:
             # Report and fall through to the next secret, if any.
             display.display('[ERROR] ' + str(e), color='red')
     return dict()
コード例 #14
0
class TestDataLoaderWithVault(unittest.TestCase):
    """get_real_file / load_from_file behaviour against a vaulted fixture."""

    def setUp(self):
        self._loader = DataLoader()
        self._loader.set_vault_secrets([('default', TextVaultSecret('ansible'))])
        self.test_vault_data_path = os.path.join(
            os.path.dirname(__file__), 'fixtures', 'vault.yml')

    def tearDown(self):
        pass

    def test_get_real_file_vault(self):
        # With a valid secret the decrypted temp file must exist on disk.
        decrypted_path = self._loader.get_real_file(self.test_vault_data_path)
        self.assertTrue(os.path.exists(decrypted_path))

    def test_get_real_file_vault_no_vault(self):
        # Without any secrets the vaulted file cannot be opened.
        self._loader.set_vault_secrets(None)
        with self.assertRaises(AnsibleParserError):
            self._loader.get_real_file(self.test_vault_data_path)

    def test_get_real_file_vault_wrong_password(self):
        bad_secrets = [('default', TextVaultSecret('wrong_password'))]
        self._loader.set_vault_secrets(bad_secrets)
        with self.assertRaises(AnsibleVaultError):
            self._loader.get_real_file(self.test_vault_data_path)

    def test_get_real_file_not_a_path(self):
        # Kept assertRaisesRegexp: assertRaisesRegex is py3-only and this
        # suite still supports py2 (see the PY3 switch below).
        self.assertRaisesRegexp(AnsibleParserError, 'Invalid filename',
                                self._loader.get_real_file, None)

    @patch.multiple(DataLoader,
                    path_exists=lambda s, x: True,
                    is_file=lambda s, x: True)
    def test_parse_from_vault_1_1_file(self):
        vaulted_data = """$ANSIBLE_VAULT;1.1;AES256
33343734386261666161626433386662623039356366656637303939306563376130623138626165
6436333766346533353463636566313332623130383662340a393835656134633665333861393331
37666233346464636263636530626332623035633135363732623332313534306438393366323966
3135306561356164310a343937653834643433343734653137383339323330626437313562306630
3035
"""
        target = 'builtins.open' if PY3 else '__builtin__.open'
        with patch(target, mock_open(read_data=vaulted_data.encode('utf-8'))):
            self.assertEqual(self._loader.load_from_file('dummy_vault.txt'),
                             dict(foo='bar'))
コード例 #15
0
ファイル: task_bash.py プロジェクト: jplotnikov/cikit
    def test(self):
        """Render the single shell task against every expected test case."""
        loader = DataLoader()
        tasks = loader.load_from_file(lib.cikit.dirs['self'] + '/' + self.file)

        # The fixture must contain exactly one fully-formed shell task.
        self.assertEqual(len(tasks), 1)
        task = tasks[0]
        for required_key in ('shell', 'name', 'when', 'args'):
            self.assertTrue(required_key in task)

        for case in self.tests:
            for field in ['name', 'shell']:
                rendered = jinja2.Template(task[field]).render(
                    {'item': case['args']})
                self.assertEqual(rendered, case['result'][field])
コード例 #16
0
    def get_host_vars(self, host, vault_password=None):
        """ Get host specific variables. """
        # Collect candidate var files from every configured host_vars folder.
        var_files = []
        for folder in self.host_vars_folders:
            var_files.extend(vars_files_loading(folder, host.name))

        loader = DataLoader()
        loader.set_vault_password(vault_password)

        # Merge files in order; later files override earlier ones.
        merged = {}
        for var_file in var_files:
            display.vvvvv("Hostname {}: Loading var file {}".format(
                host.name, var_file))
            content = loader.load_from_file(var_file)
            if content is not None:
                merged = merge_hash(merged, content)
        return merged
コード例 #17
0
def merge_files(files2include):
    """Merge the given var files into one hash and dump it to a temp file.

    :param files2include: spec passed to build_files_list for expansion
    :returns: the merged dict, with an extra 'merged_hash_file' key naming
        the temporary YAML file the result was written to
    """
    loader = DataLoader()
    filenames = build_files_list(loader, files2include)
    results = dict()

    for filename in filenames:
        raw_hash = loader.load_from_file(filename)
        results = merge_hash(results, raw_hash)

    # Fix: the original leaked both the mkstemp file descriptor and the
    # separately-opened writer handle; fdopen adopts the descriptor and the
    # with-block closes it deterministically.
    tempfd, merged_hash_file = tempfile.mkstemp(suffix='.yml')
    with os.fdopen(tempfd, 'w') as out:
        yaml.dump(results, out, Dumper=AnsibleDumper, default_flow_style=False)

    # When invoked under sudo, hand the temp file back to the invoking user.
    if os.environ.get('SUDO_UID'):
        os.chown(merged_hash_file, int(os.environ.get('SUDO_UID')),
                 int(os.environ.get('SUDO_GID')))

    results.update(merged_hash_file=merged_hash_file)

    return results
コード例 #18
0
    def get_group_vars(self, group, vault_password=None):
        """ Get group specific variables. """
        # Collect candidate var files from every configured group_vars folder.
        var_files = []
        for folder in self.grp_vars_folders:
            var_files.extend(vars_files_loading(folder, group.name))

        loader = DataLoader()
        loader.set_vault_password(vault_password)

        # Merge files in order; later files override earlier ones.
        combined = {}
        for var_file in var_files:
            display.vvvvv("Group {}: Loading var file {}".format(
                group.name, var_file))
            content = loader.load_from_file(var_file)
            if content is not None:
                combined = merge_hash(combined, content)
        return combined
コード例 #19
0
    def _initialize_ssh_agent(self, password):
        """Initializes the SSH agent and loads the required private keys.
           This to prevent private key material being stored on disk."""

        dl = DataLoader()
        # NOTE(review): passing ``self`` explicitly as the first argument of a
        # bound method call is suspicious (self is already bound) — confirm
        # _initialize_vault_secrets' signature before relying on this.
        dl.set_vault_secrets(self._initialize_vault_secrets(self, password))

        # databaaaaaaase
        # maybe make distinction between multiple secrets depending on deploy
        ds = dl.load_from_file('/etc/ansible/key.ssh')
        # NOTE(review): same double-self pattern as above; also assumes ``ds``
        # is a str (bytes() would fail otherwise) — load_from_file may return
        # parsed YAML instead. TODO confirm.
        key = self._return_valid_key(self, bytes(ds, encoding='utf-8'))

        ssh_add_cmd = "ssh-add -"

        # Feed the key to ssh-add via stdin so it never touches disk;
        # communicate() also waits for the process to finish.
        ret = Popen((ssh_add_cmd.split()), stdin=PIPE)
        ret.communicate(key)

        if ret.returncode:
            raise OSError('--- something went wrong while loading the key')
コード例 #20
0
ファイル: ansible_wrapper.py プロジェクト: bmwant/cluc
def main():
    """Run a single ad-hoc play against a fixed host with a custom callback."""
    display = Display(verbosity=settings.DEFAULT_ANSIBLE_VERBOSITY)
    results_callback = CallbackModule()
    results_callback._display = display

    loader = DataLoader()
    loader.set_basedir(basedir_2)

    # Inventory sources may be host config files or a comma separated
    # string of hosts.
    inventory = InventoryManager(loader=loader, sources='192.168.245.9,')

    # The variable manager merges every variable source into one unified
    # view of the variables available in each context.
    variable_manager = VariableManager(loader=loader, inventory=inventory)
    variable_manager.extra_vars = extra_vars_2

    # Take the first play from the play-source file.
    play_source = loader.load_from_file(play_source_2)[0]
    # play_source['ansible_ssh_user'] = "******"
    # play_source['ansible_ssh_private_key_file'] = "/home/user/.ssh/id_rsa"

    # Playbook objects use .load() instead of init/new; this also creates
    # the task objects from the info provided in play_source.
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords={},
            stdout_callback=results_callback,
        )
        print(tqm.run(play))
    finally:
        if tqm is not None:
            tqm.cleanup()

        # Remove ansible's temporary working directory.
        shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)
コード例 #21
0
    def run(self, data_dir='data', conf_file='echelon.yml'):
        """Load the echelon config, instantiate backends, build hierarchies.

        Fixes: PEP8 membership tests (``'x' not in d``), removed the unused
        ``backends`` dict, and corrected the 'echeclon' typo in the error
        message.

        :returns: dict mapping hierarchy name to its merged data
        """
        from ansible.parsing.dataloader import DataLoader

        loader = DataLoader()
        ds = loader.load_from_file(conf_file)
        conf_data = self.template_loader(ds)

        hierarchies = {}
        if 'hierarchy' not in conf_data:
            return hierarchies

        # Load the backends
        if 'backends' not in conf_data:
            raise AnsibleError("No 'backends' found in echelon config file")

        backend_plugins = []
        for backend in conf_data['backends']:
            for k in backend:
                try:
                    backend_plugins.append(self.backend_loader(k, backend[k]))
                except Exception as e:
                    raise AnsibleError(
                        "Failed to load backend plugin (%s): %s" % (k, str(e)))

        for hierarchy in conf_data['hierarchy']:
            for k in hierarchy:
                data = {}
                for path in hierarchy[k]:
                    # First backend that yields data for this path wins.
                    for plugin in backend_plugins:
                        full_path = "%s/%s" % (k, path)
                        data_new = self.template_loader(plugin.main(full_path))
                        if data_new == {}:
                            continue
                        data = self.merge_dicts(data_new, data)
                        break
                    hierarchies[k] = data

        return hierarchies
コード例 #22
0
ファイル: echelon.py プロジェクト: tlipatov/ansible-oss
    def run(self, data_dir="data", conf_file="echelon.yml"):
        """Load the echelon config, instantiate backends, build hierarchies.

        Fixes: PEP8 membership tests (``"x" not in d``), removed the unused
        ``backends`` dict, and corrected the 'echeclon' typo in the error
        message.

        :returns: dict mapping hierarchy name to its merged data
        """
        from ansible.parsing.dataloader import DataLoader

        loader = DataLoader()
        ds = loader.load_from_file(conf_file)
        conf_data = self.template_loader(ds)

        hierarchies = {}
        if "hierarchy" not in conf_data:
            return hierarchies

        # Load the backends
        if "backends" not in conf_data:
            raise AnsibleError("No 'backends' found in echelon config file")

        backend_plugins = []
        for backend in conf_data["backends"]:
            for k in backend:
                try:
                    backend_plugins.append(self.backend_loader(k, backend[k]))
                except Exception as e:
                    raise AnsibleError("Failed to load backend plugin (%s): %s" % (k, str(e)))

        for hierarchy in conf_data["hierarchy"]:
            for k in hierarchy:
                data = {}
                for path in hierarchy[k]:
                    # First backend that yields data for this path wins.
                    for plugin in backend_plugins:
                        full_path = "%s/%s" % (k, path)
                        data_new = self.template_loader(plugin.main(full_path))
                        if data_new == {}:
                            continue
                        data = self.merge_dicts(data_new, data)
                        break
                    hierarchies[k] = data

        return hierarchies
コード例 #23
0
def read_vault_yaml(path: str) -> dict:
    """Read YAML with vault-encrypted values.

    Fix: the original leaked a file handle via ``open(path).read()``; a
    with-block now closes it deterministically.
    """
    # Read YAML without decrypting (strip the !vault tag so plain yaml
    # can parse the document).
    with open(path) as fh:
        raw_clean = fh.read().replace('!vault', '')
    data = yaml.load(raw_clean, Loader=yaml.FullLoader)

    # Pop out PW file, if provided.
    pw_file = data.get('vault_password_file')
    pw_files = [pw_file] if pw_file else None

    loader = DataLoader()

    vault_secrets = CLI.setup_vault_secrets(
        loader=loader,
        vault_ids=C.DEFAULT_VAULT_IDENTITY_LIST,
        vault_password_files=pw_files,
    )

    loader.set_vault_secrets(vault_secrets)

    # Re-read with decryption.
    return loader.load_from_file(path)
コード例 #24
0
ファイル: pillar.py プロジェクト: geneseven/pypillar
class VarsModule(object):

    """
    Loads variables from 'pillar/' directory in inventory base directory or in the same directory
    as the playbook. If inventory base directory and playbook base directory both contain 'pillar/'
    directory, then only 'pillar/' in playbook directory will be used.

    You can explicitly specify ANSIBLE_PILLARS_DIRECTORY environment variable. In this case it will
    take precedence, and 'pillar/' folders in inventory base directory and playbook base directory
    will not be scanned at all.
    """

    def __init__(self, inventory):
        """Remember inventory/playbook base dirs and create a DataLoader."""
        self.inventory = inventory
        self.inventory_basedir = inventory.basedir()
        self.loader = DataLoader()
        basedir = inventory.playbook_basedir()
        if basedir is not None:
            basedir = os.path.abspath(basedir)
        self.playbook_basedir = basedir

    def get_pillar_path(self):
        """
        Returns absolute path to the 'pillar/' folder or None, if it cannot be calculated.
        """
        # ANSIBLE_PILLARS_DIRECTORY takes precedence when set and non-empty.
        env_ansible_pillar_path = os.environ.get('ANSIBLE_PILLARS_DIRECTORY')
        if env_ansible_pillar_path:
            pillar_path = os.path.abspath(env_ansible_pillar_path)
            # isdir() implies exists(); stop if the directory is missing.
            if not os.path.isdir(pillar_path):
                raise errors.AnsibleError("Profiles directory that is specified by ANSIBLE_PILLARS_DIRECTORY does not exists or not a directory: %s" % env_ansible_pillar_path)
            return pillar_path

        # Otherwise prefer 'pillar/' next to the playbook, falling back to
        # 'pillar/' in the inventory directory.
        for basedir in [self.playbook_basedir, self.inventory_basedir]:
            if basedir is None:
                continue

            pillar_path = os.path.abspath(os.path.join(basedir, "pillar"))
            if not os.path.isdir(pillar_path):
                continue

            return pillar_path

        # No 'pillar/' directory could be located.
        return None

    def get_config(self):
        """
        Returns config dictionary or None, if config cannot be constructed.
        """
        config = {}

        # The ANSIBLE_PILLAR environment variable wins when present.
        env_ansible_pillar = os.environ.get('ANSIBLE_PILLAR')
        if env_ansible_pillar is not None:
            config['pillar'] = env_ansible_pillar
        else:
            # Fall back to a '.pillar' file in the playbook directory, then
            # the inventory directory.  NOTE: deliberately no 'break' — a
            # later directory's file overrides an earlier one, preserving
            # the original behaviour.
            for basedir in [self.playbook_basedir, self.inventory_basedir]:
                if basedir is None:
                    continue

                config_path = os.path.abspath(os.path.join(basedir, ".pillar"))

                # If there is no such file, proceed to the next folder.
                if not os.path.isfile(config_path):
                    continue

                data = self.loader.load_from_file(config_path)
                if type(data) != dict:
                    # Bug fix: the original referenced an undefined name
                    # 'path' here, raising NameError instead of the
                    # intended AnsibleError.
                    raise errors.AnsibleError("%s must be stored as a dictionary/hash" % config_path)

                config = data

        return self.sanitize_config(config)

    def sanitize_config(self, config):
        """Ensure config['pillar'] exists and is a relative path string."""
        if 'pillar' not in config or config['pillar'] is None:
            config['pillar'] = ''

        # Remove leading '/' symbol; absolute paths are not supported.
        if config['pillar'].startswith('/'):
            config['pillar'] = config['pillar'][1:]

        return config

    def run(self, host, vault_password):
        """ Main body of the plugin, does actual loading """

        results = {}

        # Load config
        config = self.get_config()
        if config is None:
            return results

        # Calculate pillar path (path to the 'pillar/' directory)
        pillar_path = self.get_pillar_path()

        # Absolute path to the selected pillar folder inside 'pillar/'.
        pillar_path = os.path.join(pillar_path, config['pillar']) if config['pillar'] else pillar_path
        if not os.path.isdir(pillar_path):
            raise errors.AnsibleError("There is no such pillar: %s" % pillar_path)

        # Walk from the selected pillar folder up to 'pillar_path',
        # merging every non-empty file found at each level.  Data from
        # deeper folders takes precedence over data closer to the root.
        current_path = os.path.abspath(pillar_path)
        while True:
            files = [os.path.join(current_path, x) for x in os.listdir(current_path)
                     if os.path.isfile(os.path.join(current_path, x))]
            for vars_path in files:
                if os.path.isfile(vars_path) and os.stat(vars_path).st_size != 0:
                    data = self.loader.load_from_file(vars_path)
                    results = vars.combine_vars(data, results)

            # Once the pillar root itself has been processed, we are done.
            if current_path == pillar_path:
                break

            # Select the parent directory.
            current_path = os.path.abspath(os.path.join(current_path, os.pardir))

        # results is a dictionary of variables
        return results
コード例 #25
0
class SimpleProvider(object):
    """
    Base class for Ansible-backed box providers.

    Holds the Environment of registered boxes, a DataLoader used to parse
    variable files and playbooks, the playbook tags cached per box and the
    extra variables passed to every provisioning run.  Concrete providers
    implement define_box() and redefine_box().
    """
    __metaclass__ = ABCMeta  # Py2-style ABC marker; no effect under Python 3

    def __init__(self, name, general_type=None, box_extra_type=None):
        self.env = Environment(name, general_type, box_extra_type)
        self.loader = DataLoader()
        self.provisioned = False
        self.tags = {}  # box name -> list of tags found in its playbook
        self.extra_vars = {'prudentia_dir': io.prudentia_python_dir()}
        self.load_tags()
        self.active_user = pwd.getpwuid(os.geteuid())[0]

    def boxes(self):
        """Return all boxes registered in the environment."""
        return self.env.boxes.values()

    def get_box(self, box_name):
        """Return the box named *box_name*, or None (printing a hint)."""
        b = self.env.get(box_name)
        if not b:
            print ('The box \'%s\' you entered does not exists.\n\n' \
                  'After typing the command press Tab for box suggestions.\n' % box_name)
            return None
        else:
            return b

    def _show_current_vars(self):
        """Print all currently set extra variables."""
        # BUG FIX: dict.iteritems() is Python-2-only; items() works on both.
        print('Current set variables:\n%s\n' % '\n'.join(
            [n + ' -> ' + str(v) for n, v in self.extra_vars.items()]))

    def set_var(self, var, value):
        """Set (or overwrite, with a notice) an extra variable."""
        if var in self.extra_vars:
            print ('NOTICE: Variable \'{0}\' is already set to this value: \'{1}\' ' \
                  'and it will be overwritten.'.format(var, self.extra_vars[var]))
        self.extra_vars[var] = value
        if provisioning.VERBOSITY > 0:
            print("Set \'{0}\' -> {1}\n".format(var, value))

    def unset_var(self, var):
        """Remove an extra variable, warning when the name is empty/unknown."""
        if not var:
            print('Please provide a valid variable name to unset.\n')
            self._show_current_vars()
        elif var not in self.extra_vars:
            print(
                'WARNING: Variable \'{0}\' is NOT present so cannot be unset.\n'
                .format(var))
            self._show_current_vars()
        else:
            self.extra_vars.pop(var, None)
            print("Unset \'{0}\'\n".format(var))

    def set_vault_password(self):
        """Ask for the vault password and hand it to the loader."""
        vault_pwd = io.input_value('Ansible vault password', hidden=True)
        try:
            # Ansible 2.4
            self.loader.set_vault_secrets(vault_pwd)
        # BUG FIX: a bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; only library errors should trigger the fallback.
        except Exception:
            # Ansible 2.3
            self.loader.set_vault_password(vault_pwd)

    def load_vars(self, vars_file):
        """Load a variables file (prompting for the path if missing) and set each entry."""
        if not vars_file:
            vars_file = io.input_path('path of the variables file')
        vars_dict = self.loader.load_from_file(vars_file)
        for key, value in vars_dict.items():
            self.set_var(key, value)

    def add_box(self, box):
        """Register a box and cache its playbook tags."""
        self.env.add(box)
        self.load_tags(box)

    def load_tags(self, box=None):
        """Collect the tags of every task of every play, for one box or all."""
        for b in [box] if box else self.boxes():
            if not os.path.exists(b.playbook):
                print ('WARNING: Box \'{0}\' points to a NON existing playbook. ' \
                      'Please `reconfigure` or `unregister` the box.\n'.format(b.name))
            else:
                plays = Playbook.load(
                    b.playbook,
                    variable_manager=provisioning.get_variable_manager(
                        self.loader),
                    loader=self.loader).get_plays()
                all_tags = set()
                for p in plays:
                    for block in p.compile():
                        for task in block.block:
                            all_tags.update(task.tags)
                self.tags[b.name] = list(all_tags)

    def remove_box(self, box):
        """Drop the box's cached tags and remove it from the environment."""
        if box.name in self.tags:
            self.tags.pop(box.name)
        return self.env.remove(box)

    def register(self):
        """Define a new box interactively and add it to the environment."""
        try:
            box = self.define_box()
            if box:
                self.add_box(box)
                print("\nBox %s added." % box)
        except Exception as ex:
            io.track_error('cannot add box', ex)

    @abstractmethod
    def define_box(self):
        pass

    def reconfigure(self, previous_box):
        """Redefine an existing box, replacing it on success."""
        try:
            box = self.redefine_box(previous_box)
            if box:
                self.remove_box(previous_box)
                self.add_box(box)
                print("\nBox %s reconfigured." % box)
        except Exception as ex:
            io.track_error('cannot reconfigure box', ex)

    @abstractmethod
    def redefine_box(self, previous_box):
        pass

    def unregister(self, box):
        """Remove a box from the environment."""
        self.remove_box(box)
        print("\nBox %s removed.\n" % box.name)

    def fetch_box_hosts(self, playbook):
        """Return the 'hosts' entry of the first play of *playbook* (or None)."""
        ds = self.loader.load_from_file(playbook)
        if ds:
            return ds[0][
                'hosts']  # a playbook is an array of plays we take the first one

    def suggest_name(self, hostname):
        """Return *hostname*, or a random-suffixed variant if already taken."""
        if hostname not in self.env.boxes:
            return hostname
        else:
            return hostname + '-' + str(random.randint(0, 100))

    def provision(self, box, tags):
        """Run the box's playbook against its generated inventory."""
        self.provisioned = provisioning.run_playbook(
            playbook_file=box.playbook,
            inventory_file=provisioning.generate_inventory(box),
            loader=self.loader,
            remote_user=box.get_remote_user(),
            remote_pass=box.get_remote_pwd(),
            transport=box.get_transport(),
            extra_vars=self.extra_vars,
            only_tags=tags)

    @staticmethod
    def verbose(value):
        """Set the global verbosity (0-4) or print the current value."""
        if value:
            try:
                iv = int(value)
            except ValueError:
                iv = -1  # sentinel: non-numeric input falls to the error branch
            if 0 <= iv <= 4:
                provisioning.VERBOSITY = iv
            else:
                print(
                    'Verbosity value \'{0}\' not allowed, should be a number between 0 and 4.'
                    .format(value))
        else:
            print('Current verbosity: {0}'.format(provisioning.VERBOSITY))

    def facts(self, box, regex='*'):
        """Gather Ansible facts for *box*, optionally filtered by *regex*."""
        return provisioning.gather_facts(box, regex, self.loader)
コード例 #26
0
ファイル: infoblox.py プロジェクト: awiddersheim/ansible
def main():
    """Build the Infoblox dynamic inventory and print it as JSON.

    Reads the first existing config file from CONFIG_FILES, queries the
    Infoblox WAPI for host records (optionally filtered by --host, view and
    extattrs) and emits the Ansible dynamic-inventory structure on stdout.
    Exits -1 on configuration or connection errors, 0 on success.
    """
    args = parse_args()

    # Pick the first config file that actually exists on disk.
    for config_file in CONFIG_FILES:
        if os.path.exists(config_file):
            break
    else:
        sys.stdout.write('unable to locate config file at /etc/ansible/infoblox.yaml\n')
        sys.exit(-1)

    try:
        loader = DataLoader()
        config = loader.load_from_file(config_file)
        provider = config.get('provider') or {}
        wapi = WapiInventory(provider)
    except Exception as exc:
        sys.stdout.write(to_text(exc))
        sys.exit(-1)

    if args.host:
        host_filter = {'name': args.host}
    else:
        host_filter = {}

    # BUG FIX: 'filters' may be absent from the config, in which case
    # config.get('filters') returned None and the .get('view') call below
    # raised AttributeError.  Fall back to an empty dict.
    config_filters = config.get('filters') or {}

    if config_filters.get('view') is not None:
        host_filter['view'] = config_filters['view']

    if config_filters.get('extattrs'):
        extattrs = normalize_extattrs(config_filters['extattrs'])
    else:
        extattrs = {}

    hostvars = {}
    inventory = {
        '_meta': {
            'hostvars': hostvars
        }
    }

    return_fields = ['name', 'view', 'extattrs', 'ipv4addrs']

    hosts = wapi.get_object('record:host',
                            host_filter,
                            extattrs=extattrs,
                            return_fields=return_fields)

    if hosts:
        for item in hosts:
            view = item['view']
            name = item['name']

            # Each Infoblox view becomes an inventory group.
            if view not in inventory:
                inventory[view] = {'hosts': []}

            inventory[view]['hosts'].append(name)

            hostvars[name] = {
                'view': view
            }

            # Extensible attributes prefixed 'ansible_' become hostvars
            # directly; everything else is nested under 'extattrs'.
            if item.get('extattrs'):
                for key, value in iteritems(flatten_extattrs(item['extattrs'])):
                    if key.startswith('ansible_'):
                        hostvars[name][key] = value
                    else:
                        if 'extattrs' not in hostvars[name]:
                            hostvars[name]['extattrs'] = {}
                        hostvars[name]['extattrs'][key] = value

    sys.stdout.write(json.dumps(inventory, indent=4))
    sys.exit(0)
コード例 #27
0
class TestDataLoader(unittest.TestCase):
    """Unit tests for ansible.parsing.dataloader.DataLoader."""

    def setUp(self):
        self._loader = DataLoader()

    def tearDown(self):
        pass

    @patch('os.path.exists')
    def test__is_role(self, p_exists):
        # A path is a role when <path>/tasks/main.yml exists.
        p_exists.side_effect = lambda p: p == b'test_path/tasks/main.yml'
        self.assertTrue(self._loader._is_role('test_path/tasks'))
        self.assertTrue(self._loader._is_role('test_path/'))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, mock_def):
        mock_def.return_value = (b"""{"a": 1, "b": 2, "c": 3}""", True)
        output = self._loader.load_from_file('dummy_json.txt')
        self.assertEqual(output, dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, mock_def):
        mock_def.return_value = (b"""
        a: 1
        b: 2
        c: 3
        """, True)
        output = self._loader.load_from_file('dummy_yaml.txt')
        self.assertEqual(output, dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, mock_def):
        mock_def.return_value = (b"""
        TEXT:
            ***
               NOT VALID
        """, True)
        self.assertRaises(AnsibleParserError, self._loader.load_from_file,
                          'dummy_yaml_bad.txt')

    @patch('ansible.errors.AnsibleError._get_error_lines_from_file')
    @patch.object(DataLoader, '_get_file_contents')
    def test_tab_error(self, mock_def, mock_get_error_lines):
        # A leading tab inside YAML must surface the common tab-error hint.
        mock_def.return_value = (
            u"""---\nhosts: localhost\nvars:\n  foo: bar\n\tblip: baz""", True)
        mock_get_error_lines.return_value = ('''\tblip: baz''',
                                             '''..foo: bar''')
        with self.assertRaises(AnsibleParserError) as cm:
            self._loader.load_from_file('dummy_yaml_text.txt')
        self.assertIn(yaml_strings.YAML_COMMON_LEADING_TAB_ERROR,
                      str(cm.exception))
        self.assertIn('foo: bar', str(cm.exception))

    @patch('ansible.parsing.dataloader.unfrackpath', mock_unfrackpath_noop)
    @patch.object(DataLoader, '_is_role')
    def test_path_dwim_relative(self, mock_is_role):
        """
        simulate a nested dynamic include:

        playbook.yml:
        - hosts: localhost
          roles:
            - { role: 'testrole' }

        testrole/tasks/main.yml:
        - include: "include1.yml"
          static: no

        testrole/tasks/include1.yml:
        - include: include2.yml
          static: no

        testrole/tasks/include2.yml:
        - debug: msg="blah"
        """
        mock_is_role.return_value = False
        with patch('os.path.exists') as mock_os_path_exists:
            mock_os_path_exists.return_value = False
            self._loader.path_dwim_relative('/tmp/roles/testrole/tasks',
                                            'tasks', 'included2.yml')

            # Fetch first args for every call
            # mock_os_path_exists.assert_any_call isn't used because os.path.normpath must be used in order to compare paths
            called_args = [
                os.path.normpath(to_text(call[0][0]))
                for call in mock_os_path_exists.call_args_list
            ]

            # 'path_dwim_relative' docstrings say 'with or without explicitly named dirname subdirs':
            self.assertIn('/tmp/roles/testrole/tasks/included2.yml',
                          called_args)
            self.assertIn('/tmp/roles/testrole/tasks/tasks/included2.yml',
                          called_args)

            # relative directories below are taken in account too:
            self.assertIn('tasks/included2.yml', called_args)
            self.assertIn('included2.yml', called_args)

    def test_path_dwim_root(self):
        self.assertEqual(self._loader.path_dwim('/'), '/')

    def test_path_dwim_home(self):
        self.assertEqual(self._loader.path_dwim('~'), os.path.expanduser('~'))

    def test_path_dwim_tilde_slash(self):
        self.assertEqual(self._loader.path_dwim('~/'), os.path.expanduser('~'))

    def test_get_real_file(self):
        self.assertEqual(self._loader.get_real_file(__file__), __file__)

    def test_is_file(self):
        self.assertTrue(self._loader.is_file(__file__))

    def test_is_directory_positive(self):
        self.assertTrue(self._loader.is_directory(os.path.dirname(__file__)))

    def test_get_file_contents_none_path(self):
        # BUG FIX: assertRaisesRegexp is a deprecated alias removed in
        # Python 3.12; assertRaisesRegex is the supported spelling.
        self.assertRaisesRegex(AnsibleParserError, 'Invalid filename',
                               self._loader._get_file_contents, None)

    def test_get_file_contents_non_existent_path(self):
        self.assertRaises(AnsibleFileNotFound, self._loader._get_file_contents,
                          '/non_existent_file')
コード例 #28
0
class Transport:
    """
    Transport using Ansible.

    Wraps Ansible's TaskQueueManager to run the commissaire playbooks
    (upgrade, restart, get_info, bootstrap) against one or more hosts.
    """
    def __init__(self):
        """
        Creates an instance of the Transport.
        """
        self.logger = logging.getLogger('transport')
        self.Options = namedtuple('Options', [
            'connection', 'module_path', 'forks', 'remote_user',
            'private_key_file', 'ssh_common_args', 'ssh_extra_args',
            'sftp_extra_args', 'scp_extra_args', 'become', 'become_method',
            'become_user', 'verbosity', 'check'
        ])
        # initialize needed objects
        self.variable_manager = VariableManager()
        self.loader = DataLoader()
        self.passwords = {}

    def _run(self,
             ips,
             key_file,
             play_file,
             expected_results=None,
             play_vars=None):
        """
        Common code used for each run.

        :param ips: IP address(es) to check.
        :type ips: str or list
        :param key_file: Full path the the file holding the private SSH key.
        :type key_file: string
        :param play_file: Path to the ansible play file.
        :type play_file: str
        :param expected_results: List of expected return codes. Default: [0]
        :type expected_results: list
        :param play_vars: Extra variables merged into the play. Default: {}
        :type play_vars: dict
        :returns: tuple -- (exitcode(int), facts(dict))
        :raises: Exception if the exit code is not in expected_results
        """
        # BUG FIX: the defaults were the mutable literals [0] and {}, which
        # are shared across calls; bind fresh objects per call instead.
        if expected_results is None:
            expected_results = [0]
        if play_vars is None:
            play_vars = {}

        # Accept a single IP as well as a list of IPs.
        if not isinstance(ips, list):
            ips = [ips]

        ssh_args = ('-o StrictHostKeyChecking=no -o '
                    'ControlMaster=auto -o ControlPersist=60s')
        options = self.Options(connection='ssh',
                               module_path=None,
                               forks=1,
                               remote_user='******',
                               private_key_file=key_file,
                               ssh_common_args=ssh_args,
                               ssh_extra_args=ssh_args,
                               sftp_extra_args=None,
                               scp_extra_args=None,
                               become=None,
                               become_method=None,
                               become_user=None,
                               verbosity=None,
                               check=False)
        # create inventory and pass to var manager
        inventory = Inventory(loader=self.loader,
                              variable_manager=self.variable_manager,
                              host_list=ips)
        self.logger.debug('Options: {0}'.format(options))

        group = Group('commissaire_targets')
        for ip in ips:
            host = Host(ip, 22)
            group.add_host(host)

        inventory.groups.update({'commissaire_targets': group})
        self.logger.debug('Inventory: {0}'.format(inventory))

        self.variable_manager.set_inventory(inventory)

        play_source = self.loader.load_from_file(play_file)[0]
        play = Play().load(play_source,
                           variable_manager=self.variable_manager,
                           loader=self.loader)

        # Add any variables provided into the play
        play.vars.update(play_vars)

        self.logger.debug(
            'Running play for hosts {0}: play={1}, vars={2}'.format(
                ips, play_source, play.vars))

        # actually run it
        for cnt in range(0, 3):
            tqm = None
            try:
                tqm = TaskQueueManager(
                    inventory=inventory,
                    variable_manager=self.variable_manager,
                    loader=self.loader,
                    options=options,
                    passwords=self.passwords,
                    stdout_callback=LogForward(),
                )
                result = tqm.run(play)

                # Deal with unreachable hosts (result == 3) by retrying
                # up to 3 times, sleeping 5 seconds after each attempt.
                if result == 3 and cnt < 2:
                    # logger.warn is a deprecated alias of logger.warning
                    self.logger.warning(
                        'One or more hosts in {0} is unreachable, '
                        'retrying in 5 seconds...'.format(ips))
                    sleep(5)
                else:
                    break
            finally:
                if tqm is not None:
                    self.logger.debug(
                        'Cleaning up after the TaskQueueManager.')
                    tqm.cleanup()

        # NOTE(review): 'ip' below is the last host of the group loop above,
        # so for multi-host runs only that host's fact cache is returned and
        # logged — confirm this is intentional.
        if result in expected_results:
            self.logger.debug('{0}: Good result {1}'.format(ip, result))
            fact_cache = self.variable_manager._fact_cache.get(ip, {})
            return (result, fact_cache)

        self.logger.debug('{0}: Bad result {1}'.format(ip, result))
        raise Exception('Can not run for {0}'.format(ip))

    def upgrade(self, ips, key_file, oscmd):
        """
        Upgrades a host via ansible.

        :param ips: IP address(es) to upgrade.
        :type ips: str or list
        :param key_file: Full path the the file holding the private SSH key.
        :param oscmd: OSCmd class to use
        :type oscmd: commissaire.oscmd.OSCmdBase
        :type key_file: str
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        play_file = resource_filename('commissaire',
                                      'data/ansible/playbooks/upgrade.yaml')
        return self._run(
            ips, key_file, play_file, [0],
            {'commissaire_upgrade_command': " ".join(oscmd.upgrade())})

    def restart(self, ips, key_file, oscmd):
        """
        Restarts a host via ansible.

        :param ips: IP address(es) to restart.
        :type ips: str or list
        :param key_file: Full path the the file holding the private SSH key.
        :type key_file: str
        :param oscmd: OSCmd class to use
        :type oscmd: commissaire.oscmd.OSCmdBase
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        play_file = resource_filename('commissaire',
                                      'data/ansible/playbooks/restart.yaml')
        return self._run(
            ips, key_file, play_file, [0, 2],
            {'commissaire_restart_command': " ".join(oscmd.restart())})

    def get_info(self, ip, key_file):
        """
        Get's information from the host via ansible.

        :param ip: IP address to check.
        :type ip: str
        :param key_file: Full path the the file holding the private SSH key.
        :type key_file: str
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        play_file = resource_filename('commissaire',
                                      'data/ansible/playbooks/get_info.yaml')
        result, fact_cache = self._run(ip, key_file, play_file)
        facts = {}
        facts['os'] = fact_cache['ansible_distribution'].lower()
        facts['cpus'] = fact_cache['ansible_processor_cores']
        facts['memory'] = fact_cache['ansible_memory_mb']['real']['total']
        # Sum the size of every mounted filesystem.
        space = 0
        for x in fact_cache['ansible_mounts']:
            space += x['size_total']
        facts['space'] = space

        # Special case for atomic: Since Atomic doesn't advertise itself
        # and instead calls itself 'redhat' or 'centos' or 'fedora', we
        # need to check for 'atomicos' in other ansible_cmdline facts.
        atomic_os_types = {
            'redhat': '/ostree/rhel-atomic-host',
            'centos': '/ostree/centos-atomic-host',
            'fedora': '/ostree/fedora-atomic'
        }
        os_type = facts['os']
        if os_type in atomic_os_types:
            self.logger.debug('Found os of {0}. Checking for special '
                              'atomic case...'.format(os_type))
            boot_image = fact_cache.get('ansible_cmdline',
                                        {}).get('BOOT_IMAGE', '')
            root_mapper = fact_cache.get('ansible_cmdline', {}).get('root', '')
            if (boot_image.startswith(atomic_os_types[os_type])
                    or 'atomicos' in root_mapper):
                facts['os'] = 'atomic'
            self.logger.debug('Facts: {0}'.format(facts))

        return (result, facts)

    def bootstrap(self, ip, key_file, config, oscmd):
        """
        Bootstraps a host via ansible.

        :param ip: IP address to reboot.
        :type ip: str
        :param key_file: Full path the the file holding the private SSH key.
        :type key_file: str
        :param config: Configuration information.
        :type config: commissaire.config.Config
        :param oscmd: OSCmd class to use
        :type oscmd: commissaire.oscmd.OSCmdBase
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        self.logger.debug('Using {0} as the oscmd class for {1}'.format(
            oscmd.os_type, ip))

        play_vars = {
            'commissaire_bootstrap_ip':
            ip,
            'commissaire_kubernetes_api_server_scheme':
            config.kubernetes.get('uri').scheme,
            'commissaire_kubernetes_api_server_host':
            config.kubernetes.get('uri').hostname,
            'commissaire_kubernetes_api_server_port':
            config.kubernetes.get('uri').port,
            'commissaire_kubernetes_bearer_token':
            config.kubernetes.get('token', ''),
            # TODO: Where do we get this?
            'commissaire_docker_registry_host':
            '127.0.0.1',
            # TODO: Where do we get this?
            'commissaire_docker_registry_port':
            8080,
            'commissaire_etcd_scheme':
            config.etcd['uri'].scheme,
            'commissaire_etcd_host':
            config.etcd['uri'].hostname,
            'commissaire_etcd_port':
            config.etcd['uri'].port,
            # TODO: Where do we get this?
            'commissaire_flannel_key':
            '/atomic01/network',
            'commissaire_docker_config_local':
            resource_filename('commissaire', 'data/templates/docker'),
            'commissaire_flanneld_config_local':
            resource_filename('commissaire', 'data/templates/flanneld'),
            'commissaire_kubelet_config_local':
            resource_filename('commissaire', 'data/templates/kubelet'),
            'commissaire_kubernetes_config_local':
            resource_filename('commissaire', 'data/templates/kube_config'),
            'commissaire_kubeconfig_config_local':
            resource_filename('commissaire', 'data/templates/kubeconfig'),
            'commissaire_install_libselinux_python':
            " ".join(oscmd.install_libselinux_python()),
            'commissaire_docker_config':
            oscmd.docker_config,
            'commissaire_flanneld_config':
            oscmd.flanneld_config,
            'commissaire_kubelet_config':
            oscmd.kubelet_config,
            'commissaire_kubernetes_config':
            oscmd.kubernetes_config,
            'commissaire_kubeconfig_config':
            oscmd.kubernetes_kubeconfig,
            'commissaire_install_flannel':
            " ".join(oscmd.install_flannel()),
            'commissaire_install_docker':
            " ".join(oscmd.install_docker()),
            'commissaire_install_kube':
            " ".join(oscmd.install_kube()),
            'commissaire_flannel_service':
            oscmd.flannel_service,
            # NOTE(review): this assigns flannel_service to the docker
            # service variable — likely should be oscmd.docker_service;
            # confirm before changing since behavior is preserved here.
            'commissaire_docker_service':
            oscmd.flannel_service,
            'commissaire_kubelet_service':
            oscmd.kubelet_service,
            'commissaire_kubeproxy_service':
            oscmd.kubelet_proxy_service,
        }

        # Provide the CA if etcd is being used over https
        if (config.etcd['uri'].scheme == 'https'
                and config.etcd.get('certificate_ca_path', None)):
            play_vars['commissaire_etcd_ca_path'] = oscmd.etcd_ca
            play_vars['commissaire_etcd_ca_path_local'] = (
                config.etcd['certificate_ca_path'])

        # Client Certificate additions
        if config.etcd.get('certificate_path', None):
            self.logger.info('Using etcd client certs')
            play_vars['commissaire_etcd_client_cert_path'] = (
                oscmd.etcd_client_cert)
            play_vars['commissaire_etcd_client_cert_path_local'] = (
                config.etcd['certificate_path'])
            play_vars['commissaire_etcd_client_key_path'] = (
                oscmd.etcd_client_key)
            play_vars['commissaire_etcd_client_key_path_local'] = (
                config.etcd['certificate_key_path'])

        if config.kubernetes.get('certificate_path', None):
            self.logger.info('Using kubernetes client certs')
            play_vars['commissaire_kubernetes_client_cert_path'] = (
                oscmd.kube_client_cert)
            play_vars['commissaire_kubernetes_client_cert_path_local'] = (
                config.kubernetes['certificate_path'])
            play_vars['commissaire_kubernetes_client_key_path'] = (
                oscmd.kube_client_key)
            play_vars['commissaire_kubernetes_client_key_path_local'] = (
                config.kubernetes['certificate_key_path'])

        # XXX: Need to enable some package repositories for OS 'rhel'
        #      (or 'redhat').  This is a hack for a single corner case.
        #      We discussed how to generalize future cases where we need
        #      extra commands for a specific OS but decided to defer until
        #      more crop up.
        #
        #      See https://github.com/projectatomic/commissaire/pull/56
        #
        if oscmd.os_type in ('rhel', 'redhat'):
            play_vars['commissaire_enable_pkg_repos'] = (
                'subscription-manager repos '
                '--enable=rhel-7-server-extras-rpms '
                '--enable=rhel-7-server-optional-rpms')
        else:
            play_vars['commissaire_enable_pkg_repos'] = 'true'

        self.logger.debug('Variables for bootstrap: {0}'.format(play_vars))

        play_file = resource_filename('commissaire',
                                      'data/ansible/playbooks/bootstrap.yaml')
        results = self._run(ip, key_file, play_file, [0], play_vars)

        return results
コード例 #29
0
ファイル: utils.py プロジェクト: shivasps/ansible-lint
def parse_yaml_from_file(filepath):
    """Parse *filepath* as YAML via Ansible's DataLoader.

    When the loader supports vault decryption (older Ansible API), the
    default vault password is installed first.
    """
    loader = DataLoader()
    set_pwd = getattr(loader, 'set_vault_password', None)
    if set_pwd is not None:
        set_pwd(DEFAULT_VAULT_PASSWORD)
    return loader.load_from_file(filepath)
コード例 #30
0
ファイル: utils.py プロジェクト: willthames/ansible-lint
 def parse_yaml_from_file(filepath):
     """Parse *filepath* as YAML using Ansible's DataLoader.

     Installs the default vault password first when the loader exposes
     set_vault_password (older Ansible API).
     """
     dl = DataLoader()
     if hasattr(dl, 'set_vault_password'):
         dl.set_vault_password(DEFAULT_VAULT_PASSWORD)
     return dl.load_from_file(filepath)
コード例 #31
0
class Runner(object):
    """Load an Ansible playbook file and run it through a TaskQueueManager.

    Inventory sources may be a single path or a list of paths; any keyword
    arguments become extra variables for the run.
    """

    def __init__(self, pb_file, sources=None, **kwargs):
        """
        :param pb_file: path of the playbook file to run
        :param sources: inventory path or list of paths
                        (default: ['inventory/hosts'])
        :param kwargs: extra variables handed to the VariableManager
        """
        self.pb_file = pb_file
        # BUG FIX: the default was the mutable literal ['inventory/hosts'],
        # shared across all instances; build a fresh list per instance.
        self.sources = ['inventory/hosts'] if sources is None else sources
        Options = namedtuple('Options', ['connection',
                                         'module_path',
                                         'forks',
                                         'become',
                                         'become_method',
                                         'become_user',
                                         'check',
                                         'diff'])
        # initialize needed objects
        self.Options = Options(connection='smart',
                               module_path=None,
                               forks=100,
                               become=True,
                               become_method='sudo',
                               become_user='******',
                               check=False,
                               diff=False)
        self.loader = DataLoader()
        passwords = dict(vault_pass='******')

        # Instantiate our ResultCallback for handling results as they come in
        self.results_callback = ResultCallback()

        # create inventory and pass to var manager
        self.inventory = self._gen_inventory()
        self.variable_manager = VariableManager(loader=self.loader, inventory=self.inventory)
        self.variable_manager.extra_vars = kwargs

        self.tqm = TaskQueueManager(
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            options=self.Options,
            passwords=passwords,
            stdout_callback=self.results_callback,
        )

    def _gen_inventory(self):
        """Build an InventoryManager from self.sources (str or list of paths)."""
        if isinstance(self.sources, str):
            assert os.path.isfile(self.sources), "Inventory file ['{}'] not exist".format(self.sources)
            return InventoryManager(loader=self.loader, sources=self.sources)
        elif isinstance(self.sources, list):
            for source in self.sources:
                assert os.path.isfile(source), "One or more inventory file not exist in {}".format(self.sources)
            return InventoryManager(loader=self.loader, sources=self.sources)
        # ROBUSTNESS: previously fell through and returned None silently,
        # deferring the failure to a confusing error later on.
        raise TypeError(
            'sources must be a str or a list of paths, got {!r}'.format(type(self.sources)))

    def _load_play_source(self):
        """Merge every play found in the playbook file into self.play_source."""
        for source in self.loader.load_from_file(self.pb_file):
            self.play_source.update(source)

    def run(self):
        """Run the playbook and return the TaskQueueManager exit code."""
        self.play_source = {}
        self._load_play_source()

        self.play = Play().load(self.play_source, variable_manager=self.variable_manager, loader=self.loader)

        try:
            ret = self.tqm.run(self.play)
            return ret
        finally:
            # Always release the TQM's worker processes and temp files.
            if self.tqm is not None:
                self.tqm.cleanup()
コード例 #32
0
ファイル: kraken_ansible.py プロジェクト: led-spb/kraken
class AnsibleRunner(object):
    """Run ansible playbook and retrieve results"""

    def __init__(self, hosts, playbook, verbosity='info', config=None,
                 vars_filename='.variables', vault_password=""):
        """
        :param hosts: mapping of group name -> iterable of host names
        :param playbook: path to the playbook file to execute
        :param verbosity: key into the VERBOSITY mapping
        :param config: optional dict of ansible option overrides
        :param vars_filename: YAML file with extra vars (ignored if missing)
        :param vault_password: currently unused (vault support is disabled
            below) — kept for interface compatibility
        """
        # BUG FIX: the old signature used a mutable default (config={}) that
        # was mutated in place below, so option defaults leaked between
        # instances; use the None-sentinel idiom instead.
        if config is None:
            config = {}
        required_defaults = (
            'forks',
            'remote_user',
            'private_key_file',
            'become',
            'become_method',
            'become_user'
        )
        # Fill anything missing from ansible's own constants (C.DEFAULT_*).
        for default in required_defaults:
            if default not in config:
                config[default] = getattr(
                    C, 'DEFAULT_{}'.format(default.upper())
                )
        config['connection'] = config.get('connection', 'smart')
        config['ssh_common_args'] = config.get('ssh_common_args', None)
        config['ssh_extra_args'] = config.get('ssh_extra_args', None)
        config['sftp_extra_args'] = config.get('sftp_extra_args', None)
        config['scp_extra_args'] = config.get('scp_extra_args', None)
        config['extra_vars'] = config.get('extra_vars', {})
        config['diff'] = config.get('diff', False)
        config['listhosts'] = config.get('listhosts', False)
        config['listtasks'] = config.get('listtasks', False)
        config['listtags'] = config.get('listtags', False)
        config['syntax'] = config.get('syntax', False)
        config['verbosity'] = VERBOSITY.get(verbosity)
        config['module_path'] = './'
        config['check'] = False

        self.options = options_as_class(config)

        # create default data loader
        self.loader = DataLoader()
        # self.loader.set_vault_password(vault_password)

        # Optional extra-vars file: best effort, ignored if absent/unparsable.
        variables = {}
        try:
            variables = self.loader.load_from_file(vars_filename)
        except Exception:
            pass

        # Build an in-memory inventory from the hosts mapping.
        self.inventory = InventoryManager(
            loader=self.loader,
            sources=None
        )
        for group, group_hosts in hosts.items():
            self.inventory.add_group(group)
            for host in group_hosts:
                self.inventory.add_host(host=host, group=group)

        # create variable manager
        self.vm = VariableManager(
            loader=self.loader,
            inventory=self.inventory
        )
        self.vm.extra_vars = variables

        # create a playbook executor
        self.pbex = playbook_executor.PlaybookExecutor(
            playbooks=[playbook],
            inventory=self.inventory,
            variable_manager=self.vm,
            loader=self.loader,
            options=self.options,
            passwords={}
        )

        # Attach our JSON callback so results can be collected after run().
        self.result_callback = JsonResultCallback()
        # self.result_callback.set_options(self.options)
        self.pbex._tqm._callback_plugins.append(self.result_callback)

    def run(self):
        """Execute the playbook and return the collected plays and stats."""
        self.pbex.run()
        return {
            'plays': self.result_callback.results,
            'stats': self.result_callback.summary
        }
コード例 #33
0
                  check=False,
                  diff=False)
passwords = dict(vault_pass='******')

# Instantiate our ResultCallback for handling results as they come in
results_callback = ResultCallback()
results_callback.nodeid = 'my_id_node'
list = get_template()

# create inventory and pass to var manager
inventory = InventoryManager(loader=loader, sources=['my_inventory.txt'])
variable_manager = VariableManager(loader=loader, inventory=inventory)

# create play with tasks

play_source = loader.load_from_file(
    '/mnt/extra-addons/bibind/create_site_ovh.yml')[0]
play = Play().load(play_source,
                   variable_manager=variable_manager,
                   loader=loader)

# actually run it
tqm = None
try:
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        options=options,
        passwords=passwords,
        stdout_callback=
        results_callback,  # Use our custom callback instead of the ``default`` callback plugin
def _main():
  """Template Ansible-style variable YAML files and print the result.

  Reads every file in the given directory as a YAML vars file, resolves
  inter-variable Jinja2 references with Ansible's Templar, and prints the
  fully-templated variables as JSON (or YAML with --output-as-yaml).
  """
  parser = argparse.ArgumentParser(
    description="template Ansible style YAML files that only contain variables, using Ansible's codebase",
  )
  parser.add_argument(
    "yaml_files_dir",
    help="folder where the YAML files to template are stored",
  )
  parser.add_argument(
    "--output-as-yaml",
    dest="output_as_yaml",
    action="store_true",
    help="Output resulting variables as YAML instead of JSON",
  )
  args = parser.parse_args()

  # Load variables from the YAML files
  yaml_files_dir = os.path.join(args.yaml_files_dir)
  var_files = [
    os.path.join(yaml_files_dir, file_name)
      for file_name in os.listdir(yaml_files_dir)
  ]
  dl = DataLoader()
  vars_to_template = dict()
  for var_file in var_files:
    vars_to_template.update(dl.load_from_file(var_file))

  templar = Templar(loader=dl)
  result_vars = dict()
  # because some variables depend on the value of other variables and we don't
  # want to spend the effort to do a topological sort on them, we adopt the
  # following strategy:
  #
  # 1. maintain a dict of all successfully templated variables in `result_vars`
  # 2. until the `vars_to_template` dict is empty, do the following:
  #
  #    Try templating each variable using `ansible.template.Templar.template`.
  #
  #    If we get a `AnsibleUndefinedVariable` error, this means that the current
  #    variable depends on another variable. We ignore the error and keep the
  #    variable around for a future round of templating.
  #
  #    Otherwise, we have successfully templated the variable and add it to the
  #    `result_vars` variable. We also add the variable name to the
  #    `successfully_templated_vars` list.
  #
  #    At the end of each templating round, remove all variables in
  #    `successfully_templated_vars` from `vars_to_template`.
  #
  #
  # Note that the above algorithm will only work if all variables required for
  # interpolation are present. Otherwise, it will be stuck in an infinite loop.
  while vars_to_template:
    successfully_templated_vars = []
    for var_name, value in vars_to_template.items():
      try:
        templated_value = templar.template(value)
        result_vars[var_name] = templated_value
        successfully_templated_vars.append(var_name)
        templar.set_available_variables(result_vars.copy())
      except AnsibleUndefinedVariable:
        pass
    for var_name in successfully_templated_vars:
      del vars_to_template[var_name]

  if args.output_as_yaml:
    # NOTE: While it seems from printing `result_vars` that the most fundamental
    #       values are strings, they are in fact
    #       `ansible.parsing.yaml.objects.AnsibleUnicode` objects. Hence when
    #       we use `yaml.dump` to serialize `result_vars`, we get some rather
    #       intimidating-looking stuff that may make it seem like we've gotten
    #       an error when in fact we haven't. So do not be too alarmed by the
    #       voluminous output.
    import yaml
    # BUG FIX: the bare "print x" statement is a SyntaxError on Python 3
    # (required by modern Ansible); print() works on both 2 and 3.
    print(yaml.dump(result_vars))
  else:
    import json
    print(json.dumps(result_vars))
コード例 #35
0
ファイル: ansibleapi.py プロジェクト: cooktheryan/commissaire
class Transport:
    """
    Transport using Ansible.
    """

    def __init__(self, remote_user='******'):
        """
        Creates an instance of the Transport.

        :param remote_user: User name used for the ssh connection.
        :type remote_user: str
        """
        self.logger = logging.getLogger('transport')
        # Options mirrors the command-line options TaskQueueManager expects.
        self.Options = namedtuple(
            'Options', ['connection', 'module_path', 'forks', 'remote_user',
                        'private_key_file', 'ssh_common_args',
                        'ssh_extra_args', 'sftp_extra_args', 'scp_extra_args',
                        'become', 'become_method', 'become_user', 'verbosity',
                        'check'])
        # initialize needed objects
        self.variable_manager = VariableManager()
        self.loader = DataLoader()
        self.passwords = {}
        self.remote_user = remote_user

    def _run(self, ips, key_file, play_file,
             expected_results=None, play_vars=None):
        """
        Common code used for each run.

        :param ips: IP address(es) to check.
        :type ips: str or list
        :param key_file: Full path to the file holding the private SSH key.
        :type key_file: string
        :param play_file: Path to the ansible play file.
        :type play_file: str
        :param expected_results: List of expected return codes. Default: [0]
        :type expected_results: list
        :param play_vars: Extra variables merged into the play. Default: {}
        :type play_vars: dict
        :returns: Ansible exit code
        :type: int
        """
        # BUG FIX: expected_results=[0] and play_vars={} were mutable default
        # arguments shared across calls; use the None-sentinel idiom instead.
        if expected_results is None:
            expected_results = [0]
        if play_vars is None:
            play_vars = {}
        if type(ips) != list:
            ips = [ips]

        ssh_args = ('-o StrictHostKeyChecking=no -o '
                    'ControlMaster=auto -o ControlPersist=60s')
        become = {
            'become': None,
            'become_user': None,
        }
        if self.remote_user != 'root':
            self.logger.debug('Using user {0} for ssh communication.'.format(
                self.remote_user))
            become['become'] = True
            become['become_user'] = '******'

        options = self.Options(
            connection='ssh', module_path=None, forks=1,
            remote_user=self.remote_user, private_key_file=key_file,
            ssh_common_args=ssh_args, ssh_extra_args=ssh_args,
            sftp_extra_args=None, scp_extra_args=None,
            become=become['become'], become_method='sudo',
            become_user=become['become_user'],
            verbosity=None, check=False)
        # create inventory and pass to var manager
        inventory = Inventory(
            loader=self.loader,
            variable_manager=self.variable_manager,
            host_list=ips)
        self.logger.debug('Options: {0}'.format(options))

        # All target hosts are collected into a single ansible group.
        group = Group('commissaire_targets')
        for ip in ips:
            host = Host(ip, 22)
            group.add_host(host)

        inventory.groups.update({'commissaire_targets': group})
        self.logger.debug('Inventory: {0}'.format(inventory))

        self.variable_manager.set_inventory(inventory)

        # Only the first play in the file is executed.
        play_source = self.loader.load_from_file(play_file)[0]
        play = Play().load(
            play_source,
            variable_manager=self.variable_manager,
            loader=self.loader)

        # Add any variables provided into the play
        play.vars.update(play_vars)

        self.logger.debug(
            'Running play for hosts {0}: play={1}, vars={2}'.format(
                ips, play_source, play.vars))

        # actually run it
        for cnt in range(0, 3):
            tqm = None
            try:
                tqm = TaskQueueManager(
                    inventory=inventory,
                    variable_manager=self.variable_manager,
                    loader=self.loader,
                    options=options,
                    passwords=self.passwords,
                    stdout_callback=LogForward(),
                )
                result = tqm.run(play)

                # Deal with unreachable hosts (result == 3) by retrying
                # up to 3 times, sleeping 5 seconds after each attempt.
                if result == 3 and cnt < 2:
                    # 'warn' is a deprecated alias for 'warning'.
                    self.logger.warning(
                        'One or more hosts in {0} is unreachable, '
                        'retrying in 5 seconds...'.format(ips))
                    sleep(5)
                else:
                    break
            finally:
                if tqm is not None:
                    self.logger.debug(
                        'Cleaning up after the TaskQueueManager.')
                    tqm.cleanup()

        if result in expected_results:
            # NOTE(review): 'ip' here is the leftover value from the loop
            # above, i.e. the LAST host; for multi-host runs only that host's
            # fact cache is returned — confirm this is intended.
            self.logger.debug('{0}: Good result {1}'.format(ip, result))
            fact_cache = self.variable_manager._fact_cache.get(ip, {})
            return (result, fact_cache)

        self.logger.debug('{0}: Bad result {1}'.format(ip, result))
        raise Exception('Can not run for {0}'.format(ip))

    def deploy(self, ips, key_file, oscmd, kwargs):
        """
        Deploys a tree image on a host via ansible.

        :param ips: IP address(es) to upgrade.
        :type ips: str or list
        :param key_file: Full path to the file holding the private SSH key.
        :type key_file: str
        :param oscmd: OSCmd class to use
        :type oscmd: commissaire.oscmd.OSCmdBase
        :param kwargs: keyword arguments for the remote command
        :type kwargs: dict
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        play_file = resource_filename(
            'commissaire', 'data/ansible/playbooks/deploy.yaml')
        deploy_command = " ".join(oscmd.deploy(kwargs['version']))
        return self._run(
            ips, key_file, play_file, [0],
            {'commissaire_deploy_command': deploy_command})

    def upgrade(self, ips, key_file, oscmd, kwargs):
        """
        Upgrades a host via ansible.

        :param ips: IP address(es) to upgrade.
        :type ips: str or list
        :param key_file: Full path to the file holding the private SSH key.
        :type key_file: str
        :param oscmd: OSCmd class to use
        :type oscmd: commissaire.oscmd.OSCmdBase
        :param kwargs: keyword arguments for the remote command
        :type kwargs: dict
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        play_file = resource_filename(
            'commissaire', 'data/ansible/playbooks/upgrade.yaml')
        upgrade_command = " ".join(oscmd.upgrade())
        return self._run(
            ips, key_file, play_file, [0],
            {'commissaire_upgrade_command': upgrade_command})

    def restart(self, ips, key_file, oscmd, kwargs):
        """
        Restarts a host via ansible.

        :param ips: IP address(es) to restart.
        :type ips: str or list
        :param key_file: Full path to the file holding the private SSH key.
        :type key_file: str
        :param oscmd: OSCmd class to use
        :type oscmd: commissaire.oscmd.OSCmdBase
        :param kwargs: keyword arguments for the remote command
        :type kwargs: dict
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        play_file = resource_filename(
            'commissaire', 'data/ansible/playbooks/restart.yaml')
        restart_command = " ".join(oscmd.restart())
        # Exit code 2 is also accepted: the connection drops on restart.
        return self._run(
            ips, key_file, play_file, [0, 2],
            {'commissaire_restart_command': restart_command})

    def get_info(self, ip, key_file):
        """
        Gets information from the host via ansible.

        :param ip: IP address to check.
        :type ip: str
        :param key_file: Full path to the file holding the private SSH key.
        :type key_file: str
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        play_file = resource_filename(
            'commissaire', 'data/ansible/playbooks/get_info.yaml')
        result, fact_cache = self._run(ip, key_file, play_file)
        facts = {}
        facts['os'] = fact_cache['ansible_distribution'].lower()
        facts['cpus'] = fact_cache['ansible_processor_cores']
        facts['memory'] = fact_cache['ansible_memory_mb']['real']['total']
        space = 0
        for x in fact_cache['ansible_mounts']:
            space += x['size_total']
        facts['space'] = space

        # Special case for atomic: Since Atomic doesn't advertise itself
        # and instead calls itself 'redhat' or 'centos' or 'fedora', we
        # need to check for 'atomicos' in other ansible_cmdline facts.
        atomic_os_types = {
            'redhat': '/ostree/rhel-atomic-host',
            'centos': '/ostree/centos-atomic-host',
            'fedora': '/ostree/fedora-atomic'
        }
        os_type = facts['os']
        if os_type in atomic_os_types:
            self.logger.debug(
                'Found os of {0}. Checking for special '
                'atomic case...'.format(os_type))
            boot_image = fact_cache.get(
                'ansible_cmdline', {}).get('BOOT_IMAGE', '')
            root_mapper = fact_cache.get('ansible_cmdline', {}).get('root', '')
            if (boot_image.startswith(atomic_os_types[os_type]) or
                    'atomicos' in root_mapper):
                facts['os'] = 'atomic'
            self.logger.debug('Facts: {0}'.format(facts))

        return (result, facts)

    def bootstrap(self, ip, key_file, config, oscmd):
        """
        Bootstraps a host via ansible.

        :param ip: IP address to reboot.
        :type ip: str
        :param key_file: Full path to the file holding the private SSH key.
        :type key_file: str
        :param config: Configuration information.
        :type config: commissaire.config.Config
        :param oscmd: OSCmd class to use
        :type oscmd: commissaire.oscmd.OSCmdBase
        :returns: tuple -- (exitcode(int), facts(dict)).
        """
        self.logger.debug('Using {0} as the oscmd class for {1}'.format(
            oscmd.os_type, ip))

        play_vars = {
            'commissaire_bootstrap_ip': ip,
            'commissaire_kubernetes_api_server_scheme': config.kubernetes.get(
                'uri').scheme,
            'commissaire_kubernetes_api_server_host': config.kubernetes.get(
                'uri').hostname,
            'commissaire_kubernetes_api_server_port': config.kubernetes.get(
                'uri').port,
            'commissaire_kubernetes_bearer_token': config.kubernetes.get(
                'token', ''),
            # TODO: Where do we get this?
            'commissaire_docker_registry_host': '127.0.0.1',
            # TODO: Where do we get this?
            'commissaire_docker_registry_port': 8080,
            'commissaire_etcd_scheme': config.etcd['uri'].scheme,
            'commissaire_etcd_host': config.etcd['uri'].hostname,
            'commissaire_etcd_port': config.etcd['uri'].port,
            # TODO: Where do we get this?
            'commissaire_flannel_key': '/atomic01/network',
            'commissaire_docker_config_local': resource_filename(
                'commissaire', 'data/templates/docker'),
            'commissaire_flanneld_config_local': resource_filename(
                'commissaire', 'data/templates/flanneld'),
            'commissaire_kubelet_config_local': resource_filename(
                'commissaire', 'data/templates/kubelet'),
            'commissaire_kubernetes_config_local': resource_filename(
                'commissaire', 'data/templates/kube_config'),
            'commissaire_kubeconfig_config_local': resource_filename(
                'commissaire', 'data/templates/kubeconfig'),
            'commissaire_install_libselinux_python': " ".join(
                oscmd.install_libselinux_python()),
            'commissaire_docker_config': oscmd.docker_config,
            'commissaire_flanneld_config': oscmd.flanneld_config,
            'commissaire_kubelet_config': oscmd.kubelet_config,
            'commissaire_kubernetes_config': oscmd.kubernetes_config,
            'commissaire_kubeconfig_config': oscmd.kubernetes_kubeconfig,
            'commissaire_install_flannel': " ".join(oscmd.install_flannel()),
            'commissaire_install_docker': " ".join(oscmd.install_docker()),
            'commissaire_install_kube': " ".join(oscmd.install_kube()),
            'commissaire_flannel_service': oscmd.flannel_service,
            # NOTE(review): the docker service is mapped to
            # oscmd.flannel_service — this looks like a copy/paste slip;
            # confirm whether oscmd.docker_service was intended.
            'commissaire_docker_service': oscmd.flannel_service,
            'commissaire_kubelet_service': oscmd.kubelet_service,
            'commissaire_kubeproxy_service': oscmd.kubelet_proxy_service,
        }

        # Provide the CA if etcd is being used over https
        if (
                config.etcd['uri'].scheme == 'https' and
                config.etcd.get('certificate_ca_path', None)):
            play_vars['commissaire_etcd_ca_path'] = oscmd.etcd_ca
            play_vars['commissaire_etcd_ca_path_local'] = (
                config.etcd['certificate_ca_path'])

        # Client Certificate additions
        if config.etcd.get('certificate_path', None):
            self.logger.info('Using etcd client certs')
            play_vars['commissaire_etcd_client_cert_path'] = (
                oscmd.etcd_client_cert)
            play_vars['commissaire_etcd_client_cert_path_local'] = (
                config.etcd['certificate_path'])
            play_vars['commissaire_etcd_client_key_path'] = (
                oscmd.etcd_client_key)
            play_vars['commissaire_etcd_client_key_path_local'] = (
                config.etcd['certificate_key_path'])

        if config.kubernetes.get('certificate_path', None):
            self.logger.info('Using kubernetes client certs')
            play_vars['commissaire_kubernetes_client_cert_path'] = (
                oscmd.kube_client_cert)
            play_vars['commissaire_kubernetes_client_cert_path_local'] = (
                config.kubernetes['certificate_path'])
            play_vars['commissaire_kubernetes_client_key_path'] = (
                oscmd.kube_client_key)
            play_vars['commissaire_kubernetes_client_key_path_local'] = (
                config.kubernetes['certificate_key_path'])

        # XXX: Need to enable some package repositories for OS 'rhel'
        #      (or 'redhat').  This is a hack for a single corner case.
        #      We discussed how to generalize future cases where we need
        #      extra commands for a specific OS but decided to defer until
        #      more crop up.
        #
        #      See https://github.com/projectatomic/commissaire/pull/56
        #
        if oscmd.os_type in ('rhel', 'redhat'):
            play_vars['commissaire_enable_pkg_repos'] = (
                'subscription-manager repos '
                '--enable=rhel-7-server-extras-rpms '
                '--enable=rhel-7-server-optional-rpms')
        else:
            play_vars['commissaire_enable_pkg_repos'] = 'true'

        self.logger.debug('Variables for bootstrap: {0}'.format(play_vars))

        play_file = resource_filename(
            'commissaire', 'data/ansible/playbooks/bootstrap.yaml')
        results = self._run(ip, key_file, play_file, [0], play_vars)

        return results
コード例 #36
0
 def parse_yaml_from_file(filepath):
     """Parse *filepath* as YAML/JSON using Ansible's DataLoader and
     return the resulting data structure."""
     loader = DataLoader()
     return loader.load_from_file(filepath)
コード例 #37
0
            if option not in args.extra or not isinstance(args.extra[option], basestring) or len(args.extra[option]) < 2:
                functions.error(
                    (
                        'The "--%s" option is required for the "%s" command and '
                        'currently missing or has a value less than 2 symbols.'
                    )
                    %
                    (
                        option,
                        args.playbook
                    ),
                    errno.EPERM
                )

    if yaml_data_loader.is_file(project_config_paths['environment']):
        for key, value in yaml_data_loader.load_from_file(project_config_paths['environment']).iteritems():
            # Add the value from environment config only if it's not specified as
            # an option to the command.
            if key not in args.extra:
                args.extra[key] = value

    if 'ANSIBLE_INVENTORY' in os.environ:
        PARAMS.append("-i '%s'" % os.environ['ANSIBLE_INVENTORY'])

    # @todo Improve for Ansible 2.5 - https://github.com/ansible/ansible/pull/30722
    # Remove these lines and adjust docs in favor of "ANSIBLE_RUN_TAGS" environment variable.
    if 'CIKIT_TAGS' in os.environ:
        PARAMS.append("-t '%s'" % os.environ['CIKIT_TAGS'])

# Require privileged execution of CIKit upgrades.
if 'self-update' == args.playbook:
コード例 #38
0
ファイル: utils.py プロジェクト: Lowess/ansible-lint
 def parse_yaml_from_file(filepath):
     """Load and return the data stored in *filepath* via DataLoader."""
     return DataLoader().load_from_file(filepath)
コード例 #39
0
ファイル: test-module.py プロジェクト: mrlesmithjr/ansible-1
def boilerplate_module(modfile, args, interpreters, check, destfile):
    """ simulate what ansible does with new style modules """

    # module_fh = open(modfile)
    # module_data = module_fh.read()
    # module_fh.close()

    # replacer = module_common.ModuleReplacer()
    loader = DataLoader()

    # included_boilerplate = module_data.find(module_common.REPLACER) != -1 or module_data.find("import ansible.module_utils") != -1

    complex_args = {}

    # default selinux fs list is pass in as _ansible_selinux_special_fs arg
    complex_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
    complex_args['_ansible_tmpdir'] = C.DEFAULT_LOCAL_TMP
    complex_args['_ansible_keep_remote_files'] = C.DEFAULT_KEEP_REMOTE_FILES
    complex_args['_ansible_version'] = __version__

    if args.startswith("@"):
        # Argument is a YAML file (JSON is a subset of YAML)
        complex_args = utils_vars.combine_vars(complex_args,
                                               loader.load_from_file(args[1:]))
        args = ''
    elif args.startswith("{"):
        # Argument is a YAML document (not a file)
        complex_args = utils_vars.combine_vars(complex_args, loader.load(args))
        args = ''

    if args:
        parsed_args = parse_kv(args)
        complex_args = utils_vars.combine_vars(complex_args, parsed_args)

    task_vars = interpreters

    if check:
        complex_args['_ansible_check_mode'] = True

    modname = os.path.basename(modfile)
    modname = os.path.splitext(modname)[0]
    (module_data, module_style,
     shebang) = module_common.modify_module(modname,
                                            modfile,
                                            complex_args,
                                            Templar(loader=loader),
                                            task_vars=task_vars)

    if module_style == 'new' and '_ANSIBALLZ_WRAPPER = True' in to_native(
            module_data):
        module_style = 'ansiballz'

    modfile2_path = os.path.expanduser(destfile)
    print("* including generated source, if any, saving to: %s" %
          modfile2_path)
    if module_style not in ('ansiballz', 'old'):
        print("* this may offset any line numbers in tracebacks/debuggers!")
    # BUG FIX: the file was previously opened without a context manager and
    # would leak the handle if write() raised; 'with' guarantees closure.
    with open(modfile2_path, 'wb') as modfile2:
        modfile2.write(module_data)

    return (modfile2_path, modname, module_style)
コード例 #40
0
ファイル: infoblox.py プロジェクト: roelsieg/network_nios
def main():
    """Build and print an Ansible dynamic inventory from Infoblox WAPI.

    Reads the first existing config file from CONFIG_FILES, queries
    record:host objects (optionally filtered by --host, view and
    extensible attributes) and writes the inventory JSON to stdout.
    """
    args = parse_args()

    # Use the first config file that exists on disk.
    for config_file in CONFIG_FILES:
        if os.path.exists(config_file):
            break
    else:
        sys.stderr.write(
            'unable to locate config file at /etc/ansible/infoblox.yaml\n')
        sys.exit(-1)

    try:
        loader = DataLoader()
        config = loader.load_from_file(config_file)
        provider = config.get('provider') or {}
        wapi = WapiInventory(provider)
    except Exception as exc:
        sys.stderr.write(to_text(exc))
        sys.exit(-1)

    if args.host:
        host_filter = {'name': args.host}
    else:
        host_filter = {}

    # BUG FIX: config.get('filters') returns None when the key is absent,
    # which crashed the .get() calls below with AttributeError; default to
    # an empty dict (mirrors the 'provider' handling above).
    config_filters = config.get('filters') or {}

    if config_filters.get('view') is not None:
        host_filter['view'] = config_filters['view']

    if config_filters.get('extattrs'):
        extattrs = normalize_extattrs(config_filters['extattrs'])
    else:
        extattrs = {}

    hostvars = {}
    inventory = {'_meta': {'hostvars': hostvars}}

    return_fields = ['name', 'view', 'extattrs', 'ipv4addrs']

    hosts = wapi.get_object('record:host',
                            host_filter,
                            extattrs=extattrs,
                            return_fields=return_fields)

    if hosts:
        for item in hosts:
            view = item['view']
            name = item['name']

            # Hosts are grouped by their Infoblox DNS view.
            if view not in inventory:
                inventory[view] = {'hosts': []}

            inventory[view]['hosts'].append(name)

            hostvars[name] = {'view': view}

            # Flatten extensible attributes: ansible_* keys are promoted to
            # top-level host variables, everything else nests under 'extattrs'.
            if item.get('extattrs'):
                for key, value in iteritems(flatten_extattrs(
                        item['extattrs'])):
                    if key.startswith('ansible_'):
                        hostvars[name][key] = value
                    else:
                        if 'extattrs' not in hostvars[name]:
                            hostvars[name]['extattrs'] = {}
                        hostvars[name]['extattrs'][key] = value

    sys.stdout.write(json.dumps(inventory, indent=4))
    sys.exit(0)
コード例 #41
0
    def exec_ansible_api(self, playbooks_file):
        variable_manager = VariableManager()
        loader = DataLoader()

        ds = loader.load_from_file(playbooks_file)
        Options = namedtuple('Options', [
            'connection', 'module_path', 'forks', 'remote_user',
            'private_key_file', 'ssh_common_args', 'ssh_extra_args',
            'sftp_extra_args', 'scp_extra_args', 'become', 'become_method',
            'become_user', 'verbosity', 'check'
        ])
        options = Options(connection='ssh',
                          module_path=None,
                          forks=100,
                          remote_user='******',
                          private_key_file='id_rsa',
                          ssh_common_args=None,
                          ssh_extra_args=None,
                          sftp_extra_args=None,
                          scp_extra_args=None,
                          become=None,
                          become_method=None,
                          become_user=None,
                          verbosity=None,
                          check=False)

        # create inventory and pass to var manager
        inventory = Inventory(loader=loader,
                              variable_manager=variable_manager,
                              host_list=Global.inventory)
        variable_manager.set_inventory(inventory)

        # Currently we are limiting to one playbook
        play_source = ds[0]
        play = Play().load(play_source,
                           variable_manager=variable_manager,
                           loader=loader)

        tqm = None
        try:
            tqm = TaskQueueManager(inventory=inventory,
                                   variable_manager=variable_manager,
                                   loader=loader,
                                   options=options,
                                   passwords=None,
                                   stdout_callback=Global.display)
            result = tqm.run(play)
            # Exit gdeploy in case of errors and user has explicitly set
            # not to ignore errors
            if result != 0 and Global.ignore_errors != 'yes':
                msg = "Error while executing playbook %s, exiting"\
                      %playbooks_file
                print msg
                Global.logger.error(msg)
                self.cleanup_and_quit(1)
            elif result != 0 and Global.ignore_errors == 'yes':
                msg = "Error while executing playbook %s, ignoring errors..."\
                      %playbooks_file
                Global.logger.error(msg)
                print msg
        except AnsibleError, e:
            print "%s" % e