def test_unequal_delimiter(self):
    settings1 = Settings({'delimiter': ':'})
    settings2 = Settings({'delimiter': '%'})
    p1, b1 = self._construct_mocked_params(settings=settings1)
    p2, b2 = self._construct_mocked_params(settings=settings2)
    b1.__eq__.return_value = False
    self.assertNotEqual(p1, p2)
    b1.__eq__.assert_called_once_with(b2)
def test_merge_none_over_dict_negative(self):
    settings = Settings({'allow_none_override': False})
    p1 = Parameters(dict(key=SIMPLE), settings, '')
    p2 = Parameters(dict(key=None), settings, '')
    with self.assertRaises(TypeError):
        p1.merge(p2)
        p1.initialise_interpolation()
def test__resolve_fails(self):
    refitem = RefItem('', Settings({'delimiter': ':'}))
    context = {'foo': {'bar': 1}}
    reference = 'foo:baz'
    self.assertRaises(errors.ResolveError, refitem._resolve, reference, context)
def main():
    try:
        defaults = {'no_refs': OPT_NO_REFS,
                    'pretty_print': OPT_PRETTY_PRINT,
                    'output': OPT_OUTPUT}
        defaults.update(find_and_read_configfile())
        options = get_options(RECLASS_NAME, VERSION, DESCRIPTION,
                              defaults=defaults)
        storage = get_storage(options.storage_type, options.nodes_uri,
                              options.classes_uri)
        class_mappings = defaults.get('class_mappings')
        defaults.update(vars(options))
        settings = Settings(defaults)
        reclass = Core(storage, class_mappings, settings)

        if options.mode == MODE_NODEINFO:
            data = reclass.nodeinfo(options.nodename)
        else:
            data = reclass.inventory()

        print(output(data, options.output, options.pretty_print,
                     options.no_refs))

    except ReclassException as e:
        e.exit_with_message(sys.stderr)

    sys.exit(posix.EX_OK)
def ext_pillar(minion_id, pillar,
               storage_type=OPT_STORAGE_TYPE,
               inventory_base_uri=OPT_INVENTORY_BASE_URI,
               nodes_uri=OPT_NODES_URI,
               classes_uri=OPT_CLASSES_URI,
               class_mappings=None,
               propagate_pillar_data_to_reclass=False,
               **kwargs):
    path_mangler = get_path_mangler(storage_type)
    nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri)
    storage = get_storage(storage_type, nodes_uri, classes_uri)
    input_data = None
    if propagate_pillar_data_to_reclass:
        input_data = pillar
    settings = Settings(kwargs)
    reclass = Core(storage, class_mappings, settings, input_data=input_data)

    data = reclass.nodeinfo(minion_id)
    params = data.get('parameters', {})
    params['__reclass__'] = {}
    params['__reclass__']['nodename'] = minion_id
    params['__reclass__']['applications'] = data['applications']
    params['__reclass__']['classes'] = data['classes']
    params['__reclass__']['environment'] = data['environment']
    return params
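# A minimal sketch (not part of the adapter) of exercising ext_pillar()
# directly, e.g. from a debugging session. The inventory path '/srv/reclass'
# and the minion id below are assumptions for illustration only; in normal
# operation Salt supplies these values from the master configuration.
def _example_ext_pillar_call():
    pillar_data = ext_pillar('node1.example.org', {},
                             storage_type='yaml_fs',
                             inventory_base_uri='/srv/reclass')
    # The result is the node's parameters plus a '__reclass__' sub-dict
    # holding nodename, applications, classes and environment.
    return pillar_data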
def top(minion_id,
        storage_type=OPT_STORAGE_TYPE,
        inventory_base_uri=OPT_INVENTORY_BASE_URI,
        nodes_uri=OPT_NODES_URI,
        classes_uri=OPT_CLASSES_URI,
        class_mappings=None,
        **kwargs):
    path_mangler = get_path_mangler(storage_type)
    nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri)
    storage = get_storage(storage_type, nodes_uri, classes_uri)
    settings = Settings(kwargs)
    reclass = Core(storage, class_mappings, settings, input_data=None)

    # if the minion_id is not None, then return just the applications for the
    # specific minion, otherwise return the entire top data (which we need for
    # CLI invocations of the adapter):
    if minion_id is not None:
        data = reclass.nodeinfo(minion_id)
        applications = data.get('applications', [])
        env = data['environment']
        return {env: applications}
    else:
        data = reclass.inventory()
        nodes = {}
        for (node_id, node_data) in iteritems(data['nodes']):
            env = node_data['environment']
            if env not in nodes:
                nodes[env] = {}
            nodes[env][node_id] = node_data['applications']
        return nodes
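# Illustrative sketch of the two return shapes of top(), using the same
# hypothetical '/srv/reclass' inventory path as above: with a minion id it
# yields {environment: [applications]}; without one it yields
# {environment: {node_id: [applications]}} for the whole inventory.
def _example_top_calls():
    single = top('node1.example.org', storage_type='yaml_fs',
                 inventory_base_uri='/srv/reclass')
    everything = top(None, storage_type='yaml_fs',
                     inventory_base_uri='/srv/reclass')
    return single, everything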
def _core(self, dataset, opts={}):
    inventory_uri = os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset
    path_mangler = get_path_mangler('yaml_fs')
    nodes_uri, classes_uri = path_mangler(inventory_uri, 'nodes', 'classes')
    settings = Settings(opts)
    storage = get_storage('yaml_fs', nodes_uri, classes_uri,
                          settings.compose_node_name)
    return Core(storage, None, settings)
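# Hypothetical test method built on the _core() helper above; the 'default'
# dataset name is an assumption for illustration and would have to exist
# under the test suite's data/ directory.
def test_inventory_smoke(self):
    core = self._core('default', opts={'pretty_print': True})
    inventory = core.inventory()
    self.assertIn('nodes', inventory)
    self.assertIn('classes', inventory)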
def test_merge_none_over_dict_allow(self):
    settings = Settings({'allow_none_override': True})
    p1 = Parameters(dict(key=SIMPLE), settings, '')
    p2 = Parameters(dict(key=None), settings, '')
    p1.merge(p2)
    p1.interpolate()
    self.assertEqual(p1.as_dict()['key'], None)
def test_merge_none_over_list(self):
    l = ['foo', 1, 2]
    settings = Settings({'allow_none_override': True})
    p1 = Parameters(dict(key=l[:2]), settings, '')
    p2 = Parameters(dict(key=None), settings, '')
    p1.merge(p2)
    p1.initialise_interpolation()
    self.assertEqual(p1.as_dict()['key'], None)
def test_merge_list_into_scalar_allow(self):
    settings = Settings({'allow_list_over_scalar': True})
    l = ['foo', 1, 2]
    p1 = Parameters(dict(key=l[0]), settings, '')
    p2 = Parameters(dict(key=l[1:]), settings, '')
    p1.merge(p2)
    p1.interpolate()
    self.assertListEqual(p1.as_dict()['key'], l)
def test_merge_scalar_over_dict(self):
    settings = Settings({'allow_scalar_over_dict': True})
    p = Parameters(dict(base=SIMPLE), settings, '')
    mergee = {'base': 'foo'}
    p2 = Parameters(mergee, settings, '')
    p.merge(p2)
    p.initialise_interpolation()
    self.assertDictEqual(p.as_dict(), mergee)
def test_merge_scalar_over_dict_allow(self):
    d = {'one': 1, 'two': 2}
    settings = Settings({'allow_scalar_over_dict': True})
    p1 = Parameters({'a': d}, settings, '')
    p2 = Parameters({'a': 1}, settings, '')
    p1.merge(p2)
    p1.interpolate()
    self.assertEqual(p1.as_dict(), {'a': 1})
def test_merge_scalar_over_list_allow(self):
    l = ['foo', 1, 2]
    settings = Settings({'allow_scalar_over_list': True})
    p1 = Parameters(dict(key=l[:2]), settings, '')
    p2 = Parameters(dict(key=l[2]), settings, '')
    p1.merge(p2)
    p1.interpolate()
    self.assertEqual(p1.as_dict()['key'], l[2])
def test_classes_caching(self):
    p = MemcacheProxy(self._storage, cache_classes=True)
    NAME = 'foo'
    NAME2 = 'bar'
    RET = 'baz'
    SETTINGS = Settings()
    self._storage.get_class.return_value = RET
    self.assertEqual(p.get_class(NAME, None, SETTINGS), RET)
    self.assertEqual(p.get_class(NAME, None, SETTINGS), RET)
    self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET)
    self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET)
    expected = [mock.call(NAME, None, SETTINGS),
                mock.call(NAME2, None, SETTINGS)]  # called once each
    self.assertListEqual(self._storage.get_class.call_args_list, expected)
def test_constant_parameter(self):
    settings = Settings({'strict_constant_parameters': False})
    p1 = Parameters({'one': {'a': 1}}, settings, 'first')
    p2 = Parameters({'one': {'=a': 2}}, settings, 'second')
    p3 = Parameters({'one': {'a': 3}}, settings, 'third')
    r = {'one': {'a': 2}}
    p1.merge(p2)
    p1.merge(p3)
    p1.interpolate()
    self.assertEqual(p1.as_dict(), r)
def test_merge_none_over_list(self):
    l = ['foo', 1, 2]
    settings = Settings({'allow_none_override': False})
    p1 = Parameters(dict(key=l[:2]), settings, '')
    p2 = Parameters(dict(key=None), settings, '')
    with self.assertRaises(TypeMergeError) as e:
        p1.merge(p2)
        p1.interpolate()
    self.assertEqual(e.exception.message,
                     "-> \n Cannot merge scalar over list, at key, in ; ")
def test_no_nodes_caching(self):
    p = MemcacheProxy(self._storage, cache_nodes=False)
    NAME = 'foo'
    NAME2 = 'bar'
    RET = 'baz'
    SETTINGS = Settings()
    self._storage.get_node.return_value = RET
    self.assertEqual(p.get_node(NAME, SETTINGS), RET)
    self.assertEqual(p.get_node(NAME, SETTINGS), RET)
    self.assertEqual(p.get_node(NAME2, SETTINGS), RET)
    self.assertEqual(p.get_node(NAME2, SETTINGS), RET)
    expected = [mock.call(NAME, SETTINGS),
                mock.call(NAME, SETTINGS),
                mock.call(NAME2, SETTINGS),
                mock.call(NAME2, SETTINGS)]
    self.assertListEqual(self._storage.get_node.call_args_list, expected)
def inventory(self, resource=None):
    '''
    Get inventory nodes from reclass and their associated services and roles.
    '''
    storage = get_storage(self.metadata['storage_type'],
                          self.metadata['node_dir'],
                          self.metadata['class_dir'])
    settings = Settings({'no_refs': False,
                         'pretty_print': True,
                         'output': 'yaml'})
    reclass = Core(storage, None, settings)
    if resource is None:
        return reclass.inventory()["nodes"]
    else:
        return reclass.inventory()["nodes"][resource]
def test__resolve_ok(self):
    reference = RefItem('', Settings({'delimiter': ':'}))
    result = reference._resolve('foo:bar', {'foo': {'bar': 1}})
    self.assertEqual(result, 1)
from reclass import errors
from reclass.settings import Settings
from reclass.values.value import Value
from reclass.values.compitem import CompItem
from reclass.values.scaitem import ScaItem
from reclass.values.valuelist import ValueList
from reclass.values.listitem import ListItem
from reclass.values.dictitem import DictItem
from reclass.values.refitem import RefItem
import unittest

from mock import MagicMock

SETTINGS = Settings()


class TestRefItem(unittest.TestCase):

    def test_assembleRefs_ok(self):
        phonyitem = MagicMock()
        phonyitem.render = lambda x, k: 'bar'
        phonyitem.has_references = True
        phonyitem.get_references = lambda *x: ['foo']

        iwr = RefItem([phonyitem], {})

        self.assertEqual(iwr.get_references(), ['foo', 'bar'])
        self.assertTrue(iwr.allRefs)

    def test_assembleRefs_failedrefs(self):
        phonyitem = MagicMock()
        phonyitem.render.side_effect = errors.ResolveError('foo')
def cli():
    try:
        # this adapter has to be symlinked to ansible_dir, so we can use this
        # information to initialise the inventory_base_uri to ansible_dir:
        ansible_dir = os.path.abspath(os.path.dirname(sys.argv[0]))

        defaults = {'inventory_base_uri': ansible_dir,
                    'no_refs': False,
                    'pretty_print': True,
                    'output': 'json',
                    'applications_postfix': '_hosts'}
        defaults.update(find_and_read_configfile())

        def add_ansible_options_group(parser, defaults):
            group = optparse.OptionGroup(parser, 'Ansible options',
                                         'Ansible-specific options')
            group.add_option('--applications-postfix',
                             dest='applications_postfix',
                             default=defaults.get('applications_postfix'),
                             help='postfix to append to applications to '
                                  'turn them into groups')
            parser.add_option_group(group)

        options = get_options(RECLASS_NAME, VERSION, DESCRIPTION,
                              inventory_shortopt='-l',
                              inventory_longopt='--list',
                              inventory_help='output the inventory',
                              nodeinfo_shortopt='-t',
                              nodeinfo_longopt='--host',
                              nodeinfo_dest='hostname',
                              nodeinfo_help='output host_vars for the given host',
                              add_options_cb=add_ansible_options_group,
                              defaults=defaults)
        storage = get_storage(options.storage_type, options.nodes_uri,
                              options.classes_uri)
        class_mappings = defaults.get('class_mappings')
        defaults.update(vars(options))
        settings = Settings(defaults)
        reclass = Core(storage, class_mappings, settings)

        if options.mode == MODE_NODEINFO:
            data = reclass.nodeinfo(options.hostname)
            # Massage and shift the data like Ansible wants it
            data['parameters']['__reclass__'] = data['__reclass__']
            for i in ('classes', 'applications'):
                data['parameters']['__reclass__'][i] = data[i]
            data = data['parameters']
        else:
            data = reclass.inventory()
            # Ansible inventory is only the list of groups. Groups are the set
            # of classes plus the set of applications with the postfix added:
            groups = data['classes']
            apps = data['applications']
            if options.applications_postfix:
                postfix = options.applications_postfix
                groups.update([(k + postfix, v) for k, v in apps.items()])
            else:
                groups.update(apps)
            data = groups

        print(output(data, options.output, options.pretty_print,
                     options.no_refs))

    except ReclassException as e:
        e.exit_with_message(sys.stderr)
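# Usage note (sketch): Ansible invokes dynamic-inventory scripts with
# '--list' for the full inventory and '--host <name>' for a single host,
# which correspond to the '-l'/'--list' and '-t'/'--host' options registered
# above. The adapter derives inventory_base_uri from its own location, so it
# is expected to live (or be symlinked) inside the Ansible directory.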