def test_process_statistics(self, mock_update_client):
    """process_statistics updates client6 with cert auth but ignores client5.

    ``mock_update_client`` is the patched update_client method (injected by
    the test decorator outside this view).
    """
    metadata = self.load_clients_data(metadata=self.load_groups_data())
    md = Mock()
    md.hostname = "client6"
    metadata.process_statistics(md, None)
    # client6 is expected to be recorded with certificate authentication
    mock_update_client.assert_called_with(md.hostname, dict(auth="cert"))
    mock_update_client.reset_mock()
    md.hostname = "client5"
    metadata.process_statistics(md, None)
    # client5 must not trigger any client update at all
    self.assertFalse(mock_update_client.called)
def test__write_data_db(self):
    """_write_data_db persists probe data and group lists to the database
    and removes rows for probes/groups that no longer exist.

    Fixes: ``yaml.load`` was called without a ``Loader`` argument, which is
    deprecated (and a TypeError on PyYAML >= 6) and unsafe; use
    ``yaml.safe_load``. The bare ``assert False`` (stripped under ``-O``)
    is replaced with ``self.fail``.
    """
    syncdb(TestProbesDB)
    Bcfg2.Options.setup.probes_db = True
    probes = self.get_obj()
    probes.probedata = self.get_test_probedata()
    probes.cgroups = self.get_test_cgroups()
    for cname in ["foo.example.com", "bar.example.com"]:
        client = Mock()
        client.hostname = cname
        probes._write_data_db(client)
        pdata = ProbesDataModel.objects.filter(hostname=cname).all()
        self.assertEqual(len(pdata), len(probes.probedata[cname]))
        # each stored row must round-trip according to its probe type
        for probe in pdata:
            self.assertEqual(probe.hostname, client.hostname)
            self.assertIsNotNone(probe.data)
            if probe.probe == "xml":
                xdata = lxml.etree.XML(probe.data)
                self.assertIsNotNone(xdata)
                self.assertIsNotNone(xdata.find("test"))
                self.assertEqual(xdata.find("test").get("foo"), "foo")
            elif probe.probe == "text":
                pass
            elif probe.probe == "multiline":
                self.assertGreater(len(probe.data.splitlines()), 1)
            elif probe.probe == "empty":
                self.assertEqual(probe.data, "")
            elif probe.probe == "yaml":
                # safe_load: test fixtures are plain YAML structures
                self.assertItemsEqual(test_data, yaml.safe_load(probe.data))
            elif probe.probe == "json":
                self.assertItemsEqual(test_data, json.loads(probe.data))
            else:
                self.fail("Strange probe found in _write_data_db data")
        pgroups = ProbesGroupsModel.objects.filter(hostname=cname).all()
        self.assertEqual(len(pgroups), len(probes.cgroups[cname]))
    # test that old probe data is removed properly
    cname = 'foo.example.com'
    del probes.probedata[cname]['text']
    probes.cgroups[cname].pop()
    client = Mock()
    client.hostname = cname
    probes._write_data_db(client)
    pdata = ProbesDataModel.objects.filter(hostname=cname).all()
    self.assertEqual(len(pdata), len(probes.probedata[cname]))
    pgroups = ProbesGroupsModel.objects.filter(hostname=cname).all()
    self.assertEqual(len(pgroups), len(probes.cgroups[cname]))
def test_get_additional_data(self):
    """Known clients return their stored probe data; unknown clients get an
    empty ClientProbeDataSet."""
    probes = self.get_probes_object()
    expected = self.get_test_probedata()
    probes.probedata = self.get_test_probedata()
    for hostname, data in expected.items():
        client_md = Mock()
        client_md.hostname = hostname
        self.assertEqual(data, probes.get_additional_data(client_md))
    # test a non-existent client
    unknown = Mock()
    unknown.hostname = "nonexistent"
    self.assertEqual(probes.get_additional_data(unknown),
                     ClientProbeDataSet())
def test_get_additional_groups(self):
    """Known clients return their stored group list; unknown clients get an
    empty list."""
    probes = self.get_probes_object()
    expected = self.get_test_cgroups()
    probes.cgroups = self.get_test_cgroups()
    for hostname, groups in expected.items():
        client_md = Mock()
        client_md.hostname = hostname
        self.assertEqual(groups, probes.get_additional_groups(client_md))
    # test a non-existent client
    unknown = Mock()
    unknown.hostname = "nonexistent"
    self.assertEqual(probes.get_additional_groups(unknown), list())
def virtual(target, builder, svc, resource_helper):
    """purge_orphaned_loadbalancer should delete the virtual address and the
    virtual servers found in the load balancer's partition."""
    bigip = Mock()
    builder.mock_get_all_bigips(target, return_value=[bigip])
    va = Mock()
    va.load.return_value = va
    va.name = svc['loadbalancer']['id']
    vses = Mock()
    # the mock helper returns itself as its only resource
    vses.get_resources.return_value = [vses]
    # order matters: successive resource_helper() calls yield va, vses, va
    bigip_resource_helpers_returns = [va, vses, va]
    resource_helper.side_effect = bigip_resource_helpers_returns
    tenant_id = svc['loadbalancer']['tenant_id']
    lb_id = svc['loadbalancer']['id']
    hostnames = ['foobyyou']
    bigip.hostname = hostnames[0]
    builder.mock__init_bigips(target)
    # destination inside the tenant partition ties this vs to the lb
    vses.destination = "/{}{}/{}".format(
        target.service_adapter.prefix, tenant_id, va.name)
    # remove the auto-created attribute so hasattr(vses, 'pool') is False
    delattr(vses, 'pool')
    target.purge_orphaned_loadbalancer(
        tenant_id=tenant_id, loadbalancer_id=lb_id, hostnames=hostnames)
    builder.check_mocks(target)
    # three helper instantiations expected (presumably va lookup, vs
    # lookup, va delete -- confirm against the implementation)
    assert resource_helper.call_count == 3
    assert vses.delete.call_count
    assert va.delete.call_count
def test__load_data_db(self):
    """Round-trip: data written with _write_data_db is restored by
    _load_data_db."""
    syncdb(TestProbesDB)
    Bcfg2.Options.setup.probes_db = True
    probes = self.get_obj()
    probes.probedata = self.get_test_probedata()
    probes.cgroups = self.get_test_cgroups()
    for cname in probes.probedata.keys():
        client = Mock()
        client.hostname = cname
        probes._write_data_db(client)
    # wipe in-memory state so _load_data_db must repopulate it from the db
    probes.probedata = dict()
    probes.cgroups = dict()
    probes._load_data_db()
    self.assertItemsEqual(probes.probedata, self.get_test_probedata())
    # the db backend does not store groups at all if a client has
    # no groups set, so we can't just use assertItemsEqual here,
    # because loading saved data may _not_ result in the original
    # data if some clients had no groups set.
    test_cgroups = self.get_test_cgroups()
    for cname, groups in test_cgroups.items():
        if cname in probes.cgroups:
            self.assertEqual(groups, probes.cgroups[cname])
        else:
            self.assertEqual(groups, [])
def test_ReceiveData(self, mock_ReceiveDataItem, mock_write_data):
    """ReceiveData dispatches each item to ReceiveDataItem, persists the
    result, and expires the metadata cache only in aggressive mode when
    the client's group list changes."""
    # we use a simple (read: bogus) datalist here to make this
    # easy to test
    datalist = ["a", "b", "c"]
    probes = self.get_probes_object()
    probes.core.metadata_cache_mode = 'off'
    client = Mock()
    client.hostname = "foo.example.com"
    probes.ReceiveData(client, datalist)
    cgroups = []
    cprobedata = ClientProbeDataSet()
    self.assertItemsEqual(mock_ReceiveDataItem.call_args_list,
                          [call(client, "a", cgroups, cprobedata),
                           call(client, "b", cgroups, cprobedata),
                           call(client, "c", cgroups, cprobedata)])
    mock_write_data.assert_called_with(client)
    # cache mode 'off': no expiry expected
    self.assertFalse(probes.core.metadata_cache.expire.called)
    # change the datalist, ensure that the cache is cleared
    probes.cgroups[client.hostname] = datalist
    probes.core.metadata_cache_mode = 'aggressive'
    probes.ReceiveData(client, ['a', 'b', 'd'])
    mock_write_data.assert_called_with(client)
    probes.core.metadata_cache.expire.assert_called_with(client.hostname)
def test_save(self, mock_dump, mock_open):
    """_save drains the work queue into pending_file and must not raise
    even when opening the file fails."""
    core = Mock()
    ts = self.get_obj(core)
    queue = Mock()
    queue.empty = Mock(side_effect=Empty)
    ts.work_queue = queue
    mock_open.side_effect = IOError
    # test that save does _not_ raise an exception even when
    # everything goes pear-shaped
    ts._save()
    queue.empty.assert_any_call()
    mock_open.assert_called_with(ts.pending_file, 'w')
    queue.reset_mock()
    mock_open.reset_mock()
    # second pass: a queue backed by real data with working
    # empty()/get_nowait() semantics
    queue.data = []
    for hostname, xml in self.data:
        md = Mock()
        md.hostname = hostname
        queue.data.append((md, lxml.etree.XML(xml)))
    queue.empty.side_effect = lambda: len(queue.data) == 0
    queue.get_nowait = Mock(side_effect=lambda: queue.data.pop())
    mock_open.side_effect = None
    ts._save()
    queue.empty.assert_any_call()
    queue.get_nowait.assert_any_call()
    mock_open.assert_called_with(ts.pending_file, 'w')
    mock_open.return_value.close.assert_any_call()
    # the order of the queue data gets changed, so we have to
    # verify this call in an ugly way
    self.assertItemsEqual(mock_dump.call_args[0][0], self.data)
    self.assertEqual(mock_dump.call_args[0][1], mock_open.return_value)
def test_ReceiveDataItem(self):
    """ReceiveDataItem stores each probe's data per client and parses
    ``group:`` lines out of plaintext probe output."""
    probes = self.get_probes_object()
    for cname, cdata in self.get_test_probedata().items():
        client = Mock()
        client.hostname = cname
        for pname, pdata in cdata.items():
            dataitem = lxml.etree.Element("Probe", name=pname)
            if pname == "text":
                # add some groups to the plaintext test to test
                # group parsing
                data = [pdata]
                for group in self.get_test_cgroups()[cname]:
                    data.append("group:%s" % group)
                dataitem.text = "\n".join(data)
            else:
                dataitem.text = str(pdata)
            probes.ReceiveDataItem(client, dataitem)
            self.assertIn(client.hostname, probes.probedata)
            self.assertIn(pname, probes.probedata[cname])
            self.assertEqual(pdata, probes.probedata[cname][pname])
            self.assertIn(client.hostname, probes.cgroups)
            self.assertEqual(probes.cgroups[cname],
                             self.get_test_cgroups()[cname])
def _get_mock_segment(self, name, datadir, port, hostname, address): m = Mock() m.name = name m.datadir = datadir m.port = port m.hostname = hostname m.address = address return m
def __init__(self):
    """Construct a Worker wired to mock collaborators and a zero sleep
    interval so tests run without delays."""
    self.SLEEP_TIME = 0
    backend = Mock()
    options = Mock()
    options.hostname = 'test_hostname'
    options.platform = 'test_platform'
    options.status_file = None
    options.timeout = 100
    Worker.__init__(self, backend, options)
def setUp(self):
    """Provide a HostRpmBuilder mock preloaded with canned paths."""
    builder = Mock(HostRpmBuilder)
    canned = {
        'host_config_dir': 'host configuration directory',
        'variables_dir': 'variables directory',
        'hostname': 'devweb01',
        'output_file_path': '/path/to/output/file',
        'error_file_path': '/path/to/error/file',
    }
    for attribute, value in canned.items():
        setattr(builder, attribute, value)
    self.mock_host_rpm_builder = builder
def error(target, svc, builder, resource_helper, logger, error):
    """purge_orphaned_health_monitor must tolerate load errors raised by
    the resource helper."""
    hostnames = ['foodoozoo']
    bigip = Mock()
    bigip.hostname = hostnames[0]
    builder.mock_get_all_bigips(target, return_value=[bigip])
    monitor = svc['healthmonitors'][0]
    resource_helper.return_value.load.side_effect = error
    target.purge_orphaned_health_monitor(
        monitor['tenant_id'], monitor['id'], hostnames)
def test_should_lock_host(self):
    """Host.lock issues the yadt-host-lock remote call with the message."""
    host = Mock(yadtshell.components.Host)
    host.hostname = 'foobar42.domain'
    yadtshell.components.Host.lock(
        host, message='lock me!', force=False)
    host.remote_call.assert_called_with(
        "yadt-host-lock 'lock me!'", 'lock_host', False)
def test_get_additional_data(self):
    """AWSTags.get_additional_data returns the expected tags per host."""
    awstags = self.get_obj()
    awstags._ec2.get_all_instances = Mock(side_effect=get_all_instances)
    for hostname, expected in tags.items():
        client_md = Mock()
        client_md.hostname = hostname
        actual = awstags.get_additional_data(client_md)
        self.assertItemsEqual(actual, expected)
def _get_registered_scheduler_driver_master(self):
    """Register a ScaleScheduler against mock driver/framework/master and
    return the scheduler, driver and master-info objects."""
    driver = Mock()
    framework = Mock()
    framework.value = 'framework_id'
    master = Mock()
    master.hostname = 'localhost'
    master.port = 1234
    sched = scheduler.scale_scheduler.ScaleScheduler(None)
    sched.registered(driver, framework, master)
    return sched, driver, master
def test_should_update_next_artefacts_only(self):
    """Host.update without a pending reboot runs yadt-host-update on the
    next artefacts only (no -r switch)."""
    host = Mock(yadtshell.components.Host)
    host.hostname = 'foobar42.domain'
    host.next_artefacts = ['foo/1-2.3', 'bar/1-1.3/2']
    host.remote_call.return_value = 'remote call'
    host.reboot_required = False
    yadtshell.components.Host.update(host)
    host.remote_call.assert_called_with(
        'yadt-host-update foo-1-2.3 bar-1-1.3/2', 'foobar42.domain_update')
def main_path(target, builder, svc, resource_helper):
    """Happy path: orphaned health monitors are loaded and purged."""
    hostnames = ['foodoozoo']
    bigip = Mock()
    bigip.hostname = hostnames[0]
    builder.mock_get_all_bigips(target, return_value=[bigip])
    monitor = svc['healthmonitors'][0]
    target.purge_orphaned_health_monitor(
        monitor['tenant_id'], monitor['id'], hostnames)
    builder.check_mocks(target)
    assert resource_helper.return_value.load.call_count
    assert resource_helper.call_count == 4
def _get_registered_scheduler_driver_master(self):
    """Create a ScaleScheduler, fire its registered() callback with mock
    driver/framework/master objects, and return the trio."""
    mock_driver = Mock()
    mock_framework_id = Mock()
    mock_framework_id.value = 'framework_id'
    mock_master = Mock()
    mock_master.hostname = 'localhost'
    mock_master.port = 1234
    instance = scheduler.scale_scheduler.ScaleScheduler(None)
    instance.registered(mock_driver, mock_framework_id, mock_master)
    return instance, mock_driver, mock_master
def test_should_update_with_reboot_switch_when_reboot_required(self):
    """Host.update adds the -r switch when a reboot is required."""
    host = Mock(yadtshell.components.Host)
    host.hostname = 'foobar42.domain'
    host.next_artefacts = ['foo/1-2.3', 'bar/1-1.3/2']
    host.remote_call.return_value = 'remote call'
    host.reboot_required = True
    host.kwargs = {'reboot_required': True}
    yadtshell.components.Host.update(host, reboot_required=True)
    host.remote_call.assert_called_with(
        'yadt-host-update -r foo-1-2.3 bar-1-1.3/2',
        'foobar42.domain_update')
def test_should_update_with_reboot_switch_when_reboot_required(self):
    """Host.update adds the -r switch when a reboot is required, even when
    an ssh poll limit is configured."""
    host = Mock(yadtshell.components.Host)
    host.hostname = 'foobar42.domain'
    host.next_artefacts = ['foo/1-2.3', 'bar/1-1.3/2']
    host.remote_call.return_value = 'remote call'
    host.reboot_required = True
    host.ssh_poll_max_seconds = 42
    host.kwargs = {'reboot_required': True}
    yadtshell.components.Host.update(host, reboot_required=True)
    host.remote_call.assert_called_with(
        'yadt-host-update -r foo-1-2.3 bar-1-1.3/2',
        'foobar42.domain_update')
def test_ReceiveData(self, mock_ReceiveDataItem, mock_write_data):
    """ReceiveData forwards every item to ReceiveDataItem and persists."""
    # deliberately bogus payloads -- ReceiveDataItem itself is mocked out
    datalist = ["a", "b", "c"]
    probes = self.get_probes_object()
    client = Mock()
    client.hostname = "foo.example.com"
    probes.ReceiveData(client, datalist)
    expected_calls = [call(client, item) for item in datalist]
    self.assertItemsEqual(mock_ReceiveDataItem.call_args_list,
                          expected_calls)
    mock_write_data.assert_called_with(client)
def test_get_additional_groups_caching(self):
    """ Test AWSTags.get_additional_groups() with caching enabled """
    awstags = self.get_obj()
    awstags._ec2.get_all_instances = \
        Mock(side_effect=get_all_instances)
    for hostname, expected in groups.items():
        metadata = Mock()
        metadata.hostname = hostname
        actual = awstags.get_additional_groups(metadata)
        # build an informative failure message up front so a mismatch
        # reports both sides
        msg = """%s has incorrect groups: actual: %s expected: %s""" % (hostname, actual, expected)
        self.assertItemsEqual(actual, expected, msg)
def setUp(self):
    """Provide a canned HostRpmBuilder mock plus a subprocess mock that
    reports success."""
    builder = Mock(HostRpmBuilder)
    canned = {
        'hostname': 'berweb01',
        'thread_name': 'thread-0',
        'work_dir': '/path/to/working/directory',
        'rpm_build_dir': '/path/to/rpm/build/directory',
    }
    for attribute, value in canned.items():
        setattr(builder, attribute, value)
    builder.logger = Mock()
    builder._tar_sources.return_value = '/path/to/tarred_sources.tar.gz'
    process = Mock()
    process.communicate.return_value = ('stdout', 'stderr')
    process.returncode = 0
    self.mock_host_rpm_builder = builder
    self.mock_process = process
def test_should_publish_finished_event(self):
    """publish_finished broadcasts a 'finished' event for the target and
    increments the success metric."""
    mock_protocol = Mock(ProcessProtocol)
    mock_broadcaster = Mock()
    mock_protocol.broadcaster = mock_broadcaster
    mock_protocol.hostname = 'hostname'
    mock_protocol.target = 'dev123'
    mock_protocol.readable_command = '/usr/bin/python abc'
    mock_protocol.tracking_id = 'tracking-id'
    mock_protocol.error_buffer = Mock()
    ProcessProtocol.publish_finished(mock_protocol)
    # exact broadcast payload, including the human-readable summary line
    self.assertEquals(
        call('dev123', '/usr/bin/python abc', 'finished',
             '(hostname) target[dev123] request finished: "/usr/bin/python abc" succeeded.',
             tracking_id='tracking-id'),
        mock_broadcaster.publish_cmd_for_target.call_args)
    self.assertEqual(METRICS['commands_succeeded.dev123'], 1)
def test_end_client_run(self):
    """end_client_run launches every trigger entry with hostname, -p
    profile and -g colon-joined groups arguments."""
    trigger = self.get_obj()
    trigger.async_run = Mock()
    trigger.entries = {'foo.sh': Mock(), 'bar': Mock()}
    metadata = Mock()
    metadata.hostname = "host"
    metadata.profile = "profile"
    metadata.groups = ['a', 'b', 'c']
    # expected argument vector appended to each script path
    args = ['host', '-p', 'profile', '-g', 'a:b:c']
    trigger.end_client_run(metadata)
    self.assertItemsEqual(
        [[os.path.join(trigger.data, 'foo.sh')] + args,
         [os.path.join(trigger.data, 'bar')] + args],
        [c[0][0] for c in trigger.async_run.call_args_list])
def test_end_client_run(self):
    """end_client_run launches every trigger entry with hostname, -p
    profile and -g colon-joined groups arguments."""
    trigger = self.get_obj()
    trigger.async_run = Mock()
    trigger.entries = {"foo.sh": Mock(), "bar": Mock()}
    metadata = Mock()
    metadata.hostname = "host"
    metadata.profile = "profile"
    metadata.groups = ["a", "b", "c"]
    # expected argument vector appended to each script path
    args = ["host", "-p", "profile", "-g", "a:b:c"]
    trigger.end_client_run(metadata)
    self.assertItemsEqual(
        [[os.path.join(trigger.data, "foo.sh")] + args,
         [os.path.join(trigger.data, "bar")] + args],
        [c[0][0] for c in trigger.async_run.call_args_list],
    )
def test_periodic(self):
    """periodic() fires on_node_lost for a known worker once it stops
    being alive, and drops it from the worker registry."""
    c = self.Consumer()
    g = Gossip(c)
    g.on_node_lost = Mock()
    state = g.state = Mock()
    worker = Mock()
    state.workers = {'foo': worker}
    worker.alive = True
    worker.hostname = 'foo'
    # alive worker: nothing should happen
    g.periodic()
    worker.alive = False
    g.periodic()
    g.on_node_lost.assert_called_with(worker)
    # the dead worker must have been removed from the registry
    with self.assertRaises(KeyError):
        state.workers['foo']
def setUp(self):
    """Wire a StateHandler to mock persistence and replication layers
    exposed through a mock homeserver."""
    self.persistence = Mock(spec=[
        "get_unresolved_state_tree",
        "update_current_state",
        "get_latest_pdus_in_context",
        "get_current_state_pdu",
        "get_pdu",
    ])
    self.replication = Mock(spec=["get_pdu"])
    homeserver = Mock(spec=["get_datastore", "get_replication_layer"])
    homeserver.get_datastore.return_value = self.persistence
    homeserver.get_replication_layer.return_value = self.replication
    homeserver.hostname = "bob.com"
    self.state = StateHandler(homeserver)
def setUp(self):
    """Wire a StateHandler to mock persistence (including power-level
    lookups) and replication layers via a mock homeserver."""
    self.persistence = Mock(spec=[
        "get_unresolved_state_tree",
        "update_current_state",
        "get_latest_pdus_in_context",
        "get_current_state_pdu",
        "get_pdu",
        "get_power_level",
    ])
    self.replication = Mock(spec=["get_pdu"])
    homeserver = Mock(spec=["get_datastore", "get_replication_layer"])
    homeserver.get_datastore.return_value = self.persistence
    homeserver.get_replication_layer.return_value = self.replication
    homeserver.hostname = "bob.com"
    self.state = StateHandler(homeserver)
def setUp(self):
    """Provide a HostRpmBuilder mock with canned paths and an overlay
    stub for each test."""
    self.VARIABLES_DIRECTORY = 'variables-directory'
    self.RPM_REQUIRES_PATH = 'rpm-requires-path'
    builder = Mock(HostRpmBuilder)
    canned = {
        'thread_name': 'Mock-Thread',
        'hostname': 'devweb01',
        'revision': '123',
        'host_config_dir': '/foo/bar',
        'variables_dir': self.VARIABLES_DIRECTORY,
        'rpm_requires_path': self.RPM_REQUIRES_PATH,
        'rpm_provides_path': 'rpm-provides-path',
        'config_viewer_host_dir': 'config_viewer_host_dir',
    }
    for attribute, value in canned.items():
        setattr(builder, attribute, value)
    builder.logger = Mock()
    builder._overlay_segment = self._create_mock_overlay_segment_method()
    self.mock_host_rpm_builder = builder
def setUp(self):
    """Provide a HostRpmBuilder mock with canned paths and an overlay
    stub for each test."""
    self.variables_directory = '/path/to/variables-directory'
    self.rpm_requires_path = '/path/to/rpm-requires'
    builder = Mock(HostRpmBuilder)
    canned = {
        'thread_name': 'Mock-Thread',
        'hostname': 'devweb01',
        'revision': '123',
        'host_config_dir': '/foo/bar',
        'variables_dir': self.variables_directory,
        'rpm_requires_path': self.rpm_requires_path,
        'rpm_provides_path': 'rpm-provides-path',
        'config_viewer_host_dir': 'config_viewer_host_dir',
    }
    for attribute, value in canned.items():
        setattr(builder, attribute, value)
    builder.logger = Mock()
    builder._overlay_segment = self._create_mock_overlay_segment_method()
    self.mock_host_rpm_builder = builder
def setUp(self):
    """Provide a HostRpmBuilder mock with canned paths, a config-rpm
    prefix, and an overlay stub for each test."""
    self.variables_directory = '/path/to/variables-directory'
    self.rpm_requires_path = '/path/to/rpm-requires'
    builder = Mock(HostRpmBuilder)
    canned = {
        'thread_name': 'Mock-Thread',
        'hostname': 'devweb01',
        'revision': '123',
        'host_config_dir': '/foo/bar',
        'variables_dir': self.variables_directory,
        'rpm_requires_path': self.rpm_requires_path,
        'rpm_provides_path': 'rpm-provides-path',
        'config_viewer_host_dir': 'config_viewer_host_dir',
        'config_rpm_prefix': 'any-config-prefix',
    }
    for attribute, value in canned.items():
        setattr(builder, attribute, value)
    builder.logger = Mock()
    builder._overlay_segment = self._create_mock_overlay_segment_method()
    self.mock_host_rpm_builder = builder
def mock_working_job(monkeypatch):
    """Fixture: a Job mock that starts queued, powers on, and becomes
    ready; the Job class itself is monkeypatched to return it."""
    job = Mock()
    import spalloc.scripts.alloc
    monkeypatch.setattr(spalloc.scripts.alloc, "Job",
                        Mock(return_value=job))
    job.id = 123
    job.state = JobState.queued
    job.wait_for_state_change.side_effect = [
        JobState.power, JobState.power, JobState.ready]
    job.width = job.height = 8
    job.connections = {(0, 0): "foobar"}
    job.hostname = "foobar"
    job.machine_name = "m"
    return job
def test_check_acl(self, mock_rmi_names_equal):
    """check_acl scenarios: default allow with no entries, explicit
    Allow/Deny entries, default deny for remote clients, and default
    allow for localhost."""
    af = self.get_obj()
    af.Match = Mock()
    metadata = Mock()
    # treat RMI names as equal only when the strings match exactly
    mock_rmi_names_equal.side_effect = lambda a, b: a == b

    def reset():
        # clear call history between scenarios
        af.Match.reset_mock()
        mock_rmi_names_equal.reset_mock()

    # test default allow
    af.entries = []
    self.assertTrue(af.check_acl(metadata, 'ACL.test'))
    # test explicit allow and deny
    reset()
    af.entries = [
        lxml.etree.Element("Allow", method='ACL.test'),
        lxml.etree.Element("Deny", method='ACL.test2')
    ]
    af.Match.return_value = af.entries
    self.assertTrue(af.check_acl(metadata, 'ACL.test'))
    af.Match.assert_called_with(metadata)
    self.assertIn(call('ACL.test', 'ACL.test'),
                  mock_rmi_names_equal.call_args_list)
    reset()
    self.assertFalse(af.check_acl(metadata, 'ACL.test2'))
    af.Match.assert_called_with(metadata)
    self.assertIn(call('ACL.test2', 'ACL.test2'),
                  mock_rmi_names_equal.call_args_list)
    # test default deny for non-localhost
    reset()
    self.assertFalse(af.check_acl(metadata, 'ACL.test3'))
    af.Match.assert_called_with(metadata)
    # test default allow for localhost
    reset()
    metadata.hostname = 'localhost'
    self.assertTrue(af.check_acl(metadata, 'ACL.test3'))
    af.Match.assert_called_with(metadata)
def test_should_publish_failed_event_with_stderr_from_process(self, mock_log):
    """publish_failed broadcasts a 'failed' event carrying the process
    stderr as the message and increments the failure metric."""
    mock_protocol = Mock(ProcessProtocol)
    mock_broadcaster = Mock()
    mock_protocol.broadcaster = mock_broadcaster
    mock_protocol.hostname = 'hostname'
    mock_protocol.target = 'dev123'
    mock_protocol.readable_command = '/usr/bin/python abc'
    mock_protocol.tracking_id = 'tracking_id'
    # stderr captured from the (failed) child process
    mock_protocol.error_buffer = StringIO(
        'Someone has shut down the internet.')
    ProcessProtocol.publish_failed(mock_protocol, 123)
    self.assertEquals(
        call('dev123', '/usr/bin/python abc', 'failed',
             message='Someone has shut down the internet.',
             tracking_id='tracking_id'),
        mock_broadcaster.publish_cmd_for_target.call_args)
    self.assertEqual(METRICS['commands_failed.dev123'], 1)
def test_should_publish_finished_event(self):
    """publish_finished broadcasts a 'finished' event for the target and
    increments the success metric."""
    mock_protocol = Mock(ProcessProtocol)
    mock_broadcaster = Mock()
    mock_protocol.broadcaster = mock_broadcaster
    mock_protocol.hostname = 'hostname'
    mock_protocol.target = 'dev123'
    mock_protocol.readable_command = '/usr/bin/python abc'
    mock_protocol.tracking_id = 'tracking-id'
    mock_protocol.error_buffer = Mock()
    ProcessProtocol.publish_finished(mock_protocol)
    # exact broadcast payload, including the human-readable summary line
    self.assertEquals(
        call(
            'dev123', '/usr/bin/python abc', 'finished',
            '(hostname) target[dev123] request finished: "/usr/bin/python abc" succeeded.',
            tracking_id='tracking-id'),
        mock_broadcaster.publish_cmd_for_target.call_args)
    self.assertEqual(METRICS['commands_succeeded.dev123'], 1)
def test_should_publish_failed_event_with_stderr_from_process(
        self, mock_log):
    """publish_failed broadcasts a 'failed' event carrying the process
    stderr as the message and increments the failure metric."""
    mock_protocol = Mock(ProcessProtocol)
    mock_broadcaster = Mock()
    mock_protocol.broadcaster = mock_broadcaster
    mock_protocol.hostname = 'hostname'
    mock_protocol.target = 'dev123'
    mock_protocol.readable_command = '/usr/bin/python abc'
    mock_protocol.tracking_id = 'tracking_id'
    # stderr captured from the (failed) child process
    mock_protocol.error_buffer = StringIO(
        'Someone has shut down the internet.')
    ProcessProtocol.publish_failed(mock_protocol, 123)
    self.assertEquals(
        call('dev123', '/usr/bin/python abc', 'failed',
             message='Someone has shut down the internet.',
             tracking_id='tracking_id'),
        mock_broadcaster.publish_cmd_for_target.call_args)
    self.assertEqual(METRICS['commands_failed.dev123'], 1)
def test__load_data_db(self):
    """Round-trip: data written with _write_data_db is restored by
    _load_data_db (db-backed probes object)."""
    syncdb(TestProbesDB)
    probes = self.get_probes_object(use_db=True)
    probes.probedata = self.get_test_probedata()
    probes.cgroups = self.get_test_cgroups()
    for cname in probes.probedata.keys():
        client = Mock()
        client.hostname = cname
        probes._write_data_db(client)
    # wipe in-memory state so _load_data_db must repopulate it from the db
    probes.probedata = dict()
    probes.cgroups = dict()
    probes._load_data_db()
    self.assertItemsEqual(probes.probedata, self.get_test_probedata())
    # the db backend does not store groups at all if a client has
    # no groups set, so we can't just use assertItemsEqual here,
    # because loading saved data may _not_ result in the original
    # data if some clients had no groups set.
    test_cgroups = self.get_test_cgroups()
    for cname, groups in test_cgroups.items():
        if cname in probes.cgroups:
            self.assertEqual(groups, probes.cgroups[cname])
        else:
            self.assertEqual(groups, [])
def setUp(self):
    """Provide a HostRpmBuilder mock with canned config-viewer values."""
    builder = Mock(HostRpmBuilder)
    for attribute, value in (
            ('config_viewer_host_dir', 'config-viewer-host-dir'),
            ('hostname', 'hostname'),
            ('revision', '1234')):
        setattr(builder, attribute, value)
    self.mock_host_rpm_builder = builder
def Consumer(self, hostname='*****@*****.**', pid=4312):
    """Return a Mock consumer exposing the given hostname and pid."""
    consumer = Mock()
    consumer.hostname = hostname
    consumer.pid = pid
    return consumer
def ClientMetadata(host, profile, groups, *args):
    """Build a minimal client-metadata stand-in; extra positional
    arguments are accepted and ignored."""
    stub = Mock()
    stub.hostname = host
    stub.profile = profile
    stub.groups = groups
    return stub
def build_metadata(host):
    """Return a metadata Mock whose hostname and profile are both *host*."""
    stub = Mock()
    stub.hostname = stub.profile = host
    return stub