def test_configure_multiple_ok(collectd_cvmfs, repos, attributes, memory, mounttime, mounttimeout, interval):
    """Configuring with multiple Repo/Attribute options registers one read
    callback carrying the merged probe configuration."""
    # TODO: Simplify/fixturize the config init logic
    config = Mock()
    config.children = [Mock(key='Repo', values=[repo]) for repo in repos] + \
                      [Mock(key='Attribute', values=[attr]) for attr in attributes] + \
                      [Mock(key='Memory', values=[str(memory)]),
                       Mock(key='MountTime', values=[str(mounttime)]),
                       Mock(key='MountTimeout', values=[str(mounttimeout)]),
                       Mock(key='Interval', values=[str(interval)])]
    with patch('collectd.register_read') as register_mock:
        probe_instance = collectd_cvmfs.CvmfsProbe()
        probe_instance.configure(config)
        # FIX: the original accessed `assert_called_once` without calling it,
        # which is a bare attribute lookup on a Mock and asserts nothing.
        register_mock.assert_called_once()
        args_sent = register_mock.call_args[1]
        probe_config = args_sent['data']
        assert probe_config.repos == repos
        assert probe_config.attributes == attributes
        assert probe_config.memory == memory
        assert probe_config.mounttime == mounttime
        assert args_sent['name'] == probe_config.config_name
        assert args_sent['callback'] == probe_instance.read
        assert args_sent['interval'] == interval
def test_on_snapshot_loaded(self):
    """A loaded snapshot triggers cleanup with only this host's endpoints."""
    def _node(key, value):
        node = Mock()
        node.key = key
        node.value = value
        return node

    snapshot = Mock()
    snapshot.children = [
        # Parseable endpoint owned by this host.
        _node("/calico/v1/host/hostname/workload/"
              "orch/wlid/endpoint/epid", ENDPOINT_STR),
        # Truncated (unparseable) value, also on this host.
        _node("/calico/v1/host/hostname/workload/"
              "orch/wlid/endpoint/epid2", ENDPOINT_STR[:10]),
        # Endpoint that belongs to a different host.
        _node("/calico/v1/host/other/workload/"
              "orch/wlid/endpoint/epid", ENDPOINT_STR),
        # Non-endpoint key.
        _node("/calico/v1/Ready", "true"),
    ]
    with patch.object(self.watcher, "clean_up_endpoint_statuses") as m_clean:
        self.watcher._on_snapshot_loaded(snapshot)
        # Cleanup should only get the endpoints on our host.
        m_clean.assert_called_once_with(
            set([EndpointId("hostname", "orch", "wlid", "epid")]))
def test_read_mounttime_failed(collectd_cvmfs, repos, attributes, memory, mounttime, mounttimeout, interval):
    """When scanning the mountpoint raises, read() must dispatch mountok=0."""
    # TODO: Simplify/fixturize the config init logic
    config = Mock()
    config.children = [
        Mock(key='Repo', values=repos),
        Mock(key='Attribute', values=[]),
        Mock(key='Memory', values=["false"]),
        Mock(key='MountTime', values=[str(mounttime)]),
        Mock(key='MountTimeout', values=[str(mounttimeout)]),
        Mock(key='Interval', values=[str(interval)])
    ]
    probe_instance = collectd_cvmfs.CvmfsProbe()
    # FIX: configure exactly once, inside the patch. The original also called
    # configure() before patching collectd.register_read, registering a
    # duplicate read callback as a side effect.
    with patch('collectd.register_read') as register_mock:
        probe_instance.configure(config)
        probe_config = register_mock.call_args[1]['data']
    with patch('collectd_cvmfs.CvmfsProbe.safe_scandir', side_effect=Exception('Catacroc!')):
        with patch('collectd.Values') as val_mock:
            probe_instance.read(probe_config)
            if mounttime:
                val_mock.return_value.dispatch.assert_any_call(
                    type='mountok', values=[0], interval=probe_config.interval)
def _get_mock_page(path='mock', depth=0, parent=None):
    """Build a Mock page whose save() succeeds and whose children are
    mocked recursively down to the requested depth."""
    page = Mock()
    page.path = path
    page.parent = parent
    page.save.return_value = True
    page.children = _get_children_mock(depth, parent=page)
    return page
def load_plugin(self, klass):
    """Load a single plugin class through PluginLoader and return it."""
    # TODO: neater without tuple (add test to node for this)
    container = Mock()
    container.children = (self.create_plugin_dir(klass),)
    loaded = PluginLoader().load(container)
    return loaded[0]
def mock_read_no_pools(path):
    """
    EtcdClient mock side effect for read with no IPv4 pools.
    """
    assert path == IPV4_POOLS_PATH
    empty_result = Mock(spec=EtcdResult)
    empty_result.children = []
    return empty_result
def mock_read_no_bgppeers(path):
    """
    EtcdClient mock side effect for read with no IPv4 BGP Peers
    """
    assert path == BGP_PEERS_PATH
    peers_result = Mock(spec=EtcdResult)
    peers_result.children = iter(())
    return peers_result
def test_read_ok(collectd_cvmfs, repos, attributes, memory, mounttime, mounttimeout, interval):
    """Happy-path read(): dispatches mounttime/mountok, memory, and xattr metrics."""
    # TODO: Simplify/fixturize the config init logic
    config = Mock()
    config.children = [
        Mock(key='Repo', values=repos),
        Mock(key='Attribute', values=attributes),
        Mock(key='Memory', values=[str(memory)]),
        Mock(key='MountTime', values=[str(mounttime)]),
        Mock(key='MountTimeout', values=[str(mounttimeout)]),
        Mock(key='Interval', values=[str(interval)])
    ]
    probe_instance = collectd_cvmfs.CvmfsProbe()
    # FIX: configure exactly once, inside the patch. The original also called
    # configure() before patching collectd.register_read, registering a
    # duplicate read callback as a side effect.
    with patch('collectd.register_read') as register_mock:
        probe_instance.configure(config)
        probe_config = register_mock.call_args[1]['data']
    with patch('collectd_cvmfs.CvmfsProbe.read_mounttime', return_value=MOCK_METRICS_MOUNTTIME):
        with patch('xattr.getxattr', return_value=str(MOCK_METRICS_XATTR)):
            with patch('psutil.Process') as psutil_mock:
                psutil_mock.return_value.get_memory_info.return_value = Mock(
                    rss=MOCK_METRICS_MEM_RSS, vms=MOCK_METRICS_MEM_VMS)
                with patch('collectd.Values') as val_mock:
                    probe_instance.read(probe_config)
                    collectd_cvmfs.collectd.Values.assert_called_once_with(
                        plugin=collectd_cvmfs.PLUGIN_NAME)
                    if mounttime:
                        val_mock.return_value.dispatch.assert_any_call(
                            type='mounttime', values=[MOCK_METRICS_MOUNTTIME],
                            interval=probe_config.interval)
                        val_mock.return_value.dispatch.assert_any_call(
                            type='mountok', values=[1],
                            interval=probe_config.interval)
                    if memory:
                        psutil_mock.assert_any_call(MOCK_METRICS_XATTR)
                        val_mock.return_value.dispatch.assert_any_call(
                            type='memory', type_instance='rss',
                            values=[MOCK_METRICS_MEM_RSS],
                            interval=probe_config.interval)
                        val_mock.return_value.dispatch.assert_any_call(
                            type='memory', type_instance='vms',
                            values=[MOCK_METRICS_MEM_VMS],
                            interval=probe_config.interval)
                    for attr in attributes:
                        val_mock.return_value.dispatch.assert_any_call(
                            type=attr, values=[MOCK_METRICS_XATTR],
                            interval=probe_config.interval)
def create_plugin_dir(self, name, plugin_class):
    """Return a mock directory node whose children.find() yields the plugin file."""
    directory = Mock()
    directory.name = name
    directory.children = Mock()
    directory.children.find.return_value = self.create_plugin_file(
        name, plugin_class)
    return directory
def test_config_callback(self):
    """Each config callback creates one plugin with its own host and port."""
    # (ip, port) pairs fed to the manager, in callback order.
    expected = [('192.168.0.0', '8080'), ('192.168.0.1', '8081')]
    for ip, port in expected:
        mock_config = Mock()
        mock_config.children = [
            _build_mock_config_child(STATUS_HOST, ip),
            _build_mock_config_child(STATUS_PORT, port),
        ]
        self.plugin_manager.config_callback(mock_config)

    # FIX: assertEquals is a deprecated alias, removed in Python 3.12;
    # use assertEqual.
    self.assertEqual(2, len(self.plugin_manager.plugins))
    for plugin, (ip, port) in zip(self.plugin_manager.plugins, expected):
        self.assertEqual(ip, plugin.nginx_agent.status_host)
        self.assertEqual(port, plugin.nginx_agent.status_port)
def setup_method(self, method):
    """Load the alias, matches and multiplealiases plugins before each test."""
    container = Mock()
    container.children = tuple(
        self.create_plugin_dir(name, klass)
        for name, klass in (('alias', Alias),
                            ('matches', Matches),
                            ('multiplealiases', MultipleAliases)))
    self.plugins = PluginLoader().load(container)
def test_config_callback(self, mock_requests_get):
    """Config callbacks create one plugin per config, each with its own host/port."""
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.json.return_value = [1, 2, 3, 4, 5, 6, 7]
    mock_requests_get.return_value = mock_response

    # (ip, port) pairs fed to the manager, in callback order.
    expected = [('192.168.0.0', '8080'), ('192.168.0.1', '8081')]
    for ip, port in expected:
        mock_config = Mock()
        mock_config.children = [
            _build_mock_config_child(STATUS_HOST, ip),
            _build_mock_config_child(STATUS_PORT, port),
        ]
        self.plugin_manager.config_callback(mock_config)

    # FIX: assertEquals is a deprecated alias, removed in Python 3.12;
    # use assertEqual.
    self.assertEqual(2, len(self.plugin_manager.plugins))
    for plugin, (ip, port) in zip(self.plugin_manager.plugins, expected):
        self.assertEqual(ip, plugin.nginx_agent.status_host)
        self.assertEqual(port, plugin.nginx_agent.status_port)
def test_load_plugins(self):
    """Loading a directory holding two plugin dirs yields both classes in order."""
    class Plugin1:
        pass

    class Plugin2:
        pass

    container = Mock()
    container.children = tuple(
        self.create_plugin_dir(name, klass)
        for name, klass in (('plugin1', Plugin1), ('plugin2', Plugin2)))
    assert PluginLoader().load(container) == [Plugin1, Plugin2]
def mock_read_2_pools(path):
    """
    EtcdClient mock side effect for read with 2 IPv4 pools.
    """
    assert path == IPV4_POOLS_PATH

    def _pool_node(index, cidr):
        node = Mock(spec=EtcdResult)
        node.key = IPV4_POOLS_PATH + str(index)
        node.value = cidr
        return node

    result = Mock(spec=EtcdResult)
    result.children = iter(
        [_pool_node(i, net)
         for i, net in enumerate(["192.168.3.0/24", "192.168.5.0/24"])])
    return result
def create_plugin_dir(self, plugin_name, plugin_class):
    """Return a mock plugin directory whose find() only matches plugin_name.py."""
    plugin_dir = Mock()
    plugin_dir.name = plugin_name
    plugin_dir.children = Mock()
    plugin_file = self.create_plugin_file(plugin_name, plugin_class)

    def find(name, type):
        assert type == 'py'
        # FIX: compare strings with ==, not `is`; the identity check only
        # worked by accident of CPython string interning.
        if name == plugin_name:
            return plugin_file

    plugin_dir.find = find
    return plugin_dir
def _get_block(self, block_info):
    """Assemble a Mock block from a block_info dict (usage_id plus extra attrs)."""
    # Spec covers the standard block surface plus any requested attrs,
    # so unknown attribute access still fails fast.
    extra_attrs = block_info.get('attrs', {})
    spec = ['scope_ids', 'runtime', 'has_children', 'children'] + list(extra_attrs)

    block = Mock(spec=spec)
    block.scope_ids = Mock(usage_id=block_info.get('usage_id'))
    block.runtime = self.runtime_mock
    block.children = []
    for attr_name, attr_value in extra_attrs.items():
        setattr(block, attr_name, attr_value)
    return block
def create_plugin_dir(self, klass):
    """Return a mock plugin directory for klass whose find() only matches
    the class's lowercased name with a 'py' type."""
    plugin_name = klass.__name__.lower()
    plugin_dir = Mock()
    plugin_dir.name = plugin_name
    plugin_dir.children = Mock()
    plugin_file = self.create_plugin_file(plugin_name, klass)

    def find(name, type):
        assert type == "py"
        # FIX: compare strings with ==, not `is`; the identity check only
        # worked by accident of CPython string interning.
        if name == plugin_name:
            return plugin_file

    plugin_dir.find = find
    return plugin_dir
def mock_read_2_profiles(path, recursive):
    """EtcdClient read() side effect returning two profiles (TEST, UNIT),
    each with its tags and rules keys."""
    assert path == ALL_PROFILES_PATH
    assert recursive

    def _node(key):
        node = Mock(spec=EtcdResult)
        node.key = key
        return node

    keys = [
        CALICO_V_PATH + "/policy/profile/TEST",
        CALICO_V_PATH + "/policy/profile/TEST/tags",
        CALICO_V_PATH + "/policy/profile/TEST/rules",
        CALICO_V_PATH + "/policy/profile/UNIT",
        CALICO_V_PATH + "/policy/profile/UNIT/tags",
        CALICO_V_PATH + "/policy/profile/UNIT/rules",
    ]
    results = Mock(spec=EtcdResult)
    results.children = iter([_node(key) for key in keys])
    return results
def generate_config(input_file=None, mount_points=None, nfs_ops=None):
    """Build a mock collectd config containing only the options supplied."""
    candidates = [
        ('MountStatsPath', input_file),
        ('Mountpoints', mount_points),
        ('NFSOps', nfs_ops),
    ]
    mock_config = Mock()
    mock_config.children = [
        ConfigOption(key, value) for key, value in candidates if value
    ]
    return mock_config
def test_load_etcd(simple_spec, key):
    """Values read from etcd are coerced to each item's declared type."""
    raw_values = [
        ('my_string', 'str_val'),
        ('my_int', '123'),
        ('my_long', '12341234'),
        ('my_float', '123.123'),
        ('my_bool', 'true'),
        ('my_complex', '1j'),
    ]
    etcd_result = Mock(dir=True)
    etcd_result.children = [
        Mock(key='%s%s' % (key, name), value=value)
        for name, value in raw_values
    ]
    client = Mock(spec=yapconf.etcd_client.Client)
    client.read = Mock(return_value=etcd_result)

    simple_spec.add_source('etcd', 'etcd', client=client)
    config = simple_spec.load_config('etcd')

    assert config == {
        'my_string': 'str_val',
        'my_int': 123,
        'my_long': 12341234,
        'my_float': 123.123,
        'my_bool': True,
        'my_complex': 1j,
    }
def test_on_snapshot_loaded(self):
    """Snapshot processing reports only parseable endpoints owned by this host."""
    nodes = []
    for key, value in [
        # Parseable endpoint owned by this host.
        ("/calico/v1/host/hostname/workload/"
         "orch/wlid/endpoint/epid", ENDPOINT_STR),
        # Truncated (unparseable) value, also on this host.
        ("/calico/v1/host/hostname/workload/"
         "orch/wlid/endpoint/epid2", ENDPOINT_STR[:10]),
        # Endpoint that belongs to a different host.
        ("/calico/v1/host/other/workload/"
         "orch/wlid/endpoint/epid", ENDPOINT_STR),
        # Non-endpoint key.
        ("/calico/v1/Ready", "true"),
    ]:
        node = Mock()
        node.key = key
        node.value = value
        nodes.append(node)

    snapshot = Mock()
    snapshot.children = nodes
    with patch.object(self.watcher, "clean_up_endpoint_statuses") as m_clean:
        self.watcher._on_snapshot_loaded(snapshot)
        # Cleanup should only get the endpoints on our host.
        m_clean.assert_called_once_with(
            set([EndpointId("hostname", "orch", "wlid", "epid")])
        )
def mock_read_no_profiles(path, recursive):
    """EtcdClient read() side effect for a recursive profiles read that finds nothing."""
    assert path == ALL_PROFILES_PATH
    assert recursive
    empty = Mock(spec=EtcdResult)
    empty.children = iter([])
    return empty
'ereq': '0', 'pxname': 'sample_proxy', 'stot': '39728', 'sid': '0', 'bout': '188112702395', 'qlimit': '', 'status': 'OPEN', 'smax': '2', 'dreq': '0', 'econ': '', 'iid': '2', 'chkfail': '', 'downtime': '', 'qcur': '', 'eresp': '', 'throttle': '', 'scur': '0', 'bck': '', 'qmax': '', 'act': '', 'chkdown': '', 'svname': 'FRONTEND'}] return sample_data sys.modules['collectd'] = MockCollectd() import haproxy ConfigOption = collections.namedtuple('ConfigOption', ('key', 'values')) mock_config_default_values = Mock() mock_config_default_values.children = [ ConfigOption('Testing', ('True',)) ] def test_default_config(): module_config = haproxy.config(mock_config_default_values) assert module_config['socket'] == '/var/run/haproxy.sock' assert not module_config['enhanced_metrics'] assert module_config['proxy_monitors'] == ['server', 'frontend', 'backend'] assert module_config['testing'] assert module_config['excluded_metrics'] == set() mock_config_enhanced_metrics_off = Mock() mock_config_enhanced_metrics_off.children = [ ConfigOption('Socket', ('/var/run/haproxy.sock',)),
def test_metrics_submitted_for_resolvers():
    """Resolver (nameserver) stats are submitted as gauges with expected values.

    The original spelled out ~30 near-identical call({...}) literals; the
    expected calls are now generated from an ordered data table, which keeps
    the assertion identical while making the fixture readable and extensible.
    """
    haproxy.submit_metrics = MagicMock()
    mock_config = Mock()
    mock_config.children = [ConfigOption('Testing', ('True', ))]
    haproxy.collect_metrics(haproxy.config(mock_config))

    # (plugin_instance, [(type_instance, value), ...]) in the exact order
    # the plugin is expected to submit them (assert_has_calls is ordered).
    expected = [
        ('nameserver.dns2', [
            ('cname_error', 0), ('truncated', 0), ('update', 0),
            ('refused', 0), ('any_err', 0), ('cname', 0), ('outdated', 0),
            ('too_big', 0), ('invalid', 0), ('snd_error', 0), ('nx', 0),
            ('valid', 0), ('timeout', 0), ('other', 0), ('sent', 0),
        ]),
        ('nameserver.dns1', [
            ('cname_error', 4), ('truncated', 0), ('update', 0),
            ('refused', 0), ('any_err', 0), ('cname', 0), ('outdated', 0),
            ('too_big', 0), ('invalid', 0), ('snd_error', 0), ('nx', 0),
            ('valid', 4), ('timeout', 0), ('other', 0), ('sent', 8),
        ]),
    ]
    haproxy.submit_metrics.assert_has_calls([
        call({
            'values': (value, ),
            'plugin_instance': instance,
            'type_instance': metric,
            'type': 'gauge',
            'plugin': 'haproxy'
        })
        for instance, metrics in expected
        for metric, value in metrics
    ])
def test_metrics_submitted_for_backend_and_server_with_correct_names():
    """Backend/server rows are submitted under 'backend.<proxy>[.<server>]'.

    The original listed every expected call as a literal dict; the calls are
    now generated from an ordered data table of
    (plugin_instance, [(type_instance, value, type), ...]) entries, which
    keeps the assertion identical while removing the duplication.
    """
    haproxy.submit_metrics = MagicMock()
    mock_config = Mock()
    mock_config.children = [
        ConfigOption('ProxyMonitor', ('backend', )),
        ConfigOption('EnhancedMetrics', ('True', )),
        ConfigOption('Testing', ('True', ))
    ]
    haproxy.collect_metrics(haproxy.config(mock_config))

    # Ordered: assert_has_calls checks the sequence, not just membership.
    expected = [
        ('backend.elasticsearch_backend.elasticache', [
            ('rtime', 0, 'gauge'), ('smax', 2, 'gauge'),
            ('lastsess', 0, 'gauge'), ('check_duration', 0, 'gauge'),
            ('rate', 2, 'gauge'), ('wredis', 0, 'derive'),
            ('eresp', 0, 'derive'), ('dresp', 0, 'derive'),
            ('cli_abrt', 0, 'derive'), ('bin', 0, 'derive'),
            ('lbtot', 344777, 'counter'), ('stot', 344777, 'derive'),
            ('econ', 0, 'derive'), ('ttime', 18, 'gauge'),
            ('downtime', 0, 'derive'), ('qcur', 0, 'gauge'),
            ('wretr', 0, 'derive'), ('qtime', 0, 'gauge'),
            ('srv_abrt', 0, 'derive'), ('bout', 0, 'derive'),
            ('ctime', 0, 'gauge'), ('scur', 0, 'gauge'),
            ('bck', 0, 'gauge'), ('qmax', 0, 'gauge'),
            ('rate_max', 9, 'gauge'), ('act', 1, 'gauge'),
            ('chkfail', 0, 'derive'),
        ]),
        ('backend.elasticsearch_backend', [
            ('rtime', 0, 'gauge'), ('smax', 2, 'gauge'),
            ('comp_byp', 0, 'derive'), ('lastsess', 0, 'gauge'),
            ('rate', 3, 'gauge'), ('wredis', 0, 'derive'),
            ('comp_out', 0, 'derive'), ('eresp', 0, 'derive'),
            ('dresp', 0, 'derive'), ('comp_in', 0, 'derive'),
            ('dreq', 0, 'derive'), ('cli_abrt', 0, 'derive'),
            ('bin', 0, 'derive'), ('lbtot', 344777, 'counter'),
            ('stot', 515751, 'derive'), ('econ', 0, 'derive'),
            ('ttime', 18, 'gauge'), ('slim', 800, 'gauge'),
            ('downtime', 0, 'derive'), ('qcur', 0, 'gauge'),
            ('comp_rsp', 0, 'derive'), ('wretr', 0, 'derive'),
            ('qtime', 0, 'gauge'), ('srv_abrt', 0, 'derive'),
            ('bout', 0, 'derive'), ('ctime', 0, 'gauge'),
            ('scur', 0, 'gauge'), ('bck', 0, 'gauge'),
            ('qmax', 0, 'gauge'), ('rate_max', 9, 'gauge'),
            ('act', 1, 'gauge'),
        ]),
    ]
    haproxy.submit_metrics.assert_has_calls([
        call({
            'values': (value, ),
            'plugin_instance': instance,
            'type_instance': metric,
            'type': metric_type,
            'plugin': 'haproxy'
        })
        for instance, metrics in expected
        for metric, value, metric_type in metrics
    ])
def test_metrics_submitted_for_frontend_with_correct_names():
    """Frontend rows are submitted under 'frontend.<proxy>'; process-wide
    stats carry no plugin_instance.

    The original spelled out every expected call as a literal dict; the calls
    are now generated from ordered data tables, which keeps the assertion
    identical while removing the duplication.
    """
    haproxy.submit_metrics = MagicMock()
    mock_config = Mock()
    mock_config.children = [
        ConfigOption('ProxyMonitor', ('frontend', )),
        ConfigOption('EnhancedMetrics', ('True', )),
        ConfigOption('Testing', ('True', ))
    ]
    haproxy.collect_metrics(haproxy.config(mock_config))

    # Process-wide stats: no plugin_instance key in the submitted dict.
    expected_calls = [
        call({
            'values': (value, ),
            'type_instance': metric,
            'type': metric_type,
            'plugin': 'haproxy'
        })
        for metric, value, metric_type in [
            ('connrate', 3, 'gauge'),
            ('cumreq', 5, 'derive'),
            ('idle_pct', 78, 'gauge'),
        ]
    ]
    # Per-frontend stats, in submission order.
    expected_calls += [
        call({
            'values': (value, ),
            'plugin_instance': 'frontend.sensu_frontend',
            'type_instance': metric,
            'type': metric_type,
            'plugin': 'haproxy'
        })
        for metric, value, metric_type in [
            ('smax', 0, 'gauge'), ('rate', 0, 'gauge'),
            ('req_rate', 0, 'gauge'), ('dresp', 0, 'derive'),
            ('ereq', 0, 'derive'), ('dreq', 0, 'derive'),
            ('bin', 0, 'derive'), ('stot', 0, 'derive'),
            ('req_rate_max', 0, 'gauge'), ('slim', 8000, 'gauge'),
            ('rate_lim', 0, 'gauge'), ('bout', 0, 'derive'),
            ('scur', 0, 'gauge'), ('rate_max', 10, 'gauge'),
        ]
    ]
    haproxy.submit_metrics.assert_has_calls(expected_calls)
'qmax': '', 'act': '', 'chkdown': '', 'svname': 'FRONTEND' }] return sample_data sys.modules['collectd'] = MockCollectd() import haproxy ConfigOption = collections.namedtuple('ConfigOption', ('key', 'values')) mock_config_default_values = Mock() mock_config_default_values.children = [ConfigOption('Testing', ('True', ))] def test_default_config(): module_config = haproxy.config(mock_config_default_values) assert module_config['socket'] == '/var/run/haproxy.sock' assert module_config['proxy_monitors'] == ['server', 'frontend', 'backend'] assert module_config['testing'] class MockHAProxySocketComplex(object): def __init__(self, socket_file="whatever"): self.socket_file = socket_file def get_resolvers(self): return {