def test_invalid_and_unsupported_options_forced(self):
    """Forcing unknown options downgrades only the unknown one to a warning;
    known-but-unsupported options remain errors.

    NOTE(review): this test expects the legacy report format
    ("option_name" + "allowed_str"); sibling tests use "option_names" —
    confirm which format the validator currently emits.
    """
    config = {
        "SBD_DELAY_START": "yes",
        "SBD_WATCHDOG_TIMEOUT": "5",
        "SBD_STARTMODE": "clean",
        "SBD_WATCHDOG_DEV": "/dev/watchdog",
        "SBD_UNKNOWN": "",
        "SBD_OPTS": " ",
        "SBD_PACEMAKER": "false",
    }
    assert_report_item_list_equal(
        cmd_sbd._validate_sbd_options(config, allow_unknown_opts=True),
        [
            (
                Severities.ERROR,
                report_codes.INVALID_OPTION,
                {
                    "option_name": "SBD_WATCHDOG_DEV",
                    "option_type": None,
                    "allowed": self.allowed_sbd_options,
                    "allowed_str": self.allowed_sbd_options_str
                },
                None
            ),
            (
                Severities.ERROR,
                report_codes.INVALID_OPTION,
                {
                    "option_name": "SBD_OPTS",
                    "option_type": None,
                    "allowed": self.allowed_sbd_options,
                    "allowed_str": self.allowed_sbd_options_str
                },
                None
            ),
            (
                Severities.WARNING,
                report_codes.INVALID_OPTION,
                {
                    "option_name": "SBD_UNKNOWN",
                    "option_type": None,
                    "allowed": self.allowed_sbd_options,
                    "allowed_str": self.allowed_sbd_options_str
                },
                None
            ),
            (
                Severities.ERROR,
                report_codes.INVALID_OPTION,
                {
                    "option_name": "SBD_PACEMAKER",
                    "option_type": None,
                    "allowed": self.allowed_sbd_options,
                    "allowed_str": self.allowed_sbd_options_str
                },
                None
            )
        ]
    )
def test_unsupported_options(self):
    """Without forcing, known-but-unsupported SBD options are errors."""
    config = {
        "SBD_DELAY_START": "yes",
        "SBD_WATCHDOG_TIMEOUT": "5",
        "SBD_STARTMODE": "clean",
        "SBD_WATCHDOG_DEV": "/dev/watchdog",
        "SBD_OPTS": " "
    }
    assert_report_item_list_equal(
        cmd_sbd._validate_sbd_options(config),
        [
            (
                Severities.ERROR,
                report_codes.INVALID_OPTION,
                {
                    "option_names": ["SBD_WATCHDOG_DEV"],
                    "option_type": None,
                    "allowed": self.allowed_sbd_options,
                },
                None
            ),
            (
                Severities.ERROR,
                report_codes.INVALID_OPTION,
                {
                    "option_names": ["SBD_OPTS"],
                    "option_type": None,
                    "allowed": self.allowed_sbd_options,
                },
                None
            )
        ]
    )
def test_unknown_options_forced(self): config = { "SBD_DELAY_START": "yes", "SBD_WATCHDOG_TIMEOUT": "5", "SBD_STARTMODE": "clean", "SBD_UNKNOWN": "", "another_unknown_option": "some value" } # just make sure there is no exception raised assert_report_item_list_equal( cmd_sbd._validate_sbd_options(config, allow_unknown_opts=True), [ ( Severities.WARNING, report_codes.INVALID_OPTION, { "option_names": ["SBD_UNKNOWN"], "option_type": None, "allowed": self.allowed_sbd_options, }, None ), ( Severities.WARNING, report_codes.INVALID_OPTION, { "option_names": ["another_unknown_option"], "option_type": None, "allowed": self.allowed_sbd_options, }, None ) ] )
def test_success_used_forced(self, mock_net_stop, mock_status):
    """Stopping a qdevice that is still in use succeeds when
    proceed_if_used=True, emitting a warning naming the using clusters."""
    mock_status.return_value = 'Cluster "a_cluster":\n'
    lib.qdevice_stop(self.lib_env, "net", proceed_if_used=True)
    mock_net_stop.assert_called_once_with("mock_runner", "corosync-qnetd")
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.WARNING,
                report_codes.QDEVICE_USED_BY_CLUSTERS,
                {
                    "clusters": ["a_cluster"],
                }
            ),
            (
                severity.INFO,
                report_codes.SERVICE_STOP_STARTED,
                {
                    "service": "quorum device",
                }
            ),
            (
                severity.INFO,
                report_codes.SERVICE_STOP_SUCCESS,
                {
                    "service": "quorum device",
                }
            )
        ]
    )
def test_unknown_options(self):
    """Unknown SBD options are forceable errors (FORCE_OPTIONS code).

    NOTE(review): expects the legacy report format ("option_name" +
    "allowed_str") — confirm against the validator's current output.
    """
    config = {
        "SBD_DELAY_START": "yes",
        "SBD_WATCHDOG_TIMEOUT": "5",
        "SBD_STARTMODE": "clean",
        "SBD_UNKNOWN": "",
        "another_unknown_option": "some value"
    }
    assert_report_item_list_equal(
        cmd_sbd._validate_sbd_options(config),
        [
            (
                Severities.ERROR,
                report_codes.INVALID_OPTION,
                {
                    "option_name": "SBD_UNKNOWN",
                    "option_type": None,
                    "allowed": self.allowed_sbd_options,
                    "allowed_str": self.allowed_sbd_options_str
                },
                report_codes.FORCE_OPTIONS
            ),
            (
                Severities.ERROR,
                report_codes.INVALID_OPTION,
                {
                    "option_name": "another_unknown_option",
                    "option_type": None,
                    "allowed": self.allowed_sbd_options,
                    "allowed_str": self.allowed_sbd_options_str
                },
                report_codes.FORCE_OPTIONS
            )
        ]
    )
def test_invalid_input(self, mock_pull):
    """Pulling a config that parses to an unexpected structure raises
    INVALID_RESPONSE_FORMAT and writes nothing to the local booth env."""
    mock_pull.return_value = {}
    assert_raise_library_error(
        lambda: commands.pull_config(self.mock_env, "node", "name"),
        (
            Severities.ERROR,
            report_codes.INVALID_RESPONSE_FORMAT,
            {"node": "node"}
        )
    )
    mock_pull.assert_called_once_with(
        self.mock_com, NodeAddresses("node"), "name"
    )
    # on failure no config or key may have been created locally
    self.assertEqual(0, self.mock_env.booth.create_config.call_count)
    self.assertEqual(0, self.mock_env.booth.set_key_path.call_count)
    self.assertEqual(0, self.mock_env.booth.create_key.call_count)
    assert_report_item_list_equal(
        self.mock_rep.report_item_list,
        [(
            Severities.INFO,
            report_codes.BOOTH_FETCHING_CONFIG_FROM_NODE,
            {
                "node": "node",
                "config": "name"
            }
        )]
    )
def test_success_not_used(self, mock_net_stop, mock_status):
    """Stopping an unused qdevice succeeds without any usage warning."""
    mock_status.return_value = ""
    lib.qdevice_stop(self.lib_env, "net", proceed_if_used=False)
    mock_net_stop.assert_called_once_with("mock_runner", "corosync-qnetd")
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.INFO,
                report_codes.SERVICE_STOP_STARTED,
                {
                    "service": "quorum device",
                }
            ),
            (
                severity.INFO,
                report_codes.SERVICE_STOP_SUCCESS,
                {
                    "service": "quorum device",
                }
            )
        ]
    )
def test_errors_forced(self):
    """With skip_offline_nodes=True, per-node failures become warnings
    instead of aborting the corosync-offline check."""
    nodes = ["node1", "node2"]
    node_addrs_list = NodeAddressesList([NodeAddresses(addr) for addr in nodes])

    def side_effect(node, request, data):
        # node2 fails to authenticate; node1 returns unparsable json
        if node.ring0 == nodes[1]:
            raise NodeAuthenticationException(nodes[1], "command", "HTTP error: 401")
        return "{"  # invalid json

    self.mock_communicator.call_node.side_effect = side_effect
    lib.check_corosync_offline_on_nodes(
        self.mock_communicator,
        self.mock_reporter,
        node_addrs_list,
        skip_offline_nodes=True
    )
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (severity.INFO, report_codes.COROSYNC_NOT_RUNNING_CHECK_STARTED, {}),
            (severity.WARNING, report_codes.COROSYNC_NOT_RUNNING_CHECK_NODE_ERROR, {"node": nodes[0]}),
            (
                severity.WARNING,
                report_codes.NODE_COMMUNICATION_ERROR_NOT_AUTHORIZED,
                {"node": nodes[1], "command": "command", "reason": "HTTP error: 401"},
            ),
            (severity.WARNING, report_codes.COROSYNC_NOT_RUNNING_CHECK_NODE_ERROR, {"node": nodes[1]}),
        ],
    )
def test_report_when_duplication_allowed(self, export_with_set):
    """Allowed duplication produces a warning, not an error."""
    export_with_set.return_value = "exported_duplicate_element"
    element = mock.MagicMock()
    element.tag = "constraint_type"
    report_processor = MockLibraryReportProcessor()
    constraint.check_is_without_duplication(
        report_processor,
        fixture_constraint_section(["duplicate_element"]),
        element,
        are_duplicate=lambda e1, e2: True,
        export_element=constraint.export_with_set,
        # NOTE(review): "duplication_alowed" presumably matches the
        # (misspelled) parameter name in the production function —
        # verify before renaming here.
        duplication_alowed=True,
    )
    assert_report_item_list_equal(
        report_processor.report_item_list,
        [
            (
                severities.WARNING,
                report_codes.DUPLICATE_CONSTRAINTS_EXIST,
                {
                    'constraint_info_list': ['exported_duplicate_element'],
                    'constraint_type': 'constraint_type'
                },
            )
        ]
    )
def test_success(self, mock_remote_start, mock_remote_stop):
    """Reload stops and then starts the qdevice client on every node."""
    nodes = ["node1", "node2"]
    node_addrs_list = NodeAddressesList(
        [NodeAddresses(addr) for addr in nodes]
    )
    lib.qdevice_reload_on_nodes(
        self.mock_communicator,
        self.mock_reporter,
        node_addrs_list
    )
    node_calls = [
        mock.call(
            self.mock_reporter, self.mock_communicator, node_addrs_list[0]
        ),
        mock.call(
            self.mock_reporter, self.mock_communicator, node_addrs_list[1]
        ),
    ]
    # each node gets exactly one stop and one start, order not guaranteed
    self.assertEqual(len(node_calls), len(mock_remote_stop.mock_calls))
    self.assertEqual(len(node_calls), len(mock_remote_start.mock_calls))
    mock_remote_stop.assert_has_calls(node_calls, any_order=True)
    mock_remote_start.assert_has_calls(node_calls, any_order=True)
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.INFO,
                report_codes.QDEVICE_CLIENT_RELOAD_STARTED,
                {}
            ),
        ]
    )
def test_success(self):
    """All nodes report corosync not running: one info report per node."""
    nodes = ["node1", "node2"]
    node_addrs_list = NodeAddressesList(
        [NodeAddresses(addr) for addr in nodes]
    )
    self.mock_communicator.call_node.return_value = '{"corosync": false}'
    lib.check_corosync_offline_on_nodes(
        self.mock_communicator,
        self.mock_reporter,
        node_addrs_list
    )
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.INFO,
                report_codes.COROSYNC_NOT_RUNNING_CHECK_STARTED,
                {}
            ),
            (
                severity.INFO,
                report_codes.COROSYNC_NOT_RUNNING_ON_NODE,
                {"node": nodes[0]}
            ),
            (
                severity.INFO,
                report_codes.COROSYNC_NOT_RUNNING_ON_NODE,
                {"node": nodes[1]}
            ),
        ]
    )
def test_enable_failed(
    self, mock_net_setup, mock_net_enable, mock_net_start
):
    """If enabling the service fails after setup, the error propagates
    and the service is never started; setup success is still reported."""
    mock_net_enable.side_effect = EnableServiceError(
        "test service",
        "test error"
    )
    assert_raise_library_error(
        lambda: lib.qdevice_setup(self.lib_env, "net", True, True),
        (
            severity.ERROR,
            report_codes.SERVICE_ENABLE_ERROR,
            {
                "service": "test service",
                "reason": "test error",
            }
        )
    )
    mock_net_setup.assert_called_once_with("mock_runner")
    mock_net_enable.assert_called_once_with("mock_runner", "corosync-qnetd")
    mock_net_start.assert_not_called()
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.INFO,
                report_codes.QDEVICE_INITIALIZATION_SUCCESS,
                {
                    "model": "net",
                }
            )
        ]
    )
def test_failed(self, mock_net_stop):
    """A stop-service failure raises SERVICE_STOP_ERROR; only the
    "stop started" report is emitted beforehand."""
    mock_net_stop.side_effect = StopServiceError(
        "test service",
        "test error"
    )
    assert_raise_library_error(
        lambda: lib.qdevice_stop(self.lib_env, "net"),
        (
            severity.ERROR,
            report_codes.SERVICE_STOP_ERROR,
            {
                "service": "test service",
                "reason": "test error",
            }
        )
    )
    mock_net_stop.assert_called_once_with("mock_runner", "corosync-qnetd")
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.INFO,
                report_codes.SERVICE_STOP_STARTED,
                {
                    "service": "quorum device",
                }
            )
        ]
    )
def _assert(self):
    """Shared assertion: exactly one call to the expected node endpoint
    with the expected JSON payload, plus the saved-config report."""
    self.assertEqual(1, self.mock_com.call_node.call_count)
    self.assertEqual(self.node, self.mock_com.call_node.call_args[0][0])
    self.assertEqual(
        "remote/booth_set_config",
        self.mock_com.call_node.call_args[0][1]
    )
    # the request body is url-encoded with the payload under "data_json"
    data = url_decode(self.mock_com.call_node.call_args[0][2])
    self.assertTrue("data_json" in data)
    self.assertEqual(
        {
            "config": {
                "name": "cfg_name.conf",
                "data": "cfg"
            }
        },
        json.loads(data["data_json"][0])
    )
    assert_report_item_list_equal(
        self.mock_rep.report_item_list,
        [(
            Severities.INFO,
            report_codes.BOOTH_CONFIGS_SAVED_ON_NODE,
            {
                "node": self.node.label,
                "name": "cfg_name",
                "name_list": ["cfg_name"]
            }
        )]
    )
def test_failures(self):
    """With no resources defined, all referenced devices and nodes are
    reported as missing."""
    resources = etree.fromstring("<resources />")
    reporter = MockLibraryReportProcessor()
    lib.verify(reporter, self.get_cib(), resources, [])
    report = [
        (
            severity.ERROR,
            report_codes.STONITH_RESOURCES_DO_NOT_EXIST,
            {
                "stonith_ids": [
                    "d1", "d2", "d3", "d4", "d5", "dR", "dR-special"
                ],
            },
            None
        ),
        (
            severity.ERROR,
            report_codes.NODE_NOT_FOUND,
            {
                "node": "nodeA",
            },
            None
        ),
        (
            severity.ERROR,
            report_codes.NODE_NOT_FOUND,
            {
                "node": "nodeB",
            },
            None
        ),
    ]
    assert_report_item_list_equal(reporter.report_item_list, report)
def test_stdin(self, mock_popen):
    """Running a command with stdin passes the string to communicate(),
    logs both input and output, and emits the two debug reports."""
    expected_output = "expected output"
    expected_retval = 123
    command = ["a_command"]
    command_str = "a_command"
    stdin = "stdin string"
    mock_process = mock.MagicMock(spec_set=["communicate", "returncode"])
    mock_process.communicate.return_value = (expected_output, "dummy")
    mock_process.returncode = expected_retval
    mock_popen.return_value = mock_process
    runner = lib.CommandRunner(self.mock_logger, self.mock_reporter)
    real_output, real_retval = runner.run(command, stdin_string=stdin)
    self.assertEqual(real_output, expected_output)
    self.assertEqual(real_retval, expected_retval)
    mock_process.communicate.assert_called_once_with(stdin)
    # "stdin": -1 — presumably subprocess.PIPE; confirm against the helper
    self.assert_popen_called_with(
        mock_popen, command, {"env": {}, "stdin": -1}
    )
    logger_calls = [
        mock.call("""\
Running: {0}
--Debug Input Start--
{1}
--Debug Input End--""".format(command_str, stdin)),
        mock.call("""\
Finished running: {0}
Return value: {1}
--Debug Output Start--
{2}
--Debug Output End--""".format(command_str, expected_retval, expected_output))
    ]
    self.assertEqual(self.mock_logger.debug.call_count, len(logger_calls))
    self.mock_logger.debug.assert_has_calls(logger_calls)
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.DEBUG,
                report_codes.RUN_EXTERNAL_PROCESS_STARTED,
                {
                    "command": command_str,
                    "stdin": stdin,
                }
            ),
            (
                severity.DEBUG,
                report_codes.RUN_EXTERNAL_PROCESS_FINISHED,
                {
                    "command": command_str,
                    "return_value": expected_retval,
                    "stdout": expected_output,
                }
            )
        ]
    )
def test_success_on_valid_options(self):
    """A valid ticket option set produces no report items."""
    report_processor = MockLibraryReportProcessor()
    options = {"timeout": "10"}
    config_structure.validate_ticket_options(
        report_processor, options, allow_unknown_options=False
    )
    assert_report_item_list_equal(report_processor.report_item_list, [])
def test_return_empty_report_on_allowed_names(self):
    """All checked names are in the allowed set, so no reports appear."""
    allowed_names = ["a", "b", "c"]
    checked_names = ["a", "b"]
    report_list = validate.names_in(allowed_names, checked_names)
    assert_report_item_list_equal(report_list, [])
def test_success(self):
    """Valid level values (ints and numeric strings) yield no reports."""
    reporter = MockLibraryReportProcessor()
    for level in (1, "1", 9, "9", "05"):
        lib._validate_level(reporter, level)
    assert_report_item_list_equal(reporter.report_item_list, [])
def assert_operation_produces_report(self, operation, report_list):
    """Assert that validating ``operation`` yields exactly ``report_list``."""
    assert_report_item_list_equal(
        operations.validate_operation(
            operation, operations.OPERATION_OPTIONS_VALIDATORS
        ),
        report_list
    )
def test_empty(self):
    """An empty fencing topology verifies cleanly against no resources."""
    reporter = MockLibraryReportProcessor()
    empty_resources = etree.fromstring("<resources />")
    empty_topology = etree.fromstring("<fencing-topology />")
    lib.verify(reporter, empty_topology, empty_resources, self.get_status())
    assert_report_item_list_equal(reporter.report_item_list, [])
def test_raises_on_invalid_options(self):
    """Invalid ticket options both raise a library error and are
    accumulated in the report processor (same report items)."""
    report_processor = MockLibraryReportProcessor()
    expected_errors = [
        (
            severities.ERROR,
            report_codes.INVALID_OPTION,
            {
                "option_name": "site",
                "option_type": "booth ticket",
                "allowed": list(config_structure.TICKET_KEYS),
            },
        ),
        (
            severities.ERROR,
            report_codes.INVALID_OPTION,
            {
                "option_name": "port",
                "option_type": "booth ticket",
                "allowed": list(config_structure.TICKET_KEYS),
            },
        ),
        (
            severities.ERROR,
            report_codes.INVALID_OPTION_VALUE,
            {
                "option_name": "timeout",
                "option_value": " ",
                "allowed_values": "no-empty",
            },
        ),
        (
            severities.ERROR,
            report_codes.INVALID_OPTION,
            {
                "option_name": "unknown",
                "option_type": "booth ticket",
                "allowed": list(config_structure.TICKET_KEYS),
            },
            # only the unknown option is forceable
            report_codes.FORCE_OPTIONS
        ),
    ]
    assert_raise_library_error(
        lambda: config_structure.validate_ticket_options(
            report_processor,
            {
                "site": "a",
                "port": "b",
                "timeout": " ",
                "unknown": "c",
            },
            allow_unknown_options=False,
        ),
        *expected_errors
    )
    assert_report_item_list_equal(
        report_processor.report_item_list,
        expected_errors
    )
def test_few_failures(self, mock_sbd_cfg, mock_get_nodes):
    """Gathering SBD config across nodes: a parsable config, an
    unparsable one (-> empty dict) and a connection failure (-> None
    plus a warning report) each produce a well-defined entry."""
    def ret_val(communicator, node):
        self.assertEqual(communicator, self.mock_com)
        if node.label == "node0":
            # valid config: comment lines ignored, options parsed
            return """\
# comment
SBD_TEST=true
ANOTHER_OPT=1
"""
        elif node.label == "node1":
            # unparsable content is expected to yield an empty config
            return """\
invalid value
"""
        elif node.label == "node2":
            raise NodeConnectionException(node.label, "command", "reason")
        else:
            raise AssertionError(
                "Unexpected call: node={node}, node.label={label}".format(
                    node=str(node), label=node.label
                )
            )
    mock_sbd_cfg.side_effect = ret_val
    self.mock_env.is_cman_cluster = False
    mock_get_nodes.return_value = self.node_list
    expected = [
        {
            "node": self.node_list.find_by_label("node0"),
            "config": {
                "SBD_TEST": "true",
                "ANOTHER_OPT": "1"
            }
        },
        {
            "node": self.node_list.find_by_label("node1"),
            "config": {}
        },
        {
            "node": self.node_list.find_by_label("node2"),
            "config": None
        }
    ]
    self.assertEqual(
        expected,
        cmd_sbd.get_cluster_sbd_config(self.mock_env)
    )
    mock_get_nodes.assert_called_once_with(self.mock_env)
    self.assertEqual(3, mock_sbd_cfg.call_count)
    assert_report_item_list_equal(
        self.mock_rep.report_item_list,
        [(
            Severities.WARNING,
            report_codes.UNABLE_TO_GET_SBD_CONFIG,
            {"node": "node2"}
        )]
    )
def test_success(self):
    """Existing stonith devices validate cleanly, singly and in pairs."""
    reporter = MockLibraryReportProcessor()
    for device_list in (["stonith1"], ["stonith1", "stonith2"]):
        lib._validate_devices(reporter, self.resources_el, device_list)
    assert_report_item_list_equal(reporter.report_item_list, [])
def test_level(self):
    """Removing by level=2 deletes exactly the level-2 entries."""
    lib.remove_levels_by_params(self.reporter, self.tree, level=2)
    surviving = ["fl1", "fl3", "fl5", "fl7", "fl8", "fl9", "fl10"]
    self.assertEqual(self.get_remaining_ids(), surviving)
    assert_report_item_list_equal(self.reporter.report_item_list, [])
def test_more_devices(self):
    """Device-list matching is order-insensitive: ["d2", "d1"] removes
    the entry holding both devices."""
    lib.remove_levels_by_params(self.reporter, self.tree, devices=["d2", "d1"])
    surviving = [
        "fl1", "fl2", "fl4", "fl5", "fl6", "fl7", "fl8", "fl9", "fl10"
    ]
    self.assertEqual(self.get_remaining_ids(), surviving)
    assert_report_item_list_equal(self.reporter.report_item_list, [])
def test_combination(self):
    """All filters combined (level, target, devices) select one entry."""
    lib.remove_levels_by_params(
        self.reporter, self.tree, 2, TARGET_TYPE_NODE, "nodeB", ["d3"]
    )
    surviving = [
        "fl1", "fl2", "fl3", "fl5", "fl6", "fl7", "fl8", "fl9", "fl10"
    ]
    self.assertEqual(self.get_remaining_ids(), surviving)
    assert_report_item_list_equal(self.reporter.report_item_list, [])
def test_success(self):
    """A topology referencing only existing devices verifies cleanly."""
    resources = etree.fromstring("<resources />")
    device_names = ["d1", "d2", "d3", "d4", "d5", "dR", "dR-special"]
    for name in device_names:
        self.fixture_resource(resources, name)
    reporter = MockLibraryReportProcessor()
    lib.verify(reporter, self.get_cib(), resources, self.get_status())
    assert_report_item_list_equal(reporter.report_item_list, [])
def test_warn_when_config_exists_and_overwrite_allowed(self, mock_exists):
    """Overwriting an existing config file only warns when allowed."""
    report_processor=MockLibraryReportProcessor()
    self.check(report_processor, can_overwrite_existing=True)
    assert_report_item_list_equal(report_processor.report_item_list, [(
        severities.WARNING,
        report_codes.FILE_ALREADY_EXISTS,
        {
            "file_path": "/etc/booth/some-name.conf"
        },
    )])
def test_target_attrib(self):
    """Removing by attribute target drops the attribute-targeted levels."""
    lib.remove_levels_by_params(
        self.reporter,
        self.tree,
        target_type=TARGET_TYPE_ATTRIBUTE,
        target_value=("fencing", "improved"),
    )
    surviving = [
        "fl1", "fl2", "fl3", "fl4", "fl5", "fl6", "fl9", "fl10"
    ]
    self.assertEqual(self.get_remaining_ids(), surviving)
    assert_report_item_list_equal(self.reporter.report_item_list, [])
def test_not_unique_duplicity_allowed(self):
    """A duplicate recipient value only warns when duplicity is allowed."""
    alert.ensure_recipient_value_is_unique(
        self.mock_reporter, self.alert, "value1", allow_duplicity=True
    )
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [(
            severities.WARNING,
            report_codes.CIB_ALERT_RECIPIENT_ALREADY_EXISTS,
            {
                "alert": "alert-1",
                "recipient": "value1"
            }
        )]
    )
def test_duplicity_of_value_not_allowed(self):
    """Updating a recipient to an existing value raises a forceable error
    and the same item lands in the report processor."""
    report_item = (
        severities.ERROR,
        report_codes.CIB_ALERT_RECIPIENT_ALREADY_EXISTS,
        {
            "alert": "alert",
            "recipient": "value1"
        },
        report_codes.FORCE_ALERT_RECIPIENT_VALUE_NOT_UNIQUE
    )
    assert_raise_library_error(
        lambda: alert.update_recipient(
            self.mock_reporter, self.tree, "alert-recipient", "value1"
        ),
        report_item
    )
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list, [report_item]
    )
def test_success_used_forced(
    self, mock_net_destroy, mock_net_disable, mock_net_stop, mock_status
):
    """Destroying a qdevice in use with proceed_if_used=True stops,
    disables and destroys it, warning about the using clusters first."""
    mock_status.return_value = 'Cluster "a_cluster":\n'
    lib.qdevice_destroy(self.lib_env, "net", proceed_if_used=True)
    mock_net_stop.assert_called_once_with("mock_runner", "corosync-qnetd")
    mock_net_disable.assert_called_once_with(
        "mock_runner", "corosync-qnetd"
    )
    mock_net_destroy.assert_called_once_with()
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.WARNING,
                report_codes.QDEVICE_USED_BY_CLUSTERS,
                {
                    "clusters": ["a_cluster"],
                }
            ),
            (
                severity.INFO,
                report_codes.SERVICE_STOP_STARTED,
                {
                    "service": "quorum device",
                }
            ),
            (
                severity.INFO,
                report_codes.SERVICE_STOP_SUCCESS,
                {
                    "service": "quorum device",
                }
            ),
            (
                severity.INFO,
                report_codes.SERVICE_DISABLE_SUCCESS,
                {
                    "service": "quorum device",
                }
            ),
            (
                severity.INFO,
                report_codes.QDEVICE_DESTROY_SUCCESS,
                {
                    "model": "net",
                }
            )
        ]
    )
def test_invalid(self):
    """Nonexistent devices produce one forceable error listing them all."""
    reporter = MockLibraryReportProcessor()
    lib._validate_devices(reporter, self.resources_el, ["dummy", "fenceX"])
    report = [(
        severity.ERROR,
        report_codes.STONITH_RESOURCES_DO_NOT_EXIST,
        {
            "stonith_ids": ["dummy", "fenceX"],
        },
        report_codes.FORCE_STONITH_RESOURCE_DOES_NOT_EXIST
    )]
    assert_report_item_list_equal(reporter.report_item_list, report)
def test_support_option_value_pair(self):
    """The report carries the original value ("b"), while the predicate
    is applied to the normalized one ("c")."""
    assert_report_item_list_equal(
        validate.value_cond("a", self.predicate, "test")({
            "a": validate.ValuePair(original="b", normalized="c")
        }),
        [
            (
                severities.ERROR,
                report_codes.INVALID_OPTION_VALUE,
                {
                    "option_name": "a",
                    "option_value": "b",
                    "allowed_values": "test",
                },
                None
            ),
        ]
    )
def test_one_not_found(self):
    """A name absent from the cluster yields a NODE_NOT_FOUND error."""
    checked_names = ["node0", "node3"]
    report_list = cmd_sbd._check_node_names_in_cluster(
        self.node_list, checked_names
    )
    assert_report_item_list_equal(
        report_list,
        [(
            Severities.ERROR,
            report_codes.NODE_NOT_FOUND,
            {"node": "node0"}
        )]
    )
def test_return_forceable_error_on_not_allowed_names(self):
    """With a force code but no allow flag, bad names are forceable errors."""
    assert_report_item_list_equal(
        validate.names_in(
            ["a", "b", "c"],
            ["x", "y"],
            option_type="some option",
            code_to_allow_extra_names="FORCE_CODE",
        ),
        [(
            severities.ERROR,
            report_codes.INVALID_OPTION,
            {
                "option_names": ["x", "y"],
                "allowed": ["a", "b", "c"],
                "option_type": "some option",
            },
            "FORCE_CODE"
        )]
    )
def test_support_OptionValuePair(self):
    """The report shows the original value ("C"); the membership check
    uses the normalized one ("c")."""
    assert_report_item_list_equal(
        validate.value_in("a", ["b"])({
            "a": validate.ValuePair(original="C", normalized="c")
        }),
        [
            (
                severities.ERROR,
                report_codes.INVALID_OPTION_VALUE,
                {
                    "option_name": "a",
                    "option_value": "C",
                    "allowed_values": ["b"],
                },
                None
            ),
        ]
    )
def test_supports_another_report_option_name(self):
    """option_name_for_report replaces the key name in the report."""
    validator = validate.value_in("a", ["b"], option_name_for_report="option a")
    expected = [
        (
            severities.ERROR,
            report_codes.INVALID_OPTION_VALUE,
            {
                "option_name": "option a",
                "option_value": "c",
                "allowed_values": ["b"],
            },
            None,
        ),
    ]
    assert_report_item_list_equal(validator({"a": "c"}), expected)
def test_supports_forceable_errors(self):
    """The provided force code is attached to the produced error."""
    validator = validate.value_in("a", ["b"], code_to_allow_extra_values="FORCE")
    expected = [
        (
            severities.ERROR,
            report_codes.INVALID_OPTION_VALUE,
            {
                "option_name": "a",
                "option_value": "c",
                "allowed_values": ["b"],
            },
            "FORCE",
        ),
    ]
    assert_report_item_list_equal(validator({"a": "c"}), expected)
def test_reports_about_invalid_interval(self):
    """A non-interval string is rejected with the expected hint text."""
    validator = validate.value_time_interval("a")
    expected = [
        (
            severities.ERROR,
            report_codes.INVALID_OPTION_VALUE,
            {
                "option_name": "a",
                "option_value": "invalid_value",
                "allowed_values": "time interval (e.g. 1, 2s, 3m, 4h, ...)",
            },
            None,
        ),
    ]
    assert_report_item_list_equal(validator({"a": "invalid_value"}), expected)
def test_watchdog_timeout_is_none(self):
    """A None timeout is rejected as an invalid option value."""
    config = {
        "SBD_WATCHDOG_TIMEOUT": None,
    }
    assert_report_item_list_equal(
        cmd_sbd._validate_sbd_options(config),
        [(
            Severities.ERROR,
            report_codes.INVALID_OPTION_VALUE,
            {
                "option_name": "SBD_WATCHDOG_TIMEOUT",
                "option_value": None,
                "allowed_values": "a non-negative integer",
            },
            None
        )]
    )
def test_some_not_ok(self):
    """Empty, None and relative watchdog paths are each reported invalid;
    the absolute path passes."""
    watchdog_dict = {
        "node1": "",
        "node2": None,
        "node3": "/dev/watchdog",
        "node4": "../dev/watchdog",
    }
    assert_report_item_list_equal(
        cmd_sbd._validate_watchdog_dict(watchdog_dict),
        [
            (
                Severities.ERROR,
                report_codes.WATCHDOG_INVALID,
                {"watchdog": watchdog}
            )
            for watchdog in ["", None, "../dev/watchdog"]
        ]
    )
def test_invalid_char(self):
    """An id with a forbidden character reports that character and
    whether it is in the first position."""
    assert_report_item_list_equal(
        validate.value_id("id", "test id")({
            "id": "te#st"
        }),
        [
            (
                severities.ERROR,
                report_codes.INVALID_ID,
                {
                    "id": "te#st",
                    "id_description": "test id",
                    "invalid_character": "#",
                    "is_first_char": False,
                },
                None
            ),
        ]
    )
def test_more_ids(self):
    """The first booking of fresh ids is clean; booking the same ids
    again reports each of them as already used."""
    assert_report_item_list_equal(
        self.provider.book_ids("myId1", "myId2"),
        []
    )
    assert_report_item_list_equal(
        self.provider.book_ids("myId1", "myId2"),
        [
            self.fixture_report("myId1"),
            self.fixture_report("myId2"),
        ]
    )
def test_returns_report_when_missing(self):
    """If none of the alternative options is present, one error names
    all the alternatives."""
    assert_report_item_list_equal(
        validate.is_required_some_of(["first", "second"], "type")({
            "third": "value",
        }),
        [
            (
                severities.ERROR,
                report_codes.REQUIRED_OPTION_OF_ALTERNATIVES_IS_MISSING,
                {
                    "option_names": ["first", "second"],
                    "option_type": "type",
                },
                None
            ),
        ]
    )
def test_few_failures(self, mock_sbd_cfg, mock_get_nodes):
    """Variant of the per-node SBD config test: result order is not
    guaranteed, and the connection failure produces both a
    communication warning and an unable-to-get-config warning."""
    def ret_val(communicator, node):
        self.assertEqual(communicator, self.mock_com)
        if node.label == "node0":
            # valid config: comment lines ignored, options parsed
            return """\
# comment
SBD_TEST=true
ANOTHER_OPT=1
"""
        elif node.label == "node1":
            # unparsable content is expected to yield an empty config
            return """\
invalid value
"""
        elif node.label == "node2":
            raise NodeConnectionException(node.label, "command", "reason")
        else:
            raise AssertionError(
                "Unexpected call: node={node}, node.label={label}".format(
                    node=str(node), label=node.label))
    mock_sbd_cfg.side_effect = ret_val
    self.mock_env.is_cman_cluster = False
    mock_get_nodes.return_value = self.node_list
    expected = [{
        "node": self.node_list.find_by_label("node0"),
        "config": {
            "SBD_TEST": "true",
            "ANOTHER_OPT": "1"
        }
    }, {
        "node": self.node_list.find_by_label("node1"),
        "config": {}
    }, {
        "node": self.node_list.find_by_label("node2"),
        "config": None
    }]
    _assert_equal_list_of_dictionaries_without_order(
        expected, cmd_sbd.get_cluster_sbd_config(self.mock_env))
    mock_get_nodes.assert_called_once_with(self.mock_env)
    self.assertEqual(3, mock_sbd_cfg.call_count)
    assert_report_item_list_equal(self.mock_rep.report_item_list, [
        (Severities.WARNING,
         report_codes.NODE_COMMUNICATION_ERROR_UNABLE_TO_CONNECT,
         {
             "node": "node2",
             "reason": "reason",
             "command": "command",
         }),
        (Severities.WARNING, report_codes.UNABLE_TO_GET_SBD_CONFIG, {
            "node": "node2"
        }),
    ])
def test_success(self, mock_find):
    """When the lookup finds no matching level, validation is silent."""
    mock_find.return_value = []
    reporter = MockLibraryReportProcessor()
    args = ("tree", "level", "target_type", "target_value", "devices")
    lib._validate_level_target_devices_does_not_exist(reporter, *args)
    mock_find.assert_called_once_with(*args)
    assert_report_item_list_equal(reporter.report_item_list, [])
def test_without_authfile(self, mock_parallel):
    """Without an authfile, None is forwarded for both the authfile
    name and its data to every per-node call."""
    lib.send_config_to_all_nodes(
        self.mock_communicator,
        self.mock_reporter,
        self.node_list,
        "cfg_name",
        "config data"
    )
    mock_parallel.assert_called_once_with(
        lib._set_config_on_node,
        [
            (
                [
                    self.mock_communicator,
                    self.mock_reporter,
                    node,
                    "cfg_name",
                    "config data",
                    None,
                    None
                ],
                {}
            )
            for node in self.node_list
        ],
        self.mock_reporter,
        False
    )
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [(Severities.INFO, report_codes.BOOTH_CONFIG_DISTRIBUTION_STARTED, {})]
    )
def test_failure(self):
    """Removing a mix of known and unknown alerts raises one ID_NOT_FOUND
    per unknown id, mirrored in the report processor."""
    report_list = [
        (Severities.ERROR, report_codes.ID_NOT_FOUND, {
            "id": "unknown"
        }),
        (Severities.ERROR, report_codes.ID_NOT_FOUND, {
            "id": "unknown2"
        })
    ]
    assert_raise_library_error(
        lambda: cmd_alert.remove_alert(
            self.mock_env, ["unknown", "alert1", "unknown2", "alert2"]),
        *report_list)
    assert_report_item_list_equal(
        self.mock_rep.report_item_list, report_list
    )
def test_action_is_deprecated_forced(self):
    """With force=True, setting the deprecated "action" parameter is
    reported only as a warning."""
    assert_report_item_list_equal(
        self.agent.validate_parameters_update(
            {
                "required_param": "value",
            },
            {
                "action": "reboot",
            },
            force=True
        ),
        [
            self.report_warning,
        ],
    )
def test_resources_are_not_resources(self):
    # The validator expects to get resource elements. So this report is
    # not the best, but at least the validator detects the problem.
    # Validation using IDs provides better reporting in this case.
    assert_report_item_list_equal(
        self._validate("G", ["RB1-meta_attributes"]),
        [
            fixture.error(
                report_codes.CANNOT_GROUP_RESOURCE_WRONG_TYPE,
                resource_id="RB1-meta_attributes",
                resource_type="meta_attributes",
            ),
        ]
    )
def test_resources_are_not_resources(self):
    """An id of the wrong element type is rejected with the full list of
    expected resource-element types."""
    assert_report_item_list_equal(
        self._validate("G", ["RB1-meta_attributes"]),
        [
            fixture.error(
                report_codes.ID_BELONGS_TO_UNEXPECTED_TYPE,
                id="RB1-meta_attributes",
                expected_types=[
                    "clone", "master", "group", "primitive", "bundle"
                ],
                current_type="meta_attributes",
            ),
        ]
    )
def test_complex(self):
    # test ids existing in the cib, double booked, available
    # test reports not repeated
    self.fixture_add_primitive_with_id("myId1")
    self.fixture_add_primitive_with_id("myId2")
    assert_report_item_list_equal(
        self.provider.book_ids(
            "myId1", "myId2", "myId3", "myId2", "myId3", "myId4", "myId3"
        ),
        [
            self.fixture_report("myId1"),
            self.fixture_report("myId2"),
            self.fixture_report("myId3"),
        ]
    )
def test_raises_on_invalid_options(self):
    """Invalid ticket options raise a library error and the same items
    are accumulated in the report processor (INVALID_OPTIONS format)."""
    report_processor = MockLibraryReportProcessor()
    expected_errors = [
        (
            severities.ERROR,
            report_codes.INVALID_OPTIONS,
            {
                "option_names": ["site"],
                "option_type": "booth ticket",
                "allowed": list(config_structure.TICKET_KEYS),
                "allowed_patterns": [],
            },
        ),
        (
            severities.ERROR,
            report_codes.INVALID_OPTIONS,
            {
                "option_names": ["port"],
                "option_type": "booth ticket",
                "allowed": list(config_structure.TICKET_KEYS),
                "allowed_patterns": [],
            },
        ),
        (
            severities.ERROR,
            report_codes.INVALID_OPTION_VALUE,
            {
                "option_name": "timeout",
                "option_value": " ",
                "allowed_values": "no-empty",
            },
        ),
        (
            severities.ERROR,
            report_codes.INVALID_OPTIONS,
            {
                "option_names": ["unknown"],
                "option_type": "booth ticket",
                "allowed": list(config_structure.TICKET_KEYS),
                "allowed_patterns": [],
            },
            # only the unknown option is forceable
            report_codes.FORCE_OPTIONS
        ),
    ]
    assert_raise_library_error(
        lambda: config_structure.validate_ticket_options(
            report_processor,
            {
                "site": "a",
                "port": "b",
                "timeout": " ",
                "unknown": "c",
            },
            allow_unknown_options=False,
        ),
        *expected_errors)
    assert_report_item_list_equal(
        report_processor.report_item_list, expected_errors
    )
def test_success(self, mock_corosync_live):
    """Distributing corosync.conf pushes the text to every node (order
    not guaranteed) and reports acceptance per node."""
    conf_text = "test conf text"
    nodes = ["node1", "node2"]
    node_addrs_list = NodeAddressesList(
        [NodeAddresses(addr) for addr in nodes]
    )
    mock_corosync_live.set_remote_corosync_conf = mock.MagicMock()
    lib.distribute_corosync_conf(
        self.mock_communicator,
        self.mock_reporter,
        node_addrs_list,
        conf_text
    )
    corosync_live_calls = [
        mock.call.set_remote_corosync_conf(
            "mock node communicator", node_addrs_list[0], conf_text
        ),
        mock.call.set_remote_corosync_conf(
            "mock node communicator", node_addrs_list[1], conf_text
        ),
    ]
    self.assertEqual(
        len(corosync_live_calls),
        len(mock_corosync_live.mock_calls)
    )
    mock_corosync_live.set_remote_corosync_conf.assert_has_calls(
        corosync_live_calls, any_order=True
    )
    assert_report_item_list_equal(
        self.mock_reporter.report_item_list,
        [
            (
                severity.INFO,
                report_codes.COROSYNC_CONFIG_DISTRIBUTION_STARTED,
                {}
            ),
            (
                severity.INFO,
                report_codes.COROSYNC_CONFIG_ACCEPTED_BY_NODE,
                {"node": nodes[0]}
            ),
            (
                severity.INFO,
                report_codes.COROSYNC_CONFIG_ACCEPTED_BY_NODE,
                {"node": nodes[1]}
            ),
        ]
    )
def test_broadcast_enabled_mcastaddr_set(self):
    """mcastaddr is mutually exclusive with broadcast on a udp link."""
    assert_report_item_list_equal(
        config_validators.create_link_list_udp([{
            "broadcast": "1",
            "mcastaddr": "225.0.0.1"
        }]),
        [
            fixture.error(
                report_codes.PREREQUISITE_OPTION_MUST_BE_DISABLED,
                option_name="mcastaddr",
                option_type="link",
                prerequisite_name="broadcast",
                prerequisite_type="link"
            ),
        ]
    )
def test_crypto_enabled_cipher_disabled_hash(self):
    """Enabling a crypto cipher requires a crypto hash to be enabled."""
    assert_report_item_list_equal(
        config_validators.create_transport_knet(
            {},
            {},
            {
                "cipher": "aes256",
                "hash": "none",
            }
        ),
        [
            fixture.error(
                report_codes.PREREQUISITE_OPTION_MUST_BE_ENABLED_AS_WELL,
                option_name="cipher",
                option_type="crypto",
                prerequisite_name="hash",
                prerequisite_type="crypto"
            )
        ]
    )
def test_return_warning_on_not_allowed_names(self):
    """With a force code and allow_extra_names=True, bad names only warn."""
    assert_report_item_list_equal(
        validate.names_in(
            ["a", "b", "c"],
            ["x", "y"],
            option_type="some option",
            code_to_allow_extra_names="FORCE_CODE",
            allow_extra_names=True,
        ),
        [(
            severities.WARNING,
            report_codes.INVALID_OPTION,
            {
                "option_names": ["x", "y"],
                "allowed": ["a", "b", "c"],
                "option_type": "some option",
            },
            None
        )]
    )
def test_return_error_on_not_allowed_names_without_force_code(self):
    """allow_extra_names has no effect unless a force code is supplied."""
    assert_report_item_list_equal(
        validate.names_in(
            ["a", "b", "c"],
            ["x", "y"],
            # does not work without code_to_allow_extra_names
            allow_extra_names=True,
        ),
        [(
            severities.ERROR,
            report_codes.INVALID_OPTION,
            {
                "option_names": ["x", "y"],
                "allowed": ["a", "b", "c"],
                "option_type": "option",
            },
            None
        )]
    )