    def test_restore_two_node(self):
        config_no_devices = open(rc("corosync.conf")).read()
        config = re.sub(
            re.compile(r"quorum {[^}]*}", re.MULTILINE | re.DOTALL),
            dedent("""\
                quorum {
                    provider: corosync_votequorum

                    device {
                        option: value
                        model: net

                        net {
                            host: 127.0.0.1
                            port: 4433
                        }
                    }
                }
            """),
            config_no_devices
        )
        facade = lib.ConfigFacade.from_string(config)
        facade.remove_quorum_device()
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
        ac(
            config_no_devices,
            facade.config.export()
        )
    def test_success_net_minimal_ffsplit(self):
        config = open(rc("corosync-3nodes.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        facade.add_quorum_device(
            "net",
            {"host": "127.0.0.1", "algorithm": "ffsplit"},
            {},
            {}
        )
        ac(
            config.replace(
                "    provider: corosync_votequorum\n",
                outdent("""\
                    provider: corosync_votequorum

                    device {
                        model: net
                        votes: 1

                        net {
                            algorithm: ffsplit
                            host: 127.0.0.1
                        }
                    }
                """)
            ),
            facade.config.export()
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
    def test_success_remove_nodes_votes(self):
        config = open(rc("corosync-3nodes.conf")).read()
        config_votes = config.replace("node {", "node {\nquorum_votes: 2")
        facade = lib.ConfigFacade.from_string(config_votes)
        facade.add_quorum_device(
            "net",
            {"host": "127.0.0.1", "algorithm": "lms"},
            {},
            {}
        )
        ac(
            config.replace(
                "    provider: corosync_votequorum\n",
                outdent("""\
                    provider: corosync_votequorum

                    device {
                        model: net

                        net {
                            algorithm: lms
                            host: 127.0.0.1
                        }
                    }
                """)
            ),
            facade.config.export()
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
    def test_noop_on_no_heuristics(self):
        config = open(rc("corosync-3nodes-qdevice.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        facade.remove_quorum_device_heuristics()
        self.assertFalse(facade.need_stopped_cluster)
        self.assertTrue(facade.need_qdevice_reload)
        ac(config, facade.config.export())
Example #5
    def test_success(self):
        config = open(rc("corosync.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        self.assertEqual(facade.__class__, lib.ConfigFacade)
        self.assertEqual(facade.config.export(), config)
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
    def test_success_remove_heuristics(self):
        config = self.fixture_add_device_with_heuristics(
            open(rc("corosync-3nodes.conf")).read()
        )
        facade = lib.ConfigFacade.from_string(config)
        facade.update_quorum_device(
            {},
            {},
            {"mode": "", "exec_ls": "", "interval": ""}
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertTrue(facade.need_qdevice_reload)
        ac(
            self.fixture_add_device(
                open(rc("corosync-3nodes.conf")).read()
            ),
            facade.config.export()
        )
Example #7
    def test_upgrade_for_promoted_max(self):
        shutil.copy(rc("cib-empty-2.8.xml"), self.temp_cib)
        self.assert_pcs_success(
            "resource bundle create B container docker image=pcs:test"
        )
        self.assert_pcs_success(
            "resource bundle update B container promoted-max=3",
            "CIB has been upgraded to the latest schema version.\n"
        )
    def test_no_device(self):
        config = open(rc("corosync.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        self.assertEqual(
            (None, {}, {}, {}),
            facade.get_quorum_device_settings()
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
    def test_error_on_no_device(self):
        config = open(rc("corosync-3nodes.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        assert_raise_library_error(
            facade.remove_quorum_device_heuristics,
            fixture.error(report_codes.QDEVICE_NOT_DEFINED)
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
Example #10
    def load(
        self,
        modifiers=None,
        name="runner.cib.load",
        filename=None,
        before=None,
        returncode=0,
        stderr=None,
        instead=None,
        **modifier_shortcuts
    ):
        """
        Create call for loading cib.

        string name -- key of the call
        list of callable modifiers -- every callable takes an etree.Element and
            returns a new etree.Element with the desired modification
        string filename -- points to a file whose content is the cib
        string before -- key of call before which this new call is to be placed
        int returncode
        string stderr
        string instead -- key of call instead of which this new call is to be
            placed
        dict modifier_shortcuts -- a new modifier is generated from each
            modifier shortcut.
            As key there can be keys of MODIFIER_GENERATORS.
            Value is passed into appropriate generator from MODIFIER_GENERATORS.
            For details see pcs_test.tools.fixture_cib (mainly the variable
            MODIFIER_GENERATORS -- please refer to it when adding params here)
        """
        if (returncode != 0 or stderr is not None) and (
            modifiers is not None
            or filename is not None
            or modifier_shortcuts
        ):
            raise AssertionError(
                "Do not combine parameters 'returncode' and 'stderr' with"
                " parameters 'modifiers', 'filename' and 'modifier_shortcuts'"
            )

        command = "cibadmin --local --query"
        if returncode != 0:
            call = RunnerCall(command, stderr=stderr, returncode=returncode)
        else:
            cib = modify_cib(
                open(rc(filename if filename else self.cib_filename)).read(),
                modifiers,
                **modifier_shortcuts
            )
            call = RunnerCall(command, stdout=cib)

        self.__calls.place(name, call, before=before, instead=instead)
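
    # A hedged usage note (added for illustration; not part of the original
    # fixture module): tests wired up with get_env_tools, as the surrounding
    # examples show in their setUp, reach this fixture through the chained
    # config object. The call name and stderr text below are made up.
    #
    #   # success variant -- stdout of "cibadmin --local --query" comes from
    #   # a fixture file
    #   self.config.runner.cib.load(filename="cib-empty.xml")
    #
    #   # error variant -- must not be combined with 'filename', 'modifiers'
    #   # or modifier shortcuts (see the check above)
    #   self.config.runner.cib.load(
    #       name="runner.cib.load_err",
    #       returncode=1,
    #       stderr="unable to read CIB",
    #   )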
Example #11
    def test_returns_cib_from_cib_data(self):
        cib_filename = "cib-empty.xml"
        (self.config
            # Value of cib_data is unimportant here. See details in sibling test.
            .env.set_cib_data("whatever")
            .runner.cib.load(filename=cib_filename)
        )
        assert_xml_equal(
            etree_to_str(self.env_assist.get_env().get_cib()),
            open(rc(cib_filename)).read()
        )
    def test_no_device(self):
        config = open(rc("corosync-3nodes.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        assert_raise_library_error(
            facade.remove_quorum_device,
            (
                severity.ERROR,
                report_codes.QDEVICE_NOT_DEFINED,
                {}
            )
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
Example #13
    def test_dump_cib_file(self, mock_tmpfile, mock_runner):
        expected_runner = mock.MagicMock()
        mock_runner.return_value = expected_runner
        mock_instance = mock.MagicMock()
        mock_instance.name = rc("file.tmp")
        mock_tmpfile.return_value = mock_instance
        env = LibraryEnvironment(
            self.mock_logger,
            self.mock_reporter,
            cib_data="<cib />"
        )
        runner = env.cmd_runner()
        self.assertEqual(expected_runner, runner)
        mock_runner.assert_called_once_with(
            self.mock_logger,
            self.mock_reporter,
            {
                "LC_ALL": "C",
                "CIB_file": rc("file.tmp"),
            }
        )
        mock_tmpfile.assert_called_once_with("<cib />")
Example #14
    def setUp(self):
        self.env_assist, self.config = get_env_tools(self)
        self.config.env.set_known_hosts_dests(KNOWN_HOSTS_DESTS)
        cib_xml_man = XmlManipulation.from_file(rc("cib-empty.xml"))
        cib_xml_man.append_to_first_tag_name(
            "resources",
            """
                <primitive class="ocf" id="{0}"
                    provider="heartbeat" type="VirtualDomain"
                />
            """.format(VIRTUAL_MACHINE_ID)
        )
        self.config.env.set_cib_data(str(cib_xml_man))
Example #15
    def test_success(self):
        shutil.copy(rc("cib-empty-2.0.xml"), self.temp_cib)
        self.assert_effect(
            "resource bundle create B1 container docker image=pcs:test",
            """
                <resources>
                    <bundle id="B1">
                        <docker image="pcs:test" />
                    </bundle>
                </resources>
            """,
            "CIB has been upgraded to the latest schema version.\n"
        )
    def test_not_existing_add_heuristics(self):
        config = open(rc("corosync.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        assert_raise_library_error(
            lambda: facade.update_quorum_device(
                {},
                {},
                {"mode": "on"}
            ),
            fixture.error(report_codes.QDEVICE_NOT_DEFINED)
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
        ac(config, facade.config.export())
Example #17
    def test_upgrade_for_promoted_max(self):
        shutil.copy(rc("cib-empty-2.8.xml"), self.temp_cib)
        self.assert_effect(
            "resource bundle create B1 container docker image=pcs:test "
                "promoted-max=2"
            ,
            """
                <resources>
                    <bundle id="B1">
                        <docker image="pcs:test" promoted-max="2" />
                    </bundle>
                </resources>
            """,
            "CIB has been upgraded to the latest schema version.\n"
        )
Example #18
    def test_3nodes_atb_off(self):
        config = open(rc("corosync-3nodes.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        self.assertEqual(3, len(facade.get_nodes()))

        facade.set_quorum_options({"auto_tie_breaker": "0"})

        self.assertTrue(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
        self.assertEqual(
            "0",
            facade.get_quorum_options().get("auto_tie_breaker", None)
        )

        two_node = self.get_two_node(facade)
        self.assertTrue(two_node is None or two_node == "0")
Example #19
    def test_add_all_options(self):
        config = open(rc("corosync.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        expected_options = {
            "auto_tie_breaker": "1",
            "last_man_standing": "0",
            "last_man_standing_window": "1000",
            "wait_for_all": "0",
        }
        facade.set_quorum_options(expected_options)

        self.assertTrue(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
        test_facade = lib.ConfigFacade.from_string(facade.config.export())
        self.assertEqual(
            expected_options,
            test_facade.get_quorum_options()
        )
    def test_not_existing(self):
        config = open(rc("corosync.conf")).read()
        facade = lib.ConfigFacade.from_string(config)
        assert_raise_library_error(
            lambda: facade.update_quorum_device(
                {"host": "127.0.0.1"},
                {},
                {}
            ),
            (
                severity.ERROR,
                report_codes.QDEVICE_NOT_DEFINED,
                {}
            )
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
        ac(config, facade.config.export())
    def test_remove_conflicting_options(self):
        config = open(rc("corosync.conf")).read()
        config = config.replace(
            "    two_node: 1\n",
            "\n".join([
                "    two_node: 1",
                "    auto_tie_breaker: 1",
                "    last_man_standing: 1",
                "    last_man_standing_window: 987",
                "    allow_downscale: 1",
                ""
            ])
        )
        facade = lib.ConfigFacade.from_string(config)
        facade.add_quorum_device(
            "net",
            {"host": "127.0.0.1", "algorithm": "ffsplit"},
            {},
            {}
        )
        ac(
            re.sub(
                re.compile(r"quorum {[^}]*}\n", re.MULTILINE | re.DOTALL),
                dedent("""\
                    quorum {
                        provider: corosync_votequorum

                        device {
                            model: net
                            votes: 1

                            net {
                                algorithm: ffsplit
                                host: 127.0.0.1
                            }
                        }
                    }
                """),
                config
            ),
            facade.config.export()
        )
        self.assertTrue(facade.need_stopped_cluster)
        self.assertFalse(facade.need_qdevice_reload)
    def test_success_change_heuristics(self):
        config = self.fixture_add_device_with_heuristics(
            open(rc("corosync-3nodes.conf")).read()
        )
        facade = lib.ConfigFacade.from_string(config)
        facade.update_quorum_device(
            {},
            {},
            {"mode": "sync", "interval": "", "timeout": "20"}
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertTrue(facade.need_qdevice_reload)
        ac(
            config.replace(
                "interval: 30\n            mode: on",
                "mode: sync\n            timeout: 20",
            ),
            facade.config.export()
        )
    def test_success_generic_options(self):
        config = self.fixture_add_device(
            open(rc("corosync-3nodes.conf")).read()
        )
        facade = lib.ConfigFacade.from_string(config)
        facade.update_quorum_device(
            {},
            {"timeout": "", "sync_timeout": "23456"},
            {}
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertTrue(facade.need_qdevice_reload)
        ac(
            config.replace(
                "timeout: 12345\n        model: net",
                "model: net\n        sync_timeout: 23456",
            ),
            facade.config.export()
        )
    def test_success_model_options_net(self):
        config = self.fixture_add_device(
            open(rc("corosync-3nodes.conf")).read(),
            votes="1"
        )
        facade = lib.ConfigFacade.from_string(config)
        facade.update_quorum_device(
            {"host": "127.0.0.2", "port": "", "algorithm": "ffsplit"},
            {},
            {}
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertTrue(facade.need_qdevice_reload)
        ac(
            config.replace(
                "host: 127.0.0.1\n            port: 4433",
                "host: 127.0.0.2\n            algorithm: ffsplit"
            ),
            facade.config.export()
        )
    def test_success_all_options(self):
        config = self.fixture_add_device_with_heuristics(
            open(rc("corosync-3nodes.conf")).read()
        )
        facade = lib.ConfigFacade.from_string(config)
        facade.update_quorum_device(
            {"port": "4444"},
            {"timeout": "23456"},
            {"interval": "35"}
        )
        self.assertFalse(facade.need_stopped_cluster)
        self.assertTrue(facade.need_qdevice_reload)
        ac(
            config
                .replace("port: 4433", "port: 4444")
                .replace("timeout: 12345", "timeout: 23456")
                .replace("interval: 30", "interval: 35")
            ,
            facade.config.export()
        )
Example #26
    def load(
        self, node_name_list=None, name="corosync_conf.load",
        filename="corosync.conf", auto_tie_breaker=None, instead=None
    ):
        content = open(rc(filename)).read()
        corosync_conf = None
        if node_name_list:
            corosync_conf = ConfigFacade.from_string(content).config
            for nodelist in corosync_conf.get_sections(name="nodelist"):
                corosync_conf.del_section(nodelist)

            nodelist_section = Section("nodelist")
            corosync_conf.add_section(nodelist_section)
            for i, node_name in enumerate(node_name_list):
                node_section = Section("node")
                node_section.add_attribute("ring0_addr", node_name)
                node_section.add_attribute("nodeid", i)
                node_section.add_attribute("name", node_name)
                nodelist_section.add_section(node_section)


        if auto_tie_breaker is not None:
            corosync_conf = (
                corosync_conf if corosync_conf
                else ConfigFacade.from_string(content).config
            )
            for quorum in corosync_conf.get_sections(name="quorum"):
                quorum.set_attribute(
                    "auto_tie_breaker",
                    "1" if auto_tie_breaker else  "0"
                )

        if corosync_conf:
            content = corosync_conf.export()

        self.load_content(content, name=name, instead=instead)
Example #27
class CreateMixin:
    def setUp(self):
        # pylint does not know this method is defined in TestCase
        # pylint: disable=invalid-name
        self.env_assist, self.config = get_env_tools(test_case=self)

    def tearDown(self):
        # pylint does not know this method is defined in TestCase
        # pylint: disable=invalid-name
        # pylint: disable=no-self-use
        StonithAgent.clear_fenced_metadata_cache()

    def test_minimal_success(self):
        agent_name = "test_simple"

        (self.config.runner.pcmk.load_agent(
            agent_name=f"stonith:{agent_name}",
            agent_filename="stonith_agent_fence_simple.xml",
        ).runner.cib.load().runner.pcmk.load_fenced_metadata().env.push_cib(
            resources=self._expected_cib(expected_cib_simple)))

        self._create(
            self.env_assist.get_env(),
            "stonith-test",
            agent_name,
            operations=[],
            meta_attributes={},
            instance_attributes={
                "must-set": "value",
                "must-set-new": "B",
            },
        )

    def test_unfencing(self):
        agent_name = "test_unfencing"

        (self.config.runner.pcmk.load_agent(
            agent_name=f"stonith:{agent_name}",
            agent_filename="stonith_agent_fence_unfencing.xml",
        ).runner.cib.load().runner.pcmk.load_fenced_metadata().env.push_cib(
            resources=self._expected_cib(expected_cib_unfencing)))

        self._create(
            self.env_assist.get_env(),
            "stonith-test",
            agent_name,
            operations=[],
            meta_attributes={},
            instance_attributes={},
        )

    def test_disabled(self):
        agent_name = "test_simple"
        expected_cib = expected_cib_simple.replace(
            '<instance_attributes id="stonith-test-instance_attributes">',
            """
                <meta_attributes id="stonith-test-meta_attributes">
                    <nvpair id="stonith-test-meta_attributes-target-role"
                        name="target-role" value="Stopped"
                    />
                </meta_attributes>
                <instance_attributes id="stonith-test-instance_attributes">
            """,
        )

        (self.config.runner.pcmk.load_agent(
            agent_name=f"stonith:{agent_name}",
            agent_filename="stonith_agent_fence_simple.xml",
        ).runner.cib.load().runner.pcmk.load_fenced_metadata().env.push_cib(
            resources=self._expected_cib(expected_cib)))

        self._create(
            self.env_assist.get_env(),
            "stonith-test",
            agent_name,
            operations=[],
            meta_attributes={},
            instance_attributes={
                "must-set": "value",
                "must-set-new": "B",
            },
            ensure_disabled=True,
        )

    def _assert_default_operations(self, use_default_operations):
        # use_default_operations currently has no effect because in both cases
        # only the monitor operation is created in cib. That is correct. Still
        # it is worth testing. If it ever changes, the test should fail and be
        # updated to test new behaviour.
        agent_name = "test_custom_actions"

        (self.config.runner.pcmk.load_agent(
            agent_name=f"stonith:{agent_name}",
            agent_filename="stonith_agent_fence_custom_actions.xml",
        ).runner.cib.load().runner.pcmk.load_fenced_metadata().env.push_cib(
            resources=self._expected_cib(expected_cib_operations)))

        self._create(
            self.env_assist.get_env(),
            "stonith-test",
            agent_name,
            operations=[],
            meta_attributes={},
            instance_attributes={},
            use_default_operations=use_default_operations,
        )

    def test_default_operations_yes(self):
        self._assert_default_operations(True)

    def test_default_operations_no(self):
        self._assert_default_operations(False)

    def test_id_already_exists(self):
        agent_name = "test_simple"

        (self.config.runner.pcmk.load_agent(
            agent_name=f"stonith:{agent_name}",
            agent_filename="stonith_agent_fence_simple.xml",
        ).runner.cib.load(resources=self._expected_cib(expected_cib_simple)))

        self.env_assist.assert_raise_library_error(
            lambda: self._create(
                self.env_assist.get_env(),
                "stonith-test",
                agent_name,
                operations=[],
                meta_attributes={},
                instance_attributes={
                    "must-set": "value",
                    "must-set-new": "B",
                },
            ),
            [fixture.error(report_codes.ID_ALREADY_EXISTS, id="stonith-test")],
            expected_in_processor=False,
        )

    def test_instance_meta_and_operations(self):
        agent_name = "test_simple"

        (self.config.runner.pcmk.load_agent(
            agent_name=f"stonith:{agent_name}",
            agent_filename="stonith_agent_fence_simple.xml",
        ).runner.cib.load().runner.pcmk.load_fenced_metadata().env.push_cib(
            resources=self._expected_cib(expected_cib_simple_forced)))

        self._create(
            self.env_assist.get_env(),
            "stonith-test",
            agent_name,
            operations=[
                {
                    "name": "bad-action"
                },
            ],
            meta_attributes={
                "metaname": "metavalue",
            },
            instance_attributes={"undefined": "attribute"},
            allow_invalid_operation=True,
            allow_invalid_instance_attributes=True,
        )

        self.env_assist.assert_reports([
            fixture.warn(
                report_codes.INVALID_OPTION_VALUE,
                option_value="bad-action",
                option_name="operation name",
                allowed_values=[
                    "on",
                    "off",
                    "reboot",
                    "status",
                    "list",
                    "list-status",
                    "monitor",
                    "metadata",
                    "validate-all",
                ],
                cannot_be_empty=False,
                forbidden_characters=None,
            ),
            fixture.warn(
                report_codes.INVALID_OPTIONS,
                option_names=["undefined"],
                option_type="stonith",
                allowed=[
                    "may-set",
                    "must-set",
                    "must-set-new",
                    "must-set-old",
                    "pcmk_action_limit",
                    "pcmk_delay_base",
                    "pcmk_delay_max",
                    "pcmk_host_argument",
                    "pcmk_host_check",
                    "pcmk_host_list",
                    "pcmk_host_map",
                    "pcmk_list_action",
                    "pcmk_list_retries",
                    "pcmk_list_timeout",
                    "pcmk_monitor_action",
                    "pcmk_monitor_retries",
                    "pcmk_monitor_timeout",
                    "pcmk_off_action",
                    "pcmk_off_retries",
                    "pcmk_off_timeout",
                    "pcmk_on_action",
                    "pcmk_on_retries",
                    "pcmk_on_timeout",
                    "pcmk_reboot_action",
                    "pcmk_reboot_retries",
                    "pcmk_reboot_timeout",
                    "pcmk_status_action",
                    "pcmk_status_retries",
                    "pcmk_status_timeout",
                    "priority",
                ],
                allowed_patterns=[],
            ),
            fixture.warn(
                report_codes.REQUIRED_OPTIONS_ARE_MISSING,
                option_names=["must-set", "must-set-new"],
                option_type="stonith",
            ),
        ])

    def test_unknown_agent_forced(self):
        agent_name = "test_unknown"
        self.config.runner.pcmk.load_agent(
            agent_name=f"stonith:{agent_name}",
            agent_is_missing=True,
        )
        self.config.runner.cib.load()
        self.config.env.push_cib(
            resources=self._expected_cib(expected_cib_unknown))

        self._create(
            self.env_assist.get_env(),
            "stonith-test",
            agent_name,
            operations=[],
            meta_attributes={},
            instance_attributes={},
            allow_absent_agent=True,
        )

        self.env_assist.assert_reports([
            fixture.warn(
                report_codes.UNABLE_TO_GET_AGENT_METADATA,
                agent="test_unknown",
                reason=("Agent stonith:test_unknown not found or does not "
                        "support meta-data: Invalid argument (22)\n"
                        "Metadata query for stonith:test_unknown failed: "
                        "Input/output error"),
            ),
        ])

    @mock.patch.object(
        settings,
        "pacemaker_api_result_schema",
        rc("pcmk_api_rng/api-result.rng"),
    )
    def test_minimal_wait_ok_run_ok(self):
        agent_name = "test_simple"
        instance_name = "stonith-test"
        timeout = 10
        expected_status = """
            <resources>
                <resource
                    id="{id}"
                    resource_agent="stonith:{agent}"
                    role="Started"
                    active="true"
                    failed="false"
                    nodes_running_on="1"
                >
                    <node name="node1" id="1" cached="false"/>
                </resource>
            </resources>
            """.format(id=instance_name, agent=agent_name)

        (self.config.runner.pcmk.load_agent(
            agent_name=f"stonith:{agent_name}",
            agent_filename="stonith_agent_fence_simple.xml",
        ).runner.cib.load().runner.pcmk.load_fenced_metadata().env.push_cib(
            resources=self._expected_cib(expected_cib_simple),
            wait=timeout).runner.pcmk.load_state(resources=expected_status))

        self._create(
            self.env_assist.get_env(),
            instance_name,
            agent_name,
            operations=[],
            meta_attributes={},
            instance_attributes={
                "must-set": "value",
                "must-set-new": "B",
            },
            wait=str(timeout),
        )
        self.env_assist.assert_reports([
            fixture.info(
                report_codes.RESOURCE_RUNNING_ON_NODES,
                roles_with_nodes={"Started": ["node1"]},
                resource_id=instance_name,
            ),
        ])
Example #28
    def load_agent(
        self,
        name="runner.pcmk.load_agent",
        agent_name="ocf:heartbeat:Dummy",
        agent_filename=None,
        agent_is_missing=False,
        stderr=None,
        instead=None,
    ):
        """
        Create call for loading resource agent metadata.

        string name -- key of the call
        string agent_name
        string agent_filename -- points to a file whose content is the agent
            metadata
        string instead -- key of call instead of which this new call is to be
            placed
        """

        if agent_filename:
            agent_metadata_filename = agent_filename
        elif agent_name in AGENT_FILENAME_MAP:
            agent_metadata_filename = AGENT_FILENAME_MAP[agent_name]
        elif not agent_is_missing:
            raise AssertionError(
                ("Filename with metadata of agent '{0}' not specified.\n"
                 "Please specify file with metadata for agent:\n"
                 "  a) explicitly for this test:"
                 " config.runner.pcmk.load_agent(agent_name='{0}',"
                 " filename='FILENAME_HERE.xml')\n"
                 "  b) implicitly for agent '{0}' in 'AGENT_FILENAME_MAP' in"
                 " '{1}'\n"
                 "Place agent metadata into '{2}FILENAME_HERE.xml'").format(
                     agent_name, os.path.realpath(__file__), rc("")))

        if agent_is_missing:
            if stderr is None:
                stderr = (
                    f"Agent {agent_name} not found or does not support "
                    "meta-data: Invalid argument (22)\n"
                    f"Metadata query for {agent_name} failed: Input/output "
                    "error\n")
            self.__calls.place(
                name,
                RunnerCall(
                    "crm_resource --show-metadata {0}".format(agent_name),
                    stdout="",
                    stderr=stderr,
                    returncode=74,
                ),
                instead=instead,
            )
            return

        with open(rc(agent_metadata_filename)) as a_file:
            self.__calls.place(
                name,
                RunnerCall(
                    "crm_resource --show-metadata {0}".format(agent_name),
                    stdout=a_file.read(),
                    stderr=stderr,
                ),
                instead=instead,
            )
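
    # A hedged usage note (added for illustration; not part of the original
    # fixture module): tests in this document reach this fixture through the
    # chained config object, e.g.
    #
    #   self.config.runner.pcmk.load_agent(
    #       agent_name="stonith:test_simple",
    #       agent_filename="stonith_agent_fence_simple.xml",
    #   )
    #
    # while load_agent(agent_name=..., agent_is_missing=True) registers a
    # "crm_resource --show-metadata" call failing with return code 74 and the
    # stderr composed above.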
Example #29
from unittest import TestCase

from pcs_test.tools.assertions import ac
from pcs_test.tools.cib import get_assert_pcs_effect_mixin
from pcs_test.tools.misc import (
    get_test_resource as rc,
    get_tmp_file,
    write_file_to_tmpfile,
)
from pcs_test.tools.pcs_runner import (
    pcs,
    PcsRunner,
)

# pylint: disable=invalid-name

empty_cib = rc("cib-empty.xml")


class PropertyTest(TestCase):
    def setUp(self):
        self.temp_cib = get_tmp_file("tier1_properties")
        write_file_to_tmpfile(empty_cib, self.temp_cib)
        self.pcs_runner = PcsRunner(self.temp_cib.name)

    def tearDown(self):
        self.temp_cib.close()

    def testEmpty(self):
        output, returnVal = pcs(self.temp_cib.name, ["property"])
        assert returnVal == 0, "Unable to list resources"
        assert output == "Cluster Properties:\n", [output]
Example #30
    def load_agent(
        self,
        name="runner.pcmk.load_agent",
        agent_name="ocf:heartbeat:Dummy",
        agent_filename=None,
        agent_is_missing=False,
        stdout=None,
        stderr=None,
        instead=None,
        env=None,
    ):
        """
        Create call for loading resource agent metadata.

        string name -- key of the call
        string agent_name
        string agent_filename -- points to a file whose content is the agent
            metadata
        bool agent_is_missing -- create a response as if the agent was missing
        string stdout -- agent metadata to be returned; if not set, it is read
            from the file given by agent_filename or AGENT_FILENAME_MAP
        string instead -- key of call instead of which this new call is to be
            placed
        dict env -- CommandRunner environment variables
        """
        if env:
            env = dict(env)
        else:
            env = {}
        env["PATH"] = ":".join([
            settings.fence_agent_binaries,
            "/bin",
            "/usr/bin",
        ])

        if agent_filename:
            agent_metadata_filename = agent_filename
        elif agent_name in AGENT_FILENAME_MAP:
            agent_metadata_filename = AGENT_FILENAME_MAP[agent_name]
        elif not stdout and not agent_is_missing:
            raise AssertionError(
                ("Filename with metadata of agent '{0}' not specified.\n"
                 "Please specify file with metadata for agent:\n"
                 "  a) explicitly for this test:"
                 " config.runner.pcmk.load_agent(agent_name='{0}',"
                 " filename='FILENAME_HERE.xml')\n"
                 "  b) implicitly for agent '{0}' in 'AGENT_FILENAME_MAP' in"
                 " '{1}'\n"
                 "Place agent metadata into '{2}FILENAME_HERE.xml'\n"
                 "Or define metadata directly in 'stdout' argument.").format(
                     agent_name, os.path.realpath(__file__), rc("")))

        if agent_is_missing:
            if stderr is None:
                stderr = (
                    f"Agent {agent_name} not found or does not support "
                    "meta-data: Invalid argument (22)\n"
                    f"Metadata query for {agent_name} failed: Input/output "
                    "error\n")
            self.__calls.place(
                name,
                RunnerCall(
                    ["crm_resource", "--show-metadata", agent_name],
                    stdout="",
                    stderr=stderr,
                    returncode=74,
                    env=env,
                ),
                instead=instead,
            )
            return

        if not stdout:
            with open(rc(agent_metadata_filename)) as a_file:
                stdout = a_file.read()
        self.__calls.place(
            name,
            RunnerCall(
                ["crm_resource", "--show-metadata", agent_name],
                stdout=stdout,
                stderr=stderr,
                env=env,
            ),
            instead=instead,
        )
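
    # A hedged usage note (added for illustration; not part of the original
    # fixture module): besides the filename-based form shown elsewhere in this
    # document, this newer variant accepts inline metadata and extra
    # environment variables; the agent name and XML string below are made up.
    #
    #   self.config.runner.pcmk.load_agent(
    #       agent_name="stonith:fence_example",
    #       stdout="<resource-agent name='fence_example'/>",
    #       env={"LC_ALL": "C"},  # merged with the PATH set above
    #   )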
Example #31
class ManageUnmanage(
        TestCase,
        get_assert_pcs_effect_mixin(lambda cib: etree.tostring(
            # pylint:disable=undefined-variable
            etree.parse(cib).findall(".//resources")[0])),
):
    empty_cib = rc("cib-empty.xml")

    @staticmethod
    def fixture_cib_unmanaged_a(add_empty_meta_b=False):
        empty_meta_b = '<meta_attributes id="B-meta_attributes" />'
        return """
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <meta_attributes id="A-meta_attributes">
                        <nvpair id="A-meta_attributes-is-managed"
                            name="is-managed" value="false"
                        />
                    </meta_attributes>
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
                <primitive class="ocf" id="B" provider="heartbeat" type="Dummy">
                    {empty_meta_b}<operations>
                        <op id="B-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
            </resources>
        """.format(empty_meta_b=(empty_meta_b if add_empty_meta_b else ""))

    def setUp(self):
        self.temp_cib = get_tmp_file("tier1_cib_resource_manage_unmanage")
        write_file_to_tmpfile(self.empty_cib, self.temp_cib)
        self.pcs_runner = PcsRunner(self.temp_cib.name)

    def tearDown(self):
        self.temp_cib.close()

    def fixture_resource(self, name, managed=True, with_monitors=False):
        self.assert_pcs_success([
            "resource",
            "create",
            name,
            "ocf:heartbeat:Dummy",
            "--no-default-ops",
        ])
        if not managed:
            cmd = ["resource", "unmanage", name]
            if with_monitors:
                cmd.append("--monitor")
            self.assert_pcs_success(cmd)

    def fixture_tag(self, name, ids):
        self.assert_pcs_success(["tag", "create", name] + ids)

    def test_unmanage_none(self):
        self.assert_pcs_fail(
            "resource unmanage".split(),
            "Error: You must specify resource(s) to unmanage\n",
        )

    def test_manage_none(self):
        self.assert_pcs_fail(
            "resource manage".split(),
            "Error: You must specify resource(s) to manage\n",
        )

    def test_unmanage_one(self):
        self.fixture_resource("A")
        self.fixture_resource("B")
        self.assert_effect("resource unmanage A".split(),
                           self.fixture_cib_unmanaged_a())

    def test_manage_one(self):
        self.fixture_resource("A", managed=False)
        self.fixture_resource("B", managed=False)
        self.assert_effect(
            "resource manage B".split(),
            self.fixture_cib_unmanaged_a(add_empty_meta_b=True),
        )

    def test_unmanage_monitor(self):
        self.fixture_resource("A")
        self.assert_effect(
            "resource unmanage A --monitor".split(),
            """
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <meta_attributes id="A-meta_attributes">
                        <nvpair id="A-meta_attributes-is-managed"
                            name="is-managed" value="false"
                        />
                    </meta_attributes>
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s" enabled="false"
                        />
                    </operations>
                </primitive>
            </resources>
            """,
        )

    def test_unmanage_monitor_enabled(self):
        self.fixture_resource("A")
        self.assert_effect(
            "resource unmanage A".split(),
            """
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <meta_attributes id="A-meta_attributes">
                        <nvpair id="A-meta_attributes-is-managed"
                            name="is-managed" value="false"
                        />
                    </meta_attributes>
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
            </resources>
            """,
        )

    def test_manage_monitor(self):
        self.fixture_resource("A", managed=True, with_monitors=True)
        self.assert_effect(
            "resource manage A --monitor".split(),
            """
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
            </resources>
            """,
        )

    def test_manage_monitor_disabled(self):
        self.fixture_resource("A", managed=False, with_monitors=True)
        self.assert_effect(
            "resource manage A".split(),
            """
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <meta_attributes id="A-meta_attributes" />
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s" enabled="false"
                        />
                    </operations>
                </primitive>
            </resources>
            """,
            "Warning: Resource 'A' has no enabled monitor operations."
            " Re-run with '--monitor' to enable them.\n",
        )

    def test_unmanage_more(self):
        self.fixture_resource("A")
        self.fixture_resource("B")
        self.fixture_tag("TA", ["A"])
        self.assert_effect(
            "resource unmanage TA B".split(),
            """
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <meta_attributes id="A-meta_attributes">
                        <nvpair id="A-meta_attributes-is-managed"
                            name="is-managed" value="false"
                        />
                    </meta_attributes>
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
                <primitive class="ocf" id="B" provider="heartbeat" type="Dummy">
                    <meta_attributes id="B-meta_attributes">
                        <nvpair id="B-meta_attributes-is-managed"
                            name="is-managed" value="false"
                        />
                    </meta_attributes>
                    <operations>
                        <op id="B-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
            </resources>
            """,
        )

    def test_manage_more(self):
        self.fixture_resource("A", managed=False)
        self.fixture_resource("B", managed=False)
        self.fixture_tag("TA", ["A"])
        self.assert_effect(
            "resource manage TA B".split(),
            """
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <meta_attributes id="A-meta_attributes" />
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
                <primitive class="ocf" id="B" provider="heartbeat" type="Dummy">
                    <meta_attributes id="B-meta_attributes" />
                    <operations>
                        <op id="B-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
            </resources>
            """,
        )

    def test_unmanage_nonexistent(self):
        self.fixture_resource("A")

        self.assert_pcs_fail(
            "resource unmanage A B".split(),
            ("Error: bundle/clone/group/resource/tag 'B' does not exist\n"
             "Error: Errors have occurred, therefore pcs is unable to continue\n"
             ),
        )
        self.assert_resources_xml_in_cib("""
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
            </resources>
            """)

    def test_manage_nonexistent(self):
        self.fixture_resource("A", managed=False)

        self.assert_pcs_fail(
            "resource manage A B".split(),
            ("Error: bundle/clone/group/resource/tag 'B' does not exist\n"
             "Error: Errors have occurred, therefore pcs is unable to continue\n"
             ),
        )
        self.assert_resources_xml_in_cib("""
            <resources>
                <primitive class="ocf" id="A" provider="heartbeat" type="Dummy">
                    <meta_attributes id="A-meta_attributes">
                        <nvpair id="A-meta_attributes-is-managed"
                            name="is-managed" value="false"
                        />
                    </meta_attributes>
                    <operations>
                        <op id="A-monitor-interval-10s" interval="10s"
                            name="monitor" timeout="20s"
                        />
                    </operations>
                </primitive>
            </resources>
            """)
Example #32
import os.path

from pcs_test.tools.misc import get_test_resource as rc

from pcs import utils

__pcs_location = os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
    "pcs_for_tests"
)
_temp_cib = rc("temp-cib.xml")

# this can be changed from suite.py
test_installed = False


class PcsRunner:
    def __init__(
        self, cib_file=_temp_cib, corosync_conf_opt=None, mock_settings=None
    ):
        self.cib_file = cib_file
        self.corosync_conf_opt = corosync_conf_opt
        self.mock_settings = mock_settings

    def run(self, args):
        return pcs(
            self.cib_file,
            args,
            corosync_conf_opt=self.corosync_conf_opt,
            mock_settings=self.mock_settings
        )
Example #33
    def test_status_no_resources_default_command_hide_inactive(self):
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.assert_pcs_success(
            self.command[:-1] + ["--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )
Example #34
    def test_status_no_resources_with_node_hide_inactive(self):
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.assert_pcs_success(
            self.command + ["node=rh-1", "--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )
Example #35
class StonithWarningTest(TestCase, AssertPcsMixin):
    empty_cib = rc("cib-empty.xml")
    corosync_conf = rc("corosync.conf")

    def setUp(self):
        self.temp_cib = get_tmp_file("tier0_statust_stonith_warning")
        write_file_to_tmpfile(self.empty_cib, self.temp_cib)
        self.pcs_runner = PcsRunner(self.temp_cib.name)

    def tearDown(self):
        self.temp_cib.close()

    def fixture_stonith_action(self):
        self.assert_pcs_success(
            ("stonith create Sa fence_apc ip=i username=u action=reboot "
             "--force").split(),
            "Warning: stonith option 'action' is deprecated and should not be "
            "used, use 'pcmk_off_action', 'pcmk_reboot_action' instead\n",
        )

    def fixture_stonith_cycle(self):
        self.assert_pcs_success(
            "stonith create Sc fence_ipmilan method=cycle".split())

    def fixture_resource(self):
        self.assert_pcs_success(
            ("resource create dummy ocf:pacemaker:Dummy action=reboot "
             "method=cycle --force").split(),
            "Warning: invalid resource options: 'action', 'method', allowed "
            "options are: 'envfile', 'fail_start_on', 'fake', 'op_sleep', "
            "'passwd', 'state', 'trace_file', 'trace_ra'\n",
        )

    def test_warning_stonith_action(self):
        self.fixture_stonith_action()
        self.fixture_resource()
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'action' option set, it is recommended to set 'pcmk_off_action', 'pcmk_reboot_action' instead: 'Sa'

                    Cluster Summary:
                """),
            )
        else:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'action' option set, it is recommended to set 'pcmk_off_action', 'pcmk_reboot_action' instead: 'Sa'

                    Stack: unknown
                    Current DC: NONE
                """),
            )

    def test_warning_stonith_method_cycle(self):
        self.fixture_stonith_cycle()
        self.fixture_resource()
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'method' option set to 'cycle' which is potentially dangerous, please consider using 'onoff': 'Sc'

                    Cluster Summary:
                """),
            )
        else:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'method' option set to 'cycle' which is potentially dangerous, please consider using 'onoff': 'Sc'

                    Stack: unknown
                    Current DC: NONE
                """),
            )

    def test_stonith_warnings(self):
        self.fixture_stonith_action()
        self.fixture_stonith_cycle()
        self.fixture_resource()
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'action' option set, it is recommended to set 'pcmk_off_action', 'pcmk_reboot_action' instead: 'Sa'
                    Following stonith devices have the 'method' option set to 'cycle' which is potentially dangerous, please consider using 'onoff': 'Sc'

                    Cluster Summary:
                """),
            )
        else:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'action' option set, it is recommended to set 'pcmk_off_action', 'pcmk_reboot_action' instead: 'Sa'
                    Following stonith devices have the 'method' option set to 'cycle' which is potentially dangerous, please consider using 'onoff': 'Sc'

                    Stack: unknown
                    Current DC: NONE
                """),
            )

    def test_warn_when_no_stonith(self):
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    No stonith devices and stonith-enabled is not false

                    Cluster Summary:
                """),
            )
        else:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    No stonith devices and stonith-enabled is not false

                    Stack: unknown
                    Current DC: NONE
                """),
            )

    def test_no_stonith_warning_when_stonith_in_group(self):
        self.assert_pcs_success(
            "stonith create S fence_xvm --group G".split(),
            ("Deprecation Warning: Option to group stonith resource is "
             "deprecated and will be removed in a future release.\n"),
        )
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        self.pcs_runner.mock_settings = get_mock_settings(
            "crm_resource_binary")
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99
                    Cluster Summary:
                """),
            )
        else:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99

                    Stack: unknown
                    Current DC: NONE
                """),
            )

    def test_disabled_stonith_does_not_care_about_missing_devices(self):
        self.assert_pcs_success("property set stonith-enabled=false".split())
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99
                    Cluster Summary:
                """),
            )
        else:
            self.assert_pcs_success(
                ["status"],
                stdout_start=dedent("""\
                    Cluster name: test99
                    Stack: unknown
                    Current DC: NONE
                """),
            )
Example #36
                forbidden_characters=None,
            ),
        ])

    def test_wait(self):
        self.env_assist.assert_raise_library_error(
            lambda: node_add_guest(self.env_assist.get_env(), wait=1),
            [
                fixture.error(reports.codes.WAIT_FOR_IDLE_NOT_LIVE_CLUSTER),
            ],
            expected_in_processor=False,
        )


@mock.patch.object(settings, "pacemaker_api_result_schema",
                   rc("pcmk_api_rng/api-result.rng"))
class WithWait(TestCase):
    def setUp(self):
        self.wait = 1
        self.env_assist, self.config = get_env_tools(self)
        (self.config.env.set_known_hosts_dests(
            KNOWN_HOSTS_DESTS).local.load_cib().corosync_conf.load(
                node_name_list=[NODE_1, NODE_2]).http.host.check_auth(
                    communication_list=[
                        dict(label=NODE_NAME, dest_list=NODE_DEST_LIST)
                    ], ).local.get_host_info(
                        NODE_NAME,
                        NODE_DEST_LIST).local.push_existing_authkey_to_remote(
                            NODE_NAME,
                            NODE_DEST_LIST).local.run_pacemaker_remote(
                                NODE_NAME,
Example #37
    def fixture_status_xml(self, nodes, resources):
        xml_man = XmlManipulation.from_file(rc("crm_mon.minimal.xml"))
        doc = xml_man.tree.getroottree()
        doc.find("/summary/nodes_configured").set("number", str(nodes))
        doc.find("/summary/resources_configured").set("number", str(resources))
        return str(XmlManipulation(doc))
Example #38
import shutil
import unittest

from pcs_test.tools.misc import (
    get_test_resource as rc,
    skip_unless_pacemaker_version,
    outdent,
    ParametrizedTestMetaClass,
)
from pcs_test.tools.assertions import AssertPcsMixin
from pcs_test.tools.pcs_runner import PcsRunner


old_cib = rc("cib-empty-2.0.xml")
empty_cib = rc("cib-empty-2.5.xml")
temp_cib = rc("temp-cib.xml")

skip_unless_alerts_supported = skip_unless_pacemaker_version(
    (1, 1, 15),
    "alerts"
)

class PcsAlertTest(unittest.TestCase, AssertPcsMixin):
    def setUp(self):
        shutil.copy(empty_cib, temp_cib)
        self.pcs_runner = PcsRunner(temp_cib)


@skip_unless_alerts_supported
class AlertCibUpgradeTest(unittest.TestCase, AssertPcsMixin):
    def setUp(self):
Example #39
    def setUp(self):
        self.temp_cib = get_tmp_file("tier1_status_xml_status")
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.pcs_runner = PcsRunner(self.temp_cib.name)
Example #40
from textwrap import dedent
from unittest import TestCase

from lxml import etree

from pcs_test.tools.cib import get_assert_pcs_effect_mixin
from pcs_test.tools.misc import get_test_resource as rc
from pcs_test.tools.misc import (
    get_tmp_file,
    outdent,
    write_file_to_tmpfile,
)
from pcs_test.tools.pcs_runner import PcsRunner

empty_cib = rc("cib-empty.xml")
tags_cib = rc("cib-tags.xml")


class TestTagMixin(
        get_assert_pcs_effect_mixin(lambda cib: etree.tostring(
            # pylint:disable=undefined-variable
            etree.parse(cib).findall(".//tags")[0]))):
    def setUp(self):
        # pylint: disable=invalid-name
        self.temp_cib = get_tmp_file("tier1_tag")
        write_file_to_tmpfile(tags_cib, self.temp_cib)
        self.pcs_runner = PcsRunner(self.temp_cib.name)

    def tearDown(self):
        # pylint: disable=invalid-name
        self.temp_cib.close()
Example #41
 def test_success(self):
     path = rc("corosync.conf")
     settings.corosync_conf_file = path
     with open(path) as a_file:
         self.assertEqual(lib.get_local_corosync_conf(), a_file.read())
Example #42
import os
from unittest import TestCase

from pcs_test.tools.misc import get_test_resource as rc

from pcs.daemon.ssl import PcsdSSL, CertKeyPair, SSLCertKeyException

SERVER_NAME = "pcsd-daemon"
SSL_OPTIONS = 0
SSL_CIPHERS = "DEFAULT:!RC4"
CERT = rc("daemon.cert")
KEY = rc("daemon.key")

def remove_ssl_files():
    if os.path.exists(CERT):
        os.remove(CERT)
    if os.path.exists(KEY):
        os.remove(KEY)

def damage_ssl_files():
    with open(CERT, "w") as cert:
        cert.write("bad content")
    with open(KEY, "w") as key:
        key.write("bad content")

# various versions of OpenSSL / PyOpenSSL emit different messages
DAMAGED_SSL_FILES_ERRORS_1 = (
    f"Invalid SSL certificate '{CERT}':"
        " 'PEM routines:PEM_read_bio:no start line'"
    ,
    f"Invalid SSL key '{KEY}': 'PEM routines:PEM_read_bio:no start line'"
Example #43
import os
from textwrap import dedent
from unittest import mock, skipUnless, TestCase

from pcs.cli.booth import command as booth_cmd

from pcs_test.tools.assertions import AssertPcsMixin
from pcs_test.tools.misc import (
    get_test_resource as rc,
    get_tmp_dir,
    get_tmp_file,
    outdent,
    write_file_to_tmpfile,
)
from pcs_test.tools.pcs_runner import PcsRunner

EMPTY_CIB = rc("cib-empty.xml")

BOOTH_RESOURCE_AGENT_INSTALLED = os.path.exists(
    "/usr/lib/ocf/resource.d/pacemaker/booth-site")
need_booth_resource_agent = skipUnless(
    BOOTH_RESOURCE_AGENT_INSTALLED,
    "test requires resource agent ocf:pacemaker:booth-site"
    " which is not installed",
)


class BoothMixinNoFiles(AssertPcsMixin):
    def setUp(self):
        # pylint cannot possibly know this is being mixed into TestCase classes
        # pylint: disable=invalid-name
        self.pcs_runner = PcsRunner(None)
Example #44
 def setUp(self):
     self.uid_gid_dir = rc("uid_gid.d")
     if not os.path.exists(self.uid_gid_dir):
         os.mkdir(self.uid_gid_dir)
Example #45
 def setUp(self):
     super().setUp()
     self.pcs_runner.mock_settings = {
         "corosync_conf_file": rc("corosync.conf")
     }
Example #46
class WaitMixin(FixturesMixin, SetUpMixin):
    initial_resources = "<resources/>"
    bundle_id = None
    image = None

    @property
    def fixture_status_running(self):
        return """
            <resources>
                <bundle id="{bundle_id}" managed="true">
                    <replica id="0">
                        <resource
                            id="{bundle_id}-docker-0"
                            managed="true"
                            role="Started"
                        >
                            <node name="node1" id="1" cached="false"/>
                        </resource>
                    </replica>
                    <replica id="1">
                        <resource
                            id="{bundle_id}-docker-1"
                            managed="true"
                            role="Started"
                        >
                            <node name="node2" id="2" cached="false"/>
                        </resource>
                    </replica>
                </bundle>
            </resources>
        """.format(bundle_id=self.bundle_id)

    @property
    def fixture_status_not_running(self):
        return """
            <resources>
                <bundle id="{bundle_id}" managed="true">
                    <replica id="0">
                        <resource
                            id="{bundle_id}-docker-0"
                            managed="true"
                            role="Stopped"
                        />
                    </replica>
                    <replica id="1">
                        <resource
                            id="{bundle_id}-docker-1"
                            managed="true"
                            role="Stopped"
                        />
                    </replica>
                </bundle>
            </resources>
        """.format(bundle_id=self.bundle_id)

    @property
    def fixture_resources_bundle_simple_disabled(self):
        return """
            <resources>
                <bundle id="{bundle_id}">
                    <meta_attributes id="{bundle_id}-meta_attributes">
                        <nvpair id="{bundle_id}-meta_attributes-target-role"
                            name="target-role" value="Stopped" />
                    </meta_attributes>
                    <docker image="{image}" />
                </bundle>
            </resources>
        """.format(bundle_id=self.bundle_id, image=self.image)

    def test_wait_fail(self):
        wait_error_message = dedent("""\
            Pending actions:
                    Action 12: {bundle_id}-node2-stop on node2
            Error performing operation: Timer expired
            """.format(bundle_id=self.bundle_id)).strip()
        self.config.env.push_cib(
            resources=self.fixture_resources_bundle_simple,
            wait=TIMEOUT,
            exception=LibraryError(
                reports.item.ReportItem.error(
                    reports.messages.WaitForIdleTimedOut(wait_error_message))),
        )
        self.env_assist.assert_raise_library_error(
            lambda: self.run_bundle_cmd(wait=TIMEOUT),
            [fixture.report_wait_for_idle_timed_out(wait_error_message)],
            expected_in_processor=False,
        )

    @mock.patch.object(
        settings,
        "pacemaker_api_result_schema",
        rc("pcmk_api_rng/api-result.rng"),
    )
    def test_wait_ok_run_ok(self):
        (self.config.env.push_cib(
            resources=self.fixture_resources_bundle_simple,
            wait=TIMEOUT).runner.pcmk.load_state(
                resources=self.fixture_status_running))
        self.run_bundle_cmd(wait=TIMEOUT)
        self.env_assist.assert_reports([
            fixture.report_resource_running(self.bundle_id,
                                            {"Started": ["node1", "node2"]}),
        ])

    @mock.patch.object(
        settings,
        "pacemaker_api_result_schema",
        rc("pcmk_api_rng/api-result.rng"),
    )
    def test_wait_ok_run_fail(self):
        (self.config.env.push_cib(
            resources=self.fixture_resources_bundle_simple,
            wait=TIMEOUT).runner.pcmk.load_state(
                resources=self.fixture_status_not_running))
        self.env_assist.assert_raise_library_error(
            lambda: self.run_bundle_cmd(wait=TIMEOUT))
        self.env_assist.assert_reports([
            fixture.report_resource_not_running(self.bundle_id,
                                                severities.ERROR),
        ])

    @mock.patch.object(
        settings,
        "pacemaker_api_result_schema",
        rc("pcmk_api_rng/api-result.rng"),
    )
    def test_disabled_wait_ok_run_ok(self):
        (self.config.env.push_cib(
            resources=self.fixture_resources_bundle_simple_disabled,
            wait=TIMEOUT,
        ).runner.pcmk.load_state(resources=self.fixture_status_not_running))
        self.run_bundle_cmd(ensure_disabled=True, wait=TIMEOUT)
        self.env_assist.assert_reports([(
            severities.INFO,
            report_codes.RESOURCE_DOES_NOT_RUN,
            {
                "resource_id": self.bundle_id,
            },
            None,
        )])

    @mock.patch.object(
        settings,
        "pacemaker_api_result_schema",
        rc("pcmk_api_rng/api-result.rng"),
    )
    def test_disabled_wait_ok_run_fail(self):
        (self.config.env.push_cib(
            resources=self.fixture_resources_bundle_simple_disabled,
            wait=TIMEOUT,
        ).runner.pcmk.load_state(resources=self.fixture_status_running))
        self.env_assist.assert_raise_library_error(
            lambda: self.run_bundle_cmd(ensure_disabled=True, wait=TIMEOUT))
        self.env_assist.assert_reports([
            fixture.report_resource_running(
                self.bundle_id,
                {"Started": ["node1", "node2"]},
                severities.ERROR,
            )
        ])
Example #47
    def load_state(self,
                   name="runner.pcmk.load_state",
                   filename="crm_mon.minimal.xml",
                   resources=None,
                   raw_resources=None,
                   nodes=None,
                   stdout="",
                   stderr="",
                   returncode=0):
        """
        Create call for loading pacemaker state.

        string name -- key of the call
        string filename -- points to file with the status in the content
        string resources -- xml - resources section, will be put to state
        string nodes -- iterable of node dicts
        string stdout -- crm_mon's stdout
        string stderr -- crm_mon's stderr
        int returncode -- crm_mon's returncode
        """
        if ((resources or raw_resources is not None or nodes)
                and (stdout or stderr or returncode)):
            raise AssertionError(
                "Cannot specify resources or nodes when stdout, stderr or "
                "returncode is specified")
        if resources and raw_resources is not None:
            raise AssertionError(
                "Cannot use 'resources' and 'raw_resources' together")

        if (stdout or stderr or returncode):
            self.__calls.place(
                name,
                RunnerCall("crm_mon --one-shot --as-xml --inactive",
                           stdout=stdout,
                           stderr=stderr,
                           returncode=returncode))
            return

        state = etree.fromstring(open(rc(filename)).read())

        if raw_resources is not None:
            resources = fixture_state_resources_xml(**raw_resources)
        if resources:
            state.append(complete_state_resources(etree.fromstring(resources)))

        if nodes:
            nodes_element = state.find("./nodes")
            for node in nodes:
                nodes_element.append(
                    etree.fromstring(fixture_state_node_xml(**node)))

        # set correct number of nodes and resources into the status
        resources_count = len(
            state.xpath(" | ".join([
                "./resources/bundle",
                "./resources/clone",
                "./resources/group",
                "./resources/resource",
            ])))
        nodes_count = len(state.findall("./nodes/node"))
        state.find("./summary/nodes_configured").set("number",
                                                     str(nodes_count))
        state.find("./summary/resources_configured").set(
            "number", str(resources_count))

        self.__calls.place(
            name,
            RunnerCall(
                "crm_mon --one-shot --as-xml --inactive",
                stdout=etree_to_str(state),
            ))
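A minimal usage sketch for the load_state call defined above, following the pattern of the bundle wait tests earlier in this collection; the test-class context and the resource XML are illustrative assumptions:

    def test_state_with_one_resource(self):
        # queue a crm_mon state containing a single started resource; load_state
        # completes the resource attributes and recalculates the summary counts
        self.config.runner.pcmk.load_state(
            resources="""
                <resources>
                    <resource id="A" managed="true" role="Started"/>
                </resources>
            """
        )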
Example #48
class StonithWarningTest(TestCase, AssertPcsMixin):
    empty_cib = rc("cib-empty.xml")
    temp_cib = rc("temp-cib.xml")
    corosync_conf = rc("corosync.conf")

    def setUp(self):
        shutil.copy(self.empty_cib, self.temp_cib)
        self.pcs_runner = PcsRunner(self.temp_cib)

    def fixture_stonith_action(self):
        self.assert_pcs_success(
            "stonith create Sa fence_apc ip=i username=u action=reboot --force",
            "Warning: stonith option 'action' is deprecated and should not be"
            " used, use pcmk_off_action, pcmk_reboot_action instead\n")

    def fixture_stonith_cycle(self):
        self.assert_pcs_success("stonith create Sc fence_ipmilan method=cycle")

    def fixture_resource(self):
        self.assert_pcs_success(
            "resource create dummy ocf:pacemaker:Dummy action=reboot "
            "method=cycle --force",
            "Warning: invalid resource options: 'action', 'method', allowed "
            "options are: 'envfile', 'fail_start_on', 'fake', 'op_sleep', "
            "'passwd', 'state', 'trace_file', 'trace_ra'\n")

    def test_warning_stonith_action(self):
        self.fixture_stonith_action()
        self.fixture_resource()
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'action' option set, it is recommended to set 'pcmk_off_action', 'pcmk_reboot_action' instead: 'Sa'

                    Cluster Summary:
                """))
        else:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'action' option set, it is recommended to set 'pcmk_off_action', 'pcmk_reboot_action' instead: 'Sa'

                    Stack: unknown
                    Current DC: NONE
                """))

    def test_warning_stonith_method_cycle(self):
        self.fixture_stonith_cycle()
        self.fixture_resource()
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'method' option set to 'cycle' which is potentially dangerous, please consider using 'onoff': 'Sc'

                    Cluster Summary:
                """))
        else:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'method' option set to 'cycle' which is potentially dangerous, please consider using 'onoff': 'Sc'

                    Stack: unknown
                    Current DC: NONE
                """))

    def test_stonith_warnings(self):
        self.fixture_stonith_action()
        self.fixture_stonith_cycle()
        self.fixture_resource()
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'action' option set, it is recommended to set 'pcmk_off_action', 'pcmk_reboot_action' instead: 'Sa'
                    Following stonith devices have the 'method' option set to 'cycle' which is potentially dangerous, please consider using 'onoff': 'Sc'

                    Cluster Summary:
                """))
        else:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    Following stonith devices have the 'action' option set, it is recommended to set 'pcmk_off_action', 'pcmk_reboot_action' instead: 'Sa'
                    Following stonith devices have the 'method' option set to 'cycle' which is potentially dangerous, please consider using 'onoff': 'Sc'

                    Stack: unknown
                    Current DC: NONE
                """))

    def test_warn_when_no_stonith(self):
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    No stonith devices and stonith-enabled is not false

                    Cluster Summary:
                """))
        else:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99

                    WARNINGS:
                    No stonith devices and stonith-enabled is not false

                    Stack: unknown
                    Current DC: NONE
                """))

    def test_disabled_stonith_does_not_care_about_missing_devices(self):
        self.assert_pcs_success("property set stonith-enabled=false")
        self.pcs_runner.corosync_conf_opt = self.corosync_conf
        if PCMK_2_0_3_PLUS:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99
                    Cluster Summary:
                """))
        else:
            self.assert_pcs_success("status",
                                    stdout_start=dedent("""\
                    Cluster name: test99
                    Stack: unknown
                    Current DC: NONE
                """))
Example #49
from lxml import etree

from pcs_test.tools.assertions import AssertPcsMixin
from pcs_test.tools.cib import get_assert_pcs_effect_mixin
from pcs_test.tools.misc import (
    get_test_resource as rc,
    get_tmp_file,
    skip_unless_crm_rule,
    skip_unless_pacemaker_supports_rsc_and_op_rules,
    write_data_to_tmpfile,
    write_file_to_tmpfile,
)
from pcs_test.tools.pcs_runner import PcsRunner
from pcs_test.tools.xml import XmlManipulation

empty_cib = rc("cib-empty-2.0.xml")
empty_cib_rules = rc("cib-empty-3.4.xml")


class TestDefaultsMixin:
    def setUp(self):
        # pylint: disable=invalid-name
        self.temp_cib = get_tmp_file("tier1_cib_options")
        self.pcs_runner = PcsRunner(self.temp_cib.name)

    def tearDown(self):
        # pylint: disable=invalid-name
        self.temp_cib.close()


class DefaultsConfigMixin(TestDefaultsMixin, AssertPcsMixin):
Example #50
File: test_ssl.py  Project: wuyeliang/pcs
import os
from unittest import TestCase

from pcs_test.tools.misc import get_test_resource as rc

from pcs.daemon.ssl import PcsdSSL, CertKeyPair, SSLCertKeyException

SERVER_NAME = "pcsd-daemon"
SSL_OPTIONS = 0
SSL_CIPHERS = "DEFAULT:!RC4"
CERT = rc("daemon.cert")
KEY = rc("daemon.key")


def remove_ssl_files():
    if os.path.exists(CERT):
        os.remove(CERT)
    if os.path.exists(KEY):
        os.remove(KEY)


def damage_ssl_files():
    with open(CERT, "w") as cert:
        cert.write("bad content")
    with open(KEY, "w") as key:
        key.write("bad content")


# various versions of OpenSSL / PyOpenSSL emit different messages
DAMAGED_SSL_FILES_ERRORS_1 = (
    f"Invalid SSL certificate '{CERT}':"
Example #51
class ResourceStonithStatusBase(AssertPcsMixin):
    # pylint: disable=too-many-public-methods
    command = None
    no_resources_msg = None
    all_resources_output = None
    active_resources_output = None
    active_resources_output_node = None
    node_output = None
    cib_file = rc("cib-tags.xml")
    corosync_conf = rc("corosync.conf")
    no_active_resources_msg = "No active resources\n"

    def setUp(self):
        # pylint: disable=invalid-name
        self.temp_cib = get_tmp_file("tier1_status_resource_stonith_status")
        write_file_to_tmpfile(self.cib_file, self.temp_cib)
        self.pcs_runner = PcsRunner(self.temp_cib.name)

    def tearDown(self):
        # pylint: disable=invalid-name
        self.temp_cib.close()

    def test_not_resource_or_tag_id(self):
        self.assert_pcs_fail(
            self.command + ["cx1"],
            stdout_full="Error: resource or tag id 'cx1' not found\n",
        )

    def test_nonexistent_id(self):
        self.assert_pcs_fail(
            self.command + ["nonexistent"],
            stdout_full="Error: resource or tag id 'nonexistent' not found\n",
        )

    def test_missing_node_value(self):
        self.assert_pcs_fail(
            self.command + ["node="],
            stdout_full="Error: missing value of 'node' option\n",
        )

    def test_missing_node_key(self):
        self.assert_pcs_fail(
            self.command + ["=node"],
            stdout_full="Error: missing key in '=node' option\n",
        )

    def test_more_node_options(self):
        self.assert_pcs_fail(
            self.command + ["node=rh-1", "node=rh-2"],
            stdout_full=(
                "Error: duplicate option 'node' with different values 'rh-1' "
                "and 'rh-2'\n"),
        )

    def test_more_no_node_option(self):
        self.assert_pcs_fail(
            self.command + ["r1", "r2"],
            stdout_full="Error: missing value of 'r2' option\n",
        )

    def test_resource_id(self):
        if is_pacemaker_21_without_20_compatibility():
            stdout_full = "  * x1	(ocf:pacemaker:Dummy):	 Started rh-1\n"
        else:
            stdout_full = "  * x1	(ocf::pacemaker:Dummy):	 Started rh-1\n"
        self.assert_pcs_success(
            self.command + ["x1"],
            stdout_full=stdout_full,
        )

    def test_resource_id_hide_inactive(self):
        self.assert_pcs_success(
            self.command + ["x2", "--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_resource_id_with_node_hide_inactive(self):
        self.assert_pcs_success(
            self.command + ["x2", "node=rh-1", "--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_resource_id_with_node_started(self):
        if is_pacemaker_21_without_20_compatibility():
            stdout_full = "  * x1	(ocf:pacemaker:Dummy):	 Started rh-1\n"
        else:
            stdout_full = "  * x1	(ocf::pacemaker:Dummy):	 Started rh-1\n"
        self.assert_pcs_success(
            self.command + ["x1", "node=rh-1"],
            stdout_full=stdout_full,
        )

    def test_resource_id_with_node_stopped(self):
        if is_pacemaker_21_without_20_compatibility():
            stdout_full = "  * x2	(ocf:pacemaker:Dummy):	 Stopped\n"
        else:
            stdout_full = "  * x2	(ocf::pacemaker:Dummy):	 Stopped\n"
        self.assert_pcs_success(
            self.command + ["x2", "node=rh-1"],
            stdout_full=stdout_full,
        )

    def test_resource_id_with_node_without_status(self):
        self.assert_pcs_success(
            self.command + ["x1", "node=rh-2"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_resource_id_with_node_changed_arg_order(self):
        if is_pacemaker_21_without_20_compatibility():
            stdout_full = "  * x1	(ocf:pacemaker:Dummy):	 Started rh-1\n"
        else:
            stdout_full = "  * x1	(ocf::pacemaker:Dummy):	 Started rh-1\n"
        self.assert_pcs_success(
            self.command + ["node=rh-1", "x1"],
            stdout_full=stdout_full,
        )

    def test_stonith_id(self):
        self.assert_pcs_success(
            self.command + ["fence-rh-1"],
            stdout_full="  * fence-rh-1	(stonith:fence_xvm):	 Started rh-1\n",
        )

    def test_stonith_id_hide_inactive(self):
        self.assert_pcs_success(
            self.command + ["fence-rh-2", "--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_stonith_id_with_node_hide_inactive(self):
        self.assert_pcs_success(
            self.command + ["fence-rh-2", "node=rh-2", "--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_stonith_id_with_node_started(self):
        self.assert_pcs_success(
            self.command + ["fence-rh-1", "node=rh-1"],
            stdout_full="  * fence-rh-1	(stonith:fence_xvm):	 Started rh-1\n",
        )

    def test_stonith_id_with_node_stopped(self):
        self.assert_pcs_success(
            self.command + ["fence-rh-2", "node=rh-2"],
            stdout_full="  * fence-rh-2	(stonith:fence_xvm):	 Stopped\n",
        )

    def test_stonith_id_with_node_without_status(self):
        self.assert_pcs_success(
            self.command + ["fence-rh-1", "node=rh-2"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_tag_id(self):
        if is_pacemaker_21_without_20_compatibility():
            stdout_full = outdent("""\
                  * fence-rh-1	(stonith:fence_xvm):	 Started rh-1
                  * fence-rh-2	(stonith:fence_xvm):	 Stopped
                  * x3	(ocf:pacemaker:Dummy):	 Stopped
                  * y1	(ocf:pacemaker:Dummy):	 Stopped
                """)
        else:
            stdout_full = outdent("""\
                  * fence-rh-1	(stonith:fence_xvm):	 Started rh-1
                  * fence-rh-2	(stonith:fence_xvm):	 Stopped
                  * x3	(ocf::pacemaker:Dummy):	 Stopped
                  * y1	(ocf::pacemaker:Dummy):	 Stopped
                """)
        self.assert_pcs_success(
            self.command + ["tag-mixed-stonith-devices-and-resources"],
            stdout_full=stdout_full,
        )

    def test_tag_id_hide_inactive(self):
        self.assert_pcs_success(
            self.command +
            ["tag-mixed-stonith-devices-and-resources", "--hide-inactive"],
            stdout_full=outdent("""\
                  * fence-rh-1	(stonith:fence_xvm):	 Started rh-1
                """),
        )

    def test_tag_id_with_node(self):
        if is_pacemaker_21_without_20_compatibility():
            stdout_full = outdent("""\
                  * fence-rh-2	(stonith:fence_xvm):	 Stopped
                  * x3	(ocf:pacemaker:Dummy):	 Stopped
                  * y1	(ocf:pacemaker:Dummy):	 Stopped
                """)
        else:
            stdout_full = outdent("""\
                  * fence-rh-2	(stonith:fence_xvm):	 Stopped
                  * x3	(ocf::pacemaker:Dummy):	 Stopped
                  * y1	(ocf::pacemaker:Dummy):	 Stopped
                """)
        self.assert_pcs_success(
            self.command +
            ["tag-mixed-stonith-devices-and-resources", "node=rh-2"],
            stdout_full=stdout_full,
        )

    def test_tag_id_with_node_hide_inactive(self):
        self.assert_pcs_success(
            self.command + [
                "tag-mixed-stonith-devices-and-resources",
                "node=rh-1",
                "--hide-inactive",
            ],
            stdout_full=outdent("""\
                  * fence-rh-1	(stonith:fence_xvm):	 Started rh-1
                """),
        )

    def test_resource_status_without_id(self):
        self.assert_pcs_success(self.command,
                                stdout_full=self.all_resources_output)

    def test_resource_status_without_id_hide_inactive(self):
        self.assert_pcs_success(
            self.command + ["--hide-inactive"],
            stdout_full=self.active_resources_output,
        )

    def test_resource_status_without_id_with_node(self):
        self.assert_pcs_success(self.command + ["node=rh-1"],
                                stdout_full=self.node_output)

    def test_resource_status_without_id_with_node_hide_inactive(self):
        self.assert_pcs_success(
            self.command + ["node=rh-1", "--hide-inactive"],
            stdout_full=self.active_resources_output_node,
        )

    def test_resource_status_without_id_default_command(self):
        self.assert_pcs_success(self.command[:-1],
                                stdout_full=self.all_resources_output)

    def test_resource_status_without_id_default_command_hide_inactive(self):
        self.assert_pcs_success(
            self.command[:-1] + ["--hide-inactive"],
            stdout_full=self.active_resources_output,
        )

    def test_status_no_resources(self):
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.assert_pcs_success(self.command,
                                stdout_full=self.no_resources_msg)

    def test_status_no_resources_hide_inactive(self):
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.assert_pcs_success(
            self.command + ["--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_status_no_resources_with_node(self):
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.assert_pcs_success(
            self.command + ["node=rh-1"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_status_no_resources_with_node_hide_inactive(self):
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.assert_pcs_success(
            self.command + ["node=rh-1", "--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )

    def test_status_no_resources_default_command(self):
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.assert_pcs_success(self.command[:-1],
                                stdout_full=self.no_resources_msg)

    def test_status_no_resources_default_command_hide_inactive(self):
        write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
        self.assert_pcs_success(
            self.command[:-1] + ["--hide-inactive"],
            stdout_full=self.no_active_resources_msg,
        )
Example #52
class Unclone(
    TestCase,
    get_assert_pcs_effect_mixin(
        lambda cib: etree.tostring(
            # pylint:disable=undefined-variable
            etree.parse(cib).findall(".//resources")[0]
        )
    ),
):
    empty_cib = rc("cib-empty.xml")

    def assert_tags_xml(self, expected_xml):
        self.assert_resources_xml_in_cib(
            expected_xml,
            get_cib_part_func=lambda cib: etree.tostring(
                etree.parse(cib).findall(".//tags")[0],
            ),
        )

    def assert_constraint_xml(self, expected_xml):
        self.assert_resources_xml_in_cib(
            expected_xml,
            get_cib_part_func=lambda cib: etree.tostring(
                etree.parse(cib).findall(".//constraints")[0],
            ),
        )

    def setUp(self):
        # pylint: disable=invalid-name
        self.temp_cib = get_tmp_file("tier1_cib_resource_group_ungroup")
        self.pcs_runner = PcsRunner(self.temp_cib.name)
        xml_manip = XmlManipulation.from_file(self.empty_cib)
        xml_manip.append_to_first_tag_name(
            "resources",
            FIXTURE_CLONE,
            FIXTURE_DUMMY,
        )
        xml_manip.append_to_first_tag_name(
            "configuration",
            FIXTURE_TAGS_CONFIG_XML,
        )
        xml_manip.append_to_first_tag_name(
            "constraints",
            """
            <rsc_location id="location-C-clone-rh7-1-INFINITY" node="rh7-1"
                rsc="C-clone" score="INFINITY"/>
            """,
            """
            <rsc_location id="location-TagCloneOnly-rh7-1-INFINITY"
                node="rh7-1" rsc="TagCloneOnly" score="INFINITY"/>
            """,
        )
        write_data_to_tmpfile(str(xml_manip), self.temp_cib)

    def tearDown(self):
        # pylint: disable=invalid-name
        self.temp_cib.close()

    def test_nonexistent_clone(self):
        self.assert_pcs_fail(
            "resource unclone NonExistentClone".split(),
            "Error: could not find resource: NonExistentClone\n",
        )
        self.assert_resources_xml_in_cib(FIXTURE_CLONE_AND_RESOURCE)
        self.assert_tags_xml(FIXTURE_TAGS_CONFIG_XML)
        self.assert_constraint_xml(FIXTURE_CONSTRAINTS_CONFIG_XML)

    def test_not_clone_resource(self):
        self.assert_pcs_fail(
            "resource unclone Dummy".split(),
            "Error: 'Dummy' is not a clone resource\n",
        )
        self.assert_resources_xml_in_cib(FIXTURE_CLONE_AND_RESOURCE)
        self.assert_tags_xml(FIXTURE_TAGS_CONFIG_XML)
        self.assert_constraint_xml(FIXTURE_CONSTRAINTS_CONFIG_XML)

    def test_unclone_clone_id(self):
        self.assert_effect(
            "resource unclone C-clone".split(), FIXTURE_RESOURCES
        )
        self.assert_tags_xml(FIXTURE_TAGS_RESULT_XML)
        self.assert_constraint_xml("<constraints/>")

    def test_unclone_resource_id(self):
        self.assert_effect("resource unclone C".split(), FIXTURE_RESOURCES)
        self.assert_tags_xml(FIXTURE_TAGS_RESULT_XML)
        self.assert_constraint_xml("<constraints/>")
Example #53
 def test_status_no_resources_default_command(self):
     write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
     self.assert_pcs_success(self.command[:-1],
                             stdout_full=self.no_resources_msg)
Example #54
class Clone(
    TestCase,
    get_assert_pcs_effect_mixin(
        lambda cib: etree.tostring(
            # pylint:disable=undefined-variable
            etree.parse(cib).findall(".//resources")[0]
        )
    ),
):
    # pylint: disable=too-many-public-methods
    empty_cib = rc("cib-empty.xml")

    def setUp(self):
        self.temp_cib = get_tmp_file("tier1_cib_resource_clone_unclone_clone")
        self.pcs_runner = PcsRunner(self.temp_cib.name)
        self.set_cib_file(FIXTURE_PRIMITIVE_FOR_CLONE)
        self.stonith_deprecation_warning = (
            "Deprecation Warning: Ability of this command to accept stonith "
            "resources is deprecated and will be removed in a future release.\n"
        )

    def tearDown(self):
        self.temp_cib.close()

    def set_cib_file(self, *xml_string_list):
        xml_manip = XmlManipulation.from_file(self.empty_cib)
        xml_manip.append_to_first_tag_name("resources", *xml_string_list)
        write_data_to_tmpfile(str(xml_manip), self.temp_cib)

    def test_clone(self):
        self.assert_effect(
            "resource clone C".split(),
            fixture_resources_xml(fixture_clone("C-clone", "C")),
        )

    def test_clone_custom_id(self):
        self.assert_effect(
            "resource clone C CustomCloneId".split(),
            fixture_resources_xml(fixture_clone("CustomCloneId", "C")),
        )

    def test_clone_id_increment(self):
        self.set_cib_file(
            fixture_clone("C-clone", "Dummy"),
            FIXTURE_PRIMITIVE_FOR_CLONE,
        )
        self.assert_effect(
            "resource clone C".split(),
            fixture_resources_xml(
                fixture_clone("C-clone", "Dummy"),
                fixture_clone("C-clone-1", "C"),
            ),
        )

    def test_clone_id_is_stonith(self):
        self.set_cib_file(FIXTURE_STONITH_FOR_CLONE)
        self.assert_pcs_fail(
            "resource clone fence-device".split(),
            self.stonith_deprecation_warning + fixture_clone_stonith_msg(),
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_STONITH_FOR_CLONE)
        )

    def test_clone_id_is_stonith_forced(self):
        self.set_cib_file(FIXTURE_STONITH_FOR_CLONE)
        self.assert_effect(
            "resource clone fence-device --force".split(),
            fixture_resources_xml(FIXTURE_STONITH_CLONE),
            output=self.stonith_deprecation_warning
            + fixture_clone_stonith_msg(forced=True),
        )

    def test_clone_group_with_stonith(self):
        self.set_cib_file(FIXTURE_GROUP_WITH_STONITH)
        self.assert_effect(
            "resource clone Group".split(),
            fixture_resources_xml(FIXTURE_CLONED_GROUP_WITH_STONITH),
        )

    def test_clone_group_with_stonith_forced(self):
        self.set_cib_file(FIXTURE_GROUP_WITH_STONITH)
        self.assert_effect(
            "resource clone Group --force".split(),
            fixture_resources_xml(FIXTURE_CLONED_GROUP_WITH_STONITH),
        )

    def test_promotable_clone(self):
        self.assert_effect(
            "resource promotable C".split(),
            fixture_resources_xml(
                fixture_clone("C-clone", "C", promotable=True)
            ),
        )

    def test_promotable_clone_custom_id(self):
        self.assert_effect(
            "resource promotable C CustomPromotableId".split(),
            fixture_resources_xml(
                fixture_clone("CustomPromotableId", "C", promotable=True)
            ),
        )

    def test_promotable_clone_id_is_stonith(self):
        self.set_cib_file(FIXTURE_STONITH_FOR_CLONE)
        self.assert_pcs_fail(
            "resource promotable fence-device".split(),
            self.stonith_deprecation_warning + fixture_clone_stonith_msg(),
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_STONITH_FOR_CLONE)
        )

    def test_promotable_clone_id_is_stonith_forced(self):
        self.set_cib_file(FIXTURE_STONITH_FOR_CLONE)
        self.assert_effect(
            "resource promotable fence-device --force".split(),
            fixture_resources_xml(FIXTURE_STONITH_PROMOTABLE),
            output=self.stonith_deprecation_warning
            + fixture_clone_stonith_msg(forced=True),
        )

    def test_promotable_keyword_and_option(self):
        self.assert_pcs_fail(
            "resource promotable C CustomCloneId promotable=false".split(),
            (
                "Error: you cannot specify both promotable option and "
                "promotable keyword\n"
            ),
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_PRIMITIVE_FOR_CLONE)
        )

    def test_clone_with_options(self):
        self.assert_effect(
            (
                "resource clone C CustomCloneId globally-unique=true meta a=b "
                "c=d"
            ).split(),
            fixture_resources_xml(FIXTURE_CLONE_WITH_OPTIONS),
        )

    def test_group_last_member(self):
        self.set_cib_file(FIXTURE_GROUP_LAST_MEMBER)
        self.assert_effect(
            "resource clone C".split(),
            fixture_resources_xml(fixture_clone("C-clone", "C")),
        )

    def test_nonexistent_resource(self):
        self.assert_pcs_fail(
            "resource clone NonExistentClone".split(),
            "Error: unable to find group or resource: NonExistentClone\n",
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_PRIMITIVE_FOR_CLONE)
        )

    def test_invalid_clone_id(self):
        self.assert_pcs_fail(
            "resource clone C 1invalid".split(),
            "Error: invalid id '1invalid'\n",
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_PRIMITIVE_FOR_CLONE)
        )

    def test_clone_id_already_exist(self):
        self.assert_pcs_fail(
            "resource clone C C".split(),
            "Error: id 'C' already exists\n",
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_PRIMITIVE_FOR_CLONE)
        )

    def test_group_already_cloned(self):
        self.set_cib_file(FIXTURE_CLONED_GROUP)
        self.assert_pcs_fail(
            "resource clone Group".split(),
            "Error: cannot clone a group that has already been cloned\n",
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_CLONED_GROUP)
        )

    def test_already_a_clone_resource(self):
        self.set_cib_file(FIXTURE_CLONED_GROUP)
        self.assert_pcs_fail(
            "resource clone G1".split(),
            "Error: G1 is already a clone resource\n",
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_CLONED_GROUP)
        )

    def test_bundle_resource(self):
        self.set_cib_file(FIXTURE_BUNDLE_RESOURCE)
        self.assert_pcs_fail(
            "resource clone Dummy".split(),
            "Error: cannot clone bundle resource\n",
        )
        self.assert_resources_xml_in_cib(
            fixture_resources_xml(FIXTURE_BUNDLE_RESOURCE)
        )
Example #55
 def test_status_no_resources_default_command_hide_inactive(self):
     self.pcs_runner.corosync_conf_opt = self.corosync_conf
     write_file_to_tmpfile(rc("cib-empty.xml"), self.temp_cib)
     self.assert_pcs_success(["status", "--hide-inactive"],
                             stdout_start=self.no_resources_status)
Example #56
import shutil
from textwrap import dedent
from unittest import TestCase

from pcs_test.tools.assertions import AssertPcsMixin
from pcs_test.tools.misc import (
    get_test_resource as rc,
    ParametrizedTestMetaClass,
)
from pcs_test.tools.pcs_runner import PcsRunner

# pylint: disable=line-too-long

coro_conf = rc("corosync.conf")
coro_qdevice_conf = rc("corosync-3nodes-qdevice.conf")
coro_qdevice_heuristics_conf = rc("corosync-3nodes-qdevice-heuristics.conf")
temp_conf = rc("corosync.conf.tmp")


class TestBase(TestCase, AssertPcsMixin):
    def setUp(self):
        shutil.copy(coro_conf, temp_conf)
        # The tested commands work differently when non-live corosync.conf
        # (--corosync_conf) is used. In these tests it is not possible to cover
        # all the live config behavior, so we stick to using a non-live config.
        # Live behavior is tested in pcs_test.tier0.lib.commands.test_quorum.
        self.pcs_runner = PcsRunner(cib_file=None, corosync_conf_opt=temp_conf)

    @staticmethod
    def fixture_conf_qdevice():
        shutil.copy(coro_qdevice_conf, temp_conf)
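A hedged sketch of a test built on TestBase above: it swaps the temporary non-live config for the qdevice fixture and then checks a pcs quorum command through --corosync_conf. The command and the expected output prefix are illustrative assumptions, not taken from the original test file:

class ExampleQdeviceConfigTest(TestBase):
    def test_config_lists_qdevice(self):
        # start from the corosync.conf fixture that already contains a qdevice
        self.fixture_conf_qdevice()
        # pcs_runner passes temp_conf via --corosync_conf, so no live cluster is needed
        self.assert_pcs_success(
            "quorum config".split(),
            stdout_start="Options:",
        )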
Example #57
import logging
import os

from pcs.daemon import auth
from pcs.daemon.app import ui
from pcs_test.tier0.daemon.app import fixtures_app
from pcs_test.tools.misc import (
    create_setup_patch_mixin,
    get_test_resource as rc,
)

USER = "******"
PASSWORD = "******"
LOGIN_BODY = {"username": USER, "password": PASSWORD}
PUBLIC_DIR = rc("web_public")
PREFIX = "/ui/"
SPA_DIR = os.path.join(PUBLIC_DIR, PREFIX)
FALLBACK = os.path.join(PUBLIC_DIR, "fallback.html")
INDEX = os.path.join(SPA_DIR, "index.html")

if not os.path.exists(SPA_DIR):
    os.makedirs(SPA_DIR)

# Don't write errors to test output.
logging.getLogger("tornado.access").setLevel(logging.CRITICAL)

class AppTest(
    fixtures_app.AppUiTest, create_setup_patch_mixin(ui.app_session)
):
    def setUp(self):
        self.index_content = "<html/>"
Example #58
def create_wrapper():
    return ruby_pcsd.Wrapper(
        rc("/path/to/gem_home"),
        rc("/path/to/pcsd/cmdline/entry"),
    )