Example #1
    def setUp(self):
        super(TestSSPDiskAdapter, self).setUp()

        self.inst = powervm.TEST_INSTANCE

        self.apt = mock.Mock()
        self.host_uuid = 'host_uuid'

        self.ssp_wrap = mock.create_autospec(pvm_stg.SSP, instance=True)

        # SSP.refresh() returns itself
        self.ssp_wrap.refresh.return_value = self.ssp_wrap
        self.node1 = mock.create_autospec(pvm_clust.Node, instance=True)
        self.node2 = mock.create_autospec(pvm_clust.Node, instance=True)
        self.clust_wrap = mock.create_autospec(pvm_clust.Cluster,
                                               instance=True)
        self.clust_wrap.nodes = [self.node1, self.node2]
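        # Cluster.refresh() returns itself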
        self.clust_wrap.refresh.return_value = self.clust_wrap
        self.tier_wrap = mock.create_autospec(pvm_stg.Tier, instance=True)
        # Tier.refresh() returns itself
        self.tier_wrap.refresh.return_value = self.tier_wrap
        self.vio_wrap = mock.create_autospec(pvm_vios.VIOS, instance=True)

        # For _cluster
        self.mock_clust = self.useFixture(
            fixtures.MockPatch('pypowervm.wrappers.cluster.Cluster',
                               autospec=True)).mock
        self.mock_clust.get.return_value = [self.clust_wrap]

        # For _ssp
        self.mock_ssp_gbhref = self.useFixture(
            fixtures.MockPatch(
                'pypowervm.wrappers.storage.SSP.get_by_href')).mock
        self.mock_ssp_gbhref.return_value = self.ssp_wrap

        # For _tier
        self.mock_get_tier = self.useFixture(
            fixtures.MockPatch('pypowervm.tasks.storage.default_tier_for_ssp',
                               autospec=True)).mock
        self.mock_get_tier.return_value = self.tier_wrap

        # A FeedTask with a single WrapperTask for the mocked VIOS
        self.mock_wtsk = mock.create_autospec(pvm_tx.WrapperTask,
                                              instance=True)
        self.mock_wtsk.configure_mock(wrapper=self.vio_wrap)
        self.mock_ftsk = mock.create_autospec(pvm_tx.FeedTask, instance=True)
        self.mock_afs = self.mock_ftsk.add_functor_subtask
        self.mock_ftsk.configure_mock(
            wrapper_tasks={self.vio_wrap.uuid: self.mock_wtsk})

        self.pvm_uuid = self.useFixture(
            fixtures.MockPatch('nova.virt.powervm.vm.get_pvm_uuid')).mock

        # Return the management partition UUID
        self.mgmt_uuid = self.useFixture(
            fixtures.MockPatch('nova.virt.powervm.mgmt.mgmt_uuid')).mock
        self.mgmt_uuid.return_value = 'mp_uuid'

        # The SSP disk adapter
        self.ssp_drv = ssp_dvr.SSPDiskAdapter(self.apt, self.host_uuid)
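
Example #1 leans heavily on the fixtures.MockPatch / useFixture pattern: each patch is registered with the fixture machinery so it is torn down automatically when the test finishes, and the .mock attribute hands back the underlying Mock for configuring return values. Below is a minimal, self-contained sketch of that pattern; the ExampleTest class and the time.sleep target are illustrative only and are not part of the nova test suite.

import time

import fixtures
import testtools


class ExampleTest(testtools.TestCase):
    def setUp(self):
        super(ExampleTest, self).setUp()
        # MockPatch swaps out the target for the lifetime of the test and is
        # reverted automatically on cleanup; .mock exposes the Mock object,
        # the same pattern used above for get_pvm_uuid and mgmt_uuid.
        self.mock_sleep = self.useFixture(
            fixtures.MockPatch('time.sleep', autospec=True)).mock

    def test_sleep_is_patched(self):
        time.sleep(5)  # does not actually sleep; the mock records the call
        self.mock_sleep.assert_called_once_with(5)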
Example #2
    def init_host(self, host):
        """Initialize anything that is necessary for the driver to function.

        Includes catching up with currently running VMs on the given host.
        """
        # Build the adapter. May need to attempt the connection multiple times
        # in case the PowerVM management API service is starting.
        # TODO(efried): Implement async compute service enable/disable like
        # I73a34eb6e0ca32d03e54d12a5e066b2ed4f19a61
        self.adapter = pvm_apt.Adapter(
            pvm_apt.Session(conn_tries=60),
            helpers=[log_hlp.log_helper, vio_hlp.vios_busy_retry_helper])
        # Make sure the Virtual I/O Server(s) are available.
        pvm_par.validate_vios_ready(self.adapter)
        self.host_wrapper = pvm_ms.System.get(self.adapter)[0]

        # Do a scrub of the I/O plane to make sure the system is in good shape
        LOG.info("Clearing stale I/O connections on driver init.")
        pvm_stor.ComprehensiveScrub(self.adapter).execute()

        # Initialize the disk adapter
        # TODO(efried): Other disk adapters (localdisk), by conf selection.
        self.disk_dvr = ssp.SSPDiskAdapter(self.adapter,
                                           self.host_wrapper.uuid)
        self.image_api = image.API()

        LOG.info("The PowerVM compute driver has been initialized.")