Example No. 1
 def setUp(self):
     super(DiskAdapter, self).setUp()
     self.std_disk_adpt_fx = self.useFixture(
         fixtures.MockPatch('nova_powervm.virt.powervm.disk.localdisk.'
                            'LocalStorage'))
     self.std_disk_adpt = self.std_disk_adpt_fx.mock
Example No. 2
 def setUp(self):
     super(TestAccountGeneratorV2, self).setUp()
     self.mock_config_and_opts(self.identity_version)
     self.useFixture(fixtures.MockPatch(
         'tempest.lib.auth.AuthProvider.set_auth',
         return_value=self.identity_response))
Example No. 3
 def _setUp(self):
     self.useFixture(
         fixtures.MockPatch(
             "snapcraft.plugins.get_plugin_for_base", side_effect=self.get_plugin
         )
     )
Example No. 4
    def setUp(self):
        super().setUp()

        self.useFixture(fixtures.MockPatch("os.path.exists",
                                           return_value=True))
Example No. 5
 def mockpatch(self, methstr):
     return self.useFixture(fixtures.MockPatch(methstr)).mock
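
A helper like this is typically invoked from individual tests, which then configure and assert against the returned mock. The sketch below is illustrative only: the patched target, the loader object, and the test name are hypothetical and not taken from the example above.

    def test_config_file_is_read(self):
        # Hypothetical test built on the mockpatch() helper above.
        m_exists = self.mockpatch('os.path.exists')
        m_exists.return_value = True

        self.loader.load('/etc/example.conf')  # hypothetical code under test

        m_exists.assert_called_once_with('/etc/example.conf')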
Example No. 6
    def setUp(self):
        p = 'keystonemiddleware.audit._notifier.oslo_messaging'
        f = fixtures.MockPatch(p, None)
        self.messaging_fixture = self.useFixture(f)

        super(TestLoggingNotifier, self).setUp()
Example No. 7
 def setUp(self):
     super(TestServiceClients, self).setUp()
     self.useFixture(
         fixtures.MockPatch('tempest.lib.services.clients.tempest_modules',
                            return_value=set(['fake_service1'])))
Example No. 8
 def setUp(self):
     super(OpenStackScenarioTestCase, self).setUp()
     self.osclients = fixtures.MockPatch("rally.osclients.Clients")
     self.useFixture(self.osclients)
     self.context = test.get_test_context()
     self.context.update({"foo": "bar"})
Example No. 9
    def setUp(self):
        super(_LibvirtEvacuateTest, self).setUp()

        self.useFixture(nova_fixtures.CinderFixture(self))
        self.useFixture(nova_fixtures.NeutronFixture(self))
        self.useFixture(nova_fixtures.GlanceFixture(self))
        self.useFixture(func_fixtures.PlacementFixture())
        fake_network.set_stub_network_methods(self)

        api_fixture = self.useFixture(
            nova_fixtures.OSAPIFixture(api_version='v2.1'))

        self.api = api_fixture.admin_api
        # force_down and evacuate without onSharedStorage
        self.api.microversion = '2.14'

        fake_notifier.stub_notifier(self)
        self.addCleanup(fake_notifier.reset)

        self.useFixture(fakelibvirt.FakeLibvirtFixture())

        # Fake out all the details of volume connection
        self.useFixture(
            fixtures.MockPatch(
                'nova.virt.libvirt.driver.LibvirtDriver.get_volume_connector'))
        self.useFixture(
            fixtures.MockPatch(
                'nova.virt.libvirt.driver.LibvirtDriver._connect_volume'))
        # For cleanup
        self.useFixture(
            fixtures.MockPatch(
                'nova.virt.libvirt.driver.LibvirtDriver._disconnect_volume'))

        volume_config = libvirt_config.LibvirtConfigGuestDisk()
        volume_config.driver_name = 'fake-volume-driver'
        volume_config.source_path = 'fake-source-path'
        volume_config.target_dev = 'fake-target-dev'
        volume_config.target_bus = 'fake-target-bus'
        get_volume_config = self.useFixture(
            fixtures.MockPatch(
                'nova.virt.libvirt.driver.LibvirtDriver._get_volume_config')
        ).mock
        get_volume_config.return_value = volume_config

        # Ensure our computes report lots of available disk, vcpu, and ram
        lots = 10000000
        get_local_gb_info = self.useFixture(
            fixtures.MockPatch(
                'nova.virt.libvirt.driver.LibvirtDriver._get_local_gb_info')
        ).mock
        get_local_gb_info.return_value = {
            'total': lots,
            'free': lots,
            'used': 1
        }
        get_vcpu_available = self.useFixture(
            fixtures.MockPatch(
                'nova.virt.libvirt.driver.LibvirtDriver._get_vcpu_available')
        ).mock
        get_vcpu_available.return_value = set(cpu for cpu in range(24))
        get_memory_mb_total = self.useFixture(
            fixtures.MockPatch(
                'nova.virt.libvirt.host.Host.get_memory_mb_total')).mock
        get_memory_mb_total.return_value = lots

        # Mock out adding rng devices
        self.useFixture(
            fixtures.MockPatch(
                'nova.virt.libvirt.driver.LibvirtDriver._add_rng_device')).mock

        self.start_service('conductor')
        self.start_service('scheduler')

        self.flags(compute_driver='libvirt.LibvirtDriver')

        ctxt = context.get_admin_context()
        for flavor in FLAVOR_FIXTURES:
            objects.Flavor(context=ctxt, **flavor).create()
Example No. 10
 def setUp(self):
     self.client = self.useFixture(
         fixtures.MockPatch(
             'aodh.evaluator.gnocchi.client')).mock.Client.return_value
     self.prepared_alarms = [
         models.Alarm(name='instance_running_hot',
                      description='instance_running_hot',
                      type='gnocchi_resources_threshold',
                      enabled=True,
                      user_id='foobar',
                      project_id='snafu',
                      alarm_id=uuidutils.generate_uuid(),
                      state='insufficient data',
                      state_reason='insufficient data',
                      state_timestamp=constants.MIN_DATETIME,
                      timestamp=constants.MIN_DATETIME,
                      insufficient_data_actions=[],
                      ok_actions=[],
                      alarm_actions=[],
                      repeat_actions=False,
                      time_constraints=[],
                      severity='low',
                      rule=dict(comparison_operator='gt',
                                threshold=80.0,
                                evaluation_periods=5,
                                aggregation_method='mean',
                                granularity=60,
                                metric='cpu_util',
                                resource_type='instance',
                                resource_id='my_instance')),
         models.Alarm(
             name='group_running_idle',
             description='group_running_idle',
             type='gnocchi_aggregation_by_metrics_threshold',
             enabled=True,
             user_id='foobar',
             project_id='snafu',
             state='insufficient data',
             state_reason='insufficient data',
             state_timestamp=constants.MIN_DATETIME,
             timestamp=constants.MIN_DATETIME,
             insufficient_data_actions=[],
             ok_actions=[],
             alarm_actions=[],
             repeat_actions=False,
             alarm_id=uuidutils.generate_uuid(),
             time_constraints=[],
             rule=dict(comparison_operator='le',
                       threshold=10.0,
                       evaluation_periods=4,
                       aggregation_method='max',
                       granularity=300,
                       metrics=[
                           '0bb1604d-1193-4c0a-b4b8-74b170e35e83',
                           '9ddc209f-42f8-41e1-b8f1-8804f59c4053'
                       ]),
         ),
         models.Alarm(name='instance_not_running',
                      description='instance_running_hot',
                      type='gnocchi_aggregation_by_resources_threshold',
                      enabled=True,
                      user_id='foobar',
                      project_id='snafu',
                      alarm_id=uuidutils.generate_uuid(),
                      state='insufficient data',
                      state_reason='insufficient data',
                      state_timestamp=constants.MIN_DATETIME,
                      timestamp=constants.MIN_DATETIME,
                      insufficient_data_actions=[],
                      ok_actions=[],
                      alarm_actions=[],
                      repeat_actions=False,
                      time_constraints=[],
                      rule=dict(comparison_operator='gt',
                                threshold=80.0,
                                evaluation_periods=6,
                                aggregation_method='mean',
                                granularity=50,
                                metric='cpu_util',
                                resource_type='instance',
                                query='{"=": {"server_group": '
                                '"my_autoscaling_group"}}')),
     ]
     super(TestGnocchiEvaluatorBase, self).setUp()
Example No. 11
    def setUp(self):
        super().setUp()

        # Our experimental environment variable is sticky
        self.useFixture(
            fixtures.EnvironmentVariable(
                "SNAPCRAFT_EXPERIMENTAL_PROGRESSIVE_RELEASES", None
            )
        )

        self.fake_store_login = fixtures.MockPatchObject(storeapi.StoreClient, "login")
        self.useFixture(self.fake_store_login)

        self.fake_store_register = fixtures.MockPatchObject(
            storeapi._dashboard_api.DashboardAPI, "register"
        )
        self.useFixture(self.fake_store_register)

        self.fake_store_account_info = fixtures.MockPatchObject(
            storeapi._dashboard_api.DashboardAPI,
            "get_account_information",
            return_value={
                "account_id": "abcd",
                "account_keys": list(),
                "snaps": {
                    "16": {
                        "snap-test": {
                            "snap-id": "snap-test-snap-id",
                            "status": "Approved",
                            "private": False,
                            "since": "2016-12-12T01:01Z",
                            "price": "0",
                        },
                        "basic": {
                            "snap-id": "basic-snap-id",
                            "status": "Approved",
                            "private": False,
                            "since": "2016-12-12T01:01Z",
                            "price": "0",
                        },
                    }
                },
            },
        )
        self.useFixture(self.fake_store_account_info)

        self.fake_store_status = fixtures.MockPatchObject(
            storeapi._dashboard_api.DashboardAPI, "snap_status", return_value=dict()
        )
        self.useFixture(self.fake_store_status)

        self.fake_store_release = fixtures.MockPatchObject(
            storeapi.StoreClient, "release"
        )
        self.useFixture(self.fake_store_release)

        self.fake_store_register_key = fixtures.MockPatchObject(
            storeapi._dashboard_api.DashboardAPI, "register_key"
        )
        self.useFixture(self.fake_store_register_key)

        # channel-map endpoint
        self.channel_map = ChannelMap.unmarshal(
            {
                "channel-map": [
                    {
                        "architecture": "amd64",
                        "channel": "2.1/beta",
                        "expiration-date": None,
                        "revision": 19,
                        "progressive": {
                            "paused": None,
                            "percentage": None,
                            "current-percentage": None,
                        },
                    },
                    {
                        "architecture": "amd64",
                        "channel": "2.0/beta",
                        "expiration-date": None,
                        "revision": 18,
                        "progressive": {
                            "paused": None,
                            "percentage": None,
                            "current-percentage": None,
                        },
                    },
                ],
                "revisions": [
                    {"architectures": ["amd64"], "revision": 19, "version": "10"},
                    {"architectures": ["amd64"], "revision": 18, "version": "10"},
                ],
                "snap": {
                    "name": "snap-test",
                    "channels": [
                        {
                            "branch": None,
                            "fallback": None,
                            "name": "2.1/stable",
                            "risk": "stable",
                            "track": "2.1",
                        },
                        {
                            "branch": None,
                            "fallback": "2.1/stable",
                            "name": "2.1/candidate",
                            "risk": "candidate",
                            "track": "2.1",
                        },
                        {
                            "branch": None,
                            "fallback": "2.1/candidate",
                            "name": "2.1/beta",
                            "risk": "beta",
                            "track": "2.1",
                        },
                        {
                            "branch": None,
                            "fallback": "2.1/beta",
                            "name": "2.1/edge",
                            "risk": "edge",
                            "track": "2.1",
                        },
                        {
                            "branch": None,
                            "fallback": None,
                            "name": "2.0/stable",
                            "risk": "stable",
                            "track": "2.0",
                        },
                        {
                            "branch": None,
                            "fallback": "2.0/stable",
                            "name": "2.0/candidate",
                            "risk": "candidate",
                            "track": "2.0",
                        },
                        {
                            "branch": None,
                            "fallback": "2.0/candidate",
                            "name": "2.0/beta",
                            "risk": "beta",
                            "track": "2.0",
                        },
                        {
                            "branch": None,
                            "fallback": "2.0/beta",
                            "name": "2.0/edge",
                            "risk": "edge",
                            "track": "2.0",
                        },
                    ],
                    "default-track": "2.1",
                    "tracks": [
                        {
                            "name": "2.0",
                            "status": "default",
                            "creation-date": "2019-10-17T14:11:59Z",
                            "version-pattern": "2\\.*",
                        },
                        {
                            "name": "latest",
                            "status": "active",
                            "creation-date": None,
                            "version-pattern": None,
                        },
                    ],
                },
            }
        )
        self.fake_store_get_snap_channel_map = fixtures.MockPatchObject(
            storeapi.StoreClient, "get_snap_channel_map", return_value=self.channel_map
        )
        self.useFixture(self.fake_store_get_snap_channel_map)

        self.releases = Releases.unmarshal(
            {
                "revisions": [
                    {
                        "architectures": ["i386"],
                        "base": "core20",
                        "build_url": None,
                        "confinement": "strict",
                        "created_at": " 2016-09-27T19:23:40Z",
                        "grade": "stable",
                        "revision": 2,
                        "sha3-384": "a9060ef4872ccacbfa440617a76fcd84967896b28d0d1eb7571f00a1098d766e7e93353b084ba6ad841d7b14b95ede48",
                        "size": 20,
                        "status": "Published",
                        "version": "2.0.1",
                    },
                    {
                        "architectures": ["amd64"],
                        "base": "core20",
                        "build_url": None,
                        "confinement": "strict",
                        "created_at": "2016-09-27T18:38:43Z",
                        "grade": "stable",
                        "revision": 1,
                        "sha3-384": "a9060ef4872ccacbfa440617a76fcd84967896b28d0d1eb7571f00a1098d766e7e93353b084ba6ad841d7b14b95ede48",
                        "size": 20,
                        "status": "Published",
                        "version": "2.0.2",
                    },
                ],
                "releases": [
                    {
                        "architecture": "amd64",
                        "branch": None,
                        "channel": "latest/stable",
                        "expiration-date": None,
                        "revision": 1,
                        "risk": "stable",
                        "track": "latest",
                        "when": "2020-02-12T17:51:40.891996Z",
                    },
                    {
                        "architecture": "i386",
                        "branch": None,
                        "channel": "latest/stable",
                        "expiration-date": None,
                        "revision": None,
                        "risk": "stable",
                        "track": "latest",
                        "when": "2020-02-11T17:51:40.891996Z",
                    },
                    {
                        "architecture": "amd64",
                        "branch": None,
                        "channel": "latest/edge",
                        "expiration-date": None,
                        "revision": 1,
                        "risk": "stable",
                        "track": "latest",
                        "when": "2020-01-12T17:51:40.891996Z",
                    },
                ],
            }
        )
        self.fake_store_get_releases = fixtures.MockPatchObject(
            storeapi.StoreClient, "get_snap_releases", return_value=self.releases
        )
        self.useFixture(self.fake_store_get_releases)

        # Uploading
        self.mock_tracker = mock.Mock(storeapi._status_tracker.StatusTracker)
        self.mock_tracker.track.return_value = {
            "code": "ready_to_release",
            "processed": True,
            "can_release": True,
            "url": "/fake/url",
            "revision": 19,
        }
        self.fake_store_upload_precheck = fixtures.MockPatchObject(
            storeapi.StoreClient, "upload_precheck"
        )
        self.useFixture(self.fake_store_upload_precheck)

        self.fake_store_upload = fixtures.MockPatchObject(
            storeapi.StoreClient, "upload", return_value=self.mock_tracker
        )
        self.useFixture(self.fake_store_upload)

        # Mock the snap command, pass through a select few.
        self.fake_check_output = fixtures.MockPatch(
            "subprocess.check_output", side_effect=mock_check_output
        )
        self.useFixture(self.fake_check_output)

        # Pretend that the snap command is available
        self.fake_package_installed = fixtures.MockPatch(
            "snapcraft.internal.repo.Repo.is_package_installed", return_value=True
        )
        self.useFixture(self.fake_package_installed)
Example No. 12
    def setUp(self):
        super(ServerGroupPolicyTest, self).setUp()
        self.controller = server_groups.ServerGroupController()
        self.req = fakes.HTTPRequest.blank('')

        self.mock_get = self.useFixture(
            fixtures.MockPatch('nova.objects.InstanceGroup.get_by_uuid')).mock
        self.sg = [
            objects.InstanceGroup(uuid=uuids.fake_id,
                                  name='fake',
                                  project_id=self.project_id,
                                  user_id='u1',
                                  policies=[],
                                  members=[]),
            objects.InstanceGroup(uuid=uuids.fake_id,
                                  name='fake2',
                                  project_id='proj2',
                                  user_id='u2',
                                  policies=[],
                                  members=[])
        ]
        self.mock_get.return_value = self.sg[0]

        # Check that admin or owner is able to delete
        # the server group.
        self.admin_or_owner_authorized_contexts = [
            self.legacy_admin_context, self.system_admin_context,
            self.project_admin_context, self.project_member_context,
            self.project_reader_context, self.project_foo_context
        ]
        # Check that non-admin/owner is not able to delete
        # the server group.
        self.admin_or_owner_unauthorized_contexts = [
            self.system_member_context,
            self.system_reader_context,
            self.system_foo_context,
            self.other_project_member_context,
            self.other_project_reader_context,
        ]
        # Check that system reader or owner is able to get
        # the server group. Due to old defaults everyone
        # is allowed to perform this operation.
        self.system_reader_or_owner_authorized_contexts = [
            self.legacy_admin_context, self.system_admin_context,
            self.project_admin_context, self.project_member_context,
            self.project_reader_context, self.system_member_context,
            self.system_reader_context, self.project_foo_context
        ]
        self.system_reader_or_owner_unauthorized_contexts = [
            self.system_foo_context,
            self.other_project_member_context,
            self.other_project_reader_context,
        ]
        # Check that everyone is able to list
        # their own server group. Due to old defaults everyone
        # is able to list their server groups.
        self.everyone_authorized_contexts = [
            self.legacy_admin_context,
            self.system_admin_context,
            self.project_admin_context,
            self.project_member_context,
            self.project_reader_context,
            self.project_foo_context,
            self.system_member_context,
            self.system_reader_context,
            self.system_foo_context,
            self.other_project_member_context,
            self.other_project_reader_context,
        ]
        self.everyone_unauthorized_contexts = []
        # Check that project member is able to create server group.
        # Due to old defaults everyone is able to create server groups.
        self.project_member_authorized_contexts = [
            self.legacy_admin_context,
            self.system_admin_context,
            self.project_admin_context,
            self.project_member_context,
            self.system_member_context,
            self.project_reader_context,
            self.project_foo_context,
            self.system_reader_context,
            self.system_foo_context,
            self.other_project_member_context,
            self.other_project_reader_context,
        ]
        self.project_member_unauthorized_contexts = []
Example No. 13
    def setUp(self):
        super(FlavorExtraSpecsPolicyTest, self).setUp()
        self.controller = flavors_extraspecs.FlavorExtraSpecsController()
        self.flavor_ctrl = flavors.FlavorsController()
        self.fm_ctrl = flavor_manage.FlavorManageController()
        self.server_ctrl = servers.ServersController()
        self.req = fakes.HTTPRequest.blank('')
        self.server_ctrl._view_builder._add_security_grps = mock.MagicMock()
        self.server_ctrl._view_builder._get_metadata = mock.MagicMock()
        self.server_ctrl._view_builder._get_addresses = mock.MagicMock()
        self.server_ctrl._view_builder._get_host_id = mock.MagicMock()
        self.server_ctrl._view_builder._get_fault = mock.MagicMock()
        self.server_ctrl._view_builder._add_host_status = mock.MagicMock()

        self.instance = fake_instance.fake_instance_obj(
            self.project_member_context,
            id=1,
            uuid=uuids.fake_id,
            project_id=self.project_id,
            vm_state=vm_states.ACTIVE)

        self.mock_get = self.useFixture(
            fixtures.MockPatch('nova.api.openstack.common.get_instance')).mock
        self.mock_get.return_value = self.instance

        fakes.stub_out_secgroup_api(self,
                                    security_groups=[{
                                        'name': 'default'
                                    }])
        self.mock_get_all = self.useFixture(
            fixtures.MockPatchObject(self.server_ctrl.compute_api,
                                     'get_all')).mock
        self.mock_get_all.return_value = objects.InstanceList(
            objects=[self.instance])

        def get_flavor_extra_specs(context, flavor_id):
            return fake_flavor.fake_flavor_obj(
                self.project_member_context,
                id=1,
                uuid=uuids.fake_id,
                project_id=self.project_id,
                is_public=False,
                extra_specs={'hw:cpu_policy': 'shared'},
                expected_attrs='extra_specs')

        self.stub_out('nova.api.openstack.common.get_flavor',
                      get_flavor_extra_specs)

        # Check that all are able to get flavor extra specs.
        self.all_authorized_contexts = [
            self.legacy_admin_context, self.system_admin_context,
            self.project_admin_context, self.project_member_context,
            self.project_reader_context, self.project_foo_context,
            self.system_member_context, self.system_reader_context,
            self.system_foo_context, self.other_project_member_context,
            self.other_project_reader_context
        ]
        self.all_unauthorized_contexts = []
        # Check that all system scoped are able to get flavor extra specs.
        self.all_system_authorized_contexts = [
            self.legacy_admin_context, self.system_admin_context,
            self.project_admin_context, self.project_member_context,
            self.project_reader_context, self.project_foo_context,
            self.system_member_context, self.system_reader_context,
            self.system_foo_context, self.other_project_member_context,
            self.other_project_reader_context
        ]
        self.all_system_unauthorized_contexts = []

        # Check that admin is able to create, update and delete flavor
        # extra specs.
        self.admin_authorized_contexts = [
            self.legacy_admin_context, self.system_admin_context,
            self.project_admin_context
        ]
        # Check that non-admin is not able to create, update and
        # delete flavor extra specs.
        self.admin_unauthorized_contexts = [
            self.system_member_context, self.system_reader_context,
            self.system_foo_context, self.project_member_context,
            self.project_reader_context, self.project_foo_context,
            self.other_project_member_context,
            self.other_project_reader_context
        ]
Example No. 14
 def setUp(self):
     super(HostCPUStats, self).setUp()
     self.host_cpu_stats = self.useFixture(
         fixtures.MockPatch('nova_powervm.virt.powervm.host.HostCPUStats'))
Example No. 15
 def test_is_resource_deleted_false(self):
     mod = ('tempest.lib.services.compute.security_groups_client.'
            'SecurityGroupsClient.show_security_group')
     self.useFixture(fixtures.MockPatch(mod, return_value='success'))
     self.assertFalse(self.client.is_resource_deleted('fake-id'))
Example No. 16
    def test_evacuate_failure_nonshared_instancedir(self):
        """Assert the failure cleanup behaviour of non-shared instance storage

        If we fail during evacuate and the instance directory didn't
        previously exist on the destination, we should delete it
        """
        # Create instances on compute0
        compute0 = self._start_compute('compute0')
        servers = self._create_servers()
        compute1 = self._swap_computes(compute0)

        # Create a 'pass-through' mock for ensure_tree so we can log its calls
        orig_ensure_tree = fileutils.ensure_tree
        mock_ensure_tree = self.useFixture(
            fixtures.MockPatch('oslo_utils.fileutils.ensure_tree',
                               side_effect=orig_ensure_tree)).mock

        for server in servers:
            name = server['name']
            source_instance_path = self.source_instance_path(server)
            dest_instance_path = self.dest_instance_path(server)

            # Check that we've got an instance directory on the source and not
            # on the dest
            self.assertTrue(
                os.path.exists(source_instance_path),
                "Source instance directory %s for server %s does "
                "not exist" % (source_instance_path, name))
            self.assertFalse(
                os.path.exists(dest_instance_path),
                "Destination instance directory %s for server %s "
                "exists" % (dest_instance_path, name))

            # By default our 2 compute hosts share the same instance directory
            # on the test runner. Force a different directory while running
            # evacuate on compute1 so we don't have shared storage.
            def dest_get_instance_path(instance, relative=False):
                if relative:
                    return instance.uuid
                return dest_instance_path

            with mock.patch('nova.virt.libvirt.utils.get_instance_path') \
                    as get_instance_path:
                get_instance_path.side_effect = dest_get_instance_path
                server = self._evacuate_with_failure(server, compute1)

            # Check that we've got an instance directory on the source and not
            # on the dest, but that the dest was created
            self.assertTrue(
                os.path.exists(source_instance_path),
                "Source instance directory %s for server %s does "
                "not exist" % (source_instance_path, name))
            self.assertFalse(
                os.path.exists(dest_instance_path),
                "Destination instance directory %s for server %s "
                "exists" % (dest_instance_path, name))
            mock_ensure_tree.assert_called_with(dest_instance_path)

            self.assert_disks_nonshared_instancedir(server)

            # Check we're still on the failed source host
            self.assertEqual('compute0', server['OS-EXT-SRV-ATTR:host'])
Example No. 17
    def setUp(self):
        super().setUp()

        self.fake_check_call = fixtures.MockPatch("subprocess.check_call")
        self.useFixture(self.fake_check_call)
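
Storing the fixture on self (rather than only calling useFixture) keeps its .mock reachable from individual tests. A hedged sketch of a follow-up test; the method under test and the command it shells out are hypothetical:

    def test_pack_calls_snap(self):
        # Hypothetical test; asserts against the MockPatch fixture stored
        # on self in setUp() above.
        self._pack_snap()  # hypothetical code under test that shells out

        self.fake_check_call.mock.assert_called_once_with(
            ['snap', 'pack', 'prime'])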
Example No. 18
 def _setUp(self):
     super(PlanManagementFixture, self)._setUp()
     self.mock_tarball = self.useFixture(
         fixtures.MockPatch(
             'tripleoclient.workflows.plan_management.tarball')).mock
Example No. 19
    def _test_failover_scenario(self, enable_cancel_on_failover=False):
        # NOTE(sileht): run this test only when the functional suite runs with
        # a driver that uses rabbitmq as a backend
        self.driver = os.environ.get('TRANSPORT_DRIVER')
        if self.driver not in self.DRIVERS:
            self.skipTest("TRANSPORT_DRIVER is not set to a rabbit driver")

        # NOTE(sileht): Allow only one response at a time, to
        # have only one tcp connection for reply and ensure it will failover
        # correctly
        self.config(heartbeat_timeout_threshold=1,
                    rpc_conn_pool_size=1,
                    kombu_reconnect_delay=0,
                    rabbit_retry_interval=0,
                    rabbit_retry_backoff=0,
                    enable_cancel_on_failover=enable_cancel_on_failover,
                    group='oslo_messaging_rabbit')

        self.pifpaf = self.useFixture(
            rabbitmq.RabbitMQDriver(cluster=True, port=5692))

        self.url = self.pifpaf.env["PIFPAF_URL"]
        self.n1 = self.pifpaf.env["PIFPAF_RABBITMQ_NODENAME1"]
        self.n2 = self.pifpaf.env["PIFPAF_RABBITMQ_NODENAME2"]
        self.n3 = self.pifpaf.env["PIFPAF_RABBITMQ_NODENAME3"]

        # ensure connections will be established to the first node
        self.pifpaf.stop_node(self.n2)
        self.pifpaf.stop_node(self.n3)

        self.servers = self.useFixture(
            utils.RpcServerGroupFixture(self.conf,
                                        self.url,
                                        endpoint=self,
                                        names=["server"]))

        # Don't randomize rabbit hosts
        self.useFixture(
            fixtures.MockPatch('oslo_messaging._drivers.impl_rabbit.random',
                               side_effect=lambda x: x))

        # NOTE(sileht): this establishes the server connections and the reply
        # connection to nodename n1
        self.client = self.servers.client(0)
        self.client.ping()
        self._check_ports(self.pifpaf.port)

        # Switch to node n2
        self.pifpaf.start_node(self.n2)
        self.assertEqual("callback done", self.client.kill_and_process())
        self.assertEqual("callback done", self.client.just_process())
        self._check_ports(self.pifpaf.get_port(self.n2))

        # Switch to node n3
        self.pifpaf.start_node(self.n3)
        time.sleep(0.1)
        self.pifpaf.kill_node(self.n2, signal=signal.SIGKILL)
        time.sleep(0.1)
        self.assertEqual("callback done", self.client.just_process())
        self._check_ports(self.pifpaf.get_port(self.n3))

        self.pifpaf.start_node(self.n1)
        time.sleep(0.1)
        self.pifpaf.kill_node(self.n3, signal=signal.SIGKILL)
        time.sleep(0.1)
        self.assertEqual("callback done", self.client.just_process())
        self._check_ports(self.pifpaf.get_port(self.n1))
Example No. 20
    def setUp(self):
        super(ForeverRetryUncaughtExceptionsTest, self).setUp()

        self._exceptions = []

        self.useFixture(fixtures.MockPatch('time.sleep', return_value=None))
Example No. 21
    def setUp(self):
        super(TestRetryHandler, self).setUp()

        self.now = time.time()
        f_time = self.useFixture(fixtures.MockPatch('time.time'))
        f_time.mock.return_value = self.now
Example No. 22
    def test_setup_environment_content_arm64(self):
        self.useFixture(
            fixtures.MockPatch("platform.machine", return_value="aarch64"))
        recorded_files = dict()

        @contextlib.contextmanager
        def fake_namedtempfile(*, suffix: str, **kwargs):
            # Usage hides the file basename in the suffix.
            tmp_path = os.path.join(self.path, "tmpfile")
            with open(tmp_path, "wb") as f_write:
                yield f_write
            with open(tmp_path, "r") as f_read:
                recorded_files[suffix] = f_read.read()

        self.useFixture(
            fixtures.MockPatch("tempfile.NamedTemporaryFile",
                               new=fake_namedtempfile))

        provider = ProviderImpl(project=self.project, echoer=self.echoer_mock)
        provider._setup_environment()

        self.expectThat(
            recorded_files,
            Equals({
                ".bashrc":
                '#!/bin/bash\nexport PS1="\\h \\$(/bin/_snapcraft_prompt)# "\n',
                "00-snapcraft":
                'Apt::Install-Recommends "false";\n',
                "_snapcraft_prompt":
                dedent("""\
                        #!/bin/bash
                        if [[ "$PWD" =~ ^$HOME.* ]]; then
                            path="${PWD/#$HOME/\\ ..}"
                            if [[ "$path" == " .." ]]; then
                                ps1=""
                            else
                                ps1="$path"
                            fi
                        else
                            ps1="$PWD"
                        fi
                        echo -n $ps1
                        """),
                "default.sources":
                dedent("""\
                        Types: deb deb-src
                        URIs: http://ports.ubuntu.com/ubuntu-ports
                        Suites: xenial xenial-updates
                        Components: main multiverse restricted universe
                    """),
                "default-security.sources":
                dedent("""\
                        Types: deb deb-src
                        URIs: http://ports.ubuntu.com/ubuntu-ports
                        Suites: xenial-security
                        Components: main multiverse restricted universe
                    """),
                "sources.list":
                "",
            }),
        )
Example No. 23
 def patch(self, target, **kwargs):
     mockfixture = self.useFixture(fixtures.MockPatch(target, **kwargs))
     return mockfixture.mock
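
Because the helper forwards **kwargs straight to fixtures.MockPatch (and hence to mock.patch), callers can configure the replacement inline with return_value or side_effect. The sketch below is hypothetical: the patched targets and the client under test are assumptions, not taken from the example above.

    def test_fetch_retries_on_io_error(self):
        # Hypothetical test showing kwargs forwarded through patch().
        self.patch('time.sleep', return_value=None)  # silence real sleeps
        fake_get = self.patch('requests.get',
                              side_effect=[IOError('boom'), 'ok'])

        result = self.client.fetch('http://example.com')  # hypothetical SUT

        self.assertEqual('ok', result)
        self.assertEqual(2, fake_get.call_count)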
Example No. 24
    def setUp(self):
        super().setUp()

        self.fake_run = fixtures.MockPatch("snapcraft.internal.common.run")
        self.useFixture(self.fake_run)
Example No. 25
    def start_fixture(self):
        """Create necessary temp files and do the config dance."""
        global LOAD_APP_KWARGS

        if not os.getenv("GNOCCHI_TEST_DEBUG"):
            self.output = base.CaptureOutput()
            self.output.setUp()

        data_tmp_dir = tempfile.mkdtemp(prefix='gnocchi')

        if os.getenv("GABBI_LIVE"):
            dcf = None
        else:
            dcf = []
        conf = service.prepare_service([],
                                       conf=utils.prepare_conf(),
                                       default_config_files=dcf,
                                       logging_level=logging.DEBUG,
                                       skip_log_opts=True)

        py_root = os.path.abspath(
            os.path.join(
                os.path.dirname(__file__),
                '..',
                '..',
            ))
        conf.set_override('paste_config',
                          os.path.join(py_root, 'rest', 'api-paste.ini'),
                          group="api")
        conf.set_override('policy_file',
                          os.path.join(py_root, 'rest', 'policy.yaml'),
                          group="oslo_policy")

        # NOTE(sileht): This is not concurrency safe, but only this test file
        # deals with CORS, so we are fine. set_override doesn't work because
        # the cors group doesn't exist yet, and when the CORS middleware is
        # created it registers the option and directly copies the values of
        # all configuration options, making it impossible to override them
        # properly...
        cfg.set_defaults(cors.CORS_OPTS, allowed_origin="http://foobar.com")

        self.conf = conf
        self.tmp_dir = data_tmp_dir

        if conf.indexer.url is None:
            raise case.SkipTest("No indexer configured")

        storage_driver = os.getenv("GNOCCHI_TEST_STORAGE_DRIVER", "file")

        conf.set_override('driver', storage_driver, 'storage')
        if conf.storage.driver == 'file':
            conf.set_override('file_basepath', data_tmp_dir, 'storage')
        elif conf.storage.driver == 'ceph':
            conf.set_override('ceph_conffile', os.getenv("CEPH_CONF"),
                              'storage')
            self.ceph_pool_name = uuid.uuid4().hex
            with open(os.devnull, 'w') as f:
                subprocess.call(("ceph -c %s osd pool create %s "
                                 "16 16 replicated") %
                                (os.getenv("CEPH_CONF"), self.ceph_pool_name),
                                shell=True,
                                stdout=f,
                                stderr=subprocess.STDOUT)
                subprocess.call(("ceph -c %s osd pool application "
                                 "enable %s rbd") %
                                (os.getenv("CEPH_CONF"), self.ceph_pool_name),
                                shell=True,
                                stdout=f,
                                stderr=subprocess.STDOUT)
            conf.set_override('ceph_pool', self.ceph_pool_name, 'storage')
        elif conf.storage.driver == "s3":
            conf.set_override('s3_endpoint_url',
                              os.getenv("GNOCCHI_STORAGE_HTTP_URL"),
                              group="storage")
            conf.set_override('s3_access_key_id', "gnocchi", group="storage")
            conf.set_override('s3_secret_access_key',
                              "anythingworks",
                              group="storage")
            conf.set_override("s3_bucket_prefix",
                              str(uuid.uuid4())[:26], "storage")
        elif conf.storage.driver == "swift":
            # NOTE(sileht): This fixture must start before any driver stuff
            swift_fixture = fixtures.MockPatch('swiftclient.client.Connection',
                                               base.FakeSwiftClient)
            swift_fixture.setUp()

        # NOTE(jd) All of that is still very SQL centric but we only support
        # SQL for now so let's say it's good enough.
        conf.set_override(
            'url',
            sqlalchemy.SQLAlchemyIndexer._create_new_database(
                conf.indexer.url), 'indexer')

        index = indexer.get_driver(conf)
        index.upgrade()

        # Set pagination to a testable value
        conf.set_override('max_limit', 7, 'api')

        conf.set_override('enable_proxy_headers_parsing', True, group="api")

        self.index = index

        self.coord = metricd.get_coordinator_and_start(str(uuid.uuid4()),
                                                       conf.coordination_url)
        s = storage.get_driver(conf)
        i = incoming.get_driver(conf)

        if conf.storage.driver == 'redis':
            # Create one prefix per test
            s.STORAGE_PREFIX = str(uuid.uuid4()).encode()

        if conf.incoming.driver == 'redis':
            i.SACK_NAME_FORMAT = (str(uuid.uuid4()) +
                                  incoming.IncomingDriver.SACK_NAME_FORMAT)

        self.fixtures = [
            fixtures.MockPatch("gnocchi.storage.get_driver", return_value=s),
            fixtures.MockPatch("gnocchi.incoming.get_driver", return_value=i),
            fixtures.MockPatch("gnocchi.indexer.get_driver",
                               return_value=self.index),
            fixtures.MockPatch("gnocchi.cli.metricd.get_coordinator_and_start",
                               return_value=self.coord),
        ]
        for f in self.fixtures:
            f.setUp()

        if conf.storage.driver == 'swift':
            self.fixtures.append(swift_fixture)

        LOAD_APP_KWARGS = {
            'conf': conf,
        }

        s.upgrade()
        i.upgrade(128)

        # start up a thread to async process measures
        self.metricd_thread = MetricdThread(chef.Chef(self.coord, i, index, s))
        self.metricd_thread.start()
Example No. 26
    def setUp(self):
        super(TestImportNodeMultiArch, self).setUp()

        self.nodes_list = [{
            "pm_user": "******",
            "pm_addr": "192.168.122.1",
            "pm_password": "******",
            "pm_type": "pxe_ssh",
            "mac": ["00:0b:d0:69:7e:59"],
        }, {
            "pm_user": "******",
            "pm_addr": "192.168.122.2",
            "pm_password": "******",
            "pm_type": "pxe_ssh",
            "arch": "x86_64",
            "mac": ["00:0b:d0:69:7e:58"]
        }, {
            "pm_user": "******",
            "pm_addr": "192.168.122.3",
            "pm_password": "******",
            "pm_type": "pxe_ssh",
            "arch": "x86_64",
            "platform": "SNB",
            "mac": ["00:0b:d0:69:7e:58"]
        }]
        self.json_file = tempfile.NamedTemporaryFile(mode='w',
                                                     delete=False,
                                                     suffix='.json')
        json.dump(self.nodes_list, self.json_file)
        self.json_file.close()
        self.addCleanup(os.unlink, self.json_file.name)

        self.workflow = self.app.client_manager.workflow_engine
        execution = mock.Mock()
        execution.id = "IDID"
        self.workflow.executions.create.return_value = execution
        client = self.app.client_manager.tripleoclient
        self.websocket = client.messaging_websocket()

        # Get the command object to test
        self.cmd = overcloud_node.ImportNode(self.app, None)

        image = collections.namedtuple('image', ['id', 'name'])
        self.app.client_manager.image = mock.Mock()
        self.app.client_manager.image.images.list.return_value = [
            image(id=3, name='overcloud-full'),
            image(id=6, name='x86_64-overcloud-full'),
            image(id=9, name='SNB-x86_64-overcloud-full'),
        ]

        self.http_boot = '/var/lib/ironic/httpboot'

        existing = ['agent', 'x86_64/agent', 'SNB-x86_64/agent']
        existing = {
            os.path.join(self.http_boot, name + ext)
            for name in existing for ext in ('.kernel', '.ramdisk')
        }

        self.useFixture(
            fixtures.MockPatch('os.path.exists',
                               autospec=True,
                               side_effect=lambda path: path in existing))
Example No. 27
 def setUp(self):
     super(TestMemcachePoolAbstraction, self).setUp()
     self.useFixture(
         fixtures.MockPatch('oslo_cache._memcache_pool._MemcacheClient'))
Example No. 28
 def test_is_resource_deleted_true(self):
     mod = ('tempest.lib.services.compute.security_groups_client.'
            'SecurityGroupsClient.show_security_group')
     self.useFixture(fixtures.MockPatch(mod, side_effect=lib_exc.NotFound))
     self.assertTrue(self.client.is_resource_deleted('fake-id'))
Example No. 29
 def setUp(self):
     self.client = self.useFixture(
         fixtures.MockPatch(
             'aodh.evaluator.gnocchi.client')).mock.Client.return_value
     super(BaseCompositeEvaluate, self).setUp()
Example No. 30
 def setUp(self):
     super(ImageAPI, self).setUp()
     self.img_api_fx = self.useFixture(fixtures.MockPatch('nova.image.API'))
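
All of the examples above follow the same pattern: wrap mock.patch in a fixtures.MockPatch (or MockPatchObject), activate it with useFixture so the patch is removed automatically during test cleanup, and keep the .mock handle around to configure and assert against. The following is a minimal, self-contained sketch of that pattern; the patched target (shutil.disk_usage) and the test class are chosen purely for illustration and do not come from any of the examples above.

import fixtures
import testtools


class DiskUsageTest(testtools.TestCase):

    def setUp(self):
        super().setUp()
        # The patch is active from here until the test's cleanups run.
        self.disk_usage = self.useFixture(
            fixtures.MockPatch('shutil.disk_usage')).mock
        self.disk_usage.return_value = (100, 60, 40)  # total, used, free

    def test_reports_free_space(self):
        import shutil
        total, used, free = shutil.disk_usage('/')
        self.assertEqual(40, free)
        self.disk_usage.assert_called_once_with('/')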