def set_patches(self):
    super(MDRaidArrayDeviceMethodsTestCase, self).set_patches()
    self.patchers["md"] = patch("blivet.devices.md.blockdev.md")
    self.patchers["is_disk"] = patch.object(self.device_class, "is_disk",
                                            new=PropertyMock(return_value=False))
    self.patchers["pvs_info"] = patch("blivet.devices.md.pvs_info")
    self.patchers["lvm"] = patch("blivet.devices.md.blockdev.lvm")
def test_teardown(self):
    with patch("blivet.devicelibs.lvm.lvmetad_socket_exists", return_value=False):
        super(LVMLogicalVolumeDeviceMethodsTestCase, self).test_teardown()

        with patch("blivet.devices.lvm.blockdev.lvm") as lvm:
            self.device._teardown()
            self.assertTrue(lvm.lvdeactivate.called)
def set_patches(self):
    super(FSMethodsTestCase, self).set_patches()
    self.patchers["udev"] = patch("blivet.formats.fs.udev")
    self.patchers["util"] = patch("blivet.formats.fs.util")
    self.patchers["system_mountpoint"] = patch.object(self.format_class, "system_mountpoint",
                                                      new=PropertyMock(return_value='/fake/mountpoint'))
    self.patchers["fs_os"] = patch("blivet.formats.fs.os")
def setUp(self):
    # Create a cleanup to undo a patch() call *before* calling the
    # base class version of setUp().
    patcher = mock.patch('os.environ.keys')
    patcher.start()
    self.addCleanup(patcher.stop)
    super(TestManualMock, self).setUp()
    self.useFixture(fixtures.MockPatch('fixtures.Timeout'))
    self.unstopped = mock.patch('os.environ.put')
def test_load_notifiers_no_extensions(self):
    self.config(routing_notifier_config="routing_notifier.yaml")
    routing_config = r""
    config_file = mock.MagicMock()
    config_file.return_value = routing_config

    with mock.patch.object(self.router, '_get_notifier_config_file', config_file):
        with mock.patch('stevedore.dispatch.DispatchExtensionManager',
                        return_value=self._empty_extension_manager()):
            with mock.patch('oslo_messaging.notify.'
                            '_impl_routing.LOG') as mylog:
                self.router._load_notifiers()
                self.assertFalse(mylog.debug.called)
                self.assertEqual({}, self.router.routing_groups)
def test_lsm_error_handling(self):
    """Validate handling of potential lsm errors."""
    with patch("blivet.devicelibs.disk._lsm_required._check_avail", return_value=True):
        with patch("blivet.devicelibs.disk.lsm") as _lsm:
            _lsm.LsmError = FakeLsmError

            # verify that we end up with an empty dict if lsm.Client() raises LsmError
            _lsm.Client.side_effect = raise_lsm_error
            disklib.update_volume_info()
            self.assertEqual(disklib.volumes, dict())

            # verify that any error other than LsmError gets raised
            _lsm.Client.side_effect = raise_other_error
            with self.assertRaises(OtherError):
                disklib.update_volume_info()
def test_destroy(self):
    with patch.object(self.device, "teardown"):
        super(LVMLogicalVolumeDeviceMethodsTestCase, self).test_destroy()

        with patch("blivet.devices.lvm.blockdev.lvm") as lvm:
            self.device._destroy()
            self.assertTrue(lvm.lvremove.called)
def _test_destroy_backend(self):
    with patch("blivet.formats.run_program") as run_program:
        run_program.return_value = 0
        self.format.exists = True
        self.format.destroy()
        self.assertFalse(self.format.exists)
        run_program.assert_called_with(["wipefs", "-f", "-a", self.format.device])
def test_ignore_req_opt(self):
    m = middleware.RequestNotifier(FakeApp(), ignore_req_list='get, PUT')
    req = webob.Request.blank('/skip/foo',
                              environ={'REQUEST_METHOD': 'GET'})
    req1 = webob.Request.blank('/skip/foo',
                               environ={'REQUEST_METHOD': 'PUT'})
    req2 = webob.Request.blank('/accept/foo',
                               environ={'REQUEST_METHOD': 'POST'})
    with mock.patch(
            'oslo_messaging.notify.notifier.Notifier._notify') as notify:
        # Check that requests using ignored methods (GET, PUT) do not
        # send notifications
        m(req)
        m(req1)
        self.assertEqual(0, len(notify.call_args_list))

        # Check that a request using a non-ignored method (POST) sends
        # notifications
        m(req2)
        self.assertEqual(2, len(notify.call_args_list))

        call_args = notify.call_args_list[0][0]
        self.assertEqual('http.request', call_args[1])
        self.assertEqual('INFO', call_args[3])
        self.assertEqual(set(['request']), set(call_args[2].keys()))

        request = call_args[2]['request']
        self.assertEqual('/accept/foo', request['PATH_INFO'])
        self.assertEqual('POST', request['REQUEST_METHOD'])

        call_args = notify.call_args_list[1][0]
        self.assertEqual('http.response', call_args[1])
        self.assertEqual('INFO', call_args[3])
        self.assertEqual(set(['request', 'response']),
                         set(call_args[2].keys()))
def test_add_device(self, *args):  # pylint: disable=unused-argument
    dt = DeviceTree()

    dev1 = StorageDevice("dev1", exists=False, uuid=sentinel.dev1_uuid, parents=[])

    self.assertEqual(dt.devices, list())

    # things are called, updated as expected when a device is added
    with patch("blivet.devicetree.callbacks") as callbacks:
        dt._add_device(dev1)
        self.assertTrue(callbacks.device_added.called)

        self.assertEqual(dt.devices, [dev1])
        self.assertTrue(dev1 in dt.devices)
        self.assertTrue(dev1.name in dt.names)
        self.assertTrue(dev1.add_hook.called)  # pylint: disable=no-member

        # adding an already-added device fails
        six.assertRaisesRegex(self, ValueError, "already in tree", dt._add_device, dev1)

        dev2 = StorageDevice("dev2", exists=False, parents=[])
        dev3 = StorageDevice("dev3", exists=False, parents=[dev1, dev2])

        # adding a device with one or more parents not already in the tree fails
        six.assertRaisesRegex(self, DeviceTreeError, "parent.*not in tree", dt._add_device, dev3)
        self.assertFalse(dev2 in dt.devices)
        self.assertFalse(dev2.name in dt.names)

        dt._add_device(dev2)
        self.assertTrue(dev2 in dt.devices)
        self.assertTrue(dev2.name in dt.names)

        dt._add_device(dev3)
        self.assertTrue(dev3 in dt.devices)
        self.assertTrue(dev3.name in dt.names)
def test_reconnect_order(self):
    brokers = ['host1', 'host2', 'host3', 'host4', 'host5']
    brokers_count = len(brokers)

    self.config(qpid_hosts=brokers,
                group='oslo_messaging_qpid')

    with mock.patch('qpid.messaging.Connection') as conn_mock:
        # starting from the first broker in the list
        url = oslo_messaging.TransportURL.parse(self.conf, None)
        connection = qpid_driver.Connection(self.conf, url,
                                            amqp.PURPOSE_SEND)

        # reconnect will advance to the next broker, one broker per
        # attempt, and then wrap to the start of the list once the end is
        # reached
        for _ in range(brokers_count):
            connection.reconnect()

        expected = []
        for broker in brokers:
            expected.extend([mock.call("%s:5672" % broker),
                             mock.call().open(),
                             mock.call().session(),
                             mock.call().opened(),
                             mock.call().opened().__nonzero__(),
                             mock.call().close()])

        conn_mock.assert_has_calls(expected, any_order=True)
def test_listen_and_direct_send(self):
    target = oslo_messaging.Target(exchange="exchange_test",
                                   topic="topic_test",
                                   server="server_test")

    with mock.patch('qpid.messaging.Connection') as conn_cls:
        conn = conn_cls.return_value
        session = conn.session.return_value
        session.receiver.side_effect = [mock.Mock(), mock.Mock(),
                                        mock.Mock()]

        listener = self.driver.listen(target)
        listener.conn.direct_send("msg_id", {})

        self.assertEqual(3, len(listener.conn.consumers))

        expected_calls = [
            mock.call(AddressNodeMatcher(
                'amq.topic/topic/exchange_test/topic_test')),
            mock.call(AddressNodeMatcher(
                'amq.topic/topic/exchange_test/topic_test.server_test')),
            mock.call(AddressNodeMatcher('amq.topic/fanout/topic_test')),
        ]
        session.receiver.assert_has_calls(expected_calls)
        session.sender.assert_called_with(
            AddressNodeMatcher("amq.direct/msg_id"))
def test_declared_queue_publisher(self):
    transport = oslo_messaging.get_transport(self.conf,
                                             'kombu+memory:////')
    self.addCleanup(transport.cleanup)

    e_passive = kombu.entity.Exchange(
        name='foobar', type='topic', passive=True)
    e_active = kombu.entity.Exchange(
        name='foobar', type='topic', passive=False)

    with transport._driver._get_connection(
            driver_common.PURPOSE_SEND) as pool_conn:
        conn = pool_conn.connection
        exc = conn.connection.channel_errors[0]

        def try_send(exchange):
            conn._ensure_publishing(
                conn._publish_and_creates_default_queue,
                exchange, {}, routing_key='foobar')

        with mock.patch('kombu.transport.virtual.Channel.close'):
            # Ensure the exchange does not exist
            self.assertRaises(oslo_messaging.MessageDeliveryFailure,
                              try_send, e_passive)
            # Create it
            try_send(e_active)
            # Ensure it has been created
            try_send(e_passive)

            with mock.patch('kombu.messaging.Producer.publish',
                            side_effect=exc):
                # Ensure the exchange is already in the cache
                self.assertIn('foobar', conn._declared_exchanges)
                # Reset the connection
                self.assertRaises(oslo_messaging.MessageDeliveryFailure,
                                  try_send, e_passive)

            # Ensure the cache is empty
            self.assertEqual(0, len(conn._declared_exchanges))

            try_send(e_active)
            self.assertIn('foobar', conn._declared_exchanges)
def test_notify(self): with mock.patch("confluent_kafka.Producer") as producer: self.driver.pconn.notify_send("fake_topic", {"fake_ctxt": "fake_param"}, {"fake_text": "fake_message_1"}, 10) assert producer.call_count == 1
def testEventMask(self):
    handler_cb = Mock()
    with patch("blivet.events.manager.validate_cb", return_value=True):
        mgr = FakeEventManager(handler_cb=handler_cb)

        device = "sdc"
        action = "add"

        # no mask -> event is handled
        mgr.handle_event(action, device)
        time.sleep(1)
        self.assertEqual(handler_cb.call_count, 1)
        event = handler_cb.call_args[1]["event"]  # pylint: disable=unsubscriptable-object
        self.assertEqual(event.device, device)
        self.assertEqual(event.action, action)

        # mask matches device but not action -> event is handled
        handler_cb.reset_mock()
        mask = mgr.add_mask(device=device, action=action + 'x')
        mgr.handle_event(action, device)
        time.sleep(1)
        self.assertEqual(handler_cb.call_count, 1)
        event = handler_cb.call_args[1]["event"]  # pylint: disable=unsubscriptable-object
        self.assertEqual(event.device, device)
        self.assertEqual(event.action, action)

        # mask matches action but not device -> event is handled
        handler_cb.reset_mock()
        mask = mgr.add_mask(device=device + 'x', action=action)
        mgr.handle_event(action, device)
        time.sleep(1)
        self.assertEqual(handler_cb.call_count, 1)
        event = handler_cb.call_args[1]["event"]  # pylint: disable=unsubscriptable-object
        self.assertEqual(event.device, device)
        self.assertEqual(event.action, action)

        # mask matches device and action -> event is ignored
        handler_cb.reset_mock()
        mgr.remove_mask(mask)
        mask = mgr.add_mask(device=device, action=action)
        mgr.handle_event(action, device)
        time.sleep(1)
        self.assertEqual(handler_cb.call_count, 0)

        # device-only mask matches -> event is ignored
        handler_cb.reset_mock()
        mgr.remove_mask(mask)
        mask = mgr.add_mask(device=device)
        mgr.handle_event(action, device)
        time.sleep(1)
        self.assertEqual(handler_cb.call_count, 0)

        # action-only mask matches -> event is ignored
        handler_cb.reset_mock()
        mgr.remove_mask(mask)
        mask = mgr.add_mask(action=action)
        mgr.handle_event(action, device)
        time.sleep(1)
        self.assertEqual(handler_cb.call_count, 0)

        mgr.remove_mask(mask)
def set_patches(self): self.patchers["update_sysfs_path"] = patch.object(self.device, "update_sysfs_path") self.patchers["udev"] = patch("blivet.devices.storage.udev") self.patchers["update_size"] = patch.object(self.device, "update_size") self.patchers["setup_parents"] = patch.object(self.device, "setup_parents") self.patchers["teardown_parents"] = patch.object(self.device, "teardown_parents") self.patchers["media_present"] = patch.object(self.device_class, "media_present", new=PropertyMock(return_value=True)) self.patchers["status"] = patch.object(self.device_class, "status", new=PropertyMock())
def test_notify(self): with mock.patch("kafka.KafkaProducer") as fake_producer_class: fake_producer = fake_producer_class.return_value self.driver.pconn.notify_send("fake_topic", {"fake_ctxt": "fake_param"}, {"fake_text": "fake_message_1"}, 10) self.assertEqual(2, len(fake_producer.send.mock_calls))
def test_best_label_type(self, arch):
    """
    1. is always in _disklabel_types
    2. is the default unless the device is too long for the default
    3. is msdos for fba dasd on S390
    4. is dasd for non-fba dasd on S390
    """
    dl = blivet.formats.disklabel.DiskLabel()
    dl._parted_disk = mock.Mock()
    dl._parted_device = mock.Mock()
    dl._device = "labeltypefakedev"

    arch.is_s390.return_value = False
    arch.is_efi.return_value = False
    arch.is_aarch64.return_value = False
    arch.is_arm.return_value = False
    arch.is_pmac.return_value = False

    with mock.patch.object(dl, '_label_type_size_check') as size_check:
        # size check passes for first type ("msdos")
        size_check.return_value = True
        self.assertEqual(dl._get_best_label_type(), "msdos")

        # size checks all fail -> label type is None
        size_check.return_value = False
        self.assertEqual(dl._get_best_label_type(), None)

        # size check passes on second call -> label type is "gpt" (second in platform list)
        size_check.side_effect = [False, True]
        self.assertEqual(dl._get_best_label_type(), "gpt")

    arch.is_pmac.return_value = True
    with mock.patch.object(dl, '_label_type_size_check') as size_check:
        size_check.return_value = True
        self.assertEqual(dl._get_best_label_type(), "mac")
    arch.is_pmac.return_value = False

    arch.is_efi.return_value = True
    with mock.patch.object(dl, '_label_type_size_check') as size_check:
        size_check.return_value = True
        self.assertEqual(dl._get_best_label_type(), "gpt")
    arch.is_efi.return_value = False

    arch.is_s390.return_value = True
    with mock.patch.object(dl, '_label_type_size_check') as size_check:
        size_check.return_value = True
        with mock.patch("blivet.formats.disklabel.blockdev.s390") as _s390:
            _s390.dasd_is_fba.return_value = False
            self.assertEqual(dl._get_best_label_type(), "msdos")

            _s390.dasd_is_fba.return_value = True
            self.assertEqual(dl._get_best_label_type(), "msdos")

            _s390.dasd_is_fba.return_value = False
            dl._parted_device.type = parted.DEVICE_DASD
            self.assertEqual(dl._get_best_label_type(), "dasd")
    arch.is_s390.return_value = False
def test_send_notification(self):
    target = oslo_messaging.Target(exchange="exchange_test",
                                   topic="topic_test.info")

    with mock.patch('qpid.messaging.Connection') as conn_cls:
        conn = conn_cls.return_value
        session = conn.session.return_value

        self.driver.send_notification(target, {}, {}, "2.0")
        session.sender.assert_called_with(AddressNodeMatcher(
            "amq.topic/topic/exchange_test/topic_test.info"))
def test_process_response_fail(self):
    def notify_error(context, publisher_id, event_type,
                     priority, payload):
        raise Exception('error')

    with mock.patch('oslo_messaging.notify.notifier.Notifier._notify',
                    notify_error):
        m = middleware.RequestNotifier(FakeApp())
        req = webob.Request.blank('/foo/bar',
                                  environ={'REQUEST_METHOD': 'GET'})
        m.process_response(req, webob.response.Response())
def test_send(self):
    target = messaging.Target(exchange="exchange_test",
                              topic="topic_test",
                              server="server_test")

    with mock.patch('qpid.messaging.Connection') as conn_cls:
        conn = conn_cls.return_value
        session = conn.session.return_value

        self.driver.send(target, {}, {})
        session.sender.assert_called_with(AddressNodeMatcher(
            "amq.topic/topic/exchange_test/topic_test.server_test"))
def test_weight_2(self):
    for spec in weighted:
        part = PartitionDevice('weight_test')
        part._format = Mock(name="fmt", type=spec.fstype,
                            mountpoint=spec.mountpoint,
                            mountable=spec.mountpoint is not None)
        with patch('blivet.devices.partition.arch') as _arch:
            for func in arch_funcs:
                f = getattr(_arch, func)
                f.return_value = func in spec.true_funcs

            self.assertEqual(part.weight, spec.weight)
def test_consume_from_missing_queue(self):
    transport = oslo_messaging.get_transport(self.conf, 'kombu+memory://')
    self.addCleanup(transport.cleanup)

    with transport._driver._get_connection(
            driver_common.PURPOSE_LISTEN) as conn:
        with mock.patch('kombu.Queue.consume') as consume, mock.patch(
                'kombu.Queue.declare') as declare:
            conn.declare_topic_consumer(exchange_name='test',
                                        topic='test',
                                        callback=lambda msg: True)
            import amqp
            consume.side_effect = [amqp.NotFound, None]
            conn.connection.connection.recoverable_connection_errors = ()
            conn.connection.connection.recoverable_channel_errors = ()
            self.assertEqual(1, declare.call_count)
            conn.connection.connection.drain_events = mock.Mock()
            # Ensure that the queue is re-declared if the consume method
            # of kombu.Queue raises amqp.NotFound
            conn.consume()
            self.assertEqual(2, declare.call_count)
def test_send_notification(self): target = oslo_messaging.Target(topic="topic_test") with mock.patch("kafka.KafkaProducer") as fake_producer_class: fake_producer = fake_producer_class.return_value fake_producer.send.side_effect = kafka.errors.NoBrokersAvailable self.assertRaises(kafka.errors.NoBrokersAvailable, self.driver.send_notification, target, {}, {"payload": ["test_1"]}, None, retry=3) self.assertEqual(3, fake_producer.send.call_count)
def test_declared_queue_publisher(self):
    transport = oslo_messaging.get_transport(self.conf,
                                             'kombu+memory:////')
    self.addCleanup(transport.cleanup)

    e_passive = kombu.entity.Exchange(
        name='foobar', type='topic', passive=True)
    e_active = kombu.entity.Exchange(
        name='foobar', type='topic', passive=False)

    with transport._driver._get_connection(
            driver_common.PURPOSE_SEND) as pool_conn:
        conn = pool_conn.connection
        exc = conn.connection.channel_errors[0]

        def try_send(exchange):
            conn._ensure_publishing(
                conn._publish_and_creates_default_queue,
                exchange, {}, routing_key='foobar')

        with mock.patch('kombu.transport.virtual.Channel.close'):
            # Ensure the exchange does not exist
            self.assertRaises(exc, try_send, e_passive)
            # Create it
            try_send(e_active)
            # Ensure it has been created
            try_send(e_passive)

            with mock.patch('kombu.messaging.Producer', side_effect=exc):
                # Should reset the cache and ensure the exchange does
                # not exist
                self.assertRaises(exc, try_send, e_passive)

            # Recreate it
            try_send(e_active)
            # Ensure it has been recreated
            try_send(e_passive)
def test_notify_filtered(self):
    self.config(routing_config="routing_notifier.yaml",
                group='oslo_messaging_notifications')
    routing_config = r"""
group_1:
    rpc:
        accepted_events:
            - my_event
    rpc2:
        accepted_priorities:
            - info
    bar:
        accepted_events:
            - nothing
"""
    config_file = mock.MagicMock()
    config_file.return_value = routing_config

    rpc_driver = mock.Mock()
    rpc2_driver = mock.Mock()
    bar_driver = mock.Mock()

    pm = dispatch.DispatchExtensionManager.make_test_instance(
        [extension.Extension('rpc', None, None, rpc_driver),
         extension.Extension('rpc2', None, None, rpc2_driver),
         extension.Extension('bar', None, None, bar_driver)],
    )

    with mock.patch.object(self.router, '_get_notifier_config_file',
                           config_file):
        with mock.patch('stevedore.dispatch.DispatchExtensionManager',
                        return_value=pm):
            with mock.patch('oslo_messaging.notify.'
                            '_impl_routing.LOG'):
                self.notifier.info({}, 'my_event', {})
                self.assertFalse(bar_driver.info.called)
                rpc_driver.notify.assert_called_once_with(
                    {}, mock.ANY, 'INFO', -1)
                rpc2_driver.notify.assert_called_once_with(
                    {}, mock.ANY, 'INFO', -1)
def test_main(self):
    """Is ccframe called with the given parameters?"""
    with mock.patch('sys.stdout') as mock_stdout:
        cclib.scripts.ccframe.main()

        self.assertEqual(mock_stdout.write.call_count, 2)

        df, newline = mock_stdout.write.call_args_list
        if isinstance(df[0][0], mock.MagicMock):
            self.assertEqual(df[0][0].name, 'mock.DataFrame()')
        else:
            # TODO: this is what we really should be testing
            pass
        self.assertEqual(newline[0][0], '\n')
def test_connection_ack_have_disconnected_kombu_connection(self):
    transport = oslo_messaging.get_transport(self.conf,
                                             'kombu+memory:////')
    self.addCleanup(transport.cleanup)

    with transport._driver._get_connection(amqp.PURPOSE_LISTEN) as conn:
        channel = conn.connection.channel
        with mock.patch('kombu.connection.Connection.connected',
                        new_callable=mock.PropertyMock,
                        return_value=False):
            self.assertRaises(driver_common.Timeout,
                              conn.connection.consume, timeout=0.01)
            # Ensure a new channel has been set up
            self.assertNotEqual(channel, conn.connection.channel)
def test_transport_url(self, fake_ensure_connection, fake_reset):
    transport = messaging.get_transport(self.conf, self.url)
    self.addCleanup(transport.cleanup)
    driver = transport._driver

    # NOTE(sileht): some kombu transports depend on libraries that we
    # don't want to depend on yet, because selecting the transport is
    # experimental and only amqp is supported; for example, kombu+qpid
    # depends on qpid-tools. So, mock connection.info to skip the call
    # to qpid-tools.
    with mock.patch('kombu.connection.Connection.info'):
        urls = driver._get_connection()._url.split(";")
        self.assertEqual(sorted(self.expected), sorted(urls))
def test_send_exception_remap(self):
    bad_exc = Exception("Non-oslo.messaging exception")
    transport = oslo_messaging.get_transport(self.conf,
                                             'kombu+memory:////')
    exchange_mock = mock.Mock()
    with transport._driver._get_connection(
            driver_common.PURPOSE_SEND) as pool_conn:
        conn = pool_conn.connection
        with mock.patch('kombu.messaging.Producer.publish',
                        side_effect=bad_exc):
            self.assertRaises(MessageDeliveryFailure,
                              conn._ensure_publishing,
                              conn._publish, exchange_mock, 'msg')
def test_run(self, *args):
    if self.udev_type is None:
        return

    devicetree = DeviceTree()
    data = dict()
    device = Mock()

    with patch("blivet.udev.device_get_format", return_value=self.udev_type):
        helper = self.helper_class(devicetree, data, device)
        helper.run()
        self.assertEqual(device.format.type,
                         self.blivet_type,
                         msg="FormatPopulator.run failed for %s" % self.udev_type)
def set_patches(self): self.patchers["update_sysfs_path"] = patch.object( self.device, "update_sysfs_path") self.patchers["udev"] = patch("blivet.devices.storage.udev") self.patchers["update_size"] = patch.object(self.device, "update_size") self.patchers["setup_parents"] = patch.object(self.device, "setup_parents") self.patchers["teardown_parents"] = patch.object( self.device, "teardown_parents") self.patchers["media_present"] = patch.object( self.device_class, "media_present", new=PropertyMock(return_value=True)) self.patchers["status"] = patch.object(self.device_class, "status", new=PropertyMock())
def test_no_paging_required():
    """If the "next" link is not present, the current items should be included."""
    items = [{'name': 'a'}, {'name': 'b'}, {'name': 'c'}]
    obj = RestObj(items=items, count=len(items))

    with mock.patch('sasctl.core.request') as request:
        pager = PageIterator(obj)

        # Returned page of items should preserve item order
        page = next(pager)
        for idx, item in enumerate(page):
            assert item.name == RestObj(items[idx]).name

        # No request should have been made to retrieve additional data.
        request.assert_not_called()
def test_log_filtering(caplog):
    caplog.set_level(logging.DEBUG, logger='sasctl.core.session')

    HOST = 'example.com'
    USERNAME = '******'
    PASSWORD = '******'
    ACCESS_TOKEN = 'secretaccesstoken'
    REFRESH_TOKEN = 'secretrefreshtoken'
    CLIENT_SECRET = 'clientpassword'
    CONSUL_TOKEN = 'supersecretconsultoken!'

    sensitive_data = [PASSWORD, ACCESS_TOKEN, REFRESH_TOKEN,
                      CLIENT_SECRET, CONSUL_TOKEN]

    with mock.patch('requests.Session.send') as mocked:
        # Respond to every request with a response that contains sensitive
        # data.  The access token should also be used to set session.auth.
        mocked.return_value.status_code = 200
        mocked.return_value.raise_for_status.return_value = None
        mocked.return_value.json.return_value = {
            'access_token': ACCESS_TOKEN,
            'refresh_token': REFRESH_TOKEN
        }
        mocked.return_value.url = 'http://' + HOST
        mocked.return_value.headers = {}
        mocked.return_value.body = json.dumps(
            mocked.return_value.json.return_value)
        mocked.return_value._content = mocked.return_value.body

        with Session(HOST, USERNAME, PASSWORD) as s:
            assert s.auth is not None
            assert mocked.return_value == s.get('/fakeurl')
            assert mocked.return_value == s.post(
                '/fakeurl',
                headers={'X-Consul-Token': CONSUL_TOKEN},
                json={'client_id': 'TestClient',
                      'client_secret': CLIENT_SECRET})

            # Correct token should have been set
            assert ACCESS_TOKEN == s.auth.token

    # No sensitive information should be contained in the log records
    assert len(caplog.records) > 0

    for r in caplog.records:
        for d in sensitive_data:
            assert d not in r.message
def test_auto_tags(self):
    #
    # automatically-set tags for DiskDevice
    #
    with patch('blivet.devices.disk.util') as patched_util:
        patched_util.get_sysfs_attr.return_value = None
        d = DiskDevice('test1')
        self.assertIn(Tags.local, d.tags)
        self.assertNotIn(Tags.ssd, d.tags)
        self.assertNotIn(Tags.usb, d.tags)

        patched_util.get_sysfs_attr.return_value = '1'
        d = DiskDevice('test2')
        self.assertIn(Tags.local, d.tags)
        self.assertNotIn(Tags.ssd, d.tags)

        patched_util.get_sysfs_attr.return_value = '0'
        d = DiskDevice('test2')
        self.assertIn(Tags.local, d.tags)
        self.assertIn(Tags.ssd, d.tags)

        self.assertNotIn(Tags.usb, DiskDevice('test3').tags)
        self.assertIn(Tags.usb, DiskDevice('test4', bus='usb').tags)

        #
        # automatically-set tags for networked storage devices
        #
        iscsi_kwarg_names = ["initiator", "name", "offload", "target",
                             "address", "port", "lun", "iface", "node",
                             "ibft", "nic"]
        iscsi_device = iScsiDiskDevice('test5',
                                       **dict((k, None) for k in iscsi_kwarg_names))
        self.assertIn(Tags.remote, iscsi_device.tags)
        self.assertNotIn(Tags.local, iscsi_device.tags)

        fcoe_device = FcoeDiskDevice('test6', nic=None, identifier=None)
        self.assertIn(Tags.remote, fcoe_device.tags)
        self.assertNotIn(Tags.local, fcoe_device.tags)

        zfcp_device = ZFCPDiskDevice('test7', hba_id=None, wwpn=None, fcp_lun=None)
        self.assertIn(Tags.remote, zfcp_device.tags)
        self.assertNotIn(Tags.local, zfcp_device.tags)

        multipath_device = MultipathDevice('test8', parents=[iscsi_device])
        self.assertIn(Tags.remote, multipath_device.tags)
        self.assertNotIn(Tags.local, multipath_device.tags)

        #
        # built-in tags should also be accessible as str
        #
        self.assertIn("remote", multipath_device.tags)
def test_from_pickle_with_func():
    """Create a PyMAS instance from a pickled object."""
    import pickle
    from sasctl.utils.pymas import from_pickle

    data = pickle.dumps(dummy_func)

    with mock.patch('sasctl.utils.pymas.core.PyMAS', autospec=True) as mocked:
        result = from_pickle(data)

        assert 1 == mocked.call_count
        call_args = mocked.call_args[0]
        assert [DS2Variable('x1', 'str', False),
                DS2Variable('x2', 'int', False),
                DS2Variable('out1', 'float', True)] == call_args[1]  # Variables
        assert isinstance(result, PyMAS)
def test_send_notification(self): target = oslo_messaging.Target(topic="topic_test") with mock.patch("confluent_kafka.Producer") as producer: self.driver.send_notification(target, {}, {"payload": ["test_1"]}, None, retry=3) producer.assert_called_once_with({ 'bootstrap.servers': '', 'linger.ms': mock.ANY, 'batch.num.messages': mock.ANY, 'security.protocol': 'PLAINTEXT', 'sasl.mechanism': 'PLAIN', 'sasl.username': mock.ANY, 'sasl.password': mock.ANY, 'ssl.ca.location': '' })
def test_logger_called_when_project_role_is_granted(self):
    fixture = fixtures.ProjectFixture()
    with mock.patch('waldur_core.structure.handlers.event_logger.project_role.info') as logger_mock:
        fixture.project.add_user(fixture.user, structure_models.ProjectRole.MANAGER, fixture.owner)

        logger_mock.assert_called_once_with(
            'User {affected_user_username} has gained role of {role_name} in project {project_name}.',
            event_type='role_granted',
            event_context={
                'project': fixture.project,
                'user': fixture.owner,
                'affected_user': fixture.user,
                'structure_type': 'project',
                'role_name': 'Manager',
            },
        )
def test_create_file_from_file_object(self, dummy_file):
    """Create a file from a file object."""
    with open(dummy_file) as f:
        # Filename must be provided
        with pytest.raises(ValueError):
            file = files.create_file(f)

        # Requests uses os.urandom(16) to generate boundaries for multi-part
        # form uploads.  Mock the output to ensure a consistent value so
        # that body request/responses always match.
        with mock.patch('os.urandom',
                        return_value='abcdefghijklmnop'.encode('utf-8')):
            file = files.create_file(f, filename=self.filename)

    assert isinstance(file, RestObj)
    assert self.filename == file.name
def test_get_impls_expected_defaults():
    """
    Test that ``get_impls`` forwards the expected default arguments for the
    dummy plugin.
    """
    mock_return_value = 'mock return'
    with mock.patch('smqtk.utils.plugin.get_plugins') as m_get_plugins:
        m_get_plugins.return_value = mock_return_value

        assert DummyImpl.get_impls() == mock_return_value
        m_get_plugins.assert_called_once_with(
            DummyImpl, 'SMQTK_PLUGIN_PATH', 'SMQTK_PLUGIN_CLASS',
            # Default ``warn`` value
            warn=True,
            # Default ``reload_modules`` value
            reload_modules=False)
def test_new_session(missing_packages):
    HOST = 'example.com'
    USERNAME = '******'
    PASSWORD = '******'

    # Ensure no dependency on swat is required
    with missing_packages('swat'):
        with mock.patch('sasctl.core.Session.get_token'):
            s = Session(HOST, USERNAME, PASSWORD)

            assert USERNAME == s.user
            assert HOST == s.settings['domain']
            assert 'https' == s.settings['protocol']
            assert USERNAME == s.settings['username']
            assert PASSWORD == s.settings['password']

    # Tests don't reset global variables (_session) so explicitly clean up
    current_session(None)
def test_logger_logs_project_name_and_description_when_updated(self):
    new_project = factories.ProjectFactory(description='description', name='name')

    with mock.patch('waldur_core.structure.handlers.event_logger') as logger_mock:
        new_project.name = 'new name'
        new_project.description = 'new description'
        new_project.save()

        expected_message = ('Project {project_name} has been updated.'
                            " Description has been changed from 'description' to 'new description'."
                            " Name has been changed from 'name' to 'new name'.")
        logger_mock.project.info.assert_called_once_with(
            expected_message,
            event_type='project_update_succeeded',
            event_context={
                'project': new_project,
            },
        )
def test_logger_called_when_customer_role_is_revoked(self):
    fixture = fixtures.CustomerFixture()
    owner = fixture.owner

    with mock.patch('waldur_core.structure.handlers.event_logger.customer_role.info') as logger_mock:
        fixture.customer.remove_user(owner, structure_models.CustomerRole.OWNER, fixture.staff)

        logger_mock.assert_called_once_with(
            'User {affected_user_username} has lost role of {role_name} in customer {customer_name}.',
            event_type='role_revoked',
            event_context={
                'customer': fixture.customer,
                'user': fixture.staff,
                'affected_user': fixture.owner,
                'structure_type': 'customer',
                'role_name': 'Owner',
            },
        )
def test_gdal_supported_drivers_caching(self):
    """
    Test that GDAL driver mimetype getter performs caching.
    """
    # If the expected cache attr exists already on the function, remove it
    if hasattr(get_gdal_driver_supported_mimetypes, 'cache'):
        del get_gdal_driver_supported_mimetypes.cache
    assert not hasattr(get_gdal_driver_supported_mimetypes, 'cache')

    ret1 = get_gdal_driver_supported_mimetypes()

    # A second call to the function should return the same thing but not
    # call anything from GDAL.
    with mock.patch('smqtk.algorithms.image_io.gdal_io.gdal') as m_gdal:
        ret2 = get_gdal_driver_supported_mimetypes()
        assert ret2 == ret1
        m_gdal.GetDriverCount.assert_not_called()
        m_gdal.GetDriver.assert_not_called()
def test_create_cas_destination():
    target = {'name': 'caslocal',
              'destinationType': 'cas',
              'casServerName': 'camelot',
              'casLibrary': 'round',
              'destinationTable': 'table',
              'description': None}

    with mock.patch('sasctl._services.model_publish.ModelPublish.post') as post:
        mp.create_cas_destination('caslocal',
                                  server='camelot',
                                  library='round',
                                  table='table')

        assert post.called
        json = post.call_args[1]['json']
        for k in json.keys():
            assert json[k] == target[k]
def test_logger_called_once_on_project_name_update(self):
    new_project = factories.ProjectFactory()
    old_name = new_project.name

    with mock.patch('waldur_core.structure.handlers.event_logger') as logger_mock:
        new_project.name = 'new name'
        new_project.save()

        logger_mock.project.info.assert_called_once_with(
            "Project {project_name} has been updated. Name has been changed from '%s' to '%s'." % (
                old_name,
                new_project.name,
            ),
            event_type='project_update_succeeded',
            event_context={
                'project': new_project,
            },
        )
def test_iterconsume_timeout(self):
    transport = messaging.get_transport(self.conf,
                                        'kombu+memory:////')
    self.addCleanup(transport.cleanup)
    deadline = time.time() + 3
    with transport._driver._get_connection(amqp.PURPOSE_LISTEN) as conn:
        conn.iterconsume(timeout=3)
        # The kombu memory transport doesn't really raise errors,
        # so just simulate a real driver's behavior
        conn.connection.connection.recoverable_channel_errors = (IOError,)
        conn.declare_fanout_consumer("notif.info", lambda msg: True)
        with mock.patch('kombu.connection.Connection.drain_events',
                        side_effect=IOError):
            try:
                conn.consume(timeout=3)
            except driver_common.Timeout:
                pass

        self.assertEqual(0, int(deadline - time.time()))
def test_notification(self):
    m = middleware.RequestNotifier(FakeApp())
    req = webob.Request.blank('/foo/bar',
                              environ={'REQUEST_METHOD': 'GET',
                                       'HTTP_X_AUTH_TOKEN': uuid.uuid4()})
    with mock.patch(
            'oslo_messaging.notify.notifier.Notifier._notify') as notify:
        m(req)

        # Check first notification with only 'request'
        call_args = notify.call_args_list[0][0]
        self.assertEqual('http.request', call_args[1])
        self.assertEqual('INFO', call_args[3])
        self.assertEqual(set(['request']), set(call_args[2].keys()))

        request = call_args[2]['request']
        self.assertEqual('/foo/bar', request['PATH_INFO'])
        self.assertEqual('GET', request['REQUEST_METHOD'])
        self.assertIn('HTTP_X_SERVICE_NAME', request)
        self.assertNotIn('HTTP_X_AUTH_TOKEN', request)
        self.assertFalse(any(map(lambda s: s.startswith('wsgi.'),
                                 request.keys())),
                         "WSGI fields are filtered out")

        # Check second notification with request + response
        call_args = notify.call_args_list[1][0]
        self.assertEqual('http.response', call_args[1])
        self.assertEqual('INFO', call_args[3])
        self.assertEqual(set(['request', 'response']),
                         set(call_args[2].keys()))

        request = call_args[2]['request']
        self.assertEqual('/foo/bar', request['PATH_INFO'])
        self.assertEqual('GET', request['REQUEST_METHOD'])
        self.assertIn('HTTP_X_SERVICE_NAME', request)
        self.assertNotIn('HTTP_X_AUTH_TOKEN', request)
        self.assertFalse(any(map(lambda s: s.startswith('wsgi.'),
                                 request.keys())),
                         "WSGI fields are filtered out")

        response = call_args[2]['response']
        self.assertEqual('200 OK', response['status'])
        self.assertEqual('13', response['headers']['content-length'])
def mocked_importer(packages):
    builtin_import = __import__

    # Accept a single string or an iterable of strings
    if isinstance(packages, str):
        packages = [packages]

    # Import hook that fails to load the specified packages but otherwise
    # behaves normally
    def _import(name, *args, **kwargs):
        if any(name == package for package in packages):
            raise ImportError()
        return builtin_import(name, *args, **kwargs)

    with mock.patch(six.moves.builtins.__name__ + '.__import__',
                    side_effect=_import):
        yield
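# A minimal, self-contained sketch of the import-blocking pattern used by
# `mocked_importer` above, assuming it is exposed as a context manager (the
# bare `yield` implies a @contextlib.contextmanager wrapper elsewhere).  The
# names `block_import` and 'somepackage' are illustrative only, not part of
# the original suite; 'builtins.__import__' is the Python 3 spelling of the
# six.moves target patched above.
import contextlib
from unittest import mock


@contextlib.contextmanager
def block_import(package):
    # Capture the real import machinery before patching it
    builtin_import = __import__

    def _import(name, *args, **kwargs):
        if name == package:
            raise ImportError(name)
        return builtin_import(name, *args, **kwargs)

    with mock.patch('builtins.__import__', side_effect=_import):
        yield


# Usage: importing the blocked package fails inside the context, and normal
# import behavior is restored once the context exits.
with block_import('somepackage'):
    try:
        import somepackage  # noqa: F401  (hypothetical package name)
    except ImportError:
        pass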
def test_db_file_corrupted():
    with tempfile.TemporaryDirectory() as cachedir:
        unc = 'sqlite:///' + cachedir + '/test.db'
        with mock.patch(
                'ansibullbot.utils.sqlite_utils.C.DEFAULT_DATABASE_UNC', unc):
            # create the initial file
            ADB1 = AnsibullbotDatabase(cachedir=cachedir)

            unc_file = ADB1.unc
            unc_file = unc_file.replace('sqlite:///', '')
            with open(unc_file, 'w') as f:
                f.write('NULLNULLNULLNULL')

            # now try to init again
            ADB2 = AnsibullbotDatabase(cachedir=cachedir)
            assert os.path.exists(ADB2.dbfile)
def test_pickle_input():
    """pyml2ds should accept a binary pickle string as input."""
    import pickle
    from sasctl.utils.pyml2ds import pyml2ds

    # The target "model" to use
    target = {'msg': 'hello world'}

    # Pickle the "model" to a binary string
    in_file = pickle.dumps(target)
    out_file = 'model.sas'

    with mock.patch('sasctl.utils.pyml2ds.core._check_type') as check:
        check.translate.return_value = 'translated'
        pyml2ds(in_file, out_file)

        # Verify that _check_type was called with the "model"
        assert check.call_count == 1
        assert check.call_args[0][0] == target
def test_update_model_performance(self, sklearn_linear_model, cas_session):
    from six.moves import mock
    from sasctl.tasks import update_model_performance

    lm, X, y = sklearn_linear_model

    # Score & set output var
    train_df = X.copy()
    train_df['var1'] = lm.predict(X)
    train_df['Price'] = y

    with mock.patch('swat.CAS') as CAS:
        CAS.return_value = cas_session

        # NOTE: can only automate testing of 1 period at a time since
        # upload_model_performance closes the CAS session when it's done.
        for period in ['q12019']:
            sample = train_df.sample(frac=0.1)
            update_model_performance(sample, self.MODEL_NAME, period)
def test_update_item():
    from sasctl.core import _build_crud_funcs, RestObj

    _, _, update_item, _ = _build_crud_funcs('/widget')

    target = RestObj({'name': 'Test Widget', 'id': 12345})

    with mock.patch('sasctl.core.request') as request:
        request.return_value = target

        # ETag should be required
        with pytest.raises(ValueError):
            resp = update_item(target)

        target._headers = {'etag': 'abcd'}
        resp = update_item(target)

        assert request.call_count == 1
        assert ('put', '/widget/12345') == request.call_args[0]
        assert target == resp
def test_logger(self, mock_utcnow):
    fake_transport = oslo_messaging.get_notification_transport(self.conf)
    with mock.patch('oslo_messaging.transport._get_transport',
                    return_value=fake_transport):
        self.logger = oslo_messaging.LoggingNotificationHandler('test://')

        mock_utcnow.return_value = datetime.datetime.utcnow()

        levelno = getattr(logging, self.priority.upper(), 42)

        record = logging.LogRecord('foo', levelno, '/foo/bar', 42,
                                   'Something happened', None, None)
        self.logger.emit(record)

        context = oslo_messaging.notify._impl_test.NOTIFICATIONS[0][0]
        self.assertEqual({}, context)

        n = oslo_messaging.notify._impl_test.NOTIFICATIONS[0][1]
        self.assertEqual(getattr(self, 'queue', self.priority.upper()),
                         n['priority'])
        self.assertEqual('logrecord', n['event_type'])
        self.assertEqual(str(timeutils.utcnow()), n['timestamp'])
        self.assertIsNone(n['publisher_id'])
        self.assertEqual(
            {'process': os.getpid(),
             'funcName': None,
             'name': 'foo',
             'thread': None,
             'levelno': levelno,
             'processName': 'MainProcess',
             'pathname': '/foo/bar',
             'lineno': 42,
             'msg': 'Something happened',
             'exc_info': None,
             'levelname': logging.getLevelName(levelno),
             'extra': None},
            n['payload'])
def __init__(self, _path, *args, **kwargs):
    # type: (str, *object, **object) -> None
    """
    Args:
        _path: key in the `_factory_map` dict of the method to swap a mock
            in for (should be an import path)
        *args, **kwargs: passed through to the mock factory used to
            generate a replacement mock to swap in
    """
    global _factory_map, _mocks
    _pre_import()
    factory = _factory_map[_path]
    new_mock = factory(*args, **kwargs)
    super(SwapMockContextDecorator, self).__init__(
        mock.patch(_path, new=new_mock),
        mock.patch.dict(_mocks, {_path: new_mock}),
    )
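# A hedged sketch of the register-and-swap pattern SwapMockContextDecorator
# implements, assuming the base class simply enters both patchers it is given.
# `_factory_map`, `_mocks`, `swap_mock`, and the 'json.dumps' target below are
# stand-ins for illustration, not the original project's registry contents.
import contextlib
from unittest import mock

_factory_map = {'json.dumps': mock.Mock}  # import path -> mock factory
_mocks = {}  # currently-active replacement mocks, keyed by import path


@contextlib.contextmanager
def swap_mock(path, *args, **kwargs):
    new_mock = _factory_map[path](*args, **kwargs)
    # Mirror the two mock.patch calls passed to super().__init__ above:
    # patch the target itself and record the active mock in the registry.
    with mock.patch(path, new=new_mock), \
            mock.patch.dict(_mocks, {path: new_mock}):
        yield new_mock


import json

with swap_mock('json.dumps', return_value='{}') as m:
    assert json.dumps({'a': 1}) == '{}'   # the swapped mock is in effect
    assert _mocks['json.dumps'] is m      # and registered while active
assert 'json.dumps' not in _mocks         # registry restored on exit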
def test_request_formats():
    from requests import Response
    import sasctl
    from sasctl.core import request, RestObj

    response = Response()
    response.status_code = 200
    response._content = '{"name": "test"}'.encode('utf-8')

    with mock.patch('sasctl.core.Session') as mock_sess:
        mock_sess.request.return_value = response

        resp = request('GET', 'example.com', session=mock_sess,
                       format='response')
        assert mock_sess.request.call_count == 1
        assert isinstance(resp, Response)

        with pytest.warns(DeprecationWarning):
            resp = request('GET', 'example.com', session=mock_sess, raw=True)

            # Make sure the old param is eventually cleaned up
            if sasctl.__version__.startswith('1.6'):
                pytest.fail("Deprecated 'raw' parameter should be removed.")
            assert isinstance(resp, Response)

        resp = request('GET', 'example.com', session=mock_sess, format='json')
        assert isinstance(resp, dict)
        assert resp['name'] == 'test'

        resp = request('GET', 'example.com', session=mock_sess, format='text')
        assert resp == '{"name": "test"}'

        resp = request('GET', 'example.com', session=mock_sess,
                       format='content')
        assert resp == response._content

        resp = request('GET', 'example.com', session=mock_sess, format=None)
        assert isinstance(resp, RestObj)
        assert resp.name == 'test'
def test_regular_variants(self):
    """Make sure non-hashed variants work."""
    # Simulate a call to rez-pip where the user had written
    # ``hashed_variants = False``
    normal_rez_pip_arguments = {
        "commands": "env.PYTHONPATH.append('{root}/python')",
        "help": [["Home Page", u"https://github.com/jaraco/zipp"]],
        "hashed_variants": True,
        "description": u"Backport of pathlib-compatible object wrapper for zip files",
        "is_pure_python": True,
        "from_pip": True,
        "version": "1.2.0",
        "authors": [u"Jason R. Coombs ([email protected])"],
        "variants": [["python-2.7", "contextlib2"]],
        "pip_name": u"zipp (1.2.0)",
        "name": u"zipp",
    }

    mocked_rez_pip_arguments = copy.copy(normal_rez_pip_arguments)
    mocked_rez_pip_arguments["hashed_variants"] = False

    import_to_mock = "rez.package_maker.PackageMaker._get_data"

    try:
        from rez import package_maker as _
    except ImportError:
        import_to_mock = "rez.package_maker__.PackageMaker._get_data"

    with mock.patch(import_to_mock) as patcher:
        patcher.return_value = mocked_rez_pip_arguments

        directory = tempfile.mkdtemp(prefix="rez_pip_boy_",
                                     suffix="_test_regular_variants")
        atexit.register(functools.partial(shutil.rmtree, directory))

        _run_command(
            "rez_pip_boy --install zipp==1.2.0 --python-version=2.7 -- {directory}".format(
                directory=directory))

        source_directory = os.path.join(directory, "zipp", "1.2.0")
        self._verify_source_package(source_directory, [["python-2.7", "contextlib2"]])
def test_from_pickle_with_class():
    """Create a PyMAS instance from a pickled object."""
    import pickle
    from sasctl.utils.pymas import from_pickle

    data = pickle.dumps(DummyClass())

    # No function specified
    with pytest.raises(ValueError):
        result = from_pickle(data)

    with mock.patch('sasctl.utils.pymas.core.PyMAS', autospec=True) as mocked:
        result = from_pickle(data, 'func')

        assert 1 == mocked.call_count
        call_args = mocked.call_args[0]
        assert [DS2Variable('x1', 'str', False),
                DS2Variable('x2', 'int', False),
                DS2Variable('out1', 'float', True)] == call_args[1]  # Variables
        assert isinstance(result, PyMAS)
def test_lvm_vdo_pool(self):
    pv = StorageDevice("pv1", fmt=blivet.formats.get_format("lvmpv"),
                       size=Size("1 GiB"), exists=True)
    vg = LVMVolumeGroupDevice("testvg", parents=[pv])
    pool = LVMLogicalVolumeDevice("testpool", parents=[vg], size=Size("512 MiB"),
                                  seg_type="vdo-pool", exists=True)
    self.assertTrue(pool.is_vdo_pool)

    free = vg.free_space
    lv = LVMLogicalVolumeDevice("testlv", parents=[pool], size=Size("2 GiB"),
                                seg_type="vdo", exists=True)
    self.assertTrue(lv.is_vdo_lv)
    self.assertEqual(lv.vg, vg)
    self.assertEqual(lv.pool, pool)

    # free space in the vg shouldn't be affected by the vdo lv
    self.assertEqual(lv.vg_space_used, 0)
    self.assertEqual(free, vg.free_space)

    self.assertListEqual(pool.lvs, [lv])

    # now try to destroy both the pool and the vdo lv
    # for the lv this should be a no-op, destroying the pool should destroy both
    with patch("blivet.devices.lvm.blockdev.lvm") as lvm:
        lv.destroy()
        lv.remove_hook()
        self.assertFalse(lv.exists)
        self.assertFalse(lvm.lvremove.called)
        self.assertListEqual(pool.lvs, [])

        pool.destroy()
        self.assertFalse(pool.exists)
        self.assertTrue(lvm.lvremove.called)
def test_listen_for_notifications(self):
    targets_and_priorities = [
        (oslo_messaging.Target(topic="topic_test_1"), "sample"),
    ]
    expected_topics = ["topic_test_1.sample"]

    with mock.patch("kafka.KafkaConsumer") as consumer:
        self.driver.listen_for_notifications(
            targets_and_priorities, "kafka_test", 1000, 10)
        consumer.assert_called_once_with(
            *expected_topics,
            group_id="kafka_test",
            enable_auto_commit=mock.ANY,
            bootstrap_servers=['localhost:9092'],
            max_partition_fetch_bytes=mock.ANY,
            max_poll_records=mock.ANY,
            security_protocol='PLAINTEXT',
            sasl_mechanism='PLAIN',
            sasl_plain_username=mock.ANY,
            sasl_plain_password=mock.ANY,
            ssl_cafile='',
            selector=mock.ANY)