def test_handle_lirc_code(self):
    """Exercise the LIRC code handler's press and release dispatch."""
    sensor = self.infrared_sensor

    # A registered event callback fires only when its code arrives.
    sensor.register("start", self.mock_callback)
    sensor._handle_lirc_code(None)
    self.mock_callback.assert_not_called()
    sensor._handle_lirc_code(['start'])
    self.mock_callback.assert_called_once_with()

    # A registration may carry an additional release callback.
    self.mock_callback.reset_mock()
    stop_cb = MagicMock()
    stop_release_cb = MagicMock()
    sensor.register("stop", stop_cb, stop_release_cb)
    sensor._handle_lirc_code(None)
    stop_cb.assert_not_called()
    stop_release_cb.assert_not_called()
    sensor._handle_lirc_code(['stop'])
    stop_cb.assert_called_once_with()
    stop_release_cb.assert_not_called()

    # Once the stop code is no longer provided, the release callback fires
    # exactly once; a further empty read does not call it again, and the
    # normal callback stays untouched.
    stop_cb.reset_mock()
    sensor._handle_lirc_code(None)
    sensor._handle_lirc_code(None)
    stop_cb.assert_not_called()
    stop_release_cb.assert_called_once_with()

    # The start button was never pressed again, so its callback stays idle.
    self.mock_callback.assert_not_called()
class TestRest:
    """Unit tests for RestServer's delegation to the client mediator."""

    def __init__(self):
        self.registry = MagicMock(Registry)
        self.client_mediator = MagicMock(ClientMediator)
        self.rest = None

    def setup(self):
        # Fresh mediator state and a fresh server instance for every test.
        self.client_mediator.reset_mock()
        self.rest = RestServer(self.client_mediator)

    def test_register_package(self):
        self.rest.register_package(TEST_PACKAGE)
        self.client_mediator.handle_registration.assert_called_once_with(
            TEST_PACKAGE, TEST_PACKAGE)

    def test_unregister(self):
        name = 'Foo'
        self.rest.unregister_package(name)
        self.client_mediator.handle_unregistration.assert_called_once_with(name)

    def test_invoke(self):
        # NOTE(review): no assertion here — this only verifies invoke() does
        # not raise; consider asserting the mediator interaction as well.
        svc = 'Foo'
        meth = 'test'
        payload = {"a": 1}
        self.rest.invoke(TEST_PACKAGE, svc, meth, payload)

    def test_run(self):
        self.rest.run()
        bottle.run.assert_called_once()
def test_VERB_methods(self):
    """Each HTTP verb helper forwards to Session.request under its own name."""
    link = 'http://sixtyten.org/verby/'
    data = dict(up='down', left='right')
    session = MagicMock(spec=Session)
    resource = Resource(session, link)

    # (verb, timeout, extra keyword arguments)
    scenarios = [
        ('DELETE', 3, {}),
        ('GET', 4, {}),
        ('PATCH', 5, {'data': data}),
        ('POST', 6, {'data': data}),
        ('PUT', 7, {'data': data}),
    ]
    for verb, timeout, extra in scenarios:
        getattr(resource, verb)(timeout=timeout, **extra)
        session.request.assert_called_once_with(verb, link,
                                                timeout=timeout, **extra)
        session.reset_mock()
def test_remove_container(mockDocker, mockOutput):
    """
    Verify remove_container force-removes the container via the docker client.
    """
    chute = MagicMock()
    chute.name = 'test'
    cntr = MagicMock()
    docker_client = MagicMock()
    docker_client.containers.get.return_value = cntr
    svc = MagicMock()
    mockDocker.return_value = docker_client

    dockerapi.remove_container(chute, svc)

    mockDocker.assert_called_once_with(base_url='unix://var/run/docker.sock',
                                       version='auto')
    cntr.remove.assert_called_once_with(force=True)
    # client.images.remove.assert_called_once()
    assert chute.complete.call_count == 0

    # An error raised by the docker client should surface to the caller.
    docker_client.reset_mock()
    mockDocker.side_effect = Exception('Test')
    try:
        dockerapi.remove_container(chute, svc)
    except Exception as e:
        assert e.message == 'Test'
def test_instance_callbacks_are_executed_once(self):
    """listen_once callbacks run on the next event only, then are cleared."""
    ODMModel.set_pm(MagicMock())
    odm = ODMModel({"_id": "2323", "pippo": "pluto"})
    odm.pippo = MagicMock()
    odm.pluto = MagicMock()
    sempronio = MagicMock()

    odm.listen_once("before_save", "pippo")
    odm.listen_once("before_save", "pluto")
    odm.listen_once("before_save", sempronio)
    odm.save()
    odm.pippo.assert_called_once_with(odm)
    odm.pluto.assert_called_once_with(odm)
    sempronio.assert_called_once_with(odm)
    # The one-shot list for save must be empty afterwards.
    self.assertEqual(odm.before_callbacks_single["save"], [])

    odm.pippo.reset_mock()
    odm.pluto.reset_mock()
    sempronio.reset_mock()
    odm.listen_once("after_destroy", "pippo")
    odm.listen_once("after_destroy", sempronio)
    odm.destroy()
    odm.pippo.assert_called_once_with(odm)
    sempronio.assert_called_once_with(odm)
    # pluto was not registered for destroy, so it stays untouched.
    self.assertEqual(odm.pluto.call_count, 0)
    self.assertEqual(odm.after_callbacks_single["destroy"], [])
def test_resolve_room_categories(self):
    """Category resolution runs once per floor and returns the floor count."""
    a_floor = MagicMock()
    should_be_called = MagicMock(return_value=True)
    cats = MagicMock(return_value="cats_result")
    class_ns = "__main__.DXFRoomCatsResolver"
    with patch(class_ns + ".get_room_categories_dict", cats):
        with patch(class_ns + "._resolve_room_categories_for_floor",
                   should_be_called):
            # Case 1 - a single floor passed explicitly as a parameter.
            r = DXFRoomCatsResolver.resolve_room_categories(MagicMock(), a_floor)
            self.assertEqual(r, 1)
            cats.assert_called_once_with()
            should_be_called.assert_called_once_with(a_floor, "cats_result")
            should_be_called.reset_mock()

            # Case 2 - floors taken from the data dictionary.
            b = {"dxf": {"floors": ["firstfloor", "secondfloor", "thirdfloor"]}}
            r = DXFRoomCatsResolver.resolve_room_categories(b)
            for f in b["dxf"]["floors"]:
                should_be_called.assert_any_call(f, "cats_result")
            self.assertEqual(r, 3)
def test_check_archiver_errors(self, isdir_mock, listdir_mock):
    """check_archiver_errors classifies leftover files in the errors dir."""
    server = build_real_server()
    check_strategy = MagicMock()

    # (directory contents, expected success flag, expected detail message)
    scenarios = [
        ([], True, None),
        (["testing.duplicate"], False, "duplicates: 1"),
        (["testing.unknown"], False, "unknown: 1"),
        (["testing.error"], False, "not relevant: 1"),
        (["testing.wrongextension"], False, "unknown failure: 1"),
    ]
    for files, success, detail in scenarios:
        check_strategy.reset_mock()
        listdir_mock.return_value = files
        server.check_archiver_errors(check_strategy)
        check_strategy.result.assert_called_with(
            "main", "archiver errors", success, detail)
def test_logs_subcommand():
    """The `logs` magic returns None and routes output through the display."""
    get_logs_method = MagicMock()
    log_text = ""
    get_logs_method.return_value = (True, log_text)
    spark_controller.get_logs = get_logs_method
    name = "sessions_name"
    line = " ".join(["logs -s", name])
    cell = "cell code"

    # Success path: logs are written to the display.
    result = magic.spark(line, cell)
    get_logs_method.assert_called_once_with(name)
    assert result is None
    ipython_display.write.assert_called_once_with(log_text)

    # Failure path: the error is reported through send_error instead.
    get_logs_method.reset_mock()
    get_logs_method.return_value = (False, log_text)
    result = magic.spark(line, cell)
    get_logs_method.assert_called_once_with(name)
    assert result is None
    ipython_display.send_error.assert_called_once_with(log_text)
def test_magic_methods_fspath(self): mock = MagicMock() expected_path = mock.__fspath__() mock.reset_mock() self.assertEqual(os.fspath(mock), expected_path) mock.__fspath__.assert_called_once()
def test_wait_for_providers_task(self, mock_export_run):
    """wait_for_providers_task fires the callback only when every provider
    task finished successfully, and raises for a missing run record.

    NOTE(review): the last case stubs ``__nonzero__`` (Python 2 truth
    protocol); on Python 3 this would need ``__bool__`` — confirm the
    target interpreter.
    """
    mock_run_uid = str(uuid.uuid4())
    # All provider tasks report SUCCESS -> the callback gets applied.
    mock_provider_task = Mock(status=TaskStates.SUCCESS.value)
    mock_export_run.objects.filter().first.return_value = Mock()
    mock_export_run.objects.filter().first().provider_tasks.filter.return_value = [mock_provider_task]
    callback_task = MagicMock()
    apply_args = {"arg1": "example_value"}
    wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
    callback_task.apply_async.assert_called_once_with(**apply_args)

    # A provider task still RUNNING must not trigger the callback.
    callback_task.reset_mock()
    mock_provider_task = Mock(status=TaskStates.RUNNING.value)
    mock_export_run.objects.filter().first.return_value = Mock()
    mock_export_run.objects.filter().first().provider_tasks.filter.return_value = [mock_provider_task]
    wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
    callback_task.apply_async.assert_not_called()

    # A falsy (missing) run record should raise.
    with self.assertRaises(Exception):
        mock_export_run.reset_mock()
        mock_export_run.objects.filter().first().__nonzero__.return_value = False
        wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
def test_iface_discovery(mock_import, mock_provider_check):
    """test iface discovery"""
    # With no explicit provider, the detected provider's iface module is used.
    mock_provider_check.return_value = 'debian'
    debian_iface_mod = MagicMock()
    debian_iface_mod.iface.return_value = 'its a debian bridge'
    module_map = {'netshowlib.debian.iface': debian_iface_mod}
    mock_import.side_effect = mod_args_generator(module_map)
    assert_equals(nn.iface('eth1'), 'its a debian bridge')

    # An explicit providername bypasses provider detection.
    debian_iface_mod = MagicMock()
    debian_iface_mod.iface.return_value = 'its a debian bridge'
    module_map['netshowlib.debian.iface'] = debian_iface_mod
    assert_equals(nn.iface('eth1', providername='debian'),
                  'its a debian bridge')

    # With a cache supplied and provider detection active, the cache is
    # forwarded to the provider's iface call.
    debian_iface_mod.reset_mock()
    debian_iface_mod = MagicMock()
    debian_iface_mod.iface.return_value = 'its a debian bridge'
    debian_cache_mod = MagicMock()
    module_map = {'netshowlib.debian.iface': debian_iface_mod,
                  'netshowlib.debian.cache': debian_cache_mod}
    mock_import.side_effect = mod_args_generator(module_map)
    all_cache = nn.feature_cache()
    assert_equals(nn.iface('eth1', cache=all_cache), 'its a debian bridge')
    # Confirm the iface call accepted the cache keyword.
    debian_iface_mod.iface.assert_called_with('eth1', cache=all_cache)
def test_overwrite_filelocking():
    """overwrite() must hold an exclusive lock while writing, then unlock."""
    path = 'tests/fixtures/empty_repo.conf'
    mocked_path = MagicMock()
    mocked_path.__str__ = lambda x: path
    mocked_fcntl = MagicMock()
    mocked_open = MagicMock()
    with patch('pyolite.repo.open', mocked_open, create=True):
        handle = mocked_open.return_value.__enter__.return_value
        # The write stub proves LOCK_EX was acquired before any write happened.
        handle.write = lambda text: ([
            mocked_fcntl.flock.assert_called_once_with(handle,
                                                       mocked_fcntl.LOCK_EX),
            mocked_fcntl.reset_mock()
        ])
        with patch.multiple('pyolite.repo', fcntl=mocked_fcntl):
            repo = Repo(path)
            mocked_fcntl.reset_mock()
            repo.overwrite('some_text')
            # After writing, the lock must have been released.
            mocked_fcntl.flock.assert_called_once_with(handle,
                                                       mocked_fcntl.LOCK_UN)
def test_replace_filelocking():
    """replace() locks the file exclusively while editing, then unlocks."""
    mocked_re = MagicMock()
    mocked_fcntl = MagicMock()
    mocked_open = MagicMock()
    path = 'tests/fixtures/config.conf'
    with patch('pyolite.repo.open', mocked_open, create=True):
        handle = mocked_open.return_value.__enter__.return_value
        # The read stub proves LOCK_EX was taken before the file is read.
        handle.read = lambda: ([
            mocked_fcntl.flock.assert_called_once_with(handle,
                                                       mocked_fcntl.LOCK_EX),
            mocked_fcntl.reset_mock()
        ])
        with patch.multiple('pyolite.repo', re=mocked_re, fcntl=mocked_fcntl):
            repo = Repo(path)
            mocked_fcntl.reset_mock()
            repo.replace('pattern', 'string')
            # The lock must be released once the edit completes.
            mocked_fcntl.flock.assert_called_once_with(handle,
                                                       mocked_fcntl.LOCK_UN)
def test_users_filelocking():
    """Accessing repo.users locks the file for reading, then unlocks."""
    path = 'tests/fixtures/repo_users.conf'
    mocked_path = MagicMock()
    mocked_path.__str__ = lambda x: path
    mocked_path.exists.return_value = True
    mocked_re = MagicMock()
    mocked_fcntl = MagicMock()
    mocked_open = MagicMock()
    with patch('pyolite.repo.open', mocked_open, create=True):
        handle = mocked_open.return_value.__enter__.return_value
        # The read stub proves LOCK_EX was taken before the file is read.
        handle.read = lambda: ([
            mocked_fcntl.flock.assert_called_once_with(handle,
                                                       mocked_fcntl.LOCK_EX),
            mocked_fcntl.reset_mock()
        ])
        with patch.multiple('pyolite.repo', re=mocked_re, fcntl=mocked_fcntl):
            repo = Repo(mocked_path)
            mocked_fcntl.reset_mock()
            repo.users
            # The lock must be released after the read completes.
            mocked_fcntl.flock.assert_called_once_with(handle,
                                                       mocked_fcntl.LOCK_UN)
def test_report_changes(self):
    """report_changes wraps the change in a BlockChanges queue message."""
    change = [["path"], "value"]
    spawner = MagicMock()
    proc = Process("proc", spawner)
    spawner.reset_mock()
    proc.report_changes(change)
    proc.q.put.assert_called_once_with(BlockChanges(changes=[change]))
def test_users_filelocking():
    """Reading repo.objects takes LOCK_EX first and ends with LOCK_UN."""
    path = 'tests/fixtures/repo_users.conf'
    mocked_path = MagicMock()
    mocked_path.__str__ = lambda x: path
    mocked_path.exists.return_value = True
    mocked_re = MagicMock()
    mocked_re.return_value = 'another_text'
    mocked_re.finditer.return_value = []
    mocked_fcntl = MagicMock()
    mocked_open = MagicMock()
    with patch('pyolite.abstracts.config.open', mocked_open, create=True):
        stream = mocked_open.return_value.__enter__.return_value
        # The read stub proves the exclusive lock was taken before reading.
        stream.read = lambda: ([
            mocked_fcntl.assert_called_once_with(stream, fcntl.LOCK_EX),
            mocked_fcntl.reset_mock()
        ])
        repo = Repo(mocked_path)
        with patch.object(repo, 'regex', mocked_re), \
                patch('fcntl.flock', mocked_fcntl):
            mocked_fcntl.reset_mock()
            repo.objects
            # The lock must be released after the read completes.
            mocked_fcntl.assert_called_once_with(stream, fcntl.LOCK_UN)
def test_edit_broadcasts(self):
    """Changing a subset's style broadcasts exactly one update."""
    subset_group = self.dc.new_subset_group()
    broadcast = MagicMock()
    subset_group.subsets[0].broadcast = broadcast
    broadcast.reset_mock()
    subset_group.subsets[0].style.color = 'red'
    assert broadcast.call_count == 1
def test_failHard(self, infoCalls, jID=666, jStat="Done", inFiles=None, ofStat=["Exists"]):
    """Test the job.failHard function.

    Parametrized over the expected TransformationInfo method calls
    (infoCalls), job ID, job status, input files and output-file status.

    NOTE(review): ``ofStat=["Exists"]`` is a mutable default argument;
    harmless here because the list is never mutated, but a tuple would
    be safer.
    """
    from DIRAC.TransformationSystem.Utilities.TransformationInfo import TransformationInfo
    from DIRAC.TransformationSystem.Utilities.JobInfo import JobInfo
    tInfoMock = Mock(name="tInfoMock", spec=TransformationInfo)
    tInfoMock.reset_mock()
    # Build a job that produced an output file also used by other tasks.
    testJob = JobInfo(jobID=666, status=jStat, tID=123, tType="MCSimulation")
    testJob.outputFiles = ["/my/stupid/file.lfn"]
    testJob.outputFileStatus = ofStat
    testJob.otherTasks = True
    testJob.inputFiles = inFiles
    testJob.inputFileExists = True
    testJob.fileStatus = "Processed"
    self.dra.inputFilesProcessed = set()
    self.dra._DataRecoveryAgent__failJobHard(testJob, tInfoMock)  # pylint: disable=protected-access, no-member
    gLogger.notice("Expecting calls", infoCalls)
    gLogger.notice("Called", tInfoMock.method_calls)
    # Every expected call must appear on the mock, in the same order.
    assert len(infoCalls) == len(tInfoMock.method_calls)
    for index, infoCall in enumerate(infoCalls):
        self.assertIn(infoCall, tInfoMock.method_calls[index])
    # Only jobs in status "Done" produce a "Failing job" notification.
    if jStat == "Done":
        self.assertIn("Failing job %s" % jID, self.dra.notesToSend)
    else:
        self.assertNotIn("Failing job %s" % jID, self.dra.notesToSend)
def test_failAndCleanUpDocker(mockDocker, mockOutput):
    """ Test that the failure and clean up function does its job. """
    client = MagicMock()
    mockDocker.return_value = client
    # Call clean up with empty sets, matching sets, and different sets for
    # valid and current images, and test each case.
    for pair in [[[], []],
                 [[1, 2, 3], [1, 2, 3]],
                 [[{'Id': 1}, {'Id': 2}, {'Id': 3}],
                  [{'Id': 1}, {'Id': 2}, {'Id': 3}, {'Id': 4}, {'Id': 5}]]]:
        # Fake that docker reports the second list of the pair as the
        # current images and containers on the device.
        client.containers.return_value = pair[1]
        client.images.return_value = pair[1]
        try:
            dockerapi.failAndCleanUpDocker(pair[0], pair[0])
        except Exception as e:
            # We should always see this exception.
            assert e.message == 'Building or starting of docker image failed check your Dockerfile for errors.'
        mockDocker.assert_called_with(base_url='unix://var/run/docker.sock', version='auto')
        client.containers.assert_called_once_with(quiet=True, all=True)
        client.images.assert_called_once_with(quiet=True, all=False)
        if pair[1] == pair[0]:
            # Current state matches the valid set: nothing gets removed.
            assert client.remove_image.call_count == 0
            assert client.remove_container.call_count == 0
        else:
            # Entries 4 and 5 are extraneous and must both be removed.
            img_expected = "[call(image={'Id': 4}), call(image={'Id': 5})]"
            cntr_expected = "[call(container=4), call(container=5)]"
            assert str(client.remove_image.call_args_list) == img_expected
            assert str(client.remove_container.call_args_list) == cntr_expected
            assert client.remove_image.call_count == 2
            assert client.remove_container.call_count == 2
        client.reset_mock()
class RobotTests(object):
    """Shared helpers for robot sense tests driven by a mocked connection."""

    def setup_helper(self):
        # Wire a MorseSense onto a fully mocked connection and start it.
        self.conn = MagicMock()
        self.sensor_state = defaultdict(int)
        self.sense = MorseSense(self.conn, self.sensor_state)
        self.conn.subscribe = MagicMock(
            side_effect=self.subscribe_side_effect()
        )
        # BUG FIX: was `self.conn.unscribe` (typo), which installed the stub
        # under the wrong name; assert_stop reads `self.conn.unsubscribe`.
        self.conn.unsubscribe = MagicMock()
        self.sense.start(.001)

    def assert_bot_detection(self, robot):
        # The shared sensor state must reflect the detected robot.
        self.assertEqual(self.sensor_state['robot'], robot)

    def assert_sensor_data(self, decoded_sensor):
        # Every decoded reading must be mirrored in the shared sensor state.
        for key, value in decoded_sensor.iteritems():
            self.assertEqual(self.sensor_state[key], value)

    def assert_start(self):
        self.assertEqual(self.conn.subscribe.call_count, 2)

    def assert_stop(self):
        self.assertEqual(self.conn.subscribe.call_count, 2)
        self.conn.reset_mock()
        self.sense.unsubscribe()
        self.assertEqual(self.conn.unsubscribe.call_count, 2)
def test_overwrite_filelocking():
    """overwrite() takes an exclusive lock before writing and unlocks after."""
    path = 'tests/fixtures/empty_repo.conf'
    mocked_path = MagicMock()
    mocked_path.__str__ = lambda x: path
    mocked_fcntl = MagicMock()
    mocked_open = MagicMock()
    with patch('pyolite.repo.open', mocked_open, create=True):
        stream = mocked_open.return_value.__enter__.return_value
        # The write stub verifies LOCK_EX was acquired before any write.
        stream.write = lambda text: ([
            mocked_fcntl.flock.assert_called_once_with(stream,
                                                       mocked_fcntl.LOCK_EX),
            mocked_fcntl.reset_mock()
        ])
        with patch.multiple('pyolite.repo', fcntl=mocked_fcntl):
            repo = Repo(path)
            mocked_fcntl.reset_mock()
            repo.overwrite('some_text')
            # Writing done: the lock must have been released.
            mocked_fcntl.flock.assert_called_once_with(stream,
                                                       mocked_fcntl.LOCK_UN)
def test_replace_filelocking():
    """replace() holds the exclusive lock during the edit, then unlocks."""
    mocked_re = MagicMock()
    mocked_fcntl = MagicMock()
    mocked_open = MagicMock()
    path = 'tests/fixtures/config.conf'
    with patch('pyolite.repo.open', mocked_open, create=True):
        stream = mocked_open.return_value.__enter__.return_value
        # The read stub verifies LOCK_EX was taken before the file is read.
        stream.read = lambda: ([
            mocked_fcntl.flock.assert_called_once_with(stream,
                                                       mocked_fcntl.LOCK_EX),
            mocked_fcntl.reset_mock()
        ])
        with patch.multiple('pyolite.repo', re=mocked_re, fcntl=mocked_fcntl):
            repo = Repo(path)
            mocked_fcntl.reset_mock()
            repo.replace('pattern', 'string')
            # The lock must be dropped once the file operation finishes.
            mocked_fcntl.flock.assert_called_once_with(stream,
                                                       mocked_fcntl.LOCK_UN)
def test_users_filelocking():
    """The users property reads under LOCK_EX and releases with LOCK_UN."""
    path = 'tests/fixtures/repo_users.conf'
    mocked_path = MagicMock()
    mocked_path.__str__ = lambda x: path
    mocked_path.exists.return_value = True
    mocked_re = MagicMock()
    mocked_fcntl = MagicMock()
    mocked_open = MagicMock()
    with patch('pyolite.repo.open', mocked_open, create=True):
        stream = mocked_open.return_value.__enter__.return_value
        # The read stub verifies LOCK_EX was taken before the file is read.
        stream.read = lambda: ([
            mocked_fcntl.flock.assert_called_once_with(stream,
                                                       mocked_fcntl.LOCK_EX),
            mocked_fcntl.reset_mock()
        ])
        with patch.multiple('pyolite.repo', re=mocked_re, fcntl=mocked_fcntl):
            repo = Repo(mocked_path)
            mocked_fcntl.reset_mock()
            repo.users
            # The lock must be released after the read completes.
            mocked_fcntl.flock.assert_called_once_with(stream,
                                                       mocked_fcntl.LOCK_UN)
def test_ifd(self):
    """_get_target_file picks the ifd path or the image path by output mode."""
    mock_type = MagicMock(spec=hou.NodeType)
    mock_type.name.return_value = "ifd"
    mock_node = MagicMock(spec=hou.Node)
    mock_node.type.return_value = mock_type

    # soho_outputmode True -> the node writes an ifd file to disk.
    mock_node.evalParm.side_effect = (True, "/var/tmp/test.ifd")
    result = ht.events.events.rop_render._get_target_file(mock_node)
    self.assertEqual(result, "/var/tmp/test.ifd")
    mock_node.evalParm.assert_any_call("soho_outputmode")
    mock_node.evalParm.assert_any_call("soho_diskfile")

    # soho_outputmode False -> the node renders an image instead.
    mock_node.reset_mock()
    mock_node.evalParm.side_effect = (False, "/var/tmp/test.exr")
    result = ht.events.events.rop_render._get_target_file(mock_node)
    self.assertEqual(result, "/var/tmp/test.exr")
    mock_node.evalParm.assert_any_call("soho_outputmode")
    mock_node.evalParm.assert_any_call("vm_picture")
def test_packet_action(self):
    """Packet actions dispatch callbacks by specification name."""
    # Only callables may be registered as packet actions.
    with self.assertRaises(TypeError):
        self.environment.add_packet_action("waypoint_add", "no_function")

    mock_callback = MagicMock()
    self.environment.add_packet_action("waypoint_add", mock_callback)

    # A specification accepts at most one callback.
    with self.assertRaises(KeyError):
        self.environment.add_packet_action("waypoint_add", MagicMock())

    # A packet matching the specification triggers the callback once.
    packet = Packet()
    packet.set("specification", "waypoint_add")
    packet.set("latitude", 12.345)
    packet.set("longitude", 32.109)
    packet.set("to_id", 1)
    self.environment.receive_packet(packet)
    mock_callback.assert_called_once_with(packet)

    # A packet with a different specification leaves the callback untouched.
    mock_callback.reset_mock()
    other_packet = Packet()
    other_packet.set("specification", "waypoint_clear")
    other_packet.set("to_id", 1)
    self.environment.receive_packet(other_packet)
    mock_callback.assert_not_called()
def test_create_sql_hive_context_happens_once(self):
    """create_sql_context posts the HiveContext statement on the first call
    only; a second call must not issue another POST."""
    kind = constants.SESSION_KIND_SPARK
    http_client = MagicMock()
    ipython_display = MagicMock()
    http_client.post_session.return_value = self.session_create_json
    # Canned POST responses consumed in order by the side-effect helper.
    self.post_statement_responses = [self.post_statement_json,
                                     self.post_statement_json]
    http_client.post_statement.side_effect = self._next_statement_response_post
    http_client.get_session.return_value = self.ready_sessions_json
    # Statement polls: running once, then ready (twice).
    self.get_statement_responses = [self.running_statement_json,
                                    self.ready_statement_json,
                                    self.ready_statement_json]
    http_client.get_statement.side_effect = self._next_statement_response_get
    # Short sleeps keep the polling loop fast in tests.
    conf.override_all({
        "status_sleep_seconds": 0.01,
        "statement_sleep_seconds": 0.01
    })
    session = self._create_session(kind=kind, http_client=http_client)
    session.ipython_display = ipython_display
    conf.load()
    session.start(create_sql_context=False)

    # Reset the mock so that post called count is accurate
    http_client.reset_mock()

    session.create_sql_context()
    assert ipython_display.writeln.call_count == 2
    # Second call should not issue a post request
    session.create_sql_context()
    assert call(0, {"code": "val sqlContext = new org.apache.spark.sql.hive.HiveContext(sc)"}) \
        in http_client.post_statement.call_args_list
    assert len(http_client.post_statement.call_args_list) == 1
class TestScanTickerPart(unittest.TestCase):
    """Tests for ScanTickerPart configure/run over half of a raster scan."""

    def setUp(self):
        self.context = MagicMock(spec=Context)
        self.registrar = MagicMock(spec=PartRegistrar)
        self.o = ScanTickerPart(name="AxisTwo", mri="mri")
        self.o.setup(self.registrar)

    def prepare_half_run(self):
        # A 3x2 raster; only AxisTwo moves, so half a run is two steps.
        outer = LineGenerator('AxisOne', 'mm', 0, 2, 3)
        inner = LineGenerator('AxisTwo', 'mm', 0, 2, 2)
        compound = CompoundGenerator([outer, inner], [], [], 1.0)
        compound.prepare()
        self.o.configure(0, 2, generator=compound, axesToMove=['AxisTwo'])

    def test_configure(self):
        self.prepare_half_run()
        assert self.o._completed_steps == 0
        assert self.o._steps_to_do == 2

    def test_run(self):
        self.prepare_half_run()
        self.registrar.reset_mock()
        self.o.run(self.context)
        # Each step puts its counter value and then sleeps for the
        # remaining point duration.
        assert self.context.mock_calls == [
            call.block_view("mri"),
            call.block_view().counter.put_value(0),
            call.sleep(AlmostFloat(1.0, delta=0.05)),
            call.block_view().counter.put_value(2),
            call.sleep(AlmostFloat(2.0, delta=0.1))
        ]
        # Progress is reported once per completed step.
        assert self.registrar.report.call_count == 2
        assert self.registrar.report.call_args_list[0][0][0].steps == 1
        assert self.registrar.report.call_args_list[1][0][0].steps == 2
def test_check_jobs(jobResetAgent):
    """Test for checkJobs function."""
    jobIDs = [1, 2]
    no_req_handler = MagicMock()
    with_req_handler = MagicMock()

    # An error from readRequestsForJobs aborts checkJobs with an error result.
    jobResetAgent.reqClient.readRequestsForJobs.return_value = S_ERROR()
    res = jobResetAgent.checkJobs(jobIDs, treatJobWithNoReq=no_req_handler,
                                  treatJobWithReq=with_req_handler)
    assert not res["OK"]

    # Jobs without a request go through the no-request treatment only.
    jobResetAgent.reqClient.readRequestsForJobs.return_value = S_OK(
        {'Successful': {}, 'Failed': {jobIDs[0]: 'Request not found'}})
    jobResetAgent.checkJobs(jobIDs, treatJobWithNoReq=no_req_handler,
                            treatJobWithReq=with_req_handler)
    no_req_handler.assert_has_calls([call(jobIDs[0]), call(jobIDs[1])])
    with_req_handler.assert_not_called()

    # Jobs with requests go through the with-request treatment only.
    no_req_handler.reset_mock()
    req1 = Request({"RequestID": 1})
    req2 = Request({"RequestID": 2})
    jobResetAgent.reqClient.readRequestsForJobs.return_value = S_OK(
        {'Successful': {jobIDs[0]: req1, jobIDs[1]: req2}, 'Failed': {}})
    jobResetAgent.checkJobs(jobIDs, treatJobWithNoReq=no_req_handler,
                            treatJobWithReq=with_req_handler)
    no_req_handler.assert_not_called()
    with_req_handler.assert_has_calls([call(jobIDs[0], req1),
                                       call(jobIDs[1], req2)])
def test_rpilcdmenu_render_multiple_items_rewind_menu(LCDHwdMock):
    """Scrolling past the last item wraps the rendered window around."""
    lcd = MagicMock()
    LCDHwdMock.return_value = lcd

    menu = RpiLCDMenu()
    for label in ("item1", "item2", "item3"):
        item = Mock()
        item.text = label
        menu.append_item(item)

    # Move the cursor down twice so item3 (the last entry) is selected.
    menu.processDown()
    menu.processDown()

    lcd.reset_mock()
    menu.render()

    # Expected writes: clear display, ">item3", move to line 2 (0xC0),
    # then " item1" — the listing wraps back to the first item.
    expected = [call(LCDHwdMock.LCD_CLEARDISPLAY)]
    expected += [call(ord(char), True) for char in ">item3"]
    expected += [call(0xC0)]
    expected += [call(ord(char), True) for char in " item1"]
    assert lcd.write4bits.mock_calls == expected
def test_connect_and_close(self, sock):
    """connect() completes the handshake against a canned 101 response;
    close() then sends a close frame carrying the reason text."""
    s = MagicMock()
    sock.socket.return_value = s
    sock.getaddrinfo.return_value = [(socket.AF_INET, socket.SOCK_STREAM, 0,
                                      "", ("127.0.0.1", 80, 0, 0))]
    c = WebSocketBaseClient(url="ws://127.0.0.1/?token=value")
    # Canned server handshake. The accept header must be derived from the
    # client key (SHA-1 of key + WS_KEY, base64) so the client accepts it.
    s.recv.return_value = b"\r\n".join([
        b"HTTP/1.1 101 Switching Protocols",
        b"Connection: Upgrade",
        b"Sec-Websocket-Version: 13",
        b"Content-Type: text/plain;charset=utf-8",
        b"Sec-Websocket-Accept: " + b64encode(sha1(c.key + WS_KEY).digest()),
        b"Upgrade: websocket",
        b"Date: Sun, 26 Jul 2015 12:32:55 GMT",
        b"Server: ws4py/test",
        b"\r\n"
    ])
    c.connect()
    s.connect.assert_called_once_with(("127.0.0.1", 80))

    s.reset_mock()
    c.close(code=1006, reason="boom")
    # Parse the frame that close() wrote and check the reason is in its body.
    args = s.sendall.call_args_list[0]
    f = Frame()
    f.parser.send(args[0][0])
    f.parser.close()
    self.assertIn(b'boom', f.unmask(f.body))
def test_discover(self, bind_mock):
    """discover() probes one port per sensor and reports each identity."""
    # Every bind fails with EADDRINUSE: a port "in use" signals to
    # discover() that an RF sensor is present on that port.
    bind_mock.configure_mock(side_effect=socket.error(errno.EADDRINUSE, "port in use"))
    callback_mock = MagicMock()

    self.rf_sensor.discover(callback_mock)

    bind_calls = bind_mock.call_args_list
    callback_calls = callback_mock.call_args_list
    self.assertEqual(len(bind_calls), self.rf_sensor.number_of_sensors)
    self.assertEqual(len(callback_calls), self.rf_sensor.number_of_sensors)

    # Each vehicle must be checked whether its port is in use, and
    # successfully report the identity of its RF sensor in order.
    for vehicle_id in xrange(1, self.rf_sensor.number_of_sensors + 1):
        expected_address = (self.rf_sensor._ip, self.rf_sensor._port + vehicle_id)
        address = bind_calls.pop(0)[0][0]
        self.assertEqual(address, expected_address)
        response = callback_calls.pop(0)[0][0]
        self.assertEqual(response, {
            "id": vehicle_id,
            "address": "{}:{}".format(*expected_address)
        })

    # With binds succeeding (ports free, i.e. no sensor present), only the
    # required sensor's port is probed and no identity is reported.
    callback_mock.reset_mock()
    bind_mock.reset_mock()
    bind_mock.configure_mock(side_effect=None)

    self.rf_sensor.discover(callback_mock, required_sensors=set([1]))

    bind_mock.assert_called_once_with((self.rf_sensor._ip, self.rf_sensor._port + 1))
    callback_mock.assert_not_called()
def test_inactive_users_queued_for_digest(
        self, mock_django_timezone: mock.MagicMock,
        mock_queue_digest_recipient: mock.MagicMock) -> None:
    """Users with no recent activity are all enqueued for the digest."""
    # Turn on realm digest emails for all realms.
    Realm.objects.update(digest_emails_enabled=True)
    cutoff = timezone_now()
    # Pin "now" to a Tuesday.
    mock_django_timezone.return_value = datetime.datetime(year=2016, month=1, day=5)
    all_user_profiles = UserProfile.objects.filter(
        is_active=True, is_bot=False, enable_digest_emails=True)

    # Users without any UserActivity entry count as inactive: all enqueued.
    enqueue_emails(cutoff)
    self.assertEqual(mock_queue_digest_recipient.call_count,
                     all_user_profiles.count())
    mock_queue_digest_recipient.reset_mock()

    # Give every user an activity record dated just before the cutoff...
    for realm in Realm.objects.filter(deactivated=False,
                                      digest_emails_enabled=True):
        user_profiles = all_user_profiles.filter(realm=realm)
        for user_profile in user_profiles:
            UserActivity.objects.create(
                last_visit=cutoff - datetime.timedelta(days=1),
                user_profile=user_profile,
                count=0,
                client=get_client('test_client'))

    # ...they are still inactive relative to the cutoff: enqueued again.
    enqueue_emails(cutoff)
    self.assertEqual(mock_queue_digest_recipient.call_count,
                     all_user_profiles.count())
def test_on_changed(self):
    """on_changed enqueues a BlockChanged message without notification."""
    change = [["path"], "value"]
    spawner = MagicMock()
    proc = Process("proc", spawner)
    spawner.reset_mock()
    proc.on_changed(change, notify=False)
    proc.q.put.assert_called_once_with(BlockChanged(change=change))
class TestValidator(unittest.TestCase):
    """
    Base class for validator unit tests
    """
    def setUp(self):
        self.service_id = uuid4()
        self.topchef_url = 'http://some-address.com/topchef'
        self.http_library = MagicMock(spec=requests)
        self.validator = Validator(self.topchef_url, self.http_library)

    def tearDown(self):
        # Drop recorded calls so tests cannot leak into each other.
        self.http_library.reset_mock()


class MockResponse(object):
    """
    Wraps HTTP responses in order to allow stubbing of the HTTP library
    in a reasonable way
    """
    def __init__(self, status_code, data=None):
        self.status_code = status_code
        # Default to an empty body when no JSON payload is supplied.
        self._data = {} if data is None else data

    def json(self):
        """
        :return: The json that was supplied in the response constructor
        :rtype dict:
        """
        return self._data
def test_remove_fact_no_paste(mock_requests):
    """When the paste service fails, facts must not be deleted."""
    from cloudbot.util import database
    from plugins import factoids
    importlib.reload(database)
    importlib.reload(factoids)

    factoids.factoid_cache.clear()
    # The paste endpoint is down for the whole test.
    mock_requests.add(mock_requests.POST, 'https://hastebin.com/documents',
                      status=404)
    session = MagicMock()
    notice = MagicMock()

    # Removing an unknown factoid only notifies; nothing touches the db.
    factoids.remove_fact('#example', ['foo'], session, notice)
    notice.assert_called_once_with("Unknown factoids: 'foo'")
    session.execute.assert_not_called()

    # With one known and one unknown factoid, the failed paste aborts removal.
    notice.reset_mock()
    factoids.factoid_cache['#example']['foo'] = 'bar'
    factoids.remove_fact('#example', ['foo', 'bar'], session, notice)
    notice.assert_has_calls([
        call("Unknown factoids: 'bar'"),
        call('Unable to paste removed data, not removing facts'),
    ])
    session.execute.assert_not_called()
def test_checkAllJob( self ):
    """test for DataRecoveryAgent checkAllJobs .....................................................

    NOTE(review): sys.stdout is redirected but never restored here —
    confirm a tearDown resets it.
    """
    from ILCDIRAC.ILCTransformationSystem.Utilities.JobInfo import JobInfo
    ### test with additional task dicts
    # Capture stdout so the agent's log output can be asserted on.
    out = StringIO()
    sys.stdout = out
    from ILCDIRAC.ILCTransformationSystem.Utilities.TransformationInfo import TransformationInfo
    tInfoMock = Mock( name = "tInfoMock", spec=TransformationInfo )
    mockJobs = dict([ (i, self.getTestMock() ) for i in xrange(11) ] )
    # Job 2 is blocked by a pending request; jobs 3 and 4 raise on their
    # first call only (second side_effect entry is None).
    mockJobs[2].pendingRequest = True
    mockJobs[3].getJobInformation = Mock( side_effect = ( RuntimeError("ARGJob1"), None ) )
    mockJobs[4].getTaskInfo = Mock( side_effect = ( TaskInfoException("ARG1"), None ) )
    taskDict = True
    lfnTaskDict = True
    self.dra.checkAllJobs( mockJobs, tInfoMock, taskDict, lfnTaskDict )
    # Both failures must be logged without aborting the overall check.
    self.assertIn( "ERROR: +++++ Exception: ARGJob1", out.getvalue().strip() )
    self.assertIn( "Skip Task, due to TaskInfoException: ARG1", out.getvalue().strip() )
    ### test without additional task dicts
    out = StringIO()
    sys.stdout = out
    mockJobs = dict([ (i, self.getTestMock() ) for i in xrange(5) ] )
    mockJobs[2].pendingRequest = True
    mockJobs[3].getJobInformation = Mock( side_effect = ( RuntimeError("ARGJob2"), None ) )
    tInfoMock.reset_mock()
    self.dra.checkAllJobs( mockJobs, tInfoMock )
    self.assertIn( "ERROR: +++++ Exception: ARGJob2", out.getvalue().strip() )
def test_instance_callbacks_are_executed_once(self):
    """One-shot listeners fire on the next event and are then discarded."""
    ODMModel.set_pm(MagicMock())
    dic = {"_id": "2323", "pippo": "pluto"}
    odm = ODMModel(dic)
    odm.pippo = MagicMock()
    odm.pluto = MagicMock()
    sempronio = MagicMock()

    for listener in ("pippo", "pluto", sempronio):
        odm.listen_once("before_save", listener)
    odm.save()
    odm.pippo.assert_called_once_with(odm)
    odm.pluto.assert_called_once_with(odm)
    sempronio.assert_called_once_with(odm)
    # The one-shot list for save must be empty afterwards.
    self.assertEqual(odm.before_callbacks_single["save"], [])

    for cb in (odm.pippo, odm.pluto, sempronio):
        cb.reset_mock()
    odm.listen_once("after_destroy", "pippo")
    odm.listen_once("after_destroy", sempronio)
    odm.destroy()
    odm.pippo.assert_called_once_with(odm)
    sempronio.assert_called_once_with(odm)
    # pluto was not registered for destroy, so it stays untouched.
    self.assertEqual(odm.pluto.call_count, 0)
    self.assertEqual(odm.after_callbacks_single["destroy"], [])
def test_checkAllJob(self):
    """test for DataRecoveryAgent checkAllJobs ....................................................."""
    # Imported for availability; not referenced directly below.
    from DIRAC.TransformationSystem.Utilities.JobInfo import JobInfo
    # test with additional task dicts
    from DIRAC.TransformationSystem.Utilities.TransformationInfo import TransformationInfo
    tInfoMock = Mock(name="tInfoMock", spec=TransformationInfo)
    # 11 mock jobs: job 2 has a pending request, job 3 raises once from
    # getJobInformation, job 4 raises once from getTaskInfo.
    mockJobs = dict([(i, self.getTestMock()) for i in range(11)])
    mockJobs[2].pendingRequest = True
    mockJobs[3].getJobInformation = Mock(
        side_effect=(RuntimeError("ARGJob1"), None))
    mockJobs[4].getTaskInfo = Mock(side_effect=(TaskInfoException("ARG1"), None))
    taskDict = True
    lfnTaskDict = True
    self.dra.checkAllJobs(mockJobs, tInfoMock, taskDict, lfnTaskDict)
    # Both induced failures must show up in the agent's error log.
    self.dra.log.error.assert_any_call(MatchStringWith("+++++ Exception"), "ARGJob1")
    self.dra.log.error.assert_any_call(
        MatchStringWith("Skip Task, due to TaskInfoException: ARG1"))
    self.dra.log.reset_mock()

    # test inputFile None
    # An MCSimulation job with no input files that raises from getTaskInfo is
    # expected to be failed hard (reported via log.notice).
    mockJobs = dict([(i, self.getTestMock(nameID=i)) for i in range(5)])
    mockJobs[1].inputFiles = []
    mockJobs[1].getTaskInfo = Mock(
        side_effect=(TaskInfoException("NoInputFile"), None))
    mockJobs[1].tType = "MCSimulation"
    tInfoMock.reset_mock()
    self.dra.checkAllJobs(mockJobs, tInfoMock, taskDict, lfnTaskDict=True)
    self.dra.log.notice.assert_any_call(
        MatchStringWith("Failing job hard"))
def test_removeChute(mockDocker, mockOutput):
    """
    Test that the removeChute function does its job.
    """
    update = MagicMock()
    update.name = 'test'

    container = MagicMock()
    client = MagicMock()
    client.containers.get.return_value = container
    mockDocker.return_value = client

    dockerapi.removeChute(update)
    # The Docker client must be created against the local socket.
    mockDocker.assert_called_once_with(base_url='unix://var/run/docker.sock',
                                       version='auto')
    container.remove.assert_called_once_with(force=True)
    client.images.remove.assert_called_once()
    # removeChute must not mark the update complete itself.
    assert update.complete.call_count == 0

    client.reset_mock()
    container.remove.side_effect = Exception('Test')
    # NOTE(review): removeChute may handle the failure internally, so this
    # deliberately tolerates the no-raise case and only checks the message
    # when an exception does escape.
    try:
        dockerapi.removeChute(update)
    except Exception as e:
        # Exception.message was removed in Python 3; str(e) is portable.
        assert str(e) == 'Test'
def test_wait_for_providers_task(self, mock_export_run):
    """wait_for_providers_task fires the callback only when all provider tasks succeeded."""
    mock_run_uid = str(uuid.uuid4())
    # All provider tasks finished successfully -> callback is applied.
    mock_provider_task = Mock(status=TaskStates.SUCCESS.value)
    mock_export_run.objects.filter().first.return_value = Mock()
    mock_export_run.objects.filter().first(
    ).provider_tasks.all.return_value = [mock_provider_task]
    callback_task = MagicMock()
    apply_args = {"arg1": "example_value"}
    wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
    callback_task.apply_async.assert_called_once_with(**apply_args)

    callback_task.reset_mock()
    # A still-running provider task -> callback must not fire yet.
    mock_provider_task = Mock(status=TaskStates.RUNNING.value)
    mock_export_run.objects.filter().first.return_value = Mock()
    mock_export_run.objects.filter().first(
    ).provider_tasks.all.return_value = [mock_provider_task]
    wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
    callback_task.apply_async.assert_not_called()

    # A falsy run object makes the task raise.
    # NOTE(review): __nonzero__ is the Python 2 truthiness hook; Python 3
    # uses __bool__ — confirm the interpreter version this targets.
    with self.assertRaises(Exception):
        mock_export_run.reset_mock()
        mock_export_run.objects.filter().first(
        ).__nonzero__.return_value = False
        wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
def test_execute_spatialite_script(self, mock_sqlite3, mock_enable, mock_logging_open):
    """execute_spatialite_script runs the script's SQL and always closes the connection."""
    test_db = "test.gpkg"
    test_script = "test.sql"
    test_command = "select * from table;"

    mock_connection = MagicMock()
    mock_sqlite3.connect.return_value = mock_connection
    # The script file is opened as a context manager and read; its content
    # is the SQL that must be executed.
    mock_logging_open().__enter__().read.return_value = test_command

    execute_spatialite_script(test_db, test_script)

    # test success
    mock_sqlite3.connect.assert_called_with(test_db)
    # cursor() always returns the same child mock, so these chained
    # assertions see the calls made inside the function under test.
    mock_connection.cursor().executescript.assert_called_once_with(
        test_command)
    mock_connection.cursor().close.assert_called_once()
    mock_connection.close.assert_called_once()

    mock_connection.reset_mock()
    # test failure, ensure connection is cleaned up
    mock_connection.commit.side_effect = Exception("Failed")
    with self.assertRaises(Exception):
        execute_spatialite_script(test_db, test_script)
    # Call counts restarted at reset_mock above: cursor and connection must
    # still be closed exactly once on the failure path.
    mock_connection.cursor().close.assert_called_once()
    mock_connection.close.assert_called_once()
def testPTC150_DVIP(self):
    """The green-tally command for a PTC150 camera over DVIP is sent verbatim."""
    mock_socket = MagicMock()
    cam = PTC150_DVIP("Test", "127.0.0.1")
    cam.socket = mock_socket

    cam.tallyGreen()

    expected_packet = '\x00\x0B\x81\x01\x7E\x01\x0A\x00\x00\x02\xFF'
    mock_socket.send.assert_called_once_with(expected_packet)
    mock_socket.reset_mock()
def test_check_archiver_errors(self, isdir_mock, listdir_mock):
    """check_archiver_errors classifies files found in the errors directory."""
    server = build_real_server()
    check_strategy = MagicMock()

    # (directory listing, expected success flag, expected hint message)
    scenarios = [
        ([], True, None),                                       # no error file
        (['testing.duplicate'], False, 'duplicates: 1'),        # duplicate
        (['testing.unknown'], False, 'unknown: 1'),             # unknown
        (['testing.error'], False, 'not relevant: 1'),          # not relevant
        (['testing.wrongextension'], False, 'unknown failure: 1'),  # extraneous
    ]
    for files, success, hint in scenarios:
        check_strategy.reset_mock()
        listdir_mock.return_value = files
        server.check_archiver_errors(check_strategy)
        check_strategy.result.assert_called_with(
            'main', 'archiver errors', success, hint,
        )
def test_start_sync_job(
    mocker: MockerFixture,
    job_type: JobType,
    create_sync_job: CreateSyncJob,
) -> None:
    """start_sync_job queries both APIs, computes the content diff and syncs
    it; it raises when either side fails its status check."""
    mock_sync_content = mocker.patch("arrsync.lib.sync_content")
    mock_get_content_payloads = mocker.patch(
        "arrsync.lib.get_content_payloads")
    mock_calculate_content_diff = mocker.patch(
        "arrsync.lib.calculate_content_diff")

    job = create_sync_job(job_type)

    # Both APIs report a healthy status -> the sync pipeline runs.
    source_api = MagicMock(spec=Api)
    source_api.__enter__.return_value = source_api
    source_api.status.return_value = Status.parse_obj({"version": "3"})
    dest_api = MagicMock(spec=Api)
    dest_api.__enter__.return_value = dest_api
    dest_api.status.return_value = Status.parse_obj({"version": "3"})
    mock_api = mocker.patch("arrsync.lib.Api", autospec=True,
                            side_effect=[source_api, dest_api])

    start_sync_job(job)

    assert mock_api.call_count == 2
    source_api.status.assert_called_once_with()
    dest_api.status.assert_called_once_with()
    source_api.tag.assert_called_once_with()
    source_api.profile.assert_called_once_with()
    dest_api.profile.assert_called_once_with()
    dest_api.metadata.assert_called_once_with()
    dest_api.language.assert_called_once_with()
    source_api.content.assert_called_once_with()
    dest_api.content.assert_called_once_with()
    mock_calculate_content_diff.assert_called()
    mock_get_content_payloads.assert_called()
    mock_sync_content.assert_called()

    # Failure case reuses the same mocks: a None status must make the job
    # raise. (The original's `source_api = source_api` / `dest_api = dest_api`
    # self-assignments were no-ops and have been removed.)
    mock_api.reset_mock()
    source_api.reset_mock()
    dest_api.reset_mock()

    source_api.__enter__.return_value = source_api
    source_api.status.return_value = None
    dest_api.__enter__.return_value = dest_api
    dest_api.status.return_value = None
    mock_api.side_effect = [source_api, dest_api]

    with pytest.raises(Exception):
        start_sync_job(job)
def test_on_changed_with_notify(self):
    """on_changed queues a BlockChanged followed by a BlockNotify for the path."""
    delta = [["path"], "value"]
    scheduler = MagicMock()
    proc = Process("proc", scheduler)
    scheduler.reset_mock()

    proc.on_changed(delta)

    expected_calls = [
        call(BlockChanged(change=delta)),
        call(BlockNotify(name="path")),
    ]
    proc.q.put.assert_has_calls(expected_calls)
def test_magic_methods_fspath(self):
    """On 3.6+ MagicMock supports os.fspath(); earlier versions have no __fspath__."""
    mock = MagicMock()
    if sys.version_info >= (3, 6):
        # os.fspath must delegate to the configured __fspath__ result.
        expected_path = mock.__fspath__()
        mock.reset_mock()
        self.assertEqual(os.fspath(mock), expected_path)
        mock.__fspath__.assert_called_once()
    else:
        # __fspath__ is not configured on MagicMock before Python 3.6.
        self.assertRaises(AttributeError, lambda: mock.__fspath__)
def test_create_application(monkeypatch):
    """'applications create' opens the right security-group rules for the
    exposed protocol (http -> 80/443 world-open, tcp -> the declared port)."""
    # Stub out all AWS entry points so no real API calls happen.
    monkeypatch.setattr('boto.ec2.connect_to_region', MagicMock())
    monkeypatch.setattr('boto.iam.connect_to_region', MagicMock())
    monkeypatch.setattr('time.sleep', lambda s: s)

    mock_sgr = MagicMock()
    monkeypatch.setattr('aws_minion.cli.SecurityGroupRule', mock_sgr)

    context = Context({'region': 'caprica', 'vpc': 'myvpc'})
    # The application must not already exist, otherwise creation is skipped.
    context.get_application = raise_application_not_found
    context_constructor = lambda x, y: context
    monkeypatch.setattr('aws_minion.cli.Context', context_constructor)

    runner = CliRunner()

    data = {
        'application_name': 'myapp',
        'team_name': 'MyTeam',
        'exposed_ports': [123],
        'exposed_protocol': 'http'
    }

    # Case 1: http application -> world-open rules on ports 80 and 443.
    mock_sgr.reset_mock()
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump(data, fd)
        context.write_config('config.yaml')
        result = runner.invoke(cli, ['-p', 'default', '--config-file', 'config.yaml',
                                     'applications', 'create', 'myapp.yaml'],
                               catch_exceptions=False)

        assert call('tcp', 80, 80, '0.0.0.0/0', None) in mock_sgr.call_args_list
        assert call('tcp', 443, 443, '0.0.0.0/0', None) in mock_sgr.call_args_list
        assert 'Creating IAM role and instance profile.. OK' in result.output

    data_tcp = {
        'application_name': 'myapp',
        'team_name': 'MyTeam',
        'exposed_ports': [123],
        'exposed_protocol': 'tcp'
    }

    # Case 2: plain tcp application -> rule for the declared port restricted
    # to the app's load-balancer security group.
    mock_sgr.reset_mock()
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump(data_tcp, fd)
        context.write_config('config.yaml')
        result = runner.invoke(cli, ['-p', 'default', '--config-file', 'config.yaml',
                                     'applications', 'create', 'myapp.yaml'],
                               catch_exceptions=False)

        assert mock_sgr.call_args == call('tcp', 123, 123, None, 'app-myapp-lb')
        assert 'Creating IAM role and instance profile.. OK' in result.output