def test_copy_metadata_main_missing_directory(self, m_logger, m_exit, m_exists, m_isdir, m_process_all_files, m_argparser):
    """
    Test main() function.

    Mock CustomArgumentParser to return values desired for test but with
    missing `src_directory`. Verify correct behavior by asserting calls on
    logger, usage_message, and exit.
    """
    # Pretend the destination exists and is a directory so only the missing
    # src_directory can trigger the error path under test.
    m_exists.return_value = True
    m_isdir.return_value = True
    # Mock up the proper return values.
    m_parse_args = MagicMock(
        src_directory=None,
        dst_directory="/def",
        simon_sez=True,
        verbose=False,
    )
    attrs = {
        'parse_args.return_value': m_parse_args,
        'usage_message.return_value': None,
    }
    # This one configured to return m_parse_args.
    m_parse_args_container = MagicMock()
    m_parse_args_container.configure_mock(**attrs)
    m_argparser.return_value = m_parse_args_container
    # Invoke the unit.  (retval is unused: the exit path is verified via the
    # mocked sys.exit instead of the return value.)
    retval = main()
    # Confirm expected behavior
    m_logger.error.assert_called()
    m_parse_args_container.usage_message.assert_called_once()
    m_exit.assert_called_once_with(1)
def test_wait_for_providers_task(self, mock_export_run):
    """wait_for_providers_task fires the callback only when all provider tasks succeeded."""
    mock_run_uid = str(uuid.uuid4())
    # Case 1: all provider tasks SUCCESS -> the callback is applied once.
    mock_provider_task = Mock(status=TaskStates.SUCCESS.value)
    mock_export_run.objects.filter().first.return_value = Mock()
    mock_export_run.objects.filter().first().provider_tasks.filter.return_value = [mock_provider_task]
    callback_task = MagicMock()
    apply_args = {"arg1": "example_value"}
    wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
    callback_task.apply_async.assert_called_once_with(**apply_args)
    callback_task.reset_mock()
    # Case 2: a provider task still RUNNING -> no callback yet.
    mock_provider_task = Mock(status=TaskStates.RUNNING.value)
    mock_export_run.objects.filter().first.return_value = Mock()
    mock_export_run.objects.filter().first().provider_tasks.filter.return_value = [mock_provider_task]
    wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
    callback_task.apply_async.assert_not_called()
    # Case 3: run lookup comes back falsy -> the task is expected to raise.
    with self.assertRaises(Exception):
        mock_export_run.reset_mock()
        # NOTE(review): __nonzero__ is the Python 2 truthiness hook; under
        # Python 3 this would have to be __bool__ -- confirm target runtime.
        mock_export_run.objects.filter().first().__nonzero__.return_value = False
        wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
def test_sshvm_key(self, server_proxy):
    """
    Test sshvm operation: the generated ssh command uses the private key
    and the non-default port from the RADL.
    """
    proxy = MagicMock()
    # Close the RADL file deterministically instead of leaking the handle.
    with open(get_abs_path("../files/test_priv.radl"), 'r') as radl_file:
        radl = radl_file.read()
    proxy.GetVMInfo.return_value = (True, radl)
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    oldstderr = sys.stderr
    sys.stdout = out
    sys.stderr = out
    try:
        res = main("sshvm", options, ["infid", "vmid", "1"], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("ssh -p 1022 -i /tmp/", output)
        self.assertIn(" -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no [email protected]", output)
    finally:
        # Restore the real streams even when an assertion fails, so later
        # tests' output is not silently swallowed into `out`.
        sys.stdout = oldstdout
        sys.stderr = oldstderr
def test_poll_requeues_loops(self):
    """A callback flagged requeue=True must still be registered after a poll cycle."""
    loop = MagicMock()
    loop.function = MagicMock()
    loop.requeue = True
    self.engine._callbacks.append(loop)
    self.engine.poll(0.02)
    # assertIn yields a clearer failure message than assertTrue(x in y).
    self.assertIn(loop, self.engine._callbacks)
def test_save_grade_success(self):
    """save_grade succeeds for a fully-populated grading POST payload."""
    data = {
        'rubric_scores[]': [0, 0],
        'location': self.location,
        'submission_id': 1,
        'submission_key': 'fake key',
        'score': 2,
        'feedback': 'feedback',
        'submission_flagged': 'false',
        'answer_unknown': 'false',
        'rubric_scores_complete': 'true'
    }
    qdict = MagicMock()

    def fake_get_item(key):
        # Serve values out of the canned payload above.
        return data[key]

    qdict.__getitem__.side_effect = fake_get_item
    # getlist/keys are plain attribute lookups on the QueryDict, so stubbing
    # them with callables is enough for the module under test.
    qdict.getlist = fake_get_item
    qdict.keys = data.keys
    response = self.peer_module.save_grade(qdict)
    self.assertTrue(response['success'])
def test_getversion(self, server_proxy, requests):
    """
    Test getversion operation over both the XML-RPC and REST APIs.
    """
    proxy = MagicMock()
    proxy.GetVersion.return_value = (True, "1.0")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    try:
        # First pass: XML-RPC API.
        res = main("getversion", options, [], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("1.0", output)
        # Second pass: REST API.
        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("getversion", options, [], parser)
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("1.0", output)
    finally:
        # Restore stdout even when an assertion fails mid-test.
        sys.stdout = oldstdout
def test_import(self, server_proxy, requests):
    """
    Test import operation over both the XML-RPC and REST APIs.
    """
    proxy = MagicMock()
    proxy.ImportInfrastructure.return_value = (True, "newinfid")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    try:
        # First pass: XML-RPC API.
        res = main("import", options, [get_abs_path("../files/test.radl")], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("New Inf: newinfid", output)
        # Second pass: REST API.
        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("import", options, [get_abs_path("../files/test.radl")], parser)
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("New Inf: newinfid", output)
    finally:
        # Restore stdout even when an assertion fails mid-test.
        sys.stdout = oldstdout
def test_getinfo(self, server_proxy, requests):
    """
    Test getinfo operation over both the XML-RPC and REST APIs.
    """
    proxy = MagicMock()
    proxy.GetVMInfo.return_value = (True, "radltest")
    proxy.GetInfrastructureInfo.return_value = (True, ["vm1"])
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    try:
        # First pass: XML-RPC API.
        res = main("getinfo", options, ["infid"], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("Info about VM with ID: vm1\nradltest", output)
        # Second pass: REST API.
        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("getinfo", options, ["infid"], parser)
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("Info about VM with ID: vm1\nradltest", output)
    finally:
        # Restore stdout even when an assertion fails mid-test.
        sys.stdout = oldstdout
def test_rebootvm(self, server_proxy, requests):
    """
    Test rebootvm operation over both the XML-RPC and REST APIs.
    """
    proxy = MagicMock()
    proxy.RebootVM.return_value = (True, "")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    try:
        # First pass: XML-RPC API.
        res = main("rebootvm", options, ["infid", "vmid"], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("VM successfully rebooted", output)
        # Second pass: REST API.
        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("rebootvm", options, ["infid", "vmid"], parser)
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("VM successfully rebooted", output)
    finally:
        # Restore stdout even when an assertion fails mid-test.
        sys.stdout = oldstdout
def test_getcontmsg(self, server_proxy, requests):
    """
    Test getcontmsg operation over both the XML-RPC and REST APIs.
    """
    proxy = MagicMock()
    proxy.GetInfrastructureContMsg.return_value = (True, "contmsg")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    try:
        # First pass: XML-RPC API.
        res = main("getcontmsg", options, ["infid"], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("Msg Contextualizator: \n\ncontmsg", output)
        # Second pass: REST API.
        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("getcontmsg", options, ["infid"], parser)
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("Msg Contextualizator: \n\ncontmsg", output)
    finally:
        # Restore stdout even when an assertion fails mid-test.
        sys.stdout = oldstdout
def test_getstate(self, server_proxy, requests):
    """
    Test getstate operation over both the XML-RPC and REST APIs.
    """
    proxy = MagicMock()
    proxy.GetInfrastructureState.return_value = (True, {"state": "running", "vm_states": {"vm1": "running"}})
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    try:
        # First pass: XML-RPC API.
        res = main("getstate", options, ["infid"], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("The infrastructure is in state: running\nVM ID: vm1 is in state: running.", output)
        # Second pass: REST API.
        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("getstate", options, ["infid"], parser)
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("The infrastructure is in state: running\nVM ID: vm1 is in state: running.", output)
    finally:
        # Restore stdout even when an assertion fails mid-test.
        sys.stdout = oldstdout
def test_create(self, server_proxy, requests):
    """
    Test create operation over both the XML-RPC and REST APIs.
    """
    proxy = MagicMock()
    proxy.CreateInfrastructure.return_value = (True, "inf1")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    try:
        # First pass: XML-RPC API.
        res = main("create", options, [get_abs_path("../files/test.radl")], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("Infrastructure successfully created with ID: inf1", output)
        # Second pass: REST API.
        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("create", options, [get_abs_path("../files/test.radl")], parser)
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("Infrastructure successfully created with ID: inf1", output)
    finally:
        # Restore stdout even when an assertion fails mid-test (the original
        # restored it twice along the happy path only).
        sys.stdout = oldstdout
def test_addresource(self, server_proxy, requests):
    """
    Test addresource operation over both the XML-RPC and REST APIs.
    """
    proxy = MagicMock()
    proxy.AddResource.return_value = (True, ["1"])
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()
    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    try:
        # First pass: XML-RPC API.
        res = main("addresource", options, ["infid", get_abs_path("../files/test.radl")], parser)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("Resources with IDs: 1 successfully added.", output)
        # Second pass: REST API.
        out = StringIO()
        sys.stdout = out
        options.xmlrpc = None
        options.restapi = "https://localhost:8800"
        requests.side_effect = self.get_response
        res = main("addresource", options, ["infid", get_abs_path("../files/test.radl")], parser)
        self.assertEqual(res, True)
        output = out.getvalue().strip()
        self.assertIn("Resources with IDs: 1 successfully added.", output)
    finally:
        # Restore stdout even when an assertion fails mid-test.
        sys.stdout = oldstdout
def get_group_with_posting_members(members):
    """Build a mock group whose list manager reports *members* as the posting members."""
    group = MagicMock()
    # --=mpj17=-- I am not proud of this next line
    get_property = group.site_root().ListManager.get_list().getProperty
    get_property.return_value = members
    return group
def test_comit_no_parents(self):
    """Committing wires tree, signatures and parent id into create_commit."""
    mocked_repo = MagicMock()
    mocked_parent = MagicMock()
    mocked_parent.id = 1
    mocked_repo.status.return_value = True
    mocked_repo.index.write_tree.return_value = "tree"
    mocked_repo.revparse_single.return_value = mocked_parent
    mocked_repo.create_commit.return_value = "commit"
    author = ("author_1", "author_2")
    commiter = ("commiter_1", "commiter_2")
    with patch('gitfs.repository.Signature') as mocked_signature:
        mocked_signature.return_value = "signature"
        repo = Repository(mocked_repo)
        commit = repo.commit("message", author, commiter)
        assert commit == "commit"
        assert mocked_repo.status.call_count == 1
        assert mocked_repo.index.write_tree.call_count == 1
        assert mocked_repo.index.write.call_count == 1
        # BUG FIX: `has_calls` is not a Mock assertion method -- invoking it
        # merely records a no-op child call and the check silently passes.
        # `assert_has_calls` actually verifies the Signature constructions.
        mocked_signature.assert_has_calls(
            [call(*author), call(*commiter)], any_order=True)
        mocked_repo.revparse_single.assert_called_once_with("HEAD")
        mocked_repo.create_commit.assert_called_once_with(
            "HEAD", "signature", "signature", "message", "tree", [1])
def test_on_taskrun_submit_event(self, mock_update_feed, mock_add_user, mock_is_task, mock_update_task, mock_create_result, mock_push):
    """Test on_taskrun_submit is called."""
    conn = MagicMock()
    target = MagicMock()
    target.id = 1
    target.project_id = 1
    target.task_id = 2
    target.user_id = 3
    tmp = Project(id=1, name='name', short_name='short_name',
                  info=dict(container=1, thumbnail="avatar.png"),
                  published=True,
                  webhook='http://localhost.com')
    conn.execute.return_value = [tmp]
    on_taskrun_submit(None, conn, target)
    obj = tmp.to_public_json()
    obj['action_updated'] = 'TaskCompleted'
    # The user contribution, task state and activity feed all get updated.
    mock_add_user.assert_called_with(conn, target.user_id, obj)
    mock_update_task.assert_called_with(conn, target.task_id)
    mock_update_feed.assert_called_once_with(obj)
    # The webhook payload additionally carries the project's webhook URL.
    obj_with_webhook = tmp.to_public_json()
    obj_with_webhook['webhook'] = tmp.webhook
    obj_with_webhook['action_updated'] = 'TaskCompleted'
    mock_push.assert_called_with(obj_with_webhook, target.task_id, 1)
def test_getattr_with_correct_path(self):
    """getattr('/') reports a directory stat built from first/last commit times."""
    mocked_repo = MagicMock()
    mocked_first = MagicMock()
    mocked_last = MagicMock()
    mocked_first.return_value = "tomorrow"
    mocked_last.return_value = "tomorrow"
    mocked_repo.get_commit_dates.return_value = ['/']
    with patch('gitfs.views.history.lru_cache') as mocked_cache:
        # NOTE(review): assigning __call__ on a mock instance does not change
        # how the mock is invoked (special methods resolve on the type);
        # presumably this only intends to neuter the caching decorator --
        # confirm it is actually needed.
        mocked_cache.__call__ = lambda f: f
        history = HistoryView(repo=mocked_repo, uid=1, gid=1,
                              mount_time="now")
        history._get_first_commit_time = mocked_first
        history._get_last_commit_time = mocked_last
        result = history.getattr("/", 1)
        asserted_result = {
            'st_uid': 1,
            'st_gid': 1,
            'st_ctime': "tomorrow",
            'st_mtime': "tomorrow",
            'st_nlink': 2,
            'st_mode': S_IFDIR | 0o555,
        }
        assert asserted_result == result
def test_remove_response(self, c, review_id, side_effect=None, err_type=None, err_msg=None):
    """Parameterized check of ReviewManager.remove_response success and error paths."""
    # Create mocks
    review_manager.Review = MagicMock()
    rev_object = MagicMock()
    self.offering.owner_organization = self.org
    self.org.managers = [self.user.pk]
    rev_object.offering = self.offering
    review_manager.Review.objects.get.return_value = rev_object
    # Call the side effect if needed
    if side_effect:
        side_effect(self)
    error = None
    try:
        rm = review_manager.ReviewManager()
        rm.remove_response(self.user, review_id)
    except Exception as e:
        error = e
    if not err_type:
        # Success path: no exception raised and the response was deleted.
        self.assertEquals(error, None)
        # Check calls
        rev_object.response.delete.assert_called_with()
    else:
        # Failure path: the raised error matches the expected type/message.
        # (unicode() -- this module targets Python 2.)
        self.assertTrue(isinstance(error, err_type))
        self.assertEquals(unicode(error), err_msg)
def test_disabled(self, mock_django_timezone: mock.MagicMock, mock_queue_digest_recipient: mock.MagicMock) -> None:
    """No digest recipients are queued on a day when digests are disabled."""
    cutoff = timezone_now()
    # Pin "now" to Tuesday, 2016-01-05.
    tuesday = datetime.datetime(year=2016, month=1, day=5)
    mock_django_timezone.return_value = tuesday
    enqueue_emails(cutoff)
    mock_queue_digest_recipient.assert_not_called()
def test_redirect_to_course_position(self):
    """A module with no display items makes redirect_to_course_position raise Http404."""
    course_module = MagicMock()
    course_module.descriptor.id = 'Underwater Basketweaving'
    course_module.position = 3
    course_module.get_display_items.return_value = []
    with self.assertRaises(Http404):
        views.redirect_to_course_position(course_module, views.CONTENT_DEPTH)
def test_get_view(self):
    """The router resolves '/current' to its registered view, built with full kwargs."""
    mocked_index = MagicMock()
    mocked_current = MagicMock()
    mocked_view = MagicMock(return_value=mocked_current)
    router, mocks = self.get_new_router()
    router.register([
        ("/history", mocked_view),
        ("/current", mocked_view),
        ("/", MagicMock(return_value=mocked_index)),
    ])
    with patch('gitfs.router.lru_cache') as mocked_cache:
        # Force a cache miss so the router constructs the view from scratch.
        mocked_cache.get_if_exists.return_value = None
        view, path = router.get_view("/current")
        assert view == mocked_current
        assert path == "/"
        # The view factory must receive the router's entire configuration.
        asserted_call = {
            'repo': mocks['repo'],
            'ignore': mocks['repo'].ignore,
            'repo_path': mocks['repo_path'],
            'mount_path': mocks['mount_path'],
            'regex': "/current",
            'relative_path': "/",
            'uid': 1,
            'gid': 1,
            'branch': mocks['branch'],
            'mount_time': 0,
            'queue': mocks['queue'],
            'max_size': mocks['max_size'],
            'max_offset': mocks['max_offset'],
        }
        mocked_view.assert_called_once_with(**asserted_call)
        mocked_cache.get_if_exists.assert_called_once_with("/current")
def test_rename(self):
    """rename() re-stages the file under its new path with a rename message."""
    mocked_re = MagicMock()
    mocked_index = MagicMock()
    mocked_os = MagicMock()
    mocked_result = MagicMock()
    mocked_result.rename.return_value = True
    mocked_re.sub.return_value = "new"
    mocked_os.path.split.return_value = [1, 1]
    with patch.multiple('gitfs.views.current', re=mocked_re, os=mocked_os):
        from gitfs.views import current as current_view
        # Temporarily stub the parent class's rename; restored at the end so
        # other tests see the real implementation.
        old_rename = current_view.PassthroughView.rename
        current_view.PassthroughView.rename = lambda self, old, new: True
        current = CurrentView(regex="regex", repo="repo",
                              repo_path="repo_path",
                              ignore=CachedIgnore())
        current._stage = mocked_index
        result = current.rename("old", "new")
        assert result is True
        # _stage must be called with the removed old path and the added new one.
        mocked_index.assert_called_once_with(**{
            'remove': 1,
            'add': "new",
            "message": "Rename old to new"
        })
        mocked_os.path.split.assert_called_once_with("old")
        current_view.PassthroughView.rename = old_rename
def test_valid_enable_ipmi(login_mock, query_dn_mock, set_mo_mock):
    """ipmi_enable returns the queried MO and pushes the expected IPMI config."""
    # Patch ImcHandle.login / set_mo so no real CIMC is contacted.
    login_mock.return_value = True
    set_mo_mock.return_value = True
    enabled_mo = MagicMock()
    enabled_mo.admin_state = "enabled"
    handle = ImcHandle(ip='169.254.1.1', username='******', password='******')
    handle._set_platform(platform=IMC_PLATFORM.TYPE_CLASSIC)
    query_dn_mock.return_value = enabled_mo

    # Scenario: enable IPMI with default values.
    assert ipmi_enable(handle) is enabled_mo
    # Inspect the MO handed to set_mo().
    pushed_mo = set_mo_mock.call_args[0][0]
    assert pushed_mo.admin_state == "enabled"
    assert pushed_mo.priv == CommIpmiLanConsts.PRIV_ADMIN
    assert pushed_mo.key == '0' * 40

    # Scenario: enable IPMI with custom priv and key.
    assert ipmi_enable(handle, priv="user", key='1' * 40) is enabled_mo
    pushed_mo = set_mo_mock.call_args[0][0]
    assert pushed_mo.admin_state == "enabled"
    assert pushed_mo.priv == "user"
    assert pushed_mo.key == '1' * 40
def test_search_q():
    """A 'q' search param filters the queryset with OR'd icontains terms."""
    user = MagicMock(spec=auth_models.User)
    user.is_staff = True
    queryset = MagicMock(spec=QuerySet)
    expected_result = MagicMock(spec=QuerySet)
    # Terminate the select_related -> prefetch_related -> filter chain.
    (queryset
        .select_related.return_value
        .prefetch_related.return_value
        .filter.return_value) = expected_result
    params = {'q': ['meow']}
    result = models.place.objects.get_search_queryset(user, queryset, params)
    # Expected OR filter across all searchable text fields.
    q = (
        MyQ(title__icontains='meow') |
        MyQ(address__icontains='meow') |
        MyQ(address2__icontains='meow') |
        MyQ(city__icontains='meow') |
        MyQ(state__icontains='meow') |
        MyQ(country__icontains='meow')
    )
    chained = (
        call
        .select_related('cover_photo')
        .prefetch_related('cover_photo__photo_file_set')
        .filter(q)
    )
    # Verify the exact chained call sequence and the final result object.
    assert queryset.mock_calls == chained.call_list()
    assert result is expected_result
def test_notification_rate_limit2(self, mock_retry, mock_summ, mock_delay, verify_signature_mock, debug_mock): """ Test that the notification task rate limit errors ok """ # Check that we fail gracefully when we hit the rate limit mock_delay.side_effect = lambda arg: process_notification(arg) resp = MagicMock() resp.headers = {'x-ratelimit-reset': 1404298869} exc = misfit_exceptions.MisfitRateLimitError(429, '', resp) mock_summ.side_effect = exc mock_retry.side_effect = Exception with HTTMock(JsonMock().goal_http, JsonMock().profile_http, JsonMock('summary_detail').summary_http): try: content = json.dumps(self.notification_content).encode('utf8') self.client.post(reverse('misfit-notification'), data=content, content_type='application/json') assert False, 'We should have raised an exception' except Exception: assert True mock_delay.assert_called_once_with(content) mock_summ.assert_called_once_with( detail=True, end_date=datetime.date(2014, 10, 8), start_date=datetime.date(2014, 10, 5)) mock_retry.assert_called_once_with(countdown=549) eq_(Goal.objects.filter(user=self.user).count(), 2) eq_(Profile.objects.filter(user=self.user).count(), 1) eq_(Summary.objects.filter(user=self.user).count(), 0)
def test_systemexit_during_deferred_processing_is_raised(self):
    """SystemExit raised by an expired deferred must propagate out of poll()."""
    deferred = MagicMock()
    deferred.function = MagicMock(side_effect=SystemExit)
    deferred.requeue = False
    # Already past its deadline, so poll() will run it immediately.
    deferred.end = self.engine.latest_poll_time - 1
    self.engine._deferreds.append(deferred)
    with self.assertRaises(SystemExit):
        self.engine.poll(0.02)
def test_poll_processes_events(self):
    """poll() forwards poller-reported events to the owning channel."""
    channel = MagicMock()
    channel._handle_events = MagicMock()
    self.engine._channels = {1: channel}
    reported_events = {1: Engine.ALL_EVENTS}
    self.engine._poller.poll = MagicMock(return_value=reported_events)
    self.engine.poll(0.02)
    channel._handle_events.assert_called_once_with(Engine.ALL_EVENTS)
def test_create_aggregate_projections(self):
    """create_aggregate_projections issues the projection DDL then a refresh."""
    connection = MagicMock()
    task = self.create_task(cls=CopyToVerticaDummyTableWithProjections)
    task.create_aggregate_projections(connection)
    expected_calls = [
        call('CREATE PROJECTION IF NOT EXISTS foobar.dummy_table_projection_2 DEFINITION_2 on foobar.dummy_table;'),
        call('SELECT start_refresh();'),
    ]
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(expected_calls, connection.cursor().execute.mock_calls)
def test_keyboardinterrupt_during_deferred_processing_is_raised(self):
    """KeyboardInterrupt raised by an expired deferred must propagate out of poll()."""
    deferred = MagicMock()
    deferred.function = MagicMock(side_effect=KeyboardInterrupt)
    deferred.requeue = False
    # Already past its deadline, so poll() will run it immediately.
    deferred.end = self.engine.latest_poll_time - 1
    self.engine._deferreds.append(deferred)
    with self.assertRaises(KeyboardInterrupt):
        self.engine.poll(0.02)
def test_reap_tmp_images(self, _os_datastore_path, _uuid):
    """
    Test that stray images are found and deleted by the reaper
    """
    def _fake_ds_folder(datastore, folder):
        # Mirrors the "<datastore>__<folder>" naming the reaper expects.
        return "%s__%s" % (datastore, folder)

    ds = MagicMock()
    ds.id = "dsid"
    ds.type = DatastoreType.EXT3
    # In a random transient directory, set up a directory to act as the
    # tmp images folder and to contain a stray image folder with a file.
    tmpdir = file_util.mkdtemp(delete=True)
    tmp_images_folder = _fake_ds_folder(ds.id, TMP_IMAGE_FOLDER_NAME)
    tmp_images_dir = os.path.join(tmpdir, tmp_images_folder)
    tmp_image_dir = os.path.join(tmp_images_dir, "stray_image")
    os.mkdir(tmp_images_dir)
    os.mkdir(tmp_image_dir)
    (fd, path) = tempfile.mkstemp(prefix='strayimage_', dir=tmp_image_dir)
    self.assertTrue(os.path.exists(path))

    def _fake_os_datastore_path(datastore, folder):
        # Redirect datastore path resolution into the transient directory.
        return os.path.join(tmpdir, _fake_ds_folder(datastore, folder))

    _os_datastore_path.side_effect = _fake_os_datastore_path
    ds_manager = MagicMock()
    ds_manager.get_datastores.return_value = [ds]
    image_manager = EsxImageManager(self.vim_client, ds_manager)
    image_manager.reap_tmp_images()
    # verify stray image is deleted
    self.assertFalse(os.path.exists(path))
def test__role_expand(self):
    """_role_expand leaves plain tags alone and expands '@' role wildcards."""
    # A tag with no '@' wildcard passes through unchanged.
    self.assertEqual(config._role_expand(cfg, base="tag[]"), ["tag[]"])
    # With role attributes available, '@' expands to one entry per role,
    # appending the role's attribute value when present.
    cfg._get_role_attr = MagicMock(return_value={"r0": "", "r1": "CONFIG"})
    expanded = config._role_expand(cfg, base="tag['@']")
    self.assertEqual(expanded, ["r0", "r1 CONFIG"])
def test_nodes():
    # NOTE(review): this generator-style test yields a failing task and
    # expects the driving framework to send the resulting error back in.
    # It references `self` without taking it as a parameter, so as written
    # it can only work if it is nested inside a method where `self` is a
    # closure variable -- confirm against the enclosing code.
    task = MagicMock(publish=Mock(side_effect=Exception("Test error!")))
    error = yield task
    self.assertIsNotNone(error)
    raise error
class TrainingEnvNoIntermediate(Mock):
    """Stub env: a framework module is configured, no S3 intermediate output."""
    framework_module = 'my_framework:entry_point'
    log_level = 20

    def sagemaker_s3_output(self):
        # No intermediate output location configured.
        return None


class ScriptTrainingEnv(TrainingEnv):
    """Stub env: script mode (no framework module) with an S3 output bucket."""
    framework_module = None

    def sagemaker_s3_output(self):
        return 's3://bucket'


@patch('inotify_simple.INotify', MagicMock())
@patch('boto3.client', MagicMock())
@patch('importlib.import_module')
@patch('sagemaker_containers.training_env', TrainingEnv)
def test_train(import_module):
    """The trainer imports the configured framework module and calls its entry point."""
    framework = Mock()
    import_module.return_value = framework
    _trainer.train()
    import_module.assert_called_with('my_framework')
    framework.entry_point.assert_called()


# Decorators for the next test; its definition continues beyond this chunk.
@patch('inotify_simple.INotify', MagicMock())
@patch('boto3.client', MagicMock())
@patch('importlib.import_module')
async def test_on_game_session_meta_update(preset_manager, skip_qtbot):
    """A session meta update refreshes the multiworld client and reports self state."""
    # Setup
    network_client = MagicMock()
    network_client.current_user = User(id=12, name="Player A")
    network_client.session_self_update = AsyncMock()
    game_connection = MagicMock(spec=GameConnection)
    game_connection.executor = AsyncMock()
    game_connection.pretty_current_status = "Maybe Connected"
    game_connection.lock_identifier = None
    # Initial session: two presets, one player, still in SETUP.
    initial_session = GameSessionEntry(
        id=1234,
        name="The Session",
        presets=[
            preset_manager.default_preset_for_game(
                RandovaniaGame.METROID_PRIME_ECHOES),
            preset_manager.default_preset
        ],
        players={
            12: PlayerSessionEntry(12, "Player A", 0, True, "Online"),
        },
        game_details=None,
        state=GameSessionState.SETUP,
        generation_in_progress=None,
        allowed_games=[RandovaniaGame.METROID_PRIME_ECHOES],
    )
    # Updated session: one preset, a second player, game generated, IN_PROGRESS.
    second_session = GameSessionEntry(
        id=1234,
        name="The Session",
        presets=[
            preset_manager.default_preset_for_game(
                RandovaniaGame.METROID_PRIME_ECHOES)
        ],
        players={
            12: PlayerSessionEntry(12, "Player A", 0, True, "Online"),
            24: PlayerSessionEntry(24, "Player B", None, False, "Online"),
        },
        game_details=GameDetails(
            seed_hash="AB12",
            word_hash="Chykka Required",
            spoiler=True,
        ),
        state=GameSessionState.IN_PROGRESS,
        generation_in_progress=None,
        allowed_games=[RandovaniaGame.METROID_PRIME_ECHOES],
    )
    network_client.current_game_session = initial_session
    window = await GameSessionWindow.create_and_update(network_client, game_connection,
                                                       preset_manager, MagicMock(), MagicMock())
    window.update_multiworld_client_status = AsyncMock()

    # Run
    await window.on_game_session_meta_update(second_session)

    # The window refreshes the multiworld client and pushes this player's
    # inventory/status/backend back to the network client.
    window.update_multiworld_client_status.assert_awaited()
    network_client.session_self_update.assert_awaited_once_with(
        game_connection.get_current_inventory.return_value,
        game_connection.current_status,
        game_connection.backend_choice,
    )
async def test_check_dangerous_presets(window, mocker):
    """Declining the dangerous-presets warning dialog aborts generation."""
    # The user answers "No" to the warning dialog.
    mock_warning = mocker.patch("randovania.gui.lib.async_dialog.warning", new_callable=AsyncMock)
    mock_warning.return_value = QtWidgets.QMessageBox.No
    game_session = MagicMock()
    game_session.presets = [MagicMock(), MagicMock(), MagicMock()]
    game_session.presets[0].name = "Preset A"
    game_session.presets[0].dangerous_settings.return_value = ["Cake"]
    game_session.presets[1].name = "Preset B"
    game_session.presets[1].dangerous_settings.return_value = ["Bomb", "Knife"]
    game_session.presets[2].dangerous_settings.return_value = []
    window._game_session = game_session
    # One named player plus an empty slot (falls back to "Player N" naming).
    window.team_players = [MagicMock(), MagicMock()]
    window.team_players[0].player.name = "Crazy Person"
    window.team_players[1].player = None
    permalink = MagicMock(spec=Permalink)
    permalink.parameters = MagicMock(spec=GeneratorParameters)
    permalink.parameters.presets = list(game_session.presets)

    # Run
    result = await window._check_dangerous_presets(permalink)

    # Assert: the message lists only the dangerous presets, per player.
    message = (
        "The following presets have settings that can cause an impossible game:\n"
        "\nCrazy Person - Preset A: Cake"
        "\nPlayer 2 - Preset B: Bomb, Knife"
        "\n\nDo you want to continue?")
    mock_warning.assert_awaited_once_with(
        window, "Dangerous preset", message,
        QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
    assert not result
from __future__ import print_function from mock import MagicMock import unittest import time import sys sys.modules['RPi'] = MagicMock() from apscheduler.schedulers.background import BackgroundScheduler from modules.BaseModule import BaseModule from modules.DynamoDBDataSource import DynamoDBDataSource from modules.RelaySwitch import RelaySwitch class Validate(unittest.TestCase): def setUp(self): BaseModule.scheduler = BackgroundScheduler() def test(self): relay = RelaySwitch({ 'name': 'light', 'pin': 18, 'value': 1, 'value_toggle': 0, 'duration': 2 }) ddb = DynamoDBDataSource({ 'name': 'ddbevents', 'table': 'staging-test1-events', 'region': 'us-east-1'
def test_get_handler(self):
    """GetHandler.get() runs without error against a minimal Application."""
    application = tornado.web.Application()
    dependency = MagicMock()
    handler = GetHandler(application, dependency)
    # Tornado requires _transforms before a request method can execute.
    handler._transforms = []
    handler.get()
def test_add_cv(self, read_yaml):
    """add_cv returns None for a bgp-protocol reconnect with no existing c_dict."""
    cfg.c_dict = False
    # NOTE(review): double-underscore name mangling applies inside this test
    # class, so this actually sets cfg._<TestClass>__make_c_dict, which may
    # not be the attribute config.add_cv consults -- confirm intent.
    cfg.__make_c_dict = MagicMock(return_value=True)
    cfg.cv_file = "test"
    self.assertIsNone(config.add_cv(cfg, 'protocols', 'bgp', reconnect=True))
def _gen_ctx(self): _ctx = MagicMock() _graph_mock = MagicMock() _graph_mock.execute = MagicMock() _sequence = MagicMock() _sequence.add = MagicMock() _graph_mock._sequence = _sequence _graph_mock.sequence = MagicMock(return_value=_sequence) _node = MagicMock() _node.operations = { 'cloudify.interfaces.statistics.perfomance': {} } _instance = MagicMock() _instance.send_event = MagicMock( return_value='event') _instance.execute_operation = MagicMock( return_value='execute_operation') _node.properties = {} _node.instances = [_instance] _workflow_ctx = MagicMock() _workflow_ctx.nodes = [_node] _workflow_ctx.graph_mode = MagicMock(return_value=_graph_mock) _workflow_ctx.get_ctx = MagicMock(return_value=_ctx) return _workflow_ctx, _graph_mock, _instance
def test__process_args(self):
    """_process_args returns None with no data and truthy once config vars process."""
    # No data supplied -> nothing to process.
    self.assertIsNone(config._process_args(cfg))
    # With a config-var assignment and a stubbed processor -> truthy result.
    payload = {"set cv['r0__name']": 2}
    cfg._process_config_vars = MagicMock(return_value=True)
    self.assertTrue(config._process_args(cfg, data=payload))
def test_get_postgresql_clusters(fx_addresses_expected, fx_asgs_expected, fx_pg_instances_expected, fx_eip_allocation, fx_launch_configuration_expected, fx_ips_dnsnames):
    """Cluster entities are assembled from EIPs, launch configs and recordsets."""
    # Stub out every AWS collector with fixture data.
    postgresql.collect_eip_addresses = MagicMock(
        return_value=fx_addresses_expected)
    postgresql.collect_launch_configurations = MagicMock(
        return_value=fx_launch_configuration_expected)
    postgresql.extract_eipalloc_from_lc = MagicMock(
        return_value=fx_eip_allocation)
    postgresql.collect_recordsets = MagicMock(return_value=fx_ips_dnsnames)
    entities = postgresql.get_postgresql_clusters(
        conftest.REGION, conftest.pg_infrastructure_account,
        fx_asgs_expected, fx_pg_instances_expected)
    # One entity per cluster: 'bla' with an attached EIP, 'malm' with an
    # allocation error because its EIP is not attached to any instance.
    assert entities == [{
        'type': 'postgresql_cluster',
        'id': 'pg-bla[aws:12345678:eu-central-1]',
        'region': conftest.REGION,
        'spilo_cluster': 'bla',
        'elastic_ip': '12.23.34.45',
        'elastic_ip_instance_id': 'i-1234',
        'allocation_error': '',
        'instances': [{
            'instance_id': 'i-1234',
            'private_ip': '192.168.1.1',
            'role': 'master'
        }, {
            'instance_id': 'i-02e0',
            'private_ip': '192.168.1.3',
            'role': 'replica'
        }],
        'infrastructure_account': conftest.pg_infrastructure_account,
        'dnsname': 'something.interesting.com',
        'shards': {
            'postgres': 'something.interesting.com:5432/postgres'
        }
    }, {
        'type': 'postgresql_cluster',
        'id': 'pg-malm[aws:12345678:eu-central-1]',
        'region': conftest.REGION,
        'spilo_cluster': 'malm',
        'elastic_ip': '22.33.44.55',
        'elastic_ip_instance_id': '',
        'allocation_error': 'There is a public IP defined but not attached to any instance',
        'instances': [{
            'instance_id': 'i-4444',
            'private_ip': '192.168.13.32',
            'role': 'master'
        }, {
            'instance_id': 'i-5555',
            'private_ip': '192.168.31.154',
            'role': 'replica'
        }],
        'infrastructure_account': conftest.pg_infrastructure_account,
        'dnsname': 'other.cluster.co.uk',
        'shards': {
            'postgres': 'other.cluster.co.uk:5432/postgres'
        }
    }]
import sys import os from collections import OrderedDict, defaultdict import unittest2 as unittest from mock import patch, Mock, MagicMock, call, mock_open import builtins from jnpr.toby.init.init import init from jnpr.toby.utils.response import Response from jnpr.toby.hldcl.juniper.junos import Juniper import jnpr.toby.engines.config.config_utils as config_utils from jnpr.toby.engines.config.config import config #from config import config #import config_utils import pprint builtins.t = MagicMock(spec=init) t.t_dict ={'resources': {'r0': {'interfaces': {'r0r1_2': {'pic': 'ge-1/0/7', 'management': 0, 'name': 'ge-1/0/7.0', 'link': 'link2', 'type': ['10x', '1GE', 'LAN', 'EQ', 'SFP-SX', 'QDPCE-R-40X', 'B3', 'I3', 'ether', 'ge'], 'unit': 0}, 'r0r1_1': {'management': 0, 'uv_test': 2, 'uv-test2': 3, 'unit': 0,
def test_get_job_inputs():
    """get_job_inputs() echoes back the inputs the configuration was built with."""
    inputs = MagicMock()
    configuration = AutoRegressionConfiguration(job_inputs=inputs)
    assert configuration.get_job_inputs() == inputs
def load_db_params(self):
    """
    Read database configuration, register every configured DB/table with the
    db_manager, and return it.  Returns a MagicMock instead in the unit-test
    environment, and None when no databases are configured.
    (This module targets Python 2 -- see iteritems() below.)
    """
    self.databases = tuple(ConfigValue.to_iter(self.config.raw_data['databases']))
    self.db_params = {}
    self.predefined_type_ids = {}
    if not self.databases:
        return
    # Unit tests get a mock db_manager so no real connections are made.
    if self.env == 'unit_test':
        from mock import MagicMock
        return MagicMock()
    dbm = db_manager.db_manager()
    db_param_names = ('name', 'db_host', 'db_user', 'db_pass', 'db_port',
                      'pool_size', 'max_overflow')
    for db_name in self.databases:
        conf_params = ConfigValue.to_iter(self.config.raw_data[db_name + '_db'])
        params = dict(zip(db_param_names, conf_params))
        # "*" means "inherit the globally-configured value".
        if params['db_user'] == "*":
            params['db_user'] = self.db_user
        if params['db_pass'] == "*":
            params['db_pass'] = self.db_pass
        if params['db_port'] == "*":
            params['db_port'] = self.db_port
        if params['pool_size'] == "*":
            params['pool_size'] = self.db_pool_size
        if params['max_overflow'] == "*":
            params['max_overflow'] = self.db_pool_overflow_size
        dbm.setup_db(db_name, g_override=self, **params)
        self.db_params[db_name] = params
    dbm.type_db = dbm.get_engine(self.config.raw_data['type_db'])
    dbm.relation_type_db = dbm.get_engine(self.config.raw_data['rel_type_db'])

    def split_flags(raw_params):
        # Separate plain params from "!flag" / "!key=value" entries.
        params = []
        flags = {}
        for param in raw_params:
            if not param.startswith("!"):
                params.append(param)
            else:
                key, sep, value = param[1:].partition("=")
                if sep:
                    flags[key] = value
                else:
                    flags[key] = True
        return params, flags

    # Register every "db_table_<name>" entry as a thing or relation table.
    prefix = 'db_table_'
    for k, v in self.config.raw_data.iteritems():
        if not k.startswith(prefix):
            continue
        params, table_flags = split_flags(ConfigValue.to_iter(v))
        name = k[len(prefix):]
        kind = params[0]
        server_list = self.config.raw_data["db_servers_" + name]
        engines, flags = split_flags(ConfigValue.to_iter(server_list))
        typeid = table_flags.get("typeid")
        if typeid:
            self.predefined_type_ids[name] = int(typeid)
        if kind == 'thing':
            dbm.add_thing(name, dbm.get_engines(engines), **flags)
        elif kind == 'relation':
            # params[1]/params[2] are the two thing tables being related.
            dbm.add_relation(name, params[1], params[2],
                             dbm.get_engines(engines), **flags)
    return dbm
def test_init():
    """Constructor should expose the inputs and the execution class."""
    mock_inputs = MagicMock()
    configuration = AutoRegressionConfiguration(job_inputs=mock_inputs)
    assert configuration.inputs == mock_inputs
    assert configuration.job_execution_class() == AutoRegressionExecution
conftest.pg_infrastructure_account, 'dnsname': 'other.cluster.co.uk', 'shards': { 'postgres': 'other.cluster.co.uk:5432/postgres' } }] # If any of the utility functions fail, we expect an empty list fx_something_fails = [ 'collect_eip_addresses', 'filter_asgs', 'filter_instances', 'collect_launch_configurations', 'extract_eipalloc_from_lc' ] @pytest.mark.parametrize('func', fx_something_fails) @pytest.mark.parametrize('side_effect', [MagicMock(side_effect=Exception)]) @pytest.mark.parametrize('output', [[]]) def test_get_postgresql_clusters_after_exception(func, side_effect, output, fx_asgs_expected, fx_pg_instances_expected, monkeypatch): monkeypatch.setattr(postgresql, func, side_effect) entities = postgresql.get_postgresql_clusters( conftest.REGION, conftest.pg_infrastructure_account, fx_asgs_expected, fx_pg_instances_expected) assert entities == output
class AppPillowTest(TestCase):
    """End-to-end pillow tests: couch Application changes -> Kafka -> Elasticsearch."""

    domain = 'app-pillowtest-domain'

    def setUp(self):
        """Reset case data and (re)create a clean app index in Elasticsearch."""
        super(AppPillowTest, self).setUp()
        FormProcessorTestUtils.delete_all_cases()
        with trap_extra_setup(ConnectionError):
            self.es = get_es_new()
        ensure_index_deleted(APP_INDEX_INFO.index)
        initialize_index_and_mapping(self.es, APP_INDEX_INFO)

    def tearDown(self):
        """Drop the app index created in setUp."""
        ensure_index_deleted(APP_INDEX_INFO.index)
        super(AppPillowTest, self).tearDown()

    def test_app_pillow_kafka(self):
        """An app saved to couch should flow through Kafka into Elasticsearch."""
        consumer = get_test_kafka_consumer(topics.APP)
        # have to get the seq id before the change is processed
        kafka_seq = get_topic_offset(topics.APP)
        couch_seq = get_current_seq(Application.get_db())
        app_name = 'app-{}'.format(uuid.uuid4().hex)
        app = self._create_app(app_name)
        app_db_pillow = get_application_db_kafka_pillow('test_app_db_pillow')
        app_db_pillow.process_changes(couch_seq, forever=False)
        # confirm change made it to kafka
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        self.assertEqual(app._id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)
        # send to elasticsearch
        app_pillow = get_app_to_elasticsearch_pillow()
        app_pillow.process_changes(since=kafka_seq, forever=False)
        self.es.indices.refresh(APP_INDEX_INFO.index)
        # confirm change made it to elasticsearch
        results = AppES().run()
        self.assertEqual(1, results.total)
        app_doc = results.hits[0]
        self.assertEqual(self.domain, app_doc['domain'])
        self.assertEqual(app['_id'], app_doc['_id'])
        self.assertEqual(app_name, app_doc['name'])

    def _create_app(self, name):
        """Build and save a minimal app (one module, one case-opening form)."""
        factory = AppFactory(domain=self.domain, name=name, build_version='2.11.0')
        module1, form1 = factory.new_basic_module('open_case', 'house')
        factory.form_opens_case(form1)
        app = factory.app
        app.save()
        # Remove the app again once the test finishes.
        self.addCleanup(app.delete)
        return app

    def refresh_elasticsearch(self, kafka_seq, couch_seq):
        """Push pending couch changes through Kafka into ES, then refresh the index."""
        app_db_pillow = get_application_db_kafka_pillow('test_app_db_pillow')
        app_db_pillow.process_changes(couch_seq, forever=False)
        app_pillow = get_app_to_elasticsearch_pillow()
        app_pillow.process_changes(since=kafka_seq, forever=False)
        self.es.indices.refresh(APP_INDEX_INFO.index)

    # Stub out validation and build-file generation so make_build() is cheap.
    @patch.object(Application, 'validate_app', list)
    @patch.object(Application, 'create_build_files', MagicMock())
    def test_prune_autogenerated_builds(self):
        """Pruning should delete only the auto-generated build from ES."""
        kafka_seq = get_topic_offset(topics.APP)
        couch_seq = get_current_seq(Application.get_db())
        # Build #1, manually generated
        app = self._create_app('test-prune-app')
        build1 = app.make_build()
        build1.save()
        self.assertFalse(build1.is_auto_generated)
        # Build #2, auto-generated
        app.save()
        build2 = make_async_build(app, 'someone')
        # Build #3, manually generated
        app.save()
        build3 = app.make_build()
        build3.save()
        # All 3 builds should show up in ES
        self.refresh_elasticsearch(kafka_seq, couch_seq)
        build_ids_in_es = AppES().domain(self.domain).is_build().values_list('_id', flat=True)
        self.assertItemsEqual(build_ids_in_es, [build1._id, build2._id, build3._id])
        # prune, which should delete the autogenerated build
        prune_auto_generated_builds(self.domain, app.id)
        # Build2 should no longer be in ES
        self.refresh_elasticsearch(kafka_seq, couch_seq)
        build_ids_in_es = AppES().domain(self.domain).is_build().values_list('_id', flat=True)
        self.assertItemsEqual(build_ids_in_es, [build1._id, build3._id])
def test_create_from_result():
    """create_from_result() is not implemented and should return None."""
    configuration = AutoRegressionConfiguration(job_inputs=MagicMock())
    result = configuration.create_from_result(MagicMock(), "output")
    assert result is None
class TestCreateViewTask(bigquery.BigQueryCreateViewTask):
    """View-creation task fixture backed by a mocked BigQuery client."""

    # Shared mock so tests can inspect API calls without real BigQuery access.
    client = MagicMock()
    view = '''SELECT * FROM table LIMIT 10'''

    def output(self):
        """Target pointing at the 'view1' view in the test dataset."""
        target = bigquery.BigQueryTarget(PROJECT_ID, DATASET_ID, 'view1',
                                         client=self.client)
        return target
def test_get_parameters_class():
    """get_parameters_class() should resolve to AutoRegressionParameters."""
    configuration = AutoRegressionConfiguration(job_inputs=MagicMock())
    parameters_class = configuration.get_parameters_class()
    assert parameters_class.__name__ == "AutoRegressionParameters"
def _bq_conn(): conn = MagicMock() conn.get.side_effect = lambda x: 'bigquery' if x == 'type' else None return conn
import six import sys import unittest import confuse from mock import MagicMock, patch from beets import logging from beets.library import Item from beets.util import bytestring_path from beetsplug import lyrics from test import _common log = logging.getLogger('beets.test_lyrics') raw_backend = lyrics.Backend({}, log) google = lyrics.Google(MagicMock(), log) genius = lyrics.Genius(MagicMock(), log) class LyricsPluginTest(unittest.TestCase): def setUp(self): """Set up configuration.""" lyrics.LyricsPlugin() def test_search_artist(self): item = Item(artist=u'Alice ft. Bob', title=u'song') self.assertIn((u'Alice ft. Bob', [u'song']), lyrics.search_pairs(item)) self.assertIn((u'Alice', [u'song']), lyrics.search_pairs(item)) item = Item(artist=u'Alice feat Bob', title=u'song') self.assertIn((u'Alice feat Bob', [u'song']),
class test_commands:
    """Nose test suite for CommandManager: registration, aliases, execution, auth."""

    def __init__(self):
        # Fresh singleton manager plus mock collaborators shared by all tests.
        self.manager = CommandManager()
        self.manager.logger.setLevel(logging.CRITICAL)  # Shut up, logger
        self.factory_manager = Mock(name="factory_manager")
        self.plugin = Mock(name="plugin")

    @nosetools.nottest
    def teardown(self):
        """Reset manager state and mocks between tests (the manager is a singleton)."""
        # Clean up
        self.manager.commands = {}
        self.manager.aliases = {}
        self.manager.auth_handler = None
        self.manager.perm_handler = None
        self.plugin.reset_mock()
        self.plugin.handler.reset_mock()
        self.factory_manager.reset_mock()

    @nose.with_setup(teardown=teardown)
    def test_singleton(self):
        """CMNDS | Test Singleton metaclass"""
        nosetools.assert_true(self.manager is CommandManager())

    @nose.with_setup(teardown=teardown)
    def test_set_factory_manager(self):
        """CMNDS | Test setting factory manager"""
        self.manager.set_factory_manager(self.factory_manager)
        nosetools.assert_true(self.factory_manager)

    @nose.with_setup(teardown=teardown)
    def test_add_command(self):
        """CMNDS | Test adding commands"""
        r = self.manager.register_command("test", self.plugin.handler,
                                          self.plugin, "test.test",
                                          ["test2"], True)
        nosetools.assert_true(r)
        nosetools.assert_true("test" in self.manager.commands)
        # Inspect the stored command record: handler, permission, owner, default.
        command = self.manager.commands.get("test", None)
        if command:
            nosetools.assert_true("f" in command)
            nosetools.assert_true(command.get("f") is self.plugin.handler)
            nosetools.assert_true("permission" in command)
            nosetools.assert_true(command.get("permission") == "test.test")
            nosetools.assert_true("owner" in command)
            nosetools.assert_true(command.get("owner") is self.plugin)
            nosetools.assert_true("default" in command)
            nosetools.assert_true(command.get("default"))
        nosetools.assert_true("test2" in self.manager.aliases)
        alias = self.manager.aliases.get("test2", None)
        if alias:
            nosetools.assert_true(alias == "test")
        # Re-registering the same command name must be rejected.
        r = self.manager.register_command("test", self.plugin.handler,
                                          self.plugin, "test.test",
                                          ["test2"], True)
        nosetools.assert_false(r)

    @nose.with_setup(teardown=teardown)
    def test_unregister_commands(self):
        """CMNDS | Test unregistering commands"""
        self.manager.register_command("test1", self.plugin.handler,
                                      self.plugin, aliases=["test11"])
        self.manager.register_command("test2", self.plugin.handler,
                                      self.plugin, aliases=["test22"])
        self.manager.register_command("test3", self.plugin.handler,
                                      self.plugin, aliases=["test33"])
        nosetools.assert_equals(len(self.manager.commands), 3)
        nosetools.assert_equals(len(self.manager.aliases), 3)
        # Unregistering by owner removes all of that owner's commands/aliases.
        self.manager.unregister_commands_for_owner(self.plugin)
        nosetools.assert_equals(len(self.manager.commands), 0)
        nosetools.assert_equals(len(self.manager.aliases), 0)

    @nose.with_setup(teardown=teardown)
    def test_run_commands_defaults(self):
        """CMNDS | Test running commands directly | Defaults"""
        self.manager.register_command("test4", self.plugin.handler,
                                      self.plugin, aliases=["test5"],
                                      default=True)
        caller = Mock(name="caller")
        source = Mock(name="source")
        protocol = Mock(name="protocol")
        # Testing defaults: both the command and its alias should run.
        r = self.manager.run_command("test4", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.Success, None))
        r = self.manager.run_command("test5", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.Success, None))
        nosetools.assert_equals(self.plugin.handler.call_count, 2)

    @nose.with_setup(teardown=teardown)
    def test_run_commands_aliases(self):
        """CMNDS | Test running commands directly | Aliases"""
        self.manager.register_command("test4", self.plugin.handler,
                                      self.plugin, aliases=["test5"],
                                      default=True)
        caller = Mock(name="caller")
        source = Mock(name="source")
        protocol = Mock(name="protocol")
        # Testing defaults
        r = self.manager.run_command("test4", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.Success, None))
        self.plugin.handler.assert_called_with(protocol, caller, source,
                                               "test4", "", [])
        # Reset mock
        self.plugin.handler.reset_mock()
        # Running via the alias dispatches under the canonical name "test4".
        r = self.manager.run_command("test5", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.Success, None))
        self.plugin.handler.assert_called_with(protocol, caller, source,
                                               "test4", "", [])
        # Reset mock
        self.plugin.handler.reset_mock()
        # A real command named "test5" takes precedence over the alias.
        self.manager.register_command("test5", self.plugin.handler,
                                      self.plugin, default=True)
        r = self.manager.run_command("test5", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.Success, None))
        self.plugin.handler.assert_called_with(protocol, caller, source,
                                               "test5", "", [])

    @nose.with_setup(teardown=teardown)
    def test_run_commands_auth(self):
        """CMNDS | Test running commands directly | Auth"""
        caller = Mock(name="caller")
        source = Mock(name="source")
        protocol = Mock(name="protocol")
        auth = Mock(name="auth")
        perms = Mock(name="perms")
        self.manager.set_auth_handler(auth)
        self.manager.set_permissions_handler(perms)
        ### COMMAND WITH PERMISSION ###
        auth.authorized.return_value = True
        perms.check.return_value = True
        self.manager.register_command("test5", self.plugin.handler,
                                      self.plugin, "test.test",
                                      aliases=["test6"])
        r = self.manager.run_command("test5", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.Success, None))
        nosetools.assert_equals(self.plugin.handler.call_count, 1)
        auth.authorized.reset_mock()
        perms.check.reset_mock()
        self.plugin.handler.reset_mock()
        ### ALIAS WITH PERMISSION ###
        r = self.manager.run_command("test6", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.Success, None))
        nosetools.assert_equals(self.plugin.handler.call_count, 1)
        auth.authorized.reset_mock()
        perms.check.reset_mock()
        # Deny the permission check for the next case.
        auth.authorized.return_value = True
        perms.check.return_value = False
        self.plugin.handler.reset_mock()
        ### COMMAND WITHOUT PERMISSION ###
        r = self.manager.run_command("test5", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.NoPermission, None))
        nosetools.assert_equals(self.plugin.handler.call_count, 0)
        perms.check.return_value = True
        auth.authorized.reset_mock()
        perms.check.reset_mock()
        self.plugin.handler.reset_mock()
        ### COMMAND WITH EXCEPTION ###
        # Replace the handler with one that raises; the manager should trap it
        # and report CommandState.Error with the exception attached.
        self.plugin.handler = Mock(side_effect=Exception('Boom!'))
        self.manager.unregister_commands_for_owner(self.plugin)
        self.manager.register_command("test5", self.plugin.handler,
                                      self.plugin, "test.test")
        r = self.manager.run_command("test5", caller, source, protocol, "")
        nosetools.assert_equals(r[0], CommandState.Error)
        nosetools.assert_true(isinstance(r[1], Exception))
        nosetools.assert_equals(self.plugin.handler.call_count, 1)
        auth.authorized.reset_mock()
        perms.check.reset_mock()
        self.plugin.handler.reset_mock()
        ### UNKNOWN COMMAND ###
        r = self.manager.run_command("test7", caller, source, protocol, "")
        nosetools.assert_equals(r, (CommandState.Unknown, None))
        nosetools.assert_equals(self.plugin.handler.call_count, 0)
class TestExternalBigQueryTask(bigquery.ExternalBigQueryTask):
    """External-table task fixture wired to a mocked BigQuery client."""

    # Shared mock so tests can inspect API calls without real BigQuery access.
    client = MagicMock()

    def output(self):
        """Target pointing at the 'table1' table in the test dataset."""
        target = bigquery.BigQueryTarget(PROJECT_ID, DATASET_ID, 'table1',
                                         client=self.client)
        return target
""" This module contains the unit tests for the cle launcher shutdown """ import unittest from mock import patch, Mock, MagicMock from hbp_nrp_cleserver.server import CLEGazeboSimulationAssembly from hbp_nrp_commons.generated import exp_conf_api_gen MockOs = Mock() MockOs.environ = {'ROS_MASTER_URI': "localhost:0815"} MockOs.path.join.return_value = "/a/really/nice/place" @patch("hbp_nrp_backend.storage_client_api.StorageClient.StorageClient", new=MagicMock()) @patch("hbp_nrp_cleserver.server.GazeboSimulationAssembly.SimUtil", new=Mock()) @patch("hbp_nrp_cleserver.server.CLEGazeboSimulationAssembly.os", new=Mock()) @patch("hbp_nrp_cleserver.server.GazeboSimulationAssembly.subprocess", new=Mock()) @patch("hbp_nrp_cleserver.server.GazeboSimulationAssembly.rospy", new=MagicMock()) @patch("hbp_nrp_cleserver.server.GazeboSimulationAssembly.rosnode", new=MagicMock()) class TestCLELauncherShutdown(unittest.TestCase): def setUp(self): self.m_simconf = MagicMock() self.m_simconf.gzserver_host = 'local' self.mocked_cleserver = patch( "hbp_nrp_cleserver.server.CLEGazeboSimulationAssembly.ROSCLEServer"
def __init__(self):
    """Prepare a fresh CommandManager and mock collaborators for the tests."""
    self.manager = CommandManager()
    # Silence log output while the suite runs.
    self.manager.logger.setLevel(logging.CRITICAL)
    self.plugin = Mock(name="plugin")
    self.factory_manager = Mock(name="factory_manager")
def setUp(self):
    """Wire a mocked client into a Cluster and expose a 'test' Index."""
    mock_client = MagicMock()
    self.client = mock_client
    self.cluster = Cluster(mock_client)
    self.index = Index(self.cluster, 'test')
"""Test testing utilities """
import os
import warnings
import subprocess
from mock import patch, MagicMock
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true, assert_raises


def test_tempfatfs():
    """Mount a temporary FAT filesystem and check the mount point exists.

    Written as a nose test generator: the yielded tuple is executed by nose.
    If FUSE mounting is unavailable, warn and yield nothing instead of failing.
    """
    try:
        fatfs = TempFATFS()
    except (IOError, OSError):
        warnings.warn("Cannot mount FAT filesystems with FUSE")
    else:
        with fatfs as tmpdir:
            yield assert_true, os.path.exists(tmpdir)


# Force the mount helper to fail so TempFATFS surfaces it as IOError.
@patch('subprocess.check_call', MagicMock(side_effect=subprocess.CalledProcessError('', '')))
def test_tempfatfs_calledprocesserror():
    """TempFATFS should raise IOError when the mount command exits non-zero."""
    yield assert_raises, IOError, TempFATFS


# Simulate the mount binary being missing entirely.
@patch('subprocess.Popen', MagicMock(side_effect=OSError()))
def test_tempfatfs_oserror():
    """TempFATFS should raise IOError when the mount process cannot start."""
    yield assert_raises, IOError, TempFATFS
def test_gen_combine_fields_text(self):
    """_gen_weighted_fields_text should emit the expected #weight query string.

    Embedding-space initialization is stubbed out so only the query-text
    generation for the given per-field weights is exercised. The expected
    string is whitespace-significant and compared verbatim.
    """
    # Weights for the unigram (u), ordered (o) and unordered-window (w) parts.
    field_weights = {"u": 0.8, "o": 0.1, "w": 0.1}
    query_language_modifier = QueryLanguageModifier(self.parameters)
    # Skip loading a real embedding space.
    query_language_modifier.embedding_space.initialize = MagicMock(
        return_value=None)
    res = query_language_modifier._gen_weighted_fields_text(
        field_weights, self.all_fields)
    # Dump the generated query for debugging on failure.
    print(res, file=sys.stderr)
    # NOTE(review): expected value preserved byte-for-byte; do not reformat.
    expected_res = """ #weight( 0.80000#weight( 1#weight( 0.90000000000000002220#combine(two) 0.59999999999999997780#combine(stand) 0.10000000000000000555#combine(hands) ) 0.065#weight( 0.90000000000000002220#combine(pair) 0.10000000000000000555#combine(one) 0.59999999999999997780#combine(sit) 0.29999999999999998890#combine(come) 0.10000000000000000555#combine(arm) 0.10000000000000000555#combine(ears) ) ) 0.10000#weight( 1#weight( 0.59999999999999997780#od(two stand) 0.20000000000000001110#od(stand hands) ) 0.065#weight( 0.29999999999999998890#od(two sit) 0.40000000000000002220#od(two come) 0.10000000000000000555#od(stand arm) 0.10000000000000000555#od(stand ears) ) 0.01#weight( 0.50000000000000000000#od(pair sit) 0.50000000000000000000#od(pair come) 0.50000000000000000000#od(one sit) 0.50000000000000000000#od(one come) 0.50000000000000000000#od(sit arm) 0.50000000000000000000#od(sit ears) 0.50000000000000000000#od(come arm) 0.50000000000000000000#od(come ears) ) ) 0.10000#weight( 1#weight( 0.59999999999999997780#uw(two stand) 0.20000000000000001110#uw(stand hands) ) 0.065#weight( 0.29999999999999998890#uw(two sit) 0.40000000000000002220#uw(two come) 0.10000000000000000555#uw(stand arm) 0.10000000000000000555#uw(stand ears) ) 0.01#weight( 0.50000000000000000000#uw(pair sit) 0.50000000000000000000#uw(pair come) 0.50000000000000000000#uw(one sit) 0.50000000000000000000#uw(one come) 0.50000000000000000000#uw(sit arm) 0.50000000000000000000#uw(sit ears) 0.50000000000000000000#uw(come arm) 0.50000000000000000000#uw(come ears) ) ) ) """
    self.assertEqual(res, expected_res)
def test_parameter_validator(monkeypatch):
    """Exercise ParameterValidator over path, header, query and array params.

    Flask's request/current_app are monkeypatched so the decorated handler can
    be invoked directly; the app's response_class is replaced with a lambda
    that unwraps error responses to their plain 'detail' string, making the
    assertions simple string comparisons.
    """
    request = MagicMock(name='request')
    request.args = {}
    request.headers = {}
    request.params = {}
    app = MagicMock(name='app')
    # Return just the 'detail' field of the JSON error body.
    app.response_class = lambda a, mimetype, status: json.loads(a)['detail']
    monkeypatch.setattr('flask.request', request)
    monkeypatch.setattr('flask.current_app', app)

    def orig_handler(*args, **kwargs):
        # Stand-in view function; reaching it means validation passed.
        return 'OK'

    params = [{'name': 'p1', 'in': 'path', 'type': 'integer', 'required': True},
              {'name': 'h1', 'in': 'header', 'type': 'string', 'enum': ['a', 'b']},
              {'name': 'q1', 'in': 'query', 'type': 'integer', 'maximum': 3},
              {'name': 'a1', 'in': 'query', 'type': 'array',
               'items': {'type': 'integer', 'minimum': 0}}]
    validator = ParameterValidator(params)
    handler = validator(orig_handler)
    # Required path parameter: missing, valid, and various wrong-type values.
    assert handler() == "Missing path parameter 'p1'"
    assert handler(p1='123') == 'OK'
    assert handler(p1='') == "Wrong type, expected 'integer' for path parameter 'p1'"
    assert handler(p1='foo') == "Wrong type, expected 'integer' for path parameter 'p1'"
    assert handler(p1='1.2') == "Wrong type, expected 'integer' for path parameter 'p1'"
    # Query parameter maximum constraint.
    request.args = {'q1': '4'}
    assert handler(p1=1).startswith('4 is greater than the maximum of 3')
    request.args = {'q1': '3'}
    assert handler(p1=1) == 'OK'
    # Array query parameter: item type and minimum constraints.
    request.args = {'a1': "1,2"}
    assert handler(p1=1) == "OK"
    request.args = {'a1': "1,a"}
    assert handler(p1=1).startswith("'a' is not of type 'integer'")
    request.args = {'a1': "1,-1"}
    assert handler(p1=1).startswith("-1 is less than the minimum of 0")
    del request.args['a1']
    # Header enum constraint.
    request.headers = {'h1': 'a'}
    assert handler(p1='123') == 'OK'
    request.headers = {'h1': 'x'}
    assert handler(p1='123').startswith("'x' is not one of ['a', 'b']")