def test_reassign_atom_to_project(self):
    """
    assign_atom_to_project should remove the given master from a project
    that it's already attached to
    :return:
    """
    from portal.plugins.gnmatomresponder.master_importer import MasterImportResponder
    import portal.plugins.gnmatomresponder.constants as const
    from gnmvidispine.vs_item import VSItem

    # NOTE(review): MagicMock(target=...) does not constrain the mock's API;
    # `spec=VSItem` was probably intended — confirm.
    mock_item = MagicMock(target=VSItem)
    # item reports its current parent collection id
    mock_item.get = MagicMock(return_value="VX-444")
    mock_old_collection = MagicMock(target=VSCollection)
    mock_collection = MagicMock(target=VSCollection)
    mock_collection.get = MagicMock(return_value="VX-123")
    mock_collection.addToCollection = MagicMock()

    with patch('portal.plugins.gnmatomresponder.master_importer.MasterImportResponder.refresh_access_credentials'):
        m = MasterImportResponder("fake role", "fake session", "fake stream", "shard-00000")
        m.get_item_for_atomid = MagicMock(return_value=mock_item)
        # first lookup returns the old parent collection, second the new project
        m.get_collection_for_id = MagicMock(side_effect=[mock_old_collection, mock_collection])
        m.set_project_fields_for_master = MagicMock(return_value=mock_item)

        m.assign_atom_to_project("D64EEBD7-6033-4DC6-A0CA-1BBFA5A6DD95","VX-123","VX-456",mock_item)

        # master is detached from the old project and attached to the new one
        mock_item.get.assert_called_once_with(const.PARENT_COLLECTION)
        mock_old_collection.removeFromCollection.assert_called_once_with(mock_item)
        mock_collection.get.assert_called_once_with(const.PARENT_COLLECTION)
        mock_collection.addToCollection.assert_called_once_with(mock_item)
        m.set_project_fields_for_master.assert_called_once_with(mock_item, parent_project=mock_collection)
def test_form_to_db_schema_options(self):
    """form_to_db_schema_options should read 'schema' from the 'context' option."""
    options = MagicMock()
    self._plugin.form_to_db_schema_options(options)
    options.get.assert_called_with('context', {})
    options.get('context', {}).get.assert_called_with('schema', None)
class TestTrailEnvironment(unittest.TestCase):
    """Exercise TrailEnvironment's input/output plumbing over its three queues."""

    def setUp(self):
        # TrailEnvironment takes (prompt_queue, input_queue, output_queue)
        self.input_queue = MagicMock()
        self.output_queue = MagicMock()
        self.prompt_queue = MagicMock()
        self.trail_environment = TrailEnvironment(self.prompt_queue, self.input_queue, self.output_queue)

    def test_instance(self):
        self.assertIsInstance(self.trail_environment, TrailEnvironment)

    def test_input(self):
        # input() pushes the prompt and blocks on the input queue (no timeout)
        return_value = self.trail_environment.input('foo')
        self.prompt_queue.put.assert_called_once_with('foo')
        self.input_queue.get.assert_called_once_with(timeout=None)
        self.assertEqual(return_value, self.input_queue.get())

    def test_input_with_timeout(self):
        # explicit timeout is forwarded to the queue get
        return_value = self.trail_environment.input('foo', timeout=1)
        self.prompt_queue.put.assert_called_once_with('foo')
        self.input_queue.get.assert_called_once_with(timeout=1)
        self.assertEqual(return_value, self.input_queue.get())

    def test_output(self):
        # output() simply forwards to the output queue
        self.trail_environment.output('foo')
        self.output_queue.put.assert_called_once_with('foo')
def test_view(self):
    """view should list objects from the S3 bucket for the given keyspace."""
    mock_item = MagicMock()
    mock_item.get = MagicMock(return_value='testkey/')
    mock_response = MagicMock()
    mock_response.get = MagicMock(side_effect=[mock_item])
    # NOTE(review): assigning a MagicMock to side_effect makes list_objects
    # *call* mock_response; `return_value = mock_response` may have been
    # intended — confirm.
    self.s3.list_objects.side_effect = mock_response
    self.nodetool.view(KEYSPACE, BUCKET)
def test_remove_watcher_view_removes_observer_from_item(self, notifications):
    """RemoveWatcher.get should stop the user observing the decision."""
    decision = N(Decision)
    user = N(User)
    # stub the view's object/user lookups, then run the real get()
    mock_view = MagicMock(spec=RemoveWatcher)
    mock_view.get_object = lambda: decision
    mock_view.get_user = lambda: user
    mock_view.get = RemoveWatcher.get
    mock_view.get(mock_view, RequestFactory().get('/', {'next': '/'}))
    notifications.stop_observing.assert_called_with(decision, user)
def test_add_watcher_view_adds_observer_to_item(self, notifications):
    """AddWatcher.get should register the user as an observer of the decision."""
    decision = N(Decision)
    user = N(User)
    # stub the view's object/user lookups, then run the real get()
    mock_view = MagicMock(spec=AddWatcher)
    mock_view.get_object = lambda: decision
    mock_view.get_user = lambda: user
    mock_view.get = AddWatcher.get
    mock_view.get(mock_view, RequestFactory().get('/', {'next': '/'}))
    notifications.observe.assert_called_with(
        decision, user, DECISION_CHANGE)
def test_remove_watcher_view_removes_observer_from_item(
        self, notifications):
    """RemoveWatcher.get should stop the user observing the decision."""
    decision = N(Decision)
    user = N(User)
    # stub the view's object/user lookups, then run the real get()
    mock_view = MagicMock(spec=RemoveWatcher)
    mock_view.get_object = lambda: decision
    mock_view.get_user = lambda: user
    mock_view.get = RemoveWatcher.get
    mock_view.get(mock_view, RequestFactory().get('/', {'next': '/'}))
    notifications.stop_observing.assert_called_with(decision, user)
def test_resync_normal(self):
    """
    a request should trigger a resync
    :return:
    """
    client = APIClient()
    mock_master = MagicMock()
    # master exposes the atom uuid used to build the launchdetector URL
    mock_master.get = MagicMock(
        return_value="09239f72-e0a5-4299-ba5e-ec18c27117b4")

    # Python 2 style import hook patch; import_mock substitutes project modules
    with patch('__builtin__.__import__', side_effect=import_mock):
        with patch('requests.put', return_value=self.MockResponse(200, {
            "some": "data",
            "here": "now"
        })) as mock_put:
            #with patch("portal.plugins.gnm_masters.models.VSMaster", return_value=mock_master):
            models_mock.VSMaster = MagicMock(return_value=mock_master)
            response = client.get(
                reverse_lazy("resync_to_atom", kwargs={"item_id": "VX-123"}))
            # view proxies launchdetector's response back to the caller
            self.assertEqual(response.status_code, 200)
            self.assertDictEqual(json.loads(response.content), {
                "some": "data",
                "here": "now"
            })
            mock_put.assert_called_once_with(
                "https://launchdetector/update/09239f72-e0a5-4299-ba5e-ec18c27117b4"
            )
def setUp(self):
    """Wire a mocked Solr connection/search into the ISolrSearch utility.

    Subclasses must provide ``self.schema`` and ``self.search`` (paths/bodies
    for the canned Solr responses).
    """
    super(SolrMockupTestCase, self).setUp()
    assert self.schema is not None, 'A path for Solr schema is needed'
    conn = MagicMock(name='SolrConnection')
    # schema fetch returns the canned schema body
    conn.get = MagicMock(name='get', return_value=SolrResponse(
        body=self.schema,
        status=200,
    ))
    manager = MagicMock(name='SolrConnectionManager')
    manager.connection = conn
    manager.schema = SolrSchema(manager)
    solr = getUtility(ISolrSearch)
    solr._manager = manager
    assert self.search is not None, 'A path for Solr search is needed'
    # searches return the canned search body
    solr.search = MagicMock(name='search', return_value=SolrResponse(
        body=self.search,
        status=200,
    ))
    self.solr = solr
    self.source = UsersContactsInboxesSource(self.portal)
def test_success(self):
    """get_image_links_from_url should return one link per extracted image.

    Patches requests, the link extractor and the link preparer so no network
    access happens.
    """
    mock_requests = MagicMock()
    mock_requests.get = MagicMock()
    requests_patch = mock.patch(
        'image_downloader.utils.requests', mock_requests
    )
    mock_get_image_links_from_response = MagicMock()
    mock_get_image_links_from_response.return_value = [
        'http://fake_url.com/1.jpg',
        'http://fake_url.com/2.png'
    ]
    get_image_links_patch = mock.patch(
        'image_downloader.utils.get_image_links_from_response',
        mock_get_image_links_from_response
    )
    # identity preparer: links pass through unchanged
    # (typo "prepeare" fixed in the local name)
    fake_prepare_image_link = lambda image_link, response: image_link
    prepare_image_link_patch = mock.patch(
        'image_downloader.utils.prepare_image_link',
        fake_prepare_image_link
    )
    with requests_patch, get_image_links_patch, prepare_image_link_patch:
        image_links = utils.get_image_links_from_url('http://fake_url.com')
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(len(image_links), 2)
def test_notify_wrongpath(self):
    """
    process_premiere_project should update project record with any non-SAN media paths
    :return:
    """
    from asset_folder_importer.database import importer_db
    from gnmvidispine.vs_collection import VSCollection
    from gnmvidispine.vidispine_api import VSNotFound
    from gnmvidispine.vs_item import VSItem
    from asset_folder_importer.premiere_get_referenced_media.PremiereProject import PremiereProject

    # NOTE(review): MagicMock(target=...) does not constrain the mock; `spec=`
    # was probably intended — confirm.
    mock_database = MagicMock(target=importer_db)
    #with patch('asset_folder_importer.premiere_get_referenced_media.processor.VSCollection') as mock_coll:
    mock_coll_instance = MagicMock(target=VSCollection)
    mock_proj_instance = MagicMock(target=PremiereProject)
    mock_item_instance = MagicMock(target=VSItem)
    # project references one file outside the SAN
    mock_proj_instance.getReferencedMedia = MagicMock(return_value=['/Volumes/Internet Downloads/WRONG FILE.mov'])
    # no existing invalid-paths value on the collection
    mock_coll_instance.get = MagicMock(return_value=None)

    with patch('asset_folder_importer.premiere_get_referenced_media.processor.VSCollection', return_value=mock_coll_instance) as mock_coll:
        with patch('asset_folder_importer.premiere_get_referenced_media.processor.VSItem', return_value=mock_item_instance):
            with patch('asset_folder_importer.premiere_get_referenced_media.processor.PremiereProject', return_value=mock_proj_instance) as mock_proj:
                with patch('asset_folder_importer.premiere_get_referenced_media.processor.process_premiere_fileref',side_effect=VSNotFound()):
                    from asset_folder_importer.premiere_get_referenced_media.processor import process_premiere_project
                    process_premiere_project("/fakeproject/VX-446.prproj", None, db=mock_database, cfg=self.FakeConfig())

    # the bad path is recorded on the project collection
    mock_coll_instance.set_metadata.assert_called_with({'gnm_project_invalid_media_paths': ['/Volumes/Internet Downloads/WRONG FILE.mov']},mode="add")
def test_no_current_value_add_a_list(self):
    """
    update_invalid_media_paths should be able to cope with when the old value is empty and the new is a list
    :return:
    """
    from asset_folder_importer.database import importer_db
    from gnmvidispine.vs_collection import VSCollection
    from gnmvidispine.vidispine_api import VSNotFound
    from gnmvidispine.vs_item import VSItem
    from asset_folder_importer.premiere_get_referenced_media.PremiereProject import PremiereProject

    mock_database = MagicMock(target=importer_db)
    mock_coll_instance = MagicMock(target=VSCollection)
    mock_proj_instance = MagicMock(target=PremiereProject)
    mock_item_instance = MagicMock(target=VSItem)
    mock_proj_instance.getReferencedMedia = MagicMock(return_value=['/Volumes/Internet Downloads/WRONG FILE.mov'])
    # existing value is an empty list (not None) — the case under test
    mock_coll_instance.get = MagicMock(return_value=[])

    with patch('asset_folder_importer.premiere_get_referenced_media.processor.VSCollection', return_value=mock_coll_instance) as mock_coll:
        with patch('asset_folder_importer.premiere_get_referenced_media.processor.VSItem', return_value=mock_item_instance):
            with patch('asset_folder_importer.premiere_get_referenced_media.processor.PremiereProject', return_value=mock_proj_instance) as mock_proj:
                with patch('asset_folder_importer.premiere_get_referenced_media.processor.process_premiere_fileref',side_effect=VSNotFound()):
                    from asset_folder_importer.premiere_get_referenced_media.processor import process_premiere_project
                    process_premiere_project("/fakeproject/VX-446.prproj", None, db=mock_database, cfg=self.FakeConfig())

    mock_coll_instance.get.assert_called()
    mock_coll_instance.set_metadata.assert_called_with({'gnm_project_invalid_media_paths': ['/Volumes/Internet Downloads/WRONG FILE.mov']},mode="add")
def test_model_found(self, mock_get_class):
    """get_thermostat should resolve the thermostat class from the reported model."""
    mock_model = MagicMock()
    mock_model.get = lambda x: MODEL
    with patch('radiotherm.thermostat.CommonThermostat.model', mock_model):
        ret = radiotherm.get_thermostat(IP)
    mock_get_class.assert_called_once_with(MODEL)
def test_last_message_user(self):
    """update should run the full entity pipeline and persist the split result.

    Each pipeline stage is stubbed to return a sentinel string so the
    assertions can verify the exact stage-to-stage data flow.
    """
    context_data = MagicMock()
    context_data.get = MagicMock(return_value="existing_context_value")
    attribute_product_data = MagicMock()
    target = Target(context_data, attribute_product_data)
    # stub each stage; return values chain into the next stage's input
    target.extract_last_user_message = MagicMock(return_value="last_message")
    target.extract_entities = MagicMock(return_value="extracted_entities")
    target.remove_default_entities_if_detections = MagicMock(return_value="removed_entities")
    target.update_entities_with_last_message = MagicMock(return_value="entities_update_entities_with_last_message")
    target.create_entity_type_index_modifier = MagicMock(return_value="entities_create_entity_type_index_modifier")
    target.change_entities_weighting = MagicMock(return_value="entities_change_entities_weighting")
    target.add_product_counts = MagicMock(return_value="entities_add_product_counts")
    target.split_unsupported_entities = MagicMock(return_value=("supported_entities", "unsupported_entities"))

    target.update("context_id_value", "_rev_value", "messages_value")

    target.extract_last_user_message.assert_called_once_with("messages_value")
    context_data.get.assert_called_once_with("context_id_value", "_rev_value")
    target.extract_entities.assert_called_once_with("existing_context_value")
    target.update_entities_with_last_message.assert_called_once_with("extracted_entities", "last_message")
    target.remove_default_entities_if_detections.assert_called_once_with(
        "entities_update_entities_with_last_message"
    )
    target.create_entity_type_index_modifier.assert_called_once_with("removed_entities")
    target.change_entities_weighting.assert_called_once_with("entities_create_entity_type_index_modifier")
    target.add_product_counts.assert_called_once_with("entities_change_entities_weighting")
    target.split_unsupported_entities.assert_called_once_with("entities_add_product_counts")
    # final write stores supported and unsupported entities separately
    context_data.update.assert_called_once_with(
        "context_id_value",
        "_rev_value",
        unsupported_entities="unsupported_entities",
        entities="supported_entities"
    )
def get_data(resource):
    """Return a mock service whose get() yields one published and one corrected item.

    NOTE(review): the ``resource`` argument is unused — presumably kept to
    match a factory signature; confirm.
    """
    service_mock = MagicMock()
    service_mock.get = MagicMock()
    service_mock.get.return_value = [{
        'state': 'published',
        'firstpublished': datetime(year=2018, month=2, day=15, hour=12, minute=30, second=0, tzinfo=pytz.UTC),
        'item_id': '1'
    }, {
        'state': 'corrected',
        'versioncreated': datetime(year=2018, month=2, day=15, hour=13, minute=45, second=0, tzinfo=pytz.UTC),
        'item_id': '1'
    }]
    return service_mock
def test_verify_oauth_scopes_remote(monkeypatch):
    """verify_oauth_remote should reject tokens lacking the required scope.

    The tokeninfo endpoint is faked via a session stub; the mutable
    ``tokeninfo`` dict lets each phase of the test change the granted scopes.
    """
    tokeninfo = dict(uid="foo", scope="scope1 scope2")

    def get_tokeninfo_response(*args, **kwargs):
        # fabricate a real requests.Response carrying the current tokeninfo
        tokeninfo_response = requests.Response()
        tokeninfo_response.status_code = requests.codes.ok
        tokeninfo_response._content = json.dumps(tokeninfo).encode()
        return tokeninfo_response

    def func(request):
        pass

    wrapped_func = verify_oauth_remote('https://example.org/tokeninfo', set(['admin']), func)

    request = MagicMock()
    request.headers = {"Authorization": "Bearer 123"}

    app = MagicMock()
    monkeypatch.setattr('flask.current_app', app)

    session = MagicMock()
    session.get = get_tokeninfo_response
    monkeypatch.setattr('connexion.decorators.security.session', session)

    # 'admin' scope missing -> rejected.
    # (the deprecated/removed `message=` kwarg of pytest.raises was dropped;
    # it never asserted anything about the exception)
    with pytest.raises(OAuthScopeProblem):
        wrapped_func(request)

    # scope granted as a space-separated string -> accepted
    tokeninfo["scope"] += " admin"
    wrapped_func(request)

    # scope granted as a list, 'admin' missing -> rejected
    tokeninfo["scope"] = ["foo", "bar"]
    with pytest.raises(OAuthScopeProblem):
        wrapped_func(request)

    # 'admin' appended to the list -> accepted
    tokeninfo["scope"].append("admin")
    wrapped_func(request)
def test_do_work(job_mock, registry_mock):
    """Multipage.do_work should update the document and save a job record."""
    conn = boto.sns.connect_to_region('ap-southeast-2')
    topic = conn.create_topic('multipage-test')

    def side_effect(value):
        # minimal registry lookup: topic ARN and region
        return {
            'topics': {'topic': {'multipage': topic['CreateTopicResponse']['CreateTopicResult']['TopicArn']}},
            'region': {'region': 'ap-southeast-2'}
        }.get(value)

    registry = MagicMock()
    registry.get = MagicMock(side_effect=side_effect)
    registry_mock.return_value = registry
    document = MagicMock(**{
        'uuid': 'some-id',
        'published': True
    })
    # `name` can't be set via the MagicMock constructor; assign it directly
    document.name = 'Test Document'
    job_mock.save.return_value = MagicMock(**{
        'uuid': 'job-id'
    })
    helper = Multipage(document)
    helper.do_work()
    assert document.set.called
    assert job_mock.save.called
def test_add_watcher_view_adds_observer_to_item(self, notifications):
    """AddWatcher.get should register the user as an observer of the decision."""
    # A watcher is only added if the item isn't already being watched so we
    # explicitly set is_observing to False
    notifications.is_observing = lambda decision, user: False
    decision = N(Decision)
    user = N(User)
    # stub the view's object/user lookups, then run the real get()
    mock_view = MagicMock(spec=AddWatcher)
    mock_view.get_object = lambda: decision
    mock_view.get_user = lambda: user
    mock_view.get = AddWatcher.get
    mock_view.get(mock_view, RequestFactory().get('/', {'next': '/'}))
    notifications.observe.assert_called_with(
        decision, user, DECISION_CHANGE)
def test_get_success(self):
    """BlobClient.get should fetch the blob by ref and wrap it in a Blob."""
    http_session = MagicMock()
    http_session.get = MagicMock()
    response = MagicMock()
    http_session.get.return_value = response
    response.status_code = 200
    response.content = 'dummy blob'
    blobs = BlobClient(
        http_session,
        'http://example.com/blerbs/',
    )
    result = blobs.get('sha1-7928f34bd3263b86e67d11efff30d67fe7f3d176')
    # the blob ref is appended to the camli endpoint
    http_session.get.assert_called_with(
        "http://example.com/blerbs/camli/"
        "sha1-7928f34bd3263b86e67d11efff30d67fe7f3d176")
    self.assertEqual(
        type(result),
        Blob,
    )
    self.assertEqual(
        result.data,
        'dummy blob',
    )
def test_post_specific_documents_2(app_registry, db_connect_mock, job_mock, registry_mock, arrow_mock, _mock, permission_mock, blueprint_config):
    """POSTing a specific document to /admin/migration should queue a job."""
    # pin "now" so any timestamping in the view is deterministic
    arrow_mock.now.return_value = arrow.get(2015, 7, 1, 20, 0, 0)
    conn = boto.sns.connect_to_region('ap-southeast-2')
    topic = conn.create_topic('migrationdownload')
    permission_mock.return_value = True
    app_registry.return_value = blueprint_config

    def side_effect(value):
        # minimal registry lookup: topic ARN and region
        return {
            'topics': {'topic': {'migrationdownload': topic['CreateTopicResponse']['CreateTopicResult']['TopicArn']}},
            'region': {'region': 'ap-southeast-2'}
        }.get(value)

    registry = MagicMock()
    registry.get = MagicMock(side_effect=side_effect)
    registry_mock.return_value = registry
    db_connect_mock.return_value = None
    response = app().post('/admin/migration', data=json.dumps({
        "document": [{"parent_id": "some-uuid"}],
        "all_documents": False
    }), content_type='application/json')
    # NOTE(review): the save return_value is configured *after* the request
    # has already run; harmless for the `.called` assertion below, but
    # probably intended to precede the post — confirm.
    job_mock.save.return_value = MagicMock(**{
        'uuid': 'job-id'
    })
    assert response.status_code == 200
    assert job_mock.save.called
def test_hbp_auth(self):
    """HbpAuth should extract user details and build token revocation params."""
    # stub the services registry before importing HbpAuth, since the module
    # reads the oidc_service config at import/construction time
    with patch('bbp_services.client.get_services') as m:
        m.return_value = {
            'oidc_service': {
                'prod': {
                    'url': 'url',
                    'api_url': 'api_url'
                }
            }
        }
        from hbp_app_python_auth.auth import HbpAuth
        auth = HbpAuth()
        # response.get(...) returns the list of email records
        httpResponseMock = MagicMock()
        httpResponseMock.get = MagicMock(
            return_value=[{
                'immutable': True,
                'primary': True,
                'value': '*****@*****.**'
            }])
        details = auth.get_user_details(httpResponseMock)
        eq_(details['email'], '*****@*****.**')
        eq_(auth.revoke_token_params('token', 'uuid'), {'token': 'token'})
        eq_(auth.revoke_token_headers('token', 'uuid'), {'Content-type': 'application/json'})
def test_recover_from_expired_token(aggregator):
    """The API client should re-login and retry after a 403 from the APIC."""
    # First api answers with 403 to force the check to re-authenticate
    unauthentified_response = MagicMock(status_code=403)
    # Api answer when a request is being made to the login endpoint
    login_response = MagicMock()
    # Third api answer, when the check retries the initial endpoint but is now authenticated
    valid_response = MagicMock()
    valid_response.json = MagicMock(return_value={"foo": "bar"})
    http = MagicMock()
    http.post = MagicMock(side_effect=[login_response])
    http.get = MagicMock(side_effect=[unauthentified_response, valid_response])
    session_wrapper = SessionWrapper(aci_url=common.ACI_URL, http=http, log=MagicMock())
    # pre-seeded cookie simulates a previously-valid (now expired) session
    session_wrapper.apic_cookie = "cookie"
    api = Api(common.ACI_URLS, http, common.USERNAME, password=common.PASSWORD)
    api.sessions = [session_wrapper]
    data = api.make_request("")

    # Assert that we retrieved the value from `valid_response.json()`
    assert data == {"foo": "bar"}

    get_calls = http.get._mock_call_args_list
    post_calls = http.post._mock_call_args_list
    # Assert that the first call was to the ACI_URL
    assert get_calls[0].args[0] == common.ACI_URL
    # Assert that the second call was to the login endpoint
    assert 'aaaLogin.xml' in post_calls[0].args[0]
    # Assert that the last call was to the ACI_URL again
    assert get_calls[1].args[0] == common.ACI_URL
def test_archive_cannot_be_found(job_mock, registry_mock, connection_mock):
    """do_work should raise InvalidJobError when the archive key is missing from S3."""
    conn_s3 = boto.connect_s3()
    # storage bucket exists but does not contain 'archive.zip'
    storage = conn_s3.create_bucket('storage-bucket')
    conn_s3.create_bucket('file-bucket')
    registry = MagicMock()
    # `side_effect` here is the module-level registry lookup helper
    registry.get = MagicMock(side_effect=side_effect)
    registry_mock.return_value = registry
    message = Message()
    message.set_body(json.dumps({
        'Message': '9bd96ca7-3d0a-4e74-b523-b3bd38e9862e',
        'Subject': 'Test Subject'
    }))
    job = MagicMock(**{
        'uuid': '9bd96ca7-3d0a-4e74-b523-b3bd38e9862e',
        'name': 'Migration Download',
        'status': 'pending',
        'message': {
            'file': {
                'key': 'archive.zip'
            }
        }
    })
    job_mock.selectBy.return_value.getOne.return_value = job
    service = MigrationUploadJob()
    with pytest.raises(InvalidJobError):
        service.do_work(message)
    # the job record is still updated (e.g. marked failed)
    assert job.set.called
def test_missing_keys(self):
    """_connect should default the root URLs to None when the config omits them."""
    http_session = MagicMock()
    response = MagicMock()
    http_session.get = MagicMock()
    http_session.get.return_value = response
    response.status_code = 200
    # empty discovery document: no blobRoot/searchRoot/signRoot keys
    response.content = "{}"
    response.url = "http://example.com/?camli.mode=config"
    conn = _connect(
        'http://example.com/',
        http_session=http_session,
    )
    self.assertEqual(
        conn.blob_root,
        None,
    )
    self.assertEqual(
        conn.search_root,
        None,
    )
    self.assertEqual(
        conn.sign_root,
        None,
    )
def test_get_resource_by_href(self):
    """get_resource should look up a resource on the collection by its href."""
    fake_res = {'name': 'test'}
    coll = MagicMock()
    coll.get = lambda x: fake_res
    resource = resources.get_resource(coll, 'href', Client.BASE_URL + '/test/resource')
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(resource, fake_res)
def test_set_wrongtag(discovery_mock: Any) -> NoReturn:
    """`set` with a pull-request number that has no matching revision should fail."""
    # the chained service mock returns itself for each builder call so that
    # service.projects().locations().services().get().execute() resolves
    service_mock = MagicMock()
    service_mock.projects = MagicMock(return_value=service_mock)
    service_mock.locations = MagicMock(return_value=service_mock)
    service_mock.services = MagicMock(return_value=service_mock)
    service_mock.get = MagicMock(return_value=service_mock)
    service_mock.execute = MagicMock(
        return_value=service_data(MOCK_SERVICE_NAME, [MOCK_PR_NUMBER]))
    discovery_mock.build = MagicMock(return_value=service_mock)

    invalid_pr = MOCK_PR_NUMBER + 1  # intentionally wrong
    response = runner.invoke(
        cli,
        [
            "set",
            "--project-id",
            MOCK_PROJECT_ID,
            "--region",
            "us-central1",
            "--service",
            MOCK_SERVICE_NAME,
            "--repo-name",
            MOCK_REPO_NAME,
            "--commit-sha",
            MOCK_COMMIT_SHA,
            "--pull-request",
            invalid_pr,
            "--dry-run",
        ],
    )
    print(response.output)
    # the command exits non-zero and reports the missing revision tag
    assert response.exit_code == 1
    assert "Error finding revision" in response.output
    assert f"pr-{invalid_pr}" in response.output
def test_remote_remove_capabilities_config_after_setting_desired(
        self, webdriver_create_mock, browser_create_mock):
    """Test `remote` method.

    Note:
        Test removing `capabilities` after setting `desired_capabilities`
        in remote browser config with this empty property.
    """
    selenium_config = {
        'REMOTE': {
            'desired_capabilities': {},
            'capabilities': {
                'driver_name': DEFAULT_BROWSER
            }
        }
    }
    # delegate dict-style access on the mock to the real config dict
    mock_config = MagicMock(name='selenium_config')
    mock_config.get.side_effect = selenium_config.get
    mock_config.__contains__.side_effect = selenium_config.__contains__
    s = Selenium(mock_config)
    s.remote(DEFAULT_BROWSER)
    # `capabilities` must have been popped from the REMOTE config
    self.assertNotIn('capabilities', mock_config.get('REMOTE'))
def setUp(self):
    """Build a SolrSchema backed by a mocked connection returning schema.json."""
    conn = MagicMock(name='SolrConnection')
    conn.get = MagicMock(name='get', return_value=SolrResponse(
        body=get_data('schema.json'),
        status=200))
    manager = MagicMock(name='SolrConnectionManager')
    # `connection` is a property on the real manager, so mock it as one
    type(manager).connection = PropertyMock(return_value=conn)
    self.schema = SolrSchema(manager)
def test_verify_oauth_scopes_remote(monkeypatch):
    """verify_oauth should reject tokens whose scopes don't cover the required set.

    The tokeninfo endpoint is faked via a session stub; the mutable
    ``tokeninfo`` dict lets each phase of the test change the granted scopes.
    """
    tokeninfo = dict(uid="foo", scope="scope1 scope2")

    def get_tokeninfo_response(*args, **kwargs):
        # fabricate a real requests.Response carrying the current tokeninfo
        tokeninfo_response = requests.Response()
        tokeninfo_response.status_code = requests.codes.ok
        tokeninfo_response._content = json.dumps(tokeninfo).encode()
        return tokeninfo_response

    token_info_func = get_tokeninfo_func({'x-tokenInfoUrl': 'https://example.org/tokeninfo'})
    wrapped_func = verify_oauth(token_info_func, validate_scope)

    request = MagicMock()
    request.headers = {"Authorization": "Bearer 123"}

    session = MagicMock()
    session.get = get_tokeninfo_response
    monkeypatch.setattr('connexion.decorators.security.session', session)

    # 'admin' scope missing -> rejected.
    # (the deprecated/removed `message=` kwarg of pytest.raises was dropped;
    # it never asserted anything about the exception)
    with pytest.raises(OAuthScopeProblem):
        wrapped_func(request, ['admin'])

    # scope granted as a space-separated string -> accepted
    tokeninfo["scope"] += " admin"
    assert wrapped_func(request, ['admin']) is not None

    # scope granted as a list, 'admin' missing -> rejected
    tokeninfo["scope"] = ["foo", "bar"]
    with pytest.raises(OAuthScopeProblem):
        wrapped_func(request, ['admin'])

    # 'admin' appended to the list -> accepted
    tokeninfo["scope"].append("admin")
    assert wrapped_func(request, ['admin']) is not None
def test_add_watcher_view_adds_observer_to_item(self, notifications):
    """AddWatcher.get should register the user as an observer of the decision."""
    # A watcher is only added if the item isn't already being watched so we
    # explicitly set is_observing to False
    notifications.is_observing = lambda decision, user: False
    decision = N(Decision)
    user = N(User)
    # stub the view's object/user lookups, then run the real get()
    mock_view = MagicMock(spec=AddWatcher)
    mock_view.get_object = lambda: decision
    mock_view.get_user = lambda: user
    mock_view.get = AddWatcher.get
    mock_view.get(mock_view, RequestFactory().get('/', {'next': '/'}))
    notifications.observe.assert_called_with(decision, user, DECISION_CHANGE)
def test_annotation(self):
    """UDIAnnotateMapper should only annotate product code OQG and omit device ids."""
    harmonized_db = MagicMock()
    # harmonized data exists only for product code "OQG"
    harmonized_db.get = lambda product_code, _: {"510k": [{"k_number": "K094035"}],
                                                 "device_pma": [{"pma_number": "P950002"}],
                                                 "registration": [{"fei_number": "3001451451"}],
                                                 } if product_code == "OQG" else {}
    ann = UDIAnnotateMapper(harmonized_db)
    mapper = XML2JSONMapper()

    def add_fn(id, json):
        # called per mapped record; verify the harmonized openfda section
        harmonized = ann.harmonize(json)
        eq_("OQG", harmonized["product_codes"][0]["code"])
        # device identifiers must not leak into the openfda block
        eq_(None, harmonized["product_codes"][0]["openfda"].get("pma_number"))
        eq_(None, harmonized["product_codes"][0]["openfda"].get("k_number"))
        eq_(None, harmonized["product_codes"][0]["openfda"].get("fei_number"))
        # second product code has no harmonized data at all
        eq_({}, harmonized["product_codes"][1].get("openfda"))

    map_input = MagicMock()
    map_input.filename = os.path.join(dirname(os.path.abspath(__file__)), "test.xml")
    map_output = MagicMock()
    map_output.add = add_fn
    mapper.map_shard(map_input, map_output)
def test_solve_conflicts_both_update_a_file(self):
    """solve_conflicts should write our version of the file and re-add it to the index."""
    mocked_theirs = MagicMock()
    mocked_ours = MagicMock(id="id", path="path")
    mocked_full = MagicMock(return_value="full_path")
    mocked_repo = MagicMock(_full_path=mocked_full)
    mocked_repo.get().data = "data"

    def conflicts():
        # (common-ancestor, theirs, ours); no ancestor here
        yield None, mocked_theirs, mocked_ours

    mock_path = 'gitfs.merges.accept_mine.open'
    with patch(mock_path, create=True) as mocked_open:
        # NOTE: `file` is the Python 2 builtin file type
        mocked_file = MagicMock(spec=file)
        mocked_open.return_value = mocked_file
        mine = AcceptMine(mocked_repo)
        mine.solve_conflicts(conflicts())
        mocked_full.assert_called_once_with("path")
        mocked_open.assert_called_once_with("full_path", "w")
        mocked_repo.get.has_calls([call("id")])
        mocked_open().__enter__().write.assert_called_once_with("data")
        mocked_repo.index.add.assert_called_once_with("path")
def test_get(self):
    """
    Test that we can get data from files.
    Verify that the driver sampling can be started and stopped
    """
    from mi.core.kudu.brttpkt import NoData
    # two canned packets; orbget pops them then signals exhaustion
    rvals = [(self.PKT_ID, sn, ts, pkt) for (
        pt, pkt, sn, ts) in [makepacket(self.PKT_DATA, time=n + 1) for n in range(2)]]

    def orbget():
        if rvals:
            log.trace('returning 1st packet')
            return rvals.pop(0)
        else:
            log.trace('no more packets')
            raise NoData()

    with patch(
            'mi.dataset.parser.antelope_orb.OrbReapThr') as MockOrbReapThr:
        orbreapthr = MagicMock()
        orbreapthr.get = orbget
        MockOrbReapThr.return_value = orbreapthr
        self.driver.start_sampling()
        log.trace("STARTED SAMPLING")
        self.assert_data(None, 'first.result.yml', count=2, timeout=5)
        log.trace("ASSERTED DATA")
def setUp(self):
    """Build a Cacher wired to mocked redis, locker and invalidator, plus a cached function."""
    super().setUp()
    redis = MagicMock()
    # keep direct handles on get/set so tests can assert cache traffic
    self.redis_get_mock = MagicMock()
    self.redis_set_mock = MagicMock()
    redis.set = self.redis_set_mock
    redis.get = self.redis_get_mock
    locker = FakeLocker()
    self.locker_lock = MagicMock()
    locker._lock = self.locker_lock
    self.locker_unlock = MagicMock()
    locker._unlock = self.locker_unlock
    invalidate_by = ['a', 'problem_id', 'group_id']
    invalidator = MagicMock()
    self.invalidator_subscribe_mock = invalidator.subscribe
    self.invalidator_invalidate_mock = invalidator.invalidate
    self.cacher = Cacher(redis, locker, invalidate_by, prefix='key_prefix', cache_invalidator=invalidator)
    self.to_be_cached = MagicMock(return_value=FUNC_RETURN_VALUE)
    # decorators read __name__; MagicMock doesn't provide one by default
    self.to_be_cached.__name__ = FUNC_NAME
    self.cached_function = self.cacher(self.to_be_cached)
def test_caching_is_per_instance():
    """Field values cached for one block instance must not leak into another."""
    # Test that values cached for one instance do not appear on another
    class FieldTester(ScopedStorageMixin):
        """Toy class for ModelMetaclass and field access testing"""
        field_a = List(scope=Scope.settings)

    field_data = MagicMock(spec=FieldData)
    field_data.get = lambda block, name, default=None: [name]  # pylint: disable=C0322

    # Same field_data used in different objects should result
    # in separately-cached values, so that changing a value
    # in one instance doesn't affect values stored in others.
    field_tester_a = FieldTester(
        runtime=TestRuntime(services={'field-data': field_data}),
        scope_ids=MagicMock(spec=ScopeIds)
    )
    field_tester_b = FieldTester(
        runtime=TestRuntime(services={'field-data': field_data}),
        scope_ids=MagicMock(spec=ScopeIds)
    )
    value = field_tester_a.field_a
    assert_equals(value, field_tester_a.field_a)
    # in-place mutation is reflected in a's cached value only
    field_tester_a.field_a.append(1)
    assert_equals(value, field_tester_a.field_a)
    assert_not_equals(value, field_tester_b.field_a)
def should_call_twice(self, topic): def callback(*args, **kwargs): pass file_key = { 'Error': 'Error', 'ResponseMetadata': { 'HTTPStatusCode': 502 } } self.call_count = 0 def get(key, callback=None): self.call_count += 1 callback(file_key) mock_bucket_loader = MagicMock() mock_bucket_loader.get = get func = s3_loader.HandleDataFunc.as_func( '/'.join([s3_bucket, IMAGE_PATH]), callback=callback, bucket_loader=mock_bucket_loader, context=topic ) func(file_key) expect(self.call_count).to_equal(3)
class test_doc_(object):
    """Tests for lembrar's db.Doc wrapper (attribute prefixing, indexing, JSON output)."""

    def setUp(self):
        from lembrar import db
        from datetime import datetime
        # reload so module-level state (e.g. patched index) is reset per test
        reload(db)
        self.grid = MagicMock()
        self.created = datetime.now()
        self.doc = db.Doc(doc=dict(raw_data='raw_data', _id='123', test='test1', created=self.created),
                          db=MagicMock(), grid=self.grid, accepted_languages=MagicMock(), prefix='prefix')

    def test_doc_get_raw_data(self):
        # raw_data is read back through the grid store
        assert self.grid.get().read() == self.doc.raw_data

    def test_doc_to_jsonable_dict(self):
        # _id becomes 'id'; created is ISO-formatted; raw_data omitted
        assert dict(id='123', created=self.created.isoformat(), test='test1') == self.doc.to_jsonable_dict()

    def test_doc_update_plugin_attr(self):
        # plugin attrs are stored under the configured prefix
        self.doc.update_plugin_attr('key', 'value')
        assert 'value' == self.doc.doc['prefix_key']

    def test_doc_update_plugin_and_canonical_attr(self):
        # stored both prefixed and unprefixed
        self.doc.update_plugin_and_canonical_attr('key', 'value')
        assert 'value' == self.doc.doc['key']
        assert 'value' == self.doc.doc['prefix_key']

    def test_doc_register_html_representation(self):
        self.doc.register_html_representation('html_field_key')
        assert ['prefix_html_field_key'] == self.doc.doc['full_htmls']

    def test_doc_register_searchable_field(self):
        self.doc.register_searchable_field('search_field_key')
        assert ['prefix_search_field_key'] \
            == self.doc.doc['searchable_fields']

    def test_prefixed_name(self):
        assert 'prefix_test' == self.doc.prefixed_name('test')

    def test_reindex(self):
        from lembrar import db
        # reindex concatenates the fulltext fields and stores the index terms
        self.doc.doc['fulltext_fields'] = ['field1', 'field2']
        self.doc.doc['field1'] = 'text1'
        self.doc.doc['field2'] = 'text2'
        db.index = MagicMock()
        db.index.return_value = ['index', 'data']
        self.doc.reindex()
        assert call('text1 text2', accepted_languages=self.doc.accepted_languages) \
            == db.index.mock_calls[0]
        assert ['index', 'data'] == self.doc.doc['search_terms']

    def test_finish_parsing(self):
        # version is recorded under the plugin prefix
        self.doc.finish_parsing('2.2')
        assert '2.2' == self.doc.doc['prefix_version']
def test_object_identity():
    """Repeated reads of a field must return the same cached object."""
    # Check that values that are modified are what is returned
    class FieldTester(ScopedStorageMixin):
        """Toy class for ModelMetaclass and field access testing"""
        field_a = List(scope=Scope.settings)

    # Make sure that field_data always returns a different object
    # each time it's actually queried, so that the caching is
    # doing the work to maintain object identity.
    field_data = MagicMock(spec=FieldData)
    field_data.get = lambda block, name, default=None: [name]  # pylint: disable=C0322
    field_tester = FieldTester(
        runtime=TestRuntime(services={'field-data': field_data}),
        scope_ids=MagicMock(spec=ScopeIds)
    )
    value = field_tester.field_a
    assert_equals(value, field_tester.field_a)

    # Changing the field in place matches a previously fetched value
    field_tester.field_a.append(1)
    assert_equals(value, field_tester.field_a)

    # Changing the previously-fetched value also changes the value returned by the field:
    value.append(2)
    assert_equals(value, field_tester.field_a)

    # Deletion restores the default value.  In the case of a List with
    # no default defined, this is the empty list.
    del field_tester.field_a
    assert_equals([], field_tester.field_a)
def test_caching_is_per_instance():
    """Field values cached for one instance must not leak into another (metaclass variant)."""
    # Test that values cached for one instance do not appear on another
    class FieldTester(object):
        """Toy class for ModelMetaclass and field access testing"""
        __metaclass__ = ModelMetaclass

        field_a = List(scope=Scope.settings)

        def __init__(self, field_data):
            self._field_data = field_data
            self._dirty_fields = {}

    field_data = MagicMock(spec=FieldData)
    field_data.get = lambda block, name, default=None: [name]  # pylint: disable=C0322

    # Same field_data used in different objects should result
    # in separately-cached values, so that changing a value
    # in one instance doesn't affect values stored in others.
    field_tester_a = FieldTester(field_data)
    field_tester_b = FieldTester(field_data)
    value = field_tester_a.field_a
    assert_equals(value, field_tester_a.field_a)
    # in-place mutation is reflected in a's cached value only
    field_tester_a.field_a.append(1)
    assert_equals(value, field_tester_a.field_a)
    assert_not_equals(value, field_tester_b.field_a)
def setUp(self):
    """Wire a fully mocked Solr connection so no live server is contacted."""
    super(SolrMockupTestCase, self).setUp()

    assert self.schema is not None, 'A path for Solr schema is needed'

    # Fake connection whose GET always returns the canned schema body.
    conn = MagicMock(name='SolrConnection')
    conn.get = MagicMock(
        name='get',
        return_value=SolrResponse(body=self.schema, status=200))

    manager = MagicMock(name='SolrConnectionManager')
    manager.connection = conn
    manager.schema = SolrSchema(manager)

    solr = getUtility(ISolrSearch)
    solr._manager = manager

    assert self.search is not None, 'A path for Solr search is needed'
    # Canned search results as well.
    solr.search = MagicMock(
        name='search',
        return_value=SolrResponse(body=self.search, status=200))
    self.solr = solr

    self.source = UsersContactsInboxesSource(self.portal)
def test_do_work(job_mock, registry_mock):
    """Multipage.do_work should update the document and persist a job."""
    conn = boto.sns.connect_to_region('ap-southeast-2')
    topic = conn.create_topic('multipage-test')

    # Canned registry contents, keyed exactly like the real registry.
    topic_arn = topic['CreateTopicResponse']['CreateTopicResult']['TopicArn']
    settings = {
        'topics': {'topic': {'multipage': topic_arn}},
        'region': {'region': 'ap-southeast-2'},
    }

    def side_effect(value):
        # Emulate registry.get(key) against the canned settings above.
        return settings.get(value)

    registry = MagicMock()
    registry.get = MagicMock(side_effect=side_effect)
    registry_mock.return_value = registry

    document = MagicMock(**{'uuid': 'some-id', 'published': True})
    # 'name' collides with MagicMock's constructor kwarg, so set it after.
    document.name = 'Test Document'
    job_mock.save.return_value = MagicMock(**{'uuid': 'job-id'})

    helper = Multipage(document)
    helper.do_work()

    assert document.set.called
    assert job_mock.save.called
def should_call_twice(self, topic):
    """A 502 payload from the bucket loader should make the handler retry get()."""
    def callback(*args, **kwargs):
        pass

    # Payload shaped like an S3 error response (HTTP 502 -> retryable).
    error_payload = {
        'Error': 'Error',
        'ResponseMetadata': {
            'HTTPStatusCode': 502
        }
    }

    self.call_count = 0

    def counting_get(key, callback=None):
        # Count every fetch attempt and feed back the same error payload.
        self.call_count += 1
        callback(error_payload)

    loader_stub = MagicMock()
    loader_stub.get = counting_get

    func = s3_loader.HandleDataFunc.as_func(
        '/'.join([s3_bucket, IMAGE_PATH]),
        callback=callback,
        bucket_loader=loader_stub,
        context=topic)
    func(error_payload)

    # NOTE(review): the method name says "twice" but three calls are asserted
    # (initial invocation plus retries) — confirm the intended retry count.
    expect(self.call_count).to_equal(3)
def test_orbreapthr_args(self):
    """
    Test that we can get data from files.
    Verify that the driver sampling can be started and stopped
    (asserts OrbReapThr is constructed with the values recovered
    from the restored parser state).
    """
    from mi.core.kudu.brttpkt import NoData

    def orbget():
        # Stub: the fake orb never yields packets, it only signals "no data".
        log.trace('no more packets')
        raise NoData()

    # State memento the driver should restore its orb arguments from.
    tafter = 999
    state = {'parser_state': {
        StateKey.TAFTER: tafter,
        ParserConfigKey.ORBNAME: ORB_NAME,
        ParserConfigKey.SELECT: '',
        ParserConfigKey.REJECT: '',
    }}
    self.driver = self._get_driver_object(memento=state)

    with patch('mi.dataset.parser.antelope_orb.OrbReapThr') as MockOrbReapThr:
        orbreapthr = MagicMock()
        orbreapthr.get = orbget
        MockOrbReapThr.return_value = orbreapthr
        self.driver.start_sampling()

    # The thread must be created with the orb name/select/reject and the
    # tafter value taken from the restored state, plus fixed kwargs.
    expected_call_args = call(
        ORB_NAME,
        '',
        '',
        tafter,
        timeout=0,
        queuesize=100)
    self.assertEquals(MockOrbReapThr.call_args, expected_call_args)
def test_get(self):
    """
    Test that we can get data from files.
    Verify that the driver sampling can be started and stopped
    (two fake packets are served, then NoData stops the stream).
    """
    from mi.core.kudu.brttpkt import NoData

    # Two canned packets in (pktid, srcname, time, packet) order, times 1 and 2.
    rvals = [(self.PKT_ID, sn, ts, pkt) for (pt, pkt, sn, ts) in [
        makepacket(self.PKT_DATA, time=n+1) for n in range(2)]]

    def orbget():
        # Serve packets in order until exhausted, then signal "no data".
        if rvals:
            log.trace('returning 1st packet')
            return rvals.pop(0)
        else:
            log.trace('no more packets')
            raise NoData()

    with patch('mi.dataset.parser.antelope_orb.OrbReapThr') as MockOrbReapThr:
        orbreapthr = MagicMock()
        orbreapthr.get = orbget
        MockOrbReapThr.return_value = orbreapthr
        self.driver.start_sampling()
        log.trace("STARTED SAMPLING")
        # Both packets must come out as results within the timeout.
        self.assert_data(None, 'first.result.yml', count=2, timeout=5)
        log.trace("ASSERTED DATA")
def test_verify_oauth_scopes_remote(monkeypatch):
    """Scope checks against a remote tokeninfo endpoint: both the
    space-separated-string and list forms of 'scope' must be honored."""
    # Mutable token payload; the stub below serializes it on every request,
    # so later in-place edits change what the "remote" endpoint returns.
    tokeninfo = dict(uid="foo", scope="scope1 scope2")

    def get_tokeninfo_response(*args, **kwargs):
        # Fake requests.Session.get returning the current tokeninfo as JSON.
        tokeninfo_response = requests.Response()
        tokeninfo_response.status_code = requests.codes.ok
        tokeninfo_response._content = json.dumps(tokeninfo).encode()
        return tokeninfo_response

    token_info_func = get_tokeninfo_func(
        {'x-tokenInfoUrl': 'https://example.org/tokeninfo'})
    wrapped_func = verify_oauth(token_info_func, validate_scope)

    request = MagicMock()
    request.headers = {"Authorization": "Bearer 123"}

    session = MagicMock()
    session.get = get_tokeninfo_response
    monkeypatch.setattr('specific.decorators.security.session', session)

    # String-form scope without 'admin' -> rejected.
    with pytest.raises(OAuthScopeProblem, match="Provided token doesn't have the required scope"):
        wrapped_func(request, ['admin'])

    # Add 'admin' to the string form -> accepted.
    tokeninfo["scope"] += " admin"
    assert wrapped_func(request, ['admin']) is not None

    # List-form scope without 'admin' -> rejected again.
    tokeninfo["scope"] = ["foo", "bar"]
    with pytest.raises(OAuthScopeProblem, match="Provided token doesn't have the required scope"):
        wrapped_func(request, ['admin'])

    # Add 'admin' to the list form -> accepted.
    tokeninfo["scope"].append("admin")
    assert wrapped_func(request, ['admin']) is not None
def test_createMarlinApplication(self):
    """createMarlinApplication builds a Marlin app from loaded parameters;
    the overlay CLI flag must appear only while the overlay flag is set."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin

    # Stub config parser whose get() is delegated to the test's mockConfig.
    cpMock = Mock()
    cpMock.read = Mock()
    cpMock.get = self.mockConfig

    parameter = Mock()
    parameter.prodConfigFilename = 'filename'
    parameter.dumpConfigFile = False

    with patch(
        "ILCDIRAC.ILCTransformationSystem.scripts.dirac-clic-make-productions.ConfigParser.SafeConfigParser",
            new=Mock(return_value=cpMock)):
        self.chain.loadParameters(parameter)
    ret = self.chain.createMarlinApplication(300.0)
    self.assertIsInstance(ret, Marlin)
    self.assertEqual(ret.detectortype, 'myDetectorModel')
    self.assertEqual(ret.steeringFile, 'clicReconstruction.xml')
    # With overlay enabled the CLI string carries the Overlay option.
    self.assertEqual(self.chain.cliReco, '--Config.Tracking=Tracked --Config.Overlay=300GeV ')

    # Second pass: reload parameters, then disable the overlay flag.
    with patch(
        "ILCDIRAC.ILCTransformationSystem.scripts.dirac-clic-make-productions.ConfigParser.SafeConfigParser",
            new=Mock(return_value=cpMock)):
        self.chain.loadParameters(parameter)
    self.chain._flags._over = False
    ret = self.chain.createMarlinApplication(300.0)
    self.assertIsInstance(ret, Marlin)
    self.assertEqual(ret.detectortype, 'myDetectorModel')
    self.assertEqual(ret.steeringFile, 'clicReconstruction.xml')
    # Overlay disabled -> no Overlay option in the CLI string.
    self.assertEqual(self.chain.cliReco, '--Config.Tracking=Tracked')
def _stub_volume_client(self):
    """Install a fake cinder client whose volume manager is fully stubbed out."""
    self.instance_task._volume_client = MagicMock(spec=cinderclient.Client)
    volume_manager = MagicMock(spec=cinderclient.volumes.VolumeManager)
    self.instance_task.volume_client.volumes = volume_manager
    volume_manager.extend = MagicMock(return_value=None)
    # get() reports a volume that is already available at the new size.
    resized_volume = cinderclient.volumes.Volume(
        volume_manager, {"status": "available", "size": 2}, True)
    volume_manager.get = MagicMock(return_value=resized_volume)
    volume_manager.attach = MagicMock(return_value=None)
def test_geocode(self, mocked_geocode):
    """bulk_geocode should write coordinates back onto the queryset rows."""
    # Geocoder stub: expose coords both directly and via .get().
    geocode_result = MagicMock()
    geocode_result.coords = (12, 18)
    geocode_result.get().coords = (12, 18)
    mocked_geocode.return_value = geocode_result

    geocoded = utils.bulk_geocode(TestModel.objects.all())

    self.assertTrue(geocoded[0].latitude)
def test_describe_blob(self):
    """describe_blob should GET the describe endpoint and split the returned
    meta map into the requested blob's dict and the full map."""
    http_session = MagicMock()
    http_session.get = MagicMock()
    response = MagicMock()
    http_session.get.return_value = response
    response.status_code = 200
    # Canned payload: 'dummy1' is the requested blob, 'dummy2' is extra
    # metadata the server returned alongside it.
    response.content = """ { "meta": { "dummy1": { "blobRef": "dummy1" }, "dummy2": { "blobRef": "dummy2" } } } """
    searcher = SearchClient(
        http_session=http_session,
        base_url="http://example.com/s/",
    )
    result = searcher.describe_blob("dummy1")
    # The request must target the search/describe endpoint with the blobref.
    http_session.get.assert_called_with(
        'http://example.com/s/camli/search/describe',
        params={
            'blobref': 'dummy1',
        }
    )
    self.assertEqual(
        type(result),
        BlobDescription,
    )
    # raw_dict carries only the requested blob's own description...
    self.assertEqual(
        result.raw_dict,
        {
            "blobRef": "dummy1",
        }
    )
    # ...while other_raw_dicts keeps the complete meta map for reference.
    self.assertEqual(
        result.other_raw_dicts,
        {
            "dummy1": {
                "blobRef": "dummy1",
            },
            "dummy2": {
                "blobRef": "dummy2",
            },
        }
    )
def test_unregister_vm(self):
    """unregister_vm should look the VM up via vim and call Unregister on it."""
    vim_stub = MagicMock()
    self.vm_manager.vim_client = vim_stub
    vm_stub = MagicMock()
    vim_stub.get = MagicMock(return_value=vm_stub)

    self.vm_manager.unregister_vm("1234")

    vm_stub.Unregister.assert_called_once()
def test_get(mock_memcached):
    """MemcachedCache.get should delegate the key lookup to the memcached client."""
    backend = MagicMock()
    backend.get = MagicMock()
    mock_memcached.return_value = backend

    cache = MemcachedCache()
    lookup_key = "test"
    cache.get(lookup_key)

    backend.get.assert_called_with(lookup_key)
def test_get(mock_redis):
    """RedisCache.get should delegate the key lookup to the redis client."""
    backend = MagicMock()
    backend.get = MagicMock()
    mock_redis.return_value = backend

    cache = RedisCache()
    lookup_key = "test"
    cache.get(lookup_key)

    backend.get.assert_called_with(lookup_key)
def setUp(self):
    """Build the folder fixture, flush the indexing queue, then replace the
    Solr connection manager with a mock so later indexing hits no server."""
    super(TestCollectiveIndexingIntegration, self).setUp()
    self.portal = self.layer['portal']
    login(self.portal, TEST_USER_NAME)
    setRoles(self.portal, TEST_USER_ID, ['Manager'])
    # Prepare nested folders so 'View' isn't mapped to the 'Reader' role.
    # First subtree has a leaf node folder with 'View' acquired, whereas
    # the second subtree has a leaf node folder with AQ disabled for 'View'
    self.folder = api.content.create(
        type='Folder', title='My Folder', id='folder',
        container=self.portal)
    self.folder.manage_permission('View', roles=['Other'], acquire=False)
    self.subfolder = api.content.create(
        type='Folder', title='My Subfolder', id='subfolder',
        container=self.folder)
    self.subfolder.manage_permission('View', roles=['Other'], acquire=True)
    self.folder2 = api.content.create(
        type='Folder', title='My Folder 2', id='folder2',
        container=self.portal)
    self.folder2.manage_permission('View', roles=['Other'], acquire=False)
    self.subfolder2_without_aq = api.content.create(
        type='Folder', title='My Subfolder without acquired permission',
        id='subfolder2_without_aq', container=self.folder2)
    self.subfolder2_without_aq.manage_permission(
        'View', roles=['Other'], acquire=False)
    # Recompute security indexes for the whole fixture tree.
    self.folder.reindexObjectSecurity()
    self.folder2.reindexObjectSecurity()
    self.subfolder.reindexObjectSecurity()
    self.subfolder2_without_aq.reindexObjectSecurity()
    # Flush queue to avoid having the above objects getting indexed at
    # the end of the transaction, after we already installed the mocks
    getQueue().process()
    # Mock connection manager: GET returns the canned schema so
    # SolrSchema can be built without a live connection.
    self.manager = MagicMock(name='SolrConnectionManager')
    alsoProvides(self.manager, ISolrConnectionManager)
    conn = MagicMock(
        name='SolrConnection',
    )
    conn.get = MagicMock(name='get', return_value=SolrResponse(
        body=get_data('schema.json'), status=200))
    type(self.manager).connection = PropertyMock(return_value=conn)
    type(self.manager).schema = PropertyMock(
        return_value=SolrSchema(self.manager))
    sm = self.portal.getSiteManager()
    sm.registerUtility(self.manager, ISolrConnectionManager)
    self.connection = self.manager.connection
    # Manager is memoized on the ISolrIndexQueueProcessor - reset it
    queue_processor = getUtility(ISolrIndexQueueProcessor, name='ftw.solr')
    queue_processor._manager = None