def test_filemetadata_read_metadata(self, m_img_md):
    """
    Test read_metadata() with a mocked pyexiv2.ImageMetadata object.

    Mock the exif/xmp key lists and __getitem__ so every tag lookup
    yields raw_value == 1, then confirm read_metadata() returns a dict
    mapping each key to that raw value.
    """
    exif_keys = ['Exif.Image.DateTime']
    xmp_keys = ['Xmp.xmp.CreateDate']
    attrs = {
        # Return exif_keys
        'exif_keys': exif_keys,
        # Return xmp_keys
        'xmp_keys': xmp_keys,
        # Every metadata[key] lookup returns an object with raw_value == 1.
        '__getitem__.return_value': Mock(raw_value=1),
    }
    # Create a mock that will return meaningful values for testing the
    # unit.
    m_keys = MagicMock()
    m_keys.configure_mock(**attrs)
    # Return the mock when pyexiv2.ImageMetadata(src_fn) is called.
    m_img_md.return_value = m_keys
    # Instantiate with any filename. Mock prevents read.
    filemd = FileMetadata("file.jpg")
    expected_metadata = {exif_keys[0]: 1, xmp_keys[0]: 1}
    assert filemd.read_metadata() == expected_metadata
def setUp(self):
    """
    Build a CrushNodeRequestFactory backed by a fully mocked cluster.

    The fake OSD map exposes a tiny CRUSH tree (one root holding two of
    three racks) plus lookup tables used by the factory under test.
    """
    # Minimal CRUSH tree keyed by node id; negative ids are buckets.
    crush_node_by_id = {-1: {'name': 'root', 'type_name': 'root',
                             'items': [{'id': -3, 'weight': 3, 'pos': 1},
                                       {'id': -2, 'weight': 2, 'pos': 0}]
                             },
                        -2: {'name': 'rack1', 'items': []},
                        -4: {'name': 'rack3', 'items': []},
                        -3: {'name': 'rack2', 'items': []}}
    # get_tree_node resolves ids against the table above.
    osd_map_attrs = {'get_tree_node': lambda x: crush_node_by_id[x],
                     'osd_tree_node_by_id': {2: {'name': 'osd.2'},
                                             3: {'name': 'osd.3'}},
                     'parent_bucket_by_node_id': {-2: {'name': 'root',
                                                       'type': 'root'}},
                     'osds_by_id': {0: {'up': True}, 1: {'up': False}}}
    fake_osd_map = MagicMock()
    fake_osd_map.configure_mock(**osd_map_attrs)
    fake_cluster_monitor = MagicMock()
    # The monitor hands out the fake OSD map via get_sync_object().
    attributes = {'name': 'I am a fake',
                  'fsid': 12345,
                  'get_sync_object.return_value': fake_osd_map}
    fake_cluster_monitor.configure_mock(**attributes)
    self.factory = CrushNodeRequestFactory(fake_cluster_monitor)
def test_filemetadata_set_datetime_exception(self, m_xmp_tag, m_exif_tag, m_img_md, m_logger, exception):
    """
    Test set_datetime() with an Exception raised by the write() call.

    Verify the exception is handled (not propagated) by asserting the
    mocked logger received it, and that img_md.write() was called once.
    """
    new_datetime = datetime.now()
    # Raise this exception when write() called.
    md_attrs = {
        'write.side_effect': exception,
        '__getitem__.return_value': Mock(raw_value=1),
    }
    m_write = MagicMock()
    m_write.configure_mock(**md_attrs)
    m_img_md.return_value = m_write
    filemd = FileMetadata("file.jpg")
    # Pre-populate metadata so set_datetime() proceeds to the write.
    filemd.metadata = {"One fish": "Two Fish"}
    # Will fall through and call filemd.img_md.write()...boom!
    filemd.set_datetime(new_datetime)
    # Confirm img_md.write() called.
    filemd.img_md.write.assert_called_once()
    # Confirm exception raised by proxy.
    m_logger.error.assert_called_once_with(exception)
def test_copy_metadata_main_args_with_verbose_params(self, m_logging, m_exists, m_isdir, m_process_all_files, m_argparser, verbose, log_level):
    """
    Test main() function. Mock CustomArgumentParser to return values
    desired for test. Parametrize different combinations of verbosity
    and log level. Verify that logging configured properly for verbose
    setting.
    """
    m_exists.return_value = True
    m_isdir.return_value = True
    # Mock up the proper return values.
    m_parse_args = MagicMock(
        src_directory="/abc",
        dst_directory="/def",
        simon_sez=True,
        verbose=verbose,
    )
    attrs = {
        'parse_args.return_value': m_parse_args,
    }
    # This one configured to return m_parse_args.
    m_parse_args_container = MagicMock()
    m_parse_args_container.configure_mock(**attrs)
    m_argparser.return_value = m_parse_args_container
    # Invoke the unit.
    main()
    # BUG FIX: `called_once_with` is not a Mock assertion method; the
    # original call merely created a child mock and could never fail.
    # `assert_called_once_with` actually verifies logging configuration.
    m_logging.basicConfig.assert_called_once_with(level=log_level)
    m_process_all_files.assert_called_with('/abc', '/def', simon_sez=True)
def test_copy_metadata_main_missing_directory(self, m_logger, m_exit, m_exists, m_isdir, m_process_all_files, m_argparser):
    """
    Test main() function. Mock CustomArgumentParser to return values
    desired for test but with missing `src_directory`. Verify correct
    behavior by asserting calls on logger, usage_message, and exit.
    """
    m_exists.return_value = True
    m_isdir.return_value = True
    # Mock up the proper return values.
    m_parse_args = MagicMock(
        src_directory=None,
        dst_directory="/def",
        simon_sez=True,
        verbose=False,
    )
    attrs = {
        'parse_args.return_value': m_parse_args,
        'usage_message.return_value': None,
    }
    # This one configured to return m_parse_args.
    m_parse_args_container = MagicMock()
    m_parse_args_container.configure_mock(**attrs)
    m_argparser.return_value = m_parse_args_container
    # Invoke the unit.
    # NOTE(review): retval is never asserted on — presumably main() has
    # no meaningful return here; confirm before removing.
    retval = main()
    # Confirm expected behavior
    m_logger.error.assert_called()
    m_parse_args_container.usage_message.assert_called_once()
    m_exit.assert_called_once_with(1)
def test_translation_static_transcript_xml_with_data_dirc(self): """ Test id data_dir is set in XML course. Set course data_dir and ensure we get redirected to that path if it isn't found in the contentstore. """ # Simulate data_dir set in course. test_modulestore = MagicMock() attrs = {"get_course.return_value": Mock(data_dir="dummy/static", static_asset_path="")} test_modulestore.configure_mock(**attrs) self.item_descriptor.runtime.modulestore = test_modulestore # Test youtube style en request = Request.blank("/translation/en?videoId=12345") response = self.item.transcript(request=request, dispatch="translation/en") self.assertEqual(response.status, "307 Temporary Redirect") self.assertIn(("Location", "/static/dummy/static/subs_12345.srt.sjson"), response.headerlist) # Test HTML5 video style self.item.sub = "OEoXaMPEzfM" request = Request.blank("/translation/en") response = self.item.transcript(request=request, dispatch="translation/en") self.assertEqual(response.status, "307 Temporary Redirect") self.assertIn(("Location", "/static/dummy/static/subs_OEoXaMPEzfM.srt.sjson"), response.headerlist) # Test different language to ensure we are just ignoring it since we can't # translate with static fallback request = Request.blank("/translation/uk") response = self.item.transcript(request=request, dispatch="translation/uk") self.assertEqual(response.status, "404 Not Found")
def test_changing_magic_set_in_initialization(self):
    """A magic method configured via the constructor can be re-assigned
    later, either directly or through configure_mock()."""
    # Direct reassignment of the magic method's return value wins.
    direct = MagicMock(**{'__str__.return_value': "12"})
    direct.__str__.return_value = "13"
    self.assertEqual(str(direct), "13")

    # configure_mock() likewise overrides the constructor setting.
    reconfigured = MagicMock(**{'__str__.return_value': "12"})
    reconfigured.configure_mock(**{'__str__.return_value': "14"})
    self.assertEqual(str(reconfigured), "14")
def test_copy_metadata_main_path_not_exist(self, m_exit, m_exists, m_isdir, m_process_all_files, m_argparser):
    """
    Test main() function. Mock CustomArgumentParser to return values
    desired for test. Mock os.path.exists to return False. Verify
    usage_message() called once and exit(1).
    """
    m_exists.return_value = False
    m_isdir.return_value = True
    # Mock up the proper return values.
    m_parse_args = MagicMock(
        src_directory="/abc",
        dst_directory="/def",
        simon_sez=True,
        verbose=False,
    )
    attrs = {
        'parse_args.return_value': m_parse_args,
        'usage_message.return_value': None,
    }
    # This one configured to return m_parse_args.
    m_parse_args_container = MagicMock()
    m_parse_args_container.configure_mock(**attrs)
    m_argparser.return_value = m_parse_args_container
    # Invoke the unit.
    # NOTE(review): retval is never asserted on; confirm main()'s return
    # is meaningless here before removing.
    retval = main()
    # Confirm expected behavior
    m_exit.assert_called_once_with(1)
    m_parse_args_container.usage_message.assert_called_once()
def test_pass_check_package(self, mock_popen):
    """_check_package() returns True when the package query exits 0."""
    fake_process = MagicMock(**{'communicate.return_value': ("", ""),
                                'returncode': 0})
    mock_popen.return_value = fake_process
    assert DebianDependencies._check_package("package") is True
def test_override_endpoint(self):
    """check that a session catalog is overriden"""
    # The fake v3 session hands back this test's canned access object.
    fake_session = MagicMock(**{'auth.get_access.return_value': self.access})
    self.osclients._session_v3 = fake_session
    self.override_endpoint()
    self.assertOverrideEndpoint()
def test_is_element_displayed_splinter(self, page, splinter, splinter_strategy):
    """is_element_displayed() is truthy when the first match is visible."""
    visible_attr = "find_by_{0}.return_value.first.visible".format(splinter_strategy)
    fake_root = MagicMock(**{visible_attr: True})
    locator = (splinter_strategy, str(random.random()))
    assert Region(page, root=fake_root).is_element_displayed(*locator)
def test_pass_execute_cmd_and_check_result(self, mock_subproc_popen):
    """A zero subprocess return code is accepted without raising."""
    fake_process = MagicMock()
    fake_process.configure_mock(**{'wait.return_value': None,
                                   'returncode': 0})
    mock_subproc_popen.return_value = fake_process
    package_creator = deb.DebianPackageCreator("", "", "")
    package_creator.workspace_path = "/tmp/eventlog_workspace10_31_55"
    package_creator._execute_cmd_and_check_result(["a", "b", "c"])
    assert mock_subproc_popen.called is True
def setUp(self):
    """Create an OsdRequestFactory around a self-referential fake monitor."""
    monitor = MagicMock()
    # The monitor returns itself from get_sync_object() so any sync
    # object lookup resolves back to this mock.
    monitor.configure_mock(
        name='I am a fake',
        fsid=12345,
        osds_by_id={0: {'up': True}, 1: {'up': False}},
        **{'get_sync_object.return_value': monitor}
    )
    self.osd_map = OsdMap(1, None)
    self.factory = OsdRequestFactory(monitor)
def test_fail_execute_cmd_and_check_result(self, mock_popen):
    """A non-zero subprocess return code makes the helper raise."""
    failing_process = MagicMock(**{'wait.return_value': None,
                                   'returncode': 1})
    mock_popen.return_value = failing_process
    package_creator = deb.DebianPackageCreator("", "", "")
    package_creator.workspace_path = "/tmp/eventlog_workspace10_31_55/eventlog"
    with pytest.raises(Exception):
        package_creator._execute_cmd_and_check_result(["a", "b", "c"])
def test_repr(self):
    """
    Verify Relation's string form for every combination of partition
    position/label data: both given, labels only, positions only, none.
    """
    relation = Relation(id=1, resolwe=MagicMock())
    relation.type = 'compare'
    relation.unit = 'min'
    relation.category = 'background'
    # `name` cannot be mocked in another way (it is consumed by the Mock
    # constructor), so it must go through configure_mock().
    sample_1 = MagicMock()
    sample_1.configure_mock(name='sample_1')
    sample_2 = MagicMock()
    sample_2.configure_mock(name='sample_2')
    relation._samples = [sample_1, sample_2]
    # Positions and labels are given
    relation.partitions = [
        {'id': 3, 'entity': 1, 'position': 10, 'label': 'first'},
        {'id': 4, 'entity': 2, 'position': 20, 'label': 'second'},
    ]
    self.assertEqual(
        str(relation),
        "Relation id: 1, type: 'compare', category: 'background', "
        "samples: {first (10 min): sample_1, second (20 min): sample_2}"
    )
    # Only labels are given
    relation.partitions = [
        {'id': 3, 'entity': 1, 'position': None, 'label': 'first'},
        {'id': 4, 'entity': 2, 'position': None, 'label': 'second'},
    ]
    self.assertEqual(
        str(relation),
        "Relation id: 1, type: 'compare', category: 'background', "
        "samples: {first: sample_1, second: sample_2}"
    )
    # Only positions are given
    relation.partitions = [
        {'id': 3, 'entity': 1, 'position': 10, 'label': None},
        {'id': 4, 'entity': 2, 'position': 20, 'label': None},
    ]
    self.assertEqual(
        str(relation),
        "Relation id: 1, type: 'compare', category: 'background', "
        "samples: {10 min: sample_1, 20 min: sample_2}"
    )
    # Only sample names are given
    relation.partitions = [
        {'id': 3, 'entity': 1, 'position': None, 'label': None},
        {'id': 4, 'entity': 2, 'position': None, 'label': None},
    ]
    self.assertEqual(
        str(relation),
        "Relation id: 1, type: 'compare', category: 'background', "
        "samples: {sample_1, sample_2}"
    )
def test_compose_filters(self):
    """limit/offset are merged into the filter dict only when both set."""
    fake_query = MagicMock(spec=ResolweQuery,
                           _filters={'id': 42, 'type': 'data'},
                           _limit=None, _offset=None)
    # No limit/offset -> filters pass through untouched.
    self.assertEqual(ResolweQuery._compose_filters(fake_query),
                     {'id': 42, 'type': 'data'})

    fake_query.configure_mock(_filters={'id': 42, 'type': 'data'},
                              _limit=5, _offset=2)
    # With limit/offset set, both appear in the composed dict.
    self.assertEqual(ResolweQuery._compose_filters(fake_query),
                     {'id': 42, 'type': 'data', 'limit': 5, 'offset': 2})
def test_find_elements_splinter(self, page, splinter, splinter_strategy):
    """find_elements() delegates to the strategy-specific splinter finder."""
    finder_name = "find_by_{0}".format(splinter_strategy)
    fake_root = MagicMock(**{"{0}.return_value".format(finder_name): Mock()})
    locator = (splinter_strategy, str(random.random()))
    Region(page, root=fake_root).find_elements(*locator)
    # The selector part of the locator must reach the finder verbatim.
    getattr(fake_root, finder_name).assert_called_once_with(locator[1])
def test_is_element_present_not_preset_splinter(self, page, splinter, splinter_strategy):
    """is_element_present() is falsy when the finder yields no elements."""
    from splinter.element_list import ElementList
    no_matches = ElementList([])
    fake_root = MagicMock(**{
        "find_by_{0}.return_value".format(splinter_strategy): no_matches
    })
    locator = (splinter_strategy, str(random.random()))
    assert not Region(page, root=fake_root).is_element_present(*locator)
def test_is_element_present_not_preset_splinter(
    self, page, splinter, splinter_strategy
):
    """is_element_present() is falsy when the finder yields no elements."""
    from splinter.element_list import ElementList
    empty_results = ElementList([])
    fake_root = MagicMock(
        **{"find_by_{0}.return_value".format(splinter_strategy): empty_results}
    )
    locator = (splinter_strategy, str(random.random()))
    assert not Region(page, root=fake_root).is_element_present(*locator)
def test_is_element_displayed_hidden_splinter(self, page, splinter, splinter_strategy):
    """is_element_displayed() is falsy when the first match is hidden."""
    hidden_attr = 'find_by_{0}.return_value.first.visible'.format(splinter_strategy)
    fake_root = MagicMock(**{hidden_attr: False})
    locator = (splinter_strategy, str(random.random()))
    assert not Region(page, root=fake_root).is_element_displayed(*locator)
def test_compose_filters(self):
    """limit/offset are merged into the filter dict only when both set."""
    fake_query = MagicMock(spec=ResolweQuery,
                           _filters={"id": 42, "type": "data"},
                           _limit=None, _offset=None)
    # No limit/offset -> filters pass through untouched.
    self.assertEqual(ResolweQuery._compose_filters(fake_query),
                     {"id": 42, "type": "data"})

    fake_query.configure_mock(_filters={"id": 42, "type": "data"},
                              _limit=5, _offset=2)
    # With limit/offset set, both appear in the composed dict.
    self.assertEqual(ResolweQuery._compose_filters(fake_query),
                     {"id": 42, "type": "data", "limit": 5, "offset": 2})
def test_process_deployment(self, popen, update_deployment):
    """Test process_deployment method (happy path, wait() returns 0)."""
    process_mock = MagicMock()
    attrs = {
        'communicate.return_value': ('ouput', 'error'),
        'wait.return_value': 0
    }
    process_mock.configure_mock(**attrs)
    popen.return_value = process_mock
    res = process_deployment(deployment)
    assert res, res
    # BUG FIX: `called_with` is not a Mock assertion method; accessing it
    # just creates a child mock whose call is always truthy, so the old
    # `assert update_deployment.called_with(...)` could never fail.
    update_deployment.assert_called_with(deployment, status='success')
def test_get_topologyitems_sync(self):
    """
    Test if it returns the topology items and tags for VM, including the
    stackstate-identifier derived from the attached vSphere tag.
    """
    instance = {'name': 'vsphere_mock', 'host': "ESXi"}
    self.check._is_excluded = MagicMock(return_value=False)
    server_mock = MagicMock()
    server_mock.configure_mock(
        **{'RetrieveContent.return_value': self.mock_content("vm")})
    self.check._get_server_instance = MagicMock(return_value=server_mock)
    # mock the vsphere client connect
    self.check.vsphere_client_connect = MagicMock()
    # mock the CategoryModel and TagModel for response
    category = VsphereCategory('345', 'stackstate-identifier')
    tags = VsphereTag('123', 'vishal-test', '345')
    # get the client
    client = vsphere_client()
    # list_attached_tags method returns list of tags ids of type string
    client.tagging.TagAssociation.list_attached_tags = MagicMock(
        return_value=['123'])
    # get method of Tag returns a TagModel object which is returned
    client.tagging.Tag.get = MagicMock(return_value=tags)
    # get method of Category returns a CategoryModel object which is returned
    client.tagging.Category.get = MagicMock(return_value=category)
    # assign the vsphere client object to the check vsphere client object
    self.check.client = client
    topo_dict = self.check.get_topologyitems_sync(instance)
    self.assertEqual(len(topo_dict["vms"]), 1)
    # Check if stackstate-identifier are as expected from vsphere tags and coming in identifiers section
    self.assertEqual(len(topo_dict["vms"][0]['topo_tags']['identifiers']), 1)
    self.assertEqual(topo_dict["vms"][0]['topo_tags']['identifiers'][0], 'vishal-test')
    # Check if tags are as expected
    self.assertEqual(topo_dict["vms"][0]['topo_tags']['name'], 'Ubuntu')
    self.assertEqual(topo_dict["vms"][0]['topo_tags']['domain'], 'ESXi')
    self.assertEqual(topo_dict["vms"][0]['topo_tags']['layer'], 'VSphere VMs')
    self.assertEqual(topo_dict["vms"][0]["topo_tags"]["topo_type"], "vsphere-VirtualMachine")
    self.assertEqual(topo_dict["vms"][0]['topo_tags']['datastore'], '54183927-04f91918-a72a-6805ca147c55')
def test_retrieve_one(self):
    """HotList.retrieve() yields the single task returned by the API."""
    fake_task = MagicMock(name='mocked_task')
    fake_task.configure_mock(duedate=None, star=None, priority=5,
                             status=1, title='retrieve one')
    # Remove the attribute entirely so hasattr(task, 'completed') is False.
    del fake_task.completed
    self.apic.return_value.getTasks.return_value = [fake_task]
    result = list(tasks.HotList().retrieve())
    self.assertEqual(result, [fake_task])
def make_language(name, source_extensions, header_extensions, compilation_command, evaluation_command):
    """Create a language (actually a MagicMock) with the given data.

    The first entry of each extension list becomes the singular
    source_extension / header_extension attribute. Compilation and
    evaluation commands are produced by the fake_* helpers with the
    given command baked in via functools.partial.
    """
    language = MagicMock()
    # `name` must be set via configure_mock: the Mock constructor would
    # otherwise consume it as the mock's own name.
    language.configure_mock(name=name,
                            source_extensions=source_extensions,
                            source_extension=source_extensions[0],
                            header_extensions=header_extensions,
                            header_extension=header_extensions[0])
    language.get_compilation_commands.side_effect = \
        functools.partial(fake_compilation_commands, compilation_command)
    language.get_evaluation_commands.side_effect = \
        functools.partial(fake_evaluation_commands, evaluation_command)
    return language
def test_is_not_next_action(self):
    """Tasks with status 2 are excluded when hotliststatus is '1'."""
    self.apic.return_value.getAccountInfo.return_value.hotliststatus = '1'
    fake_task = MagicMock(name='mocked_task')
    fake_task.configure_mock(duedate=None, star=None, priority=5,
                             status=2, title='retrieve one')
    # Remove the attribute entirely so hasattr(task, 'completed') is False.
    del fake_task.completed
    self.apic.return_value.getTasks.return_value = [fake_task]
    self.assertEqual(list(tasks.HotList().retrieve()), [])
def setUp(self):
    """Wire an RpcInterface to a single self-referential fake manager."""
    manager = MagicMock()
    # The manager answers every lookup (sync objects, item access,
    # clusters) with itself, so any chained access stays on this mock.
    manager.configure_mock(
        name='I am a fake',
        fsid=12345,
        clusters=manager,
        osd_tree_node_by_id={1: 'a node'},
        osds_by_id={0: {'up': True}, 1: {'up': False}},
        **{'get_sync_object.return_value': manager,
           '__getitem__.return_value': manager}
    )
    manager[12345] = manager
    self.rpc = RpcInterface(manager)
def test_process_deployment_process_error(self, popen, update_deployment):
    """Test process_deployment when the command exits non-zero."""
    process_mock = MagicMock()
    attrs = {
        'communicate.return_value': ('ouput', 'error'),
        'wait.return_value': 1
    }
    process_mock.configure_mock(**attrs)
    popen.return_value = process_mock
    res = process_deployment(deployment)
    assert res is False, res
    # BUG FIX: `called_with` is not a Mock assertion method; the old
    # `assert update_deployment.called_with(...)` could never fail.
    # Also build the expected message from the exact strings the mocked
    # communicate() returned ('ouput' — note the fixture spelling), so
    # the expectation matches what process_deployment actually saw.
    message = "command: %s ERROR: %s" % ('ouput', 'error')
    assert update_deployment.called_with(deployment,
                                         status='error',
                                         message=message)
    update_deployment.assert_called_with(deployment, status='error',
                                         message=message)
def test_is_clean_index_and_it_is_not_clean(self, monkeypatch):
    """is_clean_index() is falsy when the index already holds documents."""
    fake_index_name = MagicMock()
    fake_index = MagicMock()
    fake_index.configure_mock(name=fake_index_name)
    fake_elasticsearch = MagicMock()
    # Search(...).count() reports a non-zero document count.
    fake_search = MagicMock(return_value=MagicMock(
        count=MagicMock(return_value=1234567)))
    monkeypatch.setattr(u'eevee.indexing.indexers.Search', fake_search)
    task = self._create_indexing_task(index=fake_index,
                                      elasticsearch=fake_elasticsearch)
    assert not task.is_clean_index()
    # Search must have been constructed against our client and index name.
    assert fake_search.call_args_list == [
        call(using=fake_elasticsearch, index=fake_index_name)
    ]
def test_set_host_enabled_when_auto_disabled(self):
    """
    Enabling a host clears an automatic ('AUTO: ...') disable: the
    service ends up enabled with an empty disabled_reason.
    """
    self.mox.UnsetStubs()
    service_mock = MagicMock()
    # Previous status of the service: disabled: True, 'AUTO: ERROR'
    service_mock.configure_mock(disabled_reason='AUTO: ERROR',
                                disabled=True)
    from nova.objects import service as service_obj
    # Record phase: the stubbed lookup must return our mocked service.
    self.mox.StubOutWithMock(service_obj.Service, 'get_by_compute_host')
    service_obj.Service.get_by_compute_host(self.ctxt,
                                            'fake-mini').AndReturn(service_mock)
    self.mox.ReplayAll()
    self.connection.set_host_enabled('my_test_host', True)
    self.assertFalse(service_mock.disabled)
    self.assertEqual(service_mock.disabled_reason, '')
def test_set_host_enabled_dont_override_manually_disabled(self):
    """
    A manual disable (reason not prefixed 'AUTO:') must survive an
    automatic error report: the service stays disabled, reason intact.
    """
    self.mox.UnsetStubs()
    service_mock = MagicMock()
    # Previous status of the service: disabled: True, 'Manually disabled'
    service_mock.configure_mock(disabled_reason='Manually disabled',
                                disabled=True)
    from nova.objects import service as service_obj
    # Record phase: the stubbed lookup must return our mocked service.
    self.mox.StubOutWithMock(service_obj.Service, 'get_by_compute_host')
    service_obj.Service.get_by_compute_host(self.ctxt,
                                            'fake-mini').AndReturn(service_mock)
    self.mox.ReplayAll()
    self.connection.set_host_enabled('my_test_host', 'ERROR!')
    self.assertTrue(service_mock.disabled)
    self.assertEqual(service_mock.disabled_reason, 'Manually disabled')
def mock_aggregator():
    """Return a MagicMock aggregator whose flush APIs yield canned data."""
    canned = MOCK_FLUSH_DATA
    fake_aggregator = MagicMock()
    fake_aggregator.configure_mock(
        series={'series': canned},
        events=canned,
        service_checks=canned,
        **{
            'flush.return_value': canned,
            'flush_events.return_value': canned,
            'flush_service_checks.return_value': canned,
        }
    )
    return fake_aggregator
def sagemaker_session_custom_endpoint():
    """Build a LocalSession over mocked boto plumbing with a custom S3 endpoint."""
    fake_resource = Mock("resource")
    fake_client = MagicMock("client")
    boto_session = MagicMock("boto_session")
    boto_session.configure_mock(region_name="us-east-1")
    boto_session.resource = Mock(name="resource", return_value=fake_resource)
    boto_session.client = Mock(name="client", return_value=fake_client)
    session = sagemaker.local.local_session.LocalSession(
        boto_session=boto_session, s3_endpoint_url=ENDPOINT_URL)
    # Short-circuit bucket resolution so no AWS lookup is attempted.
    session.default_bucket = Mock(name="default_bucket",
                                  return_value=BUCKET_NAME)
    return session
def test_process_deployment_oserror(self, popen, update_deployment):
    """Test process_deployment when wait() raises OSError."""
    process_mock = MagicMock()
    attrs = {
        'communicate.return_value': ('ouput', 'error'),
        'wait.return_value': 1,
        # side_effect takes precedence over return_value: wait() raises.
        'wait.side_effect': OSError
    }
    process_mock.configure_mock(**attrs)
    popen.return_value = process_mock
    res = process_deployment(deployment)
    assert res is False, res
    e = OSError()
    # BUG FIX: `called_with` is not a Mock assertion method; the old
    # `assert update_deployment.called_with(...)` could never fail.
    update_deployment.assert_called_with(deployment, status='error',
                                         message=str(e))
def test_set_host_enabled(self):
    """
    Reporting an error on an enabled service auto-disables it with an
    'AUTO: '-prefixed reason.
    """
    self.mox.UnsetStubs()
    service_mock = MagicMock()
    # Previous status of the service: disabled: False
    # service_mock.__getitem__.return_value = False
    service_mock.configure_mock(disabled_reason='',
                                disabled=False)
    from nova.objects import service as service_obj
    # Record phase: the stubbed lookup must return our mocked service.
    self.mox.StubOutWithMock(service_obj.Service, 'get_by_compute_host')
    service_obj.Service.get_by_compute_host(self.ctxt,
                                            'fake-mini').AndReturn(service_mock)
    self.mox.ReplayAll()
    self.connection.set_host_enabled('my_test_host', 'ERROR!')
    self.assertTrue(service_mock.disabled)
    self.assertEqual(service_mock.disabled_reason, 'AUTO: ERROR!')
def setUp(self):
    """Create the OsdRequestFactory under test with a mocked monitor."""
    monitor = MagicMock()
    # get_sync_object() resolves back to the monitor itself.
    monitor.configure_mock(
        name='I am a fake',
        fsid=12345,
        osds_by_id={0: {'up': True}, 1: {'up': False}},
        **{'get_sync_object.return_value': monitor}
    )
    self.osd_request_factory = OsdRequestFactory(monitor)
def get_mocked_server(): """ Return a mocked Server object """ # mock pyvmomi stuff all_mors = create_topology(os.path.join(HERE, 'fixtures', 'vsphere_topology.json')) root_folder_mock = next(mor for mor in all_mors if mor.name == "rootFolder") event_mock = MagicMock(createdTime=datetime.utcnow()) eventmanager_mock = MagicMock(latestEvent=event_mock) property_collector_mock = MagicMock() property_collector_mock.RetrievePropertiesEx.return_value = retrieve_properties_mock(all_mors) content_mock = MagicMock( eventManager=eventmanager_mock, propertyCollector=property_collector_mock, rootFolder=root_folder_mock ) # assemble the mocked server server_mock = MagicMock() server_mock.configure_mock(**{'RetrieveContent.return_value': content_mock, 'content': content_mock}) return server_mock
def test_update_refresh_interval():
    """Each distinct index receives exactly one put_settings call."""
    es_client = MagicMock(indices=MagicMock(put_settings=MagicMock()))
    first_index = MagicMock()
    first_index.configure_mock(name=u'index_1')
    second_index = MagicMock()
    second_index.configure_mock(name=u'index_2')
    refresh_interval = 10
    # the second index appears twice so we can check the refresh isn't
    # applied multiple times to the same index
    update_refresh_interval(es_client,
                            [first_index, second_index, second_index],
                            refresh_interval)
    put_settings = es_client.indices.put_settings
    assert put_settings.call_count == 2
    expected_body = {u'index': {u'refresh_interval': refresh_interval}}
    assert call(expected_body, first_index.name) in put_settings.call_args_list
    assert call(expected_body, second_index.name) in put_settings.call_args_list
def test_filemetadata_copy_metadata_exception(self, m_img_md, m_logger, exception): """ Test copy_metadata() method with exception. Verify ImageMetadata.write() called. Verify logger.error(exception) called which confirms exception handling. """ # Raise this exception when write() called. attrs = {'write.side_effect': exception,} m_write = MagicMock() m_write.configure_mock(**attrs) m_img_md.return_value = m_write filemd = FileMetadata("file.tiff") # Will fall through and call filemd.img_md.write()...boom! filemd.copy_metadata("other.tiff") # Confirm img_md.write() called. filemd.img_md.write.assert_called_once() # Confirm exception raised by proxy. m_logger.error.assert_called_once_with(exception)
def test_list_artifacts_with_subdir(sftp_mock):
    """
    list_artifacts() on a subdirectory returns one FileInfo per entry:
    files carry their size, directories carry is_dir=True and no size.
    """
    artifact_root_path = "/experiment_id/run_id/"
    repo = SFTPArtifactRepository("sftp://test_sftp" + artifact_root_path, sftp_mock)
    # mocked file structure
    # |- model
    #     |- model.pb
    #     |- variables
    dir_name = 'model'
    # list artifacts at sub directory level
    file_path = 'model.pb'
    file_size = 345
    subdir_name = 'variables'
    sftp_mock.listdir = MagicMock(return_value=[file_path, subdir_name])
    # isdir answers only for the three known paths; anything else KeyErrors.
    sftp_mock.isdir = MagicMock(
        side_effect=lambda path: {
            posixpath.join(artifact_root_path, dir_name): True,
            posixpath.join(artifact_root_path, dir_name, file_path): False,
            posixpath.join(artifact_root_path, dir_name, subdir_name): True,
        }[path])
    file_stat = MagicMock()
    file_stat.configure_mock(st_size=file_size)
    sftp_mock.stat = MagicMock(return_value=file_stat)
    artifacts = repo.list_artifacts(path=dir_name)
    sftp_mock.listdir.assert_called_once_with(artifact_root_path + dir_name)
    # stat is only consulted for the regular file, not the directory.
    sftp_mock.stat.assert_called_once_with(artifact_root_path + dir_name + '/' + file_path)
    assert len(artifacts) == 2
    assert artifacts[0].path == posixpath.join(dir_name, file_path)
    assert artifacts[0].is_dir is False
    assert artifacts[0].file_size == file_size
    assert artifacts[1].path == posixpath.join(dir_name, subdir_name)
    assert artifacts[1].is_dir is True
    assert artifacts[1].file_size is None
def test_activate_transfer_function_ok(self, mocked_tf_framework):
    """Toggling an existing transfer function via the handler succeeds."""
    fake_tf = MagicMock()
    fake_tf.configure_mock(name='tf_0', active=True)
    mocked_tf_framework.get_transfer_function = MagicMock(return_value=fake_tf)
    mocked_tf_framework.activate_transfer_function = MagicMock()
    handlers = self.__get_handlers_for_testing_main()
    activate_handler = handlers['activate_transfer_function']
    # Request deactivation of the currently-active transfer function.
    request = MagicMock(transfer_function_name=fake_tf.name,
                        activate=False)
    response_message = activate_handler(request)
    # An empty response string means no error occurred.
    self.assertEqual(response_message, "")
    self.assertTrue(mocked_tf_framework.activate_transfer_function.called)
def test_get_topologyitems_with_vm_regexes(self):
    """
    VMs that do not match vm_include_only_regex are dropped from the
    collected topology.
    """
    instance = {
        'name': 'vsphere_mock',
        'host': "ESXi",
        "vm_include_only_regex": "VM"
    }
    fake_server = MagicMock()
    fake_server.configure_mock(
        **{'RetrieveContent.return_value': self.mock_content("vm")})
    self.check._get_server_instance = MagicMock(return_value=fake_server)
    # stub out the real vSphere client connection
    self.check.vsphere_client_connect = MagicMock()
    topology = self.check.get_topologyitems_sync(instance)
    self.assertEqual(len(topology["vms"]), 0)
def test_get_topologyitems_with_host_regexes(self):
    """
    Hosts matching host_include_only_regex are kept in the collected
    topology.
    """
    instance = {
        'name': 'vsphere_mock',
        'host': "ESXi",
        "host_include_only_regex": "localhost"
    }
    self.load_check({})
    # self.check._is_excluded = MagicMock(return_value=False)
    fake_server = MagicMock()
    fake_server.configure_mock(
        **{'RetrieveContent.return_value': self.mock_content("host")})
    self.check._get_server_instance = MagicMock(return_value=fake_server)
    topology = self.check.get_topologyitems_sync(instance)
    self.assertEqual(len(topology["hosts"]), 1)
def test_translation_static_transcript_xml_with_data_dirc(self): """ Test id data_dir is set in XML course. Set course data_dir and ensure we get redirected to that path if it isn't found in the contentstore. """ # Simulate data_dir set in course. test_modulestore = MagicMock() attrs = { 'get_course.return_value': Mock(data_dir='dummy/static', static_asset_path='') } test_modulestore.configure_mock(**attrs) self.item_descriptor.runtime.modulestore = test_modulestore # Test youtube style en request = Request.blank('/translation/en?videoId=12345') response = self.item.transcript(request=request, dispatch='translation/en') self.assertEqual(response.status, '307 Temporary Redirect') self.assertIn( ('Location', '/static/dummy/static/subs_12345.srt.sjson'), response.headerlist) # Test HTML5 video style self.item.sub = 'OEoXaMPEzfM' request = Request.blank('/translation/en') response = self.item.transcript(request=request, dispatch='translation/en') self.assertEqual(response.status, '307 Temporary Redirect') self.assertIn( ('Location', '/static/dummy/static/subs_OEoXaMPEzfM.srt.sjson'), response.headerlist) # Test different language to ensure we are just ignoring it since we can't # translate with static fallback request = Request.blank('/translation/uk') response = self.item.transcript(request=request, dispatch='translation/uk') self.assertEqual(response.status, '404 Not Found')
def test_set_datetime_main_args_with_interval_params(self, m_logger, m_exists, m_isdir, m_process_all_files, m_argparser, interval, new_datetime):
    """
    Test set_datetime main() function. Parametrize interval. Mock needed
    args. Verify correct behavior by confirming method calls: a missing
    interval warns and defaults to "1".
    """
    workdir = "/"
    m_exists.return_value = True
    m_isdir.return_value = True
    # Mock up the proper return values.
    m_parse_args = MagicMock(
        directory=workdir,
        datetime=new_datetime,
        interval=interval,
        simon_sez=True,
    )
    attrs = {
        'parse_args.return_value': m_parse_args,
        'usage_message.return_value': None,
    }
    # This one configured to return m_parse_args.
    m_parse_args_container = MagicMock()
    m_parse_args_container.configure_mock(**attrs)
    m_argparser.return_value = m_parse_args_container
    # Invoke the unit.
    # NOTE(review): retval is never asserted on; confirm main()'s return
    # is meaningless here before removing.
    retval = main()
    # A missing interval falls back to the default of "1".
    expected_interval = interval
    if interval is None:
        expected_interval = "1"
    # Confirm expected behavior.
    if interval is None:
        m_logger.warn.assert_called_once()
    m_process_all_files.assert_called_with(
        workdir, new_datetime, int(expected_interval), simon_sez=True)
def get_mocked_server(): """ Return a mocked Server object """ # create topology from a fixture file vcenter_topology = create_topology('vsphere_topology.json') # mock pyvmomi stuff view_mock = MockedContainer(topology=vcenter_topology) viewmanager_mock = MagicMock( **{'CreateContainerView.return_value': view_mock}) event_mock = MagicMock(createdTime=datetime.now()) eventmanager_mock = MagicMock(latestEvent=event_mock) content_mock = MagicMock(viewManager=viewmanager_mock, eventManager=eventmanager_mock) # assemble the mocked server server_mock = MagicMock() server_mock.configure_mock(**{ 'RetrieveContent.return_value': content_mock, 'content': content_mock, }) return server_mock
def mock_comp_obj(request):
    """
    Generator fixture: patch hal.component to return a mocked component
    object with settable/readable pins, yield it, then undo the patch.
    """
    # Mock hal.component and returned object
    # - Settable and readable pins
    # Pin storage is also exposed on the test instance for assertions.
    request.instance.pin_values = pin_values = dict(__default=0xDEADBEEF)

    def get_pin(key):
        # Unknown pins fall back to the sentinel default value.
        if key in pin_values:
            value = pin_values[key]
            print("Returning pin %s value=%s" % (key, value))
        else:
            value = pin_values["__default"]
            print("Returning pin %s DEFAULT value=0x%x" % (key, value))
        return value

    def set_pin(key, value):
        print("Setting pin %s value=%s" % (key, value))
        pin_values[key] = value

    mock_objs_dict["comp_name"] = "test_comp"
    # getprefix/setprefix read and write the shared comp_name entry.
    comp_getprefix = MagicMock(side_effect=lambda: mock_objs_dict["comp_name"])

    def set_comp_name(n):
        mock_objs_dict["comp_name"] = n

    comp_setprefix = MagicMock(side_effect=set_comp_name)
    mock_comp_obj = MagicMock(name="mock_hal_comp_obj")
    # name must go through configure_mock (the constructor consumes it);
    # item access on the component maps to the pin helpers above.
    mock_comp_obj.configure_mock(name="mock_hal_comp_obj",
                                 getprefix=comp_getprefix,
                                 setprefix=comp_setprefix,
                                 set_pin=set_pin,
                                 **{
                                     "__getitem__.side_effect": get_pin,
                                     "__setitem__.side_effect": set_pin,
                                 })
    patcher = patch("hal.component", return_value=mock_comp_obj)
    mock_hal = patcher.start()
    mock_objs_dict["hal_comp"] = mock_hal
    # Pass hal.component fixture
    yield mock_comp_obj
    patcher.stop()
def mock_comp_obj():
    # Mock hal.component and returned object
    # - Settable and readable pins
    pin_value_map = dict(__default=0xDEADBEEF)

    def read_pin(key):
        # Unknown pins fall back to the sentinel default value.
        if key not in pin_value_map:
            value = pin_value_map['__default']
            print("Returning pin %s DEFAULT value=0x%x" % (key, value))
        else:
            value = pin_value_map[key]
            print("Returning pin %s value=%s" % (key, value))
        return value

    def write_pin(key, value):
        print("Setting pin %s value=%s" % (key, value))
        pin_value_map[key] = value

    mock_objs_dict['comp_name'] = 'test_comp'
    # getprefix/setprefix proxy the component name through mock_objs_dict.
    comp_getprefix = MagicMock(
        side_effect=lambda: mock_objs_dict['comp_name'])

    def rename_comp(n):
        mock_objs_dict['comp_name'] = n

    comp_setprefix = MagicMock(side_effect=rename_comp)

    # name= must be set via configure_mock since the Mock constructor
    # reserves it for the mock's repr name.
    comp = MagicMock(name='mock_hal_comp_obj')
    comp.configure_mock(name='mock_hal_comp_obj',
                        getprefix=comp_getprefix,
                        setprefix=comp_setprefix,
                        set_pin=write_pin,
                        **{
                            '__getitem__.side_effect': read_pin,
                            '__setitem__.side_effect': write_pin,
                        })

    patcher = patch('hal.component', return_value=comp)
    mock_objs_dict['hal_comp'] = patcher.start()

    # Pass hal.component fixture
    yield comp

    patcher.stop()
def test_update_statuses_no_update(self, monkeypatch):
    """With update_status=False only the status index itself is touched."""
    # Pretend no index exists yet so the status index gets created.
    es_mock = MagicMock(indices=MagicMock(
        exists=MagicMock(return_value=False)))
    monkeypatch.setattr(
        u'eevee.indexing.indexers.get_elasticsearch_client',
        MagicMock(return_value=es_mock))

    # name= is reserved by the MagicMock constructor, so it has to be
    # set through configure_mock after construction.
    named = []
    for index_name in (u'index1', u'index2', u'index3'):
        index = MagicMock()
        index.configure_mock(name=index_name)
        named.append(index)
    index1, index2, index3 = named
    feeders_and_indexes = [
        (MagicMock(), index1),
        (MagicMock(), index2),
        (MagicMock(), index1),
        (MagicMock(), index3),
    ]

    index_definition = {
        u'settings': {
            u'index': {
                # this will always be a small index so no need to create a bunch of shards
                u'number_of_shards': 1,
                u'number_of_replicas': 1
            }
        },
        u'mappings': {
            DOC_TYPE: {
                u'properties': {
                    u'name': {
                        u'type': u'keyword'
                    },
                    u'index_name': {
                        u'type': u'keyword'
                    },
                    u'latest_version': {
                        u'type': u'date',
                        u'format': u'epoch_millis'
                    }
                }
            }
        }
    }

    indexer = Indexer(MagicMock(), MagicMock(), feeders_and_indexes,
                      update_status=False)
    indexer.update_statuses()

    status_index = indexer.config.elasticsearch_status_index_name
    # Only the status index was checked and created; no documents were
    # indexed because update_status is off.
    assert es_mock.indices.exists.call_args_list == [call(status_index)]
    assert es_mock.indices.create.call_args_list == [
        call(status_index, body=index_definition)
    ]
    assert not es_mock.index.called
def test_define_indexes(self, monkeypatch):
    """define_indexes() creates only the indexes that don't exist yet."""
    # Pretend that only index3 already exists in elasticsearch.
    es_mock = MagicMock(indices=MagicMock(
        exists=MagicMock(side_effect=lambda n: n == u'index3')))
    monkeypatch.setattr(
        u'eevee.indexing.indexers.get_elasticsearch_client',
        MagicMock(return_value=es_mock))

    # name= is reserved by the MagicMock constructor, so it has to be
    # set through configure_mock after construction.
    named = []
    for index_name in (u'index1', u'index2', u'index3'):
        index = MagicMock()
        index.configure_mock(name=index_name)
        named.append(index)
    index1, index2, index3 = named
    feeders_and_indexes = [
        (MagicMock(), index1),
        (MagicMock(), index2),
        (MagicMock(), index1),
        (MagicMock(), index3),
    ]

    indexer = Indexer(MagicMock(), MagicMock(), feeders_and_indexes)
    indexer.define_indexes()

    # Existence is checked once per distinct index name.
    assert es_mock.indices.exists.call_count == 3
    for index_name in [u'index1', u'index2', u'index3']:
        assert call(index_name) in es_mock.indices.exists.call_args_list
    # Only the two missing indexes are created.
    assert es_mock.indices.create.call_count == 2
    for index in [index1, index2]:
        assert call(index.name, body=index.get_index_create_body()
                    ) in es_mock.indices.create.call_args_list
def test_set_datetime_main_args_invalid_datetime(
        self, m_logger, m_exit, m_match, m_exists, m_isdir,
        m_process_all_files, m_argparser, new_datetime):
    """
    Test set_datetime main() function.

    Mock re.match() and set return value = False for datetime validity
    check. Verify correct behavior by confirming correct calls to
    logger, usage_message, and exit.
    """
    workdir = "/abc/def"
    interval = "1"
    m_exists.return_value = True
    m_isdir.return_value = True
    # Force the datetime-format validation to fail.
    m_match.return_value = False

    # Mock up the proper return values.
    m_parse_args = MagicMock(
        directory=workdir,
        datetime=new_datetime,
        interval=interval,
        simon_sez=True,
    )
    attrs = {
        'parse_args.return_value': m_parse_args,
        'usage_message.return_value': None,
    }
    # This one configured to return m_parse_args.
    m_parse_args_container = MagicMock()
    m_parse_args_container.configure_mock(**attrs)
    m_argparser.return_value = m_parse_args_container

    # Invoke the unit. (Return value intentionally ignored; behavior is
    # verified via the mocks below.)
    main()

    # Confirm expected behavior: invalid datetime logs an error, prints
    # the usage message, and exits.
    m_logger.error.assert_called()
    m_parse_args_container.usage_message.assert_called_once()
    m_exit.assert_called_once()
def test_set_datetime_main_args_with_workdir_params(
        self, m_exists, m_isdir, m_process_all_files, m_argparser,
        workdir, new_datetime):
    """
    Test set_datetime main() function.

    Parametrize workdir. Mock all necessary arguments. Confirm
    process_all_files() method call is correct.
    """
    interval = "1"
    m_exists.return_value = True
    m_isdir.return_value = True

    # Mock up the proper return values.
    m_parse_args = MagicMock(
        directory=workdir,
        datetime=new_datetime,
        interval=interval,
        simon_sez=True,
    )
    attrs = {
        'parse_args.return_value': m_parse_args,
    }
    # This one configured to return m_parse_args.
    m_parse_args_container = MagicMock()
    m_parse_args_container.configure_mock(**attrs)
    m_argparser.return_value = m_parse_args_container

    # Invoke the unit. (Return value intentionally ignored; behavior is
    # verified via the mocks below.)
    main()

    # A missing workdir falls back to the current working directory.
    expected_workdir = os.getcwd() if workdir is None else workdir

    # Confirm expected behavior.
    m_process_all_files.assert_called_with(
        expected_workdir, new_datetime, int(interval), simon_sez=True)
def test_redis_includeme(self, mocked_redis):
    """
    Test redis_includeme() wiring: the returned db object must expose
    the backend name, connection settings from storage_config, and the
    (mocked) set/exists operations.
    """
    config = {
        'storage_config': {
            'cache_host': '127.0.0.1',
            'cache_port': '6379',
            'cache_db_name': '0'
        }
    }
    # Stub the redis client; set/exists are replaced with None so we can
    # verify they are passed through to db.set_value/db.has_value.
    StrictRedis_mock = MagicMock()
    StrictRedis_mock.configure_mock(**{'set': None, 'exists': None})
    mocked_redis.StrictRedis.return_value = StrictRedis_mock

    db = redis_includeme(config)

    storage_config = config['storage_config']
    self.assertEqual(db._backend, 'redis')
    self.assertEqual(db._db_name, storage_config['cache_db_name'])
    self.assertEqual(db._port, storage_config['cache_port'])
    # Original asserted _host twice; a single check suffices.
    self.assertEqual(db._host, storage_config['cache_host'])
    self.assertEqual(db.set_value, None)
    self.assertEqual(db.has_value, None)
def setUp(self):
    """Create a redis-backed db and swap its operations for a dict store."""
    self.config = {
        'storage_config': {
            'cache_host': '127.0.0.1',
            'cache_port': '6379',
            'cache_db_name': '0'
        }
    }
    # Patch the redis module only while redis_includeme() runs, so the
    # db object is built against a stubbed StrictRedis client.
    with patch('openprocurement.bridge.basic.storages.redis_plugin.redis'
               ) as mocked_redis:
        client_mock = MagicMock()
        client_mock.configure_mock(**{'set': None, 'exists': None})
        mocked_redis.StrictRedis.return_value = client_mock
        self.db = redis_includeme(self.config)

    # Back the db with an in-memory dict for the tests.
    self.db.db = dict()

    def set_value(key, value):
        self.db.db[key] = value

    self.db.set_value = set_value
    self.db.has_value = lambda x: x in self.db.db
def test_set_datetime_main_args_with_verbose_params(
        self, m_logging, m_exists, m_isdir, m_process_all_files,
        m_argparser, verbose, log_level, new_datetime):
    """
    Test main() function.

    Mock CustomArgumentParser to return values desired for test. Verify
    process_all_files called with expected arguments and that logging
    is configured with the expected level.
    """
    workdir = "/abc"
    interval = "1"
    m_exists.return_value = True
    m_isdir.return_value = True

    # Mock up the proper return values.
    m_parse_args = MagicMock(
        directory=workdir,
        datetime=new_datetime,
        interval=interval,
        simon_sez=True,
        verbose=verbose,
    )
    attrs = {
        'parse_args.return_value': m_parse_args,
    }
    # This one configured to return m_parse_args.
    m_parse_args_container = MagicMock()
    m_parse_args_container.configure_mock(**attrs)
    m_argparser.return_value = m_parse_args_container

    # Invoke the unit. (Return value intentionally ignored; behavior is
    # verified via the mocks below.)
    main()

    # Confirm expected behavior. BUG FIX: the original used
    # ``called_once_with`` which is not a Mock assertion method -- it
    # auto-creates a child mock and always passes silently.
    m_logging.basicConfig.assert_called_once_with(level=log_level)
    m_process_all_files.assert_called_with(
        workdir, new_datetime, int(interval), simon_sez=True)
def test_retrieve_multi(self):
    """Retrieving a HotList yields every task the API returned, in order."""

    def make_task(mock_name, title):
        # name= is reserved by the Mock constructor for the repr name;
        # the task attributes are applied via configure_mock afterwards.
        task = MagicMock(name=mock_name)
        task.configure_mock(duedate=None,
                            star=None,
                            priority=5,
                            status=1,
                            title=title)
        # Deleting the attribute makes access to .completed raise
        # AttributeError, mimicking a task without that field.
        del task.completed
        return task

    mock_task_1st = make_task('mocked_task_1st', 'retrieve one')
    mock_task_2nd = make_task('mocked_task_2nd', 'retrieve two')
    self.apic.return_value.getTasks.return_value = [mock_task_1st,
                                                    mock_task_2nd]

    hotlist = tasks.HotList()
    result = list(hotlist.retrieve())

    self.assertEqual(result, [mock_task_1st, mock_task_2nd])