def test_cache(self, model):
    """When get_seq_record is called multiple times to fetch the same
    seq record, it should not go out to Entrez after the first time.
    It should cache the results."""
    with patch('cpg_islands.models.Entrez') as mock_entrez:
        with patch('cpg_islands.models.SeqIO') as mock_seqio:
            # call previously necessary methods
            mock_entrez.read.return_value = {
                'IdList': [sentinel._, sentinel._,
                           sentinel.chosen_id, sentinel._],
                'QueryTranslation': sentinel._}
            model.search(sentinel._)
            handle = MagicMock()
            mock_entrez.efetch.return_value = handle
            mock_seqio.read.return_value = sentinel.record
            index = 2
            record = model.get_seq_record(index)
            assert record == sentinel.record
            record = model.get_seq_record(index)
            assert record == sentinel.record
            # Should be called once and only once.
            mock_entrez.efetch.assert_called_once_with(
                db='nucleotide', id=sentinel.chosen_id,
                rettype='gb', retmode='text')
            # Should be called once and only once.
            assert mock_seqio.mock_calls == [call.read(handle, 'genbank')]

def test_normal_use(self, model):
    with patch('cpg_islands.models.Entrez') as mock_entrez:
        with patch('cpg_islands.models.SeqIO') as mock_seqio:
            # call previously necessary methods
            mock_entrez.read.return_value = {
                'IdList': [sentinel._, sentinel._,
                           sentinel.chosen_id, sentinel._],
                'QueryTranslation': sentinel._}
            model.search(sentinel._)
            handle = MagicMock()
            mock_entrez.efetch.return_value = handle
            mock_seqio.read.return_value = sentinel.record
            record = model.get_seq_record(2)
            assert record == sentinel.record
            # We don't care about the things that were done with the
            # Entrez module earlier in `model.search()', so just assert
            # that `Entrez.efetch()' has been called correctly.
            mock_entrez.efetch.assert_called_once_with(
                db='nucleotide', id=sentinel.chosen_id,
                rettype='gb', retmode='text')
            assert mock_seqio.mock_calls == [call.read(handle, 'genbank')]

def test_cache(self, model):
    """When get_seq_record is called multiple times to fetch the same
    seq record, it should not go out to Entrez after the first time.
    It should cache the results."""
    with patch('cpg_islands.models.Entrez') as mock_entrez:
        with patch('cpg_islands.models.SeqIO') as mock_seqio:
            # call previously necessary methods
            mock_entrez.read.return_value = {
                'IdList': [sentinel._, sentinel._,
                           sentinel.chosen_id, sentinel._],
                'QueryTranslation': sentinel._}
            model.search(sentinel._)
            handle = MagicMock()
            mock_entrez.efetch.return_value = handle
            mock_seqio.read.return_value = sentinel.record
            index = 2
            record = model.get_seq_record(index)
            assert record == sentinel.record
            record = model.get_seq_record(index)
            assert record == sentinel.record
            # Should be called once and only once.
            mock_entrez.efetch.assert_called_once_with(
                db='nucleotide', id=sentinel.chosen_id,
                rettype='gb', retmode='text')
            # Should be called once and only once.
            assert mock_seqio.mock_calls == [call.read(handle, 'genbank')]

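# A minimal sketch of a `get_seq_record` implementation that would satisfy
# the cache and normal-use tests above. Only the Entrez.efetch()/SeqIO.read()
# arguments and the caching behaviour are pinned down by the assertions; the
# attribute names `_id_list` and `_record_cache` are assumptions made here
# purely for illustration.
def get_seq_record(self, index):
    if index not in self._record_cache:
        handle = Entrez.efetch(db='nucleotide', id=self._id_list[index],
                               rettype='gb', retmode='text')
        # parse the GenBank record and remember it so repeated lookups
        # do not hit Entrez again
        self._record_cache[index] = SeqIO.read(handle, 'genbank')
    return self._record_cache[index]
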
def test_df_reader_iter_all_ok(self):
    etag = md5()
    fcont = '123456789'
    etag.update(fcont)
    etag.update(fcont)
    etag.update(fcont)
    meta = {'name': self._obj_name(),
            'Content-Length': 3 * len(fcont),
            'ETag': etag.hexdigest()}
    self.ioctx.get_xattr.return_value = pickle.dumps(meta)
    self.ioctx.stat.return_value = (3 * len(fcont), 0)
    success = False
    try:
        with self.df.open():
            rdr = self.df.reader()
            num_chunks = 0
            self.ioctx.read.return_value = fcont
            for chunk in rdr:
                num_chunks += 1
                assert chunk == fcont
                if num_chunks == 3:
                    self.ioctx.read.return_value = None
            assert num_chunks == 3
            success = True
    except Exception:
        pass
    finally:
        assert success
    self._assert_if_rados_opened_closed()
    # check read calls
    call_list = [call.read(self._obj_name(), offset=0),
                 call.read(self._obj_name(), offset=len(fcont)),
                 call.read(self._obj_name(), offset=2 * len(fcont)),
                 call.read(self._obj_name(), offset=3 * len(fcont))]
    self.ioctx.assert_has_calls(call_list)
    # if everything is perfect, the object will not be deleted
    assert self.ioctx.remove_object.call_count == 0

def test_flush(self, mock_size):
    mock = Mock()
    self.device._device = mock.device
    self.device.read = mock.read
    self.device.flush()
    expected = [call.device.flush(), call.read(mock_size)]
    self.assertListEqual(expected, mock.mock_calls)

def test_empty_file_py2():
    fake_fp = Mock()
    with patch('osfclient.utils.six.PY2', True):
        empty = file_empty(fake_fp)

    expected = [call.read(), call.seek(0)]
    assert expected == fake_fp.mock_calls
    # mocks and calls on mocks always return True, so this should be False
    assert not empty

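# A rough sketch of the Python 2 path implied by the mock calls above: read
# everything, rewind so the caller can still use the handle, and report
# whether anything came back. The helper name below is hypothetical; the
# real osfclient `file_empty` (and its Python 3 branch) may differ.
def file_empty_py2(fp):
    contents = fp.read()  # recorded as call.read() on the mock
    fp.seek(0)            # recorded as call.seek(0)
    return not contents   # a Mock return value is truthy, hence "not empty" here
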
def test_suggest(self, model):
    with patch('cpg_islands.models.Entrez') as mock_entrez:
        mock_entrez.espell.return_value = sentinel.handle
        mock_entrez.read.return_value = {
            'CorrectedQuery': sentinel.corrected_query}
        suggestion = model.suggest(sentinel.text)
        assert suggestion == sentinel.corrected_query
        assert mock_entrez.mock_calls == [
            call.espell(db='pubmed', term=sentinel.text),
            call.read(sentinel.handle)]

def test_gread( self ):
    for n in range( 0, 4 ):
        f = MagicMock( )
        # The mock file contains "12". Each read() invocation shall return
        # one byte from that, followed by the empty string for EOF.
        f.read.side_effect = [ '1', '2', '' ]
        # Read n bytes greedily
        # noinspection PyTypeChecker
        self.assertEqual( self.gread( f, n ), "12"[ :n ] )
        # First call to read() should request n bytes and then one less on
        # each subsequent call.
        self.assertEqual( f.mock_calls,
                          [ call.read( i ) for i in range( n, 0, -1 ) ] )

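# A minimal sketch of the greedy-read helper exercised above, inferred from
# the assertions: keep asking for however many bytes are still missing until
# either n bytes have been collected or read() signals EOF with an empty
# string. The real `gread` under test may differ in detail.
def gread( f, n ):
    buf = ''
    while len( buf ) < n:
        chunk = f.read( n - len( buf ) )  # first call asks for n, then n-1, ...
        if not chunk:
            break  # EOF before n bytes were available
        buf += chunk
    return buf
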
def test_search(self, model):
    with patch('cpg_islands.models.Entrez') as mock_entrez:
        mock_entrez.esearch.return_value = sentinel.handle
        mock_entrez.read.return_value = {
            'IdList': sentinel.id_list,
            'QueryTranslation': sentinel.query_translation}
        results = model.search(sentinel.search)
        assert results == (sentinel.id_list, sentinel.query_translation)
        assert mock_entrez.mock_calls == [
            call.esearch(db='nucleotide', term=sentinel.search),
            call.read(sentinel.handle)]

def test_df_reader_iter_all_ok(self):
    etag = md5()
    fcont = "123456789"
    etag.update(fcont)
    etag.update(fcont)
    etag.update(fcont)
    meta = {"name": self._obj_name(),
            "Content-Length": 3 * len(fcont),
            "ETag": etag.hexdigest()}
    self.ioctx.get_xattr.return_value = pickle.dumps(meta)
    self.ioctx.stat.return_value = (3 * len(fcont), 0)
    success = False
    try:
        with self.df.open():
            rdr = self.df.reader()
            num_chunks = 0
            self.ioctx.read.return_value = fcont
            for chunk in rdr:
                num_chunks += 1
                assert chunk == fcont
                if num_chunks == 3:
                    self.ioctx.read.return_value = None
            assert num_chunks == 3
            success = True
    except Exception:
        pass
    finally:
        assert success
    self._assert_if_rados_opened_closed()
    # check read calls
    call_list = [
        call.read(self._obj_name(), offset=0),
        call.read(self._obj_name(), offset=len(fcont)),
        call.read(self._obj_name(), offset=2 * len(fcont)),
        call.read(self._obj_name(), offset=3 * len(fcont)),
    ]
    self.ioctx.assert_has_calls(call_list)
    # if everything is perfect, the object will not be deleted
    assert self.ioctx.remove_object.call_count == 0

def test_suggest(self, model):
    with patch('cpg_islands.models.Entrez') as mock_entrez:
        mock_entrez.espell.return_value = sentinel.handle
        mock_entrez.read.return_value = {
            'CorrectedQuery': sentinel.corrected_query}
        suggestion = model.suggest(sentinel.text)
        assert suggestion == sentinel.corrected_query
        assert mock_entrez.mock_calls == [
            call.espell(db='pubmed', term=sentinel.text),
            call.read(sentinel.handle)]

def test_search(self, model):
    with patch('cpg_islands.models.Entrez') as mock_entrez:
        mock_entrez.esearch.return_value = sentinel.handle
        mock_entrez.read.return_value = {
            'IdList': sentinel.id_list,
            'QueryTranslation': sentinel.query_translation}
        results = model.search(sentinel.search)
        assert results == (sentinel.id_list, sentinel.query_translation)
        assert mock_entrez.mock_calls == [
            call.esearch(db='nucleotide', term=sentinel.search),
            call.read(sentinel.handle)]

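# A minimal sketch of a `search` implementation consistent with the tests
# above: Entrez.esearch() against the nucleotide database, the handle parsed
# with Entrez.read(), and the id list plus query translation returned.
# Storing the id list on `self._id_list` is an assumption, chosen to line up
# with the `get_seq_record` sketch earlier.
def search(self, query):
    handle = Entrez.esearch(db='nucleotide', term=query)
    result = Entrez.read(handle)
    self._id_list = result['IdList']
    return result['IdList'], result['QueryTranslation']
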
def test_read_all_reads_all_memories(self):
    # Pick any offset, but do not use 0 to check that the offset is ignored
    offset = 0x1000
    # Pick any number of bytes to check that numbytes is ignored
    numbytes = 256
    # Just pick any device with more than one memory segment
    device = 'atmega4809'
    device_info = deviceinfo.getdeviceinfo(device)
    device_meminfo = deviceinfo.DeviceMemoryInfo(device_info)
    mock_nvmaccessprovider = self._mock_nvmaccessprovider()
    mock_nvmaccessprovider.read.side_effect = self._read_memory_stub

    self.programmer.load_device(device)
    self.programmer.setup_device()

    memory_read_list = self.programmer.read_memory(
        memory_name=MemoryNameAliases.ALL, offset=offset, numbytes=numbytes)

    memories = device_meminfo.mem_by_name.keys()
    read_calls = []
    for memory_name_expected in memories:
        meminfo_expected = device_meminfo.memory_info_by_name(
            memory_name_expected)
        found_memory = False
        for memory_read_tuple in memory_read_list:
            if memory_read_tuple.memory_info[
                    DeviceMemoryInfoKeys.NAME] == memory_name_expected:
                data_expected = self._generate_dummydata(
                    meminfo_expected[DeviceMemoryInfoKeys.SIZE])
                found_memory = True
                data_read = memory_read_tuple.data
                meminfo_read = memory_read_tuple.memory_info
                self.assertEqual(data_read, data_expected)
                self.assertEqual(meminfo_read, meminfo_expected)
                read_calls.append(
                    call.read(meminfo_expected, 0,
                              meminfo_expected[DeviceMemoryInfoKeys.SIZE]))
        self.assertTrue(
            found_memory,
            msg="Did not find {} memory in returned data".format(
                memory_name_expected))

    mock_nvmaccessprovider.assert_has_calls(read_calls)

def test_df_reader_iter_invalid_etag(self):
    etag = md5()
    fcont = '123456789'
    etag.update(fcont)
    meta = {'name': self._obj_name(),
            'Content-Length': 3 * len(fcont),
            'ETag': etag.hexdigest()}
    self.ioctx.get_xattr.return_value = pickle.dumps(meta)
    self.ioctx.stat.return_value = (len(fcont) * 3, 0)
    success = False
    try:
        with self.df.open():
            rdr = self.df.reader()
            num_chunks = 0
            self.ioctx.read.return_value = fcont
            for chunk in rdr:
                num_chunks += 1
                assert chunk == fcont
                if num_chunks == 3:
                    self.ioctx.read.return_value = None
            assert num_chunks == 3
            success = True
    except Exception:
        pass
    finally:
        assert success
    self._assert_if_rados_opened_closed()
    # check read calls
    call_list = [call.read(self._obj_name(), offset=0),
                 call.read(self._obj_name(), offset=len(fcont)),
                 call.read(self._obj_name(), offset=2 * len(fcont)),
                 call.read(self._obj_name(), offset=3 * len(fcont))]
    self.ioctx.assert_has_calls(call_list)
    self.ioctx.remove_object.assert_called_once_with(self._obj_name())

def test_read_close(self):
    m_select = self._patch('select.select')
    m_read = self._patch('os.read')
    rfd = 42
    m_out = self._make_mock()
    m_select.return_value = ([rfd], [], [])
    m_read.return_value = ''
    self.m.add_source(rfd, m_out, permanent=False)
    cont = self.m.process_events()
    self._assertCallsEqual(m_select,
                           [call([rfd], [], [], SELECT_INTERVAL)])
    self._assertCallsEqual(m_read, [call.read(rfd, BUFSIZE)])
    self._assertCallsEqual(m_out, [call.finish()])
    self.assertEqual(False, cont)

def test_update_existing_file_files_match_force_overrides_update():
    # test that adding `force=True` and `update=True` forces overwriting of the
    # remote file, since `force=True` overrides `update=True`
    new_file_url = ('https://files.osf.io/v1/resources/9zpcy/providers/' +
                    'osfstorage/foo123/')
    store = Storage({})
    store._new_file_url = new_file_url

    def simple_OSFCore_put(url, params=None, data=None):
        if url == new_file_url:
            return FakeResponse(409, None)
        elif url.endswith("osfstorage/foo.txt"):
            return FakeResponse(200, None)

    store._files_url = 'https://api.osf.io/v2/nodes/f3szh/files/osfstorage'
    json = fake_responses.files_node('f3szh', 'osfstorage',
                                     file_names=['hello.txt', 'foo.txt'])
    for i_file in range(2):
        json['data'][i_file]['attributes']['extra']['hashes']['md5'] = '0' * 32
    top_level_response = FakeResponse(200, json)

    def simple_OSFCore_get(url):
        if url == store._files_url:
            return top_level_response

    def simple_checksum(file_path):
        return '0' * 32

    fake_fp = MagicMock()
    fake_fp.mode = 'rb'

    with patch.object(OSFCore, '_put',
                      side_effect=simple_OSFCore_put) as fake_put:
        with patch.object(OSFCore, '_get',
                          side_effect=simple_OSFCore_get) as fake_get:
            with patch('osfclient.models.storage.checksum',
                       side_effect=simple_checksum):
                store.create_file('foo.txt', fake_fp, force=True, update=True)

    assert fake_fp.call_count == 0
    assert call.read(1) in fake_fp.mock_calls
    # should have made two PUT requests, first attempt at uploading then
    # to update the file, even though they match, since force=True overrides
    # update=True
    assert fake_put.call_count == 2
    # should have made one GET request to list files
    assert fake_get.call_count == 1

def test_update_existing_file_overrides_connection_error():
    # successful upload even on connection error if update=True
    new_file_url = ('https://files.osf.io/v1/resources/9zpcy/providers/' +
                    'osfstorage/foo123/')
    store = Storage({})
    store._new_file_url = new_file_url

    def simple_OSFCore_put(url, params=None, data=None):
        if url == new_file_url:
            raise ConnectionError
        elif url.endswith("osfstorage/foo.txt"):
            return FakeResponse(200, None)

    def simple_checksum(file_path):
        return '0' * 32

    store._files_url = 'https://api.osf.io/v2/nodes/f3szh/files/osfstorage'
    json = fake_responses.files_node('f3szh', 'osfstorage',
                                     file_names=['hello.txt', 'foo.txt'])
    top_level_response = FakeResponse(200, json)

    def simple_OSFCore_get(url):
        if url == store._files_url:
            return top_level_response

    fake_fp = MagicMock()
    fake_fp.mode = 'rb'

    with patch.object(OSFCore, '_put',
                      side_effect=simple_OSFCore_put) as fake_put:
        with patch.object(OSFCore, '_get',
                          side_effect=simple_OSFCore_get) as fake_get:
            with patch('osfclient.models.storage.checksum',
                       side_effect=simple_checksum):
                store.create_file('foo.txt', fake_fp, update=True)

    assert fake_fp.call_count == 0
    assert call.read(1) in fake_fp.mock_calls
    # should have made two PUT requests, first attempt at uploading then
    # to update the file
    assert fake_put.call_count == 2
    # should have made one GET request to list files
    assert fake_get.call_count == 1

def test_force_existing_file():
    # test that adding `force=True` lets you overwrite existing remote files
    new_file_url = ('https://files.osf.io/v1/resources/9zpcy/providers/' +
                    'osfstorage/foo123/')
    store = Storage({})
    store._new_file_url = new_file_url

    def simple_OSFCore_put(url, params=None, data=None):
        if url == new_file_url:
            return FakeResponse(409, None)
        elif url.endswith("osfstorage/foo.txt"):
            return FakeResponse(200, None)

    store._files_url = 'https://api.osf.io/v2/nodes/f3szh/files/osfstorage'
    json = fake_responses.files_node('f3szh', 'osfstorage',
                                     file_names=['hello.txt', 'foo.txt'])
    top_level_response = FakeResponse(200, json)

    def simple_OSFCore_get(url):
        if url == store._files_url:
            return top_level_response

    fake_fp = MagicMock()
    fake_fp.mode = 'rb'

    with patch.object(OSFCore, '_put',
                      side_effect=simple_OSFCore_put) as fake_put:
        with patch.object(OSFCore, '_get',
                          side_effect=simple_OSFCore_get) as fake_get:
            store.create_file('foo.txt', fake_fp, force=True)

    assert fake_fp.call_count == 0
    assert call.read(1) in fake_fp.mock_calls
    # should have made two PUT requests, first attempt at uploading then
    # to update the file
    assert fake_put.call_count == 2
    # should have made one GET request to list files
    assert fake_get.call_count == 1

def test_normal_use(self, model):
    with patch('cpg_islands.models.Entrez') as mock_entrez:
        with patch('cpg_islands.models.SeqIO') as mock_seqio:
            # call previously necessary methods
            mock_entrez.read.return_value = {
                'IdList': [sentinel._, sentinel._,
                           sentinel.chosen_id, sentinel._],
                'QueryTranslation': sentinel._}
            model.search(sentinel._)
            handle = MagicMock()
            mock_entrez.efetch.return_value = handle
            mock_seqio.read.return_value = sentinel.record
            record = model.get_seq_record(2)
            assert record == sentinel.record
            # We don't care about the things that were done with the
            # Entrez module earlier in `model.search()', so just assert
            # that `Entrez.efetch()' has been called correctly.
            mock_entrez.efetch.assert_called_once_with(
                db='nucleotide', id=sentinel.chosen_id,
                rettype='gb', retmode='text')
            assert mock_seqio.mock_calls == [call.read(handle, 'genbank')]

def test_provide_config_parser(self, config_parser_class_mock):
    config_parser = self.config_module.provide_config_parser()

    assert_that(config_parser, is_(config_parser_class_mock.return_value))
    assert_that(config_parser_class_mock.mock_calls, has_item(call()))
    assert_that(config_parser.mock_calls,
                has_item(call.read('/etc/blitzortung.conf')))

def test_provide_config_parser(self, config_parser_class_mock):
    config_parser = self.config_module.provide_config_parser()

    assert_that(config_parser).is_equal_to(config_parser_class_mock.return_value)
    assert_that(config_parser_class_mock.mock_calls).contains(call())
    assert_that(config_parser.mock_calls).contains(
        call.read('/etc/blitzortung.conf'))

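# A minimal sketch of a `provide_config_parser` that satisfies both assertion
# styles above: instantiate the (mocked) ConfigParser class once, point it at
# /etc/blitzortung.conf, and return the instance. The exact class reference
# used by blitzortung is an assumption here.
def provide_config_parser():
    config_parser = ConfigParser()                 # recorded as call()
    config_parser.read('/etc/blitzortung.conf')    # recorded as call.read(...)
    return config_parser
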