def test_generate_dataset_domain_slices(self):
    """generate_dataset_domain should echo the dataset's slices verbatim."""
    dataset = mock.MagicMock()
    dataset.variable_name = 'tas'
    dataset.temporal = slice(1, 20)
    dataset.spatial = {'lat': slice(20, 40), 'lon': slice(0, 100)}

    domain = file_manager.DataSetCollection().generate_dataset_domain(dataset)

    self.assertEqual(domain, {
        'variable': 'tas',
        'temporal': slice(1, 20),
        'spatial': {'lat': slice(20, 40), 'lon': slice(0, 100)},
    })
def test_add(self):
    """add() does not deduplicate: three identical variables yield three datasets."""
    collection = file_manager.DataSetCollection()

    for _ in range(3):
        collection.add(cwt.Variable('file:///test1.nc', 'tas'))

    self.assertEqual(len(collection.datasets), 3)
def test_get_cache_entry(self):
    """With no Cache rows present, get_cache_entry returns None."""
    # sha256 requires bytes (Python 3); use the hex digest as the uid,
    # consistent with test_check_cache_replace_file_obj in this module.
    uid_hash = hashlib.sha256('file:///test1.nc:tas'.encode()).hexdigest()

    collection = file_manager.DataSetCollection()

    entry = collection.get_cache_entry(uid_hash, {})

    self.assertIsNone(entry)
def test_get_cache_entry_invalid(self):
    """A Cache row whose stored dimensions do not validate yields no entry."""
    # sha256 requires bytes (Python 3); store the hex digest as the uid,
    # consistent with the other models.Cache fixtures in this module.
    uid_hash = hashlib.sha256('file:///test1.nc:tas'.encode()).hexdigest()

    models.Cache.objects.create(uid=uid_hash,
                                url='file:///test1.nc',
                                dimensions=json.dumps({}))

    collection = file_manager.DataSetCollection()

    entry = collection.get_cache_entry(uid_hash, {})

    self.assertIsNone(entry)
def test_partitions(self):
    # Two datasets with different time units: partitions() should process
    # them ordered by base units, convert each partition's time axis to the
    # common (earliest) units, write each chunk to the cache, and yield
    # (dataset, chunk) pairs in that order.
    domain = cwt.Domain([
        cwt.Dimension('time', 20, 100),
        cwt.Dimension('lat', 0, 90),
    ])

    mock_cache = mock.MagicMock()

    mock_cache_obj = mock.MagicMock()

    collection = file_manager.DataSetCollection()

    # Stub the cache lookup so every dataset appears cache-writable.
    collection.check_cache = mock.MagicMock(return_value=(mock_cache, mock_cache_obj))

    # dataset1 has the later base units ('days since 2000').
    mock_dataset1 = mock.MagicMock()
    mock_dataset1.variable_name = 'tas'
    mock_dataset1.get_time.return_value.units = 'days since 2000'
    mock_dataset1.partitions.return_value = [
        mock.MagicMock(),
    ]

    # dataset2 has the earlier base units ('days since 1990').
    mock_dataset2 = mock.MagicMock()
    mock_dataset2.variable_name = 'tas'
    mock_dataset2.get_time.return_value.units = 'days since 1990'
    mock_dataset2.partitions.return_value = [
        mock.MagicMock(),
    ]

    collection.datasets = [
        mock_dataset1,
        mock_dataset2,
    ]

    result = [x for x in collection.partitions(domain, False)]

    # Both partitions' time axes are converted to the earliest units.
    mock_dataset1.partitions.return_value[0].getTime.return_value.toRelativeTime.assert_called_with('days since 1990')
    mock_dataset2.partitions.return_value[0].getTime.return_value.toRelativeTime.assert_called_with('days since 1990')

    # dataset2 is processed first, so its chunk is written before dataset1's.
    mock_cache_obj.write.assert_has_calls([
        mock.call(mock_dataset2.partitions.return_value[0], id='tas'),
        mock.call(mock_dataset1.partitions.return_value[0], id='tas'),
    ])

    mock_cache_obj.sync.assert_called()

    mock_cache.set_size.assert_called()

    # Each dataset maps the requested domain using the common base units.
    mock_dataset1.map_domain.assert_called_with(domain, 'days since 1990')
    mock_dataset2.map_domain.assert_called_with(domain, 'days since 1990')

    # The collection's dataset list itself is reordered (earliest units first).
    self.assertEqual(collection.datasets[0], mock_dataset2)
    self.assertEqual(collection.datasets[1], mock_dataset1)

    # Yielded pairs follow the same reordered sequence.
    self.assertEqual(result[0], (mock_dataset2, mock_dataset2.partitions.return_value[0]))
    self.assertEqual(result[1], (mock_dataset1, mock_dataset1.partitions.return_value[0]))
def test_context_manager(self, mock_open):
    """Entering and leaving the collection opens and closes every dataset."""
    collection = file_manager.DataSetCollection()

    for _ in range(3):
        collection.add(cwt.Variable('file:///test1.nc', 'tas'))

    with collection:
        pass

    self.assertEqual(3, mock_open.call_count)
    self.assertEqual(3, mock_open.return_value.close.call_count)
def test_get_base_units(self):
    """get_base_units() reports the units of the dataset's time axis."""
    dataset = mock.MagicMock()
    dataset.get_time.return_value.units = 'days since 1990'

    collection = file_manager.DataSetCollection()
    collection.datasets.append(dataset)

    self.assertEqual(collection.get_base_units(), 'days since 1990')
def test_check_cache(self, mock_open):
    """A cache miss creates one entry and opens its local file for writing."""
    dataset = file_manager.DataSet(cwt.Variable('file:///test1.nc', 'tas'))

    collection = file_manager.DataSetCollection()
    # Force a miss so check_cache must create the entry itself.
    collection.get_cache_entry = mock.MagicMock(return_value=None)

    with self.assertNumQueries(1):
        cache, cache_obj = collection.check_cache(dataset)

    mock_open.assert_called_with(cache.local_path, 'w')

    self.assertIsNotNone(cache_obj)
def test_check_cache_error_opening(self, mock_open):
    """check_cache returns None when opening the cache file raises CDMSError."""
    mock_open.side_effect = cdms2.CDMSError('some error')

    dataset = file_manager.DataSet(cwt.Variable('file:///test1.nc', 'tas'))

    collection = file_manager.DataSetCollection()
    # Force a miss so the open failure happens on the freshly created entry.
    collection.get_cache_entry = mock.MagicMock(return_value=None)

    with self.assertNumQueries(2):
        result = collection.check_cache(dataset)

    self.assertIsNone(result)
def test_get_variable_name(self):
    """FileManager reports the variable name of its first collection's dataset."""
    manager = file_manager.FileManager([])

    collection = file_manager.DataSetCollection()
    collection.datasets = mock.MagicMock()
    collection.datasets.__getitem__.return_value.variable_name = 'tas'

    manager.collections.append(collection)

    self.assertEqual(manager.get_variable_name(), 'tas')
def test_get_cache_entry_valid(self, mock_valid, mock_superset):
    """Lookup with a valid, non-superset cache entry present.

    NOTE(review): despite the test name, the assertion expects None —
    confirm whether a valid entry should actually be returned here.
    """
    mock_superset.return_value = False

    mock_valid.return_value = True

    # sha256 requires bytes (Python 3); store the hex digest as the uid,
    # consistent with the other models.Cache fixtures in this module.
    uid_hash = hashlib.sha256('file:///test1.nc:tas'.encode()).hexdigest()

    models.Cache.objects.create(uid=uid_hash,
                                url='file:///test1.nc',
                                dimensions=json.dumps({}))

    collection = file_manager.DataSetCollection()

    entry = collection.get_cache_entry(uid_hash, {})

    self.assertIsNone(entry)
def test_check_cache_replace_file_obj(self, mock_open):
    """A cache hit reopens the cached file read-only and attaches it to the dataset."""
    # sha256 requires bytes input under Python 3 (no-op for ASCII on Python 2).
    uid_hash = hashlib.sha256('file:///test1.nc:tas'.encode()).hexdigest()

    cache_entry = models.Cache.objects.create(uid=uid_hash,
                                              url='file:///test1.nc',
                                              dimensions='{}')

    dataset = file_manager.DataSet(cwt.Variable('file:///test1.nc', 'tas'))

    collection = file_manager.DataSetCollection()
    # Return an existing entry so check_cache takes the hit path (no queries).
    collection.get_cache_entry = mock.MagicMock(return_value=cache_entry)

    with self.assertNumQueries(0):
        result = collection.check_cache(dataset)

    mock_open.assert_called_with(cache_entry.local_path, 'r')

    self.assertIsNone(result)

    self.assertEqual(dataset.file_obj, mock_open.return_value)