def test_RequestDatasetCollection(self):
    """Collection assigns sequential identifiers, rejects duplicate names, and keys by alias."""
    env.DIR_DATA = ocgis.env.DIR_TEST_DATA
    daymet = self.test_data.get_rd('daymet_tmax')
    tas = self.test_data.get_rd('cancm4_tas')
    uris = [daymet.uri, tas.uri]

    # Unique variable names update cleanly and receive sequential dataset identifiers.
    variables = ['foo1', 'foo2']
    rdc = RequestDatasetCollection()
    for uri, variable in zip(uris, variables):
        rdc.update(RequestDataset(uri, variable))
    self.assertEqual([1, 2], [rd.did for rd in rdc])

    # A duplicate variable name (hence a duplicate key) raises KeyError on the second update.
    variables = ['foo1', 'foo1']
    rdc = RequestDatasetCollection()
    for idx, (uri, variable) in enumerate(zip(uris, variables)):
        request = RequestDataset(uri, variable)
        if idx == 1:
            with self.assertRaises(KeyError):
                rdc.update(request)
        else:
            rdc.update(request)

    # Distinct aliases disambiguate otherwise identical variable names.
    aliases = ['a1', 'a2']
    for uri, variable, alias in zip(uris, variables, aliases):
        rdc.update(RequestDataset(uri, variable, alias=alias))

    # Iteration plus integer and string indexing all yield request datasets.
    for element in rdc:
        self.assertIsInstance(element, RequestDataset)
    self.assertIsInstance(rdc[0], RequestDataset)
    self.assertIsInstance(rdc['a2'], RequestDataset)
def test_set_unique_id(self):
    """A unique identifier may be set and read back on both request datasets and fields."""
    rd = self.test_data.get_rd('cancm4_tas')
    field = rd.get()
    for target in (rd, field):
        RequestDatasetCollection._set_unique_id_(target, 5)
        self.assertEqual(RequestDatasetCollection._get_unique_id_(target), 5)
def test_iter_request_datasets(self):
    """Iteration yields only request datasets; field members are skipped."""
    rd = self.test_data.get_rd('cancm4_tas')
    field = rd.get()
    field.name = 'foo'
    rdc = RequestDatasetCollection(target=[rd, field])
    yielded = list(rdc.iter_request_datasets())
    # The field member is excluded from iteration but still counts toward the length.
    self.assertEqual(len(yielded), 1)
    self.assertEqual(len(rdc), 2)
    self.assertIsInstance(yielded[0], RequestDataset)
def __init__(self, init_value):
    """Coerce ``init_value`` into a :class:`RequestDatasetCollection` before delegating upward.

    Accepted forms: another instance of this class (state is shared directly), a
    ``RequestDatasetCollection`` (deep-copied), a ``RequestDataset``/``dict``/``Field``
    (wrapped in a one-element sequence), a ``list``/``tuple`` of such elements, or an
    ``ESMF.Field`` when the optional ESMF library is importable. Any other type raises
    :class:`DefinitionValidationError`.
    """
    if isinstance(init_value, self.__class__):
        # Allow the dataset object to be initialized by an instance of itself.
        self.__dict__ = init_value.__dict__
    else:
        if init_value is not None:
            if isinstance(init_value, RequestDatasetCollection):
                # Deep copy so later mutation of the collection does not affect the caller.
                init_value = deepcopy(init_value)
            else:
                if isinstance(init_value, (RequestDataset, dict, Field)):
                    itr = [init_value]
                elif type(init_value) in [list, tuple]:
                    itr = init_value
                else:
                    # Last resort: the value may be an ESMF field. Only suppress the
                    # validation error if the ESMF conversion actually succeeds.
                    should_raise = True
                    try:
                        import ESMF
                    except ImportError:
                        # ESMF is not a required library
                        ocgis_lh('Could not import ESMF library.', level=logging.WARN)
                    else:
                        if isinstance(init_value, ESMF.Field):
                            from ocgis.regrid.base import get_ocgis_field_from_esmf_field
                            field = get_ocgis_field_from_esmf_field(init_value)
                            itr = [field]
                            should_raise = False
                    if should_raise:
                        raise DefinitionValidationError(self, 'Type not accepted: {0}'.format(type(init_value)))
                rdc = RequestDatasetCollection()
                for rd in itr:
                    # Fields are added by reference; request datasets are copied to
                    # insulate the collection from caller-side mutation.
                    if not isinstance(rd, Field):
                        rd = deepcopy(rd)
                    try:
                        rdc.update(rd)
                    except NoDimensionedVariablesFound:
                        # Without a user-supplied name there is no way to key the
                        # element, so surface a validation error instead.
                        if rd._name is None:
                            msg = messages.M2.format(rd.uri)
                            raise DefinitionValidationError(self, msg)
                        else:
                            raise
                init_value = rdc
        else:
            # NOTE(review): no-op branch retained from the original; ``init_value`` is
            # already None here.
            init_value = init_value
        super(Dataset, self).__init__(init_value)
def test_with_overloads(self):
    """Overloaded temporal units/calendar pass through the collection to realized fields."""
    rd = self.test_data.get_rd('cancm4_tas')
    field = rd.get()
    # loaded calendar should match file metadata
    self.assertEqual(field.temporal.calendar, '365_day')
    # the overloaded calendar in the request dataset should still be None
    self.assertIsNone(rd.t_calendar)

    descriptor = {'uri': [rd.uri],
                  'variable': u'tas',
                  'alias': u'tas',
                  'time_region': None,
                  't_units': u'days since 1940-01-01 00:00:00',
                  't_calendar': u'will_not_work'}
    rdc = RequestDatasetCollection([descriptor])
    rd2 = RequestDataset(**descriptor)
    # the overloaded calendar should be passed to the request dataset
    self.assertEqual(rd2.t_calendar, 'will_not_work')
    self.assertEqual(rdc.first().t_calendar, 'will_not_work')
    # when this bad calendar value is used it should raise an exception
    with self.assertRaises(ValueError):
        rdc.first().get().temporal.value_datetime

    descriptor = {'uri': [rd.uri],
                  'variable': u'tas',
                  'alias': u'tas',
                  'time_region': None,
                  't_units': u'days since 1940-01-01 00:00:00'}
    rdc = RequestDatasetCollection([descriptor])
    # ensure the overloaded units are properly passed
    self.assertEqual(rdc.first().get().temporal.units, 'days since 1940-01-01 00:00:00')
    # the calendar was not overloaded and the value should be read from the metadata
    self.assertEqual(rdc.first().get().temporal.calendar, '365_day')
def __init__(self, arg):
    """Normalize *arg* to a :class:`RequestDatasetCollection` and delegate to the superclass.

    ``None`` and existing collections pass through untouched; single request
    datasets or dictionaries are wrapped in a list; any other iterable is
    consumed element-by-element into a new collection.
    """
    if arg is None or isinstance(arg, RequestDatasetCollection):
        init_value = arg
    else:
        # Wrap scalar inputs so a single code path handles collection assembly.
        if isinstance(arg, (RequestDataset, dict)):
            elements = [arg]
        else:
            elements = arg
        collection = RequestDatasetCollection()
        for element in elements:
            collection.update(element)
        init_value = collection
    super(Dataset, self).__init__(init_value)
def __init__(self, arg):
    """Normalize *arg* to a :class:`RequestDatasetCollection`, drop any cached dataset
    handles, and delegate to the superclass.
    """
    if arg is not None:
        if isinstance(arg, RequestDatasetCollection):
            init_value = arg
        else:
            # Wrap scalar inputs so a single loop handles collection assembly.
            if isinstance(arg, RequestDataset):
                itr = [arg]
            elif isinstance(arg, dict):
                itr = [arg]
            else:
                itr = arg
            rdc = RequestDatasetCollection()
            for rd in itr:
                rdc.update(rd)
            init_value = rdc
        ## dereference any prior dataset connections
        # NOTE(review): placement reconstructed so the dereference applies to
        # passed-in collections as well as newly built ones — confirm against
        # project history.
        for rd in init_value:
            rd._ds = None
    else:
        init_value = arg
    super(Dataset, self).__init__(init_value)
def test_update(self):
    """Identifier propagation, name collisions, and auto-increment behavior of ``update``."""
    rd = self.test_data.get_rd('cancm4_tas')
    rd.did = 10
    field = rd.get()
    # the request dataset identifier propagates to the realized field
    self.assertEqual(field.uid, 10)
    field.uid = 20
    rdc = RequestDatasetCollection()
    rdc.update(rd)
    # name is already in collection and should yield a key error
    with self.assertRaises(KeyError):
        rdc.update(field)
    field.name = 'tas2'
    rdc.update(field)
    # add another object and check the increment
    second_field = deepcopy(field)
    second_field.name = 'hanzel'
    second_field.uid = None
    rdc.update(second_field)
    self.assertEqual(second_field.uid, 21)
def test_get_unique_id(self):
    """Identifier lookup returns the stored value, or the element has no identifier set."""
    rd = self.test_data.get_rd('cancm4_tas')
    field = rd.get()
    rd_with_did = deepcopy(rd)
    rd_with_did.did = 1
    field_with_uid = deepcopy(field)
    field_with_uid.uid = 1
    for target in [rd, field, rd_with_did, field_with_uid]:
        found = RequestDatasetCollection._get_unique_id_(target)
        try:
            self.assertEqual(found, 1)
        except AssertionError:
            # The identifier was never assigned: whichever attribute the element
            # type carries (``did`` for request datasets, ``uid`` for fields)
            # must be None.
            try:
                self.assertIsNone(target.did)
            except AttributeError:
                self.assertIsNone(target.uid)
def test_name_attribute_used_for_keys(self):
    """A custom ``name`` attribute becomes the collection key."""
    request = self.test_data.get_rd('cancm4_tas')
    request.name = 'hi_there'
    collection = RequestDatasetCollection(target=[request])
    self.assertEqual(collection.keys(), ['hi_there'])
def test_get_meta_rows(self):
    """Metadata rows are produced for every keyword combination."""
    for keywords in self.iter_keywords():
        collection = RequestDatasetCollection(target=keywords.target)
        self.assertGreaterEqual(len(collection._get_meta_rows_()), 1)