def setUp(self):
    """Build a template with fill-value test data.

    'X' marks missing values with a raw -999. sentinel plus a
    '_FillValue' attribute, while 'Y' carries the same gaps as a numpy
    masked array combined with the '_fill_value' alias attribute.
    """
    super(TestDataValues, self).setUp()
    # Raw array: the first five entries are the fill-value sentinel
    self.data_array = np.array([-999., -999., -999., -999., -999., 1., 2., 3., 4., 5])
    # Masked-array equivalent: the first five entries are masked out
    self.data_masked = np.ma.masked_array(
        [-4, -3, -2, -1, 0, 1., 2., 3., 4., 5],
        mask=[True, True, True, True, True, False, False, False, False, False])
    self.template = DatasetTemplate(
        dimensions={'TIME': 10},
        variables={
            # TIME uses NaN (not a fill value) for its two missing entries
            'TIME': {
                '_dimensions': ['TIME'],
                '_datatype': 'float32',
                '_data': np.array([np.nan, np.nan, 1, 2, 3, 4, 5, 6, 7, 8])
            },
            # netCDF-style '_FillValue' spelling
            'X': {
                '_dimensions': ['TIME'],
                '_datatype': 'float32',
                '_FillValue': -999.,
                '_data': self.data_array
            },
            # template-style '_fill_value' alias
            'Y': {
                '_dimensions': ['TIME'],
                '_datatype': 'float32',
                '_fill_value': -999.,
                '_data': self.data_masked
            }
        }
    )
def test_dimensionless_variable(self):
    """A variable declared without '_dimensions' is written as a scalar."""
    scalar_template = DatasetTemplate(
        variables={'X': {'_datatype': 'double', '_data': np.array(1)}}
    )
    scalar_template.to_netcdf(self.temp_nc_file)
    result = Dataset(self.temp_nc_file)
    # A scalar netCDF variable reports an empty dimensions tuple
    self.assertEqual((), result.variables['X'].dimensions)
def test_add_method(self):
    """Adding two templates merges dimensions, variables and attributes.

    Where both operands define the same key, the right-hand template
    takes precedence; variable definitions are merged key-by-key.
    """
    template1 = DatasetTemplate(dimensions={'ONE': 1},
                                variables={
                                    'X': {
                                        '_dimensions': ['ONE'],
                                        '_datatype': 'float32'
                                    },
                                    'Y': {
                                        '_dimensions': ['ONE'],
                                        '_datatype': 'float32'
                                    }
                                },
                                global_attributes={
                                    'title': 'First template',
                                    'comment': 'one'
                                })
    template2 = DatasetTemplate(dimensions={'TWO': 2},
                                variables={
                                    'Y': {
                                        '_dimensions': ['TWO'],
                                        'comment': 'updated'
                                    },
                                    'Z': {
                                        'name': 'new'
                                    }
                                },
                                global_attributes={
                                    'title': 'Second template',
                                    'version': 2
                                })
    template = template1 + template2

    # Dimensions from both operands are present
    self.assertEqual({'ONE': 1, 'TWO': 2}, template.dimensions)
    # Global attributes merged; template2 wins for the shared 'title' key
    self.assertEqual(
        {
            'title': 'Second template',
            'comment': 'one',
            'version': 2
        },
        template.global_attributes)
    self.assertSetEqual({'X', 'Y', 'Z'}, set(template.variables.keys()))
    # 'X' exists only in template1 and is carried over unchanged
    self.assertEqual({
        '_dimensions': ['ONE'],
        '_datatype': 'float32'
    }, template.variables['X'])
    # 'Y' exists in both: merged per key, template2 overrides '_dimensions'
    self.assertEqual(
        {
            '_dimensions': ['TWO'],
            '_datatype': 'float32',
            'comment': 'updated'
        }, template.variables['Y'])
    # 'Z' exists only in template2
    self.assertEqual({'name': 'new'}, template.variables['Z'])
def test_create_file(self):
    """End-to-end: fill a JSON template with data and write a NetCDF file.

    Verifies that dimensions, per-variable dimensions/dtype/attributes,
    variable values and global attributes all round-trip to disk.
    """
    template = DatasetTemplate.from_json(TEMPLATE_JSON)
    template.variables['TIME']['_data'] = self.values10
    template.variables['DEPTH']['_data'] = self.values1
    # TEMP is two-dimensional: (TIME, DEPTH)
    template.variables['TEMP']['_data'] = self.values10.reshape((10, 1))
    template.to_netcdf(self.temp_nc_file)
    dataset = Dataset(self.temp_nc_file)

    # Dimension names and sizes match the data that was assigned
    expected_dimensions = OrderedDict([
        ('TIME', len(self.values10)),
        ('DEPTH', len(self.values1))
    ])
    ds_dimensions = OrderedDict((k, v.size) for k, v in dataset.dimensions.items())
    self.assertEqual(expected_dimensions, ds_dimensions)

    # Each variable keeps its dimensions, datatype and metadata attributes
    for vname, vdict in self.variables.items():
        ds_var = dataset[vname]
        self.assertEqual(vdict['_dimensions'], list(ds_var.dimensions))
        self.assertEqual(vdict['_datatype'], ds_var.dtype)
        ds_var_attr = OrderedDict((k, ds_var.getncattr(k)) for k in ds_var.ncattrs())
        self.assertEqual(metadata_attributes(vdict), ds_var_attr)

    # Variable values round-trip unchanged
    self.assertTrue(all(dataset['TIME'] == self.values10))
    self.assertTrue(all(dataset['DEPTH'] == self.values1))
    self.assertTrue(all(dataset['TEMP'] == self.values10.reshape(10, 1)))

    # Global attributes round-trip unchanged
    ds_global_attributes = OrderedDict((k, dataset.getncattr(k)) for k in dataset.ncattrs())
    self.assertEqual(self.global_attributes, ds_global_attributes)
def test_init_from_partial_template(self):
    """A JSON template without dimensions still loads variables and attributes."""
    template = DatasetTemplate.from_json(TEMPLATE_PARTIAL_JSON)
    with open(TEMPLATE_PARTIAL_JSON) as stream:
        raw = json.load(stream, object_pairs_hook=OrderedDict)
    # No '_dimensions' section in the file -> empty dimensions dict
    self.assertEqual({}, template.dimensions)
    self.assertEqual(raw['_variables'], template.variables)
    self.assertEqual(metadata_attributes(raw), template.global_attributes)
def test_init_from_dicts(self):
    """The constructor stores the supplied dicts unchanged."""
    template = DatasetTemplate(
        dimensions=self.dimensions,
        variables=self.variables,
        global_attributes=self.global_attributes,
    )
    checks = (
        (self.dimensions, template.dimensions),
        (self.variables, template.variables),
        (self.global_attributes, template.global_attributes),
    )
    for expected, actual in checks:
        self.assertEqual(expected, actual)
def test_add_variable_attributes(self):
    """Attributes added via update() merge with those from the template."""
    template = DatasetTemplate.from_json(TEMPLATE_PARTIAL_JSON)
    new_attrs = [('units', 'Kelvin'), ('comment', 'ok')]
    template.variables['TEMP'].update(new_attrs)
    # Existing attribute stays first; new ones are appended in order
    expected = OrderedDict(
        [('standard_name', 'sea_water_temperature')] + new_attrs)
    self.assertEqual(expected, template.variables['TEMP'])
def test_ensure_completeness(self):
    """ensure_completeness() fills in missing variable properties.

    A missing '_dimensions' defaults to [], a missing '_datatype' is
    inferred from the data, and '_data' is coerced to an ndarray.
    A variable with neither datatype nor data raises ValidationError.
    """
    template = DatasetTemplate(dimensions={'X': 1})
    template.variables = {
        'A': {'_dimensions': ['X'], '_datatype': 'float32', '_data': [12.3]},
        'B': {'_dimensions': ['X'], '_data': [12.3]},
        'X': {'_dimensions': ['X'], '_data': self.values1},
        'Y': {'_datatype': 'float32', '_data': None}
    }
    template.ensure_completeness()

    # 'A' was already complete: values preserved, data coerced to ndarray
    self.assertEqual(['X'], template.variables['A']['_dimensions'])
    self.assertEqual(np.dtype('float32'), template.variables['A']['_datatype'])
    self.assertEqual([12.3], template.variables['A']['_data'])
    self.assertIsInstance(template.variables['A']['_data'], np.ndarray)
    # 'B' had no datatype: inferred (float64) from its Python float data
    self.assertEqual(np.dtype('float64'), template.variables['B']['_datatype'])
    # 'X' had no datatype: taken directly from the ndarray's dtype object
    self.assertIs(self.values1.dtype, template.variables['X']['_datatype'])
    # 'Y' had no dimensions: defaults to an empty list (scalar)
    self.assertEqual([], template.variables['Y']['_dimensions'])

    # No datatype, and no data to infer one from -> error
    template.variables = {'Z': {'_dimensions': [], '_data': None}}
    self.assertRaisesRegexp(ValidationError,
                            r"No data type information for variable 'Z'",
                            template.ensure_completeness)

    # '_data' key missing entirely -> error
    template.variables = {'Z': {'_dimensions': []}}
    self.assertRaisesRegexp(ValidationError,
                            r"No data specified for variable 'Z'",
                            template.ensure_completeness)
class TestDataValues(TemplateTestCase):
    """Tests for fill-value handling and data-range calculation."""

    def setUp(self):
        super(TestDataValues, self).setUp()
        # Raw array: the first five entries are the -999. fill sentinel
        self.data_array = np.array([-999., -999., -999., -999., -999., 1., 2., 3., 4., 5])
        # Masked-array equivalent: the first five entries are masked out
        self.data_masked = np.ma.masked_array(
            [-4, -3, -2, -1, 0, 1., 2., 3., 4., 5],
            mask=[True, True, True, True, True, False, False, False, False, False])
        self.template = DatasetTemplate(
            dimensions={'TIME': 10},
            variables={
                # TIME uses NaN (not a fill value) for its missing entries
                'TIME': {
                    '_dimensions': ['TIME'],
                    '_datatype': 'float32',
                    '_data': np.array([np.nan, np.nan, 1, 2, 3, 4, 5, 6, 7, 8])
                },
                # netCDF-style '_FillValue' spelling with sentinel data
                'X': {
                    '_dimensions': ['TIME'],
                    '_datatype': 'float32',
                    '_FillValue': -999.,
                    '_data': self.data_array
                },
                # template-style '_fill_value' alias with masked-array data
                'Y': {
                    '_dimensions': ['TIME'],
                    '_datatype': 'float32',
                    '_fill_value': -999.,
                    '_data': self.data_masked
                }
            }
        )

    def test_fill_values(self):
        """Sentinel values ('X') and masked values ('Y') read back masked."""
        self.template.to_netcdf(self.temp_nc_file)
        dataset = Dataset(self.temp_nc_file)
        dataset.set_auto_mask(True)
        for varname in ('X', 'Y'):
            dsvar = dataset.variables[varname]
            self.assertEqual(-999., dsvar._FillValue)
            # Auto-masking turns fill values into a masked array on read
            self.assertIsInstance(dsvar[:], np.ma.MaskedArray)
            self.assertTrue(dsvar[:5].mask.all())
            self.assertTrue((dsvar[5:] == self.data_array[5:]).all())

    def test_fill_value_aliases(self):
        """'_FillValue' and '_fill_value' may coexist only when equal."""
        self.template.variables['X']['_fill_value'] = -999.  # both aliases, but equal so should still work
        self.template.to_netcdf(self.temp_nc_file)
        dataset = Dataset(self.temp_nc_file)
        self.assertEqual(-999., dataset.variables['X']._FillValue)

        # Deleting the cached path gives us a fresh temp file for the next write
        del self._temp_nc_file

        self.template.variables['X']['_fill_value'] = -666.  # now they're different, which is an error
        self.assertRaises(ValueError, self.template.to_netcdf, self.temp_nc_file)

    def test_get_data_range(self):
        """get_data_range() is expected to skip NaN, fill and masked entries."""
        self.assertEqual((1, 8), self.template.get_data_range('TIME'))
        self.assertEqual((1, 5), self.template.get_data_range('X'))
        self.assertEqual((1, 5), self.template.get_data_range('Y'))
def test_create_empty_variable(self):
    """'_data': None writes a variable consisting entirely of fill values."""
    template = DatasetTemplate(dimensions={'X': 10})
    template.variables['X'] = {'_dimensions': ['X'], '_datatype': 'float32'}
    self.assertRaises(ValidationError, template.to_netcdf, self.temp_nc_file)  # not providing '_data' is an error

    del self._temp_nc_file  # Get a new temp file

    template.variables['X']['_data'] = None  # This is ok, it's a shortcut for all fill values
    template.to_netcdf(self.temp_nc_file)
    dataset = Dataset(self.temp_nc_file)
    dataset.set_auto_mask(True)
    dsx = dataset.variables['X']
    # With auto-masking on, an all-fill variable reads back fully masked
    self.assertIsInstance(dsx[:], np.ma.MaskedArray)
    self.assertTrue(dsx[:].mask.all())
def test_init_from_dicts_validation(self):
    """Each kind of malformed constructor argument raises ValidationError."""
    bad_kwargs = (
        {'dimensions': 'X'},                     # not a mapping
        {'dimensions': {'TIME': -1}},            # negative dimension size
        {'variables': 'TEMP'},                   # not a mapping
        {'variables': {'_TEMP': {}}},            # leading underscore is reserved
        {'global_attributes': 'title'},          # not a mapping
        {'global_attributes': {'title': None}},  # attribute value missing
    )
    for kwargs in bad_kwargs:
        with self.assertRaises(ValidationError):
            DatasetTemplate(**kwargs)
class TestDataValues(TemplateTestCase):
    """Tests for fill values, data ranges and attribute dtype conversion."""

    def setUp(self):
        super(TestDataValues, self).setUp()
        # Raw array: the first five entries are the -999 fill sentinel
        self.data_array = np.array(
            [-999., -999., -999., -999., -999., 1., 2., 3., 4., 5])
        # Masked-array equivalent: the first five entries are masked out
        self.data_masked = np.ma.masked_array(
            [-4, -3, -2, -1, 0, 1., 2., 3., 4., 5],
            mask=[
                True, True, True, True, True, False, False, False, False,
                False
            ])
        self.template = DatasetTemplate(
            dimensions={'TIME': 10},
            variables={
                # TIME uses NaN (not a fill value) for its missing entries
                'TIME': {
                    '_dimensions': ['TIME'],
                    '_datatype': 'float64',
                    'valid_min': 0,
                    'valid_max': 10,
                    '_data': np.array([np.nan, np.nan, 1, 2, 3, 4, 5, 6, 7, 8])
                },
                # netCDF-style '_FillValue' spelling (int attribute on a
                # float32 variable, to exercise attribute dtype conversion)
                'X': {
                    '_dimensions': ['TIME'],
                    '_datatype': 'float32',
                    'valid_min': 1,
                    'valid_max': 5,
                    '_FillValue': -999,
                    '_data': self.data_array
                },
                # template-style '_fill_value' alias with masked-array data
                'Y': {
                    '_dimensions': ['TIME'],
                    '_datatype': 'float32',
                    'valid_range': [-4, 5],
                    '_fill_value': -999,
                    '_data': self.data_masked
                },
                # integer variable, also for attribute dtype conversion
                'N': {
                    '_dimensions': ['TIME'],
                    '_datatype': 'int32',
                    'valid_range': [-4, 5],
                    '_fill_value': -999,
                    '_data': self.data_array
                }
            })

    def test_fill_values(self):
        """Sentinel values ('X') and masked values ('Y') read back masked."""
        self.template.to_netcdf(self.temp_nc_file)
        dataset = Dataset(self.temp_nc_file)
        dataset.set_auto_mask(True)
        for varname in ('X', 'Y'):
            dsvar = dataset.variables[varname]
            self.assertEqual(-999., dsvar._FillValue)
            # Auto-masking turns fill values into a masked array on read
            self.assertIsInstance(dsvar[:], np.ma.MaskedArray)
            self.assertTrue(dsvar[:5].mask.all())
            self.assertTrue((dsvar[5:] == self.data_array[5:]).all())

    def test_fill_value_aliases(self):
        """'_FillValue' and '_fill_value' may coexist only when equal."""
        self.template.variables['X'][
            '_fill_value'] = -999.  # both aliases, but equal so should still work
        self.template.to_netcdf(self.temp_nc_file)
        dataset = Dataset(self.temp_nc_file)
        self.assertEqual(-999., dataset.variables['X']._FillValue)

        # Deleting the cached path gives a fresh temp file for the next write
        del self._temp_nc_file

        self.template.variables['X'][
            '_fill_value'] = -666.  # now they're different, which is an error
        self.assertRaises(ValueError, self.template.to_netcdf,
                          self.temp_nc_file)

    def test_get_data_range(self):
        """get_data_range() is expected to skip NaN, fill and masked entries."""
        self.assertEqual((1, 8), self.template.get_data_range('TIME'))
        self.assertEqual((1, 5), self.template.get_data_range('X'))
        self.assertEqual((1, 5), self.template.get_data_range('Y'))

    def test_var_attr_datatype_conversion(self):
        """
        test to check the conversion of some attributes matches the datatype
        of the variable as defined in the template
        """
        self.template.to_netcdf(self.temp_nc_file)
        dataset = Dataset(self.temp_nc_file)

        # Scalar attributes must be converted to the variable's dtype
        TIME = dataset.variables['TIME']
        self.assertEqual(TIME.dtype, TIME.valid_min.dtype)
        self.assertEqual(TIME.dtype, TIME.valid_max.dtype)

        X = dataset.variables['X']
        self.assertEqual(X.dtype, X.valid_min.dtype)
        self.assertEqual(X.dtype, X.valid_max.dtype)
        self.assertEqual(X.dtype, X._FillValue.dtype)

        # Array-valued attributes (valid_range) must be converted too
        for v in ['Y', 'N']:
            var = dataset.variables[v]
            self.assertEqual(var.dtype, var.valid_range.dtype)
            self.assertEqual(var.dtype, var._FillValue.dtype)
def test_set_variable_values(self):
    """Data assigned to a variable's '_data' key is stored unchanged."""
    template = DatasetTemplate.from_json(TEMPLATE_JSON)
    template.variables['TEMP']['_data'] = self.values10
    stored = template.variables['TEMP']['_data']
    self.assertTrue(all(stored == self.values10))
def netcdf_writer(log_path, output_dir, ship_name, meta_path=None):
    """Create a SOOP-TMV NRT NetCDF file from a raw log file.

    Parses the log file, selects the trajectory or mooring template
    based on the product code embedded in the file name, attaches
    calibration coefficients from the optional metadata file, and
    writes the resulting NetCDF file into output_dir.

    :param log_path: path to the NRT log file to convert
    :param output_dir: directory the NetCDF file is written into
    :param ship_name: vessel name for the global attributes
    :param meta_path: optional path to a JSON calibration metadata file
                      (lines containing '#' are treated as comments);
                      falsy values (None, '', []) mean "no metadata"
    :return: path of the created NetCDF file
    :raises InvalidFileNameError: if the product code is not recognised
    :raises ValueError: if the measurement frequency is not 1 or 10 seconds
    """
    # Map calibration 'item' codes in the metadata file to the NetCDF
    # variable each calibration applies to.
    item_to_var = {'ETMP': 'TEMP', 'ESAL': 'PSAL', 'ETURB': 'TURB', 'EFLO': 'CPHL'}
    calibrations = {}  # variable name -> (a0, a1)
    # NOTE: default changed from a mutable [] to None; any falsy value is
    # still treated as "no metadata file", so callers are unaffected.
    if meta_path:
        with open(meta_path, 'r') as f:
            # Drop comment lines (anything containing '#') before parsing
            meta_data = json.loads('\n'.join(
                row for row in f.readlines() if len(row.split('#')) == 1))
        for entry in meta_data['calibration']:
            var_name = item_to_var.get(entry['item'])
            if var_name is not None:
                calibrations[var_name] = (float(entry['a0']), float(entry['a1']))

    df = parse_log_file(log_path)
    df = transform_count_to_real_val(df)

    # The product code in the file name decides which template to use
    log_filename = os.path.basename(log_path)
    fields = get_pattern_subgroups_from_string(log_filename, SOOP_NRT_LOG_PATTERN)
    product_code = fields['product_code']

    if product_code in ['D2M', 'M2D', 'S2M', 'M2S']:
        product_type = "transect"
        feature_type = "trajectory"
        template = DatasetTemplate.from_json(NC_JSON_TEMPLATE_TRAJECTORY)
    elif product_code in ['DEV', 'MEL', 'SYD']:
        product_type = "mooring"
        feature_type = "timeSeries"
        template = DatasetTemplate.from_json(NC_JSON_TEMPLATE_MOORING)
    else:
        raise InvalidFileNameError(
            "SOOP NRT input logfile has incorrect product_code '{product_code}'. Not belonging to any of "
            "('D2M', 'M2D', 'S2M', 'M2S','DEV', 'MEL', 'SYD').".format(
                product_code=product_code))

    template.global_attributes.update({'product_type': product_type})

    # Convert timestamps to numeric time using the template's units/calendar
    time_val_dateobj = date2num(df.index.to_pydatetime(),
                                template.variables['TIME']['units'],
                                template.variables['TIME']['calendar'])

    # replace all nan with FillValue from template value
    df.replace(np.nan, template.variables['LATITUDE']['_FillValue'], inplace=True)

    template.variables['TIME']['_data'] = time_val_dateobj
    for var_name in ('LATITUDE', 'LONGITUDE', 'TEMP', 'PSAL', 'TURB', 'CPHL'):
        template.variables[var_name]['_data'] = df[var_name].values

    # Attach calibration coefficients parsed from the metadata file (if any)
    calibration_comment = 'Value=a0 + a1 x raw_value'
    for var_name, (a0, a1) in calibrations.items():
        template.variables[var_name]['a0'] = a0
        template.variables[var_name]['a1'] = a1
        template.variables[var_name]['calibration_comment'] = calibration_comment

    measurement_frequency = get_measurement_frequency(df)
    if measurement_frequency == 1:
        measurement_frequency_str = '1sec'
    elif measurement_frequency == 10:
        measurement_frequency_str = '10secs'
    else:
        # Previously an unexpected frequency fell through and crashed later
        # with a NameError; fail early with a clear message instead.
        raise ValueError(
            "Unexpected measurement frequency {freq}; expected 1 or 10 seconds".format(
                freq=measurement_frequency))

    template.global_attributes.update({
        'time_coverage_start': df.index.strftime('%Y-%m-%dT%H:%M:%SZ')[0],
        'time_coverage_end': df.index.strftime('%Y-%m-%dT%H:%M:%SZ')[-1],
        'featureType': feature_type,
        'date_created': datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
        'platform_code': SHIP_CODE,
        'vessel_name': ship_name,
        'geospatial_lat_min': df.LATITUDE.dropna().min(),
        'geospatial_lat_max': df.LATITUDE.dropna().max(),
        'geospatial_lon_min': df.LONGITUDE.dropna().min(),
        'geospatial_lon_max': df.LONGITUDE.dropna().max(),
        'measurement_frequency': measurement_frequency_str,
        'history': "File created {date_created}".format(
            date_created=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"))
    })

    # 1-second products carry extra sensor calibration attributes
    if measurement_frequency == 1:
        template.variables['CPHL'].update({
            'calibration_blank': CHLU_PARAMS['blank'],
            'calibration_scale': CHLU_PARAMS['scale']
        })
        template.variables['TURB'].update({
            'calibration_blank': TURB_PARAMS['blank'],
            'calibration_scale': TURB_PARAMS['scale']
        })

    nc_filename = 'IMOS_SOOP-TMV_TSUB_{time_start}_{vessel_code}_FV0{product_number}_{product_type}-{product_code}_END-{time_end}.nc'.format(
        time_start=df.index.strftime('%Y%m%dT%H%M%SZ')[0],
        time_end=df.index.strftime('%Y%m%dT%H%M%SZ')[-1],
        vessel_code=SHIP_CODE,
        product_number=0,
        product_type=product_type,
        product_code=product_code)

    netcdf_path = os.path.join(output_dir, nc_filename)
    template.to_netcdf(netcdf_path)
    return netcdf_path
def test_init_empty(self):
    """A template built with no arguments starts completely empty."""
    empty = DatasetTemplate()
    for container in (empty.dimensions, empty.variables,
                      empty.global_attributes):
        self.assertEqual({}, container)
def test_ensure_consistency(self):
    """ensure_consistency() reconciles dimension sizes with data shapes.

    Unset dimension sizes (None) are filled in from the data; declared
    sizes that conflict with data shapes, and references to undefined
    dimensions, raise errors.
    """
    # Scalar variable: nothing to reconcile
    template = DatasetTemplate()
    scalar = {'_dimensions': [], '_data': np.array(1)}
    template.variables = {'SCALAR': scalar}
    template.ensure_consistency()
    self.assertEqual({}, template.dimensions)
    self.assertIs(scalar, template.variables['SCALAR'])

    # Data length already matches the declared size: nothing changes
    template = DatasetTemplate(dimensions={'TEN': 10})
    var_10 = {'_dimensions': ['TEN'], '_data': self.values10}
    template.variables = {'TEN': var_10}
    template.ensure_consistency()
    self.assertEqual({'TEN': 10}, template.dimensions)
    self.assertIs(var_10, template.variables['TEN'])

    # A None-sized dimension is set from the data length
    template = DatasetTemplate(dimensions={'X': None})
    var_12 = {'_dimensions': ['X'], '_data': np.arange(12)}
    template.variables = {'X': var_12}
    template.ensure_consistency()
    self.assertEqual({'X': 12}, template.dimensions)
    self.assertIs(var_12, template.variables['X'])

    # A variable with no data is consistent with any dimension size
    empty = {'_dimensions': ['X'], '_data': None}
    template.variables['EMPTY'] = empty
    template.ensure_consistency()
    self.assertEqual({'X': 12}, template.dimensions)
    self.assertIs(empty, template.variables['EMPTY'])

    template.variables['X']['_data'] = self.values1
    self.assertRaisesRegexp(ValueError,
                            'inconsistent with dimension sizes defined in template',
                            template.ensure_consistency)  # now should fail because dim X is already set

    # Referencing a dimension that was never declared is an error
    template.variables = {
        'Z': {'_dimensions': ["NOSUCHTHING"], '_data': self.values10}
    }
    self.assertRaisesRegexp(ValidationError, 'undefined dimensions',
                            template.ensure_consistency)

    # Number of declared dimensions must match the data's number of axes
    template.variables = {
        'W': {'_dimensions': ['X'], '_data': np.arange(20).reshape((10, 2))}
    }
    self.assertRaisesRegexp(ValueError,
                            "Variable 'W' has 1 dimensions, but value array has 2 dimensions.",
                            template.ensure_consistency)
def netcdf_writer(netcdf_file_path, dataf, dtime, time, src_file,
                  platform_code, ship_callsign_ls):
    """
    Create the netcdf file

    :param netcdf_file_path: destination path for the NetCDF file
    :param dataf: pandas DataFrame of parsed underway CO2 measurements
    :param dtime: sequence of datetimes covering the voyage
    :param time: numeric time values for the TIME variable
    :param src_file: path of the original source file (recorded as attribute)
    :param platform_code: vessel platform code ('VLMJ' or 'VNAA' handled)
    :param ship_callsign_ls: mapping of platform code -> vessel name
    :return: netcdf_file_path
    """
    vessel_name = ship_callsign_ls[platform_code]
    template = DatasetTemplate.from_json(NC_JSON_TEMPLATE)

    # write voyage specific attributes
    template.global_attributes.update({
        'title': "IMOS SOOP Underway CO2 dataset measured onboard the %s "
                 "between the %s and %s" %
                 (vessel_name, min(dtime).strftime("%d-%b-%Y %H:%M:%S"),
                  max(dtime).strftime("%d-%b-%Y %H:%M:%S")),
        'date_created': datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
        'history': 'file created on {date}'.format(
            date=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")),
        'abstract': "This dataset contains underway CO2 measurements collected onboard the {vessel_name} "
                    "between the {start_date} and {end_date}".format(
                        vessel_name=vessel_name,
                        start_date=min(dtime).strftime("%d-%b-%Y %H:%M:%S"),
                        end_date=max(dtime).strftime("%d-%b-%Y %H:%M:%S")),
        'time_coverage_start': min(dtime).strftime("%Y-%m-%dT%H:%M:%SZ"),
        'time_coverage_end': max(dtime).strftime("%Y-%m-%dT%H:%M:%SZ"),
        # nan-safe min/max of the GPS positions
        'geospatial_lat_min': np.nanmin(np.array(dataf['GpsShipLatitude'])),
        'geospatial_lat_max': np.nanmax(np.array(dataf['GpsShipLatitude'])),
        'geospatial_lon_min': np.nanmin(np.array(dataf['GpsShipLongitude'])),
        'geospatial_lon_max': np.nanmax(np.array(dataf['GpsShipLongitude'])),
        'geospatial_vertical_min': 0.,
        'geospatial_vertical_max': 0.,
        'vessel_name': vessel_name,
        'platform_code': platform_code,
        'sourceFilename': os.path.basename(src_file)
    })

    if platform_code == 'VLMJ':
        # VLMJ carries an extra seawater-flow sensor not in the base template
        template.variables.update({
            "LabMain_sw_flow_raw": {
                "_datatype": "float64",
                "_dimensions": ["TIME"],
                "long_name": "Seawater flow in main laboratory",
                "reference_datum": "sea surface",
                "units": "l min-1",
                "coordinates": "TIME LATITUDE LONGITUDE"
            }
        })
        # NOTE(review): this assignment appears redundant — it is
        # unconditionally overwritten by the 'WSPD_raw' assignment below,
        # which applies the same knots -> m s-1 conversion. Confirm and remove.
        template.variables['WSPD_raw']['_data'] = dataf[
            'MetTrueWindSpKts'].multiply(0.514444)

    # replace nans with fillvalue in dataframe
    dataf.fillna(value=float(-999.), inplace=True)

    # Can use either PCDate/Time or GPS. Decided to use PCDate/Time as it
    # simplifies the code
    template.variables['TIME']['_data'] = time
    template.variables['LATITUDE']['_data'] = dataf['GpsShipLatitude'].values
    template.variables['LONGITUDE']['_data'] = dataf['GpsShipLongitude'].values

    # create fixed length strings padded with space
    # create variable of type string, then convert to array of char
    string_10_dim = template.dimensions['string_10']
    # convert to array of char
    type_tmp = stringtochar(
        np.array(dataf['Type'], 'S{dimelen}'.format(dimelen=string_10_dim)))
    template.variables['TYPE']['_data'] = type_tmp

    template.variables['TEQ_raw']['_data'] = dataf['EquTemp'].values
    template.variables['CO2_STD_Value']['_data'] = dataf['CO2StdValue'].values
    template.variables['xCO2_PPM_raw']['_data'] = dataf['CO2um_m'].values
    template.variables['xH2O_PPT_raw']['_data'] = dataf['H2Omm_m'].values
    template.variables['Press_Licor_raw']['_data'] = dataf[
        'DryBoxDruckPress'].values
    template.variables['Diff_Press_Equ_raw']['_data'] = dataf[
        'EquPress'].values
    template.variables['H2O_flow_raw']['_data'] = dataf['EquH2OFlow'].values
    template.variables['Licor_flow_raw']['_data'] = dataf['LicorFlow'].values
    template.variables['TEMP_raw']['_data'] = dataf['IntakeShipTemp'].values
    template.variables['WSPD_raw']['_data'] = dataf[
        'MetTrueWindSpKts'].values * 0.514444  # WSP converted to m s-1
    template.variables['WDIR_raw']['_data'] = dataf['MetTrueWindDir'].values
    template.variables['ATMP_raw']['_data'] = dataf['AtmSeaLevelPress'].values

    # platform-specific thermosalinograph variables
    if platform_code == 'VLMJ':
        template.variables['TEMP_Tsg_raw']['_data'] = dataf[
            'TsgShipTemp'].values
        template.variables['Tsg_flow_raw']['_data'] = dataf[
            'TsgShipFlow'].values
        template.variables['LabMain_sw_flow_raw']['_data'] = dataf[
            'LabMainSwFlow'].values
        template.variables['PSAL_raw']['_data'] = dataf[
            'TsgShipSalinity'].values
    elif platform_code == 'VNAA':
        template.variables['TEMP_Tsg_raw']['_data'] = dataf[
            'TsgSbe45Temp'].values
        template.variables['PSAL_raw']['_data'] = dataf[
            'TsgSbe45Salinity'].values
        template.variables['Tsg_flow_raw']['_data'] = dataf['SBE45Flow'].values

    template.to_netcdf(netcdf_file_path)
    return netcdf_file_path
def test_close_file_on_exception(self):
    """The ncobj handle is released again when writing fails."""
    template = DatasetTemplate(variables={'Z': {}})
    self.assertIsNone(template.ncobj)
    # 'Z' is an empty definition, so to_netcdf must fail validation...
    with self.assertRaises(ValidationError):
        template.to_netcdf(self.temp_nc_file)
    # ...and the handle must be cleared afterwards, not left open
    self.assertIsNone(template.ncobj)
def test_init_from_json(self):
    """Loading the full JSON template reproduces the reference dicts."""
    loaded = DatasetTemplate.from_json(TEMPLATE_JSON)
    self.assertEqual(self.dimensions, loaded.dimensions)
    self.assertEqual(self.variables, loaded.variables)
    self.assertEqual(self.global_attributes, loaded.global_attributes)
def test_add_global_attributes(self):
    """Global attributes can be added to an empty template via update()."""
    empty = DatasetTemplate()
    empty.global_attributes.update(self.global_attributes)
    self.assertEqual(self.global_attributes, empty.global_attributes)
def test_change_dimensions(self):
    """Dimension sizes can be overwritten after loading from JSON."""
    template = DatasetTemplate.from_json(TEMPLATE_JSON)
    new_sizes = OrderedDict([('TIME', 100), ('DEPTH', 10)])
    for dim_name, dim_size in new_sizes.items():
        template.dimensions[dim_name] = dim_size
    self.assertEqual(new_sizes, template.dimensions)
def test_create_empty_file(self):
    """An empty template can still be written out and re-opened."""
    empty = DatasetTemplate()
    empty.to_netcdf(self.temp_nc_file)
    # Re-opening must succeed; the file just contains no content
    Dataset(self.temp_nc_file)
def test_add_variables(self):
    """Variables added to a partial template coexist with existing ones."""
    template = DatasetTemplate.from_json(TEMPLATE_PARTIAL_JSON)
    time_var = self.variables['TIME']
    template.variables['TIME'] = time_var
    self.assertSetEqual({'TEMP', 'TIME'}, set(template.variables))
    self.assertEqual(time_var, template.variables['TIME'])
def test_add_variable_dimensions(self):
    """A variable's dimension list can be replaced after loading."""
    template = DatasetTemplate.from_json(TEMPLATE_PARTIAL_JSON)
    new_dims = ['TIME', 'DEPTH']
    template.variables['TEMP']['_dimensions'] = new_dims
    self.assertEqual(new_dims, template.variables['TEMP']['_dimensions'])