def identify_lat_lon(dataarray):
    """
    Identify the latitude and longitude dimensions of a dataarray using CF
    attributes.

    Args:
        dataarray: Source dataarray

    Returns:
        (lat, lon): Tuple of `xarray.Dataarray` for the latitude and
        longitude dimensions

    Todo:
        * Assumes latitude and longitude are unique
    """
    latitude = None
    longitude = None
    for coord in dataarray.coords.values():
        standard_name = coord.attrs.get("standard_name", "")
        axis = coord.attrs.get("axis", "")
        if (standard_name == "latitude"
                or Units(coord.attrs.get("units", "")).islatitude
                or axis == "Y"):
            latitude = coord
        if (standard_name == "longitude"
                or Units(coord.attrs.get("units", "")).islongitude
                or axis == "X"):
            longitude = coord
    if latitude is None or longitude is None:
        raise Exception("Couldn't identify horizontal coordinates")
    return (latitude, longitude)
def test_Units__hash__(self):
    # Every flavour of Units instance must hash to an int.
    samples = (
        Units('K'),
        Units(''),
        Units(),
        Units('days since 2000-01-01'),
        Units('days since 2000-01-01', calendar='360_day'),
    )
    for units in samples:
        self.assertIsInstance(hash(units), int)
def gallon2liter(value):
    """Convert a flow rate from gallons/day to liters/second.

    Args:
        value: Flow rate in gallons per day (number or numeric string).

    Returns:
        float: The equivalent rate in liters per second, or 0.0 for
        falsy input (0, None, empty string).
    """
    if value:
        # float(), not int(): the original truncated fractional input,
        # silently losing precision before conversion.
        # No inplace=True: a Python scalar is immutable, so an in-place
        # conversion is meaningless; conform simply returns the result.
        return Units.conform(float(value),
                             Units('gallon / day'),
                             Units('liter / second'))
    return 0.0
def get_are_units_equal_by_string_or_cfunits(source, target, try_cfunits=True):
    '''
    Test if unit definitions are equal.

    :param str source: String definition of the units to compare against.
    :param str target: Target units to test for equality.
    :param bool try_cfunits: If ``True`` attempt to import and use
        :class:`cfunits.Units` for equality operation.

    >>> get_are_units_equal_by_string_or_cfunits('K','K',try_cfunits=True)
    True
    >>> get_are_units_equal_by_string_or_cfunits('K','kelvin',try_cfunits=False)
    False
    >>> get_are_units_equal_by_string_or_cfunits('K','kelvin',try_cfunits=True)
    True
    '''
    if try_cfunits:
        try:
            from cfunits import Units
        except ImportError:
            # fall through to the plain string comparison below
            pass
        else:
            return Units(source).equals(Units(target))
    # cfunits unavailable or not requested: case-insensitive string match
    return source.lower() == target.lower()
def test_Units__hash__(self):
    """Tests the hash value generated by `__hash__` on `Units`."""
    cases = [
        Units("K"),
        Units(""),
        Units(),
        Units("days since 2000-01-01"),
        Units("days since 2000-01-01", calendar="360_day"),
    ]
    for u in cases:
        self.assertIsInstance(hash(u), int)
def wrap(self, units=None):
    """Return the current time, optionally converted to *units*.

    Args:
        units: Target time-unit string, or None to keep native units.
    """
    time = val_or_raise(func, (self._base, ))
    if units is not None:
        try:
            from_units = Units(self.get_time_units())
            to_units = Units(units)
        # BUGFIX: the Python-2 form `except AttributeError,
        # NotImplementedError` caught only AttributeError and rebound the
        # name NotImplementedError; a tuple catches both as intended.
        except (AttributeError, NotImplementedError):
            pass
        else:
            if not from_units.equals(to_units):
                time = Units.conform(time, from_units, to_units)
    # NOTE(review): the original ended without returning; a getter that
    # computes `time` and discards it is useless, and the sibling
    # `time_from`-style helpers return their value — confirm with callers.
    return time
def do_test(self):
    """Outputs with a recognised standard name must have units equivalent
    to the canonical units for that name."""
    component = init_module(module)
    for var in component.outputs:
        if not (hasattr(var, 'standard_name') and hasattr(var, 'units')):
            continue
        declared = Units(var.units)
        try:
            canonical = Units(names[var.standard_name])
        except KeyError:
            # Invalid standard name
            continue
        self.assertTrue(declared.equivalent(canonical))
def is_longitude(coord):
    """Return True if *coord* looks like a longitude coordinate.

    A coordinate qualifies when its ``units`` attribute parses as a
    longitude unit, or — only when it has no ``units`` attribute — when
    its CF ``standard_name`` attribute is ``"longitude"``.

    Args:
        coord: Coordinate object with an ``attrs`` mapping and an
            optional ``units`` attribute.

    Returns:
        bool: True for a longitude coordinate, otherwise False.
    """
    if hasattr(coord, "units"):
        if Units(coord.units).islongitude:
            return True
    elif coord.attrs.get("standard_name", None) == "longitude":
        return True
    # BUGFIX(robustness): return an explicit False instead of the
    # implicit None fall-through (both are falsy, so callers using the
    # result as a condition are unaffected).
    return False
def value_unit_format_conformity(resol_string):
    """Classify a resolution string.

    Returns:
        int: 4 = well-formed (or the literal 'point'),
             3 = unrecognised unit,
             2 = missing units,
             1 = missing value.
    """
    text = resol_string.strip()
    if text == 'point':
        return 4
    if not check_value_exists(text):
        # Missing value
        return 1
    val_unit_dict = split_value_unit(text)
    unit_valid = True
    for val, unit_list in val_unit_dict.items():
        if not unit_list:
            # Missing units
            return 2
        unit_valid = True
        for unit in unit_list:
            # "''" is accepted as an explicit "dimensionless" marker
            if not Units(unit).isvalid and not unit == "''":
                unit_valid = False
    if unit_valid:
        # All okay
        return 4
    # Unrecognized unit
    return 3
def get_value(self, name, out=None, units=None, angle=None, at=None,
              method=None):
    """Fetch values for *name*, with optional unit and angle conversion.

    Args:
        name: Variable name.
        out: Optional output buffer; allocated when None.
        units: Optional target units for the returned values.
        angle: Optional angle convention, 'azimuth' or 'math'.
        at: Optional time to interpolate the values to.
        method: Unused; kept for interface compatibility.

    Returns:
        ndarray of values (the *out* buffer).
    """
    if out is None:
        grid = self.get_var_grid(name)
        dtype = self.get_var_type(name)
        if dtype == "":
            raise ValueError("{name} not understood".format(name=name))
        loc = self.get_var_grid_loc(name)
        out = np.empty(self.get_grid_dim(grid, loc), dtype=dtype)

    self.bmi.get_value(name, out)

    if name in self._interpolators and at is not None:
        out[:] = self._interpolators[name].interpolate(at)

    from_units = Units(self.get_var_units(name))
    to_units = Units(units) if units is not None else from_units
    if units is not None and from_units != to_units:
        Units.conform(out, from_units, to_units, inplace=True)

    if angle not in ("azimuth", "math", None):
        raise ValueError("angle not understood")
    if angle == "azimuth" and "azimuth" not in name:
        transform_math_to_azimuth(out, to_units)
    elif angle == "math" and "azimuth" in name:
        transform_azimuth_to_math(out, to_units)

    return out
def test_Units_isvalid(self):
    # Valid specifications
    for u in (Units('m'),
              Units('days since 2019-01-01'),
              Units('days since 2019-01-01', calendar='360_day')):
        self.assertTrue(u.isvalid)
    # Invalid specifications
    for u in (Units('qwerty'),
              Units(1.0),
              Units([1.0, 'qwerty']),
              Units('since 2019-01-01'),
              Units('days since 2019-01-01', calendar='qwerty'),
              Units('since 2019-01-01', calendar='qwerty')):
        self.assertFalse(u.isvalid)
def get_value(self, name, out=None, units=None, angle=None, at=None,
              method=None):
    """Fetch values for *name*, optionally converting units and angle
    convention, and interpolating in time.

    Returns the *out* buffer (allocated here when not supplied).
    """
    if out is None:
        grid = self.get_var_grid(name)
        dtype = self.get_var_type(name)
        if dtype == '':
            raise ValueError('{name} not understood'.format(name=name))
        out = np.empty(self.get_grid_size(grid), dtype=dtype)

    bmi_call(self.bmi.get_value, name, out)

    if name in self._interpolators and at is not None:
        out[:] = self._interpolators[name].interpolate(at)

    from_units = Units(self.get_var_units(name))
    if units is None:
        to_units = from_units
    else:
        to_units = Units(units)
        if from_units != to_units:
            Units.conform(out, from_units, to_units, inplace=True)

    if angle not in ('azimuth', 'math', None):
        raise ValueError('angle not understood')
    if angle == 'azimuth' and 'azimuth' not in name:
        transform_math_to_azimuth(out, to_units)
    elif angle == 'math' and 'azimuth' in name:
        transform_azimuth_to_math(out, to_units)

    return out
def time_from(self, time, units):
    """Convert *time* given in *units* into this object's native time
    units.

    Returns *time* unchanged when *units* is None or the native units
    cannot be determined.
    """
    if units is None:
        return time
    try:
        native_units = self.time_units
    except (AttributeError, NotImplementedError):
        return time
    from_units = Units(units)
    to_units = Units(native_units)
    if not from_units.equals(to_units):
        time = Units.conform(time, from_units, to_units)
    return time
def test_Units_isvalid(self):
    """Tests the `isvalid` property on `Units`."""
    valid_specs = [
        Units("m"),
        Units("days since 2019-01-01"),
        Units("days since 2019-01-01", calendar="360_day"),
    ]
    invalid_specs = [
        Units("qwerty"),
        Units(1.0),
        Units([1.0, "qwerty"]),
        Units("since 2019-01-01"),
        Units("days since 2019-01-01", calendar="qwerty"),
        Units("since 2019-01-01", calendar="qwerty"),
    ]
    for units in valid_specs:
        self.assertTrue(units.isvalid)
    for units in invalid_specs:
        self.assertFalse(units.isvalid)
def convert_units(input_vals, input_unit, output_unit):
    """
    Convert the units of an array of values.

    Params
    -------
    input_vals : Numpy array
        Values to convert
    input_unit : str
        Unit corresponding to the input values
    output_unit : str
        Desired unit to convert the values to

    Return
    -------
    Numpy array
    """
    return Units.conform(input_vals, Units(input_unit), Units(output_unit))
def convert_cfunits(from_value, from_unit, wanted_unit=None, suppress_unit=False):
    """ Use the cfunits-python package to handle the interaction with udunits2 """
    try:
        from cfunits import Units
    except ImportError:
        raise Exception("cfunits option relies on cfunits-python package")

    was_zero = False
    if float(from_value) == 0:
        # special Zero handling: udunits2 chokes on 0, so convert 1 and
        # restore the zero afterwards (assumes a purely multiplicative
        # conversion, i.e. not temperature-like).
        if from_unit != '1':
            warnings.warn("""Found a 0, udunits2 breaks, so we are assuming that this is not a non-zero based conversion like temperature""")
        was_zero = True
        from_value = 1.

    if wanted_unit:
        return Units.conform(from_value, Units(from_unit), Units(wanted_unit))

    formatted = Units(" ".join((str(from_value), from_unit))).format()
    pieces = formatted.split(' ')
    if len(pieces) == 1:
        result = (1., pieces[0])
    elif was_zero:
        result = (0.0, pieces[1])
    else:
        result = (float(pieces[0]), pieces[1])

    if suppress_unit:
        return result[0]
    return result
def is_time(coord):
    """Return True if *coord* looks like a CF time coordinate
    (implicitly None otherwise)."""
    if coord.values.size > 1:
        # cftime datetime objects carry a `calendar` attribute
        first = coord.values[0]
        if hasattr(first, "calendar") and Units(calendar=first.calendar).isreftime:
            return True
    elif hasattr(coord, "axis"):
        if coord.axis == "T":
            return True
    elif coord.attrs.get("standard_name", None) == "time":
        return True
def identify_time(dataarray):
    """
    Identify the time dimension of a dataarray using CF attributes.

    Args:
        dataarray: Source dataarray

    Returns:
        :obj:`xarray.Dataarray` for the time dimension

    Todo:
        * Assumes time dimension is unique
    """
    for coord in dataarray.coords.values():
        attrs = coord.attrs
        if attrs.get("standard_name", "") == "time":
            return coord
        if Units(attrs.get("units", "")).isreftime:
            return coord
        # units may live in encoding rather than attrs after decoding
        if Units(coord.encoding.get("units", "")).isreftime:
            return coord
        if attrs.get("axis", "") == "T":
            return coord
    raise Exception("No time axis found")
def wrap(self, name, out=None, units=None):
    """Get a value by name.

    Parameters
    ----------
    name : str
        CSDMS standard name.
    out : ndarray, optional
        Buffer to place values.
    units : str, optional
        Convert units of the returned values.

    Returns
    -------
    ndarray
        Array of values (or *out*, if provided).
    """
    if out is None:
        grid = self.get_var_grid(name)
        dtype = self.get_var_type(name)
        if dtype == '':
            # BUGFIX: Python-2 print statement is a syntax error on
            # Python 3; use the function form.
            print(self.get_output_var_names())
            raise ValueError('{name} not understood'.format(name=name))
        out = np.empty(self.get_grid_size(grid), dtype=dtype)

    val_or_raise(func, (self._base, name, out))

    if units is not None:
        try:
            from_units = self.get_var_units(name)
        # BUGFIX: `except A, B` (Python 2) caught only AttributeError and
        # rebound NotImplementedError; a tuple catches both as intended.
        except (AttributeError, NotImplementedError):
            pass
        else:
            Units.conform(out, Units(from_units), Units(units),
                          inplace=True)

    # NOTE(review): the original ended without a return although the
    # docstring promises the array — returning *out* matches the sibling
    # get_value implementations; confirm against callers.
    return out
def main():
    """Set the units attribute on the data variable of every matching
    netCDF file under a directory tree, then refresh its metadata.

    Usage: <script> <root_dir> <file_template> <units>
    """
    assert len(sys.argv) == 4, \
        'Usage: %s <root_dir> <file_template> <units>' % sys.argv[0]
    root_dir = sys.argv[1]
    file_template = sys.argv[2]
    units = Units(sys.argv[3]).units  # This will fail for invalid units

    # BUGFIX: check_output returns bytes on Python 3 - decode before
    # splitting. Also use a raw string for the regex.
    found = subprocess.check_output(
        ['find', root_dir, '-name', file_template]).decode()
    nc_path_list = [filename
                    for filename in found.split('\n')
                    if re.search(r'\.nc$', filename)]

    for nc_path in nc_path_list:
        print('Setting units in %s' % nc_path)
        nc_dataset = netCDF4.Dataset(nc_path, 'r+')

        # Find variable with "grid_mapping" attribute - assumed to be 2D
        # data variable
        try:
            variable = [variable
                        for variable in nc_dataset.variables.values()
                        if hasattr(variable, 'grid_mapping')][0]
        # BUGFIX: narrowed from a bare `except:` - only an empty result
        # list is expected here; anything else should propagate.
        except IndexError:
            raise Exception(
                'Unable to determine data variable (must have "grid_mapping" attribute'
            )
        variable_name = variable.name
        variable.units = units
        nc_dataset.close()
        print('%s.variables["%s"].units = %s' % (nc_path, variable_name,
                                                 units))

        print('Updating metadata in %s' % nc_path)
        try:
            g2n_object = ERS2NetCDF()
            g2n_object.update_nc_metadata(nc_path, do_stats=True)

            # Kind of redundant, but possibly useful for debugging
            g2n_object.check_json_metadata()
        except Exception as e:
            # BUGFIX: e.message does not exist on Python 3; str(e) is
            # portable.
            print('Metadata update failed: %s' % str(e))
def test_get_field_with_overloaded_units(self):
    """A `conform_units_to` request must survive a time-region subset."""
    rd = self.test_data.get_rd('cancm4_tas',
                               kwds={'conform_units_to': 'celsius'})
    for preload in (False, True):
        field = rd.get()
        # conform units argument needs to be attached to a field variable
        self.assertEqual(field.variables['tas']._conform_units_to,
                         Units('celsius'))
        sub = field.get_time_region({'year': [2009], 'month': [5]})
        if preload:
            # if we wanted to load the data prior to subset then do so
            # and manually perform the units conversion
            expected = Units.conform(sub.variables['tas'].value,
                                     sub.variables['tas'].cfunits,
                                     Units('celsius'))
            # assert the conform attribute makes it though the subset
            self.assertEqual(sub.variables['tas']._conform_units_to,
                             Units('celsius'))
        value = sub.variables['tas'].value
        self.assertAlmostEqual(np.ma.mean(value), 5.921925206338206)
        self.assertAlmostEqual(np.ma.median(value), 10.745431900024414)
        if preload:
            # assert the manually converted array matches the loaded value
            self.assertNumpyAll(expected, value)
def test_Units_equivalent(self):
    """Tests `Units.equivalent` for dimensionless, physical, calendar and
    reference-time units."""
    equivalent_pairs = [
        (Units(), Units()),
        (Units(' '), Units()),
        (Units(''), Units()),
        (Units(), Units('')),
        (Units(), Units(' ')),
        (Units(''), Units('')),
        (Units(''), Units(' ')),
        (Units(''), Units('1')),
        (Units(''), Units('18')),
        (Units('18'), Units('1')),
        (Units('18'), Units('18')),
        (Units('1)'), Units('1')),
        (Units('m'), Units('m')),
        (Units('meter'), Units('km')),
        (Units('metre'), Units('mile')),
        (Units('s'), Units('h')),
        (Units('s'), Units('day')),
        (Units('second'), Units('month')),
        (Units(calendar='noleap'), Units(calendar='noleap')),
        (Units(calendar='noleap'), Units(calendar='365_day')),
        (Units(calendar='nOLEAP'), Units(calendar='365_dAY')),
        (Units('days since 2000-1-1'), Units('d since 2000-1-1 0:0')),
        (Units('days since 2000-1-1'), Units('h since 1234-1-1 0:0')),
        (Units('days since 2000-1-1'),
         Units('d since 2000-1-1 0:0', calendar='gregorian')),
        (Units('days since 2000-1-1'),
         Units('h since 1234-1-1 0:0', calendar='standard')),
        (Units('days since 2000-1-1', calendar='all_leap'),
         Units('d since 2000-1-1 0:0', calendar='366_day')),
        (Units('days since 2000-1-1', calendar='all_leap'),
         Units('h since 1234-1-1 0:0', calendar='366_day')),
    ]
    for lhs, rhs in equivalent_pairs:
        self.assertTrue(lhs.equivalent(rhs))

    # 'months' and 'days' reference-time units are not equal
    lhs = Units('days since 2000-02-02', calendar='standard')
    rhs = Units('months since 2000-02-02', calendar='standard')
    self.assertNotEqual(lhs, rhs)

    lhs = Units('days since 2000-02-02', calendar='standard')
    rhs = Units('months since 2000-02-02', calendar='gregorian')
    self.assertNotEqual(lhs, rhs)

    # invalid units are never equivalent, even to themselves
    for bad in (Units(1), Units(), Units(2), Units(''), Units(' '),
                Units('1')):
        self.assertFalse(bad.equivalent(Units(1)))
def test_Units_formatted(self):
    """Tests `formatted` and the constructor's formatting keywords."""
    u = Units('W')
    self.assertEqual(u.units, 'W')
    self.assertEqual(u.formatted(), 'W')
    self.assertEqual(u.formatted(names=True), 'watt')
    self.assertEqual(u.formatted(definition=True), 'm2.kg.s-3')
    self.assertEqual(u.formatted(names=True, definition=True),
                     'meter^2-kilogram-second^-3')

    self.assertEqual(Units('tsp').formatted(names=True),
                     '4.928921875e-06 meter^3')
    self.assertEqual(Units('tsp', names=True).units,
                     '4.928921875e-06 meter^3')

    # (spec, constructor keywords, expected .units)
    constructor_cases = [
        ('m/s', {'formatted': True}, 'm.s-1'),
        ('Watt', {'formatted': True}, 'W'),
        ('Watt', {'names': True}, 'watt'),
        ('Watt', {'definition': True}, 'm2.kg.s-3'),
        ('Watt', {'names': True, 'definition': True},
         'meter^2-kilogram-second^-3'),
        ('days since 1900-1-1 03:05', {'names': True},
         'day since 1900-01-01 03:05:00'),
        ('days since 1900-1-1 03:05', {'formatted': True},
         'd since 1900-01-01 03:05:00'),
        ('hours since 2100-1-1', {'calendar': 'noleap', 'names': True},
         'hour since 2100-01-01 00:00:00'),
        ('hours since 2100-1-1', {'calendar': 'noleap', 'formatted': True},
         'h since 2100-01-01 00:00:00'),
    ]
    for spec, kwargs, expected in constructor_cases:
        self.assertEqual(Units(spec, **kwargs).units, expected)

    self.assertEqual(Units('days since 1900-1-1 03:05').formatted(),
                     'd since 1900-01-01 03:05:00')
    self.assertEqual(
        Units('hours since 2100-1-1', calendar='noleap').formatted(),
        'h since 2100-01-01 00:00:00')
def do_test(self):
    """Every output variable that declares units must declare valid ones."""
    component = init_module(module)
    for var in component.outputs:
        if hasattr(var, 'units'):
            self.assertTrue(Units(var.units).isvalid)
def test_Units_has_offset(self):
    """Tests the `has_offset` property on `Units`."""
    without_offset = ('K', 'K @ 0', 'Watt', 'm2.kg.s-3', 'km', '1000 m',
                      '(K @ 273.15) m s-1', 'degC m s-1')
    with_offset = ('K @ 273.15', 'degC', 'degF', 'm2.kg.s-3 @ 3.14')
    for spec in without_offset:
        self.assertFalse(Units(spec).has_offset)
    for spec in with_offset:
        self.assertTrue(Units(spec).has_offset)
    # a compound unit absorbs the offset of its parts
    self.assertEqual(Units('degC m s-1'), Units('K m s-1'))
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
             rmMetadatas=None, **kwargs):
    """Build a VRT from a generic (net)CDF GDAL dataset.

    Collects bands from all subdatasets that match the first gridded
    subdataset, merges real/imaginary band pairs into complex bands,
    resolves projection/GCPs/geolocation, and fills in time-coverage and
    platform metadata.

    Raises:
        WrongMapperError: If *gdalMetadata* is empty.
    """
    # BUGFIX: mutable default argument replaced with the None-sentinel
    # pattern; behaviour for callers is unchanged.
    if rmMetadatas is None:
        rmMetadatas = ['NETCDF_VARNAME', '_Unsigned',
                       'ScaleRatio', 'ScaleOffset', 'dods_variable']

    # Remove 'NC_GLOBAL#' and 'GDAL_' and 'NANSAT_'
    # from keys in gdalDataset
    tmpGdalMetadata = {}
    geoMetadata = {}
    origin_is_nansat = False
    if not gdalMetadata:
        raise WrongMapperError
    for key in gdalMetadata:
        newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '')
        if 'NANSAT_' in newKey:
            geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key]
            origin_is_nansat = True
        else:
            tmpGdalMetadata[newKey] = gdalMetadata[key]
    gdalMetadata = tmpGdalMetadata
    fileExt = os.path.splitext(inputFileName)[1]

    # Get file names from dataset or subdataset
    subDatasets = gdalDataset.GetSubDatasets()
    if len(subDatasets) == 0:
        fileNames = [inputFileName]
    else:
        fileNames = [f[0] for f in subDatasets]

    # add bands with metadata and corresponding values to the empty VRT
    metaDict = []
    xDatasetSource = ''
    yDatasetSource = ''
    firstXSize = 0
    firstYSize = 0
    # initialised so the comparison below is safe before a gridded
    # subdataset has been found
    projection = ''
    for fileName in fileNames:
        subDataset = gdal.Open(fileName)
        # choose the first dataset with grid
        if (firstXSize == 0 and firstYSize == 0 and
                subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
            firstXSize = subDataset.RasterXSize
            firstYSize = subDataset.RasterYSize
            firstSubDataset = subDataset
            # get projection from the first subDataset
            projection = firstSubDataset.GetProjection()

        # take bands whose sizes are same as the first band.
        if (subDataset.RasterXSize == firstXSize and
                subDataset.RasterYSize == firstYSize):
            if projection == '':
                projection = subDataset.GetProjection()
            if ('GEOLOCATION_X_DATASET' in fileName or
                    'longitude' in fileName):
                xDatasetSource = fileName
            elif ('GEOLOCATION_Y_DATASET' in fileName or
                    'latitude' in fileName):
                yDatasetSource = fileName
            else:
                for iBand in range(subDataset.RasterCount):
                    subBand = subDataset.GetRasterBand(iBand + 1)
                    bandMetadata = subBand.GetMetadata_Dict()
                    if 'PixelFunctionType' in bandMetadata:
                        bandMetadata.pop('PixelFunctionType')
                    sourceBands = iBand + 1

                    # generate src metadata
                    src = {'SourceFilename': fileName,
                           'SourceBand': sourceBands}
                    # set scale ratio and scale offset
                    scaleRatio = bandMetadata.get(
                        'ScaleRatio',
                        bandMetadata.get(
                            'scale',
                            bandMetadata.get('scale_factor', '')))
                    if len(scaleRatio) > 0:
                        src['ScaleRatio'] = scaleRatio
                    scaleOffset = bandMetadata.get(
                        'ScaleOffset',
                        bandMetadata.get(
                            'offset',
                            bandMetadata.get('add_offset', '')))
                    if len(scaleOffset) > 0:
                        src['ScaleOffset'] = scaleOffset
                    # set DataType
                    src['DataType'] = subBand.DataType

                    # generate dst metadata:
                    # get all metadata from input band
                    dst = bandMetadata
                    # set wkv and bandname
                    dst['wkv'] = bandMetadata.get('standard_name', '')
                    # first, try the name metadata
                    if 'name' in bandMetadata:
                        bandName = bandMetadata['name']
                    else:
                        # if it doesn't exist get name from NETCDF_VARNAME
                        bandName = bandMetadata.get('NETCDF_VARNAME', '')
                        if len(bandName) == 0:
                            bandName = bandMetadata.get('dods_variable',
                                                        '')
                        # remove digits added by gdal in
                        # exporting to netcdf...
                        if (len(bandName) > 0 and origin_is_nansat and
                                fileExt == '.nc'):
                            if bandName[-1:].isdigit():
                                bandName = bandName[:-1]
                            if bandName[-1:].isdigit():
                                bandName = bandName[:-1]

                    # if still no bandname, create one
                    if len(bandName) == 0:
                        bandName = 'band_%03d' % iBand
                    dst['name'] = bandName

                    # remove non-necessary metadata from dst
                    for rmMetadata in rmMetadatas:
                        if rmMetadata in dst:
                            dst.pop(rmMetadata)

                    # append band with src and dst dictionaries
                    metaDict.append({'src': src, 'dst': dst})

    # create empty VRT dataset with geolocation only
    VRT.__init__(self, firstSubDataset, srcMetadata=gdalMetadata)

    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)

    # Create complex data bands from 'xxx_real' and 'xxx_imag' bands
    # using pixelfunctions
    rmBands = []
    for iBandNo in range(self.dataset.RasterCount):
        iBand = self.dataset.GetRasterBand(iBandNo + 1)
        iBandName = iBand.GetMetadataItem('name')
        # find real data band
        if iBandName.find("_real") != -1:
            realBandNo = iBandNo
            realBand = self.dataset.GetRasterBand(realBandNo + 1)
            realDtype = realBand.GetMetadataItem('DataType')
            bandName = iBandName.replace(iBandName.split('_')[-1],
                                         '')[0:-1]
            for jBandNo in range(self.dataset.RasterCount):
                jBand = self.dataset.GetRasterBand(jBandNo + 1)
                jBandName = jBand.GetMetadataItem('name')
                # find an imaginary data band corresponding to the real
                # data band and create complex data band from the bands
                if jBandName.find(bandName + '_imag') != -1:
                    imagBandNo = jBandNo
                    imagBand = self.dataset.GetRasterBand(imagBandNo + 1)
                    imagDtype = imagBand.GetMetadataItem('DataType')
                    dst = imagBand.GetMetadata()
                    dst['name'] = bandName
                    dst['PixelFunctionType'] = 'ComplexData'
                    dst['dataType'] = 10
                    src = [{'SourceFilename': fileNames[realBandNo],
                            'SourceBand': 1,
                            'DataType': realDtype},
                           {'SourceFilename': fileNames[imagBandNo],
                            'SourceBand': 1,
                            'DataType': imagDtype}]
                    self._create_band(src, dst)
                    self.dataset.FlushCache()
                    rmBands.append(realBandNo + 1)
                    rmBands.append(imagBandNo + 1)

    # Delete real and imaginary bands
    if len(rmBands) != 0:
        self.delete_bands(rmBands)

    if len(projection) == 0:
        # projection was not set automatically
        # get projection from GCPProjection
        projection = geoMetadata.get('GCPProjection', '')
    if len(projection) == 0:
        # no projection was found in dataset or metadata:
        # generate WGS84 by default
        projection = NSR().wkt
    # fix problem with MET.NO files where a, b given in m and XC/YC in km
    if ('UNIT["kilometre"' in projection and
        ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]' in
            projection):
        projection = projection.replace(
            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]', '')
    # set projection
    self.dataset.SetProjection(self.repare_projection(projection))

    # check if GCPs were added from input dataset
    gcps = firstSubDataset.GetGCPs()
    gcpProjection = firstSubDataset.GetGCPProjection()

    # if no GCPs in input dataset: try to add GCPs from metadata
    if not gcps:
        gcps = self.add_gcps_from_metadata(geoMetadata)
    # if yet no GCPs: try to add GCPs from variables
    if not gcps:
        gcps = self.add_gcps_from_variables(inputFileName)

    if gcps:
        if len(gcpProjection) == 0:
            # get GCP projection and repare
            gcpProjection = self.repare_projection(
                geoMetadata.get('GCPProjection', ''))
        # add GCPs to dataset
        self.dataset.SetGCPs(gcps, gcpProjection)
        self.dataset.SetProjection('')
        self._remove_geotransform()

    # Find proper bands and insert GEOLOCATION ARRAY into dataset
    if len(xDatasetSource) > 0 and len(yDatasetSource) > 0:
        self.add_geolocationArray(
            GeolocationArray(xDatasetSource, yDatasetSource))
    elif not gcps:
        # if no GCPs found and not GEOLOCATION ARRAY set:
        # Set Nansat Geotransform if it is not set automatically
        geoTransform = self.dataset.GetGeoTransform()
        if len(geoTransform) == 0:
            geoTransformStr = geoMetadata.get('GeoTransform',
                                              '(0|1|0|0|0|0|1)')
            # NOTE(review): eval() on file metadata is unsafe for
            # untrusted input; consider ast.literal_eval.
            geoTransform = eval(geoTransformStr.replace('|', ','))
            self.dataset.SetGeoTransform(geoTransform)

    subMetadata = firstSubDataset.GetMetadata()

    # GET START TIME from METADATA
    time_coverage_start = None
    if 'start_time' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['start_time'])
    elif 'start_date' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['start_date'])
    elif 'time_coverage_start' in gdalMetadata:
        time_coverage_start = parse_time(
            gdalMetadata['time_coverage_start'])

    # GET END TIME from METADATA
    # BUGFIX: these branches previously assigned time_coverage_start, so
    # the end time from metadata was never recorded and could clobber the
    # start time.
    time_coverage_end = None
    if 'stop_time' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['stop_time'])
    elif 'stop_date' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['stop_date'])
    elif 'time_coverage_stop' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['time_coverage_stop'])
    elif 'end_time' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['end_time'])
    elif 'end_date' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['end_date'])
    elif 'time_coverage_end' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['time_coverage_end'])

    # GET start time from time variable
    if (time_coverage_start is None and cfunitsInstalled and
            'time#standard_name' in subMetadata and
            subMetadata['time#standard_name'] == 'time' and
            'time#units' in subMetadata and
            'time#calendar' in subMetadata):
        # get data from netcdf data
        ncFile = netcdf_file(inputFileName, 'r')
        timeLength = ncFile.variables['time'].shape[0]
        timeValueStart = ncFile.variables['time'][0]
        timeValueEnd = ncFile.variables['time'][-1]
        ncFile.close()
        try:
            timeDeltaStart = Units.conform(
                timeValueStart,
                Units(subMetadata['time#units'],
                      calendar=subMetadata['time#calendar']),
                Units('days since 1950-01-01'))
        except ValueError:
            self.logger.error('calendar units are wrong: %s' %
                              subMetadata['time#calendar'])
        else:
            time_coverage_start = (
                datetime.datetime(1950, 1, 1) +
                datetime.timedelta(float(timeDeltaStart)))
            if timeLength > 1:
                # BUGFIX: convert the LAST time value for the coverage
                # end; the original converted timeValueStart again.
                timeDeltaEnd = Units.conform(
                    timeValueEnd,
                    Units(subMetadata['time#units'],
                          calendar=subMetadata['time#calendar']),
                    Units('days since 1950-01-01'))
            else:
                timeDeltaEnd = timeDeltaStart + 1
            time_coverage_end = (
                datetime.datetime(1950, 1, 1) +
                datetime.timedelta(float(timeDeltaEnd)))

    # finally set values of time_coverage start and end if available
    if time_coverage_start is not None:
        self.dataset.SetMetadataItem('time_coverage_start',
                                     time_coverage_start.isoformat())
    if time_coverage_end is not None:
        self.dataset.SetMetadataItem('time_coverage_end',
                                     time_coverage_end.isoformat())

    if 'sensor' not in gdalMetadata:
        self.dataset.SetMetadataItem('sensor', 'unknown')
    if 'satellite' not in gdalMetadata:
        self.dataset.SetMetadataItem('satellite', 'unknown')
    if 'source_type' not in gdalMetadata:
        self.dataset.SetMetadataItem('source_type', 'unknown')
    if 'platform' not in gdalMetadata:
        self.dataset.SetMetadataItem('platform', 'unknown')
    if 'instrument' not in gdalMetadata:
        self.dataset.SetMetadataItem('instrument', 'unknown')

    self.logger.info('Use generic mapper - OK!')
def test_Units_BINARY_AND_UNARY_OPERATORS(self):
    """Arithmetic between `Units` objects and scalars/other `Units`."""
    # binary operators with a scalar on the right
    self.assertEqual(Units('m') * 2, Units('2m'))
    self.assertEqual(Units('m') / 2, Units('0.5m'))
    self.assertEqual(Units('m') // 2, Units('0.5m'))
    self.assertEqual(Units('m') + 2, Units('m @ -2'))
    self.assertEqual(Units('m') - 2, Units('m @ 2'))
    self.assertEqual(Units('m') ** 2, Units('m2'))
    self.assertEqual(Units('m') ** -2, Units('m-2'))
    self.assertEqual(Units('m2') ** 0.5, Units('m'))

    # augmented assignment with a scalar rebinds to a new object
    result = Units('m')
    alias = result
    result *= 2
    self.assertEqual(result, Units('2m'))
    self.assertNotEqual(result, alias)

    result = Units('m')
    alias = result
    result /= 2
    self.assertEqual(result, Units('0.5m'))
    self.assertNotEqual(result, alias)

    result = Units('m')
    alias = result
    result //= 2
    self.assertEqual(result, Units('0.5m'))
    self.assertNotEqual(result, alias)

    result = Units('m')
    alias = result
    result += 2
    self.assertEqual(result, Units('m @ -2'))
    self.assertNotEqual(result, alias)

    result = Units('m')
    alias = result
    result -= 2
    self.assertEqual(result, Units('m @ 2'))
    self.assertNotEqual(result, alias)

    result = Units('m')
    alias = result
    result **= 2
    self.assertEqual(result, Units('m2'))
    self.assertNotEqual(result, alias)

    # reflected operators (scalar on the left)
    self.assertEqual(2 * Units('m'), Units('2m'))
    self.assertEqual(2 / Units('m'), Units('2 m-1'))
    self.assertEqual(2 // Units('m'), Units('2 m-1'))
    self.assertEqual(2 + Units('m'), Units('m @ -2'))
    self.assertEqual(2 - Units('m'), Units('-1 m @ -2'))

    # Units combined with Units
    self.assertEqual(Units('m') * Units('2m'), Units('2 m2'))
    self.assertEqual(Units('m') / Units('2m'), Units('0.5'))
    self.assertEqual(Units('m') // Units('2m'), Units('0.5'))

    # augmented assignment with a Units operand
    result = Units('m')
    alias = result
    result *= result
    self.assertEqual(result, Units('m2'))
    self.assertNotEqual(result, alias)

    result = Units('m')
    alias = result
    result /= result
    self.assertEqual(result, Units('1'))
    self.assertNotEqual(result, alias)

    result = Units('m')
    alias = result
    result //= result
    self.assertEqual(result, Units('1'))
    self.assertNotEqual(result, alias)

    # logarithmic units
    self.assertEqual(Units('m').log(10), Units('lg(re 1 m)'))
    self.assertEqual(Units('m').log(2), Units('lb(re 1 m)'))
    self.assertEqual(Units('m').log(math.e), Units('ln(re 1 m)'))
    self.assertEqual(
        Units('m').log(1.5), Units('2.46630346237643 ln(re 1 m)'))
def test_Units___eq__(self):
    """Equality and inequality of `Units` objects."""
    equal_pairs = [
        (Units(''), Units('')),
        (Units('18'), Units('18')),
        (Units('1'), Units('1')),
        (Units('m'), Units('m')),
        (Units('m'), Units('metres')),
        (Units('m'), Units('meTRES')),
        (Units('days since 2000-1-1'), Units('d since 2000-1-1 0:0')),
        (Units('days since 2000-1-1'),
         Units('d since 2000-1-1 0:0', calendar='gregorian')),
        (Units('days since 2000-1-1'),
         Units('d since 2000-1-1 0:0', calendar='standard')),
        (Units(calendar='noleap'), Units(calendar='noleap')),
        (Units(calendar='noleap'), Units(calendar='365_day')),
        (Units(calendar='nOLEAP'), Units(calendar='365_dAY')),
        (Units('days since 2000-1-1', calendar='all_leap'),
         Units('d since 2000-1-1 0:0', calendar='366_day')),
    ]
    for lhs, rhs in equal_pairs:
        self.assertEqual(lhs, rhs)

    unequal_pairs = [
        (Units('days since 2000-1-1'), Units('h since 1234-1-1 0:0')),
        (Units('days since 2000-1-1', calendar='all_leap'),
         Units('h since 2000-1-1 0:0', calendar='366_day')),
        # invalid units compare unequal, even to themselves
        (Units(1), Units(1)),
        (Units(1), Units(2)),
        (Units(1), Units()),
        (Units(1), Units('')),
        (Units(1), Units(' ')),
        (Units(1), Units('metre')),
    ]
    for lhs, rhs in unequal_pairs:
        self.assertNotEqual(lhs, rhs)
def test_Units_conform(self):
    """Tests `Units.conform` for scalars, sequences and ndarrays."""
    def assert_float64_array(result, expected):
        # common checks for array-valued conversions
        self.assertIsInstance(result, numpy.ndarray)
        self.assertEqual(result.dtype, numpy.dtype('float64'))
        self.assertTrue(numpy.allclose(result, expected))

    self.assertEqual(Units.conform(0.5, Units('km'), Units('m')), 500)
    self.assertEqual(Units.conform(360, Units('second'), Units('minute')),
                     6)

    seconds = Units('second')
    minutes = Units('minute')
    assert_float64_array(Units.conform([360], seconds, minutes), 6)
    assert_float64_array(Units.conform((360, 720), seconds, minutes),
                         [6, 12])
    assert_float64_array(Units.conform([360.0, 720.0], seconds, minutes),
                         [6, 12])
    assert_float64_array(Units.conform([[360, 720]], seconds, minutes),
                         [[6, 12]])

    values = numpy.array([360.0, 720.0])
    assert_float64_array(Units.conform(values, seconds, minutes), [6, 12])

    values = numpy.array([360, 720])
    converted = Units.conform(values, seconds, minutes, inplace=True)
    assert_float64_array(converted, [6, 12])
    # in-place conversion writes through to the source array
    self.assertTrue(numpy.allclose(converted, values))

    celsius = Units('degrees_C')
    fahrenheit = Units('degrees_F')
    converted = Units.conform(35, celsius, fahrenheit)
    self.assertIsInstance(converted, float)
    self.assertTrue(numpy.allclose(converted, 95))
    assert_float64_array(Units.conform([35], celsius, fahrenheit), 95)
    converted = Units.conform(35, celsius, fahrenheit, inplace=True)
    self.assertIsInstance(converted, float)
    self.assertTrue(numpy.allclose(converted, 95))
    assert_float64_array(
        Units.conform([35], celsius, fahrenheit, inplace=True), 95)

    # incompatible units must raise
    with self.assertRaises(ValueError):
        Units.conform(1, Units('m'), Units('second'))
def __init__(self, from_units):
    """Store *from_units* as a validated `Units` instance.

    Raises:
        ValueError: If *from_units* is not a valid unit specification.
    """
    self._units = Units(from_units)
    if self._units.isvalid:
        return
    raise ValueError("invalid units ({0})".format(from_units))
def _convert(val, from_units, to_units):
    """Convert *val* between unit specifications, returning it unchanged
    when the two specifications are already equal."""
    src = Units(from_units)
    dst = Units(to_units)
    if src.equals(dst):
        return val
    return Units.conform(val, src, dst)
def conform(*args, **kwargs):
    """Thin pass-through wrapper around `cfUnits.conform`."""
    return cfUnits.conform(*args, **kwargs)
def test_Units_BINARY_AND_UNARY_OPERATORS(self):
    """Operator coverage for `Units`, expressed as truth checks."""
    # (computed, expected) pairs for the pure (non-augmented) operators
    expected_results = [
        (Units('m') * 2, Units('2m')),
        (Units('m') / 2, Units('0.5m')),
        (Units('m') // 2, Units('0.5m')),
        (Units('m') + 2, Units('m @ -2')),
        (Units('m') - 2, Units('m @ 2')),
        (Units('m') ** 2, Units('m2')),
        (Units('m') ** -2, Units('m-2')),
        (Units('m2') ** 0.5, Units('m')),
        (2 * Units('m'), Units('2m')),
        (2 / Units('m'), Units('2 m-1')),
        (2 // Units('m'), Units('2 m-1')),
        (2 + Units('m'), Units('m @ -2')),
        (2 - Units('m'), Units('-1 m @ -2')),
        (Units('m') * Units('2m'), Units('2 m2')),
        (Units('m') / Units('2m'), Units('0.5')),
        (Units('m') // Units('2m'), Units('0.5')),
        (Units('m').log(10), Units('lg(re 1 m)')),
        (Units('m').log(2), Units('lb(re 1 m)')),
        (Units('m').log(math.e), Units('ln(re 1 m)')),
        (Units('m').log(1.5), Units('2.46630346237643 ln(re 1 m)')),
    ]
    for computed, expected in expected_results:
        self.assertTrue(computed == expected)

    # augmented assignment with a scalar rebinds to a new object
    value = Units('m')
    alias = value
    value *= 2
    self.assertTrue(value == Units('2m'))
    self.assertTrue(value != alias)

    value = Units('m')
    alias = value
    value /= 2
    self.assertTrue(value == Units('0.5m'))
    self.assertTrue(value != alias)

    value = Units('m')
    alias = value
    value //= 2
    self.assertTrue(value == Units('0.5m'))
    self.assertTrue(value != alias)

    value = Units('m')
    alias = value
    value += 2
    self.assertTrue(value == Units('m @ -2'))
    self.assertTrue(value != alias)

    value = Units('m')
    alias = value
    value -= 2
    self.assertTrue(value == Units('m @ 2'))
    self.assertTrue(value != alias)

    value = Units('m')
    alias = value
    value **= 2
    self.assertTrue(value == Units('m2'))
    self.assertTrue(value != alias)

    # augmented assignment with a Units operand
    value = Units('m')
    alias = value
    value *= value
    self.assertTrue(value == Units('m2'))
    self.assertTrue(value != alias)

    value = Units('m')
    alias = value
    value /= value
    self.assertTrue(value == Units('1'))
    self.assertTrue(value != alias)

    value = Units('m')
    alias = value
    value //= value
    self.assertTrue(value == Units('1'))
    self.assertTrue(value != alias)