def test_reproject_GCPs_auto(self):
    ds = gdal.Open(self.test_file)
    d = Domain(ds=ds)
    d.reproject_GCPs()
    gcpproj = NSR(d.vrt.dataset.GetGCPProjection())
    self.assertEqual(gcpproj.GetAttrValue('PROJECTION'), 'Stereographic')
def __init__(self, srs=None, ext=None, ds=None, **kwargs):
    """Create Domain from GDALDataset or string options or lat/lon grids"""
    # If too much information is given raise error
    if ds is not None and srs is not None and ext is not None:
        raise ValueError('Ambiguous specification of both dataset, srs- and ext-strings.')

    # choose between input options:
    # ds
    # ds and srs
    # srs and ext

    # if only a dataset is given:
    #     copy geo-reference from the dataset
    if ds is not None and srs is None:
        self.vrt = VRT.from_gdal_dataset(ds)

    # If dataset and srs are given (but not ext):
    #   use AutoCreateWarpedVRT to determine bounds and resolution
    elif ds is not None and srs is not None:
        srs = NSR(srs)
        tmp_vrt = gdal.AutoCreateWarpedVRT(ds, None, srs.wkt)
        if tmp_vrt is None:
            raise NansatProjectionError('Could not warp the given dataset to the given SRS.')
        else:
            self.vrt = VRT.from_gdal_dataset(tmp_vrt)

    # If SpatialRef and extent string are given (but not dataset)
    elif srs is not None and ext is not None:
        srs = NSR(srs)
        # create full dictionary of parameters
        extent_dict = Domain._create_extent_dict(ext)

        # convert -lle to -te
        if 'lle' in extent_dict.keys():
            extent_dict = self._convert_extentDic(srs, extent_dict)

        # get size/extent from the created extent dictionary
        geo_transform, raster_x_size, raster_y_size = self._get_geotransform(extent_dict)
        # create VRT object with given geo-reference parameters
        self.vrt = VRT.from_dataset_params(x_size=raster_x_size, y_size=raster_y_size,
                                           geo_transform=geo_transform,
                                           projection=srs.wkt,
                                           gcps=[], gcp_projection='')
    elif 'lat' in kwargs and 'lon' in kwargs:
        warnings.warn('Domain(lon=lon, lat=lat) will be deprecated! '
                      'Use Domain.from_lonlat()', NansatFutureWarning)
        # create self.vrt from given lat/lon
        self.vrt = VRT.from_lonlat(kwargs['lon'], kwargs['lat'])
    else:
        raise ValueError('"dataset" or "srsString and extentString" '
                         'or "dataset and srsString" are required')
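# A minimal usage sketch (not from the library's test suite) of the three Domain
# constructions handled above; the EPSG code and extent string follow the tests in this
# collection, while 'example.nc' and the stereographic proj4 string are hypothetical
# placeholders used only for illustration.
from osgeo import gdal
from nansat import Domain

# 1) srs + ext: a 500 x 500 pixel lon/lat grid covering 25-35E, 70-72N
d_lonlat = Domain(4326, "-te 25 70 35 72 -ts 500 500")

# 2) ds only: copy the geo-reference of an existing GDAL dataset
ds = gdal.Open('example.nc')                               # hypothetical input file
d_copy = Domain(ds=ds)

# 3) ds + srs: gdal.AutoCreateWarpedVRT derives extent and resolution in the target SRS
d_warped = Domain(srs='+proj=stere +lat_0=75 +lon_0=10', ds=ds)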
def test_transform_coordinates_1d_array(self):
    src_srs = NSR()
    dst_srs = NSR(str('+proj=stere'))
    src_points = (np.array([1, 2, 3, 4]),
                  np.array([5, 6, 7, 8]),
                  np.array([5, 6, 7, 8]))
    dst_x, dst_y, dst_z = VRT.transform_coordinates(src_srs, src_points, dst_srs)
    # check if shape of the result matches the expected shape (list with four points)
    self.assertEqual(dst_x.shape, (4,))
    self.assertEqual(dst_y.shape, (4,))
    self.assertEqual(dst_z.shape, (4,))
def create_vrt(self, filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir):
    """ Create VRT

    Parameters
    ----------
    filename : str
        absolute url of an input file
    date : str
        date in format YYYY-MM-DD
    ds : netCDF.Dataset
    bands : list
        list of src bands
    cachedir : str
    """
    if date is None:
        warnings.warn('Date is not specified! Will return the first layer. '
                      'Please add date="YYYY-MM-DD"')

    # TODO: <self.filename> will be changed to vrt filename after init vrt
    self.filename = filename
    self.cachedir = cachedir
    self.ds = self.get_dataset(ds)

    if 'projection' in self.ds.variables:
        self.srcDSProjection = NSR(srs=self.ds.variables['projection'].proj4_string).wkt
    elif 'UTM_projection' in self.ds.variables:
        self.srcDSProjection = NSR(srs=self.ds.variables['UTM_projection'].proj4_string).wkt

    ds_time = self.get_dataset_time()
    ds_times = self.convert_dstime_datetimes(ds_time)
    layer_time_id, layer_date = Opendap.get_layer_datetime(date, ds_times)

    var_names = self.get_geospatial_variable_names()
    if bands:
        # TODO: select variable names based on standard names instead of band names
        #       - this means the variable must be looped, like in mapper_netcdf_cf.py
        var_names = bands

    # create VRT with correct lon/lat (geotransform)
    raster_x, raster_y = self.get_shape()
    geotransform = self.get_geotransform()
    self._init_from_dataset_params(int(raster_x), int(raster_y),
                                   geotransform, self.srcDSProjection)
    meta_dict = self.create_metadict(filename, var_names, layer_time_id)

    self.create_bands(meta_dict)

    # Copy metadata
    for attr in self.ds.ncattrs():
        self.dataset.SetMetadataItem(str(attr), str(self.ds.getncattr(attr)))

    # set time
    time_res_sec = self.get_time_coverage_resolution()
    self.dataset.SetMetadataItem('time_coverage_start', str(layer_date))
    self.dataset.SetMetadataItem('time_coverage_end', str(layer_date + time_res_sec))
def test_transform_coordinates_2d_array(self):
    src_srs = NSR()
    dst_srs = NSR(str('+proj=stere'))
    src_points = (np.array([[1, 2, 3, 4], [1, 2, 3, 4]]),
                  np.array([[5, 6, 7, 8], [5, 6, 7, 8]]),
                  np.array([[5, 6, 7, 8], [5, 6, 7, 8]]),)
    dst_x, dst_y, dst_z = VRT.transform_coordinates(src_srs, src_points, dst_srs)
    # check if shape of the result matches the expected shape (2x4 array)
    self.assertEqual(dst_x.shape, (2, 4))
    self.assertEqual(dst_y.shape, (2, 4))
    self.assertEqual(dst_z.shape, (2, 4))
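# Hedged sketch of calling VRT.transform_coordinates outside the tests above; the module
# paths (nansat.vrt, nansat.nsr) and the coordinate values are assumptions for illustration.
import numpy as np
from nansat.vrt import VRT
from nansat.nsr import NSR

src_srs = NSR()                        # default: WGS84 lon/lat
dst_srs = NSR('+proj=stere')           # generic stereographic projection
lon = np.array([5.0, 10.0, 15.0])
lat = np.array([70.0, 71.0, 72.0])
hgt = np.zeros(3)

x, y, z = VRT.transform_coordinates(src_srs, (lon, lat, hgt), dst_srs)
# x, y, z keep the input shape, here (3,), expressed in the destination SRS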
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    title_correct = False
    if not gdalMetadata:
        raise WrongMapperError
    for key, val in gdalMetadata.iteritems():
        if 'title' in key:
            if not val == 'Daily AMSR-E Arctic lead area fraction [in percent]':
                raise WrongMapperError
            else:
                title_correct = True

    if not title_correct:
        raise WrongMapperError

    # initiate VRT for the NSIDC 10 km grid
    VRT.__init__(self,
                 srcGeoTransform=(-3850000, 6250, 0.0, 5850000, 0.0, -6250),
                 srcProjection=NSR(3411).wkt,
                 srcRasterXSize=1216,
                 srcRasterYSize=1792)

    src = {'SourceFilename': 'NETCDF:"%s":lf' % fileName, 'SourceBand': 1}
    dst = {'name': 'leadFraction', 'long_name': 'AMSRE sea ice lead fraction'}

    self._create_band(src, dst)
    self.dataset.FlushCache()
def init_from_manifest_only(self, manifestXML, annotXML, missionName):
    ''' Create fake VRT and add metadata only from the manifest.safe '''
    X, Y, lon, lat, inc, ele, numberOfSamples, numberOfLines = self.read_geolocation_lut(annotXML)

    VRT.__init__(self, srcRasterXSize=numberOfSamples, srcRasterYSize=numberOfLines)
    doc = ET.fromstring(manifestXML)

    gcps = []
    for i in range(len(X)):
        gcps.append(gdal.GCP(lon[i], lat[i], 0, X[i], Y[i]))

    self.dataset.SetGCPs(gcps, NSR().wkt)
    self.dataset.SetMetadataItem(
        'time_coverage_start',
        doc.findall(".//*[{http://www.esa.int/safe/sentinel-1.0}startTime]")[0][0].text)
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        doc.findall(".//*[{http://www.esa.int/safe/sentinel-1.0}stopTime]")[0][0].text)
    self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform(missionName)))
    self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
    self.dataset.SetMetadataItem('Entry Title', missionName + ' SAR')
    self.dataset.SetMetadataItem('Data Center', 'ESA/EO')
    self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
    self.dataset.SetMetadataItem('Summary', missionName + ' SAR data')
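# Quick illustration of the gdal.GCP constructor used above: the arguments are
# (x, y, z, pixel, line), i.e. map coordinates first, then the image position.
# The coordinate values below are made up.
from osgeo import gdal
from nansat.nsr import NSR

gcps = [gdal.GCP(10.0, 70.0, 0, 0.5, 0.5),         # lon, lat, height, pixel, line
        gdal.GCP(11.0, 70.5, 0, 100.5, 200.5)]
gcp_srs = NSR().wkt                                # the GCPs above are given in WGS84 lon/lat
# a mapper then attaches them to its dataset, e.g. self.dataset.SetGCPs(gcps, gcp_srs)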
def __init__(self, fileName, gdalDataset, gdalMetadata, date=None,
             ds=None, bands=None, cachedir=None, **kwargs):
    ''' Create NCEP VRT

    Parameters:
        fileName : URL
        date : str
            2010-05-01
        ds : netCDF.Dataset
            previously opened dataset
    '''
    self.test_mapper(fileName)
    ds = Dataset(fileName)
    proj4str = '%s +units=%s' % (ds.variables['Polar_Stereographic_Grid'].proj4_string,
                                 ds.variables['xc'].units)
    self.srcDSProjection = NSR(proj4str).wkt

    if fileName[-3:] == '.nc':
        date = self.t0 + dt.timedelta(seconds=ds.variables['time'][0])
        date = date.strftime('%Y-%m-%d')

    self.create_vrt(fileName, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

    # add instrument and platform
    mm = pti.get_gcmd_instrument('Passive Remote Sensing')
    ee = pti.get_gcmd_platform('Earth Observation Satellites')
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))
def __init__(self, filename, flip_gcp_line=False):
    #if not 'S1A' in filename or not 'S1B' in filename:
    #    raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)
    if not self.dataset.GetMetadataItem('SATELLITE_IDENTIFIER') or \
            not self.dataset.GetMetadataItem('SATELLITE_IDENTIFIER').lower() == 'sentinel-1':
        raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)
    if not IMPORT_SCIPY:
        raise NansatReadError('Sentinel-1 data cannot be read because scipy is not installed')

    self.input_filename = filename
    try:
        self.ds = Dataset(filename)
    except OSError:
        self.ds = Dataset(filename + '#fillmismatch')
        self.input_filename = filename + '#fillmismatch'
    try:
        lon = self.ds.variables['GCP_longitude_' + self.ds.polarisation[:2]]
    except (AttributeError, KeyError):
        raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)

    self._remove_geotransform()
    self._remove_geolocation()
    self.dataset.SetProjection('')
    self.dataset.SetGCPs(self.get_gcps(flip_gcp_line=flip_gcp_line), NSR().wkt)
    self.add_incidence_angle_band()
    self.add_look_direction_band()
    self.set_gcmd_dif_keywords()
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
    title_correct = False
    if not gdalMetadata:
        raise WrongMapperError
    for key, val in list(gdalMetadata.items()):
        if 'title' in key:
            if not val == 'Daily AMSR-E Arctic lead area fraction [in percent]':
                raise WrongMapperError
            else:
                title_correct = True

    if not title_correct:
        raise WrongMapperError

    # initiate VRT for the NSIDC 10 km grid
    self._init_from_dataset_params(1216, 1792,
                                   (-3850000, 6250, 0.0, 5850000, 0.0, -6250),
                                   NSR(3411).wkt)

    src = {'SourceFilename': 'NETCDF:"%s":lf' % filename, 'SourceBand': 1}
    dst = {'name': 'leadFraction', 'long_name': 'AMSRE sea ice lead fraction'}

    self.create_band(src, dst)
    self.dataset.FlushCache()
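# Side note on the 'NETCDF:"<file>":<variable>' SourceFilename used above: this is GDAL's
# netCDF subdataset syntax for opening a single variable as a raster. The file name below
# is a hypothetical placeholder.
from osgeo import gdal

sub_ds = gdal.Open('NETCDF:"amsre_lead_fraction.nc":lf')
if sub_ds is not None:
    print(sub_ds.RasterXSize, sub_ds.RasterYSize, sub_ds.GetRasterBand(1).DataType)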
def transform_points(self, colVector, rowVector, DstToSrc=0, dst_srs=None):
    """Transform given lists of X,Y coordinates into lon/lat or inverse

    Parameters
    -----------
    colVector, rowVector : lists
        X and Y coordinates in pixel/line or lon/lat coordinate system
    DstToSrc : 0 or 1
        - 0 - forward transform (pix/line => lon/lat)
        - 1 - inverse transformation
    dst_srs : NSR
        destination spatial reference

    Returns
    --------
    X, Y : lists
        X and Y coordinates in lon/lat or pixel/line coordinate system
    """
    if dst_srs is None:
        dst_srs = NSR()
    return self.vrt.transform_points(colVector, rowVector, dst2src=DstToSrc, dst_srs=dst_srs)
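# Hedged example of Domain.transform_points, assuming a Domain built as in the tests in
# this collection; the pixel/line indices and the stereographic proj4 string are arbitrary.
from nansat import Domain
from nansat.nsr import NSR

d = Domain(4326, "-te 25 70 35 72 -ts 500 500")

# forward transform: pixel/line -> lon/lat (DstToSrc=0 is the default)
lon, lat = d.transform_points([0, 250, 499], [0, 250, 499])

# the same pixels expressed in a polar stereographic SRS instead of lon/lat
x, y = d.transform_points([0, 250, 499], [0, 250, 499],
                          dst_srs=NSR('+proj=stere +lat_0=75 +lon_0=10'))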
def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
             ds=None, bands=None, cachedir=None, *args, **kwargs):
    self.test_mapper(filename)
    timestamp = date if date else self.get_date(filename)
    ds = Dataset(filename)

    try:
        self.srcDSProjection = NSR(ds.variables['projection_3'].proj4 +
                                   ' +to_meter=0.0174532925199 +wktext')
    except KeyError:
        raise WrongMapperError

    self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)

    self.dataset.SetMetadataItem('instrument',
                                 json.dumps(pti.get_gcmd_instrument('Computer')))
    self.dataset.SetMetadataItem('platform',
                                 json.dumps(pti.get_gcmd_platform('MODELS')))
    self.dataset.SetMetadataItem('Data Center',
                                 json.dumps(pti.get_gcmd_provider('NO/MET')))
    self.dataset.SetMetadataItem('Entry Title', str(ds.getncattr('title')))
    self.dataset.SetMetadataItem('ISO Topic Category',
                                 json.dumps(pti.get_iso19115_topic_category('Oceans')))
    self.dataset.SetMetadataItem('gcmd_location',
                                 json.dumps(pti.get_gcmd_location('sea surface')))
def __repr__(self):
    '''Creates string with basic info about the Domain object

    Returns
    ---------
    outStr : str
        size, projection and corner coordinates
    '''
    outStr = 'Domain:[%d x %d]\n' % (self.vrt.dataset.RasterXSize,
                                     self.vrt.dataset.RasterYSize)
    outStr += '-' * 40 + '\n'
    try:
        corners = self.get_corners()
    except:
        self.logger.error('Cannot read projection from source!')
    else:
        outStr += 'Projection:\n'
        outStr += (NSR(self.vrt.get_projection()).ExportToPrettyWkt(1) + '\n')
        outStr += '-' * 40 + '\n'
        outStr += 'Corners (lon, lat):\n'
        outStr += '\t (%6.2f, %6.2f)  (%6.2f, %6.2f)\n' % (corners[0][0], corners[1][0],
                                                           corners[0][2], corners[1][2])
        outStr += '\t (%6.2f, %6.2f)  (%6.2f, %6.2f)\n' % (corners[0][1], corners[1][1],
                                                           corners[0][3], corners[1][3])
    return outStr
class Mapper(Opendap):
    ''' VRT with mapping of WKV for GLOBCURRENT data served by IFREMER OPeNDAP '''
    # http://www.ifremer.fr/opendap/cerdap1/globcurrent/v2.0/global_025_deg/total_hs/2010/001/20100101000000-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc
    # http://tds0.ifremer.fr/thredds/dodsC/CLS-L4-CUREUL_HS-ALT_SUM-V02.0_FULL_TIME_SERIE
    baseURLs = ['http://www.ifremer.fr/opendap/cerdap1/globcurrent/v2.0/']
    timeVarName = 'time'
    xName = 'lon'
    yName = 'lat'
    timeCalendarStart = '1950-01-01'
    srcDSProjection = NSR().wkt

    def __init__(self, filename, gdalDataset, gdalMetadata,
                 date=None, ds=None, bands=None, cachedir=None, **kwargs):
        ''' Create GLOBCURRENT VRT

        Parameters:
            filename : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset
        '''
        self.test_mapper(filename)
        fname = os.path.split(filename)[1]
        date = '%s-%s-%sT%s:00Z' % (fname[0:4], fname[4:6], fname[6:8], fname[8:10])

        self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

        # add instrument and platform
        #instr = pti.get_gcmd_instrument('active remote sensing')
        #pltfr = pti.get_gcmd_platform('Earth Observation Satellites')
        pltfr = pti.get_gcmd_platform('JASON-1')
        instr = pti.get_gcmd_instrument('JASON-2 RADAR ALTIMETER')

        self.dataset.SetMetadataItem('instrument', json.dumps(instr))
        self.dataset.SetMetadataItem('platform', json.dumps(pltfr))
        self.dataset.SetMetadataItem('Data Center', 'FR/IFREMER/CERSAT')
        self.dataset.SetMetadataItem('Entry Title', 'GLOBCURRENT')

    def convert_dstime_datetimes(self, dsTime):
        ''' Convert time variable to np.datetime64 '''
        dsDatetimes = np.array(
            [(np.datetime64(self.timeCalendarStart).astype('M8[s]')
              + np.timedelta64(int(day), 'D').astype('m8[s]')
              + np.timedelta64(int(24 * (day - int(day))), 'h').astype('m8[s]'))
             for day in dsTime]).astype('M8[s]')
        return dsDatetimes
def set_gcps(self, lon, lat, gdal_dataset):
    """ Set gcps """
    self.band_vrts['new_lon_VRT'] = VRT.from_array(lon)
    self.dataset.SetGCPs(VRT._lonlat2gcps(lon, lat, n_gcps=400), NSR().wkt)

    # Add geolocation from correct longitudes and latitudes
    self._add_geolocation(
        Geolocation(self.band_vrts['new_lon_VRT'], self,
                    x_band=1, y_band=self._latitude_band_number(gdal_dataset)))
def test_transform_points_dstsrs(self):
    d = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    lon, lat = d.transform_points(
        [1, 2, 3], [1, 2, 3],
        dst_srs=NSR('+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=75 +lon_0=10 +no_defs'))
    self.assertEqual(type(lon), np.ndarray)
    self.assertEqual(type(lat), np.ndarray)
def test_issue_189(self):
    fn = '/mnt/10.11.12.232/sat_downloads_asar/level-0/2010-01/descending/VV/gsar_rvl/RVL_ASA_WS_20100110211812087.gsar'
    if doppler_installed:
        n = Doppler(fn)
        xlon, xlat = n.get_corners()
        d = Domain(NSR(3857),
                   '-lle %f %f %f %f -tr 1000 1000' % (xlon.min(), xlat.min(),
                                                       xlon.max(), xlat.max()))
        n.reproject(d, eResampleAlg=1, tps=True)
        inci = n['incidence_angle']
class Mapper(Opendap):
    ''' VRT with mapping of WKV for the ESA CCI SST L4 analysis '''
    # http://dap.ceda.ac.uk/data/neodc/esacci/sst/data/lt/Analysis/L4/v01.1/2010/05/01/20100501120000-ESACCI-L4_GHRSST-SSTdepth-OSTIA-GLOB_LT-v02.0-fv01.1.nc
    baseURLs = ['http://dap.ceda.ac.uk/data/neodc/esacci/sst/data/lt/Analysis/L4/v01.1/']
    timeVarName = 'time'
    xName = 'lon'
    yName = 'lat'
    timeCalendarStart = '1981-01-01'
    srcDSProjection = NSR().wkt

    def __init__(self, filename, gdalDataset, gdalMetadata,
                 date=None, ds=None, bands=None, cachedir=None, **kwargs):
        ''' Create ESA CCI SST VRT

        Parameters:
            filename : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset
        '''
        self.test_mapper(filename)
        fname = os.path.split(filename)[1]
        date = '%s-%s-%s' % (fname[0:4], fname[4:6], fname[6:8])

        self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

        # add instrument and platform
        mm = pti.get_gcmd_instrument('Passive Remote Sensing')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

    def convert_dstime_datetimes(self, dsTime):
        ''' Convert time variable to np.datetime64 '''
        dsDatetimes = np.array(
            [(np.datetime64(self.timeCalendarStart).astype('M8[s]')
              + np.timedelta64(int(day), 'D').astype('m8[s]')
              + np.timedelta64(int(24 * (day - int(day))), 'h').astype('m8[s]'))
             for day in dsTime]).astype('M8[s]')
        return dsDatetimes
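# A small worked example of the day-fraction arithmetic in convert_dstime_datetimes above,
# assuming dsTime holds fractional days since timeCalendarStart ('1981-01-01' here).
import numpy as np

timeCalendarStart = '1981-01-01'
day = 100.5    # 100 whole days plus half a day

t = (np.datetime64(timeCalendarStart).astype('M8[s]')
     + np.timedelta64(int(day), 'D').astype('m8[s]')
     + np.timedelta64(int(24 * (day - int(day))), 'h').astype('m8[s]'))
print(t)       # 1981-04-11T12:00:00 -> the whole days plus 12 hours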
def test_convert_extentDic(self):
    d = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    result = d._convert_extentDic(NSR(4326), {'lle': [25.0, 70.0, 35.0, 72.0],
                                              'ts': [500.0, 500.0]})
    self.assertEqual(result, {'lle': [25.0, 70.0, 35.0, 72.0],
                              'te': [25.0, 70.0, 35.0, 72.0],
                              'ts': [500.0, 500.0]})
def _create_empty_from_projection_variable(self, gdal_dataset, gdal_metadata,
                                           projection_variable='projection_lambert'):
    ds = Dataset(self.input_filename)
    subdataset = gdal.Open(self._get_sub_filenames(gdal_dataset)[0])
    self._init_from_dataset_params(
        x_size=subdataset.RasterXSize,
        y_size=subdataset.RasterYSize,
        geo_transform=subdataset.GetGeoTransform(),
        projection=NSR(ds.variables[projection_variable].proj4).wkt,
        metadata=gdal_metadata)
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30, **kwargs):
    # check if mapper fits
    if not gdalMetadata:
        raise WrongMapperError
    if not os.path.splitext(inputFileName)[1] == '.mnt':
        raise WrongMapperError

    try:
        mbNorthLatitude = float(gdalMetadata['NC_GLOBAL#mbNorthLatitude'])
        mbSouthLatitude = float(gdalMetadata['NC_GLOBAL#mbSouthLatitude'])
        mbEastLongitude = float(gdalMetadata['NC_GLOBAL#mbEastLongitude'])
        mbWestLongitude = float(gdalMetadata['NC_GLOBAL#mbWestLongitude'])
        mbProj4String = gdalMetadata['NC_GLOBAL#mbProj4String']
        Number_lines = int(gdalMetadata['NC_GLOBAL#Number_lines'])
        Number_columns = int(gdalMetadata['NC_GLOBAL#Number_columns'])
        Element_x_size = float(gdalMetadata['NC_GLOBAL#Element_x_size'])
        Element_y_size = float(gdalMetadata['NC_GLOBAL#Element_y_size'])
    except:
        raise WrongMapperError

    # find subdataset with DEPTH
    subDatasets = gdalDataset.GetSubDatasets()
    dSourceFile = None
    for subDataset in subDatasets:
        if subDataset[0].endswith('.mnt":DEPTH'):
            dSourceFile = subDataset[0]
    if dSourceFile is None:
        raise WrongMapperError

    dSubDataset = gdal.Open(dSourceFile)
    dMetadata = dSubDataset.GetMetadata()

    try:
        scale_factor = dMetadata['DEPTH#scale_factor']
        add_offset = dMetadata['DEPTH#add_offset']
    except:
        raise WrongMapperError

    geoTransform = [mbWestLongitude, Element_x_size, 0,
                    mbNorthLatitude, 0, -Element_y_size]

    # create empty VRT dataset with geolocation only
    self._init_from_dataset_params(Number_columns, Number_lines, geoTransform,
                                   NSR(mbProj4String).wkt, metadata=gdalMetadata)

    metaDict = [{'src': {'SourceFilename': dSourceFile,
                         'SourceBand': 1,
                         'ScaleRatio': scale_factor,
                         'ScaleOffset': add_offset},
                 'dst': {'wkv': 'depth'}}]

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)
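# Illustration of the 6-element GDAL geotransform assembled above; the numbers are made up
# and only show how pixel/line indices map to projected coordinates.
geo_transform = [-10.0,    # x of the top-left corner (mbWestLongitude above)
                 0.001,    # pixel width (Element_x_size)
                 0.0,      # row rotation, 0 for north-up grids
                 60.0,     # y of the top-left corner (mbNorthLatitude)
                 0.0,      # column rotation
                 -0.001]   # pixel height, negative because lines run from north to south

def pixel_to_xy(gt, pixel, line):
    """Apply the affine geotransform the same way GDAL does."""
    x = gt[0] + pixel * gt[1] + line * gt[2]
    y = gt[3] + pixel * gt[4] + line * gt[5]
    return x, y

print(pixel_to_xy(geo_transform, 0, 0))       # (-10.0, 60.0): the top-left corner
print(pixel_to_xy(geo_transform, 100, 200))   # 100 pixels east and 200 lines south of it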
class Mapper(Opendap):
    ''' VRT with mapping of WKV for GLOBCURRENT data served by the IFREMER THREDDS server '''
    baseURLs = ['http://tds0.ifremer.fr/thredds/dodsC/CLS-L4']
    timeVarName = 'time'
    xName = 'lon'
    yName = 'lat'
    timeCalendarStart = '1950-01-01'
    srcDSProjection = NSR().wkt

    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 date=None, ds=None, bands=None, cachedir=None, **kwargs):
        ''' Create GLOBCURRENT VRT

        Parameters:
            fileName : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset
        '''
        self.test_mapper(fileName)
        self.create_vrt(fileName, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

        # add instrument and platform
        mm = pti.get_gcmd_instrument('Passive Remote Sensing')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
        self.dataset.SetMetadataItem('Data Center', 'FR/IFREMER/CERSAT')
        self.dataset.SetMetadataItem('Entry Title', 'GLOBCURRENT')

    def convert_dstime_datetimes(self, dsTime):
        ''' Convert time variable to np.datetime64 '''
        dsDatetimes = np.array([np.datetime64(self.timeCalendarStart) + int(day)
                                for day in dsTime]).astype('M8[s]')
        return dsDatetimes
class Mapper(Opendap):
    ''' VRT with mapping of WKV for the ESA CCI Ocean Colour data '''
    baseURLs = ['https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL',
                'https://www.oceancolour.org/thredds/dodsC/CCI_ALL',
                'https://esgf-data1.ceda.ac.uk/thredds/dodsC/esg_esacci/ocean_colour/data/v2-release/geographic/netcdf/']
    timeVarName = 'time'
    xName = 'lon'
    yName = 'lat'
    timeCalendarStart = '1970-01-01'
    srcDSProjection = NSR().wkt

    def __init__(self, filename, gdalDataset, gdalMetadata,
                 date=None, ds=None, bands=None, cachedir=None, **kwargs):
        ''' Create Ocean Colour CCI VRT

        Parameters:
            filename : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset
        '''
        self.test_mapper(filename)
        self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

        # add instrument and platform
        mm = pti.get_gcmd_instrument('Passive Remote Sensing')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

    def convert_dstime_datetimes(self, dsTime):
        ''' Convert time variable to np.datetime64 '''
        dsDatetimes = np.array([np.datetime64(self.timeCalendarStart) + day
                                for day in dsTime]).astype('M8[s]')
        return dsDatetimes
def _init_data(self, x_filename, y_filename, x_band=1, y_band=1, srs=NSR().wkt,
               line_offset=0, line_step=1, pixel_offset=0, pixel_step=1):
    """Init data of Geolocation object from input parameters

    Parameters
    -----------
    x_filename : str
        name of file for X-dataset
    y_filename : str
        name of file for Y-dataset
    x_band : int
        number of the band in the X-dataset
    y_band : int
        number of the band in the Y-dataset
    srs : str
        WKT
    line_offset : int
        offset of first line
    line_step : int
        step of lines
    pixel_offset : int
        offset of first pixel
    pixel_step : int
        step of pixels

    Notes
    -----
    Saves everything in self.data dict
    """
    self.data['SRS'] = srs
    self.data['X_DATASET'] = x_filename
    self.data['Y_DATASET'] = y_filename
    self.data['X_BAND'] = str(x_band)
    self.data['Y_BAND'] = str(y_band)
    self.data['LINE_OFFSET'] = str(line_offset)
    self.data['LINE_STEP'] = str(line_step)
    self.data['PIXEL_OFFSET'] = str(pixel_offset)
    self.data['PIXEL_STEP'] = str(pixel_step)
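# Sketch of the GDAL 'GEOLOCATION' metadata dictionary that _init_data fills in self.data;
# the two VRT file names are hypothetical placeholders.
from nansat.nsr import NSR

data = {
    'SRS': NSR().wkt,                   # WGS84 by default
    'X_DATASET': '/vsimem/lon.vrt',     # dataset holding the longitude band
    'Y_DATASET': '/vsimem/lat.vrt',     # dataset holding the latitude band
    'X_BAND': '1',
    'Y_BAND': '1',
    'LINE_OFFSET': '0',
    'LINE_STEP': '1',
    'PIXEL_OFFSET': '0',
    'PIXEL_STEP': '1',
}
# GDAL consumes such a dict via dataset.SetMetadata(data, 'GEOLOCATION')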
def init_from_xml(self, productXml):
    ''' Fast init from metadata in XML only '''
    numberOfLines = int(productXml.node('imageAttributes').node('rasterAttributes')
                        .node('numberOfLines').value)
    numberOfSamples = int(productXml.node('imageAttributes').node('rasterAttributes')
                          .node('numberOfSamplesPerLine').value)

    VRT.__init__(self, srcRasterXSize=numberOfSamples, srcRasterYSize=numberOfLines)

    gcps = []
    geogrid = productXml.node('imageAttributes').node('geographicInformation').node('geolocationGrid')
    for child in geogrid.children:
        pix = float(child.node('imageCoordinate').node('pixel').value)
        lin = float(child.node('imageCoordinate').node('line').value)
        lon = float(child.node('geodeticCoordinate').node('longitude').value)
        lat = float(child.node('geodeticCoordinate').node('latitude').value)
        gcps.append(gdal.GCP(lon, lat, 0, pix, lin))

    self.dataset.SetGCPs(gcps, NSR().wkt)

    dates = list(map(parse, [child.node('timeStamp').value
                             for child in (productXml.node('sourceAttributes')
                                           .node('orbitAndAttitude')
                                           .node('orbitInformation')
                                           .nodeList('stateVector'))]))

    self.dataset.SetMetadataItem('time_coverage_start', min(dates).isoformat())
    self.dataset.SetMetadataItem('time_coverage_end', max(dates).isoformat())
    self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform('radarsat-2')))
    self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
    self.dataset.SetMetadataItem('Entry Title', 'Radarsat-2 SAR')
    self.dataset.SetMetadataItem('Data Center', 'CSA')
    self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
    self.dataset.SetMetadataItem('Summary', 'Radarsat-2 SAR data')
def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
             ds=None, bands=None, cachedir=None, *args, **kwargs):
    self.test_mapper(filename)
    timestamp = date if date else self.get_date(filename)
    ds = Dataset(filename)

    try:
        self.srcDSProjection = NSR(ds.variables['projection_lambert'].proj4)
    except KeyError:
        raise WrongMapperError

    self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)

    mm = pti.get_gcmd_instrument('Computer')
    ee = pti.get_gcmd_platform('ecmwfifs')
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))

    md_item = 'Data Center'
    if not self.dataset.GetMetadataItem(md_item):
        self.dataset.SetMetadataItem(md_item, 'NO/MET')
    md_item = 'Entry Title'
    if not self.dataset.GetMetadataItem(md_item):
        self.dataset.SetMetadataItem(md_item, str(ds.getncattr('title')))
    md_item = 'summary'
    if not self.dataset.GetMetadataItem(md_item):
        summary = """
        AROME_Arctic is a convection-permitting atmosphere model covering parts of the
        Barents Sea and the Nordic Arctic. It has horizontal resolution of 2.5 km and
        65 vertical levels. AROME_Arctic runs for 66 hours four times a day
        (00, 06, 12, 18) with three-hourly cycling for data assimilation. Boundary data
        is from ECMWF. Model code based on HARMONIE cy40h1.1"""
        self.dataset.SetMetadataItem(md_item, str(summary))
def _init_empty(self, manifest_data, annotation_data):
    """ Fast initialization from minimum of information

    Parameters
    ----------
    manifest_data : dict
        data from the manifest file (time_coverage_start, etc)
    annotation_data : dict
        data from annotation file (longitude, latitude, x_size, etc)

    Note
    ----
    Calls VRT.__init__, adds GCPs, metadata
    """
    # init empty dataset
    super(Mapper, self).__init__(annotation_data['x_size'], annotation_data['y_size'])
    # add GCPs from (corrected) geolocation data
    gcps = Mapper.create_gcps(annotation_data['longitude'],
                              annotation_data['latitude'],
                              annotation_data['height'],
                              annotation_data['pixel'],
                              annotation_data['line'])
    self.dataset.SetGCPs(gcps, NSR().wkt)
    # set metadata
    self.dataset.SetMetadataItem('time_coverage_start', manifest_data['time_coverage_start'])
    self.dataset.SetMetadataItem('time_coverage_end', manifest_data['time_coverage_end'])
    platform_name = manifest_data['platform_family_name'] + manifest_data['platform_number']
    self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform(platform_name)))
    self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
    self.dataset.SetMetadataItem('entry_title', platform_name + ' SAR')
    self.dataset.SetMetadataItem('data_center', json.dumps(pti.get_gcmd_provider('ESA/EO')))
    self.dataset.SetMetadataItem('iso_topic_category',
                                 json.dumps(pti.get_iso19115_topic_category('Oceans')))
    self.dataset.SetMetadataItem('summary', platform_name + ' SAR data')
    self.dataset.FlushCache()
def _convert_extentDic(self, dstSRS, extentDic):
    '''Convert -lle option (lat/lon) to -te (proper coordinate system)

    Source SRS from LAT/LON projection and target SRS from dstWKT.
    Create osr.CoordinateTransformation based on these SRSs and
    convert given values in degrees to the destination coordinate
    system given by WKT.
    Add key 'te' and the converted values into the extentDic.

    Parameters
    -----------
    dstSRS : NSR
        Destination Spatial Reference
    extentDic : dictionary
        dictionary with 'lle' key

    Returns
    --------
    extentDic : dictionary
        input dictionary + 'te' key and its values
    '''
    coorTrans = osr.CoordinateTransformation(NSR(), dstSRS)

    # convert lat/lon given by 'lle' to the target coordinate system and
    # add key 'te' and the converted values to extentDic
    x1, y1, _ = coorTrans.TransformPoint(extentDic['lle'][0], extentDic['lle'][3])
    x2, y2, _ = coorTrans.TransformPoint(extentDic['lle'][2], extentDic['lle'][3])
    x3, y3, _ = coorTrans.TransformPoint(extentDic['lle'][2], extentDic['lle'][1])
    x4, y4, _ = coorTrans.TransformPoint(extentDic['lle'][0], extentDic['lle'][1])

    minX = min([x1, x2, x3, x4])
    maxX = max([x1, x2, x3, x4])
    minY = min([y1, y2, y3, y4])
    maxY = max([y1, y2, y3, y4])

    extentDic['te'] = [minX, minY, maxX, maxY]

    return extentDic
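# Hedged, standalone version of the -lle -> -te conversion above: the four corner lon/lat
# points are transformed to the target SRS and collapsed to a bounding box. The proj4
# string and extent values are arbitrary.
from osgeo import osr
from nansat.nsr import NSR

dst_srs = NSR('+proj=stere +lat_0=75 +lon_0=10 +datum=WGS84')
lle = [25.0, 70.0, 35.0, 72.0]              # lonmin, latmin, lonmax, latmax

transform = osr.CoordinateTransformation(NSR(), dst_srs)
corners = [(lle[0], lle[3]), (lle[2], lle[3]), (lle[2], lle[1]), (lle[0], lle[1])]
xy = [transform.TransformPoint(lon, lat)[:2] for lon, lat in corners]

te = [min(p[0] for p in xy), min(p[1] for p in xy),
      max(p[0] for p in xy), max(p[1] for p in xy)]
print(te)    # [xmin, ymin, xmax, ymax] in the stereographic projection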
def get_geolocation_grids(self, stepSize=1, dst_srs=None):
    """Get longitude and latitude grids representing the full data grid

    If GEOLOCATION is not present in the self.vrt.dataset then grids
    are generated by converting pixel/line of each pixel into lat/lon.
    If GEOLOCATION is present in the self.vrt.dataset then grids are
    read from the geolocation bands.

    Parameters
    -----------
    stepSize : int
        Reduction factor if output is desired on a reduced grid size
    dst_srs : NSR
        destination spatial reference of the output grids (default: WGS84 lon/lat)

    Returns
    --------
    longitude : numpy array
        grid with longitudes
    latitude : numpy array
        grid with latitudes
    """
    if dst_srs is None:
        dst_srs = NSR()
    step_size = stepSize
    x_vec = list(range(0, self.vrt.dataset.RasterXSize, step_size))
    y_vec = list(range(0, self.vrt.dataset.RasterYSize, step_size))
    x_grid, y_grid = np.meshgrid(x_vec, y_vec)

    if self.vrt.geolocation is not None and len(self.vrt.geolocation.data) > 0:
        # if the vrt dataset has geolocationArray
        # read lon,lat grids from geolocationArray
        lon_grid, lat_grid = self.vrt.geolocation.get_geolocation_grids()
        lon_arr, lat_arr = lon_grid[y_grid, x_grid], lat_grid[y_grid, x_grid]
    else:
        # generate lon,lat grids using GDAL Transformer
        lon_vec, lat_vec = self.transform_points(x_grid.flatten(), y_grid.flatten(),
                                                 dst_srs=dst_srs)
        lon_arr = lon_vec.reshape(x_grid.shape)
        lat_arr = lat_vec.reshape(x_grid.shape)

    return lon_arr, lat_arr
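# Minimal usage sketch of get_geolocation_grids on a Domain built from srs + extent;
# stepSize only subsamples the output, so 500/10 gives 50 x 50 grids.
from nansat import Domain

d = Domain(4326, "-te 25 70 35 72 -ts 500 500")
lon, lat = d.get_geolocation_grids(stepSize=10)
print(lon.shape, lat.shape)    # (50, 50) arrays of longitudes and latitudes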
def plot_s1a_example(fsize='small'):
    test_data.get_sentinel1a(fsize=fsize)
    #w = SARWind(test_data.sentinel1a[fsize])
    w = BayesianWind(test_data.sentinel1a[fsize])

    cc = w.get_corners()
    lonmin = np.int(np.floor(np.min(cc[0])*100))/100.
    lonmax = np.int(np.ceil(np.max(cc[0])*100))/100.
    latmin = np.int(np.floor(np.min(cc[1])*100))/100.
    latmax = np.int(np.ceil(np.max(cc[1])*100))/100.
    w.reproject(Domain(NSR().wkt,
                       ext='-lle %s %s %s %s -ts %s %s'
                           % (lonmin, latmin, lonmax, latmax,
                              (lonmax-lonmin)*110., (latmax-latmin)*110.)))

    #u = w['U']
    #v = w['V']
    windspeed = w['bspeed_modcmod']
    winddir = w['bdir_modcmod']
    u = -windspeed*np.sin((180.0 - winddir)*np.pi/180.0)
    v = windspeed*np.cos((180.0 - winddir)*np.pi/180.0)

    nmap = Nansatmap(w, resolution='h')
    nmap.pcolormesh(np.hypot(u, v), vmax=18)
    nmap.add_colorbar(fontsize=8)
    nmap.quiver(u, v, step=20)  # , scale=1, width=0.001)
    nmap.draw_continents()
    nmap.drawgrid()
    #nmap.drawmeridians(np.arange(lonmin, lonmax, 5),
    #                   labels=[False, False, True, False])
    #nmap.drawparallels(np.arange(latmin, latmax, 3),
    #                   labels=[True, False, False, False])

    # set size of the figure (inches)
    #nmap.fig.set_figheight(20)
    #nmap.fig.set_figwidth(15)

    # save figure to a PNG file
    nmap.draw_continents()
    #plt.suptitle(
    #    'High resolution\nwind speed and direction\nfrom Sentinel-1A and '
    #    'NCEP\n%s' % w.time_coverage_start.isoformat(),
    #    fontsize=8
    #)
    #nmap.fig.savefig('s1a_wind_%s.png' % fsize, dpi=150, bbox_inches='tight')
    nmap.fig.savefig('s1a_bwind_%s.png' % fsize, dpi=150, bbox_inches='tight')
def __init__(self, domain, **kwargs):
    ''' Set attributes

    Get proj4 from the given domain and convert the proj4 projection to
    the basemap projection.

    Parameters
    -----------
    domain : domain object
    kwargs : dictionary
        parameters that are used for all operations.

    Modifies
    ---------
    self.fig : figure
        matplotlib.pyplot.figure
    self.colorbar : boolean
        if colorbar is True, it is possible to put colorbar.
        e.g. contour_plots(contour_style='fill'), put_color()
    self.mpl : list
        elements are matplotlib.contour.QuadContourSet instance,
        matplotlib.quiver.Quiver instance or
        matplotlib.collections.QuadMesh object

    See also
    ----------
    http://matplotlib.org/basemap/api/basemap_api.html
    '''
    self.domain = domain

    # get proj4
    proj4 = NSR(domain.vrt.get_projection()).ExportToProj4()

    # convert proj4 to basemap projection
    projStr = proj4.split(' ')[0][6:]
    projection = {'aea': 'aea', 'ocea': 'aea',
                  'aeqd': 'aeqd', 'xxx1': 'spaeqd', 'xxx2': 'npaeqd',
                  'cass': 'cass',
                  'cea': 'cea',
                  'eqc': 'cyl', 'longlat': 'cyl',
                  'eck4': 'eck4',
                  'eqdc': 'eqdc',
                  'gall': 'gall',
                  'geos': 'geos',
                  'gnom': 'gnom',
                  'hammer': 'hammer', 'nell_h': 'hammer',
                  'kav7': 'kav7',
                  'laea': 'laea', 'xxx3': 'splaea', 'xxx4': 'nplaea',
                  'lcc': 'lcc', 'lcca': 'lcc',
                  'mbtfpq': 'mbtfpq',
                  'somerc': 'merc', 'merc': 'merc', 'omerc': 'merc',
                  'mill': 'mill',
                  'moll': 'moll',
                  'nsper': 'nsper',
                  'omerc': 'omerc',
                  'ortho': 'ortho',
                  'poly': 'poly', 'rpoly': 'poly', 'imw_p': 'poly',
                  'robin': 'robin',
                  'sinu': 'sinu', 'fouc_s': 'sinu', 'gn_sinu': 'sinu',
                  'mbtfps': 'sinu', 'urmfps': 'sinu',
                  'stere': 'stere', 'sterea': 'stere', 'lee_os': 'stere',
                  'mil_os': 'stere', 'rouss': 'stere',
                  'ups': 'npstere', 'ups': 'spstere',    # CHECK!!
                  'tmerc': 'tmerc', 'gstmerc': 'tmerc', 'utm': 'tmerc',
                  'vandg': 'vandg', 'vandg2': 'vandg',
                  'vandg3': 'vandg', 'vandg4': 'vandg',
                  }.get(projStr, 'cyl')

    if projection in ['stere']:
        lon_0 = float(re.findall('lon_0=+[-+]?\d*[.\d*]*', proj4)[0].split('=')[1])
        lat_0 = float(re.findall('lat_0=+[-+]?\d*[.\d*]*', proj4)[0].split('=')[1])
        kwargs['lon_0'] = lon_0
        kwargs['lat_0'] = lat_0
    if projStr == 'utm':
        kwargs['lon_0'] = -180 + NSR(proj4).GetUTMZone()*6 - 3
        kwargs['lat_0'] = 0

    self.extensionList = ['png', 'emf', 'eps', 'pdf', 'rgba',
                          'ps', 'raw', 'svg', 'svgz']

    # set llcrnrlat, urcrnrlat, llcrnrlon and urcrnrlon to kwargs.
    # if required, modify them from -90. to 90.
    # get min/max lon/lat
    lonCrn, latCrn = domain.get_corners()
    self.lonMin = min(lonCrn)
    self.lonMax = max(lonCrn)
    self.latMin = max(min(latCrn), -90.)
    self.latMax = min(max(latCrn), 90.)
    if not ('llcrnrlat' in kwargs.keys()):
        kwargs['llcrnrlat'] = latCrn[1]
    if not ('urcrnrlat' in kwargs.keys()):
        kwargs['urcrnrlat'] = latCrn[2]
    if not ('llcrnrlon' in kwargs.keys()):
        kwargs['llcrnrlon'] = lonCrn[1]
    if not ('urcrnrlon' in kwargs.keys()):
        kwargs['urcrnrlon'] = lonCrn[2]

    # separate kwargs of plt.figure() from kwargs
    figArgs = ['num', 'figsize', 'dpi', 'facecolor', 'edgecolor', 'frameon']
    figKwargs = {}
    for iArg in figArgs:
        if iArg in kwargs.keys():
            figKwargs[iArg] = kwargs.pop(iArg)

    Basemap.__init__(self, projection=projection, **kwargs)

    # create figure and set it as an attribute
    plt.close()
    self.fig = plt.figure(**figKwargs)
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
             rmMetadatas=['NETCDF_VARNAME', '_Unsigned',
                          'ScaleRatio', 'ScaleOffset', 'dods_variable'],
             **kwargs):
    # Remove 'NC_GLOBAL#' and 'GDAL_' and 'NANSAT_'
    # from keys in gdalDataset
    tmpGdalMetadata = {}
    geoMetadata = {}
    origin_is_nansat = False
    if not gdalMetadata:
        raise WrongMapperError
    for key in gdalMetadata.keys():
        newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '')
        if 'NANSAT_' in newKey:
            geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key]
            origin_is_nansat = True
        else:
            tmpGdalMetadata[newKey] = gdalMetadata[key]
    gdalMetadata = tmpGdalMetadata
    fileExt = os.path.splitext(inputFileName)[1]

    # Get file names from dataset or subdataset
    subDatasets = gdalDataset.GetSubDatasets()
    if len(subDatasets) == 0:
        filenames = [inputFileName]
    else:
        filenames = [f[0] for f in subDatasets]

    # add bands with metadata and corresponding values to the empty VRT
    metaDict = []
    xDatasetSource = ''
    yDatasetSource = ''
    firstXSize = 0
    firstYSize = 0
    for _, filename in enumerate(filenames):
        subDataset = gdal.Open(filename)
        # choose the first dataset with grid
        if (firstXSize == 0 and firstYSize == 0 and
                subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
            firstXSize = subDataset.RasterXSize
            firstYSize = subDataset.RasterYSize
            firstSubDataset = subDataset
            # get projection from the first subDataset
            projection = firstSubDataset.GetProjection()

        # take bands whose sizes are same as the first band.
        if (subDataset.RasterXSize == firstXSize and
                subDataset.RasterYSize == firstYSize):
            if projection == '':
                projection = subDataset.GetProjection()
            if 'GEOLOCATION_X_DATASET' in filename or 'longitude' in filename:
                xDatasetSource = filename
            elif 'GEOLOCATION_Y_DATASET' in filename or 'latitude' in filename:
                yDatasetSource = filename
            else:
                for iBand in range(subDataset.RasterCount):
                    subBand = subDataset.GetRasterBand(iBand+1)
                    bandMetadata = subBand.GetMetadata_Dict()
                    if 'PixelFunctionType' in bandMetadata:
                        bandMetadata.pop('PixelFunctionType')
                    sourceBands = iBand + 1
                    # sourceBands = i*subDataset.RasterCount + iBand + 1

                    # generate src metadata
                    src = {'SourceFilename': filename,
                           'SourceBand': sourceBands}
                    # set scale ratio and scale offset
                    scaleRatio = bandMetadata.get(
                        'ScaleRatio',
                        bandMetadata.get('scale',
                                         bandMetadata.get('scale_factor', '')))
                    if len(scaleRatio) > 0:
                        src['ScaleRatio'] = scaleRatio
                    scaleOffset = bandMetadata.get(
                        'ScaleOffset',
                        bandMetadata.get('offset',
                                         bandMetadata.get('add_offset', '')))
                    if len(scaleOffset) > 0:
                        src['ScaleOffset'] = scaleOffset
                    # set DataType
                    src['DataType'] = subBand.DataType

                    # generate dst metadata
                    # get all metadata from input band
                    dst = bandMetadata
                    # set wkv and bandname
                    dst['wkv'] = bandMetadata.get('standard_name', '')
                    # first, try the name metadata
                    if 'name' in bandMetadata:
                        bandName = bandMetadata['name']
                    else:
                        # if it doesn't exist get name from NETCDF_VARNAME
                        bandName = bandMetadata.get('NETCDF_VARNAME', '')
                        if len(bandName) == 0:
                            bandName = bandMetadata.get('dods_variable', '')

                        # remove digits added by gdal when exporting to netcdf
                        if (len(bandName) > 0 and origin_is_nansat and
                                fileExt == '.nc'):
                            if bandName[-1:].isdigit():
                                bandName = bandName[:-1]
                            if bandName[-1:].isdigit():
                                bandName = bandName[:-1]

                    # if still no bandname, create one
                    if len(bandName) == 0:
                        bandName = 'band_%03d' % iBand

                    dst['name'] = bandName

                    # remove non-necessary metadata from dst
                    for rmMetadata in rmMetadatas:
                        if rmMetadata in dst:
                            dst.pop(rmMetadata)

                    # append band with src and dst dictionaries
                    metaDict.append({'src': src, 'dst': dst})

    # create empty VRT dataset with geolocation only
    self._init_from_gdal_dataset(firstSubDataset, metadata=gdalMetadata)

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    self._create_complex_bands(filenames)

    if len(projection) == 0:
        # projection was not set automatically
        # get projection from GCPProjection
        projection = geoMetadata.get('GCPProjection', '')
    if len(projection) == 0:
        # no projection was found in dataset or metadata:
        # generate WGS84 by default
        projection = NSR().wkt
    # fix problem with MET.NO files where a, b given in m and XC/YC in km
    if ('UNIT["kilometre"' in projection and
            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]' in projection):
        projection = projection.replace(
            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]', '')
    # set projection
    self.dataset.SetProjection(self.repare_projection(projection))

    # check if GCPs were added from input dataset
    gcps = firstSubDataset.GetGCPs()
    gcpProjection = firstSubDataset.GetGCPProjection()

    # if no GCPs in input dataset: try to add GCPs from metadata
    if not gcps:
        gcps = self.add_gcps_from_metadata(geoMetadata)
    # if yet no GCPs: try to add GCPs from variables
    if not gcps:
        gcps = self.add_gcps_from_variables(inputFileName)

    if gcps:
        if len(gcpProjection) == 0:
            # get GCP projection and repare
            gcpProjection = self.repare_projection(geoMetadata.get('GCPProjection', ''))
        # add GCPs to dataset
        self.dataset.SetGCPs(gcps, gcpProjection)
        self.dataset.SetProjection('')
        self._remove_geotransform()

    # Find proper bands and insert GEOLOCATION ARRAY into dataset
    if len(xDatasetSource) > 0 and len(yDatasetSource) > 0:
        self._add_geolocation(Geolocation.from_filenames(xDatasetSource, yDatasetSource))
    elif not gcps:
        # if no GCPs found and not GEOLOCATION ARRAY set:
        # Set Nansat Geotransform if it is not set automatically
        geoTransform = self.dataset.GetGeoTransform()
        if len(geoTransform) == 0:
            geoTransformStr = geoMetadata.get('GeoTransform', '(0|1|0|0|0|0|1)')
            geoTransform = eval(geoTransformStr.replace('|', ','))
            self.dataset.SetGeoTransform(geoTransform)

    subMetadata = firstSubDataset.GetMetadata()

    ### GET START TIME from METADATA
    time_coverage_start = None
    if 'start_time' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['start_time'])
    elif 'start_date' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['start_date'])
    elif 'time_coverage_start' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['time_coverage_start'])

    ### GET END TIME from METADATA
    time_coverage_end = None
    if 'stop_time' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['stop_time'])
    elif 'stop_date' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['stop_date'])
    elif 'time_coverage_stop' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['time_coverage_stop'])
    elif 'end_time' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['end_time'])
    elif 'end_date' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['end_date'])
    elif 'time_coverage_end' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['time_coverage_end'])

    ### GET start time from time variable
    if (time_coverage_start is None and
            'time#standard_name' in subMetadata and
            subMetadata['time#standard_name'] == 'time' and
            'time#units' in subMetadata):
        # get data from netcdf data
        ncFile = Dataset(inputFileName, 'r')
        time_var = ncFile.variables['time']
        t0 = time_var[0]
        if len(time_var) == 1:
            t1 = t0 + 1
        else:
            t1 = time_var[-1]

        time_units_start = parse(time_var.units, fuzzy=True, ignoretz=True)
        time_units_to_seconds = {'second': 1.0,
                                 'hour': 60 * 60.0,
                                 'day': 24 * 60 * 60.0}
        for key in time_units_to_seconds:
            if key in time_var.units:
                factor = time_units_to_seconds[key]
                break

        time_coverage_start = time_units_start + datetime.timedelta(seconds=t0 * factor)
        time_coverage_end = time_units_start + datetime.timedelta(seconds=t1 * factor)

    ## finally set values of time_coverage start and end if available
    if time_coverage_start is not None:
        self.dataset.SetMetadataItem('time_coverage_start',
                                     time_coverage_start.isoformat())
    if time_coverage_end is not None:
        self.dataset.SetMetadataItem('time_coverage_end',
                                     time_coverage_end.isoformat())

    if 'sensor' not in gdalMetadata:
        self.dataset.SetMetadataItem('sensor', 'unknown')
    if 'satellite' not in gdalMetadata:
        self.dataset.SetMetadataItem('satellite', 'unknown')
    if 'source_type' not in gdalMetadata:
        self.dataset.SetMetadataItem('source_type', 'unknown')
    if 'platform' not in gdalMetadata:
        self.dataset.SetMetadataItem('platform', 'unknown')
    if 'instrument' not in gdalMetadata:
        self.dataset.SetMetadataItem('instrument', 'unknown')

    self.logger.info('Use generic mapper - OK!')
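# Worked sketch of the CF-style time handling above, assuming a units string of the form
# '<unit> since <reference date>'; parse() is dateutil's parser, as used in the mapper.
import datetime
from dateutil.parser import parse

units = 'days since 1990-01-01 00:00:00'
t0 = 7305.0    # value of the first time step

time_units_start = parse(units, fuzzy=True, ignoretz=True)    # 1990-01-01 00:00:00
time_units_to_seconds = {'second': 1.0, 'hour': 3600.0, 'day': 86400.0}
factor = next(v for k, v in time_units_to_seconds.items() if k in units)

time_coverage_start = time_units_start + datetime.timedelta(seconds=t0 * factor)
print(time_coverage_start)     # 2010-01-01 00:00:00 (7305 days = 20 years incl. 5 leap days)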
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
             rmMetadatas=['NETCDF_VARNAME', '_Unsigned',
                          'ScaleRatio', 'ScaleOffset', 'dods_variable'],
             **kwargs):
    # Remove 'NC_GLOBAL#' and 'GDAL_' and 'NANSAT_'
    # from keys in gdalDataset
    tmpGdalMetadata = {}
    geoMetadata = {}
    origin_is_nansat = False
    if not gdalMetadata:
        raise WrongMapperError
    for key in gdalMetadata.keys():
        newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '')
        if 'NANSAT_' in newKey:
            geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key]
            origin_is_nansat = True
        else:
            tmpGdalMetadata[newKey] = gdalMetadata[key]
    gdalMetadata = tmpGdalMetadata
    fileExt = os.path.splitext(inputFileName)[1]

    # Get file names from dataset or subdataset
    subDatasets = gdalDataset.GetSubDatasets()
    if len(subDatasets) == 0:
        fileNames = [inputFileName]
    else:
        fileNames = [f[0] for f in subDatasets]

    # add bands with metadata and corresponding values to the empty VRT
    metaDict = []
    xDatasetSource = ''
    yDatasetSource = ''
    firstXSize = 0
    firstYSize = 0
    for _, fileName in enumerate(fileNames):
        subDataset = gdal.Open(fileName)
        # choose the first dataset with grid
        if (firstXSize == 0 and firstYSize == 0 and
                subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
            firstXSize = subDataset.RasterXSize
            firstYSize = subDataset.RasterYSize
            firstSubDataset = subDataset
            # get projection from the first subDataset
            projection = firstSubDataset.GetProjection()

        # take bands whose sizes are same as the first band.
        if (subDataset.RasterXSize == firstXSize and
                subDataset.RasterYSize == firstYSize):
            if projection == '':
                projection = subDataset.GetProjection()
            if 'GEOLOCATION_X_DATASET' in fileName or 'longitude' in fileName:
                xDatasetSource = fileName
            elif 'GEOLOCATION_Y_DATASET' in fileName or 'latitude' in fileName:
                yDatasetSource = fileName
            else:
                for iBand in range(subDataset.RasterCount):
                    subBand = subDataset.GetRasterBand(iBand+1)
                    bandMetadata = subBand.GetMetadata_Dict()
                    if 'PixelFunctionType' in bandMetadata:
                        bandMetadata.pop('PixelFunctionType')
                    sourceBands = iBand + 1
                    # sourceBands = i*subDataset.RasterCount + iBand + 1

                    # generate src metadata
                    src = {'SourceFilename': fileName,
                           'SourceBand': sourceBands}
                    # set scale ratio and scale offset
                    scaleRatio = bandMetadata.get(
                        'ScaleRatio',
                        bandMetadata.get('scale',
                                         bandMetadata.get('scale_factor', '')))
                    if len(scaleRatio) > 0:
                        src['ScaleRatio'] = scaleRatio
                    scaleOffset = bandMetadata.get(
                        'ScaleOffset',
                        bandMetadata.get('offset',
                                         bandMetadata.get('add_offset', '')))
                    if len(scaleOffset) > 0:
                        src['ScaleOffset'] = scaleOffset
                    # set DataType
                    src['DataType'] = subBand.DataType

                    # generate dst metadata
                    # get all metadata from input band
                    dst = bandMetadata
                    # set wkv and bandname
                    dst['wkv'] = bandMetadata.get('standard_name', '')
                    # first, try the name metadata
                    if 'name' in bandMetadata:
                        bandName = bandMetadata['name']
                    else:
                        # if it doesn't exist get name from NETCDF_VARNAME
                        bandName = bandMetadata.get('NETCDF_VARNAME', '')
                        if len(bandName) == 0:
                            bandName = bandMetadata.get('dods_variable', '')

                        # remove digits added by gdal when exporting to netcdf
                        if (len(bandName) > 0 and origin_is_nansat and
                                fileExt == '.nc'):
                            if bandName[-1:].isdigit():
                                bandName = bandName[:-1]
                            if bandName[-1:].isdigit():
                                bandName = bandName[:-1]

                    # if still no bandname, create one
                    if len(bandName) == 0:
                        bandName = 'band_%03d' % iBand

                    dst['name'] = bandName

                    # remove non-necessary metadata from dst
                    for rmMetadata in rmMetadatas:
                        if rmMetadata in dst:
                            dst.pop(rmMetadata)

                    # append band with src and dst dictionaries
                    metaDict.append({'src': src, 'dst': dst})

    # create empty VRT dataset with geolocation only
    VRT.__init__(self, firstSubDataset, srcMetadata=gdalMetadata)

    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)

    # Create complex data bands from 'xxx_real' and 'xxx_imag' bands
    # using pixelfunctions
    rmBands = []
    for iBandNo in range(self.dataset.RasterCount):
        iBand = self.dataset.GetRasterBand(iBandNo + 1)
        iBandName = iBand.GetMetadataItem('name')
        # find real data band
        if iBandName.find("_real") != -1:
            realBandNo = iBandNo
            realBand = self.dataset.GetRasterBand(realBandNo + 1)
            realDtype = realBand.GetMetadataItem('DataType')
            bandName = iBandName.replace(iBandName.split('_')[-1], '')[0:-1]

            for jBandNo in range(self.dataset.RasterCount):
                jBand = self.dataset.GetRasterBand(jBandNo + 1)
                jBandName = jBand.GetMetadataItem('name')
                # find an imaginary data band corresponding to the real
                # data band and create complex data band from the bands
                if jBandName.find(bandName+'_imag') != -1:
                    imagBandNo = jBandNo
                    imagBand = self.dataset.GetRasterBand(imagBandNo + 1)
                    imagDtype = imagBand.GetMetadataItem('DataType')
                    dst = imagBand.GetMetadata()
                    dst['name'] = bandName
                    dst['PixelFunctionType'] = 'ComplexData'
                    dst['dataType'] = 10
                    src = [{'SourceFilename': fileNames[realBandNo],
                            'SourceBand': 1,
                            'DataType': realDtype},
                           {'SourceFilename': fileNames[imagBandNo],
                            'SourceBand': 1,
                            'DataType': imagDtype}]
                    self._create_band(src, dst)
                    self.dataset.FlushCache()
                    rmBands.append(realBandNo + 1)
                    rmBands.append(imagBandNo + 1)

    # Delete real and imaginary bands
    if len(rmBands) != 0:
        self.delete_bands(rmBands)

    if len(projection) == 0:
        # projection was not set automatically
        # get projection from GCPProjection
        projection = geoMetadata.get('GCPProjection', '')
    if len(projection) == 0:
        # no projection was found in dataset or metadata:
        # generate WGS84 by default
        projection = NSR().wkt
    # fix problem with MET.NO files where a, b given in m and XC/YC in km
    if ('UNIT["kilometre"' in projection and
            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]' in projection):
        projection = projection.replace(
            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]', '')
    # set projection
    self.dataset.SetProjection(self.repare_projection(projection))

    # check if GCPs were added from input dataset
    gcps = firstSubDataset.GetGCPs()
    gcpProjection = firstSubDataset.GetGCPProjection()

    # if no GCPs in input dataset: try to add GCPs from metadata
    if not gcps:
        gcps = self.add_gcps_from_metadata(geoMetadata)
    # if yet no GCPs: try to add GCPs from variables
    if not gcps:
        gcps = self.add_gcps_from_variables(inputFileName)

    if gcps:
        if len(gcpProjection) == 0:
            # get GCP projection and repare
            gcpProjection = self.repare_projection(geoMetadata.get('GCPProjection', ''))
        # add GCPs to dataset
        self.dataset.SetGCPs(gcps, gcpProjection)
        self.dataset.SetProjection('')
        self._remove_geotransform()

    # Find proper bands and insert GEOLOCATION ARRAY into dataset
    if len(xDatasetSource) > 0 and len(yDatasetSource) > 0:
        self.add_geolocationArray(GeolocationArray(xDatasetSource, yDatasetSource))
    elif not gcps:
        # if no GCPs found and not GEOLOCATION ARRAY set:
        # Set Nansat Geotransform if it is not set automatically
        geoTransform = self.dataset.GetGeoTransform()
        if len(geoTransform) == 0:
            geoTransformStr = geoMetadata.get('GeoTransform', '(0|1|0|0|0|0|1)')
            geoTransform = eval(geoTransformStr.replace('|', ','))
            self.dataset.SetGeoTransform(geoTransform)

    subMetadata = firstSubDataset.GetMetadata()

    ### GET START TIME from METADATA
    time_coverage_start = None
    if 'start_time' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['start_time'])
    elif 'start_date' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['start_date'])
    elif 'time_coverage_start' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['time_coverage_start'])

    ### GET END TIME from METADATA
    time_coverage_end = None
    if 'stop_time' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['stop_time'])
    elif 'stop_date' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['stop_date'])
    elif 'time_coverage_stop' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['time_coverage_stop'])
    elif 'end_time' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['end_time'])
    elif 'end_date' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['end_date'])
    elif 'time_coverage_end' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['time_coverage_end'])

    ### GET start time from time variable
    if (time_coverage_start is None and cfunitsInstalled and
            'time#standard_name' in subMetadata and
            subMetadata['time#standard_name'] == 'time' and
            'time#units' in subMetadata and
            'time#calendar' in subMetadata):
        # get data from netcdf data
        ncFile = netcdf_file(inputFileName, 'r')
        timeLength = ncFile.variables['time'].shape[0]
        timeValueStart = ncFile.variables['time'][0]
        timeValueEnd = ncFile.variables['time'][-1]
        ncFile.close()
        try:
            timeDeltaStart = Units.conform(timeValueStart,
                                           Units(subMetadata['time#units'],
                                                 calendar=subMetadata['time#calendar']),
                                           Units('days since 1950-01-01'))
        except ValueError:
            self.logger.error('calendar units are wrong: %s' %
                              subMetadata['time#calendar'])
        else:
            time_coverage_start = (datetime.datetime(1950, 1, 1) +
                                   datetime.timedelta(float(timeDeltaStart)))

            if timeLength > 1:
                timeDeltaEnd = Units.conform(timeValueEnd,
                                             Units(subMetadata['time#units'],
                                                   calendar=subMetadata['time#calendar']),
                                             Units('days since 1950-01-01'))
            else:
                timeDeltaEnd = timeDeltaStart + 1

            time_coverage_end = (datetime.datetime(1950, 1, 1) +
                                 datetime.timedelta(float(timeDeltaEnd)))

    ## finally set values of time_coverage start and end if available
    if time_coverage_start is not None:
        self.dataset.SetMetadataItem('time_coverage_start',
                                     time_coverage_start.isoformat())
    if time_coverage_end is not None:
        self.dataset.SetMetadataItem('time_coverage_end',
                                     time_coverage_end.isoformat())

    if 'sensor' not in gdalMetadata:
        self.dataset.SetMetadataItem('sensor', 'unknown')
    if 'satellite' not in gdalMetadata:
        self.dataset.SetMetadataItem('satellite', 'unknown')
    if 'source_type' not in gdalMetadata:
        self.dataset.SetMetadataItem('source_type', 'unknown')
    if 'platform' not in gdalMetadata:
        self.dataset.SetMetadataItem('platform', 'unknown')
    if 'instrument' not in gdalMetadata:
        self.dataset.SetMetadataItem('instrument', 'unknown')

    self.logger.info('Use generic mapper - OK!')