Example 1
    def __init__(self, filename, flip_gcp_line=False):
        #if not 'S1A' in filename or not 'S1B' in filename:
        #    raise WrongMapperError('%s: Not Sentinel 1A or 1B' %filename)
        if not self.dataset.GetMetadataItem('SATELLITE_IDENTIFIER') or \
                self.dataset.GetMetadataItem('SATELLITE_IDENTIFIER').lower() != 'sentinel-1':
            raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)
        if not IMPORT_SCIPY:
            raise NansatReadError(
                'Sentinel-1 data cannot be read because scipy is not installed'
            )

        self.input_filename = filename
        try:
            self.ds = Dataset(filename)
        except OSError:
            self.ds = Dataset(filename + '#fillmismatch')
            self.input_filename = filename + '#fillmismatch'
        try:
            lon = self.ds.variables['GCP_longitude_' +
                                    self.ds.polarisation[:2]]
        except (AttributeError, KeyError):
            raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)

        self._remove_geotransform()
        self._remove_geolocation()
        self.dataset.SetProjection('')
        self.dataset.SetGCPs(self.get_gcps(flip_gcp_line=flip_gcp_line),
                             NSR().wkt)
        self.add_incidence_angle_band()
        self.add_look_direction_band()
        self.set_gcmd_dif_keywords()
Example 2
    def __init__(self, *args, **kwargs):

        raise WrongMapperError('Mapper is under development...')

        fn = args[0]
        #ds = args[1] - None
        #mm = args[2] - None

        self.test_mapper(fn)
Example 3
    def test_mapper(self, filename):
        ''' Tests if filename fits mapper. May raise WrongMapperError '''
        baseURLmatch = False
        for baseURL in self.baseURLs:
            if filename.startswith(baseURL):
                baseURLmatch = True
                break
        if not baseURLmatch:
            raise WrongMapperError(filename)
Example 4
    def test_mapper(self, filename):
        """Tests if filename fits mapper. May raise WrongMapperError

            Parameters
            ----------
                filename: str
                    absolute url of input file

            Raises
            ------
                WrongMapperError: if input url does not match with list of
                    urls for a mapper
        """
        base_url_match = False
        # TODO: baseURLs var name should be changed here and in all mappers
        for base_url in self.baseURLs:
            if filename.startswith(base_url):
                base_url_match = True
                break
        if not base_url_match:
            raise WrongMapperError(filename)
Example 5
    def __init__(self, filename):
        if 'S1' not in filename:
            raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)
        if not IMPORT_SCIPY:
            raise NansatReadError(
                'Sentinel-1 data cannot be read because scipy is not installed'
            )

        self.input_filename = filename
        try:
            self.ds = Dataset(filename)
        except OSError:
            self.ds = Dataset(filename + '#fillmismatch')
            self.input_filename = filename + '#fillmismatch'

        self._remove_geotransform()
        self._remove_geolocation()
        self.dataset.SetProjection('')
        self.dataset.SetGCPs(self.get_gcps(), NSR().wkt)
        self.add_incidence_angle_band()
        self.add_look_direction_band()
        self.set_gcmd_dif_keywords()
Example 6
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 fast=False,
                 **kwargs):
        if kwargs.get('manifestonly', False):
            fast = True
            NansatFutureWarning(
                'manifestonly option will be deprecated. Use: fast=True')

        if os.path.split(filename.rstrip('/'))[1][:3] not in ['S1A', 'S1B']:
            raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)

        if not IMPORT_SCIPY:
            raise NansatReadError(
                'Sentinel-1 data cannot be read because scipy is not installed'
            )

        if zipfile.is_zipfile(filename):
            zz = zipfile.PyZipFile(filename)
            # Assuming the file names are consistent, the polarization
            # dependent data should be sorted equally such that we can use the
            # same indices consistently for all the following lists
            # THIS IS NOT THE CASE...
            mds_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'measurement/s1' in fn
            ]
            calibration_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/calibration/calibration-s1' in fn
            ]
            noise_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/calibration/noise-s1' in fn
            ]
            annotation_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/s1' in fn
            ]
            manifest_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'manifest.safe' in fn
            ]
            zz.close()
        else:
            mds_files = glob.glob('%s/measurement/s1*' % filename)
            calibration_files = glob.glob(
                '%s/annotation/calibration/calibration-s1*' % filename)
            noise_files = glob.glob('%s/annotation/calibration/noise-s1*' %
                                    filename)
            annotation_files = glob.glob('%s/annotation/s1*' % filename)
            manifest_files = glob.glob('%s/manifest.safe' % filename)

        if (not mds_files or not calibration_files or not noise_files
                or not annotation_files or not manifest_files):
            raise WrongMapperError(filename)

        # convert list of MDS files into dictionary. Keys - polarizations in upper case.
        mds_files = {
            os.path.basename(ff).split('-')[3].upper(): ff
            for ff in mds_files
        }
        polarizations = list(mds_files.keys())

        # read annotation files
        annotation_data = self.read_annotation(annotation_files)
        if not fast:
            annotation_data = Mapper.correct_geolocation_data(annotation_data)

        # read manifest file
        manifest_data = self.read_manifest_data(manifest_files[0])

        # Very fast constructor: no bands, only some metadata and geolocation
        self._init_empty(manifest_data, annotation_data)

        # skip adding bands in the fast mode and RETURN
        if fast:
            return

        # Open data files with GDAL
        gdalDatasets = {}
        for pol in polarizations:
            gdalDatasets[pol] = gdal.Open(mds_files[pol])

            if not gdalDatasets[pol]:
                raise WrongMapperError('%s: No Sentinel-1 datasets found' %
                                       mds_files[pol])

        # Check metadata to confirm it is Sentinel-1 L1
        metadata = gdalDatasets[polarizations[0]].GetMetadata()

        # create full size VRTs with incidenceAngle and elevationAngle
        annotation_vrts = self.vrts_from_arrays(
            annotation_data, ['incidenceAngle', 'elevationAngle'])
        self.band_vrts.update(annotation_vrts)

        # create full size VRTs with calibration LUT
        calibration_names = ['sigmaNought', 'betaNought']
        calibration_list_tag = 'calibrationVectorList'
        for calibration_file in calibration_files:
            pol = '_' + os.path.basename(calibration_file).split(
                '-')[4].upper()
            xml = self.read_vsi(calibration_file)
            calibration_data = self.read_calibration(xml, calibration_list_tag,
                                                     calibration_names, pol)
            calibration_vrts = self.vrts_from_arrays(calibration_data,
                                                     calibration_names, pol,
                                                     True, 1)
            self.band_vrts.update(calibration_vrts)

        # create full size VRTs with noise LUT
        for noise_file in noise_files:
            pol = '_' + os.path.basename(noise_file).split('-')[4].upper()
            xml = self.read_vsi(noise_file)
            if '<noiseVectorList' in xml:
                noise_list_tag = 'noiseVectorList'
                noise_name = 'noiseLut'
            elif '<noiseRangeVectorList' in xml:
                noise_list_tag = 'noiseRangeVectorList'
                noise_name = 'noiseRangeLut'
            noise_data = self.read_calibration(xml, noise_list_tag,
                                               [noise_name], pol)
            noise_vrts = self.vrts_from_arrays(noise_data, [noise_name], pol,
                                               True, 1)
            self.band_vrts.update(noise_vrts)

        #### Create metaDict: dict with metadata for all bands
        metaDict = []
        bandNumberDict = {}
        bnmax = 0
        for pol in polarizations:
            dsPath, dsName = os.path.split(mds_files[pol])
            name = 'DN_%s' % pol
            # A dictionary of band numbers is needed for the pixel function
            # bands further down. This is not the best solution. It would be
            # better to have a function in VRT that returns the number given a
            # band name. This function exists in Nansat but could perhaps be
            # moved to VRT? The existing nansat function could just call the
            # VRT one...
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            band = gdalDatasets[pol].GetRasterBand(1)
            dtype = band.DataType
            metaDict.append({
                'src': {
                    'SourceFilename': mds_files[pol],
                    'SourceBand': 1,
                    'DataType': dtype,
                },
                'dst': {
                    'name': name,
                },
            })
        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)
        '''
        Calibration should be performed as

        s0 = DN^2/sigmaNought^2,

        where sigmaNought is from e.g.
        annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml,
        and DN is the Digital Numbers in the tiff files.

        The noise should also be subtracted.

        See
        https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT

        The noise correction/subtraction is implemented in the independent
        package "sentinel1denoised"; see
        https://github.com/nansencenter/sentinel1denoised
        '''

        # Get look direction
        longitude, latitude = self.transform_points(
            calibration_data['pixel'].flatten(),
            calibration_data['line'].flatten())
        longitude.shape = calibration_data['pixel'].shape
        latitude.shape = calibration_data['pixel'].shape
        sat_heading = initial_bearing(longitude[:-1, :], latitude[:-1, :],
                                      longitude[1:, :], latitude[1:, :])
        look_direction = scipy.ndimage.interpolation.zoom(
            np.mod(sat_heading + 90, 360),
            (np.shape(longitude)[0] / (np.shape(longitude)[0] - 1.), 1))

        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT.from_array(look_direction_u)
        look_v_VRT = VRT.from_array(look_direction_v)
        lookVRT = VRT.from_lonlat(longitude, latitude)
        lookVRT.create_band([{
            'SourceFilename': look_u_VRT.filename,
            'SourceBand': 1
        }, {
            'SourceFilename': look_v_VRT.filename,
            'SourceBand': 1
        }], {'PixelFunctionType': 'UVToDirectionTo'})

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize,
                                          self.dataset.RasterYSize, 1)

        # Store VRTs so that they are accessible later
        self.band_vrts['look_u_VRT'] = look_u_VRT
        self.band_vrts['look_v_VRT'] = look_v_VRT
        self.band_vrts['lookVRT'] = lookVRT

        metaDict = []
        # Add bands to full size VRT
        for pol in polarizations:
            name = 'sigmaNought_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename': (self.band_vrts[name].filename),
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })
            name = 'noise_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename':
                    self.band_vrts['%s_%s' % (noise_name, pol)].filename,
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })

        name = 'look_direction'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        metaDict.append({
            'src': {
                'SourceFilename': self.band_vrts['lookVRT'].filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': name
            }
        })

        for pol in polarizations:
            dsPath, dsName = os.path.split(mds_files[pol])
            name = 'sigma0_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': [{
                    'SourceFilename': self.filename,
                    'SourceBand': bandNumberDict['DN_%s' % pol],
                }, {
                    'SourceFilename':
                    self.band_vrts['sigmaNought_%s' % pol].filename,
                    'SourceBand':
                    1
                }],
                'dst': {
                    'wkv':
                    'surface_backwards_scattering_coefficient_of_radar_wave',
                    'PixelFunctionType': 'Sentinel1Calibration',
                    'polarization': pol,
                    'suffix': pol,
                },
            })
            name = 'beta0_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': [{
                    'SourceFilename': self.filename,
                    'SourceBand': bandNumberDict['DN_%s' % pol]
                }, {
                    'SourceFilename':
                    self.band_vrts['betaNought_%s' % pol].filename,
                    'SourceBand':
                    1
                }],
                'dst': {
                    'wkv':
                    'surface_backwards_brightness_coefficient_of_radar_wave',
                    'PixelFunctionType': 'Sentinel1Calibration',
                    'polarization': pol,
                    'suffix': pol,
                },
            })

        self.create_bands(metaDict)

        # Add incidence angle as band
        name = 'incidence_angle'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        src = {
            'SourceFilename': self.band_vrts['incidenceAngle'].filename,
            'SourceBand': 1
        }
        dst = {'wkv': 'angle_of_incidence', 'name': name}
        self.create_band(src, dst)
        self.dataset.FlushCache()

        # Add elevation angle as band
        name = 'elevation_angle'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        src = {
            'SourceFilename': self.band_vrts['elevationAngle'].filename,
            'SourceBand': 1
        }
        dst = {'wkv': 'angle_of_elevation', 'name': name}
        self.create_band(src, dst)
        self.dataset.FlushCache()

        # Add sigma0_VV
        if 'VV' not in polarizations and 'HH' in polarizations:
            name = 'sigma0_VV'
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            src = [{
                'SourceFilename': self.filename,
                'SourceBand': bandNumberDict['DN_HH'],
            }, {
                'SourceFilename': (self.band_vrts['sigmaNought_HH'].filename),
                'SourceBand':
                1,
            }, {
                'SourceFilename': self.band_vrts['incidenceAngle'].filename,
                'SourceBand': 1
            }]
            dst = {
                'wkv':
                'surface_backwards_scattering_coefficient_of_radar_wave',
                'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
                'polarization': 'VV',
                'suffix': 'VV'
            }
            self.create_band(src, dst)
            self.dataset.FlushCache()
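The docstring in Example 6 describes the radiometric calibration as s0 = DN^2/sigmaNought^2, followed by subtraction of the noise LUT. Below is a minimal standalone NumPy sketch of that relation; the array names dn, sigma_nought_lut and noise_lut are hypothetical placeholders for the digital numbers and the calibration/noise LUTs resampled to the image grid, and the 'Sentinel1Calibration' pixel function is assumed to implement the same formula.

import numpy as np

def calibrate_sigma0(dn, sigma_nought_lut, noise_lut=None):
    # sigma0 = DN^2 / sigmaNought^2; the noise LUT, if given, is
    # subtracted from DN^2 before scaling (hypothetical helper,
    # not part of the mapper above)
    dn2 = dn.astype('float64') ** 2
    if noise_lut is not None:
        dn2 = np.clip(dn2 - noise_lut, 0.0, None)
    return dn2 / sigma_nought_lut ** 2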
Example 7
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
        ''' Create NCEP VRT '''

        if not gdalDataset:
            raise WrongMapperError(filename)

        geotransform = gdalDataset.GetGeoTransform()
        if (geotransform == (-0.25, 0.5, 0.0, 90.25, 0.0, -0.5) or
                geotransform == (-0.5, 1.0, 0.0, 90.5, 0.0, -1.0)):
            if gdalDataset.RasterCount == 4:
                srcBandId = {'temperature': 2,
                             'u-component': 3,
                             'v-component': 4}
            elif gdalDataset.RasterCount == 9:
                srcBandId = {'temperature': 6,
                             'u-component': 8,
                             'v-component': 9}
            else:
                raise WrongMapperError(filename)
        else:
            raise WrongMapperError(filename)  # Not water proof

        # Adding valid time from the GRIB file to dataset
        band = gdalDataset.GetRasterBand(srcBandId['u-component'])
        validTime = band.GetMetadata()['GRIB_VALID_TIME']
        time_isoformat = (datetime.datetime.utcfromtimestamp(
            int(validTime.strip().split(' ')[0])).isoformat())

        # Set band metadata time_iso_8601 for use in OpenWind
        time_iso_8601 = np.datetime64(parse(time_isoformat))
        metaDict = [{'src': {'SourceFilename': filename,
                             'SourceBand': srcBandId['u-component']},
                     'dst': {'wkv': 'eastward_wind',
                             'height': '10 m',
                             'time_iso_8601': time_iso_8601}},
                    {'src': {'SourceFilename': filename,
                             'SourceBand': srcBandId['v-component']},
                     'dst': {'wkv': 'northward_wind',
                             'height': '10 m',
                             'time_iso_8601': time_iso_8601}},
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': srcBandId['u-component'],
                              'DataType': (gdalDataset.GetRasterBand(srcBandId['u-component']).DataType)
                              },
                             {'SourceFilename': filename,
                              'SourceBand': srcBandId['v-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['v-component']).DataType
                              }],
                     'dst': {'wkv': 'wind_speed',
                             'PixelFunctionType': 'UVToMagnitude',
                             'name': 'windspeed',
                             'height': '2 m',
                             'time_iso_8601': time_iso_8601
                             }},
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': srcBandId['u-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['u-component']).DataType
                              },
                             {'SourceFilename': filename,
                              'SourceBand': srcBandId['v-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['v-component']).DataType
                              }],
                     'dst': {'wkv': 'wind_from_direction',
                             'PixelFunctionType': 'UVToDirectionFrom',
                             'name': 'winddirection',
                             'height': '2 m',
                             'time_iso_8601': time_iso_8601
                             }},
                    {'src': {'SourceFilename': filename,
                             'SourceBand': srcBandId['temperature']},
                     'dst': {'wkv': 'air_temperature',
                             'name': 'air_t',
                             'height': '2 m',
                             'time_iso_8601': time_iso_8601}
                     }]

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        self.dataset.SetMetadataItem('time_coverage_start', time_isoformat)

        self.dataset.SetMetadataItem('time_coverage_end', time_isoformat)

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('computer')
        ee = pti.get_gcmd_platform('ncep-gfs')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example 8
    def __init__(self, filename, gdalDataset, gdalMetadata, logLevel=30,
                 **kwargs):
        if filename[0:len(keywordBase)] != keywordBase:
            raise WrongMapperError(__file__,
                                   "Not Nora10 data converted from felt to netCDF")

        requestedTime = datetime.strptime(filename[len(keywordBase)+1:],
                                          '%Y%m%d%H%M')
        # For correct rounding
        fileTime = requestedTime + timedelta(minutes=30)
        fileTime = fileTime - timedelta(minutes=fileTime.minute)

        nc_file = (baseFolder + 'windspeed_10m' +
                   fileTime.strftime('/%Y/%m/') + 'windspeed_' +
                   fileTime.strftime('%Y%m%d%H.nc'))
        nc_file_winddir = (baseFolder + 'winddir_10m' +
                           fileTime.strftime('/%Y/%m/') +
                           'winddir_' + fileTime.strftime('%Y%m%d%H.nc'))

        # Would prefer to use geotransform, but ob_tran
        # (General Oblique Transformation) is not supported by GDAL
        # Keeping lines below for potential future use:
        #proj4 = gdalDataset.GetMetadataItem('projection_rotated_ll#proj4')
        #proj4 = '+proj=ob_tran +o_proj=longlat +lon_0=-40 +o_lat_p=22 +a=6367470 +e=0'
        #rlatmin = -13.25; rlatmax  = 26.65; deltarlat = 0.1
        #rlonmin = 5.75; rlonmax  = 30.45; deltarlon = 0.1

        # Needed due to presence of time dimension in the netCDF file
        gdal.SetConfigOption('GDAL_NETCDF_BOTTOMUP', 'No')

        # Read relevant arrays into memory
        g = gdal.Open('NETCDF:"' + nc_file + '":' + 'windspeed_10m')
        ws_10m = np.flipud(g.GetRasterBand(1).ReadAsArray())
        g = gdal.Open('NETCDF:"' + nc_file_winddir + '":' +
                        'wind_direction_10m')
        wd_10m = np.flipud(g.GetRasterBand(1).ReadAsArray())
        g = gdal.Open('NETCDF:"' + nc_file + '":' + 'latitude')
        lat = np.flipud(g.GetRasterBand(1).ReadAsArray())
        g = gdal.Open('NETCDF:"' + nc_file + '":' + 'longitude')
        lon = np.flipud(g.GetRasterBand(1).ReadAsArray())

        u10 = -ws_10m*np.sin(np.deg2rad(wd_10m))
        v10 = -ws_10m*np.cos(np.deg2rad(wd_10m))
        VRT_u10 = VRT(array=u10, lat=lat, lon=lon)
        VRT_v10 = VRT(array=v10, lat=lat, lon=lon)

        # Store band_vrts so that they are available after reprojection etc
        self.band_vrts = {'u_VRT': VRT_u10,
                          'v_VRT': VRT_v10}

        metaDict = []
        metaDict.append({'src': {'SourceFilename': VRT_u10.filename,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'eastward_wind',
                                 'name': 'eastward_wind'}})
        metaDict.append({'src': {'SourceFilename': VRT_v10.filename,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'northward_wind',
                                 'name': 'northward_wind'}})

        # Add pixel function with wind speed
        metaDict.append({
            'src': [{'SourceFilename': self.band_vrts['u_VRT'].filename,
                     'SourceBand': 1,
                     'DataType': 6},
                    {'SourceFilename': self.band_vrts['v_VRT'].filename,
                     'SourceBand': 1,
                     'DataType': 6}],
            'dst': {'wkv': 'wind_speed',
                    'name': 'windspeed',
                    'height': '10 m',
                    'PixelFunctionType': 'UVToMagnitude'}})

        # Add pixel function with wind direction
        metaDict.append({
            'src': [{'SourceFilename': self.band_vrts['u_VRT'].filename,
                     'SourceBand': 1,
                     'DataType': 6},
                    {'SourceFilename': self.band_vrts['v_VRT'].filename,
                     'SourceBand': 1,
                     'DataType': 6}],
            'dst': {'wkv': 'wind_from_direction',
                    'name': 'winddir',
                    'height': '10 m',
                    'PixelFunctionType': 'UVToDirectionFrom'}})

        # create empty VRT dataset with geolocation only
        self._init_from_lonlat(lon, lat)

        # add bands with metadata and corresponding values
        # to the empty VRT
        self.create_bands(metaDict)

        # Add time
        self.dataset.SetMetadataItem('time_coverage_start', fileTime.isoformat())
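Example 8 converts wind speed and direction into u and v components (u10 = -ws*sin(dir), v10 = -ws*cos(dir)) and then recovers speed and direction through the 'UVToMagnitude' and 'UVToDirectionFrom' pixel functions, as Example 7 does as well. A minimal NumPy sketch of that inverse conversion, assuming those pixel functions compute the vector magnitude and the meteorological "direction from" convention:

import numpy as np

def uv_to_speed_direction(u, v):
    # Speed is the vector magnitude; "direction from" is measured
    # clockwise from north, consistent with u = -speed*sin(dir) and
    # v = -speed*cos(dir) above (hypothetical helper).
    speed = np.hypot(u, v)
    direction_from = np.mod(np.degrees(np.arctan2(-u, -v)), 360.0)
    return speed, direction_from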
Example 9
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
        ''' Create NCEP VRT '''

        if not gdalDataset:
            raise WrongMapperError(filename)

        geotransform = gdalDataset.GetGeoTransform()
        if (geotransform != (-0.25, 0.5, 0.0, 90.25, 0.0, -0.5) or
                gdalDataset.RasterCount != 2):  # Not water proof
            raise WrongMapperError(filename)

        metaDict = [{'src': {'SourceFilename': filename,
                             'SourceBand': 1},
                     'dst': {'wkv': 'eastward_wind',
                             'height': '10 m'}},
                    {'src': {'SourceFilename': filename,
                             'SourceBand': 2},
                     'dst': {'wkv': 'northward_wind',
                             'height': '10 m'}},
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': 1,
                              'DataType': gdalDataset.GetRasterBand(1).DataType
                              },
                             {'SourceFilename': filename,
                              'SourceBand': 2,
                              'DataType': gdalDataset.GetRasterBand(2).DataType
                              }],
                     'dst': {'wkv': 'wind_speed',
                             'PixelFunctionType': 'UVToMagnitude',
                             'name': 'windspeed',
                             'height': '2 m'
                             }},
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': 1,
                              'DataType': gdalDataset.GetRasterBand(1).DataType
                              },
                             {'SourceFilename': filename,
                              'SourceBand': 2,
                              'DataType': gdalDataset.GetRasterBand(2).DataType
                              }],
                     'dst': {'wkv': 'wind_from_direction',
                             'PixelFunctionType': 'UVToDirectionFrom',
                             'name': 'winddirection',
                             'height': '2 m'
                             }
                     }]

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # Adding valid time from the GRIB file to dataset
        validTime = gdalDataset.GetRasterBand(1).GetMetadata()['GRIB_VALID_TIME']
        self.dataset.SetMetadataItem('time_coverage_start',
            (datetime.datetime.utcfromtimestamp(
                int(validTime.strip().split(' ')[0])).isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
            (datetime.datetime.utcfromtimestamp(
                int(validTime.strip().split(' ')[0])).isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('computer')
        ee = pti.get_gcmd_platform('ncep-gfs')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example 10
    def __init__(self, inputFileName, gdalDataset, gdalMetadata,
                 xmlonly=False,  **kwargs):
        ''' Create Radarsat2 VRT '''
        fPathName, fExt = os.path.splitext(inputFileName)

        if zipfile.is_zipfile(inputFileName):
            # Open zip file using VSI
            fPath, fName = os.path.split(fPathName)
            filename = '/vsizip/%s/%s' % (inputFileName, fName)
            if 'RS' not in fName[0:2]:
                raise WrongMapperError('%s: Provided data is not Radarsat-2'
                                       % fName)
            gdalDataset = gdal.Open(filename)
            gdalMetadata = gdalDataset.GetMetadata()
        else:
            filename = inputFileName

        # if it is not RADARSAT-2, return
        if not gdalMetadata or 'SATELLITE_IDENTIFIER' not in gdalMetadata:
            raise WrongMapperError(filename)
        elif gdalMetadata['SATELLITE_IDENTIFIER'] != 'RADARSAT-2':
            raise WrongMapperError(filename)

        if zipfile.is_zipfile(inputFileName):
            # Open product.xml to get additional metadata
            zz = zipfile.ZipFile(inputFileName)
            productXmlName = os.path.join(os.path.basename(
                inputFileName).split('.')[0], 'product.xml')
            productXml = zz.open(productXmlName).read()
        else:
            # Read product.xml to get additional metadata
            productXmlName = os.path.join(filename, 'product.xml')
            if not os.path.isfile(productXmlName):
                raise WrongMapperError(filename)
            productXml = open(productXmlName).read()

        if not IMPORT_SCIPY:
            raise NansatReadError('Radarsat-2 data cannot be read because scipy is not installed')

        # parse product.XML
        rs2_0 = Node.create(productXml)

        if xmlonly:
            self.init_from_xml(rs2_0, filename)
            return

        # Get additional metadata from product.xml
        rs2_1 = rs2_0.node('sourceAttributes')
        rs2_2 = rs2_1.node('radarParameters')
        if rs2_2['antennaPointing'].lower() == 'right':
            antennaPointing = 90
        else:
            antennaPointing = -90
        rs2_3 = rs2_1.node('orbitAndAttitude').node('orbitInformation')
        passDirection = rs2_3['passDirection']

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset)
        self.dataset.SetGCPs(self.dataset.GetGCPs(), NSR().wkt)

        # define dictionary of metadata and band specific parameters
        pol = []
        metaDict = []

        # Get the subdataset with calibrated sigma0 only
        for dataset in gdalDataset.GetSubDatasets():
            if dataset[1] == 'Sigma Nought calibrated':
                s0dataset = gdal.Open(dataset[0])
                s0datasetName = dataset[0][:]
                band = s0dataset.GetRasterBand(1)
                s0datasetPol = band.GetMetadata()['POLARIMETRIC_INTERP']
                for i in range(1, s0dataset.RasterCount+1):
                    iBand = s0dataset.GetRasterBand(i)
                    polString = iBand.GetMetadata()['POLARIMETRIC_INTERP']
                    suffix = polString
                    # The nansat data will be complex
                    # if the SAR data is of type 10
                    dtype = iBand.DataType
                    if dtype == 10:
                        # add intensity band
                        metaDict.append(
                            {'src': {'SourceFilename':
                                     ('RADARSAT_2_CALIB:SIGMA0:'
                                      + filename + '/product.xml'),
                                     'SourceBand': i,
                                     'DataType': dtype},
                             'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                                     'PixelFunctionType': 'intensity',
                                     'SourceTransferType': gdal.GetDataTypeName(dtype),
                                     'suffix': suffix,
                                     'polarization': polString,
                                     'dataType': 6}})
                        # modify suffix for adding the complex band below
                        suffix = polString+'_complex'
                    pol.append(polString)
                    metaDict.append(
                        {'src': {'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:'
                                                    + filename
                                                    + '/product.xml'),
                                 'SourceBand': i,
                                 'DataType': dtype},
                         'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                                 'suffix': suffix,
                                 'polarization': polString}})

            if dataset[1] == 'Beta Nought calibrated':
                b0dataset = gdal.Open(dataset[0])
                b0datasetName = dataset[0][:]
                for j in range(1, b0dataset.RasterCount+1):
                    jBand = b0dataset.GetRasterBand(j)
                    polString = jBand.GetMetadata()['POLARIMETRIC_INTERP']
                    if polString == s0datasetPol:
                        b0datasetBand = j

        ###############################
        # Add SAR look direction
        ###############################
        d = Domain(ds=gdalDataset)
        lon, lat = d.get_geolocation_grids(100)

        '''
        (GDAL?) Radarsat-2 data is stored with maximum latitude at first
        element of each column and minimum longitude at first element of each
        row (e.g. np.shape(lat)=(59,55) -> latitude maxima are at lat[0,:],
        and longitude minima are at lon[:,0])

        In addition, there is an interpolation error for direct estimate along
        azimuth. We therefore estimate the heading along range and add 90
        degrees to get the "satellite" heading.

        '''
        if str(passDirection).upper() == 'DESCENDING':
            sat_heading = initial_bearing(lon[:, :-1], lat[:, :-1],
                                          lon[:, 1:], lat[:, 1:]) + 90
        elif str(passDirection).upper() == 'ASCENDING':
            sat_heading = initial_bearing(lon[:, 1:], lat[:, 1:],
                                          lon[:, :-1], lat[:, :-1]) + 90
        else:
            print('Cannot decode pass direction: ' + str(passDirection))

        # Calculate SAR look direction
        look_direction = sat_heading + antennaPointing
        # Interpolate to regain lost row
        look_direction = np.mod(look_direction, 360)
        look_direction = scipy.ndimage.interpolation.zoom(
            look_direction, (1, 11./10.))
        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT.from_array(look_direction_u)
        look_v_VRT = VRT.from_array(look_direction_v)

        # Note: If incidence angle and look direction are stored in
        #       same VRT, access time is about twice as large
        lookVRT = VRT.from_lonlat(lon, lat)
        lookVRT.create_band(
            [{'SourceFilename': look_u_VRT.filename, 'SourceBand': 1},
             {'SourceFilename': look_v_VRT.filename, 'SourceBand': 1}],
            {'PixelFunctionType': 'UVToDirectionTo'})

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(gdalDataset.RasterXSize, gdalDataset.RasterYSize)
        # Store VRTs so that they are accessible later
        self.band_vrts['look_u_VRT'] = look_u_VRT
        self.band_vrts['look_v_VRT'] = look_v_VRT
        self.band_vrts['lookVRT'] = lookVRT

        # Add band to full sized VRT
        lookFileName = self.band_vrts['lookVRT'].filename
        metaDict.append({'src': {'SourceFilename': lookFileName,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'sensor_azimuth_angle',
                                 'name': 'look_direction'}})

        ###############################
        # Create bands
        ###############################
        self.create_bands(metaDict)

        ###################################################
        # Add derived band (incidence angle) calculated
        # using pixel function "BetaSigmaToIncidence":
        ###################################################
        src = [{'SourceFilename': b0datasetName,
                'SourceBand':  b0datasetBand,
                'DataType': dtype},
               {'SourceFilename': s0datasetName,
                'SourceBand': 1,
                'DataType': dtype}]
        dst = {'wkv': 'angle_of_incidence',
               'PixelFunctionType': 'BetaSigmaToIncidence',
               'SourceTransferType': gdal.GetDataTypeName(dtype),
               '_FillValue': -10000,   # NB: this is also hard-coded in
                                       #     pixelfunctions.c
               'dataType': 6,
               'name': 'incidence_angle'}

        self.create_band(src, dst)
        self.dataset.FlushCache()

        ###################################################################
        # Add sigma0_VV - pixel function of sigma0_HH and beta0_HH
        # incidence angle is calculated within pixel function
        # It is assumed that HH is the first band in sigma0 and
        # beta0 sub datasets
        ###################################################################
        if 'VV' not in pol and 'HH' in pol:
            s0datasetNameHH = pol.index('HH')+1
            src = [{'SourceFilename': s0datasetName,
                    'SourceBand': s0datasetNameHH,
                    'DataType': 6},
                   {'SourceFilename': b0datasetName,
                    'SourceBand': b0datasetBand,
                    'DataType': 6}]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'Sigma0HHBetaToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self.create_band(src, dst)
            self.dataset.FlushCache()

        ############################################
        # Add SAR metadata
        ############################################
        if antennaPointing == 90:
            self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT')
        if antennaPointing == -90:
            self.dataset.SetMetadataItem('ANTENNA_POINTING', 'LEFT')
        self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                     str(passDirection).upper())

        # set valid time
        self.dataset.SetMetadataItem('time_coverage_start',
                                     (parse(gdalMetadata['FIRST_LINE_TIME']).
                                      isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                                     (parse(gdalMetadata['LAST_LINE_TIME']).
                                      isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument("C-SAR")
        ee = pti.get_gcmd_platform('radarsat-2')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
        self.dataset.SetMetadataItem('entry_title', 'Radarsat-2 SAR')
        self.dataset.SetMetadataItem('provider', 'MDA/GSI')
        self.dataset.SetMetadataItem('dataset_parameters', json.dumps(
                                     ['surface_backwards_scattering_coefficient_of_radar_wave']))
        self.dataset.SetMetadataItem('entry_id', os.path.basename(filename))
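Examples 6 and 10 both decompose the look direction into sine/cosine components before resizing, to avoid interpolation artefacts where the angle wraps from 360 back to 0, and then recombine the components with the 'UVToDirectionTo' pixel function. A small sketch of that round trip, assuming plain NumPy arrays and scipy.ndimage.zoom for the resampling (hypothetical helper, not part of either mapper):

import numpy as np
from scipy.ndimage import zoom

def resize_angle(angle_deg, factors):
    # Interpolating an angle field directly fails around the 0 <-> 360
    # wrap; interpolate its unit-vector components instead and recombine.
    u = zoom(np.sin(np.deg2rad(angle_deg)), factors)
    v = zoom(np.cos(np.deg2rad(angle_deg)), factors)
    return np.mod(np.degrees(np.arctan2(u, v)), 360.0)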
Example 11
    def __init__(self, filename, gdalDataset, gdalMetadata, minQual=4,
                 **kwargs):
        ''' Create VRT '''

        if 'AVHRR_Pathfinder-PFV5.2' not in filename:
            raise WrongMapperError(filename)

        subDatasets = gdalDataset.GetSubDatasets()
        metaDict = []
        qualName = ''

        for subDataset in subDatasets:
            subDatasetName = subDataset[0].split(':')[2]

            if '//' in subDatasetName:
                h5Style = True
            else:
                h5Style = False

            if h5Style:
                subDatasetName = subDatasetName.replace('//', '')

            if subDatasetName == 'quality_level':
                qualName = subDataset[0]

            subGDALDataset = vrt.gdal.Open(subDataset[0])
            subGDALMetadata = subGDALDataset.GetRasterBand(1).GetMetadata()
            if h5Style:
                metaPrefix = subDatasetName + '_'
            else:
                metaPrefix = ''

            subWKV = subGDALMetadata.get(metaPrefix + 'standard_name', '')
            subScaleRatio = subGDALMetadata.get(metaPrefix + 'scale_factor',
                                                '1')
            subScaleOffset = subGDALMetadata.get(metaPrefix + 'add_offset',
                                                 '0')
            metaEntry = {'src': {'SourceFilename': subDataset[0],
                                 'sourceBand': 1,
                                 'ScaleRatio': subScaleRatio,
                                 'ScaleOffset': subScaleOffset},
                         'dst': {'wkv': subWKV}}

            # append band metadata to metaDict
            metaDict.append(metaEntry)

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(subGDALDataset)

        # add mask
        if qualName != '':
            qualDataset = vrt.gdal.Open(qualName)
            qualArray = qualDataset.ReadAsArray()
            qualArray[qualArray < minQual] = 1
            qualArray[qualArray >= minQual] = 128
            self.band_vrts = {'maskVRT': vrt.VRT(array=qualArray.astype('int8'))}
            metaDict.append({'src': {'SourceFilename': (self.
                                                        band_vrts['maskVRT'].
                                                        filename),
                                     'SourceBand': 1,
                                     'SourceType': 'SimpleSource',
                                     'DataType': 1},
                             'dst': {'name': 'mask'}})

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # append fixed projection and geotransform
        self.dataset.SetProjection(NSR().wkt)
        self.dataset.SetGeoTransform((-180, 0.0417, 0, 90, 0, -0.0417))

        # set time_coverage_start
        if h5Style:
            startTimeKey = 'start_time'
        else:
            startTimeKey = 'NC_GLOBAL#start_time'
        self.dataset.SetMetadataItem('time_coverage_start', subGDALDataset.GetMetadataItem(startTimeKey))
Example 12
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 product_type='RVL',
                 GCP_COUNT=10,
                 **kwargs):
        '''
        Parameters
        ----------
        product_type: string
            Sentinel-1 level-2 ocean product type/component, i.e. ocean swell
            spectra (OSW), ocean wind field (OWI), or radial surface velocity
            (RVL) (RVL is the default)
        GCP_COUNT : int
            number of GCPs along each dimension
        '''
        fPathName, fExt = os.path.splitext(filename)

        # List of Sentinel-1 level-2 components
        unwanted_product_components = ['osw', 'owi', 'rvl']
        # Remove requested 'product_type' from list of unwanted
        unwanted_product_components.pop(
            unwanted_product_components.index(product_type.lower()))

        # Check if it is Sentinel-1 (or ASAR) level-2 (in S1 data format)
        if not gdalMetadata or 'NC_GLOBAL' not in gdalMetadata:
            raise WrongMapperError(filename)
        else:
            title = gdalMetadata['NC_GLOBAL#TITLE']

        # Raise error if it is not Sentinel-1 format
        if not ('Sentinel-1' in title or 'ASA' in title):
            raise WrongMapperError(filename)

        metadata = {}
        for key, val in gdalMetadata.items():
            new_key = key.split('#')[-1]
            metadata[new_key] = val

        subDatasets = gdalDataset.GetSubDatasets()
        filenames = [f[0] for f in subDatasets]

        rm_bands = []
        # Find all data that is not relevant for the selected product type
        # and get bands of longitude, latitude and zero doppler time
        for i, f in enumerate(filenames):
            if f.split(':')[-1][:3] in unwanted_product_components:
                rm_bands.append(i)
            if 'Lon' in f.split(':')[-1]:
                lon_ds = gdal.Open(f)
                rm_bands.append(i)
            if 'Lat' in f.split(':')[-1]:
                lat_ds = gdal.Open(f)
                rm_bands.append(i)
            if 'ZeroDopplerTime' in f.split(':')[-1]:
                zdt_ds = gdal.Open(f)
                rm_bands.append(i)
        # Remove bands in rm_bands from the list of bands to add to the Nansat
        # object
        filenames = [f for i, f in enumerate(filenames) if i not in rm_bands]
        #     (
        # 'Lon' in f.split(':')[-1] or
        # 'Lat' in f.split(':')[-1] or
        # 'ZeroDopplerTime' in f.split(':')[-1] )]

        # create empty VRT dataset
        self._init_from_gdal_dataset(gdal.Open(subDatasets[0][0]),
                                     metadata=metadata)

        # The zero Doppler time grid is 3-dimensional - the last dimension is a
        # char array with the time as year, month, day, etc.
        # Will not bother with it yet...
        #for iBand in range(zdt_ds.RasterCount):
        #    subBand = zdt_ds.GetRasterBand(iBand+1)

        XSize = lon_ds.RasterXSize
        YSize = lon_ds.RasterYSize

        # get projection from the lon and lat datasets
        longitude = lon_ds.ReadAsArray()
        latitude = lat_ds.ReadAsArray()
        # estimate step of GCPs
        step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
        step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))
        self.logger.debug('gcpCount: >%s<, %d %d %f %d %d', title,
                          latitude.shape[0], latitude.shape[1], GCP_COUNT,
                          step0, step1)

        # estimate pixel/line step of the geolocation arrays
        pixelStep = 1
        lineStep = 1
        self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))
        # generate list of GCPs
        dx = .5
        dy = .5
        gcps = []
        k = 0
        for i0 in range(0, latitude.shape[0], step0):
            for i1 in range(0, latitude.shape[1], step1):
                # create GCP with X,Y,pixel,line from lat/lon matrices
                lon = float(longitude[i0, i1])
                lat = float(latitude[i0, i1])
                if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                    gcp = gdal.GCP(lon, lat, 0, i1 * pixelStep + dx,
                                   i0 * lineStep + dy)
                    self.logger.debug('%d %d %d %f %f', k, gcp.GCPPixel,
                                      gcp.GCPLine, gcp.GCPX, gcp.GCPY)
                    gcps.append(gcp)
                    k += 1

        # append GCPs and lat/lon projection to the vsiDataset
        self.dataset.SetGCPs(gcps, NSR().wkt)

        # define band specific parameters
        metaDict = []
        geoFileDict = {}
        xDatasetSource = ''
        yDatasetSource = ''
        for i, filename in enumerate(filenames):
            band = gdal.Open(filename)

            # check that the band size is the same size as the latitude and
            # longitude grids
            if (band.RasterXSize != XSize or band.RasterYSize != YSize):
                raise IndexError(('Size of sub-dataset is different from size '
                                  'of longitude and latitude grids'))

            bandMetadata = band.GetMetadata()
            # generate src metadata
            src = {'SourceFilename': filename, 'SourceBand': 1}

            # Generate dst metadata
            short_name = filename.split(':')[-1]
            dst = {
                'name': short_name,
                'short_name': short_name,
                'long_name': bandMetadata[short_name + '#long_name'],
                'units': bandMetadata[short_name + '#units'],
                #'wkv': ,
            }

            # append band with src and dst dictionaries
            metaDict.append({'src': src, 'dst': dst})

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        metaDict = []
        for i in range(self.dataset.RasterCount):
            if 'Nrcs' in self.dataset.GetRasterBand(i +
                                                    1).GetMetadata()['name']:
                metaDict.append({
                    'src': {
                        'SourceFilename': (self.dataset.GetRasterBand(
                            i + 1).GetMetadata()['SourceFilename']),
                        'SourceBand':
                        1
                    },
                    'dst': {
                        'short_name':
                        'sigma0',
                        'wkv':
                        'surface_backwards_scattering_coefficient_of_radar_wave',
                        'PixelFunctionType':
                        'dB2pow',
                        'polarization':
                        (self.dataset.GetMetadata()['POLARISATION']),
                        'suffix':
                        self.dataset.GetMetadata()['POLARISATION'],
                        'dataType':
                        6,
                    }
                })

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # set time
        self.dataset.SetMetadataItem(
            'time_coverage_start',
            parse(self.dataset.GetMetadata()
                  ['SOURCE_ACQUISITION_UTC_TIME']).isoformat())
Example 13
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 GCP_COUNT0=5,
                 GCP_COUNT1=20,
                 pixelStep=1,
                 lineStep=1,
                 **kwargs):
        ''' Create VIIRS VRT '''

        if 'GMTCO_npp_' not in filename:
            raise WrongMapperError(filename)
        ifiledir = os.path.split(filename)[0]
        ifiles = glob.glob(os.path.join(ifiledir, 'SVM??_npp_d*_obpg_ops.h5'))
        ifiles.sort()

        if not IMPORT_SCIPY:
            raise NansatReadError(
                'VIIRS data cannot be read because scipy is not installed')

        viirsWavelengths = [
            None, 412, 445, 488, 555, 672, 746, 865, 1240, 1378, 1610, 2250,
            3700, 4050, 8550, 10736, 12013
        ]

        # create empty VRT dataset with geolocation only
        xDatasetSource = (
            'HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Longitude' % filename)
        xDatasetBand = 1
        xDataset = gdal.Open(xDatasetSource)
        self._init_from_gdal_dataset(xDataset)

        metaDict = []
        for ifile in ifiles:
            ifilename = os.path.split(ifile)[1]
            print(ifilename)
            bNumber = int(ifilename[3:5])
            print(bNumber)
            bWavelength = viirsWavelengths[bNumber]
            print(bWavelength)
            SourceFilename = (
                'HDF5:"%s"://All_Data/VIIRS-M%d-SDR_All/Radiance' %
                (ifile, bNumber))
            print(SourceFilename)
            metaEntry = {
                'src': {
                    'SourceFilename': SourceFilename,
                    'SourceBand': 1
                },
                'dst': {
                    'wkv': 'toa_outgoing_spectral_radiance',
                    'wavelength': str(bWavelength),
                    'suffix': str(bWavelength)
                }
            }
            metaDict.append(metaEntry)

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        xVRTArray = xDataset.ReadAsArray()
        xVRTArray = gaussian_filter(xVRTArray, 5).astype('float32')
        xVRT = VRT.from_array(xVRTArray)

        yDatasetSource = (
            'HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Latitude' % filename)
        yDatasetBand = 1
        yDataset = gdal.Open(yDatasetSource)
        yVRTArray = yDataset.ReadAsArray()
        yVRTArray = gaussian_filter(yVRTArray, 5).astype('float32')
        yVRT = VRT.from_array(yVRTArray)

        # estimate pixel/line step
        self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))

        # ==== ADD GCPs and Projection ====
        # get lat/lon matrices
        longitude = xVRT.dataset.GetRasterBand(1).ReadAsArray()
        latitude = yVRT.dataset.GetRasterBand(1).ReadAsArray()

        # estimate step of GCPs
        step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT0))
        step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT1))
        self.logger.debug('gcpCount: %d %d %d %d, %d %d', latitude.shape[0],
                          latitude.shape[1], GCP_COUNT0, GCP_COUNT1, step0,
                          step1)

        # generate list of GCPs
        gcps = []
        k = 0
        for i0 in range(0, latitude.shape[0], step0):
            for i1 in range(0, latitude.shape[1], step1):
                # create GCP with X,Y,pixel,line from lat/lon matrices
                lon = float(longitude[i0, i1])
                lat = float(latitude[i0, i1])
                if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                    gcp = gdal.GCP(lon, lat, 0, i1 * pixelStep, i0 * lineStep)
                    self.logger.debug('%d %d %d %f %f', k, gcp.GCPPixel,
                                      gcp.GCPLine, gcp.GCPX, gcp.GCPY)
                    gcps.append(gcp)
                    k += 1

        # append GCPs and lat/lon projection to the vsiDataset
        self.dataset.SetGCPs(gcps, NSR().wkt)

        # remove geolocation array
        self._remove_geolocation()
Example 14
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 geolocation=False,
                 zoomSize=500,
                 step=1,
                 **kwargs):
        ''' Create MER2 VRT

        Parameters
        -----------
        filename : string
        gdalDataset : gdal dataset
        gdalMetadata : gdal metadata
        geolocation : bool (default is False)
            if True, add gdal geolocation
        zoomSize: int (used in envisat.py)
            size, to which the ADS array will be zoomed using scipy
            array of this size will be stored in memory
        step: int (used in envisat.py)
            step of pixel and line in GeolocationArrays. lat/lon grids are
            generated at that step
        '''

        self.setup_ads_parameters(filename, gdalMetadata)

        if self.product[0:9] not in ("MER_FRS_2", "MER_RR__2"):
            raise WrongMapperError(filename)

        metaDict = [{
            'src': {
                'SourceFilename': filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '412'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 2
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '443'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 3
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '490'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 4
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '510'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 5
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '560'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 6
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '620'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 7
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '665'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 8
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '680'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 9
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '708'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 10
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '753'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 11
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '761'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 12
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '778'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 13
            },
            'dst': {
                'wkv':
                'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air',
                'wavelength': '864'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 15
            },
            'dst': {
                'wkv': 'mass_concentration_of_chlorophyll_a_in_sea_water',
                'suffix': '1_log',
                'case': 'I'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 16
            },
            'dst': {
                'wkv':
                'volume_absorption_coefficient_of_radiative_flux_in_sea_water_due_to_dissolved_organic_matter',
                'suffix': '2_log',
                'case': 'II'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 17
            },
            'dst': {
                'wkv': 'mass_concentration_of_suspended_matter_in_sea_water',
                'suffix': '2_log',
                'case': 'II'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 18
            },
            'dst': {
                'wkv': 'mass_concentration_of_chlorophyll_a_in_sea_water',
                'suffix': '2_log',
                'case': 'II'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 22
            },
            'dst': {
                'wkv': 'quality_flags',
                'suffix': 'l2'
            }
        }]

        # use the wavelength as band-name suffix for the reflectance bands
        for bandDict in metaDict:
            if 'wavelength' in bandDict['dst']:
                bandDict['dst']['suffix'] = bandDict['dst']['wavelength']

        # get GADS from header
        scales = self.read_scaling_gads(list(range(7, 20)) + [20, 21, 22, 20])
        offsets = self.read_scaling_gads(list(range(33, 46)) + [46, 47, 48, 46])
        # set scale/offset in the band metadata (all bands except the quality flags)
        for i, bandDict in enumerate(metaDict[:-1]):
            bandDict['src']['ScaleRatio'] = str(scales[i])
            bandDict['src']['ScaleOffset'] = str(offsets[i])
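        # ScaleRatio/ScaleOffset in 'src' are passed on to the VRT band source,
        # so reading a band is assumed to return raw * ScaleRatio + ScaleOffset
        # in physical units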

        # add variables in linear units, derived from the log10-scaled bands
        metaDict += [{
            'src': {
                'SourceFilename': filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'mass_concentration_of_chlorophyll_a_in_sea_water',
                'suffix': '1',
                'case': 'I',
                'expression': 'np.power(10., self["chlor_a_1_log"])'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'mass_concentration_of_chlorophyll_a_in_sea_water',
                'suffix': '2',
                'case': 'II',
                'expression': 'np.power(10., self["chlor_a_2_log"])'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv':
                'volume_absorption_coefficient_of_radiative_flux_in_sea_water_due_to_dissolved_organic_matter',
                'suffix': '2',
                'case': 'II',
                'expression': 'np.power(10., self["cdom_a_2_log"])'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'mass_concentration_of_suspended_matter_in_sea_water',
                'suffix': '2',
                'case': 'II',
                'expression': 'np.power(10., self["tsm_2_log"])'
            }
        }]
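        # the 'expression' entries above are evaluated when the band is read,
        # reconstructing linear concentrations from the log10-scaled bands,
        # e.g. chlor_a_1 = 10 ** chlor_a_1_log (assuming nansat evaluates
        # 'expression' as a pixel function over the named bands)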

        # get list with resized VRTs from ADS
        self.band_vrts = {
            'adsVRTs':
            self.get_ads_vrts(gdalDataset, [
                'sun zenith angles', 'sun azimuth angles', 'zonal winds',
                'meridional winds'
            ],
                              zoomSize=zoomSize,
                              step=step)
        }

        # add bands from the ADS VRTs
        for adsVRT in self.band_vrts['adsVRTs']:
            metaDict.append({
                'src': {
                    'SourceFilename': adsVRT.filename,
                    'SourceBand': 1
                },
                'dst': {
                    'name':
                    (adsVRT.dataset.GetRasterBand(1).GetMetadataItem('name')),
                    'units':
                    (adsVRT.dataset.GetRasterBand(1).GetMetadataItem('units'))
                }
            })

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # set time
        self._set_envisat_time(gdalMetadata)

        # add geolocation arrays
        if geolocation:
            self.add_geolocation_from_ads(gdalDataset,
                                          zoomSize=zoomSize,
                                          step=step)

        self.dataset.SetMetadataItem('sensor', 'MERIS')
        self.dataset.SetMetadataItem('satellite', 'ENVISAT')
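A minimal usage sketch, assuming the mapper above is registered with Nansat and a MERIS Level-2 product is available locally; the file name and the band name 'chlor_a_1' are placeholders derived from the suffix convention in metaDict, not verified names.

from nansat import Nansat

# placeholder path to a MERIS FRS Level-2 product
n = Nansat('MER_FRS_2PNPDK_sample.N1')

print(n.get_metadata('sensor'))   # expected: 'MERIS'
chl = n['chlor_a_1']              # linear chlorophyll, i.e. 10 ** chlor_a_1_log
print(chl.shape, chl.mean())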