Example #1
    def set_gcmd_dif_keywords(self):
        mditem = 'entry_title'
        if not self.dataset.GetMetadataItem(mditem):
            self.dataset.SetMetadataItem(mditem, self.input_filename)
        mditem = 'data_center'
        if not self.dataset.GetMetadataItem(mditem):
            self.dataset.SetMetadataItem(
                mditem, json.dumps(pti.get_gcmd_provider('NO/MET')))
        mditem = 'ISO_topic_category'
        if not self.dataset.GetMetadataItem(mditem):
            self.dataset.SetMetadataItem(
                mditem,
                pti.get_iso19115_topic_category(
                    'Imagery/Base Maps/Earth Cover')['iso_topic_category'])

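        # Choose the GCMD platform entry matching the Sentinel-1 unit recorded in the
        # product metadata: MISSION_ID 'S1A' selects Sentinel-1A, anything else falls
        # back to Sentinel-1B.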
        mm = pti.get_gcmd_instrument('sar')
        if self.ds.MISSION_ID == 'S1A':
            ee = pti.get_gcmd_platform('sentinel-1a')
        else:
            ee = pti.get_gcmd_platform('sentinel-1b')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        self.dataset.SetMetadataItem(
            'time_coverage_start',
            self.dataset.GetMetadataItem('ACQUISITION_START_TIME'))
        self.dataset.SetMetadataItem(
            'time_coverage_end',
            self.dataset.GetMetadataItem('ACQUISITION_STOP_TIME'))
Example #2
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 date=None,
                 ds=None,
                 bands=None,
                 cachedir=None,
                 **kwargs):
        ''' Create NCEP VRT
        Parameters:
            filename : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset

        '''
        self.test_mapper(filename)
        fname = os.path.split(filename)[1]
        date = '%s-%s-%sT%s:00Z' % (fname[0:4], fname[4:6], fname[6:8],
                                    fname[8:10])

        self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands,
                        cachedir)

        # add instrument and platform
        pi = []
        if 'jason-1' in self.ds.sensor.lower():
            pi.append([
                pti.get_gcmd_platform('jason-1'),
                pti.get_gcmd_instrument('poseidon-2')
            ])
        if 'envisat' in self.ds.sensor.lower():
            pi.append([
                pti.get_gcmd_platform('envisat'),
                pti.get_gcmd_instrument('ra-1')
            ])
        if 'jason-2' in self.ds.sensor.lower():
            pi.append([
                pti.get_gcmd_platform('ostm/jason-2'),
                pti.get_gcmd_instrument('poseidon-3')
            ])

        if pi:
            self.dataset.SetMetadataItem('platform/instrument', json.dumps(pi))
        else:
            warnings.warn('Could not provide source metadata - please check and submit a git ' \
                    'issue if necessary')

        self.dataset.SetMetadataItem('Data Center', 'FR/IFREMER/CERSAT')
        self.dataset.SetMetadataItem('Entry Title', 'GLOBCURRENT')
Example #3
    def __init__(self, filename, gdalDataset, gdalMetadata, date=None, ds=None, bands=None,
                 cachedir=None, **kwargs):
        ''' Create NCEP VRT
        Parameters:
            filename : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset

        '''
        self.test_mapper(filename)
        ds = Dataset(filename)
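        # Build the source projection from the grid-mapping variable's proj4 string,
        # appending the x-coordinate units so NSR constructs the CRS in the right units.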
        proj4str = '%s +units=%s' % (ds.variables['Polar_Stereographic_Grid'].proj4_string,
                                     ds.variables['xc'].units)
        self.srcDSProjection = NSR(proj4str).wkt
        if filename[-3:] == '.nc':
            date = self.t0 + dt.timedelta(seconds=ds.variables['time'][0])
            date = date.strftime('%Y-%m-%d')

        self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

        # add instrument and platform
        mm = pti.get_gcmd_instrument('Passive Remote Sensing')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #4
    def has_metadata_platform(self, n):
        meta1 = json.loads(n.get_metadata('platform'))
        meta1ShortName = meta1['Short_Name']
        meta2 = pti.get_gcmd_platform(meta1ShortName)

        assert type(meta1) == dict
        assert meta1 == meta2
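
For reference, a minimal sketch of what this assertion relies on, assuming pti is the pythesint package used throughout these examples: get_gcmd_platform returns a plain dict whose Short_Name key can be used to look the same entry up again, so the value survives the JSON round trip used by the mappers above.

import json
import pythesint as pti

platform = pti.get_gcmd_platform('sentinel-1a')        # plain dict from the GCMD platform keywords
same = pti.get_gcmd_platform(platform['Short_Name'])   # looking it up by its own short name returns the same entry
assert json.loads(json.dumps(platform)) == same        # the dict survives the JSON round trip unchanged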
Example #5
    def _init_empty(self, manifest_data, annotation_data):
        """ Fast initialization from minimum of information

        Parameters
        ----------
        manifest_data : dict
            data from the manifest file (time_coverage_start, etc)
        annotation_data : dict
            data from annotation file (longitude, latitude, x_size, etc)

        Note
        ----
            Calls VRT.__init__, Adds GCPs, metadata
        """
        # init empty dataset
        super(Mapper, self).__init__(annotation_data['x_size'], annotation_data['y_size'])
        # add GCPs from (corrected) geolocation data
        gcps = Mapper.create_gcps(annotation_data['longitude'],
                                  annotation_data['latitude'],
                                  annotation_data['height'],
                                  annotation_data['pixel'],
                                  annotation_data['line'])
        self.dataset.SetGCPs(gcps, NSR().wkt)
        # set metadata
        self.dataset.SetMetadataItem('time_coverage_start', manifest_data['time_coverage_start'])
        self.dataset.SetMetadataItem('time_coverage_end', manifest_data['time_coverage_end'])
        platform_name = manifest_data['platform_family_name'] + manifest_data['platform_number']
        self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform(platform_name)))
        self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
        self.dataset.SetMetadataItem('entry_title', platform_name + ' SAR')
        self.dataset.SetMetadataItem('data_center', json.dumps(pti.get_gcmd_provider('ESA/EO')))
        self.dataset.SetMetadataItem('iso_topic_category', json.dumps(pti.get_iso19115_topic_category('Oceans')))
        self.dataset.SetMetadataItem('summary', platform_name + ' SAR data')
        self.dataset.FlushCache()
Example #6
    def init_from_manifest_only(self, manifestXML, annotXML, missionName):
        ''' Create fake VRT and add metadata only from the manifest.safe '''
        X, Y, lon, lat, inc, ele, numberOfSamples, numberOfLines = self.read_geolocation_lut(
            annotXML)

        VRT.__init__(self,
                     srcRasterXSize=numberOfSamples,
                     srcRasterYSize=numberOfLines)
        doc = ET.fromstring(manifestXML)

        gcps = []
        for i in range(len(X)):
            gcps.append(gdal.GCP(lon[i], lat[i], 0, X[i], Y[i]))

        self.dataset.SetGCPs(gcps, NSR().wkt)
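        # The XPath-style predicate ".//*[{ns}tag]" selects elements that have a child
        # with the namespaced startTime/stopTime tag; [0][0].text then reads the text
        # of that first matching element's first child.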
        self.dataset.SetMetadataItem(
            'time_coverage_start',
            doc.findall(".//*[{http://www.esa.int/safe/sentinel-1.0}startTime]"
                        )[0][0].text)
        self.dataset.SetMetadataItem(
            'time_coverage_end',
            doc.findall(".//*[{http://www.esa.int/safe/sentinel-1.0}stopTime]")
            [0][0].text)
        self.dataset.SetMetadataItem(
            'platform', json.dumps(pti.get_gcmd_platform(missionName)))
        self.dataset.SetMetadataItem(
            'instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
        self.dataset.SetMetadataItem('Entry Title', missionName + ' SAR')
        self.dataset.SetMetadataItem('Data Center', 'ESA/EO')
        self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
        self.dataset.SetMetadataItem('Summary', missionName + ' SAR data')
Example #7
def get_gcmd_keywords_mapping():
    gcmd_keywords_mapping = {
        'MERCATOR PSY4QV3R1': {
            'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
            'platform': json.dumps(pti.get_gcmd_platform('models')),
        },
        'NERSC-HYCOM model fields': {
            'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
            'platform': json.dumps(pti.get_gcmd_platform('models')),
        },
        'MERCATOR BIOMER4V1R2': {
            'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
            'platform': json.dumps(pti.get_gcmd_platform('models')),
        },
    }
    return gcmd_keywords_mapping
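
A minimal sketch of how this mapping might be consumed; the lookup key (NC_GLOBAL#title) and the dataset/metadata objects are assumptions, since the calling code is not shown in these examples.

def set_model_keywords(dataset, metadata):
    """Hypothetical helper: copy model GCMD keywords onto a GDAL dataset whose
    NC_GLOBAL#title matches one of the known model products."""
    mapping = get_gcmd_keywords_mapping()
    title = metadata.get('NC_GLOBAL#title', '')
    for product, keywords in mapping.items():
        if product in title:
            dataset.SetMetadataItem('instrument', keywords['instrument'])
            dataset.SetMetadataItem('platform', keywords['platform'])
            break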
Example #8
    def __init__(self,
                 fileName,
                 gdalDataset,
                 gdalMetadata,
                 date=None,
                 ds=None,
                 bands=None,
                 cachedir=None,
                 **kwargs):
        ''' Create NCEP VRT
        Parameters:
            fileName : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset

        '''
        self.test_mapper(fileName)
        ds = Dataset(fileName)
        proj4str = '%s +units=%s' % (ds.variables['Polar_Stereographic_Grid'].
                                     proj4_string, ds.variables['xc'].units)
        self.srcDSProjection = NSR(proj4str).wkt
        if fileName[-3:] == '.nc':
            date = self.t0 + dt.timedelta(seconds=ds.variables['time'][0])
            date = date.strftime('%Y-%m-%d')

        self.create_vrt(fileName, gdalDataset, gdalMetadata, date, ds, bands,
                        cachedir)

        # add instrument and platform
        mm = pti.get_gcmd_instrument('Passive Remote Sensing')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #9
    def __init__(self, *args, **kwargs):

        mm = args[2] # metadata
        if not mm:
            raise WrongMapperError
        if 'NC_GLOBAL#source' not in mm:
            raise WrongMapperError
        if 'NC_GLOBAL#institution' not in mm:
            raise WrongMapperError
        if not ('ecmwf' in mm['NC_GLOBAL#source'].lower() and 'met.no' in
                mm['NC_GLOBAL#institution'].lower()):
            raise WrongMapperError
    
        super(Mapper, self).__init__(*args, **kwargs)

        self.dataset.SetMetadataItem('time_coverage_start',
                (parse(mm['NC_GLOBAL#min_time'], ignoretz=True, fuzzy=True).isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                (parse(mm['NC_GLOBAL#max_time'], ignoretz=True, fuzzy=True).isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('computer')
        ee = pti.get_gcmd_platform('models')

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #10
    def __init__(self, filename, gdal_dataset, metadata, quartile=0, *args, **kwargs):

        if 'ascat' not in metadata.get('NC_GLOBAL#source', '').lower():
            raise WrongMapperError

        super(Mapper, self).__init__(filename, gdal_dataset, metadata, quartile=quartile, *args, **kwargs)

        lat = self.dataset.GetRasterBand(self._latitude_band_number(gdal_dataset)).ReadAsArray()
        lon = self.dataset.GetRasterBand(self._longitude_band_number(gdal_dataset)).ReadAsArray()
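        # shift_longitudes presumably wraps the longitudes into [-180, 180) so the
        # GCPs built from them are continuous across the dateline.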
        lon = ScatterometryMapper.shift_longitudes(lon)
        self.set_gcps(lon, lat, gdal_dataset)

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        ii = pti.get_gcmd_instrument('ascat')
        pp = pti.get_gcmd_platform(metadata['NC_GLOBAL#source'].split(' ')[0])
        provider = pti.get_gcmd_provider(re.split('[^a-zA-Z]',
            metadata['NC_GLOBAL#institution'])[0])

        # TODO: Validate that the found instrument and platform are indeed what
        # we want....

        self.dataset.SetMetadataItem('instrument', json.dumps(ii))
        self.dataset.SetMetadataItem('platform', json.dumps(pp))
        self.dataset.SetMetadataItem('data_center', json.dumps(provider))
        self.dataset.SetMetadataItem('entry_title', metadata['NC_GLOBAL#title'])
        self.dataset.SetMetadataItem('ISO_topic_category',
                json.dumps(pti.get_iso19115_topic_category('Oceans')))
Example #11
    def __init__(self, *args, **kwargs):

        mm = args[2] # metadata
        if not mm:
            raise WrongMapperError
        if 'NC_GLOBAL#source' not in mm:
            raise WrongMapperError
        if 'arome' not in mm['NC_GLOBAL#source'].lower() and \
                'meps' not in mm['NC_GLOBAL#source'].lower():
            raise WrongMapperError
    
        super(Mapper, self).__init__(*args, **kwargs)

        self.dataset.SetMetadataItem('time_coverage_start',
                (parse(mm['min_time'], ignoretz=True, fuzzy=True).isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                (parse(mm['max_time'], ignoretz=True, fuzzy=True).isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('computer')
        ee = pti.get_gcmd_platform('models')

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #12
    def __init__(self, filename, gdal_dataset, metadata, quartile=0, *args, **kwargs):

        if 'quikscat' not in metadata.get('NC_GLOBAL#source', '').lower():
            raise WrongMapperError

        super(Mapper, self).__init__(filename, gdal_dataset, metadata, quartile=quartile, *args, **kwargs)

        lat = self.dataset.GetRasterBand(self._latitude_band_number(gdal_dataset)).ReadAsArray()
        lon = self.dataset.GetRasterBand(self._longitude_band_number(gdal_dataset)).ReadAsArray()
        lon = ScatterometryMapper.shift_longitudes(lon)
        self.set_gcps(lon, lat, gdal_dataset)

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('seawinds')
        ee = pti.get_gcmd_platform('quikscat')
        provider = metadata['NC_GLOBAL#institution']
        if provider.lower() == 'jpl':
            provider = 'NASA/JPL/QUIKSCAT'
        provider = pti.get_gcmd_provider(provider)

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
        self.dataset.SetMetadataItem('data_center', json.dumps(provider))
        self.dataset.SetMetadataItem('entry_title', metadata['NC_GLOBAL#title'])
        self.dataset.SetMetadataItem('ISO_topic_category',
                json.dumps(pti.get_iso19115_topic_category('Oceans')))
Example #13
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 date=None,
                 ds=None,
                 bands=None,
                 cachedir=None,
                 **kwargs):
        ''' Create NCEP VRT
        Parameters:
            filename : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset

        '''
        self.test_mapper(filename)
        self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands,
                        cachedir)

        # add instrument and platform
        mm = pti.get_gcmd_instrument('Passive Remote Sensing')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #14
    def has_correct_platform(self, n):
        meta1 = json.loads(n.get_metadata('platform'))
        meta1ShortName = meta1['Short_Name']
        meta2 = pti.get_gcmd_platform(meta1ShortName)

        assert type(meta1) == dict
        assert meta1 == meta2
Example #15
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT '''

        fileBaseName = os.path.basename(fileName)
        if not fileBaseName == 'MOD44W.vrt':
            raise WrongMapperError

        metaDict = [{
            'src': {
                'SourceFilename': fileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'land_binary_mask'
            }
        }]

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        mm = pti.get_gcmd_instrument('MODIS')
        ee = pti.get_gcmd_platform('TERRA')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #16
    def __init__(self, filename, gdal_dataset, metadata, quartile=0, *args, **kwargs):

        if 'ascat' not in metadata.get('NC_GLOBAL#source', '').lower():
            raise WrongMapperError

        super(Mapper, self).__init__(filename, gdal_dataset, metadata, quartile=quartile, *args, **kwargs)

        lat = self.dataset.GetRasterBand(self._latitude_band_number(gdal_dataset)).ReadAsArray()
        lon = self.dataset.GetRasterBand(self._longitude_band_number(gdal_dataset)).ReadAsArray()
        lon = ScatterometryMapper.shift_longitudes(lon)
        self.set_gcps(lon, lat, gdal_dataset)

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        ii = pti.get_gcmd_instrument('ascat')
        pp = pti.get_gcmd_platform(metadata['NC_GLOBAL#source'].split(' ')[0])
        provider = pti.get_gcmd_provider(re.split('[^a-zA-Z]',
            metadata['NC_GLOBAL#institution'])[0])

        # TODO: Validate that the found instrument and platform are indeed what
        # we want....

        self.dataset.SetMetadataItem('instrument', json.dumps(ii))
        self.dataset.SetMetadataItem('platform', json.dumps(pp))
        self.dataset.SetMetadataItem('data_center', json.dumps(provider))
        self.dataset.SetMetadataItem('entry_title', metadata['NC_GLOBAL#title'])
        self.dataset.SetMetadataItem('ISO_topic_category',
                json.dumps(pti.get_iso19115_topic_category('Oceans')))
Example #17
    def __init__(self, filename, gdal_dataset, metadata, quartile=0, *args, **kwargs):

        if 'quikscat' not in metadata.get('NC_GLOBAL#source', '').lower():
            raise WrongMapperError

        super(Mapper, self).__init__(filename, gdal_dataset, metadata, quartile=quartile, *args, **kwargs)

        lat = self.dataset.GetRasterBand(self._latitude_band_number(gdal_dataset)).ReadAsArray()
        lon = self.dataset.GetRasterBand(self._longitude_band_number(gdal_dataset)).ReadAsArray()
        lon = ScatterometryMapper.shift_longitudes(lon)
        self.set_gcps(lon, lat, gdal_dataset)

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('seawinds')
        ee = pti.get_gcmd_platform('quikscat')
        provider = metadata['NC_GLOBAL#institution']
        if provider.lower() == 'jpl':
            provider = 'NASA/JPL/QUIKSCAT'
        provider = pti.get_gcmd_provider(provider)

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
        self.dataset.SetMetadataItem('data_center', json.dumps(provider))
        self.dataset.SetMetadataItem('entry_title', metadata['NC_GLOBAL#title'])
        self.dataset.SetMetadataItem('ISO_topic_category',
                json.dumps(pti.get_iso19115_topic_category('Oceans')))
Example #18
def get_gcmd_keywords_mapping():
    gcmd_keywords_mapping = {
        'MERCATOR PSY4QV3R1': {
            'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
            'platform': json.dumps(pti.get_gcmd_platform('models')),
        },
        'NERSC-HYCOM model fields': {
            'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
            'platform': json.dumps(pti.get_gcmd_platform('models')),
        },
        'MERCATOR BIOMER4V1R2': {
            'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
            'platform': json.dumps(pti.get_gcmd_platform('models')),
        },
    }
    return gcmd_keywords_mapping
Example #19
    def __init__(self,
                 fileName,
                 gdalDataset,
                 gdalMetadata,
                 date=None,
                 ds=None,
                 bands=None,
                 cachedir=None,
                 **kwargs):
        ''' Create NCEP VRT
        Parameters:
            fileName : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset

        '''
        self.test_mapper(fileName)
        fname = os.path.split(fileName)[1]
        date = '%s-%s-%sT%s:00Z' % (fname[0:4], fname[4:6], fname[6:8],
                                    fname[8:10])

        self.create_vrt(fileName, gdalDataset, gdalMetadata, date, ds, bands,
                        cachedir)

        # add instrument and platform
        instr = pti.get_gcmd_instrument('active remote sensing')
        pltfr = pti.get_gcmd_platform('Earth Observation Satellites')

        self.dataset.SetMetadataItem('instrument', json.dumps(instr))
        self.dataset.SetMetadataItem('platform', json.dumps(pltfr))
        self.dataset.SetMetadataItem('Data Center', 'FR/IFREMER/CERSAT')
        self.dataset.SetMetadataItem('Entry Title', 'GLOBCURRENT')
Example #20
    def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
                 ds=None, bands=None, cachedir=None, *args, **kwargs):

        self.test_mapper(filename)
        timestamp = date if date else self.get_date(filename)
        ds = Dataset(filename)

        try:
            self.srcDSProjection = NSR(ds.variables['projection_lambert'].proj4)
        except KeyError:
            raise WrongMapperError

        self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)

        mm = pti.get_gcmd_instrument('Computer')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
        self.dataset.SetMetadataItem('Data Center', 'NO/MET')
        self.dataset.SetMetadataItem('Entry Title', str(ds.getncattr('title')))
        try:
            # Skip the field if summary is missing in the ds
            self.dataset.SetMetadataItem('summary', str(ds.getncattr('summary')))
        except AttributeError:
            pass
Example #21
    def __init__(self,
                 filename,
                 gdal_dataset,
                 gdal_metadata,
                 date=None,
                 ds=None,
                 bands=None,
                 cachedir=None,
                 *args,
                 **kwargs):

        self.test_mapper(filename)
        timestamp = date if date else self.get_date(filename)
        ds = Dataset(filename)
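        # Build the source CRS from the dataset's proj4 attribute; +to_meter=0.0174532925199
        # is pi/180, converting grid coordinates given in degrees.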
        try:
            self.srcDSProjection = NSR(ds.variables['projection_3'].proj4 +
                                       ' +to_meter=0.0174532925199 +wktext')
        except KeyError:
            raise WrongMapperError

        self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds,
                        bands, cachedir)

        self.dataset.SetMetadataItem(
            'instrument', json.dumps(pti.get_gcmd_instrument('Computer')))
        self.dataset.SetMetadataItem(
            'platform', json.dumps(pti.get_gcmd_platform('MODELS')))
        self.dataset.SetMetadataItem(
            'Data Center', json.dumps(pti.get_gcmd_provider('NO/MET')))
        self.dataset.SetMetadataItem('Entry Title', str(ds.getncattr('title')))
        self.dataset.SetMetadataItem(
            'ISO_topic_category',
            json.dumps(pti.get_iso19115_topic_category('Oceans')))
        self.dataset.SetMetadataItem(
            'gcmd_location', json.dumps(pti.get_gcmd_location('sea surface')))
Example #22
    def __init__(self, filename, gdalDataset, gdalMetadata,
                 date=None, ds=None, bands=None, cachedir=None,
                 **kwargs):
        ''' Create NCEP VRT
        Parameters:
            filename : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset

        '''
        self.test_mapper(filename)
        fname = os.path.split(filename)[1]
        date = '%s-%s-%sT%s:00Z' % (fname[0:4], fname[4:6], fname[6:8], fname[8:10])

        self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

        # add instrument and platform
        pi = []
        if 'jason-1' in self.ds.sensor.lower():
            pi.append([pti.get_gcmd_platform('jason-1'), pti.get_gcmd_instrument('poseidon-2')])
        if 'envisat' in self.ds.sensor.lower():
            pi.append([pti.get_gcmd_platform('envisat'), pti.get_gcmd_instrument('ra-1')])
        if 'jason-2' in self.ds.sensor.lower():
            pi.append([pti.get_gcmd_platform('ostm/jason-2'),
                pti.get_gcmd_instrument('poseidon-3')])

        if pi:
            self.dataset.SetMetadataItem('platform/instrument', json.dumps(pi))
        else:
            warnings.warn('Could not provide source metadata - please check and submit a git ' \
                    'issue if necessary')

        self.dataset.SetMetadataItem('Data Center', 'FR/IFREMER/CERSAT')
        self.dataset.SetMetadataItem('Entry Title', 'GLOBCURRENT')
Example #23
    def init_from_xml(self, productXml):
        ''' Fast init from metadata in XML only '''
        numberOfLines = int(
            productXml.node('imageAttributes').node('rasterAttributes').node(
                'numberOfLines').value)
        numberOfSamples = int(
            productXml.node('imageAttributes').node('rasterAttributes').node(
                'numberOfSamplesPerLine').value)

        VRT.__init__(self,
                     srcRasterXSize=numberOfSamples,
                     srcRasterYSize=numberOfLines)

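        # Each geolocationGrid tie point provides an (image pixel/line, geodetic lon/lat)
        # pair, which is turned into a GDAL ground control point below.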
        gcps = []
        geogrid = productXml.node('imageAttributes').node(
            'geographicInformation').node('geolocationGrid')
        for child in geogrid.children:
            pix = float(child.node('imageCoordinate').node('pixel').value)
            lin = float(child.node('imageCoordinate').node('line').value)
            lon = float(
                child.node('geodeticCoordinate').node('longitude').value)
            lat = float(
                child.node('geodeticCoordinate').node('latitude').value)
            gcps.append(gdal.GCP(lon, lat, 0, pix, lin))

        self.dataset.SetGCPs(gcps, NSR().wkt)

        dates = list(
            map(parse, [
                child.node('timeStamp').value for child in (productXml.node(
                    'sourceAttributes').node('orbitAndAttitude').node(
                        'orbitInformation').nodeList('stateVector'))
            ]))

        self.dataset.SetMetadataItem('time_coverage_start',
                                     min(dates).isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                                     max(dates).isoformat())
        self.dataset.SetMetadataItem(
            'platform', json.dumps(pti.get_gcmd_platform('radarsat-2')))
        self.dataset.SetMetadataItem(
            'instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
        self.dataset.SetMetadataItem('Entry Title', 'Radarsat-2 SAR')
        self.dataset.SetMetadataItem('Data Center', 'CSA')
        self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
        self.dataset.SetMetadataItem('Summary', 'Radarsat-2 SAR data')
Example #24
    def __init__(self,
                 filename,
                 gdal_dataset,
                 gdal_metadata,
                 date=None,
                 ds=None,
                 bands=None,
                 cachedir=None,
                 *args,
                 **kwargs):

        self.test_mapper(filename)
        timestamp = date if date else self.get_date(filename)
        ds = Dataset(filename)

        try:
            self.srcDSProjection = NSR(
                ds.variables['projection_lambert'].proj4)
        except KeyError:
            raise WrongMapperError

        self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds,
                        bands, cachedir)

        mm = pti.get_gcmd_instrument('Computer')
        ee = pti.get_gcmd_platform('ecmwfifs')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        md_item = 'Data Center'
        if not self.dataset.GetMetadataItem(md_item):
            self.dataset.SetMetadataItem(md_item, 'NO/MET')
        md_item = 'Entry Title'
        if not self.dataset.GetMetadataItem(md_item):
            self.dataset.SetMetadataItem(md_item, str(ds.getncattr('title')))
        md_item = 'summary'
        if not self.dataset.GetMetadataItem(md_item):
            summary = """
            AROME_Arctic is a convection-permitting atmosphere model covering parts of the Barents
            Sea and the Nordic Arctic. It has a horizontal resolution of 2.5 km and 65 vertical
            levels. AROME_Arctic runs for 66 hours four times a day (00, 06, 12, 18) with three-hourly
            cycling for data assimilation. Boundary data come from ECMWF. The model code is based on
            HARMONIE cy40h1.1.
            """
            self.dataset.SetMetadataItem(md_item, str(summary))
Example #25
    def _init_empty(self, manifest_data, annotation_data):
        """ Fast initialization from minimum of information

        Parameters
        ----------
        manifest_data : dict
            data from the manifest file (time_coverage_start, etc)
        annotation_data : dict
            data from annotation file (longitude, latitude, x_size, etc)

        Note
        ----
            Calls VRT.__init__, Adds GCPs, metadata
        """
        # init empty dataset
        super(Mapper, self).__init__(annotation_data['x_size'],
                                     annotation_data['y_size'])
        # add GCPs from (corrected) geolocation data
        gcps = Mapper.create_gcps(annotation_data['longitude'],
                                  annotation_data['latitude'],
                                  annotation_data['height'],
                                  annotation_data['pixel'],
                                  annotation_data['line'])
        self.dataset.SetGCPs(gcps, NSR().wkt)
        # set metadata
        self.dataset.SetMetadataItem('time_coverage_start',
                                     manifest_data['time_coverage_start'])
        self.dataset.SetMetadataItem('time_coverage_end',
                                     manifest_data['time_coverage_end'])
        platform_name = manifest_data['platform_family_name'] + manifest_data[
            'platform_number']
        self.dataset.SetMetadataItem(
            'platform', json.dumps(pti.get_gcmd_platform(platform_name)))
        self.dataset.SetMetadataItem(
            'instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
        self.dataset.SetMetadataItem('entry_title', platform_name + ' SAR')
        self.dataset.SetMetadataItem(
            'data_center', json.dumps(pti.get_gcmd_provider('ESA/EO')))
        self.dataset.SetMetadataItem(
            'iso_topic_category',
            json.dumps(pti.get_iso19115_topic_category('Oceans')))
        self.dataset.SetMetadataItem('summary', platform_name + ' SAR data')
        self.dataset.FlushCache()
Example #26
    def __init__(self, filename, gdalDataset, gdalMetadata, date=None, ds=None, bands=None,
                 cachedir=None, **kwargs):
        ''' Create NCEP VRT
        Parameters:
            filename : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset

        '''
        self.test_mapper(filename)
        self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

        # add instrument and platform
        mm = pti.get_gcmd_instrument('Passive Remote Sensing')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #27
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT '''

        fileBaseName = os.path.basename(fileName)
        if not fileBaseName == 'MOD44W.vrt':
            raise WrongMapperError

        metaDict = [{'src': {'SourceFilename': fileName, 'SourceBand':  1},
                     'dst': {'wkv': 'land_binary_mask'}}]

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        mm = pti.get_gcmd_instrument('MODIS')
        ee = pti.get_gcmd_platform('TERRA')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #28
    def init_from_xml(self, productXml):
        ''' Fast init from metadata in XML only '''
        numberOfLines = int(productXml
                        .node('imageAttributes')
                        .node('rasterAttributes')
                        .node('numberOfLines')
                        .value)
        numberOfSamples = int(productXml
                        .node('imageAttributes')
                        .node('rasterAttributes')
                        .node('numberOfSamplesPerLine')
                        .value)

        VRT.__init__(self, srcRasterXSize=numberOfSamples, srcRasterYSize=numberOfLines)

        gcps = []
        geogrid = productXml.node('imageAttributes').node('geographicInformation').node('geolocationGrid')
        for child in geogrid.children:
            pix = float(child.node('imageCoordinate').node('pixel').value)
            lin = float(child.node('imageCoordinate').node('line').value)
            lon = float(child.node('geodeticCoordinate').node('longitude').value)
            lat = float(child.node('geodeticCoordinate').node('latitude').value)
            gcps.append(gdal.GCP(lon, lat, 0, pix, lin))

        self.dataset.SetGCPs(gcps, NSR().wkt)

        dates = list(map(parse, [child.node('timeStamp').value for child in
                             (productXml.node('sourceAttributes')
                                        .node('orbitAndAttitude')
                                        .node('orbitInformation')
                                        .nodeList('stateVector'))]))

        self.dataset.SetMetadataItem('time_coverage_start', min(dates).isoformat())
        self.dataset.SetMetadataItem('time_coverage_end', max(dates).isoformat())
        self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform('radarsat-2')))
        self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
        self.dataset.SetMetadataItem('Entry Title', 'Radarsat-2 SAR')
        self.dataset.SetMetadataItem('Data Center', 'CSA')
        self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
        self.dataset.SetMetadataItem('Summary', 'Radarsat-2 SAR data')
Example #29
    def init_from_manifest_only(self, manifestXML, annotXML, missionName):
        ''' Create fake VRT and add metadata only from the manifest.safe '''
        X, Y, lon, lat, inc, ele, numberOfSamples, numberOfLines = self.read_geolocation_lut(annotXML)

        VRT.__init__(self, srcRasterXSize=numberOfSamples, srcRasterYSize=numberOfLines)
        doc = ET.fromstring(manifestXML)

        gcps = []
        for i in range(len(X)):
            gcps.append(gdal.GCP(lon[i], lat[i], 0, X[i], Y[i]))

        self.dataset.SetGCPs(gcps, NSR().wkt)
        self.dataset.SetMetadataItem('time_coverage_start',
                                     doc.findall(".//*[{http://www.esa.int/safe/sentinel-1.0}startTime]")[0][0].text)
        self.dataset.SetMetadataItem('time_coverage_end',
                                     doc.findall(".//*[{http://www.esa.int/safe/sentinel-1.0}stopTime]")[0][0].text)
        self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform(missionName)))
        self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
        self.dataset.SetMetadataItem('Entry Title', missionName + ' SAR')
        self.dataset.SetMetadataItem('Data Center', 'ESA/EO')
        self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
        self.dataset.SetMetadataItem('Summary', missionName + ' SAR data')
Example #30
    def __init__(self,
                 filename,
                 gdal_dataset,
                 gdal_metadata,
                 date=None,
                 ds=None,
                 bands=None,
                 cachedir=None,
                 *args,
                 **kwargs):

        self.test_mapper(filename)
        timestamp = date if date else self.get_date(filename)

        self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds,
                        bands, cachedir)

        mditem = 'entry_title'
        if not self.dataset.GetMetadataItem(mditem):
            try:
                self.dataset.SetMetadataItem(mditem,
                                             str(self.ds.getncattr('title')))
            except AttributeError:
                self.dataset.SetMetadataItem(mditem, filename)
        mditem = 'data_center'
        if not self.dataset.GetMetadataItem(mditem):
            self.dataset.SetMetadataItem(
                'data_center', json.dumps(pti.get_gcmd_provider('NO/MET')))
        mditem = 'ISO_topic_category'
        if not self.dataset.GetMetadataItem(mditem):
            self.dataset.SetMetadataItem(
                mditem,
                pti.get_iso19115_topic_category(
                    'Imagery/Base Maps/Earth Cover')['iso_topic_category'])

        mm = pti.get_gcmd_instrument('multi-spectral')
        ee = pti.get_gcmd_platform('sentinel-2')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #31
    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 date=None, ds=None, bands=None, cachedir=None,
                 **kwargs):
        ''' Create NCEP VRT
        Parameters:
            fileName : URL
            date : str
                2010-05-01
            ds : netCDF.Dataset
                previously opened dataset

        '''
        self.test_mapper(fileName)
        fname = os.path.split(fileName)[1]
        date = '%s-%s-%sT%s:00Z' % (fname[0:4], fname[4:6], fname[6:8], fname[8:10])

        self.create_vrt(fileName, gdalDataset, gdalMetadata, date, ds, bands, cachedir)

        # add instrument and platform
        mm = pti.get_gcmd_instrument('active remote sensing')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #32
    def __init__(self,
                 filename,
                 gdal_dataset,
                 gdal_metadata,
                 date=None,
                 ds=None,
                 bands=None,
                 cachedir=None,
                 *args,
                 **kwargs):

        self.test_mapper(filename)
        timestamp = date if date else self.get_date(filename)
        ds = Dataset(filename)

        try:
            self.srcDSProjection = NSR(
                ds.variables['projection_lambert'].proj4)
        except KeyError:
            raise WrongMapperError

        self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds,
                        bands, cachedir)

        mm = pti.get_gcmd_instrument('Computer')
        ee = pti.get_gcmd_platform('Earth Observation Satellites')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
        self.dataset.SetMetadataItem('Data Center', 'NO/MET')
        self.dataset.SetMetadataItem('Entry Title', str(ds.getncattr('title')))
        try:
            # Skip the field if summary is missing in the ds
            self.dataset.SetMetadataItem('summary',
                                         str(ds.getncattr('summary')))
        except AttributeError:
            pass
Example #33
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' OBPG L3 VRT '''

        # test the product
        try:
            assert gdalMetadata['PlatformShortName'] == 'GCOM-W1'
            assert gdalMetadata['SensorShortName'] == 'AMSR2'
            assert gdalMetadata['ProductName'] == 'AMSR2-L3'
        except (AssertionError, KeyError, TypeError):
            raise WrongMapperError

        # get list of similar (same date, A/D orbit) files in the directory
        iDir, iFile = os.path.split(fileName)
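        # The two digits at positions 30-31 of the file name encode the channel frequency;
        # replace them with a format placeholder so sibling files for the other
        # frequencies can be located in the same directory.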
        iFileMask = iFile[:30] + '%02d' + iFile[32:]
        simFiles = []
        for freq in self.freqs:
            simFile = os.path.join(iDir, iFileMask % freq)
            #print simFile
            if os.path.exists(simFile):
                simFiles.append(simFile)

        metaDict = []
        for freq in self.freqs:
            simFile = os.path.join(iDir, iFileMask % freq)
            if simFile not in simFiles:
                continue
            #print 'simFile', simFile
            # open file, get metadata and get parameter name
            simSupDataset = gdal.Open(simFile)
            if simSupDataset is None:
                # skip this similar file
                #print 'No dataset: %s not a supported SMI file' % simFile
                continue
            # get subdatasets from the similar file
            simSubDatasets = simSupDataset.GetSubDatasets()
            for simSubDataset in simSubDatasets:
                #print 'simSubDataset', simSubDataset
                if 'Brightness_Temperature' in simSubDataset[0]:
                    # get SourceFilename from subdataset
                    metaEntry = {
                        'src': {'SourceFilename': simSubDataset[0],
                                'SourceBand': 1,
                                'ScaleRatio': 0.0099999998,
                                'ScaleOffset': 0},
                        'dst': {'wkv': 'brightness_temperature',
                                'frequency': '%02d' % freq,
                                'polarisation': simSubDataset[0][-2:-1],
                                'suffix': ('%02d%s' %
                                           (freq, simSubDataset[0][-2:-1]))}}
                    metaDict.append(metaEntry)

        # initiate VRT for the NSIDC 10 km grid
        VRT.__init__(self,
                     srcGeoTransform=(-3850000, 10000, 0.0,
                                      5850000, 0.0, -10000),
                     srcProjection=NSR(3411).wkt,
                     srcRasterXSize=760,
                     srcRasterYSize=1120)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start', parse(gdalMetadata['ObservationStartDateTime']).isoformat())
        self.dataset.SetMetadataItem('time_coverage_end', parse(gdalMetadata['ObservationStartDateTime']).isoformat())

        mm = pti.get_gcmd_instrument('AMSR2')
        ee = pti.get_gcmd_platform('GCOM-W1')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Example #34
    def __init__(self,
                 fileName,
                 gdalDataset,
                 gdalMetadata,
                 GCP_COUNT=30,
                 **kwargs):
        ''' Create MODIS_L1 VRT '''

        # available MODIS short names mapped to their resolutions (m)
        modisResolutions = {
            'MYD02QKM': 250,
            'MOD02QKM': 250,
            'MYD02HKM': 500,
            'MOD02HKM': 500,
            'MYD021KM': 1000,
            'MOD021KM': 1000
        }

        # should raise an error if this is not a MODIS_L1 product
        try:
            mResolution = modisResolutions[gdalMetadata["SHORTNAME"]]
        except (KeyError, TypeError):
            raise WrongMapperError

        # get 1st subdataset and parse to VRT.__init__()
        # for retrieving geo-metadata
        try:
            gdalSubDataset = gdal.Open(gdalDataset.GetSubDatasets()[0][0])
        except (AttributeError, IndexError):
            raise WrongMapperError

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalSubDataset)

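        # Template for addressing individual MODIS L1B swath subdatasets inside the HDF4-EOS file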
        subDsString = 'HDF4_EOS:EOS_SWATH:"%s":MODIS_SWATH_Type_L1B:%s'

        #provide all mappings
        metaDict250SF = ['EV_250_RefSB']

        metaDict250 = [{
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_250_RefSB'),
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '645'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_250_RefSB'),
                'SourceBand': 2
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '858'
            }
        }]

        metaDict500SF = ['EV_250_Aggr500_RefSB', 'EV_500_RefSB']

        metaDict500 = [{
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_250_Aggr500_RefSB'),
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '645'
            }
        }, {
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_250_Aggr500_RefSB'),
                'SourceBand': 2
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '858'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'),
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '469'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'),
                'SourceBand': 2
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '555'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'),
                'SourceBand': 3
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '1240'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'),
                'SourceBand': 4
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '1640'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_500_RefSB'),
                'SourceBand': 5
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '2130'
            }
        }]

        metaDict1000SF = [
            'EV_250_Aggr1km_RefSB', 'EV_500_Aggr1km_RefSB', 'EV_1KM_RefSB',
            'EV_1KM_Emissive'
        ]

        metaDict1000 = [{
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_250_Aggr1km_RefSB'),
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '645'
            }
        }, {
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_250_Aggr1km_RefSB'),
                'SourceBand': 2
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '858'
            }
        }, {
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_500_Aggr1km_RefSB'),
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '469'
            }
        }, {
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_500_Aggr1km_RefSB'),
                'SourceBand': 2
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '555'
            }
        }, {
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_500_Aggr1km_RefSB'),
                'SourceBand': 3
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '1240'
            }
        }, {
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_500_Aggr1km_RefSB'),
                'SourceBand': 4
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '1640'
            }
        }, {
            'src': {
                'SourceFilename':
                subDsString % (fileName, 'EV_500_Aggr1km_RefSB'),
                'SourceBand': 5
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '2130'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '412'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 2
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '443'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 3
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '488'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 4
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '531'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 5
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '551'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 6
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '667'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 7
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '667'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 8
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '678'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 9
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '678'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 10
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '748'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 11
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '869'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 12
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '905'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 13
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '936'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 14
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '940'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_RefSB'),
                'SourceBand': 15
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '1375'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '3750'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 2
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '3959'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 3
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '3959'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 4
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '4050'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 5
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '4465'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 6
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '4515'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 7
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '6715'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 8
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '7325'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 9
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '8550'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 10
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '9730'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 11
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '11030'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 12
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '12020'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 13
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '13335'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 14
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '13635'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 15
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '13935'
            }
        }, {
            'src': {
                'SourceFilename': subDsString % (fileName, 'EV_1KM_Emissive'),
                'SourceBand': 16
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '14235'
            }
        }]

        # get proper mapping depending on resolution
        metaDict = {
            250: metaDict250,
            500: metaDict500,
            1000: metaDict1000,
        }[mResolution]
        # get the corresponding scale-factor subdataset names for the same resolution
        metaDictSF = {
            250: metaDict250SF,
            500: metaDict500SF,
            1000: metaDict1000SF,
        }[mResolution]

        # read all scales/offsets
        rScales = {}
        rOffsets = {}
        for sf in metaDictSF:
            dsName = subDsString % (fileName, sf)
            ds = gdal.Open(dsName)
            # wrap map() in list() so the values can be indexed below
            # (map returns an iterator in Python 3)
            rScales[dsName] = list(map(
                float,
                ds.GetMetadataItem('radiance_scales').split(',')))
            rOffsets[dsName] = list(map(
                float,
                ds.GetMetadataItem('radiance_offsets').split(',')))
            self.logger.debug('radiance_scales: %s' % str(rScales))

        # add suffix and radiance scale/offset to each band's parameters
        for bandDict in metaDict:
            SourceFilename = bandDict['src']['SourceFilename']
            SourceBand = bandDict['src']['SourceBand']
            bandDict['dst']['suffix'] = bandDict['dst']['wavelength']
            scale = rScales[SourceFilename][SourceBand - 1]
            offset = rOffsets[SourceFilename][SourceBand - 1]
            self.logger.debug('band, scale, offset: %s_%d %s %s' %
                              (SourceFilename, SourceBand, scale, offset))
            bandDict['src']['ScaleRatio'] = scale
            bandDict['src']['ScaleOffset'] = offset

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        productDate = gdalMetadata["RANGEBEGINNINGDATE"]
        productTime = gdalMetadata["RANGEBEGINNINGTIME"]
        self.remove_geolocationArray()

        # set required metadata
        self.dataset.SetMetadataItem(
            'time_coverage_start',
            (parse(gdalMetadata["RANGEBEGINNINGDATE"] + ' ' +
                   gdalMetadata["RANGEBEGINNINGTIME"]).isoformat()))
        self.dataset.SetMetadataItem(
            'time_coverage_end',
            (parse(gdalMetadata["RANGEENDINGDATE"] + ' ' +
                   gdalMetadata["RANGEENDINGTIME"]).isoformat()))

        instrumentName = self.find_metadata(gdalMetadata,
                                            'ASSOCIATEDINSTRUMENTSHORTNAME',
                                            'MODIS')
        platformName = self.find_metadata(gdalMetadata,
                                          'ASSOCIATEDPLATFORMSHORTNAME',
                                          'AQUA')
        mm = pti.get_gcmd_instrument(instrumentName)
        ee = pti.get_gcmd_platform(platformName)
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        lonSubdataset = [
            subdatasetName[0]
            for subdatasetName in gdalDataset.GetSubDatasets()
            if 'Longitude' in subdatasetName[1]
        ][0]
        latSubdataset = [
            subdatasetName[0]
            for subdatasetName in gdalDataset.GetSubDatasets()
            if 'Latitude' in subdatasetName[1]
        ][0]
        lons = gdal.Open(lonSubdataset).ReadAsArray()
        lats = gdal.Open(latSubdataset).ReadAsArray()
        gcps = []
        # integer division (//) keeps the range() step and the resize factor integers
        rows = range(0, lons.shape[0], lons.shape[0] // GCP_COUNT)
        cols = range(0, lons.shape[1], lons.shape[1] // GCP_COUNT)
        factor = self.dataset.RasterYSize // lons.shape[0]
        for r in rows:
            for c in cols:
                gcps.append(
                    gdal.GCP(float(lons[r, c]), float(lats[r, c]), 0,
                             factor * c + 0.5, factor * r + 0.5))
        self.dataset.SetGCPs(gcps, self.dataset.GetGCPProjection())
        self.tps = True
Exemplo n.º 35
def get_platform_and_instrument_list(ds):
    """ This method uses the source_data in the OPeNDAP dataset to select the platforms
    and instruments. It checks the hardcoded dictionary, pi, to find the platform and instrument
    for products given in the source_data. The reason for that is that the items in source_data
    are not in the GCMD keywords but rather refer to products of certain instruments. If you
    search the internet for the dictionary keys, you'll find a dataset description which
    includes the listed platforms and instruments.
    """
    pi = {
        'AVHRR18_G-NAVO-L2P-V1.0': [pti.get_gcmd_platform('noaa-18'),
            pti.get_gcmd_instrument('avhrr-3')],
        'AVHRR19_G-NAVO-L2P-V1.0': [pti.get_gcmd_platform('noaa-19'),
            pti.get_gcmd_instrument('avhrr')],
        'AVHRR_SST_METOP_B-OSISAF-L2P-V1.0': [pti.get_gcmd_platform('metop-b'),
            pti.get_gcmd_instrument('avhrr')],
        'VIIRS_NPP-OSPO-L2P-V2.3': [pti.get_gcmd_platform('suomi-npp'),
            pti.get_gcmd_instrument('viirs')],
        'AMSR2-REMSS-L2P-V07.2': [pti.get_gcmd_platform('gcom-w1'),
            pti.get_gcmd_instrument('amsr2')],
        'GOES13-OSISAF-L3C-V1.0': [pti.get_gcmd_platform('goes-16'),
            pti.get_gcmd_instrument('abi')],
        'SEVIRI_SST-OSISAF-L3C-V1.0': [pti.get_gcmd_platform('msg'),
            pti.get_gcmd_instrument('seviri')],
        'OSISAF_ICE': [pti.get_gcmd_platform('earth observation satellites'),
            pti.get_gcmd_instrument('Imaging Spectrometers/Radiometers')],
        'NCEP_ICE': [pti.get_gcmd_platform('ncep-gfs'),
            pti.get_gcmd_instrument('computer')],
        'AMSRE': [pti.get_gcmd_platform('aqua'), pti.get_gcmd_instrument('amsr-e')],
        'ATS_NR_2P': [pti.get_gcmd_platform('envisat'), pti.get_gcmd_instrument('aatsr')],
        'AVHRR18_G': [pti.get_gcmd_platform('noaa-18'), pti.get_gcmd_instrument('avhrr-3')],
        'AVHRR17_NAR': [pti.get_gcmd_platform('noaa-17'), pti.get_gcmd_instrument('avhrr')],
        'AVHRR18_NAR': [pti.get_gcmd_platform('noaa-18'), pti.get_gcmd_instrument('avhrr-3')],
        'SEVIRI': [pti.get_gcmd_platform('msg'), pti.get_gcmd_instrument('seviri')],
        'TMI': [pti.get_gcmd_platform('trmm'), pti.get_gcmd_instrument('tmi')],
    }
    pi_list = []
    # Here, we may have a dilemma: for example, the dataset at
    # https://opendap.jpl.nasa.gov:443/opendap/OceanTemperature/ghrsst/data/L4/GLOB/UKMO/OSTIA/2008/002/20080102-UKMO-L4HRfnd-GLOB-v01-fv02-OSTIA.nc.bz2
    # has source data AVHRR18_G and AVHRR18_NAR. I don't know the difference between them but
    # presume both are noaa18/AVHRR-3. It means duplication. Might not be a problem in Nansat,
    # though, and it could be easy to solve in django-geo-spaas...
    for source_data in ds.source_data.split(','):
        pi_list.append(pi[source_data.strip()])
    return pi_list
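
# A minimal usage sketch of the lookup above, assuming a small stand-in object with a
# source_data attribute (the real argument is the opened OPeNDAP/netCDF dataset). The
# FakeDataset class and the sample source_data string are illustrative only.
class FakeDataset(object):
    source_data = 'AVHRR18_G, SEVIRI'

pi_list = get_platform_and_instrument_list(FakeDataset())
# pi_list now holds [platform, instrument] GCMD keyword dictionaries,
# here for noaa-18/avhrr-3 and msg/seviri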
Exemplo n.º 36
    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 GCP_COUNT=10, **kwargs):
        ''' Create VRT
        Parameters
        ----------
        GCP_COUNT : int
            number of GCPs along each dimension
        '''

        # extension must be .nc
        if os.path.splitext(fileName)[1] != '.nc':
            raise WrongMapperError

        # file must contain navigation_data/longitude
        try:
            ds = gdal.Open('HDF5:"%s"://navigation_data/longitude' % fileName)
        except RuntimeError:
            raise WrongMapperError
        else:
            dsMetadata = ds.GetMetadata()

        # title value must be known
        if dsMetadata.get('title', '') not in self.titles:
            raise WrongMapperError

        # get geophysical data variables
        subDatasets = gdal.Open(fileName).GetSubDatasets()
        metaDict = []
        for subDataset in subDatasets:
            groupName = subDataset[0].split('/')[-2]
            if groupName not in ['geophysical_data', 'navigation_data']:
                continue
            varName = subDataset[0].split('/')[-1]
            subds = gdal.Open(subDataset[0])
            b = subds.GetRasterBand(1)
            bMetadata = b.GetMetadata()

            # set SRC/DST parameters
            metaEntry = {'src': {'SourceFilename': subDataset[0],
                                 'SourceBand': 1,
                                 'DataType': b.DataType},
                         'dst': {'name': varName}}

            # replace datatype for l2_flags
            if varName == 'l2_flags':
                metaEntry['src']['DataType'] = 4
                metaEntry['src']['SourceType'] = 'SimpleSource'

            # set scale if exist
            metaKey = '%s_%s_scale_factor' % (groupName, varName)
            if metaKey in bMetadata:
                metaEntry['src']['ScaleRatio'] = bMetadata[metaKey]

            # set offset if exist
            metaKey = '%s_%s_add_offset' % (groupName, varName)
            if metaKey in bMetadata:
                metaEntry['src']['ScaleOffset'] = bMetadata[metaKey]

            # set standard_name if exists
            metaKey = '%s_%s_standard_name' % (groupName, varName)
            if metaKey in bMetadata:
                metaEntry['dst']['wkv'] = bMetadata[metaKey]

            # set other metadata
            for metaKey in bMetadata:
                newMetaKey = metaKey.replace('%s_%s_' % (groupName, varName), '')
                if newMetaKey not in ['scale_factor', 'add_offset', 'DIMENSION_LIST', '_FillValue']:
                    metaEntry['dst'][newMetaKey] = bMetadata[metaKey]
            metaDict.append(metaEntry)

        # make GCPs
        # get lat/lon grids
        longitude = gdal.Open('HDF5:"%s"://navigation_data/longitude' % fileName).ReadAsArray()
        latitude = gdal.Open('HDF5:"%s"://navigation_data/latitude' % fileName).ReadAsArray()
        rasterYSize, rasterXSize = longitude.shape

        step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
        step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))

        gcps = []
        k = 0
        center_lon = 0
        center_lat = 0
        for i0 in range(0, latitude.shape[0], step0):
            for i1 in range(0, latitude.shape[1], step1):
                # create GCP with X,Y,pixel,line from lat/lon matrices
                lon = float(longitude[i0, i1])
                lat = float(latitude[i0, i1])

                if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                    gcp = gdal.GCP(lon, lat, 0, i1+0.5, i0+0.5)
                    gcps.append(gcp)
                    center_lon += lon
                    center_lat += lat
                    k += 1

        time_coverage_start = dsMetadata['time_coverage_start']
        time_coverage_end = dsMetadata['time_coverage_end']

        # create VRT
        VRT.__init__(self, srcProjection=NSR().wkt,
                     srcGCPs=gcps,
                     srcGCPProjection=NSR().wkt,
                     srcRasterXSize=rasterXSize,
                     srcRasterYSize=rasterYSize)
        # add bands
        self._create_bands(metaDict)

        # reproject GCPs
        center_lon /= k
        center_lat /= k
        srs = '+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=%f +lat_0=%f +no_defs' % (center_lon, center_lat)
        self.reproject_GCPs(srs)

        ### Workaround (not strictly correct): set the dataset projection from the GCP projection
        self.dataset.SetProjection(self.dataset.GetGCPProjection())

        # use TPS for reprojection
        self.tps = True

        # add NansenCloud metadata
        self.dataset.SetMetadataItem('time_coverage_start',
                                     str(time_coverage_start))
        self.dataset.SetMetadataItem('time_coverage_end',
                                     str(time_coverage_end))
        self.dataset.SetMetadataItem('source_type', 'Satellite')
        self.dataset.SetMetadataItem('mapper', 'obpg_l2_nc')

        mm = pti.get_gcmd_instrument(dsMetadata.get('instrument'))
        ee = pti.get_gcmd_platform(dsMetadata.get('platform'))
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Exemplo n.º 37
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
        ''' Create NCEP VRT '''

        if not gdalDataset:
            raise WrongMapperError(filename)

        geotransform = gdalDataset.GetGeoTransform()
        if (geotransform != (-0.25, 0.5, 0.0, 90.25, 0.0, -0.5) or
                gdalDataset.RasterCount != 2):  # Not water proof
            raise WrongMapperError(filename)

        metaDict = [{'src': {'SourceFilename': filename,
                             'SourceBand': 1},
                     'dst': {'wkv': 'eastward_wind',
                             'height': '10 m'}},
                    {'src': {'SourceFilename': filename,
                             'SourceBand': 2},
                     'dst': {'wkv': 'northward_wind',
                             'height': '10 m'}},
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': 1,
                              'DataType': gdalDataset.GetRasterBand(1).DataType
                              },
                             {'SourceFilename': filename,
                              'SourceBand': 2,
                              'DataType': gdalDataset.GetRasterBand(2).DataType
                              }],
                     'dst': {'wkv': 'wind_speed',
                             'PixelFunctionType': 'UVToMagnitude',
                             'name': 'windspeed',
                             'height': '10 m'
                             }},
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': 1,
                              'DataType': gdalDataset.GetRasterBand(1).DataType
                              },
                             {'SourceFilename': filename,
                              'SourceBand': 2,
                              'DataType': gdalDataset.GetRasterBand(2).DataType
                              }],
                     'dst': {'wkv': 'wind_from_direction',
                             'PixelFunctionType': 'UVToDirectionFrom',
                             'name': 'winddirection',
                             'height': '10 m'
                             }
                     }]

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # Adding valid time from the GRIB file to dataset
        validTime = gdalDataset.GetRasterBand(1).GetMetadata()['GRIB_VALID_TIME']
        self.dataset.SetMetadataItem('time_coverage_start',
            (datetime.datetime.utcfromtimestamp(
                int(validTime.strip().split(' ')[0])).isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
            (datetime.datetime.utcfromtimestamp(
                int(validTime.strip().split(' ')[0])).isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('computer')
        ee = pti.get_gcmd_platform('ncep-gfs')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
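
# For orientation, a hedged numpy sketch of what the 'UVToMagnitude' and
# 'UVToDirectionFrom' pixel functions used above conceptually compute from the
# eastward (u) and northward (v) wind components. The real pixel functions are
# implemented inside Nansat/GDAL; this is only an illustration of the standard
# meteorological convention.
import numpy as np

def uv_to_speed_and_direction_from(u, v):
    speed = np.hypot(u, v)                # wind speed (vector magnitude)
    direction_from = np.mod(180.0 + np.degrees(np.arctan2(u, v)), 360.0)
    return speed, direction_from          # 0 = from north, 90 = from east

# example: a pure westerly wind (u = 10 m/s, v = 0) blows from 270 degrees
print(uv_to_speed_and_direction_from(np.array([10.0]), np.array([0.0])))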
Exemplo n.º 38
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):

        '''
        Parameters
        -----------
        filename : string

        gdalDataset : gdal dataset

        gdalMetadata : gdal metadata

        '''

        self.setup_ads_parameters(filename, gdalMetadata)

        if self.product[0:4] != "ASA_":
            raise WrongMapperError

        if not IMPORT_SCIPY:
            raise NansatReadError('ASAR data cannot be read because scipy is not installed')

        # get channel string (remove '/', since NetCDF
        # does not support that in metadata)
        polarization = [{'channel': gdalMetadata['SPH_MDS1_TX_RX_POLAR']
                        .replace("/", ""), 'bandNum': 1}]
        # if there is the 2nd band, get channel string
        if 'SPH_MDS2_TX_RX_POLAR' in gdalMetadata.keys():
            channel = gdalMetadata['SPH_MDS2_TX_RX_POLAR'].replace("/", "")
            if not(channel.isspace()):
                polarization.append({'channel': channel,
                                     'bandNum': 2})

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset, metadata=gdalMetadata)

        # get calibration constant
        gotCalibration = True
        try:
            for iPolarization in polarization:
                metaKey = ('MAIN_PROCESSING_PARAMS_ADS_CALIBRATION_FACTORS.%d.EXT_CAL_FACT'
                           % (iPolarization['bandNum']))
                iPolarization['calibrationConst'] = float(
                    gdalDataset.GetMetadataItem(metaKey, 'records'))
        except Exception:
            try:
                for iPolarization in polarization:
                    # Apparently some ASAR files have calibration
                    # constant stored in another place
                    metaKey = ('MAIN_PROCESSING_PARAMS_ADS_0_CALIBRATION_FACTORS.%d.EXT_CAL_FACT'
                               % (iPolarization['bandNum']))
                    iPolarization['calibrationConst'] = float(
                        gdalDataset.GetMetadataItem(metaKey, 'records'))
            except Exception:
                self.logger.warning('Cannot get calibrationConst')
                gotCalibration = False

        # add dictionary for raw counts
        metaDict = []
        for iPolarization in polarization:
            iBand = gdalDataset.GetRasterBand(iPolarization['bandNum'])
            dtype = iBand.DataType
            shortName = 'RawCounts_%s' % iPolarization['channel']
            bandName = shortName
            dstName = 'raw_counts_%s' % iPolarization['channel']
            if 8 <= dtype < 12:
                bandName = shortName + '_complex'
                dstName = dstName + '_complex'

            metaDict.append({'src': {'SourceFilename': filename,
                                     'SourceBand': iPolarization['bandNum']},
                             'dst': {'name': dstName}})


            '''
            metaDict.append({'src': {'SourceFilename': filename,
                                     'SourceBand': iPolarization['bandNum']},
                             'dst': {'name': 'raw_counts_%s'
                                     % iPolarization['channel']}})
            '''
            # if raw data is complex, add the intensity band
            if 8 <= dtype < 12:
                # choose pixelfunction type
                if (dtype == 8 or dtype == 9):
                    pixelFunctionType = 'IntensityInt'
                else:
                    pixelFunctionType = 'intensity'
                # get data type of the intensity band
                intensityDataType = {'8': 3, '9': 4,
                                     '10': 5, '11': 6}.get(str(dtype), 4)
                # add intensity band
                metaDict.append(
                    {'src': {'SourceFilename': filename,
                             'SourceBand': iPolarization['bandNum'],
                             'DataType': dtype},
                     'dst': {'name': 'raw_counts_%s'
                                     % iPolarization['channel'],
                             'PixelFunctionType': pixelFunctionType,
                             'SourceTransferType': gdal.GetDataTypeName(dtype),
                             'dataType': intensityDataType}})

        #####################################################################
        # Add incidence angle and look direction through small VRT objects
        #####################################################################
        lon = self.get_array_from_ADS('first_line_longs')
        lat = self.get_array_from_ADS('first_line_lats')
        inc = self.get_array_from_ADS('first_line_incidence_angle')

        # Calculate SAR look direction (ASAR is always right-looking)
        look_direction = initial_bearing(lon[:, :-1], lat[:, :-1],
                                         lon[:, 1:], lat[:, 1:])
        # Interpolate to regain lost row
        look_direction = scipy.ndimage.interpolation.zoom(
            look_direction, (1, 11./10.))
        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT.from_array(look_direction_u)
        look_v_VRT = VRT.from_array(look_direction_v)

        # Note: If incidence angle and look direction are stored in
        #       same VRT, access time is about twice as large
        incVRT = VRT.from_array(inc)
        lookVRT = VRT.from_lonlat(lon, lat)
        lookVRT.create_band([{'SourceFilename': look_u_VRT.filename,
                               'SourceBand': 1},
                              {'SourceFilename': look_v_VRT.filename,
                               'SourceBand': 1}],
                             {'PixelFunctionType': 'UVToDirectionTo'})

        # Blow up bands to full size
        incVRT = incVRT.get_resized_vrt(gdalDataset.RasterXSize, gdalDataset.RasterYSize)
        lookVRT = lookVRT.get_resized_vrt(gdalDataset.RasterXSize, gdalDataset.RasterYSize)
        # Store VRTs so that they are accessible later
        self.band_vrts = {'incVRT': incVRT,
                        'look_u_VRT': look_u_VRT,
                        'look_v_VRT': look_v_VRT,
                        'lookVRT': lookVRT}

        # Add band to full sized VRT
        incFileName = self.band_vrts['incVRT'].filename
        lookFileName = self.band_vrts['lookVRT'].filename
        metaDict.append({'src': {'SourceFilename': incFileName,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'angle_of_incidence',
                                 'name': 'incidence_angle'}})
        metaDict.append({'src': {'SourceFilename': lookFileName,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'sensor_azimuth_angle',
                                 'name': 'look_direction'}})

        ####################
        # Add Sigma0-bands
        ####################
        if gotCalibration:
            for iPolarization in polarization:
                # add dictionary for sigma0, ice and water
                short_names = ['sigma0', 'sigma0_normalized_ice',
                               'sigma0_normalized_water']
                wkt = [
                    'surface_backwards_scattering_coefficient_of_radar_wave',
                    'surface_backwards_scattering_coefficient_of_radar_wave_normalized_over_ice',
                    'surface_backwards_scattering_coefficient_of_radar_wave_normalized_over_water']
                sphPass = [gdalMetadata['SPH_PASS'], '', '']
                sourceFileNames = [filename, incFileName]

                pixelFunctionTypes = ['RawcountsIncidenceToSigma0',
                                      'Sigma0NormalizedIce']
                if iPolarization['channel'] == 'HH':
                    pixelFunctionTypes.append('Sigma0HHNormalizedWater')
                elif iPolarization['channel'] == 'VV':
                    pixelFunctionTypes.append('Sigma0VVNormalizedWater')

                # add pixelfunction bands to metaDict
                for iPixFunc in range(len(pixelFunctionTypes)):
                    srcFiles = []
                    for j, jFileName in enumerate(sourceFileNames):
                        sourceFile = {'SourceFilename': jFileName}
                        if j == 0:
                            sourceFile['SourceBand'] = iPolarization['bandNum']
                            # if ASA_full_incAng,
                            # set 'ScaleRatio' into source file dict
                            sourceFile['ScaleRatio'] = np.sqrt(
                                1.0 / iPolarization['calibrationConst'])
                        else:
                            sourceFile['SourceBand'] = 1
                        srcFiles.append(sourceFile)

                    metaDict.append({
                        'src': srcFiles,
                        'dst': {'short_name': short_names[iPixFunc],
                                'wkv': wkt[iPixFunc],
                                'PixelFunctionType': (
                                pixelFunctionTypes[iPixFunc]),
                                'polarization': iPolarization['channel'],
                                'suffix': iPolarization['channel'],
                                'pass': sphPass[iPixFunc],
                                'dataType': 6}})

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # Add orbit and look information to metadata domain
        # ASAR is always right-looking
        self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT')
        self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                        gdalMetadata['SPH_PASS'].upper().strip())

        ###################################################################
        # Estimate sigma0_VV from sigma0_HH
        ###################################################################
        polarizations = []
        for pp in polarization:
            polarizations.append(pp['channel'])
        if 'VV' not in polarizations and 'HH' in polarizations:
            srcFiles = []
            for j, jFileName in enumerate(sourceFileNames):
                sourceFile = {'SourceFilename': jFileName}
                if j == 0:
                    sourceFile['SourceBand'] = iPolarization['bandNum']
                    # if ASA_full_incAng,
                    # set 'ScaleRatio' into source file dict
                    sourceFile['ScaleRatio'] = np.sqrt(
                        1.0 / iPolarization['calibrationConst'])
                else:
                    sourceFile['SourceBand'] = 1
                srcFiles.append(sourceFile)
            dst = {'wkv': (
                   'surface_backwards_scattering_coefficient_of_radar_wave'),
                   'PixelFunctionType': 'Sigma0HHToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self.create_band(srcFiles, dst)
            self.dataset.FlushCache()

        # set time
        self._set_envisat_time(gdalMetadata)

        # When using TPS for reprojection, use only every 3rd GCP
        # to improve performance (tradeoff vs accuracy)
        self.dataset.SetMetadataItem('skip_gcps', '3')

        self.dataset.SetMetadataItem('time_coverage_start',
                                     (parse(gdalMetadata['MPH_SENSING_START']).
                                      isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                                     (parse(gdalMetadata['MPH_SENSING_STOP']).
                                      isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('asar')
        ee = pti.get_gcmd_platform('envisat')

        # TODO: Validate that the found instrument and platform are indeed what
        # we want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Exemplo n.º 39
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):

        if zipfile.is_zipfile(fileName):
            zz = zipfile.PyZipFile(fileName)
            # Assuming the file names are consistent, the polarization
            # dependent data should be sorted equally such that we can use the
            # same indices consistently for all the following lists
            # THIS IS NOT THE CASE...
            mdsFiles = ['/vsizip/%s/%s' % (fileName, fn)
                        for fn in zz.namelist() if 'measurement/s1a' in fn]
            calFiles = ['/vsizip/%s/%s' % (fileName, fn)
                        for fn in zz.namelist()
                        if 'annotation/calibration/calibration-s1a' in fn]
            noiseFiles = ['/vsizip/%s/%s' % (fileName, fn)
                          for fn in zz.namelist()
                          if 'annotation/calibration/noise-s1a' in fn]
            annotationFiles = ['/vsizip/%s/%s' % (fileName, fn)
                               for fn in zz.namelist()
                               if 'annotation/s1a' in fn]
            manifestFile = ['/vsizip/%s/%s' % (fileName, fn)
                            for fn in zz.namelist()
                            if 'manifest.safe' in fn]
            zz.close()
        else:
            mdsFiles = glob.glob('%s/measurement/s1a*' % fileName)
            calFiles = glob.glob('%s/annotation/calibration/calibration-s1a*'
                                 % fileName)
            noiseFiles = glob.glob('%s/annotation/calibration/noise-s1a*'
                                   % fileName)
            annotationFiles = glob.glob('%s/annotation/s1a*'
                                        % fileName)
            manifestFile = glob.glob('%s/manifest.safe' % fileName)

        if (not mdsFiles or not calFiles or not noiseFiles or
                not annotationFiles or not manifestFile):
            raise WrongMapperError

        mdsDict = {}
        for mds in mdsFiles:
            mdsDict[int((os.path.splitext(os.path.basename(mds))[0].
                         split('-'))[-1:][0])] = mds
        calDict = {}
        for ff in calFiles:
            calDict[int((os.path.splitext(os.path.basename(ff))[0].
                         split('-'))[-1:][0])] = ff
        noiseDict = {}
        for ff in noiseFiles:
            noiseDict[int((os.path.splitext(os.path.basename(ff))[0].
                           split('-'))[-1:][0])] = ff
        annotationDict = {}
        for ff in annotationFiles:
            annotationDict[int((os.path.splitext(os.path.basename(ff))[0].
                                split('-'))[-1:][0])] = ff

        manifestXML = self.read_xml(manifestFile[0])

        gdalDatasets = {}
        for key in mdsDict.keys():
            # Open data files
            gdalDatasets[key] = gdal.Open(mdsDict[key])

        if not gdalDatasets:
            raise WrongMapperError('No Sentinel-1 datasets found')

        # Check metadata to confirm it is Sentinel-1 L1
        for key in gdalDatasets:
            metadata = gdalDatasets[key].GetMetadata()
            break
        if 'TIFFTAG_IMAGEDESCRIPTION' not in metadata.keys():
            raise WrongMapperError
        if ('Sentinel-1' not in metadata['TIFFTAG_IMAGEDESCRIPTION']
                and 'L1' not in metadata['TIFFTAG_IMAGEDESCRIPTION']):
            raise WrongMapperError

        warnings.warn('Sentinel-1 level-1 mapper is not yet adapted to '
                      'complex data. In addition, the band names should be '
                      'updated for multi-swath data - '
                      'and there might be other issues.')

        # create empty VRT dataset with geolocation only
        for key in gdalDatasets:
            VRT.__init__(self, gdalDatasets[key])
            break

        # Read annotation, noise and calibration xml-files
        pol = {}
        it = 0
        for key in annotationDict.keys():
            xml = Node.create(self.read_xml(annotationDict[key]))
            pol[key] = (xml.node('product').
                        node('adsHeader')['polarisation'].upper())
            it += 1
            if it == 1:
                # Get incidence angle
                pi = xml.node('generalAnnotation').node('productInformation')
                self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                             str(pi['pass']))
                # Incidence angles are found in
                #<geolocationGrid>
                #    <geolocationGridPointList count="#">
                #          <geolocationGridPoint>
                geolocationGridPointList = (xml.node('geolocationGrid').
                                            children[0])
                X = []
                Y = []
                lon = []
                lat = []
                inc = []
                ele = []
                for gridPoint in geolocationGridPointList.children:
                    X.append(int(gridPoint['pixel']))
                    Y.append(int(gridPoint['line']))
                    lon.append(float(gridPoint['longitude']))
                    lat.append(float(gridPoint['latitude']))
                    inc.append(float(gridPoint['incidenceAngle']))
                    ele.append(float(gridPoint['elevationAngle']))

                X = np.unique(X)
                Y = np.unique(Y)

                lon = np.array(lon).reshape(len(Y), len(X))
                lat = np.array(lat).reshape(len(Y), len(X))
                inc = np.array(inc).reshape(len(Y), len(X))
                ele = np.array(ele).reshape(len(Y), len(X))

                incVRT = VRT(array=inc, lat=lat, lon=lon)
                eleVRT = VRT(array=ele, lat=lat, lon=lon)
                incVRT = incVRT.get_resized_vrt(self.dataset.RasterXSize,
                                                self.dataset.RasterYSize,
                                                eResampleAlg=2)
                eleVRT = eleVRT.get_resized_vrt(self.dataset.RasterXSize,
                                                self.dataset.RasterYSize,
                                                eResampleAlg=2)
                self.bandVRTs['incVRT'] = incVRT
                self.bandVRTs['eleVRT'] = eleVRT
        for key in calDict.keys():
            xml = self.read_xml(calDict[key])
            calibration_LUT_VRTs, longitude, latitude = (
                self.get_LUT_VRTs(xml,
                                  'calibrationVectorList',
                                  ['sigmaNought', 'betaNought',
                                   'gamma', 'dn']
                                  ))
            self.bandVRTs['LUT_sigmaNought_VRT_'+pol[key]] = (
                calibration_LUT_VRTs['sigmaNought'].
                get_resized_vrt(self.dataset.RasterXSize,
                                self.dataset.RasterYSize,
                                eResampleAlg=1))
            self.bandVRTs['LUT_betaNought_VRT_'+pol[key]] = (
                calibration_LUT_VRTs['betaNought'].
                get_resized_vrt(self.dataset.RasterXSize,
                                self.dataset.RasterYSize,
                                eResampleAlg=1))
            self.bandVRTs['LUT_gamma_VRT'] = calibration_LUT_VRTs['gamma']
            self.bandVRTs['LUT_dn_VRT'] = calibration_LUT_VRTs['dn']
        for key in noiseDict.keys():
            xml = self.read_xml(noiseDict[key])
            noise_LUT_VRT = self.get_LUT_VRTs(xml, 'noiseVectorList',
                                              ['noiseLut'])[0]
            self.bandVRTs['LUT_noise_VRT_'+pol[key]] = (
                noise_LUT_VRT['noiseLut'].get_resized_vrt(
                    self.dataset.RasterXSize,
                    self.dataset.RasterYSize,
                    eResampleAlg=1))

        metaDict = []
        bandNumberDict = {}
        bnmax = 0
        for key in gdalDatasets.keys():
            dsPath, dsName = os.path.split(mdsDict[key])
            name = 'DN_%s' % pol[key]
            # A dictionary of band numbers is needed for the pixel function
            # bands further down. This is not the best solution. It would be
            # better to have a function in VRT that returns the number given a
            # band name. This function exists in Nansat but could perhaps be
            # moved to VRT? The existing nansat function could just call the
            # VRT one...
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            band = gdalDatasets[key].GetRasterBand(1)
            dtype = band.DataType
            metaDict.append({
                'src': {
                    'SourceFilename': mdsDict[key],
                    'SourceBand': 1,
                    'DataType': dtype,
                },
                'dst': {
                    'name': name,
                    'SourceTransferType': gdal.GetDataTypeName(dtype),
                    'dataType': 6,
                },
            })
        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        '''
        Calibration should be performed as

        s0 = DN^2/sigmaNought^2,

        where sigmaNought is from e.g.
        annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml,
        and DN is the Digital Numbers in the tiff files.

        Also the noise should be subtracted.

        See
        https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT
        '''
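        # A hedged sketch (not used by this mapper) of how the calibration note
        # above could be applied directly with numpy, assuming dn, sigma_nought
        # and noise are same-shaped arrays read from the DN band and from the
        # resized calibration/noise LUTs created earlier:
        def _sigma0_sketch(dn, sigma_nought, noise):
            # subtract the noise from DN^2, then scale by sigmaNought^2
            return (dn.astype('float64') ** 2 - noise) / sigma_nought ** 2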
        # Get look direction
        sat_heading = initial_bearing(longitude[:-1, :],
                                      latitude[:-1, :],
                                      longitude[1:, :],
                                      latitude[1:, :])
        look_direction = scipy.ndimage.interpolation.zoom(
            np.mod(sat_heading + 90, 360),
            (np.shape(longitude)[0] / (np.shape(longitude)[0]-1.), 1))

        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT(array=look_direction_u,
                         lat=latitude, lon=longitude)
        look_v_VRT = VRT(array=look_direction_v,
                         lat=latitude, lon=longitude)
        lookVRT = VRT(lat=latitude, lon=longitude)
        lookVRT._create_band([{'SourceFilename': look_u_VRT.fileName,
                               'SourceBand': 1},
                              {'SourceFilename': look_v_VRT.fileName,
                               'SourceBand': 1}],
                             {'PixelFunctionType': 'UVToDirectionTo'}
                             )

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize,
                                          self.dataset.RasterYSize,
                                          eResampleAlg=1)

        # Store VRTs so that they are accessible later
        self.bandVRTs['look_u_VRT'] = look_u_VRT
        self.bandVRTs['look_v_VRT'] = look_v_VRT
        self.bandVRTs['lookVRT'] = lookVRT

        metaDict = []
        # Add bands to full size VRT
        for key in pol:
            name = 'LUT_sigmaNought_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': {'SourceFilename':
                         (self.bandVRTs['LUT_sigmaNought_VRT_' +
                          pol[key]].fileName),
                         'SourceBand': 1
                         },
                 'dst': {'name': name
                         }
                 })
            name = 'LUT_noise_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename': self.bandVRTs['LUT_noise_VRT_' +
                                                   pol[key]].fileName,
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })

        name = 'look_direction'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        metaDict.append({
            'src': {
                'SourceFilename': self.bandVRTs['lookVRT'].fileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': name
            }
        })

        for key in gdalDatasets.keys():
            dsPath, dsName = os.path.split(mdsDict[key])
            name = 'sigma0_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': [{'SourceFilename': self.fileName,
                          'SourceBand': bandNumberDict['DN_%s' % pol[key]],
                          },
                         {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_%s'
                                             % pol[key]].fileName),
                          'SourceBand': 1
                          },
                         {'SourceFilename':
                          (self.bandVRTs['LUT_sigmaNought_VRT_%s'
                           % pol[key]].fileName),
                          'SourceBand': 1
                          }
                         ],
                 'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                         'PixelFunctionType': 'Sentinel1Calibration',
                         'polarization': pol[key],
                         'suffix': pol[key],
                         },
                 })
            name = 'beta0_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': [{'SourceFilename': self.fileName,
                          'SourceBand': bandNumberDict['DN_%s' % pol[key]]
                          },
                         {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_%s'
                                             % pol[key]].fileName),
                          'SourceBand': 1
                          },
                         {'SourceFilename':
                          (self.bandVRTs['LUT_betaNought_VRT_%s'
                           % pol[key]].fileName),
                          'SourceBand': 1
                          }
                         ],
                 'dst': {'wkv': 'surface_backwards_brightness_coefficient_of_radar_wave',
                         'PixelFunctionType': 'Sentinel1Calibration',
                         'polarization': pol[key],
                         'suffix': pol[key],
                         },
                 })

        self._create_bands(metaDict)

        # Add incidence angle as band
        name = 'incidence_angle'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.bandVRTs['incVRT'].fileName,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_incidence',
               'name': name}
        self._create_band(src, dst)
        self.dataset.FlushCache()

        # Add elevation angle as band
        name = 'elevation_angle'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.bandVRTs['eleVRT'].fileName,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_elevation',
               'name': name}
        self._create_band(src, dst)
        self.dataset.FlushCache()

        # Add sigma0_VV
        pp = [pol[key] for key in pol]
        if 'VV' not in pp and 'HH' in pp:
            name = 'sigma0_VV'
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            src = [{'SourceFilename': self.fileName,
                    'SourceBand': bandNumberDict['DN_HH'],
                    },
                   {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_HH'].
                                       fileName),
                    'SourceBand': 1
                    },
                   {'SourceFilename': (self.bandVRTs['LUT_sigmaNought_VRT_HH'].
                                       fileName),
                    'SourceBand': 1,
                    },
                   {'SourceFilename': self.bandVRTs['incVRT'].fileName,
                    'SourceBand': 1}
                   ]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self._create_band(src, dst)
            self.dataset.FlushCache()

        # set time as acquisition start time
        n = Node.create(manifestXML)
        meta = n.node('metadataSection')
        for nn in meta.children:
            if nn.getAttribute('ID') == u'acquisitionPeriod':
                # set valid time
                self.dataset.SetMetadataItem(
                    'time_coverage_start',
                    parse((nn.node('metadataWrap').
                           node('xmlData').
                           node('safe:acquisitionPeriod')['safe:startTime'])
                          ).isoformat())
                self.dataset.SetMetadataItem(
                    'time_coverage_end',
                    parse((nn.node('metadataWrap').
                           node('xmlData').
                           node('safe:acquisitionPeriod')['safe:stopTime'])
                          ).isoformat())

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('sar')
        ee = pti.get_gcmd_platform('sentinel-1a')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Exemplo n.º 40
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 geolocation=False,
                 zoomSize=500,
                 step=1,
                 **kwargs):
        ''' Create MER1 VRT

        Parameters
        -----------
        filename : string

        gdalDataset : gdal dataset

        gdalMetadata : gdal metadata

        geolocation : bool (default is False)
            if True, add gdal geolocation

        zoomSize: int (used in envisat.py)
            size to which the ADS array will be zoomed using scipy;
            an array of this size will be stored in memory

        step: int (used in envisat.py)
            step of pixel and line in GeolocationArrays. lat/lon grids are
            generated at that step

        '''

        self.setup_ads_parameters(filename, gdalMetadata)

        if (self.product[0:9] != "MER_FRS_1"
                and self.product[0:9] != "MER_RR__1"):
            raise WrongMapperError

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset)

        metaDict = [{
            'src': {
                'SourceFilename': filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '413'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 2
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '443'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 3
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '490'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 4
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '510'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 5
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '560'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 6
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '620'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 7
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '665'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 8
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '681'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 9
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '709'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 10
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '753'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 11
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '761'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 12
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '778'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 13
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '864'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 14
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '885'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 15
            },
            'dst': {
                'wkv': 'toa_outgoing_spectral_radiance',
                'wavelength': '900'
            }
        }, {
            'src': {
                'SourceFilename': filename,
                'SourceBand': 16,
                'DataType': 1
            },
            'dst': {
                'wkv': 'quality_flags',
                'suffix': 'l1'
            }
        }]

        # add DataType into 'src' and suffix into 'dst'
        for bandDict in metaDict:
            if 'DataType' not in bandDict['src']:
                # default for meris L1 DataType UInt16
                bandDict['src']['DataType'] = 2
            if 'wavelength' in bandDict['dst']:
                bandDict['dst']['suffix'] = bandDict['dst']['wavelength']

        # get scaling GADS from header
        scales = self.read_scaling_gads(range(7, 22))
        # set scale factor to the band metadata (only radiances)
        for i, bandDict in enumerate(metaDict[:-1]):
            bandDict['src']['ScaleRatio'] = str(scales[i])

        # get list with resized VRTs from ADS
        self.band_vrts = {
            'adsVRTs':
            self.get_ads_vrts(gdalDataset, [
                'sun zenith angles', 'sun azimuth angles', 'zonal winds',
                'meridional winds'
            ],
                              zoomSize=zoomSize,
                              step=step)
        }
        # add bands from the ADS VRTs
        for adsVRT in self.band_vrts['adsVRTs']:
            metaDict.append({
                'src': {
                    'SourceFilename': adsVRT.filename,
                    'SourceBand': 1
                },
                'dst': {
                    'name':
                    (adsVRT.dataset.GetRasterBand(1).GetMetadataItem('name')),
                    'units':
                    (adsVRT.dataset.GetRasterBand(1).GetMetadataItem('units'))
                }
            })

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # set time
        self._set_envisat_time(gdalMetadata)

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('meris')
        ee = pti.get_gcmd_platform('envisat')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        # add geolocation arrays
        if geolocation:
            self.add_geolocation_from_ads(gdalDataset,
                                          zoomSize=zoomSize,
                                          step=step)
Exemplo n.º 41
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 GCP_STEP=20,
                 MAX_LAT=90,
                 MIN_LAT=50,
                 resolution='low',
                 **kwargs):
        ''' Create VRT
        Parameters
        ----------
        GCP_STEP : int
            step in pixels between GCPs along each dimension
        MAX_LAT, MIN_LAT : float
            maximum and minimum latitude for accepted GCPs
        resolution : str
            'low' uses the 243-pixel wide subdatasets, otherwise the
            486-pixel wide subdatasets are used
        '''
        ifile = os.path.split(filename)[1]
        if not ifile.startswith('GW1AM2_') or not ifile.endswith('.h5'):
            raise WrongMapperError
        try:
            ProductName = gdalMetadata['ProductName']
            PlatformShortName = gdalMetadata['PlatformShortName']
            SensorShortName = gdalMetadata['SensorShortName']
        except:
            raise WrongMapperError

        if (not ProductName == 'AMSR2-L1R'
                or not PlatformShortName == 'GCOM-W1'
                or not SensorShortName == 'AMSR2'):
            raise WrongMapperError

        if resolution == 'low':
            subDatasetWidth = 243
        else:
            subDatasetWidth = 486

        # get GCPs from lon/lat grids
        latGrid = gdal.Open(
            'HDF5:"%s"://Latitude_of_Observation_Point_for_89A' %
            filename).ReadAsArray()
        lonGrid = gdal.Open(
            'HDF5:"%s"://Longitude_of_Observation_Point_for_89A' %
            filename).ReadAsArray()
        if subDatasetWidth == 243:
            latGrid = latGrid[:, ::2]
            lonGrid = lonGrid[:, ::2]

        dx = .5
        dy = .5
        gcps = []
        k = 0
        maxY = 0
        minY = latGrid.shape[0]
        for i0 in range(0, latGrid.shape[0], GCP_STEP):
            for i1 in range(0, latGrid.shape[1], GCP_STEP):
                # create GCP with X,Y,pixel,line from lat/lon matrices
                lon = float(lonGrid[i0, i1])
                lat = float(latGrid[i0, i1])
                if (lon >= -180 and lon <= 180 and lat >= MIN_LAT
                        and lat <= MAX_LAT):
                    gcp = gdal.GCP(lon, lat, 0, i1 + dx, i0 + dy)
                    gcps.append(gcp)
                    k += 1
                    maxY = max(maxY, i0)
                    minY = min(minY, i0)
        yOff = minY
        ySize = maxY - minY

        # remove Y-offset from gcps
        for gcp in gcps:
            gcp.GCPLine -= yOff

        metaDict = []

        subDatasets = gdalDataset.GetSubDatasets()
        metadata = gdalDataset.GetMetadata()
        for subDataset in subDatasets:
            # select subdatasets for that resolution (width)
            if (subDatasetWidth == int(
                    subDataset[1].split(']')[0].split('x')[-1])
                    and 'Latitude' not in subDataset[0]
                    and 'Longitude' not in subDataset[0]):
                name = subDataset[0].split('/')[-1]
                # find scale
                scale = 1
                for meta in metadata:
                    if name + '_SCALE' in meta:
                        scale = float(metadata[meta])
                # create meta entry
                metaEntry = {
                    'src': {
                        'SourceFilename': subDataset[0],
                        'sourceBand': 1,
                        'ScaleRatio': scale,
                        'ScaleOffset': 0,
                        'yOff': yOff,
                        'ySize': ySize,
                    },
                    'dst': {
                        'name': name
                    }
                }
                metaDict.append(metaEntry)

        # create VRT from one of the subdatasets
        gdalSubDataset = gdal.Open(metaEntry['src']['SourceFilename'])
        self._init_from_dataset_params(subDatasetWidth, ySize,
                                       (1, 0, 0, ySize, 0, -1),
                                       NSR().wkt)
        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        self.dataset.SetMetadataItem(
            'time_coverage_start',
            parse_time(gdalMetadata['ObservationStartDateTime']).isoformat())
        self.dataset.SetMetadataItem(
            'time_coverage_end',
            parse_time(gdalMetadata['ObservationEndDateTime']).isoformat())
        # append GCPs and lat/lon projection to the vsiDataset
        self.dataset.SetGCPs(gcps, NSR().wkt)
        self.reproject_gcps(
            '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs'
        )
        self.tps = True

        mm = pti.get_gcmd_instrument('AMSR2')
        ee = pti.get_gcmd_platform('GCOM-W1')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
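A standalone sketch of the subdataset-width parse used in the AMSR2 mapper above, assuming GDAL reports HDF5 subdataset descriptions of the form '[2030x243] //Some_Dataset (16-bit unsigned integer)' (the example description is illustrative):

def subdataset_width(description):
    # '[2030x243] //Some_Dataset (...)' -> '2030x243' -> 243
    size_part = description.split(']')[0].lstrip('[')
    return int(size_part.split('x')[-1])

print(subdataset_width('[2030x243] //Brightness_Temperature_(res06,6.9GHz,H) (16-bit unsigned integer)'))  # 243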
    def test_get_gcmd_platform(self):
        item = 'AQUA'
        self.assertIsInstance(pti.get_gcmd_platform(item),
                              collections.OrderedDict)
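A minimal usage sketch of the GCMD look-ups exercised by the test above and used throughout these mappers, assuming pti is the pythesint package; 'MODIS' and 'AQUA' are just example vocabulary entries:

import json
import pythesint as pti

# Fetch controlled GCMD vocabulary entries (OrderedDicts) and serialise them
# to JSON strings, the same pattern the mappers use for dataset metadata items.
instrument = pti.get_gcmd_instrument('MODIS')
platform = pti.get_gcmd_platform('AQUA')
print(json.dumps(instrument))
print(json.dumps(platform))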
Exemplo n.º 43
0
    def __init__(self,
                 fileName,
                 gdalDataset,
                 gdalMetadata,
                 latlonGrid=None,
                 mask='',
                 **kwargs):
        ''' Create VRT

        Parameters
        -----------
        fileName : string
        gdalDataset : gdal dataset
        gdalMetadata : gdal metadata
        latlonGrid : numpy 2 layered 2D array with lat/lons of desired grid
        '''
        # test if the input file is ASCAT
        iDir, iFile = os.path.split(fileName)
        iFileName, iFileExt = os.path.splitext(iFile)
        try:
            assert iFileName[0:6] == 'ascat_' and iFileExt == '.nc'
        except:
            raise WrongMapperError

        # Create geolocation
        subDataset = gdal.Open('NETCDF:"' + fileName + '":lat')
        self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize,
                             srcRasterYSize=subDataset.RasterYSize)

        GeolocMetaDict = [{
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":lon'),
                'SourceBand': 1,
                'ScaleRatio': 0.00001,
                'ScaleOffset': -360
            },
            'dst': {}
        }, {
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":lat'),
                'SourceBand': 1,
                'ScaleRatio': 0.00001,
                'ScaleOffset': 0
            },
            'dst': {}
        }]

        self.GeolocVRT._create_bands(GeolocMetaDict)

        GeolocObject = GeolocationArray(
            xVRT=self.GeolocVRT,
            yVRT=self.GeolocVRT,
            # x = lon, y = lat
            xBand=1,
            yBand=2,
            lineOffset=0,
            pixelOffset=0,
            lineStep=1,
            pixelStep=1)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self,
                     srcRasterXSize=subDataset.RasterXSize,
                     srcRasterYSize=subDataset.RasterYSize,
                     gdalDataset=subDataset,
                     geolocationArray=GeolocObject,
                     srcProjection=GeolocObject.d['SRS'])

        # Scale and NODATA should ideally be taken directly from raw file
        metaDict = [{
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":wind_speed'),
                'ScaleRatio': 0.01,
                'NODATA': -32767
            },
            'dst': {
                'name': 'windspeed',
                'wkv': 'wind_speed'
            }
        }, {
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":wind_dir'),
                'ScaleRatio': 0.1,
                'NODATA': -32767
            },
            'dst': {
                'name': 'winddirection',
                'wkv': 'wind_from_direction'
            }
        }]

        self._create_bands(metaDict)

        # This should not be necessary
        # - should be provided by GeolocationArray!
        self.dataset.SetProjection(GeolocObject.d['SRS'])

        # Add time
        startTime = datetime.datetime(int(iFileName[6:10]),
                                      int(iFileName[10:12]),
                                      int(iFileName[12:14]),
                                      int(iFileName[15:17]),
                                      int(iFileName[17:19]),
                                      int(iFileName[19:21]))
        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start',
                                     startTime.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                                     startTime.isoformat())

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('ascat')
        ee = pti.get_gcmd_platform('metop-a')

        # TODO: Validate that the found instrument and platform are indeed what
        # we want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
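The ASCAT mapper above recovers the start time by slicing the file name at fixed positions; the same parse written with strptime, assuming names such as 'ascat_20100101_010300_metopa_..._ovw.nc' (the example name is illustrative):

import datetime

fname = 'ascat_20100101_010300_metopa_16597_eps_o_coa_ovw.nc'  # illustrative name
# characters 6-20 hold 'YYYYMMDD_HHMMSS', matching the fixed-index slicing above
start_time = datetime.datetime.strptime(fname[6:21], '%Y%m%d_%H%M%S')
print(start_time.isoformat())  # 2010-01-01T01:03:00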
Exemplo n.º 44
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create NCEP VRT '''

        if not gdalDataset:
            raise WrongMapperError

        geotransform = gdalDataset.GetGeoTransform()
        if (geotransform == (-0.25, 0.5, 0.0, 90.25, 0.0, -0.5) or
                geotransform == (-0.5, 1.0, 0.0, 90.5, 0.0, -1.0)):
            if gdalDataset.RasterCount == 4:
                srcBandId = {'temperature': 2,
                             'u-component': 3,
                             'v-component': 4}
            elif gdalDataset.RasterCount == 9:
                srcBandId = {'temperature': 6,
                             'u-component': 8,
                             'v-component': 9}
            else:
                raise WrongMapperError
        else:
            raise WrongMapperError  # Not water proof

        metaDict = [{'src': {'SourceFilename': fileName,
                             'SourceBand': srcBandId['u-component']},
                     'dst': {'wkv': 'eastward_wind',
                             'height': '10 m'}},
                    {'src': {'SourceFilename': fileName,
                             'SourceBand': srcBandId['v-component']},
                     'dst': {'wkv': 'northward_wind',
                             'height': '10 m'}},
                    {'src': [{'SourceFilename': fileName,
                              'SourceBand': srcBandId['u-component'],
                              'DataType': (gdalDataset.GetRasterBand(srcBandId['u-component']).DataType)
                              },
                             {'SourceFilename': fileName,
                              'SourceBand': srcBandId['v-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['v-component']).DataType
                              }],
                     'dst': {'wkv': 'wind_speed',
                             'PixelFunctionType': 'UVToMagnitude',
                             'name': 'windspeed',
                             'height': '10 m'
                             }},
                    {'src': [{'SourceFilename': fileName,
                              'SourceBand': srcBandId['u-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['u-component']).DataType
                              },
                             {'SourceFilename': fileName,
                              'SourceBand': srcBandId['v-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['v-component']).DataType
                              }],
                     'dst': {'wkv': 'wind_from_direction',
                             'PixelFunctionType': 'UVToDirectionFrom',
                             'name': 'winddirection',
                             'height': '10 m'
                             }},
                    {'src': {'SourceFilename': fileName,
                             'SourceBand': srcBandId['temperature']},
                     'dst': {'wkv': 'air_temperature',
                             'name': 'air_t',
                             'height': '2 m'}
                     }]

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # Adding valid time from the GRIB file to dataset
        band = gdalDataset.GetRasterBand(srcBandId['u-component'])
        validTime = band.GetMetadata()['GRIB_VALID_TIME']

        self.dataset.SetMetadataItem('time_coverage_start',
            (datetime.datetime.utcfromtimestamp(
                int(validTime.strip().split(' ')[0])).isoformat()))

        self.dataset.SetMetadataItem('time_coverage_end',
            ((datetime.datetime.utcfromtimestamp(
                int(validTime.strip().split(' ')[0]))
             + datetime.timedelta(hours=3)).isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('computer')
        ee = pti.get_gcmd_platform('ncep-gfs')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
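A plain NumPy sketch of what the 'UVToMagnitude' and 'UVToDirectionFrom' pixel functions referenced above are expected to compute; the from-direction uses the conventional meteorological formula, which is an assumption about the pixel-function implementation:

import numpy as np

def wind_speed(u, v):
    # magnitude of the (u, v) wind vector
    return np.hypot(u, v)

def wind_from_direction(u, v):
    # direction the wind blows FROM, degrees clockwise from north
    return np.mod(180.0 + np.degrees(np.arctan2(u, v)), 360.0)

u = np.array([0.0, 5.0, -5.0])
v = np.array([5.0, 0.0, 0.0])
print(wind_speed(u, v))           # [5. 5. 5.]
print(wind_from_direction(u, v))  # [180. 270.  90.]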
Exemplo n.º 45
0
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 GCP_COUNT=10,
                 **kwargs):
        ''' Create VRT
        Parameters
        ----------
        GCP_COUNT : int
            number of GCPs along each dimension
        '''

        # raise an error if this is not an OBPG L2 file
        try:
            title = gdalMetadata["Title"]
        except:
            raise WrongMapperError

        if title not in self.titles:
            raise WrongMapperError

        # get subdataset and parse to VRT.__init__()
        # for retrieving geo-metadata
        # but NOT from longitude or latitude because it can be smaller!
        subDatasets = gdalDataset.GetSubDatasets()
        for subDataset in subDatasets:
            if ('longitude' not in subDataset[1]
                    and 'latitude' not in subDataset[1]):
                gdalSubDataset = gdal.Open(subDataset[0])
                break

        if title == 'GOCI Level-2 Data':
            # set GOCI projection parameters
            rasterXSize = 5567
            rasterYSize = 5685
            proj4 = '+proj=ortho +lat_0=36 +lon_0=130 +units=m +ellps=WGS84 +datum=WGS84 +no_defs'
            srs = osr.SpatialReference()
            srs.ImportFromProj4(proj4)
            projection = srs.ExportToWkt()
            geoTransform = (-1391500.0, 500.0, 0.0, 1349500.0, 0.0, -500.0)

            # create empty VRT dataset with georeference only
            self._init_from_dataset_params(rasterXSize, rasterYSize,
                                           geoTransform, projection)
        else:
            # create empty VRT dataset with geolocation only
            self._init_from_gdal_dataset(gdalSubDataset)

        # parts of dictionary for all Reflectances
        # dictRrs = {'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air', 'wavelength': '412'}
        # dictionary for all possible bands
        allBandsDict = {
            'Rrs': {
                'src': {},
                'dst': {
                    'wkv':
                    'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air'
                }
            },
            'Kd': {
                'src': {},
                'dst': {
                    'wkv':
                    'volume_attenuation_coefficient_of_downwelling_radiative_flux_in_sea_water'
                }
            },
            'chlor_a': {
                'src': {},
                'dst': {
                    'wkv': 'mass_concentration_of_chlorophyll_a_in_sea_water',
                    'case': 'I'
                }
            },
            'cdom_index': {
                'src': {},
                'dst': {
                    'wkv':
                    'volume_absorption_coefficient_of_radiative_flux_in_sea_water_due_to_dissolved_organic_matter',
                    'case': 'II'
                }
            },
            'sst': {
                'src': {},
                'dst': {
                    'wkv': 'sea_surface_temperature'
                }
            },
            'sst4': {
                'src': {},
                'dst': {
                    'wkv': 'sea_surface_temperature'
                }
            },
            'l2_flags': {
                'src': {
                    'SourceType': 'SimpleSource',
                    'DataType': 4
                },
                'dst': {
                    'wkv': 'quality_flags',
                    'dataType': 4
                }
            },
            'qual_sst': {
                'src': {
                    'SourceType': 'SimpleSource',
                    'DataType': 4
                },
                'dst': {
                    'wkv': 'quality_flags',
                    'name': 'qual_sst',
                    'dataType': 4
                }
            },
            'qual_sst4': {
                'src': {
                    'SourceType': 'SimpleSource',
                    'DataType': 4
                },
                'dst': {
                    'wkv': 'quality_flags',
                    'name': 'qual_sst4',
                    'dataType': 4
                }
            },
            'latitude': {
                'src': {},
                'dst': {
                    'wkv': 'latitude'
                }
            },
            'longitude': {
                'src': {},
                'dst': {
                    'wkv': 'longitude'
                }
            },
            'par': {
                'src': {},
                'dst': {
                    'wkv':
                    'downwelling_photosynthetic_photon_radiance_in_sea_water'
                }
            },
            'ipar': {
                'src': {},
                'dst': {
                    'wkv':
                    'instantaneous_downwelling_photosynthetic_photon_radiance_in_sea_water'
                }
            },
        }

        # loop through available bands and generate metaDict (non fixed)
        metaDict = []
        bandNo = 0
        for subDataset in subDatasets:
            # get sub dataset name
            subDatasetName = subDataset[1].split(' ')[1]
            self.logger.debug('Subdataset: %s' % subDataset[1])
            self.logger.debug('Subdataset name: "%s"' % subDatasetName)
            # get wavelength if applicable, get dataset name without wavelength
            try:
                wavelength = int(subDatasetName.split('_')[-1])
            except:
                wavelength = None
                subBandName = subDatasetName
            else:
                subBandName = subDatasetName.split('_')[0]

            self.logger.debug('subBandName, wavelength: %s %s' %
                              (subBandName, str(wavelength)))

            if subBandName in allBandsDict:
                # get name, slope, intercept
                self.logger.debug('name: %s' % subBandName)
                tmpSubDataset = gdal.Open(subDataset[0])
                tmpSubMetadata = tmpSubDataset.GetMetadata()
                slope = tmpSubMetadata.get('slope', '1')
                intercept = tmpSubMetadata.get('intercept', '0')
                self.logger.debug('slope, intercept: %s %s ' %
                                  (slope, intercept))
                # create meta entry
                metaEntry = {
                    'src': {
                        'SourceFilename': subDataset[0],
                        'sourceBand': 1,
                        'ScaleRatio': slope,
                        'ScaleOffset': intercept
                    },
                    'dst': {}
                }
                # add more to src
                for srcKey in allBandsDict[subBandName]['src']:
                    metaEntry['src'][srcKey] = (
                        allBandsDict[subBandName]['src'][srcKey])
                # add dst from allBandsDict
                for dstKey in allBandsDict[subBandName]['dst']:
                    metaEntry['dst'][dstKey] = (
                        allBandsDict[subBandName]['dst'][dstKey])

                # add wavelength, band name to dst
                if wavelength is not None:
                    metaEntry['dst']['suffix'] = str(wavelength)
                    metaEntry['dst']['wavelength'] = str(wavelength)

                # append band metadata to metaDict
                self.logger.debug('metaEntry: %d => %s' %
                                  (bandNo, str(metaEntry)))
                metaDict.append(metaEntry)
                bandNo += 1

                if subBandName == 'Rrs':
                    rrsSubDataset = subDataset[0]
                    metaEntryRrsw = {
                        'src': [{
                            'SourceFilename': subDataset[0],
                            'SourceBand': 1,
                            'ScaleRatio': slope,
                            'ScaleOffset': intercept,
                            'DataType': 6
                        }],
                        'dst': {
                            'wkv':
                            'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_water',
                            'suffix':
                            str(wavelength),
                            'wavelength':
                            str(wavelength),
                            'PixelFunctionType':
                            'NormReflectanceToRemSensReflectance',
                        }
                    }

                    # append band metadata to metaDict
                    self.logger.debug('metaEntry: %d => %s' %
                                      (bandNo, str(metaEntryRrsw)))
                    metaDict.append(metaEntryRrsw)
                    bandNo += 1

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # set TIME
        startYear = int(gdalMetadata['Start Year'])
        startDay = int(gdalMetadata['Start Day'])
        startMillisec = int(gdalMetadata['Start Millisec'])
        startDate = datetime(startYear, 1, 1) + timedelta(
            startDay - 1, 0, 0, startMillisec)
        # skip adding georeference for GOCI
        if title == 'GOCI Level-2 Data':
            return

        self._remove_geotransform()

        # add geolocation
        geoMeta = self.geolocation.data
        if len(geoMeta) > 0:
            self.dataset.SetMetadata(geoMeta, 'GEOLOCATION')

        # add GCPs
        geolocationMetadata = gdalSubDataset.GetMetadata('GEOLOCATION')
        xDatasetSource = geolocationMetadata['X_DATASET']
        xDataset = gdal.Open(xDatasetSource)

        yDatasetSource = geolocationMetadata['Y_DATASET']
        yDataset = gdal.Open(yDatasetSource)

        longitude = xDataset.ReadAsArray()
        latitude = yDataset.ReadAsArray()

        # estimate pixel/line step of the geolocation arrays
        pixelStep = int(
            ceil(
                float(gdalSubDataset.RasterXSize) /
                float(xDataset.RasterXSize)))
        lineStep = int(
            ceil(
                float(gdalSubDataset.RasterYSize) /
                float(xDataset.RasterYSize)))
        self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))

        # ==== ADD GCPs and Projection ====

        # estimate step of GCPs
        step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
        step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))
        if str(title) == 'VIIRSN Level-2 Data':
            step0 = 64
        self.logger.debug('gcpCount: >%s<, %d %d %f %d %d', title,
                          latitude.shape[0], latitude.shape[1], GCP_COUNT,
                          step0, step1)

        # generate list of GCPs
        dx = .5
        dy = .5
        gcps = []
        k = 0
        center_lon = 0
        center_lat = 0
        for i0 in range(0, latitude.shape[0], step0):
            for i1 in range(0, latitude.shape[1], step1):
                # create GCP with X,Y,pixel,line from lat/lon matrices
                lon = float(longitude[i0, i1])
                lat = float(latitude[i0, i1])
                if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                    gcp = gdal.GCP(lon, lat, 0, i1 * pixelStep + dx,
                                   i0 * lineStep + dy)
                    self.logger.debug('%d %d %d %f %f', k, gcp.GCPPixel,
                                      gcp.GCPLine, gcp.GCPX, gcp.GCPY)
                    gcps.append(gcp)
                    center_lon += gcp.GCPX
                    center_lat += gcp.GCPY
                    k += 1

        # append GCPs and lat/lon projection to the vsiDataset
        self.dataset.SetGCPs(gcps, NSR().wkt)
        self._remove_geolocation()

        # reproject GCPs
        center_lon /= k
        center_lat /= k
        srs = '+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=%f +lat_0=%f +no_defs' % (
            center_lon, center_lat)
        self.reproject_GCPs(srs)

        # use TPS for reprojection
        self.tps = True

        # add NansenCloud metadata
        self.dataset.SetMetadataItem(
            'time_coverage_start',
            (parse(gdalMetadata['time_coverage_start']).isoformat()))
        self.dataset.SetMetadataItem(
            'time_coverage_end',
            (parse(gdalMetadata['time_coverage_stop']).isoformat()))
        instrument = gdalMetadata['Sensor Name'][1:-1]
        platform = {'A': 'AQUA', 'T': 'TERRA'}[gdalMetadata['Sensor Name'][-1]]
        mm = pti.get_gcmd_instrument(instrument)
        ee = pti.get_gcmd_platform(platform)
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
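The slope/intercept attributes read above end up as ScaleRatio/ScaleOffset on the band sources; a small sketch of that linear unpacking (the slope, intercept and raw counts are made-up values):

import numpy as np

# VRT-style scaling: physical_value = raw_count * ScaleRatio + ScaleOffset,
# with ScaleRatio taken from 'slope' and ScaleOffset from 'intercept' above.
raw = np.array([0, 1000, 20000], dtype='int16')
slope, intercept = 0.005, 0.0   # made-up values
physical = raw * slope + intercept
print(physical)  # [  0.   5. 100.]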
Exemplo n.º 46
0
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
        '''
        Parameters
        -----------
        filename : string

        gdalDataset : gdal dataset

        gdalMetadata : gdal metadata

        '''

        self.setup_ads_parameters(filename, gdalMetadata)

        if self.product[0:4] != "ASA_":
            raise WrongMapperError

        if not IMPORT_SCIPY:
            raise NansatReadError(
                'ASAR data cannot be read because scipy is not installed')

        # get channel string (remove '/', since NetCDF
        # does not support that in metadata)
        polarization = [{
            'channel':
            gdalMetadata['SPH_MDS1_TX_RX_POLAR'].replace("/", ""),
            'bandNum':
            1
        }]
        # if there is the 2nd band, get channel string
        if 'SPH_MDS2_TX_RX_POLAR' in gdalMetadata.keys():
            channel = gdalMetadata['SPH_MDS2_TX_RX_POLAR'].replace("/", "")
            if not (channel.isspace()):
                polarization.append({'channel': channel, 'bandNum': 2})

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset, metadata=gdalMetadata)

        # get calibration constant
        gotCalibration = True
        try:
            for iPolarization in polarization:
                metaKey = (
                    'MAIN_PROCESSING_PARAMS_ADS_CALIBRATION_FACTORS.%d.EXT_CAL_FACT'
                    % (iPolarization['bandNum']))
                iPolarization['calibrationConst'] = float(
                    gdalDataset.GetMetadataItem(metaKey, 'records'))
        except:
            try:
                for iPolarization in polarization:
                    # Apparently some ASAR files have calibration
                    # constant stored in another place
                    metaKey = (
                        'MAIN_PROCESSING_PARAMS_ADS_0_CALIBRATION_FACTORS.%d.EXT_CAL_FACT'
                        % (iPolarization['bandNum']))
                    iPolarization['calibrationConst'] = float(
                        gdalDataset.GetMetadataItem(metaKey, 'records'))
            except:
                self.logger.warning('Cannot get calibrationConst')
                gotCalibration = False

        # add dictionary for raw counts
        metaDict = []
        for iPolarization in polarization:
            iBand = gdalDataset.GetRasterBand(iPolarization['bandNum'])
            dtype = iBand.DataType
            shortName = 'RawCounts_%s' % iPolarization['channel']
            bandName = shortName
            dstName = 'raw_counts_%s' % iPolarization['channel']
            if (8 <= dtype and dtype < 12):
                bandName = shortName + '_complex'
                dstName = dstName + '_complex'

            metaDict.append({
                'src': {
                    'SourceFilename': filename,
                    'SourceBand': iPolarization['bandNum']
                },
                'dst': {
                    'name': dstName
                }
            })
            '''
            metaDict.append({'src': {'SourceFilename': filename,
                                     'SourceBand': iPolarization['bandNum']},
                             'dst': {'name': 'raw_counts_%s'
                                     % iPolarization['channel']}})
            '''
            # if raw data is complex, add the intensity band
            if (8 <= dtype and dtype < 12):
                # choose pixelfunction type
                if (dtype == 8 or dtype == 9):
                    pixelFunctionType = 'IntensityInt'
                else:
                    pixelFunctionType = 'intensity'
                # get data type of the intensity band
                intensityDataType = {
                    '8': 3,
                    '9': 4,
                    '10': 5,
                    '11': 6
                }.get(str(dtype), 4)
                # add intensity band
                metaDict.append({
                    'src': {
                        'SourceFilename': filename,
                        'SourceBand': iPolarization['bandNum'],
                        'DataType': dtype
                    },
                    'dst': {
                        'name': 'raw_counts_%s' % iPolarization['channel'],
                        'PixelFunctionType': pixelFunctionType,
                        'SourceTransferType': gdal.GetDataTypeName(dtype),
                        'dataType': intensityDataType
                    }
                })

        #####################################################################
        # Add incidence angle and look direction through small VRT objects
        #####################################################################
        lon = self.get_array_from_ADS('first_line_longs')
        lat = self.get_array_from_ADS('first_line_lats')
        inc = self.get_array_from_ADS('first_line_incidence_angle')

        # Calculate SAR look direction (ASAR is always right-looking)
        look_direction = initial_bearing(lon[:, :-1], lat[:, :-1], lon[:, 1:],
                                         lat[:, 1:])
        # Interpolate to regain lost row
        look_direction = scipy.ndimage.interpolation.zoom(
            look_direction, (1, 11. / 10.))
        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT.from_array(look_direction_u)
        look_v_VRT = VRT.from_array(look_direction_v)

        # Note: If incidence angle and look direction are stored in
        #       same VRT, access time is about twice as large
        incVRT = VRT.from_array(inc)
        lookVRT = VRT.from_lonlat(lon, lat)
        lookVRT.create_band([{
            'SourceFilename': look_u_VRT.filename,
            'SourceBand': 1
        }, {
            'SourceFilename': look_v_VRT.filename,
            'SourceBand': 1
        }], {'PixelFunctionType': 'UVToDirectionTo'})

        # Blow up bands to full size
        incVRT = incVRT.get_resized_vrt(gdalDataset.RasterXSize,
                                        gdalDataset.RasterYSize)
        lookVRT = lookVRT.get_resized_vrt(gdalDataset.RasterXSize,
                                          gdalDataset.RasterYSize)
        # Store VRTs so that they are accessible later
        self.band_vrts = {
            'incVRT': incVRT,
            'look_u_VRT': look_u_VRT,
            'look_v_VRT': look_v_VRT,
            'lookVRT': lookVRT
        }

        # Add band to full sized VRT
        incFileName = self.band_vrts['incVRT'].filename
        lookFileName = self.band_vrts['lookVRT'].filename
        metaDict.append({
            'src': {
                'SourceFilename': incFileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'angle_of_incidence',
                'name': 'incidence_angle'
            }
        })
        metaDict.append({
            'src': {
                'SourceFilename': lookFileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': 'look_direction'
            }
        })

        ####################
        # Add Sigma0-bands
        ####################
        if gotCalibration:
            for iPolarization in polarization:
                # add dictionary for sigma0, ice and water
                short_names = [
                    'sigma0', 'sigma0_normalized_ice',
                    'sigma0_normalized_water'
                ]
                wkt = [
                    'surface_backwards_scattering_coefficient_of_radar_wave',
                    'surface_backwards_scattering_coefficient_of_radar_wave_normalized_over_ice',
                    'surface_backwards_scattering_coefficient_of_radar_wave_normalized_over_water'
                ]
                sphPass = [gdalMetadata['SPH_PASS'], '', '']
                sourceFileNames = [filename, incFileName]

                pixelFunctionTypes = [
                    'RawcountsIncidenceToSigma0', 'Sigma0NormalizedIce'
                ]
                if iPolarization['channel'] == 'HH':
                    pixelFunctionTypes.append('Sigma0HHNormalizedWater')
                elif iPolarization['channel'] == 'VV':
                    pixelFunctionTypes.append('Sigma0VVNormalizedWater')

                # add pixelfunction bands to metaDict
                for iPixFunc in range(len(pixelFunctionTypes)):
                    srcFiles = []
                    for j, jFileName in enumerate(sourceFileNames):
                        sourceFile = {'SourceFilename': jFileName}
                        if j == 0:
                            sourceFile['SourceBand'] = iPolarization['bandNum']
                            # if ASA_full_incAng,
                            # set 'ScaleRatio' into source file dict
                            sourceFile['ScaleRatio'] = np.sqrt(
                                1.0 / iPolarization['calibrationConst'])
                        else:
                            sourceFile['SourceBand'] = 1
                        srcFiles.append(sourceFile)

                    metaDict.append({
                        'src': srcFiles,
                        'dst': {
                            'short_name': short_names[iPixFunc],
                            'wkv': wkt[iPixFunc],
                            'PixelFunctionType':
                            (pixelFunctionTypes[iPixFunc]),
                            'polarization': iPolarization['channel'],
                            'suffix': iPolarization['channel'],
                            'pass': sphPass[iPixFunc],
                            'dataType': 6
                        }
                    })

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # Add orbit and look information to metadata domain
        # ASAR is always right-looking
        self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT')
        self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                     gdalMetadata['SPH_PASS'].upper().strip())

        ###################################################################
        # Estimate sigma0_VV from sigma0_HH
        ###################################################################
        polarizations = []
        for pp in polarization:
            polarizations.append(pp['channel'])
        if 'VV' not in polarizations and 'HH' in polarizations:
            srcFiles = []
            for j, jFileName in enumerate(sourceFileNames):
                sourceFile = {'SourceFilename': jFileName}
                if j == 0:
                    sourceFile['SourceBand'] = iPolarization['bandNum']
                    # if ASA_full_incAng,
                    # set 'ScaleRatio' into source file dict
                    sourceFile['ScaleRatio'] = np.sqrt(
                        1.0 / iPolarization['calibrationConst'])
                else:
                    sourceFile['SourceBand'] = 1
                srcFiles.append(sourceFile)
            dst = {
                'wkv':
                ('surface_backwards_scattering_coefficient_of_radar_wave'),
                'PixelFunctionType': 'Sigma0HHToSigma0VV',
                'polarization': 'VV',
                'suffix': 'VV'
            }
            self.create_band(srcFiles, dst)
            self.dataset.FlushCache()

        # set time
        self._set_envisat_time(gdalMetadata)

        # When using TPS for reprojection, use only every 3rd GCP
        # to improve performance (tradeoff vs accuracy)
        self.dataset.SetMetadataItem('skip_gcps', '3')

        self.dataset.SetMetadataItem(
            'time_coverage_start',
            (parse(gdalMetadata['MPH_SENSING_START']).isoformat()))
        self.dataset.SetMetadataItem(
            'time_coverage_end',
            (parse(gdalMetadata['MPH_SENSING_STOP']).isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('asar')
        ee = pti.get_gcmd_platform('envisat')

        # TODO: Validate that the found instrument and platform are indeed what
        # we want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
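A standalone sketch of why the ASAR mapper above interpolates the sine and cosine of the look direction rather than the angle itself: averaging across the 0/360 wrap directly gives a wrong result, while recombining the components does not (the values are chosen to straddle the wrap):

import numpy as np

a, b = 350.0, 30.0                      # two directions straddling north

naive_mid = (a + b) / 2.0               # 190.0 -- roughly south, clearly wrong
u = (np.sin(np.deg2rad(a)) + np.sin(np.deg2rad(b))) / 2.0
v = (np.cos(np.deg2rad(a)) + np.cos(np.deg2rad(b))) / 2.0
safe_mid = np.mod(np.degrees(np.arctan2(u, v)), 360.0)   # ~10.0 -- correct

print(naive_mid, safe_mid)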
Exemplo n.º 47
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, emrange='VNIR', **kwargs):
        ''' Create ASTER L1B VRT '''
        # check mapper
        try:
            INSTRUMENTSHORTNAME = gdalMetadata['INSTRUMENTSHORTNAME']
        except:
            raise WrongMapperError
        if INSTRUMENTSHORTNAME != 'ASTER':
            raise WrongMapperError
        try:
            SHORTNAME = gdalMetadata['SHORTNAME']
        except:
            raise WrongMapperError
        if SHORTNAME != 'ASTL1B':
            raise WrongMapperError

        # set up metadict for data with various resolution
        subDSString = 'HDF4_EOS:EOS_SWATH:"%s":%s:%s'
        metaDictVNIR = [
        {'src': {'SourceFilename': subDSString % (fileName, 'VNIR_Swath', 'ImageData1' )}, 'dst': {'wavelength': '560'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'VNIR_Swath', 'ImageData2' )}, 'dst': {'wavelength': '660'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'VNIR_Swath', 'ImageData3N')}, 'dst': {'wavelength': '820'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'VNIR_Swath', 'ImageData3B')}, 'dst': {'wavelength': '820'}},
        ]

        metaDictSWIR = [
        {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData4')}, 'dst': {'wavelength': '1650'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData5')}, 'dst': {'wavelength': '2165'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData6')}, 'dst': {'wavelength': '2205'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData7')}, 'dst': {'wavelength': '2260'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData8')}, 'dst': {'wavelength': '2330'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData9')}, 'dst': {'wavelength': '2395'}},
        ]

        metaDictTIR = [
        {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath',  'ImageData10')}, 'dst': {'wavelength': '8300'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath',  'ImageData11')}, 'dst': {'wavelength': '8650'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath',  'ImageData12')}, 'dst': {'wavelength': '9100'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath',  'ImageData13')}, 'dst': {'wavelength': '10600'}},
        {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath',  'ImageData14')}, 'dst': {'wavelength': '11300'}},
        ]

        # select appropriate metaDict based on <emrange> parameter
        metaDict = {'VNIR': metaDictVNIR,
                    'SWIR': metaDictSWIR,
                    'TIR': metaDictTIR,
                    }[emrange]

        # get 1st EOS subdataset and parse to VRT.__init__()
        # for retrieving geo-metadata
        try:
            gdalSubDataset0 = gdal.Open(metaDict[0]['src']['SourceFilename'])
        except (AttributeError, IndexError):
            raise WrongMapperError

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalSubDataset0)

        # add source band, wkv and suffix
        for metaEntry in metaDict:
            metaEntry['src']['SourceBand'] = 1
            metaEntry['dst']['wkv'] = 'toa_outgoing_spectral_radiance'
            metaEntry['dst']['suffix'] = metaEntry['dst']['wavelength']

            if 'ImageData3N' in metaEntry['src']['SourceFilename']:
                metaEntry['dst']['suffix'] += 'N'

            if 'ImageData3B' in metaEntry['src']['SourceFilename']:
                metaEntry['dst']['suffix'] += 'B'

        # add scale and offset
        for metaEntry in metaDict:
            bandNo = metaEntry['src']['SourceFilename'].strip().split(':')[-1].replace('ImageData', '')
            metaEntry['src']['ScaleRatio'] = float(gdalMetadata['INCL' + bandNo])
            metaEntry['src']['ScaleOffset'] = float(gdalMetadata['OFFSET' + bandNo])

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # set time
        datetimeString = self.find_metadata(gdalMetadata, "SETTINGTIMEOFPOINTING")
        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start',
                                     parse(datetimeString+'+00').isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                                     parse(datetimeString+'+00').isoformat())

        mm = pti.get_gcmd_instrument('ASTER')
        ee = pti.get_gcmd_platform('TERRA')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        self.remove_geolocationArray()
Exemplo n.º 48
0
    def __init__(self, fileName, gdalDataset, gdalMetadata,
                       resolution='low', **kwargs):
        ''' Create LANDSAT VRT from multiple tif files or single tar.gz file'''
        mtlFileName = ''
        bandFileNames = []
        bandSizes = []
        bandDatasets = []
        fname = os.path.split(fileName)[1]

        if   (fileName.endswith('.tar') or
              fileName.endswith('.tar.gz') or
              fileName.endswith('.tgz')):
            # try to open .tar or .tar.gz or .tgz file with tar
            try:
                tarFile = tarfile.open(fileName)
            except:
                raise WrongMapperError

            # collect names of bands and corresponding sizes
            # into bandsInfo dict and bandSizes list
            tarNames = sorted(tarFile.getnames())
            for tarName in tarNames:
                # check if TIF files inside TAR qualify
                if   (tarName[0] in ['L', 'M'] and
                      os.path.splitext(tarName)[1] in ['.TIF', '.tif']):
                    # open TIF file from TAR using VSI
                    sourceFilename = '/vsitar/%s/%s' % (fileName, tarName)
                    gdalDatasetTmp = gdal.Open(sourceFilename)
                    # keep name, GDALDataset and size
                    bandFileNames.append(sourceFilename)
                    bandSizes.append(gdalDatasetTmp.RasterXSize)
                    bandDatasets.append(gdalDatasetTmp)
                elif (tarName.endswith('MTL.txt') or
                      tarName.endswith('MTL.TXT')):
                    # get mtl file
                    mtlFileName = tarName

        elif ((fname.startswith('L') or fname.startswith('M')) and
              (fname.endswith('.tif') or
               fname.endswith('.TIF') or
               fname.endswith('._MTL.txt'))):

            # try to find TIF/tif files with the same name as input file
            path, coreName = os.path.split(fileName)
            coreName = os.path.splitext(coreName)[0].split('_')[0]
            coreNameMask = coreName+'*[tT][iI][fF]'
            tifNames = sorted(glob.glob(os.path.join(path, coreNameMask)))
            for tifName in tifNames:
                sourceFilename = tifName
                gdalDatasetTmp = gdal.Open(sourceFilename)
                # keep name, GDALDataset and size
                bandFileNames.append(sourceFilename)
                bandSizes.append(gdalDatasetTmp.RasterXSize)
                bandDatasets.append(gdalDatasetTmp)

            # get mtl file
            mtlFiles = glob.glob(coreName+'*[mM][tT][lL].[tT][xX][tT]')
            if len(mtlFiles) > 0:
                mtlFileName = mtlFiles[0]
        else:
            raise WrongMapperError

        # if no TIF files were found, this is not the appropriate mapper
        if not bandFileNames:
            raise WrongMapperError

        # get appropriate band size based on the number of unique sizes and
        # the required resolution
        if resolution == 'low':
            bandXSise = min(bandSizes)
        elif resolution in ['high', 'hi']:
            bandXSise = max(bandSizes)
        else:
            raise OptionError('Wrong resolution %s for file %s' % (resolution, fileName))

        # find bands with appropriate size and put to metaDict
        metaDict = []
        for bandFileName, bandSize, bandDataset in zip(bandFileNames,
                                                       bandSizes,
                                                       bandDatasets):
            if bandSize == bandXSise:
                # let last part of file name be suffix
                bandSuffix = os.path.splitext(bandFileName)[0].split('_')[-1]

                metaDict.append({
                    'src': {'SourceFilename': bandFileName,
                            'SourceBand':  1,
                            'ScaleRatio': 0.1},
                    'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                            'suffix': bandSuffix}})
                gdalDataset4Use = bandDataset

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset4Use)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        if len(mtlFileName) > 0:
            mtlFileName = os.path.join(os.path.split(bandFileNames[0])[0],
                                        mtlFileName)
            mtlFileLines = [line.strip() for line in
                            self.read_xml(mtlFileName).split('\n')]
            dateString = [line.split('=')[1].strip()
                          for line in mtlFileLines
                            if ('DATE_ACQUIRED' in line or
                              'ACQUISITION_DATE' in line)][0]
            timeStr = [line.split('=')[1].strip()
                        for line in mtlFileLines
                            if ('SCENE_CENTER_TIME' in line or
                                'SCENE_CENTER_SCAN_TIME' in line)][0]
            time_start = parse_time(dateString + 'T' + timeStr).isoformat()
            time_end = (parse_time(dateString + 'T' + timeStr) +
                        datetime.timedelta(microseconds=60000000)).isoformat()

            self.dataset.SetMetadataItem('time_coverage_start', time_start)
            self.dataset.SetMetadataItem('time_coverage_end', time_end)

        # set platform
        platform = 'LANDSAT'
        if fname[2].isdigit():
            platform += '-'+fname[2]
        ee = pti.get_gcmd_platform(platform)
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        # set instrument
        instrument = {
        'LANDSAT' : 'MSS',
        'LANDSAT-1' : 'MSS',
        'LANDSAT-2' : 'MSS',
        'LANDSAT-3' : 'MSS',
        'LANDSAT-4' : 'TM',
        'LANDSAT-5' : 'TM',
        'LANDSAT-7' : 'ETM+',
        'LANDSAT-8' : 'OLI'}[platform]
        ee = pti.get_gcmd_instrument(instrument)
        self.dataset.SetMetadataItem('instrument', json.dumps(ee))
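The Landsat mapper above reads GeoTIFFs straight out of the tar archive through GDAL's /vsitar/ virtual file system; a minimal usage sketch (archive and member names are placeholders):

from osgeo import gdal

archive = 'LC80990112014270LGN00.tar.gz'   # placeholder archive name
member = 'LC80990112014270LGN00_B4.TIF'    # placeholder member name
# '/vsitar/<archive>/<member>' lets GDAL read the member without unpacking
ds = gdal.Open('/vsitar/%s/%s' % (archive, member))
if ds is not None:
    print(ds.RasterXSize, ds.RasterYSize)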
Exemplo n.º 49
0
    def get_platform(self, raw_metadata):
        return pti.get_gcmd_platform('In Situ Ocean-based Platforms')
Exemplo n.º 50
0
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):

        try:
            geo_transform = gdalDataset.GetGeoTransform()[0:5]
        except AttributeError:
            raise WrongMapperError
        if geo_transform != (-12.1, 0.2, 0.0, 81.95, 0.0):
            raise WrongMapperError

        metaDict = [{'src': {'SourceFilename': filename,
                             'SourceBand': 2,
                             'NODATA': 9999},
                     'dst': {'wkv': 'eastward_wind',
                             'height': '10 m'}
                     },
                    {'src': {'SourceFilename': filename,
                             'SourceBand': 3,
                             'NODATA': 9999},
                     'dst': {'wkv': 'northward_wind',
                             'height': '10 m'}
                     },
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': 2,
                              'DataType': gdalDataset.GetRasterBand(2).DataType
                              },
                             {'SourceFilename': filename,
                              'SourceBand': 3,
                              'DataType': gdalDataset.GetRasterBand(3).DataType
                              }],
                     'dst': {'wkv': 'wind_speed',
                             'name': 'windspeed',
                             'height': '10 m',
                             'PixelFunctionType': 'UVToMagnitude',
                             'NODATA': 9999}
                     },
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': 2,
                              'DataType': gdalDataset.GetRasterBand(2).DataType
                              },
                             {'SourceFilename': filename,
                              'SourceBand': 3,
                              'DataType': gdalDataset.GetRasterBand(3).DataType
                              }],
                     'dst': {'wkv': 'wind_from_direction',
                             'name': 'winddirection',
                             'height': '10 m',
                             'PixelFunctionType': 'UVToDirectionFrom',
                             'NODATA': 9999
                             }
                     }]

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset, metadata=gdalMetadata)

        # Create bands
        self.create_bands(metaDict)

        # set source, start_date, stop_date
        self.dataset.SetMetadataItem('source', 'HIRLAM')

        # Adding valid time from the GRIB file to dataset
        start_date = gdalDataset.GetRasterBand(1).GetMetadata()['GRIB_VALID_TIME']
        self.dataset.SetMetadataItem('time_coverage_start',
                datetime.datetime.utcfromtimestamp(
                int(start_date.strip().split(' ')[0])).isoformat() + '+00:00')

        stop_date = gdalDataset.GetRasterBand(gdalDataset.RasterCount).GetMetadata()['GRIB_VALID_TIME']
        self.dataset.SetMetadataItem('time_coverage_end',
                datetime.datetime.utcfromtimestamp(
                int(stop_date.strip().split(' ')[0])).isoformat() + '+00:00')

        mm = pti.get_gcmd_instrument('computer')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        ee = pti.get_gcmd_platform('merged analysis')
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
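Several of the GRIB-based mappers above convert the band-level GRIB_VALID_TIME item to ISO strings; a small helper sketch, assuming GDAL's usual '<unix-seconds> sec UTC' form for that item:

import datetime

def grib_valid_time_to_iso(valid_time):
    # e.g. '1445479200 sec UTC' -> keep the leading integer, convert to ISO 8601
    seconds = int(valid_time.strip().split(' ')[0])
    return datetime.datetime.utcfromtimestamp(seconds).isoformat()

print(grib_valid_time_to_iso('1445479200 sec UTC'))  # 2015-10-22T02:00:00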
Exemplo n.º 51
0
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
        ''' Create NCEP VRT '''

        if not gdalDataset:
            raise WrongMapperError(filename)

        geotransform = gdalDataset.GetGeoTransform()
        if (geotransform == (-0.25, 0.5, 0.0, 90.25, 0.0, -0.5) or
                geotransform == (-0.5, 1.0, 0.0, 90.5, 0.0, -1.0)):
            if gdalDataset.RasterCount == 4:
                srcBandId = {'temperature': 2,
                             'u-component': 3,
                             'v-component': 4}
            elif gdalDataset.RasterCount == 9:
                srcBandId = {'temperature': 6,
                             'u-component': 8,
                             'v-component': 9}
            else:
                raise WrongMapperError(filename)
        else:
            raise WrongMapperError(filename)  # Not water proof

        # Adding valid time from the GRIB file to dataset
        band = gdalDataset.GetRasterBand(srcBandId['u-component'])
        validTime = band.GetMetadata()['GRIB_VALID_TIME']
        time_isoformat = (datetime.datetime.utcfromtimestamp(
            int(validTime.strip().split(' ')[0])).isoformat())

        # Set band metadata time_iso_8601 for use in OpenWind
        time_iso_8601 = np.datetime64(parse(time_isoformat))
        metaDict = [{'src': {'SourceFilename': filename,
                             'SourceBand': srcBandId['u-component']},
                     'dst': {'wkv': 'eastward_wind',
                             'height': '10 m',
                             'time_iso_8601': time_iso_8601}},
                    {'src': {'SourceFilename': filename,
                             'SourceBand': srcBandId['v-component']},
                     'dst': {'wkv': 'northward_wind',
                             'height': '10 m',
                             'time_iso_8601': time_iso_8601}},
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': srcBandId['u-component'],
                              'DataType': (gdalDataset.GetRasterBand(srcBandId['u-component']).DataType)
                              },
                             {'SourceFilename': filename,
                              'SourceBand': srcBandId['v-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['v-component']).DataType
                              }],
                     'dst': {'wkv': 'wind_speed',
                             'PixelFunctionType': 'UVToMagnitude',
                             'name': 'windspeed',
                             'height': '10 m',
                             'time_iso_8601': time_iso_8601
                             }},
                    {'src': [{'SourceFilename': filename,
                              'SourceBand': srcBandId['u-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['u-component']).DataType
                              },
                             {'SourceFilename': filename,
                              'SourceBand': srcBandId['v-component'],
                              'DataType': gdalDataset.GetRasterBand(srcBandId['v-component']).DataType
                              }],
                     'dst': {'wkv': 'wind_from_direction',
                             'PixelFunctionType': 'UVToDirectionFrom',
                             'name': 'winddirection',
                             'height': '10 m',
                             'time_iso_8601': time_iso_8601
                             }},
                    {'src': {'SourceFilename': filename,
                             'SourceBand': srcBandId['temperature']},
                     'dst': {'wkv': 'air_temperature',
                             'name': 'air_t',
                             'height': '2 m',
                             'time_iso_8601': time_iso_8601}
                     }]

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        self.dataset.SetMetadataItem('time_coverage_start', time_isoformat)

        self.dataset.SetMetadataItem('time_coverage_end', time_isoformat)

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('computer')
        ee = pti.get_gcmd_platform('ncep-gfs')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
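
# Hedged aside (not part of the mapper above): 'UVToMagnitude' and
# 'UVToDirectionFrom' are pixel functions compiled into Nansat; a plausible
# NumPy equivalent, assuming u and v are the eastward and northward wind
# components in m/s, is sketched below for reference.
import numpy as np

def uv_to_magnitude(u, v):
    """Wind speed from eastward (u) and northward (v) components."""
    return np.hypot(u, v)

def uv_to_direction_from(u, v):
    """Meteorological wind-from direction, degrees clockwise from north."""
    return np.mod(np.degrees(np.arctan2(-u, -v)), 360.0)
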
Exemplo n.º 52
0
    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 GCP_COUNT=10, **kwargs):
        ''' Create VRT
        Parameters
        ----------
        GCP_COUNT : int
            number of GCPs along each dimension
        '''

        # should raise an error if this is not an OBPG L2 file
        try:
            title = gdalMetadata["Title"]
        except:
            raise WrongMapperError

        if title not in self.titles:
            raise WrongMapperError

        # get subdataset and parse to VRT.__init__()
        # for retrieving geo-metadata
        # but NOT from longitude or latitude because it can be smaller!
        subDatasets = gdalDataset.GetSubDatasets()
        for subDataset in subDatasets:
            if ('longitude' not in subDataset[1] and
                    'latitude' not in subDataset[1]):
                gdalSubDataset = gdal.Open(subDataset[0])
                break

        if title == 'GOCI Level-2 Data':
            # set GOCI projection parameters
            rasterXSize = 5567
            rasterYSize = 5685
            proj4 = '+proj=ortho +lat_0=36 +lon_0=130 +units=m +ellps=WGS84 +datum=WGS84 +no_defs'
            srs = osr.SpatialReference()
            srs.ImportFromProj4(proj4)
            projection = srs.ExportToWkt()
            geoTransform = (-1391500.0, 500.0, 0.0, 1349500.0, 0.0, -500.0)
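            # The GDAL geotransform maps pixel/line to projected coordinates
            # (the rotation terms are zero here):
            #   x = geoTransform[0] + column * geoTransform[1]
            #   y = geoTransform[3] + row    * geoTransform[5]
            # i.e. the upper-left corner sits at (-1391500 m, 1349500 m) with
            # a 500 m pixel size in the orthographic projection defined above.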

            # create empty VRT dataset with georeference only
            VRT.__init__(self, srcGeoTransform=geoTransform,
                         srcProjection=projection,
                         srcRasterXSize=rasterXSize,
                         srcRasterYSize=rasterYSize)
        else:
            # create empty VRT dataset with geolocation only
            VRT.__init__(self, gdalSubDataset)

        # parts of dictionary for all Reflectances
        #dictRrs = {'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air', 'wavelength': '412'} }
        # dictionary for all possible bands
        allBandsDict = {'Rrs': {'src': {},
                                'dst': {'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air'}},
                        'Kd': {'src': {},
                               'dst': {'wkv': 'volume_attenuation_coefficient_of_downwelling_radiative_flux_in_sea_water'}},
                        'chlor_a': {'src': {},
                                    'dst': {'wkv': 'mass_concentration_of_chlorophyll_a_in_sea_water',
                                            'case': 'I'}},
                        'cdom_index': {'src': {},
                                       'dst': {'wkv': 'volume_absorption_coefficient_of_radiative_flux_in_sea_water_due_to_dissolved_organic_matter',
                                               'case': 'II'}},
                        'sst': {'src': {},
                                'dst': {'wkv': 'sea_surface_temperature'}},
                        'sst4': {'src': {},
                                 'dst': {'wkv': 'sea_surface_temperature'}},
                        'l2_flags': {'src': {'SourceType': 'SimpleSource',
                                             'DataType': 4},
                                     'dst': {'wkv': 'quality_flags',
                                             'dataType': 4}},
                        'qual_sst': {'src': {'SourceType': 'SimpleSource',
                                             'DataType': 4},
                                     'dst': {'wkv': 'quality_flags',
                                             'name': 'qual_sst',
                                             'dataType': 4}},
                        'qual_sst4': {'src': {'SourceType': 'SimpleSource',
                                              'DataType': 4},
                                      'dst': {'wkv': 'quality_flags',
                                              'name': 'qual_sst',
                                              'dataType': 4}},
                        'latitude': {'src': {},
                                     'dst': {'wkv': 'latitude'}},
                        'longitude': {'src': {},
                                      'dst': {'wkv': 'longitude'}},
                        'par': {'src': {},
                                      'dst': {'wkv': 'downwelling_photosynthetic_photon_radiance_in_sea_water'}},
                        'ipar': {'src': {},
                                      'dst': {'wkv': 'instantaneous_downwelling_photosynthetic_photon_radiance_in_sea_water'}},
                        }

        # loop through available bands and generate metaDict (non fixed)
        metaDict = []
        bandNo = 0
        for subDataset in subDatasets:
            # get sub dataset name
            subDatasetName = subDataset[1].split(' ')[1]
            self.logger.debug('Subdataset: %s' % subDataset[1])
            self.logger.debug('Subdataset name: "%s"' % subDatasetName)
            # get wavelength if applicable, get dataset name without wavelength
            try:
                wavelength = int(subDatasetName.split('_')[-1])
            except:
                wavelength = None
                subBandName = subDatasetName
            else:
                subBandName = subDatasetName.split('_')[0]

            self.logger.debug('subBandName, wavelength: %s %s' %
                              (subBandName, str(wavelength)))

            if subBandName in allBandsDict:
                # get name, slope, intercept
                self.logger.debug('name: %s' % subBandName)
                tmpSubDataset = gdal.Open(subDataset[0])
                tmpSubMetadata = tmpSubDataset.GetMetadata()
                slope = tmpSubMetadata.get('slope', '1')
                intercept = tmpSubMetadata.get('intercept', '0')
                self.logger.debug('slope, intercept: %s %s ' %
                                  (slope, intercept))
                # create meta entry
                metaEntry = {'src': {'SourceFilename': subDataset[0],
                                     'SourceBand': 1,
                                     'ScaleRatio': slope,
                                     'ScaleOffset': intercept},
                             'dst': {}
                             }
                # add more to src
                for srcKey in allBandsDict[subBandName]['src']:
                    metaEntry['src'][srcKey] = (allBandsDict[subBandName]
                                                ['src'][srcKey])
                # add dst from allBandsDict
                for dstKey in allBandsDict[subBandName]['dst']:
                    metaEntry['dst'][dstKey] = (allBandsDict[subBandName]
                                                ['dst'][dstKey])

                # add wavelength, band name to dst
                if wavelength is not None:
                    metaEntry['dst']['suffix'] = str(wavelength)
                    metaEntry['dst']['wavelength'] = str(wavelength)

                # append band metadata to metaDict
                self.logger.debug('metaEntry: %d => %s' % (bandNo,
                                                           str(metaEntry)))
                metaDict.append(metaEntry)
                bandNo += 1

                if subBandName == 'Rrs':
                    rrsSubDataset = subDataset[0]
                    metaEntryRrsw = {
                        'src': [{
                            'SourceFilename': subDataset[0],
                            'SourceBand':  1,
                            'ScaleRatio': slope,
                            'ScaleOffset': intercept,
                            'DataType': 6}],
                        'dst': {
                            'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_water',
                            'suffix': str(wavelength),
                            'wavelength': str(wavelength),
                            'PixelFunctionType': 'NormReflectanceToRemSensReflectance',
                        }}

                    # append band metadata to metaDict
                    self.logger.debug('metaEntry: %d => %s' %
                                      (bandNo, str(metaEntryRrsw)))
                    metaDict.append(metaEntryRrsw)
                    bandNo += 1

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # set TIME
        startYear = int(gdalMetadata['Start Year'])
        startDay = int(gdalMetadata['Start Day'])
        startMillisec = int(gdalMetadata['Start Millisec'])
        startDate = datetime(startYear, 1, 1) + timedelta(startDay-1, 0, 0,
                                                          startMillisec)
        # skip adding georeference for GOCI
        if title == 'GOCI Level-2 Data':
            return

        self._remove_geotransform()

        # add geolocation
        geoMeta = self.geolocationArray.d
        if len(geoMeta) > 0:
            self.dataset.SetMetadata(geoMeta, 'GEOLOCATION')

        # add GCPs
        geolocationMetadata = gdalSubDataset.GetMetadata('GEOLOCATION')
        xDatasetSource = geolocationMetadata['X_DATASET']
        xDataset = gdal.Open(xDatasetSource)

        yDatasetSource = geolocationMetadata['Y_DATASET']
        yDataset = gdal.Open(yDatasetSource)

        longitude = xDataset.ReadAsArray()
        latitude = yDataset.ReadAsArray()

        # estimate pixel/line step of the geolocation arrays
        pixelStep = int(ceil(float(gdalSubDataset.RasterXSize) /
                             float(xDataset.RasterXSize)))
        lineStep = int(ceil(float(gdalSubDataset.RasterYSize) /
                            float(xDataset.RasterYSize)))
        self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))

        # ==== ADD GCPs and Projection ====

        # estimate step of GCPs
        step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
        step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))
        if str(title) == 'VIIRSN Level-2 Data':
            step0 = 64
        self.logger.debug('gcpCount: >%s<, %d %d %f %d %d',
                          title,
                          latitude.shape[0], latitude.shape[1],
                          GCP_COUNT, step0, step1)

        # generate list of GCPs
        dx = .5
        dy = .5
        gcps = []
        k = 0
        center_lon = 0
        center_lat = 0
        for i0 in range(0, latitude.shape[0], step0):
            for i1 in range(0, latitude.shape[1], step1):
                # create GCP with X,Y,pixel,line from lat/lon matrices
                lon = float(longitude[i0, i1])
                lat = float(latitude[i0, i1])
                if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                    gcp = gdal.GCP(lon, lat, 0, i1 * pixelStep + dx,
                                   i0 * lineStep + dy)
                    self.logger.debug('%d %d %d %f %f',
                                      k, gcp.GCPPixel, gcp.GCPLine,
                                      gcp.GCPX, gcp.GCPY)
                    gcps.append(gcp)
                    center_lon += gcp.GCPX
                    center_lat += gcp.GCPY
                    k += 1


        # append GCPs and lat/lon projection to the vsiDataset
        self.dataset.SetGCPs(gcps, NSR().wkt)
        self.remove_geolocationArray()

        # reproject GCPs
        center_lon /= k
        center_lat /= k
        srs = '+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=%f +lat_0=%f +no_defs' % (center_lon, center_lat)
        self.reproject_GCPs(srs)

        # use TPS for reprojection
        self.tps = True

        # add NansenCloud metadata
        self.dataset.SetMetadataItem('time_coverage_start',
                                     (parse(
                                      gdalMetadata['time_coverage_start']).
                                      isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                                     (parse(
                                      gdalMetadata['time_coverage_stop']).
                                      isoformat()))
        instrument = gdalMetadata['Sensor Name'][1:-1]
        platform = {'A': 'AQUA', 'T': 'TERRA'}[gdalMetadata['Sensor Name'][-1]]
        mm = pti.get_gcmd_instrument(instrument)
        ee = pti.get_gcmd_platform(platform)
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
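
# Hedged usage sketch (the file name is hypothetical; assumes the nansat
# package is installed and that this mapper is auto-selected for OBPG L2
# products):
# from nansat import Nansat
# n = Nansat('A2015178123500.L2_LAC_OC.nc')  # hypothetical MODIS Aqua L2 file
# print(n.bands())                           # bands created by metaDict above
# chl = n['chlor_a']                         # read chlorophyll as numpy array
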
Exemplo n.º 53
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create Radarsat2 VRT '''
        fPathName, fExt = os.path.splitext(fileName)

        if zipfile.is_zipfile(fileName):
            # Open zip file using VSI
            fPath, fName = os.path.split(fPathName)
            fileName = '/vsizip/%s/%s' % (fileName, fName)
            if not 'RS' in fName[0:2]:
                raise WrongMapperError('Provided data is not Radarsat-2')
            gdalDataset = gdal.Open(fileName)
            gdalMetadata = gdalDataset.GetMetadata()

        #if it is not RADARSAT-2, return
        if (not gdalMetadata or
                not 'SATELLITE_IDENTIFIER' in gdalMetadata.keys()):
            raise WrongMapperError
        elif gdalMetadata['SATELLITE_IDENTIFIER'] != 'RADARSAT-2':
            raise WrongMapperError

        # read product.xml
        productXmlName = os.path.join(fileName, 'product.xml')
        productXml = self.read_xml(productXmlName)

        # Get additional metadata from product.xml
        rs2_0 = Node.create(productXml)
        rs2_1 = rs2_0.node('sourceAttributes')
        rs2_2 = rs2_1.node('radarParameters')
        if rs2_2['antennaPointing'].lower() == 'right':
            antennaPointing = 90
        else:
            antennaPointing = -90
        rs2_3 = rs2_1.node('orbitAndAttitude').node('orbitInformation')
        passDirection = rs2_3['passDirection']

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        #define dictionary of metadata and band specific parameters
        pol = []
        metaDict = []

        # Get the subdataset with calibrated sigma0 only
        for dataset in gdalDataset.GetSubDatasets():
            if dataset[1] == 'Sigma Nought calibrated':
                s0dataset = gdal.Open(dataset[0])
                s0datasetName = dataset[0][:]
                band = s0dataset.GetRasterBand(1)
                s0datasetPol = band.GetMetadata()['POLARIMETRIC_INTERP']
                for i in range(1, s0dataset.RasterCount+1):
                    iBand = s0dataset.GetRasterBand(i)
                    polString = iBand.GetMetadata()['POLARIMETRIC_INTERP']
                    suffix = polString
                    # The nansat data will be complex
                    # if the SAR data is of GDAL type 10 (GDT_CFloat32)
                    dtype = iBand.DataType
                    if dtype == 10:
                        # add intensity band
                        metaDict.append(
                            {'src': {'SourceFilename':
                                     ('RADARSAT_2_CALIB:SIGMA0:'
                                      + fileName + '/product.xml'),
                                     'SourceBand': i,
                                     'DataType': dtype},
                             'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                                     'PixelFunctionType': 'intensity',
                                     'SourceTransferType': gdal.GetDataTypeName(dtype),
                                     'suffix': suffix,
                                     'polarization': polString,
                                     'dataType': 6}})
                        # modify suffix for adding the complex band below
                        suffix = polString+'_complex'
                    pol.append(polString)
                    metaDict.append(
                        {'src': {'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:'
                                                    + fileName
                                                    + '/product.xml'),
                                 'SourceBand': i,
                                 'DataType': dtype},
                         'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                                 'suffix': suffix,
                                 'polarization': polString}})

            if dataset[1] == 'Beta Nought calibrated':
                b0dataset = gdal.Open(dataset[0])
                b0datasetName = dataset[0][:]
                for j in range(1, b0dataset.RasterCount+1):
                    jBand = b0dataset.GetRasterBand(j)
                    polString = jBand.GetMetadata()['POLARIMETRIC_INTERP']
                    if polString == s0datasetPol:
                        b0datasetBand = j

        ###############################
        # Add SAR look direction
        ###############################
        d = Domain(ds=gdalDataset)
        lon, lat = d.get_geolocation_grids(100)

        '''
        (GDAL?) Radarsat-2 data is stored with the maximum latitude at the
        first element of each column and the minimum longitude at the first
        element of each row (e.g. np.shape(lat)=(59,55) -> latitude maxima
        are at lat[0,:], and longitude minima are at lon[:,0]).

        In addition, there is an interpolation error for a direct estimate
        along azimuth. We therefore estimate the heading along range and add
        90 degrees to get the "satellite" heading.
        '''
        if str(passDirection).upper() == 'DESCENDING':
            sat_heading = initial_bearing(lon[:, :-1], lat[:, :-1],
                                          lon[:, 1:], lat[:, 1:]) + 90
        elif str(passDirection).upper() == 'ASCENDING':
            sat_heading = initial_bearing(lon[:, 1:], lat[:, 1:],
                                          lon[:, :-1], lat[:, :-1]) + 90
        else:
            print('Cannot decode pass direction: ' + str(passDirection))

        # Calculate SAR look direction
        look_direction = sat_heading + antennaPointing
        # Interpolate to regain lost row
        look_direction = np.mod(look_direction, 360)
        look_direction = scipy.ndimage.interpolation.zoom(
            look_direction, (1, 11./10.))
        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT(array=look_direction_u, lat=lat, lon=lon)
        look_v_VRT = VRT(array=look_direction_v, lat=lat, lon=lon)

        # Note: If incidence angle and look direction are stored in
        #       same VRT, access time is about twice as large
        lookVRT = VRT(lat=lat, lon=lon)
        lookVRT._create_band(
            [{'SourceFilename': look_u_VRT.fileName, 'SourceBand': 1},
             {'SourceFilename': look_v_VRT.fileName, 'SourceBand': 1}],
            {'PixelFunctionType': 'UVToDirectionTo'})
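        # Hedged note: 'UVToDirectionTo' presumably recombines the sine/cosine
        # components as degrees(arctan2(u, v)) modulo 360, so the look
        # direction survives the resizing below without wrap-around artefacts.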

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(gdalDataset.RasterXSize,
                                          gdalDataset.RasterYSize)
        # Store VRTs so that they are accessible later
        self.bandVRTs['look_u_VRT'] = look_u_VRT
        self.bandVRTs['look_v_VRT'] = look_v_VRT
        self.bandVRTs['lookVRT'] = lookVRT

        # Add band to full sized VRT
        lookFileName = self.bandVRTs['lookVRT'].fileName
        metaDict.append({'src': {'SourceFilename': lookFileName,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'sensor_azimuth_angle',
                                 'name': 'look_direction'}})

        ###############################
        # Create bands
        ###############################
        self._create_bands(metaDict)

        ###################################################
        # Add derived band (incidence angle) calculated
        # using pixel function "BetaSigmaToIncidence":
        ###################################################
        src = [{'SourceFilename': b0datasetName,
                'SourceBand':  b0datasetBand,
                'DataType': dtype},
               {'SourceFilename': s0datasetName,
                'SourceBand': 1,
                'DataType': dtype}]
        dst = {'wkv': 'angle_of_incidence',
               'PixelFunctionType': 'BetaSigmaToIncidence',
               'SourceTransferType': gdal.GetDataTypeName(dtype),
               '_FillValue': -10000,   # NB: this is also hard-coded in
                                       #     pixelfunctions.c
               'dataType': 6,
               'name': 'incidence_angle'}

        self._create_band(src, dst)
        self.dataset.FlushCache()
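        # Hedged note: for calibrated products sigma0 = beta0 * sin(theta_i),
        # so 'BetaSigmaToIncidence' presumably evaluates something like
        # theta_i = degrees(arcsin(sigma0 / beta0)) for every pixel.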

        ###################################################################
        # Add sigma0_VV - pixel function of sigma0_HH and beta0_HH
        # incidence angle is calculated within pixel function
        # It is assumed that HH is the first band in sigma0 and
        # beta0 sub datasets
        ###################################################################
        if 'VV' not in pol and 'HH' in pol:
            s0datasetNameHH = pol.index('HH')+1
            src = [{'SourceFilename': s0datasetName,
                    'SourceBand': s0datasetNameHH,
                    'DataType': 6},
                   {'SourceFilename': b0datasetName,
                    'SourceBand': b0datasetBand,
                    'DataType': 6}]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'Sigma0HHBetaToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self._create_band(src, dst)
            self.dataset.FlushCache()

        ############################################
        # Add SAR metadata
        ############################################
        if antennaPointing == 90:
            self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT')
        if antennaPointing == -90:
            self.dataset.SetMetadataItem('ANTENNA_POINTING', 'LEFT')
        self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                     str(passDirection).upper())

        # set valid time
        self.dataset.SetMetadataItem('time_coverage_start',
                                     (parse(gdalMetadata['FIRST_LINE_TIME']).
                                      isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                                     (parse(gdalMetadata['LAST_LINE_TIME']).
                                      isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('sar')
        ee = pti.get_gcmd_platform('radarsat-2')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        self._add_swath_mask_band()
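
# Hedged aside: 'initial_bearing' used above (presumably from Nansat's tools
# module) computes the great-circle initial bearing between two lon/lat
# points. A minimal NumPy sketch of the standard formula, assuming
# coordinates in degrees:
import numpy as np

def initial_bearing_sketch(lon1, lat1, lon2, lat2):
    """Initial bearing from point 1 to point 2, degrees clockwise from north."""
    lon1, lat1, lon2, lat2 = map(np.radians, (lon1, lat1, lon2, lat2))
    dlon = lon2 - lon1
    x = np.sin(dlon) * np.cos(lat2)
    y = (np.cos(lat1) * np.sin(lat2) -
         np.sin(lat1) * np.cos(lat2) * np.cos(dlon))
    return np.mod(np.degrees(np.arctan2(x, y)), 360.0)
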
Exemplo n.º 54
0
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 emrange='VNIR',
                 **kwargs):
        ''' Create ASTER L1B VRT '''
        # check mapper
        try:
            INSTRUMENTSHORTNAME = gdalMetadata['INSTRUMENTSHORTNAME']
        except:
            raise WrongMapperError
        if INSTRUMENTSHORTNAME != 'ASTER':
            raise WrongMapperError
        try:
            SHORTNAME = gdalMetadata['SHORTNAME']
        except:
            raise WrongMapperError
        if SHORTNAME != 'ASTL1B':
            raise WrongMapperError

        # set up metaDict for data at various resolutions
        subDSString = 'HDF4_EOS:EOS_SWATH:"%s":%s:%s'
        metaDictVNIR = [
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'VNIR_Swath', 'ImageData1')
                },
                'dst': {
                    'wavelength': '560'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'VNIR_Swath', 'ImageData2')
                },
                'dst': {
                    'wavelength': '660'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'VNIR_Swath', 'ImageData3N')
                },
                'dst': {
                    'wavelength': '820'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'VNIR_Swath', 'ImageData3B')
                },
                'dst': {
                    'wavelength': '820'
                }
            },
        ]

        metaDictSWIR = [
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'SWIR_Swath', 'ImageData4')
                },
                'dst': {
                    'wavelength': '1650'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'SWIR_Swath', 'ImageData5')
                },
                'dst': {
                    'wavelength': '2165'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'SWIR_Swath', 'ImageData6')
                },
                'dst': {
                    'wavelength': '2205'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'SWIR_Swath', 'ImageData7')
                },
                'dst': {
                    'wavelength': '2260'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'SWIR_Swath', 'ImageData8')
                },
                'dst': {
                    'wavelength': '2330'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'SWIR_Swath', 'ImageData9')
                },
                'dst': {
                    'wavelength': '2395'
                }
            },
        ]

        metaDictTIR = [
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'TIR_Swath', 'ImageData10')
                },
                'dst': {
                    'wavelength': '8300'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'TIR_Swath', 'ImageData11')
                },
                'dst': {
                    'wavelength': '8650'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'TIR_Swath', 'ImageData12')
                },
                'dst': {
                    'wavelength': '9100'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'TIR_Swath', 'ImageData13')
                },
                'dst': {
                    'wavelength': '10600'
                }
            },
            {
                'src': {
                    'SourceFilename':
                    subDSString % (filename, 'TIR_Swath', 'ImageData14')
                },
                'dst': {
                    'wavelength': '11300'
                }
            },
        ]

        # select appropriate metaDict based on <emrange> parameter
        metaDict = {
            'VNIR': metaDictVNIR,
            'SWIR': metaDictSWIR,
            'TIR': metaDictTIR,
        }[emrange]

        # get 1st EOS subdataset and parse to VRT.__init__()
        # for retrieving geo-metadata
        try:
            gdalSubDataset0 = gdal.Open(metaDict[0]['src']['SourceFilename'])
        except (AttributeError, IndexError):
            raise WrongMapperError

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalSubDataset0, metadata=gdalMetadata)

        # add source band, wkv and suffix
        for metaEntry in metaDict:
            metaEntry['src']['SourceBand'] = 1
            metaEntry['dst']['wkv'] = 'toa_outgoing_spectral_radiance'
            metaEntry['dst']['suffix'] = metaEntry['dst']['wavelength']

            if 'ImageData3N' in metaEntry['src']['SourceFilename']:
                metaEntry['dst']['suffix'] += 'N'

            if 'ImageData3B' in metaEntry['src']['SourceFilename']:
                metaEntry['dst']['suffix'] += 'B'

        # add scale and offset
        for metaEntry in metaDict:
            bandNo = metaEntry['src']['SourceFilename'].strip().split(
                ':')[-1].replace('ImageData', '')
            metaEntry['src']['ScaleRatio'] = float(gdalMetadata['INCL' +
                                                                bandNo])
            metaEntry['src']['ScaleOffset'] = float(gdalMetadata['OFFSET' +
                                                                 bandNo])
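        # Hedged note: in the generated VRT these are applied as
        # value = DN * ScaleRatio + ScaleOffset (GDAL ComplexSource
        # semantics), presumably yielding at-sensor spectral radiance.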

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # set time
        datetimeString = self.find_metadata(gdalMetadata,
                                            "SETTINGTIMEOFPOINTING")
        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start',
                                     parse(datetimeString + '+00').isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                                     parse(datetimeString + '+00').isoformat())

        mm = pti.get_gcmd_instrument('ASTER')
        ee = pti.get_gcmd_platform('TERRA')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        self._remove_geolocation()
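
# Hedged usage sketch (the file name is hypothetical; assumes Nansat forwards
# keyword arguments such as 'emrange' to this mapper):
# from nansat import Nansat
# n = Nansat('AST_L1B_00301012015000000.hdf', emrange='TIR')  # thermal bands
# print(n.bands())
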
Exemplo n.º 55
0
    def __init__(self, filename, gdalDataset, gdalMetadata, GCP_COUNT=30, **kwargs):
        ''' Create MODIS_L1 VRT '''

        # list of available MODIS short names and their resolutions
        modisResolutions = {'MYD02QKM': 250, 'MOD02QKM': 250,
                            'MYD02HKM': 500, 'MOD02HKM': 500,
                            'MYD021KM': 1000, 'MOD021KM': 1000}

        # should raise an error if this is not a MODIS L1 file
        try:
            mResolution = modisResolutions[gdalMetadata["SHORTNAME"]]
        except:
            raise WrongMapperError

        # get 1st subdataset and parse to VRT.__init__()
        # for retrieving geo-metadata
        try:
            gdalSubDataset = gdal.Open(gdalDataset.GetSubDatasets()[0][0])
        except (AttributeError, IndexError):
            raise WrongMapperError

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalSubDataset)

        subDsString = 'HDF4_EOS:EOS_SWATH:"%s":MODIS_SWATH_Type_L1B:%s'

        #provide all mappings
        metaDict250SF = ['EV_250_RefSB']

        metaDict250 = [{'src': {'SourceFilename': subDsString %
                                (filename, 'EV_250_RefSB'),
                                'SourceBand': 1},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '645'}},
                       {'src': {'SourceFilename': subDsString %
                                (filename, 'EV_250_RefSB'),
                                'SourceBand': 2},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '858'}}]

        metaDict500SF = ['EV_250_Aggr500_RefSB', 'EV_500_RefSB']

        metaDict500 = [{'src': {'SourceFilename': subDsString %
                                (filename, 'EV_250_Aggr500_RefSB'),
                                'SourceBand': 1},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '645'}},
                       {'src': {'SourceFilename': subDsString %
                                (filename, 'EV_250_Aggr500_RefSB'),
                                'SourceBand': 2},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '858'}},

                       {'src': {'SourceFilename': subDsString %
                                (filename, 'EV_500_RefSB'),
                                'SourceBand': 1},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '469'}},
                       {'src': {'SourceFilename': subDsString %
                                (filename, 'EV_500_RefSB'),
                                'SourceBand': 2},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '555'}},
                       {'src': {'SourceFilename': subDsString %
                                (filename, 'EV_500_RefSB'),
                                'SourceBand': 3},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '1240'}},
                       {'src': {'SourceFilename': subDsString %
                                (filename, 'EV_500_RefSB'),
                                'SourceBand': 4},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '1640'}},
                       {'src': {'SourceFilename': subDsString %
                                (filename, 'EV_500_RefSB'),
                                'SourceBand': 5},
                        'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                'wavelength': '2130'}}]

        metaDict1000SF = ['EV_250_Aggr1km_RefSB', 'EV_500_Aggr1km_RefSB',
                          'EV_1KM_RefSB', 'EV_1KM_Emissive']

        metaDict1000 = [{'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_250_Aggr1km_RefSB'),
                                 'SourceBand': 1},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '645'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_250_Aggr1km_RefSB'),
                                 'SourceBand': 2},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '858'}},

                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_500_Aggr1km_RefSB'),
                                 'SourceBand': 1},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '469'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_500_Aggr1km_RefSB'),
                                 'SourceBand': 2},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '555'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_500_Aggr1km_RefSB'),
                                 'SourceBand': 3},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '1240'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_500_Aggr1km_RefSB'),
                                 'SourceBand': 4},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '1640'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_500_Aggr1km_RefSB'),
                                 'SourceBand': 5},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '2130'}},

                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 1},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '412'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 2},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '443'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 3},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '488'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 4},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '531'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 5},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '551'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 6},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '667'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 7},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '667'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 8},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '678'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 9},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '678'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 10},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '748'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 11},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '869'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 12},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '905'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 13},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '936'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 14},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '940'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_RefSB'),
                                 'SourceBand': 15},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '1375'}},

                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 1},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '3750'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 2},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '3959'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 3},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '3959'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 4},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '4050'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 5},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '4465'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 6},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '4515'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 7},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '6715'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 8},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '7325'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 9},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '8550'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 10},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '9730'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 11},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '11030'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 12},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '12020'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 13},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '13335'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 14},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '13635'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 15},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '13935'}},
                        {'src': {'SourceFilename': subDsString %
                                 (filename, 'EV_1KM_Emissive'),
                                 'SourceBand': 16},
                         'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                                 'wavelength': '14235'}}]

        # get proper mapping depending on resolution
        metaDict = {250: metaDict250,
                    500: metaDict500,
                    1000: metaDict1000,
                    }[mResolution]
        # get proper mapping depending on resolution
        metaDictSF = {250: metaDict250SF,
                      500: metaDict500SF,
                      1000: metaDict1000SF,
                      }[mResolution]

        # read all scales/offsets
        rScales = {}
        rOffsets = {}
        for sf in metaDictSF:
            dsName = subDsString % (filename, sf)
            ds = gdal.Open(dsName)
            rScales[dsName] = list(map(float,
                                  ds.GetMetadataItem('radiance_scales').
                                  split(',')))
            rOffsets[dsName] = list(map(float,
                                   ds.GetMetadataItem('radiance_offsets').
                                   split(',')))
            self.logger.debug('radiance_scales: %s' % str(rScales))

        # add suffix, scale and offset to each band's src/dst dictionaries
        for bandDict in metaDict:
            SourceFilename = bandDict['src']['SourceFilename']
            SourceBand = bandDict['src']['SourceBand']
            bandDict['dst']['suffix'] = bandDict['dst']['wavelength']
            scale = rScales[SourceFilename][SourceBand-1]
            offset = rOffsets[SourceFilename][SourceBand-1]
            self.logger.debug('band, scale, offset: %s_%d %s %s' %
                              (SourceFilename, SourceBand, scale, offset))
            bandDict['src']['ScaleRatio'] = scale
            bandDict['src']['ScaleOffset'] = offset

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        productDate = gdalMetadata["RANGEBEGINNINGDATE"]
        productTime = gdalMetadata["RANGEBEGINNINGTIME"]
        self._remove_geolocation()

        # set required metadata
        self.dataset.SetMetadataItem('time_coverage_start',
                                     (parse(gdalMetadata["RANGEBEGINNINGDATE"]+
                                         ' '+gdalMetadata["RANGEBEGINNINGTIME"]
                                         ).
                                      isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                                     (parse(gdalMetadata["RANGEENDINGDATE"]+
                                         ' '+gdalMetadata["RANGEENDINGTIME"]
                                         ).
                                      isoformat()))

        instrumentName = self.find_metadata(gdalMetadata,
                                        'ASSOCIATEDINSTRUMENTSHORTNAME',
                                        'MODIS')
        platformName = self.find_metadata(gdalMetadata,
                                     'ASSOCIATEDPLATFORMSHORTNAME',
                                     'AQUA')
        mm = pti.get_gcmd_instrument(instrumentName)
        ee = pti.get_gcmd_platform(platformName)
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        lonSubdataset = [subdatasetName[0]
                         for subdatasetName in gdalDataset.GetSubDatasets()
                         if 'Longitude' in subdatasetName[1]][0]
        latSubdataset = [subdatasetName[0]
                         for subdatasetName in gdalDataset.GetSubDatasets()
                         if 'Latitude' in subdatasetName[1]][0]
        lons = gdal.Open(lonSubdataset).ReadAsArray()
        lats = gdal.Open(latSubdataset).ReadAsArray()
        gcps = []
        rows = range(0, lons.shape[0], lons.shape[0] // GCP_COUNT)
        cols = range(0, lons.shape[1], lons.shape[1] // GCP_COUNT)
        factor = self.dataset.RasterYSize / lons.shape[0]
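        # The geolocation arrays are stored at a coarser resolution than the
        # radiance bands, so 'factor' rescales their row/column indices to
        # pixel/line coordinates of the full raster when building GCPs below.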
        for r in rows:
            for c in cols:
                gcps.append(gdal.GCP(float(lons[r,c]),
                                     float(lats[r,c]), 0,
                                     factor*c+0.5,
                                     factor*r+0.5))
        self.dataset.SetGCPs(gcps, self.dataset.GetGCPProjection())
        self.tps = True
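
# Hedged aside: with GCPs attached and self.tps = True, Nansat uses a
# thin-plate-spline transformation when warping. A rough stand-alone GDAL
# equivalent (output name and target SRS are illustrative only) might be:
# from osgeo import gdal
# warped = gdal.Warp('warped.tif', vrt_dataset, dstSRS='EPSG:4326', tps=True)
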
Exemplo n.º 56
0
    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 GCP_STEP=20, MAX_LAT=90, MIN_LAT=50, resolution='low',
                 **kwargs):
        ''' Create VRT
        Parameters
        ----------
        GCP_STEP : int
            step in pixels between GCPs along each dimension
        '''
        ifile = os.path.split(fileName)[1]
        if not ifile.startswith('GW1AM2_') or not ifile.endswith('.h5'):
            raise WrongMapperError
        try:
            ProductName = gdalMetadata['ProductName']
            PlatformShortName = gdalMetadata['PlatformShortName']
            SensorShortName = gdalMetadata['SensorShortName']
        except:
            raise WrongMapperError

        if (ProductName != 'AMSR2-L1R' or
                PlatformShortName != 'GCOM-W1' or
                SensorShortName != 'AMSR2'):
            raise WrongMapperError

        if resolution == 'low':
            subDatasetWidth = 243
        else:
            subDatasetWidth = 486

        # get GCPs from lon/lat grids
        latGrid = gdal.Open('HDF5:"%s"://Latitude_of_Observation_Point_for_89A' % fileName).ReadAsArray()
        lonGrid = gdal.Open('HDF5:"%s"://Longitude_of_Observation_Point_for_89A' % fileName).ReadAsArray()
        if subDatasetWidth == 243:
            latGrid = latGrid[:, ::2]
            lonGrid = lonGrid[:, ::2]

        dx = .5
        dy = .5
        gcps = []
        k = 0
        maxY = 0
        minY = latGrid.shape[0]
        for i0 in range(0, latGrid.shape[0], GCP_STEP):
            for i1 in range(0, latGrid.shape[1], GCP_STEP):
                # create GCP with X,Y,pixel,line from lat/lon matrices
                lon = float(lonGrid[i0, i1])
                lat = float(latGrid[i0, i1])
                if (lon >= -180 and
                    lon <= 180 and
                    lat >= MIN_LAT and
                    lat <= MAX_LAT):
                    gcp = gdal.GCP(lon, lat, 0, i1 + dx, i0 + dy)
                    gcps.append(gcp)
                    k += 1
                    maxY = max(maxY, i0)
                    minY = min(minY, i0)
        yOff = minY
        ySize = maxY - minY

        # remove Y-offset from gcps
        for gcp in gcps:
            gcp.GCPLine -= yOff
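        # The VRT built below covers only rows [minY, maxY] of the swath (the
        # part between MIN_LAT and MAX_LAT), so the GCP line coordinates are
        # shifted by yOff and each band is read with the same yOff/ySize.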

        metaDict = []

        subDatasets = gdalDataset.GetSubDatasets()
        metadata = gdalDataset.GetMetadata()
        for subDataset in subDatasets:
            # select subdatasets for that resolution (width)
            if (subDatasetWidth == int(subDataset[1].split(']')[0].split('x')[-1]) and
                'Latitude' not in subDataset[0] and 'Longitude' not in subDataset[0]):
                name = subDataset[0].split('/')[-1]
                # find scale
                scale = 1
                for meta in metadata:
                    if name + '_SCALE' in meta:
                        scale = float(metadata[meta])
                # create meta entry
                metaEntry = {'src': {'SourceFilename': subDataset[0],
                                     'SourceBand': 1,
                                     'ScaleRatio': scale,
                                     'ScaleOffset': 0,
                                     'yOff': yOff,
                                     'ySize': ySize,},
                             'dst': {'name': name}
                             }
                metaDict.append(metaEntry)

        # create VRT from one of the subdatasets
        gdalSubDataset = gdal.Open(metaEntry['src']['SourceFilename'])
        VRT.__init__(self, srcRasterXSize=subDatasetWidth, srcRasterYSize=ySize)
        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        self.dataset.SetMetadataItem('time_coverage_start',
                    parse_time(gdalMetadata['ObservationStartDateTime']).isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                    parse_time(gdalMetadata['ObservationEndDateTime']).isoformat())
        # append GCPs and lat/lon projection to the vsiDataset
        self.dataset.SetGCPs(gcps, NSR().wkt)
        self.reproject_GCPs('+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs')
        self.tps = True

        mm = pti.get_gcmd_instrument('AMSR2')
        ee = pti.get_gcmd_platform('GCOM-W1')
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Exemplo n.º 57
0
 def get_platform(self, raw_metadata):
     return pti.get_gcmd_platform('OPERATIONAL MODELS')
Exemplo n.º 58
0
    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 manifestonly=False, **kwargs):

        if zipfile.is_zipfile(fileName):
            zz = zipfile.PyZipFile(fileName)
            # Assuming the file names are consistent, the polarization
            # dependent data should be sorted equally such that we can use the
            # same indices consistently for all the following lists
            # THIS IS NOT THE CASE...
            mdsFiles = ['/vsizip/%s/%s' % (fileName, fn)
                        for fn in zz.namelist() if 'measurement/s1a' in fn]
            calFiles = ['/vsizip/%s/%s' % (fileName, fn)
                        for fn in zz.namelist()
                        if 'annotation/calibration/calibration-s1a' in fn]
            noiseFiles = ['/vsizip/%s/%s' % (fileName, fn)
                          for fn in zz.namelist()
                          if 'annotation/calibration/noise-s1a' in fn]
            annotationFiles = ['/vsizip/%s/%s' % (fileName, fn)
                               for fn in zz.namelist()
                               if 'annotation/s1a' in fn]
            manifestFile = ['/vsizip/%s/%s' % (fileName, fn)
                            for fn in zz.namelist()
                            if 'manifest.safe' in fn]
            zz.close()
        else:
            mdsFiles = glob.glob('%s/measurement/s1a*' % fileName)
            calFiles = glob.glob('%s/annotation/calibration/calibration-s1a*'
                                 % fileName)
            noiseFiles = glob.glob('%s/annotation/calibration/noise-s1a*'
                                   % fileName)
            annotationFiles = glob.glob('%s/annotation/s1a*'
                                        % fileName)
            manifestFile = glob.glob('%s/manifest.safe' % fileName)

        if (not mdsFiles or not calFiles or not noiseFiles or
                not annotationFiles or not manifestFile):
            raise WrongMapperError

        mdsDict = {}
        for ff in mdsFiles:
            mdsDict[
                os.path.splitext(os.path.basename(ff))[0].split('-')[3]] = ff

        self.calXMLDict = {}
        for ff in calFiles:
            self.calXMLDict[
                os.path.splitext(
                os.path.basename(ff))[0].split('-')[4]] = self.read_xml(ff)

        self.noiseXMLDict = {}
        for ff in noiseFiles:
            self.noiseXMLDict[
                os.path.splitext(
                os.path.basename(ff))[0].split('-')[4]] = self.read_xml(ff)

        self.annotationXMLDict = {}
        for ff in annotationFiles:
            self.annotationXMLDict[
                os.path.splitext(
                os.path.basename(ff))[0].split('-')[3]] = self.read_xml(ff)

        self.manifestXML = self.read_xml(manifestFile[0])

        # very fast constructor without any bands
        if manifestonly:
            self.init_from_manifest_only(
                self.manifestXML,
                self.annotationXMLDict[list(self.annotationXMLDict.keys())[0]])
            return

        gdalDatasets = {}
        for key in mdsDict.keys():
            # Open data files
            gdalDatasets[key] = gdal.Open(mdsDict[key])

        if not gdalDatasets:
            raise WrongMapperError('No Sentinel-1 datasets found')

        # Check metadata to confirm it is Sentinel-1 L1
        for key in gdalDatasets:
            metadata = gdalDatasets[key].GetMetadata()
            break
        if 'TIFFTAG_IMAGEDESCRIPTION' not in metadata:
            raise WrongMapperError
        if ('Sentinel-1' not in metadata['TIFFTAG_IMAGEDESCRIPTION']
                and 'L1' not in metadata['TIFFTAG_IMAGEDESCRIPTION']):
            raise WrongMapperError

        warnings.warn('Sentinel-1 level-1 mapper is not yet adapted to '
                      'complex data. In addition, the band names should be '
                      'updated for multi-swath data - '
                      'and there might be other issues.')

        # create empty VRT dataset with geolocation only
        for key in gdalDatasets:
            VRT.__init__(self, gdalDatasets[key])
            break

        # Read annotation, noise and calibration xml-files
        pol = {}
        it = 0
        for key in self.annotationXMLDict:
            xml = Node.create(self.annotationXMLDict[key])
            pol[key] = (xml.node('product').
                        node('adsHeader')['polarisation'].upper())
            it += 1
            if it == 1:
                # Get orbit direction and the geolocation grids
                # (lon/lat, incidence and elevation angles)
                pi = xml.node('generalAnnotation').node('productInformation')

                self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                              str(pi['pass']))
                (X, Y, lon, lat, inc, ele, numberOfSamples,
                numberOfLines) = self.read_geolocation_lut(
                                                self.annotationXMLDict[key])

                X = np.unique(X)
                Y = np.unique(Y)

                lon = np.array(lon).reshape(len(Y), len(X))
                lat = np.array(lat).reshape(len(Y), len(X))
                inc = np.array(inc).reshape(len(Y), len(X))
                ele = np.array(ele).reshape(len(Y), len(X))

                incVRT = VRT(array=inc, lat=lat, lon=lon)
                eleVRT = VRT(array=ele, lat=lat, lon=lon)
                incVRT = incVRT.get_resized_vrt(self.dataset.RasterXSize,
                                                self.dataset.RasterYSize,
                                                eResampleAlg=2)
                eleVRT = eleVRT.get_resized_vrt(self.dataset.RasterXSize,
                                                self.dataset.RasterYSize,
                                                eResampleAlg=2)
                self.bandVRTs['incVRT'] = incVRT
                self.bandVRTs['eleVRT'] = eleVRT

        for key in self.calXMLDict:
            calibration_LUT_VRTs, longitude, latitude = (
                self.get_LUT_VRTs(self.calXMLDict[key],
                                  'calibrationVectorList',
                                  ['sigmaNought', 'betaNought',
                                   'gamma', 'dn']
                                  ))
            self.bandVRTs['LUT_sigmaNought_VRT_'+pol[key]] = (
                calibration_LUT_VRTs['sigmaNought'].
                get_resized_vrt(self.dataset.RasterXSize,
                                self.dataset.RasterYSize,
                                eResampleAlg=1))
            self.bandVRTs['LUT_betaNought_VRT_'+pol[key]] = (
                calibration_LUT_VRTs['betaNought'].
                get_resized_vrt(self.dataset.RasterXSize,
                                self.dataset.RasterYSize,
                                eResampleAlg=1))
            self.bandVRTs['LUT_gamma_VRT'] = calibration_LUT_VRTs['gamma']
            self.bandVRTs['LUT_dn_VRT'] = calibration_LUT_VRTs['dn']

        for key in self.noiseXMLDict:
            noise_LUT_VRT = self.get_LUT_VRTs(self.noiseXMLDict[key],
                                              'noiseVectorList',
                                              ['noiseLut'])[0]
            self.bandVRTs['LUT_noise_VRT_'+pol[key]] = (
                noise_LUT_VRT['noiseLut'].get_resized_vrt(
                    self.dataset.RasterXSize,
                    self.dataset.RasterYSize,
                    eResampleAlg=1))

        metaDict = []
        bandNumberDict = {}
        bnmax = 0
        for key in gdalDatasets.keys():
            dsPath, dsName = os.path.split(mdsDict[key])
            name = 'DN_%s' % pol[key]
            # A dictionary of band numbers is needed for the pixel function
            # bands further down. This is not the best solution. It would be
            # better to have a function in VRT that returns the number given a
            # band name. This function exists in Nansat but could perhaps be
            # moved to VRT? The existing nansat function could just call the
            # VRT one...
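            # A hypothetical sketch of such a VRT helper (not an existing
            # method in this code) could simply look the number up from the
            # band metadata, e.g.:
            #
            #     def band_number(vrt, band_name):
            #         for i in range(1, vrt.dataset.RasterCount + 1):
            #             band = vrt.dataset.GetRasterBand(i)
            #             if band.GetMetadataItem('name') == band_name:
            #                 return i
            #         raise ValueError('No band named %s' % band_name)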
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            band = gdalDatasets[key].GetRasterBand(1)
            dtype = band.DataType
            metaDict.append({
                'src': {
                    'SourceFilename': mdsDict[key],
                    'SourceBand': 1,
                    'DataType': dtype,
                },
                'dst': {
                    'name': name,
                    #'SourceTransferType': gdal.GetDataTypeName(dtype),
                    #'dataType': 6,
                },
            })
        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        '''
        Calibration should be performed as

        s0 = DN^2/sigmaNought^2,

        where sigmaNought is from e.g.
        annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml,
        and DN is the Digital Numbers in the tiff files.

        Also the noise should be subtracted.

        See
        https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT
        '''
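        # Hedged sketch (not part of this mapper): with numpy arrays for the
        # digital numbers and the LUTs resampled to the image grid, the
        # calibration described above would amount to roughly
        #
        #     dn2 = dn.astype('float32') ** 2
        #     sigma0 = (dn2 - noise_lut) / sigma_nought_lut ** 2
        #
        # 'dn', 'noise_lut' and 'sigma_nought_lut' are assumed names; in this
        # mapper the computation is delegated to the 'Sentinel1Calibration'
        # pixel function added further down.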
        # Get look direction
        sat_heading = initial_bearing(longitude[:-1, :],
                                      latitude[:-1, :],
                                      longitude[1:, :],
                                      latitude[1:, :])
        look_direction = scipy.ndimage.interpolation.zoom(
            np.mod(sat_heading + 90, 360),
            (np.shape(longitude)[0] / (np.shape(longitude)[0]-1.), 1))
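        # initial_bearing between consecutive grid rows gives the satellite
        # heading; adding 90 degrees yields the (right-looking) antenna look
        # direction, and the zoom restores the row lost by the row-to-row
        # differencing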

        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT(array=look_direction_u,
                         lat=latitude, lon=longitude)
        look_v_VRT = VRT(array=look_direction_v,
                         lat=latitude, lon=longitude)
        lookVRT = VRT(lat=latitude, lon=longitude)
        lookVRT._create_band([{'SourceFilename': look_u_VRT.fileName,
                               'SourceBand': 1},
                              {'SourceFilename': look_v_VRT.fileName,
                               'SourceBand': 1}],
                             {'PixelFunctionType': 'UVToDirectionTo'}
                             )

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize,
                                          self.dataset.RasterYSize,
                                          eResampleAlg=1)

        # Store VRTs so that they are accessible later
        self.bandVRTs['look_u_VRT'] = look_u_VRT
        self.bandVRTs['look_v_VRT'] = look_v_VRT
        self.bandVRTs['lookVRT'] = lookVRT

        metaDict = []
        # Add bands to full size VRT
        for key in pol:
            name = 'LUT_sigmaNought_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': {'SourceFilename':
                         (self.bandVRTs['LUT_sigmaNought_VRT_' +
                          pol[key]].fileName),
                         'SourceBand': 1
                         },
                 'dst': {'name': name
                         }
                 })
            name = 'LUT_noise_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename': self.bandVRTs['LUT_noise_VRT_' +
                                                   pol[key]].fileName,
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })

        name = 'look_direction'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        metaDict.append({
            'src': {
                'SourceFilename': self.bandVRTs['lookVRT'].fileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': name
            }
        })

        for key in gdalDatasets.keys():
            dsPath, dsName = os.path.split(mdsDict[key])
            name = 'sigma0_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': [{'SourceFilename': self.fileName,
                          'SourceBand': bandNumberDict['DN_%s' % pol[key]],
                          },
                         {'SourceFilename':
                          (self.bandVRTs['LUT_sigmaNought_VRT_%s'
                           % pol[key]].fileName),
                          'SourceBand': 1
                          }
                         ],
                 'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                         'PixelFunctionType': 'Sentinel1Calibration',
                         'polarization': pol[key],
                         'suffix': pol[key],
                         },
                 })
            name = 'beta0_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': [{'SourceFilename': self.fileName,
                          'SourceBand': bandNumberDict['DN_%s' % pol[key]]
                          },
                         {'SourceFilename':
                          (self.bandVRTs['LUT_betaNought_VRT_%s'
                           % pol[key]].fileName),
                          'SourceBand': 1
                          }
                         ],
                 'dst': {'wkv': 'surface_backwards_brightness_coefficient_of_radar_wave',
                         'PixelFunctionType': 'Sentinel1Calibration',
                         'polarization': pol[key],
                         'suffix': pol[key],
                         },
                 })

        self._create_bands(metaDict)

        # Add incidence angle as band
        name = 'incidence_angle'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.bandVRTs['incVRT'].fileName,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_incidence',
               'name': name}
        self._create_band(src, dst)
        self.dataset.FlushCache()

        # Add elevation angle as band
        name = 'elevation_angle'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.bandVRTs['eleVRT'].fileName,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_elevation',
               'name': name}
        self._create_band(src, dst)
        self.dataset.FlushCache()

        # Add sigma0_VV
        pp = [pol[key] for key in pol]
        if 'VV' not in pp and 'HH' in pp:
            name = 'sigma0_VV'
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            src = [{'SourceFilename': self.fileName,
                    'SourceBand': bandNumberDict['DN_HH'],
                    },
                   {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_HH'].
                                       fileName),
                    'SourceBand': 1
                    },
                   {'SourceFilename': (self.bandVRTs['LUT_sigmaNought_VRT_HH'].
                                       fileName),
                    'SourceBand': 1,
                    },
                   {'SourceFilename': self.bandVRTs['incVRT'].fileName,
                    'SourceBand': 1}
                   ]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self._create_band(src, dst)
            self.dataset.FlushCache()

        # set time coverage from the acquisition period in the manifest
        n = Node.create(self.manifestXML)
        meta = n.node('metadataSection')
        for nn in meta.children:
            if nn.getAttribute('ID') == u'acquisitionPeriod':
                # set valid time
                self.dataset.SetMetadataItem(
                    'time_coverage_start',
                    parse((nn.node('metadataWrap').
                           node('xmlData').
                           node('safe:acquisitionPeriod')['safe:startTime'])
                          ).isoformat())
                self.dataset.SetMetadataItem(
                    'time_coverage_end',
                    parse((nn.node('metadataWrap').
                           node('xmlData').
                           node('safe:acquisitionPeriod')['safe:stopTime'])
                          ).isoformat())

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('sar')
        ee = pti.get_gcmd_platform('sentinel-1a')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Exemplo n.º 59
0
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 resolution='low',
                 **kwargs):
        ''' Create LANDSAT VRT from multiple tif files or single tar.gz file'''
        mtlFileName = ''
        bandFileNames = []
        bandSizes = []
        bandDatasets = []
        fname = os.path.split(filename)[1]

        if (filename.endswith('.tar') or filename.endswith('.tar.gz')
                or filename.endswith('.tgz')):
            # try to open .tar or .tar.gz or .tgz file with tar
            try:
                tarFile = tarfile.open(filename)
            except:
                raise WrongMapperError

            # collect band file names, sizes and GDAL datasets
            # into the bandFileNames, bandSizes and bandDatasets lists
            tarNames = sorted(tarFile.getnames())
            for tarName in tarNames:
                # check if TIF files inside TAR qualify
                if (tarName[0] in ['L', 'M']
                        and os.path.splitext(tarName)[1] in ['.TIF', '.tif']):
                    # open TIF file from TAR using VSI
                    sourceFilename = '/vsitar/%s/%s' % (filename, tarName)
                    gdalDatasetTmp = gdal.Open(sourceFilename)
                    # keep name, GDALDataset and size
                    bandFileNames.append(sourceFilename)
                    bandSizes.append(gdalDatasetTmp.RasterXSize)
                    bandDatasets.append(gdalDatasetTmp)
                elif (tarName.endswith('MTL.txt')
                      or tarName.endswith('MTL.TXT')):
                    # get mtl file
                    mtlFileName = tarName

        elif ((fname.startswith('L') or fname.startswith('M'))
              and (fname.endswith('.tif') or fname.endswith('.TIF')
                   or fname.endswith('._MTL.txt'))):

            # try to find TIF/tif files with the same name as input file
            path, coreName = os.path.split(filename)
            coreName = os.path.splitext(coreName)[0].split('_')[0]
            coreNameMask = coreName + '*[tT][iI][fF]'
            tifNames = sorted(glob.glob(os.path.join(path, coreNameMask)))
            for tifName in tifNames:
                sourceFilename = tifName
                gdalDatasetTmp = gdal.Open(sourceFilename)
                # keep name, GDALDataset and size
                bandFileNames.append(sourceFilename)
                bandSizes.append(gdalDatasetTmp.RasterXSize)
                bandDatasets.append(gdalDatasetTmp)

            # get mtl file
            mtlFiles = glob.glob(coreName + '*[mM][tT][lL].[tT][xX][tT]')
            if len(mtlFiles) > 0:
                mtlFileName = mtlFiles[0]
        else:
            raise WrongMapperError

        # if no TIF files were found, this is not the appropriate mapper
        if not bandFileNames:
            raise WrongMapperError

        # get the appropriate band size based on the required resolution
        if resolution == 'low':
            bandXSize = min(bandSizes)
        elif resolution in ['high', 'hi']:
            bandXSize = max(bandSizes)
        else:
            raise ValueError('Wrong resolution %s for file %s' %
                             (resolution, filename))

        # find bands with appropriate size and put to metaDict
        metaDict = []
        for bandFileName, bandSize, bandDataset in zip(bandFileNames,
                                                       bandSizes,
                                                       bandDatasets):
            if bandSize == bandXSize:
                # let last part of file name be suffix
                bandSuffix = os.path.splitext(bandFileName)[0].split('_')[-1]

                metaDict.append({
                    'src': {
                        'SourceFilename': bandFileName,
                        'SourceBand': 1,
                        'ScaleRatio': 0.1
                    },
                    'dst': {
                        'wkv': 'toa_outgoing_spectral_radiance',
                        'suffix': bandSuffix
                    }
                })
                gdalDataset4Use = bandDataset

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset4Use)

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        if len(mtlFileName) > 0:
            mtlFileName = os.path.join(
                os.path.split(bandFileNames[0])[0], mtlFileName)
            mtlFileLines = [
                line.strip() for line in self.read_vsi(mtlFileName).split('\n')
            ]
            dateString = [
                line.split('=')[1].strip() for line in mtlFileLines
                if ('DATE_ACQUIRED' in line or 'ACQUISITION_DATE' in line)
            ][0]
            timeStr = [
                line.split('=')[1].strip() for line in mtlFileLines
                if ('SCENE_CENTER_TIME' in line
                    or 'SCENE_CENTER_SCAN_TIME' in line)
            ][0]
            time_start = parse_time(dateString + 'T' + timeStr).isoformat()
            time_end = (parse_time(dateString + 'T' + timeStr) +
                        datetime.timedelta(microseconds=60000000)).isoformat()
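            # time_end is set one minute (60 000 000 microseconds) after the
            # scene centre time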

            self.dataset.SetMetadataItem('time_coverage_start', time_start)
            self.dataset.SetMetadataItem('time_coverage_end', time_end)

        # set platform
        platform = 'LANDSAT'
        if fname[2].isdigit():
            platform += '-' + fname[2]
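        # e.g. for a scene ID such as 'LC80140342016051LGN00' (an assumed
        # example), fname[2] is '8' and the platform becomes 'LANDSAT-8'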
        ee = pti.get_gcmd_platform(platform)
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        # set instrument
        instrument = {
            'LANDSAT': 'MSS',
            'LANDSAT-1': 'MSS',
            'LANDSAT-2': 'MSS',
            'LANDSAT-3': 'MSS',
            'LANDSAT-4': 'TM',
            'LANDSAT-5': 'TM',
            'LANDSAT-7': 'ETM+',
            'LANDSAT-8': 'OLI'
        }[platform]
        ee = pti.get_gcmd_instrument(instrument)
        self.dataset.SetMetadataItem('instrument', json.dumps(ee))
Exemplo n.º 60
0
    def __init__(self, filename, gdalDataset, gdalMetadata, latlonGrid=None,
                 mask='', **kwargs):

        ''' Create GLOBCOLOUR L3B VRT

        Parameters
        -----------
        filename : string
        gdalDataset : gdal dataset
        gdalMetadata : gdal metadata
        latlonGrid : numpy 2 layered 2D array with lat/lons of desired grid
        '''
        # test if the input file is GLOBCOLOUR L3B
        iDir, iFile = os.path.split(filename)
        iFileName, iFileExt = os.path.splitext(iFile)
        #print 'idir:', iDir, iFile, iFileName[0:5], iFileExt[0:8]
        if (iFileName[0:4] != 'L3b_'
            or iFileExt != '.nc'
            or not os.path.exists(filename)
            or (gdalDataset is not None
                and (len(gdalDataset.GetSubDatasets()) > 0
                     or gdalDataset.RasterCount > 0))):
            raise WrongMapperError

        # define shape of GLOBCOLOUR grid
        GLOBCOLOR_ROWS = 180 * 24
        GLOBCOLOR_COLS = 360 * 24

        # define lon/lat grids for projected var
        if latlonGrid is None:
            latlonGrid = np.mgrid[90:-90:4320j,
                                  -180:180:8640j].astype('float32')
            #latlonGrid = np.mgrid[80:50:900j, -10:30:1200j].astype('float16')
            #latlonGrid = np.mgrid[47:39:300j, 25:45:500j].astype('float32')

        # create empty VRT dataset with geolocation only
        self._init_from_lonlat(latlonGrid[1], latlonGrid[0])

        # get list of similar (same date) files in the directory
        simFilesMask = os.path.join(iDir, iFileName[0:30] + '*' + mask + '.nc')
        simFiles = glob.glob(simFilesMask)
        simFiles.sort()

        metaDict = []
        self.band_vrts = {'mask': [], 'lonlat': []}
        mask = None
        for simFile in simFiles:
            print('sim: ', simFile)
            # copy simFile to a temporary file
            tmpf = tempfile.mkstemp()
            shutil.copyfile(simFile, tmpf[1])
            f = Dataset(tmpf[1])

            # get iBinned, index for converting from binned into GLOBCOLOR-grid
            colBinned = f.variables['col'][:]
            rowBinned = f.variables['row'][:]
            iBinned = (colBinned.astype('uint32') +
                      (rowBinned.astype('uint32') - 1) * GLOBCOLOR_COLS)
            colBinned = None
            rowBinned = None

            # get iRawPro, index for converting
            # from GLOBCOLOR-grid to latlonGrid
            yRawPro = np.rint(1 + (GLOBCOLOR_ROWS - 1) *
                              (latlonGrid[0] + 90) / 180.)
            lon_step_Mat = 24. * np.cos(np.pi * latlonGrid[0] / 180.)
            xRawPro = np.rint(1 + (latlonGrid[1] + 180) * lon_step_Mat)
            iRawPro = xRawPro.astype('uint32') + (yRawPro.astype('uint32') - 1) * GLOBCOLOR_COLS
            yRawPro = None
            xRawPro = None

            for varName in f.variables:
                # find variable with _mean, eg CHL1_mean
                if '_mean' in varName:
                    var = f.variables[varName]
                    break

            # skip the variable if no WKV is given for it in Globcolour
            if varName not in self.varname2wkv:
                continue

            # get WKV
            varWKV = self.varname2wkv[varName]

            # read binned data
            varBinned = var[:]

            # convert to GLOBCOLOR grid
            varRawPro = np.zeros([GLOBCOLOR_ROWS, GLOBCOLOR_COLS], 'float32')
            varRawPro.flat[iBinned] = varBinned

            # convert to latlonGrid
            varPro = varRawPro.flat[iRawPro.flat[:]].reshape(iRawPro.shape)

            # add mask band
            if mask is None:
                mask = np.zeros(varPro.shape, 'uint8')
                mask[:] = 1
                mask[varPro > 0] = 64

                # add VRT with array with data from projected variable
                self.band_vrts['mask'].append(VRT.from_array(mask))

                # add metadata to the dictionary
                metaDict.append({
                    'src': {'SourceFilename': (self.band_vrts['mask'][-1].
                                               filename),
                            'SourceBand':  1},
                    'dst': {'name': 'mask'}})

            # add VRT with array with data from projected variable
            self.band_vrts['lonlat'].append(VRT.from_array(varPro))

            # add metadata to the dictionary
            metaEntry = {
                'src': {'SourceFilename': self.band_vrts['lonlat'][-1].filename,
                        'SourceBand':  1},
                'dst': {'wkv': varWKV, 'original_name': varName}}

            # add wavelength for nLw
            longName = 'Fully normalised water leaving radiance'
            if longName in f.variables[varName].long_name:
                simWavelength = varName.split('L')[1].split('_mean')[0]
                metaEntry['dst']['suffix'] = simWavelength
                metaEntry['dst']['wavelength'] = simWavelength

            # add all metadata from NC-file
            for attr in var.ncattrs():
                metaEntry['dst'][attr] = var.getncattr(attr)

            metaDict.append(metaEntry)

            # add Rrsw band
            metaEntry2 = self.make_rrsw_meta_entry(metaEntry)
            if metaEntry2 is not None:
                metaDict.append(metaEntry2)

            os.remove(tmpf[1])

        instrument = f.title.strip().split(' ')[-2].split('/')[0]
        mm = pti.get_gcmd_instrument(instrument)
        self.dataset.SetMetadataItem('instrument', json.dumps(mm))

        platform = {
            'MODIS': 'AQUA',
            'MERIS': 'ENVISAT',
            'SEAWIFS': 'ORBVIEW-2',
            'VIIRS': 'SUOMI-NPP'}[instrument.upper()]
        pp = pti.get_gcmd_platform(platform)
        self.dataset.SetMetadataItem('platform', json.dumps(pp))

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        # add time
        startDate = datetime.datetime(int(iFileName[4:8]),
                                      int(iFileName[8:10]),
                                      int(iFileName[10:12]))

        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start', startDate.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end', startDate.isoformat())