def _create_empty(self, gdal_dataset, gdal_metadata):
        """Initialize the VRT from the first sub-file, then make sure the
        dataset carries geolocation and projection information.

        Parameters
        ----------
        gdal_dataset : gdal.Dataset
            input dataset; its sub-filenames provide bands and geolocation
        gdal_metadata : dict
            metadata passed to the parent constructor

        Raises
        ------
        WrongMapperError
            if none of the sub-datasets carries a projection
        """
        subfiles = self.sub_filenames(gdal_dataset)
        sub0 = gdal.Open(subfiles[0])

        super(Mapper, self).__init__(gdalDataset=sub0,
                                     srcMetadata=gdal_metadata)

        if not (self.dataset.GetGeoTransform() or self.geolocationArray.xVRT):
            # Set geolocation array from the first lon/lat sub-datasets
            x_source = [s for s in subfiles
                        if 'longitude' in s or 'GEOLOCATION_X_DATASET' in s][0]
            y_source = [s for s in subfiles
                        if 'latitude' in s or 'GEOLOCATION_Y_DATASET' in s][0]
            self.add_geolocationArray(GeolocationArray(x_source, y_source))

        if not self.get_projection():
            # Get band projections; open each sub-dataset only once
            # (the original opened every file twice)
            projections = []
            for sub in subfiles:
                projection = gdal.Open(sub).GetProjection()
                if projection:
                    projections.append(projection)
            if not projections:
                raise WrongMapperError

            # Check that projection is the same for all bands
            assert all(proj == projections[0] for proj in projections)
            # Set projection
            self.dataset.SetProjection(projections[0])
# --- Example #2 ---
    def add_geolocation_from_ads(self, gdalDataset, zoomSize=500, step=1):
        ''' Add geolocation domain metadata to the dataset

        Get VRTs with zoomed arrays of lon and lat
        Create geolocation object and add to the metadata

        Parameters
        ----------
            gdalDataset: GDAL Dataset
                input dataset
            zoomSize: int, optional, 500
                size, to which the ADS array will be zoomed using scipy
                array of this size will be stored in memory
            step: int
                step of pixel and line in GeolocationArrays. lat/lon grids are
                generated at that step

        Modifies:
        ---------
            Adds Geolocation Array metadata
        '''
        # get VRTs with zoomed lon and lat arrays (band names taken from
        # self.lonlatNames)
        xyVRTs = self.get_ads_vrts(gdalDataset, self.lonlatNames, zoomSize,
                                   step)

        # Add geolocation domain metadata to the dataset;
        # xyVRTs[0] is longitude (x), xyVRTs[1] is latitude (y), band 1 each
        self.add_geolocationArray(
            GeolocationArray(xVRT=xyVRTs[0],
                             yVRT=xyVRTs[1],
                             xBand=1,
                             yBand=1,
                             srs=gdalDataset.GetGCPProjection(),
                             lineOffset=0,
                             lineStep=step,
                             pixelOffset=0,
                             pixelStep=step))
# --- Example #3 ---
    def __init__(self,
                 fileName,
                 gdalDataset,
                 gdalMetadata,
                 latlonGrid=None,
                 mask='',
                 **kwargs):
        ''' Create VRT from an ASCAT netCDF file

        Parameters
        -----------
        fileName : string
        gdalDataset : gdal dataset
        gdalMetadata : gdal metadata
        latlonGrid : numpy 2 layered 2D array with lat/lons of desired grid

        Raises
        ------
        WrongMapperError
            if the file name does not match the ASCAT naming convention
        '''
        # test if input file is ASCAT; an explicit check is used instead of
        # assert, which would be silently stripped when running python -O
        iDir, iFile = os.path.split(fileName)
        iFileName, iFileExt = os.path.splitext(iFile)
        if iFileName[0:6] != 'ascat_' or iFileExt != '.nc':
            raise WrongMapperError

        # Create geolocation VRT with the raster size of the lat subdataset
        subDataset = gdal.Open('NETCDF:"' + fileName + '":lat')
        self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize,
                             srcRasterYSize=subDataset.RasterYSize)

        # lon/lat are stored as scaled integers; ScaleRatio/ScaleOffset
        # convert them to degrees (offset presumably shifts longitude
        # into the desired range - TODO confirm against the data)
        GeolocMetaDict = [{
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":lon'),
                'SourceBand': 1,
                'ScaleRatio': 0.00001,
                'ScaleOffset': -360
            },
            'dst': {}
        }, {
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":lat'),
                'SourceBand': 1,
                'ScaleRatio': 0.00001,
                'ScaleOffset': 0
            },
            'dst': {}
        }]

        self.GeolocVRT._create_bands(GeolocMetaDict)

        GeolocObject = GeolocationArray(
            xVRT=self.GeolocVRT,
            yVRT=self.GeolocVRT,
            # x = lon, y = lat
            xBand=1,
            yBand=2,
            lineOffset=0,
            pixelOffset=0,
            lineStep=1,
            pixelStep=1)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self,
                     srcRasterXSize=subDataset.RasterXSize,
                     srcRasterYSize=subDataset.RasterYSize,
                     gdalDataset=subDataset,
                     geolocationArray=GeolocObject,
                     srcProjection=GeolocObject.d['SRS'])

        # Scale and NODATA should ideally be taken directly from raw file
        metaDict = [{
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":wind_speed'),
                'ScaleRatio': 0.01,
                'NODATA': -32767
            },
            'dst': {
                'name': 'wind_speed',
                'wkv': 'wind_speed'
            }
        }, {
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":wind_dir'),
                'ScaleRatio': 0.1,
                'NODATA': -32767
            },
            'dst': {
                'name': 'wind_direction',
                'wkv': 'wind_direction'
            }
        }]

        self._create_bands(metaDict)

        # This should not be necessary
        # - should be provided by GeolocationArray!
        self.dataset.SetProjection(GeolocObject.d['SRS'])

        # Parse start time from the file name (ascat_YYYYMMDD_HHMMSS...)
        startTime = datetime.datetime(int(iFileName[6:10]),
                                      int(iFileName[10:12]),
                                      int(iFileName[12:14]),
                                      int(iFileName[15:17]),
                                      int(iFileName[17:19]),
                                      int(iFileName[19:21]))
        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start',
                                     startTime.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                                     startTime.isoformat())

        # set SADCAT specific metadata
        self.dataset.SetMetadataItem('sensor', 'ASCAT')
        self.dataset.SetMetadataItem('satellite', 'Metop-A')
        warnings.warn("Setting satellite to Metop-A - update mapper if it is"
                      " e.g. Metop-B")
        self.dataset.SetMetadataItem('mapper', 'ascat_nasa')
# --- Example #4 ---
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT from a NOAA AVHRR LAC binary file

        Reads satellite ID, acquisition time and scanline counts from the
        binary header, builds geolocation arrays from the embedded lon/lat
        records, converts them to GCPs and creates scaled raster bands for
        the 5 AVHRR channels.

        Raises
        ------
        WrongMapperError
            if the file cannot be opened or its header cannot be parsed
        '''
        ########################################
        # Read metadata from binary file
        ########################################
        try:
            fp = open(fileName, 'rb')
        except IOError:
            raise WrongMapperError
        fp.seek(24)
        try:
            satID = int(struct.unpack('<l', fp.read(4))[0])
        except struct.error:
            # file too short / not this binary format
            raise WrongMapperError

        ##################
        # Read time
        ##################
        fp.seek(44)
        year = int(struct.unpack('<l', fp.read(4))[0])
        dayofyear = int(struct.unpack('<l', fp.read(4))[0])
        millisecondsOfDay = int(struct.unpack('<l', fp.read(4))[0])
        try:
            time = (datetime.datetime(year, 1, 1) + datetime.timedelta(
                dayofyear - 1, milliseconds=millisecondsOfDay))
        except (ValueError, OverflowError):
            # implausible date fields -> not this format
            raise WrongMapperError

        fp.seek(72)
        numScanLines = int(struct.unpack('<l', fp.read(4))[0])
        missingScanLines = int(struct.unpack('<l', fp.read(4))[0])
        numCalibratedScanLines = int(struct.unpack('<l', fp.read(4))[0])
        if missingScanLines != 0:
            print('WARNING: Missing scanlines: ' + str(missingScanLines))

        fp.seek(88)
        dataFormatNum = int(struct.unpack('<l', fp.read(4))[0])
        dataFormat = dataFormats[dataFormatNum]

        # Determine if we have channel 3A (daytime) or channel 3B (nighttime)
        def int2bitstring(s):
            # recursive int -> binary-string conversion
            return str(s) if s <= 1 else int2bitstring(s >> 1) + str(s & 1)

        fp.seek(headerLength + 20)
        scanlinebitFirstline = int(struct.unpack('<L', fp.read(4))[0])
        fp.seek(headerLength + recordLength * (numCalibratedScanLines - 2) +
                20)
        scanlinebitLastline = int(struct.unpack('<L', fp.read(4))[0])

        # last bit of the scanline word is '0' for channel 3A (daytime)
        startsWith3A = int2bitstring(scanlinebitFirstline)[-1] == '0'
        endsWith3A = int2bitstring(scanlinebitLastline)[-1] == '0'

        if startsWith3A != endsWith3A:
            # NOTE: was Python 2 print statements (a SyntaxError on Python 3)
            print('############################################')
            print('WARNING: channel 3 switches ')
            print('between daytime and nighttime (3A <-> 3B)')
            print('###########################################')

        ###########################
        # Make Geolocation Arrays
        ###########################
        srcRasterYSize = numCalibratedScanLines

        # Making VRT with raw (unscaled) lon and lat
        # (smaller bands than full dataset)
        self.bandVRTs = {
            'RawGeolocVRT': VRT(srcRasterXSize=51,
                                srcRasterYSize=srcRasterYSize)
        }
        RawGeolocMetaDict = []
        for lonlatNo in range(1, 3):
            RawGeolocMetaDict.append({
                'src': {
                    'SourceFilename': fileName,
                    'SourceBand': 0,
                    'SourceType': "RawRasterBand",
                    'DataType': gdal.GDT_Int32,
                    'ImageOffset': (headerLength + 676 + (lonlatNo - 1) * 4),
                    'PixelOffset': 8,
                    'LineOffset': recordLength,
                    'ByteOrder': 'LSB'
                },
                'dst': {}
            })

        self.bandVRTs['RawGeolocVRT']._create_bands(RawGeolocMetaDict)

        # Make derived GeolocVRT with scaled lon and lat
        self.bandVRTs['GeolocVRT'] = VRT(srcRasterXSize=51,
                                         srcRasterYSize=srcRasterYSize)
        GeolocMetaDict = []
        for lonlatNo in range(1, 3):
            GeolocMetaDict.append({
                'src': {
                    'SourceFilename': (self.bandVRTs['RawGeolocVRT'].fileName),
                    'SourceBand': lonlatNo,
                    'ScaleRatio': 0.0001,
                    'ScaleOffset': 0,
                    'DataType': gdal.GDT_Int32
                },
                'dst': {}
            })

        self.bandVRTs['GeolocVRT']._create_bands(GeolocMetaDict)

        GeolocObject = GeolocationArray(
            xVRT=self.bandVRTs['GeolocVRT'],
            yVRT=self.bandVRTs['GeolocVRT'],
            xBand=2,
            yBand=1,  # x = lon, y = lat
            lineOffset=0,
            pixelOffset=25,
            lineStep=1,
            pixelStep=40)

        #######################
        # Initialize dataset
        #######################
        # create empty VRT dataset with geolocation only
        # (from Geolocation Array)
        VRT.__init__(self,
                     srcRasterXSize=2048,
                     srcRasterYSize=numCalibratedScanLines,
                     geolocationArray=GeolocObject,
                     srcProjection=GeolocObject.d['SRS'])

        # Since warping quality is horrible using geolocation arrays
        # which are much smaller than raster bands (due to a bug in GDAL:
        # http://trac.osgeo.org/gdal/ticket/4907), the geolocation arrays
        # are here converted to GCPs. Only a subset of GCPs is added,
        # significantly increasing speed when using -tps warping
        reductionFactor = 2
        self.convert_GeolocationArray2GPCs(1 * reductionFactor,
                                           40 * reductionFactor)

        ##################
        # Create bands
        ##################
        self.bandVRTs['RawBandsVRT'] = VRT(
            srcRasterXSize=2048, srcRasterYSize=numCalibratedScanLines)
        RawMetaDict = []
        metaDict = []

        # channel 3 wavelength depends on daytime (3A) vs nighttime (3B)
        centralWavelengths = [0.63, 0.86, np.nan, 10.8, 12.0]
        if startsWith3A:
            centralWavelengths[2] = 1.6
            firstIRband = 4
        else:
            centralWavelengths[2] = 3.7
            firstIRband = 3

        for bandNo in range(1, 6):
            RawMetaDict.append({
                'src': {
                    'SourceFilename': fileName,
                    'SourceBand': 0,
                    'SourceType': "RawRasterBand",
                    'dataType': gdal.GDT_UInt16,
                    'ImageOffset': imageOffset + (bandNo - 1) * 2,
                    'PixelOffset': 10,
                    'LineOffset': recordLength,
                    'ByteOrder': 'LSB'
                },
                'dst': {
                    'dataType': gdal.GDT_UInt16
                }
            })

            if bandNo < firstIRband:
                wkv = 'albedo'
                minmax = '0 60'
            else:
                wkv = 'brightness_temperature'
                minmax = '290 210'

            metaDict.append({
                'src': {
                    'SourceFilename': (self.bandVRTs['RawBandsVRT'].fileName),
                    'SourceBand': bandNo,
                    'ScaleRatio': 0.01,
                    'ScaleOffset': 0,
                    'DataType': gdal.GDT_UInt16
                },
                'dst': {
                    'originalFilename': fileName,
                    'dataType': gdal.GDT_Float32,
                    'wkv': wkv,
                    'colormap': 'gray',
                    'wavelength': centralWavelengths[bandNo - 1],
                    'minmax': minmax
                }
            })

        # Add temperature difference between ch3 and ch 4 as pixelfunction
        if not startsWith3A:  # Only if ch3 is IR (nighttime)
            metaDict.append({
                'src': [{
                    'SourceFilename': (self.bandVRTs['RawBandsVRT'].fileName),
                    'ScaleRatio': 0.01,
                    'ScaleOffset': 0,
                    'SourceBand': 4
                }, {
                    'SourceFilename': (self.bandVRTs['RawBandsVRT'].fileName),
                    'ScaleRatio': 0.01,
                    'ScaleOffset': 0,
                    'SourceBand': 3
                }],
                'dst': {
                    'PixelFunctionType': 'diff',
                    'originalFilename': fileName,
                    'dataType': gdal.GDT_Float32,
                    'name': 'ch4-ch3',
                    'short_name': 'ch4-ch3',
                    'long_name': 'AVHRR ch4 - ch3 temperature difference',
                    'colormap': 'gray',
                    'units': 'kelvin',
                    'minmax': '-3 3'
                }
            })

        # BUGFIX: was 'self.self.bandVRTs' (AttributeError), so the raw
        # bands were never created
        self.bandVRTs['RawBandsVRT']._create_bands(RawMetaDict)
        self._create_bands(metaDict)

        globalMetadata = {}
        globalMetadata['satID'] = str(satID)
        globalMetadata['daytime'] = str(int(startsWith3A))
        self.dataset.SetMetadata(globalMetadata)

        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start', time.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end', time.isoformat())
    def __init__(self,
                 fileName,
                 gdalDataset,
                 gdalMetadata,
                 latlonGrid=None,
                 mask='',
                 **kwargs):
        ''' Create VRT from an ASCAT netCDF file

        Parameters
        -----------
        fileName : string
        gdalDataset : gdal dataset
        gdalMetadata : gdal metadata
        latlonGrid : numpy 2 layered 2D array with lat/lons of desired grid

        Raises
        ------
        WrongMapperError
            if the file name does not match the ASCAT naming convention
        '''
        # test if input file is ASCAT; an explicit check is used instead of
        # assert, which would be silently stripped when running python -O
        iDir, iFile = os.path.split(fileName)
        iFileName, iFileExt = os.path.splitext(iFile)
        if iFileName[0:6] != 'ascat_' or iFileExt != '.nc':
            raise WrongMapperError

        # Create geolocation VRT with the raster size of the lat subdataset
        subDataset = gdal.Open('NETCDF:"' + fileName + '":lat')
        self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize,
                             srcRasterYSize=subDataset.RasterYSize)

        # lon/lat are stored as scaled integers; ScaleRatio/ScaleOffset
        # convert them to degrees (offset presumably shifts longitude
        # into the desired range - TODO confirm against the data)
        GeolocMetaDict = [{
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":lon'),
                'SourceBand': 1,
                'ScaleRatio': 0.00001,
                'ScaleOffset': -360
            },
            'dst': {}
        }, {
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":lat'),
                'SourceBand': 1,
                'ScaleRatio': 0.00001,
                'ScaleOffset': 0
            },
            'dst': {}
        }]

        self.GeolocVRT._create_bands(GeolocMetaDict)

        GeolocObject = GeolocationArray(
            xVRT=self.GeolocVRT,
            yVRT=self.GeolocVRT,
            # x = lon, y = lat
            xBand=1,
            yBand=2,
            lineOffset=0,
            pixelOffset=0,
            lineStep=1,
            pixelStep=1)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self,
                     srcRasterXSize=subDataset.RasterXSize,
                     srcRasterYSize=subDataset.RasterYSize,
                     gdalDataset=subDataset,
                     geolocationArray=GeolocObject,
                     srcProjection=GeolocObject.d['SRS'])

        # Scale and NODATA should ideally be taken directly from raw file
        metaDict = [{
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":wind_speed'),
                'ScaleRatio': 0.01,
                'NODATA': -32767
            },
            'dst': {
                'name': 'windspeed',
                'wkv': 'wind_speed'
            }
        }, {
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":wind_dir'),
                'ScaleRatio': 0.1,
                'NODATA': -32767
            },
            'dst': {
                'name': 'winddirection',
                'wkv': 'wind_from_direction'
            }
        }]

        self._create_bands(metaDict)

        # This should not be necessary
        # - should be provided by GeolocationArray!
        self.dataset.SetProjection(GeolocObject.d['SRS'])

        # Parse start time from the file name (ascat_YYYYMMDD_HHMMSS...)
        startTime = datetime.datetime(int(iFileName[6:10]),
                                      int(iFileName[10:12]),
                                      int(iFileName[12:14]),
                                      int(iFileName[15:17]),
                                      int(iFileName[17:19]),
                                      int(iFileName[19:21]))
        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start',
                                     startTime.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                                     startTime.isoformat())

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('ascat')
        ee = pti.get_gcmd_platform('metop-a')

        # TODO: Validate that the found instrument and platform are indeed what
        # we want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
# --- Example #6 ---
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT from a Metop/NOAA AVHRR binary file

        Reads satellite number, data format, scanline counts and acquisition
        time from the binary header, builds geolocation arrays from the
        embedded lon/lat records, converts them to GCPs and creates
        raw-count raster bands for the 5 AVHRR channels.

        Raises
        ------
        WrongMapperError
            if the file cannot be opened, the header cannot be parsed, or
            the satellite number / data format are unknown
        '''
        ########################################
        # Read metadata from binary file
        ########################################
        try:
            fp = open(fileName, 'rb')
        except IOError:
            raise WrongMapperError
        fp.seek(72)

        try:
            satNum = int(struct.unpack('<H', fp.read(2))[0])
        except struct.error:
            # file too short / not this binary format
            raise WrongMapperError

        # NOTE(review): isMetop is unused below; kept as documentation of
        # the satellite-number convention (>= 11 means Metop)
        if satNum >= 11:
            isMetop = True
        else:
            isMetop = False

        if satNum in satIDs:
            satID = satIDs[satNum]
        else:
            raise WrongMapperError

        fp.seek(76)
        dataFormatNum = int(struct.unpack('<H', fp.read(2))[0])
        if dataFormatNum in dataFormats:
            dataFormat = dataFormats[dataFormatNum]
        else:
            raise WrongMapperError

        fp.seek(dataSetQualityIndicatorOffset + 14)
        numScanLines = int(struct.unpack('<H', fp.read(2))[0])
        numCalibratedScanLines = int(struct.unpack('<H', fp.read(2))[0])
        missingScanLines = int(struct.unpack('<H', fp.read(2))[0])
        if missingScanLines != 0:
            print('WARNING: Missing scanlines: ' + str(missingScanLines))

        ##################
        # Read time
        ##################
        fp.seek(84)
        year = int(struct.unpack('<H', fp.read(2))[0])
        dayofyear = int(struct.unpack('<H', fp.read(2))[0])
        millisecondsOfDay = int(struct.unpack('<l', fp.read(4))[0])
        time = (
            datetime.datetime(year, 1, 1) +
            datetime.timedelta(dayofyear - 1, milliseconds=millisecondsOfDay))

        # NOTE: the header also contains IR and visible calibration
        # coefficients (per scanline), but calibration is not applied by
        # this mapper - the bands below expose raw counts only. The
        # experimental (never enabled) calibration code that used to live
        # here as commented-out lines has been removed; see VCS history.

        ###########################
        # Make Geolocation Arrays
        ###########################
        srcRasterYSize = numCalibratedScanLines

        # Making VRT with raw (unscaled) lon and lat
        # (smaller bands than full dataset)
        self.bandVRTs = {
            'RawGeolocVRT': VRT(srcRasterXSize=51,
                                srcRasterYSize=srcRasterYSize)
        }
        RawGeolocMetaDict = []
        for lonlatNo in range(1, 3):
            RawGeolocMetaDict.append({
                'src': {
                    'SourceFilename': fileName,
                    'SourceBand': 0,
                    'SourceType': "RawRasterBand",
                    'DataType': gdal.GDT_Int32,
                    'ImageOffset': (headerLength + 640 + (lonlatNo - 1) * 4),
                    'PixelOffset': 8,
                    'LineOffset': recordLength,
                    'ByteOrder': 'LSB'
                },
                'dst': {}
            })

        self.bandVRTs['RawGeolocVRT']._create_bands(RawGeolocMetaDict)

        # Make derived GeolocVRT with scaled lon and lat
        self.bandVRTs['GeolocVRT'] = VRT(srcRasterXSize=51,
                                         srcRasterYSize=srcRasterYSize)
        GeolocMetaDict = []
        for lonlatNo in range(1, 3):
            GeolocMetaDict.append({
                'src': {
                    'SourceFilename': (self.bandVRTs['RawGeolocVRT'].fileName),
                    'SourceBand': lonlatNo,
                    'ScaleRatio': 0.0001,
                    'ScaleOffset': 0,
                    'DataType': gdal.GDT_Int32
                },
                'dst': {}
            })

        self.bandVRTs['GeolocVRT']._create_bands(GeolocMetaDict)

        GeolocObject = GeolocationArray(
            xVRT=self.bandVRTs['GeolocVRT'],
            yVRT=self.bandVRTs['GeolocVRT'],
            xBand=2,
            yBand=1,  # x = lon, y = lat
            lineOffset=0,
            pixelOffset=25,
            lineStep=1,
            pixelStep=40)

        #######################
        # Initialize dataset
        #######################
        # create empty VRT dataset with geolocation only
        # (from Geolocation Array)
        VRT.__init__(self,
                     srcRasterXSize=2048,
                     srcRasterYSize=numCalibratedScanLines,
                     geolocationArray=GeolocObject,
                     srcProjection=GeolocObject.d['SRS'])

        # Since warping quality is horrible using geolocation arrays
        # which are much smaller than raster bands (due to a bug in GDAL:
        # http://trac.osgeo.org/gdal/ticket/4907), the geolocation arrays
        # are here converted to GCPs. Only a subset of GCPs is added,
        # significantly increasing speed when using -tps warping
        reductionFactor = 2
        self.convert_GeolocationArray2GPCs(1 * reductionFactor,
                                           40 * reductionFactor)

        ##################
        # Create bands
        ##################
        metaDict = []
        # index 0 is unused so channels can be addressed as ch[1]..ch[5]
        ch = ({}, {}, {}, {}, {}, {})

        ch[1]['wavelength'] = 0.63
        ch[2]['wavelength'] = 0.86
        ch[3]['wavelength'] = '1.6 or 3.7 mum'
        ch[4]['wavelength'] = 10.8
        ch[5]['wavelength'] = 12.0

        ch[1]['minmax'] = '0 700'
        ch[2]['minmax'] = '0 700'
        ch[3]['minmax'] = '0 800'
        ch[4]['minmax'] = '400 1000'
        ch[5]['minmax'] = '400 1000'

        for bandNo in range(1, 6):
            metaDict.append({
                'src': {
                    'SourceFilename': fileName,
                    'SourceBand': 0,
                    'SourceType': "RawRasterBand",
                    'dataType': gdal.GDT_UInt16,
                    'ImageOffset': imageOffset + (bandNo - 1) * 2,
                    'PixelOffset': 10,
                    'LineOffset': recordLength,
                    'ByteOrder': 'LSB'
                },
                'dst': {
                    'dataType': gdal.GDT_UInt16,
                    'wkv': 'raw_counts',
                    'colormap': 'gray',
                    'wavelength': ch[bandNo]['wavelength'],
                    'minmax': ch[bandNo]['minmax'],
                    'unit': "1"
                }
            })

        self._create_bands(metaDict)

        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start', time.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end', time.isoformat())
# --- Example #7 ---
    def __init__(self,
                 inputFileName,
                 gdalDataset,
                 gdalMetadata,
                 logLevel=30,
                 rmMetadatas=[
                     'NETCDF_VARNAME', '_Unsigned', 'ScaleRatio',
                     'ScaleOffset', 'dods_variable'
                 ],
                 **kwargs):
        # Remove 'NC_GLOBAL#' and 'GDAL_' and 'NANSAT_'
        # from keys in gdalDataset
        tmpGdalMetadata = {}
        geoMetadata = {}
        origin_is_nansat = False
        if not gdalMetadata:
            raise WrongMapperError
        for key in gdalMetadata.keys():
            newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '')
            if 'NANSAT_' in newKey:
                geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key]
                origin_is_nansat = True
            else:
                tmpGdalMetadata[newKey] = gdalMetadata[key]
        gdalMetadata = tmpGdalMetadata
        fileExt = os.path.splitext(inputFileName)[1]

        # Get file names from dataset or subdataset
        subDatasets = gdalDataset.GetSubDatasets()
        if len(subDatasets) == 0:
            fileNames = [inputFileName]
        else:
            fileNames = [f[0] for f in subDatasets]

        # add bands with metadata and corresponding values to the empty VRT
        metaDict = []
        xDatasetSource = ''
        yDatasetSource = ''
        firstXSize = 0
        firstYSize = 0
        for _, fileName in enumerate(fileNames):
            subDataset = gdal.Open(fileName)
            # choose the first dataset whith grid
            if (firstXSize == 0 and firstYSize == 0
                    and subDataset.RasterXSize > 1
                    and subDataset.RasterYSize > 1):
                firstXSize = subDataset.RasterXSize
                firstYSize = subDataset.RasterYSize
                firstSubDataset = subDataset
                # get projection from the first subDataset
                projection = firstSubDataset.GetProjection()

            # take bands whose sizes are same as the first band.
            if (subDataset.RasterXSize == firstXSize
                    and subDataset.RasterYSize == firstYSize):
                if projection == '':
                    projection = subDataset.GetProjection()
                if ('GEOLOCATION_X_DATASET' in fileName
                        or 'longitude' in fileName):
                    xDatasetSource = fileName
                elif ('GEOLOCATION_Y_DATASET' in fileName
                      or 'latitude' in fileName):
                    yDatasetSource = fileName
                else:
                    for iBand in range(subDataset.RasterCount):
                        subBand = subDataset.GetRasterBand(iBand + 1)
                        bandMetadata = subBand.GetMetadata_Dict()
                        if 'PixelFunctionType' in bandMetadata:
                            bandMetadata.pop('PixelFunctionType')
                        sourceBands = iBand + 1
                        # sourceBands = i*subDataset.RasterCount + iBand + 1

                        # generate src metadata
                        src = {
                            'SourceFilename': fileName,
                            'SourceBand': sourceBands
                        }
                        # set scale ratio and scale offset
                        scaleRatio = bandMetadata.get(
                            'ScaleRatio',
                            bandMetadata.get(
                                'scale', bandMetadata.get('scale_factor', '')))
                        if len(scaleRatio) > 0:
                            src['ScaleRatio'] = scaleRatio
                        scaleOffset = bandMetadata.get(
                            'ScaleOffset',
                            bandMetadata.get(
                                'offset', bandMetadata.get('add_offset', '')))
                        if len(scaleOffset) > 0:
                            src['ScaleOffset'] = scaleOffset
                        # sate DataType
                        src['DataType'] = subBand.DataType

                        # generate dst metadata
                        # get all metadata from input band
                        dst = bandMetadata
                        # set wkv and bandname
                        dst['wkv'] = bandMetadata.get('standard_name', '')
                        # first, try the name metadata
                        if 'name' in bandMetadata:
                            bandName = bandMetadata['name']
                        else:
                            # if it doesn't exist get name from NETCDF_VARNAME
                            bandName = bandMetadata.get('NETCDF_VARNAME', '')
                            if len(bandName) == 0:
                                bandName = bandMetadata.get(
                                    'dods_variable', '')

                            # remove digits added by gdal in
                            # exporting to netcdf...
                            if (len(bandName) > 0 and origin_is_nansat
                                    and fileExt == '.nc'):
                                if bandName[-1:].isdigit():
                                    bandName = bandName[:-1]
                                if bandName[-1:].isdigit():
                                    bandName = bandName[:-1]

                        # if still no bandname, create one
                        if len(bandName) == 0:
                            bandName = 'band_%03d' % iBand

                        dst['name'] = bandName

                        # remove non-necessary metadata from dst
                        for rmMetadata in rmMetadatas:
                            if rmMetadata in dst:
                                dst.pop(rmMetadata)

                        # append band with src and dst dictionaries
                        metaDict.append({'src': src, 'dst': dst})

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, firstSubDataset, srcMetadata=gdalMetadata)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # Create complex data bands from 'xxx_real' and 'xxx_imag' bands
        # using pixelfunctions
        rmBands = []
        for iBandNo in range(self.dataset.RasterCount):
            iBand = self.dataset.GetRasterBand(iBandNo + 1)
            iBandName = iBand.GetMetadataItem('name')
            # find real data band
            if iBandName.find("_real") != -1:
                realBandNo = iBandNo
                realBand = self.dataset.GetRasterBand(realBandNo + 1)
                realDtype = realBand.GetMetadataItem('DataType')
                bandName = iBandName.replace(iBandName.split('_')[-1],
                                             '')[0:-1]
                for jBandNo in range(self.dataset.RasterCount):
                    jBand = self.dataset.GetRasterBand(jBandNo + 1)
                    jBandName = jBand.GetMetadataItem('name')
                    # find an imaginary data band corresponding to the real
                    # data band and create complex data band from the bands
                    if jBandName.find(bandName + '_imag') != -1:
                        imagBandNo = jBandNo
                        imagBand = self.dataset.GetRasterBand(imagBandNo + 1)
                        imagDtype = imagBand.GetMetadataItem('DataType')
                        dst = imagBand.GetMetadata()
                        dst['name'] = bandName
                        dst['PixelFunctionType'] = 'ComplexData'
                        dst['dataType'] = 10
                        src = [{
                            'SourceFilename': fileNames[realBandNo],
                            'SourceBand': 1,
                            'DataType': realDtype
                        }, {
                            'SourceFilename': fileNames[imagBandNo],
                            'SourceBand': 1,
                            'DataType': imagDtype
                        }]
                        self._create_band(src, dst)
                        self.dataset.FlushCache()
                        rmBands.append(realBandNo + 1)
                        rmBands.append(imagBandNo + 1)

        # Delete real and imaginary bands
        if len(rmBands) != 0:
            self.delete_bands(rmBands)

        if len(projection) == 0:
            # projection was not set automatically
            # get projection from GCPProjection
            projection = geoMetadata.get('GCPProjection', '')
        if len(projection) == 0:
            # no projection was found in dataset or metadata:
            # generate WGS84 by default
            projection = NSR().wkt
        # fix problem with MET.NO files where a, b given in m and XC/YC in km
        if ('UNIT["kilometre"' in projection
                and ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]'
                in projection):
            projection = projection.replace(
                ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]', '')
        # set projection
        self.dataset.SetProjection(self.repare_projection(projection))

        # check if GCPs were added from input dataset
        gcps = firstSubDataset.GetGCPs()
        gcpProjection = firstSubDataset.GetGCPProjection()

        # if no GCPs in input dataset: try to add GCPs from metadata
        if not gcps:
            gcps = self.add_gcps_from_metadata(geoMetadata)
        # if yet no GCPs: try to add GCPs from variables
        if not gcps:
            gcps = self.add_gcps_from_variables(inputFileName)

        if gcps:
            if len(gcpProjection) == 0:
                # get GCP projection and repare
                gcpProjection = self.repare_projection(
                    geoMetadata.get('GCPProjection', ''))
            # add GCPs to dataset
            self.dataset.SetGCPs(gcps, gcpProjection)
            self.dataset.SetProjection('')
            self._remove_geotransform()

        # Find proper bands and insert GEOLOCATION ARRAY into dataset
        if len(xDatasetSource) > 0 and len(yDatasetSource) > 0:
            self.add_geolocationArray(
                GeolocationArray(xDatasetSource, yDatasetSource))

        elif not gcps:
            # if no GCPs found and not GEOLOCATION ARRAY set:
            #   Set Nansat Geotransform if it is not set automatically
            geoTransform = self.dataset.GetGeoTransform()
            if len(geoTransform) == 0:
                geoTransformStr = geoMetadata.get('GeoTransform',
                                                  '(0|1|0|0|0|0|1)')
                geoTransform = eval(geoTransformStr.replace('|', ','))
                self.dataset.SetGeoTransform(geoTransform)

        subMetadata = firstSubDataset.GetMetadata()

        ### GET START TIME from METADATA
        time_coverage_start = None
        if 'start_time' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['start_time'])
        elif 'start_date' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['start_date'])
        elif 'time_coverage_start' in gdalMetadata:
            time_coverage_start = parse_time(
                gdalMetadata['time_coverage_start'])

        ### GET END TIME from METADATA
        time_coverage_end = None
        if 'stop_time' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['stop_time'])
        elif 'stop_date' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['stop_date'])
        elif 'time_coverage_stop' in gdalMetadata:
            time_coverage_start = parse_time(
                gdalMetadata['time_coverage_stop'])
        elif 'end_time' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['end_time'])
        elif 'end_date' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['end_date'])
        elif 'time_coverage_end' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['time_coverage_end'])

        ### GET start time from time variable
        if (time_coverage_start is None and cfunitsInstalled
                and 'time#standard_name' in subMetadata
                and subMetadata['time#standard_name'] == 'time'
                and 'time#units' in subMetadata
                and 'time#calendar' in subMetadata):
            # get data from netcdf data
            ncFile = netcdf_file(inputFileName, 'r')
            timeLength = ncFile.variables['time'].shape[0]
            timeValueStart = ncFile.variables['time'][0]
            timeValueEnd = ncFile.variables['time'][-1]
            ncFile.close()
            try:
                timeDeltaStart = Units.conform(
                    timeValueStart,
                    Units(subMetadata['time#units'],
                          calendar=subMetadata['time#calendar']),
                    Units('days since 1950-01-01'))
            except ValueError:
                self.logger.error('calendar units are wrong: %s' %
                                  subMetadata['time#calendar'])
            else:
                time_coverage_start = (
                    datetime.datetime(1950, 1, 1) +
                    datetime.timedelta(float(timeDeltaStart)))

                if timeLength > 1:
                    timeDeltaEnd = Units.conform(
                        timeValueStart,
                        Units(subMetadata['time#units'],
                              calendar=subMetadata['time#calendar']),
                        Units('days since 1950-01-01'))
                else:
                    timeDeltaEnd = timeDeltaStart + 1
                time_coverage_end = (datetime.datetime(1950, 1, 1) +
                                     datetime.timedelta(float(timeDeltaEnd)))

        ## finally set values of time_coverage start and end if available
        if time_coverage_start is not None:
            self.dataset.SetMetadataItem('time_coverage_start',
                                         time_coverage_start.isoformat())
        if time_coverage_end is not None:
            self.dataset.SetMetadataItem('time_coverage_end',
                                         time_coverage_end.isoformat())

        if 'sensor' not in gdalMetadata:
            self.dataset.SetMetadataItem('sensor', 'unknown')
        if 'satellite' not in gdalMetadata:
            self.dataset.SetMetadataItem('satellite', 'unknown')
        if 'source_type' not in gdalMetadata:
            self.dataset.SetMetadataItem('source_type', 'unknown')
        if 'platform' not in gdalMetadata:
            self.dataset.SetMetadataItem('platform', 'unknown')
        if 'instrument' not in gdalMetadata:
            self.dataset.SetMetadataItem('instrument', 'unknown')

        self.logger.info('Use generic mapper - OK!')
# Exemple #8
# 0
    def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
                 rmMetadatas=('NETCDF_VARNAME', '_Unsigned',
                              'ScaleRatio', 'ScaleOffset', 'dods_variable'),
                 **kwargs):
        '''Create a VRT from a generic (netCDF-like) GDAL dataset.

        Bands are collected from the dataset (or its subdatasets) whose
        raster size matches the first subdataset with a usable grid.
        Geolocation is then attached from GCPs, NANSAT_* metadata,
        longitude/latitude subdatasets or a GeoTransform string.

        Parameters
        ----------
        inputFileName : str
            path to the input file
        gdalDataset : gdal.Dataset
            open input dataset; may expose subdatasets
        gdalMetadata : dict
            global metadata of the input dataset
        logLevel : int, optional
            logging level (kept for interface compatibility)
        rmMetadatas : sequence of str, optional
            metadata keys stripped from each band's destination metadata

        Raises
        ------
        WrongMapperError
            if gdalMetadata is empty, or no subdataset with a grid
            larger than 1x1 pixel is found
        '''
        # Strip 'NC_GLOBAL#' and 'GDAL_' prefixes from metadata keys.
        # 'NANSAT_'-prefixed keys are collected separately: their presence
        # marks a file previously exported by Nansat.
        tmpGdalMetadata = {}
        geoMetadata = {}
        origin_is_nansat = False
        if not gdalMetadata:
            raise WrongMapperError
        for key in gdalMetadata.keys():
            newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '')
            if 'NANSAT_' in newKey:
                geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key]
                origin_is_nansat = True
            else:
                tmpGdalMetadata[newKey] = gdalMetadata[key]
        gdalMetadata = tmpGdalMetadata
        fileExt = os.path.splitext(inputFileName)[1]

        # Use the dataset itself, or each of its subdatasets, as band sources
        subDatasets = gdalDataset.GetSubDatasets()
        if len(subDatasets) == 0:
            fileNames = [inputFileName]
        else:
            fileNames = [f[0] for f in subDatasets]

        # Collect band src/dst metadata for all subdatasets whose raster
        # size equals that of the first gridded subdataset
        metaDict = []
        xDatasetSource = ''
        yDatasetSource = ''
        firstXSize = 0
        firstYSize = 0
        firstSubDataset = None
        # BUGFIX: initialize projection so the comparison below cannot raise
        # NameError before the first gridded subdataset is found
        projection = ''
        for fileName in fileNames:
            subDataset = gdal.Open(fileName)
            # choose the first dataset with a grid (more than 1x1 pixel)
            if (firstXSize == 0 and firstYSize == 0 and
                    subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
                firstXSize = subDataset.RasterXSize
                firstYSize = subDataset.RasterYSize
                firstSubDataset = subDataset
                # get projection from the first subDataset
                projection = firstSubDataset.GetProjection()

            # take bands whose sizes are same as the first band
            if (subDataset.RasterXSize == firstXSize and
                    subDataset.RasterYSize == firstYSize):
                if projection == '':
                    projection = subDataset.GetProjection()
                if ('GEOLOCATION_X_DATASET' in fileName or
                        'longitude' in fileName):
                    xDatasetSource = fileName
                elif ('GEOLOCATION_Y_DATASET' in fileName or
                        'latitude' in fileName):
                    yDatasetSource = fileName
                else:
                    for iBand in range(subDataset.RasterCount):
                        subBand = subDataset.GetRasterBand(iBand + 1)
                        bandMetadata = subBand.GetMetadata_Dict()
                        # pixel functions from the source must not leak into
                        # the new band's metadata
                        bandMetadata.pop('PixelFunctionType', None)
                        sourceBands = iBand + 1

                        # generate src metadata
                        src = {'SourceFilename': fileName,
                               'SourceBand': sourceBands}
                        # scale ratio and offset may appear under several
                        # alternative key names
                        scaleRatio = bandMetadata.get(
                            'ScaleRatio',
                            bandMetadata.get(
                                'scale',
                                bandMetadata.get('scale_factor', '')))
                        if len(scaleRatio) > 0:
                            src['ScaleRatio'] = scaleRatio
                        scaleOffset = bandMetadata.get(
                            'ScaleOffset',
                            bandMetadata.get(
                                'offset',
                                bandMetadata.get('add_offset', '')))
                        if len(scaleOffset) > 0:
                            src['ScaleOffset'] = scaleOffset
                        # set DataType
                        src['DataType'] = subBand.DataType

                        # dst metadata: all metadata of the input band,
                        # plus wkv and a band name
                        dst = bandMetadata
                        dst['wkv'] = bandMetadata.get('standard_name', '')
                        # first, try the 'name' metadata item
                        bandName = bandMetadata.get('name', '')
                        if len(bandName) == 0:
                            # fall back to NETCDF_VARNAME, then dods_variable
                            bandName = bandMetadata.get('NETCDF_VARNAME', '')
                            if len(bandName) == 0:
                                bandName = bandMetadata.get('dods_variable',
                                                            '')
                            if len(bandName) > 0:
                                if origin_is_nansat and fileExt == '.nc':
                                    # remove up to two trailing digits added
                                    # by gdal when exporting to netcdf
                                    if bandName[-1:].isdigit():
                                        bandName = bandName[:-1]
                                    if bandName[-1:].isdigit():
                                        bandName = bandName[:-1]
                        dst['name'] = bandName

                        # remove non-necessary metadata from dst
                        for rmMetadata in rmMetadatas:
                            dst.pop(rmMetadata, None)

                        # append band with src and dst dictionaries
                        metaDict.append({'src': src, 'dst': dst})

        # BUGFIX: without any gridded subdataset, firstSubDataset (and
        # projection) were previously unbound and crashed below with
        # NameError; this input simply does not fit the mapper
        if firstSubDataset is None:
            raise WrongMapperError

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, firstSubDataset, srcMetadata=gdalMetadata)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # Create complex data bands from 'xxx_real' and 'xxx_imag' bands
        # using the ComplexData pixel function
        rmBands = []
        for iBandNo in range(self.dataset.RasterCount):
            iBand = self.dataset.GetRasterBand(iBandNo + 1)
            iBandName = iBand.GetMetadataItem('name')
            # find real data band
            if iBandName.find("_real") != -1:
                realBandNo = iBandNo
                realBand = self.dataset.GetRasterBand(realBandNo + 1)
                realDtype = realBand.GetMetadataItem('DataType')
                # strip the trailing '_real' suffix to get the base name
                bandName = iBandName.replace(iBandName.split('_')[-1],
                                             '')[0:-1]
                for jBandNo in range(self.dataset.RasterCount):
                    jBand = self.dataset.GetRasterBand(jBandNo + 1)
                    jBandName = jBand.GetMetadataItem('name')
                    # find the imaginary band corresponding to the real band
                    # and create a complex data band from the pair
                    if jBandName.find(bandName + '_imag') != -1:
                        imagBandNo = jBandNo
                        imagBand = self.dataset.GetRasterBand(imagBandNo + 1)
                        imagDtype = imagBand.GetMetadataItem('DataType')
                        dst = imagBand.GetMetadata()
                        dst['name'] = bandName
                        dst['PixelFunctionType'] = 'ComplexData'
                        dst['dataType'] = 10
                        src = [{'SourceFilename': fileNames[realBandNo],
                                'SourceBand': 1,
                                'DataType': realDtype},
                               {'SourceFilename': fileNames[imagBandNo],
                                'SourceBand': 1,
                                'DataType': imagDtype}]
                        self._create_band(src, dst)
                        self.dataset.FlushCache()
                        rmBands.append(realBandNo + 1)
                        rmBands.append(imagBandNo + 1)

        # delete the consumed real and imaginary source bands
        if len(rmBands) != 0:
            self.delete_bands(rmBands)

        if len(projection) == 0:
            # projection was not set automatically:
            # fall back to GCPProjection from NANSAT_ metadata
            projection = geoMetadata.get('GCPProjection', '')
        if len(projection) == 0:
            # no projection was found in dataset or metadata:
            # generate WGS84 by default
            projection = NSR().wkt
        # set projection
        self.dataset.SetProjection(self.repare_projection(projection))

        # check if GCPs were added from input dataset
        gcps = firstSubDataset.GetGCPs()
        # if no GCPs in input dataset: try to add GCPs from metadata
        if not gcps:
            gcps = self.add_gcps_from_metadata(geoMetadata)
        # if yet no GCPs: try to add GCPs from variables
        if not gcps:
            gcps = self.add_gcps_from_variables(inputFileName)

        if gcps:
            # get GCP projection and repare
            projection = self.repare_projection(
                geoMetadata.get('GCPProjection', ''))
            # add GCPs to dataset
            self.dataset.SetGCPs(gcps, projection)
            self._remove_geotransform()

        # Find proper bands and insert GEOLOCATION ARRAY into dataset
        if len(xDatasetSource) > 0 and len(yDatasetSource) > 0:
            self.add_geolocationArray(GeolocationArray(xDatasetSource,
                                                       yDatasetSource))

        elif not gcps:
            # if no GCPs found and no GEOLOCATION ARRAY set:
            #   set Nansat GeoTransform if it is not set automatically
            geoTransform = self.dataset.GetGeoTransform()
            if len(geoTransform) == 0:
                geoTransformStr = geoMetadata.get('GeoTransform',
                                                  '(0|1|0|0|0|0|1)')
                # BUGFIX: parse the '(a|b|c|...)' string explicitly instead
                # of eval() on file-supplied metadata (arbitrary code
                # execution risk); elements become floats, which
                # SetGeoTransform accepts
                geoTransform = tuple(
                    float(el) for el in
                    geoTransformStr.strip('()').split('|'))
                self.dataset.SetGeoTransform(geoTransform)

        if 'start_date' in gdalMetadata:
            try:
                startDate = parse(gdalMetadata['start_date'])
            except ValueError:
                self.logger.error('Time format is wrong in input file!')
            else:
                self._set_time(startDate)

        self.logger.warning('Use generic mapper - OK!')