Example #1
    def get_pixelsize_meters(self):
        '''Return the pixel size (deltaX, deltaY) of the domain in meters

        For projected domains the exact result, which is constant over the
        domain, is returned. For geographic (lon/lat) projections, or domains
        with no geotransform, the haversine formula is used to estimate the
        pixel size at the center of the domain.

        Returns
        -------
        deltaX, deltaY : float
            Pixel size in the X and Y directions, given in meters
        '''

        srs = osr.SpatialReference(self.vrt.dataset.GetProjection())
        if srs.IsProjected():
            if srs.GetAttrValue('unit') == 'metre':
                geoTransform = self.vrt.dataset.GetGeoTransform()
                deltaX = abs(geoTransform[1])
                deltaY = abs(geoTransform[5])
                return deltaX, deltaY

        # Estimate pixel size at the center of the domain using the haversine formula
        centerCol = round(self.vrt.dataset.RasterXSize / 2)
        centerRow = round(self.vrt.dataset.RasterYSize / 2)
        lon00, lat00 = self.transform_points([centerCol], [centerRow])
        lon01, lat01 = self.transform_points([centerCol], [centerRow + 1])
        lon10, lat10 = self.transform_points([centerCol + 1], [centerRow])

        # deltaX: distance for a one-column step; deltaY: distance for a one-row step
        deltaX = haversine(lon00, lat00, lon10, lat10)
        deltaY = haversine(lon00, lat00, lon01, lat01)
        return deltaX[0], deltaY[0]
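The method above relies on a haversine helper imported elsewhere in the module (not shown in the snippet). Below is a minimal, hypothetical stand-in with the assumed signature haversine(lon1, lat1, lon2, lat2), taking arrays of decimal degrees and returning great-circle distances in meters; the Earth radius value is an assumption, not taken from the library.

import numpy as np

# Hypothetical stand-in for the haversine() helper used above (assumption:
# the real one is imported from the library's tools module). Inputs are
# arrays of decimal degrees; output is great-circle distance in meters.
def haversine(lon1, lat1, lon2, lat2, earth_radius=6371000.0):
    lon1, lat1, lon2, lat2 = (np.radians(np.asarray(x, dtype=float))
                              for x in (lon1, lat1, lon2, lat2))
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = np.sin(dlat / 2.) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.) ** 2
    return 2. * earth_radius * np.arcsin(np.sqrt(a))

# One degree of longitude at 60N is roughly 55.6 km
print(haversine([10.0], [60.0], [11.0], [60.0]))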
Example #2
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT '''

        ThreddsBase = 'http://thredds.met.no/thredds/dodsC/myocean/siw-tac/siw-metno-svalbard/'
        # First check if mapper is called with keyword syntax:
        # filename = metno_hires_seaice:YYYYmmdd
        keywordBase = 'metno_hires_seaice'
        foundDataset = False
        if fileName[0:len(keywordBase)] == keywordBase:
            keywordTime = fileName[len(keywordBase)+1:]
            requestedTime = datetime.strptime(keywordTime, '%Y%m%d')
            # Search for nearest available file, within the closest 3 days
            for deltaDay in [0, -1, 1, -2, 2, -3, 3]:
                validTime = (requestedTime + timedelta(days=deltaDay) +
                             timedelta(hours=15))
                fileName = (ThreddsBase +
                            validTime.strftime(
                                '%Y/%m/ice_conc_svalbard_%Y%m%d1500.nc'))
                try:
                    urllib2.urlopen(fileName + '.dds')
                    foundDataset = True
                    # Data is found for this day
                    break
                except Exception:
                    # No data for this day
                    pass

            if not foundDataset:
                # No data found within +/- 3 days of the requested date
                raise WrongMapperError

        # Then check if a valid OPeNDAP URL is given
        # (or has been constructed from keyword)
        if fileName[0:len(ThreddsBase)] != ThreddsBase:
            raise WrongMapperError("Not Met.no Svalbard-ice Thredds URL")
        else:
            timestr = fileName[-15:-3]
            validTime = datetime.strptime(timestr, '%Y%m%d%H%M')

        fileName = fileName + '?ice_concentration[0][y][x]'
        srcProjection = osr.SpatialReference()
        srcProjection.ImportFromProj4('+proj=stere +lon_0=0.0 +lat_0=90 +datum=WGS84 +ellps=WGS84 +units=km +no_defs')
        srcProjection = srcProjection.ExportToWkt()

        # From thredds web, with manual shift
        srcGeotransform = (-1243.008 - 1, 1, 0, -3190.026 - 7, 0, 1)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self,
                     srcGeoTransform=srcGeotransform,
                     srcProjection=srcProjection,
                     srcRasterXSize=3812,
                     srcRasterYSize=2980)

        metaDict = [{'src': {'SourceFilename': fileName,
                             'SourceBand': 1},
                     'dst': {'name': 'sea_ice_area_fraction',
                             'wkv': 'sea_ice_area_fraction'}}]

        # Add band
        self._create_bands(metaDict)

        # Set time
        self.logger.info('Valid time: %s', str(validTime))
        self._set_time(validTime)
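For reference, the keyword form metno_hires_seaice:YYYYmmdd checked at the top of the mapper resolves to an OPeNDAP URL on the THREDDS server. The standalone sketch below reproduces that URL construction and the +/- 3 day search order; it is only an illustration of the logic (same base URL and file-name pattern as above) and does not contact the server.

from datetime import datetime, timedelta

# Sketch of the URL construction performed by the mapper above;
# prints candidate URLs in nearest-day-first order.
thredds_base = 'http://thredds.met.no/thredds/dodsC/myocean/siw-tac/siw-metno-svalbard/'
keyword = 'metno_hires_seaice:20140215'
requested_time = datetime.strptime(keyword.split(':')[1], '%Y%m%d')

for delta_day in [0, -1, 1, -2, 2, -3, 3]:
    valid_time = requested_time + timedelta(days=delta_day, hours=15)
    url = thredds_base + valid_time.strftime('%Y/%m/ice_conc_svalbard_%Y%m%d1500.nc')
    print(url)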
Example #3
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create CSKS VRT '''

        if fileName.split('/')[-1][0:4] != "CSKS":
            raise WrongMapperError

        # Get coordinates
        metadata = gdalMetadata['Estimated_Bottom_Left_Geodetic_Coordinates']
        bottom_left_lon = float(metadata.split(' ')[1])
        bottom_left_lat = float(metadata.split(' ')[0])
        metadata = gdalMetadata['Estimated_Bottom_Right_Geodetic_Coordinates']
        bottom_right_lon = float(metadata.split(' ')[1])
        bottom_right_lat = float(metadata.split(' ')[0])
        metadata = gdalMetadata['Estimated_Top_Left_Geodetic_Coordinates']
        top_left_lon = float(metadata.split(' ')[1])
        top_left_lat = float(metadata.split(' ')[0])
        metadata = gdalMetadata['Estimated_Top_Right_Geodetic_Coordinates']
        top_right_lon = float(metadata.split(' ')[1])
        top_right_lat = float(metadata.split(' ')[0])
        metadata = gdalMetadata['Scene_Centre_Geodetic_Coordinates']
        center_lon = float(metadata.split(' ')[1])
        center_lat = float(metadata.split(' ')[0])

        # Get sub-datasets
        subDatasets = gdalDataset.GetSubDatasets()

        # Get file names from dataset or subdataset
        if len(subDatasets) == 1:
            fileNames = [fileName]
        else:
            fileNames = [f[0] for f in subDatasets]

        # Drop quick-look (QLK) subdatasets; keep only full-resolution data
        fileNames = [f for f in fileNames if f[-3:] != 'QLK']

        subDataset = gdal.Open(fileNames[0])

        # Generate list of GCPs: (lon, lat, z, pixel, line) for the four
        # corners and the scene centre
        gcps = [gdal.GCP(bottom_left_lon, bottom_left_lat, 0, 0, 0),
                gdal.GCP(bottom_right_lon, bottom_right_lat, 0,
                         subDataset.RasterXSize, 0),
                gdal.GCP(top_left_lon, top_left_lat, 0, 0,
                         subDataset.RasterYSize),
                gdal.GCP(top_right_lon, top_right_lat, 0,
                         subDataset.RasterXSize, subDataset.RasterYSize),
                gdal.GCP(center_lon, center_lat, 0,
                         int(np.round(subDataset.RasterXSize / 2.)),
                         int(round(subDataset.RasterYSize / 2.)))]

        # Define the lon/lat (WGS84) projection used for the GCPs
        latlongSRS = osr.SpatialReference()
        latlongSRS.ImportFromProj4(
            "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")
        latlongSRSWKT = latlongSRS.ExportToWkt()

        # create empty VRT dataset with geolocation only
        VRT.__init__(self,
                     srcRasterXSize=subDataset.RasterXSize,
                     srcRasterYSize=subDataset.RasterYSize,
                     srcGCPs=gcps,
                     srcGCPProjection=latlongSRSWKT)

        # Use only full size "original" datasets
        for i, elem in enumerate(fileNames):
            if elem[-3:] == 'SBI':
                # Add real and imaginary raw counts as bands
                polarisation = gdalMetadata[elem[-7:-4] + '_Polarisation']
                src = {'SourceFilename': elem,
                       'SourceBand': 1,
                       'DataType': gdal.GDT_Int16}
                dst = {'dataType': gdal.GDT_Float32,
                       'name': 'RawCounts_%s_real' % polarisation}
                self._create_band(src, dst)

                src = {'SourceFilename': elem,
                       'SourceBand': 2,
                       'DataType': gdal.GDT_Int16}
                dst = {'dataType': gdal.GDT_Float32,
                       'name': 'RawCounts_%s_imaginary' % polarisation}
                self._create_band(src, dst)

                self.dataset.FlushCache()

        for i, elem in enumerate(fileNames):
            if elem[-3:] == 'SBI':
                # Calculate sigma0 scaling factor
                Rref = float(gdalMetadata['Reference_Slant_Range'])
                Rexp = float(gdalMetadata['Reference_Slant_Range_Exponent'])
                alphaRef = float(gdalMetadata['Reference_Incidence_Angle'])
                F = float(gdalMetadata['Rescaling_Factor'])
                K = float(gdalMetadata[elem[-7:-4] + '_Calibration_Constant'])
                Ftot = Rref**(2. * Rexp)
                Ftot *= np.sin(alphaRef * np.pi / 180.0)
                Ftot /= F**2.
                Ftot /= K

                polarisation = gdalMetadata[elem[-7:-4] + '_Polarisation']
                src = [{'SourceFilename': self.fileName,
                        'DataType': gdal.GDT_Float32,
                        'SourceBand': 2 * i + 1,
                        'ScaleRatio': np.sqrt(Ftot)},
                       {'SourceFilename': self.fileName,
                        'DataType': gdal.GDT_Float32,
                        'SourceBand': 2 * i + 2,
                        'ScaleRatio': np.sqrt(Ftot)}]
                dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                       'PixelFunctionType': 'RawcountsToSigma0_CosmoSkymed_SBI',
                       'polarisation': polarisation,
                       'name': 'sigma0_%s' % polarisation,
                       'SatelliteID': gdalMetadata['Satellite_ID'],
                       'dataType': gdal.GDT_Float32}
                # Note: the pass direction is not available in the metadata

                self._create_band(src, dst)

                self.dataset.FlushCache()
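The ScaleRatio of sqrt(Ftot) applied to both raw-count bands, combined with the RawcountsToSigma0_CosmoSkymed_SBI pixel function, amounts to sigma0 = Ftot * (real**2 + imag**2), assuming the pixel function forms the intensity as the sum of squares of the scaled real and imaginary parts. The standalone sketch below illustrates that arithmetic with made-up placeholder values for the calibration metadata.

import numpy as np

# Illustration of the sigma0 scaling above; all metadata values are
# placeholders, not real COSMO-SkyMed calibration constants.
Rref = 600000.0    # Reference_Slant_Range
Rexp = 1.0         # Reference_Slant_Range_Exponent
alphaRef = 30.0    # Reference_Incidence_Angle [degrees]
F = 1.0            # Rescaling_Factor
K = 1.0e6          # <pol>_Calibration_Constant

Ftot = Rref ** (2. * Rexp) * np.sin(alphaRef * np.pi / 180.0) / (F ** 2. * K)

# A few complex raw-count samples (real and imaginary parts)
real = np.array([120., -85., 40.])
imag = np.array([60., 15., -95.])

# ScaleRatio = sqrt(Ftot) is applied to each band; summing the squares of the
# scaled parts (the assumed pixel-function behaviour) gives sigma0
sigma0 = (np.sqrt(Ftot) * real) ** 2 + (np.sqrt(Ftot) * imag) ** 2
print(sigma0)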