コード例 #1
0
 def test_from_filenames(self):
     """Geolocation.from_filenames must wire dataset names and default grid keys."""
     longitude, latitude = np.meshgrid(np.linspace(0, 5, 10),
                                       np.linspace(10, 20, 30))
     lon_vrt = VRT.from_array(longitude)
     lat_vrt = VRT.from_array(latitude)

     geoloc = Geolocation.from_filenames(lon_vrt.filename, lat_vrt.filename)

     self.assertIsInstance(geoloc, Geolocation)
     # expected GEOLOCATION metadata entries, checked in a fixed order
     for key, value in (('X_DATASET', lon_vrt.filename),
                        ('Y_DATASET', lat_vrt.filename),
                        ('LINE_OFFSET', '0'),
                        ('LINE_STEP', '1'),
                        ('PIXEL_OFFSET', '0'),
                        ('PIXEL_STEP', '1')):
         self.assertEqual(geoloc.data[key], value)
コード例 #2
0
    def test_init(self):
        """Constructor must populate GEOLOCATION metadata with default values."""
        longitude, latitude = np.meshgrid(np.linspace(0, 5, 10),
                                          np.linspace(10, 20, 30))
        lon_vrt = VRT.from_array(longitude)
        lat_vrt = VRT.from_array(latitude)

        geoloc = Geolocation(lon_vrt, lat_vrt)

        self.assertIsInstance(geoloc, Geolocation)
        for key, value in (('X_DATASET', lon_vrt.filename),
                           ('Y_DATASET', lat_vrt.filename),
                           ('LINE_OFFSET', '0'),
                           ('LINE_STEP', '1'),
                           ('PIXEL_OFFSET', '0'),
                           ('PIXEL_STEP', '1')):
            self.assertEqual(geoloc.data[key], value)
        # default SRS must import cleanly (status 0) and be plain WGS84 lon/lat
        srs = osr.SpatialReference()
        self.assertEqual(srs.ImportFromWkt(geoloc.data['SRS']), 0)
        self.assertEqual(srs.ExportToProj4().strip(),
                         '+proj=longlat +datum=WGS84 +no_defs')
        self.assertEqual(geoloc.data['X_BAND'], '1')
        self.assertEqual(geoloc.data['Y_BAND'], '1')
        self.assertEqual(geoloc.x_vrt, lon_vrt)
        self.assertEqual(geoloc.y_vrt, lat_vrt)
コード例 #3
0
File: envisat.py — Project: julioegren/nansat
    def add_geolocation_from_ads(self, gdalDataset, zoomSize=500, step=1):
        """Attach a geolocation domain built from the ADS lon/lat grids.

        Fetches zoomed longitude/latitude VRTs with get_ads_vrts, wraps
        them into a Geolocation object and stores it in the metadata.

        Parameters
        ----------
        gdalDataset: GDAL Dataset
            input dataset
        zoomSize: int, optional, 500
            size, to which the ADS array will be zoomed using scipy
            array of this size will be stored in memory
        step: int
            step of pixel and line in GeolocationArrays. lat/lon grids are
            generated at that step

        Modifies
        --------
        Adds Geolocation Array metadata
        """
        # first element is the longitude VRT, second the latitude VRT
        lonlat_vrts = self.get_ads_vrts(gdalDataset, self.lonlatNames,
                                        zoomSize, step)

        geolocation = Geolocation(x_vrt=lonlat_vrts[0],
                                  y_vrt=lonlat_vrts[1],
                                  x_band=1,
                                  y_band=1,
                                  srs=gdalDataset.GetGCPProjection(),
                                  line_offset=0,
                                  line_step=step,
                                  pixel_offset=0,
                                  pixel_step=step)
        # register the geolocation domain on the dataset metadata
        self._add_geolocation(geolocation)
コード例 #4
0
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
        """Create VRT from a raw binary AVHRR file.

        Reads satellite id, acquisition time, scanline counters and the
        channel-3A/3B flag directly from the binary header, builds zoomed
        lon/lat geolocation VRTs, and creates albedo / brightness
        temperature bands.  Relies on module-level constants
        (headerLength, recordLength, imageOffset, dataFormats).

        Parameters
        ----------
        filename : str
            path to the input binary file
        gdalDataset : GDAL Dataset
            unused here; kept for the mapper interface
        gdalMetadata : dict
            unused here; kept for the mapper interface

        Raises
        ------
        WrongMapperError
            if the file cannot be opened or its header cannot be parsed
        """
        ########################################
        # Read metadata from binary file
        ########################################
        try:
            fp = open(filename, 'rb')
        except IOError:
            raise WrongMapperError
        fp.seek(24)
        try:
            satID = int(struct.unpack('<l', fp.read(4))[0])
        except Exception:
            # any unpack failure means this is not our format
            raise WrongMapperError

        ##################
        # Read time
        ##################
        fp.seek(44)
        year = int(struct.unpack('<l', fp.read(4))[0])
        dayofyear = int(struct.unpack('<l', fp.read(4))[0])
        millisecondsOfDay = int(struct.unpack('<l', fp.read(4))[0])
        try:
            time = (datetime.datetime(year, 1, 1) + datetime.timedelta(
                dayofyear - 1, milliseconds=millisecondsOfDay))
        except Exception:
            # implausible date fields => not this format
            raise WrongMapperError

        # NB: the three counters are read sequentially from offset 72
        fp.seek(72)
        numScanLines = int(struct.unpack('<l', fp.read(4))[0])
        missingScanLines = int(struct.unpack('<l', fp.read(4))[0])
        numCalibratedScanLines = int(struct.unpack('<l', fp.read(4))[0])
        if missingScanLines != 0:
            print('WARNING: Missing scanlines: ' + str(missingScanLines))

        fp.seek(88)
        dataFormatNum = int(struct.unpack('<l', fp.read(4))[0])
        # the lookup also validates the format number (KeyError if unknown)
        dataFormat = dataFormats[dataFormatNum]

        # Determine if we have channel 3A (daytime) or channel 3B (nighttime)
        def int2bitstring(s):
            # recursive int -> binary string conversion
            return str(s) if s <= 1 else int2bitstring(s >> 1) + str(s & 1)

        fp.seek(headerLength + 20)
        scanlinebitFirstline = int(struct.unpack('<L', fp.read(4))[0])
        fp.seek(headerLength + recordLength * (numCalibratedScanLines - 2) +
                20)
        scanlinebitLastline = int(struct.unpack('<L', fp.read(4))[0])

        # lowest bit == 0 means channel 3A (daytime)
        startsWith3A = int2bitstring(scanlinebitFirstline)[-1] == '0'
        endsWith3A = int2bitstring(scanlinebitLastline)[-1] == '0'

        if startsWith3A != endsWith3A:
            print('############################################')
            print('WARNING: channel 3 switches ')
            print('between daytime and nighttime (3A <-> 3B)')
            print('###########################################')

        ###########################
        # Make Geolocation Arrays
        ###########################
        srcRasterYSize = numCalibratedScanLines

        # Making VRT with raw (unscaled) lon and lat
        # (smaller bands than full dataset)
        self.band_vrts = {
            'RawGeolocVRT': VRT(srcRasterXSize=51,
                                srcRasterYSize=srcRasterYSize)
        }
        RawGeolocMetaDict = []
        for lonlatNo in range(1, 3):
            RawGeolocMetaDict.append({
                'src': {
                    'SourceFilename': filename,
                    'SourceBand': 0,
                    'SourceType': "RawRasterBand",
                    'DataType': gdal.GDT_Int32,
                    # lon and lat are interleaved 4-byte ints at offset 676
                    'ImageOffset': (headerLength + 676 + (lonlatNo - 1) * 4),
                    'PixelOffset': 8,
                    'LineOffset': recordLength,
                    'ByteOrder': 'LSB'
                },
                'dst': {}
            })

        self.band_vrts['RawGeolocVRT'].create_bands(RawGeolocMetaDict)

        # Make derived GeolocVRT with scaled lon and lat
        self.band_vrts['GeolocVRT'] = VRT(srcRasterXSize=51,
                                          srcRasterYSize=srcRasterYSize)
        GeolocMetaDict = []
        for lonlatNo in range(1, 3):
            GeolocMetaDict.append({
                'src': {
                    'SourceFilename':
                    (self.band_vrts['RawGeolocVRT'].filename),
                    'SourceBand': lonlatNo,
                    # raw values are degrees * 10000
                    'ScaleRatio': 0.0001,
                    'ScaleOffset': 0,
                    'DataType': gdal.GDT_Int32
                },
                'dst': {}
            })

        self.band_vrts['GeolocVRT'].create_bands(GeolocMetaDict)

        GeolocObject = Geolocation(
            x_vrt=self.band_vrts['GeolocVRT'],
            y_vrt=self.band_vrts['GeolocVRT'],
            x_band=2,
            y_band=1,  # x = lon, y = lat
            line_offset=0,
            pixel_offset=25,
            line_step=1,
            pixel_step=40)

        #######################
        # Initialize dataset
        #######################
        # create empty VRT dataset with geolocation only
        # (from Geolocation Array)
        # NOTE(review): tests elsewhere access Geolocation via '.data' --
        # confirm '.d' is the right attribute for this nansat version
        self._init_from_dataset_params(
            2048, numCalibratedScanLines,
            (0, 1, 0, numCalibratedScanLines, 0, -1), GeolocObject.d['SRS'])
        self._add_geolocation(GeolocObject)

        ##################
        # Create bands
        ##################
        self.band_vrts['RawBandsVRT'] = VRT(
            srcRasterXSize=2048, srcRasterYSize=numCalibratedScanLines)
        RawMetaDict = []
        metaDict = []

        # channel 3 is 1.6 um (3A, daytime) or 3.7 um (3B, nighttime)
        centralWavelengths = [0.63, 0.86, np.nan, 10.8, 12.0]
        if startsWith3A:
            centralWavelengths[2] = 1.6
            firstIRband = 4
        else:
            centralWavelengths[2] = 3.7
            firstIRband = 3

        for bandNo in range(1, 6):
            RawMetaDict.append({
                'src': {
                    'SourceFilename': filename,
                    'SourceBand': 0,
                    'SourceType': "RawRasterBand",
                    'dataType': gdal.GDT_UInt16,
                    'ImageOffset': imageOffset + (bandNo - 1) * 2,
                    'PixelOffset': 10,
                    'LineOffset': recordLength,
                    'ByteOrder': 'LSB'
                },
                'dst': {
                    'dataType': gdal.GDT_UInt16
                }
            })

            # visible channels are albedo, IR channels brightness temperature
            if bandNo < firstIRband:
                wkv = 'albedo'
                minmax = '0 60'
            else:
                wkv = 'brightness_temperature'
                minmax = '290 210'

            metaDict.append({
                'src': {
                    'SourceFilename': (self.band_vrts['RawBandsVRT'].filename),
                    'SourceBand': bandNo,
                    'ScaleRatio': 0.01,
                    'ScaleOffset': 0,
                    'DataType': gdal.GDT_UInt16
                },
                'dst': {
                    'originalFilename': filename,
                    'dataType': gdal.GDT_Float32,
                    'wkv': wkv,
                    'colormap': 'gray',
                    'wavelength': centralWavelengths[bandNo - 1],
                    'minmax': minmax
                }
            })

        # Add temperature difference between ch3 and ch 4 as pixelfunction
        if not startsWith3A:  # Only if ch3 is IR (nighttime)
            metaDict.append({
                'src': [{
                    'SourceFilename': (self.band_vrts['RawBandsVRT'].filename),
                    'ScaleRatio': 0.01,
                    'ScaleOffset': 0,
                    'SourceBand': 4
                }, {
                    'SourceFilename': (self.band_vrts['RawBandsVRT'].filename),
                    'ScaleRatio': 0.01,
                    'ScaleOffset': 0,
                    'SourceBand': 3
                }],
                'dst': {
                    'PixelFunctionType': 'diff',
                    'originalFilename': filename,
                    'dataType': gdal.GDT_Float32,
                    'name': 'ch4-ch3',
                    'short_name': 'ch4-ch3',
                    'long_name': 'AVHRR ch4 - ch3 temperature difference',
                    'colormap': 'gray',
                    'units': 'kelvin',
                    'minmax': '-3 3'
                }
            })

        # BUGFIX: was 'self.self.band_vrts...', which raises AttributeError
        self.band_vrts['RawBandsVRT'].create_bands(RawMetaDict)
        self.create_bands(metaDict)

        globalMetadata = {}
        globalMetadata['satID'] = str(satID)
        globalMetadata['daytime'] = str(int(startsWith3A))
        self.dataset.SetMetadata(globalMetadata)

        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start', time.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end', time.isoformat())

        return
コード例 #5
0
 def test_from_dataset(self):
     """from_dataset should build a Geolocation from an open GDAL dataset."""
     dataset = gdal.Open(self.test_file)
     geoloc = Geolocation.from_dataset(dataset)
     self.assertIsInstance(geoloc, Geolocation)
コード例 #6
0
    def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
                 rmMetadatas=None,
                 **kwargs):
        """Create VRT from a generic (NetCDF/CF-style) GDAL dataset.

        Collects bands of equal raster size from the dataset (or its
        subdatasets), transfers their metadata, sets projection, GCPs or
        a geolocation array, and time coverage metadata.

        Parameters
        ----------
        inputFileName : str
            path to the input file
        gdalDataset : GDAL Dataset
            open dataset whose (sub)datasets are mapped to bands
        gdalMetadata : dict
            global metadata of the dataset; must be non-empty
        logLevel : int
            unused here; kept for the mapper interface
        rmMetadatas : list of str, optional
            metadata keys dropped from each band's destination metadata;
            defaults to ['NETCDF_VARNAME', '_Unsigned', 'ScaleRatio',
            'ScaleOffset', 'dods_variable']

        Raises
        ------
        WrongMapperError
            if gdalMetadata is empty
        """
        # avoid the mutable-default-argument pitfall
        if rmMetadatas is None:
            rmMetadatas = ['NETCDF_VARNAME', '_Unsigned',
                           'ScaleRatio', 'ScaleOffset', 'dods_variable']
        # Remove 'NC_GLOBAL#' and 'GDAL_' and 'NANSAT_'
        # from keys in gdalDataset
        tmpGdalMetadata = {}
        geoMetadata = {}
        origin_is_nansat = False
        if not gdalMetadata:
            raise WrongMapperError
        for key in gdalMetadata.keys():
            newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '')
            if 'NANSAT_' in newKey:
                # NANSAT_-prefixed keys carry geo-metadata written by Nansat
                geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key]
                origin_is_nansat = True
            else:
                tmpGdalMetadata[newKey] = gdalMetadata[key]
        gdalMetadata = tmpGdalMetadata
        fileExt = os.path.splitext(inputFileName)[1]

        # Get file names from dataset or subdataset
        subDatasets = gdalDataset.GetSubDatasets()
        if len(subDatasets) == 0:
            filenames = [inputFileName]
        else:
            filenames = [f[0] for f in subDatasets]

        # add bands with metadata and corresponding values to the empty VRT
        metaDict = []
        xDatasetSource = ''
        yDatasetSource = ''
        firstXSize = 0
        firstYSize = 0
        # guard: stays empty if no gridded subdataset is found
        projection = ''
        for filename in filenames:
            subDataset = gdal.Open(filename)
            # choose the first dataset with a grid
            if (firstXSize == 0 and firstYSize == 0 and
                    subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
                firstXSize = subDataset.RasterXSize
                firstYSize = subDataset.RasterYSize
                firstSubDataset = subDataset
                # get projection from the first subDataset
                projection = firstSubDataset.GetProjection()

            # take bands whose sizes are same as the first band.
            if (subDataset.RasterXSize == firstXSize and
                    subDataset.RasterYSize == firstYSize):
                if projection == '':
                    projection = subDataset.GetProjection()
                if ('GEOLOCATION_X_DATASET' in filename or
                        'longitude' in filename):
                    xDatasetSource = filename
                elif ('GEOLOCATION_Y_DATASET' in filename or
                        'latitude' in filename):
                    yDatasetSource = filename
                else:
                    for iBand in range(subDataset.RasterCount):
                        subBand = subDataset.GetRasterBand(iBand+1)
                        bandMetadata = subBand.GetMetadata_Dict()
                        if 'PixelFunctionType' in bandMetadata:
                            bandMetadata.pop('PixelFunctionType')
                        sourceBands = iBand + 1

                        # generate src metadata
                        src = {'SourceFilename': filename,
                               'SourceBand': sourceBands}
                        # set scale ratio and scale offset
                        scaleRatio = bandMetadata.get(
                            'ScaleRatio',
                            bandMetadata.get(
                                'scale',
                                bandMetadata.get('scale_factor', '')))
                        if len(scaleRatio) > 0:
                            src['ScaleRatio'] = scaleRatio
                        scaleOffset = bandMetadata.get(
                            'ScaleOffset',
                            bandMetadata.get(
                                'offset',
                                bandMetadata.get(
                                    'add_offset', '')))
                        if len(scaleOffset) > 0:
                            src['ScaleOffset'] = scaleOffset
                        # set DataType
                        src['DataType'] = subBand.DataType

                        # generate dst metadata
                        # get all metadata from input band
                        dst = bandMetadata
                        # set wkv and bandname
                        dst['wkv'] = bandMetadata.get('standard_name', '')
                        # first, try the name metadata
                        if 'name' in bandMetadata:
                            bandName = bandMetadata['name']
                        else:
                            # if it doesn't exist get name from NETCDF_VARNAME
                            bandName = bandMetadata.get('NETCDF_VARNAME', '')
                            if len(bandName) == 0:
                                bandName = bandMetadata.get(
                                            'dods_variable', ''
                                            )

                            # remove digits added by gdal in
                            # exporting to netcdf...
                            if (len(bandName) > 0 and origin_is_nansat and
                                    fileExt == '.nc'):
                                if bandName[-1:].isdigit():
                                    bandName = bandName[:-1]
                                if bandName[-1:].isdigit():
                                    bandName = bandName[:-1]

                        # if still no bandname, create one
                        if len(bandName) == 0:
                            bandName = 'band_%03d' % iBand

                        dst['name'] = bandName

                        # remove non-necessary metadata from dst
                        for rmMetadata in rmMetadatas:
                            if rmMetadata in dst:
                                dst.pop(rmMetadata)

                        # append band with src and dst dictionaries
                        metaDict.append({'src': src, 'dst': dst})

        # create empty VRT dataset with geolocation only
        # NOTE(review): firstSubDataset is unbound if no gridded subdataset
        # was found above -- that case would raise here, as before
        self._init_from_gdal_dataset(firstSubDataset, metadata=gdalMetadata)

        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)

        self._create_complex_bands(filenames)

        if len(projection) == 0:
            # projection was not set automatically
            # get projection from GCPProjection
            projection = geoMetadata.get('GCPProjection', '')
        if len(projection) == 0:
            # no projection was found in dataset or metadata:
            # generate WGS84 by default
            projection = NSR().wkt
        # fix problem with MET.NO files where a, b given in m and XC/YC in km
        if ('UNIT["kilometre"' in projection and
            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]' in
                projection):
            projection = projection.replace(
                ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]',
                '')
        # set projection
        self.dataset.SetProjection(self.repare_projection(projection))

        # check if GCPs were added from input dataset
        gcps = firstSubDataset.GetGCPs()
        gcpProjection = firstSubDataset.GetGCPProjection()

        # if no GCPs in input dataset: try to add GCPs from metadata
        if not gcps:
            gcps = self.add_gcps_from_metadata(geoMetadata)
        # if yet no GCPs: try to add GCPs from variables
        if not gcps:
            gcps = self.add_gcps_from_variables(inputFileName)

        if gcps:
            if len(gcpProjection) == 0:
                # get GCP projection and repare
                gcpProjection = self.repare_projection(
                    geoMetadata.get('GCPProjection', ''))
            # add GCPs to dataset
            self.dataset.SetGCPs(gcps, gcpProjection)
            self.dataset.SetProjection('')
            self._remove_geotransform()

        # Find proper bands and insert GEOLOCATION ARRAY into dataset
        if len(xDatasetSource) > 0 and len(yDatasetSource) > 0:
            self._add_geolocation(
                Geolocation.from_filenames(xDatasetSource, yDatasetSource))

        elif not gcps:
            # if no GCPs found and not GEOLOCATION ARRAY set:
            #   Set Nansat Geotransform if it is not set automatically
            geoTransform = self.dataset.GetGeoTransform()
            if len(geoTransform) == 0:
                geoTransformStr = geoMetadata.get('GeoTransform',
                                                  '(0|1|0|0|0|0|1)')
                # WARNING: eval of metadata text is unsafe on untrusted
                # files; consider ast.literal_eval here
                geoTransform = eval(geoTransformStr.replace('|', ','))
                self.dataset.SetGeoTransform(geoTransform)

        subMetadata = firstSubDataset.GetMetadata()

        ### GET START TIME from METADATA
        time_coverage_start = None
        if 'start_time' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['start_time'])
        elif 'start_date' in gdalMetadata:
            time_coverage_start = parse_time(gdalMetadata['start_date'])
        elif 'time_coverage_start' in gdalMetadata:
            time_coverage_start = parse_time(
                                        gdalMetadata['time_coverage_start'])

        ### GET END TIME from METADATA
        time_coverage_end = None
        if 'stop_time' in gdalMetadata:
            time_coverage_end = parse_time(gdalMetadata['stop_time'])
        elif 'stop_date' in gdalMetadata:
            time_coverage_end = parse_time(gdalMetadata['stop_date'])
        elif 'time_coverage_stop' in gdalMetadata:
            time_coverage_end = parse_time(
                                        gdalMetadata['time_coverage_stop'])
        elif 'end_time' in gdalMetadata:
            time_coverage_end = parse_time(gdalMetadata['end_time'])
        elif 'end_date' in gdalMetadata:
            time_coverage_end = parse_time(gdalMetadata['end_date'])
        elif 'time_coverage_end' in gdalMetadata:
            time_coverage_end = parse_time(
                                        gdalMetadata['time_coverage_end'])

        ### GET start time from time variable
        if (time_coverage_start is None and 'time#standard_name' in subMetadata and
                 subMetadata['time#standard_name'] == 'time' and 'time#units' in subMetadata):
            # get data from netcdf data
            ncFile = Dataset(inputFileName, 'r')
            time_var = ncFile.variables['time']
            t0 = time_var[0]
            if len(time_var) == 1:
                t1 = t0 + 1
            else:
                t1 = time_var[-1]

            time_units_start = parse(time_var.units, fuzzy=True, ignoretz=True)
            time_units_to_seconds = {'second' : 1.0,
                                     'hour' : 60 * 60.0,
                                     'day' : 24 * 60 * 60.0}
            # default to seconds if the units string has no known unit name
            # (previously 'factor' could be referenced unbound)
            factor = 1.0
            for key in time_units_to_seconds:
                if key in time_var.units:
                    factor = time_units_to_seconds[key]
                    break

            time_coverage_start = time_units_start + datetime.timedelta(seconds=t0 * factor)
            time_coverage_end = time_units_start + datetime.timedelta(seconds=t1 * factor)

        ## finally set values of time_coverage start and end if available
        if time_coverage_start is not None:
            self.dataset.SetMetadataItem('time_coverage_start',
                                    time_coverage_start.isoformat())
        if time_coverage_end is not None:
            self.dataset.SetMetadataItem('time_coverage_end',
                                    time_coverage_end.isoformat())

        # fill in defaults for mandatory global metadata items
        for item in ('sensor', 'satellite', 'source_type', 'platform',
                     'instrument'):
            if item not in gdalMetadata:
                self.dataset.SetMetadataItem(item, 'unknown')

        self.logger.info('Use generic mapper - OK!')
コード例 #7
0
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
        """Create VRT from a raw binary AVHRR (KLM/Metop) file.

        Reads satellite number, data format, scanline counters and time
        from the binary header, builds zoomed lon/lat geolocation VRTs and
        adds five raw-count bands.  Relies on module-level constants
        (headerLength, recordLength, imageOffset,
        dataSetQualityIndicatorOffset, satIDs, dataFormats).

        Parameters
        ----------
        filename : str
            path to the input binary file
        gdalDataset : GDAL Dataset
            unused here; kept for the mapper interface
        gdalMetadata : dict
            unused here; kept for the mapper interface

        Raises
        ------
        WrongMapperError
            if the file cannot be opened, the header cannot be parsed, or
            the satellite number / data format is unknown
        """
        ########################################
        # Read metadata from binary file
        ########################################
        try:
            fp = open(filename, 'rb')
        except IOError:
            raise WrongMapperError
        fp.seek(72)

        try:
            satNum = int(struct.unpack('<H', fp.read(2))[0])
        except Exception:
            # any unpack failure means this is not our format
            raise WrongMapperError

        # only known satellite numbers are supported
        if satNum not in satIDs:
            raise WrongMapperError

        fp.seek(76)
        dataFormatNum = int(struct.unpack('<H', fp.read(2))[0])
        # only known data formats are supported
        if dataFormatNum not in dataFormats:
            raise WrongMapperError

        # NB: the three counters are read sequentially from this offset
        fp.seek(dataSetQualityIndicatorOffset + 14)
        numScanLines = int(struct.unpack('<H', fp.read(2))[0])
        numCalibratedScanLines = int(struct.unpack('<H', fp.read(2))[0])
        missingScanLines = int(struct.unpack('<H', fp.read(2))[0])
        if missingScanLines != 0:
            print('WARNING: Missing scanlines: ' + str(missingScanLines))

        ##################
        # Read time
        ##################
        fp.seek(84)
        year = int(struct.unpack('<H', fp.read(2))[0])
        dayofyear = int(struct.unpack('<H', fp.read(2))[0])
        millisecondsOfDay = int(struct.unpack('<l', fp.read(4))[0])
        time = (
            datetime.datetime(year, 1, 1) +
            datetime.timedelta(dayofyear - 1, milliseconds=millisecondsOfDay))

        # NOTE: per-scanline visible/IR calibration is not implemented;
        # raw counts are exported below.  (A sketch of the calibration
        # code, reading coefficients at record offsets 48 and 228, was
        # previously kept here as commented-out code -- see VCS history.)

        ###########################
        # Make Geolocation Arrays
        ###########################
        srcRasterYSize = numCalibratedScanLines

        # Making VRT with raw (unscaled) lon and lat
        # (smaller bands than full dataset)
        self.band_vrts = {
            'RawGeolocVRT': VRT(srcRasterXSize=51,
                                srcRasterYSize=srcRasterYSize)
        }
        RawGeolocMetaDict = []
        for lonlatNo in range(1, 3):
            RawGeolocMetaDict.append({
                'src': {
                    'SourceFilename': filename,
                    'SourceBand': 0,
                    'SourceType': "RawRasterBand",
                    'DataType': gdal.GDT_Int32,
                    # lon and lat are interleaved 4-byte ints at offset 640
                    'ImageOffset': (headerLength + 640 + (lonlatNo - 1) * 4),
                    'PixelOffset': 8,
                    'LineOffset': recordLength,
                    'ByteOrder': 'LSB'
                },
                'dst': {}
            })

        self.band_vrts['RawGeolocVRT'].create_bands(RawGeolocMetaDict)

        # Make derived GeolocVRT with scaled lon and lat
        self.band_vrts['GeolocVRT'] = VRT(srcRasterXSize=51,
                                          srcRasterYSize=srcRasterYSize)
        GeolocMetaDict = []
        for lonlatNo in range(1, 3):
            GeolocMetaDict.append({
                'src': {
                    'SourceFilename':
                    (self.band_vrts['RawGeolocVRT'].filename),
                    'SourceBand': lonlatNo,
                    # raw values are degrees * 10000
                    'ScaleRatio': 0.0001,
                    'ScaleOffset': 0,
                    'DataType': gdal.GDT_Int32
                },
                'dst': {}
            })

        self.band_vrts['GeolocVRT'].create_bands(GeolocMetaDict)

        # BUGFIX: keywords were 'x_vRT'/'y_vRT', which do not match the
        # Geolocation signature (x_vrt/y_vrt) and raise TypeError
        GeolocObject = Geolocation(
            x_vrt=self.band_vrts['GeolocVRT'],
            y_vrt=self.band_vrts['GeolocVRT'],
            x_band=2,
            y_band=1,  # x = lon, y = lat
            line_offset=0,
            pixel_offset=25,
            line_step=1,
            pixel_step=40)

        #######################
        # Initialize dataset
        #######################
        # create empty VRT dataset with geolocation only
        # (from Geolocation Array)
        # NOTE(review): tests elsewhere access Geolocation via '.data' --
        # confirm '.d' is the right attribute for this nansat version
        self._init_from_dataset_params(
            2048, numCalibratedScanLines,
            (0, 1, 0, numCalibratedScanLines, 0, -1), GeolocObject.d['SRS'])
        self._add_geolocation(GeolocObject)

        ##################
        # Create bands
        ##################
        metaDict = []
        # per-channel central wavelength and display range (1-based band no);
        # channel 3 is 1.6 um (3A, daytime) or 3.7 um (3B, nighttime)
        channel_props = {
            1: {'wavelength': 0.63, 'minmax': '0 700'},
            2: {'wavelength': 0.86, 'minmax': '0 700'},
            3: {'wavelength': '1.6 or 3.7 mum', 'minmax': '0 800'},
            4: {'wavelength': 10.8, 'minmax': '400 1000'},
            5: {'wavelength': 12.0, 'minmax': '400 1000'},
        }

        for bandNo in range(1, 6):
            metaDict.append({
                'src': {
                    'SourceFilename': filename,
                    'SourceBand': 0,
                    'SourceType': "RawRasterBand",
                    'dataType': gdal.GDT_UInt16,
                    'ImageOffset': imageOffset + (bandNo - 1) * 2,
                    'PixelOffset': 10,
                    'LineOffset': recordLength,
                    'ByteOrder': 'LSB'
                },
                'dst': {
                    'dataType': gdal.GDT_UInt16,
                    'wkv': 'raw_counts',
                    'colormap': 'gray',
                    'wavelength': channel_props[bandNo]['wavelength'],
                    'minmax': channel_props[bandNo]['minmax'],
                    'unit': "1"
                }
            })

        self.create_bands(metaDict)

        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start', time.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end', time.isoformat())

        return
コード例 #8
0
 def test_from_dataset(self):
     """A Geolocation instance is returned for a dataset opened from file."""
     opened = gdal.Open(self.test_file)
     result = Geolocation.from_dataset(opened)
     self.assertIsInstance(result, Geolocation)