Example #1
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        '''
        Mapping for the global 30 arc-second elevation (see
        https://lta.cr.usgs.gov/GTOPO30).

        Parameters
        ----------
        fileName : string
            Either the name of a gtopo30 DEM file, or <path>/gtopo30.vrt. The
            latter is an aggregation of the DEM-files available with gtopo30
            except the Antarctic one, which is in polarstereographic
            projection. You can create your own gtopo30.vrt file with gdal:
            > gdalbuildvrt gtopo30.vrt [E,W]*.DEM
        '''

        bn = os.path.basename(fileName)
        if bn != 'gtopo30.vrt' and os.path.splitext(bn)[1] != '.DEM':
            raise WrongMapperError

        metaDict = [{'src': {'SourceFilename': fileName, 'SourceBand':  1},
                     'dst': {'wkv': 'height_above_reference_ellipsoid'}}]

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)
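For reference, the same aggregated gtopo30.vrt can be built from Python; a minimal sketch, assuming GDAL >= 2.1 and that the DEM tiles sit in the working directory:

import glob
from osgeo import gdal

# collect the gtopo30 tiles (the [EW]* pattern skips the Antarctic
# polar-stereographic tile) and aggregate them into one virtual mosaic
dem_tiles = sorted(glob.glob('[EW]*.DEM'))
vrt_ds = gdal.BuildVRT('gtopo30.vrt', dem_tiles)
vrt_ds = None  # close to flush the VRT to disk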
Example #2
    def test_copy_vrt_pixel_func(self):
        vrt1 = VRT()
        vrt1_xml = '''
        <VRTDataset rasterXSize="200" rasterYSize="200">
            <VRTRasterBand dataType="Byte" band="1">
                <ComplexSource>
                    <SourceFilename relativeToVRT="0">%s</SourceFilename>
                    <SourceBand>1</SourceBand>
                    <SourceProperties RasterXSize="200" RasterYSize="200" DataType="Byte" BlockXSize="200" BlockYSize="13" />
                    <SrcRect xOff="0" yOff="0" xSize="200" ySize="200" />
                    <DstRect xOff="0" yOff="0" xSize="200" ySize="200" />
                </ComplexSource>
            </VRTRasterBand>
            <VRTRasterBand dataType="Float32" band="2" subClass="VRTDerivedRasterBand">
                <ComplexSource>
                    <SourceFilename relativeToVRT="0">%s</SourceFilename>
                    <SourceBand>1</SourceBand>
                    <SourceProperties RasterXSize="200" RasterYSize="200" DataType="Byte" BlockXSize="128" BlockYSize="128" />
                    <SrcRect xOff="0" yOff="0" xSize="200" ySize="200" />
                    <DstRect xOff="0" yOff="0" xSize="200" ySize="200" />
                </ComplexSource>
                <PixelFunctionType>sqrt</PixelFunctionType>
            </VRTRasterBand>
        </VRTDataset>
        ''' % (self.test_file_gcps, vrt1.filename)
        vrt1.write_xml(vrt1_xml)
        vrt2 = vrt1.copy()

        self.assertFalse(os.path.basename(vrt1.filename) in vrt2.xml)
Example #3
 def test_create_band(self):
     array = gdal.Open(self.test_file_gcps).ReadAsArray()[1, 10:, :]
     vrt1 = VRT.from_array(array)
     vrt2 = VRT(x_size=array.shape[1], y_size=array.shape[0])
     self.assertEqual(vrt2.dataset.RasterCount, 0)
     vrt2.create_band({'SourceFilename': vrt1.filename})
     self.assertEqual(vrt2.dataset.RasterCount, 1)
Example #4
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT '''

        ##############
        # Get time
        ##############
        if fileName[0:len(keywordBase)] != keywordBase:
            raise AttributeError("Wrong mapper")

        timestr = fileName[len(keywordBase)+1:]
        time = datetime.strptime(timestr, '%Y%m%d%H%M')

        ######################################################
        # Find windFileName corresponding to a Nansat-readable
        # file in your local (or remote) file archive
        ######################################################
        windFileName = localFolder + <.......>

        ######################################################
        # Open file with any other Nansat mapper
        ######################################################
        w = Nansat(windFileName)
        VRT.__init__(self, vrtDataset=w.vrt.dataset)

        return
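The <.......> placeholder is left for the user to fill in. A purely hypothetical sketch of the lookup, assuming an archive where files are named wind_<YYYYMMDDHHMM>.nc (both the pattern and localFolder are illustrative, not part of the mapper):

# hypothetical naming convention -- adapt to your own archive
windFileName = os.path.join(localFolder,
                            'wind_%s.nc' % time.strftime('%Y%m%d%H%M'))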
Example #5
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create NCEP VRT '''

        if not gdalDataset:
            raise WrongMapperError

        geotransform = gdalDataset.GetGeoTransform()
        if (geotransform != (-0.25, 0.5, 0.0, 90.25, 0.0, -0.5) or
                gdalDataset.RasterCount != 2):  # not a foolproof check
            raise WrongMapperError

        metaDict = [{'src': {'SourceFilename': fileName,
                             'SourceBand': 1},
                     'dst': {'wkv': 'eastward_wind',
                             'height': '10 m'}},
                    {'src': {'SourceFilename': fileName,
                             'SourceBand': 2},
                     'dst': {'wkv': 'northward_wind',
                             'height': '10 m'}},
                    {'src': [{'SourceFilename': fileName,
                              'SourceBand': 1,
                              'DataType': gdalDataset.GetRasterBand(1).DataType
                              },
                             {'SourceFilename': fileName,
                              'SourceBand': 2,
                              'DataType': gdalDataset.GetRasterBand(2).DataType
                              }],
                     'dst': {'wkv': 'wind_speed',
                             'PixelFunctionType': 'UVToMagnitude',
                             'name': 'windspeed',
                             'height': '2 m'
                             }},
                    {'src': [{'SourceFilename': fileName,
                              'SourceBand': 1,
                              'DataType': gdalDataset.GetRasterBand(1).DataType
                              },
                             {'SourceFilename': fileName,
                              'SourceBand': 2,
                              'DataType': gdalDataset.GetRasterBand(2).DataType
                              }],
                     'dst': {'wkv': 'wind_from_direction',
                             'PixelFunctionType': 'UVToDirectionFrom',
                             'name': 'winddirection',
                             'height': '2 m'
                             }
                     }]

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # Adding valid time from the GRIB file to dataset
        validTime = gdalDataset.GetRasterBand(1).GetMetadata()['GRIB_VALID_TIME']
        self._set_time(datetime.datetime.utcfromtimestamp(
            int(validTime.strip().split(' ')[0])))

        return
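The UVToMagnitude and UVToDirectionFrom pixel functions combine the two GRIB bands on the fly. Conceptually they compute the following (a numpy sketch of the usual meteorological conventions, not the registered C pixel functions themselves):

import numpy as np

def uv_to_magnitude(u, v):
    # wind speed from eastward (u) and northward (v) components
    return np.hypot(u, v)

def uv_to_direction_from(u, v):
    # direction the wind blows from, in degrees clockwise from north
    return np.mod(180.0 + np.degrees(np.arctan2(u, v)), 360.0)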
Example #6
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):

        title_correct = False
        if not gdalMetadata:
            raise WrongMapperError
        for key, val in gdalMetadata.items():
            if "title" in key:
                if not val == "Daily AMSR-E Arctic lead area fraction [in percent]":
                    raise WrongMapperError
                else:
                    title_correct = True

        if not title_correct:
            raise WrongMapperError

        # initiate VRT for the NSIDC 10 km grid
        VRT.__init__(
            self,
            srcGeoTransform=(-3850000, 6250, 0.0, 5850000, 0.0, -6250),
            srcProjection=NSR(3411).wkt,
            srcRasterXSize=1216,
            srcRasterYSize=1792,
        )

        src = {"SourceFilename": 'NETCDF:"%s":lf' % fileName, "SourceBand": 1}
        dst = {"name": "leadFraction", "long_name": "AMSRE sea ice lead fraction"}

        self._create_band(src, dst)
        self.dataset.FlushCache()
Example #7
 def test_create_band_name_wkv(self):
     short_name='sigma0'
     wkv = dict(short_name=short_name)
     self.mock_pti['get_wkv_variable'].return_value=wkv
     vrt = VRT()
     self.assertEqual(vrt._create_band_name({'wkv': short_name}), (short_name, wkv))
     self.assertEqual(vrt._create_band_name({'wkv': short_name, 'suffix': 'HH'}),
                      (short_name + '_HH', wkv))
Example #8
 def test_make_filename(self):
     filename1 = VRT._make_filename()
     filename2 = VRT._make_filename(extention='smth')
     filename3 = VRT._make_filename(nomem=True)
     self.assertTrue(filename1.startswith('/vsimem/'))
     self.assertTrue(filename2.startswith('/vsimem/'))
     self.assertTrue(filename2.endswith('.smth'))
     self.assertTrue(os.path.exists(filename3))
Example #9
    def test_get_projection_raises_NansatProjectionError(self, dataset):
        dataset.GetProjection.return_value = ''
        dataset.GetGCPProjection.return_value = ''
        dataset.GetMetadata.return_value = {}

        vrt = VRT()
        with self.assertRaises(NansatProjectionError):
            proj = vrt.get_projection()
Example #10
    def test_get_projection_dataset(self, dataset):
        proj = 'SOME_PROJECTION'
        dataset.GetProjection.return_value = proj
        dataset.GetGCPProjection.return_value = ''
        dataset.GetMetadata.return_value = {}

        vrt = VRT()
        proj_src = vrt.get_projection()
        self.assertEqual(proj_src, (proj, 'dataset'))
Example #11
    def test_get_projection_geolocation(self, dataset):
        proj = 'SOME_PROJECTION'
        dataset.GetProjection.return_value = ''
        dataset.GetGCPProjection.return_value = ''
        dataset.GetMetadata.return_value = {'SRS': proj}

        vrt = VRT()
        proj_src = vrt.get_projection()
        self.assertEqual(proj_src, (proj, 'geolocation'))
Example #12
    def set_gcps(self, lon, lat, gdal_dataset):
        """ Set gcps """
        self.band_vrts['new_lon_VRT'] = VRT.from_array(lon)
        self.dataset.SetGCPs(VRT._lonlat2gcps(lon, lat, n_gcps=400), NSR().wkt)

        # Add geolocation from correct longitudes and latitudes
        self._add_geolocation(
                Geolocation(self.band_vrts['new_lon_VRT'], self, x_band=1, y_band=self._latitude_band_number(gdal_dataset))
            )
Example #13
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create LANDSAT VRT '''
        # try to open .tar or .tar.gz or .tgz file with tar
        try:
            tarFile = tarfile.open(fileName)
        except Exception:
            raise WrongMapperError

        tarNames = tarFile.getnames()
        metaDict = []
        for tarName in tarNames:
            if (tarName[0] in ('L', 'M') and
                    tarName[-4:] in ('.TIF', '.tif')):
                bandNo = tarName[-6:-4]
                metaDict.append({
                    'src': {'SourceFilename': '/vsitar/%s/%s' % (fileName,
                                                                 tarName),
                            'SourceBand':  1},
                    'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                            'suffix': bandNo}})

        if not metaDict:
            raise WrongMapperError

        sizeDiffBands = []
        for iFile in range(len(metaDict)):
            tmpName = metaDict[iFile]['src']['SourceFilename']
            gdalDatasetTmp = gdal.Open(tmpName)
            if iFile == 0:
                gdalDatasetTmp0 = gdalDatasetTmp
                xSize = gdalDatasetTmp.RasterXSize
                ySize = gdalDatasetTmp.RasterYSize
            elif (xSize != gdalDatasetTmp.RasterXSize or
                    ySize != gdalDatasetTmp.RasterYSize):
                sizeDiffBands.append(iFile)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDatasetTmp0)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # 8th band of LANDSAT8 is a double size band.
        # Reduce the size to same as the 1st band.
        if len(sizeDiffBands) != 0:
            vrtXML = self.read_xml()
            node0 = Node.create(vrtXML)
            for iBand in sizeDiffBands:
                iBandNode = node0.nodeList('VRTRasterBand')[iBand]
                iNodeDstRect = iBandNode.node('DstRect')
                iNodeDstRect.replaceAttribute('xSize', str(xSize))
                iNodeDstRect.replaceAttribute('ySize', str(ySize))

            self.write_xml(node0.rawxml())
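The DstRect edit works because GDAL scales every source into its destination rectangle: shrinking the panchromatic band's DstRect to the 30 m grid makes it resample on read. A sketch of the same edit with plain ElementTree instead of Nansat's Node helper (illustrative only):

import xml.etree.ElementTree as ET

root = ET.fromstring(vrtXML)
for iBand in sizeDiffBands:
    # shrink the destination rectangle of the oversized band
    dst_rect = root.findall('VRTRasterBand')[iBand].find('.//DstRect')
    dst_rect.set('xSize', str(xSize))
    dst_rect.set('ySize', str(ySize))
new_xml = ET.tostring(root).decode()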
Example #14
    def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
                 **kwargs):
        # check if mapper fits
        if not gdalMetadata:
            raise WrongMapperError
        if not os.path.splitext(inputFileName)[1] == '.mnt':
            raise WrongMapperError
        try:
            mbNorthLatitude = float(gdalMetadata['NC_GLOBAL#mbNorthLatitude'])
            mbSouthLatitude = float(gdalMetadata['NC_GLOBAL#mbSouthLatitude'])
            mbEastLongitude = float(gdalMetadata['NC_GLOBAL#mbEastLongitude'])
            mbWestLongitude = float(gdalMetadata['NC_GLOBAL#mbWestLongitude'])
            mbProj4String = gdalMetadata['NC_GLOBAL#mbProj4String']
            Number_lines = int(gdalMetadata['NC_GLOBAL#Number_lines'])
            Number_columns = int(gdalMetadata['NC_GLOBAL#Number_columns'])
            Element_x_size = float(gdalMetadata['NC_GLOBAL#Element_x_size'])
            Element_y_size = float(gdalMetadata['NC_GLOBAL#Element_y_size'])
        except (KeyError, ValueError):
            raise WrongMapperError

        # find subdataset with DEPTH
        subDatasets = gdalDataset.GetSubDatasets()
        dSourceFile = None
        for subDataset in subDatasets:
            if subDataset[0].endswith('.mnt":DEPTH'):
                dSourceFile = subDataset[0]
        if dSourceFile is None:
            raise WrongMapperError
        dSubDataset = gdal.Open(dSourceFile)
        dMetadata = dSubDataset.GetMetadata()

        try:
            scale_factor = dMetadata['DEPTH#scale_factor']
            add_offset = dMetadata['DEPTH#add_offset']
        except KeyError:
            raise WrongMapperError

        geoTransform = [mbWestLongitude, Element_x_size, 0,
                        mbNorthLatitude, 0, -Element_y_size]

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, srcGeoTransform=geoTransform,
                           srcMetadata=gdalMetadata,
                           srcProjection=NSR(mbProj4String).wkt,
                           srcRasterXSize=Number_columns,
                           srcRasterYSize=Number_lines)

        metaDict = [{'src': {'SourceFilename': dSourceFile,
                             'SourceBand': 1,
                             'ScaleRatio': scale_factor,
                             'ScaleOffset': add_offset},
                     'dst': {'wkv': 'depth'}}]

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)
Example #15
 def test_add_swath_mask_band(self, create_band):
     vrt = VRT()
     vrt.filename = '/temp/filename.vrt'
     vrt._add_swath_mask_band()
     src = [{'SourceFilename': '/temp/filename.vrt',
             'SourceBand':  1,
             'DataType': 1}]
     dst = {'dataType': 1,
            'wkv': 'swath_binary_mask',
            'PixelFunctionType': 'OnesPixelFunc'}
     create_band.assert_called_once_with(src=src, dst=dst)
Example #16
    def test_find_complex_band(self):
        a = np.random.randn(100,100)
        vrt1 = VRT.from_array(a)
        vrt2 = VRT.from_array(a.astype(np.complex64))

        vrt3 = VRT.from_gdal_dataset(vrt1.dataset)
        vrt3.create_bands([{'src': {'SourceFilename': vrt1.filename}},
                           {'src': {'SourceFilename': vrt2.filename}}])

        self.assertEqual(vrt1._find_complex_band(), None)
        self.assertEqual(vrt2._find_complex_band(), 1)
        self.assertEqual(vrt3._find_complex_band(), 2)
Example #17
 def test_from_filenames(self):
     lon, lat = np.meshgrid(np.linspace(0,5,10), np.linspace(10,20,30))
     x_vrt = VRT.from_array(lon)
     y_vrt = VRT.from_array(lat)
     g = Geolocation.from_filenames(x_vrt.filename, y_vrt.filename)
     self.assertIsInstance(g, Geolocation)
     self.assertEqual(g.data['X_DATASET'], x_vrt.filename)
     self.assertEqual(g.data['Y_DATASET'], y_vrt.filename)
     self.assertEqual(g.data['LINE_OFFSET'], '0')
     self.assertEqual(g.data['LINE_STEP'], '1')
     self.assertEqual(g.data['PIXEL_OFFSET'], '0')
     self.assertEqual(g.data['PIXEL_STEP'], '1')
Example #18
    def test_copy_empty_vrt(self):
        vrt1 = VRT()
        vrt2 = vrt1.copy()

        self.assertIsInstance(vrt2, VRT)
        self.assertIsInstance(vrt2.filename, str)
        self.assertEqual(vrt2.dataset.RasterXSize, vrt1.dataset.RasterXSize)
        self.assertEqual(vrt2.dataset.RasterYSize, vrt1.dataset.RasterYSize)
        self.assertEqual(vrt2.dataset.GetProjection(), vrt1.dataset.GetProjection())
        self.assertEqual(vrt2.dataset.GetGeoTransform(), vrt1.dataset.GetGeoTransform())
        self.assertEqual(vrt2.dataset.GetGCPProjection(), vrt1.dataset.GetGCPProjection())
        self.assertIn('filename', list(vrt2.dataset.GetMetadata().keys()))
Example #19
    def test_init_from_gdal_dataset(self, _add_geolocation):
        vrt = VRT()
        ds = gdal.Open(self.test_file_gcps)
        vrt._init_from_gdal_dataset(ds)

        self.assertEqual(vrt.dataset.RasterXSize, ds.RasterXSize)
        self.assertEqual(vrt.dataset.RasterYSize, ds.RasterYSize)
        self.assertEqual(vrt.dataset.GetProjection(), ds.GetProjection())
        self.assertEqual(vrt.dataset.GetGeoTransform(), ds.GetGeoTransform())
        self.assertEqual(vrt.dataset.GetGCPProjection(), ds.GetGCPProjection())
        self.assertIn('filename', list(vrt.dataset.GetMetadata().keys()))
        _add_geolocation.assert_called_once()
Example #20
    def __init__(self, srs=None, ext=None, ds=None, **kwargs):
        """Create Domain from GDALDataset or string options or lat/lon grids"""
        # If too much information is given raise error
        if ds is not None and srs is not None and ext is not None:
            raise ValueError('Ambiguous specification of both dataset, srs- and ext-strings.')

        # choose between input options:
        # ds
        # ds and srs
        # srs and ext

        # if only a dataset is given:
        #     copy geo-reference from the dataset
        if ds is not None and srs is None:
            self.vrt = VRT.from_gdal_dataset(ds)

        # If dataset and srs are given (but not ext):
        #   use AutoCreateWarpedVRT to determine bounds and resolution
        elif ds is not None and srs is not None:
            srs = NSR(srs)
            tmp_vrt = gdal.AutoCreateWarpedVRT(ds, None, srs.wkt)
            if tmp_vrt is None:
                raise NansatProjectionError('Could not warp the given dataset to the given SRS.')
            else:
                self.vrt = VRT.from_gdal_dataset(tmp_vrt)

        # If SpatialRef and extent string are given (but not dataset)
        elif srs is not None and ext is not None:
            srs = NSR(srs)
            # create full dictionary of parameters
            extent_dict = Domain._create_extent_dict(ext)

            # convert -lle to -te
            if 'lle' in extent_dict.keys():
                extent_dict = self._convert_extentDic(srs, extent_dict)

            # get size/extent from the created extent dictionary
            geo_transform, raster_x_size, raster_y_size = self._get_geotransform(extent_dict)
            # create VRT object with given geo-reference parameters
            self.vrt = VRT.from_dataset_params(x_size=raster_x_size, y_size=raster_y_size,
                                               geo_transform=geo_transform,
                                               projection=srs.wkt,
                                               gcps=[], gcp_projection='')
        elif 'lat' in kwargs and 'lon' in kwargs:
            warnings.warn('Domain(lon=lon, lat=lat) will be deprecated! '
                          'Use Domain.from_lonlat()', NansatFutureWarning)
            # create self.vrt from given lat/lon
            self.vrt = VRT.from_lonlat(kwargs['lon'], kwargs['lat'])
        else:
            raise ValueError('Either "ds", or "ds and srs", or "srs and ext" '
                             'are required')
Example #21
 def test_fix_global_metadata(self):
     ds = gdal.Open(self.test_file_gcps)
     vrt = VRT.copy_dataset(ds)
     vrt.dataset.SetMetadataItem(str('test'), str('"test"'))
     vrt.fix_global_metadata(['AREA_OR_POINT'])
     self.assertNotIn('AREA_OR_POINT', vrt.dataset.GetMetadata())
     self.assertEqual('&quot;test&quot;', vrt.dataset.GetMetadataItem(str('test')))
Example #22
    def from_lonlat(cls, lon, lat, add_gcps=True):
        """Create Domain object from input longitudes, latitudes arrays

        Parameters
        ----------
        lon : numpy.ndarray
            longitudes
        lat : numpy.ndarray
            latitudes
        add_gcps : bool
            Add GCPs from lon/lat arrays.

        Returns
        -------
            d : Domain

        Examples
        --------
            >>> lon, lat = np.meshgrid(range(10), range(10))
            >>> d1 = Domain.from_lonlat(lon, lat)
            >>> d2 = Domain.from_lonlat(lon, lat, add_gcps=False) # add only geolocation arrays

        """
        d = cls.__new__(cls)
        d.vrt = VRT.from_lonlat(lon, lat, add_gcps)
        return d
Example #23
 def test_set_gcps_geolocation_geotransform_with_geolocation(self):
     lon, lat = np.meshgrid(np.linspace(0, 5, 10), np.linspace(10, 20, 30))
     vrt = VRT.from_lonlat(lon, lat)
     vrt.create_band({str('SourceFilename'): vrt.geolocation.x_vrt.filename})
     vrt._set_gcps_geolocation_geotransform()
     self.assertFalse('<GeoTransform>' in vrt.xml)
     self.assertEqual(vrt.dataset.GetGCPs(), ())
Example #24
 def test_get_super_vrt_geolocation(self):
     lon, lat = np.meshgrid(np.linspace(0, 5, 10), np.linspace(10, 20, 30))
     vrt1 = VRT.from_lonlat(lon, lat)
     vrt2 = vrt1.get_super_vrt()
     vrt1 = None
     self.assertTrue(vrt2.geolocation.x_vrt is not None)
     self.assertTrue(vrt2.geolocation.y_vrt is not None)
Example #25
    def test_set_fake_gcps_empty(self):
        ds = gdal.Open('NETCDF:"%s":UMass_AES' % self.test_file_arctic)
        vrt = VRT.copy_dataset(ds)

        dst_wkt = vrt._set_fake_gcps(self.nsr_wkt, [], 1)
        self.assertEqual(dst_wkt, self.nsr_wkt)
        self.assertEqual(len(vrt.dataset.GetGCPs()), 0)
Example #26
    def __init__(self, *args, **kwargs):

        filename = args[0]
        gdal_metadata = VRT._remove_strings_in_metadata_keys(
            args[2], ['NC_GLOBAL#', 'NANSAT_', 'GDAL_'])

        gcmd_keywords_mapping = get_gcmd_keywords_mapping()
        for key, val in list(gcmd_keywords_mapping.items()):
            if 'source' in list(gdal_metadata.keys()) and key in gdal_metadata['source']:
                instrument = val['instrument']
                platform = val['platform']

        if 'instrument' not in locals():
            raise WrongMapperError

        super(Mapper, self).__init__(*args, **kwargs)

        time_coverage_start, time_coverage_end = self.time_coverage()

        self.dataset.SetMetadataItem('time_coverage_start',
                (time_coverage_start.isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                (time_coverage_end.isoformat()))
        self.dataset.SetMetadataItem('instrument', instrument)
        self.dataset.SetMetadataItem('platform', platform)
Example #27
 def test_leave_few_bands(self):
     ds = gdal.Open(self.test_file_gcps)
     vrt = VRT.copy_dataset(ds)
     vrt.leave_few_bands([1, 'L_469'])
      self.assertEqual(vrt.dataset.RasterCount, 2)
     self.assertEqual(vrt.dataset.GetRasterBand(1).GetMetadataItem(str('name')), 'L_645')
     self.assertEqual(vrt.dataset.GetRasterBand(2).GetMetadataItem(str('name')), 'L_469')
Example #28
    def vrts_from_arrays(self, data, variable_names, pol='', resize=True, resample_alg=2):
        """ Convert input dict with arrays into dict with VRTs

        Parameters
        ----------
        data : dict
            2D arrays with data from LUT
        variable_names : list of str
            variable names that should be converted to VRTs
        pol : str
            HH, HV, etc
        resize : bool
            Shall VRT be zoomed to full size?
        resample_alg : int
            Index of resampling algorithm. See VRT.get_resized_vrt()

        Returns
        -------
        vrts : dict with (resized) VRTs

        """
        vrts = {}
        for var_name in variable_names:
            vrts[var_name+pol] = VRT.from_array(data[var_name+pol])
            if resize:
                vrts[var_name+pol] = vrts[var_name+pol].get_resized_vrt(self.dataset.RasterXSize,
                                                                        self.dataset.RasterYSize,
                                                                        resample_alg)
        return vrts
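Hypothetical usage inside a mapper (the variable and LUT names are illustrative):

# convert two HH-polarisation LUT arrays into full-size VRTs
lut_vrts = self.vrts_from_arrays(data, ['sigmaNought', 'noiseLut'], pol='_HH')
self.band_vrts.update(lut_vrts)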
Example #29
 def test_set_gcps_geolocation_geotransform_with_geotransform(self):
     ds = gdal.Open('NETCDF:"%s":UMass_AES' % self.test_file_arctic)
     vrt = VRT.copy_dataset(ds)
     vrt._set_gcps_geolocation_geotransform()
     self.assertEqual(vrt.dataset.GetGeoTransform(),
                      (-1000000.0, 25000.0, 0.0, 5000000.0, 0.0, -25000.0))
     self.assertEqual(vrt.dataset.GetMetadata(str('GEOLOCATION')), {})
     self.assertEqual(vrt.dataset.GetGCPs(), ())
Example #30
 def test_fix_band_metadata(self):
     ds = gdal.Open(self.test_file_gcps)
     vrt = VRT.copy_dataset(ds)
     self.assertIn('standard_name', vrt.dataset.GetRasterBand(1).GetMetadata())
     self.assertIn('time', vrt.dataset.GetRasterBand(1).GetMetadata())
     vrt.fix_band_metadata(['standard_name', 'time'])
     self.assertNotIn('standard_name', vrt.dataset.GetRasterBand(1).GetMetadata())
     self.assertNotIn('time', vrt.dataset.GetRasterBand(1).GetMetadata())
Example #31
    def __init__(self, filename, gdal_dataset, gdal_metadata, *args, **kwargs):

        if not filename.endswith('nc'):
            raise WrongMapperError

        self.input_filename = filename

        if not gdal_metadata:
            raise WrongMapperError

        if 'NC_GLOBAL#GDAL_NANSAT_GCPY_000' in list(gdal_metadata.keys()) or \
                'NC_GLOBAL#GDAL_NANSAT_GCPProjection' in list(gdal_metadata.keys()):
            # Probably Nansat generated netcdf of swath data - see issue #192
            raise WrongMapperError

        metadata = VRT._remove_strings_in_metadata_keys(
            gdal_metadata, ['NC_GLOBAL#', 'NANSAT_', 'GDAL_'])

        # Set origin metadata (TODO: agree on keyword...)
        origin = ''
        nans = 'NANSAT'
        if 'origin' in list(metadata.keys()):
            origin = metadata['origin'] + ' '
        for key in list(metadata.keys()):
            if nans in key:
                metadata['origin'] = origin + nans
            # else: Nothing needs to be done, origin stays the same...

        # Check conventions metadata
        if 'Conventions' not in metadata or 'CF' not in metadata['Conventions']:
            raise WrongMapperError

        # OBS: at this point, generic mapper fails...
        #if metadata.has_key('GCPProjection'):
        #    # Probably Nansat generated netcdf of swath data - see issue #192
        #    raise WrongMapperError

        # Create empty VRT dataset with geo-reference
        self._create_empty(gdal_dataset, metadata)

        # Add bands with metadata and corresponding values to the empty VRT
        self.create_bands(
            self._band_list(gdal_dataset, metadata, *args, **kwargs))

        # Check size?
        #xsize, ysize = self.ds_size(sub0)

        # Create complex bands from *_real and *_imag bands (the function is in
        # vrt.py)
        self._create_complex_bands(self._get_sub_filenames(gdal_dataset))

        # Set GCMD/DIF compatible metadata if available
        self._set_time_coverage_metadata(metadata)
Example #32
 def test_transform_coordinates_2d_array(self):
     src_srs = NSR()
     dst_srs = NSR(str('+proj=stere'))
     src_points = (np.array([[1,2,3,4],[1,2,3,4]]),
                   np.array([[5,6,7,8],[5,6,7,8]]),
                   np.array([[5,6,7,8],[5,6,7,8]]),)
     dst_x, dst_y, dst_z = VRT.transform_coordinates(src_srs, src_points, dst_srs)
     # check if shape of the result matches the expected shape (2x4 array)
     self.assertEqual(dst_x.shape, (2,4))
     self.assertEqual(dst_y.shape, (2,4))
     self.assertEqual(dst_z.shape, (2,4))
Example #33
 def test_set_add_band_options(self):
     # case 1
     srcs = [{'SourceFilename': 'filename', 'SourceBand': 1}]
     dst = []
     options = VRT._set_add_band_options(srcs, dst)
     self.assertEqual(options, [])
     # case 2
     srcs = [{'SourceFilename': 'filename',
              'SourceBand': 0,
              'ImageOffset': 0,
              'PixelOffset': 0,
              'LineOffset': 0,
              'ByteOrder': 'i'}]
     options = VRT._set_add_band_options(srcs, dst)
     self.assertIn('subclass=VRTRawRasterBand', options)
     self.assertIn('SourceFilename=filename', options)
     self.assertIn('ImageOffset=0', options)
     self.assertIn('PixelOffset=0', options)
     self.assertIn('LineOffset=0', options)
     self.assertIn('ByteOrder=i', options)
Example #34
    def test_from_dataset_params(self):
        ds = gdal.Open(self.test_file_gcps)
        vrt = VRT.from_dataset_params(ds.RasterXSize, ds.RasterYSize,
                                      ds.GetGeoTransform(), ds.GetProjection(),
                                      ds.GetGCPs(), ds.GetGCPProjection())

        self.assertEqual(vrt.dataset.RasterXSize, ds.RasterXSize)
        self.assertEqual(vrt.dataset.RasterYSize, ds.RasterYSize)
        self.assertEqual(vrt.dataset.GetProjection(), ds.GetProjection())
        self.assertEqual(vrt.dataset.GetGeoTransform(), ds.GetGeoTransform())
        self.assertEqual(vrt.dataset.GetGCPProjection(), ds.GetGCPProjection())
        self.assertIn('filename', list(vrt.dataset.GetMetadata().keys()))
Example #35
    def test_set_fake_gcps(self):
        ds = gdal.Open('NETCDF:"%s":UMass_AES' % self.test_file_arctic)
        gcps = gdal.Open(self.test_file_gcps).GetGCPs()
        vrt = VRT.copy_dataset(ds)

        dst_wkt = vrt._set_fake_gcps(self.nsr_wkt, gcps, 1)
        self.assertEqual(dst_wkt, None)
        self.assertEqual(len(vrt.dataset.GetGCPs()), len(gcps))
        self.assertEqual([gcp.GCPPixel for gcp in gcps],
                         [gcp.GCPX for gcp in vrt.dataset.GetGCPs()])
        self.assertEqual([gcp.GCPLine for gcp in gcps],
                         [gcp.GCPY for gcp in vrt.dataset.GetGCPs()])
Example #36
 def test_transform_coordinates_1d_array(self):
     src_srs = NSR()
     dst_srs = NSR(str('+proj=stere'))
     src_points = (np.array([1, 2, 3,
                             4]), np.array([5, 6, 7,
                                            8]), np.array([5, 6, 7, 8]))
     dst_x, dst_y, dst_z = VRT.transform_coordinates(
         src_srs, src_points, dst_srs)
     # check if shape of the result matches the expected shape (list with four points)
     self.assertEqual(dst_x.shape, (4, ))
     self.assertEqual(dst_y.shape, (4, ))
     self.assertEqual(dst_z.shape, (4, ))
Example #37
 def test_get_dst_band_data_type(self):
     self.assertEqual(VRT._get_dst_band_data_type([], {'dataType': 'Float32'}), 'Float32')
     self.assertEqual(VRT._get_dst_band_data_type([1, 2, 3], {}), gdal.GDT_Float32)
     self.assertEqual(VRT._get_dst_band_data_type([{'ScaleRatio': 2}], {}), gdal.GDT_Float32)
     self.assertEqual(VRT._get_dst_band_data_type([{'LUT': [1, 2, 3]}], {}), gdal.GDT_Float32)
     self.assertEqual(VRT._get_dst_band_data_type([{}], {}), gdal.GDT_Float32)
     self.assertEqual(VRT._get_dst_band_data_type([{'DataType': 'Float32'}], {}), 'Float32')
Example #38
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT '''

        fileBaseName = os.path.basename(fileName)
        if not fileBaseName == 'MOD44W.vrt':
            raise WrongMapperError

        metaDict = [{
            'src': {
                'SourceFilename': fileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'land_binary_mask'
            }
        }]

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)
Example #39
    def init_from_manifest_only(self, manifestXML, annotXML):
        ''' Create fake VRT and add metadata only from the manifest.safe '''
        X, Y, lon, lat, inc, ele, numberOfSamples, numberOfLines = self.read_geolocation_lut(annotXML)

        VRT.__init__(self, srcRasterXSize=numberOfSamples, srcRasterYSize=numberOfLines)
        doc = ET.fromstring(manifestXML)

        gcps = []
        for i in range(len(X)):
            gcps.append(gdal.GCP(lon[i], lat[i], 0, X[i], Y[i]))

        self.dataset.SetGCPs(gcps, NSR().wkt)
        self.dataset.SetMetadataItem('time_coverage_start',
                                     doc.findall(".//*[{http://www.esa.int/safe/sentinel-1.0}startTime]")[0][0].text)
        self.dataset.SetMetadataItem('time_coverage_end',
                                     doc.findall(".//*[{http://www.esa.int/safe/sentinel-1.0}stopTime]")[0][0].text)
        self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform('SENTINEL-1A')))
        self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
        self.dataset.SetMetadataItem('Entry Title', 'Sentinel-1A SAR')
        self.dataset.SetMetadataItem('Data Center', 'ESA/EO')
        self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
        self.dataset.SetMetadataItem('Summary', 'S1A SAR data')
Example #40
    def test_export(self):
        array = gdal.Open(self.test_file_gcps).ReadAsArray()[1, 10:, :]
        vrt = VRT.from_array(array)
        vrt.export(self.tmp_filename)
        self.assertTrue(self.tmp_filename)
        tree = ET.parse(self.tmp_filename)
        root = tree.getroot()

        self.assertEqual(root.tag, 'VRTDataset')
        self.assertIn('rasterXSize', list(root.keys()))
        self.assertIn('rasterYSize', list(root.keys()))

        self.assertEqual([e.tag for e in root], ['Metadata', 'VRTRasterBand'])
Example #41
 def test_remove_strings_in_metadata_keys(self):
     gdal_metadata = {
         'aaa': 'bbb',
         'NC_GLOBAL#ccc': 'ddd',
         'NANSAT_eee': 'fff'
     }
     rm_strings = ['NC_GLOBAL#', 'NANSAT_']
     new_metadata = VRT._remove_strings_in_metadata_keys(
         gdal_metadata, rm_strings)
     self.assertEqual(new_metadata, {
         'aaa': 'bbb',
         'ccc': 'ddd',
         'eee': 'fff'
     })
Example #42
    def test_init(self):
        lon, lat = np.meshgrid(np.linspace(0, 5, 10), np.linspace(10, 20, 30))
        x_vrt = VRT.from_array(lon)
        y_vrt = VRT.from_array(lat)

        ga = Geolocation(x_vrt, y_vrt)

        self.assertIsInstance(ga, Geolocation)
        self.assertEqual(ga.data['X_DATASET'], x_vrt.filename)
        self.assertEqual(ga.data['Y_DATASET'], y_vrt.filename)
        self.assertEqual(ga.data['LINE_OFFSET'], '0')
        self.assertEqual(ga.data['LINE_STEP'], '1')
        self.assertEqual(ga.data['PIXEL_OFFSET'], '0')
        self.assertEqual(ga.data['PIXEL_STEP'], '1')
        srs = osr.SpatialReference()
        status = srs.ImportFromWkt(ga.data['SRS'])
        self.assertEqual(status, 0)
        self.assertEqual(srs.ExportToProj4().strip(),
                         '+proj=longlat +datum=WGS84 +no_defs')
        self.assertEqual(ga.data['X_BAND'], '1')
        self.assertEqual(ga.data['Y_BAND'], '1')
        self.assertEqual(ga.x_vrt, x_vrt)
        self.assertEqual(ga.y_vrt, y_vrt)
Example #43
 def test_transform_points(self):
     ds = gdal.Open(self.test_file_gcps)
     vrt1 = VRT.from_gdal_dataset(ds, metadata=ds.GetMetadata())
     vrt1.tps = True
     lon, lat = vrt1.transform_points([1, 2, 3], [4, 5, 6])
     self.assertTrue(
         np.allclose(lon, np.array([28.23549571, 28.24337106,
                                    28.25126129])))
     self.assertTrue(
         np.allclose(lat, np.array([71.52509848, 71.51913744,
                                    71.51317568])))
     lon, lat = vrt1.transform_points([], [])
     self.assertTrue(np.allclose(lon, np.array([])))
     self.assertTrue(np.allclose(lat, np.array([])))
Example #44
    def test_hardcopy_bands(self):
        ds = gdal.Open(self.test_file_gcps)
        vrt = VRT.copy_dataset(ds)
        vrt.hardcopy_bands()

        self.assertTrue(
            np.allclose(vrt.dataset.ReadAsArray(), ds.ReadAsArray()))
        band_nodes = Node.create(str(vrt.xml)).nodeList('VRTRasterBand')
        self.assertEqual(band_nodes[0].node('SourceFilename').value,
                         vrt.band_vrts[1].filename)
        self.assertEqual(band_nodes[1].node('SourceFilename').value,
                         vrt.band_vrts[2].filename)
        self.assertEqual(band_nodes[2].node('SourceFilename').value,
                         vrt.band_vrts[3].filename)
Example #45
    def test_init(self, mock_make_filename):
        metadata = {'key': 'value'}
        vrt = VRT(metadata=metadata)

        self.assertIsInstance(vrt, VRT)
        self.assertEqual(vrt.filename, '/vsimem/filename.vrt')
        self.assertIsInstance(vrt.dataset, gdal.Dataset)
        self.assertIsInstance(vrt.logger, logging.Logger) # just for testing mocking
        self.assertIsInstance(vrt.driver, gdal.Driver)
        self.assertEqual(vrt.band_vrts, {})
        self.assertEqual(vrt.tps, False)
        self.assertTrue(vrt.vrt is None)
        self.assertTrue(vrt.xml.startswith('<VRTDataset rasterXSize="1" rasterYSize="1"'))
        mock_make_filename.assert_called_once()
        self.assertEqual(vrt.dataset.GetMetadata(), metadata)
Example #46
    def test_create_geolocation_bands(self):
        lon, lat = np.meshgrid(np.linspace(0, 5, 10), np.linspace(10, 20, 30))
        vrt = VRT.from_lonlat(lon, lat)
        vrt.create_geolocation_bands()

        self.assertEqual(vrt.dataset.RasterCount, 2)
        self.assertEqual(
            vrt.dataset.GetRasterBand(1).GetMetadataItem(str('name')),
            'longitude')
        self.assertEqual(
            vrt.dataset.GetRasterBand(2).GetMetadataItem(str('name')),
            'latitude')
        self.assertTrue(
            np.allclose(vrt.dataset.GetRasterBand(1).ReadAsArray(), lon))
        self.assertTrue(
            np.allclose(vrt.dataset.GetRasterBand(2).ReadAsArray(), lat))
Example #47
    def __init__(self,
                 filename,
                 gdal_dataset,
                 metadata,
                 quartile=0,
                 *args,
                 **kwargs):

        super(Mapper, self).__init__(filename, gdal_dataset, metadata, *args,
                                     **kwargs)

        intervals = [0, 1, 2, 3]
        if quartile not in intervals:
            raise ValueError('quartile must be one of [0,1,2,3]')

        y_size = self.dataset.RasterYSize // 4
        y_offset = [y_size * qq for qq in intervals][quartile]

        # Crop
        self.set_offset_size('y', y_offset, y_size)

        # Add quartile to metadata
        self.dataset.SetMetadataItem('quartile', str(quartile))

        # Create band of times
        # TODO: resolve nansat issue #263 (https://github.com/nansencenter/nansat/issues/263)
        tt = self.times()[int(y_offset):int(y_offset + y_size)]
        self.dataset.SetMetadataItem('time_coverage_start',
                                     tt[0].astype(datetime).isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                                     tt[-1].astype(datetime).isoformat())
        time_stamps = (tt - tt[0]) / np.timedelta64(1, 's')
        self.band_vrts['time'] = VRT.from_array(
            np.tile(time_stamps, (self.dataset.RasterXSize, 1)).transpose())
        self.create_band(
            src={'SourceFilename': self.band_vrts['time'].filename,
                 'SourceBand': 1},
            dst={'name': 'timestamp',
                 'time_coverage_start': tt[0].astype(datetime).isoformat(),
                 'units': 'seconds since time_coverage_start'})
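A sketch of how the quartile cropping might be invoked (the surrounding mapper plumbing is assumed):

# read only the second quarter of the swath; the timestamp band then
# covers rows y_size..2*y_size-1 of the full dataset
m = Mapper(filename, gdal_dataset, metadata, quartile=1)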
Example #48
    def test_from_lonlat(self):
        geo_keys = ['LINE_OFFSET', 'LINE_STEP', 'PIXEL_OFFSET', 'PIXEL_STEP', 'SRS',
                    'X_BAND', 'X_DATASET', 'Y_BAND', 'Y_DATASET']
        lon, lat = np.meshgrid(np.linspace(0, 5, 10), np.linspace(10, 20, 30))
        vrt = VRT.from_lonlat(lon, lat, n_gcps=25)

        self.assertEqual(vrt.dataset.RasterXSize, 10)
        self.assertEqual(vrt.dataset.RasterYSize, 30)
        self.assertIn('filename', list(vrt.dataset.GetMetadata().keys()))
        geo_metadata = vrt.dataset.GetMetadata(str('GEOLOCATION'))
        for geo_key in geo_keys:
            self.assertEqual(vrt.geolocation.data[geo_key], geo_metadata[geo_key])
        self.assertIsInstance(vrt.geolocation.x_vrt, VRT)
        self.assertIsInstance(vrt.geolocation.y_vrt, VRT)
        self.assertEqual(vrt.geolocation.x_vrt.filename, geo_metadata['X_DATASET'])
        self.assertEqual(vrt.geolocation.y_vrt.filename, geo_metadata['Y_DATASET'])
        self.assertEqual(len(vrt.dataset.GetGCPs()), 25)
Example #49
    def create_VRT_from_ADS(self, adsName, zoomSize=500):
        """ Create VRT with a band from Envisat ADS metadata

        Read offsets of the <adsName> ADS.
        Read 2D matrix of binary values from ADS from file.
        Zoom array with ADS data to <zoomSize>. Zooming is needed to create smooth matrices.
        The array is zoomed to a small size because it is stored in memory. Later the VRT with
        the zoomed array is resized with VRT.get_resized_vrt() to match the size of the Nansat object.

        Create VRT from the ADS array.

        Parameters
        ----------
            adsName : str
                name of variable from ADS to read. should match allADSParams
            zoomSize :  int, optional, 500
                size, to which original matrix from ADSR is zoomed using
                scipy.zoom

        Returns
        -------
            adsVrt : VRT, vrt with a band created from ADS array

        """
        adsHeight = self.dsOffsetDict["NUM_DSR"]
        adsParams = self.allADSParams['list'][adsName]
        array = self.get_array_from_ADS(adsName)

        if not IMPORT_SCIPY:
            raise NansatReadError(
                'ENVISAT data cannot be read because scipy is not installed...'
            )

        # zoom the array
        array = scipy.ndimage.interpolation.zoom(array,
                                                 zoomSize / float(adsHeight),
                                                 order=1)

        # create VRT from the array
        adsVrt = VRT.from_array(array=array)
        # add "name" and "units" to band metadata
        bandMetadata = {"name": adsName, "units": adsParams['units']}
        adsVrt.dataset.GetRasterBand(1).SetMetadata(bandMetadata)

        return adsVrt
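A hypothetical call (the ADS variable name is illustrative and must match a key of self.allADSParams['list']):

# zoom the 2D ADS matrix to ~500 rows and wrap it in a one-band VRT
ads_vrt = self.create_VRT_from_ADS('first_line_incidence_angle', zoomSize=500)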
Example #50
    def test_update_warped_vrt_xml(self):
        dataset = gdal.Open('NETCDF:"%s":UMass_AES' % self.test_file_arctic)
        warped_dataset = gdal.AutoCreateWarpedVRT(dataset, None, str(self.nsr_wkt), 0)
        warped_vrt = VRT.copy_dataset(warped_dataset)
        x_size = 100
        y_size = 200
        geo_transform = (0.0, 1.0, 0.0, 200.0, 0.0, -1.0)
        block_size = 64
        working_data_type = 'Float32'
        warped_vrt._update_warped_vrt_xml(x_size, y_size, geo_transform, block_size,
                                          working_data_type)

        self.assertEqual(warped_vrt.dataset.RasterXSize, x_size)
        self.assertEqual(warped_vrt.dataset.RasterYSize, y_size)
        self.assertEqual(warped_vrt.dataset.GetGeoTransform(), geo_transform)
        self.assertEqual(warped_vrt.dataset.GetRasterBand(1).GetBlockSize(),
                         [block_size, block_size])
        self.assertIn('<WorkingDataType>Float32</WorkingDataType>', warped_vrt.xml)
Example #51
    def create_VRT_from_ADS(self, adsName, zoomSize=500):
        ''' Create VRT with a band from Envisat ADS metadata

        Read offsets of the <adsName> ADS.
        Read 2D matrix of binary values from ADS from file.
        Read 'last_line_...' ADS (in case of ASAR).
        Zoom array with ADS data to <zoomSize>. Zooming is needed to create
        smooth matrices. The array is zoomed to a small size because it is
        stored in memory. Later the VRT with the zoomed array is resized with
        VRT.get_resized_vrt() to match the size of the Nansat object.
        Create VRT from the ADS array.

        Parameters
        ----------
            adsName : str
                name of variable from ADS to read. should match allADSParams
            zoomSize :  int, optional, 500
                size, to which original matrix from ADSR is zoomed using
                scipy.zoom
        Returns
        -------
            adsVrt : VRT, vrt with a band created from ADS array

        '''
        adsHeight = self.dsOffsetDict["NUM_DSR"]
        adsParams = self.allADSParams['list'][adsName]
        array = self.get_array_from_ADS(adsName)

        # zoom the array
        array = scipy.ndimage.interpolation.zoom(array,
                                                 zoomSize / float(adsHeight),
                                                 order=1)

        # create VRT from the array
        adsVrt = VRT(array=array)
        # add "name" and "units" to band metadata
        bandMetadata = {"name": adsName, "units": adsParams['units']}
        adsVrt.dataset.GetRasterBand(1).SetMetadata(bandMetadata)

        return adsVrt
Example #52
    def get_LUT_VRTs(self, XML, vectorListName, LUT_list):
        n = Node.create(XML)
        vecList = n.node(vectorListName)
        X = []
        Y = []
        LUTs = {}
        for LUT in LUT_list:
            LUTs[LUT] = []
        xLengths = []
        for vec in vecList.children:
            xVec = list(map(int, vec['pixel'].split()))
            xLengths.append(len(xVec))
            X.append(xVec)
            Y.append(int(vec['line']))
            for LUT in LUT_list:
                LUTs[LUT].append(list(map(float, vec[LUT].split())))

        # truncate X and LUT to minimum length for all rows
        minLength = np.min(xLengths)
        X = [x[:minLength] for x in X]
        for LUT in LUT_list:
            LUTs[LUT] = [lut[:minLength] for lut in LUTs[LUT]]

        X = np.array(X)
        for LUT in LUT_list:
            LUTs[LUT] = np.array(LUTs[LUT])
        Ym = np.array([Y] * np.shape(X)[1]).transpose()

        lon, lat = self.transform_points(X.flatten(), Ym.flatten())
        longitude = lon.reshape(X.shape)
        latitude = lat.reshape(X.shape)

        LUT_VRTs = {}
        for LUT in LUT_list:
            LUT_VRTs[LUT] = VRT(array=LUTs[LUT], lat=latitude, lon=longitude)

        return LUT_VRTs, longitude, latitude
Example #53
    def add_incidence_angle_band(self):
        # Get GCP variables
        pixel = self.ds['GCP_pixel_' + self.ds.polarisation[:2]][:].data
        line = self.ds['GCP_line_' + self.ds.polarisation[:2]][:].data
        inci = self.ds['GCP_incidenceAngle_' +
                       self.ds.polarisation[:2]][:].data
        inci = inci.reshape(
            np.unique(line[:].data).shape[0],
            np.unique(pixel[:].data).shape[0])

        # Add incidence angle band
        inciVRT = VRT.from_array(inci)
        inciVRT = inciVRT.get_resized_vrt(self.dataset.RasterXSize,
                                          self.dataset.RasterYSize, 1)
        self.band_vrts['inciVRT'] = inciVRT
        src = {
            'SourceFilename': self.band_vrts['inciVRT'].filename,
            'SourceBand': 1
        }
        dst = {'wkv': 'angle_of_incidence', 'name': 'incidence_angle'}
        self.create_band(src, dst)
        self.dataset.FlushCache()
Example #54
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 fast=False,
                 **kwargs):
        if kwargs.get('manifestonly', False):
            fast = True
            warnings.warn('manifestonly option will be deprecated. Use: fast=True',
                          NansatFutureWarning)

        if os.path.split(filename.rstrip('/'))[1][:3] not in ['S1A', 'S1B']:
            raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)

        if not IMPORT_SCIPY:
            raise NansatReadError(
                'Sentinel-1 data cannot be read because scipy is not installed'
            )

        if zipfile.is_zipfile(filename):
            zz = zipfile.PyZipFile(filename)
            # Assuming the file names are consistent, the polarization
            # dependent data should be sorted equally such that we can use the
            # same indices consistently for all the following lists
            # THIS IS NOT THE CASE...
            mds_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'measurement/s1' in fn
            ]
            calibration_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/calibration/calibration-s1' in fn
            ]
            noise_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/calibration/noise-s1' in fn
            ]
            annotation_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/s1' in fn
            ]
            manifest_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'manifest.safe' in fn
            ]
            zz.close()
        else:
            mds_files = glob.glob('%s/measurement/s1*' % filename)
            calibration_files = glob.glob(
                '%s/annotation/calibration/calibration-s1*' % filename)
            noise_files = glob.glob('%s/annotation/calibration/noise-s1*' %
                                    filename)
            annotation_files = glob.glob('%s/annotation/s1*' % filename)
            manifest_files = glob.glob('%s/manifest.safe' % filename)

        if (not mds_files or not calibration_files or not noise_files
                or not annotation_files or not manifest_files):
            raise WrongMapperError(filename)

        # convert list of MDS files into dictionary. Keys - polarizations in upper case.
        mds_files = {
            os.path.basename(ff).split('-')[3].upper(): ff
            for ff in mds_files
        }
        polarizations = list(mds_files.keys())

        # read annotation files
        annotation_data = self.read_annotation(annotation_files)
        if not fast:
            annotation_data = Mapper.correct_geolocation_data(annotation_data)

        # read manifest file
        manifest_data = self.read_manifest_data(manifest_files[0])

        # very fast constructor without any bands only with some metadata and geolocation
        self._init_empty(manifest_data, annotation_data)

        # skip adding bands in the fast mode and RETURN
        if fast:
            return

        # Open data files with GDAL
        gdalDatasets = {}
        for pol in polarizations:
            gdalDatasets[pol] = gdal.Open(mds_files[pol])

            if not gdalDatasets[pol]:
                raise WrongMapperError('%s: No Sentinel-1 datasets found' %
                                       mds_files[pol])

        # Check metadata to confirm it is Sentinel-1 L1
        metadata = gdalDatasets[polarizations[0]].GetMetadata()

        # create full size VRTs with incidenceAngle and elevationAngle
        annotation_vrts = self.vrts_from_arrays(
            annotation_data, ['incidenceAngle', 'elevationAngle'])
        self.band_vrts.update(annotation_vrts)

        # create full size VRTS with calibration LUT
        calibration_names = ['sigmaNought', 'betaNought']
        calibration_list_tag = 'calibrationVectorList'
        for calibration_file in calibration_files:
            pol = '_' + os.path.basename(calibration_file).split('-')[4].upper()
            xml = self.read_vsi(calibration_file)
            calibration_data = self.read_calibration(xml, calibration_list_tag,
                                                     calibration_names, pol)
            calibration_vrts = self.vrts_from_arrays(calibration_data,
                                                     calibration_names, pol,
                                                     True, 1)
            self.band_vrts.update(calibration_vrts)

        # create full size VRTS with noise LUT
        for noise_file in noise_files:
            pol = '_' + os.path.basename(noise_file).split('-')[4].upper()
            xml = self.read_vsi(noise_file)
            if '<noiseVectorList' in xml:
                noise_list_tag = 'noiseVectorList'
                noise_name = 'noiseLut'
            elif '<noiseRangeVectorList' in xml:
                noise_list_tag = 'noiseRangeVectorList'
                noise_name = 'noiseRangeLut'
            else:
                raise WrongMapperError('unknown noise vector format in %s' % noise_file)
            noise_data = self.read_calibration(xml, noise_list_tag,
                                               [noise_name], pol)
            noise_vrts = self.vrts_from_arrays(noise_data, [noise_name], pol,
                                               True, 1)
            self.band_vrts.update(noise_vrts)

        # Create metaDict: dict with metadata for all bands
        metaDict = []
        bandNumberDict = {}
        bnmax = 0
        for pol in polarizations:
            dsPath, dsName = os.path.split(mds_files[pol])
            name = 'DN_%s' % pol
            # A dictionary of band numbers is needed for the pixel function
            # bands further down. This is not the best solution. It would be
            # better to have a function in VRT that returns the number given a
            # band name. This function exists in Nansat but could perhaps be
            # moved to VRT? The existing nansat function could just call the
            # VRT one...
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            band = gdalDatasets[pol].GetRasterBand(1)
            dtype = band.DataType
            metaDict.append({
                'src': {
                    'SourceFilename': mds_files[pol],
                    'SourceBand': 1,
                    'DataType': dtype,
                },
                'dst': {
                    'name': name,
                },
            })
        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)
        '''
        Calibration should be performed as

        s0 = DN^2/sigmaNought^2,

        where sigmaNought is from e.g.
        annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml,
        and DN is the Digital Numbers in the tiff files.

        Also the noise should be subtracted.

        See
        https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT

        The noise correction/subtraction is implemented in an independent package "sentinel1denoised"
        See
        https://github.com/nansencenter/sentinel1denoised
        '''
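        # A rough numpy sketch of that formula (for illustration only; in this
        # mapper the work is done by the Sentinel1Calibration pixel function):
        #     dn = gdalDatasets[pol].ReadAsArray().astype('float32')
        #     sigma0 = (dn ** 2 - noise) / sigma_nought ** 2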

        # Get look direction
        longitude, latitude = self.transform_points(
            calibration_data['pixel'].flatten(),
            calibration_data['line'].flatten())
        longitude.shape = calibration_data['pixel'].shape
        latitude.shape = calibration_data['pixel'].shape
        sat_heading = initial_bearing(longitude[:-1, :], latitude[:-1, :],
                                      longitude[1:, :], latitude[1:, :])
        look_direction = scipy.ndimage.interpolation.zoom(
            np.mod(sat_heading + 90, 360),
            (np.shape(longitude)[0] / (np.shape(longitude)[0] - 1.), 1))

        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT.from_array(look_direction_u)
        look_v_VRT = VRT.from_array(look_direction_v)
        lookVRT = VRT.from_lonlat(longitude, latitude)
        lookVRT.create_band([{
            'SourceFilename': look_u_VRT.filename,
            'SourceBand': 1
        }, {
            'SourceFilename': look_v_VRT.filename,
            'SourceBand': 1
        }], {'PixelFunctionType': 'UVToDirectionTo'})
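        # The 'UVToDirectionTo' pixel function recombines the components,
        # essentially np.degrees(np.arctan2(u, v)) % 360, so the resizing
        # below never interpolates across the 0 <-> 360 discontinuity.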

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize,
                                          self.dataset.RasterYSize, 1)

        # Store VRTs so that they are accessible later
        self.band_vrts['look_u_VRT'] = look_u_VRT
        self.band_vrts['look_v_VRT'] = look_v_VRT
        self.band_vrts['lookVRT'] = lookVRT

        metaDict = []
        # Add bands to full size VRT
        for pol in polarizations:
            name = 'sigmaNought_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename': (self.band_vrts[name].filename),
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })
            name = 'noise_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename':
                    self.band_vrts['%s_%s' % (noise_name, pol)].filename,
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })

        name = 'look_direction'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        metaDict.append({
            'src': {
                'SourceFilename': self.band_vrts['lookVRT'].filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': name
            }
        })

        for pol in polarizations:
            dsPath, dsName = os.path.split(mds_files[pol])
            name = 'sigma0_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': [{
                    'SourceFilename': self.filename,
                    'SourceBand': bandNumberDict['DN_%s' % pol],
                }, {
                    'SourceFilename':
                    self.band_vrts['sigmaNought_%s' % pol].filename,
                    'SourceBand':
                    1
                }],
                'dst': {
                    'wkv':
                    'surface_backwards_scattering_coefficient_of_radar_wave',
                    'PixelFunctionType': 'Sentinel1Calibration',
                    'polarization': pol,
                    'suffix': pol,
                },
            })
            name = 'beta0_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': [{
                    'SourceFilename': self.filename,
                    'SourceBand': bandNumberDict['DN_%s' % pol]
                }, {
                    'SourceFilename':
                    self.band_vrts['betaNought_%s' % pol].filename,
                    'SourceBand':
                    1
                }],
                'dst': {
                    'wkv':
                    'surface_backwards_brightness_coefficient_of_radar_wave',
                    'PixelFunctionType': 'Sentinel1Calibration',
                    'polarization': pol,
                    'suffix': pol,
                },
            })

        self.create_bands(metaDict)

        # Add incidence angle as band
        name = 'incidence_angle'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        src = {
            'SourceFilename': self.band_vrts['incidenceAngle'].filename,
            'SourceBand': 1
        }
        dst = {'wkv': 'angle_of_incidence', 'name': name}
        self.create_band(src, dst)
        self.dataset.FlushCache()

        # Add elevation angle as band
        name = 'elevation_angle'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        src = {
            'SourceFilename': self.band_vrts['elevationAngle'].filename,
            'SourceBand': 1
        }
        dst = {'wkv': 'angle_of_elevation', 'name': name}
        self.create_band(src, dst)
        self.dataset.FlushCache()

        # Add sigma0_VV
        if 'VV' not in polarizations and 'HH' in polarizations:
            name = 'sigma0_VV'
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            src = [{
                'SourceFilename': self.filename,
                'SourceBand': bandNumberDict['DN_HH'],
            }, {
                'SourceFilename': (self.band_vrts['sigmaNought_HH'].filename),
                'SourceBand':
                1,
            }, {
                'SourceFilename': self.band_vrts['incidenceAngle'].filename,
                'SourceBand': 1
            }]
            dst = {
                'wkv':
                'surface_backwards_scattering_coefficient_of_radar_wave',
                'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
                'polarization': 'VV',
                'suffix': 'VV'
            }
            self.create_band(src, dst)
            self.dataset.FlushCache()
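The two pixel-function steps above can be sketched outside GDAL. Below is a minimal numpy rendition: dn, sigma_nought and incidence_deg stand for equally shaped arrays read from the measurement GeoTIFF and the interpolated annotation LUTs (illustrative names, not part of the mapper), and the HH-to-VV conversion uses a common polarization-ratio model from the literature, which is not necessarily the exact formula behind the 'Sentinel1Sigma0HHToSigma0VV' pixel function.

import numpy as np

def calibrate_sigma0(dn, sigma_nought):
    # s0 = DN^2 / sigmaNought^2, as stated in the calibration note above
    return dn.astype('float32') ** 2 / sigma_nought ** 2

def sigma0_hh_to_vv(sigma0_hh, incidence_deg, alpha=0.6):
    # Assumed polarization-ratio model, e.g. Thompson et al.:
    #   PR = (1 + 2 tan^2(theta))^2 / (1 + alpha tan^2(theta))^2
    # alpha ~ 0.6 is an empirical constant, not taken from this mapper
    tan2 = np.tan(np.radians(incidence_deg)) ** 2
    pr = (1. + 2. * tan2) ** 2 / (1. + alpha * tan2) ** 2
    return sigma0_hh * pr

# toy usage with synthetic values
dn = np.array([[100., 200.], [300., 400.]])
lut = np.full(dn.shape, 500.)
s0_vv = sigma0_hh_to_vv(calibrate_sigma0(dn, lut), np.full(dn.shape, 35.))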
Example #55
0
    @staticmethod
    def correct_geolocation_data(data, max_height=5):
        """ Correct lon/lat values in geolocation data for points high above ground (incorrect)

        Each GCP in Sentinel-1 L1 image (both in the GeoTIF files and Annotation LUT) have five
        coordinates: X, Y, Z (height), Pixel and Line. On some scenes that cover Greenland (and
        probably other lands) some GCPs have height above zero even over ocean. This is incorrect,
        because the radar signal comes actually from the surface and not from a point above the
        ground as stipulated in such GCPs. This function provides correction of such GCPs.

        First, Lon/Lat are converted to X/Y in meters. Second, Pixel coordinates are approximated
        by a 2nd order polynomial of input X,Y,Z. Third, this polynomial is used to calculate new
        Pixel coordinate for ocean surface (Z=0). Fourth, a temporary VRT from original X, Y, Line
        and corrected Z,Pixel coordinates is created. Fifth, the temporary VRT is used for
        converting original Pixel/Line coordinates into correct Lon/Lat

        Parameters
        ----------
        data : dict
            Original geolocation data from Mapper.read_annotation()
        max_height : int
            Maximum allowed height (meters)

        Returns
        -------
        data : dict
            Corrected geolocation data with new longitude and latitude

        """
        # don't correct geolocation data if only few points are affected
        if (data['height'] > max_height).sum() < 10:
            return data

        # convert XY from degrees to meters in stereographic projection
        central_point = int(data['shape'][0] / 2), int(data['shape'][1] / 2)
        dst_srs = '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=%f +lon_0=%f +no_defs' % (
            data['latitude'][central_point], data['longitude'][central_point])
        x, y, z = VRT.transform_coordinates(
            NSR(), (data['longitude'].flat, data['latitude'].flat,
                    data['height'].flat), NSR(dst_srs))
        # create training data
        a = np.vstack(
            [np.ones(x.size), x, x**2, y, y**2, x * y, z, z**2, x * z,
             y * z]).T
        # calculate polynomial coefficients for values of Pixel (using least squares)
        b = np.linalg.lstsq(a, data['pixel'].flat)[0]
        # pixel_test = np.dot(a, b) # for debugging
        # set height to zero (ocean surface)
        a[:, 6:] = 0
        # calculate Pixel at ocean surface
        pixel_ocean = np.dot(a, b)
        high_pixels_idx = data['height'] > max_height
        tmp_pixel = np.array(data['pixel'])
        tmp_pixel[high_pixels_idx] = pixel_ocean[high_pixels_idx.flat]
        new_height = np.zeros(data['height'].shape)

        # create temporary VRT with correct GCPs for converting original pixel/line into lon/lat
        tmp_gcps = Mapper.create_gcps(x, y, new_height, tmp_pixel,
                                      data['line'])
        tmp_vrt = VRT(data['x_size'], data['y_size'])
        tmp_vrt.dataset.SetGCPs(tmp_gcps, NSR(dst_srs).wkt)
        tmp_vrt.tps = True
        new_lon, new_lat = tmp_vrt.transform_points(data['pixel'].flatten(),
                                                    data['line'].flatten())

        data['latitude'] = new_lat.reshape(data['shape'])
        data['longitude'] = new_lon.reshape(data['shape'])
        data['height'] = new_height
        return data
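The heart of this correction, fitting Pixel as a second-order polynomial of X, Y, Z and re-evaluating it at Z=0, can be reproduced stand-alone. A minimal sketch on synthetic data (all names here are illustrative, not part of the mapper):

import numpy as np

n = 100
x = np.random.uniform(-100., 100., n)
y = np.random.uniform(-100., 100., n)
z = np.random.uniform(0., 400., n)   # spurious heights that should be 0
pixel = 0.1 * x + 0.02 * z           # 'observed' pixel coordinates

# same design matrix as in correct_geolocation_data()
a = np.vstack([np.ones(n), x, x**2, y, y**2, x * y, z, z**2, x * z, y * z]).T
b = np.linalg.lstsq(a, pixel)[0]

# zero the z-terms (columns 6..9) and re-evaluate: pixel at the ocean surface
a[:, 6:] = 0
pixel_ocean = np.dot(a, b)
print(np.allclose(pixel_ocean, 0.1 * x))   # True: the height effect is gone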
Example #56
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT '''

        ThreddsBase = 'http://thredds.met.no/thredds/dodsC/myocean/siw-tac/siw-metno-svalbard/'
        # First check if mapper is called with keyword syntax:
        # filename = metno_hires_seaice:YYYYmmdd
        keywordBase = 'metno_hires_seaice'
        foundDataset = False
        if fileName[0:len(keywordBase)] == keywordBase:
            keywordTime = fileName[len(keywordBase)+1:]
            requestedTime = datetime.strptime(keywordTime, '%Y%m%d')
            # Search for nearest available file, within the closest 3 days
            for deltaDay in [0, -1, 1, -2, 2, -3, 3]:
                validTime = (requestedTime + timedelta(days=deltaDay) +
                             timedelta(hours=15))
                fileName = (ThreddsBase +
                            validTime.strftime(
                                '%Y/%m/ice_conc_svalbard_%Y%m%d1500.nc'))
                try:
                    urllib2.urlopen(fileName + '.dds')
                    foundDataset = True
                    # Data is found for this day
                    break
                except urllib2.URLError:
                    # No data for this day
                    pass

        if not foundDataset:
            raise WrongMapperError

        # Then check if a valid OPeNDAP URL is given
        # (or has been constructed from keyword)
        if fileName[0:len(ThreddsBase)] != ThreddsBase:
            raise AttributeError("Not Met.no Svalbard-ice Thredds URL")
        else:
            timestr = fileName[-15:-3]
            validTime = datetime.strptime(timestr, '%Y%m%d%H%M')

        fileName = fileName + '?ice_concentration[0][y][x]'
        srcProjection = osr.SpatialReference()
        srcProjection.ImportFromProj4('+proj=stere +lon_0=0.0 +lat_0=90 +datum=WGS84 +ellps=WGS84 +units=km +no_defs')
        srcProjection = srcProjection.ExportToWkt()

        # From thredds web, with manual shift
        srcGeotransform = (-1243.008 - 1, 1, 0, -3190.026 - 7, 0, 1)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self,
                     srcGeoTransform=srcGeotransform,
                     srcProjection=srcProjection,
                     srcRasterXSize=3812,
                     srcRasterYSize=2980)

        metaDict = [{'src': {'SourceFilename': fileName,
                             'sourceBand': 1},
                     'dst': {'name': 'sea_ice_area_fraction',
                             'wkv': 'sea_ice_area_fraction'}}]

        # Add band
        self._create_bands(metaDict)

        # Set time
        self.logger.info('Valid time: %s', str(validTime))
        self._set_time(validTime)
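The availability probe used above, fetching the .dds descriptor to test whether an OPeNDAP dataset exists, generalizes easily. A minimal sketch in the same Python 2 style as the mapper (the URL pattern in the usage comment is a placeholder):

import urllib2
from datetime import datetime, timedelta

def find_nearest_dataset(url_fmt, requested, max_days=3):
    # probe day 0, then -1, +1, -2, +2, ... like the loop above
    for delta in sorted(range(-max_days, max_days + 1), key=abs):
        valid = requested + timedelta(days=delta)
        url = valid.strftime(url_fmt)
        try:
            urllib2.urlopen(url + '.dds')   # exists if .dds can be fetched
            return url, valid
        except urllib2.URLError:
            continue
    return None, None

# e.g. find_nearest_dataset('http://.../ice_conc_%Y%m%d1500.nc',
#                           datetime(2014, 3, 1))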
Example #57
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Ocean Productivity website VRT '''

        try:
            assert 'IDL' in gdalMetadata['Projection Category']
            assert '-9999' in gdalMetadata['Hole Value']
        except (AssertionError, KeyError, TypeError):
            # metadata missing or not from the Ocean Productivity website
            raise WrongMapperError

        print 'Ocean Productivity website data'
        # get list of similar (same date) files in the directory
        iDir, iFile = os.path.split(fileName)
        iFileName, iFileExt = os.path.splitext(iFile)
        simFilesMask = os.path.join(iDir, '*' + iFileName[4:11] + iFileExt)
        simFiles = glob.glob(simFilesMask)

        metaDict = []
        for simFile in simFiles:
            # open subdataset with GDAL
            tmpSourceFilename = simFile
            tmpGdalDataset = gdal.Open(tmpSourceFilename)

            # get metadata, get 'Parameter'
            tmpGdalMetadata = tmpGdalDataset.GetMetadata()
            iDir, ifileName = os.path.split(tmpSourceFilename)
            simParameter = ifileName[0:3]

            # set params of the similar file
            simSourceFilename = tmpSourceFilename
            simGdalDataset = tmpGdalDataset
            simGdalMetadata = tmpGdalMetadata

            # get WKV from the similar file
            for param in self.param2wkv:
                if param in simParameter:
                    simWKV = self.param2wkv[param]
                    break
            # generate entry to metaDict
            metaEntry = {
                'src': {
                    'SourceFilename': simSourceFilename,
                    'SourceBand': 1,
                    'ScaleRatio': float(simGdalMetadata['Slope']),
                    'ScaleOffset': float(simGdalMetadata['Intercept'])
                },
                'dst': {
                    'wkv': simWKV,
                    'name': self.bandNames[simWKV],
                    'Parameter': simParameter
                }
            }
            # append entry to metaDict
            metaDict.append(metaEntry)

        # get array with data and make 'mask'
        a = simGdalDataset.ReadAsArray()
        mask = np.zeros(a.shape, 'uint8') + 128
        mask[a < -9990] = 1
        self.bandVRTs = {'maskVRT': VRT(array=mask)}

        metaDict.append({
            'src': {
                'SourceFilename': (self.bandVRTs['maskVRT'].fileName),
                'SourceBand': 1
            },
            'dst': {
                'name': 'mask'
            }
        })

        # create empty VRT dataset with geolocation only
        latitudeStep = 0.08333334
        longitudeStep = 0.08333334
        numberOfColumns = 4320
        numberOfLines = 2160
        VRT.__init__(
            self,
            srcGeoTransform=(-180.0, longitudeStep, 0.0, 90.0, 0.0,
                             -latitudeStep),
            srcProjection=
            'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
            srcRasterXSize=numberOfColumns,
            srcRasterYSize=numberOfLines)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # Add valid time
        startYear = int(iFile[4:8])
        startDay = int(iFile[8:11])
        self.dataset.SetMetadataItem(
            'time_coverage_start', (datetime.datetime(startYear, 1, 1) +
                                    datetime.timedelta(startDay)).isoformat())
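Two conventions in this mapper deserve a stand-alone illustration: the file name carries the year and the day-of-year in characters 4:8 and 8:11, and the Slope/Intercept metadata feed GDAL's linear scaling physical = DN * ScaleRatio + ScaleOffset. A small sketch (the file name is hypothetical):

import datetime

iFile = 'chl.2014123.hdf'   # hypothetical: parameter 'chl', year 2014, day 123
startYear = int(iFile[4:8])
startDay = int(iFile[8:11])
start = datetime.datetime(startYear, 1, 1) + datetime.timedelta(startDay)
print(start.isoformat())    # 2014-05-04T00:00:00

# the ScaleRatio/ScaleOffset entries above make GDAL return
#   physical = DN * Slope + Intercept
dn, slope, intercept = 42, 0.001, 0.0
physical = dn * slope + intercept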
Example #59
0
class Domain(object):
    '''Container for geographical reference of a raster

    A Domain object describes all attributes of geographical
    reference of a raster:
      * width and height (number of pixels)
      * pixel size (e.g. in decimal degrees or in meters)
      * relation between pixel/line coordinates and geographical
        coordinates (e.g. a linear relation)
      * type of data projection (e.g. geographical or stereographic)

    The core of Domain is a GDAL Dataset. It has no bands, but only
    georeference information: rasterXSize, rasterYSize, GeoTransform and
    Projection or GCPs, etc. which fully describe dimensions and spatial
    reference of the grid.

    There are three ways to store geo-reference in a GDAL dataset:
      * Using GeoTransform to define linear relationship between raster
        pixel/line and geographical X/Y coordinates
      * Using GCPs (set of Ground Control Points) to define non-linear
        relationship between pixel/line and X/Y
      * Using Geolocation Array - full grids of X/Y coordinates for
        each pixel of a raster
    The relation between X/Y coordinates of the raster and latitude/longitude
    coordinates is defined by projection type and projection parameters.
    These pieces of information are therefore stored in Domain:
      * Type and parameters of projection +
        * GeoTransform, or
        * GCPs, or
        * GeolocationArrays

    Domain has methods for basic operations with georeference information:
      * creating georeference from input options;
      * fetching corner, border or full grids of X/Y coordinates;
      * making map of the georeferenced grid in a PNG or KML file;
      * and some more...

    The main attribute of Domain is a VRT object self.vrt.
    Nansat inherits from Domain and adds bands to self.vrt

    '''
    def __init__(self,
                 srs=None,
                 ext=None,
                 ds=None,
                 lon=None,
                 lat=None,
                 name='',
                 logLevel=None):
        '''Create Domain from GDALDataset or string options or lat/lon grids

        d = Domain(srs, ext)
            Size, extent and spatial reference are given by strings
        d = Domain(ds=GDALDataset):
            Size, extent and spatial reference are copied from the input
            GDAL dataset
        d = Domain(srs, ds=GDALDataset):
            Spatial reference is given by srs, but size and extent are
            determined from the input GDAL dataset
        d = Domain(lon=lonGrid, lat=latGrid)
            Size, extent and spatial reference are given by two grids

        Parameters
        ----------
        srs : PROJ4 or EPSG or WKT or NSR or osr.SpatialReference()
            Input parameter for nansat.NSR()
        ext : string
            some gdalwarp options + additional options
            [http://www.gdal.org/gdalwarp.html]
            Specifies extent, resolution / size
            Available options: (('-te' or '-lle') and ('-tr' or '-ts'))
            (e.g. '-lle -10 30 55 60 -ts 1000 1000' or
            '-te 100 2000 300 10000 -tr 300 200')
            -tr resolutionx resolutiony
            -ts sizex sizey
            -te xmin ymin xmax ymax
            -lle lonmin latmin lonmax latmax
        ds : GDAL dataset
        lat : Numpy array
            Grid with latitudes
        lon : Numpy array
            Grid with longitudes
        name : string, optional
            Name to be added to the Domain object
        logLevel : int, optional, default=30
            level of logging

        Raises
        -------
        ProjectionError : occurs when Projection() is empty
            even though it is required for creating extentDic.
        OptionError : occurs when the arguments are not proper.

        Modifies
        ---------
        self.vrt.dataset : dataset in memory
            dataset is created based on the input arguments

        See Also
        ---------
        Nansat.reproject()
        [http://www.gdal.org/gdalwarp.html]
        [http://trac.osgeo.org/proj/]
        [http://spatialreference.org/]
        [http://www.gdal.org/ogr/osr_tutorial.html]

        '''
        # set default attributes
        self.logger = add_logger('Nansat', logLevel)
        self.name = name

        self.logger.debug('ds: %s' % str(ds))
        self.logger.debug('srs: %s' % srs)
        self.logger.debug('ext: %s' % ext)

        # If too much information is given raise error
        if ds is not None and srs is not None and ext is not None:
            raise OptionError('Ambiguous specification of both '
                              'dataset, srs- and ext-strings.')

        # choose between input options:
        # ds
        # ds and srs
        # srs and ext
        # lon and lat

        # if only a dataset is given:
        #     copy geo-reference from the dataset
        if ds is not None and srs is None:
            self.vrt = VRT(gdalDataset=ds)

        # If dataset and srs are given (but not ext):
        #   use AutoCreateWarpedVRT to determine bounds and resolution
        elif ds is not None and srs is not None:
            srs = NSR(srs)
            tmpVRT = gdal.AutoCreateWarpedVRT(ds, None, srs.wkt)
            if tmpVRT is None:
                raise ProjectionError('Could not warp the given dataset '
                                      'to the given SRS.')
            else:
                self.vrt = VRT(gdalDataset=tmpVRT)

        # If SpatialRef and extent string are given (but not dataset)
        elif srs is not None and ext is not None:
            srs = NSR(srs)
            # create full dictionary of parameters
            extentDic = self._create_extentDic(ext)

            # convert -lle to -te
            if 'lle' in extentDic.keys():
                extentDic = self._convert_extentDic(srs, extentDic)

            # get size/extent from the created extent dictionary
            [geoTransform, rasterXSize,
             rasterYSize] = self._get_geotransform(extentDic)
            # create VRT object with given geo-reference parameters
            self.vrt = VRT(srcGeoTransform=geoTransform,
                           srcProjection=srs.wkt,
                           srcRasterXSize=rasterXSize,
                           srcRasterYSize=rasterYSize)
            self.extentDic = extentDic
        elif lat is not None and lon is not None:
            # create self.vrt from given lat/lon
            self.vrt = VRT(lat=lat, lon=lon)
        else:
            raise OptionError('"dataset" or "srsString and extentString" '
                              'or "dataset and srsString" are required')

        self.logger.debug('vrt.dataset: %s' % str(self.vrt.dataset))
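    # Usage sketch, with values taken from the docstring above (the file
    # name is hypothetical):
    #   d = Domain(srs='+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs',
    #              ext='-lle -10 30 55 60 -ts 1000 1000')
    #   d = Domain(ds=gdal.Open('some_file.tif'))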

    def __repr__(self):
        '''Creates string with basic info about the Domain object

        Modifies
        ---------
        Print size, projection and corner coordinates

        '''
        outStr = 'Domain:[%d x %d]\n' % (self.vrt.dataset.RasterXSize,
                                         self.vrt.dataset.RasterYSize)
        outStr += '-' * 40 + '\n'
        try:
            corners = self.get_corners()
        except Exception:
            self.logger.error('Cannot read projection from source!')
        else:
            outStr += 'Projection:\n'
            outStr += (NSR(self.vrt.get_projection()).ExportToPrettyWkt(1) +
                       '\n')
            outStr += '-' * 40 + '\n'
            outStr += 'Corners (lon, lat):\n'
            outStr += '\t (%6.2f, %6.2f)  (%6.2f, %6.2f)\n' % (
                corners[0][0], corners[1][0], corners[0][2], corners[1][2])
            outStr += '\t (%6.2f, %6.2f)  (%6.2f, %6.2f)\n' % (
                corners[0][1], corners[1][1], corners[0][3], corners[1][3])
        return outStr

    def write_kml(self, xmlFileName=None, kmlFileName=None):
        '''Write KML file with domains

        Convert XML-file with domains into KML-file for GoogleEarth
        or write KML-file with the current Domain

        Parameters
        -----------
        xmlFileName : string, optional
            Name of the XML-file to convert. If only this value is given
            - kmlFileName=xmlFileName+'.kml'

        kmlFileName : string, optional
            Name of the KML-file to generate from the current Domain

        '''
        # test input options
        if xmlFileName is not None and kmlFileName is None:
            # if only input XML-file is given - convert it to KML

            # open XML, get all domains
            xmlFile = file(xmlFileName, 'rb')
            kmlFileName = xmlFileName + '.kml'
            xmlDomains = ElementTree(file=xmlFile).getroot()
            xmlFile.close()

            # convert domains in XML into list of domains
            domains = []
            for xmlDomain in list(xmlDomains):
                # append Domain object to domains list
                domainName = xmlDomain.attrib['name']
                domains.append(Domain(srs=xmlFileName, ext=domainName))

        elif xmlFileName is None and kmlFileName is not None:
            # if only output KML-file is given
            # then convert the current domain to KML
            domains = [self]

        else:
            # otherwise the input is wrong
            raise OptionError('Either xmlFileName(%s) or kmlFileName(%s) '
                              'is wrong' % (xmlFileName, kmlFileName))

        # open KML, write header
        kmlFile = file(kmlFileName, 'wt')
        kmlFile.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        kmlFile.write('<kml xmlns="http://www.opengis.net/kml/2.2" '
                      'xmlns:gx="http://www.google.com/kml/ext/2.2" '
                      'xmlns:kml="http://www.opengis.net/kml/2.2" '
                      'xmlns:atom="http://www.w3.org/2005/Atom">\n')
        kmlFile.write('<Document>\n')
        kmlFile.write('    <name>%s</name>\n' % kmlFileName)
        kmlFile.write('        <Folder><name>%s</name><open>1</open>\n' %
                      kmlFileName)

        # get border of each domain and add to KML
        for domain in list(domains):
            kmlEntry = domain._get_border_kml()
            kmlFile.write(kmlEntry)

        # write footer and close
        kmlFile.write('        </Folder></Document></kml>\n')
        kmlFile.close()

    def write_kml_image(self, kmlFileName=None, kmlFigureName=None):
        '''Create KML file for already projected image

        Write Domain Image into KML-file for GoogleEarth

        Parameters
        -----------
        kmlFileName : string, optional
            Name of the KML-file to generate from the current Domain
        kmlFigureName : string, optional
            Name of the projected image stored in .png format

        Examples
        ---------
        # First of all, reproject an image into Lat/Lon WGS84
        # (Simple Cylindrical) projection:
        # 1. Cancel previous reprojection
        # 2. Get corners of the image and the pixel resolution
        # 3. Create Domain with lat/lon projection,
        #    corner coordinates and size 3000 x 3000 pixels
        # 4. Reproject
        # 5. Write image
        # 6. Write KML for the image
        n.reproject() # 1.
        lons, lats = n.get_corners() # 2.
        srsString = '+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs'
        extentString = '-lle %f %f %f %f -ts 3000 3000' % (
            min(lons), min(lats), max(lons), max(lats))
        d = Domain(srs=srsString, ext=extentString) # 3.
        n.reproject(d) # 4.
        n.write_figure(fileName=figureName, bands=[3], clim=[0,0.15],
                       cmapName='gray', transparency=0) # 5.
        n.write_kml_image(kmlFileName=oPath + fileName + '.kml',
                          kmlFigureName=figureName) # 6.

        '''
        # test input options
        if kmlFileName is None:
            raise OptionError('kmlFileName(%s) is wrong' % (kmlFileName))

        if kmlFigureName is None:
            raise OptionError('kmlFigureName(%s) is not specified' %
                              (kmlFigureName))

        # open KML, write header
        kmlFile = file(kmlFileName, 'wt')
        kmlFile.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        kmlFile.write('<kml xmlns="http://www.opengis.net/kml/2.2" '
                      'xmlns:gx="http://www.google.com/kml/ext/2.2" '
                      'xmlns:kml="http://www.opengis.net/kml/2.2" '
                      'xmlns:atom="http://www.w3.org/2005/Atom">\n')
        kmlFile.write('<GroundOverlay>\n')
        kmlFile.write('    <name>%s</name>\n' % kmlFileName)
        kmlFile.write('    <Icon>\n')
        kmlFile.write('        <href>%s</href>\n' % kmlFigureName)
        kmlFile.write('        <viewBoundScale>0.75</viewBoundScale>\n')
        kmlFile.write('    </Icon>\n')

        # get corner of the domain and add to KML
        domainLon, domainLat = self.get_corners()

        kmlFile.write('    <LatLonBox>\n')
        kmlFile.write('        <north>%s</north>\n' % max(domainLat))
        kmlFile.write('        <south>%s</south>\n' % min(domainLat))
        kmlFile.write('        <east>%s</east>\n' % max(domainLon))
        kmlFile.write('        <west>%s</west>\n' % min(domainLon))
        kmlFile.write('    </LatLonBox>\n')

        # write footer and close
        kmlFile.write('</GroundOverlay>\n')
        kmlFile.write('</kml>')
        kmlFile.close()

    def get_geolocation_grids(self, stepSize=1, dstSRS=NSR()):
        '''Get longitude and latitude grids representing the full data grid

        If GEOLOCATION is not present in the self.vrt.dataset then grids
        are generated by converting pixel/line of each pixel into lat/lon
        If GEOLOCATION is present in the self.vrt.dataset then grids are read
        from the geolocation bands.

        Parameters
        -----------
        stepSize : int
            Reduction factor if output is desired on a reduced grid size
        dstSRS : NSR
            destination spatial reference for the grids

        Returns
        --------
        longitude : numpy array
            grid with longitudes
        latitude : numpy array
            grid with latitudes
        '''

        X = range(0, self.vrt.dataset.RasterXSize, stepSize)
        Y = range(0, self.vrt.dataset.RasterYSize, stepSize)
        Xm, Ym = np.meshgrid(X, Y)

        if len(self.vrt.geolocationArray.d) > 0:
            # if the vrt dataset has geolocationArray
            # read lon,lat grids from geolocationArray
            lon, lat = self.vrt.geolocationArray.get_geolocation_grids()
            longitude, latitude = lon[Ym, Xm], lat[Ym, Xm]
        else:
            # generate lon,lat grids using GDAL Transformer
            lonVec, latVec = self.transform_points(Xm.flatten(),
                                                   Ym.flatten(),
                                                   dstSRS=dstSRS)
            longitude = lonVec.reshape(Xm.shape)
            latitude = latVec.reshape(Xm.shape)

        return longitude, latitude

    def _convert_extentDic(self, dstSRS, extentDic):
        '''Convert -lle option (lat/lon) to -te (proper coordinate system)

        Source SRS from LAT/LON projection and target SRS from dstWKT.
        Create osr.CoordinateTransformation based on these SRSs and
        convert given values in degrees to the destination coordinate
        system given by WKT.
        Add key 'te' and the converted values into the extentDic.

        Parameters
        -----------
        dstSRS : NSR
            Destination Spatial Reference
        extentDic : dictionary
            dictionary with 'lle' key

        Returns
        --------
        extentDic : dictionary
            input dictionary + 'te' key and its values

        '''
        coorTrans = osr.CoordinateTransformation(NSR(), dstSRS)

        # convert lat/lon given by 'lle' to the target coordinate system and
        # add key 'te' and the converted values to extentDic
        x1, y1, _ = coorTrans.TransformPoint(extentDic['lle'][0],
                                             extentDic['lle'][3])
        x2, y2, _ = coorTrans.TransformPoint(extentDic['lle'][2],
                                             extentDic['lle'][3])
        x3, y3, _ = coorTrans.TransformPoint(extentDic['lle'][2],
                                             extentDic['lle'][1])
        x4, y4, _ = coorTrans.TransformPoint(extentDic['lle'][0],
                                             extentDic['lle'][1])

        minX = min([x1, x2, x3, x4])
        maxX = max([x1, x2, x3, x4])
        minY = min([y1, y2, y3, y4])
        maxY = max([y1, y2, y3, y4])

        extentDic['te'] = [minX, minY, maxX, maxY]

        return extentDic

    def _create_extentDic(self, extentString):
        '''Create a dictionary from extentString

        Check if extentString is proper.
            * '-te' and '-lle' take 4 numbers.
            * '-ts' and '-tr' take 2 numbers.
            * the combination should be ('-te' or '-lle') and ('-ts' or '-tr')
        If it is proper, create a dictionary
        Otherwise, raise the error.

        Parameters
        -----------
        extentString : string
            '-te xMin yMin xMax yMax',
            '-tr xResolution yResolution',
            '-ts width height',
            '-lle minlon minlat maxlon maxlat'

        Returns
        --------
        extentDic : dictionary
            has key ('te' or 'lle') and ('tr' or 'ts') and their values.

        Raises
        -------
        OptionError : occurs when the extentString is improper

        '''
        extentDic = {}

        # Find -tr text
        str_tr = re.findall('-tr\s+[-+]?\d*[.\d*]*\s+[-+]?\d*[.\d*]*\s?',
                            extentString)
        if str_tr != []:
            # Check the number of -tr elements
            elm_str = str(str_tr[0].rstrip())
            elms_str = elm_str.split(None)
            if len(elms_str) != 3 or elms_str[2] == '-':
                raise OptionError('Domain._create_extentDic():'
                                  '-tr is used as'
                                  '"-tr xResolution yResolution"')
            # Add the key and value to extentDic
            extentString = extentString.replace(str_tr[0], '')
            trElem = str(str_tr).split(None)
            trkey = trElem[0].translate(string.maketrans('', ''), "[]-'")
            if trkey != '':
                elements = []
                for i in range(2):
                    elements.append(
                        float(trElem[i + 1].translate(string.maketrans('', ''),
                                                      "'[]'")))
                extentDic[trkey] = elements

        # Find -ts text
        str_ts = re.findall('-ts\s+[-+]?\d*[.\d*]*\s+[-+]?\d*[.\d*]*\s?',
                            extentString)
        if str_ts != []:
            # Check the number of -ts elements
            elm_str = str(str_ts[0].rstrip())
            elms_str = elm_str.split(None)
            if len(elms_str) != 3 or elms_str[2] == '-':
                raise OptionError('Domain._create_extentDic(): '
                                  '"-ts" is used as "-ts width height"')
            # Add the key and value to extentDic
            extentString = extentString.replace(str_ts[0], '')
            tsElem = str(str_ts).split(None)
            tskey = tsElem[0].translate(string.maketrans('', ''), "[]-'")
            if tskey != '':
                elements = []
                for i in range(2):
                    elements.append(
                        float(tsElem[i + 1].translate(string.maketrans('', ''),
                                                      "[]'")))
                extentDic[tskey] = elements

        # Find -te text
        str_te = re.findall(
            '-te\s+[-+]?\d*[.\d*]*\s+[-+]?\d*[.\d*]*\s'
            '+[-+]?\d*[.\d*]*\s+[-+]?\d*[.\d*]*\s?', extentString)
        if str_te != []:
            # Check the number of -te elements
            elm_str = str(str_te[0].rstrip())
            elms_str = elm_str.split(None)
            if len(elms_str) != 5:
                raise OptionError('Domain._create_extentDic():'
                                  '-te is used as "-te xMin yMin xMax yMax"')
            # Add the key and value to extentDic
            extentString = extentString.replace(str_te[0], '')
            teElem = str(str_te).split(None)
            tekey = teElem[0].translate(string.maketrans('', ''), "[]-'")
            if tekey != '':
                elements = []
                for i in range(4):
                    elements.append(
                        float(teElem[i + 1].translate(string.maketrans('', ''),
                                                      "[]'")))
                extentDic[tekey] = elements

        # Find -lle text
        str_lle = re.findall(
            '-lle\s+[-+]?\d*[.\d*]*\s+[-+]?\d*[.\d*]*\s'
            '+[-+]?\d*[.\d*]*\s+[-+]?\d*[.\d*]*\s?', extentString)
        if str_lle != []:
            # Check the number of -lle elements
            elm_str = str(str_lle[0].rstrip())
            elms_str = elm_str.split(None)
            if len(elms_str) != 5:
                raise OptionError('Domain._create_extentDic():'
                                  '-lle is used as '
                                  '"-lle minlon minlat maxlon maxlat"')
            # Add the key and value to extentDic
            extentString = extentString.replace(str_lle[0], '')
            lleElem = str(str_lle).split(None)
            llekey = lleElem[0].translate(string.maketrans('', ''), "[]-'")
            if llekey != '':
                elements = []
                for i in range(4):
                    elements.append(
                        float(lleElem[i + 1].translate(
                            string.maketrans('', ''), "[]'")))
                extentDic[llekey] = elements

        result = re.search('\S', extentString)
        # if there are unnecessary letters, give an error
        if result is not None:
            raise OptionError(
                'Domain._create_extentDic(): '
                'extentString is not readable: ', extentString)

        # check if one of '-te' and '-lle' is given
        if ('lle' not in extentDic) and ('te' not in extentDic):
            raise OptionError('Domain._create_extentDic():'
                              '"-lle" or "-te" is required.')
        elif ('lle' in extentDic) and ('te' in extentDic):
            raise OptionError('Domain._create_extentDic():'
                              '"-lle" or "-te" should be chosen.')

        # check if one of '-ts' and '-tr' is given
        if ('ts' not in extentDic) and ('tr' not in extentDic):
            raise OptionError('Domain._create_extentDic():'
                              '"-ts" or "-tr" is required.')
        elif ('ts' in extentDic) and ('tr' in extentDic):
            raise OptionError('Domain._create_extentDic():'
                              '"-ts" or "-tr" should be chosen.')
        return extentDic
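
    # Worked example, using the option strings documented above:
    #   _create_extentDic('-te 100 2000 300 10000 -tr 300 200')
    # returns
    #   {'te': [100.0, 2000.0, 300.0, 10000.0], 'tr': [300.0, 200.0]}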

    def get_border(self, nPoints=10):
        '''Generate two vectors with values of lat/lon for the border of domain

        Parameters
        -----------
        nPoints : int, optional
            Number of points on each border

        Returns
        --------
        lonVec, latVec : lists
            vectors with lon/lat values for each point at the border

        '''
        # prepare vectors with pixels and lines for upper, left, lower
        # and right borders
        sizes = [self.vrt.dataset.RasterXSize, self.vrt.dataset.RasterYSize]

        rcVector1 = [[], []]
        rcVector2 = [[], []]
        # loop for pixels and lines
        for n in range(0, 2):
            step = max(1, sizes[n] / nPoints)
            rcVector1[n] = range(0, sizes[n], step)[0:nPoints]
            rcVector1[n].append(sizes[n])
            rcVector2[n] = rcVector1[n][:]
            rcVector2[n].reverse()

        # compound vectors of pixels (col) and lines (row)
        colVector = (rcVector1[0] + [sizes[0]] * len(rcVector1[1]) +
                     rcVector2[0] + [0] * len(rcVector1[1]))
        rowVector = ([0] * len(rcVector1[0]) + rcVector1[1] +
                     [sizes[1]] * len(rcVector1[0]) + rcVector2[1])

        return self.transform_points(colVector, rowVector)
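
    # The vectors trace the border clockwise: top edge left-to-right, right
    # edge top-to-bottom, bottom edge right-to-left, left edge bottom-to-top,
    # 4 * (nPoints + 1) points in total.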

    def _get_border_kml(self, *args, **kwargs):
        '''Generate Placemark entry for KML

        Returns
        --------
        kmlEntry : String
            String with the Placemark entry

        '''
        domainLon, domainLat = self.get_border(*args, **kwargs)

        # convert Border coordinates into KML-like string
        coordinates = ''
        for lon, lat in zip(domainLon, domainLat):
            coordinates += '%f,%f,0 ' % (lon, lat)

        kmlEntry = ''
        # write placemark: name, style, polygon, coordinates
        kmlEntry += '            <Placemark>\n'
        kmlEntry += '                <name>%s</name>\n' % self.name
        kmlEntry += '                <Style>\n'
        kmlEntry += '                    <LineStyle><color>ffffffff</color>'\
                    '</LineStyle>\n'
        kmlEntry += '                    <PolyStyle><fill>0</fill>'\
                    '</PolyStyle>\n'
        kmlEntry += '                </Style>\n'
        kmlEntry += '                <Polygon><tessellate>1</tessellate>'\
                    '<outerBoundaryIs><LinearRing><coordinates>\n'
        kmlEntry += coordinates + '\n'
        kmlEntry += '            </coordinates></LinearRing>'\
                    '</outerBoundaryIs></Polygon></Placemark>\n'

        return kmlEntry

    def get_border_wkt(self, *args, **kwargs):
        '''Creates string with WKT representation of the border polygon

        Returns
        --------
        WKTPolygon : string
            string with WKT representation of the border polygon

        '''
        lonList, latList = self.get_border(*args, **kwargs)

        # apply > 180 deg correction to longitudes
        for ilon, lon in enumerate(lonList):
            lonList[ilon] = copysign(
                acos(cos(lon * pi / 180.)) / pi * 180, sin(lon * pi / 180.))
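        # e.g. lon = 190 folds to -170: acos(cos(190 deg)) = 170 deg and
        # sin(190 deg) < 0 flips the sign, keeping values in [-180, 180]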

        polyCont = ','.join(
            str(lon) + ' ' + str(lat) for lon, lat in zip(lonList, latList))
        # outer quotes have to be double and inner - single!
        # wktPolygon = "PolygonFromText('POLYGON((%s))')" % polyCont
        wkt = 'POLYGON((%s))' % polyCont
        return wkt

    def get_border_geometry(self, *args, **kwargs):
        ''' Get OGR Geometry of the border Polygon

        Returns
        -------
        OGR Geometry, type Polygon

        '''

        return ogr.CreateGeometryFromWkt(self.get_border_wkt(*args, **kwargs))

    def overlaps(self, anotherDomain):
        ''' Checks if this Domain overlaps another Domain

        Returns
        -------
        overlaps : bool
            True if Domains overlaps, False otherwise

        '''

        return self.get_border_geometry().Intersects(
            anotherDomain.get_border_geometry())

    def contains(self, anotherDomain):
        ''' Checks if this Domain fully covers another Domain

        Returns
        -------
        contains : bool
            True if this Domain fully covers another Domain, False otherwise

        '''

        return self.get_border_geometry().Contains(
            anotherDomain.get_border_geometry())

    def get_border_postgis(self):
        ''' Get PostGIS formatted string of the border Polygon

        Returns
        -------
        str : 'PolygonFromText(PolygonWKT)'

        '''

        return "PolygonFromText('%s')" % self.get_border_wkt()

    def get_corners(self):
        '''Get coordinates of corners of the Domain

        Returns
        --------
        lonVec, latVec : lists
            vectors with lon/lat values for each corner

        '''

        colVector = [
            0, 0, self.vrt.dataset.RasterXSize, self.vrt.dataset.RasterXSize
        ]
        rowVector = [
            0, self.vrt.dataset.RasterYSize, 0, self.vrt.dataset.RasterYSize
        ]
        return self.transform_points(colVector, rowVector)

    def get_min_max_lat_lon(self):
        '''Get minimum and maximum lat and long values in the geolocation grid

        Returns
        --------
        minLat, maxLat, minLon, maxLon : float
            min/max lon/lat values for the Domain

        '''
        allLongitudes, allLatitudes = self.get_geolocation_grids()
        maxLat = -90
        minLat = 90
        for latitudes in allLatitudes:
            for lat in latitudes:
                if lat > maxLat:
                    maxLat = lat
                if lat < minLat:
                    minLat = lat

        maxLon = -180
        minLon = 180
        for longitudes in allLongitudes:
            for lon in longitudes:
                if lon > maxLon:
                    maxLon = lon
                if lon < minLon:
                    minLon = lon

        return minLat, maxLat, minLon, maxLon
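
    # Note: the grids are numpy arrays, so the loops above are equivalent to
    #   allLatitudes.min(), allLatitudes.max(),
    #   allLongitudes.min(), allLongitudes.max()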

    def get_pixelsize_meters(self):
        '''Returns the pixel size (deltaX, deltaY) of the domain in meters

        For projected domains, the exact result, which is constant over the
        domain, is returned. For geographic (lon-lat) projections, or domains
        with no geotransform, the haversine formula is used to estimate the
        pixel size in the center of the domain.

        Returns
        --------
        deltaX, deltaY : float
            pixel size in X and Y directions given in meters
        '''

        srs = osr.SpatialReference(self.vrt.dataset.GetProjection())
        if srs.IsProjected():
            if srs.GetAttrValue('unit') == 'metre':
                geoTransform = self.vrt.dataset.GetGeoTransform()
                deltaX = abs(geoTransform[1])
                deltaY = abs(geoTransform[5])
                return deltaX, deltaY

        # Estimate pixel size in center of domain using haversine formula
        centerCol = round(self.vrt.dataset.RasterXSize / 2)
        centerRow = round(self.vrt.dataset.RasterYSize / 2)
        lon00, lat00 = self.transform_points([centerCol], [centerRow])
        lon01, lat01 = self.transform_points([centerCol], [centerRow + 1])
        lon10, lat10 = self.transform_points([centerCol + 1], [centerRow])

        # one step along a column changes X; one step along a row changes Y
        deltaX = haversine(lon00, lat00, lon10, lat10)
        deltaY = haversine(lon00, lat00, lon01, lat01)
        return deltaX[0], deltaY[0]

    def _get_geotransform(self, extentDic):
        '''
        the new coordinates and raster size are calculated based on
        the given extentDic.

        Parameters
        -----------
        extentDic : dictionary
            includes 'te' key and 'ts' or 'tr' key

        Raises
        -------
        OptionError : occurs when maxX - minX < 0 or maxY - minY < 0
        OptionError : occurs when the given resolution is larger than
                     width or height.

        Returns
        --------
        coordinate : list with 6 float
            GeoTransform

        rasterSize : list with two int
            rasterXSize and rasterYSize

        '''
        # recalculate GeoTransform based on extent option
        minX = extentDic['te'][0]
        minY = extentDic['te'][1]
        maxX = extentDic['te'][2]
        maxY = extentDic['te'][3]
        cornerX = minX
        cornerY = maxY
        width = maxX - minX
        height = maxY - minY
        if width <= 0 or height <= 0:
            raise OptionError('The extent is illegal. '
                              '"-te xMin yMin xMax yMax" ')

        if 'tr' in extentDic.keys():
            resolutionX = extentDic['tr'][0]
            resolutionY = -(extentDic['tr'][1])
            if (width < resolutionX or height < resolutionY):
                raise OptionError('"-tr" is too large. '
                                  'width is %s, height is %s ' %
                                  (str(width), str(height)))
            rasterXSize = width / resolutionX
            rasterYSize = abs(height / resolutionY)
        else:
            rasterXSize = extentDic['ts'][0]
            rasterYSize = extentDic['ts'][1]
            resolutionX = width / rasterXSize
            resolutionY = -abs(height / rasterYSize)

        # create a list for GeoTransform
        coordinates = [cornerX, resolutionX, 0.0, cornerY, 0.0, resolutionY]

        return coordinates, int(rasterXSize), int(rasterYSize)
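
    # Worked example: extentDic = {'te': [0, 0, 30, 30], 'ts': [300, 300]}
    # yields GeoTransform [0, 0.1, 0.0, 30, 0.0, -0.1] and a 300 x 300 raster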

    def transform_points(self, colVector, rowVector, DstToSrc=0, dstSRS=NSR()):
        '''Transform given lists of X,Y coordinates into lon/lat or inverse

        Parameters
        -----------
        colVector, rowVector : lists
            X and Y coordinates in pixel/line or lon/lat coordinate system
        DstToSrc : 0 or 1
            0 - forward transform (pix/line => lon/lat)
            1 - inverse transformation
        dstSRS : NSR
            destination spatial reference

        Returns
        --------
        X, Y : lists
            X and Y coordinates in lon/lat or pixel/line coordinate system

        '''
        return self.vrt.transform_points(colVector,
                                         rowVector,
                                         DstToSrc,
                                         dstSRS=dstSRS)

    def azimuth_y(self, reductionFactor=1):
        '''Calculate the angle of each pixel position vector with respect to
        the Y-axis (azimuth).

        In general, azimuth is the angle from a reference vector (e.g., the
        direction to North) to the chosen position vector. The azimuth
        increases clockwise from direction to North.
        http://en.wikipedia.org/wiki/Azimuth

        Parameters
        -----------
        reductionFactor : integer
            factor by which the size of the output array is reduced

        Returns
        -------
        azimuth : numpy array
            Values of azimuth in degrees in range 0 - 360

        '''

        lon, lat = self.get_geolocation_grids(reductionFactor)
        a = initial_bearing(lon[1:, :], lat[1:, :], lon[:-1:, :], lat[:-1:, :])
        # Repeat last row once to match size of lon-lat grids
        a = np.vstack((a, a[-1, :]))
        return a

    def shape(self):
        '''Return Numpy-like shape of Domain object (ySize, xSize)

        Returns
        --------
        shape : tuple of two INT
            Numpy-like shape of Domain object (ySize, xSize)

        '''
        return self.vrt.dataset.RasterYSize, self.vrt.dataset.RasterXSize

    def write_map(self,
                  outputFileName,
                  lonVec=None,
                  latVec=None,
                  lonBorder=10.,
                  latBorder=10.,
                  figureSize=(6, 6),
                  dpi=50,
                  projection='cyl',
                  resolution='c',
                  continetsColor='coral',
                  meridians=10,
                  parallels=10,
                  pColor='r',
                  pLine='k',
                  pAlpha=0.5,
                  padding=0.,
                  merLabels=[False, False, False, False],
                  parLabels=[False, False, False, False],
                  pltshow=False,
                  labels=None):
        ''' Create an image with a map of the domain

        Uses Basemap to create a World Map
        Adds a semitransparent patch with outline of the Domain
        Writes to an image file

        Parameters
        -----------
        outputFileName : string
            name of the output file name
        lonVec : [floats] or [[floats]]
            longitudes of patches to display
        latVec : [floats] or [[floats]]
            latitudes of patches to display
        lonBorder : float
            10, horizontal border around patch (degrees of longitude)
        latBorder : float
            10, vertical border around patch (degrees of latitude)
        figureSize : tuple of two integers
            (6, 6), size of the generated figure in inches
        dpi : int
            50, resolution of the output figure (a (6, 6) inch figure at
            dpi 50 produces a 300 x 300 pixel image)
        projection : string, one of Basemap projections
            'cyl', projection of the map
        resolution : string, resolution of the map
            'c', crude
            'l', low
            'i', intermediate
            'h', high
            'f', full
        continetsColor : string or any matplotlib color representation
            'coral', color of continents
        meridians : int
            10, number of meridians to draw
        parallels : int
            10, number of parallels to draw
        pColor : string or any matplotlib color representation
            'r', color of the Domain patch
        pLine : string or any matplotlib color representation
            'k', color of the Domain outline
        pAlpha : float 0 - 1
            0.5, transparency of Domain patch
        padding : float
            0., width of white padding around the map
        merLabels : list of 4 booleans
            where to put meridian labels, see also Basemap.drawmeridians()
        parLabels : list of 4 booleans
            where to put parallel labels, see also Basemap.drawparallels()
        pltshow : boolean
            False, if True show the figure interactively after saving
            instead of closing it
        labels : list of str
            labels to print on top of patches
        '''
        # fall back to the Domain border if the lat/lon vectors are not
        # given or their lengths do not match
        if lonVec is None or latVec is None or len(lonVec) != len(latVec):
            lonVec, latVec = self.get_border()

        # convert vectors to numpy arrays
        lonVec = np.array(lonVec)
        latVec = np.array(latVec)

        # estimate mean/min/max values of lat/lon of the shown area
        # (real lat min max +/- latBorder) and (real lon min max +/- lonBorder)
        minLon = max(-180, lonVec.min() - lonBorder)
        maxLon = min(180, lonVec.max() + lonBorder)
        minLat = max(-90, latVec.min() - latBorder)
        maxLat = min(90, latVec.max() + latBorder)
        meanLon = lonVec.mean()
        meanLat = latVec.mean()

        # generate template map (can be also tmerc)
        plt.figure(num=1, figsize=figureSize, dpi=dpi)
        bmap = Basemap(projection=projection,
                       lat_0=meanLat,
                       lon_0=meanLon,
                       llcrnrlon=minLon,
                       llcrnrlat=minLat,
                       urcrnrlon=maxLon,
                       urcrnrlat=maxLat,
                       resolution=resolution)

        # add content: coastline, continents, meridians, parallels
        bmap.drawcoastlines()
        bmap.fillcontinents(color=continetsColor)
        bmap.drawmeridians(np.linspace(minLon, maxLon, meridians),
                           labels=merLabels,
                           fmt='%2.1f')
        bmap.drawparallels(np.linspace(minLat, maxLat, parallels),
                           labels=parLabels,
                           fmt='%2.1f')

        # wrap a single 1D lat/lon vector into a list so that one or
        # several patches are handled uniformly below
        if len(lonVec.shape) == 1:
            lonVec = [lonVec]
            latVec = [latVec]

        for i in range(len(lonVec)):
            # convert lat/lons to map units
            mapX, mapY = bmap(list(lonVec[i].flat), list(latVec[i].flat))

            # from x/y vectors create a Patch to be added to map
            boundary = Polygon(list(zip(mapX, mapY)),
                               alpha=pAlpha,
                               ec=pLine,
                               fc=pColor)

            # add patch to the map
            plt.gca().add_patch(boundary)
            plt.gca().set_aspect('auto')

            if labels is not None and labels[i] is not None:
                plt.text(np.mean(mapX),
                         np.mean(mapY),
                         labels[i],
                         va='center',
                         ha='right',
                         alpha=0.5,
                         fontsize=10)

        # save figure and close
        plt.savefig(outputFileName,
                    bbox_inches='tight',
                    dpi=dpi,
                    pad_inches=padding)
        if pltshow:
            plt.show()
        else:
            plt.close('all')
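    # Usage sketch (hypothetical output file name and Domain instance `d`;
    # Basemap must be installed for this method to work):
    #     d.write_map('domain_map.png', pColor='b', pAlpha=0.3,
    #                 meridians=5, parallels=5)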

    def reproject_GCPs(self, srsString=''):
        '''Reproject all GCPs to a new spatial reference system

        Necessary before warping an image if the given GCPs
        are in a coordinate system which has a singularity
        in (or near) the destination area (e.g. poles for lonlat GCPs)

        Parameters
        ----------
        srsString : string
            SRS given as a Proj4 string. If empty, an oblique stereographic
            projection ('+proj=stere') centered at the median lon/lat of
            the Domain border is used

        Modifies
        --------
            Reprojects all GCPs to new SRS and updates GCPProjection
        '''
        if srsString == '':
            lon, lat = self.get_border()
            srsString = '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=%f +lon_0=%f +no_defs' % (
                np.nanmedian(lat), np.nanmedian(lon))

        self.vrt.reproject_GCPs(srsString)
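    # Usage sketch (assumes lonlat GCPs over a polar scene; the Proj4
    # string is an illustrative example of the '+proj=stere' fallback
    # built above, not a fixed default):
    #     d.reproject_GCPs('+proj=stere +datum=WGS84 +lat_0=90 +lon_0=0 +no_defs')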
Example #60
0
    def __init__(self,
                 fileName,
                 gdalDataset,
                 gdalMetadata,
                 latlonGrid=None,
                 mask='',
                 **kwargs):
        ''' Create VRT

        Parameters
        -----------
        fileName : string
            name of the input ASCAT netCDF file
        gdalDataset : gdal dataset
        gdalMetadata : gdal metadata
        latlonGrid : numpy 2-layered 2D array with lat/lons of desired grid
        '''
        # test if the input file is ASCAT
        iDir, iFile = os.path.split(fileName)
        iFileName, iFileExt = os.path.splitext(iFile)
        if not (iFileName[0:6] == 'ascat_' and iFileExt == '.nc'):
            raise WrongMapperError

        # Create geolocation
        subDataset = gdal.Open('NETCDF:"' + fileName + '":lat')
        self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize,
                             srcRasterYSize=subDataset.RasterYSize)

        GeolocMetaDict = [{
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":lon'),
                'SourceBand': 1,
                'ScaleRatio': 0.00001,
                'ScaleOffset': -360
            },
            'dst': {}
        }, {
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":lat'),
                'SourceBand': 1,
                'ScaleRatio': 0.00001,
                'ScaleOffset': 0
            },
            'dst': {}
        }]
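        # Assumption (not stated in this file): lat/lon are stored as
        # packed integers, so a ScaleRatio of 1e-5 converts them to
        # degrees; the -360 offset presumably shifts longitudes out of the
        # file's 0..360 convention before use as a geolocation array.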

        self.GeolocVRT._create_bands(GeolocMetaDict)

        GeolocObject = GeolocationArray(
            xVRT=self.GeolocVRT,
            yVRT=self.GeolocVRT,
            # x = lon, y = lat
            xBand=1,
            yBand=2,
            lineOffset=0,
            pixelOffset=0,
            lineStep=1,
            pixelStep=1)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self,
                     srcRasterXSize=subDataset.RasterXSize,
                     srcRasterYSize=subDataset.RasterYSize,
                     gdalDataset=subDataset,
                     geolocationArray=GeolocObject,
                     srcProjection=GeolocObject.d['SRS'])
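        # With a geolocation array attached, GDAL locates each pixel
        # through the lon/lat bands instead of an affine GeoTransform,
        # which suits irregular swath data such as ASCAT.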

        # Scale and NODATA should ideally be taken directly from raw file
        metaDict = [{
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":wind_speed'),
                'ScaleRatio': 0.01,
                'NODATA': -32767
            },
            'dst': {
                'name': 'wind_speed',
                'wkv': 'wind_speed'
            }
        }, {
            'src': {
                'SourceFilename': ('NETCDF:"' + fileName + '":wind_dir'),
                'ScaleRatio': 0.1,
                'NODATA': -32767
            },
            'dst': {
                'name': 'wind_direction',
                'wkv': 'wind_direction'
            }
        }]

        self._create_bands(metaDict)

        # This should not be necessary
        # - should be provided by GeolocationArray!
        self.dataset.SetProjection(GeolocObject.d['SRS'])

        # Add time
        startTime = datetime.datetime(int(iFileName[6:10]),
                                      int(iFileName[10:12]),
                                      int(iFileName[12:14]),
                                      int(iFileName[15:17]),
                                      int(iFileName[17:19]),
                                      int(iFileName[19:21]))
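        # assumed filename layout: 'ascat_YYYYMMDD_HHMMSS...', i.e. year
        # at [6:10], month [10:12], day [12:14], hour [15:17],
        # minute [17:19], second [19:21]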
        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start',
                                     startTime.isoformat())
        self.dataset.SetMetadataItem('time_coverage_end',
                                     startTime.isoformat())

        # set SADCAT specific metadata
        self.dataset.SetMetadataItem('sensor', 'ASCAT')
        self.dataset.SetMetadataItem('satellite', 'Metop-A')
        warnings.warn("Setting satellite to Metop-A - update mapper if it is" \
                " e.g. Metop-B")
        self.dataset.SetMetadataItem('mapper', 'ascat_nasa')
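    # Usage sketch (hypothetical file name; in practice Nansat selects
    # this mapper automatically for files matching 'ascat_*.nc'):
    #     n = Nansat('ascat_20120101_120000_metopa_somefile.nc')
    #     wind_speed = n['wind_speed']
    #     wind_direction = n['wind_direction']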