コード例 #1
0
ファイル: test_node.py プロジェクト: zhangjiahuan17/nansat
 def test_add_node(self):
     """Adding a child with += puts it into the parent's children list."""
     parent = Node('Root')
     child = Node('FirstLevel')
     parent += child
     self.assertIn(child, parent.children)
コード例 #2
0
ファイル: test_node.py プロジェクト: nansencenter/nansat
 def test_delete_attribute(self):
     """delAttribute removes the named attribute from node.attributes."""
     node = Node('Root', value='   Value   ', anAttr='elValue')
     self.assertIn('anAttr', node.attributes)
     node.delAttribute('anAttr')
     self.assertNotIn('anAttr', node.attributes)
コード例 #3
0
 def test_delete_attribute(self):
     """A deleted attribute no longer appears in the attributes mapping."""
     tag, value = 'Root', '   Value   '
     node = Node(tag, value=value, anAttr='elValue')
     self.assertIn('anAttr', node.attributes)
     node.delAttribute('anAttr')
     self.assertNotIn('anAttr', node.attributes)
コード例 #4
0
 def test_insert(self):
     """insert parses an XML string and attaches it under the node."""
     contents = ('<Element attr="attrValue"><Subnode>testValue</Subnode>'
                 '</Element>')
     inserted = Node('root').insert(contents)
     self.assertEqual(inserted.node('Element').xml(),
                      Node.create(contents).xml())
コード例 #5
0
ファイル: test_node.py プロジェクト: nansencenter/nansat
 def test_insert(self):
     """Inserted XML text serializes the same as a directly created Node."""
     contents = ('<Element attr="attrValue"><Subnode>testValue</Subnode>'
                 '</Element>')
     root = Node('root')
     root2 = root.insert(contents)
     parsed_directly = Node.create(contents)
     self.assertEqual(root2.node('Element').xml(), parsed_directly.xml())
コード例 #6
0
ファイル: test_node.py プロジェクト: nansencenter/nansat
 def test_create(self):
     """Node.create from a file matches the same XML inserted as a string."""
     test_file_element = os.path.join(ntd.test_data_path,
                                      'some_xml_file.xml')
     fileElement = Node.create(test_file_element)
     with open(test_file_element, 'r') as fd:
         contents = fd.read().replace('\n', '')
     inserted = Node('root').insert(contents)
     self.assertEqual(fileElement.xml(), inserted.children[0].xml())
コード例 #7
0
 def test_create(self):
     """Parsing a file and parsing its contents produce the same tree."""
     xml_path = os.path.join(ntd.test_data_path, 'some_xml_file.xml')
     from_file = Node.create(xml_path)
     with open(xml_path, 'r') as xml_file:
         contents = xml_file.read().replace('\n', '')
     root = Node('root')
     root = root.insert(contents)
     from_string = root.children[0]
     self.assertEqual(from_file.xml(), from_string.xml())
コード例 #8
0
ファイル: test_node.py プロジェクト: nansencenter/nansat
 def test_search_node(self):
     """node(tag, i) returns the i-th child carrying the given tag."""
     root = Node('Root')
     child0 = Node('FirstLevel')
     child1 = Node('FirstLevel')
     other = Node('FirstLevel2ndTag')
     for child in (child0, child1, other):
         root += child
     self.assertEqual(root.node('FirstLevel', 0), child0)
     self.assertEqual(root.node('FirstLevel', 1), child1)
コード例 #9
0
 def test_add_nodes(self):
     """Both += and + attach children to the parent node."""
     root = Node('Root')
     first = Node('FirstLevel')
     second = Node('FirstLevel')
     third = Node('FirstLevel2ndTag')
     root += first
     root += second
     root = root + third
     for child in (first, second, third):
         self.assertIn(child, root.children)
コード例 #10
0
ファイル: test_node.py プロジェクト: nansencenter/nansat
 def test_replace_node(self):
     """replaceNode swaps the i-th child with a given tag for a new node."""
     root = Node('Root')
     kept = Node('FirstLevel')
     replaced = Node('FirstLevel')
     replacement = Node('FirstLevel2ndTag')
     root += kept
     root += replaced
     root.replaceNode('FirstLevel', 1, replacement)
     self.assertIn(kept, root.children)
     self.assertNotIn(replaced, root.children)
     self.assertIn(replacement, root.children)
     self.assertEqual(len(root.children), 2)
コード例 #11
0
    def read_geolocation_lut(self, annotXML):
        """Read pixel, line, lon, lat, incidence and elevation LUTs from XML.

        Parameters
        ----------
        annotXML : str
            annotation XML document as a string

        Returns
        -------
        X, Y, lon, lat, inc, ele : 1D np.arrays of float
            geolocation grid point values
        numberOfSamples, numberOfLines : int
            raster dimensions from imageInformation
        """
        xml = Node.create(annotXML)
        geolocationGridPointList = xml.node('geolocationGrid').node(
            'geolocationGridPointList').children
        X = []
        Y = []
        lon = []
        lat = []
        inc = []
        ele = []
        for gridPoint in geolocationGridPointList:
            X.append(gridPoint['pixel'])
            Y.append(gridPoint['line'])
            lon.append(gridPoint['longitude'])
            lat.append(gridPoint['latitude'])
            inc.append(gridPoint['incidenceAngle'])
            ele.append(gridPoint['elevationAngle'])

        # np.fromiter instead of np.array(map(...)): on Python 3 map() returns
        # an iterator, which np.array would wrap as a 0-d object array
        X = np.fromiter(X, float)
        Y = np.fromiter(Y, float)
        lon = np.fromiter(lon, float)
        lat = np.fromiter(lat, float)
        inc = np.fromiter(inc, float)
        ele = np.fromiter(ele, float)

        imageInformation = xml.node('imageAnnotation').node('imageInformation')
        numberOfSamples = int(imageInformation.node('numberOfSamples').value)
        numberOfLines = int(imageInformation.node('numberOfLines').value)

        return X, Y, lon, lat, inc, ele, numberOfSamples, numberOfLines
コード例 #12
0
    def get_LUT_VRTs(self, XML, vectorListName, LUT_list):
        """Create VRTs with LUT arrays read from calibration/noise XML.

        Parameters
        ----------
        XML : str
            XML document containing a list of vectors
        vectorListName : str
            tag of the element containing the vector list
        LUT_list : list of str
            names of LUT variables to read from each vector

        Returns
        -------
        LUT_VRTs : dict
            VRT objects keyed by LUT name
        longitude, latitude : 2D np.arrays
            geographic coordinates of the LUT grid
        """
        n = Node.create(XML)
        vecList = n.node(vectorListName)
        X = []
        Y = []
        LUTs = {LUT: [] for LUT in LUT_list}
        for vec in vecList.children:
            # list(map(...)): on Python 3 a bare map object stored in X
            # cannot be turned into a 2D numeric array by np.array below
            X.append(list(map(int, vec['pixel'].split())))
            Y.append(int(vec['line']))
            for LUT in LUT_list:
                LUTs[LUT].append(list(map(float, vec[LUT].split())))

        X = np.array(X)
        for LUT in LUT_list:
            LUTs[LUT] = np.array(LUTs[LUT])
        # replicate Y across columns so it matches the 2D X grid
        Ym = np.array([Y, ] * np.shape(X)[1]).transpose()

        lon, lat = self.transform_points(X.flatten(), Ym.flatten())
        longitude = lon.reshape(X.shape)
        latitude = lat.reshape(X.shape)

        LUT_VRTs = {}
        for LUT in LUT_list:
            LUT_VRTs[LUT] = VRT(array=LUTs[LUT], lat=latitude, lon=longitude)

        return LUT_VRTs, longitude, latitude
コード例 #13
0
    def read_geolocation_lut(self, annotXML):
        """Read lon, lat, pixel, line, ia, ea from XML string <annotXML>.

        Parameters
        ----------
        annotXML : str
            annotation XML document as a string

        Returns
        -------
        X, Y, lon, lat, inc, ele : 1D np.arrays of float
        numberOfSamples, numberOfLines : int
        """
        xml = Node.create(annotXML)
        geolocationGridPointList = xml.node('geolocationGrid').node('geolocationGridPointList').children
        X = []
        Y = []
        lon = []
        lat = []
        inc = []
        ele = []
        for gridPoint in geolocationGridPointList:
            X.append(gridPoint['pixel'])
            Y.append(gridPoint['line'])
            lon.append(gridPoint['longitude'])
            lat.append(gridPoint['latitude'])
            inc.append(gridPoint['incidenceAngle'])
            ele.append(gridPoint['elevationAngle'])

        # list(map(...)): on Python 3 np.array(map(...)) would produce a
        # useless 0-d object array instead of a float array
        X = np.array(list(map(float, X)))
        Y = np.array(list(map(float, Y)))
        lon = np.array(list(map(float, lon)))
        lat = np.array(list(map(float, lat)))
        inc = np.array(list(map(float, inc)))
        ele = np.array(list(map(float, ele)))

        numberOfSamples = int(xml.node('imageAnnotation').node('imageInformation').node('numberOfSamples').value)
        numberOfLines = int(xml.node('imageAnnotation').node('imageInformation').node('numberOfLines').value)

        return X, Y, lon, lat, inc, ele, numberOfSamples, numberOfLines
コード例 #14
0
ファイル: test_node.py プロジェクト: whigg/nansat
 def test_getAttributeList(self):
     """getAttributeList returns parallel lists of attribute names/values."""
     attrs = {'anAttr': 'elValue',
              'secondAttr': 'Some value',
              'finalAttribute': 'A last value'}
     node = Node('Root', value='   Value   ', **attrs)
     nameList, valList = node.getAttributeList()
     self.assertIsInstance(nameList, list)
     self.assertIsInstance(valList, list)
     for name, value in attrs.items():
         self.assertEqual(nameList[valList.index(value)], name)
コード例 #15
0
 def test_creation(self):
     """Constructor stores tag and attributes, and strips the value."""
     tag, value, anAttr = 'Root', '   Value   ', 'elValue'
     node = Node(tag, value=value, anAttr=anAttr)
     self.assertEqual(node.tag, tag)
     self.assertDictEqual(node.attributes, {'anAttr': anAttr})
     self.assertEqual(node.value, value.strip())
コード例 #16
0
ファイル: test_node.py プロジェクト: nansencenter/nansat
 def test_getAttributeList(self):
     """Attribute names and values come back as two aligned lists."""
     anAttr = 'elValue'
     secondAttr = 'Some value'
     finalAttribute = 'A last value'
     node = Node('Root', value='   Value   ', anAttr=anAttr,
                 secondAttr=secondAttr, finalAttribute=finalAttribute)
     nameList, valList = node.getAttributeList()
     self.assertIsInstance(nameList, list)
     self.assertIsInstance(valList, list)
     expected = [(anAttr, 'anAttr'),
                 (secondAttr, 'secondAttr'),
                 (finalAttribute, 'finalAttribute')]
     for attr_value, attr_name in expected:
         self.assertEqual(nameList[valList.index(attr_value)], attr_name)
コード例 #17
0
ファイル: test_node.py プロジェクト: nansencenter/nansat
 def test_xml(self):
     """xml() serializes the node tree with two-space indentation."""
     root = Node('Root')
     for tag in ('FirstLevel', 'FirstLevel', 'FirstLevel2ndTag'):
         root += Node(tag)
     expected = ('<Root>\n'
                 '  <FirstLevel/>\n'
                 '  <FirstLevel/>\n'
                 '  <FirstLevel2ndTag/>\n'
                 '</Root>\n')
     self.assertEqual(root.xml(), expected)
コード例 #18
0
ファイル: mapper_landsat.py プロジェクト: WYC19910220/nansat
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        """Create LANDSAT VRT from a .tar / .tar.gz / .tgz band archive.

        Parameters
        ----------
        fileName : str
            path to the tar archive with LANDSAT GeoTIFF band files
        gdalDataset, gdalMetadata :
            standard mapper arguments (not used for band discovery here)

        Raises
        ------
        WrongMapperError
            if the file is not a readable tar archive or contains no bands
        """
        # try to open .tar or .tar.gz or .tgz file with tar
        try:
            tarFile = tarfile.open(fileName)
        except Exception:
            # narrowed from bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; any read/format error means "not our file"
            raise WrongMapperError

        # collect one source/destination entry per LANDSAT band GeoTIFF
        metaDict = []
        for tarName in tarFile.getnames():
            # startswith/endswith tuples also avoid IndexError on empty names
            if (tarName.startswith(('L', 'M')) and
                    tarName.endswith(('.TIF', '.tif'))):
                bandNo = tarName[-6:-4]
                metaDict.append({
                    'src': {'SourceFilename': '/vsitar/%s/%s' % (fileName,
                                                                 tarName),
                            'SourceBand':  1},
                    'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                            'suffix': bandNo}})

        if not metaDict:
            raise WrongMapperError

        # find bands whose raster size differs from the first band's
        sizeDiffBands = []
        for iFile in range(len(metaDict)):
            tmpName = metaDict[iFile]['src']['SourceFilename']
            gdalDatasetTmp = gdal.Open(tmpName)
            if iFile == 0:
                gdalDatasetTmp0 = gdalDatasetTmp
                xSize = gdalDatasetTmp.RasterXSize
                ySize = gdalDatasetTmp.RasterYSize
            elif (xSize != gdalDatasetTmp.RasterXSize or
                    ySize != gdalDatasetTmp.RasterYSize):
                sizeDiffBands.append(iFile)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDatasetTmp0)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # 8th band of LANDSAT8 is a double size band.
        # Reduce the size to same as the 1st band.
        if len(sizeDiffBands) != 0:
            vrtXML = self.read_xml()
            node0 = Node.create(vrtXML)
            for iBand in sizeDiffBands:
                iBandNode = node0.nodeList('VRTRasterBand')[iBand]
                iNodeDstRect = iBandNode.node('DstRect')
                iNodeDstRect.replaceAttribute('xSize', str(xSize))
                iNodeDstRect.replaceAttribute('ySize', str(ySize))

            self.write_xml(node0.rawxml())
コード例 #19
0
    def read_annotation(self, annotation_files):
        """ Read lon, lat, etc from annotation XML

        Parameters
        ----------
        annotation_files : list
            strings with names of annotation files

        Returns
        -------
        data : dict
            geolocation data from the XML as 2D np.arrays. Keys:
                pixel, line, longitude, latitude, height, incidenceAngle, elevationAngle: 2D arrays
                shape : tuple (shape of geolocation data arrays)
                x_size, y_size : int
                pol : list

        """
        variable_names = [
            'pixel', 'line', 'longitude', 'latitude', 'height',
            'incidenceAngle', 'elevationAngle'
        ]
        data = {var_name: [] for var_name in variable_names}

        # only the first annotation file is parsed for geolocation
        xml = self.read_vsi(annotation_files[0])
        xml = Node.create(xml)
        geolocation_points = xml.node('geolocationGrid').node(
            'geolocationGridPointList').children
        # collect data from XML into dictionary with lists
        for point in geolocation_points:
            for var_name in variable_names:
                data[var_name].append(point[var_name])

        # convert lists to 1D arrays
        for var_name in variable_names:
            data[var_name] = np.fromiter(data[var_name], float)

        # get shape of geolocation matrix (number of occurence of minimal element)
        # NOTE(review): the ordering ((line==0).sum(), (pixel==0).sum()) is the
        # reverse of other versions of this method — confirm against the
        # downstream reshape/consumers before relying on axis order
        data['shape'] = (data['line'] == 0).sum(), (data['pixel'] == 0).sum()

        # convert 1D arrays to 2D
        for var_name in variable_names:
            data[var_name].shape = data['shape']

        # get raster dimentions
        image_info = xml.node('imageAnnotation').node('imageInformation')
        data['x_size'] = int(image_info.node('numberOfSamples').value)
        data['y_size'] = int(image_info.node('numberOfLines').value)

        # get list of polarizations, parsed from the annotation file names
        data['pol'] = []
        for ff in annotation_files:
            p = os.path.basename(ff).split('-')[3]
            data['pol'].append(p.upper())

        return data
コード例 #20
0
ファイル: test_vrt.py プロジェクト: nansencenter/nansat
    def test_hardcopy_bands(self):
        """hardcopy_bands copies data and rewires bands to per-band files."""
        ds = gdal.Open(self.test_file_gcps)
        vrt = VRT.copy_dataset(ds)
        vrt.hardcopy_bands()

        self.assertTrue(np.allclose(vrt.dataset.ReadAsArray(), ds.ReadAsArray()))
        band_nodes = Node.create(str(vrt.xml)).nodeList('VRTRasterBand')
        for i in range(3):
            self.assertEqual(band_nodes[i].node('SourceFilename').value,
                             vrt.band_vrts[i + 1].filename)
コード例 #21
0
ファイル: test_vrt.py プロジェクト: xingyaozhang1/nansat
    def test_hardcopy_bands(self):
        """After hardcopy_bands each band sources its own band-VRT file."""
        source = gdal.Open(self.test_file_gcps)
        vrt = VRT.copy_dataset(source)
        vrt.hardcopy_bands()

        self.assertTrue(np.allclose(vrt.dataset.ReadAsArray(),
                                    source.ReadAsArray()))
        bands = Node.create(str(vrt.xml)).nodeList('VRTRasterBand')
        for index, band in zip((1, 2, 3), bands):
            self.assertEqual(band.node('SourceFilename').value,
                             vrt.band_vrts[index].filename)
コード例 #22
0
    def read_annotation(self, annotation_files):
        """ Read lon, lat, etc from annotation XML

        Parameters
        ----------
        annotation_files : list
            strings with names of annotation files

        Returns
        -------
        data : dict
            geolocation data from the XML as 2D np.arrays. Keys:
                pixel, line, longitude, latitude, height, incidenceAngle,
                elevationAngle : 2D arrays
                shape : tuple (shape of geolocation data arrays)
                x_size, y_size : int
                pol : list

        """
        variable_names = ['pixel', 'line', 'longitude', 'latitude', 'height',
                          'incidenceAngle', 'elevationAngle']
        data = dict((name, []) for name in variable_names)

        xml_root = Node.create(self.read_vsi(annotation_files[0]))
        grid_points = (xml_root.node('geolocationGrid')
                       .node('geolocationGridPointList').children)
        # gather raw values point by point
        for grid_point in grid_points:
            for name in variable_names:
                data[name].append(grid_point[name])

        # raw string lists -> 1D float arrays
        for name in variable_names:
            data[name] = np.fromiter(data[name], float)

        # shape of the geolocation grid: count occurrences of the minimal
        # pixel/line values
        data['shape'] = (data['pixel'] == 0).sum(), (data['line'] == 0).sum()

        # reshape the 1D arrays in place to the 2D grid
        for name in variable_names:
            data[name].shape = data['shape']

        # raster dimensions
        image_info = xml_root.node('imageAnnotation').node('imageInformation')
        data['x_size'] = int(image_info.node('numberOfSamples').value)
        data['y_size'] = int(image_info.node('numberOfLines').value)

        # polarization of each annotation file, parsed from its name
        data['pol'] = [os.path.basename(ff).split('-')[3].upper()
                       for ff in annotation_files]

        return data
コード例 #23
0
    def read_calibration(self, xml, vectorListName, variable_names, pol):
        """ Read calibration data from calibration or noise XML files
        Parameters
        ----------
        xml : str
            String with XML from calibration or noise files
        vectorListName : str
            tag of the element that contains lists with LUT values
        variable_names : list of str
            names of LUT variable to read
        pol : str
            HH, HV, etc

        Returns
        -------
        data : dict
            Calibration or noise data. Keys:
            The same as variable_names + 'pixel', 'line'
        """
        root = Node.create(xml)
        vectors = root.node(vectorListName).children
        data = {'pixel': [], 'line': []}
        for var_name in variable_names:
            data[var_name + pol] = []

        # one row per vector element; remember each row's pixel count
        row_lengths = []
        for vector in vectors:
            pixels = list(map(int, vector['pixel'].split()))
            row_lengths.append(len(pixels))
            data['pixel'].append(pixels)
            data['line'].append(int(vector['line']))
            for var_name in variable_names:
                data[var_name + pol].append(
                    np.fromiter(vector[var_name].split(), float))

        # rows may differ in length; truncate all to the shortest one
        shortest = np.min(row_lengths)
        data['pixel'] = [row[:shortest] for row in data['pixel']]
        for var_name in variable_names:
            key = var_name + pol
            data[key] = [row[:shortest] for row in data[key]]

        data['pixel'] = np.array(data['pixel'])
        for var_name in variable_names:
            data[var_name + pol] = np.array(data[var_name + pol])
        # replicate 'line' across columns to match the 2D pixel grid
        n_cols = np.shape(data['pixel'])[1]
        data['line'] = np.array([data['line'], ] * n_cols).transpose()

        return data
コード例 #24
0
    def read_calibration(self, xml, vectorListName, variable_names, pol):
        """ Read calibration data from calibration or noise XML files
        Parameters
        ----------
        xml : str
            String with XML from calibration or noise files
        vectorListName : str
            tag of the element that contains lists with LUT values
        variable_names : list of str
            names of LUT variable to read
        pol : str
            HH, HV, etc

        Returns
        -------
        data : dict
            Calibration or noise data. Keys:
            The same as variable_names + 'pixel', 'line'
        """
        keys = [name + pol for name in variable_names]
        data = {'pixel': [], 'line': []}
        data.update((key, []) for key in keys)

        node_tree = Node.create(xml)
        lengths = []
        for vec in node_tree.node(vectorListName).children:
            pixel_row = list(map(int, vec['pixel'].split()))
            lengths.append(len(pixel_row))
            data['pixel'].append(pixel_row)
            data['line'].append(int(vec['line']))
            for name, key in zip(variable_names, keys):
                data[key].append(np.fromiter(vec[name].split(), float))

        # truncate every row to the minimum length before stacking
        min_len = np.min(lengths)
        data['pixel'] = np.array([row[:min_len] for row in data['pixel']])
        for key in keys:
            data[key] = np.array([row[:min_len] for row in data[key]])

        # expand 'line' to a 2D array matching the pixel grid
        width = np.shape(data['pixel'])[1]
        data['line'] = np.array([data['line'], ] * width).transpose()

        return data
コード例 #25
0
 def test_replace_node(self):
     """Replacing the second 'FirstLevel' child keeps the count at two."""
     root = Node('Root')
     original_first = Node('FirstLevel')
     original_second = Node('FirstLevel')
     root += original_first
     root += original_second
     newcomer = Node('FirstLevel2ndTag')
     root.replaceNode('FirstLevel', 1, newcomer)
     self.assertIn(original_first, root.children)
     self.assertNotIn(original_second, root.children)
     self.assertIn(newcomer, root.children)
     self.assertEqual(len(root.children), 2)
コード例 #26
0
 def test_search_node(self):
     """Children sharing a tag are addressable by positional index."""
     root = Node('Root')
     same_tag_a = Node('FirstLevel')
     same_tag_b = Node('FirstLevel')
     different = Node('FirstLevel2ndTag')
     root += same_tag_a
     root += same_tag_b
     root += different
     self.assertEqual(root.node('FirstLevel', 0), same_tag_a)
     self.assertEqual(root.node('FirstLevel', 1), same_tag_b)
コード例 #27
0
    def read_manifest_data(self, input_file):
        """ Read information (time_coverage_start, etc) manifest XML

        Parameters
        ----------
        input_file : str
            name of manifest file

        Returns
        -------
        data : dict
            manifest data. Keys:
                time_coverage_start
                time_coverage_end
                platform_familyName
                platform_number

        """

        data = {}
        manifest = Node.create(self.read_vsi(input_file))
        for section in manifest.node('metadataSection').children:
            section_id = str(section.getAttribute('ID'))
            if section_id == 'acquisitionPeriod':
                # acquisition start/stop give the valid time coverage
                period = (section.node('metadataWrap').node('xmlData')
                          .node('safe:acquisitionPeriod'))
                data['time_coverage_start'] = parse(
                    period['safe:startTime']).isoformat()
                data['time_coverage_end'] = parse(
                    period['safe:stopTime']).isoformat()
            if section_id == 'platform':
                platform = (section.node('metadataWrap').node('xmlData')
                            .node('safe:platform'))
                data['platform_family_name'] = str(platform['safe:familyName'])
                data['platform_number'] = str(platform['safe:number'])
        return data
コード例 #28
0
    def read_manifest_data(self, input_file):
        """ Read information (time_coverage_start, etc) manifest XML

        Parameters
        ----------
        input_file : str
            name of manifest file

        Returns
        -------
        data : dict
            manifest data. Keys:
                time_coverage_start
                time_coverage_end
                platform_family_name
                platform_number

        """

        data = {}
        xml = self.read_vsi(input_file)
        # set time as acquisition start time
        n = Node.create(xml)
        meta = n.node('metadataSection')
        for nn in meta.children:
            if str(nn.getAttribute('ID')) == 'acquisitionPeriod':
                # get valid time (ISO-formatted via dateutil parse)
                data['time_coverage_start'] = parse(
                    (nn.node('metadataWrap').node('xmlData').node(
                        'safe:acquisitionPeriod')['safe:startTime']
                     )).isoformat()
                data['time_coverage_end'] = parse(
                    (nn.node('metadataWrap').node('xmlData').node(
                        'safe:acquisitionPeriod')['safe:stopTime']
                     )).isoformat()
            if str(nn.getAttribute('ID')) == 'platform':
                # platform family name and number from the 'platform' section
                data['platform_family_name'] = str(
                    nn.node('metadataWrap').node('xmlData').node(
                        'safe:platform')['safe:familyName'])
                data['platform_number'] = str(
                    nn.node('metadataWrap').node('xmlData').node(
                        'safe:platform')['safe:number'])
        return data
コード例 #29
0
    def get_LUT_VRTs(self, XML, vectorListName, LUT_list):
        """Create VRTs with LUT arrays read from calibration/noise XML.

        Parameters
        ----------
        XML : str
            XML document containing a list of vectors
        vectorListName : str
            tag of the element containing the vector list
        LUT_list : list of str
            names of LUT variables to read from each vector

        Returns
        -------
        LUT_VRTs : dict
            VRT objects keyed by LUT name
        longitude, latitude : 2D np.arrays
            geographic coordinates of the LUT grid
        """
        n = Node.create(XML)
        vecList = n.node(vectorListName)
        X = []
        Y = []
        LUTs = {LUT: [] for LUT in LUT_list}
        xLengths = []
        for vec in vecList.children:
            # list(...) is required: on Python 3 map() returns an iterator
            # that has no len() and cannot be sliced in the truncation below
            xVec = list(map(int, vec['pixel'].split()))
            xLengths.append(len(xVec))
            X.append(xVec)
            Y.append(int(vec['line']))
            for LUT in LUT_list:
                LUTs[LUT].append(list(map(float, vec[LUT].split())))

        # truncate X and LUT to minimum length for all rows
        minLength = np.min(xLengths)
        X = [x[:minLength] for x in X]
        for LUT in LUT_list:
            LUTs[LUT] = [lut[:minLength] for lut in LUTs[LUT]]

        X = np.array(X)
        for LUT in LUT_list:
            LUTs[LUT] = np.array(LUTs[LUT])
        # replicate Y across columns so it matches the 2D X grid
        Ym = np.array([Y, ] * np.shape(X)[1]).transpose()

        lon, lat = self.transform_points(X.flatten(), Ym.flatten())
        longitude = lon.reshape(X.shape)
        latitude = lat.reshape(X.shape)

        LUT_VRTs = {}
        for LUT in LUT_list:
            LUT_VRTs[LUT] = VRT(array=LUTs[LUT], lat=latitude, lon=longitude)

        return LUT_VRTs, longitude, latitude
コード例 #30
0
ファイル: test_node.py プロジェクト: whigg/nansat
 def test_xml(self):
     """Serialization lists all children in insertion order."""
     root = Node('Root')
     root += Node('FirstLevel')
     root += Node('FirstLevel')
     root += Node('FirstLevel2ndTag')
     self.assertEqual(root.xml(),
                      '<Root>\n'
                      '  <FirstLevel/>\n'
                      '  <FirstLevel/>\n'
                      '  <FirstLevel2ndTag/>\n'
                      '</Root>\n')
コード例 #31
0
    def get_LUT_VRTs(self, XML, vectorListName, LUT_list):
        """Create VRTs with LUT arrays read from calibration/noise XML.

        Parameters
        ----------
        XML : str
            XML document containing a list of vectors
        vectorListName : str
            tag of the element containing the vector list
        LUT_list : list of str
            names of LUT variables to read from each vector

        Returns
        -------
        LUT_VRTs : dict of VRT
        longitude, latitude : 2D np.arrays
        """
        n = Node.create(XML)
        vecList = n.node(vectorListName)
        X = []
        Y = []
        LUTs = {}
        for LUT in LUT_list:
            LUTs[LUT] = []
        xLengths = []
        for vec in vecList.children:
            # Python 3 fix: len()/slicing of a bare map object raises
            # TypeError, so materialize the rows as lists
            xVec = list(map(int, vec['pixel'].split()))
            xLengths.append(len(xVec))
            X.append(xVec)
            Y.append(int(vec['line']))
            for LUT in LUT_list:
                LUTs[LUT].append(list(map(float, vec[LUT].split())))

        # truncate X and LUT to minimum length for all rows
        minLength = np.min(xLengths)
        X = [x[:minLength] for x in X]
        for LUT in LUT_list:
            LUTs[LUT] = [lut[:minLength] for lut in LUTs[LUT]]

        X = np.array(X)
        for LUT in LUT_list:
            LUTs[LUT] = np.array(LUTs[LUT])
        # replicate Y across columns so it matches the 2D X grid
        Ym = np.array([Y, ]*np.shape(X)[1]).transpose()

        lon, lat = self.transform_points(X.flatten(), Ym.flatten())
        longitude = lon.reshape(X.shape)
        latitude = lat.reshape(X.shape)

        LUT_VRTs = {}
        for LUT in LUT_list:
            LUT_VRTs[LUT] = VRT(array=LUTs[LUT], lat=latitude, lon=longitude)

        return LUT_VRTs, longitude, latitude
コード例 #32
0
ファイル: mapper_radarsat2.py プロジェクト: scollis/nansat
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create Radarsat2 VRT '''
        fPathName, fExt = os.path.splitext(fileName)

        if zipfile.is_zipfile(fileName):
            # Open zip file using VSI
            fPath, fName = os.path.split(fPathName)
            fileName = '/vsizip/%s/%s' % (fileName, fName)
            if not 'RS' in fName[0:2]:
                raise WrongMapperError('Provided data is not Radarsat-2')
            gdalDataset = gdal.Open(fileName)
            gdalMetadata = gdalDataset.GetMetadata()

        #if it is not RADARSAT-2, return
        if (not gdalMetadata or
                not 'SATELLITE_IDENTIFIER' in gdalMetadata.keys()):
            raise WrongMapperError
        elif gdalMetadata['SATELLITE_IDENTIFIER'] != 'RADARSAT-2':
            raise WrongMapperError

        # read product.xml
        productXmlName = os.path.join(fileName, 'product.xml')
        productXml = self.read_xml(productXmlName)

        # Get additional metadata from product.xml
        rs2_0 = Node.create(productXml)
        rs2_1 = rs2_0.node('sourceAttributes')
        rs2_2 = rs2_1.node('radarParameters')
        if rs2_2['antennaPointing'].lower() == 'right':
            antennaPointing = 90
        else:
            antennaPointing = -90
        rs2_3 = rs2_1.node('orbitAndAttitude').node('orbitInformation')
        passDirection = rs2_3['passDirection']

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        #define dictionary of metadata and band specific parameters
        pol = []
        metaDict = []

        # Get the subdataset with calibrated sigma0 only
        for dataset in gdalDataset.GetSubDatasets():
            if dataset[1] == 'Sigma Nought calibrated':
                s0dataset = gdal.Open(dataset[0])
                s0datasetName = dataset[0][:]
                band = s0dataset.GetRasterBand(1)
                s0datasetPol = band.GetMetadata()['POLARIMETRIC_INTERP']
                for i in range(1, s0dataset.RasterCount+1):
                    iBand = s0dataset.GetRasterBand(i)
                    polString = iBand.GetMetadata()['POLARIMETRIC_INTERP']
                    suffix = polString
                    # The nansat data will be complex
                    # if the SAR data is of type 10
                    dtype = iBand.DataType
                    if dtype == 10:
                        # add intensity band
                        metaDict.append(
                            {'src': {'SourceFilename':
                                     ('RADARSAT_2_CALIB:SIGMA0:'
                                      + fileName + '/product.xml'),
                                     'SourceBand': i,
                                     'DataType': dtype},
                             'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                                     'PixelFunctionType': 'intensity',
                                     'SourceTransferType': gdal.GetDataTypeName(dtype),
                                     'suffix': suffix,
                                     'polarization': polString,
                                     'dataType': 6}})
                        # modify suffix for adding the compled band below
                        suffix = polString+'_complex'
                    pol.append(polString)
                    metaDict.append(
                        {'src': {'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:'
                                                    + fileName
                                                    + '/product.xml'),
                                 'SourceBand': i,
                                 'DataType': dtype},
                         'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                                 'suffix': suffix,
                                 'polarization': polString}})

            if dataset[1] == 'Beta Nought calibrated':
                b0dataset = gdal.Open(dataset[0])
                b0datasetName = dataset[0][:]
                for j in range(1, b0dataset.RasterCount+1):
                    jBand = b0dataset.GetRasterBand(j)
                    polString = jBand.GetMetadata()['POLARIMETRIC_INTERP']
                    if polString == s0datasetPol:
                        b0datasetBand = j

        ###############################
        # Add SAR look direction
        ###############################
        d = Domain(ds=gdalDataset)
        lon, lat = d.get_geolocation_grids(100)

        '''
        (GDAL?) Radarsat-2 data is stored with maximum latitude at first
        element of each column and minimum longitude at first element of each
        row (e.g. np.shape(lat)=(59,55) -> latitude maxima are at lat[0,:],
        and longitude minima are at lon[:,0])

        In addition, there is an interpolation error for direct estimate along
        azimuth. We therefore estimate the heading along range and add 90
        degrees to get the "satellite" heading.

        '''
        if str(passDirection).upper() == 'DESCENDING':
            sat_heading = initial_bearing(lon[:, :-1], lat[:, :-1],
                                          lon[:, 1:], lat[:, 1:]) + 90
        elif str(passDirection).upper() == 'ASCENDING':
            sat_heading = initial_bearing(lon[:, 1:], lat[:, 1:],
                                          lon[:, :-1], lat[:, :-1]) + 90
        else:
            print 'Can not decode pass direction: ' + str(passDirection)

        # Calculate SAR look direction
        look_direction = sat_heading + antennaPointing
        # Interpolate to regain lost row
        look_direction = np.mod(look_direction, 360)
        look_direction = scipy.ndimage.interpolation.zoom(
            look_direction, (1, 11./10.))
        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT(array=look_direction_u, lat=lat, lon=lon)
        look_v_VRT = VRT(array=look_direction_v, lat=lat, lon=lon)

        # Note: If incidence angle and look direction are stored in
        #       same VRT, access time is about twice as large
        lookVRT = VRT(lat=lat, lon=lon)
        lookVRT._create_band(
            [{'SourceFilename': look_u_VRT.fileName, 'SourceBand': 1},
             {'SourceFilename': look_v_VRT.fileName, 'SourceBand': 1}],
            {'PixelFunctionType': 'UVToDirectionTo'})

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(gdalDataset.RasterXSize,
                                          gdalDataset.RasterYSize)
        # Store VRTs so that they are accessible later
        self.bandVRTs['look_u_VRT'] = look_u_VRT
        self.bandVRTs['look_v_VRT'] = look_v_VRT
        self.bandVRTs['lookVRT'] = lookVRT

        # Add band to full sized VRT
        lookFileName = self.bandVRTs['lookVRT'].fileName
        metaDict.append({'src': {'SourceFilename': lookFileName,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'sensor_azimuth_angle',
                                 'name': 'look_direction'}})

        ###############################
        # Create bands
        ###############################
        self._create_bands(metaDict)

        ###################################################
        # Add derived band (incidence angle) calculated
        # using pixel function "BetaSigmaToIncidence":
        ###################################################
        src = [{'SourceFilename': b0datasetName,
                'SourceBand':  b0datasetBand,
                'DataType': dtype},
               {'SourceFilename': s0datasetName,
                'SourceBand': 1,
                'DataType': dtype}]
        dst = {'wkv': 'angle_of_incidence',
               'PixelFunctionType': 'BetaSigmaToIncidence',
               'SourceTransferType': gdal.GetDataTypeName(dtype),
               '_FillValue': -10000,   # NB: this is also hard-coded in
                                       #     pixelfunctions.c
               'dataType': 6,
               'name': 'incidence_angle'}

        self._create_band(src, dst)
        self.dataset.FlushCache()

        ###################################################################
        # Add sigma0_VV - pixel function of sigma0_HH and beta0_HH
        # incidence angle is calculated within pixel function
        # It is assummed that HH is the first band in sigma0 and
        # beta0 sub datasets
        ###################################################################
        if 'VV' not in pol and 'HH' in pol:
            s0datasetNameHH = pol.index('HH')+1
            src = [{'SourceFilename': s0datasetName,
                    'SourceBand': s0datasetNameHH,
                    'DataType': 6},
                   {'SourceFilename': b0datasetName,
                    'SourceBand': b0datasetBand,
                    'DataType': 6}]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'Sigma0HHBetaToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self._create_band(src, dst)
            self.dataset.FlushCache()

        ############################################
        # Add SAR metadata
        ############################################
        if antennaPointing == 90:
            self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT')
        if antennaPointing == -90:
            self.dataset.SetMetadataItem('ANTENNA_POINTING', 'LEFT')
        self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                     str(passDirection).upper())

        # set valid time
        self.dataset.SetMetadataItem('time_coverage_start',
                                     (parse(gdalMetadata['FIRST_LINE_TIME']).
                                      isoformat()))
        self.dataset.SetMetadataItem('time_coverage_end',
                                     (parse(gdalMetadata['LAST_LINE_TIME']).
                                      isoformat()))

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('sar')
        ee = pti.get_gcmd_platform('radarsat-2')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))

        self._add_swath_mask_band()
コード例 #33
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create LANDSAT VRT

        Collect every Landsat band GeoTIFF found in the tar archive,
        keep only the band(s) with the highest resolution, and build the
        VRT from those, adjusting raster size and geotransform to match.

        Raises
        ------
        WrongMapperError
            if fileName cannot be opened with tarfile or the archive
            contains no Landsat band GeoTIFFs
        '''
        # try to open .tar or .tar.gz or .tgz file with tar
        try:
            tarFile = tarfile.open(fileName)
        except:
            raise WrongMapperError

        tarNames = tarFile.getnames()
        metaDictAll = []
        for tarName in tarNames:
            # Landsat band files start with 'L' or 'M' and end with .TIF/.tif
            if ((tarName[0] == 'L' or tarName[0] == 'M')
                    and (tarName[-4:] == '.TIF' or tarName[-4:] == '.tif')):
                # create metadataDict for all mappers
                bandNo = tarName[-6:-4]
                metaDictAll.append({
                    'src': {
                        'SourceFilename':
                        '/vsitar/%s/%s' % (fileName, tarName),
                        'SourceBand': 1
                    },
                    'dst': {
                        'wkv': 'toa_outgoing_spectral_radiance',
                        'suffix': bandNo
                    }
                })

        if not metaDictAll:
            raise WrongMapperError

        # copy metadataDict which has the highest resolution.
        for iFile in range(len(metaDictAll)):
            tmpName = metaDictAll[iFile]['src']['SourceFilename']
            gdalDatasetTmp = gdal.Open(tmpName)
            # set an initial size
            if iFile == 0:
                gdalDatasetTmp0 = gdalDatasetTmp
                xSize0 = gdalDatasetTmp.RasterXSize
                ySize0 = gdalDatasetTmp.RasterYSize
                xSize, ySize = xSize0, ySize0
                metaDict = [metaDictAll[0]]
                ratio = 1.0
            # if size of gdalDatasetTmp is larger than current size, replace
            if (xSize < gdalDatasetTmp.RasterXSize
                    and ySize < gdalDatasetTmp.RasterYSize):
                # ratio of the first band's width to the highest-res width
                ratio = float(xSize0) / float(gdalDatasetTmp.RasterXSize)
                xSize = gdalDatasetTmp.RasterXSize
                ySize = gdalDatasetTmp.RasterYSize
                metaDict = [metaDictAll[iFile]]
            # if size of gdalDatasetTmp is same as the current size, append metaDict
            elif (xSize == gdalDatasetTmp.RasterXSize
                  and ySize == gdalDatasetTmp.RasterYSize):
                metaDict.append(metaDictAll[iFile])

        # modify geoTransform for the highest resolution
        # NOTE(review): geoTransform is taken from the *last* dataset opened
        # in the loop above, which is not necessarily the highest-resolution
        # one -- confirm this is intended
        geoTransform = list(gdalDatasetTmp.GetGeoTransform())
        geoTransform[1] = float(geoTransform[1]) * ratio
        geoTransform[5] = float(geoTransform[5]) * ratio

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDatasetTmp0)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # 8th band of LANDSAT8 is a double size band.
        # Reduce the size to same as the 1st band.
        vrtXML = self.read_xml()
        node0 = Node.create(vrtXML)
        node0.replaceAttribute('rasterXSize', str(xSize))
        node0.replaceAttribute('rasterYSize', str(ySize))
        self.write_xml(str(node0.rawxml()))

        # set new geoTransform
        if ratio != 1.0:
            self.dataset.SetGeoTransform(tuple(geoTransform))
コード例 #34
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create VRT for a Eumetcast geostationary satellite (Meteosat/MSG)

        Reads the satellite name from the GDAL MSG driver description,
        looks up its calibration parameters in satDict, creates one band
        per channel and harmonizes band sizes between the high-resolution
        and low-resolution channels.

        Raises
        ------
        WrongMapperError
            if the dataset description does not look like an MSG source
        AttributeError
            if the satellite is not a known Eumetcast geostationary one
        '''
        # The MSG driver description looks like
        # 'MSG(<path>,<resolution>,<satellite>,<datestamp>,...)';
        # the satellite name is the third comma-separated field
        try:
            satellite = gdalDataset.GetDescription().split(",")[2]
        except (AttributeError, IndexError):
            raise WrongMapperError

        # look up calibration parameters for this satellite
        # NOTE(review): if no entry of satDict matches, 'wavelengths' is
        # never assigned and the check below raises NameError instead of
        # the intended AttributeError -- confirm satDict covers all inputs
        for sat in satDict:
            if sat['name'] == satellite:
                print 'This is ' + satellite
                wavelengths = sat['wavelengths']
                # scale/offset, LUT and NODATA are optional per satellite
                try:
                    scale = sat['scale']
                    offset = sat['offset']
                except:
                    print "No scale and offset found"
                    scale = None
                    offset = None
                try:
                    LUT = sat['LUT']
                except:
                    print "No LUT found"
                    LUT = [""]*len(wavelengths)
                try:
                    NODATA = sat['NODATA']
                except:
                    print "No NODATA values found"
                    NODATA = [""]*len(wavelengths)

        if wavelengths is None:
            raise AttributeError("No Eumetcast geostationary satellite")

        path = gdalDataset.GetDescription().split(",")[0].split("(")[1]
        datestamp = gdalDataset.GetDescription().split(",")[3]
        if satellite[0:3] == 'MSG':
            resolution = 'H'
            dataType = 'T'  # Brightness temperatures and reflectances
        else:
            resolution = 'L'
            dataType = 'N'  # Counts, for manual calibration

        metaDict = []
        for i, wavelength in enumerate(wavelengths):
            # channels above 2000 nm are thermal
            if wavelength > 2000:
                standard_name = 'brightness_temperature'
            else:
                standard_name = 'albedo'
            bandSource = ('MSG(' + path + ',' + resolution + ',' + satellite +
                          ',' + datestamp + ',' + str(i + 1) + ',Y,' +
                          dataType + ',1,1)')
            # skip channels that are missing from the input
            try:
                gdal.Open(bandSource)
            except:
                print ('Warning: band missing for wavelength ' +
                       str(wavelength) + 'nm')
                continue
            src = {'SourceFilename': bandSource, 'SourceBand': 1,
                   'LUT': LUT[i], 'NODATA': NODATA[i]}
            dst = {'wkv': standard_name, 'wavelength': str(wavelength)}
            if scale is not None:
                bandScale = scale[i]
                bandOffset = offset[i]
                src['ScaleRatio'] = str(bandScale)
                src['ScaleOffset'] = str(bandOffset)
            metaDict.append({'src': src, 'dst': dst})

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        # Create bands
        self._create_bands(metaDict)

        # For Meteosat7 ch1 has higher resolution than ch2 and ch3
        # and for MSG, channel 12 (HRV) has
        # higher resolution than the other channels
        # If the high resolution channel is opened, the low res channels are
        # blown up to this size. If a low res channel is opened,
        # the high res channels
        # are reduced to this size.
        if satellite == 'MET7' or satellite[0:3] == 'MSG':
            node0 = Node.create(self.read_xml())
            bands = node0.nodeList("VRTRasterBand")
            # NOTE(review): if RasterXSize matches none of the known sizes
            # below, newSrcXSize/newSrcYSize stay unbound and the loop over
            # bands raises NameError -- verify all possible sizes are covered
            if satellite == 'MET7':
                if self.dataset.RasterXSize == 5032:  # High res ch1 is opened
                    newSrcXSize = u'2532'
                    newSrcYSize = u'2500'
                    bands = bands[1:]  # Ch2 and ch3 should be modified
                if self.dataset.RasterXSize == 2532:  # Low res ch is opened
                    newSrcXSize = u'5032'
                    newSrcYSize = u'5000'
                    bands = [bands[0]]  # Only ch1 needs to be modified
            elif satellite[0:3] == 'MSG':
                if self.dataset.RasterXSize == 11136:  # High res ch1 is opened
                    newSrcXSize = u'3712'
                    newSrcYSize = u'3712'
                    bands = bands[0:10]  # Channels 1-11 should be modified
                if self.dataset.RasterXSize == 3712:  # Low res ch is opened
                    newSrcXSize = u'11136'
                    newSrcYSize = u'11136'
                    bands = [bands[11]]  # Only ch12 needs to be modified

            for band in bands:
                node1 = (band.nodeList("ComplexSource")[0].
                         nodeList("SrcRect")[0])
                node1.setAttribute("xSize", newSrcXSize)
                node1.setAttribute("ySize", newSrcYSize)
            self.write_xml(node0.rawxml())

        # Set global metadata
        self.dataset.SetMetadata({'satID': satellite})

        # Set time
        # Adding valid time to dataset
        self.dataset.SetMetadataItem('time_coverage_start',
            datetime.datetime.strptime(datestamp, '%Y%m%d%H%M').isoformat())

        return
コード例 #35
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        ''' Create Radarsat2 VRT

        Opens a Radarsat-2 product (optionally zipped), adds calibrated
        sigma0/beta0 bands, a SAR look-direction band, an incidence-angle
        band and, for HH-only products, a synthesized sigma0_VV band.

        Raises
        ------
        WrongMapperError
            if the input is not a Radarsat-2 product
        '''
        fPathName, fExt = os.path.splitext(fileName)

        if zipfile.is_zipfile(fileName):
            # Open zip file using VSI
            fPath, fName = os.path.split(fPathName)
            fileName = '/vsizip/%s/%s' % (fileName, fName)
            if not 'RS' in fName[0:2]:
                raise WrongMapperError('Provided data is not Radarsat-2')
            gdalDataset = gdal.Open(fileName)
            gdalMetadata = gdalDataset.GetMetadata()

        #if it is not RADARSAT-2, return
        if (not gdalMetadata
                or not 'SATELLITE_IDENTIFIER' in gdalMetadata.keys()):
            raise WrongMapperError
        elif gdalMetadata['SATELLITE_IDENTIFIER'] != 'RADARSAT-2':
            raise WrongMapperError

        # read product.xml
        productXmlName = os.path.join(fileName, 'product.xml')
        productXml = self.read_xml(productXmlName)

        # Get additional metadata (antenna pointing, pass direction)
        # from product.xml
        rs2_0 = Node.create(productXml)
        rs2_1 = rs2_0.node('sourceAttributes')
        rs2_2 = rs2_1.node('radarParameters')
        if rs2_2['antennaPointing'].lower() == 'right':
            antennaPointing = 90
        else:
            antennaPointing = -90
        rs2_3 = rs2_1.node('orbitAndAttitude').node('orbitInformation')
        passDirection = rs2_3['passDirection']

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDataset)

        #define dictionary of metadata and band specific parameters
        pol = []
        metaDict = []

        # Get the subdataset with calibrated sigma0 only
        for dataset in gdalDataset.GetSubDatasets():
            if dataset[1] == 'Sigma Nought calibrated':
                s0dataset = gdal.Open(dataset[0])
                s0datasetName = dataset[0][:]
                band = s0dataset.GetRasterBand(1)
                s0datasetPol = band.GetMetadata()['POLARIMETRIC_INTERP']
                for i in range(1, s0dataset.RasterCount + 1):
                    iBand = s0dataset.GetRasterBand(i)
                    polString = iBand.GetMetadata()['POLARIMETRIC_INTERP']
                    suffix = polString
                    # The nansat data will be complex
                    # if the SAR data is of type 10
                    dtype = iBand.DataType
                    if dtype == 10:
                        # add intensity band
                        metaDict.append({
                            'src': {
                                'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:' +
                                                   fileName + '/product.xml'),
                                'SourceBand':
                                i,
                                'DataType':
                                dtype
                            },
                            'dst': {
                                'wkv':
                                'surface_backwards_scattering_coefficient_of_radar_wave',
                                'PixelFunctionType': 'intensity',
                                'SourceTransferType':
                                gdal.GetDataTypeName(dtype),
                                'suffix': suffix,
                                'polarization': polString,
                                'dataType': 6
                            }
                        })
                        # modify suffix for adding the complex band below
                        suffix = polString + '_complex'
                    pol.append(polString)
                    metaDict.append({
                        'src': {
                            'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:' +
                                               fileName + '/product.xml'),
                            'SourceBand':
                            i,
                            'DataType':
                            dtype
                        },
                        'dst': {
                            'wkv':
                            'surface_backwards_scattering_coefficient_of_radar_wave',
                            'suffix': suffix,
                            'polarization': polString
                        }
                    })

            if dataset[1] == 'Beta Nought calibrated':
                b0dataset = gdal.Open(dataset[0])
                b0datasetName = dataset[0][:]
                for j in range(1, b0dataset.RasterCount + 1):
                    jBand = b0dataset.GetRasterBand(j)
                    polString = jBand.GetMetadata()['POLARIMETRIC_INTERP']
                    if polString == s0datasetPol:
                        b0datasetBand = j

        ###############################
        # Add SAR look direction
        ###############################
        d = Domain(ds=gdalDataset)
        lon, lat = d.get_geolocation_grids(100)
        '''
        (GDAL?) Radarsat-2 data is stored with maximum latitude at first
        element of each column and minimum longitude at first element of each
        row (e.g. np.shape(lat)=(59,55) -> latitude maxima are at lat[0,:],
        and longitude minima are at lon[:,0])

        In addition, there is an interpolation error for direct estimate along
        azimuth. We therefore estimate the heading along range and add 90
        degrees to get the "satellite" heading.

        '''
        if str(passDirection).upper() == 'DESCENDING':
            sat_heading = initial_bearing(lon[:, :-1], lat[:, :-1], lon[:, 1:],
                                          lat[:, 1:]) + 90
        elif str(passDirection).upper() == 'ASCENDING':
            sat_heading = initial_bearing(lon[:, 1:], lat[:, 1:], lon[:, :-1],
                                          lat[:, :-1]) + 90
        else:
            # NOTE(review): sat_heading is left unbound in this branch, so
            # the line below raises NameError for an unknown pass direction
            print 'Can not decode pass direction: ' + str(passDirection)

        # Calculate SAR look direction
        SAR_look_direction = sat_heading + antennaPointing
        # Interpolate to regain lost row
        SAR_look_direction = np.mod(SAR_look_direction, 360)
        SAR_look_direction = scipy.ndimage.interpolation.zoom(
            SAR_look_direction, (1, 11. / 10.))
        # Decompose, to avoid interpolation errors around 0 <-> 360
        SAR_look_direction_u = np.sin(np.deg2rad(SAR_look_direction))
        SAR_look_direction_v = np.cos(np.deg2rad(SAR_look_direction))
        look_u_VRT = VRT(array=SAR_look_direction_u, lat=lat, lon=lon)
        look_v_VRT = VRT(array=SAR_look_direction_v, lat=lat, lon=lon)

        # Note: If incidence angle and look direction are stored in
        #       same VRT, access time is about twice as large
        lookVRT = VRT(lat=lat, lon=lon)
        lookVRT._create_band([{
            'SourceFilename': look_u_VRT.fileName,
            'SourceBand': 1
        }, {
            'SourceFilename': look_v_VRT.fileName,
            'SourceBand': 1
        }], {'PixelFunctionType': 'UVToDirectionTo'})

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(gdalDataset.RasterXSize,
                                          gdalDataset.RasterYSize)
        # Store VRTs so that they are accessible later
        self.bandVRTs['look_u_VRT'] = look_u_VRT
        self.bandVRTs['look_v_VRT'] = look_v_VRT
        self.bandVRTs['lookVRT'] = lookVRT

        # Add band to full sized VRT
        lookFileName = self.bandVRTs['lookVRT'].fileName
        metaDict.append({
            'src': {
                'SourceFilename': lookFileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': 'SAR_look_direction'
            }
        })

        ###############################
        # Create bands
        ###############################
        self._create_bands(metaDict)

        ###################################################
        # Add derived band (incidence angle) calculated
        # using pixel function "BetaSigmaToIncidence":
        ###################################################
        # NOTE(review): b0datasetName/b0datasetBand/dtype are only assigned
        # inside the subdataset loop above; a product without the expected
        # Sigma/Beta Nought subdatasets would leave them unbound
        src = [{
            'SourceFilename': b0datasetName,
            'SourceBand': b0datasetBand,
            'DataType': dtype
        }, {
            'SourceFilename': s0datasetName,
            'SourceBand': 1,
            'DataType': dtype
        }]
        dst = {
            'wkv': 'angle_of_incidence',
            'PixelFunctionType': 'BetaSigmaToIncidence',
            'SourceTransferType': gdal.GetDataTypeName(dtype),
            '_FillValue': -10000,  # NB: this is also hard-coded in
            #     pixelfunctions.c
            'dataType': 6,
            'name': 'incidence_angle'
        }

        self._create_band(src, dst)
        self.dataset.FlushCache()

        ###################################################################
        # Add sigma0_VV - pixel function of sigma0_HH and beta0_HH
        # incidence angle is calculated within pixel function
        # It is assumed that HH is the first band in sigma0 and
        # beta0 sub datasets
        ###################################################################
        if 'VV' not in pol and 'HH' in pol:
            s0datasetNameHH = pol.index('HH') + 1
            src = [{
                'SourceFilename': s0datasetName,
                'SourceBand': s0datasetNameHH,
                'DataType': 6
            }, {
                'SourceFilename': b0datasetName,
                'SourceBand': b0datasetBand,
                'DataType': 6
            }]
            dst = {
                'wkv':
                'surface_backwards_scattering_coefficient_of_radar_wave',
                'PixelFunctionType': 'Sigma0HHBetaToSigma0VV',
                'polarization': 'VV',
                'suffix': 'VV'
            }
            self._create_band(src, dst)
            self.dataset.FlushCache()

        ############################################
        # Add SAR metadata
        ############################################
        if antennaPointing == 90:
            self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT')
        if antennaPointing == -90:
            self.dataset.SetMetadataItem('ANTENNA_POINTING', 'LEFT')
        self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                     str(passDirection).upper())

        # Set time
        validTime = gdalDataset.GetMetadata()['ACQUISITION_START_TIME']
        self.logger.info('Valid time: %s', str(validTime))
        self._set_time(parse(validTime))

        # set SADCAT specific metadata
        self.dataset.SetMetadataItem(
            'start_date', (parse(gdalMetadata['FIRST_LINE_TIME']).isoformat()))
        self.dataset.SetMetadataItem(
            'stop_date', (parse(gdalMetadata['LAST_LINE_TIME']).isoformat()))
        self.dataset.SetMetadataItem('sensor', 'SAR')
        self.dataset.SetMetadataItem('satellite', 'Radarsat2')
        self.dataset.SetMetadataItem('mapper', 'radarsat2')
コード例 #36
0
    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
        '''Create VRT for a Eumetcast geostationary satellite (Meteosat/MSG).

        Reads the satellite name from the GDAL MSG driver description,
        looks up its calibration parameters, creates one band per channel
        and harmonizes band sizes between the high-resolution and
        low-resolution channels.

        Parameters
        ----------
        filename : str
            name of the input dataset (kept for the common mapper interface)
        gdalDataset : gdal.Dataset
            dataset opened with the GDAL MSG driver
        gdalMetadata : dict
            metadata of gdalDataset (unused here)

        Raises
        ------
        WrongMapperError
            if the dataset description does not look like an MSG source
        AttributeError
            if the satellite is not a known Eumetcast geostationary one
        '''
        # The MSG driver description looks like
        # 'MSG(<path>,<resolution>,<satellite>,<datestamp>,...)';
        # the satellite name is the third comma-separated field
        try:
            satellite = gdalDataset.GetDescription().split(",")[2]
        except (AttributeError, IndexError):
            raise WrongMapperError

        satDict = Mapper.calibration()

        # Initialize calibration parameters so that an unknown satellite is
        # caught by the 'wavelengths is None' check below instead of raising
        # NameError when no satDict entry matches
        wavelengths = None
        scale = None
        offset = None
        LUT = None
        NODATA = None

        for sat in satDict:
            if sat['name'] == satellite:
                print('This is ' + satellite)
                wavelengths = sat['wavelengths']
                # scale/offset, LUT and NODATA are optional per satellite
                try:
                    scale = sat['scale']
                    offset = sat['offset']
                except KeyError:
                    print("No scale and offset found")
                    scale = None
                    offset = None
                try:
                    LUT = sat['LUT']
                except KeyError:
                    print("No LUT found")
                    LUT = [""] * len(wavelengths)
                try:
                    NODATA = sat['NODATA']
                except KeyError:
                    print("No NODATA values found")
                    NODATA = [""] * len(wavelengths)

        if wavelengths is None:
            raise AttributeError("No Eumetcast geostationary satellite")

        path = gdalDataset.GetDescription().split(",")[0].split("(")[1]
        datestamp = gdalDataset.GetDescription().split(",")[3]
        if satellite[0:3] == 'MSG':
            resolution = 'H'
            dataType = 'T'  # Brightness temperatures and reflectances
        else:
            resolution = 'L'
            dataType = 'N'  # Counts, for manual calibration

        metaDict = []
        for i, wavelength in enumerate(wavelengths):
            # channels above 2000 nm are thermal
            if wavelength > 2000:
                standard_name = 'brightness_temperature'
            else:
                standard_name = 'albedo'
            bandSource = ('MSG(' + path + ',' + resolution + ',' + satellite +
                          ',' + datestamp + ',' + str(i + 1) + ',Y,' +
                          dataType + ',1,1)')
            # skip channels that are missing from the input
            try:
                gdal.Open(bandSource)
            except Exception:
                print('Warning: band missing for wavelength ' +
                      str(wavelength) + 'nm')
                continue
            src = {
                'SourceFilename': bandSource,
                'SourceBand': 1,
                'LUT': LUT[i],
                'NODATA': NODATA[i]
            }
            dst = {'wkv': standard_name, 'wavelength': str(wavelength)}
            if scale is not None:
                bandScale = scale[i]
                bandOffset = offset[i]
                src['ScaleRatio'] = str(bandScale)
                src['ScaleOffset'] = str(bandOffset)
            metaDict.append({'src': src, 'dst': dst})

        # create empty VRT dataset with geolocation only
        self._init_from_gdal_dataset(gdalDataset)

        # Create bands
        self.create_bands(metaDict)

        # For Meteosat7 ch1 has higher resolution than ch2 and ch3
        # and for MSG, channel 12 (HRV) has
        # higher resolution than the other channels
        # If the high resolution channel is opened, the low res channels are
        # blown up to this size. If a low res channel is opened,
        # the high res channels
        # are reduced to this size.
        if satellite == 'MET7' or satellite[0:3] == 'MSG':
            node0 = Node.create(self.get_vrt_xml())
            bands = node0.nodeList("VRTRasterBand")
            # NOTE(review): if RasterXSize matches none of the known sizes
            # below, newSrcXSize/newSrcYSize stay unbound and the loop over
            # bands raises NameError -- verify all possible sizes are covered
            if satellite == 'MET7':
                if self.dataset.RasterXSize == 5032:  # High res ch1 is opened
                    newSrcXSize = u'2532'
                    newSrcYSize = u'2500'
                    bands = bands[1:]  # Ch2 and ch3 should be modified
                if self.dataset.RasterXSize == 2532:  # Low res ch is opened
                    newSrcXSize = u'5032'
                    newSrcYSize = u'5000'
                    bands = [bands[0]]  # Only ch1 needs to be modified
            elif satellite[0:3] == 'MSG':
                if self.dataset.RasterXSize == 11136:  # High res ch1 is opened
                    newSrcXSize = u'3712'
                    newSrcYSize = u'3712'
                    bands = bands[0:10]  # Channels 1-11 should be modified
                if self.dataset.RasterXSize == 3712:  # Low res ch is opened
                    newSrcXSize = u'11136'
                    newSrcYSize = u'11136'
                    bands = [bands[11]]  # Only ch12 needs to be modified

            for band in bands:
                node1 = (
                    band.nodeList("ComplexSource")[0].nodeList("SrcRect")[0])
                node1.setAttribute("xSize", newSrcXSize)
                node1.setAttribute("ySize", newSrcYSize)
            self.write_xml(node0.rawxml())

        # Set global metadata
        self.dataset.SetMetadata({'satID': satellite})

        # Set time
        # Adding valid time to dataset
        self.dataset.SetMetadataItem(
            'time_coverage_start',
            datetime.datetime.strptime(datestamp, '%Y%m%d%H%M').isoformat())

        return
Code example #37
0
File: mapper_landsat.py  Project: WYC19910220/nansat
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        '''Create a LANDSAT VRT from a .tar / .tar.gz / .tgz archive.

        Collects all LANDSAT GeoTIFF bands from the archive, adds them as
        radiance bands and shrinks any band whose raster size differs from
        the first band (e.g. the double-size band 8 of LANDSAT8).

        Parameters
        ----------
        fileName : str
            path to the tar archive with LANDSAT GeoTIFF bands
        gdalDataset : gdal.Dataset
            dataset opened by Nansat (unused; bands are re-opened via /vsitar/)
        gdalMetadata : dict
            metadata of gdalDataset (unused)

        Raises
        ------
        WrongMapperError
            if the file is not a tar archive or contains no LANDSAT bands
        '''
        # try to open .tar or .tar.gz or .tgz file with tar; keep the except
        # narrow so unrelated errors (e.g. KeyboardInterrupt) still propagate
        try:
            tarFile = tarfile.open(fileName)
        except (tarfile.TarError, IOError, OSError):
            raise WrongMapperError

        try:
            tarNames = tarFile.getnames()
        finally:
            # only the member names are needed here; band data is read
            # later by GDAL through the /vsitar/ virtual file system
            tarFile.close()

        metaDict = []
        for tarName in tarNames:
            # LANDSAT band files start with 'L' or 'M' and end with .TIF/.tif;
            # the band number is the two characters before the extension
            if ((tarName[0] == 'L' or tarName[0] == 'M')
                    and (tarName[-4:] == '.TIF' or tarName[-4:] == '.tif')):
                bandNo = tarName[-6:-4]
                metaDict.append({
                    'src': {
                        'SourceFilename':
                        '/vsitar/%s/%s' % (fileName, tarName),
                        'SourceBand': 1
                    },
                    'dst': {
                        'wkv': 'toa_outgoing_spectral_radiance',
                        'suffix': bandNo
                    }
                })

        if not metaDict:
            raise WrongMapperError

        # remember bands whose raster size differs from the first band
        sizeDiffBands = []
        for iFile in range(len(metaDict)):
            tmpName = metaDict[iFile]['src']['SourceFilename']
            gdalDatasetTmp = gdal.Open(tmpName)
            if iFile == 0:
                gdalDatasetTmp0 = gdalDatasetTmp
                xSize = gdalDatasetTmp.RasterXSize
                ySize = gdalDatasetTmp.RasterYSize
            elif (xSize != gdalDatasetTmp.RasterXSize
                  or ySize != gdalDatasetTmp.RasterYSize):
                sizeDiffBands.append(iFile)

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDatasetTmp0)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # 8th band of LANDSAT8 is a double size band.
        # Reduce the size to same as the 1st band.
        if sizeDiffBands:
            vrtXML = self.read_xml()
            node0 = Node.create(vrtXML)
            for iBand in sizeDiffBands:
                iBandNode = node0.nodeList('VRTRasterBand')[iBand]
                iNodeDstRect = iBandNode.node('DstRect')
                # force the destination rectangle to the first band's size
                iNodeDstRect.replaceAttribute('xSize', str(xSize))
                iNodeDstRect.replaceAttribute('ySize', str(ySize))

            self.write_xml(node0.rawxml())
Code example #38
0
    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 manifestonly=False, **kwargs):
        '''Create a VRT with calibrated Sentinel-1 level-1 bands.

        Locates measurement tiffs plus annotation, calibration and noise
        XML files inside a SAFE product (zip file or directory), builds
        look-up-table (LUT) VRTs resized to the full raster, and adds
        DN, look-direction, incidence/elevation-angle and pixel-function
        sigma0/beta0 bands.

        Parameters
        ----------
        fileName : str
            path to the SAFE product (zip file or unpacked directory)
        gdalDataset : gdal.Dataset
            dataset opened by Nansat (not used directly here)
        gdalMetadata : dict
            metadata of gdalDataset (not used directly here)
        manifestonly : bool
            if True, initialize from manifest + one annotation file only
            (fast constructor without any bands) and return immediately

        Raises
        ------
        WrongMapperError
            if required SAFE members are missing or the tiff metadata does
            not identify the product as Sentinel-1 level-1
        '''
        if zipfile.is_zipfile(fileName):
            zz = zipfile.PyZipFile(fileName)
            # Assuming the file names are consistent, the polarization
            # dependent data should be sorted equally such that we can use the
            # same indices consistently for all the following lists
            # THIS IS NOT THE CASE...
            mdsFiles = ['/vsizip/%s/%s' % (fileName, fn)
                        for fn in zz.namelist() if 'measurement/s1a' in fn]
            calFiles = ['/vsizip/%s/%s' % (fileName, fn)
                        for fn in zz.namelist()
                        if 'annotation/calibration/calibration-s1a' in fn]
            noiseFiles = ['/vsizip/%s/%s' % (fileName, fn)
                          for fn in zz.namelist()
                          if 'annotation/calibration/noise-s1a' in fn]
            annotationFiles = ['/vsizip/%s/%s' % (fileName, fn)
                               for fn in zz.namelist()
                               if 'annotation/s1a' in fn]
            manifestFile = ['/vsizip/%s/%s' % (fileName, fn)
                            for fn in zz.namelist()
                            if 'manifest.safe' in fn]
            zz.close()
        else:
            # unpacked SAFE directory: same members found via glob
            mdsFiles = glob.glob('%s/measurement/s1a*' % fileName)
            calFiles = glob.glob('%s/annotation/calibration/calibration-s1a*'
                                 % fileName)
            noiseFiles = glob.glob('%s/annotation/calibration/noise-s1a*'
                                   % fileName)
            annotationFiles = glob.glob('%s/annotation/s1a*'
                                        % fileName)
            manifestFile = glob.glob('%s/manifest.safe' % fileName)

        # all member groups must be present for this mapper to apply
        if (not mdsFiles or not calFiles or not noiseFiles or
                not annotationFiles or not manifestFile):
            raise WrongMapperError

        # measurement tiffs keyed by the 4th dash-separated token of the
        # base file name (presumably the polarisation token — see pol below)
        mdsDict = {}
        for ff in mdsFiles:
            mdsDict[
                os.path.splitext(os.path.basename(ff))[0].split('-')[3]] = ff

        # calibration XML contents keyed by the 5th dash-separated token
        self.calXMLDict = {}
        for ff in calFiles:
            self.calXMLDict[
                os.path.splitext(
                os.path.basename(ff))[0].split('-')[4]] = self.read_xml(ff)

        # noise XML contents keyed like the calibration files
        self.noiseXMLDict = {}
        for ff in noiseFiles:
            self.noiseXMLDict[
                os.path.splitext(
                os.path.basename(ff))[0].split('-')[4]] = self.read_xml(ff)

        # annotation XML contents keyed like the measurement files
        self.annotationXMLDict = {}
        for ff in annotationFiles:
            self.annotationXMLDict[
                os.path.splitext(
                os.path.basename(ff))[0].split('-')[3]] = self.read_xml(ff)

        self.manifestXML = self.read_xml(manifestFile[0])

        # very fast constructor without any bands
        # NOTE(review): dict.keys()[0] only works on Python 2; on Python 3
        # this would need list(...)[0] — confirm target interpreter
        if manifestonly:
            self.init_from_manifest_only(self.manifestXML,
                                         self.annotationXMLDict[
                                         self.annotationXMLDict.keys()[0]])
            return

        gdalDatasets = {}
        for key in mdsDict.keys():
            # Open data files
            gdalDatasets[key] = gdal.Open(mdsDict[key])

        if not gdalDatasets:
            raise WrongMapperError('No Sentinel-1 datasets found')

        # Check metadata to confirm it is Sentinel-1 L1
        # (metadata of an arbitrary measurement file is representative)
        for key in gdalDatasets:
            metadata = gdalDatasets[key].GetMetadata()
            break
        if not 'TIFFTAG_IMAGEDESCRIPTION' in metadata.keys():
            raise WrongMapperError
        if (not 'Sentinel-1' in metadata['TIFFTAG_IMAGEDESCRIPTION']
                and not 'L1' in metadata['TIFFTAG_IMAGEDESCRIPTION']):
            raise WrongMapperError

        warnings.warn('Sentinel-1 level-1 mapper is not yet adapted to '
                      'complex data. In addition, the band names should be '
                      'updated for multi-swath data - '
                      'and there might be other issues.')

        # create empty VRT dataset with geolocation only
        # (an arbitrary measurement dataset defines the raster geometry)
        for key in gdalDatasets:
            VRT.__init__(self, gdalDatasets[key])
            break

        # Read annotation, noise and calibration xml-files
        pol = {}
        it = 0
        for key in self.annotationXMLDict:
            xml = Node.create(self.annotationXMLDict[key])
            # polarisation string (e.g. 'HH'/'VV') from the annotation header
            pol[key] = (xml.node('product').
                        node('adsHeader')['polarisation'].upper())
            it += 1
            if it == 1:
                # Get incidence angle
                # (geolocation LUT is read once, from the first annotation)
                pi = xml.node('generalAnnotation').node('productInformation')

                self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                              str(pi['pass']))
                (X, Y, lon, lat, inc, ele, numberOfSamples,
                numberOfLines) = self.read_geolocation_lut(
                                                self.annotationXMLDict[key])

                # grid axes: unique pixel (X) and line (Y) coordinates
                X = np.unique(X)
                Y = np.unique(Y)

                # reshape flat LUT values onto the (Y, X) grid
                lon = np.array(lon).reshape(len(Y), len(X))
                lat = np.array(lat).reshape(len(Y), len(X))
                inc = np.array(inc).reshape(len(Y), len(X))
                ele = np.array(ele).reshape(len(Y), len(X))

                # blow the coarse angle grids up to the full raster size
                incVRT = VRT(array=inc, lat=lat, lon=lon)
                eleVRT = VRT(array=ele, lat=lat, lon=lon)
                incVRT = incVRT.get_resized_vrt(self.dataset.RasterXSize,
                                                self.dataset.RasterYSize,
                                                eResampleAlg=2)
                eleVRT = eleVRT.get_resized_vrt(self.dataset.RasterXSize,
                                                self.dataset.RasterYSize,
                                                eResampleAlg=2)
                self.bandVRTs['incVRT'] = incVRT
                self.bandVRTs['eleVRT'] = eleVRT

        # build calibration LUT VRTs (sigma0/beta0 resized to full raster)
        for key in self.calXMLDict:
            calibration_LUT_VRTs, longitude, latitude = (
                self.get_LUT_VRTs(self.calXMLDict[key],
                                  'calibrationVectorList',
                                  ['sigmaNought', 'betaNought',
                                   'gamma', 'dn']
                                  ))
            self.bandVRTs['LUT_sigmaNought_VRT_'+pol[key]] = (
                calibration_LUT_VRTs['sigmaNought'].
                get_resized_vrt(self.dataset.RasterXSize,
                                self.dataset.RasterYSize,
                                eResampleAlg=1))
            self.bandVRTs['LUT_betaNought_VRT_'+pol[key]] = (
                calibration_LUT_VRTs['betaNought'].
                get_resized_vrt(self.dataset.RasterXSize,
                                self.dataset.RasterYSize,
                                eResampleAlg=1))
            # NOTE(review): gamma/dn keys carry no polarisation suffix, so
            # with several polarisations the last loop iteration wins —
            # confirm whether that is intended
            self.bandVRTs['LUT_gamma_VRT'] = calibration_LUT_VRTs['gamma']
            self.bandVRTs['LUT_dn_VRT'] = calibration_LUT_VRTs['dn']

        # build noise LUT VRTs, one per polarisation
        for key in self.noiseXMLDict:
            noise_LUT_VRT = self.get_LUT_VRTs(self.noiseXMLDict[key],
                                              'noiseVectorList',
                                              ['noiseLut'])[0]
            self.bandVRTs['LUT_noise_VRT_'+pol[key]] = (
                noise_LUT_VRT['noiseLut'].get_resized_vrt(
                    self.dataset.RasterXSize,
                    self.dataset.RasterYSize,
                    eResampleAlg=1))

        metaDict = []
        bandNumberDict = {}
        bnmax = 0
        for key in gdalDatasets.keys():
            dsPath, dsName = os.path.split(mdsDict[key])
            name = 'DN_%s' % pol[key]
            # A dictionary of band numbers is needed for the pixel function
            # bands further down. This is not the best solution. It would be
            # better to have a function in VRT that returns the number given a
            # band name. This function exists in Nansat but could perhaps be
            # moved to VRT? The existing nansat function could just call the
            # VRT one...
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            band = gdalDatasets[key].GetRasterBand(1)
            dtype = band.DataType
            metaDict.append({
                'src': {
                    'SourceFilename': mdsDict[key],
                    'SourceBand': 1,
                    'DataType': dtype,
                },
                'dst': {
                    'name': name,
                    #'SourceTransferType': gdal.GetDataTypeName(dtype),
                    #'dataType': 6,
                },
            })
        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        '''
        Calibration should be performed as

        s0 = DN^2/sigmaNought^2,

        where sigmaNought is from e.g.
        annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml,
        and DN is the Digital Numbers in the tiff files.

        Also the noise should be subtracted.

        See
        https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT
        '''
        # Get look direction
        # (bearing between consecutive grid rows + 90 degrees)
        sat_heading = initial_bearing(longitude[:-1, :],
                                      latitude[:-1, :],
                                      longitude[1:, :],
                                      latitude[1:, :])
        look_direction = scipy.ndimage.interpolation.zoom(
            np.mod(sat_heading + 90, 360),
            (np.shape(longitude)[0] / (np.shape(longitude)[0]-1.), 1))

        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT(array=look_direction_u,
                         lat=latitude, lon=longitude)
        look_v_VRT = VRT(array=look_direction_v,
                         lat=latitude, lon=longitude)
        # recombine u/v components into a direction via a pixel function
        lookVRT = VRT(lat=latitude, lon=longitude)
        lookVRT._create_band([{'SourceFilename': look_u_VRT.fileName,
                               'SourceBand': 1},
                              {'SourceFilename': look_v_VRT.fileName,
                               'SourceBand': 1}],
                             {'PixelFunctionType': 'UVToDirectionTo'}
                             )

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize,
                                          self.dataset.RasterYSize,
                                          eResampleAlg=1)

        # Store VRTs so that they are accessible later
        self.bandVRTs['look_u_VRT'] = look_u_VRT
        self.bandVRTs['look_v_VRT'] = look_v_VRT
        self.bandVRTs['lookVRT'] = lookVRT

        metaDict = []
        # Add bands to full size VRT
        for key in pol:
            name = 'LUT_sigmaNought_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': {'SourceFilename':
                         (self.bandVRTs['LUT_sigmaNought_VRT_' +
                          pol[key]].fileName),
                         'SourceBand': 1
                         },
                 'dst': {'name': name
                         }
                 })
            name = 'LUT_noise_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename': self.bandVRTs['LUT_noise_VRT_' +
                                                   pol[key]].fileName,
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })

        name = 'look_direction'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        metaDict.append({
            'src': {
                'SourceFilename': self.bandVRTs['lookVRT'].fileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': name
            }
        })

        # sigma0/beta0 bands: pixel functions combining DN with the LUTs
        for key in gdalDatasets.keys():
            dsPath, dsName = os.path.split(mdsDict[key])
            name = 'sigma0_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': [{'SourceFilename': self.fileName,
                          'SourceBand': bandNumberDict['DN_%s' % pol[key]],
                          },
                         {'SourceFilename':
                          (self.bandVRTs['LUT_sigmaNought_VRT_%s'
                           % pol[key]].fileName),
                          'SourceBand': 1
                          }
                         ],
                 'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                         'PixelFunctionType': 'Sentinel1Calibration',
                         'polarization': pol[key],
                         'suffix': pol[key],
                         },
                 })
            name = 'beta0_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': [{'SourceFilename': self.fileName,
                          'SourceBand': bandNumberDict['DN_%s' % pol[key]]
                          },
                         {'SourceFilename':
                          (self.bandVRTs['LUT_betaNought_VRT_%s'
                           % pol[key]].fileName),
                          'SourceBand': 1
                          }
                         ],
                 'dst': {'wkv': 'surface_backwards_brightness_coefficient_of_radar_wave',
                         'PixelFunctionType': 'Sentinel1Calibration',
                         'polarization': pol[key],
                         'suffix': pol[key],
                         },
                 })

        self._create_bands(metaDict)

        # Add incidence angle as band
        name = 'incidence_angle'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.bandVRTs['incVRT'].fileName,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_incidence',
               'name': name}
        self._create_band(src, dst)
        self.dataset.FlushCache()

        # Add elevation angle as band
        name = 'elevation_angle'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.bandVRTs['eleVRT'].fileName,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_elevation',
               'name': name}
        self._create_band(src, dst)
        self.dataset.FlushCache()

        # Add sigma0_VV
        # (synthesized from HH via a pixel function when only HH exists)
        pp = [pol[key] for key in pol]
        if 'VV' not in pp and 'HH' in pp:
            name = 'sigma0_VV'
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            src = [{'SourceFilename': self.fileName,
                    'SourceBand': bandNumberDict['DN_HH'],
                    },
                   {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_HH'].
                                       fileName),
                    'SourceBand': 1
                    },
                   {'SourceFilename': (self.bandVRTs['LUT_sigmaNought_VRT_HH'].
                                       fileName),
                    'SourceBand': 1,
                    },
                   {'SourceFilename': self.bandVRTs['incVRT'].fileName,
                    'SourceBand': 1}
                   ]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self._create_band(src, dst)
            self.dataset.FlushCache()

        # set time as acquisition start time
        # (parsed from the safe:acquisitionPeriod section of the manifest)
        n = Node.create(self.manifestXML)
        meta = n.node('metadataSection')
        for nn in meta.children:
            if nn.getAttribute('ID') == u'acquisitionPeriod':
                # set valid time
                self.dataset.SetMetadataItem(
                    'time_coverage_start',
                    parse((nn.node('metadataWrap').
                           node('xmlData').
                           node('safe:acquisitionPeriod')['safe:startTime'])
                          ).isoformat())
                self.dataset.SetMetadataItem(
                    'time_coverage_end',
                    parse((nn.node('metadataWrap').
                           node('xmlData').
                           node('safe:acquisitionPeriod')['safe:stopTime'])
                          ).isoformat())

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('sar')
        ee = pti.get_gcmd_platform('sentinel-1a')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Code example #39
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
        '''Create a LANDSAT VRT at the resolution of the highest-resolution band.

        Opens a .tar/.tar.gz/.tgz LANDSAT archive, collects all GeoTIFF
        bands, keeps only those with the highest resolution, and rescales
        the VRT raster size and geotransform accordingly.

        Bug fixed: the size-comparison chain was not chained to the
        first-file initialization, so the first band was appended twice.

        Parameters
        ----------
        fileName : str
            path to the tar archive with LANDSAT GeoTIFF bands
        gdalDataset : gdal.Dataset
            dataset opened by Nansat (unused; bands are re-opened via /vsitar/)
        gdalMetadata : dict
            metadata of gdalDataset (unused)

        Raises
        ------
        WrongMapperError
            if the file is not a tar archive or contains no LANDSAT bands
        '''
        # try to open .tar or .tar.gz or .tgz file with tar; keep the except
        # narrow so unrelated errors (e.g. KeyboardInterrupt) still propagate
        try:
            tarFile = tarfile.open(fileName)
        except (tarfile.TarError, IOError, OSError):
            raise WrongMapperError

        try:
            tarNames = tarFile.getnames()
        finally:
            # only the member names are needed; bands are read via /vsitar/
            tarFile.close()

        metaDictAll = []
        for tarName in tarNames:
            if ((tarName[0] == 'L' or tarName[0] == 'M') and
               (tarName[-4:] == '.TIF' or tarName[-4:] == '.tif')):
                # create metadataDict entries for all bands
                bandNo = tarName[-6:-4]
                metaDictAll.append({
                    'src': {'SourceFilename': '/vsitar/%s/%s' % (fileName,
                                                                 tarName),
                            'SourceBand':  1},
                    'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                            'suffix': bandNo}})

        if not metaDictAll:
            raise WrongMapperError

        # keep only the metadata entries with the highest resolution
        for iFile in range(len(metaDictAll)):
            tmpName = metaDictAll[iFile]['src']['SourceFilename']
            gdalDatasetTmp = gdal.Open(tmpName)
            if iFile == 0:
                # set an initial size from the first band
                gdalDatasetTmp0 = gdalDatasetTmp
                xSize0 = gdalDatasetTmp.RasterXSize
                ySize0 = gdalDatasetTmp.RasterYSize
                xSize, ySize = xSize0, ySize0
                metaDict = [metaDictAll[0]]
                ratio = 1.0
            # if gdalDatasetTmp is larger than the current size, restart the
            # selection with this band (chained with elif so the first file
            # is not appended twice)
            elif (xSize < gdalDatasetTmp.RasterXSize and
                  ySize < gdalDatasetTmp.RasterYSize):
                ratio = float(xSize0) / float(gdalDatasetTmp.RasterXSize)
                xSize = gdalDatasetTmp.RasterXSize
                ySize = gdalDatasetTmp.RasterYSize
                metaDict = [metaDictAll[iFile]]
            # if gdalDatasetTmp matches the current size, append its metadata
            elif (xSize == gdalDatasetTmp.RasterXSize and
                  ySize == gdalDatasetTmp.RasterYSize):
                metaDict.append(metaDictAll[iFile])

        # modify geoTransform for the highest resolution
        geoTransform = list(gdalDatasetTmp.GetGeoTransform())
        geoTransform[1] = float(geoTransform[1]) * ratio
        geoTransform[5] = float(geoTransform[5]) * ratio

        # create empty VRT dataset with geolocation only
        VRT.__init__(self, gdalDatasetTmp0)

        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        # 8th band of LANDSAT8 is a double size band.
        # Set the VRT raster size to the highest resolution found.
        vrtXML = self.read_xml()
        node0 = Node.create(vrtXML)
        node0.replaceAttribute('rasterXSize', str(xSize))
        node0.replaceAttribute('rasterYSize', str(ySize))
        self.write_xml(str(node0.rawxml()))

        # set new geoTransform
        if ratio != 1.0:
            self.dataset.SetGeoTransform(tuple(geoTransform))
Code example #40
0
    def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):

        if zipfile.is_zipfile(fileName):
            zz = zipfile.PyZipFile(fileName)
            # Assuming the file names are consistent, the polarization
            # dependent data should be sorted equally such that we can use the
            # same indices consistently for all the following lists
            # THIS IS NOT THE CASE...
            mdsFiles = ['/vsizip/%s/%s' % (fileName, fn)
                        for fn in zz.namelist() if 'measurement/s1a' in fn]
            calFiles = ['/vsizip/%s/%s' % (fileName, fn)
                        for fn in zz.namelist()
                        if 'annotation/calibration/calibration-s1a' in fn]
            noiseFiles = ['/vsizip/%s/%s' % (fileName, fn)
                          for fn in zz.namelist()
                          if 'annotation/calibration/noise-s1a' in fn]
            annotationFiles = ['/vsizip/%s/%s' % (fileName, fn)
                               for fn in zz.namelist()
                               if 'annotation/s1a' in fn]
            manifestFile = ['/vsizip/%s/%s' % (fileName, fn)
                            for fn in zz.namelist()
                            if 'manifest.safe' in fn]
            zz.close()
        else:
            mdsFiles = glob.glob('%s/measurement/s1a*' % fileName)
            calFiles = glob.glob('%s/annotation/calibration/calibration-s1a*'
                                 % fileName)
            noiseFiles = glob.glob('%s/annotation/calibration/noise-s1a*'
                                   % fileName)
            annotationFiles = glob.glob('%s/annotation/s1a*'
                                        % fileName)
            manifestFile = glob.glob('%s/manifest.safe' % fileName)

        if (not mdsFiles or not calFiles or not noiseFiles or
                not annotationFiles or not manifestFile):
            raise WrongMapperError

        mdsDict = {}
        for mds in mdsFiles:
            mdsDict[int((os.path.splitext(os.path.basename(mds))[0].
                         split('-'))[-1:][0])] = mds
        calDict = {}
        for ff in calFiles:
            calDict[int((os.path.splitext(os.path.basename(ff))[0].
                         split('-'))[-1:][0])] = ff
        noiseDict = {}
        for ff in noiseFiles:
            noiseDict[int((os.path.splitext(os.path.basename(ff))[0].
                           split('-'))[-1:][0])] = ff
        annotationDict = {}
        for ff in annotationFiles:
            annotationDict[int((os.path.splitext(os.path.basename(ff))[0].
                                split('-'))[-1:][0])] = ff

        manifestXML = self.read_xml(manifestFile[0])

        gdalDatasets = {}
        for key in mdsDict.keys():
            # Open data files
            gdalDatasets[key] = gdal.Open(mdsDict[key])

        if not gdalDatasets:
            raise WrongMapperError('No Sentinel-1 datasets found')

        # Check metadata to confirm it is Sentinel-1 L1
        for key in gdalDatasets:
            metadata = gdalDatasets[key].GetMetadata()
            break
        if not 'TIFFTAG_IMAGEDESCRIPTION' in metadata.keys():
            raise WrongMapperError
        if (not 'Sentinel-1' in metadata['TIFFTAG_IMAGEDESCRIPTION']
                and not 'L1' in metadata['TIFFTAG_IMAGEDESCRIPTION']):
            raise WrongMapperError

        warnings.warn('Sentinel-1 level-1 mapper is not yet adapted to '
                      'complex data. In addition, the band names should be '
                      'updated for multi-swath data - '
                      'and there might be other issues.')

        # create empty VRT dataset with geolocation only
        for key in gdalDatasets:
            VRT.__init__(self, gdalDatasets[key])
            break

        # Read annotation, noise and calibration xml-files
        pol = {}
        it = 0
        for key in annotationDict.keys():
            xml = Node.create(self.read_xml(annotationDict[key]))
            pol[key] = (xml.node('product').
                        node('adsHeader')['polarisation'].upper())
            it += 1
            if it == 1:
                # Get incidence angle
                pi = xml.node('generalAnnotation').node('productInformation')
                self.dataset.SetMetadataItem('ORBIT_DIRECTION',
                                             str(pi['pass']))
                # Incidence angles are found in
                #<geolocationGrid>
                #    <geolocationGridPointList count="#">
                #          <geolocationGridPoint>
                geolocationGridPointList = (xml.node('geolocationGrid').
                                            children[0])
                X = []
                Y = []
                lon = []
                lat = []
                inc = []
                ele = []
                for gridPoint in geolocationGridPointList.children:
                    X.append(int(gridPoint['pixel']))
                    Y.append(int(gridPoint['line']))
                    lon.append(float(gridPoint['longitude']))
                    lat.append(float(gridPoint['latitude']))
                    inc.append(float(gridPoint['incidenceAngle']))
                    ele.append(float(gridPoint['elevationAngle']))

                X = np.unique(X)
                Y = np.unique(Y)

                lon = np.array(lon).reshape(len(Y), len(X))
                lat = np.array(lat).reshape(len(Y), len(X))
                inc = np.array(inc).reshape(len(Y), len(X))
                ele = np.array(ele).reshape(len(Y), len(X))

                incVRT = VRT(array=inc, lat=lat, lon=lon)
                eleVRT = VRT(array=ele, lat=lat, lon=lon)
                incVRT = incVRT.get_resized_vrt(self.dataset.RasterXSize,
                                                self.dataset.RasterYSize,
                                                eResampleAlg=2)
                eleVRT = eleVRT.get_resized_vrt(self.dataset.RasterXSize,
                                                self.dataset.RasterYSize,
                                                eResampleAlg=2)
                self.bandVRTs['incVRT'] = incVRT
                self.bandVRTs['eleVRT'] = eleVRT
        for key in calDict.keys():
            xml = self.read_xml(calDict[key])
            calibration_LUT_VRTs, longitude, latitude = (
                self.get_LUT_VRTs(xml,
                                  'calibrationVectorList',
                                  ['sigmaNought', 'betaNought',
                                   'gamma', 'dn']
                                  ))
            self.bandVRTs['LUT_sigmaNought_VRT_'+pol[key]] = (
                calibration_LUT_VRTs['sigmaNought'].
                get_resized_vrt(self.dataset.RasterXSize,
                                self.dataset.RasterYSize,
                                eResampleAlg=1))
            self.bandVRTs['LUT_betaNought_VRT_'+pol[key]] = (
                calibration_LUT_VRTs['betaNought'].
                get_resized_vrt(self.dataset.RasterXSize,
                                self.dataset.RasterYSize,
                                eResampleAlg=1))
            self.bandVRTs['LUT_gamma_VRT'] = calibration_LUT_VRTs['gamma']
            self.bandVRTs['LUT_dn_VRT'] = calibration_LUT_VRTs['dn']
        for key in noiseDict.keys():
            xml = self.read_xml(noiseDict[key])
            noise_LUT_VRT = self.get_LUT_VRTs(xml, 'noiseVectorList',
                                              ['noiseLut'])[0]
            self.bandVRTs['LUT_noise_VRT_'+pol[key]] = (
                noise_LUT_VRT['noiseLut'].get_resized_vrt(
                    self.dataset.RasterXSize,
                    self.dataset.RasterYSize,
                    eResampleAlg=1))

        metaDict = []
        bandNumberDict = {}
        bnmax = 0
        for key in gdalDatasets.keys():
            dsPath, dsName = os.path.split(mdsDict[key])
            name = 'DN_%s' % pol[key]
            # A dictionary of band numbers is needed for the pixel function
            # bands further down. This is not the best solution. It would be
            # better to have a function in VRT that returns the number given a
            # band name. This function exists in Nansat but could perhaps be
            # moved to VRT? The existing nansat function could just call the
            # VRT one...
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            band = gdalDatasets[key].GetRasterBand(1)
            dtype = band.DataType
            metaDict.append({
                'src': {
                    'SourceFilename': mdsDict[key],
                    'SourceBand': 1,
                    'DataType': dtype,
                },
                'dst': {
                    'name': name,
                    'SourceTransferType': gdal.GetDataTypeName(dtype),
                    'dataType': 6,
                },
            })
        # add bands with metadata and corresponding values to the empty VRT
        self._create_bands(metaDict)

        '''
        Calibration should be performed as

        s0 = DN^2/sigmaNought^2,

        where sigmaNought is from e.g.
        annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml,
        and DN is the Digital Numbers in the tiff files.

        Also the noise should be subtracted.

        See
        https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT
        '''
        # Get look direction
        sat_heading = initial_bearing(longitude[:-1, :],
                                      latitude[:-1, :],
                                      longitude[1:, :],
                                      latitude[1:, :])
        look_direction = scipy.ndimage.interpolation.zoom(
            np.mod(sat_heading + 90, 360),
            (np.shape(longitude)[0] / (np.shape(longitude)[0]-1.), 1))

        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT(array=look_direction_u,
                         lat=latitude, lon=longitude)
        look_v_VRT = VRT(array=look_direction_v,
                         lat=latitude, lon=longitude)
        lookVRT = VRT(lat=latitude, lon=longitude)
        lookVRT._create_band([{'SourceFilename': look_u_VRT.fileName,
                               'SourceBand': 1},
                              {'SourceFilename': look_v_VRT.fileName,
                               'SourceBand': 1}],
                             {'PixelFunctionType': 'UVToDirectionTo'}
                             )

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize,
                                          self.dataset.RasterYSize,
                                          eResampleAlg=1)

        # Store VRTs so that they are accessible later
        self.bandVRTs['look_u_VRT'] = look_u_VRT
        self.bandVRTs['look_v_VRT'] = look_v_VRT
        self.bandVRTs['lookVRT'] = lookVRT

        metaDict = []
        # Add bands to full size VRT
        for key in pol:
            name = 'LUT_sigmaNought_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': {'SourceFilename':
                         (self.bandVRTs['LUT_sigmaNought_VRT_' +
                          pol[key]].fileName),
                         'SourceBand': 1
                         },
                 'dst': {'name': name
                         }
                 })
            name = 'LUT_noise_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename': self.bandVRTs['LUT_noise_VRT_' +
                                                   pol[key]].fileName,
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })

        name = 'look_direction'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        metaDict.append({
            'src': {
                'SourceFilename': self.bandVRTs['lookVRT'].fileName,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': name
            }
        })

        for key in gdalDatasets.keys():
            dsPath, dsName = os.path.split(mdsDict[key])
            name = 'sigma0_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': [{'SourceFilename': self.fileName,
                          'SourceBand': bandNumberDict['DN_%s' % pol[key]],
                          },
                         {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_%s'
                                             % pol[key]].fileName),
                          'SourceBand': 1
                          },
                         {'SourceFilename':
                          (self.bandVRTs['LUT_sigmaNought_VRT_%s'
                           % pol[key]].fileName),
                          'SourceBand': 1
                          }
                         ],
                 'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                         'PixelFunctionType': 'Sentinel1Calibration',
                         'polarization': pol[key],
                         'suffix': pol[key],
                         },
                 })
            name = 'beta0_%s' % pol[key]
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            metaDict.append(
                {'src': [{'SourceFilename': self.fileName,
                          'SourceBand': bandNumberDict['DN_%s' % pol[key]]
                          },
                         {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_%s'
                                             % pol[key]].fileName),
                          'SourceBand': 1
                          },
                         {'SourceFilename':
                          (self.bandVRTs['LUT_betaNought_VRT_%s'
                           % pol[key]].fileName),
                          'SourceBand': 1
                          }
                         ],
                 'dst': {'wkv': 'surface_backwards_brightness_coefficient_of_radar_wave',
                         'PixelFunctionType': 'Sentinel1Calibration',
                         'polarization': pol[key],
                         'suffix': pol[key],
                         },
                 })

        self._create_bands(metaDict)

        # Add incidence angle as band
        name = 'incidence_angle'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.bandVRTs['incVRT'].fileName,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_incidence',
               'name': name}
        self._create_band(src, dst)
        self.dataset.FlushCache()

        # Add elevation angle as band
        name = 'elevation_angle'
        bandNumberDict[name] = bnmax+1
        bnmax = bandNumberDict[name]
        src = {'SourceFilename': self.bandVRTs['eleVRT'].fileName,
               'SourceBand': 1}
        dst = {'wkv': 'angle_of_elevation',
               'name': name}
        self._create_band(src, dst)
        self.dataset.FlushCache()

        # Add sigma0_VV
        pp = [pol[key] for key in pol]
        if 'VV' not in pp and 'HH' in pp:
            name = 'sigma0_VV'
            bandNumberDict[name] = bnmax+1
            bnmax = bandNumberDict[name]
            src = [{'SourceFilename': self.fileName,
                    'SourceBand': bandNumberDict['DN_HH'],
                    },
                   {'SourceFilename': (self.bandVRTs['LUT_noise_VRT_HH'].
                                       fileName),
                    'SourceBand': 1
                    },
                   {'SourceFilename': (self.bandVRTs['LUT_sigmaNought_VRT_HH'].
                                       fileName),
                    'SourceBand': 1,
                    },
                   {'SourceFilename': self.bandVRTs['incVRT'].fileName,
                    'SourceBand': 1}
                   ]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
                   'polarization': 'VV',
                   'suffix': 'VV'}
            self._create_band(src, dst)
            self.dataset.FlushCache()

        # set time as acquisition start time
        n = Node.create(manifestXML)
        meta = n.node('metadataSection')
        for nn in meta.children:
            if nn.getAttribute('ID') == u'acquisitionPeriod':
                # set valid time
                self.dataset.SetMetadataItem(
                    'time_coverage_start',
                    parse((nn.node('metadataWrap').
                           node('xmlData').
                           node('safe:acquisitionPeriod')['safe:startTime'])
                          ).isoformat())
                self.dataset.SetMetadataItem(
                    'time_coverage_end',
                    parse((nn.node('metadataWrap').
                           node('xmlData').
                           node('safe:acquisitionPeriod')['safe:stopTime'])
                          ).isoformat())

        # Get dictionary describing the instrument and platform according to
        # the GCMD keywords
        mm = pti.get_gcmd_instrument('sar')
        ee = pti.get_gcmd_platform('sentinel-1a')

        # TODO: Validate that the found instrument and platform are indeed what we
        # want....

        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
        self.dataset.SetMetadataItem('platform', json.dumps(ee))
Code example #41
file: test_node.py — project: zhangjiahuan17/nansat
 def test_str(self):
     """str() of a node with a value shows the tag, then the bracketed value."""
     node_tag = 'Root'
     node_value = 'Value'
     node = Node(node_tag, value=node_value)
     # Expected layout: tag on the first line, indented "value: [...]" below it.
     expected = '%s\n    value: [%s]' % (node_tag, node_value)
     self.assertEqual(str(node), expected)