Example #1
def write_shp(G, outdir):
    try:
        from osgeo import ogr
    except ImportError:
        raise ImportError("write_shp requires OGR: http://www.gdal.org/")
    # easier to debug in python if ogr throws exceptions
    ogr.UseExceptions()

    def netgeometry(key, data):
        if 'Wkb' in data:
            geom = ogr.CreateGeometryFromWkb(data['Wkb'])
        elif 'Wkt' in data:
            geom = ogr.CreateGeometryFromWkt(data['Wkt'])
        elif isinstance(key[0], tuple):  # edge keys are packed tuples
            geom = ogr.Geometry(ogr.wkbLineString)
            _from, _to = key[0], key[1]
            try:
                geom.SetPoint(0, *_from)
                geom.SetPoint(1, *_to)
            except TypeError:
                # assume user used tuple of int and choked ogr
                _ffrom = [float(x) for x in _from]
                _fto = [float(x) for x in _to]
                geom.SetPoint(0, *_ffrom)
                geom.SetPoint(1, *_fto)
        else:
            geom = ogr.Geometry(ogr.wkbPoint)
            try:
                geom.SetPoint(0, *key)
            except TypeError:
                # assume user used tuple of int and choked ogr
                fkey = [float(x) for x in key]
                geom.SetPoint(0, *fkey)

        return geom

    # Create a feature, with an optional attributes argument (expects a dict)
    def create_feature(geometry, lyr, attributes=None):
        feature = ogr.Feature(lyr.GetLayerDefn())
        feature.SetGeometry(geometry)
        if attributes is not None:
            # Loop through attributes, assigning data to each field
            for field, data in attributes.items():
                feature.SetField(field, data)
        lyr.CreateFeature(feature)
        feature.Destroy()

    # Conversion dict between python and ogr types
    OGRTypes = {int: ogr.OFTInteger, str: ogr.OFTString, float: ogr.OFTReal}

    # Check/add fields from attribute data to Shapefile layers
    def add_fields_to_layer(key, value, fields, layer):
        # Field not in previous edges so add to dict
        if type(value) in OGRTypes:
            fields[key] = OGRTypes[type(value)]
        else:
            # Data type not supported, default to string (char 80)
            fields[key] = ogr.OFTString
        # Create the new field
        newfield = ogr.FieldDefn(key, fields[key])
        layer.CreateField(newfield)

    drv = ogr.GetDriverByName("ESRI Shapefile")
    shpdir = drv.CreateDataSource(outdir)
    # delete pre-existing output first otherwise ogr chokes
    try:
        shpdir.DeleteLayer("nodes")
    except Exception:
        pass
    nodes = shpdir.CreateLayer("nodes", None, ogr.wkbPoint)

    # Storage for node field names and their data types
    node_fields = {}

    def create_attributes(data, fields, layer):
        attributes = {}  # storage for attribute data (indexed by field names)
        for key, value in data.items():
            # Reject spatial data not required for attribute table
            if key not in ('Json', 'Wkt', 'Wkb', 'ShpName'):
                # Check/add field and data type to fields dict
                if key not in fields:
                    add_fields_to_layer(key, value, fields, layer)
                # Store the data from the field in the dict passed to create_feature()
                attributes[key] = value
        return attributes, layer

    for n in G:
        data = G.nodes[n]
        g = netgeometry(n, data)
        attributes, nodes = create_attributes(data, node_fields, nodes)
        create_feature(g, nodes, attributes)

    try:
        shpdir.DeleteLayer("edges")
    except Exception:
        pass
    edges = shpdir.CreateLayer("edges", None, ogr.wkbLineString)

    # New edge attribute write support merged into edge loop
    edge_fields = {}  # storage for field names and their data types

    for e in G.edges(data=True):
        data = G.get_edge_data(*e)
        g = netgeometry(e, data)
        attributes, edges = create_attributes(e[2], edge_fields, edges)
        create_feature(g, edges, attributes)

    nodes, edges = None, None
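
A minimal usage sketch for the function above, assuming networkx is available and the nodes are (x, y) coordinate tuples; the output directory path is hypothetical:

import networkx as nx

G = nx.DiGraph()
G.add_edge((1.0, 2.0), (2.0, 3.0), weight=3.4)  # nodes double as coordinates
write_shp(G, '/tmp/shapefiles')  # produces nodes.shp and edges.shp in that directory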
Example #2
def write_shp(G, outdir):
    """Writes a networkx.DiGraph to two shapefiles, edges and nodes.
    Nodes and edges are expected to have a Well Known Binary (Wkb) or
    Well Known Text (Wkt) key in order to generate geometries. Also
    acceptable are nodes with a numeric tuple key (x,y).

    "The Esri Shapefile or simply a shapefile is a popular geospatial vector
    data format for geographic information systems software [1]_."

    Parameters
    ----------
    outdir : directory path
       Output directory for the two shapefiles.

    Returns
    -------
    None

    Examples
    --------
    nx.write_shp(digraph, '/shapefiles') # doctest +SKIP

    References
    ----------
    .. [1] http://en.wikipedia.org/wiki/Shapefile
    """
    try:
        from osgeo import ogr
    except ImportError:
        raise ImportError("write_shp requires OGR: http://www.gdal.org/")
    # easier to debug in python if ogr throws exceptions
    ogr.UseExceptions()

    def netgeometry(key, data):
        if 'Wkb' in data:
            geom = ogr.CreateGeometryFromWkb(data['Wkb'])
        elif 'Wkt' in data:
            geom = ogr.CreateGeometryFromWkt(data['Wkt'])
        elif isinstance(key[0], tuple):  # edge keys are packed tuples
            geom = ogr.Geometry(ogr.wkbLineString)
            _from, _to = key[0], key[1]
            try:
                geom.SetPoint(0, *_from)
                geom.SetPoint(1, *_to)
            except TypeError:
                # assume user used tuple of int and choked ogr
                _ffrom = [float(x) for x in _from]
                _fto = [float(x) for x in _to]
                geom.SetPoint(0, *_ffrom)
                geom.SetPoint(1, *_fto)
        else:
            geom = ogr.Geometry(ogr.wkbPoint)
            try:
                geom.SetPoint(0, *key)
            except TypeError:
                # assume user used tuple of int and choked ogr
                fkey = [float(x) for x in key]
                geom.SetPoint(0, *fkey)

        return geom

    # Create a feature, with an optional attributes argument (expects a dict)
    def create_feature(geometry, lyr, attributes=None):
        feature = ogr.Feature(lyr.GetLayerDefn())
        feature.SetGeometry(geometry)
        if attributes is not None:
            # Loop through attributes, assigning data to each field
            for field, data in attributes.items():
                feature.SetField(field, data)
        lyr.CreateFeature(feature)
        feature.Destroy()

    drv = ogr.GetDriverByName("ESRI Shapefile")
    shpdir = drv.CreateDataSource(outdir)
    # delete pre-existing output first otherwise ogr chokes
    try:
        shpdir.DeleteLayer("nodes")
    except Exception:
        pass
    nodes = shpdir.CreateLayer("nodes", None, ogr.wkbPoint)
    for n in G:
        data = G.nodes[n]
        g = netgeometry(n, data)
        create_feature(g, nodes)
    try:
        shpdir.DeleteLayer("edges")
    except Exception:
        pass
    edges = shpdir.CreateLayer("edges", None, ogr.wkbLineString)

    # New edge attribute write support merged into edge loop
    fields = {}  # storage for field names and their data types

    # Conversion dict between python and ogr types
    OGRTypes = {int: ogr.OFTInteger, str: ogr.OFTString, float: ogr.OFTReal}

    # Edge loop
    for e in G.edges(data=True):
        data = G.get_edge_data(*e)
        g = netgeometry(e, data)
        attributes = {}  # per-edge attribute data (indexed by field names)
        # Loop through attribute data in edges
        for key, data in e[2].items():
            # Reject spatial data not required for attribute table
            if key not in ('Json', 'Wkt', 'Wkb', 'ShpName'):
                # For all edges check/add field and data type to fields dict
                if key not in fields:
                    # Field not in previous edges so add to dict
                    if type(data) in OGRTypes:
                        fields[key] = OGRTypes[type(data)]
                    else:
                        # Data type not supported, default to string (char 80)
                        fields[key] = ogr.OFTString
                    # Create the new field
                    newfield = ogr.FieldDefn(key, fields[key])
                    edges.CreateField(newfield)
                    # Store the data from the new field in the dict passed to create_feature()
                    attributes[key] = data
                else:
                    # Field already exists; add data to the dict for create_feature()
                    attributes[key] = data
        # Create the feature, passing the new attribute data
        create_feature(g, edges, attributes)

    nodes, edges = None, None
Example #3
def getTimeseries(productcode, subproductcode, version, mapsetcode, geom,
                  start_date, end_date, aggregate):
    """Extract a timeseries from a list of files and return it as a JSON-serializable list.

    Applies to a single dataset (prod/sprod/version/mapset) between two dates.
    Several types of aggregation are foreseen:

        mean:       Sum(Xi)/N(Xi)        -> min/max not considered          e.g. Rain
        cumulate:   Sum(Xi)              -> min/max not considered          e.g. Fire
        count:      N(Xi where min < Xi < max)                              e.g. Vegetation anomalies
        surface:    count * PixelArea                                       e.g. Water Bodies
        percent:    count/Ntot                                              e.g. Vegetation anomalies
        precip:     compute the precipitation volume in m3*1E6              Rain (only)

    History: 1.0 :  Initial release - since 2.0.1 -> now renamed '_green' from greenwich package
             1.1 :  Since Feb. 2017, based on gdal.RasterizeLayer instead of greenwich,
                    to solve the issue with MULTIPOLYGON
    """

    ogr.UseExceptions()

    # Get Mapset Info
    mapset_info = querydb.get_mapset(mapsetcode=mapsetcode)

    # Prepare for computing conversion to area: the pixel size at Lat=0 is computed
    # The correction to the actual latitude (on AVERAGE value - will be computed below)
    const_d2km = 12364.35
    area_km_equator = abs(float(mapset_info.pixel_shift_lat)) * abs(
        float(mapset_info.pixel_shift_long)) * const_d2km

    # Get Product Info
    product_info = querydb.get_product_out_info(productcode=productcode,
                                                subproductcode=subproductcode,
                                                version=version)
    if len(product_info) > 0:
        # Get info from product_info
        scale_factor = 0
        scale_offset = 0
        nodata = 0
        date_format = ''
        for row in product_info:
            scale_factor = row.scale_factor
            scale_offset = row.scale_offset
            nodata = row.nodata
            date_format = row.date_format
            data_type = row.data_type_id

        # Create an output/temp shapefile for managing the output layer (could likely be simplified)
        try:
            tmpdir = tempfile.mkdtemp(prefix=__name__,
                                      suffix='_getTimeseries',
                                      dir=es_constants.base_tmp_dir)
        except Exception:
            logger.error('Cannot create temporary dir ' +
                         es_constants.base_tmp_dir + '. Exit')
            raise NameError('Error in creating tmpdir')

        out_shape = tmpdir + os.path.sep + "output_shape.shp"
        outDriver = ogr.GetDriverByName('ESRI Shapefile')

        # Create the output shapefile
        outDataSource = outDriver.CreateDataSource(out_shape)
        dest_srs = ogr.osr.SpatialReference()
        dest_srs.ImportFromEPSG(4326)

        outLayer = outDataSource.CreateLayer("Layer", dest_srs)
        # outLayer = outDataSource.CreateLayer("Layer")
        idField = ogr.FieldDefn("id", ogr.OFTInteger)
        outLayer.CreateField(idField)

        featureDefn = outLayer.GetLayerDefn()
        feature = ogr.Feature(featureDefn)
        feature.SetGeometry(geom)
        # area = geom.GetArea()
        feature.SetField("id", 1)
        outLayer.CreateFeature(feature)
        feature = None

        [list_files,
         dates_list] = getFilesList(productcode, subproductcode, version,
                                    mapsetcode, date_format, start_date,
                                    end_date)

        # Build a dictionary with filenames/dates
        dates_to_files_dict = dict(list(zip(dates_list, list_files)))

        # Generate unique list of files
        unique_list = set(list_files)
        uniqueFilesValues = []

        geo_mask_created = False
        for infile in unique_list:
            single_result = {
                'filename': '',
                'meanvalue_noscaling': nodata,
                'meanvalue': None
            }

            if infile.strip() != '' and os.path.isfile(infile):

                # Open input file
                orig_ds = gdal.Open(infile, gdal.GA_ReadOnly)
                orig_cs = osr.SpatialReference()
                orig_cs.ImportFromWkt(orig_ds.GetProjectionRef())
                orig_geoT = orig_ds.GetGeoTransform()
                x_origin = orig_geoT[0]
                y_origin = orig_geoT[3]
                pixel_size_x = orig_geoT[1]
                pixel_size_y = -orig_geoT[5]

                in_data_type_gdal = conv_data_type_to_gdal(data_type)

                # Create a mask from the geometry, with the same georef as the input file[s]
                if not geo_mask_created:

                    # Read polygon extent and round to raster resolution
                    x_min, x_max, y_min, y_max = outLayer.GetExtent()
                    x_min_round = int(old_div(
                        (x_min - x_origin),
                        pixel_size_x)) * pixel_size_x + x_origin
                    x_max_round = (
                        int(old_div(
                            (x_max - x_origin),
                            (pixel_size_x))) + 1) * pixel_size_x + x_origin
                    y_min_round = (
                        int(old_div(
                            (y_min - y_origin),
                            (pixel_size_y))) - 1) * pixel_size_y + y_origin
                    y_max_round = int(
                        old_div((y_max - y_origin),
                                (pixel_size_y))) * pixel_size_y + y_origin
                    # Create the destination data source
                    x_res = int(
                        round(
                            old_div((x_max_round - x_min_round),
                                    pixel_size_x)))
                    y_res = int(
                        round(
                            old_div((y_max_round - y_min_round),
                                    pixel_size_y)))
                    # Create mask in memory
                    mem_driver = gdal.GetDriverByName('MEM')
                    mem_ds = mem_driver.Create('', x_res, y_res, 1,
                                               in_data_type_gdal)
                    mask_geoT = [
                        x_min_round, pixel_size_x, 0, y_max_round, 0,
                        -pixel_size_y
                    ]
                    mem_ds.SetGeoTransform(mask_geoT)
                    mem_ds.SetProjection(orig_cs.ExportToWkt())
                    # Create a layer with '1' for the pixels to be selected
                    gdal.RasterizeLayer(mem_ds, [1], outLayer, burn_values=[1])

                    # Read the polygon-mask
                    geo_values = mem_ds.ReadAsArray()

                    # Create a mask from geo_values (mask-out the '0's)
                    geo_mask = ma.make_mask(geo_values == 0)
                    geo_mask_created = True
                    # Clean/Close objects
                    mem_ds = None
                    mem_driver = None
                    outDriver = None
                    outLayer = None

                # Read data from input file
                x_offset = int(old_div((x_min - x_origin), pixel_size_x))
                y_offset = int(old_div((y_origin - y_max), pixel_size_y))

                band_in = orig_ds.GetRasterBand(1)
                data = band_in.ReadAsArray(x_offset, y_offset, x_res, y_res)
                #   Catch the Error ES2-105 (polygon not included in Mapset)
                if data is None:
                    logger.error(
                        'ERROR: polygon extends out of file mapset for file: %s'
                        % infile)
                    return []

                # Create a masked array from the data (considering Nodata)
                masked_data = ma.masked_equal(data, nodata)

                # Apply on top of it the geo mask
                mxnodata = ma.masked_where(geo_mask, masked_data)

                # Test ONLY
                # write_ds_to_geotiff(mem_ds, '/data/processing/exchange/Tests/mem_ds.tif')

                if aggregate['aggregation_type'] in ('count', 'percent',
                                                     'surface', 'precip'):

                    if mxnodata.count() == 0:
                        meanResult = None
                    else:
                        mxrange = mxnodata
                        min_val = aggregate['aggregation_min']
                        max_val = aggregate['aggregation_max']

                        if min_val is not None:
                            min_val_scaled = old_div((min_val - scale_offset),
                                                     scale_factor)
                            mxrange = ma.masked_less(mxnodata, min_val_scaled)

                            # See ES2-271
                            if max_val is not None:
                                # Scale threshold from physical to digital value
                                max_val_scaled = old_div(
                                    (max_val - scale_offset), scale_factor)
                                mxrange = ma.masked_greater(
                                    mxrange, max_val_scaled)

                        elif max_val is not None:
                            # Scale threshold from physical to digital value
                            max_val_scaled = old_div((max_val - scale_offset),
                                                     scale_factor)
                            mxrange = ma.masked_greater(
                                mxnodata, max_val_scaled)

                        if aggregate['aggregation_type'] == 'percent':
                            # 'percent'
                            meanResult = float(mxrange.count()) / float(
                                mxnodata.count()) * 100

                        elif aggregate['aggregation_type'] == 'surface':
                            # 'surface'
                            # Estimate 'average' Latitude
                            y_avg = (y_min + y_max) / 2.0
                            pixelAvgArea = area_km_equator * math.cos(
                                old_div(y_avg, 180) * math.pi)
                            meanResult = float(mxrange.count()) * pixelAvgArea
                        elif aggregate['aggregation_type'] == 'precip':
                            # 'precip'
                            # Estimate 'average' Latitude
                            y_avg = (y_min + y_max) / 2.0
                            pixelAvgArea = area_km_equator * math.cos(
                                old_div(y_avg, 180) * math.pi)
                            n_pixels = mxnodata.count()
                            avg_precip = mxnodata.mean()
                            # Result is in km * km * mm i.e. 1E3 m*m*m -> we divide by 1E3 to get 1E6 m*m*m
                            meanResult = float(
                                n_pixels) * pixelAvgArea * avg_precip * 0.001
                        else:
                            # 'count'
                            meanResult = float(mxrange.count())

                    # For count-type aggregations the scaled and unscaled results are the same
                    finalvalue = meanResult

                else:  # 'mean' or 'cumulate'
                    if mxnodata.count() == 0:
                        finalvalue = None
                        meanResult = None
                    else:
                        if aggregate['aggregation_type'] == 'mean':
                            # 'mean'
                            meanResult = mxnodata.mean()
                        else:
                            # 'cumulate'
                            meanResult = mxnodata.sum()

                        finalvalue = (meanResult * scale_factor + scale_offset)

                # Assign results
                single_result['filename'] = infile
                single_result['meanvalue_noscaling'] = meanResult
                single_result['meanvalue'] = finalvalue

            else:
                logger.debug('ERROR: raster file does not exist - %s' % infile)

            uniqueFilesValues.append(single_result)

        # Define a dictionary to associate filenames/values
        files_to_values_dict = dict(
            (x['filename'], x['meanvalue']) for x in uniqueFilesValues)

        # Prepare array for result
        resultDatesValues = []

        # Returns a list of 'filenames', 'dates', 'values'
        for mydate in dates_list:
            my_filename = dates_to_files_dict[mydate]
            my_result = {
                'date': mydate,
                'meanvalue': files_to_values_dict[my_filename]
            }
            resultDatesValues.append(my_result)

        try:
            shutil.rmtree(tmpdir)
        except Exception:
            logger.debug('Error deleting tmpdir %s' % tmpdir)

        # Return result
        return resultDatesValues
    else:
        logger.debug(
            'ERROR: product not registered in the products table! - %s %s %s' %
            (productcode, subproductcode, version))
        return []
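
A standalone sketch of the aggregation formulas documented at the top of this function, using numpy masked arrays; the values and thresholds are illustrative, not from a real product:

import numpy as np
import numpy.ma as ma

data = ma.masked_equal(np.array([[0, 5, 7], [9, 0, 3]]), 0)  # treat 0 as nodata
mean_value = data.mean()                        # 'mean':     Sum(Xi)/N(Xi)   -> 6.0
cumulated = data.sum()                          # 'cumulate': Sum(Xi)         -> 24
in_range = ma.masked_greater(ma.masked_less(data, 4), 8)
count = float(in_range.count())                 # 'count':    N(4 <= Xi <= 8) -> 2.0
percent = count / float(data.count()) * 100     # 'percent':  count/Ntot      -> 50.0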
Example #4
class Watershed:
    """Define inputs and outputs for the main Watershed class"""

    ogr.UseExceptions()
    gdal.UseExceptions() 

    def __init__(self, x=None, y=None):

        self.x = x
        self.y = y
        self.catchmentIdentifier = None

        #geoms
        self.catchmentGeom = None
        self.splitCatchmentGeom = None
        self.upstreamBasinGeom = None
        self.mergedCatchmentGeom = None    

        #outputs
        self.catchment = None
        self.splitCatchment = None
        self.upstreamBasin = None
        self.mergedCatchment = None

        #input point spatial reference
        self.sourceprj = osr.SpatialReference()
        self.sourceprj.ImportFromProj4('+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')

        # Getting spatial reference of input raster
        raster = gdal.Open(IN_FDR, gdal.GA_ReadOnly)
        self.Projection = raster.GetProjectionRef()
        self.targetprj = osr.SpatialReference(wkt = raster.GetProjection())

        #create transform
        self.transformToRaster = osr.CoordinateTransformation(self.sourceprj, self.targetprj)
        self.transformToWGS = osr.CoordinateTransformation(self.targetprj, self.sourceprj)

        #kick off
        self.run()

    def serialize(self):
        return {
            'catchment': self.catchment,
            'splitCatchment': self.splitCatchment, 
            'upstreamBasin': self.upstreamBasin,
            'mergedCatchment': self.mergedCatchment
        }

## helper functions
    def geom_to_geojson(self, geom, name, simplify_tolerance=10, write_output=False):
        """Return a geojson from an OGR geom object"""

        #get area in local units
        area = geom.GetArea()
        print(name + ' area: ' + str(area*0.00000038610) + ' square miles')

        #optional simplify
        geom = geom.Simplify(simplify_tolerance)

        #don't want to affect original geometry
        transform_geom = geom.Clone()
        
        #transform geometry from the local projection to wgs84
        transform_geom.Transform(self.transformToWGS)
        json_text = transform_geom.ExportToJson()

        #add some attributes
        geom_json = json.loads(json_text)

        #get area (after simplification) in local units
        area = geom.GetArea()

        #create json structure
        geojson_dict = {
            "type": "Feature",
            "geometry": geom_json,
            "properties": {
                "area": area
            }
        }

        if write_output:
            with open(OUT_PATH + name + '.geojson', 'w') as f:
                f.write(json.dumps(geojson_dict))
            print('Exported geojson:', name)
        
        return geojson_dict

## main functions
    def run(self):
        self.projectedLng, self.projectedLat = self.transform_click_point(self.x,self.y)
        self.catchmentIdentifier, self.catchmentGeom = self.get_local_catchment(self.x,self.y)
        minX, maxX, minY, maxY = self.catchmentGeom.GetEnvelope()
        self.splitCatchmentGeom = self.split_catchment([minX, minY, maxX, maxY], self.projectedLng,self.projectedLat)
        self.upstreamBasinGeom = self.get_upstream_basin(self.catchmentIdentifier)
        self.mergedCatchmentGeom = self.mergeGeoms(self.catchmentGeom, self.splitCatchmentGeom, self.upstreamBasinGeom)
        
        #outputs
        self.catchment = self.geom_to_geojson(self.catchmentGeom, 'catchment')
        self.splitCatchment = self.geom_to_geojson(self.splitCatchmentGeom, 'splitCatchment')
        self.upstreamBasin = self.geom_to_geojson(self.upstreamBasinGeom, 'upstreamBasin')
        self.mergedCatchment = self.geom_to_geojson(self.mergedCatchmentGeom, 'mergedCatchment')
        #re-wrap as a simple Polygon feature (drop the multipolygon nesting)
        self.mergedCatchment = {'type': 'Feature', 'geometry': {'type': 'Polygon', 'coordinates': self.mergedCatchment['geometry']['coordinates'][0]}}

        print('Merged Geom: ', self.mergedCatchment, type(self.mergedCatchment))

    def transform_click_point(self, x, y):
        """Transform (reproject) assumed WGS84 coordinates to input raster coordinates"""

        print('Input X,Y:', x, y)
        projectedLng, projectedLat, z = self.transformToRaster.TransformPoint(x,y)      
        print('Projected X,Y:',projectedLng, ',', projectedLat)

        return (projectedLng, projectedLat)

    def get_local_catchment(self, x, y):
        """Perform point in polygon query to NLDI geoserver to get local catchment geometry"""

        print('requesting local catchment...')

        wkt_point = "POINT(%f %f)" % (x, y)
        cql_filter = "INTERSECTS(the_geom, %s)" % (wkt_point)

        payload = {
            'service': 'wfs', 
            'version': '1.0.0', 
            'request': 'GetFeature', 
            'typeName': 'wmadata:catchmentsp', 
            'outputFormat': 'application/json',
            'srsName': 'EPSG:4326',
            'CQL_FILTER': cql_filter
        }

        #request catchment geometry from point in polygon query from NLDI geoserver
        # https://labs.waterdata.usgs.gov/geoserver/wmadata/ows?service=wfs&version=1.0.0&request=GetFeature&typeName=wmadata%3Acatchmentsp&outputFormat=application%2Fjson&srsName=EPSG%3A4326&CQL_FILTER=INTERSECTS%28the_geom%2C+POINT%28-73.745860+44.006830%29%29
        r = requests.get(NLDI_GEOSERVER_URL, params=payload)

        print('request url: ', r.url)
        resp = r.json()

        #get catchment id
        catchmentIdentifier = json.dumps(resp['features'][0]['properties']['featureid'])

        #get main catchment geometry polygon
        gj_geom = json.dumps(resp['features'][0]['geometry'])
        catchmentGeom = ogr.CreateGeometryFromJson(gj_geom)

        #transform catchment geometry
        catchmentGeom.Transform(self.transformToRaster)

        return catchmentIdentifier, catchmentGeom

    def get_upstream_basin(self, catchmentIdentifier):
        """Use local catchment identifier to get upstream basin geometry from NLDI"""

        #request upstream basin
        payload = {'f': 'json', 'simplified': 'false'}
        
        #request upstream basin from NLDI using comid of catchment point is in
        r = requests.get(NLDI_URL + catchmentIdentifier + '/basin', params=payload)
        print('upstream url', r.url)

        #print('upstream basin', r.text)
        resp = r.json()

        #convert geojson to ogr geom
        gj_geom = json.dumps(resp['features'][0]['geometry'])
        upstreamBasinGeom = ogr.CreateGeometryFromJson(gj_geom)
        upstreamBasinGeom.Transform(self.transformToRaster)

        return upstreamBasinGeom

    def mergeGeoms(self, catchment, splitCatchment, upstreamBasin):
        """Attempt at merging geometries"""

        #if point is on a flowline we have an upstream basin and need to do some geometry merging
        if self.query_flowlines(self.x,self.y):

            mergedCatchmentGeom = upstreamBasin

            #subtract the local catchment geom from the upstream basin geometry,
            #then merge the split catchment back in
            mergedCatchmentGeom = mergedCatchmentGeom.Difference(catchment)
            mergedCatchmentGeom = mergedCatchmentGeom.Union(splitCatchment).Simplify(50)

            #return the merged geometry
            return mergedCatchmentGeom

        #otherwise, we can just return the split catchment
        else:
            mergedCatchmentGeom = splitCatchment

        return mergedCatchmentGeom

    def query_flowlines(self, x, y):
        """Determine if X,Y falls on NHD Plus v2 flowline (within a tolerance)"""

        #example url
        #"https://hydro.nationalmap.gov/arcgis/rest/services/nhd/MapServer/6/query?geometry=-73.82705,43.29139&outFields=GNIS_NAME%2CREACHCODE&geometryType=esriGeometryPoint&inSR=4326&distance=100&units=esriSRUnit_Meter&returnGeometry=false&f=pjson", 

        #perhaps look at this code to snap input point to closest point along a line
        #https://github.com/marsmith/ADONNIS/blob/c54322eaeee17a415b7971c4f5ad714d3d3dccea/js/main.js#L421-L551

        #flowline query parameters
        payload = {
            'f': 'pjson', 
            'geometryType': 'esriGeometryPoint',
            'inSR':'4326',
            'geometry': str(x) + ',' + str(y),
            'distance': 100,
            'units': 'esriSRUnit_Meter',
            'outFields': 'GNIS_NAME,REACHCODE',
            'returnGeometry': 'false'
        }
        
        # #request upstream basin from NLDI using comid of catchment point is in
        #r = requests.get(NHDPLUS_FLOWLINES_QUERY_URL, params=payload)

        #print('nhd flowline query:', r.url)

        #print('response', r.text)
        # resp = r.json()

        #NOTE: the query above is stubbed out for now, so the point is always treated as on a flowline
        return True

    def split_catchment(self, bounds, x, y): 
        """Use catchment bounding box to clip NHD Plus v2 flow direction raster, and product split catchment delienation from X,Y"""

        print('test bounds:', bounds)

        RasterFormat = 'GTiff'
        PixelRes = 30

        #method to use catchment bounding box instead of exact geom
        gdal.Warp(OUT_FDR, IN_FDR, format=RasterFormat, outputBounds=bounds, xRes=PixelRes, yRes=PixelRes, dstSRS=self.Projection, resampleAlg=gdal.GRA_NearestNeighbour, options=['COMPRESS=DEFLATE'])

        #start pysheds catchment delineation
        grid = Grid.from_raster(OUT_FDR, data_name='dir')

        #compute flow accumulation to snap to
        dirmap = (64,  128,  1,   2,    4,   8,    16,  32)
        grid.accumulation(data='dir', dirmap=dirmap, out_name='acc', apply_mask=False)

        grid.to_raster('acc', OUT_PATH + 'acc.tif', view=False, blockxsize=16, blockysize=16)

        #snap the pour point to the nearest cell with flow accumulation > 100
        xy = (x, y)
        new_xy = grid.snap_to_mask(grid.acc > 100, xy, return_dist=False)

        #get catchment with pysheds
        grid.catchment(data='dir', x=new_xy[0], y=new_xy[1], out_name='catch', recursionlimit=15000, xytype='label')

        # Clip the bounding box to the catchment
        grid.clip_to('catch')

        #polygonize the clipped catchment raster (pysheds wraps a rasterio method)
        shapes = grid.polygonize()

        #get split Catchment geometry
        print('Split catchment complete')
        split_geom = ogr.Geometry(ogr.wkbPolygon)

        for shape in shapes:
            split_geom = split_geom.Union(ogr.CreateGeometryFromJson(json.dumps(shape[0])))

        return split_geom
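
A hedged usage sketch, assuming the module-level constants the class relies on (IN_FDR, OUT_FDR, OUT_PATH, NLDI_GEOSERVER_URL, NLDI_URL) are configured; the test coordinates come from the example NLDI URL in get_local_catchment:

ws = Watershed(x=-73.745860, y=44.006830)  # __init__ kicks off run() immediately
result = ws.serialize()  # geojson dicts: catchment, splitCatchment, upstreamBasin, mergedCatchment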
Example #5
    def read_gml(self,
                 gml_path,
                 index_col=None,
                 groupby_column=None,
                 order_column=None,
                 id_col='code',
                 column_mapping=None,
                 check_columns=True,
                 check_geotype=True,
                 clip=None):
        """
        Read GML file to GeoDataFrame.

        This function has the option to group Points into LineStrings. To do so,
        specify the groupby column (which the set has in common) and the order_column,
        the column which indicates the order of the grouping.

        A mask file can be specified to clip the selection.

        Parameters
        ----------
        gml_path : str
            Path to the GML file
        groupby_column : str
            Optional, column to group points by
        order_column : str
            Optional, columns to specify the order of the grouped points
        clip : str
            Optional, file containing the mask to clip the selection.
        """

        if column_mapping is None:
            column_mapping = {}

        if not os.path.exists(gml_path):
            raise OSError(f'File not found: "{gml_path}"')

        ogr.UseExceptions()
        gml = ogr.Open(gml_path)
        layer = gml.GetLayer()
        layerDefinition = layer.GetLayerDefn()
        nfields = layerDefinition.GetFieldCount()

        # Get column names for features
        columns = [
            layerDefinition.GetFieldDefn(i).GetName() for i in range(nfields)
        ]

        # Collect features
        features = [f for f in layer]

        # Get geometries, dropping features whose geometry is missing
        georefs = [f.GetGeometryRef() for f in features]
        valid = [(f, geo) for f, geo in zip(features, georefs) if geo is not None]
        if len(valid) < len(features):
            print('Skipping invalid geometry.')
        features = [f for f, _ in valid]
        georefs = [geo for _, geo in valid]
        geometries = []
        new_feats = []
        for i, f in enumerate(features):
            geometry = wkb.loads(georefs[i].ExportToWkb())
            if geometry.geom_type == 'MultiPolygon':
                new_geoms = list(geometry.geoms)
                geometries.extend(new_geoms)
                new_features = [f] * len(new_geoms)
                new_feats.extend(new_features)
            else:
                geometries.append(geometry)
                new_feats.append(f)
        features = new_feats

        # Get group by columns
        if groupby_column is not None:
            # Check if the group by column is found
            if groupby_column not in columns:
                raise ValueError('Groupby column not found in feature list.')

            # Check if the geometry is as expected
            if not isinstance(geometries[0], Point):
                raise ValueError('Can only group Points to LineString')

            # Get the values from the column that is grouped
            columnid = columns.index(groupby_column)
            groupbyvalues = [f.GetField(columnid) for f in features]
            volgid = columns.index(order_column)
            order = [f.GetField(volgid) for f in features]

            # Create empty dict for lines
            branches, counts = np.unique(groupbyvalues, return_counts=True)
            lines = {
                branch: [0] * count
                for branch, count in zip(branches, counts)
            }

            # Since the order does not always start at 1, find the starting number per group
            startnr = {branch: len(features) + 1 for branch in branches}
            for branch, volgnr in zip(groupbyvalues, order):
                startnr[branch] = min(volgnr, startnr[branch])

            # Determine relative order of points in profile (required if the point numbering is not subsequent)
            order_rel = []
            for branch, volgnr in zip(groupbyvalues, order):
                lst_volgnr = [
                    x[1] for x in zip(groupbyvalues, order) if x[0] == branch
                ]
                lst_volgnr.sort()
                for i, x in enumerate(lst_volgnr):
                    if volgnr == x:
                        order_rel.append(i)

            # Filter branches with too few points
            singlepoint = (counts < 2)

            # Assign points
            for point, volgnr, branch, volgnr_rel in zip(
                    geometries, order, groupbyvalues, order_rel):
                #lines[branch][volgnr - startnr[branch]] = point
                lines[branch][volgnr_rel] = point

            # Group geometries to lines
            for branch in branches[~singlepoint]:
                if any(isinstance(pt, int) for pt in lines[branch]):
                    print(
                        f'Points are not properly assigned for branch "{branch}". Check the GML.'
                    )
                    lines[branch] = [
                        pt for pt in lines[branch] if not isinstance(pt, int)
                    ]
                lines[branch] = LineString(lines[branch])

            # Set order for branches with single point to 0, so features are not loaded
            for branch in branches[singlepoint]:
                order[groupbyvalues.index(branch)] = 0

            # Read fields at first occurrence
            startnrs = [startnr[branch] for branch in groupbyvalues]
            fields = [
                list(map(f.GetField, range(nfields)))
                for i, volgnr, f in zip(order, startnrs, features)
                if i == volgnr
            ]

            # Get geometries in correct order for features
            geometries = [lines[row[columnid]] for row in fields]

        else:
            fields = [list(map(f.GetField, range(nfields))) for f in features]

        # Create geodataframe
        gdf = gpd.GeoDataFrame(fields, columns=columns, geometry=geometries)
        gdf.rename(columns=column_mapping, inplace=True)

        # add a numbered suffix to 'exploded' multipolygons
        for ftc in gdf[id_col].unique():
            if len(gdf[gdf[id_col] == ftc]) > 1:
                gdf.loc[gdf[id_col] == ftc, id_col] = [
                    f'{i}_{n}'
                    for n, i in enumerate(gdf[gdf[id_col] == ftc][id_col])
                ]
                print(f'{ftc} is MultiPolygon; split into single parts.')

        # Add data to class GeoDataFrame
        self.set_data(gdf,
                      index_col=index_col,
                      check_columns=check_columns,
                      check_geotype=check_geotype)

        if clip is not None:
            self.clip(clip)
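
A sketch of calling this method to group point features into LineStrings; the instance, file path, and column names below are hypothetical:

# 'profiles' stands in for an instance of the class this method belongs to
profiles.read_gml(
    'cross_sections.gml',         # hypothetical GML file of profile points
    groupby_column='profile_id',  # points sharing this value form one line
    order_column='point_order',   # defines the point order within each line
    id_col='code',
)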
Example #6
def run(quiet_status, create):
    def printStatus(status, newLine=False):
        if quiet_status and not newLine:
            return

        if newLine:
            ch = "\n"
        else:
            ch = ""
        sys.stdout.write("\r{}".format(status.ljust(100) + ch))
        sys.stdout.flush()

    def messageDiffDateTime(dt1, dt2):
        diff = dt2 - dt1
        return "Days = {} hours = {}".format(diff.days, diff.seconds / 3600)

    ogr.RegisterAll()
    ogr.UseExceptions()

    vars_env = ['USERPG', 'PWDPG']
    for v in vars_env:
        if v not in os.environ:
            msg = f"Missing '{v}' in OS environment"
            printStatus(msg, True)
            return 1

    args = (os.environ['USERPG'], os.environ['PWDPG'], '10.1.25.143', 'siscom')
    AggregatorGroupPG.setPostgres(*args)
    if AggregatorGroupPG.dsPG is None:
        msg = f"Error connection database: host={args[2]} db={args[3]} user={args[0]}"
        printStatus(msg, True)
        return 1

    AggregatorGroupPG.dtInit = datetime.now()
    status = 'Creation' if create else 'Update'
    msg = f"Started ({status} '{AggregatorGroupPG.tableAgregated}'): {AggregatorGroupPG.dtInit}"
    printStatus(msg, True)

    if create:
        r = AggregatorGroupPG.setProcessParams(printStatus)
    else:
        r = AggregatorGroupPG.setProcessParams(printStatus,
                                               useFilterDatetime=True)
    if not r['isOk']:
        printStatus(r['message'], True)
        return 1

    AggregatorGroup.init(AggregatorGroupPG.tableAlert)
    if create:
        aggGroups = AggregatorGroup.createGroups()  # generator
        r = AggregatorGroupPG.saveGroups(aggGroups, printStatus)
        if not r['isOk']:
            printStatus(r['message'])
            return 1
        dtEnd = datetime.now()
        msgDiff = messageDiffDateTime(AggregatorGroupPG.dtInit, dtEnd)
        args = (AggregatorGroupPG.tableAgregated, r['totalNewGroup'], dtEnd,
                msgDiff)
        msg = "Created '{}' in DB. Total Groups {} - {}({})".format(*args)
        printStatus(msg, True)
    else:
        r = AggregatorGroupPG.updateGroups(printStatus)
        if not r['isOk']:
            printStatus(r['message'])
            return 1
        if r['totalNewGroup'] == 0:
            msg = f"No new groups for '{AggregatorGroupPG.tableAgregated}' in DB."
        else:
            dtEnd = datetime.now()
            msgDiff = messageDiffDateTime(AggregatorGroupPG.dtInit, dtEnd)
            args = (AggregatorGroupPG.tableAgregated, r['totalNewGroup'],
                    r['totalDeleteGroup'], r['totalGroup'], dtEnd, msgDiff)
            msg = "Updated '{}' in DB. Groups: New {}, Delete {}, Total {} - {}({})".format(
                *args)
        printStatus(msg, True)

    totalInvalidUnions = len(ChainPolygons.invalidUnions)
    if totalInvalidUnions > 0:
        r = AggregatorGroupPG.createLayerInvalidUnion(
            ChainPolygons.invalidUnions)
        msg = "Created '{}' in DB".format(
            r['table']) if r['isOk'] else r['message']
        printStatus(msg, True)
        ChainPolygons.invalidUnions.clear()

    return 0
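
A sketch of driving this entry point, assuming the module-level imports (os, sys) and the Aggregator classes are in scope; the credentials are placeholders and must match the PostgreSQL instance hard-coded above:

os.environ['USERPG'] = 'user'    # placeholder credentials
os.environ['PWDPG'] = 'secret'
exit_code = run(quiet_status=False, create=True)
sys.exit(exit_code)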
Example #7
File: the_aerosol.py Project: tcztzy/zac
    def _create_base_map(self):
        """
        Deal with the different ways to define the AOI; if none is specified, the image bound is used.
        """
        gdal.UseExceptions()
        ogr.UseExceptions()
        if self.aoi is not None:
            if os.path.exists(self.aoi):
                try:
                    g = gdal.Open(self.aoi)
                    # subprocess.call(['gdaltindex', '-f', 'GeoJSON', '-t_srs', 'EPSG:4326', self.toa_dir + '/AOI.json', self.aoi])
                    geojson = get_boundary(self.aoi)[0]
                    with open(self.toa_dir + "/AOI.json", "wb") as f:
                        f.write(geojson.encode())
                except Exception:
                    try:
                        gr = ogr.Open(str(self.aoi))
                        layer = gr.GetLayer(0)
                        f = layer.GetFeature(0)
                        g = f.GetGeometryRef()
                    except Exception:
                        raise IOError(
                            "AOI file cannot be opened by gdal; please check it or convert it into a format gdal can open"
                        )
            else:
                try:
                    g = ogr.CreateGeometryFromJson(self.aoi)
                except Exception:
                    try:
                        g = ogr.CreateGeometryFromGML(self.aoi)
                    except Exception:
                        try:
                            g = ogr.CreateGeometryFromWkt(self.aoi)
                        except Exception:
                            try:
                                g = ogr.CreateGeometryFromWkb(self.aoi)
                            except Exception:
                                raise IOError(
                                    "The AOI has to be one of GeoJSON, GML, Wkt or Wkb."
                                )
            gjson_str = (
                """{"type":"FeatureCollection","features":[{"type":"Feature","properties":{},"geometry":%s}]}"""
                % g.ExportToJson())
            with open(self.toa_dir + "/AOI.json", "wb") as f:
                f.write(gjson_str.encode())

        ogr.DontUseExceptions()
        gdal.DontUseExceptions()
        if not os.path.exists(self.toa_dir + "/AOI.json"):
            g = gdal.Open(self.toa_bands[0])
            proj = g.GetProjection()
            if "WGS 84" in proj:
                # subprocess.call(['gdaltindex', '-f', 'GeoJSON', self.toa_dir +'/AOI.json', self.toa_bands[0]])
                geojson = get_boundary(self.toa_bands[0], to_wgs84=False)
                with open(self.toa_dir + "/AOI.json", "wb") as f:
                    f.write(geojson.encode())
            else:
                # subprocess.call(['gdaltindex', '-f', 'GeoJSON', '-t_srs', 'EPSG:4326', self.toa_dir +'/AOI.json', self.toa_bands[0]])
                geojson = get_boundary(self.toa_bands[0])[0]
                with open(self.toa_dir + "/AOI.json", "wb") as f:
                    f.write(geojson.encode())

            self.logger.warning(
                "AOI is not created and full band extent is used")
            self.aoi = self.toa_dir + "/AOI.json"
        else:
            self.aoi = self.toa_dir + "/AOI.json"

        if self.pixel_res is None:
            self.pixel_res = abs(
                gdal.Open(self.toa_bands[0]).GetGeoTransform()[1])

        self.psf_xstd = 260 / self.pixel_res
        self.psf_ystd = 340 / self.pixel_res
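
The nested try/except cascade above amounts to trying each OGR geometry parser in turn; a compact sketch of the same fallback (all four parsers are standard osgeo.ogr functions, the helper name is hypothetical):

def parse_aoi_geometry(text):
    """Try GeoJSON, GML, WKT, then WKB; return the first geometry that parses."""
    parsers = (ogr.CreateGeometryFromJson, ogr.CreateGeometryFromGML,
               ogr.CreateGeometryFromWkt, ogr.CreateGeometryFromWkb)
    for parser in parsers:
        try:
            geom = parser(text)
            if geom is not None:
                return geom
        except Exception:
            continue
    raise IOError("The AOI has to be one of GeoJSON, GML, Wkt or Wkb.")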
Example #8
    def __init__(
            self,
            host="localhost",
            port=5432,
            db_name="postgis",
            user="******",
            password="******",
            views_included=1,
            dico_dataset=None,
            txt=None,
    ):
        """Uses gdal/ogr functions to extract basic informations about
        geographic file (handles shapefile or MapInfo tables)
        and store into the dictionaries.

        layer = path to the geographic file
        dico_dataset = dictionary for global informations
        dico_fields = dictionary for the fields' informations
        tipo = feature type to read
        text = dictionary of texts to display
        """
        # handling GDAL/OGR specific exceptions
        gdal.AllRegister()
        ogr.UseExceptions()
        gdal.UseExceptions()

        # Creating variables (avoid shared mutable default arguments)
        if dico_dataset is None:
            dico_dataset = OrderedDict()
        if txt is None:
            txt = dict()
        self.dico_dataset = dico_dataset
        self.txt = txt
        self.alert = 0
        if views_included:
            gdal.SetConfigOption("PG_LIST_ALL_TABLES", "YES")
            logger.info("PostgreSQL views enabled.")
        else:
            gdal.SetConfigOption("PG_LIST_ALL_TABLES", "NO")
            logger.info("PostgreSQL views disabled.")

        # connection infos
        self.host = host
        self.port = port
        self.db_name = db_name
        self.user = user
        self.password = password
        self.conn_settings = "PG: host={} port={} dbname={} user={} password={}".format(
            host, port, db_name, user, password)

        # testing connection
        self.conn = self.get_connection()
        if not self.conn:
            self.alert += 1
            youtils.erratum(
                ctner=dico_dataset,
                mess_type=1,
                ds_lyr=self.conn_settings,
                mess="err_connection_failed",
            )
            dico_dataset["err_gdal"] = gdal_err.err_type, gdal_err.err_msg
            return

        # sgbd info
        dico_dataset["sgbd_version"] = self.get_version()
        dico_dataset["sgbd_schemas"] = self.get_schemas()
Example #9
    def __init__(self, dxfpath, dico_dxf, tipo, txt=""):
        """Uses OGR functions to extract basic informations about
        geographic vector file (handles shapefile or MapInfo tables)
        and store into dictionaries.

        dxfpath = path to the DXF file
        dico_dxf = dictionary for global informations
        tipo = format
        text = dictionary of text in the selected language
        """
        # changing working directory to layer folder
        chdir(path.dirname(dxfpath))

        # raising GDAL/OGR specific exceptions
        ogr.UseExceptions()
        self.alert = 0

        # opening DXF
        dr_dxf = ogr.GetDriverByName("DXF")
        try:
            dxf = dr_dxf.Open(dxfpath, 0)
        except Exception as err:
            logger.error(err)
            return

        # check if DXF is OGR friendly
        if dxf is None:
            self.alert += 1
            self.erratum(dico_dxf, dxfpath, "err_incomp")
            return

        # DXF name and parent folder
        dico_dxf["name"] = path.basename(dxf.GetName())
        dico_dxf["folder"] = path.dirname(dxf.GetName())

        # opening
        douxef = dxfgrabber.readfile(dxfpath)

        # AutoCAD version
        dico_dxf["version_code"] = douxef.dxfversion
        # see: http://dxfgrabber.readthedocs.org/en/latest/#Drawing.dxfversion
        dxf_versions = {
            "AC1009": "AutoCAD R12",
            "AC1015": "AutoCAD R2000",
            "AC1018": "AutoCAD R2004",
            "AC1021": "AutoCAD R2007",
            "AC1024": "AutoCAD R2010",
            "AC1027": "AutoCAD R2013",
        }
        dico_dxf["version_name"] = dxf_versions.get(douxef.dxfversion, "NR")

        # layers count and names
        dico_dxf["layers_count"] = dxf.GetLayerCount()
        li_layers_names = []
        li_layers_idx = []
        dico_dxf["layers_names"] = li_layers_names
        dico_dxf["layers_idx"] = li_layers_idx

        # dependencies
        dependencies = [
            f for f in listdir(path.dirname(dxfpath))
            if path.splitext(path.abspath(f))[0] == path.splitext(dxfpath)[0]
            and not path.splitext(path.abspath(f).lower())[1] == ".dxf"
        ]
        dico_dxf["dependencies"] = dependencies

        # cumulated size
        dependencies.append(dxfpath)
        total_size = sum([path.getsize(f) for f in dependencies])
        dico_dxf["total_size"] = self.sizeof(total_size)
        dependencies.pop(-1)

        # global dates
        dico_dxf["date_actu"] = strftime("%d/%m/%Y",
                                         localtime(path.getmtime(dxfpath)))
        dico_dxf["date_crea"] = strftime("%d/%m/%Y",
                                         localtime(path.getctime(dxfpath)))
        # total fields count
        total_fields = 0
        dico_dxf["total_fields"] = total_fields
        # total objects count
        total_objs = 0
        dico_dxf["total_objs"] = total_objs
        # parsing layers
        for layer_idx in range(dxf.GetLayerCount()):
            # dictionary where will be stored informations
            dico_layer = OD()
            # parent DXF
            dico_layer["dxf_name"] = path.basename(dxf.GetName())
            # getting layer object
            layer = dxf.GetLayerByIndex(layer_idx)
            # layer name
            li_layers_names.append(layer.GetName())
            # layer index
            li_layers_idx.append(layer_idx)
            # getting layer global information
            self.infos_basics(layer, dico_layer, txt)
            # storing layer into the DXF dictionary
            dico_dxf["{0}_{1}".format(layer_idx, layer.GetName())] = dico_layer
            # summing fields number
            total_fields += dico_layer.get("num_fields")
            # summing objects number
            total_objs += dico_layer.get("num_obj")
            # delete the temporary dictionary reference before the next iteration
            del dico_layer
        # storing fields and objects sum
        dico_dxf["total_fields"] = total_fields
        dico_dxf["total_objs"] = total_objs
Example #10
def pd_save_gdal(df, output_path, layer_attribute='layer', driver_name=None):
    if driver_name is None:
        driver_name = detect_ogr_driver(output_path)

    if driver_name not in gdal_formats:
        if layer_attribute and layer_attribute != 'layer':
            df['layer'] = df[layer_attribute]
        return pd_save_dataframe(df, output_path)

    try:
        from osgeo import ogr
    except ImportError:
        return pd_save_dataframe(df, output_path)
    print("save using ogr driver", driver_name)

    import osgeo.osr as osr

    # use OGR specific exceptions
    ogr.UseExceptions()

    # Create the output
    dvr = ogr.GetDriverByName(driver_name)
    ods = dvr.CreateDataSource(output_path)
    poly = None
    lyr = ods.CreateLayer('')
    if lyr.TestCapability('CreateField'):
        if lyr.GetLayerDefn().GetFieldIndex('Layer') == -1:
            lyr.CreateField(ogr.FieldDefn('Layer', ogr.OFTString))
        for f in df.columns:
            # skip the single-letter coordinate columns (n, x, y, z)
            if len(f) > 1:
                t = ogr.OFTString
                if df[f].dtype != object:
                    t = ogr.OFTReal
                lyr.CreateField(ogr.FieldDefn(f, t))
    # start from the bottom of the dataframe to simplify polygon creation
    for row in df.index[::-1]:
        layer_name = None
        if layer_attribute in df:
            layer_name = df.loc[row, layer_attribute]
        if not layer_name or (isinstance(layer_name, float) and np.isnan(layer_name)):
            layer_name = os.path.splitext(os.path.basename(output_path))[0]

        n, x, y, z = df.loc[row, ['n', 'x', 'y', 'z']].astype(float)
        if poly is None:
            ptype = ''
            if 'type' in df:
                ptype = str.upper(df.loc[row, 'type'])
            if ptype.find('POINT') >= 0:
                poly = ogr.Geometry(ogr.wkbPointZM)
            elif ptype == 'LINEARRING' or ptype.find('POLY') >= 0:
                poly = ogr.Geometry(ogr.wkbLinearRing)
            else:
                poly = ogr.Geometry(ogr.wkbLineStringZM)

        poly.SetPoint(int(n), x, y, z)

        if n == 0.0:
            feature = ogr.Feature(lyr.GetLayerDefn())
            ffDefn = feature.GetDefnRef()
            for i in range(ffDefn.GetFieldCount()):
                f = ffDefn.GetFieldDefn(i).GetName()
                if f in df:
                    feature.SetField(f, str(df.loc[row, f]))
                elif f.lower() in df:
                    feature.SetField(f, df.loc[row, f.lower()])
            feature.SetField('Layer', layer_name)
            feature.SetGeometry(poly)
            lyr.CreateFeature(feature)
            poly = None

    # dereference layer and data source so OGR flushes features to disk
    lyr = None
    ods = None
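
A hedged usage sketch: a two-point line built from the n/x/y/z columns the code expects, saved as a shapefile (this assumes 'ESRI Shapefile' appears in the module's gdal_formats list):

import pandas as pd

df = pd.DataFrame({'n': [0.0, 1.0], 'x': [0.0, 5.0], 'y': [0.0, 5.0],
                   'z': [0.0, 0.0], 'layer': ['roads', 'roads']})
pd_save_gdal(df, '/tmp/lines.shp', driver_name='ESRI Shapefile')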
Example #11
def run(quiet_status):
    def printStatus(status, newLine=False):
        if not newLine and quiet_status:
            return

        if newLine:
            ch = "\n"
        else:
            ch = ""
        sys.stdout.write("\r{}".format(status.ljust(100) + ch))
        sys.stdout.flush()

    def getLayerPostgres(str_conn, table):
        ds = ogr.Open(str_conn)
        if ds is None:
            return {
                'isOk': False,
                'message': "Connection '{}' can't be opened".format(str_conn)
            }
        lyr = ds.GetLayerByName(table)
        if lyr is None:
            return {
                'isOk':
                False,
                'message':
                "Table '{}' from connection '{}' not found".format(
                    table, str_conn)
            }
        return {'isOk': True, 'dataset': ds, 'layer': lyr}

    def checkFields(layer, fields):
        defn = layer.GetLayerDefn()
        for name in fields:
            if defn.GetFieldIndex(name) == -1:
                return {
                    'isOk':
                    False,
                    'message':
                    "Layer '{}' doesn't have field '{}'".format(
                        layer.GetName(), name)
                }
        return {'isOk': True}

    def copyMemoryLayer(inLayer, outName):
        driver = ogr.GetDriverByName('MEMORY')
        ds = driver.CreateDataSource('memData')
        driver.Open('memData', 1)  # Write access
        ds.CopyLayer(inLayer, outName, ['OVERWRITE=YES'])
        layer = ds.GetLayerByName(outName)
        layer.ResetReading()
        return {'dataset': ds, 'layer': layer}

    def getFeaturesOrderDate(layer):
        feats = []
        for feat in layer:
            items = feat.items()
            geom = feat.GetGeometryRef().Clone()
            if not geom.IsValid():
                geom = geom.Buffer(0)
            feats.append({
                'fid':
                feat.GetFID(),
                field_date:
                datetime.datetime.strptime(items[field_date],
                                           '%Y-%m-%d').date(),
                field_type:
                items[field_type],
                field_stage:
                items[field_stage],
                'geom':
                geom,
                'geomBuffer':
                geom.Buffer(vbuffer)
            })
        layer.ResetReading()
        feats.sort(key=lambda i: i[field_date])

        return feats

    def createOutLayer(spatialRef, outFile, fields, geomType):
        driver = ogr.GetDriverByName('ESRI Shapefile')
        if os.path.exists(outFile):
            driver.DeleteDataSource(outFile)
        #
        ds = driver.CreateDataSource(outFile)
        #
        if ds is None:
            return {
                'isOk': False,
                'message': "File '{}' could not be created".format(outFile)
            }
        #
        layer = ds.CreateLayer(outFile, srs=spatialRef, geom_type=geomType)
        #
        if layer is None:
            return {
                'isOk': False,
                'message': "Layer for file '{}' could not be created".format(outFile)
            }
        #
        for item in fields:
            f = ogr.FieldDefn(item['name'], item['type'])
            if 'width' in item:  # dict.has_key() was removed in Python 3
                f.SetWidth(item['width'])
            layer.CreateField(f)
        #
        return {'isOk': True, 'dataset': ds, 'layer': layer}

    def addFeaturesOut(layer, f, fid, defnOut):
        feat = ogr.Feature(defnOut)
        feat.SetGeometry(f['geom'])
        feat.SetFID(f['fid'])
        for item in f['attributes']:
            feat.SetField(item['name'], item['value'])
        layer.CreateFeature(feat)
        feat.Destroy()

    def getCoordTransform7390(layer):
        wkt7390 = 'PROJCS["Brazil / Albers Equal Area Conic (WGS84)",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["longitude_of_center",-50.0],PARAMETER["standard_parallel_1",10.0],PARAMETER["standard_parallel_2",-40.0],PARAMETER["latitude_of_center",-25.0],UNIT["Meter",1.0]]'
        sr7390 = osr.SpatialReference()
        sr7390.ImportFromWkt(wkt7390)

        return osr.CreateCoordinateTransformation(layer.GetSpatialRef(),
                                                  sr7390)

    def getAreaHa(geom):
        g = geom.Clone()
        g.Transform(ct7390)
        area = g.Area() / 10000.0  # m² to hectares
        g.Destroy()
        return area

    def processNeighbour(inFeats, layer):
        def getValidTypeStage(item):
            vtype = item[field_type] if item[field_type] is not None else 'SEM Classificação'
            vstage = item[field_stage] if item[field_stage] is not None else 'SEM Classificação'
            return {'type': vtype, 'stage': vstage}

        def isInsideLimitMonth(featAggregator, feat):
            dateLast = featAggregator['events'][-1][field_date]
            dateFeat = feat[field_date]
            date_months = dateLast - dateutil.relativedelta.relativedelta(
                months=limitMonth)
            return dateFeat > date_months

        def getFeaturesHasRelation(layer, dateReference, geom, fids=[]):
            layer.SetSpatialFilter(geom)
            feats = getFeaturesOrderDate(layer)
            featsRelation = []
            for feat in feats:
                date_months = dateReference + dateutil.relativedelta.relativedelta(
                    months=limitMonth)
                if feat['fid'] in fids or feat[
                        field_date] > date_months or not geom.Intersects(
                            feat['geom']):
                    continue
                dateReference = feat[field_date]
                t_s = getValidTypeStage(feat)
                item = {
                    field_date: dateReference,
                    field_type: t_s['type'],
                    field_stage: t_s['stage'],
                    'geom': feat['geom'].Clone(),
                    'fid': feat['fid']
                }
                featsRelation.append(item)

            return featsRelation

        def getInitValues(inFeat):
            outFeat = {}
            outFeat['id_group'] = id_group
            geom = inFeat['geom'].Clone()
            outFeat['geom'] = geom
            outFeat['geomBuffer'] = geom.Buffer(vbuffer)
            t_s = getValidTypeStage(inFeat)
            item = {
                field_date: inFeat[field_date],
                'area_ha': getAreaHa(geom),
                field_type: t_s['type'],
                field_stage: t_s['stage']
            }
            outFeat['events'] = [item]
            outFeat['fids'] = [inFeat['fid']]

            return outFeat

        def addValues(outFeat, inFeat):
            # Requires ogr.UseExceptions(): a failed Union must raise so we can catch it
            isOk = True
            try:
                geomUnion = outFeat['geom'].Union(inFeat['geom'])
            except Exception as e:
                msg = "Error making Union for ID Group {} (ZERO used for FID area): {}".format(
                    id_group, str(e))  # e.message was removed in Python 3
                lstExceptionFids[inFeat['fid']] = msg
                isOk = False

            if isOk:
                if not geomUnion.IsValid():
                    geomUnion = geomUnion.Buffer(0)
                outFeat['geom'].Destroy()
                outFeat['geom'] = geomUnion
                outFeat['geomBuffer'].Destroy()
                outFeat['geomBuffer'] = geomUnion.Buffer(vbuffer)
            item = {
                'area_ha': getAreaHa(inFeat['geom']) if isOk else 0.00,
                field_date: inFeat[field_date],
                field_type: inFeat[field_type],
                field_stage: inFeat[field_stage]
            }
            outFeat['events'].append(item)
            outFeat['fids'].append(inFeat['fid'])

        def destroyGeometries(fids, inFeats):
            # inFeats
            total = len(fids)
            i = 0
            for item in inFeats:
                if item['fid'] in fids:
                    i += 1
                    item['geom'].Destroy()
                    item['geom'] = None
                    if i == total:
                        break

        def aggregateSameDates(events):
            new_events = []
            dates = map(lambda x: x[field_date], events)
            dates = list(set(dates))
            for item1 in dates:
                types = []
                stages = []
                f_dates = list(
                    filter(lambda x: x[field_date] == item1, events))
                if len(f_dates) == 1:
                    total_area_ha = f_dates[0]['area_ha']
                    types.append(f_dates[0][field_type])
                    stages.append(f_dates[0][field_stage])
                else:
                    total_area_ha = 0
                    for item2 in f_dates:
                        total_area_ha += item2['area_ha']
                        types.append(item2[field_type])
                        stages.append(item2[field_stage])
                    types = list(set(types))
                    stages = list(set(stages))
                event = {
                    field_date: item1,
                    'area_ha': total_area_ha,
                    field_type: types,
                    field_stage: stages
                }
                new_events.append(event)

            return new_events

        #  The layer WILL HAVE ITS FEATURES DELETED!
        total = len(inFeats)
        outFeats = []
        id_group = 0
        loop = True
        while loop:
            loop = False
            for item in inFeats:
                # Add from Query inFeats
                id_group += 1
                printStatus(
                    "Iterations: {} <> {} (remaining features)".format(
                        id_group, len(inFeats)))
                outFeat = getInitValues(item)
                layer.DeleteFeature(item['fid'])
                feats = getFeaturesHasRelation(layer, item[field_date],
                                               item['geomBuffer'],
                                               [item['fid']])
                #
                if len(feats) > 0:
                    for item2 in feats:
                        addValues(outFeat, item2)
                        layer.DeleteFeature(item2['fid'])
                    # Add from Query by outFeat
                    while True:
                        dataRef = outFeat['events'][-1][field_date]
                        feats = getFeaturesHasRelation(layer, dataRef,
                                                       outFeat['geomBuffer'],
                                                       outFeat['fids'])
                        if len(feats) == 0:
                            break
                        for item2 in feats:
                            addValues(outFeat, item2)
                            layer.DeleteFeature(item2['fid'])
                #
                destroyGeometries(outFeat['fids'], inFeats)
                outFeats.append(outFeat)
                inFeats = list(
                    filter(lambda x: x['geom'] is not None, inFeats))
                if len(inFeats) > 0:
                    loop = True
                break  # restart the for loop over the NEW inFeats list

        for item in outFeats:
            events = aggregateSameDates(item['events'])
            del item['events']
            item['events'] = events

            #outFeats = filter( lambda x: not x['geom'] is None, outFeats )

        return (outFeats, inFeats)

    def getSumEvents(outFeat):
        events = sorted(outFeat['events'], key=lambda x: x[field_date])
        num_events = len(events)
        date_ini = str(events[0][field_date])
        date_end = str(events[num_events - 1][field_date])
        area_ini_ha = events[0]['area_ha']
        #
        dates_events = str(date_ini)
        total_fid_ha = area_ini_ha
        area_events_ha = str(area_ini_ha)
        tipos = list(
            events[0][field_type])  # see aggregateSameDates: value is a list
        estagios = list(events[0][field_stage])  # copy so the event isn't mutated
        for item in events[1:]:
            dates_events += ";{}".format(item[field_date])
            total_fid_ha += item['area_ha']
            area_events_ha += ";{}".format(item['area_ha'])
            tipos += item[field_type]  # List values
            estagios += item[field_stage]  # List values
        #
        s_fids = ';'.join(map(lambda item: str(item), outFeat['fids']))
        s_tipos = ';'.join(list(set(tipos)))
        s_estagios = ';'.join(list(set(estagios)))
        #
        return {
            'n_events': num_events,
            'ini_date': date_ini,
            'end_date': date_end,
            'ini_ha': area_ini_ha,
            'fids_ha': total_fid_ha,
            'n_fids': len(outFeat['fids']),
            'fids': s_fids,
            'dates_ev': dates_events,
            'ha_ev': area_events_ha,
            'tipos': s_tipos,
            'estagios': s_estagios
        }

    def saveErrorFids(nameFile):
        f = open(nameFile, "w")
        fids = sorted(lstExceptionFids.keys())
        for fid in fids:
            line = "FID {}: {}\n".format(fid, lstExceptionFids[fid])
            f.write(line)
        f.close()

    ogr.RegisterAll()
    ogr.UseExceptions()
    #
    user, pwd = '???', '???'
    str_conn = "PG: host={} dbname={} user={} password={}".format(
        '10.1.25.143', 'siscom', user, pwd)
    schema = 'temp'
    table = 'alerta_filter'
    d = getLayerPostgres(str_conn, "{}.{}".format(schema, table))
    if not d['isOk']:
        printStatus(d['message'], True)
        return 1
    dsPostgres, layer = d['dataset'], d['layer']
    # Variable all scope
    dirOut = "/home/lmotta/Documentos/cotig2018/Agregador_George_2018-08/shp"
    field_date = 'date_img'
    field_type = 'tipo'
    field_stage = 'estagio'
    vbuffer = 0.00014  # half an arc-second: (1/3600 degree) / 2 ≈ 0.000139
    limitMonth = 6
    ct7390 = getCoordTransform7390(layer)
    lstExceptionFids = {}
    #
    d = checkFields(layer, (field_date, field_type, field_stage))
    if not d['isOk']:
        printStatus(d['message'], True)
        return 1
    #
    printStatus("Copying Database to Memory...")
    d = copyMemoryLayer(layer, table)
    dsMemory, lyrMem = d['dataset'], d['layer']
    del dsPostgres
    layer = lyrMem
    #
    printStatus("Reading features...")
    inFeats = getFeaturesOrderDate(layer)
    msg = "Processing ({} features)...".format(len(inFeats))
    printStatus(msg)
    (outFeats, inFeats) = processNeighbour(
        inFeats, layer)  # MEMORY LAYER: its features will be deleted
    totalOutFeats = len(outFeats)
    # Output - Save Shapefile
    fields = [{
        'name': 'id_group',
        'type': ogr.OFTInteger
    }, {
        'name': 'n_events',
        'type': ogr.OFTInteger
    }, {
        'name': 'ini_date',
        'type': ogr.OFTString,
        'width': 10
    }, {
        'name': 'end_date',
        'type': ogr.OFTString,
        'width': 10
    }, {
        'name': 'ini_ha',
        'type': ogr.OFTReal
    }, {
        'name': 'end_ha',
        'type': ogr.OFTReal
    }, {
        'name': 'fids_ha',
        'type': ogr.OFTReal
    }, {
        'name': 'n_fids',
        'type': ogr.OFTInteger
    }, {
        'name': 'fids',
        'type': ogr.OFTString,
        'width': 200
    }, {
        'name': 'dates_ev',
        'type': ogr.OFTString,
        'width': 200
    }, {
        'name': 'ha_ev',
        'type': ogr.OFTString,
        'width': 200
    }, {
        'name': 'tipos',
        'type': ogr.OFTString,
        'width': 200
    }, {
        'name': 'estagios',
        'type': ogr.OFTString,
        'width': 200
    }]
    outFile = "{}/{}_{}.shp".format(dirOut, table, 'aggregate')
    d = createOutLayer(layer.GetSpatialRef(), outFile, fields,
                       ogr.wkbMultiPolygon)
    if not d['isOk']:
        printStatus(d['message'], True)
        return 1
    dsShape, outLayer = d['dataset'], d['layer']
    defnOut = outLayer.GetLayerDefn()
    fid = 0
    for item in outFeats:
        fid += 1
        sumEvent = getSumEvents(item)
        f = {
            'fid':
            fid,
            'geom':
            item['geom'],
            'attributes': [{
                'name': 'id_group',
                'value': item['id_group']
            }, {
                'name': 'n_events',
                'value': sumEvent['n_events']
            }, {
                'name': 'ini_date',
                'value': sumEvent['ini_date']
            }, {
                'name': 'end_date',
                'value': sumEvent['end_date']
            }, {
                'name': 'ini_ha',
                'value': sumEvent['ini_ha']
            }, {
                'name': 'end_ha',
                'value': getAreaHa(item['geom'])
            }, {
                'name': 'fids_ha',
                'value': sumEvent['fids_ha']
            }, {
                'name': 'n_fids',
                'value': sumEvent['n_fids']
            }, {
                'name': 'fids',
                'value': sumEvent['fids']
            }, {
                'name': 'dates_ev',
                'value': sumEvent['dates_ev']
            }, {
                'name': 'ha_ev',
                'value': sumEvent['ha_ev']
            }, {
                'name': 'tipos',
                'value': sumEvent['tipos']
            }, {
                'name': 'estagios',
                'value': sumEvent['estagios']
            }]
        }
        addFeaturesOut(outLayer, f, fid, defnOut)
        del f['attributes']
        item['geom'].Destroy()
        item['geomBuffer'].Destroy()

    del outFeats[:]

    dsShape.Destroy()
    dsMemory.Destroy()

    printStatus("Finish! '%s' (%d features)" % (outFile, totalOutFeats), True)

    if len(lstExceptionFids) > 0:
        nameFile = "{}/{}_{}_errors.txt".format(dirOut, table, 'aggregate')
        saveErrorFids(nameFile)
        printStatus(
            "Error! '%s' (%d total)" % (nameFile, len(lstExceptionFids)), True)

    return 0
Example #12
def tree(CHMASC="upa3_chm.asc",
         COPA=9,
         TOPMASK="crownMask.tif",
         EPSG=31982,
         export=True,
         INPATH="C:\\FUSION\\daad\\upa3\\",
         OUTPATH="C:\\FUSION\\daad\\upa3\\"):

    import processing
    import os
    from osgeo import ogr

    TOPCROWN = "crown.shp"
    EMERGENT = "emergent.shp"

    # define the projection to be used
    crs = QgsCoordinateReferenceSystem(
        EPSG, QgsCoordinateReferenceSystem.PostgisCrsId)
    '''
	Import the canopy height model (CHM) on which the whole calculation is performed.
	1. load the CHM saved on disk in ASC format into a variable
	2. set the projection of the layer imported in step 1
	3. load the layer into the QGIS canvas
	'''
    print "Locating and extracting emergent trees."
    chmlayer = QgsRasterLayer(INPATH + CHMASC, "CHM temp")
    chmlayer.setCrs(crs)
    QgsMapLayerRegistry.instance().addMapLayer(chmlayer)
    '''
	Extract the maximum value of the CHM layer and use it as a reference for the HEIGHT parameter
	'''
    # print "Computing reference height."
    extent = chmlayer.extent()
    provider = chmlayer.dataProvider()
    stats = provider.bandStatistics(1, QgsRasterBandStats.All, extent, 0)
    #HEIGHT = str(round(stats.maximumValue - 10))
    HEIGHT = str(round(stats.mean + 2. * stats.stdDev))
    '''
	From the CHM imported above, filter the pixels above a given height, creating
	a raster called the digital emergent crown model (MDCE).
	1. build the string that defines the filter to apply in the raster calculator
	2. create the MDCE by applying the filter to the CHM via the SAGA raster calculator
	3. load the MDCE into a variable
	4. set the projection of the MDCE layer
	5. load the layer into the QGIS canvas
	CALC1 = "ifelse(a<"+HEIGHT+",-99999,a)"
	processing.runalg("saga:rastercalculator", chmlayer, None, CALC1, 3, False, 7, OUTPATH+TOPCROWN)
	toplayer = QgsRasterLayer(OUTPATH+TOPCROWN, "top crowns")
	toplayer.setCrs(crs)
	QgsMapLayerRegistry.instance().addMapLayer(toplayer)
	'''
    '''
	From the CHM imported above, create a mask of the pixels above a given height.
	1. build the string that defines the filter to apply in the raster calculator
	2. create the mask by applying the filter to the CHM via the SAGA raster calculator
	3. load the mask into a variable
	4. set the projection of the mask
	5. extract the extent of the mask
	6. load the mask into the QGIS canvas
	'''
    # print "Creating mask of emergent crown"
    CALC2 = "ifelse(a<" + HEIGHT + ",-99999,1)"
    processing.runalg("saga:rastercalculator", chmlayer, None, CALC2, 3, False,
                      7, OUTPATH + TOPMASK)
    msklayer = QgsRasterLayer(OUTPATH + TOPMASK, "mascara")
    msklayer.setCrs(crs)
    extent = msklayer.extent()
    xmin = extent.xMinimum()
    xmax = extent.xMaximum()
    ymin = extent.yMinimum()
    ymax = extent.yMaximum()
    QgsMapLayerRegistry.instance().addMapLayer(msklayer)
    '''
	From the emergent crown mask, create a polygon vector layer.
	1. call the GRASS command to vectorize the mask
	2. load the polygon layer into a variable
	3. set the projection of the polygon layer
	4. load the layer into the QGIS canvas
	'''
    # print "Creating crown layer 1 of 3 steps"
    TEMP1 = TOPCROWN[0:len(TOPCROWN) - 4] + "Temp1.shp"
    processing.runalg("grass7:r.to.vect", msklayer, 2, False,
                      "%f,%f,%f,%f" % (xmin, xmax, ymin, ymax), 0,
                      OUTPATH + TEMP1)
    vlayer = QgsVectorLayer(OUTPATH + TEMP1, "crownTemp1", "ogr")
    vlayer.setCrs(crs)
    QgsMapLayerRegistry.instance().addMapLayer(vlayer)
    '''
	From the polygon vector layer, create a new layer containing an area attribute. This layer
	approximates the crowns of the emergent trees.
	1. build the name of the new layer with suffix 2
	2. run the field calculator to compute the area of each polygon
	3. load the polygon layer into a variable
	4. set the projection of the polygon layer
	5. extract the extent of the layer
	6. load the layer into the QGIS canvas
	'''
    # print "Creating crown layer 2 of 3 steps"
    TEMP2 = TOPCROWN[0:len(TOPCROWN) - 4] + "Temp2.shp"
    processing.runalg('qgis:fieldcalculator', vlayer, 'area', 0, 10, 2, True,
                      '$area', OUTPATH + TEMP2)
    vlayer2 = QgsVectorLayer(OUTPATH + TEMP2, "crownTemp2", "ogr")
    vlayer2.setCrs(crs)
    extent = vlayer2.extent()
    xmin = extent.xMinimum()
    xmax = extent.xMaximum()
    ymin = extent.yMinimum()
    ymax = extent.yMaximum()
    QgsMapLayerRegistry.instance().addMapLayer(vlayer2)
    '''
	From the crown vector with area information, keep only the crowns larger than the
	area specified by the user.
	1. call the GRASS command to filter polygons with area above the specified limit
	2. load the polygon layer of emergent crowns larger than the limit
	3. set the projection of the polygon layer
	4. load the layer into the QGIS canvas
	'''
    # print "Creating crown layer 3 of 3 steps"
    CROWN = "area>" + str(COPA)
    processing.runalg("grass7:v.extract", vlayer2, CROWN, False,
                      "%f,%f,%f,%f" % (xmin, xmax, ymin, ymax), -1, 0, 0,
                      OUTPATH + TOPCROWN)
    crownLayer = QgsVectorLayer(OUTPATH + TOPCROWN, "crown", "ogr")
    crownLayer.setCrs(crs)
    QgsMapLayerRegistry.instance().addMapLayer(crownLayer)
    '''
	1. compute the centroid of each polygon extracted as a tree
	2. save them in a point shapefile called centroid.shp
	3. extract the x and y coordinates of each point
	4. export the coordinates to a CSV
	'''
    if export:
        ogr.UseExceptions()
        os.chdir(OUTPATH)

        print "Extracting crown centroids 1 of 3 steps"
        ds = ogr.Open(OUTPATH + TOPCROWN)
        ly = ds.ExecuteSQL('SELECT ST_Centroid(geometry), * FROM crown',
                           dialect='sqlite')
        drv = ogr.GetDriverByName('ESRI Shapefile')
        ds2 = drv.CreateDataSource('emergentesTemp1.shp')
        ds2.CopyLayer(ly, '')
        ly = crownLayer = ds2 = None  # save, close
        pointslayer = QgsVectorLayer(OUTPATH + 'emergentesTemp1.shp', "temp1",
                                     "ogr")
        pointslayer.setCrs(crs)

        print "Extracting crown centroids 2 of 2 steps"
        processing.runalg('qgis:fieldcalculator', pointslayer, 'xcoord', 0, 10,
                          2, True, '$x', OUTPATH + 'emergentesTemp2.shp')
        pointslayer = QgsVectorLayer(OUTPATH + 'emergentesTemp2.shp', "temp2",
                                     "ogr")
        pointslayer.setCrs(crs)

        print "Extracting crown centroids 3 of 3 steps"
        processing.runalg('qgis:fieldcalculator', pointslayer, 'ycoord', 0, 10,
                          2, True, '$y', OUTPATH + 'emergentes.shp')
        pointslayer = QgsVectorLayer(OUTPATH + 'emergentes.shp', "emergentes",
                                     "ogr")
        pointslayer.setCrs(crs)
        QgsMapLayerRegistry.instance().addMapLayer(pointslayer)

        print "Exporting centroids."
        QgsVectorFileWriter.writeAsVectorFormat(pointslayer,
                                                OUTPATH + "xy.csv",
                                                "utf-8",
                                                None,
                                                "CSV",
                                                layerOptions='GEOMETRY=AS_WKT')

        # print "Cleaning temporary files."
        driver = ogr.GetDriverByName("ESRI Shapefile")
        if os.path.exists(OUTPATH + 'emergentesTemp1.shp'):
            driver.DeleteDataSource(OUTPATH + 'emergentesTemp1.shp')
        if os.path.exists(OUTPATH + 'emergentesTemp2.shp'):
            driver.DeleteDataSource(OUTPATH + 'emergentesTemp2.shp')

        # print "Extracting emergent trees height."
        processing.runalg("grass7:v.what.rast.points", pointslayer, chmlayer,
                          "value", "area > 0", False,
                          "%f,%f,%f,%f" % (xmin, xmax, ymin, ymax), -1, 0.0001,
                          0, OUTPATH + 'emergentes2.shp')
        pointslayer2 = QgsVectorLayer(OUTPATH + 'emergentes2.shp',
                                      "emergentes", "ogr")
        pointslayer2.setCrs(crs)
        QgsMapLayerRegistry.instance().addMapLayer(pointslayer2)

    # print "Cleaning temporary files."
    QgsMapLayerRegistry.instance().removeMapLayer(chmlayer.id())
    QgsMapLayerRegistry.instance().removeMapLayer(msklayer.id())
    QgsMapLayerRegistry.instance().removeMapLayer(vlayer.id())
    QgsMapLayerRegistry.instance().removeMapLayer(vlayer2.id())
    QgsMapLayerRegistry.instance().removeMapLayer(pointslayer.id())
    driver = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists(OUTPATH + TEMP2):
        driver.DeleteDataSource(OUTPATH + TEMP2)
    if os.path.exists(OUTPATH + TEMP1):
        driver.DeleteDataSource(OUTPATH + TEMP1)
    if os.path.exists(OUTPATH + 'emergentes.shp'):
        driver.DeleteDataSource(OUTPATH + 'emergentes.shp')

    return
Example #13
# geojson.py
#
# ---copyright goes here---
# this is python2 code
# requirements: gdal with its python bindings installed

import os
import osgeo.ogr as ogr

ogr.UseExceptions()  #make ogr closer to sane
assert ogr.GetUseExceptions() == True

#import geojson

#functions we care about:
# ogr.Open() (no, not gdal.Open, that's for rasters!!)
# layer.TestCapability("FastSpatialFilter")
# layer.GetFeatureCount()
# feature.geometry()
# layer.SetSpatialFilterRect()
# feature['key']
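# a minimal sketch (not part of the original snippet) of how those calls fit
# together; 'parcels.shp' and 'key' are hypothetical names:
#   ds = ogr.Open('parcels.shp')
#   layer = ds.GetLayer(0)
#   layer.SetSpatialFilterRect(xmin, ymin, xmax, ymax)
#   print layer.GetFeatureCount()
#   for feature in layer:
#       print feature['key']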

import IPython

IPython.terminal.embed.TerminalInteractiveShell.confirm_exit = False  #i want
#IPython.get_config().InteractiveShell.confirm_exit = False


def features(shapefile):
    """A simple iterator that returns items from a shapefile.

    Precondition: the shapefile has exactly one layer (you can split it up
    with gdal or qgis if this is not true).
    """
Example #14
class Watershed:

    ogr.UseExceptions()
    gdal.UseExceptions()

    def __init__(self, x=None, y=None):

        self.x = x
        self.y = y
        self.catchment_identifier = None
        self.catchmentGeom = None
        self.splitCatchmentGeom = None
        self.upstreamBasinGeom = None
        self.mergedCatchmentGeom = None

        #input point spatial reference
        self.sourceprj = osr.SpatialReference()
        self.sourceprj.ImportFromProj4(
            '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')

        # Getting spatial reference of input raster
        tif = gdal.Open(IN_FDR, gdal.GA_ReadOnly)
        self.Projection = tif.GetProjectionRef()
        self.targetprj = osr.SpatialReference(wkt=tif.GetProjection())

        #create transform
        self.transformToRaster = osr.CoordinateTransformation(
            self.sourceprj, self.targetprj)
        self.transformToWGS = osr.CoordinateTransformation(
            self.targetprj, self.sourceprj)

        #kick off
        self.transform_click_point()

## helper functions

    def geom_to_geojson(self,
                        in_geom,
                        name,
                        simplify_tolerance,
                        in_ref,
                        out_ref,
                        write_output=False):
        in_geom = in_geom.Simplify(simplify_tolerance)
        out_ref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

        transform = osr.CoordinateTransformation(in_ref, out_ref)

        #don't want to affect original geometry
        transform_geom = in_geom.Clone()

        #transform geometry from whatever the local projection is to wgs84
        transform_geom.Transform(transform)
        json_text = transform_geom.ExportToJson()

        #add some attributes
        geom_json = json.loads(json_text)

        #get area in local units
        area = in_geom.GetArea()

        print('processing: ' + name + ' area: ' + str(area * 0.00000038610))  # ≈ m² to mi², assuming local units are meters

        geojson_dict = {
            "type": "Feature",
            "geometry": geom_json,
            "properties": {
                "area": area
            }
        }

        if write_output:
            f = open(
                'C:/NYBackup/GitHub/ss-delineate/data/' + name + '.geojson',
                'w')
            f.write(json.dumps(geojson_dict))
            f.close()
            print('Exported geojson:', name)

        return geojson_dict

    def geom_to_shapefile(self, geom, name):

        #write out shapefile
        # set up the shapefile driver
        driver = ogr.GetDriverByName("ESRI Shapefile")

        # create the data source
        data_source = driver.CreateDataSource(OUT_PATH + name + '.shp')
        layer = data_source.CreateLayer(name, self.targetprj, ogr.wkbPolygon)
        feature = ogr.Feature(layer.GetLayerDefn())

        # Set the feature geometry using the point
        feature.SetGeometry(geom)
        # Create the feature in the layer (shapefile)
        layer.CreateFeature(feature)
        # Dereference the feature
        feature = None


## main functions

    def transform_click_point(self):

        print('Input X,Y:', self.x, self.y)
        self.projectedLng, self.projectedLat, z = self.transformToRaster.TransformPoint(
            self.x, self.y)
        print('Projected X,Y:', self.projectedLng, ',', self.projectedLat)

        self.get_local_catchment_geom()

    def get_local_catchment_geom(self):

        wkt_point = "POINT(%f %f)" % (self.x, self.y)
        cql_filter = "INTERSECTS(the_geom, %s)" % (wkt_point)

        payload = {
            'service': 'wfs',
            'version': '1.0.0',
            'request': 'GetFeature',
            'typeName': 'wmadata:catchmentsp',
            'outputFormat': 'application/json',
            'srsName': 'EPSG:4326',
            'CQL_FILTER': cql_filter
        }

        #request catchment geometry via a point-in-polygon query against the NLDI geoserver
        # https://labs.waterdata.usgs.gov/geoserver/wmadata/ows?service=wfs&version=1.0.0&request=GetFeature&typeName=wmadata%3Acatchmentsp&outputFormat=application%2Fjson&srsName=EPSG%3A4326&CQL_FILTER=INTERSECTS%28the_geom%2C+POINT%28-73.745860+44.006830%29%29
        r = requests.get(NLDI_GEOSERVER_URL, params=payload)
        resp = r.json()

        #print(r.text)

        #get catchment id
        self.catchment_identifier = json.dumps(
            resp['features'][0]['properties']['featureid'])

        #get main catchment geometry polygon
        gj_geom = json.dumps(resp['features'][0]['geometry'])
        self.catchmentGeom = ogr.CreateGeometryFromJson(gj_geom)

        #transform catchment geometry
        self.catchmentGeom.Transform(self.transformToRaster)

        self.geom_to_shapefile(self.catchmentGeom, 'catchment')

        #get extent of transformed polygon
        # Get bounding box of the transformed polygon
        minX, maxX, minY, maxY = self.catchmentGeom.GetEnvelope()
        bounds = [minX, minY, maxX, maxY]

        print('projected bounds', bounds)

        self.splitCatchmentGeom = self.split_catchment(bounds,
                                                       self.projectedLng,
                                                       self.projectedLat)

        #get upstream basin
        self.upstreamBasinGeom = self.get_upstream_basin()

    def get_local_catchment_id(self):

        #request local catchment identifier from NLDI
        # https://labs.waterdata.usgs.gov/api/nldi/linked-data/comid/position?f=json&coords=POINT(-89.35%2043.0864)
        wkt_point = "POINT(%f %f)" % (self.x, self.y)
        payload = {'f': 'json', 'coords': wkt_point}

        #get comid of catchment point is in
        r = requests.get(NLDI_URL + 'position', params=payload)

        resp = r.json()
        self.catchment_identifier = resp['features'][0]['properties'][
            'identifier']

        #print('identifier:  ', self.catchment_identifier)

        self.get_upstream_basin()

    def get_upstream_basin(self):

        #request upstream basin

        payload = {'f': 'json', 'simplified': 'false'}

        #request upstream basin from NLDI using comid of catchment point is in
        r = requests.get(NLDI_URL + self.catchment_identifier + '/basin',
                         params=payload)

        #print('upstream basin', r.text)
        resp = r.json()

        #convert geojson to ogr geom
        gj_geom = json.dumps(resp['features'][0]['geometry'])
        self.upstreamBasinGeom = ogr.CreateGeometryFromJson(gj_geom)
        self.upstreamBasinGeom.Transform(self.transformToRaster)

        self.geom_to_shapefile(self.upstreamBasinGeom, 'upstreamBasin')

        self.mergeGeoms()

    def mergeGeoms(self):

        #create new cloned geom
        self.mergedCatchmentGeom = self.upstreamBasinGeom.Clone()

        #remove downstream catchment
        diff = self.catchmentGeom.Difference(
            self.splitCatchmentGeom.Buffer(10).Buffer(-10))

        self.mergedCatchmentGeom = self.mergedCatchmentGeom.Difference(
            diff).Simplify(30)

        # #add split catchment
        # self.mergedCatchmentGeom.AddGeometry(self.splitCatchmentGeom)

        # self.mergedCatchmentGeom = self.mergedCatchmentGeom.UnionCascaded()

        # self.mergedCatchmentGeom = self.mergedCatchmentGeom.Simplify(30)

        #write out
        self.geom_to_shapefile(self.mergedCatchmentGeom, 'xxFinalBasinxx')

    def split_catchment(self, bounds, x, y):

        RasterFormat = 'GTiff'
        PixelRes = 30

        #method to use catchment bounding box instead of exact geom
        gdal.Warp(OUT_FDR,
                  IN_FDR,
                  format=RasterFormat,
                  outputBounds=bounds,
                  xRes=PixelRes,
                  yRes=PixelRes,
                  dstSRS=self.Projection,
                  resampleAlg=gdal.GRA_NearestNeighbour,
                  options=['COMPRESS=DEFLATE'])

        #start pysheds catchment delineation
        grid = Grid.from_raster(OUT_FDR, data_name='dir')

        #compute flow accumulation to snap to
        dirmap = (64, 128, 1, 2, 4, 8, 16, 32)
        grid.accumulation(data='dir',
                          dirmap=dirmap,
                          out_name='acc',
                          apply_mask=False)

        grid.to_raster('acc',
                       'C:/NYBackup/GitHub/ss-delineate/data/acc.tif',
                       view=False,
                       blockxsize=16,
                       blockysize=16)

        #snap the pour point to the nearest high-accumulation cell
        xy = (x, y)
        new_xy = grid.snap_to_mask(grid.acc > 50, xy, return_dist=False)

        #get catchment with pysheds
        grid.catchment(data='dir',
                       x=new_xy[0],
                       y=new_xy[1],
                       out_name='catch',
                       recursionlimit=15000,
                       xytype='label')

        # Clip the bounding box to the catchment
        grid.clip_to('catch')

        #raster-to-polygon conversion (pysheds polygonize, built on rasterio)
        shapes = grid.polygonize()

        #get split Catchment geometry
        print('Split catchment complete')
        split_geom = ogr.Geometry(ogr.wkbPolygon)

        for shape in shapes:
            split_geom = split_geom.Union(
                ogr.CreateGeometryFromJson(json.dumps(shape[0])))

        #write out shapefile
        self.geom_to_shapefile(split_geom, 'splitCatchment')

        return split_geom
Example #15
    def __init__(self, path):
        self.logger = logging.getLogger(__name__)
        self.logger.info("Initiating GDB instance")

        ogr.UseExceptions()
        self.load(path)
Example #16
def main():
    ogr.UseExceptions()

    shp = ogr.Open(TILEINDEX)
    layer = shp.GetLayer(0)

    for feature in layer:
        infileName = feature.GetField("location")
        baseName = os.path.basename(infileName)

        #if baseName != "611233_12_5cm.tif":
        #    continue

        print "*** " + baseName + " ***"

        geom = feature.GetGeometryRef()
        env = geom.GetEnvelope()

        minX = int(env[0] + 0.001 + 2000000)
        minY = int(env[2] + 0.001 + 1000000)
        maxX = int(env[1] + 0.001 + 2000000)
        maxY = int(env[3] + 0.001 + 1000000)

        #outFileName = METHOD + "_" + str(minX)[0:4] + "_" + str(minY)[0:4] + "_12_5cm.tif"
        outFileName = str(minX)[0:4] + "_" + str(minY)[0:4] + "_12_5cm.tif"
        outFileName = os.path.join(OUTPUT_DIR, outFileName)

        # 1) Create the new tile with a nice bounding box.
        vrt = os.path.join(INPUT_DIR, "ortho2014rgb.vrt")
        cmd = "gdalwarp -overwrite -s_srs \"" + S_SRS + "\" -t_srs \"" + T_SRS + "\" -te " + str(
            minX) + " " + str(minY) + " " + str(maxX) + " " + str(maxY)
        cmd += " -tr " + str(RES_M) + " " + str(
            RES_M
        ) + " -co 'PHOTOMETRIC=RGB' -co 'TILED=YES' -co 'PROFILE=GeoTIFF'"
        cmd += " -co 'INTERLEAVE=PIXEL' -co 'COMPRESS=DEFLATE' -co 'PREDICTOR=2' -co 'BLOCKXSIZE=256' -co 'BLOCKYSIZE=256'"
        cmd += " -r " + METHOD + " " + vrt + " " + outFileName
        #print cmd
        os.system(cmd)

        cmd = "gdal_edit.py -a_srs EPSG:2056 " + outFileName
        #print cmd
        os.system(cmd)

        # Resampling method does not really matter here.
        cmd = "gdaladdo -r average --config COMPRESS_OVERVIEW DEFLATE " + outFileName + " 2 4 8 16 32 64 128"
        #print cmd
        os.system(cmd)

    # 2) Create the VRT and the 5m overview image.
    # The resampling method does not really matter here;
    # at least it should look nice.
    vrt = os.path.join(OUTPUT_DIR, "ortho2014rgb.vrt")
    cmd = "gdalbuildvrt -addalpha " + vrt + " " + os.path.join(
        OUTPUT_DIR, "*.tif")
    print cmd
    os.system(cmd)

    outFileName500cm = os.path.join(OUTPUT_DIR,
                                    "../500cm/ortho2014rgb_500.tif")
    cmd = "gdalwarp -overwrite -tr " + str(OVERVIEW_RES_M) + " " + str(
        OVERVIEW_RES_M
    ) + " -co 'PHOTOMETRIC=RGB' -co 'TILED=YES' -co 'PROFILE=GeoTIFF'"
    cmd += " -co 'INTERLEAVE=PIXEL' -co 'COMPRESS=DEFLATE' -co 'PREDICTOR=2' -co 'BLOCKXSIZE=256' -co 'BLOCKYSIZE=256'"
    cmd += " -r bilinear " + vrt + " " + outFileName500cm
    print cmd
    os.system(cmd)

    cmd = "gdaladdo -r average --config COMPRESS_OVERVIEW DEFLATE " + outFileName500cm + " 2 4 8 16 32 64 128"
    print cmd
    os.system(cmd)
Example #17
def PCR_river2Shape(rivermap,
                    drainmap,
                    ordermap,
                    lddmap,
                    SHP_FILENAME,
                    catchmentmap,
                    srs=None):
    #    rivermap = riversid_map
    #    drainmap = drain_map
    #    ordermap = streamorder_map
    #    lddmap = ldd_map
    #    SHP_FILENAME = rivshp
    counter = 0.0
    percentage = 0.0
    file_att = os.path.splitext(os.path.basename(SHP_FILENAME))[0]
    x, y, riversid, FillVal = readMap(rivermap, "PCRaster")
    riversid[riversid == FillVal] = -1
    x, y, strahlerorder, FillVal = readMap(ordermap, "PCRaster")
    strahlerorder[strahlerorder == FillVal] = -1
    x, y, catchment, FillVal = readMap(catchmentmap, "PCRaster")
    catchment[catchment == FillVal] = -1
    x, y, drain, FillVal = readMap(drainmap, "PCRaster")
    drain[drain == FillVal] = np.nan
    x, y, ldd, FillVal = readMap(lddmap, "PCRaster")
    xi, yi = np.meshgrid(x, y)

    # mesh of surrounding pixels
    xi_window, yi_window = np.meshgrid(list(range(-1, 2)), list(range(-1, 2)))
    # mesh of ldd grid values
    ldd_values = np.array([[7, 8, 9], [4, 5, 6], [1, 2, 3]])
    [iiy, iix] = np.where(riversid > 0)
    riverId = riversid[iiy, iix]
    maxRiverId = riverId.max()

    # Create new shapefile
    ogr.UseExceptions()
    ds = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource(SHP_FILENAME)
    layer_line = ds.CreateLayer(file_att, srs, ogr.wkbLineString)

    river_ID = ogr.FieldDefn()
    river_ID.SetName("ORDER")
    river_ID.SetType(ogr.OFTInteger)
    river_ID.SetWidth(6)
    layer_line.CreateField(river_ID)

    river_ID = ogr.FieldDefn()
    river_ID.SetName("CATCHMENT")
    river_ID.SetType(ogr.OFTInteger)
    river_ID.SetWidth(6)
    layer_line.CreateField(river_ID)

    # Create a new line geometry per river segment
    for id in np.arange(1, maxRiverId + 1):
        # for id in range(25,26):
        # print 'Writing line element "' + str(id) + '"'
        y_idx, x_idx = np.where(riversid == id)
        drain_idx = drain[y_idx, x_idx]
        lat_select = yi[y_idx, x_idx]
        lon_select = xi[y_idx, x_idx]
        strahlerorder_select = strahlerorder[y_idx, x_idx]
        catchment_select = catchment[y_idx, x_idx]
        order = drain_idx.argsort()
        lat_select = lat_select[order]
        lon_select = lon_select[order]
        catchment_select = catchment_select[order]
        strahlerorder_select = strahlerorder_select[order]
        line = ogr.Geometry(type=ogr.wkbLineString)
        # add points sequentially to line segment
        for nr in range(0, len(lat_select)):
            # line_latlon.AddPoint(np.float64(lon_select[nr]), np.float64(lat_select[nr]))
            line.AddPoint(np.float64(lon_select[nr]),
                          np.float64(lat_select[nr]))
        # now find the point downstream of the last pixel from the ldd, which
        # is connected with the downstream river
        try:
            xi_select = xi[y_idx[order][-1] + yi_window,
                           x_idx[order][-1] + xi_window]
            yi_select = yi[y_idx[order][-1] + yi_window,
                           x_idx[order][-1] + xi_window]
            ldd_at_pos = ldd[y_idx[order][-1], x_idx[order][-1]]
            ldd_y, ldd_x = np.where(ldd_values == ldd_at_pos)
            downstream_y = yi_select[ldd_y, ldd_x]
            downstream_x = xi_select[ldd_y, ldd_x]
            line.AddPoint(np.float64(downstream_x), np.float64(downstream_y))
        except IndexError:
            # the most downstream point of the segment is on the boundary of
            # the map, so skip this step
            # print 'River segment id: %g is on boundary of the map' % id
            continue
        # Add line as a new feature to the shapefiles
        feature = ogr.Feature(feature_def=layer_line.GetLayerDefn())
        feature.SetGeometryDirectly(line)
        feature.SetField("ORDER", int(strahlerorder_select[0]))
        feature.SetField("CATCHMENT", int(catchment_select[0]))
        counter = counter + 1
        if (float(id) / float(maxRiverId)) * 100.0 > percentage:
            # logger.info(' ' + str(int(percentage)) + '% completed')
            percentage = percentage + 10.0
        # print 'Writing polyline ' + str(id) + ' of ' + str(maxRiverId)
        layer_line.CreateFeature(feature)
        # Cleanup
        feature.Destroy()
    ds.Destroy()
Example #18
def calculate(DATABASE_URL):
    '''
    '''
    index = requests.get(START_URL).json()
    geojson_url = urljoin(START_URL, index['render_geojson_url'])
    _L.info('Downloading {}...'.format(geojson_url))

    handle, filename = tempfile.mkstemp(prefix='render_geojson-',
                                        suffix='.geojson')
    geojson = os.write(handle, requests.get(geojson_url).content)
    os.close(handle)

    with psycopg2.connect(DATABASE_URL) as conn:
        with conn.cursor() as db:

            ogr.UseExceptions()
            iso_a2s, usps_codes = set(), set()
            rendered_ds = ogr.Open(filename)

            db.execute('''
                CREATE TEMPORARY TABLE rendered_world
                (
                    iso_a2  VARCHAR(2),
                    count   INTEGER,
                    geom    GEOMETRY(MultiPolygon, 4326)
                );

                CREATE TEMPORARY TABLE rendered_usa
                (
                    usps_code   VARCHAR(2),
                    count       INTEGER,
                    geom        GEOMETRY(MultiPolygon, 4326)
                );
                ''')

            for feature in rendered_ds.GetLayer(0):
                iso_a2, usps_code = insert_coverage_feature(db, feature)
                iso_a2s.add(iso_a2)
                if usps_code:
                    usps_codes.add(usps_code)
                    _L.debug('{}/{} - {} addresses from {}'.format(
                        iso_a2, usps_code, feature.GetField('address count'),
                        feature.GetField('source paths')))
                else:
                    _L.debug('{} - {} addresses from {}'.format(
                        iso_a2, feature.GetField('address count'),
                        feature.GetField('source paths')))

            db.execute('''
                DELETE FROM areas;
            
                INSERT INTO areas (iso_a2, addr_count, buffer_km, geom)
                SELECT iso_a2, SUM(count), 10, ST_Multi(ST_Union(ST_Buffer(geom, 0.00001)))
                FROM rendered_world GROUP BY iso_a2;

                DELETE FROM us_states;
            
                INSERT INTO us_states (usps_code, addr_count, buffer_km, geom)
                SELECT usps_code, SUM(count), 10, ST_Multi(ST_Union(ST_Buffer(geom, 0.00001)))
                FROM rendered_usa GROUP BY usps_code;
                ''')

            for (index, iso_a2) in enumerate(sorted(iso_a2s)):
                _L.info('Counting up {} ({}/{})...'.format(
                    iso_a2, index + 1, len(iso_a2s)))
                summarize_country_coverage(db, iso_a2)

            for (index, usps_code) in enumerate(sorted(usps_codes)):
                _L.info('Counting up US:{} ({}/{})...'.format(
                    usps_code, index + 1, len(usps_codes)))
                summarize_us_state_coverage(db, usps_code)

    os.remove(filename)
Example #19
def get_permafrost_mask(lons2d, lats2d, zones_path="data/permafrost/permaice.shp", land_mask=None):
    #    cache_file = "permafrost_types.bin"

    #    if os.path.isfile(cache_file):
    #        return pickle.load(open(cache_file))

    # TODO: Add a mask parameter; for example, there is no permafrost over the ocean

    ogr.UseExceptions()

    driver = ogr.GetDriverByName("ESRI Shapefile")
    datastore = driver.Open(zones_path, 0)
    layer = datastore.GetLayer(0)

    latlong = osr.SpatialReference()
    latlong.ImportFromProj4("+proj=latlong")

    ct = osr.CoordinateTransformation(latlong, layer.GetSpatialRef())



    # points_lat_long = map(lambda x: create_gdal_point_and_transform(x[0], x[1]),
    #                      zip(lons2d.flatten(), lats2d.flatten()))

    if land_mask is None:
        i_indices_1d = np.array(range(lons2d.shape[0]))
        j_indices_1d = np.array(range(lons2d.shape[1]))

        j_indices_2d, i_indices_2d = np.meshgrid(j_indices_1d, i_indices_1d)
        indices = list(zip(i_indices_2d.flatten(), j_indices_2d.flatten()))
        indices = list(indices)

        points = [create_gdal_point_and_transform(x[0], x[1], ct) for x in zip(lons2d.flatten(), lats2d.flatten())]

    else:
        i_indices_1d, j_indices_1d = np.where(land_mask)
        indices = list(zip(i_indices_1d, j_indices_1d))
        points = [create_gdal_point_and_transform(x[0], x[1], ct) for x in zip(lons2d[land_mask], lats2d[land_mask])]


    # do not consider territories of the following countries
    # rej_countries = ["Greenland", "Iceland", "Russia"]
    # rej_countries = []
    # delete_points_in_countries(points_lat_long, points, indices, countries=rej_countries)

    permafrost_kind_field = np.zeros(lons2d.shape)
    # grid_polygon = create_points_envelope_gdal(points)

    # set spatial and attribute filters to take only the features with valid EXTENT field,
    # and those which are close to the area of interest
    # layer.SetSpatialFilter(grid_polygon)
    query = "EXTENT IN  (\'{0}\',\'{1}\',\'{2}\' ,\'{3}\')".format(*permafrost_types)
    query += "OR EXTENT IN  (\'{0}\',\'{1}\',\'{2}\' ,\'{3}\')".format(*[x.lower() for x in permafrost_types])
    print(query)
    layer.SetAttributeFilter(query)

    print(layer.GetFeatureCount())
    # print grid_polygon.ExportToWkt()

    # read features from the shape file
    feature = layer.GetNextFeature()
    i = 0
    while feature:
        geom = feature.GetGeometryRef()
        points_to_remove = []
        indices_to_remove = []
        for ind, p in zip(indices, points):
            # assert isinstance(geom, ogr.Geometry)
            if geom.Contains(p):
                perm_type = feature.items()["EXTENT"]
                permafrost_kind_field[ind] = permafrost_types.index(perm_type) + 1
                points_to_remove.append(p)
                indices_to_remove.append(ind)
                print(i)

        for the_p, the_i in zip(points_to_remove, indices_to_remove):
            indices.remove(the_i)
            points.remove(the_p)

        feature = layer.GetNextFeature()

        i += 1

    datastore.Destroy()
    # pickle.dump(permafrost_kind_field, open(cache_file, "w"))
    return permafrost_kind_field
Example #20
def main(args):

    region = 'EU'  # or NA (default)

    ogr.UseExceptions()  # Unsure about this, but pretty sure we want errors to cause exceptions
    # "export CPL_LOG=/dev/null" -- to hide warnings, must be set from shell or in bashrc

    # Start clock
    start = time.time()

    # Set main directory:
    baseDir = '/att/gpfsfs/briskfs01/ppl/mwooten3/3DSI/ZonalStats/'
    if region == 'EU': baseDir = os.path.join(baseDir, 'EU')

    # Unpack arguments
    inRaster = args['rasterStack']
    inZonalFc = args['zonalFc']
    bigOutput = args['bigOutput']
    logOut = args['logOutput']

    stack = RasterStack(inRaster)
    inZones = ZonalFeatureClass(inZonalFc)  # This will be clipped

    # Set some variables from inputs
    stackExtent = stack.extent()
    stackEpsg = stack.epsg()
    stackName = stack.stackName

    # Get the output directory
    # outDir = baseDir / zonalType (ATL08_na or GLAS_buff30m) --> stackType / stackName
    zonalType = inZones.zonalName
    outDir = stack.outDir(os.path.join(baseDir, zonalType))

    # Figure out if we are writing to .gdb/.gpkg and .csv or just .csv
    bigExt = os.path.splitext(bigOutput)[1]
    if bigExt == '.gdb' or bigExt == '.gpkg':  # Write to both
        # Assume gdb/gpkg is node specific (eg. output-crane101.gdb)
        outCsv = bigOutput.replace(bigExt,
                                   '.csv')  # 6/4/21 - same as .gdb but .csv
        #outCsv = bigOutput.replace('-{}{}'.format(platform.node(), bigExt), '.csv')
        #if not outCsv.endswith('.csv'): # If that assumption is wrong and the above didn't work
        #  outCsv = bigOutput.replace(bigExt, '.csv') # then replace extension as is
        outGdb = bigOutput  # Keep gdb as is
    elif bigExt == '.csv':  # Write only to .csv
        outCsv = bigOutput
        outGdb = None

    # Create directory where output is supposed to go:
    os.system('mkdir -p {}'.format(os.path.dirname(outCsv)))

    # Stack-specific outputs
    stackCsv = os.path.join(
        outDir, '{}__{}__zonalStats.csv'.format(zonalType, stackName))
    stackShp = stackCsv.replace('.csv', '.shp')

    # "Big" outputs (unique for zonal/stack type combos)
    """ Need to come up with better/automated solution for locking issue when
        writing to the output gdb. For now, just write to a node-specific 
        output GPKG and merge by hand when all are done
    """

    # Start stack-specific log if doing so
    if logOut:
        logFile = stackCsv.replace('.csv', '__Log.txt')
        logOutput(logFile)

    # print some info
    print("BEGIN: {}\n".format(time.strftime("%m-%d-%y %I:%M:%S")))
    print("Input zonal feature class: {}".format(inZonalFc))
    print("Input raster stack: {}".format(inRaster))
    print("Output stack .csv: {}".format(stackCsv))
    print("Output aggregate fc: {}".format(outGdb))
    print("Output aggregate csv: {}".format(outCsv))
    print(" n layers = {}".format(stack.nLayers))

    # 10/20/20:
    #   ATL08 .gdb has already been filtered on can_open, so do not filter
    #   GLAS .gdb has been filtered on everything *except* wflen, so filter on wflen
    if zonalType == 'ATL08':
        filterStr = None  #"can_open != {}".format(float(340282346638999984013312))
    elif zonalType == 'GLAS':
        filterStr = 'wflen < 50'
    else:
        print("Zonal type {} not recognized".format(zonalType))
        #* 7/13/21: to generalize, edit needed here
        return None

    # 1. Clip input zonal shp to raster extent. Output proj = that of stack
    # 6/5 Try filtering src data in clip
    #tableName = inZones.baseName
    #sqlQry = 'SELECT * FROM {} WHERE {};'.format(tableName, filterStr.replace('!=', '<>'))

    clipZonal = os.path.join(outDir, '{}__{}.shp'.format(zonalType, stackName))
    if not os.path.isfile(clipZonal):
        print("\n1. Clipping input feature class to extent...")
        inZones.clipToExtent(stackExtent, stackEpsg, stackEpsg,
                             clipZonal)  #, sqlQry)
    else:
        print("\n1. Clipped feature class {} already exists...".format(
            clipZonal))

    # now zones is the clipped input ZFC object:
    zones = ZonalFeatureClass(clipZonal)
    # if checkResults == None, there are no features to work with
    if not checkZfcResults(zones, "clipping to stack extent"):
        return None

    # 2. Filter footprints based on attributes - filter GLAS, not ATL08
    #    (10/20/2020): If filterStr is not None, filter on attributes
    if filterStr:  # aka zonal type = GLAS
        print('\n2. Filtering on attributes using statement = "{}"...'.format(
            filterStr))
        filterShp = zones.filterAttributes(filterStr)

        zones = ZonalFeatureClass(filterShp)
        if not checkZfcResults(zones, "filtering on attributes"):
            return None
        # zones is filtered shp

    else:  # filterStr is None, aka zonal type = ATL08
        print("\n2. Not running attribute filter step")
        # zones is still the clipZonal shp

    # 3. Remove footprints under noData mask
    noDataMask = stack.noDataLayer()

    # Mask out NoDataValues if there is a noDataMask.
    if noDataMask:

        print("\n3. Masking out NoData values using {}...".format(noDataMask))
        rasterMask = RasterStack(noDataMask)

        # If noDataMask is NOT in same projection as zonal fc, supply correct EPSG
        transEpsg = None
        #import pdb; pdb.set_trace()
        if int(rasterMask.epsg()) != int(zones.epsg()):
            # Need to transform coords to those of the mask
            transEpsg = rasterMask.epsg()

        zones.applyNoDataMask(noDataMask, transEpsg=transEpsg, outShp=stackShp)

    # If there is not, just copy the clipped .shp to our output .shp
    else:
        print("\n3. No NoDataMask. Not masking out NoData values.")
        cmd = 'ogr2ogr -f "ESRI Shapefile" {} {}'.format(
            stackShp, zones.filePath)
        print(' {}'.format(cmd))  #TEMP 10/7
        os.system(cmd)

    ## Before moving on, clean up the zonal shapefile by removing unnecessary columns
    ## This does not seem to be working, so leave it be for now
    #removeColumns = ['SHAPE_Leng', 'SHAPE_Area', 'SHAPE_Length', 'keep']
    #removeExtraColumns(stackShp, removeColumns)

    zones = ZonalFeatureClass(stackShp)
    if not checkZfcResults(zones, "masking out NoData values"):
        return None
    # Now zones is the filtered fc obj, will eventually have the stats added as attributes

    # Get stack key dictionary
    layerDict = buildLayerDict(stack)  # {layerNumber: [layerName, [statistics]]}

    # 4. Call zonal stats and return a pandas dataframe
    print("\n4. Running zonal stats for {} layers".format(len(layerDict)))
    zonalStatsDf = callZonalStats(stack, zones, layerDict)

    # 5. Complete the ZS DF by:
    #    adding stackName col, sunAngle if need be
    #    replacing None vals
    #    *removing columns if they exist: keep,SHAPE_Leng,SHAPE_Area
    zonalStatsDf = zonalStatsDf.fillna(stack.noDataValue)
    zonalStatsDf['stackName'] = stackName  # pandas broadcasts the scalar to every row

    # Then add the zonal statistics columns from df to shp
    stackShp = addStatsToShp(zonalStatsDf, stackShp)

    # If there is an xml layer for stack, get sun angle and add as column to df
    stackXml = stack.xmlLayer()
    if stackXml:
        zonalStatsDf = addSunAngleColumn(zonalStatsDf, stackXml)

    # 6. Now write the stack csv, and finish stack-specific shp by adding
    #    new stats columns to ZFC
    zonalStatsDf.to_csv(stackCsv, sep=',', index=False, header=True)

    # 7. Update the big csv and big output gdb (if True) by appending to them:
    updateOutputCsv(outCsv, zonalStatsDf)

    if outGdb:
        fc = ZonalFeatureClass(stackShp)  # Update GDB now a method in FC.py
        fc.addToFeatureClass(outGdb)  #, moreArgs = '-unsetFID')

    endTime = time.strftime("%m-%d-%y %I:%M:%S %p")
    elapsedTime = round((time.time() - start) / 60, 4)
    print("\nEND: {}\n".format(endTime))
    print(" Completed in {} minutes".format(elapsedTime))

    # 8. Lastly, record some info to a batch-level csv:
    batchCsv = os.path.join(
        baseDir, '_timing', '{}_{}__timing.csv'.format(zonalType,
                                                       stack.stackType()))
    os.makedirs(os.path.dirname(batchCsv), exist_ok=True)  # portable equivalent of 'mkdir -p'

    if not os.path.isfile(batchCsv):
        with open(batchCsv, 'w') as bc:
            bc.write(
                'stackName,n layers,n zonal features,node,minutes,datetime\n')
    with open(batchCsv, 'a') as bc:
        bc.write('{},{},{},{},{},{}\n'.format(stackName,
                                              stack.nLayers, zones.nFeatures,
                                              platform.node(), elapsedTime,
                                              endTime))

    sys.stdout.flush()

    return None
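
# Note: checkZfcResults() is not defined in this snippet. A minimal sketch,
# inferred from its call sites above (a hypothetical stand-in; the real
# helper may do more):
def checkZfcResults(zones, stepDescription):
    # A falsy result means the step left no features to work with
    if zones is None or zones.nFeatures == 0:
        print(" No features remain after {}. Exiting.".format(stepDescription))
        return False
    return True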
Example #21
from functools import wraps
from osgeo import gdal
from osgeo import ogr
from qgis.core import QgsDataSourceUri
from qgis.core import QgsVectorLayer
from qgis.core import QgsWkbTypes
from qgis.PyQt.QtCore import QVariant

import logging
import os


logger = logging.getLogger(__name__)


ogr.UseExceptions()  # fail fast


def disable_sqlite_synchronous(func):
    """
    Decorator for temporarily disabling the 'OGR_SQLITE_SYNCHRONOUS' global
    option. Without doing this creating a spatialite file fails (doesn't
    complete or incredibly slow) under Ubuntu 14.04.

    Note: shouldn't be needed anymore in newer versions of GDAL.

    Note 2: this decorator is 're-entrant'
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
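        # The original snippet is cut off here. A minimal sketch of the
        # wrapper body, assuming the option only needs to be set to OFF for
        # the duration of the call and restored afterwards:
        old_value = gdal.GetConfigOption('OGR_SQLITE_SYNCHRONOUS')
        gdal.SetConfigOption('OGR_SQLITE_SYNCHRONOUS', 'OFF')
        try:
            return func(*args, **kwargs)
        finally:
            # restoring the previous value keeps the decorator re-entrant
            gdal.SetConfigOption('OGR_SQLITE_SYNCHRONOUS', old_value)

    return wrapper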
Example #22
 def _create_base_map(self):
     '''
     Deal with the different ways an AOI can be defined; if none is
     specified, the image bound is used.
     '''
     gdal.UseExceptions()
     ogr.UseExceptions() 
     if self.aoi is not None:
         if os.path.exists(self.aoi):
             try:     
                 g = gdal.Open(self.aoi)
                 #subprocess.call(['gdaltindex', '-f', 'GeoJSON',  '-t_srs', 'EPSG:4326', self.toa_dir + '/AOI.json', self.aoi])
                 geojson = get_boundary(self.aoi)[0]
                 with open(self.toa_dir + '/AOI.json', 'wb') as f:
                     f.write(geojson.encode())
             except:  
                 try: 
                     gr = ogr.Open(self.aoi)
                     l = gr.GetLayer(0)
                     f = l.GetFeature(0)
                     g = f.GetGeometryRef()                                                                                                
                 except:
                     raise IOError('AOI file cannot be opened by gdal; please check it or convert it to a format gdal can open')
         else:        
             try:     
                 g = ogr.CreateGeometryFromJson(self.aoi)
             except:  
                 try: 
                     g = ogr.CreateGeometryFromGML(self.aoi)
                 except:
                     try:
                         g = ogr.CreateGeometryFromWkt(self.aoi)
                     except:
                         try:
                             g = ogr.CreateGeometryFromWkb(self.aoi)
                         except:
                             raise IOError('The AOI has to be one of GeoJSON, GML, Wkt or Wkb.')
         gjson_str = '''{"type":"FeatureCollection","features":[{"type":"Feature","properties":{},"geometry":%s}]}'''% g.ExportToJson()
         with open(self.toa_dir + '/AOI.json', 'wb') as f:
             f.write(gjson_str.encode())
     ogr.DontUseExceptions() 
     gdal.DontUseExceptions()
     if not os.path.exists(self.toa_dir + '/AOI.json'):
         g = gdal.Open(self.toa_bands[0])
         proj = g.GetProjection()
         if 'WGS 84' in proj:
             geojson = get_boundary(self.toa_bands[0], to_wgs84 = False)
             with open(self.toa_dir + '/AOI.json', 'wb') as f:
                 f.write(geojson.encode())
         else:
             geojson = get_boundary(self.toa_bands[0])[0]
             with open(self.toa_dir + '/AOI.json', 'wb') as f:
                 f.write(geojson.encode())
         self.logger.warning('AOI was not created; the full band extent is used')
         self.aoi = self.toa_dir + '/AOI.json'
     else:
         self.aoi = self.toa_dir + '/AOI.json'
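
# A compact, standalone version of the format cascade used above (a sketch;
# the ogr.CreateGeometryFrom* parsers are real OGR calls, the helper name is
# hypothetical):
def parse_aoi_geometry(aoi_str):
    # Try each OGR geometry parser in turn until one succeeds
    parsers = (ogr.CreateGeometryFromJson, ogr.CreateGeometryFromGML,
               ogr.CreateGeometryFromWkt, ogr.CreateGeometryFromWkb)
    for parser in parsers:
        try:
            geom = parser(aoi_str)
            if geom is not None:
                return geom
        except (RuntimeError, TypeError):
            # with ogr.UseExceptions() a failed parse raises RuntimeError
            pass
    raise IOError('The AOI has to be one of GeoJSON, GML, Wkt or Wkb.')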
Example #23
#coding=utf-8
#created by LiuJu
import os,sys
from osgeo import gdal,ogr,osr
import pandas as pd
import numpy
ogr.UseExceptions()  # raise exceptions on OGR errors
gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8","NO")  # support Chinese (non-ASCII) file paths
gdal.SetConfigOption("SHAPE_ENCODING","CP936")  # let attribute-table fields hold Chinese text
ogr.RegisterAll()  # register all drivers
Weibo_data = pd.DataFrame()
def readShap():
    global Weibo_data
    fn = r"G:\Weibo\bj.gdb"  # input data source (a file geodatabase)
    ds = ogr.Open(fn, 0)
    if ds is None:
        sys.exit('could not open {0}.'.format(fn))
    lyr = ds.GetLayer(0)
    i = 0
    ID = []
    USER_ID = []
    longtitude_G = []
    latitude_G = []
    longtitude_P = []
    latitude_P = []
    PubTime = []
    Tools = []
    text = []
    print('Start read')
    for feat in lyr:
        if feat.GetField('PubTime')[5:7] not in ['01','02','12']:  # keep only posts outside the Chinese New Year months
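            # A hedged sketch of the truncated loop body: every field name
            # except 'PubTime' is an assumption inferred from the list names
            # declared above.
            ID.append(feat.GetField('ID'))
            USER_ID.append(feat.GetField('USER_ID'))
            PubTime.append(feat.GetField('PubTime'))
            Tools.append(feat.GetField('Tools'))
            text.append(feat.GetField('text'))
            geom = feat.GetGeometryRef()  # point geometry of the post
            longtitude_G.append(geom.GetX())
            latitude_G.append(geom.GetY())
            i += 1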
Example #24
    def mergelines(self,output_filename,DifferentFeaturesList=('NAME','HIGHWAY')):
    
        src_layer = self.srclayer
        logging.debug( 'Layer name: ' + self.srclayer.GetName() )
        
        #sort features, grouping by attributes
        fields = u''
        DifferentFeaturesList = ['"'+item+'"' for item in DifferentFeaturesList]
        sql = '''SELECT * FROM {layername} WHERE NAME IS NOT NULL ORDER BY {fields} '''.format(fields = ','.join(DifferentFeaturesList), layername = self.srclayer.GetName())

        logging.debug(sql)
        
        ogr.UseExceptions()
        
        outDataSource = self.create_output_layer(self.srclayer, output_filename)
        outLayer = outDataSource.GetLayer()
        logging.debug('out layer created')
        out_featureDefn = outLayer.GetLayerDefn()
        logging.debug('layer defn get')

        ResultSet = self.srcdataSource.ExecuteSQL(sql)
        logging.debug('sql executed')
        layer = self.srcdataSource.GetLayer()
        
        logging.debug('getlayer ok')
        
        features_list = list()
        i = 1
        result_count = ResultSet.GetFeatureCount()
        for feature in ResultSet:
            a = feature.GetField("NAME") #TODO: compare using DifferentFeaturesList
            fields = a
            if i == 1:
                fields = a
                prev_fields = fields
            logging.debug('feature '+str(i) + ' / ' + str(result_count))
            if (fields != prev_fields) or (i == result_count):
                logging.debug('new street')
                if i == result_count: #for last
                    features_list.append(feature)
                logging.debug('len features_list before sent '+str(len(features_list))) 
                new_features = self.splitFeaturesBlock(features_list,layer.GetLayerDefn()) #return list of features
                logging.debug('len new_features:'+str(len(new_features)))
                
                #copy calculated features to output file
                for new_feature in new_features:
                    out_feature = ogr.Feature(out_featureDefn)
                    out_feature.SetGeometry(new_feature.GetGeometryRef())
                    out_feature.SetField( "NAME", prev_fields ) #take attributes from previous feature from sql
                    outLayer.CreateFeature(out_feature)

                features_list = list()
                features_list.append(feature)
            else:
                features_list.append(feature)
            fid = feature.GetField("OSM_ID")    
            logging.debug(str(fid).decode('utf-8') + '  ' +str(a).decode('utf-8'))
            prev_fields = fields
            i = i+1
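        # The snippet ends here; a result set obtained from ExecuteSQL()
        # should normally be released once iteration is finished (a sketch
        # of the likely cleanup):
        self.srcdataSource.ReleaseResultSet(ResultSet)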
Example #25
    def generate_download_bundle(self, tables, geos, geo_ids, data, fmt):
        if not HAS_GDAL:
            gdal_missing(critical=True)

        from osgeo import ogr, osr
        self.ogr = ogr
        self.osr = osr
        ogr.UseExceptions()

        format = self.DOWNLOAD_FORMATS[fmt]

        # where we're going to put the data temporarily
        temp_path = tempfile.mkdtemp()
        try:
            file_ident = "%s_%s" % (
                tables[0].id.upper(),
                # The gdal KML driver doesn't like certain chars in its layer names.
                # It will replace them for you, but then subsequent calls hang.
                self.BAD_LAYER_CHARS.sub('_', geos[0].name))

            # where the files go, what we'll eventually zip up
            inner_path = os.path.join(temp_path, file_ident)
            log.info("Generating download in %s" % inner_path)
            os.mkdir(inner_path)
            out_filepath = os.path.join(inner_path, '%s.%s' % (file_ident, fmt))

            out_driver = ogr.GetDriverByName(format['driver'])
            out_srs = osr.SpatialReference()
            out_srs.ImportFromEPSG(4326)
            out_data = out_driver.CreateDataSource(out_filepath)

            # See http://gis.stackexchange.com/questions/53920/ogr-createlayer-returns-typeerror
            # excel limits worksheet names to 31 chars
            out_layer = out_data.CreateLayer(file_ident.encode('utf-8')[0:31], srs=out_srs, geom_type=ogr.wkbMultiPolygon)
            out_layer.CreateField(ogr.FieldDefn('geo_level', ogr.OFTString))
            out_layer.CreateField(ogr.FieldDefn('geo_code', ogr.OFTString))
            out_layer.CreateField(ogr.FieldDefn('geoid', ogr.OFTString))
            out_layer.CreateField(ogr.FieldDefn('name', ogr.OFTString))

            for table in tables:
                for column_id, column_info in table.columns.items():
                    out_layer.CreateField(ogr.FieldDefn(str(column_id), ogr.OFTReal))

            for geo in geos:
                geoid = geo.geoid

                out_feat = ogr.Feature(out_layer.GetLayerDefn())

                if format['geometry']:
                    geom = self.get_geometry(geo)
                    if geom:
                        out_feat.SetGeometry(geom)

                out_feat.SetField2('geo_level', geo.geo_level)
                out_feat.SetField2('geo_code', geo.geo_code)
                out_feat.SetField2('geoid', geoid)
                out_feat.SetField2('name', geo.name.encode('utf-8'))

                for table in tables:
                    table_estimates = data[geoid][table.id.upper()]['estimate']

                    for column_id, column_info in table.columns.items():
                        if column_id in table_estimates:
                            est = table_estimates[column_id]
                            # None values get changed to zero, which isn't accurate
                            if est is None:
                                continue

                            # GDAL generates invalid excel spreadsheets for
                            # zero values in real columns
                            if fmt == 'xlsx' and est == 0:
                                continue
                            out_feat.SetField(str(column_id), est)

                out_layer.CreateFeature(out_feat)

            # setting this to None closes the object and ensures
            # the data is flushed to the file
            out_data = None

            # zip it up, they can be huge
            zfile_filename = file_ident + '.zip'
            zfile_filepath = os.path.join(temp_path, zfile_filename)
            log.info("Zipping download into %s" % zfile_filepath)

            zfile = zipfile.ZipFile(zfile_filepath, 'w', zipfile.ZIP_DEFLATED)
            for root, dirs, files in os.walk(inner_path):
                for f in files:
                    zfile.write(os.path.join(root, f), os.path.join(file_ident, f))
            zfile.close()

            log.info("Zipped. Reading and streaming.")

            with open(zfile_filepath, 'rb') as f:
                content = f.read()
                return content, zfile_filename, 'application/zip'

        finally:
            shutil.rmtree(temp_path)
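
# The walk-and-write zip above could equivalently use the stdlib's
# shutil.make_archive (a sketch; zip_download_bundle is a hypothetical helper
# assuming the same temp_path/file_ident layout as the method above):
def zip_download_bundle(temp_path, file_ident):
    import os
    import shutil
    # writes <temp_path>/<file_ident>.zip containing the file_ident directory
    return shutil.make_archive(os.path.join(temp_path, file_ident), 'zip',
                               root_dir=temp_path, base_dir=file_ident)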
Example #26
    def __init__(self, wfsUrl, dico_wfs, tipo, txt=''):
        u""" Uses OGR functions to extract basic information about
        geographic Web Feature Services.

        wfsUrl = url of a WFS service
        dico_wfs = dictionary for global information
        dico_fields = dictionary for the fields' information
        li_fields = ordered list of fields
        tipo = format
        txt = dictionary of text in the selected language

        """
        # handling ogr specific exceptions
        ogrerr = OGRErrorHandler()
        errhandler = ogrerr.handler
        gdal.PushErrorHandler(errhandler)
        ogr.UseExceptions()

        # custom settings to enhance querying WFS capabilities for services with a lot of layers
        gdal.SetConfigOption(str('OGR_WFS_LOAD_MULTIPLE_LAYER_DEFN'),
                             str('NO'))
        gdal.SetConfigOption(str('OGR_WFS_BASE_START_INDEX'), str(1))

        # Set config for paging. Works on WFS 2.0 services and WFS 1.0 and 1.1 with some other services.
        # gdal.SetConfigOption(str('OGR_WFS_PAGING_ALLOWED'), str('YES'))
        # gdal.SetConfigOption(str('OGR_WFS_PAGE_SIZE'), str('5'))

        # counting alerts
        self.alert = 0

        # opening the WFS
        try:
            drv_wfs = ogr.GetDriverByName(str('WFS'))
            wfs = drv_wfs.Open(str('WFS:') + str(wfsUrl))
        except Exception:
            self.erratum(dico_wfs, wfsUrl, u'err_corrupt')
            self.alert = self.alert + 1
            return None

        # WFS name and parent folder
        dico_wfs['name'] = wfs.GetName()
        dico_wfs['folder'] = path.dirname(wfs.GetName())
        # layers count and names
        dico_wfs['layers_count'] = wfs.GetLayerCount()
        li_layers_names = []
        li_layers_idx = []
        dico_wfs['layers_names'] = li_layers_names
        dico_wfs['layers_idx'] = li_layers_idx

        # total fields count
        total_fields = 0
        dico_wfs['total_fields'] = total_fields
        # total objects count
        total_objs = 0
        dico_wfs['total_objs'] = total_objs
        # parsing layers
        for layer_idx in range(wfs.GetLayerCount()):
            # dictionary where will be stored informations
            dico_layer = OrderedDict()
            # parent WFS
            dico_layer['wfs_name'] = path.basename(wfs.GetName())
            # getting layer object
            layer = wfs.GetLayerByIndex(layer_idx)
            # layer name
            li_layers_names.append(layer.GetName())
            # layer index
            li_layers_idx.append(layer_idx)
            # getting layer global information
            self.infos_basics(layer, dico_layer, txt)
            # storing layer into the WFS dictionary
            dico_wfs['{0}_{1}'.format(layer_idx,
                                      dico_layer.get('title'))] = dico_layer
            # summing fields number
            total_fields += dico_layer.get(u'num_fields')
            # summing objects number
            total_objs += dico_layer.get(u'num_obj')
            # deleting the dictionary to free space
            del dico_layer
        # storing fields and objects sums
        dico_wfs['total_fields'] = total_fields
        dico_wfs['total_objs'] = total_objs

        # warnings messages
        dico_wfs['err_gdal'] = ogrerr.err_type, ogrerr.err_msg
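
# OGRErrorHandler is not defined in this snippet. A minimal sketch consistent
# with how it is used above: handler() has the (err_class, err_no, err_msg)
# signature that gdal.PushErrorHandler() expects, and err_type/err_msg are
# read back at the end.
class OGRErrorHandler(object):
    def __init__(self):
        self.err_type = gdal.CE_None
        self.err_msg = ''

    def handler(self, err_class, err_no, err_msg):
        # record the latest error so it can be reported afterwards
        self.err_type = err_class
        self.err_msg = err_msg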
Example #27
def get_vector_file(attributes, input_points, poly_or_line, ogr_output,
                    ogr_format):
    """ Builds a spatial layer from the inputs (attributes, points, and a
    polygon/line/point flag) and writes it in the specified format """

    input_points = update_points_crossing_antimeridian(input_points)
    spatialReference = osgeo.osr.SpatialReference()
    spatialReference.ImportFromProj4(
        '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')

    # if no points were passed, there is nothing to build
    if len(input_points) == 0:
        return ()
    try:
        os.remove(ogr_output)
    except OSError:
        pass
    ogr.UseExceptions()

    driver = ogr.GetDriverByName(ogr_format)

    if os.path.exists(ogr_output):
        driver.DeleteDataSource(ogr_output)
    ds = driver.CreateDataSource(ogr_output)

    if poly_or_line == 'polygon':
        geomtype = ogr.wkbPolygon
    elif poly_or_line == 'line':
        geomtype = ogr.wkbLineString
    elif poly_or_line == 'point':
        geomtype = ogr.wkbPoint
    else:
        raise ValueError('poly_or_line must be polygon, line or point')

    if ds is None:
        logging.info("Process could not create file")
        sys.exit(1)
    layer = ds.CreateLayer(attributes['Satellite name'], geom_type=geomtype)

    field_definition = ogr.FieldDefn('Satellite               :',
                                     ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    field_definition = ogr.FieldDefn('Sensor               :', ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    field_definition = ogr.FieldDefn('Orbit height                 :',
                                     ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    layer.CreateField(
        ogr.FieldDefn('Orbit number                 :', ogr.OFTInteger))
    '''
    field_definition = ogr.FieldDefn('Current UTC time             :', ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    field_definition = ogr.FieldDefn('Minutes to horizon           :', ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    '''
    field_definition = ogr.FieldDefn('Acquisition of Signal Local    :',
                                     ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    field_definition = ogr.FieldDefn('Acquisition of Signal UTC    :',
                                     ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    field_definition = ogr.FieldDefn('Loss of Signal UTC           :',
                                     ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    field_definition = ogr.FieldDefn('Transit time                 :',
                                     ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    field_definition = ogr.FieldDefn('Node                         :',
                                     ogr.OFTString)
    field_definition.SetWidth(30)
    layer.CreateField(field_definition)
    feature_definition = layer.GetLayerDefn()
    feature = ogr.Feature(feature_definition)
    feature.SetField('Satellite               :', attributes['Satellite name'])
    feature.SetField('Sensor               :', attributes['Sensor code'])
    feature.SetField('Orbit height                 :',
                     attributes['Orbit height'])
    feature.SetField('Orbit number                 :', attributes['Orbit'])
    '''
    feature.SetField('Current UTC time             :', str(attributes['Current time']))
    feature.SetField('Minutes to horizon           :', attributes['Minutes to horizon'])
    '''
    feature.SetField('Acquisition of Signal Local    :',
                     attributes['Local time'])
    feature.SetField('Acquisition of Signal UTC    :',
                     str(attributes['AOS time']))
    feature.SetField('Loss of Signal UTC           :',
                     str(attributes['LOS time']))
    feature.SetField('Transit time                 :',
                     str(attributes['Transit time']))
    feature.SetField('Node                         :', attributes['Node'])

    if poly_or_line == 'point':
        point = ogr.Geometry(ogr.wkbPoint)
        for x in input_points:
            point.AddPoint(x['lon2'], x['lat2'], x['alt2'])

        feature.SetGeometry(point)
        layer.CreateFeature(feature)

        point.Destroy()

    if poly_or_line == 'line':
        line = ogr.Geometry(type=ogr.wkbLineString)
        for x in input_points:

            line.AddPoint(x['lon2'], x['lat2'], x['alt2'])

        feature.SetGeometry(line)
        layer.CreateFeature(feature)

        line.Destroy()

    if poly_or_line == 'polygon':

        ring = ogr.Geometry(ogr.wkbLinearRing)

        #input_points = update_points_crossing_antimeridian(input_points, ogr_format, 'antimeridian.geojson')
        for x in input_points:

            ring.AddPoint(x['lon2'], x['lat2'])

        poly = ogr.Geometry(ogr.wkbPolygon)
        poly.AddGeometry(ring)

        feature.SetGeometry(poly)

        layer.CreateFeature(feature)

        ring.Destroy()
        poly.Destroy()

    feature.Destroy()

    ds.Destroy()
    # for KML-style markup in the output: force absolute altitude mode and adjust track colours
    if ogr_format == "GeoJSON":
        if poly_or_line == 'line':
            replace_string_in_file(
                ogr_output, '<LineString>',
                '<LineString><altitudeMode>absolute</altitudeMode>')
            replace_string_in_file(ogr_output, 'ff0000ff', 'ffffffff')
        if poly_or_line == 'point':
            replace_string_in_file(
                ogr_output, '<Point>',
                '<Point><altitudeMode>absolute</altitudeMode>')
        if poly_or_line == 'polygon':
            replace_string_in_file(
                ogr_output, '<PolyStyle><fill>0</fill>',
                '<PolyStyle><color>7f0000ff</color><fill>1</fill>')

    return ()
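
# The repeated FieldDefn/SetWidth/CreateField triples above could be collapsed
# into a small loop (a sketch; create_string_fields is a hypothetical helper):
def create_string_fields(layer, field_names, width=30):
    # create one fixed-width string field per name
    for name in field_names:
        field_definition = ogr.FieldDefn(name, ogr.OFTString)
        field_definition.SetWidth(width)
        layer.CreateField(field_definition)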
Example #28
from PyQt5.QtCore import Qt, QPoint, QPointF, QLine, QLineF, QRect, QRectF, QTime, qrand
from PyQt5.QtGui import QImage, QPixmap, QPainter, QBrush, QPen, QColor, QRadialGradient, QPainterPath, QPicture, \
    QPolygonF, QPolygon
from PyQt5.QtWidgets import QAction, QWidget, QPushButton, QApplication, QMessageBox, QFileDialog, QGraphicsScene, \
    QGraphicsPixmapItem, QGraphicsRectItem, QMainWindow, QGraphicsView, QGraphicsItem, QSizePolicy
# Import the required PyQt5 modules via from…import… to keep the script's dependencies light.
from osgeo import gdal, ogr  # needed below; this import is missing from the original snippet

from GUI.Main import Ui_MainWindow as GUI0  # import the UI script
from GUI.SFC1 import sFC
# from GUI.ShowGraphic import myGraphicsScene,myGraphicsView
from GUI.WindowMove import windowMove
from GUI.Size import size

gdal.UseExceptions()  # raise gdal exceptions
gdal.AllRegister()  # the gdal library must register its drivers before use
ogr.UseExceptions()  # raise ogr exceptions
ogr.RegisterAll()  # the ogr library must register its drivers before use
wholefiletype = ['.jpg', '.jpeg', '.JPG', '.JPEG', '.tif',
                 '.TIF']  # list of accepted image formats
currentfiletype = ['.shp', '.txt']  # list of accepted vector file formats


class myGraphicsView(QGraphicsView):
    def __init__(self, parent=None):
        super(myGraphicsView, self).__init__(parent)
        self.setCacheMode(QGraphicsView.CacheBackground)
        self.setViewportUpdateMode(QGraphicsView.BoundingRectViewportUpdate)
        self.setRenderHint(QPainter.Antialiasing)
        self.setTransformationAnchor(QGraphicsView.AnchorUnderMouse)
        self.setResizeAnchor(QGraphicsView.AnchorViewCenter)
        self.scale(1, 1)
Example #29
# -*- coding: utf-8 -*-

import os
from PyQt4.QtCore import *
from qgis.core import *
from osgeo import ogr, osr
from ..calc_utils import force_gui_update, findMapNo, mapNoToCrs

ogr.UseExceptions()


class DxfLoader():
    iface = None
    parent = None

    layerCountDict = None
    layerListDict = None

    def __init__(self, iface, parent):
        self.iface = iface
        self.parent = parent
        self.progressMain = parent.prgMain
        self.progressSub = parent.prgSub
        self.lblStatus = parent.lblStatus

        self.info = parent.info
        self.error = parent.error
        self.debug = parent.debug
        self.comment = parent.comment
        self.progText = parent.progText
        self.alert = parent.alert
Example #30
def main():

    parser = getparser()
    args = parser.parse_args()

    in_fn = args.in_fn
    cell_size = args.cell_size
    fishnet_fn = args.out_fishnet_fn

    root = os.path.split(in_fn)[0]
    os.chdir(root)

    if args.llon is None:
        sys.exit(
            "Enter in correct geographic bounds for heatmap: llon rlon blat ulat"
        )

    outIntersect = in_fn.replace('.shp', '_INTERSECT_' + args.UID_index)

    print "\t[1] CREATE: fishnet (i.e., vector grid), and reproject to a srs that matches that of the footprint shp"
    create_start_time = time.time()

    # Get the proj4 definition of in_fn
    cmdStr = "gdalsrsinfo -o proj4 {}".format(in_fn)
    Cmd = subp.Popen(cmdStr, stdout=subp.PIPE, shell=True)
    proj4_str, err = Cmd.communicate()
    proj4_str = proj4_str.decode('utf-8').strip()  # bytes -> str for the ogr2ogr call below

    fishnet_path, fishnet_name = os.path.split(fishnet_fn)

    os.system("fishnet.py {} {} {} {} {} {} {}".format(fishnet_fn, args.llon,
                                                       args.rlon, args.blat,
                                                       args.ulat, cell_size,
                                                       cell_size))

    # reproject fishnet to match footprint prj
    fishnet_fn_repro = fishnet_fn.replace('.shp', '_reprj.shp')
    cmdStr = "ogr2ogr {} {} -f 'ESRI Shapefile' -overwrite -t_srs {}".format(
        fishnet_fn_repro, fishnet_fn, proj4_str)

    Cmd = subp.Popen(cmdStr, stdout=subp.PIPE, shell=True)
    stdOut, err = Cmd.communicate()
    create_end_time = time.time()
    duration = (create_end_time - create_start_time) / 60
    print("\t\tElapsed CREATE time was %g minutes." % duration)

    print "\t[2] INTERSECT: 2 shps; fishnet and footprints"

    outIntersect_fn = outIntersect + '.shp'

    try:
        if not os.path.isfile(outIntersect_fn):

            intersect_start_time = time.time()
            ## https://gis.stackexchange.com/questions/119374/intersect-shapefiles-using-shapely
            ogr.UseExceptions()
            ogr_ds = ogr.Open(root, True)  # Windows: r'C:\path\to\data'
            SQL = """\
                SELECT ST_Intersection(A.geometry, B.geometry) AS geometry, A.*, B.*
                FROM {} A, {} B
                WHERE ST_Intersects(A.geometry, B.geometry);
            """.format(
                basename(in_fn).split('.')[0],
                basename(fishnet_fn_repro).split('.')[0])

            layer = ogr_ds.ExecuteSQL(SQL, dialect='SQLITE')
            # copy result back to datasource as a new shapefile
            layer2 = ogr_ds.CopyLayer(layer, basename(outIntersect))
            # save, close
            layer = layer2 = ogr_ds = None

            intersect_end_time = time.time()
            duration = (intersect_end_time - intersect_start_time) / 60
            print("\t\tElapsed INTERSECT time was %g minutes." % duration)

        else:
            print("Intersection file already exists: %s" % outIntersect_fn)

    except Exception as e:
        print("\n\t!!!--- Problem with the intersection: ")
        print("\n\t", e)