Example #1
 def test_feature_transform(self):
     feature_polygon = {
         'geometry': {
             'coordinates':
             [[[5.02, 45.319], [5.201, 45.217], [5.134, 45.074],
               [5.494, 45.071], [5.464, 44.793], [5.825, 44.7],
               [5.641, 44.651], [5.597, 44.543], [5.664, 44.501],
               [5.418, 44.424], [5.631, 44.331], [5.678, 44.146],
               [5.454, 44.119], [5.15, 44.235], [5.166, 44.314],
               [4.825, 44.228], [4.65, 44.329], [4.886, 44.936],
               [4.8, 45.298], [5.02, 45.319]],
              [[4.97, 44.429], [4.889, 44.304], [5.07, 44.376],
               [4.97, 44.429]]],
             'type':
             'Polygon'
         },
         'properties': {
             'name': 'atlantis'
         },
         'type': 'Feature'
     }
     crs_proj = "+proj=lcc +lat_1=17.5 +lat_2=29.5 +lat_0=12 +lon_0=-102 +x_0=2500000 +y_0=0 +a=6378137 +b=6378136.027241431 +units=m +no_defs"
     crs_geo = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
     # Perform round trip transformation
     feature_proj = feature_transform(feature_polygon, crs_proj, crs_geo)
     feature_geo = feature_transform(feature_proj, crs_geo, crs_proj)
     # Compare polygon
     np.testing.assert_almost_equal(
         feature_polygon['geometry']['coordinates'][0],
         feature_geo['geometry']['coordinates'][0])
     # Compare hole
     np.testing.assert_almost_equal(
         feature_polygon['geometry']['coordinates'][1],
         feature_geo['geometry']['coordinates'][1])
Example #2
    def polygonize(self, crs_out="+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"):
        """Transform the raster result of a segmentation to a feature collection

        Args:
            crs_out (proj4): Coordinate reference system of the feature collection
                produced. Defaults to longlat; can be None if no reprojection is needed.
        """
        if self.segments_array is None:
            raise ValueError("self.segments_array is None, you must run segment before this method")
        # Use rasterio.features.shapes to generate a geometries collection from the
        # segmented raster
        geom_collection = features.shapes(self.segments_array.astype(np.uint16),
                                          transform=self.affine)
        # Make it a valid feature collection
        def to_feature(feature):
            """Transforms a result of rasterio.features.shapes into a geojson feature"""
            fc_out = {
                "type": "Feature",
                "geometry": {
                    "type": feature[0]['type'],
                    "coordinates": feature[0]['coordinates']
                },
                "properties": {
                    "id": feature[1]
                }
            }
            return fc_out
        fc_out = (to_feature(x) for x in geom_collection)
        if crs_out is not None:
            fc_out = (feature_transform(x, crs_out=crs_out, crs_in=self.crs) for x in fc_out)
        self.fc = fc_out
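The enclosing class is not shown in this snippet, so the call pattern below is only a sketch; the class name Segmentation and the segment() step are assumptions inferred from the attributes (segments_array, affine, crs) used above.

seg = Segmentation(...)   # assumed constructor of the (unshown) enclosing class
seg.segment()             # assumed step that populates seg.segments_array
seg.polygonize()          # reprojects the features to longlat by default
for feat in seg.fc:       # seg.fc is a lazy generator of geojson-like features
    print(feat['properties']['id'])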
Example #3
def predict_object_to_feature(x, crs=None):
    """Convert a PredictObject to a feature

    This function is typically called in a list comprehension iterating over a
    Django QuerySet. The resulting feature has a single property corresponding to
    the database object id.

    Args:
        x (PredictObject): Object extracted from the database
        crs (str): proj4 string to reproject to. Can be extracted from a geoarray using
            ``geoarray.crs._crs.ExportToProj4()``. Defaults to None, in which case no
            reprojection is performed. Data in the database must be stored in the 4326 crs.

    Returns:
        dict: A geojson like feature
    """
    geometry = json.loads(x.the_geom.geojson)
    feature = {
        'type': 'Feature',
        'geometry': geometry,
        'properties': {
            'id': x.id
        }
    }
    if crs is not None:
        feature = feature_transform(feature, crs)
    return feature
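As the docstring notes, this helper is typically mapped over a Django QuerySet; a short sketch follows, in which the queryset and the geoarray variable are assumptions.

# Sketch: build a feature collection from PredictObject rows
qs = PredictObject.objects.all()           # assumed queryset
crs = geoarray.crs._crs.ExportToProj4()    # as suggested by the docstring, or None
fc = [predict_object_to_feature(x, crs=crs) for x in qs]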
Example #4
    def handle(self, **options):
        input_file = options['input_file']
        year = options['year']
        scheme = options['scheme']
        field = options['field']
        name = options['name']
        # Create ValidClassification objects list
        # Push it to database

        # Read file and optionally reproject the features to longlat
        with fiona.open(input_file) as src:
            p = Proj(src.crs)
            # Here we assume that geographic coordinates are automatically 4326 (not quite true)
            if p.is_latlong():
                fc = list(src)
            else:
                crs_str = to_string(src.crs)
                fc = [
                    feature_transform(x,
                                      crs_out='+proj=longlat',
                                      crs_in=crs_str) for x in src
                ]

        # Write features to ValidObject table
        def valid_obj_builder(x):
            """Build individual ValidObjects
            """
            geom = GEOSGeometry(json.dumps(x['geometry']))
            obj = ValidObject(the_geom=geom)
            return obj

        obj_list = [valid_obj_builder(x) for x in fc]
        ValidObject.objects.bulk_create(obj_list)

        # Get list of unique tags
        unique_numeric_codes = list(set([x['properties'][field] for x in fc]))

        # Update Tag table using get or create
        def make_tag_tuple(x):
            obj, _ = Tag.objects.get_or_create(numeric_code=x, scheme=scheme)
            return (x, obj)

        tag_dict = dict([make_tag_tuple(x) for x in unique_numeric_codes])

        # Build validClassification object list (valid_tag, valid_object, valid_set)
        def valid_class_obj_builder(x):
            """x is a tuple (ValidObject, feature)"""
            tag = tag_dict[x[1]['properties'][field]]
            obj = ValidClassification(valid_tag=tag,
                                      valid_object=x[0],
                                      valid_set=name,
                                      interpretation_year=year)
            return obj

        valid_class_obj_list = [
            valid_class_obj_builder(x) for x in zip(obj_list, fc)
        ]

        ValidClassification.objects.bulk_create(valid_class_obj_list)
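This handle method belongs to a Django management command whose name and argument parser are not shown, so the invocation below is illustrative only; the command name is made up and the option keys simply mirror the ones read from options above.

from django.core.management import call_command

# 'ingest_validation' is a hypothetical command name
call_command('ingest_validation',
             input_file='training_polygons.shp',
             year=2017,
             scheme='madmex',
             field='code',
             name='validation_2017')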
Example #5
    def handle(self, *args, **options):
        name = options['name']
        region = options['region']
        filename = options['filename']
        layer = options['layer']
        driver = options['driver']
        proj4 = options['proj4']

        # Define function to convert query set object to feature
        def to_fc(x):
            geometry = json.loads(x.predict_object.the_geom.geojson)
            feature = {
                'type': 'Feature',
                'geometry': geometry,
                'properties': {
                    'class': x.tag.value,
                    'code': x.tag.numeric_code
                }
            }
            return feature

        # Query country or region contour
        try:
            region = Country.objects.get(name=region).the_geom
        except Country.DoesNotExist:
            region = Region.objects.get(name=region).the_geom

        # Query objects
        logger.info('Querying the database for intersecting records')
        qs = PredictClassification.objects.filter(name=name)
        qs = qs.filter(
            predict_object__the_geom__intersects=region).prefetch_related(
                'predict_object', 'tag')

        # Convert query set to feature collection generator
        logger.info('Generating feature collection')
        fc = (to_fc(x) for x in qs)
        crs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
        if proj4 is not None:
            fc = (feature_transform(x, crs_out=proj4) for x in fc)
            crs = proj4

        write_to_file(fc, filename, layer=layer, driver=driver, crs=crs)
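write_to_file is a project helper that is not shown here; below is a minimal sketch of what it could do with fiona, in which the schema, the property types, and the default driver are assumptions.

import fiona
from fiona.crs import from_string

def write_to_file_sketch(fc, filename, layer=None, driver='GPKG', crs=None):
    # Assumed schema: Polygon geometries with 'class' (str) and 'code' (int) properties
    schema = {'geometry': 'Polygon',
              'properties': {'class': 'str', 'code': 'int'}}
    with fiona.open(filename, 'w', driver=driver, layer=layer, schema=schema,
                    crs=from_string(crs) if crs else None) as dst:
        for feat in fc:
            dst.write(feat)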
Example #6
    def handle(self, *args, **options):
        input_file = options['input_file']
        country = options['country']
        resolution = options['resolution']
        tile_size = options['tile_size']
        bucket = options['bucket']
        path = options['path']
        proj = options['proj']
        field = options['field']
        prefix = options['prefix']

        # Read the vector file
        with fiona.open(input_file) as src:
            crs = to_string(src.crs)
            fc = list(src)

        # Optionally reproject the feature collection to a specified CRS
        if proj is not None:
            fc = [feature_transform(x, crs_out=proj, crs_in=crs) for x in fc]
            crs = proj

        # Build the shapes iterator using the field passed as argument (can be None,
        # in which case binary rasterization is performed)
        if field is not None:
            shapes_iterator = [(x['geometry'], x['properties'][field])
                               for x in fc]
        else:
            shapes_iterator = [(x['geometry'], 1) for x in fc]

        # Either retrieve extent from file or from an ingested country geometry
        if country is None:
            bbox_list = (get_geom_bbox(x['geometry']) for x in fc)
            xmin_list, ymin_list, xmax_list, ymax_list = zip(*bbox_list)
            xmin = min(xmin_list)
            ymax = max(ymax_list)
            xmax = max(xmax_list)
            ymin = min(ymin_list)
            extent = (xmin, ymin, xmax, ymax)
        else:
            query = 'SELECT st_extent(st_transform(the_geom, %s)) FROM public.madmex_country WHERE name = %s;'
            with connection.cursor() as c:
                c.execute(query, [crs, country.upper()])
                bbox = c.fetchone()
            extent = parsers.postgis_box_parser(bbox[0])

        # Generate the tiles and write them either to filesystem or to s3 bucket
        grid_generator = grid_gen(extent, resolution, tile_size, prefix)
        # Generate the rasters
        for shape, aff, filename in grid_generator:
            arr = features.rasterize(shapes_iterator,
                                     out_shape=shape,
                                     transform=aff,
                                     dtype=np.uint8)
            meta = {
                'driver': 'GTiff',
                'height': shape[0],
                'width': shape[1],
                'count': 1,
                'transform': aff,
                'dtype': rasterio.uint8,
                'crs': crs,
                'compress': 'lzw'
            }
            fp = os.path.join(path, filename)
            if bucket is not None:
                s3.write_raster(bucket, fp, arr, **meta)
            else:
                with rasterio.open(fp, 'w', **meta) as dst:
                    dst.write(arr, 1)
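A quick way to sanity-check one of the generated tiles is to read it back with rasterio; the path below is a guess, since the filename pattern comes from grid_gen, which is not shown.

import rasterio

with rasterio.open('/tmp/tiles/example_tile.tif') as src:   # hypothetical path
    arr = src.read(1)
    print(src.crs, src.res, arr.shape, arr.max())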