Example #1
def get_union(geojson):
    """ Returns a geojson geometry that is the union of all features in a geojson feature collection """
    shapes = []
    for feature in geojson['features']:
        if feature['geometry']['type'] not in ['Polygon', 'MultiPolygon']:
            continue

        s = shape(feature['geometry'])
        if s and s.is_valid:
            #get rid of holes
            if type(s) in (MultiPolygon, GeometryCollection):
                hulls = [Polygon(r.exterior) for r in s.geoms]
                hull = MultiPolygon(hulls)
            else:
                hull = Polygon(s.exterior)

            #simplify so calculating union doesn't take forever
            simplified = hull.simplify(0.01, preserve_topology=True)
            if simplified.is_valid:
                shapes.append(simplified)
            else:
                shapes.append(hull)

    try:
        result = cascaded_union(shapes)
    except Exception:
        #workaround for a GEOS bug with cascaded_union sometimes failing
        logging.error("cascaded_union failed, falling back to union")
        result = shapes.pop()
        for s in shapes:
            result = result.union(s)
    return result
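A minimal usage sketch for get_union, assuming the imports the function relies on (shape, Polygon, MultiPolygon, GeometryCollection from shapely.geometry; cascaded_union from shapely.ops; logging); the feature collection here is illustrative:

from shapely.geometry import mapping

# two overlapping unit squares should union into a single polygon
fc = {"type": "FeatureCollection", "features": [
    {"type": "Feature", "properties": {}, "geometry": {
        "type": "Polygon",
        "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]]}},
    {"type": "Feature", "properties": {}, "geometry": {
        "type": "Polygon",
        "coordinates": [[[0.5, 0], [1.5, 0], [1.5, 1], [0.5, 1], [0.5, 0]]]}},
]}
print(mapping(get_union(fc))["type"])  # Polygon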
Example #2
    def pack_shape_scale_binary(self, plot=False):
        # TODO: select one of the base_shapes randomly
        # TODO: use bounding circle for first pass
        #       https://www.nayuki.io/res/smallest-enclosing-circle/smallestenclosingcircle.py
        center = self.random_point()
        base = self.base_shapes[0]
        ph = np.random.random() * 2 * np.pi
        R = np.array([[np.cos(ph), -np.sin(ph)], [np.sin(ph), np.cos(ph)]])
        rbase = [np.asarray(b) @ R for b in base]

        if plot:
            self.plot_border()
            plt.plot(center[:, 0], center[:, 1], 'k.')
            hc, = plt.plot([], [], 'k-')

        # binary search on scale to find best fit
        thresh = .001
        lo = 0
        r = .001
        hi = np.inf
        while True:
            transformed = [[(r * b + center).tolist(), []] for b in rbase]

            # a = [(0, 0), (0, 1), (1, 1), (1, 0), (0, 0)]
            # b = [(1, 1), (1, 2), (2, 2), (2, 1), (1, 1)]
            # mp = MultiPolygon([[a, []], [b, []]])
            p = MultiPolygon(transformed)
            intersected = False
            for shape in self.shapes:
                if p.intersects(shape):
                    intersected = True
                    break

            if intersected:
                # too big, reduce size and adjust range
                hi = r
                r = (lo + r) / 2
            else:
                # too small, increase size
                lo = r
                if hi == np.inf:
                    # keep doubling while we haven't yet hit a neighboring shape
                    r = 2*r
                else:
                    r = (r + hi) / 2

            if plot:
                cc = arc(center, r)
                hc.set_xdata(cc[:, 0])
                hc.set_ydata(cc[:, 1])
                plt.show(block=False)
                # plt.pause(.01)

            if hi - lo <= thresh:
                break

        # print('  radius = %f' % r)

        self.shapes.append(p)
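The loop above depends on class state (self.shapes, self.base_shapes, the arc helper). A self-contained sketch of the same grow-then-bisect idea, using shapely.affinity.scale on illustrative geometries:

from shapely.affinity import scale
from shapely.geometry import MultiPolygon, Point

def largest_nonintersecting_scale(base, obstacles, thresh=1e-3):
    # binary search on the scale factor: double until we hit an obstacle,
    # then bisect the bracketed interval down to the tolerance
    lo, r, hi = 0.0, 1e-3, float('inf')
    while hi - lo > thresh:
        candidate = scale(base, xfact=r, yfact=r, origin='centroid')
        if candidate.intersects(obstacles):
            hi = r
            r = (lo + r) / 2
        else:
            lo = r
            r = 2 * r if hi == float('inf') else (r + hi) / 2
    return lo

# a unit disc at the origin can grow to a scale of about 4 before it
# touches a unit-disc obstacle centered at (5, 0)
obstacles = MultiPolygon([Point(5, 0).buffer(1)])
print(largest_nonintersecting_scale(Point(0, 0).buffer(1), obstacles))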
Example #3
    def test_get_field_write_target(self):
        p1 = 'Polygon ((-116.94238466549290933 52.12861711455555991, -82.00526805089285176 61.59075286434307372, ' \
             '-59.92695130138864101 31.0207758265680269, -107.72286778108455962 22.0438778075388484, ' \
             '-122.76523743459291893 37.08624746104720771, -116.94238466549290933 52.12861711455555991))'
        p2 = 'Polygon ((-63.08099655131782413 21.31602121140134898, -42.70101185946779765 9.42769680782217279, ' \
             '-65.99242293586783603 9.912934538580501, -63.08099655131782413 21.31602121140134898))'
        p1 = wkt.loads(p1)
        p2 = wkt.loads(p2)

        mp1 = MultiPolygon([p1, p2])
        mp2 = mp1.buffer(0.1)
        geoms = [mp1, mp2]
        gvar = GeometryVariable(name='gc', value=geoms, dimensions='elementCount')
        gc = gvar.convert_to(node_dim_name='n_node')
        field = gc.parent
        self.assertEqual(field.grid.node_dim.name, 'n_node')

        actual = DriverESMFUnstruct._get_field_write_target_(field)
        self.assertEqual(field.grid.node_dim.name, 'n_node')
        self.assertNotEqual(id(field), id(actual))
        self.assertEqual(actual['numElementConn'].dtype, np.int32)
        self.assertEqual(actual['elementConn'].dtype, np.int32)
        self.assertNotIn(field.grid.cindex.name, actual)
        self.assertEqual(actual['nodeCoords'].dimensions[0].name, 'nodeCount')

        path = self.get_temporary_file_path('foo.nc')
        actual.write(path)

        try:
            import ESMF
        except ImportError:
            pass
        else:
            _ = ESMF.Mesh(filename=path, filetype=ESMF.FileFormat.ESMFMESH)

        path2 = self.get_temporary_file_path('foo2.nc')
        driver = DriverKey.NETCDF_ESMF_UNSTRUCT
        field.write(path2, driver=driver)

        # Test the polygons are equivalent when read from the ESMF unstructured file.
        rd = ocgis.RequestDataset(path2, driver=driver)
        self.assertEqual(rd.driver.key, driver)
        efield = rd.get()
        self.assertEqual(efield.driver.key, driver)
        grid_actual = efield.grid
        self.assertEqual(efield.driver.key, driver)
        self.assertEqual(grid_actual.parent.driver.key, driver)
        self.assertEqual(grid_actual.x.ndim, 1)

        for g in grid_actual.archetype.iter_geometries():
            self.assertPolygonSimilar(g[1], geoms[g[0]])

        ngv = grid_actual.archetype.convert_to()
        self.assertIsInstance(ngv, GeometryVariable)
Example #4
def geometry_from_feature_collection(feature_collection):
    polygons = []
    for feature in feature_collection['features']:
        geometry = feature['geometry']
        if geometry['type'] == 'Polygon':
            polygons.append(asShape(geometry))

    if polygons:
        mp = MultiPolygon(polygons)
        if not mp.is_valid:
            mp = mp.buffer(0)
        return mp
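The buffer(0) call above is the common trick for repairing an invalid MultiPolygon. A minimal sketch of it on a self-intersecting ring; note the caveat that buffer(0) can drop one lobe of a bowtie depending on ring winding, whereas shapely.validation.make_valid (Shapely 1.8+) keeps everything:

from shapely.geometry import Polygon
from shapely.validation import make_valid  # Shapely >= 1.8

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])  # self-intersecting ring
print(bowtie.is_valid)              # False
print(bowtie.buffer(0).is_valid)    # True (but possibly only part of the shape)
print(make_valid(bowtie).is_valid)  # True, covering both lobes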
Example #5
 def __init__(self, input_file, factor=1):
     self.plot_obstacles_polygon = []
     self.obs_list = []
     self.obs_polygon = MultiPolygon()       # Shapely object to store all polygons
     self.initial_state, self.goal_state = [], []
     self.resolution = 0                     # Dimension of the plane.
     self.read_env_from_file(input_file)
Example #6
def convert_to_multipolygon(geoms):
    from shapely.geometry import MultiPolygon

    rings = []
    for geom in geoms:
        if isinstance(geom, MultiPolygon):
            rings = rings + list(geom.geoms)
        else:
            rings = rings + [geom]

    geometry = MultiPolygon(rings)

    # Downsample 3D -> 2D
    wkt2d = geometry.to_wkt()
    geom2d = shapely.wkt.loads(wkt2d)

    return geom2d
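The WKT round trip above relies on the WKT writer emitting 2D coordinates, which older Shapely/GEOS builds did by default. In Shapely 2.x the same step is available directly; a hedged alternative, not the original author's code:

import shapely  # Shapely >= 2.0
from shapely.geometry import Polygon

poly3d = Polygon([(0, 0, 1), (1, 0, 2), (1, 1, 3)])
print(shapely.force_2d(poly3d).wkt)  # POLYGON ((0 0, 1 0, 1 1, 0 0))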
Example #7
def mask_to_polygons(mask, epsilon=5, min_area=1.):
    # __author__ = Konstantin Lopuhin
    # https://www.kaggle.com/lopuhin/dstl-satellite-imagery-feature-detection/full-pipeline-demo-poly-pixels-ml-poly

    # first, find contours with cv2: it's much faster than shapely
    threshold_mask = ((mask == 1) * 255).astype(np.uint8)

    # OpenCV 3 also returns the image:
    # image, contours, hierarchy = cv2.findContours(threshold_mask, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_TC89_KCOS)
    contours, hierarchy = cv2.findContours(threshold_mask, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_TC89_KCOS)

    # create approximate contours to have reasonable submission size
    approx_contours = [cv2.approxPolyDP(cnt, epsilon, True)
                       for cnt in contours]
    if not contours:
        return MultiPolygon()
    # now messy stuff to associate parent and child contours
    cnt_children = defaultdict(list)
    child_contours = set()
    assert hierarchy.shape[0] == 1
    # http://docs.opencv.org/3.1.0/d9/d8b/tutorial_py_contours_hierarchy.html
    for idx, (_, _, _, parent_idx) in enumerate(hierarchy[0]):
        if parent_idx != -1:
            child_contours.add(idx)
            cnt_children[parent_idx].append(approx_contours[idx])
    # create actual polygons filtering by area (removes artifacts)
    all_polygons = []
    for idx, cnt in enumerate(approx_contours):
        if idx not in child_contours and cv2.contourArea(cnt) >= min_area:
            assert cnt.shape[1] == 1
            poly = Polygon(
                shell=cnt[:, 0, :],
                holes=[c[:, 0, :] for c in cnt_children.get(idx, [])
                       if cv2.contourArea(c) >= min_area])
            all_polygons.append(poly)
    # approximating polygons might have created invalid ones, fix them
    all_polygons = MultiPolygon(all_polygons)
    if not all_polygons.is_valid:
        all_polygons = all_polygons.buffer(0)
        # Sometimes buffer() converts a simple MultiPolygon to just a Polygon;
        # we need to keep it a Multi throughout
        if all_polygons.geom_type == 'Polygon':
            all_polygons = MultiPolygon([all_polygons])
    return all_polygons
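A minimal usage sketch for mask_to_polygons (assumes the imports the function needs: cv2, numpy, collections.defaultdict, and shapely's Polygon/MultiPolygon); the mask is illustrative:

import numpy as np

mask = np.zeros((100, 100), dtype=np.uint8)
mask[20:80, 30:90] = 1  # one filled rectangle
polys = mask_to_polygons(mask, epsilon=1, min_area=10.0)
print(len(polys.geoms), polys.area)  # 1 polygon, area close to the rectangle's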
Example #8
 def read_env_from_file(self, input_file):
     # Read json input
     try:
         print(input_file)
         with open(input_file, mode='r', encoding='utf-8') as a_file:
             environment = json.loads(a_file.read())
     except FileNotFoundError as fl:
         print("File not found for JSON ", fl)
         exit(1)
     except ValueError:
         print("Invalid JSON")
         exit(1)
     except Exception:
         print("Unable to process input file")
         exit(1)
     try:
         # Making sure the required entities are defined in the input json file.
         for key in ('resolution', 'obstacles', 'initial_state', 'goal_state'):
             environment[key]
     except KeyError:
         print("Invalid Environment definition")
         exit(1)
     self.initial_state, self.goal_state = environment['initial_state'], environment['goal_state']
     self.resolution = environment['resolution']
     temp_polygon_list = []
     for obs in environment['obstacles']:
         if not (obs.get('shape') and obs.get('property') and obs['property'].get('vertices')):
             print("Shape element not present for the obstacles")
             continue
         if obs['shape'] == 'polygon':
             # print("Polygon with vertices %s" %(np.array(obs['property']['vertices'])/100))
             polygon = mPolygon(np.array(obs['property']['vertices']))
             temp_polygon_list.append(Polygon(obs['property']['vertices']))
             self.plot_obstacles_polygon.append(polygon)
             self.obs_list.append(obs['property']['vertices'])
         else:
             print("Undefined shape")
             break
     self.obs_polygon = MultiPolygon(temp_polygon_list)
Example #9
    def render(self,
               nb_class=8,
               disc_func=None,
               user_defined_breaks=None,
               output="GeoJSON",
               new_mask=False):
        """
        Parameters
        ----------
        nb_class : int, optionnal
            The number of class (default: 8).
        disc_func : str, optionnal
            The kind of data classification to be used (to be choosed in
            "equal_interval", "jenks", "percentiles, "head_tail_breaks"
            and "prog_geom"), default: None.
        user_defined_breaks : list or tuple, optionnal
            A list of ordered break to use to construct the contours
            (override `nb_class` and `disc_func` values if any)
            (default: None).
        output : string, optionnal
            The type of output expected (not case-sensitive)
            in {"GeoJSON", "GeoDataFrame"} (default: "GeoJSON").
        new_mask : str, optionnal
            Use a new mask by giving the path to the file (Polygons only)
            to use as clipping mask, can also be directly a GeoDataFrame
            (default: False).

        Returns
        -------
        smoothed_result : bytes or GeoDataFrame
            The result, dumped as GeoJSON (utf-8 encoded) or as a GeoDataFrame.
        """
        if disc_func and 'jenks' in disc_func and not jenks_breaks:
            raise ValueError(
                "Missing jenkspy package - could not use jenks breaks")

        zi = self.zi

        if isinstance(new_mask, bool) or new_mask is None:
            if not self.use_mask:
                self.use_mask = False
                self.mask = None
        else:
            self.open_mask(new_mask, None)

        # We want levels with the first break value as the minimum of the
        # interpolated values and the last break value as the maximum of these
        # values:
        if user_defined_breaks:
            levels = user_defined_breaks
            if levels[-1] < np.nanmax(zi):
                levels = levels + [np.nanmax(zi)]
            if levels[0] > np.nanmin(zi):
                levels = [np.nanmin(zi)] + levels
        else:
            levels = self.define_levels(nb_class, disc_func)

        # Ensure that the levels are unique/increasing
        # to avoid errors from `contourf`:
        s_levels = set(levels)
        if len(s_levels) != len(levels):
            levels = list(s_levels)
        levels.sort()

        try:
            collec_poly = contourf(self.XI,
                                   self.YI,
                                   zi.reshape(tuple(reversed(self.shape))).T,
                                   levels,
                                   vmax=abs(np.nanmax(zi)),
                                   vmin=-abs(np.nanmin(zi)))
        # Retry without setting the levels:
        except ValueError:
            collec_poly = contourf(self.XI,
                                   self.YI,
                                   zi.reshape(tuple(reversed(self.shape))).T,
                                   vmax=abs(np.nanmax(zi)),
                                   vmin=-abs(np.nanmin(zi)))

        # Fetch the levels returned by contourf:
        levels = collec_poly.levels
        # Set the maximum value at the maximum value of the interpolated values:
        levels[-1] = np.nanmax(zi)
        # Transform contourf contours into a GeoDataFrame of (Multi)Polygons:
        res = isopoly_to_gdf(collec_poly, levels=levels[1:], field_name="max")

        if self.longlat:

            def f(x, y, z=None):
                return (x / 0.017453292519943295, y / 0.017453292519943295)

            res.geometry = [transform(f, g) for g in res.geometry]

        res.crs = self.proj_to_use
        # Set the min/max/center values of each class as properties
        # of this contour layer:
        res["min"] = [np.nanmin(zi)] + res["max"][0:len(res) - 1].tolist()
        res["center"] = (res["min"] + res["max"]) / 2

        # Compute the intersection between the contour layer and the mask layer:
        ix_max_ft = len(res) - 1
        if self.use_mask:
            res.loc[0:ix_max_ft,
                    "geometry"] = res.geometry.buffer(0).intersection(
                        unary_union(self.mask.geometry.buffer(0)))

        # res.loc[0:ix_max_ft, "geometry"] = res.geometry.buffer(
        #     0).intersection(self.poly_max_extend.buffer(-0.1))

        # Repair geometries if necessary :
        if not all(t in ("MultiPolygon", "Polygon") for t in res.geom_type):
            res.loc[0:ix_max_ft, "geometry"] = \
                [geom if geom.geom_type in ("Polygon", "MultiPolygon")
                 else MultiPolygon(
                     [j for j in geom.geoms if j.geom_type in ('Polygon', 'MultiPolygon')]
                     )
                 for geom in res.geometry]

        if "geojson" in output.lower():
            return res.to_crs("EPSG:4326").to_json().encode()
        else:
            return res
Example #10
def txt2csv(pattern, location):
    query = pattern  #'bf'
    labelFolder = 'D:/ICT/Griffith/CEP/bushfire/datasets/produce/{}'.format(
        location)
    if not os.path.exists(labelFolder):
        os.makedirs(labelFolder)
    with open("../bin/results/{}/result_{}.txt".format(location, pattern),
              "a",
              encoding='utf-8') as fw:
        fw.write('\n\n\n********   Event Pattern    ********\n')
        with open("../bin/patterns/{}.eql".format(query)) as bf:
            fw.writelines(bf.readlines())

    times = []
    boundaries = []
    preLine = ''
    multi = []
    with open("../bin/results/{}/result_{}.txt".format(location, pattern),
              "r",
              encoding='utf-8') as f:
        while True:
            line = f.readline()
            if not line:
                break
            # if preLine != '' and len(times) > 0:
            # if preLine[:10] + '00' != times[-1]:
            multi = []
            # while line.find("POLYGON") > 0:
            # multi.append(line[10:])
            if line.find("POLYGON") > 0:
                # nextLine = f.readline()
                while line.find("POLYGON") > 0:
                    multi.append(line[10:])
                    line = f.readline()
                try:
                    int(preLine[:12])
                except ValueError:
                    preLine = line
                times.append(preLine[:10] + '00')
                if len(multi) > 1:
                    # polygons =  [loads(poly) for poly in multi]
                    # multipoly = MultiPolygon(polygons)
                    # boundaries.append(dumps(multipoly))
                    boundaries.append(multi)
                    # print(multi)
                else:
                    boundaries.append(multi[0])
            else:
                preLine = line

    newBound = []
    newTimes = []
    boundAtTime = []
    for i in range(len(times)):
        if isinstance(boundaries[i], list):
            boundAtTime.extend(boundaries[i])
        else:
            boundAtTime.append(boundaries[i])
        if i == len(times) - 1 or times[i] != times[i + 1]:
            newTimes.append(times[i])
            if len(boundAtTime) > 1:
                polygons = [loads(poly) for poly in boundAtTime]
                multipoly = MultiPolygon(polygons)
                newBound.append(dumps(multipoly.buffer(0)))
            else:
                newBound.append(boundAtTime[0])
            boundAtTime = []

    # result = pd.DataFrame({"time":times,"boundary":boundaries})
    result = pd.DataFrame({"time": newTimes, "boundary": newBound})
    result = result.astype({'time': 'int64'})
    result['boundary'] = result['boundary'].apply(loads)
    geoResult = gpd.GeoDataFrame(result, geometry='boundary')

    geoResult.to_csv("./produce/{}/result_{}.csv".format(location, query),
                     index=None)
Example #11
def visResult(poly, currentPoint, dist):
    circle = currentPoint.buffer(dist)
    circle2 = currentPoint.buffer(50)
    toDraw = MultiPolygon([circle, circle2, poly])
    open("exampleInscribed.json", "wb").write(json.dumps(mapping(toDraw)))
Example #12
def raster_stats(
    vectors,
    raster,
    layer_num=0,
    band_num=1,
    nodata_value=None,
    exclude_ranges=None,
    global_src_extent=False,
    categorical=False,
    stats=None,
    copy_properties=False,
):

    if not stats:
        if not categorical:
            stats = DEFAULT_STATS
        else:
            stats = []
    else:
        if isinstance(stats, str):
            if stats in ["*", "ALL"]:
                stats = VALID_STATS
            else:
                stats = stats.split()
    for x in stats:
        if x not in VALID_STATS:
            raise RasterStatsError("Stat `%s` not valid;" " must be one of \n %r" % (x, VALID_STATS))

    # print "helloRezaTest"
    run_count = False
    if categorical or "majority" in stats or "minority" in stats or "unique" in stats:
        # run the counter once, only if needed
        run_count = True

    rds = gdal.Open(raster, GA_ReadOnly)
    if not rds:
        raise RasterStatsError("Cannot open %r as GDAL raster" % raster)
    rb = rds.GetRasterBand(band_num)
    rgt = rds.GetGeoTransform()
    rsize = (rds.RasterXSize, rds.RasterYSize)
    rbounds = raster_extent_as_bounds(rgt, rsize)

    if nodata_value is not None:
        nodata_value = float(nodata_value)
        rb.SetNoDataValue(nodata_value)
    else:
        nodata_value = rb.GetNoDataValue()

    features_iter, strategy, spatial_ref = get_features(vectors, layer_num)

    if global_src_extent:
        # create an in-memory numpy array of the source raster data
        # covering the whole extent of the vector layer
        if strategy != "ogr":
            raise RasterStatsError("global_src_extent requires OGR vector")

        # find extent of ALL features
        ds = ogr.Open(vectors)
        layer = ds.GetLayer(layer_num)
        ex = layer.GetExtent()
        # transform from OGR extent to xmin, ymin, xmax, ymax
        layer_extent = (ex[0], ex[2], ex[1], ex[3])

        global_src_offset = bbox_to_pixel_offsets(rgt, layer_extent)
        global_src_array = rb.ReadAsArray(*global_src_offset)

    mem_drv = ogr.GetDriverByName("Memory")
    driver = gdal.GetDriverByName("MEM")

    results = []

    for i, feat in enumerate(features_iter):
        if feat["type"] == "Feature":
            geom = shape(feat["geometry"])
        else:  # it's just a geometry
            geom = shape(feat)

        # Point and MultiPoint don't play well with GDALRasterize
        # convert them into box polygons the size of a raster cell
        buff = rgt[1] / 2.0
        if geom.type == "MultiPoint":
            geom = MultiPolygon([box(*(pt.buffer(buff).bounds)) for pt in geom.geoms])
        elif geom.type == "Point":
            geom = box(*(geom.buffer(buff).bounds))

        ogr_geom_type = shapely_to_ogr_type(geom.type)

        # "Clip" the geometry bounds to the overall raster bounding box
        # This should avoid any rasterIO errors for partially overlapping polys
        geom_bounds = list(geom.bounds)
        if geom_bounds[0] < rbounds[0]:
            geom_bounds[0] = rbounds[0]
        if geom_bounds[1] < rbounds[1]:
            geom_bounds[1] = rbounds[1]
        if geom_bounds[2] > rbounds[2]:
            geom_bounds[2] = rbounds[2]
        if geom_bounds[3] > rbounds[3]:
            geom_bounds[3] = rbounds[3]

        # calculate new geotransform of the feature subset
        src_offset = bbox_to_pixel_offsets(rgt, geom_bounds)

        new_gt = ((rgt[0] + (src_offset[0] * rgt[1])), rgt[1], 0.0, (rgt[3] + (src_offset[1] * rgt[5])), 0.0, rgt[5])

        if src_offset[2] < 0 or src_offset[3] < 0:
            # we're off the raster completely, no overlap at all
            # so there's no need to even bother trying to calculate
            feature_stats = dict([(s, None) for s in stats])
        else:
            if not global_src_extent:
                # use feature's source extent and read directly from source
                # fastest option when you have fast disks and well-indexed raster
                # advantage: each feature uses the smallest raster chunk
                # disadvantage: lots of disk reads on the source raster
                src_array = rb.ReadAsArray(*src_offset)
            else:
                # derive array from global source extent array
                # useful *only* when disk IO or raster format inefficiencies are your limiting factor
                # advantage: reads raster data in one pass before loop
                # disadvantage: large vector extents combined with big rasters need lotsa memory
                xa = src_offset[0] - global_src_offset[0]
                ya = src_offset[1] - global_src_offset[1]
                xb = xa + src_offset[2]
                yb = ya + src_offset[3]
                src_array = global_src_array[ya:yb, xa:xb]

            # Create a temporary vector layer in memory
            mem_ds = mem_drv.CreateDataSource("out")
            mem_layer = mem_ds.CreateLayer("out", spatial_ref, ogr_geom_type)
            ogr_feature = ogr.Feature(feature_def=mem_layer.GetLayerDefn())
            ogr_geom = ogr.CreateGeometryFromWkt(geom.wkt)
            ogr_feature.SetGeometryDirectly(ogr_geom)
            mem_layer.CreateFeature(ogr_feature)

            # Rasterize it
            rvds = driver.Create("rvds", src_offset[2], src_offset[3], 1, gdal.GDT_Byte)
            rvds.SetGeoTransform(new_gt)

            gdal.RasterizeLayer(rvds, [1], mem_layer, None, None, burn_values=[1])
            rv_array = rvds.ReadAsArray()

            # Mask the source data array with our current feature
            # we take the logical_not to flip 0<->1 to get the correct mask effect
            # we also mask out nodata values explicitly
            # masked = np.ma.MaskedArray(
            #    src_array,
            #    mask=np.logical_or(
            #        src_array == nodata_value,# 1 if true
            #        np.logical_not(rv_array)  # flips 0s to 1s
            #    )
            # )

            # Build the mask incrementally: start with nothing masked, then
            # mark pixels outside the feature, nodata pixels, and any
            # explicitly excluded value ranges (e.g. "1,50 60,100")
            places_to_mask = np.zeros(src_array.shape, dtype=bool)
            places_to_mask = np.logical_or(np.logical_not(rv_array), places_to_mask)
            if nodata_value is not None:
                places_to_mask = np.logical_or(src_array == nodata_value, places_to_mask)

            if exclude_ranges is not None:
                for rng in exclude_ranges.split(" "):
                    nodata_values = rng.split(",")
                    nodata_value_min = int(nodata_values[0])
                    nodata_value_max = int(nodata_values[1])
                    places_to_mask = np.logical_or(
                        np.logical_and(src_array >= nodata_value_min, src_array <= nodata_value_max), places_to_mask
                    )

            masked = np.ma.masked_where(places_to_mask, src_array)

            if run_count:
                pixel_count = Counter(masked.compressed())

            if categorical:
                feature_stats = dict(pixel_count)
            else:
                feature_stats = {}

            if "min" in stats:
                feature_stats["min"] = float(masked.min())
            if "max" in stats:
                feature_stats["max"] = float(masked.max())
            if "mean" in stats:
                feature_stats["mean"] = float(masked.mean())
            if "count" in stats:
                feature_stats["count"] = int(masked.count())
            # optional
            if "sum" in stats:
                feature_stats["sum"] = float(masked.sum())
            if "std" in stats:
                feature_stats["std"] = float(masked.std())
            if "median" in stats:
                feature_stats["median"] = float(np.median(masked.compressed()))
            if "majority" in stats:
                try:
                    feature_stats["majority"] = pixel_count.most_common(1)[0][0]
                except IndexError:
                    feature_stats["majority"] = None
            if "minority" in stats:
                try:
                    feature_stats["minority"] = pixel_count.most_common()[-1][0]
                except IndexError:
                    feature_stats["minority"] = None
            if "unique" in stats:
                feature_stats["unique"] = len(pixel_count.keys())
            if "range" in stats:
                try:
                    rmin = feature_stats["min"]
                except KeyError:
                    rmin = float(masked.min())
                try:
                    rmax = feature_stats["max"]
                except KeyError:
                    rmax = float(masked.max())
                feature_stats["range"] = rmax - rmin

        try:
            # Use the provided feature id as __fid__
            feature_stats["__fid__"] = feat["id"]
        except KeyError:
            # use the enumerator
            feature_stats["__fid__"] = i

        if copy_properties and "properties" in feat:
            for key, val in feat["properties"].items():
                feature_stats[key] = val

        results.append(feature_stats)

    return results
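A hypothetical invocation (the file names below are placeholders, not from the source), mirroring the parameters the function parses above:

results = raster_stats(
    "parcels.shp",                 # placeholder vector dataset
    "elevation.tif",               # placeholder GDAL raster
    band_num=1,
    nodata_value=-9999,
    exclude_ranges="1,50 60,100",  # space-separated "min,max" pairs
    stats="mean min max count",
    copy_properties=True,
)
for row in results:
    print(row["__fid__"], row.get("mean"))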
Example #13
def DiagnoseAndFixMultiPolygon(attributeName, boundariesList, printDiagnostic=False):
    print("---")
    print("*** %s is invalid. Running diagnostic... ***" % attributeName)
    # Note: we have already tested each polygon, so the problem is when we add them to a multipolygon
    # the likely cause is overlapping polygons, so we will automatically union those polygons 
    #printDiagnostic = True
    goodPolygons = []
    badPolygons = []
    useSlowerCode = False
    numPolygons = len(boundariesList)
    if useSlowerCode:
        for i in range(numPolygons):
            if i % 20 == 0:
                print("processing polygon", i)
            thisPolygon = boundariesList[i]
            shapelyMultiPoly = MultiPolygon(goodPolygons + [thisPolygon])
            if shapelyMultiPoly.is_valid:
                goodPolygons.append(thisPolygon)
            else:
                print("*** problem when adding polygon %d ***" % i)
                print(thisPolygon)
                badPolygons.append(thisPolygon)
    else:
        ''' it can be very slow trying these one at a time,
        so let's try jumping when we can and dropping back to one at a time when we have a problem
        '''
        numPolygons = len(boundariesList)
        if printDiagnostic: print("numPolygons", numPolygons)
        
        i = 0
        lineReported = 0
        while True:
            if i >= numPolygons:
                if printDiagnostic: print("reached end of list")
                break  # end of list

            lineToReport = (i // 100) * 100
            if lineToReport != lineReported:
                lineReported = lineToReport
                if i > 0:
                    print("processing polygon %d of %d" % (i, numPolygons))
                
            continueToTop = False
            for numToJump in [100, 50, 20, 10]:
                if continueToTop: continue
                if i + numToJump > numPolygons:
                    numToJump = numPolygons - i
                    if printDiagnostic: print("adjusting numToJump to", numToJump)
                # try the jump
                if printDiagnostic: print("trying jump")
                polygonsToAdd = boundariesList[i:i+numToJump]
                shapelyMultiPoly = MultiPolygon(goodPolygons + polygonsToAdd)
                if shapelyMultiPoly.is_valid:
                    goodPolygons = goodPolygons + polygonsToAdd
                    i = i + numToJump
                    if printDiagnostic: print("jumped to %i" % i)
                    continueToTop = True

            if continueToTop: continue

            # resort to one at a time
            if printDiagnostic: print("resorting to one at a time. i = %d" % i)
            for j in range(numToJump):
                thisPolygon = boundariesList[i]
                shapelyMultiPoly = MultiPolygon(goodPolygons + [thisPolygon])
                if shapelyMultiPoly.is_valid:
                    goodPolygons.append(thisPolygon)
                else:
                    print("polygon %d is bad" % i)
                    badPolygons.append(thisPolygon)
                i = i + 1

    # now automatically generate a "fixed" multipolygon by unioning the bad polygons
    shapelyMultiPoly = MultiPolygon(goodPolygons)
    if len(badPolygons) > 0:
        print("---")
        print("*** Handling bad polygons ***")
        for poly in badPolygons:
            print("Bad polygon:", poly)
            islandPoly = MultiPolygon([poly])
            if islandPoly.is_valid:
                print("Fixed: The bad polygon was a valid polygon; it has been unioned to the whole.")
                shapelyMultiPoly = shapelyMultiPoly.union(islandPoly)
            else:
                # try to fix this polygon
                '''
                Outer boundary problems:
                Sometimes with clipping of shoreline, there are cases of the outer boundary of the
                shoreline crossing itself.  There is not really anything we can do about such cases.
                '''
                # check to see if the outer boundary is valid
                outerBoundary,innerBoundaries = poly
                
                islandPoly = MultiPolygon([(outerBoundary, [])])
                if not islandPoly.is_valid:
                    print("Outer boundary is not valid.")
                    print("Unable to fix this polygon.")
                    if attributeName == "MAPLAND":
                        print("This is a minor error; it just means the oil contours will not be clipped to this part of the land.")
                    else:
                        print("This is a serious error. Part of the %s area will be missing." % attributeName)
                    print("---")
                    continue  # we will omit this polygon

                print("The outer boundary of the polygon is valid.")
                
                
                ##############################
                '''
                Hole problems:
                There are two kinds of problems that can occur with GNOME Analyst contours.
                
                Sometimes the holes stick slightly out of the outerboundary.
                In such a case we will rely on the fact that the holes were just areas to be removed.
                
                Sometimes there seem to be holes within holes. 
                I'm not sure why GNOME Analyst is doing that, but we will assume that 
                the holes were just areas to be removed.
                '''
                numHoles = len(innerBoundaries)
                if numHoles > 0: print("Examining the %d holes..." % numHoles)
                assert numHoles > 0  # the only way to reach this part of the code is for a hole to be causing the problem
                numOmittedHoles = 0
                for innerBoundary in innerBoundaries:
                    hole = Polygon(innerBoundary)
                    if not hole.is_valid:
                        numOmittedHoles = numOmittedHoles + 1
                        print("Hole is not valid:", hole)
                        print("Omitting this hole. This is a minor error.")
                    else:
                        # subtract this hole from the islandPolygon
                        islandPoly = islandPoly.difference(hole)

                if numOmittedHoles > 0:
                    print("Partially fixed: %d invalid holes were not subtracted from this polygon, but this polygon has been unioned to the whole." % numOmittedHoles)
                elif numHoles > 0:
                    print("Fixed: all holes successfully subtracted from this polygon and the polygon unioned to the whole.")
                else:
                    print("Fixed: polygon unioned to the whole.")
                
                shapelyMultiPoly = shapelyMultiPoly.union(islandPoly)
                
            print "---"
        
    return shapelyMultiPoly
Example #14
from shapely.geometry import Point, LineString, Polygon, MultiPoint, MultiLineString, MultiPolygon, box
import matplotlib.pyplot as plt

point1 = Point(2.2, 4.2)
point2 = Point(7.2, -25.1)
point3 = Point(9.26, -2.456)
point3D = Point(9.26, -2.456, 0.57)

multi_point = MultiPoint([point1, point2, point3])
multi_point2 = MultiPoint([(2.2, 4.2), (7.2, -25.1), (9.26, -2.456)])

line1 = LineString([point1, point2])
line2 = LineString([point2, point3])

multi_line = MultiLineString([line1, line2])

west_exterior = [(-180, 90), (-180, -90), (0, -90), (0, 90)]
west_hole = [[(-170, 80), (-170, -80), (-10, -80), (-10, 80)]]
west_poly = Polygon(shell=west_exterior, holes=west_hole)

min_x, min_y = 0, -90
max_x, max_y = 180, 90

east_poly_box = box(minx=min_x, miny=min_y, maxx=max_x, maxy=max_y)
multi_poly = MultiPolygon([west_poly, east_poly_box])

print("MultiPoint:", multi_point)
print("MultiLine: ", multi_line)
print("Bounding box: ", east_poly_box)
print("MultiPoly: ", multi_poly)
Example #15
def geometrie_circonscription(geom):
    s = shape(geom)

    if not isinstance(s, MultiPolygon):
        s = MultiPolygon([s])
    return s.wkb_hex
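A small usage sketch (the geometry is illustrative): promoting every Polygon to a MultiPolygon gives the stored WKB a uniform geometry type.

single = {"type": "Polygon",
          "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 0]]]}
print(geometrie_circonscription(single)[:10])  # hex WKB with a MultiPolygon header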
Example #16
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('INPUT')
    parser.add_argument('OUTPUT', nargs='?')
    parser.add_argument('--shape', help='Shapefile')
    args = parser.parse_args()

    input_file = args.INPUT
    output_file = args.OUTPUT or '{0}.network'.format(*splitext(input_file))
    shape_file = args.shape

    start = timeit.default_timer()

    if shape_file:
        print('Load shapefile')
        shape_file = MultiPolygon([shape(pol['geometry']) for pol in fiona.open(shape_file)])

    print('Read {0}'.format(input_file))

    graph = Graph()

    items = 0
    utm_zone_number = None
    for item in iter_osm_file(input_file):
        tags = {i.key: i.value for i in item.tags}
        if isinstance(item, Way) and 'highway' in tags:
            # Nodes are created implicitly
            last = None
            for node in item.nds:
                if last:
                    graph.add_edge(last, node, type=tags['highway'])
Example #17
def haversine(lon1, lat1, lon2, lat2):
    # inputs are in radians (see the call site below)
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
    c = 2 * np.arcsin(np.sqrt(a)) 

    # 1734 km is roughly the radius of the Moon (the data here is lunar)
    km = 1734 * c
    return km


    
# Base file!
root = '/Users/thorey/Documents/These/Projet/FFC/Classification/'
Output = os.path.join(root,'Data/')


Maria = MultiPolygon([Polygon(p) for p in Maria_Location_3(Output)[1:]]).buffer(0)

for pix in [4, 16, 64]:

    print('pix :', pix)

    data = Data(pix,Output,'').data
    df = pd.DataFrame(np.hstack(list(data['feat_df'].values())),
                      columns=list(data['feat_df'].keys()))
    df = df.apply(pd.to_numeric, errors='ignore')  # convert numeric columns where possible
    filtre = df.Type == 1.0
    FFC = df[filtre]

    print('Mask_C_FFC')
    tic = time.time()
    df['Mask_C_FFC'] = [haversine(FFC.Long.map(float)*np.pi/180.0,FFC.Lat.map(float)*np.pi/180.0,float(df.Long[i])*np.pi/180.0,float(df.Lat[i])*np.pi/180.0).min()<150
                     for i in range(len(df.Lat))]
    toc = time.time()
Example #18
class Environment:

    def __init__(self, input_file, factor=1):
        self.plot_obstacles_polygon = []
        self.obs_list = []
        self.obs_polygon = MultiPolygon()       # Shapely object to store all polygons
        self.initial_state, self.goal_state = [], []
        self.resolution = 0                     # Dimension of the plane.
        self.read_env_from_file(input_file)

    def read_env_from_file(self, input_file):
        # Read json input
        try:
            print(input_file)
            with open(input_file, mode='r', encoding='utf-8') as a_file:
                environment = json.loads(a_file.read())
        except FileNotFoundError as fl:
            print("File not found for JSON ", fl)
            exit(1)
        except ValueError:
            print("Invalid JSON")
            exit(1)
        except Exception:
            print("Unable to process input file")
            exit(1)
        try:
            # Making sure the required entities are defined in the input json file.
            for key in ('resolution', 'obstacles', 'initial_state', 'goal_state'):
                environment[key]
        except KeyError:
            print("Invalid Environment definition")
            exit(1)
        self.initial_state, self.goal_state = environment['initial_state'], environment['goal_state']
        self.resolution = environment['resolution']
        temp_polygon_list = []
        for obs in environment['obstacles']:
            if not (obs.get('shape') and obs.get('property') and obs['property'].get('vertices')):
                print("Shape element not present for the obstacles")
                continue
            if obs['shape'] == 'polygon':
                # print("Polygon with vertices %s" %(np.array(obs['property']['vertices'])/100))
                polygon = mPolygon(np.array(obs['property']['vertices']))
                temp_polygon_list.append(Polygon(obs['property']['vertices']))
                self.plot_obstacles_polygon.append(polygon)
                self.obs_list.append(obs['property']['vertices'])
            else:
                print("Undefined shape")
                break
        self.obs_polygon = MultiPolygon(temp_polygon_list)

    def is_point_inside(self, xy):
        """
        :param xy: tuple with x coordinate as first element and y coordinate as second element
        :return: True if the point is inside the obstacles and False if it isn't
        """
        return Point(xy[0], xy[1]).within(self.obs_polygon)

    def is_line_inside(self, xy_start, xy_end):
        # xy_start is tuple of (x, y) coordinate of one end of the line.
        # xy_end is tuple of (x, y) coordinate of the other end of line.
        line = LineString([xy_start, xy_end])
        return self.obs_polygon.contains(line) or self.obs_polygon.touches(line) or self.obs_polygon.crosses(line)

    def draw_env(self, path, key_xy, k_value):
        # Method to draw an arrow in the environment
        # path is a list of state objects. index 0 maps from-state and index 1 maps to-state.
        fig, ax = plt.subplots()
        x_path, y_path = [], []

        for ls in path:
            x_path.append(key_xy(ls)[0])
            y_path.append(key_xy(ls)[1])

        colors = 100*np.random.rand(len(self.plot_obstacles_polygon))
        p = PatchCollection(self.plot_obstacles_polygon, cmap=matplotlib.cm.jet, alpha=0.4)
        p.set_array(np.array(colors))
        ax.add_collection(p)
        plt.colorbar(p)
        plt.plot([self.initial_state[0]], [self.initial_state[1]], 'bs', self.goal_state[0], self.goal_state[1], 'g^')
        plt.axis([0, self.resolution, 0, self.resolution])
        plt.arrow(x_path[0], y_path[0], x_path[1]-x_path[0], y_path[1]-y_path[0], fc="k", ec="k", head_width=1.55, head_length=1.1)
        plt.title("figure" + str(k_value)+".png")

        fig.savefig("figure" + str(k_value)+".png", format='png', dpi=fig.dpi)

    def animate_path(self, path, key_xy):
        fig, ax = plt.subplots()

        colors = 100*np.random.rand(len(self.plot_obstacles_polygon))
        p = PatchCollection(self.plot_obstacles_polygon, cmap=matplotlib.cm.jet, alpha=0.4)
        p.set_array(np.array(colors))
        ax.add_collection(p)
        plt.colorbar(p)

        plt.plot([self.initial_state[0]], [self.initial_state[1]], 'bs', self.goal_state[0], self.goal_state[1], 'g^')
        plt.axis([0, self.resolution, 0, self.resolution])

        x_0, y_0 = key_xy(path[0])[0], key_xy(path[0])[1]
        x_1, y_1 = key_xy(path[0 + 1])[0], key_xy(path[0 + 1])[1]
        dx, dy = x_1 - x_0, y_1 - y_0
        qv = ax.quiver(x_0, y_0, dx, dy, angles='xy', scale_units='xy', scale=1)

        def animate(i):
            x_init, y_init = key_xy(path[i])[0], key_xy(path[i])[1]
            x_f, y_f = key_xy(path[i + 1])[0], key_xy(path[i + 1])[1]
            dx, dy = x_f - x_init, y_f - y_init
            qv.set_UVC(np.array(dx), np.array(dy))
            qv.set_offsets((x_init, y_init))
            return qv

        anim = animation.FuncAnimation(fig, animate, frames=range(0, len(path)-1), interval=500)
        plt.show()

    def get_apprx_visible_vertices(self, xy_robot):
        # To get visible vertices from robot point
        # xy_robot should be a tuple of (x, y) coordinate
        if self.is_point_inside(xy_robot):
            print("Invalid robot position")
            return None
        pool = copy.deepcopy(self.obs_list)
        pool.append([self.goal_state])
        visible_vertices, visible_lines = [], []

        for obj in pool:
            for vertex in obj:
                vertex = tuple(vertex)
                if vertex == xy_robot:
                    continue
                crosses, line = self.visibility_line(xy_robot, vertex)
                if not crosses:
                    visible_lines.append(line)
        visible_vertices.extend([x.xy[0][1], x.xy[1][1]] for x in visible_lines)
        return visible_vertices

    def get_actual_visible_vertices(self, xy_robot):
        if self.is_point_inside(xy_robot):
            print("Invalid robot position")
            return None
        pool = copy.deepcopy(self.obs_list)
        pool.append([self.goal_state])
        visible_vertices, line_robot_vertices = [], {}

        def line_slope(xy1, xy2):
            return (xy2[1] - xy1[1])/(xy2[0] - xy1[0]) if (xy2[0] - xy1[0]) != 0 else sys.maxsize

        for obj in pool:
            for vertex in obj:
                crosses, line = self.visibility_line(xy_robot, vertex)
                if not crosses:
                    if line_slope(xy_robot, vertex) in line_robot_vertices:
                        if line.length < line_robot_vertices[line_slope(xy_robot, vertex)].length:
                            line_robot_vertices[line_slope(xy_robot, vertex)] = line
                    else:
                        line_robot_vertices[line_slope(xy_robot, vertex)] = line
        visible_vertices.extend([x.xy[0][1], x.xy[1][1]] for x in line_robot_vertices.values())
        return visible_vertices

    def visibility_line(self, xy_start, xy_end):
        # Helper Method to check if the line is intersected by any obstacles.
        line = LineString([xy_start, xy_end])
        return self.obs_polygon.crosses(line) or self.obs_polygon.contains(line), line

    def __str__(self):
        return "Obstacle list: %s\nInitial State: %s\nGoal State: %s\nResolution: %d\n" \
               % ([cord.xy for cord in self.plot_obstacles_polygon], self.initial_state, self.goal_state, self.resolution)
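The collision checks in this class reduce to plain shapely predicates; a self-contained sketch with an illustrative obstacle, bypassing the JSON loading:

from shapely.geometry import LineString, MultiPolygon, Point, Polygon

obstacles = MultiPolygon([Polygon([(5, 5), (30, 25), (38, 15), (9, 2)])])
print(Point(20, 12).within(obstacles))  # True: the point is inside the obstacle
line = LineString([(0, 0), (40, 30)])
print(obstacles.crosses(line) or obstacles.contains(line))  # True: path blocked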
Example #19
def main():
    parser = ArgumentParser(
        description='Used to import the osm2pgsql expire-tiles file to Postgres',
        prog='./buildout/bin/import_expire_tiles',
    )
    parser.add_argument(
        '--buffer',
        type=float,
        default=0.0,
        help='Extent buffer to the tiles [m], default is 0',
    )
    parser.add_argument(
        '--simplify',
        type=float,
        default=0.0,
        help='Simplify the result geometry [m], default is 0',
    )
    parser.add_argument(
        '--create',
        default=False,
        action="store_true",
        help='create the table if not exists',
    )
    parser.add_argument(
        '--delete',
        default=False,
        action="store_true",
        help='empty the table',
    )
    parser.add_argument(
        'file',
        metavar='FILE',
        help='The osm2pgsql expire-tiles file',
    )
    parser.add_argument(
        'connection',
        metavar='CONNECTION',
        help='The PostgreSQL connection string e.g. "user=www-data password=www-data dbname=sig host=localhost"',
    )
    parser.add_argument(
        'table',
        metavar='TABLE',
        help='The PostgreSQL table to fill',
    )
    parser.add_argument(
        '--schema',
        default='public',
        help='The PostgreSQL schema to use (should already exists), default is public',
    )
    parser.add_argument(
        'column',
        metavar='COLUMN',
        default='geom',
        nargs='?',
        help='The PostgreSQL column, default is "geom"',
    )
    parser.add_argument(
        '--srid',
        type=int,
        default=3857,
        nargs='?',
        help='The stored geometry SRID, no conversion by default (3857)',
    )
    options = parser.parse_args()

    connection = psycopg2.connect(options.connection)
    cursor = connection.cursor()

    if options.create:
        cursor.execute(
            "SELECT count(*) FROM pg_tables WHERE schemaname='%s' AND tablename='%s'" % (
                options.schema, options.table,
            )
        )
        if cursor.fetchone()[0] == 0:
            cursor.execute('CREATE TABLE IF NOT EXISTS "%s"."%s" (id serial)' % (
                options.schema, options.table,
            ))
            cursor.execute("SELECT AddGeometryColumn('%s', '%s', '%s', %s, 'MULTIPOLYGON', 2)" % (
                options.schema, options.table, options.column, options.srid,
            ))

    if options.delete:
        cursor.execute('DELETE FROM "%s"' % (options.table))

    geoms = []
    f = open(options.file, 'r')
    grid = QuadTileGrid(
        max_extent=(-20037508.34, -20037508.34, 20037508.34, 20037508.34),
    )
    for coord in f:
        extent = grid.extent(parse_tilecoord(coord), options.buffer)
        geoms.append(Polygon((
            (extent[0], extent[1]),
            (extent[0], extent[3]),
            (extent[2], extent[3]),
            (extent[2], extent[1])
        )))
    f.close()
    if len(geoms) == 0:
        print "No coords found"
        connection.commit()
        cursor.close()
        connection.close()
        exit(0)
    geom = cascaded_union(geoms)
    if geom.geom_type == 'Polygon':
        geom = MultiPolygon((geom,))

    if options.simplify > 0:
        geom = geom.simplify(options.simplify)

    sql_geom = "ST_GeomFromText('%s', 3857)" % geom.wkt
    if options.srid <= 0:
        sql_geom = "ST_GeomFromText('%s')" % geom.wkt  # pragma: no cover
    elif options.srid != 3857:
        sql_geom = 'ST_Transform(%s, %i)' % (sql_geom, options.srid)

    cursor.execute('INSERT INTO "%s" ("%s") VALUES (%s)' % (
        options.table, options.column, sql_geom
    ))
    connection.commit()
    cursor.close()
    connection.close()
    print('Import successful')

Example #20
    # color = []
    # Selecting a default source point
    source = (90,20,math.pi/18)
    # List which stores the points generated in this process starting from the source
    pts = [source]
    # ur = velocity of right wheel, ul = velocity of left wheel set to a constant value '1' radians/second
    ur = 2
    ul = 2

    # This defines the obstacle area for Collision Detection which is done using "Shapely"
    poly = ShapelyPolygon(((5, 5), (30, 25), (38, 15), (9, 2), (5, 5)))
    poly2 = ShapelyPolygon(((40, 40), (60, 60), (50, 40), (60, 20), (40, 40)))
    poly3 = ShapelyPolygon(((38, 70), (58, 70), (58, 95), (38, 90), (38, 70)))
    poly4 = ShapelyPolygon(((5, 40), (25, 40), (25, 60), (5, 60), (5, 40)))
    poly5 = ShapelyPolygon(((70, 60), (100, 60), (100, 90), (70, 90), (70,60)))
    polygons = ShapelyMultiPolygon([poly,poly2,poly3,poly4,poly5])

    # This list is to store the points which are used in RRT generation
    lines = []
    # This list is used to store the points leading to the destination forming shortest path
    lines2 = []
    # for i in range(12):
    #     color.append((1, 0, 0, 1))

    # This iterates for a specified number of times, in each iteration performs a set of tasks to get a newpoint for building the RRT
    for i in range(10000):
        sys.stdout.write("\rCountdown: %d" % i)
        sys.stdout.flush()
        #Randomly generates a point with (x,y,theta) random values
        randompt = (random.uniform(0,100),random.uniform(0,100),random.uniform(0.0,2*math.pi))
        #Finds nearest point to the random point
Example #21
    #raw_input()

    sleep(0.05)
    print "Filtering datapoints..."
    # set up a map dataframe
    df_map = pd.DataFrame({
        'poly': [Polygon(hood_points) for hood_points in m.city],
        'name': [hood[sector_name] for hood in m.city_info],
    })

    def get_area(poly):
        return poly.area

    df_map['area'] = df_map['poly'].apply(get_area, args=())

    hood_polygons = prep(MultiPolygon(list(df_map['poly'].values)))

    if data_info_type == 0:
        # Convert our latitude and longitude into Basemap cartesian map coordinates
        mapped_points = [
            Point(m(mapped_x, mapped_y)) for mapped_x, mapped_y in data
        ]
        all_points = MultiPoint(mapped_points)
        # Use prep to optimize polygons for faster computation
        # Filter out the points that do not fall within the map we're making
        city_points = list(filter(hood_polygons.contains, all_points.geoms))

    #print len(city_points)

    sleep(0.05)
    print "Analyzing datapoints..."
Example #22
def ReadMossPolygons(mossBaseFileName, printDiagnostic = False):
    '''
    .ms1 file is fixed format
    
    Header lines 56 characters
    char 1-5 : Item Number(NEGATIVE IF THE COORDINATES ARE LON/LAT)
    char 16-45: Attribute Name
    char 51-55: Number of coord. pairs
    
    X,Y, Coordinate pairs 23 characters
    01-11: x coordinate
    12-22: y coordinate
    
    Long,Lat pairs
    char 01-10: LONGITUDE
    char 11-20: LATITUDE
    char 21-22: FLAG 
        0-NORMAL 
        1-INDICATES FIRST POINT OF ISLAND POLYGON
    
    '''
    # import shapely here so it won't be imported if not needed
    from shapely.geometry import Polygon, MultiPolygon

    # what about MAPBOUND, EXTENDEDOUTLOOKTHREAT
    attributesToRead = ["MAPLAND","FORECASTHEAVY","FORECASTMEDIUM","FORECASTLIGHT","FORECASTUNCERTAINTY"]
    
    landBoundariesList = [] 
    heavyBoundariesList = [] 
    mediumBoundariesList = [] 
    lightBoundariesList = []
    uncertaintyBoundariesList = []
     
    extension = ".ms1"
    if os.path.exists(mossBaseFileName + extension):
        inFile = open(mossBaseFileName + extension, 'r')
        alreadyReadNextLine = False
        while True:  # read the header lines     
            if not alreadyReadNextLine:
                line = inFile.readline()
            alreadyReadNextLine = False
            
            if not line: break  # end of this file
            if line.strip() == "": continue  # blank line
            
            itemNum = int(line[0:5])
            assert itemNum < 0 # we expect long/lat values, so the itemNum should be negative
            
            attributeName = line[15:45].strip()

            numCoordinates = int(line[50:55])
            
            
            # note: for some versions of GNOME analyst
            # the number of coordinates in MAPLAND overflows and is reported as a negative number or incorrect number.
            # To support those files, we will ignore the number and just read lines based on the length of the lines
              
            if printDiagnostic:
                print(itemNum, attributeName, numCoordinates)
            
            readingOuterBoundary = True
            coordList = []
            
            outerBoundary = None
            innerBoundaries = []
             
            if numCoordinates <= 0:
                print "*** ignoring bad %s header line value: numCoordinates: %d ***"%(attributeName,numCoordinates)
            
            # read the points for the polygon for this header           
            while True: #for i in range(0,numCoordinates):
                # since we don't want to rely on numCoordinates
                # we need to look to see if this is the end of this block of coordinates
                ##################
                line = inFile.readline()
                
                # The lines are fixed format,
                # read until we find a header line
                # header lines lines are longer than coordinate lines 
                if (not line) or len(line.strip()) > 50 : # must be end of file or a header line
                    alreadyReadNextLine = True 
                    break # out of this while loop
                
                if attributeName in attributesToRead:
                    #process this line
                    longitudeStr = line[0:10].strip()
                    latitudeStr = line[10:20].strip()
                    flag = line[20:22].strip()
                    
                    if flag == "1" :
                        #then we are starting a new "inner hole"
                        
                        # enforce having the last point of the polygon equal the first point
                        if len(coordList) > 0 and coordList[0] != coordList[-1]:
                            coordList.append(coordList[0])

                        # save the previous coordList
                        if len(coordList) >= 4: # less than 4 would be a degenerate case                          
                            if readingOuterBoundary:
                                outerBoundary = coordList
                            else:
                                innerBoundaries.append(coordList)
                                
                        # reset the coordinate list 
                        coordList = []                                         
                        readingOuterBoundary = False
                    
                    coordList.append((float(longitudeStr),float(latitudeStr)))
                ############# end of while loop
                
            # finished reading the header
            # record the lists we have filled in
            
            if attributeName not in attributesToRead:
                continue # on to the next header line 
            
            # enforce having the last point of the polygon equal the first point
            if len(coordList) > 0 and coordList[0] != coordList[-1]:
                coordList.append(coordList[0])
                
            #filter out degenerate cases
            if len(coordList) < 4: # less than 4 would be a degenerate case
                print "*** ignoring degenerate polygon ***"
                continue # on to the next header line 
                
            # save the coordinate list                             
            if readingOuterBoundary:
                outerBoundary = coordList
            else:
                innerBoundaries.append(coordList)
                
            
            #save thisPolygon
            if outerBoundary is not None and len(outerBoundary) > 0:
                # outerBoundary,innerBoundaries = VerifyAndFixGnomeAnalystPolygon(attributeName,outerBoundary,innerBoundaries)
                if len(outerBoundary) > 0:
                    thisPolygon = (outerBoundary,innerBoundaries)
                    if attributeName == "MAPLAND": landBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTHEAVY": heavyBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTMEDIUM": mediumBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTLIGHT": lightBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTUNCERTAINTY": uncertaintyBoundariesList.append(thisPolygon) 
                
                
        inFile.close()
        
    # convert the lists of MossPolygons to shapely MultiPolygons
    if len(landBoundariesList) == 0: landPolygons = None
    else: 
        landPolygons = MultiPolygon(landBoundariesList)
        if not landPolygons.is_valid:
            # try analysing and fixing the problem
            landPolygons = DiagnoseAndFixMultiPolygon("MAPLAND",landBoundariesList)
    
    if len(heavyBoundariesList) == 0: heavyPolygons = None
    else: 
        heavyPolygons = MultiPolygon(heavyBoundariesList)
        if not heavyPolygons.is_valid:
            heavyPolygons = DiagnoseAndFixMultiPolygon("FORECASTHEAVY",heavyBoundariesList)
    
    if len(mediumBoundariesList) == 0: mediumPolygons = None
    else: 
        mediumPolygons = MultiPolygon(mediumBoundariesList)
        if not mediumPolygons.is_valid:
            mediumPolygons = DiagnoseAndFixMultiPolygon("FORECASTMEDIUM",mediumBoundariesList)
    
    if len(lightBoundariesList) == 0: lightPolygons = None
    else: 
        lightPolygons = MultiPolygon(lightBoundariesList)
        if not lightPolygons.is_valid:
            lightPolygons = DiagnoseAndFixMultiPolygon("FORECASTLIGHT",lightBoundariesList)
                              
    if len(uncertaintyBoundariesList) == 0: uncertaintyPolygons = None
    else: 
        uncertaintyPolygons = MultiPolygon(uncertaintyBoundariesList)
        if not uncertaintyPolygons.is_valid:
            uncertaintyPolygons = DiagnoseAndFixMultiPolygon("FORECASTUNCERTAINTY",uncertaintyBoundariesList)
        
   
    # clip the oil contours to the shoreline
    # note: we need to check that the polygons are valid before trying to clip to prevent shapely from crashing
    if landPolygons is not None:
        if not landPolygons.is_valid:
            print "*** landPolygons is not valid. We will not clip to the shoreline. ***"
        else :
            if heavyPolygons is not None:
                if not heavyPolygons.is_valid:
                    print "*** heavyPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif heavyPolygons.intersects(landPolygons):
                    print "clipping heavyPolygons to shoreline"
                    heavyPolygons = heavyPolygons.difference(landPolygons)
                    
            if mediumPolygons is not None:
                if not mediumPolygons.is_valid:
                    print "*** mediumPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif mediumPolygons.intersects(landPolygons):
                    print "clipping mediumPolygons to shoreline"
                    mediumPolygons = mediumPolygons.difference(landPolygons)
                    
            if lightPolygons is not None:
                if not lightPolygons.is_valid:
                    print "*** lightPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif lightPolygons.intersects(landPolygons):
                    print "clipping lightPolygons to shoreline"
                    lightPolygons = lightPolygons.difference(landPolygons)
                    
            # note: JerryM wonders whether we should clip the uncertainty to the shoreline;
            # it may look better as simple polygons going over the land.
            if uncertaintyPolygons is not None:
                if not uncertaintyPolygons.is_valid:
                    print "*** uncertaintyPolygons is not valid. It will not be clipped to the shoreline or oil polygons ***"
                else:
                    print "clipping uncertaintyPolygons to shoreline and oil polygons"
                    
                    for polygons,nameOfPolygons in [(lightPolygons,"lightPolygons"),(mediumPolygons,"mediumPolygons"),(heavyPolygons,"heavyPolygons"),(landPolygons,"landPolygons")]:
                        if polygons is not None:
                            print "taking difference with",nameOfPolygons
                            newUncertaintyPolygons = uncertaintyPolygons.difference(polygons)
                            print "finished taking difference"
                            if not newUncertaintyPolygons.is_valid:
                                #print "Uncertainty Polygon is no longer valid after taking difference with",polygons,nameOfPolygons
                                s = "*** uncertaintyPolygons have not been clipped to %s ***"%(nameOfPolygons)
                                print s
                            else:
                                uncertaintyPolygons = newUncertaintyPolygons
                                
                    
   
           
    return   (landPolygons,heavyPolygons,mediumPolygons,lightPolygons,uncertaintyPolygons)
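# A minimal, self-contained sketch (not part of the example above) of the same
# clip-to-shoreline pattern: check validity before calling difference(), and keep
# the original geometry if the result comes back invalid. Geometries are made up.
from shapely.geometry import MultiPolygon

def clip_to_land(oil, land):
    if oil is None or land is None or not (oil.is_valid and land.is_valid):
        return oil  # don't risk a GEOS failure on invalid input
    if not oil.intersects(land):
        return oil
    clipped = oil.difference(land)
    return clipped if clipped.is_valid else oil

land = MultiPolygon([(((0, 0), (0, 2), (2, 2), (2, 0)), [])])
oil = MultiPolygon([(((1, 1), (1, 3), (3, 3), (3, 1)), [])])
print(clip_to_land(oil, land).area)  # 3.0: only the offshore part remains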
Beispiel #23
0
def parse_osm_relations(relations, osm_way_df):
    """
    Parses the OSM relations (multipolygons) from OSM
    ways and nodes. See the OSM documentation for more
    information about relations: http://wiki.openstreetmap.org/wiki/Relation
         
    Parameters
    ----------
    relations : list
        OSM 'relation' items (dictionaries) in a list. 
    osm_way_df : gpd.GeoDataFrame
        OSM 'way' features as a GeoDataFrame that contains all the 
        'way' features that will constitute the multipolygon relations.
     
    Returns
    -------
    gpd.GeoDataFrame
        A GeoDataFrame with MultiPolygon representations of the 
        relations and the attributes associated with them.   
    """

    gdf_relations = gpd.GeoDataFrame()

    # Iterate over relations and extract the items
    for relation in relations:
        if relation['tags']['type'] == 'multipolygon':
            try:
                # Parse member 'way' ids
                member_way_ids = [
                    member['ref'] for member in relation['members']
                    if member['type'] == 'way'
                ]
                # Extract the ways
                member_ways = osm_way_df.reindex(member_way_ids)
                # Extract the nodes of those ways
                member_nodes = list(member_ways['nodes'].values)
                try:
                    # Create MultiPolygon from geometries (exclude NaNs)
                    multipoly = MultiPolygon(list(member_ways['geometry']))
                except Exception:
                    multipoly = invalid_multipoly_handler(
                        gdf=member_ways,
                        relation=relation,
                        way_ids=member_way_ids)

                if multipoly:
                    # Create GeoDataFrame with the tags and the MultiPolygon and its 'ways' (ids), and the 'nodes' of those ways
                    geo = gpd.GeoDataFrame(relation['tags'],
                                           index=[relation['id']])
                    # Initialize columns (needed for .loc inserts)
                    geo = geo.assign(geometry=None,
                                     ways=None,
                                     nodes=None,
                                     element_type=None,
                                     osmid=None)
                    # Add attributes
                    geo.loc[relation['id'], 'geometry'] = multipoly
                    geo.loc[relation['id'], 'ways'] = member_way_ids
                    geo.loc[relation['id'], 'nodes'] = member_nodes
                    geo.loc[relation['id'], 'element_type'] = 'relation'
                    geo.loc[relation['id'], 'osmid'] = relation['id']

                    # Append to relation GeoDataFrame
                    gdf_relations = gdf_relations.append(geo, sort=False)
                    # Remove such 'ways' from 'osm_way_df' that are part of the 'relation'
                    osm_way_df = osm_way_df.drop(member_way_ids)
            except Exception:
                log("Could not handle OSM 'relation': {}".format(
                    relation['id']))

    # Merge 'osm_way_df' and the 'gdf_relations'
    osm_way_df = osm_way_df.append(gdf_relations, sort=False)
    return osm_way_df
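# Side note, not from the original: GeoDataFrame.append() used above was removed
# in pandas 2.x. A hedged sketch of the equivalent merge with pd.concat, assuming
# the same osm_way_df / gdf_relations names:
import geopandas as gpd
import pandas as pd

def merge_ways_and_relations(osm_way_df, gdf_relations):
    # concat keeps the union of columns; wrap the result back into a GeoDataFrame
    return gpd.GeoDataFrame(pd.concat([osm_way_df, gdf_relations], sort=False))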
Beispiel #24
0
def random_multipolygon(size):
    polygons = []
    for i in range(size):
        polygons.append(random_polygon(i))
    result = MultiPolygon(polygons)
    return result
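# random_polygon() is not shown above; a hypothetical stand-in that builds a valid
# polygon from the convex hull of a few random points, shifted by i so the members
# of the resulting MultiPolygon stay disjoint:
import random
from shapely.geometry import MultiPoint

def random_polygon(i):
    pts = [(random.random() + 3 * i, random.random()) for _ in range(8)]
    return MultiPoint(pts).convex_hull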
    def _create_annotations(self):
        # Creates annotations for each isolated mask

        # Each image may have multiple annotations, so create an array
        self.annotations = []
        for key, mask in self.isolated_masks.items():
            annotation = dict()
            annotation['segmentation'] = []
            annotation['iscrowd'] = 0
            annotation['image_id'] = self.image_id
            if not self.category_ids.get(key):
                print(
                    f'category color not found: {key}; check for missing category or antialiasing'
                )
                continue
            annotation['category_id'] = self.category_ids[key]
            annotation['id'] = self._next_annotation_id()

            # Find contours in the isolated mask
            contours = measure.find_contours(mask,
                                             0.5,
                                             positive_orientation='low')

            polygons = []
            for contour in contours:
                # Flip from (row, col) representation to (x, y)
                # and subtract the padding pixel
                for i in range(len(contour)):
                    row, col = contour[i]
                    contour[i] = (col - 1, row - 1)

                # Make a polygon and simplify it
                poly = Polygon(contour)
                poly = poly.simplify(1.0, preserve_topology=False)

                if (poly.area > 16):  # Ignore tiny polygons
                    if (poly.geom_type == 'MultiPolygon'):
                        # if MultiPolygon, take the smallest convex Polygon containing all the points in the object
                        poly = poly.convex_hull

                    if (
                            poly.geom_type == 'Polygon'
                    ):  # Ignore if still not a Polygon (could be a line or point)
                        polygons.append(poly)
                        segmentation = np.array(
                            poly.exterior.coords).ravel().tolist()
                        annotation['segmentation'].append(segmentation)

            if len(polygons) == 0:
                # This item doesn't have any visible polygons, ignore it
                # (This can happen if a randomly placed foreground is covered up
                #  by other foregrounds)
                continue

            # Combine the polygons to calculate the bounding box and area
            multi_poly = MultiPolygon(polygons)
            x, y, max_x, max_y = multi_poly.bounds
            self.width = max_x - x
            self.height = max_y - y
            annotation['bbox'] = (x, y, self.width, self.height)
            annotation['area'] = multi_poly.area

            # Finally, add this annotation to the list
            self.annotations.append(annotation)
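# The bounding-box arithmetic above, in isolation: shapely bounds are
# (minx, miny, maxx, maxy), while COCO-style bboxes are (x, y, width, height).
from shapely.geometry import MultiPolygon, Polygon

mp = MultiPolygon([Polygon([(2, 3), (2, 5), (6, 5), (6, 3)])])
x, y, max_x, max_y = mp.bounds
print((x, y, max_x - x, max_y - y))  # (2.0, 3.0, 4.0, 2.0)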
Beispiel #26
0
#print 'Lines with points: ', n_line
#for i in range(n_line):
#    print i+1, ' th lines with points : ', linepoints[i]
#print '\n'

# ==================================================
# Make multilinestring from line list
# ==================================================
multilines = MultiLineString(linepoints)

# self-intersecting the MultiLineString nodes the lines, splitting them at every crossing
x = multilines.intersection(multilines)

# Polygonize
result, dangles, cuts, invalids = polygonize_full(x)

result = MultiPolygon(result)
polygon = cascaded_union(result)

# Make multilinestring from line list
#multilines = MultiLineString(linepoints)

# Polygonize
#result, dangles, cuts, invalids = polygonize_full(multilines)

#result = MultiPolygon(result)
#polygon = cascaded_union(result)

##################################################
multilines = polygon.boundary.union(result.boundary)

# Polygonize
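# The example breaks off here. A hedged sketch of the presumably intended final
# step, repeating the polygonize/union pattern used above on the new linework:
result2, dangles2, cuts2, invalids2 = polygonize_full(multilines)
polygon2 = cascaded_union(MultiPolygon(result2))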
Beispiel #27
0
class OSM_Boundary(object):
    def __init__(self, relation_element, way_elements, node_elements):
        self.polygons = []
        self.ways = {}
        self.relation = relation_element
        self.open = True
        reporting = Reporting()
        self.id = relation_element.attrib["id"]
        self.version = relation_element.attrib["version"]
        self.changeset = relation_element.attrib["changeset"]
        self.tags = {}

        nodes = {}
        for element in node_elements:
            node = OSM_Node(element)
            nodes[node.id] = node

        ways = {}
        for element in way_elements:
            way = OSM_Way(element)
            way.add_nodes(nodes)
            ways[way.id] = way

        for sub in relation_element:
            if sub.tag == 'tag':
                key = sub.attrib["k"]
                value = sub.attrib["v"]
                self.tags[key] = value
            elif sub.tag == 'member' and sub.attrib["type"] == 'way':
                role = sub.attrib.get("role", "")
                if role:
                    raise NotImplementedError, "Role %r for relation way members not supported" % (role,)
                way_id = sub.attrib["ref"]
                way = ways.get(way_id)
                if way:
                    if not way.complete:
                        raise ValueError, "Incomplete way: %r" % (way,)
                    self.ways[way_id] = way
                else:
                    raise ValueError, "Way not found: %r" % (way_id,)

        self.name = self.tags.get("name")
        if not self.ways:
            raise ValueError, "No ways"
        
        self.open = False
        ways_left = self.ways.values()
        while ways_left:
            segment_start = ways_left.pop(0)
            polygon = []
            for node in segment_start.nodes:
                polygon.append((node.lat, node.lon))
            last_end = segment_start.end_node
            while ways_left:
                if last_end is segment_start.start_node:
                    # cycle ended
                    break
                next = None
                for way in ways_left:
                    if way.start_node is last_end:
                        last_end = way.end_node
                        for node in way.nodes[1:]:
                            polygon.append((node.lat, node.lon))
                        next = way
                        break
                    elif way.end_node is last_end:
                        last_end = way.start_node
                        rnodes = list(way.nodes[1:])
                        rnodes.reverse()
                        for node in rnodes:
                            polygon.append((node.lat, node.lon))
                        next = way
                        break
                if next:
                    ways_left.remove(next)
                else:
                    # open segment ends
                    self.open = True
                    break
            self.polygons.append(polygon)
        if HAVE_SHAPELY:
            reporting.output_msg("info", 
                    "Using Shapely for 'point in polygon' checks")
            self.multi_polygon = MultiPolygon([(p, ()) for p in self.polygons])
            self._contains_impl = self._contains_shapely_impl
        else:
            reporting.output_msg("info", 
                "Using Python function for the 'point in polygon' checks")
            self._contains_impl = self._contains_python_impl

    def __repr__(self):
        if self.open:
            open_s = "open"
        else:
            open_s = "closed"
        return "<OSM_Boundary #%s %r %s %i ways %i polygons>" % (self.id, 
                self.name, open_s, len(self.ways), len(self.polygons))

    def _contains_python_impl(self, location):
        lat = location.lat
        lon = location.lon
        for pol in self.polygons:
            contains = False
            plen = len(pol)
            i = 0
            j = plen - 1
            while i < plen:
                if ( ((pol[i][0] > lat) != (pol[j][0] > lat)) and 
                        (lon < ((pol[j][1] - pol[i][1]) * (lat - pol[i][0]) / (pol[j][0] - pol[i][0]) + pol[i][1])) ):
                    contains = not contains
                j = i
                i += 1
            if contains:
                return True
        return False
    
    def _contains_shapely_impl(self, location):
        point = Point(location.lat, location.lon)
        return self.multi_polygon.contains(point)
   
    def __contains__(self, location):
        if self.open:
            return True
        return self._contains_impl(location)
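# The pure-Python fallback above is the classic even-odd ray-casting test. The
# same idea as a standalone function over a single (lat, lon) ring:
def point_in_ring(ring, lat, lon):
    inside = False
    j = len(ring) - 1
    for i in range(len(ring)):
        if ((ring[i][0] > lat) != (ring[j][0] > lat)) and \
           (lon < (ring[j][1] - ring[i][1]) * (lat - ring[i][0]) /
                  (ring[j][0] - ring[i][0]) + ring[i][1]):
            inside = not inside
        j = i
    return inside

print(point_in_ring([(0, 0), (0, 4), (4, 4), (4, 0)], 2, 2))  # True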
Beispiel #28
0
    if len(unassigned) == len(holes):
        # give up
        break
    holes = unassigned
print >> sys.stderr, "%d retried, %d unassigned." % (retries, len(unassigned))

print >> sys.stderr, "Buffering polygons."
for place_id, polygon in polygons.items():
    if type(polygon) is Polygon:
        polygon = Polygon(polygon.exterior.coords)
    else:
        bits = []
        for p in polygon.geoms:
            if type(p) is Polygon:
                bits.append(Polygon(p.exterior.coords))
        polygon = MultiPolygon(bits)
    polygons[place_id] = polygon.buffer(0)

print >> sys.stderr, "Writing output."
features = []
for place_id, poly in polygons.items():
    features.append({
        "type": "Feature",
        "id": place_id,
        "geometry": poly.__geo_interface__,
        "properties": {
            "woe_id": place_id,
            "name": names.get(place_id, "")
        }
    })
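# buffer(0), as used in the loop above, is the standard repair trick for
# self-intersecting polygons; a minimal illustration with a bow-tie ring:
from shapely.geometry import Polygon

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])
print(bowtie.is_valid)  # False: the ring crosses itself at (1, 1)
fixed = bowtie.buffer(0)
print(fixed.is_valid)   # True (note: buffer(0) may drop one lobe of a bow-tie)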
def main():
    global ERASE_SHP
    if not str(ERASE_SHP).endswith('.shp'):
        arcpy.AddMessage("Converting Erase feature class to shapefile...")
        NEW_ERASE_SHP = os.path.join("in_memory", OUT_SHP + "_1")
        arcpy.CopyFeatures_management(ERASE_SHP, NEW_ERASE_SHP)
        ERASE_SHP = NEW_ERASE_SHP
    
    # fix veg polys... there is likely bad geometry (self intersecting rings, overlaping polys, etc.)
    arcpy.AddMessage("Repairing potential invalid geometry with Erase polys...")
    
    ERASE_SHP_UNION = os.path.join("in_memory", OUT_SHP + "_union")
    arcpy.Union_analysis(ERASE_SHP, ERASE_SHP_UNION, "ALL", "1 FEET", "GAPS")
    arcpy.DeleteIdentical_management(ERASE_SHP_UNION, "Shape")
    
    ERASE_SHP_UNION_DISS = os.path.join(OUT_FOLDER, OUT_SHP + '_union_diss_repair.shp')
    arcpy.Dissolve_management(ERASE_SHP_UNION, ERASE_SHP_UNION_DISS, "", "", "SINGLE_PART")
    arcpy.RepairGeometry_management(ERASE_SHP_UNION_DISS, 'DELETE_NULL')
    
    ERASE_SHP = ERASE_SHP_UNION_DISS
    arcpy.AddMessage("Created:\n" + str(ERASE_SHP))
    
    # ESRI -> PYSHP
    arcpy.AddMessage("Reading shapefiles...")
    shpA = shapefile.Reader(POLYS_SHP)
    shpB = shapefile.Reader(ERASE_SHP)
    
    # PYSHP -> SHAPELY
    arcpy.AddMessage("Converting IVM Polygons...")
    shpA_polys = ConvertPolys(shpA)    
    shpA_multipolys = MultiPolygon(shpA_polys)
    
    arcpy.AddMessage("Converting Erase Polygons...")
    shpB_polys = ConvertPolys(shpB)
    shpB_multipolys = MultiPolygon(shpB_polys)
    
    # SHAPELY
    arcpy.AddMessage("Performing Erase...")
    arcpy.AddMessage(time.strftime("%H:%M"))
    
    try:
        shpC = shpA_multipolys.difference(shpB_multipolys) # SHAPELY [(x,y),(x,y),...]
    except Exception:
        # nothing to recover here; re-raise so the tool reports the failure
        raise
    
    arcpy.AddMessage(time.strftime("%H:%M"))
    
    # SHAPELY -> PYSHP
    FINAL_SHP = os.path.join(OUT_FOLDER, OUT_SHP + ".shp")
    
    arcpy.AddMessage("Saving: " + os.path.basename(FINAL_SHP))
    
    w = shapefile.Writer(shapefile.POLYGON)
    w.field('ID')   
    
    for i, geom in enumerate(list(shpC.geoms)):
        shpC_exterior = []
        shpC_pyshp_fmt = []
        # get exterior rings
        for coord in geom.exterior.coords:
            x_y = [coord[0], coord[1]] # PYSHP [[[x,y],[x,y],...]]
            shpC_exterior.append(x_y)
        shpC_pyshp_fmt.append(shpC_exterior)
        # get interior rings
        if len(list(geom.interiors)) > 0:
            for i, ring in enumerate(list(geom.interiors)):
                shpC_interior = []
                for coord in list(ring.coords):
                    x_y = [coord[0], coord[1]]
                    shpC_interior.append(x_y)
                ##check sign, counter clockwise point order creates hole, else overlapping poly
                #if shapefile.signed_area(list(ring.coords)) >= 0:
                    #shpC_interior.reverse()
                shpC_pyshp_fmt.append(shpC_interior)
        
        w.poly(shpC_pyshp_fmt)
        w.record(ID='0')
    
    w.save(FINAL_SHP)

    arcpy.AddMessage("Done!")
# create storage list for our new shapely objects
fairways_multiply = []
greens_multiply = []

# create shapely geometry objects for fairways
for feature in fairways_data['features']:
    shape = asShape(feature['geometry'])
    fairways_multiply.append(shape)

# create shapely geometry objects for greens
for green in greens_data['features']:
    green_shape = asShape(green['geometry'])
    greens_multiply.append(green_shape)

# create shapely MultiPolygon objects for input analysis
fairway_plys = MultiPolygon(fairways_multiply)
greens_plys = MultiPolygon(greens_multiply)

# run the symmetric difference function, creating a new MultiPolygon
result = fairway_plys.symmetric_difference(greens_plys)

# write the results out to well known text (wkt) with shapely dump
def write_wkt(filepath, features):
    with open(filepath, "w") as f:
        # create a js variable called ply_data used in html
        # Shapely dumps geometry out to WKT
        f.write("var ply_data = '" + dumps(features) + "'")

# write to our output js file the new polygon as wkt
write_wkt(output_wkt_sym_diff, result)
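# symmetric_difference keeps the parts of either input that are not shared; a
# quick sanity check with two overlapping axis-aligned squares:
from shapely.geometry import box

a = box(0, 0, 2, 2)
b = box(1, 1, 3, 3)
print(a.symmetric_difference(b).area)  # 6.0 = (4 - 1) + (4 - 1)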
Beispiel #31
0
from shapely.geometry import Point, MultiPolygon, Polygon
import numpy as np

point = Point(-38.561737, -3.736494)
poligon = Polygon()
import ipdb
ipdb.set_trace()
multi = MultiPolygon([])  # an empty MultiPolygon; the original call MultiPolygon([[]], []) is malformed
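# For reference, two constructor forms shapely actually accepts: a list of
# Polygon objects, or a list of (shell, holes) tuples:
from shapely.geometry import MultiPolygon, Polygon

mp_from_polygons = MultiPolygon([Polygon([(0, 0), (1, 0), (1, 1)])])
mp_from_tuples = MultiPolygon([(((0, 0), (1, 0), (1, 1)), [])])
print(mp_from_polygons.equals(mp_from_tuples))  # True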
Beispiel #32
0
    def custom_render(self, level_render_data, access_permissions, full_levels):
        if full_levels:
            levels = get_full_levels(level_render_data)
        else:
            levels = get_main_levels(level_render_data)

        buildings = None
        areas = None

        main_building_block = None
        main_building_block_diff = None

        current_upper_bound = None
        for geoms in levels:
            # hide indoor and outdoor rooms if their access restriction was not unlocked
            restricted_spaces_indoors = unary_union(
                tuple(area.geom for access_restriction, area in geoms.restricted_spaces_indoors.items()
                      if access_restriction not in access_permissions)
            )
            restricted_spaces_outdoors = unary_union(
                tuple(area.geom for access_restriction, area in geoms.restricted_spaces_outdoors.items()
                      if access_restriction not in access_permissions)
            )
            restricted_spaces = unary_union((restricted_spaces_indoors, restricted_spaces_outdoors))  # noqa

            # crop altitudeareas
            for altitudearea in geoms.altitudeareas:
                altitudearea.geometry = altitudearea.geometry.geom.difference(restricted_spaces)
                altitudearea.geometry_prep = prepared.prep(altitudearea.geometry)

            # crop heightareas
            new_heightareas = []
            for geometry, height in geoms.heightareas:
                geometry = geometry.geom.difference(restricted_spaces)
                geometry_prep = prepared.prep(geometry)
                new_heightareas.append((geometry, geometry_prep, height))
            geoms.heightareas = new_heightareas

            if geoms.on_top_of_id is None:
                buildings = geoms.buildings
                areas = MultiPolygon()
                current_upper_bound = geoms.upper_bound

                holes = geoms.holes.difference(restricted_spaces)
                buildings = buildings.difference(holes)
                areas = areas.union(holes.buffer(0).buffer(0.01, join_style=JOIN_STYLE.mitre))

                main_building_block = OpenScadBlock('union()', comment='Level %s' % geoms.short_label)
                self.root.append(main_building_block)
                main_building_block_diff = OpenScadBlock('difference()')
                main_building_block.append(main_building_block_diff)
                main_building_block_inner = OpenScadBlock('union()')
                main_building_block_diff.append(main_building_block_inner)
                main_building_block_inner.append(
                    self._add_polygon(None, buildings.intersection(self.bbox), geoms.lower_bound, geoms.upper_bound)
                )

            for altitudearea in sorted(geoms.altitudeareas, key=attrgetter('altitude')):
                if not altitudearea.geometry.intersects(self.bbox):
                    continue

                if altitudearea.altitude2 is not None:
                    name = 'Altitudearea %s-%s' % (altitudearea.altitude/1000, altitudearea.altitude2/1000)
                else:
                    name = 'Altitudearea %s' % (altitudearea.altitude / 1000)

                # why all this buffering?
                # buffer(0) ensures a valid geometry, which is sadly needed sometimes.
                # the rest of the buffering makes polygons overlap a little so no glitches appear;
                # the intersections below ensure that they only overlap with each other and don't eat walls
                geometry = altitudearea.geometry.buffer(0)
                inside_geometry = geometry.intersection(buildings).buffer(0).buffer(0.01, join_style=JOIN_STYLE.mitre)
                outside_geometry = geometry.difference(buildings).buffer(0).buffer(0.01, join_style=JOIN_STYLE.mitre)
                geometry_buffered = geometry.buffer(0.01, join_style=JOIN_STYLE.mitre)
                if geoms.on_top_of_id is None:
                    areas = areas.union(geometry)
                    buildings = buildings.difference(geometry).buffer(0)
                    inside_geometry = inside_geometry.intersection(areas).buffer(0)
                    outside_geometry = outside_geometry.intersection(areas).buffer(0)
                    geometry_buffered = geometry_buffered.intersection(areas).buffer(0)
                outside_geometry = outside_geometry.intersection(self.bbox)

                if not inside_geometry.is_empty:
                    if altitudearea.altitude2 is not None:
                        min_slope_altitude = min(altitudearea.altitude, altitudearea.altitude2)
                        max_slope_altitude = max(altitudearea.altitude, altitudearea.altitude2)
                        bounds = inside_geometry.bounds

                        # cut in
                        polygon = self._add_polygon(None, inside_geometry,
                                                    min_slope_altitude-10, current_upper_bound+1000)

                        slope = self._add_slope(bounds, altitudearea.altitude, altitudearea.altitude2,
                                                altitudearea.point1, altitudearea.point2, bottom=True)
                        main_building_block_diff.append(
                            OpenScadBlock('difference()', children=[polygon, slope], comment=name+' inside cut')
                        )

                        # actual thingy
                        if max_slope_altitude > current_upper_bound and inside_geometry.intersects(self.bbox):
                            polygon = self._add_polygon(None, inside_geometry.intersection(self.bbox),
                                                        current_upper_bound-10, max_slope_altitude+10)
                            slope = self._add_slope(bounds, altitudearea.altitude, altitudearea.altitude2,
                                                    altitudearea.point1, altitudearea.point2, bottom=False)
                            main_building_block.append(
                                OpenScadBlock('difference()',
                                              children=[polygon, slope], comment=name + 'outside')
                            )
                    else:
                        if altitudearea.altitude < current_upper_bound:
                            main_building_block_diff.append(
                                self._add_polygon(name+' inside cut', inside_geometry,
                                                  altitudearea.altitude, current_upper_bound+1000)
                            )
                        else:
                            main_building_block.append(
                                self._add_polygon(name+' inside', inside_geometry.intersection(self.bbox),
                                                  min(altitudearea.altitude-700, current_upper_bound-10),
                                                  altitudearea.altitude)
                            )

                if not outside_geometry.is_empty:
                    if altitudearea.altitude2 is not None:
                        min_slope_altitude = min(altitudearea.altitude, altitudearea.altitude2)
                        max_slope_altitude = max(altitudearea.altitude, altitudearea.altitude2)
                        bounds = outside_geometry.bounds

                        polygon = self._add_polygon(None, outside_geometry,
                                                    min_slope_altitude-710, max_slope_altitude+10)
                        slope1 = self._add_slope(bounds, altitudearea.altitude, altitudearea.altitude2,
                                                 altitudearea.point1, altitudearea.point2, bottom=False)
                        slope2 = self._add_slope(bounds, altitudearea.altitude-700, altitudearea.altitude2-700,
                                                 altitudearea.point1, altitudearea.point2, bottom=True)
                        union = OpenScadBlock('union()', children=[slope1, slope2], comment=name+'outside')
                        main_building_block.append(
                            OpenScadBlock('difference()',
                                          children=[polygon, union], comment=name+'outside')
                        )
                    else:
                        if geoms.on_top_of_id is None:
                            lower = geoms.lower_bound
                        else:
                            lower = altitudearea.altitude-700
                            if lower == current_upper_bound:
                                lower -= 10
                        main_building_block.append(
                            self._add_polygon(name+' outside', outside_geometry, lower, altitudearea.altitude)
                        )

                # obstacles
                if altitudearea.altitude2 is not None:
                    obstacles_diff_block = OpenScadBlock('difference()', comment=name + ' obstacles')
                    had_obstacles = False

                    obstacles_block = OpenScadBlock('union()')
                    obstacles_diff_block.append(obstacles_block)

                    min_slope_altitude = min(altitudearea.altitude, altitudearea.altitude2)
                    max_slope_altitude = max(altitudearea.altitude, altitudearea.altitude2)
                    bounds = geometry.bounds

                    for height, obstacles in altitudearea.obstacles.items():
                        height_diff = OpenScadBlock('difference()')
                        had_height_obstacles = None

                        height_union = OpenScadBlock('union()')
                        height_diff.append(height_union)

                        for obstacle in obstacles:
                            if not obstacle.geom.intersects(self.bbox):
                                continue
                            obstacle = obstacle.geom.buffer(0).buffer(0.01, join_style=JOIN_STYLE.mitre)
                            if self.min_width:
                                obstacle = obstacle.union(self._satisfy_min_width(obstacle)).buffer(0)
                            obstacle = obstacle.intersection(geometry_buffered)
                            if not obstacle.is_empty:
                                had_height_obstacles = True
                                had_obstacles = True
                            height_union.append(
                                self._add_polygon(None, obstacle.intersection(self.bbox),
                                                  min_slope_altitude-20, max_slope_altitude+height+10)
                            )

                        if had_height_obstacles:
                            obstacles_block.append(height_diff)
                            height_diff.append(
                                self._add_slope(bounds, altitudearea.altitude+height, altitudearea.altitude2+height,
                                                altitudearea.point1, altitudearea.point2, bottom=False)
                            )

                    if had_obstacles:
                        main_building_block.append(obstacles_diff_block)
                        obstacles_diff_block.append(
                            self._add_slope(bounds, altitudearea.altitude-10, altitudearea.altitude2-10,
                                            altitudearea.point1, altitudearea.point2, bottom=True)
                        )
                else:
                    obstacles_block = OpenScadBlock('union()', comment=name + ' obstacles')
                    had_obstacles = False
                    for height, obstacles in altitudearea.obstacles.items():
                        for obstacle in obstacles:
                            if not obstacle.geom.intersects(self.bbox):
                                continue
                            obstacle = obstacle.geom.buffer(0).buffer(0.01, join_style=JOIN_STYLE.mitre)
                            if self.min_width:
                                obstacle = obstacle.union(self._satisfy_min_width(obstacle)).buffer(0)
                            obstacle = obstacle.intersection(geometry_buffered).intersection(self.bbox)
                            if not obstacle.is_empty:
                                had_obstacles = True
                            obstacles_block.append(
                                self._add_polygon(None, obstacle,
                                                  altitudearea.altitude-10, altitudearea.altitude+height)
                            )

                    if had_obstacles:
                        main_building_block.append(obstacles_block)

            if self.min_width and geoms.on_top_of_id is None:
                main_building_block_inner.append(
                    self._add_polygon('min width',
                                      self._satisfy_min_width(buildings).intersection(self.bbox).buffer(0),
                                      geoms.lower_bound, geoms.upper_bound)
                )
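# The buffering trick from the comments above, in isolation: a tiny outward
# mitre-join buffer makes adjacent polygons overlap slightly, hiding hairline
# rendering gaps between them.
from shapely.geometry import box, JOIN_STYLE

left = box(0, 0, 1, 1)
right = box(1, 0, 2, 1)  # shares an edge with `left`
grown = left.buffer(0.01, join_style=JOIN_STYLE.mitre)
print(left.intersection(right).area > 0)   # False: edge contact only
print(grown.intersection(right).area > 0)  # True: now they overlap a little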
Beispiel #33
0
def main() -> None:
    """Import the osm2pgsql expire-tiles file to Postgres."""
    try:
        parser = ArgumentParser(
            description=
            "Used to import the osm2pgsql expire-tiles file to Postgres",
            prog=sys.argv[0])
        parser.add_argument(
            "--buffer",
            type=float,
            default=0.0,
            help="Extent buffer to the tiles [m], default is 0",
        )
        parser.add_argument(
            "--simplify",
            type=float,
            default=0.0,
            help="Simplify the result geometry [m], default is 0",
        )
        parser.add_argument(
            "--create",
            default=False,
            action="store_true",
            help="create the table if not exists",
        )
        parser.add_argument(
            "--delete",
            default=False,
            action="store_true",
            help="empty the table",
        )
        parser.add_argument(
            "file",
            metavar="FILE",
            help="The osm2pgsql expire-tiles file",
        )
        parser.add_argument(
            "connection",
            metavar="CONNECTION",
            help=(
                "The PostgreSQL connection string e.g. "
                '"user=www-data password=www-data dbname=sig host=localhost"'),
        )
        parser.add_argument(
            "table",
            metavar="TABLE",
            help="The PostgreSQL table to fill",
        )
        parser.add_argument(
            "--schema",
            default="public",
            help="The PostgreSQL schema to use (should already exist), default is public",
        )
        parser.add_argument(
            "column",
            metavar="COLUMN",
            default="geom",
            nargs="?",
            help='The PostgreSQL column, default is "geom"',
        )
        parser.add_argument(
            "--srid",
            type=int,
            default=3857,
            nargs="?",
            help="The stored geometry SRID, no conversion by default (3857)",
        )
        options = parser.parse_args()

        connection = psycopg2.connect(options.connection)
        cursor = connection.cursor()

        if options.create:
            cursor.execute(
                "SELECT count(*) FROM pg_tables WHERE schemaname=%(schema)s AND tablename=%(table)s",
                {
                    "schema": options.schema,
                    "table": options.table
                },
            )
            if cursor.fetchone()[0] == 0:
                cursor.execute(
                    f'CREATE TABLE IF NOT EXISTS "{options.schema}"."{options.table}" (id serial)'
                )
                cursor.execute(
                    "SELECT AddGeometryColumn(%(schema)s, %(table)s, %(column)s, %(srid)s, 'MULTIPOLYGON', 2)",
                    {
                        "schema": options.schema,
                        "table": options.table,
                        "column": options.column,
                        "srid": options.srid,
                    },
                )

        if options.delete:
            cursor.execute(
                psycopg2.sql.SQL("DELETE FROM {}").format(
                    psycopg2.sql.Identifier(options.table)))

        geoms = []
        grid = QuadTileGrid(max_extent=(-20037508.34, -20037508.34,
                                        20037508.34, 20037508.34), )
        with open(options.file, encoding="utf-8") as f:
            for coord in f:
                extent = grid.extent(parse_tilecoord(coord), options.buffer)
                geoms.append(
                    Polygon((
                        (extent[0], extent[1]),
                        (extent[0], extent[3]),
                        (extent[2], extent[3]),
                        (extent[2], extent[1]),
                    )))
        if len(geoms) == 0:
            print("No coords found")
            connection.commit()
            cursor.close()
            connection.close()
            sys.exit(0)
        geom = unary_union(geoms)
        if geom.geom_type == "Polygon":
            geom = MultiPolygon((geom, ))

        if options.simplify > 0:
            geom = geom.simplify(options.simplify)  # simplify returns a new geometry

        sql_geom = f"ST_GeomFromText('{geom.wkt}', 3857)"
        if options.srid <= 0:
            sql_geom = f"ST_GeomFromText('{geom.wkt}')"
        elif options.srid != 3857:
            sql_geom = f"ST_Transform({sql_geom}, {options.srid})"

        cursor.execute(
            f'INSERT INTO "{options.table}" ("{options.column}") VALUES ({sql_geom})'
        )
        connection.commit()
        cursor.close()
        connection.close()
        print("Import successful")
    except SystemExit:
        raise
    except:  # pylint: disable=bare-except
        logger.exception("Exit with exception")
        sys.exit(1)
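# The Polygon -> MultiPolygon promotion above, isolated: a PostGIS column typed
# MULTIPOLYGON rejects plain POLYGON values, so a single-polygon union is wrapped.
from shapely.geometry import MultiPolygon, box
from shapely.ops import unary_union

geom = unary_union([box(0, 0, 1, 1), box(0.5, 0, 1.5, 1)])  # merges into one Polygon
if geom.geom_type == "Polygon":
    geom = MultiPolygon((geom,))
print(geom.wkt.split(" ")[0])  # MULTIPOLYGON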
#  dict_dpts.setdefault(dpt_info['NOM_DEPT'], [[],[]])
#  dict_dpts[dpt_info['NOM_DEPT']][0].append(dpt_geo)
#  dict_dpts[dpt_info['NOM_DEPT']][1].append(dpt_info)
##import pprint
##pprint.pprint(dict_dpts['VENDEE'][1])
### info same except for RINGNUM thus ok to drop
#region_vendee = MultiPolygon([Polygon(ls_xy) for ls_xy in dict_dpts['VENDEE'][0]])

df_dpt = pd.DataFrame({'poly' : [Polygon(xy) for xy in m_fra.dpt],
                       'dpt_name' : [d['NOM_DEPT'] for d in m_fra.dpt_info],
                       'dpt_code' : [d['CODE_DEPT'] for d in m_fra.dpt_info],
                       'region_name' : [d['NOM_REGION'] for d in m_fra.dpt_info],
                       'region_code' : [d['CODE_REG'] for d in m_fra.dpt_info]})
# print df_dpt[['code_dpt', 'dpt_name','code_region', 'region_name']].to_string()
# Some regions are broken into multiple polygons and so appear several times (138 items, all of metropolitan France)
region_multipolygon = MultiPolygon(list(df_dpt[df_dpt['region_name'] ==\
                                          "NORD-PAS-DE-CALAIS"]['poly'].values))
region_multipolygon_prep = prep(region_multipolygon)
region_bounds = region_multipolygon.bounds

#for k,v in dict_120_main['communes'].items():
#  if v[1]['NOM_COMM'] == 'WISSANT' or v[1]['NOM_COMM'] == 'DENAIN':ls_com_nodes
#    print k,v
## Find nearest node(s) via dict_120_sub['rat_com'] (can be several!)
#print dict_120_sub['rat_com'][23216]
#print dict_120_sub['rat_com'][25781]

ls_node_ids_wd = nx.dijkstra_path(G, 1168, 124)
ls_coord = [dict_120_main['noeuds'][node_id][0] for node_id in ls_node_ids_wd]
m_fra.scatter([x[0] for x in ls_coord],
              [x[1] for x in ls_coord], zorder = 8)
Beispiel #35
0
    def _process_element(self, element):
        if not bool(element):
            return element.clone(crs=self.p.projection)

        crs = element.crs
        proj = self.p.projection
        if (isinstance(crs, ccrs.PlateCarree) and not isinstance(proj, ccrs.PlateCarree)
            and crs.proj4_params['lon_0'] != 0):
            element = self.instance(projection=ccrs.PlateCarree())(element)

        if isinstance(proj, ccrs.CRS) and not isinstance(proj, ccrs.Projection):
            raise ValueError('invalid transform: '
                             'Spherical contouring is not supported - '
                             'consider using PlateCarree/RotatedPole.')

        if isinstance(element, Polygons):
            geoms = polygons_to_geom_dicts(element, skip_invalid=False)
        else:
            geoms = path_to_geom_dicts(element, skip_invalid=False)

        projected = []
        for path in geoms:
            geom = path['geometry']

            # Ensure minimum area for polygons (precision issues cause errors)
            if isinstance(geom, Polygon) and geom.area < 1e-15:
                continue
            elif isinstance(geom, MultiPolygon):
                polys = [g for g in geom if g.area > 1e-15]
                if not polys:
                    continue
                geom = MultiPolygon(polys)
            elif (not geom or isinstance(geom, GeometryCollection)):
                continue

            proj_geom = proj.project_geometry(geom, element.crs)

            # Attempt to fix geometry without being noisy about it
            logger = logging.getLogger()
            try:
                prev = logger.level
                logger.setLevel(logging.ERROR)
                if not proj_geom.is_valid:
                    proj_geom = proj.project_geometry(geom.buffer(0), element.crs)
            except:
                continue
            finally:
                logger.setLevel(prev)
            if proj_geom.geom_type == 'GeometryCollection' and len(proj_geom) == 0:
                continue
            data = dict(path, geometry=proj_geom)
            if 'holes' in data:
                data.pop('holes')
            projected.append(data)

        if len(geoms) and len(projected) == 0:
            self.warning('While projecting a %s element from a %s coordinate '
                         'reference system (crs) to a %s projection none of '
                         'the projected paths were contained within the bounds '
                         'specified by the projection. Ensure you have specified '
                         'the correct coordinate system for your data.' %
                         (type(element).__name__, type(element.crs).__name__,
                          type(self.p.projection).__name__))

        # Try casting back to original types
        if element.interface is GeoPandasInterface:
            import geopandas as gpd
            projected = gpd.GeoDataFrame(projected, columns=element.data.columns)
        elif element.interface is MultiInterface:
            x, y = element.kdims
            item = element.data[0] if element.data else None
            if item is None or (isinstance(item, dict) and 'geometry' in item):
                return element.clone(projected, crs=self.p.projection)
            projected = [geom_dict_to_array_dict(p, [x.name, y.name]) for p in projected]
            if any('holes' in p for p in projected):
                pass
            elif pd and isinstance(item, pd.DataFrame):
                projected = [pd.DataFrame(p, columns=item.columns) for p in projected]
            elif isinstance(item, np.ndarray):
                projected = [np.column_stack([p[d.name] for d in element.dimensions()])
                             for p in projected]
        return element.clone(projected, crs=self.p.projection)
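# The tiny-area filter above as a standalone helper, using the same 1e-15
# threshold to drop degenerate slivers before reprojection:
from shapely.geometry import MultiPolygon

def drop_slivers(geom, min_area=1e-15):
    if geom.geom_type == 'MultiPolygon':
        polys = [g for g in geom.geoms if g.area > min_area]
        return MultiPolygon(polys) if polys else None
    return geom if geom.area > min_area else None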
Beispiel #36
0
def zonal_stats(vectors, raster, layer_num=0, band_num=1, nodata_value=None,
                global_src_extent=False, categorical=False, stats=None,
                copy_properties=False, all_touched=False, transform=None,
                affine=None, add_stats=None, raster_out=False, opt_georaster=False):
    """Summary statistics of a raster, broken out by vector geometries.

    Attributes
    ----------
    vectors : path to an OGR vector source or list of geo_interface or WKT str
    raster : ndarray or path to a GDAL raster source
        If ndarray is passed, the `transform` kwarg is required.
    layer_num : int, optional
        If `vectors` is a path to an OGR source, the vector layer to use
        (counting from 0).
        defaults to 0.
    band_num : int, optional
        If `raster` is a GDAL source, the band number to use (counting from 1).
        defaults to 1.
    nodata_value : float, optional
        If `raster` is a GDAL source, this value overrides any NODATA value
        specified in the file's metadata.
        If `None`, the file's metadata's NODATA value (if any) will be used.
        `ndarray`s don't support `nodata_value`.
        defaults to `None`.
    global_src_extent : bool, optional
        Pre-allocate entire raster before iterating over vector features.
        Use `True` if limited by disk IO or indexing into raster;
            requires sufficient RAM to store array in memory
        Use `False` with fast disks and a well-indexed raster, or when
        memory-constrained.
        Ignored when `raster` is an ndarray,
            because it is already completely in memory.
        defaults to `False`.
    categorical : bool, optional
        If `True`, include a count of each unique pixel value within the zone
        (in addition to any requested `stats`).
        defaults to `False`.
    stats : list of str, or space-delimited str, optional
        Which statistics to calculate for each zone.
        All possible choices are listed in `VALID_STATS`.
        defaults to `DEFAULT_STATS`, a subset of these.
    copy_properties : bool, optional
        Include feature properties alongside the returned stats.
        defaults to `False`
    all_touched : bool, optional
        Whether to include every raster cell touched by a geometry, or only
        those having a center point within the polygon.
        defaults to `False`
    transform : list or tuple of 6 floats or Affine object, optional
        Required when `raster` is an ndarray.
        6-tuple for GDAL-style geotransform coordinates
        Affine for rasterio-style geotransform coordinates
        Can use the keyword `affine` which is an alias for `transform`
    add_stats : dict, optional
        Names and functions of additional statistics to compute.
    raster_out : bool, optional
        Include the masked numpy array for each feature.
        Each feature dictionary will have the following additional keys:
            clipped raster (`mini_raster`)
            Geo-transform (`mini_raster_GT`)
            No Data Value (`mini_raster_NDV`)
    opt_georaster : bool, optional
        Whether the mini raster should be returned as a GeoRaster.
        defaults to `False`.

    Returns
    -------
    list of dicts
        Each dict represents one vector geometry.
        Its keys include `__fid__` (the geometry feature id)
        and each of the `stats` requested.
    """
    if not stats:
        if not categorical:
            stats = DEFAULT_STATS
        else:
            stats = []
    else:
        if isinstance(stats, str):
            if stats in ['*', 'ALL']:
                stats = VALID_STATS
            else:
                stats = stats.split()
    for x in stats:
        if x.startswith("percentile_"):
            get_percentile(x)
        elif x not in VALID_STATS:
            raise ValueError(
                "Stat `%s` not valid; "
                "must be one of \n %r" % (x, VALID_STATS))

    if opt_georaster:
        import georasters

    run_count = False
    if categorical or 'majority' in stats or 'minority' in stats or \
       'unique' in stats:
        # run the counter once, only if needed
        run_count = True

    if isinstance(raster, np.ndarray):
        raster_type = 'ndarray'

        # must have transform info
        if affine:
            transform = affine
        if not transform:
            raise ValueError("Must provide the 'transform' kwarg "
                             "when using ndarrays as src raster")
        try:
            rgt = transform.to_gdal()  # an Affine object
        except AttributeError:
            rgt = transform  # a GDAL geotransform

        rshape = (raster.shape[1], raster.shape[0])

        # global_src_extent is implicitly turned on, array is already in memory
        global_src_extent = True

        if nodata_value:
            raise NotImplementedError("ndarrays don't support 'nodata_value'")
    else:
        raster_type = 'gdal'

        with rasterio.drivers():
            with rasterio.open(raster, 'r') as src:
                affine = src.affine
                rgt = affine.to_gdal()
                rshape = (src.width, src.height)
                rnodata = src.nodata

        if nodata_value is not None:
            # override with specified nodata
            nodata_value = float(nodata_value)
        else:
            nodata_value = rnodata

    features_iter, strategy, spatial_ref = get_features(vectors, layer_num)

    if global_src_extent and raster_type == 'gdal':
        # create an in-memory numpy array of the source raster data
        extent = raster_extent_as_bounds(rgt, rshape)
        global_src_offset = bbox_to_pixel_offsets(rgt, extent, rshape)
        window = pixel_offsets_to_window(global_src_offset)
        with rasterio.drivers():
            with rasterio.open(raster, 'r') as src:
                global_src_array = src.read(
                    band_num, window=window, masked=False)
    elif global_src_extent and raster_type == 'ndarray':
        global_src_offset = (0, 0, raster.shape[0], raster.shape[1])
        global_src_array = raster

    results = []

    for i, feat in enumerate(features_iter):
        if feat['type'] == "Feature":
            geom = shape(feat['geometry'])
        else:  # it's just a geometry
            geom = shape(feat)

        # Point and MultiPoint don't play well with GDALRasterize
        # convert them into box polygons the size of a raster cell
        buff = rgt[1] / 2.0
        if geom.type == "MultiPoint":
            geom = MultiPolygon([box(*(pt.buffer(buff).bounds))
                                for pt in geom.geoms])
        elif geom.type == 'Point':
            geom = box(*(geom.buffer(buff).bounds))

        geom_bounds = list(geom.bounds)

        # calculate new pixel coordinates of the feature subset
        src_offset = bbox_to_pixel_offsets(rgt, geom_bounds, rshape)

        new_gt = (
            (rgt[0] + (src_offset[0] * rgt[1])),
            rgt[1],
            0.0,
            (rgt[3] + (src_offset[1] * rgt[5])),
            0.0,
            rgt[5]
        )

        if src_offset[2] <= 0 or src_offset[3] <= 0:
            # we're off the raster completely, no overlap at all
            # so there's no need to even bother trying to calculate
            feature_stats = dict([(s, None) for s in stats])
        else:
            if not global_src_extent:
                # use feature's source extent and read directly from source
                window = pixel_offsets_to_window(src_offset)
                with rasterio.drivers():
                    with rasterio.open(raster, 'r') as src:
                        src_array = src.read(
                            band_num, window=window, masked=False)
            else:
                # subset feature array from global source extent array
                xa = src_offset[0] - global_src_offset[0]
                ya = src_offset[1] - global_src_offset[1]
                xb = xa + src_offset[2]
                yb = ya + src_offset[3]
                src_array = global_src_array[ya:yb, xa:xb]

            # create ndarray of rasterized geometry
            rv_array = rasterize_geom(geom, src_offset, new_gt, all_touched)
            assert rv_array.shape == src_array.shape

            # Mask the source data array with our current feature
            # we take the logical_not to flip 0<->1 for the correct mask effect
            # we also mask out nodata values explicitly
            masked = np.ma.MaskedArray(
                src_array,
                mask=np.logical_or(
                    src_array == nodata_value,
                    np.logical_not(rv_array)
                )
            )

            if run_count:
                pixel_count = Counter(masked.compressed().tolist())

            if categorical:
                feature_stats = dict(pixel_count)
            else:
                feature_stats = {}

            if 'min' in stats:
                feature_stats['min'] = float(masked.min())
            if 'max' in stats:
                feature_stats['max'] = float(masked.max())
            if 'mean' in stats:
                feature_stats['mean'] = float(masked.mean())
            if 'count' in stats:
                feature_stats['count'] = int(masked.count())
            # optional
            if 'sum' in stats:
                feature_stats['sum'] = float(masked.sum())
            if 'std' in stats:
                feature_stats['std'] = float(masked.std())
            if 'median' in stats:
                feature_stats['median'] = float(np.median(masked.compressed()))
            if 'majority' in stats:
                try:
                    feature_stats['majority'] = float(pixel_count.most_common(1)[0][0])
                except IndexError:
                    feature_stats['majority'] = None
            if 'minority' in stats:
                try:
                    feature_stats['minority'] = float(pixel_count.most_common()[-1][0])
                except IndexError:
                    feature_stats['minority'] = None
            if 'unique' in stats:
                feature_stats['unique'] = len(list(pixel_count.keys()))
            if 'range' in stats:
                try:
                    rmin = feature_stats['min']
                except KeyError:
                    rmin = float(masked.min())
                try:
                    rmax = feature_stats['max']
                except KeyError:
                    rmax = float(masked.max())
                feature_stats['range'] = rmax - rmin

            for pctile in [s for s in stats if s.startswith('percentile_')]:
                q = get_percentile(pctile)
                pctarr = masked.compressed()
                if pctarr.size == 0:
                    feature_stats[pctile] = None
                else:
                    feature_stats[pctile] = np.percentile(pctarr, q)

            if add_stats is not None:
                for stat_name, stat_func in add_stats.items():
                    feature_stats[stat_name] = stat_func(masked)
            if raster_out:
                masked.fill_value = nodata_value
                masked.data[masked.mask] = nodata_value
                if opt_georaster:
                    feature_stats['mini_raster'] = georasters.GeoRaster(
                        masked, new_gt, nodata_value=nodata_value,
                        projection=spatial_ref)
                else:
                    feature_stats['mini_raster'] = masked
                    feature_stats['mini_raster_GT'] = new_gt
                    feature_stats['mini_raster_NDV'] = nodata_value

        if 'fid' in feat:
            # Use the fid directly,
            # likely came from OGR data via .utils.feature_to_geojson
            feature_stats['__fid__'] = feat['fid']
        else:
            # Use the enumerated id
            feature_stats['__fid__'] = i

        if 'properties' in feat and copy_properties:
            for key, val in list(feat['properties'].items()):
                feature_stats[key] = val

        results.append(feature_stats)

    return results
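The masking idiom above (a rasterized feature array plus a nodata comparison feeding np.ma.MaskedArray) is the heart of every zonal_stats variant in this file. A minimal self-contained sketch of that idiom, with an invented 3x3 value block and feature mask:

import numpy as np

# hypothetical 3x3 raster block and rasterized feature (1 = inside the feature)
src_array = np.array([[1., 2., -9999.],
                      [3., 4., 5.],
                      [6., 7., 8.]])
rv_array = np.array([[1, 1, 1],
                     [0, 1, 1],
                     [0, 0, 0]])
nodata_value = -9999.

# mask cells that are nodata OR fall outside the rasterized feature
masked = np.ma.MaskedArray(
    src_array,
    mask=np.logical_or(src_array == nodata_value,
                       np.logical_not(rv_array)))

print(float(masked.mean()))  # (1 + 2 + 4 + 5) / 4 = 3.0
print(int(masked.count()))   # 4 valid cells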
Beispiel #37
0
def create_relation_geometry(relation_key, relation_val, footprints):
    """
    Create Shapely geometry for relations - Polygons with holes or MultiPolygons

    OSM relations are used to define complex polygons - polygons with holes or
    multi-polygons. The polygons' outer and inner rings may be made up of chains
    of LineStrings. https://wiki.openstreetmap.org/wiki/Relation:multipolygon 
    requires that multipolygon rings have an outer or inner 'role'.
    
    OSM's data model allows a polygon type tag e.g. 'building' to be added to 
    any OSM element. This can include non-polygon relations e.g. bus routes.
    Relations that do not have at least one closed ring with an outer role 
    are filtered out.

    Inner rings that are tagged with the footprint type in their own right e.g.
    landuse=meadow as an inner ring of landuse=forest will have been included in
    the footprints dictionary as part of the original parsing and are not dealt
    with here.

    Parameters
    ----------
    relation_key : int
        the id of the relation to process
    relation_val : dict
        members and tags of the relation
    footprints : dictionary
        dictionary of all footprints (including open and closed ways)

    Returns
    -------
    Shapely Polygon or MultiPolygon
    """

    # create empty lists to hold member geometries
    multipoly = []
    outer_polys = []
    outer_lines = []
    inner_polys = []
    inner_lines = []

    # add each member's geometry to a list according to its role and geometry type
    for member_id, member_role in relation_val['members'].items():
        if member_role == 'outer':
            if footprints[member_id]['geometry'].geom_type == 'Polygon':
                outer_polys.append(footprints[member_id]['geometry'])
            elif footprints[member_id]['geometry'].geom_type == 'LineString':
                outer_lines.append(footprints[member_id]['geometry'])
        elif member_role == 'inner':
            if footprints[member_id]['geometry'].geom_type == 'Polygon':
                inner_polys.append(footprints[member_id]['geometry'])
            elif footprints[member_id]['geometry'].geom_type == 'LineString':
                inner_lines.append(footprints[member_id]['geometry'])

    # try to polygonize open outer ways and concatenate them to outer_polys
    if len(outer_lines) > 0:
        try:
            result = list(polygonize(outer_lines))
        except Exception:
            log("polygonize failed for 'outer' ways in relation: {}".format(
                relation_key))
        else:
            outer_polys += result

    # try to polygonize open inner ways and concatenate them to inner_polys
    if len(inner_lines) > 0:
        try:
            result = list(polygonize(inner_lines))
        except Exception:
            log("polygonize failed for 'inner' ways in relation: {}".format(
                relation_key))
        else:
            inner_polys += result

    # filter out relations missing both 'outer' and 'inner' polygons or just 'outer'
    if len(outer_polys + inner_polys) == 0:
        log("Relation {} missing 'outer' and 'inner' closed ways".format(
            relation_key))
    elif len(outer_polys) == 0:
        log("Relation {} missing 'outer' closed ways".format(relation_key))
    # process the others to multipolygons
    else:
        for outer_poly in outer_polys:
            outer_poly = outer_poly.buffer(0)  # fix invalid geometry if present
            temp_poly = outer_poly
            for inner_poly in inner_polys:
                inner_poly = inner_poly.buffer(0)  # fix invalid geometry if present
                if inner_poly.within(outer_poly):
                    temp_poly = temp_poly.difference(inner_poly)
            multipoly.append(temp_poly)

    # return relations with one outer way as Polygons, multiple outer ways as MultiPolygons
    if len(multipoly) == 1:
        return multipoly[0]
    elif len(multipoly) > 1:
        return MultiPolygon(multipoly)
    else:
        log('relation {} could not be converted to a complex footprint'.format(
            relation_key))
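For reference, shapely.ops.polygonize is what stitches the open member ways into rings here; a minimal sketch (coordinates invented) of two open ways closing into one outer polygon:

from shapely.geometry import LineString
from shapely.ops import polygonize

# two open ways that together trace a closed square ring
way_a = LineString([(0, 0), (1, 0), (1, 1)])
way_b = LineString([(1, 1), (0, 1), (0, 0)])

rings = list(polygonize([way_a, way_b]))
print(len(rings))     # 1
print(rings[0].area)  # 1.0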
Beispiel #38
0
def zonal_stats(vectors, raster, layer_num=0, band_num=1, func=None,
                nodata_value=None, categorical=False, stats=None,
                copy_properties=False, all_touched=False, transform=None):

    if not stats:
        if not categorical:
            stats = ['count', 'min', 'max', 'mean', 'std']
        else:
            stats = []
        if func:
            stats.append('func')

    # must have transform arg
    if not transform:
        raise Exception("Must provide the 'transform' kwarg")
    rgt = transform
    rsize = (raster.shape[1], raster.shape[0])

    rbounds = raster_extent_as_bounds(rgt, rsize)
    features_iter, strategy, spatial_ref = get_features(vectors, layer_num)
    global_src_offset = (0, 0, raster.shape[0], raster.shape[1])
    global_src_array = raster

    mem_drv = ogr.GetDriverByName('Memory')
    driver = gdal.GetDriverByName('MEM')

    results = []
    entity_images = []

    for i, feat in enumerate(features_iter):
        if feat['type'] == "Feature":
            geom = shape(feat['geometry'])
        else:  # it's just a geometry
            geom = shape(feat)

        # Point and MultiPoint don't play well with GDALRasterize
        # convert them into box polygons the size of a raster cell
        buff = rgt[1] / 2.0
        if geom.type == "MultiPoint":
            geom = MultiPolygon([box(*(pt.buffer(buff).bounds))
                                for pt in geom.geoms])
        elif geom.type == 'Point':
            geom = box(*(geom.buffer(buff).bounds))

        ogr_geom_type = shapely_to_ogr_type(geom.type)

        # "Clip" the geometry bounds to the overall raster bounding box
        # This should avoid any rasterIO errors for partially overlapping polys
        geom_bounds = list(geom.bounds)
        if geom_bounds[0] < rbounds[0]:
            geom_bounds[0] = rbounds[0]
        if geom_bounds[1] < rbounds[1]:
            geom_bounds[1] = rbounds[1]
        if geom_bounds[2] > rbounds[2]:
            geom_bounds[2] = rbounds[2]
        if geom_bounds[3] > rbounds[3]:
            geom_bounds[3] = rbounds[3]

        # calculate new geotransform of the feature subset
        src_offset = bbox_to_pixel_offsets(rgt, geom_bounds)

        new_gt = (
            (rgt[0] + (src_offset[0] * rgt[1])),
            rgt[1],
            0.0,
            (rgt[3] + (src_offset[1] * rgt[5])),
            0.0,
            rgt[5]
        )

        if src_offset[2] <= 0 or src_offset[3] <= 0:
            # we're off the raster completely, no overlap at all
            # so there's no need to even bother trying to calculate
            feature_stats = dict([(s, None) for s in stats])
            img = {'__fid__': i, 'img': None}
        else:
            # derive array from global source extent array
            # useful *only* when disk IO or raster format inefficiencies
            # are your limiting factor
            # advantage: reads raster data in one pass before loop
            # disadvantage: large vector extents combined with big rasters
            # need lotsa memory
            xa = src_offset[0] - global_src_offset[0]
            ya = src_offset[1] - global_src_offset[1]
            xb = xa + src_offset[2]
            yb = ya + src_offset[3]
            src_array = global_src_array[ya:yb, xa:xb]

            # Create a temporary vector layer in memory
            mem_ds = mem_drv.CreateDataSource('out')
            mem_layer = mem_ds.CreateLayer('out', spatial_ref, ogr_geom_type)
            ogr_feature = ogr.Feature(feature_def=mem_layer.GetLayerDefn())
            ogr_geom = ogr.CreateGeometryFromWkt(geom.wkt)
            ogr_feature.SetGeometryDirectly(ogr_geom)
            mem_layer.CreateFeature(ogr_feature)

            # Rasterize it
            rvds = driver.Create(
                'rvds', src_offset[2], src_offset[3], 1, gdal.GDT_Byte)
            rvds.SetGeoTransform(new_gt)

            if all_touched:
                gdal.RasterizeLayer(
                    rvds, [1], mem_layer, None, None,
                    burn_values=[1], options=['ALL_TOUCHED=True'])
            else:
                gdal.RasterizeLayer(
                    rvds, [1], mem_layer, None, None,
                    burn_values=[1], options=['ALL_TOUCHED=False'])
            rv_array = rvds.ReadAsArray()

            # Mask the source data array with our current feature
            # we take the logical_not to flip 0<->1 to get the correct mask effect
            # we also mask out nodata values explicitly
            masked = np.ma.MaskedArray(
                src_array,
                mask=np.logical_or(
                    src_array == nodata_value,
                    np.logical_not(rv_array)
                )
            )

            feature_stats = {}

            if 'min' in stats:
                feature_stats['min'] = float(masked.min())
            if 'max' in stats:
                feature_stats['max'] = float(masked.max())
            if 'mean' in stats:
                feature_stats['mean'] = float(masked.mean())
            if 'count' in stats:
                feature_stats['count'] = int(masked.count())
            if 'std' in stats:
                feature_stats['std'] = float(masked.std())
            # optional
            if 'func' in stats:
                feature_stats[func.__name__] = func(masked)
            if 'sum' in stats:
                feature_stats['sum'] = float(masked.sum())
            if 'median' in stats:
                feature_stats['median'] = float(np.median(masked.compressed()))
            if 'range' in stats:
                try:
                    rmin = feature_stats['min']
                except KeyError:
                    rmin = float(masked.min())
                try:
                    rmax = feature_stats['max']
                except KeyError:
                    rmax = float(masked.max())
                feature_stats['range'] = rmax - rmin
            img = {'__fid__': i, 'img': masked}

        # Use the enumerated id as __fid__
        feature_stats['__fid__'] = i

        if 'properties' in feat and copy_properties:
            for key, val in list(feat['properties'].items()):
                feature_stats[key] = val

        results.append(feature_stats)
        entity_images.append(img)
    return results, entity_images
Beispiel #39
0
def visPolyTriangle(poly):
    (wtTriangle, pgeom) = prepPolygon(poly)
    triangles = [x[1] for x in wtTriangle]
    toDraw = MultiPolygon(triangles)
    with open("toTriangulate.json", "w") as f:
        f.write(json.dumps(mapping(poly)))
    with open("exampleTriangulation.json", "w") as f:
        f.write(json.dumps(mapping(toDraw)))
Beispiel #40
0
def raster_stats_multi(vectors, rasterlist, geom_attr='GeomWKT', id_attr='fid',
                        band_num=1, nodata_value=None, 
                        global_src_extent=False, categorical=False, stats=None, 
                        copy_properties=False, all_touched = False):
    '''
    Multi-raster version of the raster_stats (zonal_stats) function from the rasterstats package.

    When running zonal stats with the rasterstats package, each feature (zone) must first
    be rasterized; the result is then used to mask the input raster.
    However, we often need to run raster stats on many (thousands of) input rasters
    (all with identical geotransforms) for the same zones.

    In this scenario the rasterization of the zones is a major overhead.
    This version rasterizes once and then runs the overlay against all rasters (which must
    share the same resolution and extent). It returns a generator, so the stats for
    each raster are produced when the calling code is ready for them.
    '''
    DEFAULT_STATS = ['count', 'min', 'max', 'mean']
    VALID_STATS = DEFAULT_STATS + \
        ['sum', 'std', 'median', 'majority', 'minority', 'unique', 'range']
    if not stats:
        if not categorical:
            stats = DEFAULT_STATS
        else:
            stats = []
    else:
        if isinstance(stats, str):
            if stats in ['*', 'ALL']:
                stats = VALID_STATS
            else:
                stats = stats.split()
    for x in stats:
        if x not in VALID_STATS:
            raise RasterStatsError("Stat `%s` not valid;" \
                " must be one of \n %r" % (x, VALID_STATS))

    run_count = False
    if categorical or 'majority' in stats or 'minority' in stats or \
       'unique' in stats:
        # run the counter once, only if needed
        run_count = True
    
    # open the first raster and use this, we will assume they are all the same size / bounds etc
    initrast = rasterlist[0]
    rds = gdal.Open(initrast, gdal.GA_ReadOnly)
    if not rds:
        raise RasterStatsError("Cannot open %r as GDAL raster" % raster)
    rb = rds.GetRasterBand(band_num)
    rgt = rds.GetGeoTransform()
    rsize = (rds.RasterXSize, rds.RasterYSize)
    rbounds = raster_extent_as_bounds(rgt, rsize)

    if nodata_value is not None:
        nodata_value = float(nodata_value)
        rb.SetNoDataValue(nodata_value)
    else:
        nodata_value = rb.GetNoDataValue()

    mem_drv = ogr.GetDriverByName('Memory')
    driver = gdal.GetDriverByName('MEM')

    results = []

    # to avoid re-rasterizing the zones for every values raster, rasterization has
    # been moved out of the loop; the rasterized zone arrays are saved into a
    # dictionary (so we need enough memory to hold that)
    zoneFeatureRasters = {}
    globL = inf
    globB = inf
    globT = -inf
    globR = -inf
    
    for i, feat in enumerate(vectors):
        try:
            geomWKT = feat[geom_attr]
        except KeyError:
            print("No geom attr found in feature!")
            continue
        geom = wkt.loads(geomWKT)
        
        # Point and MultiPoint don't play well with GDALRasterize
        # convert them into box polygons the size of a raster cell
        buff = rgt[1] / 2.0
        if geom.type == "MultiPoint":
            geom = MultiPolygon([box(*(pt.buffer(buff).bounds)) 
                                for pt in geom.geoms])
        elif geom.type == 'Point':
            geom = box(*(geom.buffer(buff).bounds))

        ogr_geom_type = shapely_to_ogr_type(geom.type)

        # "Clip" the geometry bounds to the overall raster bounding box
        # This should avoid any rasterIO errors for partially overlapping polys
        geom_bounds = list(geom.bounds)
        if geom_bounds[0] < rbounds[0]:
            geom_bounds[0] = rbounds[0]
        if geom_bounds[1] < rbounds[1]:
            geom_bounds[1] = rbounds[1]
        if geom_bounds[2] > rbounds[2]:
            geom_bounds[2] = rbounds[2]
        if geom_bounds[3] > rbounds[3]:
            geom_bounds[3] = rbounds[3]
        
        # Record the overall bounds of the features
        if geom_bounds[0] < globL:
            globL = geom_bounds[0]
        if geom_bounds[1] < globB:
            globB = geom_bounds[1]
        if geom_bounds[2] > globR:
            globR = geom_bounds[2]
        if geom_bounds[3] > globT:
            globT = geom_bounds[3]
            
        # calculate new geotransform of the feature subset
        src_offset = bbox_to_pixel_offsets(rgt, geom_bounds, rsize)

        new_gt = (
            (rgt[0] + (src_offset[0] * rgt[1])),
            rgt[1],
            0.0,
            (rgt[3] + (src_offset[1] * rgt[5])),
            0.0,
            rgt[5]
        )
        fid = None
        try:
            fid= feat[id_attr]
        except KeyError:
            fid = i
        if src_offset[2] < 0 or src_offset[3] < 0:
            # we're off the raster completely, no overlap at all
            # so there's no need to even bother trying to calculate
            print("Feature {} is off raster extent - skipping!".format(fid))
            zoneFeatureRasters[fid] = None

        else: # Create a temporary vector layer in memory
            mem_ds = mem_drv.CreateDataSource('out')
            mem_layer = mem_ds.CreateLayer('out', None, ogr_geom_type)
            ogr_feature = ogr.Feature(feature_def=mem_layer.GetLayerDefn())
            ogr_geom = ogr.CreateGeometryFromWkt(geom.wkt)
            ogr_feature.SetGeometryDirectly(ogr_geom)
            mem_layer.CreateFeature(ogr_feature)

            # Rasterize it
            rvds = driver.Create('rvds', src_offset[2], src_offset[3], 1, gdal.GDT_Byte)
            rvds.SetGeoTransform(new_gt)
            gdal.RasterizeLayer(rvds, [1], mem_layer, None, None,
                                burn_values=[1],
                                options=['ALL_TOUCHED=' + str(all_touched)])
            rv_array = rvds.ReadAsArray()
            zoneFeatureRasters[fid] = {
                 "zonearray":rv_array,
                 "src_offset":src_offset
            }
            
    rds = None  # close the initial dataset; each raster is reopened below
    if global_src_extent:
        # outside the loop: everything except actually reading the raster data
        # create an in-memory numpy array of the source raster data covering
        # the whole extent of the vector layer, using the feature bounds
        # accumulated above in place of an OGR layer extent
        layer_extent = (globL, globB, globR, globT)
        global_src_offset = bbox_to_pixel_offsets(rgt, layer_extent, rsize)
            
    # now do the raster-calculation part of the original task once per input
    # raster, getting the zone rasters from the populated dictionary rather
    # than re-rasterizing each time
    for rast in rasterlist:
        rastresults = []
        rds = gdal.Open(rast, gdal.GA_ReadOnly)
        if not rds:
            # raise RasterStatsError("Cannot open %r as GDAL raster" % rast)
            print("Cannot open %r as GDAL raster" % rast)
            continue
        rb = rds.GetRasterBand(band_num)
        # the raster size and transform must match the first raster's
        thisRgt = rds.GetGeoTransform()
        thisRsize = (rds.RasterXSize, rds.RasterYSize)
        thisRbounds = raster_extent_as_bounds(thisRgt, thisRsize)
        if (thisRgt != rgt or thisRsize != rsize or thisRbounds != rbounds):
            print("Raster " + rast + " has differing size or geotransform from others - skipping!")
            continue

        if global_src_extent:
            global_src_array = rb.ReadAsArray(*global_src_offset)

        if nodata_value is not None:
            nodata_value = float(nodata_value)
            rb.SetNoDataValue(nodata_value)
        else:
            nodata_value = rb.GetNoDataValue()
       
        for i, feat in enumerate(vectors):
            fid = None
            try:
                fid = feat[id_attr]
            except KeyError:
                fid = i
            if zoneFeatureRasters[fid] is None:
                # this happens when the feature was outside the raster extent so rasterizing it was skipped
                #feature_stats = dict([(s,None) for s in stats])
                continue
            else:
                zone_array = zoneFeatureRasters[fid]["zonearray"]
                src_offset = zoneFeatureRasters[fid]["src_offset"]
                if not global_src_extent:
                    # use feature's source extent and read directly from source
                    # fastest option when you have fast disks and well-indexed raster
                    # advantage: each feature uses the smallest raster chunk
                    # disadvantage: lots of disk reads on the source raster
                    src_array = rb.ReadAsArray(*src_offset)
                else:
                    # derive array from global source extent array
                    # useful *only* when disk IO or raster format inefficiencies are your limiting factor
                    # advantage: reads raster data in one pass before loop
                    # disadvantage: large vector extents combined with big rasters need lotsa memory
                    xa = src_offset[0] - global_src_offset[0]
                    ya = src_offset[1] - global_src_offset[1]
                    xb = xa + src_offset[2]
                    yb = ya + src_offset[3]
                    src_array = global_src_array[ya:yb, xa:xb]
                
                # Mask the source data array with our current feature
                # we take the logical_not to flip 0<->1 to get the correct mask effect
                # we also mask out nodata values explicitly
                masked = numpy.ma.MaskedArray(
                    src_array,
                    mask=numpy.logical_or(
                        src_array == nodata_value,
                        numpy.logical_not(zone_array)
                    )
                )

                if run_count:
                    pixel_count = Counter(masked.compressed())

                if categorical:  
                    feature_stats = dict(pixel_count)
                else:
                    feature_stats = {}

                if 'min' in stats:
                    feature_stats['min'] = float(masked.min())
                if 'max' in stats:
                    feature_stats['max'] = float(masked.max())
                if 'mean' in stats:
                    feature_stats['mean'] = float(masked.mean())
                if 'count' in stats:
                    feature_stats['count'] = int(masked.count())
                # optional
                if 'sum' in stats:
                    feature_stats['sum'] = float(masked.sum())
                if 'std' in stats:
                    feature_stats['std'] = float(masked.std())
                if 'median' in stats:
                    feature_stats['median'] = float(numpy.median(masked.compressed()))
                if 'majority' in stats:
                    try:
                        feature_stats['majority'] = pixel_count.most_common(1)[0][0]
                    except IndexError:
                        feature_stats['majority'] = None
                if 'minority' in stats:
                    try:
                        feature_stats['minority'] = pixel_count.most_common()[-1][0]
                    except IndexError:
                        feature_stats['minority'] = None
                if 'unique' in stats:
                    feature_stats['unique'] = len(pixel_count.keys())
                if 'range' in stats:
                    try:
                        rmin = feature_stats['min']
                    except KeyError:
                        rmin = float(masked.min())
                    try:
                        rmax = feature_stats['max']
                    except KeyError:
                        rmax = float(masked.max())
                    feature_stats['range'] = rmax - rmin
        
            try:
                # Use the provided feature id as __fid__
                feature_stats[id_attr] = feat[id_attr]
            except KeyError:
                # use the enumerator
                feature_stats[id_attr] = i

            if copy_properties:
                for key, val in feat.items():
                    if key == id_attr or key == geom_attr:
                        continue
                    feature_stats[key] = val
            rastresults.append(feature_stats)
        yield {'rastername': rast, 'stats': rastresults}
    rb = None
    rds = None
    zoneFeatureRasters = None
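Because raster_stats_multi is a generator, the caller pulls results one raster at a time; a hedged usage sketch (the file names and feature dicts below are placeholders, not from the original code):

# hypothetical zones carrying WKT under 'GeomWKT' and an id under 'fid'
features = [
    {'fid': 'zone-1', 'GeomWKT': 'POLYGON ((0 0, 0 10, 10 10, 10 0, 0 0))'},
]
rasters = ['rain_2001.tif', 'rain_2002.tif']  # assumed to share grid and extent

for result in raster_stats_multi(features, rasters, stats='mean count'):
    print(result['rastername'])
    for zone in result['stats']:
        print(zone['fid'], zone.get('mean'))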
    
    
Beispiel #41
0
 def __add__(self, other):
     """ 合并两个文本行 """
     box = rect2polygon(MultiPolygon([self.polygon, other.polygon]).bounds)
     return TextlineShape(box)
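The merge above just takes the joint bounding box of the two line polygons. Since rect2polygon is a project-specific helper not shown here, a sketch of the same idea using shapely's box instead:

from shapely.geometry import MultiPolygon, box

a = box(0, 0, 10, 2)   # first text line's polygon
b = box(12, 0, 30, 3)  # second text line's polygon

merged = box(*MultiPolygon([a, b]).bounds)
print(merged.bounds)   # (0.0, 0.0, 30.0, 3.0)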
Beispiel #42
0
def zonal_stats(vectors, raster, layer_num=0, band_num=1, nodata_value=None, 
                 global_src_extent=False, categorical=False, stats=None, 
                 copy_properties=False, all_touched=False, transform=None):

    if not stats:
        if not categorical:
            stats = DEFAULT_STATS
        else:
            stats = []
    else:
        if isinstance(stats, str):
            if stats in ['*', 'ALL']:
                stats = VALID_STATS
            else:
                stats = stats.split()
    for x in stats:
        if x not in VALID_STATS:
            raise RasterStatsError("Stat `%s` not valid;" \
                " must be one of \n %r" % (x, VALID_STATS))

    run_count = False
    if categorical or 'majority' in stats or 'minority' in stats or \
       'unique' in stats:
        # run the counter once, only if needed
        run_count = True

    if isinstance(raster, np.ndarray):
        raster_type = 'ndarray'

        # must have transform arg
        if not transform:
            raise RasterStatsError("Must provide the 'transform' kwarg when "\
                "using ndarrays as src raster")
        rgt = transform
        rsize = (raster.shape[1], raster.shape[0])

        # global_src_extent is implicitly turned on, array is already in memory
        if not global_src_extent:
            global_src_extent = True

        if nodata_value:
            raise NotImplementedError("ndarrays don't support 'nodata_value'")

    else:
        raster_type = 'gdal'
        rds = gdal.Open(raster, GA_ReadOnly)
        if not rds:
            raise RasterStatsError("Cannot open %r as GDAL raster" % raster)
        rb = rds.GetRasterBand(band_num)
        rgt = rds.GetGeoTransform()
        rsize = (rds.RasterXSize, rds.RasterYSize)

        if nodata_value is not None:
            nodata_value = float(nodata_value)
            rb.SetNoDataValue(nodata_value)
        else:
            nodata_value = rb.GetNoDataValue()

    rbounds = raster_extent_as_bounds(rgt, rsize)

    features_iter, strategy, spatial_ref = get_features(vectors, layer_num)

    if global_src_extent and raster_type == 'gdal':
        # create an in-memory numpy array of the source raster data
        # covering the whole extent of the vector layer
        if strategy != "ogr":
            raise RasterStatsError("global_src_extent requires OGR vector")

        # find extent of ALL features
        ds = ogr.Open(vectors)
        layer = ds.GetLayer(layer_num)
        ex = layer.GetExtent()
        # transform from OGR extent to xmin, ymin, xmax, ymax
        layer_extent = (ex[0], ex[2], ex[1], ex[3])

        global_src_offset = bbox_to_pixel_offsets(rgt, layer_extent)
        global_src_array = rb.ReadAsArray(*global_src_offset)
    elif global_src_extent and raster_type == 'ndarray':
        global_src_offset = (0, 0, raster.shape[0], raster.shape[1])
        global_src_array = raster

    mem_drv = ogr.GetDriverByName('Memory')
    driver = gdal.GetDriverByName('MEM')

    results = []

    for i, feat in enumerate(features_iter):
        if feat['type'] == "Feature":
            geom = shape(feat['geometry'])
        else:  # it's just a geometry
            geom = shape(feat)

        # Point and MultiPoint don't play well with GDALRasterize
        # convert them into box polygons the size of a raster cell
        buff = rgt[1] / 2.0
        if geom.type == "MultiPoint":
            geom = MultiPolygon([box(*(pt.buffer(buff).bounds)) 
                                for pt in geom.geoms])
        elif geom.type == 'Point':
            geom = box(*(geom.buffer(buff).bounds))

        ogr_geom_type = shapely_to_ogr_type(geom.type)

        # "Clip" the geometry bounds to the overall raster bounding box
        # This should avoid any rasterIO errors for partially overlapping polys
        geom_bounds = list(geom.bounds)
        if geom_bounds[0] < rbounds[0]:
            geom_bounds[0] = rbounds[0]
        if geom_bounds[1] < rbounds[1]:
            geom_bounds[1] = rbounds[1]
        if geom_bounds[2] > rbounds[2]:
            geom_bounds[2] = rbounds[2]
        if geom_bounds[3] > rbounds[3]:
            geom_bounds[3] = rbounds[3]

        # calculate new geotransform of the feature subset
        src_offset = bbox_to_pixel_offsets(rgt, geom_bounds)

        new_gt = (
            (rgt[0] + (src_offset[0] * rgt[1])),
            rgt[1],
            0.0,
            (rgt[3] + (src_offset[1] * rgt[5])),
            0.0,
            rgt[5]
        )

        if src_offset[2] <= 0 or src_offset[3] <= 0:
            # we're off the raster completely, no overlap at all
            # so there's no need to even bother trying to calculate
            feature_stats = dict([(s, None) for s in stats])
        else:
            if not global_src_extent:
                # use feature's source extent and read directly from source
                # fastest option when you have fast disks and well-indexed raster
                # advantage: each feature uses the smallest raster chunk
                # disadvantage: lots of disk reads on the source raster
                src_array = rb.ReadAsArray(*src_offset)
            else:
                # derive array from global source extent array
                # useful *only* when disk IO or raster format inefficiencies
                # are your limiting factor
                # advantage: reads raster data in one pass before loop
                # disadvantage: large vector extents combined with big rasters
                # need lotsa memory
                xa = src_offset[0] - global_src_offset[0]
                ya = src_offset[1] - global_src_offset[1]
                xb = xa + src_offset[2]
                yb = ya + src_offset[3]
                src_array = global_src_array[ya:yb, xa:xb]

            # Create a temporary vector layer in memory
            mem_ds = mem_drv.CreateDataSource('out')
            mem_layer = mem_ds.CreateLayer('out', spatial_ref, ogr_geom_type)
            ogr_feature = ogr.Feature(feature_def=mem_layer.GetLayerDefn())
            ogr_geom = ogr.CreateGeometryFromWkt(geom.wkt)
            ogr_feature.SetGeometryDirectly(ogr_geom)
            mem_layer.CreateFeature(ogr_feature)

            # Rasterize it
            rvds = driver.Create('rvds', src_offset[2], src_offset[3], 1, gdal.GDT_Byte)
            rvds.SetGeoTransform(new_gt)
            
            if all_touched:
                gdal.RasterizeLayer(
                    rvds, [1], mem_layer, None, None,
                    burn_values=[1], options=['ALL_TOUCHED=True'])
            else:
                gdal.RasterizeLayer(
                    rvds, [1], mem_layer, None, None,
                    burn_values=[1], options=['ALL_TOUCHED=False'])
            rv_array = rvds.ReadAsArray()

            # Mask the source data array with our current feature
            # we take the logical_not to flip 0<->1 to get the correct mask effect
            # we also mask out nodata values explicitly
            masked = np.ma.MaskedArray(
                src_array,
                mask=np.logical_or(
                    src_array == nodata_value,
                    np.logical_not(rv_array)
                )
            )

            if run_count:
                pixel_count = Counter(masked.compressed())

            if categorical:  
                feature_stats = dict(pixel_count)
            else:
                feature_stats = {}

            if 'min' in stats:
                feature_stats['min'] = float(masked.min())
            if 'max' in stats:
                feature_stats['max'] = float(masked.max())
            if 'mean' in stats:
                feature_stats['mean'] = float(masked.mean())
            if 'count' in stats:
                feature_stats['count'] = int(masked.count())
            # optional
            if 'sum' in stats:
                feature_stats['sum'] = float(masked.sum())
            if 'std' in stats:
                feature_stats['std'] = float(masked.std())
            if 'median' in stats:
                feature_stats['median'] = float(np.median(masked.compressed()))
            if 'majority' in stats:
                try:
                    feature_stats['majority'] = pixel_count.most_common(1)[0][0]
                except IndexError:
                    feature_stats['majority'] = None
            if 'minority' in stats:
                try:
                    feature_stats['minority'] = pixel_count.most_common()[-1][0]
                except IndexError:
                    feature_stats['minority'] = None
            if 'unique' in stats:
                feature_stats['unique'] = len(list(pixel_count.keys()))
            if 'range' in stats:
                try:
                    rmin = feature_stats['min']
                except KeyError:
                    rmin = float(masked.min())
                try:
                    rmax = feature_stats['max']
                except KeyError:
                    rmax = float(masked.max())
                feature_stats['range'] = rmax - rmin
        
        # Use the enumerated id as __fid__
        feature_stats['__fid__'] = i

        if 'properties' in feat and copy_properties:
            for key, val in list(feat['properties'].items()):
                feature_stats[key] = val

        results.append(feature_stats)

    return results
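Every variant here converts Point features to cell-sized boxes before rasterizing (pt.buffer(buff).bounds fed to box). A standalone sketch with an invented 10-unit cell width:

from shapely.geometry import Point, box

cell_width = 10.0          # rgt[1]: pixel width from the geotransform
buff = cell_width / 2.0

pt = Point(105.0, 47.0)
cell_poly = box(*pt.buffer(buff).bounds)
print(cell_poly.bounds)    # (100.0, 42.0, 110.0, 52.0)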
Beispiel #43
0
    def fetch(self):

        catchment_fetcher = CatchmentFetcher(
            self.catchment_type,
            self.identifier,
            self.epsg_id,
            server_name=self.server_name,
            server_name_preprod=self.server_name_preprod)
        catchments = catchment_fetcher.fetch(id_list=self.id_list)

        # Construct cells and populate with elevations from the DTM
        dtm_fetcher = DTMFetcher(self.grid_specification,
                                 server_name=self.server_name,
                                 server_name_preprod=self.server_name_preprod)
        elevations = dtm_fetcher.fetch()
        cells = self.grid_specification.cells(elevations)
        catchment_land_types = {}
        catchment_cells = {}

        # Filter all data with each catchment
        epsg = self.grid_specification.epsg()
        ltf = LandTypeFetcher(geometry=self.grid_specification.geometry,
                              epsg_id=epsg,
                              server_name=self.server_name,
                              server_name_preprod=self.server_name_preprod)
        rf = ReservoirFetcher(epsg_id=epsg,
                              server_name=self.server_name,
                              server_name_preprod=self.server_name_preprod)
        all_reservoir_coords = rf.fetch(
            geometry=self.grid_specification.geometry)
        all_glaciers = ltf.fetch(name="glacier")
        prep_glaciers = prep(all_glaciers)
        all_lakes = ltf.fetch(name="lake")
        prep_lakes = prep(all_lakes)
        # all_forest  = ltf.fetch(name="forest")
        # prep_forest = prep(all_forest)
        print("Doing catchment loop, n reservoirs", len(all_reservoir_coords))
        for catchment_id, catchment in catchments.items():
            if catchment_id not in catchment_land_types:  # SiH: default land-type, plus the special ones fetched below
                catchment_land_types[catchment_id] = {}
            if prep_lakes.intersects(catchment):
                lake_in_catchment = all_lakes.intersection(catchment)
                if isinstance(lake_in_catchment, (Polygon, MultiPolygon)) \
                        and lake_in_catchment.area > 1000.0:
                    reservoir_list = []
                    for rsv_point in all_reservoir_coords:
                        if isinstance(lake_in_catchment, Polygon):
                            if lake_in_catchment.contains(rsv_point):
                                reservoir_list.append(lake_in_catchment)
                        else:
                            for lake in lake_in_catchment.geoms:
                                if lake.contains(rsv_point):
                                    reservoir_list.append(lake)
                    if reservoir_list:
                        reservoir = MultiPolygon(reservoir_list)
                        catchment_land_types[catchment_id]["reservoir"] = reservoir
                        diff = lake_in_catchment.difference(reservoir)
                        if diff.area > 1000.0:
                            catchment_land_types[catchment_id]["lake"] = diff
                    else:
                        catchment_land_types[catchment_id]["lake"] = lake_in_catchment
            if prep_glaciers.intersects(catchment):
                glacier_in_catchment = all_glaciers.intersection(catchment)
                if isinstance(glacier_in_catchment, (Polygon, MultiPolygon)):
                    catchment_land_types[catchment_id]["glacier"] = glacier_in_catchment
            # if prep_forest.intersects(catchment): # we are not using forest at the moment, and it takes time!!
            #    forest_in_catchment= all_forest.intersection(catchment)
            #    if isinstance(forest_in_catchment, (Polygon, MultiPolygon)):
            #        catchment_land_types[catchment_id]["forest"]=forest_in_catchment

            catchment_cells[catchment_id] = []
            for cell, elevation in cells:
                if cell.intersects(catchment):
                    catchment_cells[catchment_id].append(
                        (cell.intersection(catchment), elevation))

        # Gather cells on a per catchment basis, and compute the area fraction for each landtype
        print("Done with catchment cell loop, calc fractions")
        cell_data = {}
        for catchment_id in catchments.keys():
            cell_data[catchment_id] = []
            for cell, elevation in catchment_cells[catchment_id]:
                data = {"cell": cell, "elevation": elevation}
                land_types = catchment_land_types[catchment_id]
                for land_type_name, land_type_shape in land_types.items():
                    data[land_type_name] = cell.intersection(
                        land_type_shape).area / cell.area
                cell_data[catchment_id].append(data)
        self.cell_data = cell_data
        self.catchment_land_types = catchment_land_types
        self.elevation_raster = elevations
        return {
            "cell_data": self.cell_data,
            "catchment_land_types": self.catchment_land_types,
            "elevation_raster": self.elevation_raster
        }
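The prep() wrappers above matter because each land-type shape is tested against every catchment in the loop; a minimal sketch of the prepared-geometry fast path (shapes invented):

from shapely.geometry import Point, Polygon
from shapely.prepared import prep

lake = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])
prep_lake = prep(lake)  # builds internal indexes once

catchments = [Point(i, i).buffer(0.5) for i in range(1000)]
# repeated intersects() calls reuse the prepared indexes
hits = [c for c in catchments if prep_lake.intersects(c)]
print(len(hits))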
Beispiel #44
0
    def add_patch(
            self,
            multipolygon: Union[MultiPolygon, Polygon],
            expansion_rate: Optional[float] = None,
            target_size: Optional[float] = None,
            nprocs: Optional[int] = None
            ) -> None:
        """Add refinement as a region of fixed size with an optional rate

        Add a refinement based on a region specified by `multipolygon`.
        The fixed `target_size` refinement can be expanded outside the
        region specified by the shape if `expansion_rate` is provided.

        Parameters
        ----------
        multipolygon : MultiPolygon or Polygon
            Shape of the region to use specified `target_size` for
            refinement.
        expansion_rate : float or None, default=None
            Optional rate to use for expanding refinement outside
            the specified shape in `multipolygon`.
        target_size : float or None, default=None
            Fixed target size of mesh to use for refinement in
            `multipolygon`
        nprocs : int or None, default=None
            Number of processors to use in parallel sections of the
            algorithm

        Returns
        -------
        None

        See Also
        --------
        add_feature :
            Add refinement for specified line string
        """

        # TODO: Add pool input support like add_feature for performance

        # TODO: Support other shapes - call buffer(1) on non polygons(?)
        if not isinstance(multipolygon, (Polygon, MultiPolygon)):
            raise TypeError(
                    f"Wrong type \"{type(multipolygon)}\""
                    f" for multipolygon input.")

        if isinstance(multipolygon, Polygon):
            multipolygon = MultiPolygon([multipolygon])

        # Check nprocs
        nprocs = -1 if nprocs is None else nprocs
        nprocs = cpu_count() if nprocs == -1 else nprocs
        _logger.debug(f'Using nprocs={nprocs}')


        # check target size
        target_size = self.hmin if target_size is None else target_size
        if target_size is None:
            # TODO: Is this relevant for mesh type?
            raise ValueError('Argument target_size must be specified if no '
                             'global hmin has been set.')
        if target_size <= 0:
            raise ValueError("Argument target_size must be greater than zero.")

        # For expansion_rate
        if expansion_rate is not None:
            exteriors = [ply.exterior for ply in multipolygon.geoms]
            interiors = [
                inter for ply in multipolygon.geoms for inter in ply.interiors]

            features = MultiLineString([*exteriors, *interiors])
            # pylint: disable=E1123, E1125
            self.add_feature(
                feature=features,
                expansion_rate=expansion_rate,
                target_size=target_size,
                nprocs=nprocs)

        coords = self.mesh.msh_t.vert2['coord']
        values = self.mesh.msh_t.value

        verts_in = utils.get_verts_in_shape(
            self.mesh.msh_t, shape=multipolygon, from_box=False)

        if len(verts_in):
            # NOTE: Don't continue, otherwise the final
            # destination file might end up being empty!
            values[verts_in, :] = target_size

        # NOTE: unlike raster self.hmin is based on values of this
        # hfun before applying feature; it is ignored so that
        # the new self.hmin becomes equal to "target" specified
#        if self.hmin is not None:
#            values[np.where(values < self.hmin)] = self.hmin
        if self.hmax is not None:
            values[np.where(values > self.hmax)] = self.hmax
        values = np.minimum(self.mesh.msh_t.value, values)
        values = values.reshape(self.mesh.msh_t.value.shape)

        self.mesh.msh_t.value = values
Beispiel #45
0
 def _get_data_mask(self, bboxes):
     polys = []
     for x, y, w, h in bboxes:
         polys.append(
             Polygon([[x, y], [x+w, y], [x+w, y+h], [x, y+h], [x, y]]))
     return MultiPolygon(polys).buffer(0)
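A MultiPolygon built from raw bounding boxes is invalid whenever the boxes overlap; the trailing buffer(0) dissolves the overlaps into one clean geometry. A small sketch with two overlapping boxes:

from shapely.geometry import MultiPolygon, box

b1 = box(0, 0, 2, 2)
b2 = box(1, 1, 3, 3)             # overlaps b1

raw = MultiPolygon([b1, b2])
print(raw.is_valid)              # False: members overlap

mask = raw.buffer(0)             # dissolves into one valid polygon
print(mask.is_valid, mask.area)  # True 7.0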
Beispiel #46
0
def osmproxy(request):
    url = request.params.get("url")
    if url is None:
        return HTTPBadRequest()

    # instantiate the parser and start parsing
    parser = RelationParser()
    p = OSMParser(concurrency=1,
            coords_callback=parser.get_coords,
            relations_callback=parser.get_relations,
            ways_callback=parser.get_ways)

    temp = tempfile.NamedTemporaryFile(suffix='.osm')
    urllib.request.urlretrieve(url, temp.name)
    p.parse(temp.name)
    temp.close()

    polygons = []
    r = parser.relation

    # first check for self-closing ways
    for i in range(len(r) - 1, -1, -1):
        w = parser.ways[r[i]]
        if w[-1] == w[0]:
            r.pop(i)
            polygon = Polygon([parser.nodes[node] for node in w])
            polygons.append(polygon)

    if len(r) > 0:
        prev = parser.ways[r[0]]
        ordered_ways = []
        ordered_ways.append(prev)
        r.pop(0)
        while len(r):
            match = False
            for i in range(0, len(r)):
                w = parser.ways[r[i]]
                # first node of the next way matches the last of the previous one
                if w[0] == prev[-1]:
                    match = w
                # or maybe the way has to be reversed
                elif w[-1] == prev[-1]:
                    match = w[::-1]
                if match:
                    prev = match
                    ordered_ways.append(match)
                    r.pop(i)
                    break
            if not match:
                # no way continues the chain; bail out instead of looping forever
                break

        if len(ordered_ways) > 0:
            # now that ways are correctly ordered, we can create a unique geometry
            nodes = []
            for way in ordered_ways:
                for node in way:
                    nodes.append(parser.nodes[node])
            # make sure that first and last node are the same
            if nodes[0] != nodes[-1]:
                raise ValueError("relation ways do not form a closed ring")
            # create a shapely polygon with the nodes
            polygons.append(Polygon(nodes))

    multipolygon = MultiPolygon(polygons)
    return Response(multipolygon.wkt)
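The ordering loop above is plain endpoint chaining; a compact sketch of the same idea on bare node-id lists (ids invented), assuming every way has a continuation:

# open ways as lists of node ids; goal: chain them into one closed ring
ways = [[1, 2, 3], [5, 6, 1], [3, 4, 5]]

ring = ways.pop(0)
while ways:
    for i, w in enumerate(ways):
        if w[0] == ring[-1]:        # continues forward
            ring += w[1:]
        elif w[-1] == ring[-1]:     # continues after reversing
            ring += w[::-1][1:]
        else:
            continue
        ways.pop(i)
        break
    else:
        break  # no way continues the ring; give up

print(ring)                 # [1, 2, 3, 4, 5, 6, 1]
print(ring[0] == ring[-1])  # True: the ring is closed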
Beispiel #47
0
    def from_shape(cls,
                   shape,
                   height=0.,
                   name="area",
                   properties=None,
                   unit='um',
                   min_x=None,
                   max_x=None):
        '''
        Create an :class:`Area` from a :class:`Shape` object.

        Parameters
        ----------
        shape : :class:`Shape`
            Shape that should be converted to an Area.

        Returns
        -------
        :class:`Area` object.
        '''
        if _unit_support:
            from .units import Q_
            if isinstance(height, Q_):
                height = height.m_as(unit)
            if isinstance(min_x, Q_):
                min_x = min_x.m_as(unit)
            if isinstance(max_x, Q_):
                max_x = max_x.m_as(unit)

        obj = None
        g_type = None
        if isinstance(shape, MultiPolygon):
            g_type = "MultiPolygon"
        elif isinstance(shape, (Polygon, Shape, Area)):
            g_type = "Polygon"
        else:
            raise TypeError("Expected a Polygon or MultiPolygon object.")
        # find the scaling factor; use bounds so this works for Polygon
        # and MultiPolygon alike (MultiPolygon has no .exterior)
        scaling = 1.
        if None not in (min_x, max_x):
            leftmost, rightmost = shape.bounds[0], shape.bounds[2]
            scaling = (max_x - min_x) / (rightmost - leftmost)
            obj = scale(shape, scaling, scaling)
        else:
            if g_type == "Polygon":
                obj = Polygon(shape)
            else:
                obj = MultiPolygon(shape)

        obj.__class__ = cls
        obj._parent = None
        obj._unit = unit
        obj._geom_type = g_type
        obj.__class__ = Area
        obj._areas = None
        obj.height = height
        obj.name = name
        obj._prop = _PDict({} if properties is None else deepcopy(properties))
        obj._return_quantity = False

        return obj
Beispiel #48
0
    print(g.wkt)

###############################################################################
c.geoms[0].wkt
c.geoms[1].wkt
###############################################################################
from shapely.geometry import MultiPoint
points = MultiPoint([(0.0, 0.0), (1.0, 1.0)])
points.area
points.length
points.bounds
###############################################################################
from shapely.geometry import MultiLineString
coords = [((0, 0), (1, 1)), ((-1, 0), (1, 0))]
lines = MultiLineString(coords)
lines.area
lines.length
lines.bounds
len(lines.geoms)
###############################################################################
polygon = [(0, 0), (1, 1), (1, 2), (2, 2), (0, 0)]
s = [(10, 0), (21, 1), (31, 2), (24, 2), (10, 0)]
t = [(0, 50), (1, 21), (1, 22), (32, 2), (0, 50)]
from shapely.geometry import Polygon
p_a, s_a, t_a = [Polygon(x) for x in [polygon, s, t]]
from shapely.geometry import MultiPolygon
polygons = MultiPolygon([p_a, s_a, t_a])
len(polygons.geoms)
polygons.bounds
Beispiel #49
0
    def createConvexPath(self, pair):
        #pr = cProfile.Profile()
        #pr2 = cProfile.Profile()
        
        print(pair[1])
        odPointsList = ((pair[0][0].x, pair[0][0].y), (pair[0][1].x, pair[0][1].y))
        #st_line = LineString(odPointsList)
        labeledObstaclePoly = []
        totalConvexPathList = {}
        
        dealtArcList = {}
        totalConvexPathList[odPointsList] = LineString(odPointsList)
        
        terminate = 0
        idx_loop1 = 0
        #sp_l_set = []
        time_loop1 = 0
        time_contain2 = 0
        time_crossingDict = 0
        time_convexLoop = 0 
        time_impedingArcs = 0
        time_spatialFiltering = 0
        time_loop1_crossingDict = 0
        time_buildConvexHulls = 0
        while terminate == 0:
            t1s = time.time()
            idx_loop1 += 1
            
            t6s = time.time()
            w = shapefile.Writer(shapefile.POLYLINE)
            w.field('nem')
            for line in totalConvexPathList:
                w.line(parts=[[ list(x) for x in line ]])
                w.record('ff')
            w.save(self.path + "graph_" + str(idx_loop1) + self.version_name)

            totalGraph = self.createGraph(totalConvexPathList.keys())
            spatial_filter_n = networkx.dijkstra_path(totalGraph, odPointsList[0], odPointsList[1])
            spatial_filter = []
            for i in range(len(spatial_filter_n)-1):
                spatial_filter.append([spatial_filter_n[i], spatial_filter_n[i+1]])

            w = shapefile.Writer(shapefile.POLYLINE)
            w.field('nem')
            for line in spatial_filter:
                w.line(parts=[[ list(x) for x in line ]])
                w.record('ff')
            w.save(self.path + "spatial Filter_" + str(idx_loop1) + self.version_name)
            
            #sp_length = 0
            #for j in spatial_filter:
                #sp_length += LineString(j).length        
            #sp_l_set.append(sp_length)
            
            crossingDict = defaultdict(list)
            
            for line in spatial_filter:
                Line = LineString(line)
                for obs in self.obstaclesPolygons:
                    if Line.crosses(obs):
                        if obs not in labeledObstaclePoly:
                            labeledObstaclePoly.append(obs)
                    
                        crossingDict[tuple(line)].append(obs)
            
            t6e = time.time()
            time_spatialFiltering += t6e - t6s 
            
            if len(crossingDict.keys()) == 0:
                terminate = 1
                continue
            else:
                t7s = time.time()
                for tLine in crossingDict.keys():
                    #cLine = list(tLine)
                    if tLine in dealtArcList:
                        try:
                            del totalConvexPathList[tLine]
                        except KeyError:
                            del totalConvexPathList[(tLine[1], tLine[0])]
                        continue
                    else:
                        dealtArcList[tLine] = LineString(list(tLine))
                        try:
                            del totalConvexPathList[tLine]
                        except KeyError:
                            del totalConvexPathList[(tLine[1], tLine[0])]
                        containingObs = []
                        for obs in crossingDict[tLine]:
                            
                            convexHull = self.createConvexhull(obs, tLine)
                            self.splitBoundary(totalConvexPathList, convexHull)
                            
                            
                            convexHull = self.createConvexhull(obs, odPointsList)
                            self.splitBoundary(totalConvexPathList, convexHull)
                            convexHull2 = self.createConvexhull(obs)
                            if convexHull2.contains(Point(tLine[0])):
                                containingObs.append(obs)
                            elif convexHull2.contains(Point(tLine[1])):
                                containingObs.append(obs)
                        if len(containingObs) != 0:   #SPLIT
                            subConvexPathList = {}
                            vi_obs = MultiPolygon([x for x in containingObs])
                            containedLineCoords = list(tLine)
                            fromX = containedLineCoords[0][0]
                            fromY = containedLineCoords[0][1]
                            toX = containedLineCoords[1][0]
                            toY = containedLineCoords[1][1]
                            fxA = (fromY - toY) / (fromX - toX)
                            fxB = fromY - (fxA * fromX)
                            minX = vi_obs.bounds[0]
                            maxX = vi_obs.bounds[2]
                            split_line = LineString([(min(minX, fromX, toX), fxA * min(minX, fromX, toX) + fxB), (max(maxX, fromX, toX), fxA * max(maxX, fromX, toX) + fxB)])
                            
                            for obs in containingObs:
                                s1, s2 = self.splitPolygon(split_line, obs)
                                dividedObsPoly = []
                                #to deal with multipolygon
                                a = s1.intersection(obs)
                                b = s2.intersection(obs)
                                if a.type == "Polygon":
                                    dividedObsPoly.append(a)
                                else:
                                    for o in a.geoms:
                                        if o.type == "Polygon":
                                            dividedObsPoly.append(o)
                                if b.type == "Polygon":
                                    dividedObsPoly.append(b)
                                else:
                                    for o2 in b.geoms:
                                        if o2.type == "Polygon":
                                            dividedObsPoly.append(o2)
                                
                                for obs2 in dividedObsPoly:
                                    for pt in tLine:
                                        convexHull = self.createConvexhull(obs2, [pt])
                                        self.splitBoundary(subConvexPathList, convexHull)
                            subVertices = []
                            for line in subConvexPathList:
                                subVertices.extend(line)
                            subVertices = list(set(subVertices))
                            containingObsVertices = []
                            for obs in containingObs:
                                containingObsVertices.extend(list(obs.exterior.coords))
                            subVertices = [x for x in subVertices if x in containingObsVertices]
                            deleteList = []
                            for line in subConvexPathList:
                                chk_cross = 0
                                for obs in containingObs:
                                    if subConvexPathList[line].crosses(obs):
                                        chk_cross = 1
                                if chk_cross == 1:
                                    deleteList.append(line)
                            for line in deleteList:
                                del subConvexPathList[line]
                                #subConvexPathList.remove(line)
                            pairList = []
                            for i in range(len(subVertices)):
                                for j in range(i+1, len(subVertices)):
                                    pairList.append((subVertices[i], subVertices[j]))
                            for i in pairList:
                                Line = LineString(i)
                                chk_cross = 0
                                for obs in containingObs:
                                    if Line.crosses(obs):
                                        chk_cross = 1
                                    elif Line.within(obs):
                                        chk_cross = 1
                                if chk_cross == 0:
                                    subConvexPathList[i] = Line
                                    #subConvexPathList.append(i)
                            buffer_st_line = split_line.buffer(0.1)
                            deleteList = []
                            for line in subConvexPathList:
                                if buffer_st_line.contains(subConvexPathList[line]):
                                    deleteList.append(line)
                            for line in deleteList:
                                if subConvexPathList.has_key(line):
                                    del subConvexPathList[line]
                            #subConvexPathList = [x for x in subConvexPathList if x not in deleteList]
                            for line in subConvexPathList:
                                if not totalConvexPathList.has_key(line):
                                    if not totalConvexPathList.has_key((line[1],line[0])):
                                        totalConvexPathList[line] = subConvexPathList[line]

                w = shapefile.Writer(shapefile.POLYLINE)
                w.field('nem')
                for line in totalConvexPathList:
                    w.line(parts=[[ list(x) for x in line ]])
                    w.record('ff')
                w.save(self.path + "graph2_" + str(idx_loop1) + self.version_name) 
                t7e = time.time()
                time_loop1_crossingDict += t7e - t7s
                #new lines            
                labeled_multiPoly = MultiPolygon([x for x in labeledObstaclePoly])
                convexHull = self.createConvexhull(labeled_multiPoly, odPointsList)
                self.splitBoundary(totalConvexPathList, convexHull)
                #new lines end             
                                  
                #impededPathList 
                t5s = time.time()
                impededPathList = {}
                for line in totalConvexPathList:
                    for obs in labeledObstaclePoly:
                        if totalConvexPathList[line].crosses(obs):
                            impededPathList[line] = totalConvexPathList[line]
                            break
                t5e = time.time()
                time_impedingArcs += t5e - t5s
                for line in impededPathList:
                    del totalConvexPathList[line]
               
                terminate2 = 0
                idx_loop2 = 0
                t1e = time.time()
                time_loop1 += t1e - t1s    
                #w = shapefile.Writer(shapefile.POLYGON)
                #w.field('net')
                #for obs in labeledObstaclePoly:
                    #w.poly(parts=[[list(x) for x in list(obs.exterior.coords)]])
                    #w.record('ff')
                #w.save(self.path + "obs"+ str(idx_loop1) + "_" + self.version_name)                  
                while terminate2 == 0:
                    idx_loop2 += 1

                    deleteList = []
                    crossingDict = defaultdict(list)

                    for line in dealtArcList:
                        if impededPathList.has_key(line):
                            del impededPathList[line]
                        elif impededPathList.has_key((line[1], line[0])):
                            del impededPathList[(line[1],line[0])]
                    
                    t3s = time.time()
                    #pr.enable()
                    for line in impededPathList:
                        for obs in labeledObstaclePoly:
                            if impededPathList[line].crosses(obs):
                                crossingDict[line].append(obs)
                    
                    t3e = time.time()
                    time_crossingDict += t3e - t3s
                    #at this point, impededPathList should be emptied, as it only contains crossing arcs, and all of them
                    #should be replaced by convex hulls.
                    for line in crossingDict:
                        del impededPathList[line]
                    for line in impededPathList:
                        if not totalConvexPathList.has_key(line):
                            totalConvexPathList[line] = impededPathList[line]
                    impededPathList = {}
   
                    if len(crossingDict.keys()) == 0:
                        terminate2 = 1
                        continue
                    else:
                        #w = shapefile.Writer(shapefile.POLYLINE)
                        #w.field('nem')
                        #for line in crossingDict:
                            #w.line(parts=[[ list(x) for x in line ]])
                            #w.record('ff')
                        #w.save(self.path + "crossingDict_" + str(idx_loop1) + "_"+ str(idx_loop2) +"_"+ self.version_name)                        
                        t4s = time.time()
                        
                        for tLine in crossingDict.keys():
                            dealtArcList[tLine] = crossingDict[tLine]                
                            containingObs = []
                            for obs in crossingDict[tLine]:
                                chk_contain = 0
                                convexHull2 = self.createConvexhull(obs)
                                if convexHull2.contains(Point(tLine[0])):
                                    containingObs.append(obs)
                                    chk_contain = 1
                                elif convexHull2.contains(Point(tLine[1])):
                                    containingObs.append(obs)
                                    chk_contain = 1
                                if chk_contain == 0:
                                    t10s = time.time()
                                    convexHull = self.createConvexhull(obs, tLine)
                                    self.splitBoundary(impededPathList, convexHull)
                                    t10e = time.time()
                                    time_buildConvexHulls += t10e - t10s

                            if len(containingObs) != 0:  #SPLIT
                                #print "SPLIT"
                                t2s = time.time()
                                subConvexPathList = {}
                                vi_obs = MultiPolygon([x for x in containingObs])
                                containedLineCoords = tLine
                                fromX = containedLineCoords[0][0]
                                fromY = containedLineCoords[0][1]
                                toX = containedLineCoords[1][0]
                                toY = containedLineCoords[1][1]
                                fxA = (fromY - toY) / (fromX - toX)
                                fxB = fromY - (fxA * fromX)
                                minX = vi_obs.bounds[0]
                                maxX = vi_obs.bounds[2]
                                split_line = LineString([(min(minX, fromX, toX), fxA * min(minX, fromX, toX) + fxB), (max(maxX, fromX, toX), fxA * max(maxX, fromX, toX) + fxB)])
                                
                                for obs in containingObs:
                                    s1, s2 = self.splitPolygon(split_line, obs)
                                    dividedObsPoly = []
                                    #to deal with multipolygon
                                    a = s1.intersection(obs)
                                    b = s2.intersection(obs)
                                    if a.type == "Polygon":
                                        dividedObsPoly.append(a)
                                    else:
                                        for o in a.geoms:
                                            if o.type == "Polygon":
                                                dividedObsPoly.append(o)
                                    if b.type == "Polygon":
                                        dividedObsPoly.append(b)
                                    else:
                                        for o2 in b.geoms:
                                            if o2.type == "Polygon":
                                                dividedObsPoly.append(o2)
                                    
                                    for obs2 in dividedObsPoly:
                                        for pt in tLine:
                                            convexHull = self.createConvexhull(obs2, [pt])
                                            self.splitBoundary(subConvexPathList, convexHull)
                                subVertices = []
                                for line in subConvexPathList:
                                    subVertices.extend(line)
                                subVertices = list(set(subVertices))
                                containingObsVertices = []
                                for obs in containingObs:
                                    containingObsVertices.extend(list(obs.exterior.coords))
                                subVertices = [x for x in subVertices if x in containingObsVertices]
                                deleteList = []
                                for line in subConvexPathList:
                                    chk_cross = 0
                                    for obs in containingObs:
                                        if subConvexPathList[line].crosses(obs):
                                            chk_cross = 1
                                    if chk_cross == 1:
                                        deleteList.append(line)
                                for line in deleteList:
                                    del subConvexPathList[line]
                                    
                                pairList = []
                                for i in range(len(subVertices)):
                                    for j in range(i+1, len(subVertices)):
                                        pairList.append((subVertices[i], subVertices[j]))
                                
                                for i in pairList:
                                    Line = LineString(list(i))
                                    chk_cross = 0
                                    for obs in containingObs:
                                        if Line.crosses(obs):
                                            chk_cross = 1
                                        elif Line.within(obs):
                                            chk_cross = 1
                                    if chk_cross == 0:
                                        subConvexPathList[i] = Line
                                      
                                buffer_st_line = split_line.buffer(0.1)
                                deleteList = []
                                for line in subConvexPathList:
                                    if buffer_st_line.contains(subConvexPathList[line]):
                                        deleteList.append(line)
                                for line in deleteList:
                                    del subConvexPathList[line]
                                for line in subConvexPathList:
                                    if not impededPathList.has_key(line):
                                        if not impededPathList.has_key((line[1], line[0])):
                                            impededPathList[line] = subConvexPathList[line]
                                    
                                t2e = time.time()
                                time_contain2 += t2e - t2s
                        #pr.disable()
                        for line in dealtArcList:
                            if impededPathList.has_key(line):
                                del impededPathList[line]
                        #impededPathList = [x for x in impededPathList if x not in dealtArcList]
                        t4e = time.time()
                        time_convexLoop += t4e - t4s
                        #end of else
                    w = shapefile.Writer(shapefile.POLYLINE)
                    w.field('nem')
                    for line in impededPathList:
                        w.line(parts=[[ list(x) for x in line ]])
                        w.record('ff')
                    w.save(self.path + "After_graph_" + str(idx_loop1) + "_"+ str(idx_loop2) +"_"+ self.version_name)
                    #end of while2
                for line in impededPathList:
                    if not totalConvexPathList.has_key(line):
                        totalConvexPathList[line] = impededPathList[line]
                
                #totalConvexPathList.extend(impededPathList)
        totalGraph = self.createGraph(totalConvexPathList.keys())
        esp_n = networkx.dijkstra_path(totalGraph, odPointsList[0], odPointsList[1])
        esp = []
        for i in range(len(esp_n)-1):
            esp.append([esp_n[i], esp_n[i+1]])
        w = shapefile.Writer(shapefile.POLYLINE)
        w.field('nem')
        no_edges = 0
        for line in totalConvexPathList.keys():
            no_edges += 1
            w.line(parts=[[ list(x) for x in line ]])
            w.record('ff')
        w.save(self.path + "totalpath" + self.version_name + "%d" % pair[1] )              
        w = shapefile.Writer(shapefile.POLYLINE)
        w.field('nem')
        for line in esp:
            w.line(parts=[[ list(x) for x in line ]])
            w.record('ff')
        w.save(self.path + "ESP_" + self.version_name + "%d" % pair[1])
        #sp_len_str = str(sp_l_set)[1:-1]
        
        #s = StringIO.StringIO()
        #sortby = 'cumulative'
        #ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
        #ps.print_stats()
        #print s.getvalue()
#        
#        s = StringIO.StringIO()
#        sortby = 'cumulative'
#        ps = pstats.Stats(pr2, stream=s).sort_stats(sortby)
#        ps.print_stats()
#        print s.getvalue()
        
        print "loop1: ", time_loop1
        print "Spatial Filtering: ", time_spatialFiltering
        print "loop1 crossingDict: ", time_loop1_crossingDict
        print "crossingDict: ", time_crossingDict
        print 'convexLoop: ', time_convexLoop
        print "time_contain: ", time_contain2
        print "impedingArcs: ", time_impedingArcs
        print "convexHUll: ", time_buildConvexHulls
        return 'convexpath %d %d %d %f %f %f' % (pair[1], no_edges, len(labeledObstaclePoly), time_convexLoop, time_crossingDict, time_buildConvexHulls)
def main():

    print(sys.argv)

    catalog_v = sys.argv[1]
    projection = sys.argv[2]
    datasetId = sys.argv[3]
    entryId = sys.argv[4]
    parameter = sys.argv[5]
    time = sys.argv[6]
    tab = sys.argv[7]
    colorBar = sys.argv[8]

    print(projection)

    print('Starting')

    # Polygon

    # Convert KML file to Shape File
    # (because I had existing code for working with shape files)
    # Won't use this here, but do want to make sure I can get the KML converter working for future use
    # after testing pull straight from the pre-generated shp file

    myShape = keyholemarkup2x('./config/masks/antarctic_bounds/polymask.kml',
                              output='shp')

    #print(myShape)
    # Open Shapefiles, read coverage areas
    # get extents, then query Erddap
    # use shapely to pull data from within polygons

    areaProps = []

    for pol in fiona.open('./config/masks/antarctic_bounds/polymask.shp'):
        areaProps.append(pol['properties'])

    Multi = MultiPolygon([
        shape(pol['geometry'])
        for pol in fiona.open('./config/masks/antarctic_bounds/polymask.shp')
    ])
    #Multi.wkt

    polygon = Multi[0]  # we only have one polygon

    # the shapefile data, satellite data and plotting projections are all different
    # this step transforms the shapefile data to the 4326 projection of the satellite data
    # requests to the server need to be in 4326
    #print(polygon.bounds)

    # if working with a projected shapefile instead of kml file
    # see the 06_09_ShapefileLoad notebook for demo of how to work with a shape file that is projected

    epsg4326 = pyproj.Proj(
        init='epsg:4326')  # lon/lat coordinate system (polygon kml file)
    epsg3031 = pyproj.Proj(init='epsg:3031')  # South polar stereo plots)
    epsg3412 = pyproj.Proj(
        init='epsg:3412')  # South polar stereo (data coordinates)
    esri102020 = pyproj.Proj(
        init='esri:102020')  # Antarctic equal area (area calculations)

    #convert polygon to dataset projection
    project = partial(
        pyproj.transform,
        pyproj.Proj(init='epsg:4326'),  # lon/lat coordinate system
        pyproj.Proj(init='epsg:3412'))  # south polar stereo

    p3412 = transform(project,
                      polygon)  # new shapely polygon with new projection

    #print(p3412.bounds) #minx, miny, maxx, maxy

    # Erddap wants the bounds in a different order (miny, maxy, minx, maxx)
    p3412boundSwap = [
        p3412.bounds[1], p3412.bounds[0], p3412.bounds[3], p3412.bounds[2]
    ]

    # Convert original polygon to equal area projection
    # Calculate the area of the polygon
    areaProj = partial(
        pyproj.transform,
        pyproj.Proj(init='epsg:4326'),  # lon/lat coordinate system
        pyproj.Proj(init='esri:102020'))  # antarctic equal area

    p102020 = transform(
        areaProj,
        polygon)  # new shapely polygon in new projection, used later?
    p102020_area = transform(
        areaProj,
        polygon).area  # area of projected polygon in meters (projection units)
    study_area_km = p102020_area / 1000000  # area of polygon in km squared
    #print(study_area_km)

    study_area_info = {'study_area_square_km': study_area_km}

    # Get polygon path for data masking and plotting
    polyListx, polyListy = p3412.exterior.xy  # perimeter of polygon
    polyList = list(zip(list(polyListx),
                        list(polyListy)))  # formatted perimeter
    studyAreaPath = path.Path(polyList)  # path for data mask, in EPSG:3031

    # Dataset Info

    # What is the id of the dataset you want to work with?
    # Will use the NSIDC CDR Sea Ice Concentration Monthly when it is loaded in
    dId = 'nsidcSISQSHmday'

    # Get dataset metadata info from ERDDAP in preparation for data request
    erddap_metadata = getDatasetInfo(dId)
    md = makemd(erddap_metadata)
    md["dimensions"] = getDimensions(erddap_metadata)
    md["parameters"] = getParameterList(erddap_metadata)

    # Get valid times for this dataset to later loop through each timestep

    # Customize start time because we know the cdr data actually doesn't start until July 1987
    # (the way the data is published the timestamps go back further but there is no data prior to July 1987)
    timeStart = '1987-07-01T00:00:00Z'
    timeEnd = md["time_coverage_end"]

    validTimesUrl = 'https://polarwatch.noaa.gov/erddap/griddap/' + dId + '.json?time[(' + timeStart + '):1:(' + timeEnd + ')]'

    http = urllib3.PoolManager()

    projection = 'epsg3031'
    tab = 'monthly'
    colorBar = 'KT_ice,,,0,1,'
    parameter = 'seaice_conc_monthly_cdr'
    m0 = datetime.now()
    entryId = 'ice-nsidc-cdr'
    datasetId = 'nsidcSISQSHmday'
    time = '2017-01-18T00:00:00Z'

    # apache needs to make these directories here for the permissions to be set correctly
    mapImageDirRoot = '/home/jpatterson/pythonscripts/projected_data_demo/'
    if not os.path.exists(mapImageDirRoot): os.makedirs(mapImageDirRoot)

    logdir = mapImageDirRoot + 'logs'
    if not os.path.exists(logdir): os.makedirs(logdir)

    mapImageDir = mapImageDirRoot + entryId + '/' + datasetId
    if not os.path.exists(mapImageDir):
        try:
            os.makedirs(mapImageDir)  # creates with default perms 0777
            os.chmod(mapImageDir, 0o4775)
        except:
            print('could not make image directory')

    # ** Setup logging system **
    todayStr = datetime.today().strftime("%m_%d_%Y")
    log_fn = mapImageDirRoot + '/logs/catalog.log'
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    # create a file handler
    handler = logging.FileHandler(log_fn)
    handler.setLevel(logging.INFO)
    # create a logging format
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # add the handlers to the logger
    logger.addHandler(handler)  # file writing
    logger.addHandler(logging.StreamHandler())  # print to console
    # to include in log file for records
    scriptstart = datetime.now()
    scriptstartstr = scriptstart.strftime("%Y-%m-%d %H:%M:%S")
    #logger.info('PREVIEW.PY START: ' + scriptstartstr)

    mm = time[5:7]
    dd = time[8:10]
    yyyy = time[0:4]
    HH = time[11:13]
    MM = time[14:16]
    timep = yyyy + '_' + mm + '_' + dd + '_' + HH + '_' + MM

    #Keep track of the input info to pass back to the webpage
    req = {
        'projection': projection,
        'datasetId': datasetId,
        'time': time,
        'timep': timep,
        'scriptstarttime': scriptstartstr,
        'entryId': entryId,
        'parameter': parameter,
        'tab': tab,
        'colorBar': colorBar
    }

    dimensions = getDimensions(erddap_metadata)

    ycoord_dimension = next(
        (dimension for dimension in dimensions if dimension["axis"] == "Y"))
    xcoord_dimension = next(
        (dimension for dimension in dimensions if dimension["axis"] == "X"))
    logger.info(ycoord_dimension["name"])

    #working on rewriting this section to use the structure I am creating for the info
    # stopped here 1/22/18 shutdown
    ycoord_cells = getDimensionInfo(dimensions, ycoord_dimension["name"],
                                    'nValues')
    xcoord_cells = getDimensionInfo(dimensions, xcoord_dimension["name"],
                                    'nValues')

    # use erddap info to determine if latitude is increasing or decreasing
    req["ycoord_range"] = getDimensionInfo(dimensions,
                                           ycoord_dimension["name"],
                                           'actual_range')
    ycoord_avgSpacing = getDimensionInfo(
        dimensions, ycoord_dimension["name"],
        'averageSpacing')  # used in check bounds

    if float(ycoord_avgSpacing) >= 0:
        ycoord_1 = str(float(req["ycoord_range"][0]))
        ycoord_2 = str(float(req["ycoord_range"][1]))
    else:
        ycoord_1 = str(float(req["ycoord_range"][1]))
        ycoord_2 = str(float(req["ycoord_range"][0]))

    req["ycoord_range"] = [ycoord_1, ycoord_2]

    req["xcoord_range"] = getDimensionInfo(dimensions,
                                           xcoord_dimension["name"],
                                           'actual_range')

    #req["ycoord_res"] = getDimensionInfo(dimensions, ycoord_dimension["name"], 'averageSpacing')
    #req["xcoord_res"] = getDimensionInfo(dimensions, xcoord_dimension["name"], 'averageSpacing')

    # Note: Cannot use ERDDAP actual range plus resolution to determine extent (inaccurate)

    # using erddap provided dataset size to determine how large of spacing to use for dataset request.
    # want images to be less than 1000px
    ycoord_sub = float(ycoord_cells) / 800
    xcoord_sub = float(xcoord_cells) / 800

    # pick the larger of the two spacing options, should usually be the lon sub
    if ycoord_sub <= xcoord_sub: sub = xcoord_sub
    else: sub = ycoord_sub
    #sub = (lat_sub + lon_sub)/2         # Use the average of the two
    req["sub"] = str((math.ceil(sub)))  # Round up to largest whole number

    # ?add a check that dataset has data in this region (arctic, antarctic)
    # if not pass something back to the webpage that says it doesn't have data in the area

    # Use polygon maximum bounds to make sure there should be data available in the polygon
    # return the bounds in the format required by the data server
    ycoord_min = ycoord_dimension['valid_range'][0]
    ycoord_max = ycoord_dimension['valid_range'][1]
    xcoord_min = xcoord_dimension['valid_range'][0]
    xcoord_max = xcoord_dimension['valid_range'][1]

    req["bounds"] = [ycoord_max, ycoord_min, xcoord_min, xcoord_max]
    print(req["bounds"])

    #adjust bounds order for erddap if needed
    qbounds = getRequestBounds(md, req)
    print(qbounds)

    # Set the name of the parameter to access
    parameter = 'seaice_conc_monthly_cdr'

    # You can reduce the resolution of the data returned from the server
    # This can be helpful during testing if the dataset is very large
    # Set this to one for full resolution, two for half, and so on
    sub = '1'
    print(req["sub"])

    #shp = shapereader.Reader('/home/jenn/aerdData/shoreline/GSHHS_shp/i/GSHHS_i_L6')

    timestamp = req["time"]
    timestamp = '2016-08-17T00:00:00Z'
    print(timestamp)
    m0 = datetime.now()
    dataset = getData(dId, parameter, qbounds, timestamp, sub)
    m1 = datetime.now()

    data3412 = dataset['data']

    m0 = datetime.now()
    # check data file for expected altitude dimension response
    if len(np.shape(data3412.variables[parameter])) == 3:
        data = data3412.variables[parameter][0, :, :]
    elif len(np.shape(data3412.variables[parameter])) == 4:
        data = data3412.variables[parameter][0, 0, :, :]

    xgrid = data3412.variables['xgrid'][:]
    ygrid = data3412.variables['ygrid'][:]

    xmin = float(xgrid[0])
    xmax = float(xgrid[len(xgrid) - 1])
    ymin = float(ygrid[len(ygrid) - 1])
    ymax = float(ygrid[0])  # note: ymax should be bigger than ymin and is towards the top of the plot

    # output projected bounds corner points for leaflet
    # format to pass is top left, bottom left, bottom right, top right
    top_left = [xmin, ymax]
    bottom_left = [xmin, ymin]
    bottom_right = [xmax, ymin]
    top_right = [xmax, ymax]
    req["boundsProjected"] = [top_left, bottom_left, bottom_right, top_right]

    #xrange = abs(xmin) + abs(xmax)
    #yrange = abs(ymin) + abs(ymax)

    cellWidth = 25000  #25 km = 25000 meters
    cellHeight = 25000
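    # pcolormesh expects cell corners rather than centers, so the grids below
    # are shifted by half a cell and extended by one edge value on each axis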

    xgridMod = xgrid - (cellWidth / 2)

    extraX = xgridMod[len(xgridMod) - 1] + cellWidth
    xgridMod = np.append(xgridMod, extraX)
    ygridMod = ygrid + (cellHeight / 2)
    extraY = ygridMod[len(ygridMod) - 1] - cellHeight
    ygridMod = np.append(ygridMod, extraY)
    #adjust start left by half pixel width (move to the left)
    startLeft = xgrid[0] - (cellWidth / 2)
    #adjust start top by half pixel width ( move towards the top)
    startTop = ygrid[0] + (cellHeight / 2)

    rows = len(ygrid) - 1
    cols = len(xgrid) - 1

    m0 = datetime.now()
    X, Y = np.meshgrid(xgrid, ygrid)  #create the grid
    points = np.array((X.flatten(), Y.flatten())).T  #break it down
    mask = studyAreaPath.contains_points(points).reshape(
        X.shape)  # calc and grid a mask based on polygon path
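    # mask is True for grid points inside the study-area polygon;
    # masked_where() below hides every cell that falls outside it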

    datamasked = np.ma.masked_where(mask != True,
                                    data)  # create masked data array

    study_area_data_cells = datamasked.count()
    print(study_area_data_cells)  #print(np.asscalar(study_area_data_cells))

    try:
        mymin = np.min(datamasked[~datamasked.mask])
    except ValueError:
        print('no data within polygon for this timestamp')
        print(
            'either before start of data (pre 1987) or all ice concentration values are 0 (summertime)'
        )
    else:
        if "num_data_cells" not in study_area_info:
            print("some ice in polygon, can calculate number of cells")
            study_area_info["num_data_cells"] = np.asscalar(
                study_area_data_cells)

    # fill data = 255, land = 254, coastline = 253, lakes = 252
    # check for any non-data values
    # np.where returns a tuple of index arrays; a non-empty first axis means hits
    fillSpot = np.where(datamasked == 255)
    if fillSpot[0].size: print('FOUND A FILL VALUE')
    landSpot = np.where(datamasked == 254)
    if landSpot[0].size: print('FOUND A LAND VALUE')
    coastSpot = np.where(datamasked == 253)
    if coastSpot[0].size: print('FOUND A COAST VALUE')
    lakeSpot = np.where(datamasked == 252)
    if lakeSpot[0].size: print('FOUND A LAKE VALUE')
    m1 = datetime.now()
    print(m1 - m0)

    epsg3412 = ccrs.epsg(3412)

    #show all erddap data within study area
    thisproj = ccrs.SouthPolarStereo()
    # Notes on cartopy map projection options
    # cannot directly specify the projection with a proj4 string or a crs id
    # different projections have different options that can be passed to them
    # south polar stereo, north polar stereo and plate caree are the PW standard map projections
    # I haven't been passing other options to the plots but now that they will go on maps I may have to.
    # Albers is an option and I would have to pass it some options to get it centered on Alaska I think.
    # Documentation says that specifying crs with epsg via epsg.io should work. Not working for epsg 3031 or 3412 though

    fig = plt.figure()
    ax1 = plt.axes(projection=thisproj)  # set projection to sps
    ax1.set_global()
    '''
    #add shoreline shape file as plate carree
    for record, geometry in zip(shp.records(), shp.geometries()):
        ax1.add_geometries([geometry], ccrs.PlateCarree(), facecolor='lightgray',
                          edgecolor='black')
    '''
    dataplt = ax1.pcolormesh(xgridMod,
                             ygridMod,
                             datamasked,
                             transform=epsg3412,
                             vmin=0.0,
                             vmax=1.0)

    #ax1.set_extent([-3100000, -1200000, 300000, 2300000], thisproj)
    print(ymin, ymax, xmin, xmax)

    # set_extent is (x0, x1, y0, y1)
    #ax1.set_extent([ xmin-550000, xmax+550000, ymin-550000, ymax+550000], thisproj) # expand plot bounds
    ax1.set_extent(
        [xmin, xmax, ymin, ymax],
        thisproj)  # a little off only because i made the polygon by hand
    #ax1.gridlines(alpha='0.3')
    #ax1.outline_patch.set_linewidth(0.5)
    ax1.outline_patch.set_visible(False)
    ax1.background_patch.set_visible(False)
    #ax1.coastlines(color='red')
    #ax1.outline_patch.set_edgecolor('#dddddd')
    #cbar = fig.colorbar(dataplt)
    imagefn = mapImageDirRoot + 'testoutput_1500' + timestamp + '.png'
    print(imagefn)
    m1 = datetime.now()
    print(m1 - m0)
    plt.savefig(imagefn, dpi=300, bbox_inches='tight', transparent=True)
    m1 = datetime.now()
    print(m1 - m0)
    plt.show()
    plt.cla()
    plt.clf()
    plt.close()
Beispiel #51
0
    def run(self, args):

        logging.info(args)

        base_path = pathlib.Path(args.basemesh)
        demlo_paths = args.demlo
        demhi_paths = args.demhi
        out_path = pathlib.Path(args.out)

        out_path.parent.mkdir(exist_ok=True, parents=True)

        base_mesh_4_hfun = Mesh.open(base_path, crs="EPSG:4326")
        base_mesh_4_geom = Mesh.open(base_path, crs="EPSG:4326")

        geom_rast_list = []
        hfun_rast_list = []
        hfun_hirast_list = []
        hfun_lorast_list = []
        interp_rast_list = []
        for dem_path in demlo_paths:
            hfun_lorast_list.append(Raster(dem_path))
            interp_rast_list.append(Raster(dem_path))

        for dem_path in demhi_paths:
            geom_rast_list.append(Raster(dem_path))
            hfun_hirast_list.append(Raster(dem_path))
            interp_rast_list.append(Raster(dem_path))

        hfun_rast_list = [*hfun_lorast_list, *hfun_hirast_list]

        geom = Geom(geom_rast_list,
                    base_mesh=base_mesh_4_geom,
                    zmax=15,
                    nprocs=4)

        hfun = Hfun(hfun_rast_list,
                    base_mesh=base_mesh_4_hfun,
                    hmin=30,
                    hmax=15000,
                    nprocs=4)

        ## Add contour refinements at 0 separately for GEBCO and NCEI
        ctr1 = Contour(level=0, sources=hfun_hirast_list)
        hfun.add_contour(None, 1e-3, 30, contour_defn=ctr1)

        ctr2 = Contour(level=0, sources=hfun_lorast_list)
        hfun.add_contour(None, 1e-2, 500, contour_defn=ctr2)

        ## Add constant values from 0 to inf on hi-res rasters
        hfun.add_constant_value(30,
                                0,
                                source_index=list(range(len(demhi_paths))))

        # Calculate geom
        geom_mp = geom.get_multipolygon()
        # Write to disk
        gpd.GeoDataFrame({
            'geometry': geom_mp
        }, crs="EPSG:4326").to_file(str(out_path) + '.geom.shp')
        del geom_mp

        # Calculate hfun
        hfun_msh_t = hfun.msh_t()
        # Write to disk
        sms2dm.writer(msh_t_to_2dm(hfun_msh_t),
                      str(out_path) + '.hfun.2dm', True)
        del hfun_msh_t

        # Read back stored values to pass to mesh driver
        read_gdf = gpd.read_file(str(out_path) + '.geom.shp')
        geom_from_disk = MultiPolygonGeom(MultiPolygon(list(
            read_gdf.geometry)),
                                          crs=read_gdf.crs)

        read_hfun = Mesh.open(str(out_path) + '.hfun.2dm', crs="EPSG:4326")
        hfun_from_disk = HfunMesh(read_hfun)

        jigsaw = JigsawDriver(geom_from_disk,
                              hfun=hfun_from_disk,
                              initial_mesh=None)
        jigsaw.verbosity = 1

        ## Execute mesher (processing of geom and hfun happens here)
        mesh = jigsaw.run()

        ## Free-up memory
        del read_gdf
        del geom_from_disk
        del read_hfun
        del hfun_from_disk
        gc.collect()

        mesh.write(str(out_path) + '.raw.2dm', format='2dm', overwrite=True)

        ## Interpolate DEMs on the mesh
        mesh.interpolate(interp_rast_list, nprocs=4)

        ## Output
        mesh.write(out_path, format='2dm', overwrite=True)
Beispiel #52
0
    def func_checker(*args, **kwargs):
        """
        A decorator to split and reproject polygon vectors in a GeoDataFrame whose values cross the Greenwich Meridian.
        Begins by examining whether the bounds of the supplied geometry cross longitude = 0 and, if so, proceeds to
        split the polygons at the meridian into new polygons, erasing a small buffer to prevent invalid geometries
        when transforming the lons from WGS84 to WGS84 +lon_wrap=180 (longitudes from 0 to 360).

        Returns a GeoDataFrame with the new features in a wrap_lon WGS84 projection if needed.
        """
        try:
            poly = kwargs["poly"]
            x_dim = kwargs["x_dim"]
            wrap_lons = kwargs["wrap_lons"]
        except KeyError:
            return func(*args, **kwargs)

        if wrap_lons:
            if (np.min(x_dim) < 0
                    and np.max(x_dim) >= 360) or (np.min(x_dim) < -180
                                                  and np.max(x_dim) >= 180):
                warnings.warn(
                    "Dataset doesn't seem to be using lons between 0 and 360 degrees or between -180 and 180 degrees."
                    " Tread with caution.",
                    UserWarning,
                    stacklevel=4,
                )
            split_flag = False
            for (index, feature) in poly.iterrows():
                if (feature.geometry.bounds[0] <
                        0) and (feature.geometry.bounds[2] > 0):
                    split_flag = True
                    warnings.warn(
                        "Geometry crosses the Greenwich Meridian. Proceeding to split polygon at Greenwich."
                        " This feature is experimental. Output might not be accurate.",
                        UserWarning,
                        stacklevel=4,
                    )

                    # Create a meridian line at Greenwich, split polygons at this line and erase a buffer line
                    if isinstance(feature.geometry, MultiPolygon):
                        union = MultiPolygon(cascaded_union(feature.geometry))
                    else:
                        union = Polygon(cascaded_union(feature.geometry))
                    meridian = LineString([Point(0, 90), Point(0, -90)])
                    buffered = meridian.buffer(0.000000001)
                    split_polygons = split(union, meridian)
                    # TODO: This doesn't seem to be thread safe in Travis CI on macOS. Merits testing with a local machine.
                    buffered_split_polygons = [
                        feat for feat in split_polygons.difference(buffered)
                    ]

                    # Cannot assign iterable with `at` (pydata/pandas#26333) so a small hack:
                    # Load split features into a new GeoDataFrame with WGS84 CRS
                    split_gdf = gpd.GeoDataFrame(
                        geometry=[cascaded_union(buffered_split_polygons)],
                        crs="epsg:4326",
                    )
                    poly.at[[index], "geometry"] = split_gdf.geometry.values
                    # split_gdf.columns = ["index", "geometry"]

                    # feature = split_gdf

            # Reproject features in WGS84 CSR to use 0 to 360 as longitudinal values
            poly = poly.to_crs(
                "+proj=longlat +ellps=WGS84 +lon_wrap=180 +datum=WGS84 +no_defs"
            )
            crs1 = poly.crs
            if split_flag:
                warnings.warn(
                    "Rebuffering split polygons to ensure edge inclusion in selection",
                    UserWarning,
                    stacklevel=4,
                )
                poly = gpd.GeoDataFrame(poly.buffer(0.000000001),
                                        columns=["geometry"])
                poly.crs = crs1

            kwargs["poly"] = poly

        return func(*args, **kwargs)
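

# A minimal sketch (not from the original) of the meridian split performed
# above with shapely.ops.split: a polygon crossing lon = 0 is cut in two.
from shapely.geometry import LineString, Point, Polygon
from shapely.ops import split

box = Polygon([(-10, 0), (10, 0), (10, 20), (-10, 20)])
meridian = LineString([Point(0, 90), Point(0, -90)])
pieces = split(box, meridian)  # GeometryCollection, one polygon per side
assert len(pieces.geoms) == 2
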
def main():
    parser = ArgumentParser(
        description=
        'Used to import the osm2pgsql expire-tiles file to Postgres',
        prog=sys.argv[0])
    parser.add_argument(
        '--buffer',
        type=float,
        default=0.0,
        help='Extent buffer to the tiles [m], default is 0',
    )
    parser.add_argument(
        '--simplify',
        type=float,
        default=0.0,
        help='Simplify the result geometry [m], default is 0',
    )
    parser.add_argument(
        '--create',
        default=False,
        action="store_true",
        help='create the table if not exists',
    )
    parser.add_argument(
        '--delete',
        default=False,
        action="store_true",
        help='empty the table',
    )
    parser.add_argument(
        'file',
        metavar='FILE',
        help='The osm2pgsql expire-tiles file',
    )
    parser.add_argument(
        'connection',
        metavar='CONNECTION',
        help=
        'The PostgreSQL connection string e.g. "user=www-data password=www-data dbname=sig host=localhost"',
    )
    parser.add_argument(
        'table',
        metavar='TABLE',
        help='The PostgreSQL table to fill',
    )
    parser.add_argument(
        '--schema',
        default='public',
        help=
        'The PostgreSQL schema to use (should already exists), default is public',
    )
    parser.add_argument(
        'column',
        metavar='COLUMN',
        default='geom',
        nargs='?',
        help='The PostgreSQL column, default is "geom"',
    )
    parser.add_argument(
        '--srid',
        type=int,
        default=3857,
        nargs='?',
        help='The stored geometry SRID, no conversion by default (3857)',
    )
    options = parser.parse_args()

    connection = psycopg2.connect(options.connection)
    cursor = connection.cursor()

    if options.create:
        cursor.execute(
            "SELECT count(*) FROM pg_tables WHERE schemaname='{}' AND tablename='{}'"
            .format(options.schema, options.table))
        if cursor.fetchone()[0] == 0:
            cursor.execute(
                'CREATE TABLE IF NOT EXISTS "{}"."{}" (id serial)'.format(
                    options.schema, options.table))
            cursor.execute(
                "SELECT AddGeometryColumn('{}', '{}', '{}', {}, 'MULTIPOLYGON', 2)"
                .format(options.schema, options.table, options.column,
                        options.srid))

    if options.delete:
        cursor.execute('DELETE FROM "{}"'.format(options.table))

    geoms = []
    grid = QuadTileGrid(max_extent=(-20037508.34, -20037508.34, 20037508.34,
                                    20037508.34), )
    with open(options.file, "r") as f:
        for coord in f:
            extent = grid.extent(parse_tilecoord(coord), options.buffer)
            geoms.append(
                Polygon(((extent[0], extent[1]), (extent[0], extent[3]),
                         (extent[2], extent[3]), (extent[2], extent[1]))))
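    # each expired tile becomes an axis-aligned square; cascaded_union below
    # dissolves adjacent squares into a single (multi)polygon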
    if len(geoms) == 0:
        print("No coords found")
        connection.commit()
        cursor.close()
        connection.close()
        exit(0)
    geom = cascaded_union(geoms)
    if geom.geom_type == 'Polygon':
        geom = MultiPolygon((geom, ))

    if options.simplify > 0:
        geom = geom.simplify(options.simplify)  # simplify() returns a new geometry

    sql_geom = "ST_GeomFromText('{}', 3857)".format(geom.wkt)
    if options.srid <= 0:
        sql_geom = "ST_GeomFromText('{}')".format(geom.wkt)  # pragma: no cover
    elif options.srid != 3857:
        sql_geom = 'ST_Transform({}, {})'.format(sql_geom, options.srid)

    cursor.execute('INSERT INTO "{}" ("{}") VALUES ({})'.format(
        options.table, options.column, sql_geom))
    connection.commit()
    cursor.close()
    connection.close()
    print('Import successful')
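
# Example invocation (hypothetical file name, credentials and table):
#   python import_expire_tiles.py --create --buffer 10 expire.list \
#       "user=www-data password=www-data dbname=sig host=localhost" expired_tiles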
Beispiel #54
0
def polygons_to_geom_dicts(polygons, skip_invalid=True):
    """
    Converts a Polygons element into a list of geometry dictionaries,
    preserving all value dimensions.

    For array conversion the following conventions are applied:

    * Any nan-separated array is converted into a MultiPolygon
    * Any array without nans is converted to a Polygon
    * If there are holes associated with a nan-separated array
      the holes are assigned to the polygons by testing for an
      intersection
    * If any single array does not have at least three coordinates
      it is skipped by default
    * If skip_invalid=False and an array has less than three
      coordinates it will be converted to a LineString
    """
    interface = polygons.interface.datatype
    if interface == 'geodataframe':
        return [row.to_dict() for _, row in polygons.data.iterrows()]
    elif interface == 'geom_dictionary':
        return polygons.data

    polys = []
    xdim, ydim = polygons.kdims
    has_holes = polygons.has_holes
    holes = polygons.holes() if has_holes else None
    for i, polygon in enumerate(polygons.split(datatype='columns')):
        array = np.column_stack([polygon.pop(xdim.name), polygon.pop(ydim.name)])
        splits = np.where(np.isnan(array[:, :2].astype('float')).sum(axis=1))[0]
        arrays = np.split(array, splits+1) if len(splits) else [array]

        invalid = False
        subpolys = []
        subholes = None
        if has_holes:
            subholes = [[LinearRing(h) for h in hs] for hs in holes[i]]
        for j, arr in enumerate(arrays):
            if j != (len(arrays)-1):
                arr = arr[:-1] # Drop nan

            if len(arr) == 0:
                continue
            elif len(arr) == 1:
                if skip_invalid:
                    continue
                poly = Point(arr[0])
                invalid = True
            elif len(arr) == 2:
                if skip_invalid:
                    continue
                poly = LineString(arr)
                invalid = True
            elif not len(splits):
                poly = Polygon(arr, (subholes[j] if has_holes else []))
            else:
                poly = Polygon(arr)
                hs = [h for h in subholes[j]] if has_holes else []
                poly = Polygon(poly.exterior, holes=hs)
            subpolys.append(poly)

        if invalid:
            polys += [dict(polygon, geometry=sp) for sp in subpolys]
            continue
        elif len(subpolys) == 1:
            geom = subpolys[0]
        elif subpolys:
            geom = MultiPolygon(subpolys)
        else:
            continue
        polygon['geometry'] = geom
        polys.append(polygon)
    return polys
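
# A minimal sketch (not part of the example above) of the nan-separation
# convention described in the docstring: NaN rows split one coordinate array
# into parts, which are wrapped into a MultiPolygon.
import numpy as np
from shapely.geometry import MultiPolygon, Polygon

arr = np.array([(0., 0.), (1., 0.), (1., 1.),
                (np.nan, np.nan),
                (2., 2.), (3., 2.), (3., 3.)])
splits = np.where(np.isnan(arr).sum(axis=1))[0]
parts = [p[~np.isnan(p).any(axis=1)] for p in np.split(arr, splits + 1)]
geom = MultiPolygon([Polygon(p) for p in parts if len(p) >= 3])
assert len(geom.geoms) == 2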
Beispiel #55
0
    def __init__(self, relation_element, way_elements, node_elements):
        self.polygons = []
        self.ways = {}
        self.relation = relation_element
        self.open = True
        reporting = Reporting()
        self.id = relation_element.attrib["id"]
        self.version = relation_element.attrib["version"]
        self.changeset = relation_element.attrib["changeset"]
        self.tags = {}

        nodes = {}
        for element in node_elements:
            node = OSM_Node(element)
            nodes[node.id] = node

        ways = {}
        for element in way_elements:
            way = OSM_Way(element)
            way.add_nodes(nodes)
            ways[way.id] = way

        for sub in relation_element:
            if sub.tag == 'tag':
                key = sub.attrib["k"]
                value = sub.attrib["v"]
                self.tags[key] = value
            elif sub.tag == 'member' and sub.attrib["type"] == 'way':
                role = sub.attrib.get("role", "")
                if role:
                    raise NotImplementedError, "Role %r for relation way members not supported" % (role,)
                way_id = sub.attrib["ref"]
                way = ways.get(way_id)
                if way:
                    if not way.complete:
                        raise ValueError, "Incomplete way: %r" % (way,)
                    self.ways[way_id] = way
                else:
                    raise ValueError, "Way not found: %r" % (way_id,)

        self.name = self.tags.get("name")
        if not self.ways:
            raise ValueError, "No ways"
        
        self.open = False
        ways_left = self.ways.values()
        while ways_left:
            segment_start = ways_left.pop(0)
            polygon = []
            for node in segment_start.nodes:
                polygon.append((node.lat, node.lon))
            last_end = segment_start.end_node
            while ways_left:
                if last_end is segment_start.start_node:
                    # cycle ended
                    break
                next = None
                for way in ways_left:
                    if way.start_node is last_end:
                        last_end = way.end_node
                        for node in way.nodes[1:]:
                            polygon.append((node.lat, node.lon))
                        next = way
                        break
                    elif way.end_node is last_end:
                        last_end = way.start_node
                        rnodes = list(way.nodes[1:])
                        rnodes.reverse()
                        for node in rnodes:
                            polygon.append((node.lat, node.lon))
                        next = way
                        break
                if next:
                    ways_left.remove(next)
                else:
                    # open segment ends
                    self.open = True
                    break
            self.polygons.append(polygon)
        if HAVE_SHAPELY:
            reporting.output_msg("info", 
                    "Using Shapely for 'point in polygon' checks")
            self.multi_polygon = MultiPolygon([(p, ()) for p in self.polygons])
            self._contains_impl = self._contains_shapely_impl
        else:
            reporting.output_msg("info", 
                "Using Python function for the 'point in polygon' checks")
            self._contains_impl = self._contains_python_impl
def _format_shape_osm(bbox, result_NodesFromWays, result_NodesWaysFromRels,
                      item, save_path):
    """format edges, nodes and relations from overpy result objects into shapes
    Parameters:
        bbox
        result_NodesFromWays
        result_NodesWaysFromRels
        item
        save_path

    Returns:
        gdf_all: Geodataframe with Linestrings, Polygons & Multipolygons
    """
    # polygon vs. linestrings in nodes from ways result:

    schema_poly = {
        'geometry': 'Polygon',
        'properties': {
            'Name': 'str:80',
            'Natural_Type': 'str:80',
            'Item': 'str:80'
        }
    }
    schema_line = {
        'geometry': 'LineString',
        'properties': {
            'Name': 'str:80',
            'Natural_Type': 'str:80',
            'Item': 'str:80'
        }
    }
    shapeout_poly = save_path + '/' + str(item) + '_poly_' + str(int(bbox[0])) +\
    '_' + str(int(bbox[1])) + ".shp"
    shapeout_line = save_path + '/' + str(item) + '_line_' + str(int(bbox[0])) +\
    '_' + str(int(bbox[1])) + ".shp"

    way_poly = []
    way_line = []
    for way in result_NodesFromWays.ways:
        if (way.nodes[0].id == way.nodes[-1].id) & (len(way.nodes) > 2):
            way_poly.append(way)
        else:
            way_line.append(way)

    with fiona.open(shapeout_poly,
                    'w',
                    crs=from_epsg(4326),
                    driver='ESRI Shapefile',
                    schema=schema_poly) as output:
        for way in way_poly:
            geom = mapping(
                geometry.Polygon([node.lon, node.lat] for node in way.nodes))
            prop = {
                'Name': way.tags.get("name", "n/a"),
                'Natural_Type': way.tags.get("natural", "n/a"),
                'Item': item
            }
            output.write({'geometry': geom, 'properties': prop})

    with fiona.open(shapeout_line,
                    'w',
                    crs=from_epsg(4326),
                    driver='ESRI Shapefile',
                    schema=schema_line) as output2:
        for way in way_line:
            geom2 = {
                'type': 'LineString',
                'coordinates': [(node.lon, node.lat) for node in way.nodes]
            }
            prop2 = {
                'Name': way.tags.get("name", "n/a"),
                'Natural_Type': way.tags.get("natural", "n/a"),
                'Item': item
            }
            output2.write({'geometry': geom2, 'properties': prop2})

    gdf_poly = geopandas.read_file(shapeout_poly)
    for ending in ['.shp', ".cpg", ".dbf", ".prj", '.shx']:
        os.remove(save_path + '/' + str(item) + '_poly_' + str(int(bbox[0])) +
                  '_' + str(int(bbox[1])) + ending)
    gdf_line = geopandas.read_file(shapeout_line)
    for ending in ['.shp', ".cpg", ".dbf", ".prj", '.shx']:
        os.remove(save_path + '/' + str(item) + '_line_' + str(int(bbox[0])) +
                  '_' + str(int(bbox[1])) + ending)

    # add buffer to the lines (0.000045° are ~5m)
    # (re-assigning a loop variable would not modify the GeoDataFrame)
    gdf_line['geometry'] = gdf_line.geometry.buffer(0.000045)

    gdf_all = gdf_poly.append(gdf_line)

    # detect multipolygons in relations:
    print(
        'Converting results for %s to correct geometry and GeoDataFrame: MultiPolygons'
        % item)

    MultiPoly = []
    relTags = []  # keep each relation's tags so records match their geometries
    for relation in result_NodesWaysFromRels.relations:
        OuterList = []
        InnerList = []
        PolyList = []
        # get inner and outer parts from overpy results, convert into linestrings
        # to check for closedness later
        for relationway in relation.members:
            if relationway.role == 'outer':
                for way in result_NodesWaysFromRels.ways:
                    if way.id == relationway.ref:
                        OuterList.append(
                            geometry.LineString([node.lon, node.lat]
                                                for node in way.nodes))
            else:
                for way in result_NodesWaysFromRels.ways:
                    if way.id == relationway.ref:
                        InnerList.append(
                            geometry.LineString([node.lon, node.lat]
                                                for node in way.nodes))

        OuterPoly = []
        # in case outer polygons are not fragmented, add those already in correct geometry
        for outer in OuterList:
            if outer.is_closed:
                OuterPoly.append(
                    Polygon(outer.coords[0:(len(outer.coords) + 1)]))
                OuterList.remove(outer)

        initialLength = len(OuterList)
        i = 0
        OuterCoords = []

        # loop to account for more than one fragmented outer ring
        while (len(OuterList) > 0) & (i <= initialLength):
            OuterCoords.append(
                OuterList[0].coords[0:(len(OuterList[0].coords) + 1)])
            OuterList.remove(OuterList[0])
            for _ in range(0, len(OuterList)):
                # get all the other outer polygon pieces in the right order
                # (only works if fragments are in correct order, anyways!!
                # so added another loop around it in case not!)
                for outer in OuterList:
                    if outer.coords[0] == OuterCoords[-1][-1]:
                        OuterCoords[-1] = OuterCoords[-1] + outer.coords[0:(
                            len(outer.coords) + 1)]
                        OuterList.remove(outer)

        for entry in OuterCoords:
            if len(entry) > 2:
                OuterPoly.append(Polygon(entry))

        PolyList = OuterPoly
        # get the inner polygons (usually in correct, closed shape - not accounting
        # for the fragmented case as in outer poly)
        for inner in InnerList:
            if inner.is_closed:
                PolyList.append(Polygon(inner))

        MultiPoly.append(MultiPolygon([shape(poly) for poly in PolyList]))
        relTags.append(relation.tags)

    schema_multi = {
        'geometry': 'MultiPolygon',
        'properties': {
            'Name': 'str:80',
            'Type': 'str:80',
            'Item': 'str:80'
        }
    }

    shapeout_multi = (save_path + '/' + str(item) + '_multi_' +
                      str(int(bbox[0])) + '_' + str(int(bbox[1])) + ".shp")

    with fiona.open(shapeout_multi,
                    'w',
                    crs=from_epsg(4326),
                    driver='ESRI Shapefile',
                    schema=schema_multi) as output:
        # MultiPoly was built in relation order, so zip each geometry with its
        # own relation's tags (using the leaked loop variable `relation` here
        # would write the last relation's tags to every feature)
        for rel, multipoly in zip(result_NodesWaysFromRels.relations,
                                  MultiPoly):
            prop1 = {
                'Name': rel.tags.get("name", "n/a"),
                'Type': rel.tags.get("type", "n/a"),
                'Item': item
            }
            output.write({'geometry': mapping(multipoly), 'properties': prop1})
    gdf_multi = geopandas.read_file(shapeout_multi)
    for ending in ['.shp', '.cpg', '.dbf', '.prj', '.shx']:
        os.remove(save_path + '/' + str(item) + '_multi_' + str(int(bbox[0])) +
                  '_' + str(int(bbox[1])) + ending)
    gdf_all = gdf_all.append(gdf_multi, sort=True)

    print('Combined all results for %s to one GeoDataFrame: done' % item)

    return gdf_all
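
A note on the ring-stitching loop above: shapely can assemble closed rings from unordered, fragmented way segments directly. A minimal alternative sketch, assuming overpy-style ways with `.nodes` (a replacement for the manual loop, not a copy of it):

from shapely.geometry import LineString
from shapely.ops import polygonize

def rings_from_ways(ways):
    """Build polygons from (possibly fragmented) OSM way segments."""
    segments = [LineString([(n.lon, n.lat) for n in w.nodes]) for w in ways]
    # polygonize joins segments that share endpoints and returns every
    # closed ring it can form as a Polygon
    return list(polygonize(segments))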
Beispiel #57
0
import pickle

import fiona
import numpy as np
from pyproj import Proj, transform
from shapely.geometry import Polygon, MultiPolygon
from scipy.spatial import Voronoi


# NYC boundary data
with fiona.open('indata/nybb_13a/nybb.shp') as source:
    # set up three projection types: boundary data start; google-standard
    # lat/lon, using WGS84; Albers Equal Area
    p1 = Proj(source.crs, preserve_units=True)
    p2 = Proj({'proj':'longlat', 'datum':'WGS84'})
    p3 = Proj({'proj':'aea', 'datum':'WGS84', 'lon_0':'-96'})

    # for each shape, convert its coordinates to AEA
    nyc = MultiPolygon()
    for shape in source:
        for subshape in shape['geometry']['coordinates']:
            p1_points = np.array(subshape[0])

            p2_points = transform(p1, p2, p1_points[:,0], p1_points[:,1])
            p3_points = transform(p2, p3, p2_points[0], p2_points[1])
            p3_points = np.vstack([p3_points[0], p3_points[1]]).T

            new = Polygon(p3_points)
            nyc = nyc.union(new)

# i = 0
# for shape in nyc:
    # x,y = shape.exterior.xy
    # plt.plot(x,y)
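
A note on the union loop above: calling .union() once per borough redoes the GEOS work for every intermediate result; collecting the polygons first and unioning once with shapely.ops.unary_union is usually much faster. A minimal self-contained sketch:

from shapely.geometry import Polygon
from shapely.ops import unary_union

# five overlapping rectangles collapse into one polygon in a single call
polys = [Polygon([(i, 0), (i + 1.5, 0), (i + 1.5, 1), (i, 1)])
         for i in range(5)]
merged = unary_union(polys)
print(merged.geom_type, merged.area)  # Polygon 5.5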
Beispiel #58
0
    def createConvexPath(self, pair, FID_ij):
        #pr = cProfile.Profile()
        #pr2 = cProfile.Profile()

        fd_fullPayload = 5 * 5280
        fd_empty = 10 * 5280
        fd_delivery = 3.33 * 5280
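        # thresholds above are in feet (5280 ft = 1 mile): presumably
        # flight-distance limits of 5 mi with full payload, 10 mi empty,
        # and 3.33 mi per delivery leg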

        #print pair
        odPointsList = ((pair[0].x, pair[0].y), (pair[1].x, pair[1].y))
        st_line = LineString(odPointsList)
        if self.indi == "FF":
            if st_line.length > fd_fullPayload:
                return 0, 0, None
        elif self.indi == "FD":
            if st_line.length > fd_delivery:
                return 0, 0, None
        labeledObstaclePoly = []
        totalConvexPathList = {}

        dealtArcList = {}
        totalConvexPathList[odPointsList] = LineString(odPointsList)

        terminate = 0
        idx_loop1 = 0
        #sp_l_set = []
        time_loop1 = 0
        time_contain2 = 0
        time_crossingDict = 0
        time_convexLoop = 0
        time_impedingArcs = 0
        time_spatialFiltering = 0
        time_loop1_crossingDict = 0
        time_buildConvexHulls = 0
        while terminate == 0:
            t1s = time.time()
            idx_loop1 += 1

            t6s = time.time()

            totalGraph = self.createGraph(totalConvexPathList.keys())
            spatial_filter_n = networkx.dijkstra_path(totalGraph,
                                                      odPointsList[0],
                                                      odPointsList[1])
            spatial_filter = []
            for i in xrange(len(spatial_filter_n) - 1):
                spatial_filter.append(
                    [spatial_filter_n[i], spatial_filter_n[i + 1]])

            crossingDict = defaultdict(list)

            for line in spatial_filter:
                Line = LineString(line)
                for obs in self.obstaclesPolygons:
                    if Line.crosses(obs):
                        if obs not in labeledObstaclePoly:
                            labeledObstaclePoly.append(obs)

                        crossingDict[tuple(line)].append(obs)

            t6e = time.time()
            time_spatialFiltering += t6e - t6s

            if len(crossingDict.keys()) == 0:

                terminate = 1
                continue
            else:
                t7s = time.time()
                for tLine in crossingDict.keys():
                    #cLine = list(tLine)
                    if dealtArcList.has_key(tLine):
                        try:
                            del totalConvexPathList[tLine]
                        except KeyError:
                            del totalConvexPathList[(tLine[1], tLine[0])]
                        continue
                    else:
                        dealtArcList[tLine] = LineString(list(tLine))
                        try:
                            del totalConvexPathList[tLine]
                        except KeyError:
                            del totalConvexPathList[(tLine[1], tLine[0])]
                        containingObs = []
                        for obs in crossingDict[tLine]:

                            convexHull = self.createConvexhull(obs, tLine)
                            self.splitBoundary(totalConvexPathList, convexHull)

                            convexHull = self.createConvexhull(
                                obs, odPointsList)
                            self.splitBoundary(totalConvexPathList, convexHull)
                            convexHull2 = self.createConvexhull(obs)
                            if convexHull2.contains(Point(tLine[0])):
                                containingObs.append(obs)
                            elif convexHull2.contains(Point(tLine[1])):
                                containingObs.append(obs)
                        if len(containingObs) != 0:  #SPLIT
                            subConvexPathList = {}
                            vi_obs = MultiPolygon(containingObs)
                            containedLineCoords = list(tLine)
                            fromX = containedLineCoords[0][0]
                            fromY = containedLineCoords[0][1]
                            toX = containedLineCoords[1][0]
                            toY = containedLineCoords[1][1]
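                            # slope/intercept of the line through the arc's
                            # endpoints (y = fxA*x + fxB); below it is extended
                            # across the obstacles' bounding box so it fully
                            # splits every containing polygon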
                            fxA = (fromY - toY) / (fromX - toX)
                            fxB = fromY - (fxA * fromX)
                            minX = vi_obs.bounds[0]
                            maxX = vi_obs.bounds[2]
                            split_line = LineString([
                                (min(minX, fromX,
                                     toX), fxA * min(minX, fromX, toX) + fxB),
                                (max(maxX, fromX,
                                     toX), fxA * max(maxX, fromX, toX) + fxB)
                            ])

                            for obs in containingObs:
                                s1, s2 = self.splitPolygon(split_line, obs)
                                dividedObsPoly = []
                                #to deal with multipolygon
                                a = s1.intersection(obs)
                                b = s2.intersection(obs)
                                if a.type == "Polygon":
                                    dividedObsPoly.append(a)
                                else:
                                    for o in a.geoms:
                                        if o.type == "Polygon":
                                            dividedObsPoly.append(o)
                                if b.type == "Polygon":
                                    dividedObsPoly.append(b)
                                else:
                                    for o2 in b.geoms:
                                        if o2.type == "Polygon":
                                            dividedObsPoly.append(o2)

                                for obs2 in dividedObsPoly:
                                    for pt in tLine:
                                        convexHull = self.createConvexhull(
                                            obs2, [pt])
                                        self.splitBoundary(
                                            subConvexPathList, convexHull)
                            subVertices = []
                            for line in subConvexPathList:
                                subVertices.extend(line)
                            subVertices = list(set(subVertices))
                            containingObsVertices = []
                            for obs in containingObs:
                                containingObsVertices.extend(
                                    list(obs.exterior.coords))
                            subVertices = [
                                x for x in subVertices
                                if x in containingObsVertices
                            ]
                            deleteList = []
                            for line in subConvexPathList:
                                chk_cross = 0
                                for obs in containingObs:
                                    if subConvexPathList[line].crosses(obs):
                                        chk_cross = 1
                                if chk_cross == 1:
                                    deleteList.append(line)
                            for line in deleteList:
                                del subConvexPathList[line]
                                #subConvexPathList.remove(line)
                            pairList = []
                            for i in range(len(subVertices)):
                                for j in range(i + 1, len(subVertices)):
                                    pairList.append(
                                        (subVertices[i], subVertices[j]))
                            for i in pairList:
                                Line = LineString(i)
                                chk_cross = 0
                                for obs in containingObs:
                                    if Line.crosses(obs):
                                        chk_cross = 1
                                    elif Line.within(obs):
                                        chk_cross = 1
                                if chk_cross == 0:
                                    subConvexPathList[i] = Line
                                    #subConvexPathList.append(i)
                            buffer_st_line = split_line.buffer(0.1)
                            deleteList = []
                            for line in subConvexPathList:
                                if buffer_st_line.contains(
                                        subConvexPathList[line]):
                                    deleteList.append(line)
                            for line in deleteList:
                                if subConvexPathList.has_key(line):
                                    del subConvexPathList[line]
                            #subConvexPathList = [x for x in subConvexPathList if x not in deleteList]
                            for line in subConvexPathList:
                                if not totalConvexPathList.has_key(line):
                                    if not totalConvexPathList.has_key(
                                        (line[1], line[0])):
                                        totalConvexPathList[
                                            line] = subConvexPathList[
                                                line]  #if line not in totalConvexPathList:
                                    #if [line[1], line[0]] not in totalConvexPathList:
                                    #totalConvexPathList.append(line)

                #w = shapefile.Writer(shapefile.POLYLINE)
                #w.field('nem')
                #for line in totalConvexPathList:
                #w.line(parts=[[ list(x) for x in line ]])
                #w.record('ff')
                #w.save(self.path + "graph2_" + str(idx_loop1) + self.version_name)
                t7e = time.time()
                time_loop1_crossingDict += t7e - t7s
                #new lines
                labeled_multipoly = MultiPolygon(labeledObstaclePoly)
                convexHull = self.createConvexhull(labeled_multipoly,
                                                   odPointsList)
                self.splitBoundary(totalConvexPathList, convexHull)
                #new lines end

                #impededPathList
                t5s = time.time()
                impededPathList = {}
                for line in totalConvexPathList:
                    for obs in labeledObstaclePoly:
                        if totalConvexPathList[line].crosses(obs):
                            impededPathList[line] = totalConvexPathList[line]
                            break
                t5e = time.time()
                time_impedingArcs += t5e - t5s
                for line in impededPathList:
                    del totalConvexPathList[line]

                terminate2 = 0
                idx_loop2 = 0
                t1e = time.time()
                time_loop1 += t1e - t1s
                while terminate2 == 0:
                    idx_loop2 += 1

                    deleteList = []
                    crossingDict = defaultdict(list)

                    for line in dealtArcList:
                        if impededPathList.has_key(line):
                            del impededPathList[line]
                        elif impededPathList.has_key((line[1], line[0])):
                            del impededPathList[(line[1], line[0])]

                    t3s = time.time()
                    #pr.enable()
                    for line in impededPathList:
                        for obs in labeledObstaclePoly:
                            if impededPathList[line].crosses(obs):
                                crossingDict[line].append(obs)

                    t3e = time.time()
                    time_crossingDict += t3e - t3s
                    #at this point, impededArcList should be emptied, as it only contains crossing arcs, and all of them
                    #should be replaced by convex hulls.
                    for line in crossingDict:
                        del impededPathList[line]
                    for line in impededPathList:
                        if not totalConvexPathList.has_key(line):
                            totalConvexPathList[line] = impededPathList[line]
                    impededPathList = {}

                    if len(crossingDict.keys()) == 0:
                        terminate2 = 1
                        continue
                    else:
                        #w = shapefile.Writer(shapefile.POLYLINE)
                        #w.field('nem')
                        #for line in crossingDict:
                        #w.line(parts=[[ list(x) for x in line ]])
                        #w.record('ff')
                        #w.save(self.path + "crossingDict_" + str(idx_loop1) + "_"+ str(idx_loop2) +"_"+ self.version_name)
                        t4s = time.time()

                        for tLine in crossingDict.keys():
                            dealtArcList[tLine] = crossingDict[tLine]
                            containingObs = []
                            for obs in crossingDict[tLine]:
                                chk_contain = 0
                                convexHull2 = self.createConvexhull(obs)
                                if convexHull2.contains(Point(tLine[0])):
                                    containingObs.append(obs)
                                    chk_contain = 1
                                elif convexHull2.contains(Point(tLine[1])):
                                    containingObs.append(obs)
                                    chk_contain = 1
                                if chk_contain == 0:
                                    t10s = time.time()
                                    convexHull = self.createConvexhull(
                                        obs, tLine)
                                    self.splitBoundary(impededPathList,
                                                       convexHull)
                                    t10e = time.time()
                                    time_buildConvexHulls += t10e - t10s

                            if len(containingObs) != 0:  #SPLIT
                                #print "SPLIT"
                                t2s = time.time()
                                subConvexPathList = {}
                                vi_obs = MultiPolygon(containingObs)
                                containedLineCoords = tLine
                                fromX = containedLineCoords[0][0]
                                fromY = containedLineCoords[0][1]
                                toX = containedLineCoords[1][0]
                                toY = containedLineCoords[1][1]
                                fxA = (fromY - toY) / (fromX - toX)
                                fxB = fromY - (fxA * fromX)
                                minX = vi_obs.bounds[0]
                                maxX = vi_obs.bounds[2]
                                split_line = LineString([
                                    (min(minX, fromX, toX),
                                     fxA * min(minX, fromX, toX) + fxB),
                                    (max(maxX, fromX, toX),
                                     fxA * max(maxX, fromX, toX) + fxB)
                                ])

                                for obs in containingObs:
                                    s1, s2 = self.splitPolygon(split_line, obs)
                                    dividedObsPoly = []
                                    #to deal with multipolygon
                                    a = s1.intersection(obs)
                                    b = s2.intersection(obs)
                                    if a.type == "Polygon":
                                        dividedObsPoly.append(a)
                                    else:
                                        for o in a.geoms:
                                            if o.type == "Polygon":
                                                dividedObsPoly.append(o)
                                    if b.type == "Polygon":
                                        dividedObsPoly.append(b)
                                    else:
                                        for o2 in b.geoms:
                                            if o2.type == "Polygon":
                                                dividedObsPoly.append(o2)

                                    for obs2 in dividedObsPoly:
                                        for pt in tLine:
                                            convexHull = self.createConvexhull(
                                                obs2, [pt])
                                            self.splitBoundary(
                                                subConvexPathList, convexHull)
                                subVertices = []
                                for line in subConvexPathList:
                                    subVertices.extend(line)
                                subVertices = list(set(subVertices))
                                containingObsVertices = []
                                for obs in containingObs:
                                    containingObsVertices.extend(
                                        list(obs.exterior.coords))
                                subVertices = [
                                    x for x in subVertices
                                    if x in containingObsVertices
                                ]
                                deleteList = []
                                for line in subConvexPathList:
                                    chk_cross = 0
                                    for obs in containingObs:
                                        if subConvexPathList[line].crosses(
                                                obs):
                                            chk_cross = 1
                                    if chk_cross == 1:
                                        deleteList.append(line)
                                for line in deleteList:
                                    del subConvexPathList[line]

                                pairList = []
                                for i in range(len(subVertices)):
                                    for j in range(i + 1, len(subVertices)):
                                        pairList.append(
                                            (subVertices[i], subVertices[j]))

                                for i in pairList:
                                    Line = LineString(list(i))
                                    chk_cross = 0
                                    for obs in containingObs:
                                        if Line.crosses(obs):
                                            chk_cross = 1
                                        elif Line.within(obs):
                                            chk_cross = 1
                                    if chk_cross == 0:
                                        subConvexPathList[i] = Line

                                buffer_st_line = split_line.buffer(0.1)
                                deleteList = []
                                for line in subConvexPathList:
                                    if buffer_st_line.contains(
                                            subConvexPathList[line]):
                                        deleteList.append(line)
                                for line in deleteList:
                                    del subConvexPathList[line]
                                for line in subConvexPathList:
                                    if not impededPathList.has_key(line):
                                        if not impededPathList.has_key(
                                            (line[1], line[0])):
                                            impededPathList[
                                                line] = subConvexPathList[line]

                                t2e = time.time()
                                time_contain2 += t2e - t2s
                        #pr.disable()
                        for line in dealtArcList:
                            if impededPathList.has_key(line):
                                del impededPathList[line]
                        #impededPathList = [x for x in impededPathList if x not in dealtArcList]
                        t4e = time.time()
                        time_convexLoop += t4e - t4s
                        #end of else
                    #w = shapefile.Writer(shapefile.POLYLINE)
                    #w.field('nem')
                    #for line in impededPathList:
                    #w.line(parts=[[ list(x) for x in line ]])
                    #w.record('ff')
                    #w.save(self.path + "After_graph_" + str(idx_loop1) + "_"+ str(idx_loop2) +"_"+ self.version_name)
                    #end of while2
                for line in impededPathList:
                    if not totalConvexPathList.has_key(line):
                        totalConvexPathList[line] = impededPathList[line]

                #totalConvexPathList.extend(impededPathList)
        totalGraph = self.createGraph(totalConvexPathList.keys())
        esp_n = networkx.dijkstra_path(totalGraph, odPointsList[0],
                                       odPointsList[1])
        esp = []
        for i in range(len(esp_n) - 1):
            esp.append([esp_n[i], esp_n[i + 1]])
        w = shapefile.Writer(shapefile.POLYLINE)
        #w.field('nem')
        #no_edges = 0
        #for line in totalConvexPathList.keys():
        #no_edges += 1
        #w.line(parts=[[ list(x) for x in line ]])
        #w.record('ff')
        #w.save(self.path + "totalpath_" + "%s" % FID_ij )
        #w = shapefile.Writer(shapefile.POLYLINE)
        if self.indi == "FF":
            w.field('nem')
            for line in esp:
                w.line(parts=[[list(x) for x in line]])
                w.record('ff')
            w.save(self.path + "ESP_" + "%s" % FID_ij)
        #targetPysal = pysal.IOHandlers.pyShpIO.shp_file(self.path + "ESP_" + "%s" % FID_ij)
        #targetShp = self.generateGeometry(targetPysal)
        total_length = 0
        for coords in esp:
            line = LineString(coords)
            total_length += line.length

        if self.indi == "FF":
            if total_length <= fd_fullPayload:

                return 1, total_length, self.path + "ESP_%s.shp" % FID_ij
            else:
                return 0, 0, None
        elif self.indi == 'FD':
            if total_length <= fd_delivery:
                return 1, total_length, None
            else:
                return 0, 0, None
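
A note on the graph step above: createConvexPath hands the arc dictionary's keys to self.createGraph (not shown here) and runs networkx.dijkstra_path over the result. A minimal sketch of that step, where create_graph is a hypothetical stand-in for the missing helper and arcs are ((x1, y1), (x2, y2)) vertex pairs:

import math
import networkx

def create_graph(arcs):
    # weighted graph over arc endpoints; Dijkstra then minimizes the total
    # Euclidean length along the chosen arcs
    G = networkx.Graph()
    for p1, p2 in arcs:
        G.add_edge(p1, p2, weight=math.hypot(p1[0] - p2[0], p1[1] - p2[1]))
    return G

# esp = networkx.dijkstra_path(create_graph(arcs), origin, destination)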
Beispiel #59
0
def zonal_stats(vectors, raster, layer_num=0, band_num=1, nodata_value=None,
                global_src_extent=False, categorical=False, stats=None,
                copy_properties=False, all_touched=False, transform=None,
                add_stats=None, raster_out=False):
    """Summary statistics of a raster, broken out by vector geometries.

    Parameters
    ----------
    vectors : path to an OGR vector source or list of geo_interface or WKT str
    raster : ndarray or path to a GDAL raster source
        If ndarray is passed, the `transform` kwarg is required.
    layer_num : int, optional
        If `vectors` is a path to an OGR source, the vector layer to use
        (counting from 0).
        defaults to 0.
    band_num : int, optional
        If `raster` is a GDAL source, the band number to use (counting from 1).
        defaults to 1.
    nodata_value : float, optional
        If `raster` is a GDAL source, this value overrides any NODATA value
        specified in the file's metadata.
        If `None`, the file's metadata's NODATA value (if any) will be used.
        `ndarray`s don't support `nodata_value`.
        defaults to `None`.
    global_src_extent : bool, optional
        Pre-allocate entire raster before iterating over vector features.
        Use `True` if limited by disk IO or indexing into raster;
            requires sufficient RAM to store array in memory
        Use `False` with fast disks and a well-indexed raster, or when
        memory-constrained.
        Ignored when `raster` is an ndarray,
            because it is already completely in memory.
        defaults to `False`.
    categorical : bool, optional
        If `True`, return a dict of pixel-value counts for each zone in
        addition to any requested summary statistics.
        defaults to `False`.
    stats : list of str, or space-delimited str, optional
        Which statistics to calculate for each zone.
        All possible choices are listed in `VALID_STATS`.
        defaults to `DEFAULT_STATS`, a subset of these.
    copy_properties : bool, optional
        Include feature properties alongside the returned stats.
        defaults to `False`
    all_touched : bool, optional
        Whether to include every raster cell touched by a geometry, or only
        those having a center point within the polygon.
        defaults to `False`
    transform : list of float, optional
        GDAL-style geotransform coordinates when `raster` is an ndarray.
        Required when `raster` is an ndarray, otherwise ignored.
    add_stats : dict, optional
        Dictionary mapping stat names to functions of the masked array;
        each is computed per feature and added to the results.
    raster_out : bool, optional
        Include the masked numpy array for each feature.
        Each feature dictionary will have the following additional keys:
            clipped raster (`mini_raster`)
            Geo-transform (`mini_raster_GT`)
            No Data Value (`mini_raster_NDV`)

    Returns
    -------
    list of dicts
        Each dict represents one vector geometry.
        Its keys include `__fid__` (the geometry feature id)
        and each of the `stats` requested.
    """

    if not stats:
        if not categorical:
            stats = DEFAULT_STATS
        else:
            stats = []
    else:
        if isinstance(stats, str):
            if stats in ['*', 'ALL']:
                stats = VALID_STATS
            else:
                stats = stats.split()
    for x in stats:
        if x.startswith("percentile_"):
            try:
                get_percentile(x)
            except ValueError:
                raise RasterStatsError(
                    "Stat `%s` is not valid; must use `percentile_`"
                    " followed by a float between 0 and 100" % x)
        elif x not in VALID_STATS:
            raise RasterStatsError(
                "Stat `%s` not valid; "
                "must be one of \n %r" % (x, VALID_STATS))

    run_count = False
    if categorical or 'majority' in stats or 'minority' in stats or \
       'unique' in stats:
        # run the counter once, only if needed
        run_count = True

    if isinstance(raster, np.ndarray):
        raster_type = 'ndarray'

        # must have transform arg
        if not transform:
            raise RasterStatsError("Must provide the 'transform' kwarg when "
                                   "using ndarrays as src raster")
        rgt = transform
        rsize = (raster.shape[1], raster.shape[0])

        # global_src_extent is implicitly turned on, array is already in memory
        if not global_src_extent:
            global_src_extent = True

        if nodata_value is not None:
            raise NotImplementedError("ndarrays don't support 'nodata_value'")

    else:
        raster_type = 'gdal'
        rds = gdal.Open(raster, GA_ReadOnly)
        if not rds:
            raise RasterStatsError("Cannot open %r as GDAL raster" % raster)
        rb = rds.GetRasterBand(band_num)
        rgt = rds.GetGeoTransform()
        rsize = (rds.RasterXSize, rds.RasterYSize)

        if nodata_value is not None:
            nodata_value = float(nodata_value)
            rb.SetNoDataValue(nodata_value)
        else:
            nodata_value = rb.GetNoDataValue()

    features_iter, strategy, spatial_ref = get_features(vectors, layer_num)

    if global_src_extent and raster_type == 'gdal':
        # create an in-memory numpy array of the source raster data
        # covering the whole extent of the vector layer
        if strategy != "ogr":
            raise RasterStatsError("global_src_extent requires OGR vector")

        # find extent of ALL features
        ds = ogr.Open(vectors)
        layer = ds.GetLayer(layer_num)
        ex = layer.GetExtent()
        # transform from OGR extent to xmin, ymin, xmax, ymax
        layer_extent = (ex[0], ex[2], ex[1], ex[3])

        global_src_offset = bbox_to_pixel_offsets(rgt, layer_extent, rsize)
        global_src_array = rb.ReadAsArray(*global_src_offset)
    elif global_src_extent and raster_type == 'ndarray':
        # offsets are (xoff, yoff, xsize, ysize); ndarray shape is (rows, cols)
        global_src_offset = (0, 0, raster.shape[1], raster.shape[0])
        global_src_array = raster

    mem_drv = ogr.GetDriverByName('Memory')
    driver = gdal.GetDriverByName('MEM')

    results = []

    for i, feat in enumerate(features_iter):
        if feat['type'] == "Feature":
            geom = shape(feat['geometry'])
        else:  # it's just a geometry
            geom = shape(feat)

        # Point and MultiPoint don't play well with GDALRasterize
        # convert them into box polygons the size of a raster cell
        buff = rgt[1] / 2.0
        if geom.type == "MultiPoint":
            geom = MultiPolygon([box(*(pt.buffer(buff).bounds))
                                for pt in geom.geoms])
        elif geom.type == 'Point':
            geom = box(*(geom.buffer(buff).bounds))

        ogr_geom_type = shapely_to_ogr_type(geom.type)

        geom_bounds = list(geom.bounds)

        # calculate new pixel coordinates of the feature subset
        src_offset = bbox_to_pixel_offsets(rgt, geom_bounds, rsize)

        new_gt = (
            (rgt[0] + (src_offset[0] * rgt[1])),
            rgt[1],
            0.0,
            (rgt[3] + (src_offset[1] * rgt[5])),
            0.0,
            rgt[5]
        )

        if src_offset[2] <= 0 or src_offset[3] <= 0:
            # we're off the raster completely, no overlap at all
            # so there's no need to even bother trying to calculate
            feature_stats = dict([(s, None) for s in stats])
        else:
            if not global_src_extent:
                # use feature's source extent and read directly from source
                # fastest option when you have fast disks and fast raster
                # advantage: each feature uses the smallest raster chunk
                # disadvantage: lots of disk reads on the source raster
                src_array = rb.ReadAsArray(*src_offset)
            else:
                # derive array from global source extent array
                # useful *only* when disk IO or raster format inefficiencies
                # are your limiting factor
                # advantage: reads raster data in one pass before loop
                # disadvantage: large vector extents combined with big rasters
                #               require lotsa memory
                xa = src_offset[0] - global_src_offset[0]
                ya = src_offset[1] - global_src_offset[1]
                xb = xa + src_offset[2]
                yb = ya + src_offset[3]
                src_array = global_src_array[ya:yb, xa:xb]

            # Create a temporary vector layer in memory
            mem_ds = mem_drv.CreateDataSource('out')
            mem_layer = mem_ds.CreateLayer('out', spatial_ref, ogr_geom_type)
            ogr_feature = ogr.Feature(feature_def=mem_layer.GetLayerDefn())
            ogr_geom = ogr.CreateGeometryFromWkt(geom.wkt)
            ogr_feature.SetGeometryDirectly(ogr_geom)
            mem_layer.CreateFeature(ogr_feature)

            # Rasterize it
            rvds = driver.Create('rvds', src_offset[2], src_offset[3], 1, gdal.GDT_Byte)
            rvds.SetGeoTransform(new_gt)

            if all_touched:
                gdal.RasterizeLayer(rvds, [1], mem_layer, None, None,
                                    burn_values=[1],
                                    options=['ALL_TOUCHED=True'])
            else:
                gdal.RasterizeLayer(rvds, [1], mem_layer, None, None,
                                    burn_values=[1],
                                    options=['ALL_TOUCHED=False'])

            rv_array = rvds.ReadAsArray()

            # Mask the source data array with our current feature
            # we take the logical_not to flip 0<->1 for the correct mask effect
            # we also mask out nodata values explicitly
            masked = np.ma.MaskedArray(
                src_array,
                mask=np.logical_or(
                    src_array == nodata_value,
                    np.logical_not(rv_array)
                )
            )

            if run_count:
                pixel_count = Counter(masked.compressed())

            if categorical:
                feature_stats = dict(pixel_count)
            else:
                feature_stats = {}

            if 'min' in stats:
                feature_stats['min'] = float(masked.min())
            if 'max' in stats:
                feature_stats['max'] = float(masked.max())
            if 'mean' in stats:
                feature_stats['mean'] = float(masked.mean())
            if 'count' in stats:
                feature_stats['count'] = int(masked.count())
            # optional
            if 'sum' in stats:
                feature_stats['sum'] = float(masked.sum())
            if 'std' in stats:
                feature_stats['std'] = float(masked.std())
            if 'median' in stats:
                feature_stats['median'] = float(np.median(masked.compressed()))
            if 'majority' in stats:
                try:
                    feature_stats['majority'] = pixel_count.most_common(1)[0][0]
                except IndexError:
                    feature_stats['majority'] = None
            if 'minority' in stats:
                try:
                    feature_stats['minority'] = pixel_count.most_common()[-1][0]
                except IndexError:
                    feature_stats['minority'] = None
            if 'unique' in stats:
                feature_stats['unique'] = len(list(pixel_count.keys()))
            if 'range' in stats:
                try:
                    rmin = feature_stats['min']
                except KeyError:
                    rmin = float(masked.min())
                try:
                    rmax = feature_stats['max']
                except KeyError:
                    rmax = float(masked.max())
                feature_stats['range'] = rmax - rmin

            for pctile in [s for s in stats if s.startswith('percentile_')]:
                q = get_percentile(pctile)
                pctarr = masked.compressed()
                if pctarr.size == 0:
                    feature_stats[pctile] = None
                else:
                    feature_stats[pctile] = np.percentile(pctarr, q)

            if add_stats is not None:
                for stat_name, stat_func in add_stats.items():
                    feature_stats[stat_name] = stat_func(masked)
            if raster_out:
                masked.fill_value = nodata_value
                masked.data[masked.mask] = nodata_value
                feature_stats['mini_raster'] = masked
                feature_stats['mini_raster_GT'] = new_gt
                feature_stats['mini_raster_NDV'] = nodata_value

        # Use the enumerated id as __fid__
        feature_stats['__fid__'] = i

        if 'properties' in feat and copy_properties:
            for key, val in list(feat['properties'].items()):
                feature_stats[key] = val

        results.append(feature_stats)

    return results
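

# Minimal usage sketch for zonal_stats (the file names are hypothetical):
# per-polygon elevation statistics for a zones shapefile over a DEM raster.
def _zonal_stats_demo():
    stats = zonal_stats('zones.shp', 'dem.tif', stats='min max mean count')
    for s in stats:
        print("feature %s: mean=%s" % (s['__fid__'], s['mean']))
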
def write_shapefiles(out_dir,
                     block_size=500,
                     block_overlap=box_size,
                     max_count=np.infty,
                     filter_edges=True,
                     get_background=False):
    """Writes 3 shapefiles: CONTOURS.shp, BLOCK_LINES.shp, POINTS.shp, which respectively contain crop
	contours, block shapes and crop centroids. Also writes a pickle file containing the output in dictionary form.
	This dictionary also contains the dictionary with all parameters used in the simulation under the key 'metadata'.
	The input tif is divided into overlapping blocks of size block_size+2*block_overlap.
	Duplicates in the overlap region are removed using KDTrees. The parameter max_count is included for debug purposes;
	the process is terminated after max_count blocks."""

    field_shape = fiona.open(clp_path)
    field_polygons = []
    for feature in field_shape:
        poly = shape(feature['geometry'])
        field_polygons.append(poly)
    field = MultiPolygon(field_polygons)

    crop_dict, bg_dict = run_model(block_size,
                                   block_overlap,
                                   max_count=max_count,
                                   get_background=get_background)
    crop_dict = process_overlap(crop_dict, block_overlap)

    schema_lines = {'geometry': 'Polygon', 'properties': {'name': 'str'}}
    schema_pnt = {
        'geometry': 'Point',
        'properties': {
            'name': 'str',
            'confidence': 'float'
        }
    }
    schema_cnt = {
        'geometry': 'Polygon',
        'properties': {
            'name': 'str',
            'confidence': 'float'
        }
    }

    with fiona.collection(out_dir + 'CONTOURS.shp',
                          "w",
                          "ESRI Shapefile",
                          schema_cnt,
                          crs=from_epsg(4326)) as output_cnt:  # add projection
        with fiona.collection(out_dir + 'POINTS.shp',
                              "w",
                              "ESRI Shapefile",
                              schema_pnt,
                              crs=from_epsg(4326)) as output_pnt:
            with fiona.collection(out_dir + 'BLOCK_LINES.shp',
                                  "w",
                                  "ESRI Shapefile",
                                  schema_lines,
                                  crs=from_epsg(4326)) as output_lines:

                for (i, j) in crop_dict:
                    contours = crop_dict[(i, j)]['contours']
                    centroids = crop_dict[(i, j)]['centroids']
                    probs = crop_dict[(i, j)]['confidence']
                    (i_ad, j_ad, height, width) = crop_dict[(i, j)]['block']

                    count = 0
                    for (k, cnt) in enumerate(contours):  # write contours
                        xs, ys = cnt[:, 1] + j_ad, cnt[:, 0] + i_ad
                        centroid = (centroids[k, 0] + j_ad,
                                    centroids[k, 1] + i_ad)
                        transformed_contour = Polygon([
                            transform * (xs[l], ys[l]) for l in range(len(xs))
                        ])
                        transformed_centroid = Point(transform * centroid)
                        try:
                            # keep if the contour is completely enclosed in
                            # the field, or if edge filtering is disabled
                            if transformed_contour.difference(
                                    field).is_empty or not filter_edges:
                                output_cnt.write({
                                    'properties': {
                                        'name': '({},{}): {}'.format(i, j, k),
                                        'confidence': float(max(probs[k]))
                                    },
                                    'geometry':
                                    mapping(transformed_contour)
                                })
                                output_pnt.write({
                                    'properties': {
                                        'name': '({},{}): {}'.format(i, j, k),
                                        'confidence': float(max(probs[k]))
                                    },
                                    'geometry':
                                    mapping(transformed_centroid)
                                })
                                count += 1
                            else:
                                print('Crop ({},{}):{} intersects field edge'.
                                      format(i, j, k))
                        except Exception:
                            print('Contour ({},{}):{} invalid'.format(i, j, k))
                    print('{} crops written to block ({},{})'.format(
                        count, i, j))

                    block_vertices = [(i_ad, j_ad), (i_ad + height, j_ad),
                                      (i_ad + height, j_ad + width),
                                      (i_ad, j_ad + width)]
                    transformed_vertices = [
                        transform * (a, b) for (b, a) in block_vertices
                    ]
                    output_lines.write({
                        'properties': {
                            'name': 'block ({},{})'.format(i, j)
                        },
                        'geometry':
                        mapping(Polygon(transformed_vertices))
                    })

    params['input_tif'] = img_path
    params['input_dem'] = dem_path
    params['input_clp'] = clp_path
    crop_dict['metadata'] = params

    with open(out_dir + 'DATA.pickle', 'wb') as file:
        pickle.dump(crop_dict, file)

    if get_background:
        with open(out_dir + 'BG_DATA.pickle', 'wb') as bg_file:
            pickle.dump(bg_dict, bg_file)

    print('\nFinished!')
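
A note on the duplicate removal mentioned in the write_shapefiles docstring: process_overlap is not shown here, but the KDTree idea it refers to can be sketched with scipy. A minimal example, assuming `centroids` is an (N, 2) array and detections closer than `tol` are duplicates:

import numpy as np
from scipy.spatial import cKDTree

def drop_duplicate_centroids(centroids, tol=1.0):
    """Keep one detection per cluster of centroids closer than tol."""
    tree = cKDTree(centroids)
    pairs = tree.query_pairs(tol)            # index pairs closer than tol
    drop = {max(i, j) for i, j in pairs}     # drop the higher-index duplicate
    keep = [i for i in range(len(centroids)) if i not in drop]
    return centroids[keep]

print(drop_duplicate_centroids(np.array([[0.0, 0.0], [0.5, 0.0],
                                         [10.0, 10.0]])))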