def testDispatcherClassifierThreeRule(self):
        # create polygons to test
        box1 = box(0, 0, 100, 100)
        box2 = box(0, 0, 10, 10)
        poly = Polygon([(0, 0), (0, 1000), (50, 1250), (1000, 1000), (1000, 0), (0, 0)])

        dispatcher = RuleBasedDispatcher([QuadrilaterRule(), NotQuadrilaterRule()])
        dispatcher_classifier = DispatcherClassifier(dispatcher, [AreaClassifier(500), AreaClassifier(500)])

        # simple dispatch test
        cls, probability, dispatch, _ = dispatcher_classifier.dispatch_classify(None, box1)
        self.assertEqual(1, cls)
        self.assertEqual(1.0, probability)
        self.assertEqual(0, dispatch)

        # batch dispatch test
        classes, probas, dispatches, _ = dispatcher_classifier.dispatch_classify_batch(None, [box1, box2, poly])
        self.assertEqual(1, classes[0])
        self.assertEqual(0, classes[1])
        self.assertEqual(1, classes[2])
        self.assertEqual(1.0, probas[0])
        self.assertEqual(1.0, probas[1])
        self.assertEqual(1.0, probas[2])
        self.assertEqual(0, dispatches[0])
        self.assertEqual(0, dispatches[1])
        self.assertEqual(1, dispatches[2])
Example #2
    def test_update_crs_to_cartesian(self):
        """Test a spherical to cartesian CRS update."""

        bbox = box(-170., 40., 150., 80.)
        original_bounds = deepcopy(bbox.bounds)
        geom = GeometryVariable(name='geom', value=[bbox], dimensions='geom', crs=Spherical())

        other_crs = Cartesian()
        geom.update_crs(other_crs)
        actual = geom.get_value()[0].bounds
        desired = (-0.7544065067354889, -0.13302222155948895, -0.15038373318043535, 0.38302222155948895)
        self.assertNumpyAllClose(np.array(actual), np.array(desired))
        self.assertIsInstance(geom.crs, Cartesian)

        other_crs = Spherical()
        geom.update_crs(other_crs)
        self.assertEqual(geom.crs, Spherical())
        actual = geom.get_value()[0].bounds
        self.assertNumpyAllClose(np.array(original_bounds), np.array(actual))

        # Test data may not be wrapped.
        bbox = box(0, 40, 270, 80)
        geom = GeometryVariable(name='geom', value=[bbox], dimensions='geom', crs=Spherical())
        other_crs = Cartesian()
        with self.assertRaises(ValueError):
            geom.update_crs(other_crs)
def build_grid():
    #grid_boundaries=(-185,15,-65,70) # upper-right edge is plus delta (lower 48 states)
    grid={(i,j):{}
            for i in range((grid_boundaries[2]-grid_boundaries[0])/delta) 
            for j in range((grid_boundaries[3]-grid_boundaries[1])/delta) }
 
    with fiona.open(options.shape_file_path) as fc: 
        print >>sys.stderr, fc.driver,"###",fc.schema,"###", len(fc),"###",fc.crs
        print >> sys.stderr,fc.schema
        print >>sys.stderr, "Number of records:", len(fc)
        print >>sys.stderr, "Bounds of all records:", fc.bounds
        print >>sys.stderr, "Bounds applied:",grid_boundaries
        print >> sys.stderr,"######## indexing shapes to grid ########"
        print >> sys.stderr,"shapes complete:"
        c=0
        for feature in fc: 
            c+=1
            GEOID=str(feature['properties']['GEOID'])
            NAME=feature['properties']['NAME']
            INTPTLON=float(feature['properties']['INTPTLON'])
            INTPTLAT=float(feature['properties']['INTPTLAT'])
            shp=shape(feature['geometry']) # list of coordinates of geometric shape
            bb=box(*shp.bounds) # box(minx, miny, maxx, maxy) creates one box-like shape to rule them all
            for i,j in grid:
                grid_box=box(i*delta+grid_boundaries[0]
                            ,j*delta+grid_boundaries[1]
                            ,(i+1)*delta+grid_boundaries[0]
                            ,(j+1)*delta+grid_boundaries[1] )
                if grid_box.intersects(bb): #http://toblerity.org/shapely/manual.html#object.intersects
                    grid[(i,j)][bb]=(shp,GEOID,NAME,INTPTLON,INTPTLAT) # (county shape, GEOID, name, internal point lon/lat)
            if c%100==0:
                print >> sys.stderr, c
    return grid
Example #4
File: fes.py Project: dodobas/pycsw
def set_spatial_ranking(geometry):
    """Given a spatial query in ogc:Filter, check the geometry type
    and set the ranking variables"""
    
    if util.ranking_enabled:
        if geometry.type in ['Polygon', 'Envelope']:
            util.ranking_pass = True
            util.ranking_query_geometry = geometry.wkt
        elif geometry.type in ['LineString', 'Point']:
            from shapely.geometry.base import BaseGeometry
            from shapely.geometry import box
            from shapely.wkt import loads,dumps
            ls = loads(geometry.wkt)
            b = ls.bounds
            if geometry.type == 'LineString':
                tmp_box = box(b[0],b[1],b[2],b[3])
                tmp_wkt = dumps(tmp_box)
                if tmp_box.area > 0:
                    util.ranking_pass = True
                    util.ranking_query_geometry = tmp_wkt
            elif geometry.type == 'Point':
                tmp_box = box((float(b[0])-1.0),(float(b[1])-1.0),(float(b[2])+1.0),(float(b[3])+1.0))
                tmp_wkt = dumps(tmp_box)
                util.ranking_pass = True
                util.ranking_query_geometry = tmp_wkt
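
A standalone sketch of the ranking logic above, using shapely only (no pycsw util module) and made-up coordinates: a LineString is ranked by its bounding box when that box has non-zero area, while a Point has its degenerate bounds padded by one degree on each side.

from shapely.geometry import LineString, Point, box
from shapely.wkt import dumps

# LineString: rank by its bounding box, but only when that box has area.
ls = LineString([(-75.4, 32.3), (-72.7, 34.7)])
minx, miny, maxx, maxy = ls.bounds
ls_box = box(minx, miny, maxx, maxy)
if ls_box.area > 0:
    ranking_query_geometry = dumps(ls_box)

# Point: the bounds form a degenerate box, so pad by 1 degree in every direction.
pt = Point(-75.4, 32.3)
minx, miny, maxx, maxy = pt.bounds
ranking_query_geometry = dumps(box(minx - 1.0, miny - 1.0, maxx + 1.0, maxy + 1.0))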
Example #5
def _get_reprojected_features(
    input_file=None,
    dst_bounds=None,
    dst_crs=None,
    validity_check=None
    ):
    assert isinstance(input_file, str)
    assert isinstance(dst_bounds, tuple)
    assert isinstance(dst_crs, CRS)
    assert isinstance(validity_check, bool)

    with fiona.open(input_file, 'r') as vector:
        vector_crs = CRS(vector.crs)
        # Reproject tile bounding box to source file CRS for filter:
        if vector_crs == dst_crs:
            dst_bbox = box(*dst_bounds)
        else:
            dst_bbox = reproject_geometry(
                box(*dst_bounds),
                src_crs=dst_crs,
                dst_crs=vector_crs,
                validity_check=True
                )
        for feature in vector.filter(bbox=dst_bbox.bounds):
            feature_geom = shape(feature['geometry'])
            if not feature_geom.is_valid:
                try:
                    feature_geom = feature_geom.buffer(0)
                    assert feature_geom.is_valid
                    warnings.warn(
                        "fixed invalid vector input geometry"
                        )
                except AssertionError:
                    warnings.warn(
                        "irreparable geometry found in vector input file"
                        )
                    continue
            geom = clean_geometry_type(
                feature_geom.intersection(dst_bbox),
                feature_geom.geom_type
            )
            if geom:
                # Reproject each feature to tile CRS
                if vector_crs == dst_crs and validity_check:
                    assert geom.is_valid
                else:
                    try:
                        geom = reproject_geometry(
                            geom,
                            src_crs=vector_crs,
                            dst_crs=dst_crs,
                            validity_check=validity_check
                            )
                    except ValueError:
                        warnings.warn("feature reprojection failed")
                yield {
                    'properties': feature['properties'],
                    'geometry': mapping(geom)
                }
Example #6
    def render_pad(self, layer_query, drill, **options):

        DRU = self.get_DRU()
        if self._layer_matches(layer_query, "Holes"):
            hole = shapes.Point(self.get_x(), self.get_y()).buffer(drill/2)
            return hole

        radius = drill / 2
        radius = radius + scaleAndBound(radius, DRU.rvPadTop, DRU.rlMinPadTop, DRU.rlMaxPadTop)
        
        if layer_query is not None and self.get_file().get_layer(layer_query).get_number() > 16 and not self._layer_matches(layer_query, "tStop") and  not self._layer_matches(layer_query,"bStop"):
            return shapes.LineString()

        if self.get_shape() == "square":
            shape = shapes.box(self.get_x() - radius ,
                               self.get_y() - radius,
                               self.get_x() + radius,
                               self.get_y() + radius)
        elif self.get_shape() == "round" or self.get_shape() is None:
            shape = shapes.point.Point(self.get_x(), self.get_y()).buffer(radius)
        elif self.get_shape() == "octagon":
            shape = shapes.box(self.get_x() - radius,
                               self.get_y() - radius,
                               self.get_x() + radius,
                               self.get_y() + radius)
            shape = shape.intersection(affinity.rotate(shape, 45))
        elif self.get_shape() == "long":
            shape = shapely.ops.unary_union([shapes.point.Point(self.get_x() + DRU.psElongationLong/100.0 * radius,
                                                                self.get_y()).buffer(radius),
                                             shapes.point.Point(self.get_x() - DRU.psElongationLong/100.0 * radius,
                                                                self.get_y()).buffer(radius),
                                             shapes.box(self.get_x() - DRU.psElongationLong/100.0 * radius,
                                                        self.get_y() - radius,
                                                        self.get_x() + DRU.psElongationLong/100.0 * radius,
                                                        self.get_y() + radius)])
        elif self.get_shape() == "offset":
            shape = shapely.ops.unary_union([shapes.point.Point(self.get_x() + DRU.psElongationOffset/100.0 * radius * 2,
                                                                self.get_y()).buffer(radius),
                                             shapes.point.Point(self.get_x(),
                                                                self.get_y()).buffer(radius),
                                             shapes.box(self.get_x(),
                                                        self.get_y() - radius,
                                                        self.get_x() + DRU.psElongationLong/100.0 * radius * 2,
                                                        self.get_y() + radius)
                                         ])
        else:
            raise Swoop.SwoopError("Unknown pad shape: '{}'".format(self.get_shape()))

        if shape is not None:
            if self._layer_matches(layer_query,"tStop") or self._layer_matches(layer_query, "bStop"):
                shape = shape.buffer(computeStopMaskExtra(radius, DRU))

        if options and "fail_on_missing" in options and options["fail_on_missing"] and shape is None:
            raise NotImplementedError("Geometry for pad shape '{}' is not implemented yet.".format(self.get_shape()))
        elif shape is None:
            shape = shapes.LineString()

        return shape
Example #7
    def __init__(self, element):
        self._root = element

        OM_NS = ns.get_versioned_namespace('om','1.0')
        GML_NS = ns.get_versioned_namespace('gml','3.1.1')
        SWE_NS = ns.get_versioned_namespace('swe','1.0')
        XLINK_NS = ns.get_namespace("xlink")
        
        self.name = testXMLValue(self._root.find(nsp("name", GML_NS)))
        self.description = testXMLValue(self._root.find(nsp("description", GML_NS)))
        self.observedProperties = []
        for op in self._root.findall(nsp('observedProperty', OM_NS)):
            self.observedProperties.append(testXMLAttribute(op,nsp('href', XLINK_NS)))

        # BBOX
        try:
            envelope = self._root.find(nsp('boundedBy/Envelope', GML_NS))
            lower_left_corner = testXMLValue(envelope.find(nsp('lowerCorner', GML_NS))).split(" ")
            upper_right_corner = testXMLValue(envelope.find(nsp('upperCorner', GML_NS))).split(" ")

            self.bbox_srs = Crs(testXMLAttribute(envelope,'srsName'))
            # Always keep the BBOX as minx, miny, maxx, maxy
            if self.bbox_srs.axisorder == "yx":
                self.bbox = box(float(lower_left_corner[1]), float(lower_left_corner[0]), float(upper_right_corner[1]), float(upper_right_corner[0]))
            else:
                self.bbox = box(float(lower_left_corner[0]), float(lower_left_corner[1]), float(upper_right_corner[0]), float(upper_right_corner[1]))
        except Exception:
            self.bbox = None
            self.bbox_srs = None

        # Time range
        fp = nsp('samplingTime', OM_NS)
        mp = nsp('TimePeriod', GML_NS)

        lp = nsp('beginPosition', GML_NS)
        begin_position_element = self._root.find('%s/%s/%s' % (fp, mp, lp))
        self.begin_position = extract_time(begin_position_element)

        ep = nsp('endPosition', GML_NS)
        end_position_element = self._root.find('%s/%s/%s' % (fp, mp, ep))
        self.end_position = extract_time(end_position_element)

        feature_of_interest_element = self._root.find(nsp("featureOfInterest", OM_NS))
        self.feature_type = testXMLValue(feature_of_interest_element.find(nsp("FeatureCollection/metaDataProperty/name", GML_NS)))

        # Now the fields change depending on the FeatureType
        result_element = self._root.find(nsp("result", OM_NS))

        #TODO: This should be implemented as a Factory
        self.feature = None
        if self.feature_type == 'timeSeries':
            self.feature = SweTimeSeries(feature_of_interest=feature_of_interest_element, result=result_element, GML_NS=GML_NS, OM_NS=OM_NS, SWE_NS=SWE_NS)
        elif self.feature_type == 'trajectoryProfile':
            self.feature = SweTrajectoryProfile(feature_of_interest=feature_of_interest_element, result=result_element, GML_NS=GML_NS, OM_NS=OM_NS, SWE_NS=SWE_NS)
        elif self.feature_type == 'timeSeriesProfile':
            self.feature = SweTimeseriesProfile(feature_of_interest=feature_of_interest_element, result=result_element, GML_NS=GML_NS, OM_NS=OM_NS, SWE_NS=SWE_NS)
Example #8
    def setUp(self):
        self.f1 = FeatureTest(1, influence="notall")
        self.f2 = FeatureTest(10, influence="notall")
        self.f3 = FeatureTestReplace(100, influence="notall")
        self.f4 = FeatureTestAddition(1000, influence="notall")

        self.f1.shape = geom.box(0.0, 0.0, 1.0, 1.0)
        self.f2.shape = geom.box(0.5, 0.5, 1.5, 1.5)
        self.f3.shape = geom.box(0.75, 0, 0.8, 2)
        self.f4.shape = geom.box(0.75, 0, 0.8, 2)
Example #9
    def fixture_polygon_with_hole(self):
        outer_box = box(2.0, 10.0, 4.0, 20.0)
        inner_box = box(2.5, 10.5, 3.5, 15.5)

        outer_coords = list(outer_box.exterior.coords)
        inner_coords = [list(inner_box.exterior.coords)]

        with_interior = Polygon(outer_coords, holes=inner_coords)

        return with_interior
Example #10
def load_polys_and_clip(shpfile, wl, el, nl, sl, dateline=False, lev=1):
    """
    Loop through the polygons in a shapefile; if a polygon overlaps the
    user-specified rectangle, load it and clip it.
    Note that GSHHS polygons run -180 --> 180; shorelines that cross the 180 line are
    clipped into west and east polygons, so there is some special casing to handle
    clip rectangles that cross the dateline.
    Change lev to 2 if loading lakes.
    These *may not* be able to be used to clip trajectories from GAn in Arc?
    """

    bound_box = (wl, sl, el, nl)
    clip_box = sgeo.box(*bound_box)

    if dateline:
        bound_box = (-180, sl, 180, nl)

    bna_polys = []

    with fiona.open(shpfile) as source:
        for s in source.filter(bbox=bound_box):
            geo = s['geometry']['coordinates']
            for c in geo:
                poly = sgeo.Polygon(c)
                if dateline:
                    poly_bnds = poly.bounds
                    if poly_bnds[0] > -10 and poly_bnds[2] <= 180:
                        clip_box = sgeo.box(wl, sl, 180, nl)
                    else:
                        clip_box = sgeo.box(-180, sl, el, nl)

                clipped_polys = poly.intersection(clip_box)
                if type(clipped_polys) is sgeo.multipolygon.MultiPolygon:
                    for clipped_poly in clipped_polys:
                        try:
                            bna_polys.append(clipped_poly.exterior.xy)
                        except AttributeError:
                            pass
                elif type(clipped_polys) is sgeo.polygon.Polygon:
                    try:
                        bna_polys.append(clipped_polys.exterior.xy)
                    except AttributeError:
                        pass

    if len(bna_polys) < 1 and lev == 1:  # all water
        bna_polys.append(clip_box.exterior.xy)
        levels = [2]
    else:  # define levels and add lakes if specified
        levels = [lev for x in range(len(bna_polys))]

    if lev == 1:
        bna_polys.insert(0, ([wl, wl, el, el], [sl, nl, nl, sl]))
        levels.insert(0, 0)

    return bna_polys, levels
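
A possible call of load_polys_and_clip for a clip window that crosses the dateline; the shapefile path is a placeholder for a GSHHS level-1 shoreline shapefile and the coordinates are illustrative only.

shpfile = "GSHHS_shp/i/GSHHS_i_L1.shp"  # placeholder path
# Window from 170E across the dateline to 170W, between 50N and 60N.
bna_polys, levels = load_polys_and_clip(shpfile, wl=170.0, el=-170.0,
                                        nl=60.0, sl=50.0, dateline=True)
print(len(bna_polys), "clipped polygons; first levels:", levels[:3])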
Example #11
    def test(self):
        gs = self.fixture_grid_chunker()

        desired_dst_grid_sum = gs.dst_grid.parent['data'].get_value().sum()
        desired_dst_grid_sum = MPI_COMM.gather(desired_dst_grid_sum)
        if vm.rank == 0:
            desired_sum = np.sum(desired_dst_grid_sum)

        desired = [{'y': slice(0, 180, None), 'x': slice(0, 240, None)},
                   {'y': slice(0, 180, None), 'x': slice(240, 480, None)},
                   {'y': slice(0, 180, None), 'x': slice(480, 720, None)},
                   {'y': slice(180, 360, None), 'x': slice(0, 240, None)},
                   {'y': slice(180, 360, None), 'x': slice(240, 480, None)},
                   {'y': slice(180, 360, None), 'x': slice(480, 720, None)}]
        actual = list(gs.iter_dst_grid_slices())
        self.assertEqual(actual, desired)

        gs.write_chunks()

        if vm.rank == 0:
            rank_sums = []

        for ctr in range(1, gs.nchunks_dst[0] * gs.nchunks_dst[1] + 1):
            src_path = gs.create_full_path_from_template('src_template', index=ctr)
            dst_path = gs.create_full_path_from_template('dst_template', index=ctr)

            src_field = RequestDataset(src_path).get()
            dst_field = RequestDataset(dst_path).get()

            src_envelope_global = box(*src_field.grid.extent_global)
            dst_envelope_global = box(*dst_field.grid.extent_global)

            self.assertTrue(does_contain(src_envelope_global, dst_envelope_global))

            actual = get_variable_names(src_field.data_variables)
            self.assertIn('data', actual)

            actual = get_variable_names(dst_field.data_variables)
            self.assertIn('data', actual)
            actual_data_sum = dst_field['data'].get_value().sum()
            actual_data_sum = MPI_COMM.gather(actual_data_sum)
            if MPI_RANK == 0:
                actual_data_sum = np.sum(actual_data_sum)
                rank_sums.append(actual_data_sum)

        if vm.rank == 0:
            self.assertAlmostEqual(desired_sum, np.sum(rank_sums))
            index_path = gs.create_full_path_from_template('index_file')
            self.assertTrue(os.path.exists(index_path))

        vm.barrier()

        index_path = gs.create_full_path_from_template('index_file')
        index_field = RequestDataset(index_path).get()
        self.assertTrue(len(list(index_field.keys())) > 2)
Example #12
    def __init__(self, *args, **kwargs):
        super(TestClassesNodes, self).__init__(*args, **kwargs)
        self.f1 = FeatureTest(1)
        self.f2 = FeatureTest(10)
        self.f2.shape = geom.box(1.0, 0.0, 2.0, 1.0)
        self.f3 = FeatureTest(100)
        self.f3.shape = geom.box(1.0, 0.0, 3.0, 1.0)

        self.n1 = BlendNode([self.f1])
        self.n2 = BlendNode([self.f2])
        self.n3 = BlendNode([self.f3])
    def testRuleBasedDispatcher(self):
        # prepare data for test
        box1 = box(0, 0, 100, 100)
        box2 = box(0, 0, 10, 10)

        dispatcher = RuleBasedDispatcher([CatchAllRule()], ["catchall"])
        self.assertEqual(dispatcher.dispatch(None, box1), "catchall")
        dispatch_batch = dispatcher.dispatch_batch(None, [box1, box2])
        assert_array_equal(dispatch_batch, ["catchall", "catchall"])
        labels, dispatch_map = dispatcher.dispatch_map(None, [box1, box2])
        assert_array_equal(labels, dispatch_batch)
        assert_array_equal(dispatch_map, [0, 0])
    def _generate_points(self, polygon: Polygon, n) -> list:
        """Generates sample points within a given geometry

        :param shapely.geometry.Polygon polygon: the polygon to create points in
        :param int n: Number of points to generate in polygon
        :return: A list of points in the polygon
        :rtype: list[Point]
        """
        if n <= 0:
            return []

        if polygon.area <= 0:
            return []

        bbox = polygon.envelope  # rectangular envelope of the polygon; .bounds gives (minx, miny, maxx, maxy)
        if (polygon.area * self.t) < bbox.area:
            if (bbox.bounds[2] - bbox.bounds[0]) > (bbox.bounds[3] - bbox.bounds[1]):
                bbox_1 = box(*self._bbox_left(bbox.bounds))
                bbox_2 = box(*self._bbox_right(bbox.bounds))
            else:
                bbox_1 = box(*self._bbox_bottom(bbox.bounds))
                bbox_2 = box(*self._bbox_top(bbox.bounds))

            p1 = shape(polygon)
            p1 = p1.difference(bbox_1)

            p2 = shape(polygon)
            p2 = p2.difference(bbox_2)

            del bbox_1, bbox_2
            # k = bisect.bisect_left(u, p1.area / polygon.area)
            k = int(round(n * (p1.area / polygon.area)))

            v = self._generate_points(p1, k) + self._generate_points(p2, n - k)
            del polygon, p1, p2
        else:
            v = []
            max_iterations = self.t * n + 5 * math.sqrt(self.t * n)
            v_length = len(v)
            while v_length < n and max_iterations > 0:
                max_iterations -= 1
                v.append(self._random_point_in_polygon(polygon))
                v_length = len(v)

            if len(v) < n:
                raise Exception('Too many iterations')

            self.logging.debug('Generated %s points', n)

        del bbox
        return v
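
The recursion above bottoms out in self._random_point_in_polygon, which is not shown here. A common way to implement it is rejection sampling inside the polygon's bounding box; the standalone helper below is a sketch under that assumption, not the project's actual code.

import random
from shapely.geometry import Point, Polygon

def random_point_in_polygon(polygon: Polygon) -> Point:
    # Rejection sampling: draw uniform points in the bounding box until one
    # falls inside the polygon itself.
    minx, miny, maxx, maxy = polygon.bounds
    while True:
        candidate = Point(random.uniform(minx, maxx), random.uniform(miny, maxy))
        if polygon.contains(candidate):
            return candidate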
Example #15
    def test_multiple_interiors(self):
        exterior = numpy.array(sgeom.box(0, 0, 12, 12).exterior.coords)
        interiors = [
                     numpy.array(sgeom.box(1, 1, 2, 2, ccw=False).exterior.coords),
                     numpy.array(sgeom.box(1, 8, 2, 9, ccw=False).exterior.coords),
                     ]

        poly = sgeom.Polygon(exterior, interiors)

        target = ccrs.PlateCarree()
        source = ccrs.Geodetic()

        assert len(list(target.project_geometry(poly, source))) == 1
    def testCustomDispatcher(self):
        # prepare data for test
        box1 = box(0, 0, 500, 500)
        box2 = box(0, 0, 10, 10)
        box3 = box(0, 0, 1000, 1000)

        dispatcher = CustomDispatcher()
        self.assertEqual(dispatcher.dispatch(None, box1), "BIG")
        dispatch_batch = dispatcher.dispatch_batch(None, [box1, box2, box3])
        assert_array_equal(dispatch_batch, ["BIG", "SMALL", "BIG"])
        labels, dispatch_map = dispatcher.dispatch_map(None, [box1, box2, box3])
        assert_array_equal(labels, dispatch_batch)
        assert_array_equal(dispatch_map, [1, 0, 1])
Example #17
    def setUp(self):
        self.background = FeatureTest(100, influence="notall", val_influence=1)
        self.background.shape = geom.box(5, 5, 45, 45)
        
        self.up = FeatureTest(150, influence="notall", val_influence=1)
        self.up.shape = geom.box(10, 10, 30, 30)
        
        self.rep = FeatureTestReplace(0, influence="notall", val_influence=1)
        self.rep.shape = geom.box(10, 10, 30, 30)
        
        self.forest = Vegetation(geom.box(5, 5, 45, 45), model=AbstractModel(), tree_number=50)

        self.env = Environment([self.rep, self.up, self.background, self.forest])
Example #18
def test_shapely_intersection():
    """Testing Shapely: Test against prepared geometry intersection bug #603"""
    # http://trac.osgeo.org/geos/ticket/603
    from shapely.geometry import MultiPolygon, box
    from shapely.prepared import prep
    from shapely import wkt

    assert MultiPolygon([box(0, 0, 1, 10), box(40, 0, 41, 10)]).intersects(box(20, 0, 21, 10)) == False
    assert prep(MultiPolygon([box(0, 0, 1, 10), box(40, 0, 41, 10)])).intersects(box(20, 0, 21, 10)) == False

    # tile_grid(3857, origin='nw').tile_bbox((536, 339, 10))
    tile = box(939258.2035682462, 6731350.458905761, 978393.9620502564, 6770486.217387771)
    tile = box(978393.9620502554, 6770486.217387772, 1017529.7205322656, 6809621.975869782)
    # "{"type":"FeatureCollection","features":[{"type":"Feature","properties":{"type":"box_control"},"geometry":{"type":"Polygon","coordinates":[[[1449611.9686912997,6878109.5532215],[1449611.9686912997,6909907.3569881],[1476517.8026477,6909907.3569881],[1476517.8026477,6878109.5532215],[1449611.9686912997,6878109.5532215]]]}},{"type":"Feature","properties":{"type":"box_control"},"geometry":{"type":"Polygon","coordinates":[[[909049.30465869,6435386.285393901],[909049.30465869,6457400.14954],[943293.0933304401,6457400.14954],[943293.0933304401,6435386.285393901],[909049.30465869,6435386.285393901]]]}}]}"
    coverage = wkt.loads(
        "MULTIPOLYGON ("
            "((1449611.9686912996694446 6878109.5532214995473623, "
              "1449611.9686912996694446 6909907.3569881003350019, "
              "1476517.8026477000676095 6909907.3569881003350019, "
              "1476517.8026477000676095 6878109.5532214995473623, "
              "1449611.9686912996694446 6878109.5532214995473623)), "
            "((909049.3046586900018156 6435386.2853939011693001, "
              "909049.3046586900018156 6457400.1495399996638298, "
              "943293.0933304401114583 6457400.1495399996638298, "
              "943293.0933304401114583 6435386.2853939011693001, "
              "909049.3046586900018156 6435386.2853939011693001)))"
    )
    assert prep(coverage).contains(tile) == False
    assert prep(coverage).intersects(tile) == False
Example #19
def week_data(date, bounds):
    """
    Query Transfer data for a week, optionally spatially filtered.
    Returns a GeoJSON FeatureCollection.
    """
    try:
        # week should be in ISO YYYY-MM-DD format
        week_start = datetime.datetime.strptime(date, '%Y-%m-%d').date()
    except ValueError as e:
        r = jsonify({"error": str(e)})
        r.status_code = 400
        return r

    if bounds:
        # Optionally, filter the results spatially
        # west,south,east,north in degrees (latitude/longitude)
        try:
            m = re.match(r'((-?\d+(?:\.\d+)?),){3}(-?\d+(\.\d+)?)$', bounds)
            if not m:
                raise ValueError("Bounds should be longitudes/latitudes in west,south,east,north order")
            w,s,e,n = map(float, bounds.split(','))
            if w < -180 or w > 180 or e < -180 or e > 180:
                raise ValueError("Bounds should be longitudes/latitudes in west,south,east,north order")
            elif s < -90 or s > 90 or n < -90 or n > 90 or s > n:
                raise ValueError("Bounds should be longitudes/latitudes in west,south,east,north order")

            if e < w:
                bounds = MultiPolygon([box(w, s, 180, n), box(-180, s, e, n)])
            else:
                bounds = MultiPolygon([box(w, s, e, n)])
        except ValueError as e:
            r = jsonify({"error": str(e)})
            r.status_code = 400
            return r

    # Filter the transfers - the DB query happens here
    query = Transfer.query.filter_by(week_start=week_start)
    if bounds:
        query = query.filter(Transfer.location.ST_Intersects(from_shape(bounds, 4326)))

    features = []
    for transfer in query:
        features.append(transfer.as_geojson())

    # Format the response as a GeoJSON FeatureCollection
    return jsonify({
        "type": "FeatureCollection",
        "features": features,
    })
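
The antimeridian handling in week_data can be exercised on its own with shapely: when east < west the requested window wraps across 180 degrees and is represented as two boxes. The coordinates below are illustrative.

from shapely.geometry import MultiPolygon, box

w, s, e, n = 170.0, -10.0, -170.0, 10.0  # window crossing the antimeridian
if e < w:
    bounds = MultiPolygon([box(w, s, 180, n), box(-180, s, e, n)])
else:
    bounds = MultiPolygon([box(w, s, e, n)])
print(bounds.bounds)  # (-180.0, -10.0, 180.0, 10.0)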
Example #20
    def test_conflicts(self):
        conflict = box(0, 0, 20, 20)
        self.map.conflict_union(conflict, margin=0)
        #: correct calculation of conflict density
        self.assertEqual(self.map.conflict_density(10, 10, radius=10), 1)
        self.assertEqual(self.map.conflict_density(50, 50, radius=10), 0)
        self.assertAlmostEqual(self.map.conflict_density(10, 20, radius=10),
                               0.5, places=PLACES)
        #: test positioning
        new = box(10, 10, 50, 30)
        self.assertEqual(self.map.find_free_position(new, step=1, number=10),
                         (10, 20))
        self.assertIsNone(self.map.find_free_position(new, step=1, number=5))
        new = box(30, 30, 50, 30)
        self.assertEqual(self.map.find_free_position(new, number=0), (30, 30))
def test_polygon_interiors():

    ax = plt.subplot(211, projection=ccrs.PlateCarree())
    ax.coastlines()
    ax.set_global()

    pth = Path([[0, -45], [60, -45], [60, 45], [0, 45], [0, 45],
                [10, -20], [10, 20], [40, 20], [40, -20], [10, 20]],
               [1, 2, 2, 2, 79, 1, 2, 2, 2, 79])

    patches_native = []
    patches = []
    for geos in cpatch.path_to_geos(pth):
        for pth in cpatch.geos_to_path(geos):
            patches.append(mpatches.PathPatch(pth))

        # buffer by 10 degrees (leaves a small hole in the middle)
        geos_buffered = geos.buffer(10)
        for pth in cpatch.geos_to_path(geos_buffered):
            patches_native.append(mpatches.PathPatch(pth))

    # Set high zorder to ensure the polygons are drawn on top of coastlines.
    collection = PatchCollection(patches_native, facecolor='red', alpha=0.4,
                                 transform=ax.projection, zorder=10)
    ax.add_collection(collection)

    collection = PatchCollection(patches, facecolor='yellow', alpha=0.4,
                                 transform=ccrs.Geodetic(), zorder=10)

    ax.add_collection(collection)

    # test multiple interior polygons
    ax = plt.subplot(212, projection=ccrs.PlateCarree(),
                     xlim=[-5, 15], ylim=[-5, 15])
    ax.coastlines()

    exterior = np.array(sgeom.box(0, 0, 12, 12).exterior.coords)
    interiors = [np.array(sgeom.box(1, 1, 2, 2, ccw=False).exterior.coords),
                 np.array(sgeom.box(1, 8, 2, 9, ccw=False).exterior.coords)]
    poly = sgeom.Polygon(exterior, interiors)

    patches = []
    for pth in cpatch.geos_to_path(poly):
        patches.append(mpatches.PathPatch(pth))

    collection = PatchCollection(patches, facecolor='yellow', alpha=0.4,
                                 transform=ccrs.Geodetic(), zorder=10)
    ax.add_collection(collection)
Example #22
    def test_init(self):
        ge = GeometrySplitter(self.fixture_polygon_with_hole)
        self.assertIsInstance(ge.geometry, Polygon)

        # Test a geometry with no holes.
        with self.assertRaises(NoInteriorsError):
            GeometrySplitter(box(1, 2, 3, 4))
Example #23
    def test_iter_intersects(self):

        desired = {False: [(0, False), (1, False), (2, False), (3, True), (4, False)],
                   True: [(0, False), (1, True), (2, False), (3, True), (4, False)]}

        def the_geometry_iterator():
            x = [1, 2, 3, 4, 5]
            y = [6, 7, 8, 9, 10]
            mask = [False, False, True, False, False, False]
            for idx in range(len(x)):
                if mask[idx]:
                    yld = None
                else:
                    yld = Point(x[idx], y[idx])
                yield idx, yld

        subset_geometry = box(2.0, 7.0, 4.5, 9.5)

        for keep_touches in [False, True]:
            gp = GeometryProcessor(the_geometry_iterator(), subset_geometry, keep_touches=keep_touches)
            actual = list(gp.iter_intersects())
            for idx_actual in range(len(actual)):
                self.assertEqual(actual[idx_actual][0:2], desired[keep_touches][idx_actual])
                if idx_actual == 2:
                    self.assertIsNone(actual[idx_actual][2])
                else:
                    self.assertIsInstance(actual[idx_actual][2], Point)

        # Test the object may only be used once.
        with self.assertRaises(ValueError):
            list(gp.iter_intersects())
Example #24
    def test_split(self):
        to_test = [self.fixture_polygon_with_hole,
                   MultiPolygon([self.fixture_polygon_with_hole, box(200, 100, 300, 400)])]
        desired_counts = {0: 4, 1: 5}

        for ctr, t in enumerate(to_test):
            ge = GeometrySplitter(t)
            split = ge.split()

            self.assertEqual(len(split), desired_counts[ctr])
            self.assertEqual(split.area, t.area)

            actual_bounds = [g.bounds for g in split]
            actual_areas = [g.area for g in split]

            desired_bounds = [(2.0, 10.0, 3.0, 13.0), (3.0, 10.0, 4.0, 13.0),
                              (3.0, 13.0, 4.0, 20.0), (2.0, 13.0, 3.0, 20.0)]
            desired_areas = [1.75, 1.75, 5.75, 5.75]

            if ctr == 1:
                desired_bounds.append((200.0, 100.0, 300.0, 400.0))
                desired_areas.append(30000.0)

            self.assertEqual(actual_bounds, desired_bounds)
            self.assertEqual(actual_areas, desired_areas)
Example #25
    def create(self, user_token, layer):
        from gbi_server.model import User

        user = User.by_authproxy_token(user_token)
        if not user:
            raise InvalidUserToken()

        result = db.session.query(WMTS, WMTS.view_coverage.transform(3857).wkt()).filter_by(name=layer).first()
        if result:
            wmts, view_coverage = result
            if wmts and wmts.is_public:
                return wkt.loads(view_coverage)

        if user.is_customer:
            couch_url = self.couchdb_url
            couchdb = CouchDBBox(couch_url, '%s_%s' % (SystemConfig.AREA_BOX_NAME, user.id))
            geom = couchdb.layer_extent(self.geometry_layer)
            return optimize_geometry(geom) if geom else None
        elif user.is_service_provider:
            couch_url = self.couchdb_url
            couchdb = CouchDBBox(couch_url, '%s_%s' % (SystemConfig.AREA_BOX_NAME, user.id))
            geom = couchdb.layer_extent()
            return optimize_geometry(geom) if geom else None
        elif user.is_admin or user.is_consultant:
            # permit access to everything
            return box(-20037508.3428, -20037508.3428, 20037508.3428, 20037508.3428)

        return None
    def test_o_and_m_get_observation(self):
        data = open(resource_file(os.path.join('ioos_swe', 'OM-GetObservation.xml')), "rb").read()

        d = IoosGetObservation(data)
        assert d.ioos_version       == "1.0"
        assert len(d.observations)  == 1

        ts = d.observations[0]
        assert ts.description.replace("\n", "").replace("       ", "").replace("  -", " -") == "Observations at point station urn:ioos:station:wmo:41001, 150 NM East of Cape HATTERAS. Observations at point station urn:ioos:station:wmo:41002, S HATTERAS - 250 NM East of Charleston, SC"
        assert ts.begin_position                == datetime(2009, 5, 23, 0, tzinfo=pytz.utc)
        assert ts.end_position                  == datetime(2009, 5, 23, 2, tzinfo=pytz.utc)
        assert sorted(ts.procedures)            == sorted(["urn:ioos:station:wmo:41001", "urn:ioos:station:wmo:41002"])
        assert sorted(ts.observedProperties)    == sorted([ "http://mmisw.org/ont/cf/parameter/air_temperature",
                                                            "http://mmisw.org/ont/cf/parameter/sea_water_temperature",
                                                            "http://mmisw.org/ont/cf/parameter/wind_direction",
                                                            "http://mmisw.org/ont/cf/parameter/wind_speed",
                                                            "http://mmisw.org/ont/ioos/parameter/dissolved_oxygen"
                                                            ])
        assert ts.feature_type          == "timeSeries"

        assert ts.bbox_srs.getcode()    == "EPSG:4326"
        assert ts.bbox.equals(box(-75.42, 32.38, -72.73, 34.7))

        assert ts.location["urn:ioos:station:wmo:41001"].equals(Point(-72.73, 34.7))
        assert ts.location["urn:ioos:station:wmo:41002"].equals(Point(-75.415, 32.382))
Example #27
    def _rings_to_multi_polygon(self, rings):
        exterior_rings = []
        interior_rings = []
        for ring in rings:
            if ring.is_ccw:
                interior_rings.append(ring)
            else:
                exterior_rings.append(ring)

        polygon_bits = []

        # Turn all the exterior rings into polygon definitions,
        # "slurping up" any interior rings they contain.
        for exterior_ring in exterior_rings:
            polygon = sgeom.Polygon(exterior_ring)
            holes = []
            for interior_ring in interior_rings[:]:
                if polygon.contains(interior_ring):
                    holes.append(interior_ring)
                    interior_rings.remove(interior_ring)
            polygon_bits.append((exterior_ring.coords,
                                 [ring.coords for ring in holes]))

        # Any left over "interior" rings need "inverting" with respect
        # to the boundary.
        if interior_rings:
            boundary_poly = sgeom.Polygon(self.boundary)
            x3, y3, x4, y4 = boundary_poly.bounds
            bx = (x4 - x3) * 0.1
            by = (y4 - y3) * 0.1
            x3 -= bx
            y3 -= by
            x4 += bx
            y4 += by
            for ring in interior_rings:
                polygon = sgeom.Polygon(ring)
                x1, y1, x2, y2 = polygon.bounds
                bx = (x2 - x1) * 0.1
                by = (y2 - y1) * 0.1
                x1 -= bx
                y1 -= by
                x2 += bx
                y2 += by
                box = sgeom.box(min(x1, x3), min(y1, y3),
                                max(x2, x4), max(y2, y4))

                # Invert the polygon
                polygon = box.difference(polygon)

                # Intersect the inverted polygon with the boundary
                polygon = boundary_poly.intersection(polygon)

                if not polygon.is_empty:
                    polygon_bits.append(polygon)

        if polygon_bits:
            multi_poly = sgeom.MultiPolygon(polygon_bits)
        else:
            multi_poly = sgeom.MultiPolygon()
        return multi_poly
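
The "inverting" step above can be shown in isolation: a leftover interior ring becomes a polygon, its bounds are padded by 10%, and subtracting the ring from the padded box yields a polygon with a hole. The ring coordinates below are made up for illustration.

import shapely.geometry as sgeom

ring_poly = sgeom.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])  # an unmatched interior ring

x1, y1, x2, y2 = ring_poly.bounds
bx, by = (x2 - x1) * 0.1, (y2 - y1) * 0.1
pad_box = sgeom.box(x1 - bx, y1 - by, x2 + bx, y2 + by)
inverted = pad_box.difference(ring_poly)  # a polygon with one interior ring
print(len(inverted.interiors), inverted.area)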
Example #28
def test_google_wts():
    gt = cimgt.GoogleTiles()

    ll_target_domain = sgeom.box(-15, 50, 0, 60)
    multi_poly = gt.crs.project_geometry(ll_target_domain, ccrs.PlateCarree())
    target_domain = multi_poly.geoms[0]

    with assert_raises(AssertionError):
        list(gt.find_images(target_domain, -1))
    assert_equal(tuple(gt.find_images(target_domain, 0)),
                 ((0, 0, 0),))
    assert_equal(tuple(gt.find_images(target_domain, 2)),
                 ((1, 1, 2), (2, 1, 2)))

    assert_equal(list(gt.subtiles((0, 0, 0))),
                 [(0, 0, 1), (0, 1, 1), (1, 0, 1), (1, 1, 1)])
    assert_equal(list(gt.subtiles((1, 0, 1))),
                 [(2, 0, 2), (2, 1, 2), (3, 0, 2), (3, 1, 2)])

    with assert_raises(AssertionError):
        gt.tileextent((0, 1, 0))

    assert_arr_almost(gt.tileextent((0, 0, 0)), KNOWN_EXTENTS[(0, 0, 0)])
    assert_arr_almost(gt.tileextent((2, 0, 2)), KNOWN_EXTENTS[(2, 0, 2)])
    assert_arr_almost(gt.tileextent((0, 2, 2)), KNOWN_EXTENTS[(0, 2, 2)])
    assert_arr_almost(gt.tileextent((2, 2, 2)), KNOWN_EXTENTS[(2, 2, 2)])
    assert_arr_almost(gt.tileextent((8, 9, 4)), KNOWN_EXTENTS[(8, 9, 4)])
Example #29
def us_grid(resolution=.5, sparse=True):
    resolution = .5
    bounds = USA.bounds
    # Grid boundaries are determined by nearest degree.
    min_long = np.floor(bounds[0])
    min_lat  = np.floor(bounds[1])
    max_long = np.ceil(bounds[2])
    max_lat  = np.ceil(bounds[3])
    # Division should be close to an integer.
    # Add one to number of points to include the end
    # This is robust only to resolutions that "evenly" divide the range.
    nPointsLong = int(np.around((max_long - min_long) / resolution)) + 1
    nPointsLat  = int(np.around((max_lat  - min_lat ) / resolution)) + 1
    long_points = np.linspace(min_long, max_long, nPointsLong)
    lat_points  = np.linspace(min_lat,  max_lat,  nPointsLat )

    outline = contiguous_outline2('../tiger/cb_2013_us_nation_20m.shp')

    for i, (xi, yi) in enumerate(product(range(len(long_points)-1),
                                         range(len(lat_points)-1))):
        cell = box(long_points[xi], lat_points[yi],
                   long_points[xi+1], lat_points[yi+1])
        if sparse:
            # Add cell only if it intersects contiguous USA
            if cell.intersects(outline):
                yield cell
        else:
            yield cell
Example #30
    def test_unwrap(self):
        geom = box(195, -40, 225, -30)
        gvar = GeometryVariable(name='geoms', value=geom, crs=Spherical(), dimensions='geoms')
        gvar.wrap()
        self.assertEqual(gvar.get_value()[0].bounds, (-165.0, -40.0, -135.0, -30.0))
        gvar.unwrap()
        self.assertEqual(gvar.get_value()[0].bounds, (195.0, -40.0, 225.0, -30.0))
Example #31
    def tile_generator(self,
                       src,
                       tile_bounds,
                       tile_bounds_crs=None,
                       geom_type='Polygon',
                       split_multi_geoms=True,
                       min_partial_perc=0.0,
                       obj_id_col=None):
        """Generate `src` vector data tiles bounded by `tile_bounds`.

        Arguments
        ---------
        src : `str` or :class:`geopandas.GeoDataFrame`
            The source vector data to tile. Must either be a path to a GeoJSON
            or a :class:`geopandas.GeoDataFrame`.
        tile_bounds : list
            A :class:`list` made up of ``[left, top, right, bottom] `` sublists
            (this can be extracted from
            :class:`solaris.tile.raster_tile.RasterTiler` after tiling imagery)
        tile_bounds_crs : int, optional
            The EPSG code for the CRS that the tile bounds are in. If not
            provided, it's assumed that the CRS is the same as in `src`. This
            argument must be provided if the bound coordinates and `src` are
            not in the same CRS, otherwise tiling will not occur correctly.
        geom_type : str, optional (default: "Polygon")
            The type of geometries contained within `src`. Defaults to
            ``"Polygon"``, can also be ``"LineString"``.
        split_multi_geoms : bool, optional (default: True)
            Should multi-polygons or multi-linestrings generated by clipping
            a geometry into discontinuous pieces be separated? Defaults to yes
            (``True``).
        min_partial_perc : float, optional (default: 0.0)
            The minimum percentage of a :class:`shapely.geometry.Polygon` 's
            area or :class:`shapely.geometry.LineString` 's length that must
            be retained within a tile's bounds to be included in the output.
            Defaults to ``0.0``, meaning that the contained portion of a
            clipped geometry will be included, no matter how small.
        obj_id_col : str, optional (default: None)
            If ``split_multi_geoms=True``, the name of a column that specifies
            a unique identifier for each geometry (e.g. the ``"BuildingId"``
            column in many SpaceNet datasets.) See
            :func:`solaris.utils.geo.split_multi_geometries` for more.

        Yields
        ------
        tile_gdf : :class:`geopandas.GeoDataFrame`
            A tile geodataframe.
        tb : list
            A list with ``[left, top, right, bottom] `` coordinates for the
            boundaries contained by `tile_gdf`.
        """
        self.src = _check_gdf_load(src)
        if self.verbose:
            print("Num tiles:", len(tile_bounds))

        self.src_crs = _check_crs(self.src.crs)
        # check if the tile bounds and vector are in the same crs
        if tile_bounds_crs is not None:
            tile_bounds_crs = _check_crs(tile_bounds_crs)
        else:
            tile_bounds_crs = self.src_crs
        if self.src_crs != tile_bounds_crs:
            reproject_bounds = True  # used to transform tb for clip_gdf()
        else:
            reproject_bounds = False

        self.proj_unit = get_projection_unit(self.src_crs)
        if getattr(self, 'dest_crs', None) is None:
            self.dest_crs = self.src_crs
        for i, tb in enumerate(tile_bounds):
            if self.super_verbose:
                print("\n", i, "/", len(tile_bounds))
            if reproject_bounds:
                tile_gdf = clip_gdf(self.src,
                                    reproject_geometry(box(*tb),
                                                       tile_bounds_crs,
                                                       self.src_crs),
                                    min_partial_perc,
                                    geom_type,
                                    verbose=self.super_verbose)
            else:
                tile_gdf = clip_gdf(self.src,
                                    tb,
                                    min_partial_perc,
                                    geom_type,
                                    verbose=self.super_verbose)
            if self.src_crs != self.dest_crs:
                tile_gdf = tile_gdf.to_crs(crs=self.dest_crs.to_wkt())
            if split_multi_geoms:
                split_multi_geometries(tile_gdf, obj_id_col=obj_id_col)
            yield tile_gdf, tb
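
A hedged usage sketch of tile_generator, assuming the enclosing class is solaris' VectorTiler and that it can be constructed as shown; the import path, constructor argument, GeoJSON path, and tile bounds are all placeholders.

from solaris.tile.vector_tile import VectorTiler  # assumed location of the enclosing class

tiler = VectorTiler(dest_dir='vector_tiles/')  # hypothetical constructor argument
tile_bounds = [(0.0, 0.0, 0.001, 0.001),       # per-tile bounds sublists
               (0.001, 0.0, 0.002, 0.001)]
for tile_gdf, tb in tiler.tile_generator('labels.geojson', tile_bounds,
                                         geom_type='Polygon',
                                         split_multi_geoms=True):
    print(tb, len(tile_gdf))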
Example #32
    def create(self, size=100000, iter_size=50000, use_timer=True):
        print "\n** build_packages *******************************************"
        print "----"

        self.package_size = size
        timer_a = timeit.default_timer()  # Initialize timer

        # Containers for edges represented as GeoJSON and as shapely objects:
        container_edges_geojson = []
        container_edges_geom_2d = []

        # Statistics per package:
        package_scale_low = np.inf
        package_scale_high = 0
        package_count_edges = 0
        package_size_bytes = 0
        package_id = 1
        package_num_coords = 0

        for edges in self.dataset.get_generator(iter_size=iter_size):
            if use_timer:
                print "----"
                print "Processing new fetch from generator.."
            for edge in edges:

                # Add edge representations to containers for package:
                edge_geojson = {
                    "type": "Feature",
                    "geometry": {
                        "type":
                        "LineString",
                        "coordinates":
                        as_python_linestring(sg_loads(edge["geom"].wkt))
                    },
                    "properties": {
                        "edge_id": edge["edge_id"],
                        "scale_low": edge["scale_low"],
                        "scale_high": edge["scale_high"],
                    }
                }

                package_num_coords += len(edge["geom"].coords)
                edge_bytes = len(bytes(edge_geojson))

                if package_size_bytes + edge_bytes > size:
                    if package_id % 500 == 0 and use_timer:
                        print "----"
                        print "Created", package_id, "packages.."

                        # print "Package: ", package_id
                        # print "# edges: ", package_count_edges
                        # print "# bytes: ", package_size_bytes
                        # print "-------- "

                    # Update info:
                    self.number_of_coors_per_package.append(package_num_coords)
                    self.number_of_edges_per_package.append(
                        package_count_edges)
                    self.height_per_package.append(package_scale_high -
                                                   package_scale_low)
                    if package_count_edges < self.package_least_edges:
                        self.package_least_edges = package_count_edges
                    if package_count_edges > self.package_most_edges:
                        self.package_most_edges = package_count_edges
                    self.package_average_edges += package_count_edges

                    # Create package:
                    package_geom_2d = box(
                        *MultiLineString(container_edges_geom_2d).bounds)
                    package = self._create_package(package_id, package_geom_2d,
                                                   package_scale_low,
                                                   package_scale_high,
                                                   container_edges_geojson)
                    self._packages.append(package)

                    # Empty containers:
                    container_edges_geojson = []
                    container_edges_geom_2d = []

                    # Reset statistics:
                    package_scale_low = np.inf
                    package_scale_high = 0
                    package_size_bytes = 0
                    package_count_edges = 0
                    package_id += 1
                    package_num_coords = 0

                container_edges_geojson.append(edge_geojson)
                container_edges_geom_2d.append(edge["geom"])

                # Keep track of package statistics:
                if edge["scale_low"] < package_scale_low:
                    package_scale_low = edge["scale_low"]
                if edge["scale_high"] > package_scale_high:
                    package_scale_high = edge["scale_high"]

                package_count_edges += 1
                package_size_bytes += edge_bytes

        # Create last package:
        if package_count_edges != 0:
            # print "Package: ", package_id
            # print "# edges: ", package_count_edges
            # print "# bytes: ", package_size_bytes
            # print "-------- "

            # Update info:
            self.number_of_coors_per_package.append(package_num_coords)
            self.number_of_edges_per_package.append(package_count_edges)
            self.height_per_package.append(package_scale_high -
                                           package_scale_low)
            if package_count_edges < self.package_least_edges:
                self.package_least_edges = package_count_edges
            if package_count_edges > self.package_most_edges:
                self.package_most_edges = package_count_edges
            self.package_average_edges += package_count_edges

            # Create last package:
            package_geom_2d = box(
                *MultiLineString(container_edges_geom_2d).bounds)
            package = self._create_package(package_id, package_geom_2d,
                                           package_scale_low,
                                           package_scale_high,
                                           container_edges_geojson)
            self._packages.append(package)

        self.count = package_id
        self.package_average_edges = self.package_average_edges / self.count

        timer_b = timeit.default_timer()
        if use_timer:
            print "----"
            print str(package_id), 'packages created in:', timer_b - timer_a, \
                'seconds'

        if self.db == "corine":
            packages = self._packages
            self._packages = []  # Remove packages from constructor for memory
            return packages
        else:
            # Calculate total overlap:
            print "----"
            print "Starting total overlap calculation between packages.."
            timer_overlap_a = timeit.default_timer()
            self.total_overlap = sum([
                (pair[0]["geom"].intersection(pair[1]["geom"]).area *
                 overlap_1d(pair[0]["scale_low"], pair[0]["scale_high"],
                            pair[1]["scale_low"], pair[1]["scale_high"]))
                for pair in combinations(self._packages, 2)
            ])
            timer_overlap_b = timeit.default_timer()
            print 'Overlap between packages calculated in:', \
                timer_overlap_b - timer_overlap_a, 'seconds'

            return self._packages
Example #33
def clip_gdf(gdf,
             tile_bounds,
             min_partial_perc=0.0,
             geom_type="Polygon",
             use_sindex=True,
             verbose=False):
    """Clip GDF to a provided polygon.

    Clips objects within `gdf` to the region defined by
    `tile_bounds`. Also adds several columns to the output::

        `origarea`
            The original area of the polygons (only used if `geom_type` ==
            ``"Polygon"``).
        `origlen`
            The original length of the objects (only used if `geom_type` ==
            ``"LineString"``).
        `partialDec`
            The fraction of the object that remains after clipping
            (fraction of area for Polygons, fraction of length for
            LineStrings.) Can filter based on this by using `min_partial_perc`.
        `truncated`
            Boolean indicator of whether or not an object was clipped.

    Arguments
    ---------
    gdf : :py:class:`geopandas.GeoDataFrame`
        A :py:class:`geopandas.GeoDataFrame` of polygons to clip.
    tile_bounds : `list` or :class:`shapely.geometry.Polygon`
        The geometry to clip objects in `gdf` to. This can either be a
        ``[left, top, right, bottom] `` bounds list or a
        :class:`shapely.geometry.Polygon` object defining the area to keep.
    min_partial_perc : float, optional
        The minimum fraction of an object in `gdf` that must be
        preserved. Defaults to 0.0 (include any object if any part remains
        following clipping).
    geom_type : str, optional
        Type of objects in `gdf`. Can be one of
        ``["Polygon", "LineString"]`` . Defaults to ``"Polygon"`` .
    use_sindex : bool, optional
        Should the `gdf` sindex be used for searching. Improves efficiency
        but requires `libspatialindex <http://libspatialindex.github.io/>`__ .
    verbose : bool, optional
        Switch to print relevant values.

    Returns
    -------
    cut_gdf : :py:class:`geopandas.GeoDataFrame`
        `gdf` with all contained objects clipped to `tile_bounds` .
        See notes above for details on additional clipping columns added.

    """
    gdf['geometry'] = gdf.buffer(0)
    if isinstance(tile_bounds, tuple):
        tb = box(*tile_bounds)
    elif isinstance(tile_bounds, list):
        tb = box(*tile_bounds)
    elif isinstance(tile_bounds, Polygon):
        tb = tile_bounds
    if use_sindex and (geom_type == "Polygon"):
        gdf = search_gdf_polygon(gdf, tb)

    # if geom_type == "LineString":
    if 'origarea' in gdf.columns:
        pass
    else:
        if geom_type == "LineString":
            gdf['origarea'] = 0
        else:
            gdf['origarea'] = gdf.area

    if 'origlen' in gdf.columns:
        pass
    else:
        if geom_type == "LineString":
            gdf['origlen'] = gdf.length
        else:
            gdf['origlen'] = 0
    # TODO must implement different case for lines and for spatialIndex
    # (Assume RTree is already performed)

    cut_gdf = gdf.copy()
    cut_gdf.geometry = gdf.intersection(tb)

    if geom_type == 'Polygon':
        cut_gdf['partialDec'] = cut_gdf.area / cut_gdf['origarea']
        cut_gdf = cut_gdf.loc[cut_gdf['partialDec'] > min_partial_perc, :]
        cut_gdf['truncated'] = (cut_gdf['partialDec'] != 1.0).astype(int)
    else:
        # assume linestrings
        # remove null
        cut_gdf = cut_gdf[cut_gdf['geometry'].notnull()]
        cut_gdf['partialDec'] = 1
        cut_gdf['truncated'] = 0
        # cut_gdf = cut_gdf[cut_gdf.geom_type != "GeometryCollection"]
        if len(cut_gdf) > 0 and verbose:
            print("clip_gdf() - gdf.iloc[0]:", gdf.iloc[0])
            print("clip_gdf() - tb:", tb)
            print("clip_gdf() - gdf_cut:", cut_gdf)

    # TODO: IMPLEMENT TRUNCATION MEASUREMENT FOR LINESTRINGS

    return cut_gdf
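
A small self-contained check of clip_gdf as defined above (requires geopandas; use_sindex is disabled so the search_gdf_polygon helper is not needed). The two toy boxes are illustrative: the first sits fully inside the tile, the second straddles its edge and comes back truncated.

import geopandas as gpd
from shapely.geometry import box

gdf = gpd.GeoDataFrame({'geometry': [box(0, 0, 1, 1), box(1.5, 0, 3, 1)]})
cut = clip_gdf(gdf, [0.0, 0.0, 2.0, 2.0], min_partial_perc=0.25, use_sindex=False)
print(cut[['partialDec', 'truncated']])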
Example #34
    def of_rectangle(min_x: int, min_y: int, max_x: int,
                     max_y: int) -> BoundingBox:
        return BoundingBox(box(min_x, min_y, max_x, max_y))
def main():
    args = parse_args()

    # load the csv file into pandas for cleanup
    print('Loading...')
    df = pd.read_csv(args.input_file)

    # filter down to area of interest records
    print('Finding AoI...')
    geohashes_aoi = set()
    if args.coverage_file is not None:
        # loading coverage polygon from geo json file
        coverage_geojson = json.load(open(args.coverage_file))

        # generate geohashes covered by the AoI
        geohashes_aoi = helpers.geohashes_from_geojson_poly(
            coverage_geojson, precision=args.geohash_level)
    # filter down to country of interest records
    elif args.country_iso is not None:
        df = df.loc[df["iso3"] == args.country_iso]

    # extract x, y locations and crop of interest
    df = df[(["x", "y"] + args.crop_columns)]
    df = df.reset_index()

    # loop over the x, y which are the cell centroids, and generate a bounding box based on
    # the cell size (taken from the associated geotiff resolution)
    print('Converting points to bounds...')
    centroids = zip(df["x"], df["y"])
    bounds = [
        geometry.box(
            c[0] - CELL_SIZE_X / 2,
            c[1] - CELL_SIZE_Y / 2,
            c[0] + CELL_SIZE_X / 2,
            c[1] + CELL_SIZE_Y / 2,
        ) for c in tqdm(centroids)
    ]

    # loop through the bounds we've created and intersect each with the intended geohash grid
    print('Converting bounds to geohashes...')
    geohashes = [
        polygon_geohasher.polygon_to_geohashes(b,
                                               precision=args.geohash_level,
                                               inner=False)
        for b in tqdm(bounds)
    ]

    # flatten the geohash set for each cell, preserving the index - no clean
    # way to do this in pandas
    flattened_gh = []
    print('Clipping geohashes to AoI...')
    for idx, gh_set in tqdm(enumerate(geohashes)):
        for gh in gh_set:
            if len(geohashes_aoi) == 0 or gh in geohashes_aoi:
                bounds_str = helpers.geohash_to_array_str(gh)
                flattened_gh.append((idx, gh, bounds_str))

    # store as a dataframe with any geohashes that were part of 2 cells reduced to 1
    # a better implementation of this would take the value of both cells into  account and
    # compute a final adjusted value for the given geohash
    print('Generating output csv...')
    geohash_df = pd.DataFrame(flattened_gh,
                              columns=["cell", "geohash", "bounds"])
    geohash_df = geohash_df.drop_duplicates(subset="geohash", keep="first")
    geohash_df = geohash_df.set_index("cell")

    joined = pd.merge(df, geohash_df, left_index=True, right_index=True)
    joined = joined.drop(columns=["x", "y", "index"])

    joined.to_csv(args.output_file, index=False)
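
The heart of the script above is the centroid-to-geohash conversion; a minimal sketch of that step for a single cell, assuming the polygon-geohasher package is importable as in the script and using a hypothetical cell size and centroid:

from shapely import geometry
from polygon_geohasher import polygon_geohasher

CELL_SIZE_X = CELL_SIZE_Y = 0.1  # hypothetical cell resolution in degrees

x, y = 12.5, 50.1  # a single cell centroid
cell = geometry.box(x - CELL_SIZE_X / 2, y - CELL_SIZE_Y / 2,
                    x + CELL_SIZE_X / 2, y + CELL_SIZE_Y / 2)

# all geohashes at precision 5 touching the cell; inner=False also keeps
# geohashes that only partially overlap it
geohashes = polygon_geohasher.polygon_to_geohashes(cell, precision=5,
                                                   inner=False)
print(sorted(geohashes))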
def plot_topomap(events=None):
    from cartopy.feature import NaturalEarthFeature as NEF
    from matplotlib.colors import LinearSegmentedColormap
    import shapely.geometry as sgeom
    from util.imaging import EA_EURO, GEO, add_scale, de_border, add_ticklabels, plot_elevation

    mlf = [(12.410, 50.261), (12.485, 50.183), (12.523, 50.127),
           (12.517, 50.125), (12.538, 50.131), (12.534, 50.130),
           (12.543, 50.109), (12.547, 50.083), (12.547, 50.074),
           (12.545, 50.066), (12.546, 50.057), (12.576, 50.034),
           (12.594, 50.016), (12.632, 50.004), (12.665, 49.980)]
    fig = plt.figure()
    #    ax = fig.add_axes([0, 0, 1, 1], projection=EA_EURO)
    ax = fig.add_subplot(111, projection=EA_EURO)
    extent = [12.05, 12.85, 50, 50.45]
    ax.set_extent(extent, crs=GEO)

    # Create an inset GeoAxes showing the regional context of the map area.
    box = ax.get_position().bounds
    subax = fig.add_axes(
        [box[0] + box[2] - 0.23, box[1] + box[3] - 0.3, 0.28, 0.28],
        projection=EA_EURO)
    subax.set_extent([8, 16, 47, 55], GEO)
    subax.add_feature(NEF('physical', 'land', '10m'),
                      facecolor='0.7',
                      alpha=0.5,
                      rasterized=True)  #facecolor='sandybrown'
    subax.add_feature(NEF('physical', 'coastline', '10m'),
                      facecolor='none',
                      edgecolor='k',
                      linewidth=0.5,
                      rasterized=True)
    subax.add_feature(NEF('cultural', 'admin_0_boundary_lines_land', '10m'),
                      facecolor='none',
                      edgecolor='k',
                      linewidth=0.5,
                      rasterized=True)
    subax.add_geometries(
        [sgeom.box(extent[0], extent[2], extent[1], extent[3])],
        GEO,
        facecolor='none',
        edgecolor='k',
        linewidth=1,
        alpha=0.5)
    lonticks = [12.2, 12.4, 12.6, 12.8]
    latticks = [50, 50.1, 50.2, 50.3, 50.4]
    add_ticklabels(ax, lonticks, latticks)
    ax.tick_params(axis='both', which='major', labelsize=8)
    plot_elevation(ax,
                   shading=False,
                   cmap=LinearSegmentedColormap.from_list(
                       'littlegray', ['white', '0.5']),
                   azimuth=315,
                   altitude=60,
                   rasterized=True)
    add_scale(ax, 10, (12.15, 50.02))
    de_border(ax, edgecolor='0.5', rasterized=True)
    eqs, sta = load_webnet(stations=True)
    if eqs is not None:
        ax.scatter(eqs.lon.values,
                   eqs.lat.values,
                   4,
                   '#9bd7ff',
                   alpha=0.4,
                   marker='.',
                   transform=GEO,
                   rasterized=True)
    if events is not None:
        _, _, lon, lat, _, mag, *_ = zip(*events2lists(events))
        ax.scatter(lon, lat, 4, 'C0', marker='o', transform=GEO)
    ax.plot(*list(zip(*mlf)), color='0.5', transform=GEO)
    ax.annotate('MLF', (12.505, 50.145),
                None,
                GEO._as_mpl_transform(ax),
                size='x-small',
                zorder=10,
                rotation=290)
    used_stations = 'NKC LBC VAC KVC STC POC SKC KRC ZHC'.split()
    sta = sta[sta.station.isin(used_stations)]
    ax.scatter(sta.lon.values,
               sta.lat.values,
               100,
               marker='^',
               color='none',
               edgecolors='k',
               transform=GEO,
               zorder=10)
    for idx, s in sta.iterrows():
        xy = (2, 2) if s.station not in ('KOPD', 'KAC') else (-10, 5)
        ax.annotate(s.station, (s.lon, s.lat),
                    xy,
                    GEO._as_mpl_transform(ax),
                    'offset points',
                    size='x-small',
                    zorder=10)
    x0, y0 = EA_EURO.transform_point(LATLON0[1], LATLON0[0], GEO)
    ax.add_geometries([sgeom.box(x0 - 2500, y0 - 2300, x0 + 2500, y0 + 2700)],
                      EA_EURO,
                      facecolor='none',
                      edgecolor='C1',
                      linewidth=2,
                      alpha=0.8,
                      zorder=11)
    fig.savefig('figs/topomap.pdf',
                bbox_inches='tight',
                pad_inches=0.1,
                dpi=300)
    plt.show()
    return sta
Example #37
0
    def __init__(self,
                 lulc_raster_filepath,
                 biophysical_table_filepath,
                 cc_method,
                 ref_et_raster_filepaths,
                 t_refs=None,
                 uhi_maxs=None,
                 t_raster_filepaths=None,
                 station_t_filepath=None,
                 station_locations_filepath=None,
                 dates=None,
                 align_rasters=True,
                 workspace_dir=None,
                 extra_ucm_args=None,
                 num_workers=None):
        """
        Pythonic and open source interface to the InVEST urban cooling model.
        A set of additional utility methods serve to compute temperature maps
        and data frames.

        Parameters
        ----------
        lulc_raster_filepath : str
            Path to the raster of land use/land cover (LULC) file
        biophysical_table_filepath : str
            Path to the biophysical table CSV file
        cc_method : str
            Cooling capacity calculation method. Can be either 'factors' or
            'intensity'
        ref_et_raster_filepaths : str or list-like
            Path to the reference evapotranspiration raster, or sequence of
            paths to the reference evapotranspiration rasters
        t_refs : numeric or list-like, optional
            Reference air temperature. If not provided, it will be set as the
            minimum observed temperature (raster or station measurements, for
            each respective date if calibrating for multiple dates).
        uhi_maxs : numeric or list-like, optional
            Magnitude of the UHI effect. If not provided, it will be set as the
            difference between the maximum and minimum observed temperature
            (raster or station measurements, for each respective date if
            calibrating for multiple dates).
        t_raster_filepaths : str or list-like, optional
            Path to the observed temperature raster, or sequence of strings
            with a path to the observed temperature rasters. The raster must
            be aligned to the LULC raster. Required if calibrating against
            temperature map(s).
        station_t_filepath : str, optional
            Path to a table of air temperature measurements where each column
            corresponds to a monitoring station and each row to a datetime.
            Required if calibrating against station measurements. Ignored if
            providing `t_raster_filepaths`.
        station_locations_filepath : str, optional
            Path to a table with the locations of each monitoring station,
            where the first column features the station labels (that match the
            columns of the table of air temperature measurements), and there
            are (at least) a column labelled 'x' and a column labelled 'y'
            that correspond to the locations of each station (in the same CRS
            as the other rasters). Required if calibrating against station
            measurements. Ignored if providing `t_raster_filepaths`.
        dates : str or datetime-like or list-like, optional
            Date or list of dates that correspond to each of the observed
            temperature rasters provided in `t_raster_filepaths`. Ignored if
            `station_t_filepath` is provided.
        align_rasters : bool, default True
            Whether the rasters should be aligned before passing them as
            arguments of the InVEST urban cooling model. Since the model
            already aligns the LULC and reference evapotranspiration rasters,
            this argument is only useful to align the temperature rasters, and
            is therefore ignored if calibrating against station measurements.
        workspace_dir : str, optional
            Path to the folder where the model outputs will be written. If not
            provided, a temporary directory will be used.
        extra_ucm_args : dict-like, optional
            Other keyword arguments to be passed to the `execute` method of
            the urban cooling model.
        num_workers : int, optional
            Number of workers so that the simulations of each iteration can be
            executed at scale. Only useful if calibrating for multiple dates.
            If not provided, it will be set automatically depending on the
            number of dates and available number of processors in the CPU.
        """

        if workspace_dir is None:
            # TODO: how do we ensure that this is removed?
            workspace_dir = tempfile.mkdtemp()
            # TODO: log to warn that we are using a temporary directory
        # self.workspace_dir = workspace_dir
        # self.base_args.update()

        # evapotranspiration rasters for each date
        if isinstance(ref_et_raster_filepaths, str):
            ref_et_raster_filepaths = [ref_et_raster_filepaths]

        # get the raster metadata from lulc (used to predict air temperature
        # rasters)
        with rio.open(lulc_raster_filepath) as src:
            meta = src.meta.copy()
            data_mask = src.dataset_mask().astype(bool)

        # calibration approaches
        if t_raster_filepaths is not None:
            # calibrate against a map
            if isinstance(t_raster_filepaths, str):
                t_raster_filepaths = [t_raster_filepaths]

            if align_rasters:
                # a list is needed for the `_align_rasters` method
                if isinstance(ref_et_raster_filepaths, tuple):
                    ref_et_raster_filepaths = list(ref_et_raster_filepaths)
                if isinstance(t_raster_filepaths, tuple):
                    t_raster_filepaths = list(t_raster_filepaths)
                # align the rasters to the LULC raster and dump them to new
                # paths in the workspace directory
                dst_lulc_raster_filepath = path.join(workspace_dir, 'lulc.tif')
                dst_ref_et_raster_filepaths = [
                    path.join(workspace_dir, f'ref-et_{i}.tif')
                    for i in range(len(t_raster_filepaths))
                ]
                dst_t_raster_filepaths = [
                    path.join(workspace_dir, f't_{i}.tif')
                    for i in range(len(t_raster_filepaths))
                ]
                # the call below returns the same `dst_lulc_raster_filepath`
                # `dst_ref_et_raster_filepaths` and `dst_t_raster_filepaths`
                # passed as args
                (meta, data_mask, lulc_raster_filepath,
                 ref_et_raster_filepaths, t_raster_filepaths) = _align_rasters(
                     lulc_raster_filepath, ref_et_raster_filepaths,
                     t_raster_filepaths, dst_lulc_raster_filepath,
                     dst_ref_et_raster_filepaths, dst_t_raster_filepaths)

            # observed values array, Tref and UHImax
            if t_refs is None:
                if uhi_maxs is None:
                    obs_arrs, t_refs, uhi_maxs = _preprocess_t_rasters(
                        t_raster_filepaths)
                else:
                    obs_arrs, t_refs, _ = _preprocess_t_rasters(
                        t_raster_filepaths)
            else:
                if uhi_maxs is None:
                    obs_arrs, _, uhi_maxs = _preprocess_t_rasters(
                        t_raster_filepaths)
                else:
                    obs_arrs, _, __ = _preprocess_t_rasters(t_raster_filepaths)
            # need to replace nodata with `nan` so that `dropna` works below
            # the `_preprocess_t_rasters` method already uses `np.where` to
            # that end, however the `data_mask` used here might be different
            # (i.e., the intersection of the data regions of all rasters)
            obs_arr = np.concatenate([
                np.where(data_mask, _obs_arr, np.nan) for _obs_arr in obs_arrs
            ])

            # attributes to index the samples
            if isinstance(dates, str):
                dates = [dates]
            sample_name = 'pixel'
            # the sample index/keys here will select all the pixels of the
            # rasters, indexed by their flat-array position - this is rather
            # silly but this way the attributes work in the same way when
            # calibrating against observed temperature rasters or station
            # measurements
            # sample_keys = np.flatnonzero(data_mask)
            # sample_index = np.arange(data_mask.sum())
            sample_index = np.arange(data_mask.size)
            sample_keys = np.arange(data_mask.size)
        elif station_t_filepath is not None:
            station_location_df = pd.read_csv(station_locations_filepath,
                                              index_col=0)
            station_t_df = pd.read_csv(station_t_filepath,
                                       index_col=0)[station_location_df.index]
            station_t_df.index = pd.to_datetime(station_t_df.index)

            # observed values array, Tref and UHImax
            if t_refs is None:
                t_refs = station_t_df.min(axis=1)
            if uhi_maxs is None:
                uhi_maxs = station_t_df.max(axis=1) - t_refs
            obs_arr = station_t_df.values  # .flatten()

            # attributes to index the samples
            dates = station_t_df.index
            sample_name = 'station'
            sample_index = station_t_df.columns
            sample_keys = np.ravel_multi_index(
                transform.rowcol(meta['transform'], station_location_df['x'],
                                 station_location_df['y']),
                (meta['height'], meta['width']))
        else:
            # this is useful in this same method (see below)
            dates = None
            sample_name = None
            sample_index = None
            sample_keys = None
            obs_arr = None

        # create a dummy geojson with the bounding box extent for the area of
        # interest - this is completely ignored during the calibration
        aoi_vector_filepath = path.join(workspace_dir, 'dummy_aoi.geojson')
        with rio.open(lulc_raster_filepath) as src:
            # geom = geometry.box(*src.bounds)
            with fiona.open(aoi_vector_filepath,
                            'w',
                            driver='GeoJSON',
                            crs=src.crs,
                            schema={
                                'geometry': 'Polygon',
                                'properties': {
                                    'id': 'int'
                                }
                            }) as c:
                c.write({
                    'geometry': geometry.mapping(geometry.box(*src.bounds)),
                    'properties': {
                        'id': 1
                    },
                })

        # store the attributes to index the samples
        self.meta = meta
        self.data_mask = data_mask
        self.dates = dates
        self.sample_name = sample_name
        self.sample_index = sample_index
        self.sample_keys = sample_keys

        # store reference temperatures and UHI magnitudes as class attributes
        if not _is_sequence(t_refs):
            t_refs = [t_refs]
        if not _is_sequence(uhi_maxs):
            uhi_maxs = [uhi_maxs]
        self.t_refs = t_refs
        self.uhi_maxs = uhi_maxs

        # flat observation array to compute the calibration metric
        if obs_arr is not None:
            self.obs_arr = obs_arr.flatten()
            self.obs_mask = ~np.isnan(self.obs_arr)
            self.obs_arr = self.obs_arr[self.obs_mask]

        # model parameters: prepare the dict here so that all the paths/
        # parameters have been properly set above
        self.base_args = {
            'lulc_raster_path': lulc_raster_filepath,
            'biophysical_table_path': biophysical_table_filepath,
            'aoi_vector_path': aoi_vector_filepath,
            'cc_method': cc_method,
            'workspace_dir': workspace_dir,
        }
        # if model_params is None:
        #     model_params = DEFAULT_MODEL_PARAMS
        self.base_args.update(**settings.DEFAULT_UCM_PARAMS)

        if extra_ucm_args is None:
            extra_ucm_args = settings.DEFAULT_EXTRA_UCM_ARGS
        if 'do_valuation' not in extra_ucm_args:
            extra_ucm_args['do_valuation'] = settings.DEFAULT_EXTRA_UCM_ARGS[
                'do_valuation']
        self.base_args.update(**extra_ucm_args)
        # also store the paths to the evapotranspiration rasters
        self.ref_et_raster_filepaths = ref_et_raster_filepaths

        # number of workers to perform each calibration iteration at scale
        if num_workers is None:
            num_workers = min(len(self.ref_et_raster_filepaths),
                              os.cpu_count())
        self.num_workers = num_workers
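
The station-measurement branch above maps each station's projected x/y coordinate to a flat pixel index of the LULC grid via `transform.rowcol` and `np.ravel_multi_index`; a standalone sketch of that indexing with a made-up raster shape, transform and station coordinates:

import numpy as np
from rasterio import transform

# hypothetical 100x200 raster with 30 m pixels anchored at (500000, 4650000)
height, width = 100, 200
affine = transform.from_origin(500000, 4650000, 30, 30)

station_x = [500615, 503050]
station_y = [4649120, 4647010]

# row/col of each station, then their positions within the flattened raster
rows, cols = transform.rowcol(affine, station_x, station_y)
sample_keys = np.ravel_multi_index((rows, cols), (height, width))
print(sample_keys)  # one flat index per station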
Example #38
0
             driver='ESRI Shapefile',
             crs='epsg:3006',
             schema=covSchema) as dest:
 for picFolder, metaFolder in zip(picFolders, metaFolders):
     files = [
         os.path.splitext(os.path.basename(x))[0]
         for x in glob.glob(picFolder + '/*.tif')
     ]
     for file in files:
         data = gdal.Open(picFolder + file + '.tif', gdal.GA_ReadOnly)
         geoTransform = data.GetGeoTransform()
         minx = geoTransform[0]
         maxy = geoTransform[3]
         maxx = minx + geoTransform[1] * data.RasterXSize
         miny = maxy + geoTransform[5] * data.RasterYSize
         geom = box(minx, miny, maxx, maxy)
         img_id = file[0:11]
         img_year = file[12:]
         yeard[picFolder + file + '.tif'] = int(img_year)
         metapath = metaFolder + img_id + '_flygbild_' + img_year + '.json'
         metapath_orto = metaFolder + img_id + '_ortofoto_' + img_year + '.json'
         if os.path.isfile(metapath) and os.path.isfile(metapath_orto):
             with open(metapath) as metafile:
                 metadata = json.load(metafile)
             try:
                 img_date = metadata['features'][0]['properties'][
                     'tidpunkt'][0:10]
                 img_camera = metadata['features'][0]['properties'][
                     'kamera']
                 dest.write({
                     'geometry': mapping(geom),
Example #39
0
def mosaic_bulk(meta_path, s3_list_path, min_scale, max_scale, min_year,
                max_year, woodland_tint, allow_orthophoto, bounds, minzoom,
                maxzoom, quadkey_zoom, sort_preference, closest_to_year,
                filter_only):
    """Create MosaicJSON from CSV of bulk metadata
    """
    if (sort_preference == 'closest-to-year') and (not closest_to_year):
        msg = 'closest-to-year parameter required when sort-preference is closest-to-year'
        raise ValueError(msg)

    df = pd.read_csv(meta_path, low_memory=False)
    # Rename column names to lower case and snake case
    df = df.rename(columns=lambda col: col.lower().replace(' ', '_'))

    # Keep only historical maps
    # Newer maps are only in GeoPDF, and not in GeoTIFF, let alone COG
    df = df[df['series'] == 'HTMC']

    # Create year column as Imprint Year if it exists, otherwise Date On Map
    df['year'] = df['imprint_year'].fillna(df['date_on_map'])

    # Apply filters
    if min_scale:
        df = df[df['scale'] >= min_scale]
    if max_scale:
        df = df[df['scale'] <= max_scale]
    if min_year:
        df = df[df['year'] >= min_year]
    if max_year:
        df = df[df['year'] <= max_year]
    if woodland_tint is not None:
        if woodland_tint:
            df = df[df['woodland_tint'] == 'Y']
        else:
            df = df[df['woodland_tint'] == 'N']
    if not allow_orthophoto:
        df = df[df['orthophoto'].isna()]

    # Create s3 GeoTIFF paths from metadata
    df['s3_tif'] = construct_s3_tif_url(df['download_product_s3'])

    if s3_list_path:
        # Load list of GeoTIFF files
        s3_files_df = load_s3_list(s3_list_path)

        # Keep only files that exist as GeoTIFF
        df = filter_cog_exists(df, s3_files_df)

    df['geometry'] = df.apply(construct_geometry, axis=1)
    gdf = gpd.GeoDataFrame(df)

    # Filter within provided bounding box
    if bounds:
        bounds = box(*map(float, bounds.split(',')))
        gdf = gdf[gdf.geometry.intersects(bounds)]

    if not maxzoom:
        maxzoom = gdf.apply(
            lambda row: get_maxzoom(row['scale'], row['scanner_resolution']),
            axis=1)
        # Take 75th percentile of maxzoom series
        maxzoom = int(round(maxzoom.describe()['75%']))
    if not minzoom:
        minzoom = maxzoom - 5

    # Columns to keep for creating MosaicJSON
    cols = ['scale', 'year', 's3_tif', 'geometry', 'cell_id']

    if sort_preference == 'newest':
        sort_by = ['year', 'scale']
        sort_ascending = [False, True]
    elif sort_preference == 'oldest':
        sort_by = ['year', 'scale']
        sort_ascending = [True, True]
    elif sort_preference == 'closest-to-year':
        gdf['reference_year'] = (closest_to_year - gdf['year']).abs()
        sort_by = ['reference_year', 'scale']
        sort_ascending = [True, True]
        cols.remove('year')
        cols.append('reference_year')

    if filter_only:
        for row in gdf[cols].iterfeatures():
            print(json.dumps(row, separators=(',', ':')))

        return

    # Convert to features
    features = gdf[cols].__geo_interface__['features']

    mosaic = MosaicJSON.from_features(features,
                                      minzoom=minzoom,
                                      maxzoom=maxzoom,
                                      quadkey_zoom=quadkey_zoom,
                                      asset_filter=asset_filter,
                                      accessor=path_accessor,
                                      sort_by=sort_by,
                                      sort_ascending=sort_ascending)

    print(json.dumps(mosaic.dict(), separators=(',', ':')))
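
The optional bounding-box filter above expects `bounds` as a comma-separated west,south,east,north string; a minimal standalone sketch of that filtering step with made-up geometries:

import geopandas as gpd
from shapely.geometry import box

gdf = gpd.GeoDataFrame(geometry=[box(-110, 40, -109, 41),
                                 box(-90, 30, -89, 31)])

bounds = "-112,38,-108,42"  # west,south,east,north
bbox = box(*map(float, bounds.split(',')))
filtered = gdf[gdf.geometry.intersects(bbox)]
print(len(filtered))  # -> 1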
Example #40
0
def load_pa_townships(gdb_path, layer, datastore):

    dml = """
    INSERT INTO landgrid.pa_township (MSLINK, COUNTY, MUNICIPAL_, MUNICIPAL1,
                                      FIPS_MUN_C, FED_AID_UR, FIPS_COUNT,  
                                      FIPS_AREA_, FIPS_NAME, FIPS_SQ_MI,
                                      FIPS_MUN_P, FED_ID_NUM, CLASS_OF_M,
                                      Shape_Length, Shape_Area, County_Name,
                                      County_TOWNSHIP, State_Name, geobounds, 
                                      shape)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
            %s, %s, ST_GeomFromText(%s, 4326));
    """

    try:
        with fiona.open(gdb_path, layer=layer) as src:

            datastore.execute_dml('TRUNCATE TABLE landgrid.pa_township;')
            # restart

            total = len(src)
            logger.info(f"Starting load of {layer}: {total} sections...")
            # logger.info(f"Schema: {src.schema}")

            for i, rec in enumerate(src):

                poly = shape(rec['geometry'])
                if not poly.is_valid:
                    logger.warning(
                        f"Cleaning {rec['properties']['MUNICIPAL1']}...")

                    clean = poly.buffer(0.0)
                    assert clean.is_valid, 'Invalid Polygon!'
                    assert clean.geom_type == 'Polygon' or clean.geom_type == 'MultiPolygon', \
                        f'{clean.geom_type} is not a Polygon!'
                    poly = clean

                bbox = box(*poly.bounds)
                g_json = geojson.dumps(mapping(bbox), sort_keys=True)

                datastore.write_record(
                    dml,
                    (
                        rec['properties']['MSLINK'],
                        rec['properties']['COUNTY'],
                        rec['properties']['MUNICIPAL_'],
                        rec['properties']['MUNICIPAL1'],
                        rec['properties']['FIPS_MUN_C'],
                        rec['properties']['FED_AID_UR'],
                        rec['properties']['FIPS_COUNT'],
                        rec['properties']['FIPS_AREA_'],
                        rec['properties']['FIPS_NAME'],
                        rec['properties']['FIPS_SQ_MI'],
                        rec['properties']['FIPS_MUN_P'],
                        rec['properties']['FED_ID_NUM'],
                        rec['properties']['CLASS_OF_M'],
                        rec['properties']['Shape_Length'],
                        rec['properties']['Shape_Area'],
                        rec['properties']['County_Name'],
                        rec['properties']['County_TOWNSHIP'],
                        'Pennsylvania',  # TMP
                        g_json,
                        poly.wkt))

                if i % 5000 == 0:
                    logger.info(
                        f"{round(i / total * 100, 2)}%: {i} of {total}: {rec['properties']['MUNICIPAL1']}"
                    )

            datastore.batch_commit()

            logger.info(f"Completed {layer} load.")

    except Exception as e:
        # datastore.rollback()
        logger.exception("Error processing PA Townships.", e)
        raise e
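
The clean-then-bound pattern used above (and repeated in the loaders that follow) can be exercised on its own; a minimal sketch with a deliberately self-intersecting input polygon:

import geojson
from shapely.geometry import Polygon, box, mapping

# a self-intersecting "bowtie" polygon, as sometimes found in source layers
poly = Polygon([(0, 0), (2, 2), (2, 0), (0, 2), (0, 0)])

if not poly.is_valid:
    # buffer(0) rebuilds the geometry without the self-intersection
    poly = poly.buffer(0.0)
    assert poly.is_valid

# bounding box of the (now valid) geometry, serialised as GeoJSON
bbox = box(*poly.bounds)
g_json = geojson.dumps(mapping(bbox), sort_keys=True)
print(g_json)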
Example #41
0
 def to_polygon(x):
     assert len(x) in [4, 8]
     if len(x) == 4:
         return box(x[0], x[1], x[0] + x[2], x[1] + x[3])
     elif len(x) == 8:
         return Polygon([(x[2 * i], x[2 * i + 1]) for i in range(4)])
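
A quick usage note for `to_polygon` above: a 4-element input is read as (x, y, width, height), an 8-element input as four explicit corner coordinates. Assuming the helper is in scope:

print(to_polygon([10, 20, 30, 40]).bounds)  # -> (10.0, 20.0, 40.0, 60.0)
print(to_polygon([0, 0, 2, 0, 2, 1, 0, 1]).wkt)  # explicit quadrilateral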
Example #42
0
def load_wv_districts(gdb_path, layer, datastore):
    dml = """
    INSERT INTO landgrid.wv_district (WV_ID, DNAME, DNUMBER, CNAME, CNUMBER,
                                      Area_sqm, lat, long, Shape_Length, 
                                      Shape_Area, County_District, 
                                      State_Name, geobounds, shape)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
            ST_GeomFromText(%s, 4326));
    """

    try:
        with fiona.open(gdb_path, layer=layer) as src:

            datastore.execute_dml('TRUNCATE TABLE landgrid.wv_district;')
            # restart

            total = len(src)
            logger.info(f"Starting load of {layer}: {total} sections...")
            # logger.info(f"Schema: {src.schema}")

            for i, rec in enumerate(src):

                poly = shape(rec['geometry'])
                if not poly.is_valid:
                    logger.warning(f"Cleaning {rec['properties']['DNAME']}...")

                    clean = poly.buffer(0.0)
                    assert clean.is_valid, 'Invalid Polygon!'
                    assert clean.geom_type == 'Polygon' or clean.geom_type == 'MultiPolygon', \
                        f'{clean.geom_type} is not a Polygon!'
                    poly = clean

                bbox = box(*poly.bounds)
                g_json = geojson.dumps(mapping(bbox), sort_keys=True)

                datastore.write_record(
                    dml,
                    (
                        rec['properties']['WV_ID'],
                        rec['properties']['DNAME'],
                        rec['properties']['DNUMBER'],
                        rec['properties']['CNAME'],
                        rec['properties']['CNUMBER'],
                        rec['properties']['Area_sqm'],
                        rec['properties']['lat'],
                        rec['properties']['long'],
                        rec['properties']['Shape_Length'],
                        rec['properties']['Shape_Area'],
                        rec['properties']['County_District'],
                        'West Virginia',  # TMP
                        g_json,
                        poly.wkt))

                if i % 5000 == 0:
                    logger.info(
                        f"{round(i / total * 100, 2)}%: {i} of {total}: {rec['properties']['DNAME']}"
                    )

            datastore.batch_commit()

            logger.info(f"Completed {layer} load.")

    except Exception as e:
        # datastore.rollback()
        logger.exception("Error processing WV Districts.", e)
        raise e
Example #43
0
def load_tx_abstracts(gdb_path, layer, datastore):
    dml = """
    INSERT INTO landgrid.tx_abstract (PERIMETER, FIPS, CountyName, 
                                      Shape_Length, Shape_Area, 
                                      AbstractNumber, AbstractName,
                                      Block, Township, Section,
                                      AbstractNameALT, FormNumber,
                                      ControlNumber, State_Name, 
                                      geobounds, shape)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
            %s, ST_GeomFromText(%s, 4326));
    """

    try:
        with fiona.open(gdb_path, layer=layer) as src:

            datastore.execute_dml('TRUNCATE TABLE landgrid.tx_abstract;')
            # restart

            total = len(src)
            logger.info(f"Starting load of {layer}: {total} abstracts...")
            # logger.info(f"Schema: {src.schema}")

            for i, rec in enumerate(src):

                poly = shape(rec['geometry'])
                if not poly.is_valid:
                    logger.warning(
                        f"Cleaning {rec['properties']['AbstractName']}...")

                    clean = poly.buffer(0.0)
                    assert clean.is_valid, 'Invalid Polygon!'
                    assert clean.geom_type == 'Polygon' or clean.geom_type == 'MultiPolygon', \
                        f'{clean.geom_type} is not a Polygon!'
                    poly = clean

                bbox = box(*poly.bounds)
                g_json = geojson.dumps(mapping(bbox), sort_keys=True)

                datastore.write_record(
                    dml,
                    (
                        rec['properties']['PERIMETER'],
                        rec['properties']['FIPS'],
                        rec['properties']['CountyName'],
                        rec['properties']['Shape_Length'],
                        rec['properties']['Shape_Area'],
                        rec['properties']['AbstractNumber'],
                        rec['properties']['AbstractName'],
                        rec['properties']['Block'],
                        rec['properties']['Township'],
                        rec['properties']['Section'],
                        rec['properties']['AbstractNameALT'],
                        rec['properties']['FormNumber'],
                        rec['properties']['ControlNumber'],
                        'Texas',  # TMP
                        g_json,
                        poly.wkt))

                if i % 5000 == 0:
                    logger.info(
                        f"{round(i / total * 100, 2)}%: {i} of {total}: {rec['properties']['AbstractName']}"
                    )

            datastore.batch_commit()

            logger.info(f"Completed {layer} load.")

    except Exception as e:
        # datastore.rollback()
        logger.exception("Error processing Texas Abstracts.", e)
        raise e
Example #44
0
def load_counties(gdb_path, layer, datastore):

    dml = """
    INSERT INTO landgrid.county_us (County_Name, State_Name, CountyID, 
                                    StateID, FIPS_State, FIPS_County, 
                                    API_State, API_County, LAT, LON,
                                    Shape_Length, Shape_Area, geobounds, 
                                    shape)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
            ST_GeomFromText(%s, 4326));
    """

    with fiona.open(gdb_path, layer=layer) as src:
        try:
            # logger.info(f"Schema: {src.schema}")

            datastore.execute_dml('TRUNCATE TABLE landgrid.county_us;')
            # restart

            total = len(src)
            logger.info(f"Starting load of {total} counties...")

            for i, rec in enumerate(src):

                poly = shape(rec['geometry'])
                if not poly.is_valid:
                    logger.warning(
                        f"Cleaning {rec['properties']['County_Name']}...")
                    clean = poly.buffer(0.0)
                    assert clean.is_valid, f"Invalid County {rec['properties']['County_Name']}!"
                    assert clean.geom_type == 'MultiPolygon' or clean.geom_type == 'Polygon', \
                        f'{clean.geom_type} is not a Polygon!'
                    poly = clean

                bbox = box(*poly.bounds)
                g_json = geojson.dumps(mapping(bbox), sort_keys=True)

                datastore.write_record(
                    dml, (rec['properties']['County_Name'],
                          rec['properties']['State_Name'],
                          rec['properties']['CountyID'],
                          rec['properties']['StateID'],
                          rec['properties']['FIPS_State'],
                          rec['properties']['FIPS_County'],
                          rec['properties']['API_State'],
                          rec['properties']['API_County'],
                          rec['properties']['LAT'], rec['properties']['LON'],
                          rec['properties']['Shape_Length'],
                          rec['properties']['Shape_Area'], g_json, poly.wkt))

                if i % 500 == 0:
                    logger.info(
                        f"{round(i / total * 100, 2)}%: {i} of {total}: {rec['properties']['County_Name']}"
                    )

            datastore.batch_commit()
            # cursor.close()

            logger.info("Completed county load.")

        except Exception as e:
            # datastore.rollback()
            logger.exception("Error processing Counties", e)
            raise e
Example #45
0
        self._img = img
        self._mean_radiosity = self._compute_mean_radiosity()
        return self


if __name__ == "__main__":

    from shapely.geometry import mapping
    import fiona
    from collections import OrderedDict
    import pprint

    x0, y0, x1, y1 = 0, 0, 10800, 5400

    kernel = np.array([[0, -1, 0], [-1, 4, -1], [0, -1, 0]], np.float32)
    tile = Tile('data/NE1_50M_SR_W/NE1_50M_SR_W.tif', box(x0, y0, x1, y1))

    # tile.filter2D(kernel)
    # Result in ./res.png
    tmp = np.moveaxis(tile.img, 0, -1)
    print(tmp.shape)
    cv.imwrite("res.png", cv.cvtColor(tmp, cv.COLOR_RGB2BGR))

    # ----------------- Test de Polygon
    print(tile.bounding_polygon)

    # ----------------- Test de Fiona
    schema = {
        'geometry': 'Polygon',
        'properties': OrderedDict([('id', 'int')])
    }
Example #46
0
async def check_wms(source, session: ClientSession):
    """
    Check WMS source

    Parameters
    ----------
    source : dict
        Source dictionary
    session : ClientSession
        aiohttp ClientSession object

    Returns
    -------
    list:
        Good messages
    list:
        Warning messages
    list:
        Error Messages

    """

    error_msgs = []
    warning_msgs = []
    info_msgs = []

    wms_url = source["properties"]["url"]
    headers = get_http_headers(source)

    params = ["{proj}", "{bbox}", "{width}", "{height}"]
    missingparams = [p for p in params if p not in wms_url]
    if len(missingparams) > 0:
        error_msgs.append(
            "The following values are missing in the URL: {}".format(
                ",".join(missingparams)))

    wms_args = {}
    u = urlparse(wms_url)
    url_parts = list(u)
    for k, v in parse_qsl(u.query, keep_blank_values=True):
        wms_args[k.lower()] = v

    def validate_wms_getmap_url():
        """
        Layers and styles can contain whitespace. Ignore them here. They are checked against GetCapabilities later.
        """
        url_parts_without_layers = "&".join([
            "{}={}".format(key, value) for key, value in wms_args.items()
            if key not in {"layers", "styles"}
        ])
        parts = url_parts.copy()
        parts[4] = url_parts_without_layers
        url = urlunparse(parts).replace("{", "").replace("}", "")
        return validators.url(url)

    if not validate_wms_getmap_url():
        error_msgs.append("URL validation error: {}".format(wms_url))

    # Check mandatory WMS GetMap parameters (Table 8, Section 7.3.2, WMS 1.3.0 specification)
    missing_request_parameters = set()
    is_esri = "request" not in wms_args
    if is_esri:
        required_parameters = [
            "f", "bbox", "size", "imageSR", "bboxSR", "format"
        ]
    else:
        required_parameters = [
            "version",
            "request",
            "layers",
            "bbox",
            "width",
            "height",
            "format",
        ]

    for request_parameter in required_parameters:
        if request_parameter.lower() not in wms_args:
            missing_request_parameters.add(request_parameter)

    # Nothing more to do for esri rest api
    if is_esri:
        return info_msgs, warning_msgs, error_msgs

    if "version" in wms_args and wms_args["version"] == "1.3.0":
        if "crs" not in wms_args:
            missing_request_parameters.add("crs")
        if "srs" in wms_args:
            error_msgs.append(
                "WMS {} urls should not contain SRS parameter.".format(
                    wms_args["version"]))
    elif "version" in wms_args and not wms_args["version"] == "1.3.0":
        if "srs" not in wms_args:
            missing_request_parameters.add("srs")
        if "crs" in wms_args:
            error_msgs.append(
                "WMS {} urls should not contain CRS parameter.".format(
                    wms_args["version"]))
    if len(missing_request_parameters) > 0:
        missing_request_parameters_str = ",".join(missing_request_parameters)
        error_msgs.append("Parameter '{}' is missing in url.".format(
            missing_request_parameters_str))
        return info_msgs, warning_msgs, error_msgs
    # Styles is mandatory according to the WMS specification, but some WMS servers seems not to care

    if "styles" not in wms_args:
        warning_msgs.append(
            "Parameter 'styles' is missing in url. 'STYLES=' can be used to request default style."
        )

    def get_getcapabilitie_url(wms_version=None):
        get_capabilities_args = {
            "service": "WMS",
            "request": "GetCapabilities"
        }
        if wms_version is not None:
            get_capabilities_args["version"] = wms_version

        # Drop all wms getmap parameters, keep all extra arguments (e.g. such as map or key)
        for key in wms_args:
            if key not in {
                    "version",
                    "request",
                    "layers",
                    "bbox",
                    "width",
                    "height",
                    "format",
                    "crs",
                    "srs",
                    "styles",
                    "transparent",
                    "dpi",
                    "map_resolution",
                    "format_options",
            }:
                get_capabilities_args[key] = wms_args[key]
        url_parts[4] = urlencode(list(get_capabilities_args.items()))
        return urlunparse(url_parts)

    # We first send a service=WMS&request=GetCapabilities request to server
    # According to the WMS Specification Section 6.2 Version numbering and negotiation, the server should return
    # the GetCapabilities XML with the highest version the server supports.
    # If this fails, it is tried to explicitly specify a WMS version
    exceptions = []
    wms = None
    for wmsversion in [None, "1.3.0", "1.1.1", "1.1.0", "1.0.0"]:
        if wmsversion is None:
            wmsversion_str = "-"
        else:
            wmsversion_str = wmsversion

        try:
            wms_getcapabilites_url = get_getcapabilitie_url(wmsversion)

            resp = await get_url(wms_getcapabilites_url,
                                 session,
                                 with_text=True,
                                 headers=headers)
            if resp.exception is not None:
                exceptions.append("WMS {}: {}".format(wmsversion,
                                                      resp.exception))
                continue
            xml = resp.text
            if isinstance(xml, bytes):
                # Parse xml encoding to decode
                try:
                    xml_ignored = xml.decode(errors="ignore")
                    str_encoding = re.search('encoding="(.*?)"',
                                             xml_ignored).group(1)
                    xml = xml.decode(encoding=str_encoding)
                except Exception as e:
                    raise RuntimeError("Could not parse encoding: {}".format(
                        str(e)))

            wms = parse_wms(xml)
            if wms is not None:
                break
        except Exception as e:
            exceptions.append("WMS {}: Error: {}".format(
                wmsversion_str, str(e)))
            continue

    if wms is None:
        for msg in exceptions:
            error_msgs.append(msg)
        return info_msgs, warning_msgs, error_msgs

    for access_constraint in wms["AccessConstraints"]:
        info_msgs.append("WMS AccessConstraints: {}".format(access_constraint))
    for fee in wms["Fees"]:
        info_msgs.append("WMS Fees: {}".format(fee))

    if source["geometry"] is None:
        geom = None
    else:
        geom = shape(source["geometry"])

    # Check layers
    if "layers" in wms_args:
        layer_arg = wms_args["layers"]
        not_found_layers = []
        layers = layer_arg.split(",")
        for layer_name in layer_arg.split(","):
            if layer_name not in wms["layers"]:
                for wms_layer in wms["layers"]:
                    if layer_name.lower() == wms_layer.lower():
                        warning_msgs.append(
                            "Layer '{}' is advertised by WMS server as '{}'".
                            format(layer_name, wms_layer))
                not_found_layers.append(layer_name)

        if len(not_found_layers) > 0:
            error_msgs.append(
                "Layers '{}' not advertised by WMS GetCapabilities request. "
                "In rare cases WMS server do not advertise layers.".format(
                    ",".join(not_found_layers)))

        # Check source geometry against layer bounding box
        # Regardless of its projection, each layer should advertise an approximated bounding box in lon/lat.
        # See WMS 1.3.0 Specification Section 7.2.4.6.6 EX_GeographicBoundingBox
        if geom is not None and geom.is_valid:
            max_outside = 0.0
            for layer_name in layers:
                if layer_name in wms["layers"]:
                    bbox = wms["layers"][layer_name]["BBOX"]
                    geom_bbox = box(*bbox)
                    geom_outside_bbox = geom.difference(geom_bbox)
                    area_outside_bbox = geom_outside_bbox.area / geom.area * 100.0
                    max_outside = max(max_outside, area_outside_bbox)

            if max_outside >= 100.0:
                error_msgs.append("{}% of geometry is outside of the layers "
                                  "bounding box.".format(
                                      round(max_outside, 2)))
            elif max_outside > 15.0:
                warning_msgs.append("{}% of geometry is outside of the layers "
                                    "bounding box.".format(
                                        round(max_outside, 2)))

        # Check styles
        if "styles" in wms_args:
            style = wms_args["styles"]
            # the default style need not be advertised by the server
            if not (style == "default" or style == ""
                    or style == "," * len(layers)):
                styles = wms_args["styles"].split(",")
                if not len(styles) == len(layers):
                    error_msgs.append(
                        "Not the same number of styles and layers.")
                else:
                    for layer_name, style in zip(layers, styles):
                        if (len(style) > 0 and not style == "default"
                                and layer_name in wms["layers"] and style
                                not in wms["layers"][layer_name]["Styles"]):
                            error_msgs.append(
                                "Layer '{}' does not support style '{}'".
                                format(layer_name, style))

        # Check CRS
        crs_should_included_if_available = {"EPSG:4326", "EPSG:3857", "CRS:84"}
        if "available_projections" not in source["properties"]:
            error_msgs.append(
                "source is missing 'available_projections' element.")
        else:
            for layer_name in layer_arg.split(","):
                if layer_name in wms["layers"]:
                    not_supported_crs = set()
                    for crs in source["properties"]["available_projections"]:
                        # WMS sync bot checks if these projections are supported despite not advertised
                        if crs in {"EPSG:4326", "EPSG:3857"}:
                            continue
                        if crs.upper() not in wms["layers"][layer_name]["CRS"]:
                            not_supported_crs.add(crs)

                    if len(not_supported_crs) > 0:
                        supported_crs_str = ",".join(
                            wms["layers"][layer_name]["CRS"])
                        not_supported_crs_str = ",".join(not_supported_crs)
                        warning_msgs.append(
                            "Layer '{}': CRS '{}' not in: {}".format(
                                layer_name, not_supported_crs_str,
                                supported_crs_str))

                    supported_but_not_included = set()
                    for crs in crs_should_included_if_available:
                        if (crs not in source["properties"]
                            ["available_projections"]
                                and crs in wms["layers"][layer_name]["CRS"]):
                            supported_but_not_included.add(crs)

                    if len(supported_but_not_included) > 0:
                        supported_but_not_included_str = ",".join(
                            supported_but_not_included)
                        warning_msgs.append(
                            "Layer '{}': CRS '{}' not included in available_projections but "
                            "supported by server.".format(
                                layer_name, supported_but_not_included_str))

    if wms_args["version"] < wms["version"]:
        warning_msgs.append(
            "Query requests WMS version '{}', server supports '{}'".format(
                wms_args["version"], wms["version"]))

    # Check formats
    imagery_format = wms_args["format"]
    imagery_formats_str = "', '".join(wms["formats"])
    if imagery_format not in wms["formats"]:
        error_msgs.append("Format '{}' not in '{}'.".format(
            imagery_format, imagery_formats_str))

    if ("category" in source["properties"]
            and "photo" in source["properties"]["category"]):
        if "jpeg" not in imagery_format and "jpeg" in imagery_formats_str:
            warning_msgs.append(
                "Server supports JPEG, but '{}' is used. "
                "JPEG is typically preferred for photo sources, but might not be always "
                "the best choice. "
                "(Server supports: '{}')".format(imagery_format,
                                                 imagery_formats_str))
    # elif 'category' in source['properties'] and 'map' in source['properties']['category']:
    #     if 'png' not in imagery_format and 'png' in imagery_formats_str:
    #         warning_msgs.append("Server supports PNG, but '{}' is used. "
    #                             "PNG is typically preferred for map sources, but might not be always "
    #                             "the best choice. "
    #                             "(Server supports: '{}')".format(imagery_format, imagery_formats_str))

    return info_msgs, warning_msgs, error_msgs
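
The layer bounding-box check above amounts to measuring how much of the source geometry lies outside the advertised EX_GeographicBoundingBox; a minimal sketch of that measurement with made-up numbers:

from shapely.geometry import box

# source coverage geometry (in practice built via shape(source["geometry"]))
geom = box(5.0, 45.0, 15.0, 55.0)

# bounding box advertised by the WMS layer: (west, south, east, north)
geom_bbox = box(5.0, 45.0, 10.0, 55.0)

# percentage of the source geometry lying outside the layer bbox
outside = geom.difference(geom_bbox).area / geom.area * 100.0
print(round(outside, 2))  # -> 50.0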
Example #47
0
def main():
    # Define the two coordinate systems with different ellipses.
    wgs84 = ccrs.PlateCarree(globe=ccrs.Globe(datum='WGS84', ellipse='WGS84'))
    sphere = ccrs.PlateCarree(
        globe=ccrs.Globe(datum='WGS84', ellipse='sphere'))

    # Define the coordinate system of the data we have from Natural Earth and
    # acquire the 1:10m physical coastline shapefile.
    geodetic = ccrs.Geodetic(globe=ccrs.Globe(datum='WGS84'))
    dataset = cfeature.NaturalEarthFeature(category='physical',
                                           name='coastline',
                                           scale='10m')

    # Create a Stamen map tiler instance, and use its CRS for the GeoAxes.
    tiler = Stamen('terrain-background')
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1, projection=tiler.crs)
    ax.set_title('The effect of incorrectly referencing the Solomon Islands')

    # Pick the area of interest. In our case, roughly the Solomon Islands, and
    # get hold of the coastlines for that area.
    extent = [155, 163, -11.5, -6]
    ax.set_extent(extent, geodetic)
    geoms = list(dataset.intersecting_geometries(extent))

    # Add the Stamen aerial imagery at zoom level 7.
    ax.add_image(tiler, 7)

    # Transform the geodetic coordinates of the coastlines into the two
    # projections of differing ellipses.
    wgs84_geoms = [
        geom_transform(transform_fn_factory(wgs84, geodetic), geom)
        for geom in geoms
    ]
    sphere_geoms = [
        geom_transform(transform_fn_factory(sphere, geodetic), geom)
        for geom in geoms
    ]

    # Using these differently referenced geometries, assume that they are
    # both referenced to WGS84.
    ax.add_geometries(wgs84_geoms, wgs84, edgecolor='white', facecolor='none')
    ax.add_geometries(sphere_geoms, wgs84, edgecolor='gray', facecolor='none')

    # Create a legend for the coastlines.
    legend_artists = [
        Line([0], [0], color=color, linewidth=3) for color in ('white', 'gray')
    ]
    legend_texts = ['Correct ellipse\n(WGS84)', 'Incorrect ellipse\n(sphere)']
    legend = ax.legend(legend_artists,
                       legend_texts,
                       fancybox=True,
                       loc='lower left',
                       framealpha=0.75)
    legend.legendPatch.set_facecolor('wheat')

    # Create an inset GeoAxes showing the location of the Solomon Islands.
    sub_ax = fig.add_axes([0.7, 0.625, 0.2, 0.2],
                          projection=ccrs.PlateCarree())
    sub_ax.set_extent([110, 180, -50, 10], geodetic)

    # Make a nice border around the inset axes.
    effect = Stroke(linewidth=4, foreground='wheat', alpha=0.5)
    sub_ax.outline_patch.set_path_effects([effect])

    # Add the land, coastlines and the extent of the Solomon Islands.
    sub_ax.add_feature(cfeature.LAND)
    sub_ax.coastlines()
    extent_box = sgeom.box(extent[0], extent[2], extent[1], extent[3])
    sub_ax.add_geometries([extent_box],
                          ccrs.PlateCarree(),
                          facecolor='none',
                          edgecolor='blue',
                          linewidth=2)

    plt.show()
Example #48
0
        mec='k')
ax.axis(zoom)

leg_ax.legend(loc='upper left',
              frameon=False,
              bbox_to_anchor=[0, 0.90],
              fontsize=9,
              handles=ax.lines)

plot_wkb.plot_wkb(sfe_geom.intersection(clip_poly),
                  ax=overview_ax,
                  facecolor='#2d74ad',
                  edgecolor='#2d74ad',
                  lw=0.2)
if overview_ax2:
    rect = geometry.box(zoom2[0], zoom2[2], zoom2[1], zoom2[3]).buffer(5000)
    plot_wkb.plot_wkb(sfe_geom.intersection(rect),
                      ax=overview_ax2,
                      facecolor='#2d74ad',
                      edgecolor='#2d74ad',
                      lw=0.2)

for ov_ax in [overview_ax, overview_ax2]:
    ov_ax.axis('equal')
    ov_ax.xaxis.set_visible(0)
    ov_ax.yaxis.set_visible(0)

x = 0.5 * (zoom[0] + zoom[1])
y = 0.5 * (zoom[2] + zoom[3])
# overview_ax.plot( [x], [y], 'r*',ms=7)
Example #49
0
def search(dataset,
           node,
           lat=None,
           lng=None,
           distance=100,
           ll=None,
           ur=None,
           start_date=None,
           end_date=None,
           where=None,
           max_results=50000,
           starting_number=1,
           sort_order="DESC",
           api_key=None):
    """
    :param dataset:

    :param node:

    :param lat:

    :param lng:

    :param ll:

    :param distance:

    :param ur:

    :param start_date:

    :param end_date:

    :param where:
        Specify additional search criteria

    :param max_results:

    :param starting_number:

    :param sort_order:

    :param api_key:
        API key is not required.
    """

    payload = {
        "datasetName": dataset,
        "node": node,
        "apiKey": api_key,
    }

    # Latitude and longitude take precedence over ll and ur
    if lat and lng:

        try:
            import pyproj
            from shapely import geometry
        except ImportError:
            raise USGSDependencyRequired(
                "Shapely and PyProj are required for spatial searches.")

        prj = pyproj.Proj(proj='aeqd', lat_0=lat, lon_0=lng)
        half_distance = 0.5 * distance
        box = geometry.box(-half_distance, -half_distance, half_distance,
                           half_distance)

        lngs, lats = prj(*box.exterior.xy, inverse=True)

        ll = {"longitude": min(*lngs), "latitude": min(*lats)}
        ur = {"longitude": max(*lngs), "latitude": max(*lats)}

    if ll and ur:
        payload["lowerLeft"] = ll
        payload["upperRight"] = ur

    if start_date:
        payload["startDate"] = start_date

    if end_date:
        payload["endDate"] = end_date

    if where:

        # TODO: Support more than AND key/value equality queries
        # usgs search --node EE LANDSAT_8_C1 --start-date 20170410 --end-date 20170411 --where wrs-row 032 | jq ""
        # LC81810322017101LGN00
        payload["additionalCriteria"] = {
            "filterType":
            "and",
            "childFilters": [{
                "filterType": "value",
                "fieldId": field_id,
                "value": value,
                "operand": "="
            } for field_id, value in where.items()]
        }

    if max_results:
        payload["maxResults"] = max_results

    if starting_number:
        payload["startingNumber"] = starting_number

    if sort_order:
        payload["sortOrder"] = sort_order

    return json.dumps(payload)
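# A minimal usage sketch for search() above. The dataset/node strings are taken from the
# CLI comment in this example, and the coordinates and dates are illustrative; the function
# only builds and returns the JSON payload (it does not send the request itself).
payload_json = search(
    dataset="LANDSAT_8_C1",   # assumed dataset name
    node="EE",                # assumed catalog node
    lat=41.88, lng=-87.64,    # illustrative point; requires shapely and pyproj
    distance=5000,            # builds a ~5 km square box around the point
    start_date="20170410",
    end_date="20170411",
)
print(payload_json)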
    '''
    p_coll_circ = circ_coll_prob(car, obst, obst_mu, obst_cov) 

    if p_coll_circ > circ_thresh:
        return total_coll_prob(car, obst, 
                               obst_mu, obst_cov, 
                               gamma_mu, gamma_sd, delta, n_intervals)
    else:
        return p_coll_circ
            
    
if __name__ == "__main__":
    # Test it
    plt.close('all')
    
    obstacle = geom.box(0,0,2,1)
    obstacle = affinity.translate(obstacle, -0.5, -0.5) # shift so back axle at origin
    
    car = geom.box(0,0,2,1)
    car = affinity.translate(car, -0.5, -0.5) # shift so back axle at origin
    
        
    plt.figure()
 
    # Make a rotated covariance matrix
    obst_cov = np.array([[2e-1,0],[0,1e-1]])
    theta = -np.pi/4
    R = lambda theta: np.array([[np.cos(theta), -np.sin(theta)],
                      [np.sin(theta),  np.cos(theta)]])
    obst_cov = np.dot(np.dot(R(theta), obst_cov), R(theta).T)
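    # Sanity check of the covariance rotation above: conjugating by a rotation matrix
    # (R @ cov @ R.T) re-orients the error ellipse but leaves the eigenvalues unchanged.
    # Illustrative check using the same numbers as the original diagonal matrix:
    rotated_check = np.dot(np.dot(R(theta), np.array([[2e-1, 0], [0, 1e-1]])), R(theta).T)
    print(np.linalg.eigvalsh(rotated_check))  # still [0.1, 0.2]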
    
Example #51
0
def load_oh_sections(gdb_path, layer, datastore):
    dml = """
    INSERT INTO landgrid.ohio_section (SUBDIV_NM, TWP, TNS, RGE, REW, SEC, 
                                       QTR_TWP, ALLOTMENT, TRACT, LOT, DIVISION, 
                                       FRACTION, COUNTY, TOWNSHIP, SURVEY_TYP, 
                                       ObjectID, VMSLOT, OTHER_SUB, Shape_Length, 
                                       Shape_Area, State_Name, geobounds, shape)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
            %s, %s, %s, %s, %s, ST_GeomFromText(%s, 4326));
    """

    try:
        with fiona.open(gdb_path, layer=layer) as src:

            datastore.execute_dml('TRUNCATE TABLE landgrid.ohio_section;')
            # restart: clear the table so the load starts from scratch

            total = len(src)
            logger.info(f"Starting load of {layer}: {total} sections...")
            # logger.info(f"Schema: {src.schema}")

            for i, rec in enumerate(src):

                poly = shape(rec['geometry'])
                if not poly.is_valid:
                    logger.warning(
                        f"Cleaning {rec['properties']['TOWNSHIP']}...")

                    clean = poly.buffer(0.0)
                    assert clean.is_valid, 'Invalid Polygon!'
                    assert clean.geom_type == 'Polygon' or clean.geom_type == 'MultiPolygon', \
                        f'{clean.geom_type} is not a Polygon!'
                    poly = clean

                bbox = box(*poly.bounds)
                g_json = geojson.dumps(mapping(bbox), sort_keys=True)

                datastore.write_record(
                    dml,
                    (
                        rec['properties']['SUBDIV_NM'],
                        rec['properties']['TWP'],
                        rec['properties']['TNS'],
                        rec['properties']['RGE'],
                        rec['properties']['REW'],
                        rec['properties']['SEC'],
                        rec['properties']['QTR_TWP'],
                        rec['properties']['ALLOTMENT'],
                        rec['properties']['TRACT'],
                        rec['properties']['LOT'],
                        rec['properties']['DIVISION'],
                        rec['properties']['FRACTION'],
                        rec['properties']['COUNTY'],
                        rec['properties']['TOWNSHIP'],
                        rec['properties']['SURVEY_TYP'],
                        rec['properties']['ObjectID'],
                        rec['properties']['VMSLOT'],
                        rec['properties']['OTHER_SUB'],
                        rec['properties']['Shape_Length'],
                        rec['properties']['Shape_Area'],
                        'Ohio',  # TMP
                        g_json,
                        poly.wkt))

                if i % 5000 == 0:
                    logger.info(
                        f"{round(i / total * 100, 2)}%: {i} of {total}: {rec['properties']['TOWNSHIP']}"
                    )

            datastore.batch_commit()

            logger.info(f"Completed {layer} load.")

    except Exception as e:
        # datastore.rollback()
        logger.exception("Error processing OH Sections.", e)
        raise e
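# The loader above stores each feature twice: the full geometry as WKT (for ST_GeomFromText)
# and its bounding box as GeoJSON. The envelope step in isolation, using an illustrative
# polygon (a sketch, not part of the original loader):
from shapely.geometry import Polygon, box, mapping
import geojson

demo_poly = Polygon([(0, 0), (2, 0), (2, 1), (0, 1)])
demo_bbox = box(*demo_poly.bounds)                        # axis-aligned envelope of the feature
print(geojson.dumps(mapping(demo_bbox), sort_keys=True))  # GeoJSON for the geobounds column
print(demo_poly.wkt)                                      # WKT for the shape column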
Example #52
0
def grid():
    raw_query_params = request.args.copy()

    buff = request.args.get('buffer', 100)
    
    resolution = request.args.get('resolution')
    if not resolution:
        resolution = 500
    else:
        del raw_query_params['resolution']
    
    center = request.args.getlist('center[]')
    if not center:
        center = [41.880517,-87.644061]
    else:
        del raw_query_params['center[]']
    location_geom = request.args.get('location_geom__within')

    if raw_query_params.get('buffer'):
        del raw_query_params['buffer']

    agg, datatype, queries = parse_join_query(raw_query_params)

    size_x, size_y = getSizeInDegrees(float(resolution), float(center[0]))
    if location_geom:
        location_geom = json.loads(location_geom)['geometry']
        if location_geom['type'] == 'LineString':
            shape = asShape(location_geom)
            lat = shape.centroid.y
            # 100 meters by default
            x, y = getSizeInDegrees(int(buff), lat)
            size_x, size_y = getSizeInDegrees(50, lat)
            location_geom = shape.buffer(y).__geo_interface__
        location_geom['crs'] = {"type":"name","properties":{"name":"EPSG:4326"}}
    mt = MasterTable.__table__
    valid_query, base_clauses, resp, status_code = make_query(mt, queries['base'])

    if valid_query:
        base_query = session.query(func.count(mt.c.dataset_row_id), 
                func.ST_SnapToGrid(mt.c.location_geom, size_x, size_y))
        dname = raw_query_params['dataset_name']
        dataset = Table('dat_%s' % dname, Base.metadata,
            autoload=True, autoload_with=engine,
            extend_existing=True)
        valid_query, detail_clauses, resp, status_code = make_query(dataset, queries['detail'])
        if valid_query:
            pk = [p.name for p in dataset.primary_key][0]
            base_query = base_query.join(dataset, mt.c.dataset_row_id == dataset.c[pk])
            for clause in base_clauses:
                base_query = base_query.filter(clause)
            for clause in detail_clauses:
                base_query = base_query.filter(clause)

            base_query = base_query.group_by(func.ST_SnapToGrid(mt.c.location_geom, size_x, size_y))
            values = [d for d in base_query.all()]
            resp = {'type': 'FeatureCollection', 'features': []}
            for value in values:
                d = {
                    'type': 'Feature', 
                    'properties': {
                        'count': value[0], 
                    },
                }
                if value[1]:
                    pt = loads(value[1].decode('hex'))
                    # pt.x is longitude and pt.y latitude, so these are the
                    # west/south and east/north edges of the grid cell
                    west, south = (pt.x - (size_x / 2)), (pt.y - (size_y / 2))
                    east, north = (pt.x + (size_x / 2)), (pt.y + (size_y / 2))
                    d['geometry'] = box(west, south, east, north).__geo_interface__
                
                resp['features'].append(d)
    
    resp = make_response(json.dumps(resp, default=dthandler), status_code)
    resp.headers['Content-Type'] = 'application/json'
    return resp
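# getSizeInDegrees() is not shown in this example; a common implementation converts a
# distance in meters into (lon, lat) degree offsets at a given latitude. The helper below
# is only an assumption about what it might look like, not the project's actual code:
import math

def get_size_in_degrees_sketch(meters, latitude):
    """Approximate (x, y) size in degrees for a distance in meters at a latitude."""
    meters_per_degree_lat = 111320.0                    # rough average
    size_y = meters / meters_per_degree_lat             # degrees of latitude
    size_x = size_y / math.cos(math.radians(latitude))  # degrees of longitude widen toward the poles
    return size_x, size_y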
Example #53
0
def load_sections(gdb_path, layer_queue, datastore):
    dml = """
    INSERT INTO landgrid.plss_section (StateID, StateAPI, TWPCODE,
                                        SECCODE, MER, MST, TWP, THALF,
                                        TNS, RGE, RHALF, REW, SEC,
                                        Shape_Length, Shape_Area, 
                                        State_Name, geobounds, shape)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
            %s, %s, ST_GeomFromText(%s, 4326));
    """

    try:

        datastore.execute_dml('TRUNCATE TABLE landgrid.plss_section;')
        # restart: clear the table so the load starts from scratch

        for ts in layer_queue:
            with fiona.open(gdb_path, layer=ts.layer) as src:

                total = len(src)
                logger.info(
                    f"Starting load of {ts.layer}: {total} sections...")
                # logger.info(f"Schema: {src.schema}")

                for i, rec in enumerate(src):

                    try:
                        poly = shape(rec['geometry'])
                    except Exception as e:
                        logger.error(
                            f"Error reading {rec['properties']['SECCODE']}: {rec['geometry']}"
                        )
                        logger.exception("Unknown error", e)
                        continue

                    if not poly.is_valid:
                        logger.warning(
                            f"Cleaning {rec['properties']['SECCODE']} [{ts.layer}]..."
                        )

                        clean = poly.buffer(0.0)
                        assert clean.is_valid, 'Invalid Polygon!'
                        assert clean.geom_type == 'MultiPolygon' or clean.geom_type == 'Polygon', \
                            f'{clean.geom_type} is not a Polygon!'
                        poly = clean

                    bbox = box(*poly.bounds)
                    g_json = geojson.dumps(mapping(bbox), sort_keys=True)

                    datastore.write_record(
                        dml,
                        (
                            rec['properties']['StateID'],
                            rec['properties']['StateAPI'],
                            rec['properties']['TWPCODE'],
                            rec['properties']['SECCODE'],
                            rec['properties']['MER'],
                            rec['properties']['MST'],
                            rec['properties']['TWP'],
                            rec['properties']['THALF'],
                            rec['properties']['TNS'],
                            rec['properties']['RGE'],
                            rec['properties']['RHALF'],
                            rec['properties']['REW'],
                            rec['properties']['SEC'],
                            rec['properties']['Shape_Length'],
                            rec['properties']['Shape_Area'],
                            ts.state,  # TMP
                            g_json,
                            poly.wkt))

                    if i % 5000 == 0:
                        logger.info(
                            f"{round(i / total * 100, 2)}%: {i} of {total} [{ts.layer}]"
                        )

                datastore.batch_commit()

                logger.info(f"Completed {ts.layer} load.")

    except Exception as e:
        # datastore.rollback()
        logger.exception(f"Error processing {rec['properties']['SECCODE']}", e)
        raise e
def save_patches(slice_no, slice_length, pickled_files_path, dg_path, struct_locs_landuse_path, output_path, buff_radius):
    # print("Reading building locations file appended with land cover information", flush=True)
    # dll = pd.read_pickle(struct_locs_landuse_path + s_l_file)
    # print("Read the locs + landuse file", flush=True)
    print("***********************************************************", flush=True)
    print("Figuring which folders to process based on slicing input", flush=True)
    list_of_folders = glob.glob(pickled_files_path + "*")
    total_length = len(list_of_folders)
    assert slice_length<=total_length, "Choose smaller slice length"
    print("Total number of available folders: {}".format(total_length), flush=True)
    folders_at_a_time = total_length//slice_length
    print("Number of folders to be read at a time: {}".format(folders_at_a_time), flush=True)
    if slice_no == slice_length-1:
        # maybe here +1 is not needed. ***** CHECK ********
        folders_to_process = list_of_folders[slice_no*folders_at_a_time : total_length+1]
    else:
        folders_to_process = list_of_folders[slice_no*folders_at_a_time : (slice_no+1)*folders_at_a_time]
    print("Folders to be processed on one machine: {}".format(folders_to_process), flush=True)
    print("Number of folders: {}".format(len(folders_to_process)), flush=True)

    for pck_file in folders_to_process:
        print("##############################################################################", flush=True)
        foldername = pck_file.split('/')[6].split('_pickle_file')[0] #6 for laptop, 5 for gypsum?
        print("Reading pickle file for folder {}".format(foldername), flush=True)
        df = pd.read_pickle(pck_file)
        print("Pickle file read!", flush=True)
        # df['ct_array'] = df.struct_locs.apply(lambda x: len(x))
        # print("Created struct_locs array length column", flush=True)
        # df = df[(df.ct_array != 0)]
        # print("Filtered dataframe by removing image names without structures i.e. with count=0", flush=True)
        # print("Checking if dataframe is empty after filteration", flush=True)
        # if len(df)==0:
        #     print("Dataframe is empty. Moving to next folder.", flush=True)
        #     continue
        print("Dataframe is not empty. Continuing the process!", flush=True)
        df = df.groupby(['file_name']).struct_locs.apply(lambda x: itertools.chain(*x)).reset_index()
        print("Created iter objects for locations in every image", flush=True)
        print("###################################", flush=True)
        print("Folder Name = {}".format(foldername), flush=True)
        print("Total number of files = {}".format(df.file_name.nunique()), flush=True)
        print("###################################", flush=True)

        file_counter = 0
        for fname in df.file_name.unique():
            print("------------------------------------------------------------------", flush=True)
            print("{} - Processing satellite image: {}".format(file_counter, fname), flush=True)
            print('{} : Processing satellite image'.format(time.strftime('%m/%d/%Y %H:%M:%S',time.localtime())))
            filename = fname.split("/")[1]
            sat_img_path = glob.glob(dg_path + foldername + "/" + filename)[0]
            print("Sat image path extracted", flush=True)
            sat_img = cv2.imread(sat_img_path)
            print("Cv2 read!", flush=True)
            sat_img_rast = rasterio.open(sat_img_path)
            print("rasterio read!", flush=True)
            db = df[(df.file_name == fname)]
            # An itertools.chain object can only be consumed once, so
            # we save its contents in a list and iterate over that instead
            iter_list = list(db.struct_locs.values[0])
            print("Total buildings in image: {}".format(len(iter_list)), flush=True)
            print("Beginning patch extraction", flush=True)
            # create a box covering the bounds of the raster
            bound = sat_img_rast.bounds
            img_box = box(bound.left, bound.bottom, bound.right, bound.top)
            # reduce the boundary of the box in order to avoid
            # errors when the points are selected near the edge of the image
            # and the negative patch length crosses the image.
            # boundary buffer radius will be equal to radius of
            # patch (16m or 32pixels)
            boundary_buff = buff_radius/2
            img_box = box(bound.left+boundary_buff, bound.bottom+boundary_buff, bound.right-boundary_buff, bound.top-boundary_buff)
            print("image box cropped at the edges for buffering purposes")
            # if an image contains no buildings, then the complete image can
            # be used for extracting negative patches.
            # In that case, we won't need cascaded union
            if len(iter_list) !=0:
                print("Proceeding with cascaded union formation")
                # create point geoms for using in cascaded_union
                geom = []
                for k in iter_list:
                    if k != []:  # skip empty entries to avoid "not enough values to unpack"
                        geom.append(Point(k[0], k[1]))
                # geom = [Point(k[0],k[1]) for k in iter_list]
                # create buffers around each point
                buff = [x.buffer(buff_radius, cap_style=3) for x in geom]
                # create cascaded_union object
                casc_union = cascaded_union(buff)
                print("Cascaded union is ready")
                # extract portion of box that doesn't intersect with cascaded union
                # this shape should not contain buildings
                if casc_union.intersects(img_box):
                    nobuild_poly = img_box.difference(casc_union)
                else:
                    # none of the building buffers fall inside the cropped box,
                    # so the whole box is available for negative patches
                    nobuild_poly = img_box
            else:
                print("No buildings in image. Using complete image for extracting negative patches")
                nobuild_poly = img_box

            # create a list of random geo-spatial points that lie inside the no-building polygon.
            # Approx 1500 patches per image (127 folders x 64 images per folder)
            # will give us 12 million negative patches
            print('{} : Negative Poly is ready'.format(time.strftime('%m/%d/%Y %H:%M:%S',time.localtime())))
            print("Extracting 1500 random non-building points inside the image file")
            nobuild_list = generate_random_spatialpoints(1500, nobuild_poly)
            print('{} : Negative Points extracted'.format(time.strftime('%m/%d/%Y %H:%M:%S',time.localtime())))

            locs_counter = 0
            for p in nobuild_list:
                print("=================================", flush=True)
                x, y = p
                print("{} - Obtained (x,y) coordinates: {}".format(locs_counter, p), flush=True)
                img_name = foldername + "_z_" + filename.split('.')[0] + "_z_" + str(x) + "_z_" + str(y) + "_z_" +"nobuild"
                print("Sending locs for a point:{} for patch extraction".format(p), flush=True)
                patch = read_and_extract(sat_img, sat_img_rast, x, y, img_name, output_path)
                locs_counter = locs_counter + 1
            file_counter = file_counter + 1
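# generate_random_spatialpoints() and read_and_extract() are defined elsewhere; a minimal
# rejection-sampling sketch of the former, matching the (count, polygon) call used above
# (an assumption about the original helper, not its actual implementation):
import random
from shapely.geometry import Point

def generate_random_spatialpoints_sketch(n_points, polygon):
    """Sample n_points (x, y) tuples uniformly from inside `polygon`."""
    minx, miny, maxx, maxy = polygon.bounds
    points = []
    while len(points) < n_points:
        candidate = Point(random.uniform(minx, maxx), random.uniform(miny, maxy))
        if polygon.contains(candidate):
            points.append((candidate.x, candidate.y))
    return points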
class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for common_obs_spatial data"""

    datatype = "common_obs_spatial"

    envelope = box(-97.0, 41.0, -96.0, 42.0)
    """Default request area (box around KOAX)"""
    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier("country", ["US", "CN"])
        self.runLocationsTest(req)

    def testGetIdentifierValues(self):
        self.runGetIdValuesTest(['country'])

    @unittest.skip('avoid EDEX error')
    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    @unittest.skip('avoid EDEX error')
    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def testGetGeometryData(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(self.envelope)
        req.setParameters("name", "stationid")
        self.runGeometryDataTest(req)

    def testRequestingTimesThrowsTimeAgnosticDataException(self):
        req = DAL.newDataRequest(self.datatype)
        self.runTimeAgnosticTest(req)

    def _runConstraintTest(self, key, operator, value):
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setParameters('catalogtype', 'elevation', 'state')
        return self.runGeometryDataTest(req)

    def testGetDataWithEqualsString(self):
        geometryData = self._runConstraintTest('state', '=', 'NE')
        for record in geometryData:
            self.assertEqual(record.getString('state'), 'NE')

    def testGetDataWithEqualsUnicode(self):
        geometryData = self._runConstraintTest('state', '=', u'NE')
        for record in geometryData:
            self.assertEqual(record.getString('state'), 'NE')

    def testGetDataWithEqualsInt(self):
        geometryData = self._runConstraintTest('catalogtype', '=', 32)
        for record in geometryData:
            self.assertEqual(record.getNumber('catalogtype'), 32)

    def testGetDataWithEqualsLong(self):
        geometryData = self._runConstraintTest('elevation', '=', 0L)
        for record in geometryData:
            self.assertEqual(record.getNumber('elevation'), 0)

    # No float test since there are no float identifiers available. Attempting
    # to filter a non-float identifier on a float value raises an exception.

    def testGetDataWithEqualsNone(self):
        geometryData = self._runConstraintTest('state', '=', None)
        for record in geometryData:
            self.assertEqual(record.getType('state'), 'NULL')

    def testGetDataWithNotEquals(self):
        geometryData = self._runConstraintTest('state', '!=', 'NE')
        for record in geometryData:
            self.assertNotEqual(record.getString('state'), 'NE')

    def testGetDataWithNotEqualsNone(self):
        geometryData = self._runConstraintTest('state', '!=', None)
        for record in geometryData:
            self.assertNotEqual(record.getType('state'), 'NULL')

    def testGetDataWithGreaterThan(self):
        geometryData = self._runConstraintTest('elevation', '>', 500)
        for record in geometryData:
            self.assertGreater(record.getNumber('elevation'), 500)

    def testGetDataWithLessThan(self):
        geometryData = self._runConstraintTest('elevation', '<', 100)
        for record in geometryData:
            self.assertLess(record.getNumber('elevation'), 100)

    def testGetDataWithGreaterThanEquals(self):
        geometryData = self._runConstraintTest('elevation', '>=', 500)
        for record in geometryData:
            self.assertGreaterEqual(record.getNumber('elevation'), 500)

    def testGetDataWithLessThanEquals(self):
        geometryData = self._runConstraintTest('elevation', '<=', 100)
        for record in geometryData:
            self.assertLessEqual(record.getNumber('elevation'), 100)

    def testGetDataWithInTuple(self):
        collection = ('NE', 'TX')
        geometryData = self._runConstraintTest('state', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getString('state'), collection)

    def testGetDataWithInList(self):
        collection = ['NE', 'TX']
        geometryData = self._runConstraintTest('state', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getString('state'), collection)

    def testGetDataWithInGenerator(self):
        collection = ('NE', 'TX')
        generator = (item for item in collection)
        geometryData = self._runConstraintTest('state', 'in', generator)
        for record in geometryData:
            self.assertIn(record.getString('state'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('state', 'junk', 'NE')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('state', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('state', 'in', [])
    def plane_cluster(self, cloud_data):
        """Clustering the exported plane data

            Finding clustered points in the data extracted as planes.

        Args:
            cloud_data: Pointcloud data extracted by plane
        Returns:
            new_cloud_data: Clustered PointCloud data list
        """
        max_size = cloud_data.size  # The number of points included in plane
        min_size = 100  # Minimum size of point included in each result of clustering
        new_data_st = list()
        count = 0
        check_count = 0
        while True:

            if cloud_data.size < min_size or count > 10:
                break
            tree = cloud_data.make_kdtree()
            segment = cloud_data.make_EuclideanClusterExtraction()
            segment.set_ClusterTolerance(0.1)  # distance of clustering
            segment.set_MinClusterSize(min_size)
            segment.set_MaxClusterSize(max_size)
            segment.set_SearchMethod(tree)
            cluster_indices = segment.Extract()

            if len(cluster_indices) != 0:
                inliers = cloud_data.extract(
                    cluster_indices[0],
                    negative=False)  # Save all the inliers as a point cloud
                outliers = cloud_data.extract(
                    cluster_indices[0],
                    negative=True)  # Save all the outliers as a point cloud.

                if inliers.size >= min_size:
                    inliers_p, outliers_p, coeff_p = self.do_plane_ransac(
                        inliers)

                    bbox_info = PointCloudUtils.get_range(
                        inliers
                    )  # Getting the bounding box information [[min_x, min_y, min_z], [max_x, max_y, max_z]]

                    # Keep clusters whose vertical (z) extent is greater than 1.0
                    check_height = True if math.fabs(
                        bbox_info[1][2] - bbox_info[0][2]) > 1.0 else False
                    check_count += 1

                    m_minX = bbox_info[0][0]
                    m_minY = bbox_info[0][1]
                    m_maxX = bbox_info[1][0]
                    m_maxY = bbox_info[1][1]

                    make_box = box(m_minX, m_minY, m_maxX, m_maxY)
                    # If the extent check failed, also accept clusters whose base height is >= 1.5
                    if check_height is False:
                        check_height = True if math.fabs(
                            bbox_info[0][2]) >= 1.5 else False
                    # Keep clusters whose 2-D footprint area is greater than 0.1
                    check_area = True if make_box.area > 0.1 else False  # note: not used in the filter below

                    if check_height:
                        # 4 outer points of extracted result of clustering
                        side_points = PointCloudUtils.make_side_line(
                            bbox_info, coeff_p)
                        # Adding the list of points, plane equation and outer points
                        new_data_st.append([inliers, coeff_p, side_points])

                        count = 0
                    cloud_data = outliers
                else:
                    cloud_data = outliers
                    count += 1
            else:
                break
        return new_data_st
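# The footprint filter above builds a 2-D shapely box from the cluster's x/y extent and
# checks its area; the same steps in isolation, with illustrative bounding-box values:
from shapely.geometry import box as make_bbox

bbox_demo = [[0.0, 0.0, 0.2], [0.5, 0.4, 1.6]]  # [[min_x, min_y, min_z], [max_x, max_y, max_z]]
footprint = make_bbox(bbox_demo[0][0], bbox_demo[0][1], bbox_demo[1][0], bbox_demo[1][1])
print(footprint.area)                     # 0.2 -> passes the `> 0.1` area check
print(bbox_demo[1][2] - bbox_demo[0][2])  # 1.4 -> passes the `> 1.0` height check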
Example #57
0
def trend_all():

    srfc = cnst.ERA5_MONTHLY_SRFC_SYNOP  #cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA5_MONTHLY_PL_SYNOP  #cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_mean_5000km2.nc'  #gridsat_WA_-70_monthly_mean_5000km2.nc' #gridsat_WA_-50_monthly_count_-50base.nc' #gridsat_WA_-70_monthly_mean_5000km2.nc'  gridsat_WA_-50_monthly_count

    fpath = cnst.network_data + 'figs/CLOVER/months/ERA5_WA/'

    box = [-18, 30, 0, 25]  #[-18,30,0,25]#  [-18,40,0,25] #

    da = xr.open_dataset(pl)  #xr.open_dataset(pl)
    #da = xr.decode_cf(da)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))
    da2 = xr.open_dataset(srfc)  #xr.open_dataset(srfc)
    #da2 = xr.decode_cf(da2)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]),
                  latitude=slice(box[2], box[3]))
    da3 = xr.open_dataarray(mcs) * 100  #/30*100
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))
    #ipdb.set_trace()
    da = da.isel(time=(da['time.hour'] == 12))
    da2 = da2.isel(time=(da2['time.hour'] == 12))

    lons = da.longitude
    lats = da.latitude

    press = da2['tcwv']
    #press = press[press['time.hour'] == 12]
    #press.values = press.values#*1000
    low_press = 950
    up_press = 650
    mid_press = 700

    q = da['q'].sel(level=slice(low_press - 30, low_press)).mean('level')
    t2d = da2['t2m']

    theta_low = u_met.theta_e(
        low_press,
        da['t'].sel(level=slice(low_press -
                                30, low_press)).mean('level').values - 273.15,
        da['q'].sel(level=slice(low_press -
                                30, low_press)).mean('level').values)
    theta_high = u_met.theta_e(
        mid_press,
        da['t'].sel(level=slice(up_press, mid_press)).mean('level').values -
        273.15,
        da['q'].sel(level=slice(up_press, mid_press)).mean('level').values)
    theta_high_d = u_met.theta(
        mid_press,
        da['t'].sel(level=slice(up_press, mid_press)).mean('level').values -
        273.15)
    theta_low_d = u_met.theta(
        low_press,
        da['t'].sel(level=slice(low_press -
                                30, low_press)).mean('level').values - 273.15)

    # punit = units.Quantity(mid_press, 'hPa')
    # tunit = units.Quantity(da['t'].sel(level=slice(mid_press-30, mid_press)).mean('level').values, 'K')
    # theta_high_d = calc.saturation_equivalent_potential_temperature(punit,tunit)
    #
    # punit = units.Quantity(low_press, 'hPa')
    # tunit = units.Quantity(da['t'].sel(level=slice(low_press-30, low_press)).mean('level').values, 'K')
    # theta_low_d = calc.saturation_equivalent_potential_temperature(punit, tunit)

    theta_diff = (theta_high /
                  theta_low) * 100  #(np.array(theta_high)-273.15) #theta_low -
    theta_diff_d = da2[
        'cape']  ##np.array(theta_low_d) - np.array(theta_high_d)
    #
    theta_e = t2d.copy(deep=True)
    theta_e.name = 'theta'
    theta_e.values = theta_diff

    theta_e = da['r'].sel(level=slice(mid_press - 30, mid_press)).mean(
        'level')  #da2['cape']

    theta_d = t2d.copy(deep=True)
    theta_d.name = 'theta'
    theta_d.values = theta_diff_d

    u600 = da['u'].sel(level=slice(up_press, mid_press)).mean('level')
    v600 = da['v'].sel(level=slice(up_press, mid_press)).mean('level')
    ws600 = u_met.u_v_to_ws_wd(u600, v600)

    u800 = da['u'].sel(level=925)

    v800 = da['v'].sel(level=925)

    shear_u = u600 - u800
    shear_v = v600 - v800
    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    ws_600 = t2d.copy(deep=True)
    ws_600.name = 'ws'

    ws_600.values = ws600[0]

    shear = t2d.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    u6 = shear_u
    v6 = shear_v

    q.values = q.values * 1000

    grid = t2d.salem.grid.regrid(factor=1)
    t2 = t2d  # grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3)  #t2d.salem.lookup_transform(da3['tir']) #

    grid = grid.to_dataset()
    tir = xr.DataArray(tir,
                       coords=[da3['time'], grid['y'], grid['x']],
                       dims=['time', 'latitude', 'longitude'])

    months = [
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12
    ]  #[3,4,5,6,9,10,11]#,4,5,6,9,10,11#,4,5,6,9,10,11,(3,5), (9,11)]#, 10,5,9]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]

    dicm = {}
    dicmean = {}

    for m in months:
        method = 'polyfit'

        if type(m) == int:
            m = [m]

        sig = True

        t2trend, t2mean = calc_trend(t2,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        t2_mean = t2mean.mean(axis=0)

        tirtrend, tirmean = calc_trend(tir,
                                       m,
                                       method=method,
                                       sig=True,
                                       wilks=False)

        tirm_mean = tirmean.mean(axis=0)

        qtrend, qmean = calc_trend(q,
                                   m,
                                   method=method,
                                   sig=sig,
                                   hour=12,
                                   wilks=False)  #hour=12,
        q_mean = qmean.mean(axis=0)

        sheartrend, shearmean = calc_trend(shear,
                                           m,
                                           method=method,
                                           sig=sig,
                                           hour=12,
                                           wilks=False)  #hour=12,
        shear_mean = shearmean.mean(axis=0)

        presstrend, pressmean = calc_trend(press,
                                           m,
                                           method=method,
                                           sig=sig,
                                           hour=12,
                                           wilks=False)  #hour=12,
        press_mean = pressmean.mean(axis=0)

        u6trend, u6mean = calc_trend(u6,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        u6_mean = u6mean.mean(axis=0)
        v6trend, v6mean = calc_trend(v6,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        v6_mean = v6mean.mean(axis=0)

        u8trend, u8mean = calc_trend(u800,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        u8_mean = u8mean.mean(axis=0)
        v8trend, v8mean = calc_trend(v800,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        v8_mean = v8mean.mean(axis=0)

        aej = np.argmin(u6_mean, axis=0)
        itd = np.argmin(np.abs(v8_mean.values), axis=0)

        thetatrend, thetamean = calc_trend(theta_e,
                                           m,
                                           method=method,
                                           sig=sig,
                                           hour=12,
                                           wilks=False)  #hour=12,
        theta_mean = thetamean.mean(axis=0)

        thetatrend_d, thetamean_d = calc_trend(theta_d,
                                               m,
                                               method=method,
                                               sig=sig,
                                               hour=12,
                                               wilks=False)  #hour=12,
        thetad_mean = thetamean_d.mean(axis=0)

        t_da = t2trend * 10.  # warming over decade
        q_da = qtrend * 10.  # warming over decade
        s_da = sheartrend * 10.  # warming over decade
        u6trend = u6trend * 10
        v6trend = v6trend * 10
        tcwv_da = presstrend * 10
        theta_da = thetatrend * 10
        thetad_da = thetatrend_d * 10
        u8trend = u8trend * 10
        v8trend = v8trend * 10

        tdata = (tirtrend.values * 10. / tirm_mean.values) * 100.
        #ipdb.set_trace()
        tirtrend_out = xr.DataArray(tdata,
                                    coords=[grid['y'], grid['x']],
                                    dims=['latitude', 'longitude'])
        tirtrend_out.name = 'tir'
        #tirmean_out = xr.DataArray(tirm_mean, coords=[grid['y'], grid['x']], dims=['latitude','longitude'])

        dicm[m[0]] = tirtrend_out
        dicmean[m[0]] = tirm_mean

        if len(m) == 1:
            fp = fpath + 'use/ERA5_-70_use_nosig_2003_' + str(
                m[0]).zfill(2) + '.png'
        else:
            fp = fpath + 'use/ERA5_-70_use_nosig_2003_' + str(
                m[0]).zfill(2) + '-' + str(m[1]).zfill(2) + '.png'
        map = shear.salem.get_map(countries=False)
        # Change the country borders
        map.set_shapefile(countries=True, color='grey', linewidths=0.5)
        #map.set_lonlat_contours(interval=0)
        # Change the lon-lat countour setting
        map.set_lonlat_contours(add_ytick_labels=True,
                                interval=5,
                                linewidths=0.01,
                                linestyles='-',
                                colors='white')

        ti_da = t2d.salem.transform(tirtrend_out)

        f = plt.figure(figsize=(15, 8), dpi=300)

        # transform their coordinates to the map reference system and plot the arrows
        xx, yy = map.grid.transform(shear.longitude.values,
                                    shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xaej, yaej = map.grid.transform(u6_mean.longitude.values,
                                        u6_mean.latitude.values[aej.values],
                                        crs=shear.salem.grid.proj)

        xitd, yitd = map.grid.transform(v8_mean.longitude.values,
                                        v8_mean.latitude.values[itd],
                                        crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xx, yy)

        #ipdb.set_trace()
        #Quiver only every 7th grid point
        u = u6trend.values[1::2, 1::2]
        v = v6trend.values[1::2, 1::2]

        #Quiver only every 7th grid point
        uu = u8trend.values[1::2, 1::2]
        vv = v8trend.values[1::2, 1::2]

        #Quiver only every 7th grid point
        um = u8_mean.values[1::2, 1::2]
        vm = v8_mean.values[1::2, 1::2]

        xx = xx[1::2, 1::2]
        yy = yy[1::2, 1::2]

        # pdic = {
        #     'tlin' : (t2_mean.values-273.15).astype(np.float64),
        #     'tmean' : (t2_mean.values-273.15).astype(np.float64),
        #     'qmean' : (q_mean.values).astype(np.float64),
        #     'qlin'  : q_da.values,
        #     'shearlin' : s_da.values,
        #     'u' : u,
        #     'v' : v,
        #     'xx' : xx,
        #     'yy' : yy,
        #     'tirmean' : tirm_mean,
        #
        #
        # }

        # pkl.dump(dicm,
        #          open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_WA.p',
        #               'wb'))

        map.set_shapefile(countries=True, linewidths=1.2, color='grey')

        ax1 = f.add_subplot(221)
        map.set_data(t_da.values, interp='linear')  # interp='linear'

        map.set_contour(s_da.values,
                        interp='linear',
                        levels=[0.4, 0.6, 0.8],
                        colors='k',
                        linewidths=1.8)
        map.set_plot_params(
            levels=[-0.5, -0.4, -0.3, -0.2, 0.2, 0.3, 0.4, 0.5],
            cmap='RdBu_r',
            extend='both')  # levels=np.arange(-0.5,0.51,0.1),
        qu = ax1.quiver(xx, yy, u, v, scale=30, width=0.002, headwidth=4)

        # qk = plt.quiverkey(qu, 0.4, 0.03, 1, '1 m s$^{-1}$decade$^{-1}$',
        #                    labelpos='E', coordinates='figure')

        #map.set_contour((t2_mean.values).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=np.linspace(800,925,8))
        #map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        dic = map.visualize(
            ax=ax1,
            title=
            '2m temperature | 925-600hPa wind shear | 650hPa wind vectors',
            cbar_title=r'K decade$^{-1}$')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=9, fmt='%1.1f')
        qk = plt.quiverkey(qu,
                           0.45,
                           0.52,
                           1,
                           '1 m s$^{-1}$decade$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        ax2 = f.add_subplot(222)
        map.set_data(theta_da.values - 0.2, interp='linear')  # interp='linear'
        map.set_contour(
            (q_da.values).astype(np.float64),
            interp='linear',
            colors='k',
            linewidths=1.8,
            levels=[
                -0.6, -0.4, -0.2, 0.2, 0.4, 0.6
            ])  #[6,8,10,12,14,16] #levels=[-0.6,-0.4,-0.2,0.2,0.4, 0.6],
        map.set_plot_params(
            levels=np.array([-0.4, -0.3, -0.2, -0.1, 0.1, 0.2, 0.3, 0.4]) * 10,
            cmap='RdBu',
            extend='both'
        )  # levels=np.arange(-0.5,0.51,0.1), [-0.6,-0.4,-0.2,0.2,0.4,0.6]

        qu = ax2.quiver(xx, yy, um, vm, scale=100, width=0.002, headwidth=4)
        qk = plt.quiverkey(qu,
                           0.94,
                           0.52,
                           3,
                           '3 m s$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        dic = map.visualize(
            ax=ax2,
            title=r'650hPa RH | 925hPa q | 925hPa wind vectors',
            cbar_title=r'% decade$^{-1}$')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=9, fmt='%1.1f')

        ax3 = f.add_subplot(223)
        map.set_data(tcwv_da.values - 0.05, interp='linear')  # interp='linear'
        map.set_contour(thetad_da.values,
                        interp='linear',
                        levels=np.array([-2, -1.5, -1, -0.5, 0.5, 1, 1.5, 2]) *
                        100,
                        colors='k',
                        linewidths=1.8)

        map.set_plot_params(levels=[
            -1.5, -1, -0.8, -0.6, -0.4, -0.2, 0.2, 0.4, 0.6, 0.8, 1, 1.5
        ],
                            cmap='RdBu',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1)

        qu = ax3.quiver(xx, yy, uu, vv, scale=30, width=0.002, headwidth=4)

        qk = plt.quiverkey(qu,
                           0.45,
                           0.03,
                           1,
                           '1 m s$^{-1}$decade$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        dic = map.visualize(ax=ax3,
                            title=r'TCWV | CAPE | 925hPa wind vectors',
                            cbar_title=r'kg m$^{-2}$ decade$^{-1}$')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=9, fmt='%1.0f')

        ax4 = f.add_subplot(224)
        map.set_contour((tirm_mean),
                        interp='linear',
                        levels=[0.1, 1, 2, 4],
                        colors='k',
                        linewidths=1.5)

        ti_da.values[ti_da.values == 0] = np.nan
        map.set_data(ti_da)  #
        coord = [18, 25, -28, -20]
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        #map.set_geometry(geom, zorder=99, color='darkorange', linewidth=3, linestyle='--', alpha=0.3)

        map.set_plot_params(cmap='viridis',
                            extend='both',
                            levels=np.arange(10, 41,
                                             10))  # levels=np.arange(10,51,10)

        ax4.scatter(xaej, yaej, color='r', s=50, edgecolors='r', linewidths=1)

        #ax4.scatter(xitd, yitd, color='r', s=50, edgecolors='k', linewidths=1)

        dic = map.visualize(ax=ax4,
                            title='-70$^{\circ}$C cloud cover change ',
                            cbar_title='$\%$ decade$^{-1}$')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=9, fmt='%1.1f')

        plt.tight_layout()

        plt.annotate('a)',
                     xy=(0.02, 0.96),
                     xytext=(0, 4),
                     size=13,
                     xycoords=('figure fraction', 'figure fraction'),
                     textcoords='offset points')
        plt.annotate('b)',
                     xy=(0.49, 0.96),
                     xytext=(0, 4),
                     size=13,
                     xycoords=('figure fraction', 'figure fraction'),
                     textcoords='offset points')
        plt.annotate('c)',
                     xy=(0.02, 0.48),
                     xytext=(0, 4),
                     size=13,
                     xycoords=('figure fraction', 'figure fraction'),
                     textcoords='offset points')
        plt.annotate('d)',
                     xy=(0.49, 0.48),
                     xytext=(0, 4),
                     size=13,
                     xycoords=('figure fraction', 'figure fraction'),
                     textcoords='offset points')

        plt.savefig(fp)
        plt.close('all')
Example #58
0
    def __call__(self, sample):
        image, bb = sample['image'], sample['bb']
        # img_size = image.size
        if self.train:
            min_x, max_y, max_x, min_y = bb[0], bb[1], bb[2], bb[3]
        else:
            if self.real:
                ## This is for Homebrew dataset
                ##min_x,max_y,max_x,min_y = bb[0]-25, bb[1]-25, bb[0] + bb[2] + 25 , bb[1] + bb[3] + 25
                ## For others, probably
                min_x, max_y, max_x, min_y = bb[0], bb[
                    1], bb[0] + bb[2], bb[1] + bb[3]
            else:
                min_x, max_y, max_x, min_y = bb[0], bb[1], bb[2], bb[3]

        center_x = (min_x + max_x) / 2
        center_y = (min_y + max_y) / 2
        width, height = max_x - min_x, max_y - min_y

        scaleFactor = max([width, height])
        min_x = int(center_x) - int(scaleFactor) // 2
        min_y = int(center_y) - int(scaleFactor) // 2
        max_x = int(center_x) + int(scaleFactor) // 2
        max_y = int(center_y) + int(scaleFactor) // 2
        ## This is for Homebrew dataset
        ## Image crop works in a way (0, 0, 10, 10) but here the
        ## image coordinates are reversed on the Y-axis and so is the crop.
        sample['image'] = image.crop(box=(min_x, min_y, max_x, max_y))
        sample['orig_image'] = image
        sample['center'] = np.array([center_x, center_y], dtype=np.float32)
        sample['width'] = width
        sample['height'] = height
        ## This scale is used for OKS calculation
        sample['scaleArea'] = np.sqrt(
            np.divide(
                box(min_x, min_y, max_x, max_y).area, sample['scaleArea']))
        #print(sample['scaleArea'])

        w, h = self.out_size
        ## Crop and scale
        sample['crop'] = np.array([min_x, min_y], dtype=np.float32)
        sample['scale'] = np.array([w / scaleFactor, h / scaleFactor],
                                   dtype=np.float32)

        if width != self.out_size[0]:
            sample['image'] = sample['image'].resize((w, h))
        if 'mask' in sample:
            sample['mask'] = sample['mask'].crop(box=(min_x, min_y, max_x,
                                                      max_y)).resize((w, h))
        if 'keypoints' in sample:
            keypoints = sample['keypoints']
            for i in range(keypoints.shape[0]):
                if keypoints[i, 0] < min_x or keypoints[
                        i, 0] > max_x or keypoints[i, 1] < min_y or keypoints[
                            i, 1] > max_y:
                    keypoints[i, :] = [0, 0, 0]
                else:
                    keypoints[i, :2] = (keypoints[i, :2] -
                                        sample['crop']) * sample['scale']
            sample['keypoints'] = keypoints

        if 'initial_keypoints' in sample:
            initial_keypoints = sample['initial_keypoints']
            for i in range(initial_keypoints.shape[0]):
                if initial_keypoints[i,0] < min_x or initial_keypoints[i,0] > max_x \
                                or initial_keypoints[i,1] < min_y or initial_keypoints[i,1] > max_y:
                    initial_keypoints[i, :] = [0, 0, 0]
                else:
                    initial_keypoints[
                        i, :2] = (initial_keypoints[i, :2] -
                                  sample['crop']) * sample['scale']

            sample['initial_keypoints'] = initial_keypoints
        sample.pop('bb')
        return sample
Example #59
0
    def __geo_interface__(self):
        return box(*self.bounds).__geo_interface__
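# Any object with a (minx, miny, maxx, maxy) `bounds` attribute can expose itself as a
# GeoJSON-like rectangle this way. A self-contained illustration (the class name is made up,
# and the @property decorator is an assumption about how the method above is meant to be used):
from shapely.geometry import box, shape

class BoundedThing(object):
    def __init__(self, bounds):
        self.bounds = bounds  # (minx, miny, maxx, maxy)

    @property
    def __geo_interface__(self):
        return box(*self.bounds).__geo_interface__

print(shape(BoundedThing((0.0, 0.0, 2.0, 1.0))).wkt)  # a 2 x 1 rectangle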
Example #60
0
# Input raster
fp = os.path.join(data_dir, "p188r018_7t20020529_z34__LV-FIN.tif")

# Output raster
out_tif = os.path.join(data_dir, "Helsinki_masked.tif")

# Read the data
raster = rasterio.open(fp)

# Visualize NIR
show((raster, 4), cmap="terrain")

minx, miny = 24.60, 60.00
maxx, maxy = 25.22, 60.35
bbox = box(minx, miny, maxx, maxy)

# Create a GeoDataFrame
crs_code = pycrs.parser.from_epsg_code(4326).to_proj4()
geo = gpd.GeoDataFrame({"geometry": bbox}, index=[0], crs=crs_code)

geo.plot()

# When masking the data, the vector and the raster need to be in the same coordinate system,
# so project the GeoDataFrame to the raster's CRS
geo = geo.to_crs(crs=raster.crs)

# Extract the geometry of the GeoDataFrame as GeoJSON-like dicts for masking
coords = get_features(geo)
print(coords)
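# get_features() is a small helper that is not shown here; a common way to write it
# (an assumption, not necessarily this example's version) converts the GeoDataFrame
# geometry into the list of GeoJSON-like dicts that rasterio.mask.mask() expects:
import json

def get_features(gdf):
    """Return the geometries of a GeoDataFrame as GeoJSON-like dicts for rasterio."""
    return [json.loads(gdf.to_json())["features"][0]["geometry"]]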