Code example #1
File: fes2.py Project: raymondnijssen/pycsw
def set_spatial_ranking(geometry):
    """Given that we have a spatial query in ogc:Filter we check the type of geometry 
    and set the ranking variables"""

    if util.ranking_enabled:
        if geometry.type in ['Polygon', 'Envelope']:
            util.ranking_pass = True
            util.ranking_query_geometry = geometry.wkt
        elif geometry.type in ['LineString', 'Point']:
            from shapely.geometry.base import BaseGeometry
            from shapely.geometry import box
            from shapely.wkt import loads, dumps
            ls = loads(geometry.wkt)
            b = ls.bounds
            if geometry.type == 'LineString':
                tmp_box = box(b[0], b[1], b[2], b[3])
                tmp_wkt = dumps(tmp_box)
                if tmp_box.area > 0:
                    util.ranking_pass = True
                    util.ranking_query_geometry = tmp_wkt
            elif geometry.type == 'Point':
                tmp_box = box((float(b[0]) - 1.0), (float(b[1]) - 1.0),
                              (float(b[2]) + 1.0), (float(b[3]) + 1.0))
                tmp_wkt = dumps(tmp_box)
                util.ranking_pass = True
                util.ranking_query_geometry = tmp_wkt
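In the LineString/Point branch above, the ranking geometry is simply the WKT of the feature's bounding box. A minimal standalone sketch of that bounds-to-box-to-WKT step, using a hypothetical line (not part of pycsw):

from shapely.geometry import LineString, box
from shapely.wkt import dumps

ls = LineString([(0, 0), (2, 1)])      # hypothetical query geometry
bbox = box(*ls.bounds)                 # bounds = (minx, miny, maxx, maxy)
if bbox.area > 0:                      # a degenerate line would give a zero-area box
    ranking_query_geometry = dumps(bbox)
    print(ranking_query_geometry)      # POLYGON ((2.0... 0.0..., 2.0... 1.0..., ...))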
Code example #2
def odk_geom_to_wkt(coords):
    """Convert geometries in ODK format to WKT."""
    try:
        if coords == '':
            return ''
        coords = coords.replace('\n', '')
        coords = coords.split(';')
        coords = [c.strip() for c in coords]
        if (coords[-1] == ''):
            coords.pop()

        if len(coords) > 1:
            # check for a geoshape taking into account
            # the bug in odk where the second coordinate in a geoshape
            # is the same as the last (first and last should be equal)
            if len(coords) > 3:
                if coords[1] == coords[-1]:  # geom is closed
                    coords.pop()
                    coords.append(coords[0])
            points = []
            for coord in coords:
                coord = coord.split(' ')
                coord = [x for x in coord if x]
                latlng = [float(coord[1]), float(coord[0])]
                points.append(tuple(latlng))
            if (coords[0] != coords[-1] or len(coords) == 2):
                return dumps(LineString(points))
            else:
                return dumps(Polygon(points))
        else:
            latlng = coords[0].split(' ')
            latlng = [x for x in latlng if x]
            return dumps(Point(float(latlng[1]), float(latlng[0])))
    except Exception as e:
        raise InvalidODKGeometryError(e)
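The ODK strings handled above are semicolon-separated "lat lon alt accuracy" tuples, so the conversion mostly amounts to swapping the first two values into WKT's x y (lon lat) order. A minimal sketch of that swap on a hypothetical geotrace string (Shapely only, no ODK-specific imports):

from shapely.geometry import LineString, Point
from shapely.wkt import dumps

odk_trace = "40.71 -74.00 0.0 5.0;40.72 -74.01 0.0 5.0"   # hypothetical "lat lon alt acc" pairs

points = []
for chunk in odk_trace.split(';'):
    lat, lon = chunk.split()[:2]              # ODK stores latitude first
    points.append((float(lon), float(lat)))   # WKT wants x (lon), y (lat)

geom = LineString(points) if len(points) > 1 else Point(points[0])
print(dumps(geom))   # LINESTRING (-74.0... 40.71..., -74.01... 40.72...)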
Code example #3
    def parse(self, filepath):
        log.info('Reallocating tables for regions.')
        self.create_tables()

        log.info('Loading Maricopa MAZ and TAZ region data.')
        parser = shapefile.Reader(filepath)
        regions = []
        count = 0
        n = 1

        log.info('Parsing regions from data.')
        for item in parser:
            poly = Polygon(item.shape.points)
            regions.append(
                (item.record.MAZ_ID_10, item.record.TAZ_2015,
                 item.record.Sq_miles, dumps(poly.centroid), dumps(poly)))

            count += 1
            if count == n:
                log.info(f'Parsing region {count}.')
                n <<= 1

        if count != n >> 1:
            log.info(f'Parsing region {count}.')

        log.info('Writing parsed regions to database.')
        self.database.insert_values('regions', regions, 5)
        self.database.connection.commit()

        log.info('Creating indexes on new tables.')
        self.create_indexes()
Code example #4
def reassign_spatial_geometry(instance):
    coords = list(instance.geometry.coords)
    if type(coords[0]) == float:
        coords = [coords]
    else:
        while (type(coords[0][0]) != float):
            coords = coords[0]
        coords = [list(x) for x in coords]
    for point in coords:
        if point[0] >= -180 and point[0] <= 180:
            return
    while coords[0][0] < -180:
        for point in coords:
            point[0] += 360
    while coords[0][0] > 180:
        for point in coords:
            point[0] -= 360
    geometry = []
    for point in coords:
        latlng = [point[0], point[1]]
        geometry.append(tuple(latlng))
    if len(geometry) > 1:
        if geometry[0] == geometry[-1]:
            instance.geometry = dumps(Polygon(geometry))
        else:
            instance.geometry = dumps(LineString(geometry))
    else:
        instance.geometry = dumps(Point(geometry))
Code example #5
    def _format_geometry(self, coords, geoshape=False):
        if coords == '':
            return ''
        if '\n' in coords:
            coords = coords.replace('\n', '')
        coords = coords.split(';')
        if (coords[-1] == ''):
            coords.pop()
        # fixes bug in geoshape:
        # Geoshape copies the second point, not the first.
        if geoshape:
            coords.pop()
            coords.append(coords[0])

        if len(coords) > 1:
            points = []
            for coord in coords:
                coord = coord.split(' ')
                coord = [x for x in coord if x]
                latlng = [float(coord[1]), float(coord[0])]
                points.append(tuple(latlng))
            if (coords[0] != coords[-1] or len(coords) == 2):
                return dumps(LineString(points))
            else:
                return dumps(Polygon(points))
        else:
            latlng = coords[0].split(' ')
            latlng = [x for x in latlng if x]
            return dumps(Point(float(latlng[1]), float(latlng[0])))
Code example #6
File: models.py Project: mikael19/cadasta-platform
def reassign_spatial_geometry(instance):
    coords = list(instance.geometry.coords)
    if type(coords[0]) == float:
        coords = [coords]
    else:
        while (type(coords[0][0]) != float):
            coords = coords[0]
        coords = [list(x) for x in coords]
    for point in coords:
        if point[0] >= -180 and point[0] <= 180:
            return
    while coords[0][0] < -180:
        for point in coords:
            point[0] += 360
    while coords[0][0] > 180:
        for point in coords:
            point[0] -= 360
    geometry = []
    for point in coords:
        latlng = [point[0], point[1]]
        geometry.append(tuple(latlng))
    if len(geometry) > 1:
        if geometry[0] == geometry[-1]:
            instance.geometry = dumps(Polygon(geometry))
        else:
            instance.geometry = dumps(LineString(geometry))
    else:
        instance.geometry = dumps(Point(geometry))
Code example #7
    def _measure_ring_intensity_around_nucleus(self, image, cfg):
        assert self._ix.any(), "no rows in the filtered dataframe"
        for ix, row in self._mdf[self._ix].iterrows():
            nucl_bnd = shapely.wkt.loads(row["nuc_pix"])
            thickness = float(cfg['rng_thickness'])
            thickness *= self.pix_per_um
            rng_bnd = (
                nucl_bnd.buffer(thickness).difference(nucl_bnd).simplify(
                    self.pix_per_um / 2, preserve_topology=True))
            if rng_bnd.area > 0:
                rng_int = m.integral_over_surface(image, rng_bnd)
                if np.isnan(rng_int): continue
                rng_density = rng_int / rng_bnd.area
            else:
                logger.warning(
                    "Ring polygon with no area!\r\nThickness of ring set to %.2f [pix]"
                    % thickness)
                continue

            logger.debug(
                "ring_around_nucleus on tag '%s' for nucleus id %d = %s" %
                (cfg['tag'].iloc[0], row['id'],
                 m.eng_string(rng_int, si=True, format='%.2f')))
            rng_um = affinity.scale(rng_bnd,
                                    xfact=self.um_per_pix,
                                    yfact=self.um_per_pix,
                                    origin=(0, 0, 0))

            # TODO: scale intensity from pixels^2 to um^2
            self._mdf.loc[ix, 'ring'] = dumps(rng_um, rounding_precision=4)
            self._mdf.loc[ix, 'ring_pix'] = dumps(rng_bnd,
                                                  rounding_precision=1)
            self._mdf.loc[ix, '%s_rng_int' % cfg['tag'].iloc[0]] = int(rng_int)
            self._mdf.loc[ix, '%s_rng_dens' %
                          cfg['tag'].iloc[0]] = int(rng_density)
Code example #8
File: fes.py Project: dodobas/pycsw
def set_spatial_ranking(geometry):
    """Given that we have a spatial query in ogc:Filter we check the type of geometry 
    and set the ranking variables"""
    
    if util.ranking_enabled:
        if geometry.type in ['Polygon', 'Envelope']:
            util.ranking_pass = True
            util.ranking_query_geometry = geometry.wkt
        elif geometry.type in ['LineString', 'Point']:
            from shapely.geometry.base import BaseGeometry
            from shapely.geometry import box
            from shapely.wkt import loads,dumps
            ls = loads(geometry.wkt)
            b = ls.bounds
            if geometry.type == 'LineString':
                tmp_box = box(b[0],b[1],b[2],b[3])
                tmp_wkt = dumps(tmp_box)
                if tmp_box.area > 0:
                    util.ranking_pass = True
                    util.ranking_query_geometry = tmp_wkt
            elif geometry.type == 'Point':
                tmp_box = box((float(b[0])-1.0),(float(b[1])-1.0),(float(b[2])+1.0),(float(b[3])+1.0))
                tmp_wkt = dumps(tmp_box)
                util.ranking_pass = True
                util.ranking_query_geometry = tmp_wkt
Code example #9
def dist_point_multi(point, multi):
    if multi is not None:
        wktMulti = dumps(multi)
        # create geometries from wkt
        Multi = ogr.CreateGeometryFromWkt(wktMulti)
        # transform both geometries to the fittest projection
        if Multi is not None:
            Point = ogr.CreateGeometryFromWkt(dumps(point))
            # transform both geometries to the fittest projection
            source = osr.SpatialReference()
            source.ImportFromEPSG(4326)
            target = osr.SpatialReference()
            target.ImportFromEPSG(26918)

            transform = osr.CoordinateTransformation(source, target)
            Point.Transform(transform)
            # create a line for each point in the first geometry of the polygon
            # initialize
            x0 = Point.GetX()
            y0 = Point.GetY()

            distance1 = []
            for line in Multi:
                for i in range(0, line.GetPointCount() - 1):
                    xi, yi, zi = line.GetPoint(i)
                    ai, bi, ci = line.GetPoint(i + 1)
                    # create line and check length
                    distance1.append(dist(xi, yi, ai, bi, x0, y0))
            return min(distance1)
Code example #10
def parse_regions(database: SqliteUtil, regions_file: str, src_epsg: int,
                  prj_epsg: int):

    log.info('Allocating tables for regions.')
    create_tables(database)

    transformer = Transformer.from_crs(f'epsg:{src_epsg}',
                                       f'epsg:{prj_epsg}',
                                       always_xy=True,
                                       skip_equivalent=True)
    project = transformer.transform

    log.info('Parsing regions from shapefile.')
    parser = shapefile.Reader(regions_file)
    iter_regions = counter(iter(parser), 'Parsing region %s.')
    regions = []
    for item in iter_regions:
        points = (project(*point) for point in item.shape.points)
        polygon = Polygon(points)

        regions.append(
            (item.record.MAZ_ID_10, item.record.TAZ_2015, item.record.Sq_miles,
             dumps(polygon.centroid), dumps(polygon)))

    parser.close()

    log.info('Writing parsed regions to database.')
    database.insert_values('regions', regions, 5)
    database.connection.commit()

    log.info('Creating indexes on new tables.')
    create_indexes(database)
Code example #11
def intersection(args):
    '''
    Reads ply, applies boundingbox and writes matching polygon/cuboid
    '''
    # ./ply-tool.py intersection test_data/house1.ply "POLYGON ((30 10, 80 80, 20 40, 0 10, 30 10))" bla2

    print "Plyfile=", args.plyfile
    print "Boundingbox=", args.boundingbox
    print "Outfile=", args.outfile

    # Open outfile for appending
    outf = open(args.outfile, 'a')

    # Load WKT into shapely polygon
    bbox = Polygon(loads(args.boundingbox))
    #print bbox
    #print bbox.area
    #print bbox.length

    # Reading the PLY file
    ply = PlyData.read(args.plyfile)

    # Loop through polygons
    poly_count = ply['polygon'].count
    for poly_i in range(0,poly_count):
        # List of vertex tuples
        vtcs = ply['vertex'][ply['polygon'][poly_i].tolist()].tolist()
        #print vtcs
        # into shapely Polygon
        poly = Polygon(vtcs)
        #print poly
        # Shapely intersects() does the job nicely
        print "Polygon", poly_i, "intersects=", bbox.intersects(poly)
        if bbox.intersects(poly):
            # to WKT
            polywkt = dumps(poly) + '\n'
            print "Adding to file", polywkt
            outf.write(polywkt)

    # Loop through cuboids
    cube_count = ply['cuboid'].count
    for cub_i in range(0,cube_count):
        # List of vertex tuples
        vtcs = ply['vertex'][ply['cuboid'][cub_i].tolist()].tolist()
        #print vtcs
        # into multipoint for now. should be polyhedron(?) I think but shapely doesn't have that
        mp = MultiPoint(vtcs)
        #print mp
        # Shapely intersects() does the job nicely
        print "Cuboid", cub_i, "intersects=", bbox.intersects(mp)
        if bbox.intersects(mp):
            # to WKT
            mpwkt = dumps(mp) + '\n'
            print "Adding to file", mpwkt
            outf.write(mpwkt)

    #write intermediary file
    outf.close()
Code example #12
File: services.py Project: wayne-abarquez/csitewalk
def get_coordinates_dump(data):
    if 'coordinates' in data and type(data['coordinates']) is dict:
        point = data['coordinates']
        if all(key in point for key in ('lat', 'lng')):
            point = Point(float(point['lng']), float(point['lat']))
            return dumps(point)
    elif all(key in data for key in ('latitude', 'longitude')):
        point = Point(float(data['longitude']), float(data['latitude']))
        return dumps(point)
Code example #13
def geojson_validator(value):
    if value:
        try:
            gjson = json.loads(value)
            shape = asShape(gjson)
            wkt.dumps(shape)
        except:
            raise Invalid(_("Invalid GeoJSON"))
    return value
Code example #14
def geojson_validator(value):
    if value:
        try:
            # accept decoded geojson too
            if isinstance(value, basestring):
                value = json.loads(value)
            shape = asShape(value)
            wkt.dumps(shape)
        except Exception:
            raise Invalid(_("Invalid GeoJSON"))
        # must store as JSON
        return json.dumps(value)
    return value
Code example #15
def geojson_validator(value):
    if value:
        try:
            # accept decoded geojson too
            if isinstance(value, basestring):
                value = json.loads(value)
            shape = asShape(value)
            wkt.dumps(shape)
        except Exception:
            raise Invalid(_("Invalid GeoJSON"))
        # must store as JSON
        return json.dumps(value)
    return value
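Examples #13-#15 validate GeoJSON by building a Shapely geometry from the decoded JSON and forcing a WKT serialization. asShape was removed in Shapely 2.0; the minimal sketch below applies the same check using shapely.geometry.shape (the validator name and inputs are hypothetical):

import json
from shapely import wkt
from shapely.geometry import shape

def looks_like_valid_geojson(value):
    try:
        gjson = json.loads(value) if isinstance(value, str) else value
        wkt.dumps(shape(gjson))   # raises if the structure is not a geometry
        return True
    except Exception:
        return False

print(looks_like_valid_geojson('{"type": "Point", "coordinates": [10.0, 20.0]}'))  # True
print(looks_like_valid_geojson('{"type": "Point"}'))                               # False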
Code example #16
def main():
    db = load_collideoscope_database()

    reproject = partial(pyproj.transform, pyproj.Proj(init='epsg:27700'),
                        pyproj.Proj(init='epsg:3857'))

    meta = {
        'crs': {
            'init': 'epsg:3857'
        },
        'driver': 'GPKG',
        'schema': {
            'geometry': 'LineString',
            'properties': {
                'density': 'float'
            }
        }
    }

    with fiona.open(OUTPUT_PATH, 'w', **meta) as output:
        matches = 0
        for i, geom in enumerate(load_roads(), start=1):
            buffered = geom.buffer(BUFFER, cap_style=CAP_STYLE.flat)
            bbox = box(*buffered.bounds)
            q = """SELECT
                    COUNT(*) AS count
                   FROM incidents
                   WHERE
                    ROWID IN (
                        SELECT ROWID
                        FROM SpatialIndex
                        WHERE f_table_name = 'incidents'
                        AND search_frame = ST_GeomFromText(:bbox, 27700)
                    )
                    AND ST_Contains(ST_GeomFromText(:buffered, 27700), geom)
                   """
            count = db.query(q, bbox=dumps(bbox),
                             buffered=dumps(buffered))[0].count
            if count:
                output.write({
                    'type': 'Feature',
                    'id': '-1',
                    'geometry': mapping(transform(reproject, geom)),
                    'properties': {
                        'density': count / geom.length
                    }
                })
                matches += 1
                if matches % 1000 == 0:
                    log("Found {} features from {} so far".format(matches, i))
        log("Found {} features with incidents.".format(matches))
Code example #17
File: regions.py Project: listenerri/alibirouting
def gen_multiple_desirable_regions_by_grid(pre_file_name,
                                           src_lon,
                                           src_lat,
                                           dst_lon,
                                           dst_lat,
                                           grid_scale=2,
                                           ineq_factor=2,
                                           tolerance=0.5,
                                           strategy=1,
                                           max_ineq_factor=None,
                                           ineq_factor_interval=None,
                                           p_fr=None):
    if p_fr == None:
        print('Forbidden region must be given')
        return None

    if tolerance == 0:
        fr = p_fr
    else:
        fr = p_fr.simplify(tolerance)

    if max_ineq_factor == None:
        max_ineq_factor = ineq_factor
        ineq_factor_interval = 1

    if isinstance(fr, MultiPolygon):
        str_fr = dumps(fr)
    else:
        str_fr = dumps(MultiPolygon([fr]))

    ret_str = gc.gen_desirable_region_by_grid(str_fr, src_lon, src_lat,
                                              dst_lon, dst_lat, grid_scale,
                                              ineq_factor, strategy,
                                              max_ineq_factor,
                                              ineq_factor_interval)

    sps = ret_str.split('|')
    ret_list = []
    for sp in sps:
        if len(sp) < 2:
            continue

        if len(sp) < 20:
            ret_list.append(None)
        else:
            ret_list.append(loads(sp))

    return ret_list
Code example #18
def get_final_annotations(image_id,anns,mx,my):
    keep = []
    discard = []
    building_id = 0
    for item in anns[0]:
        image_id, bid, poly, _, _ = item
        if bid == -1:
            continue
        if poly.intersects(mx) or poly.intersects(my):
            discard.append([item[0], 0, dumps(poly,3), item[3]])
        else:
            keep.append([item[0], building_id, dumps(poly, 3), item[3]])
            building_id += 1

    apoly1 = []
    for item in anns[1]:
        image_id, bid, poly, _, _ = item
        if bid == -1:
            continue
        if (poly.intersects(my)):
            apoly1.append(poly)

    apoly1 = cascaded_union(MultiPolygon(apoly1)).buffer(0)
    if not apoly1.is_empty:
        if apoly1.geom_type == 'Polygon':
            apoly11 = [apoly1]
        else:
            apoly11 = apoly1
        for ap1 in apoly11:
            keep.append([image_id, building_id, dumps(ap1, 3), 1])
            building_id += 1

    apoly2 = []
    for item in anns[2]:
        image_id, bid, poly, _ ,_ = item
        if bid == -1:
            continue
        if (poly.intersects(mx) and not poly.intersects(apoly1)):
            apoly2.append(poly)
    apoly2 = cascaded_union(MultiPolygon(apoly2)).buffer(0)
    if not apoly2.is_empty:
        if apoly2.geom_type == 'Polygon':
            apoly2 = [apoly2]
        for ap2 in apoly2:
            keep.append([image_id, building_id, dumps(ap2, 3), 1])
            building_id += 1

    return keep
Code example #19
File: product.py Project: sportsbitenews/pyIEM
    def process_latlon(self):
        """Parse the segment looking for the 'standard' LAT...LON encoding"""
        data = self.unixtext.replace("\n", " ")
        search = LAT_LON_PREFIX.search(data)
        if search is None:
            return None
        pos = search.start()
        newdata = data[pos + 9:]
        # Go find our next non-digit, non-space character, if we find it, we
        # should truncate our string, this could be improved, I suspect
        search = re.search(r"[^\s0-9]", newdata)
        if search is not None:
            pos2 = search.start()
            newdata = newdata[:pos2]

        poly = str2polygon(newdata)
        if poly is None:
            return None

        # check 1, is the polygon valid?
        if not poly.is_valid:
            self.tp.warnings.append(
                ("LAT...LON polygon is invalid!\n%s") % (poly.exterior.xy, ))
            return
        # check 2, is the exterior ring of the polygon clockwise?
        if poly.exterior.is_ccw:
            self.tp.warnings.append(
                ("LAT...LON polygon exterior is CCW, reversing\n%s") %
                (poly.exterior.xy, ))
            poly = Polygon(
                zip(poly.exterior.xy[0][::-1], poly.exterior.xy[1][::-1]))
        self.giswkt = 'SRID=4326;%s' % (dumps(MultiPolygon([poly]),
                                              rounding_precision=6), )
        return poly
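The CCW check and manual ring reversal above can also be written with shapely.geometry.polygon.orient, which rewinds a ring to a requested orientation. A minimal sketch on a hypothetical triangle (not pyIEM code):

from shapely.geometry import Polygon
from shapely.geometry.polygon import orient

poly = Polygon([(0, 0), (2, 0), (2, 2)])   # counter-clockwise exterior ring
print(poly.exterior.is_ccw)                # True

cw = orient(poly, sign=-1.0)               # negative sign -> clockwise exterior
print(cw.exterior.is_ccw)                  # False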
Code example #20
def __createCSVSummaryFile(chipSummaryList, outputFileName, pixPrecision=2):
    with open(outputFileName, 'w') as csvfile:
        writerTotal = csv.writer(csvfile, delimiter=',', lineterminator='\n')
        writerTotal.writerow(
            ['ImageId', 'BuildingId', 'PolygonWKT_Pix', 'Confidence'])

        # TODO: Add description=createCSVSummaryFile
        for chipSummary in tqdm.tqdm(chipSummaryList,
                                     total=len(chipSummaryList),
                                     desc='createCSVSummaryFile'):
            chipName = chipSummary['chipName']
            geoVectorName = chipSummary['geoVectorName']
            rasterChipDirectory = chipSummary['geotiffPath']
            imageId = chipSummary['imageId']

            buildingList = gT.geoJsonToPixDF(geoVectorName,
                                             rasterName=os.path.join(
                                                 rasterChipDirectory,
                                                 chipName),
                                             affineObject=[],
                                             gdal_geomTransform=[],
                                             pixPrecision=pixPrecision)
            buildingList = gT.explodeGeoPandasFrame(buildingList)

            if len(buildingList) > 0:
                for idx, building in buildingList.iterrows():
                    tmpGeom = dumps(building.geometry,
                                    rounding_precision=pixPrecision)
                    writerTotal.writerow([imageId, idx, tmpGeom, 1])
            else:
                imageId = chipSummary['imageId']
                writerTotal.writerow([imageId, -1, 'POLYGON EMPTY', 1])
Code example #21
def gj2geom(geojson):

    """
    Convert a GeoJSON geometry into an OGR geometry.
    """

    return ogr.CreateGeometryFromWkt(wkt.dumps(shape(geojson)))
Code example #22
def make_grid(xmin, xmax, ymin, ymax, resolution):
    """
        Function to make a regular polygon grid
        spanning over xmin, xmax, ymin, ymax 
        and with a given resolution

        output: geoDataFrame of grid
        """

    nx = np.arange(xmin, xmax, resolution)
    ny = np.arange(ymin, ymax, resolution)

    # create polygon grid
    polygons = []
    for x in nx:
        for y in ny:
            poly = Polygon([(x, y), (x + resolution, y),
                            (x + resolution, y - resolution),
                            (x, y - resolution)])
            # account for precision (necessary to create grid at exact location)
            poly = wkt.loads(wkt.dumps(poly, rounding_precision=2))
            polygons.append(poly)

    # store polygons in geodataframe
    grid = gpd.GeoDataFrame({'geometry': polygons})
    return grid
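The wkt.loads(wkt.dumps(poly, rounding_precision=2)) round trip in make_grid is a common trick for snapping coordinates to a fixed number of decimals. A standalone illustration on a single hypothetical cell:

from shapely import wkt
from shapely.geometry import Polygon

cell = Polygon([(0.123456, 0.1), (0.623456, 0.1),
                (0.623456, -0.4), (0.123456, -0.4)])
snapped = wkt.loads(wkt.dumps(cell, rounding_precision=2))
print(snapped.wkt)   # coordinates are now rounded to 2 decimal places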
Code example #23
def main(opts):
    pattern = loads(open(opts.input, "r").read())
    extent = loads(open(opts.extent, "r").read())

    if not contains.matches(extent.relate(pattern)):
        print "ERROR: pattern must be contained within the extent"
        return

    c = pattern.centroid
    (xs, ys) = extent.boundary.xy
    (minx, maxx, miny, maxy) = (min(xs) - c.x, max(xs) - c.x, min(ys) - c.y, max(ys) - c.y)

    outputFile = open(opts.output, "w")

    geoms = []

    while len(geoms) < opts.number:
        dx = random.uniform(minx, maxx)
        dy = random.uniform(miny, maxy)

        geom = translate(pattern, xoff=dx, yoff=dy)

        if contains.matches(extent.relate(geom)):
            # Check that it is within the extent
            overlap = False
            for g in geoms:
                if intersects.matches(g.relate(geom)):
                    overlap = True
            if overlap == False:
                geoms.append(geom)

    for geom in geoms:
        outputFile.write(dumps(geom) + "\n")
    outputFile.close()
Code example #24
File: find_stereo_pairs.py Project: nasa-jpl/sstmp
def bounding_box(*, west: float, east: float, south: float, north: float, plot: bool = False,
                 find_covering: bool = True,
                 return_pairset: bool = False, verbose=False) -> 'StereoPairSet':
    """
    Find stereo pairs that fill a given bounding box
    
    # :param west: Western limit of the box, in -180 to 180 longitude, positive east
    # :param east: Eastern limit of the box, in -180 to 180 longitude, positive east
    # :param south: Southern limit of the box, in -90 to 90 latitude, positive north
    # :param north: Northern limit of the box, in -90 to 90 latitude, positive north
    :param plot: Whether to plot the footprints of the selected images
    :param find_covering: Whether to search for a minimal set of pairs covering the bounding box. Otherwise, outputs all
    pairs that have good sun and spacecraft geometry.
    :return: A StereoPairSet
    """

    search_poly_shapely = geom_helpers.corners_to_quadrilateral(west, east, south, north, lonC0=True)
    imgs = ImageSearch(polygon=wkt.dumps(search_poly_shapely))
    pairset = StereoPairSet(imgs)
    filtered_pairset = pairset.filter_sun_geometry().filter_small_overlaps()
    if find_covering:
        search_poly_shapely = wkt.loads(imgs.search_poly)
        filtered_pairset.pairs, stats = geom_helpers.covering_set_search(
            full_poly_set=filtered_pairset.pairs,
            search_poly=search_poly_shapely,
            plot=plot,
            verbose=False
        )
    print(filtered_pairset.pairs_json())
    if return_pairset:
        return filtered_pairset
Code example #25
File: forms_helper.py Project: dalinhuang/indoorMap2
def parse_coordinates(coordinates):
    if coordinates is None or type(coordinates) is not dict:
        return coordinates

    if all(key in coordinates for key in ('lat', 'lng')):
        point = Point(float(coordinates['lng']), float(coordinates['lat']))
        return dumps(point)
Code example #26
def parse_coordinates(coordinates):
    if coordinates is None or type(coordinates) is not dict:
        return coordinates

    if all(key in coordinates for key in ('lat', 'lng')):
        point = Point(float(coordinates['lng']), float(coordinates['lat']))
        return dumps(point)
Code example #27
File: geotext.py Project: davidmcclure/hilt-2016
def csv_to_neatline(in_file, out_file):

    """
    Format a CSV file for Neatline.
    """

    reader = csv.DictReader(in_file)

    # Add wkt field to the CSV.
    cols = reader.fieldnames + ['wkt']
    writer = csv.DictWriter(out_file, cols)
    writer.writeheader()

    for row in reader:

        lat = float(row.pop('latitude'))
        lon = float(row.pop('longitude'))

        # Convert degrees -> meters.
        meters = degrees_to_meters(lon, lat)

        # Convert to WKT.
        point = ShapelyPoint(meters)
        row['wkt'] = wkt.dumps(point)

        writer.writerow(row)
Code example #28
File: gis.py Project: USEPA/lcia-eutrophication
def round_geometry_wkt (geom, precision):
    """
    Given a precision, change geometry coordinates by rounding to that level
    :param geom: a geometry element from <class 'geopandas.geoseries.GeoSeries'>
    :param precision: integer; number of decimal places
    :return: a geometry object that has been rounded

    # this is a less elegant version of round_geometry_np

    e.g., start with 'POLYGON ((-170.7439000044051 -14.37555495213201, ....)
    dumps(poly,rounding_precision=1 )
    'POLYGON ((-170.7 -14.4, -170.7 ....)

    # But this is buggy; sometimes the rounding doesn't happen

    dumps(round_geometry_wkt(g[1],1))
    'POLYGON ((-5829486.5000000000000000 -504910.2000000000116415, ....
    dumps(round_geometry_wkt(g[1],2))
    'POLYGON ((-5829486.5000000000000000 -504910.2000000000116415, ....
    dumps(round_geometry_wkt(g[1],0))
    'POLYGON ((-5829487.0000000000000000 -504910.0000000000000000, ....

    https://gis.stackexchange.com/questions/368533/shapely-wkt-dumps-and-loads-does-not-always-preserve-rounding-precision

    """

    geom = wkt.loads(wkt.dumps(geom, rounding_precision=precision))
    return geom
Code example #29
 def _convert_mundi_coverage(mundi_coverage_string: str):
     coords = mundi_coverage_string.split(" ")
     coord_list = []
     for i in range(0, len(coords), 2):
         coord_list.append((float(coords[i + 1]), float(coords[i])))
     coverage = Polygon(coord_list)
     return dumps(coverage)
Code example #30
File: geom.py Project: manuelep/GeoPbf
def geom2tile(x, y, z, geom):
    """ """
    geom_xy = merc2xy(x, y, z, shape(geom))
    # Courtesy of: https://gis.stackexchange.com/a/276512
    as_json = mapping(
        wkt.loads(wkt.dumps(geom_xy, rounding_precision=0)).simplify(0))
    return as_json
Code example #31
 def _find_parts(cat_id, band_type):
     vectors = Vectors()
     aoi = wkt.dumps(box(-180, -90, 180, 90))
     query = "item_type:IDAHOImage AND attributes.catalogID:{} " \
             "AND attributes.colorInterpretation:{}".format(cat_id, band_types[band_type])
     return sorted(vectors.query(aoi, query=query),
                   key=lambda x: x['properties']['id'])
Code example #32
File: Location.py Project: isawnyu/PleiadesEntity
 def getGeometryWKT(self):
     """Return WKT representation of geometry"""
     parts = self._getGeometryRaw().split(':')
     j = '{"type": "%s", "coordinates": %s}' % (
         parts[0].strip(), parts[1].strip())
     d = simplejson.loads(j)
     return wkt.dumps(asShape(d))
Code example #33
def mask_to_poly(mask, min_polygon_area_th=MIN_POLYGON_AREA, thresh=0.5):
    mask = (mask > thresh).astype(np.uint8)
    shapes = rasterio.features.shapes(mask.astype(np.int16), mask > 0)
    poly_list = []
    mp = shapely.ops.cascaded_union(
        shapely.geometry.MultiPolygon(
            [shapely.geometry.shape(shape) for shape, value in shapes]))

    if isinstance(mp, shapely.geometry.Polygon):
        df = pd.DataFrame({
            'area_size': [mp.area],
            'poly': [mp],
        })
    else:
        df = pd.DataFrame({
            'area_size': [p.area for p in mp],
            'poly': [p for p in mp],
        })

    df = df[df.area_size > min_polygon_area_th].sort_values(by='area_size',
                                                            ascending=False)
    df.loc[:,
           'wkt'] = df.poly.apply(lambda x: wkt.dumps(x, rounding_precision=0))
    df.loc[:, 'bid'] = list(range(1, len(df) + 1))
    df.loc[:, 'area_ratio'] = df.area_size / df.area_size.max()
    return df
Code example #34
def mask_to_polygons(mask, min_area=8.):
    """Convert a mask ndarray (binarized image) to Multipolygons"""
    # first, find contours with cv2: it's much faster than shapely
    image, contours, hierarchy = cv2.findContours(mask,
                                  cv2.RETR_CCOMP,
                                  cv2.CHAIN_APPROX_NONE)
    if not contours:
        return Polygon()
    # now messy stuff to associate parent and child contours
    cnt_children = defaultdict(list)
    child_contours = set()
    assert hierarchy.shape[0] == 1
    # http://docs.opencv.org/3.1.0/d9/d8b/tutorial_py_contours_hierarchy.html
    for idx, (_, _, _, parent_idx) in enumerate(hierarchy[0]):
        if parent_idx != -1:
            child_contours.add(idx)
            cnt_children[parent_idx].append(contours[idx])
    # create actual polygons filtering by area (removes artifacts)
    all_polygons = []
    for idx, cnt in enumerate(contours):
        if idx not in child_contours and cv2.contourArea(cnt) >= min_area:
            assert cnt.shape[1] == 1
            poly = Polygon(
                shell=cnt[:, 0, :],
                holes=[c[:, 0, :] for c in cnt_children.get(idx, [])
                       if cv2.contourArea(c) >= min_area])
            all_polygons.append(poly)
    if len(all_polygons) > 1:
        print('more than one polygon!')
    wkt = dumps(all_polygons[0], rounding_precision=0)

    return wkt
Code example #35
File: binarisation.py Project: boetien/navitia
def load_bounding_shape(instance_name, instance_conf, shape_path):
    logging.info("loading bounding shape for {} from = {}".format(
        instance_name, shape_path))

    if shape_path.endswith(".poly"):
        with open(shape_path, "r") as myfile:
            shape = parse_poly(myfile.readlines())
    elif shape_path.endswith(".wkt"):
        with open(shape_path, "r") as myfile:
            shape = wkt.loads(myfile.read())
    else:
        logging.error(
            "bounding_shape: {} has an unknown extension.".format(shape_path))
        return

    connection_string = "postgres://{u}:{pw}@{h}/{db}"\
        .format(u=instance_conf.pg_username, pw=instance_conf.pg_password,
                h=instance_conf.pg_host, db=instance_conf.pg_dbname)
    engine = sqlalchemy.create_engine(connection_string)
    # create the line if it does not exist
    engine.execute("""
    INSERT INTO navitia.parameters (shape)
    SELECT NULL WHERE NOT EXISTS (SELECT * FROM navitia.parameters)
    """).close()
    # update the line, simplified to approx 100m
    engine.execute("""
    UPDATE navitia.parameters
    SET shape_computed = FALSE, shape = ST_Multi(ST_SimplifyPreserveTopology(ST_GeomFromText('{shape}'), 0.001))
    """.format(shape=wkt.dumps(shape))).close()
Code example #36
File: geotext.py Project: sashafr/price-lab-python
def csv_to_neatline(in_file, out_file):
    """
    Format a CSV file for Neatline.
    """

    reader = csv.DictReader(in_file)

    # Add wkt field to the CSV.
    cols = reader.fieldnames + ['wkt']
    writer = csv.DictWriter(out_file, cols)
    writer.writeheader()

    for row in reader:

        lat = float(row.pop('latitude'))
        lon = float(row.pop('longitude'))

        # Convert degrees -> meters.
        meters = degrees_to_meters(lon, lat)

        # Convert to WKT.
        point = ShapelyPoint(meters)
        row['wkt'] = wkt.dumps(point)

        writer.writerow(row)
Code example #37
    def __init__(self, shape_paths):


        pluto_filter = lambda x: ('unclipped' not in x) and ('mappluto' in x)
        shape_paths = list(filter(pluto_filter, shape_paths))


        print("aggregating shapefiles")
        if len(shape_paths) > 1:
            d = [read_shapefile(f) for f in shape_paths]
            d = pd.concat(d)
        else:
            d = read_shapefile(shape_paths[0])

        print("filling nulls geometries with empty polygons")
        d['geometry'] = d['geometry'].fillna()
        
        

        print("converting coordinate reference system to ESPG:4326")
        d = d.to_crs({'init': "EPSG:4326"})

        print("making well known text representation out of geometries")
        wkt_geom = d['geometry'].apply(lambda x: wkt.dumps(x))
        d = pd.DataFrame(d)
        d = d.drop(columns=['geometry'])
        d['wkt_geom'] = wkt_geom
        print("cleaning column names")
        d = d.clean_names()
        self.wkt_file = d
Code example #38
File: submission.py Project: avenash97/DCNN
def make_submit(name, max_score, trs=None):
    """
    Creates the final submission by loading all raw predictions, creating 0-1 masks by thresholding them and
    creating Multipolygons from these masks.
    """
    print("make submission file")
    # Get the best scores to load the best predictions
    for idx, row in SB.iterrows():
        id = row[0]
        kls = row[1] - 1
        # Get the prediction from the respective class model and the best performing iteration of it
        msk = np.load('../msks/{}_{}.npy'.format(max_score, id))[kls]
        msk = msk >= trs[kls]
        # Create correctly sizes polygons for the submission file
        pred_polygons = mask_to_polygons(msk, epsilon=1, min_area=1)
        x_max = GS.loc[GS['ImageId'] == id, 'Xmax'].as_matrix()[0]
        y_min = GS.loc[GS['ImageId'] == id, 'Ymin'].as_matrix()[0]
        x_scaler, y_scaler = get_scalers(msk.shape, x_max, y_min)
        scaled_pred_polygons = shapely.affinity.scale(pred_polygons, xfact=1.0 / x_scaler, yfact=1.0 / y_scaler,
                                                      origin=(0, 0, 0))
        SB.iloc[idx, 2] = dumps(scaled_pred_polygons, rounding_precision=8)
        if SB.iloc[idx, 2]=="GEOMETRYCOLLECTION EMPTY":
            SB.iloc[idx, 2] = "MULTIPOLYGON EMPTY"
        if idx % 100 == 0: print(idx)
    os.makedirs("../subm", exist_ok=True)
    SB.to_csv('../subm/{}.csv.gz'.format(name), compression="gzip", index=False)
Code example #39
def vector_services_query(query, aoi=None, **kwargs):
    vectors = Vectors()
    if not aoi:
        aoi = wkt.dumps(box(-180, -90, 180, 90))
    _parts = sorted(vectors.query(aoi, query=query, **kwargs),
                    key=lambda x: x['properties']['id'])
    return _parts
Code example #40
def import_with_fiona(fpath, source):
    """
    Use fiona to import a parcel file.

    Return a list of dict objects containing WKT-formatted geometries in 
    addition to any metadata.
    """
    shapes = []

    try:
        with fiona.drivers():
            data = fiona.open(fpath)
            for obj in data:
                try:
                    shape = scrape_fiona_metadata(obj, source)
                    geom = to_shapely_obj(obj)
                    if geom:
                        shape['geom'] = dumps(geom)
                        shapes.append(shape)
                except Exception as e:
                    _L.warning('error loading shape from fiona. {}'.format(e))
    except Exception as e:
        _L.warning('error importing file. {}'.format(e))

    return shapes
Code example #41
File: fxcc2shp.py Project: fpsampayo/utils
def procesaLineaInterna(featuresExternas, featuresInternas, featuresCentroide, featureDefn):
	#print "Procedemos a procesar las lineas internas"
	
	centroides = []
	for centroide in featuresCentroide:
		#get the height and the label from the style of each centroid
		for n in centroide.GetStyleString().split(','):
			if n.startswith('s'):
				altura = float(n.replace('s:', '').replace('g', ''))
			elif n.startswith('t'):
				rotulo = n.split('"')[1]
		punto = centroide.GetGeometryRef()
		x = punto.GetX()
		y = punto.GetY()
		longitudRotulo = len(rotulo)
		factor = 0.15 * (altura * 3.3333)
		desfaseX = longitudRotulo * factor - 0.05
		punto.SetPoint(point = 0, x = x + desfaseX, y = y - 0.20)
		
		centroides.append((rotulo, punto))
	
	featuresProceso = featuresExternas + featuresInternas
	
	outFeature = []
	if len(featuresProceso) > 1:
		geometry_out = None
		for inFeature in featuresProceso:
			geometry_in = inFeature.GetGeometryRef()
			if geometry_out is None:
				geometry_out = geometry_in
				geometry_out = ogr.ForceToMultiLineString(geometry_out)
			else:
				geometry_out = geometry_out.Union(geometry_in) 
		
		lineasInternasShapely = loads(geometry_out.ExportToWkt())
		polygonsShapely = polygonize(lineasInternasShapely)
	
		polygonGeom = []
		for polygon in polygonsShapely:
			polygonGeom.append(ogr.CreateGeometryFromWkt(dumps(polygon)))
		
		for pol in polygonGeom:
			for cen in centroides:
				if pol.Contains(cen[1]):
					feature = ogr.Feature(featureDefn)
					feature.SetGeometry(pol)
					feature.SetField('rotulo', cen[0])
					outFeature.append(feature.Clone())
					feature.Destroy()
	else:
		feature = ogr.Feature(featureDefn)
		geometryPoly = ogr.BuildPolygonFromEdges(ogr.ForceToMultiLineString(featuresProceso[0].GetGeometryRef()), dfTolerance = 0)
		feature.SetGeometry(geometryPoly)
		feature.SetField('rotulo', centroides[0][0])
		outFeature.append(feature.Clone())
		feature.Destroy()
	
	
	return outFeature
Code example #42
File: plugin.py Project: Sapphirine/stackexchange
    def _get_wkt_from_shape(self, shape):
        if shape.type == POINT:
            # shapely float precision errors break cypher matches!
            wkt = dumps(shape, rounding_precision=8)
        else:
            wkt = shape.wkt

        return wkt
Code example #43
    def process_latlon(self):
        """ FIND the LAT...LON data """
        data = self.unixtext.replace("\n", " ")
        pos = data.find("LAT...LON")
        if pos == -1:
            return None
        newdata = data[pos+9:]
        # Go find our next non-digit, non-space character, if we find it, we
        # should truncate our string, this could be improved, I suspect
        m = re.search(r"[^\s0-9]", newdata)
        if m is not None:
            pos2 = m.start()
            newdata = newdata[:pos2]

        pts = []
        partial = None

        def checker(lon, lat):
            ''' make sure our values are legit! '''
            if lat >= 90 or lat <= -90:
                raise TextProductException("invalid latitude %s from %s" % (
                                                        lat, newdata))
            if lon > 180 or lon < -180:
                raise TextProductException("invalid longitude %s from %s" % (
                                                        lon, newdata))
            return (lon, lat)

        # We have two potential formats, one with 4 or 5 places and one
        # with eight!
        vals = re.findall(LAT_LON, newdata)
        for val in vals:
            if len(val) == 8:
                lat = float(val[:4]) / 100.00
                lon = float(val[4:]) / 100.00
                if lon < 40:
                    lon += 100.
                lon = 0 - lon
                pts.append(checker(lon, lat))
            else:
                s = float(val) / 100.00
                if partial is None:  # we have lat
                    partial = s
                    continue
                # we have a lon
                if s < 40:
                    s += 100.
                s = 0 - s
                pts.append(checker(s, partial))
                partial = None

        if len(pts) == 0:
            return None
        if pts[0][0] != pts[-1][0] and pts[0][1] != pts[-1][1]:
            pts.append(pts[0])

        self.giswkt = 'SRID=4326;%s' % (dumps(MultiPolygon([Polygon(pts)]),
                                              rounding_precision=6),)
        return Polygon(pts)
Code example #44
File: test_locale.py Project: SIGISLV/Shapely
    def test_wkt_locale(self):

        # Test reading and writing
        p = loads('POINT (0.0 0.0)')
        self.assertEqual(p.x, 0.0)
        self.assertEqual(p.y, 0.0)
        wkt = dumps(p)
        self.assertTrue(wkt.startswith('POINT'))
        self.assertFalse(',' in wkt)
Code example #45
File: reconscape.py Project: bzhanghmc/dlcpar
def write_regions(filename, regions, duprange, lossrange):
    out = util.open_stream(filename, 'w')
    print >>out, '\t'.join(map(str, duprange + lossrange))
    for cv, region in regions.iteritems():
        coords = None; area = None
        if isinstance(region, geometry.Polygon):                                              # non-degenerate
            coords = list(region.exterior.coords)
            area = region.area
        elif isinstance(region, geometry.LineString) or isinstance(region, geometry.Point):   # degenerate
            coords = list(region.coords)
            area = region.area
        else:
            raise Exception("count vector (%s) has invalid region (%s)" % (cv, dumps(region)))

        coords = dumps(region)
        toks = (cv, coords, area)
        print >>out, '\t'.join(map(str, toks))
    out.close()
Code example #46
    def handle(self, *args, **options):


        CENSUS_BLOCK_2010 = pickle.load(open(BOUNDARY_DATA + "census.p", 'rb')) 

        NEIGHBORHOOD = pickle.load(open(BOUNDARY_DATA + "community.p",'rb'))
        NEIGHBORHOOD_INDEX = create_spatial_index(NEIGHBORHOOD)
        aggregate_results = {}            
        for i, (aggregate_id, aggregate_shape) in \
                enumerate(CENSUS_BLOCK_2010.iteritems(), 1):

            # print out a nice message to know how it's going
            if i % 1000 == 0:
                print >> sys.stderr, 'Aggregating %i of %i' % \
                    (i, len(CENSUS_BLOCK_2010))
        

            if not CensusBlocks.objects.filter(census_id=aggregate_id,
                                               building_subtype="All").exists():
                print >> sys.stderr, "Updating census_id: %s" %(aggregate_id)
                neighborhood_count = 0
                for j,item_2010 in enumerate(NEIGHBORHOOD_INDEX.intersection(aggregate_shape.bounds,
                                                                      objects=True)):

                    # get the element id
                    raw_id = item_2010.object


                    # get the shape from the shape dictionary
                    raw_shape = NEIGHBORHOOD[raw_id]

                    # calculate the intersection of the polygons
                    intersection = aggregate_shape.intersection(raw_shape)

                    # calculate the fraction of the area of the aggregate shape that
                    # is in the raw shape
                    frac_raw = float(intersection.area) / aggregate_shape.area

                    # fix rawid in those two cases for mckinley and ohare
                    raw_id = raw_id.title()
                    if raw_id == 'Mckinley Park':
                        raw_id = 'McKinley Park'
                    elif raw_id == 'Ohare':
                        raw_id = "O'hare"
                    # if there is any area above tolerance, then add it up
                    if neighborhood_count == 0:
                        neighborhood, created = Neighborhoods.objects.get_or_create(name = raw_id)

                        census_block, created = CensusBlocks.objects.get_or_create(census_id=aggregate_id,
                                                                         building_type="Residential",
                                                                         building_subtype="All",
                                                                         neighborhood = neighborhood,
                                                                         shape = wkt.dumps(aggregate_shape))
     
                        neighborhood_count += 1
                    else:
                        continue
Code example #47
File: import.py Project: atlefren/geojson2pg
def insert(cursor, feature, tablename, srid):
    """
    Insert a "feature" as a row in PostGIS
    """
    fields = ", ".join([field for field in feature["properties"].keys()])
    values = ", ".join(["'%s'" % unicode(field) for field in feature["properties"].values()])
    sql = """
    INSERT INTO %s (%s, geom)
    VALUES (%s, ST_GeomFromText('%s', %s))
    """ % (tablename, fields, values, dumps(get_geom(feature)), srid)
    cursor.execute(sql)
Code example #48
def write_wkt(filepath, shply_geom):
    """

    :param filepath: output path for new javascript file
    :param shply_geom: shapely geometry features
    :return:
    """
    with open(filepath, "w") as f:
        # create a javascript variable called ply_data used in html
        # Shapely dumps geometry out to WKT
        f.write("var ply_data = '" + dumps(shply_geom) + "'")
Code example #49
def parse_area(area):
    if area is None:
        return area

    points = map(get_lng_lat_pair, area)

    if len(points) == 2:
        polygon = box(points[0][0], points[0][1], points[1][0], points[1][1])
    else:
        polygon = Polygon(points)

    return dumps(polygon)
Code example #50
File: rammData.py Project: petehilljnr/info-portal
def getMaintenanceData(
    start_date,
    end_date,
    token,
    sz_roads,
    ):
    errors=[]
    filters = []
    filters.append({'columnName': 'transaction_date',
                   'operator': 'GreaterEqualThan', 'value': start_date})
    filters.append({'columnName': 'transaction_date',
                   'operator': 'LessEqualThan', 'value': end_date})

    mc_cost = getTable(
        token=token,
        table='mc_cost',
        columns=[
            'cost_group',
            'activity',
            'fault',
            'cost_amount',
            ],
        filters=filters,
        szRoads=sz_roads,
        getGeometry=True
        )

    mc_cost.getData()

    if mc_cost.error is True:
        errors.append('mc_cost: ' + mc_cost.message)

    data = []

    for row in mc_cost.data:
        line = loads(row['values'][4])
        #length = line.length

        #sections = int(math.ceil(length / 200))
        #sections_pc = 1 / (sections + 1)
        #section = 1
        #while (section <= sections):
        point = line.interpolate(0.5, True)
        data.append([
            row['values'][0],
            row['values'][1],
            row['values'][2],
            row['values'][3],
            dumps(point, rounding_precision=0),
            ])
        #  section = section + 1

    return {'data': data, 'errors': errors}
Code example #51
def parse_area(area):
    if area is None:
        return area

    points = map(get_lng_lat_pair, area)

    # if len(points) == 2:
    #     polygon = box(points[0][0], points[0][1], points[1][0], points[1][1])
    # else:
    polygon = LineString(points)

    return dumps(polygon)
Code example #52
File: import.py Project: atlefren/geojson2pg
def check_geojson(file):
    """
    Figure out what can't be read from file
    """

    f = open(file)
    features = ijson.items(f, "features.item")

    for feature in features:
        try:
            geom = dumps(get_geom(feature))
        except Exception, e:
            print e, feature
Code example #53
 def create(self, cr, uid, vals, context=None):
     """
     This method is called on every wizard action.
     """
     try:
         id_val = super(urban_bridge_wizard_update_shape,self).create(cr, uid, vals, context=context)
         #3. Capture the data from a WKT
         wkt = vals["wkt"]
         bridge_id = vals["bridge_id"]
         if ((wkt is not None) or (wkt is not False)):
             shape = dumps(loads(wkt))
             self.pool.get('urban_bridge.bridge').write(cr,uid,bridge_id,{'shape':shape})
         return id_val
     except Exception:
         raise except_osv(_('Geometry wizard Load Fail'), str("Geometry bad definition"))
Code example #54
File: utils.py Project: Cadasta/cadasta-platform
def odk_geom_to_wkt(coords):
    """Convert geometries in ODK format to WKT."""
    try:
        if coords == '':
            return ''
        coords = coords.replace('\n', '')
        coords = coords.split(';')
        coords = [c.strip() for c in coords]
        if (coords[-1] == ''):
            coords.pop()

        if len(coords) > 1:
            # check for a geoshape taking into account
            # the bug in odk where the second coordinate in a geoshape
            # is the same as the last (first and last should be equal)
            if len(coords) > 3:
                if coords[1] == coords[-1]:  # geom is closed
                    coords.pop()
                    coords.append(coords[0])
            points = []
            for coord in coords:
                coord = coord.split(' ')
                coord = [x for x in coord if x]
                latlng = [float(coord[1]),
                          float(coord[0])]
                points.append(tuple(latlng))
            if (coords[0] != coords[-1] or len(coords) == 2):
                return dumps(LineString(points))
            else:
                return dumps(Polygon(points))
        else:
            latlng = coords[0].split(' ')
            latlng = [x for x in latlng if x]
            return dumps(Point(float(latlng[1]), float(latlng[0])))
    except Exception as e:
        raise InvalidODKGeometryError(e)
Code example #55
    def geom_to_points(geom):
        """
        Convert binary geom column to list of lat/lon point strings
        :return: list of lists containing lat/lon as string.
        """
        binary = unhexlify(geom.desc)
        point = wkb.loads(binary)
        data = wkt.dumps(point)
        data = data.replace('MULTILINESTRING ((', '')
        data = data.replace('))', '')
        point_list = data.split(',')
        points = [p.strip().split(' ') for p in point_list]

        points_to_float_list = [
            [float(point[0]), float(point[1])] for point in points]
        return points_to_float_list
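The helper above recovers the points by string-editing the WKT of a single-part MULTILINESTRING. An alternative sketch (not the original code) reads the same values straight from the geometry's coordinate sequence, which avoids the replace/split parsing:

from binascii import unhexlify
from shapely import wkb

def geom_to_points(geom_hex):
    """Hypothetical variant: hex-encoded WKB in, list of [x, y] floats out."""
    geom = wkb.loads(unhexlify(geom_hex))
    # assumes a single-part MultiLineString, as the original helper does
    line = geom.geoms[0] if geom.geom_type == 'MultiLineString' else geom
    return [[float(c[0]), float(c[1])] for c in line.coords]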
Code example #56
File: models.py Project: Cadasta/cadasta-platform
def reassign_project_extent(instance):
    coords = [list(x) for x in list(instance.extent.boundary.coords)]
    for point in coords:
        if point[0] >= -180 and point[0] <= 180:
            return
    while coords[0][0] < -180:
        for point in coords:
            point[0] += 360
    while coords[0][0] > 180:
        for point in coords:
            point[0] -= 360
    extent = []
    for point in coords:
        latlng = [point[0], point[1]]
        extent.append(tuple(latlng))
    instance.extent = dumps(Polygon(extent))
Code example #57
    def query(self, queryId, iterationId, queriesParameters):
        (eTime, result) = (-1, None)
        connection = self.getConnection()
        cursor = connection.cursor()
               
        self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
        postgresops.dropTable(cursor, self.resultTable, True)    
       
        wkt = self.qp.wkt
        if self.qp.queryType == 'nn':
            g = loads(self.qp.wkt)
            wkt = dumps(g.buffer(self.qp.rad))
       
        t0 = time.time()
        scaledWKT = wktops.scale(wkt, self.scaleX, self.scaleY, self.minX, self.minY)    
        (mimranges,mxmranges) = self.quadtree.getMortonRanges(scaledWKT, self.mortonDistinctIn, maxRanges = MAXIMUM_RANGES)
       
        if len(mimranges) == 0 and len(mxmranges) == 0:
            logging.info('None morton range in specified extent!')
            return (eTime, result)

        if self.qp.queryType == 'nn':
            logging.error('NN queries not available!')
            return (eTime, result)

        if self.numProcessesQuery > 1:
            if self.qp.queryMethod != 'stream' and self.qp.queryType in ('rectangle','circle','generic') :
                 return self.pythonParallelization(t0, mimranges, mxmranges)
            else:
                 logging.error('Python parallelization only available for disk queries (CTAS) which are not NN queries!')
                 return (eTime, result)
        
        (query, queryArgs) = self.getSelect(self.qp, mimranges, mxmranges)        
         
        if self.qp.queryMethod != 'stream': # disk or stat
            postgresops.mogrifyExecute(cursor, "CREATE TABLE "  + self.resultTable + " AS (" + query + ")", queryArgs)
            (eTime, result) = dbops.getResult(cursor, t0, self.resultTable, self.DM_FLAT, (not self.mortonDistinctIn), self.qp.columns, self.qp.statistics)
        else:
            sqlFileName = str(queryId) + '.sql'
            postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
            result = postgresops.executeSQLFileCount(self.getConnectionString(False, True), sqlFileName)
            eTime = time.time() - t0
            
        connection.close()
        return (eTime, result)
Code example #58
    def __iter__(self):
        """ Returns generator over shapefile rows.

        Note:
            The first column is an id field, taken from the id value of each shape
            The middle values are taken from the property_schema
            The last column is a string named geometry, which has the wkt value, the type is geometry_type.

        """

        # These imports are here, not at the module level, so the geo
        # support can be an extra

        import fiona

        from shapely.geometry import shape
        from shapely.wkt import dumps
        from .spec import ColumnSpec

        self.start()

        with fiona.drivers():
            # retrieve full path of the zip and convert it to url
            virtual_fs = 'zip://{}'.format(self._fstor._fs.zf.filename)
            layer_index = self.spec.segment or 0
            with fiona.open('/', vfs=virtual_fs, layer=layer_index) as source:
                # geometry_type = source.schema['geometry']
                property_schema = source.schema['properties']
                self.spec.columns = [ColumnSpec(**c) for c in self._get_columns(property_schema)]
                self._headers = [x['name'] for x in self._get_columns(property_schema)]

                for s in source:
                    row_data = s['properties']
                    shp = shape(s['geometry'])
                    wkt = dumps(shp)
                    row = [int(s['id'])]
                    for col_name, elem in six.iteritems(row_data):
                        row.append(elem)

                    row.append(wkt)

                    yield row

        self.finish()
Code example #59
File: plugins.py Project: open-data/ckanext-wet-boew
    def geojson_to_wkt(self, gjson_str):
        ## The GeoJSON string should look something like:
        ##  u'{"type": "Polygon", "coordinates": [[[-54, 46], [-54, 47], [-52, 47], [-52, 46], [-54, 46]]]}']
        ## Convert this JSON into an object, and load it into a Shapely object. The Shapely library can
        ## then output the geometry in Well-Known-Text format

        try:
            gjson = json.loads(gjson_str)
            try:
                gjson = _add_extra_longitude_points(gjson)
            except:
                # this is bad, but all we're trying to do is improve
                # certain shapes and if that fails showing the original
                # is good enough
                pass
            shape = shapely.geometry.asShape(gjson)
        except ValueError:
            return None # avoid 500 error on bad geojson in DB

        wkt_str = wkt.dumps(shape)
        return wkt_str