Example #1
import itertools
from osgeo import ogr
from shapely.wkb import loads

def poly_dist(inshp, max_distance=None):
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(inshp, 0)
    layer = dataset.GetLayer()
    shpname = inshp.split('\\')[-1].split('.')[0]
    outfolder = inshp.split(inshp.split('\\')[-1])[0]
    distfile = outfolder + r"\dist_" + shpname + ".txt"
    nodefile = outfolder + r"\node_" + shpname + ".txt"
    d_obj = open(distfile, "w")
    n_obj = open(nodefile, "w")

    checklist = []
    for ind in itertools.combinations(xrange(layer.GetFeatureCount()),2):
        feat1 = layer.GetFeature(ind[0])
        feat2 = layer.GetFeature(ind[1])

        geom1 = loads(feat1.GetGeometryRef().ExportToWkb())
        geom2 = loads(feat2.GetGeometryRef().ExportToWkb())

        dist = geom1.distance(geom2)
        if max_distance is not None:
            if dist < max_distance:
                d_obj.write(str(ind[0]) + '\t' + str(ind[1]) + '\t' + str(dist) + '\n')

        if not ind[0] in checklist:
            checklist.append(ind[0])
            n_obj.write(str(ind[0]) + '\t' + str(geom1.area) + '\n')
        if not ind[1] in checklist:
            checklist.append(ind[1])
            n_obj.write(str(ind[1]) + '\t' + str(geom2.area) + '\n')

    d_obj.close()
    n_obj.close()
Example #2
    def test_polygon_filter(self):
        from mapfish.protocol import create_geom_filter
        poly = Polygon(((1, 2), (1, 3), (2, 3), (2, 2), (1, 2)))
        request = FakeRequest(
            {"geometry": dumps(poly), "tolerance": "1"}
        )
        filter = create_geom_filter(request, MappedClass)
        compiled_filter = filter.compile(engine)
        params = compiled_filter.params
        filter_str = _compiled_to_string(compiled_filter)
        eq_(filter_str, '(ST_Expand(GeomFromWKB(%(GeomFromWKB_1)s, %(GeomFromWKB_2)s), %(ST_Expand_1)s) && "table".geom) AND (ST_Expand("table".geom, %(ST_Expand_2)s) && GeomFromWKB(%(GeomFromWKB_3)s, %(GeomFromWKB_4)s)) AND ST_Distance("table".geom, GeomFromWKB(%(GeomFromWKB_5)s, %(GeomFromWKB_6)s)) <= %(ST_Distance_1)s')
        assert wkb.loads(str(params["GeomFromWKB_1"])).equals(poly)
        assert params["GeomFromWKB_2"] == 4326
        assert params["ST_Expand_1"] == 1
        assert params["ST_Distance_1"] == 1

        poly = Polygon(((1, 2), (1, 3), (2, 3), (2, 2), (1, 2)))
        request = FakeRequest(
            {"geometry": dumps(poly), "tolerance": "1", "epsg": "900913"}
        )
        filter = create_geom_filter(request, MappedClass)
        compiled_filter = filter.compile(engine)
        params = compiled_filter.params
        filter_str = _compiled_to_string(compiled_filter)
        eq_(filter_str, '(ST_Expand(GeomFromWKB(%(GeomFromWKB_1)s, %(GeomFromWKB_2)s), %(ST_Expand_1)s) && ST_Transform("table".geom, %(param_1)s)) AND (ST_Expand(ST_Transform("table".geom, %(param_2)s), %(ST_Expand_2)s) && GeomFromWKB(%(GeomFromWKB_3)s, %(GeomFromWKB_4)s)) AND ST_Distance(ST_Transform("table".geom, %(param_3)s), GeomFromWKB(%(GeomFromWKB_5)s, %(GeomFromWKB_6)s)) <= %(ST_Distance_1)s')
        assert wkb.loads(str(params["GeomFromWKB_1"])).equals(poly)
        assert params["GeomFromWKB_2"] == 900913
        assert params["ST_Expand_1"] == 1
        assert params["param_1"] == 900913
        assert params["ST_Distance_1"] == 1        #assert isinstance(filter, sql.expression.ClauseElement)
def Reach_Upstream_Edge(New_Gage_watershed_Dissolve,Main_watershed,ID,dir_main,out_dir):
    os.chdir(dir_main)
    file=Main_watershed+'.shp'
    file1=ogr.Open(file)
    layer1 = file1.GetLayerByName(Main_watershed)
    os.chdir(out_dir)
    file2=New_Gage_watershed_Dissolve+'.shp'
    file11=ogr.Open(file2)
    layer12 = file11.GetLayerByName(New_Gage_watershed_Dissolve)
    polygon2= layer12.GetNextFeature()
    geomPolygon2 = loads(polygon2.GetGeometryRef().ExportToWkb())
    polygon1 = layer1.GetNextFeature()
    g=len(layer1)
    subwatershed_ID=ID
    compli_ID=[]
    while polygon1 is not None:
       geomPolygon = loads(polygon1.GetGeometryRef().ExportToWkb())
       if geomPolygon.intersects(geomPolygon2):
          geomPoly=geomPolygon.difference(geomPolygon2)
          name1 = polygon1.GetField("GRIDCODE")
          print (name1)
          if(name1!=subwatershed_ID):
            x1=round(list(geomPolygon.centroid.xy[0])[0],6)
            y1=round(list(geomPolygon.centroid.xy[1])[0],6)
            x2=round(list(geomPoly.centroid.xy[0])[0],6)
            y2=round(list(geomPoly.centroid.xy[1])[0],6)
            if((x1!=x2)|(y1!=y2)):
                compli_ID.append(name1)
                print (name1)
            else:
                compli_ID.append(-1)

       polygon1 = layer1.GetNextFeature()

    return compli_ID
Example #4
  def test_box_filter(self):
      from mapfish.protocol import create_geom_filter
      request = FakeRequest(
          {"bbox": "-180,-90,180,90", "tolerance": "1"}
      )
      filter = create_geom_filter(request, MappedClass)
      compiled_filter = filter.compile(engine)
      params = compiled_filter.params
      filter_str = _compiled_to_string(compiled_filter)
      eq_(filter_str, '(ST_Expand(GeomFromWKB(%(GeomFromWKB_1)s, %(GeomFromWKB_2)s), %(ST_Expand_1)s) && "table".geom) AND (ST_Expand("table".geom, %(ST_Expand_2)s) && GeomFromWKB(%(GeomFromWKB_3)s, %(GeomFromWKB_4)s)) AND ST_Distance("table".geom, GeomFromWKB(%(GeomFromWKB_5)s, %(GeomFromWKB_6)s)) <= %(ST_Distance_1)s')
      assert wkb.loads(str(params["GeomFromWKB_1"])).equals(wkt.loads('POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'))
      assert params["GeomFromWKB_2"] == 4326
      assert params["ST_Expand_1"] == 1
      assert params["ST_Distance_1"] == 1
 
      request = FakeRequest(
          {"bbox": "-180,-90,180,90", "tolerance": "1", "epsg": "900913"}
      )
      filter = create_geom_filter(request, MappedClass)
      compiled_filter = filter.compile(engine)
      params = compiled_filter.params
      filter_str = _compiled_to_string(compiled_filter)
      eq_(filter_str, '(ST_Expand(GeomFromWKB(%(GeomFromWKB_1)s, %(GeomFromWKB_2)s), %(ST_Expand_1)s) && ST_Transform("table".geom, %(param_1)s)) AND (ST_Expand(ST_Transform("table".geom, %(param_2)s), %(ST_Expand_2)s) && GeomFromWKB(%(GeomFromWKB_3)s, %(GeomFromWKB_4)s)) AND ST_Distance(ST_Transform("table".geom, %(param_3)s), GeomFromWKB(%(GeomFromWKB_5)s, %(GeomFromWKB_6)s)) <= %(ST_Distance_1)s')
      assert wkb.loads(str(params["GeomFromWKB_1"])).equals(wkt.loads('POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'))
      assert params["GeomFromWKB_2"] == 900913
      assert params["ST_Expand_1"] == 1
      assert params["param_1"] == 900913
      assert params["ST_Distance_1"] == 1
Example #5
def read_cities(db, osm_id=0):
    cur = db.cursor()
    if osm_id:
        cur.execute(
            """SELECT ST_Union(pl.way) FROM planet_osm_polygon pl, planet_osm_polygon b WHERE b.osm_id = %s AND pl.place IN ('town', 'city') AND ST_Area(pl.way) < 500*1000*1000 AND ST_Contains(b.way, pl.way);""",
            (osm_id,),
        )
    else:
        cur.execute(
            """SELECT ST_Union(way) FROM planet_osm_polygon WHERE place IN ('town', 'city') AND ST_Area(way) < 500*1000*1000;"""
        )
    result = cur.fetchone()
    poly = loads(result[0].decode("hex")) if result and result[0] else Polygon()
    if osm_id:
        cur.execute(
            """SELECT ST_Union(ST_Buffer(p.way, 5000)) FROM planet_osm_point p, planet_osm_polygon b WHERE b.osm_id=%s AND ST_Contains(b.way, p.way) AND p.place IN ('town', 'city') AND NOT EXISTS(SELECT 1 FROM planet_osm_polygon pp WHERE pp.name=p.name AND ST_Contains(pp.way, p.way));""",
            (osm_id,),
        )
    else:
        cur.execute(
            """SELECT ST_Union(ST_Buffer(p.way, 5000)) FROM planet_osm_point p WHERE p.place in ('town', 'city') AND NOT EXISTS(SELECT 1 FROM planet_osm_polygon pp WHERE pp.name=p.name AND ST_Contains(pp.way, p.way));"""
        )
    result = cur.fetchone()
    if result and result[0]:
        poly = poly.union(loads(result[0].decode("hex")))
    return project(poly)
Example #6
def encode(file, features, zoom, is_clipped):
    ''' Encode a list of (WKB, property dict) features into a GeoJSON stream.
    
        Also accept three-element tuples as features: (WKB, property dict, id).
    
        Geometries in the features list are assumed to be unprojected lon, lats.
        Floating point precision in the output is truncated to six digits.
    '''
    try:
        # Assume three-element features
        features = [dict(type='Feature', properties=p, geometry=loads(g).__geo_interface__, id=i) for (g, p, i) in features]

    except ValueError:
        # Fall back to two-element features
        features = [dict(type='Feature', properties=p, geometry=loads(g).__geo_interface__) for (g, p) in features]
    
    if is_clipped:
        for feature in features:
            feature.update(dict(clipped=True))
    
    geojson = dict(type='FeatureCollection', features=features)
    encoder = json.JSONEncoder(separators=(',', ':'))
    encoded = encoder.iterencode(geojson)
    flt_fmt = '%%.%df' % precisions[zoom]
    
    for token in encoded:
        if charfloat_pat.match(token):
            # in python 2.7, we see a character followed by a float literal
            piece = token[0] + flt_fmt % float(token[1:])
        elif float_pat.match(token):
            # in python 2.6, we see a simple float literal
            piece = flt_fmt % float(token)
        else:
            piece = token
        file.write(piece.encode('utf8'))
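The encode() snippet above relies on module globals (precisions, charfloat_pat, float_pat) that are not shown. As a rough, self-contained sketch of the same idea — turning (WKB, properties) pairs into a GeoJSON FeatureCollection with reduced coordinate precision — something like the following could be used; the names and the rounding approach are illustrative assumptions, not the original module's code.

import json
from shapely.geometry import Point
from shapely.wkb import dumps as wkb_dumps, loads as wkb_loads

features_in = [(wkb_dumps(Point(12.3456789, 45.6789012)), {"name": "a"})]

features = [
    {"type": "Feature", "properties": props, "geometry": wkb_loads(g).__geo_interface__}
    for g, props in features_in
]
# Round coordinates directly (point geometries only, for brevity) instead of
# regex-rewriting the encoded JSON token stream as the snippet above does.
for f in features:
    geom = f["geometry"]
    f["geometry"] = dict(geom, coordinates=tuple(round(c, 6) for c in geom["coordinates"]))

print(json.dumps({"type": "FeatureCollection", "features": features}, separators=(",", ":")))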
Example #7
def generate_nearest_neighbor(scenario, pop_nodes, facility_nodes):
    """
    TODO, look to make this an interface
    Note that this function does not commit any edges to the database.
    arguments:
          scenario: the scenario we are running in
          pop_nodes: an iterable of next.models.Node
          facility_nodes: iterable of next.models.Node

    return: A list of edges that have the property of being a
            relation between a pop_node and its closest facility_node
    """
    edges = []
    for pop_node in pop_nodes:
        nearest = None
        nearestDist = float('inf')   # start beyond any real facility distance
        pop_geometry = loads(str(pop_node.point.geom_wkb))
        for fac_node in facility_nodes:
            fac_geometry = loads(str(fac_node.point.geom_wkb))
            between = compute_spherical_distance(pop_geometry.coords[0], fac_geometry.coords[0])
            if between <= nearestDist:
                nearest = fac_node
                nearestDist = between

        edge = Edge(
            scenario,
            pop_node,
            nearest,
            nearestDist)

        edges.append(edge)

    return edges
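Since next.models.Node/Edge and compute_spherical_distance are not available here, the following self-contained sketch shows the same nearest-neighbour pairing with plain Shapely points and planar distances; all names are illustrative stand-ins.

from shapely.wkb import loads
from shapely.geometry import Point

pop_points = [Point(0, 0).wkb, Point(5, 5).wkb]          # WKB, as stored on a node's point
facility_points = [Point(1, 1).wkb, Point(10, 10).wkb]

pairs = []
for pop_wkb in pop_points:
    pop = loads(pop_wkb)
    nearest, nearest_dist = None, float("inf")
    for fac_wkb in facility_points:
        fac = loads(fac_wkb)
        d = pop.distance(fac)          # planar stand-in for the spherical distance
        if d < nearest_dist:
            nearest, nearest_dist = fac, d
    pairs.append((pop, nearest, nearest_dist))

for pop, fac, d in pairs:
    print(pop.wkt, "->", fac.wkt, round(d, 3))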
Example #8
def get_street_multilines(db, opts, low_street, high_street):
    '''
    '''
    if high_street is None:
        name_test = 'name >= %s'
        values = (low_street, )

    else:
        name_test = 'name >= %s AND name < %s'
        values = (low_street, high_street)

    table = opts.table
    
    try:
        #
        # Try to let Postgres do the grouping for us, it's faster.
        #
        db.execute('''
            SELECT name, 'none' as kind, highway,
                   AsBinary(Transform(Collect(way), 4326)) AS way_wkb
            
            FROM street_ids
            
            WHERE %(name_test)s
            GROUP BY name, highway
            ORDER BY name''' % locals(), values)
    
        multilines = [(name, kind, highway, loads(bytes(way_wkb)))
                      for (name, kind, highway, way_wkb) in db.fetchall()]

    except OperationalError, err:
        #
        # Known to happen: "array size exceeds the maximum allowed (1073741823)"
        # Try again, but this time we'll need to do our own grouping.
        #
        logging.debug('Rolling back and doing our own grouping: %s' % err)
    
        db.execute('ROLLBACK')

        db.execute('''
            SELECT name, 'none' as kind, highway,
                   AsBinary(Transform(way, 4326)) AS way_wkb
            
            FROM street_ids
            
            WHERE %(name_test)s
            ORDER BY name, highway''' % locals(), values)
        
        logging.debug('...executed...')
        
        groups = groupby(db.fetchall(), lambda (n, k, h, w): (n, k, h))
        multilines = []
        
        logging.debug('...fetched...')
        
        for ((name, kind, highway), group) in groups:
            lines = [loads(bytes(way_wkb)) for (n, k, h, way_wkb) in group]
            multilines.append((name, kind, highway, MultiLineString(lines)))
    
        logging.debug('...collected.')

    return multilines
Example #9
def tre_d(layer):
    layer.select([])
    layer.setSelectedFeatures([obj.id() for obj in layer])
    mylayer = qgis.utils.iface.activeLayer()

    from shapely.wkb import loads

    # First pass: read point coordinates from the WKB of each selected feature
    x = []
    y = []
    z = []
    for elem in mylayer.selectedFeatures():
        geom = elem.geometry()
        wkb = geom.asWkb()
        x.append(loads(wkb).x)
        y.append(loads(wkb).y)
        z.append(loads(wkb).z)

    # Second pass: rebuild the coordinates from asPoint() and take z from attribute 15
    x = []
    y = []
    z = []
    for elem in mylayer.selectedFeatures():
        geom = elem.geometry()
        x.append(geom.asPoint()[0])
        y.append(geom.asPoint()[1])
        z.append(elem.attributeMap()[15].toFloat()[0])
Example #10
    def coastlines(self):
        '''
        Draws coastlines on the map.

        TODO: fill map with sea color if no coastline intersects the map but
            the area actually is no land mass
        '''

        coastlines = session.query(OSMLine).filter(and_(
            BBOX_QUERY_COND % ((OSMLine.__table__, ) + self.mapobj.bbox.bounds),
            OSMLine.natural=='coastline'
        )).all()
        coastpolygons = session.query(OSMPolygon).filter(and_(
            BBOX_QUERY_COND % ((OSMPolygon.__table__, ) + self.mapobj.bbox.bounds),
            OSMPolygon.natural=='coastline'
        )).all()
        # only fill the map with sea color if there is at least one coastline
        if coastlines or coastpolygons:
            lines = tuple(wkb.loads(str(cl.geom.geom_wkb))
                for cl in coastlines)
            merged = utils.merge_lines(lines)
            islands = []
            shorelines = []
            for line in merged:
                #: closed rings are islands and must be filled with map background
                if line.is_ring:
                    islands.append(line)
                else:
                    inter = line.intersection(self.mapobj.bbox)
                    points = line.intersection(self.mapobj.bbox.exterior)
                    #: only add line to closing process if number of intersections
                    #: with bbox is even. Otherwise we have an incomplete coastline
                    #: which ends in the visible map
                    if points.geom_type == 'MultiPoint' and len(points) % 2 == 0 \
                            and len(points) > 0:
                        if inter.geom_type == 'LineString':
                            shorelines.append(inter)
                        else:
                            shorelines.extend(inter)
            #: save all polygon coordinates as numpy arrays and add to islands
            for island in coastpolygons:
                islands.append(numpy.array(wkb.loads(str(island.geom.geom_wkb)).exterior))
            #: fill water with sea background
            shore = None
            for shore in utils.close_coastlines(shorelines, self.mapobj.bbox):
                self.mapobj.draw_polygon(
                    exterior=numpy.array(shore),
                    background_color=self.stylesheet.sea_background
                )
            #: fill map with sea background if there is no shoreline
            if shore is None and islands:
                self.mapobj.draw_background(self.stylesheet.sea_background)
            #: fill land filled area with map background
            for island in islands:
                self.mapobj.draw_polygon(
                    exterior=numpy.array(island),
                    background_color=self.stylesheet.map_background
                )
Example #11
    def test_wkb(self):

        p = Point(0.0, 0.0)
        wkb_big_endian = wkb.dumps(p, big_endian=True)
        wkb_little_endian = wkb.dumps(p, big_endian=False)
        # Regardless of byte order, loads ought to correctly recover the
        # geometry
        self.assertTrue(p.equals(wkb.loads(wkb_big_endian)))
        self.assertTrue(p.equals(wkb.loads(wkb_little_endian)))
Example #12
 def simple():
     for name, layer in l.items():
         for data in layer.get([(-0.25, 51), (0.25, 52)]):
             raw_data = data[-1]
             if isinstance(raw_data, str):
                 print wkb.loads(raw_data.decode('hex'))
             elif isinstance(raw_data, buffer):
                 print wkb.loads(str(raw_data))
             else:
                 print type(raw_data)
Example #13
 def process(value):
     if value is None:
         return None
     elif ';' in value:
         geom = wkb.loads(a2b_hex(value.split(';')[-1]))
         geom.srid = self.SRID
         return geom
     else:
         geom = wkb.loads(a2b_hex(value))
         geom.srid = self.SRID
         return geom
Example #14
 def json(self, rawjson=False, nogeom=False):
         o = {'service': '', 
              'id': self.id, 
              'label': '',
              'bbox': self.bbox if not nogeom else None, 
              'objectorig': self.objectorig,
              'name': self.name}
         if self.origin == 'zipcode':
             o.update({'service': 'postalcodes',
                       'name': self.name,
                       'nr': self.plz,
                       'label': "%s <b>%s - %s (%s)</b>"%(_('plz'), self.plz, self.ort_27, self.kanton)})
         elif self.origin == 'sn25':
             o.update({'service': 'swissnames',
                       'label': "<b>%s</b> (%s) - %s"%(self.name, self.kanton, self.gemname)})
         elif self.origin == 'gg25':
             o.update({'service': 'cities',
                       'name': self.gemname,
                       'bfsnr': self.bfsnr,
                       'nr': self.id,
                       'label': "<b>%s (%s)</b>"%(self.gemname, self.kanton)})
         elif self.origin == 'kantone':
             o.update({'service': 'cantons',
                       'name': self.name,
                       'bfsnr': self.bfsnr,
                       'code': self.kanton,
                       'nr': self.id,
                       'label': "%s <b>%s</b>"%(_('ct'), self.name)})
         elif self.origin == 'district':
             o.update({'service': 'districts',
                       'name': self.name,
                       'bfsnr': self.bfsnr,
                       'label': "%s <b>%s</b>"%( _('district'), self.name)})
         elif self.origin == 'address':
             if self.deinr is None:
                address_nr = ''
             else:
                address_nr = self.deinr
             o.update({'service': 'address',
                       'egid': self.egid,
                       'label': "%s %s <b>%s %s</b> "%(self.strname1, address_nr,self.plz, self.ort_27)})
         elif self.origin == 'parcel':
             o.update({'service': 'parcel',
                       'name': self.name,
                       'bfsnr': self.bfsnr,
                       'city': self.gemname,
                       'Y' : loads(self.geom_point.geom_wkb.decode('hex')).x,
                       'X' : loads(self.geom_point.geom_wkb.decode('hex')).y,
                       'label': "<b>%s %s</b> (%s)"%(self.gemname,self.name,_('parcel'))}) 
         if rawjson:
             del o['label']
             del o['bbox']
             o.pop('rank', None)  # 'rank' is not always present in o
         return o
Example #15
 def location_4326(self):
     if self._location_4326 is None:
         # God this is ugly Fix for bug #xxx in SQLAlchemy
         meta.Session.commit()
         if self._location_4326 is None:
             return None
     if ';' in self._location_4326:
         geom = wkb.loads(a2b_hex(self._location_4326.split(';')[-1]))
         geom.srid = 4326
         return geom
     else:
         geom = wkb.loads(a2b_hex(self._location_4326))
         geom.srid = 4326
         return geom
Example #16
def write_to(data, property_names, output_file):
    '''
    Write list of tuples to geojson.
       First entry of each tuple should be geometry in hex coordinates
       and the rest properties.

       Args:
           data: List of tuples.
           property_names: List of strings. Should be same length as the
                           number of properties.
           output_file (str): Output file name.

    '''

    geojson_features = []
    for entry in data:
        coords_in_hex, properties = entry[0], entry[1:]
        geometry = loads(coords_in_hex, hex=True)
        property_dict = dict(zip(property_names, properties))
        if geometry.geom_type == 'Polygon':
            coords = [list(geometry.exterior.coords)]   # brackets required
            geojson_feature = geojson.Feature(geometry=geojson.Polygon(coords),
                                              properties=property_dict)
        elif geometry.geom_type == 'Point':
            coords = list(geometry.coords)[0]
            geojson_feature = geojson.Feature(geometry=geojson.Point(coords),
                                              properties=property_dict)
        geojson_features.append(geojson_feature)

    feature_collection = geojson.FeatureCollection(geojson_features)

    with open(output_file, 'wb') as f:
        geojson.dump(feature_collection, f)
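A possible usage sketch, assuming write_to() above is importable together with its module-level `import geojson` and `from shapely.wkb import loads`; the file name and property values are made up for illustration.

from shapely.geometry import Point, Polygon

data = [
    (Point(2.35, 48.85).wkb_hex, "point of interest", 1),
    (Polygon([(0, 0), (1, 0), (1, 1), (0, 0)]).wkb_hex, "triangle", 2),
]
write_to(data, property_names=["label", "rank"], output_file="features.geojson")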
Example #17
    def _row_result_to_dict(row, past):
        """Convertit une requête pour être passée via JSON à l'appli web.

        ``row`` est le résultat de la requête dans "query_from_params()".
        ``past`` indique si l'événement est passé par rapport à now()
        """
        point = wkb.loads(str(row.st_asbinary))
        delta = abs(row.dtend - row.dtstart)
        duration = delta.seconds + delta.days * 86400  # 86400 seconds per day
        phone = row.arrondissement_phone
        nice_phone = "%s-%s-%s" % (phone[:3], phone[3:6], phone[6:])
        out = dict(occurence_id=row.id,
                   dtstart=row.dtstart.strftime("%Y-%m-%d %H:%M:%S"),
                   duration=duration,
                   title=row.title,
                   location=row.location,
                   location_info=row.location_info,
                   position=(point.x, point.y),
                   price=("%.2f $" % row.price) if row.price else 'GRATUIT',
                   distance="%0.1f" % (row.distance_1 / 1000.0),
                   categ_name=row.category_name,
                   categ_icon=row.icon_name.strip(),
                   arrond_name=row.arrondissement_name,
                   arrond_phone=nice_phone,
                   )
        out.update(Activity._format_date(row, past))
        return out
Example #18
def load(sourcePath, sourceProj4='', targetProj4=''):
    'Load proj4, shapelyGeometries, fields'
    # Get layer
    dataSource = ogr.Open(sourcePath)
    if not dataSource:
        raise GeometryError('Could not load {}'.format(os.path.basename(sourcePath)))
    layer = dataSource.GetLayer()
    # Get fieldDefinitions from featureDefinition
    featureDefinition = layer.GetLayerDefn()
    fieldIndices = xrange(featureDefinition.GetFieldCount())
    fieldDefinitions = []
    for fieldIndex in fieldIndices:
        fieldDefinition = featureDefinition.GetFieldDefn(fieldIndex)
        fieldDefinitions.append((fieldDefinition.GetName(), fieldDefinition.GetType()))
    # Get spatialReference
    spatialReference = layer.GetSpatialRef()
    sourceProj4 = (spatialReference.ExportToProj4() if spatialReference else '') or sourceProj4
    # Load shapelyGeometries and fieldPacks
    shapelyGeometries, fieldPacks = [], []
    transform_geometry = get_transform_geometry(sourceProj4, targetProj4)
    feature = layer.GetNextFeature()
    while feature:
        # Append
        shapelyGeometries.append(wkb.loads(transform_geometry(feature.GetGeometryRef()).ExportToWkb()))
        fieldPacks.append([feature.GetField(x) for x in fieldIndices])
        # Get the next feature
        feature = layer.GetNextFeature()
    # Return
    return targetProj4 or sourceProj4, shapelyGeometries, fieldPacks, fieldDefinitions
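A usage sketch for load(), assuming the get_transform_geometry() helper used above is available and that 'parcels.shp' is a hypothetical existing shapefile.

proj4, geometries, field_packs, field_definitions = load(
    'parcels.shp', targetProj4='+proj=longlat +datum=WGS84 +no_defs')
for geom, fields in zip(geometries, field_packs):
    print(geom.geom_type, fields)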
Example #19
 def process_data(self, query_result):
     
     
     size = len(query_result)
     
     # the geometries are not clipped, due to performance reasons, so we need some extra space around the tile
     invisible_space = { 'x' : 65536 - ( self.bbox['maxx']-self.bbox['minx'] ), 'y' : 65536 - ( self.bbox['maxy']-self.bbox['miny'] ) }
     offset = {'x': int(invisible_space['x']/2.0), 'y':int(invisible_space['y']/2.0)} #calculate offset
     
     table_name_header = array.array('c', self.table) #layer name
     header = array.array('L', [size,0]) # features size
     offset_headers = array.array('H', [offset['x'],offset['y']])# the offset
     sub_headers = array.array('H') # features
     data = array.array('H') # coordinate data
     
     #print offset_headers, data[:100]
     
     #build data structure
     for result in query_result:
         obj = loads(array.array('c',result[0]).tostring())
         
         sub_header = array.array('H', [len(obj.coords),0,0,0])
         
         sub_headers.extend(sub_header)
         
         for x,y in obj.coords:
             data.append(int(x-self.bbox['minx'])+offset['x'])
             data.append(int(y-self.bbox['miny'])+offset['y'])
     
     #return binary string
     return table_name_header.tostring()+header.tostring()+offset_headers.tostring()+sub_headers.tostring()+data.tostring()
Example #20
    def get_slope(self, level, wkb_geometry):
        """
        Calculates an average slope for geometry, using elevation data for a given level

        :param level: level of the elevation data
        :param wkb_geometry:
        :return: average slope
        """
        self.geometry = loads(wkb_geometry, hex=True)
        bounds = self.geometry.bounds  # geometry's bounding box
        # Calculate row/col for the tiles intersecting the geometry
        tlt = self.deg2num(bounds[1], bounds[0], level)
        trb = self.deg2num(bounds[3], bounds[2], level)
        average_slope = 0.0
        min_r = min(tlt[0], trb[0])
        min_c = min(tlt[1], trb[1])
        max_r = max(tlt[0], trb[0])
        max_c = max(tlt[1], trb[1])
        counter = 0
        # Loop through tiles of bounding box of geometry
        for c in range(min_c, max_c+1):
            for r in range(min_r, max_r+1):
                if self.is_tile_in_polygon(r, c, level):
                    self.download_tile(r, c, level)
                    slope = self.get_tile_slope(r, c, level)
                    average_slope += slope
                    counter += 1
        if counter > 0:
            average_slope = average_slope / counter
        return average_slope
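get_slope() depends on the class's deg2num() helper (not shown) to map a latitude/longitude to tile indices. A common implementation is the standard OSM slippy-map formula; the standalone version below is an assumption about what that helper does, not the original code.

import math

def deg2num(lat_deg, lon_deg, zoom):
    # Standard OSM slippy-map tile indices for a lat/lon at a given zoom level
    lat_rad = math.radians(lat_deg)
    n = 2 ** zoom
    xtile = int((lon_deg + 180.0) / 360.0 * n)
    ytile = int((1.0 - math.asinh(math.tan(lat_rad)) / math.pi) / 2.0 * n)
    return (xtile, ytile)

print(deg2num(47.3769, 8.5417, 12))   # tile indices containing Zurich at zoom 12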
Example #21
 def getLinkGeometry(self, linkId):
     self.pgCursor.execute(linkSql, [linkId])
     result = self.pgCursor.fetchone()
     
     linkGeom = wkb.loads(result[0], hex=True)
     
     return linkGeom
Example #22
    def onMessage(self, data, is_binary):

        assert is_binary

        msg = array.array("d", data)

        minx, maxx, miny, maxy = msg
        # print minx,maxx,miny,maxy

        assert maxx - minx < 65536 - 5000
        assert maxy - miny < 65536 - 5000

        self.cursor = self.connection.cursor()
        # self.cursor.execute("""select ST_AsBinary(st_transform(st_simplify(geom,1),4326)) from "VEG" """)
        # self.cursor.execute("""select ST_AsBinary(geom) from "VEG" limit 100 where geom &&  box2d(ST_GeomFromText('LINESTRING(583000 6644000, 584000 6645000)'))""")
        # self.cursor.execute("select ST_AsBinary(geom) from \"VEG\" where geom && box2d(ST_GeomFromText('LINESTRING("+str(minx)+" "+str(miny)+", "+str(maxx)+" "+str(maxy)+")'))")
        # self.cursor.execute("select ST_AsBinary(geom) from \"VEG\" where geom && box2d(ST_GeomFromText('LINESTRING("+str(minx)+" "+str(miny)+", "+str(maxx)+" "+str(maxy)+")'))")
        self.cursor.execute(
            'select ST_AsBinary(geom) from "VEGSIMPLE1" where geom && st_setsrid(box2d(ST_GeomFromText(\'LINESTRING('
            + str(minx)
            + " "
            + str(miny)
            + ", "
            + str(maxx)
            + " "
            + str(maxy)
            + ")')),25832)"
        )
        # self.cursor.execute("select ST_AsBinary(geom) from \"VEGSIMPLE2p5\" where geom && st_setsrid(box2d(ST_GeomFromText('LINESTRING("+str(minx)+" "+str(miny)+", "+str(maxx)+" "+str(maxy)+")')),25832)")
        # self.cursor.execute("select ST_AsBinary(geom) from \"VEGSIMPLE5\" where geom && st_setsrid(box2d(ST_GeomFromText('LINESTRING("+str(minx)+" "+str(miny)+", "+str(maxx)+" "+str(maxy)+")')),25832)")
        results = self.cursor.fetchall()
        size = len(results)

        header = array.array("L", [size, 0])
        sub_headers = array.array("H")
        data = array.array("H")

        for result in results:
            obj = loads(array.array("c", result[0]).tostring())

            sub_header = array.array("H", [len(obj.coords), 0, 0, 0])

            sub_headers.extend(sub_header)

            for x, y in obj.coords:
                # print maxx, x, int(maxx-x)+2500
                # print maxy, y, int(maxy-y)+2500
                data.append(int(maxx - x) + 2500)
                data.append(int(maxy - y) + 2500)

        result_data = header.tostring() + sub_headers.tostring() + data.tostring()

        # print header
        # print sub_headers

        print "Conversion finished"

        self.sendMessage(result_data, binary=True)

        print "Message sent"
Example #23
def get_intersecting_features(datasource, dataname, geometry, include_geom):
    '''
    '''
    features = []

    layer = datasource.GetLayer(0)
    
    defn = layer.GetLayerDefn()
    names = [defn.GetFieldDefn(i).name for i in range(defn.GetFieldCount())]
    
    layer.SetSpatialFilter(geometry)
    
    for feature in layer:
        properties = dict(dataset=dataname)
        
        for (index, name) in enumerate(names):
            properties[name] = feature.GetField(index)
        
        if not include_geom:
            features.append(dict(type='Feature', properties=properties, geometry=None))
            continue
        
        geometry = feature.GetGeometryRef()
        shape = wkb.loads(geometry.ExportToWkb())
        
        features.append(dict(type='Feature', properties=properties, geometry=shape.__geo_interface__))
    
    return features
Example #24
    def __geo_interface__(self):
        """ Objects implement the Python Geo Interface, making them
        candidates to serialization with the ``geojson`` module, or
        Papyrus' GeoJSON renderer.
        """
        id = None
        geom = None
        properties = {}
        
        if hasattr(self, '_shape'):
            geom = self._shape

        for p in class_mapper(self.__class__).iterate_properties:
            if not isinstance(p, ColumnProperty):
                continue
            col = p.columns[0]
            val = getattr(self, p.key)
            if col.primary_key:
                id = val
            elif isinstance(col.type, Geometry):
                if not geom:
                    geom = wkb.loads(str(val.geom_wkb))
            else:
                properties[p.key] = val

        return geojson.Feature(id=id, geometry=geom, properties=properties)
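A minimal standalone illustration of the Python Geo Interface mentioned in the docstring: any object exposing __geo_interface__ can be consumed by shapely.geometry.shape()/mapping() (and, as noted above, by GeoJSON renderers). The Tree class below is purely hypothetical.

from shapely.geometry import Point, shape

class Tree(object):
    def __init__(self, tree_id, x, y):
        self.id = tree_id
        self.point = Point(x, y)

    @property
    def __geo_interface__(self):
        return {
            "type": "Feature",
            "id": self.id,
            "geometry": self.point.__geo_interface__,
            "properties": {},
        }

tree = Tree(1, 7.44, 46.95)
print(tree.__geo_interface__["geometry"])          # {'type': 'Point', 'coordinates': (7.44, 46.95)}
print(shape(tree.__geo_interface__["geometry"]))   # POINT (7.44 46.95)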
Example #25
    def __read__(self):
        id = None
        geom = None
        properties = {}

        for p in class_mapper(self.__class__).iterate_properties:
            if isinstance(p, ColumnProperty):
                if len(p.columns) != 1:  # pragma: no cover
                    raise NotImplementedError
                col = p.columns[0]
                val = getattr(self, p.key)
                if col.primary_key:
                    id = val
                elif isinstance(col.type, Geometry) and col.name == self.geometry_column_to_return().name:
                    if hasattr(self, '_shape'):
                        geom = self._shape
                    else:
                        geom = wkb.loads(str(val.geom_wkb))
                elif not col.foreign_keys and not isinstance(col.type, Geometry):
                    properties[p.key] = val

        if self.__add_properties__:
            for k in self.__add_properties__:
                properties[k] = getattr(self, k)

        return geojson.Feature(id=id, geometry=geom, properties=properties)
Example #26
 def __geo_interface__(self):
     geometry = wkb.loads(str(self.geometry), True)
     properties = dict(name=self.name,
                       description=self.description,
                       thumbnail=self.thumbnail,
                       image=self.image,
                       color=self.color,
                       stroke=self.stroke,
                       isLabel=self.is_label,
                       isCircle=self.is_circle,
                       showOrientation=self.show_orientation,
                       linestyle='plain' if self.linestyle == 0
                       else 'dashed' if self.linestyle == 1 else 'dotted',
                       fid=self.id,
                       symbolId=self.symbol_id,
                       angle=self.angle if self.angle is not None else 0,
                       size=self.size if self.size is not None else 10,
                       fontSize=self.font_size
                       if self.font_size is not None else 15,
                       opacity=self.opacity
                       if self.opacity is not None else 0.5,
                       shape=self.shape
                       )
     return geojson.Feature(id=self.id,
                            geometry=geometry,
                            properties=properties
                            )
Example #27
 def geom(self):
     if self._geom is None:
         assert self._wkb is not None
         assert type(self._wkb) is str
         assert len(self._wkb) != 0
         self._geom = wkb.loads(self._wkb)
     return self._geom
Example #28
 def polycontainspoints(self, shapefile, pointfile):
     """
     Take comma-delimited lon,lat points from a pointfile and test each one for membership
     within the polygon specified by the shapefile.
     """
     from shapely.wkb import loads
     from shapely.geometry import Point
     # Open the shapefile
     source = ogr.Open(shapefile)
     # Extract the first layer, assume it is the only one
     layer = source.GetLayer(0)
     # Get the first feature, assume it is the only one
     feature = layer.GetNextFeature()
     # Convert the OGR polygon into a Shapely polygon using WKB (Well-Known Binary) format
     polygon = loads(feature.GetGeometryRef().ExportToWkb())
     # Read the lon,lat points from the file
     lonlats = open(pointfile, "r").readlines()
     # Initialize the result array
     result = []
     # Loop over the points, there's a faster way to do this, see Shapely manual section 5.1.1
     for lonlat in lonlats:
         lonlat = lonlat.split(",")
         lon, lat = [float(ll) for ll in lonlat]
         point = Point(lon, lat)
         within = polygon.contains(point)
         result.append((lon, lat, within))
     # Give back the result
     return result
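A self-contained sketch of the same membership test without the shapefile and point-file plumbing: build the polygon directly and test points against it.

from shapely.geometry import Point, Polygon

polygon = Polygon([(-1, -1), (-1, 1), (1, 1), (1, -1)])
lonlats = [(0.5, 0.5), (2.0, 0.0)]

result = [(lon, lat, polygon.contains(Point(lon, lat))) for lon, lat in lonlats]
print(result)   # [(0.5, 0.5, True), (2.0, 0.0, False)]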
Example #29
def weather_stations():
    #print "weather_stations()"
    raw_query_params = request.args.copy()
    #print "weather_stations(): raw_query_params=", raw_query_params

    stations_table = Table('weather_stations', Base.metadata, 
        autoload=True, autoload_with=engine, extend_existing=True)
    valid_query, query_clauses, resp, status_code = make_query(stations_table,raw_query_params)
    if valid_query:
        resp['meta']['status'] = 'ok'
        base_query = session.query(stations_table)
        for clause in query_clauses:
            print "weather_stations(): filtering on clause", clause
            base_query = base_query.filter(clause)
        values = [r for r in base_query.all()]
        fieldnames = [f for f in stations_table.columns.keys()]
        for value in values:
            d = {f:getattr(value, f) for f in fieldnames}
            loc = str(value.location)
            d['location'] = loads(loc.decode('hex')).__geo_interface__
            resp['objects'].append(d)
    resp['meta']['query'] = raw_query_params
    resp = make_response(json.dumps(resp, default=dthandler), status_code)
    resp.headers['Content-Type'] = 'application/json'
    return resp
Example #30
    def almanacs_kml(self):
        json = request.params.get('extent')
        # We need to make sure we only select almanacs with pages here,
        query = meta.Session.query(Almanac).join(Almanac.pages).distinct()
        # ... and eager-load the pages since the kml template uses them.
        query = query.options(eagerload(Almanac.pages))

        # Tried also with contains_eager, not sure what the difference is
        # but I only get a fraction of the expected almanacs:
        #query = meta.Session.query(Almanac).join(Almanac.pages).distinct()
        #query = query.options(contains_eager(Almanac.pages))

        # Also tried using a single, second query for the pages.
        # ... NOPE, requires sqlalchemy > 0.6.0 which blows up on us,
        # maybe not compatible w/ pylons 0.9.7?
        #query = meta.Session.query(Almanac).join(Almanac.pages).distinct()
        #query = query.options(subqueryload(Almanac.pages))

        # Tried also without the explicit join().distinct(), this gives
        # back all almanac whether they have any pages or not:
        #query = meta.Session.query(Almanac).options(eagerload(Almanac.pages))

        if json is not None:
            shape = simplejson.loads(json)
            # Stupid asShape returns an Adapter instead of a Geometry.  We round
            # trip it through wkb to get the correct type.
            bbox = wkb.loads(asShape(shape).to_wkb())
            query = query.filter(func.st_intersects(Almanac.location, func.st_transform('SRID=%s;%s' % ('4326', b2a_hex(bbox.to_wkb())), storage_SRID)))

        c.almanacs = query.order_by(Almanac.modified.desc()).limit(200).all()
        response.content_type = 'application/vnd.google-earth.kml+xml kml'
        return render('/almanac/kml.mako')
Example #31
def wkb_to_geojson(row, wkb_col):
    """
    Return a GeoJSON-like mapping from a row containing a hex WKB column
    """
    geom = wkb.loads(getattr(row, wkb_col), hex=True)
    return mapping(geom)
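A usage sketch, assuming wkb_to_geojson() above is in scope along with its `from shapely import wkb` and `from shapely.geometry import mapping` imports; the namedtuple stands in for a database row.

from collections import namedtuple
from shapely.geometry import Point

Row = namedtuple("Row", ["geom"])
row = Row(geom=Point(6.6, 46.5).wkb_hex)   # hex-encoded WKB, as stored by PostGIS
print(wkb_to_geojson(row, "geom"))         # {'type': 'Point', 'coordinates': (6.6, 46.5)}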
Example #32
connection = psycopg2.connect(database=options.databaseName,
                              user=options.userName)
cursor = connection.cursor()

sql = 'CREATE TABLE %s(id bigint PRIMARY KEY, name varchar(200), source varchar(100), woeType integer, geom geometry(MultiPolygon,4326))' % options.tableName
cursor.execute(sql)

count = 0
for feature in features.find({"hasPoly": True}):
    count += 1
    if (count % 10000 == 0):
        print("processed %d features" % count)

    for polygon in polygons.find({"_id": feature['polyId']}):
        source = polygon['source']
        geom = wkb.loads(polygon['polygon'])
        wktGeom = wkt.dumps(geom)
        name = ''
        enNames = [
            elem for elem in feature['displayNames'] if elem['lang'] == 'en'
        ]
        if (len(enNames) > 0):
            name = enNames[0]['name']
        else:
            if (len(feature['displayNames']) > 0):
                name = feature['displayNames'][0]['name']

        sql = 'INSERT into %s VALUES(%%s, %%s, %%s, %%s, ST_Multi(ST_GeomFromText(%%s, 4326)))' % options.tableName
        cursor.execute(sql, (feature['_id'], name, polygon['source'],
                             feature['_woeType'], wktGeom))
Example #33
def wkb_tranformation(line):
   return wkb.loads(line.geom, hex=True)
Example #34
    def query(self,
              geometry_operator='intersects',
              query_geometry=None,
              query_mbr=None,
              query_geometry_srid=None,
              only=None,
              start=None,
              end=None,
              limit=None,
              geometry_format='geojson',
              order_by=None,
              **kwargs):
        operators = {
            'eq': '=',
            '=': '=',
            'gt': '>',
            'ge': '>=',
            'lt': '<',
            'le': '<=',
            'contains': 'like',
            'startswith': 'like',
            'endswith': 'like',
            'isnull': '',
            'notnull': '',
            'ne': '!=',
            'regexp': 'regexp',
            'glob': 'glob',
            'match': 'match',
            'between': 'between',
            'like': 'like'
        }
        geom_operators = {
            'equals', 'disjoint', 'touches', 'within', 'overlaps', 'crosses',
            'intersects', 'contains', 'mbrequal', 'mbrdisjoint', 'mbrtouches',
            'mbrwithin', 'mbroverlaps', 'mbrintersects', 'mbrcontains'
        }

        c = self._cursor()
        keys = self.schema() if not only else only
        table = self._tablename
        index = self._index_name
        geometry = self._geometry_field
        geometry_operator = geometry_operator.lower(
        ) if geometry_operator else None

        if query_geometry and not isinstance(query_geometry, basestring):
            query_geometry = query_geometry.wkt
        elif query_mbr:
            query_mbr = shapely.geometry.box(*query_mbr)
            query_geometry = query_mbr.wkt

        limit_clause = 'LIMIT {limit}'.format(**locals()) if limit else ''
        start_clause = 'OGC_FID >= {start}'.format(
            **locals()) if start else False
        end_clause = 'OGC_FID <= {end}'.format(**locals()) if end else False
        columns = ','.join(keys)
        checks = [
            key.split('__') if '__' in key else [key, '=']
            for key in kwargs.keys()
        ]
        where_clauses = [
            '{variable} {op} ?'.format(variable=v, op=operators[o])
            for v, o in checks
        ]
        where_values = [
            "%" + x + '%' if checks[i][1] == 'contains' else x
            for i, x in enumerate(kwargs.values())
        ]
        where_values = [
            x + '%' if checks[i][1] == 'startswith' else x
            for i, x in enumerate(where_values)
        ]
        where_values = [
            '%' + x if checks[i][1] == 'endswith' else x
            for i, x in enumerate(where_values)
        ]

        if start_clause:
            where_clauses.append(start_clause)
        if end_clause:
            where_clauses.append(end_clause)

        if query_geometry:
            qg = "GeomFromText(?, {srid})".format(
                srid=int(query_geometry_srid
                         )) if query_geometry_srid else "GeomFromText(?)"
            if geometry_operator not in geom_operators and \
                    not geometry_operator.startswith('distance') and \
                    not geometry_operator.startswith('relate'):
                raise NotImplementedError(
                    'unsupported query operator for geometry')

            if geometry_operator.startswith('relate'):
                geometry_operator, matrix = geometry_operator.split(':')
                geometry_where = "relate({geometry}, {qg}, '{matrix}')"

            elif geometry_operator.startswith('distance'):
                geometry_operator, srid, comparator, val = geometry_operator.split(
                    ":")
                op = operators[comparator]
                val = float(val)
                geometry_where = "distance(transform({geometry}, {srid}), {qg}) {op} {val}".format(
                    **locals()) if len(
                        srid
                    ) > 0 else "distance({geometry}, {qg}) {op} {val}".format(
                        **locals())
            else:
                geometry_where = """{geometry_operator}({geometry}, {qg})""".format(
                    **locals())

            where_values.append(query_geometry)
            where_clauses.append(geometry_where)

        where_clauses = ' where ' + ' and '.join(where_clauses) if len(
            where_clauses) > 0 else ''

        query1 = 'select {columns} from {table} {where_clauses} {limit_clause}'.format(
            **locals())
        query2 = 'select AsBinary({geometry}) from {table} {where_clauses} {limit_clause}'.format(
            **locals())

        c.execute("select load_extension('libspatialite.so')")
        c.execute(query1, where_values)

        records = []
        for row in c.fetchall():
            records.append(dict(p for p in zip(keys, row) if p[0] != geometry))

        geo = []
        if (not only) or (geometry in only):
            c.execute(query2, where_values)

            if geometry_format.lower() == 'geojson':
                geo = [
                    json.loads(geojson.dumps(wkb.loads(str(g[0]))))
                    for g in c.fetchall()
                ]
            elif geometry_format.lower() == 'wkt':
                geo = [wkb.loads(str(g[0])).wkt for g in c.fetchall()]
            else:
                geo = [None for g in c.fetchall()]

        gj = []
        for i, record in enumerate(records):
            if (not only) or (geometry in only):
                record[geometry] = geo[i]
            gj.append(record)

        return gj
Example #35
 def node(self, node):
     if self._is_relevant_node(node):
         point_wkb = WKBFAB.create_point(node)
         point_geometry = wkblib.loads(point_wkb, hex=True)
         self.points.append(point_geometry)
Example #36
    def joiner(self, data):
        """
        Entry point for the class Join. This function identifies junctions
        (intersection points) of shared paths.

        The join function is the second step in the topology computation.
        The following sequence is adopted:
        1. extract
        2. join
        3. cut
        4. dedup
        5. hashmap

        Detects the junctions of shared paths from the specified hash of linestrings.

        After decomposing all geometric objects into linestrings it is necessary to
        detect the junctions or start and end-points of shared paths so these paths can
        be 'merged' in the next step. Merge is quoted as in fact only one of the
        shared path is kept and the other path is removed.

        Parameters
        ----------
        data : dict
            object created by the method topojson.extract.
        quant_factor : int, optional (default: None)
            quantization factor, used to constrain float numbers to integer values.
            - Use 1e4 for 5-digit values (00001-99999)
            - Use 1e6 for 7-digit values (0000001-9999999)

        Returns
        -------
        dict
            object expanded with
            - new key: junctions
            - new key: transform (if quant_factor is not None)
        """

        # presimplify linestrings if required
        if self.options.presimplify > 0:
            # set default if not specifically given in the options
            if type(self.options.presimplify) == bool:
                simplify_factor = 2
            else:
                simplify_factor = self.options.presimplify

            data["linestrings"] = simplify(
                data["linestrings"],
                simplify_factor,
                algorithm=self.options.simplify_algorithm,
                package=self.options.simplify_with,
                input_as="linestring",
            )

        # compute the bounding box of input geometry
        lsbs = geometry.asMultiLineString(data["linestrings"]).bounds
        ptbs = geometry.asMultiPoint(data["coordinates"]).bounds
        data["bbox"] = compare_bounds(lsbs, ptbs)

        # prequantize linestrings if required
        if self.options.prequantize > 0:
            # set default if not specifically given in the options
            if type(self.options.prequantize) == bool:
                quant_factor = 1e6
            else:
                quant_factor = self.options.prequantize

            data["linestrings"], data["transform"] = quantize(
                data["linestrings"], data["bbox"], quant_factor)

            data["coordinates"], data["transform"] = quantize(
                data["coordinates"], data["bbox"], quant_factor)

        if not self.options.topology or not data["linestrings"]:
            data["junctions"] = self.junctions
            return data

        if self.options.shared_paths == "coords":

            def _get_verts(geom):
                # get coords of each LineString
                return [x for x in geom.coords]

            geoms = {}
            junctions = []

            for ls in data["linestrings"]:
                verts = _get_verts(ls)
                for i, vert in enumerate(verts):
                    ran = geoms.pop(vert, None)
                    neighs = sorted([
                        verts[i - 1], verts[i + 1 if i < len(verts) - 1 else 0]
                    ])
                    if ran and ran != neighs:
                        junctions.append(vert)
                    geoms[vert] = neighs

            self.junctions = [geometry.Point(xy) for xy in set(junctions)]
        else:

            # create list with unique combinations of lines using a rdtree
            line_combs = select_unique_combs(data["linestrings"])

            # iterate over index combinations
            for i1, i2 in line_combs:
                g1 = data["linestrings"][i1]
                g2 = data["linestrings"][i2]

                # check if geometry are equal
                # being equal meaning the geometry object coincide with each other.
                # a rotated polygon or reversed linestring are both considered equal.
                if not g1.equals(g2):
                    # geoms are unique, let's find junctions
                    self.shared_segs(g1, g2)

            # self.segments are nested lists of LineStrings, get coordinates of each nest
            s_coords = []
            for segment in self.segments:
                s_coords.extend([[(x.xy[0][y], x.xy[1][y]) for x in segment
                                  for y in range(len(x.xy[0]))]])
                # s_coords.extend([[y for x in segment for y in list(x.coords)]])

            # only keep junctions that appear only once in each segment (nested list)
            # coordinates that appear multiple times are not junctions
            for coords in s_coords:
                self.junctions.extend([
                    geometry.Point(i) for i in coords if coords.count(i) == 1
                ])

            # junctions can appear multiple times in multiple segments, remove duplicates
            self.junctions = [
                loads(xy) for xy in list(set([x.wkb for x in self.junctions]))
            ]

        # prepare to return object
        data["junctions"] = self.junctions

        return data
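A standalone sketch of what a junction means here: where two linestrings run along a shared path, the endpoints of that shared segment are the junctions. This uses plain Shapely rather than the Join class machinery above, and the geometries are made up.

from shapely.geometry import LineString
from shapely.wkb import loads

g1 = LineString([(0, 0), (1, 0), (2, 0), (3, 0)])
g2 = LineString([(1, 0), (2, 0), (2, 1)])

shared = g1.intersection(g2)               # the shared run of the two lines, from (1, 0) to (2, 0)
junctions = list(shared.boundary.geoms)    # its endpoints: POINT (1 0) and POINT (2 0)

# Deduplicate via WKB, as the joiner above does for its junction list.
junctions = [loads(b) for b in {p.wkb for p in junctions}]
print([p.wkt for p in junctions])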
Example #37
import geopandas as gdp
import cartoframes
import pandas as pd
APIKEY = "1353407a098fef50ec1b6324c437d6d52617b890"

cc = cartoframes.CartoContext(base_url='https://lokiintelligent.carto.com/',
                              api_key=APIKEY)
from shapely.geometry import Point
from shapely.wkb import loads
arenas_df = cc.read('arenas_nba')
shp = r"C:\Data\US_States\US_States.shp"
states_df = gdp.read_file(shp)

# for index, arena in arenas_df.iterrows():
# 	print(arena['the_geom'])
# 	arenas_df.at[index, 'the_geom'] = loads(arena.the_geom, hex=True)
# 	print(arenas_df.at[index, 'the_geom'])
data=[]



for index, orig in states_df.iterrows():
    for index2, ref in arenas_df.iterrows():
        if loads(ref['the_geom'], hex=True).intersects(orig['geometry']):
            print(orig['STATE'], ref['team'])
Example #38
import os
import errno
import json
import psycopg2
from shapely.wkb import loads
from shapely.geometry import mapping

conn = psycopg2.connect(dbname="us.ca.san_francisco")
cur = conn.cursor()
cur.execute(
    "select ST_AsBinary(bounds_for_tile_indices(y, x, z)), x,y as y,z as geom from tasks order by x, y;"
)

features = []
for result in cur:
    x = result[1]
    y = result[2]
    z = result[3]
    name = "{0}_{1}_{2}.osm".format(z, x, y)
    features.append({
        'type': 'Feature',
        'properties': {
            'import_url':
            "http://tiles.openmassing.org/api/sfbuildingheight_" + name
        },
        'id': name,
        'geometry': mapping(loads(str(result[0])))
    })

print json.dumps({'type': 'FeatureCollection', 'features': features})
Example #39
 def parse(self, shape):
     """Parses coordinates or shapely object"""
     if shape:
         if hasattr(shape, 'name'):
             self.name = shape.name
         # Check for class with a geometry attribute
         try:
             shape = shape.geometry
         except AttributeError:
             pass
         if isinstance(shape, NaiveGeoMetry):
             # Transform the shape to the given CRS if necessary
             if epsg_id(self.crs) != epsg_id(shape.crs):
                 shape = shape.transform(self.crs)
             # Shape is an instance of this class
             self.verbatim = shape.verbatim
             self.verbatim_shape = shape.verbatim_shape
             self.verbatim_crs = shape.verbatim_crs
             self.geom_type = shape.geom_type
             self.shape = shape.parsed_shape
             # Attributes that only exist in a subclass will not be carried over
             for attr in self.cache:
                 try:
                     setattr(self, attr, getattr(shape, attr))
                 except AttributeError:
                     pass
             if self._radius_km is None:
                 # Setting the private attribute sidesteps the radius
                 # setter, which produces a different shape for points
                 self.radius_km = shape.radius_km
             self.subshapes = shape.subshapes
             return None
         if isinstance(shape, BaseGeometry):
             # Shape is a shapely geometry object
             return shape
         if isinstance(shape, bytes):
             return wkb.loads(shape)
         if isinstance(shape, str):
             return wkt.loads(shape)
         if isinstance(shape, dict):
             # Shape is a GeoNames-style bounding box
             lats = [shape['south'], shape['north']]
             lngs = [shape['west'], shape['east']]
             return Polygon(bounding_box(lats[0], lngs[0], lats[1],
                                         lngs[1]))
         if isinstance(shape, (list, tuple)):
             shape = shape[:]
             # Convert numpy arrays to lists
             try:
                 shape = [c.tolist() for c in shape]
             except AttributeError:
                 pass
             # Extract underlying shapely shapes from a list of geometries
             if isinstance(shape[0], NaiveGeoMetry):
                 geoms = []
                 for geom in shape:
                     shape = geom.verbatim_shape
                     geom = self.__class__(shape, crs=geom.verbatim_crs)
                     if geom.crs != self.crs:
                         geom = geom.transform(self.crs)
                     geoms.append(geom)
                 shape = [g.shape for g in geoms]
             # Lists of shapely objects
             if isinstance(shape[0], BaseGeometry):
                 if len(shape) == 1:
                     return shape[0]
                 # Shape is a list mixing multiple shapely objects
                 if len({s.geom_type for s in shape}) > 1:
                     return GeometryCollection(shape)
                 # Shape is a list of Points
                 shape_class = LineString if len(shape) == 2 else Polygon
                 try:
                     return shape_class([(p.x, p.y) for p in shape])
                 except AttributeError:
                     pass
                 # Shape is a list of Polygons
                 if isinstance(shape[0], Polygon):
                     try:
                         return MultiPolygon(shape)
                     except ValueError:
                         pass
                 # Shape is a list of LineStrings
                 if isinstance(shape[0], LineString):
                     try:
                         return MultiLineString(shape)
                     except ValueError:
                         pass
             # Shape is a list of coordinates
             list_of_lists = isinstance(shape[0], (list, tuple))
             try:
                 list_of_pairs = all([len(c) == 2 for c in shape[:10]])
             except TypeError:
                 list_of_pairs = False
             if list_of_lists and list_of_pairs:
                 # Shape is [(lat, lng)] or [(lat1, lng1),...]
                 lat_lngs = list(shape)
             elif list_of_lists:
                 # Shape is [lats, lngs]
                 lat_lngs = list(zip(*shape))
             elif len(shape) == 2:
                 # Shape is (lat, lng)
                 lat_lngs = [shape]
             else:
                 msg = 'Parse failed: {} (unknown format)'.format(shape)
                 logger.error(msg)
                 raise ValueError(msg)
             # Ensure that coordinates are floats
             lats = []
             lngs = []
             for lat, lng in lat_lngs:
                 lats.append(self.parse_coordinate(lat, 'latitude'))
                 lngs.append(self.parse_coordinate(lng, 'longitude'))
             # Convert coordinates to shapely geometry
             xy = list(zip(lngs, lats))
             if len(xy) == 1:
                 return Point(xy[0])
             if len(xy) == 2:
                 return LineString(xy)
             return Polygon(xy)
     msg = 'Parse failed: {} (empty)'.format(shape)
     raise ValueError(msg)
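The list-of-coordinates branch above dispatches on layout: a single (lat, lng) pair, a [lats, lngs] pair of lists, or a list of (lat, lng) pairs. A standalone sketch of that dispatch, with the original's CRS handling and coordinate validation left out (parse_lat_lngs is a hypothetical helper name):

from shapely.geometry import LineString, Point, Polygon

def parse_lat_lngs(shape):
    # Sketch: map a list-based coordinate spec onto a shapely geometry
    if isinstance(shape[0], (list, tuple)):
        if all(len(c) == 2 for c in shape):
            lat_lngs = list(shape)            # [(lat, lng), ...]
        else:
            lat_lngs = list(zip(*shape))      # [lats, lngs]
    else:
        lat_lngs = [tuple(shape)]             # a single (lat, lng) pair
    xy = [(float(lng), float(lat)) for lat, lng in lat_lngs]
    if len(xy) == 1:
        return Point(xy[0])
    if len(xy) == 2:
        return LineString(xy)
    return Polygon(xy)

# The same square expressed as [lats, lngs] and as a list of (lat, lng) pairs
print(parse_lat_lngs([[10, 10, 20, 20], [30, 40, 40, 30]]))
print(parse_lat_lngs([(10, 30), (10, 40), (20, 40), (20, 30)]))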
Example #40
0
def get_boundary():
    obj = models.SanFranciscoDivision()
    wkb_hex_str = obj.select(fields=('mpoly', ))[0]['mpoly']
    mpoly = wkb.loads(wkb_hex_str, hex=True)
    return mpoly
Example #41
0
 def test_wkb_is_valid(self):
     from shapely.wkb import loads
     self.assertTrue(bool(loads(self.geom.wkb)))
Example #42
0
 def transform_shapely_geometry(shapely_geometry):
     gdal_geometry = ogr.CreateGeometryFromWkb(shapely_geometry.wkb)
     return wkb.loads(transform_gdal_geometry(gdal_geometry).ExportToWkb())
Example #43
0
polyLayer = processing.getObject(Polygons)
polyPrder = polyLayer.dataProvider()
n = polyLayer.featureCount()
l = 0

writer = processing.VectorWriter(Results, None, polyPrder.fields(),
                                 QGis.WKBMultiPolygon, polyPrder.crs())

resgeom = QgsGeometry()
resfeat = QgsFeature()

for feat in processing.features(polyLayer):
    progress.setPercentage(int(100 * l / n))
    l += 1

    g = loads(feat.geometry().asWkb())

    if g.geom_type == 'MultiPolygon':
        resg = [
            Polygon(p.exterior,
                    [r for r in p.interiors if Polygon(r).area > Max_area])
            for p in g
        ]

    else:
        resg = [
            Polygon(g.exterior,
                    [r for r in g.interiors if Polygon(r).area > Max_area])
        ]

    resgeom = QgsGeometry().fromWkt(dumps(MultiPolygon(resg)))
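A plain-Shapely sketch of the same idea as the script above, dropping interior rings (holes) whose area falls below a threshold, without the QGIS plumbing; drop_small_holes and max_hole_area are illustrative names:

from shapely.geometry import MultiPolygon, Polygon

def drop_small_holes(geom, max_hole_area):
    # Return a copy of geom with interior rings smaller than max_hole_area removed
    def _clean(poly):
        kept = [r for r in poly.interiors if Polygon(r).area > max_hole_area]
        return Polygon(poly.exterior, kept)
    if geom.geom_type == 'MultiPolygon':
        return MultiPolygon([_clean(p) for p in geom.geoms])
    return _clean(geom)

outer = [(0, 0), (10, 0), (10, 10), (0, 10)]
small_hole = [(1, 1), (2, 1), (2, 2), (1, 2)]        # area 1
big_hole = [(4, 4), (8, 4), (8, 8), (4, 8)]          # area 16
poly = Polygon(outer, [small_hole, big_hole])
print(drop_small_holes(poly, max_hole_area=5).wkt)   # only the big hole survives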
Example #44
0
from shapely import geometry, ops, wkb
start = 2372627054
end = 19191797
# end = 1889862438

start = 19191797
end = 1889862438

# Using OSM ids as source and target values, calculate the shortest route and convert it to a shapely geometric object
query = f"SELECT seq, edge, b.the_geom AS \"the_geom (truncated)\" FROM pgr_dijkstra('select gid as id, source_osm as source, target_osm as target, length as cost FROM ways as ways_outer, (select ST_Expand(ST_Extent(the_geom),0.1) as box from ways as box_table where box_table.source_osm = {start} OR box_table.target_osm = {end}) as box_final where ways_outer.the_geom && box_final.box', {start}, {end}, false) as a INNER JOIN ways as b ON a.edge = b.gid ORDER BY seq;"
query = f"SELECT UNNEST(pgr_flipedges(ARRAY(SELECT st_astext(b.the_geom) AS \"the_geom (truncated)\" FROM pgr_dijkstra('select gid as id, source_osm as source, target_osm as target, length as cost FROM ways as ways_outer, (select ST_Expand(ST_Extent(the_geom),0.05) as box from ways as box_table where box_table.source_osm = 19191797 OR box_table.target_osm = 1889862438) as box_final where ways_outer.the_geom && box_final.box', {start}, {end}, false) as a INNER JOIN ways as b ON a.edge = b.gid ORDER BY seq)));"
curs.execute(query)
# Load_populations.database.cursor.execute("SELECT seq, edge, b.the_geom AS \"the_geom (truncated)\", b.name FROM pgr_dijkstra('SELECT gid as id, source_osm as source, target_osm as target, length as cost FROM ways',%s, %s, false) a INNER JOIN ways b ON (a.edge = b.gid) ORDER BY seq;", [start, end])
#route_list = [wkb.loads(row[2], hex=True) for row in curs]
route_list = [wkb.loads(row[0], hex=True) for row in curs]
# print("route_list", list(route_list[0].coords))
# print("route_list", route_list)
for v in route_list:
    print(v)
# merge linestrings into one large shapely linestring object
merged_routes = ops.linemerge([*route_list])
# print(merged_routes)

print("!!!!!!!!!!!!!!!!!!!!!!!!!")
start = 1889862438
end = 19191797
start = 2372627054
end = 2448172873
query = f"SELECT seq, edge, b.the_geom AS \"the_geom (truncated)\" FROM pgr_dijkstra('select gid as id, source_osm as source, target_osm as target, length as cost FROM ways as ways_outer, (select ST_Expand(ST_Extent(the_geom),0.1) as box from ways as box_table where box_table.source_osm = {start} OR box_table.target_osm = {end}) as box_final where ways_outer.the_geom && box_final.box', {start}, {end}, false) as a INNER JOIN ways as b ON a.edge = b.gid ORDER BY seq;"
query = f"SELECT UNNEST(pgr_flipedges(ARRAY(SELECT st_astext(b.the_geom) AS \"the_geom (truncated)\" FROM pgr_dijkstra('select gid as id, source_osm as source, target_osm as target, length as cost FROM ways as ways_outer, (select ST_Expand(ST_Extent(the_geom),0.1) as box from ways as box_table where box_table.source_osm = {start} OR box_table.target_osm = {end}) as box_final where ways_outer.the_geom && box_final.box', {start}, {end}, false) as a INNER JOIN ways as b ON a.edge = b.gid ORDER BY seq)));"
Example #45
0
p = Point([-106.578677,35.062485]) 
pgeojson=mapping(p) 
player=GeoJSON(data=pgeojson) 
map.add_layer(player)


cursor.execute("SELECT ST_AsGeoJSON(ST_Buffer(ST_GeomFromText('{}')::geography,1500));".format(p.wkt)) 
buff=cursor.fetchall() 
buffer=json.loads(buff[0][0]) 
bufferlayer=GeoJSON(data=buffer) 
map.add_layer(bufferlayer) 


cursor.execute("SELECT ST_AsText(ST_Buffer(ST_GeomFromText('{}')::geography,1500));".format(p.wkt) ) 
bufferwkt=cursor.fetchall() 
b=loads(bufferwkt[0][0]) 

cursor.execute("SELECT ST_AsGeoJSON(incidents.geom) FROM incidents where ST_Intersects(ST_GeomFromText('{}'), incidents.geom) and date >= NOW() interval '10 day';".format(b.wkt)) 
crime=cursor.fetchall() 
for x in crime:    
    layer=json.loads(x[0])    
    layergeojson=GeoJSON(data=layer)    
    map.add_layer(layergeojson)


p = Point([-106.578677,35.062485]) 
cursor.execute("SELECT ST_AsGeoJSON(incidents.geom), ST_Distance(incidents.geom::geography,ST_GeometryFromText('{}')::geography) from incidents ORDER BY incidents.geom<->ST_GeometryFromText('{}') LIMIT 15".format(p.wkt,p.wkt)) 
c=cursor.fetchall() 
for x in c:    
    layer=json.loads(x[0])    
    layergeojson=GeoJSON(data=layer)    
Example #46
0
def main(shapes_file_list, db_file, groups):
    field_ids = {}
    # Create a GlobalMercator object for later conversions

    merc = GlobalMercator()

    # Set-up the output db

    conn = sqlite3.connect(db_file)
    c = conn.cursor()
    #c.execute("drop table if exists people_by_group")
    c.execute(
        "create table if not exists people_by_group (x real, y real, quadkey text, rand real, group_type text)"
    )
    c.execute("drop index if exists i_quadkey")

    # Open the shapefiles

    for input_filename in shapes_file_list:
        print "Processing file {0}".format(input_filename)
        ds = ogr.Open(input_filename)

        if ds is None:
            print "Open failed.\n"
            sys.exit(1)

        # Obtain the first (and only) layer in the shapefile

        lyr = ds.GetLayerByIndex(0)

        lyr.ResetReading()

        # Obtain the field definitions in the shapefile layer

        feat_defn = lyr.GetLayerDefn()
        field_defns = [
            feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())
        ]

        # Set up a coordinate transformation to latlon
        wgs84 = osr.SpatialReference()
        wgs84.SetWellKnownGeogCS("WGS84")
        sr = lyr.GetSpatialRef()
        xformer = osr.CoordinateTransformation(sr, wgs84)

        # Obtain the index of the group fields
        for i, defn in enumerate(field_defns):
            if defn.GetName() in groups:
                field_ids[defn.GetName()] = i

        # Obtain the number of features (Census Blocks) in the layer
        n_features = len(lyr)

        # Iterate through every feature (Census Block Polygon) in the layer,
        # obtain the population counts, and create a point for each person within
        # that feature.
        for j, feat in enumerate(lyr):

            # Print a progress read-out for every 1000 features and export to hard disk
            if j % 1000 == 0:
                conn.commit()
                print "%s/%s (%0.2f%%)" % (j + 1, n_features, 100 *
                                           ((j + 1) / float(n_features)))

            # Obtain total population, racial counts, and state fips code of the individual census block

            counts = {}
            for f in field_ids:
                val = feat.GetField(field_ids[f])
                if val:
                    counts[f] = int(val)
                else:
                    counts[f] = 0

            # Obtain the OGR polygon object from the feature
            geom = feat.GetGeometryRef()
            if geom is None:
                continue

            # Convert the OGR Polygon into a Shapely Polygon
            poly = loads(geom.ExportToWkb())

            if poly is None:
                continue

            # Obtain the "boundary box" of extreme points of the polygon
            bbox = poly.bounds

            if not bbox:
                continue

            leftmost, bottommost, rightmost, topmost = bbox

            # Generate a point object within the census block for every person by race

            for f in field_ids:
                for i in range(counts[f]):
                    # Choose a random longitude and latitude within the boundary box
                    # and within the original polygon of the census block
                    while True:
                        samplepoint = Point(uniform(leftmost, rightmost),
                                            uniform(bottommost, topmost))
                        if samplepoint is None:
                            break
                        if poly.contains(samplepoint):
                            break

                    # Convert the longitude and latitude coordinates to meters and
                    # a tile reference

                    try:
                        # In general we don't know the coordinate system of input data
                        # so transform it to latlon
                        lon, lat, z = xformer.TransformPoint(
                            samplepoint.x, samplepoint.y)
                        x, y = merc.LatLonToMeters(lat, lon)
                    except:
                        print "Failed to convert ", lat, lon
                        sys.exit(-1)
                    tx, ty = merc.MetersToTile(x, y, 21)

                    # Create a unique quadkey for each point object
                    quadkey = merc.QuadTree(tx, ty, 21)

                    # Create categorical variable for the race category
                    group_type = f

                    # Export data to the database file
                    try:
                        c.execute(
                            "insert into people_by_group values (?,?,?,random(),?)",
                            (x, y, quadkey, group_type))
                    except:
                        print "Failed to insert ", x, y, tx, ty, group_type
                        sys.exit(-1)

            c.execute(
                "create index if not exists i_quadkey on people_by_group(x, y, quadkey, rand, group_type)"
            )
        conn.commit()
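The inner while loop above is a rejection sampler: draw uniform points from the polygon's bounding box until one falls inside the polygon itself. A minimal standalone sketch of that technique (random_point_in is an illustrative helper name):

from random import uniform

from shapely.geometry import Point, Polygon

def random_point_in(poly):
    # Rejection-sample a uniformly distributed random point inside poly
    minx, miny, maxx, maxy = poly.bounds
    while True:
        candidate = Point(uniform(minx, maxx), uniform(miny, maxy))
        if poly.contains(candidate):
            return candidate

triangle = Polygon([(0, 0), (10, 0), (0, 10)])
pt = random_point_in(triangle)
print(pt.wkt, triangle.contains(pt))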
Example #47
0
 def f_y(x):
     p = wkb.loads(x, hex=True)
     return p.y
Example #48
0
    def runDataRef(self, dataRef):
        skyMap = dataRef.get('deepCoadd_skyMap', immediate=True)
        tractId = dataRef.dataId['tract']
        tract = skyMap[tractId]
        angle = self.config.theta_grid
        ra_vert, dec_vert = list(zip(*tract.getVertexList()))
        ra_vert = sorted(ra_vert)
        dec_vert = sorted(dec_vert)
        ra0 = ra_vert[0].asDegrees()
        ra1 = ra_vert[-1].asDegrees()
        dec0 = dec_vert[0].asDegrees()
        dec1 = dec_vert[-1].asDegrees()

        if self.config.innerTract:
            ra0 += 0.0167
            ra1 -= 0.0167
            dec0 += 0.0167
            dec1 -= 0.0167

        raArr, decArr = InputFakesGrid(ra0, ra1, dec0, dec1,
                                       self.config.theta_grid)
        nFakes = raArr.size

        if (self.config.acpMask != '') or (self.config.rejMask != ''):
            try:
                from shapely import wkb
                from shapely.geometry import Point
                from shapely.prepared import prep

                # Filter through the accept mask
                acpUse = self.config.acpMask
                if (acpUse != '') and os.path.isfile(acpUse):
                    print("## Filter through : %s" % acpUse)
                    acpWkb = open(acpUse, 'r')
                    acpRegs = wkb.loads(acpWkb.read().decode('hex'))
                    acpPrep = prep(acpRegs)
                    acpWkb.close()

                    inside = np.asarray(
                        list(
                            map(lambda x, y: acpPrep.contains(Point(x, y)),
                                raArr, decArr)))
                else:
                    inside = np.isfinite(raArr)

                # Filter through the reject mask
                rejUse = self.config.rejMask
                if (rejUse != '') and os.path.isfile(rejUse):
                    print("## Filter through : %s" % rejUse)
                    rejWkb = open(rejUse, 'r')
                    rejRegs = wkb.loads(rejWkb.read().decode('hex'))
                    rejPrep = prep(rejRegs)
                    rejWkb.close()
                    masked = np.asarray(
                        list(
                            map(lambda x, y: rejPrep.contains(Point(x, y)),
                                raArr, decArr)))
                else:
                    masked = np.isnan(raArr)

                useful = np.asarray(
                    list(map(lambda x, y: x and (not y), inside, masked)))
                ra, dec = raArr[useful], decArr[useful]

                print("## %d out of %d objects left" % (len(ra), len(raArr)))

                # Keep a log of the deleted ra, dec
                ra_deleted = raArr[np.invert(useful)]
                dec_deleted = decArr[np.invert(useful)]
                deleteLog = 'src_%d_radec_grid_%0.1farcsec_deleted' % (tractId,
                                                                       angle)
                np.save(os.path.join(self.config.outDir, deleteLog),
                        [ra_deleted, dec_deleted])
            except ImportError:
                warnings.warn('Can not import Shapely, no filter performed!')
                ra, dec = raArr, decArr
        else:
            ra, dec = raArr, decArr

        # Number of fakes that will be added
        nFakes = len(ra)
        # Create an empty astropy.Table object
        outTab = astropy.table.Table()

        # Add columns for Ra, Dec
        outTab.add_column(astropy.table.Column(name="RA", data=ra))
        outTab.add_column(astropy.table.Column(name="Dec", data=dec))
        if self.config.inputCat is not None:
            galData = astropy.table.Table().read(self.config.inputCat)
            randInd = np.random.choice(list(range(len(galData))), size=nFakes)
            mergedData = galData[randInd]

            for colname in mergedData.columns:
                colAdd = astropy.table.Column(name=colname,
                                              data=mergedData[colname])
                outTab.add_column(colAdd)

            # Replace ID with a unique integer (using index)
            if ('ID' in outTab.colnames) and (self.config.uniqueID):
                print("## Rename the ID column")
                outTab.rename_column('ID', 'modelID')
                outTab.add_column(
                    astropy.table.Column(name="ID",
                                         data=np.arange(len(outTab))))
            elif ('ID' not in outTab.colnames):
                outTab.add_column(
                    astropy.table.Column(name="ID",
                                         data=np.arange(len(outTab))))

            # Generate multiBand catalog at the same time
            magList = [col for col in galData.colnames if 'mag_' in col]
            if len(magList) >= 1:
                print("Find magnitude in %d band(s)" % len(magList))
                for mag in magList:
                    try:
                        outTab.remove_column('mag')
                    except KeyError:
                        pass
                    outTab.add_column(
                        astropy.table.Column(name='mag', data=mergedData[mag]))
                    filt = mag.split('_')[1].upper()
                    outFits = 'src_%d_radec_grid_%0.1fasec_%s.fits' % (
                        tractId, angle, filt)
                    outFits = os.path.join(self.config.outDir, outFits)
                    outTab.write(outFits, overwrite=True)
            else:
                outFits = 'src_%d_radec_grid_%0.1fasec.fits' % (tractId, angle)
                outTab.write(os.path.join(self.config.outDir, outFits),
                             overwrite=True)
        else:
            outFits = 'src_%d_radec_only_grid_%0.1fasec.fits' % (tractId,
                                                                 angle)
            outTab.write(os.path.join(self.config.outDir, outFits),
                         overwrite=True)
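The accept/reject masks above lean on shapely prepared geometries, which make repeated contains() tests against the same region much cheaper. A small sketch of that pattern, with a hard-coded box standing in for the WKB mask file:

import numpy as np
from shapely.geometry import Point, box
from shapely.prepared import prep

ra = np.random.uniform(0.0, 2.0, size=1000)
dec = np.random.uniform(0.0, 2.0, size=1000)

mask = box(0.5, 0.5, 1.5, 1.5)          # stand-in for the accept-mask region
prepared = prep(mask)                    # prepare once, test many times
inside = np.array([prepared.contains(Point(x, y)) for x, y in zip(ra, dec)])

print("%d of %d points kept" % (inside.sum(), len(ra)))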
Example #49
0
 def lambert_and_centroid(geom):
     obj = wkb.loads(geom,hex=True)
     return obj,obj.centroid
Example #50
0
        return pols
    return []


layer = processing.getobject(input)
provider = layer.dataProvider()
fields = provider.fields()
buffer_dist = distance / 2

inFeat = QgsFeature()
inGeom = QgsGeometry()

outFeat = QgsFeature()
writer = VectorWriter(output, None, fields, provider.geometryType(),
                      layer.crs())

feats = processing.getfeatures(layer)
for inFeat in feats:
    inGeom = inFeat.geometry()
    if inGeom is not None:
        poly = loads(inGeom.asWkb())
        buff = buffer(poly, -buffer_dist)
        buff = buffer(buff, buffer_dist)
        pols = extract_pols(buff)
        for pol in pols:
            outGeom = QgsGeometry()
            outGeom.fromWkb(dumps(pol))
            outFeat.setGeometry(outGeom)
            writer.addFeature(outFeat)

del writer
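The buffer(-d) followed by buffer(+d) pair above is a morphological opening: parts of a polygon narrower than roughly 2*d disappear. A plain-Shapely sketch of the effect, using the geometry's own buffer method instead of the module-level buffer helper the script imports:

from shapely.geometry import Polygon

# A 10x2 rectangle with a 0.4-wide spike sticking out of its top edge
rect = Polygon([(0, 0), (10, 0), (10, 2), (5.2, 2), (5.2, 6), (4.8, 6), (4.8, 2), (0, 2)])
d = 0.5
opened = rect.buffer(-d).buffer(d)

print(round(rect.area, 2))     # includes the spike
print(round(opened.area, 2))   # spike (narrower than 2*d) is gone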
Example #51
0
    def iter_geoms(self,
                   key=None,
                   select_uid=None,
                   path=None,
                   load_geoms=True,
                   uid=None,
                   select_sql_where=None,
                   slc=None,
                   dest_crs=None,
                   driver_kwargs=None):
        """
        See documentation for :class:`~ocgis.GeomCabinetIterator`.
        """

        # ensure select ugid is in ascending order
        if select_uid is not None:
            test_select_ugid = list(deepcopy(select_uid))
            test_select_ugid.sort()
            if test_select_ugid != list(select_uid):
                raise ValueError(
                    '"select_uid" must be sorted in ascending order.')

        # get the path to the output shapefile
        shp_path = self._get_path_by_key_or_direct_path_(key=key, path=path)

        # get the source CRS
        meta = self.get_meta(path=shp_path)

        # open the target shapefile
        ds = ogr.Open(shp_path)
        try:
            # return the features iterator
            features = self._get_features_object_(
                ds,
                uid=uid,
                select_uid=select_uid,
                select_sql_where=select_sql_where,
                driver_kwargs=driver_kwargs)
            for ctr, feature in enumerate(features):
                # With a slice passed, ...
                if slc is not None:
                    # ... iterate until start is reached.
                    if ctr < slc[0]:
                        continue
                    # ... stop if we have reached the stop.
                    elif ctr == slc[1]:
                        return  # PEP 479: raising StopIteration inside a generator is no longer allowed

                ogr_geom = feature.GetGeometryRef()
                if dest_crs is not None:
                    ogr_geom.TransformTo(dest_crs)

                if load_geoms:
                    yld = {'geom': wkb.loads(ogr_geom.ExportToWkb())}
                else:
                    yld = {}
                items = feature.items()
                properties = OrderedDict([(key, items[key])
                                          for key in feature.keys()])
                yld.update({'properties': properties})

                if ctr == 0:
                    uid, add_uid = get_uid_from_properties(properties, uid)
                    # The properties schema needs to be updated to account for the adding of a unique identifier.
                    if add_uid:
                        meta['schema']['properties'][uid] = 'int'
                else:
                    add_uid = None

                # add the unique identifier if required
                if add_uid:
                    properties[uid] = ctr + 1
                # ensure the unique identifier is an integer
                else:
                    properties[uid] = int(properties[uid])

                yield yld
            try:
                assert ctr >= 0
            except UnboundLocalError:
                # occurs if no features were returned by the iterator; raise a clearer exception.
                msg = 'No features returned from target shapefile. Were features appropriately selected?'
                raise ValueError(msg)
        finally:
            # close the dataset object
            ds.Destroy()
            ds = None
Example #52
0
    def main(self, data, quant_factor=None):
        """
        Entry point for the class Join.

        The join function is the second step in the topology computation.
        The following sequence is adopted:
        1. extract
        2. join
        3. cut 
        4. dedup 
        5. hashmap  

        Detects the junctions of shared paths from the specified hash of linestrings.
        
        After decomposing all geometric objects into linestrings it is necessary to
        detect the junctions, i.e. the start and end points of shared paths, so these
        paths can be 'merged' in the next step. 'Merged' is in quotes because in fact
        only one of the shared paths is kept and the other is removed.

        Parameters
        ----------
        data : dict
            object created by the method topojson.extract.
        quant_factor : int, optional (default: None)
            quantization factor, used to constrain float numbers to integer values.
            - Use 1e4 for 5-digit values (00001-99999)
            - Use 1e6 for 7-digit values (0000001-9999999)
        
        Returns
        -------
        dict
            object expanded with 
            - new key: junctions
            - new key: transform (if quant_factor is not None)        
        """

        if not data["linestrings"]:
            data["junctions"] = self.junctions
            return data

        # quantize linestrings before comparing
        # if set to None or a value < 1 (True equals 1) no quantizing is applied.
        if quant_factor is not None:
            if quant_factor > 1:
                kx, ky, x0, y0 = self.prequantize(data["linestrings"],
                                                  quant_factor)
                data["transform"] = {"scale": [kx, ky], "translate": [x0, y0]}

        # create a list of unique combinations of lines using an R-tree
        line_combs = select_unique_combs(data["linestrings"])

        # iterate over index combinations
        for i1, i2 in line_combs:
            g1 = data["linestrings"][i1]
            g2 = data["linestrings"][i2]

            # check if the geometries are equal
            # being equal means the geometry objects coincide with each other;
            # a rotated polygon or a reversed linestring are both considered equal.
            if not g1.equals(g2):
                # geoms are unique, let's find junctions
                self.shared_segs(g1, g2)

        # self.segments are nested lists of LineStrings, get coordinates of each nest
        s_coords = []
        for segment in self.segments:
            s_coords.extend([[(x.xy[0][y], x.xy[1][y]) for x in segment
                              for y in range(len(x.xy[0]))]])
            # s_coords.extend([[y for x in segment for y in list(x.coords)]])

        # only keep junctions that appear only once in each segment (nested list)
        # coordinates that appear multiple times are not junctions
        for coords in s_coords:
            self.junctions.extend(
                [geometry.Point(i) for i in coords if coords.count(i) == 1])

        # junctions can appear multiple times in multiple segments, remove duplicates
        self.junctions = [
            loads(xy) for xy in list(set([x.wkb for x in self.junctions]))
        ]

        # prepare to return object
        data["junctions"] = self.junctions

        return data
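The deduplication at the end uses WKB bytes as a canonical, hashable key: bit-identical points produce identical WKB, so a set() over the byte strings drops exact duplicates and wkb.loads turns the survivors back into geometries. A minimal sketch of that trick:

from shapely import wkb
from shapely.geometry import Point

points = [Point(1, 1), Point(2, 2), Point(1, 1), Point(2, 2), Point(3, 3)]

# set() on the WKB byte strings collapses exact duplicates; wkb.loads restores geometries
unique_points = [wkb.loads(b) for b in set(p.wkb for p in points)]
print(len(points), "->", len(unique_points))   # 5 -> 3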
Example #53
0
def wkb_adapter(obj, request):
    """Json adapter for Decimal objects."""
    return Feature(
        geometry=wkb.loads(bytes(obj.data)),
    )
Example #54
0
 (scenario_loss  - baseline_loss) / 8.0 as val, i.huc_12
 from huc12 i JOIN agg d on (d.huc_12 = i.huc_12)
 WHERE i.states ~* 'IA' ORDER by val DESC

""",
    (scenario, ),
)

# bins = np.arange(0, 101, 10)
bins = [-25, -10, -5, -2, 0, 2, 5, 10, 25]
cmap = plt.get_cmap("BrBG_r")
cmap.set_under("purple")
cmap.set_over("black")
norm = mpcolors.BoundaryNorm(bins, cmap.N)
patches = []

for row in cursor:
    polygon = loads(row[0].decode("hex"))
    a = np.asarray(polygon.exterior)
    x, y = m.map(a[:, 0], a[:, 1])
    a = zip(x, y)
    c = cmap(norm([float(row[1])]))[0]
    p = Polygon(a, fc=c, ec="None", zorder=2, lw=0.1)
    patches.append(p)

m.ax.add_collection(PatchCollection(patches, match_original=True))
m.draw_colorbar(bins, cmap, norm, units="T/a")

m.drawcounties()
m.postprocess(filename="test.png")
Example #55
0
 def area(self, a):
     if 'amenity' in a.tags:
         wkb = wkbfab.create_multipolygon(a)
         poly = wkblib.loads(wkb, hex=True)
         centroid = poly.representative_point()
         self.print_amenity(a.tags, centroid.x, centroid.y)
Example #56
0
    filnavn = 'elanlegg_OvreEiker.gpkg'
    eldf['geometry'] = eldf['geometri'].apply(lambda x: wkt.loads(x))
    elGdf = gpd.GeoDataFrame(eldf, geometry='geometry', crs=5973)
    elGdf.to_file(filnavn, layer='elanlegg', driver='GPKG')

    lysdf['geometry'] = lysdf['geometri'].apply(lambda x: wkt.loads(x))
    lysGdf = gpd.GeoDataFrame(lysdf, geometry='geometry', crs=5973)
    lysGdf.to_file(filnavn, layer='lysarmatur', driver='GPKG')

    # nvdbgeotricks.records2gpkg( nvdbapiv3.nvdbfagdata2records( alleElanlegg,     geometri=True), filnavn, 'elanlegg' )
    # nvdbgeotricks.records2gpkg( nvdbapiv3.nvdbfagdata2records( alleLysArmaturer, geometri=True), filnavn, 'lysarmatur' )

    # Create a fancy map view with a line from each light fixture => electrical installation
    # To force 2D geometry we use the trick wkb.loads( wkb.dumps( GEOM, output_dimension=2 ))
    lysdf['geometry'] = lysdf.apply(lambda x: LineString([
        wkb.loads(wkb.dumps(wkt.loads(x['geometri']), output_dimension=2)),
        wkb.loads(wkb.dumps(wkt.loads(x['ElAnlegg_geom']), output_dimension=2))
    ]),
                                    axis=1)

    minGdf = gpd.GeoDataFrame(lysdf, geometry='geometry', crs=5973)
    # must drop the 'vegsegmenter' column if the data was fetched with vegsegmenter=False
    if 'vegsegmenter' in minGdf.columns:
        minGdf.drop('vegsegmenter', 1, inplace=True)
    if 'relasjoner' in minGdf.columns:
        minGdf.drop('relasjoner', 1, inplace=True)

    minGdf.to_file(filnavn, layer='kartvisning_lysarmatur', driver="GPKG")

#     In [33]: lysdf.dtypes
# Out[33]:
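The wkb.loads(wkb.dumps(geom, output_dimension=2)) round trip used above is a common way to strip the Z coordinate from a 3D geometry, because the WKB writer can be told to emit only two dimensions. A tiny sketch:

from shapely import wkb, wkt

point_3d = wkt.loads("POINT Z (10 60 123.4)")
point_2d = wkb.loads(wkb.dumps(point_3d, output_dimension=2))

print(point_3d.has_z, point_2d.has_z)   # True False
print(point_2d.wkt)                     # POINT (10 60)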
Example #57
0
    def iter_geoms(self, key=None, select_uid=None, path=None, load_geoms=True, as_field=False,
                   uid=None, select_sql_where=None, slc=None, union=False, data_model=None,
                   driver_kwargs=None):
        """
        See documentation for :class:`~ocgis.GeomCabinetIterator`.
        """

        # Get the path to the output shapefile.
        shp_path = self._get_path_by_key_or_direct_path_(key=key, path=path)

        # Get the source metadata.
        meta = self.get_meta(path=shp_path, driver_kwargs=driver_kwargs)

        if union:
            gic = GeomCabinetIterator(key=key, select_uid=select_uid, path=path, load_geoms=load_geoms, as_field=False,
                                      uid=uid, select_sql_where=select_sql_where, slc=slc, union=False,
                                      data_model=data_model, driver_kwargs=driver_kwargs)
            yld = Field.from_records(gic, meta['schema'], crs=meta['crs'], uid=uid, union=True, data_model=data_model)
            yield yld
        else:
            if slc is not None and (select_uid is not None or select_sql_where is not None):
                exc = ValueError('Slice is not allowed with other select statements.')
                ocgis_lh(exc=exc, logger='geom_cabinet')

            # Format the slice for iteration. We will get the features by index if a slice is provided.
            if slc is not None:
                slc = get_index_slice_for_iteration(slc)

            # Open the target geometry file.
            ds = ogr.Open(shp_path)
            try:
                # Return the features iterator.
                features = self._get_features_object_(ds, uid=uid, select_uid=select_uid,
                                                      select_sql_where=select_sql_where, driver_kwargs=driver_kwargs)

                # Using slicing, we will select the features individually from the object.
                if slc is None:
                    itr = features
                else:
                    # The geodatabase API requires iterations to get the given location.
                    if self.get_gdal_driver(shp_path) == 'OpenFileGDB' or isinstance(slc, slice):
                        def _o_itr_(features_object, slice_start, slice_stop):
                            for ctr2, fb in enumerate(features_object):
                                # ... iterate until start is reached.
                                if ctr2 < slice_start:
                                    continue
                                # ... stop if we have reached the stop.
                                elif ctr2 == slice_stop:
                                    return  # PEP 479: raising StopIteration inside a generator is no longer allowed
                                yield fb

                        itr = _o_itr_(features, slc.start, slc.stop)
                    else:
                        # Convert the slice index to an integer to avoid type conflict in GDAL layer.
                        itr = (features.GetFeature(int(idx)) for idx in slc)

                # Convert feature objects to record dictionaries.
                for ctr, feature in enumerate(itr):
                    if load_geoms:
                        yld = {'geom': wkb.loads(feature.geometry().ExportToWkb())}
                    else:
                        yld = {}
                    items = feature.items()
                    properties = OrderedDict([(key, items[key]) for key in feature.keys()])
                    yld.update({'properties': properties, 'meta': meta})

                    if ctr == 0:
                        uid, add_uid = get_uid_from_properties(properties, uid)
                        # The properties schema needs to be updated to account for the adding of a unique identifier.
                        if add_uid:
                            meta['schema']['properties'][uid] = 'int'

                    # Add the unique identifier if required
                    if add_uid:
                        properties[uid] = feature.GetFID()
                    # Ensure the unique identifier is an integer
                    else:
                        properties[uid] = int(properties[uid])

                    if as_field:
                        yld = Field.from_records([yld], schema=meta['schema'], crs=yld['meta']['crs'], uid=uid,
                                                 data_model=data_model)

                    yield yld
                try:
                    assert ctr >= 0
                except UnboundLocalError:
                    # occurs if no features were returned by the iterator; raise a clearer exception.
                    msg = 'No features returned from target data source. Were features appropriately selected?'
                    raise ValueError(msg)
            finally:
                # Close or destroy the data source object if it actually exists.
                if ds is not None:
                    ds.Destroy()
                    ds = None
Example #58
0
data = [('Nevada', geometryNevada), ('Colorado', geometryColorado),
        ('Wyoming', geometryWyoming)]
cursor.executemany('insert into TestStates values (:state, :obj)', data)

# We now have test geometries in Oracle Spatial (SDO_GEOMETRY) and will next
# bring them back into Python to analyze with GeoPandas. GeoPandas is able to
# consume geometries in the Well Known Text (WKT) and Well Known Binary (WKB)
# formats. Oracle database includes utility functions to return SDO_GEOMETRY as
# both WKT and WKB. Therefore we use that utility function in the query below
# to provide results in a format readily consumable by GeoPandas. These utility
# functions were introduced in Oracle 10g. We use WKB here; however the same
# process applies for WKT.
cursor.execute("""
        SELECT state, sdo_util.to_wkbgeometry(geometry)
        FROM TestStates""")
gdf = gpd.GeoDataFrame(cursor.fetchall(), columns=['state', 'wkbgeometry'])

# create GeoSeries to replace the WKB geometry column
gdf['geometry'] = gpd.GeoSeries(gdf['wkbgeometry'].apply(lambda x: loads(x)))
del gdf['wkbgeometry']

# display the GeoDataFrame
print()
print(gdf)

# perform a basic GeoPandas operation (unary_union)
# to combine the 3 adjacent states into 1 geometry
print()
print("GeoPandas combining the 3 geometries into a single geometry...")
print(gdf.unary_union)
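As the comment block notes, the same round trip works with WKT: fetch the geometry as text (for example via SDO_UTIL.TO_WKTGEOMETRY on the Oracle side) and build the GeoSeries with shapely.wkt.loads. A sketch with rough placeholder WKT strings standing in for the database fetch:

import geopandas as gpd
from shapely import wkt

# Placeholder outlines only, not accurate state boundaries
rows = [
    ("Nevada", "POLYGON ((-120 42, -114 42, -114 36, -120 39, -120 42))"),
    ("Wyoming", "POLYGON ((-111 45, -104 45, -104 41, -111 41, -111 45))"),
]
gdf = gpd.GeoDataFrame(rows, columns=["state", "wktgeometry"])
gdf["geometry"] = gpd.GeoSeries(gdf["wktgeometry"].apply(wkt.loads))
del gdf["wktgeometry"]
print(gdf)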
Example #59
0
curs.execute("""
    SELECT type, geometry, area FROM {db_name}.{db_prefix}waterareas 
    WHERE type IN ({types})
    AND area > {minimum_area}
    AND {db_name}.{db_prefix}waterareas.geometry && ST_MakeEnvelope({env_0}, {env_1}, {env_2}, {env_3}, 3857)
""".format(**params))

print(
    TIMER_STRING.format("querying waterarea data",
                        (datetime.now() - timer_start).total_seconds()))

timer_start = datetime.now()
results = curs.fetchall()
for item in results:
    waterarea = loads(item[1], hex=True)
    waterareas.append(waterarea)

print(
    TIMER_STRING.format("reading waterarea data",
                        (datetime.now() - timer_start).total_seconds()))

timer_start = datetime.now()
for i in range(0, len(waterareas)):
    waterareas[i] = ops.transform(conv.convert_mercator_to_map_list,
                                  waterareas[i])
print(
    TIMER_STRING.format("transforming waterarea data",
                        (datetime.now() - timer_start).total_seconds()))

waterareas = unpack_multipolygons(waterareas)
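The ops.transform calls above apply a coordinate-conversion function to every vertex of a geometry and rebuild the same geometry type from the result. A minimal sketch with a simple scaling function standing in for the Mercator-to-map converter:

from shapely import ops
from shapely.geometry import Polygon

def scale_down(x, y):
    # ops.transform calls this with coordinate sequences (or scalars) and
    # expects the transformed coordinates back
    return (x * 0.001, y * 0.001)

waterarea = Polygon([(0, 0), (4000, 0), (4000, 2000), (0, 2000)])
print(ops.transform(scale_down, waterarea).bounds)   # (0.0, 0.0, 4.0, 2.0)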
Example #60
0
 def __init__(self, provider, properties, *args, **kwargs):
     self.provider = provider
     self.product_type = kwargs.get("productType")
     self.location = self.remote_location = properties.get("downloadLink", "")
     self.properties = {
         key: value
         for key, value in properties.items()
         if key != "geometry" and value not in [NOT_MAPPED, NOT_AVAILABLE]
     }
     product_geometry = properties["geometry"]
     # Let's try 'latmin lonmin latmax lonmax'
     if isinstance(product_geometry, six.string_types):
         bbox_pattern = re.compile(
             r"^(-?\d+\.?\d*) (-?\d+\.?\d*) (-?\d+\.?\d*) (-?\d+\.?\d*)$"
         )
         found_bbox = bbox_pattern.match(product_geometry)
         if found_bbox:
             coords = found_bbox.groups()
             if len(coords) == 4:
                 product_geometry = geometry.box(
                     float(coords[1]),
                     float(coords[0]),
                     float(coords[3]),
                     float(coords[2]),
                 )
     # Best effort to understand provider specific geometry (the default is to
     # assume an object implementing the Geo Interface: see
     # https://gist.github.com/2217756)
     if isinstance(product_geometry, six.string_types):
         try:
             product_geometry = wkt.loads(product_geometry)
         except geos.WKTReadingError:
             try:
                 product_geometry = wkb.loads(product_geometry)
             # Also catching TypeError because product_geometry can be a unicode
             # string and not a bytes string
             except (geos.WKBReadingError, TypeError):
                 # Give up!
                 raise
     self.geometry = self.search_intersection = geometry.shape(product_geometry)
     self.search_args = args
     self.search_kwargs = kwargs
     if self.search_kwargs.get("geometry") is not None:
         searched_bbox = self.search_kwargs["geometry"]
         searched_bbox_as_shape = geometry.box(
             searched_bbox["lonmin"],
             searched_bbox["latmin"],
             searched_bbox["lonmax"],
             searched_bbox["latmax"],
         )
         try:
             self.search_intersection = self.geometry.intersection(
                 searched_bbox_as_shape
             )
         except TopologicalError:
             logger.warning(
                 "Unable to intersect the requested extent: %s with the product "
                 "geometry: %s",
                 searched_bbox_as_shape,
                 product_geometry,
             )
             self.search_intersection = None
     self.driver = DRIVERS.get(self.product_type, NoDriver())
     self.downloader = None
     self.downloader_auth = None