def test_from_epsg_neg():
    try:
        crs.from_epsg(-1)
    except ValueError:
        pass
    except:
        raise
def linestrings_to_shapefile(linestrings, shp_fname):
    schema_properties = OrderedDict([("gid", "int")])
    my_schema = {"geometry": "LineString", "properties": schema_properties}
    my_driver = "ESRI Shapefile"
    my_crs = from_epsg(27700)
    with fiona.open(shp_fname, "w", driver=my_driver, crs=my_crs,
                    schema=my_schema) as outpf:
        for gid in linestrings:
            linestring = linestrings[gid]
            outpf.write({
                "geometry": mapping(linestring),
                "properties": {"gid": gid},
            })
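# Example usage (a minimal sketch, assuming the imports used above --
# OrderedDict, fiona, mapping, from_epsg -- are in scope; the file name and
# geometries are hypothetical):
from shapely.geometry import LineString

lines = {1: LineString([(0, 0), (100, 100)]),
         2: LineString([(100, 100), (200, 50)])}
linestrings_to_shapefile(lines, "lines_27700.shp")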
def geoframe(self, simplify=None, predicate=None, crs=None, epsg=None):
    """ Return geopandas dataframe

    :param simplify: Integer or None. Simplify the geometry to a tolerance,
        in the units of the geometry.
    :param predicate: A single-argument function to select which records to
        include in the output.
    :param crs: Coordinate reference system information
    :param epsg: Specify the CRS as an EPSG number.
    :return: A Geopandas GeoDataFrame
    """
    import geopandas
    from shapely.wkt import loads
    from fiona.crs import from_epsg

    if crs is None and epsg is None and self.epsg is not None:
        epsg = self.epsg

    if crs is None:
        try:
            crs = from_epsg(epsg)
        except TypeError:
            raise TypeError('Must set either crs or epsg for output.')

    df = self.dataframe(predicate=predicate)

    geometry = df['geometry']

    if simplify:
        s = geometry.apply(lambda x: loads(x).simplify(simplify))
    else:
        s = geometry.apply(lambda x: loads(x))

    df['geometry'] = geopandas.GeoSeries(s)

    return geopandas.GeoDataFrame(df, crs=crs, geometry='geometry')
def _prepare_geocode_result(results):
    """
    Helper function for the geocode function

    Takes a dict where keys are index entries, values are tuples containing:
    (address, (lat, lon))
    """
    # Prepare the data for the DataFrame as a dict of lists
    d = defaultdict(list)
    index = []

    for i, s in iteritems(results):
        address, loc = s

        # loc is lat, lon and we want lon, lat
        if loc is None:
            p = Point()
        else:
            p = Point(loc[1], loc[0])

        if address is None:
            address = np.nan

        d['geometry'].append(p)
        d['address'].append(address)
        index.append(i)

    df = geopandas.GeoDataFrame(d, index=index)
    df.crs = from_epsg(4326)

    return df
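# A quick illustration of the expected input shape (a sketch; the index keys,
# address, and coordinates are hypothetical):
results = {
    0: ("10 Downing St, London", (51.5034, -0.1276)),  # (address, (lat, lon))
    1: (None, None),                                   # a failed geocode
}
gdf = _prepare_geocode_result(results)  # EPSG:4326 GeoDataFrame of lon/lat points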
def to_crs(self, crs=None, epsg=None):
    """Transform geometries to a new coordinate reference system

    This method will transform all points in all objects. It has no notion
    of projecting entire geometries. All segments joining points are assumed
    to be lines in the current projection, not geodesics. Objects crossing
    the dateline (or other projection boundary) will have undesirable
    behavior.
    """
    from fiona.crs import from_epsg

    if self.crs is None:
        raise ValueError('Cannot transform naive geometries. '
                         'Please set a crs on the object first.')
    if crs is None:
        try:
            crs = from_epsg(epsg)
        except TypeError:
            raise TypeError('Must set either crs or epsg for output.')

    proj_in = pyproj.Proj(preserve_units=True, **self.crs)
    proj_out = pyproj.Proj(preserve_units=True, **crs)
    project = partial(pyproj.transform, proj_in, proj_out)
    result = self.apply(lambda geom: transform(project, geom))
    result.__class__ = GeoSeries
    result.crs = crs
    return result
def to_crs(old, new, new_epsg=4326):
    """Convert old shapefile to a new shapefile with a new crs."""
    crs = from_epsg(new_epsg)
    with fiona.open(old, 'r') as source:
        sink_schema = source.schema.copy()
        p_in = Proj(source.crs)
        with fiona.open(
                new, 'w',
                crs=crs,
                driver=source.driver,
                schema=sink_schema,
                ) as sink:
            p_out = Proj(sink.crs)
            for f in source:
                try:
                    assert f['geometry']['type'] == "Polygon"
                    new_coords = []
                    for ring in f['geometry']['coordinates']:
                        x2, y2 = transform(p_in, p_out, *zip(*ring))
                        new_coords.append(list(zip(x2, y2)))
                    f['geometry']['coordinates'] = new_coords
                    sink.write(f)
                except Exception as e:
                    # In practice, this won't work for most shapes since they
                    # are frequently of type MultiPolygon.
                    print("Error transforming feature {}".format(f['id']))
                    raise
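# Example invocation (a sketch; the file paths are hypothetical, and the
# source file is assumed to contain only Polygon features):
to_crs("parcels_27700.shp", "parcels_wgs84.shp", new_epsg=4326)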
def reproject(filename, crs=from_epsg(4326)):
    out = []
    with fiona.open(filename) as inp:
        output_schema = inp.schema.copy()
        output_schema['geometry'] = 'MultiPolygon'
        inp_projection = Proj(inp.crs)
        out_projection = Proj(crs)
        for feature in inp:
            try:
                geom = feature['geometry']
                if geom['type'] == 'Polygon':
                    parts = [geom['coordinates']]
                elif geom['type'] == 'MultiPolygon':
                    parts = geom['coordinates']
                new_coords = []
                for part in parts:
                    inner_coords = []
                    for ring in part:
                        x2, y2 = transform(inp_projection, out_projection,
                                           *zip(*ring))
                        inner_coords.append(list(zip(x2, y2)))
                    new_coords.append(inner_coords)
                feature['geometry']['type'] = 'MultiPolygon'
                feature['geometry']['coordinates'] = new_coords
                out.append(feature)
            except Exception:
                print('Error transforming feature ' + feature['id'])
    return out
def boreholes_to_shape(boreholes, shapefile, driver=None, epsg=None,
                       fields=None):
    '''write boreholes to shapefile as points'''
    # crs from epsg code
    if epsg is not None:
        crs = from_epsg(epsg)
    else:
        crs = None

    # shapefile schema
    schema = Borehole.schema.copy()
    if fields is not None:
        schema['properties'].extend(fields)

    # shapefile write arguments
    shape_kwargs = {
        'driver': driver,
        'schema': schema,
        'crs': crs,
    }

    keys = [k for k, _ in schema['properties']]
    log.info('writing to {f:}'.format(f=os.path.basename(shapefile)))
    with fiona.open(shapefile, 'w', **shape_kwargs) as dst:
        for borehole in boreholes:
            record = {
                'geometry': borehole.geometry,
                'properties': borehole.as_dict(keys=keys),
            }
            dst.write(record)
def export_projectionlines(shapefile, cross_sections, driver=None, epsg=None):
    # crs from epsg code
    if epsg is not None:
        crs = from_epsg(epsg)
    else:
        crs = None

    # schema
    schema = {'geometry': 'LineString', 'properties': {'label': 'str'}}
    boreholeschema = Borehole.schema.copy()
    schema['properties'].update(boreholeschema['properties'])
    keys = [k for k, _ in boreholeschema['properties']]

    # shapefile write arguments
    shape_kwargs = {
        'driver': driver,
        'schema': schema,
        'crs': crs,
    }

    log.info('writing to {f:}'.format(f=os.path.basename(shapefile)))
    with fiona.open(shapefile, 'w', **shape_kwargs) as dst:
        for cs in cross_sections:
            for distance, borehole in cs.boreholes:
                projectionline = LineString([
                    asShape(borehole.geometry),
                    cs.shape.interpolate(distance),
                ])
                properties = {'label': cs.label}
                properties.update(borehole.as_dict(keys=keys))
                dst.write({
                    'geometry': mapping(projectionline),
                    'properties': properties,
                })
def bbox_convert(bbox, from_epsg, to_epsg):
    # the `from_epsg` parameter shadows fiona's function, hence `crs.from_epsg`
    bbox = gpd.GeoSeries([Point(bbox[0], bbox[1]),
                          Point(bbox[2], bbox[3])],
                         crs=crs.from_epsg(from_epsg))
    bbox = bbox.to_crs(epsg=to_epsg)
    bbox = [bbox[0].x, bbox[0].y, bbox[1].x, bbox[1].y]
    return bbox
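# Example: convert a WGS84 bounding box to Web Mercator (a sketch; assumes
# `import geopandas as gpd`, `from shapely.geometry import Point` and
# `from fiona import crs` as used above; the coordinates are hypothetical):
bbox_3857 = bbox_convert([24.9, 60.1, 25.1, 60.3], 4326, 3857)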
def export_endpoints(shapefile, cross_sections, driver=None, epsg=None):
    # crs from epsg code
    if epsg is not None:
        crs = from_epsg(epsg)
    else:
        crs = None

    # schema
    schema = {'geometry': 'Point', 'properties': {'label': 'str'}}

    # shapefile write arguments
    shape_kwargs = {
        'driver': driver,
        'schema': schema,
        'crs': crs,
    }

    log.info('writing to {f:}'.format(f=os.path.basename(shapefile)))
    with fiona.open(shapefile, 'w', **shape_kwargs) as dst:
        for cs in cross_sections:
            startpoint = Point(cs.shape.coords[0])
            endpoint = Point(cs.shape.coords[-1])
            dst.write({
                'geometry': mapping(startpoint),
                'properties': {'label': cs.label},
            })
            dst.write({
                'geometry': mapping(endpoint),
                'properties': {'label': cs.label + '`'},
            })
def geoframe(self, sql, simplify=None, crs=None, epsg=4326):
    """ Return geopandas dataframe

    :param simplify: Integer or None. Simplify the geometry to a tolerance,
        in the units of the geometry.
    :param crs: Coordinate reference system information
    :param epsg: Specify the CRS as an EPSG number.
    :return: A Geopandas GeoDataFrame
    """
    import geopandas
    from shapely.wkt import loads
    from fiona.crs import from_epsg

    if crs is None:
        try:
            crs = from_epsg(epsg)
        except TypeError:
            raise TypeError('Must set either crs or epsg for output.')

    df = self.dataframe(sql)

    geometry = df['geometry']

    if simplify:
        s = geometry.apply(lambda x: loads(x).simplify(simplify))
    else:
        s = geometry.apply(lambda x: loads(x))

    df['geometry'] = geopandas.GeoSeries(s)

    return geopandas.GeoDataFrame(df, crs=crs, geometry='geometry')
def write_shapefile(shp_gdf, es_df, out_path):
    """ Join the ES data frame to the shapefile geodataframe and write the
        result to a new shapefile.

    Args:
        shp_gdf:  Geodataframe of SEPA waterbody catchments
        es_df:    Data frame of ES presence/absence.
        out_path: Output file path.

    Returns:
        None. A shapefile is written to the specified location.
    """
    # Copy the GDF so that it is not modified
    shp = shp_gdf.copy()

    # Delete unwanted columns
    del shp['WB_Name'], shp['Area_km2'], shp['Status2013']

    # Set indices
    shp.index = shp['WB_ID']

    # Join to "inter" WBs
    df = shp.join(es_df, how='left')

    # Set spatial ref
    df.crs = from_epsg(27700)

    # Fill NoData
    df.fillna(-9999, inplace=True)

    # Write to output
    # Convert back to GDF (even though it already is one!) to avoid gpd errors
    df = gpd.GeoDataFrame(df)
    df.to_file(out_path)
def write_hob_shapefile(hob_df, shp_fout, model_epsg):
    '''Writes the groundwater observation locations to a 2D point shapefile.'''
    schema = {'geometry': 'Point',
              'properties': {'Site': 'str', 'SiteName': 'str',
                             'Head (m)': 'float', 'DOW (m)': 'float',
                             'NWIS_DTW (m)': 'float', 'DOW-DTW (m)': 'float',
                             'ModelLand (m)': 'float', 'WellElev (m)': 'float',
                             'NWISLand (m)': 'float', '(Lay,Row,Col)': 'str',
                             'NWIS_Link': 'str', 'Violation': 'str'}}

    with fiona.collection(shp_fout, "w", "ESRI Shapefile",
                          crs=from_epsg(model_epsg), schema=schema) as output:
        for index, irow in hob_df.iterrows():
            isite = str(index)
            ilink = http_dict['GW'][0] + str(isite) + http_dict['GW'][1]
            iname = irow['station_nm']
            ihob = irow['HeadObs']
            idow = irow['well_depth_va']
            idtw = irow['lev_va']
            iwellelev = irow['well_elev']
            inwis_land = irow['alt_va']
            imodel_land = irow['ModelTop']
            irowcol = '(%i,%i,%i)' % (irow['Layer'], irow['Row'],
                                      irow['Column'])
            iexclude = irow['Violation']
            point = Point(irow['Projected_X'], irow['Projected_Y'])
            output.write({'geometry': mapping(point),
                          'properties': {'Site': isite, 'SiteName': iname,
                                         'Head (m)': ihob, 'DOW (m)': idow,
                                         'NWIS_DTW (m)': idtw,
                                         'DOW-DTW (m)': (idow - idtw),
                                         'ModelLand (m)': imodel_land,
                                         'WellElev (m)': iwellelev,
                                         'NWISLand (m)': inwis_land,
                                         '(Lay,Row,Col)': irowcol,
                                         'NWIS_Link': ilink,
                                         'Violation': iexclude}})
    return
def write_discharge_shapefile(discharge_df, shp_fout, model_epsg,
                              discharge_label):
    '''Writes the discharge measurement locations to a 2D point shapefile.'''
    schema = {'geometry': 'Point',
              'properties': {'Site': 'int', 'SiteName': 'str',
                             'GageElev': 'float', 'NObs': 'int',
                             discharge_label: 'float', 'StdError': 'float',
                             'NWIS_Link': 'str'}}

    with fiona.collection(shp_fout, "w", "ESRI Shapefile",
                          crs=from_epsg(model_epsg), schema=schema) as output:
        for index, irow in discharge_df.iterrows():
            isite = int(index)
            isite = str(isite).zfill(8)  # NWIS discharge ids minimum 8 characters
            iname = irow['station_nm']
            igage = irow['alt_va']
            inobs = irow['NObs']
            imean = irow[discharge_label]
            istd = irow['StdError']
            ilink = http_dict['ST'] + str(isite)
            point = Point(irow['Projected_X'], irow['Projected_Y'])
            output.write({'geometry': mapping(point),
                          'properties': {'Site': isite, 'SiteName': iname,
                                         'GageElev': igage, 'NObs': inobs,
                                         discharge_label: imean,
                                         'StdError': istd,
                                         'NWIS_Link': ilink}})
    return
def make_shapefile(data, name):
    path = os.path.join('op_data', name + '.shp')
    crs = from_epsg(29902)
    if type(data) == dict:
        a_schema = {'geometry': 'Point',
                    'properties': {'name': 'str', 'address': 'str'}}
        with fiona.open(path, "w", driver='ESRI Shapefile', crs=crs,
                        schema=a_schema) as output:
            for k, v in data.items():
                parts = k.split(',')
                name = parts[0]
                output.write({
                    'properties': {'name': name, 'address': k},
                    'geometry': geometry.mapping(v)})
    else:
        geom_type = data.geom_type
        a_schema = {'geometry': geom_type,
                    'properties': {'name': 'str'}}
        with fiona.open(path, "w", driver='ESRI Shapefile', crs=crs,
                        schema=a_schema) as output:
            output.write({
                'properties': {'name': name},
                'geometry': geometry.mapping(data)})
def df2shp(df, folder, layername, dtypes, gtype, epsg):
    """Convert a processed df to a shapefile.

    'df' is a dataframe.
    'folder' is the path to the folder where the shapefile will be saved.
    'layername' is the name of the shapefile.
    'dtypes' is an OrderedDict containing the dtypes for each field.
    'gtype' is the geometry type.
    'epsg' is the EPSG code of the output.
    """
    schema = {'geometry': gtype, 'properties': dtypes}
    with fiona.collection(
        folder + '/' + layername + '.shp', 'w',
        driver='ESRI Shapefile',
        crs=crs.from_epsg(epsg),
        schema=schema
    ) as shpfile:
        for index, row in df.iterrows():
            if row['geom'] != 'POINT EMPTY':
                geometry = loads(row['geom'])
                props = {}
                for prop in dtypes:
                    props[prop] = row[prop]
                shpfile.write({'properties': props,
                               'geometry': mapping(geometry)})
    return 'Extracted {layername} shapefile.'.format(layername=layername)
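# Example call (a sketch; assumes a DataFrame with a WKT 'geom' column plus
# the listed attribute columns -- the folder, layer, and column names here
# are hypothetical):
from collections import OrderedDict
import pandas as pd

df = pd.DataFrame({'geom': ['POINT (530000 180000)'], 'name': ['site_a']})
df2shp(df, 'output', 'sites', OrderedDict([('name', 'str')]), 'Point', 27700)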
def to_crs(self, crs=None, epsg=None):
    """Transform geometries to a new coordinate reference system

    This method will transform all points in all objects. It has no notion
    of projecting entire geometries. All segments joining points are assumed
    to be lines in the current projection, not geodesics. Objects crossing
    the dateline (or other projection boundary) will have undesirable
    behavior.

    `to_crs` passes the `crs` argument to the `Proj` function from the
    `pyproj` library (with the option `preserve_units=True`). It can
    therefore accept proj4 projections in any format supported by `Proj`,
    including dictionaries, or proj4 strings.
    """
    from fiona.crs import from_epsg

    if self.crs is None:
        raise ValueError('Cannot transform naive geometries. '
                         'Please set a crs on the object first.')
    if crs is None:
        try:
            crs = from_epsg(epsg)
        except TypeError:
            raise TypeError('Must set either crs or epsg for output.')

    proj_in = pyproj.Proj(self.crs, preserve_units=True)
    proj_out = pyproj.Proj(crs, preserve_units=True)
    project = partial(pyproj.transform, proj_in, proj_out)
    result = self.apply(lambda geom: transform(project, geom))
    result.__class__ = GeoSeries
    result.crs = crs
    result._invalidate_sindex()
    return result
def write_sdim(self):
    sdim = self.get_sdim(bounds=True)
    crs = from_epsg(4326)
    schema = {'geometry': 'Polygon', 'properties': {'UID': 'int:8'}}
    with fiona.open('/tmp/test.shp', 'w', driver='ESRI Shapefile',
                    crs=crs, schema=schema) as sink:
        for ii, poly in enumerate(sdim.geom.polygon.value.flat):
            row = {'geometry': mapping(poly),
                   'properties': {'UID': int(sdim.geom.uid.flatten()[ii])}}
            sink.write(row)
def create_plaid_pantry_shp():
    """"""
    with open(plaid_csv_path) as plaid_csv:
        # there are a couple of lines at the top of the csv containing
        # information that is not needed
        for i in range(2):
            next(plaid_csv)

        reader = csv.DictReader(plaid_csv)

        # create metadata for the shapefile that locations will be written to
        features = list()
        metadata = {
            'crs': crs.from_epsg(2913),
            'driver': 'ESRI Shapefile',
            'schema': {
                'geometry': 'Point',
                'properties': OrderedDict(
                    [(n, 'str') for n in reader.fieldnames if n])
            }
        }

        latlon2ospn = partial(
            pyproj.transform,
            pyproj.Proj(init='epsg:4326'),
            pyproj.Proj(init='epsg:2913', preserve_units=True)
        )
        addr_template = '{num} {pre} {street}, {city}, {st} {zip}'

        for r in reader:
            addr_str = addr_template.format(
                num=r['Address 1'], pre=r['Address 2'], street=r['Street'],
                city=r['City'], st=r['State'], zip=r['Zip'])

            rsp = rlis_geocode(addr_str)
            if isinstance(rsp, int):
                print('there seems to be a problem in connecting with')
                print('the rlis api; halting geoprocessing until this')
                print('is resolved')
                exit()
            elif rsp:
                geom = Point(rsp['ORSP_x'], rsp['ORSP_y'])
            else:
                rsp = google_geocode(addr_str)
                print(rsp)
                geom_wgs84 = Point(rsp['lng'], rsp['lat'])
                geom = transform(latlon2ospn, geom_wgs84)
                print(geom.x, geom.y)

            feat = {
                'geometry': mapping(geom),
                'properties': {k: v for k, v in r.items() if k}}
            features.append(feat)

    with fiona.open(plaid_shp_path, 'w', **metadata) as plaid_shp:
        for feat in features:
            plaid_shp.write(feat)
def write_geom_dict(dct, path=None):
    crs = from_epsg(4326)
    driver = 'ESRI Shapefile'
    schema = {'properties': {'UGID': 'int'},
              'geometry': list(dct.values())[0].geom_type}
    path = path or os.path.join(mkdtemp(), 'out.shp')
    with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema) as source:
        for k, v in dct.items():
            rec = {'properties': {'UGID': k}, 'geometry': mapping(v)}
            source.write(rec)
    return path
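# Example usage (a sketch; assumes shapely Points and the imports used above;
# the keys and coordinates are hypothetical):
from shapely.geometry import Point

out = write_geom_dict({1: Point(0, 0), 2: Point(1, 1)})
print(out)  # path to the temporary shapefile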
def set_projection(self):
    from fiona.crs import to_string, from_epsg, from_string
    if self.prj is not None:
        self.get_proj4()
    if self.proj4 is not None:
        self.crs = from_string(self.proj4)
    elif self.epsg is not None:
        self.crs = from_epsg(self.epsg)
    else:
        pass
def createShp(self, outputfile, coords, epsg_cd=4326, raster_sampling=None):
    """
    Exporting random points to Shapefile format
    -----------------------
    outputfile : str
    coords : ndarray
    epsg_cd : int, optional (default: 4326)
    raster_sampling : tuple(str, sampling_type), optional (default: None)
        Sampling types:
            raster_sampling=(rasterfilepath, "gdal")
            raster_sampling=(rasterfilepath, "rasterio")
    """
    try:
        self.logger.info("Exporting sample to Shapefile...")

        if isinstance(raster_sampling, tuple):
            self.logger.info("Sampling values on rasterfile...")
            rst_outfile, rst_smpl_type = raster_sampling[0], raster_sampling[1]
            if rst_smpl_type == "rasterio":
                smpl_vals = self.__samplePointOnRasterRasterio(rst_outfile,
                                                               coords)

        schema = {'geometry': 'Point',
                  'properties': {'cod_id': 'int',
                                 'value': 'float'}}
        shp_crs = from_epsg(epsg_cd)

        bar = pyprind.ProgBar(len(coords))
        with collection(outputfile, "w", driver="ESRI Shapefile",
                        schema=schema, crs=shp_crs) as output:
            for idx, coord in enumerate(coords, 1):
                geom = mapping(Point(coord[1], coord[0]))
                value = 0
                if raster_sampling:
                    if rst_smpl_type == "gdal":
                        value = self.__samplePointOnRasterGdal(rst_outfile,
                                                               coord)
                    elif rst_smpl_type == "rasterio":
                        value = float(smpl_vals[idx - 1])
                output.write({'properties': {'cod_id': idx, 'value': value},
                              'geometry': geom})
                bar.update()

        self.logger.info("Sample successfully exported to Shapefile...")
    except Exception as err:
        self.logger.error(
            "Error exporting sample to Shapefile: {0}".format(err))
def create_shp(filename):
    schema = {
        'geometry': 'Point',
        'properties': {
            'mesto': 'str',
            'ulice': 'str',
            'cp': 'str',
            'telefon': 'str',
            'e-mail': 'str'
        }
    }
    csob = fiona.open(filename, 'w', driver='ESRI Shapefile', schema=schema,
                      crs=from_epsg(5514), encoding='utf-8')
    return csob
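# Example usage (a sketch; the caller is responsible for closing the returned
# collection, and the file name, coordinates, and record values are
# hypothetical EPSG:5514 data):
from shapely.geometry import Point, mapping

branches = create_shp('branches_5514.shp')
branches.write({'geometry': mapping(Point(-743000.0, -1043000.0)),
                'properties': {'mesto': 'Praha', 'ulice': 'Radlicka',
                               'cp': '333', 'telefon': '', 'e-mail': ''}})
branches.close()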
def extract_tile_items(
    raster_features, labels, min_x, min_y, tile_width, tile_height
):
    """Extract label items that belong to the tile defined by the minimum
    horizontal pixel `min_x` (left tile limit), the minimum vertical pixel
    `min_y` (upper tile limit) and the sizes `tile_width` and `tile_height`
    measured as a pixel amount.

    The tile is cropped from the original image raster as follows:
      - horizontally, between `min_x` and `min_x+tile_width`
      - vertically, between `min_y` and `min_y+tile_height`

    This method takes care of the original data projection (UTM 37S,
    Tanzania area); however, this parameter may be changed if similar data
    on another projection is used.

    Parameters
    ----------
    raster_features : dict
        Raw image raster geographical features (`north`, `south`, `east` and
        `west` coordinates, `width` and `height` measured in pixels)
    labels : geopandas.GeoDataFrame
        Raw image labels, as a set of geometries
    min_x : int
        Left tile limit, as a horizontal pixel index
    min_y : int
        Upper tile limit, as a vertical pixel index
    tile_width : int
        Tile width, measured in pixels
    tile_height : int
        Tile height, measured in pixels

    Returns
    -------
    geopandas.GeoDataFrame
        Set of ground-truth labels contained in the tile, characterized by
        their type (complete, unfinished or foundation) and their geometry
    """
    area = get_tile_footprint(
        raster_features, min_x, min_y, tile_width, tile_height
    )
    bdf = gpd.GeoDataFrame(
        crs=from_epsg(raster_features["srid"]), geometry=[area]
    )
    reproj_labels = labels.to_crs(epsg=raster_features["srid"])
    tile_items = gpd.sjoin(reproj_labels, bdf)
    if tile_items.shape[0] == 0:
        return tile_items[["condition", "geometry"]]
    tile_items = gpd.overlay(tile_items, bdf)
    tile_items = tile_items.explode()  # Manage MultiPolygons
    return tile_items[["condition", "geometry"]]
def get_short_quiet_paths(graph, from_latLon, to_latLon, edge_gdf, node_gdf,
                          nts=[], db_costs={}, remove_geom_prop=False,
                          only_short=False, logging=True):
    # get origin & destination coordinates
    from_xy = geom_utils.get_xy_from_lat_lon(from_latLon)
    to_xy = geom_utils.get_xy_from_lat_lon(to_latLon)

    # find/create origin and destination nodes
    orig_node = get_nearest_node(graph, from_xy, edge_gdf, node_gdf, nts=nts,
                                 db_costs=db_costs)
    dest_node = get_nearest_node(graph, to_xy, edge_gdf, node_gdf, nts=nts,
                                 db_costs=db_costs, orig_node=orig_node)
    if (orig_node is None):
        print('could not find origin node at', from_latLon)
        return None
    if (dest_node is None):
        print('could not find destination node at', to_latLon)
        return None

    # get shortest path
    path_list = []
    shortest_path = get_shortest_path(graph, orig_node['node'],
                                      dest_node['node'], weight='length')
    if (shortest_path is None):
        print('could not find shortest path')
        return None
    if (only_short == True):
        return shortest_path
    path_geom_noises = nw.aggregate_path_geoms_attrs(
        graph, shortest_path, weight='length', noises=True)
    path_list.append({**path_geom_noises,
                      **{'id': 'short_p', 'type': 'short', 'nt': 0}})

    # get quiet paths to list
    for nt in nts:
        noise_cost_attr = 'nc_' + str(nt)
        quiet_path = get_shortest_path(graph, orig_node['node'],
                                       dest_node['node'],
                                       weight=noise_cost_attr)
        path_geom_noises = nw.aggregate_path_geoms_attrs(
            graph, quiet_path, weight=noise_cost_attr, noises=True)
        path_list.append({**path_geom_noises,
                          **{'id': 'q_' + str(nt), 'type': 'quiet',
                             'nt': nt}})

    # remove linking edges of the origin / destination nodes
    nw.remove_new_node_and_link_edges(graph, orig_node)
    nw.remove_new_node_and_link_edges(graph, dest_node)

    # collect quiet paths to gdf
    paths_gdf = gpd.GeoDataFrame(path_list, crs=from_epsg(3879))
    paths_gdf = paths_gdf.drop_duplicates(
        subset=['type', 'total_length']).sort_values(
        by=['type', 'total_length'], ascending=[False, True])

    # add exposures to noise levels higher than specified thresholds (dBs)
    paths_gdf['th_noises'] = [
        exps.get_th_exposures(noises, [55, 60, 65, 70])
        for noises in paths_gdf['noises']]
    # add percentages of cumulative distances of different noise levels
    paths_gdf['noise_pcts'] = paths_gdf.apply(
        lambda row: exps.get_noise_pcts(row['noises'], row['total_length']),
        axis=1)
    # calculate mean noise level
    paths_gdf['mdB'] = paths_gdf.apply(
        lambda row: exps.get_mean_noise_level(row['noises'],
                                              row['total_length']), axis=1)
    # calculate noise exposure index (same as noise cost but without noise
    # tolerance coefficient)
    paths_gdf['nei'] = [
        round(exps.get_noise_cost(noises=noises, db_costs=db_costs), 1)
        for noises in paths_gdf['noises']]
    paths_gdf['nei_norm'] = paths_gdf.apply(
        lambda row: exps.get_nei_norm(row.nei, row.total_length, db_costs),
        axis=1)

    # gdf to dicts
    path_dicts = qp.get_geojson_from_q_path_gdf(paths_gdf)
    # group paths with nearly identical geometries
    unique_paths = qp.remove_duplicate_geom_paths(
        path_dicts, tolerance=30, remove_geom_prop=remove_geom_prop,
        logging=False)
    # calculate exposure differences to shortest path
    path_comps = get_short_quiet_paths_comparison_for_dicts(unique_paths)

    # return paths as GeoJSON (FeatureCollection)...
    return {'paths': path_comps,
            'shortest_path': shortest_path,
            'orig_offset': orig_node['offset'],
            'dest_offset': dest_node['offset']}
def process_night_lights(country):
    """
    Clip the nightlights layer to the chosen country boundary and place in
    the desired country folder.

    Parameters
    ----------
    country : dict
        Country information, including the three digit ISO country code
        under the 'iso3' key.
    """
    iso3 = country['iso3']

    folder = os.path.join(DATA_INTERMEDIATE, iso3)
    path_output = os.path.join(folder, 'night_lights.tif')

    if os.path.exists(path_output):
        return print('Completed processing of nightlight layer')

    path_country = os.path.join(folder, 'national_outline.shp')

    filename = 'F182013.v4c_web.stable_lights.avg_vis.tif'
    path_night_lights = os.path.join(DATA_RAW, 'nightlights', '2013',
                                     filename)

    country = gpd.read_file(path_country)

    bbox = country.envelope
    geo = gpd.GeoDataFrame({'geometry': bbox}, crs=from_epsg('4326'))

    coords = [json.loads(geo.to_json())['features'][0]['geometry']]

    night_lights = rasterio.open(path_night_lights, "r+")
    night_lights.nodata = 0

    out_img, out_transform = mask(night_lights, coords, crop=True)

    out_meta = night_lights.meta.copy()
    out_meta.update({
        "driver": "GTiff",
        "height": out_img.shape[1],
        "width": out_img.shape[2],
        "transform": out_transform,
        "crs": 'epsg:4326'
    })

    with rasterio.open(path_output, "w", **out_meta) as dest:
        dest.write(out_img)

    return print('Completed processing of night lights layer')
def dist_to_coast(coord_lat, lon=None):
    """ Compute distance to coast from input points in meters.

    Parameters:
        coord_lat (GeoDataFrame or np.array or float):
            - GeoDataFrame with geometry column in epsg:4326
            - np.array with two columns, first for latitude of each point
              and second with longitude in epsg:4326
            - np.array with one dimension containing latitudes in epsg:4326
            - float with a latitude value in epsg:4326
        lon (np.array or float, optional):
            - np.array with one dimension containing longitudes in epsg:4326
            - float with a longitude value in epsg:4326

    Returns:
        np.array
    """
    if lon is None:
        if isinstance(coord_lat, (gpd.GeoDataFrame, gpd.GeoSeries)):
            if not equal_crs(coord_lat.crs, NE_CRS):
                LOGGER.error('Input CRS is not %s', str(NE_CRS))
                raise ValueError
            geom = coord_lat
        elif isinstance(coord_lat, np.ndarray):
            if coord_lat.shape[1] != 2:
                LOGGER.error('Missing longitude values.')
                raise ValueError
            geom = gpd.GeoDataFrame(geometry=list(
                map(Point, coord_lat[:, 1], coord_lat[:, 0])), crs=NE_CRS)
        else:
            LOGGER.error('Missing longitude values.')
            raise ValueError
    elif isinstance(lon, np.ndarray):
        if coord_lat.size != lon.size:
            LOGGER.error('Wrong input coordinates size: %s != %s',
                         coord_lat.size, lon.size)
            raise ValueError
        geom = gpd.GeoDataFrame(geometry=list(map(Point, lon, coord_lat)),
                                crs=NE_CRS)
    elif isinstance(lon, float):
        if not isinstance(coord_lat, float):
            LOGGER.error('Wrong input coordinates values.')
            raise ValueError
        geom = gpd.GeoDataFrame(geometry=list(map(Point, [lon],
                                                  [coord_lat])), crs=NE_CRS)

    to_crs = from_epsg(convert_wgs_to_utm(geom.geometry.iloc[0].x,
                                          geom.geometry.iloc[0].y))
    coast = get_coastlines(geom.total_bounds, 10).unary_union
    coast = gpd.GeoDataFrame(geometry=[coast], crs=NE_CRS).to_crs(to_crs)
    return geom.to_crs(to_crs).distance(coast.geometry[0]).values
def __init__(self, df, traj_id, obj_id=None, parent=None):
    """
    Create Trajectory from GeoDataFrame.

    Parameters
    ----------
    df : GeoDataFrame
        GeoDataFrame with point geometry column and timestamp index
    traj_id : any
        Trajectory ID
    obj_id : any
        Moving object ID
    parent : Trajectory
        Parent trajectory

    Examples
    --------
    Creating a trajectory from scratch:

    >>> import pandas as pd
    >>> import geopandas as gpd
    >>> import movingpandas as mpd
    >>> from fiona.crs import from_epsg
    >>>
    >>> df = pd.DataFrame([
    ...     {'geometry': Point(0, 0), 't': datetime(2018, 1, 1, 12, 0, 0)},
    ...     {'geometry': Point(6, 0), 't': datetime(2018, 1, 1, 12, 6, 0)},
    ...     {'geometry': Point(6, 6), 't': datetime(2018, 1, 1, 12, 10, 0)},
    ...     {'geometry': Point(9, 9), 't': datetime(2018, 1, 1, 12, 15, 0)}
    ... ]).set_index('t')
    >>> gdf = gpd.GeoDataFrame(df, crs=from_epsg(31256))
    >>> traj = mpd.Trajectory(gdf, 1)

    For more examples, see the tutorial notebooks_.

    .. _notebooks: https://mybinder.org/v2/gh/anitagraser/movingpandas/binder-tag?filepath=tutorials/0_getting_started.ipynb
    """
    if len(df) < 2:
        raise ValueError(
            "Trajectory dataframe must have at least two rows!")

    self.id = traj_id
    self.obj_id = obj_id
    df.sort_index(inplace=True)
    self.df = df[~df.index.duplicated(keep='first')]
    self.crs = df.crs
    self.parent = parent
    try:
        crs = CRS.from_user_input(self.crs)
        self.is_latlon = crs.is_geographic
    except NameError:
        self.is_latlon = self.crs['init'] == from_epsg(4326)['init']
def __iter__(self):
    """ Returns generator over shapefile rows.

    Note:
        The first column is an id field, taken from the id value of each
        shape. The middle values are taken from the property_schema. The
        last column is a string named geometry, which has the wkt value;
        the type is geometry_type.
    """
    # These imports are here, not at the module level, so the geo
    # support can be an extra
    self.start()

    vfs, shp_file, layer_index = self._open_file_params()

    with fiona.open(shp_file, vfs=vfs, layer=layer_index) as source:
        if source.crs.get('init') != 'epsg:4326':
            # Project back to WGS84
            project = partial(pyproj.transform,
                              pyproj.Proj(source.crs, preserve_units=True),
                              pyproj.Proj(from_epsg('4326')))
        else:
            project = None

        yield self.headers

        for i, s in enumerate(source):
            row_data = s['properties']
            shp = asShape(s['geometry'])
            row = [int(s['id'])]
            for col_name, elem in row_data.items():
                row.append(elem)

            if project:
                row.append(transform(project, shp))
            else:
                row.append(shp)

            yield row

    self.finish()
def PathMap(sp, df):
    # stores lines/roads/edges of sp (shortest path)
    listLine = []
    for i in range(len(sp) - 1):
        # l1 and l2 are the row numbers of each point in the data frame
        l1 = df[df['Point'] == sp[i]].index.values.astype(int)
        l2 = df[df['Point'] == sp[i + 1]].index.values.astype(int)
        # from the row numbers we get the geometry and build the list of lines
        # listLine.append(LineString([df.iloc[l1[0]].geometry, df.iloc[l2[0]].geometry]))
        point1 = df.iloc[l1[0]].Latitude, df.iloc[l1[0]].Longitude
        point2 = df.iloc[l2[0]].Latitude, df.iloc[l2[0]].Longitude
        listLine.append(LineString([point1, point2]))

    # convert the lines to a dataframe, then to a geo df
    dfLine = pd.DataFrame({'geometry': listLine})
    gdfLine = geopandas.GeoDataFrame(dfLine)

    # take the original df and set its crs; this will be helpful to mark
    # intersections on the path map that otherwise only shows lines
    gdf = geopandas.GeoDataFrame(df, geometry=geopandas.points_from_xy(
        df.Longitude, df.Latitude))

    # gdf2 = CRSConverter(gdf)
    # gdfLine2 = CRSConverter(gdfLine)
    gdf.crs = from_epsg(3857)
    gdfLine.crs = from_epsg(3857)

    ax = gdfLine.plot(figsize=(10, 10), alpha=1, edgecolor='red')
    for a in gdf.itertuples():
        if sp.__contains__(a.Point):
            plt.text(a.geometry.x, a.geometry.y, a.Point)

    # ctx.add_basemap(ax, url=ctx.providers.Stamen.TonerLite, zoom=12)
    ax.set_axis_off()
    plt.show()
def get_line_polygons_inters_points(line_geom, polygons):
    polygons_under_line = get_polygons_under_line(line_geom, polygons)
    point_geoms = []
    for idx, row in polygons_under_line.iterrows():
        poly_geom = row['geometry']
        inters_geom = poly_geom.intersection(line_geom)
        if (inters_geom.geom_type == 'MultiLineString'):
            for inters_line in inters_geom:
                point_geoms += get_inters_points(inters_line)
        else:
            inters_line = inters_geom
            point_geoms += get_inters_points(inters_line)
    return gpd.GeoDataFrame(geometry=point_geoms, crs=from_epsg(3879))
def set_projection(self):
    try:
        from fiona.crs import to_string, from_epsg, from_string
    except ImportError:
        print('\nGIS dependencies not installed. Please see readme for '
              'instructions on installation')
    if self.prj is not None:
        self.get_proj4()
    if self.proj4 is not None:
        self.crs = from_string(self.proj4)
    elif self.epsg is not None:
        self.crs = from_epsg(self.epsg)
    else:
        pass
def test_prepare_result_none(self):
    p0 = Point(12.3, -45.6)  # Treat these as lat/lon
    d = {'a': ('address0', p0.coords[0]),
         'b': (None, None)}

    df = _prepare_geocode_result(d)
    assert type(df) is gpd.GeoDataFrame
    self.assertEqual(from_epsg(4326), df.crs)
    self.assertEqual(len(df), 2)
    self.assert_('address' in df)

    row = df.loc['b']
    self.assertEqual(len(row['geometry'].coords), 0)
    self.assert_(pd.np.isnan(row['address']))
def test_split_by_observation_gap_skip_single_points(self):
    df = pd.DataFrame([
        {'geometry': Point(0, 0), 't': datetime(2018, 1, 1, 12, 0, 0)},
        {'geometry': Point(-6, 10), 't': datetime(2018, 1, 1, 12, 1, 0)},
        {'geometry': Point(6, 6), 't': datetime(2018, 1, 1, 12, 5, 0)},
        {'geometry': Point(6, 16), 't': datetime(2018, 1, 1, 12, 6, 30)}
    ]).set_index('t')
    geo_df = GeoDataFrame(df, crs=from_epsg(31256))
    traj = Trajectory(1, geo_df)
    split = traj.split_by_observation_gap(timedelta(seconds=61))
    result = len(split)
    expected_result = 1
    self.assertEqual(expected_result, result)
def test_clip_with_no_intersection(self):
    polygon = Polygon([(105, -5), (107, -5), (107, 12), (105, 12), (105, -5)])
    df = pd.DataFrame([
        {'geometry': Point(0, 0), 't': datetime(2018, 1, 1, 12, 0, 0)},
        {'geometry': Point(6, 0), 't': datetime(2018, 1, 1, 12, 10, 0)},
        {'geometry': Point(10, 0), 't': datetime(2018, 1, 1, 12, 15, 0)},
        {'geometry': Point(10, 10), 't': datetime(2018, 1, 1, 12, 30, 0)},
        {'geometry': Point(0, 10), 't': datetime(2018, 1, 1, 13, 0, 0)}
    ]).set_index('t')
    geo_df = GeoDataFrame(df, crs=from_epsg(31256))
    traj = Trajectory(1, geo_df)
    result = traj.clip(polygon)
    expected_result = []
    self.assertEqual(expected_result, result)
def test_prepare_result_none():
    p0 = Point(12.3, -45.6)  # Treat these as lat/lon
    d = {"a": ("address0", p0.coords[0]),
         "b": (None, None)}

    df = _prepare_geocode_result(d)
    assert type(df) is GeoDataFrame
    assert from_epsg(4326) == df.crs
    assert len(df) == 2
    assert "address" in df

    row = df.loc["b"]
    assert len(row["geometry"].coords) == 0
    assert np.isnan(row["address"])
def reopen(self):
    '''reopen the file if it was closed in the meantime'''
    crs = from_epsg(int(self.srid))
    schema = schema_fiona(self.schema, liste_attributs=self.liste_att,
                          l_nom=self.l_max)
    self.fichier = fiona.open(self.nom, 'a', crs=crs,
                              encoding=self.encoding, driver=self.driver,
                              schema=schema, layer=self.layer)
def test_forward(self):
    with mock.patch('geopy.geocoders.googlev3.GoogleV3.geocode',
                    ForwardMock()) as m:
        g = geocode(self.locations, provider='googlev3', timeout=2)
        self.assertEqual(len(self.locations), m.call_count)

    n = len(self.locations)
    self.assertIsInstance(g, gpd.GeoDataFrame)

    expected = GeoSeries([Point(float(x) + 0.5, float(x)) for x in range(n)],
                         crs=from_epsg(4326))
    assert_geoseries_equal(expected, g['geometry'])
    tm.assert_series_equal(g['address'],
                           pd.Series(self.locations, name='address'))
def test_get_position_with_invalid_method(self):
    df = pd.DataFrame([
        {'geometry': Point(0, 0), 't': datetime(2018, 1, 1, 12, 0, 0)},
        {'geometry': Point(6, 0), 't': datetime(2018, 1, 1, 12, 10, 0)},
        {'geometry': Point(10, 0), 't': datetime(2018, 1, 1, 12, 20, 0)}
    ]).set_index('t')
    geo_df = GeoDataFrame(df, crs=from_epsg(31256))
    traj = Trajectory(1, geo_df)
    try:
        traj.get_position_at(datetime(2018, 1, 1, 12, 10, 0), method='xxx')
        assert False
    except ValueError:
        assert True
def test_offset_minutes(self):
    df = pd.DataFrame([
        {'geometry': Point(0, 0), 't': datetime(2018, 1, 1, 12, 0, 0), 'value': 1},
        {'geometry': Point(-6, 10), 't': datetime(2018, 1, 1, 12, 1, 0), 'value': 2},
        {'geometry': Point(6, 6), 't': datetime(2018, 1, 1, 12, 2, 0), 'value': 3},
        {'geometry': Point(6, 12), 't': datetime(2018, 1, 1, 12, 3, 0), 'value': 4},
        {'geometry': Point(6, 18), 't': datetime(2018, 1, 1, 12, 4, 0), 'value': 5}
    ]).set_index('t')
    geo_df = GeoDataFrame(df, crs=from_epsg(31256))
    traj = Trajectory(1, geo_df)
    traj.apply_offset_minutes('value', -2)
    self.assertEqual(5, traj.df.iloc[2].value)
    self.assertEqual(Point(6, 6), traj.df.iloc[2].geometry)
def get_walkable_network(extent_poly_wgs=None):
    # define filter for acquiring walkable street network
    cust_filter = ('["area"!~"yes"]'
                   '["highway"!~"trunk_link|motor|proposed|construction|'
                   'abandoned|platform|raceway"]'
                   '["foot"!~"no"]["service"!~"private"]["access"!~"private"]')
    # query graph
    g = ox.graph_from_polygon(extent_poly_wgs, custom_filter=cust_filter)
    print('loaded graph of', g.number_of_edges(), 'edges')
    # convert graph to undirected graph
    g_u = ox.get_undirected(g)
    print('converted graph to undirected graph of', g_u.number_of_edges(),
          'edges')
    # project graph
    g_u_proj = ox.project_graph(g_u, from_epsg(3879))
    return g_u_proj
def create_points_layer(name, format, coord_sys, fields):
    """Create a new points GIS layer"""
    # Create the layer schema using data passed to the function.
    this_schema = {'geometry': 'Point', 'properties': fields}

    # Create the layer itself (the original referenced an undefined
    # `layer_out` here; the `name` parameter is used instead).
    layer = fiona.open(name, 'w', crs=from_epsg(coord_sys), driver=format,
                       schema=this_schema)
    return layer
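# Example usage (a sketch; assumes fiona and from_epsg are imported as above,
# and that the caller closes the returned layer; the file and field names
# are hypothetical):
layer = create_points_layer('wells.shp', 'ESRI Shapefile', 4326,
                            {'id': 'int', 'label': 'str'})
layer.close()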
def test_prepare_result_none(self):
    p0 = Point(12.3, -45.6)  # Treat these as lat/lon
    d = {"a": ("address0", p0.coords[0]),
         "b": (None, None)}

    df = _prepare_geocode_result(d)
    assert type(df) is gpd.GeoDataFrame
    self.assertEqual(from_epsg(4326), df.crs)
    self.assertEqual(len(df), 2)
    self.assert_("address" in df)

    row = df.loc["b"]
    self.assertEqual(len(row["geometry"].coords), 0)
    self.assert_(pd.np.isnan(row["address"]))
def convert_csv(self):
    newschema = {
        'geometry': 'Point',
        'properties': {
            'text': 'str',
            'party': 'str'
        }
    }
    # Note: the original passed EPSG 4296 here, which appears to be a typo
    # for EPSG 4326 (WGS84 lon/lat points).
    with fiona.open(self.red_shp, 'w', driver="ESRI Shapefile",
                    schema=newschema, crs=from_epsg(4326)) as red:
        with fiona.open(self.blue_shp, 'w', driver="ESRI Shapefile",
                        schema=newschema, crs=from_epsg(4326)) as blue:
            with open(self.partioned_csv, 'r') as f:
                reader = csv.reader(f)  # csv.DictReader(f)
                for row in reader:
                    if row[0] == 'Red':
                        point = Point(float(row[2]), float(row[3]))
                        red.write({
                            'properties': {
                                'text': row[1],
                                'party': row[0]
                            },
                            'geometry': mapping(point)
                        })
                    elif row[0] == 'Blue':
                        point = Point(float(row[2]), float(row[3]))
                        blue.write({
                            'properties': {
                                'text': row[1],
                                'party': row[0]
                            },
                            'geometry': mapping(point)
                        })
def test_prepare_result_none():
    p0 = Point(12.3, -45.6)  # Treat these as lat/lon
    d = {'a': ('address0', p0.coords[0]),
         'b': (None, None)}

    df = _prepare_geocode_result(d)
    assert type(df) is GeoDataFrame
    assert from_epsg(4326) == df.crs
    assert len(df) == 2
    assert 'address' in df

    row = df.loc['b']
    assert len(row['geometry'].coords) == 0
    assert np.isnan(row['address'])
def explode_lines_to_split_lines(line_df, uniq_id):
    row_accumulator = []

    def split_list_to_rows(row):
        for line_geom in row['split_lines']:
            new_row = row.to_dict()
            new_row['geometry'] = line_geom
            row_accumulator.append(new_row)

    line_df.apply(split_list_to_rows, axis=1)
    new_gdf = gpd.GeoDataFrame(row_accumulator, crs=from_epsg(3879))
    new_gdf['length'] = [round(geom.length, 3) for geom in new_gdf['geometry']]
    new_gdf['mid_point'] = [get_line_middle_point(geom)
                            for geom in new_gdf['geometry']]
    return new_gdf[[uniq_id, 'geometry', 'length', 'mid_point']]
def test_read_vector_pass(self):
    """ Test reading two attribute columns from vector data. """
    shp_file = shapereader.natural_earth(resolution='110m',
                                         category='cultural',
                                         name='populated_places_simple')
    lat, lon, geometry, intensity = read_vector(shp_file,
                                                ['pop_min', 'pop_max'])

    self.assertEqual(geometry.crs, from_epsg(NE_EPSG))
    self.assertEqual(geometry.size, lat.size)
    self.assertAlmostEqual(lon[0], 12.453386544971766)
    self.assertAlmostEqual(lon[-1], 114.18306345846304)
    self.assertAlmostEqual(lat[0], 41.903282179960115)
    self.assertAlmostEqual(lat[-1], 22.30692675357551)
    self.assertEqual(intensity.shape, (2, 243))
    # population min
    self.assertEqual(intensity[0, 0], 832)
    self.assertEqual(intensity[0, -1], 4551579)
    # population max
    self.assertEqual(intensity[1, 0], 832)
    self.assertEqual(intensity[1, -1], 7206000)
def main():
    # Calculate zonal statistics.
    z_stats = zonal_stats(zones_in, vrt, stats=statistics)

    # Add the results to the original shapefile and save as a new shapefile
    results = gpd.read_file(zones_in)
    for index, row in results.iterrows():
        for stat in statistics:
            results.loc[index, stat] = z_stats[index][stat]
    results.crs = from_epsg(3067)
    results.to_file(zones_out)
def test_reverse(self):
    with mock.patch('geopy.geocoders.googlev3.GoogleV3.reverse',
                    ReverseMock()) as m:
        g = reverse_geocode(self.points, provider='googlev3', timeout=2)
        self.assertEqual(len(self.points), m.call_count)

    self.assertIsInstance(g, gpd.GeoDataFrame)

    expected = GeoSeries(self.points, crs=from_epsg(4326))
    assert_geoseries_equal(expected, g['geometry'])

    address = pd.Series(['address' + str(x) for x in range(len(self.points))],
                        name='address')
    tm.assert_series_equal(g['address'], address)
def test_reverse(self):
    with mock.patch('geopy.geocoders.googlev3.GoogleV3.reverse',
                    ReverseMock()) as m:
        g = reverse_geocode(self.points, provider='googlev3', timeout=2)
        self.assertEqual(len(self.points), m.call_count)

    self.assertIsInstance(g, gpd.GeoDataFrame)

    expected = GeoSeries(self.points, crs=from_epsg(4326))
    assert_geoseries_equal(expected, g['geometry'])

    tm.assert_series_equal(
        g['address'],
        pd.Series('address' + str(x) for x in range(len(self.points))))
def test_add_heading(self):
    df = pd.DataFrame([
        {'geometry': Point(0, 0), 't': datetime(2018, 1, 1, 12, 0, 0)},
        {'geometry': Point(6, 0), 't': datetime(2018, 1, 1, 12, 10, 0)},
        {'geometry': Point(6, -6), 't': datetime(2018, 1, 1, 12, 20, 0)},
        {'geometry': Point(-6, -6), 't': datetime(2018, 1, 1, 12, 30, 0)}
    ]).set_index('t')
    geo_df = GeoDataFrame(df, crs=from_epsg(31256))
    traj = Trajectory(1, geo_df)
    traj.add_direction()
    result = traj.df[DIRECTION_COL_NAME].tolist()
    expected_result = [90.0, 90.0, 180.0, 270]
    self.assertEqual(expected_result, result)
def projectShapes(features, toCRS):
    import pyproj
    from functools import partial
    import fiona.crs as fcrs
    from shapely.geometry import shape, mapping
    from shapely.ops import transform as shpTrans

    project = partial(pyproj.transform,
                      pyproj.Proj(fcrs.from_epsg(4326)),
                      pyproj.Proj(toCRS))

    return list(
        {'geometry': mapping(shpTrans(project, shape(feat['geometry'])))}
        for feat in features)
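# Example usage (a sketch; the target CRS dict and the GeoJSON-like input
# feature are hypothetical):
features = [{'geometry': {'type': 'Point', 'coordinates': (24.94, 60.17)}}]
projected = projectShapes(features, {'init': 'epsg:3067'})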
def test_split_by_daybreak_same_day_of_year(self):
    df = pd.DataFrame([
        {'geometry': Point(0, 0), 't': datetime(2018, 1, 1, 12, 0, 0)},
        {'geometry': Point(-6, 10), 't': datetime(2018, 1, 1, 12, 1, 0)},
        {'geometry': Point(6, 6), 't': datetime(2019, 1, 1, 12, 0, 1)},
        {'geometry': Point(6, 16), 't': datetime(2019, 1, 1, 12, 5, 1)}
    ]).set_index('t')
    geo_df = GeoDataFrame(df, crs=from_epsg(31256))
    traj = Trajectory(1, geo_df)
    split = traj.split_by_date()
    result = len(split)
    expected_result = 2
    self.assertEqual(expected_result, result)
def create_shapefile(fname, features, epsg=4326):
    schema = {
        'geometry': 'LineString',
        'properties': {
            'id': 'int',
            'source': 'str:24',
        }
    }
    # TODO - get epsg from geojson
    crs = from_epsg(epsg)
    with fiona.open(fname, 'w', 'ESRI Shapefile', schema, crs=crs) as output:
        # ptypes = {k: fiona.prop_type(v) for k, v in output.schema['properties'].items()}
        output.writerecords(features)
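# Example record list matching the schema above (a sketch; the file name,
# coordinates, and property values are hypothetical):
records = [{
    'geometry': {'type': 'LineString',
                 'coordinates': [(0.0, 0.0), (1.0, 1.0)]},
    'properties': {'id': 1, 'source': 'survey'},
}]
create_shapefile('lines.shp', records)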
def convert_geodata_to_gis(net, epsg=31467, bus_geodata=True,
                           line_geodata=True):
    if bus_geodata:
        g = net.bus_geodata
        geo = [Point(x, y) for x, y in g[["x", "y"]].values]
        net.bus_geodata = GeoDataFrame(g, crs=from_epsg(epsg), geometry=geo,
                                       index=g.index)
    if line_geodata:
        l = net.line_geodata
        geo = GeoSeries(
            [LineString(x) for x in net.line_geodata.coords.values],
            index=net.line_geodata.index, crs=from_epsg(epsg))
        net.line_geodata = GeoDataFrame(l, crs=from_epsg(epsg), geometry=geo,
                                        index=l.index)
    net["gis_epsg_code"] = epsg