def ogr_select_by_location(shp, boundary_filter, filtered_output):
    """
    Filter a shp using the location of a boundary_filter shp
    
    For now the boundary must have only one feature (only the first
    feature of boundary_filter is used as the spatial filter).
    Writes the filtered features on a new shp (filtered_output).
    """
    
    import os
    from osgeo import ogr
    from gasp.prop.ff import drv_name
    from gasp.prop.feat import get_geom_type
    from gasp.mng.gen import copy_feat
    from gasp.mng.fld import ogr_copy_fields
    
    # Open main data
    dtSrc = ogr.GetDriverByName(drv_name(shp)).Open(shp, 0)
    lyr = dtSrc.GetLayer()
    
    # Get filter geom - only the first feature of the boundary is used
    filter_shp = ogr.GetDriverByName(drv_name(boundary_filter)).Open(
        boundary_filter, 0)
    filter_lyr = filter_shp.GetLayer()
    
    geom = None
    for f in filter_lyr:
        # FIX: clone the geometry - GetGeometryRef returns a reference
        # owned by the feature, which becomes a dangling pointer after
        # the DataSource is destroyed below.
        geom = f.GetGeometryRef().Clone()
        break
    
    filter_shp.Destroy()
    
    if geom is None:
        # FIX: the original raised NameError here when the boundary
        # layer had no features
        raise ValueError('boundary_filter has no features')
    
    # Apply filter
    lyr.SetSpatialFilter(geom)
    
    # Copy filtered objects to a new shape
    out = ogr.GetDriverByName(
        drv_name(filtered_output)).CreateDataSource(filtered_output)
    
    outLyr = out.CreateLayer(
        os.path.splitext(os.path.basename(filtered_output))[0],
        geom_type=get_geom_type(shp, gisApi='ogr', name=None, py_cls=True))
    
    # Copy fields from the input to the output
    ogr_copy_fields(lyr, outLyr)
    
    copy_feat(
        lyr, outLyr, outDefn=outLyr.GetLayerDefn(),
        only_geom=False, gisApi='ogrlyr')
    
    # FIX: destroy the output DataSource so data is flushed to disk
    out.Destroy()
    
    return filtered_output
def elevation_to_pntshp(pnt_shp, epsg, fld_name='ELEVATION'):
    """
    Add an elevation attribute to a point feature class
    
    NOTE(review): fld_name is currently unused and no output shapefile is
    ever written (df_to_shp is imported but never called) - the
    implementation looks unfinished; it returns the raw response of the
    elevation service instead.
    """
    
    from gasp.fm import tbl_to_obj
    from gasp.prop.feat import get_geom_type
    from gasp.mng.prj import project
    from gasp.mng.split import split_df
    from gasp.to.obj import df_to_dict
    from gasp.to.shp import df_to_shp
    
    # Check Geometries type - shapes should be of type point
    geomt = get_geom_type(pnt_shp, name=True, gisApi='ogr')
    if geomt != 'POINT' and geomt != 'MULTIPOINT':
        raise ValueError('All input geometry must be of type point')
    
    # Shapefile to (Geo)DataFrame
    src = tbl_to_obj(pnt_shp)
    
    # Re-project to WGS84 if necessary - the elevation service presumably
    # expects lat/long coordinates; confirm
    if epsg != 4326:
        src = project(src, None, 4326, gisApi='pandas')
    
    # Get str with coords ("lat,long" per point)
    src["coords"] = src["geometry"].y.astype(str) + "," + \
        src["geometry"].x.astype(str)
    
    # Split dataframe in chunks of 250 rows - presumably a request size
    # limit of the elevation service; confirm
    dfs = split_df(src, 250)
    
    for df in dfs:
        # Coordinate pairs separated by "|"
        coord_str = str(df.coords.str.cat(sep="|"))
        
        # NOTE(review): pnts_elev is not defined/imported here - assumed
        # to be a sibling function in this module; confirm
        elvd = pnts_elev(coord_str)
        
        # NOTE(review): 'data' is overwritten on every iteration, so only
        # the response for the LAST chunk survives the loop - looks like
        # a bug (results should be accumulated)
        data = elvd
    
    return data
def servarea_from_points(pntShp, inEPSG, range, outShp, mode='foot-walking', intervals=None):
    """
    Calculate isochrones for all points in a Point Feature Class
    
    Uses the OpenRouteService isochrones service; the points are split in
    one chunk per available API key and each chunk is processed in its
    own thread.
    
    NOTE(review): the parameter 'range' shadows the Python builtin of the
    same name; the worker receives 'key' but never passes it to the
    isochrones() call - presumably isochrones() obtains a key internally;
    confirm.
    """
    
    import time
    from shapely.geometry import shape
    from threading import Thread
    from gasp.web.orouteserv import get_keys, isochrones
    from gasp.fm import tbl_to_obj
    from gasp.mng.split import split_df_inN
    from gasp.fm.geom import pointxy_to_cols
    from gasp.mng.gen import merge_df
    from gasp.prop.feat import get_geom_type
    from gasp.mng.prj import project
    from gasp.to.geom import dict_to_geodf
    from gasp.to.obj import df_to_dict
    from gasp.to.shp import df_to_shp
    
    # SHP TO GEODATAFRAME
    pntDf = tbl_to_obj(pntShp)
    
    # Check if SHPs are points
    inGeomType = get_geom_type(pntDf, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    # Reproject geodf if necessary - the service expects WGS84 lat/long
    if inEPSG != 4326:
        pntDf = project(pntDf, None, 4326, gisApi='pandas')
    
    # Keep the original FID so results can be related back to the input
    pntDf["old_fid"] = pntDf.index
    
    pntDf = pointxy_to_cols(
        pntDf, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    
    # Get Keys
    KEYS = get_keys()
    
    # One chunk of points per available API key
    df_by_key = split_df_inN(pntDf, KEYS.shape[0])
    
    keys_list = KEYS['key'].tolist()
    
    results = []
    
    def get_isochrones(df, key):
        # Worker: request one isochrone per point and replace the point
        # geometry with the isochrone polygon
        pntDict = df_to_dict(df)
        
        for k in pntDict:
            iso = isochrones(
                "{},{}".format(pntDict[k]["longitude"], pntDict[k]["latitude"]),
                range, range_type='time', modeTransportation=mode,
                intervals=intervals
            )
            
            pntDict[k]["geometry"] = shape(iso["features"][0]["geometry"])
            
            # Throttle requests - presumably a service rate limit; confirm
            time.sleep(5)
        
        pntDf = dict_to_geodf(pntDict, "geometry", 4326)
        
        results.append(pntDf)
    
    # Create threads
    thrds = []
    i = 1
    for df in df_by_key:
        thrds.append(Thread(
            name='tk{}'.format(str(i)), target=get_isochrones,
            args=(df, keys_list[i - 1])
        ))
        i += 1
    
    # Start all threads
    for thr in thrds:
        thr.start()
    
    # Wait for all threads to finish
    for thr in thrds:
        thr.join()
    
    # Join all dataframes
    pntDf = merge_df(results, ignIndex=False)
    
    # Back to the original SRS, if needed
    if inEPSG != 4326:
        pntDf = project(pntDf, None, inEPSG, gisApi='pandas')
    
    return df_to_shp(pntDf, outShp)
def cost_od(shpOrigins, shpDestinations, epsgOrigins, epsgDestinations, table_result, mode='foot-walking'):
    """
    Matrix od Service Implementation
    
    Builds an origin-destination cost (duration) matrix between two point
    feature classes using the OpenRouteService Matrix service and writes
    the result to table_result.
    
    The origins are split in one chunk per available API key and each
    chunk is processed in its own thread.
    """
    
    import pandas
    from threading import Thread
    from gasp.fm.api.orouteserv import get_keys
    from gasp.fm.api.orouteserv import matrix_od
    # FIX: the original imported shp_to_df but called tbl_to_obj below,
    # which raised NameError on the first line of the function body.
    from gasp.fm import tbl_to_obj
    from gasp.mng.split import split_df_inN
    from gasp.fm.geom import pointxy_to_cols
    from gasp.mng.prj import project
    from gasp.mng.gen import merge_df
    from gasp.prop.feat import get_geom_type
    from gasp.to import obj_to_tbl
    
    origensDf = tbl_to_obj(shpOrigins)
    destinoDf = tbl_to_obj(shpDestinations)
    
    # Check if SHPs are points
    inGeomType = get_geom_type(origensDf, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    inGeomType = get_geom_type(destinoDf, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    # Re-project if needed - the service expects WGS84 coordinates
    if epsgOrigins != 4326:
        origensDf = project(origensDf, None, 4326, gisApi='pandas')
    
    if epsgDestinations != 4326:
        destinoDf = project(destinoDf, None, 4326, gisApi='pandas')
    
    # Point coordinates to columns
    origensDf = pointxy_to_cols(
        origensDf, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    destinoDf = pointxy_to_cols(
        destinoDf, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    
    # "long,lat" location strings, as expected by the service
    origensDf["location"] = origensDf.longitude.astype(str) + "," + \
        origensDf.latitude.astype(str)
    destinoDf["location"] = destinoDf.longitude.astype(str) + "," + \
        destinoDf.latitude.astype(str)
    
    # Keep the original FIDs to relate results back to the inputs
    origensDf["old_fid"] = origensDf.index
    destinoDf["old_fid"] = destinoDf.index
    
    # Get Keys
    KEYS = get_keys()
    
    origensByKey = split_df_inN(origensDf, KEYS.shape[0])
    
    lst_keys = KEYS["key"].tolist()
    
    # Produce matrix
    results = []
    
    def get_matrix(origins, key):
        # Worker: request the duration matrix between this chunk of
        # origins and every destination, using one API key
        origins.reset_index(inplace=True)
        origins["rqst_idx"] = origins.index.astype(str)
        
        destinations = destinoDf.copy()
        
        strSource = origins.location.str.cat(sep="|")
        idxSource = origins.rqst_idx.str.cat(sep=",")
        
        # Destination request indexes come after the origin indexes
        destinations["rqst_idx"] = destinations.old_fid + origins.shape[0]
        destinations["rqst_idx"] = destinations.rqst_idx.astype(str)
        strDestin = destinations.location.str.cat(sep="|")
        idxDestin = destinations.rqst_idx.str.cat(sep=",")
        
        rslt = matrix_od(
            strSource + "|" + strDestin,
            idxSources=idxSource, idxDestinations=idxDestin,
            useKey=key, modeTransportation=mode
        )
        
        rslt = pandas.DataFrame(rslt["durations"])
        
        originsFID = origins.old_fid.tolist()
        destinaFID = destinations.old_fid.tolist()
        
        # Flatten the matrix into (origin fid, destination fid, cost) rows
        mm = []
        for lnh in range(len(originsFID)):
            for col in range(len(destinaFID)):
                ll = [
                    originsFID[lnh], destinaFID[col], rslt.iloc[lnh, col]
                ]
                mm.append(ll)
        
        matrix = pandas.DataFrame(
            mm, columns=["fid_origin", "fid_destin", "cost"])
        
        results.append(matrix)
    
    # Create threads
    thrds = []
    i = 1
    for df in origensByKey:
        thrds.append(Thread(
            name="tk{}".format(str(i)), target=get_matrix,
            args=(df, lst_keys[i - 1])
        ))
        i += 1
    
    # Start all threads
    for thr in thrds:
        thr.start()
    
    # Wait for all threads to finish
    for thr in thrds:
        thr.join()
    
    # Join all dataframes
    RESULT = merge_df(results, ignIndex=False)
    
    # Attach origin geometries
    RESULT = RESULT.merge(
        origensDf, how='inner',
        left_on=["fid_origin"], right_on=["old_fid"]
    )
    RESULT.drop(
        [x for x in origensDf.columns.values if x != "geometry"],
        axis=1, inplace=True
    )
    RESULT.rename(columns={"geometry": "origin_geom"}, inplace=True)
    
    # Attach destination geometries
    RESULT = RESULT.merge(
        destinoDf, how='inner',
        left_on=["fid_destin"], right_on=["old_fid"]
    )
    RESULT.drop(
        [x for x in destinoDf.columns.values if x != "geometry"],
        axis=1, inplace=True
    )
    RESULT.rename(columns={"geometry": "destin_geom"}, inplace=True)
    
    RESULT["origin_geom"] = RESULT.origin_geom.astype(str)
    RESULT["destin_geom"] = RESULT.destin_geom.astype(str)
    
    return obj_to_tbl(RESULT, table_result)
def project(inShp, outShp, outEPSG, inEPSG=None, gisApi='ogr', sql=None):
    """
    Project Geodata using GIS
    
    API's Available:
    * arcpy
    * ogr
    * ogr2ogr;
    * pandas
    
    For gisApi='pandas', inShp may be a GeoDataFrame or a file path; when
    outShp is None the projected GeoDataFrame is returned instead of
    being written to disk.
    """
    
    import os
    
    if gisApi == 'arcpy':
        """
        Execute Data Management > Data Transformations > Projection
        """
        
        import arcpy
        from gasp.cpu.arcg.lyr import feat_lyr
        from gasp.web.srorg import get_wkt_esri
        
        layer = feat_lyr(inShp)
        srs_obj = get_wkt_esri(outEPSG)
        
        arcpy.Project_management(layer, outShp, srs_obj)
    
    elif gisApi == 'ogr':
        """
        Using ogr Python API
        """
        
        if not inEPSG:
            raise ValueError(
                'To use ogr API, you should specify the EPSG Code of the'
                ' input data using inEPSG parameter'
            )
        
        from osgeo import ogr
        from gasp.prop.feat import get_geom_type
        from gasp.prop.ff import drv_name
        from gasp.mng.fld import ogr_copy_fields
        from gasp.prop.prj import get_sref_from_epsg
        from gasp.oss import get_filename
        
        def copyShp(out, outDefn, lyr_in, trans):
            # Copy every feature of lyr_in to out, transforming its
            # geometry with trans
            for f in lyr_in:
                g = f.GetGeometryRef()
                g.Transform(trans)
                new = ogr.Feature(outDefn)
                new.SetGeometry(g)
                for i in range(0, outDefn.GetFieldCount()):
                    new.SetField(outDefn.GetFieldDefn(i).GetNameRef(), f.GetField(i))
                out.CreateFeature(new)
                new.Destroy()
                f.Destroy()
        
        # ####### #
        # Project #
        # ####### #
        
        # NOTE(review): get_trans_param is not imported in this scope -
        # presumably it lives in gasp.prop.prj; confirm and add the import
        transP = get_trans_param(inEPSG, outEPSG)
        
        inData = ogr.GetDriverByName(
            drv_name(inShp)).Open(inShp, 0)
        
        inLyr = inData.GetLayer()
        out = ogr.GetDriverByName(
            drv_name(outShp)).CreateDataSource(outShp)
        
        outlyr = out.CreateLayer(
            get_filename(outShp), get_sref_from_epsg(outEPSG),
            geom_type=get_geom_type(
                inShp, name=None, py_cls=True, gisApi='ogr'
            )
        )
        
        # Copy fields to the output
        ogr_copy_fields(inLyr, outlyr)
        
        # Copy/transform features from the input to the output
        outlyrDefn = outlyr.GetLayerDefn()
        copyShp(outlyr, outlyrDefn, inLyr, transP)
        
        inData.Destroy()
        out.Destroy()
    
    elif gisApi == 'ogr2ogr':
        """
        Transform SRS of any OGR Compilant Data. Save the transformed data
        in a new file
        
        TODO: DB - only works with sqlite
        """
        
        if not inEPSG:
            raise ValueError('To use ogr2ogr, you must specify inEPSG')
        
        from gasp import exec_cmd
        from gasp.prop.ff import drv_name
        
        # FIX: the original template had a malformed '-t_srs:{}' option
        # (must be '-t_srs EPSG:<code>') and referenced the undefined
        # names inEpsg/outEpsg instead of the inEPSG/outEPSG parameters.
        cmd = (
            'ogr2ogr -f "{}" {} {}{} -s_srs EPSG:{} -t_srs EPSG:{}'
        ).format(
            drv_name(outShp), outShp, inShp,
            '' if not sql else ' -dialect sqlite -sql "{}"'.format(sql),
            str(inEPSG), str(outEPSG)
        )
        
        outcmd = exec_cmd(cmd)
    
    elif gisApi == 'pandas':
        # Test if input Shp is GeoDataframe
        from geopandas import GeoDataFrame as gdf
        
        if type(inShp) == gdf:
            # Is DataFrame
            df = inShp
        
        else:
            # Assuming is file
            if os.path.exists(inShp):
                # Is File
                from gasp.fm import tbl_to_obj
                
                df = tbl_to_obj(inShp)
            else:
                raise ValueError((
                    "For pandas API, inShp must be file or GeoDataFrame"
                ))
        
        # Project df
        newDf = df.to_crs({'init': 'epsg:{}'.format(str(outEPSG))})
        
        if outShp:
            # Try to save as file
            from gasp.to.shp import df_to_shp
            
            # FIX: the original wrote the UN-projected 'df' to disk;
            # the projected 'newDf' is the intended output.
            return df_to_shp(newDf, outShp)
        
        else:
            return newDf
    
    else:
        raise ValueError('Sorry, API {} is not available'.format(gisApi))
    
    return outShp
def infovalue(landslides, variables, iv_rst, dataEpsg):
    """
    Informative Value using GDAL Library
    
    landslides - raster OR vector (point/polygon) dataset with landslide
                 occurrences
    variables  - list of predictor raster paths (all must have the same
                 dimension)
    iv_rst     - path of the output Informative Value raster
    dataEpsg   - SRS EPSG code of the data
    
    Returns iv_rst.
    """
    
    import os
    import math
    import numpy
    from osgeo import gdal
    from gasp.fm.rst import rst_to_array
    from gasp.fm import tbl_to_obj
    from gasp.prop.feat import get_geom_type
    from gasp.prop.rst import rst_shape
    from gasp.prop.rst import count_cells
    from gasp.prop.rst import get_cellsize
    from gasp.stats.rst import frequencies
    from gasp.oss.ops import create_folder
    from gasp.to.rst import array_to_raster
    
    # Create Workspace for temporary files
    workspace = create_folder(os.path.join(
        os.path.dirname(landslides), 'tmp'))
    
    # Get Variables Raster Shape and see if there is any difference
    varShapes = rst_shape(variables, gisApi='gdal')
    for i in range(1, len(variables)):
        if varShapes[variables[i - 1]] != varShapes[variables[i]]:
            raise ValueError(
                ('All rasters must have the same dimension! '
                 'Raster {} and Raster {} have not the same shape!').format(
                     variables[i - 1], variables[i]))
    
    # See if landslides are raster or not - try to open as raster and
    # fall back to the vector path on failure.
    # FIX: the original wrapped the WHOLE raster branch (including its own
    # ValueError for mismatched dimensions) in a bare 'except:', which
    # silently swallowed that error and then tried to open the raster as
    # a feature class; only the open/read is guarded now.
    try:
        land_rst = rst_to_array(landslides)
    except Exception:
        land_rst = None
    
    if land_rst is not None:
        lrows, lcols = land_rst.shape
        
        if [lrows, lcols] != varShapes[variables[0]]:
            raise ValueError(
                ("Raster with Landslides ({}) has to have the same "
                 "dimension that Raster Variables").format(landslides))
        
        # FIX: the original never assigned land_raster in the raster
        # path, so frequencies(land_raster) below raised NameError.
        land_raster = landslides
    
    else:
        # Landslides are not Raster
        # Open as Feature Class
        # See if is Point or Polygon
        land_df = tbl_to_obj(landslides)
        geomType = get_geom_type(land_df, geomCol="geometry", gisApi='pandas')
        
        if geomType == 'Polygon' or geomType == 'MultiPolygon':
            # it will be converted to raster bellow
            land_poly = landslides
        
        elif geomType == 'Point' or geomType == 'MultiPoint':
            # Do a Buffer
            from gasp.anls.prox.bf import geodf_buffer_to_shp
            
            land_poly = geodf_buffer_to_shp(
                land_df, 100,
                os.path.join(workspace, 'landslides_buffer.shp'))
        
        else:
            # FIX: the original fell through with land_poly undefined
            raise ValueError(
                'Geometry type {} is not supported'.format(geomType))
        
        # Convert To Raster
        from gasp.to.rst import shp_to_raster
        
        land_raster = shp_to_raster(
            land_poly, None, get_cellsize(variables[0], gisApi='gdal'),
            -9999, os.path.join(workspace, 'landslides_rst.tif'),
            rst_template=variables[0], api='gdal')
        
        land_rst = rst_to_array(land_raster)
    
    # Get Number of cells of each raster and number of cells
    # with landslides
    landsldCells = frequencies(land_raster)[1]
    totalCells = count_cells(variables[0])
    
    # Get number of cells by classe in variable
    freqVar = {r: frequencies(r) for r in variables}
    
    for rst in freqVar:
        # Re-map class 0 to -1 (0 is used as a "no value" marker below).
        # FIX: iterate over a snapshot of the keys - deleting entries
        # while iterating the dict raises RuntimeError.
        for cls in list(freqVar[rst]):
            if cls == 0:
                freqVar[rst][-1] = freqVar[rst][cls]
                del freqVar[rst][cls]
    
    # Get cell number with landslides by class
    varArray = {r: rst_to_array(r) for r in variables}
    
    for r in varArray:
        numpy.place(varArray[r], varArray[r] == 0, -1)
    
    landArray = {r: land_rst * varArray[r] for r in varArray}
    freqLndVar = {r: frequencies(landArray[r]) for r in landArray}
    
    # Estimate VI for each class on every variable
    vi = {}
    for var in freqVar:
        vi[var] = {}
        for cls in freqVar[var]:
            if cls in freqLndVar[var]:
                vi[var][cls] = math.log10(
                    (float(freqLndVar[var][cls]) / freqVar[var][cls]) /
                    (float(landsldCells) / totalCells))
            else:
                # No landslides in this class - marker, replaced by the
                # minimum VI below
                vi[var][cls] = 9999
    
    # Replace Classes without VI, from 9999 to minimum VI
    vis = []
    for d in vi.values():
        vis += d.values()
    
    min_vi = min(vis)
    
    for r in vi:
        for cls in vi[r]:
            if vi[r][cls] == 9999:
                vi[r][cls] = min_vi
    
    # Replace cls by vi in rst_arrays (-128 marks NoData)
    resultArrays = {v: numpy.zeros(varArray[v].shape) for v in varArray}
    for v in varArray:
        numpy.place(resultArrays[v], resultArrays[v] == 0, -128)
    
    for v in varArray:
        for cls in vi[v]:
            numpy.place(resultArrays[v], varArray[v] == cls, vi[v][cls])
    
    # Sum all arrays and save the result as raster
    vi_rst = resultArrays[variables[0]] + resultArrays[variables[1]]
    for v in range(2, len(variables)):
        vi_rst = vi_rst + resultArrays[variables[v]]
    
    numpy.place(vi_rst, vi_rst == len(variables) * -128, -128)
    
    # FIX: the original passed 'variables[i]' as the template raster,
    # where 'i' was a leftover loop variable from the shape check above;
    # use the first variable, the same template used for rasterization.
    result = array_to_raster(
        vi_rst, iv_rst, variables[0], dataEpsg,
        gdal.GDT_Float32, noData=-128, gisApi='gdal')
    
    return iv_rst
def matrix_od(originsShp, destinationShp, originsEpsg, destinationEpsg, resultShp, modeTrans="driving"):
    """
    Use Pandas to Retrieve data from MapBox Matrix OD Service
    
    Builds an origin-destination duration matrix between the two point
    feature classes and writes it to resultShp (one 'dest_<i>' column per
    destination). Origins are split in one chunk per available MapBox API
    key and each chunk is processed in its own thread.
    """
    
    import time
    from threading import Thread
    from gasp.web.mapbx import get_keys, matrix
    from gasp.fm import tbl_to_obj
    from gasp.mng.split import split_df, split_df_inN
    from gasp.mng.fld.df import listval_to_newcols
    from gasp.fm.geom import pointxy_to_cols
    from gasp.mng.prj import project
    from gasp.mng.gen import merge_df
    from gasp.prop.feat import get_geom_type
    from gasp.to.shp import df_to_shp
    
    # Data to GeoDataFrame
    origens = tbl_to_obj(originsShp)
    destinos = tbl_to_obj(destinationShp)
    
    # Check if SHPs are points
    inGeomType = get_geom_type(origens, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    inGeomType = get_geom_type(destinos, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    # Re-Project data to WGS if necessary
    if originsEpsg != 4326:
        origens = project(origens, None, 4326, gisApi='pandas')
    
    if destinationEpsg != 4326:
        destinos = project(destinos, None, 4326, gisApi='pandas')
    
    # Point coordinates to columns
    origens = pointxy_to_cols(
        origens, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    destinos = pointxy_to_cols(
        destinos, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    
    # Prepare coordinates Str ("long,lat")
    origens["location"] = origens.longitude.astype(str) \
        + "," + origens.latitude.astype(str)
    destinos["location"] = destinos.longitude.astype(str) \
        + "," + destinos.latitude.astype(str)
    
    # Split destinations DataFrame into Dataframes with
    # 24 rows (request size limit of the service)
    lst_destinos = split_df(destinos, 24)
    
    # Get Keys to use
    KEYS = get_keys()
    
    # Split origins by key
    origensByKey = split_df_inN(origens, KEYS.shape[0])
    
    lst_keys = KEYS["key"].tolist()
    
    # Produce matrix
    results = []
    
    def get_matrix(origins, key):
        # Worker: for each origin row, request the durations to every
        # destination chunk and collect them in the od_matrix column
        def def_apply(row):
            rowResults = []
            for df in lst_destinos:
                strDest = df.location.str.cat(sep=";")
                
                strLocations = row["location"] + ";" + strDest
                
                dados = matrix(
                    strLocations, idxSources="0",
                    idxDestinations=";".join(
                        [str(i) for i in range(1, df.shape[0] + 1)]),
                    useKey=key, modeTransportation=modeTrans
                )
                # Throttle requests to respect service rate limits
                time.sleep(5)
                
                rowResults += dados["durations"][0]
            
            row["od_matrix"] = rowResults
            
            return row
        
        newOrigins = origins.apply(
            lambda x: def_apply(x), axis=1
        )
        
        results.append(newOrigins)
    
    # Create threads
    thrds = []
    i = 1
    for df in origensByKey:
        thrds.append(Thread(
            name="tk{}".format(str(i)), target=get_matrix,
            args=(df, lst_keys[i - 1])
        ))
        i += 1
    
    # Start all threads
    for thr in thrds:
        thr.start()
    
    # Wait for all threads to finish
    for thr in thrds:
        thr.join()
    
    # Join all dataframes
    RESULT = merge_df(results, ignIndex=False)
    
    # One "dest_<i>" column per destination
    RESULT = listval_to_newcols(RESULT, "od_matrix")
    RESULT.rename(
        columns={
            c: "dest_{}".format(c)
            for c in RESULT.columns.values
            if type(c) == int or type(c) == long
        },
        inplace=True
    )
    
    # Back to the original SRS, if needed
    if originsEpsg != 4326:
        RESULT = project(RESULT, None, originsEpsg, gisApi='pandas')
    
    # FIX: removed an unreachable 'return results' statement that
    # followed this return in the original.
    return df_to_shp(RESULT, resultShp)
def address_from_featcls(inShp, outShp, epsg_in):
    """
    Read a point geometry and return a table with the addresses
    
    Reverse-geocodes every point of inShp with the Google Geocoding API
    and writes the result, with the address fields G_ADDRESS, G_PORT,
    G_STREET and G_ZIPCODE, to outShp. Returns the GeoDataFrame.
    """
    
    from gasp.web.glg.geocod import get_address
    from gasp.fm import tbl_to_obj
    from gasp.to.geom import regulardf_to_geodf
    from gasp.fm.geom import pointxy_to_cols
    from gasp.prop.feat import get_geom_type
    from gasp.to.obj import df_to_dict, dict_to_df
    from gasp.to.shp import df_to_shp
    
    # Convert ESRI Shapefile to GeoDataFrame
    geoDf = tbl_to_obj(inShp)
    
    # Get Geometry field name
    F_GEOM = None
    for col in geoDf.columns.values:
        if col == 'geom' or col == 'geometry':
            F_GEOM = col
            break
    
    if F_GEOM is None:
        # FIX: the original proceeded with F_GEOM undefined (NameError)
        # when no geometry column was found
        raise ValueError('inShp has no geometry column (geom/geometry)')
    
    # Check if inShp has a Geom of Type Point
    inGeomType = get_geom_type(geoDf, geomCol=F_GEOM, gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    # Reproject geodf if necessary - Google expects WGS84 coordinates
    if epsg_in != 4326:
        from gasp.mng.prj import project
        geoDf = project(geoDf, None, 4326, gisApi='pandas')
    
    # Get Coords of each point
    geoDf = pointxy_to_cols(geoDf, F_GEOM, colX="x", colY='y')
    
    # Search for addresses
    geoDict = df_to_dict(geoDf)
    for idx in geoDict:
        glg_response = get_address(geoDict[idx]["y"], geoDict[idx]["x"])
        
        geoDict[idx]["G_ADDRESS"] = glg_response[0]['formatted_address']
        
        for i in glg_response[0]["address_components"]:
            # FIX: the original compared against the misspelled
            # 'street_mumber' - the Google Geocoding API component type
            # is 'street_number', so house numbers were never captured.
            if i["types"][0] == 'street_number':
                F = "G_PORT"
            elif i["types"][0] == 'route':
                F = "G_STREET"
            elif i["types"][0] == 'postal_code':
                F = "G_ZIPCODE"
            else:
                continue
            
            geoDict[idx][F] = i["long_name"]
    
    # Save results in a new file
    geoDf = dict_to_df(geoDict)
    geoDf = regulardf_to_geodf(geoDf, F_GEOM, 4326)
    
    geoDf.drop(["x", "y"], axis=1, inplace=True)
    
    # Back to the original SRS, if needed
    if epsg_in != 4326:
        geoDf = project(geoDf, None, epsg_in, gisApi='pandas')
    
    df_to_shp(geoDf, outShp)
    
    return geoDf
def eachfeat_to_newshp(inShp, outFolder, epsg=None):
    """
    Export each feature in inShp to a new/single File
    
    Each output is named <inShp name>_<FID><inShp extension> and placed
    in outFolder. When epsg is None the SRS is read from the input layer.
    Returns the list of the created file paths.
    """
    
    import os
    from osgeo import ogr
    from gasp.prop.ff import drv_name
    from gasp.prop.feat import get_geom_type
    from gasp.mng.fld import lst_fld
    from gasp.mng.fld import ogr_copy_fields
    from gasp.oss import get_fileformat, get_filename
    
    inDt = ogr.GetDriverByName(drv_name(inShp)).Open(inShp)
    
    lyr = inDt.GetLayer()
    
    # Get SRS for the output
    if not epsg:
        from gasp.prop.prj import get_shp_sref
        srs = get_shp_sref(lyr)
    
    else:
        from gasp.prop.prj import get_sref_from_epsg
        srs = get_sref_from_epsg(epsg)
    
    # Get fields name
    fields = lst_fld(lyr)
    
    # Get Geometry type (as OGR geometry class, shared by all outputs)
    geomCls = get_geom_type(inShp, gisApi='ogr', name=None, py_cls=True)
    
    # Read features and create a new file for each feature
    RESULT_SHP = []
    for feat in lyr:
        # Create output: <input name>_<FID><input extension>
        newShp = os.path.join(outFolder, "{}_{}{}".format(
            get_filename(inShp), str(feat.GetFID()),
            get_fileformat(inShp)
        ))
        
        newData = ogr.GetDriverByName(
            drv_name(newShp)).CreateDataSource(newShp)
        
        newLyr = newData.CreateLayer(
            str(get_filename(newShp)), srs, geom_type=geomCls)
        
        # Copy fields from input to output
        ogr_copy_fields(lyr, newLyr)
        
        newLyrDefn = newLyr.GetLayerDefn()
        
        # Create new feature
        newFeat = ogr.Feature(newLyrDefn)
        
        # Copy geometry
        geom = feat.GetGeometryRef()
        newFeat.SetGeometry(geom)
        
        # Set fields attributes
        for fld in fields:
            newFeat.SetField(fld, feat.GetField(fld))
        
        # Save feature
        newLyr.CreateFeature(newFeat)
        
        newFeat.Destroy()
        
        # Release the layer and destroy the DataSource so the file is
        # flushed to disk before the next feature is processed
        del newLyr
        newData.Destroy()
        
        RESULT_SHP.append(newShp)
    
    return RESULT_SHP
def connect_lines_to_near_lines(inLines, nearLines, outLines, tollerance=1000):
    """
    Connect all vertex in a line to the nearest vertex of the
    nearest line
    
    For every vertex of every line in inLines, finds the closest vertex
    of the lines in nearLines within 'tollerance' and writes a straight
    connecting line to outLines. Returns outLines.
    """
    
    import os
    from osgeo import ogr
    from gasp.oss import get_filename
    from gasp.prop.ff import drv_name
    # FIX: the original imported get_geom_attr but called get_geom_type
    # below, raising NameError.
    from gasp.prop.feat import get_geom_type
    
    # Check Geometries
    inLinesGeom = get_geom_type(inLines, gisApi='ogr')
    nearLinesGeom = get_geom_type(nearLines, gisApi='ogr')
    
    if inLinesGeom != 'LINESTRING' or \
            nearLinesGeom != 'LINESTRING':
        raise ValueError('This method supports only LINESTRINGS')
    
    # Open inLines
    shpLines = ogr.GetDriverByName(drv_name(inLines)).Open(inLines)
    # Get Layer
    lyrLines = shpLines.GetLayer()
    
    # Open near
    shpNear = ogr.GetDriverByName(drv_name(nearLines)).Open(nearLines)
    
    # Create Output
    outSrc = ogr.GetDriverByName(drv_name(outLines)).CreateDataSource(outLines)
    outLyr = outSrc.CreateLayer(
        get_filename(outLines), geom_type=ogr.wkbLineString)
    
    lineDefn = outLyr.GetLayerDefn()
    
    # For each vertex in 'inLines', find the nearest vertex on the
    # 'nearLines' layer and write a straight line connecting them
    for feat in lyrLines:
        # Get Geometry
        geom = feat.GetGeometryRef()
        
        # Get points
        nrPnt = geom.GetPointCount()
        for p in range(nrPnt):
            x, y, z = geom.GetPoint(p)
            
            pnt = ogr.Geometry(ogr.wkbPoint)
            pnt.AddPoint(x, y)
            
            # Get point buffer
            # NOTE(review): draw_buffer is not imported in this scope -
            # presumably a gasp helper; confirm and add the import.
            bufPnt = draw_buffer(pnt, tollerance)
            
            # Apply a spatial filter based on the buffer
            # to restrict the nearLines Layer
            lyrNear = shpNear.GetLayer()
            lyrNear.SetSpatialFilter(bufPnt)
            
            # For line in the filtered 'nearLyr'
            # Find the closest vertex
            dist = None
            for __feat in lyrNear:
                __geom = __feat.GetGeometryRef()
                
                points = __geom.GetPointCount()
                for _p in range(points):
                    _x, _y, _z = __geom.GetPoint(_p)
                    
                    # Euclidean distance between the two vertices
                    distance = ((x - _x) ** 2 + (y - _y) ** 2) ** 0.5
                    
                    if dist is None or distance < dist[0]:
                        dist = [distance, _x, _y]
            
            # FIX: guard against no candidate line inside the tolerance
            # buffer - the original crashed with TypeError (dist was 0)
            if dist is None:
                del lyrNear
                continue
            
            # Write a new line
            line = ogr.Geometry(ogr.wkbLineString)
            line.AddPoint(x, y)
            line.AddPoint(dist[1], dist[2])
            
            new_feature = ogr.Feature(lineDefn)
            new_feature.SetGeometry(line)
            outLyr.CreateFeature(new_feature)
            new_feature.Destroy()
            
            del lyrNear
    
    outSrc.Destroy()
    # FIX: the original called Destroy on the undefined name 'shpPnt'
    shpLines.Destroy()
    shpNear.Destroy()
    
    return outLines
def connect_points_to_near_line(inPnt, nearLines, outLines, tollerance=1000, nearLinesWpnt=None):
    """
    Connect all points to the nearest line in the perpendicular
    
    For every point of inPnt, finds the nearest line of nearLines (within
    'tollerance') and writes, to outLines, a line connecting the point to
    its perpendicular projection on that line.
    
    If nearLinesWpnt is given, a copy of nearLines with the projected
    points inserted as new vertices is also written.
    
    NOTE(review): contains Python 2 print statements and debug leftovers
    (hardcoded D:/gis/xyz dump paths, an early 'return pnt, shplyLnh'
    inside an exception handler) - see inline notes.
    """
    
    import os
    import numpy as np
    from osgeo import ogr
    from shapely.geometry import LineString, Point
    from gasp.prop.ff import drv_name
    from gasp.prop.feat import get_geom_type
    
    # Check Geometries
    inPntGeom = get_geom_type(inPnt, gisApi='ogr')
    nearLinesGeom = get_geom_type(nearLines, gisApi='ogr')
    
    if inPntGeom != 'POINT' or \
        nearLinesGeom != 'LINESTRING':
        raise ValueError('This method supports only LINESTRINGS')
    
    # Open inLines
    shpPnt = ogr.GetDriverByName(drv_name(inPnt)).Open(inPnt)
    # Get Layer
    lyrPnt = shpPnt.GetLayer()
    
    # Open near
    shpNear = ogr.GetDriverByName(drv_name(nearLines)).Open(nearLines)
    
    # Create Output
    outSrc = ogr.GetDriverByName(drv_name(outLines)).CreateDataSource(outLines)
    outLyr = outSrc.CreateLayer(os.path.splitext(
        os.path.basename(outLines))[0], geom_type=ogr.wkbLineString)
    
    if nearLinesWpnt:
        # Projected points grouped by the FID of the line they fall on
        newPointsInLines = {}
    
    lineDefn = outLyr.GetLayerDefn()
    
    # For each point in 'inLines', find the near point on the
    # the 'nearLines' layer
    for feat in lyrPnt:
        FID = feat.GetFID()
        
        # Get Geometry
        pnt = feat.GetGeometryRef()
        x, y = pnt.GetX(), pnt.GetY()
        
        # Get point buffer
        # NOTE(review): draw_buffer is not imported in this scope -
        # presumably a gasp helper; confirm
        bufPnt = draw_buffer(pnt, tollerance)
        
        # Apply a spatial filter based on the buffer
        # to restrict the nearLines Layer
        lyrNear = shpNear.GetLayer()
        lyrNear.SetSpatialFilter(bufPnt)
        
        # For line in the filtered 'nearLyr'
        # Find point in the perpendicular
        dist = 0
        for __feat in lyrNear:
            __FID = __feat.GetFID()
            __geom = __feat.GetGeometryRef()
            
            points = __geom.GetPointCount()
            
            for _p in range(points - 1):
                # Get line segment
                x1, y1, z1 = __geom.GetPoint(_p)
                x2, y2, z2 = __geom.GetPoint(_p + 1)
                
                # Create Shapely Geometries
                # NOTE(review): 'pnt' is re-bound here, shadowing the OGR
                # point geometry read above
                lnh = LineString([(x1, y1), (x2, y2)])
                pnt = Point(x, y)
                
                # Get distance between point and line
                # Get near point of the line (perpendicular projection)
                d = pnt.distance(lnh)
                npnt = lnh.interpolate(lnh.project(pnt))
                
                if not dist:
                    dist = [d, npnt.x, npnt.y]
                    LINE_FID = __FID
                else:
                    if d < dist[0]:
                        dist = [d, npnt.x, npnt.y]
                        LINE_FID = __FID
        
        # Write a new line
        # NOTE(review): if no line is inside the tolerance buffer, 'dist'
        # is still 0 here and dist[1] raises TypeError - confirm intended
        # behaviour
        line = ogr.Geometry(ogr.wkbLineString)
        line.AddPoint(x, y)
        line.AddPoint(dist[1], dist[2])
        
        new_feature = ogr.Feature(lineDefn)
        new_feature.SetGeometry(line)
        outLyr.CreateFeature(new_feature)
        
        new_feature.Destroy()
        
        if nearLinesWpnt:
            if LINE_FID not in newPointsInLines:
                newPointsInLines[LINE_FID] = [Point(dist[1], dist[2])]
            else:
                newPointsInLines[LINE_FID].append(Point(dist[1], dist[2]))
        
        del lyrNear
    
    outSrc.Destroy()
    shpPnt.Destroy()
    shpNear.Destroy()
    
    if nearLinesWpnt:
        from gasp.mng.fld import ogr_copy_fields
        from shapely.ops import split as lnhSplit
        
        shpNear = ogr.GetDriverByName(drv_name(nearLines)).Open(nearLines)
        
        updateLines = ogr.GetDriverByName(
            drv_name(nearLinesWpnt)).CreateDataSource(nearLinesWpnt)
        
        # NOTE(review): get_filename is not imported in this function
        # (NameError here) - it lives in gasp.oss; confirm
        upLnhLyr = updateLines.CreateLayer(get_filename(nearLinesWpnt),
            geom_type=ogr.wkbLineString)
        
        # Create shpNear Layer Again
        lyrNear = shpNear.GetLayer()
        
        # Copy fields
        ogr_copy_fields(lyrNear, upLnhLyr)
        
        # Out lyr definition
        upDefn = upLnhLyr.GetLayerDefn()
        
        for feat in lyrNear:
            LINE_FID = feat.GetFID()
            # NOTE(review): Python 2 print statement - debug leftover
            print LINE_FID
            geom = feat.GetGeometryRef()
            
            new_feature = ogr.Feature(upDefn)
            
            if LINE_FID not in newPointsInLines:
                # Copy line to updateLines layer
                new_feature.SetGeometry(geom)
            
            else:
                # Copy to Shapely Line String
                points = geom.GetPointCount()
                
                lstPnt = []
                for _p in range(points):
                    x1, y1, z1 = geom.GetPoint(_p)
                    lstPnt.append((x1, y1))
                
                shplyLnh = LineString(lstPnt)
                
                # For new point:
                # Line split and reconstruction
                for pnt in newPointsInLines[LINE_FID]:
                    try:
                        splitted = lnhSplit(shplyLnh, pnt)
                    except:
                        # NOTE(review): debug leftover - dumps the failing
                        # line/point to hardcoded local paths and returns
                        # early with a (pnt, shplyLnh) tuple instead of
                        # raising; should be removed or replaced with a
                        # proper error
                        shpTstL = ogr.GetDriverByName(
                            "ESRI Shapefile").CreateDataSource(
                                r'D:\gis\xyz\lnht.shp')
                        shpL = shpTstL.CreateLayer('lnht',
                            geom_type=ogr.wkbLineString)
                        shpTstP = ogr.GetDriverByName(
                            "ESRI Shapefile").CreateDataSource(
                                r'D:\gis\xyz\pntt.shp')
                        # NOTE(review): the point layer is created on
                        # shpTstL, not shpTstP - looks like a bug in the
                        # debug code itself
                        shpP = shpTstL.CreateLayer('pntt',
                            geom_type=ogr.wkbPoint)
                        
                        defnL = shpL.GetLayerDefn()
                        defnP = shpP.GetLayerDefn()
                        
                        featL = ogr.Feature(defnL)
                        featP = ogr.Feature(defnP)
                        
                        geomL = ogr.Geometry(ogr.wkbLineString)
                        for i in list(shplyLnh.coords):
                            geomL.AddPoint(i[0], i[1])
                        
                        geomP = ogr.Geometry(ogr.wkbPoint)
                        print list(pnt.coords)
                        geomP.AddPoint(
                            list(pnt.coords)[0][0], list(pnt.coords)[0][1])
                        
                        featL.SetGeometry(geomL)
                        featP.SetGeometry(geomP)
                        
                        shpL.CreateFeature(featL)
                        shpP.CreateFeature(featP)
                        
                        shpTstL.Destroy()
                        shpTstP.Destroy()
                        
                        return pnt, shplyLnh
                    
                    c = 0
                    for l in splitted:
                        if not c:
                            newLnh = list(l.coords)
                        else:
                            # NOTE(review): 'newlnh' (lowercase) is
                            # undefined - this branch raises NameError;
                            # it should read 'newLnh += ...'
                            newlnh += list(l.coords)[1:]
                        
                        c += 1
                    
                    shplyLnh = LineString(newLnh)
                
                # Finally copy line to updateLines Layer
                # NOTE(review): gLine is built but never attached to
                # new_feature (no SetGeometry call), so the rebuilt
                # geometry is lost - looks like a bug
                gLine = ogr.Geometry(ogr.wkbLineString)
                for __pnt in list(shplyLnh.coords):
                    gLine.AddPoint(__pnt[0], __pnt[1])
            
            for i in range(0, upDefn.GetFieldCount()):
                new_feature.SetField(
                    upDefn.GetFieldDefn(i).GetNameRef(), feat.GetField(i))
            
            upLnhLyr.CreateFeature(new_feature)
            
            new_feature.Destroy()
        
        shpNear.Destroy()
    
    return outLines
def dist_matrix_by_shp(oShp, dShp, oEpsg, dEpsg, result, transMode=None):
    """
    Create distance matrix using shapes and Google Maps API
    
    - Uses my first API_KEY
    
    NOTE(review): the API key is hardcoded below - it should come from
    configuration/credentials, not from source code; sanitizeDataCols is
    also undefined in this module (see inline note).
    """
    
    import time
    import pandas
    from gasp.fm import tbl_to_obj
    from gasp.mng.split import split_df
    from gasp.mng.prj import project
    from gasp.mng.fld.df import listval_to_newcols
    from gasp.prop.feat import get_geom_type
    from gasp.mng.gen import merge_df
    from gasp.web.glg.distmx import dist_matrix
    from gasp.to import obj_to_tbl
    from gasp.to.obj import df_to_list
    from gasp.oss import get_filename
    
    # Origins and Destionations to GeoDataframe
    originsDf = tbl_to_obj(oShp)
    destnatDf = tbl_to_obj(dShp)
    
    # Check Geometries type - shapes should be of type point
    originsGeom = get_geom_type(originsDf, gisApi='pandas')
    destGeom = get_geom_type(destnatDf, gisApi='pandas')
    if (originsGeom != 'Point' and originsGeom != 'MultiPoint') or \
        (destGeom != 'Point' and destGeom != 'MultiPoint'):
        raise ValueError('All input geometries must be of type point')
    
    # Re-project GeoDataframes if needed (the API expects WGS84)
    originsDf = originsDf if oEpsg == 4326 else \
        project(originsDf, None, 4326, gisApi='pandas')
    destnatDf = destnatDf if dEpsg == 4326 else \
        project(destnatDf, None, 4326, gisApi='pandas')
    
    # Geom to Field as str ("lat,long")
    originsDf["geom"] = originsDf["geometry"].y.astype(str) + "," + \
        originsDf["geometry"].x.astype(str)
    destnatDf["geom"] = destnatDf["geometry"].y.astype(str) + "," + \
        destnatDf["geometry"].x.astype(str)
    
    # Keep the original FIDs to relate results back to the inputs
    originsDf["old_fid"] = originsDf.index
    destnatDf["old_fid"] = destnatDf.index
    
    # Split Destinations
    # NOTE(review): despite the comment above, this splits the ORIGINS
    # in chunks of 95 rows - presumably a request size limit of the
    # Distance Matrix API; confirm
    lstOrigins = split_df(originsDf, 95)
    for odf in lstOrigins:
        odf.reset_index(inplace=True)
    
    lstDestinations = df_to_list(destnatDf)
    
    RESULTS = []
    for destino in lstDestinations:
        for oDf in lstOrigins:
            # One request: this chunk of origins against one destination
            # NOTE(review): hardcoded API key (see docstring)
            matrix = dist_matrix(
                str(oDf.geom.str.cat(sep="|")),
                str(destino["geom"]),
                oDf.shape[0], 1,
                transport_mode=transMode,
                useKey='AIzaSyAmyPmqtxD20urqtpCpn4ER74a6J4N403k')
            
            matrix = pandas.DataFrame(matrix)
            matrix = listval_to_newcols(matrix, "elements")
            
            # Relate response rows back to the origin rows by position
            matrix = matrix.merge(
                oDf, how='inner', left_index=True, right_index=True)
            
            matrix.rename(columns={
                'old_fid': "fid_origin", 0: "cost"
            }, inplace=True)
            
            matrix["fid_destin"] = destino['old_fid']
            
            RESULTS.append(matrix)
            
            # Throttle requests to respect API rate limits
            time.sleep(5)
    
    # Join all dataframes
    RESULT = merge_df(RESULTS, ignIndex=False)
    # NOTE(review): sanitizeDataCols is not defined or imported anywhere
    # in this module - this line raises NameError; confirm where it lives
    RESULT = sanitizeDataCols(RESULT, "cost")
    
    # Keep only geometry/old_fid from the origin columns
    RESULT.drop([
        x for x in originsDf.columns.values
        if x != "geometry" and x != "old_fid"
    ], axis=1, inplace=True)
    RESULT.rename(columns={"geometry": "origin_geom"}, inplace=True)
    
    # Attach destination geometries
    RESULT = RESULT.merge(
        destnatDf, how='inner',
        left_on=["fid_destin"], right_on=["old_fid"])
    RESULT.drop(
        [x for x in destnatDf.columns.values if x != "geometry"],
        axis=1, inplace=True)
    RESULT.rename(columns={"geometry": "destin_geom"}, inplace=True)
    
    RESULT["origin_geom"] = RESULT.origin_geom.astype(str)
    RESULT["destin_geom"] = RESULT.destin_geom.astype(str)
    
    obj_to_tbl(RESULT, result, sheetsName=get_filename(result))
    
    return result
def pnt_to_facility(pnt, pntSrs, facilities, facSrs, transMode="driving"):
    """
    Calculate distance between points and the nearest facility.

    pnt / facilities -- paths to point shapefiles (origins and candidate
                        facilities)
    pntSrs / facSrs  -- EPSG codes of the inputs (reprojected to 4326 for
                        the Google API, and back for the output)
    transMode        -- Google transport mode (default 'driving')

    Writes "<pnt>_result.shp" next to ``pnt`` with a "duration" column
    (minutes to the nearest facility) and returns the result GeoDataFrame.

    # TODO: Add the possibility to save the path between origins
    and destinations
    """

    import os
    import pandas
    from gasp.fm import tbl_to_obj
    from gasp.to.geom import regulardf_to_geodf
    # FIX: was "from gasp.mng.prj import project_df" while the code below
    # calls project(...) -- aligned with the sibling functions in this file.
    from gasp.mng.prj import project
    from gasp.prop.feat import get_geom_type
    from gasp.oss import get_filename
    from gasp.to.obj import df_to_dict, dict_to_df
    from gasp.to.shp import df_to_shp
    from gasp.web.glg.distmx import dist_matrix

    # Convert SHPs to GeoDataFrame
    pntDf = tbl_to_obj(pnt)
    # FIX: the facilities DataFrame was previously discarded
    # (bare "tbl_to_obj(facilities)"), leaving 'facil' undefined.
    facil = tbl_to_obj(facilities)

    # Check if SHPs are points
    originsGeom = get_geom_type(pntDf, geomCol="geometry", gisApi='pandas')
    if originsGeom != 'Point' and originsGeom != 'MultiPoint':
        raise ValueError('All input geometry must be of type point')

    destGeom = get_geom_type(facil, geomCol="geometry", gisApi='pandas')
    if destGeom != 'Point' and destGeom != 'MultiPoint':
        raise ValueError('All input geometry must be of type point')

    # Re-Project if necessary (Google API expects EPSG:4326)
    pntDf = pntDf if pntSrs == 4326 else project(
        pntDf, None, 4326, gisApi='pandas')
    facil = facil if facSrs == 4326 else project(
        facil, None, 4326, gisApi='pandas')

    # Coords to cols as "lat,lng" strings
    pntDf["geom"] = pntDf["geometry"].y.astype(str) + "," + \
        pntDf["geometry"].x.astype(str)
    # FIX: longitude previously used .y twice, producing "lat,lat"
    facil["geom"] = facil["geometry"].y.astype(str) + "," + \
        facil["geometry"].x.astype(str)

    # Get distance between points and nearest facility:
    # one API request per origin point against all facilities.
    pntDict = df_to_dict(pntDf)
    for idx in pntDict:
        destStr = str(facil["geom"].str.cat(sep="|"))

        glg_resp = dist_matrix(
            pntDict[idx]["geom"], destStr,
            1, int(facil.shape[0]),
            transport_mode=transMode)

        # One row in the response; its "elements" list has one entry
        # per facility with status/distance/duration.
        matrix = pandas.DataFrame(glg_resp[0]["elements"])

        matrix.drop(["status", "distance"], axis=1, inplace=True)
        # Expand the nested "duration" dicts into text/value columns
        matrix = pandas.concat([
            matrix.drop(["duration"], axis=1),
            matrix["duration"].apply(pandas.Series)
        ], axis=1)

        matrix.drop("text", axis=1, inplace=True)
        matrix.rename(columns={"value": "duration"}, inplace=True)

        # Keep only the nearest facility; API returns seconds -> minutes
        pntDict[idx]["duration"] = matrix.duration.min() / 60.0

    pntDf = dict_to_df(pntDict)
    pntDf = regulardf_to_geodf(pntDf, "geometry", 4326)

    # Return the output to the caller's original SRS
    if pntSrs != 4326:
        pntDf = project(pntDf, None, pntSrs, gisApi='pandas')

    df_to_shp(pntDf, os.path.join(
        os.path.dirname(pnt),
        "{}_{}.shp".format(get_filename(pnt), "result")
    ))

    return pntDf
def dist_matrix_using_shp(originsShp, destinationsShp, originsEpsg,
                          destinationsEpsg, outTable, transMode=None):
    """
    Create a distance matrix using shapes and Google Maps API

    Origins are partitioned across all available API keys (one worker
    thread per key) to spread quota usage; each thread queries batches of
    10 origins x 10 destinations.

    originsShp / destinationsShp -- paths to point shapefiles
    originsEpsg / destinationsEpsg -- EPSG codes (reprojected to 4326
                                      for the API if needed)
    outTable  -- output table path
    transMode -- Google transport mode; None = API default

    Returns the value of obj_to_tbl (the written table).
    """

    import time
    import pandas
    from threading import Thread
    from gasp.mng.split import split_df, split_df_inN
    from gasp.mng.prj import project
    from gasp.prop.feat import get_geom_type
    from gasp.mng.gen import merge_df
    from gasp.fm import tbl_to_obj
    from gasp.to import obj_to_tbl
    from gasp.web.glg import get_keys
    from gasp.web.glg.distmx import dist_matrix

    # Origins and Destionations to GeoDataframe
    originsDf = tbl_to_obj(originsShp)
    destnatDf = tbl_to_obj(destinationsShp)

    # Check Geometries type - shapes should be of type point
    originsGeom = get_geom_type(originsDf, gisApi='pandas')
    destGeom = get_geom_type(destnatDf, gisApi='pandas')
    if (originsGeom != 'Point' and originsGeom != 'MultiPoint') or \
        (destGeom != 'Point' and destGeom != 'MultiPoint'):
        raise ValueError('All input geometries must be of type point')

    # Re-project GeoDataframes if needed
    originsDf = originsDf if originsEpsg == 4326 else \
        project(originsDf, None, 4326, gisApi='pandas')
    # FIX: keyword was misspelled "gisAPi", raising TypeError whenever
    # destinationsEpsg != 4326.
    destnatDf = destnatDf if destinationsEpsg == 4326 else \
        project(destnatDf, None, 4326, gisApi='pandas')

    # Geom to Field as "lat,lng" str
    originsDf["geom"] = originsDf["geometry"].y.astype(str) + "," + \
        originsDf["geometry"].x.astype(str)
    destnatDf["geom"] = destnatDf["geometry"].y.astype(str) + "," + \
        destnatDf["geometry"].x.astype(str)

    # Preserve original row ids so results can be joined back later
    originsDf["old_fid"] = originsDf.index
    destnatDf["old_fid"] = destnatDf.index

    # Split destinations DataFrame into DataFrames with at most 10 rows
    lst_destinos = split_df(destnatDf, 10)

    # Get Keys
    KEYS = get_keys()
    lst_keys = KEYS["key"].tolist()

    # One origins partition per key; if the split produced one partition
    # too many, fold the last one into the one before it.
    origensByKey = split_df_inN(originsDf, KEYS.shape[0])
    if len(origensByKey) == len(lst_keys) + 1:
        origensByKey[-2] = origensByKey[-2].append(origensByKey[-1])
        del origensByKey[-1]

    # Produce matrix for each origins in origensByKey
    results = []

    def get_matrix(origins, key):
        # Worker: 10x10 batches against the API with this thread's key
        subOrigins = split_df(origins, 10)

        for df in subOrigins:
            for __df in lst_destinos:
                matrix = dist_matrix(
                    str(df.geom.str.cat(sep="|")),
                    str(__df.geom.str.cat(sep="|")),
                    df.shape[0], __df.shape[0],
                    transport_mode=transMode,
                    useKey=str(key))

                matrix = pandas.DataFrame(matrix)
                # Expand the nested "elements" lists into columns
                matrix = pandas.concat([
                    matrix.drop(["elements"], axis=1),
                    matrix["elements"].apply(pandas.Series)
                ], axis=1)

                # Flatten the NxM response into (fid_origin, fid_destin, cost)
                originsFID = df.old_fid.tolist()
                destinaFID = __df.old_fid.tolist()

                mm = []
                for i in range(len(originsFID)):
                    for e in range(len(destinaFID)):
                        ll = [originsFID[i], destinaFID[e], matrix.iloc[i, e]]
                        mm.append(ll)

                Fmatrix = pandas.DataFrame(
                    mm, columns=["fid_origin", "fid_destin", "cost"])

                results.append(Fmatrix)

                # Throttle requests to avoid hitting API rate limits
                time.sleep(5)

    # Create threads
    thrds = []
    i = 1
    for df in origensByKey:
        thrds.append(Thread(
            name="tk{}".format(str(i)), target=get_matrix,
            args=(df, lst_keys[i - 1])))
        i += 1

    # Start all threads
    for thr in thrds:
        thr.start()

    # Wait for all threads to finish
    for thr in thrds:
        thr.join()

    # Join all dataframes
    RESULT = merge_df(results, ignIndex=False)
    # NOTE(review): sanitizeDataCols is not imported or defined in this
    # view -- presumably defined elsewhere in this module; verify.
    RESULT = sanitizeDataCols(RESULT, "cost")

    # Attach origin geometry, then destination geometry, via the fids
    RESULT = RESULT.merge(
        originsDf, how='inner',
        left_on=["fid_origin"], right_on=["old_fid"])
    RESULT.drop(
        [x for x in originsDf.columns.values if x != "geometry"],
        axis=1, inplace=True)
    RESULT.rename(columns={"geometry": "origin_geom"}, inplace=True)

    RESULT = RESULT.merge(
        destnatDf, how='inner',
        left_on=["fid_destin"], right_on=["old_fid"])
    RESULT.drop(
        [x for x in destnatDf.columns.values if x != "geometry"],
        axis=1, inplace=True)
    RESULT.rename(columns={"geometry": "destin_geom"}, inplace=True)

    # Geometries serialized as strings for the output table
    RESULT["origin_geom"] = RESULT.origin_geom.astype(str)
    RESULT["destin_geom"] = RESULT.destin_geom.astype(str)

    return obj_to_tbl(RESULT, outTable)
def shp_to_psql(con_param, shpData, srsEpsgCode, pgTable=None, api="pandas"):
    """
    Send Shapefile to PostgreSQL

    if api is equal to "pandas" - GeoPandas API will be used;
    if api is equal to "shp2pgsql" - shp2pgsql tool will be used.

    con_param   -- PostgreSQL connection parameters (dict with "DATABASE")
    shpData     -- path to one shapefile, a list of paths, or a folder
                   (every .shp inside is loaded)
    srsEpsgCode -- EPSG code of the input data
    pgTable     -- optional table name (str) or list of names parallel to
                   the shapefiles; default is each file's lowercased name

    Returns the created table name, or a list of names when several
    shapefiles were loaded.

    Raises ValueError on an unknown api or a shapefile without a
    geometry column.
    """

    import os
    from gasp.oss import get_filename

    if api == "pandas":
        from gasp.fm import tbl_to_obj
        from gasp.prop.feat import get_geom_type
    elif api == "shp2pgsql":
        from gasp import exec_cmd
        from gasp.sql import run_sql_file
        from gasp.oss.ops import del_file
    else:
        raise ValueError(
            'api value is not valid. options are: pandas and shp2pgsql')

    # Check if shp is folder
    if os.path.isdir(shpData):
        from gasp.oss import list_files
        shapes = list_files(shpData, file_format='.shp')
    else:
        from gasp import goToList
        shapes = goToList(shpData)

    tables = []
    for _i in range(len(shapes)):
        # Get Table name
        tname = get_filename(shapes[_i], forceLower=True) if not pgTable else \
            pgTable[_i] if type(pgTable) == list else pgTable

        # Import data
        if api == "pandas":
            # SHP to DataFrame
            df = tbl_to_obj(shapes[_i])
            df.rename(
                columns={x: x.lower() for x in df.columns.values},
                inplace=True)

            if "geometry" in df.columns.values:
                geomCol = "geometry"
            elif "geom" in df.columns.values:
                geomCol = "geom"
            else:
                # Parenthesized print works on both Python 2 and 3
                print(df.columns.values)
                # FIX: was "raise ValuError" (NameError) -- the intended
                # ValueError is now actually raised.
                raise ValueError("No Geometry found in shp")

            # GeoDataFrame to PSQL
            # NOTE(review): geodf_to_pgsql is not imported here --
            # presumably defined elsewhere in this module; verify.
            geodf_to_pgsql(
                con_param, df, tname, srsEpsgCode,
                get_geom_type(
                    shapes[_i], name=True, py_cls=False, gisApi='ogr'),
                colGeom=geomCol)

        else:
            # Generate a SQL load script with shp2pgsql, run it, delete it
            sql_script = os.path.join(
                os.path.dirname(shapes[_i]), tname + '.sql')

            cmd = (
                'shp2pgsql -I -s {epsg} -W UTF-8 '
                '{shp} public.{name} > {out}'
            ).format(
                epsg=srsEpsgCode, shp=shapes[_i],
                name=tname, out=sql_script)

            exec_cmd(cmd)

            run_sql_file(con_param, con_param["DATABASE"], sql_script)

            del_file(sql_script)

        tables.append(tname)

    return tables[0] if len(tables) == 1 else tables