def sgbd_get_feat_not_within(dbcon, inTbl, inGeom, withinTbl, withinGeom,
                             outTbl, inTblCols=None, outTblIsFile=None,
                             apiToUse='OGR_SPATIALITE'):
    """
    Select the features of inTbl that are not within any feature of
    withinTbl and write them to outTbl.

    apiToUse options:
    * OGR_SPATIALITE;
    * POSTGIS.

    Returns outTbl.
    """

    from gasp import goToList

    # Columns to keep in the output
    cols = "*" if not inTblCols else ", ".join(goToList(inTblCols))

    # Anti-join: keep geometries that never appear in the set of
    # geometries contained by some withinTbl feature
    template = (
        "SELECT {selCols} FROM {tbl} AS in_tbl WHERE ("
        "in_tbl.{in_geom} NOT IN ("
        "SELECT inin_tbl.{in_geom} FROM {wi_tbl} AS wi_tbl "
        "INNER JOIN {tbl} AS inin_tbl ON "
        "ST_Within(wi_tbl.{wi_geom}, inin_tbl.{in_geom})"
        "))"
    )

    Q = template.format(
        selCols=cols, tbl=inTbl, in_geom=inGeom,
        wi_tbl=withinTbl, wi_geom=withinGeom
    )

    if apiToUse == "OGR_SPATIALITE":
        if outTblIsFile:
            from gasp.anls.exct import sel_by_attr

            sel_by_attr(dbcon, Q, outTbl, api_gis='ogr')
        else:
            from gasp.sql.mng.qw import ntbl_by_query

            ntbl_by_query(dbcon, outTbl, Q, api='ogr2ogr')

    elif apiToUse == "POSTGIS":
        if outTblIsFile:
            from gasp.to.shp import psql_to_shp

            psql_to_shp(
                dbcon, Q, outTbl, api='pgsql2shp',
                geom_col=None, tableIsQuery=True
            )
        else:
            from gasp.sql.mng.qw import ntbl_by_query

            ntbl_by_query(dbcon, outTbl, Q, api='psql')

    else:
        raise ValueError((
            "API {} is not available. OGR_SPATIALITE and POSTGIS "
            "are the only valid options"
        ))

    return outTbl
def FilterAndExport(CLS, cnt):
    """
    Export the features of one LULC class (CLS) to Shapefile and then
    rasterize that Shapefile.

    Nested helper: relies on names from the enclosing scope (api, osmcon,
    SQL_Q, polyTbl, folder, cellsize, srscode, rstTemplate, clsRst,
    timeGasto) -- it must be defined inside the function that sets those up.

    Side effects: clsRst[int(CLS)] receives the path of the produced
    raster; timeGasto[cnt + 1] / timeGasto[cnt + 2] receive
    (label, duration) tuples for the export and rasterization steps.
    """

    time_x = datetime.datetime.now().replace(microsecond=0)

    if api == 'SQLITE':
        # SpatiaLite source -> export with OGR
        shp = sel_by_attr(
            osmcon, SQL_Q.format(lc=str(CLS), tbl=polyTbl),
            os.path.join(folder, 'sel_{}.shp'.format(str(CLS))),
            api_gis='ogr'
        )
    else:
        # PostGIS source -> export with pgsql2shp
        shp = sel_by_attr(
            osmcon, SQL_Q.format(lc=str(CLS), tbl=polyTbl),
            os.path.join(folder, 'sel_{}.shp'.format(str(CLS))),
            api='pgsql2shp', geom_col="geometry", tableIsQuery=True
        )

    time_y = datetime.datetime.now().replace(microsecond=0)

    # Shapefile -> raster (0 as background value, aligned with rstTemplate)
    rstCls = shp_to_raster(
        shp, None, cellsize, 0,
        os.path.join(folder, 'sel_{}.tif'.format(str(CLS))),
        epsg=srscode, rst_template=rstTemplate, api='gdal'
    )

    time_z = datetime.datetime.now().replace(microsecond=0)

    clsRst[int(CLS)] = rstCls
    timeGasto[cnt + 1] = ('toshp_{}'.format(str(CLS)), time_y - time_x)
    timeGasto[cnt + 2] = ('torst_{}'.format(str(CLS)), time_z - time_y)
def selAndExport(CLS, cnt):
    """
    Export the features of one class (CLS) that satisfy the WHR filter to
    Shapefile and rasterize the result.

    Nested helper: relies on names from the enclosing scope (api, osmLink,
    SQL_Q, WHR, OPERATOR, RULE_COL, GEOM_AREA, polyTbl, folder, cellsize,
    srscode, rstTemplate, clsRst, timeGasto).

    Side effects: clsRst[int(CLS)] receives the raster path and
    timeGasto[cnt + 1] / timeGasto[cnt + 2] receive timing tuples.
    """

    time_x = datetime.datetime.now().replace(microsecond=0)

    if api == "SQLITE":
        # SpatiaLite source -> export with OGR
        shpCls = sel_by_attr(
            osmLink,
            SQL_Q.format(c=str(CLS), tbl=polyTbl, w=WHR.format(
                op=OPERATOR, r=RULE_COL, ga=GEOM_AREA, cls_=CLS
            )),
            os.path.join(folder, "{}_{}.shp".format(RULE_COL, CLS)),
            api_gis='ogr'
        )
    else:
        # PostGIS source -> export with pgsql2shp
        shpCls = sel_by_attr(
            osmLink,
            SQL_Q.format(c=str(CLS), tbl=polyTbl, w=WHR.format(
                op=OPERATOR, r=RULE_COL, ga=GEOM_AREA, cls_=CLS
            )),
            os.path.join(folder, "{}_{}.shp".format(RULE_COL, str(CLS))),
            api='pgsql2shp', geom_col="geometry", tableIsQuery=True
        )

    time_y = datetime.datetime.now().replace(microsecond=0)

    # Shapefile -> raster aligned with rstTemplate
    rst = shp_to_raster(
        shpCls, None, cellsize, 0,
        os.path.join(
            folder, "{}_{}.tif".format(RULE_COL, CLS)
        ),
        epsg=srscode, rst_template=rstTemplate, api='gdal'
    )

    time_z = datetime.datetime.now().replace(microsecond=0)

    clsRst[int(CLS)] = rst
    timeGasto[cnt + 1] = ('sq_to_shp_{}'.format(str(CLS)), time_y - time_x)
    timeGasto[cnt + 2] = ('shp_to_rst_{}'.format(str(CLS)), time_z - time_y)
def splite_near(sqdb, tbl, nearTbl, tblGeom, nearGeom, output,
                whrNear=None, outIsFile=None):
    """
    Near Analysis using Spatialite

    Computes, for every feature of tbl, the distance to the union of all
    geometries of nearTbl (optionally filtered by whrNear), exposed as a
    dist_near column.  Returns output.
    """

    # Optional restriction on the "near" table
    whr = "" if not whrNear else " WHERE {}".format(whrNear)

    Q = (
        "SELECT m.*, ST_Distance(m.{inGeom}, j.geom) AS dist_near "
        "FROM {t} AS m, ("
        "SELECT ST_UnaryUnion(ST_Collect({neargeom})) AS geom "
        "FROM {tblNear}{nearwhr}"
        ") AS j"
    ).format(
        inGeom=tblGeom, t=tbl,
        neargeom=nearGeom, tblNear=nearTbl,
        nearwhr=whr
    )

    if outIsFile:
        from gasp.anls.exct import sel_by_attr

        sel_by_attr(sqdb, Q, output, api_gis='ogr')
    else:
        from gasp.sql.mng.qw import ntbl_by_query

        ntbl_by_query(sqdb, output, Q, api='ogr2ogr')

    return output
def osm_to_featurecls(xmlOsm, output, fileFormat='.shp', useXmlName=None):
    """
    OSM to ESRI Shapefile

    Converts an OSM XML file to a SQLite DB and exports the four default
    OSM geometry tables to feature classes in the *output* folder.

    NOTE(review): ogr_btw_driver is neither defined nor imported in this
    view of the module -- confirm where it comes from (it looks like the
    xml -> sqlite converter; cf. osm_to_sqdb used elsewhere in the package).
    NOTE(review): when useXmlName is set, the xml basename and the table
    name are concatenated with no separator -- confirm that is intended.
    """

    import os
    from gasp.anls.exct import sel_by_attr

    # Convert xml to sqliteDB
    sqDB = ogr_btw_driver(xmlOsm, os.path.join(output, 'fresh_osm.sqlite'))

    # sqliteDB to Feature Class
    TABLES = ['points', 'lines', 'multilinestrings', 'multipolygons']

    for T in TABLES:
        sel_by_attr(
            sqDB,
            "SELECT * FROM {}".format(T),
            os.path.join(
                output,
                "{}{}{}".format(
                    # Optional prefix taken from the xml file name
                    "" if not useXmlName else os.path.splitext(
                        os.path.basename(xmlOsm))[0],
                    T,
                    # Normalise the extension to start with a dot
                    fileFormat if fileFormat[0] == '.' else "." + fileFormat
                )
            ),
            api_gis='ogr'
        )

    return output
def st_dissolve(db, table, geomColumn, outTable, whrClause=None,
                diss_cols=None, outTblIsFile=None, api='sqlite'):
    """
    Dissolve a Polygon table

    Aggregates all geometries -- optionally filtered by whrClause and/or
    grouped by diss_cols -- into one geometry per group, using
    ST_UnaryUnion(ST_Collect(...)).

    db           - database connection/path;
    table        - input table name;
    geomColumn   - geometry column of the input table;
    outTable     - output table name, or file path when outTblIsFile;
    api          - 'sqlite' or 'psql'.

    Returns outTable.
    """

    from gasp import goToList

    diss_cols = goToList(diss_cols) if diss_cols else None
    # Output geometry column name differs between the two backends
    geomcol = "geometry" if api == 'sqlite' else 'geom'

    sql = (
        "SELECT{selCols} ST_UnaryUnion(ST_Collect({geom})) AS {gout} "
        "FROM {tbl}{whr}{grpBy}"
    ).format(
        selCols="" if not diss_cols else " {},".format(", ".join(diss_cols)),
        geom=geomColumn, tbl=table,
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        grpBy="" if not diss_cols else " GROUP BY {}".format(
            ", ".join(diss_cols)),
        gout=geomcol
    )

    if outTblIsFile:
        if api == 'sqlite':
            from gasp.anls.exct import sel_by_attr

            sel_by_attr(db, sql, outTable, api_gis='ogr')

        elif api == 'psql':
            from gasp.to.shp import psql_to_shp

            # BUGFIX: previously the raw table name was exported while
            # tableIsQuery=True; the dissolve query (and its output
            # geometry column) must be exported instead.
            psql_to_shp(
                db, sql, outTable, api='pgsql2shp',
                geom_col=geomcol, tableIsQuery=True
            )

    else:
        from gasp.sql.mng.qw import ntbl_by_query

        ntbl_by_query(db, outTable, sql,
                      api='ogr2ogr' if api == 'sqlite' else 'psql')

    return outTable
def splite_buffer(db, table, dist, geomField, outTbl, cols_select=None,
                  bufferField="geometry", whrClause=None, outTblIsFile=None,
                  dissolve=None):
    """
    Run ST_Buffer

    if not dissolve, no generalization will be applied;
    if dissolve == to str or list, a generalization will be accomplish
    using the fields referenced by this object;
    if dissolve == 'ALL', all features will be dissolved.

    Returns outTbl.
    """

    from gasp import goToList

    dissolve = goToList(dissolve) if dissolve != "ALL" else "ALL"

    sql = (
        "SELECT{sel}{spFunc}{geom}, {_dist}{endFunc} AS {bf} "
        "FROM {tbl}{whr}{grpBy}"
    ).format(
        sel=" " if not cols_select else " {}, ".format(
            ", ".join(goToList(cols_select))
        ),
        tbl=table, geom=geomField, _dist=str(dist), bf=bufferField,
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        # When dissolving, buffers are also collected and unioned
        spFunc="ST_Buffer(" if not dissolve else
            "ST_UnaryUnion(ST_Collect(ST_Buffer(",
        endFunc=")" if not dissolve else ")))",
        grpBy="" if not dissolve or dissolve == "ALL" else
            " GROUP BY {}".format(", ".join(dissolve))
    )

    if outTblIsFile:
        # BUGFIX: import path was 'gasp.exct'; the module used by every
        # other function in this file is 'gasp.anls.exct'.
        from gasp.anls.exct import sel_by_attr

        sel_by_attr(db, sql, outTbl, api_gis='ogr')
    else:
        from gasp.sql.mng.qw import ntbl_by_query

        ntbl_by_query(db, outTbl, sql, api='ogr2ogr')

    return outTbl
def split_whr_attrIsTrue(osm_fc, outputfolder, fields=None, sel_fields=None,
                         basename=None):
    """
    For each field in osm table or in fields, creates a new feature class
    where the field attribute is not empty

    osm_fc       - input feature class;
    outputfolder - folder that receives one shapefile per field;
    fields       - fields to process (defaults to all fields of osm_fc);
    sel_fields   - extra columns to carry into every export;
    basename     - optional prefix for the output file names.

    NOTE(review): Python 2 only -- uses the `unicode` builtin.
    NOTE(review): when sel_fields is given, the list is mutated in place
    ('geometry' and each processed field are appended/removed) -- confirm
    callers do not rely on it staying unchanged.
    """

    import os
    from gasp.mng.fld import lst_fld
    from gasp.anls.exct import sel_by_attr

    # List table fields; a single field name is normalised to a list
    tbl_fields = fields if fields else lst_fld(osm_fc)

    if type(tbl_fields) == str or type(tbl_fields) == unicode:
        tbl_fields = [tbl_fields]

    # aux flags whether the caller supplied a base column selection
    if sel_fields:
        sel_fields.append('geometry')
        aux = 1
    else:
        aux = 0

    # Export each field in data
    outFilename = '{}.shp' if not basename else basename + '_{}.shp'
    for fld in tbl_fields:
        a = 0
        if not aux:
            sel_fields = ['geometry', fld]
        else:
            if fld not in sel_fields:
                sel_fields.append(fld)
                a += 1  # remember to drop fld from sel_fields afterwards

        sel_by_attr(
            osm_fc,
            "SELECT {flds} FROM {t} WHERE {f}<>''".format(
                f=fld, t=os.path.splitext(os.path.basename(osm_fc))[0],
                flds=', '.join(sel_fields)),
            os.path.join(
                outputfolder,
                outFilename.format(fld if fld.islower() else fld.lower())),
            api_gis='ogr')

        if a:
            sel_fields.remove(fld)
def build12_torst(buildTbl):
    """
    Rasterize the building polygons of each LULC class found in buildTbl.

    Nested helper: relies on names from the enclosing scope (osmLink,
    apidb, folder, cells, srscode, rstT, resLyr, timeGasto, plus the
    imported query_to_df, sel_by_attr, shp_to_raster, datetime, os).

    Side effects: resLyr[int(cls)] receives [raster_path]; timing tuples
    are stored in timeGasto[int(cls)] and timeGasto[int(cls) + 1].
    NOTE(review): consecutive class codes make those timing keys collide
    (cls + 1 of one class equals cls of the next) -- confirm intended.
    """

    # Distinct LULC classes that have buildings
    LulcCls = query_to_df(
        osmLink, "SELECT cls FROM {} GROUP BY cls".format(buildTbl),
        db_api='psql' if apidb == 'POSTGIS' else 'sqlite').cls.tolist()

    for lulc_cls in LulcCls:
        time_m = datetime.datetime.now().replace(microsecond=0)

        # To SHP
        if apidb == 'SQLITE':
            shpB = sel_by_attr(
                osmLink,
                "SELECT * FROM {} WHERE cls={}".format(
                    buildTbl, str(lulc_cls)),
                os.path.join(
                    folder, 'nshp_build_{}.shp'.format(lulc_cls)),
                api_gis='ogr')
        else:
            shpB = sel_by_attr(
                osmLink,
                "SELECT * FROM {} WHERE cls={}".format(
                    buildTbl, str(lulc_cls)),
                os.path.join(
                    folder, 'nshp_build_{}.shp'.format(lulc_cls)),
                api='pgsql2shp', geom_col="geometry", tableIsQuery=True)

        time_n = datetime.datetime.now().replace(microsecond=0)

        # To RST (0 as background value, aligned with the rstT template)
        brst = shp_to_raster(
            shpB, None, cells, 0,
            os.path.join(folder, 'nrst_build_{}.tif'.format(lulc_cls)),
            srscode, rstT, api='gdal')

        time_o = datetime.datetime.now().replace(microsecond=0)

        resLyr[int(lulc_cls)] = [brst]

        timeGasto[int(lulc_cls)] = (
            'to_shp_{}'.format(str(lulc_cls)), time_n - time_m)
        timeGasto[int(lulc_cls) + 1] = (
            'to_rst_n_{}'.format(str(lulc_cls)), time_o - time_n)
def disjoint_polygons_rel_points(sqBD, pntTbl, pntGeom, polyTbl, polyGeom,
                                 outTbl, polySelect=None, pntQuery=None,
                                 polyQuery=None, outTblIsFile=None):
    """
    Get Disjoint relation

    Selects the polygons of polyTbl that do not contain any point of
    pntTbl (anti-join on ST_Within) and writes them to outTbl.

    polySelect - columns of the polygon table to keep (mandatory);
    pntQuery / polyQuery - optional sub-queries replacing the raw tables;
    outTblIsFile - when True, outTbl is a file written with OGR.

    Raises ValueError if polySelect is not given.  Returns outTbl.
    """

    if not polySelect:
        raise ValueError("Man, select something!")

    sql = (
        "SELECT {selCols} FROM {polTable} WHERE ("
        "{polName}.{polGeom} not in ("
        "SELECT {polName}.{polGeom} FROM {pntTable} "
        "INNER JOIN {polTable} ON "
        "ST_Within({pntName}.{pntGeom_}, {polName}.{polGeom})"
        "))"
    ).format(
        # polySelect is guaranteed non-empty by the guard above
        # (the old "*" fallback branch was unreachable dead code)
        selCols=polySelect,
        polTable=polyTbl if not polyQuery else polyQuery,
        polGeom=polyGeom,
        pntTable=pntTbl if not pntQuery else pntQuery,
        pntGeom_=pntGeom,
        pntName=pntTbl, polName=polyTbl
    )

    if outTblIsFile:
        from gasp.anls.exct import sel_by_attr

        sel_by_attr(sqBD, sql, outTbl, api_gis='ogr')
    else:
        from gasp.sql.mng.qw import ntbl_by_query

        ntbl_by_query(sqBD, outTbl, sql, api='ogr2ogr')

    # Consistency fix: return the output reference like the other
    # helpers in this module (previously returned None).
    return outTbl
def intersect_point_with_polygon(sqDB, pntTbl, pntGeom, polyTbl, polyGeom,
                                 outTbl, pntSelect=None, polySelect=None,
                                 pntQuery=None, polyQuery=None,
                                 outTblIsFile=None):
    """
    Intersect Points with Polygons

    Selects, via ST_Within, the points of pntTbl that fall inside the
    polygons of polyTbl, keeping the columns given in pntSelect and/or
    polySelect.

    pntQuery / polyQuery - optional sub-queries replacing the raw tables;
    outTblIsFile - when True, outTbl is a file written with OGR.

    Raises ValueError when neither pntSelect nor polySelect is given.
    Returns outTbl.
    """

    if not pntSelect and not polySelect:
        raise ValueError("You have to select something")

    sql = (
        "SELECT {colPnt}{colPoly} FROM {pnt_tq} "
        "INNER JOIN {poly_tq} ON "
        "ST_Within({pnt}.{pnGeom}, {poly}.{pgeom})"
    ).format(
        colPnt=pntSelect if pntSelect else "",
        # Prefix polygon columns with ", " only when point columns are
        # also being selected
        colPoly=polySelect if polySelect and not pntSelect else \
            ", " + polySelect if polySelect and pntSelect else "",
        pnt_tq=pntTbl if not pntQuery else pntQuery,
        poly_tq=polyTbl if not polyQuery else polyQuery,
        pnt=pntTbl, poly=polyTbl,
        pnGeom=pntGeom, pgeom=polyGeom
    )

    if outTblIsFile:
        from gasp.anls.exct import sel_by_attr

        sel_by_attr(sqDB, sql, outTbl, api_gis='ogr')
    else:
        from gasp.sql.mng.qw import ntbl_by_query

        ntbl_by_query(sqDB, outTbl, sql, api='ogr2ogr')

    # Consistency fix: return the output reference like the other
    # helpers in this module (previously returned None).
    return outTbl
def rename_column(inShp, columns, output, api="ogr2ogr"):
    """
    Rename Columns in Shp

    columns is a dict {old_name: new_name}; columns not listed keep their
    names.  The renaming is done by re-exporting the data through a
    SELECT ... AS ... query.

    TODO: For now implies output. In the future, it option will be removed

    Raises ValueError when api is not supported.  Returns the output path.
    """

    if api == "ogr2ogr":
        import os
        from gasp.oss import get_filename
        from gasp.mng.fld import lst_fld  # BUGFIX: was used but not imported
        from gasp.anls.exct import sel_by_attr

        # Work on a copy so the caller's dict is not mutated
        colMap = dict(columns)

        # Keep every column that is not being renamed, plus the geometry
        for c in lst_fld(inShp):
            if c not in colMap:
                colMap[c] = c
        colMap["geometry"] = "geometry"

        # Rename columns by selecting data from input
        outShp = sel_by_attr(
            inShp,
            "SELECT {} FROM {}".format(
                ", ".join(["{} AS {}".format(c, colMap[c]) for c in colMap]),
                get_filename(inShp)),
            output, api_gis='ogr')

    else:
        raise ValueError("{} is not available".format(api))

    return outShp
def arcg_selection(db, polTbl, fld):
    """
    Select, Dissolve and Reproject using ArcGIS

    For each distinct value of the 'selection' column of polTbl, exports
    the matching polygons to a Shapefile in fld and dissolves them.

    Returns (clsShp, timeGasto): a dict {class: dissolved_shp_path} and a
    dict with (label, duration) timing tuples.
    """

    import datetime
    import os
    from gasp.mng.genze import dissolve
    from gasp.fm.sql import query_to_df
    from gasp.anls.exct import sel_by_attr

    # Get LULC Classes
    t_start = datetime.datetime.now().replace(microsecond=0)
    lulcCls = query_to_df(
        db,
        ("SELECT selection FROM {} "
         "WHERE selection IS NOT NULL GROUP BY selection").format(polTbl),
        db_api='sqlite'
    ).selection.tolist()
    t_classes = datetime.datetime.now().replace(microsecond=0)

    timeGasto = {0: ('check_cls', t_classes - t_start)}

    # Extract and dissolve one Shapefile per class
    clsShp = {}
    SQL = "SELECT selection, geometry FROM {} WHERE selection={}"

    tk = 1
    for cls in lulcCls:
        t0 = datetime.datetime.now().replace(microsecond=0)

        shp = sel_by_attr(
            db, SQL.format(polTbl, str(cls)),
            os.path.join(fld, 'rule1_{}.shp'.format(cls)),
            api_gis='ogr'
        )

        t1 = datetime.datetime.now().replace(microsecond=0)

        dShp = dissolve(
            shp, os.path.join(fld, "rul1_d_{}.shp".format(str(cls))),
            "FID", geomMultiPart=True
        )

        t2 = datetime.datetime.now().replace(microsecond=0)

        clsShp[int(cls)] = dShp
        timeGasto[tk] = ("export_{}".format(cls), t1 - t0)
        timeGasto[tk + 1] = ("dissolve_{}".format(cls), t2 - t1)

        tk += 2

    return clsShp, timeGasto
def splitShp_by_range(shp, nrFeat, outFolder):
    """
    Split one feature class by range

    Exports the features of shp to several feature classes in outFolder,
    each with at most nrFeat features (using LIMIT/OFFSET windows).

    Returns the list of paths of the exported feature classes.
    """

    import os
    from gasp.oss import get_filename, get_fileformat
    from gasp.prop.feat import feat_count
    from gasp.mng.fld import lst_fld
    from gasp.anls.exct import sel_by_attr

    rowsN = feat_count(shp, gisApi='ogr')

    # Number of output files = ceil(rowsN / nrFeat) via negative floor div.
    # BUGFIX: the old formula int(rowsN / float(nrFeat)) + 1 produced an
    # extra, empty shapefile whenever rowsN was an exact multiple of nrFeat.
    nrShp = -(-rowsN // nrFeat)

    fields = lst_fld(shp)

    offset = 0
    exportedShp = []
    for i in range(nrShp):
        outShp = sel_by_attr(
            shp,
            "SELECT {cols}, geometry FROM {t} ORDER BY {cols} "
            "LIMIT {l} OFFSET {o}".format(
                t=os.path.splitext(os.path.basename(shp))[0],
                l=str(nrFeat), o=str(offset),
                cols=", ".join(fields)),
            os.path.join(
                outFolder,
                "{}_{}{}".format(
                    get_filename(shp, forceLower=True), str(i),
                    get_fileformat(shp))),
            api_gis='ogr')

        exportedShp.append(outShp)
        offset += nrFeat

    return exportedShp
def exportBuild():
    """
    Export the building polygons of polyTbl to Shapefile and rasterize them.

    Nested helper: relies on names from the enclosing scope (osmdata,
    polyTbl, folder, cellsize, srs, rstTemplate, BUILDINGS, timeGasto,
    plus row_num, sel_by_attr, shp_to_raster, datetime, os).

    Side effects: appends the raster path to BUILDINGS and records timing
    tuples in timeGasto[3..5].  Returns early (None) when the table has
    no buildings.
    """

    time_ee = datetime.datetime.now().replace(microsecond=0)

    NB = row_num(osmdata, polyTbl, where="building IS NOT NULL",
                 api='sqlite')

    time_e = datetime.datetime.now().replace(microsecond=0)
    timeGasto[3] = ('check_builds', time_e - time_ee)

    if not NB:
        return

    bShp = sel_by_attr(
        osmdata,
        "SELECT geometry FROM {} WHERE building IS NOT NULL".format(
            polyTbl),
        os.path.join(folder, 'road_builds.shp'),
        api_gis='ogr')

    time_f = datetime.datetime.now().replace(microsecond=0)

    # NOTE(review): -1 here is the burn/background value passed to
    # shp_to_raster (other calls in this file use 0) -- confirm intent.
    bRst = shp_to_raster(
        bShp, None, cellsize, -1,
        os.path.join(folder, 'road_builds.tif'),
        epsg=srs, rst_template=rstTemplate, api='gdal')

    time_g = datetime.datetime.now().replace(microsecond=0)

    BUILDINGS.append(bRst)

    timeGasto[4] = ('export_builds', time_f - time_e)
    timeGasto[5] = ('builds_to_rst', time_g - time_f)
def arc_roadsrule(osmDb, lineTbl, polyTbl, folder):
    """
    Select Roads and Transform them into polygons

    NOTE(review): this function appears unfinished/broken as written:
    * os.path.join is used but os is never imported in this view --
      confirm a module-level import exists;
    * allBuildings, select_by_attr, WORK, L, distInstances, osmToSelect,
      BF_COL, VALUE_COL, feat_count, Buffer, delete and nomenclature are
      referenced but never defined or imported here;
    * the buildings SELECT below is missing .format(polyTbl), so the
      literal "{}" placeholder would reach the database.
    Confirm against the original gasp sources before relying on it.
    """

    import datetime
    from gasp.anls.exct import sel_by_attr
    from gasp.sql.mng.tbl import row_num
    from gasp.anls.prox.bf import _buffer

    # Roads to ArcGIS
    time_a = datetime.datetime.now().replace(microsecond=0)
    NR = row_num(osmDb, lineTbl, where="roads IS NOT NULL", api='sqlite')
    time_b = datetime.datetime.now().replace(microsecond=0)

    if not NR:
        # No roads: nothing to do, return only the timing info
        return None, {0: ('count_rows_roads', time_b - time_a)}

    # Export all roads
    allRoads = sel_by_attr(
        osmDb,
        ("SELECT roads, bf_roads, geometry "
         "FROM {} WHERE roads IS NOT NULL").format(lineTbl),
        os.path.join(folder, 'all_roads.shp'), api_gis='ogr')
    time_c = datetime.datetime.now().replace(microsecond=0)

    # Export Buildings
    NB = row_num(osmDb, polyTbl, where="building IS NOT NULL", api='sqlite')
    time_d = datetime.datetime.now().replace(microsecond=0)

    if NB:
        from gasp.arcg.anls.prox import near_anls

        # NOTE(review): missing .format(polyTbl) on this query
        builds = sel_by_attr(
            osmDb,
            ("SELECT geometry FROM {} "
             "WHERE building IS NOT NULL"),
            os.path.join(folder, "all_builds.shp"), api_gis='ogr')
        time_e = datetime.datetime.now().replace(microsecond=0)

        near_anls(allRoads, builds, searchRadius=12)

    # Update table
    # Execute near
    if allBuildings:  # NOTE(review): allBuildings is undefined here
        near_anls(allRoads, allBuildings, searchRadius=12)

    # Create Buffer for roads near buildings
    nearBuildRoadsLnh = select_by_attr(
        allRoads, "NEAR_FID <> -1", os.path.join(WORK, 'roads_near'))
    nearBuildRoads = _buffer(
        nearBuildRoadsLnh, "NEAR_DIST",
        os.path.join(WORK, 'roads_bf_near'),
        dissolve="ALL", api='arcpy')

    L.append(nearBuildRoads)

    for dist in distInstances:
        # Buffer and export
        filterDf = osmToSelect[osmToSelect[BF_COL] == dist]

        # Roads already covered by the NEAR buffer are excluded
        Q = "{}" if not allBuildings else "({}) AND NEAR_FID = -1"

        rdvShp = select_by_attr(
            allRoads,
            Q.format(str(filterDf[VALUE_COL].str.cat(sep=" OR "))),
            os.path.join(WORK, "roads_{}".format(str(dist))))

        if not feat_count(rdvShp, gisApi='arcpy'):
            continue

        roadsShp = Buffer(
            rdvShp,
            os.path.join(WORK, "roads_bf_{}".format(str(dist))),
            str(int(dist)), dissolve="ALL")

        L.append(roadsShp)

        delete(rdvShp)

    delete(allBuildings)

    LULC_CLS = 1221 if nomenclature != 'GLOBE_LAND_30' else 801

    return {LULC_CLS: L}
def get_not_used_tags(OSM_FILE, OUT_TBL):
    """
    Use a file OSM to detect tags not considered in the OSM2LULC procedure

    Compares the key/value/geometry tags present in OSM_FILE with the
    ones listed in the package's osmtolulc.sqlite reference DB, writes
    the unmatched tags to the OUT_TBL spreadsheet and exports one
    Shapefile per geometry type with the unmatched features.

    NOTE: Python 2 implementation (uses the unicode builtin).
    Returns OUT_TBL.
    """

    import os
    from gasp.anls.exct import sel_by_attr
    from gasp.fm.sql import query_to_df
    from gasp.oss import get_filename
    from gasp.osm2lulc.utils import osm_to_sqdb
    from gasp.to import obj_to_tbl

    # Reference DB with the tags OSM2LULC already knows about
    OSM_TAG_MAP = {
        "DB": os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'osmtolulc.sqlite'),
        "OSM_FEAT": "osm_features",
        "KEY_COL": "key",
        "VALUE_COL": "value",
        "GEOM_COL": "geom"
    }

    WORKSPACE = os.path.dirname(OUT_TBL)

    sqdb = osm_to_sqdb(
        OSM_FILE,
        os.path.join(WORKSPACE, get_filename(OSM_FILE) + '.sqlite'))

    # Get Features we are considering
    ourOSMFeatures = query_to_df(
        OSM_TAG_MAP["DB"],
        ("SELECT {key} AS key_y, {value} AS value_y, {geom} AS geom_y "
         "FROM {tbl}").format(
            key=OSM_TAG_MAP["KEY_COL"], value=OSM_TAG_MAP["VALUE_COL"],
            geom=OSM_TAG_MAP["GEOM_COL"], tbl=OSM_TAG_MAP["OSM_FEAT"]),
        db_api='sqlite')

    # Get Features in File
    # (a stray '' literal that concatenated with 'landuse' was removed;
    # the effective list is unchanged)
    TABLES_TAGS = {
        'points': ['highway', 'man_made', 'building'],
        'lines': ['highway', 'waterway', 'aerialway', 'barrier',
                  'man_made', 'railway'],
        'multipolygons': [
            'aeroway', 'amenity', 'barrier', 'building', 'craft',
            'historic', 'land_area', 'landuse', 'leisure', 'man_made',
            'military', 'natural', 'office', 'place', 'shop', 'sport',
            'tourism', 'waterway', 'power', 'railway', 'healthcare',
            'highway'
        ]
    }

    # One UNION ALL query per table gathering every (key, value, geom)
    Qs = [
        " UNION ALL ".join([(
            "SELECT '{keycol}' AS key, {keycol} AS value, "
            "'{geomtype}' AS geom FROM {tbl} WHERE "
            "{keycol} IS NOT NULL"
        ).format(
            keycol=c,
            geomtype='Point' if table == 'points' else 'Line' \
                if table == 'lines' else 'Polygon',
            tbl=table
        ) for c in TABLES_TAGS[table]]) for table in TABLES_TAGS
    ]

    fileOSMFeatures = query_to_df(sqdb, (
        "SELECT key, value, geom FROM ({}) AS foo "
        "GROUP BY key, value, geom"
    ).format(" UNION ALL ".join(Qs)), db_api='sqlite')

    _fileOSMFeatures = fileOSMFeatures.merge(
        ourOSMFeatures, how='outer',
        left_on=["key", "value", "geom"],
        right_on=["key_y", "value_y", "geom_y"])

    # Select OSM Features of file without correspondence
    _fileOSMFeatures["isnew"] = _fileOSMFeatures.key_y.fillna(value='nenhum')

    newTags = _fileOSMFeatures[_fileOSMFeatures.isnew == 'nenhum']

    # Escape single quotes so values can be embedded in SQL literals
    newTags["value"] = newTags.value.str.replace("'", "''")

    newTags["whr"] = newTags.key.str.encode('utf-8').astype(str) + "='" + \
        newTags.value.str.encode('utf-8').astype(str) + "'"

    # Export OUT_TBL with tags not being used
    obj_to_tbl(newTags, OUT_TBL, sheetsName="new_tags", sanitizeUtf8=True)

    # Export tags not being used to new shapefile
    def to_regular_str(row):
        # Fallback sanitizer for rows whose WHERE clause is not plain text
        from gasp import unicode_to_str

        san_str = unicode_to_str(row.whr)
        row["whr_san"] = san_str

        return row

    for t in TABLES_TAGS:
        if t == 'points':
            filterDf = newTags[newTags.geom == 'Point']
        elif t == 'lines':
            filterDf = newTags[newTags.geom == 'Line']
        elif t == 'multipolygons':
            filterDf = newTags[newTags.geom == 'Polygon']

        Q = unicode("SELECT * FROM {} WHERE {}", 'utf-8').format(
            unicode(t, 'utf-8'), filterDf.whr.str.cat(sep=" OR "))

        try:
            # BUGFIX: was WORKPSACE -- a NameError on this code path
            shp = sel_by_attr(sqdb, Q, os.path.join(WORKSPACE, t + '.shp'),
                              api_gis='ogr')
        except Exception:
            __filterDf = filterDf.apply(lambda x: to_regular_str(x), axis=1)

            _Q = "SELECT * FROM {} WHERE {}".format(
                t, __filterDf.whr_san.str.cat(sep=" OR "))

            # Consistency: export with the same OGR api as the try branch
            shp = sel_by_attr(sqdb, _Q, os.path.join(WORKSPACE, t + '.shp'),
                              api_gis='ogr')

    return OUT_TBL
def prod_matrix(origins, destinations, networkGrs, speedLimitCol,
                onewayCol, thrdId="1", asCmd=None):
    """
    Get Matrix Distance:

    Builds an origin/destination travel-time matrix over a GRASS GIS
    network: merges origins and destinations into one point layer,
    connects the points to networkGrs, derives per-arc travel times
    (ft_minutes / tf_minutes) from length and speed limit, runs the
    all-pairs solver and keeps only the origin -> destination pairs.

    Returns the name of the GRASS vector with the selected matrix.

    NOTE(review): os is used but not imported in this function -- confirm
    a module-level `import os` exists.
    NOTE(review): some callees take `asCMD=` and others `ascmd=`; the
    casing here follows each callee's signature -- verify before renaming.
    """

    from gasp.to.shp.grs import shp_to_grs
    from gasp.cpu.grs.mng import category
    from gasp.anls.exct import sel_by_attr
    from gasp.mng.grstbl import add_field, add_table, update_table
    from gasp.mob.grstbx import network_from_arcs
    from gasp.mob.grstbx import add_pnts_to_network
    from gasp.mob.grstbx import run_allpairs
    from gasp.cpu.grs.mng.feat import geomattr_to_db
    from gasp.cpu.grs.mng.feat import copy_insame_vector
    from gasp.mng.gen import merge_feat
    from gasp.prop.feat import feat_count

    # Merge Origins and Destinations into the same Feature Class
    ORIGINS_NFEAT = feat_count(origins, gisApi='pandas')
    DESTINATIONS_NFEAT = feat_count(destinations, gisApi='pandas')

    ORIGINS_DESTINATIONS = merge_feat([origins, destinations], os.path.join(
        os.path.dirname(origins), "points_od_{}.shp".format(thrdId)),
        api='pandas')

    pointsGrs = shp_to_grs(ORIGINS_DESTINATIONS,
                           "points_od_{}".format(thrdId), asCMD=asCmd)

    # Connect Points to Network
    newNetwork = add_pnts_to_network(networkGrs, pointsGrs,
                                     "rdv_points_{}".format(thrdId),
                                     asCMD=asCmd)

    # Sanitize Network Table and Cost Columns
    newNetwork = category(newNetwork, "rdv_points_time_{}".format(thrdId),
                          "add", LyrN="3", geomType="line", asCMD=asCmd)

    add_table(newNetwork, (
        "cat integer,kph double precision,length double precision,"
        "ft_minutes double precision,"
        "tf_minutes double precision,oneway text"
    ), lyrN=3, asCMD=asCmd)

    copy_insame_vector(newNetwork, "kph", speedLimitCol, 3,
                       geomType="line", asCMD=asCmd)
    copy_insame_vector(newNetwork, "oneway", onewayCol, 3,
                       geomType="line", asCMD=asCmd)

    geomattr_to_db(newNetwork, "length", "length", "line",
                   createCol=False, unit="meters", lyrN=3, ascmd=asCmd)

    # Fallback speed (3.6 kph) where the limit is missing or the arc is
    # closed ('N')
    update_table(newNetwork, "kph", "3.6", "kph IS NULL",
                 lyrN=3, ascmd=asCmd)
    update_table(newNetwork, "kph", "3.6", "oneway = 'N'",
                 lyrN=3, ascmd=asCmd)

    # Travel time in minutes for both arc directions
    update_table(newNetwork, "ft_minutes",
                 "(length * 60) / (kph * 1000.0)",
                 "ft_minutes IS NULL", lyrN=3, ascmd=asCmd)
    update_table(newNetwork, "tf_minutes",
                 "(length * 60) / (kph * 1000.0)",
                 "tf_minutes IS NULL", lyrN=3, ascmd=asCmd)

    # Exagerate Oneway's
    update_table(newNetwork, "ft_minutes", "1000", "oneway = 'TF'",
                 lyrN=3, ascmd=asCmd)
    update_table(newNetwork, "tf_minutes", "1000", "oneway = 'FT'",
                 lyrN=3, ascmd=asCmd)

    # Produce matrix
    matrix = run_allpairs(newNetwork, "ft_minutes", "tf_minutes",
                          'result_{}'.format(thrdId),
                          arcLyr=3, nodeLyr=2, asCMD=asCmd)

    # Exclude unwanted OD Pairs (origins are the first ORIGINS_NFEAT
    # categories, destinations come after them)
    q = "({}) AND ({})".format(
        " OR ".join(
            ["from_cat={}".format(str(i + 1)) for i in range(ORIGINS_NFEAT)]),
        " OR ".join([
            "to_cat={}".format(str(ORIGINS_NFEAT + i + 1))
            for i in range(DESTINATIONS_NFEAT)
        ]))

    matrix_sel = sel_by_attr(matrix, q, "sel_{}".format(matrix),
                             geomType="line", lyrN=3, asCMD=asCmd)

    add_field(matrix_sel, "from_fid", "INTEGER", lyrN=3, asCMD=asCmd)
    add_field(matrix_sel, "to_fid", "INTEGER", lyrN=3, asCMD=asCmd)

    # Map categories back to the original feature ids
    update_table(matrix_sel, "from_fid", "from_cat - 1",
                 "from_fid IS NULL", lyrN=3, ascmd=asCmd)
    update_table(matrix_sel, "to_fid",
                 "to_cat - {} - 1".format(str(ORIGINS_NFEAT)),
                 "to_fid IS NULL", lyrN=3, ascmd=asCmd)

    return matrix_sel
def sgbd_get_feat_within(conParam, inTbl, inGeom, withinTbl, withinGeom,
                         outTbl, inTblCols=None, withinCols=None,
                         outTblIsFile=None, apiToUse='OGR_SPATIALITE'):
    """
    Get Features within other Geometries in withinTbl
    e.g. Intersect points with Polygons

    apiToUse options:
    * OGR_SPATIALITE;
    * POSTGIS.

    Returns outTbl.
    """

    from gasp import goToList

    # Build the column list of the output
    if not inTblCols and not withinCols:
        colSelect = "intbl.*, witbl.*"
    else:
        parts = []
        if inTblCols:
            parts.append(", ".join(
                "intbl.{}".format(c) for c in goToList(inTblCols)))
        if withinCols:
            parts.append(", ".join(
                "witbl.{}".format(c) for c in goToList(withinCols)))
        colSelect = ", ".join(parts)

    Q = (
        "SELECT {selcols} FROM {in_tbl} AS intbl "
        "INNER JOIN {within_tbl} AS witbl ON "
        "ST_Within(intbl.{in_geom}, witbl.{wi_geom})"
    ).format(
        selcols=colSelect, in_tbl=inTbl, within_tbl=withinTbl,
        in_geom=inGeom, wi_geom=withinGeom
    )

    if apiToUse == "OGR_SPATIALITE":
        if outTblIsFile:
            from gasp.anls.exct import sel_by_attr

            sel_by_attr(conParam, Q, outTbl, api_gis='ogr')
        else:
            from gasp.sql.mng.qw import ntbl_by_query

            ntbl_by_query(conParam, outTbl, Q, api='ogr2ogr')

    elif apiToUse == 'POSTGIS':
        if outTblIsFile:
            from gasp.to.shp import psql_to_shp

            psql_to_shp(
                conParam, Q, outTbl, api="pgsql2shp",
                geom_col=None, tableIsQuery=True)
        else:
            from gasp.sql.mng.qw import ntbl_by_query

            ntbl_by_query(conParam, outTbl, Q, api='psql')

    else:
        raise ValueError((
            "API {} is not available. OGR_SPATIALITE and POSTGIS "
            "are the only valid options"
        ))

    return outTbl