def FilterAndExport(CLS, cnt):
    """
    Select features of LULC class CLS from the OSM polygon table, export
    them to Shapefile, then rasterize that Shapefile.

    NOTE(review): this is a closure — api, osmdb, SQL_Q, polyTbl, folder,
    cellsize, srscode, rstTemplate, clsRst, timeGasto, sel_by_attr,
    shp_to_rst, datetime and os all come from the enclosing scope.
    Results are returned via side effects on clsRst and timeGasto.
    """

    time_x = datetime.datetime.now().replace(microsecond=0)

    if api == 'SQLITE':
        # SQLite path: OGR executes the selection query directly
        shp = sel_by_attr(
            osmdb, SQL_Q.format(lc=str(CLS), tbl=polyTbl),
            os.path.join(folder, 'sel_{}.shp'.format(str(CLS))),
            api_gis='ogr')
    else:
        # PostGIS path: export the query result via pgsql2shp
        shp = sel_by_attr(
            osmdb, SQL_Q.format(lc=str(CLS), tbl=polyTbl), "geometry",
            os.path.join(folder, 'sel_{}.shp'.format(str(CLS))),
            api='pgsql2shp', tableIsQuery=True)

    time_y = datetime.datetime.now().replace(microsecond=0)

    # Shapefile -> Raster; 4th positional arg is 0 — presumably the burn
    # or nodata value, confirm against shp_to_rst's signature
    rstCls = shp_to_rst(
        shp, None, cellsize, 0,
        os.path.join(folder, 'sel_{}.tif'.format(str(CLS))),
        epsg=srscode, rst_template=rstTemplate, api='gdal')

    time_z = datetime.datetime.now().replace(microsecond=0)

    # Record outputs and elapsed times in the enclosing-scope dicts
    clsRst[int(CLS)] = rstCls
    timeGasto[cnt + 1] = ('toshp_{}'.format(str(CLS)), time_y - time_x)
    timeGasto[cnt + 2] = ('torst_{}'.format(str(CLS)), time_z - time_y)
def st_dissolve(db, table, geomColumn, outTable, whrClause=None,
                diss_cols=None, outTblIsFile=None, api='sqlite'):
    """
    Dissolve a Polygon table.

    db        - path/connection of the database
    table     - table with the polygons to dissolve
    geomColumn - name of the geometry column in table
    outTable  - output table name, or file path when outTblIsFile
    whrClause - optional WHERE clause (without the WHERE keyword)
    diss_cols - optional column(s) to dissolve by (become GROUP BY)
    outTblIsFile - if truthy, export to file instead of a new table

    API options:
    * sqlite
    * psql

    Returns outTable.
    """

    from glass.pys import obj_to_lst

    diss_cols = obj_to_lst(diss_cols) if diss_cols else None
    # Output geometry column alias differs between the two backends
    geomcol = "geometry" if api == 'sqlite' else 'geom'

    sql = (
        "SELECT{selCols} ST_UnaryUnion(ST_Collect({geom})) AS {gout} "
        "FROM {tbl}{whr}{grpBy}"
    ).format(
        selCols="" if not diss_cols else " {},".format(", ".join(diss_cols)),
        geom=geomColumn, tbl=table,
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        grpBy="" if not diss_cols else " GROUP BY {}".format(
            ", ".join(diss_cols)),
        gout=geomcol
    )

    if outTblIsFile:
        if api == 'sqlite':
            from glass.g.tbl.filter import sel_by_attr

            sel_by_attr(db, sql, outTable, api_gis='ogr')

        elif api == 'psql':
            from glass.g.it.shp import dbtbl_to_shp

            # BUG FIX: previously passed the raw `table` name while setting
            # tableIsQuery=True, so the dissolve query was never exported.
            # Pass the dissolve SQL and the query's geometry alias instead
            # (same pattern as feat_within / feat_not_within).
            dbtbl_to_shp(
                db, sql, geomcol, outTable,
                api='pgsql2shp', tableIsQuery=True)

    else:
        from glass.ng.sql.q import q_to_ntbl

        q_to_ntbl(db, outTable, sql,
                  api='ogr2ogr' if api == 'sqlite' else 'psql')

    return outTable
def splite_buffer(db, table, dist, geomField, outTbl,
                  cols_select=None, bufferField="geometry",
                  whrClause=None, outTblIsFile=None, dissolve=None):
    """
    Run ST_Buffer

    if not dissolve, no generalization will be applied;
    if dissolve == to str or list, a generalization will be accomplish
    using the fields referenced by this object;
    if dissolve == 'ALL', all features will be dissolved.
    """

    from glass.pys import obj_to_lst

    # Normalise dissolve: keep the 'ALL' sentinel, listify anything else
    if dissolve != "ALL":
        dissolve = obj_to_lst(dissolve)

    # Buffer expression: wrapped in a union/collect when dissolving
    if dissolve:
        fn_open, fn_close = "ST_UnaryUnion(ST_Collect(ST_Buffer(", ")))"
    else:
        fn_open, fn_close = "ST_Buffer(", ")"

    sel_part = " " if not cols_select else " {}, ".format(
        ", ".join(obj_to_lst(cols_select)))
    whr_part = " WHERE {}".format(whrClause) if whrClause else ""
    grp_part = "" if not dissolve or dissolve == "ALL" else \
        " GROUP BY {}".format(", ".join(dissolve))

    sql = "SELECT{}{}{}, {}{} AS {} FROM {}{}{}".format(
        sel_part, fn_open, geomField, str(dist), fn_close,
        bufferField, table, whr_part, grp_part)

    if outTblIsFile:
        from glass.g.tbl.filter import sel_by_attr

        sel_by_attr(db, sql, outTbl, api_gis='ogr')
    else:
        from glass.ng.sql.q import q_to_ntbl

        q_to_ntbl(db, outTbl, sql, api='ogr2ogr')

    return outTbl
def split_whr_attrIsTrue(osm_fc, outputfolder, fields=None,
                         sel_fields=None, basename=None):
    """
    For each field in osm table or in fields, creates a new feature class
    where the field attribute is not empty
    """

    import os

    from glass.g.prop.feat import lst_fld
    from glass.g.tbl.filter import sel_by_attr

    # Fields to iterate: explicit list or every field of the table
    flds = fields if fields else lst_fld(osm_fc)
    flds = [flds] if type(flds) == str else flds

    # When a selection list was given, geometry must always be exported
    use_sel = 1 if sel_fields else 0
    if use_sel:
        sel_fields.append('geometry')

    out_name = '{}.shp' if not basename else basename + '_{}.shp'
    src_tbl = os.path.splitext(os.path.basename(osm_fc))[0]

    for field in flds:
        appended = 0

        if not use_sel:
            sel_fields = ['geometry', field]
        elif field not in sel_fields:
            sel_fields.append(field)
            appended = 1

        sel_by_attr(
            osm_fc,
            "SELECT {flds} FROM {t} WHERE {f}<>''".format(
                f=field, t=src_tbl, flds=', '.join(sel_fields)),
            os.path.join(
                outputfolder,
                out_name.format(field if field.islower() else field.lower())),
            api_gis='ogr')

        # Undo the temporary addition for the next iteration
        if appended:
            sel_fields.remove(field)
def selAndExport(CLS, cnt):
    """
    Export the features of class CLS that satisfy the area rule to
    Shapefile, then rasterize the result.

    NOTE(review): this is a closure — api, db, SQL_Q, WHR, OPERATOR,
    RULE_COL, GEOM_AREA, polyTbl, folder, cellsize, srscode, rstTemplate,
    clsRst, timeGasto, sel_by_attr, shp_to_rst, dt and os come from the
    enclosing scope. Results go into clsRst / timeGasto (side effects).
    """

    time_x = dt.datetime.now().replace(microsecond=0)

    if api == "SQLITE":
        # SQLite path: OGR runs the selection query directly
        shpCls = sel_by_attr(
            db,
            SQL_Q.format(c=str(CLS), tbl=polyTbl, w=WHR.format(
                op=OPERATOR, r=RULE_COL, ga=GEOM_AREA, cls_=CLS)),
            os.path.join(folder, "{}_{}.shp".format(RULE_COL, CLS)),
            api_gis='ogr')
    else:
        # PostGIS path: export query result through pgsql2shp
        shpCls = sel_by_attr(
            db,
            SQL_Q.format(c=str(CLS), tbl=polyTbl, w=WHR.format(
                op=OPERATOR, r=RULE_COL, ga=GEOM_AREA, cls_=CLS)),
            "geometry",
            os.path.join(folder, "{}_{}.shp".format(RULE_COL, str(CLS))),
            api='pgsql2shp', tableIsQuery=True)

    time_y = dt.datetime.now().replace(microsecond=0)

    # Shapefile -> Raster; 4th positional arg 0 — presumably burn/nodata
    # value, confirm against shp_to_rst's signature
    rst = shp_to_rst(
        shpCls, None, cellsize, 0,
        os.path.join(folder, "{}_{}.tif".format(RULE_COL, CLS)),
        epsg=srscode, rst_template=rstTemplate, api='gdal')

    time_z = dt.datetime.now().replace(microsecond=0)

    # Register raster and elapsed times in the enclosing-scope dicts
    clsRst[int(CLS)] = rst
    timeGasto[cnt + 1] = ('sq_to_shp_{}'.format(str(CLS)), time_y - time_x)
    timeGasto[cnt + 2] = ('shp_to_rst_{}'.format(str(CLS)), time_z - time_y)
def osm_to_featcls(xmlOsm, output, fileFormat='.shp', useXmlName=None,
                   outepsg=4326):
    """
    OSM to ESRI Shapefile
    """

    import os

    from glass.g.tbl.filter import sel_by_attr
    from glass.pys.oss import fprop, del_file

    # XML OSM file -> temporary GeoPackage
    fname = fprop(xmlOsm, 'fn')
    gpkg = osm_to_gpkg(xmlOsm, os.path.join(output, fname + '.gpkg'))

    # One output Feature Class per OSM geometry table
    suffixes = {
        'points'           : 'pnt',
        'lines'            : 'lnh',
        'multilinestrings' : 'mlnh',
        'multipolygons'    : 'poly'
    }

    prefix = fname + "_" if useXmlName else ""
    ext = fileFormat if fileFormat[0] == '.' else "." + fileFormat

    for tbl in suffixes:
        sel_by_attr(
            gpkg, "SELECT * FROM {}".format(tbl),
            os.path.join(
                output, "{}{}{}".format(prefix, suffixes[tbl], ext)),
            api_gis='ogr',
            oEPSG=None if outepsg == 4326 else outepsg,
            iEPSG=4326)

    # Remove the temporary GeoPackage
    del_file(gpkg)

    return output
def disjoint_polygons_rel_points(sqBD, pntTbl, pntGeom, polyTbl, polyGeom,
                                 outTbl, polySelect=None, pntQuery=None,
                                 polyQuery=None, outTblIsFile=None):
    """
    Get Disjoint relation: select polygons that do NOT contain
    (ST_Within) any point of pntTbl.

    sqBD       - database with both tables
    pntTbl/pntGeom   - points table and its geometry column
    polyTbl/polyGeom - polygons table and its geometry column
    outTbl     - output table name, or file path when outTblIsFile
    polySelect - columns of polyTbl to keep in the output (required)
    pntQuery/polyQuery - optional queries used instead of the raw tables
    outTblIsFile - if truthy, export to file via OGR

    Raises ValueError when polySelect is not given.
    Returns outTbl.
    """

    if not polySelect:
        raise ValueError("Man, select something!")

    # polySelect is guaranteed truthy here, so it is used directly
    # (the old "*"-fallback branch was unreachable)
    sql = (
        "SELECT {selCols} FROM {polTable} WHERE ("
        "{polName}.{polGeom} not in ("
            "SELECT {polName}.{polGeom} FROM {pntTable} "
            "INNER JOIN {polTable} ON "
            "ST_Within({pntName}.{pntGeom_}, {polName}.{polGeom})"
        "))"
    ).format(
        selCols  = polySelect,
        polTable = polyTbl if not polyQuery else polyQuery,
        polGeom  = polyGeom,
        pntTable = pntTbl if not pntQuery else pntQuery,
        pntGeom_ = pntGeom,
        pntName  = pntTbl,
        polName  = polyTbl
    )

    if outTblIsFile:
        from glass.g.tbl.filter import sel_by_attr

        sel_by_attr(sqBD, sql, outTbl, api_gis='ogr')
    else:
        from glass.ng.sql.q import q_to_ntbl

        q_to_ntbl(sqBD, outTbl, sql, api='ogr2ogr')

    # Return the output reference, consistent with the sibling helpers
    return outTbl
def intersect_point_with_polygon(sqDB, pntTbl, pntGeom, polyTbl, polyGeom,
                                 outTbl, pntSelect=None, polySelect=None,
                                 pntQuery=None, polyQuery=None,
                                 outTblIsFile=None):
    """
    Intersect Points with Polygons (points ST_Within polygons).

    sqDB       - database with both tables
    pntTbl/pntGeom   - points table and its geometry column
    polyTbl/polyGeom - polygons table and its geometry column
    outTbl     - output table name, or file path when outTblIsFile
    pntSelect / polySelect - columns to keep from each table
        (at least one must be given)
    pntQuery/polyQuery - optional queries used instead of the raw tables
    outTblIsFile - if truthy, export to file via OGR

    Raises ValueError when neither selection is given.
    Returns outTbl.
    """

    if not pntSelect and not polySelect:
        raise ValueError("You have to select something")

    # Polygon columns need a leading comma only when point columns
    # are also selected
    if polySelect and pntSelect:
        poly_cols = ", " + polySelect
    elif polySelect:
        poly_cols = polySelect
    else:
        poly_cols = ""

    sql = (
        "SELECT {colPnt}{colPoly} FROM {pnt_tq} "
        "INNER JOIN {poly_tq} ON "
        "ST_Within({pnt}.{pnGeom}, {poly}.{pgeom})"
    ).format(
        colPnt  = pntSelect if pntSelect else "",
        colPoly = poly_cols,
        pnt_tq  = pntTbl if not pntQuery else pntQuery,
        poly_tq = polyTbl if not polyQuery else polyQuery,
        pnt     = pntTbl,
        poly    = polyTbl,
        pnGeom  = pntGeom,
        pgeom   = polyGeom
    )

    if outTblIsFile:
        from glass.g.tbl.filter import sel_by_attr

        sel_by_attr(sqDB, sql, outTbl, api_gis='ogr')
    else:
        from glass.ng.sql.q import q_to_ntbl

        q_to_ntbl(sqDB, outTbl, sql, api='ogr2ogr')

    # Return the output reference, consistent with the sibling helpers
    return outTbl
def exportBuild():
    """
    Export building polygons from the OSM database to Shapefile and
    rasterize them; does nothing when no building rows exist.

    NOTE(review): this is a closure — osmdata, polyTbl, folder, cellsize,
    srs, rstTemplate, BUILDINGS, timeGasto, row_num, sel_by_attr,
    shp_to_rst, dt and os come from the enclosing scope. Results are
    appended to BUILDINGS and timed in timeGasto (side effects).
    """

    time_ee = dt.datetime.now().replace(microsecond=0)
    NB = row_num(osmdata, polyTbl, where="building IS NOT NULL",
                 api='sqlite')
    time_e = dt.datetime.now().replace(microsecond=0)
    timeGasto[3] = ('check_builds', time_e - time_ee)

    # No buildings in the data — nothing to export
    if not NB:
        return

    bShp = sel_by_attr(
        osmdata,
        "SELECT geometry FROM {} WHERE building IS NOT NULL".format(
            polyTbl),
        os.path.join(folder, 'road_builds.shp'), api_gis='ogr')
    time_f = dt.datetime.now().replace(microsecond=0)

    # 4th positional arg -1 — presumably burn/nodata value, confirm
    # against shp_to_rst's signature
    bRst = shp_to_rst(
        bShp, None, cellsize, -1,
        os.path.join(folder, 'road_builds.tif'),
        epsg=srs, rst_template=rstTemplate, api='gdal')
    time_g = dt.datetime.now().replace(microsecond=0)

    BUILDINGS.append(bRst)
    timeGasto[4] = ('export_builds', time_f - time_e)
    timeGasto[5] = ('builds_to_rst', time_g - time_f)
def splitShp_by_range(shp, nrFeat, outFolder):
    """
    Split one feature class by range.

    shp       - input feature class
    nrFeat    - number of features per output file
    outFolder - folder for the outputs

    Returns the list of exported shapefile paths.
    """

    import os

    from glass.pys.oss import fprop
    from glass.g.prop.feat import feat_count, lst_fld
    from glass.g.tbl.filter import sel_by_attr

    rowsN = feat_count(shp, gisApi='ogr')

    # NOTE(review): when rowsN is an exact multiple of nrFeat (and not
    # equal), this still produces one extra, empty part — kept as-is to
    # preserve existing behavior.
    nrShp = int(rowsN / float(nrFeat)) + 1 if nrFeat != rowsN else 1

    fields = lst_fld(shp)

    # Loop-invariant values hoisted out of the export loop
    f = fprop(shp, ['fn', 'ff'], forceLower=True)
    tname = os.path.splitext(os.path.basename(shp))[0]

    offset = 0
    exportedShp = []
    for i in range(nrShp):
        outShp = sel_by_attr(
            shp,
            "SELECT {cols}, geometry FROM {t} ORDER BY {cols} "
            "LIMIT {l} OFFSET {o}".format(
                t=tname, l=str(nrFeat), o=str(offset),
                cols=", ".join(fields)),
            os.path.join(outFolder, "{}_{}{}".format(
                f['filename'], str(i), f['fileformat'])),
            api_gis='ogr')

        exportedShp.append(outShp)
        offset += nrFeat

    return exportedShp
def st_near(db, inTbl, inGeom, nearTbl, nearGeom, output, near_col='near',
            api='psql', whrNear=None, outIsFile=None, until_dist=None,
            cols_in_tbl=None, intbl_pk=None, cols_near_tbl=None):
    """
    Near tool for PostGIS and Spatialite.

    Computes, for each feature of inTbl, the distance to the features of
    nearTbl and writes it to near_col.

    api options:
    * psql
    * splite or spatialite

    With api='psql' and intbl_pk, a LEFT JOIN + ST_DWithin strategy keeps
    only the nearest neighbour per primary key; otherwise the distance to
    the unary-union of all near geometries is used.

    Returns output; raises ValueError for an unknown api.
    """

    if api == 'psql' and not intbl_pk:
        from glass.pys import obj_to_lst
        from glass.ng.sql.q import q_to_ntbl

        q_to_ntbl(db, output, (
            "SELECT m.*, ST_Distance(m.{ingeom}, j.geom) AS {distCol} "
            "FROM {t} AS m, ("
                "SELECT ST_UnaryUnion(ST_Collect({neargeom})) AS geom "
                "FROM {tblNear}{nearwhr}"
            ") AS j"
        ).format(
            ingeom=inGeom, distCol=near_col, t=inTbl,
            neargeom=nearGeom, tblNear=nearTbl,
            # BUG FIX: nearwhr was missing from this .format() call, so
            # this branch always raised KeyError; same expression as the
            # spatialite branch below.
            nearwhr="" if not whrNear else " WHERE {}".format(whrNear)
        ), api='psql')

        return output

    elif api == 'psql' and intbl_pk:
        from glass.pys import obj_to_lst
        from glass.ng.sql.q import q_to_ntbl

        q_to_ntbl(db, output, (
            "SELECT DISTINCT ON (s.{col_pk}) "
            "{inTblCols}, {nearTblCols}"
            "ST_Distance("
                "s.{ingeomCol}, h.{negeomCol}"
            ") AS {nearCol} FROM {in_tbl} AS s "
            "LEFT JOIN {near_tbl} AS h "
            "ON ST_DWithin(s.{ingeomCol}, h.{negeomCol}, {dist_v}) "
            "ORDER BY s.{col_pk}, ST_Distance(s.{ingeomCol}, h.{negeomCol})"
        ).format(
            col_pk=intbl_pk,
            inTblCols="s.*" if not cols_in_tbl else ", ".join(
                ["s.{}".format(x) for x in obj_to_lst(cols_in_tbl)]),
            nearTblCols="" if not cols_near_tbl else ", ".join(
                ["h.{}".format(x) for x in obj_to_lst(cols_near_tbl)]
            ) + ", ",
            ingeomCol=inGeom, negeomCol=nearGeom,
            nearCol=near_col, in_tbl=inTbl, near_tbl=nearTbl,
            # 100 km default search radius when no limit is given
            dist_v="100000" if not until_dist else until_dist
        ), api='psql')

        return output

    elif api == 'splite' or api == 'spatialite':
        Q = (
            "SELECT m.*, ST_Distance(m.{ingeom}, j.geom) AS {distCol} "
            "FROM {t} AS m, ("
                "SELECT ST_UnaryUnion(ST_Collect({neargeom})) AS geom "
                "FROM {tblNear}{nearwhr}"
            ") AS j"
        ).format(
            ingeom=inGeom, distCol=near_col, t=inTbl,
            neargeom=nearGeom, tblNear=nearTbl,
            nearwhr="" if not whrNear else " WHERE {}".format(whrNear))

        if outIsFile:
            from glass.g.tbl.filter import sel_by_attr

            sel_by_attr(db, Q, output, api_gis='ogr')
        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, output, Q, api='ogr2ogr')

        return output

    else:
        raise ValueError("api {} does not exist!".format(api))
def get_not_used_tags(OSM_FILE, OUT_TBL):
    """
    Use a file OSM to detect tags not considered in the OSM2LULC procedure.

    Compares the key/value/geometry tags present in OSM_FILE against the
    reference osmtolulc.sqlite catalogue; tags without correspondence are
    exported as shapefiles (per geometry type, in chunks of 500 rows) and
    listed in OUT_TBL.

    Returns OUT_TBL.
    """

    import os

    from glass.ng.wt import obj_to_tbl
    from glass.g.tbl.filter import sel_by_attr
    from glass.ng.sql.q import q_to_obj
    from glass.ng.pd.split import df_split
    from glass.pys.oss import fprop
    from glass.g.it.osm import osm_to_gpkg

    OSM_TAG_MAP = {
        "DB": os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'osmtolulc.sqlite'
        ),
        "OSM_FEAT": "osm_features",
        "KEY_COL": "key",
        "VALUE_COL": "value",
        "GEOM_COL": "geom"
    }

    WORKSPACE = os.path.dirname(OUT_TBL)

    sqdb = osm_to_gpkg(OSM_FILE, os.path.join(
        WORKSPACE, fprop(OSM_FILE, 'fn') + '.gpkg'
    ))

    # Get Features we are considering
    ourOSMFeatures = q_to_obj(OSM_TAG_MAP["DB"], (
        "SELECT {key} AS key_y, {value} AS value_y, {geom} AS geom_y "
        "FROM {tbl}"
    ).format(
        key=OSM_TAG_MAP["KEY_COL"], value=OSM_TAG_MAP["VALUE_COL"],
        geom=OSM_TAG_MAP["GEOM_COL"], tbl=OSM_TAG_MAP["OSM_FEAT"]
    ), db_api='sqlite')

    # Get Features in File
    TABLES_TAGS = {
        'points': ['highway', 'man_made', 'building'],
        'lines': ['highway', 'waterway', 'aerialway', 'barrier',
                  'man_made', 'railway'],
        'multipolygons': ['aeroway', 'amenity', 'barrier', 'building',
                          'craft', 'historic', 'land_area', 'landuse',
                          'leisure', 'man_made', 'military', 'natural',
                          'office', 'place', 'shop', 'sport', 'tourism',
                          'waterway', 'power', 'railway', 'healthcare',
                          'highway']
    }

    Qs = [
        " UNION ALL ".join([(
            "SELECT '{keycol}' AS key, {keycol} AS value, "
            "'{geomtype}' AS geom FROM {tbl} WHERE "
            "{keycol} IS NOT NULL"
        ).format(
            keycol=c,
            geomtype='Point' if table == 'points' else 'Line'
                if table == 'lines' else 'Polygon',
            tbl=table
        ) for c in TABLES_TAGS[table]]) for table in TABLES_TAGS
    ]

    fileOSMFeatures = q_to_obj(sqdb, (
        "SELECT key, value, geom FROM ({}) AS foo "
        "GROUP BY key, value, geom"
    ).format(" UNION ALL ".join(Qs)), db_api='sqlite')

    _fileOSMFeatures = fileOSMFeatures.merge(
        ourOSMFeatures, how='outer',
        left_on=["key", "value", "geom"],
        right_on=["key_y", "value_y", "geom_y"]
    )

    # Select OSM Features of file without correspondence
    _fileOSMFeatures["isnew"] = _fileOSMFeatures.key_y.fillna(value='nenhum')
    newTags = _fileOSMFeatures[_fileOSMFeatures.isnew == 'nenhum']
    # Escape single quotes for the SQL WHERE clause
    newTags["value"] = newTags.value.str.replace("'", "''")
    newTags["whr"] = newTags.key + "='" + newTags.value + "'"

    # Fallback row "sanitizer" used when the first export attempt fails
    # (currently a pass-through; kept for compatibility)
    def to_regular_str(row):
        row["whr_san"] = row.whr
        return row

    # BUG FIX: Q was previously pre-formatted with the FULL filterDf
    # where-clause before the chunk loop, so the per-chunk .format() was a
    # no-op and every chunk re-exported the whole selection. Keep Q as a
    # template and format it with each chunk's clause.
    Q = "SELECT * FROM {} WHERE {}"

    # Export tags not being used to new shapefiles
    for t in TABLES_TAGS:
        if t == 'points':
            filterDf = newTags[newTags.geom == 'Point']
        elif t == 'lines':
            filterDf = newTags[newTags.geom == 'Line']
        elif t == 'multipolygons':
            filterDf = newTags[newTags.geom == 'Polygon']

        # Chunk large selections to keep the WHERE clause manageable
        if filterDf.shape[0] > 500:
            dfs = df_split(filterDf, 500, nrows=True)
        else:
            dfs = [filterDf]

        i = 1
        for df in dfs:
            fn = t + '.shp' if len(dfs) == 1 else '{}_{}.shp'.format(
                t, str(i)
            )

            try:
                shp = sel_by_attr(sqdb, Q.format(
                    t, df.whr.str.cat(sep=" OR ")
                ), os.path.join(WORKSPACE, fn), api_gis='ogr')
            except Exception:
                # Best-effort retry with sanitized rows; api_gis added for
                # consistency with the first attempt
                __df = df.apply(lambda x: to_regular_str(x), axis=1)
                shp = sel_by_attr(sqdb, Q.format(
                    t, __df.whr.str.cat(sep=" OR ")
                ), os.path.join(WORKSPACE, fn), api_gis='ogr')

            i += 1

    # Export OUT_TBL with tags not being used
    newTags.drop(['key_y', 'value_y', 'geom_y', 'isnew', 'whr'],
                 axis=1, inplace=True)
    obj_to_tbl(newTags, OUT_TBL, sheetsName="new_tags", sanitizeUtf8=True)

    return OUT_TBL
def rn_cols(inShp, columns, api="ogr2ogr"):
    """
    Rename Columns in Shp.

    inShp   - input table/shapefile (or GRASS vector for grass/pygrass)
    columns - dict {current_name: new_name}
    api options:
    * ogr2ogr;
    * grass;
    * pygrass;

    Returns inShp.
    """

    if api == "ogr2ogr":
        import os

        from glass.pys.oss import fprop
        from glass.pys.oss import del_file, lst_ff
        from glass.g.tbl.filter import sel_by_attr
        from glass.g.prop.col import lst_cols

        # BUG FIX: work on a copy — the original mutated the caller's
        # `columns` dict in place (identity mappings + "geometry" key
        # leaked back to the caller).
        colmap = dict(columns)

        # Columns not being renamed map to themselves
        for c in lst_cols(inShp):
            if c not in colmap:
                colmap[c] = c

        colmap["geometry"] = "geometry"

        # Get inShp Folder and filename
        inshpfld = os.path.dirname(inShp)
        inshpname = fprop(inShp, 'fn')

        # Temporary output
        output = os.path.join(inshpfld, inshpname + '_xtmp.shp')

        # Rename columns by selecting data from input
        sel_by_attr(inShp, "SELECT {} FROM {}".format(
            ", ".join(["{} AS {}".format(c, colmap[c]) for c in colmap]),
            inshpname
        ), output, api_gis='ogr')

        # Delete original files (shp + sidecars)
        infiles = lst_ff(inshpfld, filename=inshpname)
        del_file(infiles)

        # Rename temporary output back to the original name
        oufiles = lst_ff(inshpfld, filename=inshpname + '_xtmp')
        for f in oufiles:
            os.rename(f, os.path.join(inshpfld, inshpname + fprop(f, 'ff')))

    elif api == 'grass':
        from glass.pys import execmd

        for col in columns:
            execmd(
                ("v.db.renamecolumn map={} layer=1 column={},{}").format(
                    inShp, col, columns[col]))

    elif api == 'pygrass':
        from grass.pygrass.modules import Module

        for col in columns:
            func = Module(
                "v.db.renamecolumn", map=inShp,
                column="{},{}".format(col, columns[col]),
                quiet=True, run_=False)
            func()

    else:
        raise ValueError("{} is not available".format(api))

    return inShp
def feat_not_within(db, inTbl, inGeom, withinTbl, withinGeom, outTbl,
                    inTblCols=None, outTblIsFile=None,
                    apiToUse='OGR_SPATIALITE', geom_col=None):
    """
    Get features not Within with any of the features in withinTbl.

    db        - database with both tables
    inTbl/inGeom         - candidate table and its geometry column
    withinTbl/withinGeom - container table and its geometry column
    outTbl    - output table name, or file path when outTblIsFile
    inTblCols - columns of inTbl to keep (default all)
    geom_col  - output geometry column, required for POSTGIS file export

    apiToUse options:
    * OGR_SPATIALITE;
    * POSTGIS.

    Returns outTbl; raises ValueError on bad arguments.
    """

    from glass.pys import obj_to_lst

    Q = (
        "SELECT {selCols} FROM {tbl} AS in_tbl WHERE ("
        "in_tbl.{in_geom} NOT IN ("
            "SELECT inin_tbl.{in_geom} FROM {wi_tbl} AS wi_tbl "
            "INNER JOIN {tbl} AS inin_tbl ON "
            "ST_Within(wi_tbl.{wi_geom}, inin_tbl.{in_geom})"
        "))"
    ).format(
        selCols="*" if not inTblCols else ", ".join(obj_to_lst(inTblCols)),
        tbl=inTbl, in_geom=inGeom,
        wi_tbl=withinTbl, wi_geom=withinGeom
    )

    if apiToUse == "OGR_SPATIALITE":
        if outTblIsFile:
            from glass.g.tbl.filter import sel_by_attr

            sel_by_attr(db, Q, outTbl, api_gis='ogr')
        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, outTbl, Q, api='ogr2ogr')

    elif apiToUse == "POSTGIS":
        if outTblIsFile:
            if not geom_col:
                raise ValueError((
                    "To export a PostGIS table to file, "
                    "geom_col must be specified"
                ))

            from glass.g.it.shp import dbtbl_to_shp

            dbtbl_to_shp(
                db, Q, geom_col, outTbl,
                api='pgsql2shp', tableIsQuery=True
            )
        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, outTbl, Q, api='psql')

    else:
        # BUG FIX: the message previously contained a literal "{}" —
        # .format(apiToUse) was never applied.
        raise ValueError((
            "API {} is not available. OGR_SPATIALITE and POSTGIS "
            "are the only valid options"
        ).format(apiToUse))

    return outTbl
def feat_within(db, inTbl, inGeom, withinTbl, withinGeom, outTbl,
                inTblCols=None, withinCols=None, outTblIsFile=None,
                apiToUse='OGR_SPATIALITE', geom_col=None):
    """
    Get Features within other Geometries in withinTbl
    e.g. Intersect points with Polygons.

    db        - database with both tables
    inTbl/inGeom         - inner table and its geometry column
    withinTbl/withinGeom - container table and its geometry column
    outTbl    - output table name, or file path when outTblIsFile
    inTblCols/withinCols - columns to keep from each table (default all)
    geom_col  - output geometry column, required for POSTGIS file export

    apiToUse options:
    * OGR_SPATIALITE;
    * POSTGIS.

    Returns outTbl; raises ValueError on bad arguments.
    """

    from glass.pys import obj_to_lst

    # Build the selected-columns expression from the two optional lists
    if not inTblCols and not withinCols:
        colSelect = "intbl.*, witbl.*"
    else:
        if inTblCols and not withinCols:
            colSelect = ", ".join([
                "intbl.{}".format(c) for c in obj_to_lst(inTblCols)
            ])
        elif not inTblCols and withinCols:
            colSelect = ", ".join([
                "witbl.{}".format(c) for c in obj_to_lst(withinCols)
            ])
        else:
            colSelect = "{}, {}".format(
                ", ".join(
                    ["intbl.{}".format(c) for c in obj_to_lst(inTblCols)]),
                ", ".join(
                    ["witbl.{}".format(c) for c in obj_to_lst(withinCols)])
            )

    Q = (
        "SELECT {selcols} FROM {in_tbl} AS intbl "
        "INNER JOIN {within_tbl} AS witbl ON "
        "ST_Within(intbl.{in_geom}, witbl.{wi_geom})"
    ).format(
        selcols=colSelect, in_tbl=inTbl, within_tbl=withinTbl,
        in_geom=inGeom, wi_geom=withinGeom
    )

    if apiToUse == "OGR_SPATIALITE":
        if outTblIsFile:
            from glass.g.tbl.filter import sel_by_attr

            sel_by_attr(db, Q, outTbl, api_gis='ogr')
        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, outTbl, Q, api='ogr2ogr')

    elif apiToUse == 'POSTGIS':
        if outTblIsFile:
            if not geom_col:
                raise ValueError((
                    "To export a PostGIS table to file, geom_col "
                    "must be specified!"
                ))

            from glass.g.it.shp import dbtbl_to_shp

            dbtbl_to_shp(
                db, Q, geom_col, outTbl,
                api="pgsql2shp", tableIsQuery=True)
        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, outTbl, Q, api='psql')

    else:
        # BUG FIX: the message previously contained a literal "{}" —
        # .format(apiToUse) was never applied.
        raise ValueError((
            "API {} is not available. OGR_SPATIALITE and POSTGIS "
            "are the only valid options"
        ).format(apiToUse))

    return outTbl
def prod_matrix(origins, destinations, networkGrs, speedLimitCol, onewayCol,
                thrdId="1", asCmd=None):
    """
    Get Matrix Distance:

    Builds an OD travel-time matrix with GRASS GIS: merges origins and
    destinations into one point layer, snaps it to the network, derives
    per-arc travel times from speed/length, runs all-pairs shortest paths
    and keeps only origin->destination pairs.

    NOTE(review): relies on a module-level `import os` (not imported in
    this function) and on an active GRASS session/mapset.
    """

    from glass.g.tbl import category
    from glass.g.tbl.filter import sel_by_attr
    from glass.g.tbl.col import add_fields
    from glass.g.tbl.grs import add_table, update_table
    from glass.g.mob.grstbx.vnet import add_pnts_to_network
    from glass.g.mob.grstbx.vnet import run_allpairs
    from glass.g.cp import copy_insame_vector
    from glass.g.tbl.attr import geomattr_to_db
    from glass.g.dp.mge import shps_to_shp
    from glass.g.prop.feat import feat_count
    from glass.g.it.shp import shp_to_grs

    # Merge Origins and Destinations into the same Feature Class
    # Feature counts are kept so cat ranges can be mapped back to
    # origins vs destinations after the merge.
    ORIGINS_NFEAT = feat_count(origins, gisApi='pandas')
    DESTINATIONS_NFEAT = feat_count(destinations, gisApi='pandas')

    ORIGINS_DESTINATIONS = shps_to_shp(
        [origins, destinations], os.path.join(
            os.path.dirname(origins), "points_od_{}.shp".format(thrdId)),
        api='pandas')

    pointsGrs = shp_to_grs(
        ORIGINS_DESTINATIONS, "points_od_{}".format(thrdId), asCMD=asCmd)

    # Connect Points to Network
    newNetwork = add_pnts_to_network(
        networkGrs, pointsGrs, "rdv_points_{}".format(thrdId), asCMD=asCmd)

    # Sanitize Network Table and Cost Columns (arcs live on layer 3)
    newNetwork = category(
        newNetwork, "rdv_points_time_{}".format(thrdId), "add",
        LyrN="3", geomType="line", asCMD=asCmd)

    add_table(newNetwork, (
        "cat integer,kph double precision,length double precision,"
        "ft_minutes double precision,"
        "tf_minutes double precision,oneway text"
    ), lyrN=3, asCMD=asCmd)

    copy_insame_vector(
        newNetwork, "kph", speedLimitCol, 3, geomType="line", asCMD=asCmd)
    copy_insame_vector(
        newNetwork, "oneway", onewayCol, 3, geomType="line", asCMD=asCmd)

    geomattr_to_db(
        newNetwork, "length", "length", "line",
        createCol=False, unit="meters", lyrN=3, ascmd=asCmd)

    # Fallback speed 3.6 kph (walking pace) for missing/blocked arcs
    update_table(newNetwork, "kph", "3.6", "kph IS NULL",
                 lyrN=3, ascmd=asCmd)
    update_table(newNetwork, "kph", "3.6", "oneway = 'N'",
                 lyrN=3, ascmd=asCmd)

    # Travel time in minutes for each direction
    update_table(newNetwork, "ft_minutes",
                 "(length * 60) / (kph * 1000.0)",
                 "ft_minutes IS NULL", lyrN=3, ascmd=asCmd)
    update_table(newNetwork, "tf_minutes",
                 "(length * 60) / (kph * 1000.0)",
                 "tf_minutes IS NULL", lyrN=3, ascmd=asCmd)

    # Exagerate Oneway's — huge cost on the forbidden direction
    update_table(newNetwork, "ft_minutes", "1000", "oneway = 'TF'",
                 lyrN=3, ascmd=asCmd)
    update_table(newNetwork, "tf_minutes", "1000", "oneway = 'FT'",
                 lyrN=3, ascmd=asCmd)

    # Produce matrix
    matrix = run_allpairs(
        newNetwork, "ft_minutes", "tf_minutes",
        'result_{}'.format(thrdId), arcLyr=3, nodeLyr=2, asCMD=asCmd)

    # Exclude unwanted OD Pairs: origins have cat 1..N, destinations
    # N+1..N+M (merge order above)
    q = "({}) AND ({})".format(
        " OR ".join(
            ["from_cat={}".format(str(i + 1))
             for i in range(ORIGINS_NFEAT)]),
        " OR ".join([
            "to_cat={}".format(str(ORIGINS_NFEAT + i + 1))
            for i in range(DESTINATIONS_NFEAT)
        ]))

    matrix_sel = sel_by_attr(
        matrix, q, "sel_{}".format(matrix),
        geomType="line", lyrN=3, asCMD=asCmd)

    # Map cats back to the original 0-based feature ids
    add_fields(matrix_sel, "from_fid", "INTEGER", lyrN=3, asCMD=asCmd)
    add_fields(matrix_sel, "to_fid", "INTEGER", lyrN=3, asCMD=asCmd)

    update_table(matrix_sel, "from_fid", "from_cat - 1",
                 "from_fid IS NULL", lyrN=3, ascmd=asCmd)
    update_table(matrix_sel, "to_fid",
                 "to_cat - {} - 1".format(str(ORIGINS_NFEAT)),
                 "to_fid IS NULL", lyrN=3, ascmd=asCmd)

    return matrix_sel