def join_dfs(df_a, df_b, colA, colB, onlyCombinations=True):
    """
    Join two Pandas Dataframes
    
    colA/colB name the join column in each dataframe; the literal string
    'index' means "join on that dataframe's index".
    onlyCombinations=True -> inner join; False -> outer join.
    """
    
    from gasp import goToList
    
    how = 'inner' if onlyCombinations else 'outer'
    
    # Build the merge keyword arguments from which side(s) use the index
    kwargs = {'how': how}
    
    if colA == 'index':
        kwargs['left_index'] = True
    else:
        kwargs['left_on'] = goToList(colA)
    
    if colB == 'index':
        kwargs['right_index'] = True
    else:
        kwargs['right_on'] = goToList(colB)
    
    return df_a.merge(df_b, **kwargs)
def st_buffer(conParam, inTbl, bfDist, geomCol, outTbl, bufferField="geometry",
              whrClause=None, dissolve=None, cols_select=None, outTblIsFile=None):
    """
    Using Buffer on PostGIS Data
    
    conParam     - connection parameters for the PostgreSQL database
    inTbl        - input table with geometries to buffer
    bfDist       - buffer distance (units of the data's SRS)
    geomCol      - name of the geometry column in inTbl
    outTbl       - output table name (or file path when outTblIsFile)
    bufferField  - name of the resulting geometry column
    whrClause    - optional WHERE clause to filter the input rows
    dissolve     - None (no dissolve), column name(s) to dissolve by,
                   or "ALL" to dissolve every feature into one
    cols_select  - optional column(s) to keep in the output
    outTblIsFile - if truthy, export the result to a Shapefile instead of
                   creating a new table
    """
    
    from gasp import goToList
    
    # Normalize dissolve to a list of columns, preserving the "ALL" flag
    dissolve = goToList(dissolve) if dissolve != "ALL" else "ALL"
    
    SEL_COLS = "" if not cols_select else ", ".join(goToList(cols_select))
    DISS_COLS = "" if not dissolve or dissolve == "ALL" else ", ".join(
        dissolve)
    # GROUP BY is needed only when dissolving; it must list both the
    # selected and the dissolve columns (whichever are non-empty)
    GRP_BY = "" if not dissolve else "{}, {}".format(SEL_COLS, DISS_COLS) if \
        SEL_COLS != "" and DISS_COLS != "" else SEL_COLS \
        if SEL_COLS != "" else DISS_COLS if DISS_COLS != "" else ""
    
    # When dissolving, every buffer is collected and unioned into one geometry
    Q = (
        "SELECT{sel}{spFunc}{geom}, {_dist}{endFunc} AS {bf} "
        "FROM {t}{whr}{grpBy}"
    ).format(
        sel = " " if not cols_select else " {}, ".format(SEL_COLS),
        spFunc="ST_Buffer(" if not dissolve else \
            "ST_UnaryUnion(ST_Collect(ST_Buffer(",
        geom=geomCol, _dist=bfDist,
        endFunc=")" if not dissolve else ")))",
        t=inTbl,
        grpBy=" GROUP BY {}".format(GRP_BY) if GRP_BY != "" else "",
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        bf=bufferField
    )
    
    if not outTblIsFile:
        # Materialize the query as a new PostgreSQL table
        from gasp.sql.mng.qw import ntbl_by_query
        
        outTbl = ntbl_by_query(conParam, outTbl, Q, api='psql')
    else:
        # Export the query result directly to a Shapefile
        from gasp.to.shp import psql_to_shp
        
        psql_to_shp(conParam, Q, outTbl, api='pgsql2shp',
                    geom_col=bufferField, tableIsQuery=True)
    
    return outTbl
def splite_buffer(db, table, dist, geomField, outTbl, cols_select=None,
                  bufferField="geometry", whrClause=None, outTblIsFile=None,
                  dissolve=None):
    """
    Run ST_Buffer
    
    if not dissolve, no generalization will be applied;
    if dissolve == to str or list, a generalization will be accomplish
    using the fields referenced by this object;
    if dissolve == 'ALL', all features will be dissolved.
    """
    
    from gasp import goToList
    
    # Normalize dissolve to a list of columns, preserving the "ALL" flag
    dissolve = goToList(dissolve) if dissolve != "ALL" else "ALL"
    
    # When dissolving, every buffer is collected and unioned into one geometry
    sql = (
        "SELECT{sel}{spFunc}{geom}, {_dist}{endFunc} AS {bf} "
        "FROM {tbl}{whr}{grpBy}"
    ).format(
        sel = " " if not cols_select else " {}, ".format(
            ", ".join(goToList(cols_select))
        ),
        tbl=table, geom=geomField, _dist=str(dist), bf=bufferField,
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        spFunc="ST_Buffer(" if not dissolve else \
            "ST_UnaryUnion(ST_Collect(ST_Buffer(",
        endFunc = ")" if not dissolve else ")))",
        # GROUP BY only when dissolving by specific columns (not "ALL")
        grpBy="" if not dissolve or dissolve == "ALL" else " GROUP BY {}".format(
            ", ".join(dissolve)
        )
    )
    
    if outTblIsFile:
        # Export the query result to a file via OGR
        from gasp.exct import sel_by_attr
        
        sel_by_attr(db, sql, outTbl, api_gis='ogr')
    else:
        # Materialize the query as a new table
        from gasp.sql.mng.qw import ntbl_by_query
        
        ntbl_by_query(db, outTbl, sql, api='ogr2ogr')
    
    return outTbl
def join_bgrishp_with_bgridata(bgriShp, bgriCsv, outShp, shpJoinField="BGRI11",
                               dataJoinField="GEO_COD",
                               joinFieldsMantain=None, newNames=None):
    """
    Join BGRI ESRI Shapefile with the CSV with the BGRI Data
    
    bgriShp           - path to the BGRI ESRI Shapefile
    bgriCsv           - path to the CSV with the BGRI data (';' separated)
    outShp            - path to the output Shapefile
    shpJoinField      - join column in the Shapefile
    dataJoinField     - join column in the CSV
    joinFieldsMantain - column(s) of the CSV to keep; None keeps all
    newNames          - new name(s) for the kept columns, in the same
                        order as joinFieldsMantain
    """
    
    from gasp import goToList
    from gasp.fm import tbl_to_obj
    from gasp.to.shp import df_to_shp
    
    # Read main_table
    mainDf = tbl_to_obj(bgriShp)
    
    # Read join table
    joinDf = tbl_to_obj(bgriCsv, _delimiter=';', encoding_='utf-8')
    
    # Sanitize GEO_COD of bgriCsv (values come quoted in the source data)
    joinDf[dataJoinField] = joinDf[dataJoinField].str.replace("'", "")
    
    if joinFieldsMantain:
        joinFieldsMantain = goToList(joinFieldsMantain)
        
        # Drop every column that is neither the join key nor requested
        dropCols = [
            col for col in joinDf.columns.values
            if col not in [dataJoinField] + joinFieldsMantain
        ]
        
        joinDf.drop(dropCols, axis=1, inplace=True)
    
    resultDf = mainDf.merge(
        joinDf, how='inner', left_on=shpJoinField, right_on=dataJoinField)
    
    # FIX: renaming previously assumed joinFieldsMantain was always given
    # and that both lists had the same length (TypeError/IndexError);
    # zip pairs only what both lists share
    if newNames and joinFieldsMantain:
        newNames = goToList(newNames)
        
        renDict = {
            old: new for old, new in zip(joinFieldsMantain, newNames)
        }
        
        resultDf.rename(columns=renDict, inplace=True)
    
    df_to_shp(resultDf, outShp)
    
    return outShp
def copy_fromdb_todb2(conFrom, conTo, tables):
    """
    Send PGSQL Tables from one database to another using
    pg_dump and pg_restore
    
    conFrom / conTo - connection parameters of source and target databases
    tables          - table name or list of table names to copy
    """
    
    import os
    from gasp import goToList
    from gasp.oss.ops import create_folder, del_folder
    from gasp.sql.mng.tbl import dump_table
    from gasp.sql.mng.tbl import restore_table
    
    # Temporary folder (random name) next to this module for the SQL dumps
    tmpFolder = create_folder(
        os.path.dirname(os.path.abspath(__file__)), randName=True)
    
    # FIX: delete the temporary folder even when a dump or restore raises;
    # previously an error left the folder behind (also dropped the unused
    # 'tblname' binding)
    try:
        for table in goToList(tables):
            # Dump
            sqlScript = dump_table(
                conFrom, table, os.path.join(tmpFolder, table + ".sql"))
            
            # Restore
            restore_table(conTo, sqlScript, table)
    finally:
        del_folder(tmpFolder)
def copy_fromdb_todb(conFromDb, conToDb, tables, qForTbl=None):
    """
    Send PGSQL Tables from one database to other
    
    qForTbl (optional) maps a table name to the query whose result should
    be copied instead of the whole table.
    """
    
    from gasp import goToList
    from gasp.fm.sql import query_to_df
    from gasp.sql.mng.fld import cols_name
    from gasp.to.sql import df_to_db
    
    tables = goToList(tables)
    
    for table in tables:
        cols = cols_name(conFromDb, table)
        
        # FIX: flattened the redundant nested ifs that duplicated the same
        # query call; also removed the unused 'import pandas'
        if qForTbl and table in qForTbl:
            # Use the caller-provided query for this table
            tblDf = query_to_df(conFromDb, qForTbl[table], db_api='psql')
        else:
            # Copy every column of the table
            tblDf = query_to_df(conFromDb, "SELECT {} FROM {}".format(
                ", ".join(cols), table), db_api='psql')
        
        df_to_db(conToDb, tblDf, table, api='psql')
def geom_to_points(conParam, table, geomCol, outTable, selCols=None,
                   newGeomCol=None):
    """
    Convert a Polygon/Polyline Geometry to Points
    
    Equivalent to feature to point tool
    """
    
    from gasp import goToList
    from gasp.sql.mng.qw import ntbl_by_query
    
    keepCols = goToList(selCols)
    
    # Optional leading column list and output geometry name
    colsTxt = "{}, ".format(", ".join(keepCols)) if keepCols else ""
    geomName = newGeomCol if newGeomCol else "geom"
    
    sql = (
        "SELECT {}(ST_DumpPoints({})).geom AS {} "
        "FROM {}"
    ).format(colsTxt, geomCol, geomName, table)
    
    return ntbl_by_query(conParam, outTable, sql, api='psql')
def fix_geom(conParam, table, geom, out_tbl, colsSelect=None, whr=None):
    """
    Remove some topological incorrections on the PostGIS data
    """
    
    from gasp.sql.mng.qw import ntbl_by_query
    
    # Columns to carry over (everything except the geometry column)
    if colsSelect:
        from gasp import goToList
        
        srcCols = goToList(colsSelect)
    else:
        from gasp.sql.mng.fld import cols_name
        
        srcCols = cols_name(conParam, table, sanitizeSpecialWords=None)
    
    cols_tbl = [
        '{}.{}'.format(table, c) for c in srcCols if c != geom
    ]
    
    whrSql = " WHERE {}".format(whr) if whr else ""
    
    # ST_MakeValid repairs invalid geometries in place
    Q = "SELECT {c}, ST_MakeValid({g}) AS {g} FROM {t}{w}".format(
        c=", ".join(cols_tbl), g=geom, t=table, w=whrSql
    )
    
    return ntbl_by_query(conParam, out_tbl, Q, api='psql')
def get_rows_notin_query(conParam, tblA, tblB, joinCols, newTable,
                         cols_to_mantain=None, tblAisQuery=None,
                         tblBisQuery=None):
    """
    Get rows from tblA that are not present in tblB
    
    joinCols = {colTblA : colTblB}
    
    Implemented as a LEFT JOIN anti-join: rows of tblA whose join columns
    find no match in tblB (tblB side IS NULL).
    """
    
    from gasp import goToList
    from gasp.sql.mng.qw import ntbl_by_query
    
    cols_to_mantain = goToList(cols_to_mantain)
    
    # FIX: the SELECT clause interpolated a Python list; join it into SQL
    selCols = ", ".join(cols_to_mantain) if cols_to_mantain else \
        "{}.*".format(tblA)
    
    # FIX: {tblB}/{fldB} placeholders were never passed to format()
    # (KeyError at runtime); the anti-join condition tests one of tblB's
    # join columns for NULL
    fldB = list(joinCols.values())[0]
    
    q = (
        "SELECT {cls} FROM {ta} LEFT JOIN {tb} ON "
        "{rel} WHERE {tblB}.{fldB} IS NULL"
    ).format(
        cls=selCols,
        ta=tblA if not tblAisQuery else tblAisQuery,
        tb=tblB if not tblBisQuery else tblBisQuery,
        rel=" AND ".join([
            "{ta}.{ca} = {tb}.{cb}".format(
                ta=tblA, tb=tblB, ca=k, cb=joinCols[k]
            ) for k in joinCols
        ]),
        tblB=tblB, fldB=fldB
    )
    
    newTable = ntbl_by_query(conParam, newTable, q, api='psql')
    
    return newTable
def intersect_in_same_table(conParam, table, geomA, geomB, outtable,
                            intersectField='intersects',
                            intersectGeom=None, colsSel=None):
    """
    Intersect two Geometries in the same table
    
    Adds a 1/0 flag column (intersectField) saying whether geomA and geomB
    of the same row intersect; when intersectGeom is truthy, the actual
    intersection geometry is also added as 'intersect_geom'.
    """
    
    # FIX: removed the unused 'from gasp.sql.c import psqlcon' import
    from gasp import goToList
    from gasp.sql.mng.qw import ntbl_by_query
    
    COLS = goToList(colsSel)
    
    return ntbl_by_query(
        conParam, outtable,
        # Outer query turns the boolean into 1/0; inner query computes
        # the intersection test (and optionally the intersection geometry)
        ("SELECT {cls}, CASE WHEN interse IS TRUE THEN 1 ELSE 0 END AS {intF} "
         "{intgeomF}FROM ("
            "SELECT {cls}, ST_Intersects({gA}, {gB}) AS interse "
            "{intgeom}FROM {t}"
         ") AS tst").format(
             gA=geomA, gB=geomB, t=table, intF=intersectField,
             cls="*" if not COLS else ", ".join(COLS),
             intgeom="" if not intersectGeom else \
                 ", ST_Intersection({}, {}) AS intersect_geom".format(
                     geomA, geomB
                 ),
             intgeomF="" if not intersectGeom else ", intersect_geom"
        ), api='psql'
    )
def change_field_type(lnk, table, fields, outable, cols=None):
    """
    Imagine a table with numeric data saved as text. This method convert
    that numeric data to a numeric field.
    
    fields = {'field_name' : 'field_type'}
    """
    
    if cols:
        from gasp import goToList
        
        cols = goToList(cols)
    else:
        cols = cols_name(lnk, table)
    
    # Columns copied unchanged (everything not being converted)
    select_fields = [c for c in cols if c not in fields]
    
    # CAST expressions for the converted columns
    castExprs = [
        'CAST({f_} AS {t}) AS {f_}'.format(f_=fld, t=fields[fld])
        for fld in fields
    ]
    
    con = psqlcon(lnk)
    
    # Create new table with the new field with converted values
    cursor = con.cursor()
    cursor.execute('CREATE TABLE {} AS SELECT {}, {} FROM {}'.format(
        outable, ', '.join(select_fields), ', '.join(castExprs), table
    ))
    
    con.commit()
    cursor.close()
    con.close()
def join_table_with_tables(table, idTable, join_tables, join_fields=None):
    """
    Join table with all tables referred in join_tables
    
    join_tables = {
        table_name : join_field_name
        ...
    }
    
    join_fields specify the fields from the join table to add in table
    join_fields = {
        table_name : field,
        table_name : [field_1, field_2, ...]
    }
    """
    
    from gasp import goToList
    
    for tbl in join_tables:
        # Fields to bring from this join table ("" means none specified)
        if join_fields and tbl in join_fields:
            fld_to_join = goToList(join_fields[tbl])
        else:
            fld_to_join = ""
        
        join_table(
            table, idTable, tbl, join_tables[tbl],
            fld_to_f_tbl=fld_to_join
        )
def spatial_join(inShp, joinShp, outShp, attr=None):
    """
    Join two tables based in spatial relation
    """
    
    import os
    from gasp import goToList
    
    attr = goToList(attr)
    
    if not attr:
        ATTR = ""
    else:
        # Field-mapping entries for every attribute to carry over
        joinName = os.path.splitext(os.path.basename(joinShp))[0]
        ATTR = [(
            "{fld} \"{fld}\" true true false 30 Text 0 0 ,"
            "First,#,{shp},{fld},-1,-1"
        ).format(fld=f, shp=joinName) for f in attr]
    
    arcpy.SpatialJoin_analysis(
        inShp, joinShp, outShp,
        "JOIN_ONE_TO_ONE", "KEEP_ALL",
        ";".join(ATTR), "INTERSECT", "", ""
    )
    
    return outShp
def sqlite_insert_query(db, table, cols, new_values, execute_many=None):
    """
    Method to insert data into SQLITE Database
    
    db           - path to the SQLite database file
    table        - target table name
    cols         - column name or list of column names
    new_values   - rows to insert; with execute_many, a sequence of row
                   tuples bound to '?' placeholders, otherwise a sequence
                   of rows whose items are spliced as SQL literals
    execute_many - use cursor.executemany with parameter binding
    """
    
    import sqlite3
    from gasp import goToList
    
    cols = goToList(cols)
    
    if not cols:
        raise ValueError('cols value is not valid')
    
    conn = sqlite3.connect(db)
    cs = conn.cursor()
    
    try:
        if not execute_many:
            # FIX: the original called str.join on the SQL template
            # instead of str.format, producing a broken statement
            cs.execute("INSERT INTO {} ({}) VALUES {}".format(
                table, ', '.join(cols),
                ', '.join(
                    ['({})'.format(', '.join(row)) for row in new_values])
            ))
        else:
            cs.executemany(
                '''INSERT INTO {} ({}) VALUES ({})'''.format(
                    table, ', '.join(cols),
                    ', '.join(['?' for i in range(len(cols))])),
                new_values
            )
        
        conn.commit()
    finally:
        # FIX: close cursor and connection even when the INSERT raises
        cs.close()
        conn.close()
def del_fld_notin_geodf(df, flds, geomCol=None):
    """
    Delete columns not in flds
    
    df      - (Geo)DataFrame
    flds    - column(s) to keep besides the geometry column
    geomCol - name of the geometry column; when None, a column named
              'geometry' or 'geom' is searched for
    """
    
    from gasp import goToList
    
    cols = df.columns.values
    
    if not geomCol:
        F_GEOM = None
        for c in cols:
            if c == 'geometry' or c == 'geom':
                F_GEOM = c
                break
        
        # FIX: previously, when no geometry column was found, F_GEOM was
        # left unbound and a confusing NameError was raised below
        if not F_GEOM:
            raise ValueError(
                'geomCol was not given and no geometry column was found')
    else:
        F_GEOM = geomCol
    
    if not flds:
        Icols = [F_GEOM]
    else:
        Icols = goToList(flds) + [F_GEOM]
    
    DEL_COLS = [c for c in cols if c not in Icols]
    
    df.drop(DEL_COLS, axis=1, inplace=True)
    
    return df
def sgbd_get_feat_not_within(dbcon, inTbl, inGeom, withinTbl, withinGeom,
                             outTbl, inTblCols=None, outTblIsFile=None,
                             apiToUse='OGR_SPATIALITE'):
    """
    Get features not Within with any of the features in withinTbl
    
    apiToUse options:
    * OGR_SPATIALITE;
    * POSTGIS.
    """
    
    from gasp import goToList
    
    # Anti-join: keep rows of inTbl whose geometry is not within any
    # geometry of withinTbl
    Q = (
        "SELECT {selCols} FROM {tbl} AS in_tbl WHERE ("
        "in_tbl.{in_geom} NOT IN ("
            "SELECT inin_tbl.{in_geom} FROM {wi_tbl} AS wi_tbl "
            "INNER JOIN {tbl} AS inin_tbl ON "
            "ST_Within(wi_tbl.{wi_geom}, inin_tbl.{in_geom})"
        "))"
    ).format(
        selCols = "*" if not inTblCols else ", ".join(goToList(inTblCols)),
        tbl = inTbl, in_geom = inGeom,
        wi_tbl = withinTbl, wi_geom = withinGeom
    )
    
    if apiToUse == "OGR_SPATIALITE":
        if outTblIsFile:
            from gasp.anls.exct import sel_by_attr
            
            sel_by_attr(dbcon, Q, outTbl, api_gis='ogr')
        else:
            from gasp.sql.mng.qw import ntbl_by_query
            
            ntbl_by_query(dbcon, outTbl, Q, api='ogr2ogr')
    elif apiToUse == "POSTGIS":
        if outTblIsFile:
            from gasp.to.shp import psql_to_shp
            
            psql_to_shp(
                dbcon, Q, outTbl, api='pgsql2shp',
                geom_col=None, tableIsQuery=True
            )
        else:
            from gasp.sql.mng.qw import ntbl_by_query
            
            ntbl_by_query(dbcon, outTbl, Q, api='psql')
    else:
        # FIX: the message template contained a '{}' placeholder that was
        # never formatted with the invalid API name
        raise ValueError((
            "API {} is not available. OGR_SPATIALITE and POSTGIS "
            "are the only valid options"
        ).format(apiToUse))
    
    return outTbl
def del_topoerror_shps(conParam, shps, epsg, outfolder):
    """
    Remove topological errors from Feature Class data using PostGIS
    
    conParam  - connection parameters for the PostgreSQL database
    shps      - Feature Class path or list of paths
    epsg      - EPSG code of the input data
    outfolder - folder where the corrected Feature Classes are written
    """
    
    import os
    from gasp import goToList
    from gasp.sql.mng.fld import cols_name
    from gasp.sql.mng.qw import ntbl_by_query
    from gasp.to.sql import shp_to_psql
    from gasp.to.shp import psql_to_shp
    
    shps = goToList(shps)
    
    # Import every Feature Class into PostgreSQL
    TABLES = shp_to_psql(conParam, shps, epsg, api="shp2pgsql")
    
    # For each imported table create a copy with ST_MakeValid applied to
    # the geometry (keeping every non-geometry column)
    NTABLE = [ntbl_by_query(
        conParam, "nt_{}".format(t),
        "SELECT {cols}, ST_MakeValid({tbl}.geom) AS geom FROM {tbl}".format(
            cols = ", ".join(["{}.{}".format(TABLES[t], x) for x in cols_name(
                conParam, TABLES[t], sanitizeSpecialWords=None
            ) if x != 'geom']),
            tbl=TABLES[t]
        ), api='psql'
    ) for t in range(len(TABLES))]
    
    # Export the corrected tables back to Feature Classes, reusing the
    # original table names as output filenames
    for t in range(len(NTABLE)):
        psql_to_shp(
            conParam, NTABLE[t],
            os.path.join(outfolder, TABLES[t]), tableIsQuery=None,
            api='pgsql2shp', geom_col="geom"
        )
def lst_dataframe(mxdObj, dfNames=None):
    """
    List Dataframes in a MXD Project
    
    Returns None when nothing matches, the single object when exactly one
    matches, otherwise the list of matching dataframes.
    """
    
    from gasp import goToList
    
    dfNames = goToList(dfNames)
    
    allDfs = arcpy.mapping.ListDataFrames(mxdObj)
    
    # Filter by name only when names were given
    dfObjs = [
        d for d in allDfs if str(d.name) in dfNames
    ] if dfNames else allDfs
    
    if not dfObjs:
        return None
    
    return dfObjs[0] if len(dfObjs) == 1 else dfObjs
def raster_report(rst, rel, _units=None, ascmd=None):
    """
    Produce a r.report of a GRASS raster
    
    rst    - raster map name
    rel    - output report file
    ascmd  - if truthy, run r.report through the shell instead of pygrass
    
    Units options:
    * Options: mi, me, k, a, h, c, p
    ** mi: area in square miles
    ** me: area in square meters
    ** k: area in square kilometers
    ** a: area in acres
    ** h: area in hectares
    ** c: number of cells
    ** p: percent cover
    """
    
    if not ascmd:
        from grass.pygrass.modules import Module
        
        report = Module(
            "r.report", map=rst, flags="h", output=rel,
            units=_units, run_=False, quiet=True
        )
        
        report()
    else:
        from gasp import exec_cmd, goToList
        
        # FIX: dropped the unused 'rcmd' binding; exec_cmd's return value
        # was never used
        exec_cmd("r.report map={} output={}{} -h".format(
            rst, rel,
            " units={}".format(",".join(goToList(_units))) if _units else ""
        ))
    
    return rel
def text_columns_to_column(conParam, inTable, columns, strSep, newCol,
                           outTable=None):
    """
    Several text columns to a single column
    
    Concatenates the given text columns (separated by strSep) into newCol;
    writes a new table when outTable is given, otherwise adds the column
    to inTable.
    """
    
    from gasp import goToList
    from gasp.sql.mng.qw import ntbl_by_query
    
    mergeCols = goToList(columns)
    
    tblCols = get_columns_type(
        conParam, inTable, sanitizeColName=None, pyType=False)
    
    # Every merged column must be textual
    for c in mergeCols:
        if tblCols[c] != 'text' and tblCols[c] != 'varchar':
            raise ValueError('{} should be of type text'.format(c))
    
    # COALESCE(a, '') || 'sep' || COALESCE(b, '') || ...
    coalesce = " || '{}' || ".format(strSep).join(
        ["COALESCE({}, '')".format(c) for c in mergeCols]
    )
    
    if outTable:
        # Write new table
        colsToSelect = [_c for _c in tblCols if _c not in mergeCols]
        
        newColExpr = coalesce + " AS {}".format(newCol)
        sel = newColExpr if not colsToSelect else "{}, {}".format(
            ", ".join(colsToSelect), newColExpr)
        
        ntbl_by_query(
            conParam, outTable,
            "SELECT {} FROM {}".format(sel, inTable), api='psql')
        
        return outTable
    else:
        # Add column to inTable
        from gasp.sql.mng.qw import update_table
        
        add_field(conParam, inTable, {newCol: 'text'})
        update_table(conParam, inTable, {newCol: coalesce})
        
        return inTable
def tweets_to_df(keyword=None, inGeom=None, epsg=None, LANG='pt',
                 NTWEETS=1000, tweetType='mixed', apiKey=None,
                 dropFields=None):
    """
    Search for Tweets and Export them to XLS
    
    Returns 0 when the search produced no data, otherwise a DataFrame.
    One of keyword/inGeom is required; inGeom also requires epsg.
    """
    
    from gasp import goToList
    
    if not inGeom and not keyword:
        raise ValueError('inGeom or keyword, one of them are required')
    
    if inGeom and not epsg:
        raise ValueError('inGeom implies epsg')
    
    if inGeom:
        from gasp.anls.prox.bf import getBufferParam
        
        x, y, dist = getBufferParam(inGeom, epsg, outSRS=4326)
        
        # The search radius is expected in kilometers
        dist = float(dist) / 1000
    else:
        x, y, dist = None, None, None
    
    data = search_tweets(
        lat=y, lng=x, radius=dist, keyword=keyword, NR_ITEMS=NTWEETS,
        only_geo=None, __lang=LANG, resultType=tweetType, key=apiKey
    )
    
    # FIX: the bare 'except: pass' hid every error. The intent is: falsy
    # results (None/0/empty) mean "no tweets" -> return 0, while a
    # DataFrame raises ValueError on truth-testing and should proceed
    try:
        if not data:
            return 0
    except ValueError:
        pass
    
    if keyword:
        data["keyword"] = keyword
    else:
        data["keyword"] = 'nan'
    
    dropFields = goToList(dropFields)
    
    if dropFields:
        data.drop(dropFields, axis=1, inplace=True)
    
    return data
def st_near(link, inTbl, inTblPK, inGeom, nearTbl, nearGeom, output,
            near_col='near', untilDist=None, colsInTbl=None,
            colsNearTbl=None):
    """
    Near tool for PostGIS
    
    For each row of inTbl (identified by inTblPK) find the nearest feature
    of nearTbl within untilDist (default 100000) and store the distance in
    near_col.
    """
    
    from gasp import goToList
    from gasp.sql.mng.qw import ntbl_by_query
    
    inCols = goToList(colsInTbl)
    neCols = goToList(colsNearTbl)
    
    # Column lists for each side of the join (s = inTbl, h = nearTbl)
    sCols = ", ".join(["s.{}".format(c) for c in inCols]) if inCols else "s.*"
    hCols = ", ".join(
        ["h.{}".format(c) for c in neCols]) + ", " if neCols else ""
    maxDist = untilDist if untilDist else "100000"
    
    # DISTINCT ON + ORDER BY distance keeps only the closest candidate
    q = (
        "SELECT DISTINCT ON (s.{colPk}) "
        "{inTblCols}, {nearTblCols}"
        "ST_Distance("
            "s.{ingeomCol}, h.{negeomCol}"
        ") AS {nearCol} FROM {in_tbl} AS s "
        "LEFT JOIN {near_tbl} AS h "
        "ON ST_DWithin(s.{ingeomCol}, h.{negeomCol}, {dist_v}) "
        "ORDER BY s.{colPk}, ST_Distance(s.{ingeomCol}, h.{negeomCol})"
    ).format(
        colPk=inTblPK, inTblCols=sCols, nearTblCols=hCols,
        ingeomCol=inGeom, negeomCol=nearGeom, nearCol=near_col,
        in_tbl=inTbl, near_tbl=nearTbl, dist_v=maxDist
    )
    
    ntbl_by_query(link, output, q, api='psql')
    
    return output
def list_files(w, file_format=None, filename=None):
    """
    List the abs path of all files with a specific extension on a folder
    """
    
    from gasp import goToList
    
    # Prepare file format list (ensure every extension starts with '.')
    if file_format:
        formats = [
            f if f[0] == '.' else '.' + f for f in goToList(file_format)
        ]
    else:
        formats = None
    
    # List only the first level of the folder
    foundFiles = []
    for (d, _d_, f) in os.walk(w):
        foundFiles.extend(f)
        break
    
    # Filter files by format or not
    if not formats:
        t = [os.path.join(w, f) for f in foundFiles]
    else:
        t = [
            os.path.join(w, f) for f in foundFiles
            if os.path.splitext(os.path.basename(f))[1] in formats
        ]
    
    # Filter by filename
    if not filename:
        return t
    
    filename = goToList(filename)
    
    return [f for f in t if get_filename(f) in filename]
def del_file(_file):
    """
    Delete files if exists
    """
    
    from gasp import goToList
    
    for fpath in goToList(_file):
        # Remove only paths that point to an existing regular file
        if os.path.isfile(fpath) and os.path.exists(fpath):
            os.remove(fpath)
def st_dissolve(db, table, geomColumn, outTable, whrClause=None,
                diss_cols=None, outTblIsFile=None, api='sqlite'):
    """
    Dissolve a Polygon table
    
    db           - database connection (SpatiaLite path or PostgreSQL
                   connection parameters, depending on api)
    diss_cols    - optional column(s) to dissolve by (GROUP BY)
    outTblIsFile - export the result to a file instead of a new table
    api          - 'sqlite' or 'psql'
    """
    
    from gasp import goToList
    
    diss_cols = goToList(diss_cols) if diss_cols else None
    geomcol = "geometry" if api == 'sqlite' else 'geom'
    
    sql = (
        "SELECT{selCols} ST_UnaryUnion(ST_Collect({geom})) AS {gout} "
        "FROM {tbl}{whr}{grpBy}"
    ).format(
        selCols="" if not diss_cols else " {},".format(", ".join(diss_cols)),
        geom=geomColumn, tbl=table,
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        grpBy="" if not diss_cols else " GROUP BY {}".format(
            ", ".join(diss_cols)),
        gout=geomcol
    )
    
    if outTblIsFile:
        if api == 'sqlite':
            from gasp.anls.exct import sel_by_attr
            
            sel_by_attr(db, sql, outTable, api_gis='ogr')
        elif api == 'psql':
            from gasp.to.shp import psql_to_shp
            
            # FIX: tableIsQuery=True was paired with the raw table name,
            # exporting the un-dissolved table; pass the dissolve query
            psql_to_shp(
                db, sql, outTable, api='pgsql2shp',
                geom_col=geomColumn, tableIsQuery=True
            )
    else:
        from gasp.sql.mng.qw import ntbl_by_query
        
        ntbl_by_query(
            db, outTable, sql,
            api='ogr2ogr' if api == 'sqlite' else 'psql'
        )
    
    return outTable
def shape_to_rst_wShapeCheck(inShp, maxCellNumber, desiredCellsizes, outRst,
                             inEPSG):
    """
    Convert one Feature Class to Raster using the cellsizes included
    in desiredCellsizes. For each cellsize, check if the number of cells
    exceeds maxCellNumber. The raster with lower cellsize but lower than
    maxCellNumber will be the returned raster
    """
    
    import os
    from gasp import goToList
    from gasp.prop.rst import rst_shape
    
    desiredCellsizes = goToList(desiredCellsizes)
    if not desiredCellsizes:
        raise ValueError(
            'desiredCellsizes does not have a valid value'
        )
    
    workspace = os.path.dirname(outRst)
    
    # Create a test raster for every candidate cellsize
    # FIX: the loop variable was spelled 'cellSize' but referenced as
    # 'cellsize' in the shp_to_raster call (NameError at runtime)
    RASTERS = [shp_to_raster(
        inShp, cellSize, -1, os.path.join(
            workspace, 'tst_cell_{}.tif'.format(cellSize)
        ), inEPSG
    ) for cellSize in desiredCellsizes]
    
    # Replace (rows, cols) shape by the total cell count of each raster
    tstShape = rst_shape(RASTERS, gisApi='gdal')
    for rst in tstShape:
        NCELLS = tstShape[rst][0] * tstShape[rst][1]
        tstShape[rst] = NCELLS
    
    # Pick the first (smallest) cellsize whose raster respects the limit
    # (the redundant 'else: continue' was removed)
    NICE_RASTER = None
    for i in range(len(desiredCellsizes)):
        if tstShape[RASTERS[i]] <= maxCellNumber:
            NICE_RASTER = RASTERS[i]
            break
    
    if not NICE_RASTER:
        return None
    
    os.rename(NICE_RASTER, outRst)
    
    # Clean the remaining test rasters (the renamed one no longer exists)
    for rst in RASTERS:
        if os.path.isfile(rst) and os.path.exists(rst):
            os.remove(rst)
    
    return outRst
def copy_fromdb_todb(conFromDb, conToDb, tables, qForTbl=None, api='pandas'):
    """
    Send PGSQL Tables from one database to other
    
    api options:
    * 'pandas' - read each table into a DataFrame and write it back;
    * 'psql'   - use pg_dump/pg_restore.
    
    qForTbl (optional, pandas api) maps a table name to the query whose
    result should be copied instead of the whole table.
    """
    
    from gasp import goToList
    
    # Any unknown value falls back to the pandas api
    api = 'pandas' if api != 'pandas' and api != 'psql' else api
    
    tables = goToList(tables)
    
    if api == 'pandas':
        from gasp.fm.sql import query_to_df
        from gasp.to.sql import df_to_db
        
        for table in tables:
            # FIX: flattened the redundant nested ifs duplicating the call
            if qForTbl and table in qForTbl:
                tblDf = query_to_df(conFromDb, qForTbl[table],
                                    db_api='psql')
            else:
                tblDf = query_to_df(
                    conFromDb, "SELECT * FROM {}".format(table),
                    db_api='psql')
            
            df_to_db(conToDb, tblDf, table, api='psql')
    else:
        import os
        from gasp.oss.ops import create_folder, del_folder
        from gasp.sql.mng.tbl import dump_table
        from gasp.sql.mng.tbl import restore_table
        
        tmpFolder = create_folder(
            os.path.dirname(os.path.abspath(__file__)), randName=True)
        
        # FIX: delete the temporary folder even if dump/restore raises
        # (also dropped the unused 'tblname' binding)
        try:
            for table in tables:
                # Dump
                sqlScript = dump_table(
                    conFromDb, table,
                    os.path.join(tmpFolder, table + ".sql"))
                
                # Restore
                restore_table(conToDb, sqlScript, table)
        finally:
            del_folder(tmpFolder)
def trim_char_in_col(conParam, pgtable, cols, trim_str, outTable,
                     onlyTrailing=None, onlyLeading=None):
    """
    Python implementation of the TRIM PSQL Function
    
    The PostgreSQL trim function is used to remove spaces or set of
    characters from the leading or trailing or both side from a string.
    """
    
    from gasp import goToList
    from gasp.sql.mng.qw import ntbl_by_query
    
    cols = goToList(cols)
    
    colsTypes = get_columns_type(
        conParam, pgtable, sanitizeColName=None, pyType=False)
    
    # TRIM only works on textual columns
    for col in cols:
        if colsTypes[col] != 'text' and colsTypes[col] != 'varchar':
            raise ValueError('{} should be of type text'.format(col))
    
    # Columns copied unchanged
    colsToSelect = [_c for _c in colsTypes if _c not in cols]
    
    # Trim mode: both sides (default), TRAILING only, or LEADING only
    tail_lead_str = "" if not onlyTrailing and not onlyLeading else \
        "TRAILING " if onlyTrailing and not onlyLeading else \
        "LEADING " if not onlyTrailing and onlyLeading else ""
    
    trimCols = [
        "TRIM({tol}{char} FROM {c}) AS {c}".format(
            c=col, tol=tail_lead_str, char=trim_str
        ) for col in cols
    ]
    
    # FIX: the else-branch referenced an undefined name ('colsReplace')
    # and the final query interpolated the raw list 'colsToSelect'
    # instead of the assembled select string
    if not colsToSelect:
        cols_to_select = "{}".format(", ".join(trimCols))
    else:
        cols_to_select = "{}, {}".format(
            ", ".join(colsToSelect), ", ".join(trimCols))
    
    ntbl_by_query(
        conParam, outTable,
        "SELECT {} FROM {}".format(cols_to_select, pgtable), api='psql')
def get_distinct_values(lnk, pgtable, column):
    """
    Get distinct values in one column of one pgtable
    """
    
    from gasp import goToList
    from gasp.fm.sql import query_to_df
    
    # GROUP BY on the column(s) yields the distinct combinations
    sql = "SELECT {col} FROM {t} GROUP BY {col};".format(
        col=", ".join(goToList(column)), t=pgtable
    )
    
    distinctDf = query_to_df(lnk, sql, db_api='psql')
    
    return distinctDf.to_dict(orient="records")
def lst_layers(__mxd, dataFrames=None, lyrNames=None, storeDfs=None):
    """
    List layers objects in mxd
    
    Returns None when nothing matches, the single item when exactly one
    matches, otherwise the list. With storeDfs, items are (dataframe,
    layer) tuples instead of layers.
    """
    
    from gasp import goToList
    
    lyrNames = goToList(lyrNames)
    dataFrames = goToList(dataFrames)
    
    collected = []
    for df in arcpy.mapping.ListDataFrames(__mxd):
        # Skip dataframes the caller did not ask for
        if dataFrames and str(df.name) not in dataFrames:
            continue
        
        dfLayers = arcpy.mapping.ListLayers(__mxd, data_frame=df)
        
        if lyrNames:
            dfLayers = [l for l in dfLayers if l.name in lyrNames]
        
        if storeDfs:
            dfLayers = [(df, l) for l in dfLayers]
        
        collected += dfLayers
    
    if not collected:
        return None
    
    return collected[0] if len(collected) == 1 else collected