def distinct_to_table(db, pgtable, outable, cols=None):
    """
    Distinct values of one column to a new table

    Writes the distinct values of the given column(s) of pgtable into a
    new table (outable) and returns the new table name.
    """

    from gasp.pyt import obj_to_lst
    from gasp.sql.c import sqlcon

    cols = obj_to_lst(cols)

    # No columns given -> use every column of the source table
    if not cols:
        from gasp.sql.i import cols_name
        cols = cols_name(db, pgtable, api='psql')

    col_str = ', '.join(cols)
    sql = ("CREATE TABLE {nt} AS "
           "SELECT {cls} FROM {t} GROUP BY {cls}").format(
        nt=outable, cls=col_str, t=pgtable)

    conn = sqlcon(db)
    cursor = conn.cursor()
    cursor.execute(sql)
    conn.commit()
    cursor.close()
    conn.close()

    return outable
def select_main_geom_type(db, table, outbl, geomCol='geom'):
    """
    Assuming a table with several geometry types, this method
    counts the rows for each geometry type and select the rows with a
    geometry type with more rows
    """

    from gasp.sql.to import q_to_ntbl
    from gasp.sql.i import cols_name

    # Every attribute column except the geometry one
    all_cols = cols_name(db, table, sanitizeSpecialWords=None)
    keep_cols = [c for c in all_cols if c != geomCol]

    # Dump geometries, count rows per geometry type and keep only the
    # rows whose type has the maximum count
    query = (
        "SELECT {cols}, {geomcol} FROM ("
            "SELECT *, MAX(jtbl.geom_cont) OVER (PARTITION BY "
            "jtbl.tst) AS max_cnt FROM ("
                "SELECT {cols}, (ST_Dump({geomcol})).geom AS {geomcol}, "
                "ST_GeometryType((ST_Dump({geomcol})).geom) AS geom_type "
                "FROM {tbl}"
            ") AS foo INNER JOIN ("
                "SELECT ST_GeometryType((ST_Dump({geomcol})).geom) AS gt, "
                "COUNT(ST_GeometryType((ST_Dump({geomcol})).geom)) AS geom_cont, "
                "1 AS tst FROM {tbl} GROUP BY ST_GeometryType((ST_Dump({geomcol})).geom)"
            ") AS jtbl ON foo.geom_type = jtbl.gt"
        ") AS foo WHERE geom_cont = max_cnt"
    ).format(cols=", ".join(keep_cols), geomcol=geomCol, tbl=table)

    return q_to_ntbl(db, outbl, query, api='psql')
def split_colval_into_cols(db_name, table, column, splitChar, new_cols,
                           new_table):
    """
    Split column value into several columns

    Creates new_table with every column of table plus one new column per
    entry in new_cols, filled with split_part(column, splitChar, i).

    Returns the name of the created table.

    Raises ValueError if new_cols is not a list with 2+ elements.
    """

    from gasp.sql.i import cols_name
    # BUGFIX: q_to_ntbl was used below but never imported (NameError)
    from gasp.sql.to import q_to_ntbl

    if not isinstance(new_cols, list):
        raise ValueError('new_cols should be a list')

    if len(new_cols) < 2:
        raise ValueError('new_cols should have 2 or more elements')

    # Get column names from table
    tblCols = cols_name(db_name, table)

    # SQL construction: split_part is 1-indexed in PostgreSQL
    SQL = "SELECT {}, {} FROM {}".format(
        ", ".join(tblCols),
        ", ".join([
            "split_part({}, '{}', {}) AS {}".format(
                column, splitChar, i + 1, ncol
            ) for i, ncol in enumerate(new_cols)
        ]),
        table
    )

    q_to_ntbl(db_name, new_table, SQL, api='psql')

    return new_table
def change_field_type(db, table, fields, outable, cols=None):
    """
    Imagine a table with numeric data saved as text. This method convert
    that numeric data to a numeric field.

    fields = {'field_name' : 'field_type'}

    Creates outable with all columns in cols (default: every column of
    table); columns listed in fields are CAST to the given type.

    Returns the name of the created table.
    """

    # BUGFIX: sqlcon was used below but never imported (NameError)
    from gasp.sql.c import sqlcon
    from gasp.sql.i import cols_name

    if not cols:
        cols = cols_name(db, table)
    else:
        from gasp.pyt import obj_to_lst
        cols = obj_to_lst(cols)

    # Columns copied as-is (not converted)
    select_fields = [f for f in cols if f not in fields]

    # Create new table with the new field with converted values
    con = sqlcon(db)
    cursor = con.cursor()
    cursor.execute(('CREATE TABLE {} AS SELECT {}, {} FROM {}').format(
        outable,
        ', '.join(select_fields),
        ', '.join([
            'CAST({f_} AS {t}) AS {f_}'.format(f_=f, t=fields[f])
            for f in fields
        ]),
        table
    ))
    con.commit()
    cursor.close()
    con.close()

    return outable
def q_to_obj(dbname, query, db_api='psql', geomCol=None, epsg=None,
             of='df', cols=None):
    """
    Query database and convert data to Pandas Dataframe/GeoDataFrame

    API's Available:
    * psql;
    * sqlite;
    * mysql;

    output format options ("of" parameter):
    * df (Pandas Dataframe);
    * dict (Python Dict);
    """

    # A value not starting with 'SELECT ' is taken as a table name;
    # build a query selecting the requested (or all) columns of it.
    if not query.startswith('SELECT '):
        from gasp.pyt import obj_to_lst
        from gasp.sql.i import cols_name

        if not cols:
            cols = cols_name(dbname, query)
        else:
            cols = obj_to_lst(cols)

        sel = ", ".join([
            "{t}.{c} AS {c}".format(t=query, c=cname) for cname in cols
        ])
        query = "SELECT {} FROM {}".format(sel, query)

    if geomCol:
        # Spatial result -> GeoDataFrame
        from geopandas import GeoDataFrame
        from gasp.sql.c import sqlcon

        con = sqlcon(dbname, sqlAPI='psql')
        data = GeoDataFrame.from_postgis(
            query, con, geom_col=geomCol,
            crs="epsg:{}".format(str(epsg)) if epsg else None
        )
    else:
        # Tabular result -> DataFrame
        import pandas
        from gasp.sql.c import alchemy_engine

        engine = alchemy_engine(dbname, api=db_api)
        data = pandas.read_sql(query, engine, columns=None)

    return data.to_dict(orient="records") if of == 'dict' else data
def sql_proj(dbname, tbl, otbl, oepsg, cols=None, geomCol=None,
             newGeom=None, whr=None, new_pk=None):
    """
    Reproject geometric layer to another spatial reference system (srs)

    Writes the result into otbl and returns its name; optionally adds a
    primary key (new_pk) to the new table.
    """

    from gasp.pyt import obj_to_lst
    from gasp.sql.to import q_to_ntbl

    geomCol = geomCol if geomCol else 'geom'
    newGeom = newGeom if newGeom else 'geom'

    if cols:
        cols = obj_to_lst(cols)
        # Keep the original geometry as "old_<geom>" when the requested
        # output geometry column would clash with it
        if geomCol in cols and geomCol == newGeom:
            cols.remove(geomCol)
            cols.append('{c} AS old_{c}'.format(c=geomCol))
    else:
        from gasp.sql.i import cols_name
        cols = cols_name(dbname, tbl)
        cols.remove(geomCol)

    where_sql = " WHERE {}".format(whr) if whr else ""
    Q = ("SELECT {}, ST_Transform({}, {}) AS {} "
         "FROM {}{}").format(
        ", ".join(cols), geomCol, str(oepsg), newGeom, tbl, where_sql)

    otbl = q_to_ntbl(dbname, otbl, Q, api='psql')

    if new_pk:
        from gasp.sql.k import create_pk
        create_pk(dbname, otbl, new_pk)

    return otbl
def add_endpnt_to_tbl(db, inTable, outTable,
                      idCol='gid', geomCol='geom',
                      startCol="start_vertex", endCol="end_vertex"):
    """
    Add start/end points columns to table

    Dumps every vertex of each line in inTable, keeps only the first and
    last vertex per feature (idCol), and writes outTable with the original
    columns plus startCol/endCol point geometries. Returns the new table
    name (result of q_to_ntbl).
    """
    
    from gasp.sql.to import q_to_ntbl
    from gasp.sql.i import cols_name
    
    # Innermost query: one row per vertex (pnt_idx = vertex order).
    # Middle query: flag first vertex and last vertex (pnt_cat = 1).
    # Outer query: pair each start vertex with the following flagged
    # vertex via lead() so each row holds start + end point.
    return q_to_ntbl(db, outTable, (
        "SELECT {cols}, {stPnt}, {endPnt} FROM ("
            "SELECT *, lead({stPnt}) OVER ("
                "PARTITION BY {colId} ORDER BY pnt_idx) AS {endPnt} "
            "FROM ("
                "SELECT {cols}, pnt_idx, {stPnt}, "
                "CASE "
                    "WHEN pnt_idx = 1 OR pnt_idx = MAX(pnt_idx) "
                        "OVER (PARTITION BY {colId}) "
                    "THEN 1 ELSE 0 END AS pnt_cat "
                "FROM ("
                    "SELECT {cols}, "
                    "(ST_DumpPoints({geomF})).path[1] AS pnt_idx, "
                    "(ST_DumpPoints({geomF})).geom AS {stPnt} "
                    "FROM {table}"
                ") AS foo"
            ") AS foo2 "
            "WHERE pnt_cat = 1"
        ") AS foo3 "
        "WHERE {endPnt} IS NOT NULL "
        "ORDER BY {colId}, pnt_idx"
    ).format(
        cols=", ".join(cols_name(db, inTable)),
        stPnt=startCol, endPnt=endCol,
        colId=idCol, geomF=geomCol, table=inTable
    ), api='psql')
def split_lines_on_pnt(db, inTbl, pntTbl, outTbl, idlnhPnt, lnhid):
    """
    Split lines on point locations

    For each line in inTbl, collects the points of pntTbl whose idlnhPnt
    matches the line's lnhid and uses ST_Split to break the line at those
    points. Lines with no matching points are kept unchanged.

    Returns the name of the created table.
    """

    from gasp.sql.i import cols_name
    from gasp.sql.to import q_to_ntbl

    # Get cols of lnhTbl (everything except the geometry and join key)
    cols = ", ".join([
        c for c in cols_name(db, inTbl, sanitizeSpecialWords=True, api='psql')
        if c != 'geom' and c != idlnhPnt
    ])

    # Force MultiLineString to LineString.
    # BUGFIX: the sub-query must be wrapped in parentheses so it can be
    # used as a FROM target aliased as "mtbl" in the query below; without
    # the leading "(" the generated SQL was invalid.
    sanQ = (
        "(SELECT {lid}, {cln}, (ST_Dump(geom)).geom AS geom "
        "FROM {t}) AS mtbl"
    ).format(lid=lnhid, cln=cols, t=inTbl)

    # Split Query: LEFT JOIN collected split-points per line; lines with
    # no points keep their original geometry.
    Q = (
        "SELECT {lid}, {cln}, (ST_Dump(geom)).geom AS geom FROM ("
            "SELECT mtbl.{lid}, {cln}, "
            "CASE "
                "WHEN jtbl.{pid} IS NULL THEN mtbl.geom "
                "ELSE ST_Split(mtbl.geom, jtbl.geom) "
            "END AS geom "
            "FROM {lnh_tbl} LEFT JOIN ("
                "SELECT {pid}, ST_Collect(geom) AS geom "
                "FROM {pnt_tbl} "
                "GROUP BY {pid}"
            ") AS jtbl on mtbl.{lid} = jtbl.{pid}"
        ") AS foo"
    ).format(
        lid=lnhid, cln=cols, pid=idlnhPnt,
        lnh_tbl=sanQ, pnt_tbl=pntTbl
    )

    # Produce new table and return it (api='psql' for consistency with
    # the other q_to_ntbl calls in this module)
    return q_to_ntbl(db, outTbl, Q, api='psql')
def split_table_by_range(db, table, row_number):
    """
    Split tables in several

    Creates tables named <table>_0, <table>_1, ... each holding at most
    row_number rows of table (ordered by all its columns).

    Returns the list of created table names.
    """

    from gasp.sql.i import cols_name, row_num
    from gasp.sql.to import q_to_ntbl

    rowsN = row_num(db, table, api='psql')

    # Ceiling division. BUGFIX: the previous
    # "int(rowsN / float(row_number)) + 1" created one extra, empty table
    # whenever rowsN was an exact multiple of row_number (and one empty
    # table for an empty source table).
    nrTables = (rowsN + row_number - 1) // row_number

    COLS = cols_name(db, table)

    newTables = []
    offset = 0
    for i in range(nrTables):
        ntbl = '{}_{}'.format(table, str(i))
        q_to_ntbl(
            db, ntbl,
            "SELECT * FROM {} ORDER BY {} OFFSET {} LIMIT {} ;".format(
                table, ', '.join(COLS), str(offset), str(row_number)
            ),
            api='psql'
        )
        newTables.append(ntbl)
        offset += row_number

    return newTables
def get_stop_words(inTbl, fidCol, txtCol, outFile, lang='portuguese',
                   inSheet=None, db=None):
    """
    Pick a text column and save it in a new column only with the stop words.
    
    Uses PostgreSQL dictionaries to get stop words
    """
    
    from gasp.pyt.oss import fprop
    from gasp.sql.i import cols_name
    from gasp.sql.db import create_db
    from gasp.sql.to import tbl_to_db
    from gasp.to import db_to_tbl
    
    FILENAME = fprop(inTbl, 'fn')
    
    # Create Temp database
    db = create_db("db_" + FILENAME if not db else db)
    
    # Send table to PostgreSQL
    tbl = tbl_to_db(inTbl, db, FILENAME, sheet=inSheet, api_db='psql')
    
    cols = cols_name(db, tbl, sanitizeSpecialWords=None, api='psql')
    
    # Sanitize data and create a new column only with stop words:
    # strip URLs and non-word characters, lowercase/unaccent the text,
    # then tokenize it with to_tsvector using the given language dictionary.
    Q1 = ("(SELECT *, to_tsvector('{_lang}', regexp_replace("
        "regexp_replace(lower(unaccent({txt_c})), 'http://[^:\s]+(\S+)', "
        "' ', 'g'), '[^\w]+', ' ', 'g')) "
        "AS txt_data FROM {t}) AS stop_table").format(
        _lang=lang, txt_c=txtCol, t=tbl)
    
    # Unnest the tsvector into (word, positions) pairs via string surgery
    # on its text form, then re-aggregate words in positional order into
    # the "clean_<txtCol>" column ('*' separates the aggregated words).
    Q2 = ("SELECT {selCols}, ARRAY_TO_STRING(array_agg("
        "word ORDER BY word_index), ' ', '*') AS {outCol}, "
        "REPLACE(CAST(STRIP("
        "stop_table.txt_data) AS text), '''', '') AS no_duplicated "
        "FROM ("
            "SELECT fid, word, CAST(UNNEST(word_index) AS integer) "
            "AS word_index FROM ("
                "SELECT fid, SPLIT_PART(tst, ';', 1) AS word, "
                "STRING_TO_ARRAY(SPLIT_PART(tst, ';', 2), ',') AS word_index FROM ("
                    "SELECT {fid} AS fid, REPLACE(REPLACE(REPLACE(REPLACE(REPLACE("
                    "CAST(UNNEST(txt_data) AS text), "
                    "',{{', ',\"{{'), ',\"{{', ';'), '}}\"', ''), "
                    "'(', ''), '}}', '') AS tst "
                    "FROM {tbl}"
                ") AS foo"
            ") AS foo2"
        ") AS foo3 INNER JOIN {tbl} ON foo3.fid = stop_table.{fid} "
        "GROUP BY {selCols}, stop_table.txt_data").format(
        outCol="clean_" + txtCol, tbl=Q1, fid=fidCol,
        selCols=", ".join(["stop_table.{}".format(i) for i in cols]))
    
    # Export new table
    return db_to_tbl(db, Q2, outFile, sheetsNames=inSheet)
def osm_to_relationaldb(osmData, inSchema, osmGeoTbl, osmCatTbl, osmRelTbl,
                        outSQL=None, db_name=None):
    """
    PostgreSQL - OSM Data to Relational Model
    
    TODO: Just work for one geom table at once
    
    E.g.
    osmData = '/home/jasp/flainar/osm_centro.xml'
    
    inSchema = {
        "TBL" : ['points', 'lines', 'multipolygons'],
        'FID' : 'CAST(osm_id AS bigint)',
        "COLS" : [
            'name',
            "ST_X(wkb_geometry) AS longitude",
            "ST_Y(wkb_geometry) AS latitude",
            "wkb_geometry AS geom",
            "NULL AS featurecategoryid",
            "NULL AS flainarcategoryid",
            "NULL AS createdby",
            "NOW() AS createdon",
            "NULL AS updatedon",
            "NULL AS deletedon"
        ],
        "NOT_KEYS" : [
            'ogc_fid', 'osm_id', 'name', "wkb_geometry",
            'healthcare2', 'other_tags'
        ]
    }
    
    osmGeoTbl = {"TBL" : 'position', "FID" : 'positionid'}
    
    osmCatTbl = {
        "TBL" : 'osmcategory', "FID" : "osmcategoryid",
        "KEY_COL" : "keycategory", "VAL_COL" : "value",
        "COLS" : [
            "NULL AS createdby", "NOW() AS createdon",
            "NULL AS updatedon", "NULL AS deletedon"
        ]
    }
    
    osmRelTbl = {"TBL" : "position_osmcat", "FID" : 'pososmcatid'}
    """
    
    from gasp.pyt import obj_to_lst
    from gasp.pyt.oss import fprop
    from gasp.sql.i import cols_name
    from gasp.sql.to import q_to_ntbl
    from gasp.sql.db import create_db
    
    inSchema["TBL"] = obj_to_lst(inSchema["TBL"])
    
    # Create DB
    db = create_db(fprop(osmData, 'fn') if not db_name else db_name,
                   api='psql')
    
    # Send OSM data to Database
    # NOTE(review): osm_to_psql is not imported in this function; it is
    # presumably defined or imported elsewhere in this module - confirm.
    osm_to_psql(osmData, db)
    
    # Get KEYS COLUMNS (OSM tag columns that will become categories)
    transcols = {}
    for tbl in inSchema["TBL"]:
        transcols[tbl] = [c for c in cols_name(
            db, tbl, sanitizeSpecialWords=None
        ) if c not in inSchema["NOT_KEYS"]]
    
    # Create osmGeoTbl - one geometry/feature table per input table
    osmgeotbl = [q_to_ntbl(db, osmGeoTbl[tbl]['TBL'], (
        "SELECT {} AS {}, {} FROM {}"
    ).format(
        inSchema["FID"], osmGeoTbl[tbl]["FID"],
        ", ".join(inSchema["COLS"]), tbl
    ), api='psql') for tbl in inSchema["TBL"]]
    
    # Create OSM categories table: one row per distinct (key, value) pair
    qs = []
    for tbl in inSchema["TBL"]:
        qs.extend([(
            "SELECT '{keyV}' AS {keyC}, CAST({t}.{keyV} AS text) AS {valC} "
            "FROM {t} WHERE {t}.{keyV} IS NOT NULL "
            "GROUP BY {t}.{keyV}"
        ).format(
            keyV=c, t=tbl,
            keyC=osmCatTbl["KEY_COL"], valC=osmCatTbl["VAL_COL"]
        ) for c in transcols[tbl]])
    
    # When several input tables exist, de-duplicate the unioned pairs
    # before numbering them with row_number()
    osmcatbl = q_to_ntbl(db, osmCatTbl["TBL"], (
        "SELECT row_number() OVER(ORDER BY {keyC}) "
        "AS {osmcatid}, {keyC}, {valC}{ocols} "
        "FROM ({q}) AS foo"
    ).format(
        q="SELECT {k}, {v} FROM ({t}) AS kvtbl GROUP BY {k}, {v}".format(
            k=osmCatTbl["KEY_COL"], v=osmCatTbl["VAL_COL"],
            t=" UNION ALL ".join(qs),
        ) if len(inSchema["TBL"]) > 1 else " UNION ALL ".join(qs),
        keyC=osmCatTbl["KEY_COL"],
        osmcatid=osmCatTbl["FID"],
        valC=osmCatTbl["VAL_COL"],
        ocols="" if "COLS" not in osmCatTbl else ", {}".format(
            ", ".join(osmCatTbl["COLS"]))
    ), api='psql')
    
    # Create relation table (feature <-> category, many-to-many)
    osmreltbl = []
    for tbl in inSchema["TBL"]:
        qs = [(
            "SELECT {fid}, '{keyV}' AS key, CAST({t}.{keyV} AS text) AS osmval "
            "FROM {t} WHERE {t}.{keyV} IS NOT NULL"
        ).format(fid=inSchema["FID"], keyV=c, t=tbl) for c in transcols[tbl]]
    
        osmreltbl.append(q_to_ntbl(db, osmRelTbl[tbl]["TBL"], (
            "SELECT foo.{fid} AS {nfid}, catbl.{osmcatfid} "
            "FROM ({mtbl}) AS foo INNER JOIN {catTbl} AS catbl "
            "ON foo.key = catbl.{catkey} AND foo.osmval = catbl.{catval}"
        ).format(
            mtbl=" UNION ALL ".join(qs), fid=inSchema["FID"],
            nfid=osmRelTbl[tbl]["FID"], catTbl=osmCatTbl["TBL"],
            osmcatfid=osmCatTbl["FID"], catkey=osmCatTbl["KEY_COL"],
            catval=osmCatTbl["VAL_COL"]
        ), api='psql'))
    
    if not outSQL:
        return osmgeotbl, osmcatbl, osmreltbl
    else:
        # Dump the created tables to a SQL file instead of returning names
        from gasp.sql.fm import dump_tbls
        return dump_tbls(db, osmgeotbl + [osmcatbl] + osmreltbl, outSQL)
def check_endpoint_ispoint(db, lnhTable, pntTable, outTable, nodeStart,
                           nodeEnd, pointId, pntGeom="geom"):
    """
    Check if a Start/End point in a table with line geometries is a point
    in other table.
    
    Coordinates are compared after rounding to 4 decimal places and
    scaling by 10000 (integer comparison), to avoid float-equality issues.
    Output columns start_isstop/end_isstop are 1 when the corresponding
    vertex matches a point of pntTable, plus the matched point ids.
    """
    
    from gasp.sql.to import q_to_ntbl
    from gasp.sql.i import cols_name
    
    # All line-table columns except the start/end vertex geometry columns
    tCols = [x for x in cols_name(
        db, lnhTable
    ) if x != nodeStart and x != nodeEnd]
    
    return q_to_ntbl(db, outTable, (
        "SELECT * FROM ("
            "SELECT {fooCols}, foo.{stPnt}, foo.{endPnt}, "
            "CASE "
                "WHEN start_tbl.start_x IS NOT NULL THEN 1 ELSE 0 "
            "END AS start_isstop, "
            "CASE "
                "WHEN end_tbl.end_x IS NOT NULL THEN 1 ELSE 0 "
            "END AS end_isstop, start_tbl.start_id, end_tbl.end_id "
            "FROM ("
                "SELECT *, "
                "CAST(((round(CAST(ST_X({stPnt}) AS numeric), 4)) * 10000) "
                    "AS integer) AS start_x, "
                "CAST(((round(CAST(ST_Y({stPnt}) AS numeric), 4)) * 10000) "
                    "AS integer) AS start_y, "
                "CAST(((round(CAST(ST_X({endPnt}) AS numeric), 4)) * 10000) "
                    "AS integer) AS end_x, "
                "CAST(((round(CAST(ST_Y({endPnt}) AS numeric), 4)) * 10000) "
                    "AS integer) AS end_y "
                "FROM {lnhT}"
            ") AS foo "
            "LEFT JOIN ("
                "SELECT CAST(((round(CAST(ST_X({pntG}) AS numeric), 4)) "
                "* 10000) AS integer) AS start_x, "
                "CAST(((round(CAST(ST_Y({pntG}) AS numeric), 4)) "
                "* 10000) AS integer) AS start_y, "
                "{pntid} AS start_id FROM {pntT}"
            ") AS start_tbl "
            "ON foo.start_x = start_tbl.start_x AND "
            "foo.start_y = start_tbl.start_y "
            "LEFT JOIN ("
                "SELECT CAST(((round(CAST(ST_X({pntG}) AS numeric), 4)) "
                "* 10000) AS integer) AS end_x, "
                "CAST(((round(CAST(ST_Y({pntG}) AS numeric), 4)) "
                "* 10000) as integer) AS end_y, "
                "{pntid} AS end_id FROM {pntT}"
            ") AS end_tbl "
            "ON foo.end_x = end_tbl.end_x AND foo.end_y = end_tbl.end_y"
        ") AS foo2 "
        "GROUP BY {cols}, {stPnt}, {endPnt}, start_isstop, end_isstop, "
        "start_id, end_id"
    ).format(
        fooCols = ", ".join(["foo.{}".format(c) for c in tCols]),
        stPnt = nodeStart, endPnt = nodeEnd,
        lnhT = lnhTable, pntT = pntTable,
        pntG = pntGeom, cols = ", ".join(tCols),
        pntid=pointId
    ), api='psql')
def pgtables_to_layer_withStyle_by_col(pgtables, sldData, db, workName=None,
                                       storeName=None, sldGeom='Polygon',
                                       DATATYPE='QUANTITATIVE',
                                       TABLE_DESIGNATION=None,
                                       COL_DESIGNATION=None,
                                       exclude_cols=None,
                                       pathToSLDfiles=None):
    """
    Create a new Geoserver Workspace, create a postgis store and one layer
    for each table in 'pgtables'. Each layer will have multiple styles -
    one style by column in it.

    Parameters:

    1) pgtables - List of PSQL tables to be transformed as Geoserver Layers

    2) sldData - xls table with the styles specifications.

    For QUANTITATIVE DATA the table should have two sheets: one for colors
    and other for intervals:

    COLORS SHEET STRUCTURE (Sheet Index = 0):
    cls_id | R | G | B | STROKE_R | STROKE_G | STROKE_B | STROKE_W
       1   | X | X | X |     X    |     X    |     X    |    1
       ...

    INTERVALS SHEET STRUCTURE (Sheet Index = 1):
          | 0 | 1 |  2 |  3 |  4 |  5
    col_0 | 0 | 5 | 10 | 15 | 20 | 25
       ...

    For CATEGORICAL DATA the table should have only one sheet:

    CATEGORICAL SHEET STRUCTURE
           | R | G | B | STROKE_R | STROKE_G | STROKE_B | STROKE_W
    attr_1 | X | X | X |     X    |     X    |     X    |    1
       ...

    3) db - PostgreSQL database name

    4) workName - name of the Geoserver workspace that will be created

    5) storeName - name of the Geoserver store that will be created

    6) sldGeom - Data Geometry: 'Polygon' or 'Line'

    7) DATATYPE='QUANTITATIVE' | 'CATEGORICAL'

    8) TABLE_DESIGNATION - Table with the association between pgtables
    name and the designation to be used to name the Geoserver Layer.

    9) COL_DESIGNATION - xls table with association between each column
    and one style name (no blank characters).

    10) exclude_cols - columns that will NOT get a style.

    11) pathToSLDfiles - folder where the generated SLD files are stored.
    """

    import os

    from gasp.pyt import obj_to_lst
    from gasp.fm import tbl_to_obj
    from gasp.pyt.oss import mkdir
    from gasp.sql.i import cols_name
    from gasp.web.geosrv.ws import create_ws
    from gasp.web.geosrv.stores import create_pgstore
    from gasp.web.geosrv.lyrs import pub_pglyr
    from gasp.web.geosrv.sty import create_style
    from gasp.web.geosrv.sty import lst_styles
    from gasp.web.geosrv.sty import del_style
    from gasp.web.geosrv.sty import assign_style_to_layer
    from gasp.web.geosrv.sld import write_sld

    # Sanitize PGtables
    pgtables = obj_to_lst(pgtables)
    if not pgtables:
        raise ValueError('pgtables value is not valid!')

    exclude_cols = obj_to_lst(exclude_cols)

    STY_DESIGNATION = tbl_to_obj(
        COL_DESIGNATION, useFirstColAsIndex=True, output='dict',
        colsAsArray=True
    ) if COL_DESIGNATION else None

    LYR_DESIGNATION = tbl_to_obj(
        TABLE_DESIGNATION, useFirstColAsIndex=True, output='dict',
        colsAsArray=True
    ) if TABLE_DESIGNATION else None

    # Get intervals and colors data
    if DATATYPE == 'QUANTITATIVE':
        if not os.path.exists(sldData):
            raise ValueError(('{} is not a valid file').format(sldData))

        FF = os.path.splitext(sldData)[1]
        if FF != '.xls' and FF != '.xlsx':
            raise ValueError(
                ('At the moment, for DATATYPE QUANTITATIVE, sldData '
                 'has to be a xls table'))

        colorsDict = tbl_to_obj(
            sldData, sheet=0, useFirstColAsIndex=True, output='dict')
        intervalsDict = tbl_to_obj(
            sldData, sheet=1, useFirstColAsIndex=True, output='dict')

    elif DATATYPE == 'CATEGORICAL':
        if not os.path.exists(sldData):
            raise ValueError(('{} is not a valid file').format(sldData))

        # BUGFIX: os.path.splitext keeps the dot ('.xls'), so the old
        # comparison with 'xls' never matched and every valid file was
        # rejected; also accept '.xlsx' like the QUANTITATIVE branch.
        if os.path.splitext(sldData)[1] not in ('.xls', '.xlsx'):
            raise ValueError(
                ('At the moment, for DATATYPE CATEGORICAL, sldData '
                 'has to be a xls table'))

        colorsDict = tbl_to_obj(
            sldData, sheet=0, useFirstColAsIndex=True, output='dict')

    else:
        raise ValueError('{} is not avaiable at the moment'.format(DATATYPE))

    # Create Workspace
    workName = 'w_{}'.format(db) if not workName else workName
    create_ws(workName, overwrite=True)

    # Create Store
    storeName = db if not storeName else storeName
    create_pgstore(storeName, workName, db)

    # Create folder for sld's
    wTmp = mkdir(os.path.join(
        os.path.dirname(sldData), 'sldFiles'
    )) if not pathToSLDfiles else pathToSLDfiles

    # List styles in geoserver
    STYLES = lst_styles()

    # For each table in PGTABLES
    for PGTABLE in pgtables:
        # Publish Postgis table
        TITLE = None if not LYR_DESIGNATION else LYR_DESIGNATION[PGTABLE][0]
        pub_pglyr(workName, storeName, PGTABLE, title=TITLE)

        # List PGTABLE columns
        pgCols = cols_name(db, PGTABLE)

        # For each column
        for col in pgCols:
            if exclude_cols and col in exclude_cols:
                continue

            STYLE_NAME = '{}_{}'.format(
                PGTABLE, STY_DESIGNATION[col][0]
            ) if STY_DESIGNATION else '{}_{}'.format(PGTABLE, col)

            if STYLE_NAME in STYLES:
                del_style(STYLE_NAME)

            # Create Object with association between colors and intervals
            d = {}
            # NOTE(review): assumes the colors sheet has an OPACITY column
            # (not shown in the docstring layout) - confirm.
            OPACITY = str(colorsDict[1]['OPACITY'])
            for i in colorsDict:
                d[i] = {
                    'R': colorsDict[i]['R'],
                    'G': colorsDict[i]['G'],
                    'B': colorsDict[i]['B']
                }

                if DATATYPE == 'QUANTITATIVE':
                    d[i]['min'] = intervalsDict[col][i - 1]
                    d[i]['max'] = intervalsDict[col][i]

                if 'STROKE_R' in colorsDict[i] and \
                        'STROKE_G' in colorsDict[i] and \
                        'STROKE_B' in colorsDict[i]:
                    d[i]['STROKE'] = {
                        'R': colorsDict[i]['STROKE_R'],
                        'G': colorsDict[i]['STROKE_G'],
                        'B': colorsDict[i]['STROKE_B']
                    }

                    if 'STROKE_W' in colorsDict[i]:
                        d[i]['STROKE']['WIDTH'] = colorsDict[i]['STROKE_W']

            # Create SLD
            sldFile = write_sld(
                col, d, os.path.join(wTmp, '{}.sld'.format(col)),
                geometry=sldGeom, DATA=DATATYPE, opacity=OPACITY)

            # Create Style
            create_style(STYLE_NAME, sldFile)

            # Apply SLD
            assign_style_to_layer(STYLE_NAME, PGTABLE)