Example #1
File: split.py Project: zonakre/gasp
def split_table_by_range(conP, table, row_number):
    """
    Split a table into several tables, each with at most row_number rows
    """

    from gasp.sql.mng.fld import cols_name
    from gasp.sql.mng.tbl import row_num
    from gasp.sql.mng.qw import ntbl_by_query

    rowsN = row_num(conP, table, api='psql')

    nrTables = int(rowsN / float(row_number)) + 1

    COLS = cols_name(conP, table)

    offset = 0
    for i in range(nrTables):
        ntbl_by_query(
            conP,
            '{}_{}'.format(table, str(i)),
            "SELECT * FROM {} ORDER BY {} OFFSET {} LIMIT {} ;".format(
                table, ', '.join(COLS), str(offset), str(row_number)),
            api='psql')

        offset += row_number
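
A minimal usage sketch for the function above; the connection dict layout follows the one used in the later examples, and the table name and chunk size are hypothetical:

# Hypothetical connection parameters (same dict layout as the other examples)
conP = {
    'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
    'PASSWORD': 'admin', 'DATABASE': 'mydb'
}

# Split 'big_table' into tables big_table_0, big_table_1, ...
# each holding at most 50000 rows
split_table_by_range(conP, 'big_table', 50000)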
Example #2
def del_topoerror_shps(conParam, shps, epsg, outfolder):
    """
    Remove topological errors from Feature Class data using PostGIS
    """
    
    import os
    from gasp             import goToList
    from gasp.sql.mng.fld import cols_name
    from gasp.sql.mng.qw  import ntbl_by_query
    from gasp.to.sql      import shp_to_psql
    from gasp.to.shp      import psql_to_shp
    
    shps = goToList(shps)
    
    TABLES = shp_to_psql(conParam, shps, epsg, api="shp2pgsql")
    
    NTABLE = [ntbl_by_query(
        conParam, "nt_{}".format(t),
        "SELECT {cols}, ST_MakeValid({tbl}.geom) AS geom FROM {tbl}".format(
            cols = ", ".join(["{}.{}".format(TABLES[t], x) for x in cols_name(
                conParam, TABLES[t], sanitizeSpecialWords=None
            ) if x != 'geom']),
            tbl=TABLES[t]
        ), api='psql'
    ) for t in range(len(TABLES))]
    
    for t in range(len(NTABLE)):
        psql_to_shp(
            conParam, NTABLE[t],
            os.path.join(outfolder, TABLES[t]), tableIsQuery=None,
            api='pgsql2shp', geom_col="geom"
        )
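
A usage sketch, assuming the function above is in scope; the shapefile paths, EPSG code and output folder are hypothetical:

conParam = {
    'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
    'PASSWORD': 'admin', 'DATABASE': 'gisdb'
}

# Import the shapefiles, run ST_MakeValid on their geometries and
# export the corrected Feature Classes to /tmp/fixed
del_topoerror_shps(
    conParam, ['/data/roads.shp', '/data/parcels.shp'],
    3763, '/tmp/fixed'
)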
Example #3
def copy_fromdb_todb(conFromDb, conToDb, tables, qForTbl=None):
    """
    Send PGSQL tables from one database to another
    """

    import pandas
    from gasp import goToList
    from gasp.fm.sql import query_to_df
    from gasp.sql.mng.fld import cols_name
    from gasp.to.sql import df_to_db

    tables = goToList(tables)

    for table in tables:
        cols = cols_name(conFromDb, table)

        if not qForTbl:
            tblDf = query_to_df(conFromDb,
                                "SELECT {} FROM {}".format(
                                    ", ".join(cols), table),
                                db_api='psql')

        else:
            if table not in qForTbl:
                tblDf = query_to_df(conFromDb,
                                    "SELECT {} FROM {}".format(
                                        ", ".join(cols), table),
                                    db_api='psql')

            else:
                tblDf = query_to_df(conFromDb, qForTbl[table], db_api='psql')

        df_to_db(conToDb, tblDf, table, api='psql')
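
Sketch of a call using the optional qForTbl mapping; connection dicts, table names and the query are hypothetical:

conA = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
        'PASSWORD': 'admin', 'DATABASE': 'db_source'}
conB = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
        'PASSWORD': 'admin', 'DATABASE': 'db_target'}

# 'streets' is copied as-is; 'census' is copied through a custom query
copy_fromdb_todb(
    conA, conB, ['streets', 'census'],
    qForTbl={'census': "SELECT gid, pop_total FROM census WHERE pop_total > 0"}
)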
Example #4
def pg_erase(conParam, inTbl, eraseTbl, inGeom, eraseGeom, outTbl):
    """
    Erase: clip away from inTbl geometries the areas covered by eraseTbl
    geometries (PostGIS ST_Difference)
    """
    
    from gasp.sql.mng.fld import cols_name
    from gasp.sql.mng.qw import ntbl_by_query
    
    cols = ["mtbl.{}".format(
        x) for x in cols_name(conParam, inTbl, api='psql') if x != inGeom]
    
    q = (
        "SELECT {}, ST_Difference(mtbl.{}, foo.erase_geom) AS {} "
        "FROM {} AS mtbl, "
        "("
            "SELECT ST_UnaryUnion(ST_Collect(eetbl.{})) AS erase_geom "
            "FROM {} AS eetbl "
            "INNER JOIN {} AS jtbl ON ST_Intersects(eetbl.{}, jtbl.{})"
        ") AS foo"
    ).format(
        ", ".join(cols), inGeom, inGeom, inTbl, eraseGeom, eraseTbl,
        inTbl, eraseGeom, inGeom
    )
    
    return ntbl_by_query(conParam, outTbl, q, api='psql')
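
A hedged usage sketch; connection parameters, table and column names are hypothetical:

conParam = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
            'PASSWORD': 'admin', 'DATABASE': 'gisdb'}

# Remove from 'landuse' the areas covered by 'water' polygons;
# the result is written to the new table 'landuse_no_water'
pg_erase(conParam, 'landuse', 'water', 'geom', 'geom', 'landuse_no_water')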
Example #5
File: geom.py Project: zonakre/gasp
def fix_geom(conParam, table, geom, out_tbl, colsSelect=None, whr=None):
    """
    Remove some topological errors from the PostGIS data (via ST_MakeValid)
    """

    from gasp.sql.mng.qw import ntbl_by_query

    if not colsSelect:
        from gasp.sql.mng.fld import cols_name

        cols_tbl = [
            '{}.{}'.format(table, x)
            for x in cols_name(conParam, table, sanitizeSpecialWords=None)
            if x != geom
        ]
    else:
        from gasp import goToList

        cols_tbl = [
            '{}.{}'.format(table, x) for x in goToList(colsSelect) if x != geom
        ]

    Q = "SELECT {c}, ST_MakeValid({g}) AS {g} FROM {t}{w}".format(
        c=", ".join(cols_tbl),
        g=geom,
        t=table,
        w="" if not whr else " WHERE {}".format(whr))

    ntbl = ntbl_by_query(conParam, out_tbl, Q, api='psql')

    return ntbl
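
Usage sketch with hypothetical names; colsSelect and whr restrict the columns and rows that are carried to the output table:

conParam = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
            'PASSWORD': 'admin', 'DATABASE': 'gisdb'}

# Validate only the selected columns/rows of 'parcels'
fix_geom(
    conParam, 'parcels', 'geom', 'parcels_valid',
    colsSelect=['gid', 'use_class'], whr="use_class IS NOT NULL"
)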
Example #6
File: geom.py Project: zonakre/gasp
def select_main_geom_type(conparam, table, outbl, geomCol='geom'):
    """
    Assuming a table with several geometry types, this method
    counts the rows for each geometry type and selects only the rows whose
    geometry type is the most frequent one
    """

    from gasp.sql.mng.qw import ntbl_by_query
    from gasp.sql.mng.fld import cols_name

    COLS = [
        x for x in cols_name(conparam, table, sanitizeSpecialWords=None)
        if x != geomCol
    ]

    Q = (
        "SELECT {cols}, {geomcol} FROM ("
        "SELECT *, MAX(jtbl.geom_cont) OVER (PARTITION BY "
        "jtbl.tst) AS max_cnt FROM ("
        "SELECT {cols}, (ST_Dump({geomcol})).geom AS {geomcol}, "
        "ST_GeometryType((ST_Dump({geomcol})).geom) AS geom_type "
        "FROM {tbl}"
        ") AS foo INNER JOIN ("
        "SELECT ST_GeometryType((ST_Dump({geomcol})).geom) AS gt, "
        "COUNT(ST_GeometryType((ST_Dump({geomcol})).geom)) AS geom_cont, "
        "1 AS tst FROM {tbl} GROUP BY ST_GeometryType((ST_Dump({geomcol})).geom)"
        ") AS jtbl ON foo.geom_type = jtbl.gt"
        ") AS foo WHERE geom_cont = max_cnt").format(cols=", ".join(COLS),
                                                     geomcol=geomCol,
                                                     tbl=table)

    return ntbl_by_query(conparam, outbl, Q, api='psql')
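
Usage sketch with hypothetical names; this kind of cleanup is useful after an intersection or import that produced mixed geometry types:

conparam = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
            'PASSWORD': 'admin', 'DATABASE': 'gisdb'}

# Keep only the rows of the dominant geometry type (e.g. ST_Polygon)
select_main_geom_type(conparam, 'mixed_result', 'clean_result', geomCol='geom')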
Example #7
def tbl_to_dict(tbl, con, cols=None, apidb='psql'):
    """
    PG TABLE DATA to Python dict
    """

    from gasp import goToList
    from gasp.fm.sql import query_to_df
    from gasp.sql.mng.fld import cols_name

    cols = cols_name(con, tbl) if not cols else \
        goToList(cols)

    data = query_to_df(con,
                       'SELECT {cols_} FROM {table}'.format(
                           cols_=', '.join(cols), table=tbl),
                       db_api=apidb).to_dict(orient="records")

    return data
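
Usage sketch; the table and column names are hypothetical:

con = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
       'PASSWORD': 'admin', 'DATABASE': 'gisdb'}

# Read only the listed columns of 'census'
rows = tbl_to_dict('census', con, cols=['gid', 'pop_total'])
# rows is a list of dicts, e.g. [{'gid': 1, 'pop_total': 532}, ...]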
Example #8
File: geom.py Project: zonakre/gasp
def add_endpoints_to_table(conP,
                           inTable,
                           outTable,
                           idCol='gid',
                           geomCol='geom',
                           startCol="start_vertex",
                           endCol="end_vertex"):
    """
    Add start/end points columns to table
    """

    from gasp.sql.mng.qw import ntbl_by_query
    from gasp.sql.mng.fld import cols_name

    return ntbl_by_query(conP,
                         outTable,
                         ("SELECT {cols}, {stPnt}, {endPnt} FROM ("
                          "SELECT *, lead({stPnt}) OVER ("
                          "PARTITION BY {colId} ORDER BY pnt_idx) AS {endPnt} "
                          "FROM ("
                          "SELECT {cols}, pnt_idx, {stPnt}, "
                          "CASE "
                          "WHEN pnt_idx = 1 OR pnt_idx = MAX(pnt_idx) "
                          "OVER (PARTITION BY {colId}) "
                          "THEN 1 ELSE 0 END AS pnt_cat "
                          "FROM ("
                          "SELECT {cols}, "
                          "(ST_DumpPoints({geomF})).path[1] AS pnt_idx, "
                          "(ST_DumpPoints({geomF})).geom AS {stPnt} "
                          "FROM {table}"
                          ") AS foo"
                          ") AS foo2 "
                          "WHERE pnt_cat = 1"
                          ") AS foo3 "
                          "WHERE {endPnt} IS NOT NULL "
                          "ORDER BY {colId}, pnt_idx").format(cols=", ".join(
                              cols_name(conP, inTable)),
                                                              stPnt=startCol,
                                                              endPnt=endCol,
                                                              colId=idCol,
                                                              geomF=geomCol,
                                                              table=inTable),
                         api='psql')
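
Usage sketch for a line table; connection parameters and names are hypothetical:

conP = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
        'PASSWORD': 'admin', 'DATABASE': 'gisdb'}

# Adds start_vertex/end_vertex point columns for each line in 'roads'
add_endpoints_to_table(conP, 'roads', 'roads_endpoints',
                       idCol='gid', geomCol='geom')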
Example #9
def intersection(lnk, aShp, bShp, pk, aGeom, bGeom, output,
                 primitive, priority, new_pk='fid_pk', new_geom='geom'):
    """
    Intersect two layers

    primitive is the geometric primitive (point, line, polygons)

    priority indicates which fields the user wants to include in the
    output - the fields of aShp ('a') or the fields of bShp ('b').
    The user may also give a list (with the fields to select) as the value
    of the priority argument.
    """
    
    from gasp.sql.mng.fld import cols_name
    # NOTE: this function also depends on gasp's psqlcon connection helper,
    # which is not imported in this snippet

    if priority == 'a':
        cols_tbl = cols_name(lnk, aShp)
        cols_tbl.remove(aGeom)
    elif priority == 'b':
        cols_tbl = cols_name(lnk, bShp)
        cols_tbl.remove(bGeom)
    elif type(priority) == type([0]):
        cols_tbl = priority
    cols_tbl.remove(pk)
    conn = psqlcon(
        lnk['HOST'], lnk['USER'], lnk['PASSWORD'],
        lnk['PORT'], lnk['DATABASE']
    )
    cursor = conn.cursor()

    if primitive == 'point':
        cols_tbl = ['{t}.{c}'.format(t=aShp, c=x) for x in cols_tbl]
        if priority == 'a':
            sel_geom = "{f}.{g}".format(f=aShp, g=aGeom)
        elif priority == 'b' or type(priority) == type([]):
            sel_geom = "{f}.{g}".format(f=bShp, g=bGeom)
        cursor.execute((
            "CREATE TABLE {out} AS SELECT {cols}, {int_geom} AS {ngeom} FROM {pnt} "
            "INNER JOIN {poly} ON ST_Within({pnt}.{geom_a}, "
            "{poly}.{geom_b});").format(
                out=output,
                cols=','.join(cols_tbl),
                pnt=aShp,
                geom_a=aGeom,
                geom_b=bGeom,
                poly=bShp,
                int_geom=sel_geom, ngeom=new_geom
        ))

    elif primitive == 'line':
        cols_tbl = ['{t}.{c}'.format(t=output, c=x) for x in cols_tbl]
        cols_tbl.append(new_geom)
        cursor.execute((
            "CREATE TABLE {out} AS SELECT {cols} FROM (SELECT {shp_a}.*, "
            "(ST_DUMP(ST_Intersection({shp_b}.geom, {shp_a}.{geom_fld}))).geom "
            "FROM {shp_b} INNER JOIN {shp_a} ON ST_Intersects({shp_b}.geom, "
            "{shp_a}.{geom_fld})) As {out} WHERE ST_Dimension({out}.geom) = "
            "1;").format(
                out=output,
                cols=','.join(cols_tbl),
                shp_a=aShp,
                shp_b=bShp,
                geom_fld=aGeom
        ))

    elif primitive == 'polygon':
        cols_tbl = ['{t}.{c}'.format(t=aShp, c=x) for x in cols_tbl]
        cursor.execute((
            'CREATE TABLE {out} AS SELECT {cols}, ST_Multi(ST_Buffer'
            '(ST_Intersection({shp_b}.geom, {shp_a}.{geom_fld}), 0.0)) As '
            '{ngeom} FROM {shp_b} INNER JOIN {shp_a} ON ST_Intersects({shp_b}.geom, '
            '{shp_a}.{geom_fld}) WHERE Not ST_IsEmpty(ST_Buffer('
            'ST_Intersection({shp_b}.geom, {shp_a}.{geom_fld}), 0.0));').format(
                out=output,
                cols=','.join(cols_tbl),
                shp_a=aShp,
                shp_b = bShp,
                geom_fld=aGeom, ngeom=new_geom
        ))

    cursor.execute(
        "ALTER TABLE {out} ADD COLUMN {fid_pk} BIGSERIAL PRIMARY KEY;".format(
            out=output, fid_pk=new_pk))

    conn.commit()
    cursor.close()
    conn.close()
    return output, new_pk, new_geom
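
Usage sketch; the connection dict keys follow the ones the function itself reads (HOST, USER, PASSWORD, PORT, DATABASE), and all other names are hypothetical:

lnk = {'HOST': 'localhost', 'USER': 'postgres', 'PASSWORD': 'admin',
       'PORT': '5432', 'DATABASE': 'gisdb'}

# Polygon/polygon intersection keeping the fields of 'landuse' (priority='a')
intersection(lnk, 'landuse', 'districts', 'gid', 'geom', 'geom',
             'landuse_x_districts', 'polygon', 'a')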
Example #10
def get_unused_data_on_lulcp(
    pbf,
    pgsql_data,
    out_work,
    nomenclature,
    dicPostgre={
        'HOST': 'localhost',
        'USER': '******',
        'DATABASE': 'osm',
        'TEMPLATE': 'template_postgis',
        'PASSWORD': '******',
        'PORT': '5432'
    }):
    """
    Return data not used in osm2lulc procedure
    """

    import os
    from gasp.sql import run_sql_file
    from gasp.sql.mng.db import create_db
    from gasp.fm.sql import query_to_df
    from gasp.sql.mng.fld import cols_name
    from gasp.sql.k import create_pk
    from gasp.to.shp import psql_to_shp
    from gasp.to.psql import osm_to_pgsql
    # TODO: replace postgis
    from gasp.postgis.analysis import new_geom_table

    # ################ #
    # Global Variables #
    # ################ #
    # Name of the tables with osmdata after the import with osm2pgsql
    OsmData = {
        'polygon': 'planet_osm_polygon',
        'line': 'planet_osm_line',
        'point': 'planet_osm_point'
    }

    IrrelevantCols = [
        'addr:housename', 'addr:housenumber', 'addr:interpolation',
        'generator:source', 'tower:type'
    ]

    if nomenclature == 'URBAN_ATLAS':
        tbl_relation = 'rel_osm_ua'
        id_osm = 'osm_id'
    elif nomenclature == 'CORINE_LAND_COVER':
        tbl_relation = 'rel_osm_clc'
        id_osm = 'osm_id'
    elif nomenclature == 'GLOB_LAND_30':
        tbl_relation = 'rel_osm_global'
        id_osm = 'id_osm'

    # ################ #
    # Auxiliary Methods #
    # ################ #
    def get_where_string(dic, operator, table):
        l = []
        for fid in dic:
            if dic[fid][0] == '' or dic[fid][1] == '' or dic[fid][0] == 'sidewalk'\
               or dic[fid][0] == 'cycleway' or dic[fid][0] == 'busway'\
               or dic[fid][0] == 'enity' or dic[fid][0] == 'healthcare':
                continue
            l.append("{t}.{col}{o}'{val}'".format(col=dic[fid][0],
                                                  o=operator,
                                                  val=dic[fid][1],
                                                  t=table))
        return " OR ".join(l)

    create_db(dicPostgre, dicPostgre['DATABASE'])
    run_sql_file(dicPostgre, dicPostgre['DATABASE'], pgsql_data)
    # NOTE: run_osm2pgsql is not imported in this snippet; osm_to_pgsql
    # (imported above) is gasp's helper for loading the PBF into PostgreSQL
    run_osm2pgsql(pbf, dicPostgre['DATABASE'], dicPostgre['HOST'],
                  dicPostgre['USER'])
    # 1. Obtain data used on OSM2LULC procedure
    # 1.1 Get key and value for osm features
    id_related_with = [
        x[0]
        for x in query_to_df(dicPostgre,
                             "SELECT {fid} FROM {t}".format(t=tbl_relation,
                                                            fid=id_osm),
                             db_api='psql')[id_osm].tolist()
    ]
    key_value = {
        x[0]: [x[1], x[2]]
        for x in query_to_df(
            dicPostgre,
            "SELECT id, key, value FROM osm_features WHERE {s}".format(
                s=' OR '.join(
                    ['id={y}'.format(y=str(x)) for x in id_related_with])),
            db_api='psql').values.tolist()
    }
    # 1.2 Extract data with this combinations of keys and values
    for tbl in OsmData:
        # Create new primary key
        create_pk(dicPostgre, OsmData[tbl], 'pk_fid')
        cols = cols_name(dicPostgre, OsmData[tbl])
        cols_clean = []
        for i in cols:
            if i not in IrrelevantCols:
                if i == 'natural':
                    cols_clean.append("{t}.{col}".format(t=OsmData[tbl],
                                                         col=i))
                else:
                    cols_clean.append(i)
        whr = get_where_string(key_value, "=", OsmData[tbl])
        new_geom_table(dicPostgre,
                       cols_clean,
                       OsmData[tbl],
                       whr,
                       'used{t}'.format(t=OsmData[tbl]),
                       pk=False)
        export = psql_to_shp(
            dicPostgre,
            'used{t}'.format(t=OsmData[tbl]),
            os.path.join(out_work, 'used{t}.shp'.format(t=OsmData[tbl])),
            api="pgsql2shp",
            geom_col='way')
    # 2. Obtain data not used on OSM2LULC procedure
    for tbl in OsmData:
        new_geom_table(dicPostgre, ['*'],
                       OsmData[tbl],
                       "{t}.pk_fid NOT IN (SELECT pk_fid FROM used{t})".format(
                           t=OsmData[tbl]),
                       'unused{t}'.format(t=OsmData[tbl]),
                       pk=False)
        export = psql_to_shp(dicPostgre,
                             'unused{t}'.format(t=OsmData[tbl]),
                             os.path.join(
                                 out_work,
                                 'unused{t}.shp'.format(t=OsmData[tbl])),
                             api="pgsql2shp",
                             geom_col='way')
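
A sketch of a call, assuming a local PostgreSQL/PostGIS instance; the PBF, SQL dump, output folder and credentials are hypothetical:

get_unused_data_on_lulcp(
    '/data/region.osm.pbf',      # OSM extract
    '/data/osm_features.sql',    # SQL dump with the osm_features / rel_* tables
    '/tmp/osm2lulc_unused',      # folder receiving the used*/unused* shapefiles
    'URBAN_ATLAS',
    dicPostgre={
        'HOST': 'localhost', 'USER': 'postgres', 'DATABASE': 'osm',
        'TEMPLATE': 'template_postgis', 'PASSWORD': 'admin', 'PORT': '5432'
    }
)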
Example #11
def pgtables_to_layer_withStyle_by_col(pgtables,
                                       sldData,
                                       pgsql_con,
                                       workName=None,
                                       storeName=None,
                                       geoserver_con={
                                           'USER': '******',
                                           'PASSWORD': '******',
                                           'HOST': 'localhost',
                                           'PORT': '8888'
                                       },
                                       sldGeom='Polygon',
                                       DATATYPE='QUANTITATIVE',
                                       TABLE_DESIGNATION=None,
                                       COL_DESIGNATION=None,
                                       exclude_cols=None,
                                       pathToSLDfiles=None):
    """
    Create a new Geoserver Workspace, create a postgis store and one layer
    for each table in 'pgtables'. Each layer will have
    multiple styles - one style per column.
    
    Parameters:
    1) pgtables
    - List of PSQL tables to be transformed as Geoserver Layers
    
    2) sldData
    - sldData should be an xls table with the style specifications.
    For QUANTITATIVE DATA: the table should have two sheets: one for
    colors and another for intervals:
    
    COLORS SHEET STRUCTURE (Sheet Index = 0):
    cls_id | R | G | B | STROKE_R | STROKE_G | STROKE_B | STROKE_W
       1   | X | X | X |    X     |    X     |    X     |    1
       2   | X | X | X |    X     |    X     |    X     |    1
       3   | X | X | X |    X     |    X     |    X     |    1
       4   | X | X | X |    X     |    X     |    X     |    1
       5   | X | X | X |    X     |    X     |    X     |    1
    
    INTERVALS SHEET STRUCTURE (Sheet Index = 1):
          | 0 | 1 |  2 |  3 |  4 |  5
    col_0 | 0 | 5 | 10 | 15 | 20 | 25
    col_1 | 0 | 6 | 12 | 18 | 24 | 30
    ...
    col_n | 0 | 5 | 10 | 15 | 20 | 25
    
    For CATEGORICAL DATA: The table should have only one sheet:
    CATEGORICAL SHEET STRUCTURE
           | R | G | B | STROKE_R | STROKE_G | STROKE_B | STROKE_W
    attr_1 | X | X | X |    X     |    X     |    X     |    1
    attr_2 | X | X | X |    X     |    X     |    X     |    1
    ...
    attr_n | X | X | X |    X     |    X     |    X     |    1
    
    3) pgsql_con
    - Dict with parameters that will be used to connect to PostgreSQL
    d = {
        'HOST' : 'localhost', 'PORT' : '5432', 'USER' : 'postgres',
        'PASSWORD' : 'admin', 'DATABASE' : 'db_name'
    }
    
    4) workName
    - String with the name of the Geoserver workspace that will be created
    
    5) storeName
    - String with the name of the Geoserver store that will be created
    
    6) geoserver_con
    - Dict with parameters to connect to Geoserver
    
    7) sldGeom
    - Data Geometry. At the moment, the options 'Polygon' and 'Line' are
    valid.
    
    8) DATATYPE='QUANTITATIVE' | 'CATEGORICAL'
    
    9) TABLE_DESIGNATION
    - Table with the association between each pgtable name and the designation
    to be used to name the Geoserver Layer.
    
    10) COL_DESIGNATION 
    - xls table with the association between each column and the
    style name that will be used to present that column's information.
    The style designation cannot contain blank characters.
    
    11) exclude_cols
    - One style will be created for each column in one pgtable. The columns
    in 'exclude_cols' will not have a style.
    
    12) pathToSLDfiles
    - Absolute path to the folder where the SLD (Style Layer Descriptor)
    will be stored.
    """

    import os
    from gasp import goToList
    from gasp.fm import tbl_to_obj
    from gasp.oss.ops import create_folder
    from gasp.sql.mng.fld import cols_name
    from gasp.web.geosrv.wspace import create_workspace
    from gasp.web.geosrv.stores import create_psqlstore
    from gasp.web.geosrv.lyrs import publish_postgis_layer
    from gasp.web.geosrv.styl import create_style
    from gasp.web.geosrv.styl import list_styles
    from gasp.web.geosrv.styl import del_style
    from gasp.web.geosrv.styl.assign import assign_style_to_layer
    from gasp.web.geosrv.styl.sld import write_sld

    # Sanitize PGtables
    pgtables = goToList(pgtables)

    if not pgtables:
        raise ValueError('pgtables value is not valid!')

    exclude_cols = goToList(exclude_cols)

    STY_DESIGNATION = tbl_to_obj(COL_DESIGNATION,
                                 useFirstColAsIndex=True,
                                 output='dict',
                                 colsAsArray=True) if COL_DESIGNATION else None

    LYR_DESIGNATION = tbl_to_obj(
        TABLE_DESIGNATION,
        useFirstColAsIndex=True,
        output='dict',
        colsAsArray=True) if TABLE_DESIGNATION else None

    # Get intervals and colors data
    if DATATYPE == 'QUANTITATIVE':
        if os.path.exists(sldData):
            FF = os.path.splitext(sldData)[1]
            if FF == '.xls' or FF == '.xlsx':
                colorsDict = tbl_to_obj(sldData,
                                        sheet=0,
                                        useFirstColAsIndex=True,
                                        output='dict')
                intervalsDict = tbl_to_obj(sldData,
                                           sheet=1,
                                           useFirstColAsIndex=True,
                                           output='dict')

            else:
                raise ValueError(
                    ('At the moment, for DATATYPE QUANTITATIVE, sldData '
                     'has to be a xls table'))

        else:
            raise ValueError(('{} is not a valid file').format(sldData))

    elif DATATYPE == 'CATEGORICAL':
        if os.path.exists(sldData):
            # os.path.splitext keeps the leading dot in the extension
            if os.path.splitext(sldData)[1] in ('.xls', '.xlsx'):
                colorsDict = tbl_to_obj(sldData,
                                        sheet=0,
                                        useFirstColAsIndex=True,
                                        output='dict')

            else:
                raise ValueError(
                    ('At the moment, for DATATYPE CATEGORICAL, sldData '
                     'has to be a xls table'))
        else:
            raise ValueError(('{} is not a valid file').format(sldData))

    else:
        raise ValueError('{} is not available at the moment'.format(DATATYPE))

    # Create Workspace
    workName = 'w_{}'.format(
        pgsql_con['DATABASE']) if not workName else workName

    create_workspace(workName, conf=geoserver_con, overwrite=True)

    # Create Store
    storeName = pgsql_con['DATABASE'] if not storeName else storeName
    create_psqlstore(storeName, workName, pgsql_con, gs_con=geoserver_con)

    # Create folder for sld's
    wTmp = create_folder(
        os.path.join(os.path.dirname(sldData),
                     'sldFiles')) if not pathToSLDfiles else pathToSLDfiles

    # List styles in geoserver
    STYLES = list_styles(conf=geoserver_con)

    # For each table in PGTABLES
    for PGTABLE in pgtables:
        # Publish Postgis table
        TITLE = None if not LYR_DESIGNATION else LYR_DESIGNATION[PGTABLE][0]
        publish_postgis_layer(workName,
                              storeName,
                              PGTABLE,
                              title=TITLE,
                              gs_con=geoserver_con)

        # List PGTABLE columns
        pgCols = cols_name(pgsql_con, PGTABLE)

        # For each column
        for col in pgCols:
            if exclude_cols and col in exclude_cols:
                continue

            STYLE_NAME = '{}_{}'.format(
                PGTABLE, STY_DESIGNATION[col]
                [0]) if STY_DESIGNATION else '{}_{}'.format(PGTABLE, col)

            if STYLE_NAME in STYLES:
                del_style(STYLE_NAME, geoserver_con)

            # Create Object with association between colors and intervals
            d = {}
            OPACITY = str(colorsDict[1]['OPACITY'])
            for i in colorsDict:
                d[i] = {
                    'R': colorsDict[i]['R'],
                    'G': colorsDict[i]['G'],
                    'B': colorsDict[i]['B']
                }

                if DATATYPE == 'QUANTITATIVE':
                    d[i]['min'] = intervalsDict[col][i - 1]
                    d[i]['max'] = intervalsDict[col][i]

                if 'STROKE_R' in colorsDict[i] and \
                   'STROKE_G' in colorsDict[i] and \
                   'STROKE_B' in colorsDict[i]:
                    d[i]['STROKE'] = {
                        'R': colorsDict[i]['STROKE_R'],
                        'G': colorsDict[i]['STROKE_G'],
                        'B': colorsDict[i]['STROKE_B']
                    }

                    if 'STROKE_W' in colorsDict[i]:
                        d[i]['STROKE']['WIDTH'] = colorsDict[i]['STROKE_W']

            # Create SLD
            sldFile = write_sld(col,
                                d,
                                os.path.join(wTmp, '{}.sld'.format(col)),
                                geometry=sldGeom,
                                DATA=DATATYPE,
                                opacity=OPACITY)

            # Create Style
            create_style(STYLE_NAME, sldFile, conf=geoserver_con)

            # Apply SLD
            assign_style_to_layer(STYLE_NAME, PGTABLE, geoserver_con)
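
A usage sketch for the QUANTITATIVE case; all paths, table names and credentials below are hypothetical:

pgsql_con = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
             'PASSWORD': 'admin', 'DATABASE': 'gisdb'}
geoserver_con = {'USER': 'admin', 'PASSWORD': 'geoserver',
                 'HOST': 'localhost', 'PORT': '8080'}

pgtables_to_layer_withStyle_by_col(
    ['census_2011', 'census_2021'],      # PostGIS tables to publish
    '/data/styles/quantitative.xls',     # xls with the colors/intervals sheets described above
    pgsql_con,
    workName='w_census', storeName='census_store',
    geoserver_con=geoserver_con,
    sldGeom='Polygon', DATATYPE='QUANTITATIVE',
    exclude_cols=['gid', 'geom']
)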
Example #12
File: geom.py Project: zonakre/gasp
def check_endpoint_ispoint(conParam,
                           lnhTable,
                           pntTable,
                           outTable,
                           nodeStart,
                           nodeEnd,
                           pointId,
                           pntGeom="geom"):
    """
    Check if a Start/End point in a table with line geometries is a point
    in another table.
    """

    from gasp.sql.mng.qw import ntbl_by_query
    from gasp.sql.mng.fld import cols_name

    tCols = [
        x for x in cols_name(conParam, lnhTable)
        if x != nodeStart and x != nodeEnd
    ]

    return ntbl_by_query(
        conParam,
        outTable,
        ("SELECT * FROM ("
         "SELECT {fooCols}, foo.{stPnt}, foo.{endPnt}, "
         "CASE "
         "WHEN start_tbl.start_x IS NOT NULL THEN 1 ELSE 0 "
         "END AS start_isstop, "
         "CASE "
         "WHEN end_tbl.end_x IS NOT NULL THEN 1 ELSE 0 "
         "END AS end_isstop, start_tbl.start_id, end_tbl.end_id "
         "FROM ("
         "SELECT *, "
         "CAST(((round(CAST(ST_X({stPnt}) AS numeric), 4)) * 10000) "
         "AS integer) AS start_x, "
         "CAST(((round(CAST(ST_Y({stPnt}) AS numeric), 4)) * 10000) "
         "AS integer) AS start_y, "
         "CAST(((round(CAST(ST_X({endPnt}) AS numeric), 4)) * 10000) "
         "AS integer) AS end_x, "
         "CAST(((round(CAST(ST_Y({endPnt}) AS numeric), 4)) * 10000) "
         "AS integer) AS end_y "
         "FROM {lnhT}"
         ") AS foo "
         "LEFT JOIN ("
         "SELECT CAST(((round(CAST(ST_X({pntG}) AS numeric), 4)) "
         "* 10000) AS integer) AS start_x, "
         "CAST(((round(CAST(ST_Y({pntG}) AS numeric), 4)) "
         "* 10000) AS integer) AS start_y, "
         "{pntid} AS start_id FROM {pntT}"
         ") AS start_tbl "
         "ON foo.start_x = start_tbl.start_x AND "
         "foo.start_y = start_tbl.start_y "
         "LEFT JOIN ("
         "SELECT CAST(((round(CAST(ST_X({pntG}) AS numeric), 4)) "
         "* 10000) AS integer) AS end_x, "
         "CAST(((round(CAST(ST_Y({pntG}) AS numeric), 4)) "
         "* 10000) as integer) AS end_y, "
         "{pntid} AS end_id FROM {pntT}"
         ") AS end_tbl "
         "ON foo.end_x = end_tbl.end_x AND foo.end_y = end_tbl.end_y"
         ") AS foo2 "
         "GROUP BY {cols}, {stPnt}, {endPnt}, start_isstop, end_isstop, "
         "start_id, end_id").format(fooCols=", ".join(
             ["foo.{}".format(c) for c in tCols]),
                                    stPnt=nodeStart,
                                    endPnt=nodeEnd,
                                    lnhT=lnhTable,
                                    pntT=pntTable,
                                    pntG=pntGeom,
                                    cols=", ".join(tCols),
                                    pntid=pointId),
        api='psql')
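
Usage sketch with hypothetical names; the start/end vertex columns could be the ones produced by add_endpoints_to_table above:

conParam = {'HOST': 'localhost', 'PORT': '5432', 'USER': 'postgres',
            'PASSWORD': 'admin', 'DATABASE': 'gisdb'}

# Flag which road endpoints coincide with bus stop points
check_endpoint_ispoint(
    conParam, 'roads_endpoints', 'bus_stops', 'roads_endpoints_chk',
    'start_vertex', 'end_vertex', 'stop_id', pntGeom='geom'
)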