Example #1
File: prox.py Project: jasp382/gasp
def st_buffer(db,
              inTbl,
              bfDist,
              geomCol,
              outTbl,
              bufferField="geometry",
              whrClause=None,
              dissolve=None,
              cols_select=None,
              outTblIsFile=None):
    """
    Using Buffer on PostGIS Data
    """

    from gasp.pyt import obj_to_lst

    dissolve = obj_to_lst(dissolve) if dissolve != "ALL" else "ALL"

    SEL_COLS = "" if not cols_select else ", ".join(obj_to_lst(cols_select))
    DISS_COLS = "" if not dissolve or dissolve == "ALL" else ", ".join(
        dissolve)
    GRP_BY = "" if not dissolve else "{}, {}".format(SEL_COLS, DISS_COLS) if \
        SEL_COLS != "" and DISS_COLS != "" else SEL_COLS \
        if SEL_COLS != "" else DISS_COLS if DISS_COLS != "" else ""

    Q = (
        "SELECT{sel}{spFunc}{geom}, {_dist}{endFunc} AS {bf} "
        "FROM {t}{whr}{grpBy}"
    ).format(
        sel = " " if not cols_select else " {}, ".format(SEL_COLS),
        spFunc="ST_Buffer(" if not dissolve else \
            "ST_UnaryUnion(ST_Collect(ST_Buffer(",
        geom=geomCol, _dist=bfDist,
        endFunc=")" if not dissolve else ")))",
        t=inTbl,
        grpBy=" GROUP BY {}".format(GRP_BY) if GRP_BY != "" else "",
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        bf=bufferField
    )

    if not outTblIsFile:
        from gasp.sql.to import q_to_ntbl

        outTbl = q_to_ntbl(db, outTbl, Q, api='psql')

    else:
        from gasp.gt.toshp.db import dbtbl_to_shp

        dbtbl_to_shp(db,
                     Q,
                     bufferField,
                     outTbl,
                     api='pgsql2shp',
                     tableIsQuery=True)

    return outTbl
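A minimal usage sketch (not from the original source): the database, table and column names are hypothetical, and the import path is only assumed from the "File: prox.py" header above.

# Assumed import path; adjust to wherever st_buffer lives in the gasp package
from gasp.gql.prox import st_buffer

# 500 m buffer around every feature of "schools", dissolved into a single
# geometry and written to a new PostGIS table
st_buffer('mydb', 'schools', 500, 'geom', 'schools_bf500', dissolve="ALL")

# Same operation, but exported to a shapefile instead of a new table
st_buffer(
    'mydb', 'schools', 500, 'geom', '/tmp/schools_bf500.shp',
    dissolve="ALL", outTblIsFile=True
)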
Example #2
def st_dissolve(db, table, geomColumn, outTable, whrClause=None,
                diss_cols=None, outTblIsFile=None, api='sqlite'):
    """
    Dissolve a Polygon table
    """
    
    from gasp.pyt import obj_to_lst
    
    diss_cols = obj_to_lst(diss_cols) if diss_cols else None
    geomcol = "geometry" if api == 'sqlite' else 'geom'
    
    sql = (
        "SELECT{selCols} ST_UnaryUnion(ST_Collect({geom})) AS {gout} "
        "FROM {tbl}{whr}{grpBy}"
    ).format(
        selCols="" if not diss_cols else " {},".format(", ".join(diss_cols)),
        geom=geomColumn, tbl=table,
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        grpBy="" if not diss_cols else " GROUP BY {}".format(
            ", ".join(diss_cols)
        ), gout=geomcol
    )
    
    if outTblIsFile:
        if api == 'sqlite':
            from gasp.gt.attr import sel_by_attr
            
            sel_by_attr(db, sql, outTable, api_gis='ogr')
        
        elif api == 'psql':
            from gasp.gt.toshp.db import dbtbl_to_shp
            
            dbtbl_to_shp(
                db, sql, geomcol, outTable, api='pgsql2shp',
                tableIsQuery=True
            )
    
    else:
        from gasp.sql.to import q_to_ntbl
        
        q_to_ntbl(
            db, outTable, sql, api='ogr2ogr' if api == 'sqlite' else 'psql'
        )
    
    return outTable
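A usage sketch under similar assumptions (hypothetical database, table and column names):

# Dissolve land-use polygons by class inside a PostGIS database
st_dissolve('mydb', 'landuse', 'geom', 'landuse_diss',
            diss_cols=['lu_class'], api='psql')

# SpatiaLite variant, exporting the result directly to a shapefile
st_dissolve('/tmp/data.sqlite', 'landuse', 'geometry', '/tmp/landuse_diss.shp',
            diss_cols=['lu_class'], outTblIsFile=True, api='sqlite')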
Example #3
File: brk.py Project: jasp382/gasp
def break_lines_on_points(lineShp, pntShp, outShp, lnhidonpnt,
                          api='shply', db=None):
    """
    Break lines at point locations
    
    APIs available:
    - shply (shapely);
    - psql (postgis);
    """
    
    if api == 'shply':
        result = shply_break_lines_on_points(
            lineShp, pntShp, lnhidonpnt, outShp)
    
    elif api == 'psql':
        from gasp.pyt.oss     import fprop
        from gasp.sql.db      import create_db
        from gasp.gql.to      import shp_to_psql
        from gasp.gt.toshp.db import dbtbl_to_shp
        from gasp.gql.brk     import split_lines_on_pnt
        
        # Create DB
        if not db:
            db = create_db(fprop(lineShp, 'fn', forceLower=True), api='psql')
        
        else:
            from gasp.sql.i import db_exists

            isDb = db_exists(db)
            
            if not isDb:
                db = create_db(db, api='psql')
        
        # Send Data to BD
        lnhTbl = shp_to_psql(db, lineShp, api="shp2pgsql")
        pntTbl = shp_to_psql(db,  pntShp, api="shp2pgsql")
        
        # Get result
        outTbl = split_lines_on_pnt(
            db, lnhTbl, pntTbl,
            fprop(outShp, 'fn', forceLower=True),
            lnhidonpnt, 'gid'
        )
        
        # Export result
        result = dbtbl_to_shp(
            db, outTbl, "geom", outShp, inDB='psql', tableIsQuery=None,
            api="pgsql2shp"
        )
    
    else:
        raise ValueError(
            "API {} is not available".format(api)
        )
    
    return result
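A hypothetical call (paths, column and database names are made up for illustration):

# Break a roads layer wherever the points in crossings.shp fall on it;
# 'road_id' is assumed to be the column in the points layer holding the
# identifier of the line each point belongs to
break_lines_on_points(
    '/tmp/roads.shp', '/tmp/crossings.shp', '/tmp/roads_split.shp',
    'road_id', api='psql', db='breakdb'
)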
Example #4
def line_intersect_to_pnt(inShp, outShp, db=None):
    """
    Get Points where two line features of the same feature class
    intersect.
    """

    from gasp.pyt.oss import fprop
    from gasp.gt.toshp.db import dbtbl_to_shp
    from gasp.sql.db import create_db
    from gasp.gql.to import shp_to_psql
    from gasp.gql.ovly import line_intersection_pnt

    # Create DB if necessary
    if not db:
        db = create_db(fprop(inShp, 'fn', forceLower=True), api='psql')

    else:
        from gasp.sql.i import db_exists

        isDb = db_exists(db)

        if not isDb:
            create_db(db, api='psql')

    # Send data to DB
    inTbl = shp_to_psql(db, inShp, api="shp2pgsql")

    # Get result
    outTbl = line_intersection_pnt(db, inTbl,
                                   fprop(outShp, 'fn', forceLower=True))

    # Export data from DB
    outShp = dbtbl_to_shp(db,
                          outTbl,
                          "geom",
                          outShp,
                          inDB='psql',
                          tableIsQuery=None,
                          api="pgsql2shp")

    return outShp
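A hypothetical call; when db is omitted, a PostGIS database named after the input file is created automatically:

# Points at every intersection between lines of roads.shp
line_intersect_to_pnt('/tmp/roads.shp', '/tmp/road_crossings.shp')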
Example #5
def dsn_data_collection_by_multibuffer(inBuffers, workspace, db, datasource,
                                       keywords=None):
    """
    Extract Digital Social Network Data for each sub-buffer in a buffer.
    A sub-buffer is a buffer with a radius equal to half of the main buffer
    radius and with its central point at the North, South, East, West,
    Northeast, Northwest, Southwest and Southeast of the main buffer's
    central point.
    
    inBuffers = {
        "lisbon"    : {
            'x'      : -89004.994779, # in meters
            'y'      : -102815.866054, # in meters
            'radius' : 10000,
            'epsg'   : 3763
        },
        "london     : {
            'x'      : -14210.551441, # in meters
            'y'      : 6711542.47559, # in meters
            'radius' : 10000,
            'epsg'   : 3857
        }
    }
    or
    inBuffers = {
        "lisbon" : {
            "path" : /path/to/file.shp,
            "epsg" : 3763
        }
    }
    
    keywords = ['flood', 'accident', 'fire apartment', 'graffiti', 'homeless']
    
    datasource = 'facebook' or datasource = 'flickr'
    TODO: Only works for Flickr and Facebook
    """
    
    import os; from osgeo import ogr
    from gasp.pyt         import obj_to_lst
    from gasp.sql.db      import create_db
    from gasp.sql.to      import q_to_ntbl
    from gasp.sql.to      import df_to_db
    from gasp.gql.to      import shp_to_psql
    from gasp.gt.toshp    import df_to_shp
    from gasp.gt.toshp.db import dbtbl_to_shp
    from gasp.gt.prox.bf  import get_sub_buffers, dic_buffer_array_to_shp
    
    if datasource == 'flickr':
        from gasp.sde.dsn.flickr import photos_location
    
    elif datasource == 'facebook':
        from gasp.sde.dsn.fb.places import places_by_query
    
    keywords = obj_to_lst(keywords)
    keywords = ["None"] if not keywords else keywords
    
    # Create Database to Store Data
    create_db(db, overwrite=True, api='psql')
    
    for city in inBuffers:
        # Get Smaller Buffers
        if "path" in inBuffers[city]:
            # Get X, Y and Radius
            from gasp.gt.prop.feat.bf import bf_prop
            
            __bfprop = bf_prop(
                inBuffers[city]["path"], inBuffers[city]["epsg"], isFile=True
            )
            
            inBuffers[city]["x"]      = __bfprop["X"]
            inBuffers[city]["y"]      = __bfprop["Y"]
            inBuffers[city]["radius"] = __bfprop["R"]
        
        inBuffers[city]["list_buffer"] = [{
            'X' : inBuffers[city]["x"], 'Y' : inBuffers[city]["y"],
            'RADIUS' : inBuffers[city]['radius'], 'cardeal' : 'major'
        }] + get_sub_buffers(
            inBuffers[city]["x"], inBuffers[city]["y"],
            inBuffers[city]["radius"]
        )
        
        # Smaller Buffers to File
        multiBuffer = os.path.join(workspace, 'buffers_{}.shp'.format(city))
        dic_buffer_array_to_shp(
            inBuffers[city]["list_buffer"], multiBuffer,
            inBuffers[city]['epsg'], fields={'cardeal' : ogr.OFTString}
        )
        
        # Retrieve data for each keyword and buffer
        # Record these elements in one dataframe
        c       = None
        tblData = None
        for bf in inBuffers[city]["list_buffer"]:
            for k in keywords:
                if datasource == 'flickr':
                    tmpData = photos_location(
                        bf, inBuffers[city]["epsg"],
                        keyword=k if k != 'None' else None,
                        epsg_out=inBuffers[city]["epsg"],
                        onlySearchAreaContained=False
                    )
                
                elif datasource == 'facebook':
                    tmpData = places_by_query(
                        bf, inBuffers[city]["epsg"],
                        keyword=k if k != 'None' else None,
                        epsgOut=inBuffers[city]["epsg"],
                        onlySearchAreaContained=False
                    )
                
                if type(tmpData) == int:
                    print("NoData finded for buffer '{}' and keyword '{}'".format(
                        bf['cardeal'], k
                    ))
                    
                    continue
                
                tmpData["keyword"]   = k
                tmpData["buffer_or"] = bf["cardeal"]
                
                if not c:
                    tblData = tmpData
                    c = 1
                else:
                    tblData = tblData.append(tmpData, ignore_index=True)
        
        inBuffers[city]["data"] = tblData
        
        # Get data columns names
        cols = inBuffers[city]["data"].columns.values
        dataColumns = [
            c for c in cols if c != 'geom' and c != 'keyword' \
            and c != 'buffer_or' and c != 'geometry'
        ]
        
        # Send data to PostgreSQL
        if 'geometry' in cols:
            cgeom = 'geometry'
        
        else:
            cgeom = 'geom'
        
        inBuffers[city]["table"] = 'tbldata_{}'.format(city)
        
        df_to_db(
            db, inBuffers[city]["data"],
            inBuffers[city]["table"], api='psql',
            epsg=inBuffers[city]["epsg"], geomType='POINT', colGeom=cgeom
        )
        
        # Send Buffers data to PostgreSQL
        inBuffers[city]["pg_buffer"] = shp_to_psql(
            db, multiBuffer, pgTable='buffers_{}'.format(city),
            api="shp2pgsql", srsEpsgCode=inBuffers[city]["epsg"]
        )
        
        inBuffers[city]["filter_table"] = q_to_ntbl(
            db, "filter_{}".format(inBuffers[city]["table"]), (
                "SELECT srcdata.*, "
                "array_agg(buffersg.cardeal ORDER BY buffersg.cardeal) "
                "AS intersect_buffer FROM ("
                    "SELECT {cols}, keyword, geom, "
                    "array_agg(buffer_or ORDER BY buffer_or) AS extracted_buffer "
                    "FROM {pgtable} "
                    "GROUP BY {cols}, keyword, geom"
                ") AS srcdata, ("
                    "SELECT cardeal, geom AS bfg FROM {bftable}"
                ") AS buffersg "
                "WHERE ST_Intersects(srcdata.geom, buffersg.bfg) IS TRUE "
                "GROUP BY {cols}, keyword, geom, extracted_buffer"
            ).format(
                cols    = ", ".join(dataColumns),
                pgtable = inBuffers[city]["table"],
                bftable = inBuffers[city]["pg_buffer"]
            ), api='psql'
        )
        
        inBuffers[city]["outside_table"] = q_to_ntbl(
            db, "outside_{}".format(inBuffers[city]["table"]), (
                "SELECT * FROM ("
                "SELECT srcdata.*, "
                "array_agg(buffersg.cardeal ORDER BY buffersg.cardeal) "
                "AS not_intersect_buffer FROM ("
                    "SELECT {cols}, keyword, geom, "
                    "array_agg(buffer_or ORDER BY buffer_or) AS extracted_buffer "
                    "FROM {pgtable} "
                    "GROUP BY {cols}, keyword, geom"
                ") AS srcdata, ("
                    "SELECT cardeal, geom AS bfg FROM {bftable}"
                ") AS buffersg "
                "WHERE ST_Intersects(srcdata.geom, buffersg.bfg) IS NOT TRUE "
                "GROUP BY {cols}, keyword, geom, extracted_buffer"
                ") AS foo WHERE array_length(not_intersect_buffer, 1) = 9"
            ).format(
                cols    = ", ".join(dataColumns),
                pgtable = inBuffers[city]["table"],
                bftable = inBuffers[city]["pg_buffer"]
            ), api='psql'
        )
        
        # Union these two tables
        inBuffers[city]["table"] = q_to_ntbl(db, "data_{}".format(city), (
            "SELECT * FROM {intbl} UNION ALL "
            "SELECT {cols}, keyword, geom, extracted_buffer, "
            "CASE WHEN array_length(not_intersect_buffer, 1) = 9 "
            "THEN '{array_symbol}' ELSE not_intersect_buffer END AS "
            "intersect_buffer FROM {outbl}"
        ).format(
            intbl        = inBuffers[city]["filter_table"],
            outbl        = inBuffers[city]["outside_table"],
            cols         = ", ".join(dataColumns),
            array_symbol = '{' + '}'
        ), api='psql')
        
        """
        Get Buffers table with info related:
        -> pnt_obtidos = nr pontos obtidos usando esse buffer
        -> pnt_obtidos_fora = nt pontos obtidos fora desse buffer, mas 
        obtidos com ele
        -> pnt_intersect = nt pontos que se intersectam com o buffer
        -> pnt_intersect_non_obtain = nr pontos que se intersectam mas nao 
        foram obtidos como buffer
        """
        inBuffers[city]["pg_buffer"] = q_to_ntbl(
            db, "dt_{}".format(inBuffers[city]["pg_buffer"]), (
                "SELECT main.*, get_obtidos.pnt_obtidos, "
                "obtidos_fora.pnt_obtidos_fora, intersecting.pnt_intersect, "
                "int_not_obtained.pnt_intersect_non_obtain "
                "FROM {bf_table} AS main "
                "LEFT JOIN ("
                    "SELECT gid, cardeal, COUNT(gid) AS pnt_obtidos "
                    "FROM {bf_table} AS bf "
                    "INNER JOIN {dt_table} AS dt "
                    "ON bf.cardeal = ANY(dt.extracted_buffer) "
                    "GROUP BY gid, cardeal"
                ") AS get_obtidos ON main.gid = get_obtidos.gid "
                "LEFT JOIN ("
                    "SELECT gid, cardeal, COUNT(gid) AS pnt_obtidos_fora "
                    "FROM {bf_table} AS bf "
                    "INNER JOIN {dt_table} AS dt "
                    "ON bf.cardeal = ANY(dt.extracted_buffer) "
                    "WHERE ST_Intersects(bf.geom, dt.geom) IS NOT TRUE "
                    "GROUP BY gid, cardeal"
                ") AS obtidos_fora ON main.gid = obtidos_fora.gid "
                "LEFT JOIN ("
                    "SELECT gid, cardeal, COUNT(gid) AS pnt_intersect "
                    "FROM {bf_table} AS bf "
                    "INNER JOIN {dt_table} AS dt "
                    "ON bf.cardeal = ANY(dt.intersect_buffer) "
                    "GROUP BY gid, cardeal"
                ") AS intersecting ON main.gid = intersecting.gid "
                "LEFT JOIN ("
                    "SELECT gid, cardeal, COUNT(gid) AS pnt_intersect_non_obtain "
                    "FROM {bf_table} AS bf "
                    "INNER JOIN {dt_table} AS dt "
                    "ON bf.cardeal = ANY(dt.intersect_buffer) "
                    "WHERE NOT (bf.cardeal = ANY(dt.extracted_buffer)) "
                    "GROUP BY gid, cardeal"
                ") AS int_not_obtained "
                "ON main.gid = int_not_obtained.gid "
                "ORDER BY main.gid"
            ).format(
                bf_table = inBuffers[city]["pg_buffer"],
                dt_table = inBuffers[city]["table"]
            ), api='psql'
        )
        
        """
        Get Points table with info related:
        -> nobtido = n vezes um ponto foi obtido
        -> obtido_e_intersect = n vezes um ponto foi obtido usando um buffer 
        com o qual se intersecta
        -> obtido_sem_intersect = n vezes um ponto foi obtido usando um buffer
        com o qual nao se intersecta
        -> nintersect = n vezes que um ponto se intersecta com um buffer
        -> intersect_sem_obtido = n vezes que um ponto nao foi obtido apesar
        de se intersectar com o buffer
        """
        inBuffers[city]["table"] = q_to_ntbl(
            db, "info_{}".format(city), (
                "SELECT {cols}, dt.keyword, dt.geom, "
                "CAST(dt.extracted_buffer AS text) AS extracted_buffer, "
                "CAST(dt.intersect_buffer AS text) AS intersect_buffer, "
                "array_length(extracted_buffer, 1) AS nobtido, "
                "SUM(CASE WHEN ST_Intersects(bf.geom, dt.geom) IS TRUE "
                    "THEN 1 ELSE 0 END) AS obtido_e_intersect, "
                "(array_length(extracted_buffer, 1) - SUM("
                    "CASE WHEN ST_Intersects(bf.geom, dt.geom) IS TRUE "
                    "THEN 1 ELSE 0 END)) AS obtido_sem_intersect, "
                "array_length(intersect_buffer, 1) AS nintersect, "
                "(array_length(intersect_buffer, 1) - SUM("
                    "CASE WHEN ST_Intersects(bf.geom, dt.geom) IS TRUE "
                    "THEN 1 ELSE 0 END)) AS intersect_sem_obtido "
                "FROM {dt_table} AS dt "
                "INNER JOIN {bf_table} AS bf "
                "ON bf.cardeal = ANY(dt.extracted_buffer) "
                "GROUP BY {cols}, dt.keyword, dt.geom, "
                "dt.extracted_buffer, dt.intersect_buffer"
            ).format(
                dt_table = inBuffers[city]["table"],
                bf_table = inBuffers[city]["pg_buffer"],
                cols     = ", ".join(["dt.{}".format(x) for x in dataColumns])
            ), api='psql'
        )
        
        # Export Results
        dbtbl_to_shp(
            db, inBuffers[city]["table"], 'geom',
            os.path.join(workspace, "{}.shp".format(inBuffers[city]["table"])),
            api='psql', epsg=inBuffers[city]["epsg"]
        )
        
        dbtbl_to_shp(
            db, inBuffers[city]["pg_buffer"], 'geom',
            os.path.join(workspace, "{}.shp".format(inBuffers[city]["pg_buffer"])),
            api='psql', epsg=inBuffers[city]["epsg"]
        )
    
    return inBuffers
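A sketch of a possible call, reusing the Lisbon buffer definition from the docstring; the workspace, database name and keywords are hypothetical, and valid Flickr/Facebook credentials are assumed to be configured for the gasp.sde.dsn modules.

buffers = {
    "lisbon" : {
        'x' : -89004.994779, 'y' : -102815.866054,
        'radius' : 10000, 'epsg' : 3763
    }
}

dsn_data_collection_by_multibuffer(
    buffers, '/tmp/dsn_work', 'dsndb', 'flickr',
    keywords=['flood', 'fire']
)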
Example #6
def dsnsearch_by_cell(GRID_PNT, EPSG, RADIUS, DATA_SOURCE, db, OUTPUT_TABLE):
    """
    Search for data in DSN and other platforms by cell
    """
    
    import time;
    from gasp.gt.fmshp          import shp_to_obj
    from gasp.sql.db            import create_db
    from gasp.sde.dsn.fb.places import places_by_query
    from gasp.g.prj             import df_prj
    from gasp.pyt.df.to         import merge_df
    from gasp.gt.toshp.db       import dbtbl_to_shp
    from gasp.sql.to            import q_to_ntbl
    from gasp.sql.to            import df_to_db
    
    # Open GRID SHP
    GRID_DF = shp_to_obj(GRID_PNT)
    GRID_DF = df_prj(GRID_DF, 4326) if EPSG != 4326 else GRID_DF
    
    GRID_DF["lng"]     = GRID_DF.geometry.x.astype(float)
    GRID_DF["lat"]     = GRID_DF.geometry.y.astype(float)
    GRID_DF["grid_id"] = GRID_DF.index
    
    # GET DATA
    RESULTS = []
    def get_data(row, datasrc):
        if datasrc == 'facebook':
            d = places_by_query(
                {'x' : row.lng, 'y' : row.lat, 'r' : RADIUS}, 4326,
                keyword=None, epsgOut=EPSG, _limit='100',
                onlySearchAreaContained=None
            )
        
        else:
            raise ValueError('{} as datasource is not a valid value'.format(datasrc))
        
        if type(d) == int:
            return
        
        d['grid_id'] = row.grid_id
        
        RESULTS.append(d)
        
        time.sleep(5)
    
    GRID_DF.apply(lambda x: get_data(x, DATA_SOURCE), axis=1)
    
    RT = merge_df(RESULTS)
    
    # Create DB
    create_db(db, overwrite=True, api='psql')
    
    # Send Data to PostgreSQL
    df_to_db(
        db, RT, "{}_data".format(DATA_SOURCE),
        EPSG, "POINT",
        colGeom='geometry' if 'geometry' in RT.columns.values else 'geom'
    )
    
    COLS = [
        x for x in RT.columns.values if x != "geometry" and \
        x != 'geom' and x != "grid_id"
    ] + ["geom"]
    
    GRP_BY_TBL = q_to_ntbl(db, "{}_grpby".format(DATA_SOURCE), (
        "SELECT {cols}, CAST(array_agg(grid_id) AS text) AS grid_id "
        "FROM {dtsrc}_data GROUP BY {cols}"
    ).format(cols=", ".join(COLS), dtsrc=DATA_SOURCE), api='psql')
    
    dbtbl_to_shp(
        db, GRP_BY_TBL, "geom", OUTPUT_TABLE,
        api="psql", epsg=EPSG
    )
    
    return OUTPUT_TABLE
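A hypothetical call: a point grid in EPSG:3763 is searched cell by cell with a 500 m radius and the grouped result is written to a shapefile (Facebook API credentials are assumed to be configured elsewhere).

dsnsearch_by_cell(
    '/tmp/grid_pnt.shp', 3763, 500, 'facebook',
    'fbdb', '/tmp/fb_by_cell.shp'
)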
Example #7
def shps_to_shp(shps, outShp, api="ogr2ogr", fformat='.shp', dbname=None):
    """
    Get all features in several Shapefiles and save them in one file

    api options:
    * ogr2ogr;
    * pandas;
    * psql;
    * grass;
    """

    import os

    if type(shps) != list:
        # Check if is dir
        if os.path.isdir(shps):
            from gasp.pyt.oss import lst_ff
            # List shps in dir
            shps = lst_ff(shps, file_format=fformat)

        else:
            raise ValueError((
                'shps should be a list of paths to Feature Classes or a path '
                'to a folder with Feature Classes'))

    if api == "ogr2ogr":
        from gasp import exec_cmd
        from gasp.gt.prop.ff import drv_name

        out_drv = drv_name(outShp)

        # Create output and copy some features of one layer (first in shps)
        cmdout = exec_cmd('ogr2ogr -f "{}" {} {}'.format(
            out_drv, outShp, shps[0]))

        # Append remaining layers
        lcmd = [
            exec_cmd('ogr2ogr -f "{}" -update -append {} {}'.format(
                out_drv, outShp, shps[i])) for i in range(1, len(shps))
        ]

    elif api == 'pandas':
        """
        Merge SHP using pandas
        """

        from gasp.gt.fmshp import shp_to_obj
        from gasp.gt.toshp import df_to_shp

        if type(shps) != list:
            raise ValueError(
                'shps should be a list with paths for Feature Classes')

        dfs = [shp_to_obj(shp) for shp in shps]

        result = dfs[0]

        for df in dfs[1:]:
            result = result.append(df, ignore_index=True, sort=True)

        df_to_shp(result, outShp)

    elif api == 'psql':
        import os
        from gasp.sql.tbl import tbls_to_tbl, del_tables
        from gasp.gql.to import shp_to_psql

        if not dbname:
            from gasp.sql.db import create_db
            from gasp.pyt.oss import fprop

            dbname = create_db(fprop(outShp, 'fn', forceLower=True), api='psql')

        pg_tbls = shp_to_psql(dbname, shps, api="shp2pgsql")

        if os.path.isfile(outShp):
            from gasp.pyt.oss import fprop
            outbl = fprop(outShp, 'fn')

        else:
            outbl = outShp

        tbls_to_tbl(dbname, pg_tbls, outbl)

        if outbl != outShp:
            from gasp.gt.toshp.db import dbtbl_to_shp

            dbtbl_to_shp(dbname,
                         outbl,
                         'geom',
                         outShp,
                         inDB='psql',
                         api="pgsql2shp")

        del_tables(dbname, pg_tbls)

    elif api == 'grass':
        from gasp import exec_cmd

        rcmd = exec_cmd(
            ("v.patch input={} output={} --overwrite --quiet").format(
                ",".join(shps), outShp))

    else:
        raise ValueError("{} API is not available")

    return outShp
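Two hypothetical calls (paths invented for illustration):

# Merge every shapefile found in a folder using ogr2ogr
shps_to_shp('/tmp/tiles', '/tmp/merged.shp', api='ogr2ogr')

# Merge an explicit list of shapefiles with pandas/GeoPandas
shps_to_shp(['/tmp/a.shp', '/tmp/b.shp'], '/tmp/merged_pd.shp', api='pandas')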
Example #8
File: prj.py Project: jasp382/gasp
def proj(inShp, outShp, outEPSG, inEPSG=None,
        gisApi='ogr', sql=None, db_name=None):
    """
    Project Geodata using GIS
    
    APIs available:
    * ogr;
    * ogr2ogr;
    * pandas;
    * ogr2ogr_SQLITE;
    * psql;
    """
    import os
    
    if gisApi == 'ogr':
        """
        Using ogr Python API
        """
        
        if not inEPSG:
            raise ValueError(
                'To use the ogr API, you must specify the EPSG code of the'
                ' input data using the inEPSG parameter'
            )
        
        from osgeo             import ogr
        from gasp.g.lyr.fld    import copy_flds
        from gasp.gt.prop.feat import get_gtype
        from gasp.gt.prop.ff   import drv_name
        from gasp.gt.prop.prj  import get_sref_from_epsg, get_trans_param
        from gasp.pyt.oss      import fprop
        
        def copyShp(out, outDefn, lyr_in, trans):
            for f in lyr_in:
                g = f.GetGeometryRef()
                g.Transform(trans)
                new = ogr.Feature(outDefn)
                new.SetGeometry(g)
                for i in range(0, outDefn.GetFieldCount()):
                    new.SetField(outDefn.GetFieldDefn(i).GetNameRef(), f.GetField(i))
                out.CreateFeature(new)
                new.Destroy()
                f.Destroy()
        
        # ####### #
        # Project #
        # ####### #
        transP = get_trans_param(inEPSG, outEPSG)
        
        inData = ogr.GetDriverByName(
            drv_name(inShp)).Open(inShp, 0)
        
        inLyr = inData.GetLayer()
        out = ogr.GetDriverByName(
            drv_name(outShp)).CreateDataSource(outShp)
        
        outlyr = out.CreateLayer(
            fprop(outShp, 'fn'), get_sref_from_epsg(outEPSG),
            geom_type=get_gtype(
                inShp, name=None, py_cls=True, gisApi='ogr'
            )
        )
        
        # Copy fields to the output
        copy_flds(inLyr, outlyr)
        # Copy/transform features from the input to the output
        outlyrDefn = outlyr.GetLayerDefn()
        copyShp(outlyr, outlyrDefn, inLyr, transP)
        
        inData.Destroy()
        out.Destroy()
    
    elif gisApi == 'ogr2ogr':
        """
        Transform SRS of any OGR Compliant Data. Save the transformed data
        in a new file
        """

        if not inEPSG:
            from gasp.gt.prop.prj import get_epsg_shp
            inEPSG = get_epsg_shp(inShp)
        
        if not inEPSG:
            raise ValueError('To use ogr2ogr, you must specify inEPSG')
        
        from gasp            import exec_cmd
        from gasp.gt.prop.ff import drv_name
        
        cmd = (
            'ogr2ogr -f "{}" {} {}{} -s_srs EPSG:{} -t_srs EPSG:{}'
        ).format(
            drv_name(outShp), outShp, inShp,
            '' if not sql else ' -dialect sqlite -sql "{}"'.format(sql),
            str(inEPSG), str(outEPSG)
        )
        
        outcmd = exec_cmd(cmd)
    
    elif gisApi == 'ogr2ogr_SQLITE':
        """
        Transform SRS of a SQLITE DB table. Save the transformed data in a
        new table
        """
        
        from gasp import exec_cmd
        
        if not inEPSG:
            raise ValueError((
                'With ogr2ogr_SQLITE, the definition of inEPSG is '
                'mandatory.'
            ))
        
        # TODO: Verify if database is sqlite
        
        db, tbl = inShp['DB'], inShp['TABLE']
        sql = 'SELECT * FROM {}'.format(tbl) if not sql else sql
        
        outcmd = exec_cmd((
            'ogr2ogr -update -append -f "SQLite" {db} -nln "{nt}" '
            '-dialect sqlite -sql "{_sql}" -s_srs EPSG:{inepsg} '
            '-t_srs EPSG:{outepsg} {db}'
        ).format(
            db=db, nt=outShp, _sql=sql, inepsg=str(inEPSG),
            outepsg=str(outEPSG)
        ))
    
    elif gisApi == 'pandas':
        # Test if input Shp is GeoDataframe
        from gasp.gt.fmshp import shp_to_obj
        from gasp.gt.toshp import df_to_shp

        df = shp_to_obj(inShp)
        
        # Project df
        newDf = df.to_crs({'init' : 'epsg:{}'.format(str(outEPSG))})
        
        # Save the projected data as a file
        return df_to_shp(newDf, outShp)
    
    elif gisApi == 'psql':
        from gasp.sql.db      import create_db
        from gasp.pyt.oss     import fprop
        from gasp.gql.to      import shp_to_psql
        from gasp.gt.toshp.db import dbtbl_to_shp
        from gasp.gql.prj     import sql_proj

        # Create Database
        if not db_name:
            db_name = create_db(fprop(
                outShp, 'fn', forceLower=True), api='psql'
            )
        
        else:
            from gasp.sql.i import db_exists

            isDb = db_exists(db_name)

            if not isDb:
                create_db(db_name, api='psql')

        # Import Data
        inTbl = shp_to_psql(db_name, inShp, api='shp2pgsql', encoding="LATIN1")

        # Transform
        oTbl = sql_proj(
            db_name, inTbl, fprop(outShp, 'fn', forceLower=True),
            outEPSG, geomCol='geom', newGeom='geom'
        )

        # Export
        outShp = dbtbl_to_shp(
            db_name, oTbl, 'geom', outShp, api='psql', epsg=outEPSG
        )
    
    else:
        raise ValueError('Sorry, API {} is not available'.format(gisApi))
    
    return outShp
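Two hypothetical calls (file paths and EPSG codes are examples only):

# Reproject from ETRS89 / Portugal TM06 (EPSG:3763) to WGS84 with ogr2ogr
proj('/tmp/parishes.shp', '/tmp/parishes_wgs84.shp', 4326,
     inEPSG=3763, gisApi='ogr2ogr')

# PostGIS-based variant; a database named after the output file is created
# automatically when db_name is not given
proj('/tmp/parishes.shp', '/tmp/parishes_wgs84.shp', 4326, gisApi='psql')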
Example #9
def lnh_to_polygons(inShp, outShp, api='saga', db=None):
    """
    Line to Polygons
    
    APIs available:
    * saga;
    * grass;
    * pygrass;
    * psql;
    """

    if api == 'saga':
        """
        http://www.saga-gis.org/saga_tool_doc/7.0.0/shapes_polygons_3.html
        
        Converts lines to polygons. Line arcs are closed to polygons simply
        by connecting the last point with the first. Optionally, parts of
        polylines can be merged into one polygon.
        """

        from gasp import exec_cmd

        rcmd = exec_cmd(("saga_cmd shapes_polygons 3 -POLYGONS {} "
                         "-LINES {} -SINGLE 1 -MERGE 1").format(outShp, inShp))

    elif api == 'grass' or api == 'pygrass':
        # Do it using GRASS GIS

        import os
        from gasp.gt.wenv.grs import run_grass
        from gasp.pyt.oss import fprop

        # Create GRASS GIS Session
        wk = os.path.dirname(outShp)
        lo = fprop(outShp, 'fn', forceLower=True)

        gs = run_grass(wk, lo, srs=inShp)

        import grass.script as grass
        import grass.script.setup as gsetup
        gsetup.init(gs, wk, lo, 'PERMANENT')

        # Import Packages
        from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
        from gasp.gt.toshp.cgeo import line_to_polyline
        from gasp.gt.toshp.cgeo import geomtype_to_geomtype
        from gasp.gt.toshp.cgeo import boundary_to_areas

        # Send data to GRASS GIS
        lnh_shp = shp_to_grs(inShp,
                             fprop(inShp, 'fn', forceLower=True),
                             asCMD=True if api == 'grass' else None)

        # Build Polylines
        pol_lnh = line_to_polyline(lnh_shp,
                                   "polylines",
                                   asCmd=True if api == 'grass' else None)

        # Polyline to boundary
        bound = geomtype_to_geomtype(pol_lnh,
                                     'bound_shp',
                                     'line',
                                     'boundary',
                                     cmd=True if api == 'grass' else None)

        # Boundary to Area
        areas_shp = boundary_to_areas(bound,
                                      lo,
                                      useCMD=True if api == 'grass' else None)

        # Export data
        outShp = grs_to_shp(areas_shp,
                            outShp,
                            'area',
                            asCMD=True if api == 'grass' else None)

    elif api == 'psql':
        """ Do it using PostGIS """
        from gasp.pyt.oss import fprop
        from gasp.sql.db import create_db
        from gasp.gql.to import shp_to_psql
        from gasp.gt.toshp.db import dbtbl_to_shp
        from gasp.gql.cnv import lnh_to_polg
        from gasp.gt.prop.prj import get_epsg_shp

        # Create DB
        if not db:
            db = create_db(fprop(inShp, 'fn', forceLower=True), api='psql')

        else:
            from gasp.sql.i import db_exists
            isDB = db_exists(db)

            if not isDB:
                create_db(db, api='psql')

        # Send data to DB
        in_tbl = shp_to_psql(db, inShp, api="shp2pgsql")

        # Get Result
        result = lnh_to_polg(db, in_tbl, fprop(outShp, 'fn', forceLower=True))

        # Export Result
        outShp = dbtbl_to_shp(db,
                              result,
                              "geom",
                              outShp,
                              api='psql',
                              epsg=get_epsg_shp(inShp))

    else:
        raise ValueError("API {} is not available".format(api))

    return outShp
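Hypothetical calls showing the SAGA and PostGIS paths:

# Close line features into polygons with SAGA
lnh_to_polygons('/tmp/contours.shp', '/tmp/contour_polygons.shp', api='saga')

# Same operation via PostGIS, storing the intermediate tables in 'lnhdb'
lnh_to_polygons('/tmp/contours.shp', '/tmp/contour_polygons.shp',
                api='psql', db='lnhdb')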
Example #10
File: mod2.py Project: jasp382/gasp
def grs_vec_roads(osmdb, lineTbl, polyTbl):
    """
    Select Roads for GRASS GIS
    """
    
    import datetime
    from gasp.sql.i        import row_num
    from gasp.gt.toshp.db  import dbtbl_to_shp
    from gasp.gt.prox.bf   import _buffer
    from gasp.gt.gop.genze import dissolve
    from gasp.gt.tbl.grs   import add_table
    
    # Roads to GRASS GIS
    time_a = datetime.datetime.now().replace(microsecond=0)
    NR = row_num(osmdb, lineTbl, where="roads IS NOT NULL", api='sqlite')
    time_b = datetime.datetime.now().replace(microsecond=0)
    
    if not NR: return None, {0 : ('count_rows_roads', time_b - time_a)}
    
    roadsVect = dbtbl_to_shp(
        osmdb, lineTbl, "geometry", "all_roads", where="roads IS NOT NULL",
        inDB='sqlite', outShpIsGRASS=True
    )
    time_c = datetime.datetime.now().replace(microsecond=0)
    
    # Buildings to GRASS GIS
    NB = row_num(osmdb, polyTbl, where="building IS NOT NULL", api='sqlite')
    time_d = datetime.datetime.now().replace(microsecond=0)
    
    if NB:
        from gasp.gt.prox    import grs_near as near
        from gasp.gt.tbl.grs import update_table
        
        builds = dbtbl_to_shp(
            osmdb, polyTbl, "geometry", "all_builds", where="building IS NOT NULL",
            filterByReg=True, inDB='sqlite', outShpIsGRASS=True
        )
        time_e = datetime.datetime.now().replace(microsecond=0)
        
        near(roadsVect, builds, nearDistCol="todist", maxDist=12, as_cmd=True)
        time_f = datetime.datetime.now().replace(microsecond=0)
        update_table(
            roadsVect, "bf_roads", "round(todist,0)",
            "\"todist > 0\"",
            lyrN=1, ascmd=True
        )
        time_g = datetime.datetime.now().replace(microsecond=0)
    
    else:
        time_e = None; time_f = None; time_g = None
    
    # Run Buffer tool
    roadsBf = _buffer(
        roadsVect, "bf_roads", "bf_roads",
        api='grass', geom_type="line"
    )
    time_h = datetime.datetime.now().replace(microsecond=0)
    
    # Dissolve Roads
    roadsDiss = dissolve(roadsBf, "diss_roads", "roads", api="grass")
    
    add_table(roadsDiss, None, lyrN=1, asCMD=True)
    time_i = datetime.datetime.now().replace(microsecond=0)
    
    return roadsDiss, {
        0 : ('count_rows_roads', time_b - time_a),
        1 : ('import_roads', time_c - time_b),
        2 : ('count_rows_build', time_d - time_c),
        3 : None if not time_e else ('import_builds', time_e - time_d),
        4 : None if not time_f else ('near_analysis', time_f - time_e),
        5 : None if not time_g else ('update_buffer_tbl', time_g - time_f),
        6 : ('buffer_roads', time_h - time_g if time_g else time_h - time_d),
        7 : ('diss_roads', time_i - time_h)
    }
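A sketch of how this step might be invoked; it assumes an active GRASS GIS session (created with run_grass/gsetup.init as in Example #12) and an OSM-derived SQLite database with line and polygon tables, all hypothetical here.

roads, timings = grs_vec_roads('/tmp/osm.sqlite', 'lines', 'multipolygons')

if roads:
    print(roads)    # name of the dissolved roads vector in the GRASS mapset
    print(timings)  # duration of each step, keyed by step index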
Example #11
File: mod2.py Project: jasp382/gasp
def grs_rst_roads(osmdb, lineTbl, polyTbl, dataFolder, LULC_CLS):
    """
    Raster Roads for GRASS
    """
    
    import os;             import datetime
    from gasp.gt.toshp.cff import shp_to_grs
    from gasp.gt.toshp.db  import dbtbl_to_shp
    from gasp.gt.torst     import shp_to_rst
    from gasp.gql.prox     import splite_buffer
    from gasp.sql.i        import row_num
    
    time_a = datetime.datetime.now().replace(microsecond=0)
    NR = row_num(osmdb, lineTbl, where="roads IS NOT NULL", api='sqlite')
    time_b = datetime.datetime.now().replace(microsecond=0)
    
    if not NR: return None, {0 : ('count_rows_roads', time_b - time_a)}
    
    roadFile = splite_buffer(
        osmdb, lineTbl, "bf_roads", "geometry", 'bfu_roads',
        #os.path.join(dataFolder, 'bf_roads.gml'),
        whrClause="roads IS NOT NULL",
        outTblIsFile=None, dissolve="ALL"
    )
    time_c = datetime.datetime.now().replace(microsecond=0)
    
    #roadGrs = shp_to_grs(roadFile, "bf_roads", filterByReg=True, asCMD=True)
    roadGrs = dbtbl_to_shp(
        osmdb, roadFile, "geom", 'bf_roads',
        notTable=True, outShpIsGRASS=True, inDB='sqlite'
    )
    time_d = datetime.datetime.now().replace(microsecond=0)
    roadRst = shp_to_rst(
        roadGrs, int(LULC_CLS), None, None, "rst_roads", api="grass"
    )
    time_e = datetime.datetime.now().replace(microsecond=0)
    
    # Builds to GRASS and to RASTER
    NB = row_num(osmdb, polyTbl, where="building IS NOT NULL", api='sqlite')
    time_f = datetime.datetime.now().replace(microsecond=0)
    
    if NB:
        from gasp.gt.nop.alg  import rstcalc
        from gasp.gt.nop.rcls import set_null, null_to_value
        
        buildsShp = dbtbl_to_shp(
            osmdb, polyTbl, "geom", "all_builds",
            where="building IS NOT NULL",
            notTable=True, outShpIsGRASS=True, inDB='sqlite'
        )
        time_g = datetime.datetime.now().replace(microsecond=0)
        
        buildsRst = shp_to_rst(
            buildsShp, 1, None, None, "rst_builds", api="grass"
        )
        time_h = datetime.datetime.now().replace(microsecond=0)
        
        # Buildings to nodata | Nodata to 0
        null_to_value(buildsRst, 0, as_cmd=True)
        time_i = datetime.datetime.now().replace(microsecond=0)
        set_null(buildsRst, 1, ascmd=True)
        time_j = datetime.datetime.now().replace(microsecond=0)
        
        # Do the math: roads + builds | if builds and roads at the same cell
        # cell will be null in the road layer
        roadsRes = rstcalc(
            "{} + {}".format(roadRst, buildsRst), "cls_roads", api="grass")
        time_l = datetime.datetime.now().replace(microsecond=0)
        
        return {LULC_CLS : roadsRes}, {
            0 : ('count_rows_roads', time_b - time_a),
            1 : ('buffer_roads', time_c - time_b),
            2 : ('import_roads', time_d - time_c),
            3 : ('roads_to_rst', time_e - time_d),
            4 : ('count_build', time_f - time_e),
            5 : ('builds_to_grs', time_g - time_f),
            6 : ('builds_to_rst', time_h - time_g),
            7 : ('bnull_to_val', time_i - time_h),
            8 : ('builds_to_nd', time_j - time_i),
            9 : ('roads_build_mc', time_l - time_j)
        }
    
    else:
        return {LULC_CLS : roadRst}, {
            0 : ('count_rows_roads', time_b - time_a),
            1 : ('buffer_roads', time_c - time_b),
            2 : ('import_roads', time_d - time_c),
            3 : ('roads_to_rst', time_e - time_d),
            4 : ('count_build', time_f - time_e)
        }
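A similarly hedged sketch, again assuming an active GRASS session and an OSM SQLite database; 1221 stands in for whatever LULC class code the caller assigns to roads.

result, timings = grs_rst_roads(
    '/tmp/osm.sqlite', 'lines', 'multipolygons', '/tmp/tmpdata', 1221
)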
Example #12
File: brk.py Project: jasp382/gasp
def v_break_at_points(workspace, loc, lineShp, pntShp, db, srs, out_correct,
            out_tocorrect):
    """
    Break lines at points - Based on GRASS GIS v.edit
    
    Use PostGIS to sanitize the result
    
    TODO: Confirm utility
    Problem: GRASS GIS always uses the first line to break.
    """
    
    import os
    from gasp.gql.to       import shp_to_psql
    from gasp.gt.toshp.db  import dbtbl_to_shp
    from gasp.gt.wenv.grs  import run_grass
    from gasp.pyt.oss      import fprop
    from gasp.sql.db       import create_db
    from gasp.sql.to       import q_to_ntbl
    
    tmpFiles = os.path.join(workspace, loc)
    
    gbase = run_grass(workspace, location=loc, srs=srs)
    
    import grass.script       as grass
    import grass.script.setup as gsetup
    
    gsetup.init(gbase, workspace, loc, 'PERMANENT')
    
    from gasp.gt.toshp.cff import shp_to_grs, grs_to_shp
    
    grsLine = shp_to_grs(
        lineShp, fprop(lineShp, 'fn', forceLower=True)
    )
    
    vedit_break(grsLine, pntShp, geomType='line')
    
    LINES = grs_to_shp(grsLine, os.path.join(
        tmpFiles, grsLine + '_v1.shp'), 'line')
    
    # Sanitize output of v.edit.break using PostGIS
    create_db(db, overwrite=True, api='psql')
    
    LINES_TABLE = shp_to_psql(
        db, LINES, srsEpsgCode=srs,
        pgTable=fprop(LINES, 'fn', forceLower=True), api="shp2pgsql"
    )
    
    # Delete the old/original lines and keep only the broken ones
    Q = (
        "SELECT {t}.*, foo.cat_count FROM {t} INNER JOIN ("
            "SELECT cat, COUNT(cat) AS cat_count, "
            "MAX(ST_Length(geom)) AS max_len "
            "FROM {t} GROUP BY cat"
        ") AS foo ON {t}.cat = foo.cat "
        "WHERE foo.cat_count = 1 OR foo.cat_count = 2 OR ("
            "foo.cat_count = 3 AND ST_Length({t}.geom) <= foo.max_len)"
    ).format(t=LINES_TABLE)
    
    CORR_LINES = q_to_ntbl(
        db, "{}_corrected".format(LINES_TABLE), Q, api='psql'
    )
    
    # TODO: Delete Rows that have exactly the same geometry
    
    # Highlight problems that the user must solve case by case
    Q = (
        "SELECT {t}.*, foo.cat_count FROM {t} INNER JOIN ("
            "SELECT cat, COUNT(cat) AS cat_count FROM {t} GROUP BY cat"
        ") AS foo ON {t}.cat = foo.cat "
        "WHERE foo.cat_count > 3"
    ).format(t=LINES_TABLE)
    
    ERROR_LINES = q_to_ntbl(
        db, "{}_not_corr".format(LINES_TABLE), Q, api='psql'
    )
    
    dbtbl_to_shp(
        db, CORR_LINES, "geom", out_correct,
        api="pgsql2shp"
    )
    
    dbtbl_to_shp(
        db, ERROR_LINES, "geom", out_tocorrect,
        api="pgsql2shp"
    )
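A hypothetical call: roads.shp is broken at the locations in crossings.shp inside a new GRASS location, and the PostGIS database 'breakdb' is used to split the output into corrected lines and lines that still need manual checking.

v_break_at_points(
    '/tmp/grass_work', 'breaklines',
    '/tmp/roads.shp', '/tmp/crossings.shp',
    'breakdb', 3763,
    '/tmp/roads_broken.shp', '/tmp/roads_to_check.shp'
)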