Example #1
def extract_random_features(inshp, nfeat, outshp, is_percentage=None):
    """
    Extract Random features from one Feature Class
    and save them in a new file
    """

    import numpy as np
    from glass.g.rd.shp import shp_to_obj
    from glass.g.wt.shp import obj_to_shp
    from glass.g.prop.prj import get_shp_epsg

    # Open data
    df = shp_to_obj(inshp)

    # Get number of random features
    n = int(round(nfeat * df.shape[0] / 100, 0)) if is_percentage else nfeat

    # Get random sample
    df['idx'] = df.index
    rnd = np.random.choice(df.idx, n, replace=False)

    # Filter features
    rnd_df = df[df.idx.isin(rnd)]

    rnd_df.drop('idx', axis=1, inplace=True)

    # Save result
    epsg = get_shp_epsg(inshp)
    return obj_to_shp(rnd_df, 'geometry', epsg, outshp)
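
A minimal usage sketch (the paths and sample sizes below are hypothetical): extract 50 random features, or a 10% sample when is_percentage is set.

    extract_random_features('/data/parcels.shp', 50, '/data/parcels_rnd.shp')
    extract_random_features('/data/parcels.shp', 10, '/data/parcels_rnd10.shp', is_percentage=True)
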
Example #2
def buffer_ext(inShp, meterTolerance, outShp, inEpsg=None):
    """
    For all geometries, calculate the boundary given by 
    the sum between the feature extent and the Tolerance variable
    """

    from glass.g.rd.shp import shp_to_obj
    from glass.g.wt.shp import df_to_shp
    from glass.g.gp.prox.bfing.obj import df_buffer_extent
    from glass.g.prop.prj import get_shp_epsg

    inDf = shp_to_obj(inShp)

    epsg = get_shp_epsg(inShp) if not inEpsg else inEpsg

    result = df_buffer_extent(inDf, epsg, meterTolerance)

    return df_to_shp(result, outShp)
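
A minimal usage sketch (hypothetical paths): grow every feature extent by a 500 meter tolerance and write the resulting rectangles to a new file.

    buffer_ext('/data/buildings.shp', 500, '/data/buildings_ext500.shp')
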
Example #3
def otp_closest_facility(incidents, facilities, hourday, date, output):
    """
    Closest Facility using OTP
    """

    import os
    from glass.g.rd.shp import shp_to_obj
    from glass.g.prop.prj import get_shp_epsg
    from glass.g.wt.shp import obj_to_shp
    from glass.pys.oss import fprop
    from glass.g.prj.obj import df_prj
    from glass.g.mob.otp.log import clsfacility

    # Open Data
    incidents_df = df_prj(shp_to_obj(incidents), 4326)
    facilities_df = df_prj(shp_to_obj(facilities), 4326)

    # Run closest facility
    out_epsg = get_shp_epsg(incidents)
    res, logs = clsfacility(incidents_df,
                            facilities_df,
                            hourday,
                            date,
                            out_epsg=out_epsg)

    # Export result
    obj_to_shp(res, "geom", out_epsg, output)

    # Write logs
    if len(logs):
        with open(
                os.path.join(os.path.dirname(output),
                             fprop(output, 'fn') + '_log.txt'), 'w') as txt:
            for i in logs:
                txt.write(("Incident_id: {}\n"
                           "Facility_id: {}\n"
                           "ERROR message:\n"
                           "{}\n"
                           "\n\n\n\n\n\n").format(str(i[0]), str(i[1]),
                                                  str(i[2])))

    return output
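
A usage sketch (hypothetical paths; the hour and date strings assume whatever format the clsfacility OTP wrapper expects):

    otp_closest_facility('/data/incidents.shp', '/data/hospitals.shp',
                         '08:00', '2021-05-12', '/data/cf_otp.shp')
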
Example #4
def count_pntinpol(inpnt, inpoly, cntcol, out):
    """
    Count points inside polygons
    """

    from glass.g.gp.ovl.obj import count_pnt_inside_poly
    from glass.g.rd.shp import shp_to_obj
    from glass.g.wt.shp import obj_to_shp
    from glass.g.prop.prj import get_shp_epsg

    # Open data
    pnt_df = shp_to_obj(inpnt)
    pol_df = shp_to_obj(inpoly)

    # Count points
    pol_df = count_pnt_inside_poly(pnt_df, cntcol, pol_df)

    # Export to file
    obj_to_shp(pol_df, "geometry", get_shp_epsg(inpoly), out)

    return out
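
A minimal usage sketch (hypothetical paths): count the points of one layer inside each polygon of another, writing the counts to a new column named 'npnt'.

    count_pntinpol('/data/trees.shp', '/data/blocks.shp', 'npnt', '/data/blocks_cnt.shp')
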
Example #5
def points_by_polutation(pnt,
                         mapunits,
                         popcol,
                         outcol,
                         output,
                         count_pnt=None,
                         inhabitants=1000,
                         pntattr=None):
    """
    Useful to calculate, for example, pharmacies per 1000 inhabitants
    """

    import geopandas as gp
    from glass.g.rd.shp import shp_to_obj
    from glass.g.prop.prj import get_shp_epsg
    from glass.g.wt.shp import obj_to_shp
    from glass.g.gp.ovl.obj import count_pnt_inside_poly

    # Open Data
    pnt_df = shp_to_obj(pnt)
    units_df = shp_to_obj(mapunits)

    cpnt = 'count_pnt' if not count_pnt else count_pnt
    pntattr = None if not pntattr else pntattr \
        if pntattr in list(pnt_df.columns.values) else None
    inhabitants = 1 if not inhabitants else inhabitants

    units_df = count_pnt_inside_poly(pnt_df, cpnt, units_df, pntattr=pntattr)
    units_df[outcol] = (units_df[cpnt] / units_df[popcol]) * inhabitants

    if not count_pnt:
        units_df.drop([cpnt], axis=1, inplace=True)

    obj_to_shp(units_df, "geometry", get_shp_epsg(mapunits), output)

    return output
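
A usage sketch (hypothetical paths and column names): pharmacies per 1000 inhabitants of each mapping unit, using the population column 'pop'.

    points_by_polutation('/data/pharmacies.shp', '/data/census_units.shp',
                         'pop', 'pharm_1k', '/data/units_pharm.shp',
                         inhabitants=1000)
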
Example #6
def shpext_to_boundary(in_shp, out_srs=None):
    """
    Read one feature class extent and create a boundary with that
    extent
    """

    from glass.g.prop.ext import get_ext
    from glass.g.gobj     import create_polygon

    # Get Extent
    ext = get_ext(in_shp)

    # Create points of the new boundary based on the extent
    boundary_points = [
        (ext[0], ext[3]), (ext[1], ext[3]),
        (ext[1], ext[2]), (ext[0], ext[2]), (ext[0], ext[3])
    ]
    polygon = create_polygon(boundary_points)

    if out_srs:
        from glass.g.prop.prj import get_shp_epsg

        in_srs = get_shp_epsg(in_shp)

        if in_srs != out_srs:
            from glass.g.prj.obj import prj_ogrgeom

            poly = prj_ogrgeom(polygon, in_srs, out_srs,
                api='shply')

            return poly
        
        else:
            return polygon
    else:
        return polygon
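
A minimal usage sketch (hypothetical path): build a polygon from the layer extent and, optionally, re-project it to another SRS.

    bbox = shpext_to_boundary('/data/study_area.shp', out_srs=4326)
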
Example #7
def shp_to_djg_mdl(in_shp, app, mdl, cols_map, djg_proj):
    """
    Add Geometries to Django Model
    """

    from django.contrib.gis.geos import GEOSGeometry
    from django.contrib.gis.db import models
    from glass.pys import __import
    from glass.webg.djg import get_djgprj
    from glass.g.rd.shp import shp_to_obj
    from glass.g.prop.prj import get_shp_epsg
    from shapely.geometry.multipolygon import MultiPolygon

    def force_multi(geom):
        if geom.geom_type == 'Polygon':
            return MultiPolygon([geom])
        else:
            return geom

    application = get_djgprj(djg_proj)

    mdl_cls = __import('{}.models.{}'.format(app, mdl))
    mdl_obj = mdl_cls()

    in_df = shp_to_obj(in_shp)
    # Check if we need to import the SHP FID
    if 'FID' in cols_map.values():
        in_df["FID"] = in_df.index.astype(int)

    epsg = int(get_shp_epsg(in_shp))
    if not epsg:
        raise ValueError('It is not possible to recognize the EPSG code of in_shp')

    OGR_GEOMS = [
        'POLYGON', 'MULTIPOLYGON', 'MULTILINESTRING', 'LINESTRING', 'POINT',
        'MULTIPOINT'
    ]

    def updateModel(row):
        for FLD in cols_map:
            if cols_map[FLD] not in OGR_GEOMS:
                # Check if field is foreign key
                field_obj = mdl_cls._meta.get_field(FLD)

                if not isinstance(field_obj, models.ForeignKey):
                    setattr(mdl_obj, FLD, row[cols_map[FLD]])

                else:
                    # If yes, use the model instance of the related table
                    # Get the model of the table related to the one whose
                    # data is being restored
                    related_name = field_obj.related_model.__name__

                    related_model = __import('{}.models.{}'.format(
                        app, related_name))

                    related_obj = related_model.objects.get(
                        pk=int(row[cols_map[FLD]]))

                    setattr(mdl_obj, FLD, related_obj)

            else:
                if cols_map[FLD] == 'MULTIPOLYGON':
                    geom = force_multi(row.geometry)

                else:
                    geom = row.geometry

                setattr(mdl_obj, FLD, GEOSGeometry(geom.wkt, srid=epsg))

        mdl_obj.save()

    in_df.apply(lambda x: updateModel(x), axis=1)

    return 1
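
A usage sketch (the app name, model name, column mapping and Django project path are hypothetical): cols_map maps model fields to shapefile columns, with the geometry field mapped to an OGR geometry type name such as 'MULTIPOLYGON'.

    shp_to_djg_mdl('/data/parishes.shp', 'places', 'Parish',
                   {'fid': 'FID', 'name': 'NAME', 'geom': 'MULTIPOLYGON'},
                   '/srv/django/mysite')
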
Example #8
def otp_cf_based_on_rel(incidents, group_incidents_col, facilities,
                        facilities_id, rel_inc_fac, sheet, group_fk,
                        facilities_fk, hour, day, output):
    """
    Calculate time travel considering specific facilities
    for each group of incidents

    Relations between incidents and facilities are in an auxiliary table (rel_inc_fac).
    The auxiliary table must be an xlsx file.
    """

    import os
    import pandas as pd
    from glass.ng.rd import tbl_to_obj
    from glass.g.rd.shp import shp_to_obj
    from glass.g.wt.shp import obj_to_shp
    from glass.g.mob.otp.log import clsfacility
    from glass.g.prop.prj import get_shp_epsg
    from glass.ng.pd import merge_df
    from glass.pys.oss import fprop
    from glass.g.prj.obj import df_prj

    # Avoid problems when facilities_id == facilities_fk
    facilities_fk = facilities_fk + '_fk' if facilities_id == facilities_fk else \
        facilities_fk

    # Open data
    idf = df_prj(shp_to_obj(incidents), 4326)
    fdf = df_prj(shp_to_obj(facilities), 4326)

    rel_df = tbl_to_obj(rel_inc_fac, sheet=sheet)

    oepsg = get_shp_epsg(incidents)

    # Relate facilities with incidents groups
    fdf = fdf.merge(rel_df,
                    how='inner',
                    left_on=facilities_id,
                    right_on=facilities_fk)

    # List Groups
    grp_df = pd.DataFrame({
        'cnttemp':
        idf.groupby([group_incidents_col])[group_incidents_col].agg('count')
    }).reset_index()

    # Do calculations
    res = []
    logs = []
    for idx, row in grp_df.iterrows():
        # Get incidents for that group
        new_i = idf[idf[group_incidents_col] == row[group_incidents_col]]

        # Get facilities for that group
        new_f = fdf[fdf[group_fk] == row[group_incidents_col]]

        # calculate closest facility
        cfres, l = clsfacility(new_i, new_f, hour, day, out_epsg=oepsg)

        res.append(cfres)
        logs.extend(l)

    # Merge results
    out_df = merge_df(res)

    # Recover the facility id
    fdf.drop([c for c in fdf.columns.values if c != facilities_id],
             axis=1,
             inplace=True)
    out_df = out_df.merge(fdf, how='left', left_on='ffid', right_index=True)

    # Export result
    obj_to_shp(out_df, "geom", oepsg, output)

    # Write logs
    if len(logs) > 0:
        with open(
                os.path.join(os.path.dirname(output),
                             fprop(output, 'fn') + '_log.txt'), 'w') as txt:
            for i in logs:
                txt.write(("Incident_id: {}\n"
                           "Facility_id: {}\n"
                           "ERROR message:\n"
                           "{}\n"
                           "\n\n\n\n\n\n").format(str(i[0]), str(i[1]),
                                                  str(i[2])))

    return output
Example #9
def otp_servarea(facilities, hourday, date, breaks, output, vel=None):
    """
    OTP Service Area
    """

    import requests
    import os
    from glass.cons.otp import ISO_URL
    from glass.g.rd.shp import shp_to_obj
    from glass.g.prj.obj import df_prj
    from glass.g.prop.prj import get_shp_epsg
    from glass.g.wt.shp import obj_to_shp
    from glass.pys.oss import fprop
    from glass.g.it.pd import json_obj_to_geodf
    from glass.ng.pd import merge_df
    from glass.pys import obj_to_lst

    breaks = obj_to_lst(breaks)

    # Open Data
    facilities_df = df_prj(shp_to_obj(facilities), 4326)

    # Place request parameters
    get_params = [('mode', 'WALK,TRANSIT'), ('date', date), ('time', hourday),
                  ('maxWalkDistance', 50000),
                  ('walkSpeed', 3 if not vel else vel)]

    breaks.sort()

    for b in breaks:
        get_params.append(('cutoffSec', b))

    # Do the math
    error_logs = []
    results = []

    for i, r in facilities_df.iterrows():
        fromPlace = str(r.geometry.y) + ',' + str(r.geometry.x)

        if not i:
            get_params.append(('fromPlace', fromPlace))
        else:
            get_params[-1] = ('fromPlace', fromPlace)

        resp = requests.get(ISO_URL,
                            get_params,
                            headers={'accept': 'application/json'})

        try:
            data = resp.json()
        except:
            error_logs.append([i, 'Cannot retrieve JSON Response'])
            continue

        gdf = json_obj_to_geodf(data, 4326)
        gdf['ffid'] = i

        results.append(gdf)

    # Merge all Isochrones
    df_res = merge_df(results)

    out_epsg = get_shp_epsg(facilities)

    if out_epsg != 4326:
        df_res = df_prj(df_res, out_epsg)

    obj_to_shp(df_res, "geometry", out_epsg, output)

    # Write logs
    if len(error_logs):
        with open(
                os.path.join(os.path.dirname(output),
                             fprop(output, 'fn') + '.log.txt'), 'w') as txt:
            for i in error_logs:
                txt.write(("Facility_id: {}\n"
                           "ERROR message:\n"
                           "{}\n"
                           "\n\n\n\n\n\n").format(str(i[0]), i[1]))

    return output
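
A usage sketch (hypothetical paths; the breaks are cutoff values in seconds, since they feed the OTP cutoffSec parameter):

    otp_servarea('/data/health_centers.shp', '09:00', '2021-05-12',
                 [600, 1200, 1800], '/data/isochrones.shp')
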
Example #10
def exp_by_group_relfeat(shp, group_col, relfeat, relfeat_id, reltbl,
                         reltbl_sheet, group_fk, relfeat_fk, out_folder,
                         out_tbl):
    """
    Identify groups in shp, get features related with
    these groups and export group features and related
    features to new file
    """

    import os
    import pandas as pd
    from glass.ng.rd import tbl_to_obj
    from glass.ng.wt import obj_to_tbl
    from glass.g.rd.shp import shp_to_obj
    from glass.g.wt.shp import obj_to_shp
    from glass.g.prop.prj import get_shp_epsg

    epsg = get_shp_epsg(shp)

    # Open data
    shp_df = shp_to_obj(shp)
    rel_df = shp_to_obj(relfeat)

    # Get table with relations N-N
    nn_tbl = tbl_to_obj(reltbl, sheet=reltbl_sheet)

    # Relate relfeat with shp groups
    rel_df = rel_df.merge(nn_tbl,
                          how='inner',
                          left_on=relfeat_id,
                          right_on=relfeat_fk)

    # List Groups
    grp_df = pd.DataFrame({
        'cnttemp':
        shp_df.groupby([group_col])[group_col].agg('count')
    }).reset_index()

    ntbls = []
    # Filter and export
    for idx, row in grp_df.iterrows():
        # Get shp_df filter
        new_shp = shp_df[shp_df[group_col] == row[group_col]]

        # Get relfeat filter
        new_relf = rel_df[rel_df[group_fk] == row[group_col]]

        # Export
        shp_i = obj_to_shp(
            new_shp, 'geometry', epsg,
            os.path.join(out_folder, 'lyr_{}.shp'.format(row[group_col])))
        rel_i = obj_to_shp(
            new_relf, 'geometry', epsg,
            os.path.join(out_folder, 'rel_{}.shp'.format(row[group_col])))

        ntbls.append([row[group_col], shp_i, rel_i])

    ntbls = pd.DataFrame(ntbls, columns=['group_id', 'shp_i', 'rel_i'])

    obj_to_tbl(ntbls, out_tbl)

    return out_tbl
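
A usage sketch (paths, sheet and column names are hypothetical): export one shapefile per group together with the features related to that group, and write a summary table.

    exp_by_group_relfeat('/data/incidents.shp', 'grp_id',
                         '/data/facilities.shp', 'fac_id',
                         '/data/relations.xlsx', 'Sheet1', 'grp_fk', 'fac_fk',
                         '/data/out_groups', '/data/out_groups/groups.xlsx')
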
Example #11
def shply_break_lines_on_points(lineShp, pointShp, lineIdInPntShp, splitedShp):
    """
    Break lines on points location
    
    The points should be contained by the lines;
    The points table should have a column with the id of the
    line that contains the point.
    
    lineIDInPntShp is a reference to the FID of lineShp
    """

    from shapely.ops import split
    from shapely.geometry import Point, LineString
    from glass.g.rd.shp import shp_to_obj
    from glass.ng.pd.dagg import col_list_val_to_row
    from glass.g.prop.prj import get_shp_epsg
    from glass.g.wt.shp import df_to_shp
    from glass.ng.pd import dict_to_df

    srs_code = get_shp_epsg(lineShp)

    # Sanitize line geometry
    def fix_line(line, point):
        buff = point.buffer(0.0001)

        splitLine = split(line, buff)

        nline = LineString(
            list(splitLine[0].coords) + list(point.coords) +
            list(splitLine[-1].coords))

        return nline

    pnts = shp_to_obj(pointShp)
    lines = shp_to_obj(lineShp, output='dict')

    # Split Rows
    def split_geom(row):
        # Get related line
        rel_line = lines[row[lineIdInPntShp]]

        if type(rel_line["GEOM"]) != list:
            line_geom = fix_line(rel_line["GEOM"], row.geometry)

            split_lines = split(line_geom, row.geometry)

            lines[row[lineIdInPntShp]]["GEOM"] = [l for l in split_lines]

        else:
            for i in range(len(rel_line["GEOM"])):
                if rel_line["GEOM"][i].distance(row.geometry) < 1e-8:
                    line_geom = fix_line(rel_line["GEOM"][i], row.geometry)
                    split_lines = split(line_geom, row.geometry)
                    split_lines = [l for l in split_lines]

                    lines[row[lineIdInPntShp]]["GEOM"][i] = split_lines[0]
                    lines[row[lineIdInPntShp]]["GEOM"] += split_lines[1:]

                    break

                else:
                    continue
        return row

    pnts = pnts.apply(lambda x: split_geom(x), axis=1)

    # Result to Dataframe
    linesDf = dict_to_df(lines)

    # Where GEOM is a List, create a new row for each element in list
    linesDf = col_list_val_to_row(linesDf,
                                  "GEOM",
                                  geomCol="GEOM",
                                  epsg=srs_code)

    # Save result
    return df_to_shp(linesDf, splitedShp)
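
A usage sketch (hypothetical paths; 'line_fid' is the points column holding the FID of the line that contains each point):

    shply_break_lines_on_points('/data/roads.shp', '/data/stops.shp',
                                'line_fid', '/data/roads_split.shp')
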
Example #12
def lnh_to_polygons(inShp, outShp, api='saga', db=None):
    """
    Line to Polygons
    
    APIs available:
    * saga;
    * grass;
    * pygrass;
    * psql;
    """

    if api == 'saga':
        """
        http://www.saga-gis.org/saga_tool_doc/7.0.0/shapes_polygons_3.html
        
        Converts lines to polygons. Line arcs are closed to polygons simply
        by connecting the last point with the first. Optionally, parts of
        polylines can be merged into one polygon.
        """

        from glass.pys import execmd

        rcmd = execmd(("saga_cmd shapes_polygons 3 -POLYGONS {} "
                       "LINES {} -SINGLE 1 -MERGE 1").format(outShp, inShp))

    elif api == 'grass' or api == 'pygrass':
        # Do it using GRASS GIS

        import os
        from glass.g.wenv.grs import run_grass
        from glass.pys.oss import fprop

        # Create GRASS GIS Session
        wk = os.path.dirname(outShp)
        lo = fprop(outShp, 'fn', forceLower=True)

        gs = run_grass(wk, lo, srs=inShp)

        import grass.script as grass
        import grass.script.setup as gsetup
        gsetup.init(gs, wk, lo, 'PERMANENT')

        # Import Packages
        from glass.g.it.shp import shp_to_grs, grs_to_shp

        # Send data to GRASS GIS
        lnh_shp = shp_to_grs(inShp,
                             fprop(inShp, 'fn', forceLower=True),
                             asCMD=True if api == 'grass' else None)

        # Build Polylines
        pol_lnh = line_to_polyline(lnh_shp,
                                   "polylines",
                                   asCmd=True if api == 'grass' else None)

        # Polyline to boundary
        bound = geomtype_to_geomtype(pol_lnh,
                                     'bound_shp',
                                     'line',
                                     'boundary',
                                     cmd=True if api == 'grass' else None)

        # Boundary to Area
        areas_shp = boundary_to_areas(bound,
                                      lo,
                                      useCMD=True if api == 'grass' else None)

        # Export data
        outShp = grs_to_shp(areas_shp,
                            outShp,
                            'area',
                            asCMD=True if api == 'grass' else None)

    elif api == 'psql':
        """ Do it using PostGIS """
        from glass.pys.oss import fprop
        from glass.ng.sql.db import create_db
        from glass.g.it.db import shp_to_psql
        from glass.g.it.shp import dbtbl_to_shp
        from glass.g.dp.cg.sql import lnh_to_polg
        from glass.g.prop.prj import get_shp_epsg

        # Create DB
        if not db:
            db = create_db(fprop(inShp, 'fn', forceLower=True), api='psql')

        else:
            from glass.ng.prop.sql import db_exists
            isDB = db_exists(db)

            if not isDB:
                create_db(db, api='psql')

        # Send data to DB
        in_tbl = shp_to_psql(db, inShp, api="shp2pgsql")

        # Get Result
        result = lnh_to_polg(db, in_tbl, fprop(outShp, 'fn', forceLower=True))

        # Export Result
        outshp = dbtbl_to_shp(db,
                              result,
                              "geom",
                              outShp,
                              api='psql',
                              epsg=get_shp_epsg(inShp))

    else:
        raise ValueError("API {} is not available".format(api))

    return outShp
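
A minimal usage sketch (hypothetical paths): close line features into polygons with one of the supported backends.

    lnh_to_polygons('/data/contours.shp', '/data/contour_polys.shp', api='saga')
    lnh_to_polygons('/data/contours.shp', '/data/contour_polys.shp', api='grass')
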
Example #13
def shps_to_shp(shps, outShp, api="ogr2ogr", fformat='.shp',
    dbname=None):
    """
    Get all features in several Shapefiles and save them in one file

    api options:
    * ogr2ogr;
    * pandas;
    * psql;
    * grass;
    """

    import os

    if type(shps) != list:
        # Check if is dir
        if os.path.isdir(shps):
            from glass.pys.oss import lst_ff
            # List shps in dir
            shps = lst_ff(shps, file_format=fformat)
        
        else:
            raise ValueError((
                'shps should be a list with paths for Feature Classes or a path to '
                'folder with Feature Classes'
            ))

    
    if api == "ogr2ogr":
        from glass.pys             import execmd
        from glass.g.prop import drv_name
        
        out_drv = drv_name(outShp)
        
        # Create output and copy some features of one layer (first in shps)
        cmdout = execmd('ogr2ogr -f "{}" {} {}'.format(
            out_drv, outShp, shps[0]
        ))
        
        # Append remaining layers
        lcmd = [execmd(
            'ogr2ogr -f "{}" -update -append {} {}'.format(
                out_drv, outShp, shps[i]
            )
        ) for i in range(1, len(shps))]
    
    elif api == 'pandas':
        """
        Merge SHP using pandas
        """
        
        from glass.g.rd.shp import shp_to_obj
        from glass.g.wt.shp import df_to_shp
        
        if type(shps) != list:
            raise ValueError('shps should be a list with paths for Feature Classes')
        
        dfs = [shp_to_obj(shp) for shp in shps]
        
        result = dfs[0]
        
        for df in dfs[1:]:
            result = result.append(df, ignore_index=True, sort=True)
        
        df_to_shp(result, outShp)
    
    elif api == 'psql':
        import os
        from glass.ng.sql.tbl import tbls_to_tbl, del_tables
        from glass.g.it.db import shp_to_psql

        if not dbname:
            from glass.pys.oss import fprop
            from glass.ng.sql.db import create_db

            dbname = create_db(fprop(outShp, 'fn', forceLower=True), api='psql')

        pg_tbls = shp_to_psql(
            dbname, shps, api="shp2pgsql"
        )

        if os.path.isfile(outShp):
            from glass.pys.oss import fprop
            outbl = fprop(outShp, 'fn')
        
        else:
            outbl = outShp

        tbls_to_tbl(dbname, pg_tbls, outbl)

        if outbl != outShp:
            from glass.g.it.shp import dbtbl_to_shp

            dbtbl_to_shp(
                dbname, outbl, 'geom', outShp, inDB='psql',
                api="pgsql2shp"
            )

        del_tables(dbname, pg_tbls)
    
    elif api == 'grass':
        from glass.g.wenv.grs import run_grass
        from glass.pys.oss    import fprop, lst_ff
        from glass.g.prop.prj import get_shp_epsg

        lshps = shps if isinstance(shps, list) else lst_ff(shps, file_format='.shp')
        
        epsg = get_shp_epsg(lshps[0])

        gwork = os.path.dirname(outShp)
        outshpname = fprop(outShp, "fn")
        loc   = f'loc_{outshpname}'
        gbase = run_grass(gwork, loc=loc, srs=epsg)

        import grass.script.setup as gsetup
        gsetup.init(gbase, gwork, loc, 'PERMANENT')

        from glass.g.it.shp import shp_to_grs, grs_to_shp

        # Import data
        gshps = [shp_to_grs(s, fprop(s, 'fn'), asCMD=True) for s in lshps]

        patch = vpatch(gshps, outshpname)

        grs_to_shp(patch, outShp, "area")
       
    else:
        raise ValueError(
            "{} API is not available".format(api)
        )
    
    return outShp
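
A usage sketch (hypothetical paths): merge every shapefile in a folder, or an explicit list of files, into a single layer.

    shps_to_shp('/data/tiles', '/data/merged.shp', api='pandas', fformat='.shp')
    shps_to_shp(['/data/a.shp', '/data/b.shp'], '/data/merged.shp', api='ogr2ogr')
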
Example #14
def closest_facility(incidents,
                     incidents_id,
                     facilities,
                     output,
                     impedance='TravelTime'):
    """
    impedance options:
    * TravelTime;
    * WalkTime;
    * metric;
    """

    import requests
    import pandas as pd
    import numpy as np
    from glass.cons.esri import rest_token, CF_URL
    from glass.g.it.esri import json_to_gjson
    from glass.g.rd.shp import shp_to_obj
    from glass.g.wt.shp import df_to_shp
    from glass.ng.pd.split import df_split
    from glass.ng.pd import merge_df
    from glass.g.prop.prj import get_shp_epsg
    from glass.g.prj.obj import df_prj
    from glass.g.it.pd import df_to_geodf
    from glass.g.it.pd import json_obj_to_geodf
    from glass.cons.esri import get_tv_by_impedancetype

    # Get API token
    token = rest_token()

    # Data to Pandas DataFrames
    fdf = shp_to_obj(facilities)
    idf = shp_to_obj(incidents)

    # Re-project to WGS84
    fdf = df_prj(fdf, 4326)
    idf = df_prj(idf, 4326)

    # Geometries to str - inputs for requests
    fdf['coords'] = fdf.geometry.x.astype(str) + ',' + fdf.geometry.y.astype(
        str)
    idf['coords'] = idf.geometry.x.astype(str) + ',' + idf.geometry.y.astype(
        str)

    # Delete geometry from incidents DF
    idf.drop(['geometry'], axis=1, inplace=True)

    # Split data
    # ArcGIS API only accepts 100 facilities
    # and 100 incidents in each request
    fdfs = df_split(fdf, 100, nrows=True) if fdf.shape[0] > 100 else [fdf]
    idfs = df_split(idf, 100, nrows=True) if idf.shape[0] > 100 else [idf]

    for i in range(len(idfs)):
        idfs[i].reset_index(inplace=True)
        idfs[i].drop(['index'], axis=1, inplace=True)

    for i in range(len(fdfs)):
        fdfs[i].reset_index(inplace=True)
        fdfs[i].drop(['index'], axis=1, inplace=True)

    # Get travel mode
    tv = get_tv_by_impedancetype(impedance)

    # Ask for results
    results = []

    drop_cols = [
        'ObjectID', 'FacilityID', 'FacilityRank', 'Name',
        'IncidentCurbApproach', 'FacilityCurbApproach', 'IncidentID',
        'StartTime', 'EndTime', 'StartTimeUTC', 'EndTimeUTC', 'Total_Minutes',
        'Total_TruckMinutes', 'Total_TruckTravelTime', 'Total_Miles'
    ]

    if impedance == 'WalkTime':
        tv_col = 'walktime'
        rn_cols = {'Total_WalkTime': tv_col}

        ndrop = ['Total_Kilometers', 'Total_TravelTime', 'Total_Minutes']

    elif impedance == 'metric':
        tv_col = 'kilomts'
        rn_cols = {'Total_Kilometers': tv_col}

        ndrop = ['Total_WalkTime', 'Total_TravelTime', 'Total_Minutes']

    else:
        tv_col = 'traveltime'
        rn_cols = {'Total_TravelTime': tv_col}

        ndrop = ['Total_Kilometers', 'Total_WalkTime', 'Total_Minutes']

    drop_cols.extend(ndrop)

    for i_df in idfs:
        incidents_str = i_df.coords.str.cat(sep=';')

        for f_df in fdfs:
            facilities_str = f_df.coords.str.cat(sep=';')

            # Make request
            r = requests.get(CF_URL,
                             params={
                                 'facilities': facilities_str,
                                 'incidents': incidents_str,
                                 'token': token,
                                 'f': 'json',
                                 'travelModel': tv,
                                 'defaultTargetFacilityCount': '1',
                                 'returnCFRoutes': True,
                                 'travelDirection':
                                 'esriNATravelDirectionToFacility',
                                 'impedanceAttributeName': impedance
                             })

            if r.status_code != 200:
                raise ValueError('Error when requesting from: {}'.format(
                    str(r.url)))

            # Convert ESRI json to GeoJson
            esri_geom = r.json()
            geom = json_to_gjson(esri_geom.get('routes'))

            # GeoJSON to GeoDataFrame
            gdf = json_obj_to_geodf(geom, 4326)

            # Delete unwanted columns
            gdf.drop(drop_cols, axis=1, inplace=True)

            # Rename some interest columns
            gdf.rename(columns=rn_cols, inplace=True)

            # Add to results original attributes of incidents
            r_df = gdf.merge(i_df,
                             how='left',
                             left_index=True,
                             right_index=True)

            results.append(r_df)

    # Compute final result
    # Put every DataFrame in a single DataFrame
    fgdf = merge_df(results)

    # Since facilities were divided,
    # the same incident has several "nearest" facilities.
    # We just want one nearest facility,
    # so let's group by using the min operator
    gpdf = pd.DataFrame(fgdf.groupby([incidents_id]).agg({tv_col: 'min'
                                                          })).reset_index()

    gpdf.rename(columns={incidents_id: 'iid', tv_col: 'impcol'}, inplace=True)

    # Recover geometry
    fgdf = fgdf.merge(gpdf, how='left', left_on=incidents_id, right_on='iid')
    fgdf = fgdf[fgdf[tv_col] == fgdf.impcol]
    fgdf = df_to_geodf(fgdf, 'geometry', 4326)

    # Remove repeated units
    g = fgdf.groupby('iid')
    fgdf['rn'] = g[tv_col].rank(method='first')
    fgdf = fgdf[fgdf.rn == 1]

    fgdf.drop(['iid', 'rn'], axis=1, inplace=True)

    # Re-project to original SRS
    epsg = get_shp_epsg(facilities)
    fgdf = df_prj(fgdf, epsg)

    # Export result
    df_to_shp(fgdf, output)

    return output
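
A usage sketch (hypothetical paths and id column; assumes valid ArcGIS REST credentials are available to rest_token):

    closest_facility('/data/incidents.shp', 'inc_id', '/data/hospitals.shp',
                     '/data/nearest_hospital.shp', impedance='WalkTime')
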
Example #15
def service_areas(facilities, breaks, output, impedance='TravelTime'):
    """
    Produce Service Areas Polygons
    """

    import requests
    from glass.cons.esri import rest_token, SA_URL
    from glass.g.rd.shp import shp_to_obj
    from glass.g.prj.obj import df_prj
    from glass.g.it.esri import json_to_gjson
    from glass.g.it.pd import json_obj_to_geodf
    from glass.g.wt.shp import df_to_shp
    from glass.cons.esri import get_tv_by_impedancetype
    from glass.ng.pd.split import df_split
    from glass.ng.pd import merge_df
    from glass.g.prop.prj import get_shp_epsg

    # Get Token
    token = rest_token()

    # Get data
    pntdf = shp_to_obj(facilities)

    pntdf = df_prj(pntdf, 4326)

    pntdf['coords'] = pntdf.geometry.x.astype(
        str) + ',' + pntdf.geometry.y.astype(str)

    pntdf.drop(['geometry'], axis=1, inplace=True)

    dfs = df_split(pntdf, 100, nrows=True)

    # Make requests
    gdfs = []
    for df in dfs:
        facilities_str = df.coords.str.cat(sep=';')

        tv = get_tv_by_impedancetype(impedance)

        r = requests.get(
            SA_URL,
            params={
                'facilities': facilities_str,
                'token': token,
                'f': 'json',
                'travelModel': tv,
                'defaultBreaks': ','.join(breaks),
                'travelDirection': 'esriNATravelDirectionToFacility',
                #'travelDirection'        : 'esriNATravelDirectionFromFacility',
                'outputPolygons': 'esriNAOutputPolygonDetailed',
                'impedanceAttributeName': impedance
            })

        if r.status_code != 200:
            raise ValueError('Error when requesting from: {}'.format(str(
                r.url)))

        esri_geom = r.json()
        geom = json_to_gjson(esri_geom.get('saPolygons'))

        gdf = json_obj_to_geodf(geom, 4326)

        gdf = gdf.merge(df, how='left', left_index=True, right_index=True)

        gdfs.append(gdf)

    # Compute final result
    fgdf = merge_df(gdfs)

    epsg = get_shp_epsg(facilities)
    fgdf = df_prj(fgdf, epsg)

    df_to_shp(fgdf, output)

    return output
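
A usage sketch (hypothetical path; the breaks are passed as strings because they are joined into the defaultBreaks request parameter):

    service_areas('/data/health_centers.shp', ['5', '10', '15'],
                  '/data/service_areas.shp', impedance='WalkTime')
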
Example #16
def cf_based_on_relations(incidents,
                          incidents_id,
                          group_incidents_col,
                          facilities,
                          facilities_id,
                          rel_inc_fac,
                          sheet,
                          group_fk,
                          facilities_fk,
                          output,
                          impedance='TravelTime'):
    """
    Calculate time travel considering specific facilities
    for each group of incidents

    Relations between incidents and facilities are in an auxiliary table (rel_inc_fac).
    The auxiliary table must be an xlsx file.
    """

    import os
    import pandas as pd
    from glass.ng.rd import tbl_to_obj
    from glass.g.rd.shp import shp_to_obj
    from glass.g.wt.shp import obj_to_shp
    from glass.g.prop.prj import get_shp_epsg
    from glass.pys.oss import mkdir, fprop
    from glass.g.dp.mge import shps_to_shp

    # Avoid problems when facilities_id == facilities_fk
    facilities_fk = facilities_fk + '_fk' if facilities_id == facilities_fk else \
        facilities_fk

    # Open data
    incidents_df = shp_to_obj(incidents)
    facilities_df = shp_to_obj(facilities)

    rel_df = tbl_to_obj(rel_inc_fac, sheet=sheet)

    # Get SRS
    epsg = get_shp_epsg(incidents)

    # Create dir for temporary files
    tmpdir = mkdir(os.path.join(os.path.dirname(output), fprop(output, 'fn')),
                   overwrite=True)

    # Relate facilities with incidents groups
    facilities_df = facilities_df.merge(rel_df,
                                        how='inner',
                                        left_on=facilities_id,
                                        right_on=facilities_fk)

    # List Groups
    grp_df = pd.DataFrame({
        'cnttemp':
        incidents_df.groupby([group_incidents_col
                              ])[group_incidents_col].agg('count')
    }).reset_index()

    # Do the calculations
    res = []
    for idx, row in grp_df.iterrows():
        # Get incidents for that group
        new_i = incidents_df[incidents_df[group_incidents_col] ==
                             row[group_incidents_col]]

        new_i = obj_to_shp(
            new_i, 'geometry', epsg,
            os.path.join(tmpdir, 'i_{}.shp'.format(row[group_incidents_col])))

        # Get facilities for that group
        new_f = facilities_df[facilities_df[group_fk] ==
                              row[group_incidents_col]]
        new_f = obj_to_shp(
            new_f, 'geometry', epsg,
            os.path.join(tmpdir, 'f_{}.shp'.format(row[group_incidents_col])))

        # calculate closest facility
        cf = closest_facility(
            new_i, incidents_id, new_f,
            os.path.join(tmpdir, 'cf_{}.shp'.format(row[group_incidents_col])),
            impedance=impedance)

        res.append(cf)

    # Produce final result
    shps_to_shp(res, output, api="pandas")

    return output
Example #17
def thrd_viewshed_v2(dbname, dem, pnt_obs, obs_id):
    """
    Compute Viewshed for all points in pnt_obs using
    a multiprocessing approach
    """

    import os
    import pandas          as pd
    import numpy           as np
    from osgeo             import gdal
    import multiprocessing as mp
    from glass.g.rd.shp    import shp_to_obj
    from glass.pys.oss     import cpu_cores, mkdir
    from glass.ng.pd.split import df_split
    from glass.g.wenv.grs  import run_grass
    from glass.g.prop.prj  import get_shp_epsg
    from glass.g.wt.sql    import df_to_db
    from glass.pys.oss     import del_file
    from glass.ng.sql.db   import create_db
    from glass.pys.num     import get_minmax_fm_seq_values
    
    # Get Work EPSG
    epsg = get_shp_epsg(pnt_obs)
    
    # Points to DataFrame
    obs_df = shp_to_obj(pnt_obs)

    # Split DF by the number of cores
    n_cpu = cpu_cores()
    dfs   = df_split(obs_df, n_cpu)

    def run_viewshed_by_cpu(tid, db, obs, dem, srs,
        vis_basename='vis', maxdst=None, obselevation=None):
        # Create Database
        new_db = create_db("{}_{}".format(db, str(tid)), api='psql')
        
        # Points to Database
        pnt_tbl = df_to_db(
            new_db, obs, 'pnt_tbl', api='psql', 
            epsg=srs, geomType='Point', colGeom='geometry')

        # Create GRASS GIS Session
        workspace = mkdir(os.path.join(
            os.path.dirname(dem), 'work_{}'.format(str(tid))
        ))
        loc_name = 'vis_loc'
        gbase = run_grass(workspace, location=loc_name, srs=dem)

        # Start GRASS GIS Session
        import grass.script as grass
        import grass.script.setup as gsetup
        gsetup.init(gbase, workspace, loc_name, 'PERMANENT')

        from glass.g.it.rst   import rst_to_grs, grs_to_rst
        from glass.g.rst.surf import grs_viewshed
        from glass.g.deldt    import del_rst

        # Send DEM to GRASS GIS
        grs_dem = rst_to_grs(dem, 'grs_dem', as_cmd=True)
    
        # Produce Viewshed for each point in obs
        for idx, row in obs.iterrows():
            # Get Viewshed raster
            vrst = grs_viewshed(
                grs_dem, (row.geometry.x, row.geometry.y),
                '{}_{}'.format(vis_basename, str(row[obs_id])),
                max_dist=maxdst, obs_elv=obselevation
            )
            
            # Export Raster to File
            frst = grs_to_rst(vrst, os.path.join(workspace, vrst + '.tif'))
            
            # Raster to Array
            img = gdal.Open(frst)
            num = img.ReadAsArray()
            
            # Two Dimension to One Dimension
            # Reshape Array
            numone = num.reshape(num.shape[0] * num.shape[1])
            
            # Get Indexes with visibility
            visnum = np.arange(numone.shape[0]).astype(np.uint32)
            visnum = visnum[numone == 1]

            # Get Indexes intervals
            visint = get_minmax_fm_seq_values(visnum)
            
            # Get rows indexes
            _visint = visint.reshape(visint.shape[0] * visint.shape[1])
            visrow = _visint / num.shape[1]
            visrow = visrow.astype(np.uint32)
            
            # Get cols indexes
            viscol = _visint - (visrow * num.shape[1])

            # Reshape
            visrow = visrow.reshape(visint.shape)
            viscol = viscol.reshape(visint.shape)

            # Split array
            irow, erow = np.vsplit(visrow.T, 1)[0]
            icol, ecol = np.vsplit(viscol.T, 1)[0]
            
            # Visibility indexes to Pandas DataFrame
            idxnum = np.full(irow.shape, row[obs_id])
            
            visdf = pd.DataFrame({
                'pntid' : idxnum, 'rowi' : irow, 'rowe' : erow,
                'coli': icol, 'cole' : ecol
            })
            
            # Pandas DF to database
            # Create Visibility table
            df_to_db(
                new_db, visdf, vis_basename,
                api='psql', colGeom=None,
                append=None if not idx else True
            )
            
            # Delete all variables
            numone  = None
            visnum  = None
            visint  = None
            _visint = None
            visrow  = None
            viscol  = None
            irow    = None
            erow    = None
            icol    = None
            ecol    = None
            idxnum  = None
            visdf   = None
            del img
            
            # Delete GRASS GIS File
            del_rst(vrst)
            
            # Delete TIFF File
            del_file(frst)
            frst = None

    thrds = [mp.Process(
        target=run_viewshed_by_cpu, name='th-{}'.format(str(i+1)),
        args=(i+1, dbname, dfs[i], dem, epsg,
            'vistoburn', 10000, 500)
    ) for i in range(len(dfs))]

    for t in thrds:
        t.start()
    
    for t in thrds:
        t.join()
    
    return 1
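
A usage sketch (hypothetical database name, DEM and observer layer; 'obs_id' is the column identifying each observer point):

    thrd_viewshed_v2('visdb', '/data/dem.tif', '/data/observers.shp', 'obs_id')
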
Example #18
def get_ref_raster(refBoundBox, folder, cellsize=None):
    """
    Get Reference Raster
    """

    import os
    from glass.g.prop import check_isRaster

    # Check if refRaster is really a Raster
    isRst = check_isRaster(refBoundBox)

    if not isRst:
        from glass.g.prop import check_isShp

        if not check_isShp(refBoundBox):
            raise ValueError((
                'refRaster File has an invalid file format. Please give a file '
                'with one of the following extensions: '
                'shp, gml, json, kml, tif or img'))

        else:
            # We have a shapefile

            # Check SRS and see if it is a projected SRS
            from glass.g.prop.prj import get_shp_epsg

            epsg, isProj = get_shp_epsg(refBoundBox, returnIsProj=True)

            if not epsg:
                raise ValueError(
                    'Cannot get epsg code from {}'.format(refBoundBox))

            if not isProj:
                # A conversion between SRS is needed
                from glass.g.prj import proj

                ref_shp = proj(refBoundBox,
                               os.path.join(folder, 'tmp_ref_shp.shp'),
                               outEPSG=3857,
                               inEPSG=epsg,
                               gisApi='ogr2ogr')
                epsg = 3857
            else:
                ref_shp = refBoundBox

            # Convert to Raster
            from glass.g.dp.torst import shp_to_rst

            refRaster = shp_to_rst(ref_shp,
                                   None,
                                   2 if not cellsize else cellsize,
                                   -1,
                                   os.path.join(folder, 'ref_raster.tif'),
                                   api='gdal')

    else:
        # We have a raster
        from glass.g.prop.prj import get_rst_epsg

        epsg, isProj = get_rst_epsg(refBoundBox, returnIsProj=True)

        if not epsg:
            raise ValueError(
                'Cannot get epsg code from {}'.format(refBoundBox))

        # Check if Raster has a SRS with projected coordinates
        if not isProj:
            # We need to reproject raster
            from glass.g.prj import reprj_rst

            refRaster = reprj_rst(refBoundBox,
                                  os.path.join(folder, 'refrst_3857.tif'),
                                  epsg, 3857)
            epsg = 3857
        else:
            refRaster = refBoundBox

    return refRaster, epsg
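
A minimal usage sketch (hypothetical paths): derive a reference raster, and its EPSG code, from a boundary shapefile using a 10 meter cell size.

    ref_rst, ref_epsg = get_ref_raster('/data/study_area.shp', '/data/tmp', cellsize=10)
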
Example #19
def osm_extraction(boundary, osmdata, output, each_feat=None, epsg=None):
    """
    Extract OSM Data from a xml file with osmosis
    
    The extraction is done using the extent of a boundary
    """

    import os
    from glass.pys import execmd
    from glass.g.prj.obj import prj_ogrgeom
    from glass.g.prop import check_isRaster

    # Check if boundary is a file
    if os.path.isfile(boundary):
        # Check if boundary is a raster
        is_rst = check_isRaster(boundary)

        if is_rst:
            # Get Raster EPSG and Extent
            from glass.g.prop.prj import get_rst_epsg
            from glass.g.prop.rst import rst_ext
            from glass.g.gobj import create_polygon

            in_epsg = get_rst_epsg(boundary)
            left, right, bottom, top = rst_ext(boundary)
            boundaries = [
                create_polygon([(left, top), (right, top), (right, bottom),
                                (left, bottom), (left, top)])
            ]

        else:
            # Get Shape EPSG
            from glass.g.prop.prj import get_shp_epsg

            in_epsg = get_shp_epsg(boundary)

            if not each_feat:
                # Get Shape Extent
                from glass.g.prop.feat import get_ext
                from glass.g.gobj import create_polygon

                left, right, bottom, top = get_ext(boundary)
                boundaries = [
                    create_polygon([(left, top), (right, top), (right, bottom),
                                    (left, bottom), (left, top)])
                ]

            else:
                # Get Extent of each feature
                from osgeo import ogr
                from glass.g.prop import drv_name

                src = ogr.GetDriverByName(drv_name(boundary)).Open(boundary)
                lyr = src.GetLayer()

                boundaries = [feat.GetGeometryRef() for feat in lyr]
    else:
        from glass.g.gobj import wkt_to_geom

        in_epsg = 4326 if not epsg else epsg

        if type(boundary) == str:
            # Assuming it is a WKT string
            wkt_boundaries = [boundary]
        elif type(boundary) == list:
            # Assuming it is a List with WKT strings
            wkt_boundaries = boundary
        else:
            raise ValueError('Given boundary does not have a valid value')

        boundaries = [wkt_to_geom(g) for g in wkt_boundaries]

        if None in boundaries:
            raise ValueError(
                ("boundary parameter is a string, but it is not a valid path "
                 "to a file or a valid WKT string"))

    # Get output files
    if len(boundaries) == 1:
        if os.path.isdir(output):
            fn, ff = os.path.splitext(os.path.basename(osmdata))

            out_files = [os.path.join(output, "ect_{}{}".format(fn, ff))]
        else:
            out_files = [output]
    else:
        fn, ff = os.path.splitext(os.path.basename(osmdata))
        path = output if os.path.isdir(output) else os.path.dirname(output)
        out_files = [
            os.path.join(path, "ect_{}_{}.{}".format(fn, str(i), ff))
            for i in range(len(boundaries))
        ]

    # Extract data using OSMOSIS
    cmd = ("osmosis --read-{_f} {dtparse}file={_in} "
           "--bounding-box top={t} left={l} bottom={b} right={r} "
           "--write-{outext} file={_out}")
    for g in range(len(boundaries)):
        # Convert boundary to WGS84 -EPSG 4326
        geom_wgs = prj_ogrgeom(
            boundaries[g], int(in_epsg), 4326,
            api='shapely') if int(in_epsg) != 4326 else boundaries[g]

        # Get boundary extent
        left, right, bottom, top = geom_wgs.GetEnvelope()

        # Osmosis shell comand
        osmext = os.path.splitext(osmdata)[1]

        # Execute command
        outcmd = execmd(
            cmd.format(
                _f='pbf' if osmext == '.pbf' else 'xml',
                _in=osmdata,
                t=str(top),
                l=str(left),
                b=str(bottom),
                r=str(right),
                _out=out_files[g],
                outext=os.path.splitext(out_files[g])[1][1:],
                dtparse="" if osmext == '.pbf' else "enableDataParsing=no "))

    return output
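
A usage sketch (hypothetical paths; assumes the osmosis command line tool is installed): clip an OSM PBF file with the extent of a boundary shapefile.

    osm_extraction('/data/study_area.shp', '/data/portugal-latest.osm.pbf',
                   '/data/study_area.osm.pbf')
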
Example #20
def proj(inShp,
         outShp,
         outEPSG,
         inEPSG=None,
         gisApi='ogr',
         sql=None,
         db_name=None):
    """
    Project Geodata using GIS
    
    APIs available:
    * ogr;
    * ogr2ogr;
    * pandas;
    * ogr2ogr_SQLITE;
    * psql;
    """
    import os

    if gisApi == 'ogr':
        """
        Using ogr Python API
        """

        if not inEPSG:
            raise ValueError(
                'To use ogr API, you should specify the EPSG Code of the'
                ' input data using inEPSG parameter')

        from osgeo import ogr
        from glass.g.lyr.fld import copy_flds
        from glass.g.prop.feat import get_gtype
        from glass.g.prop import drv_name
        from glass.g.prop.prj import get_sref_from_epsg, get_trans_param
        from glass.pys.oss import fprop

        def copyShp(out, outDefn, lyr_in, trans):
            for f in lyr_in:
                g = f.GetGeometryRef()
                g.Transform(trans)
                new = ogr.Feature(outDefn)
                new.SetGeometry(g)
                for i in range(0, outDefn.GetFieldCount()):
                    new.SetField(
                        outDefn.GetFieldDefn(i).GetNameRef(), f.GetField(i))
                out.CreateFeature(new)
                new.Destroy()
                f.Destroy()

        # ####### #
        # Project #
        # ####### #
        transP = get_trans_param(inEPSG, outEPSG)

        inData = ogr.GetDriverByName(drv_name(inShp)).Open(inShp, 0)

        inLyr = inData.GetLayer()
        out = ogr.GetDriverByName(drv_name(outShp)).CreateDataSource(outShp)

        outlyr = out.CreateLayer(fprop(outShp, 'fn'),
                                 get_sref_from_epsg(outEPSG),
                                 geom_type=get_gtype(inShp,
                                                     name=None,
                                                     py_cls=True,
                                                     gisApi='ogr'))

        # Copy fields to the output
        copy_flds(inLyr, outlyr)
        # Copy/transform features from the input to the output
        outlyrDefn = outlyr.GetLayerDefn()
        copyShp(outlyr, outlyrDefn, inLyr, transP)

        inData.Destroy()
        out.Destroy()

    elif gisApi == 'ogr2ogr':
        """
        Transform SRS of any OGR Compilant Data. Save the transformed data
        in a new file
        """

        if not inEPSG:
            from glass.g.prop.prj import get_shp_epsg
            inEPSG = get_shp_epsg(inShp)

        if not inEPSG:
            raise ValueError('To use ogr2ogr, you must specify inEPSG')

        from glass.pys import execmd
        from glass.g.prop import drv_name

        cmd = ('ogr2ogr -f "{}" {} {}{} -s_srs EPSG:{} -t_srs EPSG:{}').format(
            drv_name(outShp), outShp, inShp,
            '' if not sql else ' -dialect sqlite -sql "{}"'.format(sql),
            str(inEPSG), str(outEPSG))

        outcmd = execmd(cmd)

    elif gisApi == 'ogr2ogr_SQLITE':
        """
        Transform SRS of a SQLITE DB table. Save the transformed data in a
        new table
        """

        from glass.pys import execmd

        if not inEPSG:
            raise ValueError(
                ('With ogr2ogr_SQLITE, the definition of inEPSG is '
                 'mandatory.'))

        # TODO: Verify if database is sqlite

        db, tbl = inShp['DB'], inShp['TABLE']
        sql = 'SELECT * FROM {}'.format(tbl) if not sql else sql

        outcmd = execmd(
            ('ogr2ogr -update -append -f "SQLite" {db} -nln "{nt}" '
             '-dialect sqlite -sql "{_sql}" -s_srs EPSG:{inepsg} '
             '-t_srs EPSG:{outepsg} {db}').format(db=db,
                                                  nt=outShp,
                                                  _sql=sql,
                                                  inepsg=str(inEPSG),
                                                  outepsg=str(outEPSG)))

    elif gisApi == 'pandas':
        # Open input Shp as a GeoDataFrame
        from glass.g.rd.shp import shp_to_obj
        from glass.g.wt.shp import df_to_shp

        df = shp_to_obj(inShp)

        # Project df
        newDf = df.to_crs('EPSG:{}'.format(str(outEPSG)))

        # Save as file
        return df_to_shp(newDf, outShp)

    elif gisApi == 'psql':
        from glass.ng.sql.db import create_db
        from glass.pys.oss import fprop
        from glass.g.it.db import shp_to_psql
        from glass.g.it.shp import dbtbl_to_shp
        from glass.g.prj.sql import sql_proj

        # Create Database
        if not db_name:
            db_name = create_db(fprop(outShp, 'fn', forceLower=True),
                                api='psql')

        else:
            from glass.ng.prop.sql import db_exists

            isDb = db_exists(db_name)

            if not isDb:
                create_db(db_name, api='psql')

        # Import Data
        inTbl = shp_to_psql(db_name, inShp, api='shp2pgsql', encoding="LATIN1")

        # Transform
        oTbl = sql_proj(db_name,
                        inTbl,
                        fprop(outShp, 'fn', forceLower=True),
                        outEPSG,
                        geomCol='geom',
                        newGeom='geom')

        # Export
        outShp = dbtbl_to_shp(db_name,
                              oTbl,
                              'geom',
                              outShp,
                              api='psql',
                              epsg=outEPSG)

    else:
        raise ValueError('Sorry, API {} is not available'.format(gisApi))

    return outShp
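
A minimal usage sketch (hypothetical paths): re-project a shapefile to EPSG:3763 with the ogr2ogr backend, letting the input EPSG be read from the data.

    proj('/data/roads.shp', '/data/roads_3763.shp', 3763, gisApi='ogr2ogr')
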
Example #21
def shp_to_psql(dbname,
                shpData,
                pgTable=None,
                api="pandas",
                mapCols=None,
                srsEpsgCode=None,
                encoding="UTF-8",
                dbset='default'):
    """
    Send Shapefile to PostgreSQL
    
    if api is equal to "pandas" - GeoPandas API will be used;
    if api is equal to "shp2pgsql" - shp2pgsql tool will be used.
    """

    import os
    from glass.pys.oss import fprop
    from glass.g.prop.prj import get_shp_epsg

    # If defined, srsEpsgCode must be an integer value
    if srsEpsgCode:
        if type(srsEpsgCode) != int:
            raise ValueError('srsEpsgCode should be an integer value')

    if api == "pandas":
        from glass.ng.rd import tbl_to_obj
        from glass.g.wt.sql import df_to_db
        from glass.g.prop.feat import get_gtype

    elif api == "shp2pgsql":
        from glass.pys import execmd
        from glass.ng.sql import psql_cmd
        from glass.pys.oss import del_file

    else:
        raise ValueError(
            'api value is not valid. options are: pandas and shp2pgsql')

    # Check if shp is folder
    if os.path.isdir(shpData):
        from glass.pys.oss import lst_ff

        shapes = lst_ff(shpData, file_format='.shp')

    else:
        from glass.pys import obj_to_lst

        shapes = obj_to_lst(shpData)

    epsgs = [get_shp_epsg(i)
             for i in shapes] if not srsEpsgCode else [srsEpsgCode]

    if None in epsgs:
        raise ValueError(
            ("Cannot obtain EPSG code. Use the srsEpsgCode parameter "
             "to specify the EPSG code of your data."))

    tables = []
    for _i in range(len(shapes)):
        # Get Table name
        tname = fprop(shapes[_i], 'fn', forceLower=True) if not pgTable else \
            pgTable[_i] if type(pgTable) == list else pgTable if len(shapes) == 1 \
            else pgTable + '_{}'.format(_i+1)

        # Import data
        if api == "pandas":
            # SHP to DataFrame
            df = tbl_to_obj(shapes[_i])

            if not mapCols:
                df.rename(columns={x: x.lower()
                                   for x in df.columns.values},
                          inplace=True)
            else:
                renameD = {
                    x : mapCols[x].lower() if x in mapCols else \
                    x.lower() for x in df.columns.values
                }
                df.rename(columns=renameD, inplace=True)

            if "geometry" in df.columns.values:
                geomCol = "geometry"

            elif "geom" in df.columns.values:
                geomCol = "geom"

            else:
                raise ValueError("No Geometry found in shp")

            # GeoDataFrame to PSQL
            df_to_db(dbname,
                     df,
                     tname,
                     append=True,
                     api='psql',
                     epsg=epsgs[_i] if not srsEpsgCode else srsEpsgCode,
                     colGeom=geomCol,
                     geomType=get_gtype(shapes[_i],
                                        name=True,
                                        py_cls=False,
                                        gisApi='ogr'))

        else:
            sql_script = os.path.join(os.path.dirname(shapes[_i]),
                                      tname + '.sql')

            cmd = ('shp2pgsql -I -s {epsg} -W {enc} '
                   '{shp} public.{name} > {out}').format(
                       epsg=epsgs[_i] if not srsEpsgCode else srsEpsgCode,
                       shp=shapes[_i],
                       name=tname,
                       out=sql_script,
                       enc=encoding)

            outcmd = execmd(cmd)

            psql_cmd(dbname, sql_script, dbcon=dbset)

            del_file(sql_script)

        tables.append(tname)

    return tables[0] if len(tables) == 1 else tables
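
A usage sketch (hypothetical database and paths): load a single shapefile, or every shapefile in a folder, into PostgreSQL.

    shp_to_psql('gisdb', '/data/roads.shp', pgTable='roads', api='shp2pgsql')
    shp_to_psql('gisdb', '/data/shps_folder', api='pandas')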