Example #1
0
def arcg_area(polyTbl, lulcNomenclature, UPPER=True):
    """
    Select OSM features by an area threshold (ArcGIS implementation).

    For every LULC class of the given nomenclature, selects the polygons in
    polyTbl whose OSM key/value matches the class rule AND whose area is
    above (UPPER=True) or below (UPPER=False) the class threshold, then
    dissolves the selection into a single feature class per LULC class.

    polyTbl          - path to the polygons feature class/table
    lulcNomenclature - nomenclature key into the module-level DB_SCHEMA
    UPPER            - True -> "area greater than threshold";
                       False -> "area smaller than threshold"

    Returns a dict {lulc_class (int): path_to_dissolved_feature_class}.
    Classes with no matching features are omitted.
    """

    # os was used below but never imported in this function's import
    # cluster; import it locally like the sibling functions do.
    import os

    from gasp.osm2lulc.utils import osm_features_by_rule
    from gasp.cpu.arcg.anls.exct import select_by_attr
    from gasp.cpu.arcg.mng.gen import delete
    from gasp.mng.genze import dissolve
    from gasp.prop.feat import feat_count

    # Column names come from the project schema (DB_SCHEMA is module-level)
    KEY_COL = DB_SCHEMA["OSM_FEATURES"]["OSM_KEY"]
    VALUE_COL = DB_SCHEMA["OSM_FEATURES"]["OSM_VALUE"]
    LULC_COL = DB_SCHEMA[lulcNomenclature]["CLS_FK"]
    RULE_COL = DB_SCHEMA[lulcNomenclature]["RULES_FIELDS"]["AREA"]

    # Get OSM Features to be selected for this rule
    __serv = 'area_upper' if UPPER else 'area_lower'
    osmToSelect = osm_features_by_rule(lulcNomenclature, __serv)

    # Build, per row, a SQL condition of the form:
    #   (key='value' AND shp_area > threshold)
    operator = " > " if UPPER else " < "
    osmToSelect[VALUE_COL] = "(" + osmToSelect[KEY_COL] + "='" + \
        osmToSelect[VALUE_COL] + "' AND shp_area" + operator + \
        osmToSelect[RULE_COL].astype(str) + ")"

    lulcCls = osmToSelect[LULC_COL].unique()

    clsVect = {}
    WORK = os.path.dirname(polyTbl)

    for cls in lulcCls:
        # Select and Export: OR together all conditions of this class
        filterDf = osmToSelect[osmToSelect[LULC_COL] == cls]

        fShp = select_by_attr(
            polyTbl, str(filterDf[VALUE_COL].str.cat(sep=" OR ")),
            os.path.join(WORK, "{}_{}".format(__serv, str(cls))))

        if not feat_count(fShp, gisApi='arcpy'):
            # Nothing selected for this class - remove the empty
            # intermediate file instead of leaking it on disk
            delete(fShp)
            continue

        # Dissolve
        dissShp = dissolve(fShp,
                           os.path.join(WORK,
                                        "{}_d_{}".format(__serv, str(cls))),
                           "OBJECTID",
                           geomMultiPart=None,
                           api='arcpy')

        if not feat_count(dissShp, gisApi='arcpy'):
            # Dissolve produced nothing usable; drop the selection too
            delete(fShp)
            continue

        clsVect[int(cls)] = dissShp

        # The raw selection is only an intermediate product
        delete(fShp)

    return clsVect
Example #2
0
def grs_vect_selbyarea(osmcon, polyTbl, UPPER=True, apidb='SQLITE'):
    """
    Select features whose area is above (UPPER=True) or below (UPPER=False)
    a per-class threshold stored in the database, import them into GRASS GIS
    and dissolve them.

    Returns (dissolved_vector_name, timings) or (None, timings) when no
    feature matches the rule.
    """

    import datetime
    from gasp.mng.genze import dissolve
    from gasp.mng.grstbl import add_table
    from gasp.osm2lulc.var import GEOM_AREA
    from gasp.sql.mng.tbl import row_num as cnt_row
    if apidb == 'POSTGIS':
        from gasp.to.shp.grs import psql_to_grs as db_to_shp
    else:
        from gasp.to.shp.grs import sqlite_to_shp as db_to_shp

    def timestamp():
        # Timing samples are taken at second precision
        return datetime.datetime.now().replace(microsecond=0)

    if UPPER:
        OPERATOR, DIRECTION = ">", "upper"
    else:
        OPERATOR, DIRECTION = "<", "lower"

    WHR = "{ga} {op} t_area_{r} and area_{r} IS NOT NULL".format(op=OPERATOR,
                                                                 r=DIRECTION,
                                                                 ga=GEOM_AREA)

    # Count interest rows first; bail out early when there are none
    t_start = timestamp()
    nRows = cnt_row(osmcon,
                    polyTbl,
                    where=WHR,
                    api='psql' if apidb == 'POSTGIS' else 'sqlite')
    t_count = timestamp()

    if not nRows:
        return None, {0: ('count_rows', t_count - t_start)}

    # Move the selected rows into the GRASS GIS session
    grsVect = db_to_shp(osmcon,
                        polyTbl,
                        "area_{}".format(DIRECTION),
                        where=WHR,
                        filterByReg=True)
    t_import = timestamp()

    # One geometry per class value
    dissVect = dissolve(grsVect,
                        "diss_area_{}".format(DIRECTION),
                        "area_{}".format(DIRECTION),
                        api="grass")

    add_table(dissVect, None, lyrN=1, asCMD=True)
    t_dissolve = timestamp()

    return dissVect, {
        0: ('count_rows', t_count - t_start),
        1: ('import', t_import - t_count),
        2: ('dissolve', t_dissolve - t_import)
    }
Example #3
0
def grs_vect_bbuffer(osmdata, lineTbl, api_db='SQLITE'):
    """
    Basic buffer strategy: buffer the lines tagged with basic_buffer,
    then dissolve the buffers by that attribute.

    Returns (dissolved_vector_name, timings) or (None, timings) when no
    line is tagged for this rule.
    """

    import datetime
    from gasp.anls.prox.bf import _buffer
    from gasp.mng.genze import dissolve
    from gasp.mng.grstbl import add_table
    from gasp.sql.mng.tbl import row_num as cnt_row
    if api_db == 'POSTGIS':
        from gasp.to.shp.grs import psql_to_grs as db_to_shp
    else:
        from gasp.to.shp.grs import sqlite_to_shp as db_to_shp

    def snap():
        # Second-precision timing sample
        return datetime.datetime.now().replace(microsecond=0)

    WHR = "basic_buffer IS NOT NULL"

    # Any rows tagged for this rule at all?
    t0 = snap()
    nFeat = cnt_row(osmdata,
                    lineTbl,
                    where=WHR,
                    api='psql' if api_db == 'POSTGIS' else 'sqlite')
    t1 = snap()

    if not nFeat:
        return None, {0: ('count_rows_roads', t1 - t0)}

    # Import the tagged lines into GRASS GIS
    grsVect = db_to_shp(osmdata,
                        lineTbl,
                        "bb_lnh",
                        where=WHR,
                        filterByReg=True)
    t2 = snap()

    # Buffer the lines...
    grsBuf = _buffer(grsVect,
                     "bf_basic_buffer",
                     "bb_poly",
                     api="grass",
                     geom_type="line")
    t3 = snap()

    # ...and dissolve the buffer polygons by class
    grsDiss = dissolve(grsBuf, "bb_diss", "basic_buffer", api="grass")
    add_table(grsDiss, None, lyrN=1, asCMD=True)
    t4 = snap()

    return grsDiss, {
        0: ('count_rows', t1 - t0),
        1: ('import', t2 - t1),
        2: ('buffer', t3 - t2),
        3: ('dissolve', t4 - t3)
    }
Example #4
0
def grs_vector(dbcon, polyTable, apidb='SQLITE'):
    """
    Simple selection rule using GRASS GIS: import the polygons whose
    "selection" attribute is set and dissolve them by that attribute.

    Returns (dissolved_vector_name, timings) or (None, timings) when no
    polygon is tagged for this rule.
    """

    import datetime
    from gasp.mng.genze import dissolve
    from gasp.mng.grstbl import add_table
    from gasp.sql.mng.tbl import row_num as cont_row
    if apidb == 'POSTGIS':
        from gasp.to.shp.grs import psql_to_grs as db_to_grs
    else:
        from gasp.to.shp.grs import sqlite_to_shp as db_to_grs

    def clock():
        # Second-precision timing sample
        return datetime.datetime.now().replace(microsecond=0)

    WHR = "selection IS NOT NULL"

    # Abort early when no feature is tagged for this rule
    t_a = clock()
    nrows = cont_row(dbcon,
                     polyTable,
                     where=WHR,
                     api='psql' if apidb == 'POSTGIS' else 'sqlite')
    t_b = clock()

    if not nrows:
        return None, {0: ('count_rows', t_b - t_a)}

    # Load the selected polygons into the GRASS session
    grsVect = db_to_grs(dbcon,
                        polyTable,
                        "sel_rule",
                        where=WHR,
                        filterByReg=True)
    t_c = clock()

    # One geometry per selection value
    dissVect = dissolve(grsVect,
                        "diss_sel_rule",
                        field="selection",
                        api="grass")

    add_table(dissVect, None, lyrN=1, asCMD=True)
    t_d = clock()

    return dissVect, {
        0: ('count_rows', t_b - t_a),
        1: ('import', t_c - t_b),
        2: ('dissolve', t_d - t_c)
    }
Example #5
0
def arcg_selection(db, polTbl, fld):
    """
    Selection rule via SQLite + OGR: export one shapefile per LULC class
    found in polTbl's "selection" column, then dissolve each export.

    db     - path to the SQLite database
    polTbl - polygons table name
    fld    - output folder for the per-class shapefiles

    Returns ({lulc_class (int): dissolved_shp_path}, timings).
    """

    import datetime
    import os
    from gasp.mng.genze import dissolve
    from gasp.fm.sql import query_to_df
    from gasp.anls.exct import sel_by_attr

    def now():
        # Second-precision timing sample
        return datetime.datetime.now().replace(microsecond=0)

    # Fetch the distinct LULC classes present in the table
    t0 = now()
    lulcCls = query_to_df(
        db,
        ("SELECT selection FROM {} "
         "WHERE selection IS NOT NULL GROUP BY selection").format(polTbl),
        db_api='sqlite').selection.tolist()
    t1 = now()

    timeGasto = {0: ('check_cls', t1 - t0)}

    # One shapefile per class: export from the DB, then dissolve it
    clsShp = {}
    SQL = "SELECT selection, geometry FROM {} WHERE selection={}"
    for idx, cls in enumerate(lulcCls):
        t_x = now()
        shp = sel_by_attr(db,
                          SQL.format(polTbl, str(cls)),
                          os.path.join(fld, 'rule1_{}.shp'.format(cls)),
                          api_gis='ogr')
        t_y = now()

        dShp = dissolve(shp,
                        os.path.join(fld, "rul1_d_{}.shp".format(str(cls))),
                        "FID",
                        geomMultiPart=True)
        t_z = now()

        clsShp[int(cls)] = dShp
        # Timing keys continue 1, 2, 3, 4, ... after the initial entry
        timeGasto[1 + idx * 2] = ("export_{}".format(cls), t_y - t_x)
        timeGasto[2 + idx * 2] = ("dissolve_{}".format(cls), t_z - t_y)

    return clsShp, timeGasto
Example #6
0
File: calc.py Project: zonakre/gasp
def area_by_population(polygons,
                       inhabitants,
                       field_inhabitants,
                       work,
                       area_field='area_pop'):
    """
    Feature area (polygons) by feature inhabitant (inhabitants).

    For every feature of "inhabitants", intersects "polygons" with that
    feature, dissolves the intersection and writes area / population into
    "area_field" of the inhabitants layer.

    polygons          - feature class whose area is being measured
    inhabitants       - statistic units feature class (updated in place)
    field_inhabitants - field with the population of each unit
    work              - ArcGIS workspace for the intermediate outputs
    area_field        - name of the output FLOAT field
    """

    # arcpy was used below but never imported in this function;
    # import it locally like the sibling functions in this module do
    import arcpy

    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.mng.fld import add_field
    from gasp.mng.genze import dissolve
    from gasp.cpu.arcg.anls.ovlay import intersect
    from gasp.cpu.arcg.anls.exct import select_by_attr

    # ArcGIS environment
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = work

    inhabitant_lyr = feat_lyr(inhabitants)

    add_field(inhabitant_lyr, area_field, "FLOAT", "")

    cursor = arcpy.UpdateCursor(inhabitant_lyr)
    lnh = cursor.next()

    while lnh:
        # TODO: Use intersection only once
        f = int(lnh.getValue("FID"))

        # Extract the current statistic unit
        poly_extracted = "poly_{fid}.shp".format(fid=str(f))
        select_by_attr(inhabitant_lyr, "FID = {fid}".format(fid=str(f)),
                       poly_extracted)

        # Clip the polygons of interest to the unit
        intersected = "int_{fid}.shp".format(fid=str(f))
        intersect([polygons, poly_extracted], intersected)

        dissolved = dissolve(intersected,
                             "diss_{f_}.shp".format(f_=str(f)),
                             "FID",
                             api='arcpy')

        cs = arcpy.SearchCursor(dissolved)

        l = cs.next()

        geom = arcpy.Describe(dissolved).shapeFieldName

        # BUG FIX: 'area' was unbound when the dissolved output had no
        # rows; default to 0.0 so the update below always has a value
        area = 0.0
        while l:
            area = float(l.getValue(geom).area)
            l = cs.next()

        pop = int(lnh.getValue(field_inhabitants))

        try:
            indicator = area / pop
        except ZeroDivisionError:
            # BUG FIX: the original fallback was 0.0 / pop inside a bare
            # except, which re-raised when pop == 0; a unit with no
            # population simply gets a zero indicator
            indicator = 0.0

        # BUG FIX: original called lnh.setValue(area_field) without the
        # value argument, so the computed indicator was never stored
        lnh.setValue(area_field, indicator)
        cursor.updateRow(lnh)

        lnh = cursor.next()
Example #7
0
def vector_based(osmdata,
                 nomenclature,
                 refRaster,
                 lulcShp,
                 overwrite=None,
                 dataStore=None,
                 RoadsAPI='SQLITE'):
    """
    Convert OSM Data into Land Use/Land Cover Information.

    A vector based approach: loads the OSM file into a SQLite or PostGIS
    database, applies the osm2lulc rules (selection, roads, area upper/
    lower, basic buffer, untagged buildings) inside a GRASS GIS session,
    resolves overlaps with the nomenclature's priority order and merges
    everything into one shapefile.

    osmdata      - path to the OSM file
    nomenclature - key into PRIORITIES (e.g. a LULC nomenclature name)
    refRaster    - raster defining the SRS and processing extent
    lulcShp      - path for the output LULC shapefile
    overwrite    - if truthy, recreate an existing workspace
    dataStore    - optional folder for intermediate data (defaults to
                   <dirname(lulcShp)>/osmtolulc)
    RoadsAPI     - 'SQLITE' or 'POSTGIS'

    Returns (lulcShp, timings_dict).

    Raises ValueError when the output folder does not exist, when the
    workspace already exists and overwrite is falsy, or when no EPSG code
    can be read from refRaster.
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import json
    # ************************************************************************ #
    # GASP dependencies #
    # ************************************************************************ #
    from gasp.oss.ops import create_folder
    from gasp.prop.rst import get_epsg_raster
    from gasp.session import run_grass
    if RoadsAPI == 'POSTGIS':
        from gasp.sql.mng.db import create_db
        from gasp.osm2lulc.utils import osm_to_pgsql
    else:
        from gasp.osm2lulc.utils import osm_to_sqdb
    from gasp.osm2lulc.utils import osm_project, add_lulc_to_osmfeat
    from gasp.mng.gen import merge_feat
    from gasp.osm2lulc.mod1 import grs_vector
    if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS':
        from gasp.osm2lulc.mod2 import roads_sqdb
    else:
        from gasp.osm2lulc.mod2 import grs_vec_roads
    from gasp.osm2lulc.m3_4 import grs_vect_selbyarea
    from gasp.osm2lulc.mod5 import grs_vect_bbuffer
    from gasp.osm2lulc.mod6 import vector_assign_pntags_to_build
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    if not os.path.exists(os.path.dirname(lulcShp)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcShp)))

    # Get Parameters to connect to PostgreSQL
    # (read from con-postgresql.json next to this module)
    conPGSQL = json.load(
        open(
            os.path.join(os.path.dirname(os.path.abspath(__file__)),
                         'con-postgresql.json'),
            'r')) if RoadsAPI == 'POSTGIS' else None

    # Get EPSG of Reference Raster
    epsg = get_epsg_raster(refRaster)
    if not epsg:
        raise ValueError('Cannot get epsg code of ref raster')

    time_a = datetime.datetime.now().replace(microsecond=0)
    from gasp.osm2lulc.var import osmTableData, PRIORITIES

    workspace = os.path.join(os.path.dirname(lulcShp),
                             'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            create_folder(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        create_folder(workspace)

    __priorities = PRIORITIES[nomenclature]
    time_b = datetime.datetime.now().replace(microsecond=0)
    if RoadsAPI != 'POSTGIS':
        # ******************************************************************** #
        # Convert OSM file to SQLITE DB #
        # ******************************************************************** #
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    else:
        # Convert OSM file to POSTGRESQL DB #
        conPGSQL["DATABASE"] = create_db(conPGSQL,
                                         os.path.splitext(
                                             os.path.basename(osmdata))[0],
                                         overwrite=True)
        osm_db = osm_to_pgsql(osmdata, conPGSQL)
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
                        osmTableData,
                        nomenclature,
                        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        epsg,
        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(workspace,
                           grassBIN='grass76',
                           location='grloc',
                           srs=epsg)
    #import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME GASP MODULES FOR GRASS GIS #
    # (must happen after gsetup.init so the GRASS session is available) #
    # ************************************************************************ #
    from gasp.anls.ovlay import erase
    from gasp.prop.grs import rst_to_region
    from gasp.mng.genze import dissolve
    from gasp.mng.grstbl import add_and_update, reset_table
    from gasp.to.shp.grs import shp_to_grs, grs_to_shp
    from gasp.to.rst import rst_to_grs
    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    osmShps = []
    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    ruleOneShp, timeCheck1 = grs_vector(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        osmTableData['polygons'],
        apidb=RoadsAPI)
    osmShps.append(ruleOneShp)

    time_g = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    ruleRowShp, timeCheck2 = roads_sqdb(
        osm_db if RoadsAPI == 'SQLITE' else conPGSQL,
        osmTableData['lines'],
        osmTableData['polygons'],
        apidb=RoadsAPI
    ) if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS' else grs_vec_roads(
        osm_db, osmTableData['lines'], osmTableData['polygons'])

    osmShps.append(ruleRowShp)
    time_h = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    ruleThreeShp, timeCheck3 = grs_vect_selbyarea(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        osmTableData['polygons'],
        UPPER=True,
        apidb=RoadsAPI)

    osmShps.append(ruleThreeShp)
    time_l = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    ruleFourShp, timeCheck4 = grs_vect_selbyarea(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        osmTableData['polygons'],
        UPPER=False,
        apidb=RoadsAPI)

    osmShps.append(ruleFourShp)
    time_j = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    ruleFiveShp, timeCheck5 = grs_vect_bbuffer(
        osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
        osmTableData["lines"],
        api_db=RoadsAPI)

    osmShps.append(ruleFiveShp)
    time_m = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleSeven11, ruleSeven12, timeCheck7 = vector_assign_pntags_to_build(
            osm_db if RoadsAPI != 'POSTGIS' else conPGSQL,
            osmTableData['points'],
            osmTableData['polygons'],
            apidb=RoadsAPI)

        if ruleSeven11:
            osmShps.append(ruleSeven11)

        if ruleSeven12:
            osmShps.append(ruleSeven12)

        time_n = datetime.datetime.now().replace(microsecond=0)

    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Produce LULC Map  #
    # ************************************************************************ #
    """
    Get Shps with all geometries related with one class - One Shape for Classe
    """

    from gasp.mng.gen import same_attr_to_shp

    # Export the non-empty rule results from GRASS and regroup them so
    # that each output shapefile holds a single class
    _osmShps = []
    for i in range(len(osmShps)):
        if not osmShps[i]: continue

        _osmShps.append(
            grs_to_shp(osmShps[i],
                       os.path.join(workspace, osmShps[i] + '.shp'),
                       'auto',
                       lyrN=1,
                       asCMD=True,
                       asMultiPart=None))

    _osmShps = same_attr_to_shp(_osmShps,
                                "cat",
                                workspace,
                                "osm_",
                                resultDict=True)
    del osmShps

    time_o = datetime.datetime.now().replace(microsecond=0)
    """
    Merge all Classes into one feature class using a priority rule
    """

    # Re-import per-class shapefiles into GRASS, keyed by class number
    osmShps = {}
    for cls in _osmShps:
        if cls == '1':
            # NOTE(review): class '1' is remapped to 1221 here; this looks
            # nomenclature-specific - confirm against the PRIORITIES table
            osmShps[1221] = shp_to_grs(_osmShps[cls], "osm_1221", asCMD=True)

        else:
            osmShps[int(cls)] = shp_to_grs(_osmShps[cls],
                                           "osm_" + cls,
                                           asCMD=True)

    # Erase overlapping areas by priority
    # (classes earlier in __priorities win; later classes get clipped
    # by every higher-priority class present)
    import copy
    osmNameRef = copy.deepcopy(osmShps)

    for e in range(len(__priorities)):
        if e + 1 == len(__priorities): break

        if __priorities[e] not in osmShps:
            continue
        else:
            for i in range(e + 1, len(__priorities)):
                if __priorities[i] not in osmShps:
                    continue
                else:
                    osmShps[__priorities[i]] = erase(
                        osmShps[__priorities[i]],
                        osmShps[__priorities[e]],
                        "{}_{}".format(osmNameRef[__priorities[i]], e),
                        notTbl=True,
                        api='pygrass')

    time_p = datetime.datetime.now().replace(microsecond=0)

    # Export all classes
    # (tag every class layer with a 'cls' column, dissolve on it and
    # export one shapefile per class)
    lst_merge = []
    for cls in osmShps:
        if cls == __priorities[0]:
            reset_table(osmShps[cls], {'cls': 'varchar(5)'}, {'cls': str(cls)})
        else:
            add_and_update(osmShps[cls], {'cls': 'varchar(5)'},
                           {'cls': str(cls)})

        ds = dissolve(osmShps[cls],
                      'dl_{}'.format(str(cls)),
                      'cls',
                      api="grass")

        lst_merge.append(
            grs_to_shp(ds,
                       os.path.join(workspace, "lulc_{}.shp".format(str(cls))),
                       'auto',
                       lyrN=1,
                       asCMD=True,
                       asMultiPart=None))

    time_q = datetime.datetime.now().replace(microsecond=0)

    # Final merge of the per-class shapefiles into the output
    merge_feat(lst_merge, lulcShp, api='pandas')

    time_r = datetime.datetime.now().replace(microsecond=0)

    return lulcShp, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('set_grass', time_f - time_e),
        5: ('rule_1', time_g - time_f, timeCheck1),
        6: ('rule_2', time_h - time_g, timeCheck2),
        7: ('rule_3', time_l - time_h, timeCheck3),
        8: ('rule_4', time_j - time_l, timeCheck4),
        9: ('rule_5', time_m - time_j, timeCheck5),
        10: None if not timeCheck7 else
        ('rule_7', time_n - time_m, timeCheck7),
        11: ('disj_cls', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('export_cls', time_q - time_p),
        14: ('merge_cls', time_r - time_q)
    }
Example #8
0
def arcg_mean_time_WByPop(netDt,
                          rdv,
                          infraestruturas,
                          unidades,
                          conjuntos,
                          popf,
                          w,
                          output,
                          oneway=None):
    """
    Mean travel time to the nearest infrastructure, weighted by the
    resident population (minutes).

    * netDt = Path to Network Dataset
    * infraestruturas = Points of destiny
    * unidades = statistic units (BGRI; Freg; Concelhos)
    * conjuntos = field grouping the units (Freg; Concelhos; NUT)
    * popf = Field with the population of the statistic unity
    * w = Workspace
    * output = Path to store the final output
    * rdv = Name of feature class with the streets network
    * oneway = passed to closest_facility as oneway_restriction

    Returns the dissolved output feature class (one feature per group,
    with the SUM of the weighted-time field "tm").
    """

    import arcpy
    import os
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.mng.feat import feat_to_pnt
    from gasp.cpu.arcg.mng.fld import add_field
    from gasp.cpu.arcg.mng.fld import calc_fld
    from gasp.cpu.arcg.mng.joins import join_table
    from gasp.mng.genze import dissolve
    from gasp.mng.gen import copy_feat
    from gasp.mob.arctbx.closest import closest_facility

    def get_freg_denominator(shp, groups, population, fld_time="Total_Minu"):
        # Sum time * population per group; this is the denominator of the
        # weighted-mean indicator computed below.
        cursor = arcpy.SearchCursor(shp)

        groups_sum = {}
        for lnh in cursor:
            group = lnh.getValue(groups)
            nrInd = float(lnh.getValue(population))
            time = float(lnh.getValue(fld_time))

            if group not in groups_sum.keys():
                groups_sum[group] = time * nrInd

            else:
                groups_sum[group] += time * nrInd

        # NOTE(review): 'lnh' is unbound if the cursor yields no rows,
        # which would make this del raise NameError - confirm inputs are
        # never empty
        del cursor, lnh

        return groups_sum

    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = w

    # Start Procedure #
    # Create a copy of the statistic units to preserve the original data
    copy_unities = copy_feat(unidades,
                             os.path.join(w, os.path.basename(unidades)),
                             gisApi='arcpy')

    # Generate centroids of the statistic unities - unidades
    lyr_unidades = feat_lyr(copy_unities)
    pnt_unidades = feat_to_pnt(lyr_unidades, 'pnt_unidades.shp')

    # Network Processing - Distance between CENTROID and Destiny points
    closest_facility(netDt,
                     rdv,
                     infraestruturas,
                     pnt_unidades,
                     os.path.join(w, "cls_table.dbf"),
                     oneway_restriction=oneway)
    # 'j' = IncidentID - 1, i.e. the FID of the matching unit, used as the
    # join key between the closest-facility table and the units layer
    add_field("cls_table.dbf", 'j', "SHORT", "6")
    calc_fld("cls_table.dbf", 'j', "[IncidentID]-1")
    join_table(lyr_unidades, "FID", "cls_table.dbf", "j", "Total_Minu")

    # Sum of time * population per group (freguesia / conjunto)
    groups = get_freg_denominator(lyr_unidades, conjuntos, popf)
    add_field(lyr_unidades, "tm", "FLOAT", "10", "3")

    # Per-unit indicator: ((t * p) / group_total) * t
    cs = arcpy.UpdateCursor(lyr_unidades)
    linha = cs.next()
    while linha:
        group = linha.getValue(conjuntos)
        t = float(linha.getValue("Total_Minu"))
        p = int(linha.getValue(popf))
        total = groups[group]
        indi = ((t * p) / total) * t
        linha.setValue("tm", indi)
        cs.updateRow(linha)
        linha = cs.next()

    # One output feature per group, with the summed indicator
    return dissolve(lyr_unidades,
                    output,
                    conjuntos,
                    statistics="tm SUM",
                    api="arcpy")
Example #9
0
def gdl_mean_time_wByPop(unities,
                         unities_groups,
                         population_field,
                         destinations,
                         output,
                         workspace=None,
                         unities_epsg=4326,
                         destinations_epsg=4326):
    """
    Mean travel time to the nearest infrastructure, weighted by the
    resident population (Google Directions + OGR implementation).

    unities           - polygon statistic units (updated with 'meantime')
    unities_groups    - field grouping the units
    population_field  - field with the population of each unit
    destinations      - point layer with the destinations
    output            - path for the dissolved output (one feature per
                        group, SUM of 'meantime')
    workspace         - folder for intermediates (defaults to the output's
                        folder)
    unities_epsg      - EPSG of the unities layer (reprojected to 4326
                        for the Directions request when different)
    destinations_epsg - EPSG of the destinations layer

    # TODO: Migrate to Pandas
    """

    import os
    from osgeo import ogr
    from gasp.prop.ff import drv_name
    from gasp.fm import points_to_list
    from gasp.mng.feat import feat_to_pnt
    from gasp.mng.prj import project_geom
    from gasp.mng.fld import add_fields
    from gasp.mng.genze import dissolve
    from gasp.web.glg.direct import get_time_pnt_destinations

    workspace = workspace if workspace else \
        os.path.dirname(output)

    # Unities to centroid
    pnt_unities = feat_to_pnt(
        unities, os.path.join(workspace, 'pnt_' + os.path.basename(unities)))

    # List destinations
    lst_destinies = points_to_list(destinations,
                                   listVal="dict",
                                   inEpsg=destinations_epsg,
                                   outEpsg=4326)

    # Calculate indicator
    polyUnit = ogr.GetDriverByName(drv_name(unities)).Open(unities, 1)

    polyLyr = polyUnit.GetLayer()

    polyLyr = add_fields(polyLyr, {'meantime': ogr.OFTReal})

    pntUnit = ogr.GetDriverByName(drv_name(pnt_unities)).Open(pnt_unities, 0)

    pntLyr = pntUnit.GetLayer()

    # NOTE(review): polyLyr and pntLyr are advanced in lockstep below;
    # this assumes the centroid layer has the same feature order and
    # count as the polygon layer - confirm feat_to_pnt guarantees this
    polyFeat = polyLyr.GetNextFeature()
    distUnities = {}
    groups = {}
    for pntFeat in pntLyr:
        geom = pntFeat.GetGeometryRef()

        if unities_epsg == 4326:
            originGeom = geom
        else:
            originGeom = project_geom(geom, unities_epsg, 4326, api='ogr')

        # Travel time from the unit centroid to the nearest destination
        _id, duration, distance = get_time_pnt_destinations(
            originGeom, lst_destinies)

        # duration comes in seconds; convert to minutes
        __min = duration['value'] / 60.0
        pop = polyFeat.GetField(population_field)
        group = polyFeat.GetField(unities_groups)

        # Keep (minutes, minutes * population) per unit and accumulate
        # the group denominator
        distUnities[polyFeat.GetFID()] = (__min, __min * pop)

        if group not in groups:
            groups[group] = __min * pop
        else:
            groups[group] += __min * pop

        polyFeat = polyLyr.GetNextFeature()

    del polyLyr
    polyUnit.Destroy()

    # Second pass: write the per-unit indicator
    polyUnit = ogr.GetDriverByName(drv_name(unities)).Open(unities, 1)

    polyLyr = polyUnit.GetLayer()

    for feat in polyLyr:
        unitId = feat.GetFID()
        groupId = feat.GetField(unities_groups)

        # ((min * pop) / group_total) * min
        indicator = (distUnities[unitId][1] /
                     groups[groupId]) * distUnities[unitId][0]

        feat.SetField('meantime', indicator)

        polyLyr.SetFeature(feat)

    del polyLyr, pntLyr
    polyUnit.Destroy()
    pntUnit.Destroy()

    # One output feature per group, with the summed indicator
    dissolve(unities,
             output,
             unities_groups,
             statistics={'meantime': 'SUM'},
             api='ogr')
Example #10
0
def population_within_point_buffer(netDataset,
                                   rdvName,
                                   pointShp,
                                   populationShp,
                                   popField,
                                   bufferDist,
                                   epsg,
                                   output,
                                   workspace=None,
                                   bufferIsTimeMinutes=None,
                                   useOneway=None):
    """
    Assign to points the population within a certain distance (metric or time)
    
    * Creates a Service Area Polygon for each point in pointShp;
    * Intersect the Service Area Polygons with the populationShp;
    * Count the number of persons within each Service Area Polygon
    (this number will be weighted by the area % of the statistic unit
    intersected with the Service Area Polygon).
    
    Parameters:
    * netDataset - path to an ArcGIS Network Dataset;
    * rdvName - name of the streets network feature class in netDataset;
    * pointShp - point Feature Class (one service area per point);
    * populationShp - polygon Feature Class with the statistic unities;
    * popField - column of populationShp with the population count;
    * bufferDist - service area size: meters, or minutes when
      bufferIsTimeMinutes evaluates to True;
    * epsg - EPSG code used to set the CRS of the output;
    * output - path of the output Feature Class (returned);
    * workspace - folder for intermediate files (defaults to the folder
      of pointShp);
    * useOneway - if truthy, oneway restrictions are used in the network.
    """

    import arcpy
    import os
    from geopandas import GeoDataFrame
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.anls.ovlay import intersect
    from gasp.mng.gen import copy_feat
    from gasp.cpu.arcg.mng.fld import add_geom_attr
    from gasp.cpu.arcg.mng.fld import add_field
    from gasp.cpu.arcg.mng.fld import calc_fld
    from gasp.mng.genze import dissolve
    from gasp.mob.arctbx.svarea import service_area_use_meters
    from gasp.mob.arctbx.svarea import service_area_polygon
    from gasp.fm import tbl_to_obj
    from gasp.to.shp import df_to_shp

    # Intermediate files go next to pointShp unless a workspace is given
    workspace = os.path.dirname(pointShp) if not workspace else workspace

    if not os.path.exists(workspace):
        from gasp.oss.ops import create_folder
        workspace = create_folder(workspace, overwrite=False)

    # Copy population layer (a new area column will be added to it below)
    populationShp = copy_feat(
        populationShp,
        os.path.join(workspace,
                     'cop_{}'.format(os.path.basename(populationShp))),
        gisApi='arcpy')

    # Create layer
    # NOTE(review): pntLyr is created but not used below (the service-area
    # helpers receive the pointShp path directly) — confirm it is needed
    pntLyr = feat_lyr(pointShp)
    popLyr = feat_lyr(populationShp)

    # Create Service Area (distance-based or time-based, per bufferIsTimeMinutes)
    if not bufferIsTimeMinutes:
        servArea = service_area_use_meters(
            netDataset,
            rdvName,
            bufferDist,
            pointShp,
            os.path.join(workspace,
                         'servare_{}'.format(os.path.basename(pointShp))),
            OVERLAP=False,
            ONEWAY=useOneway)

    else:
        servArea = service_area_polygon(
            netDataset,
            rdvName,
            bufferDist,
            pointShp,
            os.path.join(workspace,
                         "servare_{}".format(os.path.basename(pointShp))),
            ONEWAY_RESTRICTION=useOneway,
            OVERLAP=None)

    servAreaLyr = feat_lyr(servArea)

    # Add Column with Polygons area to Feature Class population
    add_geom_attr(popLyr, "total", geom_attr="AREA")

    # Intersect buffer and Population Feature Class
    intSrc = intersect([servAreaLyr, popLyr],
                       os.path.join(workspace, "int_servarea_pop.shp"))

    intLyr = feat_lyr(intSrc)

    # Get area of intersected statistical unities with population
    add_geom_attr(intLyr, "partarea", geom_attr="AREA")

    # Get population weighted by area intersected
    # (population share proportional to the fraction of the unit inside
    # the service area: partarea / total)
    calc_fld(intLyr, "population",
             "((([partarea] * 100) / [total]) * [{}]) / 100".format(popField),
             {
                 "TYPE": "DOUBLE",
                 "LENGTH": "10",
                 "PRECISION": "3"
             })

    # Dissolve service area by Facility ID
    diss = dissolve(intLyr,
                    os.path.join(workspace, 'diss_servpop.shp'),
                    "FacilityID",
                    statistics="population SUM")

    # Get original Point FID from FacilityID
    # (FacilityID is 1-based; subtracting 1 recovers the 0-based point FID)
    calc_fld(diss, "pnt_fid", "[FacilityID] - 1", {
        "TYPE": "INTEGER",
        "LENGTH": "5",
        "PRECISION": None
    })

    dfPnt = tbl_to_obj(pointShp)
    dfDiss = tbl_to_obj(diss)

    # shapefile DBF truncates field names to 10 chars, so the dissolve
    # statistic comes out as "SUM_popula" — assumed truncation; TODO confirm
    dfDiss.rename(columns={"SUM_popula": "n_pessoas"}, inplace=True)

    resultDf = dfPnt.merge(dfDiss,
                           how='inner',
                           left_index=True,
                           right_on="pnt_fid")

    # Keep only the original point geometry (geometry_x)
    resultDf.drop('geometry_y', axis=1, inplace=True)

    # NOTE(review): the {'init': 'epsg:N'} crs syntax is deprecated in
    # recent pyproj/geopandas — consider "EPSG:N" when upgrading
    resultDf = GeoDataFrame(resultDf,
                            crs={'init': 'epsg:{}'.format(epsg)},
                            geometry='geometry_x')

    df_to_shp(resultDf, output)

    return output
Example #11
0
def pop_less_dist_x2(net_dataset,
                     rdv_name,
                     locations,
                     interval,
                     unities,
                     fld_groups,
                     fld_pop,
                     w,
                     output,
                     useOneway=None):
    """
    Network processing - run a Service Area analysis to find the areas
    less than X minutes (interval) away from the locations, and compute,
    per group of unities, the population inside those areas (absolute and
    as a percentage of the group's total population).
    
    * net_dataset/rdv_name - Network Dataset and its streets feature class;
    * locations - facilities used to build the service area;
    * unities - statistic unities Feature Class;
    * fld_groups - field with the groups to dissolve by;
    * fld_pop - field with the population of each unity;
    * w - workspace for intermediate files;
    * output - path of the dissolved result (returned).
    """

    import arcpy
    import numpy
    import os
    import pandas
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.mng.genze import dissolve
    from gasp.cpu.arcg.anls.ovlay import intersect
    from gasp.cpu.arcg.mng.fld import calc_fld
    from gasp.mob.arctbx.svarea import service_area_polygon
    from gasp.fm import tbl_to_obj
    from gasp.oss import get_filename
    from gasp.to.shp import df_to_shp
    from gasp.cpu.arcg.mng.fld import del_field

    if arcpy.CheckExtension("Network") == "Available":
        arcpy.CheckOutExtension("Network")
    # Procedure #
    # Generate Service Area
    svArea = service_area_polygon(net_dataset,
                                  rdv_name,
                                  interval,
                                  locations,
                                  os.path.join(w, "servarea.shp"),
                                  ONEWAY_RESTRICTION=useOneway)

    # Dissolve Service Area (merge overlapping facility polygons)
    svArea = dissolve(svArea,
                      os.path.join(w, 'svarea_diss.shp'),
                      "FID",
                      api="arcpy")

    # Intersect unities with Service Area
    lyr_unities = feat_lyr(unities)
    unities_servarea = intersect([lyr_unities, svArea],
                                 os.path.join(w, "unidades_mx.shp"))

    # In the original Unities SHP, create a col with the population
    # only for the unities intersected with service area
    intersectDf = tbl_to_obj(unities_servarea)

    # NOTE(review): the population field is used here as the identifier of
    # the intersected unities and as the join key below — this assumes
    # population values are unique per unity; verify with the data
    unities_less_than = intersectDf[fld_pop].unique()
    unities_less_than = pandas.DataFrame(unities_less_than, columns=['cod_'])

    popDf = tbl_to_obj(unities)
    popDf = popDf.merge(unities_less_than,
                        how='outer',
                        left_on=fld_pop,
                        right_on="cod_")
    # Flag unities inside the service area ('1') vs outside ('0');
    # population counts only for flagged unities
    popDf["less_than"] = popDf.cod_.fillna(value='0')
    popDf["less_than"] = numpy.where(popDf["less_than"] != '0', '1', '0')
    popDf["population"] = numpy.where(popDf["less_than"] == '1',
                                      popDf[fld_pop], 0)
    popDf["original"] = popDf[fld_pop]

    newUnities = df_to_shp(popDf, os.path.join(w, 'unities_pop.shp'))

    # Dissolve and Get result
    result = dissolve(newUnities,
                      output,
                      fld_groups,
                      statistics="original SUM;population SUM",
                      api="arcpy")

    # shapefile DBF truncates field names to 10 chars: the dissolve stats
    # come out as "SUM_popula" / "SUM_origin" (assumed; TODO confirm)
    calc_fld(result, "pop_{}".format(interval), "[SUM_popula]", {
        "TYPE": "INTEGER",
        "LENGTH": "10",
        "PRECISION": ""
    })

    calc_fld(result, fld_pop, "[SUM_origin]", {
        "TYPE": "INTEGER",
        "LENGTH": "10",
        "PRECISION": ""
    })

    # Percentage of each group's population within the interval
    calc_fld(result, "pop_{}_p".format(interval),
             "([pop_{}] / [{}]) *100".format(interval, fld_pop), {
                 "TYPE": "DOUBLE",
                 "LENGTH": "6",
                 "PRECISION": "2"
             })

    # Drop the intermediate statistic fields
    del_field(result, "SUM_popula")
    del_field(result, "SUM_origin")

    return result
Example #12
0
def pop_less_dist_x(net_dataset, rdv_name, junctions_name, locations, interval,
                    unities, fld_groups, fld_pop, w, output):
    """
    Network processing - run a Service Area analysis to find the areas
    less than X minutes (interval) away from the locations and estimate,
    per group of unities, the population inside those areas and its
    percentage of the group's total population.
    
    * net_dataset/rdv_name/junctions_name - Network Dataset components;
    * locations - facilities used to build the service area;
    * unities - statistic unities Feature Class;
    * fld_groups - field with the groups to dissolve by;
    * fld_pop - field with the population of each unity;
    * w - workspace for intermediate files;
    * output - path of the dissolved result (returned).
    """

    import arcpy
    import os
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.mng.genze import dissolve
    from gasp.cpu.arcg.anls.ovlay import intersect
    from gasp.cpu.arcg.mng.fld import add_field
    from gasp.mob.arctbx.svarea import service_area_polygon

    def GetUnitiesIntersected(shpintersected, shpUnities):
        """Return the FIDs of the unities intersected with the service area."""
        # Intersect output FID fields are "FID_" + the first 6 chars of the
        # input name (shapefile 10-char field name limit)
        if len(os.path.basename(shpUnities)) > 6:
            fld_tag = os.path.basename(shpUnities)[:6]
        else:
            fld_tag = os.path.basename(shpUnities)
        c = arcpy.SearchCursor(shpintersected)
        l = c.next()
        u = []
        while l:
            fid_entity = int(l.getValue("FID_{name}".format(name=fld_tag)))
            if fid_entity not in u:
                u.append(fid_entity)
            l = c.next()
        # FIX: return the accumulated FID list after the loop; the original
        # returned the cursor row from inside the loop, aborting after the
        # first record
        return u

    def WritePopLessXMin(shp, fld_pop, lst_intersected):
        """Write, in a new column, the population of the intersected unities."""
        # FIX: the created field must match the name used by setValue
        # ("popX"); the original created a misspelled "poxX" field
        add_field(shp, "popX", "SHORT", "8")
        cursor = arcpy.UpdateCursor(shp)
        linha = cursor.next()
        while linha:
            bgri = int(linha.getValue("FID"))
            if bgri in lst_intersected:
                p = int(linha.getValue(fld_pop))
                linha.setValue("popX", p)
                # FIX: the arcpy cursor method is updateRow, not UpdateRow
                cursor.updateRow(linha)
            linha = cursor.next()
        return "popX"

    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = w

    # Procedure #
    # Generate Service Area
    ServiceArea = service_area_polygon(net_dataset, rdv_name, junctions_name,
                                       interval, locations, "servarea.shp")

    # Intersect unities with Service Area
    lyr_unities = feat_lyr(unities)
    unities_servarea = intersect([lyr_unities, ServiceArea], "unidades_mx.shp")

    # Get the FID of the unities that intersects with the service area
    id_unities = GetUnitiesIntersected(unities_servarea, unities)
    # Update original shape with the population less than X minutes away
    fld_pop_less_x = WritePopLessXMin(lyr_unities, fld_pop, id_unities)
    # Dissolve by group, summing total and within-X population
    # (statistics passed by keyword for consistency with pop_less_dist_x2)
    dissolve(
        lyr_unities, output, fld_groups,
        statistics="{pop} SUM;{popx} SUM".format(
            pop=fld_pop, popx=fld_pop_less_x),
        api="arcpy")
    # Estimate population percent
    # Dissolve SUM_ field names are truncated (10-char shapefile limit)
    if len(fld_pop) > 6:
        fld_pop_tag = fld_pop[:6]
    else:
        fld_pop_tag = fld_pop

    # FIX: "shp" was an undefined name here (NameError); the new field
    # belongs on the dissolve output
    add_field(output, "lessX", "FLOAT", "8", "3")
    cursor = arcpy.UpdateCursor(output)
    linha = cursor.next()

    while linha:
        total_pop = float(linha.getValue("SUM_{pop}".format(pop=fld_pop_tag)))
        pop_less = float(linha.getValue("SUM_{p}".format(p=fld_pop_less_x)))
        # FIX: percentage of the group's population within X minutes;
        # the original divided total by within-X, yielding values > 100
        # (pop_less_dist_x2 computes the same ratio this way)
        per = (pop_less / total_pop) * 100.0
        linha.setValue("lessX", per)
        cursor.updateRow(linha)
        linha = cursor.next()
    return output
Example #13
0
def mean_time_by_influence_area(netDt,
                                rdv,
                                infraestruturas,
                                fld_infraestruturas,
                                unidades,
                                id_unidade,
                                conjuntos,
                                popf,
                                influence_areas_unities,
                                w,
                                output,
                                oneway=True):
    """
    Population-weighted mean travel time to the nearest infrastructure
    (minutes), by influence area.
    
    * netDt - Path to Network Dataset
    * rdv - Name of feature class with the streets network
    * infraestruturas - Points of destiny
    * fld_infraestruturas - Field on destiny points to relate with influence area
    * unidades - Statistic unities (BGRI; Freg; Concelhos)
    * id_unidade - Identifier field of the statistic unities
    * conjuntos - Field with the groups of unities (Freg; Concelhos; NUT)
    * popf - Field with the population of the statistic unity
    * influence_areas_unities - Field on statistic unities layer to relate
    with influence area
    * w = Workspace
    * output = Path to store the final output
    * oneway - If True, oneway restrictions are used in the network
    """

    import arcpy
    import os
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.mng.feat import feat_to_pnt
    from gasp.cpu.arcg.mng.gen import merge
    from gasp.mng.gen import copy_feat
    from gasp.mng.genze import dissolve
    from gasp.cpu.arcg.mng.fld import add_field
    from gasp.cpu.arcg.mng.fld import calc_fld
    from gasp.cpu.arcg.mng.fld import field_statistics
    from gasp.cpu.arcg.mng.fld import type_fields
    from gasp.cpu.arcg.mng.joins import join_table
    from gasp.cpu.arcg.anls.exct import select_by_attr
    from gasp.cpu.arcg.netanlst.closest import closest_facility
    """if arcpy.CheckExtension("Network") == "Available":
        arcpy.CheckOutExtension("Network")
    
    else:
        raise ValueError('Network analyst extension is not avaiable')"""
    def ListGroupArea(lyr, fld_ia, fld_grp):
        """
        Map each group (fld_grp) to the list of influence areas (fld_ia)
        referenced by its unities.
        """
        d = {}
        cs = arcpy.SearchCursor(lyr)
        for lnh in cs:
            id_group = lnh.getValue(fld_grp)
            id_ia = lnh.getValue(fld_ia)
            if id_group not in d.keys():
                d[id_group] = [id_ia]
            else:
                if id_ia not in d[id_group]:
                    d[id_group].append(id_ia)
        return d

    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = w

    # Procedure #
    copy_unities = copy_feat(unidades,
                             os.path.join(w, os.path.basename(unidades)),
                             gisApi='arcpy')

    # Generate centroids of the statistic unities - unidades
    lyr_unidades = feat_lyr(copy_unities)
    pnt_unidades = feat_to_pnt(lyr_unidades,
                               'pnt_unidades.shp',
                               pnt_position="INSIDE")
    # List all groups of unities (conjuntos)
    group_areas = ListGroupArea(lyr_unidades, influence_areas_unities,
                                conjuntos)
    # Create Layers
    lyr_pnt_unidades = feat_lyr(pnt_unidades)
    lyr_pnt_facilities = feat_lyr(infraestruturas)

    result_list = []

    # Quote the selection literal only when the field is a string
    fld_type_unities = type_fields(lyr_pnt_unidades, field=conjuntos)
    SELECT_UNITIES = '{fld}=\'{c}\'' if str(fld_type_unities) == 'String' \
        else '{fld}={c}'

    fld_type_facilities = type_fields(lyr_pnt_facilities,
                                      field=fld_infraestruturas)
    SELECT_FACILITIES = '{fld}=\'{obj}\'' if str(fld_type_facilities) == 'String' \
        else '{fld}={obj}'
    for group in group_areas.keys():
        # Select centroids of interest
        interest_centroids = select_by_attr(
            lyr_pnt_unidades, SELECT_UNITIES.format(c=str(group),
                                                    fld=conjuntos),
            'pnt_{c}.shp'.format(c=str(group)))
        # Select facilities of interest
        # (all facilities of the influence areas related to this group)
        expression = ' OR '.join([
            SELECT_FACILITIES.format(fld=fld_infraestruturas,
                                     obj=str(group_areas[group][i]))
            for i in range(len(group_areas[group]))
        ])

        interest_facilities = select_by_attr(
            lyr_pnt_facilities, expression,
            'facilities_{c}.shp'.format(c=str(group)))
        # Run closest facilitie - Distance between selected CENTROID and selected facilities
        cls_fac_table = os.path.join(w, "clsf_{c}.dbf".format(c=str(group)))
        closest_facility(netDt,
                         rdv,
                         interest_facilities,
                         interest_centroids,
                         cls_fac_table,
                         oneway_restriction=oneway)
        # IncidentID is 1-based; j = IncidentID-1 matches the centroid FID
        add_field(cls_fac_table, 'j', "SHORT", "6")
        calc_fld(cls_fac_table, 'j', "[IncidentID]-1")
        join_table(interest_centroids, "FID", cls_fac_table, "j", "Total_Minu")
        # Calculate sum of time x population
        add_field(interest_centroids, 'sum', "DOUBLE", "10", "3")
        calc_fld(interest_centroids, 'sum',
                 "[{pop}]*[Total_Minu]".format(pop=popf))
        # denominador = sum of (pop x time) over the whole group
        denominador = field_statistics(interest_centroids, 'sum', 'SUM')
        add_field(interest_centroids, 'tm', "DOUBLE", "10", "3")
        # NOTE(review): tm = (pop*t / SUM(pop*t)) * t; summing tm per group
        # gives SUM(pop*t^2)/SUM(pop*t) — confirm this matches the intended
        # weighted-mean definition
        calc_fld(
            interest_centroids, 'tm',
            "([sum]/{sumatorio})*[Total_Minu]".format(
                sumatorio=str(denominador)))
        result_list.append(interest_centroids)

    # Bring the per-group results back onto the unities and aggregate
    merge_shp = merge(result_list, "merge_centroids.shp")
    join_table(lyr_unidades, id_unidade, "merge_centroids.shp", id_unidade,
               "tm")

    return dissolve(lyr_unidades,
                    output,
                    conjuntos,
                    statistics="tm SUM",
                    api='arcpy')
Example #14
0
def vector_assign_pntags_to_build(osmdb, pntTable, polyTable, apidb='SQLITE'):
    """
    Replace buildings with tag yes using the info in the Points Layer
    
    Only used for URBAN ATLAS and CORINE LAND COVER
    
    Returns a tuple: (dissolved buildings classified as 11, dissolved
    buildings reclassified from the points layer, timing dict with the
    duration of each step — entries are None for steps that did not run).
    """

    import datetime
    from gasp.sql.mng.tbl import row_num as cnt_row
    if apidb != "POSTGIS":
        from gasp.to.shp.grs import sqlite_to_shp as db_to_shp
    else:
        # NOTE(review): psql_to_grs is imported from gasp.to.shp.grs —
        # confirm this is the right module for the PostGIS loader
        from gasp.to.shp.grs import psql_to_grs as db_to_shp
    from gasp.sql.anls.ovlay import sgbd_get_feat_within
    from gasp.sql.anls.ovlay import sgbd_get_feat_not_within
    from gasp.mng.genze import dissolve
    from gasp.mng.grstbl import add_table

    # Building polygons that contain a tagged building point: the class
    # comes from the point (pnt_build AS cls)
    time_a = datetime.datetime.now().replace(microsecond=0)
    new_build = sgbd_get_feat_within(
        osmdb,
        ("(SELECT buildings AS pnt_build, geometry AS pnt_geom "
         "FROM {} WHERE buildings IS NOT NULL)").format(pntTable),
        "pnt_geom",
        ("(SELECT buildings AS poly_build, geometry AS poly_geom "
         "FROM {} WHERE buildings IS NOT NULL)").format(polyTable),
        "poly_geom",
        "new_buildings",
        inTblCols="pnt_build AS cls",
        withinCols="poly_geom AS geometry",
        outTblIsFile=None,
        apiToUse="OGR_SPATIALITE" if apidb != "POSTGIS" else apidb)
    time_b = datetime.datetime.now().replace(microsecond=0)

    # Building polygons with no tagged point inside: keep them with the
    # fixed class 11
    yes_build = sgbd_get_feat_not_within(
        osmdb,
        ("(SELECT buildings AS poly_build, geometry AS poly_geom "
         "FROM {} WHERE buildings IS NOT NULL)").format(polyTable),
        "poly_geom",
        ("(SELECT buildings AS pnt_build, geometry AS pnt_geom "
         "FROM {} WHERE buildings IS NOT NULL)").format(pntTable),
        "pnt_geom",
        "yes_builds",
        inTblCols="poly_geom AS geometry, 11 AS cls",
        outTblIsFile=None,
        apiToUse="OGR_SPATIALITE" if apidb != "POSTGIS" else apidb)
    time_c = datetime.datetime.now().replace(microsecond=0)

    # Row counts decide whether each branch below runs at all
    N12 = cnt_row(osmdb,
                  new_build,
                  api='psql' if apidb == 'POSTGIS' else 'sqlite')
    time_d = datetime.datetime.now().replace(microsecond=0)
    N11 = cnt_row(osmdb,
                  yes_build,
                  api='psql' if apidb == 'POSTGIS' else 'sqlite')
    time_e = datetime.datetime.now().replace(microsecond=0)

    if N11:
        # Add data into grasss
        grsBuild11 = db_to_shp(osmdb,
                               yes_build,
                               "yes_builds",
                               filterByReg=True)
        time_f = datetime.datetime.now().replace(microsecond=0)

        # Dissolve
        dissVect = dissolve(grsBuild11,
                            "dss_{}".format(grsBuild11),
                            'cls',
                            api="grass")

        add_table(dissVect, None, lyrN=1, asCMD=True)
        time_g = datetime.datetime.now().replace(microsecond=0)
    else:
        dissVect = None
        time_f = None
        time_g = None

    if N12:
        # Add data into GRASS GIS
        grsBuild12 = db_to_shp(osmdb, new_build, "pnt_build", filterByReg=True)

        time_h = datetime.datetime.now().replace(microsecond=0)

        # Dissolve
        dissVect12 = dissolve(grsBuild12,
                              "dss_{}".format(grsBuild12),
                              'cls',
                              api="grass")

        add_table(dissVect12, None, lyrN=1, asCMD=True)
        time_i = datetime.datetime.now().replace(microsecond=0)

    else:
        dissVect12 = None
        time_h = None
        time_i = None

    # Timing log: step index -> (step name, duration); the import_b12 delta
    # is measured from time_e when the N11 branch was skipped
    return dissVect, dissVect12, {
        0: ('intersect', time_b - time_a),
        1: ('disjoint', time_c - time_b),
        2: ('count_b12', time_d - time_c),
        3: ('count_b11', time_e - time_d),
        4:
        None if not time_f else ('import_b11', time_f - time_e),
        5:
        None if not time_g else ('dissolve_b11', time_g - time_f),
        6:
        None if not time_h else
        ('import_b12', time_h - time_g if time_g else time_h - time_e),
        7:
        None if not time_i else ('dissolve_b12', time_i - time_h)
    }
Example #15
0
def grs_vec_roads(osmdb, lineTbl, polyTbl):
    """
    Select Roads for GRASS GIS
    
    Imports OSM roads from the database, measures the distance to nearby
    buildings (stored in the "bf_roads" column), buffers the roads and
    dissolves them by road class. Returns the dissolved vector and a
    timing dict (entries are None for steps that did not run); returns
    (None, timing) when there are no road rows.
    """

    import datetime
    from gasp.sql.mng.tbl import row_num
    from gasp.to.shp.grs import sqlite_to_shp
    from gasp.anls.prox.bf import _buffer
    from gasp.mng.genze import dissolve
    from gasp.mng.grstbl import add_table

    def _now():
        # Timestamps trimmed to whole seconds for readable deltas
        return datetime.datetime.now().replace(microsecond=0)

    # Count road rows first; nothing to do without them
    t_start = _now()
    NR = row_num(osmdb, lineTbl, where="roads IS NOT NULL", api='sqlite')
    t_count_roads = _now()

    if not NR:
        return None, {0: ('count_rows_roads', t_count_roads - t_start)}

    # Roads to GRASS GIS
    roadsVect = sqlite_to_shp(osmdb,
                              lineTbl,
                              "all_roads",
                              where="roads IS NOT NULL")
    t_import_roads = _now()

    # Buildings to GRASS GIS
    NB = row_num(osmdb, polyTbl, where="building IS NOT NULL", api='sqlite')
    t_count_builds = _now()

    t_import_builds, t_near, t_update = None, None, None
    if NB:
        from gasp.anls.prox import grs_near as near
        from gasp.mng.grstbl import update_table

        builds = sqlite_to_shp(osmdb,
                               polyTbl,
                               "all_builds",
                               where="building IS NOT NULL",
                               filterByReg=True)
        t_import_builds = _now()

        # Distance from each road to the nearest building (up to 12 units)
        near(roadsVect, builds, nearDistCol="todist", maxDist=12, as_cmd=True)
        t_near = _now()

        # Where a building is near, use that rounded distance as the
        # per-feature buffer size
        update_table(roadsVect,
                     "bf_roads",
                     "round(todist,0)",
                     "\"todist > 0\"",
                     lyrN=1,
                     ascmd=True)
        t_update = _now()

    # Run Buffer tool
    roadsBf = _buffer(roadsVect,
                      "bf_roads",
                      "bf_roads",
                      api='grass',
                      geom_type="line")
    t_buffer = _now()

    # Dissolve Roads by class and rebuild the attribute table
    roadsDiss = dissolve(roadsBf, "diss_roads", field="roads", api="grass")
    add_table(roadsDiss, None, lyrN=1, asCMD=True)
    t_dissolve = _now()

    return roadsDiss, {
        0: ('count_rows_roads', t_count_roads - t_start),
        1: ('import_roads', t_import_roads - t_count_roads),
        2: ('count_rows_build', t_count_builds - t_import_roads),
        3: None if not t_import_builds else
        ('import_builds', t_import_builds - t_count_builds),
        4: None if not t_near else ('near_analysis', t_near - t_import_builds),
        5: None if not t_update else ('update_buffer_tbl', t_update - t_near),
        6: ('buffer_roads',
            t_buffer - t_update if t_update else t_buffer - t_count_builds),
        7: ('diss_roads', t_dissolve - t_buffer)
    }