Code Example #1
File: places.py Project: zonakre/gasp
def find_places(inShp, epsg, radius, output, keyword=None, type=None):
    """
    Extract places from Google Maps
    """

    import pandas
    import time
    from gasp.fm import tbl_to_obj
    from gasp.to.geom import pnt_dfwxy_to_geodf
    from gasp.mng.prj import project
    from gasp.mng.fld.df import listval_to_newcols
    from gasp.to.shp import df_to_shp

    pntDf = tbl_to_obj(inShp)
    pntDf = project(pntDf, None, 4326,
                    gisApi='pandas') if epsg != 4326 else pntDf

    pntDf['latitude'] = pntDf.geometry.y.astype(str)
    pntDf['longitude'] = pntDf.geometry.x.astype(str)

    # Collect the places returned for each point and concatenate them once at
    # the end (the original rebound a DATA variable from inside the closure,
    # which raises UnboundLocalError, and the shadowed builtin type() cannot
    # be called). get_places_by_radius is not imported in this snippet; it is
    # assumed to be gasp's Google Places helper.
    RESULTS = []

    def get_places(row):
        places = get_places_by_radius(row.latitude, row.longitude, radius,
                                      keyword, type)

        RESULTS.append(pandas.DataFrame(places['results']))

    pntDf.apply(lambda x: get_places(x), axis=1)

    DATA = pandas.concat(RESULTS, ignore_index=True)

    DATA = listval_to_newcols(DATA, 'geometry')
    fldsToDelete = ['viewport', 'opening_hours', 'icon', 'plus_code', 'photos']
    realDeletion = [x for x in fldsToDelete if x in DATA.columns.values]
    DATA.drop(realDeletion, axis=1, inplace=True)

    DATA = listval_to_newcols(DATA, 'location')

    DATA = pnt_dfwxy_to_geodf(DATA, 'lng', 'lat', 4326)

    if epsg != 4326:
        DATA = project(DATA, None, epsg, gisApi='pandas')

    DATA["types"] = DATA.types.astype(str)

    df_to_shp(DATA, output)

    return output
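
A hypothetical call to the function above (the paths, the EPSG code and the
search parameters are illustrative, not taken from the project):

places_shp = find_places(
    '/tmp/sample_points.shp', 3763, 500, '/tmp/google_places.shp',
    keyword='coffee', type='cafe'
)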
Code Example #2
File: mapqst.py Project: zonakre/gasp
def rev_geocode_df(df, epsg, key=None, isOpen=None):
    """
    Add Locations after Reverse Geocoding to Pandas Dataframe
    """

    from gasp.web.mapqst import rev_geocode
    from gasp.mng.prj import project

    __df = df.copy() if epsg == 4326 else project(
        df, None, 4326, gisApi='pandas')

    __df["latitude"] = __df.geometry.y.astype(float)
    __df["longitude"] = __df.geometry.x.astype(float)

    def add_locations(row):
        data = rev_geocode(row.latitude,
                           row.longitude,
                           keyToUse=key,
                           useOpen=isOpen)

        row["locations"] = data["results"][0]["locations"][0]

        return row

    __df = __df.apply(lambda x: add_locations(x), axis=1)

    return __df
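
A minimal, self-contained sketch of the row-wise pattern used above, with a
stub standing in for the MapQuest rev_geocode call so it runs offline:

import pandas

def fake_rev_geocode(lat, lng):
    # Stand-in for gasp.web.mapqst.rev_geocode
    return {"results": [{"locations": [
        {"street": "somewhere near {:.3f},{:.3f}".format(lat, lng)}]}]}

df = pandas.DataFrame({'latitude': [40.207, 38.722],
                       'longitude': [-8.426, -9.139]})

def add_locations(row):
    data = fake_rev_geocode(row.latitude, row.longitude)
    row["locations"] = data["results"][0]["locations"][0]
    return row

df = df.apply(add_locations, axis=1)
print(df.locations.tolist())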
Code Example #3
def path_from_coords_to_shp(latOrigin, lngOrigin, latDest, lngDest, outshp,
                            transmod='foot-walking', outepsg=4326):
    """
    Receive coords and get path. Save path as Feature Class
    """
    
    import pandas
    from gasp.web.orouteserv import directions
    from gasp.to.geom        import regulardf_to_geodf, json_obj_to_geodf
    from gasp.to.shp         import df_to_shp
    
    path = directions(
        latOrigin, lngOrigin, latDest, lngDest,
        modeTransportation=transmod
    )
    
    geodf = json_obj_to_geodf(path, 4326)
    
    geodf.drop(['segments', 'bbox', 'way_points'], axis=1, inplace=True)
    
    geodf["summary"] = geodf['summary'][0]
    
    geodf = pandas.concat([
        geodf.drop(['summary'], axis=1),
        geodf['summary'].apply(pandas.Series)
    ], axis=1)
    
    geodf = regulardf_to_geodf(geodf, "geometry", 4326)
    
    if outepsg != 4326:
        from gasp.mng.prj import project
        geodf = project(geodf, None, outepsg, gisApi='pandas')
    
    return df_to_shp(geodf, outshp)
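
A self-contained sketch of the summary-expansion step above: a column holding
dicts is broken out into its own columns with apply(pandas.Series):

import pandas

geodf = pandas.DataFrame({
    'route': ['a', 'b'],
    'summary': [{'distance': 1200.0, 'duration': 900.0},
                {'distance': 3400.0, 'duration': 2400.0}],
})

geodf = pandas.concat(
    [geodf.drop(['summary'], axis=1), geodf['summary'].apply(pandas.Series)],
    axis=1)

print(list(geodf.columns))  # ['route', 'distance', 'duration']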
Code Example #4
File: __init__.py Project: fagan2888/gasp
def points_to_list(pntShp, listVal='tuple', inEpsg=None, outEpsg=None):
    """
    Return a list as:
    
    if listVal == 'tuple'
    l = [(x_coord, y_coord), ..., (x_coord, y_coord)]
    
    elif listVal == 'dict'
    l = [
        {id : fid_value, x : x_coord, y : y_coord},
        ...
        {id : fid_value, x : x_coord, y : y_coord}
    ]
    """
    
    # tbl_to_obj is not imported in the original snippet; the other examples
    # take it from gasp.fm
    from gasp.fm import tbl_to_obj

    geoDf = tbl_to_obj(pntShp)
    
    if inEpsg and outEpsg:
        if inEpsg != outEpsg:
            from gasp.mng.prj import project
            geoDf = project(geoDf, None, outEpsg, gisApi='pandas')
    
    geoDf["x"] = geoDf.geometry.x.astype(float)
    geoDf["y"] = geoDf.geometry.y.astype(float)
    
    if listVal == 'tuple':
        subset = geoDf[['x', 'y']]
    
        coords = [tuple(x) for x in subset.values]
    
    elif listVal == 'dict':
        geoDf["id"] = geoDf.index
        subset = geoDf[['id', 'x', 'y']]
        
        coords = subset.to_dict(orient='records')
    
    else:
        raise ValueError(
            'Value of listVal is not Valid. Please use "tuple" or "dict"'
        )
    
    return coords
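
A self-contained illustration of the 'tuple' branch above, using an in-memory
GeoDataFrame instead of a shapefile:

import geopandas
from shapely.geometry import Point

gdf = geopandas.GeoDataFrame(
    {'name': ['a', 'b']},
    geometry=[Point(10.0, 20.0), Point(11.5, 21.5)],
    crs="EPSG:4326")

gdf["x"] = gdf.geometry.x.astype(float)
gdf["y"] = gdf.geometry.y.astype(float)

coords = [tuple(xy) for xy in gdf[['x', 'y']].values]
print(coords)  # [(10.0, 20.0), (11.5, 21.5)]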
Code Example #5
File: elev.py Project: zonakre/gasp
def elevation_to_pntshp(pnt_shp, epsg, fld_name='ELEVATION'):
    """
    Add an elevation attribute to a point feature class
    """

    from gasp.fm import tbl_to_obj
    from gasp.prop.feat import get_geom_type
    from gasp.mng.prj import project
    from gasp.mng.split import split_df
    from gasp.to.obj import df_to_dict
    from gasp.to.shp import df_to_shp

    # Check Geometries type - shapes should be of type point
    geomt = get_geom_type(pnt_shp, name=True, gisApi='ogr')
    if geomt != 'POINT' and geomt != 'MULTIPOINT':
        raise ValueError('All input geometry must be of type point')

    src = tbl_to_obj(pnt_shp)
    if epsg != 4326:
        src = project(src, None, 4326, gisApi='pandas')

    # Get str with coords
    src["coords"] = src["geometry"].y.astype(str) + "," + \
        src["geometry"].x.astype(str)

    # Split dataframe
    dfs = split_df(src, 250)

    # Query the elevation service chunk by chunk and keep every response
    # (the original overwrote elvd on each pass, so only the last chunk
    # survived). pnts_elev is not imported in this snippet; it is assumed to
    # be the module's wrapper around an elevation web service that accepts a
    # "lat,lng|lat,lng|..." string.
    data = []
    for df in dfs:
        coord_str = str(df.coords.str.cat(sep="|"))

        data.append(pnts_elev(coord_str))

    return data
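
A rough, self-contained sketch of what the split_df(src, 250) helper is
assumed to do: break a DataFrame into chunks of at most 250 rows.

import numpy
import pandas

df = pandas.DataFrame({'coords': ['40.1,-8.4'] * 600})
n_chunks = int(numpy.ceil(len(df) / 250.0))
chunks = numpy.array_split(df, n_chunks)

print([len(c) for c in chunks])  # [200, 200, 200]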
Code Example #6
File: dmx.py Project: zonakre/gasp
def pnt_to_facility(pnt, pntSrs, facilities, facSrs, transMode="driving"):
    """
    Calculate distance between points and the nearest facility.
    
    # TODO: Add the possibility to save the path between origins and
    destinations
    """

    import os
    import time
    import pandas
    from gasp.fm import tbl_to_obj
    from gasp.to.geom import regulardf_to_geodf
    from gasp.mng.prj import project
    from gasp.prop.feat import get_geom_type
    from gasp.oss import get_filename
    from gasp.to.obj import df_to_dict, dict_to_df
    from gasp.to.shp import df_to_shp
    from gasp.web.glg.distmx import dist_matrix

    # Convert SHPs to GeoDataFrame
    pntDf = tbl_to_obj(pnt)
    facil = tbl_to_obj(facilities)

    # Check if SHPs are points
    originsGeom = get_geom_type(pntDf, geomCol="geometry", gisApi='pandas')
    if originsGeom != 'Point' and originsGeom != 'MultiPoint':
        raise ValueError('All input geometry must be of type point')

    destGeom = get_geom_type(facil, geomCol="geometry", gisApi='pandas')
    if destGeom != 'Point' and destGeom != 'MultiPoint':
        raise ValueError('All input geometry must be of type point')

    # Re-Project if necessary
    pntDf = pntDf if pntSrs == 4326 else project(
        pntDf, None, 4326, gisApi='pandas')
    facil = facil if facSrs == 4326 else project(
        facil, None, 4326, gisApi='pandas')

    # Coords to cols as str
    pntDf["geom"] = pntDf["geometry"].y.astype(str) + "," + \
        pntDf["geometry"].x.astype(str)

    facil["geom"] = facil["geometry"].y.astype(str) + "," + \
        facil["geometry"].y.astype(str)

    # Get distance between points and nearest facility
    pntDict = df_to_dict(pntDf)

    for idx in pntDict:
        destStr = str(facil["geom"].str.cat(sep="|"))

        glg_resp = dist_matrix(pntDict[idx]["geom"],
                               destStr,
                               1,
                               int(facil.shape[0]),
                               transport_mode=transMode)

        matrix = pandas.DataFrame(glg_resp[0]["elements"])

        matrix.drop(["status", "distance"], axis=1, inplace=True)
        matrix = pandas.concat([
            matrix.drop(["duration"], axis=1), matrix["duration"].apply(
                pandas.Series)
        ],
                               axis=1)

        matrix.drop("text", axis=1, inplace=True)
        matrix.rename(columns={"value": "duration"}, inplace=True)

        pntDict[idx]["duration"] = matrix.duration.min() / 60.0

    pntDf = dict_to_df(pntDict)
    pntDf = regulardf_to_geodf(pntDf, "geometry", 4326)

    if pntSrs != 4326:
        pntDf = project(pntDf, None, pntSrs, gisApi='pandas')

    df_to_shp(
        pntDf,
        os.path.join(os.path.dirname(pnt),
                     "{}_{}.shp".format(get_filename(pnt), "result")))

    return pntDf
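
A self-contained sketch of the "lat,lng|lat,lng|..." strings sent to the
Distance Matrix service above:

import pandas

facil = pandas.DataFrame({'lat': [40.207, 40.211], 'lng': [-8.426, -8.430]})

facil["geom"] = facil.lat.astype(str) + "," + facil.lng.astype(str)
destStr = str(facil["geom"].str.cat(sep="|"))

print(destStr)  # 40.207,-8.426|40.211,-8.43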
Code Example #7
def servarea_from_points(pntShp, inEPSG, range, outShp,
                         mode='foot-walking', intervals=None):
    """
    Calculate isochrones for all points in a Point Feature Class
    """
    
    import time
    from shapely.geometry    import shape
    from threading           import Thread
    from gasp.web.orouteserv import get_keys, isochrones
    from gasp.fm             import tbl_to_obj
    from gasp.mng.split      import split_df_inN
    from gasp.fm.geom        import pointxy_to_cols
    from gasp.mng.gen        import merge_df
    from gasp.prop.feat      import get_geom_type
    from gasp.mng.prj        import project
    from gasp.to.geom        import dict_to_geodf
    from gasp.to.obj         import df_to_dict
    from gasp.to.shp         import df_to_shp
    
    # SHP TO GEODATAFRAME
    pntDf = tbl_to_obj(pntShp)
    
    # Check if SHPs are points
    inGeomType = get_geom_type(pntDf, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    # Reproject geodf if necessary
    if inEPSG != 4326:
        pntDf = project(pntDf, None, 4326, gisApi='pandas')
    
    pntDf["old_fid"] = pntDf.index
    
    pntDf = pointxy_to_cols(
        pntDf, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    
    # Get Keys
    KEYS = get_keys()
    
    df_by_key = split_df_inN(pntDf, KEYS.shape[0])
    
    keys_list = KEYS['key'].tolist()
    
    results = []
    
    def get_isochrones(df, key):
        pntDict = df_to_dict(df)
    
        for k in pntDict:
            iso = isochrones(
                "{},{}".format(pntDict[k]["longitude"], pntDict[k]["latitude"]),
                range, range_type='time', modeTransportation=mode,
                intervals=intervals
            )
        
            pntDict[k]["geometry"] = shape(iso["features"][0]["geometry"])
        
            time.sleep(5)
    
            pntDf = dict_to_geodf(pntDict, "geometry", 4326)
        
        results.append(pntDf)
    
    # Create threads
    thrds = []
    i = 1
    for df in df_by_key:
        thrds.append(Thread(
            name='tk{}'.format(str(i)), target=get_isochrones,
            args=(df, keys_list[i - 1])
        ))
        i += 1
    
    # Start all threads
    for thr in thrds:
        thr.start()
    
    # Wait for all threads to finish
    for thr in thrds:
        thr.join()
    
    # Join all dataframes
    pntDf = merge_df(results, ignIndex=False)
    
    if inEPSG != 4326:
        pntDf = project(pntDf, None, inEPSG, gisApi='pandas')
    
    return df_to_shp(pntDf, outShp)
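
A minimal, self-contained sketch of the thread-per-API-key pattern used above:
each worker handles its own chunk and appends to a shared results list.

from threading import Thread

def worker(chunk, key, results):
    # Stand-in for the per-key web requests; here we just tag the rows
    results.append([(row, key) for row in chunk])

chunks = [[1, 2], [3, 4], [5, 6]]
keys = ['key_a', 'key_b', 'key_c']
results = []

threads = [Thread(target=worker, args=(chunks[i], keys[i], results))
           for i in range(len(chunks))]

for t in threads:
    t.start()
for t in threads:
    t.join()

print(results)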
Code Example #8
def cost_od(shpOrigins, shpDestinations, epsgOrigins, epsgDestinations,
            table_result, mode='foot-walking'):
    """
    Matrix od Service Implementation
    """
    
    import pandas
    from threading              import Thread
    from gasp.fm.api.orouteserv import get_keys
    from gasp.fm.api.orouteserv import matrix_od
    from gasp.fm                import tbl_to_obj
    from gasp.mng.split         import split_df_inN
    from gasp.fm.geom           import pointxy_to_cols
    from gasp.mng.prj           import project
    from gasp.mng.gen           import merge_df
    from gasp.prop.feat         import get_geom_type
    from gasp.to                import obj_to_tbl
    
    origensDf = tbl_to_obj(shpOrigins)
    destinoDf = tbl_to_obj(shpDestinations)
    
    # Check if SHPs are points
    inGeomType = get_geom_type(origensDf, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    inGeomType = get_geom_type(destinoDf, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    # Re-project if needed
    if epsgOrigins != 4326:
        origensDf = project(origensDf, None, 4326, gisApi='pandas')
    
    if epsgDestinations != 4326:
        destinoDf = project(destinoDf, None, 4326, gisApi='pandas')
    
    origensDf = pointxy_to_cols(
        origensDf, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    destinoDf = pointxy_to_cols(
        destinoDf, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    
    origensDf["location"] = origensDf.longitude.astype(str) + "," + \
        origensDf.latitude.astype(str)
    destinoDf["location"] = destinoDf.longitude.astype(str) + "," + \
        destinoDf.latitude.astype(str)
    
    origensDf["old_fid"] = origensDf.index
    destinoDf["old_fid"] = destinoDf.index
    
    # Get Keys
    KEYS = get_keys()

    origensByKey = split_df_inN(origensDf, KEYS.shape[0])

    lst_keys = KEYS["key"].tolist()
    
    # Produce matrix
    results = []
    def get_matrix(origins, key):
        origins.reset_index(inplace=True)
        origins["rqst_idx"] = origins.index.astype(str)
        
        destinations = destinoDf.copy()
        
        strSource = origins.location.str.cat(sep="|")
        idxSource = origins.rqst_idx.str.cat(sep=",")
        
        destinations["rqst_idx"] = destinations.old_fid + origins.shape[0]
        destinations["rqst_idx"] = destinations.rqst_idx.astype(str)
        strDestin = destinations.location.str.cat(sep="|")
        idxDestin = destinations.rqst_idx.str.cat(sep=",")
        
        rslt = matrix_od(
            strSource + "|" + strDestin,
            idxSources=idxSource, idxDestinations=idxDestin,
            useKey=key, modeTransportation=mode
        )
        
        rslt = pandas.DataFrame(rslt["durations"])
        
        originsFID = origins.old_fid.tolist()
        destinaFID = destinations.old_fid.tolist()
        
        mm = []
        for lnh in range(len(originsFID)):
            for col in range(len(destinaFID)):
                ll = [
                    originsFID[lnh], destinaFID[col], rslt.iloc[lnh, col]
                ]
                mm.append(ll)
        
        matrix = pandas.DataFrame(
            mm, columns=["fid_origin", "fid_destin", "cost"])
        
        results.append(matrix)
    
    # Create threads
    thrds = []
    i = 1
    for df in origensByKey:
        thrds.append(Thread(
            name="tk{}".format(str(i)), target=get_matrix,
            args=(df, lst_keys[i - 1])
        ))
        i += 1
    
    # Start all threads
    for thr in thrds:
        thr.start()
    
    # Wait for all threads to finish
    for thr in thrds:
        thr.join()
    
    # Join all dataframes
    RESULT = merge_df(results, ignIndex=False)
    
    RESULT = RESULT.merge(
        origensDf, how='inner',
        left_on=["fid_origin"], right_on=["old_fid"]
    )
    RESULT.drop(
        [x for x in origensDf.columns.values if x != "geometry"],
        axis=1, inplace=True
    )
    RESULT.rename(columns={"geometry" : "origin_geom"}, inplace=True)
    
    RESULT = RESULT.merge(
        destinoDf, how='inner',
        left_on=["fid_destin"], right_on=["old_fid"]
    )
    RESULT.drop(
        [x for x in destinoDf.columns.values if x != "geometry"],
        axis=1, inplace=True
    )
    RESULT.rename(columns={"geometry" : "destin_geom"}, inplace=True)
    
    RESULT["origin_geom"] = RESULT.origin_geom.astype(str)
    RESULT["destin_geom"] = RESULT.destin_geom.astype(str)
    
    return obj_to_tbl(RESULT, table_result)
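
A self-contained sketch of the step inside get_matrix that turns a durations
matrix (rows = origins, columns = destinations) into the long
fid_origin / fid_destin / cost table:

import pandas

durations = [[60, 300, 180],
             [120, 90, 240]]
rslt = pandas.DataFrame(durations)

originsFID = [0, 1]
destinaFID = [10, 11, 12]

mm = []
for lnh in range(len(originsFID)):
    for col in range(len(destinaFID)):
        mm.append([originsFID[lnh], destinaFID[col], rslt.iloc[lnh, col]])

matrix = pandas.DataFrame(mm, columns=["fid_origin", "fid_destin", "cost"])
print(matrix)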
Code Example #9
File: glgpaths.py Project: zonakre/gasp
def dist_onedest_network(pntShp, pntRouteId, networkShp, netRouteId, netOrder,
                         netDuration, srs, output):
    """
    Imagine a network whose arcs all lead to a single destination.
    This program computes the distance between a point and that destination
    in two steps:
    * distance from the point to the nearest entry point into the network;
    * distance between that entry point and the network's destination.
    
    The network is made of circuits; it is assumed that one starts at a point,
    enters a circuit and never leaves it until reaching the destination. The
    circuit associated with each point must be given in the points table.
    """

    import pandas
    import time
    from geopandas import GeoDataFrame
    from gasp.fm import tbl_to_obj
    from gasp.web.glg.direct import pnt_to_pnt_duration
    from gasp.to.geom import regulardf_to_geodf, pnt_dfwxy_to_geodf
    from gasp.mng.df import df_groupBy
    from gasp.mng.prj import project
    from gasp.fm.geom import geom_endpoints_to_cols, pointxy_to_cols
    from gasp.mng.fld.df import distinct_of_distinct
    from gasp.to.obj import df_to_dict, dict_to_df
    from gasp.to.shp import df_to_shp

    netDataFrame = tbl_to_obj(networkShp)
    pntDataFrame = tbl_to_obj(pntShp)

    # Get entrance nodes
    netDataFrame = geom_endpoints_to_cols(netDataFrame, geomCol="geometry")
    geoEntrances = pnt_dfwxy_to_geodf(netDataFrame, "start_x", "start_y", srs)

    # To WGS
    if srs != 4326:
        geoEntrances = project(geoEntrances, None, 4326, gisApi='pandas')
        pntDataFrame = project(pntDataFrame, None, 4326, gisApi='pandas')

    # Get entrances by circuit
    routesEntrances = distinct_of_distinct(geoEntrances, netRouteId, netOrder)

    pntRelStops = pntDataFrame.merge(geoEntrances,
                                     how='inner',
                                     left_on=pntRouteId,
                                     right_on=netRouteId)

    pntRelStops = pointxy_to_cols(pntRelStops,
                                  geomCol="geometry",
                                  colX="start_x",
                                  colY="start_y")
    pntRelStops = pointxy_to_cols(pntRelStops,
                                  geomCol="geometry",
                                  colX="node_x",
                                  colY="node_y")

    pntRelStopsDict = df_to_dict(pntRelStops)

    for idx in pntRelStopsDict:
        ape = pnt_to_pnt_duration(pntRelStopsDict[idx]["start_y"],
                                  pntRelStopsDict[idx]["start_x"],
                                  pntRelStopsDict[idx]["node_y"],
                                  pntRelStopsDict[idx]["node_x"],
                                  mode="walking")

        time.sleep(5)

        pntRelStopsDict[idx]["gduration"] = ape

    pntRelStops = dict_to_df(pntRelStopsDict)

    pntRelStops_gp = df_groupBy(
        pntRelStops,
        [x for x in list(pntDataFrame.columns.values) if x != "geometry"],
        STAT='MIN',
        STAT_FIELD="gduration")

    pntRelStops_gp = pntRelStops_gp.merge(
        pntRelStops,
        how='inner',
        left_on=list(pntRelStops_gp.columns.values),
        right_on=list(pntRelStops_gp.columns.values))

    final_time = []
    for idx, row in pntRelStops_gp.iterrows():
        circ = row[pntRouteId]
        order = row[netOrder]

        for i in range(len(routesEntrances[circ])):
            if order == routesEntrances[circ][i]:
                checkpoints = routesEntrances[circ][i:]
                break

        timedistance = []
        for check in checkpoints:
            val = int(netDataFrame.loc[(netDataFrame[netRouteId] == circ) &
                                       (netDataFrame[netOrder] == check),
                                       [netDuration]][netDuration])

            timedistance.append(val)

        final_time.append(row["gduration"] + sum(timedistance))

    pntRelStops_gp["final_time"] = pandas.Series(final_time)

    # Save result
    pntRelStops_gp.drop(["geometry_y"], axis=1, inplace=True)

    gd = regulardf_to_geodf(pntRelStops_gp, "geometry_x", 4326)

    if srs != 4326:
        gd = project(gd, None, srs, gisApi='pandas')

    df_to_shp(gd, output)

    return output
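
A self-contained sketch of the "minimum per group, then merge back" step that
df_groupBy (STAT='MIN') and the following merge perform above, in plain pandas:

import pandas

df = pandas.DataFrame({
    'pnt_id':    [1, 1, 2, 2],
    'entrance':  ['A', 'B', 'C', 'D'],
    'gduration': [300, 120, 500, 450],
})

best = df.groupby('pnt_id', as_index=False)['gduration'].min()
best = best.merge(df, how='inner', on=['pnt_id', 'gduration'])

print(best)  # one row per point, keeping only its fastest entrance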
Code Example #10
File: mapbx.py Project: zonakre/gasp
def matrix_od(originsShp, destinationShp, originsEpsg, destinationEpsg,
              resultShp, modeTrans="driving"):
    """
    Use Pandas to Retrieve data from MapBox Matrix OD Service
    """
    
    import time
    from threading       import Thread
    from gasp.web.mapbx  import get_keys, matrix
    from gasp.fm         import tbl_to_obj
    from gasp.mng.split  import split_df, split_df_inN
    from gasp.mng.fld.df import listval_to_newcols
    from gasp.fm.geom    import pointxy_to_cols
    from gasp.mng.prj    import project
    from gasp.mng.gen    import merge_df
    from gasp.prop.feat  import get_geom_type
    from gasp.to.shp     import df_to_shp
    
    # Data to GeoDataFrame
    origens  = tbl_to_obj(    originsShp)
    destinos = tbl_to_obj(destinationShp)
    
    # Check if SHPs are points
    inGeomType = get_geom_type(origens, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    inGeomType = get_geom_type(destinos, geomCol="geometry", gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    # Re-Project data to WGS
    if originsEpsg != 4326:
        origens = project(origens, None, 4326, gisApi='pandas')
    
    if destinationEpsg != 4326:
        destinos = project(destinos, None, 4326, gisApi='pandas')
    
    origens = pointxy_to_cols(
        origens, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    destinos = pointxy_to_cols(
        destinos, geomCol="geometry",
        colX="longitude", colY="latitude"
    )
    
    # Prepare coordinates Str
    origens["location"]  = origens.longitude.astype(str) \
        + "," + origens.latitude.astype(str)
    
    destinos["location"] = destinos.longitude.astype(str) \
        + "," + destinos.latitude.astype(str)
    
    # Split the destinations DataFrame into DataFrames with
    # 24 rows each
    lst_destinos = split_df(destinos, 24)
    
    # Get Keys to use
    KEYS = get_keys()
    # Split origins by key
    origensByKey = split_df_inN(origens, KEYS.shape[0])
    
    lst_keys = KEYS["key"].tolist()
    
    # Produce matrix
    results = []
    def get_matrix(origins, key):
        def def_apply(row):
            rowResults = []
            for df in lst_destinos:
                strDest = df.location.str.cat(sep=";")
                
                strLocations = row["location"] + ";" + strDest
                
                dados = matrix(
                    strLocations, idxSources="0",
                    idxDestinations=";".join([str(i) for i in range(1, df.shape[0] + 1)]),
                    useKey=key, modeTransportation=modeTrans
                )
                time.sleep(5)
                
                rowResults += dados["durations"][0]
            
            row["od_matrix"] = rowResults
            
            return row
        
        newOrigins = origins.apply(
            lambda x: def_apply(x), axis=1
        )
        
        results.append(newOrigins)
    
    # Create threads
    thrds = []
    i     = 1
    for df in origensByKey:
        thrds.append(Thread(
            name="tk{}".format(str(i)), target=get_matrix,
            args=(df, lst_keys[i - 1])
        ))
        i += 1
    
    # Start all threads
    for thr in thrds:
        thr.start()
    
    # Wait for all threads to finish
    for thr in thrds:
        thr.join()
    
    # Join all dataframes
    RESULT = merge_df(results, ignIndex=False)
    
    RESULT = listval_to_newcols(RESULT, "od_matrix")
    
    RESULT.rename(
        columns={
            c: "dest_{}".format(c)
            for c in RESULT.columns.values if type(c) == int or type(c) == long
        }, inplace=True
    )
    
    if originsEpsg != 4326:
        RESULT = project(RESULT, None, originsEpsg, gisApi='pandas')
    
    return df_to_shp(RESULT, resultShp)


    
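A self-contained sketch of the final renaming step above: the columns created
from the od_matrix lists have integer names and get a "dest_" prefix (a
Python 3 isinstance check replaces the type()/long test used in the snippet):

import pandas

df = pandas.DataFrame([[1, 60, 120], [2, 90, 45]], columns=["fid", 0, 1])

df.rename(
    columns={c: "dest_{}".format(c) for c in df.columns if isinstance(c, int)},
    inplace=True)

print(list(df.columns))  # ['fid', 'dest_0', 'dest_1']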
Code Example #11
File: geocode.py Project: zonakre/gasp
def shp_from_address(inTbl, idAddr, addrCol, outShp,
                     epsg_out=4326, sheet_name=None,
                     doorNumber=None, zip4=None, zip3=None,
                     city=None, language=None, useComponents=None,
                     country=None):
    """
    Receive a table with a list of addresses and use the Google Maps API
    to get their position
    
    Preferred Address Structure:
    Rua+dos+Combatentes+da+Grande+Guerra,+14,+3030-185,+Coimbra
    
    Table Structure:
    idAddr | addrCol | doorNumber | zip4 | zip3 | city
    The idAddr field and the address field are mandatory
    
    For now, the input table must be an Excel file or a dbf table
    
    # TODO: Some rows may end up without geometry
    """
    
    from gasp.web.glg.geocod import get_position
    from gasp.fm             import tbl_to_obj
    from gasp.to.geom        import pnt_dfwxy_to_geodf
    from gasp.oss            import get_fileformat
    from gasp.mng.fld.df     import fld_types
    from gasp.to.obj         import df_to_dict, dict_to_df
    from gasp.to.shp         import df_to_shp
    
    # Get Addresses
    tblFormat = get_fileformat(inTbl)
    
    tblAdr = tbl_to_obj(inTbl, sheet=sheet_name)
    
    # Check if given columns are in table
    fields = [idAddr, addrCol, doorNumber, zip4, zip3, city]
    for f in fields:
        if f:
            if f not in tblAdr.columns.values:
                raise ValueError("{} column not in {}".format(f, inTbl))
    
    # Convert numeric fields to unicode
    colTypes = fld_types(tblAdr)
    for col in colTypes:
        if colTypes[col] != 'object':
            if colTypes[col] == 'float32' or colTypes[col] == 'float64':
                tblAdr[col] = tblAdr[col].astype(int)
            
        tblAdr[col] = tblAdr[col].astype(unicode, 'utf-8')
    
    # Create search field
    if not useComponents:
        if doorNumber and zip4 and zip3 and city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[doorNumber].astype(unicode, "utf-8") + unicode(",", "utf-8") + \
                tblAdr[zip4] + unicode("-", "utf-8") + \
                tblAdr[zip3] + unicode(",", "utf-8") + tblAdr[city]
    
        elif not doorNumber and zip4 and zip3 and city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[zip4] + unicode("-", "utf-8") + \
                tblAdr[zip3] + unicode(",", "utf-8") + tblAdr[city]
    
        elif doorNumber and not zip4 and not zip3 and not city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[doorNumber]
    
        elif not doorNumber and not zip4 and not zip3 and not city:
            tblAdr["search"] = tblAdr[addrCol]
    
        elif doorNumber and zip4 and not zip3 and city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[doorNumber] + unicode(",", "utf-8") + \
                tblAdr[zip4] + unicode(",", "utf-8") + tblAdr[city]
    
        elif doorNumber and not zip4 and not zip3 and city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[doorNumber] + unicode(",", "utf-8") + tblAdr[city]
    
        elif not doorNumber and zip4 and not zip3 and city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[city]
    
        elif not doorNumber and zip4 and zip3 and not city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[zip4] + unicode("-", "utf-8") + tblAdr[zip3]
    
        elif doorNumber and zip4 and not zip3 and not city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[doorNumber] + unicode(",", "utf-8") + tblAdr[zip4]
    
        elif doorNumber and zip4 and zip3 and not city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[doorNumber] + unicode(",", "utf-8") + tblAdr[zip4] + \
                unicode("-", "utf-8") + tblAdr[zip3]
    
        elif not doorNumber and zip4 and not zip3 and not city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[zip4]
    
        elif not doorNumber and not zip4 and not zip3 and city:
            tblAdr["search"] = tblAdr[addrCol] + unicode(",", "utf-8") + \
                tblAdr[city]
    
        else:
            raise ValueError('Parameters are not valid')
    
        # Sanitize search string
        tblAdr["search"] = tblAdr.search.str.replace(' ', '+')
    tblAdr = df_to_dict(tblAdr)
    
    # Geocode Addresses
    for idx in tblAdr:
        if not useComponents:
            glg_response = get_position(
                tblAdr[idx]["search"], country=None,
                language=None, locality=None, postal_code=None
            )
        else:
            if zip4 and zip3:
                _postal = u'{}-{}'.format(tblAdr[idx][zip4], tblAdr[idx][zip3])
            
            elif zip4 and not zip3:
                _postal = u'{}'.format(tblAdr[idx][zip4])
            
            else:
                _postal = None
            
            glg_response = get_position(
                u"{}{}".format(
                    tblAdr[idx][addrCol], u",{}".format(
                        tblAdr[idx][doorNumber]
                    ) if doorNumber else u""
                ),
                country=country, language=language,
                locality=tblAdr[idx][city].replace(" ", "+") if city else None,
                postal_code=_postal.replace(" ", "+") if _postal else None
            )
        
        if not glg_response: continue
        
        tblAdr[idx]["x"] = glg_response[0]['geometry']['location']['lng']
        tblAdr[idx]["y"] = glg_response[0]['geometry']['location']['lat']
        
        tblAdr[idx]["G_ADRS"] = glg_response[0]["formatted_address"]
        
        for i in glg_response[0]["address_components"]:
            if   i["types"][0] == 'street_number' : F = "G_PORT"
            elif i["types"][0] == 'route'         : F = "G_STREET"
            elif i["types"][0] == 'postal_code'   : F = "G_ZIPCODE"
            else: continue
            
            tblAdr[idx][F] = i["long_name"]
    
    # Convert Dataframe to GeoDataframe
    geoAdr = pnt_dfwxy_to_geodf(dict_to_df(tblAdr), "x", "y", 4326)
    
    # Reproject if user wants it
    if epsg_out != 4326:
        from gasp.mng.prj import project
        geoAdr = project(geoAdr, None, epsg_out, gisApi='pandas')
    
    # To Shapefile
    df_to_shp(geoAdr, outShp)
    
    return geoAdr
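
A self-contained sketch of the search-string construction for the most
complete case (address, door number, zip4-zip3, city), written with Python 3
strings instead of the Python 2 unicode() calls above:

import pandas

tblAdr = pandas.DataFrame({
    'addr': ['Rua dos Combatentes da Grande Guerra'],
    'door': ['14'], 'zip4': ['3030'], 'zip3': ['185'], 'city': ['Coimbra'],
})

tblAdr["search"] = (tblAdr.addr + "," + tblAdr.door + "," +
                    tblAdr.zip4 + "-" + tblAdr.zip3 + "," + tblAdr.city)
tblAdr["search"] = tblAdr.search.str.replace(' ', '+')

print(tblAdr.search[0])
# Rua+dos+Combatentes+da+Grande+Guerra,+14,+3030-185,+Coimbra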
Code Example #12
File: geocode.py Project: zonakre/gasp
def address_from_featcls(inShp, outShp, epsg_in):
    """
    Read a point geometry and return a table with the addresses
    """
    
    from gasp.web.glg.geocod import get_address
    from gasp.fm             import tbl_to_obj
    from gasp.to.geom        import regulardf_to_geodf
    from gasp.fm.geom        import pointxy_to_cols
    from gasp.prop.feat      import get_geom_type
    from gasp.to.obj         import df_to_dict, dict_to_df
    from gasp.to.shp         import df_to_shp
    
    # Convert ESRI Shapefile to GeoDataFrame
    geoDf = tbl_to_obj(inShp)
    
    # Get Geometry field name
    for col in geoDf.columns.values:
        if col == 'geom' or col ==  'geometry':
            F_GEOM = col
            break
        else:
            continue
    
    # Check if inShp has a Geom of Type Point
    inGeomType = get_geom_type(geoDf, geomCol=F_GEOM, gisApi='pandas')
    
    if inGeomType != 'Point' and inGeomType != 'MultiPoint':
        raise ValueError('The input geometry must be of type point')
    
    # Reproject geodf if necessary
    if epsg_in != 4326:
        from gasp.mng.prj import project
        
        geoDf = project(geoDf, None, 4326, gisApi='pandas')
    
    # Get Coords of each point
    geoDf = pointxy_to_cols(geoDf, F_GEOM, colX="x", colY='y')
    
    # Search for addresses
    geoDict = df_to_dict(geoDf)
    for idx in geoDict:
        glg_response = get_address(geoDict[idx]["y"], geoDict[idx]["x"])
        
        geoDict[idx]["G_ADDRESS"] = glg_response[0]['formatted_address']
        
        for i in glg_response[0]["address_components"]:
            if   i["types"][0] == 'street_mumber' : F = "G_PORT"
            elif i["types"][0] == 'route'         : F = "G_STREET"
            elif i["types"][0] == 'postal_code'   : F = "G_ZIPCODE"
            else: continue
            
            geoDict[idx][F] = i["long_name"]
    
    # Save results in a new file
    geoDf = dict_to_df(geoDict)
    geoDf = regulardf_to_geodf(geoDf, F_GEOM, 4326)
    
    geoDf.drop(["x", "y"], axis=1, inplace=True)
    
    if epsg_in != 4326:
        geoDf = project(geoDf, None, epsg_in, gisApi='pandas')
    
    df_to_shp(geoDf, outShp)
    
    return geoDf
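
A self-contained sketch of pulling street, door number and postal code out of
a Google-style address_components list, mirroring the mapping above:

components = [
    {"types": ["street_number"], "long_name": "14"},
    {"types": ["route"], "long_name": "Rua dos Combatentes da Grande Guerra"},
    {"types": ["locality", "political"], "long_name": "Coimbra"},
    {"types": ["postal_code"], "long_name": "3030-185"},
]

FIELDS = {"street_number": "G_PORT", "route": "G_STREET",
          "postal_code": "G_ZIPCODE"}

row = {}
for comp in components:
    if comp["types"][0] in FIELDS:
        row[FIELDS[comp["types"][0]]] = comp["long_name"]

print(row)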
Code Example #13
def service_area_as_sup_cst(networkDataset,
                            rdvName,
                            extentLyr,
                            originsLyr,
                            output,
                            epsg=3763,
                            REF_DISTANCE=3000,
                            NR_INTERVALS=20):
    """
    Create a Cost Distance Surface using Service Area Tool
    
    Same result of the Cost Distance tool but using Service Area Tool
    
    Supported by Google Maps API
    
    PROBLEM: REF_DISTANCE should be larger when the extent is larger.
    """

    import os

    # arcpy is used below but was not imported in the original snippet
    import arcpy

    from gasp.prop.ext import get_extent
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.to.shp.arcg import geomArray_to_fc
    from gasp.mob.arctbx.svarea import service_area_polygon
    from gasp.web.glg.distmatrix import get_max_dist

    if epsg != 4326:
        from gasp.mng.prj import project

    arcpy.env.overwriteOutput = True

    # Get extent
    minX, maxX, minY, maxY = get_extent(extentLyr, gisApi='arcpy')

    # Get Reference points through the boundary
    # Get North West to East
    north_west_east = [(minX, maxY)]
    # Get South West to East
    south_west_east = [(minX, minY)]
    c = minX
    while c < maxX:
        north_west_east.append((c, maxY))
        south_west_east.append((c, minY))
        c += REF_DISTANCE

    north_west_east.append((maxX, maxY))
    south_west_east.append((maxX, minY))

    # Get West North to South
    west_north_to_south = [(minX, maxY)]
    # Get East North to South
    east_north_to_south = [(maxX, maxY)]
    c = maxY
    while c > minY:
        west_north_to_south.append((minX, c))
        east_north_to_south.append((maxX, c))
        c -= REF_DISTANCE

    west_north_to_south.append((minX, minY))
    east_north_to_south.append((maxX, minY))

    south_west_east.reverse()
    west_north_to_south.reverse()

    # Export Ref Points to a file only to see the result
    REF_GEOM = north_west_east + east_north_to_south + \
        south_west_east + west_north_to_south
    line_array = [{'FID': 0, "GEOM": REF_GEOM}]

    REF_POINTS = os.path.join(os.path.dirname(output), 'extent.shp')
    geomArray_to_fc(line_array, REF_POINTS, "POLYLINE", epsg, overwrite=True)

    # Calculate time-distance between origins Lyr and reference points
    # Get Geom of originsLyr
    # Convert to WGS84
    if epsg != 4326:
        originsWGS = project(
            originsLyr,
            os.path.join(os.path.dirname(output), 'origins_wgs84.shp'), 4326)
    else:
        originsWGS = originsLyr

    origLyr = feat_lyr(originsWGS)
    origPoint = []
    for line in arcpy.SearchCursor(origLyr):
        pnt = line.Shape.centroid

        origPoint.append((pnt.X, pnt.Y))

    # Get WGS REF POINTS
    if epsg != 4326:
        refWGS = project(
            REF_POINTS, os.path.join(os.path.dirname(output),
                                     'extent_wgs.shp'), 4326)
    else:
        refWGS = REF_POINTS

    refPointsLyr = feat_lyr(refWGS)
    refPoints = []
    for line in arcpy.SearchCursor(refPointsLyr):
        geom = line.getValue("Shape")

        for vector in geom:
            for pnt in vector:
                pnt_str = str(pnt).split(' ')
                refPoints.append((pnt_str[0], pnt_str[1]))

    # From that distances, get time intervals
    max_distance = get_max_dist(origPoint, refPoints)
    INTERVAL_RANGE = int(round(max_distance / NR_INTERVALS, 0))

    c = 0
    INTERVALS = []
    for i in range(NR_INTERVALS):
        INTERVALS.append(c + INTERVAL_RANGE)
        c += INTERVAL_RANGE

    # Run Service Area Tool
    service_area_polygon(networkDataset, rdvName, INTERVALS, originsLyr,
                         output)

    return output
Code Example #14
File: dmx.py Project: zonakre/gasp
def dist_matrix_by_shp(oShp, dShp, oEpsg, dEpsg, result, transMode=None):
    """
    Create distance matrix using shapes and Google Maps API
    
    - Uses my first API_KEY
    """

    import time
    import pandas
    from gasp.fm import tbl_to_obj
    from gasp.mng.split import split_df
    from gasp.mng.prj import project
    from gasp.mng.fld.df import listval_to_newcols
    from gasp.prop.feat import get_geom_type
    from gasp.mng.gen import merge_df
    from gasp.web.glg.distmx import dist_matrix
    from gasp.to import obj_to_tbl
    from gasp.to.obj import df_to_list
    from gasp.oss import get_filename

    # Origins and Destinations to GeoDataframe
    originsDf = tbl_to_obj(oShp)
    destnatDf = tbl_to_obj(dShp)

    # Check Geometries type - shapes should be of type point
    originsGeom = get_geom_type(originsDf, gisApi='pandas')
    destGeom = get_geom_type(destnatDf, gisApi='pandas')
    if (originsGeom != 'Point' and originsGeom != 'MultiPoint') or \
        (destGeom != 'Point' and destGeom != 'MultiPoint'):
        raise ValueError('All input geometries must be of type point')

    # Re-project GeoDataframes if needed
    originsDf = originsDf if oEpsg == 4326 else \
        project(originsDf, None, 4326, gisApi='pandas')

    destnatDf = destnatDf if dEpsg == 4326 else \
        project(destnatDf, None, 4326, gisApi='pandas')

    # Geom to Field as str
    originsDf["geom"] = originsDf["geometry"].y.astype(str) + "," + \
        originsDf["geometry"].x.astype(str)

    destnatDf["geom"] = destnatDf["geometry"].y.astype(str) + "," + \
        destnatDf["geometry"].x.astype(str)

    originsDf["old_fid"] = originsDf.index
    destnatDf["old_fid"] = destnatDf.index

    # Split Destinations
    lstOrigins = split_df(originsDf, 95)
    for odf in lstOrigins:
        odf.reset_index(inplace=True)

    lstDestinations = df_to_list(destnatDf)
    RESULTS = []
    for destino in lstDestinations:
        for oDf in lstOrigins:
            matrix = dist_matrix(
                str(oDf.geom.str.cat(sep="|")),
                str(destino["geom"]),
                oDf.shape[0],
                1,
                transport_mode=transMode,
                useKey='AIzaSyAmyPmqtxD20urqtpCpn4ER74a6J4N403k')

            matrix = pandas.DataFrame(matrix)
            matrix = listval_to_newcols(matrix, "elements")

            matrix = matrix.merge(oDf,
                                  how='inner',
                                  left_index=True,
                                  right_index=True)

            matrix.rename(columns={
                'old_fid': "fid_origin",
                0: "cost"
            },
                          inplace=True)

            matrix["fid_destin"] = destino['old_fid']

            RESULTS.append(matrix)

            time.sleep(5)

    # Join all dataframes
    RESULT = merge_df(RESULTS, ignIndex=False)
    # sanitizeDataCols is not imported in this snippet; it is assumed to be a
    # gasp helper that cleans/casts the cost column
    RESULT = sanitizeDataCols(RESULT, "cost")

    RESULT.drop([
        x
        for x in originsDf.columns.values if x != "geometry" and x != "old_fid"
    ],
                axis=1,
                inplace=True)
    RESULT.rename(columns={"geometry": "origin_geom"}, inplace=True)

    RESULT = RESULT.merge(destnatDf,
                          how='inner',
                          left_on=["fid_destin"],
                          right_on=["old_fid"])
    RESULT.drop([x for x in destnatDf.columns.values if x != "geometry"],
                axis=1,
                inplace=True)
    RESULT.rename(columns={"geometry": "destin_geom"}, inplace=True)

    RESULT["origin_geom"] = RESULT.origin_geom.astype(str)
    RESULT["destin_geom"] = RESULT.destin_geom.astype(str)

    obj_to_tbl(RESULT, result, sheetsName=get_filename(result))

    return result
Code Example #15
File: glgpaths.py Project: zonakre/gasp
def assign_cost_to_line(inLines, outLines, epsg):
    """
    Assign Movement Cost to Line
    """

    import time
    import pandas
    from geopandas import GeoDataFrame
    from gasp.fm import tbl_to_obj
    from gasp.mng.prj import project
    from gasp.web.glg.direct import pnt_to_pnt_duration
    from gasp.to.shp import df_to_shp

    # Data to GeoDataFrame
    linesDf = tbl_to_obj(inLines)

    # Re-Project input data
    if epsg != 4326:
        linesDf = project(linesDf, None, 4326, gisApi='pandas')

    def get_points(row):
        row["points"] = [pnt for pnt in row["geometry"].coords]

        return row

    linesDf = linesDf.apply(lambda x: get_points(x), axis=1)

    linesDict = linesDf.to_dict(orient='index')

    # Get All possible vertex pairs in each line
    # Get distance between the start and end of each line formed by one pair
    # Sum all distances and associate the new value to the original line
    for idx in linesDict:
        points = linesDict[idx]["points"]

        pairs = [
            pnt_to_pnt_duration(points[i - 1][1],
                                points[i - 1][0],
                                points[i][1],
                                points[i][0],
                                mode="driving") for i in range(1, len(points))
        ]

        time.sleep(5)

        linesDict[idx]["duration"] = sum(pairs)

    linesDff = pandas.DataFrame.from_dict(linesDict, orient='index')

    # Re-Project input data
    linesDff = GeoDataFrame(linesDff,
                            crs={"init": "epsg:4326"},
                            geometry="geometry")

    if epsg != 4326:
        linesDff = project(linesDff, None, epsg, gisApi='pandas')

    linesDff.drop("points", axis=1, inplace=True)

    df_to_shp(linesDff, outLines)

    return outLines
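
A self-contained sketch of the per-line cost above: walk consecutive vertex
pairs of a line and sum a duration for each segment. straight_line_seconds is
a purely illustrative stand-in for the pnt_to_pnt_duration web call:

from shapely.geometry import LineString

def straight_line_seconds(lat_a, lng_a, lat_b, lng_b, speed_kmh=50.0):
    # Very rough planar estimate, only to keep the sketch runnable offline
    km = ((lat_a - lat_b) ** 2 + (lng_a - lng_b) ** 2) ** 0.5 * 111.0
    return km / speed_kmh * 3600.0

line = LineString([(-8.42, 40.20), (-8.41, 40.21), (-8.40, 40.21)])
points = list(line.coords)

duration = sum(
    straight_line_seconds(points[i - 1][1], points[i - 1][0],
                          points[i][1], points[i][0])
    for i in range(1, len(points))
)

print(round(duration, 1), "seconds")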
Code Example #16
File: dmx.py Project: zonakre/gasp
def dist_matrix_using_shp(originsShp,
                          destinationsShp,
                          originsEpsg,
                          destinationsEpsg,
                          outTable,
                          transMode=None):
    """
    Create a distance matrix using shapes and Google Maps API
    """

    import time
    import pandas
    from threading import Thread
    from gasp.mng.split import split_df, split_df_inN
    from gasp.mng.prj import project
    from gasp.prop.feat import get_geom_type
    from gasp.mng.gen import merge_df
    from gasp.fm import tbl_to_obj
    from gasp.to import obj_to_tbl
    from gasp.web.glg import get_keys
    from gasp.web.glg.distmx import dist_matrix

    # Origins and Destinations to GeoDataframe
    originsDf = tbl_to_obj(originsShp)
    destnatDf = tbl_to_obj(destinationsShp)

    # Check Geometries type - shapes should be of type point
    originsGeom = get_geom_type(originsDf, gisApi='pandas')
    destGeom = get_geom_type(destnatDf, gisApi='pandas')
    if (originsGeom != 'Point' and originsGeom != 'MultiPoint') or \
        (destGeom != 'Point' and destGeom != 'MultiPoint'):
        raise ValueError('All input geometries must be of type point')

    # Re-project GeoDataframes if needed
    originsDf = originsDf if originsEpsg == 4326 else \
        project(originsDf, None, 4326, gisApi='pandas')

    destnatDf = destnatDf if destinationsEpsg == 4326 else \
        project(destnatDf, None, 4326, gisApi='pandas')

    # Geom to Field as str
    originsDf["geom"] = originsDf["geometry"].y.astype(str) + "," + \
        originsDf["geometry"].x.astype(str)

    destnatDf["geom"] = destnatDf["geometry"].y.astype(str) + "," + \
        destnatDf["geometry"].x.astype(str)

    originsDf["old_fid"] = originsDf.index
    destnatDf["old_fid"] = destnatDf.index

    # Split the destinations DataFrame into DataFrames with 10 rows each
    lst_destinos = split_df(destnatDf, 10)

    # Get Keys
    KEYS = get_keys()
    lst_keys = KEYS["key"].tolist()
    origensByKey = split_df_inN(originsDf, KEYS.shape[0])

    if len(origensByKey) == len(lst_keys) + 1:
        origensByKey[-2] = origensByKey[-2].append(origensByKey[-1])
        del origensByKey[-1]

    # Produce matrix for each origins in origensByKey
    results = []

    def get_matrix(origins, key):
        subOrigins = split_df(origins, 10)

        for df in subOrigins:
            for __df in lst_destinos:
                matrix = dist_matrix(str(df.geom.str.cat(sep="|")),
                                     str(__df.geom.str.cat(sep="|")),
                                     df.shape[0],
                                     __df.shape[0],
                                     transport_mode=transMode,
                                     useKey=str(key))

                matrix = pandas.DataFrame(matrix)
                matrix = pandas.concat([
                    matrix.drop(["elements"], axis=1),
                    matrix["elements"].apply(pandas.Series)
                ],
                                       axis=1)

                originsFID = df.old_fid.tolist()
                destinaFID = __df.old_fid.tolist()

                mm = []
                for i in range(len(originsFID)):
                    for e in range(len(destinaFID)):
                        ll = [originsFID[i], destinaFID[e], matrix.iloc[i, e]]
                        mm.append(ll)

                Fmatrix = pandas.DataFrame(
                    mm, columns=["fid_origin", "fid_destin", "cost"])

                results.append(Fmatrix)

                time.sleep(5)

    # Create threads
    thrds = []
    i = 1

    for df in origensByKey:
        thrds.append(
            Thread(name="tk{}".format(str(i)),
                   target=get_matrix,
                   args=(df, lst_keys[i - 1])))
        i += 1

    # Start all threads
    for thr in thrds:
        thr.start()

    # Wait for all threads to finish
    for thr in thrds:
        thr.join()

    # Join all dataframes
    RESULT = merge_df(results, ignIndex=False)
    # sanitizeDataCols is not imported in this snippet; it is assumed to be a
    # gasp helper that cleans/casts the cost column
    RESULT = sanitizeDataCols(RESULT, "cost")

    RESULT = RESULT.merge(originsDf,
                          how='inner',
                          left_on=["fid_origin"],
                          right_on=["old_fid"])
    RESULT.drop([x for x in originsDf.columns.values if x != "geometry"],
                axis=1,
                inplace=True)
    RESULT.rename(columns={"geometry": "origin_geom"}, inplace=True)

    RESULT = RESULT.merge(destnatDf,
                          how='inner',
                          left_on=["fid_destin"],
                          right_on=["old_fid"])
    RESULT.drop([x for x in destnatDf.columns.values if x != "geometry"],
                axis=1,
                inplace=True)
    RESULT.rename(columns={"geometry": "destin_geom"}, inplace=True)

    RESULT["origin_geom"] = RESULT.origin_geom.astype(str)
    RESULT["destin_geom"] = RESULT.destin_geom.astype(str)

    return obj_to_tbl(RESULT, outTable)
Code Example #17
File: openelv.py Project: zonakre/gasp
def get_points_elv(pntShp, output, epsg, elevationColumn="openelv"):
    """
    Extract elevation for several points
    """

    import pandas
    from threading import Thread
    from gasp.fm import tbl_to_obj
    from gasp.mng.split import split_df
    from gasp.mng.prj import project
    from gasp.fm.geom import pointxy_to_cols
    from gasp.mng.gen import merge_df
    from gasp.prop.feat import get_geom_type
    from gasp.to.obj import df_to_list
    from gasp.to.shp import df_to_shp

    # SHP TO DF
    df = tbl_to_obj(pntShp)

    # Check Geometry - shape should be of type point
    dfGeom = get_geom_type(df, geomCol="geometry", gisApi='pandas')
    if dfGeom != 'Point' and dfGeom != 'MultiPoint':
        raise ValueError('Geometries must be of type point')

    # Re-project GeoDataframes if needed
    if epsg != 4326:
        df = project(df, None, 4326, gisApi='pandas')

    df = pointxy_to_cols(df,
                         geomCol="geometry",
                         colX="longitude",
                         colY="latitude")

    df2 = df.drop(
        [c for c in df.columns.values if c != "longitude" and c != "latitude"],
        axis=1,
        inplace=False)

    dfs = split_df(df2, 200)

    RESULTS = []

    # Go to the net and extract elevation.
    # locations_elev is not imported in this snippet; it is assumed to be the
    # module's wrapper around the Open Elevation API.
    def extraction(pntDf):
        locations = df_to_list(pntDf)

        __result = locations_elev({"locations": locations})

        RESULTS.append(pandas.DataFrame(__result["results"]))

    thrds = []
    for i in range(len(dfs)):
        thrds.append(
            Thread(name="elvt{}".format(str(i)),
                   target=extraction,
                   args=(dfs[i], )))

    for t in thrds:
        t.start()

    for t in thrds:
        t.join()

    finalDf = merge_df(RESULTS, ignIndex=True)
    finalDf.rename(columns={"elevation": elevationColumn}, inplace=True)

    # Join with Original Shape
    df["long_join"] = df.longitude.round(6) * 1000000
    df["long_join"] = df.long_join.astype(int)
    df["lat_join"] = df.latitude.round(6) * 1000000
    df["lat_join"] = df.lat_join.astype(int)
    finalDf["jlat"] = finalDf.latitude.round(6) * 1000000
    finalDf["jlat"] = finalDf.jlat.astype(int)
    finalDf["jlng"] = finalDf.longitude.round(6) * 1000000
    finalDf["jlng"] = finalDf.jlng.astype(int)

    newDf = df.merge(finalDf,
                     how="outer",
                     left_on=["long_join", "lat_join"],
                     right_on=["jlng", "jlat"])

    if epsg != 4326:
        newDf = project(newDf, None, epsg, gisApi='pandas')

    newDf.drop([
        "longitude_x", "longitude_y", "latitude_x", "latitude_y", "long_join",
        "lat_join", "jlng", "jlat"
    ],
               axis=1,
               inplace=True)

    return df_to_shp(newDf, output)
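
A self-contained sketch of the integer join keys built above: coordinates are
rounded to 6 decimal places and scaled to integers so the two tables can be
merged on them (an extra round() before the cast guards against float
truncation):

import pandas

pts = pandas.DataFrame({'longitude': [-8.4123456789], 'latitude': [40.2065432]})
elv = pandas.DataFrame({'longitude': [-8.412345682], 'latitude': [40.20654321],
                        'elevation': [95.0]})

for df in (pts, elv):
    df["jlng"] = (df.longitude.round(6) * 1000000).round().astype(int)
    df["jlat"] = (df.latitude.round(6) * 1000000).round().astype(int)

joined = pts.merge(elv, how="outer", on=["jlng", "jlat"])
print(joined[["jlng", "jlat", "elevation"]])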
Code Example #18
def tbl_to_obj(tblFile,
               sheet=None,
               useFirstColAsIndex=None,
               _delimiter=None,
               encoding_='utf8',
               output='df',
               fields="ALL",
               geomCol=None,
               colsAsArray=None,
               geomAsWkt=None,
               srsTo=None):
    """
    Table File to Pandas DataFrame
    
    output Options:
    - df;
    - dict;
    - array;
    """

    from gasp.oss import get_fileformat

    fFormat = get_fileformat(tblFile)

    if fFormat == '.dbf':
        """
        Convert dBase to Pandas Dataframe
        """

        from simpledbf import Dbf5

        dbfObj = Dbf5(tblFile)

        tableDf = dbfObj.to_dataframe()

    elif fFormat == '.ods':
        """
        ODS file to Pandas Dataframe
        """

        import json
        import pandas
        from pyexcel_ods import get_data

        if not sheet:
            raise ValueError(
                "You must specify sheet name when converting ods files")
        data = get_data(tblFile)[sheet]

        tableDf = pandas.DataFrame(data[1:], columns=data[0])

    elif fFormat == '.xls' or fFormat == '.xlsx':
        """
        XLS to Pandas Dataframe
        """

        import pandas

        sheet = 0 if sheet == None else sheet

        indexCol = 0 if useFirstColAsIndex else None

        tableDf = pandas.read_excel(tblFile,
                                    sheet,
                                    index_col=indexCol,
                                    encoding='utf-8',
                                    dtype='object')

    elif fFormat == '.txt' or fFormat == '.csv':
        """
        Text file to Pandas Dataframe
        """

        import pandas

        if not _delimiter:
            raise ValueError(
                "You must specify _delimiter when converting txt files")

        tableDf = pandas.read_csv(tblFile,
                                  sep=_delimiter,
                                  low_memory=False,
                                  encoding=encoding_)

    elif fFormat == '.shp':
        """
        ESRI Shapefile to Pandas Dataframe
        """

        import geopandas

        tableDf = geopandas.read_file(tblFile)

        if output:
            if not geomCol:
                for c in tableDf.columns.values:
                    if c == 'geometry' or c == 'geom':
                        geomCol = c
                        break

            if fields != "ALL":
                from gasp.mng.fld.df import del_fld_notin_geodf

                tableDf = del_fld_notin_geodf(tableDf, fields, geomCol=geomCol)

            if srsTo:
                from gasp.mng.prj import project

                tableDf = project(tableDf, None, srsTo, gisApi='pandas')

            tableDf.rename(columns={geomCol: "GEOM"}, inplace=True)

            if geomAsWkt:
                tableDf["GEOM"] = tableDf.GEOM.astype(str)

    else:
        raise ValueError('{} is not a valid table format!'.format(fFormat))

    if fFormat != '.shp' and fields != "ALL":
        from gasp import goToList

        fields = goToList(fields)
        if fields:
            delCols = []
            for fld in list(tableDf.columns.values):
                if fld not in fields:
                    delCols.append(fld)

            if delCols:
                tableDf.drop(delCols, axis=1, inplace=True)

    if output == 'dict' or output == 'array':
        if output == 'dict':
            orientation = "index" if not colsAsArray else "list"

        elif output == 'array':
            tableDf["FID"] = tableDf.index

            orientation = "records"

        tableDf = tableDf.to_dict(orient=orientation)

    return tableDf
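
Hypothetical calls to the reader above (the paths, sheet name and delimiter
are illustrative): a csv needs a delimiter, a spreadsheet needs a sheet, and
a shapefile can be reprojected and returned with WKT geometry:

points_df  = tbl_to_obj('/tmp/pois.csv', _delimiter=';')
sheet_df   = tbl_to_obj('/tmp/addresses.xlsx', sheet='addresses')
points_gdf = tbl_to_obj('/tmp/pois.shp', srsTo=4326, geomAsWkt=True)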
Code Example #19
File: __init__.py Project: fagan2888/gasp
def get_ref_raster(refBoundBox, folder, cellsize=None):
    """
    Get Reference Raster
    """

    import os
    from gasp.prop.ff import check_isRaster

    # Check if refRaster is really a Raster
    isRst = check_isRaster(refBoundBox)

    if not isRst:
        from gasp.prop.ff import check_isShp

        if not check_isShp(refBoundBox):
            raise ValueError((
                'refBoundBox file has an invalid file format. Please give a '
                'file with one of the following extensions: '
                'shp, gml, json, kml, tif or img'))

        else:
            # We have a shapefile

            # Check SRS and see if it is a projected SRS
            from gasp.prop.prj import get_epsg_shp

            epsg, isProj = get_epsg_shp(refBoundBox, returnIsProj=True)

            if not epsg:
                raise ValueError(
                    'Cannot get epsg code from {}'.format(refBoundBox))

            if not isProj:
                # A conversion between SRS is needed
                from gasp.mng.prj import project

                ref_shp = project(refBoundBox,
                                  os.path.join(folder, 'tmp_ref_shp.shp'),
                                  outEPSG=3857,
                                  inEPSG=epsg,
                                  gisApi='ogr2ogr')
                epsg = 3857
            else:
                ref_shp = refBoundBox

            # Convert to Raster
            from gasp.to.rst import shp_to_raster

            refRaster = shp_to_raster(ref_shp,
                                      None,
                                      2 if not cellsize else cellsize,
                                      -1,
                                      os.path.join(folder, 'ref_raster.tif'),
                                      api='gdal')

    else:
        # We have a raster

        from gasp.prop.rst import get_epsg_raster

        epsg, isProj = get_epsg_raster(refBoundBox, returnIsProj=True)

        if not epsg:
            raise ValueError(
                'Cannot get epsg code from {}'.format(refBoundBox))

        # Check if Raster has a SRS with projected coordinates
        if not isProj:
            # We need to reproject raster
            from gasp.mng.prj import gdal_reproject_raster

            refRaster = gdal_reproject_raster(
                refBoundBox,
                os.path.join(folder, 'refrst_3857.tif'),
                epsg,
                3857,
                cellsize=2 if not cellsize else cellsize)
            epsg = 3857
        else:
            refRaster = refBoundBox

    return refRaster, epsg