Code Example #1
def sdf_from_xyz(df, x_col, y_col, z_col=None, sr=None):
    """builds a SpatialDataFrame from DataFrame with
    x, y, and z columns
    
    args:
    df - the dataframe
    x_col - the dataframe column corresponding to x coordinate
    y_col - the dataframe column corresponding to y coordinate
    z_col - optional, the dataframe column corresponding to z coordinate
    sr - the spatial reference for the spatial data frame
    """

    if not z_col:
        return SpatialDataFrame.from_xy(df, x_col, y_col, sr)

    def point_for_row(x, y, z, sr):
        return Point({'x': x, 'y': y, 'z': z, "spatialReference": sr})

    if sr is None:
        sr = SpatialReference({'wkid': 4326})

    df_geom = df.apply(
        lambda row: point_for_row(row[x_col], row[y_col], row[z_col], sr),
        axis=1)
    return SpatialDataFrame(data=df, geometry=df_geom, sr=sr)
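A minimal usage sketch for the helper above (hypothetical column names; assumes the legacy arcgis.features.SpatialDataFrame API and the snippet's Point/SpatialReference imports are available):

import pandas as pd
from arcgis.geometry import SpatialReference

df = pd.DataFrame({
    'lon': [-122.42, -122.45],   # made-up coordinates for illustration
    'lat': [37.77, 37.80],
    'elev_m': [12.0, 30.0],
})

# 2D version (no z column) and 3D version with an explicit spatial reference
sdf_2d = sdf_from_xyz(df, 'lon', 'lat')
sdf_3d = sdf_from_xyz(df, 'lon', 'lat', z_col='elev_m',
                      sr=SpatialReference({'wkid': 4326}))
print(sdf_3d.head())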
Code Example #2
def insert_new_results(selected_auth_fl, selected_res_fl, authoritative_fc,
                       schema):
    selected_auth_sdf = SpatialDataFrame.from_featureclass(selected_auth_fl)
    print(len(selected_auth_sdf))
    selected_res_sdf = SpatialDataFrame.from_featureclass(selected_res_fl)
    print(len(selected_res_sdf))

    # Look up the output dtypes and field list for this schema
    dtypes = dts.get(schema)
    fields = field_schema.get(schema)

    for idx, sel_auth_row in enumerate(selected_auth_sdf.iterrows()):

        geom = sel_auth_row[1].SHAPE.buffer(-.01)
        oid = sel_auth_row[1].OBJECTID

        # print(oid)

        ext = geom.extent

        sq = selected_res_sdf['SHAPE'].disjoint(geom) == False
        df_current = selected_res_sdf[sq].copy()
        df_current.reset_index(inplace=True)
        #print(df_current.head())

        if len(df_current) > 0:
            for f in fields:
                try:
                    cur_val = df_current.loc[0].at[f]
                    selected_auth_sdf.at[idx, f] = cur_val
                except KeyError:
                    print("Field doesn't exist")

    insert_df = selected_auth_sdf.drop(['SHAPE'], axis=1, inplace=False)

    records = insert_df.to_records(index=False)

    array = np.array(records, dtype=dtypes)
    da.ExtendTable(authoritative_fc, "OID@", array, "_ID", False)

    return authoritative_fc
Code Example #3
def build_ways_sdf(o_response, g_type):

    # Extract Relevant Way Elements from OSM Response
    if g_type == 'polygon':
        ways = [
            e for e in o_response
            if e['type'] == 'way' and e['nodes'][0] == e['nodes'][-1]
        ]
    else:
        ways = [
            e for e in o_response
            if e['type'] == 'way' and e['nodes'][0] != e['nodes'][-1]
        ]

    # Dictionary For Geometries & IDs
    geo_dict = {'geo': []}
    val_dict = {'osm_id': []}

    # Dictionary For Incoming Tags
    for w in ways:
        w_tags = w['tags'].keys()
        for tag in w_tags:
            if tag not in val_dict.keys():
                val_dict[tag] = []

    # Build Lists
    for w in ways:
        try:
            # Populate Tags
            for tag in [key for key in val_dict.keys() if key != 'osm_id']:
                val_dict[tag].append(str(w['tags'].get(tag, 'Null')))

            # Populate Geometries & IDs
            coords = [[e['lon'], e['lat']] for e in w.get('geometry')]
            if g_type == 'polygon':
                poly = Polygon({
                    "rings": [coords],
                    "spatialReference": {
                        "wkid": 4326
                    }
                })
            else:
                poly = Polyline({
                    "paths": [coords],
                    "spatialReference": {
                        "wkid": 4326
                    }
                })

            geo_dict['geo'].append(poly)
            val_dict['osm_id'].append(str(w['id']))

        except Exception as ex:
            print('Way ID {0} Raised Exception: {1}'.format(w['id'], str(ex)))

    try:
        return SpatialDataFrame(val_dict, geometry=geo_dict['geo'])

    except TypeError:
        raise Exception('Ensure ArcPy is Included in Python Interpreter')
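For reference, the o_response items this function iterates over are Overpass-style way elements with 'type', 'id', 'nodes', 'tags', and a 'geometry' list of lon/lat pairs; a hand-built example (values made up, and the snippet's own Polygon/SpatialDataFrame imports assumed in scope):

sample_way = {
    'type': 'way',
    'id': 123456,
    'nodes': [1, 2, 3, 1],               # first == last, so it qualifies for g_type='polygon'
    'tags': {'building': 'yes'},
    'geometry': [
        {'lon': -77.040, 'lat': 38.900},
        {'lon': -77.030, 'lat': 38.900},
        {'lon': -77.030, 'lat': 38.910},
        {'lon': -77.040, 'lat': 38.900},
    ],
}

ways_sdf = build_ways_sdf([sample_way], 'polygon')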
Code Example #4
def _from_xy(df, x_column, y_column, sr=None):
    """

    """
    from arcgis.geometry import SpatialReference, Geometry
    from arcgis.features import SpatialDataFrame
    if sr is None:
        sr = SpatialReference({'wkid': 4326})
    if not isinstance(sr, SpatialReference):
        if isinstance(sr, dict):
            sr = SpatialReference(sr)
        elif isinstance(sr, int):
            sr = SpatialReference({'wkid': sr})
        elif isinstance(sr, str):
            sr = SpatialReference({'wkt': sr})
    geoms = []
    for idx, row in df.iterrows():
        geoms.append(
            Geometry({
                'x': row[x_column],
                'y': row[y_column],
                'spatialReference': sr
            }))
    df['SHAPE'] = geoms
    return SpatialDataFrame(data=df, sr=sr)
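A small usage sketch with a hypothetical DataFrame; note that sr may be passed as a SpatialReference, a dict, a WKID integer, or a WKT string:

import pandas as pd

pts = pd.DataFrame({'x': [500000.0, 500100.0], 'y': [4100000.0, 4100050.0]})

# Passing the WKID as a plain integer; _from_xy wraps it in a SpatialReference
pts_sdf = _from_xy(pts, 'x', 'y', sr=26910)
print(pts_sdf['SHAPE'].head())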
Code Example #5
def build_node_sdf(n_list, excludedattributes):
    '''
    Function to convert returned OSM point data to an Esri SpatialDataFrame.
    Returns an Esri SpatialDataFrame.
    @param n_list: The list of nodes as returned by the get_osm_elements function
    @param excludedattributes: The attributes excluded in the configuration file osmconfig.json
    '''

    # Dictionary For Geometries & IDs
    geo_dict = {"geo": []}
    val_dict = {'osm_id': [], 'timestamp': []}

    # Dictionary For Incoming Tags
    for n in n_list:
        n_tags = n['tags'].keys()
        for tag in n_tags:
            if tag not in val_dict.keys() and tag not in excludedattributes:
                tagname = tag
                val_dict[tagname] = []

    print('Constructing points...')
    p = 0
    pbar = createpbar(len(n_list))
    # Build Lists
    for n in n_list:
        try:
            p = updatepbar(p, pbar)
            # Populate Tags
            for tag in [
                    key for key in val_dict.keys()
                    if key not in ['osm_id', 'timestamp']
                    and key not in excludedattributes
            ]:
                val_dict[tag].append(n['tags'].get(str(tag), ''))

            # Populate Geometries & IDs
            point = Point({
                "x": n['lon'],
                "y": n['lat'],
                "spatialReference": {
                    "wkid": 4326
                }
            })
            geo_dict['geo'].append(point)
            val_dict['osm_id'].append(str(n['id']))
            val_dict['timestamp'].append(
                dt.strptime(n['timestamp'], '%Y-%m-%dT%H:%M:%SZ'))

        except Exception as ex:
            print('Node ID {0} Raised Exception: {1}'.format(n['id'], str(ex)))

    try:
        val_dict = {k: v for k, v in val_dict.items() if v is not None}
        return SpatialDataFrame(val_dict, geometry=geo_dict['geo'])

    except TypeError:
        raise Exception('Ensure ArcPy is Included in Python Interpreter')
Code Example #6
File: hu_bb.py Project: hpoharvard/campusmaplivedata
def bb():    
    try:
        # script to update the status of the blue bikes
        r = requests.get('https://gbfs.bluebikes.com/gbfs/en/station_status.json')
        r = r.json()['data']['stations']
        df = pd.DataFrame.from_dict(json_normalize(r), orient='columns')
        df = df.rename(columns={'station_id': 'description',
                                'num_bikes_available': 'bikes_available',
                                'num_docks_available': 'docks_available'})

        
        bb_table = df[['last_reported','description', 'bikes_available', 'docks_available']]
        
        bb_features = gis.content.get(fc_id).layers[7]
        # query the Blue Bikes features; querying without a where clause would return all features
        bb_fset = bb_features.query(where="category_subtype = 'Blue Bikes'")

        overlap_rows = pd.merge(left = bb_fset.sdf, right = bb_table, how='inner', on = 'description')
        
        #print (overlap_rows.head())
        features_for_update = [] #list containing corrected features
        all_features = bb_fset.features

        # inspect one of the features
        #print (all_features[2])

        # update all features that were joined
        for root_id in overlap_rows['description']:    
            # get the feature to be updated
            original_feature = [f for f in all_features if f.attributes['description'] == root_id][0]

            feature_to_be_updated = deepcopy(original_feature)        

            # get the matching row from csv
            matching_row = bb_table.where(bb_table.description == root_id).dropna()
            #print('snr' + tablenumber + '', 'snr' + tablenumber + 'average', float(matching_row['snrval'].values))
            
            timestamp = matching_row['last_reported'].values[0]
            
            #print (str(datetime.fromtimestamp(timestamp)))
            feature_to_be_updated.attributes['use_type'] = matching_row['bikes_available'].values[0]
            feature_to_be_updated.attributes['source_name'] = matching_row['docks_available'].values[0]
            #feature_to_be_updated.attributes['source_type'] = str(datetime.fromtimestamp(matching_row['last_reported'].values[0]))
            
            feature_to_be_updated.attributes['source_type'] = str(datetime.fromtimestamp(matching_row['last_reported'].values[0]).strftime("%I:%M %p"))

            features_for_update.append(feature_to_be_updated)
        #print (features_for_update)
        bb_features.edit_features(updates= features_for_update)
    except Exception as ex:
        print("Something went wrong: {}".format(ex))
        os._exit(0)
Code Example #7
    def _get_sdf(self, path_directory):
        """
        Load the photos into a spatial data frame.
        :param path_directory: Path to directory containing photos.
        :return: SpatialDataFrame
        """
        # load up the photos as a list of dictionaries
        photo_list = [
            Photo(file).dictionary for file in os.listdir(path_directory)
            if file.lower().endswith('.jpg')
        ]

        # convert this list of dictionaries into a SpatialDataFrame
        photo_sdf = SpatialDataFrame(photo_list)

        # set the geometry field property
        photo_sdf.set_geometry('SHAPE',
                               inplace=True,
                               sr=SpatialReference(wkid=4326))

        # reindex all the data and return the result
        return photo_sdf.reset_index(drop=True)
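Based on how the method uses the result, each Photo(file).dictionary is presumably a flat record of attribute values plus a 'SHAPE' geometry built from the photo's GPS EXIF tags. A hand-built stand-in record (field names other than SHAPE are assumptions, not the real Photo class):

from arcgis.features import SpatialDataFrame
from arcgis.geometry import Point, SpatialReference

photo_record = {
    'file_name': 'IMG_0001.jpg',                            # assumed attribute
    'SHAPE': Point({'x': -111.89, 'y': 40.76,               # location from EXIF GPS (assumed)
                    'spatialReference': {'wkid': 4326}}),
}

photo_sdf = SpatialDataFrame([photo_record])
photo_sdf.set_geometry('SHAPE', inplace=True, sr=SpatialReference(wkid=4326))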
Code Example #8
def build_node_sdf(n_list):

    # Dictionary For Geometries & IDs
    geo_dict = {"geo": []}
    val_dict = {'osm_id': []}

    # Dictionary For Incoming Tags
    for n in n_list:
        n_tags = n['tags'].keys()
        for tag in n_tags:
            if tag not in val_dict.keys():
                val_dict[tag] = []

    # Build Lists
    for n in n_list:
        try:
            # Populate Tags
            for tag in [key for key in val_dict.keys() if key != 'osm_id']:
                val_dict[tag].append(str(n['tags'].get(tag, 'Null')))

            # Populate Geometries & IDs
            point = Point({
                "x": n['lon'],
                "y": n['lat'],
                "spatialReference": {
                    "wkid": 4326
                }
            })
            geo_dict['geo'].append(point)
            val_dict['osm_id'].append(str(n['id']))

        except Exception as ex:
            print('Node ID {0} Raised Exception: {1}'.format(n['id'], str(ex)))

    try:
        return SpatialDataFrame(val_dict, geometry=geo_dict['geo'])

    except TypeError:
        raise Exception('Ensure ArcPy is Included in Python Interpreter')
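For reference, the n_list items this function iterates over are Overpass-style node elements; a hand-built example (values made up, and the snippet's own Point/SpatialDataFrame imports assumed in scope):

sample_node = {
    'type': 'node',
    'id': 987654,
    'lon': -0.1276,
    'lat': 51.5072,
    'tags': {'amenity': 'cafe', 'name': 'Example Cafe'},
}

nodes_sdf = build_node_sdf([sample_node])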
Code Example #9
fc_parcels = r"I:\Projects\Darren\PPA_V2_GIS\PPA_V2.gdb\TEST_parcels_for_mixIndex_point2"

out_csv = r'I:\Projects\Darren\PPA_V2_GIS\CSV\Simpson_div_idx_x_taz{}.csv'.format(
    dt_suffix)
csv_lutypes_lookup = r"I:\Projects\Darren\PPA_V2_GIS\CSV\lutypes_lookup.csv"

col_taz = 'TAZ07'
col_netarea = 'GISAc'
col_lutype = 'LUTYPE'
col_lutype_rev = 'LUTYPE_rev'

excl_lutypes = 'EXCLUDE'

cols = [col_taz, col_netarea, col_lutype]

sdf_parcel = SDF.from_featureclass(fc_parcels)

df_x_taz_and_type = sdf_parcel.groupby(
    [col_taz, col_lutype])[col_netarea].sum()  #total TAZ area x land use type

dfpp = df_x_taz_and_type.reset_index()

# lookup of land use types to consolidate similar land use types
df_lutypes_lookup = pd.read_csv(csv_lutypes_lookup)

dfpp = dfpp.merge(df_lutypes_lookup, on=col_lutype)

#pivot: cols = land use types, rows = TAZ IDs, values = total net acres of that land use on that TAZ
df_pivot = dfpp.pivot_table(values=col_netarea,
                            index=col_taz,
                            columns=col_lutype_rev,
                            aggfunc='sum')  # aggfunc='sum' assumed: totals net acres per TAZ and land use type
Code Example #10
def spatial_join(df1,
                 df2,
                 left_tag="_left",
                 right_tag="_right",
                 keep_all=True):
    """
    Joins two spatially enabled dataframes based on whether their
    geometries intersect.

    Parameters:
      :df1: left spatial dataframe
      :df2: right spatial dataframe
      :left_tag: if the same column is in the left and right dataframe,
       this will append that string value to the field
      :right_tag: if the same column is in the left and right dataframe,
       this will append that string value to the field
      :keep_all: if set to true all df1 will be kept regardless of spatial
       matches
    :output:
      Spatial Dataframe
    """
    import numpy as np
    import pandas as pd
    from arcgis.features import SpatialDataFrame
    if not isinstance(df1, SpatialDataFrame):
        raise ValueError("df1 must be a spatial dataframe")
    if not isinstance(df2, SpatialDataFrame):
        raise ValueError("df2 must be a spatial dataframe")
    right_index = df2.sindex
    join_idx = []
    if df1.geometry is not None:
        geom_field = df1.geometry.name
    else:
        raise ValueError("df1 is missing a geometry column")
    if df2.geometry is not None:
        geom_field2 = df2.geometry.name
    else:
        raise ValueError("df2 is missing a geometry column")
    for idx, row in df1.iterrows():
        geom = row[geom_field]
        if isinstance(geom.extent, tuple):
            ext = (geom.extent[0], geom.extent[1], geom.extent[2],
                   geom.extent[3])
        else:
            ext = (geom.extent.XMin, geom.extent.YMin, geom.extent.XMax,
                   geom.extent.YMax)
        select_idx = right_index.intersect(ext)
        if len(select_idx) > 0:
            sub = df2.loc[select_idx]
            res = sub[sub.disjoint(geom) == False]
            if len(res) > 0:
                for idx2, row2 in res.iterrows():
                    join_idx.append([idx, idx2])
                    del idx2, row2
            elif len(res) == 0 and keep_all:
                join_idx.append([idx, None])
            del sub, res
        elif len(select_idx) == 0 and \
             keep_all:
            join_idx.append([idx, None])
        del geom
        del ext
        del select_idx
        del idx
    join_field_names = ["TARGET_OID", "JOIN_OID"]
    df2 = df2.copy()
    del df2[df2.geometry.name]
    join_df = pd.DataFrame(data=join_idx, columns=join_field_names)
    join_df = join_df.merge(df1,
                            left_on=join_field_names[0],
                            right_index=True,
                            how='left',
                            suffixes=(left_tag, right_tag))
    join_df = join_df.merge(df2,
                            left_on=join_field_names[1],
                            right_index=True,
                            how='left',
                            suffixes=(left_tag, right_tag),
                            copy=True)
    join_df = SpatialDataFrame(join_df)
    join_df.geometry = join_df[df1.geometry.name]
    del join_idx
    join_df.reset_index(drop=True, inplace=True)
    return join_df
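A hedged usage sketch, assuming two SpatialDataFrames loaded elsewhere (the shapefile paths below are placeholders); TARGET_OID and JOIN_OID in the result refer to the row indexes of df1 and df2:

from arcgis.features import SpatialDataFrame

parcels = SpatialDataFrame.from_featureclass(r'C:\data\parcels.shp')          # placeholder path
flood_zones = SpatialDataFrame.from_featureclass(r'C:\data\flood_zones.shp')  # placeholder path

joined = spatial_join(parcels, flood_zones, keep_all=True)
print(joined[['TARGET_OID', 'JOIN_OID']].head())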
Code Example #11
from arcgis import GIS
import pandas as pd
from arcgis.features import GeoAccessor, GeoSeriesAccessor
from arcgis.geocoding import batch_geocode
from arcgis.features import SpatialDataFrame

gis = GIS('https://siarcgisweb01.trssllc.com/portal/home', 'sazdrake')

map1 = gis.map('New York, NY')
map1.basemap = "osm"
map1

address_frame = pd.read_csv('data/city_of_new_york.csv')

address_shp = SpatialDataFrame.from_xy(address_frame, 'LON', 'LAT')

address_shp.spatial.plot(map_widget=map1)
Code Example #12
def completeness(out_sdf, df_list, osm_sdf):

    print('Running Completeness')

    for idx, row in enumerate(out_sdf.iterrows()):

        before_val = None
        geom = Geometry(row[1].SHAPE)

        # Unpack Geom Extent as OSM Expects
        bbox = (geom.extent[1], geom.extent[0], geom.extent[3], geom.extent[2])

        # Fetch OSM SpatialDataFrame
        osm_sdf = gen_osm_sdf('line', bbox, osm_tag='highway')

        data_sdf = df_list[idx]
        if len(data_sdf) == 0:
            before_val = 0

        else:
            sq = data_sdf[data_sdf.geometry.notnull()].geometry.disjoint(
                geom) == False
            df_before = data_sdf[sq].copy()
            geoms_before = df_before.clip(geom.extent)
            geoms_before_sdf = SpatialDataFrame(geometry=geoms_before)

            q_before = geoms_before_sdf['SHAPE'] == {"paths": []}
            geoms_before_sdf = geoms_before_sdf[~q_before].copy()
            geoms_before_sdf.reset_index(inplace=True, drop=True)

        geometry_type = osm_sdf.geometry_type

        sq = osm_sdf[osm_sdf.geometry.notnull()].geometry.disjoint(
            geom) == False
        df_after = osm_sdf[sq].copy()

        geoms_after = df_after.clip(geom.extent)

        geoms_after_sdf = SpatialDataFrame(geometry=geoms_after)

        q_after = geoms_after_sdf['SHAPE'] == {"paths": []}
        geoms_after_sdf = geoms_after_sdf[~q_after].copy()
        geoms_after_sdf.reset_index(inplace=True, drop=True)

        # This needs work
        if geometry_type == "Polygon":
            if before_val is None:
                before_val = geoms_before_sdf.geometry.project_as(
                    4326).get_area('GEODESIC', 'SQUAREKILOMETERS').sum()
            after_val = geoms_after_sdf.geometry.project_as(4326).get_area(
                'GEODESIC', 'SQUAREKILOMETERS').sum()
            if after_val > 0:
                score = get_cp_score(ratio=before_val / after_val,
                                     baseVal=before_val,
                                     inputVal=after_val)
            else:
                score = get_cp_score(0, before_val, after_val)

            out_sdf.set_value(idx,
                              field_schema.get('cmpl')[0],
                              round(before_val, 1))
            out_sdf.set_value(idx,
                              field_schema.get('cmpl')[1], round(after_val, 1))
            out_sdf.set_value(idx,
                              field_schema.get('cmpl')[3],
                              round(before_val - after_val, 1))
            out_sdf.set_value(idx, field_schema.get('cmpl')[2], score)

        elif geometry_type == "Polyline":
            if before_val is None:
                geom_projected = geoms_before_sdf.geometry.project_as(3857)
                before_val = int(sum(geom_projected.length.tolist()))

            # measure the OSM (after) features rather than the before features
            geom_projected = geoms_after_sdf.geometry.project_as(3857)
            after_val = int(sum(geom_projected.length.tolist()))

            if after_val > 0:
                score = get_cp_score(ratio=before_val / after_val,
                                     baseVal=before_val,
                                     inputVal=after_val)
            else:
                score = get_cp_score(0, before_val, after_val)

            out_sdf.set_value(idx,
                              field_schema.get('cmpl')[0],
                              round(before_val, 1))
            out_sdf.set_value(idx,
                              field_schema.get('cmpl')[1], round(after_val, 1))
            out_sdf.set_value(idx,
                              field_schema.get('cmpl')[3],
                              round(before_val - after_val, 1))
            out_sdf.set_value(idx, field_schema.get('cmpl')[2], score)

        else:
            if before_val is None:
                before_count = len(geoms_before_sdf)
            else:
                before_count = 0
            after_count = len(geoms_after_sdf)
            if after_count > 0:
                score = get_cp_score(ratio=before_count / after_count,
                                     baseVal=before_count,
                                     inputVal=after_count)
            else:
                score = get_cp_score(ratio=0,
                                     baseVal=before_count,
                                     inputVal=after_count)

            out_sdf.set_value(idx, field_schema.get('cmpl')[0], before_count)
            out_sdf.set_value(idx, field_schema.get('cmpl')[1], after_count)
            out_sdf.set_value(idx,
                              field_schema.get('cmpl')[3],
                              before_count - after_count)
            out_sdf.set_value(idx, field_schema.get('cmpl')[2], score)

        del sq
        del df_after
        del geom
        if before_val is not None:
            print(before_val)
        #    del df_before

    return out_sdf
Code Example #13
from arcgis.gis import GIS
from arcgis.features import SpatialDataFrame
from IPython.display import display
import passwords

gis = GIS("https://www.arcgis.com", passwords.user_name, passwords.password)

search_results = gis.content.search(
    query='title: "Park*" AND type: "Feature Service"')
display(search_results)

# In[2]:

feature_service_item = search_results[0]
feature_layer = feature_service_item.layers[0]
display(feature_layer)

# In[3]:

# Search for an item and construct a Spatial Data Frame

# In[4]:

sdf = SpatialDataFrame.from_layer(feature_layer)
sdf.head()

# In[ ]:

# sdf.to_featureclass(out_location = 'path/to/save/data', out_name = 'file.shp')
Code Example #14
from arcgis.gis import GIS
from arcgis.features import SpatialDataFrame
from arcgis.raster import ImageryLayer
from arcgis.geometry import Polygon
from arcgis.geometry import Geometry
import sys
import json
import arcgis_config

# type of coordinate reference system
crs_id = 3857
gis = GIS("https://www.arcgis.com", arcgis_config.username, arcgis_config.password)

shp_file = 'raw/bottom_part.shp'
building_data = SpatialDataFrame.from_featureclass(shp_file)
# print(type(building_data.geometry))
# print(df.dtypes)
# print(df.shape)

# naip = gis.content.search('Views', 'Imagery Layer', outside_org=True)
naip = gis.content.get('3f8d2d3828f24c00ae279db4af26d566')
# for layer in naip.layers:
#     print(layer)
naip_image_layer = naip.layers[0]
# naip_image_layer = apply(naip_image_layer, 'FalseColorComposite')
# print(naip_image_layer.extent)

# redefine occupancy type to be residential(1) and non-residential(2)
with open('residential_occupancy_types.json', 'r') as f:
    res_types = json.load(f)
Code Example #15
def build_ways_sdf_toline(o_response, excludedattributes):
    '''
    Function to convert returned OSM polyline data to an Esri SpatialDataFrame.
    Returns an Esri SpatialDataFrame.
    @param o_response: The valid response data from the OSM server containing the way elements
    @param excludedattributes: The attributes excluded in the configuration file osmconfig.json
    '''
    # Extract Relevant Way Elements from OSM Response
    ways = [
        e for e in o_response
        if e['type'] == 'way' and e['nodes'][0] != e['nodes'][-1]
    ]

    # Dictionary For Geometries & IDs
    geo_dict = {'geo': []}
    val_dict = {'osm_id': [], 'timestamp': []}

    # Dictionary For Incoming Tags
    for w in ways:
        w_tags = w['tags'].keys()
        for tag in w_tags:
            if tag not in val_dict.keys() and tag not in excludedattributes:
                tagname = tag
                val_dict[tagname] = []

    print('Constructing lines...')
    p = 0
    pbar = createpbar(len(ways))
    # Build Lists
    for w in ways:
        try:
            p = updatepbar(p, pbar)
            # Populate Tags
            for tag in [
                    key for key in val_dict.keys()
                    if key not in ['osm_id', 'timestamp']
                    and key not in excludedattributes
            ]:
                val_dict[tag].append(w['tags'].get(str(tag), ''))

            # Populate Geometries & IDs
            coords = [[e['lon'], e['lat']] for e in w.get('geometry')]
            poly = Polyline({
                "paths": [coords],
                "spatialReference": {
                    "wkid": 4326
                }
            })

            geo_dict['geo'].append(poly)
            val_dict['osm_id'].append(str(w['id']))
            val_dict['timestamp'].append(
                dt.strptime(w['timestamp'], '%Y-%m-%dT%H:%M:%SZ'))

        except Exception as ex:
            print('Way ID {0} Raised Exception: {1}'.format(w['id'], str(ex)))

    try:
        geo_dict = geo_dict['geo']
        val_dict = {k: v for k, v in val_dict.items() if v is not None}
        return SpatialDataFrame(val_dict, geometry=geo_dict)

    except TypeError:
        raise Exception('Ensure ArcPy is Included in Python Interpreter')
Code Example #16
def build_ways_sdf_topoly(o_response, excludedattributes, o_r_response=None):
    '''
    Function to convert returned OSM polygon data to an Esri SpatialDataFrame.
    Returns an Esri SpatialDataFrame.
    @param o_response: The valid response data from the OSM server containing the way elements
    @param excludedattributes: The attributes excluded in the configuration file osmconfig.json
    @param o_r_response: The optional valid response data from the OSM server containing the relation elements
    '''
    # Extract relevant relations and way elements from OSM response
    if o_r_response:
        relations = [e for e in o_r_response if e['type'] == 'relation']
    else:
        relations = []
    ways = [
        e for e in o_response
        if e['type'] == 'way' and e['nodes'][0] == e['nodes'][-1]
    ]

    # Dictionary for geometries & IDs
    geo_dict_r = {'geo': []}
    val_dict_r = {'osm_id': [], 'timestamp': []}
    geo_dict_w = {'geo': []}
    val_dict_w = {'osm_id': [], 'timestamp': []}
    invalid_rel_idx_list = []
    for r in range(len(relations)):
        nodesinrel = []
        nodesinrel = [
            relations[r]['members'].index(item)
            for item in filter(lambda n: n.get('type') == 'node',
                               lget(relations, r)['members'])
        ]
        if len(nodesinrel) > 0:
            invalid_rel_idx_list.append(r)

    relations = [
        lget(relations, r) for r in range(len(relations))
        if r not in invalid_rel_idx_list
    ]

    # Dictionary for incoming relation tags
    for r in relations:
        r_tags = r['tags'].keys()
        for tag in r_tags:
            if tag not in val_dict_r.keys() and tag not in excludedattributes:
                tagname = tag
                val_dict_r[tagname] = []

    valid_list = []
    # Build Lists
    print('Constructing complex polygons...')
    p = 0
    pbar = createpbar(len(relations))
    for r in relations:
        try:
            p = updatepbar(p, pbar)
            relation = r["members"]
            relation = sorted(relation, key=lambda item: item['role'])
            outerlist = [memb for memb in relation if memb['role'] == 'outer']
            innerlist = [memb for memb in relation if memb['role'] == 'inner']
            innerlistgeomtuple = []
            outerlistgeomtuple = []
            innerlistgeomtuple2 = []
            outerlistgeomtuple2 = []
            rngitn_i = False
            rngitn_o = False

            innerlistgeomtuple = [[(x['lon'], x['lat'])
                                   for x in ol['geometry']]
                                  for ol in innerlist]
            if len(innerlistgeomtuple) > 1:
                all_rings_connected_i = checkrings_connected(
                    innerlistgeomtuple, r)
                if all_rings_connected_i and innerlistgeomtuple:
                    for ring in innerlistgeomtuple:
                        innerlistgeomtuple2 += ring
                    innerlistgeomtuple = []
                    innerlistgeomtuple.append(
                        l_ordered_remove_duplicates(innerlistgeomtuple2))
                    innerlistgeomtuple2 = innerlistgeomtuple
                    innerlistgeomtuple = []
            all_rings_closed_i = checkrings_closed(innerlistgeomtuple, r)
            if all_rings_closed_i and innerlistgeomtuple:
                for ring in innerlistgeomtuple:
                    innerlistgeomtuple2.append(ring)
                    innerlistgeomtuple = []
            innerlistgeomtuple3 = []
            if innerlistgeomtuple2:
                all_rings_connected_i = checkrings_connected(
                    innerlistgeomtuple2, r)
            if not innerlistgeomtuple2:
                all_rings_connected_i = checkrings_connected(
                    innerlistgeomtuple, r)
            if innerlistgeomtuple and not all_rings_connected_i:
                innerlistgeomtuple2 = innerlistgeomtuple
                innerlistgeomtuple3.append(innerlistgeomtuple2[0])
                startgeom = innerlistgeomtuple2[0][0]
                for ridx in range(0, len(innerlistgeomtuple2)):
                    next_ring = innerlistgeomtuple3[-1][-1]
                    if next_ring == startgeom and ridx < len(
                            innerlistgeomtuple2) - 1:
                        next_ring = innerlistgeomtuple2[len(
                            innerlistgeomtuple3)][-1]
                        startgeom = next_ring
                    idx_next_ring = lfindgetsingleidx(innerlistgeomtuple2,
                                                      next_ring, ridx)
                    try:
                        if innerlistgeomtuple2[
                                idx_next_ring[0]] not in innerlistgeomtuple3:
                            if innerlistgeomtuple2[idx_next_ring[0]][
                                    -1] == innerlistgeomtuple3[-1][-1]:
                                innerlistgeomtuple2[idx_next_ring[0]].reverse()
                            innerlistgeomtuple3.append(
                                innerlistgeomtuple2[idx_next_ring[0]])
                        rngitn_i = True
                    except:
                        continue
            if rngitn_i:
                innerlistgeomtuple4 = []
                innerlistgeomtuple4 = merge_sublist_items(innerlistgeomtuple3)
                rings = detect_rings(innerlistgeomtuple4)
                if len(rings) > 1:
                    rings = [l_ordered_remove_duplicates(rng) for rng in rings]
                    all_rings_closed_i = checkrings_closed(rings, r)
                else:
                    innerlistgeomtuple3 = l_ordered_remove_duplicates(
                        innerlistgeomtuple4)
                    all_rings_closed_i = checkrings_closed(
                        [innerlistgeomtuple3], r)
            if innerlistgeomtuple and rngitn_i and all_rings_closed_i:
                innerlistgeomtuple2 = []
                all_rings_connected_i = True
                if len(rings) < 2:
                    innerlistgeomtuple2.append([])
                    innerlistgeomtuple2[0] = innerlistgeomtuple3
                else:
                    innerlistgeomtuple2 = rings

            outerlistgeomtuple = [[(x['lon'], x['lat'])
                                   for x in ol['geometry']]
                                  for ol in outerlist]
            if len(outerlistgeomtuple) > 1:
                all_rings_connected_o = checkrings_connected(
                    outerlistgeomtuple, r)
                if all_rings_connected_o:
                    for ring in outerlistgeomtuple:
                        outerlistgeomtuple2 += ring
                    outerlistgeomtuple = []
                    outerlistgeomtuple.append(
                        l_ordered_remove_duplicates(outerlistgeomtuple2))
                    outerlistgeomtuple2 = outerlistgeomtuple
                    outerlistgeomtuple = []
            all_rings_closed_o = checkrings_closed(outerlistgeomtuple, r)
            if all_rings_closed_o:
                for ring in outerlistgeomtuple:
                    outerlistgeomtuple2.append(ring)
                    outerlistgeomtuple = []
            outerlistgeomtuple3 = []
            if outerlistgeomtuple2:
                all_rings_connected_o = checkrings_connected(
                    outerlistgeomtuple2, r)
            if not outerlistgeomtuple2:
                all_rings_connected_o = checkrings_connected(
                    outerlistgeomtuple, r)
            if not all_rings_connected_o:
                outerlistgeomtuple2 = outerlistgeomtuple
                outerlistgeomtuple3.append(outerlistgeomtuple2[0])
                startgeom = outerlistgeomtuple2[0][0]
                for ridx in range(0, len(outerlistgeomtuple2)):
                    next_ring = outerlistgeomtuple3[-1][-1]
                    if next_ring == startgeom and ridx < len(
                            outerlistgeomtuple2) - 1:
                        next_ring = outerlistgeomtuple2[len(
                            outerlistgeomtuple3)][-1]
                        startgeom = next_ring
                    idx_next_ring = lfindgetsingleidx(outerlistgeomtuple2,
                                                      next_ring, ridx)
                    try:
                        if outerlistgeomtuple2[
                                idx_next_ring[0]] not in outerlistgeomtuple3:
                            if outerlistgeomtuple2[idx_next_ring[0]][
                                    -1] == outerlistgeomtuple3[-1][-1]:
                                outerlistgeomtuple2[idx_next_ring[0]].reverse()
                            outerlistgeomtuple3.append(
                                outerlistgeomtuple2[idx_next_ring[0]])
                        rngitn_o = True
                    except:
                        continue
            if rngitn_o:
                outerlistgeomtuple4 = []
                outerlistgeomtuple4 = merge_sublist_items(outerlistgeomtuple3)
                rings = detect_rings(outerlistgeomtuple4)
                if len(rings) > 1:
                    rings = [l_ordered_remove_duplicates(rng) for rng in rings]
                    all_rings_closed_o = checkrings_closed(rings, r)
                else:
                    outerlistgeomtuple3 = outerlistgeomtuple4
                    outerlistgeomtuple3 = l_ordered_remove_duplicates(
                        outerlistgeomtuple4)
                    all_rings_closed_o = checkrings_closed(
                        [outerlistgeomtuple3], r)
                if outerlistgeomtuple and rngitn_o and all_rings_closed_o:
                    outerlistgeomtuple2 = []
                    all_rings_connected_o = True
                    if len(rings) < 2:
                        outerlistgeomtuple2.append([])
                        outerlistgeomtuple2[0] = outerlistgeomtuple3
                    else:
                        outerlistgeomtuple2 = rings

            if innerlistgeomtuple or innerlistgeomtuple2:
                innerlistgeomtuple = innerlistgeomtuple2
            if outerlistgeomtuple2:
                outerlistgeomtuple = outerlistgeomtuple2

            l_polygon_rings = []
            for subelement in range(len(outerlistgeomtuple)):
                ncoordsouter = [(n[0], n[1])
                                for n in outerlistgeomtuple[subelement]]
                temppolyarea = Polygon({
                    "rings": [ncoordsouter],
                    "spatialReference": {
                        "wkid": 4326
                    }
                }).area
                if temppolyarea < 0.0:
                    ncoordsouter.reverse()
                if ncoordsouter:
                    l_polygon_rings.append(ncoordsouter)

            for subelement in range(len(innerlistgeomtuple)):
                if innerlistgeomtuple[subelement]:
                    ncoordsinner = [(n[0], n[1])
                                    for n in innerlistgeomtuple[subelement]]
                    temppolyarea = Polygon({
                        "rings": [ncoordsinner],
                        "spatialReference": {
                            "wkid": 4326
                        }
                    }).area
                    if temppolyarea > 0.0:
                        ncoordsinner.reverse()
                    if ncoordsinner:
                        l_polygon_rings.append(ncoordsinner)

            poly = Polygon({
                "rings": l_polygon_rings,
                "spatialReference": {
                    "wkid": 4326
                }
            })
            valid_list.append([poly.is_valid, r['id']])

            if poly.is_valid:
                geo_dict_r['geo'].append(poly)
                val_dict_r['osm_id'].append(str(r['id']))
                val_dict_r['timestamp'].append(
                    dt.strptime(r['timestamp'], '%Y-%m-%dT%H:%M:%SZ'))

            # Populate Relation tags
            for tag in [
                    key for key in val_dict_r.keys()
                    if key not in ['osm_id', 'timestamp']
                    and key not in excludedattributes
            ]:
                val_dict_r[tag].append(r['tags'].get(str(tag), ''))

        except Exception as ex:
            tb = traceback.format_exc()
            print('Relation ID {0} Raised Exception: {1}'.format(
                r['id'], str(tb)))

    # Dictionary For Incoming Way Tags
    for w in ways:
        w_tags = w['tags'].keys()
        for tag in w_tags:
            if tag not in val_dict_w.keys() and tag not in excludedattributes:
                tagname = tag
                val_dict_w[tagname] = []

    print('Constructing simple polygons...')
    p = 0
    pbar = createpbar(len(ways))
    # Build Lists
    for w in ways:
        try:
            p = updatepbar(p, pbar)
            # Populate Tags
            for tag in [
                    key for key in val_dict_w.keys()
                    if key not in ['osm_id', 'timestamp']
                    and key not in excludedattributes
            ]:
                val_dict_w[tag].append(w['tags'].get(str(tag), ''))
            # Populate Geometries & IDs
            coords = [[e['lon'], e['lat']] for e in w.get('geometry')]
            poly = Polygon({
                "rings": [coords],
                "spatialReference": {
                    "wkid": 4326
                }
            })
            geo_dict_w['geo'].append(poly)
            val_dict_w['osm_id'].append(str(w['id']))
            val_dict_w['timestamp'].append(
                dt.strptime(w['timestamp'], '%Y-%m-%dT%H:%M:%SZ'))

        except Exception as ex:
            print('Way ID {0} Raised Exception: {1}'.format(w['id'], str(ex)))

    try:
        val_dict = merge_dict_lists(val_dict_r, val_dict_w)
        val_dict = {k: v for k, v in val_dict.items() if v is not None}
        geo_dict = geo_dict_r['geo'] + geo_dict_w['geo']
        return SpatialDataFrame(val_dict, geometry=geo_dict)

    except TypeError:
        raise Exception('Ensure ArcPy is Included in Python Interpreter')
Code Example #17
#%%
df.to_csv(custAcctFile,header=False, index=True)

#%% [markdown]
# [```gis.features.SpatialDataFrame()```](https://esri.github.io/arcgis-python-api/apidoc/html/arcgis.features.toc.html?highlight=spatialdataframe#arcgis.features.SpatialDataFrame.from_xy)

#%%
from arcgis.features import SpatialDataFrame
from arcgis.gis import GIS
from getpass import getpass
from IPython.display import display


#%%
sdf = SpatialDataFrame.from_xy(df,"POINT_X","POINT_Y")
gis = GIS(arcpy.GetActivePortalURL(), username=input("Enter User Name "), password=(getpass()))
#gis = GIS()
#portalDesc = arcpy.GetPortalDescription()
# search and list all items owned by connected user
#query=f'owner:{portalDesc["user"]["username"]} AND title:CW BaseMap'
#itemType="Feature Layer"
#sortField="title"
#sortOrder="asc"
# default max__items is 10
#maxItems=100
#m = gis.content.search(query,itemType,sortField,sortOrder,maxItems)


#%%
consumptionLyr = gis.content.import_data(sdf)
Code Example #18
def logical_consisitency(gis, template_fc, template_gdb, filename, tabname,
                         data_sdf, input_features, output_features,
                         grid_filter, geom, def_cnt_field, def_field):

    try:
        stList = set(data_sdf['F_CODE'].values)
        fc = input_features

        alias_table = get_field_alias(template_fc)
        fc_domain_dict = get_fc_domains(template_gdb)

        specificAttributeDict, attrCheck = create_attr_dict(filename, tabname)

        temp_result_df = pd.DataFrame(columns=FIELDS)  #, dtypes=DTYPES)

        geoms = []
        counter = 0
        total_feature_count = len(data_sdf)
        for idx, row in data_sdf.iterrows():
            #print(row['F_CODE'])
            if row['F_CODE'] in stList:
                if row['F_CODE'] in specificAttributeDict:
                    vals = []
                    vals = [alias_table[i] for i in specificAttributeDict[row['F_CODE']] \
                                               if row[i] in empty]

                    line = row['SHAPE']
                    def_count = len(vals)
                    polyline = Polyline(line)
                    geoms.append(polyline)
                    oid = row['OBJECTID']
                    if def_count > 0:
                        fs = ",".join(vals)
                        ERROR = str(fc) + r" | " + str(
                            fc_domain_dict[row['F_CODE']]) + r" | OID: " + str(
                                oid) + r" | " + fs

                        temp_result_df.set_value(counter, FIELDS[0], fs)
                        temp_result_df.set_value(counter, FIELDS[1], fc)
                        temp_result_df.set_value(
                            counter, FIELDS[2],
                            (fc_domain_dict[row['F_CODE']]))
                        temp_result_df.set_value(counter, FIELDS[3],
                                                 round(oid))
                        temp_result_df.set_value(counter, FIELDS[4], len(vals))

                    else:
                        temp_result_df.set_value(counter, FIELDS[0], 'N/A')
                        temp_result_df.set_value(counter, FIELDS[1], fc)
                        temp_result_df.set_value(
                            counter, FIELDS[2],
                            (fc_domain_dict[row['F_CODE']]))
                        temp_result_df.set_value(counter, FIELDS[3],
                                                 round(oid))
                        temp_result_df.set_value(counter, FIELDS[4], len(vals))
                    counter = counter + 1
        assessed_feature_count = len(temp_result_df)

        attr_sdf = SpatialDataFrame(temp_result_df, geometry=geoms)

        out_fl = FeatureLayer(gis=gis, url=output_features)
        out_sdf = out_fl.query(geometry_filter=grid_filter,
                               return_geometry=True,
                               return_all_records=True).df

        df_current = attr_sdf
        fcount = len(df_current)

        error_field_count = def_cnt_field
        error_field_def = def_field

        errors = []
        attrs = []
        if fcount > 0:  #len(df_current) > 0:
            errors += df_current[error_field_count].tolist()

            def process(x):
                #print(x)
                return [
                    va for va in x.replace(' ', '').split('|')[-1].split(',')
                    if len(va) > 1
                ]

            for e in df_current[error_field_def].apply(process).tolist():
                attrs += e
                del e

        results = get_answers(0, errors, attrs, fcount)

        for i in range(16):
            out_sdf[SUM_FIELDS[i]][0] = results[i + 1]

        print(out_sdf.columns.values)

        return out_sdf, out_fl

    except arcpy.ExecuteError:
        line, filename, synerror = trace()

    except FunctionError as f_e:
        messages = f_e.args[0]

    except:
        line, filename, synerror = trace()


#--------------------------------------------------------------------------
##if __name__ == "__main__":
##    #env.overwriteOutput = True
##    argv = tuple(arcpy.GetParameterAsText(i)
##    for i in range(arcpy.GetArgumentCount()))
##    main(*argv)
Code Example #19
def plot(df,
         map_widget=None,
         name=None,
         renderer_type=None,
         symbol_type=None,
         symbol_style=None,
         col=None,
         colors='jet',
         alpha=1,
         **kwargs):
    """

    Plot draws the data on a web map. The user can describe in simple terms how to
    render spatial data using symbols.  To make the process simpler, a palette
    from which colors are drawn can be used instead of explicit colors.


    ======================  =========================================================
    **Explicit Argument**   **Description**
    ----------------------  ---------------------------------------------------------
    df                      required SpatialDataFrame or GeoSeries. This is the data
                            to map.
    ----------------------  ---------------------------------------------------------
    map_widget              optional WebMap object. This is the map to display the
                            data on.
    ----------------------  ---------------------------------------------------------
    colors                  optional string/dict.  Color mapping.  For simple renderer,
                            just provide a string.  For more robust renderers like
                            unique renderer, a dictionary can be given.
    ----------------------  ---------------------------------------------------------
    renderer_type           optional string.  Determines the type of renderer to use
                            for the provided dataset. The default is 's' which is for
                            simple renderers.

                            Allowed values:

                            + 's' - is a simple renderer that uses one symbol only.
                            + 'u' - unique renderer symbolizes features based on one
                                    or more matching string attributes.
                            + 'c' - A class breaks renderer symbolizes based on the
                                    value of some numeric attribute.
                            + 'h' - heatmap renders point data into a raster
                                    visualization that emphasizes areas of higher
                                    density or weighted values.
    ----------------------  ---------------------------------------------------------
    symbol_type             optional string. This is the type of symbol the user
                            needs to create.  Valid inputs are: simple, picture, text,
                            or carto.  The default is simple.
    ----------------------  ---------------------------------------------------------
    symbol_style            optional string. This is the symbology used by the
                            geometry.  For example 's' for a Line geometry is a solid
                            line, and '-' is a dashed line.

                            Allowed symbol types based on geometries:

                            **Point Symbols**

                             + 'o' - Circle (default)
                             + '+' - Cross
                             + 'D' - Diamond
                             + 's' - Square
                             + 'x' - X

                             **Polyline Symbols**

                             + 's' - Solid (default)
                             + '-' - Dash
                             + '-.' - Dash Dot
                             + '-..' - Dash Dot Dot
                             + '.' - Dot
                             + '--' - Long Dash
                             + '--.' - Long Dash Dot
                             + 'n' - Null
                             + 's-' - Short Dash
                             + 's-.' - Short Dash Dot
                             + 's-..' - Short Dash Dot Dot
                             + 's.' - Short Dot

                             **Polygon Symbols**

                             + 's' - Solid Fill (default)
                             + '\' - Backward Diagonal
                             + '/' - Forward Diagonal
                             + '|' - Vertical Bar
                             + '-' - Horizontal Bar
                             + 'x' - Diagonal Cross
                             + '+' - Cross

    ----------------------  ---------------------------------------------------------
    col                     optional string/list. Field or fields used for heatmap,
                            class breaks, or unique renderers.
    ----------------------  ---------------------------------------------------------
    colors                  optional string. The color map to draw from in order to
                            visualize the data.  The default cmap is 'jet'. To get a
                            visual representation of the allowed color maps, use
                            the **display_colormaps** method.
    ----------------------  ---------------------------------------------------------
    alpha                   optional float.  This is a value between 0 and 1, with 1
                            being the default value.  The alpha sets the transparency
                            of the renderer when applicable.
    ======================  =========================================================

    The kwargs parameter accepts all parameters of the create_symbol method and the
    create_renderer method.


    """

    if isinstance(df, GeoSeries):
        fid = [[i] for i in range(len(df))]
        sdf = SpatialDataFrame(data=fid, geometry=df)
        plot(df=sdf,
             map_widget=map_widget,
             name=name,
             renderer_type=renderer_type,
             symbol_type=symbol_type,
             symbol_style=symbol_style,
             col=col,
             colors=colors,
             alpha=alpha,
             **kwargs)
        return
    r = None
    if isinstance(col, str):
        col = [col]
    map_exists = True
    if symbol_type is None:
        symbol_type = 'simple'
    if name is None:
        import uuid
        name = uuid.uuid4().hex[:7]
    if map_widget is None:
        map_exists = False
        map_widget = MapView()

    fc = df.to_feature_collection(name=name)
    if renderer_type in [None, 's']:
        renderer_type = 's'  # simple (default)
        r = generate_renderer(geometry_type=df.geometry_type.lower(),
                              sdf_or_series=df,
                              label=name,
                              symbol_type=symbol_type,
                              symbol_style=symbol_style,
                              render_type=renderer_type,
                              colors=colors,
                              alpha=alpha,
                              **kwargs)
        fc.layer['layerDefinition']['drawingInfo']['renderer'] = r
    elif isinstance(col, str) and \
         col not in df.columns:
        raise ValueError("Column %s does not exist." % col)
    elif isinstance(col, (tuple, list, str)) and \
         all([c in df.columns for c in col]) == True and \
         renderer_type in ['u', 'c']:
        if isinstance(col, str):
            col = [col]
        idx = 1
        if renderer_type == 'u':
            for c in col:
                kwargs['field%s' % idx] = c
                idx += 1
        elif renderer_type == 'c':
            kwargs['field'] = col[0]
        r = generate_renderer(geometry_type=df.geometry_type.lower(),
                              sdf_or_series=df,
                              label=name,
                              symbol_type=symbol_type,
                              symbol_style=symbol_style,
                              render_type=renderer_type,
                              colors=colors,
                              alpha=alpha,
                              **kwargs)
        fc.layer['layerDefinition']['drawingInfo']['renderer'] = r
    elif renderer_type == 'h':
        r = generate_renderer(geometry_type=df.geometry_type.lower(),
                              sdf_or_series=df,
                              label=name,
                              symbol_type=symbol_type,
                              symbol_style=symbol_style,
                              render_type=renderer_type,
                              colors=colors,
                              alpha=alpha,
                              **kwargs)
        fc.layer['layerDefinition']['drawingInfo']['renderer'] = r
    elif renderer_type == 'str':
        r = generate_renderer(geometry_type=df.geometry_type.lower(),
                              sdf_or_series=df,
                              label=name,
                              symbol_type=None,
                              symbol_style=None,
                              render_type=renderer_type,
                              colors=colors,
                              alpha=alpha,
                              **kwargs)
        fc.layer['layerDefinition']['drawingInfo']['renderer'] = r
    elif renderer_type == 't':
        r = generate_renderer(geometry_type=df.geometry_type.lower(),
                              sdf_or_series=df,
                              label=name,
                              symbol_type=None,
                              symbol_style=None,
                              render_type=renderer_type,
                              colors=colors,
                              alpha=alpha,
                              **kwargs)
        fc.layer['layerDefinition']['drawingInfo']['renderer'] = r
    map_widget.add_layer(fc, options={'title': name})
    if not map_exists:
        return map_widget
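A short usage sketch, assuming sdf is a point SpatialDataFrame with a hypothetical STATUS column and that this runs in a Jupyter notebook (MapView widgets only render there):

# Simple renderer with circle markers; no map_widget given, so plot() returns a new MapView
m = plot(df=sdf, renderer_type='s', symbol_type='simple', symbol_style='o')
m

# Unique-value renderer driven by the (hypothetical) STATUS field, added to the same map
plot(df=sdf, map_widget=m, renderer_type='u', col='STATUS', colors='jet')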
Code Example #20
gis = GIS("URLofESRIPortal", UserID, Password)
print("Connected")
siteMessages = []
for key, value in sites.items():
    #print(value)
    item = gis.content.get(value)
    #print(item)

    flayer = item.layers[0]
    #print(flayer)
    #Query produces a Feature Set, which is needed to call .features
    fSet = flayer.query()
    #Stores all features so that ones needing update can be copied
    all_features = fSet.features
    #Pulls data into Data Frame for manipulation with Pandas
    ESRIdf = SpatialDataFrame.from_layer(flayer)
    #print(ESRIdf.head())

    features_for_update = []
    #Iterate over ESRI Data Frame
    print("Starting Query")
    for index, row in ESRIdf.iterrows():
        # skip rows that already have a gateway IP or don't contain a machine
        if pd.notnull(row['gatewayip']) or row['followup'] != 'No':
            #print("Skipped Row")
            continue
        # Skip rows without an asset tag
        elif pd.isnull(row['asset']):
            #print("Skipped " + str(row['objectid']))
            continue
        else:
Code Example #21
File: fileops.py Project: Samakwa/VRP-TCC-For-RSS
def from_featureclass(filename, **kwargs):
    """
    Returns a GeoDataFrame from a feature class.
    Inputs:
     filename: full path to the feature class
    Optional Parameters:
     sql_clause: sql clause to parse data down
     where_clause: where statement
     sr: spatial reference object
     fields: list of fields to extract from the table
    """
    from .. import SpatialDataFrame
    from arcgis.geometry import _types
    if HASARCPY:
        sql_clause = kwargs.pop('sql_clause', (None,None))
        where_clause = kwargs.pop('where_clause', None)
        sr = kwargs.pop('sr', arcpy.Describe(filename).spatialReference or arcpy.SpatialReference(4326))
        fields = kwargs.pop('fields', None)
        desc = arcpy.Describe(filename)
        if not fields:
            fields = [field.name for field in arcpy.ListFields(filename) \
                      if field.type not in ['Geometry']]

            if hasattr(desc, 'areaFieldName'):
                afn = desc.areaFieldName
                if afn in fields:
                    fields.remove(afn)
            if hasattr(desc, 'lengthFieldName'):
                lfn = desc.lengthFieldName
                if lfn in fields:
                    fields.remove(lfn)
        geom_fields = fields + ['SHAPE@']
        flds = fields + ['SHAPE']
        vals = []
        geoms = []
        geom_idx = flds.index('SHAPE')
        shape_type = desc.shapeType
        default_polygon = _types.Geometry(arcpy.Polygon(arcpy.Array([arcpy.Point(0,0)]* 3)))
        default_polyline = _types.Geometry(arcpy.Polyline(arcpy.Array([arcpy.Point(0,0)]* 2)))
        default_point = _types.Geometry(arcpy.PointGeometry(arcpy.Point()))
        default_multipoint = _types.Geometry(arcpy.Multipoint(arcpy.Array([arcpy.Point()])))
        with arcpy.da.SearchCursor(filename,
                                   field_names=geom_fields,
                                   where_clause=where_clause,
                                   sql_clause=sql_clause,
                                   spatial_reference=sr) as rows:

            for row in rows:
                row = list(row)
                # Prevent curves/arcs
                if row[geom_idx] is None:
                    row.pop(geom_idx)
                    g = {}
                elif row[geom_idx].type in ['polyline', 'polygon']:
                    g = _types.Geometry(row.pop(geom_idx).generalize(0))
                else:
                    g = _types.Geometry(row.pop(geom_idx))
                if g == {}:
                    if shape_type.lower() == 'point':
                        g = default_point
                    elif shape_type.lower() == 'polygon':
                        g = default_polygon
                    elif shape_type.lower() == 'polyline':
                        g = default_polyline
                    elif shape_type.lower() == 'multipoint':
                        g = default_multipoint
                geoms.append(g)
                vals.append(row)
                del row
            del rows
        df = pd.DataFrame(data=vals, columns=fields)
        sdf = SpatialDataFrame(data=df, geometry=geoms)
        sdf.reset_index(drop=True, inplace=True)
        del df
        if sdf.sr is None:
            if sr is not None:
                sdf.sr = sr
            else:
                sdf.sr = sdf.geometry[sdf.geometry.first_valid_index()].spatialReference
        return sdf
    elif not HASARCPY and HASPYSHP and \
         filename.lower().find('.shp') > -1:
        geoms = []
        records = []
        reader = shapefile.Reader(filename)
        fields = [field[0] for field in reader.fields if field[0] != 'DeletionFlag']
        for r in reader.shapeRecords():
            atr = dict(zip(fields, r.record))
            g = r.shape.__geo_interface__
            g = _geojson_to_esrijson(g)
            geom = _types.Geometry(g)
            atr['SHAPE'] = geom
            records.append(atr)
            del atr
            del r, g
            del geom
        sdf = SpatialDataFrame(records)
        sdf.set_geometry(col='SHAPE')
        sdf.reset_index(inplace=True)
        return sdf
    elif not HASARCPY and not HASPYSHP and HASFIONA and \
         (filename.lower().find('.shp') > -1 or \
          os.path.dirname(filename).lower().find('.gdb') > -1):
        is_gdb = os.path.dirname(filename).lower().find('.gdb') > -1
        if is_gdb:
            with fiona.drivers():
                from arcgis.geometry import _types
                fp = os.path.dirname(filename)
                fn = os.path.basename(filename)
                geoms = []
                atts = []
                with fiona.open(path=fp, layer=fn) as source:
                    meta = source.meta
                    cols = list(source.schema['properties'].keys())
                    for idx, row in source.items():
                        geoms.append(_types.Geometry(row['geometry']))
                        atts.append(list(row['properties'].values()))
                        del idx, row
                    df = pd.DataFrame(data=atts, columns=cols)
                    return SpatialDataFrame(data=df, geometry=geoms)
        else:
            with fiona.drivers():
                from arcgis.geometry import _types
                geoms = []
                atts = []
                with fiona.open(path=filename) as source:
                    meta = source.meta
                    cols = list(source.schema['properties'].keys())
                    for idx, row in source.items():
                        geoms.append(_types.Geometry(row['geometry']))
                        atts.append(list(row['properties'].values()))
                        del idx, row
                    df = pd.DataFrame(data=atts, columns=cols)
                    return SpatialDataFrame(data=df, geometry=geoms)
    return
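A short usage sketch for from_featureclass(); the feature class path, where clause, and field names below are hypothetical placeholders, and which branch runs depends on whether arcpy, pyshp, or fiona is available:

# Hedged usage sketch; the path and field names are hypothetical.
sdf = from_featureclass(r"C:\data\sample.gdb\roads",
                        where_clause="LANES > 2",
                        fields=["NAME", "LANES"])
print(sdf.head())
print(sdf.sr)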
Code example #22
def completeness(gis, df_after, df_before, output_features, grid_filter, geom):
    """ main driver of program """
    try:

        out_fl = FeatureLayer(gis=gis, url=output_features)
        out_sdf = out_fl.query(geometry_filter=grid_filter, return_geometry=True,
                               return_all_records=True).df

        geometry_type = df_after.geometry_type

        sq = df_before[df_before.geometry.notnull()].geometry.disjoint(geom) == False
        df_before = df_before[sq].copy()
        before_count = len(df_before)
        sq = df_after[df_after.geometry.notnull()].geometry.disjoint(geom) == False
        df_after = df_after[sq].copy()
        after_count = len(df_after)
        geoms_after = df_after.clip(geom.extent)
        geoms_before = df_before.clip(geom.extent)

        geoms_before_sdf = SpatialDataFrame(geometry=geoms_before)
        geoms_after_sdf = SpatialDataFrame(geometry=geoms_after)

        q_after = geoms_after_sdf.geometry.JSON == '{"paths":[]}'
        geoms_after_sdf = geoms_after_sdf[~q_after].copy()
        geoms_after_sdf.reset_index(inplace=True, drop=True)
        q_before = geoms_before_sdf.geometry.JSON == '{"paths":[]}'
        geoms_before_sdf = geoms_before_sdf[~q_before].copy()
        geoms_before_sdf.reset_index(inplace=True, drop=True)

        if geometry_type == "Polygon":
            before_val = geoms_before_sdf.geometry.get_area('GEODESIC','SQUAREKILOMETERS').sum()
            after_val = geoms_after_sdf.geometry.get_area('GEODESIC','SQUAREKILOMETERS').sum()
            if after_val > 0:
                score = get_score(ratio=before_val/after_val,
                        baseVal=before_val,
                        inputVal=after_val)
            else:
                score = get_score(0, before_val, after_val)

            out_sdf.at[0, FIELDS[0]] = round(before_val, 1)
            out_sdf.at[0, FIELDS[1]] = round(after_val, 1)
            out_sdf.at[0, FIELDS[3]] = round(before_val - after_val, 1)
            out_sdf.at[0, FIELDS[2]] = score

        elif geometry_type == "Polyline":
            before_val = geoms_before_sdf.geometry.get_length('GEODESIC','KILOMETERS').sum()
            after_val = geoms_after_sdf.geometry.get_length('GEODESIC','KILOMETERS').sum()

            if after_val > 0:
                score = get_score(ratio=before_val/after_val,
                        baseVal=before_val,
                        inputVal=after_val)
            else:
                score = get_score(0, before_val, after_val)

            out_sdf.at[0, FIELDS[0]] = round(before_val, 1)
            out_sdf.at[0, FIELDS[1]] = round(after_val, 1)
            out_sdf.at[0, FIELDS[3]] = round(before_val - after_val, 1)
            out_sdf.at[0, FIELDS[2]] = score

        else:
            before_count = len(geoms_before_sdf)
            after_count = len(geoms_after_sdf)
            if after_count > 0:
                score = get_score(ratio=before_count/after_count,
                        baseVal=before_count,
                        inputVal=after_count)
            else:
                score = get_score(ratio=0,
                        baseVal=before_count,
                        inputVal=after_count)

            out_sdf.at[0, FIELDS[0]] = before_count
            out_sdf.at[0, FIELDS[1]] = after_count
            out_sdf.at[0, FIELDS[3]] = before_count - after_count
            out_sdf.at[0, FIELDS[2]] = score

        del sq
        del df_after
        del df_before
        del geom

        return out_sdf, out_fl

        #arcpy.SetParameterAsText(4, out_grid)
    except FunctionError as f_e:
        messages = f_e.args[0]

    except:
        line, filename, synerror = trace()

#--------------------------------------------------------------------------
##if __name__ == "__main__":
##    #env.overwriteOutput = True
##    argv = tuple(arcpy.GetParameterAsText(i)
##    for i in range(arcpy.GetArgumentCount()))
##    main(*argv)
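A hedged usage sketch for completeness(); the portal URL, layer URLs, and the grid-cell polygon are placeholders, and the .df accessor matches the older API version this example already uses:

# Hedged usage sketch; all URLs, credentials, and coordinates are placeholders.
from arcgis.gis import GIS
from arcgis.features import FeatureLayer
from arcgis.geometry import Polygon
from arcgis.geometry.filters import intersects

gis = GIS("https://example.com/portal", "user", "password")
before_lyr = FeatureLayer("https://example.com/arcgis/rest/services/before/FeatureServer/0", gis=gis)
after_lyr = FeatureLayer("https://example.com/arcgis/rest/services/after/FeatureServer/0", gis=gis)

# One analysis grid cell as an arcgis Polygon (coordinates are placeholders)
cell = Polygon({"rings": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]],
                "spatialReference": {"wkid": 4326}})

df_before = before_lyr.query(geometry_filter=intersects(cell)).df
df_after = after_lyr.query(geometry_filter=intersects(cell)).df

out_sdf, out_fl = completeness(gis, df_after, df_before,
                               output_features="https://example.com/arcgis/rest/services/grid/FeatureServer/0",
                               grid_filter=intersects(cell),
                               geom=cell)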