# Example #1
def main(*argv):
    """Score a polygon grid with attribute-error statistics.

    Positional argv:
        0 attr_features     -- feature class carrying the error fields
        1 sql_clause        -- optional SQL filter applied to attr_features
        2 polygon_grid      -- grid template / spatial-filter polygons
        3 error_field_count -- field with per-feature error counts, e.g. 'NULL_COUNT'
        4 error_field_def   -- field listing the offending columns, e.g. 'NULL_COLUMNS'
        5 output_fc         -- output grid feature class (created if absent)

    For every grid cell, intersecting attr_features rows are collected and
    their error counts / column lists are fed to get_answers(); the answer
    rows are flushed to output_fc via extend_table() in batches.
    """
    try:
        attr_features = argv[0]
        sql_clause = argv[1]
        polygon_grid = argv[2]
        error_field_count = str(argv[3])  # e.g. 'NULL_COUNT'
        error_field_def = str(argv[4])  # e.g. 'NULL_COLUMNS'
        output_fc = argv[5]
        out_fc_exists = arcpy.Exists(output_fc)

        #  Local Variables
        #
        scratchFolder = env.scratchFolder
        scratchGDB = env.scratchGDB
        results = []
        #  Logic
        #
        if not out_fc_exists:
            # First run: copy the grid template into output_fc, add the
            # statistic fields, and process every cell (no where clause).
            output_gdb = validate_workspace(os.path.dirname(output_fc))
            #  Create the grid
            #
            out_grid = arcpy.CopyFeatures_management(polygon_grid,
                                                     output_fc)[0]
            out_grid = extend_table(out_grid)
            where_clause = None
        else:
            # Incremental run: restrict processing to the existing cells
            # whose centers fall inside polygon_grid.
            arcpy.MakeFeatureLayer_management(output_fc, "lyr")
            arcpy.SelectLayerByLocation_management("lyr",
                                                   "HAVE_THEIR_CENTER_IN",
                                                   polygon_grid)
            oids = [row[0] for row in arcpy.da.SearchCursor("lyr", "OID@")]
            if len(oids) > 1:
                oids_string = str(tuple(oids))
            else:
                # A 1-tuple would stringify as '(5,)', which is invalid SQL.
                oids_string = str('(' + str(oids[0]) + ')')

            where_clause = 'OBJECTID IN ' + oids_string

        #  Process the Data
        #
        error_field = (error_field_def, error_field_count)
        grid_sdf = SpatialDataFrame.from_featureclass(
            filename=output_fc, where_clause=where_clause)
        if sql_clause:
            attr_sdf = SpatialDataFrame.from_featureclass(
                attr_features, fields=error_field, where_clause=sql_clause)
        else:
            attr_sdf = SpatialDataFrame.from_featureclass(attr_features,
                                                          fields=error_field)
        index = attr_sdf.sindex  # spatial index for bbox pre-filtering
        for idx, row in enumerate(grid_sdf.iterrows()):
            errors = []
            attrs = []
            geom = row[1].SHAPE
            oid = row[1].OBJECTID
            print(str(oid))
            # Cell bounding box: [xmin, ymin, xmax, ymax].
            ext = [
                geom.extent.lowerLeft.X, geom.extent.lowerLeft.Y,
                geom.extent.upperRight.X, geom.extent.upperRight.Y
            ]
            row_oids = list(index.intersect(ext))
            df_current = attr_sdf.loc[row_oids]  #.copy()
            # Keep only the candidates that truly intersect the cell.
            sq = df_current.geometry.disjoint(geom) == False
            fcount = len(df_current[sq])  # Total Count
            # NOTE(review): q2 (error count > 0) is computed but never
            # applied -- the '& q2' filter below appears deliberately
            # disabled; confirm before re-enabling.
            q2 = df_current[error_field_count] > 0
            #& q2
            df_current = df_current[sq].copy(
            )  # rows intersecting the cell (q2 filter currently disabled)
            #print("here")
            if fcount > 0:  #len(df_current) > 0:
                errors += df_current[error_field_count].tolist()
                arcpy.AddMessage(str(errors))

                def process(x):
                    # Parse a value like 'prefix | colA,colB' into the list
                    # of column names after the last '|'.
                    print(x)
                    return [
                        va
                        for va in x.replace(' ', '').split('|')[-1].split(',')
                        if len(va) > 1
                    ]

                for e in df_current[error_field_def].apply(process).tolist():
                    attrs += e
                    del e
            row = get_answers(oid=oid,
                              err=errors,
                              attr=attrs,
                              feature_count=fcount)
            results.append(row)
            # Flush to the output table in batches to bound memory use.
            if len(results) > 250:
                extend_table(table=output_fc, rows=results)
                results = []
            del idx
            del row
            del errors
            del attrs
            del geom
            del oid
            del ext
            del row_oids
            del df_current
            del sq
            del q2
        if len(results) > 0:
            extend_table(table=output_fc, rows=results)
        del index
        del results
        del grid_sdf
        del attr_sdf
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
def main(*argv):
    """Report schema and row differences between two feature-class versions.

    Positional argv:
        0 new_fc    -- newer feature class
        1 uid_field -- field uniquely identifying a feature in both inputs
        2 old_fc    -- older feature class
        3 out_gdb   -- geodatabase receiving the result tables/feature classes

    Produces inside out_gdb:
        InformationTable  -- row counts, added/removed fields, spatial refs
        changed_features  -- features whose attributes changed, with edit count
        change_table      -- per-field change log copied from a scratch CSV
        added_features / deleted_features -- only when adds/deletes exist
    """
    try:
        new_fc = argv[0]
        uid_field = argv[1]
        old_fc = argv[2]
        out_gdb = argv[3]
        #  Local Variables
        #
        scratchGDB = env.scratchGDB
        scratchFolder = env.scratchFolder
        out_table = os.path.join(out_gdb, "InformationTable")
        out_fc = os.path.join(out_gdb, "changed_features")
        change_csv = os.path.join(env.scratchFolder, "changes.csv")
        change_table = os.path.join(out_gdb, "change_table")
        adds_fc = None
        removes_fc = None
        #  Logic
        #
        if arcpy.Exists(out_table):
            arcpy.Delete_management(out_table)
        if arcpy.Exists(out_fc):
            arcpy.Delete_management(out_fc)
        # 1). Information table: one summary row of counts/fields/spatial refs.
        tbl = arcpy.CreateTable_management(out_path=out_gdb,
                                           out_name="InformationTable")[0]
        array = np.array([],
                         np.dtype([('_id', np.int32), ('OLD_COUNT', '|S25'),
                                   ('NEW_COUNT', '|S25'),
                                   ('REMOVE_FIELDS', '|S256'),
                                   ('ADDED_FIELDS', '|S256'),
                                   ('SR_OLD', '|S256'), ('SR_NEW', '|S256')]))

        arcpy.da.ExtendTable(tbl,
                             arcpy.Describe(tbl).OIDFieldName, array, "_id")
        del array
        fields_old = set([field.name for field in arcpy.ListFields(old_fc)])
        fields_new = set([field.name for field in arcpy.ListFields(new_fc)])
        desc_old = arcpy.Describe(old_fc)
        desc_new = arcpy.Describe(new_fc)
        row = [
            str(arcpy.GetCount_management(old_fc)[0]),
            str(arcpy.GetCount_management(new_fc)[0]),
            ",".join(list(fields_old - fields_new)),
            ",".join(list(fields_new - fields_old)),
            str(desc_old.spatialReference.factoryCode),
            str(desc_new.spatialReference.factoryCode)
        ]
        #del desc_new, desc_old
        icur = arcpy.da.InsertCursor(tbl, [
            'OLD_COUNT', 'NEW_COUNT', 'REMOVE_FIELDS', 'ADDED_FIELDS',
            'SR_OLD', 'SR_NEW'
        ])
        icur.insertRow(row)
        del icur, row
        # 2). Output Feature Class: Geometry, ACTION, Summary of differences
        old_sdf = SpatialDataFrame.from_featureclass(old_fc)
        len_old_sfd = len(old_sdf)
        # keep=False drops EVERY row sharing a duplicated uid, not just
        # the later duplicates.
        old_sdf = old_sdf.drop_duplicates(subset=uid_field, keep=False)
        len_old_sfd_after = len(old_sdf)
        if len_old_sfd - len_old_sfd_after != 0:
            arcpy.AddMessage(len_old_sfd)
            arcpy.AddMessage(len_old_sfd_after)
            arcpy.AddMessage("Dropping Dublicates from Old Feature Classs")

        if arcpy.Describe(old_fc).oidFieldName in old_sdf.columns:
            arcpy.AddMessage("deleting oid field")
            del old_sdf[arcpy.Describe(old_fc).oidFieldName]
        new_sdf = SpatialDataFrame.from_featureclass(new_fc)
        len_new_sfd = len(new_sdf)
        new_sdf = new_sdf.drop_duplicates(subset=uid_field, keep=False)
        len_new_sfd_after = len(new_sdf)
        if len_new_sfd - len_new_sfd_after != 0:
            arcpy.AddMessage(len_new_sfd)
            arcpy.AddMessage(len_new_sfd_after)
            arcpy.AddMessage("Dropping Dublicates from New Feature Class")

        if arcpy.Describe(new_fc).oidFieldName in new_sdf.columns:
            arcpy.AddMessage("deleting oid field")
            del new_sdf[arcpy.Describe(new_fc).oidFieldName]

        # A). Find Adds, Deletes and Matching Values
        old_uids = set(old_sdf[uid_field].unique().tolist())
        new_uids = set(new_sdf[uid_field].unique().tolist())
        common_uids = list(new_uids.intersection(old_uids))
        fields = [field for field in old_sdf.columns.tolist() \
                      if field in new_sdf.columns.tolist()]
        # pandas 0.16.1 lacks merge(indicator=...), so adds/deletes are
        # derived from null checks on a duplicated key column instead.
        if pd.__version__ == "0.16.1":
            old_sdf['temp_key'] = old_sdf[uid_field]
            new_sdf['temp_key'] = new_sdf[uid_field]
            merged = old_sdf.merge(new_sdf,
                                   on=['temp_key'],
                                   how='outer',
                                   suffixes=['', '_'])
            adds = merged.loc[merged[uid_field + "_"].notnull()
                              & merged[uid_field].isnull()]
            deletes = merged.loc[merged[uid_field + "_"].isnull()
                                 & merged[uid_field].notnull()]
            ignore_ids = adds[uid_field + '_'].unique().tolist(
            ) + deletes[uid_field].unique().tolist()
        else:  #if pd.__version__ == "foo":
            # Modern pandas: _merge indicator flags which side a row came from.
            merged = old_sdf.merge(new_sdf,
                                   on=[uid_field],
                                   how='outer',
                                   suffixes=['', '_'],
                                   indicator=True)
            adds = merged.loc[merged._merge.eq('right_only')]
            deletes = merged.loc[merged._merge.eq('left_only')]
            ignore_ids = adds[uid_field].unique().tolist(
            ) + deletes[uid_field].unique().tolist()
        if len(adds) > 0:
            q = new_sdf[uid_field].isin(adds[uid_field].tolist())
            adds_fc = new_sdf[q].to_featureclass(out_location=out_gdb,
                                                 out_name="added_features",
                                                 overwrite=True,
                                                 skip_invalid=True)
        if len(deletes) > 0:
            q = old_sdf[uid_field].isin(deletes[uid_field].tolist())
            removes_fc = old_sdf[q].to_featureclass(
                out_location=out_gdb,
                out_name="deleted_features",
                overwrite=True,
                skip_invalid=True)
        # Compare Attributes of SDF
        fields = [field for field in old_sdf.columns.tolist() \
                  if field in new_sdf.columns.tolist()]
        if 'SHAPE' in fields:
            fields.remove("SHAPE")  # geometry is not compared cell-wise
            #del old_sdf["SHAPE"]
            #del new_sdf["SHAPE"]

        cq = new_sdf[uid_field].isin(common_uids)
        cq1 = old_sdf[uid_field].isin(common_uids)
        old_sdf = old_sdf[cq1].copy()
        new_sdf = new_sdf[cq].copy()

        # Align both frames on the uid so element-wise comparison matches rows.
        new_sdf.index = new_sdf[uid_field]
        old_sdf.index = old_sdf[uid_field]
        old_sdf.dtypes
        new_sdf.dtypes
        old_sdf.sort_index(inplace=True)
        print(old_sdf.head())
        new_sdf.sort_index(inplace=True)
        print(new_sdf.head())
        # NOTE(review): 'ne' is computed but never used below.
        ne = (old_sdf[fields] != new_sdf[fields]).any(1)
        ne_stacked = (old_sdf[fields] != new_sdf[fields]).stack()
        changed = ne_stacked[ne_stacked]

        changed.index.names = [uid_field, 'col']
        difference_locations = np.where(old_sdf[fields] != new_sdf[fields])
        # NOTE(review): 'from_val' is taken from the NEW frame and 'to_val'
        # from the OLD one -- the naming looks inverted; confirm intent
        # before relying on the column semantics downstream.
        changed_from = new_sdf[fields].values[difference_locations]
        changed_to = old_sdf[fields].values[difference_locations]
        df_new = pd.DataFrame({
            'from_val': changed_from,
            'to_val': changed_to
        },
                              index=changed.index)
        df_new.reset_index(level=['col'], inplace=True)
        # Drop cells where both sides are null (NaN != NaN false positives).
        q3 = df_new['from_val'].isnull() & df_new['to_val'].isnull()
        df_new[~q3].to_csv(change_csv)
        joined_sdf = SpatialDataFrame.merge(new_sdf,
                                            df_new[~q3],
                                            right_index=True,
                                            left_index=True)
        ##    arcpy.AddMessage('everywhere')
        ##    arcpy.AddMessage(old_sdf)
        ##    arcpy.AddMessage('----------------')
        ##    arcpy.AddMessage(joined_sdf)
        ##    arcpy.AddMessage('----------------')
        q4 = joined_sdf['from_val'].isnull() & joined_sdf['to_val'].isnull()
        stripped_sdf = joined_sdf[~q4]
        stripped_sdf.drop('from_val', axis=1, inplace=True)
        stripped_sdf.drop('to_val', axis=1, inplace=True)
        stripped_sdf.drop('col', axis=1, inplace=True)
        # One output row per feature, annotated with how many cells changed.
        stripped_sdf['Edit Count'] = stripped_sdf.groupby([uid_field]).size()
        stripped_sdf.drop_duplicates(subset=uid_field,
                                     keep='last',
                                     inplace=True)
        #arcpy.AddMessage(stripped_sdf)
        #arcpy.AddMessage('----------------')
        #joined_sdf.sr = desc_new.spatialReference
        stripped_sdf.to_featureclass(out_location=out_gdb,
                                     out_name="changed_features",
                                     overwrite=True,
                                     skip_invalid=True)
        arcpy.AddMessage('Done.')
        change_tbl = arcpy.CopyRows_management(change_csv, change_table)[0]
        arcpy.SetParameterAsText(4, change_tbl)  # Change Table
        arcpy.SetParameterAsText(5, tbl)  # Information Table
        arcpy.SetParameterAsText(6, out_fc)
        if adds_fc:
            arcpy.SetParameterAsText(7, adds_fc)  # added rows
        if removes_fc:
            arcpy.SetParameterAsText(8, removes_fc)  # deleted rows
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
# Example #3
def main(*argv):
    """Compare per-grid-cell feature density between two datasets.

    Positional argv:
        0 before_feature -- baseline feature class
        1 after_feature  -- comparison feature class
        2 polygon_grid   -- polygon grid defining the analysis cells
        3 out_grid       -- output grid feature class (created if absent)

    For every grid cell, both datasets are clipped to the cell extent and
    measured -- geodesic area (polygons), geodesic length (polylines), or a
    plain feature count (any other geometry).  The densities, their
    difference and a completeness score from get_score() are written to the
    grid (new grid via to_featureclass, existing grid via extend_table).
    """
    try:
        before_feature = argv[0]
        after_feature = argv[1]
        polygon_grid = argv[2]
        out_grid = argv[3]
        out_fc_exists = arcpy.Exists(out_grid)

        #  Local Variables
        #
        scratchFolder = env.scratchFolder
        scratchGDB = env.scratchGDB
        results = []
        #  Logic
        #
        output_gdb = validate_workspace(os.path.dirname(out_grid))
        if not out_fc_exists:
            # First run: build a scratch copy of the grid with the extra
            # statistic fields, then process every cell.
            out_grid_temp = arcpy.CopyFeatures_management(
                polygon_grid, "in_memory\\temp")[0]
            out_grid_temp = extend_table(out_grid_temp)
            grid_sdf = SpatialDataFrame.from_featureclass(
                filename=out_grid_temp, where_clause=None)
        else:
            # Incremental run: only touch existing cells whose centers fall
            # inside polygon_grid.
            arcpy.MakeFeatureLayer_management(out_grid, "lyr")
            arcpy.SelectLayerByLocation_management("lyr",
                                                   "HAVE_THEIR_CENTER_IN",
                                                   polygon_grid)
            oids = [row[0] for row in arcpy.da.SearchCursor("lyr", "OID@")]
            # BUG FIX: a 1-tuple stringifies as '(5,)', which is invalid
            # SQL; build the single-element list explicitly (consistent
            # with the sibling tools in this file).
            if len(oids) > 1:
                oids_string = str(tuple(oids))
            else:
                oids_string = '(' + str(oids[0]) + ')'
            where_clause = 'OBJECTID IN ' + oids_string
            grid_sdf = SpatialDataFrame.from_featureclass(
                filename=out_grid, where_clause=where_clause)

        before_sdf = SpatialDataFrame.from_featureclass(before_feature)
        after_sdf = SpatialDataFrame.from_featureclass(after_feature)

        before_index = before_sdf.sindex
        after_index = after_sdf.sindex
        geometry_type = after_sdf.geometry_type
        # Area/length results are rounded to one decimal; raw counts are not.
        is_measured = geometry_type in ("polygon", "polyline")
        for idx, row in enumerate(grid_sdf.iterrows()):
            geom = row[1].SHAPE
            oid = row[1].OBJECTID
            # Cell bounding box: [xmin, ymin, xmax, ymax].
            ext = [
                geom.extent.lowerLeft.X, geom.extent.lowerLeft.Y,
                geom.extent.upperRight.X, geom.extent.upperRight.Y
            ]
            # Spatial-index pre-filter, then an exact intersection test
            # (null geometries are excluded before calling disjoint).
            df_before = before_sdf.loc[list(before_index.intersect(ext))]
            keep = df_before[df_before.geometry.notnull()].geometry.disjoint(
                geom) == False
            df_before = df_before[keep].copy()
            df_after = after_sdf.loc[list(after_index.intersect(ext))]
            keep = df_after[df_after.geometry.notnull()].geometry.disjoint(
                geom) == False
            df_after = df_after[keep].copy()
            geoms_before = df_before.clip(geom.extent)
            geoms_after = df_after.clip(geom.extent)
            if geometry_type == "polygon":
                before_val = geoms_before.getArea('GEODESIC',
                                                  'SQUAREKILOMETERS').sum()
                after_val = geoms_after.getArea('GEODESIC',
                                                'SQUAREKILOMETERS').sum()
            elif geometry_type == "polyline":
                before_val = geoms_before.getLength('GEODESIC',
                                                    'KILOMETERS').sum()
                after_val = geoms_after.getLength('GEODESIC',
                                                  'KILOMETERS').sum()
            else:
                # Point (or other) geometry: fall back to feature counts.
                before_val = len(df_before)
                after_val = len(df_after)
            if is_measured:
                tds_density = round(before_val, 1)
                comp_density = round(after_val, 1)
                difference = round(before_val - after_val, 1)
            else:
                tds_density = before_val
                comp_density = after_val
                difference = before_val - after_val
            grid_sdf.loc[[idx], 'TDS_DENSITY'] = tds_density
            grid_sdf.loc[[idx], 'COMP_DENSITY'] = comp_density
            grid_sdf.loc[[idx], 'DIFFERENCE'] = difference
            # BUG FIX: the count branch previously tested 'after_val',
            # which was undefined on the first iteration (NameError) or
            # stale from a prior cell; every branch now guards its own
            # denominator before dividing.
            if after_val > 0:
                score = get_score(ratio=before_val / after_val,
                                  baseVal=before_val,
                                  inputVal=after_val)
            else:
                score = get_score(ratio=0,
                                  baseVal=before_val,
                                  inputVal=after_val)
            grid_sdf.loc[[idx], 'COMPLETENESS_VALUE'] = score
            results.append(
                (oid, tds_density, comp_density, score, difference))
        if not out_fc_exists:
            out_grid = grid_sdf.to_featureclass(
                out_location=os.path.dirname(out_grid),
                out_name=os.path.basename(out_grid))
        else:
            extend_table(out_grid, results)

        #arcpy.SetParameterAsText(4, out_grid)
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
# Example #4
def main(*argv):
    """Score a grid with thematic map-scale statistics from a value field.

    Positional argv:
        0 features     -- input feature class path (single path string)
        1 value_field  -- field holding the scale value per feature
        2 population   -- population input passed to create_grls()
        3 polygon_grid -- grid template / spatial-filter polygons
        4 out_grid     -- output grid feature class (created if absent)

    For each grid cell the intersecting features' value_field is
    summarized (mode, min, max, counts per standard scale bucket) and the
    result rows are appended to out_grid via extend_table().  Requires the
    Spatial Analyst extension (checked out/in around the body).
    """
    try:
        arcpy.CheckOutExtension('spatial')

        features =   str(argv[0])#.split(';')
        value_field = argv[1].upper()
        population = argv[2]
        polygon_grid = argv[3]
        out_grid = argv[4]
        out_fc_exists = arcpy.Exists(out_grid)
        output_gdb, out_name = os.path.split(out_grid)

        #  Local Variables
        #
        scratchFolder = env.scratchFolder
        scratchGDB = env.scratchGDB
        grid_polygon = None
        fcs = []
        results = []

        fc = features
        #for fc in features:
        #out_name = "srcTA_%s_%s" % (os.path.basename(fc[:-3]), fc[-3:])
        #out_grid = os.path.join(output_gdb, out_name)
        if not out_fc_exists:
            # First run: derive the grid from the population input, add the
            # statistic fields, and copy it to out_grid.
            print("creating fc")
            output_gdb = validate_workspace(output_gdb)
            print ("process the raster dataset")
            polygon_grid = create_grls(grid=polygon_grid,
                                   population=population)
            polygon_grid = extend_table(table=polygon_grid)
            print ('end raster processing')
            arcpy.AddMessage("Working on feature class: %s" % os.path.basename(fc))
            arcpy.CopyFeatures_management(polygon_grid, out_grid)
            grid_sdf = SpatialDataFrame.from_featureclass(filename=out_grid)
        else:
            # Incremental run: only process existing cells whose centers
            # fall inside polygon_grid.
            print("FC exists")
            arcpy.MakeFeatureLayer_management(out_grid, "lyr")
            arcpy.SelectLayerByLocation_management("lyr", "HAVE_THEIR_CENTER_IN", polygon_grid)
            oids = [row[0] for row in arcpy.da.SearchCursor("lyr", "OID@")]
            if len(oids) >1:
                oids_string = str(tuple(oids))
            else:
                # A 1-tuple would stringify as '(5,)', which is invalid SQL.
                oids_string = str('('+ str(oids[0]) + ')')

            where_clause = 'OBJECTID IN ' + oids_string
            grid_sdf = SpatialDataFrame.from_featureclass(out_grid,
                                        where_clause=where_clause)



        # Clip the input down to the grid footprint when the grid sits
        # entirely inside the data extent -- smaller dataframe to scan.
        poly_desc = arcpy.Describe(polygon_grid)
        fc_desc = arcpy.Describe(fc)
        if poly_desc.extent.within(fc_desc.extent):

            temp_fc = 'in_memory/clip'
            arcpy.AddMessage('Clipping features to polygon')
            arcpy.Clip_analysis(fc, polygon_grid, temp_fc)
            arcpy.AddMessage('Created in_memory fc')
            data_sdf = SpatialDataFrame.from_featureclass(temp_fc,
                                                            fields=[value_field])
            arcpy.AddMessage('features read into spatial dataframe after clipping')
        else:
            data_sdf = SpatialDataFrame.from_featureclass(fc, fields=[value_field])
            arcpy.AddMessage('features read into spatial dataframe without clipping')

        #data_sdf = SpatialDataFrame.from_featureclass(fc, fields=[value_field])
        index = data_sdf.sindex  # spatial index for bbox pre-filtering
        for idx, row in enumerate(grid_sdf.iterrows()):
            geom = row[1].SHAPE
            oid = row[1].OBJECTID
            # Cell bounding box: [xmin, ymin, xmax, ymax].
            ext = [geom.extent.lowerLeft.X, geom.extent.lowerLeft.Y,
                   geom.extent.upperRight.X, geom.extent.upperRight.Y]
            row_oids = list(index.intersect(ext))
            df_current = data_sdf.loc[data_sdf.index.isin(row_oids)]
            # Keep only the candidates that truly intersect the cell.
            sq = df_current['SHAPE'].disjoint(geom) == False
            df_current = df_current[sq].copy()

            if len(df_current) > 0:
                #arcpy.AddMessage(str(len(df_current)))
                #print ('do stuff here')
                #df_current['EQUAL'] = 0
                count = len(df_current)
                max_val = df_current[value_field].max()
                max_scale = 100 * (len(df_current[df_current[value_field] == max_val])/count)
                min_val = df_current[value_field].min()
                min_scale = 100 * (len(df_current[df_current[value_field] == min_val])/count)
                vc = df_current[value_field].value_counts()
                common = df_current[value_field].mode() # Used in MSP
                if len(common) > 0:
                    common = common[0]
                    common_count = vc[common]
                    common_per = (vc[common]/count) * 100
                else:
                    # No mode (e.g. all values unique): fall back to min.
                    common = min_val
                    common_count = 1
                    common_per = 100
                # Per-bucket counts for the standard map scales.
                count_2500 = 0
                count_5000 = 0
                count_12500 = 0
                count_25000 = 0
                count_50000 = 0
                count_100000 = 0
                count_250000 = 0
                count_500000 = 0
                count_1000000 = 0
                if 2500 in vc:
                    count_2500 = vc[2500]
                if 5000 in vc:
                    count_5000 = vc[5000]
                if 12500 in vc:
                    count_12500 = vc[12500]
                if 25000 in vc:
                    count_25000 = vc[25000]
                if 50000 in vc:
                    count_50000 = vc[50000]
                if 100000 in vc:
                    count_100000 = vc[100000]
                if 250000 in vc:
                    count_250000 = vc[250000]
                if 500000 in vc:
                    count_500000 = vc[500000]
                if 1000000 in vc:
                    count_1000000 = vc[1000000]
                # 1).
                MSP = get_msp(scale=common) # SHOULD UPDATE MISSION_PLANNING FIELD
                #

                SCORE_VALUE = get_equal_breaks_score(mean=row[1].MEAN)# get_equal_breaks_score(output_features, ['MEAN','EQUAL']) # PUT SCORE IN EQUAL
                GRLS = SCORE_VALUE
                domScale = common
                # FIELD 1 is the source, Field 2 is the field to be updated
                #df_current['EQUAL'] = SCORE_VALUE # ASSIGNS EQUAL TO LANSCAN_SCALE
                #29 field
                result = (oid,# OID
                          common,# median
                          common_count, # % common
                          round(common_per,1),
                          min_val,#
                          round(min_scale,1),#
                          max_val,#
                          round(max_scale,1),#
                          count_2500,
                          count_5000,
                          count_12500,
                          count_25000,
                          count_50000,
                          count_100000,
                          count_250000,
                          count_500000,
                          count_1000000,
                          round(count_2500*100/count,1),
                          round(count_5000*100/count,1),
                          round(count_12500*100/count,1),
                          round(count_25000*100/count,1),
                          round(count_50000*100/count,1),
                          round(count_100000*100/count,1),
                          round(count_250000*100/count,1),
                          round(count_500000*100/count,1),
                          round(count_1000000*100/count,1),
                          count,
                          str(MSP), #MISSION_PLANNING FIELD
                          SCORE_VALUE,#), # THEMATIC SCALE VALUE
                          population_scale(common, SCORE_VALUE) # POPULATION_SCALE
                          )
                #arcpy.AddMessage(result)
                results.append(result)
                #arcpy.AddMessage('appended result')
            else:
                # Empty cell: sentinel row with the same 30-value shape.
                #arcpy.AddMessage('appending the default result.')
                results.append(tuple([oid]  + [-1] * 25 + [0] + ['N/A']*2 + [0]))
                #arcpy.AddMessage('append the default result.')
            del geom, oid
            del row_oids
            del sq
            del df_current
        del fc
        fcs.append(extend_table(table=out_grid, rows=results))
        del results
        del grid_sdf,data_sdf, index
        #arcpy.SetParameter(5, fcs)
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
    finally:
        # Always release the Spatial Analyst license, even on error.
        arcpy.CheckInExtension('spatial')
def main(*argv):
    """Change-detection driver.

    Compares a new and an old feature class (matched on a unique id field),
    classifies every feature as NEW / REMOVED / GEOMETRY MODIFIED, and writes
    the combined result to the output geodatabase.

    argv[0] -- fc_new: current (new) feature class
    argv[1] -- fc_old: previous (old) feature class
    argv[2] -- unique_field: field uniquely identifying features in both inputs
    argv[3] -- out_gdb: workspace that receives the result feature class

    Side effects: writes scratch dissolve outputs, writes the result feature
    class, and sets tool output parameter 4 to its path.
    """
    try:
        fc_new = argv[0]
        fc_old = argv[1]
        unique_field = argv[2]
        out_gdb = argv[3]

        #  Local Variables
        #
        scratchGDB = env.scratchGDB
        dis_new = os.path.join(scratchGDB, "dis_new")
        dis_old = os.path.join(scratchGDB, "dis_old")
        #  Logic
        #
        # Dissolve both inputs on the unique field so each id maps to one
        # (possibly multi-part) geometry before the row-wise comparison.
        dis_new = arcpy.Dissolve_management(in_features=fc_new,
                                            out_feature_class=dis_new,
                                            dissolve_field=unique_field)[0]
        dis_old = arcpy.Dissolve_management(in_features=fc_old,
                                            out_feature_class=dis_old,
                                            dissolve_field=unique_field)[0]
        new_sdf = SpatialDataFrame.from_featureclass(dis_new, fields=[unique_field])
        old_sdf = SpatialDataFrame.from_featureclass(dis_old, fields=[unique_field])
        #  Find Added and Removed Features
        #
        unew = set(new_sdf[unique_field].unique().tolist())
        uold = set(old_sdf[unique_field].unique().tolist())
        adds = list(unew - uold)      # ids present only in the new data
        deletes = list(uold - unew)   # ids present only in the old data
        old_df = old_sdf[old_sdf[unique_field].isin(deletes)].copy()
        old_df['STATUS'] = "REMOVED FEATURE"
        new_df = new_sdf[new_sdf[unique_field].isin(adds)].copy()
        new_df['STATUS'] = "NEW FEATURE"
        # Find Geometry Differences
        #
        # Re-index the surviving (common-id) rows on the unique field so the
        # element-wise != compares records by id rather than by position.
        df2 = new_sdf[~new_sdf[unique_field].isin(adds)].copy()
        df2.index = df2[unique_field]
        df1 = old_sdf[~old_sdf[unique_field].isin(deletes)].copy()
        df1.index = df1[unique_field]
        ne = (df1 != df2)['SHAPE']    # True where the geometry changed
        updated = df2[ne].copy()
        updated['STATUS'] = "GEOMETRY MODIFIED"
        updated.reset_index(inplace=True, drop=True)
        del ne, df1, df2
        del new_sdf, old_sdf
        joined = pd.concat([updated,
                            old_df,
                            new_df])
        joined.reset_index(inplace=True, drop=True)
        del updated, new_df, old_df
        # NOTE(review): output name is misspelled upstream; kept for
        # backward compatibility with existing consumers.
        out_fc = joined.to_featureclass(out_gdb, "modifed_dataset")
        del joined
        arcpy.SetParameterAsText(4, out_fc)
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
Example #6
0
def main(*argv):
    """Positional-offset driver.

    Samples features from a TDS road dataset, measures their offset distance
    to comparable (similarly-angled) features in a comparison dataset within a
    distance tolerance, aggregates mean offset per grid cell and minimum
    offset per sampled feature, and joins those statistics back onto the grid
    and the copied TDS features.

    argv[0] -- tds_features: input road feature class (copied to output_gdb)
    argv[1] -- in_fields: ';'-delimited field names to keep on the copy
    argv[2] -- comparative_features: comparison feature class (e.g. OSM)
    argv[3] -- polygon_grid: analysis grid (copied to output_gdb)
    argv[4] -- output_gdb: workspace for the copied outputs
    argv[5] -- distanceTolerance: search buffer distance (assumed meters)
    argv[6] -- sample_size: <=100 means percent of features; >=101 means count

    Side effects: copies/modifies feature classes in output_gdb, extends
    their tables, and sets tool output parameters 7 and 8.
    """
    try:
        tds_features = argv[0]
        in_fields = str(argv[1]).upper()  #'zi001_vsn'.upper()
        comparative_features = argv[2]
        polygon_grid = argv[3]
        output_gdb = argv[4]
        distanceTolerance = float(argv[5])  #25 #assumes meters
        sample_size = int(
            argv[6]
        )  #2 #if 100 or less, is a percent. if 101 or greater, is a number of features
        #  Local Variables
        #
        sr = arcpy.SpatialReference(3857)  # Web Mercator for metric distances
        is_percentage = False
        results = []
        #  Logic
        #
        import datetime
        start = datetime.datetime.now()
        master_times = datetime.datetime.now()
        arcpy.AddMessage("Copying Features.")
        tds_features = arcpy.CopyFeatures_management(
            tds_features, os.path.join(output_gdb,
                                       "PositionalOffset_Roads"))[0]
        polygon_grid = arcpy.CopyFeatures_management(
            polygon_grid, os.path.join(output_gdb, "PositionalOffset_Grid"))[0]
        arcpy.AddMessage("Total Time to Copy %s" %
                         (datetime.datetime.now() - master_times))
        # Strip every field from the copy except geometry/OID system fields
        # and the user-requested fields.
        desc = arcpy.Describe(tds_features)
        efields = ['SHAPE', 'OID', 'FID', 'OBJECTID', 'SHAPE_LENGTH']
        efields = efields + in_fields.split(';')
        if hasattr(desc, 'OIDFieldName'):
            efields.append(desc.OIDFieldName)
        if hasattr(desc, 'lengthFieldName'):
            efields.append(desc.lengthFieldName)
        if hasattr(desc, 'areaFieldName'):
            efields.append(desc.areaFieldName)
        for f in [field.name for field in arcpy.ListFields(tds_features)]:
            if f.upper() not in efields:
                try:
                    arcpy.DeleteField_management(tds_features, f)
                    arcpy.AddMessage(" Deleted " + f)
                except:
                    # Required/system fields cannot be deleted; best-effort.
                    arcpy.AddMessage("Failed to Delete " + f)
            del f
        del desc, efields
        if sample_size <= 100:
            sample_size = sample_size / 100
            is_percentage = True
        tds_sdf = SpatialDataFrame.from_featureclass(tds_features, sr=sr)
        if is_percentage:
            sample_size = int(len(tds_sdf) * sample_size)
        sampled_sdf = tds_sdf.sample(n=sample_size).copy()
        cols = {c: c.upper() for c in sampled_sdf.columns}
        sampled_sdf.rename(columns=cols, inplace=True)
        # Reproject to SR specified above.
        #
        sampled_sdf.geometry = sampled_sdf.geometry.projectAs(sr)
        comparison_sdf = SpatialDataFrame.from_featureclass(
            comparative_features, sr=sr)
        cols = {c: c.upper() for c in comparison_sdf.columns}
        comparison_sdf.rename(columns=cols, inplace=True)
        grid_sdf = SpatialDataFrame.from_featureclass(polygon_grid, sr=sr)
        cols = {c: c.upper() for c in grid_sdf.columns}
        grid_sdf.rename(columns=cols, inplace=True)
        grid_sdf['MEAN_CE90'] = 0.0
        grid_sdf['OFFSET_METERS'] = 0.0
        grid_sdf['OSM_MATCH'] = ""
        compare_sindex = comparison_sdf.sindex
        sample_sindex = sampled_sdf.sindex
        counter = 0
        for idx, row in grid_sdf.iterrows():
            print('Processing Grid Number ' + str(counter))
            arcpy.AddMessage('Processing Grid Number ' + str(counter))
            counter = counter + 1
            g = row['SHAPE']
            oid = row['OBJECTID']
            ext = [g.extent.XMin, g.extent.YMin, g.extent.XMax, g.extent.YMax]
            # Spatial-index prefilter by extent, then exact intersection test.
            select_compare_df = comparison_sdf.loc[compare_sindex.intersect(
                ext)]  # OSM Source
            q = select_compare_df.geometry.disjoint(g) == False
            select_compare_df = select_compare_df[q].copy()
            select_sample_df = sampled_sdf.loc[sample_sindex.intersect(
                ext)]  # source NGA
            q = select_sample_df.geometry.disjoint(g) == False
            select_sample_df = select_sample_df[q].copy()
            # Find distances to each other.
            for idx_s, row_s in select_sample_df.iterrows():
                geom = row_s['SHAPE']
                sample_oid = row_s['OBJECTID']
                print('Processing Feature ' + str(sample_oid))
                arcpy.AddMessage('Processing Feature ' + str(sample_oid))
                # Overall bearing of the sampled feature (first to last point),
                # normalized to [0, 360).
                first_point = arcpy.PointGeometry(row_s['SHAPE'].firstPoint,
                                                  sr)
                last_point = arcpy.PointGeometry(row_s['SHAPE'].lastPoint, sr)
                source_angle = (first_point.angleAndDistanceTo(last_point)[0] +
                                360) % 360
                buffer_shape = geom.buffer(distanceTolerance)
                q = select_compare_df.geometry.disjoint(buffer_shape) == False
                for idx_c, row_c in select_compare_df[q].iterrows():
                    geom_clipped = row_c['SHAPE'].intersect(
                        geom.buffer(distanceTolerance), 2)
                    if geom_clipped and geom_clipped.length > 0:
                        first_point = arcpy.PointGeometry(
                            geom_clipped.firstPoint, sr)
                        last_point = arcpy.PointGeometry(
                            geom_clipped.lastPoint, sr)
                        comparative_angle = (
                            first_point.angleAndDistanceTo(last_point)[0] +
                            360) % 360
                        # Accept the comparison feature only when its bearing
                        # is within +/-7.5 degrees of the source bearing or of
                        # its reverse (digitizing direction may differ).
                        if ((comparative_angle <= source_angle + 7.5
                             and comparative_angle >= source_angle - 7.5) or
                            (comparative_angle <= source_angle + 180 + 7.5
                             and comparative_angle >= source_angle + 180 - 7.5)
                                or
                            (comparative_angle <= source_angle - 180 + 7.5 and
                             comparative_angle >= source_angle - 180 - 7.5)):
                            near_distance = geom.distanceTo(geom_clipped)
                            results.append(
                                (sample_oid, row_c['OBJECTID'], oid, "YES",
                                 near_distance)
                            )  # OID of Source (NGA), OID of Grid, Yes, distance to compare feature
                    del idx_c, row_c
                del idx_s
                del row_s
                del geom
                del q
                del buffer_shape

            del ext
            del idx, row
        print(datetime.datetime.now() - start)
        # np.int was removed in NumPy 1.24; use explicit int32 to match the
        # '<i4' join dtypes below.
        dtypes = np.dtype([
            ('SAMPLE_OID', np.int32),  # SAMPLE OID
            ('COMPARE_OID', np.int32),  # COMPARE OID
            ('GRID_OID', np.int32),  # GRID OID
            ('NEAROSM', '|S255'),  #
            ('FEATURE_DISTANCE', np.float64)
        ])
        array = np.array(results, dtype=dtypes)
        df = pd.DataFrame(data=array,
                          columns=[
                              'SAMPLE_OID', 'COMPARE_OID', 'GRID_OID',
                              'NEAROSM', 'FEATURE_DISTANCE'
                          ])
        del df['NEAROSM']
        print("join average grid distance to grid")
        # Mean offset per grid cell -> MEAN_CE90 on the grid.
        sample_mean_distance = df.groupby(
            by='GRID_OID', as_index=False)['FEATURE_DISTANCE'].mean().copy()
        sample_mean_distance['MEAN_CE90'] = sample_mean_distance[
            'FEATURE_DISTANCE']
        sample_mean_distance = sample_mean_distance.to_records(index=False)
        dt1 = np.dtype([('GRID_OID', '<i4'), ('MEAN_CE90', '<f8')])
        join_sample = np.array(sample_mean_distance, dtype=dt1)
        oidName = arcpy.Describe(polygon_grid).oidFieldName
        da.ExtendTable(in_table=polygon_grid,
                       table_match_field=oidName,
                       in_array=join_sample,
                       array_match_field="GRID_OID",
                       append_only=False)
        del sample_mean_distance
        del oidName
        del join_sample
        del dt1
        print("join closest distance to tds_features")
        # Minimum offset per sampled feature -> joined to the road copy.
        dt2 = np.dtype([('SAMPLE_OID', '<i4'), ('FEATURE_DISTANCE', '<f8')])
        tds_join_data = df.groupby(
            by='SAMPLE_OID', as_index=False)['FEATURE_DISTANCE'].min().copy()
        tds_join_data = tds_join_data.to_records(index=False)
        join_sample = np.array(tds_join_data, dtype=dt2)
        oidName = arcpy.Describe(tds_features).oidFieldName
        da.ExtendTable(in_table=tds_features,
                       table_match_field=oidName,
                       in_array=join_sample,
                       array_match_field="SAMPLE_OID",
                       append_only=False)
        del dt2
        del tds_join_data
        del join_sample
        del oidName
        print('return results')
        arcpy.SetParameterAsText(7, polygon_grid)
        arcpy.SetParameterAsText(8, tds_features)
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)