def add_additive_specification_range_domain_to_gdb(in_gdb, in_features, fields, domain_name, field_type="DOUBLE",
                                                   range_min=0, range_max=25):
    """Add an additive shared-row specification RANGE domain to a geodatabase and
    assign it to the chosen numeric fields of a feature class.

    :param in_gdb: input geodatabase to add domains to
    :param in_features: optional feature class whose fields the domain is assigned to
    :param fields: fields to assign the domain to
    :param domain_name: name of the new range domain
    :param field_type: field type used by the domain (default "DOUBLE")
    :param range_min: minimum value allowed by the range domain
    :param range_max: maximum value allowed by the range domain
    """
    try:
        domain_description = str(domain_name) + "_Range_Domain"
        try:
            srl.arc_print("Adding range domain for numeric values...")
            arcpy.CreateDomain_management(in_gdb, domain_name, domain_description, field_type, "RANGE")
        except Exception:
            # Narrowed from a bare except: a bare clause would also swallow
            # KeyboardInterrupt/SystemExit.
            arcpy.AddWarning("Could not create domain. Either it already exists or some other error...")
        srl.arc_print("Set min and max values of domains...")
        arcpy.SetValueForRangeDomain_management(in_gdb, domain_name, range_min, range_max)
        srl.arc_print("Attempting to assign numeric fields ...")
        for field in fields:
            try:
                # Only assign when the field actually exists on the feature class.
                if len(arcpy.ListFields(in_features, field)) > 0:
                    arcpy.AssignDomainToField_management(in_features, field, domain_name)
            except Exception:
                arcpy.AddWarning("Could not assign domain to field {0}...".format(field))
    except Exception:
        srl.arc_print("Tool Script Error!")
        import traceback, sys
        tb = sys.exc_info()[2]
        srl.arc_print("An error occurred on line %i" % tb.tb_lineno)
        arcpy.AddError("The error occurred on line {0}...".format(tb.tb_lineno))
def generate_slice_spec_geojson_file(input_features, sharedstreetid, slice_fields_csv, output_geojson):
    """Create a slice-specification-compliant GeoJSON file.

    :param input_features: feature class that has all of the fields from the crosswalk file
    :param sharedstreetid: unique street ID field as defined by SharedStreets
    :param slice_fields_csv: CSV with the fields required for the slice-spec GeoJSON
    :param output_geojson: output GeoJSON where each line geometry has its slices as properties
    :return: path to the output GeoJSON
    """
    try:
        arcpy.env.overwriteOutput = True
        output_temp_features = os.path.join("in_memory", "Temporary_Slice_Features")
        srl.arc_print("Reading input features...")
        pre_fields = [f.name for f in arcpy.ListFields(input_features)
                      if f.type not in ["OID", "Geometry"]
                      and f.name.lower() not in ["shape_area", "shape_length"]]
        fields = ["SHAPE@"] + pre_fields
        cw_df = srl.arcgis_table_to_df(input_features, fields)
        # One group per street: all slice rows for a street become one geometry.
        cw_groups = cw_df.groupby(sharedstreetid)
        sr = arcpy.Describe(input_features).spatialReference
        output_path, output_name = os.path.split(output_temp_features)
        arcpy.CreateFeatureclass_management(output_path, output_name, "POLYLINE", spatial_reference=sr)
        srl.arc_print("Adding fields to intermediate features...")
        slice_fields = srl.add_fields_from_csv(output_temp_features, slice_fields_csv)
        slice_fields = ["SHAPE@"] + slice_fields
        with arcpy.da.InsertCursor(output_temp_features, slice_fields) as insertCursor:
            srl.arc_print("Established insert cursor for intermediate slice file...", True)
            lineCounter = 0
            for street_id, street_group in cw_groups:
                lineCounter += 1
                try:
                    shape = street_group["SHAPE@"].iloc[0]
                    cw_fields = ["type", "width", "height", "direction", "material", "meta"]
                    slice_group = street_group[cw_fields]
                    # Serialize the street's slices as a JSON array of records.
                    json_slices = slice_group.to_json(orient="records")
                    slice_row = [shape, street_id, json_slices]
                    insertCursor.insertRow(slice_row)
                    if lineCounter % 500 == 0:
                        srl.arc_print("Iterated through feature " + str(lineCounter) + ".", True)
                except Exception as e:
                    srl.arc_print("Failed to iterate through feature " + str(lineCounter) + ".", True)
                    # BUG FIX: e.args can be empty, making e.args[0] raise IndexError;
                    # str(e) is always safe.
                    arcpy.AddWarning(str(e))
        del fields, pre_fields, lineCounter
        srl.arc_print("Exporting intermediate feature class to geojson...")
        arcpy.FeaturesToJSON_conversion(output_temp_features, output_geojson, format_json="FORMATTED",
                                        geoJSON="GEOJSON", outputToWGS84="WGS84",
                                        use_field_alias="USE_FIELD_ALIAS")
        srl.arc_print("Script Complete!")
        # BUG FIX: the docstring documents a return value, but none was returned.
        return output_geojson
    except Exception:
        srl.arc_print("Tool Script Error!")
        import traceback, sys
        tb = sys.exc_info()[2]
        srl.arc_print("An error occurred on line %i" % tb.tb_lineno)
        arcpy.AddError("The error occurred on line {0}...".format(tb.tb_lineno))
def add_additive_specification_coded_domains_to_gdb(in_gdb, in_features, csv, prepended_name="srs_additive",
                                                    field_name_col="FieldName",
                                                    domain_desc_col="DomainDescription",
                                                    field_type_col="FieldType", domain_type_col="DomainType",
                                                    coded_value_col="CodedValues",
                                                    value_descrip_col="ValueDescriptions"):
    """Add additive shared-row specification domains for categorical fields based on
    a CSV of domain definitions. Uses pandas.

    :param in_gdb: input geodatabase to add domains to
    :param in_features: optional feature class whose fields the domains are assigned to
    :param csv: CSV with field specification domains defined
    :param prepended_name: name prepended to each domain
    :param field_name_col: CSV column with the field names
    :param domain_desc_col: CSV column with the domain descriptions
    :param field_type_col: CSV column with the field types
    :param domain_type_col: CSV column with the domain types (CODED vs RANGE)
    :param coded_value_col: CSV column with a semicolon-delimited list of possible values
    :param value_descrip_col: CSV column with a semicolon-delimited list of value descriptions
    """
    try:
        arcpy.env.overwriteOutput = True
        srl.arc_print("Reading input CSV...")
        df = pd.read_csv(csv)
        for index, row in df.iterrows():
            # Process: create the coded value domain for this CSV row.
            try:
                dom_name = str(prepended_name) + "_" + str(row[field_name_col])
                dom_description = row[domain_desc_col]
                field_type = row[field_type_col]
                domain_type = row[domain_type_col]
                # BUG FIX: the original condition used "or", which is always True,
                # so every domain was forced to CODED regardless of the CSV value.
                # Only default to CODED when the type is neither RANGE nor CODED.
                if str(domain_type).upper() not in ("RANGE", "CODED"):
                    domain_type = "CODED"
                srl.arc_print("Creating domain named {0}...".format(str(dom_name)))
                arcpy.CreateDomain_management(in_gdb, dom_name, dom_description, field_type, domain_type)
            except Exception:
                arcpy.AddWarning("Could not add domain {0} - QAQC.".format(dom_name))
            # Store all the domain values in a dictionary with the domain code as the
            # "key" and the domain description as the "value" (dom_dictionary[code]).
            coded_value_list = str(row[coded_value_col]).split(";")
            coded_value_descrip_list = str(row[value_descrip_col]).split(";")
            if len(coded_value_list) != len(coded_value_descrip_list):
                arcpy.AddWarning("The length of coded values and the length of coded descriptions for domain"
                                 "named {0} do not match. Check output coded values if no error occurs..."
                                 .format(dom_name))
            dom_dictionary = OrderedDict()
            srl.arc_print("Setting up domain dictionary...")
            for value, description in zip(coded_value_list, coded_value_descrip_list):
                dom_dictionary[str(value)] = str(description)
            # Cycle through all the domain codes in the dictionary and add each
            # valid value to the domain.
            srl.arc_print("Adding coded values for domain...")
            for code in dom_dictionary:
                try:
                    arcpy.AddCodedValueToDomain_management(in_gdb, dom_name, code,
                                                           str(dom_dictionary[code]).strip())
                except Exception:
                    srl.arc_print("Could not add coded values and descriptions for value {0} "
                                  "and description {1}...".format(code, [dom_dictionary[code]]))
        if arcpy.Exists(in_features):
            srl.arc_print("Assign domains to in features fields...")
            try:
                for field in pd.unique(df[field_name_col]):
                    dom_name = str(prepended_name) + "_" + str(field)
                    arcpy.AssignDomainToField_management(in_features, field, dom_name)
            except Exception:
                arcpy.AddError(
                    "Could not assign all domains to fields in feature class. Check inputs for correct fields...")
        srl.arc_print("Script Complete!")
    except Exception:
        srl.arc_print("Tool Script Error!")
        import traceback, sys
        tb = sys.exc_info()[2]
        srl.arc_print("An error occurred on line %i" % tb.tb_lineno)
        arcpy.AddError("The error occurred on line {0}...".format(tb.tb_lineno))
def generate_crosswalk_file(in_features, output_features, slice_fields_csv, additive_spec_slice_order,
                            zone_meta_dict=None):
    """Create a crosswalk feature class in which each input geometry is copied once
    per additive-specification slice. Depends on center_editor_function near top.

    :param in_features: feature class that has additive specification fields
    :param output_features: crosswalk feature class with indices indicating slices
    :param slice_fields_csv: CSV with fields to be added for the crosswalk file
    :param additive_spec_slice_order: list of width fields going from left to right
    :param zone_meta_dict: nested dict of {additive_field: {"type": value, "height": 0, ...}}
        giving the crosswalk type, height, direction, etc. for each width field
    :return: feature class where each geometry is copied and slices named per the spec
    """
    try:
        # BUG FIX: the original used a mutable default ({}) that was mutated via
        # setdefault, leaking state across calls.
        if zone_meta_dict is None:
            zone_meta_dict = {}
        arcpy.env.overwriteOutput = True
        srl.arc_print("Reading input features...")
        pre_fields = [f.name for f in arcpy.ListFields(in_features) if f.type not in ["OID"]
                      and f.name.lower() not in ["shape_area", "shape_length"]]
        fields = ["SHAPE@"] + pre_fields
        cursor = arcpy.da.SearchCursor(in_features, fields)
        output_path, output_name = os.path.split(output_features)
        arcpy.CreateFeatureclass_management(output_path, output_name, "POLYLINE")
        srl.arc_print("Adding fields to crosswalk...")
        crosswalk_fields = srl.add_fields_from_csv(output_features, slice_fields_csv)
        crosswalk_fields = ["SHAPE@"] + crosswalk_fields
        additive_dict = srl.construct_index_dict(fields)
        with arcpy.da.InsertCursor(output_features, crosswalk_fields) as insertCursor:
            srl.arc_print("Established insert cursor for crosswalk output feature class...", True)
            lineCounter = 0
            for index, street in enumerate(cursor, start=1):
                try:
                    lineCounter += 1
                    linegeo = street[0]
                    # BUG FIX: the original referenced the undefined name
                    # "additive_fields_slice_order" (NameError).
                    additive_slice_values = srl.retrieve_row_values(street, additive_spec_slice_order,
                                                                    additive_dict)
                    # Keep only the width fields that carry a non-zero/non-null value.
                    non_zero_width_fields = [(col, val) for col, val in
                                             zip(additive_spec_slice_order, additive_slice_values) if val]
                    slice_id = 0
                    sharedstreetid = street[additive_dict["SharedStreetID"]] if street[
                        additive_dict["SharedStreetID"]] else lineCounter
                    for field, width in non_zero_width_fields:
                        # BUG FIX: previously unbound when no *_Meta field existed.
                        meta_tag_value = None
                        current_meta_field = str(field) + "_Meta"
                        if srl.field_exist(in_features, current_meta_field):
                            meta_tag_value = street[additive_dict[current_meta_field]]
                            # BUG FIX: setdefault on the outer dict too, so a field
                            # missing from zone_meta_dict no longer raises KeyError.
                            zone_meta_dict.setdefault(field, {}).setdefault(
                                "meta", json.dumps({"type": meta_tag_value}))
                        if "CENTER" in str(field).upper():
                            slices_added, slice_id = center_editor_function(insertCursor, width, meta_tag_value,
                                                                            linegeo, sharedstreetid, slice_id)
                            if slices_added:
                                # Slices were added already; continue to next field.
                                continue
                            # Otherwise, add the slice as normal below.
                        # Renamed from "type" to avoid shadowing the builtin.
                        slice_type = zone_meta_dict.get(field, {}).get("type")
                        width = abs(float(width))
                        height = zone_meta_dict.get(field, {}).get("height", 0)
                        direction = zone_meta_dict.get(field, {}).get("direction", "bidirectional")
                        material = zone_meta_dict.get(field, {}).get("material", "asphalt")
                        # BUG FIX: the original fallback json.dumps({}) is a str,
                        # which has no .get(); fall back to an empty dict instead.
                        meta = zone_meta_dict.get(field, {}).get("meta")
                        slice_row = [linegeo, sharedstreetid, slice_id, slice_type, width, height, direction,
                                     material, meta]
                        slice_id += 1
                        insertCursor.insertRow(slice_row)
                    if lineCounter % 500 == 0:
                        srl.arc_print("Iterated through feature " + str(lineCounter) + ".", True)
                except Exception as e:
                    srl.arc_print("Failed to iterate through feature " + str(lineCounter) + ".", True)
                    # str(e) is safe even when e.args is empty.
                    arcpy.AddWarning(str(e))
        del cursor, fields, pre_fields, lineCounter
        srl.arc_print("Script Complete!")
    except Exception:
        srl.arc_print("Tool Script Error!")
        import traceback, sys
        tb = sys.exc_info()[2]
        srl.arc_print("An error occurred on line %i" % tb.tb_lineno)
        arcpy.AddError("The error occurred on line {0}...".format(tb.tb_lineno))
def consolidate_centerline(in_fc, out_fc, out_consolidation_table, merge_field, merge_distance, sum_fields=[],
                           mean_fields=[], first_fields=[], concat_fields=[], character_field=None):
    """Collapse a centerline and compile the collapsed fields using a combination of
    the MergeDividedRoads tool in ArcGIS and pandas manipulation of its merge table.

    :param in_fc: input feature class to consolidate
    :param out_fc: output consolidated feature class
    :param out_consolidation_table: output table of consolidated lines (often in_memory)
    :param merge_field: usually a oneway field; 1 marks segments to merge, 0 locks them
    :param merge_distance: distance apart a line can be to consider for consolidation
        (avoid larger than a block)
    :param sum_fields: fields summed across the two matching collapsed segments
    :param mean_fields: fields averaged across the two matching collapsed segments
    :param first_fields: fields taking the first value found across matched segments
    :param concat_fields: text/categorical fields concatenated with ";" across segments
    :param character_field: field that assists MergeDividedRoads in merging roads
        appropriately (see ArcGIS docs)
    """
    try:
        arcpy.env.overwriteOutput = True
        object_id_field = arcpy.Describe(in_fc).OIDFieldName
        srl.arc_print("Merging divided roads...")
        arcpy.MergeDividedRoads_cartography(in_fc, merge_field, merge_distance, out_fc,
                                            out_displacement_features=None, character_field=character_field,
                                            out_table=out_consolidation_table)
        srl.arc_print("Reading consolidation table...")
        output_fid, input_fid = "OUTPUT_FID", "INPUT_FID"
        consolidation_df = srl.arcgis_table_to_df(out_consolidation_table, [output_fid, input_fid])
        # BUG FIX: str(tuple(...)) renders a single id as "(5,)", which is invalid
        # SQL; build the IN-list explicitly instead.
        unique_ids = consolidation_df[input_fid].unique()
        sql_query = "{0} in ({1})".format(object_id_field, ", ".join(str(i) for i in unique_ids))
        srl.arc_print("Reading input feature classes consolidated...")
        all_fields = sum_fields + mean_fields + first_fields + concat_fields
        # Filter non-existent fields.
        all_fields = [i for i in all_fields if srl.field_exist(in_fc, i)]
        consolidated_input_df = srl.arcgis_table_to_df(in_fc, all_fields, sql_query)
        consolidated_input_df = consolidated_input_df.merge(consolidation_df, how="left", left_index=True,
                                                            right_on=input_fid)
        srl.arc_print("Summarizing statistics of fields by the priority of sums,means, first, and concat...")
        consolidated_input_df_groups = consolidated_input_df.groupby(output_fid)
        # Build one aggregation rule and one prefixed output-column name per field,
        # by priority: sum, mean, first, then concat.
        agg_dict = {}
        new_columns = {}
        for field in all_fields:
            if field in sum_fields:
                agg_dict[field] = "sum"
                new_columns[field] = "sum_" + str(field)
            elif field in mean_fields:
                agg_dict[field] = "mean"
                new_columns[field] = "mean_" + str(field)
            elif field in first_fields:
                agg_dict[field] = "first"
                new_columns[field] = "first_" + str(field)
            else:
                agg_dict[field] = lambda x: ";".join(x)
                new_columns[field] = "concat_" + str(field)
        summarized_features = consolidated_input_df_groups.agg(agg_dict)
        summarized_features = summarized_features.rename(columns=new_columns)
        join_fields = list(summarized_features.columns)
        summarized_features = summarized_features.reset_index()
        temp_summary = os.path.join("in_memory", "summary_table")
        srl.arc_print("Exporting out summary table...")
        scratch_ws = arcpy.env.scratchFolder
        temp_csv = os.path.join(scratch_ws, "summarized_features.csv")
        summarized_features.to_csv(temp_csv)
        arcpy.TableToTable_conversion(temp_csv, "in_memory", "summary_table")
        try:
            # Best-effort cleanup: removing the scratch folder can fail if it holds
            # other scratch data; a failure here must not abort the join below.
            os.remove(temp_csv)
            os.rmdir(scratch_ws)
        except OSError:
            pass
        # BUG FIX: the original referenced the undefined name "output_feature_class"
        # (NameError) throughout this section; the tool's output is out_fc.
        out_oid_field = arcpy.Describe(out_fc).OIDFieldName
        srl.arc_print("Joining summary fields to output...")
        join_field = "CenterID"
        arcpy.AddField_management(out_fc, join_field, "LONG")
        arcpy.CalculateField_management(out_fc, join_field, "!{0}!".format(out_oid_field))
        arcpy.JoinField_management(out_fc, join_field, temp_summary, output_fid, join_fields)
        srl.arc_print("Populating non-collapsed values with originals...")
        calc_func = """def fill_if_none(old_field, new_field):
    if new_field is None:
        return old_field
    else:
        return new_field
"""
        for i in all_fields:
            arcpy.CalculateField_management(out_fc, new_columns[i],
                                            "fill_if_none(!{0}!,!{1}!)".format(i, new_columns[i]),
                                            "PYTHON", code_block=calc_func)
        srl.arc_print("Script Complete!")
    except Exception:
        srl.arc_print("Tool Script Error!")
        import traceback, sys
        tb = sys.exc_info()[2]
        # Consistent with the sibling tools: report the failing line number.
        srl.arc_print("An error occurred on line %i" % tb.tb_lineno)
        arcpy.AddError("The error occurred on line {0}...".format(tb.tb_lineno))
def generate_complete_street_attributes(input_features, output_features, null_value=0):
    """Create CityEngine Complete Street rule attributes from the Additive Shared Spec.

    :param input_features: input feature class with additive spec attributes
    :param output_features: output feature class with complete street attributes
    :param null_value: int value used in place of null numeric values (default 0)
    """
    try:
        arcpy.env.overwriteOutput = True
        srl.arc_print("Reading input features...")
        # Step 1 - create a copy of the feature class at the output location.
        arcpy.CopyFeatures_management(input_features, output_features)
        srl.arc_print("Adding Complete Street Rule Attribute fields...")
        # Step 2 - add fields to the feature class for the Complete Street Rule.
        complete_street_attributes = ["streetWidth", "sidewalkWidthRight", "sidewalkWidthLeft",
                                      "Left_Buffer_Width", "Left_Bike_Lane_Width", "Left_Parking_Width",
                                      "Center_Width", "Right_Buffer_Width", "Right_Bike_Lane_Width",
                                      "Right_Parking_Width"]
        complete_street_attributes_text = ["Center_Type", "Left_Buffer_Type", "Right_Buffer_Type"]
        for rule_attr in complete_street_attributes:
            srl.add_new_field(output_features, rule_attr, "DOUBLE")
        for rule_attr in complete_street_attributes_text:
            srl.add_new_field(output_features, rule_attr, "TEXT")
        # Step 3 - use an update cursor to fill the new fields from the additive fields.
        fields = [f.name for f in arcpy.ListFields(output_features) if f.type not in ["OID"]
                  and f.name.lower() not in ["shape", "shape_area", "shape_length"]]
        num_fields = [f.name for f in arcpy.ListFields(output_features) if
                      f.type in ["Double", "Long", "Short", "Float"]]
        text_fields = [f.name for f in arcpy.ListFields(output_features) if f.type in ["Text"]]
        field_dictionary = srl.construct_index_dict(fields)
        # Support up to 4 right and left through lanes. Hoisted out of the row loop:
        # the field list does not change per row, and ListFields is expensive.
        LTL_List = [f.name for f in arcpy.ListFields(output_features) if "Left_Through_Lane" in f.name]
        RTL_List = [f.name for f in arcpy.ListFields(output_features) if "Right_Through_Lane" in f.name]
        srl.arc_print("Update rows with complete street rule values...")
        with arcpy.da.UpdateCursor(output_features, fields) as cursor:
            for row in cursor:
                # Set all None numeric values to null_value in the starting row.
                for field in num_fields:
                    index = field_dictionary.get(field, None)
                    if index is None:
                        continue
                    if row[index] is None:
                        row[index] = null_value
                # Finding the street width components.
                center_lane_width = row[field_dictionary.get('Center_Lane')]
                left_bike_buffer_width = row[field_dictionary.get("Left_Bike_Buffer")]
                left_bike_lane_width = row[field_dictionary.get("Left_Bike_Lane")]
                left_transit_width = row[field_dictionary.get("Left_Transit_Lane")]
                left_parking_width = row[field_dictionary.get("Left_Parking_Lane")]
                right_bike_buffer_width = row[field_dictionary.get("Right_Bike_Buffer")]
                right_bike_lane_width = row[field_dictionary.get("Right_Bike_Lane")]
                right_parking_width = row[field_dictionary.get("Right_Parking_Lane")]
                right_transit_width = row[field_dictionary.get("Right_Transit_Lane")]
                # Guarded lookups: row[None] would raise TypeError if a through-lane
                # field was excluded from the cursor's field list.
                LTL_Num_List = [row[field_dictionary.get(name)] for name in LTL_List
                                if field_dictionary.get(name) is not None
                                and row[field_dictionary.get(name)] is not None]
                Left_Lane_Widths = sum(LTL_Num_List)
                RTL_Num_List = [row[field_dictionary.get(name)] for name in RTL_List
                                if field_dictionary.get(name) is not None
                                and row[field_dictionary.get(name)] is not None]
                Right_Lane_Widths = sum(RTL_Num_List)
                # For sidewalk widths.
                ls_frontage_width = row[field_dictionary.get("Left_Sidewalk_Frontage_Zone")]
                ls_furniture_width = row[field_dictionary.get("Left_Sidewalk_Furniture_Zone")]
                ls_through_width = row[field_dictionary.get("Left_Sidewalk_Through_Zone")]
                rs_frontage_width = row[field_dictionary.get("Right_Sidewalk_Frontage_Zone")]
                rs_furniture_width = row[field_dictionary.get("Right_Sidewalk_Furniture_Zone")]
                rs_through_width = row[field_dictionary.get("Right_Sidewalk_Through_Zone")]
                # Adding up all found values for sidewalk and streetscape.
                streetWidthValue = (center_lane_width + right_bike_lane_width + left_bike_lane_width +
                                    left_parking_width + right_parking_width + Right_Lane_Widths +
                                    Left_Lane_Widths + left_bike_buffer_width + right_bike_buffer_width +
                                    right_transit_width + left_transit_width)
                sidewalkLeftValue = ls_frontage_width + ls_furniture_width + ls_through_width
                sidewalkRightValue = rs_frontage_width + rs_furniture_width + rs_through_width
                row[field_dictionary.get("streetWidth")] = streetWidthValue
                row[field_dictionary.get("Center_Width")] = center_lane_width
                row[field_dictionary.get("sidewalkWidthRight")] = sidewalkRightValue
                row[field_dictionary.get("sidewalkWidthLeft")] = sidewalkLeftValue
                # Update fields for bike lanes, bike lane buffers, and parking lanes.
                row[field_dictionary.get("Left_Bike_Lane_Width")] = left_bike_lane_width
                row[field_dictionary.get("Left_Buffer_Width")] = left_bike_buffer_width
                row[field_dictionary.get("Left_Parking_Width")] = left_parking_width
                row[field_dictionary.get("Right_Bike_Lane_Width")] = right_bike_lane_width
                row[field_dictionary.get("Right_Buffer_Width")] = right_bike_buffer_width
                row[field_dictionary.get("Right_Parking_Width")] = right_parking_width
                # Replace None text values with the literal string "None".
                for field2 in text_fields:
                    index2 = field_dictionary.get(field2, None)
                    if index2 is None:
                        continue
                    if row[index2] is None:
                        row[index2] = "None"
                # Pull the meta text values that feed the CityEngine type attributes.
                center_lane_type = row[field_dictionary.get("Center_Lane_Meta")]
                left_buffer_type = row[field_dictionary.get("Left_Bike_Buffer_Meta")]
                right_buffer_type = row[field_dictionary.get("Right_Bike_Buffer_Meta")]
                # Make the text values the CityEngine Complete Streets attributes.
                row[field_dictionary.get("Center_Type")] = center_lane_type
                row[field_dictionary.get("Left_Buffer_Type")] = left_buffer_type
                row[field_dictionary.get("Right_Buffer_Type")] = right_buffer_type
                cursor.updateRow(row)
        # Step 4 - delete all old additive fields.
        # BUG FIX: the original kept only the numeric rule attributes, so the newly
        # added TEXT rule fields (Center_Type, Left_Buffer_Type, Right_Buffer_Type)
        # were deleted along with the additive fields. Keep both lists.
        keep_fields = set(complete_street_attributes) | set(complete_street_attributes_text)
        fields_to_delete = [i for i in fields if i not in keep_fields]
        arcpy.DeleteField_management(output_features, fields_to_delete)
        srl.arc_print("Script Complete!")
    except Exception:
        srl.arc_print("Tool Script Error!")
        import traceback, sys
        tb = sys.exc_info()[2]
        srl.arc_print("An error occurred on line %i" % tb.tb_lineno)
        arcpy.AddError("The error occurred on line {0}...".format(tb.tb_lineno))