Example #1
def compute_adjacency_list(input_points, input_network, id_attribute,
                           impedance_attribute, accumulator_attributes,
                           search_radius, output_location, adj_dbf_name):
    """
  |input_points|: point shape file marking entity (e.g. building) locations
  |input_network|: street network in which |input_points| is located
  |id_attribute|: the name of attribute that distinguishes between input points
  |impedance_attribute|: distance between neighboring nodes will be based on
      this attribute
  |accumulator_attributes|: distance between neighboring nodes will also be
      recorded for these attributes
  |search_radius|: the maximum extent for centrality computation
  |output_location|: adjacency list dbf will be saved here
  |adj_dbf_name|: the name of the adjacency list dbf
  """

    # Number of points in |input_points|
    input_point_count = int(GetCount_management(input_points).getOutput(0))

    # Make a directory to store all auxiliary files
    auxiliary_dir = join(output_location, AUXILIARY_DIR_NAME)
    if not Exists(auxiliary_dir):
        mkdir(auxiliary_dir)

    # Record the edge and junction source names of |input_network|
    junction_feature, edge_feature = network_features(input_network)

    # Calculate network locations if not already calculated
    test_input_point = UpdateCursor(input_points).next()
    locations_calculated = all(
        row_has_field(test_input_point, field)
        for field in NETWORK_LOCATION_FIELDS)
    if not locations_calculated:
        calculate_network_locations(input_points, input_network)

    # Calculate barrier cost per input point if not already calculated
    barrier_costs_calculated = row_has_field(test_input_point,
                                             trim(BARRIER_COST_FIELD))
    if not barrier_costs_calculated:
        AddMessage(BARRIER_COST_COMPUTATION_STARTED)
        # Add |BARRIER_COST_FIELD| column in |input_points|
        AddField_management(in_table=input_points,
                            field_name=trim(BARRIER_COST_FIELD),
                            field_type="DOUBLE",
                            field_is_nullable="NON_NULLABLE")

        # Initialize a dictionary to store the frequencies of (SnapX, SnapY) values
        xy_count = {}
        # A method to retrieve a (SnapX, SnapY) pair for a row in |input_points|
        get_xy = lambda row: (row.getValue(trim("SnapX")),
                              row.getValue(trim("SnapY")))

        barrier_pre_progress = Progress_Bar(input_point_count, 1,
                                            BARRIER_COST_PRE_PROCESSING)
        rows = UpdateCursor(input_points)
        for row in rows:
            snap_xy = get_xy(row)
            if snap_xy in xy_count:
                xy_count[snap_xy] += 1
            else:
                xy_count[snap_xy] = 1
            barrier_pre_progress.step()

        # Populate |BARRIER_COST_FIELD|; it is used in the OD matrix computation
        barrier_progress = Progress_Bar(input_point_count, 1,
                                        BARRIER_COST_COMPUTATION)
        rows = UpdateCursor(input_points)
        for row in rows:
            barrier_cost = BARRIER_COST / xy_count[get_xy(row)]
            row.setValue(trim(BARRIER_COST_FIELD), barrier_cost)
            rows.updateRow(row)
            barrier_progress.step()
        AddMessage(BARRIER_COST_COMPUTATION_FINISHED)

    # Necessary files
    od_cost_matrix_layer = join(auxiliary_dir, OD_COST_MATRIX_LAYER_NAME)
    od_cost_matrix_lines = join(od_cost_matrix_layer, OD_COST_MATRIX_LINES)
    temp_adj_dbf_name = TEMP_ADJACENCY_DBF_NAME(adj_dbf_name)
    temp_adj_dbf = join(output_location, temp_adj_dbf_name)
    adj_dbf = join(output_location, adj_dbf_name)
    partial_adj_dbf = join(auxiliary_dir, PARTIAL_ADJACENCY_LIST_NAME)
    polygons = join(auxiliary_dir, POLYGONS_SHAPEFILE_NAME)
    raster = join(auxiliary_dir, RASTER_NAME)
    polygons_layer = join(auxiliary_dir, POLYGONS_LAYER_NAME)
    input_points_layer = join(auxiliary_dir, INPUT_POINTS_LAYER_NAME)

    # Make sure none of these files already exists
    for path in [
            od_cost_matrix_layer, temp_adj_dbf, adj_dbf, partial_adj_dbf,
            polygons, raster, polygons_layer, input_points_layer,
            od_cost_matrix_lines
    ]:
        delete(path)

    # Cutoff radius for OD matrix computation
    cutoff_radius = 2 * BARRIER_COST + min(search_radius, BARRIER_COST / 2)

    # Compute OD matrix
    MakeODCostMatrixLayer_na(in_network_dataset=input_network,
                             out_network_analysis_layer=od_cost_matrix_layer,
                             impedance_attribute=impedance_attribute,
                             default_cutoff=str(cutoff_radius),
                             accumulate_attribute_name=accumulator_attributes,
                             UTurn_policy="ALLOW_UTURNS",
                             hierarchy="NO_HIERARCHY",
                             output_path_shape="NO_LINES")

    # Determine raster cell size
    points_per_raster_cell = OD_MATRIX_ENTRIES / input_point_count
    raster_cell_count = max(1, input_point_count / points_per_raster_cell)
    input_points_extent = Describe(input_points).Extent
    raster_cell_area = (input_points_extent.width *
                        input_points_extent.height / raster_cell_count)
    raster_cell_size = int(sqrt(raster_cell_area))

    # Construct |raster| from |input_points|
    PointToRaster_conversion(in_features=input_points,
                             value_field=id_attribute,
                             out_rasterdataset=raster,
                             cell_assignment="MOST_FREQUENT",
                             priority_field="NONE",
                             cellsize=str(raster_cell_size))

    # Construct |polygons| from |raster|
    RasterToPolygon_conversion(in_raster=raster,
                               out_polygon_features=polygons,
                               simplify="NO_SIMPLIFY",
                               raster_field="VALUE")

    # Export empty |od_cost_matrix_lines| to |temp_adj_dbf| to start the adjacency list
    TableToTable_conversion(in_rows=od_cost_matrix_lines,
                            out_path=output_location,
                            out_name=temp_adj_dbf_name)

    # Construct |polygons_layer| and |input_points_layer|
    for (feature, layer) in [(polygons, polygons_layer),
                             (input_points, input_points_layer)]:
        MakeFeatureLayer_management(in_features=feature, out_layer=layer)

    def add_locations(sub_layer, field_mappings=""):
        """
    |sub_layer|: one of "Origins", "Destinations", "Barrier Points"
    |field_mappings|: field mappings in addition to those for "Name" and
        "CurbApproach"
    """
        AddLocations_na(in_network_analysis_layer=od_cost_matrix_layer,
                        sub_layer=sub_layer,
                        in_table=input_points_layer,
                        field_mappings=("Name %s #; CurbApproach # 0; %s" %
                                        (id_attribute, field_mappings)),
                        search_tolerance=SEARCH_TOLERANCE,
                        search_criteria=("%s SHAPE; %s SHAPE;" %
                                         (junction_feature, edge_feature)),
                        append="CLEAR",
                        snap_to_position_along_network="SNAP",
                        snap_offset=SNAP_OFFSET)

    # OD cost matrix destinations
    AddMessage(ADDING_DESTINATIONS_STARTED)
    SelectLayerByLocation_management(in_layer=input_points_layer)
    add_locations("Destinations")
    AddMessage(ADDING_DESTINATIONS_FINISHED)

    # OD cost matrix point barriers
    AddMessage(ADDING_BARRIERS_STARTED)
    add_locations("Point Barriers",
                  ("FullEdge # 0; BarrierType # 2;"
                   "Attr_%s %s #;" %
                   (impedance_attribute, trim(BARRIER_COST_FIELD))))
    AddMessage(ADDING_BARRIERS_FINISHED)

    # Compute adjacency list, one raster cell at a time
    progress = Progress_Bar(raster_cell_count, 1, STEP_1)
    rows = UpdateCursor(polygons)
    for row in rows:
        # Select the current polygon
        SelectLayerByAttribute_management(in_layer_or_view=polygons_layer,
                                          selection_type="NEW_SELECTION",
                                          where_clause="FID = %s" %
                                          str(row.FID))

        # Origins
        SelectLayerByLocation_management(in_layer=input_points_layer,
                                         select_features=polygons_layer)
        add_locations("Origins")

        # Solve OD Cost matrix
        Solve_na(in_network_analysis_layer=od_cost_matrix_layer,
                 ignore_invalids="SKIP")

        # Add origin and destination fields to the adjacency list dbf
        for (index, field) in [(0, ORIGIN_ID_FIELD_NAME),
                               (1, DESTINATION_ID_FIELD_NAME)]:
            CalculateField_management(in_table=od_cost_matrix_lines,
                                      field=field,
                                      expression="!Name!.split(' - ')[%d]" %
                                      index,
                                      expression_type="PYTHON")

        # Record actual distance between neighboring nodes
        distance_field = "Total_%s" % impedance_attribute
        CalculateField_management(in_table=od_cost_matrix_lines,
                                  field=distance_field,
                                  expression="!%s! - 2 * %d" %
                                  (distance_field, BARRIER_COST),
                                  expression_type="PYTHON")

        # Append result to |temp_adj_dbf|
        TableToTable_conversion(in_rows=od_cost_matrix_lines,
                                out_path=auxiliary_dir,
                                out_name=PARTIAL_ADJACENCY_LIST_NAME)
        Append_management(inputs=partial_adj_dbf,
                          target=temp_adj_dbf,
                          schema_type="TEST")

        progress.step()

    # Copy data from |temp_adj_dbf| to |adj_dbf|
    Rename_management(in_data=temp_adj_dbf, out_data=adj_dbf)

    # Clean up
    for path in [
            od_cost_matrix_layer, partial_adj_dbf, polygons, raster,
            polygons_layer, input_points_layer, auxiliary_dir
    ]:
        delete(path)
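A note on the barrier-cost bookkeeping in this example: every input point is also loaded as a point barrier, and points that snap to the same network location (identical SnapX/SnapY) split a single BARRIER_COST between them, so a solved route between two distinct points appears to pick up about 2 * BARRIER_COST of extra impedance, which the CalculateField_management call above subtracts back out. The arcpy-free sketch below walks through that arithmetic with plain tuples in place of cursor rows; snapped_points and the BARRIER_COST value are illustrative, not part of the toolbox.

from collections import Counter

BARRIER_COST = 100000.0  # illustrative value; the real constant lives in the toolbox

# Hypothetical snapped coordinates for five input points; two pairs coincide.
snapped_points = [(0.0, 0.0), (0.0, 0.0), (5.0, 3.0), (5.0, 3.0), (9.0, 1.0)]

# First pass: count how many points share each snapped location.
xy_count = Counter(snapped_points)

# Second pass: each point carries BARRIER_COST / count, so the barrier costs
# at any one snapped location always sum to exactly BARRIER_COST.
barrier_costs = [BARRIER_COST / xy_count[xy] for xy in snapped_points]

# A route between two distinct points crosses the barriers at both endpoints,
# so the solver's Total_<impedance> includes roughly 2 * BARRIER_COST, which
# is subtracted back out before the adjacency list is written.
raw_total = 2 * BARRIER_COST + 42.0  # hypothetical solver output
true_distance = raw_total - 2 * BARRIER_COST
print(barrier_costs)
print(true_distance)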
Example #2
      try:
        compute_adjacency_list(inputs[INPUT_POINTS], inputs[INPUT_NETWORK],
            inputs[ID_ATTRIBUTE], inputs[IMPEDANCE_ATTRIBUTE],
            inputs[ACCUMULATOR_ATTRIBUTES], inputs[SEARCH_RADIUS],
            inputs[OUTPUT_LOCATION], adj_dbf_name)
        AddMessage(STEP_1_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_1_FAILED)
        success = False

  # Step 2
  if success:
    AddMessage(STEP_2_STARTED)
    try:
      distance_field = trim("Total_%s" % inputs[IMPEDANCE_ATTRIBUTE])
      accumulator_fields = set([trim("Total_%s" % accumulator_attribute)
          for accumulator_attribute in inputs[ACCUMULATOR_ATTRIBUTES].split(";")
          if accumulator_attribute != "#"])
      # Graph representation: dictionary mapping node ids to Node objects
      nodes = {}
      # The number of rows in |adj_dbf|
      directed_edge_count = int(GetCount_management(adj_dbf).getOutput(0))
      graph_progress = Progress_Bar(directed_edge_count, 1, STEP_2)
      rows = UpdateCursor(adj_dbf)
      for row in rows:
        # Get neighboring nodes, and the distance between them
        origin_id = row.getValue(trim(ORIGIN_ID_FIELD_NAME))
        destination_id = row.getValue(trim(DESTINATION_ID_FIELD_NAME))
        distance = float(row.getValue(distance_field))
        # Make sure the nodes are recorded in the graph
def compute_adjacency_list(input_points, input_network, id_attribute,
    impedance_attribute, accumulator_attributes, search_radius, output_location,
    adj_dbf_name):
  """
  |input_points|: point shapefile marking entity (e.g. building) locations
  |input_network|: street network in which |input_points| is located
  |id_attribute|: the name of the attribute that distinguishes between
      input points
  |impedance_attribute|: distance between neighboring nodes will be based on
      this attribute
  |accumulator_attributes|: distance between neighboring nodes will also be
      recorded for these attributes
  |search_radius|: the maximum extent for centrality computation
  |output_location|: the adjacency list dbf will be saved here
  |adj_dbf_name|: the name of the adjacency list dbf
  """

  # Number of points in |input_points|
  input_point_count = int(GetCount_management(input_points).getOutput(0))

  # Make a directory to store all auxiliary files
  auxiliary_dir = join(output_location, AUXILIARY_DIR_NAME)
  if not Exists(auxiliary_dir):
    mkdir(auxiliary_dir)

  # Record the edge and junction source names of |input_network|
  edge_feature = None
  junction_feature = None
  for source in Describe(input_network).sources:
    if source.sourceType == EDGE_FEATURE:
      edge_feature = source.name
    elif source.sourceType in JUNCTION_FEATURE:
      junction_feature = source.name
  if edge_feature is None:
    AddWarning(WARNING_NO_EDGE_FEATURE(input_network))
    raise Invalid_Input_Exception("Input Network")
  if junction_feature is None:
    AddWarning(WARNING_NO_JUNCTION_FEATURE(input_network))
    raise Invalid_Input_Exception("Input Network")

  # Calculate network locations if not already calculated
  test_input_point = UpdateCursor(input_points).next()
  locations_calculated = all(row_has_field(test_input_point, field)
      for field in NETWORK_LOCATION_FIELDS)
  if not locations_calculated:
    AddMessage(CALCULATE_LOCATIONS_STARTED)
    CalculateLocations_na(in_point_features=input_points,
        in_network_dataset=input_network,
        search_tolerance=SEARCH_TOLERANCE,
        search_criteria=("%s SHAPE; %s SHAPE;" %
            (junction_feature, edge_feature)),
        exclude_restricted_elements="INCLUDE")
    AddMessage(CALCULATE_LOCATIONS_FINISHED)

  # Calculate barrier cost per input point if not already calculated
  barrier_costs_calculated = row_has_field(test_input_point,
      trim(BARRIER_COST_FIELD))
  if not barrier_costs_calculated:
    AddMessage(BARRIER_COST_COMPUTATION_STARTED)
    # Add |BARRIER_COST_FIELD| column in |input_points|
    AddField_management(in_table=input_points,
        field_name=trim(BARRIER_COST_FIELD), field_type="DOUBLE",
        field_is_nullable="NON_NULLABLE")

    # Initialize a dictionary to store the frequencies of (SnapX, SnapY) values
    xy_count = {}
    # A method to retrieve a (SnapX, SnapY) pair for a row in |input_points|
    get_xy = lambda row: (row.getValue(trim("SnapX")),
        row.getValue(trim("SnapY")))

    barrier_pre_progress = Progress_Bar(input_point_count, 1,
        BARRIER_COST_PRE_PROCESSING)
    rows = UpdateCursor(input_points)
    for row in rows:
      snap_xy = get_xy(row)
      if snap_xy in xy_count:
        xy_count[snap_xy] += 1
      else:
        xy_count[snap_xy] = 1
      barrier_pre_progress.step()

    # Populate |BARRIER_COST_FIELD|; it is used in the OD matrix computation
    barrier_progress = Progress_Bar(input_point_count, 1,
        BARRIER_COST_COMPUTATION)
    rows = UpdateCursor(input_points)
    for row in rows:
      barrier_cost = BARRIER_COST / xy_count[get_xy(row)]
      row.setValue(trim(BARRIER_COST_FIELD), barrier_cost)
      rows.updateRow(row)
      barrier_progress.step()
    AddMessage(BARRIER_COST_COMPUTATION_FINISHED)

  # Necessary files
  od_cost_matrix_layer = join(auxiliary_dir, OD_COST_MATRIX_LAYER_NAME)
  od_cost_matrix_lines = join(od_cost_matrix_layer, OD_COST_MATRIX_LINES)
  temp_adj_dbf_name = "%s~.dbf" % adj_dbf_name[:-4]
  temp_adj_dbf = join(output_location, temp_adj_dbf_name)
  adj_dbf = join(output_location, adj_dbf_name)
  partial_adj_dbf = join(auxiliary_dir, PARTIAL_ADJACENCY_LIST_NAME)
  polygons = join(auxiliary_dir, POLYGONS_SHAPEFILE_NAME)
  raster = join(auxiliary_dir, RASTER_NAME)
  polygons_layer = join(auxiliary_dir, POLYGONS_LAYER_NAME)
  input_points_layer = join(auxiliary_dir, INPUT_POINTS_LAYER_NAME)

  # Make sure none of these files already exists
  for path in [od_cost_matrix_layer, temp_adj_dbf, adj_dbf, partial_adj_dbf,
      polygons, raster, polygons_layer, input_points_layer,
      od_cost_matrix_lines]:
    delete(path)

  # Cutoff radius for OD matrix computation
  cutoff_radius = 2 * BARRIER_COST + min(search_radius, BARRIER_COST / 2)

  # Compute OD matrix
  MakeODCostMatrixLayer_na(in_network_dataset=input_network,
      out_network_analysis_layer=od_cost_matrix_layer,
      impedance_attribute=impedance_attribute,
      default_cutoff=str(cutoff_radius),
      accumulate_attribute_name=accumulator_attributes,
      UTurn_policy="ALLOW_UTURNS", hierarchy="NO_HIERARCHY",
      output_path_shape="NO_LINES")

  # Determine raster cell size
  points_per_raster_cell = OD_MATRIX_ENTRIES / input_point_count
  raster_cell_count = max(1, input_point_count / points_per_raster_cell)
  input_points_extent = Describe(input_points).Extent
  raster_cell_area = (input_points_extent.width * input_points_extent.height /
      raster_cell_count)
  raster_cell_size = int(sqrt(raster_cell_area))

  # Construct |raster| from |input_points|
  PointToRaster_conversion(in_features=input_points,
      value_field=id_attribute, out_rasterdataset=raster,
      cell_assignment="MOST_FREQUENT", priority_field="NONE",
      cellsize=str(raster_cell_size))

  # Construct |polygons| from |raster|
  RasterToPolygon_conversion(in_raster=raster,
      out_polygon_features=polygons, simplify="NO_SIMPLIFY",
      raster_field="VALUE")

  # Export empty |od_cost_matrix_lines| to |temp_adj_dbf| to start the adjacency list
  TableToTable_conversion(in_rows=od_cost_matrix_lines,
      out_path=output_location, out_name=temp_adj_dbf_name)

  # Construct |polygons_layer| and |input_points_layer|
  for (feature, layer) in [(polygons, polygons_layer),
      (input_points, input_points_layer)]:
    MakeFeatureLayer_management(in_features=feature, out_layer=layer)

  def add_locations(sub_layer, field_mappings=""):
    """
    |sub_layer|: one of "Origins", "Destinations", "Point Barriers"
    |field_mappings|: field mappings in addition to those for "Name" and
        "CurbApproach"
    """
    AddLocations_na(in_network_analysis_layer=od_cost_matrix_layer,
        sub_layer=sub_layer, in_table=input_points_layer,
        field_mappings=("Name %s #; CurbApproach # 0; %s" %
            (id_attribute, field_mappings)),
        search_tolerance=SEARCH_TOLERANCE,
        search_criteria=("%s SHAPE; %s SHAPE;" %
            (junction_feature, edge_feature)),
        append="CLEAR", snap_to_position_along_network="SNAP",
        snap_offset=SNAP_OFFSET)

  # OD cost matrix destinations
  AddMessage(ADDING_DESTINATIONS_STARTED)
  SelectLayerByLocation_management(in_layer=input_points_layer)
  add_locations("Destinations")
  AddMessage(ADDING_DESTINATIONS_FINISHED)

  # OD cost matrix point barriers
  AddMessage(ADDING_BARRIERS_STARTED)
  add_locations("Point Barriers", ("FullEdge # 0; BarrierType # 2;"
      "Attr_%s %s #;" % (impedance_attribute, trim(BARRIER_COST_FIELD))))
  AddMessage(ADDING_BARRIERS_FINISHED)

  # Compute adjacency list, one raster cell at a time
  progress = Progress_Bar(raster_cell_count, 1, STEP_1)
  rows = UpdateCursor(polygons)
  for row in rows:
    # Select the current polygon
    SelectLayerByAttribute_management(in_layer_or_view=polygons_layer,
        selection_type="NEW_SELECTION", where_clause="FID = %s" % str(row.FID))

    # Origins
    SelectLayerByLocation_management(in_layer=input_points_layer,
        select_features=polygons_layer)
    add_locations("Origins")

    # Solve OD Cost matrix
    Solve_na(in_network_analysis_layer=od_cost_matrix_layer,
        ignore_invalids="SKIP")

    # Add origin and destination fields to the adjacency list dbf
    for (index, field) in [(0, ORIGIN_ID_FIELD_NAME),
        (1, DESTINATION_ID_FIELD_NAME)]:
      CalculateField_management(in_table=od_cost_matrix_lines,
          field=field, expression="!Name!.split(' - ')[%d]" % index,
          expression_type="PYTHON")

    # Record actual distance between neighboring nodes
    distance_field = "Total_%s" % impedance_attribute
    CalculateField_management(in_table=od_cost_matrix_lines,
        field=distance_field,
        expression="!%s! - 2 * %d" % (distance_field, BARRIER_COST),
        expression_type="PYTHON")

    # Append result to |temp_adj_dbf|
    TableToTable_conversion(in_rows=od_cost_matrix_lines,
        out_path=auxiliary_dir, out_name=PARTIAL_ADJACENCY_LIST_NAME)
    Append_management(inputs=partial_adj_dbf, target=temp_adj_dbf,
        schema_type="TEST")

    progress.step()

  # Copy data from |temp_adj_dbf| to |adj_dbf|
  Rename_management(in_data=temp_adj_dbf, out_data=adj_dbf)

  # Clean up
  for path in [od_cost_matrix_layer, partial_adj_dbf, polygons, raster,
      polygons_layer, input_points_layer, auxiliary_dir]:
    delete(path)
def main():
  """
  Runs the centrality tool.
  """
  env.overwriteOutput = True # Enable overwriting
  CheckOutExtension("Network")

  # Tracks the success of the program through the six steps
  success = True

  # Inputs to the tool
  if len(argv) != INPUT_COUNT + 1:
    raise Exception("Invalid number of inputs")
  input_number = index()
  input_number.next() # Skip over sys.argv[0]
  inputs = {}
  inputs[INPUT_BUILDINGS] = argv[input_number.next()]
  inputs[POINT_LOCATION] = ("INSIDE" if argv[input_number.next()] == "true" else
      "CENTROID")
  inputs[INPUT_NETWORK] = argv[input_number.next()]
  inputs[COMPUTE_REACH] = argv[input_number.next()] == "true"
  inputs[COMPUTE_GRAVITY] = argv[input_number.next()] == "true"
  inputs[COMPUTE_BETWEENNESS] = argv[input_number.next()] == "true"
  inputs[COMPUTE_CLOSENESS] = argv[input_number.next()] == "true"
  inputs[COMPUTE_STRAIGHTNESS] = argv[input_number.next()] == "true"
  inputs[ID_ATTRIBUTE] = argv[input_number.next()]
  inputs[NODE_WEIGHT_ATTRIBUTE] = argv[input_number.next()]
  inputs[IMPEDANCE_ATTRIBUTE] = argv[input_number.next()]
  try: inputs[SEARCH_RADIUS] = float(argv[input_number.next()])
  except: inputs[SEARCH_RADIUS] = INFINITE_RADIUS
  inputs[USE_NETWORK_RADIUS] = (argv[input_number.next()] ==
      ON_THE_NETWORK_OPTION)
  try: inputs[BETA] = float(argv[input_number.next()])
  except: raise Invalid_Input_Exception("Beta")
  inputs[NORMALIZE_RESULTS] = [measure for measure in
      argv[input_number.next()].split(";") if measure != "#"]
  inputs[OUTPUT_LOCATION] = argv[input_number.next()]
  inputs[OUTPUT_FILE_NAME] = argv[input_number.next()]
  inputs[ACCUMULATOR_ATTRIBUTES] = argv[input_number.next()]

  # Record the origin nodes for centrality measurements
  # This is important if the user selects a subset of the features to be origins
  selected_features = all_values_in_column(inputs[INPUT_BUILDINGS],
    inputs[ID_ATTRIBUTE])
  # Clear selection if we got a layer file
  try:
    SelectLayerByAttribute_management(inputs[INPUT_BUILDINGS],
      "CLEAR_SELECTION")
  except:
    pass

  # Adjacency List table name
  node_locations_needed = (inputs[COMPUTE_STRAIGHTNESS] or
      not inputs[USE_NETWORK_RADIUS])
  adj_dbf_name = ("%s_%s_%s_%s_%s_%s.dbf" % (ADJACENCY_LIST_NAME,
      basename(inputs[INPUT_BUILDINGS]), basename(inputs[INPUT_NETWORK]),
      inputs[ID_ATTRIBUTE], inputs[IMPEDANCE_ATTRIBUTE],
      inputs[ACCUMULATOR_ATTRIBUTES])).replace("#", "None")
  if len(adj_dbf_name) > MAX_FILE_NAME_LENGTH:
    AddWarning(WARNING_LARGE_ADJ_FILE_NAME)
  adj_dbf = join(inputs[OUTPUT_LOCATION], adj_dbf_name)

  # Output file names
  output_feature_class_name = feature_class_name(inputs[OUTPUT_FILE_NAME])
  output_feature_class = "%s.shp" % join(inputs[OUTPUT_LOCATION],
      output_feature_class_name)
  # Create a feature class that is a copy of the input buildings
  try:
    AddMessage(INPUT_BUILDINGS_COPY_STARTED)
    CreateFeatureclass_management(out_path=inputs[OUTPUT_LOCATION],
        out_name=output_feature_class_name)
    CopyFeatures_management(in_features=inputs[INPUT_BUILDINGS],
        out_feature_class=output_feature_class)
    AddMessage(INPUT_BUILDINGS_COPY_FINISHED)
  except:
    AddWarning(GetMessages(2))
    AddMessage(INPUT_BUILDINGS_COPY_FAILED)
    success = False
  output_layer_name = layer_name(inputs[OUTPUT_FILE_NAME])
  output_layer = "%s.lyr" % join(inputs[OUTPUT_LOCATION], output_layer_name)

  # If output has already been created, don't carry on
  if Exists(output_layer):
    AddWarning(WARNING_OUTPUT_ALREADY_EXISTS)
    success = False

  # We will convert polygon input buildings to point feature class
  buildings_description = Describe(output_feature_class)
  if buildings_description.shapeType == "Point":
    # Input buildings are already a point shape file
    inputs[INPUT_POINTS] = output_feature_class
  elif buildings_description.shapeType == "Polygon":
    # Input buildings need to be converted to point feature class
    point_feature_class_name = POINT_FEATURE_CLASS_NAME(
        basename(output_feature_class), inputs[POINT_LOCATION])
    inputs[INPUT_POINTS] = "%s.shp" % join(inputs[OUTPUT_LOCATION],
        point_feature_class_name)
    # If FID is used as ID attribute, we need to change it since a point
    #     shapefile will be in use
    if inputs[ID_ATTRIBUTE] == "FID":
      inputs[ID_ATTRIBUTE] = ORIGINAL_FID
  else:
    # Input buildings need to be either points or polygons
    raise Invalid_Input_Exception("Input Buildings")

  # Find the appropriate symbology layer
  for metric_index in range(len(METRICS)):
    if inputs[COMPUTE_REACH + metric_index]:
      first_metric = METRICS[metric_index]
      break
  symbology_layer_name = get_symbology_layer_name(
      buildings_description.shapeType, first_metric)
  symbology_layer = join(SYMBOLOGY_DIR, symbology_layer_name)

  def clean_up():
    """
    Removes all auxiliary files
    """
    auxiliary_dir = join(inputs[OUTPUT_LOCATION], AUXILIARY_DIR_NAME)
    od_cost_matrix_layer = join(auxiliary_dir, OD_COST_MATRIX_LAYER_NAME)
    od_cost_matrix_lines = join(auxiliary_dir, OD_COST_MATRIX_LINES)
    temp_adj_dbf_name = "%s~.dbf" % adj_dbf_name[:-4]
    temp_adj_dbf = join(inputs[OUTPUT_LOCATION], temp_adj_dbf_name)
    partial_adj_dbf = join(auxiliary_dir, PARTIAL_ADJACENCY_LIST_NAME)
    polygons = join(auxiliary_dir, POLYGONS_SHAPEFILE_NAME)
    raster = join(auxiliary_dir, RASTER_NAME)
    polygons_layer = join(auxiliary_dir, POLYGONS_LAYER_NAME)
    input_points_layer = join(auxiliary_dir, INPUT_POINTS_LAYER_NAME)
    for delete_path in [input_points_layer, polygons_layer, raster, polygons,
        partial_adj_dbf, temp_adj_dbf, od_cost_matrix_lines,
        od_cost_matrix_layer, auxiliary_dir]:
      delete(delete_path)

  try:
    # Here we carry out the six steps of the tool
    # Step 1
    if success:
      AddMessage(STEP_1_STARTED)
      # If necessary, convert input buildings to point feature class
      if buildings_description.shapeType == "Polygon":
        AddMessage(POINT_CONVERSION_STARTED)
        to_point_feature_class(output_feature_class, inputs[INPUT_POINTS],
            inputs[POINT_LOCATION])
        AddMessage(POINT_CONVERSION_FINISHED)
      if Exists(adj_dbf):
        AddMessage(ADJACENCY_LIST_COMPUTED)
        if node_locations_needed:
          calculate_network_locations(inputs[INPUT_POINTS],
              inputs[INPUT_NETWORK])
        AddMessage(STEP_1_FINISHED)
      else:
        try:
          compute_adjacency_list(inputs[INPUT_POINTS], inputs[INPUT_NETWORK],
              inputs[ID_ATTRIBUTE], inputs[IMPEDANCE_ATTRIBUTE],
              inputs[ACCUMULATOR_ATTRIBUTES], inputs[SEARCH_RADIUS],
              inputs[OUTPUT_LOCATION], adj_dbf_name)
          AddMessage(STEP_1_FINISHED)
        except:
          AddWarning(GetMessages(2))
          AddMessage(STEP_1_FAILED)
          success = False

    # Step 2
    if success:
      AddMessage(STEP_2_STARTED)
      try:
        distance_field = trim("Total_%s" % inputs[IMPEDANCE_ATTRIBUTE])
        accumulator_fields = set([trim("Total_%s" % accumulator_attribute)
            for accumulator_attribute in inputs[ACCUMULATOR_ATTRIBUTES].split(
            ";") if accumulator_attribute != "#"])
        # Graph representation: dictionary mapping node ids to Node objects
        nodes = {}
        # The number of rows in |adj_dbf|
        directed_edge_count = int(GetCount_management(adj_dbf).getOutput(0))
        graph_progress = Progress_Bar(directed_edge_count, 1, STEP_2)
        rows = UpdateCursor(adj_dbf)
        for row in rows:
          # Get neighboring nodes, and the distance between them
          origin_id = row.getValue(trim(ORIGIN_ID_FIELD_NAME))
          destination_id = row.getValue(trim(DESTINATION_ID_FIELD_NAME))
          distance = float(row.getValue(distance_field))
          # Make sure the nodes are recorded in the graph
          for id in [origin_id, destination_id]:
            if id not in nodes:
              nodes[id] = Node()
          # Make sure that the nodes are neighbors in the graph
          if origin_id != destination_id and distance >= 0:
            accumulations = {}
            for field in accumulator_fields:
              accumulations[field] = float(row.getValue(field))
            nodes[origin_id].add_neighbor(destination_id, distance,
              accumulations)
            nodes[destination_id].add_neighbor(origin_id, distance,
              accumulations)
          graph_progress.step()
        N = len(nodes) # The number of nodes in the graph
        if N == 0:
          AddWarning(WARNING_NO_NODES)
          success = False
        AddMessage(STEP_2_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_2_FAILED)
        success = False

    # Step 3
    if success:
      AddMessage(STEP_3_STARTED)
      try:
        get_weights = inputs[NODE_WEIGHT_ATTRIBUTE] != "#"
        get_locations = node_locations_needed
        # Keep track of the number of input points not present in the graph
        point_not_in_graph_count = 0
        input_point_count = int(
            GetCount_management(inputs[INPUT_POINTS]).getOutput(0))
        node_attribute_progress = Progress_Bar(input_point_count, 1, STEP_3)
        rows = UpdateCursor(inputs[INPUT_POINTS])
        for row in rows:
          id = row.getValue(inputs[ID_ATTRIBUTE])
          if id not in nodes:
            point_not_in_graph_count += 1
            continue
          if get_weights:
            setattr(nodes[id], WEIGHT,
                row.getValue(trim(inputs[NODE_WEIGHT_ATTRIBUTE])))
          if get_locations:
            snap_x = row.getValue(trim("SnapX"))
            snap_y = row.getValue(trim("SnapY"))
            setattr(nodes[id], LOCATION, (snap_x, snap_y))
          node_attribute_progress.step()
        if point_not_in_graph_count:
          AddWarning(WARNING_POINTS_NOT_IN_GRAPH(N,
              point_not_in_graph_count))
        AddMessage(STEP_3_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_3_FAILED)
        success = False

    # Step 4
    if success:
      AddMessage(STEP_4_STARTED)
      try:
        # Compute measures
        compute_centrality(nodes, selected_features, inputs[COMPUTE_REACH],
            inputs[COMPUTE_GRAVITY], inputs[COMPUTE_BETWEENNESS],
            inputs[COMPUTE_CLOSENESS], inputs[COMPUTE_STRAIGHTNESS],
            inputs[SEARCH_RADIUS], inputs[USE_NETWORK_RADIUS], inputs[BETA],
            inputs[NORMALIZE_RESULTS], accumulator_fields)
        AddMessage(STEP_4_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_4_FAILED)
        success = False

    # Step 5
    if success:
      AddMessage(STEP_5_STARTED)
      try:
        # Make output layer
        MakeFeatureLayer_management(in_features=output_feature_class,
            out_layer=output_layer_name)
        # Save output layer
        SaveToLayerFile_management(output_layer_name, output_layer,
            "ABSOLUTE")
        # Use a test node to figure out which metrics were computed
        test_node_id = selected_features.pop()
        # Make sure the test node is in the graph
        while test_node_id not in nodes:
          test_node_id = selected_features.pop()
        test_node = nodes[test_node_id]
        measures = set([measure for measure in dir(test_node) if (measure in
            FINAL_ATTRIBUTES or is_accumulator_field(measure))])
        # Add a field in the output layer for each computed metric
        for measure in measures:
          AddField_management(in_table=output_layer, field_name=trim(measure),
              field_type="DOUBLE", field_is_nullable="NON_NULLABLE")
        # Figure out the id field to use based on the type of input buildings
        if (buildings_description.shapeType == "Polygon" and
            inputs[ID_ATTRIBUTE] == ORIGINAL_FID):
          id_field = "FID"
        else:
          id_field = inputs[ID_ATTRIBUTE]
        # Fill the layer with the metric values
        write_progress = Progress_Bar(N, 1, STEP_5)
        layer_rows = UpdateCursor(output_layer)
        for row in layer_rows:
          id = row.getValue(id_field)
          for measure in measures:
            # If no value was computed for this node id, set value to 0
            value = 0
            if id in nodes and hasattr(nodes[id], measure):
              value = getattr(nodes[id], measure)
            row.setValue(trim(measure), value)
          layer_rows.updateRow(row)
          write_progress.step()
        # Save to toolbox output
        SetParameterAsText(OUTPUT_FEATURE_CLASS, output_feature_class)
        AddMessage(STEP_5_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_5_FAILED)
        success = False

    # Step 6
    if success:
      AddMessage(STEP_6_STARTED)
      # Apply symbology
      try:
        ApplySymbologyFromLayer_management(in_layer=output_layer,
            in_symbology_layer=symbology_layer)
      except:
        AddWarning(WARNING_APPLY_SYMBOLOGY_FAILED)
        AddWarning(GetMessages(2))
        AddMessage(STEP_6_FAILED)
      # Display
      try:
        current_map_document = mapping.MapDocument("CURRENT")
        data_frame = mapping.ListDataFrames(current_map_document,
            "Layers")[0]
        add_layer = mapping.Layer(output_layer)
        mapping.AddLayer(data_frame, add_layer, "AUTO_ARRANGE")
        AddMessage(STEP_6_FINISHED)
      except:
        AddWarning(WARNING_FAIL_TO_DISPLAY)
        AddWarning(GetMessages(2))
        AddMessage(STEP_6_FAILED)

    # Clean up
    clean_up()

    AddMessage(SUCCESS if success else FAILURE)

  except ExecuteAbort:
    clean_up()
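Steps 2 through 5 in main() rely on a Node class that is referenced but not defined in these examples (it is part of the surrounding toolbox): Step 2 calls add_neighbor with a distance and accumulator totals, and Steps 3 and 5 attach weights, locations, and computed measures via setattr/getattr, which any plain Python object supports. The sketch below is a rough stand-in for that interface under those assumptions; keeping only the shortest distance per neighbor is a choice of this sketch, and the edge triples are hypothetical.

class Node(object):
  """Minimal stand-in for the toolbox's Node class, mirroring the calls above."""

  def __init__(self):
    # Maps a neighbor's id to (distance, accumulations), as filled in Step 2.
    self.neighbors = {}

  def add_neighbor(self, neighbor_id, distance, accumulations=None):
    # Keep the shortest distance if the same pair of nodes appears twice.
    previous = self.neighbors.get(neighbor_id)
    if previous is None or distance < previous[0]:
      self.neighbors[neighbor_id] = (distance, accumulations or {})


# Building the same undirected adjacency structure from plain triples instead
# of an adjacency-list dbf cursor (the triples are hypothetical):
edges = [("A", "B", 120.0), ("B", "C", 80.0), ("A", "C", 200.0)]
nodes = {}
for origin_id, destination_id, distance in edges:
  for node_id in (origin_id, destination_id):
    nodes.setdefault(node_id, Node())
  if origin_id != destination_id and distance >= 0:
    nodes[origin_id].add_neighbor(destination_id, distance)
    nodes[destination_id].add_neighbor(origin_id, distance)
print(sorted(nodes["A"].neighbors))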