Code example #1
File: ftot_setup.py  Project: VolpeUSDOT/FTOT-Public
def setup(the_scenario, logger):

    logger.debug("start: setup")
    start_time = datetime.datetime.now()
    logger.info("Scenario Name: \t{}".format(the_scenario.scenario_name))
    logger.debug("Scenario Description: \t{}".format(
        the_scenario.scenario_description))
    logger.info("Scenario Start Date/Time: \t{}".format(start_time))

    # create a folder for debug and intermediate files
    # delete everything in there if it exists
    # ------------------------------------------------
    debug_directory = os.path.join(the_scenario.scenario_run_directory,
                                   "debug")

    if os.path.exists(debug_directory):
        logger.debug("deleting debug_directory and contents.")
        rmtree(debug_directory)

    if not os.path.exists(debug_directory):
        os.makedirs(debug_directory)
        logger.debug("creating debug_directory.")

    # create the scenario database main.db
    create_main_db(logger, the_scenario)

    # create the scenario geodatabase, main.gdb
    create_main_gdb(logger, the_scenario)

    logger.debug("finish: SETUP:  Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))
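
The delete-then-recreate idiom above is what guarantees setup() starts each run with an empty debug folder. A minimal, self-contained sketch of that idiom, assuming only the standard library (the run-directory path is hypothetical):

import os
import shutil

def reset_directory(path):
    # remove the directory and everything in it if it exists,
    # then recreate it empty
    if os.path.exists(path):
        shutil.rmtree(path)
    os.makedirs(path)

# hypothetical run directory, for illustration only
reset_directory(os.path.join("C:\\FTOT\\scenarios\\quick_start", "debug"))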
Code example #2
def export_fcs_from_main_gdb(the_scenario, logger):
    # export fcs from the main.GDB to individual shapefiles
    logger.info("start: export_fcs_from_main_gdb")
    start_time = datetime.datetime.now()

    # export network and locations fc's to shapefiles
    main_gdb = the_scenario.main_gdb
    output_path = the_scenario.lyr_files_dir
    input_features = "\""

    logger.debug("delete the temp_networkx_shp_files dir")
    if os.path.exists(output_path):
        logger.debug("deleting temp_networkx_shp_files directory.")
        rmtree(output_path)

    if not os.path.exists(output_path):
        os.makedirs(output_path)
        logger.debug("created the temp_networkx_shp_files directory.")

    # get the locations and network feature layers
    for fc in ['\\locations;', '\\network\\intermodal;', '\\network\\locks;', '\\network\\pipeline_prod_trf_rts;',
               '\\network\\pipeline_crude_trf_rts;', '\\network\\water;', '\\network\\rail;', '\\network\\road']:
        input_features += main_gdb + fc
    input_features += "\""
    arcpy.FeatureClassToShapefile_conversion(Input_Features=input_features, Output_Folder=output_path)

    logger.debug("finished: export_fcs_from_main_gdb: Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))
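
The loop above builds the quoted, semicolon-delimited Input_Features string that arcpy's FeatureClassToShapefile_conversion expects. One way to assemble the same string with a join; a sketch only (the geodatabase path is made up, and arcpy itself is not needed just to build the string):

import os

def build_input_features(main_gdb, feature_classes):
    # FeatureClassToShapefile_conversion takes a single string of
    # semicolon-delimited feature class paths; the snippet above also
    # wraps the whole list in literal double quotes.
    paths = [os.path.join(main_gdb, fc) for fc in feature_classes]
    return '"' + ";".join(paths) + '"'

# hypothetical gdb path, for illustration only
fcs = ["locations", "network\\road", "network\\rail", "network\\water"]
print(build_input_features("C:\\FTOT\\scenario\\main.gdb", fcs))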
Code example #3
def gis_populate_fc(the_scenario, logger):

    logger.info("start: gis_populate_fc")

    start_time = datetime.datetime.now()

    # populate the destinations fc in main.gdb
    gis_ultimate_destinations_setup_fc(the_scenario, logger)

    # populate the RMPs fc in main.gdb
    gis_rmp_setup_fc(the_scenario, logger)

    # populate the processors fc in main.gdb
    gis_processors_setup_fc(the_scenario, logger)

    logger.debug("finished: gis_populate_fc: Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))
Code example #4
def make_networkx_graph(the_scenario, logger):
    # High level work flow:
    # ------------------------
    # make_networkx_graph
    # create the multidigraph
    # convert the node labels to integers
    # reverse the graph and compose with self

    logger.info("start: make_networkx_graph")
    start_time = datetime.datetime.now()

    # read the shapefiles with the customized read_shp method
    input_path = the_scenario.lyr_files_dir

    logger.debug("start: read_shp")
    G = read_shp(input_path, logger)  # note: this is the custom read_shp, not nx.read_shp()

    # cleanup the node labels
    logger.debug("start: convert node labels")
    G = nx.convert_node_labels_to_integers(G,
                                           first_label=0,
                                           ordering='default',
                                           label_attribute="x_y_location")

    # create a reversed graph
    logger.debug("start: reverse G graph to H")
    H = G.reverse()  # this is a reversed version of the graph.

    # set a new attribute on every edge that marks it as a "reversed" link.
    # we will use this to delete edges that shouldn't be reversed later.
    logger.debug("start: set 'reversed' attribute in H")
    nx.set_edge_attributes(H, 1, "REVERSED")

    # add the two graphs together
    logger.debug("start: compose G and H")
    G = nx.compose(G, H)

    # print out some stats on the Graph
    logger.info("Number of nodes in the raw graph: {}".format(G.order()))
    logger.info("Number of edges in the raw graph: {}".format(G.size()))

    logger.debug("finished: make_networkx_graph: Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))

    return G
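
To see what the reverse-and-compose sequence produces, here is the same three-call pattern run on a toy graph; only networkx is required, and the nodes and edge data are invented:

import networkx as nx

# toy directed multigraph with two one-way edges
G = nx.MultiDiGraph()
G.add_edge("a", "b", MODE_TYPE="road")
G.add_edge("b", "c", MODE_TYPE="rail")

G = nx.convert_node_labels_to_integers(G,
                                       first_label=0,
                                       ordering='default',
                                       label_attribute="x_y_location")

H = G.reverse()                           # flip every edge
nx.set_edge_attributes(H, 1, "REVERSED")  # tag all the flipped copies
G = nx.compose(G, H)                      # union of forward and reversed edges

print(G.order(), G.size())  # 3 nodes, 4 edges (2 forward + 2 reversed)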
Code example #5
def gis_clean_fc(the_scenario, logger):

    logger.info("start: gis_clean_fc")

    start_time = datetime.datetime.now()

    # clear the destinations
    gis_clear_feature_class(the_scenario.destinations_fc, logger)

    # clear the RMPs
    gis_clear_feature_class(the_scenario.rmp_fc, logger)

    # clear the processors
    gis_clear_feature_class(the_scenario.processors_fc, logger)

    # clear the locations
    gis_clear_feature_class(the_scenario.locations_fc, logger)

    logger.debug("finished: gis_clean_fc: Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))
Code example #6
    except:

        stack_trace = traceback.format_exc()
        split_stack_trace = stack_trace.split('\n')
        logger.error(
            "!!!!!!!!!!!!!!!!!!!!!!!!!!!!! EXCEPTION RAISED !!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
        )
        for trace_line in split_stack_trace:
            trace_line = trace_line.rstrip()
            if trace_line != "":  # issue #182 - skip blank lines; record everything else in the log.
                logger.error(trace_line)
        logger.error(
            "!!!!!!!!!!!!!!!!!!!!!!!!!!!!! EXCEPTION RAISED !!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
        )

        sys.exit(1)

    logger.info(
        "======================== FTOT RUN FINISHED: {:2} =================================="
        .format(str(args.task).upper()))
    logger.info(
        "======================== Total Runtime (HMS): \t{} \t ".format(
            ftot_supporting.get_total_runtime_string(start_time)))
    logger.info(
        "================================================================================="
    )
    logger.runtime("{} Step - Total Runtime (HMS): \t{}".format(
        args.task, ftot_supporting.get_total_runtime_string(start_time)))
    logging.shutdown()
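
The banner-and-traceback logging pattern above, reduced to a self-contained form (the logger name here is arbitrary):

import logging
import sys
import traceback

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("ftot_example")  # arbitrary name for the sketch

try:
    1 / 0
except Exception:
    banner = "!" * 29 + " EXCEPTION RAISED " + "!" * 29
    logger.error(banner)
    # log the traceback one non-blank line at a time, as above
    for trace_line in traceback.format_exc().split('\n'):
        trace_line = trace_line.rstrip()
        if trace_line:
            logger.error(trace_line)
    logger.error(banner)
    sys.exit(1)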
Code example #7
def clean_networkx_graph(the_scenario, G, logger):
    # VERSION 3:
    # renamed clean_networkx_graph()
    # remove reversed links for pipeline
    # selectively remove links for location _IN and _OUT nodes
    # preserve the route_cost_scaling factor in an attribute by phase of matter
    # -------------------------------------------------------------------------
    
    logger.info("start: clean_networkx_graph")
    start_time = datetime.datetime.now()

    logger.debug("Processing the {} edges in the uncosted graph.".format(G.size()))

    # use the artificial and reversed attribute to determine if
    # the link is kept
    # -------------------------------------------------------------
    edge_attrs = {}  # for storing the edge attributes which are set all at once
    deleted_edge_count = 0

    # note: For digraphs, edges=out_edges
    # for some reason it shows up as out_edges in the debugger, but
    # when caching to the database both in_edges and out_edges are stored.
    for u, v, keys, artificial in G.edges(data='Artificial', keys=True):

        # initialize the route_cost_scaling variable to something
        # absurd so we know if it's getting set properly in the loop:
        route_cost_scaling = -999999999

        # check if the link is reversed
        if 'REVERSED' in G.edges[u, v, keys]:
            reversed_link = G.edges[u, v, keys]['REVERSED']
        else:
            reversed_link = 0

        # check if capacity is 0
        # Network Edges - artificial == 0
        # -----------------------------------
        if artificial == 0:

            # check the mode type
            # ----------------------
            mode_type = G.edges[u, v, keys]['MODE_TYPE']

            # set the mode specific weights
            # -----------------------------

            if mode_type == "rail":
                d_code = G.edges[u, v, keys]["DENSITY_CO"]
                if d_code in [7]:
                    route_cost_scaling = the_scenario.rail_dc_7
                elif d_code in [6]:
                    route_cost_scaling = the_scenario.rail_dc_6
                elif d_code in [5]:
                    route_cost_scaling = the_scenario.rail_dc_5
                elif d_code in [4]:
                    route_cost_scaling = the_scenario.rail_dc_4
                elif d_code in [3]:
                    route_cost_scaling = the_scenario.rail_dc_3
                elif d_code in [2]:
                    route_cost_scaling = the_scenario.rail_dc_2
                elif d_code in [1]:
                    route_cost_scaling = the_scenario.rail_dc_1
                elif d_code in [0]:
                    route_cost_scaling = the_scenario.rail_dc_0
                else:
                    logger.warning("The d_code {} is not supported".format(d_code))

            elif mode_type == "water":

                # get the total vol of water traffic
                tot_vol = G.edges[u, v, keys]['TOT_UP_DWN']
                if tot_vol >= 10000000:
                    route_cost_scaling = the_scenario.water_high_vol
                elif 1000000 <= tot_vol < 10000000:
                    route_cost_scaling = the_scenario.water_med_vol
                elif 1 <= tot_vol < 1000000:
                    route_cost_scaling = the_scenario.water_low_vol
                else:
                    route_cost_scaling = the_scenario.water_no_vol

            elif mode_type == "road":

                # get fclass
                fclass = G.edges[u, v, keys]['FCLASS']
                if fclass in [1]:
                    route_cost_scaling = the_scenario.truck_interstate
                elif fclass in [2, 3]:
                    route_cost_scaling = the_scenario.truck_pr_art
                elif fclass in [4]:
                    route_cost_scaling = the_scenario.truck_m_art
                else:
                    route_cost_scaling = the_scenario.truck_local

            elif 'pipeline' in mode_type:
                if reversed_link == 1:
                    G.remove_edge(u, v, keys)
                    deleted_edge_count += 1
                    continue  # move on to the next edge
                else:
                    route_cost_scaling = (((float(G.edges[u, v, keys]['base_rate']) / 100) / 42.0) * 1000.0)

        # Intermodal Edges - artificial == 2
        # ------------------------------------
        elif artificial == 2:
            # set it to 1 because we'll multiply by the appropriate
            # link_cost later for transloading
            route_cost_scaling = 1

            # nothing else to do with intermodal edges.
            # they need to be unscaled in both directions

        # Artificial Edge - artificial == 1
        # ----------------------------------
        # need to check if it's an _IN location or an _OUT location and delete selectively.
        # assume always connecting from the node to the network.
        # so _OUT locations should delete the reversed link
        # _IN locations should delete the non-reversed link.
        elif artificial == 1:
            # delete edges we don't want

            try:
                if G.edges[u, v, keys]['LOCATION_1'].find("_OUT") > -1 and reversed_link == 1:
                    G.remove_edge(u, v, keys)
                    deleted_edge_count += 1
                    continue  # move on to the next edge
                elif G.edges[u, v, keys]['LOCATION_1'].find("_IN") > -1 and reversed_link == 0:
                    G.remove_edge(u, v, keys)
                    deleted_edge_count += 1
                    continue  # move on to the next edge

                # there is no scaling of artificial links.
                # the cost_penalty is calculated in get_network_link_cost()
                else:
                    route_cost_scaling = 1
            except KeyError:
                logger.warning("LOCATION_1 key is missing for edge u: {}, v: {}".format(u, v))
        else:
            logger.warning("found an edge without an Artificial attribute: ({}, {}, {})".format(u, v, keys))
            continue

        edge_attrs[u, v, keys] = {
            'route_cost_scaling': route_cost_scaling
            }

    nx.set_edge_attributes(G, edge_attrs)

    # print out some stats on the Graph
    logger.info("Number of nodes in the clean graph: {}".format(G.order()))
    logger.info("Number of edges in the clean graph: {}".format(G.size()))

    logger.debug("finished: clean_networkx_graph: Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))

    return G
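
clean_networkx_graph removes edges from G while iterating G.edges(...), which newer networkx releases may reject as mutation during iteration. A defensive variant collects removals and attribute updates first, then mutates once the loop is done; a sketch of that pattern on a toy graph, with invented attribute values:

import networkx as nx

G = nx.MultiDiGraph()
G.add_edge(0, 1, key=0, Artificial=0, MODE_TYPE="road", FCLASS=1)
G.add_edge(1, 0, key=0, Artificial=0, MODE_TYPE="road", FCLASS=1, REVERSED=1)

edge_attrs = {}
edges_to_remove = []

for u, v, k, data in G.edges(keys=True, data=True):
    if data.get("REVERSED", 0) == 1 and data["MODE_TYPE"] == "road":
        # placeholder rule: pretend reversed road links get dropped
        edges_to_remove.append((u, v, k))
    else:
        edge_attrs[(u, v, k)] = {"route_cost_scaling": 1.0}

# mutate only after iteration has finished
G.remove_edges_from(edges_to_remove)
nx.set_edge_attributes(G, edge_attrs)

print(G.size())  # 1 edge left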
Code example #8
def gis_processors_setup_fc(the_scenario, logger):

    logger.info("start: gis_processors_setup_fc")
    start_time = datetime.datetime.now()

    if str(the_scenario.base_processors_layer).lower() == "null" or \
       str(the_scenario.base_processors_layer).lower() == "none":
        # create an empty processors layer
        # -------------------------
        processors_fc = the_scenario.processors_fc

        if arcpy.Exists(processors_fc):
            arcpy.Delete_management(processors_fc)
            logger.debug("deleted existing {} layer".format(processors_fc))

        arcpy.CreateFeatureclass_management(the_scenario.main_gdb, "processors", \
                                            "POINT", "#", "DISABLED", "DISABLED", ftot_supporting_gis.LCC_PROJ, "#",
                                            "0", "0", "0")

        arcpy.AddField_management(processors_fc, "Facility_Name", "TEXT", "#",
                                  "#", "25", "#", "NULLABLE", "NON_REQUIRED",
                                  "#")
        arcpy.AddField_management(processors_fc, "Candidate", "SHORT")
        # logger.info("note: processors layer specified in the XML: {}".format(the_scenario.base_processors_layer))
        # empty_processors_fc = str("{}\\facilities\\test_facilities.gdb\\test_processors_empty"
        #                           .format(the_scenario.common_data_folder))
        # processors_fc = the_scenario.processors_fc
        # arcpy.Project_management(empty_processors_fc, processors_fc, ftot_supporting_gis.LCC_PROJ)

    else:
        # copy the processors from the baseline data to the working gdb
        # ----------------------------------------------------------------
        if not arcpy.Exists(the_scenario.base_processors_layer):
            error = "can't find baseline data processors layer {}".format(
                the_scenario.base_processors_layer)
            raise IOError(error)

        processors_fc = the_scenario.processors_fc
        arcpy.Project_management(the_scenario.base_processors_layer,
                                 processors_fc, ftot_supporting_gis.LCC_PROJ)

        arcpy.AddField_management(processors_fc, "Candidate", "SHORT")

        # Delete features with no data in csv-- cleans up GIS output and eliminates unnecessary GIS processing
        # --------------------------------------------------------------
        # create a temp dict to store values from CSV
        temp_facility_commodities_dict = {}
        counter = 0

        # read through facility_commodities input CSV
        import csv
        with open(the_scenario.processors_commodity_data, 'r') as f:

            reader = csv.DictReader(f)
            for row in reader:
                facility_name = str(row["facility_name"])
                commodity_quantity = float(row["value"])

                if facility_name not in temp_facility_commodities_dict:
                    if commodity_quantity > 0:
                        temp_facility_commodities_dict[facility_name] = True

        with arcpy.da.UpdateCursor(processors_fc, ['Facility_Name']) as cursor:
            for row in cursor:
                if row[0] in temp_facility_commodities_dict:
                    pass
                else:
                    cursor.deleteRow()
                    counter += 1

        del cursor
        logger.config(
            "Number of processors removed due to lack of commodity data: \t{}".
            format(counter))

        with arcpy.da.SearchCursor(
                processors_fc,
                ['Facility_Name', 'SHAPE@X', 'SHAPE@Y']) as scursor:
            for row in scursor:
                # Check if coordinates of facility are roughly within North America
                if -6500000 < row[1] < 6500000 and -3000000 < row[2] < 5000000:
                    pass
                else:
                    logger.warning(
                        "Facility: {} is not located in North America.".format(
                            row[0]))
                    logger.info(
                        "remove the facility from the scenario or make adjustments to the facility's location "
                        "in the processors feature class: {}".format(
                            the_scenario.base_processors_layer))
                    error = "Facilities outside North America are not supported in FTOT"
                    logger.error(error)
                    raise Exception(error)

        del scursor

    # check for candidate processors to merge into the processors fc
    layers_to_merge = []

    # add the candidate processors fc for merging if it exists.
    if arcpy.Exists(the_scenario.processor_candidates_fc):
        logger.info(
            "adding {} candidate processors to the processors fc".format(
                gis_get_feature_count(the_scenario.processor_candidates_fc,
                                      logger)))
        layers_to_merge.append(the_scenario.processor_candidates_fc)
        gis_merge_processor_fc(the_scenario, layers_to_merge, logger)

    result = gis_get_feature_count(processors_fc, logger)

    logger.config("Number of Processors: \t{}".format(result))

    logger.debug("finish: gis_processors_setup_fc: Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))
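
The CSV pass in both facility setups reduces to one dictionary build: keep a facility if any of its rows reports a positive value. Isolated here in Python 3 form (the file path in the usage comment is hypothetical):

import csv

def facilities_with_commodities(csv_path):
    # map facility_name -> True for any facility with a positive quantity
    keep = {}
    with open(csv_path, 'r', newline='') as f:
        for row in csv.DictReader(f):
            if float(row["value"]) > 0:
                keep[str(row["facility_name"])] = True
    return keep

# hypothetical input file, for illustration only:
# keep = facilities_with_commodities("C:\\FTOT\\scenario\\processors.csv")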
Code example #9
def gis_rmp_setup_fc(the_scenario, logger):

    logger.info("start: gis_rmp_setup_fc")
    start_time = datetime.datetime.now()

    # copy the rmp from the baseline data to the working gdb
    # ----------------------------------------------------------------
    if not arcpy.Exists(the_scenario.base_rmp_layer):
        error = "can't find baseline data rmp layer {}".format(
            the_scenario.base_rmp_layer)
        raise IOError(error)

    rmp_fc = the_scenario.rmp_fc
    arcpy.Project_management(the_scenario.base_rmp_layer, rmp_fc,
                             ftot_supporting_gis.LCC_PROJ)

    # Delete features with no data in csv-- cleans up GIS output and eliminates unnecessary GIS processing
    # --------------------------------------------------------------
    # create a temp dict to store values from CSV
    temp_facility_commodities_dict = {}
    counter = 0

    # read through facility_commodities input CSV
    import csv
    with open(the_scenario.rmp_commodity_data, 'r') as f:

        reader = csv.DictReader(f)
        for row in reader:
            facility_name = str(row["facility_name"])
            commodity_quantity = float(row["value"])

            if facility_name not in temp_facility_commodities_dict:
                if commodity_quantity > 0:
                    temp_facility_commodities_dict[facility_name] = True

    with arcpy.da.UpdateCursor(rmp_fc, ['Facility_Name']) as cursor:
        for row in cursor:
            if row[0] in temp_facility_commodities_dict:
                pass
            else:
                cursor.deleteRow()
                counter += 1
    del cursor
    logger.config(
        "Number of RMPs removed due to lack of commodity data: \t{}".format(
            counter))

    with arcpy.da.SearchCursor(
            rmp_fc, ['Facility_Name', 'SHAPE@X', 'SHAPE@Y']) as scursor:
        for row in scursor:
            # Check if coordinates of facility are roughly within North America
            if -6500000 < row[1] < 6500000 and -3000000 < row[2] < 5000000:
                pass
            else:
                logger.warning(
                    "Facility: {} is not located in North America.".format(
                        row[0]))
                logger.info(
                    "remove the facility from the scenario or make adjustments to the facility's location in "
                    "the RMP feature class: {}".format(
                        the_scenario.base_rmp_layer))
                error = "Facilities outside North America are not supported in FTOT"
                logger.error(error)
                raise Exception(error)

    del scursor

    result = gis_get_feature_count(rmp_fc, logger)
    logger.config("Number of RMPs: \t{}".format(result))

    logger.debug("finished: gis_rmp_setup_fc: Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))
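
The North America screen applied to both RMPs and processors is a plain rectangle test in the scenario's projected LCC coordinates (meters). Standalone, with the same hard-coded bounds as the cursors above:

def roughly_in_north_america(x, y):
    # bounds in projected LCC meters, copied from the SearchCursor checks above
    return -6500000 < x < 6500000 and -3000000 < y < 5000000

assert roughly_in_north_america(0, 0)
assert not roughly_in_north_america(9000000, 0)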
Code example #10
File: ftot.py  Project: jiezhao1219/FTOT-Public
        elif args.task == "m":
            from ftot_maps import new_map_creation
            new_map_creation(the_scenario, logger)

        # Time and Commodity Mapping
        elif args.task == "m2":
            from ftot_maps import prepare_time_commodity_subsets_for_mapping
            prepare_time_commodity_subsets_for_mapping(the_scenario, logger)

        elif args.task == "test":
            logger.info("in the test case")

    except:

        stack_trace = traceback.format_exc()

        logger.error("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        logger.error("\n\n" + stack_trace)
        logger.error("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")

        sys.exit(1)

    logger.info("======================== FTOT RUN FINISHED: {:2} ==================================".format(
        str(args.task).upper()))
    logger.info("======================== Total Runtime (HMS): \t{} \t ".format(
        ftot_supporting.get_total_runtime_string(start_time)))
    logger.info("=================================================================================")
    logger.runtime(
        "{} Step - Total Runtime (HMS): \t{}".format(args.task, ftot_supporting.get_total_runtime_string(start_time)))
    logging.shutdown()
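
ftot.py's main body is a chain of task checks with lazy per-step imports, as the excerpt shows for "m", "m2", and "test". A stripped-down sketch of that dispatch shape; the argparse wiring here is illustrative, not the actual ftot.py parser:

import argparse

def dispatch(task, the_scenario, logger):
    # lazy imports keep each step from loading the other steps' dependencies,
    # mirroring the excerpt above
    if task == "m":
        from ftot_maps import new_map_creation
        new_map_creation(the_scenario, logger)
    elif task == "m2":
        from ftot_maps import prepare_time_commodity_subsets_for_mapping
        prepare_time_commodity_subsets_for_mapping(the_scenario, logger)
    elif task == "test":
        logger.info("in the test case")

parser = argparse.ArgumentParser()
parser.add_argument("task", choices=["m", "m2", "test"])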