Example #1
def get_output_dict_with_less_parameter(predict_speed_dict, target_dt, time_slot_interval):
    save_filename_list = ["way_types", "way_type_avg_speed_limit", "final_way_table", "final_node_table"]
    temp_map_dates = graph_reader(Path("graph/"), SAVE_TYPE_PICKLE, save_filename_list)
    way_types = temp_map_dates[0]
    way_type_avg_speed_limit = temp_map_dates[1]
    final_way_table = temp_map_dates[2]
    final_node_table = temp_map_dates[3]

    interval_idx = (target_dt.hour * 60 + target_dt.minute) // time_slot_interval

    return get_output_dict(predict_speed_dict, target_dt, time_slot_interval, interval_idx, final_node_table,
                           final_way_table, way_types, way_type_avg_speed_limit)
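For reference, a quick illustration of how the interval index above maps a wall-clock time onto a slot of the day; the datetime value is made up for the example and get_output_dict itself is not called here:

from datetime import datetime

# Hypothetical input: 08:17 with 5-minute slots.
target_dt = datetime(2021, 3, 2, 8, 17)
time_slot_interval = 5

# Same formula as in the example: minutes since midnight, floor-divided by the slot length.
interval_idx = (target_dt.hour * 60 + target_dt.minute) // time_slot_interval
print(interval_idx)  # 99, i.e. the slot covering 08:15 - 08:19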
Example #2
def generate_way_structure_json(save_path="static/mapdata/way_structure.json"):
    save_filename_list = ["final_node_table", "final_way_table"]
    temp_map_dates = graph_reader(Path("graph/"), SAVE_TYPE_PICKLE, save_filename_list)
    final_node_table = temp_map_dates[0]
    final_way_table = temp_map_dates[1]
    way_structure = {}
    for way, waypoints in final_way_table.items():
        points = []
        for waypoint in waypoints:
            points.append([final_node_table[waypoint][0], final_node_table[waypoint][1]])
        way_structure[way] = points

    temp_filepath = Path(save_path)
    temp_filepath.parent.mkdir(parents=True, exist_ok=True)
    with open(temp_filepath, 'w') as f:
        # json.dump(output_dict, f, indent=2)
        json.dump(way_structure, f)
    return 0
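A minimal consumer sketch for the file written above, assuming generate_way_structure_json() has already been run with its default path; note that JSON stringifies the way ids:

import json
from pathlib import Path

with open(Path("static/mapdata/way_structure.json"), "r") as f:
    way_structure = json.load(f)

for way_id, points in way_structure.items():
    # Each entry maps a way id (stringified by JSON) to a list of [lat, lon] pairs taken from final_node_table.
    print(way_id, "has", len(points), "waypoints, first at", points[0])
    break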
Example #3
def show_traffic_speed(final_way_table, final_node_table, road_speeds, time_range_start_index,
                       time_range_end_index, time_slot_interval, map_type="OSM"):
    """

    Parameters
    ----------
    final_way_table: Dictionary
        A dictionary that stores each way id as a key and a list of node ids as the value.
    final_node_table: Dictionary
        A dictionary that stores each node id as a key and its latitude/longitude coordinates as the value.
    road_speeds : Dictionary
        A dictionary that stores the speed matrix, using way id as the key and a list for each row.
    time_range_start_index : int
        The index of the start of the time range
    time_range_end_index : int
        The index of the end of the time range (inclusive)
    time_slot_interval : int
        The length of each time interval in minutes. The value should evenly divide 1440 (24 hours * 60 minutes);
        by default it is 5 minutes.
    map_type : str, optional
        The map platform used to render the generated map. Either "GoogleMap" or "OSM", by default "OSM".

    Returns
    -------
    None
    """
    save_filename_list = ["way_types", "way_type_avg_speed_limit"]
    temp_map_dates = graph_reader(Path("graph/"), SAVE_TYPE_PICKLE, save_filename_list)
    way_types = temp_map_dates[0]
    way_type_avg_speed_limit = temp_map_dates[1]

    if map_type == "GoogleMap":
        return show_traffic_speed_googlemap(final_way_table, final_node_table, road_speeds, time_range_start_index,
                                            time_range_end_index, time_slot_interval, way_types,
                                            way_type_avg_speed_limit)
    elif map_type == "OSM":
        return show_traffic_speed_OSM(final_way_table, final_node_table, road_speeds, time_range_start_index,
                                      time_range_end_index, time_slot_interval, way_types, way_type_avg_speed_limit)
    else:
        print("Unknown map_type")
    return 0
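The docstring requires time_slot_interval to divide the day evenly; a small sanity-check sketch with arbitrary candidate values:

# Valid slot lengths are divisors of 1440 (24 h * 60 min); each gives a whole number of slots per day.
for candidate in (5, 7, 15, 60):
    if 1440 % candidate == 0:
        print(candidate, "min ->", 1440 // candidate, "slots per day")
    else:
        print(candidate, "min does not divide 1440 and is not a valid time_slot_interval")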
Example #4
def show_traffic_speed(road_speeds, timestamp, map_type="OSM"):
    graph_need_read = [
        "way_types", "way_type_avg_speed_limit", "final_node_table",
        "final_way_table"
    ]
    temp_map_dates = graph_reader(Path("graph"), SAVE_TYPE_PICKLE,
                                  graph_need_read)
    way_types = temp_map_dates[0]
    way_type_avg_speed_limit = temp_map_dates[1]
    final_node_table = temp_map_dates[2]
    final_way_table = temp_map_dates[3]

    if map_type == "GoogleMap":
        return show_traffic_speed_googlemap(final_way_table, final_node_table,
                                            road_speeds, timestamp)
    elif map_type == "OSM":
        return show_traffic_speed_OSM(final_way_table, final_node_table,
                                      road_speeds, timestamp, way_types,
                                      way_type_avg_speed_limit)
    else:
        print("Unknown map_type")
    return 0
Example #5
def generate_prediction_in_large_batches(predict_timestamp=int(datetime.now().timestamp()), interval=5,
                                         result_file_path="data/{0}/result/{0}_{1}_min_road.csv",
                                         config_history_date=PREDICT_ROAD_CONDITION_CONFIG_HISTORY_DATE,
                                         config_history_data_range=PREDICT_ROAD_CONDITION_CONFIG_HISTORY_DATA_RANGE,
                                         config_weight=PREDICT_ROAD_CONDITION_CONFIG_WEIGHT):
    """
    Generate predictions in large batches (one full day at a time).

    Parameters
    ----------
    predict_timestamp: int (timestamp)
        10-digit timestamp; uses the current time if not provided.
        Only the date portion of this timestamp is used.

    interval: int
        The length of each time interval in minutes. The value should evenly divide 1440 (24 hours * 60 minutes);
        by default it is 5 minutes.

    result_file_path: String
        The path (and format string) of the result .csv files.
        {0} is an 8-digit date_str in yyyyMMdd format, {1} is the value of interval.
        By default it uses the project's file format.

    config_history_date: List of int
        Specifies which historical days of data the function uses in the computation. It should be a List of int,
        where each int is the day offset relative to the prediction date, e.g. -1 means yesterday.
        By default it uses a predetermined configuration.

    config_history_data_range: List of int
        Specifies the range and the order in which nearby data replaces missing data. If the value is [-1, 1] and
        the data at index i is missing, the value at index i-1 and then i+1 is tried instead (see the sketch after
        this example).
        By default it uses a predetermined configuration.

    config_weight: List of float
        Specifies the weight of each day's data when computing the weighted sum.
        By default it uses a predetermined configuration.

    Returns
    -------
    None

    """
    # Check input, load data and preparation
    if len(config_history_date) != len(config_weight):
        print("error: len(config_history_date) != len(config_weight)")
        return -1

    save_filename_list = ["way_graph", "way_types", "way_type_avg_speed_limit", "final_node_table", "final_way_table"]
    temp_map_dates = graph_reader(Path("graph/"), SAVE_TYPE_PICKLE, save_filename_list)
    way_graph = temp_map_dates[0]
    way_types = temp_map_dates[1]
    way_type_avg_speed_limit = temp_map_dates[2]
    final_node_table = temp_map_dates[3]
    final_way_table = temp_map_dates[4]

    # Find the date of the prediction.
    predict_time = datetime.fromtimestamp(predict_timestamp)
    predict_time_date_str = predict_time.strftime("%Y%m%d")

    # Prepare to load history data
    history_data_date_str = []
    for offset in config_history_date:
        history_data_date_str.append((predict_time + timedelta(days=offset)).strftime("%Y%m%d"))

    if FLAG_DEBUG:
        print("Predict time: {}".format(predict_time.strftime("%Y-%m-%d %H:%M:%S")))
        print("History data date_str:{}".format(history_data_date_str))

    # Load history data
    history_speed_matrix_list, config_weight = \
        predict_road_condition.get_history_speed_matrix_list(history_data_date_str, config_weight, interval,
                                                             result_file_path)

    if len(history_speed_matrix_list) == 0:
        print({"Error": "No enough data for predict"})
        return {"Error": "No enough data for predict"}

    if FLAG_DEBUG:
        print("{} day(s) load".format(len(history_speed_matrix_list)))

    full_way_id_set, usable_way_id_set = predict_road_condition.get_way_id_set(history_speed_matrix_list)

    for interval_idx in tqdm(range(int(1440 / interval))):
        # print(interval_idx, time_range_index_to_str(interval_idx, interval))
        predict_speed_dict = \
            predict_road_condition.compute_speed_dict(interval, interval_idx, history_speed_matrix_list,
                                                      full_way_id_set,
                                                      usable_way_id_set, config_history_data_range, config_weight,
                                                      way_graph, way_types, way_type_avg_speed_limit)
        predict_speed_dict_to_json(predict_speed_dict, predict_time, interval, interval_idx,
                                   final_node_table, final_way_table, way_types, way_type_avg_speed_limit)

    return 0
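A minimal sketch of the nearby-slot fallback described for config_history_data_range; the helper name fill_missing_speed and the -1 marker for missing data are assumptions for illustration only, the real logic lives in predict_road_condition.compute_speed_dict:

def fill_missing_speed(row, idx, history_data_range, missing_value=-1):
    # Return the speed at idx, or the first usable nearby slot tried in the given order,
    # e.g. [-1, 1] checks idx-1 first and then idx+1.
    if row[idx] != missing_value:
        return row[idx]
    for offset in history_data_range:
        neighbor = idx + offset
        if 0 <= neighbor < len(row) and row[neighbor] != missing_value:
            return row[neighbor]
    return missing_value  # still missing; the real pipeline can estimate such roads from the way graph


# Slot 2 is missing, so the value from slot 1 (offset -1) is used instead.
print(fill_missing_speed([30.0, 28.5, -1, 27.0], 2, [-1, 1]))  # 28.5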
Example #6
            save_type = SAVE_TYPE_JSON
        elif sys.argv[4] == "pickle":
            save_type = SAVE_TYPE_PICKLE
        else:
            print("invalid Save format")
            print(
                "Save format: the format to save the result, by default is pickle"
            )
            print("             possible value: JSON and pickle")
            exit(0)

    print("datapoint  : %s" % datapoint)
    print("Result path: %s" % result_file_path)
    if save_type == SAVE_TYPE_PICKLE:
        print("Result type: pickle")
    elif save_type == SAVE_TYPE_JSON:
        print("Result type: JSON")

    save_filename_list = [
        "final_node_table", "final_way_table", "final_relation_table"
    ]
    map_dates = graph_reader(result_file_path, save_type, save_filename_list)

    final_node_table = map_dates[0]
    final_way_table = map_dates[1]
    final_relation_table = map_dates[2]

    # 9345830 is 35A
    find_nearest_road(final_node_table, final_way_table, final_relation_table,
                      [9345830], datapoint)
Example #7
from datetime import datetime
from pathlib import Path
from helper.global_var import SAVE_TYPE_PICKLE
from helper.graph_reader import graph_reader
# The modules below are used further down; they are assumed to be project-level modules,
# adjust the import paths to match the project layout.
import find_traffic_speed
import process_data
import reformat_data

if __name__ == '__main__':
    date_str = datetime.today().strftime('%Y%m%d')
    # date_str = "20220131"
    data_root = Path(".") / 'data'
    process_data.preprocess_data(date_str, overwrite=True, min_file_size=10)
    reformat_data.reformat_by_bus(date_str)
    reformat_data.sort_reformat_data(date_str)

    save_filename_list = [
        "final_node_table", "final_way_table", "final_relation_table"
    ]
    map_dates = graph_reader(Path("graph"), SAVE_TYPE_PICKLE,
                             save_filename_list)
    final_node_table = map_dates[0]
    final_way_table = map_dates[1]
    final_relation_table = map_dates[2]

    time_slot_intervals = [5, 15]
    for interval in time_slot_intervals:
        print("Processing interval:", interval)
        find_traffic_speed.find_traffic_speed(date_str,
                                              final_node_table,
                                              final_way_table,
                                              final_relation_table,
                                              time_slot_interval=interval)
Example #8
def retrieve_traffic_data(timestamp, time_interval):
    """
    Get the traffic data for the given timestamp (at the nearest time interval/slot) and time_interval.

    Parameters
    ----------
    timestamp: str
        A 10-digit timestamp

    time_interval: str
        The time interval size that the user has selected, in minutes, e.g., 15, 30, or 45.

    Returns
    -------
    A serialized JSON object containing traffic data, in the following format:
    {
        "generate_timestr": "2021-03-02 19:52:44",
        "generate_timestamp": 1614732764, .
        "time_slot_interval": 15,
        "interval_idx": 0,
        "predict_time_range": "2020-07-30 00:00 - 00:14",
        "road_speed": {way_id: {"speed": 16.274295463111194,
                                "speed_ratio": 0.46497987037460553},
                        ...
                      }
    }
    """
    # retrieve traffic at the specific timestamp (during the nearest interval)
    # print(timestamp)
    dt_target = datetime.fromtimestamp(int(timestamp))
    dt_now = datetime.now()
    time_interval = int(time_interval)

    dt_diff_min = int((dt_target - dt_now).seconds / 60)
    dt_diff_day = int((dt_target.date() - dt_now.date()).days)

    if dt_diff_day >= 0:
        if dt_diff_min <= 120:
            # Less than 2 hours
            return jsonify({"error": "no_data"})
        else:
            # find the nearest interval of the timestamp based on the time_interval size
            interval_idx = get_nearest_interval(dt_target, time_interval)

            data_path = Path(
                GPILB_CACHE_PATH.format(dt_target.strftime('%Y%m%d'),
                                        time_interval, interval_idx))
            if data_path.is_file():
                with open(data_path, 'r') as fp:
                    data = json.load(fp)
            else:
                predict_speed_dict = predict_road_condition.predict_road_condition(
                    dt_target.timestamp(), interval=int(time_interval))
                data = predict_result_helper.get_output_dict_with_less_parameter(
                    predict_speed_dict, dt_target, time_interval)

            return jsonify(data)  # serialize and use JSON headers
    else:
        # Past time, can use existing data
        interval_idx = get_nearest_interval(dt_target, time_interval)
        date_str = dt_target.strftime("%Y%m%d")
        temp_filepath_csv = Path("data/{0}/result/{0}_{1}_min_road.csv".format(
            date_str, time_interval))
        temp_filepath_p = temp_filepath_csv.with_suffix('.p')
        speed_matrix = {}
        if temp_filepath_p.is_file():
            with open(temp_filepath_p, 'rb') as f:
                speed_matrix = pickle.load(f)
        elif temp_filepath_csv.is_file():
            speed_matrix = predict_road_condition.read_speed_matrix_from_file(
                temp_filepath_csv)
        else:
            return jsonify({"error": "no_data"})
        predict_speed_dict = {}
        for way_id, his_speeds in speed_matrix.items():
            predict_speed_dict[way_id] = his_speeds[interval_idx]

        save_filename_list = [
            "way_graph", "way_types", "way_type_avg_speed_limit"
        ]
        temp_map_dates = graph_reader(Path("graph/"), SAVE_TYPE_PICKLE,
                                      save_filename_list)
        way_graph = temp_map_dates[0]
        way_types = temp_map_dates[1]
        way_type_avg_speed_limit = temp_map_dates[2]
        predict_speed_dict = predict_road_condition.estimate_no_data_road_speed_using_BFS(
            predict_speed_dict, way_graph, way_types, way_type_avg_speed_limit)
        data = predict_result_helper.get_output_dict_with_less_parameter(
            predict_speed_dict, dt_target, time_interval)
        return jsonify(data)
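A minimal sketch of consuming the response format documented above; the payload simply mirrors the docstring example with a made-up way id, and reading speed_ratio as the speed relative to the way type's average speed limit is an assumption based on the tables loaded above:

import json

payload = json.loads("""
{
  "generate_timestr": "2021-03-02 19:52:44",
  "generate_timestamp": 1614732764,
  "time_slot_interval": 15,
  "interval_idx": 0,
  "predict_time_range": "2020-07-30 00:00 - 00:14",
  "road_speed": {"123456": {"speed": 16.274295463111194, "speed_ratio": 0.46497987037460553}}
}
""")

for way_id, info in payload["road_speed"].items():
    # speed_ratio presumably relates the predicted speed to the way type's average speed limit.
    print(way_id, "speed:", round(info["speed"], 1), "ratio:", round(info["speed_ratio"], 2))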
Example #9
def predict_road_condition(predict_timestamp=int(datetime.now().timestamp()), interval=5,
                           result_file_path="data/{0}/result/{0}_{1}_min_road.csv",
                           config_history_date=PREDICT_ROAD_CONDITION_CONFIG_HISTORY_DATE,
                           config_history_data_range=PREDICT_ROAD_CONDITION_CONFIG_HISTORY_DATA_RANGE,
                           config_weight=PREDICT_ROAD_CONDITION_CONFIG_WEIGHT):
    """
    This function reads historical data and uses a weighted sum to calculate the bus speed on each road at the
    given time.

    Parameters
    ----------
    predict_timestamp: timestamp
        10-digit timestamp; uses the current time if not provided

    interval: int
        The length of each time interval in minutes. The value should evenly divide 1440 (24 hours * 60 minutes);
        by default it is 5 minutes.

    result_file_path: String
        The path (and format string) of the result .csv files.
        {0} is an 8-digit date_str in yyyyMMdd format.
        {1} is the value of interval.
        By default it uses the project's file format.

    config_history_date: List of int
        This parameter specifies which historical days of data the function uses in the computation. It should be
        a List of int, where each int is the day offset relative to the prediction date, e.g. -1 means yesterday.
        By default it uses a predetermined configuration.

    config_history_data_range: List of int
        This parameter specifies the range and the order in which nearby data replaces missing data. If the value
        is [-1, 1] and the data at index i is missing, the value at index i-1 and then i+1 is tried instead.
        By default it uses a predetermined configuration.

    config_weight: List of float
        This parameter specifies the weight of each day's data when computing the weighted sum (see the sketch
        after this example).
        By default it uses a predetermined configuration.

    Returns
    -------
    predict_speed_dict: Dictionary
        A dictionary that uses way_id as the key and the predicted bus speed on that road at the given time as the
        value. When there is an error, a dictionary with the key "Error" and the error details as the value is
        returned instead.

    """
    # TODO: This function could be implemented using pandas and numpy for better performance (Shiluo)
    # Check input, load data and preparation
    if len(config_history_date) != len(config_weight):
        return -1

    save_filename_list = ["way_graph", "way_types", "way_type_avg_speed_limit"]
    temp_map_dates = graph_reader(Path("graph/"), SAVE_TYPE_PICKLE, save_filename_list)
    way_graph = temp_map_dates[0]
    way_types = temp_map_dates[1]
    way_type_avg_speed_limit = temp_map_dates[2]

    # Find the time of the prediction and the slot index within the day.
    predict_time = datetime.fromtimestamp(predict_timestamp)
    # predict_time_date_str = predict_time.strftime("%Y%m%d")
    interval_idx = math.floor((predict_time.hour * 3600 + predict_time.minute * 60 + predict_time.second) /
                              (interval * 60))

    # Prepare to load history data
    history_data_date_str = []
    for offset in config_history_date:
        history_data_date_str.append((predict_time + timedelta(days=offset)).strftime("%Y%m%d"))

    if FLAG_DEBUG:
        print("Predict time: {}".format(predict_time.strftime("%Y-%m-%d %H:%M:%S")))
        print("History data date_str:{}".format(history_data_date_str))

    # Load history data
    history_speed_matrix_list, config_weight = get_history_speed_matrix_list(history_data_date_str, config_weight,
                                                                             interval, result_file_path)

    if len(history_speed_matrix_list) == 0:
        return {"Error": "No enough data for predict"}

    if FLAG_DEBUG:
        print("{} day(s) load".format(len(history_speed_matrix_list)))

    full_way_id_set, usable_way_id_set = get_way_id_set(history_speed_matrix_list)

    predict_speed_dict = compute_speed_dict(interval, interval_idx, history_speed_matrix_list, full_way_id_set,
                                            usable_way_id_set, config_history_data_range, config_weight,
                                            way_graph, way_types, way_type_avg_speed_limit)

    if FLAG_DEBUG:
        print(show_traffic_speed(predict_speed_dict, predict_timestamp))
        # print(predict_speed_dict)

    return predict_speed_dict
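A minimal sketch of the weighted sum the docstring describes, for a single way at one interval_idx; the numbers are made up, the weights are assumed to be already normalized, and the real compute_speed_dict additionally handles missing slots via config_history_data_range:

# One speed per history day (same way, same interval_idx), paired element-wise with config_weight.
speeds_per_day = [31.0, 28.0, 25.5]
config_weight = [0.5, 0.3, 0.2]

predicted_speed = sum(speed * weight for speed, weight in zip(speeds_per_day, config_weight))
print(round(predicted_speed, 2))  # 29.0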