# Example #1
def find_dua_times(options):
    """
    Get all combinations between start and end reservation edges and calculate
    the travel time between all combinations with duarouter in an empty net.

    :param options: parsed command-line options; reads options.reservations
        (reservation XML file) and options.network (SUMO net file)
    :return: dict mapping "origin_destination" trip ids to travel time (float)
    """
    edge_pair_time = {}
    # exist_ok guards against a leftover directory from a prior aborted run
    os.makedirs('temp_dua', exist_ok=True)

    # collect all origin/destination edges of the reservations
    route_edges = []
    for _, reservation in parse_fast_nested(options.reservations,
                                            "person", "depart", "ride",
                                            ("from", "to", "lines")):
        route_edges.extend((reservation.attr_from, reservation.to))

    # define trips between all unordered edge pairs, in both directions
    with open("temp_dua/dua_file.xml", "w+") as dua_file:
        dua_file.write("<routes>\n")
        for edge_a, edge_b in combinations(set(route_edges), 2):
            for orig, dest in ((edge_a, edge_b), (edge_b, edge_a)):
                dua_file.write(
                    """\t<trip id="%s_%s" depart="0" from="%s" to="%s"/>\n"""  # noqa
                    % (orig, dest, orig, dest))
        dua_file.write("</routes>\n")

    # run duarouter to obtain shortest-path costs in the empty net
    duarouter = checkBinary('duarouter')

    subprocess.call([
        duarouter, "-n", options.network, "--route-files",
        "temp_dua/dua_file.xml", "-o", "temp_dua/dua_output.xml",
        "--ignore-errors", "true", "--no-warnings", "true", "--bulk-routing",
        "true"
    ])

    # parse travel time between edges from the alternatives output
    with open("edges_pair_graph.xml", "w+") as pair_file:
        for trip, route in parse_fast_nested("temp_dua/dua_output.alt.xml",
                                             "vehicle",
                                             "id",
                                             "route",
                                             "cost",
                                             optional=True):
            if route.cost:
                edge_pair_time[trip.id] = float(route.cost)
                pair_file.write('<pair id="%s" cost="%s"/>\n' %
                                (trip.id, float(route.cost)))

    # remove temporary dua files
    shutil.rmtree('temp_dua')

    return edge_pair_time
# Example #2
def import_osm(options, net):
    """
    Imports the routes of the public transport lines from osm.

    Returns a dict mapping ptLine id to
    (name, line, type, direction, edge string).
    """
    if options.repair:
        if options.verbose:
            print("Import and repair osm routes")
        return repair_routes(options, net)

    if options.verbose:
        print("Import osm routes")

    def _node_lonlat(edge_id):
        # geo coordinate of the given edge's origin node
        x, y = net.getEdge(edge_id).getFromNode().getCoord()
        return net.convertXY2LonLat(x, y)

    osm_routes = {}
    for ptline, ptline_route in parse_fast_nested(
            options.osm_routes, "ptLine", ("id", "name", "line", "type"),
            "route", "edges"):
        if ptline.type not in options.modes:
            continue
        if len(ptline_route.edges) <= 2:
            continue
        edge_ids = ptline_route.edges.split(" ")
        line_dir = get_line_dir(_node_lonlat(edge_ids[0]),
                                _node_lonlat(edge_ids[-1]))
        osm_routes[ptline.id] = (ptline.attr_name, ptline.line,
                                 ptline.type, line_dir,
                                 ptline_route.edges)
    return osm_routes
# Example #3
def main(options):
    """
    Read an fcd output file and compute, per vehicle, the entry time into a
    filtered corridor and the time its speed first dropped below
    options.minspeed; print the resulting coordination factor and optionally
    write per-vehicle records to options.fullOutput.
    """
    routes = defaultdict(list)  # vehID -> recorded edges
    minSpeed = defaultdict(lambda: 1000)  # vehID -> minimum observed speed
    active = set()  # vehicles that have passed the first filterRoute edge
    entryTime = {}  # vehID -> time when entering corridor
    delayTime = {
    }  # vehID -> time when vehicle speed first dropped below threshold
    for timestep, vehicle in parse_fast_nested(options.fcdfile, 'timestep',
                                               ['time'], 'vehicle',
                                               ['id', 'speed', 'lane']):
        time = parseTime(timestep.time)
        vehID = vehicle.id
        # derive the edge id by stripping the lane index suffix
        edge = vehicle.lane[0:vehicle.lane.rfind('_')]
        prevEdge = None if len(routes[vehID]) == 0 else routes[vehID][-1]
        if prevEdge != edge:
            if options.exit and prevEdge in options.exit:
                # vehicle has left the filtered corridor
                continue
            routes[vehID].append(edge)
        if vehID not in active:
            if not options.entry or edge in options.entry:
                # vehicle has entered the filtered corridor
                active.add(vehID)
                entryTime[vehID] = time
            else:
                continue
        speed = float(vehicle.speed)
        if speed < minSpeed[vehID]:
            minSpeed[vehID] = speed
            if speed < options.minspeed:
                delayTime[vehID] = time

    vehs = []
    numDelayed = 0

    # keep only vehicles whose recorded route contains every required edge
    for vehID, route in routes.items():
        skip = False
        for required in options.filterRoute:
            if required not in route:
                skip = True
                break
        if not skip:
            if minSpeed[vehID] < options.minspeed:
                numDelayed += 1
                vehs.append((entryTime[vehID], delayTime[vehID], vehID))
            else:
                vehs.append((entryTime[vehID], -1, vehID))

    vehs.sort()
    n = len(vehs)
    # guard against ZeroDivisionError when no vehicle matched the filter
    factor = (n - numDelayed) / float(n) if n else 1.0
    print("n=%s d=%s coordinationFactor=%.2f" % (n, numDelayed, factor))

    if options.fullOutput:
        with open(options.fullOutput, 'w') as outf:
            outf.write("# entryTime delayTime vehID\n")
            for record in vehs:
                outf.write(" ".join(map(str, record)) + "\n")
# Example #4
def main(options):
    """
    Plot per-vehicle trajectories from an fcd output file.

    The plotted x/y quantities are selected by the two-character
    options.ttype string (one character per axis, see typespec), e.g.
    'ts' plots time against speed.
    """
    fig = plt.figure(figsize=(14, 9), dpi=100)
    # 'onpick' is a module-level click handler defined elsewhere in the file
    fig.canvas.mpl_connect('pick_event', onpick)

    # defaults: x = distance (index 2), y = speed (index 1)
    xdata = 2
    ydata = 1
    # type character -> (axis label, index into the per-vehicle data tuple)
    typespec = {
            't' : ('Time', 0),
            's' : ('Speed', 1),
            'd' : ('Distance', 2),
            'a' : ('Acceleration', 3),
            'i' : ('Angle', 4),
            'x' : ('x-Position', 5),
            'y' : ('y-Position', 6),
            }

    if (len(options.ttype) == 2
            and options.ttype[0] in typespec
            and options.ttype[1] in typespec):
        xLabel, xdata = typespec[options.ttype[0]]
        yLabel, ydata = typespec[options.ttype[1]]
        plt.xlabel(xLabel)
        plt.ylabel(yLabel)
        plt.title(options.fcdfile if options.label is None else options.label)
    else:
        sys.exit("unsupported plot type '%s'" % options.ttype)

    routes = defaultdict(list)  # vehID -> recorded edges
    data = defaultdict(lambda: ([], [], [], [], [], [], []))  # vehID -> (times, speeds, distances, accelerations, angles, xPositions, yPositions)
    for timestep, vehicle in parse_fast_nested(options.fcdfile, 'timestep', ['time'],
                                               'vehicle', ['id', 'x', 'y', 'angle', 'speed', 'lane']):
        time = float(timestep.time)
        speed = float(vehicle.speed)
        # defaults apply on a vehicle's first sample (acceleration 0, distance 0)
        prevTime = time
        prevSpeed = speed
        prevDist = 0
        # NOTE: membership test on a defaultdict does not insert a key,
        # so this only fires from the second sample onwards
        if vehicle.id in data:
            prevTime = data[vehicle.id][0][-1]
            prevSpeed = data[vehicle.id][1][-1]
            prevDist = data[vehicle.id][2][-1]
        data[vehicle.id][0].append(time)
        data[vehicle.id][1].append(speed)
        data[vehicle.id][4].append(float(vehicle.angle))
        data[vehicle.id][5].append(float(vehicle.x))
        data[vehicle.id][6].append(float(vehicle.y))
        if prevTime == time:
            # first sample (or zero time step): avoid division by zero
            data[vehicle.id][3].append(0)
        else:
            data[vehicle.id][3].append((speed - prevSpeed) / (time - prevTime))

        # integrate distance; ballistic mode uses the trapezoidal average speed
        if options.ballistic:
            avgSpeed = (speed + prevSpeed) / 2
        else:
            avgSpeed = speed
        data[vehicle.id][2].append(prevDist + (time - prevTime) * avgSpeed)
        # derive the edge id by stripping the lane index suffix
        edge = vehicle.lane[0:vehicle.lane.rfind('_')]
        if len(routes[vehicle.id]) == 0 or routes[vehicle.id][-1] != edge:
            routes[vehicle.id].append(edge)

    def line_picker(line, mouseevent):
        # report a pick when the mouse is within options.pickDist of any
        # point of the line (in data coordinates)
        if mouseevent.xdata is None:
            return False, dict()
        #minxy = None
        #mindist = 10000
        for x, y in zip(line.get_xdata(), line.get_ydata()):
            dist = math.sqrt((x - mouseevent.xdata) ** 2 + (y - mouseevent.ydata) ** 2)
            if dist < options.pickDist:
                return True, dict(label=line.get_label())
            #else:
            #    if dist < mindist:
            #        print("   ", x,y, dist, (x - mouseevent.xdata) ** 2, (y - mouseevent.ydata) ** 2)
            #        mindist = dist
            #        minxy = (x, y)
        #print(mouseevent.xdata, mouseevent.ydata, minxy, dist,
        #        line.get_label())
        return False, dict()

    # axis extremes, seeded with module-level sentinels uMax/uMin
    minY = uMax
    maxY = uMin
    minX = uMax
    maxX = uMin

    for vehID, d in data.items():
        # skip vehicles whose route misses a required edge
        if options.filterRoute is not None:
            skip = False
            route = routes[vehID]
            for required in options.filterRoute:
                if required not in route:
                    skip = True
                    break
            if skip:
                continue
        # mirror distances for vehicles driving roughly in the given
        # direction (average angle within 45 degrees of the option value)
        if options.invertDistanceAngle is not None:
            avgAngle = sum(d[4]) / len(d[4])
            if abs(avgAngle - options.invertDistanceAngle) < 45:
                maxDist = d[2][-1]
                for i,v in enumerate(d[2]):
                    d[2][i] = maxDist - v

        minY = min(minY, min(d[ydata]))
        maxY = max(maxY, max(d[ydata]))
        minX = min(minX, min(d[xdata]))
        maxX = max(maxX, max(d[xdata]))

        plt.plot(d[xdata], d[ydata], picker=line_picker, label=vehID)
    if options.invertYAxis:
        # swap the y limits to flip the axis
        plt.axis([minX, maxX, maxY, minY])


    plt.savefig(options.output)
    if options.csv_output is not None:
        write_csv(data, options.csv_output)
    if options.show:
        plt.show()
# Example #5
def repair_routes(options, net):
    """
    Runs duarouter to repair the given osm routes.

    :param options: parsed command-line options; reads options.osm_routes,
        options.modes, options.network, options.dua_repair_output and
        options.verbose
    :param net: the sumolib network the routes must fit into
    :return: dict mapping ptLine id to
        (name, line, type, direction, repaired edge string)
    """
    osm_routes = {}
    # write dua input file
    with io.open("dua_input.xml", 'w+', encoding="utf8") as dua_file:
        dua_file.write(u"<routes>\n")
        for key, value in OSM2SUMO_MODES.items():
            dua_file.write(u'    <vType id="%s" vClass="%s"/>\n' %
                           (key, value))
        num_read = discard_type = discard_net = 0
        sumo_edges = set([sumo_edge.getID() for sumo_edge in net.getEdges()])
        for ptline, ptline_route in parse_fast_nested(
                options.osm_routes, "ptLine", ("id", "name", "line", "type"),
                "route", "edges"):
            num_read += 1
            if ptline.type not in options.modes:
                discard_type += 1
                continue

            # keep only edges that exist in the SUMO net
            route_edges = [
                edge for edge in ptline_route.edges.split()
                if edge in sumo_edges
            ]
            if not route_edges:
                discard_net += 1
                continue

            # transform ptLine origin and destination to geo coordinates
            x, y = net.getEdge(route_edges[0]).getFromNode().getCoord()
            line_orig = net.convertXY2LonLat(x, y)
            x, y = net.getEdge(route_edges[-1]).getFromNode().getCoord()
            line_dest = net.convertXY2LonLat(x, y)

            # find ptLine direction
            line_dir = get_line_dir(line_orig, line_dest)

            osm_routes[ptline.id] = (ptline.attr_name, ptline.line,
                                     ptline.type, line_dir)
            dua_file.write(
                u'    <trip id="%s" type="%s" depart="0" via="%s"/>\n' %
                (ptline.id, ptline.type, (" ").join(route_edges)))
        dua_file.write(u"</routes>\n")

    if options.verbose:
        print(
            "%s routes read, discarded for wrong mode: %s, outside of net %s, keeping %s"
            % (num_read, discard_type, discard_net, len(osm_routes)))
    # run duarouter with --repair to fix disconnected routes
    subprocess.check_call([
        sumolib.checkBinary('duarouter'), '-n', options.network,
        '--route-files', 'dua_input.xml', '--repair', '-o', 'dua_output.xml',
        '--ignore-errors', '--error-log', options.dua_repair_output
    ])

    # parse repaired routes and append the edge string to each tuple
    n_routes = len(osm_routes)
    for ptline, ptline_route in parse_fast_nested("dua_output.xml", "vehicle",
                                                  "id", "route", "edges"):
        osm_routes[ptline.id] += (ptline_route.edges, )

    # remove dua files
    os.remove("dua_input.xml")
    os.remove("dua_output.xml")
    os.remove("dua_output.alt.xml")

    # drop routes duarouter could not repair (no edge string was appended,
    # so their tuple still has fewer than 5 elements)
    invalid_lines = [line for line, value in osm_routes.items()
                     if len(value) < 5]
    for line in invalid_lines:
        del osm_routes[line]

    if n_routes != len(osm_routes):
        print(
            "%s of %s routes have been imported, see '%s' for more information."
            % (len(osm_routes), n_routes, options.dua_repair_output))

    return osm_routes
# Example #6
def main(options):

    # ----------------------- Import SUMO net ---------------------------------

    print("Import net")
    net = sumolib.net.readNet(options.network)

    # ----------------------- gtfs, osm and sumo modes ------------------------
    sumo_vClass = {
        'bus': 'bus',
        'train': 'rail',
        'tram': 'tram',
        'subway': 'rail_urban',
        'ferry': 'ship'
    }

    gtfs_modes = {
        # https://developers.google.com/transit/gtfs/reference/#routestxt
        '0': 'tram',
        '1': 'subway',
        '2': 'train',
        '3': 'bus',
        '4': 'ferry',
        # '5':  'cableTram',
        # '6':  'aerialLift',
        # '7':  'funicular',
        # https://developers.google.com/transit/gtfs/reference/extended-route-types
        '100': 'train',  # DB
        '109': 'train',  # S-Bahn
        '400': 'subway',  # U-Bahn
        '1000': 'ferry',  # Faehre
        # additional modes used in Hamburg
        '402': 'subway',  # U-Bahn
        '1200': 'ferry',  # Faehre
        # modes used by hafas
        's': 'train',
        'RE': 'train',
        'RB': 'train',
        'IXB': 'train',  # tbd
        'ICE': 'train',
        'IC': 'train',
        'IRX': 'train',  # tbd
        'EC': 'train',
        'NJ': 'train',  # tbd
        'RHI': 'train',  # tbd
        'DPN': 'train',  # tbd
        'SCH': 'train',  # tbd
        'Bsv': 'train',  # tbd
        'KAT': 'train',  # tbd
        'AIR': 'train',  # tbd
        'DPS': 'train',  # tbd
        'lt': 'train',  # tbd
        'BUS': 'bus',  # tbd
        'Str': 'tram',  # tbd
        'DPF': 'train',  # tbd
    }
    # https://developers.google.com/transit/gtfs/reference/extended-route-types
    for i in range(700, 717):
        gtfs_modes[str(i)] = 'bus'
    for i in range(900, 907):
        gtfs_modes[str(i)] = 'tram'

    # -----------------------  Import route-paths from OSM --------------------

    if options.repair:
        print("Import and repair osm routes")
        osm_routes = repair_routes(options, net, sumo_vClass)
    else:
        print("Import osm routes")
        osm_routes = {}
        for ptline, ptline_route in parse_fast_nested(
                options.osm_routes,
                "ptLine",
            ("id", "name", "line", "type"),  # noqa
                "route",
                "edges"):
            if ptline.type not in options.pt_types:
                continue
            if len(ptline_route.edges) > 2:
                line_orig = ptline_route.edges.split(" ")[0]
                x, y = net.getEdge(line_orig).getFromNode().getCoord()
                line_orig = net.convertXY2LonLat(x, y)

                line_dest = ptline_route.edges.split(" ")[-1]
                x, y = net.getEdge(line_dest).getFromNode().getCoord()
                line_dest = net.convertXY2LonLat(x, y)

                line_dir = get_line_dir(line_orig, line_dest)

                osm_routes[ptline.id] = (ptline.attr_name, ptline.line,
                                         ptline.type, line_dir,
                                         ptline_route.edges)

    # -----------------------  Import GTFS data -------------------------------
    print("Import gtfs data")

    gtfsZip = zipfile.ZipFile(options.gtfs)
    routes = pd.read_csv(gtfsZip.open('routes.txt'), dtype=str)
    stops = pd.read_csv(gtfsZip.open('stops.txt'), dtype=str)
    stop_times = pd.read_csv(gtfsZip.open('stop_times.txt'), dtype=str)
    trips = pd.read_csv(gtfsZip.open('trips.txt'), dtype=str)
    shapes = pd.read_csv(gtfsZip.open('shapes.txt'), dtype=str)
    calendar_dates = pd.read_csv(gtfsZip.open('calendar_dates.txt'), dtype=str)
    calendar = pd.read_csv(gtfsZip.open('calendar.txt'), dtype=str)

    # change col types
    stops['stop_lat'] = stops['stop_lat'].astype(float)
    stops['stop_lon'] = stops['stop_lon'].astype(float)
    shapes['shape_pt_lat'] = shapes['shape_pt_lat'].astype(float)
    shapes['shape_pt_lon'] = shapes['shape_pt_lon'].astype(float)
    shapes['shape_pt_sequence'] = shapes['shape_pt_sequence'].astype(float)
    stop_times['stop_sequence'] = stop_times['stop_sequence'].astype(float)

    # filter trips for a representative date
    # from gtfs2fcd.py
    weekday = 'monday tuesday wednesday thursday friday saturday sunday'.split(
    )[datetime.datetime.strptime(options.date, "%Y%m%d").weekday()]
    removed = calendar_dates[(calendar_dates.date == options.date)
                             & (calendar_dates.exception_type == '2')]
    services = calendar[(calendar.start_date <= options.date)
                        & (calendar.end_date >= options.date) &
                        (calendar[weekday] == '1') &
                        (~calendar.service_id.isin(removed.service_id))]
    added = calendar_dates[(calendar_dates.date == options.date)
                           & (calendar_dates.exception_type == '1')]
    gtfs_data = trips[trips.service_id.isin(services.service_id)
                      | trips.service_id.isin(added.service_id)]

    # merge gtfs data from stop_times / trips / routes / stops
    gtfs_data = pd.merge(
        pd.merge(
            pd.merge(gtfs_data, stop_times, on='trip_id'),  # noqa
            stops,
            on='stop_id'),
        routes,
        on='route_id')

    # filter given pt types
    filter_gtfs_modes = [
        key for key, value in gtfs_modes.items() if value in options.pt_types
    ]
    gtfs_data = gtfs_data[gtfs_data['route_type'].isin(filter_gtfs_modes)]

    # Filter relevant information
    gtfs_data = gtfs_data[[
        'route_id', 'shape_id', 'trip_id', 'stop_id', 'route_short_name',
        'route_type', 'trip_headsign', 'direction_id', 'stop_name', 'stop_lat',
        'stop_lon', 'stop_sequence', 'arrival_time', 'departure_time'
    ]]

    # replace characters
    gtfs_data['stop_name'] = gtfs_data['stop_name'].str.replace(
        '[/|\'\";,!<>&*?\t\n\r]', ' ')  # noqa
    gtfs_data['trip_headsign'] = gtfs_data['trip_headsign'].str.replace(
        '[/|\'\";,!<>&*?\t\n\r]', ' ')  # noqa

    # filter data inside SUMO net by stop location and shape
    gtfs_data = gtfs_data[(options.region[1] <= gtfs_data['stop_lat'])
                          & (gtfs_data['stop_lat'] <= options.region[3]) &
                          (options.region[0] <= gtfs_data['stop_lon']) &
                          (gtfs_data['stop_lon'] <= options.region[2])]
    shapes = shapes[(options.region[1] <= shapes['shape_pt_lat'])
                    & (shapes['shape_pt_lat'] <= options.region[3]) &
                    (options.region[0] <= shapes['shape_pt_lon']) &
                    (shapes['shape_pt_lon'] <= options.region[2])]

    # times to sec to enable sorting
    trip_list = gtfs_data[gtfs_data["stop_sequence"] == 0]
    trip_list['departure'] = pd.to_timedelta(trip_list['arrival_time'])

    # add column for unambiguous stop_id and sumo edge
    gtfs_data["stop_item_id"] = None
    gtfs_data["edge_id"] = None

    # search main and secondary shapes for each pt line (route and direction)
    filter_stops = gtfs_data.groupby(['route_id', 'direction_id',
                                      'shape_id']).agg({
                                          'stop_sequence': 'max'
                                      }).reset_index()  # noqa
    group_shapes = filter_stops.groupby([
        'route_id', 'direction_id'
    ]).shape_id.aggregate(lambda x: set(x)).reset_index()  # noqa
    filter_stops = filter_stops.loc[filter_stops.groupby(
        ['route_id', 'direction_id'])['stop_sequence'].idxmax()][[  # noqa
            'route_id', 'shape_id', 'direction_id'
        ]]
    filter_stops = pd.merge(filter_stops,
                            group_shapes,
                            on=['route_id', 'direction_id'])  # noqa

    # create dict with shape and main shape
    shapes_dict = {}
    for row in filter_stops.itertuples():
        for sec_shape in row.shape_id_y:
            shapes_dict[sec_shape] = row.shape_id_x

    # create data frame with main shape for stop location
    filter_stops = gtfs_data[gtfs_data['shape_id'].isin(
        filter_stops.shape_id_x)]  # noqa
    filter_stops = filter_stops[[
        'route_id', 'shape_id', 'stop_id', 'route_short_name', 'route_type',
        'trip_headsign', 'direction_id', 'stop_name', 'stop_lat', 'stop_lon'
    ]].drop_duplicates()

    # -----------------------  Define Stops and Routes ------------------------
    print("Map stops and routes")

    map_routes = {}
    map_stops = {}
    # gtfs stops are grouped (not in exact geo position), so a large radius
    # for mapping is needed
    radius = 200

    missing_stops = []
    missing_lines = []

    for row in filter_stops.itertuples():
        # check if route already discarded
        if row.shape_id in missing_lines:
            continue

        # check if gtfs route already mapped to osm route
        if not map_routes.get(row.shape_id, False):
            # if route not mapped, find the osm route for shape id
            pt_line_name = row.route_short_name
            pt_type = gtfs_modes[row.route_type]

            # get shape definition and define pt direction
            aux_shapes = shapes[shapes['shape_id'] == row.shape_id]
            pt_orig = aux_shapes[aux_shapes.shape_pt_sequence ==
                                 aux_shapes.shape_pt_sequence.min()]
            pt_dest = aux_shapes[aux_shapes.shape_pt_sequence ==
                                 aux_shapes.shape_pt_sequence.max()]
            line_dir = get_line_dir(
                (pt_orig.shape_pt_lon, pt_orig.shape_pt_lat),
                (pt_dest.shape_pt_lon, pt_dest.shape_pt_lat))

            # get osm lines with same route name and pt type
            osm_lines = [
                ptline_id for ptline_id, value in osm_routes.items()
                if value[1] == pt_line_name and value[2] == pt_type
            ]
            if len(osm_lines) > 1:
                # get the direction for the found routes and take the route
                # with lower difference
                aux_dif = [
                    abs(line_dir - osm_routes[ptline_id][3])
                    for ptline_id in osm_lines
                ]
                osm_id = osm_lines[aux_dif.index(min(aux_dif))]

                # add mapped osm route to dict
                map_routes[row.shape_id] = (osm_id,
                                            osm_routes[osm_id][4].split(" ")
                                            )  # noqa
            else:
                # no osm route found, do not map stops of route
                missing_lines.append((pt_line_name, row.trip_headsign))
                continue

        # check if stop already mapped
        stop_mapped = [
            stop_item_id for stop_item_id in map_stops.keys()
            if stop_item_id.split("_")[0] == row.stop_id
        ]
        stop_item_id = 0  # for pt stops with different stop points

        # set stop's type, class and length
        pt_type = gtfs_modes[row.route_type]
        pt_class = sumo_vClass[pt_type]
        if pt_class == "bus":
            stop_length = options.bus_stop_length
        elif pt_class == "tram":
            stop_length = options.tram_stop_length
        else:
            stop_length = options.train_stop_length

        if stop_mapped:
            # get maximum item for stop
            stop_item_id = [int(stop.split("_")[1]) for stop in stop_mapped]
            stop_item_id = max(stop_item_id) + 1

            # check if the stop is already define
            for stop in stop_mapped:
                # for item of mapped stop
                stop_edge = map_stops[stop][1].split("_")[0]
                if stop_edge in map_routes[row.shape_id][1]:
                    # if edge in route, the stops are the same
                    # add the shape id to the stop
                    map_stops[stop][5].append(row.shape_id)
                    # add to data frame
                    shape_list = [
                        sec_shape for sec_shape, main_shape in
                        shapes_dict.items()  # noqa
                        if main_shape == row.shape_id
                    ]
                    gtfs_data.loc[(gtfs_data["stop_id"] == row.stop_id) &
                                  (gtfs_data["shape_id"].isin(shape_list)),
                                  "stop_item_id"] = stop
                    gtfs_data.loc[(gtfs_data["stop_id"] == row.stop_id) &
                                  (gtfs_data["shape_id"].isin(shape_list)),
                                  "edge_id"] = stop_edge

                    stop_mapped = True
                    break
                else:
                    # check if the wrong edge was adopted
                    # get edges near stop location
                    x, y = net.convertLonLat2XY(row.stop_lon, row.stop_lat)
                    edges = net.getNeighboringEdges(
                        x, y, radius, includeJunctions=False)  # noqa
                    edges.sort(key=lambda x: x[1])  # sort by distance

                    # interseccion between route edges of all shapes in stop
                    edge_inter = set(map_routes[row.shape_id][1])
                    for shape_item in map_stops[stop][5]:  # shapes id of stop
                        edge_inter = set(edge_inter) & set(
                            map_routes[shape_item][1])  # noqa

                    # find edge
                    new_edge = [
                        edge[0] for edge in edges
                        if edge[0].getID() in edge_inter
                        and edge[0].getLength() >= stop_length * 1.20
                    ]  # filter length
                    if not new_edge:
                        new_edge = [
                            edge[0] for edge in edges
                            if edge[0].getID() in edge_inter
                        ]
                    if not new_edge:
                        continue  # stops are not same

                    # if the edge is in all routes
                    for lane in new_edge[0].getLanes():
                        # update the lane id, start and end and add shape
                        if lane.allows(pt_class):
                            lane_id = lane.getID()
                            pos = int(lane.getClosestLanePosAndDist((x, y))[0])
                            start = max(0, pos - stop_length)
                            end = min(start + stop_length, lane.getLength())
                            map_stops[stop][1:4] = [lane_id, start, end]
                            map_stops[stop][5].append(row.shape_id)
                            break
                    # update edge in data frame
                    gtfs_data.loc[gtfs_data["stop_item_id"] == stop,
                                  "edge_id"] = new_edge[0].getID()  # noqa
                    # add to data frame
                    shape_list = [
                        sec_shape for sec_shape, main_shape in
                        shapes_dict.items()  # noqa
                        if main_shape == row.shape_id
                    ]
                    gtfs_data.loc[(gtfs_data["stop_id"] == row.stop_id) &
                                  (gtfs_data["shape_id"].isin(shape_list)),
                                  "stop_item_id"] = stop
                    gtfs_data.loc[(gtfs_data["stop_id"] == row.stop_id) &
                                  (gtfs_data["shape_id"].isin(shape_list)),
                                  "edge_id"] = new_edge[0].getID()

                    stop_mapped = True
                    break

            if stop_mapped is not True:
                stop_mapped = None  # if stop not the same, search stop

        # if stop not mapped
        if not stop_mapped:
            # get edges near stop location
            x, y = net.convertLonLat2XY(row.stop_lon, row.stop_lat)
            edges = net.getNeighboringEdges(x,
                                            y,
                                            radius,
                                            includeJunctions=False)  # noqa
            # filter edges by length
            edges = [
                edge for edge in edges
                if edge[0].getLength() >= stop_length * 1.20
            ]
            edges.sort(key=lambda x: x[1])  # sort by distance

            for edge in edges:
                if not edge[0].getID() in map_routes[row.shape_id][1]:
                    # if edge not in pt line route
                    continue

                for lane in edge[0].getLanes():
                    if not lane.allows(pt_class):
                        continue
                    lane_id = lane.getID()
                    pos = int(lane.getClosestLanePosAndDist((x, y))[0])
                    start = max(0, pos - stop_length)
                    end = min(start + stop_length, lane.getLength())
                    stop_item_id = "%s_%s" % (row.stop_id, stop_item_id)
                    map_stops[stop_item_id] = [
                        row.stop_name, lane_id, start, end, pt_type,
                        [row.shape_id]
                    ]
                    # add data to data frame
                    shape_list = [
                        sec_shape for sec_shape, main_shape in
                        shapes_dict.items()  # noqa
                        if main_shape == row.shape_id
                    ]
                    gtfs_data.loc[(gtfs_data["stop_id"] == row.stop_id) &
                                  (gtfs_data["shape_id"].isin(shape_list)),
                                  "stop_item_id"] = stop_item_id
                    gtfs_data.loc[(gtfs_data["stop_id"] == row.stop_id) &
                                  (gtfs_data["shape_id"].isin(shape_list)),
                                  "edge_id"] = edge[0].getID()

                    stop_mapped = True
                    break
                break

        # if stop not mapped, add to missing stops
        if not stop_mapped:
            missing_stops.append(
                (row.stop_id, row.stop_name, row.route_short_name))

    # -----------------------   Write Stops Output ----------------------------

    print("Generates stops output")

    stop_output = "gtfs_stops.add.xml"
    with open(stop_output, 'w', encoding="utf8") as output_file:
        sumolib.xml.writeHeader(output_file, stop_output, "additional")
        for stop, value in map_stops.items():
            name, lane, start_pos, end_pos, v_type = value[:5]
            if v_type == "bus":
                output_file.write(
                    '    <busStop id="%s" lane="%s" startPos="%s" endPos="%s" name="%s" friendlyPos="true"/>\n'
                    %  # noqa
                    (stop, lane, start_pos, end_pos, name))
            else:
                # from gtfs2pt.py
                output_file.write(
                    '    <trainStop id="%s" lane="%s" startPos="%s" endPos="%s" name="%s" friendlyPos="true">\n'
                    %  # noqa
                    (stop, lane, start_pos, end_pos, name))

                ap = sumolib.geomhelper.positionAtShapeOffset(
                    net.getLane(lane).getShape(), start_pos)  # noqa
                numAccess = 0
                for accessEdge, _ in sorted(net.getNeighboringEdges(*ap,
                                                                    r=100),
                                            key=lambda i: i[1]):  # noqa
                    if accessEdge.getID() != stop.split("_")[
                            0] and accessEdge.allows("pedestrian"):  # noqa
                        lane_id = [
                            lane.getID() for lane in accessEdge.getLanes()
                            if lane.allows("pedestrian")
                        ][0]  # noqa
                        _, accessPos, accessDist = accessEdge.getClosestLanePosDist(
                            ap)  # noqa
                        output_file.write((
                            '        <access friendlyPos="true" lane="%s" pos="%s" length="%s"/>\n'
                        ) %  # noqa
                                          (lane_id, int(accessPos),
                                           1.5 * int(accessDist)))  # noqa
                        numAccess += 1
                        if numAccess == 5:
                            break
                output_file.write('    </trainStop>\n')
        output_file.write('</additional>\n')

    print("Generates routes output")

    sequence_errors = []
    route_output = "gtfs_ptline.rou.xml"

    with open(route_output, 'w', encoding="utf8") as output_file:
        sumolib.xml.writeHeader(output_file, route_output, "routes")
        for osm_type, sumo_class in sumo_vClass.items():
            output_file.write('    <vType id="%s" vClass="%s"/>\n' %
                              (osm_type, sumo_class))  # noqa

        for row in trip_list.sort_values("departure").itertuples():

            main_shape = shapes_dict.get(row.shape_id, None)
            if not map_routes.get(main_shape, None):
                # if route not mapped
                continue

            pt_type = gtfs_modes[row.route_type]
            edges_list = map_routes[main_shape][1]
            stop_list = gtfs_data[gtfs_data["trip_id"] ==
                                  row.trip_id].sort_values("stop_sequence")
            stop_index = [
                edges_list.index(stop.edge_id)
                for stop in stop_list.itertuples()
                if stop.edge_id in edges_list
            ]

            if len(set(stop_index)) < options.min_stops:
                # Not enough stops mapped
                continue
            veh_attr = (row.route_short_name, row.trip_id, row.route_id,
                        row.direction_id, row.arrival_time, min(stop_index),
                        max(stop_index), pt_type, row.trip_headsign)
            output_file.write(
                '    <vehicle id="%s_%s" line="%s_%s" depart="%s" departEdge="%s" arrivalEdge="%s" type="%s"><!--%s-->\n'  # noqa
                % veh_attr)
            output_file.write('        <route edges="%s"/>\n' %
                              (" ".join(edges_list)))  # noqa

            check_seq = -1
            for stop in stop_list.itertuples():
                if not stop.stop_item_id:
                    # if stop not mapped
                    continue
                stop_index = edges_list.index(stop.edge_id)
                if stop_index > check_seq:
                    check_seq = stop_index
                    stop_attr = (stop.stop_item_id, stop.arrival_time,
                                 options.duration, stop.departure_time,
                                 stop.stop_name)
                    output_file.write(
                        '        <stop busStop="%s" arrival="%s" duration="%s" until="%s"/><!--%s-->\n'  # noqa
                        % stop_attr)
                elif stop_index < check_seq:
                    # stop not downstream
                    sequence_errors.append(
                        (stop.stop_item_id, row.route_short_name,
                         row.trip_headsign, stop.trip_id))

            output_file.write('    </vehicle>\n')
        output_file.write('</routes>\n')

    # -----------------------   Save missing data ------------------
    if any([missing_stops, missing_lines, sequence_errors]):
        print(
            "Not all given gtfs elements have been mapped, see 'gtfs_missing.xml' for more information"
        )  # noqa
        with open("gtfs_missing.xml", 'w', encoding="utf8") as output_file:
            output_file.write('<missingElements>\n')
            for stop in set(missing_stops):
                output_file.write(
                    '    <stop id="%s" name="%s" ptLine="%s"/>\n' % stop)
            for line in set(missing_lines):
                output_file.write(
                    '    <ptLine id="%s" trip_headsign="%s"/>\n' % line)
            for stop in set(sequence_errors):
                output_file.write(
                    '    <stopSequence stop_id="%s" ptLine="%s" trip_headsign="%s" trip_id="%s"/>\n'
                    % stop)  # noqa
            output_file.write('</missingElements>\n')
Пример #7
0
def repair_routes(options, net, sumo_vClass):
    """Use duarouter to repair the given osm routes.

    Writes a trip file from the ptLines in options.osm_routes, runs duarouter
    with --repair against options.network and returns a dict mapping
    ptLine id -> (name, line, type, direction, repaired edge string).
    ptLines whose route cannot be matched to the network are dropped and
    reported via 'invalid_osm_routes.txt'.
    """
    osm_routes = {}
    # write dua input file
    with open("dua_input.xml", 'w+', encoding="utf8") as dua_file:
        dua_file.write("<routes>\n")
        for key, value in sumo_vClass.items():
            dua_file.write('\t<vType id="%s" vClass="%s"/>\n' % (key, value))

        # use a set: every route edge is membership-tested below, and a list
        # would make each test O(number of network edges)
        sumo_edges = {sumo_edge.getID() for sumo_edge in net.getEdges()}
        for ptline, ptline_route in parse_fast_nested(
                options.osm_routes,
                "ptLine",
            ("id", "name", "line", "type"),  # noqa
                "route",
                "edges"):
            if ptline.type not in options.pt_types:
                continue

            route_edges = ptline_route.edges.split(" ")

            # search ptLine origin: first route edge included in the sumo net
            index = 0
            line_orig = route_edges[index]
            while line_orig not in sumo_edges and index + 1 < len(route_edges):
                index += 1
                line_orig = route_edges[index]
            if line_orig not in sumo_edges:
                # if no edge found, discard ptLine
                continue
            # adapt osm route to sumo network
            route_edges = route_edges[index:]

            # search ptLine destination: last route edge included in the net.
            # Bug fix vs. the original: the loop condition was inverted
            # (it never iterated) and the body updated line_orig instead of
            # line_dest, so the search could not advance.
            index = -1
            line_dest = route_edges[index]
            while line_dest not in sumo_edges and index - 1 >= -len(route_edges):
                index -= 1
                line_dest = route_edges[index]
            if line_dest not in sumo_edges:
                # if no edges found, discard ptLine
                continue
            # keep edges up to and including the found destination
            # (the original slice [:index - 1] always dropped the valid
            # destination edge as well)
            if index < -1:
                route_edges = route_edges[:index + 1]

            # consider only edges in sumo network
            route_edges = [edge for edge in route_edges if edge in sumo_edges]
            if not route_edges:
                # if no edges found, discard ptLine
                continue

            # transform ptLine origin and destination to geo coordinates
            # NOTE(review): the destination also uses getFromNode() — confirm
            # this approximation is intended for the direction heuristic
            x, y = net.getEdge(line_orig).getFromNode().getCoord()
            line_orig = net.convertXY2LonLat(x, y)
            x, y = net.getEdge(line_dest).getFromNode().getCoord()
            line_dest = net.convertXY2LonLat(x, y)

            # find ptLine direction
            line_dir = get_line_dir(line_orig, line_dest)

            osm_routes[ptline.id] = (ptline.attr_name, ptline.line,
                                     ptline.type, line_dir)
            dua_file.write(
                """\t<trip id="%s" type="%s" depart="0" via="%s"/>\n"""
                %  # noqa
                (ptline.id, ptline.type, " ".join(route_edges)))
        dua_file.write("</routes>\n")

    # run duarouter
    run_dua = subprocess.call([
        sumolib.checkBinary('duarouter'), '-n', options.network,
        '--route-files', 'dua_input.xml', '--repair', '-o', 'dua_output.xml',
        '--ignore-errors', '--error-log', 'invalid_osm_routes.txt'
    ])
    if run_dua == 1:
        # exit the program
        sys.exit(
            "Traying to repair OSM routes failed. Duarouter quits with error, see 'invalid_osm_routes.txt'"
        )  # noqa

    # parse repaired routes
    n_routes = len(osm_routes)

    for ptline, ptline_route in parse_fast_nested("dua_output.xml", "vehicle",
                                                  "id", "route", "edges"):
        # NOTE(review): edges is a string here; ">2" filters out trivially
        # short edge lists — confirm this threshold is intentional
        if len(ptline_route.edges) > 2:
            osm_routes[ptline.id] += (ptline_route.edges, )

    # remove dua files
    os.remove("dua_input.xml")
    os.remove("dua_output.xml")
    os.remove("dua_output.alt.xml")

    # remove routes that did not get a repaired edge string (tuple length < 5)
    for line in list(osm_routes):
        if len(osm_routes[line]) < 5:
            del osm_routes[line]

    if n_routes != len(osm_routes):
        print(
            "Not all given routes have been imported, see 'invalid_osm_routes.txt' for more information"
        )  # noqa

    return osm_routes
Пример #8
0
def main():
    """Score routes by implausibility and report the implausible ones.

    Routes are read from options.routeFiles and compared against the fastest
    route computed by duarouter.  Each route gets an implausibility score from
    a weighted combination of air-distance ratio, detour and detour ratio;
    routes above options.threshold (or containing edge/node loops) are written
    out as restrictions, xml records and visualization polygons.
    """
    options = get_options()
    if options.verbose:
        print("parsing network from", options.network)
    net = readNet(options.network, withInternal=True)
    read = 0
    routeInfos = {}  # id-> RouteInfo
    skipped = set()
    for routeFile in options.routeFiles:
        if options.verbose:
            print("parsing routes from", routeFile)
        idx = 0
        if options.standalone:
            for idx, route in enumerate(parse(routeFile, 'route')):
                if options.verbose and idx > 0 and idx % 100000 == 0:
                    print(idx, "routes read")
                addOrSkip(routeInfos, skipped, route.id, route, options.min_edges)
        else:
            if options.heterogeneous:
                for idx, vehicle in enumerate(parse(routeFile, 'vehicle')):
                    if options.verbose and idx > 0 and idx % 100000 == 0:
                        print(idx, "vehicles read")
                    addOrSkip(routeInfos, skipped, vehicle.id, vehicle.route[0], options.min_edges)
            else:
                # fast nested parsing yields one entry per <route> child;
                # only the first route of each vehicle is kept
                prev = (None, None)
                for vehicle, route in parse_fast_nested(routeFile, 'vehicle', 'id', 'route', 'edges'):
                    if prev[0] != vehicle.id:
                        if options.verbose and idx > 0 and idx % 500000 == 0:
                            print(idx, "vehicles read")
                        if prev[0] is not None:
                            addOrSkip(routeInfos, skipped, prev[0], prev[1], options.min_edges)
                        prev = (vehicle.id, route)
                        idx += 1
                if prev[0] is not None:
                    addOrSkip(routeInfos, skipped, prev[0], prev[1], options.min_edges)
        read += idx
    if options.verbose:
        print(read, "routes read", len(skipped), "short routes skipped")

    if options.verbose:
        print("calculating air distance and checking loops")
    for idx, ri in enumerate(routeInfos.values()):
        if options.verbose and idx > 0 and idx % 100000 == 0:
            print(idx, "routes checked")
        calcDistAndLoops(ri, net, options)

    prefix = os.path.commonprefix(options.routeFiles)
    duarouterOutput = prefix + '.rerouted.rou.xml'
    duarouterAltOutput = prefix + '.rerouted.rou.alt.xml'
    if os.path.exists(duarouterAltOutput) and options.reuse_routing:
        if options.verbose:
            print("reusing old duarouter file", duarouterAltOutput)
    else:
        if options.standalone:
            duarouterInput = prefix
            # generate suitable input file for duarouter
            duarouterInput += ".vehRoutes.xml"
            with open(duarouterInput, 'w') as outf:
                outf.write('<routes>\n')
                for rID, rInfo in routeInfos.items():
                    outf.write('    <vehicle id="%s" depart="0">\n' % rID)
                    outf.write('        <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                    outf.write('    </vehicle>\n')
                outf.write('</routes>\n')
        else:
            duarouterInput = ",".join(options.routeFiles)

        command = [sumolib.checkBinary('duarouter'), '-n', options.network,
                   '-r', duarouterInput, '-o', duarouterOutput,
                   '--no-step-log', '--routing-threads', str(options.threads),
                   '--routing-algorithm', 'astar', '--aggregate-warnings',  '1']
        if options.verbose:
            command += ["-v"]
            print("calling duarouter:", " ".join(command))
        subprocess.call(command)

    # compare each original route against the duarouter alternative
    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        if vehicle.id in skipped:
            continue
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            # the original route already is the fastest one
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert(routeAlts[0].edges.split() == routeInfos[vehicle.id].edges)
            routeInfos[vehicle.id].shortest_path_distance = sumolib.route.getLength(net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts

    implausible = []
    allRoutesStats = Statistics("overall implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        # weighted implausibility score; the min_dist/min_air_dist terms only
        # contribute when the route is shorter than the respective minimum
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold or ri.edgeLoop or ri.nodeLoop:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)

    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))

    # write xml output
    if options.xmlOutput is not None:
        with open(options.xmlOutput, 'w') as outf:
            sumolib.writeXMLHeader(outf, "$Id$", options=options)  # noqa
            outf.write('<implausibleRoutes>\n')
            for score, rID, ri in sorted(implausible):
                edges = " ".join(ri.edges)
                outf.write('    <route id="%s" edges="%s" score="%s"/>\n' % (
                    rID, edges, score))
            outf.write('</implausibleRoutes>\n')

    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        with open(options.ignore_routes) as ignoref:
            ignored = set(r.strip() for r in ignoref)
        # bug fix: entries of 'implausible' are (score, rID, ri) tuples; the
        # original compared the whole tuple against the set of route ids, so
        # no route was ever filtered out
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))

    # generate polygons
    polyOutput = prefix + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(options, net, rID, colorgen(), ri.edges, outf, score)
        outf.write('</additional>\n')

    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist, edgeLoop, nodeLoop)\n')
    for score, rID, ri in sorted(implausible):
        # , ' '.join(ri.edges)))
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID, (ri.airDistRatio, ri.detourRatio,
                                                          ri.detour, ri.shortest_path_distance,
                                                          ri.airDist, ri.edgeLoop, ri.nodeLoop)))

    print(allRoutesStats)
    print(implausibleRoutesStats)
Пример #9
0
def main(options):
    """Plot recorded fcd quantities against each other, one curve per vehicle.

    options.ttype is a two-letter code selecting the x and y quantity from
    `typespec` (e.g. 'ds' plots distance vs. speed).  The plot is written to
    options.output; optionally the raw data is dumped to CSV and/or shown
    interactively.
    """
    fig = plt.figure(figsize=(14, 9), dpi=100)
    fig.canvas.mpl_connect('pick_event', onpick)

    # defaults: distance on x, speed on y (overwritten below from options.ttype)
    xdata = 2
    ydata = 1
    # one-letter axis code -> (axis label, column index in the per-vehicle data tuple)
    typespec = {
        't': ('Time', 0),
        's': ('Speed', 1),
        'd': ('Distance', 2),
        'a': ('Acceleration', 3),
        'i': ('Angle', 4),
        'x': ('x-Position', 5),
        'y': ('y-Position', 6),
        'k': ('kilometrage', 7),
    }

    shortFileNames = short_names(options.fcdfiles)
    if (len(options.ttype) == 2 and options.ttype[0] in typespec
            and options.ttype[1] in typespec):
        xLabel, xdata = typespec[options.ttype[0]]
        yLabel, ydata = typespec[options.ttype[1]]
        plt.xlabel(xLabel)
        plt.ylabel(yLabel)
        plt.title(','.join(shortFileNames
                           ) if options.label is None else options.label)
    else:
        sys.exit("unsupported plot type '%s'" % options.ttype)

    # persons are recorded on edges rather than lanes
    element = 'vehicle'
    location = 'lane'
    if options.persons:
        element = 'person'
        location = 'edge'

    routes = defaultdict(list)  # vehID -> recorded edges
    # vehID -> (times, speeds, distances, accelerations, angles, xPositions, yPositions, kilometrage)
    attrs = ['id', 'x', 'y', 'angle', 'speed', location]
    if 'k' in options.ttype:
        # kilometrage is only parsed when requested, to keep parsing cheap
        attrs.append('distance')
    data = defaultdict(lambda: tuple(([] for i in range(len(attrs) + 1))))
    for fileIndex, fcdfile in enumerate(options.fcdfiles):
        totalVehs = 0
        filteredVehs = 0
        for timestep, vehicle in parse_fast_nested(fcdfile, 'timestep',
                                                   ['time'], element, attrs):
            totalVehs += 1
            vehID = vehicle.id
            if options.filterIDs and vehID not in options.filterIDs:
                continue
            if len(options.fcdfiles) > 1:
                # disambiguate vehicles from different input files
                suffix = shortFileNames[fileIndex]
                if len(suffix) > 0:
                    vehID += "#" + suffix
            if options.persons:
                edge = vehicle.edge
            else:
                # derive the edge id by stripping the "_<laneIndex>" suffix
                edge = vehicle.lane[0:vehicle.lane.rfind('_')]
            if len(routes[vehID]) == 0 or routes[vehID][-1] != edge:
                routes[vehID].append(edge)
            if options.filterEdges and edge not in options.filterEdges:
                continue
            time = parseTime(timestep.time)
            speed = float(vehicle.speed)
            # previous sample for this vehicle (first sample: no movement yet)
            prevTime = time
            prevSpeed = speed
            prevDist = 0
            if vehID in data:
                prevTime = data[vehID][0][-1]
                prevSpeed = data[vehID][1][-1]
                prevDist = data[vehID][2][-1]
            data[vehID][0].append(time)
            data[vehID][1].append(speed)
            data[vehID][4].append(float(vehicle.angle))
            data[vehID][5].append(float(vehicle.x))
            data[vehID][6].append(float(vehicle.y))
            if 'k' in options.ttype:
                data[vehID][7].append(float(vehicle.distance))
            if prevTime == time:
                # avoid division by zero on the first sample
                data[vehID][3].append(0)
            else:
                data[vehID][3].append((speed - prevSpeed) / (time - prevTime))

            # integrate distance; ballistic update uses the mean speed of the step
            if options.ballistic:
                avgSpeed = (speed + prevSpeed) / 2
            else:
                avgSpeed = speed
            data[vehID][2].append(prevDist + (time - prevTime) * avgSpeed)
            filteredVehs += 1
        if totalVehs == 0 or filteredVehs == 0 or options.verbose:
            print("Found %s datapoints in %s and kept %s" %
                  (totalVehs, fcdfile, filteredVehs))

    # NOTE(review): filteredVehs is undefined if options.fcdfiles is empty —
    # confirm the option parser guarantees at least one input file
    if filteredVehs == 0:
        sys.exit()

    def line_picker(line, mouseevent):
        # matplotlib picker: accept the pick if any vertex of the line lies
        # within options.pickDist of the click position
        if mouseevent.xdata is None:
            return False, dict()
        # minxy = None
        # mindist = 10000
        for x, y in zip(line.get_xdata(), line.get_ydata()):
            dist = math.sqrt((x - mouseevent.xdata)**2 +
                             (y - mouseevent.ydata)**2)
            if dist < options.pickDist:
                return True, dict(label=line.get_label())
            # else:
            #    if dist < mindist:
            #        print("   ", x,y, dist, (x - mouseevent.xdata) ** 2, (y - mouseevent.ydata) ** 2)
            #        mindist = dist
            #        minxy = (x, y)
        # print(mouseevent.xdata, mouseevent.ydata, minxy, dist,
        #        line.get_label())
        return False, dict()

    # track the data extent so the axis can be inverted explicitly below
    minY = uMax
    maxY = uMin
    minX = uMax
    maxX = uMin

    for vehID, d in data.items():
        if options.filterRoute is not None:
            # only plot vehicles whose recorded route passes all required edges
            skip = False
            route = routes[vehID]
            for required in options.filterRoute:
                if required not in route:
                    skip = True
                    break
            if skip:
                continue
        if options.invertDistanceAngle is not None:
            # mirror the distance axis for vehicles driving roughly in the
            # given direction (within 45 degrees of the average angle)
            avgAngle = sum(d[4]) / len(d[4])
            if abs(avgAngle - options.invertDistanceAngle) < 45:
                maxDist = d[2][-1]
                for i, v in enumerate(d[2]):
                    d[2][i] = maxDist - v

        minY = min(minY, min(d[ydata]))
        maxY = max(maxY, max(d[ydata]))
        minX = min(minX, min(d[xdata]))
        maxX = max(maxX, max(d[xdata]))

        plt.plot(d[xdata], d[ydata], picker=line_picker, label=vehID)
    if options.invertYAxis:
        plt.axis([minX, maxX, maxY, minY])

    if options.legend > 0:
        plt.legend()

    plt.savefig(options.output)
    if options.csv_output is not None:
        write_csv(data, options.csv_output)
    if options.show:
        plt.show()
Пример #10
0
def main(options):
    """Plot one trajectory curve per vehicle from a single fcd output file.

    options.ttype selects the plotted quantities ('ds', 'ts', 'td', 'ta' or
    'da').  The figure is saved to options.output; the collected data can
    additionally be written to CSV and/or shown interactively.
    """
    fig = plt.figure(figsize=(14, 9), dpi=100)
    fig.canvas.mpl_connect('pick_event', onpick)

    # plot-type code -> (x label, y label, x column, y column)
    axes_by_type = {
        'ds': ("Distance", "Speed", 2, 1),
        'ts': ("Time", "Speed", 0, 1),
        'td': ("Time", "Distance", 0, 2),
        'ta': ("Time", "Acceleration", 0, 3),
        'da': ("Distance", "Acceleration", 2, 3),
    }
    if options.ttype not in axes_by_type:
        sys.exit("unsupported plot type '%s'" % options.ttype)
    xLabel, yLabel, xdata, ydata = axes_by_type[options.ttype]
    plt.xlabel(xLabel)
    plt.ylabel(yLabel)

    routes = defaultdict(list)  # vehID -> sequence of traversed edges
    # vehID -> ([times], [speeds], [distances], [accelerations])
    data = defaultdict(lambda: ([], [], [], []))
    for timestep, vehicle in parse_fast_nested(options.fcdfile, 'timestep', ['time'],
                                               'vehicle', ['id', 'speed', 'lane']):
        now = float(timestep.time)
        curSpeed = float(vehicle.speed)
        # look up the previous sample of this vehicle before touching the
        # defaultdict (first sample: no movement yet)
        if vehicle.id in data:
            prevSeries = data[vehicle.id]
            lastTime = prevSeries[0][-1]
            lastSpeed = prevSeries[1][-1]
            lastDist = prevSeries[2][-1]
        else:
            lastTime = now
            lastSpeed = curSpeed
            lastDist = 0
        series = data[vehicle.id]
        series[0].append(now)
        series[1].append(curSpeed)
        if lastTime == now:
            # first sample: no time elapsed, report zero acceleration
            series[3].append(0)
        else:
            series[3].append((curSpeed - lastSpeed) / (now - lastTime))

        # integrate distance; ballistic update uses the mean speed of the step
        if options.ballistic:
            stepSpeed = (curSpeed + lastSpeed) / 2
        else:
            stepSpeed = curSpeed
        series[2].append(lastDist + (now - lastTime) * stepSpeed)
        # derive the edge id by stripping the "_<laneIndex>" suffix
        edge = vehicle.lane[0:vehicle.lane.rfind('_')]
        trace = routes[vehicle.id]
        if not trace or trace[-1] != edge:
            trace.append(edge)

    def line_picker(line, mouseevent):
        # matplotlib picker: accept the pick if any vertex of the line lies
        # within options.pickDist (squared distance) of the click position
        if mouseevent.xdata is None:
            return False, dict()
        for px, py in zip(line.get_xdata(), line.get_ydata()):
            if (px - mouseevent.xdata) ** 2 + (py - mouseevent.ydata) ** 2 < options.pickDist:
                return True, dict(label=line.get_label())
        return False, dict()

    for vehID, d in data.items():
        if options.filterRoute is not None:
            # only plot vehicles whose recorded route passes all required edges
            route = routes[vehID]
            if any(required not in route for required in options.filterRoute):
                continue
        plt.plot(d[xdata], d[ydata], picker=line_picker, label=vehID)

    plt.savefig(options.output)
    if options.csv_output is not None:
        write_csv(data, options.csv_output)
    if options.show:
        plt.show()
Пример #11
0
def main(options):
    """Plot recorded fcd quantities against each other, one curve per vehicle.

    options.ttype is a two-letter code selecting the x and y quantity from
    `typespec` (e.g. 'ds' plots distance vs. speed).  The plot is saved to
    options.output; optionally the data is dumped to CSV and/or shown
    interactively.
    """
    fig = plt.figure(figsize=(14, 9), dpi=100)
    fig.canvas.mpl_connect('pick_event', onpick)

    # defaults: distance on x, speed on y (overwritten below from options.ttype)
    xdata = 2
    ydata = 1
    # one-letter axis code -> (axis label, column index in the per-vehicle data tuple)
    typespec = {
        't': ('Time', 0),
        's': ('Speed', 1),
        'd': ('Distance', 2),
        'a': ('Acceleration', 3),
        'i': ('Angle', 4),
        'x': ('x-Position', 5),
        'y': ('y-Position', 6),
    }

    if (len(options.ttype) == 2
            and options.ttype[0] in typespec
            and options.ttype[1] in typespec):
        xLabel, xdata = typespec[options.ttype[0]]
        yLabel, ydata = typespec[options.ttype[1]]
        plt.xlabel(xLabel)
        plt.ylabel(yLabel)
        plt.title(options.fcdfile if options.label is None else options.label)
    else:
        sys.exit("unsupported plot type '%s'" % options.ttype)

    routes = defaultdict(list)  # vehID -> recorded edges
    # vehID -> (times, speeds, distances, accelerations, angles, xPositions, yPositions)
    data = defaultdict(lambda: ([], [], [], [], [], [], []))
    for timestep, vehicle in parse_fast_nested(options.fcdfile, 'timestep', ['time'],
                                               'vehicle', ['id', 'x', 'y', 'angle', 'speed', 'lane']):
        time = float(timestep.time)
        speed = float(vehicle.speed)
        # previous sample for this vehicle (first sample: no movement yet)
        prevTime = time
        prevSpeed = speed
        prevDist = 0
        if vehicle.id in data:
            prevTime = data[vehicle.id][0][-1]
            prevSpeed = data[vehicle.id][1][-1]
            prevDist = data[vehicle.id][2][-1]
        data[vehicle.id][0].append(time)
        data[vehicle.id][1].append(speed)
        data[vehicle.id][4].append(float(vehicle.angle))
        data[vehicle.id][5].append(float(vehicle.x))
        data[vehicle.id][6].append(float(vehicle.y))
        if prevTime == time:
            # avoid division by zero on the first sample
            data[vehicle.id][3].append(0)
        else:
            data[vehicle.id][3].append((speed - prevSpeed) / (time - prevTime))

        # integrate distance; ballistic update uses the mean speed of the step
        if options.ballistic:
            avgSpeed = (speed + prevSpeed) / 2
        else:
            avgSpeed = speed
        data[vehicle.id][2].append(prevDist + (time - prevTime) * avgSpeed)
        # derive the edge id by stripping the "_<laneIndex>" suffix
        edge = vehicle.lane[0:vehicle.lane.rfind('_')]
        if len(routes[vehicle.id]) == 0 or routes[vehicle.id][-1] != edge:
            routes[vehicle.id].append(edge)

    def line_picker(line, mouseevent):
        # matplotlib picker: accept the pick if any vertex of the line lies
        # within options.pickDist of the click position
        if mouseevent.xdata is None:
            return False, dict()
        # minxy = None
        # mindist = 10000
        for x, y in zip(line.get_xdata(), line.get_ydata()):
            dist = math.sqrt((x - mouseevent.xdata) ** 2 + (y - mouseevent.ydata) ** 2)
            if dist < options.pickDist:
                return True, dict(label=line.get_label())
            # else:
            #    if dist < mindist:
            #        print("   ", x,y, dist, (x - mouseevent.xdata) ** 2, (y - mouseevent.ydata) ** 2)
            #        mindist = dist
            #        minxy = (x, y)
        # print(mouseevent.xdata, mouseevent.ydata, minxy, dist,
        #        line.get_label())
        return False, dict()

    # track the data extent so the axis can be inverted explicitly below
    minY = uMax
    maxY = uMin
    minX = uMax
    maxX = uMin

    for vehID, d in data.items():
        if options.filterRoute is not None:
            # only plot vehicles whose recorded route passes all required edges
            skip = False
            route = routes[vehID]
            for required in options.filterRoute:
                if required not in route:
                    skip = True
                    break
            if skip:
                continue
        if options.invertDistanceAngle is not None:
            # mirror the distance axis for vehicles driving roughly in the
            # given direction (within 45 degrees of the average angle)
            avgAngle = sum(d[4]) / len(d[4])
            if abs(avgAngle - options.invertDistanceAngle) < 45:
                maxDist = d[2][-1]
                for i, v in enumerate(d[2]):
                    d[2][i] = maxDist - v

        minY = min(minY, min(d[ydata]))
        maxY = max(maxY, max(d[ydata]))
        minX = min(minX, min(d[xdata]))
        maxX = max(maxX, max(d[xdata]))

        plt.plot(d[xdata], d[ydata], picker=line_picker, label=vehID)
    if options.invertYAxis:
        plt.axis([minX, maxX, maxY, minY])

    plt.savefig(options.output)
    if options.csv_output is not None:
        write_csv(data, options.csv_output)
    if options.show:
        plt.show()