Example #1
# Imports assumed by this excerpt (they are not part of the original listing):
import logging
import sys
from logging import StreamHandler

from docopt import docopt

import gtfslib
from gtfslib.dao import Dao


def main():
    arguments = docopt(__doc__,
                       version='gtfsdbloader %s' % gtfslib.__version__)
    if arguments['--id'] is None:
        arguments['--id'] = ""

    # TODO Configure logging properly?
    logger = logging.getLogger('libgtfs')
    logger.setLevel(logging.INFO)
    logger.addHandler(StreamHandler(sys.stdout))

    dao = Dao(arguments['<database>'],
              sql_logging=arguments['--logsql'],
              schema=arguments['--schema'])

    if arguments['--list']:
        for feed in dao.feeds():
            print(feed.feed_id if feed.feed_id != "" else "(default)")

    if arguments['--delete'] or arguments['--load']:
        feed_id = arguments['--id']
        existing_feed = dao.feed(feed_id)
        if existing_feed:
            logger.warning("Deleting existing feed ID '%s'" % feed_id)
            dao.delete_feed(feed_id)
            dao.commit()

    if arguments['--load']:
        dao.load_gtfs(arguments['--load'],
                      feed_id=arguments['--id'],
                      lenient=arguments['--lenient'],
                      disable_normalization=arguments['--disablenormalize'])
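The docopt usage string (__doc__) is not shown in this excerpt. A minimal sketch of what it could look like, inferred from the option names the code reads above (the wording and layout are assumptions):

"""gtfsdbloader - GTFS database loader

Usage:
  gtfsdbloader <database> [--load=<gtfs>] [--delete] [--list]
                          [--id=<feed_id>] [--lenient] [--schema=<schema>]
                          [--disablenormalize] [--logsql]
  gtfsdbloader (-h | --help)
  gtfsdbloader --version

Options:
  --load=<gtfs>       GTFS zip file or directory to load.
  --delete            Delete the feed selected by --id.
  --list              List the feed IDs present in the database.
  --id=<feed_id>      Feed ID to load into or delete.
  --lenient           Tolerate some invalid entries while loading.
  --schema=<schema>   Database schema to use.
  --disablenormalize  Disable stop time normalization.
  --logsql            Log SQL statements.
"""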
Example #2
    def test_custom_queries(self):
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        dao.load_gtfs(DUMMY_GTFS)

        # A simple custom query: count the number of stops per type (stop/station)
        # SQL equivalent: SELECT stop.location_type, count(stop.location_type) FROM stop GROUP BY stop.location_type
        for type, stop_count in dao.session() \
                    .query(Stop.location_type, func.count(Stop.location_type)) \
                    .group_by(Stop.location_type) \
                    .all():
            # print("type %d : %d stops" % (type, stop_count))
            if type == Stop.TYPE_STATION:
                self.assertTrue(stop_count == 3)
            if type == Stop.TYPE_STOP:
                self.assertTrue(stop_count > 15 and stop_count < 30)

        # A more complex custom query: count the number of trips per calendar date per route in June and July
        from_date = CalendarDate.ymd(2016, 6, 1)
        to_date = CalendarDate.ymd(2016, 7, 31)
        for date, route, trip_count in dao.session() \
                    .query(CalendarDate.date, Route, func.count(Trip.trip_id)) \
                    .join(Calendar).join(Trip).join(Route) \
                    .filter((func.date(CalendarDate.date) >= from_date.date) & (func.date(CalendarDate.date) <= to_date.date)) \
                    .group_by(CalendarDate.date, Route.route_short_name) \
                    .all():
            # print("%s / %20s : %d trips" % (date, route.route_short_name + " " + route.route_long_name, trip_count))
            self.assertTrue(date >= from_date.as_date())
            self.assertTrue(date <= to_date.as_date())
            self.assertTrue(trip_count > 0)
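For comparison, the same per-type stop count can be computed client-side by iterating dao.stops(), trading the SQL GROUP BY for an in-memory Counter (a sketch using only calls shown in these examples):

from collections import Counter

# Count stops per location type in plain Python.
type_counts = Counter(stop.location_type for stop in dao.stops())
for location_type, stop_count in type_counts.items():
    print("type %d : %d stops" % (location_type, stop_count))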
Example #3
    def test_broken(self):
        exception = False
        try:
            dao = Dao("")
            dao.load_gtfs(BROKEN_GTFS, lenient=False)
        except KeyError:
            exception = True
        self.assertTrue(exception)

        dao = Dao("")
        dao.load_gtfs(BROKEN_GTFS, lenient=True)

        # The following counts are based on the BROKEN GTFS content,
        # that is, the entity counts minus the broken entries.
        self.assertTrue(len(dao.routes()) == 4)
        self.assertTrue(len(list(dao.stops())) == 12)
        self.assertTrue(len(dao.calendars()) == 2)
        self.assertTrue(len(list(dao.trips())) == 104)
        self.assertTrue(len(dao.stoptimes()) == 500)
        self.assertTrue(len(dao.fare_attributes()) == 2)
        self.assertTrue(len(dao.fare_rules()) == 4)
        # This stop has missing coordinates in the broken file
        stop00 = dao.stop('FUR_CREEK_RES3')
        self.assertAlmostEqual(stop00.stop_lat, 0.0, 5)
        self.assertAlmostEqual(stop00.stop_lon, 0.0, 5)
Example #4
    def test_broken(self):
        exception = False
        try:
            clear_mappers()
            dao = Dao("")
            dao.load_gtfs(BROKEN_GTFS, lenient=False)
        except KeyError:
            exception = True
        self.assertTrue(exception)

        clear_mappers()
        dao = Dao("")
        dao.load_gtfs(BROKEN_GTFS, lenient=True)

        # The following counts are based on the BROKEN GTFS content,
        # that is, the entity counts minus the broken entries.
        self.assertTrue(len(dao.routes()) == 4)
        self.assertTrue(len(list(dao.stops())) == 12)
        self.assertTrue(len(dao.calendars()) == 2)
        self.assertTrue(len(list(dao.trips())) == 104)
        self.assertTrue(len(dao.stoptimes()) == 500)
        self.assertTrue(len(dao.fare_attributes()) == 2)
        self.assertTrue(len(dao.fare_rules()) == 4)
        # This stop has missing coordinates in the broken file
        stop00 = dao.stop('FUR_CREEK_RES3')
        self.assertAlmostEqual(stop00.stop_lat, 0.0, 5)
        self.assertAlmostEqual(stop00.stop_lon, 0.0, 5)
Example #5
    def test_whitespace_stripping(self):
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        dao.load_gtfs(MINI_GTFS)

        # Check if whitespace stripping works
        a = dao.agency("A")
        self.assertTrue(a.agency_name == "Mini Agency")
        self.assertTrue(a.agency_lang == "en")
Example #6
    def test_complex_queries(self):
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        dao.load_gtfs(DUMMY_GTFS)

        # Get the list of departures:
        # 1) from "Porte de Bourgogne"
        # 2) on 4th July
        # 3) between 10:00 and 14:00
        # 4) on route type BUS
        # 5) not the last of trip (only departing)
        porte_bourgogne = dao.stop("BBG")
        july4 = CalendarDate.ymd(2016, 7, 4)
        from_time = gtfstime(10, 00)
        to_time = gtfstime(14, 00)
        departures = dao.stoptimes(
                    fltr=(StopTime.stop == porte_bourgogne) & (StopTime.departure_time >= from_time) & (StopTime.departure_time <= to_time)
                    & (Route.route_type == Route.TYPE_BUS) & (func.date(CalendarDate.date) == july4.date),
                    prefetch_trips=True)

        n = 0
        for dep in departures:
            self.assertTrue(dep.stop == porte_bourgogne)
            self.assertTrue(july4 in dep.trip.calendar.dates)
            self.assertTrue(dep.trip.route.route_type == Route.TYPE_BUS)
            self.assertTrue(dep.departure_time >= from_time and dep.departure_time <= to_time)
            n += 1
        self.assertTrue(n > 10)

        # Plage is a stop that is used only in summer (hence the name!)
        plage = dao.stop("BPG")
        # Get the list of stops used by some route:
        # 1) All-year round
        route_red = dao.route("BR")
        stoplist_all = list(dao.stops(fltr=Trip.route == route_red))
        # 2) Only in january
        from_date = CalendarDate.ymd(2016, 1, 1)
        to_date = CalendarDate.ymd(2016, 1, 31)
        stoplist_jan = list(dao.stops(
                    fltr=(Trip.route == route_red) & (func.date(CalendarDate.date) >= from_date.date) & (func.date(CalendarDate.date) <= to_date.date)))
        # Now check the results
        self.assertTrue(len(stoplist_all) > 5)
        self.assertTrue(plage in stoplist_all)
        self.assertFalse(plage in stoplist_jan)
        stoplist = list(stoplist_all)
        stoplist.remove(plage)
        self.assertTrue(set(stoplist) == set(stoplist_jan))

        # Get all routes passing through the set of stops
        routes = dao.routes(fltr=or_(StopTime.stop == stop for stop in stoplist_jan))
        stopset = set()
        for route in routes:
            for trip in route.trips:
                for stoptime in trip.stop_times:
                    stopset.add(stoptime.stop)
        self.assertTrue(set(stoplist_jan).issubset(stopset))
Example #7
    def test_hops(self):
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        dao.load_gtfs(MINI_GTFS)

        # Get all hops
        hops = dao.hops()
        nhops = 0
        for st1, st2 in hops:
            self.assertTrue(st1.stop_sequence + 1 == st2.stop_sequence)
            self.assertTrue(st1.trip == st2.trip)
            nhops += 1
        # 2 standard trips + 8 trips generated from 2 frequencies, 2 hops each
        self.assertTrue(nhops == 2 * 2 + 8 * 2)

        # Get all hops with a distance <= 70km
        hops = dao.hops(fltr=(dao.hop_second().shape_dist_traveled -
                              dao.hop_first().shape_dist_traveled <= 70000))
        nhops1 = 0
        for st1, st2 in hops:
            self.assertTrue(st1.stop_sequence + 1 == st2.stop_sequence)
            self.assertTrue(st1.trip == st2.trip)
            self.assertTrue(
                st2.shape_dist_traveled - st1.shape_dist_traveled <= 70000)
            nhops1 += 1

        # Get all hops with a distance > 70km
        hops = dao.hops(fltr=(dao.hop_second().shape_dist_traveled -
                              dao.hop_first().shape_dist_traveled > 70000))
        nhops2 = 0
        for st1, st2 in hops:
            self.assertTrue(st1.stop_sequence + 1 == st2.stop_sequence)
            self.assertTrue(st1.trip == st2.trip)
            self.assertTrue(
                st2.shape_dist_traveled - st1.shape_dist_traveled > 70000)
            nhops2 += 1
        self.assertTrue(nhops == nhops1 + nhops2)

        # Split hops by travel time: first those taking >= 1h, then those < 1h
        hops = dao.hops(fltr=(dao.hop_second().arrival_time -
                              dao.hop_first().departure_time >= 3600))
        for st1, st2 in hops:
            self.assertTrue(st2.arrival_time - st1.departure_time >= 3600)
        hops = dao.hops(fltr=(dao.hop_second().arrival_time -
                              dao.hop_first().departure_time < 3600))
        for st1, st2 in hops:
            self.assertTrue(st2.arrival_time - st1.departure_time < 3600)

        # Get hops with a delta of 2
        hops = dao.hops(delta=2)
        for st1, st2 in hops:
            self.assertTrue(st1.stop_sequence + 2 == st2.stop_sequence)
            self.assertTrue(st1.trip == st2.trip)
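Building on the (st1, st2) pairs yielded by dao.hops(), a small derived sketch: compute the average hop duration over the whole feed (times are in seconds, as in the 3600-second filters above).

# Average travel time between consecutive stops, in seconds.
total_time, count = 0, 0
for st1, st2 in dao.hops():
    total_time += st2.arrival_time - st1.departure_time
    count += 1
if count > 0:
    print("Average hop time: %.1f s" % (total_time * 1.0 / count))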
Example #8
    def test_demo(self):
        dao = Dao(DAO_URL, sql_logging=False)
        dao.load_gtfs(DUMMY_GTFS)

        print("List of stops named '...Bordeaux...':")
        stops_bordeaux = list(dao.stops(fltr=(Stop.stop_name.ilike('%Bordeaux%')) & (Stop.location_type == Stop.TYPE_STOP)))
        for stop in stops_bordeaux:
            print(stop.stop_name)

        print("List of routes passing by those stops:")
        routes_bordeaux = dao.routes(fltr=or_(StopTime.stop == stop for stop in stops_bordeaux))
        for route in routes_bordeaux:
            print("%s - %s" % (route.route_short_name, route.route_long_name))

        july4 = CalendarDate.ymd(2016, 7, 4)
        print("All departures from those stops on %s:" % (july4.as_date()))
        departures = list(dao.stoptimes(fltr=(or_(StopTime.stop == stop for stop in stops_bordeaux)) & (StopTime.departure_time != None) & (func.date(CalendarDate.date) == july4.date)))
        print("There is %d departures" % (len(departures)))
        for departure in departures:
            print("%30.30s %10.10s %-20.20s > %s" % (departure.stop.stop_name, fmttime(departure.departure_time), departure.trip.route.route_long_name, departure.trip.trip_headsign))

        print("Number of departures and time range per stop on %s:" % (july4.as_date()))
        departure_by_stop = defaultdict(list)
        for departure in departures:
            departure_by_stop[departure.stop].append(departure)
        for stop, deps in departure_by_stop.items():
            min_dep = min(d.departure_time for d in deps)
            max_dep = max(d.departure_time for d in deps)
            print("%30.30s %3d departures (from %s to %s)" % (stop.stop_name, len(deps), fmttime(min_dep), fmttime(max_dep)))

        # Compute the average distance and time to next stop by route type
        ntd = [ [0, 0, 0.0] for type in range(0, Route.TYPE_FUNICULAR + 1) ]
        for departure in departures:
            # This is guaranteed to succeed: the last stop time of a trip has departure_time None, so it was filtered out of the departures above
            next_arrival = departure.trip.stop_times[departure.stop_sequence + 1]
            hop_dist = next_arrival.shape_dist_traveled - departure.shape_dist_traveled
            hop_time = next_arrival.arrival_time - departure.departure_time
            route_type = departure.trip.route.route_type
            ntd[route_type][0] += 1
            ntd[route_type][1] += hop_time
            ntd[route_type][2] += hop_dist
        for route_type in range(0, len(ntd)):
            n, t, d = ntd[route_type]
            if n > 0:
                print("The average distance to the next stop on those departures for route type %d is %.2f meters" % (route_type, d / n))
                print("The average time in sec to the next stop on those departures for route type %d is %s" % (route_type, fmttime(t / n)))
Example #9
    def test_clusterizer(self):
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        dao.load_gtfs(DUMMY_GTFS)

        # Merge stops closer than 300m together
        sc = SpatialClusterizer(300.0)
        for stop in dao.stops():
            sc.add_point(stop)
        sc.clusterize()

        # for cluster in sc.clusters():
        #    print("---CLUSTER: %d stops" % (len(cluster)))
        #    for stop in cluster:
        #        print("%s %s" % (stop.stop_id, stop.stop_name))

        gare1 = dao.stop("GBSJT")
        gare2 = dao.stop("GBSJ")
        gare3 = dao.stop("GBSJB")
        self.assertTrue(sc.in_same_cluster(gare1, gare2))
        self.assertTrue(sc.in_same_cluster(gare1, gare3))
        self.assertTrue(sc.in_same_cluster(gare2, gare3))

        bq = dao.stop("BQ")
        bq1 = dao.stop("BQA")
        bq2 = dao.stop("BQD")
        self.assertTrue(sc.in_same_cluster(bq, bq1))
        self.assertTrue(sc.in_same_cluster(bq, bq2))

        bs = dao.stop("BS")
        bs1 = dao.stop("BS1")
        bs2 = dao.stop("BS2")
        self.assertTrue(sc.in_same_cluster(bs, bs1))
        self.assertTrue(sc.in_same_cluster(bs, bs2))

        self.assertFalse(sc.in_same_cluster(gare1, bq))
        self.assertFalse(sc.in_same_cluster(gare1, bs))
        self.assertFalse(sc.in_same_cluster(gare3, bs2))

        bjb = dao.stop("BJB")
        self.assertFalse(sc.in_same_cluster(bjb, gare1))
        self.assertFalse(sc.in_same_cluster(bjb, bs))
        self.assertFalse(sc.in_same_cluster(bjb, bq))
Example #10
    def test_gtfs_data(self):
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        dao.load_gtfs(MINI_GTFS)

        # Check feed
        feed = dao.feed()
        self.assertTrue(feed.feed_id == "")
        self.assertTrue(feed.feed_publisher_name is None)
        self.assertTrue(feed.feed_publisher_url is None)
        self.assertTrue(feed.feed_contact_email is None)
        self.assertTrue(feed.feed_contact_url is None)
        self.assertTrue(feed.feed_start_date is None)
        self.assertTrue(feed.feed_end_date is None)
        self.assertTrue(len(dao.agencies()) == 1)
        self.assertTrue(len(dao.routes()) == 1)
        self.assertTrue(len(feed.agencies) == 1)
        self.assertTrue(len(feed.routes) == 1)

        # Check if optional route agency is set
        a = dao.agency("A")
        self.assertTrue(a is not None)
        self.assertTrue(len(a.routes) == 1)

        # Check for frequency-generated trips
        # They should all have the same delta
        trips = dao.trips(fltr=(Trip.frequency_generated == True),
                          prefetch_stop_times=True)
        n_trips = 0
        deltas = {}
        for trip in trips:
            original_trip_id = trip.trip_id.rsplit('@', 1)[0]
            delta1 = []
            for st1, st2 in trip.hops():
                delta1.append(st2.arrival_time - st1.departure_time)
            delta2 = deltas.get(original_trip_id)
            if delta2 is not None:
                self.assertTrue(delta1 == delta2)
            else:
                deltas[original_trip_id] = delta1
            n_trips += 1
        self.assertTrue(n_trips == 8)
Example #11
    def test_non_overlapping_feeds(self):
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        # Load the same data twice, under two distinct namespaces
        dao.load_gtfs(DUMMY_GTFS, feed_id='A')
        dao.load_gtfs(DUMMY_GTFS, feed_id='B')

        # Check that each feed only returns its own data
        feed_a = dao.feed('A')
        self.assertTrue(feed_a.feed_id == 'A')
        feed_b = dao.feed('B')
        self.assertTrue(feed_b.feed_id == 'B')
        self.assertTrue(len(dao.agencies()) == 4)
        self.assertTrue(len(feed_a.agencies) == 2)
        self.assertTrue(len(feed_b.agencies) == 2)
        self.assertTrue(len(feed_a.routes) * 2 == len(dao.routes()))
        self.assertTrue(len(feed_b.routes) * 2 == len(dao.routes()))
        self.assertTrue(len(feed_a.stops) * 2 == len(list(dao.stops())))
        self.assertTrue(len(feed_b.stops) * 2 == len(list(dao.stops())))
        self.assertTrue(len(feed_a.calendars) * 2 == len(dao.calendars()))
        self.assertTrue(len(feed_b.calendars) * 2 == len(dao.calendars()))
        self.assertTrue(len(feed_a.trips) * 2 == len(list(dao.trips())))
        self.assertTrue(len(feed_b.trips) * 2 == len(list(dao.trips())))
Example #12
File: NMBS.py Project: luxn/geoinf
from gtfslib.dao import Dao

# A DAO is a Data Access Object
dao = Dao("gtfs-nmbs.db.sqlite")
dao.load_gtfs("Feeds/NMBS_belgium.zip")

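Once loaded, the database can be queried through the same DAO. A short sketch using query methods shown in the other examples:

# Print every route and the total number of stops in the feed.
for route in dao.routes():
    print("%s - %s" % (route.route_short_name, route.route_long_name))
print("%d stops" % len(list(dao.stops())))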
Example #13
    def _test_one_gtfs(self, gtfs):
        clear_mappers()
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        dao.load_gtfs(gtfs)

        # Check stop time normalization and interpolation
        for trip in dao.trips(prefetch_stop_times=True):
            stopseq = 0
            n_stoptimes = len(trip.stop_times)
            last_stop = None
            distance = trip.stop_times[0].shape_dist_traveled
            last_stoptime = None
            last_interpolated_speed = None
            for stoptime in trip.stop_times:
                self.assertTrue(stoptime.stop_sequence == stopseq)
                if stopseq == 0:
                    self.assertTrue(stoptime.arrival_time is None)
                else:
                    self.assertTrue(stoptime.arrival_time is not None)
                if stopseq == n_stoptimes - 1:
                    self.assertTrue(stoptime.departure_time is None)
                else:
                    self.assertTrue(stoptime.departure_time is not None)
                if last_stop is not None:
                    distance += orthodromic_distance(last_stop, stoptime.stop)
                last_stop = stoptime.stop
                if trip.shape is not None:
                    self.assertTrue(stoptime.shape_dist_traveled >= distance)
                else:
                    self.assertAlmostEqual(stoptime.shape_dist_traveled,
                                           distance, 1)
                stopseq += 1
                # Guard on last_stoptime: the first stop time has no predecessor.
                if last_stoptime is not None and (stoptime.interpolated
                                                  or last_stoptime.interpolated):
                    dist = stoptime.shape_dist_traveled - last_stoptime.shape_dist_traveled
                    time = stoptime.arrival_time - last_stoptime.departure_time
                    speed = dist * 1.0 / time
                    if last_interpolated_speed is not None:
                        self.assertAlmostEqual(speed, last_interpolated_speed,
                                               2)
                    last_interpolated_speed = speed
                if not stoptime.interpolated:
                    last_interpolated_speed = None
                last_stoptime = stoptime

        # Get all hops
        hops = dao.hops()
        nhops = 0
        for st1, st2 in hops:
            self.assertTrue(st1.stop_sequence + 1 == st2.stop_sequence)
            self.assertTrue(st1.trip == st2.trip)
            nhops += 1

        # Get hops with a delta of 2
        hops = dao.hops(delta=2)
        nhops2 = 0
        for st1, st2 in hops:
            self.assertTrue(st1.stop_sequence + 2 == st2.stop_sequence)
            self.assertTrue(st1.trip == st2.trip)
            nhops2 += 1
        ntrips = len(list(dao.trips()))
        # Assumes every trip has at least 2 stop times
        self.assertTrue(nhops == nhops2 + ntrips)

        # Test shape_dist_traveled on stoptimes
        for trip in dao.trips():
            # Assume no shapes for now
            distance = 0.0
            last_stop = None
            for stoptime in trip.stop_times:
                if last_stop is not None:
                    distance += orthodromic_distance(last_stop, stoptime.stop)
                last_stop = stoptime.stop
                if trip.shape:
                    self.assertTrue(stoptime.shape_dist_traveled >= distance)
                else:
                    self.assertAlmostEqual(stoptime.shape_dist_traveled,
                                           distance, 2)

        # Test shape normalization
        for shape in dao.shapes():
            distance = 0.0
            last_pt = None
            ptseq = 0
            for point in shape.points:
                if last_pt is not None:
                    distance += orthodromic_distance(last_pt, point)
                last_pt = point
                self.assertAlmostEqual(point.shape_dist_traveled, distance, 2)
                self.assertTrue(point.shape_pt_sequence == ptseq)
                ptseq += 1

        # Check zone-stop relationship
        for zone in dao.zones(prefetch_stops=True):
            for stop in zone.stops:
                self.assertTrue(stop.zone == zone)
        for stop in dao.stops():
            if stop.zone:
                self.assertTrue(stop in stop.zone.stops)
Example #14
from gtfslib.dao import Dao

dao = Dao('db.sqlite')
dao.load_gtfs('gtfs_new.zip')
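To keep several feeds in one database, each load can be given its own feed ID, as in the non-overlapping feeds test above. A sketch (the 'new' ID is arbitrary):

dao.load_gtfs('gtfs_new.zip', feed_id='new')
# List the feed IDs now present.
for feed in dao.feeds():
    print(feed.feed_id if feed.feed_id != "" else "(default)")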
Example #15
    def test_all_gtfs(self):

        if not ENABLE:
            print("This test is disabled as it is very time-consuming.")
            print("If you want to enable it, please see in the code.")
            return

        # Create temporary directory if not there
        if not os.path.isdir(DIR):
            os.mkdir(DIR)

        # Create a DAO, re-using any existing database if present.
        logging.basicConfig(level=logging.INFO)
        dao = Dao("%s/all_gtfs.sqlite" % (DIR))

        deids = IDS_TO_LOAD
        if deids is None:
            print("Downloading meta-info for all agencies...")
            resource_url = "http://www.gtfs-data-exchange.com/api/agencies?format=json"
            response = requests.get(resource_url).json()
            if response.get('status_code') != 200:
                raise IOError("Error %s (%s)" %
                              (response.get('status_code'),
                               response.get('status_txt')))
            deids = []
            for entry in response.get('data'):
                deid = entry.get('dataexchange_id')
                deids.append(deid)
            # Randomize the list, otherwise we will always load ABCBus, then ...
            random.shuffle(deids)

        for deid in deids:
            try:
                local_filename = "%s/%s.gtfs.zip" % (DIR, deid)
                if os.path.exists(local_filename) and SKIP_EXISTING:
                    print("Skipping [%s], GTFS already present." % (deid))
                    continue

                print("Downloading meta-info for ID [%s]" % (deid))
                resource_url = "http://www.gtfs-data-exchange.com/api/agency?agency=%s&format=json" % deid
                response = requests.get(resource_url).json()
                status_code = response.get('status_code')
                if status_code != 200:
                    raise IOError("Error %d (%s)" %
                                  (status_code, response.get('status_txt')))
                data = response.get('data')
                agency_data = data.get('agency')
                agency_name = agency_data.get('name')
                agency_area = agency_data.get('area')
                agency_country = agency_data.get('country')

                print("Processing [%s] %s (%s / %s)" %
                      (deid, agency_name, agency_country, agency_area))
                date_max = 0.0
                file_url = None
                file_size = 0
                file_md5 = None
                for datafile in data.get('datafiles'):
                    date_added = datafile.get('date_added')
                    if date_added > date_max:
                        date_max = date_added
                        file_url = datafile.get('file_url')
                        file_size = datafile.get('size')
                        file_md5 = datafile.get('md5sum')
                if file_url is None:
                    print("No datafile available, skipping.")
                    continue

                if file_size > MAX_GTFS_SIZE:
                    print("GTFS too large (%d bytes > max %d), skipping." %
                          (file_size, MAX_GTFS_SIZE))
                    continue

                # If the file is already present with a matching MD5, skip the download.
                try:
                    existing_md5 = hashlib.md5(
                        open(local_filename, 'rb').read()).hexdigest()
                except IOError:
                    existing_md5 = None
                if existing_md5 == file_md5:
                    print("Using existing file '%s': MD5 checksum matches." %
                          (local_filename))
                else:
                    print("Downloading file '%s' to '%s' (%d bytes)" %
                          (file_url, local_filename, file_size))
                    with open(local_filename, 'wb') as local_file:
                        cnx = requests.get(file_url, stream=True)
                        for block in cnx.iter_content(1024):
                            local_file.write(block)
                    cnx.close()

                feed = dao.feed(deid)
                if feed is not None:
                    print("Removing existing data for feed [%s]" % (deid))
                    dao.delete_feed(deid)
                print("Importing into DAO as ID [%s]" % (deid))
                try:
                    dao.load_gtfs("%s/%s.gtfs.zip" % (DIR, deid), feed_id=deid)
                except Exception:
                    error_filename = "%s/%s.error" % (DIR, deid)
                    print("Import of [%s]: FAILED. Logging error to '%s'" %
                          (deid, error_filename))
                    with open(error_filename, 'w') as errfile:
                        errfile.write(traceback.format_exc())
                    raise
                print("Import of [%s]: OK." % (deid))

            except Exception as error:
                logging.exception(error)
                continue
Example #16
from gtfslib.dao import Dao

# A DAO is a Data Access Object
dao = Dao("gtfs-deutsche_bahn.db.sqlite")
dao.load_gtfs("Feeds/DB_germany.zip")

Example #17
from gtfslib.dao import Dao

# A DAO is a Data Access Object
dao = Dao("gtfs-sncf.db.sqlite")
dao.load_gtfs("Feeds/SNCF_france.zip")

Example #18
    def test_gtfs_data(self):
        dao = Dao(DAO_URL, sql_logging=False)
        dao.load_gtfs(DUMMY_GTFS)

        # Check feed
        feed = dao.feed()
        self.assertTrue(feed.feed_id == "")
        self.assertTrue(feed.feed_publisher_name == "Mecatran")
        self.assertTrue(feed.feed_publisher_url == "http://www.mecatran.com/")
        self.assertTrue(feed.feed_contact_email == "*****@*****.**")
        self.assertTrue(feed.feed_lang == "fr")
        self.assertTrue(len(dao.agencies()) == 2)
        self.assertTrue(len(dao.routes()) == 3)
        self.assertTrue(len(feed.agencies) == 2)
        self.assertTrue(len(feed.routes) == 3)

        # Check agencies
        at = dao.agency("AT")
        self.assertTrue(at.agency_name == "Agency Train")
        self.assertTrue(len(at.routes) == 1)
        ab = dao.agency("AB")
        self.assertTrue(ab.agency_name == "Agency Bus")
        self.assertTrue(len(ab.routes) == 2)

        # Check calendars
        week = dao.calendar("WEEK")
        self.assertTrue(len(week.dates) == 253)
        summer = dao.calendar("SUMMER")
        self.assertTrue(len(summer.dates) == 42)
        mon = dao.calendar("MONDAY")
        self.assertTrue(len(mon.dates) == 49)
        sat = dao.calendar("SAT")
        self.assertTrue(len(sat.dates) == 53)
        for date in mon.dates:
            self.assertTrue(date.dow() == 0)
        for date in sat.dates:
            self.assertTrue(date.dow() == 5)
        for date in week.dates:
            self.assertTrue(date.dow() >= 0 and date.dow() <= 4)
        for date in summer.dates:
            self.assertTrue(date >= CalendarDate.ymd(2016, 7, 1) and date <= CalendarDate.ymd(2016, 8, 31))
        empty = dao.calendars(func.date(CalendarDate.date) == datetime.date(2016, 5, 1))
        # OR USE: empty = dao.calendars(CalendarDate.date == "2016-05-01")
        self.assertTrue(len(empty) == 0)
        july4 = CalendarDate.ymd(2016, 7, 4)
        summer_mon = dao.calendars(func.date(CalendarDate.date) == july4.date)
        n = 0
        for cal in summer_mon:
            self.assertTrue(july4 in cal.dates)
            n += 1
        self.assertTrue(n == 3)

        # Check stops
        sbq = dao.stop("BQ")
        self.assertAlmostEqual(sbq.stop_lat, 44.844, places=2)
        self.assertAlmostEqual(sbq.stop_lon, -0.573, places=2)
        self.assertTrue(sbq.stop_name == "Bordeaux Quinconces")
        n = 0
        for stop in dao.stops(Stop.stop_name.like("Gare%")):
            self.assertTrue(stop.stop_name.startswith("Gare"))
            n += 1
        self.assertTrue(n == 7)
        n = 0
        for stop in dao.stops(fltr=dao.in_area(RectangularArea(44.7, -0.6, 44.9, -0.4))):
            self.assertTrue(stop.stop_lat >= 44.7 and stop.stop_lat <= 44.9 and stop.stop_lon >= -0.6 and stop.stop_lon <= -0.4)
            n += 1
        self.assertTrue(n == 16)
        for station in dao.stops(Stop.location_type == Stop.TYPE_STATION):
            self.assertTrue(station.location_type == Stop.TYPE_STATION)
            self.assertTrue(len(station.sub_stops) >= 2)
            for stop in station.sub_stops:
                self.assertTrue(stop.parent_station == station)

        # Check zones
        z_inexistant = dao.zone("ZX")
        self.assertTrue(z_inexistant is None)
        z1 = dao.zone("Z1")
        self.assertEqual(16, len(z1.stops))
        z2 = dao.zone("Z2")
        self.assertEqual(4, len(z2.stops))

        # Check transfers
        transfers = dao.transfers()
        self.assertTrue(len(transfers) == 3)
        transfers = dao.transfers(fltr=(dao.transfer_from_stop().stop_id == 'GBSJB'))
        self.assertTrue(len(transfers) == 1)
        self.assertTrue(transfers[0].from_stop.stop_id == 'GBSJB')

        # Check routes
        tgv = dao.route("TGVBP")
        self.assertTrue(tgv.agency == at)
        self.assertTrue(tgv.route_type == 2)
        r1 = dao.route("BR")
        self.assertTrue(r1.route_short_name == "R1")
        self.assertTrue(r1.route_long_name == "Bus Red")
        n = 0
        for route in dao.routes(Route.route_type == 3):
            self.assertTrue(route.route_type == 3)
            n += 1
        self.assertTrue(n == 2)

        # Check trip for route
        n = 0
        trips = dao.trips(fltr=Route.route_type == Route.TYPE_BUS)
        for trip in trips:
            self.assertTrue(trip.route.route_type == Route.TYPE_BUS)
            n += 1
        self.assertTrue(n > 20)

        # Check trips on date
        trips = dao.trips(fltr=func.date(CalendarDate.date) == july4.date, prefetch_calendars=True)
        n = 0
        for trip in trips:
            self.assertTrue(july4 in trip.calendar.dates)
            n += 1
        self.assertTrue(n > 30)
Example #19
    def _test_one_gtfs(self, gtfs):
        dao = Dao(DAO_URL, sql_logging=SQL_LOG)
        dao.load_gtfs(gtfs)

        # Check stop time normalization and interpolation
        for trip in dao.trips(prefetch_stop_times=True):
            stopseq = 0
            n_stoptimes = len(trip.stop_times)
            last_stop = None
            distance = trip.stop_times[0].shape_dist_traveled
            last_stoptime = None
            last_interpolated_speed = None
            for stoptime in trip.stop_times:
                self.assertTrue(stoptime.stop_sequence == stopseq)
                if stopseq == 0:
                    self.assertTrue(stoptime.arrival_time is None)
                else:
                    self.assertTrue(stoptime.arrival_time is not None)
                if stopseq == n_stoptimes - 1:
                    self.assertTrue(stoptime.departure_time is None)
                else:
                    self.assertTrue(stoptime.departure_time is not None)
                if last_stop is not None:
                    distance += orthodromic_distance(last_stop, stoptime.stop)
                last_stop = stoptime.stop
                if trip.shape is not None:
                    self.assertTrue(stoptime.shape_dist_traveled >= distance)
                else:
                    self.assertAlmostEqual(stoptime.shape_dist_traveled, distance, 1)
                stopseq += 1
                # Guard on last_stoptime: the first stop time has no predecessor.
                if last_stoptime is not None and (stoptime.interpolated or last_stoptime.interpolated):
                    dist = stoptime.shape_dist_traveled - last_stoptime.shape_dist_traveled
                    time = stoptime.arrival_time - last_stoptime.departure_time
                    speed = dist * 1.0 / time
                    if last_interpolated_speed is not None:
                        self.assertAlmostEqual(speed, last_interpolated_speed, 2)
                    last_interpolated_speed = speed
                if not stoptime.interpolated:
                    last_interpolated_speed = None
                last_stoptime = stoptime

        # Get all hops
        hops = dao.hops()
        nhops = 0
        for st1, st2 in hops:
            self.assertTrue(st1.stop_sequence + 1 == st2.stop_sequence)
            self.assertTrue(st1.trip == st2.trip)
            nhops += 1

        # Get hops with a delta of 2
        hops = dao.hops(delta=2)
        nhops2 = 0
        for st1, st2 in hops:
            self.assertTrue(st1.stop_sequence + 2 == st2.stop_sequence)
            self.assertTrue(st1.trip == st2.trip)
            nhops2 += 1
        ntrips = len(list(dao.trips()))
        # Assumes every trip has at least 2 stop times
        self.assertTrue(nhops == nhops2 + ntrips)

        # Test shape_dist_traveled on stoptimes
        for trip in dao.trips():
            # Assume no shapes for now
            distance = 0.0
            last_stop = None
            for stoptime in trip.stop_times:
                if last_stop is not None:
                    distance += orthodromic_distance(last_stop, stoptime.stop)
                last_stop = stoptime.stop
                if trip.shape:
                    self.assertTrue(stoptime.shape_dist_traveled >= distance)
                else:
                    self.assertAlmostEqual(stoptime.shape_dist_traveled, distance, 2)

        # Test shape normalization
        for shape in dao.shapes():
            distance = 0.0
            last_pt = None
            ptseq = 0
            for point in shape.points:
                if last_pt is not None:
                    distance += orthodromic_distance(last_pt, point)
                last_pt = point
                self.assertAlmostEqual(point.shape_dist_traveled, distance, 2)
                self.assertTrue(point.shape_pt_sequence == ptseq)
                ptseq += 1

        # Check zone-stop relationship
        for zone in dao.zones(prefetch_stops=True):
            for stop in zone.stops:
                self.assertTrue(stop.zone == zone)
        for stop in dao.stops():
            if stop.zone:
                self.assertTrue(stop in stop.zone.stops)