Example #1
    def create_database_from_gtfs(self, path):
        """Return a schedule backed by the cached SQLite database,
        ingesting the GTFS feed at ``path`` on first use."""
        if os.path.isfile(self.database_path):
            # The database already exists; just open it.
            schedule = pygtfs.Schedule(self.database_path)
        else:
            # First run: create the database and ingest the feed.
            schedule = pygtfs.Schedule(self.database_path)
            pygtfs.append_feed(schedule, path)
        return schedule
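A standalone sketch of the same open-or-ingest pattern, for illustration (the file names are hypothetical); note that the existence check must happen before pygtfs.Schedule creates the database file:

import os
import pygtfs

def open_or_create(database_path, feed_path):
    existed = os.path.isfile(database_path)
    schedule = pygtfs.Schedule(database_path)  # creates the file if missing
    if not existed:
        pygtfs.append_feed(schedule, feed_path)
    return schedule

schedule = open_or_create("feed.sqlite", "feed.zip")  # ingests only on first run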
Example #2
    def update(self):
        """Get the latest data from GTFS and update the states."""
        import pygtfs

        split_file_name = os.path.splitext(self._data_source)

        sqlite_file = "{}.sqlite".format(split_file_name[0])
        gtfs = pygtfs.Schedule(os.path.join(self._gtfs_folder, sqlite_file))

        # pylint: disable=no-member
        if len(gtfs.feeds) < 1:
            pygtfs.append_feed(
                gtfs, os.path.join(self._gtfs_folder, self._data_source))

        self._departure = get_next_departure(gtfs, self.origin,
                                             self.destination)
        self._state = self._departure["minutes_until_departure"]

        origin_station = self._departure["origin_station"]
        destination_station = self._departure["destination_station"]
        origin_stop_time = self._departure["origin_stop_time"]
        destination_stop_time = self._departure["destination_stop_time"]
        agency = self._departure["agency"]
        route = self._departure["route"]
        trip = self._departure["trip"]

        name = "{} {} to {} next departure"
        self._name = name.format(agency.agency_name, origin_station.stop_id,
                                 destination_station.stop_id)

        # Build attributes

        self._attributes = {}

        def dict_for_table(resource):
            """Return a dict for the SQLAlchemy resource given."""
            return {
                col: getattr(resource, col)
                for col in resource.__table__.columns.keys()
            }

        def append_keys(resource, prefix=None):
            """Properly format key val pairs to append to attributes."""
            for key, val in resource.items():
                if val == "" or val is None or key == "feed_id":
                    continue
                pretty_key = key.replace("_", " ")
                pretty_key = pretty_key.title()
                pretty_key = pretty_key.replace("Id", "ID")
                pretty_key = pretty_key.replace("Url", "URL")
                if prefix is not None and not pretty_key.startswith(prefix):
                    pretty_key = "{} {}".format(prefix, pretty_key)
                self._attributes[pretty_key] = val

        append_keys(dict_for_table(agency), "Agency")
        append_keys(dict_for_table(route), "Route")
        append_keys(dict_for_table(trip), "Trip")
        append_keys(dict_for_table(origin_station), "Origin Station")
        append_keys(dict_for_table(destination_station), "Destination Station")
        append_keys(origin_stop_time, "Origin Stop")
        append_keys(destination_stop_time, "Destination Stop")
Example #3
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config.get(CONF_DATA)
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)

    if not os.path.exists(gtfs_dir):
        os.makedirs(gtfs_dir)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found!")
        return False

    import pygtfs

    split_file_name = os.path.splitext(data)

    sqlite_file = "{}.sqlite".format(split_file_name[0])
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # pylint: disable=no-member
    if len(gtfs.feeds) < 1:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_devices([GTFSDepartureSensor(gtfs, name, origin, destination)])
Example #4
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config[CONF_DATA]
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)
    offset: datetime.timedelta = config[CONF_OFFSET]
    include_tomorrow = config[CONF_TOMORROW]

    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found")
        return

    (gtfs_root, _) = os.path.splitext(data)

    sqlite_file = f"{gtfs_root}.sqlite?check_same_thread=False"
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_entities([
        GTFSDepartureSensor(gtfs, name, origin, destination, offset,
                            include_tomorrow)
    ])
Example #5
def load_schedule(input_file):
    global schedule
    warnings.filterwarnings("ignore", category=sa_exc.SAWarning)
    schedule = pygtfs.Schedule(input_file)

    for r in schedule.routes:
        routes[r.route_short_name] = r

    log("Schedule loaded for", schedule.agencies[0].agency_name)
Example #6
def updatedb():
    schedule = pygtfs.Schedule("database.db")
    r = requests.get(
        "https://sncb-opendata.hafas.de/gtfs/static/c21ac6758dd25af84cca5b707f3cb3de",
        allow_redirects=True)
    with open('rawdata.zip', 'wb') as f:
        f.write(r.content)
    pygtfs.overwrite_feed(schedule, "rawdata.zip")
    return "ok"
Example #7
def construct_data() -> Tuple[List[Set[int]], List[List[int]], List[str]]:
    """
    Create the stops_for_routes and route_for_stops lists from gtfs data.

    RAPTOR expects a "route" to contain only trips that share the same
    stop sequence. GTFS route objects cannot be reused directly: a GTFS
    route groups both directions together and also groups trips with
    different stop sequences under the same route.
    """
    schedule = pygtfs.Schedule(DB_FILENAME)

    # Map from GTFS stop_ids to the raptor int id.
    gtfs_stop_id_to_raptor = {
        s.stop_id: i
        for i, s in enumerate(schedule.stops)
    }

    trip_id_to_stop_ids: DefaultDict[str, List[str]] = defaultdict(list)
    for st in schedule.stop_times:
        trip_id_to_stop_ids[st.trip_id].append(st.stop_id)

    Route = namedtuple('Route', ['trip_id', 'stop_ids'])

    stop_seq_to_route: Dict[str, Route] = {}
    for trip_id, stop_ids in trip_id_to_stop_ids.items():
        stop_sequence = ''.join(stop_ids)
        if stop_sequence in stop_seq_to_route:
            continue
        stop_seq_to_route[stop_sequence] = Route(trip_id, stop_ids)

    print(f'Done grouping routes, found: {len(stop_seq_to_route)} routes')

    routes_for_stops: List[Set[int]] = [
        set() for _ in range(len(gtfs_stop_id_to_raptor))
    ]
    stops_for_routes: List[List[int]] = [
        [] for _ in range(len(stop_seq_to_route))
    ]
    raptor_id_to_short_name: List[str] = [''] * len(stop_seq_to_route)
    for route_id, route in enumerate(stop_seq_to_route.values()):
        gtfs_route = schedule.routes_by_id(
            schedule.trips_by_id(route.trip_id)[0].route_id)[0]
        raptor_id_to_short_name[route_id] = gtfs_route.route_short_name

        for stop_id in route.stop_ids:
            raptor_stop = gtfs_stop_id_to_raptor[stop_id]
            routes_for_stops[raptor_stop].add(route_id)
            stops_for_routes[route_id].append(raptor_stop)

    return routes_for_stops, stops_for_routes, raptor_id_to_short_name
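An illustrative use of the returned structures: for a given RAPTOR stop id, list the routes serving it and each route's full stop sequence (stop id 0 is arbitrary):

routes_for_stops, stops_for_routes, route_names = construct_data()

stop = 0  # arbitrary RAPTOR stop id
for route_id in sorted(routes_for_stops[stop]):
    print(route_names[route_id], "serves stops", stops_for_routes[route_id])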
Example #8
def main(origin_stop_id, dest_stop_id):
    schedule = pygtfs.Schedule(DB_FILENAME)
    timetable = Timetable(schedule)

    router = Router()

    result = router.find_route(
        origin_stop_id=origin_stop_id,
        dest_stop_id=dest_stop_id,
        timetable=timetable,
    )

    return result
Example #9
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Get the GTFS sensor."""
    if config.get("origin") is None:
        _LOGGER.error("Origin must be set in the GTFS configuration!")
        return False

    if config.get("destination") is None:
        _LOGGER.error("Destination must be set in the GTFS configuration!")
        return False

    if config.get("data") is None:
        _LOGGER.error("Data must be set in the GTFS configuration!")
        return False

    gtfs_dir = hass.config.path("gtfs")

    if not os.path.exists(gtfs_dir):
        os.makedirs(gtfs_dir)

    if not os.path.exists(os.path.join(gtfs_dir, config["data"])):
        _LOGGER.error("The given GTFS data file/folder was not found!")
        return False

    import pygtfs

    split_file_name = os.path.splitext(config["data"])

    sqlite_file = "{}.sqlite".format(split_file_name[0])
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # pylint: disable=no-member
    if len(gtfs.feeds) < 1:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir,
                                              config["data"]))

    dev = []
    dev.append(GTFSDepartureSensor(gtfs, config["origin"],
                                   config["destination"]))
    add_devices(dev)
Example #10
def setup_platform(
    hass: HomeAssistantType,
    config: ConfigType,
    add_entities: Callable[[list], None],
    discovery_info: Optional[dict] = None,
) -> None:
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config[CONF_DATA]
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)
    offset = config.get(CONF_OFFSET)
    include_tomorrow = config[CONF_TOMORROW]

    if not os.path.exists(gtfs_dir):
        os.makedirs(gtfs_dir)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found")
        return

    import pygtfs

    (gtfs_root, _) = os.path.splitext(data)

    sqlite_file = "{}.sqlite?check_same_thread=False".format(gtfs_root)
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_entities([
        GTFSDepartureSensor(gtfs, name, origin, destination, offset,
                            include_tomorrow)
    ])
Example #11
# 'services_by_id',
# 'session',
# 'shapes',
# 'stop_times',
# 'stops',
# 'stops_by_id',
# 'transfers',
# 'translations',
# 'trips',
# 'trips_by_id']

# Routes

# sched = pygtfs.Schedule(':memory:')
# pygtfs.append_feed(sched, '/Users/chris/Downloads/google_transit.zip')
sched = pygtfs.Schedule('artbus.sqlite')

# >>> sched.agencies
# [<Agency 1: Arlington Transit>]
# >>> sched.stops
# # Long list like <Stop 83: Ballston Metro G, Fairfax Dr, EB @ N Stafford, NS>,
# >>> sched.routes
# [<Route 41: 41>, <Route 42: 42>, ... <Route 92: 92>]
# >>> sched.routes_by_id('55')
# [<Route 55: 55>]
# Stops like: <Stop 98: N George Mason Drive, SB @ Patrick Henry Drive, NS>,

# r52 = sched.routes_by_id('52')[0]
# r52.trips  # [<Trip 4371>, ...]

# Realtime entity like:
# trip {
Example #12
import pygtfs
sched = pygtfs.Schedule(":memory:")
pygtfs.append_feed(sched, "../subway_gtfs")
pair_set = set()
with open('subway_pair_traveltime.csv', 'a') as file:
    for tr in sched.trips:
        n = len(tr.stop_times)
        r_id = tr.route_id
        sts = [(st.stop_id, st.arrival_time, st.departure_time)
               for st in tr.stop_times]
        sts.sort(key=lambda x: x[1])
        for i in range(n - 1):
            j = i + 1
            s_id = sts[i][0]
            e_id = sts[j][0]
            if (r_id, s_id, e_id) not in pair_set:
                # Travel time: departure from stop i to arrival at stop j.
                start = sts[i][2]
                end = sts[j][1]
                pair_set.add((r_id, s_id, e_id))
                # print(s_id, e_id, str(end - start))
                file.write(','.join([r_id, s_id, e_id, str(end - start)]))
                file.write('\n')
Example #13
def initialize_schedule(filename=':memory:') -> pygtfs.Schedule:
    return pygtfs.Schedule(filename)
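Usage sketch: the default gives a throwaway in-memory schedule, while passing a filename yields a persistent SQLite database (the feed path here is hypothetical):

scratch = initialize_schedule()                  # in memory, gone on exit
persistent = initialize_schedule("gtfs.sqlite")  # persisted on disk
pygtfs.append_feed(persistent, "feed.zip")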
Example #14
    def _load_schedule(self):
        """
        SQLite cursor connections must be made in the same thread of execution
        by default. So open a new connection for each request context.
        """
        self.schedule = pygtfs.Schedule(DB_FILENAME)
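Examples 4 and 10 work around the same constraint the other way: instead of reopening the database per request, they append ?check_same_thread=False to the filename so one connection can be shared across threads. A sketch contrasting the two approaches (paths hypothetical):

# One shared connection across threads:
shared = pygtfs.Schedule("gtfs.sqlite?check_same_thread=False")

# ...versus a fresh connection per request context, as above:
per_request = pygtfs.Schedule("gtfs.sqlite")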
Example #15
# How to install pygtfs:
# Run the following on the command line (assumes pip is already installed;
# you may need to adjust the Python path):
# C:\Python27\ArcGIS10.3\python.exe -m pip install -U pygtfs

#-------------------------------------------------------------------------------

import datetime
import pygtfs

start_time = datetime.datetime.now()
print('\nStart at ' + str(start_time))

# CONFIG
# Update the following paths as needed
gtfs_feed = r"C:\tasks\2016_09_12_GTFS_ingest\MBTA\MBTA_GTFS.zip"
output_sqlite = r"C:\tasks\2016_09_12_GTFS_ingest\MBTA\mbta_gtfs.sqlite"

# MAIN

# Create a blank sqlite file
sched = pygtfs.Schedule(output_sqlite)

# Ingest the GTFS feed into the sqlite file
pygtfs.append_feed(sched, gtfs_feed)

print("Done creating sqlite file")

end_time = datetime.datetime.now()
total_time = end_time - start_time
print("\nEnd at {}.  Total run time {}".format(end_time, total_time))
Example #16
def trains():
    delays = []
    schedule = pygtfs.Schedule("database.db")
    train_data = []
    feed = gtfs_realtime_pb2.FeedMessage()
    response = requests.get(
        "https://sncb-opendata.hafas.de/gtfs/realtime/c21ac6758dd25af84cca5b707f3cb3de",
        allow_redirects=True)
    feed.ParseFromString(response.content)
    for entity in feed.entity:
        trips = schedule.trips_by_id(entity.trip_update.trip.trip_id)
        if len(trips) == 0:
            continue
        trip = trips[0]
        current_arrive_delay = 0
        current_depart_delay = 0
        depart_delays = {}
        arrive_delays = {}

        for stop_time_update in entity.trip_update.stop_time_update:
            arrive_delays[
                stop_time_update.stop_id] = stop_time_update.arrival.delay
            depart_delays[
                stop_time_update.stop_id] = stop_time_update.departure.delay

        prev_departure_time = None
        prev_stop = None
        for stop_time in trip.stop_times:
            stop = stop_time.stop
            if stop.stop_id in arrive_delays:
                current_arrive_delay = arrive_delays[stop.stop_id]
            if stop.stop_id in depart_delays:
                current_depart_delay = depart_delays[stop.stop_id]

            arrival_seconds = stop_time.arrival_time.seconds + current_arrive_delay
            departure_seconds = stop_time.departure_time.seconds + current_depart_delay

            arrival_time = datetime.time((arrival_seconds // 3600) % 24,
                                         (arrival_seconds // 60) % 60,
                                         arrival_seconds % 60)
            departure_time = datetime.time((departure_seconds // 3600) % 24,
                                           (departure_seconds // 60) % 60,
                                           departure_seconds % 60)
            current_time = datetime.datetime.now(tz).time()

            if prev_departure_time is None:
                prev_departure_time = arrival_time

            if prev_departure_time <= current_time < arrival_time:
                if prev_stop is None:
                    prev_stop = stop

                prev_stop_lat = prev_stop.stop_lat
                prev_stop_lon = prev_stop.stop_lon
                next_stop_lat = stop.stop_lat
                next_stop_lon = stop.stop_lon

                total = arrival_seconds - seconds(prev_departure_time)
                current = arrival_seconds - seconds(current_time)
                if total == 0:
                    percentage = 1
                elif seconds(current_time) < seconds(prev_departure_time):
                    percentage = 0
                else:
                    percentage = 1 - (current / total)

                delta_lat = (next_stop_lat - prev_stop_lat) * percentage
                delta_lon = (next_stop_lon - prev_stop_lon) * percentage

                delays.append(current_arrive_delay)
                train_data.append({
                    "name": translate(trip.trip_headsign, schedule),
                    "lat": prev_stop_lat + delta_lat,
                    "lon": prev_stop_lon + delta_lon,
                    "delay": current_arrive_delay,
                    "nextStopName": translate(stop.stop_name, schedule),
                    "isStopped": False,
                })
                break
            elif arrival_time <= current_time <= departure_time:
                delays.append(current_depart_delay)
                train_data.append({
                    "name": translate(trip.trip_headsign, schedule),
                    "lat": stop.stop_lat,
                    "lon": stop.stop_lon,
                    "delay": current_depart_delay,
                    "nextStopName": translate(stop.stop_name, schedule),
                    "isStopped": True,
                })
                break

            prev_departure_time = departure_time
            prev_stop = stop
    if not delays:
        # No matching trains; avoid max()/division on an empty list.
        return jsonify({"trains": train_data, "stats": {}})
    return jsonify({
        "trains": train_data,
        "stats": {
            "max_delay": max(delays),
            "avg_delay": sum(delays) / len(delays),
            "all_delays": delays
        }
    })
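The view relies on seconds() and translate() helpers defined elsewhere in the app; plausible stand-ins, based only on how they are used above, might be:

import datetime

def seconds(t):
    # Assumed behavior: seconds since midnight for a datetime.time.
    return t.hour * 3600 + t.minute * 60 + t.second

def translate(text, schedule):
    # Stand-in: the real helper presumably consults schedule.translations;
    # fall back to the original string here.
    return text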
Example #17
    def __init__(self, path):
        self._sched = pygtfs.Schedule(path)
        print("gtfs initialized")
Example #18
def make_dict(name, db_path):
    # Create a schedule object (an in-memory sqlite database)
    sched = pygtfs.Schedule(":memory:")
    pygtfs.append_feed(sched, "GTFS.zip")
    routes = sched.routes

    # Storing the static trip IDs
    static_trip_ids = []
    for i in routes:
        x = i.trips.pop()
        static_trip_ids.append(int(x.id))
    print(static_trip_ids[0])

    # Getting saved trip ids
    saved_trip_ids = []
    con = sqlite3.connect(db_path)
    curs = con.cursor()
    sqlData = curs.execute("select trip_id from vehicle_feed")
    instance = sqlData.fetchall()

    for i in instance:
        try:
            saved_trip_ids.append(int(i[0]))
        except (ValueError, TypeError):
            continue
    print(saved_trip_ids[0])

    # Comparing trip ids in static vs saved data and running the matching algo
    allCases = {}
    m = 0
    saved_trip_id_set = set(saved_trip_ids)
    for i in static_trip_ids:
        if i in saved_trip_id_set:
            this_case = {}
            # Collect the stop times for this trip, sorted by stop_sequence
            stops = [st for st in sched.stop_times if int(st.trip_id) == i]
            stops.sort(key=operator.attrgetter('stop_sequence'))
            for j in stops:
                #Getting static data
                print(m)
                m += 1
                try:
                    static_time = j.arrival_time
                except Exception:
                    continue
                static_stop_id = j.stop_id
                static_lat = sched.stops_by_id(static_stop_id)[0].stop_lat
                static_long = sched.stops_by_id(static_stop_id)[0].stop_lon
                sqlData = curs.execute(
                    "select time,lat,lng from vehicle_feed where trip_id=?",
                    (i,))
                instance = sqlData.fetchall()
                for k in instance:
                    dist = findDistInM(static_lat, static_long, float(k[1]),
                                       float(k[2]))
                    if dist < 1000:
                        minute = int(k[0].split(":")[1])
                        hour = int(k[0].split(":")[0])
                        real_time = datetime.timedelta(hours=hour,
                                                       minutes=minute)
                        # Deviation in whole minutes; total_seconds() stays
                        # correct even if the delta exceeds one day.
                        this_case[static_stop_id] = int(
                            abs(static_time - real_time).total_seconds() // 60)
                        break


            # Earlier variant that matched on timestamp instead of distance:
            # if int(k[0].split(":")[0]) == static_time_hour:
            #     minute = int(k[0].split(":")[1])
            #     if abs(minute - static_time_min) <= 1:
            #         dist = findDistInM(static_lat, static_long,
            #                            float(k[1]), float(k[2]))
            #         this_case[static_stop_id] = dist
            #         break
            if this_case != {}:
                allCases[i] = this_case
    with open(name + '.dictionary', 'wb') as config_dictionary_file:
        pickle.dump(allCases, config_dictionary_file)
Example #19
#!/usr/bin/python3

import math
from PIL import Image, ImageDraw
import pygtfs

print("Transit access test")

sched = pygtfs.Schedule("gtfs.sqlite")

routes = []

for line in ("SILVER", "GREEN", "BLUE", "RED", "YELLOW", "ORANGE"):
    route = sched.routes_by_id(line)[0]
    #for route in sched.routes:
    #	if len(route.trips) < 11:
    #		continue
    routes.append([])
    for stop_time in sorted(route.trips[10].stop_times,
                            key=lambda a: a.stop_sequence):
        stop = stop_time.stop
        #print("%s: %s at %g %g" % (line, stop.stop_name, stop.stop_lat, stop.stop_lon))
        routes[-1].append((stop.stop_lat, stop.stop_lon))

print(routes)
#routes = (((25, 30), (25, 40), (35, 45), (45, 55), (55, 65), (70, 65),
#	(90, 65), (120, 65), (140, 65), (150, 50), (160, 40), (170, 30)),
#        ((150, 50), (150, 60), (150, 80), (150, 100), (150, 120), (150, 150),
#	(150, 250), (150, 300), (150, 350)))

min_lat = min(map(lambda x: min(map(lambda y: y[0], x)), routes))
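The script is cut off here; a plausible continuation, assuming the goal is to scale the collected lat/lon pairs onto a PIL canvas and draw each route, might look like:

max_lat = max(map(lambda x: max(map(lambda y: y[0], x)), routes))
min_lon = min(map(lambda x: min(map(lambda y: y[1], x)), routes))
max_lon = max(map(lambda x: max(map(lambda y: y[1], x)), routes))

W, H = 800, 800  # hypothetical canvas size
img = Image.new("RGB", (W, H), "white")
draw = ImageDraw.Draw(img)

def to_px(lat, lon):
    # Fit the bounding box onto the canvas; image y grows downward.
    x = (lon - min_lon) / (max_lon - min_lon) * (W - 1)
    y = (1 - (lat - min_lat) / (max_lat - min_lat)) * (H - 1)
    return x, y

for route in routes:
    draw.line([to_px(lat, lon) for lat, lon in route], fill="black")
img.save("routes.png")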
Example #20
                    static_time_min = int(str(j.arrival_time).split(":")[1])
                except:
                    continue
                static_stop_id = j.stop_id
                static_lat = sched.stops_by_id(static_stop_id)[0].stop_lat
                static_long = sched.stops_by_id(static_stop_id)[0].stop_lon
                sqlData = curs.execute(
                    "select time,lat,lng from vehicle_feed where trip_id=?",
                    (i,))
                instance = sqlData.fetchall()
                for k in instance:
                    if int(k[0].split(":")[0]) == static_time_hour:
                        minute = int(k[0].split(":")[1])
                        if abs(minute - static_time_min) <= 1:
                            dist = findDistInM(static_lat, static_long,
                                               float(k[1]), float(k[2]))
                            this_case[static_stop_id] = dist
                            break
            if this_case != {}:
                allCases[i] = this_case
    with open(name + '.dictionary', 'wb') as config_dictionary_file:
        pickle.dump(allCases, config_dictionary_file)


# Create a schedule object (an in-memory sqlite database)
sched = pygtfs.Schedule(":memory:")
pygtfs.append_feed(sched, "GTFS.zip")
make_dict('trip_dist_diff1', 'bus_movements_2019_08_01.db', sched)
make_dict('trip_dist_diff2', 'bus_movements_2019_08_02.db', sched)
make_dict('trip_dist_diff3', 'bus_movements_2019_08_03.db', sched)
Example #21
import datetime
import pytz
import calendar
import pygtfs
import numpy as np
import gevent

from . import settings

sched = pygtfs.Schedule(':memory:')
pygtfs.append_feed(sched, settings.GTFS_FILE)

def _utcnow():
    return pytz.UTC.localize(datetime.datetime.now())

pacific = pytz.timezone('US/Pacific')

def _now():
    return _utcnow().astimezone(pacific)

def _today():
    return pacific.localize(datetime.datetime.combine(_now().date(), datetime.time()))


def _unix_ts(dt):
    return calendar.timegm(dt.utctimetuple())


def lookup_train_service(timestamp):
    dow = timestamp.weekday()
    for s in sched.services: