Ejemplo n.º 1
0
 def create_database_from_gtfs(self, path):
     """Open the schedule database, importing the GTFS feed on first creation.

     Args:
         path: Path to the GTFS feed (zip or directory) to import when the
             database does not exist yet.

     Returns:
         A ``pygtfs.Schedule`` bound to ``self.database_path``.
     """
     # Check BEFORE constructing the Schedule: pygtfs creates the sqlite
     # file on open, which would make the isfile() test always true.
     database_exists = os.path.isfile(self.database_path)
     # Both branches of the original built the same Schedule; hoist it.
     schedule = pygtfs.Schedule(self.database_path)
     if not database_exists:
         # Fresh database: populate it from the feed at *path*.
         pygtfs.append_feed(schedule, path)
     return schedule
Ejemplo n.º 2
0
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Get the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config.get(CONF_DATA)
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found!")
        return False

    import pygtfs

    # sqlite cache file named after the feed file, extension stripped.
    (gtfs_root, _) = os.path.splitext(data)
    sqlite_file = "{}.sqlite".format(gtfs_root)
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # Import the raw feed only once; later runs reuse the sqlite database.
    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_devices([GTFSDepartureSensor(gtfs, name, origin, destination)])
Ejemplo n.º 3
0
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config[CONF_DATA]
    offset: datetime.timedelta = config[CONF_OFFSET]
    include_tomorrow = config[CONF_TOMORROW]

    # Make sure the data directory exists (no-op when already present).
    os.makedirs(gtfs_dir, exist_ok=True)

    feed_path = os.path.join(gtfs_dir, data)
    if not os.path.exists(feed_path):
        _LOGGER.error("The given GTFS data file/folder was not found")
        return

    gtfs_root = os.path.splitext(data)[0]

    # check_same_thread=False is carried in the sqlite URI so the schedule
    # can be touched from more than one thread.
    sqlite_file = f"{gtfs_root}.sqlite?check_same_thread=False"
    gtfs = pygtfs.Schedule(os.path.join(gtfs_dir, sqlite_file))

    # First run imports the feed; subsequent runs reuse the sqlite cache.
    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, feed_path)

    sensor = GTFSDepartureSensor(
        gtfs,
        config.get(CONF_NAME),
        config.get(CONF_ORIGIN),
        config.get(CONF_DESTINATION),
        offset,
        include_tomorrow,
    )
    add_entities([sensor])
Ejemplo n.º 4
0
    def update(self):
        """Get the latest data from GTFS and update the states."""
        import pygtfs

        # Reopen the sqlite database derived from the configured data source
        # (feed file name with its extension replaced by ".sqlite").
        split_file_name = os.path.splitext(self._data_source)

        sqlite_file = "{}.sqlite".format(split_file_name[0])
        gtfs = pygtfs.Schedule(os.path.join(self._gtfs_folder, sqlite_file))

        # Import the raw feed only when the database has no feeds yet;
        # afterwards the sqlite file acts as a persistent cache.
        # pylint: disable=no-member
        if len(gtfs.feeds) < 1:
            pygtfs.append_feed(
                gtfs, os.path.join(self._gtfs_folder, self._data_source))

        # The departure record drives both the state and all attributes.
        self._departure = get_next_departure(gtfs, self.origin,
                                             self.destination)
        self._state = self._departure["minutes_until_departure"]

        origin_station = self._departure["origin_station"]
        destination_station = self._departure["destination_station"]
        origin_stop_time = self._departure["origin_stop_time"]
        destination_stop_time = self._departure["destination_stop_time"]
        agency = self._departure["agency"]
        route = self._departure["route"]
        trip = self._departure["trip"]

        # Sensor name: "<agency name> <origin stop id> to <dest stop id>
        # next departure".
        name = "{} {} to {} next departure"
        self._name = name.format(agency.agency_name, origin_station.stop_id,
                                 destination_station.stop_id)

        # Build attributes

        self._attributes = {}

        def dict_for_table(resource):
            """Return a dict for the SQLAlchemy resource given."""
            return dict((col, getattr(resource, col))
                        for col in resource.__table__.columns.keys())

        def append_keys(resource, prefix=None):
            """Properly format key val pairs to append to attributes."""
            for key, val in resource.items():
                # Skip blank values and the internal feed bookkeeping column.
                if val == "" or val is None or key == "feed_id":
                    continue
                # e.g. "stop_id" -> "Stop ID", "agency_url" -> "Agency URL".
                pretty_key = key.replace("_", " ")
                pretty_key = pretty_key.title()
                pretty_key = pretty_key.replace("Id", "ID")
                pretty_key = pretty_key.replace("Url", "URL")
                # Prefix with the source table name unless already present.
                if prefix is not None and \
                   pretty_key.startswith(prefix) is False:
                    pretty_key = "{} {}".format(prefix, pretty_key)
                self._attributes[pretty_key] = val

        append_keys(dict_for_table(agency), "Agency")
        append_keys(dict_for_table(route), "Route")
        append_keys(dict_for_table(trip), "Trip")
        append_keys(dict_for_table(origin_station), "Origin Station")
        append_keys(dict_for_table(destination_station), "Destination Station")
        append_keys(origin_stop_time, "Origin Stop")
        append_keys(destination_stop_time, "Destination Stop")
Ejemplo n.º 5
0
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config.get(CONF_DATA)
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)
    offset = config.get(CONF_OFFSET)

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found")
        return False

    import pygtfs

    (gtfs_root, _) = os.path.splitext(data)

    # The query string rides along in the sqlite path; presumably it allows
    # access from multiple threads — verify against pygtfs' URI handling.
    sqlite_file = "{}.sqlite?check_same_thread=False".format(gtfs_root)
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # Import the raw feed only once; later runs reuse the sqlite database.
    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_entities([
        GTFSDepartureSensor(gtfs, name, origin, destination, offset)])
Ejemplo n.º 6
0
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config.get(CONF_DATA)
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found!")
        return False

    import pygtfs

    # sqlite cache file named after the feed file, extension stripped.
    (gtfs_root, _) = os.path.splitext(data)
    sqlite_file = "{}.sqlite".format(gtfs_root)
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # Import the raw feed only once; later runs reuse the sqlite database.
    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_devices([GTFSDepartureSensor(gtfs, name, origin, destination)])
Ejemplo n.º 7
0
def setup_platform(hass: HomeAssistantType, config: ConfigType,
                   add_entities: Callable[[list], None],
                   discovery_info: Optional[dict] = None) -> bool:
    """Set up the GTFS sensor.

    Returns True when the sensor was added, False on missing feed data.
    """
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = str(config.get(CONF_DATA))
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)
    offset = config.get(CONF_OFFSET)
    include_tomorrow = config.get(CONF_TOMORROW)

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found")
        return False

    import pygtfs

    (gtfs_root, _) = os.path.splitext(data)

    # Query string in the sqlite path; presumably enables cross-thread
    # access — confirm against pygtfs' connection handling.
    sqlite_file = "{}.sqlite?check_same_thread=False".format(gtfs_root)
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # Import the raw feed only once; later runs reuse the sqlite database.
    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_entities([
        GTFSDepartureSensor(gtfs, name, origin, destination, offset,
                            include_tomorrow)])
    return True
Ejemplo n.º 8
0
    def __init__(self):
        """Build an in-memory schedule preloaded with the small test feed."""
        # Schedule(":memory:") creates a throwaway sqlite DB in RAM;
        # append_feed imports the GTFS fixture from the test data directory.
        self.schedule = Schedule(":memory:")
        append_feed(self.schedule, "test/data/atx_small" )


# class GTFSATXSetup(object):
#     def __init__(self):
#         self.schedule = Schedule(":memory:")
#         append_feed(self.schedule, "test/data/atx_small" )
Ejemplo n.º 9
0
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Get the GTFS sensor."""
    # Validate the required configuration keys up front.
    if config.get("origin") is None:
        _LOGGER.error("Origin must be set in the GTFS configuration!")
        return False

    if config.get("destination") is None:
        _LOGGER.error("Destination must be set in the GTFS configuration!")
        return False

    if config.get("data") is None:
        _LOGGER.error("Data must be set in the GTFS configuration!")
        return False

    gtfs_dir = hass.config.path("gtfs")

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, config["data"])):
        _LOGGER.error("The given GTFS data file/folder was not found!")
        return False

    import pygtfs

    # sqlite cache file named after the feed file, extension stripped.
    (gtfs_root, _) = os.path.splitext(config["data"])
    joined_path = os.path.join(gtfs_dir, "{}.sqlite".format(gtfs_root))
    gtfs = pygtfs.Schedule(joined_path)

    # Import the raw feed only once; later runs reuse the sqlite database.
    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, config["data"]))

    add_devices([GTFSDepartureSensor(gtfs, config["origin"],
                                     config["destination"])])
Ejemplo n.º 10
0
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Get the GTFS sensor."""
    # Validate the required configuration keys up front.
    if config.get("origin") is None:
        _LOGGER.error("Origin must be set in the GTFS configuration!")
        return False

    if config.get("destination") is None:
        _LOGGER.error("Destination must be set in the GTFS configuration!")
        return False

    if config.get("data") is None:
        _LOGGER.error("Data must be set in the GTFS configuration!")
        return False

    gtfs_dir = hass.config.path("gtfs")

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, config["data"])):
        _LOGGER.error("The given GTFS data file/folder was not found!")
        return False

    import pygtfs

    # sqlite cache file named after the feed file, extension stripped.
    (gtfs_root, _) = os.path.splitext(config["data"])
    joined_path = os.path.join(gtfs_dir, "{}.sqlite".format(gtfs_root))
    gtfs = pygtfs.Schedule(joined_path)

    # Import the raw feed only once; later runs reuse the sqlite database.
    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, config["data"]))

    add_devices([GTFSDepartureSensor(gtfs, config["origin"],
                                     config["destination"])])
Ejemplo n.º 11
0
def setup_platform(
    hass: HomeAssistantType,
    config: ConfigType,
    add_entities: Callable[[list], None],
    discovery_info: Optional[dict] = None,
) -> None:
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config[CONF_DATA]
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)
    offset = config.get(CONF_OFFSET)
    include_tomorrow = config[CONF_TOMORROW]

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found")
        return

    import pygtfs

    (gtfs_root, _) = os.path.splitext(data)

    # Query string in the sqlite path; presumably enables cross-thread
    # access — confirm against pygtfs' connection handling.
    sqlite_file = "{}.sqlite?check_same_thread=False".format(gtfs_root)
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # Import the raw feed only once; later runs reuse the sqlite database.
    # pylint: disable=no-member
    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_entities([
        GTFSDepartureSensor(gtfs, name, origin, destination, offset,
                            include_tomorrow)
    ])
Ejemplo n.º 12
0
import datetime
import pytz
import calendar
import pygtfs
import numpy as np
import gevent

from . import settings

# Load the GTFS feed once at import time into an in-memory sqlite database;
# all lookups below run against this module-level schedule.
sched = pygtfs.Schedule(':memory:')
pygtfs.append_feed(sched, settings.GTFS_FILE)

def _utcnow():
    """Return the current instant as a timezone-aware UTC datetime."""
    # BUGFIX: the original localized datetime.now() — local wall-clock
    # time — as UTC, which misstates the instant on any non-UTC host.
    # Take the clock reading in UTC before attaching the UTC zone.
    return pytz.UTC.localize(datetime.datetime.utcnow())

# Local timezone used for service-day and schedule-time calculations.
pacific = pytz.timezone('US/Pacific')

def _now():
    """Return the current time converted to US/Pacific."""
    utc_now = _utcnow()
    return utc_now.astimezone(pacific)

def _today():
    """Return midnight of the current US/Pacific day, timezone-aware."""
    local_midnight = datetime.datetime.combine(_now().date(), datetime.time())
    return pacific.localize(local_midnight)


def _unix_ts(dt):
    """Return the Unix timestamp (seconds since the epoch, UTC) of *dt*."""
    utc_struct = dt.utctimetuple()
    return calendar.timegm(utc_struct)


def lookup_train_service(timestamp):
    dow = timestamp.weekday()
    for s in sched.services:
Ejemplo n.º 13
0
def gtfs_import():
    """Import the Israel GTFS zip into the local "gtfs.sqlite" database."""
    # Schedule() opens (or creates) the sqlite file; append_feed loads the
    # zip's contents into it.
    schedule = Schedule("gtfs.sqlite")
    append_feed(schedule, "israel-gtfs.zip")
Ejemplo n.º 14
0
import sys
# HACK: prefer a local checkout of pygtfs over any installed version.
sys.path.insert(0, 'pygtfs')
import pygtfs

# Build (or reopen) gtfs.sqlite and import the TriMet feed snapshot into it.
sched = pygtfs.Schedule("gtfs.sqlite")
pygtfs.append_feed(sched, "Trimet_2016-02-13")
Ejemplo n.º 15
0
def make_dict(name, db_path):
    """Compare static GTFS stop times against recorded vehicle positions.

    Loads the static feed from "GTFS.zip", samples one trip per route, and
    for every sampled trip that also appears in *db_path*'s ``vehicle_feed``
    table computes, per stop, the minutes between the scheduled arrival and
    the first recorded position within 1 km of the stop.  The resulting
    ``{trip_id: {stop_id: minutes}}`` mapping is pickled to
    ``"<name>.dictionary"``.
    """
    # Static feed: an in-memory sqlite database built from GTFS.zip.
    sched = pygtfs.Schedule(":memory:")
    pygtfs.append_feed(sched, "GTFS.zip")

    # One representative trip id per route (pop mutates the trips relation,
    # matching the original behavior).
    static_trip_ids = []
    for route in sched.routes:
        sample_trip = route.trips.pop()
        static_trip_ids.append(int(sample_trip.id))
    print(static_trip_ids[0])

    # Trip ids with recorded vehicle positions.
    con = sqlite3.connect(db_path)
    curs = con.cursor()
    saved_trip_ids = []
    for row in curs.execute("select trip_id from vehicle_feed").fetchall():
        try:
            saved_trip_ids.append(int(row[0]))
        except (TypeError, ValueError):
            # Non-numeric trip ids in the feed are skipped.
            continue
    print(saved_trip_ids[0])
    saved_set = set(saved_trip_ids)  # O(1) membership vs. the nested scan

    # Fetch the stop_times relation once instead of once per matching trip.
    all_stop_times = sched.stop_times

    allCases = {}
    m = 0
    for trip_id in static_trip_ids:
        if trip_id not in saved_set:
            continue
        this_case = {}
        stops = [st for st in all_stop_times if int(st.trip_id) == trip_id]
        # BUGFIX: the original sorted the full stop_times list (stopt) but
        # then iterated the unsorted per-trip subset; sort the subset used.
        stops.sort(key=operator.attrgetter('stop_sequence'))
        # The recorded positions depend only on the trip, so query once per
        # trip (parameterized) rather than once per stop via string concat.
        instance = curs.execute(
            "select time,lat,lng from vehicle_feed where trip_id=?",
            (trip_id,)).fetchall()
        for st in stops:
            print(m)  # progress indicator
            m += 1
            try:
                static_time = st.arrival_time
            except Exception:
                # pygtfs may fail to materialize malformed times; skip stop.
                continue
            static_stop_id = st.stop_id
            stop = sched.stops_by_id(static_stop_id)[0]
            for k in instance:
                dist = findDistInM(stop.stop_lat, stop.stop_lon,
                                   float(k[1]), float(k[2]))
                if dist < 1000:
                    # Recorded time is "HH:MM..."; keep minute precision.
                    hour = int(k[0].split(":")[0])
                    minute = int(k[0].split(":")[1])
                    real_time = datetime.timedelta(hours=hour,
                                                   minutes=minute)
                    this_case[static_stop_id] = int(
                        abs(static_time - real_time).seconds / 60)
                    break
        if this_case != {}:
            allCases[trip_id] = this_case
    con.close()

    with open(name + '.dictionary', 'wb') as config_dictionary_file:
        pickle.dump(allCases, config_dictionary_file)
Ejemplo n.º 16
0
  parser.add_argument("--route", help="Run for route; helpful for debugging.", action="append")
  parser.add_argument("--stops", help="Include stopss in output.", action="store_true")
  parser.add_argument("--exclude", help="Exclude routes", action="append")
  parser.add_argument("--planner", help="Reconstruct routes using a trip planner: osrm or otp")
  parser.add_argument("--trips", help="Include trip details", action="store_true")

  args = parser.parse_args()
  filename = args.filename
  output = args.output
  
  # Open the GTFS .zip or cache-y sqlite version.
  if filename.endswith(".db"):
    sched = pygtfs.Schedule(filename)
  elif filename.endswith(".zip"):
    sched = pygtfs.Schedule(":memory:")
    pygtfs.append_feed(sched, filename)

  # Get routes
  routes = sched.routes
  if args.route:
    routes = [i for i in sched.routes if i.route_id in args.route]
  if args.exclude:
    routes = [i for i in sched.routes if i.route_id not in args.exclude]

  # Calculate route stats and add to collection
  c = []
  for route in routes:
    for f in route_info(route, sched=sched, planner=args.planner, includetrips=args.trips):
      c.append(f)

  if args.stops:
Ejemplo n.º 17
0
# How to install pygtfs:
# Run the following in command line (Assume pip is already installed, you may need to modify Python path)
# C:\Python27\ArcGIS10.3\python.exe -m pip install -U pygtfs

#-------------------------------------------------------------------------------

import datetime
import pygtfs

# Wall-clock timing for the whole ingest run.
start_time = datetime.datetime.now()
print('\nStart at ' + str(start_time))

# CONFIG
# Update the following paths as needed
gtfs_feed = r"C:\tasks\2016_09_12_GTFS_ingest\MBTA\MBTA_GTFS.zip"
output_sqlite = r"C:\tasks\2016_09_12_GTFS_ingest\MBTA\mbta_gtfs.sqlite"

# MAIN

# Open the output sqlite file (pygtfs creates it when it does not exist).
sched = pygtfs.Schedule(output_sqlite)

# Ingest the GTFS feed zip into the sqlite file; this is the slow step.
pygtfs.append_feed(sched, gtfs_feed)

print("Done creating sqlite file")

end_time = datetime.datetime.now()
total_time = end_time - start_time
print("\nEnd at {}.  Total run time {}".format(end_time, total_time))
Ejemplo n.º 18
0
def load_gtfs_feed(schedule: pygtfs.Schedule, filename: str) -> None:
    """Import the GTFS feed at *filename* into *schedule*."""
    # BUGFIX: the body referenced the undefined name `input_filename`,
    # raising NameError on every call; use the actual parameter.
    pygtfs.append_feed(schedule, filename)
import pygtfs
# Append one CSV row per unique (route, stop, next-stop) triple seen in the
# Manhattan bus feed: route_id,start_stop,end_stop,travel_time.
with open('bus_pair_traveltime.csv', 'a') as file:
    sched = pygtfs.Schedule(":memory:")
    zone = 'Manhattan'
    # 'Manhattan'
    # , 'Staten']:
    pygtfs.append_feed(sched, "../bus_gtfs/gtfs_" + zone + '.zip')
    # Triples already written, to avoid duplicate rows.
    pair_set = set()
    for tr in sched.trips:
        n = len(tr.stop_times)
        r_id = tr.route_id
        # (stop_id, arrival, departure) tuples, ordered by arrival time.
        sts = [(st.stop_id, st.arrival_time, st.departure_time)
               for st in tr.stop_times]
        sts.sort(key=lambda x: x[1])
        for i in range(n - 1):
            j = i + 1
            s_id = sts[i][0]
            e_id = sts[j][0]
            if not (r_id, s_id, e_id) in pair_set:
                # NOTE(review): travel time is next stop's DEPARTURE minus
                # this stop's ARRIVAL — confirm that is intended.
                start = sts[i][1]
                end = sts[j][2]
                pair_set.add((r_id, s_id, e_id))
                #                 print(s_id,e_id,str(end-start))
                file.write(tr.route_id + ',' + s_id + ',' + e_id + ',' +
                           str(end - start))
                file.write('\n')
import pygtfs
# Same pair-travel-time extraction as the bus script, for the subway feed.
sched = pygtfs.Schedule(":memory:")
pygtfs.append_feed(sched, "../subway_gtfs")
pair_set = set()
with open('subway_pair_traveltime.csv','a') as file:
    for tr in sched.trips:
        n = len(tr.stop_times)
        r_id = tr.route_id
        # (stop_id, arrival, departure) tuples, ordered by arrival time.
        sts = [(st.stop_id, st.arrival_time, st.departure_time)for st in tr.stop_times]
        sts.sort(key=lambda x:x[1])
        for i in range(n - 1):
            j = i + 1
            s_id = sts[i][0]
            e_id = sts[j][0]
            # Emit each (route, stop, next-stop) triple only once.
            if not (r_id,s_id,e_id) in pair_set:
                start = sts[i][1]
                end = sts[j][2]
                pair_set.add((r_id,s_id,e_id))
#                 print(s_id,e_id,str(end-start))
                file.write(tr.route_id+','+s_id+','+e_id+','+str(end-start))
                file.write('\n')
import pygtfs
# Same pair-travel-time extraction as the bus script, for the LIRR feed.
with open('lirr_pair_traveltime.csv', 'a') as file:
    sched = pygtfs.Schedule(":memory:")
    pygtfs.append_feed(sched, "../lirr_gtfs")
    pair_set = set()
    for tr in sched.trips:
        n = len(tr.stop_times)
        r_id = tr.route_id
        # (stop_id, arrival, departure) tuples, ordered by arrival time.
        sts = [(st.stop_id, st.arrival_time, st.departure_time)
               for st in tr.stop_times]
        sts.sort(key=lambda x: x[1])
        for i in range(n - 1):
            j = i + 1
            s_id = sts[i][0]
            e_id = sts[j][0]
            # Emit each (route, stop, next-stop) triple only once.
            if not (r_id, s_id, e_id) in pair_set:
                start = sts[i][1]
                end = sts[j][2]
                pair_set.add((r_id, s_id, e_id))
                #                 print(s_id,e_id,str(end-start))
                file.write(tr.route_id + ',' + s_id + ',' + e_id + ',' +
                           str(end - start))
                file.write('\n')
Ejemplo n.º 22
0
    def update(self):
        """Get the latest data from GTFS and update the states."""
        import pygtfs

        # Reopen the sqlite database derived from the configured data source
        # (feed file name with its extension replaced by ".sqlite").
        split_file_name = os.path.splitext(self._data_source)

        sqlite_file = "{}.sqlite".format(split_file_name[0])
        gtfs = pygtfs.Schedule(os.path.join(self._gtfs_folder, sqlite_file))

        # Import the raw feed only when the database has no feeds yet;
        # afterwards the sqlite file acts as a persistent cache.
        # pylint: disable=no-member
        if len(gtfs.feeds) < 1:
            pygtfs.append_feed(gtfs, os.path.join(self._gtfs_folder,
                                                  self._data_source))

        # The departure record drives both the state and all attributes.
        self._departure = get_next_departure(gtfs, self.origin,
                                             self.destination)
        self._state = self._departure["minutes_until_departure"]

        origin_station = self._departure["origin_station"]
        destination_station = self._departure["destination_station"]
        origin_stop_time = self._departure["origin_stop_time"]
        destination_stop_time = self._departure["destination_stop_time"]
        agency = self._departure["agency"]
        route = self._departure["route"]
        trip = self._departure["trip"]

        # Sensor name: "<agency name> <origin stop id> to <dest stop id>
        # next departure".
        name = "{} {} to {} next departure"
        self._name = name.format(agency.agency_name,
                                 origin_station.stop_id,
                                 destination_station.stop_id)

        # Build attributes

        self._attributes = {}

        def dict_for_table(resource):
            """Return a dict for the SQLAlchemy resource given."""
            return dict((col, getattr(resource, col))
                        for col in resource.__table__.columns.keys())

        def append_keys(resource, prefix=None):
            """Properly format key val pairs to append to attributes."""
            for key, val in resource.items():
                # Skip blank values and the internal feed bookkeeping column.
                if val == "" or val is None or key == "feed_id":
                    continue
                # e.g. "stop_id" -> "Stop ID", "agency_url" -> "Agency URL".
                pretty_key = key.replace("_", " ")
                pretty_key = pretty_key.title()
                pretty_key = pretty_key.replace("Id", "ID")
                pretty_key = pretty_key.replace("Url", "URL")
                # Prefix with the source table name unless already present.
                if prefix is not None and \
                   pretty_key.startswith(prefix) is False:
                    pretty_key = "{} {}".format(prefix, pretty_key)
                self._attributes[pretty_key] = val

        append_keys(dict_for_table(agency), "Agency")
        append_keys(dict_for_table(route), "Route")
        append_keys(dict_for_table(trip), "Trip")
        append_keys(dict_for_table(origin_station), "Origin Station")
        append_keys(dict_for_table(destination_station), "Destination Station")
        append_keys(origin_stop_time, "Origin Stop")
        append_keys(destination_stop_time, "Destination Stop")
Ejemplo n.º 23
0
                    static_time_min = int(str(j.arrival_time).split(":")[1])
                except:
                    continue
                static_stop_id = j.stop_id
                static_lat = sched.stops_by_id(static_stop_id)[0].stop_lat
                static_long = sched.stops_by_id(static_stop_id)[0].stop_lon
                sqlData = curs.execute(
                    "select time,lat,lng from vehicle_feed where trip_id=" +
                    str(i))
                instance = sqlData.fetchall()
                for k in instance:
                    if int(k[0].split(":")[0]) == static_time_hour:
                        minute = int(k[0].split(":")[1])
                        if abs(minute - static_time_min) <= 1:
                            dist = findDistInM(static_lat, static_long,
                                               float(k[1]), float(k[2]))
                            this_case[static_stop_id] = dist
                            break
            if this_case != {}:
                allCases[i] = this_case
                #getting saved data
    with open(name + '.dictionary', 'wb') as config_dictionary_file:
        pickle.dump(allCases, config_dictionary_file)


# Build the static schedule once and reuse it for all three daily runs.
# NOTE(review): make_dict is called with three arguments here; confirm the
# make_dict in scope accepts a schedule parameter (a two-argument variant
# exists elsewhere).
sched = pygtfs.Schedule(
    ":memory:")  # create a schedule object (a sqlite database)
pygtfs.append_feed(sched, "GTFS.zip")
make_dict('trip_dist_diff1', 'bus_movements_2019_08_01.db', sched)
make_dict('trip_dist_diff2', 'bus_movements_2019_08_02.db', sched)
make_dict('trip_dist_diff3', 'bus_movements_2019_08_03.db', sched)