def gtfs_ex_api():
    """Return the download URLs of the most recent GTFS file for every agency.

    Queries the GTFS Data Exchange API for all agencies (official and
    unofficial) and collects each agency's latest file URL, skipping
    agencies that have no file on record.

    :return: list of file-URL strings
    """
    file_list = []
    gtfs_api = GTFSExchange()
    for agency in gtfs_api.get_gtfs_agencies(official_only=False):
        # NOTE: the original also fetched get_gtfs_agency_details() here,
        # but the result was never used — dropped to avoid a wasted API
        # round-trip per agency.
        recent = gtfs_api.get_most_recent_file(agency)
        if recent:
            file_list.append(recent['file']['file_url'])
    return file_list
def get_gtfs_feeds(session):
    """Build FeedFile objects for the latest file of every US agency.

    For each United States agency reported by the GTFS Data Exchange,
    loads the agency details into the database via
    ``load_external_agencies`` and wraps the agency's most recent file
    in a :class:`FeedFile`.

    NOTE(review): a second ``get_gtfs_feeds`` definition appears later in
    this module and shadows this one at import time — confirm which is
    intended to survive.

    :param session: database session passed through to load_external_agencies
    :return: list of FeedFile instances
    """
    gtfs_api = GTFSExchange()
    feeds = []
    for feed in gtfs_api.get_gtfs_agencies(True):
        # only US feeds are loaded
        if feed['country'] != 'United States':
            continue
        details = gtfs_api.get_gtfs_agency_details(feed)['agency']
        load_external_agencies(session, details)
        feeds.append(FeedFile(**gtfs_api.get_most_recent_file(feed)['file']))
    return feeds
def get_gtfs_feeds(session, dataexchangeid_list=None):
    """Build FeedFile objects for the latest file of selected agencies.

    For each agency reported by the GTFS Data Exchange (optionally
    restricted to the given data-exchange ids), loads the agency details
    into the database via ``load_external_agencies`` and wraps the
    agency's most recent file in a :class:`FeedFile`.

    :param session: database session passed through to load_external_agencies
    :param dataexchangeid_list: optional iterable of dataexchange_id values;
        when empty or None, all agencies are included (same behavior as the
        original ``len(...) > 0`` guard, but without the mutable-default
        argument pitfall)
    :return: list of FeedFile instances
    """
    gtfs_api = GTFSExchange()
    feeds = []
    # set membership is O(1) vs. O(n) per agency against a list
    wanted_ids = set(dataexchangeid_list) if dataexchangeid_list else None
    for feed in gtfs_api.get_gtfs_agencies(True):
        if wanted_ids is not None and feed['dataexchange_id'] not in wanted_ids:
            continue
        details = gtfs_api.get_gtfs_agency_details(feed)['agency']
        load_external_agencies(session, details)
        feeds.append(FeedFile(**gtfs_api.get_most_recent_file(feed)['file']))
    return feeds
def gtfs_source_list():
    """Return file-URL records from the pickled list shipped with gtfsdb.

    Loads ``data/url_list.pkl`` from the gtfsdb package and, for each
    non-empty entry, picks the most recent datafile via
    ``GTFSExchange.recent_file``.

    :return: list of ``{'file_url': ...}`` dicts
    """
    data_dir = resource_filename('gtfsdb', 'data')
    src_file = os.path.join(data_dir, 'url_list.pkl')
    # close the file deterministically — the original leaked the handle
    with open(src_file, 'rb') as fh:
        data_feed = pickle.load(fh)
    return [
        {'file_url': GTFSExchange.recent_file(f['datafiles'])['file_url']}
        for f in data_feed if f
    ]
def test_offline(self):
    """Smoke test: the basic GTFSExchange calls run without raising."""
    gtfs_api = GTFSExchange()
    agencies = gtfs_api.get_gtfs_agencies()
    # fail with a clear assertion rather than an IndexError below
    self.assertTrue(agencies)
    # results are intentionally unused — this only checks the calls
    # complete; renamed 'file' -> 'recent' to avoid shadowing the builtin,
    # and the dead trailing 'pass' was removed
    agency_detail = gtfs_api.get_gtfs_agency_details(agencies[0])
    recent = gtfs_api.get_most_recent_file(agencies[0])
def test_official_filter(self):
    """Every agency returned with official_only=True must be flagged official."""
    exchange = GTFSExchange()
    official_agencies = exchange.get_gtfs_agencies(official_only=True)
    for record in official_agencies:
        self.assertTrue(record['is_official'])