def add_feed_id(gtfs_feed, gtfs_file, feed_id=None, new_agency_id_bool=False):
    """Add a feed_id to the feed's feed_info.txt (creating the table if
    absent), optionally replacing the agency_id throughout the feed, then
    rewrite the GTFS zip in place.

    Args:
        gtfs_feed: a loaded mzgtfs Feed object.
        gtfs_file: path to the GTFS zip file; overwritten on success.
        feed_id: feed_id to record; defaults to the first agency's id.
        new_agency_id_bool: when truthy, also replace the agency_id in
            agency.txt and routes.txt with the feed_id.

    Raises:
        ValueError: if agency_id replacement is requested on a feed with
            more than one agency.
    """
    files = ["feed_info.txt"]
    util.delete_temp_files(files)
    if len(gtfs_feed.agencies()) > 1 and new_agency_id_bool is True:
        raise ValueError(
            'cannot replace agency_id when there is more than one agency')
    # NOTE(review): with multiple agencies (and no replacement requested) the
    # first agency supplies publisher name/url/lang — confirm this is intended.
    agency_id = gtfs_feed.agencies()[0].id()
    agency = gtfs_feed.agency(agency_id)
    url = agency.get('agency_url')
    lang = agency.get('agency_lang')
    feed_id = feed_id or agency_id
    print("adding feed_id " + feed_id + " to " + gtfs_file)
    feed_lang = lang if lang else 'en'
    if str_to_bool(new_agency_id_bool):
        # Replace the agency_id everywhere it appears: agency.txt and routes.txt.
        gtfs_feed.agency(agency_id).set('agency_id', feed_id)
        agency_id = feed_id
        files.append('agency.txt')
        files.append('routes.txt')
        gtfs_feed.write('agency.txt', gtfs_feed.agencies())
        for r in gtfs_feed.routes():
            r.set('agency_id', feed_id)
        gtfs_feed.write('routes.txt', gtfs_feed.routes())
    if 'feed_info' not in gtfs_feed.by_id:
        # No feed_info.txt in the source feed: build one row from agency data.
        gtfs_feed.by_id['feed_info'] = {}
        cls = gtfs_feed.FACTORIES['feed_info']
        info = cls.from_row({
            'feed_publisher_name': agency_id,
            'feed_publisher_url': url,
            'feed_lang': feed_lang,
            'feed_id': feed_id
        })
        gtfs_feed.by_id['feed_info']['a'] = info
    else:
        # BUG FIX: the original called .set on the bound method object
        # gtfs_feed.feed_infos (missing call parentheses), which raises
        # AttributeError. Set the feed_id on each feed_info entity instead.
        for fi in gtfs_feed.feed_infos():
            fi.set('feed_id', feed_id)
    gtfs_feed.write('feed_info.txt', gtfs_feed.feed_infos())
    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)
    util.delete_temp_files(files)
def rename_route_id(gtfs_feed, gtfs_file, original_route_id, new_route_id):
    """Rename a route_id in both trips.txt and routes.txt, then rewrite the
    GTFS zip in place.

    Args:
        gtfs_feed: a loaded mzgtfs Feed object.
        gtfs_file: path to the GTFS zip file; overwritten on success.
        original_route_id: the route_id to rename.
        new_route_id: the replacement route_id.
    """
    files = ['trips.txt', 'routes.txt']
    util.delete_temp_files(files)
    for t in gtfs_feed.trips():
        if t.get('route_id') == original_route_id:
            t.set('route_id', new_route_id)
    # BUG FIX: the original popped the route out of by_id['routes'] without
    # re-adding it under the new id, so the route was deleted outright and the
    # retargeted trips referenced a route that no longer existed. Rename the
    # entity and re-key its dict entry instead.
    for r in gtfs_feed.routes():
        if r.get('route_id') == original_route_id:
            r.set('route_id', new_route_id)
            gtfs_feed.by_id['routes'][new_route_id] = \
                gtfs_feed.by_id['routes'].pop(original_route_id)
    gtfs_feed.write('trips.txt', gtfs_feed.trips())
    gtfs_feed.write('routes.txt', gtfs_feed.routes())
    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)
    util.delete_temp_files(files)
def main(argv):
    """Remove agencies that no route references, then rewrite the GTFS zip
    in place.

    Args:
        argv: command-line arguments; argv[1] is the GTFS zip path.
    """
    if len(argv) < 2:
        print("usage: simple_agency_remove.py gtfs_file")
        # BUG FIX: exit nonzero on a usage error (exit code 0 signals success
        # to the calling shell).
        sys.exit(1)
    files = ['agency.txt']
    gtfs_file = argv[1]
    f = mzgtfs.feed.Feed(gtfs_file)
    agencies_in_routes = set(r.get('agency_id') for r in f.routes())
    # Iterate a snapshot so popping entries out of by_id['agency'] cannot
    # disturb the iteration.
    for a in list(f.agencies()):
        if a.id() not in agencies_in_routes:
            f.by_id['agency'].pop(a.id())
    f.write('agency.txt', f.agencies())
    f.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)
    util.delete_temp_files(files)
def fix_transfers(gtfs_file, gtfs_feed):
    """Drop transfers whose from/to stop is missing from stops.txt, then
    rewrite the GTFS zip in place.

    Args:
        gtfs_file: path to the GTFS zip file; overwritten on success.
        gtfs_feed: a loaded mzgtfs Feed object.
    """
    files = ["transfers.txt"]
    util.delete_temp_files(files)
    # PERF FIX: build the set of known stop ids once (O(stops)) instead of the
    # original nested scan over all stops for every transfer
    # (O(transfers * stops)) with "COMPLETEDSTOPID" sentinel strings.
    stop_ids = set(s['stop_id'] for s in gtfs_feed.stops())
    kept = []
    for t in gtfs_feed.transfers():
        if t['to_stop_id'] in stop_ids and t['from_stop_id'] in stop_ids:
            kept.append(t)
        else:
            print("Dropping Xfer to: %s from: %s because the stops weren't found in stops.txt." % (t['to_stop_id'], t['from_stop_id']))
    # Rebuild the transfers table from the surviving rows, keyed by index.
    cls = gtfs_feed.FACTORIES['transfers']
    gtfs_feed.by_id['transfers'] = {}
    for i, x in enumerate(kept):
        gtfs_feed.by_id['transfers'][i] = cls.from_row({
            'from_stop_id': x['from_stop_id'],
            'to_stop_id': x['to_stop_id'],
            'transfer_type': x['transfer_type'],
            'min_transfer_time': x['min_transfer_time']
        })
    gtfs_feed.write('transfers.txt', gtfs_feed.transfers())
    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)
    util.delete_temp_files(files)
def rename_agency(gtfs_feed, gtfs_file, original_agency, new_agency, feed_id):
    """Rename an agency_id in agency.txt and routes.txt, record the feed_id in
    feed_info.txt (creating or updating the table as needed), then rewrite the
    GTFS zip in place.

    Args:
        gtfs_feed: a loaded mzgtfs Feed object.
        gtfs_file: path to the GTFS zip file; overwritten on success.
        original_agency: the agency_id to rename.
        new_agency: the replacement agency_id.
        feed_id: feed_id to record in feed_info.txt.

    Raises:
        ValueError: if original_agency is not present in the feed.
    """
    files = ["feed_info.txt", "agency.txt", "routes.txt"]
    util.delete_temp_files(files)
    aid = None
    for agency in gtfs_feed.agencies():
        if agency.id() == original_agency:
            aid = agency
            break
    # BUG FIX: compare against None with "is", not "==" (identity, not equality).
    if aid is None:
        raise ValueError("No agency id of "+original_agency+" found, exiting")
    url = gtfs_feed.agency(aid.id()).get('agency_url')
    lang = gtfs_feed.agency(aid.id()).get('agency_lang')
    print("changing agency "+aid.id()+" to "+new_agency+" and adding feed_id " + feed_id +".")
    feed_lang = lang if lang else 'en'
    cls = gtfs_feed.FACTORIES['feed_info']

    def _new_info():
        # One feed_info row describing the renamed agency (the original
        # repeated this literal three times).
        return cls.from_row({
            'feed_publisher_name': new_agency,
            'feed_publisher_url': url,
            'feed_lang': feed_lang,
            'feed_id': feed_id
        })

    # Fix the agency.txt file first.
    gtfs_feed.agency(original_agency).set('agency_id', new_agency)
    gtfs_feed.write('agency.txt', gtfs_feed.agencies())
    # Now fix the routes.txt file.
    for r in gtfs_feed.routes():
        if r.get('agency_id') == original_agency:
            r.set('agency_id', new_agency)
    gtfs_feed.write('routes.txt', gtfs_feed.routes())
    try:
        len(gtfs_feed.feed_infos())  # raises if the feed has no feed_info table
        skip = False
        newfi = {}
        key = 97  # ord('a'): re-key rows as 'a', 'b', ... like the rest of the file
        for f in gtfs_feed.feed_infos():
            if f.get('feed_publisher_name') == new_agency and not skip:
                # Replace the first row already naming the new agency.
                newfi[chr(key)] = _new_info()
                skip = True
            else:
                # Copy the existing row through unchanged.
                newfi[chr(key)] = cls.from_row({
                    'feed_publisher_name': f.get('feed_publisher_name'),
                    'feed_publisher_url': f.get('feed_publisher_url'),
                    'feed_lang': f.get('feed_lang'),
                    'feed_id': f.get('feed_id')
                })
            key = key + 1
        if not skip:
            # No existing row matched the new agency: append a fresh one.
            gtfs_feed.by_id['feed_info']['a'] = _new_info()
        else:
            gtfs_feed.by_id['feed_info'] = newfi
        gtfs_feed.write('feed_info.txt', gtfs_feed.feed_infos())
    except Exception:
        # No usable feed_info table at all: start it from scratch.
        # NOTE(review): broad catch retained from the original; ideally narrow
        # this to the exact exception mzgtfs raises for a missing table.
        gtfs_feed.by_id['feed_info'] = {}
        gtfs_feed.by_id['feed_info']['a'] = _new_info()
        gtfs_feed.write('feed_info.txt', gtfs_feed.feed_infos())
    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)
    util.delete_temp_files(files)