Code Example #1
def main(gtfs_file, input_json_file):
    """ load gtfs_file and instructions from JSON"""
    gtfs_feed = mzgtfs.feed.Feed(filename=gtfs_file)
    gtfs_feed.preload()

    with open(input_json_file) as jsonfile:
        input_json = json.load(jsonfile)

    for shape_id, new_headsign in input_json.iteritems():
        printed = False
        print "updating shape " + shape_id + " to " + new_headsign
        
        #set comprehension did not work :/
        for t in gtfs_feed.trips():
            if t.get('shape_id') == shape_id:
                if not printed:
                    print "originally  " + t.get('trip_headsign')
                    print "now  " + new_headsign
                    printed = True
                t.set('trip_headsign', new_headsign)

    gtfs_feed.write('trips.txt', gtfs_feed.trips())
    files = ['trips.txt']

    print "saving file"
    
    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)
    util.delete_temp_files(files)
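
These extracted functions rely on module-level imports (json, shutil, mzgtfs.feed and the project's util helpers) that the example pages do not show. A minimal sketch of the input JSON this first script expects, a mapping of shape_id to the new headsign, plus a driver call; the shape IDs and file names are hypothetical:

import json

# Hypothetical mapping of shape_id -> new trip_headsign; real IDs would come
# from the feed's shapes.txt.
headsigns = {
    "shp_101": "Downtown via Main St",
    "shp_102": "Airport Express",
}

with open("headsigns.json", "w") as jsonfile:
    json.dump(headsigns, jsonfile)

main("feed.zip", "headsigns.json")  # the script overwrites feed.zip in place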
Code Example #2
def main(gtfs_file):
    """ load gtfs_file and replace headsign with last stop name"""
    gtfs_feed = mzgtfs.feed.Feed(filename=gtfs_file)
    gtfs_feed.preload()

    for t in gtfs_feed.trips():
        last_stop = t.stop_sequence()[-1].get('stop_id')
        new_headsign = gtfs_feed.stop(last_stop).get('stop_name')
        t.set('trip_headsign', new_headsign)

    gtfs_feed.write('trips.txt', gtfs_feed.trips())
    files = ['trips.txt']

    print "saving file"

    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)
    util.delete_temp_files(files)
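
Note that t.stop_sequence()[-1] raises an IndexError for a trip that has no stop_times. A defensive variant of the loop, as a sketch that reuses only the calls already made above:

for t in gtfs_feed.trips():
    stops_in_order = t.stop_sequence()
    if not stops_in_order:
        continue  # skip trips without stop_times instead of crashing
    last_stop = stops_in_order[-1].get('stop_id')
    new_headsign = gtfs_feed.stop(last_stop).get('stop_name')
    t.set('trip_headsign', new_headsign)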
Code Example #3
def main(gtfs_file, input_json_file):
    """ load gtfs_file and instructions from JSON"""

    with open(input_json_file) as jsonfile:
        input_json = json.load(jsonfile)

    gtfs_feed = mzgtfs.feed.Feed(filename=gtfs_file)

    for fare_id, rules_attributes in input_json.iteritems():
        add_fare_id(gtfs_feed, fare_id, rules_attributes)

    files = ['fare_attributes.txt', 'fare_rules.txt']
    gtfs_feed.write('fare_attributes.txt', gtfs_feed.fares())
    gtfs_feed.write('fare_rules.txt', gtfs_feed.fare_rules())

    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)

    util.delete_temp_files(files)
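
The JSON maps a fare_id to whatever add_fare_id (not shown here) expects as rules_attributes. A sketch of a plausible input file; the inner keys are assumptions based on the price, currency_type and route_id assertions in Code Example #5:

import json

# Hypothetical structure: only the fare_id -> attributes mapping is taken
# from the code above; "price", "currency_type" and "routes" are guesses.
fares = {
    "1": {"price": "0.95", "currency_type": "EUR", "routes": ["R1"]},
}

with open("fares.json", "w") as jsonfile:
    json.dump(fares, jsonfile)

main("feed.zip", "fares.json")  # hypothetical file names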
Code Example #4
def main(gtfs_file, input_json_file):
    """ load gtfs_file and instructions from JSON"""
    gtfs_feed = mzgtfs.feed.Feed(filename=gtfs_file)
    gtfs_feed.preload()

    with open(input_json_file) as jsonfile:
        input_json = json.load(jsonfile)

    for new_route in input_json['new_routes']:
        add_route(gtfs_feed, new_route)

    for route in input_json['routes']:
        split_route(gtfs_feed, route)

    files = ["routes.txt", "trips.txt"]

    gtfs_feed.write('routes.txt', gtfs_feed.routes())
    gtfs_feed.write('trips.txt', gtfs_feed.trips())
    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)
    util.delete_temp_files(files)
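
Only the two top-level keys, new_routes and routes, can be read off the code above; the per-entry fields are consumed by add_route and split_route, which are not shown, so the route fields in this sketch are assumptions:

import json

# Hypothetical input: the "new_routes" / "routes" keys match the code above,
# the per-route fields are guesses.
instructions = {
    "new_routes": [{"route_id": "R2", "route_short_name": "2"}],
    "routes": [{"route_id": "R1"}],
}

with open("routes.json", "w") as jsonfile:
    json.dump(instructions, jsonfile)

main("feed.zip", "routes.json")  # hypothetical file names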
Code Example #5
    def test_write_out(self):
        add_fares.add_attribute(self.f, '1', self.attribute)
        add_fares.add_rule_to_route(self.f, '1', 'R1', {})

        files = ['fare_attributes.txt', 'fare_rules.txt']

        self.f.write('fare_attributes.txt', self.f.fares())
        self.f.write('fare_rules.txt', self.f.fare_rules())

        with open('fare_attributes.txt', 'rb') as o:
            reader = csv.DictReader(o)
            for row in reader:
                assert row['price'] == '0.95'
                assert row['currency_type'] == 'EUR'

        with open('fare_rules.txt', 'rb') as o:
            reader = csv.DictReader(o)
            for row in reader:
                assert row['fare_id'] == '1'
                assert row['route_id'] == 'R1'

        util.delete_temp_files(files)
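
The test depends on fixtures (self.f and self.attribute) built in a setUp that is not included on this page. Given the assertions, a sketch of what that setup presumably looks like; the feed file name is hypothetical:

    def setUp(self):
        # Sketch only: load a sample feed and define a fare attribute whose
        # values match the assertions in test_write_out.
        self.f = mzgtfs.feed.Feed(filename='sample-feed.zip')  # hypothetical
        self.attribute = {
            'price': '0.95',
            'currency_type': 'EUR',
        }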
Code Example #6
def main(gtfs_file):
    """ load gtfs_file and make stops and trips accessible"""
    gtfs_feed = mzgtfs.feed.Feed(filename=gtfs_file)

    make_accessible_stops(gtfs_feed)

    make_accessible_trips(gtfs_feed)

    files = ['stops.txt', 'trips.txt']

    cols = [
        'stop_id', 'stop_lat', 'stop_lon', 'stop_name', 'wheelchair_boarding'
    ]
    gtfs_feed.write('stops.txt', gtfs_feed.stops(), columns=cols)

    cols = [
        'route_id', 'trip_id', 'service_id', 'direction_id', 'trip_headsign',
        'shape_id', 'wheelchair_accessible'
    ]
    gtfs_feed.write('trips.txt', gtfs_feed.trips(), columns=cols)

    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)

    util.delete_temp_files(files)
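
make_accessible_stops and make_accessible_trips are not shown on this page. Judging from the wheelchair_boarding and wheelchair_accessible columns written above and the per-stop pattern in Code Example #7, they presumably look roughly like this sketch:

def make_accessible_stops(feed):
    # Sketch: mark every stop wheelchair accessible, mirroring the
    # stop.set('wheelchair_boarding', '1') pattern in Code Example #7.
    for stop in feed.stops():
        stop.set('wheelchair_boarding', '1')


def make_accessible_trips(feed):
    # Sketch: mark every trip wheelchair accessible ('1' = accessible in GTFS).
    for trip in feed.trips():
        trip.set('wheelchair_accessible', '1')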
Code Example #7
File: add_accessible.py  Project: laidig/mzgtfs-tools
def main(gtfs_file, input_json_file):
    """ load gtfs_file and instructions from JSON"""
    gtfs_feed = mzgtfs.feed.Feed(filename=gtfs_file)
    feed_stops = gtfs_feed.stops()

    with open(input_json_file, 'rb') as jsonfile:
        accessible_stops = json.load(jsonfile)
        accessible_stop_count = len(accessible_stops)

    added_accessible_stop_count = 0
    added_accessible_parent_count = 0

    parent_to_child_stations = defaultdict(set)

    for stop in feed_stops:
        if stop.id() in accessible_stops:
            stop.set('wheelchair_boarding', '1')
            added_accessible_stop_count += 1

        parent = stop.get('parent_station')
        if parent and parent.strip():
            parent_to_child_stations[parent].add(stop.id())

    # if the stop is a parent station and all substations are accessible, parent station is accessible, too
    for parent, children in parent_to_child_stations.iteritems():
        child_count = len(children)
        accessible_count = 0
        for c in children:
            stop = gtfs_feed.stop(c)
            if stop.get('wheelchair_boarding') == '1':
                accessible_count += 1

        if child_count == accessible_count:
            gtfs_feed.stop(parent).set('wheelchair_boarding', '1')
            added_accessible_parent_count += 1

    make_accessible_trips(gtfs_feed)

    files = ['stops.txt', 'trips.txt']

    # nicely ordered output columns
    cols = [
        'stop_id', 'stop_lat', 'stop_lon', 'stop_name', 'wheelchair_boarding'
    ]

    # if there are any additional columns in the source data, add them here
    all_columns = {column for s in feed_stops for column in s.keys()}
    for c in all_columns:
        if c not in cols:
            cols.append(c)

    gtfs_feed.write('stops.txt', gtfs_feed.stops(), columns=cols)

    print "added accessibility to {} stops from an input list of {}".format(
        added_accessible_stop_count, accessible_stop_count)
    if added_accessible_parent_count > 0:
        print "and {} parent stations made accessible".format(
            added_accessible_parent_count)

    cols = [
        'route_id', 'trip_id', 'service_id', 'direction_id', 'trip_headsign',
        'shape_id', 'wheelchair_accessible'
    ]

    gtfs_feed.write('trips.txt', gtfs_feed.trips(), columns=cols)

    gtfs_feed.make_zip('output.zip', files=files, clone=gtfs_file)
    shutil.move('output.zip', gtfs_file)

    util.delete_temp_files(files)
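
The input JSON here is simply a collection of accessible stop IDs; the code only needs membership tests and len(). A minimal sketch with hypothetical stop IDs and file names:

import json

# Hypothetical list of stop_ids that should get wheelchair_boarding = '1'.
accessible = ["S100", "S101", "S205"]

with open("accessible_stops.json", "w") as jsonfile:
    json.dump(accessible, jsonfile)

main("feed.zip", "accessible_stops.json")  # overwrites feed.zip in place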