def test_read_missing(self):
    """Reading a table that the feed does not contain raises."""
    source = util.example_feed()
    overlay_feed = feed.Feed(source, path=os.path.dirname(source))
    with self.assertRaises(Exception):
        overlay_feed.read('missing')
def test_make_zip(self):
    """make_zip writes an archive containing exactly the expected GTFS tables."""
    f = feed.Feed()
    outfile = test_outfile()
    f.make_zip(
        outfile,
        path=os.path.dirname(util.example_feed()),
        clone=util.example_feed()
    )
    expect = [
        'agency.txt', 'calendar.txt', 'calendar_dates.txt',
        'fare_attributes.txt', 'fare_rules.txt', 'frequencies.txt',
        'routes.txt', 'shapes.txt', 'stop_times.txt', 'trips.txt',
        'stops.txt'
    ]
    try:
        zf = zipfile.ZipFile(outfile)
        try:
            # Compare the full sorted name lists. The original zip()-based
            # pairwise check would silently pass if the archive contained
            # extra entries or was missing trailing ones.
            assert sorted(zf.namelist()) == sorted(expect)
        finally:
            zf.close()
    finally:
        # Remove the temp archive even when an assertion fails.
        os.unlink(outfile)
def test_read_path(self):
    """Overlay directory shadows the cloned feed's data."""
    source = util.example_feed()
    overlay_feed = feed.Feed(source, path=os.path.dirname(source))
    # Stop defined by the overlay is visible...
    assert overlay_feed.stop('TEST')
    # ...while the cloned feed's original stop is not.
    with self.assertRaises(Exception):
        overlay_feed.stop('FUR_CREEK_RES')
def test_make_zip_exists(self):
    """make_zip refuses to overwrite an existing output file."""
    f = feed.Feed()
    outfile = test_outfile()
    f.make_zip(
        outfile,
        path=os.path.dirname(util.example_feed()),
        clone=util.example_feed()
    )
    try:
        # Writing to the same path a second time must raise, not clobber.
        with self.assertRaises(IOError):
            f.make_zip(
                outfile,
                path=os.path.dirname(util.example_feed()),
                clone=util.example_feed()
            )
    finally:
        # Original leaked the archive when the assertion failed.
        os.unlink(outfile)
def test_read_padding(self):
    """Rows missing trailing columns are padded to the full width.

    The Google GTFS example feed is missing columns in stop_times.txt;
    check the padding mechanism works.
    """
    f = feed.Feed(util.example_feed())
    data = f.read('stop_times')
    # Original parsed the table twice and left the first result unused;
    # iterate the single read instead.
    for entity in data:
        # Check that all 9 elements are present.
        assert len(entity) == 9
def test_read(self):
    """Basic read: the stops table parses and is cached."""
    gtfs = feed.Feed(util.example_feed())
    stops = gtfs.read('stops')
    # The example feed defines 9 stop entities.
    assert len(stops) == 9
    # A successful read populates the per-table cache.
    assert 'stops' in gtfs.by_id
def test_download(self):
    """Download the feed with cache disabled and verify its checksum."""
    tmp = tempfile.NamedTemporaryFile(delete=False)
    tmp.close()
    try:
        entity = util.example_feed()
        entity.data['url'] = self.url
        # NOTE(review): only one download is performed; the original
        # comment claimed a second, cache-verifying download that the
        # code never did. cache=False forces a fresh fetch.
        entity.download(tmp.name, cache=False)
        assert util.sha1file(tmp.name) == self.sha1_gtfs
    finally:
        # delete=False means we must remove the temp file ourselves;
        # the original leaked it.
        os.unlink(tmp.name)
def test_json(self):
    """Feed json() exposes the expected top-level attributes."""
    data = util.example_feed().json()
    for key in ('onestopId', 'url', 'feedFormat'):
        assert data[key] == self.expect[key]
    operators = data['operatorsInFeed']
    assert len(operators) == 1
    assert operators[0]['onestopId'] == 'o-9qs-demotransitauthority'
def test_download_badsha1(self):
    """A stale file with a bad checksum is re-downloaded when verify=True."""
    tmp = tempfile.NamedTemporaryFile(delete=False)
    # Bytes literal: NamedTemporaryFile opens in binary mode by default,
    # so writing a str would raise TypeError on Python 3. On Python 2
    # b'asdf' and 'asdf' are the same object type, so behavior is kept.
    tmp.write(b'asdf')
    tmp.close()
    try:
        # Precondition: the placeholder content has the wrong checksum.
        assert util.sha1file(tmp.name) != self.sha1_gtfs
        entity = util.example_feed()
        entity.data['url'] = self.url
        entity.download(tmp.name, verify=True, sha1=self.sha1_gtfs)
        # The mismatch must have triggered a fresh download.
        assert util.sha1file(tmp.name) == self.sha1_gtfs
    finally:
        # delete=False: remove the temp file ourselves.
        os.unlink(tmp.name)
def test_bootstrap_gtfs(self):
    """Bootstrapped feed passes sanity checks and exposes one operator."""
    entity = util.example_feed()
    self._sanity(entity)
    # Check operators...
    operators = list(entity.operators())
    assert len(operators) == 1
    operator = operators[0]
    assert operator.onestop() == 'o-9qs-demotransitauthority'
    assert len(operator.routes()) == 5
    assert len(operator.stops()) == 9
def test_cache(self):
    """Repeated reads return consistent data and populate the cache."""
    gtfs = feed.Feed(util.example_feed())
    first = gtfs.read('stops')   # initial read
    second = gtfs.read('stops')  # should be served from cache
    assert len(first) == len(second)
    assert 'stops' in gtfs.by_id
    assert len(gtfs.by_id['stops']) == len(first)
def test_make_zip_compression(self):
    """An uncompressed archive is strictly larger than a compressed one."""
    f = feed.Feed()
    outfile = test_outfile()
    outfile2 = test_outfile()
    try:
        f.make_zip(
            outfile,
            path=os.path.dirname(util.example_feed()),
            clone=util.example_feed(),
            compress=False
        )
        f.make_zip(
            outfile2,
            path=os.path.dirname(util.example_feed()),
            clone=util.example_feed()
        )
        # Disabling compression must cost space.
        assert os.stat(outfile).st_size > os.stat(outfile2).st_size
    finally:
        # Original leaked both files if the assertion failed; guard with
        # exists() since a make_zip failure may leave a path unwritten.
        for path in (outfile, outfile2):
            if os.path.exists(path):
                os.unlink(path)
def test_json(self):
    """Stop json() includes attributes, geometry, and serving operators."""
    entity = util.example_feed().stop(self.expect['onestopId'])
    data = entity.json()
    for key in ('name', 'onestopId', 'type'):
        assert data[key] == self.expect[key]
    served_by = data['servedBy']
    assert len(served_by) == 1
    assert served_by[0] == self.expect['servedBy'][0]
    geometry = data['geometry']
    assert geometry
    assert geometry['type'] == 'Point'
    assert geometry['coordinates']
    assert len(data['identifiers']) == 1
def test_json(self):
    """Route json() includes operator, line geometry, and served stops."""
    entity = util.example_feed().route(self.expect['onestopId'])
    data = entity.json()
    for key in ('name', 'onestopId', 'type'):
        assert data[key] == self.expect[key]
    assert data['operatedBy'] == self.expect['operatedBy']
    geometry = data['geometry']
    assert geometry
    assert geometry['type'] == 'MultiLineString'
    assert geometry['coordinates']
    assert len(data['serves']) == 2
    assert len(data['identifiers']) == 1
def test_json(self):
    """Serialized stop carries expected fields, point geometry, operators."""
    stop = util.example_feed().stop(self.expect['onestopId'])
    serialized = stop.json()
    for field in ('name', 'onestopId', 'type'):
        assert serialized[field] == self.expect[field]
    assert len(serialized['servedBy']) == 1
    assert serialized['servedBy'][0] == self.expect['servedBy'][0]
    assert serialized['geometry']
    assert serialized['geometry']['type'] == 'Point'
    assert serialized['geometry']['coordinates']
    assert len(serialized['identifiers']) == 1
def test_operators(self):
    """A route is operated by exactly one operator."""
    route = util.example_feed().route(self.expect['onestopId'])
    assert len(route.operators()) == 1
def test_stop(self):
    """Operator.stop() resolves every served stop and rejects unknown ids."""
    operator = util.example_feed().operator(self.expect['onestopId'])
    for stop in operator.stops():
        assert operator.stop(stop.onestop())
    with self.assertRaises(ValueError):
        operator.stop('none')
def test_stops(self):
    """The demo operator serves 9 stops."""
    operator = util.example_feed().operator(self.expect['onestopId'])
    assert len(operator.stops()) == 9
def test_bbox(self):
    """A point stop's bounding box collapses to its own coordinates."""
    stop = util.example_feed().stop(self.expect['onestopId'])
    expected = (-116.81797, 36.88108, -116.81797, 36.88108)
    for got, want in zip(stop.bbox(), expected):
        self.assertAlmostEqual(got, want)
def test_init(self):
    """Constructing a Feed from the example GTFS must not raise."""
    feed.Feed(util.example_feed())
def test_stops(self):
    """The example feed contains 9 stops."""
    gtfs = feed.Feed(util.example_feed())
    assert len(gtfs.stops()) == 9
def test_routes(self):
    """The example feed contains 5 routes."""
    gtfs = feed.Feed(util.example_feed())
    assert len(gtfs.routes()) == 5
def test_agencies(self):
    """The example feed defines a single agency."""
    gtfs = feed.Feed(util.example_feed())
    agencies = gtfs.agencies()
    assert len(agencies) == 1
def test_read_invalidfile(self):
    """Unknown table names raise KeyError."""
    gtfs = feed.Feed(util.example_feed())
    with self.assertRaises(KeyError):
        gtfs.read('invalidfile')
def test_geohash(self):
    """The route's geohash covers the demo transit area."""
    route = util.example_feed().route(self.expect['onestopId'])
    assert route.geohash() == '9qsb'
def test_operator(self):
    """Every operator listed in servedBy resolves on the stop."""
    stop = util.example_feed().stop(self.expect['onestopId'])
    for onestop_id in self.expect['servedBy']:
        assert stop.operator(onestop_id)
def test_operators(self):
    """The stop reports one operator per servedBy entry."""
    stop = util.example_feed().stop(self.expect['onestopId'])
    assert len(stop.operators()) == len(self.expect['servedBy'])
def test_bbox(self):
    """Bounding box of a point stop is (lon, lat, lon, lat) of the point."""
    stop = util.example_feed().stop(self.expect['onestopId'])
    expected = [-116.81797, 36.88108, -116.81797, 36.88108]
    got = list(stop.bbox())
    for index, value in enumerate(expected):
        self.assertAlmostEqual(got[index], value)
def test_from_json(self):
    """An operator survives a json() -> Operator.from_json() round trip."""
    # The original bound an unused local named `feed` here, shadowing the
    # imported `feed` module within this method; it has been removed.
    entity = util.example_feed().operator(self.expect['onestopId'])
    roundtrip = Operator.from_json(entity.json())
    self._sanity(roundtrip)
def test_agency(self):
    """agency() returns a record matching every expected field."""
    gtfs = feed.Feed(util.example_feed())
    record = gtfs.agency(self.agency_expect['agency_id'])
    for key, value in self.agency_expect.items():
        assert record[key] == value
def test_stops(self):
    """The route serves exactly the stops listed in expect['serves']."""
    route = util.example_feed().route(self.expect['onestopId'])
    served = route.stops()
    assert len(served) == 2
    for stop in served:
        assert stop.onestop() in self.expect['serves']
def test_route(self):
    """route() returns a record matching every expected field."""
    gtfs = feed.Feed(util.example_feed())
    record = gtfs.route(self.route_expect['route_id'])
    for key, value in self.route_expect.items():
        assert record[key] == value
def test_stop(self):
    """Every stop id in expect['serves'] resolves on the route."""
    route = util.example_feed().route(self.expect['onestopId'])
    for onestop_id in self.expect['serves']:
        assert route.stop(onestop_id)
def test_stop(self):
    """stop() returns a record matching every expected field."""
    gtfs = feed.Feed(util.example_feed())
    record = gtfs.stop(self.stop_expect['stop_id'])
    for key, value in self.stop_expect.items():
        assert record[key] == value
def test_example_feed(self):
    """The bundled example GTFS file exists on disk."""
    assert os.path.exists(util.example_feed())
def test_geohash(self):
    """The operator's geohash covers the demo transit area."""
    operator = util.example_feed().operator(self.expect['onestopId'])
    assert operator.geohash() == '9qs'
def test_operator(self):
    """The route resolves its operating agency by onestop id."""
    route = util.example_feed().route(self.expect['onestopId'])
    assert route.operator(self.expect['operatedBy'])
def test_json(self):
    """Operator json() includes expected attributes and 14 features."""
    operator = util.example_feed().operator(self.expect['onestopId'])
    data = operator.json()
    for key in ('name', 'onestopId', 'type'):
        assert data[key] == self.expect[key]
    assert len(data['features']) == 14