def run(self):
    """Load this feed's GTFS file and upload its entities to the datastore.

    Reads the registry entry for ``self.feedid``, opens the GTFS zip
    (``self.filename`` or ``<workdir>/<onestop>.zip``), merges stops against
    existing datastore entities, then posts a changeset containing the feed's
    operators, routes, stops, and (optionally) schedule stop pairs, and
    applies it via the datastore's changeset API.

    Returns None. Side effects: network calls to ``self.datastore`` and
    log output via ``self.log``.
    """
    # Update datastore.
    self.log("===== Feed: %s =====" % self.feedid)
    feed = self.registry.feed(self.feedid)
    # Explicit filename wins; otherwise derive the zip path from the Onestop ID.
    filename = self.filename or os.path.join(self.workdir, '%s.zip' % feed.onestop())
    self.log("Opening: %s" % filename)
    gtfs_feed = mzgtfs.feed.Feed(filename)
    self.log("Creating Onestop Entities")
    gtfs_feed.preload()
    # populate=False: presumably only link GTFS entities, not re-derive the
    # registry entry — TODO confirm against entities.Feed.load_gtfs.
    feed.load_gtfs(gtfs_feed, populate=False)
    if not feed.operators():
        self.log(
            "No matching operators specified in the feed registry entry. Nothing to do."
        )
        return
    # Precalculate all Onestop IDs
    for o in feed.operators():
        o._cache_onestop()
    # Compare against datastore entities and merge if possible.
    for stop in feed.stops():
        self._merge_stop(stop)
    # Upload changeset.
    self.log("Updating feed: %s" % feed.onestop())
    # Create empty changeset; entities are appended to it in batches below.
    changeset = self.datastore.postjson('/api/v1/changesets', {"changeset": {"payload": {}}})
    changeset_id = changeset['id']
    self.log("Changeset ID: %s" % changeset_id)
    # Append each entity
    self._append_batch(feed.operators(), changeset_id, change_entity)
    self._append_batch(feed.routes(), changeset_id, change_entity)
    self._append_batch(feed.stops(), changeset_id, change_entity)
    if self.schedule_stop_pairs:
        self._append_batch(make_ssp(gtfs_feed), changeset_id, change_ssp)
    # Apply changeset
    self.log("Applying changeset...")
    self.datastore.postjson('/api/v1/changesets/%s/apply' % changeset_id)
    self.log(" -> ok")
    self.log("Finished!")
def run(self):
    """Load this feed's GTFS file and upload its entities to the datastore.

    Opens the GTFS zip for ``self.feedid``, merges stops against existing
    datastore entities, then creates, fills, and applies a changeset through
    the datastore's changeset API. Returns None; all effects are network
    calls and log lines.
    """
    self.log("===== Feed: %s =====" % self.feedid)
    registry_feed = self.registry.feed(self.feedid)
    # An explicit filename overrides the conventional <workdir>/<onestop>.zip path.
    path = self.filename or os.path.join(self.workdir, '%s.zip' % registry_feed.onestop())
    self.log("Opening: %s" % path)
    gtfs = mzgtfs.feed.Feed(path)
    self.log("Creating Onestop Entities")
    gtfs.preload()
    registry_feed.load_gtfs(gtfs, populate=False)
    # Without at least one registry operator there is nothing to upload.
    if not registry_feed.operators():
        self.log("No matching operators specified in the feed registry entry. Nothing to do.")
        return
    # Warm each operator's cached Onestop ID before any merging happens.
    for operator in registry_feed.operators():
        operator._cache_onestop()
    # Reconcile every stop with any matching datastore entity.
    for entity in registry_feed.stops():
        self._merge_stop(entity)
    self.log("Updating feed: %s" % registry_feed.onestop())
    # Start from an empty changeset, then append entities in batches.
    response = self.datastore.postjson('/api/v1/changesets', {"changeset": {"payload": {}}})
    changeset_id = response['id']
    self.log("Changeset ID: %s" % changeset_id)
    for batch in (registry_feed.operators(), registry_feed.routes(), registry_feed.stops()):
        self._append_batch(batch, changeset_id, change_entity)
    if self.schedule_stop_pairs:
        self._append_batch(make_ssp(gtfs), changeset_id, change_ssp)
    self.log("Applying changeset...")
    self.datastore.postjson('/api/v1/changesets/%s/apply' % changeset_id)
    self.log(" -> ok")
    self.log("Finished!")
def run(self):
    """Annotate GTFS stops with datastore IDs and write an artifact zip.

    For each stop in the GTFS file, looks up the stop's feed-scoped
    identifier in the datastore; matched stops get ``onestop_id`` and
    ``osm_way_id`` set, unmatched stops get them cleared. The rewritten
    stops.txt is then cloned into ``<onestop>.artifact.zip`` in the workdir.

    Returns None. Side effects: datastore queries, files written/removed
    in ``self.workdir``, log output.
    """
    # Create GTFS Artifacts
    self.log("===== Feed: %s =====" % self.feedid)
    feed = self.registry.feed(self.feedid)
    filename = self.filename or os.path.join(self.workdir, '%s.zip' % feed.onestop())
    self.log("Opening: %s" % filename)
    gtfsfeed = mzgtfs.feed.Feed(filename)
    for stop in gtfsfeed.stops():
        identifier = stop.feedid(self.feedid)
        self.log("Looking for identifier: %s" % identifier)
        found = self.datastore.stops(identifier=identifier)
        if not found:
            # Unmatched: clear both tags so stale values are not carried over.
            self.log("  No identifier found!")
            stop.set('onestop_id', None)
            stop.set('osm_way_id', None)
            continue
        # Ascending sort on 'updated_at', so this picks the EARLIEST-updated
        # match — NOTE(review): confirm the oldest (not newest) is intended.
        match = sorted(found, key=lambda x: x.data['updated_at'])[0]
        onestop_id = match.onestop()
        osm_way_id = match.tag('osm_way_id')
        self.log("  onestop_id: %s, osm_way_id: %s" % (onestop_id, osm_way_id))
        stop.set('onestop_id', onestop_id)
        stop.set('osm_way_id', osm_way_id)
    # Write output
    stopstxt = os.path.join(self.workdir, 'stops.txt')
    artifact = os.path.join(self.workdir, '%s.artifact.zip' % feed.onestop())
    # Remove leftovers from a previous run before writing fresh files.
    if os.path.exists(stopstxt):
        os.unlink(stopstxt)
    if os.path.exists(artifact):
        os.unlink(artifact)
    # Rewrite stops.txt with the annotated stops, then clone it into the zip.
    self.log("Creating output artifact: %s" % artifact)
    gtfsfeed.write(stopstxt, gtfsfeed.stops(), sortkey='stop_id')
    gtfsfeed.make_zip(artifact, files=[stopstxt], clone=filename)
    # The intermediate stops.txt is only needed inside the zip; clean it up.
    if os.path.exists(stopstxt):
        os.unlink(stopstxt)
    self.log("Finished!")
def run(self):
    """Run the external feedvalidator over this feed's GTFS file.

    Validates ``self.filename`` (or ``<workdir>/<onestop>.zip``), writes an
    HTML report to ``<workdir>/<onestop>.html``, and logs every exception the
    validator collected — or "No errors" when the report is clean.
    Returns None.
    """
    self.log("===== Feed: %s =====" % self.feedid)
    registry_feed = self.registry.feed(self.feedid)
    path = self.filename or os.path.join(self.workdir, '%s.zip' % registry_feed.onestop())
    report_path = os.path.join(self.workdir, '%s.html' % registry_feed.onestop())
    self.log("Validating: %s" % path)
    gtfs = mzgtfs.feed.Feed(path)
    result = mzgtfs.validation.ValidationReport()
    # Delegates to the external feedvalidator tool; findings accumulate on `result`.
    gtfs.validate_feedvalidator(
        result,
        feedvalidator=self.feedvalidator,
        report=report_path,
    )
    self.log("Validation report:")
    problems = result.exceptions
    if problems:
        for problem in problems:
            self.log("%s: %s" % (problem.source, problem.message))
    else:
        self.log("No errors")
    self.log("Finished!")
def run(self):
    """Annotate GTFS stops with datastore IDs and write an artifact zip.

    Every stop in the GTFS file is looked up in the datastore by its
    feed-scoped identifier: matches receive ``onestop_id``/``osm_way_id``,
    misses have both cleared. The rewritten stops.txt is cloned into
    ``<workdir>/<onestop>.artifact.zip``. Returns None.
    """
    self.log("===== Feed: %s =====" % self.feedid)
    registry_feed = self.registry.feed(self.feedid)
    path = self.filename or os.path.join(self.workdir, '%s.zip' % registry_feed.onestop())
    self.log("Opening: %s" % path)
    gtfs = mzgtfs.feed.Feed(path)
    for gtfs_stop in gtfs.stops():
        identifier = gtfs_stop.feedid(self.feedid)
        self.log("Looking for identifier: %s" % identifier)
        matches = self.datastore.stops(identifier=identifier)
        if matches:
            # min() with a key equals stable sorted(...)[0]: the entity with
            # the earliest 'updated_at' wins.
            best = min(matches, key=lambda entity: entity.data['updated_at'])
            onestop_id = best.onestop()
            osm_way_id = best.tag('osm_way_id')
            self.log("  onestop_id: %s, osm_way_id: %s" % (onestop_id, osm_way_id))
            gtfs_stop.set('onestop_id', onestop_id)
            gtfs_stop.set('osm_way_id', osm_way_id)
        else:
            # No datastore match: clear both tags so no stale value survives.
            self.log("  No identifier found!")
            gtfs_stop.set('onestop_id', None)
            gtfs_stop.set('osm_way_id', None)
    # Write the output artifact: remove leftovers, rewrite stops.txt, clone zip.
    stopstxt = os.path.join(self.workdir, 'stops.txt')
    artifact = os.path.join(self.workdir, '%s.artifact.zip' % registry_feed.onestop())
    for stale in (stopstxt, artifact):
        if os.path.exists(stale):
            os.unlink(stale)
    self.log("Creating output artifact: %s" % artifact)
    gtfs.write(stopstxt, gtfs.stops(), sortkey='stop_id')
    gtfs.make_zip(artifact, files=[stopstxt], clone=path)
    # The intermediate stops.txt only matters inside the zip; clean it up.
    if os.path.exists(stopstxt):
        os.unlink(stopstxt)
    self.log("Finished!")
def run(self):
    """Run the external feedvalidator over this feed's GTFS file.

    Validates ``self.filename`` (or ``<workdir>/<onestop>.zip``), writing an
    HTML report to ``<workdir>/<onestop>.html`` and logging each collected
    validation exception — or "No errors" when the report is clean.

    Returns None. Side effects: report file written, log output.
    """
    # Validate feeds
    self.log("===== Feed: %s =====" % self.feedid)
    feed = self.registry.feed(self.feedid)
    filename = self.filename or os.path.join(self.workdir, '%s.zip' % feed.onestop())
    report = os.path.join(self.workdir, '%s.html' % feed.onestop())
    self.log("Validating: %s" % filename)
    gtfsfeed = mzgtfs.feed.Feed(filename)
    validator = mzgtfs.validation.ValidationReport()
    # gtfsfeed.validate(validator)
    # Delegates to the external feedvalidator tool; findings accumulate on
    # `validator` and the HTML report is written to `report`.
    gtfsfeed.validate_feedvalidator(
        validator,
        feedvalidator=self.feedvalidator,
        report=report,
    )
    # validator.report()
    self.log("Validation report:")
    # Empty exception list means a clean run; otherwise log each finding.
    if not validator.exceptions:
        self.log("No errors")
    for e in validator.exceptions:
        self.log("%s: %s" % (e.source, e.message))
    self.log("Finished!")
# Everything is now ready to create the feed. print "Loading feed:", filename f = mzgtfs.feed.Feed(filename, debug=args.debug) # Create Transitland Feed from GTFS. kw = {} kw['debug'] = args.debug kw['url'] = args.url if args.feedname: kw['feedname'] = args.feedname feed = entities.Feed(**kw) feed.bootstrap_gtfs(f) # Print basic feed information. print "Feed:", feed.onestop() print " Stops:", len(feed.stops()) print " Routes:", len(feed.routes()) print " Operators:", len(feed.operators()) # Print basic operator information. for operator in feed.operators(): print " Operator:", operator.name() print " Routes:", len(operator.routes()) print " Stops:", len(operator.stops()) # Write out updated feed. output = args.output or '%s.json'%feed.onestop() data = feed.json() if args.printjson: util.json_pretty_print(data) if os.path.exists(output):
# Everything is now ready to create the feed. print "Loading feed:", filename f = mzgtfs.feed.Feed(filename, debug=args.debug) # Create Transitland Feed from GTFS. kw = {} kw['debug'] = args.debug kw['url'] = args.url if args.feedname: kw['feedname'] = args.feedname feed = entities.Feed(**kw) feed.bootstrap_gtfs(f) # Print basic feed information. print "Feed:", feed.onestop() print " Stops:", len(feed.stops()) print " Routes:", len(feed.routes()) print " Operators:", len(feed.operators()) # Print basic operator information. for operator in feed.operators(): print " Operator:", operator.name() print " Routes:", len(operator.routes()) print " Stops:", len(operator.stops()) # Write out updated feed. output = args.output or '%s.json' % feed.onestop() data = feed.json() if args.printjson: util.json_pretty_print(data) if os.path.exists(output):