def get_gtfs_rt_data_feed(
    self, params: Optional[GTFSRTParams] = None
) -> Union[FeedMessage, APIError]:
    """
    Returns a FeedMessage of vehicles currently providing Automatic
    Vehicle Locations in BODS.

    Args:
        params: Optional GTFSRTParams whose fields restrict the result
            set (bounding_box, route_id, start_time_after,
            start_time_before). When None, an unfiltered query is made.

    Returns:
        The parsed FeedMessage on HTTP 200, otherwise an APIError
        carrying the response status code and body.
    """
    if params is None:
        params = GTFSRTParams()
    # Round-trip through JSON to get a plain dict of query parameters,
    # honouring field aliases and dropping unset filters. Use a separate
    # name so the pydantic model is not shadowed by the dict.
    query = json.loads(params.json(by_alias=True, exclude_none=True))
    response = self._make_request(self.gtfs_rt_endpoint, params=query)
    if response.status_code == HTTPStatus.OK:
        message = FeedMessage()
        message.ParseFromString(response.content)
        return message
    return APIError(status_code=response.status_code, reason=response.content)
def get_gtfs_rt_from_archive(self) -> Union[FeedMessage, APIError]:
    """
    Returns a FeedMessage of vehicles currently providing Automatic
    Vehicle Locations bulk download URL in BODS.

    Downloads the zipped archive, extracts "gtfsrt.bin", and parses it.
    On a non-200 response an APIError with the status and body is
    returned instead.
    """
    response = self._make_request(self.gtfs_rt_zip_endpoint)
    if response.status_code != HTTPStatus.OK:
        return APIError(status_code=response.status_code, reason=response.content)
    archive_buffer = io.BytesIO(response.content)
    with zipfile.ZipFile(archive_buffer) as archive, archive.open("gtfsrt.bin") as feed_file:
        parsed = FeedMessage()
        parsed.ParseFromString(feed_file.read())
        return parsed
def merge_feeds(self) -> None:
    """
    Parses the feed into the smallest possible representation of the
    necessary realtime data.

    Merges each handler's latest feed into a single FeedMessage,
    falling back to the handler's previous feed when the latest cannot
    be merged; handlers whose feeds both fail are logged and skipped.
    """
    merged = FeedMessage()
    for handler in self.feed_handlers:
        last_error = None
        # Prefer the latest feed; fall back to the previous one.
        for candidate in (handler.latest_feed, handler.prev_feed):
            try:
                merged.MergeFrom(candidate)
            except (ValueError, TypeError) as err:
                last_error = err
                continue
            break
        else:
            u.log.error("Could not merge feed %s. Error: %s", handler.id_, last_error)
    self.feed = merged
def restore_feed_from_redis(self) -> None:
    """
    Restores this handler's most recent feed from redis, if present.

    Reads the raw bytes stored under "realtime:feeds" for this id and,
    when they parse cleanly, updates latest_feed and latest_timestamp.
    Parse failures are logged and leave the handler's state untouched.
    """
    serialized = self.redis_server.hget("realtime:feeds", self.id_)
    if not serialized:
        return
    restored = FeedMessage()
    try:
        restored.ParseFromString(serialized)
    except (DecodeError, SystemError, RuntimeWarning) as err:
        u.log.error(
            "%s: unable to parse feed %s restored from redis",
            err,
            self.id_,
        )
    else:
        self.latest_feed = restored
        self.latest_timestamp = restored.header.timestamp
async def fetch(
    self,
    thread_pool_excecutor: concurrent.futures.ThreadPoolExecutor,
    attempt: int = 0,
) -> None:
    """
    Fetches url, updates class attributes with feed info.

    Downloads the GTFS-RT feed from self.url and parses the protobuf off
    the event loop in *thread_pool_excecutor* (NOTE(review): parameter
    name is misspelled but callers pass it by keyword, so it cannot be
    renamed safely). On success, prev_feed/latest_feed/latest_timestamp
    are rotated and the raw bytes are cached in redis. The outcome of
    every attempt is recorded in self.result; failed attempts recurse
    with attempt + 1 up to u.REALTIME_MAX_ATTEMPTS.
    """
    try:
        headers = {"x-api-key": u.MTA_API_KEY}
        realtime_timeout = aiohttp.ClientTimeout(total=u.REALTIME_TIMEOUT)
        async with aiohttp.ClientSession(
                timeout=realtime_timeout) as session:
            async with session.get(self.url, headers=headers) as response:
                _raw = await response.read()
        # Protobuf parsing is CPU-bound: run it in the thread pool so
        # the event loop stays responsive.
        feed_message = FeedMessage()
        loop = asyncio.get_event_loop()
        await loop.run_in_executor(
            thread_pool_excecutor,
            feed_message.ParseFromString,
            _raw,
        )
        timestamp: int = feed_message.header.timestamp
        # Accept the feed only if it is newer than what we hold by at
        # least TIME_DIFF_THRESHOLD; otherwise record it as stale.
        if timestamp >= self.latest_timestamp + TIME_DIFF_THRESHOLD:
            self.result = FetchResult(NEW_FEED, timestamp=timestamp)
            # Rotate: current latest becomes prev, new feed becomes latest.
            (
                self.prev_feed,
                self.latest_feed,
                self.latest_timestamp,
            ) = (
                self.latest_feed,
                feed_message,
                timestamp,
            )
            # Persist the raw bytes so the feed can be restored on restart.
            self.redis_server.hset("realtime:feeds", self.id_, _raw)
        else:
            self.result = FetchResult(OLD_FEED)
        return  # success path: skip the retry logic below
    except OSError as err:
        self.result = FetchResult(FETCH_FAILED, error=err)
    except (DecodeError, SystemError) as err:
        self.result = FetchResult(DECODE_FAILED, error=err)
    except RuntimeWarning as err:
        self.result = FetchResult(RUNTIME_WARNING, error=err)
    except asyncio.TimeoutError as err:
        self.result = FetchResult(FETCH_FAILED, error=f"TIMEOUT of {err}")
    # Only reached when an exception fired (success returns above);
    # retry until the attempt budget is exhausted.
    if attempt + 1 < u.REALTIME_MAX_ATTEMPTS:
        u.log.debug("parser: Fetch failed for %s, trying again", self.id_)
        await self.fetch(
            attempt=attempt + 1,
            thread_pool_excecutor=thread_pool_excecutor,
        )
async def get_feed(self):
    """Return the realtime feed, serving from the cache when fresh.

    On a cache miss, downloads the feed from self.feed_url, parses it
    into a FeedMessage, and caches the result for 30 seconds.
    """
    cached = await self.cache.get('feed')
    if cached is not None:
        return cached
    async with aiohttp.ClientSession() as session:
        async with session.get(self.feed_url) as response:
            payload = await response.read()
    feed = FeedMessage()
    feed.ParseFromString(payload)
    await self.cache.set('feed', feed, ttl=30)
    return feed
def bus_position(request):
    """Serve the most recently reported bus position as "lat,lng" text.

    Scans the on-disk feed for the entity with the greatest vehicle
    timestamp; when no entity has a positive timestamp the body is
    "None,None". CORS is opened to any origin.
    """
    with open(FEED_FILE, 'rb') as feed_file:
        feed_message = FeedMessage.FromString(feed_file.read())
    newest_timestamp = 0
    latitude, longitude = None, None
    for entity in feed_message.entity:
        vehicle = entity.vehicle
        if vehicle.timestamp > newest_timestamp:
            newest_timestamp = vehicle.timestamp
            latitude = vehicle.position.latitude
            longitude = vehicle.position.longitude
    body = ','.join(str(value) for value in (latitude, longitude))
    response = HttpResponse(body, content_type='text/plain')
    response['Access-Control-Allow-Origin'] = '*'
    return response
def test_extract_delays():
    """extract_delays yields a DataFrame: full columns for a populated
    feed, an empty frame for a timestamp-less (empty) feed."""
    expected_columns = {
        "route_id",
        "trip_id",
        "stop_id",
        "stop_sequence",
        "arrival_delay",
        "departure_delay",
    }
    for feed_message in (FeedMessage(), feed):
        delays = extract_delays(feed_message)
        # Always a DataFrame regardless of feed contents
        assert isinstance(delays, pd.DataFrame)
        if feed_message.header.timestamp:
            assert set(delays.columns) == expected_columns
        else:
            assert delays.empty
def test_get_timestamp_str():
    """get_timestamp_str returns a str for both empty and populated feeds."""
    for feed_message in (FeedMessage(), feed):
        result = get_timestamp_str(feed_message)
        assert isinstance(result, str)
def transit_update(request):
    """Ingest one vehicle-position report and append it to the feed file.

    Expects a JSON body with keys 'px'/'py' (latitude/longitude),
    'tripid', and 'ts' (epoch milliseconds). Loads the existing feed
    (or starts a fresh FULL_DATASET one), appends the new entity, prunes
    entities older than 30 minutes, rewrites FEED_FILE, and echoes the
    serialized feed in the response.
    """
    data = json.loads(request.body.decode('utf-8'))
    # Load the current feed; if the file does not exist yet, start fresh.
    try:
        with open(FEED_FILE, 'rb') as feed_file:
            feed_message = FeedMessage.FromString(feed_file.read())
    except IOError:
        feed_header = FeedHeader(
            gtfs_realtime_version='1.0',
            incrementality='FULL_DATASET',
            timestamp=int(time.time()),
        )
        feed_message = FeedMessage(
            header=feed_header,
            entity=[],
        )
    # Derive a stable entity id from the payload. hashlib.update()
    # requires bytes; passing str raises TypeError on Python 3, so each
    # value is encoded before hashing.
    message_hash = hashlib.sha256()
    for key in sorted(data.keys()):
        message_hash.update(str(data[key]).encode('utf-8'))
    position = Position(
        latitude=float(data['px']),
        longitude=float(data['py']),
    )
    trip_descriptor = TripDescriptor(
        trip_id=data['tripid'],
    )
    vehicle_position = VehiclePosition(
        trip=trip_descriptor,
        position=position,
        timestamp=int(data['ts']) // 1000,  # client sends milliseconds
    )
    feed_entity = FeedEntity(
        id=message_hash.hexdigest(),
        vehicle=vehicle_position,
    )
    feed_message.header.timestamp = int(time.time())
    feed_message.entity.extend([feed_entity])
    # Prune entities older than 30 minutes. Collect first, then remove,
    # to avoid mutating the repeated field while iterating it.
    to_remove = []
    old_enough = datetime.now() - timedelta(minutes=30)
    for item in feed_message.entity:
        try:
            when = datetime.fromtimestamp(item.vehicle.timestamp)
        except (ValueError, OverflowError, OSError):
            # fromtimestamp raises OverflowError/OSError (not only
            # ValueError) for out-of-range values on some platforms;
            # legacy entries stored milliseconds, so retry as seconds.
            when = datetime.fromtimestamp(item.vehicle.timestamp // 1000)
        if when < old_enough:
            to_remove.append(item)
    for item in to_remove:
        feed_message.entity.remove(item)
    with open(FEED_FILE, 'wb') as feed_file:
        feed_file.write(feed_message.SerializeToString())
    return HttpResponse(
        feed_message.SerializeToString(),
        content_type='application/octet-stream',
    )