def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.API_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    data = response.json()
    counts = {
        "Parking Deck": {"spaces": 0, "capacity": 0},
        "Gay St Lot": {"spaces": 0, "capacity": 0},
    }
    for row in data:
        if row["StallType"] is not None:
            # Stalls numbered 100-199 belong to the Parking Deck; lower
            # numbers belong to the Gay St Lot.
            if 100 <= row["StallNumber"] < 200:
                counts["Parking Deck"]["capacity"] += 1
                if not row["Occupied"]:
                    counts["Parking Deck"]["spaces"] += 1
            elif row["StallNumber"] < 100:
                counts["Gay St Lot"]["capacity"] += 1
                if not row["Occupied"]:
                    counts["Gay St Lot"]["spaces"] += 1
    for lot, details in counts.items():
        yield LotSpaces(lot=lot, spaces=details["spaces"], capacity=details["capacity"])
def fetch_spaces(self) -> Iterator[LotSpaces]:
    for garage_id in self.GARAGE_IDS:
        metadata_response = requests.get(
            self.METADATA_API_URL.format(garage_id),
            headers={"Authorization": f"Bearer {self.API_KEY}"},
            timeout=self.TIMEOUT,
        )
        metadata_response.raise_for_status()
        metadata_data = metadata_response.json()

        occupancy_response = requests.get(
            self.OCCUPANCY_API_URL.format(garage_id),
            headers={"Authorization": f"Bearer {self.API_KEY}"},
            timeout=self.TIMEOUT,
        )
        occupancy_response.raise_for_status()
        occupancy_data = occupancy_response.json()

        # The occupancy endpoint returns at most one row; the "Total" group
        # holds the occupied count, and a missing row means zero occupancy.
        occupancy_rows = occupancy_data["value"]
        assert len(occupancy_rows) <= 1
        total_occupancy = next(
            (row for row in occupancy_rows if row["group"] == "Total"), None)
        occupancy = total_occupancy["value"] if total_occupancy is not None else 0

        lot = self.NAME_PREFIX_PATTERN.sub("", metadata_data["name"])
        yield LotSpaces(
            lot=lot,
            spaces=metadata_data["spaces"] - occupancy,
            capacity=metadata_data["spaces"],
            id=str(metadata_data["id"]),
            address=metadata_data["address1"],
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.HTML_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    doc = lxml.html.fromstring(response.content)
    scripts = doc.xpath('//script[contains(text(), "var map")]')
    assert len(scripts) == 1
    script = scripts[0]

    names = dict(self.NAME_PATTERN.findall(script.text))
    spaces = dict(self.SPACES_PATTERN.findall(script.text))
    capacities = dict(self.CAPACITY_PATTERN.findall(script.text))
    addresses = dict(self.ADDRESS_PATTERN.findall(script.text))
    assert names.keys() == spaces.keys() == capacities.keys() == addresses.keys()

    # The map script can list the same lot under more than one key; keep only
    # the first occurrence of each name.
    seen = set()
    for key in names.keys():
        if names[key] in seen:
            continue
        seen.add(names[key])
        yield LotSpaces(
            lot=names[key],
            spaces=int(spaces[key]),
            capacity=int(capacities[key]),
            address=addresses[key],
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    geo_response = requests.get(
        self.GEODATA_API_URL.format(self.LOCATION_ID),
        headers={"Authorization": f"Bearer {self.API_KEY}"},
        timeout=self.TIMEOUT,
    )
    geo_response.raise_for_status()
    geo_data = geo_response.json()

    occupancy_response = requests.get(
        self.OCCUPANCY_API_URL.format(self.LOCATION_ID),
        headers={"Authorization": f"Bearer {self.API_KEY}"},
        timeout=self.TIMEOUT,
    )
    occupancy_response.raise_for_status()
    occupancy_data = occupancy_response.json()

    for lot in self.LOT_NAMES:
        geo_row = next(row for row in geo_data["value"]["areas"]
                       if row["areaId"] == lot)
        occupancy_row = next(row for row in occupancy_data["value"]
                             if row["group"] == lot)
        yield LotSpaces(
            lot=lot,
            spaces=geo_row["spaces"] - occupancy_row["value"],
            capacity=geo_row["spaces"],
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    for api_url in self.API_URLS:
        response = requests.get(api_url, timeout=self.TIMEOUT)
        response.raise_for_status()
        data = response.json()
        for deck in data["decks"]:
            yield LotSpaces(
                lot=deck["name"],
                spaces=int(deck["available"]),
            )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.API_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    data = response.json()
    for row in data:
        yield LotSpaces(
            lot=row["name"],
            spaces=row["vacant_stalls"],
            id=str(row["id"]),
            url=row["url"],
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.API_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    data = response.json()
    for lots in data.values():
        if not isinstance(lots, dict):
            continue
        for lot, attributes in lots.items():
            yield LotSpaces(
                lot=self.SPACES_PATTERN.sub(" ", lot),
                spaces=attributes["vacancies"],
            )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.API_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    data = response.json()
    for row in data:
        facility = row["facilitynumber"]
        spaces = int(row["space_count"])
        capacity = int(row["total_spaces"])
        yield LotSpaces(
            lot=self.GARAGE_NAMES.get(facility, facility),
            spaces=spaces,
            capacity=capacity,
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.API_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    data = response.json()
    for row in data:
        if row["name"] in self.SKIP_LOTS:
            continue
        yield LotSpaces(
            lot=row["name"],
            spaces=int(row["SubscriberCapacity"]) - int(row["SubscriberCount"]),
            capacity=int(row["SubscriberCapacity"]),
            id=str(row["LotId"]),
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.API_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    match = self.JSON_PATTERN.search(response.content)
    assert match is not None
    data = json.loads(match.group(1))
    for result in data["results"]:
        for row in result.values():
            yield LotSpaces(
                lot=row["location_name"],
                spaces=int(row["free_spaces"]),
                capacity=int(row["total_spaces"]),
            )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.API_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    data = response.json()
    for row in data:
        # This lot shows a capacity of 9999 and doesn't appear on the website.
        if row["CarparkName"] == "Lot 9-3 Base":
            continue
        yield LotSpaces(
            lot=row["CarparkName"],
            spaces=row["Capacity"] - row["CurrentLevel"],
            capacity=row["Capacity"],
            id=str(row["CarparkNo"]),
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.HTML_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    doc = lxml.html.fromstring(response.content)
    for td in doc.xpath('//td[contains(@class, "garage")]'):
        lot = td.xpath('./span[contains(@class, "small")]/text()')[0]
        spaces = int(
            td.xpath('../td[contains(@class, "count")]/span/text()')[0])
        url = td.xpath("./a/@href")[0]
        yield LotSpaces(
            lot=lot,
            spaces=spaces,
            url=url,
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(self.HTML_URL, timeout=self.TIMEOUT)
    response.raise_for_status()
    doc = lxml.html.fromstring(response.content)
    sections = doc.xpath('//div[contains(@class, "parking-spaces")]'
                         '//div[contains(@class, "section")]')
    for section in sections:
        section_title = section.xpath(".//h3/text()")[0]
        rows = section.xpath('.//div[contains(@class, "row")]')
        for row in rows:
            location_title = row.xpath(
                './/*[contains(@class, "location-title")]/text()')[0]
            spaces = (
                row.xpath('.//div[contains(@class, "spaces-available")]')
                [0].text_content().strip())
            lot = f"{section_title}: {location_title}"
            # Treat a "Full" label as zero available spaces.
            yield LotSpaces(
                lot=lot,
                spaces=int(spaces) if "full" not in spaces.lower() else 0)
def fetch_spaces(self) -> Iterator[LotSpaces]:
    for lot_id, lot_name in self.LOTS.items():
        url = f"{self.HTML_URL}?lotname={lot_id}"
        response = requests.get(url, timeout=self.TIMEOUT)
        response.raise_for_status()
        doc = lxml.html.fromstring(response.content)
        # The count is rendered as one image per digit; image_to_digit turns
        # each image back into a character.
        digits = [
            self.image_to_digit(image) for image in doc.xpath(
                "//div[@id='divAvailableSpaces']/img/@src")
        ]
        # Strip leading "b" (non-digit placeholder) entries so the remaining
        # characters parse as an integer.
        while digits and digits[0] == "b":
            digits = digits[1:]
        assert len(digits) > 0
        spaces = int("".join(digits))
        yield LotSpaces(
            lot=lot_name,
            spaces=spaces,
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.get(
        self.HTML_URL,
        headers={"User-Agent": "open-parking-spaces"},
        timeout=self.TIMEOUT,
    )
    response.raise_for_status()
    doc = lxml.html.fromstring(response.content)
    links = doc.xpath(
        '//div[contains(@class, "tickr")]//a[contains(@class, "tickrlink")]'
    )
    for link in links:
        match = self.SPACES_PATTERN.search(link.text_content())
        assert match is not None
        lot, percent, spaces = match.groups()
        yield LotSpaces(
            lot=lot,
            spaces=int(spaces),
            url=link.attrib["href"],
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    response = requests.post(
        self.API_URL,
        timeout=self.TIMEOUT,
        json={
            "OrganizationId": "86999b40-7810-4174-b1d0-42a2bb55f165",
            "IncludeLevels": True,
        },
    )
    response.raise_for_status()
    data = response.json()
    for row in data["PublicParkingLocations"]:
        levels = row["LocationLevels"]
        assert len(levels) == 1
        level = levels[0]
        yield LotSpaces(
            lot=row["Name"],
            spaces=level["VacantBays"],
            capacity=level["TotalBays"],
            id=row["Id"],
        )
def fetch_spaces(self) -> Iterator[LotSpaces]:
    # The HTML widget maps each lot's data-parking-id to a display name; the
    # JSON endpoint keyed by those ids supplies the live counts.
    html_response = requests.get(self.HTML_URL, timeout=self.TIMEOUT)
    html_response.raise_for_status()
    doc = lxml.html.fromstring(html_response.content)
    rows = doc.xpath(
        '//div[@id="parking-widget"]//a[contains(@class, "widget-row")]'
    )
    id_to_name = {}
    for row in rows:
        lot_name = row.xpath(
            './/div[contains(@class, "left-col")]/text()')[0].strip()
        lot_id = row.xpath(
            './/div[contains(@class, "count")]/@data-parking-id')[0]
        id_to_name[lot_id] = lot_name

    json_response = requests.get(self.JSON_URL, timeout=self.TIMEOUT)
    json_response.raise_for_status()
    json_data = json_response.json()
    for lot_id, spaces in json_data.items():
        yield LotSpaces(
            lot=id_to_name[lot_id],
            spaces=spaces,
            id=lot_id,
        )
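# All of the fetch_spaces methods above yield LotSpaces records, but the
# container itself is not part of this excerpt. A minimal sketch of what it
# might look like, with the fields inferred from the call sites above; the
# types and defaults are assumptions, not the project's actual definition.
from dataclasses import dataclass
from typing import Optional


@dataclass
class LotSpaces:
    lot: str                        # Human-readable lot or garage name
    spaces: int                     # Currently available spaces
    capacity: Optional[int] = None  # Total spaces, when the source reports one
    id: Optional[str] = None        # Source-specific identifier
    address: Optional[str] = None   # Street address, when available
    url: Optional[str] = None       # Link to the lot's detail page, when available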