def parse():
    """Download the zones HTML page and upsert one zone record per table row.

    Reads the page at module-level ``URL``, parses every ``<table>`` except the
    last two (trailing non-data tables), and for each data row builds a zone
    dict ``{"code", "building", "campus"}`` which is passed to
    ``utils.update_zone``. Returns ``None``; all effects go through ``utils``.
    """
    print("Downloading zones data...")
    data = utils.download_string(URL)
    soup = BeautifulSoup(data, "lxml")
    # find_all is the canonical bs4 name (findAll is a legacy alias).
    tables = soup.find_all("table")
    # The last two tables on the page are not zone data — skip them.
    tables = tables[:-2]
    for table in tables:
        rows = table.find_all("tr")
        # rows[0] is the header row; data starts at rows[1].
        for row in rows[1:]:
            cols = row.find_all("td")
            code = utils.trim_strings(cols[0].text)
            # Drop the trailing word of the code cell (e.g. a suffix after
            # the last space) — keep everything before the final space.
            code = code.rsplit(" ", 1)[0]
            name = utils.trim_strings(cols[1].text)
            # Strip a leading "The " from the building name, if present.
            if name.startswith("The "):
                name = name[len("The "):]
            zone = {"code": code, "building": name, "campus": get_campus(code)}
            utils.update_zone(zone)
def parse_custom_zones():
    """Upsert the manually-maintained zones from ``CUSTOM_ZONES``.

    ``CUSTOM_ZONES`` maps a zone code to its building name; each entry is
    turned into a zone dict and handed to ``utils.update_zone``.
    """
    for code, building in CUSTOM_ZONES.items():
        utils.update_zone({
            "code": code,
            "building": building,
            "campus": get_campus(code),
        })