def get_lots(city):
    """Return the most recently stored lot data for *city* as JSON.

    Aborts with 404 for browser noise (favicon/robots) and unsupported
    cities, and with 500 on database problems or when no row has been
    stored for the city yet.

    :param city: URL path segment naming the city.
    :return: Flask JSON response with the newest stored ``data`` payload.
    """
    # Browsers request these paths on every host; they are never city names.
    if city == "favicon.ico" or city == "robots.txt":
        abort(404)
    app.logger.info("GET /" + city + " - " + user_agent(request))

    city_module = env.supported_cities().get(city, None)
    if city_module is None:
        app.logger.info("Unsupported city: " + city)
        return ("Error 404: Sorry, '" + city +
                "' isn't supported at the current time.", 404)

    # Live mode bypasses the database and scrapes on demand.
    if env.LIVE_SCRAPE:
        return jsonify(scraper._live(city_module))

    try:
        with db.cursor() as cursor:
            sql = "SELECT timestamp_updated, timestamp_downloaded, data" \
                  " FROM parkapi WHERE city=%s" \
                  " ORDER BY timestamp_downloaded DESC LIMIT 1;"
            cursor.execute(sql, (city,))
            rows = cursor.fetchall()
    except (psycopg2.OperationalError, psycopg2.ProgrammingError) as e:
        app.logger.error("Unable to connect to database: " + str(e))
        abort(500)
    # Fix: an empty result set previously raised an unhandled IndexError
    # (HTTP 500 with a traceback); keep the 500 status but fail cleanly.
    if not rows:
        app.logger.error("No data stored for city: " + city)
        abort(500)
    return jsonify(rows[0]["data"])
def get_most_lots_from_known_data(city, lot_name):
    """
    Get the total value from the highest known value in the last saved JSON.
    This is useful for cities that don't publish total number of spaces for
    a parking lot.

    Caveats:
     - Returns 0 if not found.
     - If a lot name exists twice only the last value is returned.

    :param city: city identifier used as the DB key and the cache key.
    :param lot_name: name of the lot whose highest known count is wanted.
    :return: highest "free" count ever observed for that lot, or 0.
    """
    global LOT_COUNTS_PER_CITY  # FIXME ugly work around, this should be really fixed in a different way
    lot_counts = LOT_COUNTS_PER_CITY.get(city, {})
    if lot_counts == {}:
        # Cold cache: scan the last 600 snapshots for this city once and
        # remember the per-lot maxima for subsequent calls.
        with db.cursor() as cursor:
            sql = """
            SELECT data FROM parkapi
            WHERE city=%s
            ORDER BY timestamp_downloaded DESC
            LIMIT 600;
            """
            cursor.execute(sql, (city,))
            all_data = cursor.fetchall()
        for json_data in all_data:
            lots = json_data[0]["lots"]
            for lot in lots:
                # Fix: key the cache by each lot's own name instead of the
                # requested lot_name. Previously the first call stored the
                # city-wide maximum under a single key, and every other lot
                # name then returned 0 from the warm cache.
                name = lot["name"]
                count = int(lot["free"])
                if count > lot_counts.get(name, 0):
                    lot_counts[name] = count
        LOT_COUNTS_PER_CITY[city] = lot_counts
    return lot_counts.get(lot_name, 0)
def known_timespan_data(city, lot_id, date_from, date_to, version):
    """Return the free-count history of one lot between two timestamps.

    API version 1 had no timespan support, so it yields an empty dict;
    version "1.1" returns a list of {"timestamp", "free"} entries drawn
    from every stored snapshot in the requested window.

    :param city: city identifier stored in the parkapi table.
    :param lot_id: id of the lot to extract from each snapshot.
    :param date_from: lower bound (exclusive) for timestamp_downloaded.
    :param date_to: upper bound (exclusive) for timestamp_downloaded.
    :param version: API version selector (1 or "1.1").
    """
    if version == 1:
        return {}
    elif version == "1.1":
        history = []
        query = '''SELECT timestamp_downloaded, data \
                FROM parkapi \
                WHERE timestamp_downloaded > %s AND timestamp_downloaded < %s AND city = %s'''
        with db.cursor() as cur:
            cur.execute(query, (date_from, date_to, city,))
            rows = cur.fetchall()
        for row in rows:
            stamp = row["timestamp_downloaded"].strftime("%Y-%m-%dT%H:%M:%S")
            # Each snapshot contains every lot; keep only the one requested.
            for lot in row['data']['lots']:
                if lot['id'] != lot_id:
                    continue
                history.append({"timestamp": stamp, "free": lot["free"]})
        return history
def get_lots(city):
    """Return the most recently stored lot data for *city* as JSON.

    Aborts with 404 for browser noise (favicon/robots) and unsupported
    cities, and with 500 on database problems or when no row has been
    stored for the city yet.

    :param city: URL path segment naming the city.
    :return: Flask JSON response with the newest stored ``data`` payload.
    """
    # Browsers request these paths on every host; they are never city names.
    if city == "favicon.ico" or city == "robots.txt":
        abort(404)
    app.logger.info("GET /" + city + " - " + user_agent(request))

    city_module = env.supported_cities().get(city, None)
    if city_module is None:
        app.logger.info("Unsupported city: " + city)
        return ("Error 404: Sorry, '" + city +
                "' isn't supported at the current time.", 404)

    # Live mode bypasses the database and scrapes on demand.
    if env.LIVE_SCRAPE:
        return jsonify(scraper._live(city_module))

    try:
        with db.cursor() as cursor:
            sql = "SELECT timestamp_updated, timestamp_downloaded, data" \
                  " FROM parkapi WHERE city=%s" \
                  " ORDER BY timestamp_downloaded DESC LIMIT 1;"
            cursor.execute(sql, (city,))
            rows = cursor.fetchall()
    except (psycopg2.OperationalError, psycopg2.ProgrammingError) as e:
        app.logger.error("Unable to connect to database: " + str(e))
        abort(500)
    # Fix: an empty result set previously raised an unhandled IndexError
    # (HTTP 500 with a traceback); keep the 500 status but fail cleanly.
    if not rows:
        app.logger.error("No data stored for city: " + city)
        abort(500)
    return jsonify(rows[0]["data"])
def main(args):
    """CLI entry point: validate arguments, build the query and dump data.

    :param args: parsed argparse namespace with ``city``, ``year``,
        ``month`` and ``week`` attributes.
    :raises SystemExit: with status 1 when month and week are both given.
    """
    if args.month and args.week:
        print("Month and Week cannot be specified together.")
        # Fix: the bare exit() built-in is injected by the `site` module
        # and is not guaranteed to exist in every environment; raising
        # SystemExit is the reliable equivalent.
        raise SystemExit(1)
    query = create_query(args.city, args.year, args.month, args.week)
    db.setup()
    with db.cursor() as cursor:
        get_data(cursor, query)
def update_cache(city):
    """Refresh the per-city response cache when newer data exists.

    ``cache`` maps city -> (timestamp_downloaded, jsonified payload).
    A cheap timestamp-only probe decides whether the full row (including
    the large ``data`` column) actually needs to be re-fetched.
    """
    global cache
    with db.cursor() as cursor:
        if city in cache:
            # Freshness probe: only the newest download timestamp.
            probe = ("SELECT timestamp_downloaded FROM parkapi WHERE city=%s "
                     "ORDER BY timestamp_downloaded DESC LIMIT 1;")
            cursor.execute(probe, (city, ))
            newest = cursor.fetchall()[0]["timestamp_downloaded"]
            if cache[city][0] == newest:
                return  # cached entry already holds the latest row
        query = ("SELECT timestamp_updated, timestamp_downloaded, data"
                 " FROM parkapi WHERE city=%s"
                 " ORDER BY timestamp_downloaded DESC LIMIT 1;")
        cursor.execute(query, (city, ))
        row = cursor.fetchall()[0]
        cache[city] = (row["timestamp_downloaded"], jsonify(row["data"]))
def init_static():
    """Populate the static per-lot metadata (total spaces) for every city.

    Reads the newest stored snapshot of each supported city and records
    each lot's "total" keyed by lot id. Cities without any stored row are
    logged with a warning and left with an empty entry.
    """
    global cache
    global static
    for city in env.supported_cities().keys():
        try:
            static[city] = {}
            with db.cursor() as cursor:
                sql = ("SELECT data"
                       " FROM parkapi WHERE city=%s"
                       " ORDER BY timestamp_downloaded DESC LIMIT 1;")
                cursor.execute(sql, (city, ))
                newest = cursor.fetchall()[0]
            # Fill incrementally so a problem mid-way leaves partial data
            # rather than discarding everything for the city.
            for lot in newest["data"]["lots"]:
                static[city][lot["id"]] = {"total": lot["total"]}
        except IndexError:
            # fetchall() came back empty: no snapshot stored yet.
            app.logger.warning("Failed to get static data for " + city)
def scrape_city(module):
    """Scrape one city module and persist the parsed result.

    Downloads the city's page, parses it with the module's own parser,
    stamps the result with metadata, and writes it to the database in a
    single committed transaction.

    :param module: city module exposing ``geodata.city`` and ``parse_html``.
    """
    geo_city = module.geodata.city
    html = get_html(geo_city)
    payload = add_metadata(module.parse_html(html))
    with db.cursor(commit=True) as cursor:
        save_data_to_db(cursor, payload, geo_city.id)