Exemplo n.º 1
0
def get_lots(city):
    """Return the most recent stored lot data for *city* as JSON.

    Responds 404 for unsupported cities (and browser noise such as
    favicon.ico) and 500 when the database is unreachable.
    """
    # Browsers request these paths automatically; they are not cities.
    if city == "favicon.ico" or city == "robots.txt":
        abort(404)

    app.logger.info("GET /" + city + " - " + user_agent(request))

    city_module = env.supported_cities().get(city, None)

    if city_module is None:
        app.logger.info("Unsupported city: " + city)
        return ("Error 404: Sorry, '" + city +
                "' isn't supported at the current time.", 404)

    # Live-scrape mode bypasses the database and fetches fresh data.
    if env.LIVE_SCRAPE:
        return jsonify(scraper._live(city_module))

    try:
        with db.cursor() as cursor:
            sql = "SELECT timestamp_updated, timestamp_downloaded, data" \
                  " FROM parkapi WHERE city=%s ORDER BY timestamp_downloaded DESC LIMIT 1;"
            cursor.execute(sql, (city, ))
            rows = cursor.fetchall()
            # Previously rows[0] raised an unhandled IndexError (HTTP 500)
            # when no snapshot was stored yet; answer 404 instead.
            if not rows:
                app.logger.warning("No data stored for city: " + city)
                abort(404)
            data = rows[0]["data"]
    except (psycopg2.OperationalError, psycopg2.ProgrammingError) as e:
        app.logger.error("Unable to connect to database: " + str(e))
        abort(500)

    return jsonify(data)
Exemplo n.º 2
0
def get_lots(city):
    """Return the latest parking-lot snapshot for *city* as JSON.

    404 for unsupported cities and browser noise, 500 on database
    trouble.
    """
    if city == "favicon.ico" or city == "robots.txt":
        abort(404)

    # Headers.get accepts a default, so one lookup replaces the None check.
    user_agent = request.headers.get("User-Agent", "no user-agent")
    app.logger.info("GET /" + city + " - " + user_agent)

    city_module = env.supported_cities().get(city, None)

    if city_module is None:
        app.logger.info("Unsupported city: " + city)
        return "Error 404: Sorry, '" + city + "' isn't supported at the current time.", 404

    if env.LIVE_SCRAPE:
        return jsonify(scraper._live(city_module))

    try:
        conn = psycopg2.connect(**env.DATABASE)
        try:
            cursor = conn.cursor()
            # Let the database select the newest row instead of fetching
            # every row and trusting (undefined) row order client-side.
            cursor.execute(
                "SELECT timestamp_updated, timestamp_downloaded, data"
                " FROM parkapi WHERE city=%s"
                " ORDER BY timestamp_downloaded DESC LIMIT 1;",
                (city,))
            row = cursor.fetchone()
            if row is None:
                # No snapshot stored for this city yet.
                abort(404)
            data = row[2]
        finally:
            # psycopg2's "with conn" only manages the transaction; it does
            # not close the connection, so close it explicitly.
            conn.close()
    except (psycopg2.OperationalError, psycopg2.ProgrammingError) as e:
        app.logger.error("Unable to connect to database: " + str(e))
        abort(500)

    return jsonify(data)
Exemplo n.º 3
0
def get_lots(city):
    """Return the most recent stored lot data for *city* as JSON.

    Responds 404 for unsupported cities (and browser noise), 500 when
    the database is unreachable.
    """
    if city == "favicon.ico" or city == "robots.txt":
        abort(404)

    app.logger.info("GET /" + city + " - " + user_agent(request))

    city_module = env.supported_cities().get(city, None)

    if city_module is None:
        app.logger.info("Unsupported city: " + city)
        return ("Error 404: Sorry, '" +
                city +
                "' isn't supported at the current time.", 404)

    if env.LIVE_SCRAPE:
        return jsonify(scraper._live(city_module))

    # Indentation normalized to 4 spaces (the try body previously used a
    # mixed 2-space indent).
    try:
        with db.cursor() as cursor:
            sql = "SELECT timestamp_updated, timestamp_downloaded, data" \
                  " FROM parkapi WHERE city=%s ORDER BY timestamp_downloaded DESC LIMIT 1;"
            cursor.execute(sql, (city,))
            rows = cursor.fetchall()
            # rows[0] raised an unhandled IndexError (HTTP 500) when no
            # snapshot was stored yet; answer 404 instead.
            if not rows:
                app.logger.warning("No data stored for city: " + city)
                abort(404)
            data = rows[0]["data"]
    except (psycopg2.OperationalError, psycopg2.ProgrammingError) as e:
        app.logger.error("Unable to connect to database: " + str(e))
        abort(500)

    return jsonify(data)
Exemplo n.º 4
0
 def test_insert(self, mock):
     """Feed the Dresden HTML fixture through the scraper via a mocked GET."""
     fixture = os.path.join(helpers.TEST_ROOT, "fixtures", "dresden.html")
     module = env.supported_cities()["Dresden"]
     with open(fixture) as fh:
         # Serve the fixture whenever the city's source URL is requested.
         mock.get(module.geodata.city.source, text=fh.read())
     scraper.scrape_city(module)
Exemplo n.º 5
0
 def test_insert(self, mock):
     # Smoke test: scrape the Dresden fixture end-to-end with HTTP mocked out.
     path = os.path.join(helpers.TEST_ROOT, "fixtures", "dresden.html")
     cities = env.supported_cities()
     module = cities["Dresden"]
     with open(path) as f:
         # Register the fixture HTML as the response for the city's
         # source URL so scrape_city never touches the network.
         src = module.geodata.city.source
         mock.get(src, text=f.read())
     scraper.scrape_city(module)
Exemplo n.º 6
0
def main():
    """
    Iterate over all cities in ./cities,
    scrape and save their data to the database
    """
    db.setup()
    # One broken scraper must not stop the remaining cities.
    for city_module in env.supported_cities().values():
        try:
            scrape_city(city_module)
        except Exception as err:
            print("Failed to scrape '%s': %s" % (city_module.geodata.city.name, err))
            print(traceback.format_exc())
Exemplo n.º 7
0
def get_meta():
    """Return the API metadata document: known cities plus version info."""
    # Headers.get accepts a default, so one lookup replaces the None check.
    user_agent = request.headers.get("User-Agent", "no user-agent")
    app.logger.info("GET / - " + user_agent)

    # Map each human-readable city name to its identifier.
    cities = {city.city_name: city_id
              for city_id, city in env.supported_cities().items()}

    return jsonify({
        "cities": cities,
        "api_version": env.API_VERSION,
        "server_version": env.SERVER_VERSION,
        "reference": env.SOURCE_REPOSITORY
    })
Exemplo n.º 8
0
def main():
    """
    Iterate over all cities in ./cities,
    scrape and save their data to the database
    """
    db.setup()
    modules = env.supported_cities().values()
    for mod in modules:
        try:
            scrape_city(mod)
        except Exception as exc:
            # Log the failure and keep scraping the remaining cities.
            print("Failed to scrape '%s': %s" %
                  (mod.geodata.city.name, exc))
            print(traceback.format_exc())
Exemplo n.º 9
0
def init_static():
    """Populate the module-level *static* map with per-lot totals.

    For every supported city, load the newest stored snapshot and record
    each lot's "total" capacity under static[city][lot_id].  Cities with
    no stored data are left with an empty entry and a warning is logged.
    """
    # The former "global cache" declaration was unused here and removed;
    # "static" is only item-assigned, the declaration documents intent.
    global static
    for city in env.supported_cities().keys():
        try:
            # An empty dict stays behind when the DB has no rows yet.
            static[city] = {}
            with db.cursor() as cursor:
                sql = "SELECT data" \
                      " FROM parkapi WHERE city=%s ORDER BY timestamp_downloaded DESC LIMIT 1;"
                cursor.execute(sql, (city, ))
                # fetchall()[0] raises IndexError when no snapshot exists,
                # which the handler below turns into a warning.
                data = cursor.fetchall()[0]["data"]
                for lot in data["lots"]:
                    static[city][lot["id"]] = {"total": lot["total"]}
        except IndexError:
            app.logger.warning("Failed to get static data for " + city)
Exemplo n.º 10
0
def main():
    """Iterate over all cities in ./cities, scrape and save their data to the database"""

    conn = psycopg2.connect(**env.DATABASE)
    try:
        cursor = conn.cursor()

        for file, city in env.supported_cities().items():
            try:
                data = add_metadata(parse_html(city, get_html(city)))
                save_data_to_db(cursor, data, file.title())
            except Exception as e:
                # One failing city must not abort the whole run.
                print("Failed to scrape '%s': %s" % (city, e))
                print(traceback.format_exc())

        conn.commit()
    finally:
        # Close even if scraping or the commit raises, so the
        # connection is never leaked.
        conn.close()
Exemplo n.º 11
0
def main():
    """Iterate over all cities in ./cities, scrape and save their data to the database"""

    conn = psycopg2.connect(**env.DATABASE)
    try:
        cursor = conn.cursor()
        for file, city in env.supported_cities().items():
            try:
                # Per-city failures are logged and skipped.
                html = get_html(city)
                data = add_metadata(parse_html(city, html))
                save_data_to_db(cursor, data, file.title())
            except Exception as e:
                print("Failed to scrape '%s': %s" % (city, e))
                print(traceback.format_exc())
        conn.commit()
    finally:
        # Guarantee the connection is released on any exit path;
        # previously an exception leaked it.
        conn.close()
Exemplo n.º 12
0
def get_meta():
    """Serve the metadata index: every supported city plus version info."""
    app.logger.info("GET / - " + user_agent(request))

    cities = {}
    for mod in env.supported_cities().values():
        geo = mod.geodata.city
        entry = {
            "name": geo.name,
            "coords": geo.coords,
            "source": geo.source,
            "url": geo.url,
            "active_support": geo.active_support
        }
        cities[geo.id] = entry

    payload = {
        "cities": cities,
        "api_version": env.API_VERSION,
        "server_version": env.SERVER_VERSION,
        "reference": env.SOURCE_REPOSITORY
    }
    return jsonify(payload)
Exemplo n.º 13
0
def get_meta():
    """Return API metadata: per-city details plus server/API versions."""
    app.logger.info("GET / - " + user_agent(request))

    def _describe(city):
        # Flatten the geodata record into the public metadata shape.
        return {
            "name": city.name,
            "coords": city.coords,
            "source": city.source,
            "url": city.url,
            "active_support": city.active_support
        }

    cities = {m.geodata.city.id: _describe(m.geodata.city)
              for m in env.supported_cities().values()}

    return jsonify({
        "cities": cities,
        "api_version": env.API_VERSION,
        "server_version": env.SERVER_VERSION,
        "reference": env.SOURCE_REPOSITORY
    })
Exemplo n.º 14
0
def get_lots(city):
    """Serve cached lot data for *city*; 404 for unsupported cities."""
    global cache
    # Ignore paths browsers request at every host automatically.
    if city in ("favicon.ico", "robots.txt"):
        abort(404)

    app.logger.info("GET /" + city + " - " + user_agent(request))

    module = env.supported_cities().get(city, None)
    if module is None:
        app.logger.info("Unsupported city: " + city)
        return ("Error 404: Sorry, '" + city +
                "' isn't supported at the current time.", 404)

    if env.LIVE_SCRAPE:
        return jsonify(scraper._live(module))
    try:
        update_cache(city)
        return cache[city][1]
    except IndexError:
        # Nothing stored for this city yet: serve the empty skeleton.
        return jsonify(empty)
    except (psycopg2.OperationalError, psycopg2.ProgrammingError) as e:
        app.logger.error("Unable to connect to database: " + str(e))
        abort(500)