Example #1
def main(avg_city_prices_csv: str,
         output_geojson: str,
         headers: bool = False) -> None:
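    """Render a colored GeoJSON point for every city in the average-prices CSV.

    Each row is expected as: city name, avg price per m2, offer count, lat, lon.
    Marker color is picked from a 10-step red-to-green gradient.
    """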
    setup_log()
    log = getLogger()
    log.info(
        f"Parsing CSV file {avg_city_prices_csv} into GeoJSON {output_geojson}...")
    csv_lines = list(read_csv(avg_city_prices_csv))
    if headers:
        _ = csv_lines.pop(0)
    colors = color_gradient("red", "green", 10)
    points = [
        render_geojson_point(
            lat=float(t[3]),
            lon=float(t[4]),
            marker_size="small",
            marker_color=colors[_calc_value(int(float(t[1])))],
            props={
                "title": t[0],
                "offer_count": int(float(t[2])),
                "price_per_sqm": f"{int(float(t[1]))} zł/m2",
            },
        ) for t in csv_lines if t[0]
    ]
    log.info(f"Rendering GeoJSON out of {len(points)} points...")
    save_geojson(points, output_geojson)
    log.info(f"Done rendering file {output_geojson}")
Example #2
def main(db_file: str, ddl_script: str, place_cache: str, time_to_wro: str, offers_path: str, inet_curr: str,
         inet_popc: str):
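    """Load all pipeline inputs into an sqlite3 database.

    Inserts the places cache, time-to-Wroclaw travel times, current and
    planned broadband CSVs, and parcel offers. Rows are inserted in chunks
    of 50; a chunk that violates an integrity constraint is skipped and logged.
    """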
    setup_log()
    log = getLogger()
    db_conn: sqlite3.Connection = sqlite3.connect(db_file)
    _init_tables(db_conn, ddl_script)

    log.info(f"Inserting Places cache {place_cache} into sqlite3 DB {db_file}...")
    places = filter(None, map(Place.from_csv_row, read_csv(place_cache)))
    for places_chunk in chunked(map(lambda p: p.to_sql_row(), places), 50):
        try:
            c = db_conn.cursor()
            c.executemany("INSERT INTO place VALUES (?,?,?,?,?)", places_chunk)
            c.close()
            db_conn.commit()
        except sqlite3.IntegrityError as e:
            log.warning(f"Could not insert {len(places_chunk)} rows [{places_chunk[0]}, ..., {places_chunk[-1]}]: {e}")

    log.info(f"Inserting time-to-wroclaw data from {time_to_wro} into DB...")
    time_to_wro_iter = ((r[0], int(r[1]) if r[1] else None) for r in read_csv(time_to_wro))
    for row_chunk in chunked(time_to_wro_iter, 50):
        try:
            c = db_conn.cursor()
            c.executemany("INSERT INTO time_to_wroclaw VALUES (?,?)", row_chunk)
            c.close()
            db_conn.commit()
        except sqlite3.IntegrityError as e:
            log.warning(f"Could not insert {len(row_chunk)} rows [{row_chunk[0]}, ..., {row_chunk[-1]}]: {e}")

    for inet_curr_csv in list_csv_files(inet_curr):
        log.info(f"Inserting current broadband data from CSVs under {inet_curr_csv} into DB...")
        inet_curr_rows = filter(None,
                                map(_filter_fields_from_curr_inet_csv_row,
                                    read_csv(inet_curr_csv, delimiter=';')))
        broadband_accesses = filter(None, map(BroadbandAccess.from_csv_row, inet_curr_rows))
        _insert_broadband_access_obj(broadband_accesses, db_conn, log)

    for inet_popc_csv in list_csv_files(inet_popc):
        log.info(
            f"Inserting planned broadband expansion data from CSV {inet_popc_csv} into DB...")
        inet_popc_rows = filter(None,
                                map(_filter_fields_from_planned_inet_csv_row,
                                    read_csv(inet_popc_csv, delimiter=';')))
        broadband_accesses = filter(None, map(BroadbandAccess.from_csv_row, inet_popc_rows))
        _insert_broadband_access_obj(broadband_accesses, db_conn, log)

    for offers_csv in list_csv_files(offers_path):
        log.info(f"Inserting Offers from CSV {offers_csv} into sqlite3 DB {db_file}...")
        offers = filter(None, map(ParcelOffer.from_csv_row, read_csv(offers_csv)))
        for offers_chunk in chunked(map(lambda p: p.to_sql_row(), offers), 50):
            try:
                c = db_conn.cursor()
                c.executemany("INSERT INTO parcel_offer VALUES (?,?,?,?,?,?,?)", offers_chunk)
                c.close()
                db_conn.commit()
            except sqlite3.IntegrityError as e:
                log.warning(
                    f"Could not insert {len(offers_chunk)} rows [{offers_chunk[0]}, ..., {offers_chunk[-1]}]: {e}")

    db_conn.close()
    log.info("Done inserting Places cache and Offers into sqlite3 DB")
Example #3
def main(broadband_city_csv: str, geojson: str):
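    """Render city broadband stats as GeoJSON star markers on a gray gradient.

    Expects a header row, then rows of: city, access point count, min/avg/max
    bandwidth, lat, lon (the access point count is parsed but not rendered).
    Coordinates are randomized slightly (delta=0.004) before rendering; rows
    that fail to parse are logged and skipped.
    """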
    setup_log()
    log = getLogger()
    colors = color_gradient("#737373", "#F2F2F2", 3)
    points = []
    for i, cells in enumerate(read_csv(broadband_city_csv)):
        if i == 0 or not cells:
            continue
        try:
            city, ap_count = cells[0], int(cells[1])
            min_bw, avg_bw, max_bw = int(cells[2]), float(cells[3]), int(cells[4])
            lat, lon = randomize_coordinates(float(cells[5]), float(cells[6]),
                                             delta=0.004)
            color_class = calc_class(avg_bandwidth=avg_bw) if max_bw < 100 else 2
            point = render_geojson_point(
                lat=lat,
                lon=lon,
                marker_color=colors[color_class],
                marker_symbol='star',
                props={
                    'title': city,
                    'min-bandwidth': min_bw,
                    'avg-bandwidth': avg_bw,
                    'max-bandwidth': max_bw,
                    'bandwidth-class': color_class,
                })
            points.append(point)
        except (TypeError, ValueError, LookupError) as e:
            log.warning(f"Could not parse: {e}, cells: {cells}")
    if points:
        save_geojson(points, geojson)
Example #4
def main(isochrone: str,
         place_cache: str,
         output: str,
         polygon_step_time_min: int = 7):
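    """Assign each cached place a travel time to Wroclaw using an isochrone map.

    A place inside isochrone polygon N is assigned N * polygon_step_time_min
    minutes; places outside every polygon get None. Results are written to
    a CSV sorted by city name.
    """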
    setup_log()
    log = getLogger()

    log.info(f"Reading isochrone map from {isochrone} ...")
    with codecs.open(isochrone, 'r', 'utf-8-sig') as map_:
        isochrone_map = json.load(map_)
    polygons = _build_polygons(isochrone_map)

    log.info(f"Reading places cache from {place_cache} ...")
    places = filter(None, map(Place.from_csv_row, read_csv(place_cache)))
    city_to_time_to_wroclaw: Dict[str, Optional[int]] = {}

    log.info(f"Finding time to reach destination for places...")
    for p in places:
        if p not in city_to_time_to_wroclaw.keys():
            index = _index_of_polygon_point_is_in(p.lat, p.lon, polygons)
            if index != -1:
                time_to_wroclaw_min = index * polygon_step_time_min
                city_to_time_to_wroclaw[p.city] = time_to_wroclaw_min
            else:
                city_to_time_to_wroclaw[p.city] = None

    log.info(f"Writing {len(city_to_time_to_wroclaw)} results to {output} ...")
    write_csv(output,
              sorted([[k, v] for k, v in city_to_time_to_wroclaw.items()]))
    log.info("Done")
Example #5
def main(avg_city_prices_csv: str,
         output_geojson: str,
         headers: bool = False) -> None:
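    """Render a colored GeoJSON point per city from the average-prices CSV.

    Row parsing is delegated to row_to_point, which returns None for rows
    it cannot parse; those rows are filtered out before rendering.
    """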
    setup_log()
    log = getLogger()
    log.info(
        f"Parsing CSV file {avg_city_prices_csv} into GeoJSON {output_geojson}...")
    csv_lines = list(read_csv(avg_city_prices_csv))
    if headers:
        _ = csv_lines.pop(0)
    colors = color_gradient("red", "green", 10)
    points = list(
        filter(None,
               [row_to_point(t, colors, log) for t in csv_lines if t[0]]))
    log.info(f"Rendering GeoJSON out of {len(points)} points...")
    save_geojson(points, output_geojson)
    log.info(f"Done rendering file {output_geojson}")
Example #6
def convert_place_cache_from_json_to_csv(json_cache: str, csv_cache: str):
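    """Convert the JSON place cache into a semicolon-delimited CSV cache.

    Entries that cannot be parsed into a Place are logged and skipped.
    """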
    setup_log()
    log = getLogger()
    with open(json_cache, "r") as pc:
        cache = json.load(pc)
    places: List[Place] = []
    for _, place in cache.items():
        p = Place.from_json(place)
        if p:
            places.append(p)
        else:
            log.warning(f"Could not parse into place: {place}")
    rows = [p.to_csv_row() for p in places]
    # newline="" prevents the csv module from writing blank lines on Windows
    with open(csv_cache, "w", newline="") as csv_f:
        place_writer = csv.writer(csv_f, delimiter=';', quotechar='"',
                                  quoting=csv.QUOTE_MINIMAL)
        place_writer.writerows(rows)
Example #7
def main(tgt_map: str, isochrone_map: str, debug: bool, *layers_files) -> None:
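    """Overlay GeoJSON layer files on top of an isochrone base map.

    Features from each existing layer file are appended to the base map's
    feature list; missing files are skipped with a warning. The merged map
    is written to tgt_map with non-ASCII characters preserved.
    """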
    setup_log(logging.DEBUG if debug else logging.INFO)
    log = logging.getLogger("estate")

    base_map = _read_isochrone_map(isochrone_map)

    log.info(f"Applying {len(layers_files)} layers on top of {isochrone_map}")
    for lf in layers_files:
        lf = path.abspath(lf)
        if path.isfile(lf):
            with open(lf, "r") as lf_:
                layer_map = json.load(lf_)
            base_map["features"].extend(layer_map["features"])
        else:
            log.warning(
                f"GeoJSON file {lf} does not exist, omitting the layer")

    log.info(f"Writing final map at {tgt_map}")
    with open(tgt_map, "w") as tgt:
        json.dump(base_map, tgt, ensure_ascii=False, indent=2)

    log.info(f"Done writing final map at {tgt_map}")
Example #8
def main(sqlite_db: str, output_path: str, headers: bool = False):
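    """Export each reporting table from the sqlite3 DB to its own CSV file.

    Every table is dumped via _export_query; with headers=True the table
    name is forwarded as header_table so column headers can be included.
    """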
    setup_log()
    for table in ("city_broadband", "latest_offers", "avg_city_price",
                  "daily_price_avg", "last_10days_offers"):
        _export_query(sqlite_db,
                      f"SELECT * FROM {table}",
                      path.join(output_path, f"{table}.csv"),
                      header_table=table if headers else None)
Example #9
def main(map_quest_api_key: str, csv_cache: str, offers_directory: str):
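    """Resolve each parcel offer to a place via MapQuest, backed by a CSV cache.

    An existing cache is loaded from csv_cache up front; after each offers
    CSV is processed, the grown cache is saved back to the same file.
    """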
    setup_log()
    log = getLogger()
    client = MapQuestClient(map_quest_api_key, log)
    resolver = PlaceResolver(client, log)

    if path.isfile(csv_cache):
        resolver.load(csv_cache)
        log.info(f"Loaded {csv_cache} with {len(resolver.cache)} addresses")

    for csv_file in list_csv_files(offers_directory):
        log.info(f"Parsing CSV {csv_file}")
        for row in read_csv(csv_file):
            offer = ParcelOffer.from_csv_row(row)
            if offer:
                _ = resolver.get(offer)
            else:
                log.warning(f"Could not parse into offer: {row}")
        log.info(
            f"Storing cache with {len(resolver.cache)} addresses into {csv_cache}")
        resolver.save(csv_cache)