Example #1
def main(config_file):
    with open(config_file, 'r') as f:
        conf = Addict(yaml.safe_load(f))
    if conf.get("logging") is not None:
        logging.config.dictConfig(conf["logging"])
    else:
        logging.basicConfig(level=logging.INFO,
                            format="%(asctime)s - %(levelname)s - %(message)s")
    raw_file = conf.get("output").get("existing_supermarkets_raw")
    supermarkets = []
    with open(raw_file, encoding='utf-8') as f:
        for supermarket_raw in json.loads(f.read()):
            supermarket_obj = {}
            supermarket_obj["name"] = supermarket_raw.get("name")
            supermarket_obj["addr"] = supermarket_raw.get("formatted_address")
            geocode = supermarket_raw.get("geometry").get("location")
            supermarket_obj["lat"] = geocode.get("lat")
            supermarket_obj["lng"] = geocode.get("lng")
            supermarket_obj["type"] = supermarket_raw.get("place_type")
            supermarkets.append(supermarket_obj)

    supermarkets_df = pd.DataFrame(supermarkets)
    logging.info("%s supermarkets are located in the city",
                 supermarkets_df.shape[0])
    supermarkets_df = supermarkets_df.drop_duplicates(subset=["lat", "lng"])
    logging.info("There are %s supermarkets left after duplicates removed",
                 supermarkets_df.shape[0])
    grocery_df = supermarkets_df.loc[supermarkets_df["type"] == "grocery"]
    logging.info("%s of the results are grocery", grocery_df.shape[0])
    supermarkets_df = supermarkets_df.reset_index()
    supermarkets_df = supermarkets_df.loc[supermarkets_df["type"] ==
                                          "supermarket"]
    output_fp = conf.get("output").get("existing_supermarkets_data")
    supermarkets_df.to_csv(output_fp, index=False)
    logging.info("Information of existing supermarkets written to %s",
                 output_fp)
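For reference, a minimal sketch of one record in the existing_supermarkets_raw file, inferred from the fields the loop above reads. The actual payload is a Google Places API result with a place_type field added upstream; the values here are made up:

# Hypothetical record shape; only the keys read above are shown.
[
    {
        "name": "Sample Mart",
        "formatted_address": "1 Example Street, Penang",
        "geometry": {"location": {"lat": 5.4141, "lng": 100.3288}},
        "place_type": "supermarket"
    }
]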
Example #2
def main(config_file):
    with open(config_file, 'r') as f:
        conf = Addict(yaml.safe_load(f))
    if conf.get("logging") is not None:
        logging.config.dictConfig(conf["logging"])
    else:
        logging.basicConfig(level=logging.INFO,
                            format="%(asctime)s - %(levelname)s - %(message)s")
    supermarkets_file = conf.get("input").get("supermarkets_file")
    logging.info("Loading geocode of supermarkets from %s", supermarkets_file)
    with open(supermarkets_file, newline='') as f:
        supermarkets_file_reader = csv.reader(f)
        supermarkets_file_header = next(supermarkets_file_reader)
        supermarkets = []
        for row in supermarkets_file_reader:
            supermarket = dict(zip(supermarkets_file_header, row))
            supermarkets.append(supermarket)
    logging.info("%s supermarkets located in the city.", len(supermarkets))
    
    grid_geocode_file = conf.get("input").get("grid_geocode_file")
    logging.info("Loading geocode of city grids from %s", grid_geocode_file)
    with open(grid_geocode_file, newline='') as f:
        grids_file_reader = csv.reader(f)
        grids_file_header = next(grids_file_reader)
        grids = []
        for row in grids_file_reader:
            grid = dict(zip(grids_file_header, row))
            grids.append(grid)
    logging.info("The city is covered by %s 1km x 1km grids.", len(grids))

    api_key = conf.get("API").get("KEY")
    gmaps = googlemaps.Client(key=api_key)
    results = []
    counter = 0
    logging.info("Start querying driving time from city grid to supermarkets ...")
    start_time = time.time()
    for grid in grids:
        for supermarket in supermarkets:
            logging.debug("Processing grid: %s - supermarket: %s", grid, supermarket)
            dist_api_worker = DistAPIWorker(gmaps, grid, supermarket)
            response = dist_api_worker.run()
            results.append(response)
            counter += 1
            if counter % 1000 == 0:
                logging.info("%s grid-supermarket pair processed ... Elapsed time %s seconds",
                             counter, round(time.time() - start_time, 4))

    # Export query responses to file
    if len(results) > 0:
        results_fp = conf.get("output").get("grid_to_supermarket_dist_raw")
        with open(results_fp, 'w') as output_file:
            json.dump(results, output_file, indent=4)
        logging.info("%s query responses dumped to %s", len(results), results_fp)
Example #3
from copy import deepcopy
from typing import Any, Dict, Iterable, Optional

from django.db import models  # obj is annotated as models.Model below

# ADict (a dict subclass with attribute access) is imported from
# elsewhere in the project.


class FieldInstanceTracker:
    def __init__(self, obj: models.Model, fields: Iterable[str]) -> None:
        self.obj = obj
        self.fields = fields

    def get_field_value(self, field: str) -> Any:
        return getattr(self.obj, field)

    def set_saved_fields(self, fields: Optional[Iterable[str]] = None) -> None:
        self.saved_data = self.current(fields)

        # preventing mutable fields side effects
        for field, field_value in self.saved_data.items():
            self.saved_data[field] = deepcopy(field_value)

        self.saved_data = ADict(self.saved_data)

    def current(self,
                fields: Optional[Iterable[str]] = None) -> Dict[str, Any]:
        """Returns dict of current values for all tracked fields
        """
        if fields is None:
            fields = self.fields

        return {f: self.get_field_value(f) for f in fields}

    def has_changed(self, field: str) -> bool:
        """Returns ``True`` if field has changed from currently saved value"""
        return self.previous(field) != self.get_field_value(field)

    def previous(self, field: str) -> Optional[Any]:
        """Returns currently saved value of given field
        """
        return self.saved_data.get(field)
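A short usage sketch of the tracker, assuming a Django model instance; Article and its fields are illustrative names, not from the source:

# article is a saved instance of a hypothetical Article model.
tracker = FieldInstanceTracker(obj=article, fields=["status", "title"])
tracker.set_saved_fields()       # snapshot current values (deep-copied)

article.status = "published"
tracker.has_changed("status")    # True: differs from the saved snapshot
tracker.previous("status")       # value captured when the snapshot was taken
tracker.current()                # {"status": "published", "title": ...}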
Example #4
def main(config_file):
    with open(config_file, 'r') as f:
        conf = Addict(yaml.safe_load(f))
    if conf.get("logging") is not None:
        logging.config.dictConfig(conf["logging"])
    else:
        logging.basicConfig(level=logging.INFO,
                            format="%(asctime)s - %(levelname)s - %(message)s")
    API_KEY = conf.get("API").get("KEY")
    base_url = conf.get("API").get("URL")
    # place_types = ["supermarkets", "convenience_store", "department_store", "store", "grocery"]
    place_types = ["supermarkets", "grocery"]
    locations = []
    for place in place_types:
        logging.info("Searching for %s in Penang", place)
        query = "query=" + place + "+in+Penang"
        url = base_url + query + "&key=" + API_KEY
        logging.info("url of API query: %s", url)
        response = requests.get(url)
        results = response.json().get("results")
        logging.info("%s results are found.", len(results))
        for result in results:
            location = {}
            location["name"] = result.get("name")
            location["addr"] = result.get("formatted_address")
            geocode = result.get("geometry").get("location")
            location["lat"] = geocode.get("lat")
            location["lng"] = geocode.get("lng")
            location["type"] = place
            locations.append(location)

    places_df = pd.DataFrame(locations)
    places_df = places_df.drop_duplicates(subset=["lat", "lng"])
    places_df = places_df.reset_index()
    output_fp = conf.get("output").get("filename")
    places_df.to_csv(output_fp, index=False)
    logging.info("%s supermarkets are located in the city", places_df.shape[0])
    logging.info("Information of existing supermarkets written to %s",
                 output_fp)
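The query URL above is concatenated by hand, which only works while the search terms need no URL-encoding. Inside the loop, the same request could be built with requests' own encoding; this sketch assumes base_url is the bare Text Search endpoint with no trailing "?" or query string:

# Drop-in replacement for the url/response lines in the loop above.
response = requests.get(base_url, params={
    "query": f"{place} in Penang",  # place is the loop variable
    "key": API_KEY,
})
results = response.json().get("results", [])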
Example #5
def main(config_file):
    with open(config_file, 'r') as f:
        conf = Addict(yaml.safe_load(f))
    if conf.get("logging") is not None:
        logging.config.dictConfig(conf["logging"])
    else:
        logging.basicConfig(level=logging.INFO,
                            format="%(asctime)s - %(levelname)s - %(message)s")
    raw_file = conf.get("output").get("grid_to_supermarket_dist_raw")
    dist_data = []
    with open(raw_file, encoding='utf-8') as f:
        for dist_raw in json.loads(f.read()):
            dist_obj = {}
            dist_obj["grid_id"] = dist_raw["grid_id"]
            dist_obj["supermarket_id"] = dist_raw["supermarket_id"]
            dist_obj["status"] = dist_raw["status"]
            if dist_raw.get("rows"):
                row = dist_raw.get("rows")[0]
                if row.get("elements"):
                    element = row.get("elements")[0]
                    if element.get("distance"):
                        dist_obj["distance"] = element.get("distance").get("value")
                    else:
                        dist_obj["distance"] = None

                    if element.get("duration"):
                        dist_obj["driving_time"] = element.get("duration").get("value")
                    else:
                        dist_obj["driving_time"] = None
            else:
                dist_obj["distance"] = None
                dist_obj["driving_time"] = None
            dist_data.append(dist_obj)
    dist_df = pd.DataFrame(dist_data)
    output_file = conf.get("output").get("grid_to_supermarket_dist_data")
    dist_df.to_csv(output_file, index=False)
    logging.info("%s distance query results written to %s",
                 len(dist_data), output_file)

    supermarket_counts = {}
    max_driving_time = int(conf.get("max_driving_time"))
    for grid_id in dist_df["grid_id"].unique():
        supermarket_counts[grid_id] = catch_supermarkets(grid_id, dist_df, max_driving_time)
    
    population_file = conf.get("input").get("grid_population_file")
    logging.info("Loading simulated population of city grids from %s", population_file)
    population_df = pd.read_csv(population_file)
    population_df["density"] = population_df.apply(lambda pop: \
        compute_density(pop["id"], pop["population"], supermarket_counts), axis=1)
    density_df = population_df[["id", "density"]]

    grid_shape = conf.get("input").get("grid_shape_file")
    sf = shp.Reader(grid_shape)
    shp_df = read_shapefile(sf)
    logging.info("Shape of shp_df: %s", shp_df.shape)
    logging.info(shp_df.head())
    density_shp_df = pd.merge(shp_df, density_df, on='id', how='outer')
    density_shp_df = density_shp_df.drop("coords", axis=1)
    logging.info("Export supermarket density to text file")
    supermarket_density_file = conf.get("output").get("supermarket_density_file")
    density_shp_df.to_csv(supermarket_density_file, index=False)
    logging.info(density_shp_df.head())
    gdf = gpd.read_file(grid_shape)
    gdf = gdf.to_crs(epsg=3857)
    gdf["density"] = density_shp_df["density"]
    supermarket_density_shape_file = conf.get("output").get("supermarket_density_shape_file")
    gdf.to_file(supermarket_density_shape_file)
    logging.info("Supermarket density added to the shape file of city grid layer")
Example #6
def main(config_file):
    with open(config_file, 'r') as f:
        conf = Addict(yaml.safe_load(f))
    if conf.get("logging") is not None:
        logging.config.dictConfig(conf["logging"])
    else:
        logging.basicConfig(level=logging.INFO,
                            format="%(asctime)s - %(levelname)s - %(message)s")

    start_time = time.time()
    logging.info("Part I Load city grid layer")
    grid_fp = conf.get("input").get("grid_file")
    grid_df = pd.read_csv(grid_fp)
    grid_df["id"] = grid_df["id"].apply(lambda grid_id: str(grid_id))
    grid_df = grid_df.set_index("id")
    grid_df = grid_df.dropna()

    logging.info("Converting UTM coordinate system to geocode ...")
    inProj = pyproj.Proj(init='epsg:3857')
    outProj = pyproj.Proj(init='epsg:4326')
    grid_df[["left_lng", "top_lat"]] = grid_df.apply(lambda row: convert_utm_coords(row[["left", "top"]], inProj, outProj), axis=1)
    grid_df[["right_lng", "bottom_lat"]] = grid_df.apply(lambda row: convert_utm_coords(row[["right", "bottom"]], inProj, outProj), axis=1)
    grid_df["center_lng"] = (grid_df["left_lng"] + grid_df["right_lng"]) / 2
    grid_df["center_lat"] = (grid_df["top_lat"] + grid_df["bottom_lat"]) / 2
    logging.info("Write grid center geocode to file")
    grid_geocode_df = grid_df[["center_lng", "center_lat"]]
    grid_geocode_file = conf.get("output").get("grid_geocode_file")
    grid_geocode_df.to_csv(grid_geocode_file, index=True)
    logging.info("Elapsed time %s seconds ...",
                 round(time.time() - start_time, 4))

    logging.info("Part II Assign residential buildings to grids")
    grid_dict = grid_df.to_dict("index")
    buildings_fp = conf.get("input").get("residential_buildings_file")
    buildings_df = pd.read_csv(buildings_fp)
    logging.info("Range of longitude: %s - %s",
                 buildings_df["center_lng"].min(),
                 buildings_df["center_lng"].max())
    logging.info("Range of latitude: %s - %s",
                 buildings_df["center_lat"].min(),
                 buildings_df["center_lat"].max())
    buildings_df["grid"] = buildings_df.apply(lambda row: assign_grid(row[["center_lng", "center_lat"]], grid_dict), axis=1)
    buildings_df = buildings_df.set_index("id")
    logging.info("Elapsed time: %s seconds ...",
                 round(time.time() - start_time, 4))

    logging.info("Part III Compute gridwise total floor area")
    logging.info("Residential building types: %s",
                 buildings_df["type"].unique())
    buildings_df[["area", "area_bungalow"]] = buildings_df.apply(lambda row: check_bungalow(row["type"], row["area"]), axis=1)
    area_df = buildings_df.groupby(['grid'])['area', 'area_bungalow'].agg('sum')
    area_df = pd.merge(area_df, grid_df, left_index=True, right_index=True)
    area_df = area_df.drop(["left", "right", "top", "bottom",
                            "left_lng", "top_lat", "right_lng", "bottom_lat",
                            "center_lng", "center_lat"], axis=1)
    area_df = area_df.reset_index()
    logging.info("Shape of area_df: %s", area_df.shape)
    logging.info(area_df.head())
    logging.info("Elapsed time: %s seconds ...",
                 round(time.time() - start_time, 4))

    logging.info("Part IV Distribute city population into grids")
    district_df = area_df.groupby('district')[['area', 'area_bungalow']].sum()
    district_df["total_population"] = conf.get("district_population")
    district_df["bungalow_population"] = district_df["total_population"] / 100 * 5
    district_df["apartment_population"] = district_df["total_population"] - district_df["bungalow_population"]
    district_df = district_df.reset_index()
    logging.info(district_df)
    population_df = pd.merge(area_df, district_df[["district", "area", "apartment_population"]], on='district')
    population_df = population_df.rename(columns={
            "index": "grid_id",
            "area_x": "area",
            "area_bungalow_x": "area_bungalow",
            "area_y": "area_apartment_district",
            "apartment_population": "apartment_population_district"
    })
    population_df["population"] = population_df["apartment_population_district"] / population_df["area_apartment_district"] * population_df["area"] + \
                                        population_df["area_bungalow"] / 100 * 5
    population_df["grid_id"] = population_df["grid_id"].apply(lambda grid_id: int(grid_id))
    logging.info("Shape of population_df: %s", population_df.shape)
    logging.info(population_df.head())

    logging.info("Part V Incorporate grid population with shape file")
    grid_shape = conf.get("input").get("grid_shape_file")
    sf = shp.Reader(grid_shape)
    shp_df = read_shapefile(sf)
    logging.info("Shape of shp_df: %s", shp_df.shape)
    logging.info(shp_df.head())
    population_shp_df = pd.merge(shp_df, population_df[["grid_id", "population"]],
                                 left_on='id', right_on='grid_id', how='outer')
    population_shp_df["population"].fillna(0, inplace=True)
    population_shp_df = population_shp_df.drop(["grid_id", "coords"], axis=1)
    logging.info("Export grid population to text file")
    grid_population_file = conf.get("output").get("grid_population_file")
    population_shp_df.to_csv(grid_population_file, index=False)
    gdf = gpd.read_file(grid_shape)
    gdf = gdf.to_crs(epsg=3857)
    gdf["population"] = population_shp_df["population"]
    grid_population_shape_file = conf.get("output").get("grid_population_shape_file")
    gdf.to_file(grid_population_shape_file)
    logging.info("Population info added to the shape file of city grid layer")