def load_temperatures(database: SqliteUtil, kind: str,
        max_idx: int, min_idx: int):
    query = f'''
        SELECT
            mrt_temperatures.temperature_id,
            mrt_temperatures.temperature_idx,
            mrt_temperatures.{kind},
            COUNT(*) AS util
        FROM mrt_temperatures
        INNER JOIN links
        ON links.mrt_temperature = mrt_temperatures.temperature_id
        INNER JOIN output_events
        ON output_events.link_id = links.link_id
        WHERE output_events.start >= mrt_temperatures.temperature_idx * 900
        AND output_events.end < mrt_temperatures.temperature_idx * 900 + 900
        GROUP BY temperature_id, temperature_idx;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading temperature profile %s.')

    # One list per temperature profile; each slot corresponds to a
    # 15-minute (900 s) interval between min_idx and max_idx.
    temps = defaultdict(lambda: [None] * (max_idx - min_idx + 1))
    for uuid, idx, temp, util in rows:
        if util > 0:
            temps[uuid][idx - min_idx] = temp

    return temps
def load_links(database: SqliteUtil):
    query = '''
        SELECT
            links.link_id,
            links.mrt_temperature,
            nodes1.point AS source_point,
            nodes2.point AS terminal_point
        FROM links
        INNER JOIN nodes AS nodes1
        ON links.source_node = nodes1.node_id
        INNER JOIN nodes AS nodes2
        ON links.terminal_node = nodes2.node_id;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading link %s.')

    # Keep only links whose endpoints fall entirely inside the study
    # bounding box (coordinates in the source CRS).
    bounds = lambda x, y: min(x) > 0.5e6 and max(x) < 0.85e6 and \
        min(y) > 0.8e6 and max(y) < 1.0e6

    links = []
    for link_id, profile, src_pt, term_pt in rows:
        line = LineString((xy(src_pt), xy(term_pt)))
        x, y = line.coords.xy
        if bounds(x, y):
            link = Link(link_id, line, profile)
            links.append(link)

    return links
def load_extrema(database: SqliteUtil, kind: str):
    query = f'''
        SELECT
            max({kind}),
            min({kind}),
            max(temperature_idx),
            min(temperature_idx)
        FROM mrt_temperatures;
    '''
    database.cursor.execute(query)
    max_temp, min_temp, max_idx, min_idx = next(database.fetch_rows())

    return max_temp, min_temp, max_idx, min_idx
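# Usage sketch (not part of the original module): load_extrema supplies the
# temperature index range that load_temperatures needs to size its profile
# lists. The default column name 'mrt' is an illustrative assumption.
def load_temperature_profiles(database: SqliteUtil, kind: str = 'mrt'):
    max_temp, min_temp, max_idx, min_idx = load_extrema(database, kind)
    temps = load_temperatures(database, kind, max_idx, min_idx)
    return temps, (min_temp, max_temp)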
def main(database: SqliteUtil):
    query = '''
        SELECT
            households.hhIncomeDollar,
            output_agents.exposure
        FROM households
        INNER JOIN agents
        ON households.hhid = agents.household_id
        INNER JOIN output_agents
        ON agents.agent_id = output_agents.agent_id
        WHERE households.hhIncomeDollar < 500000;
    '''
    database.cursor.execute(query)
    persons = database.fetch_rows()

    # Accumulate exposure sums and counts per $10,000 income bucket.
    total = {}
    data = []
    for income, exposure in persons:
        adj = (income // 10000) * 10000
        if adj in total:
            total[adj][0] += exposure
            total[adj][1] += 1
        else:
            total[adj] = [exposure, 1]

    for income, (accum, count) in total.items():
        data.append((income, accum / count))

    data = pd.DataFrame(data,
        columns=('household income (USD)', 'average exposure (°C·sec)'))

    axes = sns.barplot(x=data['household income (USD)'],
        y=data['average exposure (°C·sec)'], color='royalblue')
    axes.set_title('Exposure By Income')

    # Thin out the x-axis: show only every tenth income bucket label.
    for ind, label in enumerate(axes.get_xticklabels()):
        label.set_visible(ind % 10 == 0)

    plot = axes.get_figure()
    plot.savefig('result/income_exposure.png', bbox_inches='tight')
    plot.clf()
def load_regions(database: SqliteUtil):
    query = '''
        SELECT
            maz,
            region
        FROM regions;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading region %s.')

    regions = []
    for maz, polygon in rows:
        region = Region(maz, polygon)
        regions.append(region)

    return regions
def load_nodes(database: SqliteUtil) -> Dict[str, Node]:
    query = '''
        SELECT
            node_id,
            point
        FROM nodes;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading node %s.')

    nodes: Dict[str, Node] = {}
    for uuid, point in rows:
        x, y = xy(point)
        node = Node(uuid, x, y)
        nodes[uuid] = node

    return nodes
def load_parcels(database: SqliteUtil):
    query = '''
        SELECT
            apn,
            type,
            cooling,
            center
        FROM parcels;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading parcel %s.')

    parcels = []
    for apn, kind, cooling, center in rows:
        x, y = xy(center)
        parcel = Parcel(apn, kind, bool(cooling), x, y)
        parcels.append(parcel)

    return parcels
def load_links(database: SqliteUtil):
    query = '''
        SELECT
            links.link_id,
            nodes.point
        FROM links
        INNER JOIN nodes
        ON links.source_node = nodes.node_id;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading link %s.')

    links = []
    for link_id, point in rows:
        x, y = xy(point)
        link = Link(link_id, x, y)
        links.append(link)

    return links
def main(database: SqliteUtil):
    query = '''
        SELECT
            agents.agent_id,
            persons.age,
            output_agents.exposure
        FROM persons
        INNER JOIN agents
        ON persons.hhid = agents.household_id
        AND persons.pnum = agents.household_idx
        INNER JOIN output_agents
        ON agents.agent_id = output_agents.agent_id;
    '''
    database.cursor.execute(query)
    persons = database.fetch_rows()

    # Accumulate exposure sums and counts per five-year age bucket.
    total = {}
    data = []
    for _, age, exposure in persons:
        adj = (age // 5) * 5
        if adj in total:
            total[adj][0] += exposure
            total[adj][1] += 1
        else:
            total[adj] = [exposure, 1]

    for age, (accum, count) in total.items():
        data.append((age, accum / count))

    data = pd.DataFrame(data,
        columns=('age (years)', 'average exposure (°C·sec)'))

    axes = sns.barplot(x=data['age (years)'],
        y=data['average exposure (°C·sec)'], color='royalblue')
    axes.set_title('Exposure By Age')

    plot = axes.get_figure()
    plot.savefig('result/age_exposure.png', bbox_inches='tight')
    plot.clf()
def load_links(database: SqliteUtil, nodes: Dict[str, Node]) -> Dict[str, Link]:
    query = '''
        SELECT
            link_id,
            source_node,
            terminal_node,
            freespeed,
            permlanes
        FROM links;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading link %s.')

    links: Dict[str, Link] = {}
    for uuid, src, term, speed, lanes in rows:
        source_node = nodes[src]
        terminal_node = nodes[term]
        link = Link(uuid, source_node, terminal_node, lanes, speed)
        links[uuid] = link

    return links
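# Usage sketch (not part of the original module): the two loaders compose in
# order, since load_links resolves its endpoint references against the node
# dictionary returned by load_nodes. The function name is hypothetical.
def load_network(database: SqliteUtil) -> Dict[str, Link]:
    nodes = load_nodes(database)
    links = load_links(database, nodes)
    return links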
def null_count(database: SqliteUtil, table: str, col: str):
    query = f'''
        SELECT
            CASE WHEN {col} IS NULL THEN 0 ELSE 1 END AS valid,
            COUNT(*) AS freq
        FROM {table}
        GROUP BY valid
        ORDER BY valid ASC;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()

    null, nnull = 0, 0
    for value, freq in rows:
        if value == 0:
            null = freq
        elif value == 1:
            nnull = freq

    return null, nnull
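# Usage sketch (not part of the original module): report the fraction of NULL
# values for a handful of columns. The function name and the table/column
# pairs below are illustrative assumptions.
def report_null_fractions(database: SqliteUtil):
    for table, col in (('links', 'air_temperature'), ('links', 'mrt_temperature')):
        null, nnull = null_count(database, table, col)
        total = null + nnull
        share = null / total if total else 0.0
        print(f'{table}.{col}: {null} of {total} rows are NULL ({share:.1%})')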
def export_links(database: SqliteUtil, filepath: str, src_epsg: int, prj_epsg: int):
    transformer = Transformer.from_crs(f'epsg:{src_epsg}',
        f'epsg:{prj_epsg}', always_xy=True, skip_equivalent=True)
    project = transformer.transform

    prjpath = os.path.splitext(filepath)[0] + '.prj'
    with open(prjpath, 'w') as prjfile:
        info = get_wkt_string(prj_epsg)
        prjfile.write(info)

    query = '''
        SELECT
            links.link_id,
            links.source_node,
            links.terminal_node,
            links.length,
            links.freespeed,
            links.capacity,
            links.permlanes,
            links.oneway,
            links.modes,
            links.air_temperature,
            links.mrt_temperature,
            nodes1.point,
            nodes2.point
        FROM links
        INNER JOIN nodes AS nodes1
        ON links.source_node = nodes1.node_id
        INNER JOIN nodes AS nodes2
        ON links.terminal_node = nodes2.node_id;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Exporting link %s.')

    links = shapefile.Writer(filepath)
    links.field('link_id', 'C')
    links.field('source_node', 'C')
    links.field('terminal_node', 'C')
    links.field('length', 'N')
    links.field('freespeed', 'N')
    links.field('capacity', 'N')
    links.field('permlanes', 'N')
    links.field('oneway', 'N')
    links.field('modes', 'C')
    links.field('air_temperature', 'N')
    links.field('mrt_temperature', 'N')

    for row in rows:
        props = row[:-2]
        pt1, pt2 = row[-2:]
        x1, y1 = project(*xy(pt1))
        x2, y2 = project(*xy(pt2))
        try:
            links.record(*props)
        except Exception:
            log.error(f'Failed to write record for link {props[0]}.')
            raise
        links.line([((x1, y1), (x2, y2))])

    if links.recNum != links.shpNum:
        log.error('Record/shape misalignment; shapefile exporting failure.')
        raise RuntimeError('record/shape misalignment')

    links.close()
def map_mrt_temperature(database: SqliteUtil, kind: str):
    log.info('Profiling temperature extrema.')
    max_temp, min_temp, max_idx, min_idx = load_extrema(database, kind)

    log.info('Loading network links.')
    links = load_links(database)

    os.makedirs('result/mrt_temperatures/', exist_ok=True)

    log.info('Loading temperatures.')
    temps = defaultdict(lambda: [None] * (max_idx - min_idx + 1))
    query = f'''
        SELECT
            temperature_id,
            temperature_idx,
            {kind}
        FROM mrt_temperatures;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading temperature profile %s.')
    for uuid, idx, temp in rows:
        temps[uuid][idx - min_idx] = temp

    def generate():
        for link in links:
            temp = temps[link.profile]
            yield (link.id, *temp, link.line)

    log.info('Forming dataframes.')
    cols = [f'temp_{idx}' for idx in range(min_idx, max_idx + 1)]
    df = pd.DataFrame(generate(), columns=('id', *cols, 'line'))
    df['line'] = gpd.GeoSeries(df['line'], crs='EPSG:2223')
    gpdf = gpd.GeoDataFrame(df, geometry='line', crs='EPSG:2223')
    gpdf = gpdf.to_crs(epsg=3857)

    del links, temps, df

    for idx in range(min_idx, max_idx + 1):
        fig, ax = plt.subplots(1, figsize=(20, 12))

        log.info('Plotting network visual.')
        plot = gpdf.plot(column=f'temp_{idx}', cmap='YlOrRd',
            linewidth=0.5, ax=ax, alpha=1)
        ax.set_title(f'Maricopa {kind.upper()} Temperatures {idx_to_hhmm(idx)}',
            fontdict={'fontsize': '18', 'fontweight': '3'})
        ctx.add_basemap(plot, source=ctx.providers.Stamen.TonerLite)

        sm = plt.cm.ScalarMappable(cmap='YlOrRd',
            norm=plt.Normalize(vmin=min_temp, vmax=max_temp))
        sm._A = []
        cbar = fig.colorbar(sm)

        log.info('Saving map.')
        fig.savefig(f'result/mrt_temperatures/{idx}.png', bbox_inches='tight')
        plt.clf()
        plt.close()
def export_routes(database: SqliteUtil, modes: List[str], filepath: str,
        skip_empty: bool, epsg: int = 2223):
    transformer = Transformer.from_crs('epsg:2223',
        f'epsg:{epsg}', always_xy=True, skip_equivalent=True)
    transform = transformer.transform
    measurer = Geod(f'epsg:{epsg}')
    # Geod.inv expects (lons1, lats1, lons2, lats2) and returns forward
    # azimuth, back azimuth and distance; keep only the distance.
    measure = lambda n1, n2: measurer.inv([n1.x], [n1.y], [n2.x], [n2.y])[2][0]

    prjpath = os.path.splitext(filepath)[0] + '.prj'
    with open(prjpath, 'w') as prjfile:
        info = get_wkt_string(epsg)
        prjfile.write(info)

    log.info('Loading network node data.')
    query = '''
        SELECT
            node_id,
            point
        FROM nodes;
    '''
    nodes = {}
    database.cursor.execute(query)
    result = counter(database.fetch_rows(), 'Loading node %s.')
    for node_id, point in result:
        x, y = transform(*map(float, point[7:-1].split(' ')))
        nodes[node_id] = Node(x, y)

    log.info('Loading network link data.')
    query = '''
        SELECT
            link_id,
            source_node,
            terminal_node
        FROM links;
    '''
    links = {}
    database.cursor.execute(query)
    result = counter(database.fetch_rows(), 'Loading link %s.')
    for link_id, source_node, terminal_node in result:
        src_node = nodes[source_node]
        term_node = nodes[terminal_node]
        length = measure(src_node, term_node)
        links[link_id] = Link(src_node, term_node, length)

    log.info('Loading network routing data.')
    # Build the IN clause explicitly; interpolating tuple(modes) breaks on
    # single-element lists because of the trailing comma.
    modes_str = ','.join(f"'{mode}'" for mode in modes)
    query = f'''
        SELECT
            output_legs.leg_id,
            output_legs.agent_id,
            output_legs.agent_idx,
            output_legs.mode,
            output_legs.duration,
            GROUP_CONCAT(output_events.link_id, " ")
        FROM output_legs
        LEFT JOIN output_events
        ON output_legs.leg_id = output_events.leg_id
        WHERE output_legs.mode IN ({modes_str})
        GROUP BY output_legs.leg_id
        ORDER BY output_events.leg_id, output_events.leg_idx;
    '''
    database.cursor.execute(query)
    result = counter(database.fetch_rows(block=1000000), 'Exporting route %s.')

    routes = shapefile.Writer(filepath)
    routes.field('leg_id', 'N')
    routes.field('agent_id', 'N')
    routes.field('agent_idx', 'N')
    routes.field('mode', 'C')
    routes.field('duration', 'N')
    routes.field('length', 'N')

    log.info('Exporting simulation routes to shapefile.')
    for leg_id, agent_id, agent_idx, mode, duration, events in result:
        if events is not None:
            route = [links[l] for l in events.split(' ')]
            line = [(link.source_node.x, link.source_node.y) for link in route]
            line.append((route[-1].terminal_node.x, route[-1].terminal_node.y))
            length = sum(link.length for link in route)
            routes.record(leg_id, agent_id, agent_idx, mode, duration, length)
            routes.line([line])
        elif not skip_empty:
            routes.record(leg_id, agent_id, agent_idx, mode, duration, None)
            routes.null()

    if routes.recNum != routes.shpNum:
        log.error('Record/shape misalignment; internal exporting failure.')

    routes.close()
    log.info(f'Routing export complete: wrote {routes.shpNum} routes.')