import os

import numpy as np
import pandas as pd
import pandana as pdna

from urbansim.utils import misc  # misc.data_dir(); import path assumed from typical UrbanSim layout
import wfrc_utils  # project-local helpers (wfrc_utils.dbf2df); import path assumed


def build_networks(settings):
    # Load network nodes and edges from the HDF5 store named in the settings
    # and build a pandana network for accessibility queries.
    name = settings['build_networks']['name']
    st = pd.HDFStore(os.path.join(misc.data_dir(), name), "r")
    nodes, edges = st.nodes, st.edges
    net = pdna.Network(nodes["x"], nodes["y"],
                       edges["from"], edges["to"],
                       edges[["weight"]], twoway=True)
    net.precompute(settings['build_networks']['max_distance'])
    return net
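# Illustrative usage sketch (not part of this module): once the network is
# built, parcels are typically snapped to their nearest node and variables
# aggregated along the network.  The parcels file name and its "x"/"y"/
# "residential_units" columns are assumptions for the example.
#
#   net = build_networks(settings)
#   parcels = pd.read_csv(os.path.join(misc.data_dir(), "parcels.csv"))
#   parcels["node_id"] = net.get_node_ids(parcels["x"], parcels["y"])
#   net.set(parcels["node_id"], variable=parcels["residential_units"],
#           name="res_units")
#   units_within_3000 = net.aggregate(3000, type="sum", name="res_units")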
def logsums(settings):
    logsums_index = settings.get("logsums_index_col", "taz")
    return pd.read_csv(os.path.join(misc.data_dir(), 'logsums.csv'),
                       index_col=logsums_index)
def household_controls():
    df = pd.read_csv(os.path.join(misc.data_dir(), "household_controls.csv"))
    return df.set_index('year')


def employment_controls():
    df = pd.read_csv(os.path.join(misc.data_dir(), "employment_controls.csv"))
    return df.set_index('year')
def zoning_baseline(store, settings, year):
    # df = pd.merge(zoning_for_parcels.to_frame(),
    #               zoning.to_frame(),
    #               left_on='zoning',
    #               right_index=True)
    df = store['zoning_baseline']

    # if os.path.exists(os.path.join(misc.data_dir(), "zoning_parcels.csv")):
    #     df['parcel_id'] = df.index
    #     alter = pd.read_csv(
    #         os.path.join(misc.data_dir(), "zoning_parcels.csv"),
    #         index_col='parcel_id')
    #     df = pd.merge(df, alter, how='left', left_index=True,
    #                   right_index=True, suffixes=('', '_x'))
    #     df.loc[df.max_dua_x.notnull(), 'max_dua'] = \
    #         df.max_dua_x[df.max_dua_x.notnull()]
    #     df.loc[df.max_far_x.notnull(), 'max_far'] = \
    #         df.max_far_x[df.max_far_x.notnull()]
    #     df = df.drop(['max_dua_x', 'max_far_x'], axis=1)

    scenario_csv = os.path.join(misc.data_dir(), "scenario_inputs",
                                settings['scenario'], "zoning_parcels_p.csv")
    if os.path.exists(scenario_csv):
        # Scenario-specific zoning overrides: keep only rows that have taken
        # effect by the simulation year, and for each parcel keep the most
        # recent one.
        update = pd.read_csv(scenario_csv)
        update = update[update.year <= year]
        update = update.sort_values(by='year', ascending=True)
        update = update.drop_duplicates("parcel_id", keep="last")

        if update.empty:
            df['max_height'] = 999
            return df

        df2 = pd.merge(df, update, how='left', left_index=True,
                       right_on='parcel_id', suffixes=('', '_x'))
        df2.set_index('parcel_id', inplace=True)

        # Take the override values where the scenario provides them; a zero
        # density/FAR means "no constraint" and is stored as NaN.
        dua_mask = df2.max_dua_x.notnull()
        df2.loc[dua_mask, 'max_dua'] = df2.loc[dua_mask, 'max_dua_x']
        df2.loc[df2.max_dua == 0, 'max_dua'] = np.nan
        far_mask = df2.max_far_x.notnull()
        df2.loc[far_mask, 'max_far'] = df2.loc[far_mask, 'max_far_x']
        df2.loc[df2.max_far == 0, 'max_far'] = np.nan

        # Building-type allowances come in as 1/0 and are stored as 't'/'f'.
        type_cols = ['type%d' % i for i in range(1, 9)]
        for col in type_cols:
            df2.loc[df2[col + '_x'] == 1, col] = 't'
            df2.loc[df2[col + '_x'] == 0, col] = 'f'

        df2 = df2.drop(['year', 'max_dua_x', 'max_far_x'] +
                       [col + '_x' for col in type_cols], axis=1)
        df2['max_height'] = 999

        dev_dbf = os.path.join(misc.data_dir(), "developableparcels.dbf")
        if os.path.exists(dev_dbf):
            # Parcels listed in the buffer file are blocked from developing
            # building types 1, 2, 4 and 5.
            devbuffer = wfrc_utils.dbf2df(dev_dbf)
            undev_ids = devbuffer.parcel_id.unique()
            for col in ['type1', 'type2', 'type4', 'type5']:
                df2.loc[df2.index.isin(undev_ids), col] = 'f'

        return df2
    else:
        df['max_height'] = 999
        return df
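# Illustrative usage sketch (not part of this module): zoning_baseline()
# expects an open HDFStore containing a 'zoning_baseline' table indexed by
# parcel_id.  The store file name and the scenario key below are assumptions
# for the example.
#
#   settings = {'scenario': 'baseline'}
#   with pd.HDFStore(os.path.join(misc.data_dir(), "model_data.h5"), "r") as store:
#       zoning = zoning_baseline(store, settings, year=2030)
#   residential_ok = zoning[(zoning.type1 == 't') & zoning.max_dua.notnull()]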