Example #1
def encode_report(rpt, rpt_path):

    rpt_dict = {}

    #write parcel json files
    parcels = spatial.read_shapefile(sg.config.parcels_shapefile)
    parcels = parcels[['PARCELID', 'coords']]  #omit 'ADDRESS', 'OWNER1'
    flooded = rpt.alt_report.parcel_flooding  #proposed flooding condition
    flooded = pd.merge(flooded, parcels, right_on='PARCELID', left_index=True)
    rpt_dict['parcels'] = spatial.write_geojson(flooded, geomtype='polygon')

    #non null delta category parcels
    delta_parcels = rpt.flood_comparison.loc[pd.notnull(
        rpt.flood_comparison.Category)]
    delta_parcels = pd.merge(delta_parcels,
                             parcels,
                             right_on='PARCELID',
                             left_index=True)
    rpt_dict['delta_parcels'] = spatial.write_geojson(delta_parcels,
                                                      geomtype='polygon')

    #encode conduit and nodes data into geojson
    # rpt_dict['conduits'] = spatial.write_geojson(rpt.alt_report.model.conduits())
    rpt_dict['new_conduits'] = spatial.write_geojson(rpt.newconduits)
    # rpt_dict['nodes'] = spatial.write_geojson(rpt.model.nodes(), geomtype='point')

    #write summary stats
    rpt_dict.update(rpt.summary_dict)

    with open(rpt_path, 'w') as f:
        f.write(json.dumps(rpt_dict))
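
A minimal usage sketch for encode_report (the comparison report object and output directory are assumed to already exist; names are illustrative, mirroring how Example #2 calls this function):

# hypothetical caller: serialize an existing comparison report to JSON
# (impact_rpt and report_dir are assumed to exist elsewhere)
out_path = os.path.join(report_dir, 'rpt.json')
encode_report(impact_rpt, out_path)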
Example #2
def batch_reports(project_dir,
                  results_file,
                  additional_costs=None,
                  join_data=None):

    #combine the segments and options (combinations) into one iterable
    SEGMENTS_DIR = os.path.join(project_dir, 'Segments')
    COMBOS_DIR = os.path.join(project_dir, 'Combinations')
    COMMON_DATA_DIR = os.path.join(project_dir, 'CommonData')
    ADMIN_DIR = os.path.join(project_dir, 'ProjectAdmin')
    BASELINE_DIR = os.path.join(project_dir, 'Baseline')

    #instantiate the true baseline flood report
    baseline_model = Model(BASELINE_DIR)
    pn_join_csv = os.path.join(COMMON_DATA_DIR,
                               r'sphila_sheds_parcels_join.csv')
    parcel_node_join_df = pd.read_csv(pn_join_csv)
    parcel_shp_df = spatial.read_shapefile(sg.config.parcels_shapefile)
    baserpt = reporting.FloodReport(baseline_model, parcel_node_join_df)
    base_flood_vol = baserpt.flood_vol_mg

    paths = (SEGMENTS_DIR, COMBOS_DIR)
    #result file header
    # cols = 'MODEL,COST,FLOOD_VOL_MG,PARCEL_FLOOD_HRS,FLOOD_VOL_REDUCED_MG,PARCEL_FLOOD_HRS_REDUCED,PARCEL_HRS_REDUCED_DELTA_THRESH'
    # with open(results_file, 'a') as f:
    #     f.write(cols + '\n')

    for path, dirs, files in chain.from_iterable(
            os.walk(path) for path in paths):

        for f in files:
            if '.inp' in f:
                inp_path = os.path.join(path, f)
                alt = Model(inp_path)
                print('reporting on {}'.format(alt.name))
                #generate the reports
                frpt = reporting.FloodReport(alt, parcel_node_join_df)
                impact_rpt = reporting.ComparisonReport(
                    baserpt, frpt, additional_costs, join_data)

                # #write to the log
                # model_id = os.path.splitext(f)[0]
                # with open(results_file, 'a') as f:
                #
                #     stats = (model_id, impact_rpt.cost_estimate,
                #              frpt.flood_vol_mg, frpt.parcel_hrs_flooded,
                #              baserpt.flood_vol_mg - frpt.flood_vol_mg,
                #              baserpt.parcel_hrs_flooded - frpt.parcel_hrs_flooded,
                #              impact_rpt.parcel_hours_reduced,
                #              )
                #     f.write('{},{},{},{},{},{},{}\n'.format(*stats))

                report_dir = os.path.join(alt.inp.dir, 'Report_AllParcels')
                if not os.path.exists(report_dir):
                    os.mkdir(report_dir)

                #write the report files
                # impact_rpt.write(report_dir)
                # impact_rpt.generate_figures(report_dir, parcel_shp_df)
                serialize.encode_report(impact_rpt,
                                        os.path.join(report_dir, 'rpt.json'))
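
A minimal invocation sketch, assuming a project folder that already contains the Segments, Combinations, CommonData, ProjectAdmin, and Baseline subdirectories the function expects (the paths below are hypothetical):

# hypothetical driver for batch_reports; point it at the real project layout
project_dir = r'C:\data\flood_project'                  # assumed project root
results_csv = os.path.join(project_dir, 'results.csv')  # assumed results log
batch_reports(project_dir, results_csv)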
Example #3
def encode_report(rpt, rpt_path):

    rpt_dict = {}

    #write parcel json files
    parcels = spatial.read_shapefile(sg.config.parcels_shapefile)
    parcels = parcels[['PARCELID', 'coords']] #omit 'ADDRESS', 'OWNER1'
    flooded = rpt.alt_report.parcel_flooding #proposed flooding condition
    flooded = pd.merge(flooded, parcels, right_on='PARCELID', left_index=True)
    rpt_dict['parcels'] = spatial.write_geojson(flooded, geomtype='polygon')

    #non null delta category parcels
    delta_parcels = rpt.flood_comparison.loc[pd.notnull(rpt.flood_comparison.Category)]
    delta_parcels = pd.merge(delta_parcels, parcels, right_on='PARCELID', left_index=True)
    rpt_dict['delta_parcels'] = spatial.write_geojson(delta_parcels, geomtype='polygon')

    #encode conduit and nodes data into geojson
    # rpt_dict['conduits'] = spatial.write_geojson(rpt.alt_report.model.conduits())
    rpt_dict['new_conduits'] = spatial.write_geojson(rpt.newconduits)
    # rpt_dict['nodes'] = spatial.write_geojson(rpt.model.nodes(), geomtype='point')

    #write summary stats
    rpt_dict.update(rpt.summary_dict)

    with open(rpt_path, 'w') as f:
        f.write(json.dumps(rpt_dict))
Example #4
def batch_reports(project_dir, results_file,
                  additional_costs=None, join_data=None,
                  report_dirname='Report_AllParcels'):

    #combine the segments and options (combinations) into one iterable
    SEGMENTS_DIR = os.path.join(project_dir, 'Segments')
    COMBOS_DIR = os.path.join(project_dir, 'Combinations')
    COMMON_DATA_DIR = os.path.join(project_dir, 'CommonData')
    ADMIN_DIR = os.path.join(project_dir, 'ProjectAdmin')
    BASELINE_DIR = os.path.join(project_dir, 'Baseline')

    #instantiate the true baseline flood report
    baseline_model = Model(BASELINE_DIR)
    pn_join_csv = os.path.join(COMMON_DATA_DIR, r'sphila_sheds_parcels_join.csv')
    parcel_node_join_df = pd.read_csv(pn_join_csv)
    parcel_shp_df = spatial.read_shapefile(sg.config.parcels_shapefile)
    baserpt = reporting.FloodReport(baseline_model, parcel_node_join_df)
    base_flood_vol = baserpt.flood_vol_mg

    paths = (SEGMENTS_DIR, COMBOS_DIR)
    #result file header
    cols = 'MODEL,COST,FLOOD_VOL_MG,PARCEL_FLOOD_HRS,FLOOD_VOL_REDUCED_MG,PARCEL_FLOOD_HRS_REDUCED,PARCEL_HRS_REDUCED_DELTA_THRESH'
    with open(results_file, 'a') as f:
        f.write(cols + '\n')

    for path, dirs, files in chain.from_iterable(os.walk(path) for path in paths):

        for f in files:
            if '.inp' in f:
                inp_path = os.path.join(path, f)
                alt = Model(inp_path)
                print('reporting on {}'.format(alt.name))
                #generate the reports
                frpt = reporting.FloodReport(alt, parcel_node_join_df)
                impact_rpt = reporting.ComparisonReport(baserpt, frpt,
                                                        additional_costs,
                                                        join_data)

                #write to the log
                model_id = os.path.splitext(f)[0]
                stats = (model_id, impact_rpt.cost_estimate,
                         frpt.flood_vol_mg, frpt.parcel_hrs_flooded,
                         baserpt.flood_vol_mg - frpt.flood_vol_mg,
                         baserpt.parcel_hrs_flooded - frpt.parcel_hrs_flooded,
                         impact_rpt.parcel_hours_reduced)
                with open(results_file, 'a') as results:
                    results.write('{},{},{},{},{},{},{}\n'.format(*stats))

                report_dir = os.path.join(alt.inp.dir, report_dirname)
                if not os.path.exists(report_dir):
                    os.mkdir(report_dir)

                #write the report files
                impact_rpt.write(report_dir)
                impact_rpt.generate_figures(report_dir, parcel_shp_df)
                serialize.encode_report(impact_rpt, os.path.join(report_dir, 'rpt.json'))
Example #5
    def write(self, rpt_dir):
        #write cost per sewer segment spreadsheet
        self.newconduits.to_csv(os.path.join(rpt_dir,'cost_estimate.csv'))
        self.flood_comparison.to_csv(os.path.join(rpt_dir,'parcel_flood_comparison.csv'))

        #write parcel json files
        parcels = spatial.read_shapefile(sg.config.parcels_shapefile)
        parcels = parcels[['PARCELID', 'ADDRESS', 'OWNER1', 'coords']]
        flooded = self.flood_comparison
        flooded = flooded.loc[flooded.Category.notnull()] #parcels with significant flood delta
        flooded = pd.merge(flooded, parcels, right_on='PARCELID', left_index=True)
        colors = flooded.apply(lambda row:'#%02x%02x%02x' % drawing.parcel_draw_color(row, style='delta'), axis=1)
        flooded = flooded.assign(fill=colors)
        geoparcelpath = os.path.join(rpt_dir,'delta_parcels.json')
        spatial.write_geojson(flooded, filename=geoparcelpath, geomtype='polygon')

        #write new conduit json, shapefiles
        shpdir = os.path.join(os.path.dirname(rpt_dir), 'shapefiles')
        if not os.path.exists(shpdir):
            os.mkdir(shpdir)
        geocondpath = os.path.join(rpt_dir,'new_conduits.json')
        shpcondpath = os.path.join(shpdir, self.alt_report.model.inp.name + '_new_conduits.shp')
        spatial.write_geojson(self.newconduits, filename=geocondpath)
        spatial.write_shapefile(self.newconduits, filename=shpcondpath)

        #write node and conduit report csvs
        self.alt_report.model.nodes().to_csv(os.path.join(rpt_dir,'nodes.csv'))
        self.alt_report.model.conduits().to_csv(os.path.join(rpt_dir,'conduits.csv'))

        #write a html map
        with open(geocondpath, 'r') as f:
            geo_conduits = geojson.loads(f.read())

        proposed_flooded = self.alt_report.parcel_flooding
        proposed_flooded = pd.merge(proposed_flooded, parcels, right_on='PARCELID', left_index=True)
        geo_parcels = spatial.write_geojson(proposed_flooded)
        # with open (geoparcelpath, 'r') as f:
        #     geo_parcels = geojson.loads(f.read())

        with open(BETTER_BASEMAP_PATH, 'r') as bm:
            filename = os.path.join(os.path.dirname(geocondpath), self.alt_report.model.name + '.html')
            with open(filename, 'w') as newmap:
                for line in bm:
                    if '//INSERT GEOJSON HERE ~~~~~' in line:
                        newmap.write('conduits = {};\n'.format(geojson.dumps(geo_conduits)))
                        newmap.write('nodes = {};\n'.format(0))
                        newmap.write('parcels = {};\n'.format(geojson.dumps(geo_parcels)))
                    else:
                        newmap.write(line)
Example #6
    def write(self, rpt_dir):
        #write cost per sewer segment spreadsheet
        self.newconduits.to_csv(os.path.join(rpt_dir,'cost_estimate.csv'))
        self.flood_comparison.to_csv(os.path.join(rpt_dir,'parcel_flood_comparison.csv'))

        #write parcel json files
        parcels = spatial.read_shapefile(sg.config.parcels_shapefile)
        parcels = parcels[['PARCELID', 'ADDRESS', 'OWNER1', 'coords']]
        flooded = self.flood_comparison
        flooded = flooded.loc[flooded.Category.notnull()] #parcels with significant flood delta
        flooded = pd.merge(flooded, parcels, right_on='PARCELID', left_index=True)
        colors = flooded.apply(lambda row:'#%02x%02x%02x' % drawing.parcel_draw_color(row, style='delta'), axis=1)
        flooded = flooded.assign(fill=colors)
        geoparcelpath = os.path.join(rpt_dir,'delta_parcels.json')
        spatial.write_geojson(flooded, filename=geoparcelpath, geomtype='polygon')

        #write new conduit json, shapefiles
        shpdir = os.path.join(os.path.dirname(rpt_dir), 'shapefiles')
        if not os.path.exists(shpdir):
            os.mkdir(shpdir)
        geocondpath = os.path.join(rpt_dir,'new_conduits.json')
        shpcondpath = os.path.join(shpdir, self.alt_report.model.inp.name + '_new_conduits.shp')
        spatial.write_geojson(self.newconduits, filename=geocondpath)
        spatial.write_shapefile(self.newconduits, filename=shpcondpath)

        #write node and conduit report csvs
        self.alt_report.model.nodes().to_csv(os.path.join(rpt_dir,'nodes.csv'))
        self.alt_report.model.conduits().to_csv(os.path.join(rpt_dir,'conduits.csv'))

        #write a html map
        with open(geocondpath, 'r') as f:
            geo_conduits = geojson.loads(f.read())

        proposed_flooded = self.alt_report.parcel_flooding
        proposed_flooded = pd.merge(proposed_flooded, parcels, right_on='PARCELID', left_index=True)
        geo_parcels = spatial.write_geojson(proposed_flooded)
        # with open (geoparcelpath, 'r') as f:
        #     geo_parcels = geojson.loads(f.read())

        with open(BETTER_BASEMAP_PATH, 'r') as bm:
            filename = os.path.join(os.path.dirname(geocondpath), self.alt_report.model.name + '.html')
            with open(filename, 'w') as newmap:
                for line in bm:
                    if '//INSERT GEOJSON HERE ~~~~~' in line:
                        newmap.write('conduits = {};\n'.format(geojson.dumps(geo_conduits)))
                        newmap.write('nodes = {};\n'.format(0))
                        newmap.write('parcels = {};\n'.format(geojson.dumps(geo_parcels)))
                    else:
                        newmap.write(line)
Example #7
def _draw_basemap(draw, img, bbox, px_width, shift_ratio):
    """
    given the shapefiles in config.basemap_options, render each layer
    on the model basemap.
    """

    for f in config.basemap_options['features']:

        shp_path = os.path.join(config.basemap_shapefile_dir, f['feature'])
        df = spatial.read_shapefile(shp_path)[f['cols'] + ['coords']]
        df = px_to_irl_coords(df, bbox=bbox, shift_ratio=shift_ratio,
                              px_width=px_width)[0]

        if 'ST_NAME' in df.columns:
            # this is a street, draw a polyline accordingly
            df.apply(lambda r: draw.line(r.draw_coords, fill=f['fill']), axis=1)
            annotate_streets(df, img, 'ST_NAME')
        else:
            df.apply(lambda r: draw.polygon(r.draw_coords,
                                            fill=f['fill']), axis=1)
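
The loop above implies a configuration dict shaped roughly like the sketch below. Only the keys ('features', 'feature', 'cols', 'fill') are taken from the code; the shapefile names, columns, and fill colors are illustrative assumptions:

# assumed shape of config.basemap_options, inferred from _draw_basemap's usage;
# features carrying an 'ST_NAME' column are drawn as street polylines, the rest as polygons
basemap_options = {
    'features': [
        {'feature': 'streets.shp', 'cols': ['ST_NAME'], 'fill': (130, 130, 130)},
        {'feature': 'parks.shp',   'cols': [],          'fill': (60, 160, 60)},
    ]
}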
Example #8
def _draw_basemap(draw, img, bbox, px_width, shift_ratio):
	"""
	given the shapefiles in config.basemap_options, render each layer
	on the model basemap.
	"""

	for f in config.basemap_options['features']:

		shp_path = os.path.join(config.basemap_shapefile_dir, f['feature'])
		df = spatial.read_shapefile(shp_path)[f['cols']+['coords']]
		df = px_to_irl_coords(df, bbox=bbox, shift_ratio=shift_ratio,
								px_width=px_width)[0]

		if 'ST_NAME' in df.columns:
			#this is a street, draw a polyline accordingly
			df.apply(lambda r: draw.line(r.draw_coords, fill=f['fill']), axis=1)
			annotate_streets(df, img, 'ST_NAME')
		else:
			df.apply(lambda r: draw.polygon(r.draw_coords,
											fill=f['fill']), axis=1)