# Example no. 1 (scraped sample separator)
# 0
    def __init__(self, shp, prefix='au', rebuild=False):
        """Batch-build and run WEPPcloud projects for every polygon in *shp*.

        For each HUC-12 feature in the shapefile this:
          1. skips blacklisted or tiny polygons;
          2. creates/cleans a run directory under /geodata/weppcloud_runs/<prefix>/<huc12>;
          3. fetches a DEM, builds TOPAZ channels, locates an outlet
             (hard-coded override or lowest in-polygon channel cell);
          4. delineates subcatchments, abstracts the watershed;
          5. attaches a burn-severity (SBS) raster, builds landuse, soils,
             climate; preps and runs WEPP hillslopes + watershed;
          6. runs the ash-transport model and appends one CSV row per
             hillslope to <prefix>_hill_summary.csv.

        Parameters
        ----------
        shp : str
            Path to the HUC boundary shapefile (read with pyshp).
        prefix : str
            Run-directory group name and CSV filename prefix.
        rebuild : bool
            NOTE(review): accepted but never used in this body.

        Side effects: creates/removes directories, writes CSV/PNG files,
        launches full WEPP runs. Relies on module-level names (shapefile,
        blacklist, outlet_locs, chn_routing_err_topaz_pars, Ron, Topaz, ...)
        defined elsewhere in the file.
        """

        sf = shapefile.Reader(shp)
        # sf.fields[0] is the DeletionFlag pseudo-field -> drop it so that
        # `header` lines up 1:1 with each record's values.
        header = [field[0] for field in sf.fields][1:]
        """
        Field name: the name describing the data at this column index.
        Field type: the type of data at this column index. Types can be: Character, Numbers, Longs, Dates, or Memo.
        Field length: the length of the data found at this column index.
        Decimal length: the number of decimal places found in Number fields.
        """
        #        shapes = sf.shapes()
        #        print(len(shapes))
        #        records = sf.records()
        #        print(len(records))

        # Burn-severity raster shared by every run (copied into each
        # project's BAER dir further down).
        gwc = RasterDatasetInterpolator(
            '/geodata/weppcloud_runs/au/gwc_dnbr_barc4_utm.tif')
        # gwc2 = RasterDatasetInterpolator('gwc_sbs2.tif')
        # gwc6 = RasterDatasetInterpolator('gwc_sbs6.tif')

        # One summary row per hillslope, across all HUCs, goes here.
        # NOTE(review): fp_hill is never closed in this body.
        fp_hill = open('%s_hill_summary.csv' % prefix, 'w')
        csv_wtr = csv.DictWriter(
            fp_hill,
            fieldnames=('huc', 'topaz_id', 'wepp_id', 'length', 'width',
                        'area', 'slope', 'centroid_lng', 'centroid_lat',
                        'landuse', 'soil_texture', 'sbs', 'ash_wind_transport',
                        'ash_water_transport', 'ash_transport'))
        csv_wtr.writeheader()

        fails = 0
        for i, shape in enumerate(sf.iterShapes()):
            # Pair field names with this feature's attribute values.
            record = {k: v for k, v in zip(header, sf.record(i))}
            # print(record)
            huc12 = str(record['ID'])
            print(huc12)

            if huc12 in blacklist:
                print('in blacklist, skipping', huc12)
                continue

            # Approximate the polygon's bounding-box size in meters.
            # bbox is (xmin, ymin, xmax, ymax) in lon/lat.
            # NOTE(review): haversine(...) presumably returns km (hence
            # *1000), and the points are passed as (lng, lat) — confirm the
            # haversine implementation's expected argument order.
            bbox = shape.bbox
            _y = haversine((bbox[0], bbox[1]), (bbox[0], bbox[3])) * 1000
            _x = haversine((bbox[0], bbox[1]), (bbox[2], bbox[1])) * 1000

            # Skip polygons smaller than ~4 DEM cells (30 m resolution).
            sqm2 = _y * _x
            if sqm2 < 30 * 30 * 4:
                print('too small, skipping', huc12)
                continue

            wd = _join('/geodata/weppcloud_runs/', prefix, huc12)

            if _exists(wd):
                # SUBWTA.ARC is TOPAZ's subcatchment output; its presence
                # means this HUC was already delineated on a previous run.
                if _exists(_join(wd, 'dem', 'topaz', 'SUBWTA.ARC')):
                    print('already delineated, skipping', huc12)
                    continue

                # Partial/failed run: start over from a clean directory.
                shutil.rmtree(wd)
            os.mkdir(wd)

            print('initializing nodbs')
            ron = Ron(wd, "au-fire.cfg")
            #ron = Ron(wd, "au.cfg")

            # ron = Ron(wd, "0.cfg")
            ron.name = wd

            print('setting map')
            # Pad the extent by 40% of the larger bbox dimension so the
            # fetched DEM comfortably contains the watershed.
            pad = max(abs(bbox[0] - bbox[2]), abs(bbox[1] - bbox[3])) * 0.4
            map_center = (bbox[0] + bbox[2]) / 2.0, (bbox[1] + bbox[3]) / 2.0
            l, b, r, t = bbox
            bbox = [l - pad, b - pad, r + pad, t + pad]
            print('bbox', bbox)
            ron.set_map(bbox, map_center, zoom=13)

            print('fetching dem')
            ron.fetch_dem()

            print('setting topaz parameters')
            topaz = Topaz.getInstance(wd)

            print('building channels')
            # Per-HUC overrides for channel-routing failures; default
            # csa=10, mcl=200 otherwise.
            topaz_pars = chn_routing_err_topaz_pars.get(
                huc12, dict(csa=10, mcl=200))
            topaz.build_channels(**topaz_pars)
            map = ron.map  # NOTE(review): shadows the builtin `map`

            print('find raster indices')
            # NOTE(review): name says utm2wgs but src is wgs84 and dst is
            # the map SRS — it converts polygon vertices lon/lat -> map CRS.
            utm2wgs_transformer = GeoTransformer(src_proj4=wgs84_proj4,
                                                 dst_proj4=map.srs_proj4)
            points = [
                utm2wgs_transformer.transform(lng, lat)
                for lng, lat in shape.points
            ]
            # Rasterize the polygon onto the DEM grid (1 = outside, per the
            # masked_array usage below).
            mask = build_mask(points, ron.dem_fn)
            # plt.figure()
            # plt.imshow(mask)
            # plt.colorbar()
            # plt.savefig(_join(topaz.topaz_wd, 'mask.png'))

            if huc12 in outlet_locs:
                # Hand-picked outlet for HUCs where the automatic "lowest
                # channel cell" heuristic fails.
                out_lng, out_lat = outlet_locs[huc12]
                rdi = RasterDatasetInterpolator(ron.dem_fn)
                px, py = rdi.get_px_coord_from_lnglat(out_lng, out_lat)
                print('px, py', px, py)
                dem, transform, proj = read_raster(ron.dem_fn)
                # NOTE(review): assumes dem is indexed [x, y] to match
                # get_px_coord_from_lnglat — verify against read_raster.
                min_elev = dem[px, py]

            else:
                print('loading channel map')
                # Restrict the search to channel cells: mask out (set 1)
                # everything where the channel network raster is 0.
                channels, _, _ = read_raster(topaz.netful_arc)
                mask[np.where(channels == 0)] = 1

                # Diagnostic plots saved into the TOPAZ working dir.
                plt.figure()
                plt.imshow(mask)
                plt.colorbar()
                plt.savefig(_join(topaz.topaz_wd, 'mask.png'))
                plt.close()

                print('finding lowest point in HUC')
                dem, transform, proj = read_raster(ron.dem_fn)
                print(mask.shape, dem.shape)
                print(np.sum(mask))
                # Masked cells (mask==1) are excluded, so min/argmin only
                # consider in-polygon channel cells.
                demma = ma.masked_array(dem, mask=mask)
                plt.figure()
                plt.imshow(demma)
                plt.colorbar()
                plt.savefig(_join(topaz.topaz_wd, 'demma.png'))
                plt.close()

                min_elev = np.min(demma)
                px, py = np.unravel_index(np.argmin(demma), demma.shape)
                px = int(px)
                py = int(py)

            print(min_elev, px, py, px / dem.shape[0], py / dem.shape[1])

            print('building subcatchments')
            topaz.set_outlet(px, py, pixelcoords=True)
            try:
                topaz.build_subcatchments()
            except:
                # Count the failure, then re-raise (aborts the whole batch).
                fails += 1
                raise

            print('abstracting watershed')
            wat = Watershed.getInstance(wd)
            wat.abstract_watershed()
            translator = wat.translator_factory()
            # NOTE(review): topaz_ids is computed but never used below.
            topaz_ids = [
                top.split('_')[1] for top in translator.iter_sub_ids()
            ]

            # is_gwc2 = is_gwc6 = False

            # NOTE(review): this loop's real work is commented out; as
            # written it only prints ids and builds unused locals.
            for topaz_id, hill_summary in wat.sub_iter():
                print(topaz_id)
                _wat = hill_summary.as_dict()
                # _landuse = landuse_summaries[str(topaz_id)]
                # _soils = soils_summaries[str(topaz_id)]

                _centroid_lng, _centroid_lat = _wat['centroid']

                # try:
                #     _sbs2 = gwc2.get_location_info(_centroid_lng, _centroid_lat, method='near')
                #     if _sbs2 < 0:
                #         _sbs2 = None
                # except RDIOutOfBoundsException:
                #     _sbs2 = None
                #
                # try:
                #     _sbs6 = gwc6.get_location_info(_centroid_lng, _centroid_lat, method='near')
                #     if _sbs6 < 0:
                #         _sbs6 = None
                # except RDIOutOfBoundsException:
                #     _sbs6 = None
                #
                # if _sbs2 is None and _sbs6 is None:
                #     _sbs = 0
                #
                # elif _sbs2 is not None:
                #     _sbs = _sbs2
                #     is_gwc2 = True
                #
                # else:
                #     _sbs = _sbs6
                #     is_gwc6 = True

                # _d = dict(huc=huc12, topaz_id=int(topaz_id), wepp_id=_wat['wepp_id'],
                #           length=_wat['length'], width=_wat['width'], area=_wat['area'],
                #           slope=_wat['slope_scalar'],
                #           centroid_lng=_centroid_lng,
                #           centroid_lat=_centroid_lat,
                #           landuse=_landuse['key'],
                #           soil_texture=_soils['simple_texture'],
                #           sbs=_sbs)
                # csv_wtr.writerow(_d)

            # if not is_gwc2 and not is_gwc6:
            #     continue

            # Attach the shared burn-severity raster to this run's BAER dir.
            baer = Baer.getInstance(wd)
            # if is_gwc2:
            #     shutil.copyfile('gwc_sbs2.tif', _join(baer.baer_dir, 'gwc_sbs2.tif'))
            #     baer.validate('gwc_sbs2.tif')
            # if is_gwc6:
            #     shutil.copyfile('gwc_sbs6.tif', _join(baer.baer_dir, 'gwc_sbs6.tif'))
            #     baer.validate('gwc_sbs6.tif')

            shutil.copyfile(
                '/geodata/weppcloud_runs/au/gwc_dnbr_barc4_utm.tif',
                _join(baer.baer_dir, 'gwc_dnbr_barc4_utm.tif'))
            baer.validate('gwc_dnbr_barc4_utm.tif')

            print('building landuse')
            landuse = Landuse.getInstance(wd)
            landuse.mode = LanduseMode.Gridded
            landuse.build()
            landuse = Landuse.getInstance(wd)
            landuse_summaries = landuse.subs_summary

            print('building soils')
            soils = Soils.getInstance(wd)
            soils.mode = SoilsMode.Gridded
            soils.build()
            soils_summaries = soils.subs_summary

            print('building climate')
            # Single-station climate, 100 synthetic years, first station
            # returned by the AU heuristic.
            climate = Climate.getInstance(wd)
            stations = climate.find_au_heuristic_stations()
            climate.input_years = 100
            climate.climatestation = stations[0]['id']
            climate.climate_spatialmode = ClimateSpatialMode.Single
            climate.build(verbose=True)

            print('prepping wepp')
            wepp = Wepp.getInstance(wd)
            wepp.prep_hillslopes()

            print('running hillslopes')
            wepp.run_hillslopes()

            print('prepping watershed')
            wepp = Wepp.getInstance(wd)
            wepp.prep_watershed()

            print('running watershed')
            wepp.run_watershed()

            print('generating loss report')
            # NOTE(review): loss_report is generated but not used here.
            loss_report = wepp.report_loss()

            print('generating totalwatsed report')
            fn = _join(ron.export_dir, 'totalwatsed.csv')

            totwatsed = TotalWatSed(_join(ron.output_dir, 'totalwatsed.txt'),
                                    wepp.baseflow_opts, wepp.phosphorus_opts)
            totwatsed.export(fn)
            assert _exists(fn)

            # Ash transport model with fixed fire date / initial ash depths.
            ash = Ash.getInstance(wd)
            ash.run_ash(fire_date='8/4',
                        ini_white_ash_depth_mm=16.5625,
                        ini_black_ash_depth_mm=17.166666666666668)

            ashpost = AshPost.getInstance(wd)

            # Only write CSV rows when ash post-processing produced stats.
            # NOTE(review): _recurrence/_return_periods/_annuals/_sev_annuals
            # are unpacked but unused; the 'sbs' CSV column is never filled.
            ash_summary = ashpost.summary_stats
            if ash_summary is not None:
                _recurrence = ash_summary['recurrence']
                _return_periods = ash_summary['return_periods']
                _annuals = ash_summary['annuals']
                _sev_annuals = ash_summary['sev_annuals']
                ash_out = ashpost.ash_out

                for topaz_id, hill_summary in wat.sub_iter():
                    print(topaz_id)
                    _wat = hill_summary.as_dict()
                    _landuse = landuse_summaries[str(topaz_id)]
                    _soils = soils_summaries[str(topaz_id)]
                    _centroid_lng, _centroid_lat = _wat['centroid']

                    _d = dict(huc=huc12,
                              topaz_id=int(topaz_id),
                              wepp_id=_wat['wepp_id'],
                              length=_wat['length'],
                              width=_wat['width'],
                              area=_wat['area'],
                              slope=_wat['slope_scalar'],
                              centroid_lng=_centroid_lng,
                              centroid_lat=_centroid_lat,
                              landuse=_landuse['key'],
                              soil_texture=_soils['simple_texture'],
                              ash_wind_transport=ash_out[str(
                                  topaz_id)]['wind_transport (kg/ha)'],
                              ash_water_transport=ash_out[str(
                                  topaz_id)]['water_transport (kg/ha)'],
                              ash_transport=ash_out[str(topaz_id)]
                              ['ash_transport (kg/ha)'])
                    csv_wtr.writerow(_d)

            print('exporting arcmap resources')
            arc_export(wd)

            # Progress line: failures so far / features processed.
            print(fails, i + 1)
# Example no. 2 (scraped sample separator; the following fragment is
# incomplete — it begins mid-loop and is truncated)
# 0
                    log_print('has channels.shp... skipping.')
                    continue

            log_print('cleaning dir')
            if _exists(wd):
                print()
                shutil.rmtree(wd)
            os.mkdir(wd)

            log_print('initializing project')
            ron = Ron(wd, "%s.cfg" % cfg)
            ron.name = wd
            ron.set_map(extent, map_center, zoom=map_zoom)

            log_print('fetching dem')
            ron.fetch_dem()

            log_print('building channels')
            topaz = Topaz.getInstance(wd)
            topaz.build_channels(csa=5, mcl=60)
            topaz.set_outlet(*outlet)
            sleep(0.5)

            log_print('building subcatchments')
            topaz.build_subcatchments()

            log_print('abstracting watershed')
            wat = Watershed.getInstance(wd)
            wat.abstract_watershed()
            translator = wat.translator_factory()
            topaz_ids = [