Example #1: reproject a raster to a new coordinate reference system (gdalwarp)
    def _run_tool(self):

        dataset = self.dataset

        # get the metadata and source file path from the input dataset

        orig_metadata = get_metadata(dataset)[dataset]
        src_path = orig_metadata['file_path']

        if self.new_crs is None:
            raise ValueError(
                "A new coordinated reference system MUST be provided")

        dst_crs = self.new_crs

        new_metadata = {
            'parameter': orig_metadata['parameter'],
            'datatype': orig_metadata['datatype'],
            'file_format': orig_metadata['file_format'],
        }

        new_dset, file_path, catalog_entry = self._create_new_dataset(
            old_dataset=dataset,
            ext='.tif',
            dataset_metadata=new_metadata,
        )

        # reproject with gdalwarp, passing the source CRS read from the raster
        # and the requested target CRS
        with rasterio.open(src_path) as src:
            src_crs = src.crs.to_string()

        subprocess.check_output([
            'gdalwarp', src_path, file_path,
            '-s_srs', src_crs, '-t_srs', dst_crs,
        ])

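        # record the reprojected raster's bounding box as WKT on the catalog entry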
        with rasterio.open(file_path) as f:
            geometry = util.bbox2poly(f.bounds.left,
                                      f.bounds.bottom,
                                      f.bounds.right,
                                      f.bounds.top,
                                      as_shapely=True)
        update_metadata(catalog_entry,
                        quest_metadata={'geometry': geometry.to_wkt()})

        return {'datasets': new_dset, 'catalog_entries': catalog_entry}
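The same reprojection can be done in-process with rasterio's warp module instead of shelling out to gdalwarp. A minimal sketch of that alternative, assuming src_path, file_path, and dst_crs as above (this follows the standard rasterio recipe and is not part of the tool itself):

    import rasterio
    from rasterio.warp import calculate_default_transform, reproject, Resampling

    with rasterio.open(src_path) as src:
        # compute the output grid implied by the target CRS
        transform, width, height = calculate_default_transform(
            src.crs, dst_crs, src.width, src.height, *src.bounds)
        profile = src.profile.copy()
        profile.update(crs=dst_crs, transform=transform,
                       width=width, height=height)
        with rasterio.open(file_path, 'w', **profile) as dst:
            for band in range(1, src.count + 1):
                reproject(source=rasterio.band(src, band),
                          destination=rasterio.band(dst, band),
                          src_transform=src.transform, src_crs=src.crs,
                          dst_transform=transform, dst_crs=dst_crs,
                          resampling=Resampling.nearest)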
Example #2: fill DEM depressions with WhiteboxTools
    def _run_tool(self):
        dataset = self.dataset

        orig_metadata = get_metadata(dataset)[dataset]
        elev_file = orig_metadata['file_path']

        new_dset, file_path, catalog_entry = self._create_new_dataset(
            old_dataset=dataset, ext='.tif')

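        # fill depressions so every DEM cell drains to an edge, preparing the
        # surface for flow routing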
        wbt.fill_depressions(elev_file, output=file_path)

        quest_metadata = {
            # the filled raster is still elevation data, so keep the source parameter
            'parameter': orig_metadata['parameter'],
            'datatype': orig_metadata['datatype'],
            'file_format': orig_metadata['file_format'],
        }

        update_metadata(new_dset, quest_metadata=quest_metadata)

        return {'datasets': new_dset}
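The wbt object here appears to be a wrapper around WhiteboxTools (its method names match that toolset). For reference, a minimal sketch of the same operation with the standalone whitebox package; the keyword usage is assumed from its published Python frontend, and 'dem.tif' / 'filled.tif' are hypothetical paths:

    import os
    from whitebox import WhiteboxTools

    wbt = WhiteboxTools()
    # WhiteboxTools expects absolute paths
    wbt.fill_depressions(os.path.abspath('dem.tif'),
                         output=os.path.abspath('filled.tif'))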
Example #3: delineate watersheds from outlet points, with optional snapping
    def _run_tool(self):
        dataset = self.elevation_dataset

        # get metadata, path etc from dataset

        orig_metadata = get_metadata(dataset)[dataset]
        elev_file = orig_metadata['file_path']

        # self.outlets may be catalog entries carrying geometry metadata,
        # or a plain list of outlet points
        try:
            original_outlets = [
                f['geometry'] for f in get_metadata(self.outlets)
            ]
        except Exception:
            # not catalog entries; use the input as raw outlet coordinates
            original_outlets = self.outlets

        new_dset, file_path, catalog_entry = self._create_new_dataset(
            old_dataset=dataset,
            ext='.tif',
            dataset_metadata={
                'parameter': 'watershed_boundary',
                'datatype': orig_metadata['datatype'],
                'file_format': orig_metadata['file_format'],
            })

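        # a D8 pointer grid stores, for each cell, the single downslope
        # neighbor used to route flow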
        d8 = wbt.d8_pointer(elev_file)
        point_shp = points_to_shp(original_outlets)
        snapped_outlets = original_outlets  # default when no snapping is requested

        if self.snap_distance > 0:
            pp = wbt.vector_points_to_raster(point_shp, base=elev_file)
            snap_options = {
                'pour_pts': pp,
                'snap_dist': self.snap_distance,
            }
            fa = None
            if self.algorithm == 'nearest-stream':
                st = self.streams_dataset
                if st:
                    st = open_dataset(st, with_nodata=True, isel_band=0)
                else:
                    fa = wbt.d_inf_flow_accumulation(elev_file)
                    st = wbt.extract_streams(fa, threshold=0.1)
                snap_options.update(streams=st)
            else:
                fa = fa or wbt.d_inf_flow_accumulation(elev_file)
                # fa = wbt.d8_flow_accumulation(elev_file)
                snap_options.update(flow_accum=fa)

            snap_function = self.SNAP_DISTANCE_ALGORITHMS[self.algorithm]
            snapped = snap_function(**snap_options)

            # np.nonzero on the (y, x) grid yields row (y) indices first,
            # then column (x) indices
            indices = np.nonzero(np.nan_to_num(snapped))
            snapped_outlets = [(snapped.x.values[x_idx], snapped.y.values[y_idx])
                               for y_idx, x_idx in zip(*indices)]
            point_shp = points_to_shp(snapped_outlets)

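        # delineate the watershed draining to each (possibly snapped) outlet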
        wbt.watershed(
            d8_pntr=d8,
            pour_pts=point_shp,
            output=file_path,
        )

        new_catalog_entries = raster_to_polygons(file_path)

        quest_metadata = {
            'parameter': 'watershed_boundary',
            'datatype': orig_metadata['datatype'],
            'file_format': orig_metadata['file_format'],
            'file_path': file_path,
        }

        update_metadata(new_dset, quest_metadata=quest_metadata)

        return {
            'datasets': new_dset,
            'catalog_entries': [new_catalog_entries, snapped_outlets, catalog_entry],
        }
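One subtlety in the snapping step above: np.nonzero on a (y, x) grid returns the row (y) indices first and the column (x) indices second, so they must be mapped to coordinates carefully. A small self-contained illustration with toy coordinates:

    import numpy as np
    import xarray as xr

    data = np.zeros((3, 4))
    data[0, 1] = 1.0
    data[2, 3] = 1.0
    grid = xr.DataArray(
        data, dims=('y', 'x'),
        coords={'y': [30.0, 29.5, 29.0], 'x': [-95.0, -94.5, -94.0, -93.5]})

    # rows index y, columns index x
    rows, cols = np.nonzero(np.nan_to_num(grid.values))
    points = [(grid.x.values[c], grid.y.values[r]) for r, c in zip(rows, cols)]
    print(points)  # [(-94.5, 30.0), (-93.5, 29.0)]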
Example #4: merge rasters and optionally clip to a bounding box
    def _run_tool(self):
        datasets = self.datasets

        if len(datasets) < 2:
            raise ValueError('There must be at least two datasets for this filter')

        # fetch each dataset's metadata once; merging only makes sense when
        # every dataset shares the same parameter and unit
        all_metadata = {d: get_metadata(d)[d] for d in datasets}
        orig_metadata = all_metadata[datasets[0]]
        raster_files = [m['file_path'] for m in all_metadata.values()]

        for metadata in all_metadata.values():
            if metadata['parameter'] != orig_metadata['parameter']:
                raise ValueError('Parameters must match for all datasets')
            if metadata['unit'] != orig_metadata['unit']:
                raise ValueError('Units must match for all datasets')

        new_metadata = {
            'parameter': orig_metadata['parameter'],
            'datatype': orig_metadata['datatype'],
            'file_format': orig_metadata['file_format'],
            'unit': orig_metadata['unit'],
        }

        new_dset, file_path, catalog_entry = self._create_new_dataset(
            old_dataset=datasets[0],
            ext='.tif',
            dataset_metadata=new_metadata,
        )

        open_datasets = [rasterio.open(d) for d in raster_files]
        profile = open_datasets[0].profile
        # hack to avoid a "nodata out of range of dtype" error for NED datasets
        if profile['nodata'] == -3.4028234663853e+38:
            profile['nodata'] = -32768.0
        new_data, transform = rasterio.merge.merge(open_datasets, nodata=profile['nodata'])
        for d in open_datasets:
            d.close()
        # drop the tiling flag, which may not be valid for the merged output
        profile.pop('tiled', None)
        profile.update(
            height=new_data.shape[1],
            width=new_data.shape[2],
            transform=transform,
            driver='GTiff'
        )
        with rasterio.open(file_path, 'w', **profile) as output:
            output.write(new_data.astype(profile['dtype']))

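        # optionally clip the merged mosaic to a user-supplied EPSG:4326 bounding box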
        bbox = self.bbox

        if bbox is not None:
            bbox = box(*bbox)
            geo = gpd.GeoDataFrame({'geometry': bbox}, index=[0], crs=from_epsg(4326))
            geo = geo.to_crs(crs=profile['crs'])
            bbox = geo.geometry

            with rasterio.open(file_path, 'r') as merged:
                new_data, transform = rasterio.mask.mask(dataset=merged, shapes=bbox, all_touched=True, crop=True)

            profile.update(
                height=new_data.shape[1],
                width=new_data.shape[2],
                transform=transform,
            )
            with rasterio.open(file_path, 'w', **profile) as clipped:
                clipped.write(new_data)

        with rasterio.open(file_path) as f:
            geometry = util.bbox2poly(f.bounds.left, f.bounds.bottom, f.bounds.right, f.bounds.top, as_shapely=True)
        update_metadata(catalog_entry, quest_metadata={'geometry': geometry.to_wkt()})

        return {'datasets': new_dset, 'catalog_entries': catalog_entry}
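Stripped of the Quest bookkeeping, the core of this tool is rasterio's merge-then-write recipe. A minimal standalone sketch ('a.tif', 'b.tif', and 'merged.tif' are hypothetical paths):

    import rasterio
    from rasterio.merge import merge

    sources = [rasterio.open(p) for p in ('a.tif', 'b.tif')]
    try:
        # merge returns the mosaic array (bands, rows, cols) and its transform
        mosaic, transform = merge(sources)
        profile = sources[0].profile
        profile.update(driver='GTiff', height=mosaic.shape[1],
                       width=mosaic.shape[2], transform=transform)
    finally:
        for src in sources:
            src.close()

    with rasterio.open('merged.tif', 'w', **profile) as dst:
        dst.write(mosaic.astype(profile['dtype']))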