def test_no_excl(ds_slice):
    """
    Test ExclusionMask with no exclusions provided
    """
    excl_h5 = os.path.join(TESTDATADIR, 'ri_exclusions', 'ri_exclusions.h5')
    with ExclusionLayers(excl_h5) as f:
        shape = f.shape

    # With no layers supplied, both mask handlers should be fully
    # inclusive: an all-ones array over the full extent (or the slice)
    for mask_cls in (ExclusionMask, ExclusionMaskFromDict):
        expected = np.ones(shape)
        with mask_cls(excl_h5) as f:
            if ds_slice is None:
                result = f.mask
            else:
                result = f[ds_slice]
                expected = expected[ds_slice]

        assert np.allclose(expected, result)
def test_extraction(layer, ds_slice):
    """
    Test extraction of Exclusions Layers

    Parameters
    ----------
    layer : str
        Layer to extract
    ds_slice : tuple
        Slices to extract
    """
    excl_h5 = os.path.join(TESTDATADIR, 'ri_exclusions', 'ri_exclusions.h5')

    # Ground truth straight from h5py; band 0 of the stored dataset
    with h5py.File(excl_h5, mode='r') as f:
        expected = f[layer][0]
        if ds_slice is not None:
            expected = expected[ds_slice]

    with ExclusionLayers(excl_h5) as f:
        if ds_slice is None:
            result = f[layer]
        else:
            # ExclusionLayers accepts a (layer, *slices) key tuple
            result = f[(layer, ) + ds_slice]

    assert np.allclose(expected, result)
def test_inclusion_mask(scenario):
    """
    Test creation of inclusion mask

    Parameters
    ----------
    scenario : str
        Standard reV exclusion scenario
    """
    excl_h5 = os.path.join(TESTDATADIR, 'ri_exclusions', 'ri_exclusions.h5')
    expected = np.load(os.path.join(TESTDATADIR, 'ri_exclusions',
                                    '{}.npy'.format(scenario)))

    layers_dict = CONFIGS[scenario]
    min_area = AREA.get(scenario, None)

    # Build LayerMask objects, injecting each layer's nodata value from
    # the exclusions file so both code paths see the same configuration
    with ExclusionLayers(excl_h5) as f:
        layers = []
        for name, kwargs in layers_dict.items():
            kwargs['nodata_value'] = f.get_nodata_value(name)
            layers.append(LayerMask(name, **kwargs))

    mask_test = ExclusionMask.run(excl_h5, layers=layers, min_area=min_area)
    assert np.allclose(expected, mask_test)

    dict_test = ExclusionMaskFromDict.run(excl_h5, layers_dict=layers_dict,
                                          min_area=min_area)
    assert np.allclose(expected, dict_test)
def __init__(self, excl_fpath, h5_fpath, tm_dset, *agg_dset,
             excl_dict=None, area_filter_kernel='queen', min_area=None,
             check_excl_layers=False, resolution=64, excl_area=None,
             gids=None):
    """
    Parameters
    ----------
    excl_fpath : str
        Filepath to exclusions h5 with techmap dataset.
    h5_fpath : str
        Filepath to .h5 file to aggregate
    tm_dset : str
        Dataset name in the techmap file containing the
        exclusions-to-resource mapping data.
    agg_dset : str
        Dataset to aggregate, can supply multiple datasets
    excl_dict : dict | None
        Dictionary of exclusion LayerMask arguments {layer: {kwarg: value}}
    area_filter_kernel : str
        Contiguous area filter method to use on final exclusions mask
    min_area : float | None
        Minimum required contiguous area filter in sq-km
    check_excl_layers : bool
        Run a pre-flight check on each exclusion layer to ensure they
        contain un-excluded values
    resolution : int | None
        SC resolution, must be input in combination with gid. Preferred
        option is to use the row/col slices to define the SC point instead.
    excl_area : float | None
        Area of an exclusion pixel in km2. None will try to infer the
        area from the profile transform attribute in excl_fpath.
    gids : list | None
        List of gids to get aggregation for (can use to subset if running
        in parallel), or None for all gids in the SC extent.
    """
    # Parent class handles the exclusions / techmap / SC-extent setup
    super().__init__(excl_fpath, tm_dset, excl_dict=excl_dict,
                     area_filter_kernel=area_filter_kernel,
                     min_area=min_area,
                     check_excl_layers=check_excl_layers,
                     resolution=resolution, gids=gids)

    self._h5_fpath = h5_fpath
    # Defensive normalization: *agg_dset is already a tuple when called
    # normally; this guards a bare-string value reaching here some other way
    if isinstance(agg_dset, str):
        agg_dset = (agg_dset, )

    # Tuple of dataset names in h5_fpath to aggregate
    self._agg_dsets = agg_dset

    self._check_files()
    self._gen_index = self._parse_gen_index(self._h5_fpath)

    # Fall back to the pixel area stored in the exclusion file when no
    # explicit exclusion pixel area is given
    if excl_area is None:
        with ExclusionLayers(excl_fpath) as excl:
            excl_area = excl.pixel_area

    self._excl_area = excl_area
    if self._excl_area is None:
        e = ('No exclusion pixel area was input and could not parse '
             'area from the exclusion file attributes!')
        logger.error(e)
        raise SupplyCurveInputError(e)
def test_shape():
    """
    Test shape attr extraction
    """
    excl_h5 = os.path.join(TESTDATADIR, 'ri_exclusions', 'ri_exclusions.h5')

    # Baseline shape comes from the file-level 'shape' attribute
    with h5py.File(excl_h5, mode='r') as f:
        expected = f.attrs['shape']

    with ExclusionLayers(excl_h5) as excl:
        result = excl.shape

    assert np.allclose(expected, result)
def test_crs():
    """
    Test crs extraction
    """
    excl_h5 = os.path.join(TESTDATADIR, 'ri_exclusions', 'ri_exclusions.h5')

    # Baseline crs is parsed out of the JSON 'profile' file attribute
    with h5py.File(excl_h5, mode='r') as f:
        expected = json.loads(f.attrs['profile'])['crs']

    with ExclusionLayers(excl_h5) as excl:
        check_crs(expected, excl.crs)

        # Any layer-specific crs that exists must agree with the global crs
        for name in excl.layers:
            layer_crs = excl.get_layer_crs(name)
            if layer_crs is not None:
                check_crs(expected, layer_crs)
def __init__(self, excl_h5, layers=None, min_area=None, kernel='queen',
             hsds=False, check_layers=False):
    """
    Parameters
    ----------
    excl_h5 : str
        Path to exclusions .h5 file
    layers : list | NoneType
        list of LayerMask instances for each exclusion layer to combine
    min_area : float | NoneType
        Minimum required contiguous area in sq-km
    kernel : str
        Contiguous filter method to use on final exclusion
    hsds : bool
        Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS
        behind HSDS
    check_layers : bool
        Run a pre-flight check on each layer to ensure they contain
        un-excluded values
    """
    self._layers = {}
    self._excl_h5 = ExclusionLayers(excl_h5, hsds=hsds)
    self._excl_layers = None
    self._check_layers = check_layers

    if layers is not None:
        # Accept a single LayerMask or a list of them
        if not isinstance(layers, list):
            layers = [layers]

        for layer in layers:
            self.add_layer(layer)

    # Only the two standard contiguity kernels are supported
    if kernel not in ("queen", "rook"):
        raise KeyError('kernel must be "queen" or "rook"')

    self._min_area = min_area
    self._kernel = kernel
    logger.debug('Initializing Exclusions mask with min area of {} '
                 'km2 and filter kernel "{}".'.format(
                     self._min_area, self._kernel))
def test_profile():
    """
    Test profile extraction
    """
    excl_h5 = os.path.join(TESTDATADIR, 'ri_exclusions', 'ri_exclusions.h5')

    # Baseline profile is the JSON 'profile' file attribute
    with h5py.File(excl_h5, mode='r') as f:
        expected = json.loads(f.attrs['profile'])

    with ExclusionLayers(excl_h5) as excl:
        profile = excl.profile
        assert expected['transform'] == profile['transform']
        check_crs(expected['crs'], profile['crs'])

        # Layer-specific profiles, when present, must match the global one
        for name in excl.layers:
            layer_profile = excl.get_layer_profile(name)
            if layer_profile is not None:
                assert expected['transform'] == layer_profile['transform']
                check_crs(expected['crs'], layer_profile['crs'])
def test_layer_mask(layer_name, inclusion_range, exclude_values,
                    include_values, weight, exclude_nodata):
    """
    Test creation of layer masks

    Parameters
    ----------
    layer_name : str
        Layer name
    inclusion_range : tuple
        (min threshold, max threshold) for values to include
    exclude_values : list
        list of values to exclude
        Note: Only supply exclusions OR inclusions
    include_values : list
        List of values to include
        Note: Only supply inclusions OR exclusions
    """
    excl_h5 = os.path.join(TESTDATADIR, 'ri_exclusions', 'ri_exclusions.h5')
    with ExclusionLayers(excl_h5) as f:
        data = f[layer_name]
        nodata_value = f.get_nodata_value(layer_name)

    expected = mask_data(data, inclusion_range, exclude_values,
                         include_values, weight, exclude_nodata,
                         nodata_value)

    # One shared kwargs dict drives both the direct LayerMask path and
    # the dict-based runner so the two stay in sync
    mask_kwargs = {"inclusion_range": inclusion_range,
                   "exclude_values": exclude_values,
                   "include_values": include_values,
                   "weight": weight,
                   "exclude_nodata": exclude_nodata}

    layer = LayerMask(layer_name, nodata_value=nodata_value, **mask_kwargs)
    assert np.allclose(expected, layer._apply_mask(data))

    # Same result through the ExclusionMask runner
    assert np.allclose(expected, ExclusionMask.run(excl_h5, layers=layer))

    # And through the dict-based runner
    dict_test = ExclusionMaskFromDict.run(
        excl_h5, layers_dict={layer_name: mask_kwargs})
    assert np.allclose(expected, dict_test)
def test_resource_tech_mapping():
    """Run the supply curve technology mapping and compare to baseline file"""
    lats, lons, ind = TechMapping.run(EXCL, RES, TM_DSET, max_workers=2,
                                      save_flag=False)

    # Baselines live in the exclusions file alongside the techmap dataset
    with ExclusionLayers(EXCL) as ex:
        baselines = (ex.latitude, ex.longitude, ex[TM_DSET])

    msg = 'Tech mapping failed for {} vs. baseline results.'
    labels = ('latitudes', 'longitudes', 'index mappings')
    for result, truth, label in zip((lats, lons, ind), baselines, labels):
        assert np.allclose(result, truth), msg.format(label)

    # 101 unique indices = 100 mapped generation points plus (presumably)
    # one fill value for unmapped pixels -- confirm against TechMapping
    msg = 'Tech mapping didnt find all 100 generation points!'
    assert len(set(ind.flatten())) == 101, msg
def agg_data_layers(self, summary, data_layers):
    """Perform additional data layer aggregation. If there is no valid data
    in the included area, the data layer will be taken from the full SC
    point extent (ignoring exclusions). If there is still no valid data, a
    warning will be raised and the data layer will have a NaN/None value.

    Parameters
    ----------
    summary : dict
        Dictionary of summary outputs for this sc point.
    data_layers : None | dict
        Aggregation data layers. Must be a dictionary keyed by data label
        name. Each value must be another dictionary with "dset", "method",
        and "fpath".

    Returns
    -------
    summary : dict
        Dictionary of summary outputs for this sc point. A new entry for
        each data layer is added.
    """
    if data_layers is not None:
        for name, attrs in data_layers.items():
            # Prefer an already-open file object ('fobj') when provided;
            # otherwise open the layer's fpath just for this read
            if 'fobj' not in attrs:
                with ExclusionLayers(attrs['fpath']) as f:
                    raw = f[attrs['dset'], self.rows, self.cols]
                    nodata = f.get_nodata_value(attrs['dset'])
            else:
                raw = attrs['fobj'][attrs['dset'], self.rows, self.cols]
                nodata = attrs['fobj'].get_nodata_value(attrs['dset'])

            # Restrict to the included (non-excluded) pixels of this point
            data = raw.flatten()[self.bool_mask]
            excl_mult = self.excl_data_flat[self.bool_mask]

            if nodata is not None:
                nodata_mask = (data == nodata)

                # All included extent is nodata.
                # Reset data from raw without exclusions.
                if all(nodata_mask):
                    data = raw.flatten()
                    excl_mult = self.excl_data_flat
                    # Recompute the mask against the un-excluded data
                    nodata_mask = (data == nodata)

                data = data[~nodata_mask]
                excl_mult = excl_mult[~nodata_mask]

                # Even the full extent was nodata: record None and log it
                if not data.size:
                    data = None
                    excl_mult = None
                    m = ('Data layer "{}" has no valid data for '
                         'SC point gid {}!'
                         .format(name, self._gid))
                    logger.debug(m)

            # Reduce the (possibly None) data via the configured method
            data = self._agg_data_layer_method(data, excl_mult,
                                               attrs['method'])
            summary[name] = data

    return summary