Example #1
def get_counts(cost_coverage=False):
    """Get cell counts for each category."""

    code_dict = get_codes(cost_coverage)

    # Read in code and conus rasters
    chunks = {"band": 1, "x": 5000, "y": 5000}
    code_path = DP.join("rasters/albers/acre/cost_codes.tif")
    cost_path = DP.join("rasters/albers/acre/rent_map.tif")
    conus_path = DP.join("rasters/albers/acre/masks/conus.tif")
    codes = xr.open_rasterio(code_path, chunks=chunks)[0].data
    costs = xr.open_rasterio(cost_path, chunks=chunks)[0].data
    conus = xr.open_rasterio(conus_path, chunks=chunks)[0].data

    # `count_nonzero` treats NaN as nonzero, so zero out NaN cells before counting
    codes[da.isnan(codes)] = 0
    conus[da.isnan(conus)] = 0

    # If calculating cost coverage, keep only cells with a rent value
    if cost_coverage:
        coverage = codes[(costs > 0) | (codes == 9999)]  # exclusions (9999) carry no cost
    else:
        coverage = codes.copy()

    # Extract code from dictionary
    blm_codes = code_dict["blm"]
    tribal_codes = code_dict["tribal"]
    state_codes = code_dict["state"]
    private_codes = code_dict["private"]

    # Arrays
    developable = conus[codes != 9999]
    dev_covered = coverage[coverage != 9999]
    excl = coverage[coverage == 9999]
    blm = coverage[da.isin(coverage, blm_codes)]
    tribal = coverage[da.isin(coverage, tribal_codes)]
    state = coverage[da.isin(coverage, state_codes)]
    private = coverage[da.isin(coverage, private_codes)]
    arrays = {"excl": excl, "blm": blm, "tribal": tribal, "state": state,
              "private": private, "covered": coverage, "total": conus, 
              "developable": developable, "dev_covered": dev_covered}

    # Collect counts
    counts = {}
    with Client():
        for key, item in tqdm(arrays.items(), position=0):
            counts["n" + key] = da.count_nonzero(item).compute()

    return counts
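
A minimal sketch of the counting pattern used above, with made-up code values and a made-up `blm_codes` list; 9999 marks excluded cells as in the function:

import numpy as np
import dask.array as da

# Hypothetical 1-D stand-in for the code raster; 9999 = excluded
codes = da.from_array(np.array([1, 2, 9999, 3, 2, 9999, 1]), chunks=3)
blm_codes = [1, 3]  # assumed code list, for illustration only

blm = codes[da.isin(codes, blm_codes)]   # keep only cells with BLM codes
n_blm = da.count_nonzero(blm).compute()  # -> 3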
Example #2
def test_isin_assume_unique(assume_unique):
    a1 = np.arange(10)
    d1 = da.from_array(a1, chunks=(5, ))

    test_elements = np.arange(0, 10, 2)
    r_a = np.isin(a1, test_elements, assume_unique=assume_unique)
    r_d = da.isin(d1, test_elements, assume_unique=assume_unique)
    assert_eq(r_a, r_d)
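
The test harness that supplies `assume_unique` is not shown on this page. Independent of the test, a minimal direct call to `da.isin` with that flag looks like this:

import numpy as np
import dask.array as da

data = da.from_array(np.arange(10), chunks=5)
mask = da.isin(data, [0, 2, 4, 6, 8], assume_unique=True)
print(mask.compute())  # True at even positions, False at odd ones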
Example #3
def test_isin_rand(seed, low, high, elements_shape, elements_chunks,
                   test_shape, test_chunks, invert):
    rng = np.random.RandomState(seed)

    a1 = rng.randint(low, high, size=elements_shape)
    d1 = da.from_array(a1, chunks=elements_chunks)

    a2 = rng.randint(low, high, size=test_shape) - 5
    d2 = da.from_array(a2, chunks=test_chunks)

    r_a = np.isin(a1, a2, invert=invert)
    r_d = da.isin(d1, d2, invert=invert)
    assert_eq(r_a, r_d)
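
For reference, a small sketch of what `invert=True` does (values not contained in the test set are flagged instead):

import numpy as np
import dask.array as da

a = da.from_array(np.array([1, 2, 3, 4, 5]), chunks=2)
print(da.isin(a, [2, 4]).compute())               # [False  True False  True False]
print(da.isin(a, [2, 4], invert=True).compute())  # [ True False  True False  True]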
Example #5
def mask_features(mask, feature_ids, masked=False):
    ''' Create a mask restricted to specific features.
    Input:
    mask:              iris.cube.Cube
                       cube containing mask (int id for tracked volumes, 0 everywhere else)
    feature_ids:       list of int
                       ids of the features to keep in the mask
    masked:            bool
                       if True, return the data as a masked array with zeros masked out
    Output:
    mask_i:            iris.cube.Cube
                       mask cube containing only the selected features (0 elsewhere)
    '''
    from dask.array import ma, isin
    from copy import deepcopy
    mask_i = deepcopy(mask)
    mask_i_data = mask_i.core_data()
    mask_i_data[~isin(mask_i.core_data(), feature_ids)] = 0
    if masked:
        mask_i.data = ma.masked_equal(mask_i.core_data(), 0)
    return mask_i
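
The core dask pattern behind this function, shown here without iris and with made-up labels and ids, using `da.where` instead of in-place assignment:

import numpy as np
import dask.array as da

labels = da.from_array(np.array([[0, 1, 2], [2, 0, 3]]), chunks=(1, 3))
feature_ids = [2, 3]  # hypothetical feature ids

# Keep only the selected features; everything else becomes 0
kept = da.where(da.isin(labels, feature_ids), labels, 0)
print(kept.compute())  # [[0 0 2]
                       #  [2 0 3]]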
Example #6
def mask_cube_features(variable_cube, mask, feature_ids):
    ''' Mask a data cube to the tracked volumes of selected features.
    Input:
    variable_cube:     iris.cube.Cube
                       unmasked data cube
    mask:              iris.cube.Cube
                       cube containing mask (int id for tracked volumes, 0 everywhere else)
    feature_ids:       list of int
                       integer ids of the features to keep unmasked
    Output:
    variable_cube_out: iris.cube.Cube
                       masked cube with data only for the selected features
    '''
    from dask.array import ma, isin
    from copy import deepcopy
    variable_cube_out = deepcopy(variable_cube)
    variable_cube_out.data = ma.masked_where(
        ~isin(mask.core_data(), feature_ids), variable_cube_out.core_data())
    return variable_cube_out
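
The same masking step with plain dask arrays, to make the `ma.masked_where(~isin(...), ...)` idiom concrete (values and labels are made up):

import numpy as np
import dask.array as da
from dask.array import ma, isin

values = da.from_array(np.array([10., 20., 30., 40.]), chunks=2)
labels = da.from_array(np.array([1, 2, 1, 3]), chunks=2)

# Mask every value whose label is not in the wanted id list
masked = ma.masked_where(~isin(labels, [1]), values)
print(masked.compute())  # [10.0 -- 30.0 --]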
Example #7
def mask_features_surface(mask,
                          feature_ids,
                          masked=False,
                          z_coord='model_level_number'):
    ''' Create a 2-D surface mask for individual features.
    Input:
    mask:              iris.cube.Cube
                       cube containing mask (int id for tracked volumes, 0 everywhere else)
    feature_ids:       list of int
                       ids of the features to keep in the mask
    masked:            bool
                       if True, mask out grid points belonging to no selected feature
    z_coord:           str
                       name of the vertical coordinate to collapse over
    Output:
    mask_i_surface:    iris.cube.Cube
                       surface mask cube collapsed over the vertical coordinate
    '''
    from iris.analysis import MAX
    from dask.array import ma, isin
    from copy import deepcopy
    mask_i = deepcopy(mask)
    mask_i_data = mask_i.core_data()
    mask_i_data[~isin(mask_i.core_data(), feature_ids)] = 0
    mask_i_surface = mask_i.collapsed(z_coord, MAX)
    if masked:
        mask_i_surface.data = ma.masked_equal(mask_i_surface.core_data(), 0)
    return mask_i_surface
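
A dask-only sketch of the same reduction: restrict to the wanted features, then collapse the vertical axis with a maximum (the role of `mask_i.collapsed(z_coord, MAX)` above); the 3-D field and ids are hypothetical:

import numpy as np
import dask.array as da

# 3-D label field with shape (z, y, x)
mask_3d = da.from_array(np.array([[[0, 2], [0, 0]],
                                  [[0, 0], [3, 0]]]), chunks=(1, 2, 2))

kept = da.where(da.isin(mask_3d, [2, 3]), mask_3d, 0)  # zero out other features
surface = kept.max(axis=0)                             # collapse the vertical axis
print(surface.compute())  # [[0 2]
                          #  [3 0]]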