# Example #1
def test_aggregate_input_da(dataarray_2d_example):
    """An eagerly-computed (non-dask) DataArray must be rejected."""
    eager = dataarray_2d_example.compute()
    with pytest.raises(RuntimeError):
        aggregate(eager, [("i", 3), ("j", 3)], func=np.nanmean)
# Example #2
def test_aggregate_input_blocks(dataarray_2d_example, blocks_fail):
    """An invalid ``blocks`` specification must raise RuntimeError."""
    reducer = np.nanmean
    with pytest.raises(RuntimeError):
        aggregate(dataarray_2d_example, blocks_fail, func=reducer)
# Example #3
def test_aggregate_regular_func(dataarray_2d_example, func, expected_result):
    """aggregate() with a parametrized reduction func matches the fixture."""
    result = aggregate(dataarray_2d_example, [("i", 3), ("j", 3)], func=func)
    assert_allclose(result.data.compute(), expected_result)
# Example #4
def test_aggregate_regular_blocks(
    dataarray_2d_example, blocks, expected_result
):
    """aggregate() over parametrized block specs reproduces the fixture."""
    result = aggregate(dataarray_2d_example, blocks, func=np.nanmean)
    assert_allclose(result.data, expected_result)
def KOC_Full(snap,
             mean,
             validfile,
             tr_num,
             bins,
             kappa=63,
             method='LT',
             reset_frq=None,
             reset_pha=None,
             dt_model=None,
             dt_tracer=None,
             cut_time=7776000,
             ref_date='No date',
             debug=False):
    """Compute the Osborn-Cox diffusivity (KOC) on a coarsened grid.

    Builds the numerator ("mixing enhancement") and denominator
    ("background mixing") of the Osborn-Cox estimate from tracer fields,
    coarsens them onto aggregated bins, and returns the ratio scaled by
    ``kappa`` together with diagnostic fields.

    Parameters
    ----------
    snap : xarray.Dataset
        Snapshot output; must contain ``'TRAC' + tr_num`` (used when
        ``method == 'L'``).
    mean : xarray.Dataset
        Time-mean output; must contain ``rA``, ``hFacW``, ``hFacS``,
        ``hFacC``, ``iter`` and the ``TRAC``/``DXSqTr``/``DYSqTr``/
        ``TRACSQ`` tracer fields referenced below.
    validfile : str
        Path of a binary mask file read via ``readbin``; marks valid
        (e.g. Aviso) data points.
    tr_num : str
        Tracer-number suffix used to select variables
        (e.g. ``'TRAC' + tr_num``).
    bins : sequence of (dim, int)
        Block specification passed to ``aggregate``/``custom_coarse``.
        NOTE(review): only ``bins[0][1]`` is used for both the X and Y
        xgcm axes below — flagged hacky in the original; assumes square
        bins.
    kappa : float, optional
        Background diffusivity used to scale the numerator/denominator
        ratio (default 63).
    method : {'L', 'T', 'LT'}, optional
        Averaging order for numerator/denominator (space vs. time first).
        Any other value raises ``ValueError``.
    reset_frq, reset_pha, dt_model, dt_tracer : optional
        Tracer-reset timing parameters forwarded to ``reset_cut``.
    cut_time : int, optional
        Spin-up cutoff (seconds, presumably; 7776000 s = 90 days)
        forwarded to ``reset_cut``.
    ref_date : str, optional
        Unused in this function body; kept for interface compatibility.
    debug : bool, optional
        Unused in this function body; kept for interface compatibility.

    Returns
    -------
    ds : xarray.Dataset
        KOC, numerator, denominator, coarsened tracer diagnostics and
        masks/coords.
    raw : xarray.DataArray
        Full-resolution tracer with land/valid masks attached as coords.

    Raises
    ------
    ValueError
        If ``method`` is not one of 'L', 'T', 'LT'.
    """
    # !!! totally hacky...this needs to be replaced.
    axis_bins = [('X', bins[0][1]), ('Y', bins[0][1])]
    area = mean.rA
    area_sum = aggregate(area, bins, func=np.sum)

    # Ocean points are where all three partial-cell factors are nonzero.
    landmask_w = mean.hFacW.data != 0
    landmask_s = mean.hFacS.data != 0
    landmask_c = mean.hFacC != 0
    landmask = np.logical_and(np.logical_and(landmask_w, landmask_s),
                              landmask_c)

    # External validity mask read from disk, aligned to the area grid.
    validmask = xr.DataArray(readbin(validfile, area.shape),
                             dims=area.dims,
                             coords=area.coords)

    mask = np.logical_and(validmask, landmask)

    check_KOC_input(mean, snap, tr_num, method)

    # snapshots averaged in space
    if method == 'L':
        data = snap
        grid = xgcm.Grid(data)
        grid_coarse = xgcm.Grid(grid_aggregate(grid._ds, axis_bins))
        # Numerator
        q = data['TRAC' + tr_num]
        q_grad_sq = gradient_sq_amplitude(grid, q)
        q_grad_sq_coarse = custom_coarse(q_grad_sq, area, bins, mask)
        n = q_grad_sq_coarse
        # Denominator
        q_coarse = custom_coarse(q, area, bins, mask)
        q_coarse_grad_sq = gradient_sq_amplitude(grid_coarse, q_coarse)
        d = q_coarse_grad_sq
    elif method == 'T':
        data = mean
        grid = xgcm.Grid(data)
        grid_coarse = xgcm.Grid(grid_aggregate(grid._ds, axis_bins))
        # Numerator
        q_gradx_sq_mean = data['DXSqTr' + tr_num]
        q_grady_sq_mean = data['DYSqTr' + tr_num]
        q_grad_sq_mean = grid.interp(q_gradx_sq_mean, 'X') + \
            grid.interp(q_grady_sq_mean, 'Y')
        n = q_grad_sq_mean
        # !!! this is not the right way to do it but its the same way ryan did
        n = custom_coarse(n, area, bins, mask)
        # Denominator
        q_mean = data['TRAC' + tr_num]
        q_mean_grad_sq = gradient_sq_amplitude(grid, q_mean)
        d = q_mean_grad_sq
        # !!! this is not the right way to do it but its the same way ryan did
        d = custom_coarse(d, area, bins, mask)
    elif method == 'LT':
        data = mean
        grid = xgcm.Grid(data)
        grid_coarse = xgcm.Grid(grid_aggregate(grid._ds, axis_bins))
        # Numerator
        q_gradx_sq_mean = data['DXSqTr' + tr_num]
        q_grady_sq_mean = data['DYSqTr' + tr_num]

        q_grad_sq_mean = grid.interp(q_gradx_sq_mean, 'X') + \
            grid.interp(q_grady_sq_mean, 'Y')
        n = custom_coarse(q_grad_sq_mean, area, bins, mask)
        # Denominator
        q_mean = data['TRAC' + tr_num]
        q_mean_coarse = custom_coarse(q_mean, area, bins, mask)
        q_mean_grad_sq = gradient_sq_amplitude(grid_coarse, q_mean_coarse)
        d = q_mean_grad_sq
    else:
        # Previously an unrecognized method fell through silently and
        # crashed later with a NameError on n/d/grid_coarse; fail fast
        # with a clear message instead.
        raise ValueError(
            "method must be one of 'L', 'T', 'LT', got %r" % (method,))

    # Calculate the gradient criterion
    crit_q_mean = custom_coarse(data['TRAC' + tr_num], area, bins, mask)
    crit_q_sq_mean = custom_coarse(data['TRACSQ' + tr_num], area, bins, mask)
    crit_dict = gradient_criterion(grid_coarse, crit_q_mean, crit_q_sq_mean)

    # Export the 'raw tracer fields' ###
    raw = data['TRAC' + tr_num]
    raw_coarse = custom_coarse(raw, area, bins, mask)
    raw.coords['landmask'] = landmask
    raw.coords['validmask'] = validmask
    raw.coords['mask'] = mask
    raw.validmask.attrs['long_name'] = 'Mask for valid Aviso data points'
    raw.landmask.attrs['long_name'] = 'Land mask'
    raw.mask.attrs['long_name'] = 'combination of land and validmask'

    # Final edits for output
    koc = n / d * kappa

    # Count of aggregated valid cells per output pixel.
    # NOTE(review): wraps mask.data in a single-chunk dask array (the
    # second positional arg of da.from_array is `chunks`); assumes
    # mask.data is a plain numpy array at this point — confirm upstream.
    mask.data = da.from_array(mask.data, mask.data.shape)
    mask_count = aggregate(mask, bins, func=np.sum)

    # replace with coarse grid coords
    co = matching_coords(grid_coarse._ds, koc.dims)
    # !!! this is not necessarily enough (this needs to be automated to chose
    # only the corrds with all dims matching i,g,time)

    d = xr.DataArray(d.data, coords=co, dims=d.dims)
    n = xr.DataArray(n.data, coords=co, dims=n.dims)
    koc = xr.DataArray(koc.data, coords=co, dims=koc.dims)
    raw_coarse = xr.DataArray(raw_coarse.data, coords=co, dims=raw_coarse.dims)

    ds = xr.Dataset({
        'KOC': koc,
        'Numerator': n,
        'Denominator': d,
        'AveTracer': raw_coarse,
        'RMSTracer': crit_dict['q_rms'],
        'GradTracer': crit_dict['q_grad_abs'],
        'gradient_criterion': crit_dict['crit']
    })

    ds.coords['mask_count'] = mask_count
    ds.coords['area'] = area_sum

    # Add attributes to ds
    ds.KOC.attrs['long_name'] = 'Osborn-Cox Diffusivity'
    ds.AveTracer.attrs['long_name'] = 'Coarsened Tracer'
    ds.RMSTracer.attrs['long_name'] = 'Coarsened Tracer Variance'
    ds.GradTracer.attrs['long_name'] = 'Coarsened Tracer Gradient (absolute)'
    ds.gradient_criterion.attrs['long_name'] = \
        'Gradient Criterion'
    ds.Numerator.attrs['long_name'] = 'Mixing Enhancement'
    ds.Denominator.attrs['long_name'] = 'Background Mixing'
    ds.mask_count.attrs['long_name'] = 'number of ocean data points before \
                                        coarsening'

    # Determine reset properties
    val_idx, _, _ = reset_cut(reset_frq, reset_pha, dt_model, dt_tracer,
                              mean.iter.data, cut_time)

    ds.coords['valid_index'] = (['time'], val_idx)
    ds['valid_index'].attrs = {'Description': 'Time mask eliminating spin up'}
    return ds, raw