Example #1
def test_as_quantity():
    """Test conversion to sparse.COO-backed xr.DataArray in as_quantity()."""
    x_series = pd.Series(
        data=[1, 2, 3, 4],
        index=pd.MultiIndex.from_product([['a', 'b'], ['c', 'd']],
                                         names=['foo', 'bar']),
    )
    y_series = pd.Series(data=[5, 6], index=pd.Index(['e', 'f'], name='baz'))

    x = xr.DataArray.from_series(x_series, sparse=True)
    y = xr.DataArray.from_series(y_series, sparse=True)

    x_dense = xr.DataArray.from_series(x_series)
    y_dense = xr.DataArray.from_series(y_series)

    # Without conversion, multiplying the dense DataArray by the
    # sparse-backed one fails
    with pytest.raises(ValueError, match='make sure that the broadcast shape'):
        x_dense * y

    # Converting either operand with as_quantity() allows the multiplication,
    # and both orderings give equal results
    z1 = as_quantity(x_dense) * y
    z2 = x * as_quantity(y_dense)
    assert z1.dims == ('foo', 'bar', 'baz')
    assert_qty_equal(z1, z2)

    # Converting both operands also works
    z3 = as_quantity(x) * as_quantity(y)
    assert_qty_equal(z1, z3)

    # ...as does converting only the sparse-backed DataArray
    z4 = as_quantity(x) * y
    assert_qty_equal(z1, z4)

    # as_quantity() also accepts a pd.Series directly
    z5 = as_quantity(x_series) * y
    assert_qty_equal(z1, z5)
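
These snippets are test functions lifted from ixmp's reporting test suite and are not self-contained. A minimal sketch of the imports and helpers they appear to rely on follows; the exact module paths are an assumption and vary between ixmp releases, and add_test_data is a helper defined in the test module itself.

import numpy as np
import pandas as pd
import pytest
import xarray as xr

import ixmp
# Assumed locations; these names have moved between ixmp releases.
from ixmp.reporting import Key, Reporter, computations
from ixmp.reporting.quantity import AttrSeries, Quantity, as_quantity
from ixmp.reporting.utils import UNITS
from ixmp.testing import assert_qty_allclose, assert_qty_equal, make_dantzig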
Example #2
def test_reporter_add_product(test_mp):
    scen = ixmp.Scenario(test_mp, 'reporter_add_product',
                         'reporter_add_product', 'new')
    *_, x = add_test_data(scen)
    rep = Reporter.from_scenario(scen)

    # add_product() works
    key = rep.add_product('x squared', 'x', 'x', sums=True)

    # Product has the expected dimensions
    assert key == 'x squared:t-y'

    # Product has the expected value
    exp = as_quantity(x * x)
    exp.attrs['_unit'] = UNITS('kilogram ** 2').units
    assert_qty_equal(exp, rep.get(key))
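
The '_unit' assertion above relies on unit arithmetic: squaring a quantity measured in kilograms gives kilograms squared. A standalone check with pint (assuming, as the UNITS name suggests, a pint.UnitRegistry underneath):

import pint

ureg = pint.UnitRegistry()
# kg * kg == kg ** 2, matching the '_unit' attribute expected for 'x squared'
assert (ureg('1 kilogram') ** 2).units == ureg('kilogram ** 2').units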
Example #3
def test_reporting_aggregate(test_mp):
    scen = ixmp.Scenario(test_mp, 'Group reporting', 'group reporting', 'new')
    t, t_foo, t_bar, x = add_test_data(scen)

    # Reporter
    rep = Reporter.from_scenario(scen)

    # Define some groups
    t_groups = {'foo': t_foo, 'bar': t_bar, 'baz': ['foo1', 'bar5', 'bar6']}

    # Use the computation directly
    agg1 = computations.aggregate(as_quantity(x), {'t': t_groups}, True)

    # Expected set of keys along the aggregated dimension
    assert set(agg1.coords['t'].values) == set(t) | set(t_groups.keys())

    # Sums are as expected
    assert_qty_allclose(agg1.sel(t='foo', drop=True), x.sel(t=t_foo).sum('t'))
    assert_qty_allclose(agg1.sel(t='bar', drop=True), x.sel(t=t_bar).sum('t'))
    assert_qty_allclose(agg1.sel(t='baz', drop=True),
                        x.sel(t=['foo1', 'bar5', 'bar6']).sum('t'))

    # Use Reporter convenience method
    key2 = rep.aggregate('x:t-y', 'agg2', {'t': t_groups}, keep=True)

    # Group has expected key and contents
    assert key2 == 'x:t-y:agg2'

    # Aggregate is computed without error
    agg2 = rep.get(key2)

    assert_qty_equal(agg1, agg2)

    # Add aggregates, without keeping originals
    key3 = rep.aggregate('x:t-y', 'agg3', {'t': t_groups}, keep=False)

    # Distinct keys
    assert key3 != key2

    # Only the aggregated and no original keys along the aggregated dimension
    agg3 = rep.get(key3)
    assert set(agg3.coords['t'].values) == set(t_groups.keys())

    with pytest.raises(NotImplementedError):
        # Not yet supported; requires two separate operations
        rep.aggregate('x:t-y', 'agg3', {'t': t_groups, 'y': [2000, 2010]})
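
For orientation, the grouped aggregation exercised by computations.aggregate() can be pictured with plain xarray. The sketch below illustrates the behaviour the assertions check (sum the members of each group along 't', optionally keeping the original labels); it is not the library's actual implementation.

import xarray as xr

def aggregate_t(qty: xr.DataArray, groups: dict, keep: bool) -> xr.DataArray:
    """Sum the members of each group along 't'; optionally keep originals."""
    pieces = [qty] if keep else []
    for name, members in groups.items():
        pieces.append(qty.sel(t=members).sum('t').expand_dims(t=[name]))
    return xr.concat(pieces, dim='t')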
Example #4
def test_reporter_from_dantzig(test_mp, test_data_path):
    scen = make_dantzig(test_mp, solve=test_data_path)

    # Reporter.from_scenario can handle the Dantzig problem
    rep = Reporter.from_scenario(scen)

    # Partial sums are available automatically (d is defined over i and j)
    d_i = rep.get('d:i')

    # Units pass through summation
    assert d_i.attrs['_unit'] == UNITS.parse_units('km')

    # Summation across all dimensions results in a 1-element Quantity
    d = rep.get('d:')
    assert d.shape == ((1, ) if Quantity is AttrSeries else tuple())
    assert d.size == 1
    assert np.isclose(d.values, 11.7)

    # Weighted sum
    weights = Quantity(
        xr.DataArray([1, 2, 3],
                     coords=['chicago new-york topeka'.split()],
                     dims=['j']))
    new_key = rep.aggregate('d:i-j', 'weighted', 'j', weights)

    # ...produces the expected new key with the summed dimension removed and
    # tag added
    assert new_key == 'd:i:weighted'

    # ...produces the expected new value
    obs = rep.get(new_key)
    d_ij = rep.get('d:i-j')
    exp = (d_ij * weights).sum(dim=['j']) / weights.sum(dim=['j'])
    # FIXME attrs has to be explicitly copied here because math is done which
    #       returns a pd.Series
    exp.attrs = d_ij.attrs

    assert_qty_equal(exp, obs)

    # Disaggregation with explicit data
    # (cases of canned food 'p'acked in oil or water)
    shares = xr.DataArray([0.8, 0.2], coords=[['oil', 'water']], dims=['p'])
    new_key = rep.disaggregate('b:j', 'p', args=[as_quantity(shares)])

    # ...produces the expected key with new dimension added
    assert new_key == 'b:j-p'

    b_jp = rep.get('b:j-p')

    # Units pass through disaggregation
    assert b_jp.attrs['_unit'] == 'cases'

    # Set elements are available
    assert rep.get('j') == ['new-york', 'chicago', 'topeka']

    # 'all' key retrieves all quantities
    obs = {da.name for da in rep.get('all')}
    exp = set(('a b d f x z cost cost-margin demand demand-margin supply '
               'supply-margin').split())
    assert obs == exp

    # Shorthand for retrieving a full key name
    assert rep.full_key('d') == 'd:i-j' and isinstance(rep.full_key('d'), Key)
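
The expression used for exp in the weighted-sum check above is a weighted mean over 'j'. A standalone illustration with plain xarray and made-up numbers (the labels and values here are illustrative only, not the Dantzig data):

import xarray as xr

d_ij = xr.DataArray([[1.0, 2.0, 3.0]],
                    coords=[['seattle'], ['chicago', 'new-york', 'topeka']],
                    dims=['i', 'j'])
weights = xr.DataArray([1, 2, 3],
                       coords=[['chicago', 'new-york', 'topeka']], dims=['j'])
# Multiply by the weights, sum the weighted values over 'j', then divide by
# the total weight; only dimension 'i' remains.
weighted = (d_ij * weights).sum(dim='j') / weights.sum(dim='j')
assert weighted.dims == ('i',)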