def test_cat_simple(output):
    x = random_tensor(OrderedDict([
        ('i', bint(2)),
    ]), output)
    y = random_tensor(OrderedDict([
        ('i', bint(3)),
        ('j', bint(4)),
    ]), output)
    z = random_tensor(OrderedDict([
        ('i', bint(5)),
        ('k', bint(6)),
    ]), output)

    assert Cat('i', (x,)) is x
    assert Cat('i', (y,)) is y
    assert Cat('i', (z,)) is z

    xy = Cat('i', (x, y))
    assert isinstance(xy, Tensor)
    assert xy.inputs == OrderedDict([
        ('i', bint(2 + 3)),
        ('j', bint(4)),
    ])
    assert xy.output == output

    xyz = Cat('i', (x, y, z))
    assert isinstance(xyz, Tensor)
    assert xyz.inputs == OrderedDict([
        ('i', bint(2 + 3 + 5)),
        ('j', bint(4)),
        ('k', bint(6)),
    ])
    assert xyz.output == output
def test_cat_simple(output):
    x = random_tensor(OrderedDict([
        ('i', Bint[2]),
    ]), output)
    y = random_tensor(OrderedDict([
        ('i', Bint[3]),
        ('j', Bint[4]),
    ]), output)
    z = random_tensor(OrderedDict([
        ('i', Bint[5]),
        ('k', Bint[6]),
    ]), output)

    assert Cat('i', (x,)) is x
    assert Cat('i', (y,)) is y
    assert Cat('i', (z,)) is z

    xy = Cat('i', (x, y))
    assert isinstance(xy, Tensor)
    assert xy.inputs == OrderedDict([
        ('i', Bint[2 + 3]),
        ('j', Bint[4]),
    ])
    assert xy.output == output

    xyz = Cat('i', (x, y, z))
    assert isinstance(xyz, Tensor)
    assert xyz.inputs == OrderedDict([
        ('i', Bint[2 + 3 + 5]),
        ('j', Bint[4]),
        ('k', Bint[6]),
    ])
    assert xyz.output == output
def test_cat_simple():
    x = Stack('i', (Number(0), Number(1), Number(2)))
    y = Stack('i', (Number(3), Number(4)))

    assert Cat('i', (x,)) is x
    assert Cat('i', (y,)) is y

    xy = Cat('i', (x, y))
    assert xy.inputs == OrderedDict(i=bint(5))
    assert xy.name == 'i'
    for i in range(5):
        assert xy(i=i) is Number(i)
def test_cat_slice_tensor(start, stop, step):
    terms = tuple(
        random_tensor(OrderedDict(t=bint(t), a=bint(2)))
        for t in [2, 1, 3, 4, 1, 3])
    dtype = sum(term.inputs['t'].dtype for term in terms)
    sub = Slice('t', start, stop, step, dtype)

    # eager
    expected = Cat('t', terms)(t=sub)

    # lazy - exercise Cat.eager_subs
    with interpretation(lazy):
        actual = Cat('t', terms)(t=sub)
    actual = reinterpret(actual)

    assert_close(actual, expected)
def mixed_sequential_sum_product(sum_op, prod_op, trans, time, step, num_segments=None):
    """
    For a funsor ``trans`` with dimensions ``time``, ``prev`` and ``curr``,
    computes a recursion equivalent to::

        tail_time = 1 + arange("time", trans.inputs["time"].size - 1)
        tail = sequential_sum_product(sum_op, prod_op,
                                      trans(time=tail_time),
                                      time, {"prev": "curr"})
        return prod_op(trans(time=0)(curr="drop"), tail(prev="drop")) \
           .reduce(sum_op, "drop")

    by mixing parallel and serial scan algorithms over ``num_segments`` segments.

    :param ~funsor.ops.AssociativeOp sum_op: A semiring sum operation.
    :param ~funsor.ops.AssociativeOp prod_op: A semiring product operation.
    :param ~funsor.terms.Funsor trans: A transition funsor.
    :param Variable time: The time input dimension.
    :param dict step: A dict mapping previous variables to current variables.
        This can contain multiple pairs of prev->curr variable names.
    :param int num_segments: number of segments for the first stage
    """
    time_var, time, duration = time, time.name, time.output.size
    num_segments = duration if num_segments is None else num_segments
    assert num_segments > 0 and duration > 0

    # handle unevenly sized segments by chopping off the final segment
    # and calling mixed_sequential_sum_product again
    if duration % num_segments and duration - duration % num_segments > 0:
        remainder = trans(**{time: Slice(time, duration - duration % num_segments, duration, 1, duration)})
        initial = trans(**{time: Slice(time, 0, duration - duration % num_segments, 1, duration)})
        initial_eliminated = mixed_sequential_sum_product(
            sum_op, prod_op, initial, Variable(time, bint(duration - duration % num_segments)), step,
            num_segments=num_segments)
        final = Cat(time, (Stack(time, (initial_eliminated,)), remainder))
        final_eliminated = naive_sequential_sum_product(
            sum_op, prod_op, final, Variable(time, bint(1 + duration % num_segments)), step)
        return final_eliminated

    # handle degenerate cases that reduce to a single stage
    if num_segments == 1:
        return naive_sequential_sum_product(sum_op, prod_op, trans, time_var, step)
    if num_segments >= duration:
        return sequential_sum_product(sum_op, prod_op, trans, time_var, step)

    # break trans into num_segments segments of equal length
    segment_length = duration // num_segments
    segments = [trans(**{time: Slice(time, i * segment_length, (i + 1) * segment_length, 1, duration)})
                for i in range(num_segments)]

    first_stage_result = naive_sequential_sum_product(
        sum_op, prod_op, Stack(time + "__SEGMENTED", tuple(segments)),
        Variable(time, bint(segment_length)), step)

    second_stage_result = sequential_sum_product(
        sum_op, prod_op, first_stage_result,
        Variable(time + "__SEGMENTED", bint(num_segments)), step)

    return second_stage_result
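# A minimal usage sketch (not part of the source above): it checks that the
# mixed parallel/serial scan agrees with the naive serial recursion. The import
# paths, the funsor.testing helpers (random_tensor, assert_close), and the
# configured tensor backend are assumptions rather than details stated here.
from collections import OrderedDict

import funsor.ops as ops
from funsor.domains import bint
from funsor.sum_product import (
    mixed_sequential_sum_product,
    naive_sequential_sum_product,
)
from funsor.terms import Variable
from funsor.testing import assert_close, random_tensor

trans = random_tensor(OrderedDict(time=bint(10), prev=bint(3), curr=bint(3)))
time = Variable("time", bint(10))
step = {"prev": "curr"}

# num_segments=3 does not divide duration=10, so the remainder branch is exercised.
actual = mixed_sequential_sum_product(
    ops.logaddexp, ops.add, trans, time, step, num_segments=3)
expected = naive_sequential_sum_product(ops.logaddexp, ops.add, trans, time, step)
assert_close(actual, expected)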
def sequential_sum_product(sum_op, prod_op, trans, time, step):
    """
    For a funsor ``trans`` with dimensions ``time``, ``prev`` and ``curr``,
    computes a recursion equivalent to::

        tail_time = 1 + arange("time", trans.inputs["time"].size - 1)
        tail = sequential_sum_product(sum_op, prod_op,
                                      trans(time=tail_time),
                                      time, {"prev": "curr"})
        return prod_op(trans(time=0)(curr="drop"), tail(prev="drop")) \
           .reduce(sum_op, "drop")

    but does so efficiently in parallel in O(log(time)).

    :param ~funsor.ops.AssociativeOp sum_op: A semiring sum operation.
    :param ~funsor.ops.AssociativeOp prod_op: A semiring product operation.
    :param ~funsor.terms.Funsor trans: A transition funsor.
    :param Variable time: The time input dimension.
    :param dict step: A dict mapping previous variables to current variables.
        This can contain multiple pairs of prev->curr variable names.
    """
    assert isinstance(sum_op, AssociativeOp)
    assert isinstance(prod_op, AssociativeOp)
    assert isinstance(trans, Funsor)
    assert isinstance(time, Variable)
    assert isinstance(step, dict)
    assert all(isinstance(k, str) for k in step.keys())
    assert all(isinstance(v, str) for v in step.values())
    if time.name in trans.inputs:
        assert time.output == trans.inputs[time.name]

    step = OrderedDict(sorted(step.items()))
    drop = tuple("_drop_{}".format(i) for i in range(len(step)))
    prev_to_drop = dict(zip(step.keys(), drop))
    curr_to_drop = dict(zip(step.values(), drop))
    drop = frozenset(drop)

    time, duration = time.name, time.output.size
    while duration > 1:
        even_duration = duration // 2 * 2
        # Contract each even-indexed step with the following odd-indexed step,
        # summing out the shared intermediate (dropped) state variables.
        x = trans(**{time: Slice(time, 0, even_duration, 2, duration)}, **curr_to_drop)
        y = trans(**{time: Slice(time, 1, even_duration, 2, duration)}, **prev_to_drop)
        contracted = Contraction(sum_op, prod_op, drop, x, y)

        # If duration is odd, carry the final step through to the next level unchanged.
        if duration > even_duration:
            extra = trans(**{time: Slice(time, duration - 1, duration)})
            contracted = Cat(time, (contracted, extra))

        # Halve the effective duration and repeat until a single step remains.
        trans = contracted
        duration = (duration + 1) // 2

    return trans(**{time: 0})
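# Sketch reusing trans, time, step, and expected from the sketch after
# mixed_sequential_sum_product above (same assumptions): the O(log(time))
# parallel scan should give the same answer as the serial recursion.
from funsor.sum_product import sequential_sum_product

parallel = sequential_sum_product(ops.logaddexp, ops.add, trans, time, step)
assert_close(parallel, expected)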
def test_quote(interp):
    with interpretation(interp):
        x = Variable('x', bint(8))
        check_quote(x)

        y = Variable('y', reals(8, 3, 3))
        check_quote(y)
        check_quote(y[x])

        z = Stack('i', (Number(0), Variable('z', reals())))
        check_quote(z)
        check_quote(z(i=0))
        check_quote(z(i=Slice('i', 0, 1, 1, 2)))
        check_quote(z.reduce(ops.add, 'i'))
        check_quote(Cat('i', (z, z, z)))
        check_quote(Lambda(Variable('i', bint(2)), z))
def test_cat(name):
    with interpretation(reflect):
        x = Stack("t", (Number(1), Number(2)))
        y = Stack("t", (Number(4), Number(8), Number(16)))
        xy = Cat(name, (x, y), "t")
        xy.reduce(ops.add)