def test_DataflowEnvironment_WithKwArgs(self):
    """Drive a DataflowEnvironment by calling it with keyword arguments.

    Pipeline: call_args['my_data'] -> f (coroutine) -> g (function)
    -> data_out.func.  The environment declares a single named argument
    'my_data' and is invoked once per input value; results accumulate in
    ``data_out.out``.
    """
    attr_f = self.attr_f
    data_out = self.data_out
    f_co = coroutine_from_func(f)
    dtf = dfp.DataflowEnvironment()
    # Declare the keyword argument accepted by dtf(...) calls.
    dtf.args = ['my_data']
    dtf.add_cotask(f_co, filters=('call_args', 'my_data'), **attr_f)
    dtf.add_task(g, filters=dict(args=[f_co]))
    dtf.add_task(data_out.func, filters=dict(args=[g]))
    dtf.start()
    for i in range(10):
        dtf(my_data=i)
    assert data_out.out == self.results
def test_DataflowEnvironment_as_Generator(self):
    """Consume a DataflowEnvironment through its ``gen()`` generator.

    Tasks are registered out of execution order; the execution plan is
    determined when the environment is locked/started.  ``in_co`` feeds
    the coroutine ``f``, which feeds the plain function ``g``; g's
    return values are yielded by ``dtf.gen()``.
    """
    data_out = self.data_out
    in_co = coroutine_from_func(self.data_in.func)
    f_co = coroutine_from_func(f)
    env = dfp.DataflowEnvironment()
    env.add_cotask(in_co)
    env.add_cotask(f_co, filters=in_co, **self.attr_f)
    env.add_task(g, filters=dict(args=[f_co]))
    env.add_edge_call_rets(g)
    env.start()
    collected = list(env.gen())
    assert collected == self.results
def test_DataflowEnvironment_NotNamedTasks(self):
    """Register tasks anonymously (by object, not by name), out of order.

    The execution plan is worked out in lock()/start():
    in_co -> f (coroutine) -> g (function) -> data_out.func.
    """
    data_in = self.data_in
    data_out = self.data_out
    f_co = coroutine_from_func(f)
    in_co = coroutine_from_func(data_in.func)
    env = dfp.DataflowEnvironment()
    env.add_task(data_out.func, filters=dict(args=[g]))
    env.add_cotask(f_co, filters=in_co, **self.attr_f)
    env.add_task(g, filters=dict(args=[f_co]))
    env.add_cotask(in_co)
    env.start()
    env.run()
    assert data_out.out == self.results
def test_DataflowEnvironment_AplyNonTrivialFilters(self):
    """Attach non-trivial filter callables to the edges.

    Every edge negates the value flowing through it
    (``lambda x: -float(str(x))``).  The filters must only run when data
    actually flows, not while the execution plan is being determined —
    otherwise the expected (negated-once) results would not come out.
    """
    data_in = self.data_in
    data_out = self.data_out
    f_co = coroutine_from_func(f)
    in_co = coroutine_from_func(data_in.func)
    sink = data_out.func
    env = dfp.DataflowEnvironment()
    env.add_task(sink, filters=dict(args=[(g, lambda x: -float(str(x)))]))
    env.add_cotask(in_co)
    env.add_task(g, filters=dict(args=[(f_co, lambda x: -float(str(x)))]))
    env.add_cotask(f_co, filters=(in_co, lambda x: -float(str(x))), **self.attr_f)
    env.start()
    env.run()
    expected = [-990.0, -991.0, -992.0, -993.0, -994.0,
                -995.0, -996.0, -997.0, -998.0, -999.0]
    assert data_out.out == expected
def test_DataflowEnvironmentAutomaticOrderInLock(self):
    """Named tasks added out of order are re-ordered automatically.

    Registration order is 'terminal', 'f', 'g', 'indata', but lock()
    (invoked from start()) derives the execution plan so the pipeline
    still runs as indata -> f -> g -> terminal.
    """
    data_out = self.data_out
    env = dfp.DataflowEnvironment()
    env.add_task('terminal', data_out.func, filters=dict(args=['g']))
    env.add_cotask('f', coroutine_from_func(f), filters='indata', **self.attr_f)
    env.add_task('g', g, filters=dict(args=['f']))
    env.add_cotask('indata', coroutine_from_func(self.data_in.func))
    env.start()
    env.run()
    assert data_out.out == self.results
def test_main_coroutines_partial(self):
    """Push values by hand into the coroutine chain f -> g -> data_out."""
    sink = self.data_out.co()
    pipeline = coroutine_from_func(f)(coroutine_from_func(g)(sink))
    for value in self.data_in.gen():
        pipeline.send(value)
    assert self.data_out.out == self.results
def test_main_coroutines(self):
    """Run the full chain data_in -> f -> g -> data_out as coroutines.

    ``data_in.co(...)`` wraps the coroutine chain in a driver iterator;
    exhausting it pushes every input through and fills ``data_out.out``.
    """
    f_co = coroutine_from_func(f)
    g_co = coroutine_from_func(g)
    co = self.data_in.co(f_co(g_co(self.data_out.co())))
    # Exhaust the driver.  The original used `co.next()`, which is
    # Python-2-only; plain iteration works on 2.6+ and 3.x alike and
    # handles StopIteration implicitly.
    for _ in co:
        pass
    assert self.data_out.out == self.results
def test_DataflowEnvironment4(self):
    """Named tasks registered in execution order.

    indata (coroutine) -> f (coroutine) -> g (function)
    -> terminal (function, writes into data_out).
    """
    data_out = self.data_out
    env = dfp.DataflowEnvironment()
    env.add_cotask('indata', coroutine_from_func(self.data_in.func))
    env.add_cotask('f', coroutine_from_func(f), filters='indata', **self.attr_f)
    env.add_task('g', g, filters=dict(args=['f']))
    env.add_task('terminal', data_out.func, filters=dict(args=['g']))
    env.start()
    env.run()
    assert data_out.out == self.results
def test_DataflowEnvironment1(self):
    """Use a generator task as the data source.

    indata (generator, marked ``initial``) -> f (coroutine)
    -> g (function) -> terminal (function, writes into data_out).
    """
    data_out = self.data_out
    env = dfp.DataflowEnvironment()
    env.add_gentask('indata', self.data_in.gen, initial=True)
    env.add_cotask('f', coroutine_from_func(f), filters='indata', **self.attr_f)
    env.add_task('g', g, filters=dict(args=['f']))
    env.add_task('terminal', data_out.func, filters=dict(args=['g']))
    env.start()
    env.run()
    assert data_out.out == self.results
def test_DataflowEnvironment_as_Generator_Chained(self):
    """Chain a DataflowEnvironment generator between two other generators.

    ``dtf.gen(data_in.gen())`` consumes the input generator and is itself
    consumed by ``data_out.gen``; exhausting the outer generator fills
    ``data_out.out``.
    """
    attr_f = self.attr_f
    data_in = self.data_in
    data_out = self.data_out
    f_co = coroutine_from_func(f)
    dtf = dfp.DataflowEnvironment()
    dtf.add_cotask(f_co, filters='call_args', **attr_f)
    dtf.add_task(g, filters=dict(args=[f_co]))
    dtf.add_edge_call_rets(g)
    dtf.start()
    gen = data_out.gen(dtf.gen(data_in.gen()))
    # Exhaust the chained generator.  The original used `gen.next()`,
    # which is Python-2-only; plain iteration works on 2.6+ and 3.x and
    # handles StopIteration implicitly.
    for _ in gen:
        pass
    assert data_out.out == self.results
def test_coroutine_from_func():
    """Wrapping f as a coroutine and back into a function is an identity."""
    round_tripped = func_from_coroutine(coroutine_from_func(f))
    for x in range(10):
        assert round_tripped(x) == f(x)
def test_coroutine_from_callable_obj():
    """A callable object (dfp.Lag) can be wrapped like a plain function."""
    lag_func = func_from_coroutine(coroutine_from_func(dfp.Lag(0)))
    # Lag(0) emits the previous input; the first call yields the seed 0.
    assert lag_func(10) == 0
    assert lag_func(11) == 10