def test_verify_no_pint(self):
    """Verify pint is not installed on the service."""
    topo = Topology()
    pint_check = topo.source(down_a_pint_source)
    tester = Tester(topo)
    tester.contents(pint_check, ['NoPintsForYou'])
    tester.test(self.test_ctxtype, self.test_config)
def test_fan_in_out(self):
    """Test pending connection fan in/out."""
    data1 = ['A1','B1', 'A2', 'A3', 'C1', 'C1']
    data2 = ['X','Y', 'Z', 'Q', 'T', 'X']
    all_data = data1 + data2
    # Expected outputs for each of the three derived streams.
    expected_pc = [e + "PC" for e in all_data]
    expected_cp = ["CP" + e for e in all_data]
    expected_su = ["SU" + e + "US" for e in all_data]

    topo = Topology()
    pending = PendingStream(topo)
    # Fan-out: two independent maps off the not-yet-complete stream.
    apc = pending.stream.map(lambda s : s + "PC")
    acp = pending.stream.map(lambda s : 'CP' + s)
    self.assertFalse(pending.is_complete())

    # Fan-in: union of two sources completes the pending stream.
    s1 = topo.source(data1)
    s2 = topo.source(data2)
    su = s1.union({s2})
    asu = su.map(lambda s : 'SU' + s + 'US')
    pending.complete(su)
    self.assertTrue(pending.is_complete())

    tester = Tester(topo)
    tester.contents(apc, expected_pc, ordered=False)
    tester.contents(acp, expected_cp, ordered=False)
    tester.contents(asu, expected_su, ordered=False)
    tester.test(self.test_ctxtype, self.test_config)
def test_blob_type(self):
    """Round-trip string data through SPL blob attributes."""
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, '../testtkpy')
    data = ['Hello', 'Blob', 'Did', 'you', 'reset' ]
    strings = topo.source(data).as_string()

    toBlob = op.Map(
        "com.ibm.streamsx.topology.pytest.pytypes::ToBlob",
        strings,
        'tuple<blob b>')

    toBlob = op.Map(
        "com.ibm.streamsx.topology.pysamples.positional::Noop",
        toBlob.stream,
        'tuple<blob b>')

    bt = op.Map(
        "com.ibm.streamsx.topology.pytest.pytypes::BlobTest",
        toBlob.stream,
        'tuple<rstring string>',
        {'keep': True})

    # Second BlobTest invocation (keep=False) is added to the graph for its
    # side effects; its output stream is intentionally not checked.
    bt2 = op.Map(
        "com.ibm.streamsx.topology.pytest.pytypes::BlobTest",
        toBlob.stream,
        'tuple<rstring string>',
        {'keep': False})

    tester = Tester(topo)
    tester.contents(bt.stream, data)
    self.test_config['topology.keepArtifacts'] = True
    tester.test(self.test_ctxtype, self.test_config)
def test_StringHash(self):
    """Test hashing works when the schema is tuple<rstring string>."""
    # Build a shuffled data set with duplicates (7 copies of 20 random strings).
    raw = []
    for length in range(20):
        raw.append(''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(length)))
    data = []
    for _ in range(7):
        data.extend(raw)
    random.shuffle(data)

    for width in (1, 3):
        with self.subTest(width=width):
            topo = Topology("test_StringHash" + str(width))
            stream = topo.source(data).as_string()
            stream = stream.parallel(width, Routing.HASH_PARTITIONED)
            stream = stream.map(AddChannel())
            stream = stream.end_parallel()
            # Verify equal tuples always landed on the same channel.
            stream = stream.map(CheckSameChannel())
            tester = Tester(topo)
            # Ordering is only guaranteed with a single channel.
            tester.contents(stream, data, ordered=width==1)
            tester.test(self.test_ctxtype, self.test_config)
            print(tester.result)
def test_enter_exit(self):
    """Verify __enter__/__exit__ are invoked once per consistent-region reset.

    Bug fix: both eventual_result callbacks referenced the undefined name
    ``Fale`` (a typo for ``False``); evaluating them would raise NameError
    instead of reporting a failed condition.
    """
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    source = op.Source(topo, 'com.ibm.streamsx.topology.pytest.checkpoint::EnterExitSource', schema.StreamSchema('tuple<rstring from, int32 enter, int32 exit>').as_tuple(), params={'period':0.1})
    source.stream.set_consistent(ConsistentRegionConfig.periodic(5, drain_timeout=40, reset_timeout=40, max_consecutive_attempts=6))
    transit = op.Map('com.ibm.streamsx.topology.pytest.checkpoint::EnterExitMap', source.stream, schema.StreamSchema('tuple<rstring from, int32 enter, int32 exit>').as_tuple())

    tester = Tester(topo)
    tester.resets(10)

    # On each operator, __enter__ and __exit__ should be called once for
    # each reset.  Also __enter__ should be called at startup and __exit__
    # at shutdown.  It is hard to verify the final __exit__ call (and that
    # is handled by python rather than our code), so the test is valid if
    # the number of __enter__ calls is one more than the number of resets,
    # and the number of __exit__ calls is equal to the number of resets.
    # The tuples on the two streams report (name, enter_count, exit_count);
    # we are looking for ('source', 6, 5) and ('transit', 6, 5).
    def _enter_exit_check(tuple_):
        # False as soon as the enter == exit + 1 invariant is broken,
        # True once at least 6 enters have been seen, otherwise undecided.
        if tuple_[1] != tuple_[2] + 1:
            return False
        return True if tuple_[1] >= 6 else None

    tester.eventual_result(source.stream, _enter_exit_check)
    tester.eventual_result(transit.stream, _enter_exit_check)

    job_config = streamsx.topology.context.JobConfig(tracing='debug')
    job_config.add(self.test_config)
    tester.test(self.test_ctxtype, self.test_config)
def test_good(self):
    """Check conversion of Python values to each supported SPL type."""
    for dt in SPL_TYPES:
        if dt not in GOOD_DATA:
            continue
        data = GOOD_DATA[dt]
        topo = Topology()
        schema = StreamSchema('tuple<' + dt + ' a>')
        source = topo.source(data)
        converted = source.map(lambda x : (x,), schema=schema)
        #converted.print(tag=dt)

        # Compute the expected Python-side value for each SPL type.
        if dt.startswith('float'):
            expected = [{'a': float(d)} for d in data]
        elif dt.startswith('int'):
            expected = [{'a': int(d)} for d in data]
        elif dt == 'decimal32':
            ctx = decimal.Context(prec=7, rounding=decimal.ROUND_HALF_EVEN)
            expected = [{'a': decimal.Decimal(str(d)).normalize(ctx)} for d in data]
        elif dt == 'decimal64':
            ctx = decimal.Context(prec=16, rounding=decimal.ROUND_HALF_EVEN)
            expected = [{'a': decimal.Decimal(str(d)).normalize(ctx)} for d in data]
        elif dt == 'decimal128':
            ctx = decimal.Context(prec=34, rounding=decimal.ROUND_HALF_EVEN)
            expected = [{'a': decimal.Decimal(str(d)).normalize(ctx)} for d in data]
        elif dt.startswith('complex'):
            expected = [{'a': complex(d)} for d in data]
        elif dt == 'timestamp':
            expected = [{'a': d if isinstance(d, Timestamp) else Timestamp.from_datetime(d)} for d in data]

        tester = Tester(topo)
        tester.tuple_count(converted, len(data))
        tester.contents(converted, expected)
        tester.test(self.test_ctxtype, self.test_config)
class TestSources(unittest.TestCase):
    """Test @spl.source decorated operators."""

    @classmethod
    def setUpClass(cls):
        """Extract Python operators in toolkit"""
        streamsx.scripts.extract.main(['-i', '../testtkpy', '--make-toolkit'])

    def setUp(self):
        Tester.setup_standalone(self)

    def test_class_source(self):
        """Source implemented as a class produces the expected range."""
        count = 43
        topo = Topology()
        streamsx.spl.toolkit.add_toolkit(topo, '../testtkpy')
        bop = op.Source(topo, "com.ibm.streamsx.topology.pysamples.sources::Range", schema.StreamSchema('tuple<int64 c>').as_tuple(), params={'count':count})
        r = bop.stream
        self.tester = Tester(topo)
        self.tester.tuple_count(r, count)
        # zip of a single iterable yields 1-tuples, matching the tuple schema.
        self.tester.contents(r, list(zip(range(count))))
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_fn_source(self):
        """Source implemented as a function produces the expected range."""
        count = 37
        topo = Topology()
        streamsx.spl.toolkit.add_toolkit(topo, '../testtkpy')
        bop = op.Source(topo, "com.ibm.streamsx.topology.pysamples.sources::Range37", schema.StreamSchema('tuple<int64 c>').as_tuple())
        r = bop.stream
        self.tester = Tester(topo)
        self.tester.tuple_count(r, count)
        self.tester.contents(r, list(zip(range(count))))
        self.tester.test(self.test_ctxtype, self.test_config)
def test_TopologyIndirectPackage(self):
    """A transform function imported via an indirect package works."""
    topo = Topology("test_TopologyIndirectPackage")
    hw = topo.source(["Hello", "World!"])
    transformed = hw.transform(test2_pkg_helpers.imported_package)
    tester = Tester(topo)
    tester.contents(transformed, ["HelloIP"])
    tester.test(self.test_ctxtype, self.test_config)
def test_strings(self):
    """Test strings that are unicode.

    Includes a stream name to verify it does not cause an error, but under
    the covers the actual name will be a mangled version of it since SPL
    identifiers are only ASCII.
    """
    topo = Topology()
    ud = [
        u'⡍⠔⠙⠖ ⡊ ⠙⠕⠝⠰⠞ ⠍⠑⠁⠝ ⠞⠕ ⠎⠁⠹ ⠹⠁⠞ ⡊ ⠅⠝⠪⠂ ⠕⠋ ⠍⠹',
        u'2H₂ + O₂ ⇌ 2H₂O, R = 4.7 kΩ, ⌀ 200 mm',
        u'многоязычных',
        "Arsenal hammered 5-1 by Bayern again",
    ]
    s = topo.source(ud, name=u'façade')
    sas = s.as_string()
    sd = s.map(lambda v : {'val': v + u"_test_it!"})
    tester = Tester(topo)
    tester.contents(s, ud)
    tester.contents(sas, ud)
    dud = [{'val': v + u"_test_it!"} for v in ud]
    tester.contents(sd, dud)
    tester.test(self.test_ctxtype, self.test_config)
def test_fetch_logs_on_failure(self):
    """Application logs are downloaded when a test fails."""
    topo = Topology("fetch_logs_on_failure")
    s = topo.source(["foo"])
    tester = Tester(topo)
    # Causes test to fail
    tester.contents(s, ["bar"])

    try:
        self.tester = tester
        tester.local_check = self._can_retrieve_logs
        tester.test(self.test_ctxtype, self.test_config)
    except AssertionError:
        # This test is expected to fail, do nothing.
        pass

    # Check if logs were downloaded
    if self.can_retrieve_logs:
        logs = tester.result['application_logs']
        exists = os.path.isfile(logs)
        self.assertTrue(exists, "Application logs were not downloaded on test failure")
        if exists:
            os.remove(logs)
def test_TopologyImportPackage(self):
    """Source and filter callables from a nested package work."""
    topo = Topology("test_TopologyImportPackage")
    hw = topo.source(test_package.test_subpackage.test_module.SourceTuples(["Hello", "World!"]))
    filtered = hw.filter(test_package.test_subpackage.test_module.filter)
    tester = Tester(topo)
    tester.contents(filtered, ["Hello"])
    tester.test(self.test_ctxtype, self.test_config)
def test_TopologySourceItertools(self):
    """An itertools iterator can be used directly as a source."""
    topo = Topology('test_TopologySourceItertools')
    nines = topo.source(itertools.repeat(9, 3))
    nines = nines.filter(test_functions.check_asserts_disabled)
    tester = Tester(topo)
    tester.contents(nines, [9, 9, 9])
    tester.test(self.test_ctxtype, self.test_config)
def test_app_log(self):
    """Logging tuples (including non-ASCII text) does not break the flow."""
    topo = Topology()
    msgs = topo.source(['logmsg1', 'logmsg2你好'])
    msgs.for_each(_log_msg)
    tester = Tester(topo)
    tester.tuple_count(msgs, 2)
    tester.test(self.test_ctxtype, self.test_config)
def test_SPLHashFunc(self):
    """Test hashing works when the schema is a general SPL one
    using an explicit hash function.
    """
    # Shuffled data set with duplicates (7 copies of 20 random strings).
    raw = []
    for length in range(20):
        raw.append(''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(length)))
    data = []
    for _ in range(7):
        data.extend(raw)
    random.shuffle(data)

    for width in (1, 4):
        with self.subTest(width=width):
            topo = Topology("test_SPLHash" + str(width))
            s = topo.source(data).as_string()
            # Add a second attribute via SPL so the schema is non-trivial.
            f = op.Map('spl.relational::Functor', s, schema = 'tuple<rstring string, rstring s2>')
            f.s2 = f.output('string + "_1234"')
            s = f.stream
            s = s.parallel(width, Routing.HASH_PARTITIONED, s2_hash)
            s = s.map(AddChannel())
            s = s.end_parallel()
            # Same s2 value must always map to the same channel.
            s = s.map(CheckSameChannel(lambda t : t[0]['s2']))

            expected = [v + '_1234' for v in data]

            tester = Tester(topo)
            tester.contents(s, expected, ordered=width==1)
            tester.test(self.test_ctxtype, self.test_config)
            print(tester.result)
def test_mt(self):
    """Exercise multi-threaded tuple submission into Python operators."""
    topo = Topology()
    N = 1000
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))

    # Three beacons producing disjoint ranges [0,N), [N,2N), [2N,3N).
    b1 = op.Source(topo, "spl.utility::Beacon", schema.StreamSchema('tuple<int32 f>').as_tuple(), params={'iterations':N})
    b1.f = b1.output('(int32)IterationCount()')

    b2 = op.Source(topo, "spl.utility::Beacon", schema.StreamSchema('tuple<int32 f>').as_tuple(), params={'iterations':N})
    b2.f = b2.output(str(N) + ' + (int32)IterationCount()')

    b3 = op.Source(topo, "spl.utility::Beacon", schema.StreamSchema('tuple<int32 f>').as_tuple(), params={'iterations':N})
    b3.f = b3.output(str(2*N) + ' + (int32)IterationCount()')

    # low_latency + union forces concurrent submission into the same ops.
    s1 = b1.stream.low_latency()
    s2 = b2.stream.low_latency()
    s3 = b3.stream.low_latency()
    s = s1.union({s2, s3})

    f = op.Map("com.ibm.streamsx.topology.pytest.mt::MTFilter", s)
    m = op.Map("com.ibm.streamsx.topology.pytest.mt::MTMap", f.stream)
    op.Sink("com.ibm.streamsx.topology.pytest.mt::MTForEach", f.stream)

    cr = m.stream.flat_map()

    tester = Tester(topo)
    tester.tuple_count(m.stream, 3*N)
    tester.contents(cr, range(3*N), ordered=False)
    tester.test(self.test_ctxtype, self.test_config)
def test_WindowPunctuation(self):
    """Trigger an aggregation 4 times.

    Ensure that window punctuations are submitted each time by writing
    them to an output file, and then verifying that the file contains
    the correct contents.
    """
    topo = Topology()
    s = topo.source([1,2,3,4])

    # Aggregate and write to file.
    s = s.last(1).trigger(1).aggregate(lambda x: x[0]+7)

    # Ensure map/flat_map/filter passes window marks through.
    s = s.flat_map(lambda x : [x])
    s = s.filter(lambda x : True)
    s = s.map(lambda x : (x,), schema='tuple<int32 z>')

    op_params = {'file' : 'punct_file',
                 'writePunctuations' : True,
                 'flushOnPunctuation' : True}
    op.Sink("spl.adapter::FileSink", s, params = op_params)

    # Copy the config, since it's shared across all tests, and not every
    # test needs a data directory.
    cfg = self.test_config.copy()
    jc = context.JobConfig(data_directory=os.getcwd())
    jc.add(cfg)

    tester = Tester(topo)
    tester.test(self.test_ctxtype, cfg)

    path = os.path.join(os.getcwd(), 'punct_file')

    # Validate the contents of the file.
    with open(path, 'r') as f:
        file_contents = f.read()
        self.assertEqual(expected_contents, file_contents)

    os.remove(path)
def test_feedback_loop(self):
    """Build a feedback loop with DynamicFilter and a pending stream."""
    topo = Topology()

    data = ['A','B', 'A', 'A', 'X', 'C', 'C', 'D', 'A', 'A', 'E']
    expected = ['B', 'X', 'C', 'C', 'D', 'A', 'A', 'E']

    s = topo.source(data)
    # Slow down the source so the loop has time to feed keys back.
    s = s.filter(lambda t : time.sleep(1) or True).as_string()

    feedback = PendingStream(topo)

    df = op.Invoke(topo, 'spl.utility::DynamicFilter',
                   inputs = [s, feedback.stream],
                   schemas= [schema.CommonSchema.String])

    df.params['key'] = df.attribute(s, 'string')
    df.params['addKey'] = df.attribute(feedback.stream, 'string')

    delayed_out = op.Map('spl.utility::Delay', df.outputs[0], params={'delay': 0.05}).stream

    # Seeing 'X' enables 'A' via the feedback path.
    x = delayed_out.filter(lambda s : s == 'X').map(lambda s : 'A').as_string()
    i = topo.source(['B', 'X', 'C', 'D', 'E']).as_string()
    x = x.union({i})

    feedback.complete(x)

    result = delayed_out
    result.print()
    #streamsx.topology.context.submit('TOOLKIT', topo)

    tester = Tester(topo)
    tester.contents(result, expected)
    tester.test(self.test_ctxtype, self.test_config)
def test_mixed_toolkits(self):
    """Operators from two toolkits interoperate in one graph."""
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('tk17'))
    data = ['A']
    bop = op.Source(topo, "spl.utility::Beacon",
                    'tuple<rstring a>',
                    {'iterations': 1})
    bop.a = bop.output('"A"')

    sv = op.Map(
        "com.ibm.streamsx.topology.pytest.pyvers::StreamsxVersion",
        bop.stream,
        'tuple<rstring a, rstring v1, rstring v2>')

    m17f = op.Map(
        "com.ibm.streamsx.topology.pytest.tk17::M17F",
        sv.stream,
        'tuple<rstring a, rstring v1, rstring v2, rstring f1, rstring f2>')

    m17c = op.Map(
        "com.ibm.streamsx.topology.pytest.tk17::M17C",
        m17f.stream,
        'tuple<rstring a, rstring v1, rstring v2, rstring f1, rstring f2, rstring c1, rstring c2, int32 x>',
        {'x': 27})

    tester = Tester(topo)
    tester.contents(m17c.stream, [{'a':'A', 'f1':'1.7', 'f2':'F', 'v1':'aggregate', 'v2':'True', 'c1':'1.7', 'c2':'C', 'x':27}])
    tester.test(self.test_ctxtype, self.test_config)
def test_object_to_string(self):
    """Python objects map to their str() form for a String schema."""
    topo = Topology()
    objects = topo.source([93,'hello',True])
    as_strings = objects.map(lambda x : x, schema=CommonSchema.String)
    tester = Tester(topo)
    tester.contents(as_strings, ['93','hello','True'])
    tester.test(self.test_ctxtype, self.test_config)
def test_json_to_json(self):
    """A Json stream can be mapped to another Json stream."""
    topo = Topology()
    jsons = topo.source([{'a': True}, {'a': 8}, {'a': 'third'}]).as_json()
    mapped = jsons.map(lambda x : {'yy': x['a']}, schema=CommonSchema.Json)
    tester = Tester(topo)
    tester.contents(mapped, [{'yy': True}, {'yy': 8}, {'yy': 'third'}])
    tester.test(self.test_ctxtype, self.test_config)
def test_string_to_json(self):
    """String tuples can map to either JSON objects or bare JSON values."""
    topo = Topology()
    strings = topo.source(['a', 79, 'c']).as_string()
    mapped = strings.map(lambda x: x if x == 'c' else {'v': x + 'd'}, schema=CommonSchema.Json)
    tester = Tester(topo)
    tester.contents(mapped, [{'v': 'ad'}, {'v': '79d'}, 'c'])
    tester.test(self.test_ctxtype, self.test_config)
def test_string_to_schema_dict(self):
    """String tuples map to a structured schema via dict values."""
    topo = Topology()
    strings = topo.source(['a', 'b', 'c']).as_string()
    structured = strings.map(lambda x : {'z': x+'dict!'}, schema='tuple<rstring z>')
    tester = Tester(topo)
    tester.contents(structured, [{'z':'adict!'}, {'z':'bdict!'}, {'z':'cdict!'}])
    tester.test(self.test_ctxtype, self.test_config)
def test_string_to_schema(self):
    """String tuples map to a structured schema via tuple values."""
    topo = Topology()
    strings = topo.source(['a', 'b', 'c']).as_string()
    structured = strings.map(lambda x : (x+'struct!',), schema='tuple<rstring y>')
    tester = Tester(topo)
    tester.contents(structured, [{'y':'astruct!'}, {'y':'bstruct!'}, {'y':'cstruct!'}])
    tester.test(self.test_ctxtype, self.test_config)
def test_json_to_schema(self):
    """JSON tuples map to a structured schema via tuple values."""
    topo = Topology()
    jsons = topo.source([{'a':7}, {'b':8}, {'c':9}]).as_json()
    # Pull the single (key, value) pair out of each JSON object.
    structured = jsons.map(lambda x : (next(iter(x)), x[next(iter(x))]), schema='tuple<rstring y, int32 x>')
    tester = Tester(topo)
    tester.contents(structured, [{'y':'a', 'x':7}, {'y':'b', 'x':8}, {'y':'c', 'x':9}])
    tester.test(self.test_ctxtype, self.test_config)
def test_string_to_string(self):
    """String tuples map to new String tuples (values already str())."""
    topo = Topology()
    strings = topo.source([False, 'b', 19]).as_string()
    mapped = strings.map(lambda x: x + '3', schema=CommonSchema.String)
    tester = Tester(topo)
    tester.contents(mapped, ['False3', 'b3', '193'])
    tester.test(self.test_ctxtype, self.test_config)
def test_object_to_schema_dict(self):
    """Python objects map to a structured schema via dict values."""
    topo = Topology()
    numbers = topo.source([1,2,3])
    structured = numbers.map(lambda x : {'x':x}, schema='tuple<int32 x>')
    tester = Tester(topo)
    tester.contents(structured, [{'x':1}, {'x':2}, {'x':3}])
    tester.test(self.test_ctxtype, self.test_config)
def test_as_tuple_for_each(self):
    """for_each callables receive tuples of the expected form."""
    topo = Topology()
    stream = self._create_stream(topo)
    stream.for_each(check_is_tuple_for_each(self.is_named()))
    tester = Tester(topo)
    tester.tuple_count(stream, 3)
    tester.test(self.test_ctxtype, self.test_config)
def test_as_tuple_map_to_schema(self):
    """map callables receive tuples and can emit to an SPL schema."""
    topo = Topology()
    stream = self._create_stream(topo)
    mapped = stream.map(check_is_tuple_map_to_schema(self.is_named()), schema=StreamSchema('tuple<int32 y, rstring txt>'))

    tester = Tester(topo)
    tester.contents(mapped, [{'y':13, 'txt':'2Hi!-MapSPL'}, {'y':26, 'txt':'4Hi!-MapSPL'}, {'y':39, 'txt':'6Hi!-MapSPL'}])
    tester.test(self.test_ctxtype, self.test_config)
def test_json_to_string(self):
    """JSON tuples map to String tuples via str() of the value."""
    topo = Topology()
    jsons = topo.source([{'a': True}, {'a': 8}, {'a': 'third'}]).as_json()
    strings = jsons.map(lambda x : x['a'], schema=CommonSchema.String)
    tester = Tester(topo)
    tester.contents(strings, ['True', '8', 'third'])
    tester.test(self.test_ctxtype, self.test_config)
def test_object_to_json(self):
    """Python dicts pass through unchanged to a Json schema."""
    topo = Topology()
    objects = topo.source([{'a': 7}, {'b': 8}, {'c': 9}])
    jsons = objects.map(lambda x: x, schema=CommonSchema.Json)
    tester = Tester(topo)
    tester.contents(jsons, [{'a': 7}, {'b': 8}, {'c': 9}])
    tester.test(self.test_ctxtype, self.test_config)
def test_verify_operator_pip_install(self):
    """Verify pint is installed by the operator module."""
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy_pip_op'))
    trigger = topo.source(['a']).as_string()
    fp = op.Map("com.ibm.streamsx.topology.pytest.pypip::find_a_pint", trigger)
    tester = Tester(topo)
    tester.contents(fp.stream, ['RTOP_PintImported'])
    tester.test(self.test_ctxtype, self.test_config)
def test_structured_as_named_tuple(self):
    """Window aggregate callables see named tuples when named=True."""
    schema = StreamSchema("tuple<rstring a, int32 b>").as_tuple(named=True)
    topo = Topology()
    pairs = topo.source([('a',1),('b', 7),('c', 2),('d', 9)])
    pairs = pairs.map(lambda x: x, schema = schema)
    # Attribute access (items[i].a / .b) is the named-tuple behavior under test.
    agg = pairs.last(3).trigger(2).aggregate(lambda items: (items[1].a, items[0].b))
    tester = Tester(topo)
    tester.contents(agg, [('b',1), ('c',7)] )
    tester.test(self.test_ctxtype, self.test_config)
def test_NotByRefWindow(self):
    """Window aggregation works when tuples are not passed by reference."""
    topo = Topology()
    src = topo.source(['1','3','5','7'])
    # Used to prevent pass by ref for the source
    f = src.filter(lambda x: True)
    avg = src.last(3).trigger(4).aggregate(lambda win: int(sum([int(v) for v in win])/len(win)))
    tester = Tester(topo)
    tester.contents(avg, [5])
    tester.test(self.test_ctxtype, self.test_config)
def test_at_least(self):
    """ Test the at least tuple count.
    """
    # An endless source can never satisfy a standalone run to completion.
    if self.test_ctxtype == context.ContextTypes.STANDALONE:
        return unittest.skip("Standalone tests must complete")
    topo = Topology()
    randoms = topo.source(rands)
    tester = Tester(topo)
    tester.tuple_count(randoms, 100, exact=False)
    tester.test(self.test_ctxtype, self.test_config)
def test_structured_as_dict(self):
    """Window aggregate callables see dicts for structured schemas."""
    topo = Topology()
    pairs = topo.source([('a', 1), ('b', 7), ('c', 2), ('d', 9)])
    pairs = pairs.map(lambda x: x, schema="tuple<rstring a, int32 b>")
    # Key access (items[i]['a'] / ['b']) is the dict behavior under test.
    agg = pairs.last(3).trigger(2).aggregate(lambda items: (items[0]['a'], items[1]['b']))
    tester = Tester(topo)
    tester.contents(agg, [('a', 7), ('b', 2)])
    tester.test(self.test_ctxtype, self.test_config)
def test_BasicCountCountWindow_stv(self):
    """Window size supplied via a submission-time parameter."""
    topo = Topology()
    numbers = topo.source([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])
    count = topo.create_submission_parameter('count', 10)
    # Need a float cast to make the value consistent with Python 2/3
    averaged = numbers.last(count).trigger(2).aggregate(
        lambda x: float(sum(x)) / float(len(x)))
    tester = Tester(topo)
    tester.contents(averaged, [1.5, 2.5, 3.5, 4.5, 5.5, 7.5, 9.5])
    tester.test(self.test_ctxtype, self.test_config)
def test_as_tuple_map_to_json(self):
    """map callables receive tuples and can emit JSON."""
    topo = Topology()
    stream = self._create_stream(topo)
    stream = stream.map(check_is_tuple_map_to_json(self.is_named()), schema=CommonSchema.Json)
    stream = stream.map(lambda x: str(x['a']) + x['b'])
    tester = Tester(topo)
    tester.contents(stream, ['152Hi!-MapJSON', '304Hi!-MapJSON', '456Hi!-MapJSON'])
    tester.test(self.test_ctxtype, self.test_config)
def test_closure_vars(self):
    """Closure-captured locals and module globals are carried to operators."""
    topo = Topology("test_closure_vars")
    numbers = topo.source([1, 2, 3, 4, 5, 6, 7, 8, 9])
    cv_y = 7.0
    # First filter captures a local; second captures module globals g1..g4.
    numbers = numbers.filter(lambda x: x < cv_y)
    numbers = numbers.filter(lambda v: g1 == 3 and (not g2) and g3 == 9.7 and g4 == 'abcd')

    tester = Tester(topo)
    tester.contents(numbers, [1, 2, 3, 4, 5, 6])
    tester.test(self.test_ctxtype, self.test_config)
def test_NotebookDefaultNames(self):
    """ Test default topo names from a notebook
    """
    # Notebook-style names contain characters illegal in SPL identifiers;
    # they must be mangled rather than cause an error.
    topo = Topology(name='<module>', namespace='<ipython-input-1-e300f4c6abce>')
    hw = topo.source(["Hello", "Tester"])
    filtered = hw.filter(lambda x: True, name="One.A")
    filtered = filtered.filter(lambda x: True, name="Two.A")
    tester = Tester(topo)
    tester.contents(filtered, ["Hello", "Tester"])
    tester.test(self.test_ctxtype, self.test_config)
def test_checker(self):
    """ Test the per-tuple checker.
    """
    topo = Topology()
    values = topo.source(rands)
    values = values.filter(lambda r : r > 0.8)
    values = values.map(lambda r : r + 7.0 )
    tester = Tester(topo)
    tester.tuple_count(values, 200, exact=False)
    # Every surviving tuple must exceed 0.8 + 7.0.
    tester.tuple_check(values, lambda r : r > 7.8)
    tester.test(self.test_ctxtype, self.test_config)
def test_partition_batch_func(self):
    """Batched windows partitioned by a function aggregate per partition."""
    topo = Topology()
    numbers = topo.source([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])
    sums = numbers.batch(2).partition(ModTwo).aggregate(
        lambda items: (sum(item for item in items)))
    tester = Tester(topo)
    # Odd/even partitions each batch two tuples; 14 remains alone at the end.
    tester.contents(
        sums, [1 + 3, 2 + 4, 5 + 7, 6 + 8, 9 + 11, 10 + 12, 13 + 15, 14])
    tester.test(self.test_ctxtype, self.test_config)
def test_non_matching_output(self):
    """R.Filter.matching returns both matching and non-matching streams."""
    topo = Topology()
    seq = topo.source(U.Sequence(iterations=4))
    matches, non_matches = R.Filter.matching(seq, filter='seq<2ul', non_matching=True)
    tester = Tester(topo)
    # Of seq 0..3, exactly two match seq<2 and two do not.
    tester.tuple_count(matches, 2)
    tester.tuple_count(non_matches, 2)
    tester.test(self.test_ctxtype, self.test_config)
def _runTest(self, topo, s):
    """Shared helper: assert the stream produces exactly 10 tuples."""
    #s.print()
    #self.test_config = {
    #    ConfigParams.SSL_VERIFY: False,
    #    'topology.keepArtifacts': True
    #}
    tester = Tester(topo)
    tester.tuple_count(s, 10)
    tester.test(self.test_ctxtype, self.test_config)
def test_map_foreach(self):
    """Checkpointing works across source, map and sink SPL Python operators."""
    topo = Topology()
    topo.checkpoint_period = timedelta(seconds=1)
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    timeCounter = op.Source(topo, "com.ibm.streamsx.topology.pytest.checkpoint::TimeCounter", schema.StreamSchema('tuple<int32 f>').as_tuple(), params={'iterations':30,'period':0.1})
    fizzbuzz = op.Map("com.ibm.streamsx.topology.pytest.checkpoint::FizzBuzzMap", timeCounter.stream, schema.StreamSchema('tuple<int32 f, rstring c>').as_tuple())
    # Verify sink checks the fizzbuzz output; kept for its side effects.
    verify = op.Sink("com.ibm.streamsx.topology.pytest.checkpoint::Verify", fizzbuzz.stream)
    s = fizzbuzz.stream
    tester = Tester(topo)
    tester.tuple_count(s, 30)
    tester.test(self.test_ctxtype, self.test_config)
def test_no_func_flat_map(self):
    """flat_map with no function flattens each tuple's items (chars here)."""
    topo = Topology()
    words = topo.source(['World', 'Cup', '2018'])
    flat_default = words.flat_map()
    flat_named = words.flat_map(name='JustFlatten!')
    tester = Tester(topo)
    # 'World' + 'Cup' + '2018' flattens to 12 single characters.
    tester.contents(flat_default, 'WorldCup2018')
    tester.tuple_count(flat_default, 12)
    tester.contents(flat_named, 'WorldCup2018')
    tester.tuple_count(flat_named, 12)
    tester.test(self.test_ctxtype, self.test_config)
def test_source(self):
    """Checkpointing source operator produces the full expected sequence."""
    topo = Topology("test")
    topo.checkpoint_period = timedelta(seconds=1)
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    bop = op.Source(topo, "com.ibm.streamsx.topology.pytest.checkpoint::TimeCounter", schema.StreamSchema('tuple<int32 f>').as_tuple(), params={'iterations':30,'period':0.1})
    s = bop.stream
    tester = Tester(topo)
    tester.tuple_count(s, 30)
    #tester.contents(s, range(0,30)) # why doesn't this work?
    # zip of one iterable yields 1-tuples, matching the tuple schema.
    tester.contents(s, list(zip(range(0,30))))
    tester.test(self.test_ctxtype, self.test_config, always_collect_logs=True)
def test_TopologyFilter(self):
    """filter behaves identically for non_matching=False/None/omitted."""
    topo = Topology("test_TopologyFilter")
    hw = topo.source(['Hello', 'World'])
    hwf1 = hw.filter(lambda t: "Wor" in t, non_matching=False)
    hwf2 = hw.filter(lambda t: "Wor" in t, non_matching=None)
    hwf3 = hw.filter(lambda t: "Wor" in t)

    tester = Tester(topo)
    tester.contents(hwf1, ['World'])
    tester.contents(hwf2, ['World'])
    tester.contents(hwf3, ['World'])
    tester.test(self.test_ctxtype, self.test_config)
def test_functor_operator(self):
    """catch_exceptions drops the tuple whose cast ('five') fails."""
    topo = Topology('test_functor_operator')
    strings = topo.source(['0','1','2','3','4','five','6','7','8','9']).as_string()
    f = op.Map('spl.relational::Functor', strings, schema='tuple<int64 num>')
    f.num = f.output('(int64) string')
    num_stream = f.stream
    num_stream.catch_exceptions(tuple_trace=True)
    tester = Tester(topo)
    # One of the ten inputs ('five') cannot be cast and is dropped.
    tester.tuple_count(num_stream, 10-1)
    tester.test(self.test_ctxtype, self.test_config)
def test_enter_called(self):
    """__enter__ runs for each operator class before tuples flow."""
    self.assertFalse(streamsx.ec.is_active())
    topo = Topology()
    stream = topo.source(EcSource('A211'))
    stream = stream.filter(EcFilter('F243'))
    stream = stream.filter(lambda _ : streamsx.ec.is_active())
    stream.for_each(EcForEach())
    stream = stream.map(EcMap('M523'))
    stream = stream.map(EcDuplicateMetric())
    tester = Tester(topo)
    tester.contents(stream, [('A211', 'EcSource_enter', 'M523', 'EcMap_enter', 'METRIC1', True)])
    tester.test(self.test_ctxtype, self.test_config)
def test_at_least_no_tuples(self):
    """ Test at least count with zero tuples.
        (kind of a pointless condition, always true).
    """
    if self.test_ctxtype == context.ContextTypes.STANDALONE:
        return unittest.skip("Standalone tests must complete")
    topo = Topology()
    empty = topo.source([])
    tester = Tester(topo)
    tester.tuple_count(empty, 0, exact=False)
    tester.test(self.test_ctxtype, self.test_config)
def test_spray(self):
    """Spraying across 7 channels and re-unioning preserves all tuples."""
    topo = Topology()
    seq = U.sequence(topo, iterations=2442)
    outs = []
    for sprayed in U.spray(seq, count=7):
        outs.append(sprayed.map(lambda x : (x['seq'], x['ts']), schema=U.SEQUENCE_SCHEMA))

    merged = outs[0].union(set(outs))

    tester = Tester(topo)
    tester.tuple_count(merged, 2442)
    tester.test(self.test_ctxtype, self.test_config)
def test_batch_time(self):
    """Time-based batching delivers all tuples in timed batches."""
    topo = Topology()
    delayed = topo.source(lambda : map(_delay, range(50)), name='A')
    batches = delayed.batch(datetime.timedelta(seconds=2))
    aggregated = batches.aggregate(lambda x : x)
    flattened = aggregated.flat_map()
    tester = Tester(topo)
    tester.tuple_count(flattened, 50)
    # 50 tuples at ~0.2s each, plus headroom.
    tester.run_for((50*0.2) + 20)
    tester.tuple_check(aggregated, _BatchTimeCheck())
    tester.test(self.test_ctxtype, self.test_config)
def test_source(self):
    """Checkpointing source produces the full expected sequence."""
    topo = Topology()
    topo.checkpoint_period = timedelta(seconds=1)
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    bop = op.Source(topo, "com.ibm.streamsx.topology.pytest.checkpoint::TimeCounter", schema.StreamSchema('tuple<int32 f>').as_tuple(), params={'iterations':30,'period':0.1})
    # streamsx.topology.context.submit('TOOLKIT', topo)

    s = bop.stream
    tester = Tester(topo)
    tester.tuple_count(s, 30)
    # zip of one iterable yields 1-tuples, matching the tuple schema.
    tester.contents(s, list(zip(range(0,30))))
    tester.test(self.test_ctxtype, self.test_config)
def test_ClassCountCountWindow(self):
    """Sliding count window over class instances computes rolling averages."""
    topo = Topology()
    people = topo.source([['Wallace', 55], ['Copernicus', 544], ['Feynman', 99], ['Dirac', 115], ['Pauli', 117], ['Frenkel', 49], ['Terence Tao', 42]])
    people = people.map(lambda x: Person(x[0], x[1]))
    averages = people.last(3).trigger(1).aggregate(
        lambda window: int(sum([p.rough_birth_year() for p in window]) / len(window)))
    tester = Tester(topo)
    tester.contents(averages, [1962, 1717, 1784, 1764, 1906, 1923, 1947])
    tester.test(self.test_ctxtype, self.test_config)
def test_batch_time_stv(self):
    """Time-based batching with the period as a submission-time value."""
    topo = Topology()
    delayed = topo.source(lambda: map(_delay, range(50)), name='A')
    batches = delayed.batchSeconds(topo.create_submission_parameter('secs', 2))
    aggregated = batches.aggregate(lambda x: x)
    flattened = aggregated.flat_map()
    tester = Tester(topo)
    tester.tuple_count(flattened, 50)
    # 50 tuples at ~0.2s each, plus headroom.
    tester.run_for((50 * 0.2) + 20)
    #tester.tuple_check(aggregated, _BatchTimeCheck())
    tester.test(self.test_ctxtype, self.test_config)
def test_transform_schema(self):
    """Functor.map can extend the input schema with a new attribute."""
    topo = Topology()
    seq = topo.source(U.Sequence(iterations=10))
    extended_schema = U.SEQUENCE_SCHEMA.extend(StreamSchema('tuple<rstring a>'))
    fo = R.Functor.map(seq, extended_schema)
    fo.a = fo.output(fo.outputs[0], '"string value"')
    result = fo.outputs[0]
    result.print()
    tester = Tester(topo)
    tester.tuple_count(result, 10)
    tester.test(self.test_ctxtype, self.test_config)
def test_BasicCountTimeWindow(self):
    """Time-triggered window fires at the configured period."""
    # Aggregate every 0.5 seconds
    aggregate_period = 0.5

    # Check that each aggregation is triggered at the right time, with a maximum %20 error
    tolerance = 0.20

    topo = Topology()
    counter = topo.source(TimeCounter(iterations = 10))
    diffs = counter.last(1).trigger(datetime.timedelta(seconds=aggregate_period)).aggregate(TriggerDiff())

    tester = Tester(topo)
    tester.tuple_check(diffs, lambda val: within_tolerance(val, tolerance, aggregate_period))
    tester.test(self.test_ctxtype, self.test_config)
def test_as_json_as_string(self):
    """Values survive a JSON → string → Python round trip."""
    topo = Topology()
    values = topo.source(['JSON_STRING!', 89, list(('b', 93))])
    wrapped = values.map(lambda x : {'abc': x})
    wrapped = wrapped.as_json()
    wrapped = wrapped.as_string()
    # NOTE: eval on our own serialized output; safe only because the data
    # originates in this test.
    wrapped = wrapped.map(lambda x : eval(x))
    unwrapped = wrapped.map(lambda x : x['abc'])

    tester = Tester(topo)
    tester.contents(unwrapped, ['JSON_STRING!', 89, ['b', 93]])
    tester.test(self.test_ctxtype, self.test_config)
def test_SPL_as_json(self):
    """An SPL structured stream converts to JSON objects per attribute."""
    topo = Topology()
    beacon = op.Source(topo, "spl.utility::Beacon",
        'tuple<uint64 seq, rstring b>',
        params = {'period': 0.02, 'iterations':5})
    beacon.seq = beacon.output('IterationCount()')

    jsons = beacon.stream.as_json()

    tester = Tester(topo)
    # b is never assigned, so it takes the rstring default ''.
    tester.contents(jsons, [{'seq':0, 'b':''}, {'seq':1, 'b':''}, {'seq':2, 'b':''}, {'seq':3, 'b':''}, {'seq':4, 'b':''}])
    tester.test(self.test_ctxtype, self.test_config)
def test_dict_to_string(self):
    """Tuples produced from dicts convert to their str() repr for String."""
    topo = Topology()
    jsons = topo.source([{'a': 7}, {'b': 8}, {'c': 9}]).as_json()
    # Extract the single (key, value) pair from each JSON object.
    pairs = jsons.map(lambda x: (next(iter(x)), x[next(iter(x))]), schema='tuple<rstring y, int32 x>')
    strings = pairs.map(lambda x: (x['y'], x['x'] + 20), schema=CommonSchema.String)
    tester = Tester(topo)
    tester.contents(strings, ["('a', 27)", "('b', 28)", "('c', 29)"])
    tester.test(self.test_ctxtype, self.test_config)