def test_suppress_metric(self):
    """Check exception-suppression metrics appear only on operators that
    suppress exceptions (HASMETRIC_*), not on plain operators (NOMETRIC_*).

    The actual metric verification happens in self.check_suppress_metric,
    installed as the tester's local check.
    """
    schema = 'tuple<int32 a, int32 b, int32 c, int32 d>'
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    # Operators that do not suppress exceptions - must have no metric.
    st = op.Source(
        topo,
        kind='com.ibm.streamsx.topology.pytest.pysource::SparseTuple',
        schema=schema,
        name='NOMETRIC_ST')
    sf = op.Source(
        topo,
        kind='com.ibm.streamsx.topology.pysamples.sources::Range37',
        schema=schema,
        name='NOMETRIC_SF')
    s = st.stream.union({sf.stream})
    sm = op.Map('com.ibm.streamsx.topology.pysamples.positional::Noop',
                s,
                name='NOMETRIC_MF')
    sm = op.Map('com.ibm.streamsx.topology.pysamples.positional::AddSeq',
                sm.stream,
                name='NOMETRIC_MC')
    # Operators that suppress exceptions - must have the metric.
    schema = 'tuple<rstring a, int32 b>'
    ms = op.Source(
        topo,
        kind='com.ibm.streamsx.topology.pytest.pyexceptions::SuppressNextSource',
        schema=schema,
        name='HASMETRIC_S_1')
    mm = op.Map(
        kind='com.ibm.streamsx.topology.pytest.pyexceptions::SuppressMap',
        stream=ms.stream,
        name='HASMETRIC_M_0')
    mf = op.Map(
        kind='com.ibm.streamsx.topology.pytest.pyexceptions::SuppressFilter',
        stream=ms.stream,
        name='HASMETRIC_F_0')

    self.tester = Tester(topo)
    self.tester.local_check = self.check_suppress_metric
    # Add filters to avoid the test operators having
    # names of NOMETIC/HASMETRIC
    self.tester.tuple_count(sm.stream.filter(lambda _: True), 38)
    self.tester.tuple_count(ms.stream.filter(lambda _: True), 2)
    self.tester.tuple_count(mm.stream.filter(lambda _: True), 2)
    self.tester.tuple_count(mf.stream.filter(lambda _: True), 2)
    self.tester.test(self.test_ctxtype, self.test_config)
def test_spl(self):
    """ Test passing as an SPL parameter. """
    # Number of beacon tuples and the greeting value the submission
    # parameter is expected to resolve to at job-submission time.
    N = 22
    G = 'hey'
    # Random suffix so concurrent test runs do not share a topic.
    t = ''.join(random.choice('0123456789abcdef') for x in range(20))
    topic = 'topology/test/python/' + t
    topo = Topology()
    spTopic = topo.create_submission_parameter('mytopic')
    spGreet = topo.create_submission_parameter('greeting')
    # Parameters without defaults evaluate to None at declaration time.
    self.assertIsNone(spTopic())
    self.assertIsNone(spGreet())
    sch = StreamSchema('tuple<uint64 seq, rstring s>')
    b = op.Source(topo, "spl.utility::Beacon", sch,
                  params={'initDelay': 10.0, 'period': 0.02, 'iterations': N})
    b.seq = b.output('IterationCount()')
    # Submission parameter used directly in an SPL output assignment.
    b.s = b.output(spGreet)
    p = op.Sink("com.ibm.streamsx.topology.topic::Publish", b.stream,
                params={'topic': topic})
    # Subscribe with the topic supplied as a submission parameter.
    s = op.Source(topo, "com.ibm.streamsx.topology.topic::Subscribe", sch,
                  params={'streamType': sch, 'topic': spTopic})
    jc = JobConfig()
    jc.submission_parameters['mytopic'] = topic
    jc.submission_parameters['greeting'] = G
    jc.add(self.test_config)
    tester = Tester(topo)
    tester.tuple_count(s.stream, N)
    #tester.run_for(300)
    tester.contents(s.stream, [{'seq': i, 's': G} for i in range(N)])
    tester.test(self.test_ctxtype, self.test_config)
def _build_launch_validate(self, name, composite_name, parameters, toolkit_name):
    """Build a topology that invokes *composite_name* as a source,
    launch it and verify it emits exactly one {'result': 'ok'} tuple.

    name -- test/topology name (used for logging only).
    composite_name -- fully qualified SPL composite to invoke.
    parameters -- SPL parameters passed to the composite.
    toolkit_name -- toolkit added via self._add_toolkits.
    """
    print("------ " + name + " ------")
    topo = Topology(name)
    self._add_toolkits(topo, toolkit_name)
    #if name == 'test_content_ranking':
    #    topo.add_pip_package('Theano==0.8.2')
    #    topo.add_pip_package('Keras==1.0.7')
    params = parameters
    # Call the test composite
    test_op = op.Source(topo, composite_name, 'tuple<rstring result>',
                        params=params)
    tester = Tester(topo)
    tester.tuple_count(test_op.stream, 1, exact=True)
    tester.contents(test_op.stream, [{'result': 'ok'}])
    cfg = {}
    # change trace level
    job_config = streamsx.topology.context.JobConfig(tracing='info')
    job_config.add(cfg)
    if ("TestICP" in str(self)):
        # NOTE(review): presumably the ICP endpoint uses a self-signed
        # certificate, hence SSL verification is disabled.
        cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
    tester.test(self.test_ctxtype, cfg)
    print(str(tester.result))
def test_enter_exit(self):
    """Verify __enter__/__exit__ calls on operators across consistent
    region resets.

    BUG FIX: the original eventual_result lambdas referenced the
    undefined name ``Fale`` instead of ``False``; any tuple hitting that
    branch would have raised NameError rather than failing the condition.
    The two identical lambdas are also replaced by one named helper.
    """
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    source = op.Source(
        topo,
        'com.ibm.streamsx.topology.pytest.checkpoint::EnterExitSource',
        schema.StreamSchema('tuple<rstring from, int32 enter, int32 exit>').as_tuple(),
        params={'period': 0.1})
    source.stream.set_consistent(
        ConsistentRegionConfig.periodic(5, drain_timeout=40, reset_timeout=40,
                                        max_consecutive_attempts=6))
    transit = op.Map(
        'com.ibm.streamsx.topology.pytest.checkpoint::EnterExitMap',
        source.stream,
        schema.StreamSchema('tuple<rstring from, int32 enter, int32 exit>').as_tuple())

    tester = Tester(topo)
    tester.resets(10)

    # On each operator, __enter__ and __exit__ should be called once for
    # each reset. Also __enter__ should be called at startup and __exit__
    # at shutdown. It is hard to verify the final __exit__ call (and that
    # is handled by python rather than our code), so
    # the test is valid if the number of __enter__ calls is one more than
    # the number of resets, and the number of __exit__ calls is equal to
    # number of resets. The tuples on the two streams indicate the number
    # of times __enter__ and __exit__ have been called.
    # We are looking for two specific tuples:
    # ('source', 6, 5) and ('transit', 6, 5)
    def _enter_exit_ok(tuple_):
        # tuple_ is (name, enter_count, exit_count).
        if tuple_[1] >= 6 and tuple_[1] == tuple_[2] + 1:
            return True          # enough resets seen and counts consistent
        if tuple_[1] != tuple_[2] + 1:
            return False         # counts inconsistent: fail (was 'Fale')
        return None              # undecided, keep waiting

    tester.eventual_result(source.stream, _enter_exit_ok)
    tester.eventual_result(transit.stream, _enter_exit_ok)
    job_config = streamsx.topology.context.JobConfig(tracing='debug')
    job_config.add(self.test_config)
    tester.test(self.test_ctxtype, self.test_config)
def test_SPL_as_string(self):
    """An SPL stream converted with as_string() yields the Python-literal
    form of each tuple; eval() turns it back into a dict."""
    topo = Topology()
    beacon = op.Source(topo, "spl.utility::Beacon",
                       'tuple<uint64 seq, rstring b>',
                       params={'period': 0.02, 'iterations': 5})
    beacon.seq = beacon.output('IterationCount()')
    beacon.b = beacon.output('"str!"')

    # String form of each tuple, then back to a dict via eval.
    as_dicts = beacon.stream.as_string().map(lambda x: eval(x))

    expected = [{'seq': n, 'b': 'str!'} for n in range(5)]
    tester = Tester(topo)
    tester.contents(as_dicts, expected)
    tester.test(self.test_ctxtype, self.test_config)
def test_beacon(self):
    """Operator-driven consistent region using Beacon's triggerCount."""
    # An operator-driven consistent region can be used with a source
    # that supports it, such as Beacon
    iterations = 5000
    topo = Topology()
    beacon = op.Source(topo, "spl.utility::Beacon",
                       schema.StreamSchema('tuple<int32 f>').as_tuple(),
                       params={'iterations': iterations,
                               'period': 0.01,
                               # Beacon drains/checkpoints every 500 tuples.
                               'triggerCount': streamsx.spl.types.uint32(500)})
    beacon.f = beacon.output('(int32)IterationCount()')
    s = beacon.stream
    s.set_consistent(
        ConsistentRegionConfig.operator_driven(drain_timeout=40,
                                               reset_timeout=40,
                                               max_consecutive_attempts=4))
    tester = Tester(topo)
    # For operator-driven regions, the resetter uses a random interval
    # from 10-40 seconds for resets. Only one is likely to be completed
    # while processing tuples for this test.
    tester.resets(1)
    tester.tuple_count(s, iterations)
    # Schema was converted as_tuple(), so tuples arrive as (i,) pairs.
    tester.contents(s, list(zip(range(0, iterations))))
    tester.test(self.test_ctxtype, self.test_config)
def test_mixed_toolkits(self):
    """Chain operators from two different test toolkits (testtkpy and
    tk17) and verify the combined output tuple."""
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('tk17'))
    data = ['A']
    bop = op.Source(topo, "spl.utility::Beacon", 'tuple<rstring a>',
                    {'iterations': 1})
    bop.a = bop.output('"A"')
    # testtkpy operator adds version attributes v1/v2.
    sv = op.Map("com.ibm.streamsx.topology.pytest.pyvers::StreamsxVersion",
                bop.stream,
                'tuple<rstring a, rstring v1, rstring v2>')
    # tk17 function operator adds f1/f2.
    m17f = op.Map(
        "com.ibm.streamsx.topology.pytest.tk17::M17F",
        sv.stream,
        'tuple<rstring a, rstring v1, rstring v2, rstring f1, rstring f2>')
    # tk17 class operator adds c1/c2 plus the x parameter value.
    m17c = op.Map(
        "com.ibm.streamsx.topology.pytest.tk17::M17C",
        m17f.stream,
        'tuple<rstring a, rstring v1, rstring v2, rstring f1, rstring f2, rstring c1, rstring c2, int32 x>',
        {'x': 27})
    tester = Tester(topo)
    tester.contents(m17c.stream, [{
        'a': 'A',
        'f1': '1.7',
        'f2': 'F',
        'v1': 'aggregate',
        'v2': 'True',
        'c1': '1.7',
        'c2': 'C',
        'x': 27
    }])
    tester.test(self.test_ctxtype, self.test_config)
def test_no_default_hash(self):
    """An SPL-schema stream has no default hash function, so requesting
    HASH_PARTITIONED parallelism must raise NotImplementedError."""
    topo = Topology('test_SPLBeaconFilter')
    beacon = op.Source(topo, "spl.utility::Beacon", 'tuple<uint64 seq>',
                       params={'period': 0.2, 'iterations': 100})
    beacon.seq = beacon.output('IterationCount()')
    with self.assertRaises(NotImplementedError):
        beacon.stream.parallel(3, Routing.HASH_PARTITIONED)
def test_sliding_count(self):
    """Sliding window of the last 4 tuples, triggered every *step* tuples,
    aggregated with Sum and Max; run for step 1 and 3."""
    for step in [1, 3]:
        with self.subTest(step=step):
            topo = Topology()
            b = op.Source(topo, "spl.utility::Beacon", 'tuple<uint64 seq>',
                          params={'iterations': 12})
            b.seq = b.output('IterationCount()')
            s = b.stream
            agg = op.Map('spl.relational::Aggregate',
                         s.last(4).trigger(step),
                         schema='tuple<uint64 sum, uint64 max>')
            agg.sum = agg.output('Sum(seq)')
            agg.max = agg.output('Max(seq)')

            # Each trigger at sequence i aggregates the window
            # [i-3, i]; the loop mirrors the trigger positions.
            expected = []
            for i in range(4 + step - 2, 12, step):
                expected.append({
                    'sum': sum(range(i - 3, i + 1)),
                    'max': i
                })

            tester = Tester(topo)
            tester.contents(agg.stream, expected)
            tester.test(self.test_ctxtype, self.test_config)
def test_filter_map(self):
    """Stateful Python filter and map operators under periodic
    checkpointing (1 second period)."""
    topo = Topology()
    topo.checkpoint_period = timedelta(seconds=1)
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    timeCounter = op.Source(
        topo,
        "com.ibm.streamsx.topology.pytest.checkpoint::TimeCounter",
        schema.StreamSchema('tuple<int32 f>').as_tuple(),
        params={'iterations': 30, 'period': 0.1})
    evenFilter = op.Map(
        "com.ibm.streamsx.topology.pytest.checkpoint::StatefulEvenFilter",
        timeCounter.stream, None, params={})
    hpo = op.Map(
        "com.ibm.streamsx.topology.pytest.checkpoint::StatefulHalfPlusOne",
        evenFilter.stream, None, params={})
    s = hpo.stream
    tester = Tester(topo)
    # 30 inputs -> 15 even values -> half-plus-one yields 1..15
    # (as 1-element tuples since the schema was converted as_tuple()).
    tester.tuple_count(s, 15)
    tester.contents(s, list(zip(range(1, 16))))
    tester.test(self.test_ctxtype, self.test_config)
def _build_launch_validate(self, name, composite_name, parameters,
                           toolkit_name, num_tuples, exact):
    """Build a topology around *composite_name*, run it for up to 120s
    and verify the result-tuple count.

    num_tuples -- expected number of 'tuple<rstring result>' tuples.
    exact -- if True require exactly num_tuples, else at least.
    """
    print("------ " + name + " ------")
    topo = Topology(name)
    self._add_toolkits(topo, toolkit_name)
    if self.es_keystore is not None:
        # Ship the Event Store keystore file with the application.
        self._add_store_file(topo, self.es_keystore)
    #streamsx.spl.toolkit.add_toolkit_dependency(topo, 'com.ibm.streamsx.eventstore', '[2.4.1,3.0.0)')
    params = parameters
    # Call the test composite
    test_op = op.Source(topo, composite_name, 'tuple<rstring result>',
                        params=params)
    tester = Tester(topo)
    tester.run_for(120)
    tester.tuple_count(test_op.stream, num_tuples, exact=exact)
    cfg = {}
    # change trace level
    job_config = streamsx.topology.context.JobConfig(tracing='info')
    job_config.add(cfg)
    cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
    tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
    print(str(tester.result))
def test_source(self):
    """Declare an operator-driven consistent region on a Python source
    and require the tester run to report failure.

    NOTE(review): presumably operator-driven regions are unsupported for
    this source, hence assert_on_fail=False plus assertFalse.
    """
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    counter = op.Source(
        topo,
        "com.ibm.streamsx.topology.pytest.checkpoint::TimeCounter",
        schema.StreamSchema('tuple<int32 f>').as_tuple(),
        params={'iterations': 30, 'period': 0.1})
    counter.stream.set_consistent(
        ConsistentRegionConfig.operator_driven(drain_timeout=40,
                                               reset_timeout=40,
                                               max_consecutive_attempts=3))
    tester = Tester(topo)
    outcome = tester.test(self.test_ctxtype, self.test_config,
                          assert_on_fail=False)
    self.assertFalse(outcome)
def _build_launch_app(self, name, composite_name, parameters,
                      num_result_tuples, test_toolkit):
    """Build a topology around *composite_name*, run for 30 seconds and
    require at least *num_result_tuples* result tuples.

    Raises AssertionError (with collected application logs in the
    message) when the run fails.
    """
    print("------ " + name + " ------")
    topo = Topology(name)
    self._add_toolkits(topo, test_toolkit)
    params = parameters
    # Call the test composite
    test_op = op.Source(topo, composite_name, 'tuple<rstring result>',
                        params=params)
    self.tester = Tester(topo)
    self.tester.run_for(30)
    self.tester.tuple_count(test_op.stream, num_result_tuples, exact=False)
    cfg = {}
    # change trace level
    job_config = streamsx.topology.context.JobConfig(tracing='warn')
    job_config.add(cfg)
    if ("Cloud" not in str(self)):
        # NOTE(review): non-cloud targets presumably use self-signed certs.
        cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
    # Run the test
    test_res = self.tester.test(self.test_ctxtype, cfg, assert_on_fail=True,
                                always_collect_logs=True)
    print(str(self.tester.result))
    assert test_res, name + " FAILED (" + self.tester.result[
        "application_logs"] + ")"
def test_spl_ops(self):
    """HTTPTupleInjection: tuples POSTed via the REST form endpoint
    (exercised in self._form_inject) appear on the operator's output."""
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, TestEmSPL._TK)
    # Random context/name so endpoint paths are unique per run.
    context = _rand_path()
    name = _rand_path()
    schema = StreamSchema('tuple<int32 a, rstring b, boolean c>')
    params = {}
    params['port'] = 0  # let the operator pick a free port
    params['context'] = context
    if self._monitor:
        params['sslAppConfigName'] = self._monitor + '-streams-certs'
    inject = op.Source(topo,
                       'com.ibm.streamsx.inet.rest::HTTPTupleInjection',
                       schema, params, name)
    # Full and aliased injection paths used by the local check.
    self._path = '/' + context + '/' + name + '/ports/output/0/inject'
    self._alias = '/' + context + '/' + name + '/inject'
    self.tester = Tester(topo)
    self.tester.local_check = self._form_inject
    self.tester.contents(inject.stream, [{
        'a': 42,
        'b': 'HHGTTG',
        'c': True
    }, {
        'a': 93,
        'b': 'ABCDE',
        'c': False
    }])
    self.tester.test(self.test_ctxtype, self.test_config)
    self._check_no_endpoint()
def test_sliding_count_stv_no_default(self):
    """Sliding window whose size comes from a submission-time value with
    no default; the job config supplies count=4."""
    step = 1
    topo = Topology()
    b = op.Source(topo, "spl.utility::Beacon", 'tuple<uint64 seq>',
                  params={'iterations': 12})
    b.seq = b.output('IterationCount()')
    s = b.stream
    # Window size is a submission parameter (set to 4 below).
    count = topo.create_submission_parameter('count', type_=int)
    window = s.last(count).trigger(step)
    agg = op.Map('spl.relational::Aggregate', window,
                 schema='tuple<uint64 sum, uint64 max>')
    agg.sum = agg.output('Sum(seq)')
    agg.max = agg.output('Max(seq)')

    # Expected values assume the submitted window size of 4.
    expected = []
    for i in range(4 + step - 2, 12, step):
        expected.append({'sum': sum(range(i - 3, i + 1)), 'max': i})

    jc = JobConfig()
    jc.submission_parameters['count'] = 4
    jc.add(self.test_config)

    tester = Tester(topo)
    tester.contents(agg.stream, expected)
    tester.test(self.test_ctxtype, self.test_config)
def test_spl_default(self):
    """ Test passing as with default using SPL """
    N = 27
    G = 'hey there'
    t = ''.join(random.choice('0123456789abcdef') for x in range(20))
    topic = 'topology/test/python/' + t
    topo = Topology()
    # Submission parameter with a default; no value is supplied at
    # submit time so the default G is used.
    spGreet = topo.create_submission_parameter('greeting', default=G)
    # Still None at declaration time even with a default.
    self.assertIsNone(spGreet())
    sch = StreamSchema('tuple<uint64 seq, rstring s>')
    b = op.Source(topo, "spl.utility::Beacon", sch,
                  params={'initDelay': 10.0, 'period': 0.02, 'iterations': N})
    b.seq = b.output('IterationCount()')
    b.s = b.output(spGreet)
    tester = Tester(topo)
    tester.tuple_count(b.stream, N)
    tester.contents(b.stream, [{'seq': i, 's': G} for i in range(N)])
    tester.test(self.test_ctxtype, self.test_config)
def test_sliding_time_stv_no_default(self):
    """Time-based sliding window whose duration comes from a
    submission-time value with no default (secs=2 supplied at submit).

    Only the tuple count is checked; time-based window contents are not
    deterministic.
    """
    topo = Topology()
    b = op.Source(topo, "spl.utility::Beacon", 'tuple<uint64 seq>',
                  params={'iterations': 12})
    b.seq = b.output('IterationCount()')
    s = b.stream
    # Window duration (seconds) supplied at submission time.
    wtime = topo.create_submission_parameter(name='secs', type_=int)
    window = s.lastSeconds(wtime).trigger(1)
    agg = op.Map('spl.relational::Aggregate', window,
                 schema='tuple<uint64 sum, uint64 max>')
    agg.sum = agg.output('Sum(seq)')
    agg.max = agg.output('Max(seq)')

    jc = JobConfig()
    jc.submission_parameters['secs'] = 2
    jc.add(self.test_config)

    tester = Tester(topo)
    tester.tuple_count(agg.stream, 12)
    tester.test(self.test_ctxtype, self.test_config)
def test_source(self):
    """Python source inside a periodic consistent region survives resets
    and still delivers every tuple exactly once."""
    iterations = 3000
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    bop = op.Source(
        topo,
        "com.ibm.streamsx.topology.pytest.checkpoint::TimeCounter",
        schema.StreamSchema('tuple<int32 f>').as_tuple(),
        params={'iterations': iterations, 'period': 0.01})
    s = bop.stream
    s.set_consistent(
        ConsistentRegionConfig.periodic(5, drain_timeout=40,
                                        reset_timeout=40,
                                        max_consecutive_attempts=6))
    tester = Tester(topo)
    # Default number of resets requested from the resetter.
    tester.resets()
    tester.tuple_count(s, iterations)
    # Schema was converted as_tuple(), so tuples arrive as (i,) pairs.
    tester.contents(s, list(zip(range(0, iterations))))
    # job_config = streamsx.topology.context.JobConfig(tracing='debug')
    # job_config.add(self.test_config)
    tester.test(self.test_ctxtype, self.test_config)
def _subscribe(self, topo, topic, direct=True, drop=None, filtered=False,
               extra=None):
    """Create a Subscribe (or FilteredSubscribe) operator for *topic*.

    direct -- when False, use a buffered connection instead of direct.
    drop -- congestion policy name (e.g. 'DropFirst') appended to 'Sys.'
            for the buffer-full policy; when set, the returned stream is
            passed through the slowme filter to induce back-pressure.
    filtered -- use FilteredSubscribe instead of Subscribe.
    extra -- additional operator parameters merged in.
    Returns the subscribed stream.
    """
    s = op.Source(
        topo,
        "com.ibm.streamsx.topology.topic::FilteredSubscribe"
        if filtered else "com.ibm.streamsx.topology.topic::Subscribe",
        SCHEMA,
        params={'topic': topic, 'streamType': SCHEMA},
        name='MSS')
    if extra:
        s.params.update(extra)
    if not direct:
        s.params['connect'] = op.Expression.expression(
            'com.ibm.streamsx.topology.topic::Buffered')
        if drop:
            s.params['bufferFullPolicy'] = op.Expression.expression(
                'Sys.' + drop)
        # Buffered + drop policy: slow the consumer to exercise the buffer.
        return s.stream.filter(slowme)
    return s.stream
def test_map_attr(self):
    """Test a Source and a Map operator. Including an output clause.
    """
    topo = Topology('test_SPLBeaconFilter')
    src = op.Source(topo, "spl.utility::Beacon", 'tuple<uint64 seq>',
                    params={'period': 0.02, 'iterations': 27})
    src.seq = src.output('IterationCount()')

    # Functor keeps every 4th sequence value and renames seq -> a.
    functor = op.Map(
        'spl.relational::Functor', src.stream, schema='tuple<uint64 a>',
        params={'filter': op.Expression.expression('seq % 4ul == 0ul')})
    functor.a = functor.output(functor.attribute('seq'))

    values = functor.stream.map(lambda t: t['a'])

    tester = Tester(topo)
    tester.contents(values, list(range(0, 28, 4)))
    tester.test(self.test_ctxtype, self.test_config)
def test_stream_alias(self):
    """
    test a stream alias to ensure the SPL expression
    is consistent with hand-coded SPL expression.
    """
    topo = Topology('test_stream_alias')
    s = op.Source(topo, "spl.utility::Beacon", 'tuple<uint64 seq>',
                  params={'period': 0.02, 'iterations': 27},
                  name='SomeName')
    s.seq = s.output('IterationCount()')
    # Alias the stream as 'IN' so the filter expression below can
    # reference attributes as IN.seq, as hand-written SPL would.
    stream = s.stream.aliased_as('IN')
    f = op.Map(
        'spl.relational::Functor', stream, schema='tuple<uint64 a>',
        params={'filter': op.Expression.expression('IN.seq % 4ul == 0ul')})
    f.a = f.output(f.attribute('seq'))
    # Note: rebinds s from the Source operator to the mapped stream.
    s = f.stream.map(lambda x: x['a'])
    tester = Tester(topo)
    tester.contents(s, [0, 4, 8, 12, 16, 20, 24])
    tester.test(self.test_ctxtype, self.test_config)
def test_published_topics(self):
    """Test a published stream is available through get_published_topics.
    """
    # Random topic names so concurrent runs do not collide.
    tspl = ''.join(random.choice('0123456789abcdef') for x in range(20))
    tpy = ''.join(random.choice('0123456789abcdef') for x in range(20))
    self.topic_spl = 'topology/test/python/' + tspl
    self.topic_python = 'topology/test/python/' + tpy
    self.assertNotEqual(self.topic_spl, self.topic_python)

    topo = Topology()
    beacon = op.Source(topo, "spl.utility::Beacon", 'tuple<uint64 seq>',
                       params={'period': 0.02})
    beacon.seq = beacon.output('IterationCount()')
    # SPL-schema publish.
    beacon.stream.publish(topic=self.topic_spl)
    # Python-object publish.
    s = beacon.stream.map(lambda x: x)
    s.publish(topic=self.topic_python)

    # Publish twice to ensure its only listed once
    s = s.filter(lambda x: True)
    s.publish(topic=self.topic_python)

    self.tester = Tester(topo)
    # Actual topic verification happens in the local check.
    self.tester.local_check = self._check_topics
    self.tester.tuple_count(s, 100, exact=False)
    self.tester.test(self.test_ctxtype, self.test_config)
def test_mixed_types(self):
    """End-to-end DB2 round trip: create a table, insert a row, query it
    back and drop the table, using run_statement in its different modes
    (fixed sql, sql_params, sql_attribute)."""
    creds_file = os.environ['DB2_CREDENTIALS']
    with open(creds_file) as data_file:
        credentials = json.load(data_file)

    topo = Topology('test_mixed_types')

    if self.jdbc_toolkit_home is not None:
        streamsx.spl.toolkit.add_toolkit(topo, self.jdbc_toolkit_home)

    # Single trigger tuple carrying the values to insert.
    pulse = op.Source(topo, "spl.utility::Beacon",
                      'tuple<rstring A, rstring B>',
                      params={'iterations': 1})
    pulse.A = pulse.output('"hello"')
    pulse.B = pulse.output('"world"')

    sample_schema = StreamSchema('tuple<rstring A, rstring B>')
    query_schema = StreamSchema('tuple<rstring sql>')

    sql_create = 'CREATE TABLE RUN_SAMPLE (A CHAR(10), B CHAR(10))'
    create_table = db.run_statement(pulse.stream, credentials,
                                    schema=sample_schema, sql=sql_create)

    # Parameterized insert bound to the tuple attributes A, B.
    sql_insert = 'INSERT INTO RUN_SAMPLE (A, B) VALUES (?, ?)'
    inserts = db.run_statement(create_table, credentials,
                               schema=sample_schema, sql=sql_insert,
                               sql_params="A, B")

    # Statement carried in a stream attribute rather than a parameter.
    query = op.Map('spl.relational::Functor', inserts, schema=query_schema)
    query.sql = query.output('"SELECT A, B FROM RUN_SAMPLE"')

    res_sql = db.run_statement(query.stream, credentials,
                               schema=sample_schema, sql_attribute='sql')
    res_sql.print()

    sql_drop = 'DROP TABLE RUN_SAMPLE'
    drop_table = db.run_statement(res_sql, credentials, sql=sql_drop)

    tester = Tester(topo)
    tester.tuple_count(drop_table, 1)
    #tester.run_for(60)
    tester.test(self.test_ctxtype, self.test_config)
def main(): """ This demonstrates the invocation of SPL operators from the SPL standard toolkit. Example: python3 files.py Output: Capitalized words from the contents of files in /tmp/work """ # Create the container for the topology that will hold the streams of tuples. topo = Topology("Files") # Invoke an SPL DirectoryScan operator as a source. # This one scans /tmp/work for files. # Note the full kind of the operator is required. files = op.Source(topo, "spl.adapter::DirectoryScan", schema=CommonSchema.String, params = {'directory': '/tmp/work'}) # Follow it with a FileSource operator # If no schema is provided then the input schema is used. lines = op.Map("spl.adapter::FileSource", files.stream) # Feed the lines into a Python function lines = lines.stream.map(string.capwords) # Sink lines by printing each of its tuples to standard output lines.print() # Now execute the topology by submitting to a standalone context. streamsx.topology.context.submit("STANDALONE", topo.graph)
def test_SPL_as_json(self): topo = Topology() # 'tuple<uint64 seq, rstring b>', SEQ = typing.NamedTuple('SQNT', [('seq', int), ('b', str)]) b = op.Source(topo, "spl.utility::Beacon", schema=SEQ, params={ 'period': 0.02, 'iterations': 5 }) b.seq = b.output('(int64) IterationCount()') s = b.stream.as_json() tester = Tester(topo) tester.contents(s, [{ 'seq': 0, 'b': '' }, { 'seq': 1, 'b': '' }, { 'seq': 2, 'b': '' }, { 'seq': 3, 'b': '' }, { 'seq': 4, 'b': '' }]) tester.test(self.test_ctxtype, self.test_config)
def test_primitive_foreach(self):
    """Primitive Python map operator inside a periodic consistent region,
    with a downstream Verify sink checking the FizzBuzz output."""
    iterations = 3000
    topo = Topology()
    topo.checkpoint_period = timedelta(seconds=1)
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    timeCounter = op.Source(
        topo,
        "com.ibm.streamsx.topology.pytest.checkpoint::TimeCounter",
        schema.StreamSchema('tuple<int32 f>').as_tuple(),
        params={'iterations': iterations, 'period': 0.01})
    timeCounter.stream.set_consistent(
        ConsistentRegionConfig.periodic(5, drain_timeout=40,
                                        reset_timeout=40,
                                        max_consecutive_attempts=6))
    fizzbuzz = op.Map(
        "com.ibm.streamsx.topology.pytest.checkpoint::FizzBuzzPrimitive",
        timeCounter.stream,
        schema.StreamSchema('tuple<int32 f, rstring c>').as_tuple())
    # Verify sink checks the FizzBuzz values inside the application.
    verify = op.Sink("com.ibm.streamsx.topology.pytest.checkpoint::Verify",
                     fizzbuzz.stream)
    s = fizzbuzz.stream
    tester = Tester(topo)
    tester.resets()
    tester.tuple_count(s, iterations)
    tester.test(self.test_ctxtype, self.test_config)
def test_map_attr_opt(self):
    """Test a Source and a Map operator with optional types.
    Including with operator parameters and output clauses.
    Requires a Streams build that supports SPL optional types (4.3+).
    """
    # Skip unless the target Streams supports optional types.
    if ("TestSas" in str(self)) or 'CP4D_URL' in os.environ:
        print('Testing with Streams supporting optional types')
    else:
        if 'STREAMS_INSTALL' in os.environ:
            Tester.require_streams_version(self, '4.3')
        else:
            self.skipTest("STREAMS_INSTALL not set")
    topo = Topology('test_map_attr_opt')
    # Locate the testtkopt toolkit relative to this file.
    this_dir = os.path.dirname(os.path.realpath(__file__))
    spl_dir = os.path.join(os.path.dirname(os.path.dirname(this_dir)),
                           'spl')
    tk_dir = os.path.join(spl_dir, 'testtkopt')
    streamsx.spl.toolkit.add_toolkit(topo, tk_dir)
    schema = 'tuple<' \
        'rstring r, ' \
        'optional<rstring> orv, ' \
        'optional<rstring> ornv, ' \
        'int32 i32, ' \
        'optional<int32> oi32v, ' \
        'optional<int32> oi32nv>'
    # Source sets each attribute via a parameter; None and
    # streamsx.spl.types.null() both produce an SPL null.
    s = op.Source(topo, "testgen::TypeLiteralTester", schema,
                  params={
                      'r': 'a string',
                      'orv': 'optional string',
                      'ornv': None,
                      'i32': 123,
                      'oi32v': 456,
                      'oi32nv': streamsx.spl.types.null()
                  })
    # Functor flips the optional attributes: valued -> null, null -> valued.
    f = op.Map('spl.relational::Functor', s.stream, schema=schema)
    f.orv = f.output("null")
    f.ornv = f.output('"string value"')
    f.oi32v = f.output(streamsx.spl.types.null())
    f.oi32nv = f.output('789')
    tester = Tester(topo)
    tester.contents(s.stream, [{
        'r': 'a string',
        'orv': 'optional string',
        'ornv': None,
        'i32': 123,
        'oi32v': 456,
        'oi32nv': None
    }])
    tester.contents(f.stream, [{
        'r': 'a string',
        'orv': None,
        'ornv': 'string value',
        'i32': 123,
        'oi32v': None,
        'oi32nv': 789
    }])
    tester.test(self.test_ctxtype, self.test_config)
def beacon(topo, schema):
    """Return a stream of 100 Beacon tuples with the given schema.

    For the JSON schema the Beacon must assign the jsonString attribute;
    an empty JSON object is used.
    """
    src = op.Source(topo, "spl.utility::Beacon", schema,
                    params={'iterations': 100})
    if schema is cs.Json:
        src.jsonString = src.output('"{}"')
    return src.stream
def _test_instance_submit(self):
    """ Test submitting a bundle from an Instance.
    Tests all four mechanisms.
    """
    # Unique application name per run.
    sab_name = 'ISJ_'+uuid.uuid4().hex
    topo = topology.Topology(sab_name, namespace='myinstancens')
    s = op.Source(topo, "spl.utility::Beacon", 'tuple<uint64 seq>',
                  params={'period': 0.02, 'iterations': 100})
    s.seq = s.output('IterationCount()')
    f = op.Map('spl.relational::Filter', s.stream,
               params={'filter': op.Expression.expression('seq % 2ul == 0ul')})

    # Build a bundle (sab) plus its job-config overlay file.
    bb = streamsx.topology.context.submit('BUNDLE', topo, {})
    self.assertIn('bundlePath', bb)
    self.assertIn('jobConfigPath', bb)

    sc = self.sc

    instances = sc.get_instances()
    if len(instances) == 1:
        instance = instances[0]
    else:
        instance = sc.get_instance(os.environ['STREAMS_INSTANCE_ID'])

    # Mechanism 1: submit the bundle path directly to the instance.
    job = instance.submit_job(bb['bundlePath'])
    self.assertIsInstance(job, Job)
    self.assertEqual('myinstancens::'+sab_name, job.applicationName)
    job.cancel()

    # Mechanism 2: submit the bundle path with an explicit JobConfig
    # (rebuilt from the generated overlay file, with a job name set).
    with open(bb['jobConfigPath']) as fp:
        jc = JobConfig.from_overlays(json.load(fp))
    jn = 'JN_'+uuid.uuid4().hex
    jc.job_name = jn
    job = instance.submit_job(bb['bundlePath'], jc)
    self.assertIsInstance(job, Job)
    self.assertEqual('myinstancens::'+sab_name, job.applicationName)
    self.assertEqual(jn, job.name)
    job.cancel()

    # Mechanism 3: upload the bundle first, then submit it.
    ab = instance.upload_bundle(bb['bundlePath'])
    self.assertIsInstance(ab, ApplicationBundle)
    job = ab.submit_job()
    self.assertIsInstance(job, Job)
    self.assertEqual('myinstancens::'+sab_name, job.applicationName)
    job.cancel()

    # Mechanism 4: submit the uploaded bundle with a JobConfig.
    jn = 'JN_'+uuid.uuid4().hex
    jc.job_name = jn
    job = ab.submit_job(jc)
    self.assertIsInstance(job, Job)
    self.assertEqual('myinstancens::'+sab_name, job.applicationName)
    self.assertEqual(jn, job.name)
    job.cancel()

    os.remove(bb['bundlePath'])
    os.remove(bb['jobConfigPath'])
def test_fn_source(self):
    """The Range37 Python function source emits the values 0..36 as
    single-attribute tuples."""
    expected_count = 37
    topo = Topology()
    streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
    src = op.Source(
        topo,
        "com.ibm.streamsx.topology.pysamples.sources::Range37",
        schema.StreamSchema('tuple<int64 c>').as_tuple())
    self.tester = Tester(topo)
    self.tester.tuple_count(src.stream, expected_count)
    self.tester.contents(src.stream,
                         [(i,) for i in range(expected_count)])
    self.tester.test(self.test_ctxtype, self.test_config)