    def test_insert_with_app_config(self):
        print('\n---------' + str(self))
        self._create_app_config()
        topo = Topology('test_insert_with_app_config')
        self._add_toolkits(topo, None)
        topo.add_pip_package('streamsx.eventstore')
        s = self._create_stream(topo)
        result_schema = StreamSchema(
            'tuple<int32 id, rstring name, boolean _Inserted_>')
        res = es.insert(
            s,
            config='eventstore',
            table='SampleTable',
            schema=result_schema,
            primary_key='id',
            partitioning_key='id',
            front_end_connection_flag=self.front_end_connection_flag,
            truststore=self.es_truststore,
            keystore=self.es_keystore)
        res.print()

        tester = Tester(topo)
        tester.run_for(120)
        tester.tuple_count(res, 20, exact=True)

        cfg = {}
        job_config = streamsx.topology.context.JobConfig(tracing='info')
        job_config.add(cfg)
        cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
        tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
        print(str(tester.result))
    def _build_launch_validate(self, name, composite_name, parameters,
                               toolkit_name, num_tuples, exact):
        print("------ " + name + " ------")
        topo = Topology(name)
        self._add_toolkits(topo, toolkit_name)
        if self.es_keystore is not None:
            self._add_store_file(topo, self.es_keystore)

        #streamsx.spl.toolkit.add_toolkit_dependency(topo, 'com.ibm.streamsx.eventstore', '[2.4.1,3.0.0)')

        params = parameters
        # Call the test composite
        test_op = op.Source(topo,
                            composite_name,
                            'tuple<rstring result>',
                            params=params)

        tester = Tester(topo)
        tester.run_for(120)
        tester.tuple_count(test_op.stream, num_tuples, exact=exact)

        cfg = {}

        # change trace level
        job_config = streamsx.topology.context.JobConfig(tracing='info')
        job_config.add(cfg)

        cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False

        tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
        print(str(tester.result))
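The snippets in this listing omit their import blocks. A partial sketch of the imports they appear to rely on is shown below; the aliases (es, inet, tk, spltypes) are assumptions inferred from the call sites rather than copied from the original test modules, and standard-library imports (os, time, uuid, itertools, datetime, filecmp, unittest, urllib3) are left out for brevity.

# Partial import sketch (an assumption, not the original test preamble).
# Note: 'es' refers to streamsx.eventstore in the Event Store tests and to
# streamsx.elasticsearch in the Elasticsearch tests further down.
from streamsx.topology.topology import Topology, Routing
from streamsx.topology.tester import Tester
from streamsx.topology.schema import StreamSchema, CommonSchema
from streamsx.topology.state import ConsistentRegionConfig
from streamsx.topology.context import ConfigParams
import streamsx.topology.context
import streamsx.spl.op as op
import streamsx.spl.toolkit
import streamsx.spl.types as spltypes
import streamsx.eventstore as es
import streamsx.inet as inet

tk = streamsx.spl.toolkit   # alias used by the _add_toolkits helpers below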
Example #3
class JmsTestDefinitions(TestCase):
    def test_text_message_class(self):
        topology = TopologyProvider.text_message_class_topology()
        self.tester = Tester(topology)
        self.tester.run_for(60)

        # Add the local check
        self.tester.local_check = self.local_checks

        # Run the test
        self.tester.test(self.test_ctxtype, self.test_config)

        self.assertTrue(filecmp.cmp('/tmp/expected.txt', '/tmp/actual.txt'))

    def test_map_message_class(self):
        topology = TopologyProvider.map_message_class_topology()
        self.tester = Tester(topology)
        self.tester.run_for(60)

        # Add the local check
        self.tester.local_check = self.local_checks

        # Run the test
        self.tester.test(self.test_ctxtype, self.test_config)

        self.assertTrue(filecmp.cmp('/tmp/expected.txt', '/tmp/actual.txt'))

    def local_checks(self):
        job = self.tester.submission_result.job
        self.assertEqual('healthy', job.health)
    def test_endpoint_source(self):
        topo = Topology("test_endpoint_source")

        service_documentation={'title': 'streamsx-sample-endpoint-sources', 'description': '2 sources'}

        documentation = dict()
        documentation['summary'] = 'Test endpoint source'
        documentation['tags'] = ['Input', 'STREAMS']
        documentation['description'] = 'CPD job endpoint injects some data'
        doc_attr = dict()
        descr = {'x': {'description': 'IDENTIFIER'}}
        doc_attr.update(descr)
        descr = {'n': {'description': 'NUMBER'}}
        doc_attr.update(descr)
        documentation['attributeDescriptions'] = doc_attr

        schema = 'tuple<rstring x, int64 n>'
        s = topo.source(EndpointSource(schema=schema, buffer_size=20000, service_documentation=service_documentation, endpoint_documentation=documentation), name='cpd_endpoint_src')
        s.print()

        documentation['summary'] = 'Test endpoint source JSON'
        s = topo.source(EndpointSource(schema=CommonSchema.Json, service_documentation=service_documentation, endpoint_documentation=documentation), name='cpd_endpoint_src_json')
        s.print()

        tester = Tester(topo)
        tester.run_for(10)
        tester.test(self.test_ctxtype, self.test_config)
    def _run_app(self, kind, opi='M'):
        schema = 'tuple<rstring a, int32 b>'
        topo = Topology('TESPL' + str(uuid.uuid4().hex))
        streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
        if opi == 'M':
            data = [1,2,3]
            se = topo.source(data)
            se = se.map(lambda x : {'a':'hello', 'b':x} , schema=schema)
            prim = op.Map(
                "com.ibm.streamsx.topology.pytest.pyexceptions::" + kind,
                se, params={'tf':self.tf})

            res = prim.stream
        elif opi == 'S':
            prim = op.Source(
                topo,
                "com.ibm.streamsx.topology.pytest.pyexceptions::" + kind,
                schema=schema, params={'tf':self.tf})
            res = prim.stream
        elif opi == 'E':
            data = [1,2,3]
            se = topo.source(data)
            se = se.map(lambda x : {'a':'hello', 'b':x} , schema=schema)
            prim = op.Sink(
                "com.ibm.streamsx.topology.pytest.pyexceptions::" + kind,
                se, params={'tf':self.tf})
            res = None

        tester = Tester(topo)
        tester.run_for(3)
        ok = tester.test(self.test_ctxtype, self.test_config, assert_on_fail=False)
        self.assertFalse(ok)
    def test_endpoint_sink(self):
        topo = Topology("test_endpoint_sink")
        stream1 = topo.source(lambda : itertools.count()).as_string()

        endpoint_documentation = dict()
        endpoint_documentation['summary'] = 'Sample endpoint sink'
        endpoint_documentation['tags'] = ['Output']
        endpoint_documentation['description'] = 'Streams job endpoint emits some data with random numbers'

        doc_attr = dict()
        descr = {'string': {'description': 'number incremented by one'}}
        doc_attr.update(descr)
        endpoint_documentation['attributeDescriptions'] = doc_attr

        service_documentation={'title': 'streamsx-sample-endpoint-sink', 'description': 'NUMBER GENERATOR', 'version': '0.1.0', 'externalDocsUrl': 'https://mycompany.com/numgen/doc', 'externalDocsDescription': 'Number generator documentation'}

        tags = dict()
        tag1 = {'Output': {'description': 'Output tag description', 'externalDocs': {'url': 'https://mycompany.com/numgen/input/doc', 'description': 'Output tag external doc description'}}}
        tags.update(tag1)
        service_documentation['tags'] = tags

        stream1.for_each(EndpointSink(buffer_size=50000, endpoint_documentation=endpoint_documentation, service_documentation=service_documentation), name='cpd_endpoint_sink')

        tester = Tester(topo)
        tester.tuple_count(stream1, 10, exact=False)
        tester.run_for(10)
        tester.test(self.test_ctxtype, self.test_config)
    def test_batch_time(self):
        topo = Topology()
        s = topo.source(lambda : map(_delay, range(50)), name='A')
        b = s.batch(datetime.timedelta(seconds=2))
        r = b.aggregate(lambda x : x)
        rf = r.flat_map()

        tester = Tester(topo)
        tester.tuple_count(rf, 50)
        tester.run_for((50*0.2) + 20)
        tester.tuple_check(r, _BatchTimeCheck())
        tester.test(self.test_ctxtype, self.test_config)
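test_batch_time depends on two helpers that are not part of this excerpt: _delay, which paces the source (the run_for((50*0.2) + 20) budget suggests roughly 0.2 seconds per tuple), and _BatchTimeCheck, a callable applied to every aggregated batch. A minimal sketch under those assumptions:

import time

def _delay(v):
    # Pace the source at ~0.2 s per tuple so each 2-second batch window
    # collects a handful of tuples (assumption based on the run_for budget).
    time.sleep(0.2)
    return v

class _BatchTimeCheck(object):
    # Tuple check applied to each aggregated batch; this sketch only verifies
    # a non-empty list of ints, the original checker may assert timing bounds.
    def __call__(self, batch):
        return isinstance(batch, list) and len(batch) > 0 and \
            all(isinstance(v, int) for v in batch)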
Example #8
class Test(unittest.TestCase):
    """ Test invocations of composite operators in local Streams instance """

    @classmethod
    def setUpClass(self):
        print (str(self))
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    def setUp(self):
        Tester.setup_distributed(self)
        self.inet_toolkit_location = "../../com.ibm.streamsx.inet"


    def _add_toolkits(self, topo, test_toolkit):
        tk.add_toolkit(topo, test_toolkit)
        if self.inet_toolkit_location is not None:
            tk.add_toolkit(topo, self.inet_toolkit_location)

    def _build_launch_app(self, name, composite_name, parameters, num_result_tuples, test_toolkit):
        print ("------ "+name+" ------")
        topo = Topology(name)
        self._add_toolkits(topo, test_toolkit)

        params = parameters
        # Call the test composite
        test_op = op.Source(topo, composite_name, 'tuple<rstring result>', params=params)
        self.tester = Tester(topo)
        self.tester.run_for(30)
        self.tester.tuple_count(test_op.stream, num_result_tuples, exact=True)

        cfg = {}
        # change trace level
        job_config = streamsx.topology.context.JobConfig(tracing='info')
        job_config.add(cfg)

        if ("TestCloud" not in str(self)):
            cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False   

        # Run the test
        test_res = self.tester.test(self.test_ctxtype, cfg, assert_on_fail=True, always_collect_logs=True)
        print (str(self.tester.result))        
        assert test_res, name+" FAILED ("+self.tester.result["application_logs"]+")"


    # ------------------------------------

    def test_inet_source(self):
        self._build_launch_app("test_inet_source", "com.ibm.streamsx.inet.test::InetSourceTestComp", {}, 3, 'inet_test')

    # ------------------------------------

    def test_http_request(self):
        self._build_launch_app("test_http_request", "com.ibm.streamsx.inet.test::HTTPRequestTestComp", {}, 8, 'inet_test')
    def test_batch_time_stv(self):
        topo = Topology()
        s = topo.source(lambda: map(_delay, range(50)), name='A')
        b = s.batchSeconds(topo.create_submission_parameter('secs', 2))
        r = b.aggregate(lambda x: x)
        rf = r.flat_map()

        tester = Tester(topo)
        tester.tuple_count(rf, 50)
        tester.run_for((50 * 0.2) + 20)
        #tester.tuple_check(r, _BatchTimeCheck())
        tester.test(self.test_ctxtype, self.test_config)
Example #10
    def test_request_post_url_in_input_stream_string_type(self):
        topo = Topology('test_request_post_url_in_input_stream_string_type')

        s = topo.source(['http://httpbin.org/post']).as_string()
        res_http = inet.request_post(s)
        res_http.print()
        tester = Tester(topo)
        tester.tuple_count(res_http, 1)
        tester.run_for(60)
        tester.test(self.test_ctxtype,
                    self.test_config,
                    always_collect_logs=True)
Example #11
    def test_request_delete_url_in_input_stream_string_type(self):
        topo = Topology('test_request_delete_url_in_input_stream_string_type')

        s = topo.source(['http://httpbin.org/delete']).as_string()
        res_http = inet.request_delete(s, ssl_accept_all_certificates=True)
        res_http.print()
        tester = Tester(topo)
        tester.tuple_count(res_http, 1)
        tester.run_for(60)
        tester.test(self.test_ctxtype,
                    self.test_config,
                    always_collect_logs=True)
    def test_batch_time(self):
        topo = Topology()
        s = topo.source(lambda : map(_delay, range(50)), name='A')
        b = s.batch(datetime.timedelta(seconds=2))
        r = b.aggregate(lambda x : x)
        rf = r.flat_map()

        tester = Tester(topo)
        tester.tuple_count(rf, 50)
        tester.run_for((50*0.2) + 20)
        tester.tuple_check(r, _BatchTimeCheck())
        tester.test(self.test_ctxtype, self.test_config)
Example #13
 def test_checker(self):
     """ Test the per-tuple checker.
     """
     topo = Topology()
     s = topo.source(rands)
     s = s.filter(lambda r: r > 0.8)
     s = s.map(lambda r: r + 7.0)
     tester = Tester(topo)
     tester.tuple_count(s, 200, exact=False)
     if self.test_ctxtype == context.ContextTypes.STANDALONE:
         tester.run_for(20)
     tester.tuple_check(s, lambda r: r > 7.8)
     tester.test(self.test_ctxtype, self.test_config)
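The test_checker snippets read from a rands source that is defined elsewhere; given the r > 0.8 filter, the +7.0 map and the r > 7.8 check, it is presumably an endless generator of uniform random floats in [0, 1). A minimal sketch under that assumption:

import random

def rands():
    # Endless stream of uniform random floats in [0.0, 1.0); the filter keeps
    # roughly 20% of them and the map shifts the survivors above 7.8.
    while True:
        yield random.random()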
Example #14
 def test_checker(self):
     """ Test the per-tuple checker.
     """
     topo = Topology()
     s = topo.source(rands)
     s = s.filter(lambda r : r > 0.8)
     s = s.map(lambda r : r + 7.0 )
     tester = Tester(topo)
     tester.tuple_count(s, 200, exact=False)
     if self.test_ctxtype == context.ContextTypes.STANDALONE:
         tester.run_for(20)
     tester.tuple_check(s, lambda r : r > 7.8)
     tester.test(self.test_ctxtype, self.test_config)
Example #15
    def test_request_get_fixed_url(self):
        topo = Topology('test_request_get_fixed_url')

        url_sample = 'http://httpbin.org/get'
        s = topo.source(['fixed-url-test']).as_string()
        res_http = inet.request_get(s, url_sample)
        res_http.print()
        tester = Tester(topo)
        tester.tuple_count(res_http, 1)
        tester.run_for(60)
        tester.test(self.test_ctxtype,
                    self.test_config,
                    always_collect_logs=True)
    def _run_app(self, fn=None, data=None):
        topo = Topology('TE' + str(uuid.uuid4().hex))
        if data is None:
            data = [1,2,3]
        se = topo.source(data)

        if fn is not None:
            se = fn(se)

        tester = Tester(topo)
        tester.run_for(3)
        ok = tester.test(self.test_ctxtype, self.test_config, assert_on_fail=False)
        self.assertFalse(ok)
    def _test_insert_consistent_region(self):
        print('\n---------' + str(self))
        name = 'test_insert_consistent_region'
        topo = Topology(name)
        self._add_toolkits(topo, None)
        # configuration of consistent region trigger period
        trigger_period = 10
        num_expected_tuples = 8000
        num_resets = 2
        run_for = 120  # in seconds

        beacon = op.Source(topo,
                           "spl.utility::Beacon",
                           'tuple<int64 id, rstring val>',
                           params={
                               'period': 0.01,
                               'iterations': num_expected_tuples
                           })
        beacon.id = beacon.output('(int64)IterationCount()')
        beacon.val = beacon.output(spltypes.rstring('CR_TEST'))
        beacon.stream.set_consistent(
            ConsistentRegionConfig.periodic(trigger_period))

        es.insert(beacon.stream,
                  connection=self.connection,
                  database=self.database,
                  table='StreamsCRTable',
                  primary_key='id',
                  partitioning_key='id',
                  front_end_connection_flag=False,
                  user=self.es_user,
                  password=self.es_password,
                  truststore=self.es_truststore,
                  truststore_password=self.es_truststore_password,
                  keystore=self.es_keystore,
                  keystore_password=self.es_keystore_password)

        #self._build_only(name, topo)

        tester = Tester(topo)
        tester.run_for(run_for)
        tester.resets(num_resets)  # minimum number of resets for each region

        cfg = {}
        # change trace level
        job_config = streamsx.topology.context.JobConfig(tracing='warn')
        job_config.add(cfg)
        cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
        tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
        print(str(tester.result))
Example #18
    def test_TopologyNestedParallel(self):
        if 'TestSas' in str(self):
            self.skipTest("Skip for SAS - since job is not healthy there")
        topo = Topology("test_TopologyNestedParallel")
        s = topo.source([1])
        s = s.parallel(5, routing=Routing.BROADCAST)
        s = s.parallel(5, routing=Routing.BROADCAST)
        s = s.map(lambda x: x)
        s = s.end_parallel()
        s = s.end_parallel()

        tester = Tester(topo)
        tester.run_for(60)
        tester.contents(s, [1 for i in range(25)])
        tester.test(self.test_ctxtype, self.test_config)
Example #19
    def _run_app(self, fn=None, data=None):
        topo = Topology('TE' + str(uuid.uuid4().hex))
        if data is None:
            data = [1, 2, 3]
        se = topo.source(data)

        if fn is not None:
            se = fn(se)

        tester = Tester(topo)
        tester.run_for(3)
        ok = tester.test(self.test_ctxtype,
                         self.test_config,
                         assert_on_fail=False)
        self.assertFalse(ok)
Example #20
 def test_checker(self):
     """ Test the per-tuple checker.
     """
     topo = Topology()
     s = topo.source(rands)
     s = s.filter(lambda r : r > 0.8)
     s = s.map(lambda r : r + 7.0 )
     tester = Tester(topo)
     tester.tuple_count(s, 200, exact=False)
     if self.test_ctxtype == context.ContextTypes.STANDALONE:
         tester.run_for(20)
     tester.tuple_check(s, lambda r : r > 7.8)
     # Ensure we perform dependency checking for the check function
     import fns_test2_test
     tester.tuple_check(s, fns_test2_test.tc_dep)
     tester.test(self.test_ctxtype, self.test_config)
Example #21
    def _run_app(self, fn=None, data=None, n=None, e=None):
        topo = Topology('TSE' + str(uuid.uuid4().hex))
        if data is None:
            data = [1, 2, 3]
        se = topo.source(data)

        if fn is not None:
            se = fn(se)

        tester = Tester(topo)
        if n is not None:
            tester.tuple_count(se, n)
        if e is not None:
            tester.contents(se, e)
        tester.run_for(3)
        tester.test(self.test_ctxtype, self.test_config)
Example #22
 def test_checker(self):
     """ Test the per-tuple checker.
     """
     topo = Topology()
     s = topo.source(rands)
     s = s.filter(lambda r: r > 0.8)
     s = s.map(lambda r: r + 7.0)
     tester = Tester(topo)
     tester.tuple_count(s, 200, exact=False)
     if self.test_ctxtype == context.ContextTypes.STANDALONE:
         tester.run_for(20)
     tester.tuple_check(s, lambda r: r > 7.8)
     # Ensure we perform dependency checking for the check function
     import fns_test2_test
     tester.tuple_check(s, fns_test2_test.tc_dep)
     tester.test(self.test_ctxtype, self.test_config)
    def _run_app(self, fn=None, data=None, n=None, e=None):
        topo = Topology('TSE' + str(uuid.uuid4().hex))
        if data is None:
            data = [1,2,3]
        se = topo.source(data)

        if fn is not None:
            se = fn(se)

        tester = Tester(topo)
        if n is not None:
            tester.tuple_count(se, n)
        if e is not None:
            tester.contents(se, e)
        tester.run_for(3)
        tester.test(self.test_ctxtype, self.test_config)
Example #24
    def test_request_post_url_in_input_stream_string_type_content_type_param(
            self):
        topo = Topology(
            'test_request_post_url_in_input_stream_string_type_content_type_param'
        )

        s = topo.source(['http://httpbin.org/post']).as_string()
        res_http = inet.request_post(
            s,
            content_type='application/x-www-form-urlencoded',
            ssl_accept_all_certificates=True)
        res_http.print()
        tester = Tester(topo)
        tester.tuple_count(res_http, 1)
        tester.run_for(60)
        tester.test(self.test_ctxtype,
                    self.test_config,
                    always_collect_logs=True)
Example #25
    def test_request_put_with_url_contt_params_body_in_input_stream_string_type(
            self):
        topo = Topology(
            'test_request_put_with_url_contt_params_body_in_input_stream_string_type'
        )

        s = topo.source(['hello world']).as_string()
        result_http_put = inet.request_put(s,
                                           url='http://httpbin.org/put',
                                           content_type='text/plain',
                                           ssl_accept_all_certificates=False)
        result_http_put.print()
        tester = Tester(topo)
        tester.tuple_count(result_http_put, 1)
        tester.run_for(60)
        tester.test(self.test_ctxtype,
                    self.test_config,
                    always_collect_logs=True)
    def test_param_consuming_reads(self):
        topo = Topology('test_param_consuming_reads')

        stream1 = topo.source(lambda : itertools.count()).as_string()

        endpoint_documentation = dict()
        endpoint_documentation['summary'] = 'Sample endpoint sink'
        endpoint_documentation['tags'] = ['Output']
        endpoint_documentation['description'] = 'Streams job endpoint emits some data with random numbers'

        doc_attr = dict()
        descr = {'string': {'description': 'number incremented by one'}}
        doc_attr.update(descr)
        endpoint_documentation['attributeDescriptions'] = doc_attr

        stream1.for_each(EndpointSink(buffer_size=50000, consuming_reads=True, endpoint_documentation=endpoint_documentation), name='cpd_endpoint_sink')

        tester = Tester(topo)
        tester.tuple_count(stream1, 10, exact=False)
        tester.run_for(10)
        tester.test(self.test_ctxtype, self.test_config)
Example #27
    def test_hw(self):
        print('\n---------' + str(self))
        name = 'test_hw'
        n = 100
        topo = Topology(name)
        if self.es_toolkit_home is not None:
            streamsx.spl.toolkit.add_toolkit(topo, self.es_toolkit_home)

        s = topo.source(['Hello', 'World!']).as_string()
        es.bulk_insert(s,
                       'test-index-cloud',
                       credentials=get_credentials(),
                       ssl_trust_all_certificates=True)

        tester = Tester(topo)
        tester.run_for(60)

        # Run the test
        tester.test(self.test_ctxtype,
                    self.test_config,
                    always_collect_logs=True)
    def _run_app(self, kind, e, opi='M'):
        schema = 'tuple<rstring a, int32 b>'
        topo = Topology('TSESPL' + str(uuid.uuid4().hex))
        streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))

        if opi == 'M':
            data = [1, 2, 3]
            se = topo.source(data)
            se = se.map(lambda x: {'a': 'hello', 'b': x}, schema=schema)
            prim = op.Map("com.ibm.streamsx.topology.pytest.pyexceptions::" +
                          kind,
                          se,
                          params={'tf': self.tf})

            res = prim.stream
        elif opi == 'S':
            prim = op.Source(
                topo,
                "com.ibm.streamsx.topology.pytest.pyexceptions::" + kind,
                schema=schema,
                params={'tf': self.tf})
            res = prim.stream
        elif opi == 'E':
            data = [1, 2, 3]
            se = topo.source(data)
            se = se.map(lambda x: {'a': 'hello', 'b': x}, schema=schema)
            prim = op.Sink("com.ibm.streamsx.topology.pytest.pyexceptions::" +
                           kind,
                           se,
                           params={'tf': self.tf})
            res = None

        tester = Tester(topo)
        if res is not None:
            tester.tuple_count(res, len(e))
            if e:
                tester.contents(res, e)
        else:
            tester.run_for(5)
        tester.test(self.test_ctxtype, self.test_config)
Example #29
    def test_request_get_url_in_input_stream(self):
        topo = Topology('test_request_get_url_in_input_stream')

        pulse = op.Source(topo,
                          "spl.utility::Beacon",
                          'tuple<rstring url, rstring header>',
                          params={'iterations': 1})
        pulse.url = pulse.output('"http://httpbin.org/get"')
        pulse.header = pulse.output('"myheader: myvalue"')

        res_http = inet.request_get(pulse.stream,
                                    url_attribute='url',
                                    extra_header_attribute='header',
                                    ssl_accept_all_certificates=True)
        res_http.print()

        tester = Tester(topo)
        tester.tuple_count(res_http, 1)
        tester.run_for(60)
        tester.test(self.test_ctxtype,
                    self.test_config,
                    always_collect_logs=True)
Example #30
    def test_composite(self):
        print('\n---------' + str(self))
        name = 'test_composite'
        n = 100
        topo = Topology(name)
        if self.es_toolkit_home is not None:
            streamsx.spl.toolkit.add_toolkit(topo, self.es_toolkit_home)

        s = topo.source(['Hello', 'World!']).as_string()
        config = {'ssl_trust_all_certificates': True}
        s.for_each(
            es.Insert(credentials=get_credentials(),
                      index_name='test-index-cloud',
                      **config))

        tester = Tester(topo)
        tester.run_for(60)

        # Run the test
        tester.test(self.test_ctxtype,
                    self.test_config,
                    always_collect_logs=True)
    def _run_app(self, kind, opi='M'):
        schema = 'tuple<rstring a, int32 b>'
        topo = Topology('TESPL' + str(uuid.uuid4().hex))
        streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy'))
        if opi == 'M':
            data = [1, 2, 3]
            se = topo.source(data)
            se = se.map(lambda x: {'a': 'hello', 'b': x}, schema=schema)
            prim = op.Map("com.ibm.streamsx.topology.pytest.pyexceptions::" +
                          kind,
                          se,
                          params={'tf': self.tf})

            res = prim.stream
        elif opi == 'S':
            prim = op.Source(
                topo,
                "com.ibm.streamsx.topology.pytest.pyexceptions::" + kind,
                schema=schema,
                params={'tf': self.tf})
            res = prim.stream
        elif opi == 'E':
            data = [1, 2, 3]
            se = topo.source(data)
            se = se.map(lambda x: {'a': 'hello', 'b': x}, schema=schema)
            prim = op.Sink("com.ibm.streamsx.topology.pytest.pyexceptions::" +
                           kind,
                           se,
                           params={'tf': self.tf})
            res = None

        tester = Tester(topo)
        tester.run_for(3)
        ok = tester.test(self.test_ctxtype,
                         self.test_config,
                         assert_on_fail=False)
        self.assertFalse(ok)
    def _run_app(self, kind, e, opi='M'):
        schema = 'tuple<rstring a, int32 b>'
        topo = Topology()
        streamsx.spl.toolkit.add_toolkit(topo, '../testtkpy')

        if opi == 'M':
            data = [1,2,3]
            se = topo.source(data)
            se = se.map(lambda x : {'a':'hello', 'b':x} , schema=schema)
            prim = op.Map(
                "com.ibm.streamsx.topology.pytest.pyexceptions::" + kind,
                se, params={'tf':self.tf})

            res = prim.stream
        elif opi == 'S':
            prim = op.Source(
                topo,
                "com.ibm.streamsx.topology.pytest.pyexceptions::" + kind,
                schema=schema, params={'tf':self.tf})
            res = prim.stream
        elif opi == 'E':
            data = [1,2,3]
            se = topo.source(data)
            se = se.map(lambda x : {'a':'hello', 'b':x} , schema=schema)
            prim = op.Sink(
                "com.ibm.streamsx.topology.pytest.pyexceptions::" + kind,
                se, params={'tf':self.tf})
            res = None
    
        tester = Tester(topo)
        if res is not None:
            tester.tuple_count(res, len(e))
            if e:
                tester.contents(res, e)
        else:
            tester.run_for(5)
        tester.test(self.test_ctxtype, self.test_config)
 def test_insert_udp(self):
     print('\n---------' + str(self))
     topo = Topology('test_insert_udp')
     self._add_toolkits(topo, None)
     topo.add_pip_package('streamsx.eventstore')
     s = self._create_stream(topo)
     result_schema = StreamSchema(
         'tuple<int32 id, rstring name, boolean _Inserted_>')
     # user-defined parallelism with two channels (two EventStoreSink operators)
     res = es.insert(
         s.parallel(2),
         table='SampleTable',
         database=self.database,
         connection=self.connection,
         schema=result_schema,
         primary_key='id',
         partitioning_key='id',
         front_end_connection_flag=self.front_end_connection_flag,
         user=self.es_user,
         password=self.es_password,
         truststore=self.es_truststore,
         truststore_password=self.es_truststore_password,
         keystore=self.es_keystore,
         keystore_password=self.es_keystore_password)
     res = res.end_parallel()
     res.print()
     #self._build_only('test_insert_udp', topo)
     tester = Tester(topo)
     tester.run_for(120)
     tester.tuple_count(res, 20, exact=True)
     cfg = {}
     job_config = streamsx.topology.context.JobConfig(tracing='info')
     job_config.add(cfg)
     cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
     tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
     print(str(tester.result))
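The Event Store tests call helpers of the surrounding test class (_create_app_config, _add_toolkits, _create_stream) that this listing does not show. A rough sketch of _add_toolkits and _create_stream, assuming the 20-tuple expectation and the tuple<int32 id, rstring name> input implied by the result schema; the es_toolkit_location attribute is hypothetical:

    def _add_toolkits(self, topo, test_toolkit):
        # Register the toolkit under test (when given) plus the Event Store
        # toolkit; the attribute holding its location is an assumption.
        if test_toolkit is not None:
            streamsx.spl.toolkit.add_toolkit(topo, test_toolkit)
        if self.es_toolkit_location is not None:
            streamsx.spl.toolkit.add_toolkit(topo, self.es_toolkit_location)

    def _create_stream(self, topo):
        # Produce the 20 input tuples that the tests expect back on the
        # _Inserted_ result stream, matching the SampleTable columns.
        s = topo.source([(i, 'name_' + str(i)) for i in range(20)])
        return s.map(lambda t: {'id': t[0], 'name': t[1]},
                     schema=StreamSchema('tuple<int32 id, rstring name>'))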
Example #34
class TestPubSub(unittest.TestCase):
    """ Test basic pub-sub in SPL
    """
    def setUp(self):
        Tester.setup_distributed(self)

    def _publish(self, topo, N, topic, width=None, allow_filter=False):
        b = op.Source(topo, "spl.utility::Beacon",
            SCHEMA,
            params = {'initDelay':10.0, 'iterations':N})
        b.seq = b.output('IterationCount()')

        ps = b.stream
        if width:
            ps = ps.parallel(width=width)

        p = op.Sink('com.ibm.streamsx.topology.topic::Publish',
            ps,
            params = {'topic': topic},
            name='MSP')
        if allow_filter:
           p.params['allowFilter'] = True


    def _subscribe(self, topo, topic, direct=True, drop=None, filtered=False, extra=None):
        s = op.Source(topo,
               "com.ibm.streamsx.topology.topic::FilteredSubscribe" if filtered else "com.ibm.streamsx.topology.topic::Subscribe",
            SCHEMA,
            params = {'topic':topic, 'streamType':SCHEMA},
            name='MSS')

        if extra:
            s.params.update(extra)

        if not direct:
            s.params['connect'] = op.Expression.expression('com.ibm.streamsx.topology.topic::Buffered')
            if drop:
                s.params['bufferFullPolicy'] =  op.Expression.expression('Sys.' + drop)
            return s.stream.filter(slowme)

        return s.stream

    def _get_single_sub_op(self):
        job = self.tester.submission_result.job
        self.assertEqual('healthy', job.health)
        for op in job.get_operators():
           if op.name.startswith('MSS') and op.operatorKind == 'spl.relational::Filter':
              mss = op
        return mss

    def _get_single_sub_metrics(self, mss):
        nDropped = None
        nProcessed = None
        ip = mss.get_input_ports()[0]
        while nDropped is None or nProcessed is None:
            if nDropped is None:
                metrics = ip.get_metrics(name='nTuplesDropped')
                if metrics:
                    nDropped = metrics[0]
            if nProcessed is None:
                metrics = ip.get_metrics(name='nTuplesProcessed')
                if metrics:
                    nProcessed = metrics[0]
              
        return nDropped, nProcessed


    def check_single_sub(self):
        """
        Check that we get all the tuples, with none dropped,
        with a single subscriber.
        """
        mss = self._get_single_sub_op()
        nDropped, nProcessed = self._get_single_sub_metrics(mss)
        while nProcessed.value < self.N:
            self.assertEqual(0, nDropped.value)
            time.sleep(2)
            nDropped, nProcessed = self._get_single_sub_metrics(mss)

        self.assertEqual(0, nDropped.value)
        self.assertEqual(self.N, nProcessed.value)

    def check_single_sub_drop(self):
        """
        Check that every tuple is either processed or dropped, and that
        at least some tuples were dropped, with a single subscriber.
        """
        mss = self._get_single_sub_op()
        nDropped, nProcessed = self._get_single_sub_metrics(mss)
        while nDropped.value + nProcessed.value < self.N:
            time.sleep(2)
            nDropped, nProcessed = self._get_single_sub_metrics(mss)

        self.assertEqual(self.N, nDropped.value + nProcessed.value)
        self.assertTrue(nDropped.value > 0)

    def test_One2One(self):
        """Publish->Subscribe
        """
        N = 2466
        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic)

        # Publisher
        self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        self.tester.tuple_count(s, N)
        self.tester.local_check = self.check_single_sub
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_One2OneNonDirect(self):
        """Publish->Subscribe with a buffered subscriber.
        """
        N = 3252
        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic, direct=False)

        # Publisher
        self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        self.tester.tuple_count(s, N)
        self.tester.local_check = self.check_single_sub
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_One2OneNonDirectDropFirst(self):
        """Publish->Subscribe with a buffered subscriber.
        """
        N = 5032
        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic, direct=False, drop='DropFirst')

        # Publisher
        self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        # 1000-2 for window & final mark
        self.tester.tuple_count(s, 998, exact=False)
        self.tester.local_check = self.check_single_sub_drop
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_One2OneNonDirectDropLast(self):
        """Publish->Subscribe with a buffered subscriber.
        """
        N = 5032
        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic, direct=False, drop='DropLast')

        # Publisher
        self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        self.tester.tuple_count(s, 1000, exact=False)
        self.tester.local_check = self.check_single_sub_drop
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_UDPMany2One(self):
        """
        UDP publishers to a single subscriber.
        """
        N = 17342

        for pw in (1,5):
            topic = random_topic()
            topo = Topology()

            # Subscriber
            s = self._subscribe(topo, topic)

            # Publisher
            self._publish(topo, N, topic, width=pw)

            self.tester = Tester(topo)
            self.tester.run_for(15)
            self.tester.tuple_count(s, N)
            self.N = N
            self.tester.local_check = self.check_single_sub
            self.tester.test(self.test_ctxtype, self.test_config)

    def test_Many2One(self):
        """
        Many non-UDP publishers to a single subscriber.
        """
        N = 17342

        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic)

        # Publisher
        M=3
        for i in range(M):
            self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.tester.tuple_count(s, N*M)
        self.N = N*M
        self.tester.local_check = self.check_single_sub
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_allow_filter_subscribe(self):
        N = 99

        topic = random_topic()
        topo = Topology()

        # Non-Filter Subscriber
        s = self._subscribe(topo, topic)

        # Publisher
        self._publish(topo, N, topic, allow_filter=True)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        self.tester.tuple_count(s, N)
        self.tester.local_check = self.check_single_sub
        self.tester.test(self.test_ctxtype, self.test_config)


    def test_allow_filter_filtered_subscribe(self):
        N = 201
        F = 87
        pd = os.path.dirname(os.path.dirname(__file__))
        ttk = os.path.join(pd, 'testtk')

        for af,d in [(False,False), (False,True), (True,False), (True,True)]:

            #af = c[0]
            #d = c[1]

            topic = random_topic()
            topo = Topology()

            streamsx.spl.toolkit.add_toolkit(topo, ttk)

            extra = {}
            extra['remoteFilter'] = 'seq < 87'
            extra['localFilterFunction'] = op.Expression.expression('testspl::affs')
            s = self._subscribe(topo, topic, direct=d, filtered=True, extra=extra)

            sf = s.filter(check_lt_87)

            # Publisher
            self._publish(topo, N, topic, allow_filter=af)

            self.tester = Tester(topo)
            self.tester.run_for(15)
            self.N = F
            self.tester.tuple_count(s, F)
            self.tester.tuple_check(s, check_lt_87)
            #self.tester.local_check = self.check_single_sub
            self.tester.test(self.test_ctxtype, self.test_config)
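The pub-sub tests reference module-level helpers (SCHEMA, random_topic, slowme, check_lt_87) that sit outside this excerpt. A hedged sketch of plausible definitions, assuming dict-style tuple passing and a seq attribute fed by the Beacon's IterationCount():

import random
import string
import time

# Schema carried by the published stream; seq holds the Beacon iteration count.
SCHEMA = 'tuple<uint64 seq>'

def random_topic(length=12):
    # Fresh topic name per test run so concurrent jobs do not cross-subscribe.
    return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))

def slowme(t):
    # Filter used on the buffered subscriber path: sleep briefly so the buffer
    # can fill (and drop, under the Drop* policies) while keeping every tuple.
    time.sleep(0.001)
    return True

def check_lt_87(t):
    # Mirrors the remoteFilter 'seq < 87' in the filtered-subscribe test
    # (assumes tuples arrive as dicts keyed by attribute name).
    return t['seq'] < 87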
Example #35
class TestDistributedPubSub(unittest.TestCase):
    """ Test basic pub-sub in SPL
    """
    def setUp(self):
        Tester.setup_distributed(self)
        self.test_config[ConfigParams.SSL_VERIFY] = False

    def _publish(self, topo, N, topic, width=None, allow_filter=False):
        b = op.Source(topo,
                      "spl.utility::Beacon",
                      SCHEMA,
                      params={
                          'initDelay': 10.0,
                          'iterations': N
                      })
        b.seq = b.output('IterationCount()')

        ps = b.stream
        if width:
            ps = ps.parallel(width=width)

        p = op.Sink('com.ibm.streamsx.topology.topic::Publish',
                    ps,
                    params={'topic': topic},
                    name='MSP')
        if allow_filter:
            p.params['allowFilter'] = True

    def _subscribe(self,
                   topo,
                   topic,
                   direct=True,
                   drop=None,
                   filtered=False,
                   extra=None):
        s = op.Source(
            topo,
            "com.ibm.streamsx.topology.topic::FilteredSubscribe"
            if filtered else "com.ibm.streamsx.topology.topic::Subscribe",
            SCHEMA,
            params={
                'topic': topic,
                'streamType': SCHEMA
            },
            name='MSS')

        if extra:
            s.params.update(extra)

        if not direct:
            s.params['connect'] = op.Expression.expression(
                'com.ibm.streamsx.topology.topic::Buffered')
            if drop:
                s.params['bufferFullPolicy'] = op.Expression.expression(
                    'Sys.' + drop)
            return s.stream.filter(slowme)

        return s.stream

    def _get_single_sub_op(self):
        job = self.tester.submission_result.job
        self.assertEqual('healthy', job.health)
        for op in job.get_operators():
            if op.name.startswith(
                    'MSS') and op.operatorKind == 'spl.relational::Filter':
                mss = op
        return mss

    def _get_single_sub_metrics(self, mss):
        nDropped = None
        nProcessed = None
        ip = mss.get_input_ports()[0]
        while nDropped is None or nProcessed is None:
            if nDropped is None:
                metrics = ip.get_metrics(name='nTuplesDropped')
                if metrics:
                    nDropped = metrics[0]
            if nProcessed is None:
                metrics = ip.get_metrics(name='nTuplesProcessed')
                if metrics:
                    nProcessed = metrics[0]

        return nDropped, nProcessed

    def check_single_sub(self):
        """
        Check that we get all the tuples, with none dropped,
        with a single subscriber.
        """
        mss = self._get_single_sub_op()
        nDropped, nProcessed = self._get_single_sub_metrics(mss)
        while nProcessed.value < self.N:
            self.assertEqual(0, nDropped.value)
            time.sleep(2)
            nDropped, nProcessed = self._get_single_sub_metrics(mss)

        self.assertEqual(0, nDropped.value)
        self.assertEqual(self.N, nProcessed.value)

    def check_single_sub_drop(self):
        """
        Check that every tuple is either processed or dropped, and that
        at least some tuples were dropped, with a single subscriber.
        """
        mss = self._get_single_sub_op()
        nDropped, nProcessed = self._get_single_sub_metrics(mss)
        while nDropped.value + nProcessed.value < self.N:
            time.sleep(2)
            nDropped, nProcessed = self._get_single_sub_metrics(mss)

        self.assertEqual(self.N, nDropped.value + nProcessed.value)
        self.assertTrue(nDropped.value > 0)

    def test_One2One(self):
        """Publish->Subscribe
        """
        N = 2466
        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic)

        # Publisher
        self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        self.tester.tuple_count(s, N)
        self.tester.local_check = self.check_single_sub
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_One2OneNonDirect(self):
        """Publish->Subscribe with a buffered subscriber.
        """
        N = 3252
        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic, direct=False)

        # Publisher
        self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        self.tester.tuple_count(s, N)
        self.tester.local_check = self.check_single_sub
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_One2OneNonDirectDropFirst(self):
        """Publish->Subscribe with a buffered subscriber.
        """
        N = 5032
        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic, direct=False, drop='DropFirst')

        # Publisher
        self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        # 1000-2 for window & final mark
        self.tester.tuple_count(s, 998, exact=False)
        self.tester.local_check = self.check_single_sub_drop
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_One2OneNonDirectDropLast(self):
        """Publish->Subscribe with a buffered subscriber.
        """
        N = 5032
        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic, direct=False, drop='DropLast')

        # Publisher
        self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        self.tester.tuple_count(s, 1000, exact=False)
        self.tester.local_check = self.check_single_sub_drop
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_UDPMany2One(self):
        """
        UDP publishers to a single subscriber.
        """
        N = 17342

        for pw in (1, 5):
            topic = random_topic()
            topo = Topology()

            # Subscriber
            s = self._subscribe(topo, topic)

            # Publisher
            self._publish(topo, N, topic, width=pw)

            self.tester = Tester(topo)
            self.tester.run_for(15)
            self.tester.tuple_count(s, N)
            self.N = N
            self.tester.local_check = self.check_single_sub
            self.tester.test(self.test_ctxtype, self.test_config)

    def test_Many2One(self):
        """
        Many non-UDP publishers to a single subscriber.
        """
        N = 17342

        topic = random_topic()
        topo = Topology()

        # Subscriber
        s = self._subscribe(topo, topic)

        # Publisher
        M = 3
        for i in range(M):
            self._publish(topo, N, topic)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.tester.tuple_count(s, N * M)
        self.N = N * M
        self.tester.local_check = self.check_single_sub
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_allow_filter_subscribe(self):
        N = 99

        topic = random_topic()
        topo = Topology()

        # Non-Filter Subscriber
        s = self._subscribe(topo, topic)

        # Publisher
        self._publish(topo, N, topic, allow_filter=True)

        self.tester = Tester(topo)
        self.tester.run_for(15)
        self.N = N
        self.tester.tuple_count(s, N)
        self.tester.local_check = self.check_single_sub
        self.tester.test(self.test_ctxtype, self.test_config)

    def test_allow_filter_filtered_subscribe(self):
        N = 201
        F = 87
        pd = os.path.dirname(os.path.dirname(__file__))
        ttk = os.path.join(pd, 'testtk')

        for af, d in [(False, False), (False, True), (True, False),
                      (True, True)]:

            #af = c[0]
            #d = c[1]

            topic = random_topic()
            topo = Topology()

            streamsx.spl.toolkit.add_toolkit(topo, ttk)

            extra = {}
            extra['remoteFilter'] = 'seq < 87'
            extra['localFilterFunction'] = op.Expression.expression(
                'testspl::affs')
            s = self._subscribe(topo,
                                topic,
                                direct=d,
                                filtered=True,
                                extra=extra)

            sf = s.filter(check_lt_87)

            # Publisher
            self._publish(topo, N, topic, allow_filter=af)

            self.tester = Tester(topo)
            self.tester.run_for(15)
            self.N = F
            self.tester.tuple_count(s, F)
            self.tester.tuple_check(s, check_lt_87)
            #self.tester.local_check = self.check_single_sub
            self.tester.test(self.test_ctxtype, self.test_config)
Example #36
class TestTester(unittest.TestCase):
    _multiprocess_can_split_ = True

    def setUp(self):
        Tester.setup_standalone(self)

    def test_at_least(self):
        """ Test the at least tuple count.
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            return unittest.skip("Standalone tests must complete")

        topo = Topology()
        s = topo.source(rands)
        tester = Tester(topo)
        tester.tuple_count(s, 100, exact=False)
        tester.test(self.test_ctxtype, self.test_config)

    def test_no_tuples(self):
        """ Test exact count with zero tuples.
        """
        topo = Topology()
        s = topo.source([])
        tester = Tester(topo)
        tester.tuple_count(s, 0)
        tester.test(self.test_ctxtype, self.test_config)

    def test_at_least_no_tuples(self):
        """ Test at least count with zero tuples. 
            (kind of a pointless condition, always true).
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            return unittest.skip("Standalone tests must complete")

        topo = Topology()
        s = topo.source([])
        tester = Tester(topo)
        tester.tuple_count(s, 0, exact=False)
        tester.test(self.test_ctxtype, self.test_config)

    def test_checker(self):
        """ Test the per-tuple checker.
        """
        topo = Topology()
        s = topo.source(rands)
        s = s.filter(lambda r : r > 0.8)
        s = s.map(lambda r : r + 7.0 )
        tester = Tester(topo)
        tester.tuple_count(s, 200, exact=False)
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            tester.run_for(20)
        tester.tuple_check(s, lambda r : r > 7.8)
        # Ensure we perform dependency checking for the check function
        import fns_test2_test
        tester.tuple_check(s, fns_test2_test.tc_dep)
        tester.test(self.test_ctxtype, self.test_config)

    def test_local_check(self):
        """ Test the at least tuple count.
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            return unittest.skip("Standalone tests don't support local check")
        topo = Topology()
        s = topo.source(rands)
        self.my_local_called = False
        self.tester = Tester(topo)
        self.tester.tuple_count(s, 100, exact=False)
        self.tester.local_check = self.my_local
        self.tester.test(self.test_ctxtype, self.test_config)
        self.assertTrue(self.my_local_called)

    def my_local(self):
        self.assertTrue(hasattr(self.tester, 'submission_result'))
        self.assertTrue(hasattr(self.tester, 'streams_connection'))
        self.assertIs(self.tester.streams_connection, self.tester.submission_result.job.rest_client._sc)
        self.my_local_called = True

    def test_bad_pe(self):
        """Test a failure in a PE is caught as a test failure"""
        topo = Topology()
        s = topo.source(rands)
        # intentional addition of a string with an int
        # to cause a PE failure
        s = s.map(lambda t : t + 'a string')
        tester = Tester(topo)
        tester.tuple_count(s, 0, exact=False)
        tp = tester.test(self.test_ctxtype, self.test_config, assert_on_fail=False)
        self.assertFalse(tp)

    def test_run_for(self):
        topo = Topology()
        s = topo.source([1,2,3])
        self.tester = Tester(topo)
        self.tester.tuple_count(s, 3)
        self.tester.run_for(120)
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.rf_start = time.time()
        else:
            self.tester.local_check = self.get_start_time
        self.tester.test(self.test_ctxtype, self.test_config)
        now = time.time()
        test_duration = now - self.rf_start
        self.assertTrue(test_duration >= 120)

    def test_eventual_result_ok(self):
        N=500000
        topo = Topology()
        s = topo.source(range(N))
        w = s.batch(datetime.timedelta(milliseconds=300))
        a = w.aggregate(lambda t : (len(t), sum(t)))
        tester = Tester(topo)
        tester.tuple_count(s, N)
        tester.eventual_result(a, _EROK(N))
        # Ensure we perform dependency checking for the check function
        import fns_test2_test
        tester.eventual_result(s, fns_test2_test.tc_dep)
        tester.test(self.test_ctxtype, self.test_config)

    def test_eventual_result_bad(self):
        N=500000
        topo = Topology()
        s = topo.source(range(N))
        w = s.batch(datetime.timedelta(milliseconds=300))
        a = w.aggregate(lambda t : (len(t), sum(t)))
        tester = Tester(topo)
        tester.tuple_count(s, N)
        tester.eventual_result(a, _EROK(int(N/4)))
        ok = tester.test(self.test_ctxtype, self.test_config, assert_on_fail=False)
        self.assertFalse(ok)

    def test_count_bad(self):
        N=10
        topo = Topology()
        s = topo.source(range(N))
        tester = Tester(topo)
        tester.tuple_count(s, N+1)
        ok = tester.test(self.test_ctxtype, self.test_config, assert_on_fail=False)
        self.assertFalse(ok)

    def test_count_bad_conflicting(self):
        N=10
        topo = Topology()
        s = topo.source(range(N))
        tester = Tester(topo)
        # Add one that fails and one that never becomes valid
        tester.tuple_count(s.map(), int(N/2))
        tester.tuple_count(s, N+1)
        ok = tester.test(self.test_ctxtype, self.test_config, assert_on_fail=False)
        self.assertFalse(ok)

    def get_start_time(self):
        job = self.tester.submission_result.job
        self.rf_start = job.submitTime / 1000.0
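The eventual-result tests build an _EROK(N) checker that is defined outside this excerpt (as is the fns_test2_test module used for dependency checking). Assuming the Tester.eventual_result contract of returning None to keep waiting, True for a final pass and False for a final failure, a plausible sketch is:

class _EROK(object):
    # Eventual-result checker for the (len, sum) batch aggregates: wait until
    # n source tuples have been accounted for, then verify the running sum
    # equals 0 + 1 + ... + n-1.  A sketch, not the original implementation.
    def __init__(self, n):
        self.n = n
        self.count = 0
        self.total = 0

    def __call__(self, agg):
        c, s = agg
        self.count += c
        self.total += s
        if self.count < self.n:
            return None      # not enough tuples yet, keep checking
        if self.count > self.n:
            return False     # saw more tuples than expected: final failure
        return self.total == self.n * (self.n - 1) // 2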
Example #37
class TestTester(unittest.TestCase):
    _multiprocess_can_split_ = True

    def setUp(self):
        Tester.setup_standalone(self)

    def test_at_least(self):
        """ Test the at least tuple count.
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            return unittest.skip("Standalone tests must complete")

        topo = Topology()
        s = topo.source(rands)
        tester = Tester(topo)
        tester.tuple_count(s, 100, exact=False)
        tester.test(self.test_ctxtype, self.test_config)

    def test_no_tuples(self):
        """ Test exact count with zero tuples.
        """
        topo = Topology()
        s = topo.source([])
        tester = Tester(topo)
        tester.tuple_count(s, 0)
        tester.test(self.test_ctxtype, self.test_config)

    def test_at_least_no_tuples(self):
        """ Test at least count with zero tuples. 
            (kind of a pointless condition, always true).
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            return unittest.skip("Standalone tests must complete")

        topo = Topology()
        s = topo.source([])
        tester = Tester(topo)
        tester.tuple_count(s, 0, exact=False)
        tester.test(self.test_ctxtype, self.test_config)

    def test_checker(self):
        """ Test the per-tuple checker.
        """
        topo = Topology()
        s = topo.source(rands)
        s = s.filter(lambda r: r > 0.8)
        s = s.map(lambda r: r + 7.0)
        tester = Tester(topo)
        tester.tuple_count(s, 200, exact=False)
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            tester.run_for(20)
        tester.tuple_check(s, lambda r: r > 7.8)
        # Ensure we perform dependency checking for the check function
        import fns_test2_test
        tester.tuple_check(s, fns_test2_test.tc_dep)
        tester.test(self.test_ctxtype, self.test_config)

    def test_local_check(self):
        """ Test the at least tuple count.
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            return unittest.skip("Standalone tests don't support local check")
        topo = Topology()
        s = topo.source(rands)
        self.my_local_called = False
        self.tester = Tester(topo)
        self.tester.tuple_count(s, 100, exact=False)
        self.tester.local_check = self.my_local
        self.tester.test(self.test_ctxtype, self.test_config)
        self.assertTrue(self.my_local_called)

    def my_local(self):
        self.assertTrue(hasattr(self.tester, 'submission_result'))
        self.assertTrue(hasattr(self.tester, 'streams_connection'))
        self.assertIs(self.tester.streams_connection,
                      self.tester.submission_result.job.rest_client._sc)
        self.my_local_called = True

    def test_bad_pe(self):
        """Test a failure in a PE is caught as a test failure"""
        topo = Topology()
        s = topo.source(rands)
        # intentional addition of a string with an int
        # to cause a PE failure
        s = s.map(lambda t: t + 'a string')
        tester = Tester(topo)
        tester.tuple_count(s, 0, exact=False)
        tp = tester.test(self.test_ctxtype,
                         self.test_config,
                         assert_on_fail=False)
        self.assertFalse(tp)

    def test_run_for(self):
        topo = Topology()
        s = topo.source([1, 2, 3])
        self.tester = Tester(topo)
        self.tester.tuple_count(s, 3)
        self.tester.run_for(120)
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.rf_start = time.time()
        else:
            self.tester.local_check = self.get_start_time
        self.tester.test(self.test_ctxtype, self.test_config)
        now = time.time()
        test_duration = now - self.rf_start
        self.assertTrue(test_duration >= 120)

    def test_eventual_result_ok(self):
        N = 500000
        topo = Topology()
        s = topo.source(range(N))
        w = s.batch(datetime.timedelta(milliseconds=300))
        a = w.aggregate(lambda t: (len(t), sum(t)))
        tester = Tester(topo)
        tester.tuple_count(s, N)
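        # eventual_result: the checker is called for every tuple on the stream;
        # None means "undecided", a true value marks the condition valid,
        # any other false value fails it.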
        tester.eventual_result(a, _EROK(N))
        # Ensure we perform dependency checking for the check function
        import fns_test2_test
        tester.eventual_result(s, fns_test2_test.tc_dep)
        tester.test(self.test_ctxtype, self.test_config)

    def test_eventual_result_bad(self):
        N = 500000
        topo = Topology()
        s = topo.source(range(N))
        w = s.batch(datetime.timedelta(milliseconds=300))
        a = w.aggregate(lambda t: (len(t), sum(t)))
        tester = Tester(topo)
        tester.tuple_count(s, N)
        tester.eventual_result(a, _EROK(int(N / 4)))
        ok = tester.test(self.test_ctxtype,
                         self.test_config,
                         assert_on_fail=False)
        self.assertFalse(ok)

    def test_count_bad(self):
        N = 10
        topo = Topology()
        s = topo.source(range(N))
        tester = Tester(topo)
        tester.tuple_count(s, N + 1)
        ok = tester.test(self.test_ctxtype,
                         self.test_config,
                         assert_on_fail=False)
        self.assertFalse(ok)

    def test_count_bad_conflicting(self):
        N = 10
        topo = Topology()
        s = topo.source(range(N))
        tester = Tester(topo)
        # Add one that fails and one that never becomes valid
        tester.tuple_count(s.map(), int(N / 2))
        tester.tuple_count(s, N + 1)
        ok = tester.test(self.test_ctxtype,
                         self.test_config,
                         assert_on_fail=False)
        self.assertFalse(ok)

    def get_start_time(self):
        job = self.tester.submission_result.job
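        # submitTime is reported in milliseconds since the epoch; convert to seconds.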
        self.rf_start = job.submitTime / 1000.0
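
# --- Hedged sketches (assumptions, not part of the original test module) ----
# 'rands' and '_EROK' are referenced above but defined elsewhere in the real
# module; minimal implementations consistent with how the tests use them
# could look like this:

import random


def rands():
    """Endless source of random floats in [0, 1) (assumed helper)."""
    while True:
        yield random.random()


class _EROK(object):
    """Assumed eventual-result checker ("Eventual Result OK"): accumulate the
    (count, sum) batch aggregates and become valid once exactly n tuples,
    summing to 0 + 1 + ... + (n - 1), have been seen."""

    def __init__(self, n):
        self.n = n
        self.count = 0
        self.total = 0

    def __call__(self, agg):
        c, s = agg
        self.count += c
        self.total += s
        if self.count < self.n:
            return None  # undecided, keep watching the stream
        # All expected tuples seen: valid only if count and sum both match.
        return self.count == self.n and self.total == self.n * (self.n - 1) // 2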
Example #38
class Test(unittest.TestCase):
    """ Test invocations of composite operators in local Streams instance """
    @classmethod
    def setUpClass(self):
        print(str(self))
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        self.streams_install = os.environ.get('STREAMS_INSTALL')
        if self.streams_install is not None:
            self.json_toolkit_location = self.streams_install + '/toolkits/com.ibm.streamsx.json'
        else:
            self.json_toolkit_location = None

    def setUp(self):
        Tester.setup_distributed(self)
        self.isCloudTest = False
        if os.environ.get('STREAMSX_AVRO_TOOLKIT') is None:
            self.avro_toolkit_location = "../../com.ibm.streamsx.avro"
        else:
            self.avro_toolkit_location = os.environ.get(
                'STREAMSX_AVRO_TOOLKIT')

    def _add_toolkits(self, topo, test_toolkit):
        tk.add_toolkit(topo, test_toolkit)
        if self.avro_toolkit_location is not None:
            tk.add_toolkit(topo, self.avro_toolkit_location)
        if self.json_toolkit_location is not None:
            tk.add_toolkit(topo, self.json_toolkit_location)

    def _build_launch_app(self, name, composite_name, parameters,
                          num_result_tuples, test_toolkit):
        print("------ " + name + " ------")
        topo = Topology(name)
        self._add_toolkits(topo, test_toolkit)

        params = parameters
        # Call the test composite
        test_op = op.Source(topo,
                            composite_name,
                            'tuple<rstring result>',
                            params=params)
        self.tester = Tester(topo)
        self.tester.run_for(30)
        self.tester.tuple_count(test_op.stream, num_result_tuples, exact=False)

        cfg = {}

        # change trace level
        job_config = streamsx.topology.context.JobConfig(tracing='warn')
        job_config.add(cfg)

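        # Local / on-prem test instances typically present self-signed certificates,
        # so REST certificate verification is only enforced for the cloud runs.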
        if ("Cloud" not in str(self)):
            cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False

        # Run the test
        test_res = self.tester.test(self.test_ctxtype,
                                    cfg,
                                    assert_on_fail=True,
                                    always_collect_logs=True)
        print(str(self.tester.result))
        assert test_res, name + " FAILED (" + \
            self.tester.result["application_logs"] + ")"

    # ------------------------------------

    def test_avro_sample(self):
        self._build_launch_app(
            "test_avro_sample",
            "com.ibm.streamsx.avro.sample::AvroJSONSampleComp", {}, 1,
            'avro_test')
Example #39
class Test(unittest.TestCase):
    """ Test invocations of composite operators in local Streams instance """
    @classmethod
    def setUpClass(self):
        print(str(self))
        print("Setup Elasticsearch client ...")
        # ES client expects ES_URL environment variable with URL to Compose Elasticsearch service, e.g. https://user:password@host:port/
        es_url = os.environ['ES_URL']
        print(str(es_url))
        self._es = Elasticsearch([es_url], verify_certs=False)
        self._indexName = 'test-index-cloud'
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        creds = urlparse(es_url)
        self._es_user_name = creds.username
        self._es_password = creds.password
        self._es_node_list = creds.hostname + ':' + str(creds.port)

    def setUp(self):
        Tester.setup_distributed(self)
        self.elasticsearch_toolkit_location = "../../com.ibm.streamsx.elasticsearch"

    def tearDown(self):
        self._es.indices.delete(index=self._indexName, ignore=[400, 404])

    def _add_toolkits(self, topo, test_toolkit):
        tk.add_toolkit(topo, test_toolkit)
        if self.elasticsearch_toolkit_location is not None:
            tk.add_toolkit(topo, self.elasticsearch_toolkit_location)

    def _build_launch_app(self,
                          name,
                          composite_name,
                          parameters,
                          num_result_tuples,
                          test_toolkit,
                          exact=True,
                          run_for=60,
                          resets=0):
        print("------ " + name + " ------")
        topo = Topology(name)
        self._add_toolkits(topo, test_toolkit)

        params = parameters
        # Call the test composite
        test_op = op.Source(topo,
                            composite_name,
                            'tuple<rstring result>',
                            params=params)
        self.tester = Tester(topo)
        self.tester.run_for(run_for)
        if resets > 0:
            # minimum number of resets for each region; requires v1.11 of the topology toolkit
            self.tester.resets(resets)
        self.tester.tuple_count(test_op.stream, num_result_tuples, exact=exact)

        cfg = {}

        # change trace level
        job_config = streamsx.topology.context.JobConfig(tracing='info')
        job_config.add(cfg)

        if ("TestCloud" not in str(self)):
            cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False

        # Run the test
        test_res = self.tester.test(self.test_ctxtype,
                                    cfg,
                                    assert_on_fail=True,
                                    always_collect_logs=True)
        print(str(self.tester.result))
        assert test_res, name + " FAILED (" + \
            self.tester.result["application_logs"] + ")"

    def _validate_count(self, indexName, expectedNum):
        # check the count
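        # (doc_type targets 6.x-era Elasticsearch clients; newer elasticsearch-py
        # releases deprecate and eventually drop the parameter)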
        count = self._es.count(index=indexName,
                               doc_type='_doc',
                               body={"query": {"match_all": {}}})
        print("Count: " + str(count['count']))
        assert count['count'] == expectedNum, \
            "Wrong tuple count (expected=" + str(expectedNum) + "): " + str(count['count'])

    def _run_shell_command_line(self, command):
        process = Popen(command,
                        universal_newlines=True,
                        shell=True,
                        stdout=PIPE,
                        stderr=PIPE)
        stdout, stderr = process.communicate()
        return stdout, stderr, process.returncode

    def _create_app_config(self):
        if ("TestICP" in str(self) or "TestCloud" in str(self)):
            print("Ensure that application configuration 'es' is created.")
        else:
            if streams_install_env_var():
                print(
                    "Create elasticsearch application configuration with streamtool"
                )
                this_dir = os.path.dirname(os.path.realpath(__file__))
                app_dir = this_dir + '/es_test'
                stdout, stderr, err = self._run_shell_command_line(
                    'export ES_NODES=' + self._es_node_list + ';' +
                    'export ES_USER=' + self._es_user_name + ';' +
                    'export ES_PASSWORD=' + self._es_password + ';' +
                    'cd ' + app_dir + '; make configure')
                print(str(err))

    # ------------------------------------

    # CONSISTENT REGION test with TopologyTester:
    # Resets triggered by ConsistentRegionResetter and Beacon re-submits the tuples
    def test_consistent_region_with_resets(self):
        self._indexName = 'test-index-cr'
        self._create_app_config()
        # delete index before launching Streams job
        self._es.indices.delete(index=self._indexName, ignore=[400, 404])
        numResets = 3
        runFor = 150
        numTuples = 300  # num generated tuples
        drainPeriod = 5.0
        self._build_launch_app(
            "test_consistent_region_with_resets",
            "com.ibm.streamsx.elasticsearch.test::TestConsistentRegionAppConfig",
            {
                'indexName': self._indexName,
                'drainPeriod': drainPeriod,
                'numTuples': numTuples
            }, numTuples, 'es_test', False, runFor, numResets)
        self._validate_count(self._indexName, numTuples)

    # ------------------------------------

    def test_bulk(self):
        self._indexName = 'test-index-bulk'
        self._create_app_config()
        # delete index before launching Streams job
        self._es.indices.delete(index=self._indexName, ignore=[400, 404])
        numTuples = 20000  # num generated tuples
        bulkSize = 1000
        self._build_launch_app(
            "test_bulk", "com.ibm.streamsx.elasticsearch.test::TestBulk", {
                'indexName': self._indexName,
                'numTuples': numTuples,
                'bulkSize': bulkSize
            }, numTuples, 'es_test')
        self._validate_count(self._indexName, numTuples)
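
# A minimal sketch (assumption) of the helper used by _create_app_config above:
# it only reports whether a local Streams install is available for streamtool.
import os


def streams_install_env_var():
    return os.environ.get('STREAMS_INSTALL') is not None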
Example #40
class TestTester(unittest.TestCase):
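    # Hint for nose's multiprocess plugin: tests in this class may be split
    # across worker processes.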
    _multiprocess_can_split_ = True

    def setUp(self):
        Tester.setup_standalone(self)

    def test_at_least(self):
        """ Test the at least tuple count.
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.skipTest("Standalone tests must complete")

        topo = Topology()
        s = topo.source(rands)
        tester = Tester(topo)
        tester.tuple_count(s, 100, exact=False)
        tester.test(self.test_ctxtype, self.test_config)

    def test_no_tuples(self):
        """ Test exact count with zero tuples.
        """
        topo = Topology()
        s = topo.source([])
        tester = Tester(topo)
        tester.tuple_count(s, 0)
        tester.test(self.test_ctxtype, self.test_config)

    def test_at_least_no_tuples(self):
        """ Test at least count with zero tuples. 
            (kind of a pointless condition, always true).
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.skipTest("Standalone tests must complete")

        topo = Topology()
        s = topo.source([])
        tester = Tester(topo)
        tester.tuple_count(s, 0, exact=False)
        tester.test(self.test_ctxtype, self.test_config)

    def test_checker(self):
        """ Test the per-tuple checker.
        """
        topo = Topology()
        s = topo.source(rands)
        s = s.filter(lambda r: r > 0.8)
        s = s.map(lambda r: r + 7.0)
        tester = Tester(topo)
        tester.tuple_count(s, 200, exact=False)
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            tester.run_for(20)
        tester.tuple_check(s, lambda r: r > 7.8)
        tester.test(self.test_ctxtype, self.test_config)

    def test_local_check(self):
        """ Test the at least tuple count.
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.skipTest("Standalone tests don't support local check")
        topo = Topology()
        s = topo.source(rands)
        self.my_local_called = False
        self.tester = Tester(topo)
        self.tester.tuple_count(s, 100, exact=False)
        self.tester.local_check = self.my_local
        self.tester.test(self.test_ctxtype, self.test_config)
        self.assertTrue(self.my_local_called)

    def my_local(self):
        self.assertTrue(hasattr(self.tester, 'submission_result'))
        self.assertTrue(hasattr(self.tester, 'streams_connection'))
        self.my_local_called = True

    def test_bad_pe(self):
        """Test a failure in a PE is caught as a test failure"""
        topo = Topology()
        s = topo.source(rands)
        # intentional addition of a string with an int
        # to cause a PE failure
        s = s.map(lambda t: t + 'a string')
        tester = Tester(topo)
        tester.tuple_count(s, 0, exact=False)
        tp = tester.test(self.test_ctxtype,
                         self.test_config,
                         assert_on_fail=False)
        self.assertFalse(tp)

    def test_run_for(self):
        topo = Topology()
        s = topo.source([1, 2, 3])
        self.tester = Tester(topo)
        self.tester.tuple_count(s, 3)
        self.tester.run_for(120)
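        # Standalone runs in-process, so note the local start time directly;
        # distributed runs read the job's submit time via local_check instead.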
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.rf_start = time.time()
        else:
            self.tester.local_check = self.get_start_time
        self.tester.test(self.test_ctxtype, self.test_config)
        now = time.time()
        test_duration = now - self.rf_start
        self.assertTrue(test_duration >= 120)

    def get_start_time(self):
        job = self.tester.submission_result.job
        self.rf_start = job.submitTime / 1000.0
Example #41
class TestTester(unittest.TestCase):
    def setUp(self):
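        # Build and run each topology as a self-contained standalone bundle.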
        Tester.setup_standalone(self)

    def test_at_least(self):
        """ Test the at least tuple count.
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.skipTest("Standalone tests must complete")

        topo = Topology()
        s = topo.source(rands)
        tester = Tester(topo)
        tester.tuple_count(s, 100, exact=False)
        tester.test(self.test_ctxtype, self.test_config)

    def test_no_tuples(self):
        """ Test exact count with zero tuples.
        """
        topo = Topology()
        s = topo.source([])
        tester = Tester(topo)
        tester.tuple_count(s, 0)
        tester.test(self.test_ctxtype, self.test_config)

    def test_at_least_no_tuples(self):
        """ Test at least count with zero tuples. 
            (kind of a pointless condition, always true).
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.skipTest("Standalone tests must complete")

        topo = Topology()
        s = topo.source([])
        tester = Tester(topo)
        tester.tuple_count(s, 0, exact=False)
        tester.test(self.test_ctxtype, self.test_config)

    def test_checker(self):
        """ Test the per-tuple checker.
        """
        topo = Topology()
        s = topo.source(rands)
        s = s.filter(lambda r : r > 0.8)
        s = s.map(lambda r : r + 7.0 )
        tester = Tester(topo)
        tester.tuple_count(s, 200, exact=False)
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            tester.run_for(20)
        tester.tuple_check(s, lambda r : r > 7.8)
        tester.test(self.test_ctxtype, self.test_config)

    def test_local_check(self):
        """ Test the at least tuple count.
        """
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.skipTest("Standalone tests don't support local check")
        topo = Topology()
        s = topo.source(rands)
        self.my_local_called = False
        self.tester = Tester(topo)
        self.tester.tuple_count(s, 100, exact=False)
        self.tester.local_check = self.my_local
        self.tester.test(self.test_ctxtype, self.test_config)
        self.assertTrue(self.my_local_called)

    def my_local(self):
        self.assertTrue(hasattr(self.tester, 'submission_result'))
        self.assertTrue(hasattr(self.tester, 'streams_connection'))
        self.my_local_called = True

    def test_bad_pe(self):
        """Test a failure in a PE is caught as a test failure"""
        topo = Topology()
        s = topo.source(rands)
        # intentional addition of a string with an int
        # to cause a PE failure
        s = s.map(lambda t : t + 'a string')
        tester = Tester(topo)
        tester.tuple_count(s, 0, exact=False)
        tp = tester.test(self.test_ctxtype, self.test_config, assert_on_fail=False)
        self.assertFalse(tp)

    def test_run_for(self):
        topo = Topology()
        s = topo.source([1,2,3])
        self.tester = Tester(topo)
        self.tester.tuple_count(s, 3)
        self.tester.run_for(120)
        if self.test_ctxtype == context.ContextTypes.STANDALONE:
            self.rf_start = time.time()
        else:
            self.tester.local_check = self.get_start_time
        self.tester.test(self.test_ctxtype, self.test_config)
        now = time.time()
        test_duration = now - self.rf_start
        self.assertTrue(test_duration >= 120)

    def get_start_time(self):
        job = self.tester.submission_result.job
        self.rf_start = job.submitTime / 1000.0