Example #1
    def test_buildonly_produce(self):
        txtmsg_schema = StreamSchema('tuple<rstring msg>')
        errmsg_schema = StreamSchema('tuple<rstring errorMessage>')

        java_class_lib_paths = []
        java_class_lib_paths.append("./streamsx/jms/tests/libs/activemq/lib")
        java_class_lib_paths.append(
            "./streamsx/jms/tests/libs/activemq/lib/optional")

        path_to_connection_doc = "./streamsx/jms/tests/connectionDocument.xml"  # tests are supposed to be run from the package directory

        topo = Topology('buildonly_produce')
        toolkit.add_toolkit(topo, "../../streamsx.jms/com.ibm.streamsx.jms")
        txtmsg_source = Source(topo,
                               'spl.utility::Beacon',
                               txtmsg_schema,
                               params={'period': 0.3},
                               name="DataGenerator")
        txtmsg_source.msg = txtmsg_source.output(
            '"Message #" + (rstring)IterationCount()')
        txtmsg_stream = txtmsg_source.stream
        txtmsg_stream.print()
        errmsg_stream = jms.produce(stream=txtmsg_stream,
                                    schema=errmsg_schema,
                                    java_class_libs=java_class_lib_paths,
                                    connection="localActiveMQ",
                                    access="accessToTextMessages",
                                    connection_document=path_to_connection_doc,
                                    name="JMS_Producer")
        errmsg_stream.print()
        self._build_only('test_buildonly_produce', topo)
Example #2
    def test_window_spl_aggregate(self):
        topo = Topology('test_window_spl_aggregate')

        ts1 = Timestamp(1608196, 235000000, 0)
        ts2 = Timestamp(1608199, 876265298, 0)
        ts3 = Timestamp(1608506, 123456078, 0)
        ts4 = Timestamp(1608507, 654326980, 0)
        ts_schema = StreamSchema('tuple<int64 num, timestamp ts>').as_tuple(
            named=True)
        s = topo.source([(1, ts1), (2, ts2), (3, ts3), (4, ts4)])

        # map() named "event_time_source" transforms to structured schema
        s = s.map(lambda x: x, schema=ts_schema, name='event_time_source')
        # add event-time annotation for attribute ts to the "event_time_source"
        s = s.set_event_time('ts')

        agg_schema = StreamSchema(
            'tuple<uint64 sum, rstring pane_timing>').as_tuple(named=True)
        # timeInterval window
        win = s.time_interval(interval_duration=5.0, discard_age=30.0)
        agg = op.Map('spl.relational::Aggregate', win, schema=agg_schema)
        agg.sum = agg.output('Sum((uint64)num)')
        agg.pane_timing = agg.output('(rstring)paneTiming()')
        #agg.stream.print()

        result = agg.stream.map(lambda x: x.sum)
        result_pane_timing = agg.stream.map(lambda x: x.pane_timing)

        tester = Tester(topo)
        tester.tuple_count(result, 2)
        tester.contents(result, [3, 7])
        tester.contents(result_pane_timing,
                        ['paneOnComplete', 'paneOnComplete'])
        tester.test(self.test_ctxtype, self.test_config)
Example #3
    def test_mixed_types(self):
        creds_file = os.environ['DB2_CREDENTIALS']
        with open(creds_file) as data_file:
            credentials = json.load(data_file)

        topo = Topology('test_mixed_types')
        if self.jdbc_toolkit_home is not None:
            streamsx.spl.toolkit.add_toolkit(topo, self.jdbc_toolkit_home)

        pulse = op.Source(topo, "spl.utility::Beacon", 'tuple<rstring A, rstring B>', params = {'iterations':1})
        pulse.A = pulse.output('"hello"')
        pulse.B = pulse.output('"world"')

        sample_schema = StreamSchema('tuple<rstring A, rstring B>')
        query_schema = StreamSchema('tuple<rstring sql>')

        sql_create = 'CREATE TABLE RUN_SAMPLE (A CHAR(10), B CHAR(10))'
        create_table = db.run_statement(pulse.stream, credentials, schema=sample_schema, sql=sql_create)
 
        sql_insert = 'INSERT INTO RUN_SAMPLE (A, B) VALUES (?, ?)'
        inserts = db.run_statement(create_table, credentials, schema=sample_schema, sql=sql_insert, sql_params="A, B")

        query = op.Map('spl.relational::Functor', inserts, schema=query_schema)
        query.sql = query.output('"SELECT A, B FROM RUN_SAMPLE"')

        res_sql = db.run_statement(query.stream, credentials, schema=sample_schema, sql_attribute='sql')
        res_sql.print()

        sql_drop = 'DROP TABLE RUN_SAMPLE'
        drop_table = db.run_statement(res_sql, credentials, sql=sql_drop)

        tester = Tester(topo)
        tester.tuple_count(drop_table, 1)
        #tester.run_for(60)
        tester.test(self.test_ctxtype, self.test_config)
Example #4
    def test_timestamp_event_time_attribute(self):
        topo = Topology('test_timestamp_event_time_attribute')

        ts1 = Timestamp(1608196, 235000000, 0)
        ts2 = Timestamp(1608199, 876265298, 0)
        ts_schema = StreamSchema('tuple<int64 num, timestamp ts>').as_tuple(
            named=True)
        s = topo.source([(1, ts1), (2, ts2)])

        # map() named "event_time_source" transforms to structured schema
        s = s.map(lambda x: x, schema=ts_schema, name='event_time_source')
        # add event-time annotation for attribute ts to the "event_time_source"
        s = s.set_event_time('ts')

        # use SPL function getEventTime() to get the event time of the input tuple
        # copies the event-time timestamp value to a new output attribute "eventtime"
        f = op.Map('spl.relational::Functor',
                   s,
                   schema=StreamSchema('tuple<timestamp eventtime>').as_tuple(
                       named=True))
        f.eventtime = f.output('getEventTime(event_time_source)')

        # map to Python schema (prepare for content comparison)
        as_ts = f.stream.map(lambda x: x.eventtime)

        tester = Tester(topo)
        tester.tuple_count(s, 2)
        tester.contents(as_ts, [ts1, ts2])
        tester.test(self.test_ctxtype, self.test_config)
Example #5
    def generateSPLOperator(self):
        _op = {}
        _op["name"] = self.name

        _op["kind"] = self.kind
        _op["partitioned"] = False
        if self._start_op:
            _op["startOp"] = True

        _outputs = []
        _inputs = []

        for output in self.outputPorts:
            _outputs.append(output.getSPLOutputPort())

        for input in self.inputPorts:
            _inputs.append(input.getSPLInputPort())
        _op["outputs"] = _outputs
        _op["inputs"] = _inputs
        _op["config"] = {}
        _op["config"]["streamViewability"] = self.viewable
        _op["config"]["viewConfigs"] = self.view_configs
        if self._placement:
            _op["config"]["placement"] = self._placement
            if 'resourceTags' in self._placement:
                # Convert the set to a list for JSON
                tags = _op['config']['placement']['resourceTags']
                _op['config']['placement']['resourceTags'] = list(tags)

        # Add parameters as their string representation
        # unless the value has a spl_json() function,
        # in which case use that
        _params = {}

        # Fix up any pending streams for input style
        if 'pyStyle' in self.params and 'pending' == self.params['pyStyle']\
                and self.kind.startswith('com.ibm.streamsx.topology.functional.python'):
            StreamSchema._fnop_style(self.inputPorts[0].schema, self, 'pyStyle')

        for name in self.params:
            param = self.params[name]
            try:
                _params[name] = param.spl_json()
            except:
                _value = {}
                _value["value"] = param
                _params[name] = _value
        _op["parameters"] = _params

        if self.sl is not None:
            _op['sourcelocation'] = self.sl.spl_json()

        if self._layout_hints:
            _op['layout'] = self._layout_hints

        # Callout to allow an ExtensionOperator
        # to augment the JSON
        if hasattr(self, '_ex_op'):
            self._ex_op._generate(_op)
        return _op
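The parameter loop above relies on duck typing: a value that provides an spl_json() method serializes itself, and any other value is wrapped in a plain {"value": ...} object. Below is a standalone sketch of that pattern; the names SPLExpression and serialize_params are hypothetical illustration names, not topology API, and the sketch narrows the bare except above to AttributeError.

class SPLExpression:
    """Hypothetical value type that knows how to render itself."""
    def __init__(self, expr):
        self.expr = expr

    def spl_json(self):
        return {'type': 'splexpr', 'value': self.expr}

def serialize_params(params):
    out = {}
    for name, value in params.items():
        try:
            out[name] = value.spl_json()    # duck-typed path
        except AttributeError:
            out[name] = {'value': value}    # plain values are wrapped
    return out

# A plain int is wrapped; the expression renders itself:
# {'iterations': {'value': 10}, 'period': {'type': 'splexpr', 'value': '0.1'}}
print(serialize_params({'iterations': 10, 'period': SPLExpression('0.1')}))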
Example #6
def data_source(topo, schema):
    input_file = 'TradesAndQuotes.csv.gz'    
    sample_file = os.path.join(script_dir, input_file)
    topo.add_file_dependency(sample_file, 'etc') # add sample file to etc dir in bundle
    fn = os.path.join('etc', input_file) # file name relative to application dir
    s = topo.source(files.CSVReader(schema=schema, file=fn, compression=Compression.gzip.name))
    # add event-time
    TQRecTWithEvTime = StreamSchema(schema).extend(StreamSchema('tuple<timestamp evTime>'))
    fo = R.Functor.map(s, TQRecTWithEvTime)     
    fo.evTime = fo.output(fo.outputs[0], op.Expression.expression('timeStringToTimestamp(date, time, false)'))
    ev_stream = fo.outputs[0]
    ev_stream = ev_stream.set_event_time('evTime')
    return ev_stream
Example #7
    def test_dir_scan(self):
        topo = Topology()
        script_dir = os.path.dirname(os.path.realpath(__file__))
        sample_file = os.path.join(script_dir, 'data.csv')
        topo.add_file_dependency(sample_file,
                                 'etc')  # add sample file to etc dir in bundle
        fn = os.path.join('etc',
                          'data.csv')  # file name relative to application dir
        dir = streamsx.spl.op.Expression.expression('getApplicationDir()+"/etc"')
        scanned = topo.source(
            files.DirectoryScan(directory=dir, pattern=r'.*\.csv$'))
        r = scanned.map(
            files.CSVFilesReader(file_name='filename'),
            schema=StreamSchema('tuple<rstring a, int32 b, rstring filename>'))
        r.print()

        #result = streamsx.topology.context.submit("TOOLKIT", topo.graph) # creates tk* directory
        #print('(TOOLKIT):' + str(result))
        #assert(result.return_code == 0)
        result = streamsx.topology.context.submit(
            "BUNDLE", topo.graph)  # creates sab file
        assert (result.return_code == 0)
        os.remove(result.bundlePath)
        os.remove(result.jobConfigPath)
Example #8
def _create_stream_for_get(topo):
    s = topo.source([1, 2, 3, 4, 5, 6])
    schema = StreamSchema(
        'tuple<int32 id, rstring who, rstring infoType, rstring requestedDetail>'
    ).as_tuple()
    return s.map(lambda x: (x, 'Gandalf', 'location', 'beginTwoTowers'),
                 schema=schema)
Example #9
    def test_spl_ops(self):
        topo = Topology()
        streamsx.spl.toolkit.add_toolkit(topo, TestEmSPL._TK)

        context = _rand_path()
        name = _rand_path()
        schema = StreamSchema('tuple<int32 a, rstring b, boolean c>')

        params = {}
        params['port'] = 0
        params['context'] = context
        if self._monitor:
            params['sslAppConfigName'] = self._monitor + '-streams-certs'

        inject = op.Source(topo,
                           'com.ibm.streamsx.inet.rest::HTTPTupleInjection',
                           schema, params, name)

        self._path = '/' + context + '/' + name + '/ports/output/0/inject'
        self._alias = '/' + context + '/' + name + '/inject'

        self.tester = Tester(topo)
        self.tester.local_check = self._form_inject
        self.tester.contents(inject.stream, [{
            'a': 42,
            'b': 'HHGTTG',
            'c': True
        }, {
            'a': 93,
            'b': 'ABCDE',
            'c': False
        }])
        self.tester.test(self.test_ctxtype, self.test_config)

        self._check_no_endpoint()
Example #10
    def _create_stream(self, topo):
        s = topo.source([
            1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
            20
        ])
        schema = StreamSchema('tuple<int32 id, rstring name>').as_tuple()
        return s.map(lambda x: (x, 'X' + str(x * 2)), schema=schema)
Example #11
    def test_schemas_bad(self):
        topo = Topology()
        pyObjStream = topo.source(['Hello', 'World!'])
        binStream = pyObjStream.map(func=lambda s: bytes("ABC", 'utf-8'),
                                    schema=CommonSchema.Binary)
        xmlStream = pyObjStream.map(schema=CommonSchema.XML)
        binMsgMetaStream = pyObjStream.map(func=lambda s: {
            'message': bytes(s, 'utf-8'),
            'key': s
        },
                                           schema=MsgSchema.BinaryMessageMeta)
        strMsgMetaStream = pyObjStream.map(func=lambda s: {
            'message': s,
            'key': s
        },
                                           schema=MsgSchema.StringMessageMeta)
        otherSplTupleStream1 = pyObjStream.map(
            schema=StreamSchema('tuple<int32 a>'))
        otherSplTupleStream2 = pyObjStream.map(schema='tuple<int32 a>')

        self.assertRaises(TypeError, evstr.publish, pyObjStream, "Topic")
        self.assertRaises(TypeError, evstr.publish, binStream, "Topic")
        self.assertRaises(TypeError, evstr.publish, xmlStream, "Topic")
        self.assertRaises(TypeError, evstr.publish, binMsgMetaStream, "Topic")
        self.assertRaises(TypeError, evstr.publish, strMsgMetaStream, "Topic")
        self.assertRaises(TypeError, evstr.publish, otherSplTupleStream1,
                          "Topic")
        self.assertRaises(TypeError, evstr.publish, otherSplTupleStream2,
                          "Topic")
Example #12
    def test_insert_with_app_config(self):
        print('\n---------' + str(self))
        self._create_app_config()
        topo = Topology('test_insert_with_app_config')
        self._add_toolkits(topo, None)
        topo.add_pip_package('streamsx.eventstore')
        s = self._create_stream(topo)
        result_schema = StreamSchema(
            'tuple<int32 id, rstring name, boolean _Inserted_>')
        res = es.insert(
            s,
            config='eventstore',
            table='SampleTable',
            schema=result_schema,
            primary_key='id',
            partitioning_key='id',
            front_end_connection_flag=self.front_end_connection_flag,
            truststore=self.es_truststore,
            keystore=self.es_keystore)
        res.print()

        tester = Tester(topo)
        tester.run_for(120)
        tester.tuple_count(res, 20, exact=True)

        cfg = {}
        job_config = streamsx.topology.context.JobConfig(tracing='info')
        job_config.add(cfg)
        cfg[streamsx.topology.context.ConfigParams.SSL_VERIFY] = False
        tester.test(self.test_ctxtype, cfg, always_collect_logs=True)
        print(str(tester.result))
Example #13
    def test_score_with_feed_on_second_input_port(self):
        print('\n---------' + str(self))
        name = 'test_score_with_feed_on_second_input_port'
        topo = Topology(name)
        streamsx.spl.toolkit.add_toolkit(topo, self.pmml_toolkit_home)

        credentials = self._get_credentials()
        models = pmml.model_feed(topo,
                                 connection_configuration=credentials,
                                 model_name="sample_pmml",
                                 polling_period=datetime.timedelta(minutes=5))
        # sample with a single model predictor field
        s = topo.source(['first tuple', 'second tuple']).as_string()
        out_schema = StreamSchema('tuple<rstring string, rstring result>')
        res = pmml.score(
            s,
            schema=out_schema,
            model_input_attribute_mapping='p=string',
            model_stream=models,
            raw_result_attribute_name='result',
            initial_model_provisioning_timeout=datetime.timedelta(minutes=1))
        res.print()

        if (("TestDistributed" in str(self))
                or ("TestStreamingAnalytics" in str(self))):
            self._launch(topo)
        else:
            # build only
            self._build_only(name, topo)
Example #14
    def test_MQTTSource_schemas(self):
        s = MQTTSource(server_uri='tcp://server:1833', topics='topic1', schema=CommonSchema.Json, data_attribute_name='ignored')
        # topology.source() calls our populate()
        Topology().source(s)
        self.assertEqual(s._op.params['dataAttributeName'], 'jsonString')
        
        s = MQTTSource(server_uri='tcp://server:1833', topics='topic1', schema=CommonSchema.String, data_attribute_name='ignored')
        Topology().source(s)
        self.assertEqual(s._op.params['dataAttributeName'], 'string')
        
        s = MQTTSource(server_uri='tcp://server:1833', topics='topic1', schema=CommonSchema.Binary, data_attribute_name='ignored')
        Topology().source(s)
        self.assertEqual(s._op.params['dataAttributeName'], 'binary')
        
        s = MQTTSource(server_uri='tcp://server:1833', topics='topic1', schema=MqttDataTuple)
        Topology().source(s)
        self.assertNotIn('dataAttributeName', s._op.params)
        
        s = MQTTSource(server_uri='tcp://server:1833', topics='topic1', schema=[MqttDataTuple], data_attribute_name='data')
        Topology().source(s)
        self.assertEqual(s._op.params['dataAttributeName'], 'data')

        s = MQTTSource(server_uri='tcp://server:1833', topics='topic1', schema='tuple<rstring data>')
        Topology().source(s)
        self.assertNotIn('dataAttributeName', s._op.params)

        s = MQTTSource(server_uri='tcp://server:1833', topics='topic1', schema=StreamSchema('tuple<rstring data>'))
        Topology().source(s)
        self.assertNotIn('dataAttributeName', s._op.params)
Example #15
    def test_spl_default(self):
        """
        Test passing a submission parameter with a default value using SPL.
        """
        N = 27
        G = 'hey there'
        t = ''.join(random.choice('0123456789abcdef') for x in range(20))
        topic = 'topology/test/python/' + t

        topo = Topology()
        spGreet = topo.create_submission_parameter('greeting', default=G)
        self.assertIsNone(spGreet())

        sch = StreamSchema('tuple<uint64 seq, rstring s>')
        b = op.Source(topo,
                      "spl.utility::Beacon",
                      sch,
                      params={
                          'initDelay': 10.0,
                          'period': 0.02,
                          'iterations': N
                      })
        b.seq = b.output('IterationCount()')
        b.s = b.output(spGreet)

        tester = Tester(topo)
        tester.tuple_count(b.stream, N)
        tester.contents(b.stream, [{'seq': i, 's': G} for i in range(N)])
        tester.test(self.test_ctxtype, self.test_config)
Example #16
    def test_transform_two_outputs(self):
        topo = Topology()
        s = topo.source(U.Sequence(iterations=10))
        fo = R.Functor.map(s, [
            StreamSchema('tuple<uint64 seq>'),
            StreamSchema('tuple<timestamp ts>')
        ])
        seq = fo.outputs[0]
        ts = fo.outputs[1]
        seq.print()
        ts.print()

        tester = Tester(topo)
        tester.tuple_count(seq, 10)
        tester.tuple_count(ts, 10)
        tester.test(self.test_ctxtype, self.test_config)
Example #17
    def test_as_tuple_map_to_schema(self):
        topo = Topology()
        s = self._create_stream(topo)
        st = s.map(check_is_tuple_map_to_schema, schema=StreamSchema('tuple<int32 y, rstring txt>'))

        tester = Tester(topo)
        tester.contents(st, [{'y':13, 'txt':'2Hi!-MapSPL'}, {'y':26, 'txt':'4Hi!-MapSPL'}, {'y':39, 'txt':'6Hi!-MapSPL'}])
        tester.test(self.test_ctxtype, self.test_config)
Example #18
    def test_transform_schema_two_outputs(self):
        topo = Topology()
        s = topo.source(U.Sequence(iterations=2))
        fo = R.Functor.map(s, [
            StreamSchema('tuple<uint64 seq, rstring a>'),
            StreamSchema('tuple<timestamp ts, int32 b>')
        ])
        fo.a = fo.output(fo.outputs[0], '"string value"')
        fo.b = fo.output(fo.outputs[1], 99)
        a = fo.outputs[0]
        b = fo.outputs[1]
        a.print()
        b.print()

        tester = Tester(topo)
        tester.tuple_count(a, 2)
        tester.tuple_count(b, 2)
        tester.test(self.test_ctxtype, self.test_config)
Example #19
    def test_union(self):
        topo = Topology()
        s = U.sequence(topo, iterations=932)
        A = U.SEQUENCE_SCHEMA.extend(StreamSchema('tuple<int32 a, int32 c>'))
        B = U.SEQUENCE_SCHEMA.extend(StreamSchema('tuple<int32 c, int32 b>'))
        F = U.SEQUENCE_SCHEMA.extend(StreamSchema('tuple<int32 c>'))
        r0 = s.map(lambda t : (t['seq'], t['ts'], 89, t['seq']+19), schema=A)
        r1 = s.map(lambda t : (t['seq'], t['ts'], t['seq']+5, 32), schema=B)

        r = U.union([r0,r1], schema=F)

        r19 = r.filter(lambda t : t['c'] == t['seq'] + 19)
        r5 = r.filter(lambda t : t['c'] == t['seq'] + 5)
        tester = Tester(topo)
        tester.tuple_count(r, 932*2)
        tester.tuple_count(r19, 932)
        tester.tuple_count(r5, 932)
        tester.test(self.test_ctxtype, self.test_config)
Example #20
    def test_sequence_period(self):
        topo = Topology()
        s = U.sequence(topo, iterations=67, period=0.1)
        E = U.SEQUENCE_SCHEMA.extend(StreamSchema('tuple<float64 d>'))

        s = s.map(_Delta(), schema=E)
        tester = Tester(topo)
        tester.tuple_check(s, lambda x: x['d'] > 0.08)
        tester.tuple_count(s, 67-1)
        tester.test(self.test_ctxtype, self.test_config)
Example #21
    def test_structured_as_named_tuple(self):
        schema = StreamSchema("tuple<rstring a, int32 b>").as_tuple(named=True)
        topo = Topology()
        s = topo.source([('a',1),('b', 7),('c', 2),('d', 9)])
        s = s.map(lambda x: x, schema = schema)

        s = s.last(3).trigger(2).aggregate(lambda items: (items[1].a, items[0].b))

        tester = Tester(topo)
        tester.contents(s, [('b',1), ('c',7)] )
        tester.test(self.test_ctxtype, self.test_config)
Example #22
    def test_basic_stream_schema_injection(self):
        name = 'test_basic_stream_schema_injection'
        topo = Topology(name)
        res = endpoint.inject(
            topo,
            name='jsoninject',
            schema=StreamSchema('tuple<int32 a, boolean alert>'),
            monitor=None,
            context='sample')
        res.print()
        self._build_only(name, topo)
Example #23
def _create_stream_for_put(topo):
    # create 6 rows
    s = topo.source([1, 2, 3, 4, 5, 6])
    timeStamp = _get_timestamp()
    schema = StreamSchema(
        'tuple<int32 id, rstring character, rstring colF, rstring colQ, rstring value, int64 Timestamp>'
    ).as_tuple()
    return s.map(lambda x:
                 (x, 'Gandalf_' + str(x), 'location', 'beginTwoTowers',
                  'travelling_' + str(x), timeStamp + x),
                 schema=schema)
Example #24
    def test_transform_schema(self):
        topo = Topology()
        s = topo.source(U.Sequence(iterations=10))
        A = U.SEQUENCE_SCHEMA.extend(StreamSchema('tuple<rstring a>'))
        fo = R.Functor.map(s, A)
        fo.a = fo.output(fo.outputs[0], '"string value"')
        r = fo.outputs[0]
        r.print()

        tester = Tester(topo)
        tester.tuple_count(r, 10)
        tester.test(self.test_ctxtype, self.test_config)
Example #25
    def test_transform_filter(self):
        topo = Topology()
        s = topo.source(U.Sequence(iterations=5))
        fo = R.Functor.map(s,
                           StreamSchema('tuple<uint64 seq>'),
                           filter='seq>=2ul')
        r = fo.outputs[0]
        r.print()

        tester = Tester(topo)
        tester.tuple_count(r, 3)
        tester.test(self.test_ctxtype, self.test_config)
Example #26
    def test_schemas_bad(self):
        topo = Topology()
        self.assertRaises(TypeError, evstr.subscribe, topo, 'T1',
                          CommonSchema.Python)
        self.assertRaises(TypeError, evstr.subscribe, topo, 'T1',
                          CommonSchema.Binary)
        self.assertRaises(TypeError, evstr.subscribe, topo, 'T1',
                          CommonSchema.XML)
        self.assertRaises(TypeError, evstr.subscribe, topo, 'T1',
                          StreamSchema('tuple<int32 a>'))
        self.assertRaises(TypeError, evstr.subscribe, topo, 'T1',
                          'tuple<int32 a>')
Example #27
class FlighPathEncounterSchema:
    """
    Structured stream schemas for :py:meth:`~streamsx.geospatial.FlightPathEncounter`.
    
    All schemas defined in this class are instances of `streamsx.topology.schema.StreamSchema`.
    
    """

    EncounterEvents = StreamSchema(_SPL_SCHEMA_FLIGHTPATH_ENCOUNTER_EVENT)
    """
    The :py:meth:`~streamsx.geospatial.FlightPathEncounter` creates encounter events as output.
    An encounter consists of the original observation, the data for the encountered object and the distances in time and space between the colliding objects.
    This schema is provided for convenience as it can be used as input and output type for the :py:meth:`~streamsx.geospatial.FlightPathEncounter`. 
    
    The schema defines the following attributes:
    
    * observation(StreamSchema) - The input observation for which encounters are calculated. This is a tuple attribute of type :py:meth:`~streamsx.geospatial.schema.FlighPathEncounterSchema.Observation3D`.
    * encounter(StreamSchema) - The data for the object that is encountered. This is a tuple attribute of type :py:meth:`~streamsx.geospatial.schema.FlighPathEncounterSchema.Observation3D`.
    * encounterDistance(float64) - The latitude/longitude distance between the two objects at the time of the encounter, given in meters.
    * encounterTime(int64) - The time in the future at which the encounter will happen, as an absolute time in milliseconds since January 1st, 1970 (POSIX time).

    """

    Observation3D = StreamSchema(_SPL_SCHEMA_FLIGHTPATH_OBSERVATION3D)
    """
    The :py:meth:`~streamsx.geospatial.FlightPathEncounter` processes observations of flying objects.

    Each observation needs to conform to the Observation3D type. The schema defines the following attributes:
    
    * entityId(str) - The unique identifier of the flying object. You may use the ICAO field from an ADS-B feed.
    * latitude(float64) - The latitude of the object in degrees. Allowed values are in the range from -90 to 90.
    * longitude(float64) - The longitude of the object in degrees. Allowed values are in the range from -180 to 180.
    * altitude(float64) - The altitude of the object in meters. Allowed values are greater than or equal to 0. If you need to convert from feet to meters, multiply the feet by 0.3048.
    * observationTime(int64) - The timestamp of the last observation of this object, given in milliseconds as POSIX time (milliseconds since January 1st, 1970).
    * azimuth(float64) - The azimuth of the object: the clockwise angle between the object's motion direction and a line from the object to the North Pole, given in degrees (for example, if the plane is flying to the east, this will be 90 degrees). Allowed values are 0 to 360.
    * groundSpeed(float64) - The ground speed of the object in meters per second. There is a dependency between this value and the timeSearchInterval parameter of the operator: at the given speed, the object must not travel more than 20000 kilometers within the given timeSearchInterval. For example, with a time search interval of 15 minutes (900000 ms), the object's speed must not exceed 80000 km/h (~22000 m/s). For all practical purposes this should not be a serious limitation. If you need to convert from knots to meters/second, multiply the knots by 0.514444.
    * altitudeChangeRate(float64) - The altitudeChangeRate of the object in meters per second. Positive values denote increasing altitude.

    """
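As a usage illustration, here is a minimal sketch (not taken from the library documentation) that builds a stream conforming to Observation3D from hypothetical ADS-B style records, applying the unit conversions noted above. The record layout and all values are invented, and the FlightPathEncounter invocation itself is omitted.

from streamsx.topology.topology import Topology
from streamsx.geospatial.schema import FlighPathEncounterSchema

FEET_TO_METERS = 0.3048   # altitude arrives in feet in this sketch
KNOTS_TO_MPS = 0.514444   # ground speed arrives in knots in this sketch

topo = Topology('flight_observations')

# One hypothetical record; entityId is taken from the ICAO field.
raw = [{'icao': 'ABC123', 'lat': 51.47, 'lon': -0.45, 'alt_ft': 36000.0,
        'time_ms': 1608196000000, 'azimuth': 90.0, 'speed_kn': 450.0,
        'climb_mps': 0.0}]

# Returning a dict maps values to the schema attributes by name.
observations = topo.source(raw).map(
    lambda r: {'entityId': r['icao'],
               'latitude': r['lat'],
               'longitude': r['lon'],
               'altitude': r['alt_ft'] * FEET_TO_METERS,     # feet -> meters
               'observationTime': r['time_ms'],
               'azimuth': r['azimuth'],
               'groundSpeed': r['speed_kn'] * KNOTS_TO_MPS,  # knots -> m/s
               'altitudeChangeRate': r['climb_mps']},
    schema=FlighPathEncounterSchema.Observation3D)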
Example #28
    def test_bad_from_string(self):
        for dt in SPL_TYPES:
            topo = Topology()
            schema = StreamSchema('tuple<' + dt + ' a>')
            s = topo.source(['ABC'])
            c = s.map(lambda x: (x,), schema=schema)
            e = c.filter(lambda t: True)
            #e.print(tag=dt)

            tester = Tester(topo)
            tester.tuple_count(e, 1)
            tr = tester.test(self.test_ctxtype, self.test_config, assert_on_fail=False)
            self.assertFalse(tr, msg=dt)
Example #29
    def test_partition_batch_no_incomplete(self):
        schema = StreamSchema("tuple<rstring c, int32 d>").as_tuple()
        topo = Topology()
        s = topo.source([('a', 1), ('b', 7), ('a', 2), ('b', 9), ('a', 4),
                         ('a', 5), ('b', 8), ('b', 17)])
        s = s.map(lambda x: x, schema=schema)

        s = s.batch(2).partition('c').aggregate(
            lambda items: (items[0][0], sum(item[1] for item in items)))

        tester = Tester(topo)
        tester.contents(s, [('a', 3), ('b', 16), ('a', 9), ('b', 25)])
        tester.test(self.test_ctxtype, self.test_config)
Example #30
    def test_pair(self):
        topo = Topology()
        s = U.sequence(topo, iterations=932)
        rschema = U.SEQUENCE_SCHEMA.extend(StreamSchema('tuple<float64 score>'))
        r0 = s.map(lambda t : (t['seq'], t['ts'], 1.0), schema=rschema)
        r1 = s.map(lambda t : (t['seq'], t['ts'], 2.0), schema=rschema)

        r = U.pair(r0, r1)

        tester = Tester(topo)
        tester.tuple_count(r, 932*2)
        tester.tuple_check(r, PairCheck())
        tester.test(self.test_ctxtype, self.test_config)
Example #31
    def generateSPLOperator(self):
        _op = dict(self._op_def)
        _op["name"] = self.name
        if self.category:
            _op["category"] = self.category

        _op["kind"] = self.kind
        if self.model:
            _op["model"] = self.model
        if self.language:
           _op["language"] = self.language

        _op["partitioned"] = False
        if self._start_op:
            _op["startOp"] = True

        _outputs = []
        _inputs = []

        for output in self.outputPorts:
            _outputs.append(output.getSPLOutputPort())

        for input in self.inputPorts:
            _inputs.append(input.getSPLInputPort())
        _op["outputs"] = _outputs
        _op["inputs"] = _inputs
        _op["config"] = self.config
        _op["config"]["streamViewability"] = self.viewable
        _op["config"]["viewConfigs"] = list(self.view_configs.values())
        if self._placement:
            _op["config"]["placement"] = self._placement
            if 'resourceTags' in self._placement:
                # Convert the set to a list for JSON
                tags = _op['config']['placement']['resourceTags']
                _op['config']['placement']['resourceTags'] = list(tags)

        # Fix up any pending streams for input style
        if 'pyStyle' in self.params and 'pending' == self.params['pyStyle']\
                and self.kind.startswith('com.ibm.streamsx.topology.functional.python'):
            StreamSchema._fnop_style(self.inputPorts[0].schema, self, 'pyStyle')

        # Add parameters as their natural representation
        # unless the value has a spl_json() function,
        # in which case use that
        _params = {}

        for name in self.params:
            param = _as_spl_expr(self.params[name])
            try:
                _params[name] = param.spl_json()
            except:
                _value = {}
                _value["value"] = param
                _params[name] = _value
        _op["parameters"] = _params

        if self.sl is not None:
            _op['sourcelocation'] = self.sl.spl_json()

        if self._layout_hints:
            _op['layout'] = self._layout_hints

        if self._consistent is not None:
            _op['consistent'] = {}
            consistent = _op['consistent']
            consistent['trigger'] = self._consistent.trigger.name
            if self._consistent.trigger == streamsx.topology.state.ConsistentRegionConfig.Trigger.PERIODIC:
                if isinstance(self._consistent.period, datetime.timedelta):
                    consistent_period = self._consistent.period.total_seconds()
                else:
                    consistent_period = float(self._consistent.period)
                consistent['period'] = str(consistent_period)

            if isinstance(self._consistent.drain_timeout, datetime.timedelta):
                consistent_drain = self._consistent.drain_timeout.total_seconds()
            else:
                consistent_drain = float(self._consistent.drain_timeout)
            consistent['drainTimeout'] = str(consistent_drain)

            if isinstance(self._consistent.reset_timeout, datetime.timedelta):
                consistent_reset = self._consistent.reset_timeout.total_seconds()
            else:
                consistent_reset = float(self._consistent.reset_timeout)
            consistent['resetTimeout'] = str(consistent_reset)

            consistent['maxConsecutiveResetAttempts'] = int(self._consistent.max_consecutive_attempts)

        # Callout to allow an ExtensionOperator
        # to augment the JSON
        if hasattr(self, '_ex_op'):
            self._ex_op._generate(_op)
        return _op