예제 #1
0
    def test_get_assessment_historical_output(self):
        """Fetch historical output of an assessment, polling via a tracker id
        when the data is not immediately available.
        """
        fclient = FClient(host=host, token=token, options=None)
        try:
            options = {'startTime': '2011-01-02T01:00:00.000Z', 'endTime': '2013-06-13T01:00:00.000Z', 'responseFormat': 'application/json'}
            response = fclient.get_historical_output(assessment, options)
            '''If data is not readily available then, a tracker id will be sent with 202 status code. While falkonry will genrate ouptut data
             Client should do timely pooling on the using same method, sending tracker id (__id) in the query params
             Once data is available server will response with 200 status code and data in json/csv format.'''

            # BUG FIX: status codes must be compared with `==`, not `is`.
            # `is` tests object identity and only happens to work for small
            # interned ints in CPython (SyntaxWarning since Python 3.8).
            if response.status_code == 202:
                trackerResponse = Schemas.Tracker(tracker=json.loads(response.text))

                # get id from the tracker
                trackerId = trackerResponse.get_id()

                # use this tracker for checking the status of the process.
                options = {"trackerId": trackerId, "responseFormat": "application/json"}
                newResponse = fclient.get_historical_output(assessment, options)

                # if status is 202 call the same request again

            if response.status_code == 200:
                # if status is 200, output data will be present in response.text field
                self.assertEqual(len(response.text) > 0, True, 'Error getting historical output of a Assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Error getting output of a Assessment')
    def test_get_assessment_historical_output(self):
        """Fetch historical output of an assessment, polling via a tracker id
        when the data is not immediately available.
        """
        fclient = FClient(host=host, token=token, options=None)
        try:
            options = {'startTime': '2011-01-02T01:00:00.000Z', 'endTime': '2013-06-13T01:00:00.000Z', 'responseFormat': 'application/json'}
            response = fclient.get_historical_output(assessment, options)
            '''If data is not readily available then, a tracker id will be sent with 202 status code. While falkonry will genrate ouptut data
             Client should do timely pooling on the using same method, sending tracker id (__id) in the query params
             Once data is available server will response with 200 status code and data in json/csv format.'''

            # BUG FIX: status codes must be compared with `==`, not `is`.
            # `is` tests object identity and only happens to work for small
            # interned ints in CPython (SyntaxWarning since Python 3.8).
            if response.status_code == 202:
                trackerResponse = Schemas.Tracker(tracker=json.loads(response.text))

                # get id from the tracker
                trackerId = trackerResponse.get_id()

                # use this tracker for checking the status of the process.
                options = {"trackerId": trackerId, "responseFormat": "application/json"}
                newResponse = fclient.get_historical_output(assessment, options)

                # if status is 202 call the same request again

            if response.status_code == 200:
                # if status is 200, output data will be present in response.text field
                self.assertEqual(len(response.text) > 0, True, 'Error getting historical output of a Assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Error getting output of a Assessment')
예제 #3
0
    def test_get_assessment_output_with_offset(self):
        """Stream assessment output while recording the last seen offset, so
        the stream can be resumed from that offset after a failure.
        """
        fclient = FClient(host=host, token=token, options=None)

        # BUG FIX: lastOffset must be bound before any call that may raise.
        # Previously it was assigned after get_output(), so if get_output()
        # itself failed the except handler below hit a NameError instead of
        # attempting the resume.
        lastOffset = 0
        try:
            stream = fclient.get_output(assessment, {})
            for event in stream.events():
                print(json.dumps(json.loads(event.data)))
                lastOffset = json.loads(
                    event.data
                )['offset']  # keep track of offset sent in falkonry output event

        except Exception as e:
            print(exception_handler(e))
            ''' Assuming there was some exception occured and you want to listen the output again
            then use the offset value and set it in the options parameter.
            '''
            # resume from the last offset we managed to record
            options = {"offset": lastOffset}
            try:
                stream = fclient.get_output(assessment, options)
                for event in stream.events():
                    # resumed events must not go backwards past the recorded offset
                    self.assertEqual(
                        json.loads(event.data)['offset'] >= lastOffset, True)
                    print(json.dumps(json.loads(event.data)))
            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Error getting output of a Assessment')
    def test_create_pipeline_with_multiple_assessment(self):
        """Create an eventbuffer, seed it with one csv row, then create a
        pipeline carrying two assessments and verify the created object.

        Cleans up the pipeline and eventbuffer on success (best effort).
        """
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        eventbuffer.set_thing_identifier('motor')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            # one csv header plus one data row, matching the signals declared below
            data = "time, motor, current, vibration, state\n" + "2016-03-01 01:01:01, Motor1, 12.4, 3.4, On"
            try:
                response = fclient.add_input_data(eventbuffer.get_id(), 'csv', {}, data)

                pipeline = Schemas.Pipeline()
                # signal name -> value type, optionally paired with an event
                # type (Occurrences / Samples)
                signals  = {
                    'current': ['Numeric','Occurrences'],
                'vibration': ['Numeric','Samples'],
                'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                assessment2 = Schemas.Assessment()
                assessment2.set_name('Health2') \
                    .set_input_signals(['vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment) \
                    .set_assessment(assessment2)

                try:
                    response = fclient.create_pipeline(pipeline)
                    self.assertEqual(isinstance(response, Schemas.Pipeline), True, 'Invalid Pipeline object after creation')
                    self.assertEqual(isinstance(response.get_id(), unicode), True, 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_name(), pipeline.get_name(), 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_thing_identifier(), eventbuffer.get_thing_identifier(), 'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_input_signals()), 3, 'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_assessments()), 2, 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_eventbuffer(), eventbuffer.get_id(), 'Invalid Pipeline object after creation')

                    # tear down
                    try:
                        fclient.delete_pipeline(response.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                except Exception as e:
                    print(e.message)
                    # pipeline creation failed: best-effort cleanup of the eventbuffer
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add data')
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
예제 #5
0
    def test_create_eventbuffer_with_pipeline_outflow_subscription(self):
        """Create an eventbuffer and attach a PIPELINEOUTFLOW subscription,
        verifying the subscription returned by the server.
        """
        client = FClient(host=host, token=token)

        eb = Schemas.Eventbuffer()
        eb.set_name('Motor Health' + str(random.random()))
        eb.set_time_identifier('time')
        eb.set_time_format('iso_8601')

        sub = Schemas.Subscription()
        sub.set_type('PIPELINEOUTFLOW') \
            .set_path('urn:falkonry:pipeline:qaerscdtxh7rc3')

        try:
            eb = client.create_eventbuffer(eb)
            try:
                created = client.create_subscription(eb.get_id(), sub)
                msg = 'Invalid Subscription object after creation'
                self.assertNotEqual(created.get_key(), None, msg)
                self.assertEqual(created.get_type(), 'PIPELINEOUTFLOW', msg)
                self.assertEqual(created.get_path(), sub.get_path(), msg)
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot create Subscription')

            # tear down: best-effort removal of the created buffer
            try:
                client.delete_eventbuffer(eb.get_id())
            except Exception as e:
                pass
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create Eventbuffer')
예제 #6
0
    def test_create_eventbuffer_with_multiple_things(self):
        """Create an eventbuffer that carries a thing identifier and verify
        the object returned by the server.
        """
        client = FClient(host=host, token=token)

        eb = Schemas.Eventbuffer()
        eb.set_name('Motor Health' + str(random.random()))
        eb.set_time_identifier('time')
        eb.set_time_format('iso_8601')
        eb.set_thing_identifier('thing')

        try:
            created = client.create_eventbuffer(eb)
            msg = 'Invalid Eventbuffer object after creation'
            self.assertEqual(isinstance(created, Schemas.Eventbuffer), True, msg)
            self.assertEqual(isinstance(created.get_id(), unicode), True, msg)
            self.assertEqual(created.get_name(), eb.get_name(), msg)
            self.assertEqual(len(created.get_schema()), 1, msg)
            self.assertEqual(len(created.get_subscriptions()), 1, msg)
            self.assertEqual(created.get_thing_identifier(),
                             eb.get_thing_identifier(), msg)

            # tear down: best-effort removal of the created buffer
            try:
                client.delete_eventbuffer(created.get_id())
            except Exception as e:
                pass
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create Eventbuffer')
예제 #7
0
class TestLiveDatastream(unittest.TestCase):
    """Tests for turning live monitoring of a datastream on and off."""

    def setUp(self):
        # Fresh client per test; created_datastreams collects ids for later cleanup.
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []
        pass

    # Datastream On (Start live monitoring of datastream)
    def test_turn_datstream_on_off(self):
        # NOTE(review): name has a typo ("datstream"); kept as-is because
        # renaming would change which tests the runner discovers.

        try:
            # assuming model is already built
            listAssessment = self.fclient.on_datastream(datastream_id)
            self.assertEqual(len(listAssessment) > 0, True, 'Cannot turn on live monitoring for datastream')
            self.assertEqual(str(listAssessment[0]['datastream']), datastream_id, 'Live mornitoring turned on for incorrect datastream')
            # self.assertEqual(str(listAssessment[0]['live']), 'ON', 'Cannot turn on live mornitoring')

            # give the service time to actually start live monitoring
            timepkg.sleep(10)

            # turning off live monitoring
            try:
                listAssessment = self.fclient.off_datastream(datastream_id)
                self.assertEqual(len(listAssessment) > 0, True, 'Cannot turn off live monitoring for datastream')
                self.assertEqual(str(listAssessment[0]['datastream']), datastream_id, 'Live mornitoring turned off for incorrect datastream')
                # self.assertEqual(str(listAssessment[0]['live']), 'OFF', 'Cannot turn off live mornitoring')

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot turn datastream off')

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot turn datastream on')
    def test_get_assessment_output_with_offset(self):
        """Stream assessment output while recording the last seen offset, so
        the stream can be resumed from that offset after a failure.
        """
        fclient = FClient(host=host, token=token, options=None)

        # BUG FIX: lastOffset must be bound before any call that may raise.
        # Previously it was assigned after get_output(), so if get_output()
        # itself failed the except handler below hit a NameError instead of
        # attempting the resume.
        lastOffset = 0
        try:
            stream = fclient.get_output(assessment, {})
            for event in stream.events():
                print(json.dumps(json.loads(event.data)))
                lastOffset = json.loads(event.data)['offset'] # keep track of offset sent in falkonry output event

        except Exception as e:
            print(exception_handler(e))
            ''' Assuming there was some exception occured and you want to listen the output again
            then use the offset value and set it in the options parameter.
            '''
            # resume from the last offset we managed to record
            options = {"offset": lastOffset}
            try:
                stream = fclient.get_output(assessment, options)
                for event in stream.events():
                    # resumed events must not go backwards past the recorded offset
                    self.assertEqual(json.loads(event.data)['offset'] >= lastOffset, True)
                    print(json.dumps(json.loads(event.data)))
            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Error getting output of a Assessment')
    def test_create_pipeline_for_single_thing(self):
        """Create an eventbuffer (no thing identifier), seed it with one json
        record, then create a single-assessment pipeline and verify it.

        Cleans up the pipeline and eventbuffer on success (best effort).
        """
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            # single json record matching the signals declared below
            data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
            try:
                response = fclient.add_input_data(eventbuffer.get_id(), 'json', {}, data)

                pipeline = Schemas.Pipeline()
                # signal name -> value type
                signals  = {
                    'current': 'Numeric',
                    'vibration': 'Numeric',
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment)

                try:
                    response = fclient.create_pipeline(pipeline)
                    self.assertEqual(isinstance(response, Schemas.Pipeline), True, 'Invalid Pipeline object after creation')
                    self.assertEqual(isinstance(response.get_id(), unicode), True, 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_name(), pipeline.get_name(), 'Invalid Pipeline object after creation')
                    self.assertNotEqual(response.get_thing_name(), None, 'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_input_signals()), 3, 'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_assessments()), 1, 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_eventbuffer(), eventbuffer.get_id(), 'Invalid Pipeline object after creation')

                    # tear down
                    try:
                        fclient.delete_pipeline(response.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                except Exception as e:
                    print(e.message)
                    # pipeline creation failed: best-effort cleanup of the eventbuffer
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add data')
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
    def test_get_assessment_output(self):
        """Stream live output events of an assessment and print each one."""
        client = FClient(host=host, token=token, options=None)

        try:
            output_stream = client.get_output(assessment, {})
            for evt in output_stream.events():
                parsed = json.loads(evt.data)
                print(json.dumps(parsed))

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Error getting output of a Assessment')
    def test_get_assessment_output(self):
        """Stream live output events of an assessment and print each one."""
        fclient = FClient(host=host, token=token, options=None)

        try:
            stream = fclient.get_output(assessment, {})
            for event in stream.events():
                print(json.dumps(json.loads(event.data)))

        except Exception as e:
            # BUG FIX: `e.message` was removed in Python 3 and raises
            # AttributeError inside the handler, masking the real error;
            # print the exception object itself instead.
            print(e)
            self.assertEqual(0, 1, 'Error getting output of a Assessment')
    def test_get_pipeline_output(self):
        """Fetch pipeline output from a fixed start time and dump it to a
        file under /tmp.
        """
        client = FClient(host=host, token=token)

        try:
            rows = client.get_output(pipeline, 1456794061)
            sink_path = '/tmp/pipeline_' + pipeline + 'output.json'
            with open(sink_path, 'w') as sink:
                for row in rows:
                    sink.write(row + '\n')

        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Error getting output of a Pipeline')
    def test_get_pipeline_output(self):
        """Download pipeline output starting at a fixed epoch second and
        persist each line to a temp file.
        """
        client = FClient(host=host, token=token)

        try:
            output = client.get_output(pipeline, 1456794061)
            target = '/tmp/pipeline_' + pipeline + 'output.json'
            with open(target, 'w') as handle:
                for record in output:
                    handle.write(record + '\n')

        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Error getting output of a Pipeline')
    def test_get_entity_meta(self):
        """Create a standalone datastream, add entity metadata to it, then
        read the metadata back and verify both round trips.
        """
        fclient = FClient(host=host, token=token, options=None)
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = fclient.create_datastream(datastream)
            # remember the id so suite-level teardown can delete it
            self.created_datastreams.append(datastreamResponse.get_id())

            self.assertEqual(isinstance(datastreamResponse, Schemas.Datastream), True, 'Invalid Datastream object after creation')
            self.assertEqual(isinstance(datastreamResponse.get_id(), str), True, 'Invalid id of datastream after creation')
            data = [{"sourceId": "testId", "label": "testName", "path": "root/path"}]

            # add EntityMeta
            entityMetaResponse = fclient.add_entity_meta(datastreamResponse.get_id(), {}, data)
            self.assertEqual(isinstance(entityMetaResponse, list), True, 'Invalid entityMeta object after creation')
            self.assertEqual(len(entityMetaResponse) > 0, True, 'Invalid length of entityMeta')

            # IDIOM FIX: use plain indexing instead of calling __getitem__ directly
            entityMetaResp = entityMetaResponse[0]
            self.assertEqual(isinstance(entityMetaResp, Schemas.EntityMeta), True, 'Invalid entityMeta object after creation')
            self.assertEqual(isinstance(entityMetaResp.get_id(), str), True, 'Invalid id of entityMeta after creation')
            self.assertEqual(entityMetaResp.get_label(), 'testName', 'Invalid label of entityMeta after creation')
            self.assertEqual(entityMetaResp.get_path(), 'root/path', 'Invalid path of entityMeta after creation')
            self.assertEqual(entityMetaResp.get_sourceId(), 'testId', 'Invalid sourceId of entityMeta after creation')

            # get entity meta
            getEntityMetaResponse = fclient.get_entity_meta(datastreamResponse.get_id())
            self.assertEqual(isinstance(getEntityMetaResponse, list), True, 'Invalid entityMeta object after creation')
            self.assertEqual(len(getEntityMetaResponse) > 0, True, 'Invalid length of entityMeta')
            getEntityMetaResp = getEntityMetaResponse[0]
            self.assertEqual(isinstance(getEntityMetaResp, Schemas.EntityMeta), True, 'Invalid entityMeta object after creation')
            self.assertEqual(isinstance(getEntityMetaResp.get_id(), str), True, 'Invalid id of entityMeta after creation')
            self.assertEqual(getEntityMetaResp.get_label(), 'testName', 'Invalid label of entityMeta after creation')
            self.assertEqual(getEntityMetaResp.get_path(), 'root/path', 'Invalid path of entityMeta after creation')
            self.assertEqual(getEntityMetaResp.get_sourceId(), 'testId', 'Invalid sourceId of entityMeta after creation')

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add entityMeta to datastream')
예제 #15
0
def validate_login(host, token):
    """Validate the host URL and auth token, setting the module-global
    `_falkonry` client on success.

    Returns True when the host looks like a valid URL and the token is
    authorized (a "No such Datastream available" response from the probe
    counts as authorized), False otherwise.
    """
    global _falkonry
    try:
        # BUG FIX: a missing host or token previously fell through the
        # `if not (not host or not token)` guard and implicitly returned
        # None; report the failure explicitly instead.
        if not host or not token:
            return False

        url_pattern = re.compile(
            r'^(?:http|ftp)s?://'  # http:// or https://
            r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
            r'localhost|'  # localhost...
            r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
            r'(?::\d+)?'  # optional port
            r'(?:/?|[/?]\S+)$',
            re.IGNORECASE)
        if not url_pattern.match(host):
            print_error("Invalid Host Url")
            return False

        _falkonry = Falkonry(host=host,
                             token=token,
                             options={"header": "falkonry-cli"})
        # Probe with a dummy datastream id purely to exercise token validation.
        try:
            _falkonry.get_datastream('test-id')
            # BUG FIX: a successful probe previously fell through and
            # implicitly returned None (falsy) despite valid credentials.
            return True
        except Exception as error:
            if hasattr(error, 'message'):
                errorObj = json.loads(error.message)
                if errorObj['message'] == "Unauthorized Access":
                    print_error(
                        'Unauthorized Access. Please verify your details.'
                    )
                    _falkonry = None
                    return False
                elif errorObj['message'] == "No such Datastream available":
                    # Auth succeeded; the dummy id simply does not exist.
                    return True
                else:
                    _falkonry = None
                    return False
            else:
                _falkonry = None
                print_error(
                    'Unable to connect to falkonry. Please verify your details.'
                )
                return False
    except Exception as error:
        _falkonry = None
        print_error(
            'Unable to connect to falkonry. Please verify your details.')
        return False
예제 #16
0
    def test_create_eventbuffer_with_mqtt_subscription(self):
        """Create an eventbuffer and attach an MQTT subscription, verifying
        the subscription details echoed back by the server.

        Cleans up the eventbuffer afterwards (best effort).
        """
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')

        # MQTT subscription pointing at a test broker/topic with credentials
        subscription = Schemas.Subscription()
        subscription.set_type('MQTT') \
            .set_path('mqtt://test.mosquito.com') \
            .set_topic('falkonry-eb-1-test') \
            .set_username('test-user') \
            .set_password('test')

        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                response = fclient.create_subscription(eventbuffer.get_id(),
                                                       subscription)
                self.assertNotEqual(
                    response.get_key(), None,
                    'Invalid Subscription object after creation')
                self.assertEqual(response.get_type(), 'MQTT',
                                 'Invalid Subscription object after creation')
                self.assertEqual(response.get_topic(),
                                 subscription.get_topic(),
                                 'Invalid Subscription object after creation')
                self.assertEqual(response.get_path(), subscription.get_path(),
                                 'Invalid Subscription object after creation')
                self.assertEqual(response.get_username(),
                                 subscription.get_username(),
                                 'Invalid Subscription object after creation')
                # time settings are inherited from the eventbuffer
                self.assertEqual(response.get_time_identifier(),
                                 eventbuffer.get_time_identifier(),
                                 'Invalid Subscription object after creation')
                self.assertEqual(response.get_time_format(),
                                 eventbuffer.get_time_format(),
                                 'Invalid Subscription object after creation')
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot create Subscription')

            # tear down
            try:
                fclient.delete_eventbuffer(eventbuffer.get_id())
            except Exception as e:
                pass
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create Eventbuffer')
class TestAssessmentGetFacts(unittest.TestCase):
    """Tests for retrieving facts data from an assessment."""

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)

    def test_get_assessment_facts(self):
        """Facts fetched with no filter options should have a non-empty body."""
        try:
            resp = self.fclient.get_facts(assessment, {})
            pprint(resp.text)
            self.assertEqual(
                len(resp.text) == 0, False, 'Invalid facts response')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot get facts from the assessment")

    def test_get_assessment_facts_with_model(self):
        """Facts restricted to a time window and a specific model revision."""
        try:
            opts = {
                'startTime': '2011-01-02T00:00:00.000Z',
                'endTime': '2014-01-01T00:00:00.000Z',
                'model': '1'
            }
            resp = self.fclient.get_facts(assessment, opts)
            pprint(resp.text)
            self.assertEqual(
                len(resp.text) == 0, False, 'Invalid facts response')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(
                0, 1,
                "Cannot get facts from the assessment for a specific model")

    def test_get_assessment_facts_with_batch(self):
        """Facts for a batch-style assessment should mention 'batch'."""
        try:
            resp = self.fclient.get_facts(assessmentB, {})
            pprint(resp.text)
            self.assertEqual('batch' in resp.text, True,
                             'Invalid facts with batch response')
            self.assertEqual(
                len(resp.text) == 0, False,
                'Invalid facts with batch response')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(
                0, 1, "Cannot get facts from the assessment for batch case")
예제 #18
0
    def test_add_data_single_thing(self):
        """Add a single JSON input record to a freshly created eventbuffer."""
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health')
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                response = fclient.add_input_data(eventbuffer.get_id(), 'json',
                                                  {}, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to eventbuffer')

                # tear down (best effort).
                # BUG FIX: both deletes previously shared one try block, and
                # `created_pipeline` appears never defined in this test — the
                # resulting NameError was swallowed and delete_eventbuffer
                # never ran, leaking the eventbuffer. Each cleanup step now
                # fails independently.
                try:
                    fclient.delete_pipeline(created_pipeline.get_id())
                except Exception:
                    pass
                try:
                    fclient.delete_eventbuffer(eventbuffer.get_id())
                except Exception:
                    pass
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add input data to eventbuffer')
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
예제 #19
0
    def test_add_data_single_thing(self):
        """Add a single JSON input record to a freshly created eventbuffer."""
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health')
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                response = fclient.add_input_data(eventbuffer.get_id(), 'json', {}, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to eventbuffer')

                # tear down (best effort).
                # BUG FIX: both deletes previously shared one try block, and
                # `created_pipeline` appears never defined in this test — the
                # resulting NameError was swallowed and delete_eventbuffer
                # never ran, leaking the eventbuffer. Each cleanup step now
                # fails independently.
                try:
                    fclient.delete_pipeline(created_pipeline.get_id())
                except Exception:
                    pass
                try:
                    fclient.delete_eventbuffer(eventbuffer.get_id())
                except Exception:
                    pass
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add input data to eventbuffer')
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
    def test_add_csv_data_stream_for_single_thing(self):
        """Stream a csv file into a freshly created eventbuffer."""
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health')
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                # NOTE(review): the handle is handed to add_input_stream;
                # assumes the client closes it — confirm, else wrap in `with`.
                data = io.open('./data.csv')
                response = fclient.add_input_stream(eventbuffer.get_id(), 'csv', {}, data)

                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to eventbuffer')

                # tear down (best effort).
                # BUG FIX: both deletes previously shared one try block, and
                # `created_pipeline` appears never defined in this test — the
                # resulting NameError was swallowed and delete_eventbuffer
                # never ran, leaking the eventbuffer. Each cleanup step now
                # fails independently.
                try:
                    fclient.delete_pipeline(created_pipeline.get_id())
                except Exception:
                    pass
                try:
                    fclient.delete_eventbuffer(eventbuffer.get_id())
                except Exception:
                    pass
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add input data to eventbuffer')
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
예제 #21
0
    def test_create_eventbuffer_for_narrow_format_data(self):
        """Create an eventbuffer configured for narrow (tag/value) format
        data and verify all narrow-format settings round-trip.

        Cleans up the eventbuffer afterwards (best effort).
        """
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        # narrow-format settings: signal names are parsed from the "tag"
        # column (prefix before "_"), values come from the "value" column
        eventbuffer.set_signals_tag_field("tag")
        eventbuffer.set_signals_delimiter("_")
        eventbuffer.set_signals_location("prefix")
        eventbuffer.set_value_column("value")

        try:
            response = fclient.create_eventbuffer(eventbuffer)
            self.assertEqual(isinstance(response, Schemas.Eventbuffer), True,
                             'Invalid Eventbuffer object after creation')
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid Eventbuffer object after creation')
            self.assertEqual(response.get_name(), eventbuffer.get_name(),
                             'Invalid Eventbuffer object after creation')
            self.assertEqual(len(response.get_schema()), 1,
                             'Invalid Eventbuffer object after creation')
            self.assertEqual(len(response.get_subscriptions()), 1,
                             'Invalid Eventbuffer object after creation')
            self.assertEqual(response.get_signals_tag_field(),
                             eventbuffer.get_signals_tag_field(),
                             'Invalid Eventbuffer object after creation')
            self.assertEqual(response.get_signals_delimiter(),
                             eventbuffer.get_signals_delimiter(),
                             'Invalid Eventbuffer object after creation')
            self.assertEqual(response.get_signals_location(),
                             eventbuffer.get_signals_location(),
                             'Invalid Eventbuffer object after creation')
            self.assertEqual(response.get_value_column(),
                             eventbuffer.get_value_column(),
                             'Invalid Eventbuffer object after creation')
            # tear down
            try:
                fclient.delete_eventbuffer(response.get_id())
            except Exception as e:
                pass
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create Eventbuffer')
class TestAssessmentGetFacts(unittest.TestCase):
    """Tests for fetching facts from an assessment, plus pipeline listing."""

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)

    def test_get_assessment_facts(self):
        """Facts for the module-level `assessment` should be non-empty."""
        try:
            response = self.fclient.get_facts(assessment, {})
            pprint(response.content)
            self.assertEqual(len(response.content) == 0, False, 'Invalid facts response')
        except Exception as e:
            print(exception_handler(e))
            self.fail("Cannot get facts from the assessment")

    def test_get_assessment_facts_with_model(self):
        """Facts can be filtered by time window and model revision."""
        try:
            options = {'startTime': '2011-01-02T00:00:00.000Z', 'endTime': '2014-01-01T00:00:00.000Z', 'model': '1'}
            response = self.fclient.get_facts(assessment, options)
            pprint(response.content)
            self.assertEqual(len(response.content) == 0, False, 'Invalid facts response')
        except Exception as e:
            print(exception_handler(e))
            self.fail("Cannot get facts from the assessment for a specific model")

    def test_get_assessment_facts_with_batch(self):
        """Facts for a batch-style assessment should carry batch information."""
        try:
            response = self.fclient.get_facts(assessmentB, {})
            pprint(response.content)
            self.assertEqual('batch' in str(response.content), True, 'Invalid facts with batch response')
            self.assertEqual(len(response.content) == 0, False, 'Invalid facts with batch response')
        except Exception as e:
            print(exception_handler(e))
            self.fail("Cannot get facts from the assessment for batch case")

    def test_get_pipelines(self):
        """Create an eventbuffer + pipeline, then verify get_pipelines() lists it."""
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        options = {
            'timeIdentifier': 'time',
            'timeFormat': 'iso_8601'
        }
        eventbuffer.set_name('Motor Health')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer, options)
            pipeline = Schemas.Pipeline()
            signals = {
                'current': 'Numeric',
                'vibration': 'Numeric',
                'state': 'Categorical'
            }
            assessment = Schemas.Assessment()
            assessment.set_name('Health') \
                .set_input_signals(['current', 'vibration', 'state'])
            pipeline.set_name('Motor Health 1') \
                .set_eventbuffer(eventbuffer.get_id()) \
                .set_input_signals(signals) \
                .set_thing_name('Motor') \
                .set_assessment(assessment)

            try:
                response = fclient.create_pipeline(pipeline)
                pipelines = fclient.get_pipelines()
                self.assertGreater(len(pipelines), 0, 'Cannot fetch Pipelines')

                # tear down (best-effort: a failed delete must not fail the test)
                try:
                    fclient.delete_pipeline(response.get_id())
                    fclient.delete_eventbuffer(eventbuffer.get_id())
                except Exception:
                    pass
            except Exception as e:
                # BaseException.message does not exist on Python 3; use the
                # file-wide exception_handler helper instead of e.message.
                print(exception_handler(e))
                self.fail('Cannot fetch Pipelines')
        except Exception as e:
            print(exception_handler(e))
            self.fail('Cannot create eventbuffer')
# Example #24
import os
import subprocess
from datetime import datetime as dt
from falkonryclient import client as Falkonry

# Connection settings come from the environment so no credentials are
# hard-coded in the repository.
host = os.environ['FALKONRY_HOST_URL']
token = os.environ['FALKONRY_TOKEN']
falkonry = Falkonry(host, token)
# Repository root: two directory levels up from this file.
falkonry_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
path_test_transcripts = falkonry_path + "/tests/test_transcripts"

# Clear stale transcripts from a previous run before regenerating them.
try:
    files = os.listdir(path_test_transcripts)
    # NOTE(review): cleanup only triggers when MORE than one file exists —
    # presumably a single placeholder file is meant to survive; confirm intent.
    if (len(files) > 1):
        # Uses shell=True with a glob; acceptable for a dev-only script since
        # falkonry_path is derived from __file__, not from untrusted input.
        subprocess.call(
            "rm -r {path}/tests/test_transcripts/*".format(path=falkonry_path),
            shell=True)
except Exception as e:
    # Missing directory on a fresh checkout is fine; just report and continue.
    print(e)

# Run the two suites; each is expected to write transcript files into
# tests/test_transcripts/ as a side effect.
subprocess.call(
    'python {path}/tests/test_assessment.py'.format(path=falkonry_path),
    shell=True)
subprocess.call(
    'python {path}/tests/test_datastream.py'.format(path=falkonry_path),
    shell=True)

# Collect the transcripts produced by the runs above.
files = os.listdir("{path}/tests/test_transcripts".format(path=falkonry_path))
print("Testing Transcripts")

with open("RunTranscriptTest.sh", 'w') as f:
class TestAddData(unittest.TestCase):
    """Tests for ingesting input data into datastreams: narrow/wide layouts,
    csv/json formats, single/multi entity, batch identifiers, and the
    validation errors raised when required time options are missing.

    The datastream-construction and ingestion boilerplate that was duplicated
    across every test is factored into the private helpers below.
    """

    # Wide csv payload shared by the multi-entity tests. NOTE: the
    # "vibrarion" column name reproduces the original payload verbatim.
    WIDE_CSV = ("time,current,vibrarion,state,car\n"
                "2016-03-01 01:01:01,12.4,3.4,on,car1\n"
                "2016-03-01 01:01:01,31.2,1.4,off,car1\n"
                "2016-03-01 01:01:01,24,3.2,on,car2\n"
                "2016-03-01 01:01:01,31,3.4,off,car2")

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    def tearDown(self):
        # Best-effort cleanup: delete every datastream this test created.
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------

    def _build_datastream(self, time_format, signal_identifier=None,
                          value_identifier=None, entity_identifier=None,
                          entity_name=None, batch_identifier=None):
        """Assemble a STANDALONE Datastream schema; return (datastream, time)."""
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format(time_format)
        if signal_identifier is not None:
            signal.set_signalIdentifier(signal_identifier)
        if value_identifier is not None:
            signal.set_valueIdentifier(value_identifier)
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        if entity_identifier is not None:
            field.set_entityIdentifier(entity_identifier)
        if entity_name is not None:
            field.set_entityName(entity_name)
        if batch_identifier is not None:
            field.set_batchIdentifier(batch_identifier)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        return datastream, time

    def _create_datastream(self, datastream):
        """Create the datastream server-side (failing the test on error) and
        register its id for teardown."""
        try:
            response = self.fclient.create_datastream(datastream)
        except Exception as e:
            print(exception_handler(e))
            self.fail('Cannot create datastream')
        self.created_datastreams.append(response.get_id())
        return response

    def _add_and_verify(self, datastream_id, data_format, options, data):
        """Add input data to a datastream and verify that it got ingested."""
        try:
            response = self.fclient.add_input_data(datastream_id, data_format,
                                                   options, data)
            self.assertNotEqual(response['__$id'], None,
                                'Cannot add input data to datastream')
            # checking if data got ingested
            check_data_ingestion(self, response)
        except AssertionError:
            # Do not let the broad handler below mask a genuine test failure.
            raise
        except Exception as e:
            print(exception_handler(e))
            self.fail('Cannot add input data to datastream')

    def _add_expect_error(self, datastream_id, options, data,
                          expected_message, not_caught_message):
        """Add csv data that the server must reject; assert the error text.

        The original code asserted inside the try-block, so the resulting
        AssertionError was swallowed by the broad except and re-checked
        against the server-message assertion; try/else avoids that.
        """
        try:
            self.fclient.add_input_data(datastream_id, 'csv', options, data)
        except Exception as e:
            # Printing only for debugging purposes
            print("\nResponse :", exception_handler(e))
            self.assertEqual(exception_handler(e), expected_message,
                             not_caught_message)
        else:
            self.fail(not_caught_message)

    # ------------------------------------------------------------------
    # Tests
    # ------------------------------------------------------------------

    # Add narrow input data (json format) to multi entity Datastream
    def test_add_data_json_mutli(self):
        datastream, time = self._build_datastream(
            "YYYY-MM-DD HH:mm:ss", signal_identifier="signal",
            value_identifier="value", entity_identifier='car')
        created = self._create_datastream(datastream)

        data = '{"time" : "2016-03-01 01:01:01", "signal" : "current", "value" : 12.4, "car" : "unit1"}'
        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeFormat': time.get_format(),
            'timeZone': time.get_zone(),
            'timeIdentifier': time.get_identifier(),
            'signalIdentifier': 'signal',
            'valueIdentifier': 'value',
            'entityIdentifier': 'car'
        }
        self._add_and_verify(created.get_id(), 'json', options, data)

    # Add narrow input data (csv format) to single entity Datastream
    def test_add_data_csv_single(self):
        datastream, time = self._build_datastream(
            "iso_8601", signal_identifier="signal", value_identifier="value")
        created = self._create_datastream(datastream)

        # input data has timeformat different than the one set while creating
        # the datastream; the options below override it per-request
        data = ("time, signal, value \n"
                "2016-03-01 01:01:01, signal1, 3.4\n"
                "2016-03-01 01:01:01, signal2, 1.4")
        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeFormat': "YYYY-MM-DD HH:mm:ss",
            'timeZone': time.get_zone(),
            'timeIdentifier': time.get_identifier()
        }
        self._add_and_verify(created.get_id(), 'csv', options, data)

    # Add wide input data (json format) to single entity Datastream
    def test_add_data_json_single(self):
        datastream, time = self._build_datastream(
            "YYYY-MM-DD HH:mm:ss", entity_name='machine')
        created = self._create_datastream(datastream)

        data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeFormat': time.get_format(),
            'timeZone': time.get_zone(),
            'timeIdentifier': time.get_identifier(),
            'entityName': 'machine'
        }
        self._add_and_verify(created.get_id(), 'json', options, data)

    # Add wide input data (csv format) to multi entity Datastream
    def test_add_data_csv_multi(self):
        datastream, time = self._build_datastream(
            "YYYY-MM-DD HH:mm:ss", entity_identifier='car')
        created = self._create_datastream(datastream)

        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeFormat': time.get_format(),
            'timeZone': time.get_zone(),
            'timeIdentifier': time.get_identifier(),
            'entityIdentifier': 'car'
        }
        self._add_and_verify(created.get_id(), 'csv', options, self.WIDE_CSV)

    # Cannot add data due to missing time Identifer
    def test_add_data_csv_multi_miss_time_identifier(self):
        datastream, time = self._build_datastream(
            "YYYY-MM-DD HH:mm:ss", entity_identifier='car')
        created = self._create_datastream(datastream)

        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeFormat': time.get_format(),
            'timeZone': time.get_zone(),
            'entityIdentifier': 'car'
        }
        self._add_expect_error(created.get_id(), options, self.WIDE_CSV,
                               "Missing time identifier.",
                               'Missing time identifer error not caught')

    # Cannot add data due to missing time zone
    def test_add_data_csv_multi_miss_time_zone(self):
        datastream, time = self._build_datastream(
            "YYYY-MM-DD HH:mm:ss", entity_identifier='car')
        created = self._create_datastream(datastream)

        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeFormat': time.get_format(),
            'timeIdentifier': time.get_identifier(),
            'entityIdentifier': 'car'
        }
        self._add_expect_error(created.get_id(), options, self.WIDE_CSV,
                               "Missing time zone.",
                               'Missing time zone error not caught')

    # Cannot add data due to missing time format
    def test_add_data_csv_multi_miss_time_format(self):
        datastream, time = self._build_datastream(
            "YYYY-MM-DD HH:mm:ss", entity_identifier='car')
        created = self._create_datastream(datastream)

        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeZone': time.get_zone(),
            'timeIdentifier': time.get_identifier(),
            'entityIdentifier': 'car'
        }
        self._add_expect_error(created.get_id(), options, self.WIDE_CSV,
                               "Missing time format.",
                               'Missing time format error not caught')

    # Add live input data (json format) to Datastream (Used for live monitoring)
    @unittest.skip("Skipping streaming data ingestion")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_data_streaming_json(self):
        datastreamId = 'datstream-id'  # id of the datasream which is live
        try:
            # NOTE(review): payload is csv-shaped but sent as 'json' — kept
            # as-is since the test is skipped; confirm before enabling.
            data = ("time, tag, value \n"
                    "2016-03-01 01:01:01, signal1_entity1, 3.4\n"
                    "2016-03-01 01:01:01, signal2_entity1, 1.4")
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'json',
                                                   options, data)
            self.assertNotEqual(
                response, 'Data Submitted Successfully',
                'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "Datastream is not live, streaming data cannot be
            # accepted." please turn on the datastream first
            print(exception_handler(e))
            self.fail('Cannot add input data to datastream')

    # Add live input data (csv format) to Datastream (Used for live monitoring)
    @unittest.skip("Skipping streaming data ingestion")
    # Streaming data can only be sent to datastream of datastream is live. So make sure that datastream is live first
    def test_add_data_streaming_csv(self):
        datastreamId = 'datstream-id'  # id of the datasream which is live
        try:
            # NOTE(review): payload is json-shaped and format is 'json'
            # despite the test name — kept as-is; confirm before enabling.
            data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'json',
                                                   options, data)
            self.assertNotEqual(
                response, 'Data Submitted Successfully',
                'Cannot add historical input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.fail('Cannot add input data to datastream')

    # Add narrow input data (csv format) with batch identifier to multi thing Datastream
    def test_add_narrow_multi_thing_data_with_batch(self):
        datastream, time = self._build_datastream(
            "millis", signal_identifier="signal", value_identifier="value",
            entity_identifier='unit', batch_identifier='batchId')
        created = self._create_datastream(datastream)

        data = 'time,batchId,unit,signal,value\n' \
               '1,batch_1,unit1,signal1,9.95\n' \
               '2,batch_1,unit1,signal1,4.45\n' \
               '3,batch_2,unit1,signal1,1.45\n' \
               '4,batch_2,unit1,signal1,8.45\n' \
               '5,batch_2,unit1,signal1,2.45\n' \
               '1,batch_1,unit1,signal2,19.95\n' \
               '2,batch_1,unit1,signal2,14.45\n' \
               '3,batch_2,unit1,signal2,10.45\n' \
               '4,batch_2,unit1,signal2,18.45\n' \
               '5,batch_2,unit1,signal2,12.45\n' \
               '1,batch_1,unit1,signal3,39.95\n' \
               '2,batch_1,unit1,signal3,34.45\n' \
               '3,batch_2,unit1,signal3,30.45\n' \
               '4,batch_2,unit1,signal3,38.45\n' \
               '5,batch_2,unit1,signal3,32.45\n'
        options = {'streaming': False, 'hasMoreData': False}
        self._add_and_verify(created.get_id(), 'csv', options, data)

    # Add narrow input data (json format) with batch identifier to single thing Datastream
    def test_add_narrow_single_thing_data_with_batch(self):
        datastream, time = self._build_datastream(
            "millis", signal_identifier="inputs", value_identifier="val",
            batch_identifier='batches')
        created = self._create_datastream(datastream)

        data = '{"time": 1,"batchId": "batch_1","signal": "signal1","value": 9.95}\n' \
               '{"time": 2,"batchId": "batch_1","signal": "signal1","value": 4.45}\n' \
               '{"time": 3,"batchId": "batch_2","signal": "signal1","value": 1.45}\n' \
               '{"time": 4,"batchId": "batch_2","signal": "signal1","value": 8.45}\n' \
               '{"time": 5,"batchId": "batch_2","signal": "signal1","value": 2.45}'
        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeFormat': time.get_format(),
            'timeZone': time.get_zone(),
            'timeIdentifier': time.get_identifier(),
            'signalIdentifier': 'signal',
            'valueIdentifier': 'value',
            'batchIdentifier': 'batchId'
        }
        self._add_and_verify(created.get_id(), 'json', options, data)

    # Add wide input data (csv format) with batch identifier to multi thing Datastream
    def test_add_wide_multi_thing_data_with_batch(self):
        datastream, time = self._build_datastream(
            "millis", entity_identifier='unit', batch_identifier='batchId')
        created = self._create_datastream(datastream)

        data = 'time,batchId,unit,signal1,signal2,signal3\n' \
               '1,batch_1,unit1,9.95,19.95,39.95\n' \
               '2,batch_1,unit1,4.45,14.45,34.45\n' \
               '3,batch_2,unit1,1.45,10.45,30.45\n' \
               '4,batch_2,unit1,8.45,18.45,38.45\n' \
               '5,batch_2,unit1,2.45,12.45,32.45'
        options = {'streaming': False, 'hasMoreData': False}
        self._add_and_verify(created.get_id(), 'csv', options, data)

    # Add wide input data (json format) with batch identifier to single thing Datastream
    def test_add_wide_single_thing_data_with_batch(self):
        datastream, time = self._build_datastream(
            "millis", batch_identifier='batches')
        created = self._create_datastream(datastream)

        data = '{"time": 1,"batchId": "batch_1","signal1": 9.95,"signal2": 19.95,"signal3": 39.95}\n' \
               '{"time": 2,"batchId": "batch_1","signal1": 4.45,"signal2": 14.45,"signal3": 34.45}\n' \
               '{"time": 3,"batchId": "batch_2","signal1": 1.45,"signal2": 10.45,"signal3": 30.45}\n' \
               '{"time": 4,"batchId": "batch_2","signal1": 8.45,"signal2": 18.45,"signal3": 38.45}\n' \
               '{"time": 5,"batchId": "batch_2","signal1": 2.45,"signal2": 12.45,"signal3": 32.45}'
        options = {
            'streaming': False,
            'hasMoreData': False,
            'timeFormat': time.get_format(),
            'timeZone': time.get_zone(),
            'timeIdentifier': time.get_identifier(),
            'batchIdentifier': 'batchId'
        }
        self._add_and_verify(created.get_id(), 'json', options, data)
# Example #26
class TestCreateAssessment(unittest.TestCase):
    """Tests covering create / list / fetch-by-id / delete of Falkonry assessments.

    Each test provisions its own datastream; ``tearDown`` deletes everything
    registered in ``self.created_datastreams``.
    """

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    # ---- shared helpers -------------------------------------------------

    def _create_datastream(self):
        """Create a STANDALONE datastream with an iso_8601 GMT time field.

        Registers the new datastream for teardown, verifies the creation
        response mirrors the request, and returns the response object.
        Raises on any service or assertion failure.
        """
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        response = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(response.get_id())

        # response must mirror the request
        self.assertEqual(isinstance(response, Schemas.Datastream), True,
                         'Invalid Datastream object after creation')
        self.assertEqual(isinstance(response.get_id(), unicode), True,
                         'Invalid id of datastream after creation')
        self.assertEqual(response.get_name(), datastream.get_name(),
                         'Invalid name of Datastream after creation')

        fieldResponse = response.get_field()
        self.assertEqual(isinstance(fieldResponse, Schemas.Field), True,
                         'Invalid field in  Datastream object after creation')
        self.assertEqual(fieldResponse.get_entityIdentifier(), "entity",
                         'Invalid entity identifier object after creation')
        self.assertEqual(fieldResponse.get_entityName(), response.get_name(),
                         'Invalid entity name object after creation')

        timeResponse = fieldResponse.get_time()
        self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                         'Invalid time object after creation')
        self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                         'Invalid zone object after creation')
        self.assertEqual(timeResponse.get_identifier(), time.get_identifier(),
                         'Invalid time identifier object after creation')
        self.assertEqual(timeResponse.get_format(), time.get_format(),
                         'Invalid time format object after creation')
        return response

    def _create_assessment(self, datastream_id):
        """Create an assessment bound to ``datastream_id``.

        Verifies the creation response mirrors the request and returns the
        created assessment. Raises on any service or assertion failure.
        """
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(datastream_id)
        asmtRequest.set_rate('PT0S')

        assessmentResponse = self.fclient.create_assessment(asmtRequest)
        self.assertEqual(isinstance(assessmentResponse, Schemas.Assessment),
                         True, 'Invalid Assessment object after creation')
        self.assertEqual(isinstance(assessmentResponse.get_id(), unicode),
                         True, 'Invalid id of Assessment after creation')
        self.assertEqual(assessmentResponse.get_name(), asmtRequest.get_name(),
                         'Invalid name of Assessment after creation')
        self.assertEqual(assessmentResponse.get_datastream(),
                         asmtRequest.get_datastream(),
                         'Invalid datastream in assessment after creation')
        self.assertEqual(assessmentResponse.get_rate(), asmtRequest.get_rate(),
                         'Invalid rate of Assessment after creation')
        # a fresh assessment must start with live monitoring off
        self.assertEqual(assessmentResponse.get_live(), 'OFF',
                         'Invalid live status of Assessment after creation')
        return assessmentResponse

    # ---- tests ----------------------------------------------------------

    # Create Assessment
    def test_create_assessment(self):
        try:
            datastream = self._create_datastream()
            self._create_assessment(datastream.get_id())
        except Exception as e:
            # e.message was removed in Python 3; use the shared handler
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Retrieve Assessments
    def test_get_assessments(self):
        try:
            datastream = self._create_datastream()
            self._create_assessment(datastream.get_id())

            # list assessments and make sure ours shows up in a non-empty list
            assessmentListResponse = self.fclient.get_assessments()
            self.assertEqual(isinstance(assessmentListResponse, list), True,
                             'Invalid Assessment object after creation')
            self.assertEqual(len(assessmentListResponse) > 0, True,
                             'Invalid length of assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Retrieve Assessment by Id
    def test_get_assessment_by_id(self):
        try:
            datastream = self._create_datastream()
            assessment = self._create_assessment(datastream.get_id())

            # fetch the assessment back by id and verify its shape
            assessmentGetResp = self.fclient.get_assessment(
                assessment.get_id())
            self.assertEqual(isinstance(assessmentGetResp, Schemas.Assessment),
                             True, 'Invalid Assessment object after creation')
            self.assertEqual(assessmentGetResp.get_id(), assessment.get_id(),
                             'Invalid assessment fetched')
            self.assertEqual(
                isinstance(assessmentGetResp.get_aprioriConditionList(), list),
                True, 'Invalid aprioriConditionList object after creation')
            # no model exists yet, so the apriori condition list is empty
            self.assertEqual(
                len(assessmentGetResp.get_aprioriConditionList()) == 0, True,
                'Invalid length of aprioriConditionList')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Delete Assessment
    def test_delete_assessment(self):
        try:
            datastream = self._create_datastream()
            assessment = self._create_assessment(datastream.get_id())

            # deletion failures are logged but do not fail the test
            try:
                self.fclient.delete_assessment(assessment.get_id())
            except Exception as e:
                print(exception_handler(e))
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))
 def setUp(self):
     # NOTE(review): orphaned fragment left by the scrape (1-space indent,
     # no enclosing class) — duplicates the setUp of the test classes above.
     self.fclient = FClient(host=host, token=token, options=None)
     pass
# ---- Example #28 ----
import os, sys
from falkonryclient import client as Falkonry
from falkonryclient import schemas as Schemas
import time
import yaml
import base64
from bunch import bunchify

# Load credentials/pipeline config, then stream assessment output to stdout.
# `with` ensures the config file handle is closed (the old code leaked it).
with open('data.yaml') as config_file:
    data = bunchify(yaml.safe_load(config_file))

falkonry = Falkonry('https://service.falkonry.io',
                    base64.b64decode(data.auth_token))

# Optional output window bounds, seconds since the unix epoch; None = all.
startTime = None  # e.g. str(time.time() - 1000 * 60 * 60)
endTime = None  # e.g. str(time.time())

outputStream = falkonry.get_output(data.pipeline_id, startTime, endTime)
for x in outputStream:
    # print(...) works on both Python 2 and 3 (the old `print x` is Py2-only)
    print(x)
class TestAddDataStream(unittest.TestCase):
    """Tests for streaming input files (json/csv) into Falkonry datastreams."""

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    # ---- shared helpers -------------------------------------------------

    def _create_datastream(self, entity_identifier=None):
        """Create a STANDALONE datastream with a "YYYY-MM-DD HH:mm:ss" GMT
        time field; pass ``entity_identifier`` for a multi-entity stream.

        Registers the datastream for teardown and returns
        ``(datastreamResponse, time)`` so callers can build ingestion
        options from the same time settings.
        """
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        if entity_identifier is not None:
            field.set_entityIdentifier(entity_identifier)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        return datastreamResponse, time

    def _add_stream(self, datastream_id, path, data_format, options):
        """Stream the file at ``path`` into the datastream and verify the
        upload was accepted and the data got ingested."""
        data = io.open(path)
        try:
            response = self.fclient.add_input_stream(
                datastream_id, data_format, options, data)
            self.assertNotEqual(response['__$id'], None,
                                'Cannot add input data to datastream')
            # checking if data got ingested
            check_data_ingestion(self, response)
        finally:
            # close the file handle the original code leaked
            data.close()

    # ---- tests ----------------------------------------------------------

    # Add historical wide input data (json format) to single entity Datastream (Used for model revision)
    def test_add_historical_json_data_stream(self):
        try:
            datastreamResponse, time = self._create_datastream()
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
            return
        try:
            options = {'streaming': False,
                       'hasMoreData': False,
                       'timeFormat': time.get_format(),
                       'timeZone': time.get_zone(),
                       'timeIdentifier': time.get_identifier()}
            self._add_stream(datastreamResponse.get_id(),
                             './resources/data.json', 'json', options)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add historical input data (csv format) from a stream to single entity Datastream (Used for model revision)
    def test_add_historical_csv_data_stream(self):
        try:
            datastreamResponse, time = self._create_datastream()
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
            return
        try:
            options = {'streaming': False,
                       'hasMoreData': False,
                       'timeFormat': time.get_format(),
                       'timeZone': time.get_zone(),
                       'timeIdentifier': time.get_identifier()}
            self._add_stream(datastreamResponse.get_id(),
                             './resources/data.csv', 'csv', options)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add historical input data (csv format) from a stream to Multi entity Datastream (Used for model revision)
    def test_add_historical_csv_data_stream_multi(self):
        try:
            datastreamResponse, time = self._create_datastream(
                entity_identifier='car')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
            return
        try:
            options = {'streaming': False,
                       'hasMoreData': False,
                       'timeFormat': time.get_format(),
                       'timeZone': time.get_zone(),
                       'timeIdentifier': time.get_identifier(),
                       'entityIdentifier': 'car',
                       'valueIdentifier': 'value',
                       'signalIdentifier': 'signal'}
            self._add_stream(datastreamResponse.get_id(),
                             './resources/dataMultiEntity.csv', 'csv', options)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add live input data (csv format) from a stream to Datastream (Used for live monitoring)
    @unittest.skip("streaming can only be done once ")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_streaming_csv_data_stream(self):
        datastreamId = 'datstream-id'  # id of the datastream which is live
        try:
            data = io.open('./resources/data.csv')
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'csv',
                                                   options, data)
            self.assertNotEqual(response, 'Data Submitted Successfully',
                                'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add live input data (json format) from a stream to Datastream (Used for live monitoring)
    @unittest.skip("streaming can only be done once ")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_streaming_json_data_stream(self):
        datastreamId = 'datstream-id'  # id of the datastream which is live
        try:
            data = io.open('./resources/data.json')
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'json',
                                                   options, data)
            self.assertNotEqual(response, 'Data Submitted Successfully',
                                'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))
    pass
 def setUp(self):
     # NOTE(review): orphaned fragment left by the scrape (1-space indent,
     # no enclosing class) — duplicates the setUp of the test classes above.
     self.fclient = FClient(host=host, token=token, options=None)
     pass
# ---- Example #31 ----
 def setUp(self):
     # NOTE(review): orphaned fragment left by the scrape (1-space indent,
     # no enclosing class) — duplicates the setUp of the test classes above.
     self.fclient = FClient(host=host, token=token, options=None)
     self.created_datastreams = []
     pass
 def setUp(self):
     # NOTE(review): second identical orphaned fragment from the scrape.
     self.fclient = FClient(host=host, token=token, options=None)
     self.created_datastreams = []
     pass
# ---- Example #33 ----
class TestAddDataStream(unittest.TestCase):
    """Tests for streaming input files (json/csv) into Falkonry datastreams."""

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    # ---- shared helpers -------------------------------------------------

    def _create_datastream(self, entity_identifier=None):
        """Create a STANDALONE datastream with a "YYYY-MM-DD HH:mm:ss" GMT
        time field; pass ``entity_identifier`` for a multi-entity stream.

        Registers the datastream for teardown and returns
        ``(datastreamResponse, time)`` so callers can build ingestion
        options from the same time settings.
        """
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        if entity_identifier is not None:
            field.set_entityIdentifier(entity_identifier)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        return datastreamResponse, time

    def _add_stream(self, datastream_id, path, data_format, options):
        """Stream the file at ``path`` into the datastream and verify the
        upload was accepted and the data got ingested."""
        data = io.open(path)
        try:
            response = self.fclient.add_input_stream(
                datastream_id, data_format, options, data)
            self.assertNotEqual(response['__$id'], None,
                                'Cannot add input data to datastream')
            # checking if data got ingested
            check_data_ingestion(self, response)
        finally:
            # close the file handle the original code leaked
            data.close()

    # ---- tests ----------------------------------------------------------

    # Add historical wide input data (json format) to single entity Datastream (Used for model revision)
    def test_add_historical_json_data_stream(self):
        try:
            datastreamResponse, time = self._create_datastream()
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
            return
        try:
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeFormat': time.get_format(),
                'timeZone': time.get_zone(),
                'timeIdentifier': time.get_identifier()
            }
            self._add_stream(datastreamResponse.get_id(),
                             './resources/data.json', 'json', options)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add historical input data (csv format) from a stream to single entity Datastream (Used for model revision)
    def test_add_historical_csv_data_stream(self):
        try:
            datastreamResponse, time = self._create_datastream()
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
            return
        try:
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeFormat': time.get_format(),
                'timeZone': time.get_zone(),
                'timeIdentifier': time.get_identifier()
            }
            self._add_stream(datastreamResponse.get_id(),
                             './resources/data.csv', 'csv', options)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add historical input data (csv format) from a stream to Multi entity Datastream (Used for model revision)
    def test_add_historical_csv_data_stream_multi(self):
        try:
            datastreamResponse, time = self._create_datastream(
                entity_identifier='car')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
            return
        try:
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeFormat': time.get_format(),
                'timeZone': time.get_zone(),
                'timeIdentifier': time.get_identifier(),
                'entityIdentifier': 'car',
                'valueIdentifier': 'value',
                'signalIdentifier': 'signal'
            }
            self._add_stream(datastreamResponse.get_id(),
                             './resources/dataMultiEntity.csv', 'csv', options)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add live input data (csv format) from a stream to Datastream (Used for live monitoring)
    @unittest.skip("streaming can only be done once ")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_streaming_csv_data_stream(self):
        datastreamId = 'datstream-id'  # id of the datastream which is live
        try:
            data = io.open('./resources/data.csv')
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'csv',
                                                   options, data)
            self.assertNotEqual(
                response, 'Data Submitted Successfully',
                'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add live input data (json format) from a stream to Datastream (Used for live monitoring)
    @unittest.skip("streaming can only be done once ")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_streaming_json_data_stream(self):
        datastreamId = 'datstream-id'  # id of the datastream which is live
        try:
            data = io.open('./resources/data.json')
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'json',
                                                   options, data)
            self.assertNotEqual(
                response, 'Data Submitted Successfully',
                'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))

    pass
class TestLiveAssessment(unittest.TestCase):

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []
        pass

    # Should get exception when turning on the assessment without active model
    def test_on_assessment_exception(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())

            # Create assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(response.get_id())
            asmtRequest.set_rate('PT0S')

            assessmentResponse = self.fclient.create_assessment(asmtRequest)

            try:
                assessment = self.fclient.on_assessment(assessmentResponse.get_id())
                self.assertEqual(assessment.get_id(), assessmentResponse.get_id())
            except Exception as e:
                msg = exception_handler(e)
                print(msg)
                self.assertEqual(msg, "No Active model assigned in Assessment: " + assessmentResponse.get_name())

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Should get live monitoring status of assessment
    def test_live_monitoring_status(self):
        assessment = self.fclient.get_assessment(assessment_id);
        self.assertEqual(str(assessment.get_live()), 'OFF')

    # Should turn on and off the assessment
    def test_turn_assessment_on_off(self):

        try:
            # assuming model is already built
            assessment = self.fclient.on_assessment(assessment_id)
            self.assertEqual(assessment.get_id(), assessment_id, 'Live monitoring turned on for incorrect assessment')

            timepkg.sleep(30)

            # turning off live monitoring
            try:
                assessment = self.fclient.off_assessment(assessment_id)
                self.assertEqual(assessment.get_id(), assessment_id, 'Live monitoring turned off for incorrect assessment')
                timepkg.sleep(30)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot turn assessment off')

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot turn assessment on')

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))
        pass
예제 #35
0
class TestAddData(unittest.TestCase):

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []
        pass

    # Add narrow input data (json format) to multi entity Datastream
    def test_add_data_json_mutli(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        signal.set_signalIdentifier("signal")
        signal.set_valueIdentifier("value")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = '{"time" : "2016-03-01 01:01:01", "signal" : "current", "value" : 12.4, "car" : "unit1"}'
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'signalIdentifier': 'signal',
                           'valueIdentifier': 'value',
                           'entityIdentifier': 'car'}

                # adding data to the created datastream
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add narrow input data (csv format) to single entity to Datastream
    def test_add_data_csv_single(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        signal.set_valueIdentifier("value")
        signal.set_signalIdentifier("signal")
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")

        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                # input data has timeformat different than the one set  while creating datastream
                data = "time, signal, value " + "\n" + "2016-03-01 01:01:01, signal1, 3.4" + "\n" + "2016-03-01 01:01:01, signal2, 1.4"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': "YYYY-MM-DD HH:mm:ss",
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier()}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (json format) to single entity Datastream
    def test_add_data_json_single(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityName('machine')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                # adding data to datastream
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'entityName': 'machine'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (csv format) to multi entity Datastream
    def test_add_data_csv_multi(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'entityIdentifier': 'car'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Cannot add data due to missing time Identifer
    def test_add_data_csv_multi_miss_time_identifier(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'entityIdentifier': 'car'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1, 'Missing time identifer error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # Printing only for debugging purposes
                print("\nResponse :", exception_handler(e))
                self.assertEqual(exception_handler(e), "Missing time identifier.", 'Missing time identifer error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Cannot add data due to missing time zone
    def test_add_data_csv_multi_miss_time_zone(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeIdentifier': time.get_identifier(),
                           'entityIdentifier': 'car'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1, 'Missing time zone error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # (b'{"message":"Missing time zone."}',)
                print("\nResponse :",exception_handler(e))
                self.assertEqual(exception_handler(e), "Missing time zone.", 'Missing time zone error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Cannot add data due to missing time format
    def test_add_data_csv_multi_miss_time_format(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'entityIdentifier': 'car'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1, 'Missing time format error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # Printing only for debugging purposes
                print('\nResponse :',exception_handler(e))
                self.assertEqual(exception_handler(e), "Missing time format.", 'Missing time format error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add live input data (json format) to Datastream (Used for live monitoring)
    @unittest.skip("Skipping streaming data ingestion")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_data_streaming_json(self):

        datastreamId = 'datstream-id'  # id of the datasream which is live
        try:
            data = "time, tag, value " + "\n" + "2016-03-01 01:01:01, signal1_entity1, 3.4" + "\n" + "2016-03-01 01:01:01, signal2_entity1, 1.4"
            options = {'streaming': True, 'hasMoreData':False}
            response = self.fclient.add_input_data(datastreamId, 'json', options, data)
            self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add live input data (csv format) to Datastream (Used for live monitoring)
    @unittest.skip("Skipping streaming data ingestion")
    # Streaming data can only be sent to datastream of datastream is live. So make sure that datastream is live first
    def test_add_data_streaming_csv(self):

        datastreamId = 'datstream-id'  # id of the datasream which is live
        try:
            data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
            options = {'streaming': True, 'hasMoreData':False}
            response = self.fclient.add_input_data(datastreamId, 'json', options, data)
            self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add narrow input data (csv format) with batch identifier to multi thing Datastream
    def test_add_narrow_multi_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        signal.set_signalIdentifier("signal")
        signal.set_valueIdentifier("value")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('unit')
        field.set_batchIdentifier('batchId')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = 'time,batchId,unit,signal,value\n' \
                       '1,batch_1,unit1,signal1,9.95\n' \
                       '2,batch_1,unit1,signal1,4.45\n' \
                       '3,batch_2,unit1,signal1,1.45\n' \
                       '4,batch_2,unit1,signal1,8.45\n' \
                       '5,batch_2,unit1,signal1,2.45\n' \
                       '1,batch_1,unit1,signal2,19.95\n' \
                       '2,batch_1,unit1,signal2,14.45\n' \
                       '3,batch_2,unit1,signal2,10.45\n' \
                       '4,batch_2,unit1,signal2,18.45\n' \
                       '5,batch_2,unit1,signal2,12.45\n' \
                       '1,batch_1,unit1,signal3,39.95\n' \
                       '2,batch_1,unit1,signal3,34.45\n' \
                       '3,batch_2,unit1,signal3,30.45\n' \
                       '4,batch_2,unit1,signal3,38.45\n' \
                       '5,batch_2,unit1,signal3,32.45\n'
                options = {
                    'streaming': False,
                    'hasMoreData': False
                }
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add narrow input data (json format) with batch identifier to single thing Datastream
    def test_add_narrow_single_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        signal.set_signalIdentifier("inputs")
        signal.set_valueIdentifier("val")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = '{"time": 1,"batchId": "batch_1","signal": "signal1","value": 9.95}\n' \
                       '{"time": 2,"batchId": "batch_1","signal": "signal1","value": 4.45}\n' \
                       '{"time": 3,"batchId": "batch_2","signal": "signal1","value": 1.45}\n' \
                       '{"time": 4,"batchId": "batch_2","signal": "signal1","value": 8.45}\n' \
                       '{"time": 5,"batchId": "batch_2","signal": "signal1","value": 2.45}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'signalIdentifier': 'signal',
                    'valueIdentifier': 'value',
                    'batchIdentifier': 'batchId'
                }
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (csv format) with batch identifier to multi thing Datastream
    def test_add_wide_multi_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('unit')
        field.set_batchIdentifier('batchId')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = 'time,batchId,unit,signal1,signal2,signal3\n' \
                       '1,batch_1,unit1,9.95,19.95,39.95\n' \
                       '2,batch_1,unit1,4.45,14.45,34.45\n' \
                       '3,batch_2,unit1,1.45,10.45,30.45\n' \
                       '4,batch_2,unit1,8.45,18.45,38.45\n' \
                       '5,batch_2,unit1,2.45,12.45,32.45'
                options = {
                    'streaming': False,
                    'hasMoreData': False
                }
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (json format) with batch identifier to single thing Datastream
    def test_add_wide_single_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = '{"time": 1,"batchId": "batch_1","signal1": 9.95,"signal2": 19.95,"signal3": 39.95}\n' \
                       '{"time": 2,"batchId": "batch_1","signal1": 4.45,"signal2": 14.45,"signal3": 34.45}\n' \
                       '{"time": 3,"batchId": "batch_2","signal1": 1.45,"signal2": 10.45,"signal3": 30.45}\n' \
                       '{"time": 4,"batchId": "batch_2","signal1": 8.45,"signal2": 18.45,"signal3": 38.45}\n' \
                       '{"time": 5,"batchId": "batch_2","signal1": 2.45,"signal2": 12.45,"signal3": 32.45}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'batchIdentifier': 'batchId'
                }
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))
    pass
class TestCreateAssessment(unittest.TestCase):
    """CRUD tests for assessments built on top of a freshly created datastream.

    The datastream/assessment creation and validation logic was duplicated
    verbatim in every test; it is factored into private helpers so each test
    states only what it adds on top of the shared setup.
    """

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        # Datastreams created during a test; deleted again in tearDown.
        self.created_datastreams = []

    # ---- shared helpers --------------------------------------------------

    def _create_validated_datastream(self):
        """Create a standalone datastream, register it for teardown, and
        assert the creation response mirrors the request.

        Returns:
            Schemas.Datastream: the server's datastream response.
        """
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        response = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(response.get_id())

        self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
        self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
        self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
        fieldResponse = response.get_field()
        self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in  Datastream object after creation')
        self.assertEqual(fieldResponse.get_entityIdentifier(), "entity", 'Invalid entity identifier object after creation')
        self.assertEqual(fieldResponse.get_entityName(), response.get_name(), 'Invalid entity name object after creation')

        timeResponse = fieldResponse.get_time()
        self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
        self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
        self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
        self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')

        return response

    def _create_validated_assessment(self, datastream_id):
        """Create an assessment bound to *datastream_id* and assert the
        creation response mirrors the request.

        Returns:
            Schemas.Assessment: the server's assessment response.
        """
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(datastream_id)
        asmtRequest.set_rate('PT0S')

        assessmentResponse = self.fclient.create_assessment(asmtRequest)
        self.assertEqual(isinstance(assessmentResponse, Schemas.Assessment), True, 'Invalid Assessment object after creation')
        self.assertEqual(isinstance(assessmentResponse.get_id(), str), True, 'Invalid id of Assessment after creation')
        self.assertEqual(assessmentResponse.get_name(), asmtRequest.get_name(), 'Invalid name of Assessment after creation')
        self.assertEqual(assessmentResponse.get_datastream(), asmtRequest.get_datastream(), 'Invalid datastream in assessment after creation')
        self.assertEqual(assessmentResponse.get_rate(), asmtRequest.get_rate(), 'Invalid rate of Assessment after creation')
        # A freshly created assessment must start with live monitoring off.
        # (Original message said "Invalid rate" here, which was misleading.)
        self.assertEqual(assessmentResponse.get_live(), 'OFF', 'Invalid live status of Assessment after creation')
        return assessmentResponse

    # ---- tests -----------------------------------------------------------

    # Create Assessment
    def test_create_assessment(self):
        try:
            response = self._create_validated_datastream()
            self._create_validated_assessment(response.get_id())
        except Exception as e:
            print(exception_handler(e))
            self.fail('Cannot create datastream')

    # Retrieve Assessments
    def test_get_assessments(self):
        try:
            response = self._create_validated_datastream()
            self._create_validated_assessment(response.get_id())

            # get assessments
            assessmentListResponse = self.fclient.get_assessments()
            self.assertEqual(isinstance(assessmentListResponse, list), True, 'Invalid Assessment object after creation')
            self.assertEqual(len(assessmentListResponse) > 0, True, 'Invalid length of assessment')

        except Exception as e:
            print(exception_handler(e))
            self.fail('Cannot create datastream')

    # Retrieve Assessment by Id
    def test_get_assessment_by_id(self):
        try:
            response = self._create_validated_datastream()
            assessmentResponse = self._create_validated_assessment(response.get_id())

            # get assessment by id
            assessmentGetResp = self.fclient.get_assessment(assessmentResponse.get_id())
            self.assertEqual(isinstance(assessmentGetResp, Schemas.Assessment), True, 'Invalid Assessment object after creation')
            self.assertEqual(assessmentGetResp.get_id(), assessmentResponse.get_id(), 'Invalid assessment fetched')
            self.assertEqual(isinstance(assessmentGetResp.get_aprioriConditionList(), list), True, 'Invalid aprioriConditionList object after creation')
            self.assertEqual(len(assessmentGetResp.get_aprioriConditionList()) == 0, True, 'Invalid length of aprioriConditionList')

        except Exception as e:
            print(exception_handler(e))
            self.fail('Cannot create datastream')

    # Delete Assessment
    def test_delete_assessment(self):
        try:
            response = self._create_validated_datastream()
            assessmentResponse = self._create_validated_assessment(response.get_id())

            # delete assessment; failure here is tolerated (best effort),
            # matching the original behavior.
            try:
                self.fclient.delete_assessment(assessmentResponse.get_id())
            except Exception as e:
                print(exception_handler(e))

        except Exception as e:
            print(exception_handler(e))
            self.fail('Cannot create datastream')

    def tearDown(self):
        """Best-effort deletion of every datastream the test created."""
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))
예제 #37
0
class TestAddFacts(unittest.TestCase):
    """Tests for streaming facts (ground-truth) data into an assessment.

    Fixes over the original:
    * ``print(e.message)`` raised ``AttributeError`` on Python 3
      (``BaseException.message`` was removed) — replaced with ``str(e)``.
    * facts files opened with ``io.open`` were never closed — now wrapped
      in a context manager.
    * the duplicated datastream/assessment setup is factored into helpers.
    """

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        # Datastreams created during a test; deleted again in tearDown.
        self.created_datastreams = []

    # ---- shared helpers --------------------------------------------------

    def _create_datastream(self):
        """Create a standalone datastream and register it for teardown."""
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        return datastreamResponse

    def _create_assessment(self, datastream_id):
        """Create an assessment bound to *datastream_id*."""
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(datastream_id)
        asmtRequest.set_rate('PT0S')
        return self.fclient.create_assessment(asmtRequest)

    @staticmethod
    def _fact_options(**extra):
        """Base facts-ingestion options; *extra* entries are merged on top."""
        options = {
            'startTimeIdentifier': "time",
            'endTimeIdentifier': "end",
            'timeFormat': "iso_8601",
            'timeZone': "GMT",
            'valueIdentifier': "Health"
        }
        options.update(extra)
        return options

    def _add_facts_file(self, assessment, data_format, path, options):
        """Stream the facts file at *path* into *assessment* and verify
        that ingestion completed."""
        # Context manager closes the file even if the upload raises
        # (the original leaked the handle).
        with io.open(path) as data:
            response = self.fclient.add_facts_stream(
                assessment.get_id(), data_format, options, data)
        check_data_ingestion(self, response)

    def _run_facts_test(self, data_format, path, options):
        """Shared driver: datastream -> assessment -> facts upload."""
        try:
            datastreamResponse = self._create_datastream()
        except Exception as e:
            print(str(e))
            self.fail("Cannot create datastream")
        try:
            assessment = self._create_assessment(datastreamResponse.get_id())
            self._add_facts_file(assessment, data_format, path, options)
        except Exception as e:
            print(str(e))
            self.fail('Cannot create assessment')

    # ---- tests -----------------------------------------------------------

    # Add facts data (json format) from a stream to Assessment
    def test_add_json_facts(self):
        self._run_facts_test('json', './resources/factsData.json',
                             self._fact_options())

    # Add facts data (csv format) from a stream to Assessment
    def test_add_csv_facts(self):
        self._run_facts_test('csv', './resources/factsData.csv',
                             self._fact_options())

    # Add facts data (csv format) with tags from a stream to Assessment
    def test_add_csv_facts_with_tags(self):
        self._run_facts_test('csv', './resources/factsDataWithTags.csv',
                             self._fact_options(tagIdentifier="Tags"))

    # Add facts data (csv format) with an additional tag from a stream to Assessment
    def test_add_csv_facts_with_additional_tags(self):
        self._run_facts_test('csv', './resources/factsData.csv',
                             self._fact_options(additionalTag="testTag"))

    def tearDown(self):
        """Best-effort deletion of every datastream the test created."""
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(str(e))
예제 #38
0
    def test_create_publication_of_splunk_type(self):
        """A SPLUNK-type publication can be attached to a pipeline.

        Python 3 fixes: ``unicode`` (NameError on Python 3) -> ``str``;
        ``e.message`` (removed in Python 3) -> ``str(e)``.
        """
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health')
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            pipeline = Schemas.Pipeline()
            signals = {
                'current': 'Numeric',
                'vibration': 'Numeric',
                'state': 'Categorical'
            }
            assessment = Schemas.Assessment()
            assessment.set_name('Health') \
                .set_input_signals(['current', 'vibration', 'state'])
            pipeline.set_name('Motor Health 1') \
                .set_eventbuffer(eventbuffer.get_id()) \
                .set_input_signals(signals) \
                .set_thing_name('Motor') \
                .set_assessment(assessment)

            try:
                pipeline = fclient.create_pipeline(pipeline)
                try:
                    publication = Schemas.Publication() \
                                      .set_type('SPLUNK') \
                                      .set_topic('falkonry-test-pipeline') \
                                      .set_path('https://test.splunk.com/') \
                                      .set_headers({
                                          'Authorization': 'Token 1234567890'
                                      })
                    response = fclient.create_publication(
                        pipeline.get_id(), publication)

                    self.assertEqual(
                        isinstance(response, Schemas.Publication), True,
                        'Invalid Publication object after creation')
                    # Keys are plain `str` on Python 3 (`unicode` no longer exists).
                    self.assertEqual(
                        isinstance(response.get_key(), str), True,
                        'Invalid Publication object after creation')
                    self.assertEqual(
                        response.get_type(), 'SPLUNK',
                        'Invalid Publication object after creation')
                    self.assertEqual(
                        response.get_topic(), 'falkonry-test-pipeline',
                        'Invalid Publication object after creation')
                    self.assertEqual(
                        response.get_path(), 'https://test.splunk.com/',
                        'Invalid Publication object after creation')
                except Exception as e:
                    print(str(e))
                    self.fail('Cannot create publication')

                # tear down (best effort: service may already have cleaned up)
                try:
                    fclient.delete_pipeline(pipeline.get_id())
                    fclient.delete_eventbuffer(eventbuffer.get_id())
                except Exception:
                    pass
            except Exception as e:
                print(str(e))
                self.fail('Cannot create pipeline')
        except Exception as e:
            print(str(e))
            self.fail('Cannot create eventbuffer')
예제 #39
0
import os
import sys
from falkonryclient import client as Falkonry
from falkonryclient import schemas as Schemas
import time
import yaml
import base64
from bunch import bunchify

# Load connection settings (auth token, pipeline id) from the local YAML file.
data = bunchify(yaml.safe_load(open('data.yaml')))
falkonry     = Falkonry('https://service.falkonry.io', base64.b64decode(data.auth_token))

startTime    = None #str(time.time()-1000*60*60) #seconds since unix epoch
endTime      = None #str(time.time()) #seconds since unix epoch

# None/None means "stream all available output" for the pipeline.
outputStream = falkonry.get_output(data.pipeline_id, startTime, endTime)
for x in outputStream:
    # print() function: the original used the Python 2 print statement,
    # which is a SyntaxError on Python 3.
    print(x)
    def test_create_pipeline_for_single_thing_with_eventType(self):
        """A pipeline with per-signal event types can be created on an
        eventbuffer that already holds data.

        Python 3 fixes: ``unicode`` (NameError on Python 3) -> ``str``;
        ``e.message`` (removed in Python 3) -> ``str(e)``.
        """
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
            try:
                response = fclient.add_input_data(eventbuffer.get_id(), 'json',
                                                  {}, data)

                pipeline = Schemas.Pipeline()
                # Signals may carry an event type ('Occurrences'/'Samples')
                # alongside their value type.
                signals = {
                    'current': ['Numeric', 'Occurrences'],
                    'vibration': ['Numeric', 'Samples'],
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment)

                try:
                    response = fclient.create_pipeline(pipeline)
                    self.assertEqual(isinstance(response,
                                                Schemas.Pipeline), True,
                                     'Invalid Pipeline object after creation')
                    # Ids are plain `str` on Python 3 (`unicode` no longer exists).
                    self.assertEqual(isinstance(response.get_id(),
                                                str), True,
                                     'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_name(), pipeline.get_name(),
                                     'Invalid Pipeline object after creation')
                    self.assertNotEqual(
                        response.get_thing_name(), None,
                        'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_input_signals()), 3,
                                     'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_assessments()), 1,
                                     'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_eventbuffer(),
                                     eventbuffer.get_id(),
                                     'Invalid Pipeline object after creation')

                    # tear down (best effort)
                    try:
                        fclient.delete_pipeline(response.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception:
                        pass
                except Exception as e:
                    print(str(e))
                    # Still try to remove the eventbuffer before failing.
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception:
                        pass
                    self.fail('Cannot create pipeline')
            except Exception as e:
                print(str(e))
                self.fail('Cannot add data')
        except Exception as e:
            print(str(e))
            self.fail('Cannot create eventbuffer')
예제 #41
0
    def test_create_publication_of_mqtt_type(self):
        """An MQTT-type publication (with credentials and content type)
        can be attached to a pipeline.

        Python 3 fixes: ``unicode`` (NameError on Python 3) -> ``str``;
        ``e.message`` (removed in Python 3) -> ``str(e)``.
        """
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health')
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')

        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            pipeline = Schemas.Pipeline()
            signals = {
                'current': 'Numeric',
                'vibration': 'Numeric',
                'state': 'Categorical'
            }
            assessment = Schemas.Assessment()
            assessment.set_name('Health') \
                .set_input_signals(['current', 'vibration', 'state'])
            pipeline.set_name('Motor Health 1') \
                .set_eventbuffer(eventbuffer.get_id()) \
                .set_input_signals(signals) \
                .set_thing_name('Motor') \
                .set_assessment(assessment)

            try:
                pipeline = fclient.create_pipeline(pipeline)
                try:
                    publication = Schemas.Publication() \
                        .set_type('MQTT') \
                        .set_topic('falkonry-test-pipeline') \
                        .set_path('mqtt://test.mosquito.com') \
                        .set_username('test-user') \
                        .set_password('test-password') \
                        .set_content_type('application/json')
                    response = fclient.create_publication(
                        pipeline.get_id(), publication)

                    self.assertEqual(
                        isinstance(response, Schemas.Publication), True,
                        'Invalid Publication object after creation')
                    # Keys are plain `str` on Python 3 (`unicode` no longer exists).
                    self.assertEqual(
                        isinstance(response.get_key(), str), True,
                        'Invalid Publication object after creation')
                    self.assertEqual(
                        response.get_type(), 'MQTT',
                        'Invalid Publication object after creation')
                    self.assertEqual(
                        response.get_topic(), 'falkonry-test-pipeline',
                        'Invalid Publication object after creation')
                    self.assertEqual(
                        response.get_username(), 'test-user',
                        'Invalid Publication object after creation')
                    self.assertEqual(
                        response.get_content_type(), 'application/json',
                        'Invalid Publication object after creation')
                except Exception as e:
                    print(str(e))
                    self.fail('Cannot create publication')

                # tear down (best effort: service may already have cleaned up)
                try:
                    fclient.delete_pipeline(pipeline.get_id())
                    fclient.delete_eventbuffer(eventbuffer.get_id())
                except Exception:
                    pass
            except Exception as e:
                print(str(e))
                self.fail('Cannot create pipeline')
        except Exception as e:
            print(str(e))
            self.fail('Cannot create eventbuffer')
예제 #42
0
class TestLiveAssessment(unittest.TestCase):
    """Tests for turning live monitoring of an assessment on and off."""

    def setUp(self):
        # Fresh client per test; record created datastreams so tearDown can
        # delete them even when a test fails midway.
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    # Should get exception when turning on the assessment without active model
    def test_on_assessment_exception(self):
        """Turning on an assessment that has no active model must be rejected."""

        # Build a minimal STANDALONE datastream definition.
        stream = Schemas.Datastream()
        stream.set_name('Motor Health' + str(random.random()))

        source = Schemas.Datasource()
        stream_field = Schemas.Field()
        stream_time = Schemas.Time()
        stream_signal = Schemas.Signal()

        stream_time.set_zone("GMT")
        stream_time.set_identifier("time")
        stream_time.set_format("iso_8601")
        stream_field.set_signal(stream_signal)
        source.set_type("STANDALONE")
        stream_field.set_time(stream_time)
        stream.set_datasource(source)
        stream.set_field(stream_field)

        try:
            created = self.fclient.create_datastream(stream)
            self.created_datastreams.append(created.get_id())

            # Attach an assessment to the new datastream.
            request = Schemas.AssessmentRequest()
            request.set_name('Assessment Name ' + str(random.random()))
            request.set_datastream(created.get_id())
            request.set_rate('PT0S')

            assessmentResponse = self.fclient.create_assessment(request)

            try:
                # No model has been built yet, so this call is expected to
                # raise; the except branch asserts the exact server message.
                turned_on = self.fclient.on_assessment(
                    assessmentResponse.get_id())
                self.assertEqual(turned_on.get_id(),
                                 assessmentResponse.get_id())
            except Exception as e:
                msg = exception_handler(e)
                print(msg)
                self.assertEqual(
                    msg, "No Active model assigned in Assessment: " +
                    assessmentResponse.get_name())

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Should get live monitoring status of assessment
    def test_live_monitoring_status(self):
        """A fetched assessment should report live monitoring as OFF."""
        fetched = self.fclient.get_assessment(assessment_id)
        self.assertEqual(str(fetched.get_live()), 'OFF')

    # Should turn on and off the assessment
    def test_turn_assessment_on_off(self):
        """Round-trip: enable live monitoring, wait, then disable it."""
        try:
            # assuming model is already built
            live = self.fclient.on_assessment(assessment_id)
            self.assertEqual(
                live.get_id(), assessment_id,
                'Live monitoring turned on for incorrect assessment')

            # Give the service time to settle before flipping the switch back.
            timepkg.sleep(30)

            # turning off live monitoring
            try:
                live = self.fclient.off_assessment(assessment_id)
                self.assertEqual(
                    live.get_id(), assessment_id,
                    'Live monitoring turned off for incorrect assessment')
                timepkg.sleep(30)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot turn assessment off')

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot turn assessment on')

    def tearDown(self):
        # Best-effort cleanup of every datastream this test created.
        for stream_id in self.created_datastreams:
            try:
                self.fclient.delete_datastream(stream_id)
            except Exception as e:
                print(exception_handler(e))
class TestAddFacts(unittest.TestCase):
    """Tests for adding fact (ground-truth) data to an assessment.

    Each test creates a fresh STANDALONE datastream, creates an assessment on
    it, then uploads facts via ``FClient.add_facts`` in JSON or CSV form,
    exercising entity, tag, additional-keyword and batch identifiers.
    ``check_data_ingestion`` is a helper defined elsewhere in this file —
    presumably it polls until ingestion finishes (TODO confirm).
    """

    def setUp(self):
        # One client per test; collect created datastream ids so tearDown can
        # delete them even if the test body fails partway through.
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []
        pass

    # Add facts data (json format) to Assessment
    def test_add_json_facts(self):
        """Upload a single JSON fact row keyed by the 'car' entity."""

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        # NOTE: local 'time' is a Schemas.Time object, not the time module
        # (the module is imported as 'timepkg' elsewhere in this file).
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier("car")
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)

                # adding fact
                data = '{"time" : "2011-03-26T12:00:00.000Z", "car" : "HI3821", "end" : "2012-06-01T00:00:00.000Z", "Health" : "Normal"}'

                # Identifiers map the JSON keys above onto fact fields.
                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'entityIdentifier': "car",
                    'valueIdentifier': "Health"
                }

                response = self.fclient.add_facts(resp_assessment.get_id(), 'json', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) to Assessment
    def test_add_csv_facts(self):
        """Upload two CSV fact rows (no explicit entity identifier option)."""

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)

                # adding fact to the assessment
                data = "time,end,car,Health\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health"
                }

                response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with tags to Assessment
    def test_add_csv_facts_with_tags(self):
            """Upload CSV facts carrying a per-row 'Tags' keyword column."""

            # creating datastream
            datastream = Schemas.Datastream()
            datastream.set_name('Motor Health' + str(random.random()))

            datasource = Schemas.Datasource()
            field = Schemas.Field()
            time = Schemas.Time()
            signal = Schemas.Signal()

            time.set_zone("GMT")
            time.set_identifier("time")
            time.set_format("YYYY-MM-DD HH:mm:ss")
            field.set_signal(signal)
            datasource.set_type("STANDALONE")
            field.set_time(time)
            datastream.set_datasource(datasource)
            datastream.set_field(field)
            try:
                datastreamResponse = self.fclient.create_datastream(datastream)
                self.created_datastreams.append(datastreamResponse.get_id())

                # creating assessment
                asmtRequest = Schemas.AssessmentRequest()
                asmtRequest.set_name('Assessment Name ' + str(random.random()))
                asmtRequest.set_datastream(datastreamResponse.get_id())
                asmtRequest.set_rate('PT0S')

                try:
                    resp_assessment = self.fclient.create_assessment(asmtRequest)
                    data = "time,end,car,Health,Tags\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal,testTag1\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal,testTag2"

                    # 'keywordIdentifier' points at the CSV column holding tags.
                    options = {
                        'startTimeIdentifier': "time",
                        'endTimeIdentifier': "end",
                        'timeFormat': "iso_8601",
                        'timeZone': time.get_zone(),
                        'valueIdentifier': "Health",
                        'keywordIdentifier': 'Tags'
                    }

                    # adding fact
                    response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)

                    # checking if data got ingested
                    check_data_ingestion(self, response)

                except Exception as e:
                    print(exception_handler(e))
                    self.assertEqual(0, 1, 'Cannot create assessment')
            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with additional Tag to Assessment
    def test_add_csv_facts_with_additional_tags(self):
            """Upload CSV facts and attach one fixed tag to every row."""

            # creating datastream
            datastream = Schemas.Datastream()
            datastream.set_name('Motor Health' + str(random.random()))

            datasource = Schemas.Datasource()
            field = Schemas.Field()
            time = Schemas.Time()
            signal = Schemas.Signal()

            time.set_zone("GMT")
            time.set_identifier("time")
            time.set_format("YYYY-MM-DD HH:mm:ss")
            field.set_signal(signal)
            datasource.set_type("STANDALONE")
            field.set_time(time)
            datastream.set_datasource(datasource)
            datastream.set_field(field)
            try:
                datastreamResponse = self.fclient.create_datastream(datastream)
                self.created_datastreams.append(datastreamResponse.get_id())

                # creating assessment
                asmtRequest = Schemas.AssessmentRequest()
                asmtRequest.set_name('Assessment Name ' + str(random.random()))
                asmtRequest.set_datastream(datastreamResponse.get_id())
                asmtRequest.set_rate('PT0S')

                try:
                    resp_assessment = self.fclient.create_assessment(asmtRequest)
                    data = "time,end,car,Health\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal"

                    # 'additionalKeyword' applies one tag to all uploaded facts
                    # (contrast with 'keywordIdentifier' in the test above).
                    options = {
                        'startTimeIdentifier': "time",
                        'endTimeIdentifier': "end",
                        'timeFormat': "iso_8601",
                        'timeZone': time.get_zone(),
                        'valueIdentifier': "Health",
                        'additionalKeyword': 'testTag'
                    }

                    response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)

                    # checking if data got ingested
                    check_data_ingestion(self, response)

                except Exception as e:
                    print(exception_handler(e))
                    self.assertEqual(0, 1, 'Cannot create assessment')
            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with batch identifier to Assessment
    def test_add_csv_fact_with_batch(self):
        """Upload batch-windowed input data, then batch-keyed CSV facts."""

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # creating assessment
                asmtRequest = Schemas.AssessmentRequest()
                asmtRequest.set_name('Assessment Name ' + str(random.random()))
                asmtRequest.set_datastream(datastreamResponse.get_id())
                asmtRequest.set_rate('PT0S')

                try:
                    resp_assessment = self.fclient.create_assessment(asmtRequest)

                    # Two newline-delimited JSON records, one per batch.
                    data = '{"time" : 123898422222, "batches" : "batch_1", "signal" : "current", "value" : 12.4}\n' \
                            '{"time" : 123898422322, "batches" : "batch_2", "signal" : "current", "value" : 12.4}'
                    options = {
                       'streaming': False,
                       'hasMoreData': False,
                       'timeFormat': time.get_format(),
                       'timeZone': time.get_zone(),
                       'timeIdentifier': time.get_identifier(),
                       'signalIdentifier': 'signal',
                       'valueIdentifier': 'value',
                       'batchIdentifier': 'batches'
                    }

                    # adding data to the created datastream
                    response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                    # '__$id' is presumably the server-assigned tracker id of
                    # the ingestion job — TODO confirm against FClient docs.
                    self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                    # checking if data got ingested
                    check_data_ingestion(self, response)

                    # adding fact to the assessment
                    data = "batchId,value\n" \
                           "batch_1,normal\n" \
                           "batch_2,abnormal"

                    # Facts are keyed by batch here, not by time window.
                    options = {
                        'valueIdentifier': "value",
                        'batchIdentifier': 'batchId'
                    }

                    response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)
                    self.assertNotEqual(response['__$id'], None, 'Cannot add fact data to datastream')

                    # checking if data got ingested
                    check_data_ingestion(self, response)

                except Exception as e:
                    print(exception_handler(e))
                    try:
                        self.fclient.delete_datastream(datastreamResponse.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create assessment')

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input or fact data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    def tearDown(self):  # teardown
        # Best-effort removal of datastreams created by the tests above.
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))
    pass
class TestSchema(unittest.TestCase):
    """Entity-meta tests: attach metadata to a datastream and read it back."""

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    def _new_standalone_datastream(self):
        """Build (but do not create) a minimal STANDALONE datastream schema."""
        stream = Schemas.Datastream()
        stream.set_name('Motor Health' + str(random.random()))

        source = Schemas.Datasource()
        field = Schemas.Field()
        clock = Schemas.Time()
        sig = Schemas.Signal()

        clock.set_zone("GMT")
        clock.set_identifier("time")
        clock.set_format("iso_8601")
        field.set_signal(sig)
        source.set_type("STANDALONE")
        field.set_time(clock)
        stream.set_datasource(source)
        stream.set_field(field)
        return stream

    def _assert_entity_meta(self, meta_list):
        """Assert a returned entity-meta list echoes the payload we sent."""
        self.assertEqual(isinstance(meta_list, list), True,
                         'Invalid entityMeta object after creation')
        self.assertEqual(len(meta_list) > 0, True,
                         'Invalid length of entityMeta')

        first = meta_list[0]
        self.assertEqual(isinstance(first, Schemas.EntityMeta), True,
                         'Invalid entityMeta object after creation')
        self.assertEqual(isinstance(first.get_id(), str), True,
                         'Invalid id of entityMeta after creation')
        self.assertEqual(first.get_label(), 'testName',
                         'Invalid label of entityMeta after creation')
        self.assertEqual(first.get_path(), 'root/path',
                         'Invalid path of entityMeta after creation')
        self.assertEqual(first.get_sourceId(), 'testId',
                         'Invalid sourceId of entityMeta after creation')

    # Add EntityMeta to a Datastream
    def test_add_entity_meta(self):
        """Creating entity meta should echo back sourceId/label/path."""
        stream = self._new_standalone_datastream()
        try:
            created = self.fclient.create_datastream(stream)
            self.created_datastreams.append(created.get_id())
            self.assertEqual(
                isinstance(created, Schemas.Datastream), True,
                'Invalid Datastream object after creation')
            self.assertEqual(isinstance(created.get_id(), str),
                             True, 'Invalid id of datastream after creation')

            # add EntityMeta
            payload = [{
                "sourceId": "testId",
                "label": "testName",
                "path": "root/path"
            }]
            created_meta = self.fclient.add_entity_meta(
                created.get_id(), {}, payload)
            self._assert_entity_meta(created_meta)

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add entityMeta to datastream')

    # Get EntityMeta of a Datastream
    def test_get_entity_meta(self):
        """Entity meta written via add_entity_meta must be retrievable."""
        # This test intentionally uses its own client instance rather than
        # the one from setUp (kept from the original behavior).
        client = FClient(host=host, token=token, options=None)
        stream = self._new_standalone_datastream()

        try:
            created = client.create_datastream(stream)
            self.created_datastreams.append(created.get_id())

            self.assertEqual(
                isinstance(created, Schemas.Datastream), True,
                'Invalid Datastream object after creation')
            self.assertEqual(isinstance(created.get_id(), str),
                             True, 'Invalid id of datastream after creation')
            payload = [{
                "sourceId": "testId",
                "label": "testName",
                "path": "root/path"
            }]

            # add EntityMeta
            created_meta = client.add_entity_meta(created.get_id(), {}, payload)
            self._assert_entity_meta(created_meta)

            # get entity meta and verify it matches what was written
            fetched_meta = client.get_entity_meta(created.get_id())
            self._assert_entity_meta(fetched_meta)

        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add entityMeta to datastream')

    def tearDown(self):
        # Best-effort cleanup of every datastream created by the tests.
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))
    def test_add_csv_verification(self):
        """Add CSV verification (fact) data to a freshly created pipeline.

        Creates an eventbuffer, streams one JSON input record into it, builds
        a pipeline with a 'Health' assessment, then uploads CSV verification
        rows. The pipeline/eventbuffer are deleted again on both the success
        and the failure path.
        """
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                response = fclient.add_input_data(eventbuffer.get_id(), 'json', {}, data)
                pipeline = Schemas.Pipeline()
                signals = {
                    'current': 'Numeric',
                    'vibration': 'Numeric',
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment)

                try:
                    resp_pipeline = fclient.create_pipeline(pipeline)
                    data = "time,end,car,Health\n2011-03-31T00:00:00Z,2011-04-01T00:00:00Z,IL9753,Normal\n2011-03-31T00:00:00Z,2011-04-01T00:00:00Z,HI3821,Normal"
                    response = fclient.add_verification(resp_pipeline.get_id(), 'csv', {}, data)
                    # tear down
                    try:
                        fclient.delete_pipeline(resp_pipeline.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception:
                        # Cleanup is best-effort; ignore failures here.
                        pass
                except Exception as e:
                    # Fix: `e.message` does not exist on Python 3 (PEP 352);
                    # use the shared exception_handler like the rest of the file.
                    print(exception_handler(e))
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, "Cannot add data")
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
예제 #46
0
    def test_create_datastream_with_batch_identifier(self):
        """Create a datastream with three inputs and a batch identifier.

        Verifies that the created datastream echoes back the name, field
        (including batchIdentifier 'batch'), time configuration, and all
        three input signals.
        """
        fclient = FClient(host=host, token=token, options=None)
        datastream = Schemas.Datastream()
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()  # NOTE: shadows any module-level time import
        signal = Schemas.Signal()
        input1 = Schemas.Input()
        input2 = Schemas.Input()
        input3 = Schemas.Input()

        datastream.set_name('Motor Health' +
                            str(random.random()))  # set name of the Datastream

        input1.set_name("Signal1")  # set name of input signal
        input1.set_value_type(
            "Numeric"
        )  # set value type of input signal (Numeric for number, Categorical for string type)
        input1.set_event_type("Samples")  # set event type of input signal
        input2.set_name("Signal2")  # set name of input signal
        input2.set_value_type(
            "Numeric"
        )  # set value type of input signal (Numeric for number, Categorical for string type)
        input2.set_event_type("Samples")  # set event type of input signal
        input3.set_name("Signal3")  # set name of input signal
        input3.set_value_type(
            "Numeric"
        )  # set value type of input signal (Numeric for number, Categorical for string type)
        input3.set_event_type("Samples")  # set event type of input signal
        inputs = [input1, input2, input3]

        time.set_zone("GMT")  # set timezone of the datastream
        time.set_identifier("time")  # set time identifier of the datastream
        time.set_format("iso_8601")  # set time format of the datastream
        field.set_time(time)
        field.set_signal(signal)  # set signal in field
        field.set_batchIdentifier("batch")  # set batchIdentifier in field
        datasource.set_type("STANDALONE")  # set datastource type in datastream
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        datastream.set_inputs(inputs)

        try:
            # create Datastream
            response = fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())

            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # Fix: `unicode` is Python 2 only (NameError on Python 3);
            # use `str`, matching the other tests in this file.
            self.assertEqual(isinstance(response.get_id(), str), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')

            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(),
                             response.get_name(),
                             'Invalid entity name object after creation')
            self.assertEqual(fieldResponse.get_batchIdentifier(), "batch",
                             'Invalid batchIdentifier after creation')

            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

            inputs = response.get_inputs()
            self.assertEqual(isinstance(inputs, list), True,
                             'Invalid inputs object after creation')
            self.assertEqual(len(inputs), 3,
                             'Invalid inputs object after creation')
            # Each returned input must match the corresponding request input.
            for sent, got in zip([input1, input2, input3], inputs):
                self.assertEqual(got.get_name(), sent.get_name(),
                                 'Invalid input after object creation')
                self.assertEqual(got.get_value_type(),
                                 sent.get_value_type(),
                                 'Invalid input value type after object creation')

        except Exception as e:
            # Fix: `e.message` does not exist on Python 3 (PEP 352);
            # use the shared exception_handler like the rest of the file.
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
예제 #47
0
class TestAddFacts(unittest.TestCase):
    """Tests for adding fact (ground-truth) data to an assessment.

    Each test creates a throw-away datastream plus an assessment, pushes
    facts in JSON or CSV form via ``FClient.add_facts`` and verifies that
    ingestion succeeded.  The repeated datastream/assessment setup that was
    previously copy-pasted into every test lives in the ``_make_datastream``
    and ``_make_assessment`` helpers; every datastream created is registered
    in ``self.created_datastreams`` and deleted again in ``tearDown``.
    """

    def setUp(self):
        # Fresh client per test plus a registry of datastream ids to clean up.
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    def _make_datastream(self, time_format="YYYY-MM-DD HH:mm:ss",
                         entity_identifier=None, batch_identifier=None):
        """Create a STANDALONE datastream with a GMT 'time' field.

        Returns a ``(datastream_response, time_schema)`` pair and registers
        the new datastream id for deletion in ``tearDown``.  Propagates any
        exception raised by ``FClient.create_datastream``.
        """
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format(time_format)
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        if entity_identifier is not None:
            field.set_entityIdentifier(entity_identifier)
        if batch_identifier is not None:
            field.set_batchIdentifier(batch_identifier)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        return datastreamResponse, time

    def _make_assessment(self, datastream_id):
        """Create an assessment with rate PT0S on the given datastream."""
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(datastream_id)
        asmtRequest.set_rate('PT0S')
        return self.fclient.create_assessment(asmtRequest)

    # Add facts data (json format) to Assessment
    def test_add_json_facts(self):
        try:
            datastreamResponse, time = self._make_datastream(
                entity_identifier="car")
            try:
                resp_assessment = self._make_assessment(
                    datastreamResponse.get_id())

                # single fact with explicit start/end times for entity HI3821
                data = '{"time" : "2011-03-26T12:00:00.000Z", "car" : "HI3821", "end" : "2012-06-01T00:00:00.000Z", "Health" : "Normal"}'

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'entityIdentifier': "car",
                    'valueIdentifier': "Health"
                }

                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'json', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) to Assessment
    def test_add_csv_facts(self):
        try:
            datastreamResponse, time = self._make_datastream()
            try:
                resp_assessment = self._make_assessment(
                    datastreamResponse.get_id())

                # two facts, one per entity, as CSV rows
                data = "time,end,car,Health\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health"
                }

                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with tags to Assessment
    def test_add_csv_facts_with_tags(self):
        try:
            datastreamResponse, time = self._make_datastream()
            try:
                resp_assessment = self._make_assessment(
                    datastreamResponse.get_id())

                # per-row tag values are carried in the extra 'Tags' column
                data = "time,end,car,Health,Tags\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal,testTag1\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal,testTag2"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health",
                    'tagIdentifier': 'Tags'
                }

                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with additional Tag to Assessment
    def test_add_csv_facts_with_additional_tags(self):
        try:
            datastreamResponse, time = self._make_datastream()
            try:
                resp_assessment = self._make_assessment(
                    datastreamResponse.get_id())

                # no tag column in the data; a fixed tag is applied to all rows
                data = "time,end,car,Health\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health",
                    'additionalTag': 'testTag'
                }

                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with batch identifier to Assessment
    def test_add_csv_fact_with_batch(self):
        try:
            datastreamResponse, time = self._make_datastream(
                time_format="millis", batch_identifier='batches')
            try:
                resp_assessment = self._make_assessment(
                    datastreamResponse.get_id())

                # input data must be ingested first so batch-keyed facts can
                # attach to existing batches
                data = '{"time" : 123898422222, "batches" : "batch_1", "signal" : "current", "value" : 12.4}\n' \
                       '{"time" : 123898422322, "batches" : "batch_2", "signal" : "current", "value" : 12.4}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'signalIdentifier': 'signal',
                    'valueIdentifier': 'value',
                    'batchIdentifier': 'batches'
                }

                # adding data to the created datastream
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

                # facts keyed by batch id instead of a time range
                data = "batchId,value\n" \
                       "batch_1,normal\n" \
                       "batch_2,abnormal"

                options = {
                    'valueIdentifier': "value",
                    'batchIdentifier': 'batchId'
                }

                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'csv', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add fact data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # No inline delete_datastream here: tearDown already removes
                # every datastream registered by _make_datastream.
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    def tearDown(self):
        """Delete every datastream created during the test (best effort)."""
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))
# ---- Example 48 ----
class TestDatastream(unittest.TestCase):
    def setUp(self):
        # Fresh API client per test and an empty registry of datastream ids
        # that tearDown will delete again.
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    # Create datastream without any signals
    # Create datastream without any signals
    def test_create_standalone_datastream(self):
        """Create a bare STANDALONE datastream and verify the echoed fields."""
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())
            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` exists only on Python 2 — confirm the
            # intended runtime before porting this suite to Python 3.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')
            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(),
                             response.get_name(),
                             'Invalid entity name object after creation')

            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

        except Exception as e:
            # e.message is Python-2-only and inconsistent with the rest of the
            # file; use the shared exception_handler like the other tests.
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Create Datastream for narrow/historian style data from a single entity
    # Create Datastream for narrow/historian style data from a single entity
    def test_create_datastream_narrow_style_single_entity(self):
        """Create a narrow-style datastream (signal/value identifiers, no
        entity identifier) and verify the echoed fields."""
        datastream = Schemas.Datastream()
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        datastream.set_name('Motor Health' +
                            str(random.random()))  # set name of the Datastream
        time.set_zone("GMT")  # set timezone of the datastream
        time.set_identifier("time")  # set time identifier of the datastream
        time.set_format("iso_8601")  # set time format of the datastream
        field.set_time(time)
        signal.set_valueIdentifier("value")
        signal.set_signalIdentifier("signal")
        field.set_signal(signal)  # set signal in field
        datasource.set_type("STANDALONE")  # set datastource type in datastream
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            # create Datastream
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())
            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` exists only on Python 2 — confirm runtime.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')
            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(),
                             response.get_name(),
                             'Invalid entity name object after creation')
            signalResponse = fieldResponse.get_signal()
            self.assertEqual(signalResponse.get_valueIdentifier(),
                             signal.get_valueIdentifier(),
                             'Invalid value identifier after object creation')
            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

        except Exception as e:
            # e.message is Python-2-only; use the shared exception_handler
            # for consistency with the rest of the file.
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Create Datastream for narrow/historian style data from a multiple entities
    # Create Datastream for narrow/historian style data from a multiple entities
    def test_create_datastream_narrow_style_multiple_entity(self):
        """Create a narrow-style datastream with an explicit entity
        identifier (multiple entities) and verify the echoed fields."""
        datastream = Schemas.Datastream()
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        datastream.set_name('Motor Health' +
                            str(random.random()))  # set name of the Datastream
        time.set_zone("GMT")  # set timezone of the datastream
        time.set_identifier("time")  # set time identifier of the datastream
        time.set_format("iso_8601")  # set time format of the datastream
        field.set_time(time)
        signal.set_signalIdentifier("signal")  # set signal identifier
        signal.set_valueIdentifier("value")  # set value identifier
        field.set_entityIdentifier("entity")  # set entity identifier
        field.set_signal(signal)  # set signal in field
        datasource.set_type("STANDALONE")  # set datastource type in datastream
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            # create Datastream
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())
            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` exists only on Python 2 — confirm runtime.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')
            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            # multi-entity datastreams carry no single entity name
            self.assertEqual(fieldResponse.get_entityName(), None,
                             'Invalid entity name object after creation')
            signalResponse = fieldResponse.get_signal()
            self.assertEqual(signalResponse.get_valueIdentifier(),
                             signal.get_valueIdentifier(),
                             'Invalid value identifier after object creation')
            self.assertEqual(
                signalResponse.get_signalIdentifier(),
                signal.get_signalIdentifier(),
                'Invalid signal identifier after object creation')
            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

        except Exception as e:
            # e.message is Python-2-only; use the shared exception_handler
            # for consistency with the rest of the file.
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Create Datastream for wide style data from a single entity
    # Create Datastream for wide style data from a single entity
    def test_create_datastream_wide_style_single_entity(self):
        """Create a wide-style datastream (one column per input signal) for a
        single entity and verify the echoed fields and inputs."""
        datastream = Schemas.Datastream()
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        input1 = Schemas.Input()
        input2 = Schemas.Input()
        input3 = Schemas.Input()

        datastream.set_name('Motor Health' +
                            str(random.random()))  # set name of the Datastream

        # Three numeric sample-type input signals, one per wide column.
        # (Numeric for number, Categorical for string type)
        input1.set_name("Signal1")
        input1.set_value_type("Numeric")
        input1.set_event_type("Samples")
        input2.set_name("Signal2")
        input2.set_value_type("Numeric")
        input2.set_event_type("Samples")
        input3.set_name("Signal3")
        input3.set_value_type("Numeric")
        input3.set_event_type("Samples")
        inputs = [input1, input2, input3]

        time.set_zone("GMT")  # set timezone of the datastream
        time.set_identifier("time")  # set time identifier of the datastream
        time.set_format("iso_8601")  # set time format of the datastream
        field.set_time(time)
        field.set_signal(signal)  # set signal in field
        datasource.set_type("STANDALONE")  # set datastource type in datastream
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        datastream.set_inputs(inputs)

        try:
            # create Datastream
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())
            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` exists only on Python 2 — confirm runtime.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')

            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(),
                             response.get_name(),
                             'Invalid entity name object after creation')

            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

            inputs = response.get_inputs()
            self.assertEqual(isinstance(inputs, list), True,
                             'Invalid inputs object after creation')
            self.assertEqual(len(inputs), 3,
                             'Invalid inputs object after creation')
            # plain indexing instead of calling __getitem__ directly
            inputResp1 = inputs[0]
            inputResp2 = inputs[1]
            inputResp3 = inputs[2]
            self.assertEqual(inputResp1.get_name(), input1.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp1.get_value_type(),
                             input1.get_value_type(),
                             'Invalid input value type after object creation')
            self.assertEqual(inputResp2.get_name(), input2.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp2.get_value_type(),
                             input2.get_value_type(),
                             'Invalid input value type after object creation')
            self.assertEqual(inputResp3.get_name(), input3.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp3.get_value_type(),
                             input3.get_value_type(),
                             'Invalid input value type after object creation')

        except Exception as e:
            # e.message is Python-2-only; use the shared exception_handler
            # for consistency with the rest of the file.
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Create Datastream for wide style data from a multiple entities
    # Create Datastream for wide style data from a multiple entities
    def test_create_datastream_wide_style_multiple_entity(self):
        """Create a wide-style datastream with an entity identifier
        (multiple entities) and verify the echoed fields and inputs."""
        datastream = Schemas.Datastream()
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        input1 = Schemas.Input()
        input2 = Schemas.Input()
        input3 = Schemas.Input()

        datastream.set_name('Motor Health' +
                            str(random.random()))  # set name of the Datastream

        # Three numeric sample-type input signals, one per wide column.
        # (Numeric for number, Categorical for string type)
        input1.set_name("Signal1")
        input1.set_value_type("Numeric")
        input1.set_event_type("Samples")
        input2.set_name("Signal2")
        input2.set_value_type("Numeric")
        input2.set_event_type("Samples")
        input3.set_name("Signal3")
        input3.set_value_type("Numeric")
        input3.set_event_type("Samples")
        inputs = [input1, input2, input3]

        time.set_zone("GMT")  # set timezone of the datastream
        time.set_identifier("time")  # set time identifier of the datastream
        time.set_format("iso_8601")  # set time format of the datastream
        field.set_time(time)
        field.set_signal(signal)  # set signal in field
        field.set_entityIdentifier("entity")
        datasource.set_type("STANDALONE")  # set datastource type in datastream
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        datastream.set_inputs(inputs)

        try:
            # create Datastream
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())
            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` exists only on Python 2 — confirm runtime.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')

            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            # multi-entity datastreams carry no single entity name
            self.assertEqual(fieldResponse.get_entityName(), None,
                             'Invalid entity name object after creation')

            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

            inputs = response.get_inputs()
            self.assertEqual(isinstance(inputs, list), True,
                             'Invalid inputs object after creation')
            self.assertEqual(len(inputs), 3,
                             'Invalid inputs object after creation')
            # plain indexing instead of calling __getitem__ directly
            inputResp1 = inputs[0]
            inputResp2 = inputs[1]
            inputResp3 = inputs[2]
            self.assertEqual(inputResp1.get_name(), input1.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp1.get_value_type(),
                             input1.get_value_type(),
                             'Invalid input value type after object creation')
            self.assertEqual(inputResp2.get_name(), input2.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp2.get_value_type(),
                             input2.get_value_type(),
                             'Invalid input value type after object creation')
            self.assertEqual(inputResp3.get_name(), input3.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp3.get_value_type(),
                             input3.get_value_type(),
                             'Invalid input value type after object creation')

        except Exception as e:
            # e.message is Python-2-only; use the shared exception_handler
            # for consistency with the rest of the file.
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Retrieve Datastreams
    def test_get_datastream_list(self):
        """Create a datastream, then verify get_datastreams() returns a
        non-empty list."""
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())
            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` is the Python 2 builtin; this suite
            # appears to target Python 2 — confirm before porting.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')
            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(),
                             response.get_name(),
                             'Invalid entity name object after creation')

            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

            # get datastream list
            datastreamList = self.fclient.get_datastreams()
            self.assertEqual(isinstance(datastreamList, list), True,
                             'Invalid datastreamlist in response')
            self.assertEqual(
                len(datastreamList) > 0, True,
                'No datastreams in get response')

        except Exception as e:
            # Exception.message was removed in Python 3 (PEP 352); printing
            # the exception itself works on both interpreters.
            print(e)
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Retrieve Datastream by Id
    def test_get_datastream_by_id(self):
        """Create a datastream, then fetch it back by id and verify it is the
        same datastream."""

        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        signal.set_signalIdentifier("signal")
        signal.set_valueIdentifier("value")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())

            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` is the Python 2 builtin; this suite
            # appears to target Python 2 — confirm before porting.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')
            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(),
                             response.get_name(),
                             'Invalid entity name object after creation')

            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

            # fetch the datastream back by id and verify identity
            datastreamResp = self.fclient.get_datastream(response.get_id())
            self.assertEqual(isinstance(datastreamResp, Schemas.Datastream),
                             True, 'Invalid time object after creation')
            self.assertEqual(response.get_id(), datastreamResp.get_id(),
                             'Invalid id of datastream after creation')

        except Exception as e:
            # Exception.message was removed in Python 3 (PEP 352); printing
            # the exception itself works on both interpreters.
            print(e)
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Delete Datastream
    def test_delete_datastream_by_id(self):
        """Create a datastream, then delete it by id.

        The created id is deliberately NOT registered in
        self.created_datastreams: the test itself performs the deletion.
        """

        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            response = self.fclient.create_datastream(datastream)
            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` is the Python 2 builtin; this suite
            # appears to target Python 2 — confirm before porting.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')
            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(),
                             response.get_name(),
                             'Invalid entity name object after creation')

            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

            # delete datastream; use a distinct name so the outer handler's
            # `e` is not shadowed
            try:
                self.fclient.delete_datastream(response.get_id())
            except Exception as delete_error:
                # Exception.message was removed in Python 3 (PEP 352).
                print(delete_error)
                self.assertEqual(0, 1, 'Cannot delete datastream')
        except Exception as e:
            print(e)
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Create Datastream microseconds precision
    def test_create_datastream_micro_second_precision(self):
        """Create a datastream with 'micro' time precision and verify the
        server echoes the same precision back."""
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))
        datastream.set_time_precision(
            'micro')  # set 'micro' for microseconds precision
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")
        signal.set_signalIdentifier("signal")
        signal.set_valueIdentifier("value")
        field.set_entityIdentifier("entity")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            # create Datastream
            response = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())

            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` is the Python 2 builtin; this suite
            # appears to target Python 2 — confirm before porting.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')
            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(), None,
                             'Invalid entity name object after creation')
            signalResponse = fieldResponse.get_signal()
            self.assertEqual(
                signalResponse.get_signalIdentifier(), "signal",
                'Invalid signal identifier object after creation')
            self.assertEqual(signalResponse.get_valueIdentifier(),
                             signal.get_valueIdentifier(),
                             'Invalid value identifier after object creation')
            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')
            self.assertEqual(response.get_time_precision(),
                             datastream.get_time_precision(),
                             'Invalid time precision after creation')

        except Exception as e:
            # Exception.message was removed in Python 3 (PEP 352); printing
            # the exception itself works on both interpreters.
            print(e)
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Create Datastream for batch identifier
    def test_create_datastream_with_batch_identifier(self):
        """Create a datastream with a batch identifier and three numeric
        input signals, and verify the server echoes them back."""
        # NOTE(review): this test builds its own client instead of using
        # self.fclient like the sibling tests; kept for behavior parity.
        fclient = FClient(host=host, token=token, options=None)
        datastream = Schemas.Datastream()
        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        input1 = Schemas.Input()
        input2 = Schemas.Input()
        input3 = Schemas.Input()

        datastream.set_name('Motor Health' +
                            str(random.random()))  # set name of the Datastream

        # Input signals: value type is Numeric for numbers, Categorical for
        # string-valued signals; event type is Samples.
        input1.set_name("Signal1")
        input1.set_value_type("Numeric")
        input1.set_event_type("Samples")
        input2.set_name("Signal2")
        input2.set_value_type("Numeric")
        input2.set_event_type("Samples")
        input3.set_name("Signal3")
        input3.set_value_type("Numeric")
        input3.set_event_type("Samples")
        inputs = [input1, input2, input3]

        time.set_zone("GMT")  # set timezone of the datastream
        time.set_identifier("time")  # set time identifier of the datastream
        time.set_format("iso_8601")  # set time format of the datastream
        field.set_time(time)
        field.set_signal(signal)  # set signal in field
        field.set_batchIdentifier("batch")  # set batchIdentifier in field
        datasource.set_type("STANDALONE")  # set datastource type in datastream
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        datastream.set_inputs(inputs)

        try:
            # create Datastream
            response = fclient.create_datastream(datastream)
            self.created_datastreams.append(response.get_id())

            self.assertEqual(isinstance(response, Schemas.Datastream), True,
                             'Invalid Datastream object after creation')
            # NOTE(review): `unicode` is the Python 2 builtin; this suite
            # appears to target Python 2 — confirm before porting.
            self.assertEqual(isinstance(response.get_id(), unicode), True,
                             'Invalid id of datastream after creation')
            self.assertEqual(response.get_name(), datastream.get_name(),
                             'Invalid name of Datastream after creation')

            fieldResponse = response.get_field()
            self.assertEqual(
                isinstance(fieldResponse, Schemas.Field), True,
                'Invalid field in  Datastream object after creation')
            self.assertEqual(
                fieldResponse.get_entityIdentifier(), "entity",
                'Invalid entity identifier object after creation')
            self.assertEqual(fieldResponse.get_entityName(),
                             response.get_name(),
                             'Invalid entity name object after creation')
            self.assertEqual(fieldResponse.get_batchIdentifier(), "batch",
                             'Invalid batchIdentifier after creation')

            timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True,
                             'Invalid time object after creation')
            self.assertEqual(timeResponse.get_zone(), time.get_zone(),
                             'Invalid zone object after creation')
            self.assertEqual(timeResponse.get_identifier(),
                             time.get_identifier(),
                             'Invalid time identifier object after creation')
            self.assertEqual(timeResponse.get_format(), time.get_format(),
                             'Invalid time format object after creation')

            # use a distinct name for the echoed inputs so the request list
            # `inputs` is not shadowed, and plain indexing over __getitem__
            inputsResponse = response.get_inputs()
            self.assertEqual(isinstance(inputsResponse, list), True,
                             'Invalid inputs object after creation')
            self.assertEqual(len(inputsResponse), 3,
                             'Invalid inputs object after creation')
            inputResp1 = inputsResponse[0]
            inputResp2 = inputsResponse[1]
            inputResp3 = inputsResponse[2]
            self.assertEqual(inputResp1.get_name(), input1.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp1.get_value_type(),
                             input1.get_value_type(),
                             'Invalid input value type after object creation')
            self.assertEqual(inputResp2.get_name(), input2.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp2.get_value_type(),
                             input2.get_value_type(),
                             'Invalid input value type after object creation')
            self.assertEqual(inputResp3.get_name(), input3.get_name(),
                             'Invalid input after object creation')
            self.assertEqual(inputResp3.get_value_type(),
                             input3.get_value_type(),
                             'Invalid input value type after object creation')

        except Exception as e:
            # Exception.message was removed in Python 3 (PEP 352); printing
            # the exception itself works on both interpreters.
            print(e)
            self.assertEqual(0, 1, 'Cannot create datastream')

    def tearDown(self):
        """Best-effort cleanup: delete every datastream created during a test."""
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                # Exception.message was removed in Python 3 (PEP 352);
                # log and continue with the remaining ids.
                print(e)
# Example #49
    def test_add_csv_verification(self):
        """Create an eventbuffer and pipeline, then upload csv verification
        data to the pipeline."""
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                response = fclient.add_input_data(eventbuffer.get_id(), 'json',
                                                  {}, data)
                pipeline = Schemas.Pipeline()
                signals = {
                    'current': 'Numeric',
                    'vibration': 'Numeric',
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment)

                try:
                    resp_pipeline = fclient.create_pipeline(pipeline)
                    # close the csv handle after upload (it was leaked before)
                    with io.open('./verificationData.csv') as data:
                        response = fclient.add_verification(
                            resp_pipeline.get_id(), 'csv', {}, data)
                    # tear down (best effort; cleanup failures are ignored)
                    try:
                        fclient.delete_pipeline(resp_pipeline.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception:
                        pass
                except Exception as e:
                    # Exception.message was removed in Python 3 (PEP 352).
                    print(e)
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(e)
                self.assertEqual(0, 1, "Cannot add data")
        except Exception as e:
            print(e)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
class TestAddFacts(unittest.TestCase):
    """Tests for streaming facts (ground-truth) data into an assessment.

    All four tests share the same flow — create a datastream, create an
    assessment on it, stream a facts file — so the common steps live in
    private helpers and each test only supplies the format, the file path,
    and any extra facts options.
    """

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    def _create_motor_datastream(self):
        """Create a minimal STANDALONE datastream; return (response, time).

        The created id is registered for teardown. Any API failure
        propagates to the caller.
        """
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        return datastreamResponse, time

    def _create_assessment(self, datastreamResponse):
        """Create an assessment (rate PT0S) on the given datastream."""
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(datastreamResponse.get_id())
        asmtRequest.set_rate('PT0S')
        return self.fclient.create_assessment(asmtRequest)

    def _run_facts_test(self, data_format, path, extra_options=None):
        """Shared body: create datastream + assessment, stream the facts
        file at *path* in *data_format*, and check the data got ingested.

        The nested try/except structure (and its assertion messages)
        mirrors the original per-test error reporting.
        """
        try:
            datastreamResponse, time = self._create_motor_datastream()
            try:
                resp_assessment = self._create_assessment(datastreamResponse)

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health"
                }
                if extra_options:
                    options.update(extra_options)

                # close the facts file after upload (the originals leaked it)
                with io.open(path) as data:
                    response = self.fclient.add_facts_stream(
                        resp_assessment.get_id(), data_format, options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (json format) from a stream to Assessment
    def test_add_json_facts(self):
        self._run_facts_test('json', './resources/factsData.json')

    # Add facts data (csv format) from a stream to  Assessment
    def test_add_csv_facts(self):
        self._run_facts_test('csv', './resources/factsData.csv')

    # Add facts data (csv format) with tags from a stream to  Assessment
    def test_add_csv_facts_with_tags(self):
        self._run_facts_test('csv', './resources/factsDataWithTags.csv',
                             {'keywordIdentifier': "Tags"})

    # Add facts data (csv format) with additional Tag from a stream to  Assessment
    def test_add_csv_facts_with_additional_tags(self):
        self._run_facts_test('csv', './resources/factsData.csv',
                             {'additionalKeyword': "testTag"})

    def tearDown(self):
        """Best-effort cleanup: delete every datastream created by a test."""
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))