Example #1
    def test_add_data_single_thing(self):
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health')
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                response = fclient.add_input_data(eventbuffer.get_id(), 'json',
                                                  {}, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to eventbuffer')

                # tear down
                try:
                    fclient.delete_eventbuffer(eventbuffer.get_id())
                except Exception as e:
                    pass
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add input data to eventbuffer')
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
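Every example below repeats the same best-effort tear-down pattern. A minimal helper that captures it, as a sketch assuming only the delete_pipeline and delete_eventbuffer calls already shown:

def cleanup(fclient, pipeline_id=None, eventbuffer_id=None):
    # Best-effort clean-up: swallow failures so tear-down never masks
    # the actual test result.
    if pipeline_id is not None:
        try:
            fclient.delete_pipeline(pipeline_id)
        except Exception:
            pass
    if eventbuffer_id is not None:
        try:
            fclient.delete_eventbuffer(eventbuffer_id)
        except Exception:
            pass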
Example #2
    def test_add_data_single_thing(self):
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health')
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                response = fclient.add_input_data(eventbuffer.get_id(), 'json', {}, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to eventbuffer')

                # tear down
                try:
                    fclient.delete_eventbuffer(eventbuffer.get_id())
                except Exception as e:
                    pass
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add input data to eventbuffer')
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
    def test_create_pipeline_with_multiple_assessment(self):
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        eventbuffer.set_thing_identifier('motor')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            data = "time, motor, current, vibration, state\n" + "2016-03-01 01:01:01, Motor1, 12.4, 3.4, On"
            try:
                response = fclient.add_input_data(eventbuffer.get_id(), 'csv', {}, data)

                pipeline = Schemas.Pipeline()
                signals = {
                    'current': ['Numeric', 'Occurrences'],
                    'vibration': ['Numeric', 'Samples'],
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                assessment2 = Schemas.Assessment()
                assessment2.set_name('Health2') \
                    .set_input_signals(['vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment) \
                    .set_assessment(assessment2)

                try:
                    response = fclient.create_pipeline(pipeline)
                    self.assertEqual(isinstance(response, Schemas.Pipeline), True, 'Invalid Pipeline object after creation')
                    self.assertEqual(isinstance(response.get_id(), unicode), True, 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_name(), pipeline.get_name(), 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_thing_identifier(), eventbuffer.get_thing_identifier(), 'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_input_signals()), 3, 'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_assessments()), 2, 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_eventbuffer(), eventbuffer.get_id(), 'Invalid Pipeline object after creation')

                    # tear down
                    try:
                        fclient.delete_pipeline(response.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                except Exception as e:
                    print(e.message)
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add data')        
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
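Note that set_assessment is called once per assessment to attach several of them to a single pipeline. The same construction driven by a list, as a sketch reusing the Schemas module, eventbuffer, and signals from the test above:

pipeline = Schemas.Pipeline()
pipeline.set_name('Motor Health 1') \
    .set_eventbuffer(eventbuffer.get_id()) \
    .set_input_signals(signals)
for name, input_signals in [('Health', ['current', 'vibration', 'state']),
                            ('Health2', ['vibration', 'state'])]:
    assessment = Schemas.Assessment()
    assessment.set_name(name).set_input_signals(input_signals)
    pipeline.set_assessment(assessment)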
    def test_create_pipeline_for_single_thing(self):
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
            try:
                response = fclient.add_input_data(eventbuffer.get_id(), 'json', {}, data)

                pipeline = Schemas.Pipeline()
                signals  = {
                    'current': 'Numeric',
                    'vibration': 'Numeric',
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment)

                try:
                    response = fclient.create_pipeline(pipeline)
                    self.assertEqual(isinstance(response, Schemas.Pipeline), True, 'Invalid Pipeline object after creation')
                    self.assertEqual(isinstance(response.get_id(), unicode), True, 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_name(), pipeline.get_name(), 'Invalid Pipeline object after creation')
                    self.assertNotEqual(response.get_thing_name(), None, 'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_input_signals()), 3, 'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_assessments()), 1, 'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_eventbuffer(), eventbuffer.get_id(), 'Invalid Pipeline object after creation')

                    # tear down
                    try:
                        fclient.delete_pipeline(response.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                except Exception as e:
                    print(e.message)
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add data')        
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
Example #5
    def test_add_json_verification(self):
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                response = fclient.add_input_data(eventbuffer.get_id(), 'json',
                                                  {}, data)
                pipeline = Schemas.Pipeline()
                signals = {
                    'current': 'Numeric',
                    'vibration': 'Numeric',
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment)

                try:
                    resp_pipeline = fclient.create_pipeline(pipeline)
                    data = io.open('./verificationData.json')

                    response = fclient.add_verification(
                        resp_pipeline.get_id(), 'json', {}, data)
                    # tear down
                    try:
                        fclient.delete_pipeline(resp_pipeline.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                except Exception as e:
                    print(e.message)
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, "Cannot add data")
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
    def test_add_json_verification(self):
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            try:
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                response = fclient.add_input_data(eventbuffer.get_id(), 'json', {}, data)
                pipeline = Schemas.Pipeline()
                signals  = {
                    'current': 'Numeric',
                    'vibration': 'Numeric',
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment)

                try:
                    resp_pipeline = fclient.create_pipeline(pipeline)
                    data = '{"time" : "2011-03-26T12:00:00Z", "car" : "HI3821", "end" : "2012-06-01T00:00:00Z", "Health" : "Normal"}'

                    response = fclient.add_verification(resp_pipeline.get_id(), 'json', {}, data)
                    # tear down
                    try:
                        fclient.delete_pipeline(resp_pipeline.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                except Exception as e:
                    print(e.message)
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(e.message)
                self.assertEqual(0, 1, 'Cannot add data')
        except Exception as e:
            print(e.message)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
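The two verification tests above differ only in how the facts payload is supplied: the first streams a file handle opened with io.open, the second passes an inline JSON string. The file-handle form with explicit clean-up, as a sketch assuming the same fclient and resp_pipeline:

import io

with io.open('./verificationData.json') as data:
    # add_verification accepts a readable stream as well as a string
    response = fclient.add_verification(resp_pipeline.get_id(), 'json', {}, data)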
Example #7
class TestAddFacts(unittest.TestCase):
    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []
        pass

    # Add facts data (json format) to Assessment
    def test_add_json_facts(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier("car")
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)

                # adding fact
                data = '{"time" : "2011-03-26T12:00:00.000Z", "car" : "HI3821", "end" : "2012-06-01T00:00:00.000Z", "Health" : "Normal"}'

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'entityIdentifier': "car",
                    'valueIdentifier': "Health"
                }

                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'json', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) to Assessment
    def test_add_csv_facts(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)

                # adding fact to the assessment
                data = "time,end,car,Health\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health"
                }

                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with tags to Assessment
    def test_add_csv_facts_with_tags(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)
                data = "time,end,car,Health,Tags\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal,testTag1\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal,testTag2"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health",
                    'tagIdentifier': 'Tags'
                }

                # adding fact
                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with additional Tag to Assessment
    def test_add_csv_facts_with_additional_tags(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)
                data = "time,end,car,Health\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health",
                    'additionalTag': 'testTag'
                }

                response = self.fclient.add_facts(resp_assessment.get_id(),
                                                  'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with batch identifier to Assessment
    def test_add_csv_fact_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # creating assessment
                asmtRequest = Schemas.AssessmentRequest()
                asmtRequest.set_name('Assessment Name ' + str(random.random()))
                asmtRequest.set_datastream(datastreamResponse.get_id())
                asmtRequest.set_rate('PT0S')

                try:
                    resp_assessment = self.fclient.create_assessment(
                        asmtRequest)

                    data = '{"time" : 123898422222, "batches" : "batch_1", "signal" : "current", "value" : 12.4}\n' \
                            '{"time" : 123898422322, "batches" : "batch_2", "signal" : "current", "value" : 12.4}'
                    options = {
                        'streaming': False,
                        'hasMoreData': False,
                        'timeFormat': time.get_format(),
                        'timeZone': time.get_zone(),
                        'timeIdentifier': time.get_identifier(),
                        'signalIdentifier': 'signal',
                        'valueIdentifier': 'value',
                        'batchIdentifier': 'batches'
                    }

                    # adding data to the created datastream
                    response = self.fclient.add_input_data(
                        datastreamResponse.get_id(), 'json', options, data)
                    self.assertNotEqual(response['__$id'], None,
                                        'Cannot add input data to datastream')

                    # checking if data got ingested
                    check_data_ingestion(self, response)

                    # adding fact to the assessment
                    data = "batchId,value\n" \
                           "batch_1,normal\n" \
                           "batch_2,abnormal"

                    options = {
                        'valueIdentifier': "value",
                        'batchIdentifier': 'batchId'
                    }

                    response = self.fclient.add_facts(resp_assessment.get_id(),
                                                      'csv', options, data)
                    self.assertNotEqual(response['__$id'], None,
                                        'Cannot add fact data to datastream')

                    # checking if data got ingested
                    check_data_ingestion(self, response)

                except Exception as e:
                    print(exception_handler(e))
                    try:
                        self.fclient.delete_datastream(
                            datastreamResponse.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create assessment')

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(
                    0, 1, 'Cannot add input or fact data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))

    pass
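For reference, the fact-ingestion option keys exercised across the tests above, collected in one dict; the values are the identifiers used in these examples, and only the keys that apply to a given payload should be passed:

facts_options = {
    'startTimeIdentifier': 'time',   # column/key holding the fact start time
    'endTimeIdentifier': 'end',      # column/key holding the fact end time
    'timeFormat': 'iso_8601',
    'timeZone': 'GMT',
    'entityIdentifier': 'car',       # multi-entity facts only
    'valueIdentifier': 'Health',     # column/key holding the fact value
    'tagIdentifier': 'Tags',         # per-row fact tags (optional)
    'additionalTag': 'testTag',      # one tag applied to every fact (optional)
    'batchIdentifier': 'batchId',    # batch-oriented facts only
}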
Example #8
class TestAddDataStream(unittest.TestCase):
    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []
        pass

    # Add historical wide input data (json format) to single entity Datastream (Used for model revision)
    def test_add_historical_json_data_stream(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = io.open('./resources/data.json')
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier()
                }

                # adding data to the datastream
                response = self.fclient.add_input_stream(
                    datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add historical input data (csv format) from a stream to single entity Datastream (Used for model revision)
    def test_add_historical_csv_data_stream(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = io.open('./resources/data.csv')

                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier()
                }

                # adding data to the datastream
                response = self.fclient.add_input_stream(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add historical input data (csv format) from a stream to Multi entity Datastream (Used for model revision)
    def test_add_historical_csv_data_stream_multi(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = io.open('./resources/dataMultiEntity.csv')

                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'entityIdentifier': 'car',
                    'valueIdentifier': 'value',
                    'signalIdentifier': 'signal'
                }
                response = self.fclient.add_input_stream(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add live input data (csv format) from a stream to Datastream (Used for live monitoring)
    @unittest.skip("streaming can only be done once ")
    # Streaming data can only be sent to a datastream that is live, so make sure the datastream is live first
    def test_add_streaming_csv_data_stream(self):

        datastreamId = 'datastream-id'  # id of the datastream which is live
        try:
            data = io.open('./resources/data.csv')
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'csv',
                                                   options, data)
            self.assertEqual(
                response, 'Data Submitted Successfully',
                'Cannot add streaming input data to datastream')
        except Exception as e:
            # if the response is '{"message":"Datastream is not live, streaming data cannot be accepted."}', turn the datastream on first and then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')
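Streaming ingestion only succeeds once the datastream is live. A guarded variant, as a sketch that assumes the client exposes an on_datastream call for turning a datastream live (verify the method name against your client version; it is an assumption here):

import io

def stream_csv(fclient, datastream_id, path):
    # Assumption: on_datastream turns the datastream live; check that this
    # method exists in your version of the client.
    fclient.on_datastream(datastream_id)
    with io.open(path) as data:
        options = {'streaming': True, 'hasMoreData': False}
        return fclient.add_input_data(datastream_id, 'csv', options, data)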

    # Add live input data (json format) from a stream to Datastream (Used for live monitoring)
    @unittest.skip("streaming can only be done once ")
    # Streaming data can only be sent to a datastream that is live, so make sure the datastream is live first
    def test_add_streaming_json_data_stream(self):

        datastreamId = 'datastream-id'  # id of the datastream which is live
        try:
            data = io.open('./resources/data.json')
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'json',
                                                   options, data)
            self.assertEqual(
                response, 'Data Submitted Successfully',
                'Cannot add streaming input data to datastream')
        except Exception as e:
            # if the response is '{"message":"Datastream is not live, streaming data cannot be accepted."}', turn the datastream on first and then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))

    pass
class TestAddData(unittest.TestCase):
    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []
        pass

    # Add narrow input data (json format) to multi entity Datastream
    def test_add_data_json_multi(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        signal.set_signalIdentifier("signal")
        signal.set_valueIdentifier("value")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = '{"time" : "2016-03-01 01:01:01", "signal" : "current", "value" : 12.4, "car" : "unit1"}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'signalIdentifier': 'signal',
                    'valueIdentifier': 'value',
                    'entityIdentifier': 'car'
                }

                # adding data to the created datastream
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add narrow input data (csv format) to single entity Datastream
    def test_add_data_csv_single(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        signal.set_valueIdentifier("value")
        signal.set_signalIdentifier("signal")
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")

        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                # input data has a time format different from the one set while creating the datastream
                data = "time, signal, value " + "\n" + "2016-03-01 01:01:01, signal1, 3.4" + "\n" + "2016-03-01 01:01:01, signal2, 1.4"
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': "YYYY-MM-DD HH:mm:ss",
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier()
                }
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (json format) to single entity Datastream
    def test_add_data_json_single(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityName('machine')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                # adding data to datastream
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'entityName': 'machine'
                }
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (csv format) to multi entity Datastream
    def test_add_data_csv_multi(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'entityIdentifier': 'car'
                }
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
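The ingestion tests above alternate between two CSV layouts; a quick reference with rows modeled on the examples:

# Narrow layout: one row per (time, signal) reading; needs
# signalIdentifier/valueIdentifier options at ingestion time.
narrow_csv = ("time,signal,value,car\n"
              "2016-03-01 01:01:01,current,12.4,car1")

# Wide layout: one column per signal.
wide_csv = ("time,current,vibration,state,car\n"
            "2016-03-01 01:01:01,12.4,3.4,on,car1")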

    # Cannot add data due to missing time identifier
    def test_add_data_csv_multi_miss_time_identifier(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'entityIdentifier': 'car'
                }
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1,
                                 'Missing time identifier error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # Printing only for debugging purposes
                print("\nResponse :", exception_handler(e))
                self.assertEqual(exception_handler(e),
                                 "Missing time identifier.",
                                 'Missing time identifier error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Cannot add data due to missing time zone
    def test_add_data_csv_multi_miss_time_zone(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeIdentifier': time.get_identifier(),
                    'entityIdentifier': 'car'
                }
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1, 'Missing time zone error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # (b'{"message":"Missing time zone."}',)
                print("\nResponse :", exception_handler(e))
                self.assertEqual(exception_handler(e), "Missing time zone.",
                                 'Missing time zone error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Cannot add data due to missing time format
    def test_add_data_csv_multi_miss_time_format(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'entityIdentifier': 'car'
                }
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1, 'Missing time format error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # Printing only for debugging purposes
                print('\nResponse :', exception_handler(e))
                self.assertEqual(exception_handler(e), "Missing time format.",
                                 'Missing time format error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
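The three failure tests above show the server rejecting ingestion when a time option is missing. A client-side pre-check, as a sketch (a hypothetical helper, not part of the client API):

REQUIRED_TIME_OPTIONS = ('timeIdentifier', 'timeZone', 'timeFormat')

def check_time_options(options):
    # Fail early instead of waiting for the server's "Missing time
    # identifier." / "Missing time zone." / "Missing time format." errors.
    missing = [key for key in REQUIRED_TIME_OPTIONS if key not in options]
    if missing:
        raise ValueError('Missing time options: %s' % ', '.join(missing))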

    # Add live input data (json format) to Datastream (Used for live monitoring)
    @unittest.skip("Skipping streaming data ingestion")
    # Streaming data can only be sent to a datastream that is live, so make sure the datastream is live first
    def test_add_data_streaming_json(self):

        datastreamId = 'datastream-id'  # id of the datastream which is live
        try:
            data = "time, tag, value " + "\n" + "2016-03-01 01:01:01, signal1_entity1, 3.4" + "\n" + "2016-03-01 01:01:01, signal2_entity1, 1.4"
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'json',
                                                   options, data)
            self.assertEqual(
                response, 'Data Submitted Successfully',
                'Cannot add streaming input data to datastream')
        except Exception as e:
            # if the response is '{"message":"Datastream is not live, streaming data cannot be accepted."}', turn the datastream on first and then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add live input data (csv format) to Datastream (Used for live monitoring)
    @unittest.skip("Skipping streaming data ingestion")
    # Streaming data can only be sent to a datastream that is live, so make sure the datastream is live first
    def test_add_data_streaming_csv(self):

        datastreamId = 'datastream-id'  # id of the datastream which is live
        try:
            data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
            options = {'streaming': True, 'hasMoreData': False}
            response = self.fclient.add_input_data(datastreamId, 'json',
                                                   options, data)
            self.assertEqual(
                response, 'Data Submitted Successfully',
                'Cannot add streaming input data to datastream')
        except Exception as e:
            # if the response is '{"message":"Datastream is not live, streaming data cannot be accepted."}', turn the datastream on first and then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add narrow input data (csv format) with batch identifier to multi thing Datastream
    def test_add_narrow_multi_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        signal.set_signalIdentifier("signal")
        signal.set_valueIdentifier("value")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('unit')
        field.set_batchIdentifier('batchId')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = 'time,batchId,unit,signal,value\n' \
                       '1,batch_1,unit1,signal1,9.95\n' \
                       '2,batch_1,unit1,signal1,4.45\n' \
                       '3,batch_2,unit1,signal1,1.45\n' \
                       '4,batch_2,unit1,signal1,8.45\n' \
                       '5,batch_2,unit1,signal1,2.45\n' \
                       '1,batch_1,unit1,signal2,19.95\n' \
                       '2,batch_1,unit1,signal2,14.45\n' \
                       '3,batch_2,unit1,signal2,10.45\n' \
                       '4,batch_2,unit1,signal2,18.45\n' \
                       '5,batch_2,unit1,signal2,12.45\n' \
                       '1,batch_1,unit1,signal3,39.95\n' \
                       '2,batch_1,unit1,signal3,34.45\n' \
                       '3,batch_2,unit1,signal3,30.45\n' \
                       '4,batch_2,unit1,signal3,38.45\n' \
                       '5,batch_2,unit1,signal3,32.45\n'
                options = {'streaming': False, 'hasMoreData': False}
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add narrow input data (json format) with batch identifier to single thing Datastream
    def test_add_narrow_single_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        signal.set_signalIdentifier("inputs")
        signal.set_valueIdentifier("val")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = '{"time": 1,"batchId": "batch_1","signal": "signal1","value": 9.95}\n' \
                       '{"time": 2,"batchId": "batch_1","signal": "signal1","value": 4.45}\n' \
                       '{"time": 3,"batchId": "batch_2","signal": "signal1","value": 1.45}\n' \
                       '{"time": 4,"batchId": "batch_2","signal": "signal1","value": 8.45}\n' \
                       '{"time": 5,"batchId": "batch_2","signal": "signal1","value": 2.45}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'signalIdentifier': 'signal',
                    'valueIdentifier': 'value',
                    'batchIdentifier': 'batchId'
                }
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
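
    # Note: the datastream above was created with signalIdentifier "inputs",
    # valueIdentifier "val" and batchIdentifier "batches", yet options passes
    # 'signal', 'value' and 'batchId' to match the keys actually present in the
    # json lines; the per-request options appear to take precedence over the
    # field configuration when the payload is parsed.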

    # Add wide input data (csv format) with batch identifier to multi thing Datastream
    def test_add_wide_multi_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('unit')
        field.set_batchIdentifier('batchId')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = 'time,batchId,unit,signal1,signal2,signal3\n' \
                       '1,batch_1,unit1,9.95,19.95,39.95\n' \
                       '2,batch_1,unit1,4.45,14.45,34.45\n' \
                       '3,batch_2,unit1,1.45,10.45,30.45\n' \
                       '4,batch_2,unit1,8.45,18.45,38.45\n' \
                       '5,batch_2,unit1,2.45,12.45,32.45'
                options = {'streaming': False, 'hasMoreData': False}
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (json format) with batch identifier to single thing Datastream
    def test_add_wide_single_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = '{"time": 1,"batchId": "batch_1","signal1": 9.95,"signal2": 19.95,"signal3": 39.95}\n' \
                       '{"time": 2,"batchId": "batch_1","signal1": 4.45,"signal2": 14.45,"signal3": 34.45}\n' \
                       '{"time": 3,"batchId": "batch_2","signal1": 1.45,"signal2": 10.45,"signal3": 30.45}\n' \
                       '{"time": 4,"batchId": "batch_2","signal1": 8.45,"signal2": 18.45,"signal3": 38.45}\n' \
                       '{"time": 5,"batchId": "batch_2","signal1": 2.45,"signal2": 12.45,"signal3": 32.45}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'batchIdentifier': 'batchId'
                }
                response = self.fclient.add_input_data(
                    datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None,
                                    'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
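
    # Note: for wide json no signalIdentifier/valueIdentifier is needed; every
    # key other than the time and batch identifiers (signal1/signal2/signal3
    # above) appears to be treated as its own signal column.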

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))


class TestAddDataStream(unittest.TestCase):

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    # Add historical wide input data (json format) to single entity Datastream (Used for model revision)
    def test_add_historical_json_data_stream(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = io.open('./resources/data.json')
                options = {'streaming': False,
                           'hasMoreData':False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier()}

                # adding data to the datastream
                response = self.fclient.add_input_stream(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
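
    # A minimal sketch of the same call with the file handle managed by a
    # context manager (the test above never closes it):
    #
    #     with io.open('./resources/data.json') as stream:
    #         response = self.fclient.add_input_stream(
    #             datastreamResponse.get_id(), 'json', options, stream)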

    # Add historical input data (csv format) from a stream to single entity Datastream (Used for model revision)
    def test_add_historical_csv_data_stream(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = io.open('./resources/data.csv')

                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier()}

                # adding data to datastream
                response = self.fclient.add_input_stream(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add historical input data (csv format) from a stream to Multi entity Datastream (Used for model revision)
    def test_add_historical_csv_data_stream_multi(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = io.open('./resources/dataMultiEntity.csv')

                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'entityIdentifier': 'car',
                           'valueIdentifier': 'value',
                           'signalIdentifier': 'signal'
                           }
                response = self.fclient.add_input_stream(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add live input data (csv format) from a stream to Datastream (Used for live monitoring)
    @unittest.skip("streaming can only be done once ")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_streaming_csv_data_stream(self):

        datastreamId = 'datstream-id'  # id of the datastream which is live
        try:
            data = io.open('./resources/data.csv')
            options = {'streaming': True, 'hasMoreData':False}
            response = self.fclient.add_input_data(datastreamId, 'csv', options, data)
            self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add live input data (json format) from a stream to Datastream (Used for live monitoring)
    @unittest.skip("streaming can only be done once ")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_streaming_json_data_stream(self):

        datastreamId = 'datstream-id'  # id of the datastream which is live
        try:
            data = io.open('./resources/data.json')
            options = {'streaming': True, 'hasMoreData':False}
            response = self.fclient.add_input_data(datastreamId, 'json', options, data)
            self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')
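
    # A minimal sketch of the intended flow for the skipped streaming tests,
    # assuming the client exposes on_datastream()/off_datastream() to toggle a
    # datastream live (an assumption; those calls are not used in these examples):
    #
    #     self.fclient.on_datastream(datastreamId)      # turn the datastream on
    #     data = io.open('./resources/data.csv')
    #     options = {'streaming': True, 'hasMoreData': False}
    #     self.fclient.add_input_stream(datastreamId, 'csv', options, data)
    #     self.fclient.off_datastream(datastreamId)     # turn it back off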

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))

    def test_create_pipeline_for_single_thing_with_eventType(self):
        fclient = FClient(host=host, token=token)
        eventbuffer = Schemas.Eventbuffer()
        eventbuffer.set_name('Motor Health' + str(random.random()))
        eventbuffer.set_time_identifier('time')
        eventbuffer.set_time_format('iso_8601')
        try:
            eventbuffer = fclient.create_eventbuffer(eventbuffer)
            data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
            try:
                response = fclient.add_input_data(eventbuffer.get_id(), 'json',
                                                  {}, data)

                pipeline = Schemas.Pipeline()
                signals = {
                    'current': ['Numeric', 'Occurrences'],
                    'vibration': ['Numeric', 'Samples'],
                    'state': 'Categorical'
                }
                assessment = Schemas.Assessment()
                assessment.set_name('Health') \
                    .set_input_signals(['current', 'vibration', 'state'])
                pipeline.set_name('Motor Health 1') \
                    .set_eventbuffer(eventbuffer.get_id()) \
                    .set_input_signals(signals) \
                    .set_assessment(assessment)

                try:
                    response = fclient.create_pipeline(pipeline)
                    self.assertEqual(isinstance(response,
                                                Schemas.Pipeline), True,
                                     'Invalid Pipeline object after creation')
                    self.assertEqual(isinstance(response.get_id(),
                                                str), True,
                                     'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_name(), pipeline.get_name(),
                                     'Invalid Pipeline object after creation')
                    self.assertNotEqual(
                        response.get_thing_name(), None,
                        'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_input_signals()), 3,
                                     'Invalid Pipeline object after creation')
                    self.assertEqual(len(response.get_assessments()), 1,
                                     'Invalid Pipeline object after creation')
                    self.assertEqual(response.get_eventbuffer(),
                                     eventbuffer.get_id(),
                                     'Invalid Pipeline object after creation')

                    # tear down
                    try:
                        fclient.delete_pipeline(response.get_id())
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                except Exception as e:
                    print(e)
                    try:
                        fclient.delete_eventbuffer(eventbuffer.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create pipeline')
            except Exception as e:
                print(e)
                self.assertEqual(0, 1, 'Cannot add data')
        except Exception as e:
            print(e)
            self.assertEqual(0, 1, 'Cannot create eventbuffer')
Example #12
class TestAddData(unittest.TestCase):

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    # Add narrow input data (json format) to multi entity Datastream
    def test_add_data_json_multi(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        signal.set_signalIdentifier("signal")
        signal.set_valueIdentifier("value")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = '{"time" : "2016-03-01 01:01:01", "signal" : "current", "value" : 12.4, "car" : "unit1"}'
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'signalIdentifier': 'signal',
                           'valueIdentifier': 'value',
                           'entityIdentifier': 'car'}

                # adding data to the created datastream
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add narrow input data (csv format) to single entity Datastream
    def test_add_data_csv_single(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        signal.set_valueIdentifier("value")
        signal.set_signalIdentifier("signal")
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("iso_8601")

        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                # input data has a time format different from the one set while creating the datastream
                data = "time, signal, value " + "\n" + "2016-03-01 01:01:01, signal1, 3.4" + "\n" + "2016-03-01 01:01:01, signal2, 1.4"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': "YYYY-MM-DD HH:mm:ss",
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier()}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
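
    # Note: the datastream above was created with time format 'iso_8601', while
    # the request passed 'YYYY-MM-DD HH:mm:ss' to match the payload; as with the
    # other identifiers, the per-request timeFormat appears to override the
    # datastream's field configuration:
    #
    #     options = {..., 'timeFormat': "YYYY-MM-DD HH:mm:ss"}  # not 'iso_8601'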

    # Add wide input data (json format) to single entity Datastream
    def test_add_data_json_single(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityName('machine')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                # adding data to datastream
                data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'entityName': 'machine'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (csv format) to multi entity Datastream
    def test_add_data_csv_multi(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'entityIdentifier': 'car'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Cannot add data due to missing time identifier
    def test_add_data_csv_multi_miss_time_identifier(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeZone': time.get_zone(),
                           'entityIdentifier': 'car'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1, 'Missing time identifier error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # Printing only for debugging purposes
                print("\nResponse :", exception_handler(e))
                self.assertEqual(exception_handler(e), "Missing time identifier.", 'Missing time identifier error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Cannot add data due to missing time zone
    def test_add_data_csv_multi_miss_time_zone(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeFormat': time.get_format(),
                           'timeIdentifier': time.get_identifier(),
                           'entityIdentifier': 'car'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1, 'Missing time zone error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # (b'{"message":"Missing time zone."}',)
                print("\nResponse :",exception_handler(e))
                self.assertEqual(exception_handler(e), "Missing time zone.", 'Missing time zone error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Cannot add data due to missing time format
    def test_add_data_csv_multi_miss_time_format(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('car')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = "time,current,vibrarion,state,car" + "\n" + "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + "2016-03-01 01:01:01,31,3.4,off,car2"
                options = {'streaming': False,
                           'hasMoreData': False,
                           'timeZone': time.get_zone(),
                           'timeIdentifier': time.get_identifier(),
                           'entityIdentifier': 'car'}
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertEqual(0, 1, 'Missing time format error not caught')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                # Printing only for debugging purposes
                print('\nResponse :', exception_handler(e))
                self.assertEqual(exception_handler(e), "Missing time format.", 'Missing time format error not caught')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
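
    # Taken together, the three negative tests above suggest the minimal option
    # set required for csv ingestion. A sketch:
    #
    #     options = {
    #         'streaming': False,
    #         'hasMoreData': False,
    #         'timeFormat': time.get_format(),         # else "Missing time format."
    #         'timeZone': time.get_zone(),             # else "Missing time zone."
    #         'timeIdentifier': time.get_identifier()  # else "Missing time identifier."
    #     }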

    # Add live input data (json format) to Datastream (Used for live monitoring)
    @unittest.skip("Skipping streaming data ingestion")
    # Streaming data can only be sent to datastream if datastream is live. So make sure that datastream is live first
    def test_add_data_streaming_json(self):

        datastreamId = 'datstream-id'  # id of the datasream which is live
        try:
            data = "time, tag, value " + "\n" + "2016-03-01 01:01:01, signal1_entity1, 3.4" + "\n" + "2016-03-01 01:01:01, signal2_entity1, 1.4"
            options = {'streaming': True, 'hasMoreData':False}
            response = self.fclient.add_input_data(datastreamId, 'json', options, data)
            self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')

    # Add live input data (csv format) to Datastream (Used for live monitoring)
    @unittest.skip("Skipping streaming data ingestion")
    # Streaming data can only be sent to datastream of datastream is live. So make sure that datastream is live first
    def test_add_data_streaming_csv(self):

        datastreamId = 'datstream-id'  # id of the datasream which is live
        try:
            data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
            options = {'streaming': True, 'hasMoreData':False}
            response = self.fclient.add_input_data(datastreamId, 'json', options, data)
            self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add historical input data to datastream')
        except Exception as e:
            # if response is "{"message":"Datastream is not live, streaming data cannot be accepted."}" Please turn on datastream first then add streaming data
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')
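
    # As with the stream-based variants earlier, a sketch of the intended flow,
    # again assuming on_datastream()/off_datastream() exist on the client:
    #
    #     self.fclient.on_datastream(datastreamId)
    #     response = self.fclient.add_input_data(datastreamId, 'csv', options, data)
    #     self.fclient.off_datastream(datastreamId)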

    # Add narrow input data (csv format) with batch identifier to multi thing Datastream
    def test_add_narrow_multi_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        signal.set_signalIdentifier("signal")
        signal.set_valueIdentifier("value")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('unit')
        field.set_batchIdentifier('batchId')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = 'time,batchId,unit,signal,value\n' \
                       '1,batch_1,unit1,signal1,9.95\n' \
                       '2,batch_1,unit1,signal1,4.45\n' \
                       '3,batch_2,unit1,signal1,1.45\n' \
                       '4,batch_2,unit1,signal1,8.45\n' \
                       '5,batch_2,unit1,signal1,2.45\n' \
                       '1,batch_1,unit1,signal2,19.95\n' \
                       '2,batch_1,unit1,signal2,14.45\n' \
                       '3,batch_2,unit1,signal2,10.45\n' \
                       '4,batch_2,unit1,signal2,18.45\n' \
                       '5,batch_2,unit1,signal2,12.45\n' \
                       '1,batch_1,unit1,signal3,39.95\n' \
                       '2,batch_1,unit1,signal3,34.45\n' \
                       '3,batch_2,unit1,signal3,30.45\n' \
                       '4,batch_2,unit1,signal3,38.45\n' \
                       '5,batch_2,unit1,signal3,32.45\n'
                options = {
                    'streaming': False,
                    'hasMoreData': False
                }
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add narrow input data (json format) with batch identifier to single thing Datastream
    def test_add_narrow_single_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        signal.set_signalIdentifier("inputs")
        signal.set_valueIdentifier("val")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = '{"time": 1,"batchId": "batch_1","signal": "signal1","value": 9.95}\n' \
                       '{"time": 2,"batchId": "batch_1","signal": "signal1","value": 4.45}\n' \
                       '{"time": 3,"batchId": "batch_2","signal": "signal1","value": 1.45}\n' \
                       '{"time": 4,"batchId": "batch_2","signal": "signal1","value": 8.45}\n' \
                       '{"time": 5,"batchId": "batch_2","signal": "signal1","value": 2.45}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'signalIdentifier': 'signal',
                    'valueIdentifier': 'value',
                    'batchIdentifier': 'batchId'
                }
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (csv format) with batch identifier to multi thing Datastream
    def test_add_wide_multi_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier('unit')
        field.set_batchIdentifier('batchId')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:
                data = 'time,batchId,unit,signal1,signal2,signal3\n' \
                       '1,batch_1,unit1,9.95,19.95,39.95\n' \
                       '2,batch_1,unit1,4.45,14.45,34.45\n' \
                       '3,batch_2,unit1,1.45,10.45,30.45\n' \
                       '4,batch_2,unit1,8.45,18.45,38.45\n' \
                       '5,batch_2,unit1,2.45,12.45,32.45'
                options = {
                    'streaming': False,
                    'hasMoreData': False
                }
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    # Add wide input data (json format) with batch identifier to single thing Datastream
    def test_add_wide_single_thing_data_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # adding data to datastream
                data = '{"time": 1,"batchId": "batch_1","signal1": 9.95,"signal2": 19.95,"signal3": 39.95}\n' \
                       '{"time": 2,"batchId": "batch_1","signal1": 4.45,"signal2": 14.45,"signal3": 34.45}\n' \
                       '{"time": 3,"batchId": "batch_2","signal1": 1.45,"signal2": 10.45,"signal3": 30.45}\n' \
                       '{"time": 4,"batchId": "batch_2","signal1": 8.45,"signal2": 18.45,"signal3": 38.45}\n' \
                       '{"time": 5,"batchId": "batch_2","signal1": 2.45,"signal2": 12.45,"signal3": 32.45}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'batchIdentifier': 'batchId'
                }
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))


class TestAddFacts(unittest.TestCase):

    def setUp(self):
        self.fclient = FClient(host=host, token=token, options=None)
        self.created_datastreams = []

    # Add facts data (json format) to Assessment
    def test_add_json_facts(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_entityIdentifier("car")
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)

                # adding fact
                data = '{"time" : "2011-03-26T12:00:00.000Z", "car" : "HI3821", "end" : "2012-06-01T00:00:00.000Z", "Health" : "Normal"}'

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'entityIdentifier': "car",
                    'valueIdentifier': "Health"
                }

                response = self.fclient.add_facts(resp_assessment.get_id(), 'json', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) to Assessment
    def test_add_csv_facts(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)

                # adding fact to the assessment
                data = "time,end,car,Health\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health"
                }

                response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with tags to Assessment
    def test_add_csv_facts_with_tags(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)
                data = "time,end,car,Health,Tags\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal,testTag1\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal,testTag2"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health",
                    'keywordIdentifier': 'Tags'
                }

                # adding fact
                response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with additional Tag to Assessment
    def test_add_csv_facts_with_additional_tags(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()

        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("YYYY-MM-DD HH:mm:ss")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        datastream.set_datasource(datasource)
        datastream.set_field(field)
        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())

            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')

            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)
                data = "time,end,car,Health\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal\n2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal"

                options = {
                    'startTimeIdentifier': "time",
                    'endTimeIdentifier': "end",
                    'timeFormat': "iso_8601",
                    'timeZone': time.get_zone(),
                    'valueIdentifier': "Health",
                    'additionalKeyword': 'testTag'
                }

                response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)

                # checking if data got ingested
                check_data_ingestion(self, response)

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, "Cannot create datastream")

    # Add facts data (csv format) with batch identifier to Assessment
    def test_add_csv_fact_with_batch(self):

        # creating datastream
        datastream = Schemas.Datastream()
        datastream.set_name('Motor Health' + str(random.random()))

        datasource = Schemas.Datasource()
        field = Schemas.Field()
        time = Schemas.Time()
        signal = Schemas.Signal()
        time.set_zone("GMT")
        time.set_identifier("time")
        time.set_format("millis")
        field.set_signal(signal)
        datasource.set_type("STANDALONE")
        field.set_time(time)
        field.set_batchIdentifier('batches')
        datastream.set_datasource(datasource)
        datastream.set_field(field)

        try:
            datastreamResponse = self.fclient.create_datastream(datastream)
            self.created_datastreams.append(datastreamResponse.get_id())
            try:

                # creating assessment
                asmtRequest = Schemas.AssessmentRequest()
                asmtRequest.set_name('Assessment Name ' + str(random.random()))
                asmtRequest.set_datastream(datastreamResponse.get_id())
                asmtRequest.set_rate('PT0S')

                try:
                    resp_assessment = self.fclient.create_assessment(asmtRequest)

                    data = '{"time" : 123898422222, "batches" : "batch_1", "signal" : "current", "value" : 12.4}\n' \
                            '{"time" : 123898422322, "batches" : "batch_2", "signal" : "current", "value" : 12.4}'
                    options = {
                       'streaming': False,
                       'hasMoreData': False,
                       'timeFormat': time.get_format(),
                       'timeZone': time.get_zone(),
                       'timeIdentifier': time.get_identifier(),
                       'signalIdentifier': 'signal',
                       'valueIdentifier': 'value',
                       'batchIdentifier': 'batches'
                    }

                    # adding data to the created datastream
                    response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                    self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                    # checking if data got ingested
                    check_data_ingestion(self, response)

                    # adding fact to the assessment
                    data = "batchId,value\n" \
                           "batch_1,normal\n" \
                           "batch_2,abnormal"

                    options = {
                        'valueIdentifier': "value",
                        'batchIdentifier': 'batchId'
                    }

                    response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)
                    self.assertNotEqual(response['__$id'], None, 'Cannot add fact data to assessment')

                    # checking if data got ingested
                    check_data_ingestion(self, response)

                except Exception as e:
                    print(exception_handler(e))
                    try:
                        self.fclient.delete_datastream(datastreamResponse.get_id())
                    except Exception as e:
                        pass
                    self.assertEqual(0, 1, 'Cannot create assessment')

            except Exception as e:
                print(exception_handler(e))
                self.assertEqual(0, 1, 'Cannot add input or fact data to datastream')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create datastream')
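
    # For a batched datastream the facts are keyed by batch rather than by a
    # start/end time span: the fact csv above only needs a batch column and a
    # value column, mapped via 'batchIdentifier' and 'valueIdentifier'.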

    def tearDown(self):  # teardown
        for ds in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds)
            except Exception as e:
                print(exception_handler(e))