def test_turn_datastream_on_off(self):
    try:
        # assuming model is already built
        listAssessment = self.fclient.on_datastream(datastream_id)
        self.assertEqual(len(listAssessment) > 0, True, 'Cannot turn on live monitoring for datastream')
        self.assertEqual(str(listAssessment[0]['datastream']), datastream_id, 'Live monitoring turned on for incorrect datastream')
        # self.assertEqual(str(listAssessment[0]['live']), 'ON', 'Cannot turn on live monitoring')
        timepkg.sleep(10)

        # turning off live monitoring
        try:
            listAssessment = self.fclient.off_datastream(datastream_id)
            self.assertEqual(len(listAssessment) > 0, True, 'Cannot turn off live monitoring for datastream')
            self.assertEqual(str(listAssessment[0]['datastream']), datastream_id, 'Live monitoring turned off for incorrect datastream')
            # self.assertEqual(str(listAssessment[0]['live']), 'OFF', 'Cannot turn off live monitoring')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot turn datastream off')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot turn datastream on')
def test_turn_assessment_on_off(self):
    try:
        # assuming model is already built
        assessment = self.fclient.on_assessment(assessment_id)
        self.assertEqual(assessment.get_id(), assessment_id, 'Live monitoring turned on for incorrect assessment')
        timepkg.sleep(30)

        # turning off live monitoring
        try:
            assessment = self.fclient.off_assessment(assessment_id)
            self.assertEqual(assessment.get_id(), assessment_id, 'Live monitoring turned off for incorrect assessment')
            timepkg.sleep(30)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot turn assessment off')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot turn assessment on')
def test_get_assessment_output_with_offset(self):
    fclient = FClient(host=host, token=token, options=None)
    lastOffset = 0
    try:
        stream = fclient.get_output(assessment, {})
        for event in stream.events():
            print(json.dumps(json.loads(event.data)))
            # keep track of the offset sent in each falkonry output event
            lastOffset = json.loads(event.data)['offset']
    except Exception as e:
        print(exception_handler(e))

    '''
    Assuming an exception occurred and you want to listen to the output again,
    use the last offset value and set it in the options parameter.
    '''
    options = {"offset": lastOffset}
    try:
        stream = fclient.get_output(assessment, options)
        for event in stream.events():
            self.assertEqual(json.loads(event.data)['offset'] >= lastOffset, True)
            print(json.dumps(json.loads(event.data)))
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Error getting output of an Assessment')
def test_add_narrow_single_thing_data_with_batch(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("millis")
    signal.set_signalIdentifier("inputs")
    signal.set_valueIdentifier("val")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    field.set_batchIdentifier('batches')
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        try:
            # adding data to datastream
            data = '{"time": 1,"batchId": "batch_1","signal": "signal1","value": 9.95}\n' \
                   '{"time": 2,"batchId": "batch_1","signal": "signal1","value": 4.45}\n' \
                   '{"time": 3,"batchId": "batch_2","signal": "signal1","value": 1.45}\n' \
                   '{"time": 4,"batchId": "batch_2","signal": "signal1","value": 8.45}\n' \
                   '{"time": 5,"batchId": "batch_2","signal": "signal1","value": 2.45}'
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeFormat': time.get_format(),
                'timeZone': time.get_zone(),
                'timeIdentifier': time.get_identifier(),
                'signalIdentifier': 'signal',
                'valueIdentifier': 'value',
                'batchIdentifier': 'batchId'
            }
            response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
            self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_add_json_facts(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("YYYY-MM-DD HH:mm:ss")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    field.set_entityIdentifier("car")
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())

        # creating assessment
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(datastreamResponse.get_id())
        asmtRequest.set_rate('PT0S')
        try:
            resp_assessment = self.fclient.create_assessment(asmtRequest)

            # adding fact
            data = '{"time" : "2011-03-26T12:00:00.000Z", "car" : "HI3821", "end" : "2012-06-01T00:00:00.000Z", "Health" : "Normal"}'
            options = {
                'startTimeIdentifier': "time",
                'endTimeIdentifier': "end",
                'timeFormat': "iso_8601",
                'timeZone': time.get_zone(),
                'entityIdentifier': "car",
                'valueIdentifier': "Health"
            }
            response = self.fclient.add_facts(resp_assessment.get_id(), 'json', options, data)

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create assessment')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, "Cannot create datastream")
def test_add_csv_facts_with_additional_tags(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("YYYY-MM-DD HH:mm:ss")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())

        # creating assessment
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(datastreamResponse.get_id())
        asmtRequest.set_rate('PT0S')
        try:
            resp_assessment = self.fclient.create_assessment(asmtRequest)

            # adding fact to the assessment
            data = io.open('./resources/factsData.csv')
            options = {
                'startTimeIdentifier': "time",
                'endTimeIdentifier': "end",
                'timeFormat': "iso_8601",
                'timeZone': time.get_zone(),
                'valueIdentifier': "Health",
                'additionalTag': "testTag"
            }
            response = self.fclient.add_facts_stream(resp_assessment.get_id(), 'csv', options, data)

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create assessment')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, "Cannot create datastream")
def test_add_csv_facts_with_tags(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("YYYY-MM-DD HH:mm:ss")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())

        # creating assessment
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(datastreamResponse.get_id())
        asmtRequest.set_rate('PT0S')
        try:
            resp_assessment = self.fclient.create_assessment(asmtRequest)

            data = "time,end,car,Health,Tags\n" \
                   "2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,IL9753,Normal,testTag1\n" \
                   "2011-03-31T00:00:00.000Z,2011-04-01T00:00:00.000Z,HI3821,Normal,testTag2"
            options = {
                'startTimeIdentifier': "time",
                'endTimeIdentifier': "end",
                'timeFormat': "iso_8601",
                'timeZone': time.get_zone(),
                'valueIdentifier': "Health",
                'tagIdentifier': 'Tags'
            }

            # adding fact
            response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot create assessment')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, "Cannot create datastream")
def test_add_data_json_multi(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("YYYY-MM-DD HH:mm:ss")
    signal.set_signalIdentifier("signal")
    signal.set_valueIdentifier("value")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    field.set_entityIdentifier('car')
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        try:
            data = '{"time" : "2016-03-01 01:01:01", "signal" : "current", "value" : 12.4, "car" : "unit1"}'
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeFormat': time.get_format(),
                'timeZone': time.get_zone(),
                'timeIdentifier': time.get_identifier(),
                'signalIdentifier': 'signal',
                'valueIdentifier': 'value',
                'entityIdentifier': 'car'
            }

            # adding data to the created datastream
            response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
            self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_add_historical_csv_data_stream_multi(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("YYYY-MM-DD HH:mm:ss")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    field.set_entityIdentifier('car')
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        try:
            data = io.open('./resources/dataMultiEntity.csv')
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeFormat': time.get_format(),
                'timeZone': time.get_zone(),
                'timeIdentifier': time.get_identifier(),
                'entityIdentifier': 'car',
                'valueIdentifier': 'value',
                'signalIdentifier': 'signal'
            }
            response = self.fclient.add_input_stream(datastreamResponse.get_id(), 'csv', options, data)
            self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_add_data_csv_multi_miss_time_identifier(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("YYYY-MM-DD HH:mm:ss")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    field.set_entityIdentifier('car')
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        try:
            data = "time,current,vibration,state,car" + "\n" + \
                   "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + \
                   "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + \
                   "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + \
                   "2016-03-01 01:01:01,31,3.4,off,car2"
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeFormat': time.get_format(),
                'timeZone': time.get_zone(),
                'entityIdentifier': 'car'
            }
            response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
            self.assertEqual(0, 1, 'Missing time identifier error not caught')

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            # printing only for debugging purposes
            print("\nResponse :", exception_handler(e))
            self.assertEqual(exception_handler(e), "Missing time identifier.", 'Missing time identifier error not caught')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_add_data_csv_single(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    signal.set_valueIdentifier("value")
    signal.set_signalIdentifier("signal")
    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("iso_8601")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        try:
            # input data has a time format different from the one set while creating the datastream
            data = "time, signal, value " + "\n" + \
                   "2016-03-01 01:01:01, signal1, 3.4" + "\n" + \
                   "2016-03-01 01:01:01, signal2, 1.4"
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeFormat': "YYYY-MM-DD HH:mm:ss",
                'timeZone': time.get_zone(),
                'timeIdentifier': time.get_identifier()
            }
            response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
            self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def tearDown(self):
    # teardown: delete every datastream created during the test run
    for ds in self.created_datastreams:
        try:
            self.fclient.delete_datastream(ds)
        except Exception as e:
            print(exception_handler(e))
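# check_data_ingestion is called throughout these tests but is not defined in
# this section. The sketch below is a hypothetical reconstruction, not the
# suite's actual helper: it assumes the ingestion response carries a tracker id
# under '__$id' and that the client exposes a get_status(tracker_id) call.
def check_data_ingestion(test, response):
    status = test.fclient.get_status(str(response['__$id']))  # assumed API
    while status['status'] == 'PENDING':
        timepkg.sleep(5)  # wait between polls of the ingestion tracker
        status = test.fclient.get_status(str(response['__$id']))
    test.assertNotEqual(status['status'], 'FAILED', 'Data ingestion failed')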
def test_get_assessment_historical_output(self):
    fclient = FClient(host=host, token=token, options=None)
    try:
        options = {
            'startTime': '2011-01-02T01:00:00.000Z',
            'endTime': '2013-06-13T01:00:00.000Z',
            'responseFormat': 'application/json'
        }
        response = fclient.get_historical_output(assessment, options)

        '''If the data is not readily available, a tracker id is sent with a 202 status code.
        While Falkonry generates the output data, the client should poll periodically using
        the same method, sending the tracker id (__id) in the query params.
        Once the data is available, the server responds with a 200 status code and the data
        in json/csv format.'''
        if response.status_code == 202:
            trackerResponse = Schemas.Tracker(tracker=json.loads(response.text))
            # get id from the tracker
            trackerId = trackerResponse.get_id()
            # use this tracker for checking the status of the process
            options = {"trackerId": trackerId, "responseFormat": "application/json"}
            newResponse = fclient.get_historical_output(assessment, options)
            # if the status is still 202, make the same request again
        if response.status_code == 200:
            # if status is 200, output data will be present in the response.text field
            self.assertEqual(len(response.text) > 0, True, 'Error getting historical output of an Assessment')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Error getting output of an Assessment')
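# The test above re-requests the output only once; for long-running exports the
# 202 case calls for a polling loop. A minimal sketch, assuming
# get_historical_output keeps returning requests-style responses and the
# 'trackerId' option shown in the test above:
def poll_historical_output(fclient, assessment, trackerId, interval=30):
    options = {"trackerId": trackerId, "responseFormat": "application/json"}
    response = fclient.get_historical_output(assessment, options)
    while response.status_code == 202:
        timepkg.sleep(interval)  # wait before asking the tracker again
        response = fclient.get_historical_output(assessment, options)
    return response.text  # 200: generated output in the requested format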
def test_add_wide_multi_thing_data_with_batch(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("millis")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    field.set_entityIdentifier('unit')
    field.set_batchIdentifier('batchId')
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        try:
            data = 'time,batchId,unit,signal1,signal2,signal3\n' \
                   '1,batch_1,unit1,9.95,19.95,39.95\n' \
                   '2,batch_1,unit1,4.45,14.45,34.45\n' \
                   '3,batch_2,unit1,1.45,10.45,30.45\n' \
                   '4,batch_2,unit1,8.45,18.45,38.45\n' \
                   '5,batch_2,unit1,2.45,12.45,32.45'
            options = {
                'streaming': False,
                'hasMoreData': False
            }
            response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
            self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input data to datastream')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_on_assessment_exception(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("iso_8601")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        response = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(response.get_id())

        # create assessment
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(response.get_id())
        asmtRequest.set_rate('PT0S')
        assessmentResponse = self.fclient.create_assessment(asmtRequest)
        try:
            assessment = self.fclient.on_assessment(assessmentResponse.get_id())
            self.assertEqual(assessment.get_id(), assessmentResponse.get_id())
        except Exception as e:
            msg = exception_handler(e)
            print(msg)
            self.assertEqual(msg, "No Active model assigned in Assessment: " + assessmentResponse.get_name())
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_add_entity_meta(self):
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("iso_8601")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        self.assertEqual(isinstance(datastreamResponse, Schemas.Datastream), True, 'Invalid Datastream object after creation')
        self.assertEqual(isinstance(datastreamResponse.get_id(), str), True, 'Invalid id of datastream after creation')

        # add EntityMeta
        data = [{"sourceId": "testId", "label": "testName", "path": "root/path"}]
        entityMetaResponse = self.fclient.add_entity_meta(datastreamResponse.get_id(), {}, data)
        self.assertEqual(isinstance(entityMetaResponse, list), True, 'Invalid entityMeta object after creation')
        self.assertEqual(len(entityMetaResponse) > 0, True, 'Invalid length of entityMeta')

        entityMetaResp = entityMetaResponse[0]
        self.assertEqual(isinstance(entityMetaResp, Schemas.EntityMeta), True, 'Invalid entityMeta object after creation')
        self.assertEqual(isinstance(entityMetaResp.get_id(), str), True, 'Invalid id of entityMeta after creation')
        self.assertEqual(entityMetaResp.get_label(), 'testName', 'Invalid label of entityMeta after creation')
        self.assertEqual(entityMetaResp.get_path(), 'root/path', 'Invalid path of entityMeta after creation')
        self.assertEqual(entityMetaResp.get_sourceId(), 'testId', 'Invalid sourceId of entityMeta after creation')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot add entityMeta to datastream')
def test_get_assessment_facts(self):
    try:
        response = self.fclient.get_facts(assessment, {})
        pprint(response.content)
        self.assertEqual(len(response.content) == 0, False, 'Invalid facts response')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, "Cannot get facts from the assessment")
def test_create_assessment(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("iso_8601")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        response = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(response.get_id())
        self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
        self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
        self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')

        fieldResponse = response.get_field()
        self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
        self.assertEqual(fieldResponse.get_entityIdentifier(), "entity", 'Invalid entity identifier object after creation')
        self.assertEqual(fieldResponse.get_entityName(), response.get_name(), 'Invalid entity name object after creation')

        timeResponse = fieldResponse.get_time()
        self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
        self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
        self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
        self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')

        # create assessment
        asmtRequest = Schemas.AssessmentRequest()
        asmtRequest.set_name('Assessment Name ' + str(random.random()))
        asmtRequest.set_datastream(response.get_id())
        asmtRequest.set_rate('PT0S')

        assessmentResponse = self.fclient.create_assessment(asmtRequest)
        self.assertEqual(isinstance(assessmentResponse, Schemas.Assessment), True, 'Invalid Assessment object after creation')
        self.assertEqual(isinstance(assessmentResponse.get_id(), str), True, 'Invalid id of Assessment after creation')
        self.assertEqual(assessmentResponse.get_name(), asmtRequest.get_name(), 'Invalid name of Assessment after creation')
        self.assertEqual(assessmentResponse.get_datastream(), asmtRequest.get_datastream(), 'Invalid datastream in assessment after creation')
        self.assertEqual(assessmentResponse.get_rate(), asmtRequest.get_rate(), 'Invalid rate of Assessment after creation')
        self.assertEqual(assessmentResponse.get_live(), 'OFF', 'Invalid live status of Assessment after creation')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_get_assessment_facts_with_model(self):
    try:
        options = {
            'startTime': '2011-01-02T00:00:00.000Z',
            'endTime': '2014-01-01T00:00:00.000Z',
            'model': '1'
        }
        response = self.fclient.get_facts(assessment, options)
        pprint(response.content)
        self.assertEqual(len(response.content) == 0, False, 'Invalid facts response')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, "Cannot get facts from the assessment for a specific model")
def test_delete_datastream_by_id(self):
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("iso_8601")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        response = self.fclient.create_datastream(datastream)
        self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
        self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
        self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')

        fieldResponse = response.get_field()
        self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
        self.assertEqual(fieldResponse.get_entityIdentifier(), "entity", 'Invalid entity identifier object after creation')
        self.assertEqual(fieldResponse.get_entityName(), response.get_name(), 'Invalid entity name object after creation')

        timeResponse = fieldResponse.get_time()
        self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
        self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
        self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
        self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')

        # delete datastream
        try:
            self.fclient.delete_datastream(response.get_id())
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot delete datastream')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_get_assessment_facts_with_batch(self):
    try:
        response = self.fclient.get_facts(assessmentB, {})
        pprint(response.content)
        self.assertEqual('batch' in str(response.content), True, 'Invalid facts with batch response')
        self.assertEqual(len(response.content) == 0, False, 'Invalid facts with batch response')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, "Cannot get facts from the assessment for batch case")
def test_add_data_csv_multi_miss_time_format(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("YYYY-MM-DD HH:mm:ss")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    field.set_entityIdentifier('car')
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        try:
            data = "time,current,vibration,state,car" + "\n" + \
                   "2016-03-01 01:01:01,12.4,3.4,on,car1" + "\n" + \
                   "2016-03-01 01:01:01,31.2,1.4,off,car1" + "\n" + \
                   "2016-03-01 01:01:01,24,3.2,on,car2" + "\n" + \
                   "2016-03-01 01:01:01,31,3.4,off,car2"
            options = {
                'streaming': False,
                'hasMoreData': False,
                'timeZone': time.get_zone(),
                'timeIdentifier': time.get_identifier(),
                'entityIdentifier': 'car'
            }
            response = self.fclient.add_input_data(datastreamResponse.get_id(), 'csv', options, data)
            self.assertEqual(0, 1, 'Missing time format error not caught')

            # checking if data got ingested
            check_data_ingestion(self, response)
        except Exception as e:
            # printing only for debugging purposes
            print('\nResponse :', exception_handler(e))
            self.assertEqual(exception_handler(e), "Missing time format.", 'Missing time format error not caught')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_get_assessment_output(self):
    fclient = FClient(host=host, token=token, options=None)
    try:
        stream = fclient.get_output(assessment, {})
        for event in stream.events():
            print(json.dumps(json.loads(event.data)))
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Error getting output of an Assessment')
def test_get_entity_meta(self):
    fclient = FClient(host=host, token=token, options=None)

    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("iso_8601")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        self.assertEqual(isinstance(datastreamResponse, Schemas.Datastream), True, 'Invalid Datastream object after creation')
        self.assertEqual(isinstance(datastreamResponse.get_id(), str), True, 'Invalid id of datastream after creation')

        # add EntityMeta
        data = [{"sourceId": "testId", "label": "testName", "path": "root/path"}]
        entityMetaResponse = fclient.add_entity_meta(datastreamResponse.get_id(), {}, data)
        self.assertEqual(isinstance(entityMetaResponse, list), True, 'Invalid entityMeta object after creation')
        self.assertEqual(len(entityMetaResponse) > 0, True, 'Invalid length of entityMeta')

        entityMetaResp = entityMetaResponse[0]
        self.assertEqual(isinstance(entityMetaResp, Schemas.EntityMeta), True, 'Invalid entityMeta object after creation')
        self.assertEqual(isinstance(entityMetaResp.get_id(), str), True, 'Invalid id of entityMeta after creation')
        self.assertEqual(entityMetaResp.get_label(), 'testName', 'Invalid label of entityMeta after creation')
        self.assertEqual(entityMetaResp.get_path(), 'root/path', 'Invalid path of entityMeta after creation')
        self.assertEqual(entityMetaResp.get_sourceId(), 'testId', 'Invalid sourceId of entityMeta after creation')

        # get entity meta
        getEntityMetaResponse = fclient.get_entity_meta(datastreamResponse.get_id())
        self.assertEqual(isinstance(getEntityMetaResponse, list), True, 'Invalid entityMeta object after creation')
        self.assertEqual(len(getEntityMetaResponse) > 0, True, 'Invalid length of entityMeta')

        getEntityMetaResp = getEntityMetaResponse[0]
        self.assertEqual(isinstance(getEntityMetaResp, Schemas.EntityMeta), True, 'Invalid entityMeta object after creation')
        self.assertEqual(isinstance(getEntityMetaResp.get_id(), str), True, 'Invalid id of entityMeta after creation')
        self.assertEqual(getEntityMetaResp.get_label(), 'testName', 'Invalid label of entityMeta after creation')
        self.assertEqual(getEntityMetaResp.get_path(), 'root/path', 'Invalid path of entityMeta after creation')
        self.assertEqual(getEntityMetaResp.get_sourceId(), 'testId', 'Invalid sourceId of entityMeta after creation')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot add entityMeta to datastream')
def test_add_data_streaming_csv(self):
    datastreamId = 'datastream-id'  # id of the datastream which is live
    try:
        data = "time, tag, value " + "\n" + \
               "2016-03-01 01:01:01, signal1_entity1, 3.4" + "\n" + \
               "2016-03-01 01:01:01, signal2_entity1, 1.4"
        options = {'streaming': True, 'hasMoreData': False}
        response = self.fclient.add_input_data(datastreamId, 'csv', options, data)
        self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add streaming input data to datastream')
    except Exception as e:
        # if the response is '{"message":"Datastream is not live, streaming data cannot be accepted."}',
        # turn the datastream on first and then add the streaming data
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot add input data to datastream')
def test_add_data_streaming_json(self):
    datastreamId = 'datastream-id'  # id of the datastream which is live
    try:
        data = '{"time" :"2016-03-01 01:01:01", "current" : 12.4, "vibration" : 3.4, "state" : "On"}'
        options = {'streaming': True, 'hasMoreData': False}
        response = self.fclient.add_input_data(datastreamId, 'json', options, data)
        self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add streaming input data to datastream')
    except Exception as e:
        # if the response is '{"message":"Datastream is not live, streaming data cannot be accepted."}',
        # turn the datastream on first and then add the streaming data
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot add input data to datastream')
def test_add_streaming_json_data_stream(self):
    datastreamId = 'datastream-id'  # id of the datastream which is live
    try:
        data = io.open('./resources/data.json')
        options = {'streaming': True, 'hasMoreData': False}
        response = self.fclient.add_input_data(datastreamId, 'json', options, data)
        self.assertNotEqual(response, 'Data Submitted Successfully', 'Cannot add streaming input data to datastream')
    except Exception as e:
        # if the response is '{"message":"Datastream is not live, streaming data cannot be accepted."}',
        # turn the datastream on first and then add the streaming data
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot add input data to datastream')
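# The three streaming tests above assume the datastream is already live. A
# minimal sketch of that precondition, reusing on_datastream/off_datastream
# from the on/off test earlier in this section ('ds-id' is a placeholder id):
def stream_into_live_datastream(fclient, data):
    fclient.on_datastream('ds-id')  # turn live monitoring on first
    options = {'streaming': True, 'hasMoreData': False}
    response = fclient.add_input_data('ds-id', 'json', options, data)
    fclient.off_datastream('ds-id')  # turn live monitoring back off
    return response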
def test_create_datastream_micro_second_precision(self):
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))
    datastream.set_time_precision('micro')  # set 'micro' for microseconds precision

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("iso_8601")
    signal.set_signalIdentifier("signal")
    signal.set_valueIdentifier("value")
    field.set_entityIdentifier("entity")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        # create datastream
        response = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(response.get_id())
        self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
        self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
        self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')

        fieldResponse = response.get_field()
        self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
        self.assertEqual(fieldResponse.get_entityIdentifier(), "entity", 'Invalid entity identifier object after creation')
        self.assertEqual(fieldResponse.get_entityName(), None, 'Invalid entity name object after creation')

        signalResponse = fieldResponse.get_signal()
        self.assertEqual(signalResponse.get_signalIdentifier(), "signal", 'Invalid signal identifier object after creation')
        self.assertEqual(signalResponse.get_valueIdentifier(), signal.get_valueIdentifier(), 'Invalid value identifier after object creation')

        timeResponse = fieldResponse.get_time()
        self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
        self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
        self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
        self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
        self.assertEqual(response.get_time_precision(), datastream.get_time_precision(), 'Invalid time precision after creation')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def test_create_datastream_narrow_style_multiple_entity(self):
    datastream = Schemas.Datastream()
    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    datastream.set_name('Motor Health' + str(random.random()))  # set name of the datastream
    time.set_zone("GMT")  # set timezone of the datastream
    time.set_identifier("time")  # set time identifier of the datastream
    time.set_format("iso_8601")  # set time format of the datastream
    field.set_time(time)
    signal.set_signalIdentifier("signal")  # set signal identifier
    signal.set_valueIdentifier("value")  # set value identifier
    field.set_entityIdentifier("entity")  # set entity identifier
    field.set_signal(signal)  # set signal in field
    datasource.set_type("STANDALONE")  # set datasource type in datastream
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        # create datastream
        response = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(response.get_id())
        self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
        self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
        self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')

        fieldResponse = response.get_field()
        self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
        self.assertEqual(fieldResponse.get_entityName(), None, 'Invalid entity name object after creation')

        signalResponse = fieldResponse.get_signal()
        self.assertEqual(signalResponse.get_valueIdentifier(), signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
        self.assertEqual(signalResponse.get_signalIdentifier(), signal.get_signalIdentifier(), 'Invalid signal identifier after object creation')

        timeResponse = fieldResponse.get_time()
        self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
        self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
        self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
        self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def validate_login(host, token):
    """Validate login."""
    try:
        global _falkonry
        if host and token:
            p = re.compile(
                r'^(?:http|ftp)s?://'  # http:// or https://
                r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
                r'localhost|'  # localhost...
                r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
                r'(?::\d+)?'  # optional port
                r'(?:/?|[/?]\S+)$', re.IGNORECASE)
            m = p.match(host)
            if m:
                _falkonry = Falkonry(host=host, token=token, options={"header": "falkonry-cli"})
                # test auth token validation
                try:
                    datastream = _falkonry.get_datastream('test-id')
                except Exception as error:
                    errorMsg = exception_handler(error)
                    if errorMsg:
                        if errorMsg == "Unauthorized Access":
                            print_error('Unauthorized Access. Please verify your details.')
                            _falkonry = None
                            return False
                        elif errorMsg == "No such Datastream available":
                            return True
                        else:
                            _falkonry = None
                            return False
                    else:
                        _falkonry = None
                        print_error('Unable to connect to falkonry. Please verify your details.')
                        return False
                return True  # token validated against an existing datastream
            else:
                print_error("Invalid Host Url")
                return False
    except Exception as error:
        _falkonry = None
        print_error('Unable to connect to falkonry. Please verify your details.')
        return False
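# Hypothetical usage of validate_login; the host URL and token below are
# placeholders, not real credentials:
if validate_login('https://app.falkonry.ai', 'my-auth-token'):
    print('Connected to Falkonry')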
def test_add_csv_fact_with_batch(self):
    # creating datastream
    datastream = Schemas.Datastream()
    datastream.set_name('Motor Health' + str(random.random()))

    datasource = Schemas.Datasource()
    field = Schemas.Field()
    time = Schemas.Time()
    signal = Schemas.Signal()

    time.set_zone("GMT")
    time.set_identifier("time")
    time.set_format("millis")
    field.set_signal(signal)
    datasource.set_type("STANDALONE")
    field.set_time(time)
    field.set_batchIdentifier('batches')
    datastream.set_datasource(datasource)
    datastream.set_field(field)

    try:
        datastreamResponse = self.fclient.create_datastream(datastream)
        self.created_datastreams.append(datastreamResponse.get_id())
        try:
            # creating assessment
            asmtRequest = Schemas.AssessmentRequest()
            asmtRequest.set_name('Assessment Name ' + str(random.random()))
            asmtRequest.set_datastream(datastreamResponse.get_id())
            asmtRequest.set_rate('PT0S')
            try:
                resp_assessment = self.fclient.create_assessment(asmtRequest)

                data = '{"time" : 123898422222, "batches" : "batch_1", "signal" : "current", "value" : 12.4}\n' \
                       '{"time" : 123898422322, "batches" : "batch_2", "signal" : "current", "value" : 12.4}'
                options = {
                    'streaming': False,
                    'hasMoreData': False,
                    'timeFormat': time.get_format(),
                    'timeZone': time.get_zone(),
                    'timeIdentifier': time.get_identifier(),
                    'signalIdentifier': 'signal',
                    'valueIdentifier': 'value',
                    'batchIdentifier': 'batches'
                }

                # adding data to the created datastream
                response = self.fclient.add_input_data(datastreamResponse.get_id(), 'json', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add input data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)

                # adding fact to the assessment
                data = "batchId,value\n" \
                       "batch_1,normal\n" \
                       "batch_2,abnormal"
                options = {
                    'valueIdentifier': "value",
                    'batchIdentifier': 'batchId'
                }
                response = self.fclient.add_facts(resp_assessment.get_id(), 'csv', options, data)
                self.assertNotEqual(response['__$id'], None, 'Cannot add fact data to datastream')

                # checking if data got ingested
                check_data_ingestion(self, response)
            except Exception as e:
                print(exception_handler(e))
                try:
                    self.fclient.delete_datastream(datastreamResponse.get_id())
                except Exception:
                    pass
                self.assertEqual(0, 1, 'Cannot create assessment')
        except Exception as e:
            print(exception_handler(e))
            self.assertEqual(0, 1, 'Cannot add input or fact data to datastream')
    except Exception as e:
        print(exception_handler(e))
        self.assertEqual(0, 1, 'Cannot create datastream')
def handle_error(error):
    try:
        errorMsg = exception_handler(error)
        print_error(errorMsg)
    except Exception:
        # fall back to the raw error if the handler itself fails
        print_error("Unhandled Exception : " + str(error))