def test_trough_antisegments(self):
    data_val = [
        9.0, 9.0, 9.0, 9.0, 7.0, 4.0, 7.0, 9.0, 9.0, 9.0,
        5.0, 1.0, 5.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0
    ]
    dataframe = create_dataframe(data_val)
    segments = [{
        '_id': 'Esl7uetLhx4lCqHa',
        'analyticUnitId': 'opnICRJwOmwBELK8',
        'from': 1523889000010,
        'to': 1523889000012,
        'labeled': True,
        'deleted': False
    }, {
        '_id': 'Esl7uetLhx4lCqHa',
        'analyticUnitId': 'opnICRJwOmwBELK8',
        'from': 1523889000003,
        'to': 1523889000005,
        'labeled': False,
        'deleted': True
    }]
    segments = [Segment.from_json(segment) for segment in segments]

    try:
        model = models.TroughModel()
        model_name = model.__class__.__name__
        model.state = model.get_state(None)
        model.fit(dataframe, segments, 'test')
    except ValueError:
        self.fail('Model {} raised unexpectedly'.format(model_name))
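# The tests in this excerpt rely on a create_dataframe helper that is not shown here.
# A minimal sketch of what it is assumed to do: build a two-column
# ['timestamp', 'value'] frame with millisecond timestamps starting at 1523889000000,
# so segment bounds such as 1523889000010 line up with row offsets. The real helper
# in the suite may differ in details.
def create_dataframe(data_val: list) -> pd.DataFrame:
    data_ind = [1523889000000 + i for i in range(len(data_val))]
    dataframe = pd.DataFrame({'timestamp': data_ind, 'value': data_val})
    # Assumption: timestamps are converted to datetime for the models;
    # drop this line if the suite keeps raw integer timestamps.
    dataframe['timestamp'] = pd.to_datetime(dataframe['timestamp'], unit='ms')
    return dataframe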
def test_trough_model_for_cache(self):
    # Fitting one labeled segment on top of a cached state that already holds two
    # pattern centers should append the new center, giving three in total.
    cache = {
        'patternCenter': [2, 6],
        'patternModel': [5, 0.5, 4],
        'confidence': 2,
        'convolveMax': 8,
        'convolveMin': 7,
        'window_size': 1,
        'convDelMin': 0,
        'convDelMax': 0,
    }
    data_val = [5.0, 5.0, 1.0, 4.0, 5.0, 5.0, 0.0, 4.0, 5.0, 5.0, 6.0, 1.0, 5.0, 5.0, 5.0]
    dataframe = create_dataframe(data_val)
    segments = [{
        '_id': 'Esl7uetLhx4lCqHa',
        'analyticUnitId': 'opnICRJwOmwBELK8',
        'from': 1523889000010,
        'to': 1523889000012,
        'labeled': True,
        'deleted': False
    }]
    segments = [Segment.from_json(segment) for segment in segments]

    model = models.TroughModel()
    model.state = model.get_state(cache)
    result = model.fit(dataframe, segments, 'test')
    self.assertEqual(len(result.pattern_center), 3)
def resolve_model_by_pattern(pattern: str) -> models.Model:
    if pattern == 'GENERAL':
        return models.GeneralModel()
    if pattern == 'PEAK':
        return models.PeakModel()
    if pattern == 'TROUGH':
        return models.TroughModel()
    if pattern == 'DROP':
        return models.DropModel()
    if pattern == 'JUMP':
        return models.JumpModel()
    if pattern == 'CUSTOM':
        return models.CustomModel()
    raise ValueError('Unknown pattern "%s"' % pattern)
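# Illustrative usage check (not part of the original suite): resolve_model_by_pattern
# maps an analytic unit's pattern type to a fresh model instance and rejects anything
# it does not recognize.
def test_resolve_model_by_pattern(self):
    self.assertIsInstance(resolve_model_by_pattern('TROUGH'), models.TroughModel)
    self.assertIsInstance(resolve_model_by_pattern('JUMP'), models.JumpModel)
    with self.assertRaises(ValueError):
        resolve_model_by_pattern('UNKNOWN_PATTERN')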
def test_models_with_corrupted_dataframe(self):
    # Every model should refuse to fit a dataframe that contains only NaN values
    # by raising an AssertionError.
    data = [[1523889000000 + i, float('nan')] for i in range(10)]
    dataframe = pd.DataFrame(data, columns=['timestamp', 'value'])
    segments = []

    model_instances = [
        models.JumpModel(),
        models.DropModel(),
        models.GeneralModel(),
        models.PeakModel(),
        models.TroughModel()
    ]

    for model in model_instances:
        model_name = model.__class__.__name__
        model.state = model.get_state(None)
        with self.assertRaises(AssertionError):
            model.fit(dataframe, segments, 'test')
def test_random_dataset_for_random_model(self):
    # Peak/Trough detection over random data with a randomly generated cached
    # pattern should not raise, whatever window size and confidence come out.
    data = create_random_model(random.randint(1, 100))
    data = create_dataframe(data)
    model_instances = [models.PeakModel(), models.TroughModel()]
    cache = {
        'patternCenter': [5, 50],
        'patternModel': [],
        'windowSize': 2,
        'convolveMin': 0,
        'convolveMax': 0,
        'confidence': 0,
        'heightMax': 0,
        'heightMin': 0,
        'convDelMin': 0,
        'convDelMax': 0,
    }
    # Window size up to half of the dataset length, but at least 1.
    ws = random.randint(1, max(1, len(data['value']) // 2))
    pattern_model = create_random_model(ws)
    convolve = scipy.signal.fftconvolve(pattern_model, pattern_model)
    confidence = 0.2 * (data['value'].max() - data['value'].min())
    cache['windowSize'] = ws
    cache['patternModel'] = pattern_model
    cache['convolveMin'] = max(convolve)
    cache['convolveMax'] = max(convolve)
    cache['confidence'] = confidence
    cache['heightMax'] = data['value'].max()
    cache['heightMin'] = confidence

    try:
        for model in model_instances:
            model_name = model.__class__.__name__
            model.state = model.get_state(cache)
            model.detect(data, 'test')
    except ValueError:
        self.fail(
            'Model {} raised unexpectedly with dataset {} and cache {}'.format(
                model_name, data['value'], cache))
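# test_random_dataset_for_random_model also assumes a create_random_model helper that
# is not shown in this excerpt. A minimal sketch, under the assumption that it simply
# returns `size` random values to serve as a synthetic series or pattern; the real
# helper may use a different length or value range.
def create_random_model(size: int) -> list:
    return [float(random.randint(0, 100)) for _ in range(size)]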