def detectChangePoints(self, rawData, filename):
    """Detect change points in rawData with cpdetect, write the results to
    filename and return the change points parsed back from that CSV."""
    detector = cpdetect.cpDetector(rawData, distribution='normal', log_odds_threshold=1)
    detector.detect_cp()
    detector.to_csv(filename)
    return self.csvExtractCps(filename)
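# --- Usage sketch (not part of the original source) ---
# Minimal illustration of the cpDetect calls this method wraps, assuming the
# input is a list of 1-D numpy arrays; the synthetic trajectory below is
# made up for demonstration only.
import numpy as np
from cpdetect import cpDetector

# two segments with different means -> one expected change point
traj = np.concatenate([np.random.normal(1.0, 0.1, 500),
                       np.random.normal(2.0, 0.1, 500)])

detector = cpDetector([traj], distribution='normal', log_odds_threshold=1)
detector.detect_cp()               # run change point detection
detector.to_csv('cp_results.csv')  # write detected change points to CSV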
try:
    import cPickle as pickle
except ImportError:
    import pickle

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib.backends.backend_pdf import PdfPages
from cpdetect import cpDetector

# Load synthetic trajectories
trajs = np.load('synthetic_trajs.np.npy')
true_step = pickle.load(open('step_synthetic.pickle', 'rb'))
trajs = trajs.tolist()

# Run them through cpdetect for thresholds 0 to -9
for i in range(10):
    detector = cpDetector(trajs, distribution='log_normal', log_odds_threshold=-i)
    detector.detect_cp()
    # pickle detector
    pickle.dump(detector, open('detector_{}.pickle'.format(str(i)), 'wb'))
    # save steps
    df = pd.DataFrame.from_dict(detector.step_function, orient='index')
    df.to_csv('step_function_{}.csv'.format(str(i)))
    detector.to_csv('ts_log_odds_{}.csv'.format(str(i)))

    # Plot
    filename = 'synthetic_{}.pdf'.format(str(i))
    fontsize = 6
    x_spacing = 1000
    time_res = 1.0
    chunk = len(trajs) // 4
try:
    import cPickle as pickle
except ImportError:
    import pickle

import numpy as np
import pandas as pd
from matplotlib.backends.backend_pdf import PdfPages
from cpdetect import cpDetector
import nonlinear_filter

# Load synthetic trajectories and the true step functions
trajs = np.load('synthetic_trajs.np.npy').tolist()
true_step = pickle.load(open('step_synthetic.pickle', 'rb'))

# run through filter
windows = [2, 4, 6, 8, 16]
M = 12
p = 30
filtered_trajs = []
for traj in trajs:
    filtered_trajs.append(nonlinear_filter.nfl_filter(traj, windows, M, p))

# save filtered trajs
np.save(file='filtered_trajs', arr=filtered_trajs)

# Run them through cpdetect for thresholds 0 to -10
for i in range(11):
    detector = cpDetector(filtered_trajs, distribution='log_normal', log_odds_threshold=-i)
    detector.detect_cp()
    # pickle detector
    pickle.dump(detector, open('filtered_detector_{}.pickle'.format(str(i)), 'wb'))
    # save steps
    df = pd.DataFrame.from_dict(detector.step_function, orient='index')
    df.to_csv('filtered_step_function_{}.csv'.format(str(i)))
    detector.to_csv('ts_log_odds_filtered_{}.csv'.format(str(i)))

    # Plot
    filename = 'synthetic_filtered_{}.pdf'.format(str(i))
    fontsize = 6
    x_spacing = 1000
    time_res = 1.0
    chunk = len(trajs) // 4
    with PdfPages(filename) as pdf:
""" Test Change Point detector """ from cpdetect import cpDetector from cpdetect.cp_detector import (Normal, LogNormal) from cpdetect.tests.utils import get_fn import numpy as np import unittest import pandas as pd from scipy.special import gammaln data = np.load(get_fn('data.npy')) data_2 = np.load(get_fn('data2.npz')) # Convert to cpDetect format data_2 = [data_2[i] for i in data_2.files] detector = cpDetector(data_2, distribution='log_normal', log_odds_threshold=-10) class TestCpDetect(unittest.TestCase): def test_lognormal(self): """ Test Log-normal mean and variance """ mean, var = LogNormal.mean_var(data) self.assertEqual(mean, 1.0081320131891722) self.assertEqual(var, 0.010999447786412363) def test_normal(self): """Test Normal mean and variance""" mean, var = Normal.mean_var(data)