def PPG_Peaks(data, freq, plot=False, remove_extreme=False):
    '''
    Performs the peak detection in steps: filtering (lowpass and cubing), peak
    detection (adaptive threshold and minimum distance), and lastly finding the
    amplitude of each peak from the baseline-removed signal.
    '''
    # filters
    _data = data
    _data = lowpass_butter_filter(_data)
    _data = extreme_removal(_data) if remove_extreme else _data
    _data = cubing_filter(_data)

    # peak detection provided by the peakutils package; it uses an adaptive
    # threshold and a minimum distance between peaks
    min_dist_fraction = 1 / 3   # renamed from `slice`, which shadows the builtin
    _peaks = indexes(_data, min_dist=freq * min_dist_fraction)
    peaks = [softTemplate(data, i, freq) for i in _peaks]

    # peak amplitudes from the filtered data
    amps = [_data[i] for i in peaks]

    if plot:
        b_data = data - baseline(data, 2)
        plot_data([data + 10, b_data], labels=['PPG', 'PPG Baselined'],
                  normalization=True, indice=(0, len(data)))
        # plot_data([None, b_data], peaksIndexs=[None, peaks], labels=[None, 'PPG Baselined'],
        #           normalization=False, indice=(0, len(data)))
        # plot_data([None, None, _data], peaksIndexs=[None, None, _peaks],
        #           labels=[None, 'PPG Baselined', 'PPG Filtered'], normalization=False, indice=(0, len(data)))
        # plot_data([data, None, _data], peaksIndexs=[peaks, None, _peaks],
        #           labels=['PPG', 'PPG Baselined', 'PPG Filtered'], normalization=False, indice=(0, len(data)))

    return peaks, amps
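# Usage sketch (illustrative, not from the original project): a synthetic PPG-like
# signal at an assumed sampling rate of 100 Hz is fed through PPG_Peaks above; the
# helper filters it calls are assumed to be importable from the same module.
import numpy as np

fs = 100                                                 # assumed sampling frequency in Hz
t = np.arange(0, 30, 1 / fs)                             # 30 s of data
ppg = np.sin(2 * np.pi * 1.2 * t) + 0.05 * np.random.randn(t.size)   # ~72 bpm pulse plus noise

peaks, amps = PPG_Peaks(ppg, fs)
print(f"detected {len(peaks)} peaks, mean amplitude {np.mean(amps):.3f}")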
def spline(maskid, data):
    # `masks`, `timecol` and `plot` come from the enclosing scope
    mask = masks[maskid]
    unique = [i for i, t in enumerate(timecol) if i > 0 and t != timecol[i - 1]]
    timecolmask, datamask = zip(*[(timecol[i], data[i])
                                  for i, m in enumerate(mask) if m == 0 and i in unique])
    if len(datamask) >= 2:
        cs = CubicSpline(timecolmask, datamask)
        datacs = cs(timecol)
        if plot:
            plot_data([data, datacs], labels=['Signal', 'Spline Correction'], indice=(0, 30000))
        return array(datacs)
    return data
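# Self-contained sketch of the same idea (hypothetical variable names; the original
# spline() pulls masks, timecol and plot from its enclosing scope): samples flagged
# in `mask` are replaced by a cubic spline fitted over the clean samples.
import numpy as np
from scipy.interpolate import CubicSpline

timecol = np.linspace(0, 10, 200)
signal = np.sin(timecol)
mask = np.zeros(signal.size, dtype=int)
mask[80:100] = 1                                         # pretend these samples are corrupted

clean_t, clean_x = timecol[mask == 0], signal[mask == 0]
if len(clean_x) >= 2:                                    # mirror the guard in spline() above
    corrected = CubicSpline(clean_t, clean_x)(timecol)
else:
    corrected = signal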
def run(args):
    # download the spreadsheet if a URL was given, otherwise use the local path
    if args.url is not None:
        filename, headers = urllib.request.urlretrieve(args.url)
    else:
        filename = args.data_path
    finance_data_raw = pd.read_excel(filename, index_col=0)
    currency_symbols, finance_data = clean_data(finance_data_raw, args.include_pension, args.include_stock)
    finance_data = adjust_currency(args.currency, finance_data, currency_symbols)
    finance_data = compute_savings(finance_data)
    plot_data(finance_data, args.currency)
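# Hedged sketch of the argument namespace run() expects; the flag names below are
# inferred from the attribute accesses above and the defaults are guesses.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--url', default=None, help='download the spreadsheet from a URL')
parser.add_argument('--data-path', default='finances.xlsx', help='local Excel file (assumed name)')
parser.add_argument('--currency', default='EUR')
parser.add_argument('--include-pension', action='store_true')
parser.add_argument('--include-stock', action='store_true')

if __name__ == '__main__':
    run(parser.parse_args())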
def plots(self, plots=['spect0', 'spect1'], chann=6069, freq=[0, 67.5], bw=67.5):
    """
    Plot an animation of the real-time values (at the network refresh speed).

    Parameters:
    plots: a list which may contain the following options:
        -spect0       : full spectrum of the ZDOK0 ADC
        -spect1       : full spectrum of the ZDOK1 ADC
        -re_full      : real part of the correlation of both ADCs
        -im_full      : imaginary part of the correlation of both ADCs
        -phase        : phase in degrees between the two ADCs
        -chann_pow    : relative power between the two inputs in the channel given in chann
        -chann_corr_re: real part of the correlation of the channel given in chann
        -chann_corr_im: imaginary part of the correlation of the channel given in chann
        -chann_phase  : relative phase between the two inputs in the channel given in chann
    chann: the channel you want to look at.
    freq: a two-value list [freq_init, freq_end]; the first value is the beginning
        of the interval you want to look at, the second one is the end.
    bw: the complete bandwidth of the FFT.
    """
    plot_data(self.fpga, plots, chann, freq, bw)

def parse_raw_data(self, filename='data', n_reading=None):
    """Parse the raw data file; defaults to twice the configured read cycles."""
    if n_reading is None:
        parse_raw(filename, self.read_cycles * 2)   # check the 2 factor
    else:
        parse_raw(filename, n_reading)
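# Usage sketch: `Spectrometer` is a hypothetical owner of plots()/parse_raw_data()
# (the real class is not shown in this snippet); the channel, frequency range and
# bandwidth values are illustrative only.
spec = Spectrometer()
spec.plots(plots=['spect0', 'chann_phase'], chann=1024, freq=[0, 67.5], bw=67.5)
spec.parse_raw_data(filename='data', n_reading=100)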
def __init__(self, df, security_name, **kwargs):
    super(Application, self).__init__(**kwargs)
    self.orientation = 'vertical'

    tab_panel = TabbedPanel()
    tab_panel.do_default_tab = False
    tab_panel.background_color = (7 / 255, 0, 13 / 255, 1)

    tab_menu = TabbedPanelItem(text="Menu")
    tab_menu.background_color = (62 / 255, 142 / 255, 222 / 255, 1)
    tab_chart = TabbedPanelItem(text='Chart')
    tab_chart.background_color = (62 / 255, 142 / 255, 222 / 255, 1)
    tab_training = TabbedPanelItem(text='Training')
    tab_training.background_color = (62 / 255, 142 / 255, 222 / 255, 1)
    tab_validate = TabbedPanelItem(text='Validate')
    tab_validate.background_color = (62 / 255, 142 / 255, 222 / 255, 1)
    tab_future = TabbedPanelItem(text='Prediction')
    tab_future.background_color = (62 / 255, 142 / 255, 222 / 255, 1)

    tab_panel.add_widget(tab_menu)
    tab_panel.add_widget(tab_chart)
    tab_panel.add_widget(tab_training)
    tab_panel.add_widget(tab_validate)
    tab_panel.add_widget(tab_future)

    tab_menu.bind(on_press=lambda x: change_screen('menu'))

    # chart tab: plot the price history of the security
    chart_layout = FloatLayout()
    fig = plot_data(df, security_name.upper(), 30, 200)
    canvas = fig.canvas
    chart_layout.add_widget(canvas)
    tab_chart.add_widget(chart_layout)

    predict_frame = FloatLayout(opacity=1)
    predict_btn = Button(text='Run Prediction',
                         background_color=(62 / 255, 142 / 255, 222 / 255, 1),
                         font_size=20,
                         pos_hint={"center_x": 0.5, "bottom": 0},
                         size_hint=(0.3, 0.075))
    predict_btn.bind(on_press=lambda x: start_predict(df, security_name))
    predict_frame.add_widget(predict_btn)
    chart_layout.add_widget(predict_frame)

    def start_predict(df, security_name):
        # run the modeling step on a worker thread and hand the figures back via a queue
        que = queue.Queue()
        par_modeling = partial(modeling, security_name=security_name)
        process = threading.Thread(
            target=lambda q, arg1: q.put(par_modeling(arg1)),
            args=(que, df),
            daemon=True)
        process.start()
        Clock.schedule_once(lambda *args: tab_panel.switch_to(tab_training))

        train_fig = plt.figure(facecolor='#07000d')
        train_canvas = train_fig.canvas
        train_layout = FloatLayout()
        train_layout.add_widget(train_canvas)
        tab_training.add_widget(train_layout)

        if os.path.exists('training.csv'):
            os.remove('training.csv')

        def update_plot(fig):
            train_canvas = fig.canvas
            train_layout.clear_widgets()
            train_layout.add_widget(train_canvas)
            plt.close(fig)
            # for child in train_layout.children[:1]:
            #     train_layout.remove_widget(child)
            # Clock.schedule_once(lambda *args: tab_panel.switch_to(tab_training))

        def read_training(dt):  # dt is passed in by Kivy's Clock
            if os.path.exists('training.csv'):
                loss_df = None
                try:
                    loss_df = pd.read_csv('training.csv')
                except Exception as e:
                    print(e)
                if loss_df is not None:
                    train_fig = plot_loss(loss_df)
                    update_plot(train_fig)
            if not process.is_alive():
                Clock.unschedule(read_training)
                val_fig, future_fig = que.get()

                val_canvas = val_fig.canvas
                val_layout = FloatLayout()
                val_layout.add_widget(val_canvas)
                tab_validate.add_widget(val_layout)

                future_canvas = future_fig.canvas
                future_layout = FloatLayout()
                future_layout.add_widget(future_canvas)
                tab_future.add_widget(future_layout)

                Clock.schedule_once(lambda *args: tab_panel.switch_to(tab_validate))

        Clock.schedule_interval(read_training, 0.1)

    Clock.schedule_once(lambda *args: tab_panel.switch_to(tab_chart))
    self.add_widget(tab_panel)
slopeAODperDegree, intercAODdegree, alldeseasondata = Utils.GetDeseasonalizedData(
    period, aodvalues, allLats, allLongs, monthYears, meanAODperDegree, False, uprof=15)

# plots.plot_regline(alldeseasondata[5][5], allLats[5], allLongs[5], slopeAODperDegree[5][5], intercAODdegree[5][5],
#                    monthYears, period, period_names[pnum], AODrange_labels[AODcat], True, aodvalues)
# plots.plot_data(np.flipud(meanAODperDegree), np.asarray(map(float, allLongs)), np.asarray(map(float, allLats_s)),
#                 period_names[pnum], AODrange_labels[AODcat], "Mean", 'jet', minv=0, maxv=0.10, folder="./graphsfix/")
# plots.plot_data(np.flipud(slopeAODperDegree), np.asarray(map(float, allLongs)), np.asarray(map(float, allLats_s)),
#                 period_names[pnum], AODrange_labels[AODcat], "Slope", 'gnuplot', minv=-7, maxv=7, folder="./graphsfix/")

# map() is wrapped in list() so the float conversion also works under Python 3,
# where np.asarray(map(...)) would yield a 0-d object array
plots.plot_regline(alldeseasondata[5][5], allLats[5], allLongs[5], slopeAODperDegree[5][5], intercAODdegree[5][5],
                   monthYears, period, period_names[pnum], AODrange_labels[AODcat], True, aodvalues,
                   folder="./graphsprof15/")
plots.plot_data(np.flipud(meanAODperDegree), np.asarray(list(map(float, allLongs))), np.asarray(list(map(float, allLats_s))),
                period_names[pnum], AODrange_labels[AODcat], "Mean", 'jet', folder="./graphsprof15/")
plots.plot_data(np.flipud(slopeAODperDegree), np.asarray(list(map(float, allLongs))), np.asarray(list(map(float, allLats_s))),
                period_names[pnum], AODrange_labels[AODcat], "Slope", 'gnuplot', folder="./graphsprof15/")
plt.show()

# plt.figure(2)
# x, y, AodToDouble = Utils.aodPerMonthGraph(aodvalues)
# plots.perMonth(x, y)
#
# plt.figure(3)
# x, y = Utils.aodDeseasonalisation(aodvalues, AodToDouble)
# plots.perMonth(x, y)
# plt.show()
def create_plot(self):
    self.plotter = plot_data(self.fpga)