# Main iterative loop.
for zt in z_iter:
    zt = zt.reshape(zt.shape[0], 1)      # Convert to a column vector

    if Frahst_alg.st['anomaly']:
        Frahst_alg.st['anomaly'] = False  # Reset anomaly flag

    '''Frahst Version'''
    Frahst_alg.run(zt)

    # Calculate reconstructed data if needed
    st = Frahst_alg.st
    Frahst_alg.st['recon'] = np.dot(st['Q'][:, :st['r']], st['ht'][:st['r']])

    '''Anomaly Detection method'''
    Frahst_alg.detect_anom(zt)

    '''Rank adaptation method'''
    Frahst_alg.rank_adjust(zt)

    '''Store data'''
    #tracked_values = ['ht','e_ratio','r','recon', 'pred_err', 'pred_err_norm', 'pred_err_ave', 't_stat', 'pred_dsn', 'pred_zt']
    #tracked_values = ['ht','e_ratio','r','recon','recon_err', 'recon_err_norm', 't_stat', 'rec_dsn', 'x_sample']
    #tracked_values = ['ht','e_ratio','r','recon', 'h_res', 'h_res_aa', 'h_res_norm']
    #Frahst_alg.track_var(tracked_values)
    Frahst_alg.track_var(['ht', 'r', 'e_ratio'])
    #Frahst_alg.track_var()

anomalies_list.append(Frahst_alg.res['anomalies'][:])
data_list.append(D)
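# The reconstruction step above maps the hidden variables ht back into the
# input space through the first r columns of the tracked basis Q. A minimal
# standalone illustration of that projection (assuming, as in subspace
# tracking, that ht is the projection of zt onto those basis vectors; the
# sizes below are illustrative, not taken from the original script):
import numpy as np

n, r = 5, 2
Q, _ = np.linalg.qr(np.random.randn(n, n))   # orthonormal basis (illustrative)
zt = np.random.randn(n, 1)                   # one multivariate sample
ht = Q[:, :r].T.dot(zt)                      # hidden variables
recon = Q[:, :r].dot(ht)                     # rank-r reconstruction of zt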
import time
import numpy as np
# The FRAHST class is assumed to be importable from the project (module name not shown here).

z_iter = iter(data)
numStreams = data.shape[1]

# Initialise Algorithm
F = FRAHST('F-7.A-recS.R-static.S-none', p, numStreams)

# Start time profiling
start = time.time()

'''Begin Frahst'''
# Main iterative loop.
for zt in z_iter:
    zt = zt.reshape(zt.shape[0], 1)      # Convert to a column vector

    if np.any(F.st['anomaly']):
        F.st['anomaly'][:] = False       # Reset anomaly flags

    '''Frahst Version'''
    F.run(zt)

    '''Anomaly Detection method'''
    F.detect_anom(zt)

    '''Rank adaptation method'''
    F.rank_adjust(zt)

    '''Store Values'''
    F.track_var()

# End of a single Frahst run
time_sample_list[i] = time.time() - start

# End of all initial conditions for N streams
time_results[k] = time_sample_list.mean()
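# A hedged sketch of the outer benchmarking loops implied by the indices i, k
# and the arrays time_sample_list / time_results used above; the stream counts
# and repetition count are illustrative assumptions, not from the original.
import numpy as np

stream_counts = [8, 16, 32]            # assumed numbers of streams to profile
n_repeats = 5                          # assumed initial conditions per count

time_results = np.zeros(len(stream_counts))
for k, N in enumerate(stream_counts):
    time_sample_list = np.zeros(n_repeats)
    for i in range(n_repeats):
        # Generate a (samples x N) data array and run the FRAHST loop above,
        # recording its elapsed time:
        #     time_sample_list[i] = time.time() - start
        pass
    time_results[k] = time_sample_list.mean()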
'''Begin Frahst'''
# Main iterative loop.
for zt in z_iter:
    zt = zt.reshape(zt.shape[0], 1)      # Convert to a column vector

    if Frahst_alg.st['anomaly']:
        Frahst_alg.st['anomaly'] = False  # Reset anomaly flag

    '''Frahst Version'''
    Frahst_alg.run(zt)

    # Calculate reconstructed data if needed
    st = Frahst_alg.st
    Frahst_alg.st['recon'] = np.dot(st['Q'][:, :st['r']], st['ht'][:st['r']])

    '''Anomaly Detection method'''
    Frahst_alg.detect_anom(zt)

    '''Rank adaptation method'''
    Frahst_alg.rank_adjust(zt)

    '''Store data'''
    #tracked_values = ['ht','e_ratio','r','recon', 'pred_err', 'pred_err_norm', 'pred_err_ave', 't_stat', 'pred_dsn', 'pred_zt']
    #tracked_values = ['ht','e_ratio','r','recon','recon_err', 'recon_err_norm', 't_stat', 'rec_dsn', 'x_sample']
    #tracked_values = ['ht','e_ratio','r','recon', 'h_res', 'h_res_aa', 'h_res_norm']
    #Frahst_alg.track_var(tracked_values)
    Frahst_alg.track_var(['ht', 'r', 'e_ratio'])
    #Frahst_alg.track_var()

anomalies_list.append(Frahst_alg.res['anomalies'][:])
data_list.append(D)

''' Plot Results '''
#Frahst_alg.plot_res([data, 'ht', 't_stat'])
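# plot_res above is the class's own plotting helper; the block below is an
# equivalent matplotlib fallback, assuming track_var() stores each tracked
# key ('ht', 'r', ...) as a per-timestep array under Frahst_alg.res and that
# res['anomalies'] holds the detected time indices.
import matplotlib.pyplot as plt

fig, axes = plt.subplots(3, 1, sharex=True)
axes[0].plot(data)
axes[0].set_ylabel('input streams')
axes[1].plot(Frahst_alg.res['ht'])
axes[1].set_ylabel('ht')
axes[2].plot(Frahst_alg.res['r'])
axes[2].set_ylabel('rank r')
for t in Frahst_alg.res['anomalies']:        # mark detected anomaly times
    for ax in axes:
        ax.axvline(t, color='r', alpha=0.3)
plt.show()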
# Start time profiling
start = time.time()

'''Begin Frahst'''
# Main iterative loop.
for zt in z_iter:
    zt = zt.reshape(zt.shape[0], 1)      # Convert to a column vector

    if np.any(F.st['anomaly']):
        F.st['anomaly'][:] = False       # Reset anomaly flags

    '''Frahst Version'''
    F.run(zt)

    '''Anomaly Detection method'''
    F.detect_anom(zt)

    '''Rank adaptation method'''
    F.rank_adjust(zt)

    '''Store Values'''
    F.track_var()

# End of a single Frahst run
time_sample_list[i] = time.time() - start

# End of all initial conditions for N streams
time_results[k] = time_sample_list.mean()