def process_data(workflow):
    """Collect reward / architecture data for the Balsam jobs of *workflow*,
    plot the reward curves in the terminal, and dump everything to a JSON file.

    Returns 0 on success, -1 when the workflow produced no rewards.
    """
    data = BalsamJob.objects.filter(workflow=workflow).values_list(
        'data__reward', flat=True)
    print(f'data len: {len(data)}')
    # rm_none presumably strips missing entries already; keep a defensive
    # second pass, but use the identity test (`is not None`) per PEP 8.
    raw_rewards = [e for e in rm_none(data) if e is not None]
    if not raw_rewards:
        print(f'no rewards for : {workflow}')
        return -1
    plot(list(range(len(raw_rewards))), raw_rewards)
    max_rewards = max_list(raw_rewards)
    plot(list(range(len(max_rewards))), max_rewards)
    data = BalsamJob.objects.filter(workflow=workflow).values_list(
        'data__arch_seq', flat=True)
    arch_seq = rm_none(data)
    data = BalsamJob.objects.filter(workflow=workflow).values_list(
        'data__id_worker', flat=True)
    w = rm_none(data)
    filename = f'wf-{workflow}_{now}'
    # BUG FIX: the original f-string had no placeholder and printed a
    # literal; report the actual file name being written.
    print(f'filename: {filename}')
    with open('data/' + filename + '.json', "w") as f:
        data = dict(fig=filename,
                    raw_rewards=raw_rewards,
                    max_rewards=max_rewards,
                    arch_seq=arch_seq,
                    id_worker=w)
        json.dump(data, f)
    return 0
def process_device_message(msg):
    """Record the temperature reading carried by *msg* and redraw the plot.

    Appends (sample index, temperature) to the module-level history and
    re-renders the whole series sized to the current terminal.
    """
    global device_readings, count
    reading = msg[0]["temperature"]
    device_readings.append((count, reading))
    count += 1
    rows, cols = get_terminal_size()
    xs, ys = zip(*device_readings)
    plot(xs, ys, rows + 1, cols)
def generateData(count=1, databasename="traindata-new.npy"):
    """Generate *count* PID-tuning training examples and store them.

    For each example: randomize the simulator, evolve a plausible scenario
    PID, then evolve a solution PID for that scenario, and save the
    (scenario, response) -> solution pair into the flashcard database.

    count        -- number of training examples to generate
    databasename -- path handed to the Database store

    Relies on module-level PID, spdata, debug, t, sp and the Simulator /
    Evolution / Database / tools helpers defined elsewhere in the project.
    """
    global PID  # , Simulator
    sim = Simulator("sim", PID)
    sim.setpoint(spdata)
    pop = Evolution.Population('colony', 50, 25)
    pop.setFitnessCalculator(sim.generateAndScore)
    pop.setResultRange(0, 30)
    flashcards = Database(databasename)
    counter = 0
    while counter < count:
        print()
        print("Training Data #", counter, bcolors.OKBLUE)
        start = time.time()

        # Create Real-World Scenario and data with Basic Profile
        sim.randomize()
        randomPID = sim.randomPID()
        pop.Initialize(randomPID, 6)
        pop.Evolve(1, "Scenario")  # make the input seem reasonable
        scenarioPID = pop.getTopScore()
        OP, PV = sim.generate(scenarioPID)
        opdata = tools.datariser(OP)
        # Idiom fix: `if debug == True:` -> truthiness test (debug is a flag).
        if debug:
            plot(opdata.getx(), opdata.gety())
            # print("Area: ", tools.areaBetween(spdata, opdata.getdata()))

        # Find answer to scenario
        pop.Initialize(list(scenarioPID), 6)
        pop.Evolve(60, "Solution")
        correctPID = pop.getTopScore()
        p, i, d = correctPID
        i = i / 2  # halve the integral gain of the evolved answer
        OP, PV = sim.generate((p, i, d))  # sim.generate(pop.getTopScore())
        newopdata = tools.datariser(OP)
        newpvdata = tools.datariser(PV)
        if debug:
            plot(newopdata.getx(), newopdata.gety())
            sim.show()
            tools.saveoutput(
                [[newopdata.getx(), newopdata.gety()], [t, sp],
                 [newpvdata.getx(), newpvdata.gety()]], "lastGenerated")

        flashcards.add([scenarioPID, opdata.gety()], correctPID)
        counter += 1
        end = time.time()
        time_taken = end - start
        print(
            bcolors.ENDC, " Time:", round(time_taken), "Final Score:",
            round(-tools.weightedAreaBetween(spdata, newopdata.getdata()), 1))
    flashcards.finish()
def print_mood_graph(dates_and_moods):
    """Print line graph of mood in terminal"""
    xs = range(len(dates_and_moods))
    ys = [int(entry[1]) for entry in dates_and_moods]
    rows = 10
    if TERMINAL_WIDTH < len(dates_and_moods):
        cols = TERMINAL_WIDTH
    else:
        cols = len(dates_and_moods) * 2
    plot(xs, ys, rows=rows, columns=cols)
    print()
def test_plot(self):
    """plot() renders a 3-column diagonal and the min/max footer line."""
    with capture_sys_output() as stdout:
        terminalplot.plot(x=[1, 2, 3], y=[1, 2, 3], rows=7, columns=3)
    expected = "\n".join([
        "  *",
        " * ",
        "*  ",
        "",
        "Min x: 1 Max x: 3 Min y: 1 Max y: 3",
    ]) + "\n"
    self.assertEqual(stdout.getvalue(), expected)
def test_plot(self):
    """plot() output matches the expected rendering for a 3-point diagonal."""
    buffer = StringIO()
    sys.stdout = buffer
    try:
        terminalplot.plot(x=[1, 2, 3], y=[1, 2, 3], rows=7, columns=3)
        captured = buffer.getvalue()
        expected = ("  *\n"
                    " * \n"
                    "*  \n"
                    "\nMin x: 1 Max x: 3 Min y: 1 Max y: 3\n")
        self.assertEqual(captured, expected)
    finally:
        # Always restore the real stdout, even if the assertion fails.
        sys.stdout = sys.__stdout__
def test_plot(self):
    """Capture stdout and verify the 7x3 rendering of a 3-point series."""
    captured_stream = StringIO()
    sys.stdout = captured_stream
    try:
        terminalplot.plot(x=[1, 2, 3], y=[1, 2, 3], rows=7, columns=3)
        want = "\n".join([
            "  *",
            " * ",
            "*  ",
            "",
            "Min x: 1 Max x: 3 Min y: 1 Max y: 3",
        ]) + "\n"
        self.assertEqual(captured_stream.getvalue(), want)
    finally:
        # Restore stdout no matter what happened above.
        sys.stdout = sys.__stdout__
def MP_L2VR_GNMF(Ntry,lcall,Win,A0,X0,lamA,lamX,epsilon,rho=0.1, off=0):
    """Multiplicative-update NMF of Y ~ W A X on GPU (cupy).

    Minimizes ||Y - W A X||^2 + lamA*||A||_F^2 + lamX*det(X X^T), with
    periodic terminal plotting / logging of the metric every 1000 iterations.

    Ntry     -- number of multiplicative-update iterations
    lcall    -- observed matrix Y (non-negative)
    Win      -- fixed weight matrix W
    A0, X0   -- non-negative initial factors
    lamA     -- L2 penalty on A
    lamX     -- volume (determinant) penalty on X
    epsilon  -- small constant added to numerator/denominator for stability
    rho      -- unused here; kept for interface compatibility — verify callers
    off      -- iteration-index offset used in the logged metric

    Returns (A, X, logmetric) with A, X converted back to numpy.
    """
    check_nonnegative(lcall,"LC")
    check_nonnegative(A0,"A")
    check_nonnegative(X0,"X")
    Nk=np.shape(A0)[1]
    # Move everything onto the GPU once, up front.
    Y=cp.asarray(lcall)
    W=cp.asarray(Win)
    A=cp.asarray(A0)
    X=cp.asarray(X0)
    logmetric=[]
    jj=0
    for i in range(0,Ntry):
        # Gram matrices reused below.  NOTE(review): ATA and XTX appear
        # unused after the Wt/Wb overwrite further down — confirm intent.
        ATA = cp.dot(A.T,A)
        XTX = cp.dot(X.T,X)
        XXT = cp.dot(X,X.T)
        #----------------------------------------------------
        # Every 1000 iterations: log the objective terms and draw one row
        # of X in the terminal; every 10000 iterations: checkpoint via LogNMF.
        if np.mod(i,1000)==0:
            jj=jj+1
            AA=np.sum(A*A)
            detXXT=cp.asnumpy(cp.linalg.det(XXT))
            chi2=cp.sum((Y - cp.dot(cp.dot(W,A),X))**2)
            metric=[i+off,cp.asnumpy(chi2+lamA*AA+lamX*detXXT),cp.asnumpy(chi2),lamA*AA,lamX*detXXT]
            logmetric.append(metric)
            # print(metric,np.sum(A),np.sum(X))
            import terminalplot
            Xn=cp.asnumpy(X)
            bandl=np.array(range(0,len(Xn[0,:])))
            print(metric)
            # Cycle through the Nk rows of X across successive reports.
            terminalplot.plot(list(bandl),list(Xn[np.mod(jj,Nk),:]))
            if np.mod(i,10000)==0:
                LogNMF(i+off,cp.asnumpy(A),cp.asnumpy(X),Nk)
        #----------------------------------------------------
        detXXT=cp.linalg.det(XXT)
        WA=cp.dot(W,A)
        # NOTE(review): these first Wt/Wb (the X-update form) are immediately
        # overwritten below and never used — likely dead code or a missing
        # X update; confirm against the original algorithm.
        Wt = cp.dot(cp.dot(WA.T,Y),XTX) + epsilon
        Wb = cp.dot(cp.dot(cp.dot(WA.T,WA),X),XTX)+ lamX*detXXT*X + epsilon
        #SGD
        Wt = cp.dot(cp.dot(W.T,Y),X.T)+ epsilon
        Wb = (cp.dot(cp.dot(cp.dot(W.T,WA),X),X.T)) + lamA*A + epsilon
        # Multiplicative update for A, then row-normalize A to unit sum.
        A = A*(Wt/Wb)
        A = cp.dot(cp.diag(1/cp.sum(A[:,:],axis=1)),A)
    # Bring the factors back to host memory for the caller.
    A=cp.asnumpy(A)
    X=cp.asnumpy(X)
    #----------------------------------------------------
    LogMetricPlot(logmetric)
    #----------------------------------------------------
    return A, X, logmetric
def get_data(stream_url, count, is_graph):
    """Returns a list of data for a stream."""
    url = stream_url + '/Data' + '?count=' + str(count) + '&Startindex=1970-01-01T00:00:00Z'
    response = requests.get(url, headers=headers)
    values = [event['Value'] for event in response.json()]
    if is_graph:
        terminalplot.plot(range(len(values)), values)
    else:
        print(values)
    input("Press any key to continue...")
    return
def main():
    """CLI entry point: plot y (against optional x) and/or print terminal size."""
    parser = make_parser()
    args = parser.parse_args()
    if not args.terminal_size and not args.y:
        parser.error('Not enough arguments provided.')
    if args.y:
        ys = args.y
        xs = args.x if args.x else range(len(ys))
        plot(xs, ys)
    if args.terminal_size:
        rows, cols = get_terminal_size()
        print("Rows: {}, Columns: {}".format(rows, cols))
def main():
    """CLI entry point: plot y (against optional x) and/or print terminal size."""
    args = vars(parser.parse_args())
    if not any(args.values()):
        parser.error('No arguments provided.')
    if args['y']:
        ys = args['y']
        # Default x to 0..len(y)-1 when not supplied.
        xs = args['x'] or range(len(ys))
        plot(xs, ys)
    if args['terminal_size']:
        rows, cols = get_terminal_size()
        print(''.join(['Rows: ', str(rows), ' Columns: ', str(cols)]))
def generate_graph(arrays, graph):
    """Draw a scatter graph of paired arrays, on screen or in the terminal.

    arrays -- sequence whose first two items are the x and y value lists
    graph  -- "graphic" for a matplotlib window, "terminal" for an ASCII
              plot, or None to skip plotting entirely
    """
    # get x, y values to draw a graph
    x_array = arrays[0]
    y_array = arrays[1]
    # determine if show graph or not
    if graph == "graphic":
        # generates new graphic window with matplotlib
        plt.scatter(x_array, y_array, color="green", marker="1", s=30)
        plt.xlabel('x_axis')
        plt.ylabel('y_axis')
        plt.title('plot')
        plt.show()
    elif graph == "terminal":
        # draw graph in terminal
        tplt.plot(x_array, y_array)
    elif graph is None:  # FIX: identity test for None (`== None` is unidiomatic)
        pass
    # Tail of count_steps (definition starts above this chunk): close the log
    # file and return the per-chain step counts and cumulative times.
    f.close()
    # NOTE(review): np.int is deprecated (removed in NumPy 1.24) — this will
    # raise AttributeError on modern NumPy; plain int or np.int64 is needed.
    step = np.array(step, dtype=np.int)
    return step, ctime


if __name__ == '__main__':
    # usage: python read_step.py log
    import sys
    import terminalplot
    import numpy as np
    step, ctime = count_steps(sys.argv[1])
    if True:
        print('# STEP ############################################')
        terminalplot.plot(list(range(len(step))), list(step))
        print('# TIME (sec) ######################################')
        terminalplot.plot(list(range(len(ctime))), list(ctime))
        # print("# log STEP x log TIME ######################################")
        # terminalplot.plot(list(np.log10(step)),list(np.log10(ctime)))
        print('# TIME/STEP (sec) ############################################')
        terminalplot.plot(list(range(len(step))), list(ctime / step))
        print('# summary ######################################')
        print('Total steps:', np.sum(step))
        print('# of chain:', len(step))
        print('median/chain:', np.median(step), '|min-max', np.min(step), '-',
              np.max(step))
        print('total time=', round(np.sum(ctime) / 3600, 3), 'h / ',
              round(np.sum(ctime) / 60, 1), 'min')
def main():
    """Continuously sample CPU utilisation and plot it in the terminal.

    Runs forever (Ctrl-C to stop); redraws once per second after clearing
    the screen.
    """
    # NOTE(review): x and y are module-level globals; y grows without bound
    # while x is never updated here — confirm they stay the same length at
    # the call site, since plot(x, y) presumably expects matching series.
    while True:
        y.append(psutil.cpu_percent())
        terminalplot.plot(x, y)
        time.sleep(1)
        # Clear the screen between frames (Windows vs POSIX).
        os.system('cls' if os.name == 'nt' else 'clear')
def QP_GNMF(reg,Ntry,lcall,Win,A0,X0,lamA,lamX,epsilon,filename,NtryAPGX=10,NtryAPGA=1000,eta=0.0, delta=1.e-6, off=0, nu=1.0,Lipx="norm2",Lipa="frobenius", endc=1.e-5,Nsave=10000,semiNMF=False):
    """Block-coordinate NMF of Y ~ W A X via accelerated projected gradient.

    Supported regularization modes (reg):
      "L2-VRDet" -- L2 on A + det(X X^T) volume penalty on X
      "L2-VRLD"  -- L2 on A + log-det(X X^T + delta I) penalty on X
      "Dual-L2"  -- L2 on both A and X
      "L2"       -- L2 on A only
    Any other mode prints a message and exits the process.

    Each outer iteration updates the rows of X and the columns of A one at a
    time with apg.APGr (apg.AGr for A when semiNMF, dropping the
    non-negativity projection).  State is checkpointed to ``filename`` every
    Nsave iterations and on convergence (objective decrease < endc).

    Returns (A, X, resall) with the factors converted back to numpy.
    """
    import scipy
    check_nonnegative(lcall,"LC")
    check_nonnegative(A0,"A")
    check_nonnegative(X0,"X")
    Ni=np.shape(lcall)[0]
    Nl=np.shape(lcall)[1]
    Nk=np.shape(A0)[1]
    Nj=np.shape(A0)[0]
    # Initial objective value for the selected regularization mode.
    if reg=="L2-VRDet":
        res=np.sum((lcall-Win@A0@X0)**2)+lamA*np.sum(A0**2)+lamX*np.linalg.det(np.dot(X0,X0.T))
    elif reg=="L2-VRLD":
        res=np.sum((lcall-Win@A0@X0)**2)+lamA*np.sum(A0**2)+lamX*np.log(np.linalg.det(np.dot(X0,X0.T)+delta*np.eye(Nk)))
    elif reg=="Dual-L2":
        res=np.sum((lcall-Win@A0@X0)**2)+lamA*np.sum(A0**2)+lamX*np.sum(X0**2)
    elif reg=="L2":
        res=np.sum((lcall-Win@A0@X0)**2)+lamA*np.sum(A0**2)
    else:
        print("No mode. Halt.")
        sys.exit()
    print("Ini residual=",res)
    # Move the working matrices to the GPU once.
    Y=cp.asarray(lcall)
    W=cp.asarray(Win)
    A=cp.asarray(A0)
    X=cp.asarray(X0)
    WTW=cp.dot(W.T,W)
    jj=off
    resall=[]
    for i in range(0,Ntry):
        print(i)
        ## xk
        for k in range(0,Nk):
            # Residual with component k removed: Delta = Y - W(AX - a_k x_k).
            AX=cp.dot(A,X) - cp.dot(A[:,k:k+1],X[k:k+1,:])
            Delta=Y-cp.dot(W,AX)
            ak=A[:,k]
            Wa=cp.dot(W,ak)
            W_x=cp.dot(Wa,Wa)*cp.eye(Nl)
            bx=cp.dot(cp.dot(Delta.T,W),ak)
            if reg=="L2-VRDet":
                # Volume penalty: project out the other rows of X and scale
                # by det of their Gram matrix (cofactor-style term).
                Xn=cp.asnumpy(X)
                Xminus = np.delete(Xn,obj=k,axis=0)
                XXTinverse=np.linalg.inv(np.dot(Xminus,Xminus.T))
                Kn=np.eye(Nl) - np.dot(np.dot(Xminus.T,XXTinverse),Xminus)
                Kn=Kn*np.linalg.det(np.dot(Xminus,Xminus.T))*lamX
                D_x=cp.asarray(Kn)
                X[k,:]=apg.APGr(Nl,W_x + D_x,bx,X[k,:],Ntry=NtryAPGX, eta=eta, Lip=Lipx)
            elif reg=="L2-VRLD":
                E_x=lamX*nu*cp.eye(Nl)
                X[k,:]=apg.APGr(Nl,W_x + E_x,bx,X[k,:],Ntry=NtryAPGX, eta=eta, Lip=Lipx)
            elif reg=="Dual-L2":
                T_x=lamX*cp.eye(Nl)
                X[k,:]=apg.APGr(Nl,W_x + T_x,bx,X[k,:],Ntry=NtryAPGX, eta=eta, Lip=Lipx)
            elif reg=="L2":
                X[k,:]=apg.APGr(Nl,W_x,bx,X[k,:],Ntry=NtryAPGX, eta=eta, Lip=Lipx)
            ## X normalization
            #X[k,:]=X[k,:]/cp.sum(X[k,:])
            ## ak
            xk=X[k,:]
            W_a=(cp.dot(xk,xk))*(cp.dot(W.T,W))
            b=cp.dot(cp.dot(W.T,Delta),xk)
            T_a=lamA*cp.eye(Nj)
            if semiNMF:
                # semiNMF: AGr allows negative entries in A.
                A[:,k]=apg.AGr(Nj,W_a+T_a,b,A[:,k],Ntry=NtryAPGA, eta=eta, Lip=Lipa)
            else:
                A[:,k]=apg.APGr(Nj,W_a+T_a,b,A[:,k],Ntry=NtryAPGA, eta=eta, Lip=Lipa)
            ## A normalization
            #A[:,k]=A[:,k]/cp.sum(A[:,k])*Nj
        ## A normalization
        #for k in range(0,Nk):
        #    sumk=cp.sum(A[:,:],axis=1)
        #    sumav=cp.mean(sumk)
        #    A[:,k]=A[:,k]/sumk*sumav
        # Recompute the objective terms for this iteration.
        Like=cp.asnumpy(cp.sum((Y-cp.dot(cp.dot(W,A),X))**2))
        ### BUG FIXED 2020/12/22
        # RA=cp.asnumpy(lamA*cp.sum(A0**2))
        RA=cp.asnumpy(lamA*cp.sum(A**2))
        ###
        if reg=="L2-VRDet":
            RX=cp.asnumpy(lamX*cp.linalg.det(cp.dot(X,X.T)))
        elif reg=="L2-VRLD":
            # Adapt nu from the smallest eigenvalue of X X^T + delta I.
            eig=np.linalg.eigvals(cp.asnumpy(cp.dot(X,X.T) + delta*cp.eye(Nk)))
            nu=1.0/np.min(np.abs(eig))
            print("nu=",nu)
            RX=cp.asnumpy(lamX*cp.log(cp.linalg.det(cp.dot(X,X.T)+delta*cp.eye(Nk))))
        elif reg=="Dual-L2":
            RX=cp.asnumpy(lamX*cp.sum(X**2))
        elif reg=="L2":
            RX=0.0
        resprev=res
        res=Like+RA+RX
        diff=resprev - res
        resall.append([res,Like,RA,RX])
        print("Residual=",res,Like,RA,RX)
        print("Xave",cp.mean(X))
        print("Aave",cp.mean(A))
        #LogNMF(i,A,X,Nk)
        # Terminal plot of one row of X every 10 iterations (rows cycled).
        if np.mod(jj,10)==0:
            bandl=np.array(range(0,len(X[0,:])))
            import terminalplot
            terminalplot.plot(list(bandl),list(cp.asnumpy(X[np.mod(jj,Nk),:])))
        jj=jj+1
        # Periodic checkpoint and convergence-triggered early return.
        if np.mod(jj,Nsave) == 0:
            np.savez(filename+"j"+str(jj),cp.asnumpy(A),cp.asnumpy(X),resall)
        if diff < endc:
            np.savez(filename+"Ej"+str(jj),cp.asnumpy(A),cp.asnumpy(X),resall)
            return cp.asnumpy(A),cp.asnumpy(X), resall
    return cp.asnumpy(A),cp.asnumpy(X), resall
def GPQP_GNMF(reg,Ntry,lcall,Win,A0,X0,KSin,lamX,epsilon,filename,NtryAPGX=10,NtryAPGA=1000,eta=0.0, delta=1.e-6, off=0, nu=1.0,Lipx="norm2",Lipa="frobenius", endc=1.e-5,Nsave=10000,semiNMF=False):
    """
    Summary
    --------------
    GPQP_GNMF: Gaussian Process and Quadratic Programing for Geometric
    Non-negative Matrix Factorization

    Factorizes Y ~ W A X where A carries a GP prior with covariance KSin
    (penalty trace(A^T KS^-1 A)) and X is regularized per ``reg``:
      "GP-VRDet" -- det(X X^T) volume penalty
      "GP-VRLD"  -- log-det(X X^T + delta I) penalty
      "GP-L2"    -- L2 penalty on X
      "GP"       -- no X penalty
    Any other mode prints a message and exits the process.

    Rows of X are updated with apg.APGr; the A update is solved in closed
    form through the GP linear system.  Checkpoints to ``filename`` every
    Nsave iterations and on convergence (objective decrease < endc).

    Returns (A, X, resall) with factors converted back to numpy.
    """
    import scipy
    check_nonnegative(lcall,"LC")
    check_nonnegative(A0,"A")
    check_nonnegative(X0,"X")
    Ni=np.shape(lcall)[0]
    Nl=np.shape(lcall)[1]
    Nk=np.shape(A0)[1]
    Nj=np.shape(A0)[0]
    invKS0=np.linalg.inv(KSin)
    # Initial objective value for the selected regularization mode.
    if reg=="GP-VRDet":
        res=np.sum((lcall-Win@A0@X0)**2)+np.trace(A0.T@invKS0@A0)+lamX*np.linalg.det(np.dot(X0,X0.T))
    elif reg=="GP-VRLD":
        res=np.sum((lcall-Win@A0@X0)**2)+np.trace(A0.T@invKS0@A0)+lamX*np.log(np.linalg.det(np.dot(X0,X0.T)+delta*np.eye(Nk)))
    elif reg=="GP-L2":
        res=np.sum((lcall-Win@A0@X0)**2)+np.trace(A0.T@invKS0@A0)+lamX*np.sum(X0**2)
    elif reg=="GP":
        res=np.sum((lcall-Win@A0@X0)**2)+np.trace(A0.T@invKS0@A0)
    else:
        print("No mode. Halt.")
        sys.exit()
    print("Ini residual=",res)
    # Move the working matrices to the GPU once.
    Y=cp.asarray(lcall)
    W=cp.asarray(Win)
    A=cp.asarray(A0)
    X=cp.asarray(X0)
    WTW=cp.dot(W.T,W)
    jj=off
    resall=[]
    Kw=cp.asarray(Win@[email protected])
    KS=cp.asarray(KSin)
    invKS=np.linalg.inv(KS)
    for i in range(0,Ntry):
        print(i)
        ## xk
        for k in range(0,Nk):
            # Residual with component k removed: Delta = Y - W(AX - a_k x_k).
            AX=cp.dot(A,X) - cp.dot(A[:,k:k+1],X[k:k+1,:])
            Delta=Y-cp.dot(W,AX)
            ak=A[:,k]
            Wa=cp.dot(W,ak)
            W_x=cp.dot(Wa,Wa)*cp.eye(Nl)
            bx=cp.dot(cp.dot(Delta.T,W),ak)
            if reg=="GP-VRDet":
                # Volume penalty: project out the other rows of X and scale
                # by det of their Gram matrix (cofactor-style term).
                Xn=cp.asnumpy(X)
                Xminus = np.delete(Xn,obj=k,axis=0)
                XXTinverse=np.linalg.inv(np.dot(Xminus,Xminus.T))
                Kn=np.eye(Nl) - np.dot(np.dot(Xminus.T,XXTinverse),Xminus)
                Kn=Kn*np.linalg.det(np.dot(Xminus,Xminus.T))*lamX
                D_x=cp.asarray(Kn)
                X[k,:]=apg.APGr(Nl,W_x + D_x,bx,X[k,:],Ntry=NtryAPGX, eta=eta, Lip=Lipx)
            elif reg=="GP-VRLD":
                E_x=lamX*nu*cp.eye(Nl)
                X[k,:]=apg.APGr(Nl,W_x + E_x,bx,X[k,:],Ntry=NtryAPGX, eta=eta, Lip=Lipx)
            elif reg=="GP-L2":
                T_x=lamX*cp.eye(Nl)
                X[k,:]=apg.APGr(Nl,W_x + T_x,bx,X[k,:],Ntry=NtryAPGX, eta=eta, Lip=Lipx)
            elif reg=="GP":
                X[k,:]=apg.APGr(Nl,W_x,bx,X[k,:],Ntry=NtryAPGX, eta=eta, Lip=Lipx)
            ## X normalization
            #X[k,:]=X[k,:]/cp.sum(X[k,:])
            ## ak
            # Closed-form GP update for column a_k via the linear system
            # (I + ck*Kw) Xlc = lk.
            xk=X[k,:]
            ck=cp.sum(xk**2)
            lk=cp.sum(Delta*xk,axis=1)/ck
            IKw=cp.eye(Ni)+ck*Kw
            Xlc=cp.linalg.solve(IKw,lk)
            atmp= ck*cp.dot(cp.dot(KS,W.T),Xlc)
            # NOTE(review): the clipping and the assignment of atmp into
            # A[:,k] are commented out, so this A update currently has no
            # effect — confirm whether this is intentional (frozen A).
            # atmp[atmp<0.0]=0.0
            # A[:,k]=atmp
        # Recompute the objective terms for this iteration.
        Like=cp.asnumpy(cp.sum((Y-cp.dot(cp.dot(W,A),X))**2))
        RA=cp.asnumpy(cp.trace(A.T@invKS@A))
        if reg=="GP-VRDet":
            RX=cp.asnumpy(lamX*cp.linalg.det(cp.dot(X,X.T)))
        elif reg=="GP-VRLD":
            # Adapt nu from the smallest eigenvalue of X X^T + delta I.
            eig=np.linalg.eigvals(cp.asnumpy(cp.dot(X,X.T) + delta*cp.eye(Nk)))
            nu=1.0/np.min(np.abs(eig))
            print("nu=",nu)
            RX=cp.asnumpy(lamX*cp.log(cp.linalg.det(cp.dot(X,X.T)+delta*cp.eye(Nk))))
        elif reg=="GP-L2":
            RX=cp.asnumpy(lamX*cp.sum(X**2))
        elif reg=="GP":
            RX=0.0
        resprev=res
        res=Like+RA+RX
        diff=resprev - res
        resall.append([res,Like,RA,RX])
        print("Residual=",res,Like,RA,RX)
        print("Xave",cp.mean(X))
        print("Aave",cp.mean(A))
        #LogNMF(i,A,X,Nk)
        # Terminal plot of one row of X every 10 iterations (rows cycled).
        if np.mod(jj,10)==0:
            bandl=np.array(range(0,len(X[0,:])))
            import terminalplot
            terminalplot.plot(list(bandl),list(cp.asnumpy(X[np.mod(jj,Nk),:])))
        jj=jj+1
        # Periodic checkpoint and convergence-triggered early return.
        if np.mod(jj,Nsave) == 0:
            np.savez(filename+"j"+str(jj),cp.asnumpy(A),cp.asnumpy(X),resall)
        if diff < endc:
            np.savez(filename+"Ej"+str(jj),cp.asnumpy(A),cp.asnumpy(X),resall)
            return cp.asnumpy(A),cp.asnumpy(X), resall
    return cp.asnumpy(A),cp.asnumpy(X), resall
def plot_data(s, e, path="../data/test.txt"):
    """Plot rows [s:e) of a one-column text file as a terminal line plot.

    s, e -- slice bounds into the file's rows
    path -- path of the newline-separated value file

    BUG FIX: the original called tp.plot(list(data), range(e - s)), passing
    the values as x and the index as y — the reverse of every other plot()
    call in this codebase.  The index is now x and the data is y.
    """
    data = pd.read_csv(path, sep="\n", header=None)[0].values[s:e]
    tp.plot(list(range(e - s)), list(data))
""" Plots a ``.csv`` file to terminal. """ import os import platform import numpy as np import pandas as pd from terminalplot import plot assert platform.system() == 'Linux' assert os.path.isdir("series/") print( "The following files are available to print (by entering `series/<name>.csv`):" ) os.system("ls series/") PATH = input("Enter the path to a saved curve as a csv: ") RAW_SERIES = pd.read_csv(PATH) SERIES = np.array(RAW_SERIES) y = list(SERIES[:, 0]) # Assumes 2-dimensional data. x = [i for i in range(SERIES.shape[0])] plot(x, y) print("Series shape:", SERIES.shape)
def train_text(dataset_id, algorithm, problem='bc', n_iter=10):
    """Train a text classifier with 10-fold CV and dispatch model + results.

    dataset_id -- identifier passed to load_data.load_text
    algorithm  -- key into models.models selecting (preprocessor(s), model)
    problem    -- 'bc' binary, 'mc' multiclass, 'ml' multilabel
    n_iter     -- number of CV folds actually trained before stopping

    Side effects: saves the fitted preprocessor, model, and classification
    results through model_dispatcher, and draws an accuracy-vs-threshold
    curve in the terminal.  Relies on module-level ``thresholds``.
    """
    X, y = load_data.load_text(dataset_id)
    logger.debug('dataset loaded')
    performance = {'accuracy': [], 'log_loss': [], 'auroc': []}
    cm = []
    # models.models[algorithm] is either (preprocessor, model) or
    # (vectorizer, feature_selector, model); build a Pipeline in the latter
    # case.
    if len(models.models[algorithm]) == 2:
        preprocessor = models.models[algorithm][0]
    else:
        preprocessor = Pipeline([
            ('vectorizer', models.models[algorithm][0]),
            ('feature_selector', models.models[algorithm][1]),
        ])
    X = preprocessor.fit_transform(X, y)
    num_features = X.shape[1]
    # Recover the surviving vocabulary for persistence alongside the model.
    if type(preprocessor) == Pipeline:
        vocab = pd.Series(preprocessor['vectorizer'].get_feature_names())[
            preprocessor['feature_selector'].get_support()].values
    else:
        vocab = preprocessor.get_feature_names()
    model_dispatcher.save_preprocessing(
        preprocessor,
        num_features,
        vocab,
        algorithm,
        dataset_id,
    )
    logger.debug('data preprocessed')
    model = models.models[algorithm][-1]
    # Multilabel targets cannot be stratified; fall back to plain KFold.
    if problem == 'ml':
        kf = KFold(n_splits=10, shuffle=True, random_state=19)
    else:
        kf = StratifiedKFold(n_splits=10, shuffle=True, random_state=19)
    logger.debug('training started')
    iter_count = 0
    for train_indices, test_indices in kf.split(X, y):
        X_train = X[train_indices]
        y_train = y[train_indices]
        X_test = X[test_indices]
        y_test = y[test_indices]
        model = model.fit(X_train, y_train)
        y_pred = model.predict(X_test)
        y_pred_ = model.predict_proba(X_test)
        # Best-effort metrics: some estimators/targets cannot produce
        # log-loss or AUROC, so failures are deliberately swallowed.
        try:
            performance['log_loss'].append(log_loss(y_test, y_pred_))
            if problem in ['mc', 'ml']:
                performance['auroc'].append(
                    roc_auc_score(y_test, y_pred_, multi_class='ovr'))
            else:
                performance['auroc'].append(
                    roc_auc_score(y_test, y_pred_[:, 1]))
        except:
            pass
        # Accuracy swept over every probability threshold in ``thresholds``.
        performance['accuracy'].append([
            accuracy_score(y_test, (y_pred_ >= thresh).astype(bool))
            for thresh in thresholds
        ])
        if problem == 'ml':
            _ = multilabel_confusion_matrix(y_test, y_pred)
        else:
            _ = confusion_matrix(y_test, y_pred)
        cm.append(_)
        try:
            logger.debug(f'AUROC {performance["auroc"][-1]}')
        except:
            pass
        # Index 50 presumably corresponds to the 0.5 threshold — verify
        # against the module-level ``thresholds`` definition.
        logger.debug(
            f'Accuracy {performance["accuracy"][-1][50]}, threshold {50}')
        logger.debug(
            f'Max Accuracy {np.max(performance["accuracy"][-1])}, threshold {np.argmax(performance["accuracy"][-1])}'
        )
        # Hyper-parameter searches are fit once only; keep the best estimator.
        if type(model) in [GridSearchCV, RandomizedSearchCV]:
            logger.debug(model.best_params_)
            model_dispatcher.save_model(model.best_estimator_, algorithm,
                                        dataset_id)
            break
        iter_count += 1
        logger.debug(f'{iter_count} iterations done')
        if iter_count == n_iter:
            model_dispatcher.save_model(model, algorithm, dataset_id)
            break
    model_dispatcher.save_model(model, algorithm, dataset_id)
    # Average confusion matrices over folds; row-normalize for multilabel.
    cm = np.mean(cm, axis=0)
    if problem == 'ml':
        cm = [normalize(matrix, norm='l1') * 100 for matrix in cm]
    model_dispatcher.save_results_classification(classification_report(
        y_test, y_pred), cm, performance, algorithm, dataset_id,
        problem=problem)
    # print((np.mean(performance['accuracy'], axis=0)*100).shape)
    logger.debug(
        f"Accuracy: {np.mean(performance['accuracy'], axis=0)[50]}, threshold: {50}"
    )
    logger.debug(
        f"Max Accuracy: {np.max(np.mean(performance['accuracy'], axis=0))}, threshold: {np.argmax(np.mean(performance['accuracy'], axis=0))}"
    )
    # Terminal plot of mean accuracy (percent) against threshold.
    tplot.plot(thresholds.tolist(),
               (np.mean(performance['accuracy'], axis=0) * 100).tolist())
    logger.debug('model dispatched')
# Build a synthetic flow-size distribution and random flow records.
# Relies on ``bound``, ``gap``, ``exponent``, ``flow_size`` and the ``plot``
# flag defined earlier in the file (outside this chunk).
flows_info = collections.defaultdict(list)
group_size = bound // gap
# Power-law weights 1^e .. group_size^e, normalized so each bucket gets a
# proportional share of flow_size (minimum one flow per bucket).
pl_total = sum([i**exponent for i in range(1, group_size + 1)])
group = [
    i**exponent * flow_size // pl_total
    if i**exponent * flow_size // pl_total > 0 else 1
    for i in range(1, group_size + 1)
]
# Packet-count interval [gap*i, gap*(i+1)] for each bucket.
group_size_range = [[gap * i, gap * (i + 1)] for i in range(group_size)]
# Rounding above may change the total; recompute it.
flow_size = sum(group)
base_number = 0
if plot:
    print("***********************************")
    print("the distribution of flow as size of flow increase:")
    terminalplot.plot(range(group_size), group)
# Consume buckets from the largest-size end, emitting one random flow
# (random src/dst IPv4, random udp/tcp) per unit of bucket count.
while (group):
    sIP = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff)))
    dIP = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff)))
    protocal = ['udp', 'tcp'][random.randint(0, 1)]
    key = (sIP, dIP, protocal)
    # Value layout: [packet count drawn from the bucket's interval, 0, 0] —
    # the meaning of the two trailing zeros isn't visible here; verify at
    # the consumer.
    flows_info[key] = [
        random.randint(group_size_range[-1][0] + 1, group_size_range[-1][1]),
        0, 0
    ]
    base_number += flows_info[key][0]
    group[-1] -= 1
    if group[-1] == 0:
        group.pop()
        group_size_range.pop()
print("***********************************")
def print_ev_history(self):
    """Plot the EV history for the current engine state in the terminal.

    Does nothing when no history has been recorded for that state.
    """
    history = self.mc.ev_history[self.engine.s]
    if history:
        plot(range(len(history)), history, columns=50, rows=10)
import terminalplot as tpl

if __name__ == '__main__':
    # Continuously re-plot the first CSV column of the last 20 log lines.
    # NOTE(review): './Contoller/...' looks like a typo for 'Controller' —
    # left unchanged because the on-disk path may really be spelled this way.
    while True:
        with open('./Contoller/logData.csv', 'r') as fin:
            dataLines = fin.readlines()[-20:]
        # BUG FIX: the original used dataLines.index(element), which is
        # O(n^2) and returns the FIRST match, so duplicate log lines all
        # mapped to the same x value.  Use the positional index instead.
        x = list(range(len(dataLines)))
        y = [float(line.split(',')[0]) for line in dataLines]
        tpl.plot(x, y)