def dashboard():
    data = None
    d = Read_Data()
    d = d.read()
    # Already-authenticated admin goes straight to the admin dashboard.
    if 'user' in session and session['user'] == params['admin_user']:
        return render_template('dashboard.html', data=d)
    # Already-authenticated regular user: optionally store a submitted message,
    # then show the user dashboard.
    elif 'user' in session and session['user'] == params['username']:
        if request.method == 'POST':
            name = request.form.get('Name')
            email = request.form.get('Email')
            msg = request.form.get('Msg')
            data = {'Name': name, 'Email': email, 'Messages': msg}
            check_file = False
            try:
                df = pd.read_csv("Messages.csv", index_col=0)
                check_file = True
            except FileNotFoundError:
                # No message log yet: create the CSV from this first entry.
                df = pd.DataFrame(data, index=[0])
                df.to_csv("Messages.csv")
            if check_file:
                # DataFrame.append was removed in pandas 2.0; concat is the
                # supported way to add a row.
                df = pd.concat([df, pd.DataFrame(data, index=[0])], ignore_index=True)
                df.to_csv("Messages.csv")
        usr = params['username']
        return render_template('user_dashboard.html', data=d, usr=usr)
    # Not logged in: handle the login form.
    msg = None
    if request.method == 'POST':
        usrname = request.form.get('usrname')
        password = request.form.get('pass')
        if usrname == params['admin_user'] and password == params['admin_pass']:
            session['user'] = usrname
            return render_template('dashboard.html', data=d)
        if usrname == params['username'] and password == params['password']:
            session['user'] = usrname
            usr = usrname
            return render_template('user_dashboard.html', data=d, usr=usr)
        else:
            # Invalid credentials.
            msg = 0
            return render_template('Login.html', msg=msg)
    return render_template('Login.html', msg=msg)
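# The route above assumes a Flask app with session support and a `params` dict loaded
# from a site config. A minimal sketch of that wiring, under the assumption that the
# config lives in a JSON file (the file name "config.json" and the secret key below
# are illustrative, not taken from the original repo):
#
#   import json
#   import pandas as pd
#   from flask import Flask, render_template, request, session
#
#   with open("config.json") as f:
#       params = json.load(f)["params"]
#
#   app = Flask(__name__)
#   app.secret_key = "replace-with-a-random-secret"   # required for session[...]
#   app.add_url_rule("/dashboard", view_func=dashboard, methods=["GET", "POST"])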
def grid_search(nb_epoch=200):
    global NUM_CLASSES, input_shape
    file_path = "./gridSearch/fft_params/"
    nfft_try = [4096, 2048, 1024]
    overlap_try = [0.9, 0.5, 0.75]
    brange_try = [8, 16]
    K.clear_session()
    # Grid search over spectrogram parameters
    for nfft_val in nfft_try:
        for overlap_val in overlap_try:
            for brange_val in brange_try:
                # Define file name to store results
                fname = file_path + "Exp_" + str(nfft_val) + "_" + str(overlap_val) + "_" + str(brange_val)
                # Read and format data - all gestures
                gd = Read_Data.GestureData(gest_set=1)
                print("Reading data")
                x, y, user, input_shape, lab_enc = gd.compile_data(nfft=nfft_val, overlap=overlap_val,
                                                                   brange=brange_val, keras_format=True,
                                                                   plot_spectogram=False,
                                                                   baseline_format=False)
                NUM_CLASSES = len(lab_enc.classes_)
                print("NFFT_Val:", nfft_val, "Overlap_Val:", overlap_val, "Brange_Val:", brange_val)
                print("Train the model")
                train_val_hist = loso_gridSearch_cv(x, y, user, lab_enc, batch_size=64,
                                                    nb_epoch=nb_epoch, file_path=fname)
                plot_train_hist(train_val_hist, file_path=fname)
                K.clear_session()
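# grid_search() sweeps 3 NFFT sizes x 3 overlaps x 2 bin ranges = 18 spectrogram
# configurations, re-reading the data and running leave-one-subject-out
# cross-validation (loso_gridSearch_cv) for each. A hypothetical driver, not part of
# the original file:
#
#   if __name__ == "__main__":
#       grid_search(nb_epoch=200)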
def airware_baseline_data():
    gd = Read_Data.GestureData(gest_set=1)
    x, y, user, lab_enc = gd.compile_data(nfft=4096, overlap=0.5, brange=16,
                                          keras_format=False,
                                          plot_spectogram=False,
                                          baseline_format=True)
    return x, y, user, lab_enc
def airware_data(gest_set=1):
    param_list = HyperParams()
    gd = Read_Data.GestureData(gest_set=gest_set)
    x, y, user, input_shape, lab_enc = gd.compile_data(
        nfft=param_list.NFFT_VAL, overlap=param_list.OVERLAP,
        brange=param_list.BRANGE, max_seconds=2.5,
        keras_format=True, plot_spectogram=False, baseline_format=False)
    num_classes = len(lab_enc.classes_)
    param_list.input_shape = input_shape
    param_list.num_classes = num_classes
    return x, y, user, lab_enc, param_list
def airware_data():
    param_list = HyperParams()
    gd = Read_Data.GestureData(gest_set=1)
    x, y, user, input_shape, lab_enc = gd.compile_data(
        nfft=param_list.NFFT_VAL, overlap=param_list.OVERLAP,
        brange=param_list.BRANGE, max_seconds=2.5,
        keras_format=True, plot_spectogram=False, baseline_format=False)
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3,
                                                        stratify=None,
                                                        random_state=234)
    num_classes = len(lab_enc.classes_)
    param_list.input_shape = input_shape
    param_list.num_classes = num_classes
    return x_train, x_test, y_train, y_test, param_list
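# The two airware_data variants (presumably from different modules) hand data back in
# different shapes: airware_data(gest_set=...) returns the full set plus per-sample
# user ids for subject-wise cross-validation, while the variant above returns a random
# 70/30 train/test split. A hedged usage sketch; build_model is a hypothetical helper:
#
#   x, y, user, lab_enc, params = airware_data(gest_set=1)
#   print(params.input_shape, params.num_classes)
#
#   x_train, x_test, y_train, y_test, params = airware_data()
#   # model = build_model(params.input_shape, params.num_classes)
#   # model.fit(x_train, y_train, validation_data=(x_test, y_test))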
# Seed CUDA and force deterministic cuDNN behaviour for reproducibility.
torch.cuda.manual_seed_all(seed)
torch.backends.cudnn.enabled = False
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
seed_index = seed_index + 1

print('Reading data with seed: ' + str(seed))
data = Read_Data(column_names=args, data_name=args.data_name, data_file=args.data_file,
                 train_ratio=args.training_ratio, test_val_ratio=args.test_val_ratio,
                 med_flag=args.medical_data, weight_flag=args.weight_class, seed=seed,
                 upos=args.upos, umed=args.umed, uad=args.uad,
                 qk=args.question_knowledge, cluster_size=args.cluster_number,
                 wordnet=args.wordnet, class_number=args.class_number)

print('Building Embedding Matrix')
embedding = Create_Embedding(
    file_path=args.embd_file, embd_file_mimic=args.embd_file_mimic,
    word2index=data.word2index, custom=args.embedding_flag,