def split_data(classes, k, i, the_names, path, spex, batch_size=1, class_on_char=0):
    """Per-class k-fold split: fold i is held out for validation, the rest is training."""
    np.random.shuffle(the_names)
    class_names = []
    class_names_val = []
    for c in classes:
        # Pick out the file names belonging to class c (the class is encoded in
        # character class_on_char of the name)
        names = [idx for idx in the_names if idx[class_on_char].lower() == c.lower()]
        list_names = np.array_split(names, k)
        # All folds except fold i go to training; fold i becomes the validation set
        class_names.extend(np.hstack(np.delete(list_names, i, 0)).transpose())
        class_names_val.extend(list_names[i])
    np.random.shuffle(class_names)
    np.random.shuffle(class_names_val)
    genVal = signalLoader(class_names_val, path, spex, batch_size=batch_size, class_on_char=class_on_char)
    gen = signalLoader(class_names, path, spex, batch_size=batch_size, class_on_char=class_on_char)
    return (gen, genVal, len(class_names), len(class_names_val), class_names_val)
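# A minimal, self-contained sketch (not part of the original code) of the fold
# arithmetic used in split_data above, run on hypothetical dummy names so the
# train/validation split can be inspected without signalLoader.
import numpy as np

dummy_names = np.array(['A1', 'A2', 'A3', 'A4', 'A5', 'A6'])  # hypothetical file names
k, i = 3, 1                                # 3 folds, hold out fold index 1
folds = np.array_split(dummy_names, k)     # [['A1' 'A2'], ['A3' 'A4'], ['A5' 'A6']]
train = np.hstack(np.delete(folds, i, 0))  # folds 0 and 2 -> ['A1' 'A2' 'A5' 'A6']
val = folds[i]                             # fold 1        -> ['A3' 'A4']
print(train, val)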
if name[0] == 'B':
    labels.append([0, 1, 0])
if name[0] == 'C':
    labels.append([0, 0, 1])

k_folds = 8
i = 0
length = len(labels)
indices = range(0, length)
list_names = np.array_split(names, k_folds)
val_list_names = list_names[i]
list_names = np.hstack(np.delete(list_names, i, 0)).transpose()
list_labels = np.array_split(labels, k_folds)
val_list_labels = list_labels[i]
list_labels = np.vstack(np.delete(list_labels, i, 0))

inputVal = signalLoader(nchan, val_list_names, val_list_labels, path())
input = signalLoader(nchan, list_names, list_labels, path())  # NB: shadows the built-in input()

img_size = (L, nchan)
last_conv_layer_name = "pooling"
classifier_layer_names = ["flatten", "dense"]

# Make model
model = define_model(nchan, L, Fs)
model.load_weights(
    "C:/Users/Oskar/Documents/GitHub/Exjobb/logs/model_check_points/20210204-103301/fold1/cp-0005.ckpt"
)

date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
checkpoint_path_fold = checkpoint_path + "/gradCAM" + date + "/cp-{epoch:04d}.ckpt"
cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_path_fold,
                                                 save_weights_only=True,
                                                 verbose=1)
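# A minimal sketch (not in the original code) of the Grad-CAM heatmap computation
# that last_conv_layer_name appears to set up, following the standard tf.keras
# recipe. make_gradcam_heatmap is a hypothetical helper; it assumes the layer
# named "pooling" produces a 4-D (batch, h, w, channels) activation map.
def make_gradcam_heatmap(signal, model, last_conv_layer_name):
    # Model mapping the input to both the last conv activations and the predictions
    grad_model = tf.keras.models.Model(
        model.inputs, [model.get_layer(last_conv_layer_name).output, model.output]
    )
    with tf.GradientTape() as tape:
        conv_output, preds = grad_model(signal)
        top_class_channel = preds[:, tf.argmax(preds[0])]
    # Gradient of the predicted class score w.r.t. the conv feature map
    grads = tape.gradient(top_class_channel, conv_output)
    # Channel-wise importance weights, then a weighted sum over feature maps
    pooled_grads = tf.reduce_mean(grads, axis=(0, 1, 2))
    heatmap = tf.reduce_sum(conv_output[0] * pooled_grads, axis=-1)
    # Keep only positive influence and normalise to [0, 1]
    return tf.maximum(heatmap, 0) / (tf.reduce_max(heatmap) + 1e-8)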
VAL_ACC = []
VAL_LOSS = []
k_folds = 5
for i in range(0, k_folds):
    print("Fold number " + str(i + 1) + "!")
    length = len(labels)
    indices = range(0, length)
    list_names = np.array_split(names, k_folds)
    val_list_names = list_names[i]
    list_names = np.hstack(np.delete(list_names, i, 0)).transpose()
    list_labels = np.array_split(labels, k_folds)
    val_list_labels = list_labels[i]
    # vstack keeps the one-hot label rows as shape (n, 3)
    list_labels = np.vstack(np.delete(list_labels, i, 0))

    data_generatorVal = signalLoader(nchan, val_list_names, val_list_labels)
    data_generator = signalLoader(nchan, list_names, list_labels)
    data_generatorPred = signalLoader(nchan, prednames, predlabels)
    #testplot = np.loadtxt(path() + "Asimulated_test_1", delimiter=',')

    # Model following the structure in Zhao19
    model = tensorflow.keras.Sequential()
    model.add(layers.InputLayer((L, nchan), batch_size=1))
    # Time-frequency layer
    model.add(MorletConv([L, nchan], T))
    # Spatial convolution? kernel_size specifies the spatial convolution according to Zhao19
    model.add(layers.Conv2D(filters=25, kernel_size=[1, nchan], activation='elu'))
    # The rest of the network
    model.add(layers.Permute((3, 1, 2)))
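    # A sketch (not in the original fragment) of how VAL_ACC and VAL_LOSS could be
    # filled once model.fit has run for the fold; the 'val_accuracy' key assumes
    # the model is compiled with accuracy as a metric.
    # history = model.fit(data_generator, validation_data=data_generatorVal, ...)
    # VAL_ACC.append(history.history['val_accuracy'][-1])
    # VAL_LOSS.append(history.history['val_loss'][-1])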
labels.append([0, 1])

k_folds = 5
date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
for i in range(0, k_folds - 4):  # only the first fold is run here
    print("Fold number " + str(i + 1) + "!")
    length = len(labels)
    indices = range(0, length)
    list_names = np.array_split(names, k_folds)
    val_list_names = list_names[i]
    list_names = np.hstack(np.delete(list_names, i, 0)).transpose()
    list_labels = np.array_split(labels, k_folds)
    val_list_labels = list_labels[i]
    list_labels = np.vstack(np.delete(list_labels, i, 0))

    data_generatorVal = signalLoader(nchan, val_list_names, val_list_labels, path(),
                                     data_aug=False, doDownsampling=False)
    data_generator = signalLoader(nchan, list_names, list_labels, path(),
                                  data_aug=True, doDownsampling=False)
    print(list_names)
    print(val_list_names)

    tensorboard_callback = load_tensorboard(who, date, i)
    model = define_model(nchan, L, Fs)
    #es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=2)
    history = model.fit(data_generator,
                        validation_data=data_generatorVal,
                        steps_per_epoch=len(list_labels),
                        validation_steps=len(val_list_labels),
                        epochs=50,
                        callbacks=[tensorboard_callback])
    model.summary()
if name[0] == 'C':
    labels.append([0, 0, 1])

date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
k_folds = 7
for i in range(0, k_folds):
    print("Fold number " + str(i + 1) + "!")
    length = len(labels)
    indices = range(0, length)
    list_names = np.array_split(names, k_folds)
    val_list_names = list_names[i]
    list_names = np.hstack(np.delete(list_names, i, 0)).transpose()
    list_labels = np.array_split(labels, k_folds)
    val_list_labels = list_labels[i]
    list_labels = np.vstack(np.delete(list_labels, i, 0))

    data_generatorVal = signalLoader(nchan, val_list_names, val_list_labels, path())
    data_generator = signalLoader(nchan, list_names, list_labels, path())

    tensorboard_callback = load_tensorboard(who, date, i)
    model = define_model_R(nchan, L, Fs)
    #es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=2)
    history = model.fit(data_generator,
                        validation_data=data_generatorVal,
                        steps_per_epoch=len(list_labels),
                        validation_steps=len(val_list_labels),
                        epochs=epochs,
                        callbacks=[tensorboard_callback])
    model.summary()
print("Fold number " + str(i + 1) + "!") length = len(labels) indices = range(0, length) list_names = np.array_split(names, k_folds) val_list_names = list_names[i] list_names = np.hstack(np.delete(list_names, i, 0)).transpose() list_labels = np.array_split(labels, k_folds) val_list_labels = list_labels[i] list_labels = np.vstack(np.delete(list_labels, i, 0)) # if data_aug: # val_list_labels = np.repeat(val_list_labels,2,axis=0) # val_list_names = np.repeat(val_list_names,2) # list_labels = np.repeat(list_labels,2,axis=0) # list_names = np.repeat(list_names,2) data_generatorTest = signalLoader(nchan, val_list_names, val_list_labels, pathPred()) data_generator = signalLoader(nchan, list_names, list_labels, path(), data_aug=data_aug) tensorboard_callback = load_tensorboard(who, date, i) #es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=2) checkpoint_path_fold = checkpoint_path + date + "/fold" + str( i + 1) + "/cp-{epoch:04d}.ckpt" check_point_dir = os.path.dirname(checkpoint_path_fold) cp_callback = tensorflow.keras.callbacks.ModelCheckpoint( filepath=checkpoint_path_fold, save_weights_only=True, verbose=1) model = define_model(nchan, L, Fs)