def __getitem__(self, index):
    'Generate one batch of data'
    # Generate indexes of the batch
    indexes = self.indexes[index * self.batch_size:(index + 1) * self.batch_size]
    # Loading Paths & Labels
    if self.label_type == 'attr':
        _paths, _labels_act, _labels_dom, _labels_val = getPaths(
            self.label_type, self.split_set, self.num_class)
        # Find Batch list of Loading Paths
        list_paths_temp = [_paths[k] for k in indexes]
        list_act_temp = [_labels_act[k] for k in indexes]
        list_dom_temp = [_labels_dom[k] for k in indexes]
        list_val_temp = [_labels_val[k] for k in indexes]
        # Generate data
        data, label = self.__data_generation_attr(list_paths_temp,
                                                  list_act_temp,
                                                  list_dom_temp,
                                                  list_val_temp)
    elif self.label_type == 'class':
        _paths, _labels_class = getPaths(self.label_type, self.split_set,
                                         self.num_class)
        # Find Batch list of Loading Paths
        list_paths_temp = [_paths[k] for k in indexes]
        list_class_temp = [_labels_class[k] for k in indexes]
        # Generate data
        data, label = self.__data_generation_class(list_paths_temp,
                                                   list_class_temp)
    return data, label
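# Usage sketch (illustrative only, not part of the original file): a generator
# exposing __len__/__getitem__/on_epoch_end follows the keras.utils.Sequence
# interface, so it can be handed directly to fit_generator.  The class name
# `DataGenerator`, its constructor arguments, and the compiled `model` below
# are assumptions made for this example.
train_gen = DataGenerator(label_type='attr', split_set='Train',
                          num_class=None, batch_size=64, shuffle=True)
valid_gen = DataGenerator(label_type='attr', split_set='Validation',
                          num_class=None, batch_size=64, shuffle=False)
model.fit_generator(generator=train_gen,
                    validation_data=valid_gen,
                    epochs=30,
                    use_multiprocessing=True,
                    workers=4)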
def on_epoch_end(self):
    'Updates indexes after each epoch'
    if self.label_type == 'attr':
        _paths, _, _, _ = getPaths(self.label_type, self.split_set, self.num_class)
    elif self.label_type == 'class':
        _paths, _ = getPaths(self.label_type, self.split_set, self.num_class)
    self.indexes = np.arange(len(_paths))
    if self.shuffle:
        np.random.seed(random_seed)
        np.random.shuffle(self.indexes)
def on_epoch_end(self):
    'Updates indexes after each epoch'
    _paths, _labels = getPaths(self.label_dir, self.split_set, self.emo_attr)
    self.indexes = np.arange(len(_paths))
    if self.shuffle:
        np.random.seed(random_seed)
        np.random.shuffle(self.indexes)
def createValloader(data_dirnames):
    dataloader_val = thermal_loader.ThermalTestDataLoader(*utils.getPaths(data_dirnames))
    val_loader = torch.utils.data.DataLoader(dataloader_val,
                                             batch_size=1,
                                             shuffle=False,
                                             num_workers=opt.num_workers,
                                             pin_memory=True,
                                             drop_last=False)
    return val_loader
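# Illustrative usage (not part of the original file): the loader yields one
# sample per step because batch_size=1, and drop_last=False keeps the final
# sample.  `opt.testroot_night` follows the option name used later in this
# script; the unpacking of each batch depends on ThermalTestDataLoader and is
# left opaque here.
val_loader = createValloader([opt.testroot_night])
for batch in val_loader:
    # each `batch` holds a single validation sample in a fixed order (shuffle=False)
    break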
def __init__(self, root_dir, label_dir, batch_size, split_set, emo_attr, shuffle=True):
    'Initialization'
    self.root_dir = root_dir
    self.label_dir = label_dir
    self.batch_size = batch_size
    self.split_set = split_set   # 'Train' or 'Validation'
    self.emo_attr = emo_attr     # 'Act', 'Dom' or 'Val'
    self.shuffle = shuffle
    # Loading Norm-Feature Parameters
    self.Feat_mean = loadmat('./NormTerm/feat_norm_means.mat')['normal_para']
    self.Feat_std = loadmat('./NormTerm/feat_norm_stds.mat')['normal_para']
    # Loading Norm-Label Parameters
    if emo_attr == 'Act':
        self.Label_mean = loadmat('./NormTerm/act_norm_means.mat')['normal_para'][0][0]
        self.Label_std = loadmat('./NormTerm/act_norm_stds.mat')['normal_para'][0][0]
    elif emo_attr == 'Dom':
        self.Label_mean = loadmat('./NormTerm/dom_norm_means.mat')['normal_para'][0][0]
        self.Label_std = loadmat('./NormTerm/dom_norm_stds.mat')['normal_para'][0][0]
    elif emo_attr == 'Val':
        self.Label_mean = loadmat('./NormTerm/val_norm_means.mat')['normal_para'][0][0]
        self.Label_std = loadmat('./NormTerm/val_norm_stds.mat')['normal_para'][0][0]
    # Loading Data Paths/Labels
    self._paths, self._labels = getPaths(label_dir, split_set, emo_attr)
    self.on_epoch_end()
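# A minimal sketch (assumption, not the repo's actual batch-generation method)
# of how the normalization terms loaded above are typically applied when a
# batch is built.  The feature step mirrors the test-time code elsewhere in
# this repo, data = (data - Feat_mean) / Feat_std; the label step applies the
# same z-normalization to the attribute target.
def _normalize_example(data, label, feat_mean, feat_std, label_mean, label_std):
    """z-normalize one acoustic feature matrix and its attribute label."""
    data = (data - feat_mean) / feat_std
    label = (label - label_mean) / label_std
    return data, label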
import numpy as np  # needed for np.sum / np.argsort / np.isin below
from keras.utils.np_utils import to_categorical
from keras.applications.resnet50 import preprocess_input
from keras.models import Model
import queue
import threading
from tqdm import tqdm

from networks import get_models, adversarial, null_loss, generator, discriminator
from utils import producer, getPaths, scale, mean

data_train = r"C:\Users\tgill\OneDrive\Documents\GD_AI\ArtGAN\wikipaintings_full\wikipaintings_train"
data_test = r"C:\Users\tgill\OneDrive\Documents\GD_AI\ArtGAN\wikipaintings_full\wikipaintings_train"

train_paths, y_train, classes = getPaths(data_train)
test_paths, y_test, classes = getPaths(data_test)
target_size = (128, 128)

# Rank the 25 classes by frequency (negated counts so argsort lists the most
# frequent classes first), keep only the `nb_select` most frequent classes,
# and remap their labels to contiguous rank indices.
ls = [-np.sum(y_train == i) for i in range(25)]
arg = np.argsort(ls)            # class ids sorted from most to least frequent
classement = np.argsort(arg)    # frequency rank of each original class id
nb_select = 1
select = arg[:nb_select]
idx_select = np.isin(y_train, select)
train_paths = train_paths[idx_select]
y_train = y_train[idx_select]
y_train = classement[y_train]
print(train_paths.shape)
    num_class = args['num_class']
except:
    pass

Feat_mean_All = loadmat('./NormTerm/feat_norm_means.mat')['normal_para']
Feat_std_All = loadmat('./NormTerm/feat_norm_stds.mat')['normal_para']
Label_mean_act = loadmat('./NormTerm/act_norm_means.mat')['normal_para'][0][0]
Label_std_act = loadmat('./NormTerm/act_norm_stds.mat')['normal_para'][0][0]
Label_mean_dom = loadmat('./NormTerm/dom_norm_means.mat')['normal_para'][0][0]
Label_std_dom = loadmat('./NormTerm/dom_norm_stds.mat')['normal_para'][0][0]
Label_mean_val = loadmat('./NormTerm/val_norm_means.mat')['normal_para'][0][0]
Label_std_val = loadmat('./NormTerm/val_norm_stds.mat')['normal_para'][0][0]

# Testing Task
if label_type == 'attr':
    test_file_path, test_file_tar_act, test_file_tar_dom, test_file_tar_val = getPaths(
        label_type, 'Test', num_class)
elif label_type == 'class':
    test_file_path, test_file_tar_class = getPaths(label_type, 'Test', num_class)

# Testing Data & Label
Test_Data = []
Test_Label_Act = []
Test_Label_Dom = []
Test_Label_Val = []
Test_Label_Class = []
for i in range(len(test_file_path)):
    data = loadmat(root_dir + test_file_path[i].replace('.wav', '.mat'))['Audio_data']
    data = (data - Feat_mean_All) / Feat_std_All   # Feature Normalization
    data = data.reshape(-1)
conf_segnet_model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
opt.epoch = checkpoint['epoch']
best_iou = checkpoint['best_iou']

print('Create validation loader nighttime')
val_loader_night = createValloader([opt.testroot_night])
print('Create validation loader daytime')
val_loader_day = createValloader([opt.testroot_day])
print('Create validation loader both')
val_loader_combined = createValloader([opt.testroot_night, opt.testroot_day])
test_stamps = getTestStamps(*utils.getPaths([opt.testroot_night, opt.testroot_day]))
print('Create training loader')
train_loader = createDataloader(test_stamps)

# Loss plot
total_loss_avgmeter_phase1 = AverageMeter()
total_loss_avgmeter_phase2 = AverageMeter()
critic_loss_avgmeter = AverageMeter()
seg_loss_avgmeter = AverageMeter()
conf_loss_avgmeter = AverageMeter()

if opt.eval != "":
    print('Starting evaluation on: %s....' % (opt.eval))
def __len__(self):
    'Denotes the number of batches per epoch'
    return int(len(getPaths(self.label_dir, self.split_set, self.emo_attr)[0]) / self.batch_size)
def __len__(self):
    'Denotes the number of batches per epoch'
    return int(len(getPaths(self.label_type, self.split_set, self.num_class)[0]) / self.batch_size)
    num_class = args['num_class']
except:
    pass

# Hidden Features Paths Setting
if label_type == 'attr':
    root_dir = './Fusion_Features/3-attribute'
elif label_type == 'class':
    if num_class == '5-class':
        root_dir = './Fusion_Features/5-class'
    elif num_class == '8-class':
        root_dir = './Fusion_Features/8-class'

# Loading Paths & Labels
if label_type == 'class':
    paths_test, labels_class_test = getPaths(label_type, split_set='Test', num_class=num_class)
elif label_type == 'attr':
    # Loading Norm-Label
    Label_mean_act = loadmat('./NormTerm/act_norm_means.mat')['normal_para'][0][0]
    Label_std_act = loadmat('./NormTerm/act_norm_stds.mat')['normal_para'][0][0]
    Label_mean_dom = loadmat('./NormTerm/dom_norm_means.mat')['normal_para'][0][0]
    Label_std_dom = loadmat('./NormTerm/dom_norm_stds.mat')['normal_para'][0][0]
    Label_mean_val = loadmat('./NormTerm/val_norm_means.mat')['normal_para'][0][0]
    Label_std_val = loadmat('./NormTerm/val_norm_stds.mat')['normal_para'][0][0]
    paths_test, labels_act_test, labels_dom_test, labels_val_test = getPaths(
        label_type, split_set='Test', num_class=num_class)

# Loading Hidden Features (Testing set)
X_Test = []
Y_Test_Class = []
Y_Test_Act = []
Y_Test_Dom = []
# Loading Norm-Parameters
Feat_mean = loadmat('./NormTerm/feat_norm_means.mat')['normal_para']
Feat_std = loadmat('./NormTerm/feat_norm_stds.mat')['normal_para']
if emo_attr == 'Act':
    Label_mean = loadmat('./NormTerm/act_norm_means.mat')['normal_para'][0][0]
    Label_std = loadmat('./NormTerm/act_norm_stds.mat')['normal_para'][0][0]
elif emo_attr == 'Dom':
    Label_mean = loadmat('./NormTerm/dom_norm_means.mat')['normal_para'][0][0]
    Label_std = loadmat('./NormTerm/dom_norm_stds.mat')['normal_para'][0][0]
elif emo_attr == 'Val':
    Label_mean = loadmat('./NormTerm/val_norm_means.mat')['normal_para'][0][0]
    Label_std = loadmat('./NormTerm/val_norm_stds.mat')['normal_para'][0][0]

# Regression Task
test_file_path, test_file_tar = getPaths(label_dir, split_set='Test', emo_attr=emo_attr)
#test_file_path, test_file_tar = getPaths(label_dir, split_set='Validation', emo_attr=emo_attr)

# Setting Online Prediction Model Graph (predict sentence by sentence rather than a data batch)
time_step = 62   # same as the number of frames within a chunk (i.e., m)
feat_num = 130   # number of LLD (low-level descriptor) features
if atten_type == 'GatedVec':
    # LSTM Layer
    inputs = Input((time_step, feat_num))
    encode = LSTM(units=feat_num, activation='tanh', dropout=0.5,
                  return_sequences=True)(inputs)
    encode = LSTM(units=feat_num,
""" import numpy as np import os from scipy.io import loadmat, savemat import random from utils import getPaths # Ignore warnings & Fix random seed import warnings warnings.filterwarnings("ignore") random.seed(999) if __name__ == '__main__': data_root = '/media/winston/UTD-MSP/Speech_Datasets/MSP-Face/Features/OpenSmile_func_IS13ComParE/feat_mat/' fnames, Train_Label_act, Train_Label_dom, Train_Label_val = getPaths( label_type='attr', split_set='Train', num_class=None) # Output normalize parameters folder based on the training set if not os.path.isdir('./NormTerm/'): os.makedirs('./NormTerm/') # Acoustic-Feature Normalization based on Training Set Train_Data = [] for i in range(len(fnames)): data = loadmat(data_root + fnames[i].replace('.wav', '.mat'))['Audio_data'] data = data.reshape(-1) Train_Data.append(data) Train_Data = np.array(Train_Data) # Feature Normalization Parameters
except:
    pass

# Hidden Features Paths Setting
if label_type == 'attr':
    root_dir = './Fusion_Features/3-attribute'
elif label_type == 'class':
    if num_class == '5-class':
        root_dir = './Fusion_Features/5-class'
    elif num_class == '8-class':
        root_dir = './Fusion_Features/8-class'

# Loading Paths & Labels
if label_type == 'class':
    paths_valid, labels_class_valid = getPaths(label_type, split_set='Validation', num_class=num_class)
    paths_train, labels_class_train = getPaths(label_type, split_set='Train', num_class=num_class)
elif label_type == 'attr':
    # Loading Norm-Label
    Label_mean_act = loadmat('./NormTerm/act_norm_means.mat')['normal_para'][0][0]
    Label_std_act = loadmat('./NormTerm/act_norm_stds.mat')['normal_para'][0][0]
    Label_mean_dom = loadmat('./NormTerm/dom_norm_means.mat')['normal_para'][0][0]
    Label_std_dom = loadmat('./NormTerm/dom_norm_stds.mat')['normal_para'][0][0]
    Label_mean_val = loadmat(