Example 1
    def load_data(self):

        if self.partition_modus == 'train':
            # All proportion settings currently point at the same folder;
            # consolidated from four identical branches.
            if self.config["proportions"] in (0.2, 0.5, 0.75, 1.0):
                path = '/data/sawasthi/NTU/trainData_pose_1/'
                #path = 'S:/Datasets/nturgbd_skeletons_s001_to_s017/train/'
                #path = 'S:/MS A&R/4th Sem/Thesis/PAMAP2_Dataset/pkl files'
                #path = "S:/MS A&R/4th Sem/Thesis/LaRa/OMoCap data/Train_data/"
                dataset = CustomDataSet(path)
            else:
                # Previously, an unrecognised value fell through and left
                # `dataset` unbound; fail explicitly instead.
                raise ValueError("Unsupported proportions value: {}".format(
                    self.config["proportions"]))

        elif self.partition_modus == 'val':
            # NOTE: validation and test currently reuse the training folder.
            path = '/data/sawasthi/NTU/trainData_pose_1/'
            dataset = CustomDataSet(path)
        elif self.partition_modus == 'test':
            path = '/data/sawasthi/NTU/trainData_pose_1/'
            dataset = CustomDataSet(path)
        else:
            raise ("Wrong Dataset partition settup")
        logging.info('        Dataloader: Processing dataset files ...')
        return dataset
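
All four examples depend on a CustomDataSet class that is never shown. Below is a minimal sketch of what such a class might look like, assuming each file in the folder is one pickled window stored as a dict with 'data' and 'label' keys; the class name matches the snippets, but the file layout and keys are guesses, not the author's actual implementation.

import os
import pickle
import torch
from torch.utils.data import Dataset

class CustomDataSet(Dataset):
    # Hypothetical implementation: one pickled window per file in `path`.
    def __init__(self, path):
        self.path = path
        self.files = sorted(os.listdir(path))

    def __len__(self):
        return len(self.files)

    def __getitem__(self, idx):
        with open(os.path.join(self.path, self.files[idx]), 'rb') as f:
            window = pickle.load(f)  # assumed: {'data': ndarray, 'label': int}
        data = torch.tensor(window['data'], dtype=torch.float32)
        label = torch.tensor(window['label'], dtype=torch.long)
        return data, label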
Example 2
from torch.utils.data import DataLoader

def getTrainData(path, batch_size):
    """Read every batch from the dataset at `path` into an in-memory list."""
    train_data = []
    #path = '/data/sawasthi/data/trainData/'
    #path = 'S:/MS A&R/4th Sem/Thesis/LaRa/IMU data/IMU data/Windows2/'
    train_dataset = CustomDataSet(path)
    train_loader = DataLoader(train_dataset,
                              shuffle=False,
                              batch_size=batch_size,
                              num_workers=0,
                              pin_memory=True,
                              drop_last=True)
    for input_seq in train_loader:
        train_data.append(input_seq)

    return train_data
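
A call might look like the line below (the path is taken from the comment inside the function; the batch size is arbitrary). Note that the function materialises every batch in RAM at once, so it is only practical when the whole dataset fits in memory; shuffle=False and drop_last=True mean the batches come back in file order and any final partial batch is discarded.

batches = getTrainData('/data/sawasthi/data/trainData/', batch_size=100)
print(len(batches), "batches cached in memory")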
Example 3
        optimizer = optim.RMSprop(model.parameters(),
                                  lr=learning_rate,
                                  alpha=0.9,
                                  weight_decay=0.0005,
                                  momentum=0.9)
        #lmbda = lambda epoch: 0.95
        #scheduler = lr_scheduler.StepLR(optimizer, step_size=1,gamma=0.95)
        #scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, patience=5)

        #optimizer = optim.SGD(model.parameters(), lr=0.0001, momentum=0.9)
        model_path = '/data/sawasthi/LaraMM/model/Laramm_model_cnn.pth'
        #model_path = 'S:/MS A&R/4th Sem/Thesis/LaRa/OMoCap data/model.pth'
        path = '/data/sawasthi/LaraMM/sequences_train'
        #path = 'S:/MS A&R/4th Sem/Thesis/LaRa/IMU data/IMU data/Windows2/'
        #path = "S:/MS A&R/4th Sem/Thesis/LaRa/OMoCap data/Train_data/"
        train_dataset = CustomDataSet(path)
        dataLoader_train = DataLoader(train_dataset,
                                      shuffle=True,
                                      batch_size=batch_size,
                                      num_workers=0,
                                      pin_memory=True,
                                      drop_last=True)

        # Validation data
        path = '/data/sawasthi/LaraMM/sequences_val'
        #path = 'S:/MS A&R/4th Sem/Thesis/LaRa/IMU data/IMU data/Windows/'
        #path = "S:/MS A&R/4th Sem/Thesis/LaRa/OMoCap data/Test_data/"
        validation_dataset = CustomDataSet(path)
        dataLoader_validation = DataLoader(validation_dataset,
                                           shuffle=False,
                                           batch_size=batch_size,
                                           num_workers=0,
                                           pin_memory=True,
                                           drop_last=True)
        # (The call was cut off in the original; the last three arguments
        #  are reconstructed to mirror the training loader above.)
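
For context, a minimal epoch loop tying the RMSprop optimizer and the two loaders together might look like the sketch below. It assumes each batch is a (data, label) pair, that model returns class logits, and that criterion is nn.CrossEntropyLoss as in Example 4; epochs is a hypothetical variable. This is an illustration, not the author's training code.

for epoch in range(epochs):
    model.train()
    for data, label in dataLoader_train:
        optimizer.zero_grad()
        loss = criterion(model(data), label)
        loss.backward()
        optimizer.step()

    # Quick accuracy check on the held-out split.
    model.eval()
    correct = total = 0
    with torch.no_grad():
        for data, label in dataLoader_validation:
            pred = model(data).argmax(dim=1)
            correct += (pred == label).sum().item()
            total += label.numel()
    print("epoch {}: val acc {:.3f}".format(epoch, correct / total))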
Example 4
    # Equivalent to np.random.normal followed by conversion to a float tensor.
    noise = torch.randn(batch_size, 1, ws, features)
    #noise = torch.randn(batch_size, features, ws)  # alternative layout
    #criterion = nn.NLLLoss()
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.Adam(model.parameters(), lr=0.0001)
    #model_path = '/data/sawasthi/data/model/model.pth'
    model_path = '/data/sawasthi/data/MoCAP_data/model/model.pth'
    #model_path = 'S:/MS A&R/4th Sem/Thesis/LaRa/OMoCap data/'
    #path = 'S:/MS A&R/4th Sem/Thesis/LaRa/IMU data/IMU data/Windows2/'
    path = '/data/sawasthi/data/trainData/'
    #path = '/data/sawasthi/data/MoCAP_data/trainData/'
    #path = "S:/MS A&R/4th Sem/Thesis/LaRa/OMoCap data/Train_data/"
    train_dataset = CustomDataSet(path)
    dataLoader_train = DataLoader(train_dataset,
                                  shuffle=True,
                                  batch_size=batch_size,
                                  num_workers=0,
                                  pin_memory=True,
                                  drop_last=True)

    print("preparing data for normalisation")
    # Normalise the data
    value = []
    for k in range(999):
        temp_list = []
        max = -9999
        min = 9999
        temp_list.append(max)
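
The snippet is cut off here. A plausible continuation is sketched below, under the assumptions that each batch has shape (batch, 1, ws, features) like the noise tensor above, that batches are (data, label) pairs, and that value[k] holds a running [max, min] pair per channel. This is a reconstruction, not the original code.

# Scan the training set once, updating the per-channel extrema.
for data, _ in dataLoader_train:
    flat = data.reshape(-1, data.shape[-1])        # -> (N, features)
    batch_max = flat.max(dim=0).values
    batch_min = flat.min(dim=0).values
    for k in range(flat.shape[-1]):
        value[k][0] = max(value[k][0], batch_max[k].item())
        value[k][1] = min(value[k][1], batch_min[k].item())

def normalize(data, value):
    # Min-max scale each channel into [0, 1] using the collected extrema.
    for k in range(data.shape[-1]):
        hi, lo = value[k][0], value[k][1]
        if hi > lo:
            data[..., k] = (data[..., k] - lo) / (hi - lo)
    return data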