def __init__(self, data, folder, network, window, skip, out_joints, in_joints, batch_size, lr, device):
    """Set up paths, checkpoint folder, network, optimizer and data loaders.

    Args:
        data: dataset name used to locate the train/val csv folders.
        folder: checkpoint subfolder name under models_indirect.
        network: the torch module to train (moved onto `device`).
        window, skip: windowing parameters forwarded to indirectDataset.
        out_joints: joints the model predicts; in_joints: joints used as input.
        batch_size, lr, device: usual training hyper-parameters.
    """
    self.train_path = '../data/csv/train/' + data + '/'
    self.val_path = '../data/csv/val/' + data + '/'

    self.root = Path('checkpoints')
    self.model_root = self.root / "models_indirect" / folder

    self.joints = out_joints
    self.num_joints = len(out_joints)
    self.device = device
    self.batch_size = batch_size
    self.network = network.to(device)

    # Create the checkpoint directory; tolerate it already existing.
    try:
        self.model_root.mkdir(mode=0o777, parents=False)
    except OSError:
        print("Model path exists")

    self.optimizer = torch.optim.SGD(self.network.parameters(), lr)
    self.scheduler = ReduceLROnPlateau(self.optimizer, verbose=True)

    self.train_loader = DataLoader(
        dataset=indirectDataset(self.train_path, window, skip, in_joints),
        batch_size=batch_size, shuffle=True)
    self.val_loader = DataLoader(
        dataset=indirectDataset(self.val_path, window, skip, in_joints),
        batch_size=batch_size, shuffle=False)

    self.loss_fn = torch.nn.MSELoss()
    init_weights(self.network)

    self.epoch = 1
    self.best_loss = 100
def main():
    """Evaluate per-joint trocar LSTM models for several training durations.

    For each duration tag, loads one pretrained torqueLstmNetwork per joint,
    runs the (unshuffled) dataset through every network, and writes a CSV of
    [time, per-joint predictions].

    NOTE(review): relies on module-level globals — path, window, skip,
    in_joints, batch_size, root, net, epoch_to_use, device, JOINTS, data.
    """
    for t in ['60', '120', '180', '240', '300']:  # '20', '40',
        preprocess = 'filtered_torque_' + t + 's'  # sys.argv[3]

        dataset = indirectDataset(path, window, skip, in_joints,
                                  filter_signal=True, is_rnn=True)
        loader = DataLoader(dataset=dataset, batch_size=batch_size, shuffle=False)

        # One checkpoint folder and one pretrained network per joint.
        model_root = [root / preprocess / net / ("trocar" + str(j))
                      for j in range(JOINTS)]
        networks = []
        for j in range(JOINTS):
            networks.append(torqueLstmNetwork(batch_size, device).to(device))
            utils.load_prev(networks[j], model_root[j], epoch_to_use)
            print("Loaded a " + str(j) + " model")

        all_time = torch.tensor([])
        all_pred = torch.tensor([])
        for i, (position, velocity, torque, jacobian, time) in enumerate(loader):
            position = position.to(device)
            velocity = velocity.to(device)
            # The input is identical for every joint; build it once per batch
            # (it was rebuilt inside the per-joint loop before).
            posvel = torch.cat((position, velocity), axis=2).contiguous()

            step_pred = torch.tensor([])
            for j in range(JOINTS):
                pred = networks[j](posvel).detach().squeeze(0).cpu()
                # Use .numel(): the old `.size()` check returned a Size tuple,
                # which is always truthy for a 1-D empty tensor and only
                # worked via torch.cat's legacy empty-tensor handling.
                step_pred = torch.cat((step_pred, pred), axis=1) if step_pred.numel() else pred

            all_pred = torch.cat((all_pred, step_pred), axis=0) if all_pred.numel() else step_pred
            # Loader yields time as (batch, seq) here — flip to rows-first
            # before stacking.  TODO confirm against indirectDataset.
            time = time.permute(1, 0)
            all_time = torch.cat((all_time, time), axis=0) if all_time.numel() else time

        # Prepend the timestamp column and dump one CSV per duration tag.
        all_pred = torch.cat((all_time, all_pred), axis=1)
        results_path = '../results/' + data + '/no_contact/'
        np.savetxt(results_path + '/torque_lstm_troc_' + preprocess + '.csv', all_pred.numpy())
for j in range(JOINTS): if is_rnn: window = 1000 networks.append(torqueLstmNetwork(batch_size, device)) else: window = WINDOW networks.append(fsNetwork(window)) networks[j].to(device) optimizers.append(torch.optim.Adam(networks[j].parameters(), lr)) schedulers.append(ReduceLROnPlateau(optimizers[j], verbose=True)) train_dataset = indirectDataset(train_path, window, SKIP, in_joints, is_rnn=is_rnn, filter_signal=f) val_dataset = indirectDataset(val_path, window, SKIP, in_joints, is_rnn=is_rnn, filter_signal=f) train_loader = DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True, drop_last=is_rnn) val_loader = DataLoader(dataset=val_dataset, batch_size=batch_size, shuffle=False,
def main():
    """Run every per-joint model over one dataset split and dump predictions.

    Chooses the csv folder from `exp`, loads JOINTS pretrained networks
    (LSTM when is_rnn, feed-forward otherwise), predicts torque for every
    batch, and saves [time, per-joint predictions] as a CSV next to the data.

    NOTE(review): relies on module-level globals — exp, data, contact,
    is_rnn, batch_size, root, preprocess, net, fs, seal, epoch_to_use,
    device, JOINTS.
    """
    # Pick the dataset split; an unrecognised exp value is treated as a
    # named test subfolder under the contact directory.
    if exp == 'train':
        path = '../data/csv/train/' + data + '/'
    elif exp == 'val':
        path = '../data/csv/val/' + data + '/'
    elif exp == 'test':
        path = '../data/csv/test/' + data + '/' + contact + '/'
    else:
        path = '../data/csv/test/' + data + '/' + contact + '/' + exp + '/'

    in_joints = [0, 1, 2, 3, 4, 5]

    # The RNN consumes long fixed windows and a different dataset class than
    # the feed-forward network (was two separate if/else blocks).
    if is_rnn:
        window = 1000
        dataset = indirectDataset(path, window, utils.SKIP, in_joints, is_rnn=is_rnn)
    else:
        window = utils.WINDOW
        dataset = indirectTestDataset(path, window, utils.SKIP, in_joints, is_rnn=is_rnn)
    loader = DataLoader(dataset=dataset, batch_size=batch_size, shuffle=False, drop_last=False)

    # One checkpoint folder and one pretrained network per joint.
    model_root = [root / preprocess / net / (fs + str(j)) for j in range(JOINTS)]
    networks = []
    for j in range(JOINTS):
        if is_rnn:
            networks.append(torqueLstmNetwork(batch_size, device).to(device))
        else:
            networks.append(fsNetwork(window).to(device))
        utils.load_prev(networks[j], model_root[j], epoch_to_use)
        print("Loaded a " + str(j) + " model")

    all_pred = torch.tensor([])
    all_time = torch.tensor([])

    for i, (position, velocity, torque, time) in enumerate(loader):
        position = position.to(device)
        velocity = velocity.to(device)
        # RNN batches carry a sequence axis, so features are dim 2 instead
        # of dim 1.  TODO confirm shapes against indirectDataset.
        posvel = torch.cat((position, velocity), axis=2 if is_rnn else 1).contiguous()
        if is_rnn:
            time = time.permute((1, 0))
        torque = torque.squeeze()

        cur_pred = torch.zeros(torque.size())
        for j in range(JOINTS):
            pred = networks[j](posvel).squeeze().detach()
            cur_pred[:, j] = pred.cpu()

        if is_rnn:
            time = time.squeeze(-1)
        # Use .numel(): the old `.size()` check returned a Size tuple, which
        # is always truthy for a 1-D empty tensor and only worked via
        # torch.cat's legacy empty-tensor handling.
        all_time = torch.cat((all_time, time.cpu()), axis=0) if all_time.numel() else time.cpu()
        all_pred = torch.cat((all_pred, cur_pred.cpu()), axis=0) if all_pred.numel() else cur_pred.cpu()

    # Prepend the timestamp column and write the prediction CSV.
    all_pred = torch.cat((all_time.unsqueeze(1), all_pred), axis=1)
    np.savetxt(path + net + '_' + seal + '_pred_' + preprocess + '.csv', all_pred.numpy())