def val_with_labels(self, epoch):
    """Evaluate the TCN on the validation set via nearest-neighbour labels.

    Embeds every validation frame, then (inside ``Loss.labeled_accuracy``)
    matches each frame against its nearest neighbour in every other video
    and scores label agreement.

    Args:
        epoch: current epoch number, used only for progress/log messages.
    """
    self.tcn.switch_mode('eval')
    embeddings = []   # tcn embeddings, one per frame
    video_names = []  # source video name per frame
    video_cls = []    # video class per frame
    frame_lbls = []   # ground-truth frame labels

    # Embed every validation batch; gradients are never needed here.
    with torch.no_grad():
        for idx in range(len(self.val_set)):
            X, v, r, lbls = self.val_set[idx][:4]
            X = X.to(self.devices[0])
            batch_emb = self.tcn(X).cpu().numpy()
            embeddings.extend(list(batch_emb))
            video_names.extend(v)
            video_cls.extend(r)
            frame_lbls.extend(lbls)
            # \r keeps the progress readout on a single console line.
            print("Validation Epoch {}: {}/{} ({:.0f}%)".format(
                epoch, idx, len(self.val_set),
                100. * idx / len(self.val_set)), ' ', end='\r')

    accuracy, correct, N = Loss.labeled_accuracy(
        embeddings, video_names, video_cls, frame_lbls)
    Tools.log('Validation Epoch {}: Accuracy {:.0f}/{} ({:.0f}%)'.format(
        epoch, correct, N, 100. * accuracy))
def _init_weights(self):
    """Initialise conv1: Xavier-normal weights, normal-distributed bias.

    Aborts the process (exit code 1) if initialisation fails, since the
    model cannot be trained with uninitialised weights.
    """
    try:
        nn.init.xavier_normal_(self.conv1.weight)
        nn.init.normal_(self.conv1.bias)
        Tools.log("Init TCN on device 0: Success")
    except Exception as e:
        # str(e) is required: concatenating the Exception object itself
        # raised a TypeError here, hiding the original failure.
        Tools.log("Init TCN on device 0: Fail... (abort) " + str(e))
        sys.exit(1)
def _init_weights(self):
    """Initialise conv1/conv2: Xavier-normal weights, normal biases.

    Aborts the process (exit code 1) on failure.
    """
    try:
        # conv1
        nn.init.xavier_normal_(self.conv1.weight.data)
        nn.init.normal_(self.conv1.bias.data)
        # conv2
        nn.init.xavier_normal_(self.conv2.weight.data)
        nn.init.normal_(self.conv2.bias.data)
        Tools.log("Init Encoder: Success")
    except Exception as e:
        # str(e) is required: "str" + Exception raised a TypeError in the
        # handler, masking the actual initialisation error.
        Tools.log("Init Encoder: Fail... (abort) " + str(e))
        sys.exit(1)
def _load(self, path):
    """Load a saved state dict into this module.

    Args:
        path: checkpoint file path, or None to skip loading.

    Returns:
        True if the state dict was loaded successfully, False otherwise
        (missing path, missing file, or load/deserialisation error).
    """
    if path is not None and os.path.isfile(path):
        try:
            if torch.cuda.is_available():
                self.load_state_dict(torch.load(path))
            else:
                # Remap GPU-saved tensors onto the CPU.
                self.load_state_dict(
                    torch.load(path,
                               map_location=lambda storage, loc: storage))
            Tools.log("Load Encoder: Success")
            return True
        except Exception as e:
            # str(e) is required: "str" + Exception raised a TypeError,
            # replacing the intended failure log with a crash.
            Tools.log("Load Encoder: Fail " + str(e))
            return False
    return False
def train(self, epoch):
    """Run one training epoch over ``self.train_set``.

    Skips batches whose input is None, accumulates triplet loss and two
    accuracy metrics, and logs progress per batch plus an epoch summary.

    Args:
        epoch: current epoch number, used for progress/log messages.
    """
    self.tcn.switch_mode('train')
    epoch_loss = 0.
    cum_loss = 0.
    cum_ratio_loss = 0.
    n = 0  # count of usable (non-None) batches
    for idx in poem(range(len(self.train_set)), "train " + str(epoch)):
        X, labels, perspectives, paths = self.train_set[idx]
        if X is None:
            continue
        n += 1
        X = X.to(self.devices[0])
        labels = labels.to(self.devices[1])
        perspectives = perspectives.to(self.devices[1])
        self.optimizer.zero_grad()
        y = self.tcn(X)
        assert not Tools.contains_nan(y)
        loss = Loss.triplet_semihard_loss(y, labels, perspectives,
                                          margin=Config.TCN_MARGIN,
                                          device=self.devices[1])
        loss.backward()
        self.optimizer.step()
        epoch_loss += loss.item()
        cum_loss += Loss.embedding_accuracy(y, labels, perspectives,
                                            device=self.devices[1]).item()
        cum_ratio_loss += Loss.embedding_accuracy_ratio(
            y, labels, perspectives)
        Tools.pyout('Train Epoch: ' +
                    '{} [{}/{} ({:.0f}%)]\tAccuracy: '
                    '{:.6f}\tRatio: {:.6f}\tLoss: {:.6f}'.format(
                        epoch, idx, len(self.train_set),
                        100. * idx / len(self.train_set),
                        cum_loss / n, cum_ratio_loss / n,
                        epoch_loss / (n)))
    # Guard: if every batch was None, n == 0 and the summary division
    # below would raise ZeroDivisionError.
    if n == 0:
        Tools.log('Train Epoch: {}: no usable batches'.format(epoch))
        return
    Tools.log('Train Epoch: ' +
              '{} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                  epoch, n, len(self.train_set), 100., epoch_loss / (n)))
def test(self, epoch):
    """Evaluate one epoch on ``self.val_set`` and checkpoint on improvement.

    Accumulates embedding accuracy metrics over all usable batches; if the
    mean ratio metric beats ``self.best_ratio``, saves the model state and
    records the epoch of last improvement.

    Args:
        epoch: current epoch number, used for progress/log messages.
    """
    self.tcn.switch_mode('eval')
    cum_loss = 0.
    cum_ratio_loss = 0.
    n = 0  # count of usable (non-None) batches
    for idx in poem(range(len(self.val_set)), "eval " + str(epoch)):
        with torch.no_grad():
            batch = self.val_set[idx]
            X, labels, perspectives = batch[0], batch[1], batch[2]
            if X is None:
                continue
            n += 1
            X = X.to(self.devices[0])
            labels = labels.to(self.devices[1])
            perspectives = perspectives.to(self.devices[1])
            y = self.tcn(X)
            assert not Tools.contains_nan(y)
            cum_loss += Loss.embedding_accuracy(
                y, labels, perspectives, device=self.devices[1]).item()
            cum_ratio_loss += Loss.embedding_accuracy_ratio(
                y, labels, perspectives)
            Tools.pyout('Test Epoch: ' + (
                '{} [{}/{} ({:.0f}%)]\tAccuracy: '
                '{:.6f}\tRatio: {:.6f}').format(
                    epoch, n, len(self.val_set),
                    100. * idx / len(self.val_set),
                    cum_loss / (n), cum_ratio_loss / (n)))
    # Guard: if every batch was None, n == 0 and every division below
    # would raise ZeroDivisionError.
    if n == 0:
        Tools.log('Test Epoch: {}: no usable batches'.format(epoch))
        return
    Tools.log('Test Epoch: ' + ('{} [{}/{} ({:.0f}%)]\tAccuracy: '
                                '{:.6f}\tRatio: {:.6f}'
                                ).format(epoch, n, len(self.val_set), 100.,
                                         cum_loss / (n),
                                         cum_ratio_loss / (n)))
    if cum_ratio_loss / (n) < self.best_ratio:
        self.best_ratio = cum_ratio_loss / (n)
        self.save_state_dict(self.save_loc)
        self.last_improvement = epoch
def _init_weights(self):
    """Initialise every VAE layer: Xavier-normal weights, normal biases.

    Layers are initialised in a fixed order (kept identical to the
    original hand-written sequence so the RNG draw order is unchanged).
    Aborts the process (exit code 1) on failure.
    """
    layers = (
        self.conv1, self.conv2, self.conv3,
        self.line_mu, self.line_logvar, self.line4,
        self.trans5, self.conv5,
        self.trans6, self.conv6,
        self.trans7, self.conv7,
    )
    try:
        for layer in layers:
            nn.init.xavier_normal_(layer.weight)
            nn.init.normal_(layer.bias)
    except Exception as e:
        # str(e) is required: concatenating the Exception object itself
        # raised a TypeError here, hiding the original failure.
        Tools.log('Init VAE: Fail ' + str(e))
        sys.exit(1)
def close(self):
    """Announce that this EmbedderSet is shutting down.

    Nothing is held open here; the method only records the event.
    """
    Tools.log("Closing EmbedderSet")