def augment(image, is_train=True):
    # Flat CIFAR-style input: reshape to (3, 32, 32), move channels last, cast to float.
    image = np.reshape(image, (3, 32, 32))
    image = np.transpose(image, (1, 2, 0))
    image = image.astype(float)
    if is_train:
        # Upscale to (cf.uw, cf.uh), random-crop back to (cf.w, cf.h), then photometric jitter.
        image = aug.resize(image, (cf.uw, cf.uh))
        image = aug.random_crop(image, (cf.w, cf.h, 3))
        image = aug.random_flip_lr(image)
        image = aug.random_brightness(image)
        image = aug.random_contrast(image)
    else:
        image = aug.resize(image, (cf.w, cf.h))
    return aug.whitening(image)
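
# Usage sketch: running augment() over a batch of flattened image rows (e.g. a
# CIFAR-10 data batch). augment_batch() is a hypothetical helper, not part of the
# function above; it assumes each row is a length-3072 vector as stored in the
# CIFAR binary format.
def augment_batch(flat_images, is_train=True):
    return np.stack([augment(row, is_train=is_train) for row in flat_images])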
def val_loop(model, val_dataset, avg_loss, mIoU, iters):
    for images, labels in tqdm(val_dataset, total=iters):
        # with tf.device('/GPU:0'):
        images, labels = resize(images, labels)
        logits = model(images)
        preds = tf.argmax(tf.nn.softmax(logits), axis=-1)

        # Drop ignore pixels before computing the cross-entropy loss.
        valid_labels, valid_logits = valid_mask_preds(labels, logits)
        loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
            labels=valid_labels, logits=valid_logits)
        loss = tf.reduce_mean(loss)
        avg_loss.update_state(loss)

        # Drop ignore pixels before updating the mIoU metric.
        valid_lbls, valid_preds = valid_mask_preds(labels, preds)
        mIoU.update_state(valid_lbls, valid_preds)
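
# A minimal sketch of the valid_mask_preds() helper the loop above relies on,
# assuming it simply filters out pixels carrying an ignore label (the value 255
# is an assumption); the project's real helper may differ.
def valid_mask_preds(labels, values, ignore_label=255):
    # labels: (B, H, W) integer labels; values: logits (B, H, W, C) or predictions (B, H, W).
    valid = tf.not_equal(labels, ignore_label)       # boolean mask over pixels
    valid_labels = tf.boolean_mask(labels, valid)    # (P,)
    valid_values = tf.boolean_mask(values, valid)    # (P, C) for logits, (P,) for preds
    return valid_labels, valid_values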
def __getitem__(self, idx):
    T = self.T
    # Map the flat dataset index to (video i, start frame idx) within that video.
    i = 0
    while idx >= self.video_len[i]:
        idx -= self.video_len[i]
        i += 1
    img_dir, flow_dir, inv_flow_dir, label_dir = self.dir_list[i]
    img_names = sorted(glob(join(img_dir, '*.jpg')))[idx:idx + T]
    flow_names = sorted(glob(join(flow_dir, '*.flo')))[idx:idx + T]
    inv_flow_names = sorted(glob(join(inv_flow_dir, '*.flo')))[idx:idx + T]
    label_names = sorted(glob(join(label_dir, '*.png')))[idx:idx + T]
    try:
        # get image (`tf` here is torchvision.transforms, not TensorFlow)
        imgs = [Image.open(img_name) for img_name in img_names]
        imgs = self.aug.color_warp(imgs)
        transform = tf.Compose([
            tf.ToTensor(),
            tf.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
        ])
        imgs = [transform(img) for img in imgs]
        imgs = torch.stack(imgs)

        # get flow
        flows = []
        for flow_name in flow_names:
            flow = read(flow_name)
            flow = torch.tensor(flow).permute(2, 0, 1).unsqueeze(0)
            flows.append(flow)
        flows = torch.cat(flows, dim=0)

        inv_flows = []
        for inv_flow_name in inv_flow_names:
            inv_flow = read(inv_flow_name)
            inv_flow = torch.tensor(inv_flow).permute(2, 0, 1).unsqueeze(0)
            inv_flows.append(inv_flow)
        inv_flows = torch.cat(inv_flows, dim=0)

        # get label: combine the three color channels into one integer id per pixel
        labels = []
        for label_name in label_names:
            label = cv2.imread(label_name)
            label = torch.tensor(label).float()
            label = label[..., 0] + label[..., 1] * 255 + label[..., 2] * 255 * 255
            label = label[None, None, ...]
            labels.append(label)
        labels = torch.cat(labels, dim=0)

        # Remap object ids to contiguous indices (0 stays background).
        objs = torch.unique(labels)
        for new_id, obj in enumerate(objs):
            labels[labels == obj] = new_id
        masks = (labels > 0).float()

        imgs, flows, inv_flows, masks, labels = resize(
            self.aug((imgs, flows, inv_flows, masks, labels)), self.resize)

        one_hot_labels = F.one_hot(labels.squeeze().long())
        one_hot_labels = one_hot_labels.permute(3, 0, 1, 2)[1:]  # drop the background channel
        n_clusters = one_hot_labels.size(0)

        assert not torch.isnan(imgs).any()
        assert not torch.isnan(flows).any()
        assert not torch.isnan(inv_flows).any()
        assert not torch.isnan(one_hot_labels).any()
        assert n_clusters > 0
    except Exception:
        # Log the offending clip and return a sentinel sample for the caller to skip.
        with open('bad_data.txt', 'a+') as f:
            f.write(img_dir + '\n')
        return 0, 0, 0, 0, 0, 0, 0
    return imgs, flows, inv_flows, masks, one_hot_labels, n_clusters, img_dir
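
# Sketch of how a training script might consume this dataset while skipping the
# sentinel (0, 0, 0, 0, 0, 0, 0) samples returned on read failures. The collate
# function and make_loader() are hypothetical helpers, not part of the dataset
# class above.
import torch
from torch.utils.data import DataLoader

def skip_bad_collate(batch):
    # Keep only samples whose first field is a tensor; the failure path returns plain ints.
    batch = [sample for sample in batch if torch.is_tensor(sample[0])]
    return batch if batch else None

def make_loader(dataset, num_workers=4):
    # batch_size=1 because clips can differ in object count and spatial size.
    return DataLoader(dataset, batch_size=1, shuffle=True,
                      num_workers=num_workers, collate_fn=skip_bad_collate)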