def test_dataloader(self):
    """Return the DALI test loader.

    In benchmark mode the training images/labels are reused at the
    validation batch size; otherwise the held-out test images are
    streamed one at a time with no labels. Both cases run in "test" mode.
    """
    if self.kwargs["benchmark"]:
        imgs, lbls, batch = self.train_imgs, self.train_lbls, self.args.val_batch_size
    else:
        imgs, lbls, batch = self.test_imgs, None, 1
    return fetch_dali_loader(imgs, lbls, batch, "test", **self.kwargs)
def test_dataloader(self):
    """Return the DALI test loader for the TFRecord pipeline.

    In benchmark mode the training records are replayed in "eval" mode at
    the validation batch size; otherwise the test records are streamed one
    sample per batch in "test" mode.
    """
    if self.kwargs["benchmark"]:
        records, indices, batch, mode = (
            self.tfrecords_train,
            self.train_idx,
            self.args.val_batch_size,
            "eval",
        )
    else:
        records, indices, batch, mode = self.tfrecords_test, self.test_idx, 1, "test"
    return fetch_dali_loader(records, indices, batch, mode, **self.kwargs)
def get_dataloader_fn(*, data_dir: str, batch_size: int, precision: str):
    """Build a generator factory yielding inference batches.

    Loads ``*_x.npy``/``*_y.npy`` pairs from *data_dir*, selects the
    validation subset of fold 2 from a fixed 5-fold split, and wraps a
    DALI loader over it. The returned callable yields
    ``(filenames, {"INPUT__0": images}, {"OUTPUT__0": labels})`` tuples;
    images are cast to half precision when *precision* mentions "fp16".
    """
    # Fixed loader configuration for 3D single-GPU inference.
    kwargs = {
        "dim": 3,
        "gpus": 1,
        "seed": 0,
        "num_workers": 8,
        "meta": None,
        "oversampling": 0,
        "benchmark": False,
        "patch_size": [128, 128, 128],
    }
    images = load_data(data_dir, "*_x.npy")
    labels = load_data(data_dir, "*_y.npy")
    # Deterministic split: same seed/fold as used during training.
    splitter = KFold(n_splits=5, shuffle=True, random_state=12345)
    _, val_idx = list(splitter.split(images))[2]
    images = get_split(images, val_idx)
    labels = get_split(labels, val_idx)
    loader = fetch_dali_loader(images, labels, batch_size, "bermuda", **kwargs)

    def _dataloader_fn():
        for i, batch in enumerate(loader):
            # Synthetic per-sample names: "<step>_<index-in-batch>".
            fname = [f"{i}_{j}" for j in range(batch_size)]
            inputs = batch["image"].numpy()
            if "fp16" in precision:
                inputs = inputs.astype(np.half)
            targets = batch["label"].squeeze(1).numpy().astype(int)
            yield fname, {"INPUT__0": inputs}, {"OUTPUT__0": targets}

    return _dataloader_fn
def val_dataloader(self):
    """Return the validation DALI loader: one sample per batch, "eval" mode."""
    loader_args = (self.val_imgs, self.val_lbls, 1, "eval")
    return fetch_dali_loader(*loader_args, **self.kwargs)
def train_dataloader(self):
    """Return the training DALI loader at the configured batch size."""
    batch = self.args.batch_size
    return fetch_dali_loader(self.train_imgs, self.train_lbls, batch, "train", **self.kwargs)
def val_dataloader(self):
    """Return the validation loader over TFRecords: one sample per batch, "eval" mode."""
    loader_args = (self.tfrecords_val, self.val_idx, 1, "eval")
    return fetch_dali_loader(*loader_args, **self.kwargs)
def train_dataloader(self):
    """Return the training loader over TFRecords in "training" mode."""
    batch = self.args.batch_size
    return fetch_dali_loader(self.tfrecords_train, self.train_idx, batch, "training", **self.kwargs)