def readFromDiskIteratorTest(self, image_dir, chipFiles, batch_size, tile_dim, patch_size, overlap=0, padding=(0, 0)):
    """Test-time iterator: walk every tile and yield batches of patches.

    Args:
        image_dir: directory holding the tile files, or a list of directories
            (one per channel file in a row).
        chipFiles: iterable of rows; each row lists the per-channel file names
            of one tile.
        batch_size: number of patches per yielded batch.
        tile_dim: (height, width) of a tile before padding.
        patch_size: (height, width) of each extracted patch.
        overlap: passed through to uabUtilreader.patchify.
        padding: per-side (row, col) padding applied to each tile.

    Yields:
        np.ndarray of shape (<=batch_size, patch_size[0], patch_size[1], nDims);
        the final chunk per tile may be smaller than batch_size.
    """
    for row in chipFiles:
        blockList = []
        nDims = 0
        for cnt, file in enumerate(row):
            # A list image_dir means each channel file lives in its own dir.
            if isinstance(image_dir, list):
                img = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir[cnt], file))
            else:
                img = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, file))
            if len(img.shape) == 2:
                img = np.expand_dims(img, axis=2)
            nDims += img.shape[2]
            blockList.append(img)
        block = np.dstack(blockList).astype(np.float32)

        if self.block_mean is not None:
            block -= self.block_mean

        if (np.array(padding) > 0).any():
            block = uabUtilreader.pad_block(block, padding)
            # BUG FIX: with plain tuples/lists, `tile_dim + padding * 2` is
            # sequence concatenation/repetition, not element-wise addition.
            # Compute the padded tile size numerically instead.
            tile_dim = np.array(tile_dim) + np.array(padding) * 2

        ind = 0
        image_batch = np.zeros((batch_size, patch_size[0], patch_size[1], nDims))
        for patch in uabUtilreader.patchify(block, tile_dim, patch_size, overlap=overlap):
            image_batch[ind, :, :, :] = patch
            ind += 1
            if ind == batch_size:
                ind = 0
                yield image_batch
        # yield the last, possibly partial, chunk
        if ind > 0:
            yield image_batch[:ind, :, :, :]
def readFromDiskIteratorTest(self, image_dir, chipFiles, batch_size, tile_dim, patch_size, overlap=0, padding=(0, 0)):
    """Test-time iterator that additionally resizes each tile to tile_dim.

    Same contract as the sibling test reader: yields batches of patches of
    shape (<=batch_size, patch_size[0], patch_size[1], nDims) per tile.
    """
    for row in chipFiles:
        blockList = []
        nDims = 0
        for cnt, file in enumerate(row):
            # A list image_dir means each channel file lives in its own dir.
            if isinstance(image_dir, list):
                img = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir[cnt], file))
            else:
                img = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, file))
            if len(img.shape) == 2:
                img = np.expand_dims(img, axis=2)
            nDims += img.shape[2]
            blockList.append(img)
        block = np.dstack(blockList).astype(np.float32)

        # Force the tile to the expected size; order=0 (nearest neighbour)
        # keeps discrete label values intact.
        if not np.all([np.array(tile_dim) == block.shape[:2]]):
            block = skimage.transform.resize(block, tile_dim, order=0, preserve_range=True, mode='reflect')

        if self.block_mean is not None:
            block -= self.block_mean

        if (np.array(padding) > 0).any():
            block = uabUtilreader.pad_block(block, padding)
            # BUG FIX: sequence `+`/`*` concatenate tuples/lists instead of
            # adding element-wise; compute the padded size numerically.
            tile_dim = np.array(tile_dim) + np.array(padding) * 2

        ind = 0
        image_batch = np.zeros((batch_size, patch_size[0], patch_size[1], nDims))
        # NOTE: removed dead code — patch_gt was computed for every patch but
        # never used (its consumer get_lines(...) was commented out).
        for patch in uabUtilreader.patchify(block, tile_dim, patch_size, overlap=overlap):
            image_batch[ind, :, :, :] = patch
            ind += 1
            if ind == batch_size:
                ind = 0
                yield image_batch
        # yield the last, possibly partial, chunk
        if ind > 0:
            yield image_batch[:ind, :, :, :]
def readFromDiskIteratorTrain(self, image_dir, chipFiles, batch_size, patch_size, padding=(0, 0), dataAug=''):
    """Endless training iterator with per-patch weighted sampling.

    Rows are drawn with probabilities self.patch_prob. Channel 0 of each
    stacked patch is treated as the label map; the remaining channels are the
    features. Yields (features, labels) or, when self.return_name is set,
    (features, labels, names) where each name is the first two '_'-separated
    tokens of the row's first file name.
    """
    n_chan = len(chipFiles[0])
    assert len(chipFiles) == len(self.patch_prob)
    while True:
        batch = np.zeros((batch_size, patch_size[0], patch_size[1], n_chan))
        names = [[] for _ in range(batch_size)]
        # weighted draw of which patch rows fill this batch
        chosen = np.random.choice(len(chipFiles), batch_size, p=self.patch_prob)
        for i, sample_id in enumerate(chosen):
            row = chipFiles[sample_id]
            tag = '_'.join(row[0].split('_')[:2])
            channels = []
            n_chan = 0
            for fname in row:
                arr = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, fname))
                if arr.ndim == 2:
                    arr = arr[:, :, np.newaxis]
                n_chan += arr.shape[2]
                channels.append(arr)
            stacked = np.dstack(channels).astype(np.float32)
            if self.block_mean is not None:
                stacked -= self.block_mean
            aug = stacked if dataAug == '' else uabUtilreader.doDataAug(
                stacked, n_chan, dataAug, is_np=True, img_mean=self.block_mean)
            if (np.array(padding) > 0).any():
                aug = uabUtilreader.pad_block(aug, padding)
            slot = i % batch_size
            batch[slot, :, :, :] = aug
            names[slot] = tag
            if (i + 1) % batch_size == 0:
                if self.return_name:
                    yield batch[:, :, :, 1:], batch[:, :, :, :1], names
                else:
                    yield batch[:, :, :, 1:], batch[:, :, :, :1]
def readFromDiskIteratorTrain(self, image_dir, chipFiles, batch_size, patch_size, padding=(0, 0), dataAug=''):
    """Single-pass iterator yielding (cropped patches, one-hot labels).

    Each patch is center-cropped to self.center_crop; its label is [0, 1] when
    the mean of its first channel exceeds self.patch_prob, else [1, 0].
    Iterates chipFiles once in order (shuffling is disabled) and drops any
    final partial batch.
    """
    n_chan = len(chipFiles[0])
    batch = np.zeros((batch_size, self.center_crop[0], self.center_crop[1], n_chan))
    truth = np.zeros((batch_size, 2))
    # deterministic order; the permutation variant was commented out upstream
    order = np.arange(len(chipFiles))
    for i, sample_id in enumerate(order):
        row = chipFiles[sample_id]
        channels = []
        n_chan = 0
        for fname in row:
            arr = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, fname))
            if arr.ndim == 2:
                arr = arr[:, :, np.newaxis]
            n_chan += arr.shape[2]
            channels.append(arr)
        stacked = np.dstack(channels).astype(np.float32)
        if self.block_mean is not None:
            stacked -= self.block_mean
        aug = stacked if dataAug == '' else uabUtilreader.doDataAug(
            stacked, n_chan, dataAug, is_np=True, img_mean=self.block_mean)
        if (np.array(padding) > 0).any():
            aug = uabUtilreader.pad_block(aug, padding)
        aug = util_functions.crop_center(aug, self.center_crop[0], self.center_crop[1])
        slot = i % batch_size
        batch[slot, :, :, :] = aug
        # fraction of positive pixels in the label channel of the crop
        frac = np.sum(aug[:, :, 0]) / (self.center_crop[0] * self.center_crop[1])
        truth[slot, :] = [0, 1] if frac > self.patch_prob else [1, 0]
        if (i + 1) % batch_size == 0:
            yield (batch[:, :, :, 1:], truth)
def readFromDiskIteratorTrain(self, image_dir, chipFiles, batch_size, patch_size, padding=(0, 0), dataAug=''):
    """Endless iterator with group-weighted sampling.

    Each batch element's group (e.g. a city) is drawn with probability
    self.group_alpha; within a group, patches are visited in a fixed shuffled
    order that wraps around. Yields (features, labels) with channel 0 as the
    label map.
    """
    groups = self.group_chip_files(chipFiles)
    assert len(groups) == len(self.group_alpha)
    # one shuffled visiting order per group, plus a wrap-around cursor into it
    orders = [np.random.permutation(len(g)) for g in groups]
    cursors = [0] * len(groups)
    n_chan = len(chipFiles[0])
    while True:
        batch = np.zeros((batch_size, patch_size[0], patch_size[1], n_chan))
        picks = np.random.choice(len(groups), batch_size, p=self.group_alpha)
        for i, g_id in enumerate(picks):
            p_id = orders[g_id][cursors[g_id] % len(groups[g_id])]
            cursors[g_id] += 1
            row = groups[g_id][p_id]
            channels = []
            n_chan = 0
            for fname in row:
                arr = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, fname))
                if arr.ndim == 2:
                    arr = arr[:, :, np.newaxis]
                n_chan += arr.shape[2]
                channels.append(arr)
            stacked = np.dstack(channels).astype(np.float32)
            if self.block_mean is not None:
                stacked -= self.block_mean
            aug = stacked if dataAug == '' else uabUtilreader.doDataAug(
                stacked, n_chan, dataAug, is_np=True, img_mean=self.block_mean)
            if (np.array(padding) > 0).any():
                aug = uabUtilreader.pad_block(aug, padding)
            batch[i % batch_size, :, :, :] = aug
            if (i + 1) % batch_size == 0:
                yield batch[:, :, :, 1:], batch[:, :, :, :1]
def readFromDiskIteratorTrain(self, image_dir, chipFiles, batch_size, patch_size, random, padding=(0, 0), dataAug=''):
    """Endless training iterator over a once-chosen visiting order.

    Args:
        random: when truthy, shuffle chipFiles once up front; otherwise
            iterate in file order (the same order is then cycled forever).

    Yields (features, labels) where the label is the LAST channel — unlike the
    sibling readers that use channel 0. NOTE(review): the feature slice
    `[:, :, :, 1:]` still drops channel 0 only, so with >2 channel groups the
    label channel is also part of the features — confirm against callers.
    """
    if random:
        idx = np.random.permutation(len(chipFiles))
    else:
        idx = np.arange(stop=len(chipFiles))
    nDims = len(chipFiles[0])
    while True:
        image_batch = np.zeros((batch_size, patch_size[0], patch_size[1], nDims))
        for cnt, randInd in enumerate(idx):
            row = chipFiles[randInd]
            blockList = []
            nDims = 0
            for file in row:
                img = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, file))
                if len(img.shape) == 2:
                    img = np.expand_dims(img, axis=2)
                nDims += img.shape[2]
                blockList.append(img)
            # BUG FIX: cast to float32 like every other reader variant.
            # Without the cast, `block -= self.block_mean` subtracts floats
            # in place on an integer-typed stack, which numpy rejects.
            block = np.dstack(blockList).astype(np.float32)
            if self.block_mean is not None:
                block -= self.block_mean
            if dataAug != '':
                augDat = uabUtilreader.doDataAug(block, nDims, dataAug, img_mean=self.block_mean)
            else:
                augDat = block
            if (np.array(padding) > 0).any():
                augDat = uabUtilreader.pad_block(augDat, padding)
            image_batch[cnt % batch_size, :, :, :] = augDat
            if ((cnt + 1) % batch_size == 0):
                yield image_batch[:, :, :, 1:], np.expand_dims(image_batch[:, :, :, -1], axis=3)
def readFromDiskIteratorTrain(self, image_dir, chipFiles, batch_size, patch_size, padding=(0, 0), dataAug=''):
    """Rotation-augmented iterator: each yielded batch holds one patch plus
    its three 90-degree rotations, hence the batch_size == 1 requirement.

    Yields (features, labels) with channel 0 of the stack as the label map.
    """
    assert batch_size == 1
    n_chan = len(chipFiles[0])
    while True:
        batch = np.zeros((4, patch_size[0], patch_size[1], n_chan))
        order = np.random.permutation(len(chipFiles))
        for i, sample_id in enumerate(order):
            row = chipFiles[sample_id]
            channels = []
            n_chan = 0
            for fname in row:
                arr = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, fname))
                if arr.ndim == 2:
                    arr = arr[:, :, np.newaxis]
                n_chan += arr.shape[2]
                channels.append(arr)
            stacked = np.dstack(channels).astype(np.float32)
            if self.block_mean is not None:
                stacked -= self.block_mean
            aug = stacked if dataAug == '' else uabUtilreader.doDataAug(
                stacked, n_chan, dataAug, is_np=True, img_mean=self.block_mean)
            if (np.array(padding) > 0).any():
                aug = uabUtilreader.pad_block(aug, padding)
            # slot 0 is the patch itself; slots 1-3 are its k*90° rotations
            batch[0, :, :, :] = aug
            for k in (1, 2, 3):
                batch[k, :, :, :] = np.rot90(aug, k=k, axes=(0, 1))
            if (i + 1) % batch_size == 0:
                yield batch[:, :, :, 1:], batch[:, :, :, :1]
def readFromDiskIteratorTrain(self, image_dir, chipFiles, batch_size, patch_size, padding=(0, 0), dataAug=''):
    """Endless training iterator that also reports a per-patch city id.

    The city name is the non-digit part of the first '_'-separated token of
    the row's first file name, mapped to an id via self.city_dict. Yields
    (features, labels, city_ids) with channel 0 as the label map.
    """
    # shuffle once, then cycle that order forever
    order = np.random.permutation(len(chipFiles))
    n_chan = len(chipFiles[0])
    while True:
        batch = np.zeros((batch_size, patch_size[0], patch_size[1], n_chan))
        city_ids = np.zeros(batch_size, dtype=np.uint8)
        for i, sample_id in enumerate(order):
            row = chipFiles[sample_id]
            channels = []
            n_chan = 0
            city = ''.join([c for c in row[0].split('_')[0] if not c.isdigit()])
            city_ids[i % batch_size] = self.city_dict[city]
            for fname in row:
                arr = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, fname))
                if arr.ndim == 2:
                    arr = arr[:, :, np.newaxis]
                n_chan += arr.shape[2]
                channels.append(arr)
            stacked = np.dstack(channels).astype(np.float32)
            if self.block_mean is not None:
                stacked -= self.block_mean
            aug = stacked if dataAug == '' else uabUtilreader.doDataAug(stacked, n_chan, dataAug, is_np=True)
            if (np.array(padding) > 0).any():
                aug = uabUtilreader.pad_block(aug, padding)
            batch[i % batch_size, :, :, :] = aug
            if (i + 1) % batch_size == 0:
                yield batch[:, :, :, 1:], batch[:, :, :, :1], city_ids
def loadTileDataByExtension(self, tile, extId):
    """Load a tile's data for the channel/extension identified by extId.

    Resolves the extension and directory from the collection metadata, builds
    the tile's file path, and loads it with the generic loader.
    """
    ext, dirn = self.getExtensionInfoById(extId)
    path = self.getDataNameByTile(dirn, tile, ext)
    return util_functions.uabUtilAllTypeLoad(path)
def readFromDiskIteratorTrain(self, image_dir, chipFiles, batch_size, patch_size, padding=(0, 0), dataAug=''):
    # this is a iterator for training
    # Dispatches on self.batch_code:
    #   0    -> pure random order over all patches
    #   1    -> every batch is drawn from a single tile
    #   else -> every batch element comes from a different city group
    # In all branches channel 0 of the stacked patch is the label map and the
    # remaining channels are the features.
    if self.batch_code == 0:
        # pure random
        idx = np.random.permutation(len(chipFiles))
        nDims = len(chipFiles[0])
        while True:
            image_batch = np.zeros((batch_size, patch_size[0], patch_size[1], nDims))
            for cnt, randInd in enumerate(idx):
                row = chipFiles[randInd]
                blockList = []
                nDims = 0
                for file in row:
                    img = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, file))
                    # promote single-channel images to HxWx1 for dstack
                    if len(img.shape) == 2:
                        img = np.expand_dims(img, axis=2)
                    nDims += img.shape[2]
                    blockList.append(img)
                block = np.dstack(blockList).astype(np.float32)
                if self.block_mean is not None:
                    block -= self.block_mean
                if dataAug != '':
                    augDat = uabUtilreader.doDataAug(block, nDims, dataAug, is_np=True, img_mean=self.block_mean)
                else:
                    augDat = block
                if (np.array(padding) > 0).any():
                    augDat = uabUtilreader.pad_block(augDat, padding)
                image_batch[cnt % batch_size, :, :, :] = augDat
                if ((cnt + 1) % batch_size == 0):
                    # features, labels
                    yield image_batch[:, :, :, 1:], image_batch[:, :, :, :1]
    elif self.batch_code == 1:
        # random, batches from same tile
        tile_num, patch_per_tile, tile_name = get_tile_and_patch_num(chipFiles)
        group = group_by_tile_name(tile_name, chipFiles)
        tile_idx = np.random.permutation(tile_num)
        patch_idx = np.random.permutation(patch_per_tile)
        # pad the per-tile patch order (by repeating its head) so its length
        # is a multiple of batch_size and no patch is silently dropped
        if patch_per_tile % batch_size != 0:
            comp_len = batch_size - patch_per_tile % batch_size
            patch_idx = np.append(patch_idx, patch_idx[:comp_len])
        nDims = len(chipFiles[0])
        while True:
            image_batch = np.zeros((batch_size, patch_size[0], patch_size[1], nDims))
            for randInd in tile_idx:
                for cnt, patchInd in enumerate(patch_idx):
                    row = group[randInd][patchInd]
                    blockList = []
                    nDims = 0
                    for file in row:
                        img = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, file))
                        if len(img.shape) == 2:
                            img = np.expand_dims(img, axis=2)
                        nDims += img.shape[2]
                        blockList.append(img)
                    block = np.dstack(blockList).astype(np.float32)
                    if self.block_mean is not None:
                        block -= self.block_mean
                    if dataAug != '':
                        augDat = uabUtilreader.doDataAug(block, nDims, dataAug, is_np=True, img_mean=self.block_mean)
                    else:
                        augDat = block
                    if (np.array(padding) > 0).any():
                        augDat = uabUtilreader.pad_block(augDat, padding)
                    image_batch[cnt % batch_size, :, :, :] = augDat
                    if ((cnt + 1) % batch_size == 0):
                        yield image_batch[:, :, :, 1:], image_batch[:, :, :, :1]
    else:
        # random, batches has to from different tiles
        tile_num, patch_per_tile, tile_name = get_tile_and_patch_num(chipFiles)
        group = group_by_city_name(tile_name, chipFiles)
        tile_idx = np.random.permutation(len(group))
        nDims = len(chipFiles[0])
        while True:
            image_batch = np.zeros((batch_size, patch_size[0], patch_size[1], nDims))
            for cnt, randInd in enumerate(tile_idx):
                # NOTE(review): assumes every group has len(group[0]) patches —
                # verify that group_by_city_name yields equal-sized groups
                patchInd = np.random.randint(low=0, high=len(group[0]))
                row = group[randInd][patchInd]
                blockList = []
                nDims = 0
                for file in row:
                    img = util_functions.uabUtilAllTypeLoad(os.path.join(image_dir, file))
                    if len(img.shape) == 2:
                        img = np.expand_dims(img, axis=2)
                    nDims += img.shape[2]
                    blockList.append(img)
                block = np.dstack(blockList).astype(np.float32)
                if self.block_mean is not None:
                    block -= self.block_mean
                if dataAug != '':
                    augDat = uabUtilreader.doDataAug(block, nDims, dataAug, is_np=True, img_mean=self.block_mean)
                else:
                    augDat = block
                if (np.array(padding) > 0).any():
                    augDat = uabUtilreader.pad_block(augDat, padding)
                image_batch[cnt % batch_size, :, :, :] = augDat
                if ((cnt + 1) % batch_size == 0):
                    yield image_batch[:, :, :, 1:], image_batch[:, :, :, :1]