def _load_data(self):
    """Load the LFW-crop dataset and return it grouped by class.

    Returns:
        dict: Maps a class index to the array of that class' images,
        shaped ``(n_images,) + self.get_data_shape()`` and scaled via
        ``self._scale_data``. If ``self.__min_images_per_class`` is set
        and greater than 1, classes with fewer images are dropped and
        the surviving classes are renumbered contiguously from 0.
    """
    # Load all records.
    (x, y) = load_data(self.__dataset_dir, 'lfw_crop', self.__img_size)

    # Reshape x for tensorflow.
    x = x.reshape((x.shape[0],) + self.get_data_shape())

    # Scale pixel values. NOTE(review): the original comment claimed
    # [0, 1] while the sibling loader claims [-1, 1] for the same
    # self._scale_data call -- the actual range is decided there; confirm.
    x = self._scale_data(x)

    # Split the records by class and store them.
    data = {i: x[y == i] for i in np.unique(y)}

    # Remove classes with too few records, renumbering the survivors
    # from 0 in their original (dict insertion) order.
    if self.__min_images_per_class is not None and self.__min_images_per_class > 1:
        new_data = {
            new_index: images
            for new_index, images in enumerate(
                images
                for images in data.values()
                if len(images) >= self.__min_images_per_class
            )
        }
        print("By using only classes with at least {} elements, the dataset was reduced from {} to {} classes.".format(
            self.__min_images_per_class, len(data), len(new_data)
        ))
        data = new_data
    return data
def _load_data(self):
    """Load the Tiny ImageNet dataset and return it grouped by class.

    Returns:
        dict: Maps each class label found in the dataset to an array of
        that class' images, shaped ``(n_images,) + self.get_data_shape()``
        and scaled via ``self._scale_data``.
    """
    # Load all records.
    (x, y) = load_data(self.__dataset_dir, 'tiny_image_net', self.__img_size)

    # Reshape x for tensorflow.
    x = x.reshape((x.shape[0],) + self.get_data_shape())

    # Scale pixel values. NOTE(review): the original comment claimed
    # [-1, 1] while the sibling loaders claim [0, 1] for the same
    # self._scale_data call -- the actual range is decided there; confirm.
    x = self._scale_data(x)

    # Flatten y to 1-D so the boolean mask below selects along axis 0.
    y = y.reshape((y.shape[0],))

    # Split the records by class and return them.
    return {i: x[y == i] for i in np.unique(y)}
def _load_data(self):
    """Load the FaceScrub dataset and return it grouped by class.

    Returns:
        dict: Maps each class label to the array of images belonging to
        that class, reshaped to ``(n_images,) + self.get_data_shape()``
        and scaled via ``self._scale_data``.
    """
    # Load all records
    (x_train, y_train) = load_data(self.__dataset_dir, 'facescrub', self.__img_size)
    images, labels = x_train, y_train

    # Reshape the images for tensorflow
    images = images.reshape((images.shape[0],) + self.get_data_shape())

    # Scale the pixel values (range is determined by self._scale_data)
    images = self._scale_data(images)

    # Group the records by their class label and return the mapping
    grouped = {}
    for label in np.unique(labels):
        grouped[label] = images[labels == label]
    return grouped