def predict_batch(self, images, type_class):
    images_predictions = tf.TensorArray(tf.float32, size=0, dynamic_size=True)
    ys = tf.TensorArray(tf.float32, size=0, dynamic_size=True)
    matched_images = tf.TensorArray(tf.float32, size=0, dynamic_size=True)
    index = 0
    basis = tf.convert_to_tensor([0, 1], dtype=tf.float32)
    for i in tf.range(len(images)):
        # Undo generator-side standardization and rescale before classifying.
        gen_image = data_access.normalize(data_access.de_standardize(images[i]))
        img = tf.expand_dims(gen_image, axis=0)
        c = self.classifier(img)
        if self.num_classes == 2:
            # Binary case: distance of the prediction from the [0, 1] basis.
            w_list = tf.abs(tf.subtract(c, basis))
        else:
            w_list = c
        w_list = tf.reshape(w_list, (w_list.shape[1],))
        images_predictions = images_predictions.write(i, w_list)
        y_list = tf.convert_to_tensor(type_class, dtype=tf.float32)
        ys = ys.write(i, y_list)
        # Keep the images the classifier already assigns to the target class.
        if tf.reduce_all(tf.equal(w_list, y_list)):
            matched_images = matched_images.write(index, images[i])
            index += 1
    return images_predictions.stack(), ys.stack(), matched_images.stack()
def predict_batch(self,images,type_class): """ Classify each image received and prepare for loss function :param images: - images tensors :param type_class: - class chosen to influence generator. Its must be a number """ images_predictions = tf.TensorArray(tf.float32,size=0,dynamic_size=True) ys = tf.TensorArray(tf.float32,size=0,dynamic_size=True) matched_images = tf.TensorArray(tf.float32,size=0,dynamic_size=True) index = 0 basis = tf.convert_to_tensor([0,1],dtype=tf.float32) for i in tf.range(len(images)): gen_image = data_access.normalize(data_access.de_standardize(images[i])) img = tf.expand_dims(gen_image,axis=0) c = self.classifier(img) if(self.num_classes == 2): x = tf.subtract(c,basis) w_list = tf.abs(x) else: w_list = c w_list = tf.reshape(w_list,(w_list.shape[1],)) images_predictions = images_predictions.write(i,w_list) y_list = tf.one_hot(type_class,self.num_classes) ys = ys.write(i,y_list) if(tf.reduce_all(tf.equal(w_list,y_list))): matched_images = matched_images.write(index,images[i]) index +=1 return images_predictions.stack(), ys.stack(),matched_images.stack()
def generate_images(self, number_of_samples, directory):
    seed = tf.random.normal([number_of_samples, self.random_noise_size])
    images = self.generator(seed)
    if self.classifier is not None:
        # Classify the generated samples and save an annotated figure.
        predictions = self.classifier(
            data_access.normalize(data_access.de_standardize(images)))
        data_access.produce_generate_figure(directory, images, predictions, class_names)
    else:
        # No classifier available: just store the raw generated images.
        data_access.store_images_seed(directory, images, 'gen_images', 'gan')
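# --- Hedged usage sketch (assumed names, not from the original source): fixing
# the global TensorFlow seed before calling generate_images makes successive
# sample grids comparable across runs.
tf.random.set_seed(42)
gan.generate_images(number_of_samples=16, directory='generated_samples')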
def load_images_predict(self): """ Loads images (generated images) on the directory provided at (images_path) and classifies them with given classifier """ for filename in os.listdir(self.images_path): imagePath = self.images_path + "\\" + filename image = np.array( cv2.cvtColor( cv2.resize(cv2.imread(imagePath), dsize=(self.size_shape[0], self.size_shape[1])), cv2.COLOR_BGR2RGB)) self.real_images.append(image) tempImg = Image.open(imagePath) tempImg = tempImg.resize(self.size_shape, Image.ANTIALIAS) img = ImageTk.PhotoImage(tempImg) self.ImageTk_images.append(img) self.real_images = np.array(self.real_images).astype(np.float32) images_norms = data_access.normalize(self.real_images) self.predictions = self.classifier(images_norms).numpy()
def predict_batch(self, images, type_class):
    images_predictions = tf.TensorArray(tf.float32, size=10, dynamic_size=True)
    ys = tf.TensorArray(tf.float32, size=10, dynamic_size=True)
    matched_images = tf.TensorArray(tf.float32, size=0, dynamic_size=True)
    index = 0
    for i in tf.range(len(images)):
        gen_image = data_access.normalize(data_access.de_standardize(images[i]))
        img = tf.expand_dims(gen_image, axis=0)
        # predict_image returns a hard class index, which is one-hot encoded here.
        c_type = self.classifier_m.predict_image(img)
        w_list = tf.one_hot(c_type, self.num_classes)
        w_list = tf.reshape(w_list, (w_list.shape[1],))
        images_predictions = images_predictions.write(i, w_list)
        y_list = tf.one_hot(type_class, self.num_classes)
        ys = ys.write(i, y_list)
        if tf.reduce_all(tf.equal(w_list, y_list)):
            matched_images = matched_images.write(index, images[i])
            index += 1
    return images_predictions.stack(), ys.stack(), matched_images.stack()
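# --- Hedged sketch (assumed names, not from the original source): because this
# variant one-hot encodes a hard predicted class, the equality test is exact, so
# the share of matched images can serve as a per-batch success rate for the
# target class.
preds, targets, matched = gan.predict_batch(fake_images, target_class)
success_rate = int(matched.shape[0]) / len(fake_images)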
def generate_images(self, number_of_samples, directory):
    seed = tf.random.normal([number_of_samples, 100])
    images = self.generator(seed)
    predictions = self.classifier_m.predict_image_vector(
        data_access.normalize(data_access.de_standardize(images)))
    # Write the annotated figure to the directory requested by the caller
    # (the original hard-coded 'imgs' and left the directory argument unused).
    data_access.produce_generate_figure(directory, images, predictions)
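# --- Hedged sketch combining the two methods above (names assumed, not from the
# original source): sample a batch, keep only the images that the classifier
# already assigns to the desired class, and store those via the same data_access
# helpers used earlier.
seed = tf.random.normal([64, 100])
candidates = gan.generator(seed)
_, _, matched = gan.predict_batch(candidates, target_class)
data_access.store_images_seed('imgs', matched, 'matched_images', 'gan')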