# Multi-structure Regions of Interest
#
# References :
# CNN structure based on VGG16, https://github.com/ry/tensorflow-vgg16/blob/master/vgg16.py
# Channel independent feature maps (3D features) using https://www.tensorflow.org/versions/r0.11/api_docs/python/nn.html#depthwise_conv2d_native
# GAP based on https://github.com/jazzsaxmafia/Weakly_detector/blob/master/src/detector.py
# Conv2d layer based on https://github.com/carpedm20/DCGAN-tensorflow/blob/master/ops.py

# TF1-style graph code; use the compat.v1 API, matching the training script below
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import numpy as np
import pickle  # the stdlib pickle (cPickle under Python 2)

from params import CNNParams, HyperParams

hyper = HyperParams(verbose=False)
cnn_param = CNNParams(verbose=False)


def print_model_params(verbose=True):
    """Count (and optionally print) the trainable parameters in the current graph."""
    total_parameters = 0
    for variable in tf.trainable_variables():
        shape = variable.get_shape()
        if verbose:
            print("name: " + str(variable.name) + " - shape:" + str(shape))
        variable_parameters = 1
        for dim in shape:
            # int(dim) handles both TF1 Dimension objects and plain ints
            variable_parameters *= int(dim)
        if verbose:
            print("variable parameters: ", variable_parameters)
        total_parameters += variable_parameters
    if verbose:
        print("total params: ", total_parameters)
    return total_parameters
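
# Illustrative usage sketch (an assumption, not part of the original file): a tiny
# throwaway variable gives print_model_params something to count; in the real
# pipeline it is called after the full CNN graph has been built.
if __name__ == "__main__":
    with tf.variable_scope("demo", reuse=tf.AUTO_REUSE):
        tf.get_variable("w", shape=[3, 3, 3, 16])  # 3*3*3*16 = 432 parameters
    print_model_params(verbose=True)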
import math
import os

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import numpy as np
import pandas as pd
from time import time

from model import CNN
from util import load_image, chunker
from params import TrainingParams, HyperParams, CNNParams

tparam = TrainingParams(verbose=True)
hyper = HyperParams(verbose=True)
cparam = CNNParams(verbose=True)

data_train = pd.read_pickle(tparam.data_train_path)
data_test = pd.read_pickle(tparam.data_test_path)
len_train = len(data_train)
len_test = len(data_test)
train_b_num = int(math.ceil(len_train / tparam.batch_size))
test_b_num = int(math.ceil(len_test / tparam.batch_size))

images_tf = tf.placeholder(
    tf.float32, [None, hyper.image_h, hyper.image_w, hyper.image_c], name="images")
if hyper.sparse:
    labels_tf = tf.placeholder(tf.int64, [None], name='labels')
else:
    labels_tf = tf.placeholder(tf.int64, [None, hyper.n_labels], name='labels')
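
# A minimal training-loop sketch (an assumption, not part of the original script),
# showing how the pieces imported above would typically fit together. chunker is
# assumed to yield batch_size-sized slices of the DataFrame, load_image to return an
# image_h x image_w x image_c array; the 'image_path'/'label' column names, the
# sparse-label case, and the loss_tf/train_op arguments are illustrative placeholders.
def train_sketch(loss_tf, train_op, n_epochs=1):
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for epoch in range(n_epochs):
            for batch in chunker(data_train.sample(frac=1), tparam.batch_size):
                batch_images = np.stack([load_image(p) for p in batch['image_path']])
                batch_labels = batch['label'].values
                _, loss_val = sess.run(
                    [train_op, loss_tf],
                    feed_dict={images_tf: batch_images, labels_tf: batch_labels})
            print("epoch %d done, last batch loss %.4f" % (epoch, loss_val))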
import os

import matplotlib.pyplot as plt
import skimage.io
import tensorflow.compat.v1 as tf

from params import HyperParams
# load_single_image, normalize, ResNet50 and get_classmap are project-local helpers
# assumed to be importable from elsewhere in the repository


def compressImage(imagePath):
    """
    Generates and saves the heatmap and MS-ROI map for an image.

    Parameters
    ----------
    imagePath : str
        Original image path.

    Returns
    -------
    str, str
        Heatmap path, MS-ROI path.
    """
    image = load_single_image(imagePath)
    hyper = HyperParams(verbose=False)
    images_tf = tf.placeholder(
        tf.float32, [None, hyper.image_h, hyper.image_w, hyper.image_c],
        name="images")
    class_tf = tf.placeholder(tf.int64, [None], name='class')

    conv_last, gap, class_prob = ResNet50(images_tf)
    classmap = get_classmap(class_tf, conv_last)

    with tf.Session() as sess:
        tf.train.Saver().restore(sess, hyper.model_path)
        conv_last_val, class_prob_val = sess.run(
            [conv_last, class_prob], feed_dict={images_tf: image})

        # use argsort instead of argmax to get all the classes, ranked by probability
        class_predictions_all = class_prob_val.argsort(axis=1)
        print(class_predictions_all)

        roi_map = None
        for i in range(-1 * hyper.top_k, 0):
            current_class = class_predictions_all[:, i]
            classmap_vals = sess.run(
                classmap,
                feed_dict={class_tf: current_class, conv_last: conv_last_val})
            normalized_classmap = normalize(classmap_vals[0])

            if roi_map is None:
                roi_map = 1.2 * normalized_classmap
            else:
                # simple exponential ranking: each averaging step halves the weight of
                # the maps accumulated so far, so higher-probability classes dominate
                roi_map = (roi_map + normalized_classmap) / 2
        roi_map = normalize(roi_map)

    # Plot the heatmap on top of the image
    fig, ax = plt.subplots(1, 1, figsize=(12, 9))
    ax.margins(0)
    plt.axis('off')
    plt.imshow(roi_map, cmap=plt.cm.jet, interpolation='nearest')
    plt.imshow(image[0], alpha=0.4)

    # save the plot and the map into 'static'
    if not os.path.exists('static'):
        os.makedirs('static')
    hmPath = os.path.join('static', 'overlayed_heatmap.png')
    plt.savefig(hmPath)
    outPath = os.path.join('static', imagePath)
    skimage.io.imsave(outPath, roi_map)
    return hmPath, outPath
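
# Illustrative call site (an assumption, not part of the original file). compressImage
# builds its own placeholders and graph, so the default graph is reset first to keep
# repeated calls in one process safe; 'images/sample.png' is a hypothetical input path.
if __name__ == "__main__":
    tf.reset_default_graph()
    heatmap_path, msroi_path = compressImage('images/sample.png')
    print("heatmap written to:", heatmap_path)
    print("MS-ROI map written to:", msroi_path)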
# compression_engine mirrors compressImage above but builds the repository's own CNN
# (VGG-based, see model.py); it additionally relies on CNN from model.py and PIL's
# Image, and on output_directory / make_quality_compression defined elsewhere.
def compression_engine(img):
    image = load_single_image(img)
    # print("INPUT IMAGE ARRAY ", image.shape)
    hyper = HyperParams(verbose=False)
    images_tf = tf.placeholder(
        tf.float32, [None, hyper.image_h, hyper.image_w, hyper.image_c],
        name="images")
    class_tf = tf.placeholder(tf.int64, [None], name='class')

    cnn = CNN()
    if hyper.fine_tuning:
        cnn.load_vgg_weights()
    conv_last, gap, class_prob = cnn.build(images_tf)
    classmap = cnn.get_classmap(class_tf, conv_last)

    with tf.Session() as sess:
        tf.train.Saver().restore(sess, hyper.model_path)
        conv_last_val, class_prob_val = sess.run(
            [conv_last, class_prob], feed_dict={images_tf: image})

        # use argsort instead of argmax to get all the classes, ranked by probability
        class_predictions_all = class_prob_val.argsort(axis=1)

        roi_map = None
        for i in range(-1 * hyper.top_k, 0):
            current_class = class_predictions_all[:, i]
            classmap_vals = sess.run(
                classmap,
                feed_dict={class_tf: current_class, conv_last: conv_last_val})
            normalized_classmap = normalize(classmap_vals[0])

            if roi_map is None:
                roi_map = 1.2 * normalized_classmap
            else:
                # simple exponential ranking: each averaging step halves the weight of
                # the maps accumulated so far, so higher-probability classes dominate
                roi_map = (roi_map + normalized_classmap) / 2
        roi_map = normalize(roi_map)

    # Plot the heatmap on top of the image
    fig, ax = plt.subplots(1, 1, figsize=(12, 9))
    ax.margins(0)
    plt.axis('off')
    plt.imshow(roi_map, cmap=plt.cm.jet, interpolation='nearest')
    plt.imshow(image[0], alpha=0.4)

    # save the plot and the map
    if not os.path.exists('output'):
        os.makedirs('output')
    plt.savefig('output/overlayed_heatmap.png')
    skimage.io.imsave('msroi_map.jpg', roi_map)
    plt.clf()
    print("MSROI TYPE : ", type(roi_map))
    plt.close()

    from glob import glob

    # make the output directory to store the Q-level images
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)
    original = Image.open(img)
    # print("ORIGINAL : ", original)
    sal = Image.open('msroi_map.jpg')
    out_name = make_quality_compression(original, sal, img, original)
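
# A minimal sketch of what a GAP-based class activation map (the classmap op used
# above) typically computes, following the Weakly_detector reference cited in the
# model header. Illustrative only: the "GAP/W" variable name, the 224x224 output
# size, and the function signature are assumptions, not the original cnn.get_classmap.
def get_classmap_sketch(class_tf, conv_last, n_labels, out_h=224, out_w=224):
    n_channels = int(conv_last.shape[-1])
    # upsample the last conv feature maps to output resolution: [N, out_h, out_w, C]
    conv_resized = tf.image.resize_bilinear(conv_last, [out_h, out_w])
    with tf.variable_scope("GAP", reuse=tf.AUTO_REUSE):
        # in the real model this [C, n_labels] matrix belongs to the GAP classifier
        gap_w = tf.get_variable("W", shape=[n_channels, n_labels])
    # pick each example's weight column for its requested class: [N, C, 1]
    label_w = tf.reshape(tf.gather(tf.transpose(gap_w), class_tf), [-1, n_channels, 1])
    # channel-weighted sum at every spatial location: [N, out_h*out_w, 1]
    conv_flat = tf.reshape(conv_resized, [-1, out_h * out_w, n_channels])
    classmap = tf.matmul(conv_flat, label_w)
    return tf.reshape(classmap, [-1, out_h, out_w])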
# modified from source https://github.com/jazzsaxmafia/Weakly_detector/blob/master/src/train.caltech.py#L27
import numpy as np
import os
import pandas as pd

from params import TrainingParams, HyperParams, CNNParams

hparams = HyperParams(verbose=False)
tparam = TrainingParams(verbose=False)

image_dir_list = os.listdir(tparam.images_path)

# class directories are named "<label>.<label_name>"; build a name -> 0-based label lookup
label_pairs = [x.split('.') for x in image_dir_list]
labels, label_names = zip(*label_pairs)
labels = [int(x) for x in labels]
label_dict = pd.Series(labels, index=label_names) - 1

# collect the image paths of every class directory
image_paths_per_label = [
    [os.path.join(tparam.images_path, one_dir, one_file)
     for one_file in os.listdir(os.path.join(tparam.images_path, one_dir))]
    for one_dir in image_dir_list]

# all but the last 10 images of each class form the training split
image_paths_train = np.hstack(
    [one_class[:-10] for one_class in image_paths_per_label])
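
# A sketch of how this script plausibly finishes (an assumption; the remainder of the
# original file is not shown): the last 10 images of each class are held out for
# testing, labels are looked up via label_dict, and the frames are pickled so the
# training script can read them with pd.read_pickle. The 'image_path', 'label_name'
# and 'label' column names are illustrative.
image_paths_test = np.hstack(
    [one_class[-10:] for one_class in image_paths_per_label])

def build_frame(paths):
    frame = pd.DataFrame({'image_path': paths})
    # the parent directory "<label>.<label_name>" carries the class name
    frame['label_name'] = frame['image_path'].map(
        lambda p: os.path.basename(os.path.dirname(p)).split('.')[1])
    frame['label'] = frame['label_name'].map(label_dict)
    return frame

build_frame(image_paths_train).to_pickle(tparam.data_train_path)
build_frame(image_paths_test).to_pickle(tparam.data_test_path)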