Example #1
class inception:

    im_size = inception_v4.default_image_size
    names = imagenet.create_readable_names_for_imagenet_labels()

    def __init__(self, imgs, sess=None, weights=None):
        self.imgs = imgs
        self.sess = sess
        self.model()

    @classmethod
    def preprocess(cls, images):

        # Keep the RGB channels only, then rescale pixels from [0, 255] to [-1, 1],
        # which is the input range the slim Inception networks expect.
        images = images[:, :, :, :3].astype(np.float32)
        images = np.divide(images, 255.)
        images = np.subtract(images, 0.5)
        images = np.multiply(images, 2.0)
        images = tf.convert_to_tensor(images)

        return images

    def model(self):

        # Build the Inception V4 graph on the images passed to the constructor.
        with slim.arg_scope(inception_v4_arg_scope()):
            logits, _ = inception_v4(self.imgs,
                                     num_classes=1001,
                                     is_training=False)
        self.probs = tf.nn.softmax(logits)
        # Restore the pretrained weights into the session supplied by the caller.
        init_fn = slim.assign_from_checkpoint_fn(
            'inception_v4.ckpt', slim.get_model_variables('InceptionV4'))
        init_fn(self.sess)
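A minimal usage sketch for this class, assuming TensorFlow 1.x with tf.contrib.slim, the slim inception_v4 definition importable, and an 'inception_v4.ckpt' checkpoint in the working directory; the placeholder shape and the random stand-in batch are illustrative assumptions, not part of the example above.

# Hedged usage sketch (assumes TF 1.x, slim's inception_v4 on the path,
# and 'inception_v4.ckpt' in the working directory).
import numpy as np
import tensorflow as tf

im_size = inception.im_size                       # 299 for Inception V4
imgs = tf.placeholder(tf.float32, [None, im_size, im_size, 3])

sess = tf.Session()
net = inception(imgs, sess)                       # builds the graph and restores the weights

# Stand-in uint8 batch; preprocess() returns a tensor here, so evaluate it before feeding.
batch = np.random.randint(0, 255, (1, im_size, im_size, 3), dtype=np.uint8)
processed = sess.run(inception.preprocess(batch))

probs = sess.run(net.probs, feed_dict={imgs: processed})
print(inception.names[int(np.argmax(probs[0]))])  # most likely ImageNet label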
Example #2
class inception:

    im_size = inception_v4.default_image_size
    names = imagenet.create_readable_names_for_imagenet_labels()

    def __init__(self, imgs, sess=None, weights=None, reuse=None):
        self.imgs = imgs
        self.sess = sess
        self.model(reuse)
        print('\nCONSTRUCTING\n')

    @classmethod
    def preprocess(cls, images):

        images = images[:, :, :, :3].astype(np.float32)
        images = np.divide(images, 255.)
        images = np.subtract(images, 0.5)
        images = np.multiply(images, 2.0)

        return images

    def model(self, reuse):

        with slim.arg_scope(inception_v4_arg_scope()):
            self.logits, _ = inception_v4(self.imgs,
                                          num_classes=1001,
                                          reuse=reuse,
                                          is_training=False)
        # Drop the background class (index 0) so the probabilities line up
        # with the 1000 ImageNet labels.
        self.probs = tf.nn.softmax(self.logits)[:, 1:]
        init_fn = slim.assign_from_checkpoint_fn(WEIGHTS_FILE,
                                                 slim.get_model_variables())
        init_fn(self.sess)
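The reuse flag is what lets a second copy of the network, built on a different input placeholder, share the variables created by the first instead of failing with a duplicate-variable error. A minimal sketch of that pattern, assuming TF 1.x, the slim inception_v4 definition, and WEIGHTS_FILE pointing at a valid Inception V4 checkpoint; the placeholder names are illustrative.

# Hedged sketch of the reuse pattern (assumes TF 1.x, slim's inception_v4 importable,
# and WEIGHTS_FILE set to an Inception V4 checkpoint path).
import tensorflow as tf

size = inception.im_size
x_clean = tf.placeholder(tf.float32, [None, size, size, 3])
x_other = tf.placeholder(tf.float32, [None, size, size, 3])

sess = tf.Session()
net_a = inception(x_clean, sess)              # first instance creates the InceptionV4 variables
net_b = inception(x_other, sess, reuse=True)  # second instance shares them via reuse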
def predict(image, version='V3'):
    tf.reset_default_graph()
    
    # Process the image 
    raw_image, processed_image = process_image(image)
    print(raw_image.shape)
    class_names = imagenet.create_readable_names_for_imagenet_labels()
    
    # Create a placeholder for the images
    X = tf.placeholder(tf.float32, [None, 299, 299, 3], name="X")
    
    '''
    inception_v3 function returns logits and end_points dictionary
    logits are output of the network before applying softmax activation
    '''
    
    if version.upper() == 'V3':
        print("V3!!")
        model_ckpt_path = INCEPTION_V3_CKPT_PATH
        with tf.contrib.slim.arg_scope(inception_v3.inception_v3_arg_scope()):
            # Set the number of classes and is_training parameter  
            logits, end_points = inception_v3.inception_v3(X, num_classes=1001, is_training=False)
            
    elif version.upper() == 'V4':
        model_ckpt_path = INCEPTION_V4_CKPT_PATH
        with tf.contrib.slim.arg_scope(inception_v4.inception_v4_arg_scope()):
            # Set the number of classes and is_training parameter
            logits, end_points = inception_v4.inception_v4(X, num_classes=1001, is_training=False)

    predictions = end_points.get('Predictions', 'No key named predictions')
    saver = tf.train.Saver()
    
    with tf.Session() as sess:
        print("model_ckpt_path", model_ckpt_path)
        saver.restore(sess, model_ckpt_path)
        prediction_values = predictions.eval({X: processed_image})
        
    try:
        # Add an index to predictions and then sort by probability
        prediction_values = [(i, prediction) for i, prediction in enumerate(prediction_values[0,:])]
        prediction_values = sorted(prediction_values, key=lambda x: x[1], reverse=True)
        
        # Plot the image
        #plot_color_image(raw_image)
        #plt.show()
        print("Using Inception_{} CNN\nPrediction: Probability\n".format(version))
        # Display the image and predictions 
        for i in range(10):
            predicted_class = class_names[prediction_values[i][0]]
            probability = prediction_values[i][1]
            print("{}: {:.2f}%".format(predicted_class, probability*100))
    
    # If the predictions do not come out right
    except Exception:
        print(predictions)
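An illustrative call, assuming INCEPTION_V3_CKPT_PATH and INCEPTION_V4_CKPT_PATH already point at extracted slim checkpoints and that process_image (defined elsewhere) accepts an image path and returns the raw image plus a (1, 299, 299, 3) preprocessed batch; the paths below are placeholders, not the author's.

# Illustrative only: the checkpoint constants and image path are assumptions.
INCEPTION_V3_CKPT_PATH = 'checkpoints/inception_v3.ckpt'
INCEPTION_V4_CKPT_PATH = 'checkpoints/inception_v4.ckpt'

predict('images/example.jpg', version='V3')   # prints the top-10 labels with probabilities
predict('images/example.jpg', version='V4')   # same image through Inception V4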
def showImage(np_image_raw, np_probabilities):
    names = imagenet.create_readable_names_for_imagenet_labels()
    np_probability = np_probabilities[0, :]
    sorted_inds = [j[0] for j in sorted(enumerate(-np_probability), key=lambda x:x[1])]
    
    plt.figure()
    plt.imshow(np_image_raw.astype(np.uint8))
    plt.axis('off')
    plt.show()

    for k in range(5):
        index = sorted_inds[k]
        # Shift the class-name index by one: names[0] is the background class.
        print('Probability %0.2f%% => [%s]' % (np_probability[index] * 100, names[index + 1]))
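A hedged sketch of feeding this helper, assuming the class from Example #2 is in scope (its probs tensor already drops the background class, which is why showImage shifts the label index by one) and using a random stand-in batch in place of a real photo.

# Hedged wiring example (assumes the Example #2 class and its checkpoint are available).
import numpy as np
import tensorflow as tf

size = inception.im_size
x = tf.placeholder(tf.float32, [None, size, size, 3])
sess = tf.Session()
net = inception(x, sess)

raw = np.random.randint(0, 255, (1, size, size, 3), dtype=np.uint8)   # stand-in image batch
np_probabilities = sess.run(net.probs, feed_dict={x: inception.preprocess(raw)})

showImage(raw[0], np_probabilities)   # displays the image and prints the top-5 labels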
Example #5
        '../../poggio-urop-data/ILSVRC2-12_img_val/ILSVRC2012_val_00000002.JPEG',
        mode='RGB')
    image = imresize(image, (image_size, image_size))
    image2 = imresize(image2, (image_size, image_size))

    imgs = tf.placeholder(tf.float32, [None, image_size, image_size, 3])

    images = np.array([image, image2])
    sess = tf.Session()
    network = model(imgs, sess)
    processed_images = inception.preprocess(images)
    probabilities = np.array(
        sess.run(network.probs, feed_dict={network.imgs: processed_images}))

    #     with slim.arg_scope(inception_v4_arg_scope()):
    #       logits, _ = inception_v4(processed_images, num_classes=1001, is_training=False)
    #     probabilities = tf.nn.softmax(logits)
    #
    #     init_fn = slim.assign_from_checkpoint_fn('inception_v4.ckpt', slim.get_model_variables('InceptionV4'))
    #
    #     with tf.Session() as sess:
    #       init_fn(sess)
    #       probabilities = sess.run(probabilities, feed_dict={inception.imgs: images})

    names = imagenet.create_readable_names_for_imagenet_labels()
    print(probabilities.shape)
    for prob in probabilities:
        inds = np.argsort(prob)[::-1]
        for j in range(5):
            print(names[inds[j]])
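Example #5 is cut off before the two test images are read; given the mode='RGB' argument and the later imresize calls, the loading step was presumably something like the sketch below (imread/imresize come from an older SciPy with Pillow installed, the paths are placeholders, and one of the two imread calls ends with the ILSVRC validation path visible at the top of the snippet).

# Assumed prelude for the truncated snippet above; paths are placeholders.
from scipy.misc import imread, imresize

image_size = inception.im_size   # 299 for Inception V4
image = imread('path/to/first_image.JPEG', mode='RGB')
image2 = imread('path/to/second_image.JPEG', mode='RGB')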
Example #6
# Map the pixel-space perturbation budget (0-255 scale) onto the [-1, 1] range of the Inception input.
eps = 2.0 * max_epsilon / 255.0
batch_shape = (batch_size, image_height, image_width, 3)
num_classes = 1001

categories = pd.read_csv(
    "../preReq/nips-2017-adversarial-learning-development-set/categories.csv")
image_classes = pd.read_csv(
    "../preReq/nips-2017-adversarial-learning-development-set/images.csv")

filterCutOff = 0.9
#power = 10
samples = 100
e = math.e

nameV3 = create_readable_names_for_imagenet_labels()


def best_fit_distribution(data, bins=1000, ax=None):
    """Model data by finding best fit distribution to data"""
    # Get histogram of original data
    y, x = np.histogram(data, bins=bins, density=True)
    x = (x + np.roll(x, -1))[:-1] / 2.0  # convert bin edges to bin centers

    # Distributions to check
    DISTRIBUTIONS = [
        st.cauchy, st.dweibull, st.gennorm
        #st.alpha,st.anglit,st.arcsine,st.beta,st.betaprime,st.bradford,st.burr,st.cauchy,st.chi,st.chi2,st.cosine, #cauchy
        #st.dgamma,st.dweibull,st.erlang,st.expon,st.exponnorm,st.exponweib,st.exponpow,st.f,st.fatiguelife,st.fisk, #dweibull
        #st.foldcauchy,st.foldnorm,st.frechet_r,st.frechet_l,st.genlogistic,st.genpareto,st.gennorm,st.genexpon, #gennorm
        #st.genextreme,st.gausshyper,st.gamma,st.gengamma,st.genhalflogistic,st.gilbrat,st.gompertz,st.gumbel_r,