Example #1
import copy

import numpy as np
from tqdm import tqdm

from datasets import data_provider
from FLAGS import *


def create_embedding_visual_batch(logdir, batch_size=256):
    streams = data_provider.get_streams(1, FLAGS.dataSet,
                                        'clustering', crop_size=FLAGS.default_image_size)
    stream_train, stream_train_eval, stream_test = streams
    count = 0  # number of samples collected so far ('len' would shadow the builtin)
    for batch in tqdm(copy.copy(stream_train_eval.get_epoch_iterator())):
        # Unpack images and labels from the batch
        x_batch_data, c_batch_data = batch
        # Reorder channels BGR -> RGB and transpose [B, C, H, W] -> [B, H, W, C]
        x_batch_data = np.transpose(x_batch_data[:, [2, 1, 0], :, :], (0, 2, 3, 1))
        # Subtract the per-channel dataset mean (image_mean is assumed to be a
        # module-level constant defined elsewhere in the source file)
        x_batch_data = x_batch_data - image_mean
        Label_raw = np.reshape(c_batch_data, [1])
        if Label_raw[0] < 256:  # index the length-1 array to compare a scalar
            print(Label_raw)
            if count == 0:
                images = x_batch_data
                labels = Label_raw
            else:
                images = np.concatenate([images, x_batch_data], axis=0)
                labels = np.concatenate([labels, Label_raw], axis=0)
            count += 1
        if count >= batch_size:
            print(labels)
            # Write one label per line; TensorBoard's embedding projector reads this file
            with open(logdir + 'metadata.tsv', 'w') as f:
                for i in range(batch_size):
                    f.write('{}\n'.format(labels[i]))
            return images, labels
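
The metadata.tsv written above is the label file read by the TensorBoard embedding projector. A minimal, hedged sketch of the wiring follows; the logdir value and the 'embedding' variable name are assumptions for illustration, not part of the source:

import tensorflow as tf
from tensorflow.contrib.tensorboard.plugins import projector

logdir = './tensorboard_log/embedding/'
images, labels = create_embedding_visual_batch(logdir)
config = projector.ProjectorConfig()
emb = config.embeddings.add()
emb.tensor_name = 'embedding'        # name of the embedding Variable in the graph
emb.metadata_path = 'metadata.tsv'   # resolved relative to logdir
projector.visualize_embeddings(tf.summary.FileWriter(logdir), config)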
Example #2
import os
import time

import numpy as np
import tensorflow as tf
import keras.backend as K
from tqdm import tqdm

from datasets import data_provider  # used below; import shown in Example #3
from lib import nn_Ops              # used below; import shown in Example #3
from parameters import *

# A Session object encapsulates the environment in which Operation objects are executed, and Tensor objects are evaluated.
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.compat.v1.Session(config=config)
# as_default() only takes effect inside a `with` block; register the session
# with Keras so it is used for ops created later.
K.set_session(sess)

# Create the train/eval/test streams from the dataset
streams = data_provider.get_streams(BATCH_SIZE, DATASET, crop_size=IMAGE_SIZE)
stream_train, stream_train_eval, stream_test = streams

LEN_TRAIN = stream_train.data_stream.dataset.num_examples
MAX_ITER = int(LEN_TRAIN / BATCH_SIZE)

# Timestamp the run and build a log directory for TensorBoard
_time = time.strftime('%m-%d-%H-%M', time.localtime(time.time()))
LOGDIR = './tensorboard_log/' + DATASET + '/' + _time + '/'
nn_Ops.create_path(_time)

# tfd = tfp.distributions
# prior = tfd.Independent(tfd.Normal(loc=tf.zeros(EMBEDDING_SIZE), scale=1),reinterpreted_batch_ndims=1)


def samplingGaussian(z_mean, z_log_var):
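    # The body is truncated in the source; what follows is a hedged sketch of
    # the standard VAE reparameterization trick, assuming EMBEDDING_SIZE comes
    # from `parameters`: draw epsilon ~ N(0, I), then shift and scale it by the
    # predicted mean and log-variance.
    epsilon = K.random_normal(shape=(K.shape(z_mean)[0], EMBEDDING_SIZE))
    return z_mean + K.exp(0.5 * z_log_var) * epsilon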
Example #3
from datasets import data_provider
from lib import GoogleNet_Model, Loss_ops, nn_Ops, Embedding_Visualization, HDML, evaluation
import copy
from tqdm import tqdm
from tensorflow.contrib import layers
from FLAGS import *

print("applyHDML:{}".format(FLAGS.Apply_HDML))
FLAGS.Apply_HDML = False
if FLAGS.Apply_HDML:
    print("HDML is true")
else:
    print("HDML is false")
# Create the data streams from the dataset
streams = data_provider.get_streams(FLAGS.batch_size,
                                    FLAGS.dataSet,
                                    method,
                                    crop_size=FLAGS.default_image_size)
stream_train, stream_train_eval, stream_test = streams

streams2 = data_provider.get_streams(FLAGS.batch_size,
                                     'iconTest',
                                     method,
                                     crop_size=FLAGS.default_image_size)
stream_train2, stream_train_eval2, stream_test2 = streams2

print("print train batch")
for batch in copy.copy(stream_train.get_epoch_iterator()):
    x_batch_data, c_batch_data = batch
    print(x_batch_data.shape)
    print(x_batch_data[0])
    print(c_batch_data[0], c_batch_data[-1])
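
Since the batches arrive channel-first in BGR order (see the transpose in Example #1), here is a short hedged sketch for displaying one image; matplotlib is an assumption, not imported by the source:

import numpy as np
import matplotlib.pyplot as plt

# Take one batch and reorder BGR [B, C, H, W] -> RGB [B, H, W, C],
# the same reordering used in Example #1
x_batch_data, c_batch_data = next(copy.copy(stream_train.get_epoch_iterator()))
x_rgb = np.transpose(x_batch_data[:, [2, 1, 0], :, :], (0, 2, 3, 1))
plt.imshow(x_rgb[0].astype(np.uint8))
plt.title('class {}'.format(int(c_batch_data[0])))
plt.show()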