def initialize():
    # Different losses need different methods to create batches
    if FLAGS.LossType == "Contrastive_Loss":
        method = "pair"
    elif FLAGS.LossType in ("NpairLoss", "AngularLoss", "NCA_loss"):
        method = "n_pairs_mc"
    elif FLAGS.LossType == "Triplet":
        method = "triplet"
    else:
        method = "clustering"
    print("method: " + method)

    # Create the stream of data from the dataset
    streams = data_provider.get_streams(
        FLAGS.batch_size, FLAGS.dataSet, method,
        crop_size=FLAGS.default_image_size)

    # L2 weight-decay regularizer; its strength is controlled by a flag
    regularizer = layers.l2_regularizer(FLAGS.Regular_factor)

    if FLAGS.SaveVal:
        nn_Ops.create_path(_time)
    summary_writer = tf.compat.v1.summary.FileWriter(LOGDIR)
    return streams, summary_writer
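# --- Sketch (not in the original code): the loss-type dispatch above,
# rewritten as a lookup table. This only illustrates an equivalent, more
# compact mapping from FLAGS.LossType to a batching method; the fallback
# mirrors the else-branch in initialize().
_BATCH_METHOD_BY_LOSS = {
    "Contrastive_Loss": "pair",
    "NpairLoss": "n_pairs_mc",
    "AngularLoss": "n_pairs_mc",
    "NCA_loss": "n_pairs_mc",
    "Triplet": "triplet",
}

def batching_method(loss_type):
    # Any loss not listed above falls back to clustering-style batches
    return _BATCH_METHOD_BY_LOSS.get(loss_type, "clustering")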
# Allow GPU memory to grow on demand instead of pre-allocating all of it
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.compat.v1.Session(config=config)
# NOTE: as_default() returns a context manager; without a `with` block it
# does not actually install this session as the default one
sess.as_default()

# Creating streams from the dataset
streams = data_provider.get_streams(BATCH_SIZE, DATASET, crop_size=IMAGE_SIZE)
stream_train, stream_train_eval, stream_test = streams
LEN_TRAIN = stream_train.data_stream.dataset.num_examples
MAX_ITER = int(LEN_TRAIN / BATCH_SIZE)

# Check system time and derive the TensorBoard log directory from it
_time = time.strftime('%m-%d-%H-%M', time.localtime(time.time()))
LOGDIR = './tensorboard_log/' + DATASET + '/' + _time + '/'
nn_Ops.create_path(_time)

# tfd = tfp.distributions
# prior = tfd.Independent(tfd.Normal(loc=tf.zeros(EMBEDDING_SIZE), scale=1),
#                         reinterpreted_batch_ndims=1)


def samplingGaussian(z_mean, z_log_var):
    """Reparameterization trick by sampling from an isotropic unit Gaussian.

    # Arguments
        z_mean, z_log_var (tensor): mean and log of variance of Q(z|X)

    # Returns
        z (tensor): sampled latent vector
    """
    batch = K.shape(z_mean)[0]
    dim = K.int_shape(z_mean)[1]
    # z = mean + sigma * epsilon with epsilon ~ N(0, I); exp(0.5 * log_var)
    # recovers the standard deviation from the log-variance
    epsilon = K.random_normal(shape=(batch, dim))
    return z_mean + K.exp(0.5 * z_log_var) * epsilon
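# --- Usage sketch (assumption, not in the original file): wiring
# samplingGaussian into a Keras-style stochastic embedding head. The input
# width (1024) and the two Dense heads are hypothetical; only EMBEDDING_SIZE
# and samplingGaussian come from the code above.
from tensorflow.keras.layers import Dense, Input, Lambda

features = Input(shape=(1024,))  # hypothetical feature width
z_mean = Dense(EMBEDDING_SIZE, name='z_mean')(features)
z_log_var = Dense(EMBEDDING_SIZE, name='z_log_var')(features)
# Lambda unpacks the [mean, log_var] pair and draws one reparameterized sample
z = Lambda(lambda args: samplingGaussian(*args), name='z')([z_mean, z_log_var])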
# ImageNet channel means; reshape to (1, 1, 1, 3) for NHWC broadcasting and
# reorder the channels from RGB to BGR
image_mean = np.array([123, 117, 104], dtype=np.float32)  # RGB
image_mean = image_mean[None, None, None, [2, 1, 0]]

# K values evaluated for Recall@K (the second list is for the products set)
neighbours = [1, 2, 4, 8, 16, 32]
products_neighbours = [1, 10, 1000]

############## DATASET GENERATOR #############################
streams = data_provider.get_streams(BATCH_SIZE, DATASET, "n_pairs_mc",
                                    crop_size=IMAGE_SIZE)
stream_train, stream_train_eval, stream_test = streams

# Timestamped TensorBoard log directory (compute the timestamp once so the
# log path and the created directory cannot disagree across a minute boundary)
_time = time.strftime('%m-%d-%H-%M', time.localtime(time.time()))
LOGDIR = './tensorboard_log/' + DATASET + '/' + _time + '/'
nn_Ops.create_path(_time)

# Isotropic unit-Gaussian prior over the embedding space
tfd = tfp.distributions
prior = tfd.Independent(tfd.Normal(loc=tf.zeros(EMBEDDING_SIZE), scale=1),
                        reinterpreted_batch_ndims=1)


def mysampling(z_mean, z_log_var):
    """Reparameterization trick by sampling from an isotropic unit Gaussian.

    # Arguments
        z_mean, z_log_var (tensor): mean and log of variance of Q(z|X)

    # Returns
        z (tensor): sampled latent vector
    """
    batch = K.shape(z_mean)[0]
    dim = K.int_shape(z_mean)[1]
    # epsilon ~ N(0, I), scaled by the standard deviation exp(0.5 * log_var)
    epsilon = K.random_normal(shape=(batch, dim))
    return z_mean + K.exp(0.5 * z_log_var) * epsilon
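# --- Sketch (assumption, not in the original file): the usual way a prior
# like the one above enters training, as a KL penalty between the encoder
# posterior N(z_mean, exp(z_log_var)) and the unit-Gaussian prior.
# z_mean / z_log_var are hypothetical encoder outputs of shape
# (batch, EMBEDDING_SIZE).
posterior = tfd.Independent(
    tfd.Normal(loc=z_mean, scale=tf.exp(0.5 * z_log_var)),
    reinterpreted_batch_ndims=1)
# Analytic KL(posterior || prior), averaged over the batch
kl_loss = tf.reduce_mean(tfd.kl_divergence(posterior, prior))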