def generate_output_file(gtype, outfile):
    """Render and save generated code for each requested output kind.

    Parameters
    ----------
    gtype : iterable of str
        Output kind characters: 'c' (C source from option.c.in),
        'h' (C header from option.h.in), 'e' (raw ``gd.codef``).
    outfile : str
        Destination handle/path forwarded to ``save_result``.

    Raises
    ------
    ValueError
        If ``gtype`` contains an unrecognized kind character.
    """
    gd = GenData()
    for kind in gtype:
        if kind == 'c':
            # Fill the C-source template with the generated code body.
            output = get_file_data("option.c.in") % gd.codec
        elif kind == 'h':
            output = get_file_data("option.h.in") % gd.codeh
        elif kind == 'e':
            output = gd.codef
        else:
            # Was `assert (False)`: asserts are stripped under `python -O`,
            # silently falling through on bad input. Raise explicitly.
            raise ValueError("unknown generation type: %r" % (kind,))
        # gd.codef may legitimately be None; only save real output.
        if output is not None:
            save_result(outfile, output)
def generate_data(self, generate_size, sample_size, budget=100):
    """Build one GenData per seen class and store them on the instance.

    Sets ``self.__gen_data_list`` (one GenData per seen class, each with
    negative and positive data generated over the unit hypercube) and
    ``self.__plus_label`` (the class number of each GenData, in order).

    Parameters
    ----------
    generate_size : int
        Number of samples each GenData should generate (forwarded).
    sample_size : int
        Number of original rows taken per class (prefix slice).
    budget : int, optional
        Generation budget forwarded to GenData. Defaults to 100.
    """
    gen_data_list = []
    classfilter = self.__classfilter
    # NOTE(review): each category is independent, so this loop could be
    # parallelized per class (original comment said the same).
    for class_num in classfilter.getSeenClass():
        original_data = classfilter.getDatabyLabel()[str(class_num)]
        gen_data = GenData(original_data[:sample_size, ],
                           class_num=class_num,
                           generate_size=generate_size,
                           classifier=self.__classifier,
                           budget=budget)
        # Generate negative then positive samples over [0, 1] per dimension.
        gen_data.generate_negative_data(dim_range=[0, 1])
        gen_data.generate_positive_data(dim_range=[0, 1])
        gen_data_list.append(gen_data)
    self.__gen_data_list = gen_data_list
    # Was an index loop over range(len(...)); collect the labels directly.
    self.__plus_label = [gd.getClassNum() for gd in gen_data_list]
import os
import tensorflow as tf

# NOTE(review): `os` was used below (os.path.join) but never imported in
# this snippet -- added above. `Model` and `GenData` are also used but not
# imported here; presumably `from model import Model` / `from gen_data
# import GenData` exist elsewhere in the file -- confirm.

# Training configuration constants.
LOGS_Path = "./logs/"             # TensorBoard log root (name kept for compat)
CHECKPOINTS_PATH = './checkpoints2/'
BATCH_SIZE = 8
LEARNING_RATE = .0001
BETA = .75                        # loss weighting used in the experiment name
EXP_NAME = f"beta_{BETA}"         # per-experiment log subdirectory

if __name__ == "__main__":
    model = Model()
    data = GenData('./optimization-ii-project-3/')
    files_list = data.files_list

    # TF1-style interactive session on a fresh graph; log device placement.
    sess = tf.InteractiveSession(graph=tf.Graph(),
                                 config=tf.ConfigProto(log_device_placement=True))

    # 224x224 3-channel image batches: the secret to hide and the cover.
    secret_tensor = tf.placeholder(shape=[None, 224, 224, 3], dtype=tf.float32,
                                   name="input_prep")
    cover_tensor = tf.placeholder(shape=[None, 224, 224, 3], dtype=tf.float32,
                                  name="input_hide")
    global_step_tensor = tf.Variable(0, trainable=False, name='global_step')

    train_op, summary_op, loss_op, secret_loss_op, cover_loss_op = \
        model.prepare_training_graph(secret_tensor, cover_tensor,
                                     global_step_tensor)

    # Writer is created after the training graph so sess.graph is complete.
    writer = tf.summary.FileWriter(os.path.join(LOGS_Path, EXP_NAME), sess.graph)

    test_op, test_loss_op, test_secret_loss_op, test_cover_loss_op = \
        model.prepare_test_graph(secret_tensor, cover_tensor)

    # Deployment path: operate on an already-covered image.
    covered_tensor = tf.placeholder(shape=[None, 224, 224, 3], dtype=tf.float32,
                                    name="deploy_covered")
    deploy_hide_image_op, deploy_reveal_image_op = \
        model.prepare_deployment_graph(secret_tensor, cover_tensor,
                                       covered_tensor)
# Make the parent directory importable so the `common` package resolves.
import sys
sys.path.append("..")
from common import config
# Must be set before the other `common` imports/use so GPU mode is active.
config.GPU = True
from gen_data import GenData
from common.optimizer import Adam
from common.trainer import Trainer
from eval_tools import eval_seq2seq
#from seq2seq import Seq2seq
#from seq2seq.peeky_seq2seq import PeekySeq2seq
from attention.attention_bi_seq2seq import AttentionBiSeq2seq
from common.gpu import to_cpu, to_gpu

d = GenData()
#d.unzip_and_gen_data()
# Load the question/answer corpus and its per-side vocabularies.
(x_train, t_train), (x_test, t_test) = d.load_corpus(file_name="realdata.txt")
word_to_id_q, word_to_id_a, id_to_word_q, id_to_word_a = d.get_vocab()

# Reverse the input sequences (a trick commonly used in seq2seq training).
is_reverse = True
if is_reverse:
    x_train, x_test = x_train[:, ::-1], x_test[:, ::-1]

# Hyperparameter settings
vocab_size_x = len(word_to_id_q)
vocab_size_t = len(word_to_id_a)
wordvec_size = 1000
hidden_size = 1000
batch_size = 50
max_epoch = 20
max_grad = 5.0
# -*- coding: utf-8 -*-
import os

import numpy as np
import tensorflow as tf
from matplotlib import pyplot as plt  # moved up from the file bottom (PEP 8)

from model import Model
from gen_data import GenData

# Rebuild the hiding/reveal graph and restore trained weights for inference.
model = Model()
data = GenData('./optimization-ii-project-3/')
files_list = data.files_list

# 224x224 3-channel image batches: the secret to hide and the cover carrier.
secret_tensor = tf.placeholder(shape=[None, 224, 224, 3], dtype=tf.float32,
                               name="input_prep")
cover_tensor = tf.placeholder(shape=[None, 224, 224, 3], dtype=tf.float32,
                              name="input_hide")
global_step_tensor = tf.Variable(0, trainable=False, name='global_step')

# Training and test graphs are built so the checkpoint's variables all exist
# before Saver.restore below.
train_op, summary_op, loss_op, secret_loss_op, cover_loss_op = \
    model.prepare_training_graph(secret_tensor, cover_tensor,
                                 global_step_tensor)
test_op, test_loss_op, test_secret_loss_op, test_cover_loss_op = \
    model.prepare_test_graph(secret_tensor, cover_tensor)

# Deployment ops: encode a secret into a cover, or decode a covered image.
covered_tensor = tf.placeholder(shape=[None, 224, 224, 3], dtype=tf.float32,
                                name="deploy_covered")
deploy_hide_image_op = model.encode(secret_tensor, cover_tensor)
deploy_reveal_image_op = model.decode(covered_tensor)

saver = tf.train.Saver()
sess = tf.InteractiveSession(config=tf.ConfigProto(log_device_placement=True))
# NOTE(review): hard-coded checkpoint path -- confirm it exists at run time.
saver.restore(sess, './checkpoints/beta_0.750.1396-2101')