Example #1
    def __init__(self, data_dir, batch_size, epochs, save_path, learning_rate,
                 split):
        dev = d()
        if not dev.cuda:
            self.gen = generator()
            self.disc = discriminator()
        else:
            self.gen = torch.nn.DataParallel(generator().cuda())
            self.disc = torch.nn.DataParallel(discriminator().cuda())

        self.dataset = CUBDataset(data_dir, split=split)
        self.dataLoader = DataLoader(dataset=self.dataset,
                                     batch_size=batch_size,
                                     shuffle=True,
                                     pin_memory=True)

        self.optimD = torch.optim.Adam(self.disc.parameters(),
                                       lr=learning_rate,
                                       betas=(0.5, 0.99),
                                       amsgrad=True)
        self.optimG = torch.optim.Adam(self.gen.parameters(),
                                       lr=learning_rate,
                                       betas=(0.5, 0.99),
                                       amsgrad=True)

        self.model_dir = save_path
        self.epochs = epochs
        self.batch_size = batch_size
        self.train()
Example #2
def train_model(model, db, output=None, epochs=300):
    """
    Train an onset model on a database
    :param model: model to train
    :param db: database to train on
    :param output: output filename of the best trained model
    :param epochs: number of training epochs
    :return: train history
    """
    if output is None:
        dbname = db.name()
        output = dbname[:dbname.rindex('.')] + '_onset.hdf5'

    checkpointer = ModelCheckpoint(filepath=output,
                                   verbose=1,
                                   save_best_only=True)

    train_group = ['train', 'mel']
    xgroup = db.get_subgroup(train_group)
    ygroup = db.get_subgroup(['train', 'onset_labels'])

    step = min(10000, db.get_total_points(train_group))

    frac_val = 0.2
    frac = 1. - frac_val

    nb_steps, _ = generator.get_nb_steps(xgroup,
                                         step,
                                         frac,
                                         dict_arrays=True)
    nb_steps_val, _ = generator.get_nb_steps(xgroup,
                                             step,
                                             frac_val,
                                             shift='end',
                                             dict_arrays=True)

    print('step: ', step)
    print('nb_steps: ', nb_steps, nb_steps_val)

    hist = model.fit_generator(
        generator.generator((xgroup, ygroup),
                            nb=step,
                            frac=frac,
                            dict_arrays=True),
        steps_per_epoch=50,  # could also use max(4, nb_steps)
        max_queue_size=1,
        validation_data=generator.generator((xgroup, ygroup),
                                            nb=step,
                                            frac=frac_val,
                                            shift='end',
                                            dict_arrays=True),
        validation_steps=nb_steps_val,
        epochs=epochs,
        callbacks=[checkpointer],
        verbose=2)

    return hist.history
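# A minimal usage sketch; the model and database constructors below are
# illustrative assumptions, not part of this example:
#   model = build_onset_model()
#   history = train_model(model, OnsetDatabase('songs.hdf5'), epochs=50)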
Example #4
def article_env_init(handler, handler_paras, handler_json):
    handler_json['article_id'] = handler_paras['article_id']
    usr = handler.current_user
    env = generator(handler_paras['env_id'], handler_paras['env_type'])
    handler.env = env
    father = generator(handler_paras['father_id'],
                       handler_paras['father_type'])
    ref_comment = None
    if handler_paras['article_type'] == 'comment':
        if father is None:
            return 18  #Invalid father
        ref_comment = generator(handler_paras['ref_comment'],
                                handler_paras['article_type'])
    handler.father = None
    handler.ref_comment = None
    if father is not None:
        handler.env = father.env
        handler.father = father
    elif not handler.env:
        return 14  #Invalid Env Arguments
    if ref_comment is not None:
        handler.ref_comment = ref_comment
    if not Article.is_valid_id(handler_paras['article_id']):
        acls = cls_gen(handler_paras['article_type'])
        if not acls or not issubclass(acls, Article):
            return 11  #Invalid Article Type
        if not issubclass(acls, Comment) and \
                not test_auth(handler.env.authority_verify(usr), A_WRITE):
            return 12  #Permission Denied
        handler.article_obj = acls()
        if handler.article_obj is None:
            return 13  #Article Create Failed
        handler.article_obj.set_propertys(env=handler.env, author=usr)
        usr.add_to_drafts(handler.article_obj)
        handler_json.as_new(handler.article_obj)
        #new Article Created
    else:
        handler.article_obj = generator(handler_paras['article_id'],
                                        handler_paras['article_type'])
        if not isinstance(handler.article_obj, Article):
            return 4  #Article Not Exist
        if handler.article_obj.env_obj_info != env.obj_info:
            return 15  #Invalid Env
        if not test_auth(handler.article_obj.authority_verify(usr, env),
                         A_WRITE):
            return 16  #WRITE Permission Denied
    if ref_comment is not None:
        handler.article_obj.ref_comment = ref_comment
    if father is not None:
        if not handler.article_obj.father_id:
            handler.article_obj.father = handler.father
    return 0
Example #5
def render():
	regex_variables = [('digit', r'[0-9]'), ('letter', r'[a-zA-Z]')]
	tokens = [
		('constant', r'{digit}+(\.{digit}+)?|\.{digit}+', 
		'''output, _ := strconv.ParseFloat(input, 32)''', ''),
		('identifier', r'{letter}+', '''output = input''', ''),
		('whitespace', r'[ \t\n]', '', 'IGNORE'), #Throw away whitespace
		('plus', r'\+', '', ''),
		('minus', r'-', '', ''),
		('times', r'\*', '', ''),
		('divide', r'/', '', ''),
		]
	generator.generator(regex_variables, tokens).build()
Example #6
def Beginer(longer_field, counter_fields, numeric, field_separate, line_separate, counter_lines, output):
    if counter_fields is None:
        counter_fields = int(random.uniform(1, 11))
    if field_separate is None:
        field_separate = ' '
    if line_separate is None:
        line_separate = '\n'
    if counter_lines is None:
        counter_lines = int(random.uniform(1, 11))
    if output is None:
        output = "input_GEN.txt"
    if longer_field is None:
        longer_field = int(random.uniform(1, 11))

    generator.generator(longer_field, counter_fields, numeric, field_separate, line_separate, counter_lines, output)
Example #7
    def __init__(self, n, m, type):
        self.n = n
        self.m = m
        d = generator.generator(type, n, m)
        self.maze = d.maze
        self.start = d.start
        self.end = d.end
Example #8
    def __init__(self,
                 channels: int = 1,
                 z_dim: int = 100,
                 lr: float = 0.0002,
                 b1: float = 0.5,
                 b2: float = 0.999,
                 batch_size: int = 64,
                 input_size: int = 28,
                 class_num: int = 10,
                 **kwargs):

        super().__init__()
        self.channels = channels
        self.z_dim = z_dim
        self.lr = lr
        self.b1 = b1
        self.b2 = b2
        self.batch_size = batch_size
        self.input_size = input_size
        self.class_num = class_num
        self.sample_num = self.class_num**2

        # networks init
        self.G = generator(input_dim=self.z_dim,
                           output_dim=self.channels,
                           input_size=self.input_size,
                           class_num=self.class_num)
        self.D = discriminator(input_dim=self.channels,
                               output_dim=1,
                               input_size=self.input_size,
                               class_num=self.class_num)
        self.G.cuda()
        self.D.cuda()
Example #9
    def post(self):
        handler_para = RecArticleToBookPara(self)
        handler_json = RecArticleToBookJson(self)
        usr = self.current_user
        book = Catalog.by_id(handler_para['book_id'])
        if book is None:
            handler_json.by_status(2)
            handler_json.write()
            return  #book not exist
        article_obj = generator(handler_para['article_id'],
                                handler_para['article_type'])
        if article_obj is None:
            handler_json.by_status(1)
            handler_json.write()
            return  #article not exist
        rr = article_obj.add_to_catalog(book, handler_para['node_id'])
        if rr is None:
            handler_json.by_status(3)
            handler_json.write()
            return  #section not exist
        handler_json['book_title'] = book.name
        handler_json['chapter_title'] = book.get_node_dict(
            handler_para['node_id'])['title']
        handler_json['article_title'] = article_obj.name
        handler_json['relation_id'] = rr.uid
        handler_json.by_status(0)
        handler_json.write()
        return  #0
Example #10
def main():
    text = ("Gazam ad ad et nullam intervalla nulli honorem itidemque "
            "Caesaream et exaedificavit Neapolim Ascalonem sibi nitidis "
            "aemulas: Herodes ad est et principis abundans per exstructas "
            "abundans nitidis Gazam habens sed.")

    print(generator(text, " ", "unique"))
    print(generator(text, " ", "ordered"))
    print(generator(text, " ", "shuffle"))
Example #11
    def __init__(self):
        self.clock = cl.Clock(self)
        self.week = cl.Week(self)
        self.calendar = cl.SecularCalendar()
        self.productionAI = ai.ProductionAI(self)
        self.jobPoster = ai.JobPoster(self)
        self.hirer = ai.Hirer(self)
        self.firer = ai.Firer(self)
        self.startupAI = ai.StartupAI()
        self.builder = ai.Builder(self)
        self.salaryPayer = ai.SalaryPayer(self)
        ourGen = generator.generator(self)
        self.ourWorld = ourGen.generateWorld(10000, 10, 10)
        Jonestown = d.getLocality()[0]
        # player character setup
        address = Jonestown.find_property()
        yourHome = u.House(Jonestown, address)
        Jonestown.claim_node(address, yourHome)
        self.char = ai.Character(self, "Markemus", "Williamson", 0, Jonestown,
                                 yourHome,
                                 d.getReligions()[0])
        yourHome.addTenant(self.char)
        spouse = p.People(self, "Susan", "Spinster", 1, Jonestown, yourHome,
                          d.getReligions()[0])
        yourHome.addTenant(spouse)
        self.char.addCapital(10000)
        # generate relationships and institutions
        ourGen.makeSpouses()
        ourGen.makeFriends()
        ourGen.makeBosses()
        ourGen.makeChurches(Jonestown)
        ourGen.assignChurches()
        self.gui = gui.gui(self.char)
Example #12
def load_file(datapath, mode=tf.estimator.ModeKeys.PREDICT):
    g = gen.generator(datapath)
    ds = tf.data.Dataset.from_generator(g, (tf.int8, tf.int8))
    value = ds.make_one_shot_iterator().get_next()
    data_sess = tf.Session()

    return data_sess, value
Example #14
    def load_model(self, path, epoch, cuda):
        if not cuda:
            netG = generator()
            netD = discriminator()
        else:
            netG = torch.nn.DataParallel(generator().cuda())
            netD = torch.nn.DataParallel(discriminator().cuda())
        netD.load_state_dict(torch.load(path + '/Discriminator_{0}.pth'.format(epoch)))
        netG.load_state_dict(torch.load(path + '/Generator_{0}.pth'.format(epoch)))
        netD.train()
        netG.train()
        optimD = torch.optim.Adam(netD.parameters(), lr=0.0002, betas=(0.5, 0.99), amsgrad=True)
        optimG = torch.optim.Adam(netG.parameters(), lr=0.0002, betas=(0.5, 0.99), amsgrad=True)
        optimG.load_state_dict(torch.load(path + '/Generator_Optimizer_{0}.pth'.format(epoch)))
        optimD.load_state_dict(torch.load(path + '/Discriminator_Optimizer_{0}.pth'.format(epoch)))
        return netD, netG, optimD, optimG
Example #15
def train():
    # load_files()
    batch_size = 12
    from replay_memory import PrioritisedReplayMemory
    memory = PrioritisedReplayMemory(capacity=batch_size * 100, e=0.01)
    gen = generator(x_train, y_train, memory, batch_size=batch_size)
    model = DilatedCNN()

    if args.load:
        print("loading model")
        model.load()

    training_cycle(model, gen, memory, 1e-2, 200)

    for i in range(7):
        training_cycle(model, gen, memory, 1e-3, 1000)
        model.save()

    for i in range(5):
        training_cycle(model, gen, memory, 1e-4, 1000)
        model.save()

    for i in range(5):
        training_cycle(model, gen, memory, 5e-5, 1000)
        model.save()

    for i in range(5):
        training_cycle(model, gen, memory, 1e-5, 1000)
        model.save()
Example #16
def generate_instance(seed, n):
    # initiating class object with seed
    rng = generator(seed)

    # 2D list (list of n 3-item lists [[0, 0, 0], [0, 0, 0]...])
    data = [[0 for _ in range(3)] for _ in range(n)]

    # Sum of all execution times
    A = 0
    for x in range(n):
        # indexes
        data[x][0] = x + 1
        # generating execution times
        data[x][2] = rng.nextInt(1, 29)
        # summing execution times
        A += data[x][2]

    for y in range(n):
        # generating preparation times
        data[y][1] = rng.nextInt(1, A)

    # lists to make printing data easier
    nr, r, p = [], [], []

    for z in range(n):
        nr.append(data[z][0])
        r.append(data[z][1])
        p.append(data[z][2])
    print(f"nr: {nr}")
    print(f"r: {r}")
    print(f"p: {p}\n")

    # return data to allow its further processing
    return data
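# Worked mini-instance (illustrative): for n = 2, if the drawn execution
# times are p = [5, 7], then A = 12 and each preparation time is drawn by
# rng.nextInt(1, 12).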
Example #17
    def build_model(self, args):

        m0, m, self.dm, spacing, shape, origin = overthrust_model(args.vel_dir)
        self.extent = np.array([0., self.dm.shape[2]*spacing[0], 
            self.dm.shape[3]*spacing[1], 0.])/1.0e3
        self.x = self.dm.to(self.device)
        self.wave_solver = wave_solver(self.y, shape, origin, spacing, m0, self.dm, 
            noise=args.eta, device=self.device, sequential=False)
        self.net_loss_log = []
        self.model_loss_log = []
        self.z = torch.randn((1, 3, 512, 128), device=self.device, requires_grad=False)
        self.G = generator(
                    self.x.size(),
                    num_input_channels=3, num_output_channels=1, 
                    num_channels_down = [16, 32, 256],
                    num_channels_up   = [16, 32, 256],
                    num_channels_skip = [0, 0, 0],
                    upsample_mode = 'bicubic',
                    need1x1_up = True,
                    filter_size_down=5, 
                    filter_size_up=3,
                    filter_skip_size = 1,
                    need_sigmoid=False, 
                    need_bias=True, 
                    pad='reflection', 
                    act_fun='LeakyReLU').to(self.device)

        self.l2_loss = torch.nn.MSELoss().to(self.device)
        self.optim = pSGLD([{'params': self.G.parameters()}], 
            float(args.lr), weight_decay=args.weight_decay**2)
Example #18
    def get(self):
        pageparas = ArticleWritePara(self)
        page = WritePage(self)
        usr = self.current_user
        page['article_type'] = pageparas['type']
        page['article_type_with_env'] = page['article_type']
        article_cls = cls_gen(pageparas['type'])
        if not article_cls or not issubclass(article_cls, Article):
            self.send_error(404, error_info=u'Invalid Article Type')
            #todo Earthson
            return
        env = None
        if pageparas['env_id'] or pageparas['env_type']:
            env = generator(pageparas['env_id'], pageparas['env_type'])
            if not env:
                self.send_error(404, error_info=u'Invalid Environment')
                return
            env_info = env.obj_info
            page['env'] = env.as_env
        if not pageparas['id']:
            if not env:
                env = usr
                env_info = env.obj_info
                page['env'] = env.as_env
            page['article_type_with_env'] = env.first_alias + \
                                    '-' + article_cls.first_alias
            page.page_init()
            page.render()
            return
        page['isedit'] = True
        toedit = generator(pageparas['id'], pageparas['type'])
        if toedit is None:
            self.send_error(404, error_info=u'Article Not Found')
            return
        page['article_type_with_env'] = toedit.article_type_with_env
        env = toedit.env
        page['env'] = env.as_env
        auth_ret = toedit.authority_verify(usr, env=env)
        if not test_auth(auth_ret, A_WRITE):
            self.send_error(404, error_info=u'Permission Denied')
            return
        page['article'] = toedit.obj_info_view_by('edit_info',
                                                  usr=usr,
                                                  env=env)
        page.page_init()
        page.render()
        return
Example #19
    def test(self):
        big_x = tf.placeholder(tf.float32, [None, 128, 128, 3])
        sml_x = tf.placeholder(tf.float32, [None, 64, 64, 3])
        gener_x = generator(sml_x, is_training=False, reuse=False)

        real_d = discriminator(big_x, is_training=False, reuse=False)
        gener_d = discriminator(gener_x, is_training=False, reuse=True)

        g_cost, d_cost = costs_and_vars(big_x,
                                        gener_x,
                                        real_d,
                                        gener_d,
                                        is_training=False)

        init = tf.global_variables_initializer()

        with tf.Session() as sess:
            sess.run(init)

            saver = tf.train.Saver()

            try:
                saver.restore(sess,
                              '/'.join(['models', self.model, self.model]))
            except Exception:
                print('Model could not be restored. Exiting.')
                exit()

            makedirs(self.out_path)

            print('Saving test results ...')

            start = 0

            for batch in BatchGenerator(self.batch_size, self.dataset_size):
                batch_big = self.dataset[batch] / 255.0
                batch_sml = array([imresize(img, size=(64, 64, 3)) \
                        for img in batch_big])

                superres_imgs = sess.run(gener_x, feed_dict={sml_x: batch_sml})

                gc, dc  = sess.run([g_cost, d_cost], \
                        feed_dict={big_x : batch_big, sml_x : batch_sml})

                images = concatenate( \
                    ( \
                        array([imresize(img, size=(128, 128, 3)) / 255.0 \
                                for img in batch_sml]), \
                        superres_imgs,
                        batch_big \
                    ), 2)

                for idx, image in enumerate(images):
                    imsave('%s/%d.png' % (self.out_path, start + idx), image)

                start += self.batch_size

                print('%d/%d saved successfully: Generative cost=%.9f, Discriminative cost=%.9f' % \
                        (min(start, self.dataset_size), self.dataset_size, gc, dc))
Example #20
    def __init__(self, rand=None, dim=2, threshold=0):
        gen = gener.generator()
        self.dim = dim
        self.threshold = threshold
        if rand is None:
            self.weights = gen.create_weights(self.dim)
        else:
            self.weights = gen.create_random_weights(self.dim)
Example #21
def add_fit_generator(model, train_augmentations, validation_augmentations, epochs, batch_size=32, test_size=0.2):
	print('')
	print('batch_size ', batch_size)
	print('steps_per_epoch : ', len(train_augmentations) // batch_size)
	print('steps_per_epoch (val) : ', len(validation_augmentations) // batch_size)
	print('')
	train_generator = generator(train_augmentations, batch_size)
	validation_generator = generator(validation_augmentations, batch_size)
	history_object = model.fit_generator(
		train_generator, 
		steps_per_epoch=len(train_augmentations) // batch_size, 
		validation_data=validation_generator, 
		validation_steps=len(validation_augmentations) // batch_size,
		epochs=epochs,
		verbose=2
	)
	return (model, history_object)
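# Hypothetical call; the augmentation lists come from elsewhere in the
# project and are assumptions here:
#   model, history = add_fit_generator(model, train_samples, valid_samples,
#                                      epochs=5, batch_size=32)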
Example #23
def attack(X, y, batch_size=128, thresh=0.3, target=-1):
	x_pl = tf.placeholder(tf.float32, [None, X.shape[1], X.shape[2], X.shape[3]]) # image placeholder
	t = tf.placeholder(tf.float32, [None, 10]) # target placeholder
	is_training = tf.placeholder(tf.bool, [])

	is_targeted = False
	if target in range(0, y.shape[-1]):
		is_targeted = True

	perturb = tf.clip_by_value(generator(x_pl, is_training), -thresh, thresh)
	x_perturbed = perturb + x_pl
	x_perturbed = tf.clip_by_value(x_perturbed, 0, 1)

	f = target_model()
	f_real_logits, f_real_probs = f.ModelC(x_pl)
	f_fake_logits, f_fake_probs = f.ModelC(x_perturbed)

	t_vars = tf.trainable_variables()
	f_vars = [var for var in t_vars if 'ModelC' in var.name]
	g_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='g_weights')

	sess = tf.Session()

	f_saver = tf.train.Saver(f_vars)
	g_saver = tf.train.Saver(g_vars)
	f_saver.restore(sess, "./weights/target_model/model.ckpt")
	g_saver.restore(sess, tf.train.latest_checkpoint("./weights/generator/"))

	rawpert, pert, fake_l, real_l = sess.run([perturb, x_perturbed, f_fake_probs, f_real_probs], \
												feed_dict={x_pl: X[:32], \
														   is_training: False})
	print('LA: ' + str(np.argmax(y[:32], axis=1)))
	print('OG: ' + str(np.argmax(real_l, axis=1)))
	print('PB: ' + str(np.argmax(fake_l, axis=1)))

	correct_prediction = tf.equal(tf.argmax(f_fake_probs, 1), tf.argmax(t, 1))
	accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
	accs = []
	total_batches_test = int(X.shape[0] / batch_size)
	for i in range(total_batches_test):
		batch_x, batch_y = next_batch(X, y, i, batch_size)

		if is_targeted:
			targets = np.full((batch_y.shape[0],), target)
			batch_y = np.eye(y.shape[-1])[targets]

		acc, fake_l, x_pert = sess.run([accuracy, f_fake_probs, x_perturbed], feed_dict={x_pl: batch_x, t: batch_y, is_training: False})
		accs.append(acc)

	print('accuracy of test set: {}'.format(sum(accs) / len(accs)))

	f, axarr = plt.subplots(2,2)
	axarr[0,0].imshow(np.squeeze(X[3]), cmap='Greys_r')
	axarr[0,1].imshow(np.squeeze(pert[3]), cmap='Greys_r')
	axarr[1,0].imshow(np.squeeze(X[4]), cmap='Greys_r')
	axarr[1,1].imshow(np.squeeze(pert[4]), cmap='Greys_r')
	plt.show()
Example #24
def train(seen, path_save):
    train_dataset = load_data(seen)
    # num_batches = len(train_dataset)
    num_epochs = 1000
    # batches_done = 0
    loss = nn.BCELoss()

    disc_loss = []
    gen_loss = []

    discriminator, generator, optimizer_D, optimizer_G = init()

    for epoch in range(num_epochs):
        for index, real_batch in enumerate(train_dataset):

            N = real_batch.size(0)

            real_data = Variable(real_batch)

            noise_gen = Variable(noise(N))
            fake_data = generator(noise_gen.cuda()).detach()

            d_error = train_discriminator(discriminator, loss, optimizer_D,
                                          real_data.cuda(), fake_data.cuda())

            noise_gen = Variable(noise(N))
            fake_data = generator(noise_gen.cuda())

            g_error = train_generator(discriminator, loss, optimizer_G,
                                      fake_data.cuda())

            disc_loss.append(d_error)
            gen_loss.append(g_error)

    if seen:
        torch.save(discriminator.state_dict(),
                   path_save + 'weights/seen/discriminator_seen.pth')
        torch.save(generator.state_dict(),
                   path_save + 'weights/seen/generator_seen.pth')
    else:
        torch.save(discriminator.state_dict(),
                   path_save + 'weights/unseen/discriminator_unseen.pth')
        torch.save(generator.state_dict(),
                   path_save + 'weights/unseen/generator_unseen.pth')
Example #25
def upload_file():

    # Executes if post request sent from template
    if request.method == 'POST':

        # Check if the post request includes a file part
        if 'file' not in request.files:
            flash('Please upload a .txt file')
            return render_template("index.html")

        file = request.files['file']

        # Executes if file name is blank
        if file.filename == '':
            flash('No selected file', 'error')
            return render_template("index.html")

        # Executes if uploaded file exists and has extension .txt
        if file and allowed_file(file.filename):

            # Reads in for Priority from form
            priority = request.form['priority']

            # Reads in Changefreq from form
            changefreq = request.form['changeFreq']

            # Returns a secure version of the file name
            filename = secure_filename(file.filename)

            # Saves the file in the uploads folder
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))

            # Calls the generator function to process uploaded file
            generator(UPLOAD_FOLDER + '/' + filename, changefreq, priority)

            # Deletes the uploaded user file
            os.remove(UPLOAD_FOLDER + '/' + filename)

            # Returns a redirect to the app.route associated with the Sitemap function

            return redirect(url_for('Sitemap'))

    # Renders the index.html template
    return render_template('index.html')
Example #26
    def build_model(self):
        self.x = load_dataset(data_path=self.data_path,
                              batch_size=self.batch_size,
                              scale_size=self.img_dim,
                              split=self.split)
        img_chs = self.x.get_shape().as_list()[-1]
        x = self.x / 127.5 - 1.  # Normalization
        print("Successfully loaded {} with size: {}".format(
            self.dataset, self.x.get_shape()))

        # initialize z within ball(1, z_dim, 2)
        self.z = tf.Variable(tf.random_normal([self.batch_size, self.z_dim],
                                              stddev=np.sqrt(1.0 /
                                                             self.z_dim)),
                             name='noise')

        fake_data, g_vars = generator(self.g_net,
                                      self.z,
                                      self.conv_hidden_num,
                                      self.img_dim,
                                      img_chs,
                                      self.normalize_g,
                                      reuse=False,
                                      n_hidden_layers=self.n_hidden_layers)

        self.fake_data = tf.clip_by_value((fake_data + 1) * 127.5, 0,
                                          255)  # Denormalization

        x_flat = tf.reshape(self.x, [self.batch_size, -1])
        fake_data_flat = tf.reshape(self.fake_data, [self.batch_size, -1])
        if self.loss_type == 'l2':
            self.loss = tf.norm(x_flat - fake_data_flat, axis=1)
            self.loss_mean = tf.reduce_mean(self.loss)

        if self.optimizer == 'adam':
            optim_op = tf.train.AdamOptimizer
        elif self.optimizer == 'rmsprop':
            optim_op = tf.train.RMSPropOptimizer
        else:
            raise Exception(
                "[!] Caution! Other optimizers do not apply right now!")

        self.z_optim = optim_op(self.z_lr, self.beta1,
                                self.beta2).minimize(self.loss,
                                                     var_list=self.z)
        self.g_optim = optim_op(self.g_lr, self.beta1, self.beta2).minimize(
            self.loss_mean, global_step=self.global_step, var_list=g_vars)

        # project z after each update to the representation space Z
        z_proj = tf.divide(
            self.z, tf.maximum(tf.norm(self.z, axis=1, keep_dims=True), 1))
        self.proj_op = tf.assign(self.z, z_proj)

        self.summary_op = tf.summary.merge([
            tf.summary.scalar("loss/d_loss", self.loss_mean),
        ])
Example #27
def main():
    while True:
        while True:
            try:
                inputFile = input("input file path: ")
                ifhand = open(inputFile)
            except FileNotFoundError:
                print("File not Found.")
                continue
            break
        message = ifhand.readline()
        genFunc = ifhand.readline()
        # strip stray spaces and newlines from the inputs
        message = message.replace(" ", "").replace("\n", "")
        genFunc = genFunc.replace(" ", "").replace("\n", "")
        print(
            "Commands:\n  Generator only -> 1\n  Generator-verifier -> 2\n  Generator-Verifier-Alter-Verifier->3\n"
        )
        command = input("Enter command number:")
        if command == '1':
            print("Transmitted message: " + generator(message, genFunc))
        elif command == '2':
            transmittedMessage = generator(message, genFunc)
            print("transmitted message: " + transmittedMessage)
            Verifier(transmittedMessage, genFunc)
        elif command == '3':
            transmittedMessage = generator(message, genFunc)
            print("transmitted message: " + transmittedMessage)
            Verifier(transmittedMessage, genFunc)
            bit_position = input("Which bit to alter? ")
            altered_msg = Alter(transmittedMessage, bit_position)
            Verifier(altered_msg, genFunc)
        else:
            print("Choose a valid command number")
            continue
        exit = input("exit?Y/N\n")
        if exit == 'y' or exit == 'Y':
            break
        else:
            print("__________________________\n\n")
            continue
Example #28
    def build_model(self):
        """build_model
        Creates the computation graph.
        """

        # Input images from the true distribution
        self.x = tf.placeholder(
            tf.float32,
            [None, self.image_width, self.image_width, self.num_channels])

        # Latent variable
        self.z = tf.placeholder(tf.float32, [None, self.flags.latent_dim])

        # Output images from the GAN
        self.x_hat = generator(self.z)

        if self.flags.use_discriminator:
            # The discriminator returns the output (unnormalized) probability
            # of fake/true, and also a feature vector for the image.
            self.y, self.y_to_match = discriminator(self.x)
            self.y_hat, self.y_hat_to_match = discriminator(self.x_hat,
                                                            reuse=True)

            # The discriminator is trained for simple binary classification.
            true_loss = tf.nn.sigmoid_cross_entropy_with_logits(
                labels=tf.ones_like(self.y), logits=self.y)
            fake_loss = tf.nn.sigmoid_cross_entropy_with_logits(
                labels=tf.zeros_like(self.y_hat), logits=self.y_hat)
            self.discriminator_loss = tf.reduce_mean(true_loss + fake_loss)

            discriminator_vars = tf.get_collection(
                tf.GraphKeys.GLOBAL_VARIABLES, scope='discriminator')
            self.d_optimizer = tf.train.AdamOptimizer(
                self.flags.learning_rate,
                beta1=0.5).minimize(self.discriminator_loss,
                                    var_list=discriminator_vars)

            self.generator_loss = self.sw_loss(self.y_to_match,
                                               self.y_hat_to_match)

        else:
            self.generator_loss = self.sw_loss(
                tf.reshape(self.x, [-1, self.image_size]),
                tf.reshape(self.x_hat, [-1, self.image_size]))

        generator_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                           scope='generator')
        self.g_optimizer = tf.train.AdamOptimizer(self.flags.learning_rate,
                                                  beta1=0.5).minimize(
                                                      self.generator_loss,
                                                      var_list=generator_vars)

        # self.merged_summary_op = tf.summary.merge_all()
        return
Example #29
    def setUp(self):
        self.inputs = 1, 2, 3
        self.spy = mock.Mock()

        class Example(object):
            @generate(*self.inputs)
            def test_method(me, arg):
                self.spy(arg)

        self.klass = Example
        self.generated = generator(self.klass)
Example #30
def crawl(scheme, host, main_url, form, headers, delay, timeout):
    if form:  # 'form' holds the forms extracted from the returned page
        for each in form.values():
            url = each['action']
            url = main_url
            if url:
                # if url.startswith(main_url):
                #     pass
                # elif url.startswith('//') and url[2:].startswith(host):
                #     url=scheme+'://'+url[2:]
                # elif url.startswith('/'):
                #     url=scheme+'://'+host+url
                if url not in config.globalVariables['checkedForms']:
                    config.globalVariables['checkedForms'][url] = []
                method = each['method']
                GET = True if method == 'get' else False
                inputs = each['inputs']  # the input tags inside this form
                Scan_area.insert(END, inputs)
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                    for paramName in paramData.keys():
                        if paramName not in config.globalVariables[
                                'checkedForms'][url]:
                            config.globalVariables['checkedForms'][url].append(
                                paramName)
                            paramsCopy = copy.deepcopy(paramData)
                            paramsCopy[paramName] = xsschecker
                            response = requester(url, paramsCopy, headers, GET,
                                                 delay, timeout)  # send the probe request
                            #Scan_area.insert(END,response.text)
                            occurences = htmlParser(
                                response, False)  # context info for each reflection point in the page
                            positions = occurences.keys()  # locations of the injection points
                            # fuzz each context to gauge how well it matches an XSS vulnerability
                            efficiences = filterChecker(
                                url, paramsCopy, headers, GET, delay,
                                occurences, timeout, False)
                            vectors = generator(occurences,
                                                response.text)  # build candidate attack vectors
                            # payloads collected for this parameter
                            payloads = []
                            if vectors:
                                for confidence, vects in vectors.items():
                                    try:
                                        payload = list(vects)[0]
                                        s = "this is payload area"
                                        #Scan_area.insert(END,s)
                                        Scan_area.insert(END, payload)
                                        Scan_area.insert(END, '\n')
                                        payloads.append(payload)
                                        break
                                    except IndexError:
                                        pass
Example #32
def train(model, model_name, x_test, x_train, y_test, y_train, win_len, batch_size, epochs):
    model.summary()
    num_leads_signal = model.input_shape[2]
    checkpoint = ModelCheckpoint(model_name + '_best.h5', verbose=1, monitor='val_loss', save_best_only=True,
                                 mode='auto')
    train_generator = generator(X=x_train, Y=y_train, win_len=win_len, batch_size=batch_size, num_leads_signal=num_leads_signal)
    test_set = next(generator(X=x_test, Y=y_test, win_len=win_len, batch_size=300, num_leads_signal=num_leads_signal))
    history = model.fit_generator(train_generator,
                                  epochs=epochs,
                                  steps_per_epoch=10,
                                  callbacks=[checkpoint],
                                  validation_data=(test_set[0], test_set[1]))

    folder_name = "trained_models"
    if not os.path.exists(folder_name):
        os.makedirs(folder_name)
    model.save(os.path.join(folder_name, model_name + '.h5'))
    save_set_to_pkl(x_test, y_test, os.path.join(folder_name, model_name + ".pkl"))
    save_history(history, model_name)
    return history
Example #33
def templates_to_logs(templates, test_mode=False):
    # Turns all of the templates into a list of tab-separated strings
    # of the log and its truth, e.g.:
    # [
    #   'hello_world/hi 123\tUUUUUUUUUUUUUU_bbb',
    #   '456 howdy/hi/m.jpg\tbbb_UUUUUUUUUUUUUU',
    # ]
    return map(
        lambda template: '\t'.join(
            generator(
                template=template, test_mode=test_mode)),
        templates)
Example #34
def Aeye_eval_input_func_gen():
    # shapes = ((FLAGS.image_height, FLAGS.image_width, FLAGS.image_channels),(FLAGS.label_rows, FLAGS.label_cols))
    dataset = tf.data.Dataset.from_generator(
        generator=geny.generator(TRAIN_DATA_PATH),
        output_types=(tf.int8, tf.int8, tf.int8, tf.int8))
    dataset = dataset.batch(FLAGS.batch_size)
    iterator = dataset.make_one_shot_iterator()
    [features_tensors, label1, label2, label3] = iterator.get_next()
    features_tensors = tf.cast(features_tensors, tf.float32)
    images = tf.image.per_image_standardization(features_tensors)
    features = {'x': images}
    return features, label1, label2, label3
Example #35
    def run(x, batch_size, hidden_size):
        Z = tf.random_normal((batch_size, hidden_size), 0, 1)

        with pt.defaults_scope(learned_moments_update_rate=0.0003,
                               variance_epsilon=0.001):

            x_tilde = generator(Z, batch_size=batch_size)
            x_tilde_d = discriminator(x_tilde, batch_size=batch_size,
                                      hidden_size=hidden_size)

            x_d = discriminator(x, reuse_scope=True, batch_size=batch_size,
                                hidden_size=hidden_size)

            return x_tilde, x_tilde_d, x_d
Example #36
    def __init__(self, data_dir, batch_size, epochs, save_path, learning_rate, split, is_pretrained):
        self.dev = d()
        self.help = Helper()
        # self.log = Logger('Gan')
        if not self.dev.cuda:
            self.gen = generator()
            self.disc = discriminator()
        else:
            self.gen = torch.nn.DataParallel(generator().cuda())
            self.disc = torch.nn.DataParallel(discriminator().cuda())

        self.dataset = CUBDataset(data_dir, split=split)
        self.dataLoader = DataLoader(dataset=self.dataset, batch_size=batch_size, shuffle=True, pin_memory=True)
        print("Data Loaded Successfully")
        self.optimD = torch.optim.Adam(self.disc.parameters(), lr=learning_rate, betas=(0.5, 0.99), amsgrad=True)
        self.optimG = torch.optim.Adam(self.gen.parameters(), lr=learning_rate, betas=(0.5, 0.99), amsgrad=True)
        
        if is_pretrained:
            self.disc, self.gen, self.optimD, self.optimG = self.help.load_model('checkpoints', 90, self.dev.cuda)
        self.model_dir = save_path
        self.epochs = epochs
        self.batch_size = batch_size
        self.train()
Example #37
    def post(self):
        from generator import generator
        handler_json = GetCommentJson(self)
        handler_paras = CommentGetPara(self)
        article_obj = generator(handler_paras['article_id'],
                                handler_paras['article_type'])
        if article_obj is None:
            handler_json.by_status(1)
            handler_json.write()
            return  #Article not Exist
        comments_info = article_obj.comments_info_view_by(self.current_user)
        handler_json['comment_list'] = comments_info
        handler_json.by_status(0)
        handler_json.write()
        return
Example #38
    def post(self):
        handler_para = ObjRemovePara(self)
        handler_json = ObjRemoveJson(self)
        usr = self.current_user
        obj_toremove = generator(handler_para['obj_id'],
                                 handler_para['obj_type'])
        if obj_toremove is None:
            handler_json.by_status(2)
            handler_json.write()
            return  #Obj Not Exist
        env = obj_toremove.env
        auth_ans = obj_toremove.authority_verify(usr, env)
        if not test_auth(auth_ans, A_DEL):
            handler_json.by_status(3)
            handler_json.write()
            return  #Delete Permission Denied
        obj_toremove.remove()
        handler_json.by_status(0)
        handler_json.write()
        return  #0
Example #39
    def post(self):
        handler_para = DelArticleFromBookPara(self)
        handler_json = DelArticleFromBookJson(self)
        usr = self.current_user
        book = Catalog.by_id(handler_para['book_id'])
        article_obj = generator(handler_para['article_id'],
                                handler_para['article_type'])
        if article_obj is None:
            handler_json.by_status(1)
            handler_json.write()
            return  #article not exist
        auth_tmp = article_obj.authority_verify(usr, env=book)
        if book is not None:
            auth_tmp |= book.authority_verify(usr, env=book)
        if not test_auth(auth_tmp, A_WRITE):
            handler_json.by_status(2)
            handler_json.write()
            return  #permission denied
        rr = article_obj.remove_from_catalog(book, handler_para['node_id'])
        handler_json.by_status(0)
        handler_json.write()
        return  #0
Example #40
def SendMessage(gpg,server_key_fp,config,recipient,subject,msg_content_str):
	listaddress = "{0}@{1}".format(config.listname,config.listdomain)
	saveToFile("content",msg_content_str)

	output = None
	gen = generator.generator()
	gen.addDefaultHeader(listaddress,recipient,subject)

	sender_key = GetKeyByUID(gpg,recipient)
	if sender_key is not None and config.getTryEncryptMessage():
		if config.getSignMessages():
			signing_key = server_key_fp
		else:
			signing_key = None
		encrypted = gpg.encrypt(msg_content_str,sender_key["fingerprint"],always_trust=True,sign=signing_key).data
		saveToFile("encrypted",encrypted)
		gen.add(encrypted)  # attach the encrypted payload; signing was folded into gpg.encrypt above
		output = gen.as_string()
	else:
		if config.getAllowUnencryptedSend():
			if config.getSignMessages():
				signature = gpg.sign(msg_content_str,detach=True).data
				saveToFile("signature.asc",signature)
				gen.addMimeMultipartSignedOpenPGP(msg_content_str,signature)
				output = gen.as_string()
			else:
				gen.add(msg_content_str)
				output = gen.as_string()
		else:
			pass

	if output is not None:
		saveToFile("msg_out",output)

		# send new message back
		s = smtplib.SMTP('localhost')
		s.sendmail(listaddress, [recipient], output)
		s.quit()
Example #41
    def do_generator(self):
        self.metadata()
        from generator import generator
        return generator(), nonblank(), noduplicates()
Example #42
import os
import modules.stitches as stitches
import modules.PCG as PCG
import modules.FFT_solver as FFT_solver
import modules.governing_equations as governing_equations
import modules.MOONS as MOONS
import modules.generator as g  # assumed import; g.generator() is called below

# add_prop parameters:
#     1) name (string)
#     2) data type (integer,real etc.) (string)
#     3) Object / allocatable / Primitive / Parameter (string)
#     4) dimension (int)

# init(var,type,privacy,allocatable,rank,dimension,procedure)
# init(var,type,privacy) # defaults to allocatable = False, rank = 1, dimension = 1, procedure = False
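# A hypothetical illustration of the two documented calls above (names and
# values are assumptions, not taken from the project):
#   g.add_prop('KE', 'real(cp)', 'Object', 1)
#   init('KE', 'real(cp)', 'private', True, 1, 1, False)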

g = g.generator()
g.set_directories(os.path.abspath(__file__),PS)
g.add_base_files(['precision'+PS+'current_precision.f90'])
g.add_base_files(['IO'+PS+'inquire_funcs.f90'])
g.add_base_files(['IO'+PS+'IO_check.f90'])
g.add_base_files(['IO'+PS+'IO_tools.f90'])
g.add_base_files(['string'+PS+'string.f90'])
g.add_base_files(['string'+PS+'string_aux.f90'])
g.add_base_modules(['string'])

# g.print()
priv = 'public'
log = 'logical'
real = 'real(cp)' # ! Double precision (default)
T = True
F = False
Example #43
def sample_noise(batch_size, dim):
    random_noise = tf.random_uniform(maxval=1, minval=-1, shape=[batch_size, dim])
    return random_noise


# number of images for each batch
batch_size = 128
# our noise dimension
noise_dim = 96

# placeholder for images from the training dataset
x = tf.placeholder(tf.float32, [None, 784])
# random noise feed into our generator
z = sample_noise(batch_size, noise_dim)
# generated images
G_sample = generator(z)

with tf.variable_scope("") as scope:
    #scale images to be -1 to 1
    logits_real = discriminator(preprocess_img(x))
    # Re-use discriminator weights on new inputs
    scope.reuse_variables()
    logits_fake = discriminator(G_sample)

# Get the list of variables for the discriminator and generator
D_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'discriminator')
G_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'generator') 

# get our solver
D_solver, G_solver = get_solvers()
Example #44
    def test_generator(self):
        numbers = list(generator(1024, 0, 42))
        self.assertEqual(len(numbers), 369, 'Different length for the same seed')
Example #45
def application(env, start_response):
    start_response('200 OK', [('Content-Type','text/html')])
    result = generator("starwars",os.environ['UPSTREAM_HOST'], int(os.environ['UPSTREAM_PORT']))
    return result
Example #46
    def do_atom_generator(self):
        from generator import generator
        return generator()
Example #47
batch_size = 100   # batch size
cat_dim = 10   # total categorical factor
con_dim = 2    # total continuous factor
rand_dim = 38  # total random latent dimension


target_num = tf.placeholder(dtype=tf.int32, shape=batch_size)
target_cval_1 = tf.placeholder(dtype=tf.float32, shape=batch_size)
target_cval_2 = tf.placeholder(dtype=tf.float32, shape=batch_size)

z = tf.one_hot(tf.ones(batch_size, dtype=tf.int32) * target_num, depth=cat_dim)
z = tf.concat(axis=z.get_shape().ndims-1, values=[z, tf.expand_dims(target_cval_1, -1), tf.expand_dims(target_cval_2, -1)])

z = tf.concat(axis=z.get_shape().ndims-1, values=[z, tf.random_normal((batch_size, rand_dim))])

gen = tf.squeeze(generator(z), -1)

def run_generator(num, x1, x2, fig_name='sample.png'):
    with tf.Session() as sess:
        sess.run(tf.group(tf.global_variables_initializer(),
                      tf.local_variables_initializer()))
        saver = tf.train.Saver()
        saver.restore(sess, tf.train.latest_checkpoint('checkpoint_dir'))
        imgs = sess.run(gen, {target_num: num, target_cval_1: x1, target_cval_2:x2})

        _, ax = plt.subplots(10,10, sharex=True, sharey=True)
        for i in range(10):
            for j in range(10):
                ax[i][j].imshow(imgs[i*10+j], 'gray')
                ax[i][j].set_axis_off()
        plt.savefig(os.path.join('result/',fig_name), dpi=600)
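# Hypothetical call; each argument must match the batch_size-length
# placeholders above (values are illustrative):
#   run_generator(np.full(batch_size, 3), np.linspace(-1, 1, batch_size),
#                 np.linspace(-1, 1, batch_size), fig_name='digit3.png')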
Example #48
def application(env, start_response):
    start_response('200 OK', [('Content-Type','text/html')])
    result = generator("southpark","mongo.adrian-training.local")
    return result
Example #49
def test_gan_rnn(): 

    #Set up parameters

    #The time series for each element of the batch
    #sequenceLength x batch_size x 1
    X_observed = T.tensor3()

    #Each sequence is either 1 or 0.  
    Y = T.vector()

    sequenceLength = 1
    batch_size = 100

    #Discriminator on observed sample, discriminator on generated sample

    params_disc, p_x_observed = discriminator(X_observed, sequenceLength, batch_size, params = None)
    params_gen, x_gen = generator(sequenceLength, batch_size, params = None)
    params_disc, p_x_generated = discriminator(x_gen, sequenceLength, batch_size, params = params_disc)

    loss_generator = adversarial_loss_generator(p_x_generated = p_x_generated)

    loss_discriminator = adversarial_loss_discriminator(p_x_generated = p_x_generated, p_x_observed = p_x_observed)

    learning_rate = 0.0001

    gen_updates = Updates.Updates(paramMap = params_gen, loss = loss_generator, learning_rate = learning_rate * 0.1)
    disc_updates = Updates.Updates(paramMap = params_disc, loss = loss_discriminator, learning_rate = learning_rate * 10.0)

    #Functions: 
    #Train discriminator and generator
    #Train only discriminator
    #Generate values without training

    print "disc update keys", len(disc_updates.getUpdates().keys())

    print "gen update keys", len(gen_updates.getUpdates().keys())

    print "joined update keys", len(dict(gen_updates.getUpdates().items() + disc_updates.getUpdates().items()))

    generate_sample = theano.function(inputs = [], outputs = [x_gen])
    trainDiscriminator = theano.function(inputs = [X_observed], updates = disc_updates.getUpdates())
    trainAll = theano.function(inputs = [X_observed], outputs = [loss_generator, loss_discriminator], updates = dict(gen_updates.getUpdates().items() + disc_updates.getUpdates().items()))

    g = generate_sample()

    print g[0].shape

    sample_prob = theano.function(inputs = [X_observed], outputs = [p_x_observed])

    sampled = sample_prob(g[0])

    from DataTransformation.plotData import getData

    p50Lst = []
    p90Lst = []

    for epoch in range(0, 400): 
        dataLst = getData()

        #seq_length x batch x 1

        for ts in dataLst: 
            if random.uniform(0,1) < 0.0: 
                trainDiscriminator(ts)
            else:         
                loss_gen, loss_disc = trainAll(ts)
                print "loss gen", loss_gen
                print "loss disc", loss_disc

            #print sample_prob(ts)
            print "===================="
            print ts[0][0]
            print "sample_prob", sample_prob(ts)[0][0].tolist()

        #Plot dist

        print "pulling samples for evaluation"

        allSamples = []

        for j in range(0, 16):
            samples = np.asarray(generate_sample()).flatten().tolist()
            allSamples += samples

        if random.uniform(0,1) < 1.0: 

            print sorted(allSamples)

            allTS = []

            for ts in dataLst[0:12]: 
                allTS += ts.flatten().tolist()

            binrange = np.arange(-2.0, 60.0, 1.0)
            p_data = []

            for val in binrange: 
                val_run = np.asarray([[[val]] * 100], dtype = 'float32')
                p_data += [sample_prob(val_run)[0][0][0]]

            plt.scatter(binrange, p_data)
            plt.hist(allTS, bins = binrange, normed = 1, alpha = 0.2)
            plt.hist(allSamples, bins = binrange, normed = 1, alpha = 0.2)

            plt.savefig(tmpDir + "hist_" + str(epoch) + ".png")
            plt.clf()

            #qqplot.qqplot(allSamples, allTS)

            print "mean samples", np.average(np.asarray(allSamples))
            print "mean observed", np.average(np.asarray(allTS))

            print "stdv samples", np.std(np.asarray(allSamples))
            print "stdv observed", np.std(np.asarray(allTS))

            print "p50 samples", np.percentile(np.asarray(allSamples), 50.0)
            print "p50 observed", np.percentile(np.asarray(allTS), 50.0)

            print "p90 samples", np.percentile(np.asarray(allSamples), 90.0)
            print "p90 observed", np.percentile(np.asarray(allTS), 90.0)

            p50Loss = abs(np.percentile(np.asarray(allSamples), 50.0) - np.percentile(np.asarray(allTS), 50.0))
            p90Loss = abs(np.percentile(np.asarray(allSamples), 90.0) - np.percentile(np.asarray(allTS), 90.0))

            p50Lst += [p50Loss]
            p90Lst += [p90Loss]

            plt.plot(p50Lst)
            plt.plot(p90Lst)

            plt.savefig(tmpDir + "progress_" + str(epoch) + ".png")
            plt.clf()
Example #50
#

# get random class number
z_cat = tf.multinomial(tf.ones((batch_size, cat_dim), dtype=tf.float32) / cat_dim, 1)
z_cat = tf.squeeze(z_cat, -1)
z_cat = tf.cast(z_cat, tf.int32)

# continuous latent variable
z_con = tf.random_normal((batch_size, con_dim))
z_rand = tf.random_normal((batch_size, rand_dim))

z = tf.concat(axis=1, values=[tf.one_hot(z_cat, depth = cat_dim), z_con, z_rand])


# generator network
gen = generator(z)

# add image summary
# tf.sg_summary_image(gen)
tf.summary.image('real', x)
tf.summary.image('fake', gen)

#
# discriminator
disc_real, _, _ = discriminator(x)
disc_fake, cat_fake, con_fake = discriminator(gen)

# discriminator loss
loss_d_r = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_real, labels=y_real))
loss_d_f = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_fake, labels=y_fake))
loss_d = (loss_d_r + loss_d_f) / 2
Example #51
    def do_atom_generator(self):
        from generator import generator

        return generator(), nonblank(), noduplicates()