Example #1
0
def SAE_evidence(sae_dict, cp2, ae_ids):
    """Train the evidence stacked autoencoder and persist its activations.

    Loads the evidence dataset named in the config, applies the shared
    random permutation, runs layerwise training via ``EviAE.train``, and
    saves the hidden-layer and reconstruction outputs for downstream
    pipeline training.

    Args:
        sae_dict: Dict of SAE graph tensors; must contain the keys
            'sda_in', 'sda_hidden' and 'sda_out'.
        cp2: ConfigParser with 'Experiment' and 'Hyperparameters' sections.
        ae_ids: Autoencoder layer identifiers forwarded to ``EviAE.train``.
    """
    # Init variables and parameters
    init = tf.global_variables_initializer()
    saver = tf.train.Saver()

    # Model checkpoint prefix: strip the '.meta' suffix from the graph path.
    # NOTE(review): `saefinestr` is a module-level global defined elsewhere
    # in this file — confirm it is set before this function runs.
    saemodelstr = saefinestr.split('.meta')[0]

    # Load evidence
    K = utils.load_evidence(cp2.get('Experiment', 'EVIDENCEDATAPATH'))

    # Concatenate train+test evidence and apply the shared permutation so
    # ordering matches the rest of the pipeline (`perm_str` is a global).
    _full = np.concatenate((K.train.one, K.test.one))
    p = utils.get_perm(perm_str, _full)
    EV = _full[p]

    batch_size = cp2.getint('Hyperparameters', 'BatchSize')

    # Start Session (Layerwise training)
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True  # grab GPU memory on demand
    with tf.Session(config=config) as sess:
        # Initialize graph variables
        init.run()

        ev_dict = {
            'cp2': cp2,
            'sess': sess,
            'saver': saver,
            'EV': EV,
            'savestr': saemodelstr,
            'ae_ids': ae_ids,
            'argv2': sys.argv[2]
        }

        EviAE.train(ev_dict, sae_dict)

        # Save hidden/output layer results for pipeline training
        utils.save_OOM(sess,
                       sae_dict['sda_in'],
                       EV,
                       sae_dict['sda_hidden'],
                       path=cp2.get('Experiment', 'PX_Z_TRAIN'),
                       batch_size=batch_size)
        utils.save_OOM(sess,
                       sae_dict['sda_in'],
                       EV,
                       sae_dict['sda_out'],
                       path=cp2.get('Experiment', 'PX_XREC'),
                       batch_size=batch_size)
    # The `with` block closes the session on exit; the explicit
    # `sess.close()` that previously followed was redundant and removed.
Example #2
0
# Checkpoint graph path for the conditional EVITRAM model:
# <ModelOutputPath><PREFIX>_<Enumber>_<argv2>_cond_model.ckpt.meta
evitramfinestr = cp.get('Experiment', 'ModelOutputPath') + \
    cp.get('Experiment', 'PREFIX') + '_' + \
    cp.get('Experiment', 'Enumber') + '_' + \
    sys.argv[2] + '_cond_model.ckpt.meta'

# Full dataset random permutation path

perm_str = out_ + cp.get('Experiment', 'PREFIX') + '_perm.npy'

# Initialize Dataset
XX = dataset.train.images
XX_test = dataset.test.images
# Reuse XX / XX_test instead of re-reading the dataset attributes.
XX_full = np.concatenate((XX, XX_test))
utils.log(str(XX_full.shape))

# Shared random permutation (loaded or created by utils.get_perm).
p = utils.get_perm(perm_str, XX_full)

XX_full = XX_full[p]

# Init ground truth
YY = dataset.train.labels.flatten()
YY_test = dataset.test.labels.flatten()
# Reuse the already-flattened label arrays instead of flattening twice.
YY_full = np.concatenate((YY, YY_test))

# Apply the same permutation so labels stay aligned with XX_full.
YY_full = YY_full[p]

# Get batch size in case of batch save
batch_size = cp.getint('Hyperparameters', 'BatchSize')