Code Example #1
'''
Load pretrained model
'''
load_weights(args.oldmodel, gen_layers)

# interpolation on latent space (z) class conditionally
for i in xrange(10):
    sample_y = np.int32(np.repeat(np.arange(num_classes), batch_size_g/num_classes))
    original_z = np.repeat(rng.uniform(size=(num_classes,n_z)), batch_size_g/num_classes, axis=0)
    target_z = np.repeat(rng.uniform(size=(num_classes,n_z)), batch_size_g/num_classes, axis=0)
    alpha = np.tile(np.arange(batch_size_g/num_classes) * 1.0 / (batch_size_g/num_classes-1), num_classes)
    alpha = alpha.reshape(-1,1)
    z = np.float32((1-alpha)*original_z+alpha*target_z)
    x_gen_batch = generate_interpolation(sample_y, z)
    x_gen_batch = x_gen_batch.reshape((batch_size_g,-1))
    image = paramgraphics.mat_to_img(x_gen_batch.T, dim_input, colorImg=colorImg, tile_shape=(num_classes, 2*num_classes), scale=generation_scale, save_path=os.path.join(outfolder, 'interpolation-'+str(i)+'.png'))

# class conditionally generation with shared z and fixed y
for i in xrange(10):
    sample_y = np.int32(np.repeat(np.arange(num_classes), batch_size_g/num_classes))
    x_gen_batch = generate_shared(sample_y)
    x_gen_batch = x_gen_batch.reshape((batch_size_g,-1))
    image = paramgraphics.mat_to_img(x_gen_batch.T, dim_input, colorImg=colorImg, tile_shape=(num_classes, 2*num_classes), scale=generation_scale, save_path=os.path.join(outfolder, 'shared-'+str(i)+'.png'))

# generation with randomly sampled z and y
for i in xrange(10):
    sample_y = np.int32(np.repeat(np.arange(num_classes), batch_size_g/num_classes))
    inds = np.random.permutation(batch_size_g)
    sample_y = sample_y[inds]
    x_gen_batch = generate(sample_y)
    x_gen_batch = x_gen_batch.reshape((batch_size_g,-1))
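
The interpolation loop above walks each class row from one latent code to another in equal steps. Below is a minimal, self-contained numpy sketch of the same blending scheme; the sizes (num_classes, steps, n_z) and the seed are invented for illustration and are not taken from the snippet's configuration.

import numpy as np

rng = np.random.RandomState(0)
num_classes, steps, n_z = 10, 20, 100           # steps = images per class row

# one start and one end latent code per class, repeated along each class row
z_start = np.repeat(rng.uniform(size=(num_classes, n_z)), steps, axis=0)
z_end = np.repeat(rng.uniform(size=(num_classes, n_z)), steps, axis=0)

# blending weights 0, 1/(steps-1), ..., 1, repeated once per class row
alpha = np.tile(np.arange(steps) / (steps - 1.0), num_classes).reshape(-1, 1)

z = ((1 - alpha) * z_start + alpha * z_end).astype(np.float32)   # (num_classes*steps, n_z)
y = np.int32(np.repeat(np.arange(num_classes), steps))           # matching class labels
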
Code Example #2
train_x = train_x[inds]
train_y = train_y[inds]
x_labelled = []
y_labelled = []
for j in range(num_classes):
    x_labelled.append(train_x[train_y==j][:num_labelled/num_classes])
    y_labelled.append(train_y[train_y==j][:num_labelled/num_classes])
x_labelled = np.concatenate(x_labelled, axis=0)
y_labelled = np.concatenate(y_labelled, axis=0)
del train_x

if True:
    print 'Size of training data', x_labelled.shape[0], x_unlabelled.shape[0]
    y_order = np.argsort(y_labelled)
    _x_mean = x_labelled[y_order]
    image = paramgraphics.mat_to_img(_x_mean.T, dim_input, tile_shape=(num_classes, num_labelled/num_classes), colorImg=colorImg, scale=generation_scale, save_path=os.path.join(outfolder, 'x_l_'+str(ssl_data_seed)+'_sgan.png'))

num_batches_l = x_labelled.shape[0] / batch_size
num_batches_u = x_unlabelled.shape[0] / batch_size
num_batches_e = eval_x.shape[0] / batch_size_eval


'''
models
'''
# symbols
sym_z_image = T.tile(theano_rng.uniform((z_generated, n_z)), (num_classes, 1))
sym_z_rand = theano_rng.uniform(size=(batch_size, n_z))
sym_y_g = T.ivector()

sym_x_l = T.tensor4()
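
The labelled subset above is built by taking the first num_labelled/num_classes examples of each class, so the small labelled set stays class-balanced. A toy, numpy-only sketch of that stratified selection follows; the data, sizes, and seed here are invented for illustration.

import numpy as np

rng = np.random.RandomState(1)
train_x = rng.rand(1000, 784).astype(np.float32)        # toy "images"
train_y = np.int32(rng.randint(0, 10, size=1000))       # toy labels
num_classes, num_labelled = 10, 100
per_class = num_labelled // num_classes                 # examples kept per class

# keep the first per_class examples of every class, then stack them
x_labelled = np.concatenate(
    [train_x[train_y == j][:per_class] for j in range(num_classes)], axis=0)
y_labelled = np.concatenate(
    [train_y[train_y == j][:per_class] for j in range(num_classes)], axis=0)

print(x_labelled.shape, y_labelled.shape)   # (100, 784) (100,) if every class has >= per_class examples
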
Code Example #3
x_labelled = []
y_labelled = []
for j in range(num_classes):
    x_labelled.append(train_x[train_y == j][:int(num_labelled / num_classes)])
    y_labelled.append(train_y[train_y == j][:int(num_labelled / num_classes)])

x_labelled = np.concatenate(x_labelled, axis=0)
y_labelled = np.concatenate(y_labelled, axis=0)

print('x_labelled:', x_labelled.shape)
print('y_labelled:', y_labelled.shape)
del train_x

if True:
    print('Size of training data', x_labelled.shape[0], x_unlabelled.shape[0])
    y_order = np.argsort(y_labelled)
    _x_mean = x_labelled[y_order]
    image = paramgraphics.mat_to_img(_x_mean.T, dim_input, tile_shape=(num_classes, int(num_labelled / num_classes)),
                                     colorImg=colorImg, scale=generation_scale,
                                     save_path=os.path.join(outfolder, 'x_l_' + str(ssl_para_seed) + '_AT-JD.png'))

pretrain_batches_train_uc = int(x_unlabelled.shape[0] / pre_batch_size_uc)
pretrain_batches_train_lc = int(x_labelled.shape[0] / pre_batch_size_lc)
n_batches_train_u_c = int(x_unlabelled.shape[0] / batch_size_u_c)
n_batches_train_l_c = int(x_labelled.shape[0] / batch_size_l_c)
n_batches_train_u_d = int(x_unlabelled.shape[0] / batch_size_u_d)
n_batches_train_l_d = int(x_labelled.shape[0] / batch_size_l_d)
n_batches_train_g = int(x_unlabelled.shape[0] / batch_size_g)

'''
models
'''
# symbols
sym_z_image = T.tile(theano_rng.uniform((z_generated, n_z)), (num_classes, 1))
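
sym_z_image tiles a fixed block of z_generated latent codes across all classes, so every class row of the visualization grid is generated from the same latent vectors and only the label differs. A numpy analogue of that T.tile expression is sketched below; the sizes and seed are illustrative.

import numpy as np

rng = np.random.RandomState(2)
z_generated, n_z, num_classes = 10, 100, 10

z_block = rng.uniform(size=(z_generated, n_z))
z_shared = np.tile(z_block, (num_classes, 1))    # shape (num_classes * z_generated, n_z)

# row k * z_generated + j equals row j of z_block for every class index k,
# so differences between class rows of the generated grid come only from the label y
print(np.allclose(z_shared[3 * z_generated + 4], z_block[4]))    # True
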
Code Example #4
File: generation.py  Project: superman97/DADA
for i in range(10):
    sample_y = np.int32(
        np.repeat(np.arange(num_classes), batch_size_g / num_classes))
    original_z = np.repeat(rng.uniform(size=(num_classes, n_z)),
                           batch_size_g / num_classes,
                           axis=0)
    target_z = np.repeat(rng.uniform(size=(num_classes, n_z)),
                         batch_size_g / num_classes,
                         axis=0)
    alpha = np.tile(
        np.arange(batch_size_g / num_classes) * 1.0 /
        (batch_size_g / num_classes - 1), num_classes)
    alpha = alpha.reshape(-1, 1)
    z = np.float32((1 - alpha) * original_z + alpha * target_z)
    x_gen_batch = generate_interpolation(sample_y, z)
    x_gen_batch = x_gen_batch.reshape(batch_size_g, -1)
    image = paramgraphics.mat_to_img(
        x_gen_batch.T,
        dim_input,
        colorImg=colorImg,
        tile_shape=(batch_size_g / num_classes, num_classes),
        scale=generation_scale,
        save_path=os.path.join(results_dir,
                               'interpolation-' + str(i) + '.png'))

for i in range(10):
    sample_y = np.int32(
        np.repeat(np.arange(num_classes), batch_size_g / num_classes))
    inds = np.random.permutation(batch_size_g)
    sample_y = sample_y[inds]
    x_gen_batch = generate(sample_y)
    x_gen_batch = x_gen_batch.reshape(batch_size_g, -1)
    image = paramgraphics.mat_to_img(
        x_gen_batch.T,
        dim_input,
        colorImg=colorImg,
Code Example #5
    sh_x_train_unlabelled_preprocessed = theano.shared(
        preprocesses_dataset(x_unlabelled), borrow=True)
    sh_x_test_preprocessed = theano.shared(preprocesses_dataset(test_x),
                                           borrow=True)

# visualize labeled data
if True:
    print 'size of training data ', x_labelled.shape, y_labelled.shape, x_unlabelled.shape
    _x_mean = x_labelled.reshape((num_labelled, -1))
    _x_mean = _x_mean[:num_generation]
    y_order = np.argsort(y_labelled[:num_generation])
    _x_mean = _x_mean[y_order]
    image = paramgraphics.mat_to_img(
        _x_mean.T,
        dim_input,
        colorImg=colorImg,
        scale=generation_scale,
        save_path=os.path.join(res_out,
                               'labeled_data' + str(ssl_data_seed) + '.png'))
'''
building block
'''
# shortcuts
encodelayer = convlayer


# decoder layer
def decodelayer(l, up_method, bn, dr, ps, n_kerns, d_kerns, nonlinearity, pad,
                stride, name):
    # upsampling
    if up_method == 'unpool':
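
The decodelayer definition above is cut off inside its 'unpool' branch. The sketch below shows one common way such a decoder layer is assembled in Lasagne (fixed upsampling plus a convolution for 'unpool', a transposed convolution otherwise, with optional batch norm and dropout); the layer choices are assumptions for illustration, not the project's actual implementation.

from lasagne.layers import (Conv2DLayer, DropoutLayer, TransposedConv2DLayer,
                            Upscale2DLayer, batch_norm)


def decodelayer_sketch(l, up_method, bn, dr, ps, n_kerns, d_kerns, nonlinearity,
                       pad, stride, name):
    if up_method == 'unpool':
        # fixed (repeat) upsampling followed by an ordinary convolution
        l = Upscale2DLayer(l, scale_factor=ps, name=name + '_up')
        l = Conv2DLayer(l, num_filters=n_kerns, filter_size=d_kerns, stride=stride,
                        pad=pad, nonlinearity=nonlinearity, name=name + '_conv')
    else:
        # learnable upsampling via a transposed ("deconv") convolution
        l = TransposedConv2DLayer(l, num_filters=n_kerns, filter_size=d_kerns,
                                  stride=ps, nonlinearity=nonlinearity,
                                  name=name + '_deconv')
    if bn:
        l = batch_norm(l)                      # wrap the layer in batch normalization
    if dr > 0:
        l = DropoutLayer(l, p=dr, name=name + '_drop')
    return l
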
Code Example #6
        loss_test += [test_out[4]]
        acc_test += [test_out[5]]


        line = "*Epoch=%d\tTime=%.2f\tLR=%.5f\n" %(epoch, t, lr) + \
               "  TRAIN:\tGen_loss=%.5f\tlogq(z|x)=%.5f\tlogp(z)=%.5f\tlogp(x|z)=%.5f\tdis_loss=%.5f\tlabel_error=%.5f\n" %(LL_train[-1], log_qz_given_x_train[-1], log_pz_train[-1], log_px_given_z_train[-1], loss_train[-1], 1-acc_train[-1]) + \
               "  EVAL-L1:\tGen_loss=%.5f\tlogq(z|x)=%.5f\tlogp(z)=%.5f\tlogp(x|z)=%.5f\tdis_loss=%.5f\terror=%.5f\n" %(LL_test[-1], log_qz_given_x_test[-1], log_pz_test[-1], log_px_given_z_test[-1], loss_test[-1], 1-acc_test[-1])
        print line
        with open(logfile,'a') as f:
            f.write(line + "\n")

    # random generation for visualization
    if epoch % vis_epoch == 0:
        tail='-'+str(epoch)+'.png'
        ran_y = np.int32(np.repeat(np.arange(10), 10))
        _x_mean, _x = generate(ran_y)
        _x_mean = _x_mean.reshape((100,-1))
        _x = _x.reshape((100,-1))
        image = paramgraphics.mat_to_img(_x_mean.T, dim_input, colorImg=colorImg, scale=generation_scale, 
            save_path=os.path.join(res_out, 'mean'+tail))

    #save model
    model_out = os.path.join(res_out, 'model')
    if epoch % (vis_epoch*10) == 0:
        if distribution == 'bernoulli':
            all_params=lasagne.layers.get_all_params([classifier, l_dec_x_mu])
        elif distribution == 'gaussian':
            all_params=lasagne.layers.get_all_params([classifier, l_dec_x_mu, l_dec_x_log_var])
        f = gzip.open(model_out + 'epoch%i'%(epoch), 'wb')
        cPickle.dump(all_params, f, protocol=cPickle.HIGHEST_PROTOCOL)
        f.close()
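
The checkpoint written above is a gzipped cPickle of the list returned by lasagne.layers.get_all_params, i.e. the Theano shared variables themselves. Below is a hedged sketch of reading such a file back and copying the stored values into a freshly built model (Python 2, matching the snippet; the path and the bernoulli-case parameter list come from the code above, the restore strategy itself is only one possible choice).

import gzip
import cPickle
import lasagne

# read back the pickled list of Theano shared variables
with gzip.open(model_out + 'epoch%i' % epoch, 'rb') as f:
    saved_params = cPickle.load(f)

# one possible way to restore: copy the stored values into a freshly built model's
# parameters (bernoulli case from above; assumes the architectures match layer-for-layer)
current_params = lasagne.layers.get_all_params([classifier, l_dec_x_mu])
for current, saved in zip(current_params, saved_params):
    current.set_value(saved.get_value())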