Example #1
# Imports assumed from the surrounding script (the excerpt also relies on
# earlier definitions of trainx, test_matched, test_unmatched, disc_layers,
# f_low_dim and args that are not shown here):
import numpy as np
import lasagne.layers as ll
import plotting
from data_preprocessing import extract_features

# Tile the first 100 training patches into one image and save it to disk.
img_bhwc = np.transpose(trainx[:100], (0, 2, 3, 1))  # NCHW -> NHWC for plotting
img_tile = plotting.img_tile(img_bhwc,
                             aspect_ratio=1.0,
                             border_color=1.0,
                             stretch=True)
img = plotting.plot_img(img_tile, title='')
plotting.plt.savefig(args.data_name + "_patches_train.png")
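
# hamming_dist is called near the end of this snippet but its definition is
# not shown in the excerpt. A minimal stand-in, assuming it compares paired
# rows of two binary feature matrices (the real helper may differ):
def hamming_dist(a, b):
    # Number of positions at which the paired binary rows differ.
    return np.sum(a != b, axis=1)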

# Even- and odd-indexed positions within the first half of the matched test data.
evens = [x for x in range(int(test_matched.shape[0] / 2)) if x % 2 == 0]
odds = [x for x in range(int(test_matched.shape[0] / 2)) if x % 2 == 1]

# Append a global pooling layer on top of the chosen low-dimensional feature layer.
disc_layers.append(ll.GlobalPoolLayer(disc_layers[f_low_dim]))
batch_size_test = 100
print('Extracting features from matched data...')
features_matched = extract_features(disc_layers, test_matched, batch_size_test)

print('Extracting features from unmatched data...')
features_unmatched = extract_features(disc_layers, test_unmatched,
                                      batch_size_test)

print('Number of features considered in the experiment: ' +
      str(features_matched[evens].shape[1]))

# Binarize the extracted features by thresholding at zero.
features_matched[features_matched >= 0.0] = 1
features_matched[features_matched < 0.0] = 0

features_unmatched[features_unmatched >= 0.0] = 1
features_unmatched[features_unmatched < 0.0] = 0

# Hamming distance between the binarized features of the even/odd halves.
d_matched = hamming_dist(features_matched[evens], features_matched[odds])
Example #2
# Additional imports used below (module aliases assumed from usage in the snippet):
import numpy as np
import theano as th
import lasagne.layers as ll
import plotting
from scipy.spatial.distance import cdist
from sklearn.metrics import average_precision_score

from backends import get_discriminator_binary
from data_preprocessing import get_train_data, get_test_data, extract_features
from settings import settings_binary
from triplet_utils import load_model

args = settings_binary()
disc_layers = get_discriminator_binary()
load_model(disc_layers, args.discriminator_pretrained)

trainx, trainy, txs, tys = get_train_data(args.data_dir, args.count,
                                          args.seed_data)
testx, testy = get_test_data(args.data_dir)

train_features = extract_features(disc_layers, trainx, args.batch_size)
test_features = extract_features(disc_layers, testx)
tx_features = extract_features(disc_layers, txs)
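
# extract_features comes from data_preprocessing above; for reference, a rough
# sketch of what a minibatch feature extractor of this shape often looks like
# (illustrative only; the name, signature and defaults below are assumptions):
def _extract_features_sketch(layers, x, batch_size=100):
    import theano.tensor as T
    x_sym = T.tensor4()  # assumes image input in NCHW layout
    feats_sym = ll.get_output(layers[-1], x_sym, deterministic=True)
    feat_fn = th.function(inputs=[x_sym], outputs=feats_sym)
    chunks = [feat_fn(x[i:i + batch_size]) for i in range(0, len(x), batch_size)]
    return np.concatenate(chunks, axis=0)
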
# Test the generator in sampling mode and save an image grid of its samples.
# gen_dat is the generator's symbolic output (not shown in this excerpt).
samplefun = th.function(inputs=[], outputs=gen_dat)
sample_x = samplefun()
img_bhwc = np.transpose(sample_x[:100], (0, 2, 3, 1))  # NCHW -> NHWC for plotting
img_tile = plotting.img_tile(img_bhwc, aspect_ratio=1.0, border_color=1.0, stretch=True)
img = plotting.plot_img(img_tile, title='CIFAR10 samples')
plotting.plt.savefig("cifar_tgan_sample.png")

# Compile a feature-extraction function on the discriminator's top layer.
# x_temp is the symbolic input variable (not shown in this excerpt).
features = ll.get_output(disc_layers[-1], x_temp, deterministic=True)
generateTestF = th.function(inputs=[x_temp], outputs=features)

batch_size_test = 100

print('Extracting features from test data')
test_features = extract_features(disc_layers, testx, batch_size_test)
print('Extracting features from train data')
train_features = extract_features(disc_layers, trainx, args.batch_size)

# Rank all training samples by feature-space distance to each test sample.
Y = cdist(test_features, train_features)
ind = np.argsort(Y, axis=1)
prec = 0.0
acc = [0.0] * 10
# Compute retrieval statistics for every test query.
for k in range(np.shape(test_features)[0]):
    class_values = trainy[ind[k, :]]              # training labels sorted by distance
    y_true = (testy[k] == class_values)           # relevant where the label matches the query
    y_scores = np.arange(y_true.shape[0], 0, -1)  # descending scores preserve that ranking
    ap = average_precision_score(y_true, y_scores)
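    # Reference note (not from the original script): average_precision_score
    # ranks samples by y_scores, so the descending arange above simply keeps
    # the cdist/argsort neighbour order. E.g. y_true = [1, 0, 1, 0] with
    # scores [4, 3, 2, 1] gives AP = (1/1 + 2/3) / 2 ~= 0.83.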
    prec = prec + ap
    for n in range(len(acc)):