import numpy as np
import read

# build_mlp_model, triplet_generator_minibatch and the constants used below
# (feature_length, mini_batch_size, steps_per_epoch, epochs, k) are defined
# elsewhere in this project.
feature_data = read.read()

test_ids = list(feature_data.keys())
for test_id in test_ids:

    _train_data, _test_data = read.split(feature_data, test_id)
    _train_data, _train_labels = read.flatten(_train_data)
    _test_data, _test_labels = read.flatten(_test_data)

    _train_data = np.array(_train_data)
    _test_data = np.array(_test_data)

    _embedding_model, _triplet_model = build_mlp_model((feature_length, ))

    _triplet_model.fit_generator(triplet_generator_minibatch(
        _train_data, _train_labels, mini_batch_size),
                                 steps_per_epoch=steps_per_epoch,
                                 epochs=epochs,
                                 verbose=1)

    _train_preds = _embedding_model.predict(_train_data)
    _test_preds = _embedding_model.predict(_test_data)

    acc = read.cos_knn(k, _test_preds, _test_labels, _train_preds,
                       _train_labels)
    result = 'prototype_tn_mlp, 3nn,' + str(test_id) + ',' + str(acc)
    print(result)
    read.write_data('tn_mlp.csv', result)
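
build_mlp_model and triplet_generator_minibatch are project helpers that are not shown here. A minimal sketch of what they could look like follows; the layer sizes, embedding dimension, margin and sampling scheme are assumptions, not the original implementation.

import numpy as np
from tensorflow.keras import Input, Model
from tensorflow.keras import backend as K
from tensorflow.keras.layers import Dense, concatenate


def build_mlp_model(input_shape, embedding_dim=64, margin=1.0):
    # Shared MLP that maps a feature vector to an embedding.
    base_in = Input(input_shape)
    x = Dense(128, activation='relu')(base_in)
    x = Dense(embedding_dim, activation=None)(x)
    embedding_model = Model(base_in, x)

    # Triplet wrapper: anchor / positive / negative share the embedding MLP.
    anchor, positive, negative = (Input(input_shape) for _ in range(3))
    merged = concatenate([embedding_model(anchor),
                          embedding_model(positive),
                          embedding_model(negative)], axis=1)

    def triplet_loss(_, y_pred):
        a = y_pred[:, :embedding_dim]
        p = y_pred[:, embedding_dim:2 * embedding_dim]
        n = y_pred[:, 2 * embedding_dim:]
        pos_d = K.sum(K.square(a - p), axis=1)
        neg_d = K.sum(K.square(a - n), axis=1)
        return K.maximum(pos_d - neg_d + margin, 0.0)

    triplet_model = Model([anchor, positive, negative], merged)
    triplet_model.compile(optimizer='adam', loss=triplet_loss)
    return embedding_model, triplet_model


def triplet_generator_minibatch(data, labels, batch_size):
    # Endlessly yields (anchor, positive, negative) minibatches plus a
    # dummy target, as expected by fit_generator above.
    labels = np.array(labels)
    while True:
        a_idx = np.random.randint(0, len(data), batch_size)
        p_idx = [np.random.choice(np.where(labels == labels[i])[0])
                 for i in a_idx]
        n_idx = [np.random.choice(np.where(labels != labels[i])[0])
                 for i in a_idx]
        yield ([data[a_idx], data[p_idx], data[n_idx]],
               np.zeros((batch_size, 1)))
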
        modelinputs.append(targetembedding)
        supportlabels = Input((numsupportset, classes_per_set))
        modelinputs.append(supportlabels)
        knnsimilarity = MatchCosine(nway=classes_per_set,
                                    n_samp=samples_per_class)(modelinputs)

        model = Model(inputs=[input1, supportlabels], outputs=knnsimilarity)
        model.compile(optimizer='adam',
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])
        model.fit([_train_data[0], _train_data[1]],
                  _train_data[2],
                  epochs=epochs,
                  batch_size=batch_size,
                  verbose=1)

        _support_preds = base_network.predict(_support_data)

        for _l in list(_test_data[test_id].keys()):
            _test_label_data = _test_data[test_id][_l]
            _test_labels = [_l for i in range(len(_test_label_data))]
            _test_label_data = np.array(_test_label_data)
            _test_labels = np.array(_test_labels)
            _test_preds = base_network.predict(_test_label_data)

            acc = read.cos_knn(k, _test_preds, _test_labels, _support_preds,
                               _support_labels)
            result = 'mn_mlp, 3nn,' + str(test_id) + ',' + str(
                a_label) + ',' + str(_l) + ',' + str(acc)
            read.write_data('pmn_mlp_oe.csv', result)
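
MatchCosine above is a custom layer from this project. A rough matching-networks style sketch with the same constructor arguments is shown below; the input ordering (support embeddings, then the target embedding, then the one-hot support labels) follows how modelinputs is assembled above, and the rest (softmax attention over cosine similarities) is an assumption.

import tensorflow as tf
from tensorflow.keras.layers import Layer


class MatchCosine(Layer):
    """Matching-networks read-out: cosine attention over the support set."""

    def __init__(self, nway=5, n_samp=1, **kwargs):
        super().__init__(**kwargs)
        self.nway = nway
        self.n_samp = n_samp  # kept for signature compatibility; unused here

    def call(self, inputs):
        # inputs = [support embeddings..., target embedding, support labels]
        support = tf.stack(inputs[:-2], axis=1)        # (batch, S, dim)
        target = inputs[-2]                            # (batch, dim)
        labels = inputs[-1]                            # (batch, S, nway)
        support_n = tf.math.l2_normalize(support, axis=-1)
        target_n = tf.math.l2_normalize(target, axis=-1)
        sims = tf.reduce_sum(support_n * target_n[:, None, :], axis=-1)
        attention = tf.nn.softmax(sims, axis=-1)       # attention kernel
        return tf.einsum('bs,bsc->bc', attention, labels)
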
def ed_knn(test_data, train_data, thigh_train_data):
    # Head reconstructed (assumption): average the thigh features of the kk
    # nearest wrist-space training neighbours (Euclidean distance).
    dist = np.linalg.norm(test_data[:, None] - train_data[None], axis=2)
    top = np.argsort(dist, axis=1)
    pred = [[thigh_train_data[j] for j in i[:kk]] for i in top]
    pred = np.average(np.array(pred), axis=1)
    return pred


feature_data = read.read()
test_ids = list(feature_data.keys())

for test_id in test_ids:
    _train_data, _test_data = read.split(feature_data, [test_id])
    w_train_data, t_train_data, _train_labels = read.flatten(_train_data)
    w_test_data, t_test_data, _test_labels = read.flatten(_test_data)

    w_train_data = np.array(w_train_data)
    t_train_data = np.array(t_train_data)
    train_data = np.concatenate([w_train_data, t_train_data], axis=1)
    print(train_data.shape)
    w_test_data = np.array(w_test_data)

    t_test_data = ed_knn(w_test_data, w_train_data, t_train_data)
    test_data = np.concatenate([w_test_data, t_test_data], axis=1)
    print(test_data.shape)

    cos_acc = read.cos_knn(k, test_data, _test_labels, train_data,
                           _train_labels)
    results = 'euclid_t_translator,' + str(k) + ',' + str(
        kk) + ',cos_acc,' + str(cos_acc)
    print(results)
    read.write_data(results_file, results)
Example no. 4
    ca_test_data = np.concatenate([c_test_data, a_test_data], axis=1)
    print(ca_test_data.shape)

    ae_model = auto_encoder()
    ae_model.compile(optimizer='adam', loss='mse')
    ae_model.fit(h_train_data,
                 c_train_data,
                 verbose=0,
                 epochs=100,
                 shuffle=True)

    c_test_data = ae_model.predict(h_test_data)
    hc_test_data = np.concatenate([h_test_data, c_test_data], axis=1)
    print(hc_test_data.shape)

    cos_acc = read.cos_knn(k, ha_test_data, _test_labels, ha_train_data,
                           _train_labels)
    ha_results.append('ha,' + 'a_translator,' + str(k) + ',cos_acc,' +
                      str(cos_acc))

    cos_acc = read.cos_knn(k, ca_test_data, _test_labels, ca_train_data,
                           _train_labels)
    ca_results.append('ca,' + 'a_translator,' + str(k) + ',cos_acc,' +
                      str(cos_acc))

    cos_acc = read.cos_knn(k, hc_test_data, _test_labels, hc_train_data,
                           _train_labels)
    hc_results.append('hc,' + 'c_translator,' + str(k) + ',cos_acc,' +
                      str(cos_acc))

for item in hc_results:
    read.write_data(results_file, item)
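
auto_encoder() is likewise defined elsewhere in the project. A minimal sketch consistent with its usage above (h features in, c features out, trained with MSE) follows; feature_dim and the layer sizes are placeholders, not the real builder.

from tensorflow.keras import Input, Model
from tensorflow.keras.layers import Dense

feature_dim = 64  # placeholder: set to the real per-sensor feature width


def auto_encoder():
    # Encoder-decoder MLP used as an h -> c feature translator.
    inputs = Input((feature_dim, ))
    x = Dense(128, activation='relu')(inputs)
    x = Dense(32, activation='relu')(x)  # bottleneck
    x = Dense(128, activation='relu')(x)
    outputs = Dense(feature_dim, activation=None)(x)
    return Model(inputs, outputs)
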
Example no. 5
    ha_train_data = np.concatenate([h_train_data, a_train_data], axis=1)
    print(ha_train_data.shape)

    h_test_data = np.array(h_test_data)
    c_test_data = np.array(c_test_data)
    a_test_data = np.array(a_test_data)
    test_data = np.concatenate([h_test_data, c_test_data, a_test_data], axis=1)
    print(test_data.shape)
    hc_test_data = np.concatenate([h_test_data, c_test_data], axis=1)
    print(hc_test_data.shape)
    ca_test_data = np.concatenate([c_test_data, a_test_data], axis=1)
    print(ca_test_data.shape)
    ha_test_data = np.concatenate([h_test_data, a_test_data], axis=1)
    print(ha_test_data.shape)

    cos_acc = read.cos_knn(k, test_data, _test_labels, train_data,
                           _train_labels)
    all_results.append('knn,' + str(k) + ',cos_acc,' + str(cos_acc))
    h_cos_acc = read.cos_knn(k, h_test_data, _test_labels, h_train_data,
                             _train_labels)
    h_results.append('h,knn,' + str(k) + ',cos_acc,' + str(h_cos_acc))
    c_cos_acc = read.cos_knn(k, c_test_data, _test_labels, c_train_data,
                             _train_labels)
    c_results.append('c,knn,' + str(k) + ',cos_acc,' + str(c_cos_acc))
    a_cos_acc = read.cos_knn(k, a_test_data, _test_labels, a_train_data,
                             _train_labels)
    a_results.append('a,knn,' + str(k) + ',cos_acc,' + str(a_cos_acc))
    hc_cos_acc = read.cos_knn(k, hc_test_data, _test_labels, hc_train_data,
                              _train_labels)
    hc_results.append('hc,knn,' + str(k) + ',cos_acc,' + str(hc_cos_acc))
    ha_cos_acc = read.cos_knn(k, ha_test_data, _test_labels, ha_train_data,
                              _train_labels)
    ha_results.append('ha,knn,' + str(k) + ',cos_acc,' + str(ha_cos_acc))
    ca_cos_acc = read.cos_knn(k, ca_test_data, _test_labels, ca_train_data,
                              _train_labels)
    ca_results.append('ca,knn,' + str(k) + ',cos_acc,' + str(ca_cos_acc))

Example no. 6
import numpy as np
import read

feature_data = read.read()
test_ids = list(feature_data.keys())
for test_id in test_ids:

    _train_data, _test_data = read.split(feature_data, [test_id])
    _test_data, _test_labels = read.flatten(_test_data)
    _train_data, _train_labels = read.flatten(_train_data)

    _train_data = np.array(_train_data)
    print(_train_data.shape)
    _train_data = np.reshape(
        _train_data,
        (_train_data.shape[0], _train_data.shape[1] * _train_data.shape[2]))
    print(_train_data.shape)

    _test_data = np.array(_test_data)
    print(_test_data.shape)
    _test_data = np.reshape(
        _test_data,
        (_test_data.shape[0], _test_data.shape[1] * _test_data.shape[2]))
    print(_test_data.shape)

    for k in [1, 2, 3, 5]:
        acc = read.cos_knn(k, _test_data, _test_labels, _train_data,
                           _train_labels)
        print(acc)
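
Every example above scores embeddings with read.cos_knn. For reference, a minimal cosine-similarity k-NN accuracy function with the same signature might look like the sketch below; the real helper's voting and tie-breaking may differ.

import numpy as np
from sklearn.metrics.pairwise import cosine_similarity


def cos_knn(k, test_data, test_labels, train_data, train_labels):
    # Cosine similarity between every test and training vector.
    sims = cosine_similarity(test_data, train_data)  # (n_test, n_train)
    top_k = np.argsort(-sims, axis=1)[:, :k]  # indices of the k most similar
    train_labels = np.array(train_labels)
    correct = 0
    for neighbours, true_label in zip(top_k, test_labels):
        votes, counts = np.unique(train_labels[neighbours],
                                  return_counts=True)
        if votes[np.argmax(counts)] == true_label:  # majority vote
            correct += 1
    return correct / len(test_labels)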