import numpy as np
from sklearn.model_selection import KFold

# `dataset`, `experiment`, and `SA1Experiment` are provided by the original module and are not defined in this snippet.


def run(no_folds=5, supervised=True, i=0, l=2, n=128, expParameters=None):
    expParameters = expParameters or {}  # avoid a shared mutable default argument
    inst = KFold(n_splits=no_folds, shuffle=True, random_state=125)

    exp = experiment.GGCNNExperiment('2018-08-28-SA1SA2', '2018-08-28-SA1SA2',
                                     SA1Experiment(neurons=n, blocks=l, **expParameters))

    exp.num_iterations = 5000
    exp.optimizer = 'adam'
    exp.loss_type = 'linear'

    exp.debug = True

    exp.preprocess_data(dataset)

    valid_idx = np.flatnonzero(dataset['level_0']['labels'] >= 0)  # Missing data labelled with -1
    # KFold.split returns positional indices into `valid_idx`; map them back to node
    # indices so that they index the full label array below.
    fold_a, fold_b = list(inst.split(valid_idx))[i]
    if supervised:
        train_idx, test_idx = valid_idx[fold_a], valid_idx[fold_b]
    else:
        # Reversed so the test set is larger than the training set (semi-supervised setting)
        train_idx, test_idx = valid_idx[fold_b], valid_idx[fold_a]

    n_components = expParameters.get('linkage_adjustment_components', None)
    exp.create_data(train_idx, test_idx, n_components=n_components)
    exp.build_network()
    results = exp.run()
    
    # Node type of input nodes: 0 = training set; 1 = test set; -1 = neither
    idx_split = np.empty((len(dataset['level_0']['labels']), 1))
    idx_split.fill(-1)
    idx_split[train_idx] = 0
    idx_split[test_idx] = 1

    return results, idx_split
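
A minimal usage sketch for the function above, assuming `dataset` has already been loaded at module level; the fold index and network size are illustrative, and the summary line relies only on the -1/0/1 coding of `idx_split` documented in the function.

# Hypothetical driver for a single supervised fold; the values here are illustrative.
results, idx_split = run(no_folds=5, supervised=True, i=0, l=2, n=128)
print('train / test / unused nodes:',
      int((idx_split == 0).sum()), int((idx_split == 1).sum()), int((idx_split == -1).sum()))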
Example #2
def run(no_folds=5, supervised=True, i=0, l=2, n=128, expParameters=None):
    expParameters = expParameters or {}  # avoid a shared mutable default argument
    inst = KFold(n_splits=no_folds, shuffle=True, random_state=125)

    exp = experiment.GGCNNExperiment('2018-08-28-SA1SA2', '2018-08-28-SA1SA2',
                                     SA1Experiment(neurons=n, blocks=l, **expParameters))

    exp.num_iterations = 5000
    exp.optimizer = 'adam'
    exp.loss_type = 'linear'

    exp.debug = True

    exp.preprocess_data(dataset)

    fold_a, fold_b = list(inst.split(np.arange(len(dataset[0]))))[i]
    if supervised:
        train_idx, test_idx = fold_a, fold_b
    else:
        # Reversed so the test set is larger than the training set (semi-supervised setting)
        train_idx, test_idx = fold_b, fold_a

    exp.create_data(train_idx, test_idx)
    exp.build_network()
    results = exp.run()

    return results
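
A sketch of driving this variant across all folds, assuming `dataset` is loaded as above; how the per-fold `results` objects are combined is left open, since their structure is not shown here.

# Hypothetical cross-validation driver over all folds (supervised setting).
no_folds = 5
all_results = [run(no_folds=no_folds, supervised=True, i=fold, l=2, n=128)
               for fold in range(no_folds)]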
Example #3
# Command-line arguments: number of blocks (l), number of neurons (n) and fold index (i).
# The first two reads are inferred from the defaults in the except branch below.
try:
    l = int(sys.argv[1])
    n = int(sys.argv[2])
    i = int(sys.argv[3])
except IndexError:
    l = 2
    n = 128
    i = 0

# Assumed: the fold splitter used at `inst.split(...)` below matches the setup in the other examples.
inst = KFold(n_splits=5, shuffle=True, random_state=125)

saveName = 'Output/SemiSupervisedSydney-NoEdge-l={:d}-n={:d}-i={:d}.csv'.format(
    l, n, i)

# Accumulators for the per-run summary; the code that fills them and writes `saveName` is not part of this snippet.
max_acc = []
iteration = []
layers = []
neurons = []
rep = []

exp = experiment.GGCNNExperiment('2018-06-06-SA1', '2018-06-06-sa1',
                                 SA1Experiment(neurons=n, blocks=l))
# exp = experiment.SingleGraphCNNExperiment('2018-06-06-SA1', '2018-06-06-sa1', SA1Experiment(neurons = n, blocks = l))

exp.num_iterations = 1000
exp.optimizer = 'adam'

exp.debug = True

exp.preprocess_data(dataset)

train_idx, test_idx = list(inst.split(np.arange(len(dataset[-1]))))[i]
# test_idx, train_idx = list(inst.split(np.arange(len(dataset[-1]))))[i]  # Reversed to get more samples in the test set than the training set

exp.create_data(train_idx, test_idx)
exp.build_network()
results = exp.run()
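
The script defines `saveName` and the accumulator lists, but the code that fills and writes them is not included in this snippet. Below is a minimal sketch, assuming pandas is available and that each run contributes one row; the value appended to `max_acc` is a placeholder, since the structure of `results` is not shown.

# Hypothetical summary writer; the accuracy value is a placeholder because the
# structure of `results` is not shown in the snippet above.
import pandas as pd

layers.append(l)
neurons.append(n)
rep.append(i)
iteration.append(exp.num_iterations)
max_acc.append(float('nan'))  # replace with the best accuracy reported by `results`

pd.DataFrame({'max_acc': max_acc, 'iteration': iteration, 'layers': layers,
              'neurons': neurons, 'rep': rep}).to_csv(saveName, index=False)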
Example #4
        # Tail of the network-construction method; the enclosing class definition is cut off in this snippet.
        net.make_embedding_layer(self.neurons)
        net.make_graphcnn_layer(1,
                                name='final',
                                with_bn=False,
                                with_act_func=False)


no_folds = 5
inst = KFold(n_splits=no_folds, shuffle=True, random_state=125)

l = 2
n = 64
i = 2

exp = experiment.GGCNNExperiment('2018-08-28-SA1SA2', '2018-08-28-SA1SA2',
                                 SA1Experiment(neurons=n, blocks=l))

exp.num_iterations = 2000
exp.optimizer = 'adam'
exp.loss_type = "linear"

exp.debug = True

exp.preprocess_data(dataset)

train_idx, test_idx = list(inst.split(np.arange(len(dataset[0]))))[i]
# print('Before: ', exp.train_idx.shape)
# exp.train_idx = np.append(exp.train_idx, np.arange( SA1DatasetSize , len(dataset[-1] )))
# exp.test_idx = np.append(exp.test_idx, np.arange( SA1DatasetSize , len(dataset[-1] )))
# print('After: ', exp.train_idx.shape)
# test_idx, train_idx = list(inst.split(np.arange(len(dataset[-1]))))[i]  # Reversed to get more samples in the test set than the training set
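
The example is cut off after the fold split; the remaining steps would presumably mirror the other examples on this page (shown here as an assumption, not as the original code).

# Assumed continuation, mirroring the other examples above.
exp.create_data(train_idx, test_idx)
exp.build_network()
results = exp.run()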