def pretrain(seed=None, data=None, topology=None, nepoch=50, weights=None):
    if weights is None:
        weights = []  # initialize list of per-layer weight matrices

    if len(topology) < 2:
        # fewer than two layers left: nothing more to pretrain
        return weights

    print("\n[Train] Layer {}/{}, Nr of Epochs: {}".format(
        len(weights) + 1, len(weights) + len(topology) - 1, nepoch))

    # create diabolo network (autoencoder) with a symmetric topology
    dn = DN([topology[0], topology[1], topology[0]], seed)
    dn.init()

    # wrap the current layer's data in a fresh dataset container
    dataset = SimpleContainer(data, seed)
    dataset.init()

    # train the DN on this dataset using backpropagation
    run_program(dataset, dn, bp, nepoch)

    # for each instance, compute the activation of the middle (hidden) layer
    xlayer = np.array([dn.compute_ilp_map([instance])[1]
                       for instance in dataset.data['x']],
                      dtype=np.float16)

    # store the newly trained input-to-hidden weight matrix
    weights.append(dn.weights[0])

    # recurse: the hidden-layer encodings become the next layer's input
    pretrain(seed, {'x': xlayer, 'y': xlayer}, topology[1:], nepoch, weights)

    return weights
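# Below is a minimal, self-contained NumPy sketch of the same greedy
# layer-wise pretraining scheme, for readers without the DN, SimpleContainer,
# and run_program machinery above. The sigmoid autoencoder without bias
# terms, the squared reconstruction loss, and the learning rate are
# illustrative assumptions, not the hyperparameters used by pretrain() itself.
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def pretrain_sketch(x, topology, nepoch=50, lr=0.1, seed=None):
    rng = np.random.default_rng(seed)
    weights = []
    for n_in, n_hidden in zip(topology, topology[1:]):
        # one diabolo network per layer pair: n_in -> n_hidden -> n_in
        w_enc = rng.normal(0.0, 0.1, (n_in, n_hidden))
        w_dec = rng.normal(0.0, 0.1, (n_hidden, n_in))
        for _ in range(nepoch):
            h = sigmoid(x @ w_enc)   # encode
            y = sigmoid(h @ w_dec)   # reconstruct the input
            # backpropagate the squared reconstruction error
            d_y = (y - x) * y * (1.0 - y)
            d_h = (d_y @ w_dec.T) * h * (1.0 - h)
            w_dec -= lr * (h.T @ d_y)
            w_enc -= lr * (x.T @ d_h)
        weights.append(w_enc)
        x = sigmoid(x @ w_enc)       # hidden code becomes the next input
    return weights

# e.g. pretrain_sketch(np.random.default_rng(0).random((100, 8)), [8, 4, 2])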
def load_dataset(self, seed=None):
    # read Tim Berners-Lee's FOAF card straight from the web
    rdfGraph = rdf.KnowledgeGraph()
    rdfGraph.read("http://www.w3.org/People/Berners-Lee/card")

    # propositionalize the graph with a two-hot encoding
    dataset = SimpleContainer(rdf.propositionalize(rdfGraph, twoHot), seed)
    dataset.init()

    return dataset
def load_dataset(self, seed=None):
    # read a local Turtle file
    rdfGraph = rdf.KnowledgeGraph()
    rdfGraph.read("./examples/das.ttl")

    # propositionalize the graph with a two-hot encoding
    dataset = SimpleContainer(rdf.propositionalize(rdfGraph, twoHot), seed)
    dataset.init()

    return dataset
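# The two loaders above both rely on rdf.propositionalize(rdfGraph, twoHot).
# As a rough sketch of what a two-hot encoding could look like (an assumption
# about the encoding, not the rdf module's actual implementation), each
# (subject, predicate, object) triple becomes a vector over all graph terms
# with exactly two hot bits: one for the subject, one for the object. The
# function name triples_to_two_hot is hypothetical.
import numpy as np

def triples_to_two_hot(triples):
    terms = sorted({s for s, _, _ in triples} | {o for _, _, o in triples})
    index = {term: i for i, term in enumerate(terms)}
    x = np.zeros((len(triples), len(terms)), dtype=np.float16)
    for row, (s, _, o) in enumerate(triples):
        x[row, index[s]] = 1.0  # hot bit for the subject
        x[row, index[o]] = 1.0  # hot bit for the object
    return x

# e.g. triples_to_two_hot([("ex:alice", "ex:knows", "ex:bob")])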
def load_dataset(self, seed=None):
    # load the Iris ARFF file, map nominal values to numbers, and normalize
    arffFile = arff.ARFF('./examples/iris.arff')
    arff.toNumerical(arffFile)
    arffFile.data = aux.normalize(arffFile.data)

    # columns 0-3 are the features, column 4 the class label
    dataset = SimpleContainer(arff.toVector(arffFile.data, range(0, 4), range(4, 5)), seed)
    dataset.init()

    return dataset
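# For reference, a guess at what aux.normalize and arff.toVector do in the
# Iris loader above: per-column min-max scaling to [0, 1], then splitting
# each row into a feature vector (x_cols) and a label vector (y_cols). Both
# helpers are hypothetical stand-ins, not the repository's implementations.
import numpy as np

def normalize_sketch(data):
    data = np.asarray(data, dtype=np.float64)
    lo, hi = data.min(axis=0), data.max(axis=0)
    span = np.where(hi > lo, hi - lo, 1.0)  # guard against zero division
    return (data - lo) / span

def to_vector_sketch(data, x_cols, y_cols):
    data = np.asarray(data)
    return {'x': data[:, list(x_cols)], 'y': data[:, list(y_cols)]}

# e.g. to_vector_sketch(normalize_sketch(rows), range(0, 4), range(4, 5))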