예제 #1
0
파일: pretrain.py 프로젝트: wxwilcke/dann
def pretrain(seed=None, data=None, topology=None, nepoch=50, weights=None):
    """Greedily pretrain a stack of layers with diabolo networks (autoencoders).

    For each adjacent pair in `topology`, trains an autoencoder on the current
    data, keeps its input-to-hidden weight matrix, and feeds the hidden-layer
    activations forward as the data for the next layer.

    :param seed: RNG seed forwarded to the network and dataset containers
    :param data: dict with 'x' and 'y' entries holding the training instances
    :param topology: list of layer sizes; pretraining stops when fewer than
        two sizes remain
    :param nepoch: number of training epochs per layer
    :param weights: accumulator list of trained weight matrices; created
        fresh when None (avoids the mutable-default pitfall)
    :return: the list of trained input-to-hidden weight matrices, one per layer
    """
    if weights is None:
        weights = []

    # Iterate layer pairs instead of recursing; each pass consumes one
    # topology entry and appends one trained weight matrix.
    while len(topology) >= 2:
        total = len(weights) + len(topology) - 1
        print("\n[Train] Layer {}/{}, Nr of Epoch: {}".format(len(weights)+1, total, nepoch))

        # Diabolo network: encode topology[0] -> topology[1] -> topology[0].
        autoencoder = DN([topology[0], topology[1], topology[0]], seed)
        autoencoder.init()

        # Wrap the current data in a fresh container for this layer.
        container = SimpleContainer(data, seed)
        container.init()

        # Train the autoencoder with backpropagation.
        run_program(container, autoencoder, bp, nepoch)

        # Hidden-layer (middle) activations become the next layer's input.
        hidden = np.array(
            [autoencoder.compute_ilp_map([sample])[1] for sample in container.data['x']],
            dtype=np.float16,
        )

        # Keep the trained input-to-hidden weight matrix.
        weights.append(autoencoder.weights[0])

        # Advance: hidden activations are both input and target next round.
        data = {'x': hidden, 'y': hidden}
        topology = topology[1:]

    return weights
예제 #2
0
파일: foaf.py 프로젝트: wxwilcke/dann
    def load_dataset(self, seed=None):
        """Fetch Tim Berners-Lee's FOAF card and wrap it as a dataset.

        :param seed: RNG seed forwarded to the dataset container
        :return: an initialized SimpleContainer over the propositionalized graph
        """
        graph = rdf.KnowledgeGraph()
        graph.read("http://www.w3.org/People/Berners-Lee/card")

        # Propositionalize the RDF graph (two-hot encoding) for the container.
        container = SimpleContainer(rdf.propositionalize(graph, twoHot), seed)
        container.init()
        return container
예제 #3
0
파일: das.py 프로젝트: wxwilcke/dann
    def load_dataset(self, seed=None):
        """Read the local DAS Turtle file and wrap it as a dataset.

        :param seed: RNG seed forwarded to the dataset container
        :return: an initialized SimpleContainer over the propositionalized graph
        """
        graph = rdf.KnowledgeGraph()
        graph.read("./examples/das.ttl")

        # Propositionalize the RDF graph (two-hot encoding) for the container.
        container = SimpleContainer(rdf.propositionalize(graph, twoHot), seed)
        container.init()
        return container
예제 #4
0
파일: iris.py 프로젝트: wxwilcke/dann
    def load_dataset(self, seed=None):
        """Load the iris ARFF file, normalize it, and wrap it as a dataset.

        Attributes 0-3 become the feature vector; attribute 4 is the target.

        :param seed: RNG seed forwarded to the dataset container
        :return: an initialized SimpleContainer over the vectorized data
        """
        source = arff.ARFF('./examples/iris.arff')

        # Convert nominal attributes to numbers, then scale the values.
        arff.toNumerical(source)
        source.data = aux.normalize(source.data)

        container = SimpleContainer(arff.toVector(source.data, range(0, 4), range(4, 5)), seed)
        container.init()
        return container