Example #1
def update_status(ae, key, value):
    # Truncated above: `status` is loaded from status.json; the `if` guard is assumed.
    if key == "current":
        status.setdefault("history", []).insert(0, value)
    else:
        status[key] = value
    with open(ae.model_dir + "status.json", 'w') as f:
        f.write(json.dumps(status, indent=2, sort_keys=True))
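
Since the excerpt never shows how `status` is first read, here is a minimal companion sketch of that missing step; the IOError fallback (and the helper name) are assumptions, not part of the original:

import json

def load_status(model_dir):
    # Read status.json if present, otherwise start with an empty dict.
    try:
        with open(model_dir + "status.json") as f:
            return json.load(f)
    except IOError:
        return {}  # first run: no status file yet

# e.g. `status = load_status(ae.model_dir)` at the top of update_status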

if __name__ == '__main__':
    ### SETUP ###
    if not cross_validate:
        # Build a source block from the recorded session logs.
        lsm = LogSourceMaker(logfolder="/root/MatchBrain/logs/")
        b = lsm.get_block(shift=shift)
        bws = b.sources[0]  # brainwave source
        phs = b.sources[1]  # game-phase source
        prep = Preprocessing(bws)
        print("prep output dim: " + str(prep.output_dim))
        AutoTransformer.model_dir = "/root/MatchBrain/models/"
        # Input width is taken from the preprocessor rather than hard-coded.
        ae = AutoTransformer(prep.output_dim, prep, phs, epochs=epochs, num_sizes=5)
        print("have ae")
    ### DATA ###
    if generate_data:
        update_status(ae, "current", "preprocessing data")
        b.start()
        # Collect for up to data_mins minutes; bail out early if the block stops.
        while b.started and data_mins > 0:
            time.sleep(60)
            print(ae.batched)
            data_mins -= 1
        if b.started:
            b.stop()
        print("stopped")
        ae.cap_data()
        update_status(ae, "current", "saving data")
        ae.save_data()
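
The start/sleep/stop sequence above (repeated in the examples below) is a simple time-budgeted collection loop. A generic sketch of the pattern, with hypothetical names:

import time

def collect_for(block, minutes, tick=None):
    # Run `block` for up to `minutes` minutes, stopping early if it stops itself.
    block.start()
    while block.started and minutes > 0:
        time.sleep(60)
        if tick is not None:
            tick()  # e.g. progress reporting, like print(ae.batched) above
        minutes -= 1
    if block.started:
        block.stop()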
Example #2

def update_status(ae, key, value):
    # Truncated above, as in Example #1: `status` is loaded from status.json
    # and the `if` guard is an assumption; only its body survives here.
    if key == "current":
        status.setdefault("history", []).insert(0, value)
    else:
        status[key] = value
    with open(ae.model_dir + "status.json", 'w') as f:
        f.write(json.dumps(status, indent=2, sort_keys=True))

if __name__ == '__main__':
    ### SETUP ###
    nsm = NormSourceMaker(datafolder="/home/joren/PycharmProjects/MatchBrain/ml/",
                          phases=phase_names,
                          cross_val=True)
    AutoTransformer.model_dir = "/home/joren/PycharmProjects/MatchBrain/models/"
    for _ in xrange(len(nsm.cross_val_keys)):
        blk = nsm.get_block()
        #TODO get this size from somewhere better. Is it even correct?
        # (Example #1 derives the input width from prep.output_dim instead of hard-coding 100.)
        ae = AutoTransformer(100, blk.sinks[0], blk.sinks[1],
                             epochs=epochs, num_sizes=5)  #TODO get this 100 from somewhere reliable
        print("have ae")
        blk.start()
        # NOTE data_mins is shared across folds and never reset, so only the
        # first cross-validation fold actually spends time collecting here.
        while blk.started and data_mins > 0:
            time.sleep(60)
            print(ae.batched)
            data_mins -= 1
        if blk.started:
            blk.stop()
        print("stopped")
        ae.cap_data()
        print("capped")
        ### PRETRAIN ###
        # losscomb formats the newest loss of each layer as "0:0.1234, 1:0.0567, ...".
        losscomb = lambda zoo, h: ", ".join(map(lambda (i,e): str(i)+":"+('%.4f'%e[-1][zoo]), enumerate(h)))
        n = nest(1, 0.9)
        counter = 0
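
To make the formatting concrete, a quick demonstration with made-up loss histories; the shape `h[layer][-1][zoo]` is inferred from the lambda itself, and which index holds train vs. validation loss is a guess (Python 2):

losscomb = lambda zoo, h: ", ".join(map(lambda (i,e): str(i)+":"+('%.4f'%e[-1][zoo]), enumerate(h)))
h = [[(0.5, 0.6), (0.1234, 0.2)],    # layer 0: one loss tuple per epoch
     [(0.9, 1.0), (0.0567, 0.08)]]   # layer 1
print(losscomb(0, h))  # -> 0:0.1234, 1:0.0567
print(losscomb(1, h))  # -> 0:0.2000, 1:0.0800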
Example #3
def update_status(ae, key, value):
    # Truncated above; see Example #1 for the reconstructed body.
    with open(ae.model_dir + "status.json", 'w') as f:
        f.write(json.dumps(status, indent=2, sort_keys=True))


if __name__ == '__main__':
    ### SETUP ###
    nsm = NormSourceMaker(
        datafolder="/home/joren/PycharmProjects/MatchBrain/ml/",
        phases=phase_names,
        cross_val=True)
    AutoTransformer.model_dir = "/home/joren/PycharmProjects/MatchBrain/models/"
    for _ in xrange(len(nsm.cross_val_keys)):
        blk = nsm.get_block()
        #TODO get this size from somewhere better. Is it even correct?
        ae = AutoTransformer(
            100, blk.sinks[0], blk.sinks[1], epochs=epochs,
            num_sizes=5)  #TODO get this 100 from somewhere reliable
        print("have ae")
        blk.start()
        # NOTE as in Example #2: data_mins is never reset between folds.
        while blk.started and data_mins > 0:
            time.sleep(60)
            print(ae.batched)
            data_mins -= 1
        if blk.started:
            blk.stop()
        print("stopped")
        ae.cap_data()
        print("capped")
        ### PRETRAIN ###
        losscomb = lambda zoo, h: ", ".join(
            map(lambda (i, e): str(i) + ":" +
                ('%.4f' % e[-1][zoo]), enumerate(h)))
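
These excerpts are Python 2 (`xrange`, tuple-unpacking lambdas). For reference, a sketch of an equivalent `losscomb` in Python 3 syntax:

def losscomb(zoo, h):
    # Same output as the Python 2 lambda above: "0:0.1234, 1:0.0567, ..."
    return ", ".join("%d:%.4f" % (i, e[-1][zoo]) for i, e in enumerate(h))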