def __init__(self):
    """Build the graph-network dynamics model and load its pretrained state.

    Constructs a 2-node directed path graph, instantiates an FFGN on the
    GPU, restores weights from ``model0.05.pth``, and loads the fitted
    input/output normalizers from ``normalized/acrobot0.05.pth``.
    NOTE(review): file paths are hard-coded and loading requires CUDA.
    """
    super(Acrobot, self).__init__()

    # Directed 2-node path graph used as the model's interaction structure.
    self.G1 = nx.path_graph(2).to_directed()

    # Feature dimensions fed to the graph network.
    self.node_feat_size = 2
    self.edge_feat_size = 3
    self.graph_feat_size = 10

    # Feed-forward graph network on the GPU, with pretrained weights.
    network = FFGN(self.graph_feat_size,
                   self.node_feat_size,
                   self.edge_feat_size)
    self.gn = network.cuda()
    self.gn.load_state_dict(torch.load('model0.05.pth'))

    # Normalizers saved at training time; applied to inputs/outputs.
    saved = torch.load('normalized/acrobot0.05.pth')
    self.in_normalizer = saved['in_normalizer']
    self.out_normalizer = saved['out_normalizer']
# ===== Example #2 (separator text from the scraped source: "예제 #2", score "0") =====
    # NOTE(review): this span is the interior of a training routine whose
    # `def`/argument parsing (`opt`, `dset`, `dset_eval`) lies above the
    # visible chunk, and the epoch loop at the bottom continues below it.
    use_cuda = True  # set here but not referenced anywhere in the visible lines

    # Training and evaluation loaders: fixed batches of 200, single-process
    # loading, incomplete final batches dropped.
    dl = DataLoader(dset, batch_size=200, num_workers=0, drop_last=True)
    dl_eval = DataLoader(dset_eval,
                         batch_size=200,
                         num_workers=0,
                         drop_last=True)

    # Directed 6-node path graphs (input topology and target topology).
    G1 = nx.path_graph(6).to_directed()
    G_target = nx.path_graph(6).to_directed()
    # Visualize the graph; plt.show() blocks until the window is closed.
    nx.draw(G1)
    plt.show()
    # Feature dimensions for the graph network.
    node_feat_size = 6
    edge_feat_size = 3
    graph_feat_size = 10
    gn = FFGN(graph_feat_size, node_feat_size, edge_feat_size).cuda()
    # Optionally warm-start from a checkpoint passed on the command line.
    if opt.model != '':
        gn.load_state_dict(torch.load(opt.model))

    optimizer = optim.Adam(gn.parameters(), lr=1e-4)
    # NOTE(review): "schedular" is a typo for "scheduler", and step_size is
    # given as the float 5e4 — StepLR conventionally takes an int; confirm
    # this behaves as intended (decay by 0.975 every 50k scheduler steps).
    schedular = optim.lr_scheduler.StepLR(optimizer, 5e4, gamma=0.975)
    # TensorBoard run directory keyed by launch time, e.g. ./logs/runs/May01_12:30:45.
    savedir = os.path.join('./logs', 'runs',
                           datetime.now().strftime('%B%d_%H:%M:%S'))
    writer = SummaryWriter(savedir)
    step = 0  # global step counter, presumably incremented in the loop below

    # Input/output normalizers fitted offline; path is hard-coded.
    normalizers = torch.load('normalize.pth')
    in_normalizer = normalizers['in_normalizer']
    out_normalizer = normalizers['out_normalizer']
    std = in_normalizer.get_std()
    for epoch in range(300):