Example #1
0
def main():
    """Build the constraint-regression dataset splits and train the decoder."""
    # NOTE(review): paths are relative to root_dir="/" — presumably run from
    # the repository root; confirm against the dataset loader.
    exo_data = exoskeleton_dataset.ExoskeletonDataset(
        file="data/nc_agger2/constraint_regression", root_dir="/")
    train_dataset, val_dataset, test_dataset = exo_data.GetDataset()

    train_decoder(train_dataset, val_dataset, test_dataset)
Example #2
0
def main():
    """Two-stage training: pretrain the sub-networks, then train end-to-end."""
    exo_data = exoskeleton_dataset.ExoskeletonDataset(file="data/exo_data_3",
                                                      root_dir="/")
    train_dataset, val_dataset, test_dataset = exo_data.GetDataset()

    # Stage 1: encoder and decoder are trained independently.
    train_separately(train_dataset, val_dataset, test_dataset)
    print("Separate network train done! Begin whole network training...")

    # Stage 2: fine-tune the full pipeline jointly.
    train_wholenet(train_dataset, val_dataset, test_dataset)
Example #3
0
def main():
    """Real-time inference loop.

    Receives sensor packets over UDP on a background thread, runs the
    trained encoder to predict joint constraints, prints the calculated
    vs. predicted values, and sends the prediction back over the socket.

    Fixes vs. original:
      * removed dead code — four empty ``torch.Tensor()`` placeholders were
        created and immediately overwritten;
      * removed the unused local ``master`` (it was unsqueezed but never
        read; note this also drops a ``tensor_data["master"]`` dict access);
      * ``len(x)`` instead of the non-idiomatic ``x.__len__()``;
      * the three length guards are collapsed into one condition.
    """
    q = Queue()
    dataset = exoskeleton_dataset.ExoskeletonDataset(file="data/exo_data_3",
                                                     root_dir="/")
    train_dataset, val_dataset, test_dataset = dataset.GetDataset()

    # Background receiver pushes raw packet strings onto q.
    t_udp = threading.Thread(target=udprecv, args=(q, ))
    t_udp.start()

    # Layer sizes: encoder maps 14 inputs -> 4 constraints,
    # decoder maps 18 inputs -> 7 outputs.
    encoder_arch = [[14, 28], [28, 56], [56, 56], [56, 4]]
    decoder_arch = [[18, 36], [36, 72], [72, 72], [72, 7]]
    model = network.Net(encoder_arch, decoder_arch)

    model.decoder.load_state_dict(torch.load("decoder.model"))
    model.encoder.load_state_dict(torch.load("encoder.model"))
    realtime_loss = nn.MSELoss()

    while True:
        string = q.get()
        tensor_data = string_to_tensor(string)

        # Each field is unsqueezed into a column vector of shape (n, 1).
        target = tensor_data["target"].unsqueeze(dim=1)
        constrains = tensor_data["constrains"].unsqueeze(dim=1)
        slave = tensor_data["slave"].unsqueeze(dim=1)

        # Drop malformed packets (expected: 7 target, 4 constraint,
        # 7 slave values).
        if len(target) != 7 or len(constrains) != 4 or len(slave) != 7:
            continue

        # Encoder input: concatenated slave+target as one (1, 14) row.
        tmp = torch.cat((slave, target), 0)
        y = model.encoder.forward(torch.transpose(tmp, 0, 1))

        # NOTE(review): constrains is (4, 1) while y is (1, 4); MSELoss
        # broadcasts here — confirm this is the intended comparison.
        rl = realtime_loss.forward(constrains, y)
        jc = np.array(y.detach().numpy())
        jc_c = np.array(constrains.detach().numpy())

        data_str = "%f,%f,%f,%f" % (jc_c[0], jc_c[1], jc_c[2], jc_c[3])
        print("calculated:{}".format(data_str))

        data_str = "%f,%f,%f,%f" % (jc[0][0], jc[0][1], jc[0][2], jc[0][3])
        print("predicted:{}".format(data_str))

        print(rl.detach())
        # s / send_addr are module-level socket state (defined elsewhere).
        s.sendto(data_str.encode('utf-8'), send_addr)
Example #4
0
def main():
    """Evaluate the model on validation sets va_set1..va_set4.

    For each validation set, loads the dataset, runs ``test_data`` on every
    training sample, and writes per-sample results to ``mse_va<N>.txt``.

    Fixes vs. original:
      * the four copy-pasted stanzas are collapsed into one loop;
      * output files are opened with ``with`` so they are closed (the
        originals leaked all four file handles);
      * removed the unused ``realtime_loss`` local and the pointless
        ``dataset = []`` resets.
    """
    for set_idx in range(1, 5):
        dataset = exoskeleton_dataset.ExoskeletonDataset(
            file="data/validationset/va_set%d" % set_idx, root_dir="/")
        train_dataset, val_dataset, test_dataset = dataset.GetDataset()

        with open("mse_va%d.txt" % set_idx, "w+") as f:
            # NOTE(review): evaluation iterates the *train* split of each
            # validation set — presumably GetDataset puts everything there;
            # confirm against ExoskeletonDataset.
            for k in range(train_dataset.size):
                test_data(train_dataset[k], f)
Example #5
0
def main():
    """Load the jointly-trained model and run it over the training split.

    Fixes vs. original:
      * removed the unused local ``realtime_loss``;
      * removed commented-out alternate checkpoint-loading code;
      * normalized list-literal spacing to match the file's other examples.
    """
    dataset = exoskeleton_dataset.ExoskeletonDataset(
        file="data/exo_data_3", root_dir="/")
    train_dataset, val_dataset, test_dataset = dataset.GetDataset()

    # Layer sizes: encoder maps 14 inputs -> 4 constraints,
    # decoder maps 18 inputs -> 7 outputs.
    encoder_arch = [[14, 28], [28, 56], [56, 56], [56, 4]]
    decoder_arch = [[18, 36], [36, 72], [72, 72], [72, 7]]
    model = network.Net(encoder_arch, decoder_arch)

    # Restore the whole-network checkpoint (encoder + decoder together).
    model.load_state_dict(torch.load('./model'))

    for k in range(train_dataset.size):
        test_data(model, train_dataset[k])