Example #1
def main():
    #header, sdf = load_sdf_from_file(argv[1])
    # visualizer.visualize_sdf_as_mesh(sdf)
    # visualizer.start_visualization()
    #pc = sdf_to_point_cloud(sdf, header)

    # Ask the backends for deterministic behaviour (reproducible runs)
    torch.backends.cudnn.deterministic = True
    torch.backends.mkl.deterministic = True

    # Randomly initialized GDF tree to be fitted (depth 2, breadth 4)
    tree = build_tree(2, 4)

    def vec3(a, b, c): return Function((a, b, c), 'requires_grad')
    # Target field: a GDF built from four octahedral plane normals, exponent 9
    sdf = ([vec3(0.577, 0.577, 0.577),
            vec3(-0.577, 0.577, 0.577),
            vec3(0.577, -0.577, 0.577),
            vec3(0.577, 0.577, -0.577)],
           Function([9], 'requires_grad'))

    #fitted = optimize_to_point_cloud(tree, pc)
    # Fit the random tree so its GDF matches the target field sampled at 'pos'
    fitted = optimize_to_sdf(tree, gdf(*sdf)(Function('pos', 3)))
Example #2
def build_tree(depth, breadth):
    '''Build a random GDF tree: leaves hold `breadth` random direction
    vectors, inner nodes hold `breadth` subtrees; every node carries a
    trainable exponent p (initialized to 3.0).'''
    if depth == 0:
        return ([Function((np.random.rand(3) - 0.5).tolist(), 'requires_grad') for _ in range(breadth)],
                Function(3.0, 'requires_grad'))
    else:
        return ([build_tree(depth - 1, breadth) for _ in range(breadth)],
                Function(3.0, 'requires_grad'))
Example #3
def optimize_to_point_cloud(tree, pointcloud):
    ps = decompose_tree(tree)
    vs, p = tree
    model = gdf(vs, p)(Function('pos', 3))
    optimizer = torch.optim.Adam(ps)
    # LBFGS
    loss_fn = torch.nn.MSELoss(reduction='sum')  # size_average=False is deprecated
    for epoch in range(100):
        for row_index in range(pointcloud.shape[0]):

            xyz = pointcloud[row_index][:3]
            d = pointcloud[row_index][3]
            # prediction = model(torch.tensor(xyz, dtype=torch.float32))
            # prediction = model(Function(xyz.tolist()))
            prediction = model.generate_model(
                {'pos': torch.tensor(xyz, dtype=torch.float32, device=torch.cuda.current_device())})
            # Keep the target on the same device as the prediction
            expected = torch.tensor(d, dtype=torch.float32,
                                    device=torch.cuda.current_device()).unsqueeze(0)
            loss = loss_fn(prediction, expected)

            #print("Prediction {}; expected: {}".format(prediction, expected))
            # print(ps)

            optimizer.zero_grad()
            loss.backward(retain_graph=True)
            optimizer.step()
            model.update()

        with open("model.glsl", "w") as f:
            f.write(model.generate_shader())

        print(epoch, loss.item())
    return model
Example #4
def gdf_raw(tree, p):
    '''Returns a Generalized Distance Function:
    v -> (sum_i |t_i . v| ** p) ** (1 / p),
    recursing into subtrees with their own clamped exponents'''
    # return lambda v: sum(
    #    gdf_raw(t[0], p_clamp(t[1]))(v) if isinstance(
    #        t, tuple) else torch.abs(torch.dot(t, v)) ** p
    #    for t in tree) ** (1 / p)

    return lambda v: sum(
        gdf_raw(t[0], p_clamp(t[1]))(v) if isinstance(
            t, tuple) else t.dot(v).abs().pow(p)
        for t in tree).pow(Function(1.0) / p)
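For intuition, here is a minimal plain-PyTorch sketch of the same formula for a flat list of direction vectors, without nested subtrees; gdf_flat, normals and points are illustrative names, not part of the project API:

import torch

def gdf_flat(normals, p, points):
    # normals: (k, 3) direction vectors, points: (n, 3) sample positions
    # g(x) = (sum_i |n_i . x| ** p) ** (1 / p)
    dots = points @ normals.T            # (n, k) dot products
    return dots.abs().pow(p).sum(dim=1).pow(1.0 / p)

# Example: the four octahedral normals from Example #1 with exponent 9
normals = torch.tensor([[0.577, 0.577, 0.577],
                        [-0.577, 0.577, 0.577],
                        [0.577, -0.577, 0.577],
                        [0.577, 0.577, -0.577]])
values = gdf_flat(normals, 9.0, torch.randn(8, 3))

As p grows, the sum approaches max_i |n_i . x|, so the level sets approach a sharp polyhedron; smaller p rounds the corners, which is why the exponent is kept in a bounded range by p_clamp.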
Example #5
def optimize_to_sdf(tree, sdf):
    ps = decompose_tree(tree)
    vs, p = tree
    model = gdf(vs, p)(Function('pos', 3))
    optimizer = torch.optim.Adam(ps)

    loss_fn = torch.nn.MSELoss(reduction='sum')  # size_average=False is deprecated
    for epoch in range(100):

        #print_tree(tree, 0)

        for i in range(1000):

            start = time.time()
            # Sample a random batch of positions; xyz is already a float32
            # tensor on the target device, so it can be passed in directly
            xyz = torch.randn(BATCH_SIZE, 3, device=device)

            d = sdf.generate_model({'pos': xyz})

            prediction = model.generate_model({'pos': xyz})
            expected = d
            end = time.time()
            end = time.time()

            loss = loss_fn(prediction, expected)
            # print_tree(tree)
            print("Loss: {}".format(loss))

            optimizer.zero_grad()
            loss.backward(retain_graph=True)
            optimizer.step()
            model.update()
            print(end - start)

            if loss < 1e-14:
                break
        print(loss)

        with open("model.glsl", "w") as f:
            f.write(model.generate_shader())

    return model
Example #6
def p_clamp(p):
    '''Clamp the GDF exponent p to the range [1, 10]'''
    #min_clamp = p.min(Function(40.0))
    min_clamp = p.min(Function(10.0))
    both_clamp = min_clamp.max(Function(1.0))
    return both_clamp
Example #7
def gdf(tree, p,
        transpose=Function([-0.5, -0.5, -0.5]), signed_offset=Function(-0.2)):
    '''Cleaner interface: shifts the input point by `transpose` and adds
    `signed_offset` to the output'''
    return lambda v: gdf_raw(tree, p_clamp(p))(v + transpose) + signed_offset
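Continuing the plain-tensor sketch from the note under Example #4 (and reusing gdf_flat from it), this shift-and-offset wrapper corresponds roughly to the following; gdf_signed is again an illustrative name and its defaults simply mirror the arguments of gdf:

import torch

def gdf_signed(normals, p, points,
               transpose=torch.tensor([-0.5, -0.5, -0.5]),
               signed_offset=-0.2):
    # Clamp the exponent to [1, 10] as p_clamp does, shift the sample
    # positions by `transpose`, then add a constant offset so the zero
    # level set becomes a closed surface instead of a single point
    p = min(max(p, 1.0), 10.0)
    return gdf_flat(normals, p, points + transpose) + signed_offset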