def test_shape_composition_2d_single(shape_info, verbose):
    np.random.seed(42)

    cell_nums = (32, 24)
    shape = ShapeComposition2d()
    params = []
    for name, param in shape_info:
        shape.AddParametricShape(name, param.size)
        params.append(ndarray(param).ravel())
    params = np.concatenate(params)
    params += np.random.normal(size=params.size) * 0.01
    shape.Initialize(cell_nums, params, True)

    if verbose:
        visualize_level_set(shape)

    # Verify the gradients.
    nx = shape.node_num(0)
    ny = shape.node_num(1)
    sdf_weight = np.random.normal(size=(nx, ny))
    def loss_and_grad(x):
        shape.Initialize(cell_nums, x, True)
        sdf = ndarray(shape.signed_distances())
        loss = sdf_weight.ravel().dot(sdf)
        grad = 0
        for i in range(nx):
            for j in range(ny):
                grad += sdf_weight[i, j] * ndarray(shape.signed_distance_gradients((i, j)))
        return loss, grad
    from py_diff_stokes_flow.common.grad_check import check_gradients
    return check_gradients(loss_and_grad, params.ravel(), verbose=verbose)
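
# A minimal sketch (not part of the original tests) of the idea behind
# `check_gradients`: compare the analytic gradient returned by `loss_and_grad`
# against central finite differences. The name, tolerances, and return value
# below are illustrative assumptions, not the library's actual implementation.
def _finite_difference_check(loss_and_grad, x0, eps=1e-6, rtol=1e-3, atol=1e-4):
    import numpy as np
    x0 = np.asarray(x0, dtype=float)
    _, grad = loss_and_grad(x0)
    grad_fd = np.zeros(x0.size)
    for i in range(x0.size):
        dx = np.zeros(x0.size)
        dx[i] = eps
        loss_pos, _ = loss_and_grad(x0 + dx)
        loss_neg, _ = loss_and_grad(x0 - dx)
        grad_fd[i] = (loss_pos - loss_neg) / (2 * eps)
    # The check passes when the two gradients agree entrywise.
    return np.allclose(np.asarray(grad).ravel(), grad_fd, rtol=rtol, atol=atol)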

# Example #2
def test_scene_2d(verbose):
    seed = 42
    env = RandomEnv2d(seed)

    def loss_and_grad(x):
        loss, grad, _ = env.solve(x, True, {'solver': 'eigen'})
        return loss, grad

    x0 = env.sample()
    if not check_gradients(loss_and_grad, x0, eps=1e-5, verbose=verbose):
        if verbose:
            print_error('Gradient check in scene_2d failed.')
        return False

    return True

# Example #3
def test_bezier_2d(verbose):
    np.random.seed(42)
    folder = Path('bezier_2d')

    cell_nums = (32, 24)
    control_points = ndarray([[32, 12], [22, 6], [12, 18], [0, 12]])
    control_points[:, 1] += np.random.normal(size=4)
    bezier = Bezier2d()
    bezier.Initialize(cell_nums, control_points.ravel(), True)
    sdf = ndarray(bezier.signed_distances())
    sdf_master = np.load(folder / 'sdf_master.npy')
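    # Note: the check below demands bit-exact agreement with the stored master
    # field, which only holds if the seed and setup above are left unchanged.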
    if np.max(np.abs(sdf - sdf_master)) > 0:
        if verbose:
            print_error('Incorrect signed distance function.')
        return False

    if verbose:
        visualize_level_set(bezier)

    # Verify the gradients.
    nx = bezier.node_num(0)
    ny = bezier.node_num(1)
    sdf_weight = np.random.normal(size=(nx, ny))

    def loss_and_grad(x):
        bezier = Bezier2d()
        bezier.Initialize(cell_nums, x.ravel(), True)
        sdf = ndarray(bezier.signed_distances())
        loss = sdf_weight.ravel().dot(sdf)
        grad = 0
        for i in range(nx):
            for j in range(ny):
                grad += sdf_weight[i, j] * ndarray(
                    bezier.signed_distance_gradients((i, j)))
        return loss, grad

    from py_diff_stokes_flow.common.grad_check import check_gradients
    return check_gradients(loss_and_grad,
                           control_points.ravel(),
                           verbose=verbose)

# Example #4
def test_cell_2d(verbose):
    np.random.seed(42)

    cell = Cell2d()
    E = 1e5
    nu = 0.45
    threshold = 1e-3
    edge_sample_num = 3
    # Consider a line that passes through (0.5, 0.5) with a slope between 1/3 and 1.
    p = ndarray([0.5, 0.5])
    k = np.random.uniform(low=1 / 3, high=1)
    # Line equation: (y - p[1]) / (x - p[0]) = k, i.e.
    # y - p[1] = k * x - k * p[0], or
    # k * x - y + p[1] - k * p[0] = 0.
    line_eq = ndarray([k, -1, p[1] - k * p[0]])
    # Solid area: line_eq >= 0.
    # So, the lower part is the solid area.
    # This means the signed distances at corners (0, 0) and (1, 0) are positive.
    sdf_at_corners = []
    for c in [(0, 0), (0, 1), (1, 0), (1, 1)]:
        sdf_at_corners.append(
            (line_eq[0] * c[0] + line_eq[1] * c[1] + line_eq[2]) /
            np.linalg.norm(line_eq[:2]))
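    # Worked check of the derivation above: the constant term of line_eq is
    # 0.5 * (1 - k) > 0 for k < 1, so the corners (0, 0) and (1, 0) lie on the
    # solid side (positive signed distance) while (0, 1) and (1, 1) do not.
    assert sdf_at_corners[0] > 0 and sdf_at_corners[2] > 0
    assert sdf_at_corners[1] < 0 and sdf_at_corners[3] < 0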
    cell.Initialize(E, nu, threshold, edge_sample_num, sdf_at_corners)

    # Check if all areas are correct.
    dx = 1 / 3
    x_intercept = (-line_eq[1] * dx - line_eq[2]) / line_eq[0]
    area_00 = x_intercept * x_intercept * k * 0.5
    area_01 = dx**2 - (dx - x_intercept)**2 * k * 0.5
    area_02 = dx**2
    area_10 = 0
    area_11 = dx**2 * 0.5
    area_12 = dx**2
    area_20 = 0
    area_21 = dx**2 - area_01
    area_22 = dx**2 - area_00
    area = ndarray([
        area_00, area_01, area_02, area_10, area_11, area_12, area_20, area_21,
        area_22
    ])
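    # Worked check: the cut line passes through the cell center (0.5, 0.5), so
    # the nine per-sub-cell areas on one side of it must sum to half the cell.
    assert np.isclose(area.sum(), 0.5)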
    area_from_cell = ndarray(cell.sample_areas())
    if not np.allclose(area, area_from_cell):
        if verbose:
            print_error('area is inconsistent.')
        return False

    # Check if all line segments are correct.
    line_00 = np.sqrt(1 + k**2) * x_intercept
    line_01 = np.sqrt(1 + k**2) * (dx - x_intercept)
    line_02 = 0
    line_10 = 0
    line_11 = np.sqrt(1 + k**2) * dx
    line_12 = 0
    line_20 = 0
    line_21 = line_01
    line_22 = line_00
    line = ndarray([
        line_00, line_01, line_02, line_10, line_11, line_12, line_20, line_21,
        line_22
    ])
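    # Worked check: the per-sub-cell boundary lengths tile the full cut segment,
    # whose length across the unit cell is sqrt(1 + k**2).
    assert np.isclose(line.sum(), np.sqrt(1 + k**2))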
    line_from_cell = ndarray(cell.sample_boundary_areas())
    if not np.allclose(line, line_from_cell):
        if verbose:
            print_error('boundary area is inconsistent.')
        return False

    # Test the gradients.
    for loss_func, grad_func, name in [
        (cell.py_normal, cell.py_normal_gradient, 'normal'),
        (cell.offset, cell.py_offset_gradient, 'offset'),
        (cell.sample_areas, cell.py_sample_areas_gradient, 'sample_areas'),
        (cell.sample_boundary_areas, cell.py_sample_boundary_areas_gradient,
         'sample_boundary_areas'), (cell.area, cell.py_area_gradient, 'area'),
        (cell.py_energy_matrix, cell.py_energy_matrix_gradient,
         'energy_matrix'),
        (cell.py_dirichlet_vector, cell.py_dirichlet_vector_gradient,
         'dirichlet_vector')
    ]:
        if verbose:
            print_info('Checking loss and gradient:', name)
        dim = ndarray(loss_func()).size
        weight = np.random.normal(size=dim)

        def loss_and_grad(x):
            cell.Initialize(E, nu, threshold, edge_sample_num, x)
            loss = ndarray(loss_func()).ravel().dot(weight)
            grad = np.zeros(4)
            for i in range(4):
                grad[i] = ndarray(grad_func(i)).ravel().dot(weight)
            return loss, grad

        if not check_gradients(
                loss_and_grad, ndarray(sdf_at_corners), verbose=verbose):
            if verbose:
                print_error('Gradient check failed.')
            return False

    return True

def test_shape_composition_3d_single(shape_info, verbose):
    np.random.seed(42)

    cell_nums = (10, 10, 3)
    shape = ShapeComposition3d()
    params = []
    for name, param in shape_info:
        shape.AddParametricShape(name, param.size)
        params.append(ndarray(param).ravel())
    params = np.concatenate(params)
    params += np.random.normal(size=params.size) * 0.01
    shape.Initialize(cell_nums, params, True)

    # Verify the gradients.
    sdf = ndarray(shape.signed_distances())
    nx = shape.node_num(0)
    ny = shape.node_num(1)
    nz = shape.node_num(2)
    sdf = sdf.reshape((nx, ny, nz))

    if verbose:
        # Visualize the mesh.
        from skimage import measure
        from mpl_toolkits.mplot3d.art3d import Poly3DCollection
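        # Note: `marching_cubes_lewiner` has been removed from newer scikit-image
        # releases; there, `measure.marching_cubes(sdf, 0)` is the equivalent call.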
        verts, faces, _, _ = measure.marching_cubes_lewiner(sdf, 0)

        fig = plt.figure(figsize=(10, 10))
        ax = fig.add_subplot(111, projection='3d')

        # Fancy indexing: `verts[faces]` to generate a collection of triangles
        mesh = Poly3DCollection(verts[faces])
        mesh.set_edgecolor('k')
        ax.add_collection3d(mesh)

        ax.set_xlabel('x')
        ax.set_ylabel('y')
        ax.set_zlabel('z')

        cx, cy, cz = cell_nums
        ax.set_xlim(0, cx)
        ax.set_ylim(0, cy)
        ax.set_zlim(0, cz)

        plt.tight_layout()
        plt.show()

    sdf_weight = np.random.normal(size=(nx, ny, nz))

    def loss_and_grad(x):
        shape.Initialize(cell_nums, x, True)
        sdf = ndarray(shape.signed_distances())
        loss = sdf_weight.ravel().dot(sdf)
        grad = 0
        for i in range(nx):
            for j in range(ny):
                for k in range(nz):
                    grad += sdf_weight[i, j, k] * ndarray(
                        shape.signed_distance_gradients((i, j, k)))
        return loss, grad

    from py_diff_stokes_flow.common.grad_check import check_gradients
    return check_gradients(loss_and_grad, params.ravel(), verbose=verbose)

# Example #6
    def loss_and_grad(x):
        t_begin = time.time()
        loss, grad, _ = env.solve(x, True, {'solver': solver})
        # Normalize loss and grad.
        loss /= unit_loss
        grad /= unit_loss
        t_end = time.time()
        print('loss: {:3.6e}, |grad|: {:3.6e}, time: {:3.6f}s'.format(
            loss, np.linalg.norm(grad), t_end - t_begin))
        return loss, grad

    if enable_grad_check:
        print_info('Checking gradients...')
        # Sanity check gradients.
        success = check_gradients(loss_and_grad, x_init)
        if success:
            print_ok('Gradient check succeeded.')
        else:
            print_error('Gradient check failed.')
            sys.exit(0)

    # File index + 1 = len(opt_history).
    loss, grad = loss_and_grad(x_init)
    opt_history = [(x_init.copy(), loss, grad.copy())]
    pickle.dump(opt_history, open('{}/{:04d}.data'.format(demo_name, 0), 'wb'))
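    # Readback sketch (an assumption mirroring the dump above, not original code):
    # each saved file holds the optimization history so far as a list of
    # (x, loss, grad) tuples, so the latest iterate could be recovered with
    #   x_saved, loss_saved, grad_saved = pickle.load(open(path, 'rb'))[-1]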

    def callback(x):
        loss, grad = loss_and_grad(x)
        global opt_history
        cnt = len(opt_history)