# Example 1
def main(argv):
    """Generate a dataset of composed sphere shapes and save it to disk.

    For every sample, a random number of spheres (drawn uniformly from
    ``[2, max_n_shapes_per_samples)``) is attached sequentially, merged into
    a single Shape, and exported under
    ``<output_directory>/spheres_dataset/<index>/models/`` as an OBJ mesh,
    a PLY mesh and an OBJ point cloud.
    """
    parser = argparse.ArgumentParser(
        # The script builds spheres (see "spheres_dataset" below); the old
        # description wrongly said "cuboid".
        description="Generate a sphere dataset"
    )
    parser.add_argument(
        "output_directory",
        help="Save the dataset in this directory"
    )
    parser.add_argument(
        "--n_samples",
        type=int,
        default=10,
        help="Number of training samples to be generated"
    )
    parser.add_argument(
        "--max_n_shapes_per_samples",
        type=int,
        default=4,
        help="Number of shapes per sample"
    )
    args = parser.parse_args(argv)

    # np.arange(2, n) is empty for n <= 2, which would make np.random.choice
    # raise a cryptic ValueError inside the loop; fail early instead.
    if args.max_n_shapes_per_samples <= 2:
        parser.error("--max_n_shapes_per_samples must be greater than 2")

    # Check if output directory exists and if it doesn't create it
    if not os.path.exists(args.output_directory):
        os.makedirs(args.output_directory)

    # Create a directory based on the type of the shapes inside the output
    # directory
    output_directory = os.path.join(
        args.output_directory,
        "spheres_dataset"
    )
    # print() with a single pre-formatted argument behaves identically on
    # Python 2 and 3 (the original `print x` statement is py2-only syntax).
    print("Saving models to %s" % (output_directory,))

    prog = Progbar(args.n_samples)
    for i in range(args.n_samples):
        prims = build_sequentially_attaching_sheres(
            np.random.choice(np.arange(2, args.max_n_shapes_per_samples))
        )
        c = Shape.from_shapes(prims)
        # Create subdirectory to save the sample
        base_dir = os.path.join(output_directory, "%05d" % (i,), "models")
        if not os.path.exists(base_dir):
            os.makedirs(base_dir)
        # Save as mesh (obj + ply) and as point cloud
        c.save_as_mesh(os.path.join(base_dir, "model_normalized.obj"), "obj")
        c.save_as_mesh(os.path.join(base_dir, "model_normalized.ply"), "ply")
        c.save_as_pointcloud(
            os.path.join(base_dir, "model_normalized_pcl.obj"), "obj"
        )
        prog.update(i + 1)
# Example 2
def main(argv):
    """Generate a dataset of parametric shapes (cubes, rectangles,
    ellipsoids, or random cube compositions) and save each sample as mesh
    and point-cloud files under a per-sample random subdirectory.

    NOTE(review): this is Python 2 code (`print` statements, `xrange`);
    it will not run unmodified on Python 3.
    """
    parser = argparse.ArgumentParser(
        description="Generate a cuboid dataset"
    )
    parser.add_argument(
        "output_directory",
        help="Save the dataset in this directory"
    )
    parser.add_argument(
        "--n_samples",
        type=int,
        default=10,
        help="Number of training samples to be generated"
    )
    parser.add_argument(
        "--shapes_type",
        default="cubes",
        choices=[
            "cubes",
            "cubes_translated",
            "cubes_rotated_translated",
            "cubes_rotated",
            "rectangles",
            "rectangles_translated",
            "rectangles_rotated",
            "rectangles_rotated_translated",
            "ellipsoid",
            "random"
        ],
        help="The type of the shapes in every sample"
    )
    parser.add_argument(
        "--n_shapes_per_samples",
        type=int,
        default=1,
        help="Number of shapes per sample"
    )
    parser.add_argument(
        "--maximum",
        # "0.5,0.5,0.5" -> (0.5, 0.5, 0.5): comma-separated floats per axis
        type=lambda x: tuple(map(float, x.split(","))),
        default="0.5,0.5,0.5",
        help="Maximum size along every axis"
    )
    parser.add_argument(
        "--minimum",
        type=lambda x: tuple(map(float, x.split(","))),
        default="0.13,0.13,0.13",
        # NOTE(review): help text says "Maximum" but this is the minimum size
        help="Maximum size along every axis"
    )
    args = parser.parse_args(argv)

    # Check if output directory exists and if it doesn't create it
    if not os.path.exists(args.output_directory):
        os.makedirs(args.output_directory)

    # Create a directory based on the type of the shapes inside the output
    # directory
    output_directory = os.path.join(
        args.output_directory,
        args.shapes_type
    )

    # Size ranges per axis; a single shared range for cubes (all axes equal),
    # one range per axis otherwise.
    ranges = None
    if "cubes" in args.shapes_type:
        # Make sure that the maximum and minimum range are equal along each
        # axis
        assert args.maximum[0] == args.maximum[1]
        assert args.maximum[1] == args.maximum[2]
        assert args.minimum[0] == args.minimum[1]
        assert args.minimum[1] == args.minimum[2]
        ranges = np.linspace(
            args.minimum[0],
            args.maximum[0],
            10,
            endpoint=False
        )

    # elif "rectangles" in args.shapes_type:
    else:
        ranges = [
            np.linspace(args.minimum[0], args.maximum[0], 10, endpoint=False),
            np.linspace(args.minimum[1], args.maximum[1], 10, endpoint=False),
            np.linspace(args.minimum[2], args.maximum[2], 10, endpoint=False),
        ]
    # NOTE(review): `ranges` is never read after this point — dead code?

    bar = Bar("Generating %d cuboids" % (args.n_samples,), max=args.n_samples)
    c = None
    for i in range(args.n_samples):
        # Base shape selection; the substring tests mean e.g.
        # "cubes_rotated_translated" hits the "cubes" branch and then the
        # "translated"/"rotated" modifiers below.
        if "cubes" in args.shapes_type:
            c = get_single_cube(args.minimum, args.maximum)
        if "rectangles" in args.shapes_type:
            c = get_single_rectangle(args.minimum, args.maximum)

        if "translated" in args.shapes_type:
            # Random translation, each component in [0, 0.3)
            t = 0.3*np.random.random((3, 1))
            c.translate(t)

        if "rotated" in args.shapes_type:
            q = Quaternion.random()
            R = q.rotation_matrix
            c.rotate(R)

        if "ellipsoid" in args.shapes_type:
            # Three ellipsoids with identical semi-axes; two of them get an
            # independent random rotation, then all are merged into one Shape.
            abc = np.random.random((3, 1))
            c1 = Ellipsoid(abc[0], abc[1], abc[2])
            c2 = Ellipsoid(abc[0], abc[1], abc[2])
            c3 = Ellipsoid(abc[0], abc[1], abc[2])
            q = Quaternion.random()
            R = q.rotation_matrix
            c2.rotate(R)
            q = Quaternion.random()
            R = q.rotation_matrix
            c3.rotate(R)
            # t = 0.3*np.random.random((3, 1))
            # c1.translate(t)
            c = Shape.from_shapes([c1, c2, c3])

        if "random" in args.shapes_type:
            # Two randomly rotated cubes with a random relative translation.
            # The commented alternatives below are earlier experiments kept
            # by the original author.
            #if random.choice((True, False)):
            #if True:
            #   q = Quaternion.random()
            #   c1, c2 = adjacent_cubes(q.rotation_matrix)
            #else:
            if True:
                q1 = Quaternion.random()
                q2 = Quaternion.random()
                c1, c2 = multiple_cubes(
                    q1.rotation_matrix,
                    q2.rotation_matrix,
                    3.5*np.random.random((3, 1))
                )
            # q = Quaternion.random()
            # c1, c2 = adjacent_cubes(q.rotation_matrix)
            # q1 = Quaternion.random()
            # x_max1, y_max1, z_max1 = tuple(np.random.rand(3))
            # c3 = Cuboid(-x_max1, x_max1, -y_max1, y_max1, -z_max1, z_max1)
            # c3.rotate(q1.rotation_matrix)
            # c3.translate(np.random.random((3,1)).reshape(3, -1))
            c = Shape.from_shapes([c1, c2])

        # Create subdirectory to save the sample
        # (32-character random alphanumeric name, so re-runs don't collide)
        folder_name = ''.join([
            random.choice(ascii_letters + digits) for n in xrange(32)
        ])
        base_dir = os.path.join(output_directory, folder_name, "models")
        if not os.path.exists(base_dir):
            os.makedirs(base_dir)
        # print base_dir
        # Save as obj file
        c.save_as_mesh(os.path.join(base_dir, "model_normalized.obj"), "obj")
        c.save_as_mesh(os.path.join(base_dir, "model_normalized.ply"), "ply")
        c.save_as_pointcloud(
            os.path.join(base_dir, "model_normalized_pcl.obj"), "obj"
        )
        # Log the applied transform for this sample (t / q are set in the
        # matching branches above within this same iteration).
        if "translated" in args.shapes_type:
            print os.path.join(base_dir, "model_normalized_pcl.obj"), t.T
        if "rotated" in args.shapes_type:
            print os.path.join(base_dir, "model_normalized_pcl.obj"), q
        bar.next()

    # Sanity pass: reload every saved mesh and print its per-axis maxima.
    for i in os.listdir(output_directory):
        x = os.path.join(output_directory, i, "models/model_normalized.obj")
        m = MeshFromOBJ(x)
        print x, m.points.max(-1)
    # NOTE(review): p_max / p_min are undefined in this function — reaching
    # this line raises NameError. It looks pasted from a get_translation()-
    # style helper; confirm intent and remove or fix.
    return (p_max - p_min) / 2 + p_min


def get_rectangle(cube, T, **kwargs):
    """Build a Rectangle from the two extreme corners of *cube*.

    The first corner (shifted by ``-T``) anchors the rectangle and the
    per-axis extent is the difference between the last and first corner
    points; extra keyword arguments are forwarded to ``Rectangle``.
    """
    first_corner = cube.points[:, 0]
    last_corner = cube.points[:, -1]
    extent = last_corner - first_corner
    return Rectangle(first_corner - T, *extent, **kwargs)


if __name__ == "__main__":
    c1, c2, y_hat = cubes_inside(0.2, 0.2, 0.1, 0.1, 0.1, 0.1)
    T = get_translation([c1, c2])
    probs, translations, quats, shapes, epsilons = y_hat
    c = Shape.from_shapes([c1, c2])
    c.save_as_mesh("/tmp/mesh.obj", "obj")
    m = MeshFromOBJ("/tmp/mesh.obj")
    y_target = torch.from_numpy(m.sample_faces(1000).astype(
        np.float32)).float().unsqueeze(0)

    # A sampler instance
    e = EqualDistanceSamplerSQ(200)
    # Compute the loss for the current experiment
    l_weights = {
        "coverage_loss_weight": 1.0,
        "consistency_loss_weight": 1.0,
    }
    reg_terms = {
        "regularizer_type": [],
        "shapes_regularizer_weight": 0.0,