def _cam2world_matrix_from_cam_extrinsics(self,
                                              config: Config) -> np.ndarray:
        """ Determines camera extrinsics by using the given config and returns them in form of a cam to world frame transformation matrix.

        :param config: The configuration object.
        :return: The 4x4 cam to world transformation matrix.
        """
        if not config.has_param("cam2world_matrix"):
            position = MathUtility.change_coordinate_frame_of_point(
                config.get_vector3d("location", [0, 0, 0]), self.source_frame)

            rotation_format = config.get_string("rotation/format", "euler")
            value = config.get_vector3d("rotation/value", [0, 0, 0])
            # Transform to blender coord frame
            value = MathUtility.change_coordinate_frame_of_point(
                value, self.source_frame)
            if rotation_format == "euler":
                # Rotation, specified as euler angles
                rotation_matrix = Euler(value, 'XYZ').to_matrix()
            elif rotation_format == "forward_vec":
                # Convert forward vector to a rotation matrix (assume up = Z)
                rotation_matrix = CameraUtility.rotation_from_forward_vec(
                    value)
            elif rotation_format == "look_at":
                # Convert look-at point to a rotation matrix via the forward vector (assume up = Z)
                rotation_matrix = CameraUtility.rotation_from_forward_vec(
                    value - position)
            else:
                raise Exception("No such rotation format:" +
                                str(rotation_format))

            if rotation_format == "look_at" or rotation_format == "forward_vec":
                inplane_rot = config.get_float("rotation/inplane_rot", 0.0)
                rotation_matrix = np.matmul(
                    rotation_matrix,
                    Euler((0.0, 0.0, inplane_rot)).to_matrix())

                extra_rot = config.get_vector("rotation/extra_rot",
                                              mathutils.Vector([0., 0., 0.]))
                rotation_matrix = rotation_matrix @ Euler(
                    extra_rot).to_matrix()

            cam2world_matrix = MathUtility.build_transformation_mat(
                position, rotation_matrix)

        else:
            cam2world_matrix = np.array(
                config.get_list("cam2world_matrix")).reshape(4, 4).astype(
                    np.float32)
            cam2world_matrix = MathUtility.change_target_coordinate_frame_of_transformation_matrix(
                cam2world_matrix, self.source_frame)
        return cam2world_matrix
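
For illustration, a minimal sketch of a camera specification this method could consume. Only the key names ("location", "rotation/format", "rotation/value", "rotation/inplane_rot") are taken from the lookups above; the concrete values and the plain-dict representation (instead of a real Config object) are assumptions.

# Hypothetical "look_at" camera specification (values are made up for illustration)
camera_spec = {
    "location": [1.0, -2.0, 1.5],       # camera position in the source frame
    "rotation": {
        "format": "look_at",            # one of: "euler", "forward_vec", "look_at"
        "value": [0.0, 0.0, 1.0],       # point the camera should look at
        "inplane_rot": 0.0              # extra roll around the viewing axis
    }
}
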
Example #2
    def _correct_bbox_frame(bbox: dict) -> dict:
        """ Corrects the coordinate frame of the given bbox.

        :param bbox: The bbox.
        :return: The corrected bbox.
        """
        return {
            "min": MathUtility.transform_point_to_blender_coord_frame(bbox["min"], ["X", "-Z", "Y"]),
            "max": MathUtility.transform_point_to_blender_coord_frame(bbox["max"], ["X", "-Z", "Y"])
        }
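
As a rough, self-contained illustration of what the ["X", "-Z", "Y"] argument is presumably doing here (building each output component from a named, possibly negated, axis of the input point), a sketch that does not rely on MathUtility; the helper name and the exact semantics are assumptions:

def remap_point(point, new_frame=("X", "-Z", "Y")):
    # Pick (and possibly negate) the named input axis for each output component
    axis_index = {"X": 0, "Y": 1, "Z": 2}
    result = []
    for axis in new_frame:
        sign = -1.0 if axis.startswith("-") else 1.0
        result.append(sign * point[axis_index[axis.lstrip("-")]])
    return result

# A point at (0, 1, 2) in a Y-up frame would end up at (0, -2, 1) in Blender's Z-up frame
print(remap_point([0.0, 1.0, 2.0]))
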
Example #3
    def get_common_attribute(
            item: bpy.types.Object,
            attribute_name: str,
            destination_frame: Union[None, List[str]] = None) -> Any:
        """ Returns the value of the requested attribute for the given item.

        This method covers all general attributes that blender objects have.

        :param item: The item. Type: blender object.
        :param attribute_name: The attribute name. Type: string.
        :param destination_frame: The coordinate frame used when transforming the item's points and vectors. Default: ["X", "Y", "Z"]
        :return: The attribute value.
        """

        if destination_frame is None:
            destination_frame = ["X", "Y", "Z"]

        if attribute_name == "name":
            return item.name
        elif attribute_name == "location":
            return MathUtility.transform_point_to_blender_coord_frame(
                item.location, destination_frame)
        elif attribute_name == "rotation_euler":
            return MathUtility.transform_point_to_blender_coord_frame(
                item.rotation_euler, destination_frame)
        elif attribute_name == "rotation_forward_vec":
            # Calc forward vector from rotation matrix
            rot_mat = item.rotation_euler.to_matrix()
            forward = rot_mat @ mathutils.Vector([0, 0, -1])
            return MathUtility.transform_point_to_blender_coord_frame(
                forward, destination_frame)
        elif attribute_name == "rotation_up_vec":
            # Calc up vector from rotation matrix
            rot_mat = item.rotation_euler.to_matrix()
            up = rot_mat @ mathutils.Vector([0, 1, 0])
            return MathUtility.transform_point_to_blender_coord_frame(
                up, destination_frame)
        elif attribute_name == "matrix_world":
            # Transform matrix_world to given destination frame
            matrix_world = Utility.transform_matrix_to_blender_coord_frame(
                item.matrix_world, destination_frame)
            return [[x for x in c] for c in matrix_world]
        elif attribute_name.startswith("customprop_"):
            custom_property_name = attribute_name[len("customprop_"):]
            # Make sure the requested custom property exists
            if custom_property_name in item:
                return item[custom_property_name]
            else:
                raise Exception("No such custom property: " +
                                custom_property_name)
        else:
            raise Exception("No such attribute: " + attribute_name)
    def _correct_bbox_frame(bbox: dict) -> dict:
        """ Corrects the coordinate frame of the given bbox.

        :param bbox: The bbox.
        :return: The corrected bbox.
        """
        return {
            "min":
            MathUtility.change_coordinate_frame_of_point(
                bbox["min"], ["X", "-Z", "Y"]),
            "max":
            MathUtility.change_coordinate_frame_of_point(
                bbox["max"], ["X", "-Z", "Y"])
        }
    def _get_attribute(self, item, attribute_name):
        """ Returns the value of the requested attribute for the given item.

        This method covers all general attributes that blender objects have.

        :param item: The item. Type: blender object.
        :param attribute_name: The attribute name. Type: string.
        :return: The attribute value.
        """
        if attribute_name == "id":
            if item.name not in self.name_to_id:
                self.name_to_id[item.name] = len(self.name_to_id.values())
            return self.name_to_id[item.name]
        elif attribute_name == "name":
            return item.name
        elif attribute_name == "location":
            return MathUtility.transform_point_to_blender_coord_frame(
                item.location, self.destination_frame)
        elif attribute_name == "rotation_euler":
            return MathUtility.transform_point_to_blender_coord_frame(
                item.rotation_euler, self.destination_frame)
        elif attribute_name == "rotation_forward_vec":
            # Calc forward vector from rotation matrix
            rot_mat = item.rotation_euler.to_matrix()
            forward = rot_mat @ mathutils.Vector([0, 0, -1])
            return MathUtility.transform_point_to_blender_coord_frame(
                forward, self.destination_frame)
        elif attribute_name == "rotation_up_vec":
            # Calc up vector from rotation matrix
            rot_mat = item.rotation_euler.to_matrix()
            up = rot_mat @ mathutils.Vector([0, 1, 0])
            return MathUtility.transform_point_to_blender_coord_frame(
                up, self.destination_frame)
        elif attribute_name == "matrix_world":
            # Transform matrix_world to given destination frame
            matrix_world = Utility.transform_matrix_to_blender_coord_frame(
                item.matrix_world, self.destination_frame)
            return [[x for x in c] for c in matrix_world]
        elif attribute_name.startswith("customprop_"):
            custom_property_name = attribute_name[len("customprop_"):]
            # Make sure the requested custom property exists
            if custom_property_name in item:
                return item[custom_property_name]
            else:
                raise Exception("No such custom property: " +
                                custom_property_name)
        else:
            raise Exception("No such attribute: " + attribute_name)
Example #6
    def _cam2world_matrix_from_cam_extrinsics(self, config):
        """ Determines camera extrinsics by using the given config and returns them in form of a cam to world frame transformation matrix.

        :param config: The configuration object.
        :return: The cam to world transformation matrix.
        """
        if not config.has_param("cam2world_matrix"):
            position = MathUtility.transform_point_to_blender_coord_frame(
                config.get_vector3d("location", [0, 0, 0]), self.source_frame)

            # Rotation
            rotation_format = config.get_string("rotation/format", "euler")
            value = config.get_vector3d("rotation/value", [0, 0, 0])
            # Transform to blender coord frame
            value = MathUtility.transform_point_to_blender_coord_frame(
                Vector(value), self.source_frame)
            if rotation_format == "euler":
                # Rotation, specified as euler angles
                rotation_matrix = Euler(value, 'XYZ').to_matrix()
            elif rotation_format == "forward_vec":
                # Convert forward vector to a rotation matrix (assume up = Z)
                rotation_matrix = CameraUtility.rotation_from_forward_vec(
                    value)
            elif rotation_format == "look_at":
                # Convert look-at point to a rotation matrix via the forward vector (assume up = Z)
                rotation_matrix = CameraUtility.rotation_from_forward_vec(
                    (value - position).normalized())
            else:
                raise Exception("No such rotation format:" +
                                str(rotation_format))

            if rotation_format == "look_at" or rotation_format == "forward_vec":
                inplane_rot = config.get_float("rotation/inplane_rot", 0.0)
                rotation_matrix = rotation_matrix @ Euler(
                    (0.0, 0.0, inplane_rot)).to_matrix()

            cam2world_matrix = Matrix.Translation(
                Vector(position)) @ rotation_matrix.to_4x4()
        else:
            cam2world_matrix = Matrix(
                np.array(config.get_list("cam2world_matrix")).reshape(
                    4, 4).astype(np.float32))
            cam2world_matrix = Utility.transform_matrix_to_blender_coord_frame(
                cam2world_matrix, self.source_frame)
        return cam2world_matrix
Example #7
    def _compute_camera_to_world_trafo(cam_H_m2w_ref: np.ndarray,
                                       cam_H_m2c_ref: np.ndarray,
                                       source_frame: list) -> np.ndarray:
        """ Returns the camera to world transformation in blender coords.

        :param cam_H_m2w_ref: (4x4) Homogeneous trafo from object to world coords.
        :param cam_H_m2c_ref: (4x4) Homogeneous trafo from object to camera coords.
        :param source_frame: Can be used if the given positions and rotations are specified in frames different from the blender frame.
        :return: cam_H_c2w: (4x4) Homogeneous trafo from camera to world coords.
        """

        cam_H_c2w = np.dot(cam_H_m2w_ref, np.linalg.inv(cam_H_m2c_ref))

        print("Camera to world transformation:\n{}".format(cam_H_c2w))

        # transform from OpenCV to blender coords
        cam_H_c2w = MathUtility.change_source_coordinate_frame_of_transformation_matrix(
            cam_H_c2w, source_frame)

        return cam_H_c2w
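
The core relation used above is cam_H_c2w = cam_H_m2w_ref @ inv(cam_H_m2c_ref). A small self-contained numpy check of that identity with made-up transforms (not part of the original code):

import numpy as np

# Made-up homogeneous transforms: object-to-camera and object-to-world
cam_H_m2c = np.eye(4)
cam_H_m2c[:3, 3] = [0.0, 0.0, 2.0]
cam_H_m2w = np.eye(4)
cam_H_m2w[:3, 3] = [1.0, 1.0, 0.5]

cam_H_c2w = cam_H_m2w @ np.linalg.inv(cam_H_m2c)
# Composing camera-to-world with object-to-camera must give object-to-world again
assert np.allclose(cam_H_c2w @ cam_H_m2c, cam_H_m2w)
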
Example #8
# Init sampler for sampling locations inside the loaded suncg house
point_sampler = SuncgPointInRoomSampler(objs)
# Init bvh tree containing all mesh objects
bvh_tree = MeshObject.create_bvh_tree_multi_objects(
    [o for o in objs if isinstance(o, MeshObject)])

poses = 0
tries = 0
while tries < 10000 and poses < 5:
    # Sample point inside house
    height = np.random.uniform(0.5, 2)
    location, _ = point_sampler.sample(height)
    # Sample rotation (fix around X and Y axis)
    euler_rotation = np.random.uniform([1.2217, 0, 0],
                                       [1.2217, 0, 6.283185307])
    cam2world_matrix = MathUtility.build_transformation_mat(
        location, euler_rotation)

    # Check that obstacles are at least 1 meter away from the camera and make sure the view is interesting enough
    if CameraValidation.perform_obstacle_in_view_check(
            cam2world_matrix, {"min": 1.0}, bvh_tree
    ) and CameraValidation.scene_coverage_score(cam2world_matrix) > 0.4:
        CameraUtility.add_camera_pose(cam2world_matrix)
        poses += 1
    tries += 1

# activate normal and distance rendering
RendererUtility.enable_normals_output()
RendererUtility.enable_distance_output()
MaterialLoaderUtility.add_alpha_channel_to_textures(blurry_edges=True)

# render the whole pipeline
data = RendererUtility.render()
Example #9
proximity_checks = {
    "min": 1.0,
    "avg": {
        "min": 2.5,
        "max": 3.5
    },
    "no_background": True
}
while tries < 10000 and poses < 10:
    # Sample point inside house
    height = np.random.uniform(1.4, 1.8)
    location = point_sampler.sample(height)
    # Sample rotation (fix around X and Y axis)
    rotation = np.random.uniform([1.2217, 0, 0], [1.338, 0, 6.283185307])
    cam2world_matrix = MathUtility.build_transformation_mat(
        location,
        Euler(rotation).to_matrix())

    # Check that obstacles are at least 1 meter away from the camera and have an average distance between 2.5 and 3.5
    # meters and make sure that no background is visible, finally make sure the view is interesting enough
    if CameraValidation.scene_coverage_score(cam2world_matrix, special_objects, special_objects_weight=10.0) > 0.8 \
            and CameraValidation.perform_obstacle_in_view_check(cam2world_matrix, proximity_checks, bvh_tree):
        CameraUtility.add_camera_pose(cam2world_matrix)
        poses += 1
    tries += 1

# set the sample amount to 350
RendererUtility.set_samples(350)

# render the whole pipeline
data = RendererUtility.render()
Example #10
    def get_common_attribute(
            item: bpy.types.Object,
            attribute_name: str,
            local_frame_change: Union[None, List[str]] = None,
            world_frame_change: Union[None, List[str]] = None) -> Any:
        """ Returns the value of the requested attribute for the given item.

        This method covers all general attributes that blender objects have.

        :param item: The item. Type: blender object.
        :param attribute_name: The attribute name. Type: string.
        :param local_frame_change: Can be used to change the local coordinate frame of matrices. Default: ["X", "Y", "Z"]
        :param world_frame_change: Can be used to change the world coordinate frame of points and matrices. Default: ["X", "Y", "Z"]
        :return: The attribute value.
        """

        if local_frame_change is None:
            local_frame_change = ["X", "Y", "Z"]
        if world_frame_change is None:
            world_frame_change = ["X", "Y", "Z"]

        # Print warning if local_frame_change is used with other attributes than matrix_world
        if local_frame_change != ["X", "Y", "Z"] and attribute_name in [
                "location", "rotation_euler", "rotation_forward_vec",
                "rotation_up_vec"
        ]:
            print(
                "Warning: The local_frame_change parameter is at the moment only supported by the matrix_world attribute."
            )

        if attribute_name == "name":
            return item.name
        elif attribute_name == "location":
            return MathUtility.change_coordinate_frame_of_point(
                item.location, world_frame_change)
        elif attribute_name == "rotation_euler":
            return MathUtility.change_coordinate_frame_of_point(
                item.rotation_euler, world_frame_change)
        elif attribute_name == "rotation_forward_vec":
            # Calc forward vector from rotation matrix
            rot_mat = item.rotation_euler.to_matrix()
            forward = rot_mat @ mathutils.Vector([0, 0, -1])
            return MathUtility.change_coordinate_frame_of_point(
                forward, world_frame_change)
        elif attribute_name == "rotation_up_vec":
            # Calc up vector from rotation matrix
            rot_mat = item.rotation_euler.to_matrix()
            up = rot_mat @ mathutils.Vector([0, 1, 0])
            return MathUtility.change_coordinate_frame_of_point(
                up, world_frame_change)
        elif attribute_name == "matrix_world":
            # Transform matrix_world into the given local/world frames
            matrix_world = MathUtility.change_source_coordinate_frame_of_transformation_matrix(
                item.matrix_world, local_frame_change)
            matrix_world = MathUtility.change_target_coordinate_frame_of_transformation_matrix(
                matrix_world, world_frame_change)
            return [[x for x in c] for c in matrix_world]
        elif attribute_name.startswith("customprop_"):
            custom_property_name = attribute_name[len("customprop_"):]
            # Make sure the requested custom property exists
            if custom_property_name in item:
                return item[custom_property_name]
            else:
                raise Exception("No such custom property: " +
                                custom_property_name)
        else:
            raise Exception("No such attribute: " + attribute_name)
Example #11
# define a light and set its location and energy level
light = Light()
light.set_type("POINT")
light.set_location([5, -5, 5])
light.set_energy(1000)

# define the camera intrinsics
CameraUtility.set_intrinsics_from_blender_params(1, 512, 512, lens_unit="FOV")

# read the camera positions file and convert into homogeneous camera-world transformation
with open(args.camera, "r") as f:
    for line in f.readlines():
        line = [float(x) for x in line.split()]
        position, euler_rotation = line[:3], line[3:6]
        matrix_world = MathUtility.build_transformation_mat(
            position, euler_rotation)
        CameraUtility.add_camera_pose(matrix_world)
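# For reference: as parsed above, each line of the camera file contains six
# floats, "x y z euler_x euler_y euler_z" (position followed by euler angles);
# a hypothetical example line: 0.0 -2.0 1.5 1.3 0.0 0.0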

# activate normal and distance rendering
RendererUtility.enable_normals_output()
RendererUtility.enable_distance_output()

# set the amount of samples, which should be used for the color rendering
RendererUtility.set_samples(50)

# render the whole pipeline
data = RendererUtility.render()
seg_data = SegMapRendererUtility.render(map_by=["instance", "class", "name"])

# Write data to coco file
CocoWriterUtility.write(
Example #12
    Utility.resolve_path(
        os.path.join('resources', 'id_mappings', 'nyu_idset.csv')))
objs = SuncgLoader.load(args.house, label_mapping=label_mapping)

# define the camera intrinsics
CameraUtility.set_intrinsics_from_blender_params(1,
                                                 512,
                                                 512,
                                                 pixel_aspect_x=1.333333333,
                                                 lens_unit="FOV")

# read the camera positions file and convert into homogeneous camera-world transformation
with open(args.camera, "r") as f:
    for line in f.readlines():
        line = [float(x) for x in line.split()]
        position = MathUtility.change_coordinate_frame_of_point(
            line[:3], ["X", "-Z", "Y"])
        rotation = MathUtility.change_coordinate_frame_of_point(
            line[3:6], ["X", "-Z", "Y"])
        matrix_world = MathUtility.build_transformation_mat(
            position, CameraUtility.rotation_from_forward_vec(rotation))
        CameraUtility.add_camera_pose(matrix_world)

# make Suncg objects emit light
SuncgLighting.light()

# activate normal and distance rendering
RendererUtility.enable_normals_output()
RendererUtility.enable_distance_output()
MaterialLoaderUtility.add_alpha_channel_to_textures(blurry_edges=True)

# render the whole pipeline