Example no. 1
0
    def _export_marker_detections(self):
        """Export marker detections within the export range to ``marker_detections.csv``.

        Reads the temporary marker cache created by the offline surface
        tracker, writes one CSV row per detected marker per world frame, and
        always removes the temporary cache afterwards.  If a cache entry is
        missing (marker detection had not finished), the partial CSV file is
        deleted and an error is logged instead.
        """
        export_slice = slice(*self.export_range)
        world_indices = range(*self.export_range)

        # Load the temporary marker cache created by the offline surface tracker
        marker_cache = file_methods.Persistent_Dict(self.marker_cache_path)
        marker_cache = marker_cache["marker_cache"][export_slice]

        incomplete_marker_cache_detected = False

        file_path = os.path.join(self.metrics_dir, "marker_detections.csv")

        try:
            with open(file_path, "w", encoding="utf-8",
                      newline="") as csv_file:
                csv_writer = csv.writer(csv_file, delimiter=",")
                csv_writer.writerow((
                    "world_index",
                    "marker_uid",
                    "corner_0_x",
                    "corner_0_y",
                    "corner_1_x",
                    "corner_1_y",
                    "corner_2_x",
                    "corner_2_y",
                    "corner_3_x",
                    "corner_3_y",
                ))
                for world_index, serialized_markers in zip(
                        world_indices, marker_cache):
                    if serialized_markers is None:
                        # Frame has no cache entry at all: detection incomplete.
                        # Set flag so we break out of the outer loop below.
                        incomplete_marker_cache_detected = True

                    if incomplete_marker_cache_detected:
                        break  # outer loop

                    for sm in serialized_markers:
                        if sm is None:
                            # Missing marker within a frame: set the flag so the
                            # outer loop stops on its next iteration.
                            incomplete_marker_cache_detected = True
                            break  # inner loop
                        m = Surface_Marker.deserialize(sm)
                        # Flatten the 4 corner vertices into 8 x/y coordinates.
                        flat_corners = [x for c in m.verts_px for x in c[0]]
                        assert len(flat_corners) == 8  # sanity check
                        csv_writer.writerow((
                            world_index,
                            m.uid,
                            *flat_corners,
                        ))
        finally:
            if incomplete_marker_cache_detected:
                logger.error(
                    "Marker detection not finished. No data will be exported.")
                # Delete incomplete marker cache export file
                os.remove(file_path)

            # Delete the temporary marker cache created by the offline surface tracker
            os.remove(self.marker_cache_path)
            self.marker_cache_path = None
    def _init_marker_cache(self):
        """Load the square-marker cache saved by a previous session.

        Rebuilds the cache from scratch when no cache exists on disk or when
        its stored format version differs from ``self.MARKER_CACHE_VERSION``;
        otherwise revives the cached detections and resumes from them.
        """
        previous_cache = file_methods.Persistent_Dict(
            os.path.join(self.g_pool.rec_dir, "square_marker_cache")
        )
        version = previous_cache.get("version", 0)
        cache = previous_cache.get("marker_cache_unfiltered", None)

        if cache is None:
            # No cache on disk yet; compute it from scratch.
            self._recalculate_marker_cache()
        elif version != self.MARKER_CACHE_VERSION:
            # Fixed typo in log message ("missmatch" -> "mismatch").
            logger.debug("Marker cache version mismatch. Rebuilding marker cache.")
            self.inverted_markers = previous_cache.get("inverted_markers", False)
            self._recalculate_marker_cache()
        else:
            marker_cache_unfiltered = []
            for markers in cache:
                # Loaded markers are either False, [] or a list of dictionaries. We
                # need to convert the dictionaries into Square_Marker_Detection
                # objects (None for entries that failed to serialize).
                if markers:
                    markers = [
                        Square_Marker_Detection(*args) if args else None
                        for args in markers
                    ]
                marker_cache_unfiltered.append(markers)

            self._recalculate_marker_cache(previous_state=marker_cache_unfiltered)
            self.inverted_markers = previous_cache.get("inverted_markers", False)
            logger.debug("Restored previous marker cache.")
Example no. 3
0
    def _add_surfaces_from_file(self):
        """Restore every surface stored in the ``surface_definitions`` file."""
        definitions_path = os.path.join(self._save_dir, "surface_definitions")
        surface_definitions = file_methods.Persistent_Dict(definitions_path)

        saved_surfaces = surface_definitions.get("surfaces", [])
        for init_dict in saved_surfaces:
            self.add_surface(init_dict)
    def _init_marker_cache(self):
        """Load the square-marker cache saved by a previous session.

        Rebuilds the cache when no usable cached parameters exist or when the
        stored format version differs from ``self.MARKER_CACHE_VERSION``;
        otherwise revives the cached detections and resumes from them with the
        parameters they were computed with.
        """
        previous_cache_config = file_methods.Persistent_Dict(
            os.path.join(self.g_pool.rec_dir, "square_marker_cache"))
        version = previous_cache_config.get("version", 0)

        previous_params = self._cache_relevant_params_from_cache(
            previous_cache_config)
        current_params = self._cache_relevant_params_from_controller()

        if previous_params is None:
            # No usable cache on disk; compute it with the current settings.
            self._recalculate_marker_cache(parameters=current_params)
        elif version != self.MARKER_CACHE_VERSION:
            # Fixed typo in log message ("missmatch" -> "mismatch").
            logger.debug(
                "Marker cache version mismatch. Rebuilding marker cache.")
            self._recalculate_marker_cache(parameters=current_params)
        else:
            marker_cache_unfiltered = []
            for markers in previous_cache_config["marker_cache_unfiltered"]:
                # Loaded markers are either False, [] or a list of dictionaries. We
                # need to convert the dictionaries into Surface_Marker objects
                # (None for entries that failed to serialize).
                if markers:
                    markers = [
                        Surface_Marker.deserialize(args) if args else None
                        for args in markers
                    ]
                marker_cache_unfiltered.append(markers)

            self._recalculate_marker_cache(
                parameters=previous_params,
                previous_state=marker_cache_unfiltered)
            logger.debug("Restored previous marker cache.")
Example no. 5
0
 def save_surface_definitions_to_file(self):
     """Persist all fully defined surfaces to ``surface_definitions``."""
     definitions_path = os.path.join(self._save_dir, "surface_definitions")
     surface_definitions = file_methods.Persistent_Dict(definitions_path)

     # Surfaces that are not yet fully defined are intentionally skipped.
     defined_surfaces = [s.save_to_dict() for s in self.surfaces if s.defined]
     surface_definitions["surfaces"] = defined_surfaces
     surface_definitions.save()
 def _save_marker_cache(self):
     """Write the unfiltered marker cache and its metadata to disk."""
     cache_path = os.path.join(self.g_pool.rec_dir, "square_marker_cache")
     cache_file = file_methods.Persistent_Dict(cache_path)

     cache_file["marker_cache_unfiltered"] = list(self.marker_cache_unfiltered)
     cache_file["version"] = self.MARKER_CACHE_VERSION
     cache_file["inverted_markers"] = self.inverted_markers
     cache_file.save()
Example no. 7
0
    def _add_surfaces_from_file(self):
        """Restore saved surfaces whose stored version matches the current
        ``Surface_Class.version``; definitions with other versions are skipped.
        """
        definitions_path = os.path.join(self._save_dir, "surface_definitions")
        surface_definitions = file_methods.Persistent_Dict(definitions_path)

        current_version = self.Surface_Class.version
        for init_dict in surface_definitions.get("surfaces", []):
            if init_dict.get("version") == current_version:
                self.add_surface(init_dict)
    def _save_marker_cache(self):
        """Write the unfiltered marker cache to disk, together with the
        detector parameters currently configured on the controller.
        """
        cache_path = os.path.join(self.g_pool.rec_dir, "square_marker_cache")
        cache_file = file_methods.Persistent_Dict(cache_path)

        cache_file["marker_cache_unfiltered"] = list(self.marker_cache_unfiltered)
        cache_file["version"] = self.MARKER_CACHE_VERSION

        # Record the parameters the cache was computed with so a later session
        # can detect whether it is still valid.
        current_config = self._cache_relevant_params_from_controller()
        for param in ("inverted_markers", "quad_decimate", "sharpening"):
            cache_file[param] = getattr(current_config, param)
        cache_file.save()
Example no. 9
0
    def _export_marker_detections(self):
        """Dump every serialized marker detection to ``marker_detections.csv``
        and always delete the temporary marker cache afterwards.
        """
        # Load the temporary marker cache created by the offline surface tracker.
        marker_cache = file_methods.Persistent_Dict(self.marker_cache_path)
        marker_cache = marker_cache["marker_cache"]

        header = (
            "world_index",
            "marker_uid",
            "corner_0_x",
            "corner_0_y",
            "corner_1_x",
            "corner_1_y",
            "corner_2_x",
            "corner_2_y",
            "corner_3_x",
            "corner_3_y",
        )

        try:
            file_path = os.path.join(self.metrics_dir, "marker_detections.csv")
            with open(file_path, "w", encoding="utf-8", newline="") as csv_file:
                writer = csv.writer(csv_file, delimiter=",")
                writer.writerow(header)
                for frame_index, serialized_markers in enumerate(marker_cache):
                    for serialized in serialized_markers:
                        marker = Surface_Marker.deserialize(serialized)
                        # Flatten the 4 corner vertices into 8 x/y coordinates.
                        corners = [
                            coord for vert in marker.verts_px for coord in vert[0]
                        ]
                        assert len(corners) == 8  # sanity check
                        writer.writerow((frame_index, marker.uid, *corners))
        finally:
            # Delete the temporary marker cache created by the offline surface
            # tracker, even if writing the CSV failed.
            os.remove(self.marker_cache_path)
            self.marker_cache_path = None
Example no. 10
0
    def make_update():
        """Migrate deprecated square-marker surface definitions to the current
        on-disk format.

        Renames the old ``surface_definitions`` file to
        ``surface_definitions_deprecated`` as a backup, converts each old
        surface definition (undistorting its marker vertices with the
        recording's camera intrinsics), and writes a new
        ``surface_definitions`` file.  Returns early when the recording has no
        old definitions, no camera intrinsics, or no world video.
        """
        surface_definitions_path = os.path.join(rec_dir, "surface_definitions")
        if not os.path.exists(surface_definitions_path):
            return

        surface_definitions_dict = fm.Persistent_Dict(surface_definitions_path)
        surface_definitions_backup_path = os.path.join(
            rec_dir, "surface_definitions_deprecated"
        )
        # Keep the original file around as a backup.
        os.rename(surface_definitions_path, surface_definitions_backup_path)

        intrinsics_path = os.path.join(rec_dir, "world.intrinsics")
        if not os.path.exists(intrinsics_path):
            logger.warning(
                "Loading surface definitions failed: The data format of the "
                "surface definitions in this recording "
                "is too old and is no longer supported!"
            )
            return

        valid_ext = (".mp4", ".mkv", ".avi", ".h264", ".mjpeg")
        existing_videos = [
            path
            for path in glob.glob(os.path.join(rec_dir, "world.*"))
            if os.path.splitext(path)[1] in valid_ext
        ]
        if not existing_videos:
            return

        # The video resolution is needed to load the camera intrinsics.
        world_video_path = existing_videos[0]
        world_video = av.open(world_video_path)
        try:
            video_format = world_video.streams.video[0].format
            resolution = video_format.width, video_format.height
        finally:
            # Close the container explicitly; the original code leaked it.
            world_video.close()

        intrinsics = cm.load_intrinsics(rec_dir, "world", resolution)

        DEPRECATED_SQUARE_MARKER_KEY = "realtime_square_marker_surfaces"
        if DEPRECATED_SQUARE_MARKER_KEY not in surface_definitions_dict:
            return
        surfaces_definitions_old = surface_definitions_dict[
            DEPRECATED_SQUARE_MARKER_KEY
        ]

        surfaces_definitions_new = []
        for surface_def_old in surfaces_definitions_old:
            surface_def_new = {
                "deprecated": True,
                "name": surface_def_old["name"],
                "real_world_size": surface_def_old["real_world_size"],
                "build_up_status": 1.0,
            }

            reg_markers = []
            registered_markers_dist = []
            # `marker_id` replaces the builtin-shadowing name `id`.
            for marker_id, verts in surface_def_old["markers"].items():
                # Store both the distorted and the undistorted marker vertices.
                registered_markers_dist.append(
                    {"id": marker_id, "verts_uv": verts}
                )
                verts_undist = undistort_vertices(verts, intrinsics)
                reg_markers.append({"id": marker_id, "verts_uv": verts_undist})

            surface_def_new["registered_markers_dist"] = registered_markers_dist
            surface_def_new["reg_markers"] = reg_markers

            surfaces_definitions_new.append(surface_def_new)

        surface_definitions_dict_new = fm.Persistent_Dict(surface_definitions_path)
        surface_definitions_dict_new["surfaces"] = surfaces_definitions_new
        surface_definitions_dict_new.save()
Example no. 11
0
def normalize(pos, width, height, flip_y=False):
    """Scale a pixel position into the unit square.

    Divides ``pos[0]`` by ``width`` and ``pos[1]`` by ``height`` and returns
    the result as a tuple of floats.  When ``flip_y`` is true the y axis is
    mirrored (``1 - y``).
    """
    norm_x = pos[0] / float(width)
    norm_y = pos[1] / float(height)
    if flip_y:
        norm_y = 1 - norm_y
    return norm_x, norm_y


if __name__ == '__main__':
    # Recording folder to process; the world video and all outputs live here.
    path = 'D:/BlindPursuit/pupil/10/000/'
    vid_path = path + "world.mp4"

    # Build a square_marker_cache file in the recording folder.
    marker_cache = file_methods.Persistent_Dict(path + 'square_marker_cache')
    # Cache format version expected by the consumer — TODO confirm 2 is current.
    marker_cache['version'] = 2

    # Detect markers in the world video.  Arguments are presumably: camera
    # model pickle, video path, output .npy path, output directory — verify
    # against the marker_positions definition.
    markers = marker_positions('sdcalib.rmap.full.camera.pickle',
                               vid_path,
                               path + 'markers_new.npy',
                               path,
                               visualize=False)

    marker_cache['marker_cache'] = markers
    marker_cache['inverted_markers'] = False
    marker_cache.close()
Example no. 12
0
    def on_notify(self, notification):
        """Dispatch surface-tracker notifications by their ``subject``.

        Handles detector-parameter changes, perimeter-filter changes, heatmap
        and trim updates, surface changes, and export requests; everything
        else is handled by the base class.
        """
        super().on_notify(notification)

        if notification[
                "subject"] == "surface_tracker.marker_detection_params_changed":
            # Detection parameters changed: the whole marker cache is stale.
            current_params = self._cache_relevant_params_from_controller()
            self._recalculate_marker_cache(parameters=current_params)

        elif notification[
                "subject"] == "surface_tracker.marker_min_perimeter_changed":
            # Only square markers are filtered by perimeter; re-filter the
            # existing unfiltered cache and invalidate surface locations.
            marker_type = self.marker_detector.marker_detector_mode.marker_type
            if marker_type == MarkerType.SQUARE_MARKER:
                self.marker_cache = self._filter_marker_cache(
                    self.marker_cache_unfiltered)
                for surface in self.surfaces:
                    surface.location_cache = None

        elif notification[
                "subject"] == "surface_tracker.heatmap_params_changed":
            # Rebuild the heatmap of the single surface named in the
            # notification; a placeholder is shown until the update completes.
            for surface in self.surfaces:
                if surface.name == notification["name"]:
                    self._heatmap_update_requests.add(surface)
                    surface.within_surface_heatmap = surface.get_placeholder_heatmap(
                    )
                    break
            self._fill_gaze_on_surf_buffer()

        elif notification["subject"].startswith(
                "seek_control.trim_indices_changed"):
            # Trim range changed: every surface heatmap must be recomputed.
            for surface in self.surfaces:
                surface.within_surface_heatmap = surface.get_placeholder_heatmap(
                )
                self._heatmap_update_requests.add(surface)
            self._fill_gaze_on_surf_buffer()

        elif notification["subject"] == "surface_tracker.surfaces_changed":
            # A surface definition changed: drop its location cache and
            # request a heatmap update for it.
            for surface in self.surfaces:
                if surface.name == notification["name"]:
                    surface.location_cache = None
                    surface.within_surface_heatmap = surface.get_placeholder_heatmap(
                    )
                    self._heatmap_update_requests.add(surface)
                    break

        elif notification["subject"] == "should_export":

            # Refuse to export while background fillers are still running.
            if self.cache_filler is not None:
                logger.error(
                    "Marker detection not finished. No data will be exported.")
                return

            if self.gaze_on_surf_buffer_filler is not None:
                logger.error(
                    "Surface gaze mapping not finished. No data will be exported."
                )
                return

            # Create a new temporary file for the marker cache.  The
            # background exporter is responsible for removing it when done.
            file_handle, marker_cache_path = tempfile.mkstemp()
            os.close(file_handle)  # https://bugs.python.org/issue42830

            # Save the marker cache into the new temporary file.
            temp_marker_cache = file_methods.Persistent_Dict(marker_cache_path)
            temp_marker_cache["marker_cache"] = self.marker_cache
            temp_marker_cache.save()

            proxy = background_tasks.get_export_proxy(
                notification["export_dir"],
                notification["range"],
                self.surfaces,
                self.g_pool.timestamps,
                self.g_pool.gaze_positions,
                self.g_pool.fixations,
                self.camera_model,
                marker_cache_path,
                mp_context,
            )
            self.export_proxies.add(proxy)

        elif (notification["subject"] ==
              "surface_tracker_offline._should_fill_gaze_on_surf_buffer"):
            self._fill_gaze_on_surf_buffer()