def _classify(self):
    """Run fixation detection over the loaded gaze data in a background task."""
    # The exporter process must not spawn its own detection task.
    if self.g_pool.app == "exporter":
        return
    if self.bg_task:
        self.bg_task.cancel()

    serialized_gaze = [datum.serialized for datum in self.g_pool.gaze_positions]

    # Lightweight stand-in for the capture object: only the attributes the
    # background process reads are copied across the process boundary.
    capture_stub = Empty()
    capture_stub.frame_size = self.g_pool.capture.frame_size
    capture_stub.intrinsics = self.g_pool.capture.intrinsics
    capture_stub.timestamps = self.g_pool.capture.timestamps

    # Reset result buffers before the new task starts filling them.
    self.fixation_data = deque()
    self.fixation_start_ts = deque()
    self.fixation_stop_ts = deque()

    self.bg_task = bh.IPC_Logging_Task_Proxy(
        "Fixation detection",
        detect_fixations,
        args=(
            capture_stub,
            serialized_gaze,
            np.deg2rad(self.max_dispersion),
            self.min_duration / 1000,  # ms -> s
            self.max_duration / 1000,  # ms -> s
            self.g_pool.min_data_confidence,
        ),
    )
def background_data_processor(data, callable, seek_idx, mp_context):
    """Spawn a background task that feeds *data* through *callable*.

    Returns the IPC task proxy wrapping ``data_processing_generator``.
    """
    generator_args = (data, callable, seek_idx)
    proxy = background_helper.IPC_Logging_Task_Proxy(
        "Background Data Processor",
        data_processing_generator,
        generator_args,
        context=mp_context,
    )
    return proxy
def get_export_proxy(
    export_dir,
    export_range,
    surfaces,
    world_timestamps,
    gaze_positions,
    fixations,
    camera_model,
    marker_cache_path,
    mp_context,
):
    """Create a background task that writes surface statistics to *export_dir*.

    Builds an ``Exporter`` from the given data and returns an IPC task proxy
    running its ``save_surface_statisics_to_file`` method.
    """
    exporter = Exporter(
        export_dir,
        export_range,
        surfaces,
        world_timestamps,
        gaze_positions,
        fixations,
        camera_model,
        marker_cache_path,
    )
    return background_helper.IPC_Logging_Task_Proxy(
        "Offline Surface Tracker Exporter",
        exporter.save_surface_statisics_to_file,
        context=mp_context,
    )
def background_video_processor(
    video_file_path, callable, visited_list, seek_idx, mp_context
):
    """Spawn a background task that runs *callable* over a video file.

    NOTE: the generator receives ``visited_list`` *after* ``seek_idx`` —
    the argument order differs from this function's signature.
    """
    generator_args = (video_file_path, callable, seek_idx, visited_list)
    proxy = background_helper.IPC_Logging_Task_Proxy(
        "Background Video Processor",
        video_processing_generator,
        generator_args,
        context=mp_context,
    )
    return proxy
def background_gaze_on_surface(
    surfaces, section, all_world_timestamps, all_gaze_events, camera_model, mp_context
):
    """Spawn a background task mapping gaze onto *surfaces* for one section."""
    generator_args = (
        surfaces,
        section,
        all_world_timestamps,
        all_gaze_events,
        camera_model,
    )
    proxy = background_helper.IPC_Logging_Task_Proxy(
        "Background Data Processor",
        gaze_on_surface_generator,
        generator_args,
        context=mp_context,
    )
    return proxy
def detect_recordings(self):
    """Toggle the recording search: cancel a running search, else start one."""
    if self.search_task:
        # A search is already in progress — cancel it and reset the button.
        self.search_task.cancel()
        self.search_task = None
        self.search_button.outer_label = ""
        self.search_button.label = "Search"
        return

    # No search running — flip the button into its "busy" state and start one.
    self.search_button.outer_label = "Searching..."
    self.search_button.label = "Cancel"
    self.search_task = bh.IPC_Logging_Task_Proxy(
        "Searching recordings in {}".format(self.source_dir),
        get_recording_dirs,
        args=[self.source_dir],
    )
def _fuse(self):
    """Start background IMU orientation fusion (Madgwick's algorithm)."""
    if self.bg_task:
        self.bg_task.cancel()

    fuser_args = (self.data_raw, self.gyro_error)

    # Reset the orientation buffers before the new task starts producing data.
    self.data_orient = self.data_orient_empty_copy()
    self._data_orient_fetched = np.empty_like(
        self.data_orient, shape=self.data_len
    )
    if self.should_draw_orientation:
        self.orient_timeline.refresh()

    logger.info("Starting IMU fusion using Madgwick's algorithm")
    self.bg_task = bh.IPC_Logging_Task_Proxy("Fusion", fuser, args=fuser_args)
def init_export(self):
    """Launch this recording's batch export as a background process."""
    self.in_queue = False
    # The two None entries sit in the start/end frame positions (cf.
    # add_export, which passes explicit frames there) and the trailing {}
    # in the pre-computed-data position — TODO confirm against
    # export_function's signature.
    export_args = (
        self.rec_dir,
        self.g_pool.user_dir,
        self.g_pool.min_data_confidence,
        None,
        None,
        self.plugins,
        self.out_file_path,
        {},
    )
    self.process = bh.IPC_Logging_Task_Proxy(
        "Pupil Batch Export {}".format(self.out_file_path),
        export_function,
        args=export_args,
    )
    self.notify_all(
        {"subject": "batch_export.started", "out_file_path": self.out_file_path}
    )
def add_export_job(
    self,
    export_range,
    export_dir,
    plugin_name,
    input_name,
    output_name,
    process_frame,
    export_timestamps,
):
    """Start a background H.264 export of one video stream.

    Locates the source video ``input_name`` in the recording directory and
    launches a background task that writes ``output_name``.mp4 into
    ``export_dir``.

    Returns:
        dict with key ``"export_folder"`` pointing at ``export_dir``.

    Raises:
        FileNotFoundError: if no ``input_name`` video with a supported
            extension exists in the recording directory.
    """
    os.makedirs(export_dir, exist_ok=True)
    logger.info("Exporting to {}".format(export_dir))
    try:
        # First matching video file wins; IndexError means none was found.
        distorted_video_loc = [
            f
            for f in glob(os.path.join(self.g_pool.rec_dir, input_name + ".*"))
            if os.path.splitext(f)[-1] in (".mp4", ".mkv", ".avi", ".mjpeg")
        ][0]
    except IndexError:
        raise FileNotFoundError("No Video " + input_name + " found")
    target_video_loc = os.path.join(export_dir, output_name + ".mp4")
    generator_args = (
        self.g_pool.timestamps,
        distorted_video_loc,
        target_video_loc,
        export_range,
        process_frame,
        export_timestamps,
    )
    # BUGFIX: IPC_Logging_Task_Proxy takes the task name as its first
    # argument and the generator as its second (see every other call site
    # in this file). The previous code passed self.g_pool.ipc_push_url
    # first, shifting all arguments by one position.
    task = bh.IPC_Logging_Task_Proxy(
        plugin_name + " Video Export",
        export_processed_h264,
        args=generator_args,
    )
    self.export_tasks.append(task)
    return {"export_folder": export_dir}
def add_export(self, export_range, export_dir):
    """Launch a new background export process for *export_range*.

    Clones the current plugin configuration, gathers any pre-computed data
    for the range, and appends the started process (annotated with progress
    bookkeeping attributes) to ``self.exports``.
    """
    logger.warning("Adding new video export process.")
    rec_dir = self.g_pool.rec_dir
    user_dir = self.g_pool.user_dir
    # export_range.stop is exclusive
    start_frame, end_frame = export_range
    # Here we make clones of every plugin that supports it.
    # So it runs in the current config when we launch the exporter.
    plugins = self.g_pool.plugins.get_initializers()
    out_file_path = os.path.join(export_dir, self.rec_name)
    pre_computed = self.precomputed_for_range(export_range)
    args = (
        rec_dir,
        user_dir,
        self.g_pool.min_data_confidence,
        start_frame,
        end_frame,
        plugins,
        out_file_path,
        pre_computed,
    )
    # BUGFIX: IPC_Logging_Task_Proxy's first argument is the task name and
    # its second the generator (see the other call sites in this file).
    # The previous code passed self.g_pool.ipc_push_url first, shifting
    # every argument by one position.
    process = bh.IPC_Logging_Task_Proxy(
        "Pupil Export {}".format(out_file_path),
        export,
        args=args,
    )
    # Progress bookkeeping attributes read by the UI.
    process.out_file_path = out_file_path
    process.frames_to_export = end_frame - start_frame
    process.status = ""
    process.progress = 0
    self.exports.append(process)
    logger.debug("Starting export as new process {}".format(process))
    self._update_ui()
def calibrate_section(self, sec):
    """Calibrate one section *sec* (a dict of section state) in the background.

    Cancels any calibration task already running for this section, clears its
    result buffers, validates that pupil data and reference-marker data exist
    for the calibration range, and finally launches ``calibrate_and_map`` via
    an IPC task proxy stored in ``sec["bg_task"]``. On validation failure the
    function sets ``sec["status"]`` to an error message and returns early.
    """
    if "bg_task" in sec:
        sec["bg_task"].cancel()
    sec["status"] = "Starting calibration"  # This will be overwritten on success
    try:
        # Reuse existing result buffers if this section was calibrated before.
        sec["gaze"].clear()
        sec["gaze_ts"].clear()
    except KeyError:
        # First calibration of this section: create the result buffers.
        sec["gaze"] = collections.deque()
        sec["gaze_ts"] = collections.deque()
    calibration_window = pm.exact_window(
        self.g_pool.timestamps, sec["calibration_range"]
    )
    mapping_window = pm.exact_window(self.g_pool.timestamps, sec["mapping_range"])
    calibration_pupil_pos = self.g_pool.pupil_positions.by_ts_window(
        calibration_window
    )
    mapping_pupil_pos = self.g_pool.pupil_positions.by_ts_window(mapping_window)
    # NOTE(review): if sec["calibration_method"] is neither of these two
    # values, ref_list stays unbound and the comprehension below raises
    # NameError — presumably these are the only valid methods; confirm that
    # upstream code validates this field.
    if sec["calibration_method"] == "circle_marker":
        ref_list = self.circle_marker_positions
    elif sec["calibration_method"] == "natural_features":
        ref_list = self.manual_ref_positions
    # Keep only reference positions whose frame index lies inside the
    # (inclusive) calibration range.
    start = sec["calibration_range"][0]
    end = sec["calibration_range"][1]
    ref_list = [r for r in ref_list if start <= r["index"] <= end]
    if not len(calibration_pupil_pos):
        logger.error(
            'No pupil data to calibrate section "{}"'.format(
                self.sections.index(sec) + 1
            )
        )
        sec["status"] = "Calibration failed. Not enough pupil positions."
        return
    if not ref_list:
        logger.error(
            'No referece marker data to calibrate section "{}"'.format(
                self.sections.index(sec) + 1
            )
        )
        sec["status"] = "Calibration failed. Not enough reference positions."
        return
    if (
        sec["mapping_method"] == "3d"
        and "2d" in calibration_pupil_pos[len(calibration_pupil_pos) // 2]["method"]
    ):
        # select median pupil datum from calibration list and use its
        # detection method as mapping method
        logger.warning(
            "Pupil data is 2d, calibration and mapping mode forced to 2d."
        )
        sec["mapping_method"] = "2d"
    # Minimal stand-in g_pool carrying only what calibration needs, so the
    # real g_pool never crosses the process boundary.
    fake = setup_fake_pool(
        self.g_pool.capture.frame_size,
        self.g_pool.capture.intrinsics,
        sec["mapping_method"],
        self.g_pool.rec_dir,
        self.g_pool.min_calibration_confidence,
    )
    # Serialize pupil data for transfer to the background process.
    calibration_pupil_pos = [pp.serialized for pp in calibration_pupil_pos]
    mapping_pupil_pos = [pp.serialized for pp in mapping_pupil_pos]
    generator_args = (
        fake,
        ref_list,
        calibration_pupil_pos,
        mapping_pupil_pos,
        sec["x_offset"],
        sec["y_offset"],
    )
    logger.info(
        "Calibrating section {} ({}) in {} mode...".format(
            self.sections.index(sec) + 1, sec["label"], sec["mapping_method"]
        )
    )
    sec["bg_task"] = bh.IPC_Logging_Task_Proxy(
        "Calibration Section {}".format(self.sections.index(sec) + 1),
        calibrate_and_map,
        args=generator_args,
    )
def start(self):
    """Spawn the wrapped task as a background process.

    Must only be called while no task proxy exists for this instance.
    """
    assert self.task_proxy is None
    proxy = bh.IPC_Logging_Task_Proxy(self.heading, self.task, args=self.args)
    self.task_proxy = proxy