def calibrate_section(self, sec):
    if 'bg_task' in sec:
        sec['bg_task'].cancel()

    sec['status'] = 'Starting calibration'  # This will be overwritten on success
    try:
        sec['gaze'].clear()
        sec['gaze_ts'].clear()
    except KeyError:
        sec['gaze'] = collections.deque()
        sec['gaze_ts'] = collections.deque()

    calibration_window = pm.exact_window(self.g_pool.timestamps, sec['calibration_range'])
    mapping_window = pm.exact_window(self.g_pool.timestamps, sec['mapping_range'])
    calibration_pupil_pos = self.g_pool.pupil_positions.by_ts_window(calibration_window)
    mapping_pupil_pos = self.g_pool.pupil_positions.by_ts_window(mapping_window)

    if sec['calibration_method'] == 'circle_marker':
        ref_list = self.circle_marker_positions
    elif sec['calibration_method'] == 'natural_features':
        ref_list = self.manual_ref_positions

    start = sec['calibration_range'][0]
    end = sec['calibration_range'][1]
    ref_list = [r for r in ref_list if start <= r['index'] <= end]

    if not len(calibration_pupil_pos):
        logger.error('No pupil data to calibrate section "{}"'.format(
            self.sections.index(sec) + 1))
        sec['status'] = 'Calibration failed. Not enough pupil positions.'
        return

    if not ref_list:
        logger.error('No reference marker data to calibrate section "{}"'.format(
            self.sections.index(sec) + 1))
        sec['status'] = 'Calibration failed. Not enough reference positions.'
        return

    # select median pupil datum from calibration list and use its detection
    # method as mapping method
    if (sec['mapping_method'] == '3d'
            and '2d' in calibration_pupil_pos[len(calibration_pupil_pos) // 2]['method']):
        logger.warning('Pupil data is 2d, calibration and mapping mode forced to 2d.')
        sec['mapping_method'] = '2d'

    fake = setup_fake_pool(self.g_pool.capture.frame_size,
                           self.g_pool.capture.intrinsics,
                           sec['mapping_method'],
                           self.g_pool.rec_dir,
                           self.g_pool.min_calibration_confidence)
    calibration_pupil_pos = [pp.serialized for pp in calibration_pupil_pos]
    mapping_pupil_pos = [pp.serialized for pp in mapping_pupil_pos]

    generator_args = (fake, ref_list, calibration_pupil_pos, mapping_pupil_pos,
                      sec['x_offset'], sec['y_offset'])

    logger.info('Calibrating section {} ({}) in {} mode...'.format(
        self.sections.index(sec) + 1, sec['label'], sec['mapping_method']))
    sec['bg_task'] = bh.Task_Proxy('{}'.format(self.sections.index(sec) + 1),
                                   calibrate_and_map, args=generator_args)
def do_export(_):
    left_idx = g_pool.seek_control.trim_left
    right_idx = g_pool.seek_control.trim_right
    export_range = left_idx, right_idx + 1  # exclusive range.stop
    export_ts_window = pm.exact_window(g_pool.timestamps, (left_idx, right_idx))

    export_dir = os.path.join(g_pool.rec_dir, "exports")
    export_dir = next_export_sub_dir(export_dir)

    os.makedirs(export_dir)
    logger.info('Created export dir at "{}"'.format(export_dir))

    export_info = {
        "Player Software Version": str(g_pool.version),
        "Data Format Version": meta_info["Data Format Version"],
        "Export Date": strftime("%d.%m.%Y", localtime()),
        "Export Time": strftime("%H:%M:%S", localtime()),
        "Frame Index Range": g_pool.seek_control.get_frame_index_trim_range_string(),
        "Relative Time Range": g_pool.seek_control.get_rel_time_trim_range_string(),
        "Absolute Time Range": g_pool.seek_control.get_abs_time_trim_range_string(),
    }
    with open(os.path.join(export_dir, "export_info.csv"), "w") as csv_file:
        write_key_value_file(csv_file, export_info)

    notification = {
        "subject": "should_export",
        "range": export_range,
        "ts_window": export_ts_window,
        "export_dir": export_dir,
    }
    g_pool.ipc_pub.notify(notification)
def export_annotations(self, export_range, export_dir):
    if not self.annotations:
        logger.warning('No annotations in this recording. Nothing to export.')
        return

    export_window = pm.exact_window(self.g_pool.timestamps, export_range)
    annotation_section = self.annotations.init_dict_for_window(export_window)
    annotation_idc = pm.find_closest(self.g_pool.timestamps, annotation_section['data_ts'])
    csv_keys = self.parse_csv_keys(annotation_section['data'])

    with open(os.path.join(export_dir, 'annotations.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(csv_keys)
        for annotation, idx in zip(annotation_section['data'], annotation_idc):
            csv_row = [idx]
            csv_row.extend(annotation.get(k, '') for k in csv_keys[1:])
            csv_writer.writerow(csv_row)
        logger.info("Created 'annotations.csv' file.")
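# NOTE: `parse_csv_keys` is referenced above but not defined in this section.
# A hypothetical sketch of what it presumably does -- build a stable CSV header
# from the annotation dicts, with a leading frame-index column (written as
# `idx` above); the real helper may order or filter keys differently:
def parse_csv_keys(annotations):
    csv_keys = ['index']
    for annotation in annotations:
        for key in annotation.keys():
            if key not in csv_keys:
                csv_keys.append(key)
    return csv_keys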
def create_bg_task(gaze_mapper, calibration, reference_location_storage):
    assert g_pool, "You forgot to set g_pool by the plugin"

    refs_in_validation_range = reference_location_storage.get_in_range(
        gaze_mapper.validation_index_range
    )

    validation_window = pm.exact_window(g_pool.timestamps, gaze_mapper.validation_index_range)
    pupils_in_validation_range = g_pool.pupil_positions.by_ts_window(validation_window)

    # Make a copy of params to ensure there are no mappingproxy instances
    # calibration_params = fm._recursive_deep_copy(calibration.params)
    calibration_params = calibration.params

    fake_gpool = FakeGPool.from_g_pool(g_pool)

    args = (
        fake_gpool,
        calibration.gazer_class_name,
        calibration_params,
        gaze_mapper,
        pupils_in_validation_range,
        refs_in_validation_range,
    )
    return tasklib.background.create(
        f"validate gaze mapper '{gaze_mapper.name}'",
        validate,
        args=args,
    )
def create_task(gaze_mapper, calibration):
    assert g_pool, "You forgot to set g_pool by the plugin"

    mapping_window = pm.exact_window(g_pool.timestamps, gaze_mapper.mapping_index_range)
    pupil_pos_in_mapping_range = g_pool.pupil_positions.by_ts_window(mapping_window)

    fake_gpool = _setup_fake_gpool(
        g_pool.capture.frame_size,
        g_pool.capture.intrinsics,
        calibration.mapping_method,
        g_pool.rec_dir,
    )

    args = (
        calibration.result,
        fake_gpool,
        pupil_pos_in_mapping_range,
        gaze_mapper.manual_correction_x,
        gaze_mapper.manual_correction_y,
    )
    name = "Create gaze mapper {}".format(gaze_mapper.name)
    return tasklib.background.create(
        name,
        _map_gaze,
        args=args,
        patches=[bg_patches.IPCLoggingPatch()],
        pass_shared_memory=True,
    )
def create_task(calibration, all_reference_locations):
    assert g_pool, "You forgot to set g_pool by the plugin"

    calibration_window = pm.exact_window(g_pool.timestamps, calibration.frame_index_range)
    pupil_pos_in_calib_range = g_pool.pupil_positions.by_ts_window(calibration_window)

    frame_start = calibration.frame_index_range[0]
    frame_end = calibration.frame_index_range[1]
    ref_dicts_in_calib_range = [
        _create_ref_dict(ref)
        for ref in all_reference_locations
        if frame_start <= ref.frame_index <= frame_end
    ]

    fake_gpool = _setup_fake_gpool(
        g_pool.capture.frame_size,
        g_pool.capture.intrinsics,
        calibration.mapping_method,
        g_pool.rec_dir,
        calibration.minimum_confidence,
    )

    args = (fake_gpool, ref_dicts_in_calib_range, pupil_pos_in_calib_range)
    name = "Create calibration {}".format(calibration.name)
    return tasklib.background.create(
        name, _create_calibration, args=args, patches=[bg_patches.IPCLoggingPatch()]
    )
def create_task(calibration, all_reference_locations):
    assert g_pool, "You forgot to set g_pool by the plugin"

    calibration_window = pm.exact_window(g_pool.timestamps, calibration.frame_index_range)
    pupil_pos_in_calib_range = g_pool.pupil_positions.by_ts_window(calibration_window)

    frame_start = calibration.frame_index_range[0]
    frame_end = calibration.frame_index_range[1]
    ref_dicts_in_calib_range = [
        _create_ref_dict(ref)
        for ref in all_reference_locations
        if frame_start <= ref.frame_index <= frame_end
    ]

    fake_gpool = _setup_fake_gpool(
        g_pool.capture.frame_size,
        g_pool.capture.intrinsics,
        calibration.mapping_method,
        g_pool.rec_dir,
        calibration.minimum_confidence,
    )

    args = (fake_gpool, ref_dicts_in_calib_range, pupil_pos_in_calib_range)
    name = "Create calibration {}".format(calibration.name)
    return tasklib.background.create(
        name,
        _create_calibration,
        args=args,
    )
def do_export(_):
    left_idx = g_pool.seek_control.trim_left
    right_idx = g_pool.seek_control.trim_right
    export_range = left_idx, right_idx + 1  # exclusive range.stop
    export_ts_window = pm.exact_window(g_pool.timestamps, (left_idx, right_idx))

    export_dir = os.path.join(g_pool.rec_dir, "exports")
    export_dir = next_export_sub_dir(export_dir)

    os.makedirs(export_dir)
    logger.info('Created export dir at "{}"'.format(export_dir))

    export_info = {
        "Player Software Version": str(g_pool.version),
        "Data Format Version": meta_info.min_player_version,
        "Export Date": strftime("%d.%m.%Y", localtime()),
        "Export Time": strftime("%H:%M:%S", localtime()),
        "Frame Index Range": g_pool.seek_control.get_frame_index_trim_range_string(),
        "Relative Time Range": g_pool.seek_control.get_rel_time_trim_range_string(),
        "Absolute Time Range": g_pool.seek_control.get_abs_time_trim_range_string(),
    }
    with open(os.path.join(export_dir, "export_info.csv"), "w") as csv_file:
        write_key_value_file(csv_file, export_info)

    notification = {
        "subject": "should_export",
        "range": export_range,
        "ts_window": export_ts_window,
        "export_dir": export_dir,
    }
    g_pool.ipc_pub.notify(notification)
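# NOTE: `next_export_sub_dir` is used above but not defined here. A minimal
# sketch under the assumption that exports live in zero-padded, numbered
# subdirectories ("000", "001", ...) of the exports folder; the helper name is
# from the source, this body is illustrative:
import os


def next_export_sub_dir(root_export_dir):
    if os.path.isdir(root_export_dir):
        existing = [d for d in os.listdir(root_export_dir) if d.isdigit()]
    else:
        existing = []
    next_count = max((int(d) for d in existing), default=-1) + 1
    return os.path.join(root_export_dir, "{:03d}".format(next_count))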
def export_fixations(self, export_range, export_dir):
    """
    between in and out mark

    fixation report:
        - fixation detection method and parameters
        - fixation count

    fixation list:
        id | start_timestamp | duration | start_frame_index | end_frame_index |
        norm_pos_x | norm_pos_y | dispersion | confidence | method |
        gaze_point_3d_x | gaze_point_3d_y | gaze_point_3d_z | base_data
    """
    if not self.fixation_data:
        logger.warning('No fixations in this recording. Nothing to export.')
        return

    export_window = pm.exact_window(self.g_pool.timestamps, export_range)
    fixations_in_section = self.g_pool.fixations.by_ts_window(export_window)

    with open(os.path.join(export_dir, 'fixations.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(self.csv_representation_keys())
        for f in fixations_in_section:
            csv_writer.writerow(self.csv_representation_for_fixation(f))
        logger.info("Created 'fixations.csv' file.")

    with open(os.path.join(export_dir, 'fixation_report.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(('fixation classifier', 'Dispersion_Duration'))
        csv_writer.writerow(('max_dispersion', '{:0.3f} deg'.format(self.max_dispersion)))
        csv_writer.writerow(('min_duration', '{:0.3f} sec'.format(self.min_duration)))
        csv_writer.writerow(())  # blank spacer row; note ('') is a plain string, not a tuple
        csv_writer.writerow(('fixation_count', len(fixations_in_section)))
        logger.info("Created 'fixation_report.csv' file.")
def create_task(gaze_mapper, calibration):
    assert g_pool, "You forgot to set g_pool by the plugin"

    mapping_window = pm.exact_window(g_pool.timestamps, gaze_mapper.mapping_index_range)
    pupil_pos_in_mapping_range = g_pool.pupil_positions.by_ts_window(mapping_window)

    fake_gpool = _setup_fake_gpool(
        g_pool.capture.frame_size,
        g_pool.capture.intrinsics,
        calibration.mapping_method,
        g_pool.rec_dir,
    )

    args = (
        calibration.result,
        fake_gpool,
        pupil_pos_in_mapping_range,
        gaze_mapper.manual_correction_x,
        gaze_mapper.manual_correction_y,
    )
    name = "Create gaze mapper {}".format(gaze_mapper.name)
    return tasklib.background.create(
        name,
        _map_gaze,
        args=args,
        patches=[bg_patches.IPCLoggingPatch()],
        pass_shared_memory=True,
    )
def create_task(calibration, all_reference_locations):
    assert g_pool, "You forgot to set g_pool by the plugin"

    calibration_window = pm.exact_window(g_pool.timestamps, calibration.frame_index_range)
    pupil_pos_in_calib_range = g_pool.pupil_positions.by_ts_window(calibration_window)

    frame_start = calibration.frame_index_range[0]
    frame_end = calibration.frame_index_range[1]
    ref_dicts_in_calib_range = [
        _create_ref_dict(ref)
        for ref in all_reference_locations
        if frame_start <= ref.frame_index <= frame_end
    ]

    fake_gpool = FakeGPool.from_g_pool(g_pool)
    fake_gpool.min_calibration_confidence = calibration.minimum_confidence

    args = (
        fake_gpool,
        calibration.gazer_class_name,
        ref_dicts_in_calib_range,
        pupil_pos_in_calib_range,
    )
    name = f"Create calibration {calibration.name}"
    return tasklib.background.create(name, _create_calibration, args=args)
def create_task(gaze_mapper, calibration):
    assert g_pool, "You forgot to set g_pool by the plugin"

    mapping_window = pm.exact_window(g_pool.timestamps, gaze_mapper.mapping_index_range)
    pupil_pos_in_mapping_range = g_pool.pupil_positions.by_ts_window(mapping_window)
    if not pupil_pos_in_mapping_range:
        raise NotEnoughPupilData

    fake_gpool = FakeGPool.from_g_pool(g_pool)

    # Make a copy of params to ensure there are no mappingproxy instances
    # calibration_params = fm._recursive_deep_copy(calibration.params)
    calibration_params = calibration.params

    args = (
        calibration.gazer_class_name,
        calibration_params,
        fake_gpool,
        pupil_pos_in_mapping_range,
        gaze_mapper.manual_correction_x,
        gaze_mapper.manual_correction_y,
    )
    name = f"Create gaze mapper {gaze_mapper.name}"
    return tasklib.background.create(
        name,
        _map_gaze,
        args=args,
        pass_shared_memory=True,
    )
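# NOTE: `NotEnoughPupilData` is raised above when the mapping window contains
# no pupil positions. Its definition is not part of this section; presumably
# it is a plain exception type along these lines (hypothetical sketch):
class NotEnoughPupilData(ValueError):
    """Raised when the selected mapping range contains no pupil data."""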
def _export_surface_gaze_distribution(self):
    with open(
        os.path.join(self.metrics_dir, "surface_gaze_distribution.csv"),
        "w",
        encoding="utf-8",
        newline="",
    ) as csv_file:
        csv_writer = csv.writer(csv_file, delimiter=",")

        export_window = player_methods.exact_window(
            self.world_timestamps, self.export_range
        )
        gaze_in_section = self.gaze_positions.by_ts_window(export_window)
        not_on_any_surf_ts = {gp["timestamp"] for gp in gaze_in_section}

        csv_writer.writerow(("total_gaze_point_count", len(gaze_in_section)))
        csv_writer.writerow(())  # blank spacer row
        csv_writer.writerow(("surface_name", "gaze_count"))

        for surf_idx, surface in enumerate(self.surfaces):
            gaze_on_surf = self.gaze_on_surfaces[surf_idx]
            gaze_on_surf = list(itertools.chain.from_iterable(gaze_on_surf))
            gaze_on_surf_ts = {
                gp["base_data"][1] for gp in gaze_on_surf if gp["on_surf"]
            }
            not_on_any_surf_ts -= gaze_on_surf_ts
            csv_writer.writerow((surface.name, len(gaze_on_surf_ts)))

        csv_writer.writerow(("not_on_any_surface", len(not_on_any_surf_ts)))

    logger.info("Created 'surface_gaze_distribution.csv' file")
def _csv_exported_gaze_data(
    gaze_positions, destination_folder, export_range, timestamps, capture
):
    export_start, export_stop = export_range  # export_stop is exclusive
    export_window = pm.exact_window(timestamps, (export_start, export_stop - 1))
    gaze_section = gaze_positions.init_dict_for_window(export_window)

    # find closest world idx for each gaze datum
    gaze_world_idc = pm.find_closest(timestamps, gaze_section["data_ts"])

    csv_header = (
        "GazeTimeStamp",
        "MediaTimeStamp",
        "MediaFrameIndex",
        "Gaze3dX",
        "Gaze3dY",
        "Gaze3dZ",
        "Gaze2dX",
        "Gaze2dY",
        "PupilDiaLeft",
        "PupilDiaRight",
        "Confidence",
    )

    csv_rows = []

    for gaze_pos, media_idx in zip(gaze_section["data"], gaze_world_idc):
        media_timestamp = timestamps[media_idx]
        try:
            pupil_dia = {}
            for p in gaze_pos["base_data"]:
                pupil_dia[p["id"]] = p["diameter_3d"]

            pixel_pos = denormalize(
                gaze_pos["norm_pos"], capture.frame_size, flip_y=True
            )
            undistorted3d = capture.intrinsics.unprojectPoints(pixel_pos)
            undistorted2d = capture.intrinsics.projectPoints(
                undistorted3d, use_distortion=False
            )

            data = (
                gaze_pos["timestamp"],
                media_timestamp,
                media_idx - export_range[0],
                *gaze_pos["gaze_point_3d"],  # Gaze3dX/Y/Z
                *undistorted2d.flat,  # Gaze2dX/Y
                pupil_dia.get(1, 0.0),  # PupilDiaLeft
                pupil_dia.get(0, 0.0),  # PupilDiaRight
                gaze_pos["confidence"],  # Confidence
            )
        except KeyError:
            raise _iMotionsExporterNo3DGazeDataError()
        csv_rows.append(data)

    return csv_header, csv_rows
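# NOTE: The unprojectPoints/projectPoints pair above round-trips a distorted
# pixel position through the camera model and back without distortion, which
# yields undistorted 2d pixel coordinates. A self-contained sketch of the same
# idea in plain OpenCV; K and dist_coeffs are made-up placeholder intrinsics,
# not values from the source:
import cv2
import numpy as np

K = np.array([[700.0, 0.0, 640.0],
              [0.0, 700.0, 360.0],
              [0.0, 0.0, 1.0]])
dist_coeffs = np.array([-0.25, 0.1, 0.0, 0.0, 0.0])

pixel = np.array([[[100.0, 200.0]]])  # one distorted pixel position

# undistort to normalized image coordinates (the "unproject" step) ...
normalized = cv2.undistortPoints(pixel, K, dist_coeffs)
# ... then reproject with zero distortion to get undistorted pixel coordinates
rays = cv2.convertPointsToHomogeneous(normalized).reshape(-1, 1, 3)
undistorted2d, _ = cv2.projectPoints(rays, np.zeros(3), np.zeros(3), K, None)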
def _convert_video_file(
    input_file,
    output_file,
    export_range,
    world_timestamps,
    process_frame,
    timestamp_export_format,
):
    yield "Export video", 0.0
    input_source = File_Source(SimpleNamespace(), input_file, fill_gaps=True)
    if not input_source.initialised:
        yield "Exporting video failed", 0.0
        return

    # yield progress results two times per second
    update_rate = int(input_source.frame_rate / 2)

    export_start, export_stop = export_range  # export_stop is exclusive
    export_window = pm.exact_window(world_timestamps, (export_start, export_stop - 1))
    (export_from_index, export_to_index) = pm.find_closest(
        input_source.timestamps, export_window
    )

    # NOTE: The start time of the exported recording will be synced with the world
    # video export! This means that if the recording to export started later than
    # the world video, its first frame will not be at timestamp 0 in the export,
    # but later. Some video players (e.g. VLC on Windows) might display the video
    # weirdly in this case, but we prefer synchronization between the exported
    # videos!
    start_time = export_window[0]

    writer = MPEG_Writer(output_file, start_time)

    input_source.seek_to_frame(export_from_index)
    next_update_idx = export_from_index + update_rate
    while True:
        try:
            input_frame = input_source.get_frame()
        except EndofVideoError:
            break
        if input_frame.index >= export_to_index:
            break

        output_img = process_frame(input_source, input_frame)
        output_frame = input_frame
        output_frame._img = output_img  # it's ._img because .img has no setter
        writer.write_video_frame(output_frame)

        if input_source.get_frame_index() >= next_update_idx:
            progress = (input_source.get_frame_index() - export_from_index) / (
                export_to_index - export_from_index
            )
            yield "Exporting video", progress * 100.0
            next_update_idx += update_rate

    writer.close(timestamp_export_format)
    input_source.cleanup()
    yield "Exporting video completed", 100.0
def _convert_video_file(
    input_file,
    output_file,
    export_range,
    world_timestamps,
    process_frame,
    timestamp_export_format,
):
    yield "Export video", 0.0
    input_source = File_Source(EmptyGPool(), input_file, fill_gaps=True)
    if not input_source.initialised:
        yield "Exporting video failed", 0.0
        return

    # yield progress results two times per second
    update_rate = int(input_source.frame_rate / 2)

    export_start, export_stop = export_range  # export_stop is exclusive
    export_window = pm.exact_window(world_timestamps, (export_start, export_stop - 1))
    (export_from_index, export_to_index) = pm.find_closest(input_source.timestamps,
                                                           export_window)

    writer = AV_Writer(output_file, fps=input_source.frame_rate,
                       audio_dir=None, use_timestamps=True)

    input_source.seek_to_frame(export_from_index)
    next_update_idx = export_from_index + update_rate
    while True:
        try:
            input_frame = input_source.get_frame()
        except EndofVideoError:
            break
        if input_frame.index >= export_to_index:
            break

        output_img = process_frame(input_source, input_frame)
        output_frame = input_frame
        output_frame._img = output_img  # it's ._img because .img has no setter
        writer.write_video_frame(output_frame)

        if input_source.get_frame_index() >= next_update_idx:
            progress = (input_source.get_frame_index() - export_from_index) / (
                export_to_index - export_from_index)
            yield "Exporting video", progress * 100.0
            next_update_idx += update_rate

    writer.close(timestamp_export_format)
    input_source.cleanup()
    yield "Exporting video completed", 100.0
def _convert_video_file(
    input_file,
    output_file,
    export_range,
    world_timestamps,
    process_frame,
    timestamp_export_format,
):
    yield "Export video", 0.0
    input_source = File_Source(SimpleNamespace(), input_file, fill_gaps=True)
    if not input_source.initialised:
        yield "Exporting video failed", 0.0
        return

    # yield progress results two times per second
    update_rate = int(input_source.frame_rate / 2)

    export_start, export_stop = export_range  # export_stop is exclusive
    export_window = pm.exact_window(world_timestamps, (export_start, export_stop - 1))
    (export_from_index, export_to_index) = pm.find_closest(
        input_source.timestamps, export_window
    )

    writer = AV_Writer(
        output_file, fps=input_source.frame_rate, audio_dir=None, use_timestamps=True
    )

    input_source.seek_to_frame(export_from_index)
    next_update_idx = export_from_index + update_rate
    while True:
        try:
            input_frame = input_source.get_frame()
        except EndofVideoError:
            break
        if input_frame.index >= export_to_index:
            break

        output_img = process_frame(input_source, input_frame)
        output_frame = input_frame
        output_frame._img = output_img  # it's ._img because .img has no setter
        writer.write_video_frame(output_frame)

        if input_source.get_frame_index() >= next_update_idx:
            progress = (input_source.get_frame_index() - export_from_index) / (
                export_to_index - export_from_index
            )
            yield "Exporting video", progress * 100.0
            next_update_idx += update_rate

    writer.close(timestamp_export_format)
    input_source.cleanup()
    yield "Exporting video completed", 100.0
def _precomputed_eye_data_for_range(self, export_range):
    export_window = pm.exact_window(self.g_pool.timestamps, export_range)
    pre_computed = {
        "gaze": self.g_pool.gaze_positions,
        "pupil": self.g_pool.pupil_positions,
        "fixations": self.g_pool.fixations,
    }

    for key, bisector in pre_computed.items():
        init_dict = bisector.init_dict_for_window(export_window)
        init_dict["data"] = [datum.serialized for datum in init_dict["data"]]
        pre_computed[key] = init_dict

    return pre_computed
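# NOTE: `init_dict_for_window` is the Bisector method these exporters rely on;
# from its use here and above it returns parallel "data"/"data_ts" lists
# restricted to a timestamp window. An illustrative, self-contained sketch of
# that contract, assuming data_ts is sorted ascending (the names mirror the
# usage, the body is a guess, not the real Bisector implementation):
import bisect


def init_dict_for_window(data, data_ts, ts_window):
    start_ts, stop_ts = ts_window
    lo = bisect.bisect_left(data_ts, start_ts)
    hi = bisect.bisect_right(data_ts, stop_ts)
    return {"data": data[lo:hi], "data_ts": data_ts[lo:hi]}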
def _precomputed_eye_data_for_range(self, export_range):
    export_window = pm.exact_window(self.g_pool.timestamps, export_range)
    pre_computed = {
        "gaze": self.g_pool.gaze_positions,
        "pupil": self.g_pool.pupil_positions,
        "pupil_by_id_0": self.g_pool.pupil_positions_by_id[0],
        "pupil_by_id_1": self.g_pool.pupil_positions_by_id[1],
        "fixations": self.g_pool.fixations,
    }

    for key, bisector in pre_computed.items():
        init_dict = bisector.init_dict_for_window(export_window)
        init_dict["data"] = [datum.serialized for datum in init_dict["data"]]
        pre_computed[key] = init_dict

    return pre_computed
def precomputed_for_range(self, export_range):
    export_window = pm.exact_window(self.g_pool.timestamps, export_range)
    pre_computed = {
        'gaze': self.g_pool.gaze_positions,
        'pupil': self.g_pool.pupil_positions,
        'fixations': self.g_pool.fixations
    }

    for key, bisector in pre_computed.items():
        init_dict = bisector.init_dict_for_window(export_window)
        init_dict['data'] = [datum.serialized for datum in init_dict['data']]
        pre_computed[key] = init_dict

    return pre_computed
def export_annotations(self, export_range, export_dir):
    export_window = pm.exact_window(self.g_pool.timestamps, export_range)
    annotation_section = self.annotations.init_dict_for_window(export_window)
    annotation_idc = pm.find_closest(self.g_pool.timestamps, annotation_section["data_ts"])
    csv_keys = self.parse_csv_keys(annotation_section["data"])

    with open(
        os.path.join(export_dir, "annotations.csv"),
        "w",
        encoding="utf-8",
        newline="",
    ) as csv_file:
        csv_writer = csv.writer(csv_file)
        csv_writer.writerow(csv_keys)
        for annotation, idx in zip(annotation_section["data"], annotation_idc):
            csv_row = [idx]
            csv_row.extend(annotation.get(k, "") for k in csv_keys[1:])
            csv_writer.writerow(csv_row)
        logger.info("Created 'annotations.csv' file.")
def segments_in_range(self, range) -> t.Iterable[Classified_Segment]:
    range_window = pm.exact_window(self._timestamps, range)
    return self.segments_in_timestamp_window(range_window)
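# NOTE: `pm.exact_window` appears in every snippet here. Judging from its use,
# it maps an inclusive (start_index, end_index) frame range to the matching
# (start_ts, stop_ts) timestamp window. A minimal sketch of that assumed
# behavior (the real `player_methods` implementation may differ in detail):
def exact_window(timestamps, index_range):
    start_index, end_index = index_range
    end_index = min(end_index, len(timestamps) - 1)  # clamp to available frames
    return (timestamps[start_index], timestamps[end_index])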
def calibrate_section(self, sec):
    if "bg_task" in sec:
        sec["bg_task"].cancel()

    sec["status"] = "Starting calibration"  # This will be overwritten on success
    try:
        sec["gaze"].clear()
        sec["gaze_ts"].clear()
    except KeyError:
        sec["gaze"] = collections.deque()
        sec["gaze_ts"] = collections.deque()

    calibration_window = pm.exact_window(self.g_pool.timestamps, sec["calibration_range"])
    mapping_window = pm.exact_window(self.g_pool.timestamps, sec["mapping_range"])
    calibration_pupil_pos = self.g_pool.pupil_positions.by_ts_window(calibration_window)
    mapping_pupil_pos = self.g_pool.pupil_positions.by_ts_window(mapping_window)

    if sec["calibration_method"] == "circle_marker":
        ref_list = self.circle_marker_positions
    elif sec["calibration_method"] == "natural_features":
        ref_list = self.manual_ref_positions

    start = sec["calibration_range"][0]
    end = sec["calibration_range"][1]
    ref_list = [r for r in ref_list if start <= r["index"] <= end]

    if not len(calibration_pupil_pos):
        logger.error('No pupil data to calibrate section "{}"'.format(
            self.sections.index(sec) + 1))
        sec["status"] = "Calibration failed. Not enough pupil positions."
        return

    if not ref_list:
        logger.error('No reference marker data to calibrate section "{}"'.format(
            self.sections.index(sec) + 1))
        sec["status"] = "Calibration failed. Not enough reference positions."
        return

    # select median pupil datum from calibration list and use its detection
    # method as mapping method
    if (sec["mapping_method"] == "3d"
            and "2d" in calibration_pupil_pos[len(calibration_pupil_pos) // 2]["method"]):
        logger.warning("Pupil data is 2d, calibration and mapping mode forced to 2d.")
        sec["mapping_method"] = "2d"

    fake = setup_fake_pool(
        self.g_pool.capture.frame_size,
        self.g_pool.capture.intrinsics,
        sec["mapping_method"],
        self.g_pool.rec_dir,
        self.g_pool.min_calibration_confidence,
    )
    calibration_pupil_pos = [pp.serialized for pp in calibration_pupil_pos]
    mapping_pupil_pos = [pp.serialized for pp in mapping_pupil_pos]

    generator_args = (
        fake,
        ref_list,
        calibration_pupil_pos,
        mapping_pupil_pos,
        sec["x_offset"],
        sec["y_offset"],
    )

    logger.info("Calibrating section {} ({}) in {} mode...".format(
        self.sections.index(sec) + 1, sec["label"], sec["mapping_method"]))
    sec["bg_task"] = bh.IPC_Logging_Task_Proxy(
        "Calibration Section {}".format(self.sections.index(sec) + 1),
        calibrate_and_map,
        args=generator_args,
    )
def _write_gaze_data(gaze_positions, destination_folder, export_range,
                     timestamps, capture):
    global user_warned_3d_only
    with open(os.path.join(destination_folder, "gaze.tlv"), "w",
              encoding="utf-8", newline="") as csv_file:
        csv_writer = csv.writer(csv_file, delimiter="\t")

        csv_writer.writerow((
            "GazeTimeStamp",
            "MediaTimeStamp",
            "MediaFrameIndex",
            "Gaze3dX",
            "Gaze3dY",
            "Gaze3dZ",
            "Gaze2dX",
            "Gaze2dY",
            "PupilDiaLeft",
            "PupilDiaRight",
            "Confidence",
        ))

        export_start, export_stop = export_range  # export_stop is exclusive
        export_window = pm.exact_window(timestamps, (export_start, export_stop - 1))
        gaze_section = gaze_positions.init_dict_for_window(export_window)

        # find closest world idx for each gaze datum
        gaze_world_idc = pm.find_closest(timestamps, gaze_section["data_ts"])

        for gaze_pos, media_idx in zip(gaze_section["data"], gaze_world_idc):
            media_timestamp = timestamps[media_idx]
            try:
                pupil_dia = {}
                for p in gaze_pos["base_data"]:
                    pupil_dia[p["id"]] = p["diameter_3d"]

                pixel_pos = denormalize(gaze_pos["norm_pos"],
                                        capture.frame_size, flip_y=True)
                undistorted3d = capture.intrinsics.unprojectPoints(pixel_pos)
                undistorted2d = capture.intrinsics.projectPoints(
                    undistorted3d, use_distortion=False)

                data = (
                    gaze_pos["timestamp"],
                    media_timestamp,
                    media_idx - export_range[0],
                    *gaze_pos["gaze_point_3d"],  # Gaze3dX/Y/Z
                    *undistorted2d.flat,  # Gaze2dX/Y
                    pupil_dia.get(1, 0.0),  # PupilDiaLeft
                    pupil_dia.get(0, 0.0),  # PupilDiaRight
                    gaze_pos["confidence"],  # Confidence
                )
            except KeyError:
                if not user_warned_3d_only:
                    logger.error(
                        "Currently, the iMotions export only supports 3d gaze data"
                    )
                    user_warned_3d_only = True
                continue
            csv_writer.writerow(data)
def save_surface_statsics_to_file(self, export_range, export_dir):
    """
    between in and out mark

    report: gaze distribution:
        - total gaze points
        - gaze points on surface x
        - gaze points not on any surface

    report: surface visibility
        - total frames
        - surface x visible frame count

    surface events:
        frame_no, ts, surface "name", "id" enter/exit

    for each surface:
        fixations_on_name.csv
        gaze_on_name_id.csv
        positions_of_name_id.csv
    """
    metrics_dir = os.path.join(export_dir, "surfaces")
    section = slice(*export_range)
    in_mark = section.start
    out_mark = section.stop
    logger.info("exporting metrics to {}".format(metrics_dir))
    if os.path.isdir(metrics_dir):
        logger.info("Will overwrite previous export for this section")
    else:
        try:
            os.mkdir(metrics_dir)
        except OSError:
            logger.warning("Could not make metrics dir {}".format(metrics_dir))
            return

    with open(
        os.path.join(metrics_dir, "surface_visibility.csv"),
        "w",
        encoding="utf-8",
        newline="",
    ) as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=",")

        # surface visibility report
        frame_count = len(self.g_pool.timestamps[section])

        csv_writer.writerow(("frame_count", frame_count))
        csv_writer.writerow(())
        csv_writer.writerow(("surface_name", "visible_frame_count"))
        for s in self.surfaces:
            if s.cache is None:
                logger.warning(
                    "The surface is not cached. Please wait for the cacher to collect data."
                )
                return
            visible_count = s.visible_count_in_section(section)
            csv_writer.writerow((s.name, visible_count))
        logger.info("Created 'surface_visibility.csv' file")

    with open(
        os.path.join(metrics_dir, "surface_gaze_distribution.csv"),
        "w",
        encoding="utf-8",
        newline="",
    ) as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=",")

        # gaze distribution report
        export_window = pm.exact_window(self.g_pool.timestamps, export_range)
        gaze_in_section = self.g_pool.gaze_positions.by_ts_window(export_window)
        not_on_any_srf = {gp["timestamp"] for gp in gaze_in_section}

        csv_writer.writerow(("total_gaze_point_count", len(gaze_in_section)))
        csv_writer.writerow(())
        csv_writer.writerow(("surface_name", "gaze_count"))
        for s in self.surfaces:
            gaze_on_srf = s.gaze_on_srf_in_section(section)
            gaze_on_srf = {gp["base_data"]["timestamp"] for gp in gaze_on_srf}
            not_on_any_srf -= gaze_on_srf
            csv_writer.writerow((s.name, len(gaze_on_srf)))

        csv_writer.writerow(("not_on_any_surface", len(not_on_any_srf)))
        logger.info("Created 'surface_gaze_distribution.csv' file")

    with open(
        os.path.join(metrics_dir, "surface_events.csv"),
        "w",
        encoding="utf-8",
        newline="",
    ) as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=",")

        # surface events report
        csv_writer.writerow(
            ("frame_number", "timestamp", "surface_name", "surface_uid", "event_type")
        )

        events = []
        for s in self.surfaces:
            for enter_frame_id, exit_frame_id in s.cache.positive_ranges:
                events.append(
                    {
                        "frame_id": enter_frame_id,
                        "srf_name": s.name,
                        "srf_uid": s.uid,
                        "event": "enter",
                    }
                )
                events.append(
                    {
                        "frame_id": exit_frame_id,
                        "srf_name": s.name,
                        "srf_uid": s.uid,
                        "event": "exit",
                    }
                )
        events.sort(key=lambda x: x["frame_id"])
        for e in events:
            csv_writer.writerow(
                (
                    e["frame_id"],
                    self.g_pool.timestamps[e["frame_id"]],
                    e["srf_name"],
                    e["srf_uid"],
                    e["event"],
                )
            )
        logger.info("Created 'surface_events.csv' file")

    for s in self.surfaces:
        # per surface names:
        surface_name = "_" + s.name.replace("/", "") + "_" + s.uid

        # save surface_positions as csv
        with open(
            os.path.join(metrics_dir, "srf_positons" + surface_name + ".csv"),
            "w",
            encoding="utf-8",
            newline="",
        ) as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=",")
            csv_writer.writerow(
                ("frame_idx", "timestamp", "m_to_screen", "m_from_screen",
                 "detected_markers")
            )
            for idx, ts, ref_srf_data in zip(
                range(len(self.g_pool.timestamps)), self.g_pool.timestamps, s.cache
            ):
                if in_mark <= idx < out_mark:
                    if ref_srf_data is not None and ref_srf_data is not False:
                        csv_writer.writerow(
                            (
                                idx,
                                ts,
                                ref_srf_data["m_to_screen"],
                                ref_srf_data["m_from_screen"],
                                ref_srf_data["detected_markers"],
                            )
                        )

        # save gaze on srf as csv.
        with open(
            os.path.join(
                metrics_dir, "gaze_positions_on_surface" + surface_name + ".csv"
            ),
            "w",
            encoding="utf-8",
            newline="",
        ) as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=",")
            csv_writer.writerow(
                (
                    "world_timestamp",
                    "world_frame_idx",
                    "gaze_timestamp",
                    "x_norm",
                    "y_norm",
                    "x_scaled",
                    "y_scaled",
                    "on_srf",
                    "confidence",
                )
            )
            for idx, ts, ref_srf_data in zip(
                range(len(self.g_pool.timestamps)), self.g_pool.timestamps, s.cache
            ):
                if in_mark <= idx < out_mark:
                    if ref_srf_data is not None and ref_srf_data is not False:
                        for gp in s.gaze_on_srf_by_frame_idx(
                            idx, ref_srf_data["m_from_screen"]
                        ):
                            csv_writer.writerow(
                                (
                                    ts,
                                    idx,
                                    gp["base_data"]["timestamp"],
                                    gp["norm_pos"][0],
                                    gp["norm_pos"][1],
                                    gp["norm_pos"][0] * s.real_world_size["x"],
                                    gp["norm_pos"][1] * s.real_world_size["y"],
                                    gp["on_srf"],
                                    gp["confidence"],
                                )
                            )

        # save fixation on srf as csv.
        with open(
            os.path.join(metrics_dir, "fixations_on_surface" + surface_name + ".csv"),
            "w",
            encoding="utf-8",
            newline="",
        ) as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=",")
            csv_writer.writerow(
                (
                    "id",
                    "start_timestamp",
                    "duration",
                    "start_frame",
                    "end_frame",
                    "norm_pos_x",
                    "norm_pos_y",
                    "x_scaled",
                    "y_scaled",
                    "on_srf",
                )
            )
            fixations_on_surface = []
            for idx, ref_srf_data in zip(
                range(len(self.g_pool.timestamps)), s.cache
            ):
                if in_mark <= idx < out_mark:
                    if ref_srf_data is not None and ref_srf_data is not False:
                        for f in s.fixations_on_srf_by_frame_idx(
                            idx, ref_srf_data["m_from_screen"]
                        ):
                            fixations_on_surface.append(f)
            removed_duplicates = dict(
                (f["base_data"]["id"], f) for f in fixations_on_surface
            ).values()
            for f_on_s in removed_duplicates:
                f = f_on_s["base_data"]
                f_x, f_y = f_on_s["norm_pos"]
                f_on_srf = f_on_s["on_srf"]
                csv_writer.writerow(
                    (
                        f["id"],
                        f["timestamp"],
                        f["duration"],
                        f["start_frame_index"],
                        f["end_frame_index"],
                        f_x,
                        f_y,
                        f_x * s.real_world_size["x"],
                        f_y * s.real_world_size["y"],
                        f_on_srf,
                    )
                )

        logger.info(
            "Saved surface position, gaze, and fixation data for '{}' with uid:'{}'".format(
                s.name, s.uid
            )
        )

        if s.heatmap is not None:
            cv2.imwrite(
                os.path.join(metrics_dir, "heatmap" + surface_name + ".png"),
                s.heatmap,
            )
            logger.info("Saved Heatmap as .png file.")

    logger.info("Done exporting reference surface data.")
def export_data(self, export_range, export_dir):
    export_window = pm.exact_window(self.g_pool.timestamps, export_range)

    with open(os.path.join(export_dir, 'pupil_positions.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=',')
        csv_writer.writerow(
            ('timestamp', 'index', 'id', 'confidence', 'norm_pos_x',
             'norm_pos_y', 'diameter', 'method', 'ellipse_center_x',
             'ellipse_center_y', 'ellipse_axis_a', 'ellipse_axis_b',
             'ellipse_angle', 'diameter_3d', 'model_confidence', 'model_id',
             'sphere_center_x', 'sphere_center_y', 'sphere_center_z',
             'sphere_radius', 'circle_3d_center_x', 'circle_3d_center_y',
             'circle_3d_center_z', 'circle_3d_normal_x', 'circle_3d_normal_y',
             'circle_3d_normal_z', 'circle_3d_radius', 'theta', 'phi',
             'projected_sphere_center_x', 'projected_sphere_center_y',
             'projected_sphere_axis_a', 'projected_sphere_axis_b',
             'projected_sphere_angle'))

        pupil_section = self.g_pool.pupil_positions.init_dict_for_window(export_window)
        pupil_world_idc = pm.find_closest(self.g_pool.timestamps, pupil_section['data_ts'])
        for p, idx in zip(pupil_section['data'], pupil_world_idc):
            data_2d = [
                '{}'.format(p['timestamp']),  # use str to be consistent with csv lib.
                idx,
                p['id'],
                p['confidence'],
                p['norm_pos'][0],
                p['norm_pos'][1],
                p['diameter'],
                p['method'],
            ]
            try:
                ellipse_data = [
                    p['ellipse']['center'][0],
                    p['ellipse']['center'][1],
                    p['ellipse']['axes'][0],
                    p['ellipse']['axes'][1],
                    p['ellipse']['angle'],
                ]
            except KeyError:
                ellipse_data = [None] * 5
            try:
                data_3d = [
                    p['diameter_3d'],
                    p['model_confidence'],
                    p['model_id'],
                    p['sphere']['center'][0],
                    p['sphere']['center'][1],
                    p['sphere']['center'][2],
                    p['sphere']['radius'],
                    p['circle_3d']['center'][0],
                    p['circle_3d']['center'][1],
                    p['circle_3d']['center'][2],
                    p['circle_3d']['normal'][0],
                    p['circle_3d']['normal'][1],
                    p['circle_3d']['normal'][2],
                    p['circle_3d']['radius'],
                    p['theta'],
                    p['phi'],
                    p['projected_sphere']['center'][0],
                    p['projected_sphere']['center'][1],
                    p['projected_sphere']['axes'][0],
                    p['projected_sphere']['axes'][1],
                    p['projected_sphere']['angle'],
                ]
            except KeyError:
                data_3d = [None] * 21
            row = data_2d + ellipse_data + data_3d
            csv_writer.writerow(row)
        logger.info("Created 'pupil_positions.csv' file.")

    with open(os.path.join(export_dir, 'gaze_positions.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=',')
        csv_writer.writerow(
            ('timestamp', 'index', 'confidence', 'norm_pos_x', 'norm_pos_y',
             'base_data', 'gaze_point_3d_x', 'gaze_point_3d_y',
             'gaze_point_3d_z', 'eye_center0_3d_x', 'eye_center0_3d_y',
             'eye_center0_3d_z', 'gaze_normal0_x', 'gaze_normal0_y',
             'gaze_normal0_z', 'eye_center1_3d_x', 'eye_center1_3d_y',
             'eye_center1_3d_z', 'gaze_normal1_x', 'gaze_normal1_y',
             'gaze_normal1_z'))

        gaze_section = self.g_pool.gaze_positions.init_dict_for_window(export_window)
        gaze_world_idc = pm.find_closest(self.g_pool.timestamps, gaze_section['data_ts'])
        for g, idx in zip(gaze_section['data'], gaze_world_idc):
            data = [
                '{}'.format(g['timestamp']),  # use str on timestamp to be consistent with csv lib.
                idx,
                g['confidence'],
                g['norm_pos'][0],
                g['norm_pos'][1],
                ' '.join('{}-{}'.format(b['timestamp'], b['id'])
                         for b in g['base_data']),
            ]
            # add 3d data if available
            if g.get('gaze_point_3d', None) is not None:
                data_3d = [
                    g['gaze_point_3d'][0],
                    g['gaze_point_3d'][1],
                    g['gaze_point_3d'][2],
                ]
                # binocular
                if g.get('eye_centers_3d', None) is not None:
                    data_3d += g['eye_centers_3d'].get(0, [None, None, None])
                    data_3d += g['gaze_normals_3d'].get(0, [None, None, None])
                    data_3d += g['eye_centers_3d'].get(1, [None, None, None])
                    data_3d += g['gaze_normals_3d'].get(1, [None, None, None])
                # monocular
                elif g.get('eye_center_3d', None) is not None:
                    data_3d += g['eye_center_3d']
                    data_3d += g['gaze_normal_3d']
                    data_3d += [None] * 6
                else:
                    # pad so the row always matches the 21-column header
                    data_3d += [None] * 12
            else:
                data_3d = [None] * 15
            data += data_3d
            csv_writer.writerow(data)
        logger.info("Created 'gaze_positions.csv' file.")

    with open(os.path.join(export_dir, 'pupil_gaze_positions_info.txt'), 'w',
              encoding='utf-8', newline='') as info_file:
        info_file.write(self.__doc__)
def export_processed_h264(
    world_timestamps,
    unprocessed_video_loc,
    target_video_loc,
    export_range,
    process_frame,
    export_timestamps,
):
    yield "Converting video", 0.1
    capture = File_Source(Empty(), unprocessed_video_loc)
    if not capture.initialised:
        yield "Converting scene video failed", 0.0
        return

    export_window = pm.exact_window(world_timestamps, export_range)
    (export_from_index, export_to_index) = pm.find_closest(capture.timestamps, export_window)

    update_rate = 10
    start_time = None
    time_base = Fraction(1, 65535)

    target_container = av.open(target_video_loc, "w")
    video_stream = target_container.add_stream("mpeg4", 1 / time_base)
    video_stream.bit_rate = 150e6
    video_stream.bit_rate_tolerance = video_stream.bit_rate / 20
    video_stream.thread_count = max(1, mp.cpu_count() - 1)
    video_stream.width, video_stream.height = capture.frame_size

    av_frame = av.VideoFrame(*capture.frame_size, "bgr24")
    av_frame.time_base = time_base

    capture.seek_to_frame(export_from_index)
    next_update_idx = export_from_index + update_rate
    timestamps = []
    while True:
        try:
            frame = capture.get_frame()
        except EndofVideoError:
            break
        if frame.index > export_to_index:
            break

        if start_time is None:
            start_time = frame.timestamp

        undistorted_img = process_frame(capture, frame)
        av_frame.planes[0].update(undistorted_img)
        av_frame.pts = int((frame.timestamp - start_time) / time_base)

        if export_timestamps:
            timestamps.append(frame.timestamp)

        packet = video_stream.encode(av_frame)
        if packet:
            target_container.mux(packet)

        if capture.current_frame_idx >= next_update_idx:
            progress = (
                (capture.current_frame_idx - export_from_index)
                / (export_to_index - export_from_index)
            ) * 0.9 + 0.1
            yield "Converting video", progress * 100.0
            next_update_idx += update_rate

    while True:  # flush encoder
        packet = video_stream.encode()
        if packet:
            target_container.mux(packet)
        else:
            break

    if export_timestamps:
        write_timestamps(target_video_loc, timestamps)

    target_container.close()
    capture.cleanup()
    yield "Converting video completed", 100.0
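# NOTE: `write_timestamps` is assumed here to store per-frame timestamps next
# to the exported video, in the way Pupil recordings pair "world.mp4" with
# "world_timestamps.npy". A sketch under that assumption (illustrative body):
import os

import numpy as np


def write_timestamps(video_path, timestamps):
    ts_path = os.path.splitext(video_path)[0] + "_timestamps.npy"
    np.save(ts_path, np.asarray(timestamps))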
def save_surface_statsics_to_file(self, export_range, export_dir):
    """
    between in and out mark

    report: gaze distribution:
        - total gaze points
        - gaze points on surface x
        - gaze points not on any surface

    report: surface visibility
        - total frames
        - surface x visible frame count

    surface events:
        frame_no, ts, surface "name", "id" enter/exit

    for each surface:
        fixations_on_name.csv
        gaze_on_name_id.csv
        positions_of_name_id.csv
    """
    metrics_dir = os.path.join(export_dir, 'surfaces')
    section = slice(*export_range)
    in_mark = section.start
    out_mark = section.stop
    logger.info("exporting metrics to {}".format(metrics_dir))
    if os.path.isdir(metrics_dir):
        logger.info("Will overwrite previous export for this section")
    else:
        try:
            os.mkdir(metrics_dir)
        except OSError:
            logger.warning("Could not make metrics dir {}".format(metrics_dir))
            return

    with open(os.path.join(metrics_dir, 'surface_visibility.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=',')

        # surface visibility report
        frame_count = len(self.g_pool.timestamps[section])

        csv_writer.writerow(('frame_count', frame_count))
        csv_writer.writerow(())
        csv_writer.writerow(('surface_name', 'visible_frame_count'))
        for s in self.surfaces:
            if s.cache is None:
                logger.warning(
                    "The surface is not cached. Please wait for the cacher to collect data."
                )
                return
            visible_count = s.visible_count_in_section(section)
            csv_writer.writerow((s.name, visible_count))
        logger.info("Created 'surface_visibility.csv' file")

    with open(os.path.join(metrics_dir, 'surface_gaze_distribution.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=',')

        # gaze distribution report
        export_window = pm.exact_window(self.g_pool.timestamps, export_range)
        gaze_in_section = self.g_pool.gaze_positions.by_ts_window(export_window)
        not_on_any_srf = {gp['timestamp'] for gp in gaze_in_section}

        csv_writer.writerow(('total_gaze_point_count', len(gaze_in_section)))
        csv_writer.writerow(())
        csv_writer.writerow(('surface_name', 'gaze_count'))
        for s in self.surfaces:
            gaze_on_srf = s.gaze_on_srf_in_section(section)
            gaze_on_srf = {gp['base_data']['timestamp'] for gp in gaze_on_srf}
            not_on_any_srf -= gaze_on_srf
            csv_writer.writerow((s.name, len(gaze_on_srf)))

        csv_writer.writerow(('not_on_any_surface', len(not_on_any_srf)))
        logger.info("Created 'surface_gaze_distribution.csv' file")

    with open(os.path.join(metrics_dir, 'surface_events.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=',')

        # surface events report
        csv_writer.writerow(('frame_number', 'timestamp', 'surface_name',
                             'surface_uid', 'event_type'))

        events = []
        for s in self.surfaces:
            for enter_frame_id, exit_frame_id in s.cache.positive_ranges:
                events.append({
                    'frame_id': enter_frame_id,
                    'srf_name': s.name,
                    'srf_uid': s.uid,
                    'event': 'enter'
                })
                events.append({
                    'frame_id': exit_frame_id,
                    'srf_name': s.name,
                    'srf_uid': s.uid,
                    'event': 'exit'
                })
        events.sort(key=lambda x: x['frame_id'])
        for e in events:
            csv_writer.writerow((e['frame_id'],
                                 self.g_pool.timestamps[e['frame_id']],
                                 e['srf_name'], e['srf_uid'], e['event']))
        logger.info("Created 'surface_events.csv' file")

    for s in self.surfaces:
        # per surface names:
        surface_name = '_' + s.name.replace('/', '') + '_' + s.uid

        # save surface_positions as csv
        with open(os.path.join(metrics_dir,
                               'srf_positons' + surface_name + '.csv'),
                  'w', encoding='utf-8', newline='') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=',')
            csv_writer.writerow(('frame_idx', 'timestamp', 'm_to_screen',
                                 'm_from_screen', 'detected_markers'))
            for idx, ts, ref_srf_data in zip(range(len(self.g_pool.timestamps)),
                                             self.g_pool.timestamps, s.cache):
                if in_mark <= idx < out_mark:
                    if ref_srf_data is not None and ref_srf_data is not False:
                        csv_writer.writerow((idx, ts,
                                             ref_srf_data['m_to_screen'],
                                             ref_srf_data['m_from_screen'],
                                             ref_srf_data['detected_markers']))

        # save gaze on srf as csv.
        with open(os.path.join(metrics_dir,
                               'gaze_positions_on_surface' + surface_name + '.csv'),
                  'w', encoding='utf-8', newline='') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=',')
            csv_writer.writerow(('world_timestamp', 'world_frame_idx',
                                 'gaze_timestamp', 'x_norm', 'y_norm',
                                 'x_scaled', 'y_scaled', 'on_srf', 'confidence'))
            for idx, ts, ref_srf_data in zip(range(len(self.g_pool.timestamps)),
                                             self.g_pool.timestamps, s.cache):
                if in_mark <= idx < out_mark:
                    if ref_srf_data is not None and ref_srf_data is not False:
                        for gp in s.gaze_on_srf_by_frame_idx(
                                idx, ref_srf_data['m_from_screen']):
                            csv_writer.writerow((
                                ts, idx, gp['base_data']['timestamp'],
                                gp['norm_pos'][0], gp['norm_pos'][1],
                                gp['norm_pos'][0] * s.real_world_size['x'],
                                gp['norm_pos'][1] * s.real_world_size['y'],
                                gp['on_srf'], gp['confidence']))

        # save fixation on srf as csv.
        with open(os.path.join(metrics_dir,
                               'fixations_on_surface' + surface_name + '.csv'),
                  'w', encoding='utf-8', newline='') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=',')
            csv_writer.writerow(('id', 'start_timestamp', 'duration',
                                 'start_frame', 'end_frame', 'norm_pos_x',
                                 'norm_pos_y', 'x_scaled', 'y_scaled', 'on_srf'))
            fixations_on_surface = []
            for idx, ref_srf_data in zip(range(len(self.g_pool.timestamps)),
                                         s.cache):
                if in_mark <= idx < out_mark:
                    if ref_srf_data is not None and ref_srf_data is not False:
                        for f in s.fixations_on_srf_by_frame_idx(
                                idx, ref_srf_data['m_from_screen']):
                            fixations_on_surface.append(f)
            removed_duplicates = dict(
                (f['base_data']['id'], f) for f in fixations_on_surface
            ).values()
            for f_on_s in removed_duplicates:
                f = f_on_s['base_data']
                f_x, f_y = f_on_s['norm_pos']
                f_on_srf = f_on_s['on_srf']
                csv_writer.writerow((
                    f['id'], f['timestamp'], f['duration'],
                    f['start_frame_index'], f['end_frame_index'],
                    f_x, f_y,
                    f_x * s.real_world_size['x'],
                    f_y * s.real_world_size['y'],
                    f_on_srf))

        logger.info("Saved surface position, gaze, and fixation data for '{}' "
                    "with uid:'{}'".format(s.name, s.uid))

        if s.heatmap is not None:
            cv2.imwrite(os.path.join(metrics_dir,
                                     'heatmap' + surface_name + '.png'),
                        s.heatmap)
            logger.info("Saved Heatmap as .png file.")

    logger.info("Done exporting reference surface data.")
def _write_gaze_data(
    gaze_positions, destination_folder, export_range, timestamps, capture
):
    global user_warned_3d_only
    with open(
        os.path.join(destination_folder, "gaze.tlv"), "w", encoding="utf-8", newline=""
    ) as csv_file:
        csv_writer = csv.writer(csv_file, delimiter="\t")

        csv_writer.writerow(
            (
                "GazeTimeStamp",
                "MediaTimeStamp",
                "MediaFrameIndex",
                "Gaze3dX",
                "Gaze3dY",
                "Gaze3dZ",
                "Gaze2dX",
                "Gaze2dY",
                "PupilDiaLeft",
                "PupilDiaRight",
                "Confidence",
            )
        )

        export_start, export_stop = export_range  # export_stop is exclusive
        export_window = pm.exact_window(timestamps, (export_start, export_stop - 1))
        gaze_section = gaze_positions.init_dict_for_window(export_window)

        # find closest world idx for each gaze datum
        gaze_world_idc = pm.find_closest(timestamps, gaze_section["data_ts"])

        for gaze_pos, media_idx in zip(gaze_section["data"], gaze_world_idc):
            media_timestamp = timestamps[media_idx]
            try:
                pupil_dia = {}
                for p in gaze_pos["base_data"]:
                    pupil_dia[p["id"]] = p["diameter_3d"]

                pixel_pos = denormalize(
                    gaze_pos["norm_pos"], capture.frame_size, flip_y=True
                )
                undistorted3d = capture.intrinsics.unprojectPoints(pixel_pos)
                undistorted2d = capture.intrinsics.projectPoints(
                    undistorted3d, use_distortion=False
                )

                data = (
                    gaze_pos["timestamp"],
                    media_timestamp,
                    media_idx - export_range[0],
                    *gaze_pos["gaze_point_3d"],  # Gaze3dX/Y/Z
                    *undistorted2d.flat,  # Gaze2dX/Y
                    pupil_dia.get(1, 0.0),  # PupilDiaLeft
                    pupil_dia.get(0, 0.0),  # PupilDiaRight
                    gaze_pos["confidence"],  # Confidence
                )
            except KeyError:
                if not user_warned_3d_only:
                    logger.error(
                        "Currently, the iMotions export only supports 3d gaze data"
                    )
                    user_warned_3d_only = True
                continue
            csv_writer.writerow(data)
def export_data(self, export_range, export_dir):
    export_window = pm.exact_window(self.g_pool.timestamps, export_range)

    if self.should_export_pupil_positions:
        with open(
            os.path.join(export_dir, "pupil_positions.csv"),
            "w",
            encoding="utf-8",
            newline="",
        ) as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=",")
            csv_writer.writerow(
                (
                    "timestamp",
                    "index",
                    "id",
                    "confidence",
                    "norm_pos_x",
                    "norm_pos_y",
                    "diameter",
                    "method",
                    "ellipse_center_x",
                    "ellipse_center_y",
                    "ellipse_axis_a",
                    "ellipse_axis_b",
                    "ellipse_angle",
                    "diameter_3d",
                    "model_confidence",
                    "model_id",
                    "sphere_center_x",
                    "sphere_center_y",
                    "sphere_center_z",
                    "sphere_radius",
                    "circle_3d_center_x",
                    "circle_3d_center_y",
                    "circle_3d_center_z",
                    "circle_3d_normal_x",
                    "circle_3d_normal_y",
                    "circle_3d_normal_z",
                    "circle_3d_radius",
                    "theta",
                    "phi",
                    "projected_sphere_center_x",
                    "projected_sphere_center_y",
                    "projected_sphere_axis_a",
                    "projected_sphere_axis_b",
                    "projected_sphere_angle",
                )
            )

            pupil_section = self.g_pool.pupil_positions.init_dict_for_window(
                export_window
            )
            pupil_world_idc = pm.find_closest(
                self.g_pool.timestamps, pupil_section["data_ts"]
            )
            for p, idx in zip(pupil_section["data"], pupil_world_idc):
                data_2d = [
                    "{}".format(p["timestamp"]),  # use str to be consistent with csv lib.
                    idx,
                    p["id"],
                    p["confidence"],
                    p["norm_pos"][0],
                    p["norm_pos"][1],
                    p["diameter"],
                    p["method"],
                ]
                try:
                    ellipse_data = [
                        p["ellipse"]["center"][0],
                        p["ellipse"]["center"][1],
                        p["ellipse"]["axes"][0],
                        p["ellipse"]["axes"][1],
                        p["ellipse"]["angle"],
                    ]
                except KeyError:
                    ellipse_data = [None] * 5
                try:
                    data_3d = [
                        p["diameter_3d"],
                        p["model_confidence"],
                        p["model_id"],
                        p["sphere"]["center"][0],
                        p["sphere"]["center"][1],
                        p["sphere"]["center"][2],
                        p["sphere"]["radius"],
                        p["circle_3d"]["center"][0],
                        p["circle_3d"]["center"][1],
                        p["circle_3d"]["center"][2],
                        p["circle_3d"]["normal"][0],
                        p["circle_3d"]["normal"][1],
                        p["circle_3d"]["normal"][2],
                        p["circle_3d"]["radius"],
                        p["theta"],
                        p["phi"],
                        p["projected_sphere"]["center"][0],
                        p["projected_sphere"]["center"][1],
                        p["projected_sphere"]["axes"][0],
                        p["projected_sphere"]["axes"][1],
                        p["projected_sphere"]["angle"],
                    ]
                except KeyError:
                    data_3d = [None] * 21
                row = data_2d + ellipse_data + data_3d
                csv_writer.writerow(row)
            logger.info("Created 'pupil_positions.csv' file.")

    if self.should_export_gaze_positions:
        with open(
            os.path.join(export_dir, "gaze_positions.csv"),
            "w",
            encoding="utf-8",
            newline="",
        ) as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=",")
            csv_writer.writerow(
                (
                    "timestamp",
                    "index",
                    "confidence",
                    "norm_pos_x",
                    "norm_pos_y",
                    "base_data",
                    "gaze_point_3d_x",
                    "gaze_point_3d_y",
                    "gaze_point_3d_z",
                    "eye_center0_3d_x",
                    "eye_center0_3d_y",
                    "eye_center0_3d_z",
                    "gaze_normal0_x",
                    "gaze_normal0_y",
                    "gaze_normal0_z",
                    "eye_center1_3d_x",
                    "eye_center1_3d_y",
                    "eye_center1_3d_z",
                    "gaze_normal1_x",
                    "gaze_normal1_y",
                    "gaze_normal1_z",
                )
            )

            gaze_section = self.g_pool.gaze_positions.init_dict_for_window(
                export_window
            )
            gaze_world_idc = pm.find_closest(
                self.g_pool.timestamps, gaze_section["data_ts"]
            )
            for g, idx in zip(gaze_section["data"], gaze_world_idc):
                data = [
                    "{}".format(g["timestamp"]),  # use str on timestamp to be consistent with csv lib.
                    idx,
                    g["confidence"],
                    g["norm_pos"][0],
                    g["norm_pos"][1],
                    " ".join(
                        "{}-{}".format(b["timestamp"], b["id"])
                        for b in g["base_data"]
                    ),
                ]
                # add 3d data if available
                if g.get("gaze_point_3d", None) is not None:
                    data_3d = [
                        g["gaze_point_3d"][0],
                        g["gaze_point_3d"][1],
                        g["gaze_point_3d"][2],
                    ]
                    # binocular
                    if g.get("eye_centers_3d", None) is not None:
                        data_3d += g["eye_centers_3d"].get(0, [None, None, None])
                        data_3d += g["gaze_normals_3d"].get(0, [None, None, None])
                        data_3d += g["eye_centers_3d"].get(1, [None, None, None])
                        data_3d += g["gaze_normals_3d"].get(1, [None, None, None])
                    # monocular
                    elif g.get("eye_center_3d", None) is not None:
                        data_3d += g["eye_center_3d"]
                        data_3d += g["gaze_normal_3d"]
                        data_3d += [None] * 6
                    else:
                        # pad so the row always matches the 21-column header
                        data_3d += [None] * 12
                else:
                    data_3d = [None] * 15
                data += data_3d
                csv_writer.writerow(data)
            logger.info("Created 'gaze_positions.csv' file.")

    if self.should_export_field_info:
        with open(
            os.path.join(export_dir, "pupil_gaze_positions_info.txt"),
            "w",
            encoding="utf-8",
            newline="",
        ) as info_file:
            info_file.write(self.__doc__)