def update_recording_v14_v18(rec_dir):
    """Upgrade a recording's pupil data files from format v1.4 to v1.8.

    Renames legacy topic files, rewrites each datum's "topic" field to the
    v1.8 naming scheme, and bumps the recorded format version.
    """
    logger.info("Updating recording from v1.4 to v1.8")
    renamed_topics = {
        "notifications": "notify",
        "gaze_positions": "gaze",
        "pupil_positions": "pupil",
    }
    with fm.Incremental_Legacy_Pupil_Data_Loader(rec_dir) as loader:
        for legacy_topic, data_items in loader.topic_values_pairs():
            topic = renamed_topics.get(legacy_topic, legacy_topic)
            with fm.PLData_Writer(rec_dir, topic) as writer:
                for item in data_items:
                    # Rewrite the per-datum topic to the v1.8 scheme.
                    if topic == "notify":
                        item["topic"] = "notify." + item["subject"]
                    elif topic == "pupil":
                        item["topic"] += ".{}".format(item["id"])
                    elif topic.startswith("surface"):
                        item["topic"] = "surfaces." + item["name"]
                    elif topic in ("blinks", "fixations"):
                        item["topic"] += "s"
                    writer.append(item)
    _update_info_version_to("v1.8", rec_dir)
def update_recording_v14_v18(rec_dir):
    """Upgrade a recording's pupil data files from format v1.4 to v1.8.

    Renames legacy topic files, rewrites each datum's "topic" field, and
    updates the "Data Format Version" entry in the recording's info.csv.
    """
    logger.info("Updating recording from v1.4 to v1.8")
    topic_renames = {
        'notifications': 'notify',
        'gaze_positions': 'gaze',
        'pupil_positions': 'pupil',
    }
    with fm.Incremental_Legacy_Pupil_Data_Loader(rec_dir) as loader:
        for source_topic, data in loader.topic_values_pairs():
            target_topic = topic_renames.get(source_topic, source_topic)
            with fm.PLData_Writer(rec_dir, target_topic) as writer:
                for entry in data:
                    # Rewrite the per-datum topic to the v1.8 scheme.
                    if target_topic == 'notify':
                        entry['topic'] = 'notify.' + entry['subject']
                    elif target_topic == 'pupil':
                        entry['topic'] += '.{}'.format(entry['id'])
                    elif target_topic.startswith('surface'):
                        entry['topic'] = 'surfaces.' + entry['name']
                    elif target_topic in ('blinks', 'fixations'):
                        entry['topic'] += 's'
                    writer.append(entry)
    meta_info_path = os.path.join(rec_dir, "info.csv")
    with open(meta_info_path, 'r', encoding='utf-8') as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info['Data Format Version'] = 'v1.8'
    update_meta_info(rec_dir, meta_info)
def save_to_file(self, dir_path, filename):
    """Serialize every bisector's data into one PLData file at dir_path/filename."""
    with fm.PLData_Writer(dir_path, filename) as writer:
        for topic, bisector in self._bisectors.items():
            stamped_data = zip(bisector.timestamps, bisector.data)
            for ts, datum in stamped_data:
                writer.append_serialized(ts, topic, datum.serialized)
def cleanup(self):
    """Persist all loaded annotations to the recording directory on shutdown."""
    stamped_annotations = zip(self.annotations.timestamps, self.annotations)
    with fm.PLData_Writer(self.g_pool.rec_dir, "annotation_player") as writer:
        for timestamp, datum in stamped_annotations:
            writer.append_serialized(timestamp, "annotation", datum.serialized)
def update_recording_v14_v18(rec_dir):
    """Upgrade a recording's data files and meta info from v1.4 to v1.8."""
    logger.info("Updating recording from v1.4 to v1.8")

    def _rewrite_topic_field(datum, topic):
        # Rewrite the datum's "topic" field to the v1.8 naming scheme.
        if topic == "notify":
            datum["topic"] = "notify." + datum["subject"]
        elif topic == "pupil":
            datum["topic"] += ".{}".format(datum["id"])
        elif topic.startswith("surface"):
            datum["topic"] = "surfaces." + datum["name"]
        elif topic in ("blinks", "fixations"):
            datum["topic"] += "s"

    mapping = {
        "notifications": "notify",
        "gaze_positions": "gaze",
        "pupil_positions": "pupil",
    }
    with fm.Incremental_Legacy_Pupil_Data_Loader(rec_dir) as loader:
        for old_topic, values in loader.topic_values_pairs():
            new_topic = mapping.get(old_topic, old_topic)
            with fm.PLData_Writer(rec_dir, new_topic) as writer:
                for datum in values:
                    _rewrite_topic_field(datum, new_topic)
                    writer.append(datum)

    meta_info_path = os.path.join(rec_dir, "info.csv")
    with open(meta_info_path, "r", encoding="utf-8") as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info["Data Format Version"] = "v1.8"
    update_meta_info(rec_dir, meta_info)
def copy_recorded_annotations():
    """Copy annotations stored in the notify file into a dedicated annotation file."""
    logger.info("Version update: Copy recorded annotations.")
    notifications = fm.load_pldata_file(rec_dir, "notify")
    entries = zip(
        notifications.topics, notifications.data, notifications.timestamps
    )
    with fm.PLData_Writer(rec_dir, "annotation") as writer:
        for topic, datum, ts in entries:
            if topic == "notify.annotation":
                writer.append_serialized(ts, "annotation", datum.serialized)
def cleanup(self):
    """Called when the plugin gets terminated, voluntarily or forced.

    Persists all annotations to the cache directory; destroy any GUI or
    glfw window here if one exists.
    """
    stamped = zip(self.annotations.timestamps, self.annotations)
    with fm.PLData_Writer(self.cache_dir, 'annotations') as writer:
        for timestamp, datum in stamped:
            writer.append_serialized(timestamp, 'notify.annotation', datum.serialized)
def _save_to_file(self):
    """Write the cached poses to a PLData file in the offline data folder."""
    target_dir = self._offline_data_folder_path
    os.makedirs(target_dir, exist_ok=True)
    with fm.PLData_Writer(target_dir, self._pldata_file_name) as writer:
        bisector = self.pose_bisector
        for ts, pose in zip(bisector.timestamps, bisector.data):
            writer.append_serialized(ts, topic="pose", datum_serialized=pose.serialized)
def _save_gaze_and_ts_to_disk(self):
    """Persist each gaze mapper's gaze data to its own PLData file."""
    out_dir = self._gaze_mappings_directory
    os.makedirs(out_dir, exist_ok=True)
    for mapper in self._gaze_mappers:
        out_name = self._gaze_mapping_file_name(mapper)
        with fm.PLData_Writer(out_dir, out_name) as writer:
            for ts, gaze_datum in zip(mapper.gaze_ts, mapper.gaze):
                writer.append_serialized(
                    ts, topic="gaze", datum_serialized=gaze_datum.serialized
                )
def _save_to_file(self):
    """Write detected markers to disk, then the frame-index marker-count map."""
    out_dir = self._offline_data_folder_path
    os.makedirs(out_dir, exist_ok=True)
    marker_stream = zip(self.markers_bisector.timestamps, self.markers_bisector.data)
    with fm.PLData_Writer(out_dir, self._pldata_file_name) as writer:
        for ts, marker in marker_stream:
            writer.append_serialized(
                timestamp=ts, topic="", datum_serialized=marker.serialized
            )
    self._save_frame_index_to_num_markers()
def save_offline_data(self):
    """Cache detected pupil data and detection session metadata to disk."""
    with fm.PLData_Writer(self.data_dir, "offline_pupil") as writer:
        for ts, datum in self.pupil_positions.items():
            # Topic for each datum is registered under its timestamp.
            writer.append_serialized(ts, self.id_topics[ts], datum.serialized)
    session_data = {
        "detection_method": self.detection_method,
        "detection_status": self.detection_status,
        "version": self.session_data_version,
    }
    cache_path = os.path.join(self.data_dir, "offline_pupil.meta")
    fm.save_object(session_data, cache_path)
    logger.info("Cached detected pupil data to {}".format(cache_path))
def save_offline_data(self):
    """Persist detected fixations, their stop timestamps, and cache metadata."""
    with fm.PLData_Writer(self.data_dir, "fixations") as writer:
        for start_ts, fixation in zip(self.fixation_start_ts, self.fixation_data):
            writer.append_serialized(start_ts, "fixation", fixation.serialized)
    stop_ts_path = os.path.join(self.data_dir, "fixations_stop_timestamps.npy")
    np.save(stop_ts_path, self.fixation_stop_ts)
    meta = {
        "version": self.CACHE_VERSION,
        "config": self._cache_config(),
    }
    fm.save_object(meta, os.path.join(self.data_dir, "fixations.meta"))
def save_offline_data(self):
    """Cache detected pupil data and detection session metadata to disk.

    Writes each pupil datum under the topic registered for its timestamp,
    then saves detection metadata to an 'offline_pupil.meta' object.
    """
    with fm.PLData_Writer(self.data_dir, 'offline_pupil') as writer:
        # Look the topic up by timestamp instead of zipping
        # pupil_positions.keys() with id_topics.values() positionally,
        # which silently mispairs topics and data if the two dicts do not
        # share the exact same insertion order.
        for timestamp, datum in self.pupil_positions.items():
            writer.append_serialized(
                timestamp, self.id_topics[timestamp], datum.serialized
            )
    session_data = {
        'detection_method': self.detection_method,
        'detection_status': self.detection_status,
        'version': self.session_data_version,
    }
    cache_path = os.path.join(self.data_dir, 'offline_pupil.meta')
    fm.save_object(session_data, cache_path)
    logger.info('Cached detected pupil data to {}'.format(cache_path))
def _convert_gaze(recording: PupilRecording):
    """Convert PI gaze items into a PLData gaze file inside the recording."""
    frame_size = (1088, 1080)
    logger.info("Converting gaze data...")
    # A single datum dict is reused across iterations; the writer
    # serializes on each append, so in-place mutation is safe here.
    datum = {
        "topic": "gaze.pi",
        "norm_pos": None,
        "timestamp": None,
        "confidence": 1.0,
    }
    with fm.PLData_Writer(recording.rec_dir, "gaze") as writer:
        for (x, y), ts in pi_gaze_items(root_dir=recording.rec_dir):
            datum["timestamp"] = ts
            datum["norm_pos"] = m.normalize((x, y), size=frame_size, flip_y=True)
            writer.append(datum)
    logger.info(f"Converted {len(writer.ts_queue)} gaze positions.")
def _save_to_file(self):
    """Write detected markers to disk with topics encoding frame index and count.

    Each marker datum's topic is "<frame_index>.<num_markers>", keyed via the
    frame's timestamp so the mapping can be reconstructed on load.
    """
    directory = self._offline_data_folder_path
    file_name = self._pldata_file_name
    os.makedirs(directory, exist_ok=True)
    # Map each frame's timestamp to its topic string. Use the num_markers
    # value already bound by the comprehension instead of redundantly
    # re-looking it up in self.frame_index_to_num_markers.
    all_topics = {
        self._all_timestamps[frame_index]: "{}.{}".format(frame_index, num_markers)
        for frame_index, num_markers in self.frame_index_to_num_markers.items()
    }
    with fm.PLData_Writer(directory, file_name) as writer:
        for marker_ts, marker in zip(
            self.markers_bisector.timestamps, self.markers_bisector.data
        ):
            writer.append_serialized(
                timestamp=marker_ts,
                topic=all_topics[marker_ts],
                datum_serialized=marker.serialized,
            )