def do_export(_):
    """Export the currently trimmed section of the recording.

    Creates a fresh sub-directory under ``<rec_dir>/exports``, writes an
    ``export_info.csv`` summary of the trim range, and publishes a
    ``should_export`` notification so that export-capable plugins run.
    """
    left_idx = g_pool.seek_control.trim_left
    right_idx = g_pool.seek_control.trim_right
    export_range = left_idx, right_idx + 1  # exclusive range.stop
    export_ts_window = pm.exact_window(g_pool.timestamps, (left_idx, right_idx))

    export_dir = os.path.join(g_pool.rec_dir, "exports")
    export_dir = next_export_sub_dir(export_dir)

    os.makedirs(export_dir)
    logger.info('Created export dir at "{}"'.format(export_dir))

    export_info = {
        "Player Software Version": str(g_pool.version),
        "Data Format Version": meta_info["Data Format Version"],
        "Export Date": strftime("%d.%m.%Y", localtime()),
        "Export Time": strftime("%H:%M:%S", localtime()),
        # BUGFIX: the key previously carried a stray trailing colon
        # ("Frame Index Range:"), producing a double colon in the csv.
        "Frame Index Range": g_pool.seek_control.get_frame_index_trim_range_string(),
        "Relative Time Range": g_pool.seek_control.get_rel_time_trim_range_string(),
        "Absolute Time Range": g_pool.seek_control.get_abs_time_trim_range_string(),
    }
    # Renamed the file handle: it previously shadowed the stdlib `csv` module.
    with open(os.path.join(export_dir, "export_info.csv"), "w") as csv_file:
        write_key_value_file(csv_file, export_info)

    notification = {
        "subject": "should_export",
        "range": export_range,
        "ts_window": export_ts_window,
        "export_dir": export_dir,
    }
    g_pool.ipc_pub.notify(notification)
def export(self, export_window, export_dir):
    """Write blink data for the trimmed section to *export_dir*.

    Produces two files:

    blinks.csv -- one row per detected blink with the columns
        id | start_timestamp | duration | end_timestamp | start_frame_index |
        index | end_frame_index | confidence | filter_response | base_data

    blink_detection_report.csv -- the detector settings (history length,
        onset/offset confidence thresholds) and the number of exported blinks.
    """
    if not self.g_pool.blinks:
        logger.warning(
            "No blinks were detected in this recording. Nothing to export."
        )
        return

    field_names = (
        "id",
        "start_timestamp",
        "duration",
        "end_timestamp",
        "start_frame_index",
        "index",
        "end_frame_index",
        "confidence",
        "filter_response",
        "base_data",
    )
    section_blinks = self.g_pool.blinks.by_ts_window(export_window)

    blinks_path = os.path.join(export_dir, "blinks.csv")
    with open(blinks_path, "w", encoding="utf-8", newline="") as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(field_names)
        writer.writerows(
            self.csv_representation_for_blink(blink, field_names)
            for blink in section_blinks
        )
    logger.info("Created 'blinks.csv' file.")

    report_path = os.path.join(export_dir, "blink_detection_report.csv")
    with open(report_path, "w", encoding="utf-8", newline="") as csvfile:
        report = {
            "history_length": self.history_length,
            "onset_confidence_threshold": self.onset_confidence_threshold,
            "offset_confidence_threshold": self.offset_confidence_threshold,
            "blinks_exported": len(section_blinks),
        }
        csv_utils.write_key_value_file(csvfile, report)
    logger.info("Created 'blink_detection_report.csv' file.")
def update_recording_bytes_to_unicode(rec_dir):
    """Decode all ``bytes`` values in a recording's pickled files to ``str``.

    Walks every non-hidden, non-video file in *rec_dir*, recursively converts
    bytes payloads to unicode, and rewrites a file only when something
    actually changed. Finally round-trips info.csv to normalize its encoding.

    Args:
        rec_dir (path): Recording folder.
    """
    import collections.abc  # local import; safe even if the module header only has `import collections`

    logger.info("Updating recording from bytes to unicode.")

    def convert(data):
        # Recursively decode bytes; str and numpy arrays pass through as-is.
        if isinstance(data, bytes):
            return data.decode()
        elif isinstance(data, str) or isinstance(data, np.ndarray):
            return data
        # BUGFIX: `collections.Mapping` / `collections.Iterable` were removed
        # in Python 3.10 -- the ABCs live in `collections.abc`.
        elif isinstance(data, collections.abc.Mapping):
            return dict(map(convert, data.items()))
        elif isinstance(data, collections.abc.Iterable):
            return type(data)(map(convert, data))
        else:
            return data

    for file in os.listdir(rec_dir):
        if file.startswith('.') or os.path.splitext(file)[1] in ('.mp4', '.avi'):
            continue
        rec_file = os.path.join(rec_dir, file)
        try:
            rec_object = load_object(rec_file)
            converted_object = convert(rec_object)
            if converted_object != rec_object:
                logger.info('Converted `{}` from bytes to unicode'.format(file))
                save_object(converted_object, rec_file)
        except (UnpicklingError, IsADirectoryError):
            # Not a pickled data file (or a sub-directory); skip it.
            continue

    # manually convert k v dicts.
    meta_info_path = os.path.join(rec_dir, "info.csv")
    with open(meta_info_path, 'r', encoding='utf-8') as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    with open(meta_info_path, 'w', newline='') as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def update_recording_v086_to_v087(rec_dir):
    """Migrate a recording from the v0.8.6 to the v0.8.7 data format.

    Ensures every gaze datum carries a ``topic`` key, clamps its norm_pos to
    a sane range, and bumps the version stamp in info.csv.
    """
    logger.info("Updating recording from v0.8.6 format to v0.8.7 format")
    pupil_data_path = os.path.join(rec_dir, "pupil_data")
    pupil_data = load_object(pupil_data_path)
    meta_info_path = os.path.join(rec_dir, "info.csv")

    def _clamp_norm_point(pos):
        """Clamp a normalized point into [-100, 100] per axis.

        Realistic norm pos values lie in this range; grossly bigger or
        smaller numbers result from bad extrapolation and can cause an
        overflow error when denormalized and cast to int32.
        """
        x, y = pos[0], pos[1]
        return max(-100, min(100, x)), max(-100, min(100, y))

    for gaze in pupil_data["gaze_positions"]:
        # One gaze mapper failed to tag its output; backfill the topic.
        gaze.setdefault("topic", "gaze")
        gaze["norm_pos"] = _clamp_norm_point(gaze["norm_pos"])

    save_object(pupil_data, pupil_data_path)

    with open(meta_info_path, "r", encoding="utf-8") as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info["Capture Software Version"] = "v0.8.7"
    with open(meta_info_path, "w", newline="") as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def update_recording_v074_to_v082(rec_dir):
    """Bump the Capture Software Version in info.csv to v0.8.2."""
    info_path = os.path.join(rec_dir, "info.csv")
    with open(info_path, "r", encoding="utf-8") as f:
        info = csv_utils.read_key_value_file(f)
    info["Capture Software Version"] = "v0.8.2"
    with open(info_path, "w", newline="") as f:
        csv_utils.write_key_value_file(f, info)
def update_recording_v074_to_v082(rec_dir):
    """Bump the Capture Software Version in info.csv to v0.8.2.

    Args:
        rec_dir (path): Recording folder containing info.csv.
    """
    meta_info_path = os.path.join(rec_dir, "info.csv")
    # FIX: read with an explicit utf-8 encoding and write with newline=''
    # so the csv round-trip behaves identically on all platforms
    # (consistent with the other format updaters in this file).
    with open(meta_info_path, 'r', encoding='utf-8') as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info['Capture Software Version'] = 'v0.8.2'
    with open(meta_info_path, 'w', newline='') as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def update_recording_v086_to_v087(rec_dir):
    """Migrate a recording from the v0.8.6 to the v0.8.7 data format.

    Ensures every gaze datum carries a ``topic`` key, clamps its norm_pos to
    a sane range, and bumps the version stamp in info.csv.
    """
    logger.info("Updating recording from v0.8.6 format to v0.8.7 format")
    pupil_data = load_object(os.path.join(rec_dir, "pupil_data"))
    meta_info_path = os.path.join(rec_dir, "info.csv")

    def _clamp_norm_point(pos):
        """Clamp a normalized point into [-100, 100] per axis.

        Realistic norm pos values lie in this range; grossly bigger or
        smaller numbers result from bad extrapolation and can cause an
        overflow error when denormalized and cast to int32.
        """
        return min(100, max(-100, pos[0])), min(100, max(-100, pos[1]))

    for g in pupil_data['gaze_positions']:
        if 'topic' not in g:
            # we missed this in one gaze mapper
            g['topic'] = 'gaze'
        g['norm_pos'] = _clamp_norm_point(g['norm_pos'])

    save_object(pupil_data, os.path.join(rec_dir, "pupil_data"))

    # FIX: explicit utf-8 encoding on read and newline='' on write keeps
    # the csv round-trip platform independent (consistent with the other
    # format updaters in this file).
    with open(meta_info_path, 'r', encoding='utf-8') as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info['Capture Software Version'] = 'v0.8.7'
    with open(meta_info_path, 'w', newline='') as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def start(self):
    """Begin a new recording session.

    Resets all per-recording buffers, creates the (auto-incrementing)
    recording directory, writes the initial info.csv, and sets up the
    audio and world-video writers.
    """
    self.timestamps = []
    self.timestampsUnix = []
    self.glint_pos_list = []
    self.pupil_pos_list = []
    self.gaze_pos_list = []
    self.data = {'pupil_positions': [], 'gaze_positions': [], 'notifications': []}
    self.frame_count = 0
    self.running = True
    self.menu.read_only = True
    self.start_time = time()

    session = os.path.join(self.rec_dir, self.session_name)
    try:
        os.makedirs(session)
        logger.debug("Created new recordings session dir %s" % session)
    except OSError:
        # FIX: narrowed from a bare `except:`; makedirs raises OSError here.
        logger.debug("Recordings session dir %s already exists, using it." % session)

    # set up self incrementing folder within session folder
    counter = 0
    while True:
        self.rec_path = os.path.join(session, "%03d/" % counter)
        try:
            os.mkdir(self.rec_path)
            logger.debug("Created new recording dir %s" % self.rec_path)
            break
        except OSError:
            # FIX: narrowed from a bare `except:`; mkdir raises OSError when
            # the directory already exists.
            logger.debug("We dont want to overwrite data, incrementing counter & trying to make new data folder")
            counter += 1

    self.meta_info_path = os.path.join(self.rec_path, "info.csv")
    # newline='' lets csv_utils control line endings consistently (matches
    # the newer recorder implementations).
    with open(self.meta_info_path, 'w', newline='') as csvfile:
        csv_utils.write_key_value_file(csvfile, {
            'Recording Name': self.session_name,
            'Start Date': strftime("%d.%m.%Y", localtime(self.start_time)),
            'Start Time': strftime("%H:%M:%S", localtime(self.start_time)),
            'Start Time (seconds since epoch)': str(self.start_time)
        })

    if self.audio_src != 'No Audio':
        audio_path = os.path.join(self.rec_path, "world.wav")
        self.audio_writer = Audio_Capture(audio_path, self.audio_devices_dict[self.audio_src])
    else:
        self.audio_writer = None

    # De-duplicated: video_path is the same in both branches.
    self.video_path = os.path.join(self.rec_path, "world.mp4")
    if self.raw_jpeg and self.g_pool.capture.jpeg_support:
        self.writer = JPEG_Writer(self.video_path, self.g_pool.capture.frame_rate)
    else:
        self.writer = AV_Writer(self.video_path, fps=self.g_pool.capture.frame_rate)

    if self.show_info_menu:
        self.open_info_menu()
    logger.info("Started Recording.")
    self.notify_all(
        {'subject': 'recording.started', 'rec_path': self.rec_path,
         'session_name': self.session_name, 'record_eye': self.record_eye,
         'compression': self.raw_jpeg}
    )
def update_meta_info(rec_dir, meta_info):
    """Backup old meta info file and write the current format.

    Args:
        rec_dir (path): Recording folder
        meta_info (dict): Meta info
    """
    logger.info('Updating meta info')
    meta_info_path = os.path.join(rec_dir, "info.csv")
    meta_info_old_path = os.path.join(rec_dir, "info_old.csv")
    # Keep a backup of the original file before rewriting it.
    shutil.copy2(meta_info_path, meta_info_old_path)
    # FIX: newline='' lets csv_utils control line endings, consistent with
    # the other update_meta_info implementation in this file.
    with open(meta_info_path, 'w', newline='') as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def stop(self):
    """Finalize the recording.

    Releases the video writer, persists the collected pupil/gaze data and
    camera calibration, appends closing metadata to info.csv, writes the
    user info file, and resets the recorder/UI state.
    """
    # explicit release of VideoWriter
    self.writer.release()
    self.writer = None

    save_object(self.data, os.path.join(self.rec_path, "pupil_data"))

    try:
        copy2(os.path.join(self.g_pool.user_dir, "surface_definitions"),
              os.path.join(self.rec_path, "surface_definitions"))
    except OSError:
        # FIX: narrowed from a bare `except:`; copy2 signals failure via OSError.
        logger.info("No surface_definitions data found. You may want this if you do marker tracking.")

    camera_calibration = load_camera_calibration(self.g_pool)
    if camera_calibration is not None:
        save_object(camera_calibration, os.path.join(self.rec_path, "camera_calibration"))
    else:
        logger.info("No camera calibration found.")

    try:
        with open(self.meta_info_path, 'a', newline='') as csvfile:
            csv_utils.write_key_value_file(csvfile, {
                'Duration Time': self.get_rec_time_str(),
                'World Camera Frames': self.frame_count,
                'World Camera Resolution': str(self.g_pool.capture.frame_size[0]) + "x" + str(self.g_pool.capture.frame_size[1]),
                'Capture Software Version': self.g_pool.version,
                'Data Format Version': self.g_pool.version,
                'System Info': get_system_info()
            }, append=True)
    except Exception:
        logger.exception("Could not save metadata. Please report this bug!")

    try:
        with open(os.path.join(self.rec_path, "user_info.csv"), 'w', newline='') as csvfile:
            csv_utils.write_key_value_file(csvfile, self.user_info)
    except Exception:
        logger.exception("Could not save userdata. Please report this bug!")

    self.close_info_menu()
    self.running = False
    self.menu.read_only = False
    self.button.status_text = ''

    # Drop per-recording buffers.
    self.data = {'pupil_positions': [], 'gaze_positions': []}
    self.pupil_pos_list = []
    self.gaze_pos_list = []

    logger.info("Saved Recording.")
    self.notify_all({'subject': 'recording.stopped', 'rec_path': self.rec_path})
def update_recording_v083_to_v086(rec_dir):
    """Tag every datum in pupil_data with its topic and bump info.csv to v0.8.6."""
    logger.info("Updating recording from v0.8.3 format to v0.8.6 format")
    pupil_data_path = os.path.join(rec_dir, "pupil_data")
    pupil_data = load_object(pupil_data_path)
    meta_info_path = os.path.join(rec_dir, "info.csv")

    for topic, datums in pupil_data.items():
        for datum in datums:
            datum["topic"] = topic

    save_object(pupil_data, pupil_data_path)

    with open(meta_info_path, "r", encoding="utf-8") as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info["Capture Software Version"] = "v0.8.6"
    with open(meta_info_path, "w", newline="") as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def update_recording_v082_to_v083(rec_dir):
    """Rename the gaze `base` field to `base_data` and bump info.csv to v0.8.3."""
    logger.info("Updating recording from v0.8.2 format to v0.8.3 format")
    pupil_data_path = os.path.join(rec_dir, "pupil_data")
    pupil_data = load_object(pupil_data_path)
    meta_info_path = os.path.join(rec_dir, "info.csv")

    for datum in pupil_data["gaze_positions"]:
        try:
            datum["base_data"] = datum.pop("base")
        except KeyError:
            pass  # already migrated or never had a `base` field

    save_object(pupil_data, pupil_data_path)

    with open(meta_info_path, "r", encoding="utf-8") as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info["Capture Software Version"] = "v0.8.3"
    with open(meta_info_path, "w", newline="") as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def update_recording_v083_to_v086(rec_dir):
    """Tag every datum in pupil_data with its topic and bump info.csv to v0.8.6."""
    logger.info("Updating recording from v0.8.3 format to v0.8.6 format")
    pupil_data = load_object(os.path.join(rec_dir, "pupil_data"))
    meta_info_path = os.path.join(rec_dir, "info.csv")

    for topic in pupil_data.keys():
        for d in pupil_data[topic]:
            d['topic'] = topic

    save_object(pupil_data, os.path.join(rec_dir, "pupil_data"))

    # FIX: explicit utf-8 encoding on read and newline='' on write keeps
    # the csv round-trip platform independent (consistent with the other
    # format updaters in this file).
    with open(meta_info_path, 'r', encoding='utf-8') as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info['Capture Software Version'] = 'v0.8.6'
    with open(meta_info_path, 'w', newline='') as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def update_recording_v082_to_v083(rec_dir):
    """Rename the gaze `base` field to `base_data` and bump info.csv to v0.8.3."""
    logger.info("Updating recording from v0.8.2 format to v0.8.3 format")
    pupil_data = load_object(os.path.join(rec_dir, "pupil_data"))
    meta_info_path = os.path.join(rec_dir, "info.csv")

    for d in pupil_data['gaze_positions']:
        if 'base' in d:
            d['base_data'] = d.pop('base')

    save_object(pupil_data, os.path.join(rec_dir, "pupil_data"))

    # FIX: explicit utf-8 encoding on read and newline='' on write keeps
    # the csv round-trip platform independent (consistent with the other
    # format updaters in this file).
    with open(meta_info_path, 'r', encoding='utf-8') as csvfile:
        meta_info = csv_utils.read_key_value_file(csvfile)
    meta_info['Capture Software Version'] = 'v0.8.3'
    with open(meta_info_path, 'w', newline='') as csvfile:
        csv_utils.write_key_value_file(csvfile, meta_info)
def update_meta_info(rec_dir, meta_info):
    """Rewrite info.csv in *rec_dir* from the given meta info dict."""
    logger.info('Updating meta info')
    info_path = os.path.join(rec_dir, "info.csv")
    with open(info_path, 'w', newline='') as f:
        csv_utils.write_key_value_file(f, meta_info)
def stop(self):
    """Finalize the recording.

    Releases the video writer (saving the world camera intrinsics when a
    video was actually written), closes all pldata writers, copies surface
    definitions into the recording folder, appends closing metadata to
    info.csv, writes the user info file, and resets the recorder/UI state.
    """
    # explicit release of VideoWriter
    try:
        self.writer.release()
    except RuntimeError:
        logger.error("No world video recorded")
    else:
        logger.debug("Closed media container")
        self.g_pool.capture.intrinsics.save(self.rec_path, custom_name="world")
    finally:
        self.writer = None

    for writer in self.pldata_writers.values():
        writer.close()
    del self.pldata_writers

    try:
        copy2(
            os.path.join(self.g_pool.user_dir, "surface_definitions"),
            os.path.join(self.rec_path, "surface_definitions"),
        )
    except OSError:
        # FIX: narrowed from a bare `except:`; copy2 signals failure via OSError.
        logger.info(
            "No surface_definitions data found. You may want this if you do marker tracking."
        )

    try:
        with open(self.meta_info_path, "a", newline="") as csvfile:
            csv_utils.write_key_value_file(
                csvfile,
                {
                    "Duration Time": self.get_rec_time_str(),
                    "World Camera Frames": self.frame_count,
                    "World Camera Resolution": str(self.g_pool.capture.frame_size[0])
                    + "x"
                    + str(self.g_pool.capture.frame_size[1]),
                    "Capture Software Version": self.g_pool.version,
                    "Data Format Version": self.g_pool.version,
                    "System Info": get_system_info(),
                },
                append=True,
            )
    except Exception:
        logger.exception("Could not save metadata. Please report this bug!")

    try:
        with open(
            os.path.join(self.rec_path, "user_info.csv"), "w", newline=""
        ) as csvfile:
            csv_utils.write_key_value_file(csvfile, self.user_info)
    except Exception:
        logger.exception("Could not save userdata. Please report this bug!")

    self.close_info_menu()
    self.running = False
    if self.menu:
        self.menu.read_only = False
    self.button.status_text = ""

    logger.info("Saved Recording.")
    self.notify_all({"subject": "recording.stopped", "rec_path": self.rec_path})
def stop(self):
    """Finalize the recording.

    Releases the video writer (saving the world camera intrinsics when a
    video was actually written), closes all pldata writers, copies surface
    definitions into the recording folder, appends closing metadata to
    info.csv, writes the user info file, and resets the recorder/UI state.
    """
    # explicit release of VideoWriter
    try:
        self.writer.release()
    except RuntimeError:
        logger.error("No world video recorded")
    else:
        logger.debug("Closed media container")
        self.g_pool.capture.intrinsics.save(self.rec_path, custom_name="world")
    finally:
        self.writer = None

    for writer in self.pldata_writers.values():
        writer.close()
    del self.pldata_writers

    try:
        copy2(
            os.path.join(self.g_pool.user_dir, "surface_definitions"),
            os.path.join(self.rec_path, "surface_definitions"),
        )
    except OSError:
        # FIX: narrowed from a bare `except:`; copy2 signals failure via OSError.
        logger.info(
            "No surface_definitions data found. You may want this if you do marker tracking."
        )

    try:
        with open(self.meta_info_path, "a", newline="") as csvfile:
            csv_utils.write_key_value_file(
                csvfile,
                {
                    "Duration Time": self.get_rec_time_str(),
                    "World Camera Frames": self.frame_count,
                    "World Camera Resolution": str(
                        self.g_pool.capture.frame_size[0]
                    )
                    + "x"
                    + str(self.g_pool.capture.frame_size[1]),
                    "Capture Software Version": self.g_pool.version,
                    "Data Format Version": self.g_pool.version,
                    "System Info": get_system_info(),
                },
                append=True,
            )
    except Exception:
        logger.exception("Could not save metadata. Please report this bug!")

    try:
        with open(
            os.path.join(self.rec_path, "user_info.csv"), "w", newline=""
        ) as csvfile:
            csv_utils.write_key_value_file(csvfile, self.user_info)
    except Exception:
        logger.exception("Could not save userdata. Please report this bug!")

    self.close_info_menu()
    self.running = False
    if self.menu:
        self.menu.read_only = False
    self.button.status_text = ""

    logger.info("Saved Recording.")
    self.notify_all({"subject": "recording.stopped", "rec_path": self.rec_path})
def stop(self):
    """Finalize the recording.

    Releases the video writer, saves pupil/gaze data and the world
    timestamp array, copies calibration artifacts into the recording
    folder, appends closing metadata to info.csv, and resets recorder state.
    """
    # explicit release of VideoWriter
    self.writer.release()
    self.writer = None

    save_object(self.data, os.path.join(self.rec_path, "pupil_data"))

    timestamps_path = os.path.join(self.rec_path, "world_timestamps.npy")
    # ts = sanitize_timestamps(np.array(self.timestamps))
    ts = np.array(self.timestamps)
    np.save(timestamps_path, ts)

    # Best-effort copies; a missing source file is not an error.
    # FIX: bare `except:` clauses narrowed to OSError (what copy2 raises).
    try:
        copy2(os.path.join(self.g_pool.user_dir, "surface_definitions"),
              os.path.join(self.rec_path, "surface_definitions"))
    except OSError:
        logger.info("No surface_definitions data found. You may want this if you do marker tracking.")

    try:
        copy2(os.path.join(self.g_pool.user_dir, "user_calibration_data"),
              os.path.join(self.rec_path, "user_calibration_data"))
    except OSError:
        logger.warning("No user calibration data found. Please calibrate first.")

    camera_calibration = load_camera_calibration(self.g_pool)
    if camera_calibration is not None:
        save_object(camera_calibration, os.path.join(self.rec_path, "camera_calibration"))
    else:
        logger.info("No camera calibration found.")

    try:
        with open(self.meta_info_path, 'a', newline='') as csvfile:
            csv_utils.write_key_value_file(csvfile, {
                'Duration Time': self.get_rec_time_str(),
                'World Camera Frames': self.frame_count,
                'World Camera Resolution': str(self.g_pool.capture.frame_size[0]) + "x" + str(self.g_pool.capture.frame_size[1]),
                'Capture Software Version': self.g_pool.version,
                'System Info': get_system_info()
            }, append=True)
    except Exception:
        logger.exception("Could not save metadata. Please report this bug!")

    try:
        with open(os.path.join(self.rec_path, "user_info.csv"), 'w', newline='') as csvfile:
            csv_utils.write_key_value_file(csvfile, self.user_info)
    except Exception:
        logger.exception("Could not save userdata. Please report this bug!")

    self.close_info_menu()
    if self.audio_writer:
        self.audio_writer = None
    self.running = False
    self.menu.read_only = False
    self.button.status_text = ''

    # Drop per-recording buffers.
    self.timestamps = []
    self.data = {'pupil_positions': [], 'gaze_positions': []}
    self.pupil_pos_list = []
    self.gaze_pos_list = []

    logger.info("Saved Recording.")
    self.notify_all({'subject': 'recording.stopped', 'rec_path': self.rec_path})
def stop(self):
    """Finalize the recording.

    Releases the video writer, saves pupil/glint data and the timestamp
    arrays, copies calibration and accuracy-test artifacts into the
    recording folder, appends closing metadata, resets recorder state and
    archives the capture log.
    """
    # explicit release of VideoWriter
    self.writer.release()
    self.writer = None

    save_object(self.data, os.path.join(self.rec_path, "pupil_data"))

    self.glint_pos_list = np.array(self.glint_pos_list)
    glint_list_path = os.path.join(self.rec_path, "glint_positions.npy")
    np.save(glint_list_path, self.glint_pos_list)

    timestamps_path = os.path.join(self.rec_path, "world_timestamps.npy")
    # ts = sanitize_timestamps(np.array(self.timestamps))
    ts = np.array(self.timestamps)
    np.save(timestamps_path, ts)

    timestampsUnix_path = os.path.join(self.rec_path, "world_timestamps_unix.npy")
    tsUnix = np.array(self.timestampsUnix)
    np.save(timestampsUnix_path, tsUnix)

    # Best-effort copies of auxiliary data; a missing source file is not an
    # error. FIX: all bare `except:` clauses narrowed to OSError (copy2).
    try:
        copy2(os.path.join(self.g_pool.user_dir, "surface_definitions"),
              os.path.join(self.rec_path, "surface_definitions"))
    except OSError:
        logger.info("No surface_definitions data found. You may want this if you do marker tracking.")

    try:
        copy2(os.path.join(self.g_pool.user_dir, "user_calibration_data"),
              os.path.join(self.rec_path, "user_calibration_data"))
    except OSError:
        logger.warning("No user calibration data found. Please calibrate first.")

    try:
        copy2(os.path.join(self.g_pool.user_dir, "cal_pt_cloud_glint.npy"),
              os.path.join(self.rec_path, "cal_pt_cloud_glint.npy"))
    except OSError:
        logger.warning("No pupil-glint-vector calibration data found. Please calibrate first.")

    try:
        copy2(os.path.join(self.g_pool.user_dir, "cal_ref_list.npy"),
              os.path.join(self.rec_path, "cal_ref_list.npy"))
    except OSError:
        logger.warning("No calibration reference list found.")

    try:
        copy2(os.path.join(self.g_pool.user_dir, "accuracy_test_pt_cloud.npy"),
              os.path.join(self.rec_path, "accuracy_test_pt_cloud.npy"))
        copy2(os.path.join(self.g_pool.user_dir, "accuracy_test_ref_list.npy"),
              os.path.join(self.rec_path, "accuracy_test_ref_list.npy"))
    except OSError:
        logger.warning("No accuracy test found.")

    try:
        copy2(os.path.join(self.g_pool.user_dir, "accuracy_test_pt_cloud_previous.npy"),
              os.path.join(self.rec_path, "accuracy_test_pt_cloud_previous.npy"))
        copy2(os.path.join(self.g_pool.user_dir, "accuracy_test_ref_list_previous.npy"),
              os.path.join(self.rec_path, "accuracy_test_ref_list_previous.npy"))
    except OSError:
        logger.warning("No previous accuracy test results.")

    camera_calibration = load_camera_calibration(self.g_pool)
    if camera_calibration is not None:
        save_object(camera_calibration, os.path.join(self.rec_path, "camera_calibration"))
    else:
        logger.info("No camera calibration found.")

    try:
        # FIX: newline='' so csv_utils controls line endings (consistent
        # with the newer recorder implementations).
        with open(self.meta_info_path, 'a', newline='') as csvfile:
            csv_utils.write_key_value_file(csvfile, {
                'Duration Time': self.get_rec_time_str(),
                'World Camera Frames': self.frame_count,
                'World Camera Resolution': str(self.g_pool.capture.frame_size[0]) + "x" + str(self.g_pool.capture.frame_size[1]),
                'Capture Software Version': self.g_pool.version,
                'System Info': get_system_info()
            }, append=True)
    except Exception:
        logger.exception("Could not save metadata. Please report this bug!")

    try:
        with open(os.path.join(self.rec_path, "user_info.csv"), 'w', newline='') as csvfile:
            csv_utils.write_key_value_file(csvfile, self.user_info)
    except Exception:
        logger.exception("Could not save userdata. Please report this bug!")

    self.close_info_menu()
    if self.audio_writer:
        self.audio_writer = None
    self.running = False
    self.menu.read_only = False
    self.button.status_text = ''

    self.timestamps = []
    self.data = {'pupil_positions': [], 'gaze_positions': []}
    self.pupil_pos_list = []
    self.gaze_pos_list = []

    logger.info("Saved Recording.")
    # NOTE(review): 'rec_stopped' is presumably a legacy notification kept
    # for older listeners alongside 'recording.stopped' -- verify before removing.
    self.notify_all(
        {'subject': 'rec_stopped', 'rec_path': self.rec_path, 'network_propagate': True}
    )
    self.notify_all(
        {'subject': 'recording.stopped', 'rec_path': self.rec_path}
    )
    copyfile(os.path.join(self.g_pool.user_dir, 'capture.log'),
             os.path.join(self.rec_path, "capture.log"))
def start(self):
    """Begin a new recording session.

    Creates the auto-incrementing recording directory, writes the initial
    info.csv (including a fresh recording UUID), sets up the world-video
    writer, and records the stored calibration as a `notify` pldata stream.
    """
    session = os.path.join(self.rec_root_dir, self.session_name)
    try:
        os.makedirs(session, exist_ok=True)
        logger.debug("Created new recordings session dir {}".format(session))
    except OSError:
        logger.error(
            "Could not start recording. Session dir {} not writable.".format(
                session
            )
        )
        return

    self.pldata_writers = {}
    self.frame_count = 0
    self.running = True
    self.menu.read_only = True
    self.start_time = time()
    start_time_synced = self.g_pool.get_timestamp()
    recording_uuid = uuid.uuid4()

    # set up self incrementing folder within session folder
    counter = 0
    while True:
        self.rec_path = os.path.join(session, "{:03d}/".format(counter))
        try:
            os.mkdir(self.rec_path)
            logger.debug("Created new recording dir {}".format(self.rec_path))
            break
        except OSError:
            # FIX: narrowed from a bare `except:`; mkdir raises OSError when
            # the directory already exists.
            logger.debug(
                "We dont want to overwrite data, incrementing counter & trying to make new data folder"
            )
            counter += 1

    self.meta_info_path = os.path.join(self.rec_path, "info.csv")
    with open(self.meta_info_path, "w", newline="", encoding="utf-8") as csvfile:
        csv_utils.write_key_value_file(
            csvfile,
            {
                "Recording Name": self.session_name,
                "Start Date": strftime("%d.%m.%Y", localtime(self.start_time)),
                "Start Time": strftime("%H:%M:%S", localtime(self.start_time)),
                "Start Time (System)": self.start_time,
                "Start Time (Synced)": start_time_synced,
                "Recording UUID": recording_uuid,
            },
        )

    self.video_path = os.path.join(self.rec_path, "world.mp4")
    if self.raw_jpeg and self.g_pool.capture.jpeg_support:
        self.writer = JPEG_Writer(self.video_path, self.g_pool.capture.frame_rate)
    elif hasattr(self.g_pool.capture._recent_frame, "h264_buffer"):
        self.writer = H264Writer(
            self.video_path,
            self.g_pool.capture.frame_size[0],
            self.g_pool.capture.frame_size[1],
            int(self.g_pool.capture.frame_rate),
        )
    else:
        self.writer = AV_Writer(self.video_path, fps=self.g_pool.capture.frame_rate)

    try:
        cal_pt_path = os.path.join(self.g_pool.user_dir, "user_calibration_data")
        cal_data = load_object(cal_pt_path)
        notification = {"subject": "calibration.calibration_data", "record": True}
        notification.update(cal_data)
        notification["topic"] = "notify." + notification["subject"]

        writer = PLData_Writer(self.rec_path, "notify")
        writer.append(notification)
        self.pldata_writers["notify"] = writer
    except FileNotFoundError:
        # No stored calibration yet; the recording proceeds without it.
        pass

    if self.show_info_menu:
        self.open_info_menu()
    logger.info("Started Recording.")
    self.notify_all(
        {
            "subject": "recording.started",
            "rec_path": self.rec_path,
            "session_name": self.session_name,
            "record_eye": self.record_eye,
            "compression": self.raw_jpeg,
        }
    )
def export(self, export_window, export_dir):
    """Write blink data for the trimmed section to *export_dir*.

    blink_detection_report.csv:
        - history length
        - onset threshold
        - offset threshold

    blinks.csv:
        id | start_timestamp | duration | end_timestamp | start_frame_index |
        index | end_frame_index | confidence | filter_response | base_data

    Additionally dumps the raw activity and filter-response debug traces.
    """
    if not self.g_pool.blinks:
        logger.warning(
            "No blinks were detected in this recording. Nothing to export."
        )
        return

    header = (
        "id",
        "start_timestamp",
        "duration",
        "end_timestamp",
        "start_frame_index",
        "index",
        "end_frame_index",
        "confidence",
        "filter_response",
        "base_data",
    )
    blinks_in_section = self.g_pool.blinks.by_ts_window(export_window)

    with open(
        os.path.join(export_dir, "blinks.csv"), "w", encoding="utf-8", newline=""
    ) as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow(header)
        for b in blinks_in_section:
            csv_writer.writerow(self.csv_representation_for_blink(b, header))
    logger.info("Created 'blinks.csv' file.")

    with open(
        os.path.join(export_dir, "blink_detection_report.csv"),
        "w",
        encoding="utf-8",
        newline="",
    ) as csvfile:
        csv_utils.write_key_value_file(
            csvfile,
            {
                "history_length": self.history_length,
                "onset_confidence_threshold": self.onset_confidence_threshold,
                "offset_confidence_threshold": self.offset_confidence_threshold,
                "blinks_exported": len(blinks_in_section),
            },
        )
    logger.info("Created 'blink_detection_report.csv' file.")

    # BUGFIX: these debug dumps previously landed in the process working
    # directory instead of the export folder; route them into export_dir.
    np.savetxt(os.path.join(export_dir, 'activity.csv'), self.activity, delimiter=',')
    np.savetxt(
        os.path.join(export_dir, 'filter_response.csv'),
        self.filter_response,
        delimiter=',',
    )
    merged = np.column_stack((self.timestamps, self.filter_response))
    # FIX: use the module logger instead of print for diagnostic output.
    logger.debug('merged shape: {}'.format(merged.shape))
    # NOTE: filename typo ("fitlerrespone") kept for backwards compatibility
    # with downstream consumers of the exported file.
    np.savetxt(
        os.path.join(export_dir, 'timestamps_fitlerrespone.csv'),
        merged,
        fmt='%10.6f',
        delimiter=',',
    )
def start(self):
    """Begin a new recording session.

    Resets per-recording buffers, creates the auto-incrementing recording
    directory, writes the initial info.csv, sets up audio/video writers and
    queues the stored calibration data as a notification.
    """
    self.timestamps = []
    self.data = {
        'pupil_positions': [],
        'gaze_positions': [],
        'notifications': []
    }
    self.frame_count = 0
    self.running = True
    self.menu.read_only = True
    self.start_time = time()

    session = os.path.join(self.rec_dir, self.session_name)
    try:
        os.makedirs(session)
        logger.debug("Created new recordings session dir {}".format(session))
    except OSError:
        # FIX: narrowed from a bare `except:`; makedirs raises OSError here.
        logger.debug(
            "Recordings session dir {} already exists, using it.".format(session))

    # set up self incrementing folder within session folder
    counter = 0
    while True:
        self.rec_path = os.path.join(session, "{:03d}/".format(counter))
        try:
            os.mkdir(self.rec_path)
            logger.debug("Created new recording dir {}".format(self.rec_path))
            break
        except OSError:
            # FIX: narrowed from a bare `except:`; mkdir raises OSError when
            # the directory already exists.
            logger.debug(
                "We dont want to overwrite data, incrementing counter & trying to make new data folder"
            )
            counter += 1

    self.meta_info_path = os.path.join(self.rec_path, "info.csv")
    with open(self.meta_info_path, 'w', newline='') as csvfile:
        csv_utils.write_key_value_file(
            csvfile, {
                'Recording Name': self.session_name,
                'Start Date': strftime("%d.%m.%Y", localtime(self.start_time)),
                'Start Time': strftime("%H:%M:%S", localtime(self.start_time))
            })

    if self.audio_src != 'No Audio':
        audio_path = os.path.join(self.rec_path, "world.wav")
        self.audio_writer = Audio_Capture(
            audio_path, self.audio_devices_dict[self.audio_src])
    else:
        self.audio_writer = None

    self.video_path = os.path.join(self.rec_path, "world.mp4")
    if self.raw_jpeg and self.g_pool.capture.jpeg_support:
        self.writer = JPEG_Writer(self.video_path,
                                  self.g_pool.capture.frame_rate)
    else:
        self.writer = AV_Writer(self.video_path,
                                fps=self.g_pool.capture.frame_rate)

    try:
        cal_pt_path = os.path.join(self.g_pool.user_dir, "user_calibration_data")
        cal_data = load_object(cal_pt_path)
        notification = {
            'subject': 'calibration.calibration_data',
            'record': True
        }
        notification.update(cal_data)
        self.data['notifications'].append(notification)
    except Exception:
        # FIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit); recording is best-effort without
        # stored calibration data.
        pass

    if self.show_info_menu:
        self.open_info_menu()
    logger.info("Started Recording.")
    self.notify_all({
        'subject': 'recording.started',
        'rec_path': self.rec_path,
        'session_name': self.session_name,
        'record_eye': self.record_eye,
        'compression': self.raw_jpeg
    })
def start(self):
    """Begin a new recording session.

    Creates the auto-incrementing recording directory, writes the initial
    info.csv, sets up the world-video writer, and records the stored
    calibration as a `notify` pldata stream.
    """
    session = os.path.join(self.rec_root_dir, self.session_name)
    try:
        os.makedirs(session, exist_ok=True)
        logger.debug(
            "Created new recordings session dir {}".format(session))
    except OSError:
        logger.error(
            "Could not start recording. Session dir {} not writable.".
            format(session))
        return

    self.pldata_writers = {}
    self.frame_count = 0
    self.running = True
    self.menu.read_only = True
    self.start_time = time()
    start_time_synced = self.g_pool.get_timestamp()

    # set up self incrementing folder within session folder
    counter = 0
    while True:
        self.rec_path = os.path.join(session, "{:03d}/".format(counter))
        try:
            os.mkdir(self.rec_path)
            logger.debug("Created new recording dir {}".format(self.rec_path))
            break
        except OSError:
            # FIX: narrowed from a bare `except:`; mkdir raises OSError when
            # the directory already exists.
            logger.debug(
                "We dont want to overwrite data, incrementing counter & trying to make new data folder"
            )
            counter += 1

    self.meta_info_path = os.path.join(self.rec_path, "info.csv")
    with open(self.meta_info_path, "w", newline="", encoding="utf-8") as csvfile:
        csv_utils.write_key_value_file(
            csvfile,
            {
                "Recording Name": self.session_name,
                "Start Date": strftime("%d.%m.%Y", localtime(self.start_time)),
                "Start Time": strftime("%H:%M:%S", localtime(self.start_time)),
                "Start Time (System)": self.start_time,
                "Start Time (Synced)": start_time_synced,
            },
        )

    self.video_path = os.path.join(self.rec_path, "world.mp4")
    if self.raw_jpeg and self.g_pool.capture.jpeg_support:
        self.writer = JPEG_Writer(self.video_path,
                                  self.g_pool.capture.frame_rate)
    elif hasattr(self.g_pool.capture._recent_frame, "h264_buffer"):
        self.writer = H264Writer(
            self.video_path,
            self.g_pool.capture.frame_size[0],
            self.g_pool.capture.frame_size[1],
            int(self.g_pool.capture.frame_rate),
        )
    else:
        self.writer = AV_Writer(self.video_path,
                                fps=self.g_pool.capture.frame_rate)

    try:
        cal_pt_path = os.path.join(self.g_pool.user_dir, "user_calibration_data")
        cal_data = load_object(cal_pt_path)
        notification = {
            "subject": "calibration.calibration_data",
            "record": True
        }
        notification.update(cal_data)
        notification["topic"] = "notify." + notification["subject"]

        writer = PLData_Writer(self.rec_path, "notify")
        writer.append(notification)
        self.pldata_writers["notify"] = writer
    except FileNotFoundError:
        # No stored calibration yet; the recording proceeds without it.
        pass

    if self.show_info_menu:
        self.open_info_menu()
    logger.info("Started Recording.")
    self.notify_all({
        "subject": "recording.started",
        "rec_path": self.rec_path,
        "session_name": self.session_name,
        "record_eye": self.record_eye,
        "compression": self.raw_jpeg,
    })
def start(self):
    """Begin a new recording session (pupil_data dict variant).

    Creates the session directory and the next free, zero-padded
    recording sub-directory, writes the initial ``info.csv`` metadata,
    opens the appropriate world-video writer, and (best effort) queues
    the stored user calibration as a notification in ``self.data``.
    Finally broadcasts a ``recording.started`` notification.
    """
    session = os.path.join(self.rec_dir, self.session_name)
    try:
        os.makedirs(session, exist_ok=True)
        logger.debug("Created new recordings session dir {}".format(session))
    except OSError:
        logger.error("Could not start recording. Session dir {} not writable.".format(session))
        return

    self.data = {'pupil_positions': [], 'gaze_positions': [], 'notifications': []}
    self.frame_count = 0
    self.running = True
    self.menu.read_only = True
    self.start_time = time()
    start_time_synced = self.g_pool.get_timestamp()

    # Set up self-incrementing folder within the session folder.
    counter = 0
    while True:
        self.rec_path = os.path.join(session, "{:03d}/".format(counter))
        try:
            os.mkdir(self.rec_path)
            logger.debug("Created new recording dir {}".format(self.rec_path))
            break
        except FileExistsError:
            # Never overwrite existing data; try the next index. Other
            # OSErrors propagate instead of looping forever.
            logger.debug("We dont want to overwrite data, incrementing counter & trying to make new data folder")
            counter += 1

    self.meta_info_path = os.path.join(self.rec_path, "info.csv")

    # Explicit encoding keeps info.csv byte-identical across platforms.
    with open(self.meta_info_path, 'w', newline='', encoding='utf-8') as csvfile:
        csv_utils.write_key_value_file(csvfile, {
            'Recording Name': self.session_name,
            'Start Date': strftime("%d.%m.%Y", localtime(self.start_time)),
            'Start Time': strftime("%H:%M:%S", localtime(self.start_time)),
            'Start Time (System)': self.start_time,
            'Start Time (Synced)': start_time_synced
        })

    self.video_path = os.path.join(self.rec_path, "world.mp4")
    # Pick the writer that matches the capture backend's capabilities.
    if self.raw_jpeg and self.g_pool.capture.jpeg_support:
        self.writer = JPEG_Writer(self.video_path, self.g_pool.capture.frame_rate)
    elif hasattr(self.g_pool.capture._recent_frame, 'h264_buffer'):
        self.writer = H264Writer(self.video_path,
                                 self.g_pool.capture.frame_size[0],
                                 self.g_pool.capture.frame_size[1],
                                 int(self.g_pool.capture.frame_rate))
    else:
        self.writer = AV_Writer(self.video_path, fps=self.g_pool.capture.frame_rate)

    try:
        cal_pt_path = os.path.join(self.g_pool.user_dir, "user_calibration_data")
        cal_data = load_object(cal_pt_path)
        notification = {'subject': 'calibration.calibration_data', 'record': True}
        notification.update(cal_data)
        self.data['notifications'].append(notification)
    except FileNotFoundError:
        # No stored calibration yet -- nothing to record; that is fine.
        pass

    if self.show_info_menu:
        self.open_info_menu()
    logger.info("Started Recording.")
    self.notify_all({'subject': 'recording.started',
                     'rec_path': self.rec_path,
                     'session_name': self.session_name,
                     'record_eye': self.record_eye,
                     'compression': self.raw_jpeg})
def start(self):
    """Begin a new recording session (glint/timestamp-buffer variant).

    Resets the per-recording buffers, creates the session directory and
    the next free, zero-padded recording sub-directory, writes the
    initial ``info.csv`` metadata, opens the appropriate world-video
    writer, and (best effort) queues the stored user calibration as a
    notification. Finally broadcasts ``recording.started``.
    """
    self.timestamps = []
    self.timestampsUnix = []
    self.glint_pos_list = []
    self.pupil_pos_list = []
    self.gaze_pos_list = []
    self.data = {'pupil_positions': [], 'gaze_positions': [], 'notifications': []}
    self.frame_count = 0
    self.running = True
    self.menu.read_only = True
    self.start_time = time()

    session = os.path.join(self.rec_dir, self.session_name)
    try:
        os.makedirs(session)
        logger.debug("Created new recordings session dir {}".format(session))
    except FileExistsError:
        # Reusing an existing session dir is expected; other OSErrors
        # (e.g. not writable) propagate.
        logger.debug("Recordings session dir {} already exists, using it.".format(session))

    # Set up self-incrementing folder within the session folder.
    counter = 0
    while True:
        self.rec_path = os.path.join(session, "{:03d}/".format(counter))
        try:
            os.mkdir(self.rec_path)
            logger.debug("Created new recording dir {}".format(self.rec_path))
            break
        except FileExistsError:
            # Never overwrite existing data; try the next index. Other
            # OSErrors propagate instead of looping forever.
            logger.debug("We dont want to overwrite data, incrementing counter & trying to make new data folder")
            counter += 1

    self.meta_info_path = os.path.join(self.rec_path, "info.csv")

    # Explicit encoding keeps info.csv byte-identical across platforms.
    with open(self.meta_info_path, 'w', newline='', encoding='utf-8') as csvfile:
        csv_utils.write_key_value_file(csvfile, {
            'Recording Name': self.session_name,
            'Start Date': strftime("%d.%m.%Y", localtime(self.start_time)),
            'Start Time': strftime("%H:%M:%S", localtime(self.start_time)),
        })

    self.video_path = os.path.join(self.rec_path, "world.mp4")
    # Pick the writer that matches the capture backend's capabilities.
    if self.raw_jpeg and self.g_pool.capture.jpeg_support:
        self.writer = JPEG_Writer(self.video_path, self.g_pool.capture.frame_rate)
    elif hasattr(self.g_pool.capture._recent_frame, 'h264_buffer'):
        self.writer = H264Writer(self.video_path,
                                 self.g_pool.capture.frame_size[0],
                                 self.g_pool.capture.frame_size[1],
                                 int(self.g_pool.capture.frame_rate))
    else:
        self.writer = AV_Writer(self.video_path, fps=self.g_pool.capture.frame_rate)

    try:
        cal_pt_path = os.path.join(self.g_pool.user_dir, "user_calibration_data")
        cal_data = load_object(cal_pt_path)
        notification = {'subject': 'calibration.calibration_data', 'record': True}
        notification.update(cal_data)
        self.data['notifications'].append(notification)
    except FileNotFoundError:
        # No stored calibration yet -- nothing to record; that is fine.
        pass

    if self.show_info_menu:
        self.open_info_menu()
    logger.info("Started Recording.")
    self.notify_all({'subject': 'recording.started',
                     'rec_path': self.rec_path,
                     'session_name': self.session_name,
                     'record_eye': self.record_eye,
                     'compression': self.raw_jpeg})
def update_meta_info(rec_dir, meta_info): logger.info('Updating meta info') meta_info_path = os.path.join(rec_dir,"info.csv") with open(meta_info_path,'w',newline='') as csvfile: csv_utils.write_key_value_file(csvfile,meta_info)
def stop(self): # explicit release of VideoWriter self.writer.release() self.writer = None save_object(self.data, os.path.join(self.rec_path, "pupil_data")) try: copy2(os.path.join(self.g_pool.user_dir, "surface_definitions"), os.path.join(self.rec_path, "surface_definitions")) except: logger.info( "No surface_definitions data found. You may want this if you do marker tracking." ) self.g_pool.capture.intrinsics.save(self.rec_path, custom_name='world') try: with open(self.meta_info_path, 'a', newline='') as csvfile: csv_utils.write_key_value_file(csvfile, { 'Duration Time': self.get_rec_time_str(), 'World Camera Frames': self.frame_count, 'World Camera Resolution': str(self.g_pool.capture.frame_size[0]) + "x" + str(self.g_pool.capture.frame_size[1]), 'Capture Software Version': self.g_pool.version, 'Data Format Version': self.g_pool.version, 'System Info': get_system_info() }, append=True) except Exception: logger.exception( "Could not save metadata. Please report this bug!") try: with open(os.path.join(self.rec_path, "user_info.csv"), 'w', newline='') as csvfile: csv_utils.write_key_value_file(csvfile, self.user_info) except Exception: logger.exception( "Could not save userdata. Please report this bug!") self.close_info_menu() self.running = False self.menu.read_only = False self.button.status_text = '' self.data = {'pupil_positions': [], 'gaze_positions': []} self.pupil_pos_list = [] self.gaze_pos_list = [] logger.info("Saved Recording.") self.notify_all({ 'subject': 'recording.stopped', 'rec_path': self.rec_path })
def export(self, export_range, export_dir): """ Between in and out mark blink_detection_report.csv: - history lenght - onset threshold - offset threshold blinks.csv: id | start_timestamp | duration | end_timestamp | start_frame_index | index | end_frame_index | confidence | filter_response | base_data """ if not self.g_pool.blinks: logger.warning( "No blinks were detected in this recording. Nothing to export." ) return header = ( "id", "start_timestamp", "duration", "end_timestamp", "start_frame_index", "index", "end_frame_index", "confidence", "filter_response", "base_data", ) start, end = export_range blinks_in_section = [ b for b in self.g_pool.blinks if start <= b["index"] < end ] with open(os.path.join(export_dir, "blinks.csv"), "w", encoding="utf-8", newline="") as csvfile: csv_writer = csv.writer(csvfile) csv_writer.writerow(header) for b in blinks_in_section: csv_writer.writerow( self.csv_representation_for_blink(b, header)) logger.info("Created 'blinks.csv' file.") with open( os.path.join(export_dir, "blink_detection_report.csv"), "w", encoding="utf-8", newline="", ) as csvfile: csv_utils.write_key_value_file( csvfile, { "history_length": self.history_length, "onset_confidence_threshold": self.onset_confidence_threshold, "offset_confidence_threshold": self.offset_confidence_threshold, "blinks_exported": len(blinks_in_section), }, ) logger.info("Created 'blink_detection_report.csv' file.")
def update_meta_info(rec_dir, meta_info): logger.info("Updating meta info") meta_info_path = os.path.join(rec_dir, "info.csv") with open(meta_info_path, "w", newline="", encoding="utf-8") as csvfile: csv_utils.write_key_value_file(csvfile, meta_info)