	# merge all dataframes
	df_joined = df_actigraph_acc.join(df_true_non_wear_time, how='outer').join(df_actiwave_acc, how='outer').join(df_hecht_3_non_wear_time, how='outer') \
		.join(df_troiano_non_wear_time, how='outer').join(df_choi_non_wear_time, how='outer').join(df_hees_non_wear_time, how='outer').join(df_epoch_60_vmu, how='outer').join(df_actiwave_hr, how='outer')

	# call plot function
	plot_non_wear_algorithms(data = df_joined, subject = subject, plot_folder = plot_folder)


if __name__ == "__main__":

	# start timer and memory counter
	tic, process, logging = set_start()

	# 1) batch process Hecht 2009 non-wear method
	batch_process_non_wear_algorithm(algorithm = process_hecht_2009_triaxial)

	# # 2) batch process Troiano 2007 non-wear method
	# batch_process_non_wear_algorithm(algorithm = process_troiano_2007)

	# # 3) batch process Choi 2011 non-wear method
	# batch_process_non_wear_algorithm(algorithm = process_choi_2011)

	# # 4) batch process Hees 2013 non-wear method
	# batch_process_non_wear_algorithm(algorithm = process_hees_2013)

	# # 5) batch process the plotting of the non-wear algorithms including true non-wear time
	# batch_process_plot_non_wear_algorithms()

	# print time and memory
	set_end(tic, process)
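
# --------------------------------------------------------------------------------------
# illustrative sketch (not part of the original pipeline): the merge above chains pandas
# DataFrame.join calls with how='outer', which aligns all dataframes on their shared
# (timestamp) index and keeps every row that appears in at least one of them, filling
# gaps with NaN. the toy function and column names below are hypothetical and only
# demonstrate that join behaviour.
def _example_outer_join():

	import pandas as pd

	# two toy dataframes whose one-second indices only partially overlap
	idx_a = pd.date_range('2020-01-01 00:00:00', periods=3, freq='s')
	idx_b = pd.date_range('2020-01-01 00:00:01', periods=3, freq='s')
	df_a = pd.DataFrame({'acc': [0.1, 0.2, 0.3]}, index=idx_a)
	df_b = pd.DataFrame({'hr': [60, 61, 62]}, index=idx_b)

	# outer join keeps the union of both indices; missing values become NaN
	return df_a.join(df_b, how='outer')
# --------------------------------------------------------------------------------------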
def process_gt3x_file(f, i=1, total=1, hdf5_save_location=HDF5_SAVE, delete_zip_folder=True):
	"""
	Process .gt3x file
	- unzip into log.bin and info.txt
	- extract information from info.txt
	- extract information from log.bin
	- save data to hdf5 file

	Parameters
	----------
	f : string
		file location of the .gt3x file
	i : int (optional)
		index of the file being processed; used to display a progress counter (for example, processing 12/20). Default = 1.
	total : int (optional)
		total number of files to be processed; used to display a progress counter (for example, processing 12/20). Default = 1.
	hdf5_save_location : os.path
		folder location where to save the extracted acceleration data to.
	delete_zip_folder : bool (optional)
		if True, delete the unzipped folder after processing. Default = True.
	"""

	logging.debug('Processing GT3X binary file: {} {}/{}'.format(f, i + 1, total))

	# unzip the raw .gt3x file: this will provide a log.bin and info.txt file
	# the save_location is a new folder with the same name as the .gt3x file
	log_bin, info_txt = unzip_gt3x_file(f, save_location=f.split('.')[0])

	# check if unzipping went ok
	if log_bin is not None:

		# print verbose
		logging.debug('log.bin location: {}'.format(log_bin))
		logging.debug('info.txt location: {}'.format(info_txt))

		# get info data from info file
		info_data = extract_info(info_txt)

		# check if subject name could be read from the binary file
		if info_data['Subject_Name'] != "":

			# check if subject ID has already been processed
			if info_data['Subject_Name'] not in get_all_subjects_hdf5(hdf5_file=hdf5_save_location):

				# retrieve log_data, i.e. the accelerometer data, and log_time, the timestamps of the acceleration data
				log_data, log_time = extract_log(log_bin, acceleration_scale=float(info_data['Acceleration_Scale']), sample_rate=int(info_data['Sample_Rate']))

				# check if log data is not None (None means something went wrong while reading the binary file)
				if log_data is not None:

					# save log_data to HDF5 file
					save_data_to_group_hdf5(group=info_data['Subject_Name'], data=log_data, data_name='log', meta_data=info_data, overwrite=True, hdf5_file=hdf5_save_location)

					# save log_time data to HDF5 file
					save_data_to_group_hdf5(group=info_data['Subject_Name'], data=log_time, data_name='time', meta_data=info_data, overwrite=True, hdf5_file=hdf5_save_location)

				else:
					logging.error('Unable to convert .gt3x file: {} (subject {})'.format(f, info_data['Subject_Name']))
			else:
				logging.info('Subject name already defined as group in HDF5 file: {}, skipping..'.format(info_data['Subject_Name']))
		else:
			logging.error('Unable to read subject from info.txt file, skipping file: {}'.format(f))
	else:
		logging.error('Error unzipping file: {}'.format(f))

	# delete the created zip folder
	if delete_zip_folder:
		delete_directory(f.split('.')[0])
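
# --------------------------------------------------------------------------------------
# illustrative sketch (not part of the original code): a minimal driver showing how
# process_gt3x_file could be called for every .gt3x file in a folder. the folder path,
# the function name and the use of glob/enumerate are assumptions for the example only.
def _example_batch_process_gt3x(gt3x_folder='/path/to/gt3x_files'):

	import glob
	import os

	# collect all .gt3x files in the (hypothetical) folder
	gt3x_files = sorted(glob.glob(os.path.join(gt3x_folder, '*.gt3x')))

	# process each file; enumerate provides the zero-based counter used for the progress log
	for i, f in enumerate(gt3x_files):
		process_gt3x_file(f, i=i, total=len(gt3x_files))
# --------------------------------------------------------------------------------------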