def load_info_dat(folderpath):
    """Load and parse the ``info.dat`` metadata file of a recording folder.

    Parameters
    ----------
    folderpath : list of str
        Path components of the recording folder, relative to the global
        data path (joined with the module-level ``glob_data_path``).

    Returns
    -------
    The parsed contents of ``info.dat`` as returned by ``pyre_load``.

    Raises
    ------
    FileNotFoundError
        If the folder contains no ``info.dat``.
    """
    info_file = os.path.join(*(glob_data_path + folderpath + ['info.dat']))
    if not os.path.exists(info_file):
        # Raise instead of print-and-exit() so callers can handle the
        # failure and get a proper traceback pointing at the bad path.
        raise FileNotFoundError('ERROR: no info.dat - ' + info_file)
    return pyre_load(info_file)
def load_traces_dat(folderpath, filename):
    """Load a relacs ``*-traces.dat`` file and split it into trials.

    Parameters
    ----------
    folderpath : list of str
        Path components of the recording folder, relative to the global
        data path (joined with the module-level ``glob_data_path``).
    filename : str
        Name of the traces file, e.g. ``'transferfunction-traces.dat'``.

    Returns
    -------
    tuple (metadata, traces)
        ``metadata``: list of the per-trial metadata blocks (everything in
        a data block except the final raw-sample section).
        ``traces``: list of 2-D float arrays, one per trial, transposed so
        that channels lie along axis 0 and samples along axis 1.

    Raises
    ------
    FileNotFoundError
        If the traces file does not exist.
    """
    traces_file = os.path.join(*(glob_data_path + folderpath + [filename]))
    if not os.path.exists(traces_file):
        # Raise instead of print-and-exit(); callers can decide how to react.
        raise FileNotFoundError('ERROR: no *-traces.dat - ' + traces_file)

    metadata = []
    traces = []
    for n, trace_content in enumerate(pyre_load(traces_file).data_blocks()):
        # BUGFIX: progress message typo ('Loadings' -> 'Loading').
        print('Loading trial', n + 1, '...')
        metadata.append(trace_content[:-1])
        # The last element of the block holds the whitespace-separated
        # sample rows; parse to float and transpose to channels-first.
        traces.append(asarray([row.split() for row in trace_content[-1]],
                              dtype=float).transpose())
    return metadata, traces
def digest_rawdata(self, dat_file, sd_params=None):
    """Compute per-trial auto- and cross-spectra for all unprocessed entries.

    Iterates over every row of ``self.data`` not yet marked 'digested',
    loads the raw traces from *dat_file* inside that row's data folder,
    estimates Pxx/Pyy/Pxy/Pyx for each trial with matplotlib's psd/csd,
    stores the stacked spectra plus metadata back via ``self.add_data``,
    marks the row as digested and persists the DataFrame after each entry.

    Parameters
    ----------
    dat_file : str
        Filename of the traces file inside each data folder.
    sd_params : dict, optional
        Keyword arguments for ``ml.psd`` / ``ml.csd``. Defaults to
        NFFT=2048 with 50% overlap. The caller's dict is not modified.
    """
    if sd_params is None:
        nfft = 2 ** 11
        # BUGFIX: integer division — 'noverlap' must be an int
        # (nfft / 2 yields a float under Python 3).
        sd_params = {'NFFT': nfft, 'noverlap': nfft // 2}
    # BUGFIX: work on a copy so setting 'Fs' below does not mutate a
    # dict passed in by the caller.
    sd_params = dict(sd_params)

    # Iterate through all folders that have not already been processed.
    cond = self.data.loc[:, 'digested'] == False
    for rowidx, data_folder, data_dir in zip(self.data.index[cond],
                                             self.data.data_folder[cond],
                                             self.data.data_dir[cond]):
        print('Entry', rowidx, '-', data_folder, '...')
        data_path = data_dir + [data_folder]

        # Get metadata. BUGFIX: bind rec_info to None when info.dat is
        # missing — previously it stayed undefined and the row_content
        # dict below raised a NameError.
        info_path = os.path.join(*data_path, 'info.dat')
        if os.path.exists(info_path):
            rec_info = pyre_load(info_path)
        else:
            print('Info.dat missing')
            rec_info = None

        # Load recordings.
        print(dat_file)
        transfer_file = os.path.join(*data_path, dat_file)
        if not os.path.exists(transfer_file):
            print('File missing')
            exit()

        Pxxs = []
        Pxys = []
        Pyys = []
        Pyxs = []
        # BUGFIX: pre-bind loop results so a file with zero data blocks
        # does not raise NameError on 'f'/'trial_data' after the loop.
        freqs = None
        trialmeta = None

        # Iterate through trials.
        transfer_data = pyre_load(transfer_file)
        for trial_idx, trial_data in enumerate(transfer_data.data_blocks()):
            print('Trial', trial_idx)

            # Load traces for this trial (channels-first after transpose).
            traces = asarray([row.split() for row in trial_data[-1]],
                             dtype=float).transpose()

            # Sampling rate from the time channel; the 1000 factor
            # presumably converts ms timestamps to Hz — TODO confirm.
            sr = round(1000. / mean(diff(traces[0, :])))
            output = traces[1, :]
            output -= mean(output)
            response = traces[2, :]
            response -= mean(response)

            # Spectral estimates for this trial.
            sd_params['Fs'] = sr
            Pxx, _ = ml.psd(output, **sd_params)
            Pyy, _ = ml.psd(response, **sd_params)
            Pxy, _ = ml.csd(output, response, **sd_params)
            Pyx, freqs = ml.csd(response, output, **sd_params)

            Pxxs.append(Pxx)
            Pyys.append(Pyy)
            Pxys.append(Pxy)
            Pyxs.append(Pyx)
            # NOTE: as in the original, only the last trial's metadata
            # block is kept.
            trialmeta = [trial_data[0]]

            # Free memory.
            del Pxx, Pyy, Pxy, Pyx, traces, output, response
            gc.collect()

        # Generate new dictionary containing spectra and metadata of all
        # trials.
        row_content = dict(
            Pxxs=asarray(Pxxs),
            Pyys=asarray(Pyys),
            Pxys=asarray(Pxys),
            Pyxs=asarray(Pyxs),
            freqs=freqs,
            trialmeta=trialmeta,
            metadata=rec_info
        )

        # Add to DataFrame.
        self.add_data(rowidx, row_content)

        # Free memory.
        del Pxxs, Pyys, Pxys, Pyxs, freqs, transfer_data, rec_info
        gc.collect()

        # Mark dataset as processed.
        self.add_data(rowidx, dict(digested=True))

        # Save to file after every entry so progress survives a crash.
        self.data_to_file()
years = ['2015'] #, '2016'] folder_list = [] for year in years: new_folders = glob( os.path.join(*(glob_data_path + [year] + [year + '*']))) folder_list.extend([folder.split(os.sep) for folder in new_folders]) data = dict() entry_num = len(folder_list) for idx, folderpath in enumerate(folder_list): print('Entry', idx + 1, '/', entry_num, ' - Processing ', os.path.join(*folderpath), '...') info = pyre_load( os.path.join(*(glob_data_path + folderpath[-2:] + ['info.dat']))) metadata, traces = load_traces_dat(folderpath[-2:], 'transferfunction-traces.dat') Pxxs = [] Pyys = [] Pxys = [] Pyxs = [] Cxys = [] print('Processing trials ...') for t in traces: # get recordings sr = round(1000. / np.mean(np.diff(t[0, :]))) x = t[1, :] y = t[2, :]