def edit_spike_array_params(self, shell=False):
    '''Edit spike array parameters and adjust dig_in_mapping accordingly

    Parameters
    ----------
    shell : bool
        True to use the command-line interface, False (default) for GUI
    '''
    if not hasattr(self, 'dig_in_mapping'):
        # No digital input mapping to adjust; nothing sensible to edit
        self.spike_array_params = None
        return

    sa = deepcopy(self.spike_array_params)
    tmp = userIO.fill_dict(sa, 'Spike Array Parameters\n(Times in ms)',
                           shell=shell)
    if tmp is None:
        # User cancelled the dialog; keep existing parameters
        return

    dim = self.dig_in_mapping
    dim['spike_array'] = False
    if tmp['dig_ins_to_use'] != ['']:
        tmp['dig_ins_to_use'] = [int(x) for x in tmp['dig_ins_to_use']]
        dim.loc[[x in tmp['dig_ins_to_use'] for x in dim.channel],
                'spike_array'] = True

    # BUG FIX: the original reset a spurious 'laser_channels' column while
    # the loop below updates the 'laser' column; reset 'laser' instead so
    # deselected channels are actually cleared
    dim['laser'] = False
    if tmp['laser_channels'] != ['']:
        tmp['laser_channels'] = [int(x) for x in tmp['laser_channels']]
        dim.loc[[x in tmp['laser_channels'] for x in dim.channel],
                'laser'] = True

    self.spike_array_params = tmp.copy()
    wt.write_params_to_json('spike_array_params', self.root_dir, tmp)
def _setup_digital_mapping(self, dig_type, dig_in_names=None, shell=False):
    '''Sets up dig_in_mapping dataframe and queries user to fill in columns

    Parameters
    ----------
    dig_type : str
        'in' or 'out', selecting which digital channel map to build
    dig_in_names : list of str (optional)
        pre-set channel names; queried from user otherwise
    shell : bool (optional)
        True for command-line interface
        False (default) for GUI
    '''
    rec_info = self.rec_info
    df = pd.DataFrame()
    df['channel'] = rec_info.get('dig_%s' % dig_type)

    # Names
    if dig_in_names:
        df['name'] = dig_in_names
    else:
        df['name'] = ''

    # Parameters to query (only meaningful for digital inputs)
    if dig_type == 'in':
        df['palatability_rank'] = 0
        df['laser'] = False
        df['spike_array'] = True

    df['exclude'] = False

    # Re-format for query: label rows by channel for the user dialog
    idx = df.index
    df.index = ['dig_%s_%i' % (dig_type, x) for x in df.channel]
    dig_str = dig_type + 'put'

    # Query for user input
    prompt = ('Digital %s Parameters\nSet palatability ranks from 1 to %i'
              '\nor blank to exclude from pal_id analysis') % (dig_str, len(df))
    tmp = userIO.fill_dict(df.to_dict(), prompt=prompt, shell=shell)
    if tmp is None:
        # User cancelled the dialog; leave existing mappings untouched
        return

    # Reformat for storage
    df2 = pd.DataFrame.from_dict(tmp)
    df2 = df2.sort_values(by=['channel'])
    df2.index = idx
    if dig_type == 'in':
        # Blank ranks mean "exclude from pal_id analysis" -> stored as -1
        df2['palatability_rank'] = df2['palatability_rank'].fillna(
            -1).astype('int')

    if dig_type == 'in':
        self.dig_in_mapping = df2.copy()
    else:
        self.dig_out_mapping = df2.copy()

    if os.path.isfile(self.h5_file):
        # BUG FIX: write the mapping just created; the original always
        # wrote self.dig_in_mapping, even when dig_type == 'out'
        dio.h5io.write_digital_map_to_h5(self.h5_file, df2, dig_type)
def edit_psth_params(self, shell=False):
    '''Interactively edit the PSTH parameters and persist them.

    Parameters
    ----------
    shell : bool (optional)
        True for a command-line interface, False (default) for a GUI
    '''
    new_params = userIO.fill_dict(self.psth_params,
                                  'PSTH Parameters\n(Times in ms)',
                                  shell=shell)
    if not new_params:
        # Dialog cancelled; keep current parameters unchanged
        return

    self.psth_params = new_params
    wt.write_params_to_json('psth_params', self.root_dir, new_params)
def _order_dirs(self, shell=False, order_dict=None):
    '''Set the order of recording directories.

    Parameters
    ----------
    shell : bool (optional)
        True forces the command-line interface; also forced automatically
        when running over an SSH connection
    order_dict : dict (optional)
        mapping of directory name -> ordinal position; if None the user
        is queried for the ordering
    '''
    if 'SSH_CONNECTION' in os.environ:
        # No display available over SSH; fall back to the CLI
        shell = True

    if self.recording_dirs == []:
        return

    if order_dict is None:
        # Strip trailing path separators so basenames are stable keys
        self.recording_dirs = [x[:-1] if x.endswith(os.sep) else x
                               for x in self.recording_dirs]
        top_dirs = {os.path.basename(x): os.path.dirname(x)
                    for x in self.recording_dirs}
        file_dirs = list(top_dirs.keys())
        order_dict = dict.fromkeys(file_dirs, 0)
        # NOTE: removed dead code — the original built an unused
        # userIO.dictIO(...) object here whose result was never read
        order_dict = userIO.fill_dict(order_dict,
                                      ('Set order of recordings (1-%i)\n'
                                       'Leave blank to delete directory'
                                       ' from list') % len(file_dirs),
                                      shell)
        if order_dict is None:
            # User cancelled; keep current ordering
            return

        # Entries left blank (None) or at 0 are dropped from the list
        file_dirs = [k for k, v in order_dict.items()
                     if v is not None and v != 0]
        file_dirs = sorted(file_dirs, key=order_dict.get)
        file_dirs = [os.path.join(top_dirs.get(x), x) for x in file_dirs]
    else:
        file_dirs = sorted(self.recording_dirs, key=order_dict.get)

    self.recording_dirs = file_dirs
def _set_CAR_groups(self, group_keyword=None, shell=False, group_areas=None):
    '''Sets the electrode groups for common average referencing and
    defines which brain region each electrode ended up in

    Parameters
    ----------
    group_keyword : str or int
        Keyword corresponding to a preset electrode grouping in
        CAR_params.json, or integer indicating number of CAR groups
    shell : bool
        True for command-line interface, False (default) for GUI
    group_areas : list of str (optional)
        brain area for each CAR group; queried from user if not given or
        if the length does not match the number of groups

    Raises
    ------
    ValueError
        if electrode_mapping is not set, if no keyword can be obtained,
        or if an existing CAR_params file is empty
    '''
    if not hasattr(self, 'electrode_mapping'):
        raise ValueError('Set electrode mapping before setting CAR groups')

    em = self.electrode_mapping.copy()
    car_param_file = os.path.join(self.root_dir, 'analysis_params',
                                  'CAR_params.json')
    if os.path.isfile(car_param_file):
        tmp = dio.params.load_params('CAR_params', self.root_dir)
        if tmp is not None:
            group_electrodes = tmp
        else:
            raise ValueError(
                'CAR_params file exists in recording dir, but is empty')
    else:
        if group_keyword is None:
            group_keyword = userIO.get_user_input(
                'Input keyword for CAR parameters or number of CAR groups',
                shell=shell)
            if group_keyword is None:
                # BUG FIX: the original constructed this ValueError
                # without raising it, silently continuing with None
                raise ValueError('Must provide a keyword or number of groups')

        # Accept either an int or a numeric string for the group count,
        # matching the documented "str or int" contract
        if isinstance(group_keyword, int) or group_keyword.isnumeric():
            num_groups = int(group_keyword)
            group_electrodes = dio.params.select_CAR_groups(num_groups, em,
                                                            shell=shell)
        else:
            group_electrodes = dio.params.load_params(
                'CAR_params', self.root_dir, default_keyword=group_keyword)

    num_groups = len(group_electrodes)
    if group_areas is not None and len(group_areas) == num_groups:
        # Areas supplied by caller; assign each group in order
        for i, x in enumerate(zip(group_electrodes, group_areas)):
            em.loc[x[0], 'area'] = x[1]
            em.loc[x[0], 'CAR_group'] = i
    else:
        # Query the user for an area label per group
        group_names = ['Group %i' % i for i in range(num_groups)]
        area_dict = dict.fromkeys(group_names, '')
        area_dict = userIO.fill_dict(area_dict, 'Set Areas for CAR groups',
                                     shell=shell)
        for k, v in area_dict.items():
            # int() tolerates the leading space left by the replace
            i = int(k.replace('Group', ''))
            em.loc[group_electrodes[i], 'area'] = v
            em.loc[group_electrodes[i], 'CAR_group'] = i

    self.CAR_electrodes = group_electrodes
    self.electrode_mapping = em.copy()
def port_in_dataset(rec_dir=None, shell=False):
    '''Import an existing dataset into this framework

    Parameters
    ----------
    rec_dir : str (optional)
        recording directory to port; queried from the user if None
    shell : bool (optional)
        True for command-line interface, False (default) for GUI

    Returns
    -------
    dataset or None
        the ported dataset, or None if the user aborts at any prompt
    '''
    if rec_dir is None:
        rec_dir = userIO.get_filedirs('Select recording directory',
                                      shell=shell)
        if rec_dir is None:
            return None

    dat = dataset(rec_dir, shell=shell)

    # Check files that will be overwritten: log_file, save_file
    if os.path.isfile(dat.save_file):
        prompt = '%s already exists. Continuing will overwrite this. Continue?' % dat.save_file
        q = userIO.ask_user(prompt, shell=shell)
        if q == 0:
            print('Aborted')
            return None

    if os.path.isfile(dat.log_file):
        prompt = '%s already exists. Continuing will append to this. Continue?' % dat.log_file
        q = userIO.ask_user(prompt, shell=shell)
        if q == 0:
            print('Aborted')
            return None

    with open(dat.log_file, 'a') as f:
        print('\n==========\nPorting dataset into blechpy format\n==========\n',
              file=f)
        print(dat, file=f)

    status = dat.process_status
    dat.initParams(shell=shell)

    # Let the user mark which processing steps were already completed
    user_status = status.copy()
    user_status = userIO.fill_dict(user_status,
                                   'Which processes have already been '
                                   'done to the data?', shell=shell)
    status.update(user_status)

    # If no h5 file exists (or data not extracted), nothing more to port
    if not os.path.isfile(dat.h5_file) or not status['extract_data']:
        dat.save()
        return dat

    # Write electrode map and digital input & output maps to the h5 file
    dio.h5io.write_electrode_map_to_h5(dat.h5_file, dat.electrode_mapping)
    if dat.rec_info.get('dig_in') is not None:
        dio.h5io.write_digital_map_to_h5(dat.h5_file, dat.dig_in_mapping, 'in')

    if dat.rec_info.get('dig_out') is not None:
        dio.h5io.write_digital_map_to_h5(dat.h5_file, dat.dig_out_mapping,
                                         'out')

    node_list = dio.h5io.get_node_list(dat.h5_file)
    if (not status['create_trial_list']) and ('digital_in' in node_list):
        dat.create_trial_list()

    dat.save()
    return dat