def __init__(self, data_type, root_dir=None, data_name=None, savefile=None,
             logfile=None, shell=False):
    """Set up a data object rooted at a directory.

    Any of root_dir/data_name not supplied are requested from the user
    via userIO prompts; save/log file paths default to files inside
    root_dir named after the data object.

    Parameters
    ----------
    data_type : str
        kind of data object (e.g. 'dataset', 'experiment', 'project')
    root_dir : str (optional)
        root directory for the data; prompted for if not given
    data_name : str (optional)
        name of the data object; defaults to the root directory's basename
    savefile : str (optional)
        path for the pickled object; defaults to <root>/<name>_<type>.p
    logfile : str (optional)
        path for the log; defaults to <root>/<name>_<type>.log
    shell : bool (optional)
        True to use command-line prompts instead of GUI

    Raises
    ------
    NotADirectoryError
        if no valid root directory is provided or selected
    """
    # Over SSH there is no display, so force command-line prompts
    if 'SSH_CONNECTION' in os.environ:
        shell = True

    if root_dir is None:
        root_dir = userIO.get_filedirs('Select %s directory' % data_type,
                                       shell=shell)

    if root_dir is None or not os.path.isdir(root_dir):
        raise NotADirectoryError('Must provide a valid root directory for the %s'
                                 % data_type)

    # Normalize away a single trailing slash so basename() is non-empty
    root_dir = root_dir[:-1] if root_dir.endswith('/') else root_dir

    if data_name is None:
        data_name = userIO.get_user_input('Enter name for %s' % data_type,
                                          os.path.basename(root_dir), shell)

    if savefile is None:
        savefile = os.path.join(root_dir, '%s_%s.p' % (data_name, data_type))

    if logfile is None:
        logfile = os.path.join(root_dir, '%s_%s.log' % (data_name, data_type))

    self.root_dir = root_dir
    self.data_type = data_type
    self.data_name = data_name
    self.save_file = savefile
    self.log_file = logfile
def load_data(data_type, file_dir=None, shell=False):
    '''Loads a data_object .p file and returns the object

    Parameters
    ----------
    data_type : str
        type of data_object extension do you want dataset, experiment or
        object
    file_dir : str (optional)
        path to file dir that the .p file is saved in or path to .p file;
        prompted for if not given
    shell : bool (optional)
        True to use command-line prompts instead of GUI (forced over SSH)

    Returns
    -------
    blechpy.data_object or None
        None if no matching file exists or the user cancels a selection

    Raises
    ------
    NotADirectoryError
        if file_dir is neither a matching .p file nor a directory
    '''
    # Over SSH there is no display, so force command-line prompts
    if 'SSH_CONNECTION' in os.environ:
        shell = True

    if file_dir is None:
        file_dir = userIO.get_filedirs('Select %s directory or .p file' %
                                       data_type, shell=shell)

    if os.path.isfile(file_dir) and f'{data_type}.p' in file_dir:
        # BUG FIX: keep only the basename here. The original stored the
        # full path, so the os.path.join below produced a duplicated
        # directory component for relative paths (join('d', 'd/x.p')).
        data_file = [os.path.basename(file_dir)]
        file_dir = os.path.dirname(file_dir)
    elif not os.path.isdir(file_dir):
        raise NotADirectoryError('%s not found.' % file_dir)
    else:
        data_file = [x for x in os.listdir(file_dir)
                     if x.endswith('%s.p' % data_type)]

    if len(data_file) == 0:
        return None
    elif len(data_file) > 1:
        tmp = userIO.select_from_list('Multiple %s files found.'
                                      'Select the one you want to load.'
                                      % data_type, data_file, shell=shell)
        if tmp is None:
            return None
        else:
            data_file = tmp
    else:
        data_file = data_file[0]

    data_file = os.path.join(file_dir, data_file)
    # NOTE: pickle.load executes arbitrary code; only load trusted files
    with open(data_file, 'rb') as f:
        out = pickle.load(f)

    return out
def _change_root(self, new_root=None):
    """Relocate this object's paths under a new root directory.

    Rewrites root_dir, save_file and log_file by substituting the old
    root prefix with new_root. Prompts for the new location if none is
    given.

    Parameters
    ----------
    new_root : str (optional)
        new location of the data root; prompted for if not given

    Returns
    -------
    str
        the new root directory
    """
    # Over SSH there is no display, so force command-line prompts
    shell = 'SSH_CONNECTION' in os.environ

    if new_root is None:
        new_root = userIO.get_filedirs('Select new location of %s' %
                                       self.root_dir, shell=shell)

    old_root = self.root_dir
    # All three paths share the old root prefix; rewrite each in place
    for attr in ('root_dir', 'save_file', 'log_file'):
        setattr(self, attr, getattr(self, attr).replace(old_root, new_root))

    return new_root
def __init__(self, proj_dir=None, proj_name=None, exp_dirs=None,
             exp_groups=None, params=None, shell=False):
    """Set up a project spanning one or more experiment directories.

    Each experiment directory must contain a loadable experiment .p
    file; experiments are labeled into groups and collected into an
    info table. Analysis parameters are read from (or written to) a
    JSON file under the project root.

    Parameters
    ----------
    proj_dir : str (optional)
        project root directory; prompted for if not given
    proj_name : str (optional)
        name for the project; defaults via the base-class prompt
    exp_dirs : list of str (optional)
        experiment directories to include; prompted for if not given
    exp_groups : list of str (optional)
        group label per experiment; prompted for if not given
    params : dict (optional)
        analysis parameters; if given they are saved to the params JSON,
        otherwise an existing params JSON is loaded
    shell : bool (optional)
        True to use command-line prompts instead of GUI

    Raises
    ------
    FileNotFoundError
        if any experiment directory lacks an experiment .p file
    """
    # Over SSH there is no display, so force command-line prompts
    if 'SSH_CONNECTION' in os.environ:
        shell = True

    # Setup basics
    super().__init__('project', data_name=proj_name, root_dir=proj_dir,
                     shell=shell)

    if exp_dirs is None:
        exp_dirs = userIO.get_filedirs('Select experiment directories',
                                       multi=True, shell=shell)

    # Validate every experiment dir up front by loading its .p file
    exp_names = []
    for ed in exp_dirs:
        exp = load_experiment(ed)
        if exp is None:
            raise FileNotFoundError('No experiment.p file found for %s' % ed)

        exp_names.append(exp.data_name)

    # Get experiment groups
    if exp_groups is None:
        exp_groups = userIO.get_labels(exp_names, 'Label Experiment Groups')

    # One row per experiment: name, group label, directory
    self._exp_info = pd.DataFrame({'exp_name': exp_names,
                                   'exp_group': exp_groups,
                                   'exp_dir': exp_dirs})

    # Make list of all major files managed by this object
    self._files = {'params': os.path.join(self.root_dir,
                                          self.data_name +
                                          '_analysis_params.json')}

    # Check which files exist
    status = self._file_check()
    if status['params'] and params is None:
        # Existing params file and none supplied: load from disk
        self._params = wt.read_dict_from_json(self._files['params'])
    elif params is not None:
        # Supplied params take precedence and are persisted immediately
        self._params = params
        wt.write_dict_to_json(params, self._files['params'])
    else:
        # TODO: Load defaults and allow user edit
        # NOTE(review): when neither file nor argument exists, _params is
        # left unset here — confirm downstream code tolerates that
        pass

    self.save()
def add_recording(self, new_dir=None, shell=None):
    '''Add recording directory to experiment

    The directory must already contain a data .h5 file and a processed
    *_dataset.p file. The user is asked for a label for the recording,
    which is stored in rec_labels; dirs are re-ordered, the taste map is
    rebuilt and the experiment is saved.

    Parameters
    ----------
    new_dir : str (optional)
        full path to new directory to add to recording dirs
    shell : bool (optional)
        True for command-line interface for user input
        False (default) for GUI
        If not passed then the preference set upon object creation is used

    Raises
    ------
    NotADirectoryError
        if new_dir is not a valid directory
    FileNotFoundError
        if new_dir lacks a .h5 file or a *_dataset.p file
    '''
    # Over SSH there is no display, so force command-line prompts
    if 'SSH_CONNECTION' in os.environ:
        shell = True
    elif shell is None:
        shell = False

    if new_dir is None:
        # typo fix: prompt previously read 'Select recoring directory'
        new_dir = userIO.get_filedirs('Select recording directory',
                                      root=self.root_dir, shell=shell)

    if not os.path.isdir(new_dir):
        raise NotADirectoryError('%s must be a valid directory' % new_dir)

    # One listdir is enough for both required-file checks
    dir_contents = os.listdir(new_dir)
    if not any(x.endswith('.h5') for x in dir_contents):
        raise FileNotFoundError('No .h5 file found in %s' % new_dir)

    if not any(x.endswith('dataset.p') for x in dir_contents):
        raise FileNotFoundError('*_dataset.p file not found in %s' % new_dir)

    # Strip a trailing slash so basename() below is non-empty
    if new_dir.endswith('/'):
        new_dir = new_dir[:-1]

    label = userIO.get_user_input('Enter label for recording %s' %
                                  os.path.basename(new_dir), shell=shell)

    self.recording_dirs.append(new_dir)
    self.rec_labels[label] = new_dir
    self._order_dirs(shell=shell)
    self._setup_taste_map()
    # BUG FIX: original printed the literal '%s' — the directory name was
    # never interpolated into the message
    print('Added recording: %s' % new_dir)
    self.save()
def port_in_dataset(rec_dir=None, shell=False):
    '''Import an existing dataset into this framework

    Builds a dataset object for rec_dir, confirms overwriting/appending
    of its save and log files with the user, logs the port, asks the
    user which processing steps are already done, and (if data was
    already extracted into the .h5) writes the electrode and digital
    I/O maps into the .h5 and optionally builds the trial list.

    Parameters
    ----------
    rec_dir : str (optional)
        recording directory to import; prompted for if not given
    shell : bool (optional)
        True to use command-line prompts instead of GUI

    Returns
    -------
    blechpy.dataset or None
        None if the user cancels at any confirmation prompt
    '''
    if rec_dir is None:
        rec_dir = userIO.get_filedirs('Select recording directory',
                                      shell=shell)

    # User cancelled the directory selection
    if rec_dir is None:
        return None

    dat = dataset(rec_dir, shell=shell)

    # Check files that will be overwritten: log_file, save_file
    if os.path.isfile(dat.save_file):
        prompt = '%s already exists. Continuing will overwrite this. Continue?' % dat.save_file
        q = userIO.ask_user(prompt, shell=shell)
        if q == 0:
            print('Aborted')
            return None

    # if os.path.isfile(dat.h5_file):
    #     prompt = '%s already exists. Continuinlg will overwrite this. Continue?' % dat.h5_file
    #     q = userIO.ask_user(prompt, shell=shell)
    #     if q == 0:
    #         print('Aborted')
    #         return None

    if os.path.isfile(dat.log_file):
        prompt = '%s already exists. Continuing will append to this. Continue?' % dat.log_file
        q = userIO.ask_user(prompt, shell=shell)
        if q == 0:
            print('Aborted')
            return None

    # Mark the start of the port in the dataset's log
    with open(dat.log_file, 'a') as f:
        print('\n==========\nPorting dataset into blechpy format\n==========\n',
              file=f)
        print(dat, file=f)

    status = dat.process_status
    dat.initParams(shell=shell)

    # Let the user declare which processing steps were already completed
    user_status = status.copy()
    user_status = userIO.fill_dict(user_status,
                                   'Which processes have already been '
                                   'done to the data?', shell=shell)
    status.update(user_status)

    # if h5 exists data must have been extracted
    if not os.path.isfile(dat.h5_file) or status['extract_data'] == False:
        # Nothing extracted yet: save the object and stop here
        dat.save()
        return dat

    # write eletrode map and digital input & output maps to hf5
    dio.h5io.write_electrode_map_to_h5(dat.h5_file, dat.electrode_mapping)
    if dat.rec_info.get('dig_in') is not None:
        dio.h5io.write_digital_map_to_h5(dat.h5_file, dat.dig_in_mapping,
                                         'in')

    if dat.rec_info.get('dig_out') is not None:
        dio.h5io.write_digital_map_to_h5(dat.h5_file, dat.dig_out_mapping,
                                         'out')

    # Only build the trial list when digital inputs exist in the .h5
    node_list = dio.h5io.get_node_list(dat.h5_file)
    if (status['create_trial_list'] == False) and ('digital_in' in node_list):
        dat.create_trial_list()

    dat.save()
    return dat