def raw_filenames_strainsurvey(exp_name, mouseNumber, day, round_number):
    # search event files first
    path = os.path.join(datadir(exp_name), 'EventFiles/EventFiles_SSe1r%d' % round_number)
    regex_path = "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" + "e1r%dd%01d" % (round_number, day)
    subdir = list()
    for dirname in os.listdir(path):
        if re.search(regex_path, dirname) is not None:
            subdir.append(dirname)
    assert len(subdir) == 1
    subdir, = subdir
    searchpath = os.path.join(path, subdir)
    extensions = ["AM", "LE", "ME", "PE"]
    result = [None, None, None, None]
    regex = "[0-9][0-9][0-9][0-9]%04d\.%%s" % int(mouseNumber)
    for filename in os.listdir(searchpath):
        for i, ext in enumerate(extensions):
            if re.search(regex % ext, filename) is not None:
                result[i] = os.path.join(searchpath, filename)
    # search am file
    assert result[0] is None  # no AM file found so far
    path = os.path.join(datadir(exp_name), 'AMFiles/AMFiles_SSe1r%d' % round_number)
    searchpath = os.path.join(path, subdir)
    for filename in os.listdir(searchpath):
        if re.search(regex % "AM", filename) is not None:
            result[0] = os.path.join(searchpath, filename)
    return result

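# Illustrative note (not part of the original module): regex_path and regex above match
# HCM round/day directories and per-mouse event files. Under those patterns the names
# look roughly like the following; the concrete digits are hypothetical examples only:
#
#   EventFiles/EventFiles_SSe1r1/20090507e1r1d5/      <- 8 digits + "e1r<round>d<day>"
#   .../xxxx0123.AM, xxxx0123.LE, xxxx0123.ME, xxxx0123.PE
#                                                     <- 4 digits + zero-padded mouse number + extension
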
def path_to_binary(self, subdir=''):
    """Returns path for npy files (files generated by preprocessing raw data) in:
    /binary/<exp_name>/<subdir>/
    """
    path = os.path.join(datadir(self.name), 'binary', subdir)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path

def raw_filenames_2cd1(exp_name, mouseNumber, day):
    path = os.path.join(datadir(exp_name), '{}_DayFiles/'.format(exp_name))
    regex_path = "^Day{:01d}".format(day)
    subdir = list()
    for dirname in os.listdir(path):
        if re.search(regex_path, dirname) is not None:
            subdir.append(dirname)
    # assert len(subdir) == 1
    try:
        assert len(subdir) == 1
        subdir, = subdir
    except AssertionError:
        subdir = subdir[0]  # workaround: use only first directory
    searchpath = os.path.join(path, subdir)
    extensions = ["AM", "LE", "ME", "PE"]
    result = [None, None, None, None]
    regex = "[0-9][0-9][0-9][0-9]%04d\.%%s" % int(mouseNumber)
    for filename in os.listdir(searchpath):
        for i, ext in enumerate(extensions):
            if re.search(regex % ext, filename) is not None:
                result[i] = os.path.join(searchpath, filename)
    # search am file
    assert result[0] is not None  # AM file found
    return result

def raw_filenames(exp_name, mouseNumber, day, round_number=None):
    """Returns [AM, LE, ME, PE] raw filenames for the given mouse and day."""
    result = None
    if exp_name in ['StrainSurvey', '2cD1A2aCRE', '2cD1A2aCRE2']:
        if exp_name == 'StrainSurvey':
            result = raw_filenames_strainsurvey(exp_name, mouseNumber, day, round_number)
        elif exp_name.startswith('2cD1A2aCRE'):
            result = raw_filenames_2cd1(exp_name, mouseNumber, day)
    else:
        extensions = ["AM", "LE", "ME", "PE"]
        result = [None, None, None, None]
        regex = "[0-9][0-9][0-9][0-9]%04d\.%%s" % int(mouseNumber)
        path = None
        if exp_name == '2CFast':
            path = os.path.join(datadir(exp_name),
                                '2CFast_DayFiles/2CFAST_HCMe1r1_D{:02d}/'.format(day))
        elif exp_name == '1ASTRESS':
            path = os.path.join(datadir(exp_name),
                                '1ASTRESS_DayFiles/Day{:01d}/'.format(day))
        elif exp_name == 'Stress_HCMe1r1':
            path = os.path.join(datadir(exp_name),
                                'Stress_DayFiles/StressHCMe1r1d{:01d}/'.format(day))
        elif exp_name == 'CORTTREAT':
            path = os.path.join(datadir(exp_name),
                                'CORTTREAT_DayFiles/CORTTREAT_HCMe1r1_d{:02d}/'.format(day))
        elif exp_name == 'HiFat2':
            path = os.path.join(datadir(exp_name),
                                'HFD2_DayFiles/HFD2_HCMe2r1_d{:01d}/'.format(day))
        elif exp_name == 'HiFat1':
            path = os.path.join(datadir(exp_name),
                                'Round%d/HFD_DayFiles/HFDe1r%dd%01d/' % (round_number, round_number, day))
        elif exp_name.startswith('WR'):
            path = os.path.join(datadir(exp_name),
                                '%s_DayFiles/Day%01d/' % (exp_name, day))
        for filename in os.listdir(path):
            for i, ext in enumerate(extensions):
                if re.search(regex % ext, filename) is not None:
                    result[i] = os.path.join(path, filename)
    return result

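# Hedged usage sketch (the experiment name, mouse number and day below are illustrative,
# not taken from the original module). raw_filenames returns the paths in [AM, LE, ME, PE]
# order, with None for any file type that was not found:
#
#   am_file, le_file, me_file, pe_file = raw_filenames(
#       'StrainSurvey', mouseNumber=123, day=5, round_number=1)
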
def create_logger(args, days, logconfigfilename, exp_name):
    """Creates preprocessing log file. """
    import datetime
    import logging.config
    log_dir = os.path.join(file_utils.datadir(exp_name), "logs")
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    text = args.bin_type or args.timepoint or ""
    now = datetime.datetime.now()
    today_now = "{}{:02d}{:02d}_h{:02d}{:02d}".format(
        now.year, now.month, now.day, now.hour, now.minute)
    logfilename = "{}_preprocessing_{}{}_{}.log".format(
        args.name, args.akind, text, today_now)
    logfilename = os.path.join(log_dir, logfilename)
    logconfigfilename = os.path.join(file_utils.repo_dir(), logconfigfilename)
    logging.config.fileConfig(logconfigfilename, defaults=dict(logfilename=logfilename))
    log = logging.getLogger(__name__)
    log.info("Experiment {}, days: {}".format(args.name, days))
    return log, logfilename

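# Hedged usage sketch (the attribute names come from the function body; the values and
# the config filename are illustrative assumptions). Because the log filename is passed
# via the defaults mapping, the logging config file can reference %(logfilename)s in the
# args of its file handler:
#
#   from argparse import Namespace
#   args = Namespace(name='StrainSurvey', akind='position', bin_type='12bins', timepoint=None)
#   log, logfile = create_logger(args, days=[5, 6, 7],
#                                logconfigfilename='log.cfg', exp_name=args.name)
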
def read_msi_files(exp_name, savepath):
    """Returns MSI data parsed from the given experiment; caches the result as a cPickle object. """
    import cPickle as pickle
    filename = os.path.join(savepath, "{}_msi_data.p".format(exp_name))
    try:
        with open(filename, 'rb') as fp:
            res = pickle.load(fp)
    except IOError:
        path = file_utils.datadir(exp_name)
        msi_filenames = find_msi_files(path)
        print "reading MSI file/s:"
        group_file = find_group_file(path)
        groups = parse_group_file(group_file)
        res = {}
        for fname in msi_filenames:
            print fname
            with open(fname) as msifile:
                for rd in parse_msi_file(msifile):
                    accumulate_results_(rd, groups, res)
        res = check_groups_for_names(res, path)
        with open(filename, 'wb') as fp:
            pickle.dump(res, fp, protocol=pickle.HIGHEST_PROTOCOL)
        print "msi dict file saved to:\n{}".format(filename)
    return res

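# Hedged usage sketch (the savepath choice is an assumption, not from the original module).
# The first call parses the raw MSI files and caches the result as a pickle; subsequent
# calls load the cached dictionary directly:
#
#   msi_data = read_msi_files('StrainSurvey', savepath=file_utils.datadir('StrainSurvey'))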