def spike_detection_job(DatFileNames, ProbeFileName, output_dir, output_name):
    """Top level function that starts a data processing job.

    Parameters
    ----------
    DatFileNames : list of str
        Paths to the raw .dat files; each must exist on disk.
    ProbeFileName : str
        Path to the probe definition file loaded via ``probes.Probe``.
    output_dir : str
        Directory the job runs in (log/debug files are created here).
    output_name : str
        Basename used for the .log / .debug output files.

    Raises
    ------
    Exception
        If any entry of ``DatFileNames`` does not exist.
    """
    # Validate inputs up front so we fail before creating any output files.
    for DatFileName in DatFileNames:
        if not os.path.exists(DatFileName):
            raise Exception("Dat file %s does not exist" % DatFileName)
    DatFileNames = [os.path.abspath(DatFileName)
                    for DatFileName in DatFileNames]

    probe = probes.Probe(ProbeFileName)

    n_ch_dat = Parameters['NCHANNELS']
    sample_rate = Parameters['SAMPLERATE']
    high_frequency_factor = Parameters['F_HIGH_FACTOR']
    set_globals_samples(sample_rate, high_frequency_factor)
    Parameters['CHUNK_OVERLAP'] = int(
        sample_rate * Parameters['CHUNK_OVERLAP_SECONDS'])
    Parameters['N_CH'] = probe.num_channels

    max_spikes = Parameters['MAX_SPIKES']

    basename = basenamefolder = output_name

    # OutDir = join(output_dir, basenamefolder)
    OutDir = output_dir
    with indir(OutDir):
        # Create a log file
        GlobalVariables['log_fd'] = open(basename + '.log', 'w')

        if Parameters['DEBUG']:
            GlobalVariables['debug_fd'] = open(basename + '.debug', 'w')

        Channels_dat = np.arange(probe.num_channels)

        # Print Parameters dictionary to .log file.
        # NOTE: .items() (not the Python-2-only .iteritems()) so this runs
        # on both Python 2 and Python 3.
        log_message("\n".join(["{0:s} = {1:s}".format(key, str(value))
                               for key, value in sorted(Parameters.items())
                               if not key.startswith('_')]))

        spike_detection_from_raw_data(basename, DatFileNames, n_ch_dat,
                                      Channels_dat, probe.channel_graph,
                                      probe, max_spikes)

        numwarn = GlobalVariables['warnings']
        if numwarn:
            log_message(
                'WARNINGS ENCOUNTERED: ' + str(numwarn) +
                ', check log file.')

        # Close the log and debug files at the end (the debug fd was
        # previously leaked when DEBUG was enabled).
        if 'log_fd' in GlobalVariables:
            GlobalVariables['log_fd'].close()
        if 'debug_fd' in GlobalVariables:
            GlobalVariables['debug_fd'].close()
def openmc(self, state):
    """Runs OpenMC for a given state."""
    # Prepare the run directory and write the OpenMC inputs.
    run_dir = self.pwd(state)
    if not os.path.isdir(run_dir):
        os.makedirs(run_dir)
    self._make_omc_input(state)

    # Launch OpenMC only when no statepoint from a previous run is found.
    sp = _find_statepoint(run_dir)
    if sp is None:
        with indir(run_dir):
            subprocess.check_call(['openmc'])
        sp = _find_statepoint(run_dir)

    # Parse the statepoint and build the cross-section table from the
    # group fluxes before handing everything back to the caller.
    keff, flux_g = self._parse_statepoint(sp)
    xs_table = self._generate_xs(flux_g)
    return keff, flux_g, xs_table