def _read_log(self, path):
    """
    Returns the results of the :file:`penepma-res.dat` file as a
    :class:`dict` where the keys are the names of the reported quantities
    and the values are ``(value, uncertainty)`` tuples.

    :arg path: directory containing the simulation files
    """
    filepath = os.path.join(path, 'penepma-res.dat')
    if not os.path.exists(filepath):
        raise ImporterException("Data file %s cannot be found" % filepath)

    log = {}

    with open(filepath, 'r') as fp:
        for line in fp:
            line = line.strip()

            match = re.match(r'([^.]*) [\.]+ ([^ ]*)(?: \+\- )?([^ ]*)?', line)
            if not match:
                continue

            name = match.group(1).strip()
            val = float(match.group(2))
            unc = float(match.group(3) or 0.0)
            log[name] = (val, unc)

    return log

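# Illustrative penepma-res.dat line matched by the regular expression in
# _read_log above. The label, spacing and numbers are assumptions inferred
# from the pattern, not copied from a real PENEPMA output file:
#
#   Simulation time ......................  1.234560e+03 +- 5.00e-01
#
# would yield log['Simulation time'] == (1234.56, 0.5).
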
def _import_trajectory(self, options, key, detector, dirpath, *args, **kwargs):
    filepath = os.path.join(dirpath, 'pe-trajectories.dat')
    if not os.path.exists(filepath):
        raise ImporterException("Data file %s cannot be found" % filepath)

    trajectories = {}

    index = 0
    primary = None
    particle = None
    collision = None
    exit_state = None
    interactions = []

    with open(filepath, 'r') as fp:
        for line in fp:
            line = line.strip()
            if not line or line.startswith("#"):
                continue

            if line == '0' * 80:  # separator between trajectories
                if index <= 0:  # no trajectory read yet
                    continue

                traj = Trajectory(primary, particle, collision,
                                  exit_state, np.array(interactions))
                trajectories[index] = traj

                # Reset the state for the next trajectory
                primary = None
                particle = None
                collision = None
                exit_state = None
                interactions = []
            elif line == '1' * 80:  # end of trajectory header
                continue
            elif line.startswith('TRAJ'):
                index = int(line.split()[1])
            elif line.startswith('KPAR'):
                particle = _PARTICLES_REF[int(line.split()[1])]
            elif line.startswith('PARENT'):
                primary = int(line.split()[1]) == 0
            elif line.startswith('ICOL'):
                collision = _COLLISIONS_REF[particle].get(
                    int(line.split()[1]), NO_COLLISION)
            elif line.startswith('EXIT'):
                exit_state = int(line.split()[1])
            else:
                values = line.split()
                x = float(values[0]) * 0.01  # cm to m
                y = float(values[1]) * 0.01  # cm to m
                z = float(values[2]) * 0.01  # cm to m
                e = float(values[3])
                c = int(_COLLISIONS_REF[particle].get(
                    int(values[6]), NO_COLLISION))
                interactions.append([x, y, z, e, c])

    return TrajectoryResult(trajectories.values())

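# A minimal illustration of the interactions array built in
# _import_trajectory above. Columns are x, y, z in metres, the energy as
# read from the file (eV in PENELOPE's convention) and the collision
# identifier; the numbers below are made up:
#
#   interactions = np.array([
#       [0.0, 0.0, 0.0, 15e3, 0],       # entry point
#       [1e-7, -2e-7, -5e-7, 12e3, 2],  # collision 0.5 um below surface
#   ])
#   interactions.shape  # (2, 5), one row per interaction
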
def _import_transmitted_electron_energy(self, options, key, detector, path, *args):
    filepath = os.path.join(path, 'pe-energy-el-down.dat')
    if not os.path.exists(filepath):
        raise ImporterException("Data file %s cannot be found" % filepath)

    # Load distributions
    bins, vals, uncs = _load_dat_files(filepath)
    data = np.array([bins, vals, uncs]).T

    return TransmittedElectronEnergyResult(data)

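# _load_dat_files is a module-level helper (not shown here). A minimal
# sketch of the behaviour assumed by its callers: '#'-prefixed comment
# lines are skipped and each remaining row holds three numeric columns
# (abscissa, value, uncertainty). The actual helper may handle more cases:
#
#   def _load_dat_files(filepath):
#       bins, vals, uncs = [], [], []
#       with open(filepath, 'r') as fp:
#           for line in fp:
#               line = line.strip()
#               if not line or line.startswith('#'):
#                   continue
#               values = line.split()
#               bins.append(float(values[0]))
#               vals.append(float(values[1]))
#               uncs.append(float(values[2]))
#       return bins, vals, uncs
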
def _import_photon_spectrum(self, options, key, detector, path,
                            phdets_key_index, phdets_index_keys, *args):
    index = phdets_key_index[key] + 1

    # Find data files
    spect_filepath = os.path.join(path, 'pe-spect-%s.dat' % str(index).zfill(2))
    if not os.path.exists(spect_filepath):
        raise ImporterException("Data file %s cannot be found" % spect_filepath)

    # Load total spectrum
    energies, total_val, total_unc = _load_dat_files(spect_filepath)
    total = np.array([energies, total_val, total_unc]).T

    # Generate fake background
    background = np.zeros(total.shape)
    background[:, 0] = energies

    return PhotonSpectrumResult(total, background)

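# Both arrays passed to PhotonSpectrumResult are (n_channels, 3) arrays of
# (energy, value, uncertainty) rows. The fake background shares the energy
# column but is otherwise zero, e.g. (made-up numbers):
#
#   total      = [[100.0, 5.2e-6, 1.1e-7], [200.0, 4.8e-6, 1.0e-7], ...]
#   background = [[100.0, 0.0,    0.0   ], [200.0, 0.0,    0.0   ], ...]
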
def _import_photon_intensity(self, options, name, detector, jobdir):
    intensities_filepath = os.path.join(jobdir, 'intensities_%s.csv' % name)
    if not os.path.exists(intensities_filepath):
        raise ImporterException('Result file "intensities_%s.csv" not found in job directory (%s)' %
                                (name, jobdir))

    intensities = {}

    with open(intensities_filepath, 'r') as fp:
        reader = csv.DictReader(fp, delimiter=';')

        try:
            row = next(reader)
        except StopIteration:
            row = {}

        for transition, intensity in row.items():
            transition = from_string(transition.strip())
            enf = (float(intensity.strip().replace(',', '.')), 0.0)  # FIXME: Hack to handle locale problem
            intensities[PhotonKey(transition, True, PhotonKey.P)] = enf

    return PhotonIntensityResult(intensities)

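# Expected layout of intensities_<name>.csv, inferred from the DictReader
# usage above (';' delimiter, a header row of transition names and a
# single data row; decimal commas may appear, hence the locale hack). The
# transition labels and values are hypothetical:
#
#   Si Ka1;Si Kb1
#   1,234e-3;5,678e-4
#
#   float('1,234e-3'.replace(',', '.'))  # -> 0.001234
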
def _import_phi_z(self, options, name, detector, jobdir):
    prz_filepath = os.path.join(jobdir, 'phi_%s.csv' % name)
    if not os.path.exists(prz_filepath):
        raise ImporterException('Result file "phi_%s.csv" not found in job directory (%s)' %
                                (name, jobdir))

    with open(prz_filepath, 'r') as fp:
        reader = csv.reader(fp, delimiter=';')
        header = next(reader)

        data = {}
        for row in reader:
            for i, val in enumerate(row):
                data.setdefault(header[i], []).append(float(val.replace(',', '.')))  # FIXME: Hack to handle locale problem

    rzs = np.array(data.pop('rho z'))

    distributions = {}
    for transition, values in data.items():
        transition = from_string(transition.strip())
        enf = np.array([rzs, values]).transpose()
        distributions[PhotonKey(transition, True, PhotonKey.P)] = enf

    return PhiZResult(distributions)

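# Assumed layout of phi_<name>.csv: a ';'-delimited table whose first
# column is 'rho z' (mass depth) and whose remaining columns hold one
# phi(rho z) distribution per transition, e.g. (hypothetical values):
#
#   rho z;Si Ka1
#   0,0;1,02
#   0,1;0,87
#
# Each distribution is then stored as an (n, 2) array of (rho z, value).
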
def _import_photon_intensity(self, options, key, detector, path,
                             phdets_key_index, phdets_index_keys, *args):
    def _read_intensities_line(line):
        values = line.split()

        try:
            z = int(values[0])
            src = Subshell(z, iupac=values[2].strip())
            dst = Subshell(z, iupac=values[1].strip())
            transition = Transition(z, src, dst)
        except ValueError:  # transition not supported
            return None, 0.0, 0.0, 0.0, 0.0

        nf = float(values[4]), float(values[5])
        cf = float(values[6]), float(values[7])
        bf = float(values[8]), float(values[9])
        #tf = float(values[10]), float(values[11])  # skipped, not needed
        t = float(values[12]), float(values[13])

        return transition, cf, bf, nf, t

    index = phdets_key_index[key] + 1

    # Find data files
    emitted_filepath = os.path.join(path, 'pe-intens-%s.dat' % str(index).zfill(2))
    if not os.path.exists(emitted_filepath):
        raise ImporterException("Data file %s cannot be found" % emitted_filepath)

    generated_filepath = os.path.join(path, 'pe-gen-ph.dat')
    if not os.path.exists(generated_filepath):
        raise ImporterException("Data file %s cannot be found" % generated_filepath)

    # Load generated
    intensities = {}

    with open(generated_filepath, 'r') as fp:
        for line in fp:
            line = line.strip()
            if line.startswith('#'):
                continue

            transition, gcf, gbf, gnf, gt = _read_intensities_line(line)
            if transition is None:
                continue

            intensities[PhotonKey(transition, False, PhotonKey.C)] = gcf
            intensities[PhotonKey(transition, False, PhotonKey.B)] = gbf
            intensities[PhotonKey(transition, False, PhotonKey.P)] = gnf
            intensities[PhotonKey(transition, False, PhotonKey.T)] = gt

    # Load emitted
    with open(emitted_filepath, 'r') as fp:
        for line in fp:
            line = line.strip()
            if line.startswith('#'):
                continue

            transition, ecf, ebf, enf, et = _read_intensities_line(line)
            if transition is None:
                continue

            intensities[PhotonKey(transition, True, PhotonKey.C)] = ecf
            intensities[PhotonKey(transition, True, PhotonKey.B)] = ebf
            intensities[PhotonKey(transition, True, PhotonKey.P)] = enf
            intensities[PhotonKey(transition, True, PhotonKey.T)] = et

    return PhotonIntensityResult(intensities)

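# Column layout assumed by _read_intensities_line, inferred from the
# parsing above (and from the PhotonKey assignments) rather than from the
# PENEPMA documentation:
#
#   0: atomic number Z          1: destination subshell (IUPAC)
#   2: source subshell (IUPAC)  3: unused here
#   4-5:   value/unc mapped to PhotonKey.P (primary, nf)
#   6-7:   value/unc mapped to PhotonKey.C (characteristic fluorescence, cf)
#   8-9:   value/unc mapped to PhotonKey.B (bremsstrahlung fluorescence, bf)
#   10-11: skipped pair (tf)
#   12-13: value/unc mapped to PhotonKey.T (total, t)
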