def _import_photon_intensity(self, options, name, detector, path):
    """
    Import characteristic x-ray intensities from a WXR result file.

    Reads the result at *path*, scales every generated and emitted
    (value, uncertainty) pair by the detector normalization factor and
    returns a :class:`PhotonIntensityResult`.
    """
    result = CharacteristicIntensity(path)
    norm = self._get_normalization_factor(options, detector)

    def _scaled(pair):
        # Multiply the (value, uncertainty) pair by the normalization
        # factor; map stops at the shorter iterable, so at most two
        # entries are produced (same as the original [factor] * 2).
        return list(map(mul, pair, (norm, norm)))

    intensities = {}
    for z, line in result.getAtomicNumberLines():
        data = result.intensities[z][line]
        transition = from_string("%s %s" % (symbol(z), line))
        # Store both the generated (emitted=False) and emitted photon keys.
        intensities[PhotonKey(transition, False, PhotonKey.P)] = _scaled(data[WXRGENERATED])
        intensities[PhotonKey(transition, True, PhotonKey.P)] = _scaled(data[WXREMITTED])

    return PhotonIntensityResult(intensities)
def _import_photon_intensity(self, options, name, detector, jobdir):
    """
    Import emitted photon intensities from a semicolon-delimited CSV file.

    Reads ``intensities_<name>.csv`` from *jobdir*. Only the first data row
    is used; each column header is a transition string and each cell an
    emitted intensity. Uncertainties are not available and are set to 0.0.

    :raises ImporterException: if the CSV file does not exist in *jobdir*
    """
    intensities_filepath = os.path.join(jobdir, 'intensities_%s.csv' % name)
    if not os.path.exists(intensities_filepath):
        # Fixed: message previously said "intensites_%s.csv" (typo), which
        # misreported the name of the file actually being looked up.
        raise ImporterException('Result file "intensities_%s.csv" not found in job directory (%s)' % \
                                (name, jobdir))

    intensities = {}

    with open(intensities_filepath, 'r') as fp:
        reader = csv.DictReader(fp, delimiter=';')

        # Only the first row carries data; an empty file yields no intensities.
        try:
            row = next(reader)
        except StopIteration:
            row = {}

    for transition, intensity in row.items():
        transition = from_string(transition.strip())
        # FIXME: Hack to handle locale problem (decimal comma vs. point)
        enf = (float(intensity.strip().replace(',', '.')), 0.0)
        intensities[PhotonKey(transition, True, PhotonKey.P)] = enf

    return PhotonIntensityResult(intensities)
def _import_photon_intensity(self, options, key, detector, path,
                             phdets_key_index, phdets_index_keys, *args):
    """
    Import photon intensities from PENELOPE-style data files.

    Loads generated intensities from ``pe-gen-ph.dat`` and emitted
    intensities from ``pe-intens-NN.dat`` (NN derived from the detector
    index for *key*) in *path*, and returns a
    :class:`PhotonIntensityResult` keyed by transition, emitted flag and
    photon component (characteristic, bremsstrahlung, no-fluorescence,
    total).

    :raises ImporterException: if either data file is missing
    """

    def _parse_row(row):
        # One data row: Z, destination shell, source shell, then paired
        # (value, uncertainty) columns. Returns (None, 0, 0, 0, 0) when the
        # transition is not supported (ValueError from the shell lookup).
        cols = row.split()

        try:
            z = int(cols[0])
            src = Subshell(z, iupac=cols[2].strip())
            dst = Subshell(z, iupac=cols[1].strip())
            transition = Transition(z, src, dst)
        except ValueError:  # transition not supported
            return None, 0.0, 0.0, 0.0, 0.0

        nf = float(cols[4]), float(cols[5])
        cf = float(cols[6]), float(cols[7])
        bf = float(cols[8]), float(cols[9])
        # Columns 10-11 (tf) are intentionally skipped — not needed.
        t = float(cols[12]), float(cols[13])

        return transition, cf, bf, nf, t

    def _load(filepath, emitted, intensities):
        # Read every non-comment row of *filepath* and record all four
        # photon components under the given emitted flag.
        with open(filepath, 'r') as fp:
            for row in fp:
                row = row.strip()
                if row.startswith('#'):
                    continue

                transition, cf, bf, nf, t = _parse_row(row)
                if transition is None:
                    continue

                intensities[PhotonKey(transition, emitted, PhotonKey.C)] = cf
                intensities[PhotonKey(transition, emitted, PhotonKey.B)] = bf
                intensities[PhotonKey(transition, emitted, PhotonKey.P)] = nf
                intensities[PhotonKey(transition, emitted, PhotonKey.T)] = t

    # Detector files are numbered starting at 1.
    index = phdets_key_index[key] + 1

    # Find data files (checked in the same order as before: emitted first).
    emitted_filepath = os.path.join(path, 'pe-intens-%s.dat' % str(index).zfill(2))
    if not os.path.exists(emitted_filepath):
        raise ImporterException("Data file %s cannot be found" % emitted_filepath)

    generated_filepath = os.path.join(path, 'pe-gen-ph.dat')
    if not os.path.exists(generated_filepath):
        raise ImporterException("Data file %s cannot be found" % generated_filepath)

    intensities = {}
    _load(generated_filepath, False, intensities)  # generated
    _load(emitted_filepath, True, intensities)     # emitted

    return PhotonIntensityResult(intensities)