def _read(self):
    """Read the sensitivity file: parameter chunks first, then data chunks."""
    # keys and stops intentionally alias the same list: chunks are both
    # started and terminated by '%' marker lines
    keys = stops = ['%']
    # becomes True once the 'latent' line — the last parameter line — is seen
    throughParams = False
    with KeywordParser(self.filePath, keys, stops) as parser:
        for chunk in parser.yieldChunks():
            if not throughParams:
                chunk0 = chunk[0]
                if 'Number' in chunk0:
                    self._processNumChunk(chunk)
                elif 'included' in chunk0:
                    # e.g. "Materials included ..." -> second word names
                    # the index being described
                    what = chunk0.split()[1]
                    self._processIndexChunk(what, chunk)
                elif 'energy' in chunk0:
                    self._processEnergyChunk(chunk)
                elif 'latent' in chunk0:
                    # the count of latent generations is the token
                    # immediately preceding the word 'latent'
                    split = chunk0.split()
                    self.latGen = int(split[split.index('latent') - 1])
                    throughParams = True
                # skip to next chunk while still in the parameter section
                continue
            # everything after the parameter section is sensitivity data
            self._processSensChunk(chunk)
    if self.zais:
        # Re-key zais: keep the 'total' entry as-is, convert all other
        # (string) keys to integer ZAI numbers, preserving order
        old = self.zais
        self.zais = OrderedDict()
        for key, value in old.items():
            if key == 'total':
                self.zais[key] = value
                continue
            self.zais[int(key)] = value
def _read(self):
    """Read the file and store the detectors."""
    activeName = ""
    gridData = {}
    tallyBins = None
    with KeywordParser(self.filePath, ["DET"], ["\n", "];"]) as parser:
        for chunk in parser.yieldChunks():
            varName, payload = cleanDetChunk(chunk)
            # A chunk starts a new detector unless its name is exactly the
            # active detector name plus one of the recognized grid suffixes
            if not activeName:
                startsNew = False
            elif varName.startswith(activeName):
                startsNew = all(
                    varName != "".join((activeName, suffix))
                    for suffix in self._KNOWN_GRIDS)
            else:
                startsNew = True
            if startsNew:
                # Flush the completed detector and start collecting anew
                self._processDet(activeName, tallyBins, gridData)
                tallyBins = payload
                gridData = {}
                activeName = varName
            elif tallyBins is None:
                # Very first detector encountered in the file
                activeName = varName
                tallyBins = payload
            else:
                # Grid data: key by the suffix after the detector name
                gridData[varName[len(activeName):]] = payload
    # Flush the final detector
    self._processDet(activeName, tallyBins, gridData)
def _read(self):
    """read through the results file and store requested data."""
    keys = ['NFY', 'XS', 'FLUX']
    separators = ['\n', '];', '\r\n']
    # (token to look for, settings flag, storage routine) — order matters
    # and mirrors the original if/elif chain
    handlers = (
        ('NFY', 'getFY', self._storeFissionYields),
        ('FLUX', 'getFlx', self._storeFluxRatio),
        ('XS', 'getXS', self._storeMicroXS),
    )
    with KeywordParser(self.filePath, keys, separators) as parser:
        for chunk in parser.yieldChunks():
            leader = chunk[0]
            for token, flag, store in handlers:
                if token in leader and self.settings[flag]:
                    store(chunk)
                    break
def _read(self):
    """Read through the xsplot file and store requested data.

    Populates ``self.metadata`` with the energy grid and majorant, and
    ``self.xsections`` with one ``XSData`` object per isotope/material.
    """
    info('Preparing to read {}'.format(self.filePath))
    # 'E' -> energy grid; i\d{4,5} -> isotope entries; m\w -> materials.
    # Raw strings so the regex escapes are not interpreted by Python.
    keys = ['E', r'i\d{4,5}', r'm\w']
    separators = ['\n', '];', '\r\n']
    with KeywordParser(self.filePath, keys, separators) as parser:
        for chunk in parser.yieldChunks():
            if chunk[0][:5] == 'E = [':
                # The energy grid
                self.metadata['egrid'] = np.array(chunk[1:], dtype=np.float64)
            elif chunk[0][:15] == 'majorant_xs = [':
                # L-inf norm on all XS on all materials
                self.metadata['majorant_xs'] = np.array(
                    chunk[1:], dtype=np.float64)
            elif chunk[0][-7:] == 'mt = [\n':
                debug('found mt specification')
                xsname = chunk[0][:-8]
                # isotope entries begin with 'i'; others are materials
                isiso = chunk[0][0] == 'i'
                self.xsections[xsname] = XSData(xsname, self.metadata,
                                                isIso=isiso)
                self.xsections[xsname].setMTs(chunk)
            elif chunk[0][-7:] == 'xs = [\n':
                debug('found xs specification')
                xsname = chunk[0][:-8]
                self.xsections[xsname].setData(chunk)
            elif chunk[0][-7:] == 'nu = [\n':
                debug('found nu specification')
                xsname = chunk[0][:-8]
                self.xsections[xsname].setNuData(chunk)
            elif 'bra_f' in chunk[0]:
                # BUG FIX: previously this referenced a stale ``xsname``
                # left over from an earlier iteration and called setData on
                # a metadata numpy array. Derive the name from THIS chunk
                # and store the constant data with its XSData entry.
                warning("Found constant 'bra_f' XS; storing it with its"
                        " XSData entry.")
                xsname = chunk[0][:chunk[0].index('_f')]
                self.xsections[xsname].setData(chunk)
            else:
                # Removed stray debugging print(chunk); the error call
                # already reports the offending entry
                error('Unidentifiable entry {}'.format(chunk[0]))
    info('Done reading xsplot file')
    debug(' found {} xs listings'.format(len(self.xsections)))
def _read(self):
    """Read through the depletion file and store requested data."""
    keys = ['MAT']
    if self.settings['processTotal']:
        keys.append('TOT')
    keys.extend(self.settings['metadataKeys'])
    separators = ['\n', '];', '\r\n']
    with KeywordParser(self.filePath, keys, separators) as parser:
        for chunk in parser.yieldChunks():
            mvar = getMatlabVarName(chunk)
            # Material and total chunks carry a MAT/TOT prefix; everything
            # else is file-level metadata
            if mvar[:3] in ('MAT', 'TOT'):
                name, variable = getMaterialNameAndVariable(mvar)
                self._checkAddData(chunk, name, variable)
            else:
                self._addMetadata(chunk)
    if 'days' in self.metadata:
        # Propagate the burnup time vector onto every stored material
        days = self.metadata['days']
        for mKey in self.materials:
            self.materials[mKey].days = days
def _read(self):
    """Read through the depletion file and store requested data."""
    keys = ['MAT']
    if self.settings['processTotal']:
        keys.append('TOT')
    keys.extend(self.settings['metadataKeys'])
    with KeywordParser(self.filePath, keys,
                       ['\n', '];', '\r\n']) as parser:
        for chunk in parser.yieldChunks():
            leader = chunk[0]
            if 'MAT' in leader:
                self._addMaterial(chunk)
            elif 'TOT' in leader:
                self._addTotal(chunk)
            else:
                self._addMetadata(chunk)
    if 'days' in self.metadata:
        # Propagate the burnup time vector onto every stored material
        days = self.metadata['days']
        for mKey in self.materials:
            self.materials[mKey].days = days
def _read(self):
    """Read the file and store the detectors."""
    with KeywordParser(self.filePath, ['DET'], ['\n', '];']) as parser:
        for chunk in parser.yieldChunks():
            # First token of the first line minus the leading 'DET'
            # prefix; the remainder of the chunk is the detector data
            label = chunk.pop(0).split(' ')[0][3:]
            if label[:-1] in self.detectors:
                # Trailing character marks the bin type of a grid for an
                # already-seen detector
                self._addDetector(chunk, label[:-1], label[-1])
            elif label in self.settings['names'] or self._loadAll:
                self._addDetector(chunk, label, None)
            # otherwise: a detector the user did not request — skip it
def read(self):
    """Read through the depletion file and store requested data."""
    messages.info('Preparing to read {}'.format(self.filePath))
    keys = ['MAT']
    if self.settings['processTotal']:
        keys.append('TOT')
    keys.extend(self.settings['metadataKeys'])
    with KeywordParser(self.filePath, keys,
                       ['\n', '];', '\r\n']) as parser:
        for chunk in parser.yieldChunks():
            leader = chunk[0]
            if 'MAT' in leader:
                self._addMaterial(chunk)
            elif 'TOT' in leader:
                self._addTotal(chunk)
            else:
                self._addMetadata(chunk)
    if 'days' in self.metadata:
        # Propagate the burnup time vector onto every stored material
        days = self.metadata['days']
        for mKey in self.materials:
            self.materials[mKey].days = days
    messages.info('Done reading depletion file')
    messages.debug(' found {} materials'.format(len(self.materials)))
def _read(self):
    """Read the file and store the detectors."""
    activeName = ""
    tallyBins = None
    gridData = {}
    with KeywordParser(self.filePath, ["DET"], ["\n", "];"]) as parser:
        for chunk in parser.yieldChunks():
            varName, payload = cleanDetChunk(chunk)
            if activeName and not varName.startswith(activeName):
                # Name no longer shares the active prefix -> new detector.
                # NOTE(review): a detector whose name is a prefix of the
                # next detector's name would be folded into it here —
                # confirm detector names cannot collide that way.
                self._processDet(activeName, tallyBins, gridData)
                tallyBins = payload
                gridData = {}
                activeName = varName
            elif tallyBins is None:
                # Very first chunk in the file
                activeName = varName
                tallyBins = payload
            else:
                # Grid data: key by the suffix after the detector name
                gridData[varName[len(activeName):]] = payload
    # Flush the final detector
    self._processDet(activeName, tallyBins, gridData)
def _read(self):
    """Read the file and store the detectors."""
    activeName = None
    prefixLen = 0
    activeGrids = {}
    with KeywordParser(self.filePath, ['DET'], ['\n', '];']) as parser:
        for chunk in parser.yieldChunks():
            name, data = cleanDetChunk(chunk)
            if activeName is not None and name[:prefixLen] == activeName:
                # Suffix after the detector name identifies the grid
                activeGrids[name[prefixLen:]] = data
            else:
                # New detector: flush the previous one (if any) and start
                # collecting under the new name, seeding the tally data
                if activeName is not None:
                    self.__processDet(activeName, activeGrids)
                activeName = name
                prefixLen = len(name)
                activeGrids = {'tally': data}
    self.__processDet(activeName, activeGrids)
def _read(self):
    """Read through the depletion file and store requested data."""
    keys = ['E', r'i\d{4,5}', r'm\w']
    separators = ['\n', '];', '\r\n']
    with KeywordParser(self.filePath, keys, separators) as parser:
        for chunk in parser.yieldChunks():
            header = chunk[0].strip()
            values = chunk[1:]
            # Branch order matters: 'majorant_xs = [' also ends with
            # '_xs = [', so it must be tested first
            if header.startswith("E = ["):
                # The energy grid
                self.energies = array(values, dtype=float64)
            elif header.endswith('majorant_xs = ['):
                # L-inf norm on all XS on all materials
                self.majorant = array(values, dtype=float64)
            elif header.endswith('_mt = ['):
                name = header[:header.index("_mt")]
                # Isotope entries start with 'i'; others are materials
                self.xsections[name] = XSData(name, self.metadata,
                                              isIso=header[0] == 'i')
                self.xsections[name].setMTs(chunk)
            elif header.endswith('_xs = ['):
                name = header[:header.index("_xs")]
                self.xsections[name].setData(chunk)
            elif header.endswith('_nu = ['):
                name = header[:header.index("_nu")]
                self.xsections[name].setNuData(chunk)
            elif header.endswith("bra_f = ["):
                # Constant 'bra_f' cross sections stored like regular data
                name = header[:header.index("_f")]
                self.xsections[name].setData(chunk)
            else:
                raise ValueError("Unidentifiable entry\n{}".format(chunk))