Example #1
 def _precheck(self):
     with open(self.filePath) as check:
         for line in check:
             if line[:3] == 'HIS' or 'active cycles' in line:
                 return
     warning('Unable to find indication of active cycles or history data '
             'from {}'.format(self.filePath))
Example #2
 def _precheck(self):
     with open(self.filePath) as fobj:
         for count in range(5):
             if 'SENS' == fobj.readline()[:4]:
                 return
     warning("Could not find any lines starting with SENS. "
             "Is {} a sensitivity file?".format(self.filePath))
Example #3
 def _readAll(self):
     self.map = {}
     self.parsers = set()
     allExistFlag = self.settings['allExist']
     missing = []
     raiseErrors = self.settings['raiseErrors']
     for filePath in self.files:
         if not exists(filePath):
             if allExistFlag:
                 raise OSError('File {} does not exist'.format(filePath))
             else:
                 missing.append(filePath)
             continue
         try:
             parser = self.__parserCls(filePath)
             parser.read()
         except Exception as ee:
             if raiseErrors:
                 raise ee
             else:
                 error('The following error occurred while reading file {} '
                       'and was suppressed since setting <raiseErrors> is '
                       'False:\n{}'.format(filePath, str(ee)))
                 continue
         self.parsers.add(parser)
         self.map[filePath] = parser
     if missing:
         warning('The following files did not exist and were not processed '
                 '\n\t{}'.format(', '.join(missing)))
Example #4
 def _precheck(self):
     """do a quick scan to ensure this looks like a results file."""
     if self.__serpentVersion in MapStrVersions:
         self._keysVersion = MapStrVersions[self.__serpentVersion]
     else:
         warning("Version {} is not supported by the "
                 "ResultsReader".format(self.__serpentVersion))
     univSet = set()
     verWarning = True
     with open(self.filePath) as fid:
         if fid is None:
             raise IOError("Attempting to read on a closed file.\n"
                           "Parser: {}\nFile: {}".format(
                               self, self.filePath))
         for tline in fid:
             if verWarning and self._keysVersion['meta'] in tline:
                 verWarning = False
                 varType, varVals = self._getVarValues(tline)  # version
                 if self.__serpentVersion not in varVals:
                     warning("Version {} is used, but version {} is defined"
                             " in settings".format(varVals,
                                                   self.__serpentVersion))
             if self._keysVersion['univ'] in tline:
                 varType, varVals = self._getVarValues(tline)  # universe
                 if varVals in univSet:
                     break
                 univSet.add(varVals)  # add the new universe
         self._numUniv = len(univSet)
Example #5
 def _makeMaterialRegexs(self):
     """Return the patterns by which to find the requested materials."""
     patterns = self.settings['materials'] or ['.*']
     # match all materials if nothing given
     if any(['_' in pat for pat in patterns]):
         warning('Materials with underscores are not supported.')
     return [re.compile(mat) for mat in patterns]
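A minimal standalone sketch of the same pattern-compiling idea; the pattern list stands in for the reader's settings['materials'] entry and the standard warnings module replaces the library logger:

import re
from warnings import warn

def makeMaterialRegexs(patterns=None):
    # Compile the regular expressions used to select materials
    patterns = patterns or ['.*']  # match every material if none are given
    if any('_' in pat for pat in patterns):
        warn('Materials with underscores are not supported.')
    return [re.compile(pat) for pat in patterns]

regexs = makeMaterialRegexs(['fuel.*'])
print(any(r.match('fuel01') for r in regexs))     # True
print(any(r.match('reflector') for r in regexs))  # False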
Example #6
    def _formatLabel(self, labelFmt, names, zai):
        """
        Return a list of the formatted labels for a plot.

        Assumes that either names or zai is not None.
        """
        fmtr = labelFmt if labelFmt else '{iso}'
        allNames = self.names
        allZai = self.zai
        for allList, key, repl in zip(
                (allNames, allZai), ('names', 'zai'), ('{iso}', '{zai}')):
            if allList is None and repl in fmtr:
                warning("Isotope {} not stored on material and requested in "
                        "labelFmt. Check setting <depletion.metadataKeys>")
                fmtr = fmtr.replace(repl, '')
        iterator = zai if names is None else names
        lookup = allZai if names is None else allNames
        labels = []
        for item in iterator:
            index = lookup.index(item)
            iso = allNames[index] if allNames else ''
            zai = allZai[index] if allZai else ''
            labels.append(fmtr.format(mat=self.name, iso=iso, zai=zai))

        return labels
Example #7
    def _loadFromContainer(self, detector):
        """
        Load data from a detector

        Parameters
        ----------
        detector: Detector
            Detector whose tallies, errors, and scores will be
            loaded into this sampled container

        """
        if detector.name != self.name:
            warning("Attempting to read from detector with dissimilar names: "
                    "Base: {}, incoming: {}".format(self.name, detector.name))
        if not self._index:
            self.__shape = tuple([self.N] + list(detector.tallies.shape))
            self.__allocate(detector.scores is not None)
        if self.__shape[1:] != detector.tallies.shape:
            raise MismatchedContainersError(
                "Incoming detector {} tally data shape does not match "
                "sampled shape. Base: {}, incoming: {}".format(
                    detector.name, self.__shape[1:], detector.tallies.shape))
        self.__load(detector.tallies, detector.errors, detector.scores,
                    detector.name)
        if self.indexes is None:
            self.indexes = detector.indexes
        if not self.grids:
            self.grids = detector.grids
Example #8
 def _loadFromContainer(self, container):
     if container.name != self.name:
         warning("Attempting to store data from material {} onto "
                 "sampled material {}".format(self.name, container.name))
     for varName, varData in container.data.items():
         if not self.allData:
             self.__allocateLike(container)
         self.allData[varName][self._index] = varData
Example #9
    def addUniverse(self, univID, burnup=0, burnIndex=0, burnDays=0):
        """
        Add a universe to this branch.

        Data for the universes are produced at specific points in time.
        The additional arguments help track when the data for this
        universe were created.
        A negative value of ``burnup`` indicates the units on burnup are
        really ``days``. Therefore, the value of ``burnDays`` and ``burnup``
        will be swapped.

        .. warning::

            This method will overwrite data for universes that already exist

        Parameters
        ----------
        univID: int or str
            Identifier for this universe
        burnup: float or int
            Value of burnup [MWd/kgU]. A negative value here indicates
            the value is really in units of days.
        burnIndex: int
            Point in the depletion schedule
        burnDays: int or float
            Point in time

        Returns
        -------
        serpentTools.objects.containers.HomogUniv
            Empty new universe

        """
        if self.__hasDays is None and burnup:
            self.__hasDays = burnup < 0
        if burnup < 0:
            if not self.__hasDays:
                raise SerpentToolsException(
                    self.__mismatchedBurnup.format('negative', 'MWd/kgU'))
            burnup, burnDays = None if burnup else 0, -burnup
        else:
            if self.__hasDays and not burnDays:
                raise SerpentToolsException(
                    self.__mismatchedBurnup.format('positive', 'days'))
            burnDays = None if burnup else 0
        newUniv = HomogUniv(univID, burnup, burnIndex, burnDays)
        key = (univID, burnup or burnDays, burnIndex)
        if key in self.__keys:
            warning('Overwriting existing universe {} in {}'.format(
                key, str(self)))
        else:
            self.__keys.add(key)
        self.universes[key] = newUniv
        return newUniv
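A standalone sketch of the sign convention explained in the docstring, where a negative burnup value is interpreted as a point in time in days; the helper name is illustrative and not part of the library:

def splitBurnupDays(burnup):
    # Return (burnup [MWd/kgU], burnDays) following the sign convention
    if burnup < 0:
        return None, -burnup  # the value was really in units of days
    return burnup, None

print(splitBurnupDays(12.5))   # (12.5, None)
print(splitBurnupDays(-50.0))  # (None, 50.0)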
Example #10
    def get(self, variableName, uncertainty=False):
        """
        Gets the value of the variable VariableName from the dictionaries

        Parameters
        ----------
        variableName: str
            Variable Name
        uncertainty: bool
            Set to ``True`` in order to retrieve the
            uncertainty associated with the expected values

        Returns
        -------
        x:
            Variable Value
        dx:
            Associated uncertainty

        Raises
        ------
        TypeError
            If the uncertainty flag is not boolean
        KeyError
            If the variable requested is not stored on the
            object

        """
        # 1. Check the input values
        if not isinstance(uncertainty, bool):
            raise TypeError(
                'The variable uncertainty has type {}.\n'
                'It should be boolean.'.format(type(uncertainty)))
        # 2. Pointer to the proper dictionary
        setter = self._lookup(variableName, False)
        if variableName not in setter:
            raise KeyError(
                "Variable {} absent from expected value dictionary".format(
                    variableName))
        x = setter.get(variableName)
        # 3. Return the value of the variable
        if not uncertainty:
            return x
        if setter is self.metadata:
            warning('No uncertainty is associated to metadata')
            return x
        setter = self._lookup(variableName, True)
        if variableName not in setter:
            raise KeyError(
                "Variable {} absent from uncertainty dictionary".format(
                    variableName))
        dx = setter.get(variableName)
        return x, dx
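A self-contained sketch of the expected-value/uncertainty lookup pattern used by this method; the dictionaries and the variable name are invented for illustration:

expected = {'absKeff': 1.01234}
uncertainties = {'absKeff': 2.1e-4}

def get(variableName, uncertainty=False):
    # Return the expected value, and optionally its uncertainty
    if variableName not in expected:
        raise KeyError("Variable {} absent from expected value "
                       "dictionary".format(variableName))
    x = expected[variableName]
    if not uncertainty:
        return x
    return x, uncertainties[variableName]

print(get('absKeff'))                    # 1.01234
print(get('absKeff', uncertainty=True))  # (1.01234, 0.00021)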
Example #11
    def reshape(self):
        """
        Reshape the tally data into a multidimensional array

        This method reshapes the tally and uncertainty data into arrays
        where the array axes correspond to specific bin types.
        If a detector was set up to tally two group flux in a 5 x 5
        xy mesh, then the resulting tally data would be in a 50 x 12/13
        matrix in the original ``detN.m`` file.
        The tally data and relative error would be rebroadcasted into
        2 x 5 x 5 arrays, and the indexing information is stored in
        ``self.indexes``

        Returns
        -------
        shape: list
            Dimensionality of the resulting array

        Raises
        ------
        SerpentToolsException:
            If the bin data has not been loaded
        """
        if self.bins is None:
            raise SerpentToolsException('Tally data for detector {} has not '
                                        'been loaded'.format(self.name))
        if self.__reshaped:
            warning('Data has already been reshaped')
            return
        shape = []
        self.indexes = OrderedDict()
        hasScores = self.bins.shape[1] == 13
        if self.bins.shape[0] == 1:
            self.tallies = self.bins[0, 10]
            self.errors = self.bins[0, 11]
            if hasScores:
                self.scores = self.bins[0, 12]
        else:
            for index in range(1, 10):
                uniqueVals = unique(self.bins[:, index])
                if len(uniqueVals) > 1:
                    indexName = self._indexName(index)
                    self.indexes[indexName] = array(uniqueVals, dtype=int) - 1
                    shape.append(len(uniqueVals))
            self.tallies = self.bins[:, 10].reshape(shape)
            self.errors = self.bins[:, 11].reshape(shape)
            if hasScores:
                self.scores = self.bins[:, 12].reshape(shape)
        self._map = {'tallies': self.tallies, 'errors': self.errors,
                     'scores': self.scores}
        self.__reshaped = True
        return shape
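A worked NumPy example of the reshaping described in the docstring, using a synthetic 50 x 12 matrix in place of a real detN.m file; the choice of which columns vary is arbitrary here and only mimics the bin columns 1-9 scanned above:

import numpy as np

nEne, nY, nX = 2, 5, 5
bins = np.zeros((nEne * nY * nX, 12))
bins[:, 1] = np.repeat(np.arange(1, nEne + 1), nY * nX)          # first varying bin
bins[:, 8] = np.tile(np.repeat(np.arange(1, nY + 1), nX), nEne)  # second varying bin
bins[:, 9] = np.tile(np.arange(1, nX + 1), nEne * nY)            # third varying bin
bins[:, 10] = np.arange(bins.shape[0])                           # fake tally values

# Mirror the loop above: keep only bin columns with more than one unique value
shape = [len(np.unique(bins[:, col])) for col in range(1, 10)
         if len(np.unique(bins[:, col])) > 1]
tallies = bins[:, 10].reshape(shape)
print(tallies.shape)  # (2, 5, 5)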
Example #12
 def test_keyInLogs(self):
     """Verify the behavrior of LoggerMixin.msgInLogs"""
     message = "look for me"
     warning(message)
     self.assertMsgInLogs("WARNING", message)
     self.assertMsgInLogs("WARNING", message[:5], partial=True)
     self.assertMsgNotInLogs("WARNING", "<none>")
     self.assertMsgNotInLogs("WARNING", "<none>", partial=True)
     with self.assertRaises(KeyError):
         self.msgInLogs("DEBUG", message)
     with self.assertRaises(AttributeError):
         newM = LoggerMixin()
         newM.msgInLogs("WARNING", message)
Example #13
    def _precheck(self):
        """do a quick scan to ensure this looks like a xsplot file."""
        if '_xs' not in self.filePath:
            warning("This file doesn't look like the file format serpent"
                    "gives for xsplot stuff.")

        with open(self.filePath) as fh:
            # first chunk should be energy bins
            line = next(fh)
            if line != 'E = [\n':
                error("It looks like {} doesn't start with an energy bin "
                      "structure. Are you sure it's an xsplot file?"
                      .format(self.filePath))
Example #14
    def _read(self):
        """Read through the depletion file and store requested data."""
        info('Preparing to read {}'.format(self.filePath))
        keys = ['E', r'i\d{4,5}', r'm\w']
        separators = ['\n', '];', '\r\n']

        with KeywordParser(self.filePath, keys, separators) as parser:
            for chunk in parser.yieldChunks():

                if chunk[0][:5] == 'E = [':
                    # The energy grid
                    self.metadata['egrid'] = np.array(chunk[1:],
                                                      dtype=np.float64)

                elif chunk[0][:15] == 'majorant_xs = [':
                    # L-inf norm on all XS on all materials
                    self.metadata['majorant_xs'] = np.array(chunk[1:],
                                                            dtype=np.float64)

                elif chunk[0][-7:] == 'mt = [\n':
                    debug('found mt specification')
                    xsname = chunk[0][:-8]
                    isiso = True if chunk[0][0] == 'i' else False
                    self.xsections[xsname] = XSData(xsname,
                                                    self.metadata,
                                                    isIso=isiso)
                    self.xsections[xsname].setMTs(chunk)

                elif chunk[0][-7:] == 'xs = [\n':
                    debug('found xs specification')
                    xsname = chunk[0][:-8]
                    self.xsections[xsname].setData(chunk)

                elif chunk[0][-7:] == 'nu = [\n':
                    debug('found nu specification')
                    xsname = chunk[0][:-8]
                    self.xsections[xsname].setNuData(chunk)

                elif 'bra_f' in chunk[0]:
                    warning("There is this weird 'bra_f' XS. these seem to be"
                            " constant. recording to metadata instead.")
                    self.metadata[xsname].setData(chunk)

                else:
                    print(chunk)
                    error('Unidentifiable entry {}'.format(chunk[0]))

        info('Done reading xsplot file')
        debug('  found {} xs listings'.format(len(self.xsections)))
Example #15
 def __load(self, tallies, errors, scores, oName):
     index = self._index
     otherHasScores = scores is not None
     selfHasScores = self.allScores is not None
     if otherHasScores and selfHasScores:
         self.allScores[index, ...] = scores
     elif otherHasScores and not selfHasScores:
         warning("Incoming detector {} has score data, while base does "
                 "not. Skipping score data".format(oName))
     elif not otherHasScores and selfHasScores:
         raise MismatchedContainersError(
             "Incoming detector {} does not have score data, while base "
             "does.".format(oName))
     self.allTallies[index] = tallies
     self.allErrors[index] = tallies * errors
Example #16
def extendFiles(files):
    """Return a set of files where some may contain * globs"""
    out = set()
    if isinstance(files, str):
        files = [files]
    for ff in files:
        if '*' in ff:
            unGlob = glob(ff)
            if not unGlob:
                warning("No files matched with pattern {}".format(ff))
                continue
            for globbed in unGlob:
                out.add(globbed)
        else:
            out.add(ff)
    return out
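A short usage sketch, assuming the function above is in scope along with from glob import glob and a warning helper; the file names are made up:

# One literal path plus a '*' pattern; unmatched patterns only trigger a warning
found = extendFiles(['demo_res.m', 'branch*_res.m'])
print(sorted(found))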
Example #17
 def __processEnergyChunk(self, chunk):
     for line in chunk:
         if 'SENS' == line[:4]:
             break
     else:
         raise SerpentToolsException("Could not find SENS parameter "
                                     "in energy chunk {}".format(chunk[:3]))
     splitLine = line.split()
     varName = splitLine[0].split('_')[1:]
     varValues = str2vec(splitLine[3:-1])
     if varName[0] == 'E':
         self.energies = varValues
     elif varName == ['LETHARGY', 'WIDTHS']:
         self.lethargyWidths = varValues
     else:
         warning("Unanticipated energy setting {}".format(splitLine[0]))
Example #18
 def _addMetadata(self, chunk):
     for varName in METADATA_KEYS:
         if varName not in chunk[0]:
             continue
         if varName in ['ZAI', 'NAMES']:
             cleaned = [line.strip() for line in chunk[1:]]
             if varName == 'NAMES':
                 values = [item[1:item.find(" ")] for item in cleaned]
             else:
                 values = str2vec(cleaned, int, list)
         else:
             line = self._cleanSingleLine(chunk)
             values = str2vec(line)
         self.metadata[convertVariableName(varName)] = values
         return
     warning("Unsure about how to process metadata chunk {}"
             .format(chunk[0]))
Example #19
    def addData(self, variableName, variableValue, uncertainty=False):
        r"""
        Sets the value of the variable and, optionally, the associated s.d.

        .. versionadded:: 0.5.0

            Reshapes scattering matrices according to setting
            ``xs.reshapeScatter``. Matrices are of the form
            :math:`S[i, j]=\Sigma_{s,i\rightarrow j}`

        .. warning::

            This method will overwrite data for variables that already exist

        Parameters
        ----------
        variableName: str
            Variable Name
        variableValue:
            Variable Value
        uncertainty: bool
            Set to ``True`` if this data is an uncertainty

        Raises
        ------
        TypeError
            If the uncertainty flag is not boolean

        """
        if not isinstance(uncertainty, bool):
            raise TypeError('The variable uncertainty has type {}, '
                            'should be boolean.'.format(type(uncertainty)))

        value = self._cleanData(variableName, variableValue)
        if variableName in HOMOG_VAR_TO_ATTR:
            value = value if variableValue.size > 1 else value[0]
            setattr(self, HOMOG_VAR_TO_ATTR[variableName], value)
            return

        name = convertVariableName(variableName)
        # 2. Pointer to the proper dictionary
        setter = self._lookup(name, uncertainty)
        # 3. Check if variable is already present. Then set the variable.
        if name in setter:
            warning("The variable {} will be overwritten".format(name))
        setter[name] = value
Example #20
def normalizerFactory(data, norm, logScale, xticks, yticks):
    """
    Construct and return a :class:`~matplotlib.colors.Normalize` for this data

    Parameters
    ----------
    data : :class:`numpy.ndarray`
        Data to be plotted and normalized
    norm : None or callable or :class:`matplotlib.colors.Normalize`
        If a ``Normalize`` object, then use this as the normalizer.
        If callable, set the normalizer with
        ``norm(data, xticks, yticks)``. If ``None``, set the
        normalizer to be based on the min and max of the data
    logScale : bool
        If this evaluates to true, construct a
        :class:`matplotlib.colors.LogNorm` with the minimum
        set to be the minimum of the positive values.
    xticks : :class:`numpy.ndarray`
    yticks : :class:`numpy.ndarray`
        Arrays ideally corresponding to the data. Used with callable
        `norm` function.

    Returns
    --------
    :class:`matplotlib.colors.Normalize`
    or :class:`matplotlib.colors.LogNorm`
    or object:
        Object used to normalize colormaps against these data
    """
    if norm is not None:
        if isinstance(norm, Normalize):
            return norm
        elif callable(norm):
            return norm(data, xticks, yticks)
        else:
            raise TypeError("Normalizer {} not understood".format(norm))

    if logScale:
        if (data < 0).any():
            warning("Negative values will be excluded from logarithmic "
                    "colormap.")
        posData = data[data > 0]
        return LogNorm(posData.min(), posData.max())
    return Normalize(data.min(), data.max())
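A minimal matplotlib sketch of how such a normalizer is consumed; the data are synthetic and the LogNorm is built directly, mirroring the logScale branch above:

import numpy as np
from matplotlib import pyplot
from matplotlib.colors import LogNorm

data = np.random.rand(5, 5) * 100
norm = LogNorm(data[data > 0].min(), data.max())  # what the logScale path returns
mesh = pyplot.pcolormesh(data, norm=norm)
pyplot.colorbar(mesh)
pyplot.show()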
Example #21
    def _formatLabel(self, labelFmt, names):
        if isinstance(names, str):
            names = [names]
        elif names is None:
            names = self.names
        fmtr = labelFmt if labelFmt else '{iso}'
        labels = []
        if '{zai' in fmtr and self.zai is None:
            warning(
                'ZAI not set for material {}. Labeling plot with isotope names'
                .format(self.name))
            zaiLookup = self.names
        else:
            zaiLookup = self.zai
        names = names or self.names
        for name in names:
            labels.append(
                fmtr.format(mat=self.name,
                            iso=name,
                            zai=zaiLookup[self.names.index(name)]))

        return labels
Example #22
    def _getSlices(self, fixed):
        """
        Return a list of slice operators for each axis in reshaped data

        Parameters
        ----------
        fixed: dict
            Dictionary where keys are strings pointing to dimensions in
            ``self.indexes``, and values are the requested indices along
            those dimensions
        """
        fixed = fixed if fixed is not None else {}
        keys = set(fixed.keys())
        slices = []
        for key in self.indexes:
            if key in keys:
                slices.append(fixed[key])
                keys.remove(key)
            else:
                slices.append(slice(0, len(self.indexes[key])))
        if any(keys):
            warning(
                'Could not find arguments in index that match the following'
                ' requested slice keys: {}'.format(', '.join(keys)))
        return slices
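A standalone sketch of how such per-axis slices index reshaped tally data; the axis names and shapes are illustrative:

from collections import OrderedDict
import numpy as np

indexes = OrderedDict([('energy', [0, 1]), ('ymesh', range(5)),
                       ('xmesh', range(5))])
tallies = np.arange(2 * 5 * 5).reshape(2, 5, 5)

fixed = {'energy': 0}  # pin the energy axis, keep the full spatial mesh
slices = [fixed.get(key, slice(0, len(indexes[key]))) for key in indexes]
print(tallies[tuple(slices)].shape)  # (5, 5)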
Example #23
    def getReaderSettings(self, settingsPreffix):
        """Get all module-wide and reader-specific settings.

        Parameters
        ----------
        settingsPreffix: str or list
            Name of the specific reader.
            Will look for settings that lead with ``readerName``, e.g.
            ``depletion.metadataKeys`` or ``xs.variables``

        Returns
        -------
        dict
            Single level dictionary with ``settingName: settingValue`` pairs

        Raises
        ------
        KeyError
            If the reader name is not located in the ``readers`` settings
            dictionary
        """
        settings = {}
        settingsPreffix = (
            [settingsPreffix] if isinstance(settingsPreffix, str)
            else settingsPreffix)
        for setting, value in iteritems(self):
            settingPath = setting.split('.')
            if settingPath[0] in settingsPreffix:
                name = settingPath[1]
            else:
                continue
            settings[name] = value
        if not settings:
            messages.warning('Could not obtain settings for the following '
                             'reader names: {}'
                             .format(', '.join(settingsPreffix)))
        return settings
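A self-contained sketch of the dotted-name filtering performed above; the settings dictionary is invented for illustration:

allSettings = {
    'depletion.metadataKeys': ['ZAI', 'NAMES'],
    'xs.variables': ['INF_TOT'],
    'verbosity': 'warning',
}
prefix = 'depletion'
# Keep only settings whose leading name matches the requested reader
readerSettings = {
    key.split('.', 1)[1]: value
    for key, value in allSettings.items()
    if '.' in key and key.split('.')[0] == prefix
}
print(readerSettings)  # {'metadataKeys': ['ZAI', 'NAMES']}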
Example #24
    def addData(self, variableName, variableValue, uncertainty=False):
        """
        Sets the value of the variable and, optionally, the associated s.d.

        .. warning::

            This method will overwrite data for variables that already exist

        Parameters
        ----------
        variableName: str
            Variable Name
        variableValue:
            Variable Value
        uncertainty: bool
            Set to ``True`` if this data is an uncertainty

        Raises
        ------
        TypeError
            If the uncertainty flag is not boolean

        """

        # 1. Check the input type
        variableName = convertVariableName(variableName)
        if not isinstance(uncertainty, bool):
            raise TypeError('The variable uncertainty has type {}, '
                            'should be boolean.'.format(type(uncertainty)))
        # 2. Pointer to the proper dictionary
        setter = self._lookup(variableName, uncertainty)
        # 3. Check if variable is already present. Then set the variable.
        if variableName in setter:
            warning("The variable {} will be overwritten".format(variableName))
        setter[variableName] = variableValue
Example #25
    def addUniverse(self, univID, burnup=0, burnIndex=0, burnDays=0):
        """
        Add a universe to this branch.

        Data for the universes are produced at specific points in time.
        The additional arguments help track when the data for this
        universe were created.

        .. warning::
            This method will overwrite data for universes that already exist

        Parameters
        ----------
        univID: int or str
            Identifier for this universe
        burnup: float or int
            Value of burnup [MWd/kgU]
        burnIndex: int
            Point in the depletion schedule
        burnDays: int or float
            Point in time

        Returns
        -------
        newUniv: serpentTools.objects.containers.HomogUniv
        """
        newUniv = HomogUniv(self, univID, burnup, burnIndex, burnDays)
        key = tuple([univID, burnup, burnIndex] +
                    ([burnDays] if burnDays else []))
        if key in self.__keys:
            warning('Overwriting existing universe {} in {}'.format(
                key, str(self)))
        else:
            self.__keys.add(key)
        self.universes[key] = newUniv
        return newUniv
Example #26
    def _precheck(self):
        """do a quick scan to ensure this looks like a results file."""
        serpentV = rc['serpentVersion']
        keys = MapStrVersions.get(serpentV)

        if keys is None:
            warning("SERPENT {} is not supported by the "
                    "ResultsReader".format(serpentV))
            warning("  Attempting to read anyway. Please report strange "
                    "behaviors/failures to developers.")
            keys = MapStrVersions[max(MapStrVersions)]

        self._keysVersion = keys

        self._burnupKeys = {
            k: convertVariableName(keys[k])
            for k in {"days", "burnup"}
        }

        univSet = set()
        verWarning = True
        with open(self.filePath) as fid:
            if fid is None:
                raise IOError("Attempting to read on a closed file.\n"
                              "Parser: {}\nFile: {}".format(
                                  self, self.filePath))
            for tline in fid:
                if verWarning and self._keysVersion['meta'] in tline:
                    verWarning = False
                    varType, varVals = self._getVarValues(tline)  # version
                    if serpentV not in varVals:
                        warning("SERPENT {} found in {}, but version {} is "
                                "defined in settings".format(
                                    varVals, self.filePath, serpentV))
                        warning("  Attemping to read anyway. Please report "
                                "strange behaviors/failures to developers.")
                if self._keysVersion['univ'] in tline:
                    varType, varVals = self._getVarValues(tline)  # universe
                    if varVals in univSet:
                        break
                    univSet.add(varVals)  # add the new universe
            self._numUniv = len(univSet)
Example #27
 def _postcheck(self):
     if not self.materials:
         warning("No materials found in {}".format(self.filePath))
         return
     for attr in ['burnup', 'days']:
         if getattr(self, attr) is None:
             warning("Value of {} is None".format(attr))
     noNucs = set()
     for mname, subDict in self.materials.items():
         if not subDict['nuclides']:
             noNucs.add(mname)
     if noNucs:
         warning("The following materials did not have any nuclides:\n{}".
                 format(noNucs))
Example #28
    def plot(self, xUnits, yUnits=None, timePoints=None, names=None, zai=None,
             materials=None, ax=None, legend=None, logx=False, logy=False,
             loglog=False, labelFmt=None, xlabel=None, ylabel=None, ncol=1,
             **kwargs):
        """
        Plot properties for all materials in this file together.

        Parameters
        ----------
        xUnits: str
            Name of x value to obtain, e.g. ``'days'``, ``'burnup'``.
            If ``xUnits`` is given and ``yUnits`` is ``None``, then
            the plotted data will be ``xUnits`` against ``'days'``
        yUnits: str
            name of y value to return, e.g. ``'adens'``, ``'ingTox'``
        timePoints: list or None
            If given, select the time points according to those
            specified here. Otherwise, select all points

            .. deprecated:: 0.7.0
               Will plot against all time points

        names: str or list or None
            If given, plot  values corresponding to these isotope
            names. Otherwise, plot values for all isotopes.
        zai: int or list or None
            If given, plot values corresponding to these
            isotope ``ZZAAAI`` values. Otherwise, plot for all isotopes

            .. versionadded:: 0.5.1

        materials: None or list
            Selection of materials from ``self.materials`` to plot.
            If None, plot all materials, potentially including ``tot``
        {ax}
        {legend}
        {xlabel} Otherwise, use ``xUnits``
        {ylabel} Otherwise, use ``yUnits``
        {logx}
        {logy}
        {loglog}
        {matLabelFmt}
        {ncol}
        {kwargs} :py:func:`matplotlib.pyplot.plot`

        Returns
        -------
        {rax}

        See Also
        --------
        * :py:func:`~serpentTools.objects.materials.DepletedMaterial.getValues`
        * :py:func:`matplotlib.pyplot.plot`
        * :py:meth:`str.format` - used for formatting labels
        * :py:func:`~serpentTools.objects.materials.DepletedMaterial.plot`

        Raises
        ------
        KeyError
            If x axis units are not ``'days'`` nor ``'burnup'``
        SerpentToolsException
            If the materials dictionary does not contain any items
        """
        if yUnits is None:
            yUnits = xUnits
            xUnits = 'days'

        if not self.materials:
            raise SerpentToolsException("Material dictionary is empty")

        if xUnits not in ('days', 'burnup'):
            raise KeyError("Plot method only uses x-axis data from <days> and "
                           "<burnup>, not {}".format(xUnits))
        missing = set()
        ax = ax or pyplot.gca()
        materials = materials or self.materials.keys()
        labelFmt = labelFmt or '{mat} {iso}'
        for mat in materials:
            if mat not in self.materials:
                missing.add(mat)
                continue

            ax = self.materials[mat].plot(
                xUnits, yUnits, timePoints, names,
                zai, ax, legend=False, xlabel=xlabel, ylabel=ylabel,
                logx=False, logy=False, loglog=False, labelFmt=labelFmt,
                **kwargs)
        if missing:
            warning("The following materials were not found in materials "
                    "dictionary: {}".format(', '.join(missing)))
        formatPlot(ax, legend=legend, legendcols=ncol, logx=logx, logy=logy,
                   loglog=loglog,
                   xlabel=xlabel or DEPLETION_PLOT_LABELS[xUnits],
                   ylabel=ylabel or DEPLETION_PLOT_LABELS[yUnits],
                   )

        return ax
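A hedged usage sketch of the signature documented above; serpentTools.read, the file name, and the material name are assumptions that may need adjusting for a real case:

import serpentTools
from matplotlib import pyplot

dep = serpentTools.read('demo_dep.m')  # hypothetical depletion output file
ax = dep.plot('days', 'adens', names=['U235', 'Pu239'],
              materials=['fuel'], logy=True, labelFmt='{mat}: {iso}')
pyplot.show()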
Example #29
    def plot(self,
             qtys,
             limitE=True,
             ax=None,
             logx=None,
             logy=None,
             loglog=None,
             sigma=3,
             xlabel=None,
             ylabel=None,
             legend=None,
             ncol=1,
             steps=True,
             labelFmt=None,
             labels=None):
        """
        Plot homogenized data as a function of energy.

        Parameters
        ----------
        qtys: str or iterable
            Plot this or these value against energy.
        limitE: bool
            If given, set the maximum energy value to be
            that of the micro group structure. By default,
            SERPENT macro group structures can reach
            1E37, leading to a very large tail on the plots.
        {ax}
        {labels}
        {logx}
        {logy}
        {loglog}
        {sigma}
        {xlabel}
        {ylabel}
        {legend}
        {ncol}
        steps: bool
            If ``True``, plot values as constant within
            energy bins.
        {univLabelFmt}

        Returns
        -------
        {rax}

        """
        qtys = [
            qtys,
        ] if isinstance(qtys, str) else qtys
        ax = ax or pyplot.gca()
        onlyXS = True
        sigma = max(0, int(sigma))
        drawstyle = 'steps-post' if steps else None
        limitE = limitE and (self.groups is not None
                             and self.microGroups is not None)
        macroBins = self.numGroups + 1 if self.numGroups is not None else None
        microBins = (self.numMicroGroups +
                     1 if self.numMicroGroups is not None else None)
        labelFmt = labelFmt or "{k}"
        if limitE:
            eneCap = min(self.microGroups.max(), self.groups.max())

        if isinstance(labels, str):
            labels = [
                labels,
            ]
        if labels is None:
            labels = [
                labelFmt,
            ] * len(qtys)
        else:
            if len(labels) != len(qtys):
                raise IndexError(
                    "Need equal number of labels for plot quantities. "
                    "Given {} expected: {}".format(len(labels), len(qtys)))

        for key, label in zip(qtys, labels):
            yVals = self.__getitem__(key)
            if len(yVals.shape) != 1 and 1 not in yVals.shape:
                warning("Data for {} is not 1D. Will not plot".format(key))
                continue
            uncD = self._lookup(key, True)
            yUncs = uncD.get(key, zeros_like(yVals))

            if 'Flx' in key:
                onlyXS = False
            yVals = hstack((yVals, yVals[-1]))
            nbins = yVals.size
            yUncs = hstack((yUncs, yUncs[-1])) * yVals * sigma

            xdata, foundE = self.__getEGrid(nbins, macroBins, microBins)

            if limitE and foundE:
                xdata = xdata.copy()
                xdata[xdata.argmax()] = eneCap

            label = self.__formatLabel(label, key)

            ax.errorbar(xdata,
                        yVals,
                        yerr=yUncs,
                        label=label,
                        drawstyle=drawstyle)

        if ylabel is None:
            ylabel, yUnits = (("Cross Section", "[cm$^{-1}$]") if onlyXS else
                              ("Group Constant", ""))
            sigStr = r" $\pm{}\sigma$".format(sigma) if sigma else ""
            ylabel = ' '.join((ylabel, sigStr, yUnits))

        if xlabel is None:
            xlabel = "Energy [MeV]" if foundE else "Energy Group"

        if legend is None:
            legend = len(qtys) > 1
        if loglog is not None:
            logx = logy = loglog
        else:
            if logx is None:
                logx = foundE
            if logy is None:
                logy = inferAxScale(ax, 'y')

        formatPlot(ax,
                   logx=logx,
                   logy=logy,
                   legendcols=ncol,
                   legend=legend,
                   xlabel=xlabel or "Energy [MeV]",
                   ylabel=ylabel)
        return ax
Example #30
 def finalize(self):
     """Produce final uncertainties from all aggregated runs"""
     if self._index != self.N:
         warning("Data from only {} of {} files has been loaded".format(
             self._index, self.N))
     self._finalize()