Example #1
 def updateClusterProgress(self):
     """Updates the status window on the Windows HPC client."""
     if not shutil.which("job"):
         return
     totalSteps = max(
         (self.cs["burnSteps"] + 1) * self.cs["nCycles"] - 1, 1
     )  # 0 through 5 if 2 cycles
     currentStep = (self.cs["burnSteps"] + 1) * self.r.p.cycle + self.r.p.timeNode
     args = ["job", "modify", "%CCP_JOBID%"]
     args.append("/progress:{}".format(int((100.0 * currentStep) / totalSteps)))
     args.append(
         '/progressmsg:"At time step {} of {}"'.format(currentStep, totalSteps)
     )
     msg = "command did not run!"
     try:
         runLog.info("Updating cluster progress: {}".format(" ".join(args)))
         msg = subprocess.check_output(args, shell=True)
     except:  # pylint: disable=bare-except
          # we really don't care if the progress updater fails; it has no impact on the run.
         runLog.warning("Failed to update progress on the HPC: {}".format(msg))
Example #2
    def _checkConversion(self):
        """
        Make sure both reactors have the same power and that it's equal to user-input.

        On the initial neutronics run, the source power will naturally be zero.
        """
        UniformMeshGeometryConverter._checkConversion(self)
        sourcePow = self._sourceReactor.core.getTotalBlockParam("power")
        convPow = self.convReactor.core.getTotalBlockParam("power")
        if sourcePow > 0.0 and convPow > 0.0:
            expectedPow = (self._sourceReactor.core.p.power /
                           self._sourceReactor.core.powerMultiplier)
            if abs(sourcePow - convPow) / sourcePow > 1e-5:
                runLog.info(
                    f"Source reactor power ({sourcePow}) is too different from "
                    f"converted power ({convPow}).")
            if sourcePow and abs(sourcePow - expectedPow) / sourcePow > 1e-5:
                raise ValueError(
                    f"Source reactor power ({sourcePow}) is too different from "
                    f"user-input power ({expectedPow}).")
Example #3
    def writeMeshData(self):
        """
        Write a summary table of the radial, axial, and theta bins that will be used for geometry conversion.

        Notes
        -----
        This should be on the ``ThetaRZGrid`` object.
        """
        binCombinations = (
            self.numRingBins * self.numAxialMeshBins * self.numThetaMeshBins
        )
        runLog.info("Total mesh bins (r, z, theta): {0}".format(binCombinations))
        runLog.info(
            "  Radial bins: {}\n"
            "  Axial bins:  {}\n"
            "  Theta bins:  {}".format(
                self.numRingBins, self.numAxialMeshBins, self.numThetaMeshBins
            )
        )
        self._writeMeshLogData()
Example #4
 def _copyLibraryFilesForCycle(self, cycle, libFiles):
     runLog.extra("Current library files: {}".format(libFiles))
     for baseName, cycleName in libFiles.items():
         if not os.path.exists(cycleName):
             if not os.path.exists(baseName):
                 raise ValueError(
                     "Neither {} nor {} libraries exist. Either the "
                     "current cycle library for cycle {} should exist "
                     "or a base library is required to continue.".format(
                         cycleName, baseName, cycle
                     )
                 )
             runLog.info(
                 "Existing library {} for cycle {} does not exist. "
                 "The active library is {}".format(cycleName, cycle, baseName)
             )
         else:
             runLog.info("Using {} as an active library".format(baseName))
             if cycleName != baseName:
                 shutil.copy(cycleName, baseName)
Example #5
    def countObjects(self, ao):
        """
        Recursively find objects inside arbitrarily-deeply-nested containers.

        This is designed to work with the garbage collector, so it focuses on
        objects potentially being held in dicts, tuples, lists, or sets.
        """
        counter = self[type(ao)]
        if counter.add(ao):
            self.count += 1
            if self.count % REPORT_COUNT == 0:
                runLog.info("Counted {} items".format(self.count))

            if isinstance(ao, dict):
                for k, v in ao.items():
                    self.countObjects(k)
                    self.countObjects(v)
            elif isinstance(ao, (list, tuple, set)):
                for v in iter(ao):
                    self.countObjects(v)
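A minimal standalone sketch of the same recursion, using a plain id-based "seen" set instead of the class's per-type counters (the names and structure here are illustrative, not ARMI's):

def countNested(obj, seen=None):
    """Count an object plus everything reachable through nested dicts/lists/tuples/sets."""
    seen = set() if seen is None else seen
    if id(obj) in seen:
        return 0
    seen.add(id(obj))
    total = 1
    if isinstance(obj, dict):
        for k, v in obj.items():
            total += countNested(k, seen) + countNested(v, seen)
    elif isinstance(obj, (list, tuple, set)):
        for v in obj:
            total += countNested(v, seen)
    return total

print(countNested({"a": [1, 2, (3, 4)], "b": {5}}))  # 11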
Example #6
    def checkInputs(self):
        """
        Checks ARMI inputs for consistency.

        Returns
        -------
        bool
            True if the inputs are all good, False otherwise
        """
        with DirectoryChanger(self.cs.inputDirectory, dumpOnException=False):
            operatorClass = operators.getOperatorClassFromSettings(self.cs)
            inspector = operatorClass.inspector(self.cs)
            inspectorIssues = [query for query in inspector.queries if query]
            if context.CURRENT_MODE == context.Mode.INTERACTIVE:
                # if interactive, ask user to deal with settings issues
                inspector.run()
            else:
                # when not interactive, just print out the info in the stdout
                queryData = []
                for i, query in enumerate(inspectorIssues, start=1):
                    queryData.append((
                        i,
                        textwrap.fill(query.statement,
                                      width=50,
                                      break_long_words=False),
                        textwrap.fill(query.question,
                                      width=50,
                                      break_long_words=False),
                    ))

                if queryData and context.MPI_RANK == 0:
                    runLog.header(
                        "=========== Settings Input Queries ===========")
                    runLog.info(
                        tabulate.tabulate(
                            queryData,
                            headers=["Number", "Statement", "Question"],
                            tablefmt="armi",
                        ))

            return not any(inspectorIssues)
Example #7
    def _addBlockRings(
        self, blockToAdd, blockName, numRingsToAdd, firstRing, mainComponent=None
    ):
        """Add a homogenous block ring to the converted block."""
        runLog.info(
            "Converting representative block {} to its equivalent cylindrical model".format(
                self._sourceBlock
            )
        )

        innerDiam = (
            self.convertedBlock[-1].getDimension("od")
            if len(self.convertedBlock)
            else 0.0
        )

        if mainComponent is not None:
            newCompProps = mainComponent.material
            tempInput = tempHot = mainComponent.temperatureInC
        else:  # no component specified so just use block vals
            newCompProps = (
                "Custom"  # this component shouldn't change temperature anyway
            )
            tempInput = tempHot = blockToAdd.getAverageTempInC()

        for ringNum in range(firstRing, firstRing + numRingsToAdd):
            numFuelBlocksInRing = blockToAdd.location.getNumPosInRing(ringNum)
            fuelBlockTotalArea = numFuelBlocksInRing * self._driverFuelBlock.getArea()
            driverOuterDiam = getOuterDiamFromIDAndArea(innerDiam, fuelBlockTotalArea)
            driverRing = components.Circle(
                blockName,
                newCompProps,
                tempInput,
                tempHot,
                od=driverOuterDiam,
                id=innerDiam,
                mult=1,
            )
            driverRing.setNumberDensities(blockToAdd.getNumberDensities())
            self.convertedBlock.add(driverRing)
            innerDiam = driverOuterDiam
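The helper getOuterDiamFromIDAndArea is not shown above; assuming it inverts the annulus-area relation A = pi/4 * (od^2 - id^2), a minimal sketch of that inversion:

import math

def outerDiamFromIDAndArea(innerDiam, area):
    """Hypothetical stand-in: solve A = pi/4 * (od**2 - id**2) for od."""
    return math.sqrt(innerDiam ** 2 + 4.0 * area / math.pi)

# e.g. six blocks of 12.5 cm^2 each wrapped around a 5.0 cm inner diameter
print(round(outerDiamFromIDAndArea(5.0, 6 * 12.5), 2))  # 10.98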
Example #8
    def invoke(self):
        # late imports so that we don't have to import the world to do anything
        # pylint: disable=import-outside-toplevel
        from armi.bookkeeping.visualization import vtk
        from armi.bookkeeping.db import databaseFactory

        # a little baroque, but easy to extend with future formats
        formatMap = {self._FORMAT_VTK: vtk.VtkDumper}

        dumper = formatMap[self.args.format](self.args.output_name)

        nodes = self.args.nodes
        db = databaseFactory(self.args.h5db, "r")
        with db:
            dbNodes = list(db.genTimeSteps())

            if nodes is not None and any(node not in dbNodes
                                         for node in nodes):
                raise RuntimeError(
                    "Some of the requested nodes are not in the source database.\n"
                    "Requested: {}\n"
                    "Present: {}".format(nodes, dbNodes))

            with dumper:
                for cycle, node in dbNodes:
                    if nodes is not None and (cycle, node) not in nodes:
                        continue

                    if (self.args.min_node is not None
                            and (cycle, node) < self.args.min_node):
                        continue

                    if (self.args.max_node is not None
                            and (cycle, node) > self.args.max_node):
                        continue

                    runLog.info(
                        "Creating visualization file for cycle {}, time node {}..."
                        .format(cycle, node))
                    r = db.load(cycle, node)
                    dumper.dumpState(r)
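The min_node/max_node filtering above relies on Python's lexicographic comparison of (cycle, node) tuples; a short sketch with assumed bounds:

dbNodes = [(0, 0), (0, 1), (1, 0), (1, 1), (2, 0)]
minNode, maxNode = (0, 1), (1, 1)  # assumed bounds

kept = [cn for cn in dbNodes if minNode <= cn <= maxNode]
print(kept)  # [(0, 1), (1, 0), (1, 1)]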
Example #9
 def _writeMachineInformation():
     """Create a table that contains basic machine and rank information."""
     if armi.MPI_SIZE > 1:
         processorNames = armi.MPI_NODENAMES
         uniqueNames = set(processorNames)
         nodeMappingData = []
         for uniqueName in uniqueNames:
             matchingProcs = [
                 str(rank) for rank, procName in enumerate(processorNames)
                 if procName == uniqueName
             ]
             numProcessors = str(len(matchingProcs))
             nodeMappingData.append(
                 (uniqueName, numProcessors, ", ".join(matchingProcs)))
         runLog.header("=========== Machine Information ===========")
         runLog.info(
             tabulate.tabulate(
                 nodeMappingData,
                 headers=["Machine", "Number of Processors", "Ranks"],
                 tablefmt="armi",
             ))
Example #10
    def interactEOL(self):
        """Adds the data to the report, and generates it"""
        self.cs.setSettingsReport()
        b = self.o.r.core.getFirstBlock(Flags.FUEL)
        b.setAreaFractionsReport()

        from armi.bookkeeping import plotting

        plotting.plotReactorPerformance(self.r)

        reportingUtils.setNeutronBalancesReport(self.r.core)
        self.writeRunSummary()
        self.o.timer.stopAll()  # consider the run done
        runLog.info(self.o.timer.report(inclusion_cutoff=0.001, total_time=True))
        _timelinePlot = self.o.timer.timeline(
            self.cs.caseTitle, self.cs["timelineInclusionCutoff"], total_time=True
        )
        runLog.debug("Generating report HTML.")
        self.writeReports()
        runLog.debug("Report HTML generated successfully.")
        runLog.info(self.printReports())
Example #11
    def _modifyGeometry(self, container, gridDesign):
        """Perform post-load geometry conversions like full core, edge assems."""
        # all cases should have no edge assemblies. They are added ephemerally when needed
        from armi.reactor.converters import geometryConverters  # circular imports

        runLog.header(
            "=========== Applying Geometry Modifications ===========")
        converter = geometryConverters.EdgeAssemblyChanger()
        converter.removeEdgeAssemblies(container)

        # now update the spatial grid dimensions based on the populated children
        # (unless specified on input)
        if not gridDesign.latticeDimensions:
            runLog.info(
                "Updating spatial grid pitch data for {} geometry".format(
                    container.geomType))
            if container.geomType == geometry.HEX:
                container.spatialGrid.changePitch(container[0][0].getPitch())
            elif container.geomType == geometry.CARTESIAN:
                xw, yw = container[0][0].getPitch()
                container.spatialGrid.changePitch(xw, yw)
Example #12
def summarizeMaterialData(container):
    """
    Create a summary of the material objects and source data for a reactor container.

    Parameters
    ----------
    container : Core object
        Any Core object with Blocks and Components defined.
    """

    def _getMaterialSourceData(materialObj):
        return (materialObj.DATA_SOURCE, materialObj.propertyRangeUpdated)

    runLog.header(
        "=========== Summarizing Source of Material Data for {} ===========".format(
            container
        )
    )
    materialNames = set()
    materialData = []
    for b in container.getBlocks():
        for c in b:
            if c.material.name in materialNames:
                continue
            sourceLocation, wasModified = _getMaterialSourceData(c.material)
            materialData.append((c.material.name, sourceLocation, wasModified))
            materialNames.add(c.material.name)
    materialData = sorted(materialData)
    runLog.info(
        tabulate.tabulate(
            tabular_data=materialData,
            headers=[
                "Material Name",
                "Source Location",
                "Property Data was Modified\nfrom the Source?",
            ],
            tablefmt="armi",
        )
    )
    return materialData
Example #13
    def invoke(self):
        from armi.bookkeeping.db.database3 import Database3

        db = Database3(self.args.h5db, "r")

        with db:
            settings, geom, bp = db.readInputsFromDB()

        settingsExt = ".yaml"
        if settings.lstrip()[0] == "<":
            settingsExt = ".xml"

        settingsPath = self.args.output_base + "_settings" + settingsExt
        bpPath = self.args.output_base + "_blueprints.yaml"

        geomPath = None
        if geom:
            geomExt = ".xml" if geom.lstrip()[0] == "<" else ".yaml"
            geomPath = self.args.output_base + "_geom" + geomExt

        bail = False
        for path in [settingsPath, bpPath, geomPath]:
            if path is not None and os.path.exists(path):
                runLog.error("`{}` already exists. Aborting.".format(path))
                bail = True
        if bail:
            return -1

        for path, data, inp in [
            (settingsPath, settings, "settings"),
            (bpPath, bp, "blueprints"),
            (geomPath, geom, "geometry"),
        ]:
            if path is None:
                continue
            runLog.info("Writing {} to `{}`".format(inp, path))
            if isinstance(data, bytes):
                data = data.decode()
            with open(path, "w") as f:
                f.write(data)
Example #14
def migrate_database(database_path):
    """Migrate the database to be compatible with the latest ARMI code base."""
    if not os.path.exists(database_path):
        raise ValueError(
            "Database file {} does not exist".format(database_path))

    runLog.info("Migrating database file: {}".format(database_path))
    runLog.info("Generating SHA-1 hash for original database: {}".format(
        database_path))
    shaHash = utils.getFileSHA1Hash(database_path)
    runLog.info("    Database: {}\n"
                "    SHA-1: {}".format(database_path, shaHash))
    _remoteFolder, remoteDbName = os.path.split(
        database_path)  # make new DB locally
    root, ext = os.path.splitext(remoteDbName)
    newDBName = root + "_migrated" + ext
    runLog.info("Copying database from {} to {}".format(
        database_path, newDBName))
    with h5py.File(newDBName, "w") as newDB, h5py.File(database_path,
                                                       "r") as oldDB:

        typeNames = _getTypeNames(oldDB)

        def closure(name, dataset):
            _copyValidDatasets(newDB, typeNames, name, dataset)

        oldDB.visititems(closure)

        # Copy all old database attributes to the new database (h5py AttributeManager has no update method)
        for key, val in oldDB.attrs.items():
            newDB.attrs[key] = val

        newDB.attrs["original-db-version"] = oldDB.attrs["version"]
        newDB.attrs["original-db-hash"] = shaHash
        newDB.attrs["version"] = armi.__version__

        _writeAssemType(oldDB, newDB, typeNames)

    runLog.info(
        "Successfully generated migrated database file: {}".format(newDBName))
Example #15
    def readFromStream(self,
                       stream,
                       handleInvalids=True,
                       fmt=SettingsInputFormat.YAML):
        """Read from a file-like stream."""
        self.format = fmt
        if self.format == self.SettingsInputFormat.YAML:
            try:
                self._readYaml(stream)
            except ruamel.yaml.scanner.ScannerError:
                # mediocre way to detect xml vs. yaml at the stream level
                runLog.info(
                    "Could not read stream in YAML format. Attempting XML format."
                )
                self.format = self.SettingsInputFormat.XML
                stream.seek(0)

        if self.format == self.SettingsInputFormat.XML:
            self._readXml(stream)

        if handleInvalids:
            self._checkInvalidSettings()
Example #16
def migrateCrossSectionsFromBlueprints(settingsObj):
    settingsPath = settingsObj.path
    runLog.info(
        "Migrating cross section settings from blueprints file to settings file ({})..."
        .format(settingsPath))
    cs = caseSettings.Settings()
    cs.loadFromInputFile(settingsPath)

    fullBlueprintsPath = os.path.join(cs.inputDirectory, cs["loadingFile"])
    origXsInputLines = _convertBlueprints(fullBlueprintsPath)
    if not origXsInputLines:
        runLog.warning("No old input found in {}. Aborting migration.".format(
            fullBlueprintsPath))
        return cs
    newXsData = _migrateInputData(origXsInputLines)
    _writeNewSettingsFile(cs, newXsData)
    # cs now has a proper crossSection setting

    _finalize(fullBlueprintsPath, settingsPath)
    # update the existing cs with the new setting in memory so the GUI doesn't wipe it out!
    settingsObj[CONF_CROSS_SECTION] = cs.settings[CONF_CROSS_SECTION].dump()
    return cs
Example #17
    def _renormalizeNeutronFluxByBlock(self, renormalizationCorePower):
        """
        Normalize the neutron flux within each block to meet the renormalization power.

        Parameters
        ----------
        renormalizationCorePower : float
            Specified power (in Watts) to renormalize the neutron flux to, using the
            isotopic energy generation rates from the cross section libraries.

        See Also
        --------
        getTotalEnergyGenerationConstants
        """
        # update the block power param here as well so
        # the ratio/multiplications below are consistent
        currentCorePower = 0.0
        for b in self.r.core.getBlocks():
            # The multi-group flux is volume integrated, so J/cm * n-cm/s gives units of Watts
            b.p.power = numpy.dot(
                b.getTotalEnergyGenerationConstants(), b.getIntegratedMgFlux()
            )
            b.p.flux = sum(b.getMgFlux())
            currentCorePower += b.p.power

        powerRatio = renormalizationCorePower / currentCorePower
        runLog.info(
            "Renormalizing the neutron flux in {:<s} by a factor of {:<8.5e}, "
            "which is derived from the current core power of {:<8.5e} W and "
            "desired power of {:<8.5e} W".format(
                self.r.core, powerRatio, currentCorePower, renormalizationCorePower
            )
        )
        for b in self.r.core.getBlocks():
            b.p.mgFlux *= powerRatio
            b.p.flux *= powerRatio
            b.p.fluxPeak *= powerRatio
            b.p.power *= powerRatio
            b.p.pdens = b.p.power / b.getVolume()
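A minimal numpy sketch of the renormalization step above, with made-up per-block powers and a desired core power (values are purely illustrative):

import numpy

blockPowers = numpy.array([1.0e6, 2.0e6, 1.5e6])     # W, assumed
blockFluxes = numpy.array([3.0e15, 5.0e15, 4.0e15])  # assumed flux values

currentCorePower = blockPowers.sum()                      # 4.5e6 W
renormalizationCorePower = 9.0e6                          # desired power, W
powerRatio = renormalizationCorePower / currentCorePower  # 2.0

blockPowers *= powerRatio
blockFluxes *= powerRatio
print(powerRatio, blockPowers.sum())                      # 2.0 9000000.0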
Example #18
def writeCycleSummary(core):
    r"""Prints a cycle summary to the runLog

    Parameters
    ----------
    core : armi.reactor.reactors.Core
    """
    ## would io be worth considering for this?
    cycle = core.r.p.cycle
    str_ = []
    runLog.important("Cycle {0} Summary:".format(cycle))
    avgBu = core.calcAvgParam("percentBu",
                              typeSpec=Flags.FUEL,
                              generationNum=2)
    str_.append("Core Average Burnup: {0}".format(avgBu))
    str_.append("Idealized Outlet Temperature {}".format(
        core.p.THoutletTempIdeal))
    str_.append("End of Cycle {0:02d}. Timestamp: {1} ".format(
        cycle, time.ctime()))

    runLog.info("\n".join(str_))
Example #19
 def getTempChangeForDensityChange(self,
                                   Tc: float,
                                   densityFrac: float,
                                   quiet: bool = True) -> float:
     """Return a temperature difference for a given density perturbation."""
     linearExpansion = self.linearExpansion(Tc=Tc)
     linearChange = densityFrac**(-1.0 / 3.0) - 1.0
     deltaT = linearChange / linearExpansion
     if not quiet:
         runLog.info(
             "The linear expansion for {} at initial temperature of {} C is {}.\nA change in density of {} "
             "percent at would require a change in temperature of {} C.".
             format(
                 self.getName(),
                 Tc,
                 linearExpansion,
                 (densityFrac - 1.0) * 100.0,
                 deltaT,
             ),
             single=True,
         )
     return deltaT
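A standalone sketch of the isotropic-expansion relation used above, with an assumed linear expansion coefficient:

alpha = 2.0e-5       # assumed linear expansion coefficient (1/C)
densityFrac = 0.99   # a 1 percent density decrease

linearChange = densityFrac ** (-1.0 / 3.0) - 1.0
deltaT = linearChange / alpha
print(round(deltaT, 1))  # roughly 167.8 C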
Example #20
 def readWrite(self):
     runLog.info(
         "{} LABELS data {}".format(
             "Reading" if "r" in self._fileMode else "Writing", self
         )
     )
     try:
         self._rwFileID()
         self._rw1DRecord()
         self._rw2DRecord()
         if (
             self._metadata["numHalfHeightsDirection1"] > 0
             or self._metadata["numHalfHeightsDirection2"] > 0
         ):
             self._rw3DRecord()
         if self._metadata["numNuclideSets"] > 1:
             self._rw4DRecord()
         if self._metadata["numZoneAliases"] > 0:
             self._rw5DRecord()
         if self._metadata["numControlRodBanks"] > 0:
             runLog.error("Control-rod data has not been implemented")
             self._rw6DRecord()
             self._rw7DRecord()
             self._rw8DRecord()
         if self._metadata["numBurnupDependentIsotopes"] > 0:
             runLog.error("Burnup-dependent isotopes has not been implemented")
             self._rw9DRecord()
         if self._metadata["maxBurnupDependentGroups"] > 0:
             runLog.error("Burnup-dependent groups has not been implemented")
             self._rw10DRecord()
         if self._metadata["maxBurnupPolynomialOrder"] > 0:
             runLog.error(
                 "Burnup-dependent fitting coefficients has not been implemented"
             )
             self._rw11DRecord()
     except:
         runLog.error(traceback.format_exc())
         raise IOError("Failed to read/write LABELS file")
Example #21
 def readWrite(self):
     runLog.info("{} LABELS data {}".format(
         "Reading" if "r" in self._fileMode else "Writing", self))
     self._rwFileID()
     self._rw1DRecord()
     self._rw2DRecord()
     if (self._metadata["numHalfHeightsDirection1"] > 0
             or self._metadata["numHalfHeightsDirection2"] > 0):
         self._rw3DRecord()
     if self._metadata["numNuclideSets"] > 1:
         self._rw4DRecord()
     if self._metadata["numZoneAliases"] > 0:
         self._rw5DRecord()
     if self._metadata["numControlRodBanks"] > 0:
         self._rw6DRecord()
         self._rw7DRecord()
         self._rw8DRecord()
     if self._metadata["numBurnupDependentIsotopes"] > 0:
         self._rw9DRecord()
     if self._metadata["maxBurnupDependentGroups"] > 0:
         self._rw10DRecord()
     if self._metadata["maxBurnupPolynomialOrder"] > 0:
         self._rw11DRecord()
Example #22
    def _writeCaseInformation(o, cs):
        """Create a table that contains basic case information."""
        caseInfo = [
            (Operator_CaseTitle, cs.caseTitle),
            (
                Operator_CaseDescription,
                "{0}".format(textwrap.fill(cs["comment"], break_long_words=False)),
            ),
            (
                Operator_TypeOfRun,
                "{} - {}".format(cs["runType"], o.__class__.__name__),
            ),
            (Operator_CurrentUser, armi.USER),
            (Operator_ArmiCodebase, armi.ROOT),
            (Operator_WorkingDirectory, os.getcwd()),
            (Operator_PythonInterperter, sys.version),
            (Operator_MasterMachine, os.environ.get("COMPUTERNAME", "?")),
            (Operator_NumProcessors, armi.MPI_SIZE),
            (Operator_Date, armi.START_TIME),
        ]

        runLog.header("=========== Case Information ===========")
        runLog.info(tabulate.tabulate(caseInfo, tablefmt="armi"))
Example #23
def updateXSGroupStructure(cs, name, value):
    from armi.utils import units

    try:
        units.getGroupStructure(value)
        return {name: value}
    except KeyError:
        try:
            newValue = value.upper()
            units.getGroupStructure(newValue)
            runLog.info(
                "Updating the cross section group structure from {} to {}".format(
                    value, newValue
                )
            )
            return {name: newValue}
        except KeyError:
            runLog.info(
                "Unable to automatically convert the `groupStructure` setting of {}. Defaulting to {}".format(
                    value, cs.settings["groupStructure"].default
                )
            )
            return {name: cs.settings["groupStructure"].default}
Example #24
def removeGammaTransportActive(cs, _name, value):
    """
    Remove 'gammaTransportActive' and set values of 'globalFluxActive' for the same functionality.

    Parameters
    ----------
    cs : setting object
        ARMI object containing the default and user-specified settings.

    name : str
        Setting name to be modified by this rule.

    value : str
        Value of the setting identified by name.
    """
    if value == "True":
        newValue = NEUTRONGAMMA
    elif value == "False":
        newValue = NEUTRON

    cs["globalFluxActive"] = newValue
    runLog.info(
        "The `globalFluxActive` setting has been set to {} based on deprecated "
        "`gammaTransportActive`.".format(newValue))
Example #25
def convertSettingsFromXMLToYaml(cs):
    if not cs.path.endswith(".xml"):
        raise ValueError("Can only convert XML files")

    old = cs.path
    oldCopy = old + "-converted"
    newNameBase, _ext = os.path.splitext(old)
    newName = newNameBase + ".yaml"
    counter = 0
    while os.path.exists(newName):
        # don't overwrite anything
        newName = "{}{}.yaml".format(newNameBase, counter)
        counter += 1
    if counter:
        runLog.warning(
            "{} already exists in YAML format; writing {} instead".format(
                newNameBase, newName))

    runLog.info(
        "Converting {} to new YAML format. Old copy will remain intact as {}".
        format(old, oldCopy))
    cs.writeToYamlFile(newName)
    cs.path = newName
    shutil.move(old, oldCopy)
Example #26
    def test_callingStartLogMultipleTimes(self):
        """calling startLog() multiple times will lead to multiple output files, but logging should still work"""
        with mockRunLogs.BufferLog() as mock:
            # we should start with a clean slate
            self.assertEqual("", mock._outputStream)
            runLog.LOG.startLog("test_callingStartLogMultipleTimes1")
            runLog.LOG.setVerbosity(logging.INFO)

            # we should start at info level, and that should be working correctly
            self.assertEqual(runLog.LOG.getVerbosity(), logging.INFO)
            runLog.info("hi1")
            self.assertIn("hi1", mock._outputStream)
            mock._outputStream = ""

            # call startLog() again
            runLog.LOG.startLog("test_callingStartLogMultipleTimes2")
            runLog.LOG.setVerbosity(logging.INFO)

            # we should start at info level, and that should be working correctly
            self.assertEqual(runLog.LOG.getVerbosity(), logging.INFO)
            runLog.info("hi2")
            self.assertIn("hi2", mock._outputStream)
            mock._outputStream = ""

            # call startLog() again
            runLog.LOG.startLog("test_callingStartLogMultipleTimes3")
            runLog.LOG.setVerbosity(logging.INFO)

            # we should start at info level, and that should be working correctly
            self.assertEqual(runLog.LOG.getVerbosity(), logging.INFO)
            runLog.info("hi3")
            self.assertIn("hi3", mock._outputStream)
            mock._outputStream = ""

            # call startLog() again, with a duplicate logger name
            runLog.LOG.startLog("test_callingStartLogMultipleTimes3")
            runLog.LOG.setVerbosity(logging.INFO)

            # we should start at info level, and that should be working correctly
            self.assertEqual(runLog.LOG.getVerbosity(), logging.INFO)
            runLog.info("hi333")
            self.assertIn("hi333", mock._outputStream)
            mock._outputStream = ""
Example #27
def migrate_settings(settings_path):
    """Migrate a settings file to be compatible with the latest ARMI code base."""
    if not os.path.exists(settings_path):
        raise ValueError(
            "Case settings file {} does not exist".format(settings_path))

    runLog.info("Migrating case settings: {}".format(settings_path))
    shaHash = utils.getFileSHA1Hash(settings_path)
    runLog.info("\Settings: {}\n" "\tSHA-1: {}".format(settings_path, shaHash))
    cs = caseSettings.Settings()
    reader = cs.loadFromInputFile(settings_path, handleInvalids=False)
    if reader.invalidSettings:
        runLog.info(
            "The following deprecated settings will be deleted:\n  * {}"
            "".format("\n  * ".join(list(reader.invalidSettings))))

    _modify_settings(cs)
    newSettingsInput = cs.caseTitle + "_migrated.yaml"
    cs.writeToYamlFile(newSettingsInput)
    runLog.info("Successfully generated migrated settings file: {}".format(
        newSettingsInput))
Example #28
def promptForSettingsFile(choice=None):
    """
    Allows the user to select an ARMI input from the input files in the directory.

    Parameters
    ----------
    choice : int, optional
        The item in the list of valid YAML files to load
    """
    runLog.info("Welcome to the ARMI Loader")
    runLog.info("Scanning for ARMI settings files...")
    files = sorted(glob.glob("*.yaml"))
    if not files:
        runLog.info(
            "No eligible settings files found. Creating settings without choice"
        )
        return None

    if choice is None:
        for i, pathToFile in enumerate(files):
            runLog.info("[{0}] - {1}".format(i, os.path.split(pathToFile)[-1]))
        choice = int(input("Enter choice: "))

    return files[choice]
Example #29
def _migrateDatabase(databasePath, preCollector, visitor, postApplier):
    """
    Generic database-traversing system to apply custom version-specific migrations.

    Parameters
    ----------
    databasePath : str
        Path to DB file to be converted
    preCollector : callable
        Function that acts on oldDB and produces some generic data object
    visitor : callable
        Function that will be called on each dataset of the old HDF5 database.
        This should map information into the new DB.
    postApplier : callable
        Function that will run after all visiting is done. Will have access
        to the pre-collected data.

    Raises
    ------
    OSError
        When database is not found.
    """
    if not os.path.exists(databasePath):
        raise OSError("Database file {} does not exist".format(databasePath))

    runLog.info("Migrating database file: {}".format(databasePath))
    runLog.info(
        "Generating SHA-1 hash for original database: {}".format(databasePath))
    shaHash = utils.getFileSHA1Hash(databasePath)
    runLog.info("    Database: {}\n"
                "    SHA-1: {}".format(databasePath, shaHash))
    _remoteFolder, remoteDbName = os.path.split(
        databasePath)  # make new DB locally
    root, ext = os.path.splitext(remoteDbName)
    newDBName = root + "_migrated" + ext
    runLog.info("Copying database from {} to {}".format(
        databasePath, newDBName))
    with h5py.File(newDBName, "w") as newDB, h5py.File(databasePath,
                                                       "r") as oldDB:

        preCollection = preCollector(oldDB)

        def closure(name, dataset):
            visitor(newDB, preCollection, name, dataset)

        oldDB.visititems(closure)

        # Copy all old database attributes to the new database (h5py AttributeManager has no update method)
        for key, val in oldDB.attrs.items():
            newDB.attrs[key] = val

        newDB.attrs["original-armi-version"] = oldDB.attrs["version"]
        newDB.attrs["original-db-hash"] = shaHash
        newDB.attrs["original-databaseVersion"] = oldDB.attrs[
            "databaseVersion"]
        newDB.attrs["version"] = version

        postApplier(oldDB, newDB, preCollection)

    runLog.info(
        "Successfully generated migrated database file: {}".format(newDBName))
Example #30
 def printDensities(self, lfpDens):
     """Print densities of nuclides given a LFP density."""
     for n in sorted(self.keys()):
         runLog.info("{0:6s} {1:.7E}".format(n.name, lfpDens * self[n]))