def customReporting(sciBTest, mResults):
    """Generate overall and per-run analysis images and benchmark reports.

    Called immediately after the suite's models have been run.

    :param sciBTest: the science benchmark test object; provides the model
        suite, test components and output path, and is mutated in place
        (``analysisImages`` and ``modelImagesToDisplay`` are set on its
        ``mSuite``).
    :param mResults: list of model results, one per run in the suite.
    """
    # First set up overall analysis images.
    modelplots.plotOverAllRuns(mResults, 'Vrms', depName='Time',
        path=sciBTest.outputPathBase)
    import plotCpuTimesAllRuns as plotCpus
    plotCpus.plotAllRuns(sciBTest.outputPathBase)
    sciBTest.mSuite.analysisImages = [
        "Vrms-multiRunTimeSeries.png",
        'cpuTimePerStep.png']
    # Now specific per-run images.
    fStep = mResults[0].freqOutput.finalStep()
    dEvery = sciBTest.mSuite.runs[0].simParams.dumpevery
    # Round down to the last step at which an image was actually dumped.
    # BUG FIX: use floor division '//' — plain '/' relied on Python 2
    # integer-division semantics and would produce a float step (and thus a
    # non-matching image key) under Python 3.
    lastImgStep = fStep // dEvery * dEvery
    vrmsPeakTime = sciBTest.testComps[0]['VRMS of first diapir'].actualTime
    vrmsPeakStep = mResults[0].freqOutput.getClosest('Time', vrmsPeakTime)[1]
    # Snap the VRMS-peak step to the nearest dump step, but never past the
    # last dumped image.
    vrmsPeakImgStep = int(round(vrmsPeakStep / float(dEvery))) * dEvery
    vrmsPeakImgStep = min([vrmsPeakImgStep, lastImgStep])
    # Create an empty list of images to display, one sub-list per run.
    sciBTest.mSuite.modelImagesToDisplay = [[] for runI in
        range(len(sciBTest.mSuite.runs))]
    # Choose which model timestep images to display:- note that here we're
    # programmatically choosing to show the Peak VRMS timestep.
    sciBTest.mSuite.modelImagesToDisplay[0] = [
        (10, "initial state"),
        (120, ""),
        (vrmsPeakImgStep, "near first VRMS peak at t=%f" % vrmsPeakTime),
        (lastImgStep, "")]
    # Here we just ask the CREDO reporting API to get Report Generators for
    # PDF (ReportLab) and RST (Restructured Text) output, and create a
    # standard science benchmark report.
    for rGen in getGenerators(["RST", "ReportLab"], sciBTest.outputPathBase):
        sReps.makeSciBenchReport(sciBTest, mResults, rGen,
            os.path.join(sciBTest.outputPathBase,
                "%s-report.%s" % (sciBTest.testName, rGen.stdExt)),
            imgPerRow=2)
def customReport_VRMS_Nusselt(sciBTest, mResults):
    """Produce VRMS/Nusselt analysis outputs and benchmark reports.

    Plots the actual VRMS and Nusselt values against the Blankenbach
    reference ranges, writes the observables to CSV, generates overall and
    per-run images, then builds RST and ReportLab benchmark reports.

    :param sciBTest: the science benchmark test object (mutated in place:
        ``analysisImages`` and ``modelImagesToDisplay`` are set on its
        ``mSuite``).
    :param mResults: list of model results, one per run in the suite.
    """
    # Grab the test components / results for the two Blankenbach comparisons.
    vrmsTCs, vrmsResults = sciBTest.getTCRes("VRMS vs Blankenbach")
    nusseltTCs, nusseltResults = sciBTest.getTCRes("Nusselt vs Blankenbach")
    vrmsActuals = [tc.actualVal for tc in vrmsTCs]
    nusseltActuals = [tc.actualVal for tc in nusseltTCs]
    # TODO: useful if below values available on modelResults automatically.
    for res in mResults:
        res.readFrequentOutput()
    nSteps = [res.freqOutput.finalStep() for res in mResults]
    # Plotting / CSV writing.
    thermalConvPostProc.plotResultsVsBlankenbach(BBRa,
        BBVrmsMin, BBVrmsMax, BBNusseltMin, BBNusseltMax,
        {"UW Actual": vrmsActuals}, {"UW Actual": nusseltActuals},
        os.path.join(sciBTest.outputPathBase, "VrmsAndNusseltValues.png"))
    observables = {
        'Vrms': vrmsActuals,
        'Vrms Passed': vrmsResults,
        'Nusselt': nusseltActuals,
        'Nusselt Passed': nusseltResults,
        'nSteps': nSteps,
    }
    msuite.writeInputsOutputsToCSV(sciBTest.mSuite, observables,
        "thermalDimBMResults.csv")
    modelplots.plotOverAllRuns(mResults, 'Nusselt',
        path=sciBTest.outputPathBase)
    modelplots.plotOverAllRuns(mResults, 'Vrms',
        path=sciBTest.outputPathBase)
    # TODO: modularise the below
    import plotCpuTimesAllRuns as plotCpus
    plotCpus.plotAllRuns(sciBTest.outputPathBase)
    sciBTest.mSuite.analysisImages = [
        'VrmsAndNusseltValues.png',
        'Nusselt-multiRunTimeSeries.png',
        'Vrms-multiRunTimeSeries.png',
        'cpuTimePerStep.png']
    sciBTest.mSuite.modelImagesToDisplay = [
        [] for _ in range(len(sciBTest.mSuite.runs))]
    # For each run, snap its final step to the nearest actual dump step.
    lastImgSteps = [
        mRun.getSimParams().nearestDumpStep(finalStep, finalStep)
        for finalStep, mRun in zip(nSteps, sciBTest.mSuite.runs)]
    # NOTE(review): the explicit [0]/[1]/[2] indexing assumes the suite has
    # exactly three runs — confirm against the suite definition.
    sciBTest.mSuite.modelImagesToDisplay[0] = [
        (0, ""), (700, ""), (lastImgSteps[0], "")]
    sciBTest.mSuite.modelImagesToDisplay[1] = [
        (0, ""), (800, ""), (lastImgSteps[1], "")]
    sciBTest.mSuite.modelImagesToDisplay[2] = [
        (0, ""), (400, ""), (lastImgSteps[2], "")]
    for reportGen in getGenerators(["RST", "ReportLab"],
            sciBTest.outputPathBase):
        sReps.makeSciBenchReport(sciBTest, mResults, reportGen,
            os.path.join(sciBTest.outputPathBase,
                "%s-report.%s" % (sciBTest.testName, reportGen.stdExt)),
            imgPerRow=3)
def customReport_VRMS(sciBTest, mResults):
    """Produce VRMS/Entrainment analysis outputs and benchmark reports.

    Plots VRMS and Entrainment time-series over all runs, then chooses per-run
    display images at the start, VRMS-peak, quarter, half and final dump
    steps, and builds RST and ReportLab benchmark reports.

    :param sciBTest: the science benchmark test object (mutated in place:
        ``analysisImages`` and ``modelImagesToDisplay`` are set on its
        ``mSuite``).
    :param mResults: list of model results, one per run in the suite.
    """
    for mRes in mResults:
        mRes.readFrequentOutput()
    # Some custom output generation and analysis.
    fSteps = [mResult.freqOutput.finalStep() for mResult in mResults]
    vrmsTCs, vrmsResults = sciBTest.getTCRes("VRMS Max")
    vrmsActuals = [tc.actualVal for tc in vrmsTCs]
    # Find, per run, the timestep closest to the VRMS-peak time.
    vrmsPeakTimes = [tc.actualTime for tc in vrmsTCs]
    vrmsPeakSteps = [mRes.freqOutput.getClosest('Time', peakTime)[1]
        for mRes, peakTime in zip(mResults, vrmsPeakTimes)]
    # TODO: useful if below values available on modelResults automatically.
    # Plotting
    modelplots.plotOverAllRuns(mResults, 'Vrms', depName='Time',
        path=sciBTest.outputPathBase)
    modelplots.plotOverAllRuns(mResults, 'Entrainment',
        path=sciBTest.outputPathBase, depName='Time')
    # TODO: modularise the below
    import plotCpuTimesAllRuns as plotCpus
    plotCpus.plotAllRuns(sciBTest.outputPathBase)
    sciBTest.mSuite.analysisImages = [
        'Vrms-multiRunTimeSeries.png',
        'Entrainment-multiRunTimeSeries.png',
        'cpuTimePerStep.png']
    sciBTest.mSuite.modelImagesToDisplay = [[] for runI in
        range(len(sciBTest.mSuite.runs))]
    # Snap both the final and VRMS-peak steps to actual dump steps, per run.
    # (A duplicate 'lastImgSteps' computation via dumpevery was removed here:
    # it was immediately overwritten below and relied on Python-2-only
    # integer '/' semantics.)
    lastImgSteps = []
    vrmsPeakImgSteps = []
    for runI, mRun in enumerate(sciBTest.mSuite.runs):
        finalStep = fSteps[runI]
        simParams = mRun.getSimParams()
        lastImgSteps.append(simParams.nearestDumpStep(finalStep, finalStep))
        vrmsPeakImgSteps.append(
            simParams.nearestDumpStep(vrmsPeakSteps[runI], finalStep))
    for resI, mResult in enumerate(mResults):
        simParams = sciBTest.mSuite.runs[resI].getSimParams()
        qtrStep = simParams.nearestDumpStep(fSteps[resI] // 4, fSteps[resI])
        halfStep = simParams.nearestDumpStep(fSteps[resI] // 2, fSteps[resI])
        qtrTime = mResult.freqOutput.getValueAtStep("Time", qtrStep)
        halfTime = mResult.freqOutput.getValueAtStep("Time", halfStep)
        # BUG FIX: the original caption referenced an undefined name
        # 'stopTime' (guaranteed NameError). Use the model time at the last
        # image step, mirroring the qtr/half lookups above.
        finalTime = mResult.freqOutput.getValueAtStep("Time",
            lastImgSteps[resI])
        sciBTest.mSuite.modelImagesToDisplay[resI] = [
            (0, ""),
            (vrmsPeakImgSteps[resI],
                "VRMS Peak, t=%f" % vrmsPeakTimes[resI]),
            (qtrStep, "t=%f" % qtrTime),
            (halfStep, "t=%f" % halfTime),
            (lastImgSteps[resI], "Final, t=%f" % (finalTime))]
    for rGen in getGenerators(["RST", "ReportLab"], sciBTest.outputPathBase):
        sReps.makeSciBenchReport(sciBTest, mResults, rGen,
            os.path.join(sciBTest.outputPathBase,
                "%s-report.%s" % (sciBTest.testName, rGen.stdExt)),
            imgPerRow=3)