Example #1
def renderTemplate(modelDir, absolutePaths=False, datastoreNames={}):
	''' Render the model template to an HTML string.
	By default render a blank one for new input.
	If modelDir is valid, render results post-model-run.
	If absolutePaths, the HTML can be opened without a server. '''
	try:
		inJson = json.load(open(pJoin(modelDir,"allInputData.json")))
		modelPath, modelName = pSplit(modelDir)
		deepPath, user = pSplit(modelPath)
		inJson["modelName"] = modelName
		inJson["user"] = user
		modelType = inJson["modelType"]
		template = getattr(omf.models, modelType).template
		allInputData = json.dumps(inJson)
	except IOError:
		allInputData = None
	try:
		allOutputData = open(pJoin(modelDir,"allOutputData.json")).read()
	except IOError:
		allOutputData = None
	if absolutePaths:
		# Parent of current folder.
		pathPrefix = _omfDir
	else:
		pathPrefix = ""
	return template.render(allInputData=allInputData,
		allOutputData=allOutputData, modelStatus=getStatus(modelDir), pathPrefix=pathPrefix,
		datastoreNames=datastoreNames, modelName=modelType)
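A minimal usage sketch (not part of the example above): calling this renderTemplate from a Flask view to serve the page. The route, the app object, and the data/Model/<user>/<modelName> directory layout are illustrative assumptions only.
# Hypothetical usage; route and directory layout are assumptions, not taken from the source.
import os
from flask import Flask

app = Flask(__name__)

@app.route("/model/<user>/<modelName>")
def viewModel(user, modelName):
	# Assumed layout: model directories live under data/Model/<user>/<modelName>.
	modelDir = os.path.join("data", "Model", user, modelName)
	return renderTemplate(modelDir, absolutePaths=False, datastoreNames={})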
Example #2
def renderTemplate(template, fs, modelDir="", absolutePaths=False, datastoreNames={}):
    ''' Render the model template to an HTML string.
    By default render a blank one for new input.
    If modelDir is valid, render results post-model-run.
    If absolutePaths, the HTML can be opened without a server. '''

    # Our HTML template for the interface:
    with fs.open("models/solarEngineering.html") as tempFile:
        template = Template(tempFile.read())

    try:
        inJson = json.load(fs.open(pJoin(modelDir, "allInputData.json")))
        modelPath, modelName = pSplit(modelDir)
        deepPath, user = pSplit(modelPath)
        inJson["modelName"] = modelName
        inJson["user"] = user
        allInputData = json.dumps(inJson)
    except (IOError, HdfsFileNotFoundException):
        allInputData = None
    try:
        allOutputData = fs.open(pJoin(modelDir, "allOutputData.json")).read()
    except (HdfsFileNotFoundException, IOError):
        allOutputData = None
    if absolutePaths:
        # Parent of current folder.
        pathPrefix = __metaModel__._omfDir
    else:
        pathPrefix = ""
    try:
        inputDict = json.load(fs.open(pJoin(modelDir, "allInputData.json")))
    except (IOError, HdfsFileNotFoundException):
        pass
    return template.render(allInputData=allInputData,
                           allOutputData=allOutputData, modelStatus=getStatus(modelDir, fs), pathPrefix=pathPrefix,
                           datastoreNames=datastoreNames)
Example #3
def renderTemplate(template, fs, modelDir="", absolutePaths=False, datastoreNames={}, quickRender=False):
    ''' Render the model template to an HTML string.
    By default render a blank one for new input.
    If modelDir is valid, render results post-model-run.
    If absolutePaths, the HTML can be opened without a server. 
    If quickRender, pass this to template so we can render for non-logged-in users. '''
    logger.debug('Rendering model template... modelDir: %s; absolutePaths: %s; datastoreNames: %s; quickRender: %s',
                 modelDir, absolutePaths, datastoreNames, quickRender)
    try:
        inJson = json.load(fs.open(pJoin(modelDir, "allInputData.json")))
        modelPath, modelName = pSplit(modelDir)
        deepPath, user = pSplit(modelPath)
        inJson["modelName"] = modelName
        inJson["user"] = user
        allInputData = json.dumps(inJson)
    except (HdfsFileNotFoundException, IOError):
        allInputData = None
    try:
        allOutputData = fs.open(pJoin(modelDir, "allOutputData.json")).read()
    except (HdfsFileNotFoundException, IOError):
        allOutputData = None
    if absolutePaths:
        # Parent of current folder.
        pathPrefix = _omfDir
    else:
        pathPrefix = ""
    with open('templates/footer.html', 'r') as footer_file:
        footer = footer_file.read()
    with open('templates/nrelsObligation.html') as nrels_file:
        nrels_text = nrels_file.read()
    return template.render(allInputData=allInputData,
                           allOutputData=allOutputData, modelStatus=getStatus(modelDir, fs), pathPrefix=pathPrefix,
                           datastoreNames=datastoreNames, quickRender=quickRender, footer=footer, nrels_text=nrels_text)
Example #4
def renderTemplate(modelDir, absolutePaths=False, datastoreNames={}):
    ''' Render the model template to an HTML string.
    By default render a blank one for new input.
    If modelDir is valid, render results post-model-run.
    If absolutePaths, the HTML can be opened without a server. '''
    try:
        inJson = json.load(open(pJoin(modelDir, "allInputData.json")))
        modelPath, modelName = pSplit(modelDir)
        deepPath, user = pSplit(modelPath)
        inJson["modelName"] = modelName
        inJson["user"] = user
        modelType = inJson["modelType"]
        template = getattr(omf.models, modelType).template
        allInputData = json.dumps(inJson)
        # Get hashes for model python and html files
        htmlFile = open(pJoin(_myDir, modelType + ".html"), "r").read()
        currentHtmlHash = hashlib.sha256(htmlFile.encode('utf-8')).hexdigest()
        pythonFile = open(pJoin(_myDir, modelType + ".py"), "r").read()
        currentPythonHash = hashlib.sha256(pythonFile.encode('utf-8')).hexdigest()
    except IOError:
        allInputData = None
        inJson = None
    try:
        allOutputData = open(pJoin(modelDir, "allOutputData.json")).read()
        outJson = json.load(open(pJoin(modelDir, "allOutputData.json")))
        try:
            #Needed? Should this be handled a different way? Add hashes to the output if they are not yet present
            if ('pythonHash' not in outJson) or ('htmlHash' not in outJson):
                print('new model')
                outJson['htmlHash'] = currentHtmlHash
                outJson['pythonHash'] = currentPythonHash
                outJson['oldVersion'] = False
            #If the hashes do not match, mark the model as an old version
            elif outJson['htmlHash'] != currentHtmlHash or outJson[
                    'pythonHash'] != currentPythonHash:
                outJson['oldVersion'] = True
            #If the hashes match, mark the model as up to date
            else:
                outJson['oldVersion'] = False
        except (UnboundLocalError, KeyError) as e:
            traceback.print_exc()
            print('error: ' + str(e))
    except IOError:
        allOutputData = None
        outJson = None
    if absolutePaths:
        # Parent of current folder.
        pathPrefix = _omfDir
    else:
        pathPrefix = ""
    # Raw input output include.
    return template.render(allInputData=allInputData,
                           allOutputData=allOutputData,
                           modelStatus=getStatus(modelDir),
                           pathPrefix=pathPrefix,
                           datastoreNames=datastoreNames,
                           modelName=modelType,
                           allInputDataDict=inJson,
                           allOutputDataDict=outJson)
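The hash bookkeeping above can be read in isolation: hash the model's .py and .html sources and compare them with the hashes recorded in allOutputData.json. A standalone sketch of that idea, with the function name and paths assumed rather than taken from the example:
# Sketch only: isModelStale() is not part of the source; names and paths are assumptions.
import hashlib
import json
import os

def isModelStale(modelDir, modelType, sourceDir):
    # Hash the current model source files (read as bytes so this also works on Python 3).
    with open(os.path.join(sourceDir, modelType + ".html"), "rb") as f:
        currentHtmlHash = hashlib.sha256(f.read()).hexdigest()
    with open(os.path.join(sourceDir, modelType + ".py"), "rb") as f:
        currentPythonHash = hashlib.sha256(f.read()).hexdigest()
    # Compare against the hashes stored with the model's last run, if any.
    with open(os.path.join(modelDir, "allOutputData.json")) as f:
        outJson = json.load(f)
    return (outJson.get("htmlHash") != currentHtmlHash or
            outJson.get("pythonHash") != currentPythonHash)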
Example #5
def renderTemplate(template, fs, modelDir="", absolutePaths=False, datastoreNames={}):
    """ Render the model template to an HTML string.
    By default render a blank one for new input.
    If modelDir is valid, render results post-model-run.
    If absolutePaths, the HTML can be opened without a server. """

    # Our HTML template for the interface:
    with fs.open("models/gridlabMulti.html") as tempFile:
        template = Template(tempFile.read())

    try:
        inJson = json.load(fs.open(pJoin(modelDir, "allInputData.json")))
        modelPath, modelName = pSplit(modelDir)
        deepPath, user = pSplit(modelPath)
        inJson["modelName"] = modelName
        inJson["user"] = user
        allInputData = json.dumps(inJson)
    except HdfsFileNotFoundException:
        allInputData = None
    except IOError:
        allInputData = None
    try:
        allOutputData = fs.open(pJoin(modelDir, "allOutputData.json")).read()
    except HdfsFileNotFoundException:
        allOutputData = None
    except IOError:
        allOutputData = None
    if absolutePaths:
        # Parent of current folder.
        pathPrefix = __metaModel__._omfDir
    else:
        pathPrefix = ""
    feederList = []
    feederIDs = []
    try:
        inputDict = json.load(fs.open(pJoin(modelDir, "allInputData.json")))
        for key in inputDict:
            if key.startswith("feederName"):
                feederIDs.append(key)
                feederList.append(inputDict[key])
    except HdfsFileNotFoundException:
        pass
    except IOError:
        pass
    with open("templates/footer.html", "r") as footer_file:
        footer = footer_file.read()
    return template.render(
        allInputData=allInputData,
        allOutputData=allOutputData,
        modelStatus=getStatus(modelDir, fs),
        pathPrefix=pathPrefix,
        datastoreNames=datastoreNames,
        feederIDs=feederIDs,
        feederList=feederList,
        footer=footer,
    )
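The HDFS-flavored variants (this example and Example #2) only need fs to expose an open() that returns a file-like object and raises IOError or HdfsFileNotFoundException for missing files. A trivial local-disk stand-in, purely illustrative:
# Illustrative stand-in for the fs argument; not part of the source.
class LocalFs(object):
    ''' Minimal object exposing the open() interface the examples rely on. '''
    def open(self, path, mode="r"):
        # Missing local files raise IOError, which the examples already catch.
        return open(path, mode)

# The template argument is replaced inside the function, so any placeholder works:
# html = renderTemplate(None, LocalFs(), modelDir="data/Model/admin/someModel")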
Example #6
def renderTemplate(template,
                   modelDir="",
                   absolutePaths=False,
                   datastoreNames={}):
    ''' Render the model template to an HTML string.
    By default render a blank one for new input.
    If modelDir is valid, render results post-model-run.
    If absolutePaths, the HTML can be opened without a server. '''
    try:
        inJson = json.load(open(pJoin(modelDir, "allInputData.json")))
        modelPath, modelName = pSplit(modelDir)
        deepPath, user = pSplit(modelPath)
        inJson["modelName"] = modelName
        inJson["user"] = user
        allInputData = json.dumps(inJson)
    except IOError:
        allInputData = None
    try:
        allOutputData = open(pJoin(modelDir, "allOutputData.json")).read()
    except IOError:
        allOutputData = None
    if absolutePaths:
        # Parent of current folder.
        pathPrefix = __metaModel__._omfDir
    else:
        pathPrefix = ""
    feederList = []
    feederIDs = []
    try:
        inputDict = json.load(open(pJoin(modelDir, "allInputData.json")))
        for key in [
                "feederName1", "feederName2", "feederName3", "feederName4",
                "feederName5"
        ]:
            feederName = inputDict.get(key, '')
            if feederName != '':
                feederIDs.append(key)
                feederList.append(feederName)
    except IOError:
        pass
    return template.render(allInputData=allInputData,
                           allOutputData=allOutputData,
                           modelStatus=getStatus(modelDir),
                           pathPrefix=pathPrefix,
                           datastoreNames=datastoreNames,
                           feederIDs=feederIDs,
                           feederList=feederList)
Example #7
def renderTemplate(template, modelDir="", absolutePaths=False, datastoreNames={}):
	''' Render the model template to an HTML string.
	By default render a blank one for new input.
	If modelDir is valid, render results post-model-run.
	If absolutePaths, the HTML can be opened without a server. '''
	try:
		inJson = json.load(open(pJoin(modelDir,"allInputData.json")))
		modelPath, modelName = pSplit(modelDir)
		deepPath, user = pSplit(modelPath)
		inJson["modelName"] = modelName
		inJson["user"] = user
		allInputData = json.dumps(inJson)
	except IOError:
		allInputData = None
	try:
		allOutputData = open(pJoin(modelDir,"allOutputData.json")).read()
	except IOError:
		allOutputData = None
	if absolutePaths:
		# Parent of current folder.
		pathPrefix = __metaModel__._omfDir
	else:
		pathPrefix = ""
	feederList = []
	feederIDs = []
	try:
		inputDict = json.load(open(pJoin(modelDir, "allInputData.json")))
		for key in ["feederName1", "feederName2", "feederName3", "feederName4", "feederName5"]:
			feederName = inputDict.get(key,'')
			if feederName != '':
				feederIDs.append(key)
				feederList.append(feederName)
	except IOError:
		pass
	return template.render(allInputData=allInputData,
		allOutputData=allOutputData, modelStatus=getStatus(modelDir), pathPrefix=pathPrefix,
		datastoreNames=datastoreNames, feederIDs=feederIDs, feederList=feederList, modelName=modelName)
Example #8
			if 'feederName' in key:
				output[key] = feederName
		with open(pJoin(modelDir,"allOutputData.json"),"w") as outFile:
			json.dump(output, outFile, indent=4)
		try:
			os.remove(pJoin(modelDir, "PPID.txt"))
		except:
			pass
		# Send email to user on model success.
		emailStatus = inputDict.get('emailStatus', 0)
		if (emailStatus == "on"):
			print("\n    EMAIL ALERT ON")
			email = session['user_id']
			try:
				user = json.load(open("data/User/" + email + ".json"))
				modelPath, modelName = pSplit(modelDir)
				message = "The model " + "<i>" + str(modelName) + "</i>" + " has successfully completed running. It ran for a total of " + str(inputDict["runTime"]) + " seconds from " + str(beginTime) + ", to " + str(finishTime) + "."
				return web.send_link(email, message, user)
			except Exception as e:
				print("ERROR: Failed sending model status email to user: ", email, ", with exception: \n", e)
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print('ERROR IN MODEL', modelDir, thisErr)
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Send email to user on model failure.
Example #9
def renderTemplate(modelDir, absolutePaths=False, datastoreNames={}):
    ''' Render the model template to an HTML string.
    By default render a blank one for new input.
    If modelDir is valid, render results post-model-run.
    If absolutePaths, the HTML can be opened without a server. '''
    try:
        with web.locked_open(pJoin(modelDir, 'allInputData.json')) as f:
            inJson = json.load(f)
        modelPath, modelName = pSplit(modelDir)
        deepPath, modelOwner = pSplit(modelPath)
        inJson["modelName"] = modelName
        inJson["user"] = modelOwner
        modelType = inJson["modelType"]
        template = getattr(omf.models, modelType).template
        allInputData = json.dumps(inJson)
        # Get hashes for model python and html files
        with open(pJoin(_myDir, modelType + ".html")) as f:
            htmlFile = f.read()
        currentHtmlHash = hashlib.sha256(htmlFile.encode('utf-8')).hexdigest()
        with open(pJoin(_myDir, modelType + ".py")) as f:
            pythonFile = f.read()
        currentPythonHash = hashlib.sha256(
            pythonFile.encode('utf-8')).hexdigest()
    except IOError:
        allInputData = None
        inJson = None
    try:
        with web.locked_open(pJoin(modelDir, "allOutputData.json")) as f:
            allOutputData = f.read()
        with web.locked_open(pJoin(modelDir, "allOutputData.json")) as f:
            outJson = json.load(f)
        try:
            #Needed? Should this be handled a different way? Add hashes to the output if they are not yet present
            if ('pythonHash' not in outJson) or ('htmlHash' not in outJson):
                outJson['htmlHash'] = currentHtmlHash
                outJson['pythonHash'] = currentPythonHash
                outJson['oldVersion'] = False
            #If the hashes do not match, mark the model as an old version
            elif outJson['htmlHash'] != currentHtmlHash or outJson[
                    'pythonHash'] != currentPythonHash:
                outJson['oldVersion'] = True
            #If the hashes match, mark the model as up to date
            else:
                outJson['oldVersion'] = False
        except (UnboundLocalError, KeyError) as e:
            traceback.print_exc()
            print('error: ' + str(e))
    except IOError:
        allOutputData = None
        outJson = None
    if absolutePaths:
        # Parent of current folder.
        pathPrefix = _omfDir
    else:
        pathPrefix = ""
    # Generate standard raw output files.
    rawFilesTemplate = '''
		<p class="reportTitle">Raw Input and Output Files</p>
		<div id="rawOutput" class="content" style="margin-top:0px">
			{% for name in allOutputDataDict['fileNames'] %}
				{% if loop.index > 1 %}&mdash; {% endif %}<a href="/downloadModelData/{{allInputDataDict['user']}}/{{allInputDataDict['modelName']}}/{{name}}">{{name}}</a>
			{% endfor %}
		</div>
	'''
    rawOutputFiles = Template(rawFilesTemplate).render(
        allOutputDataDict=outJson, allInputDataDict=inJson)
    # Generate standard model buttons.
    omfModelButtonsTemplate = '''
		<div class="wideInput" style="text-align:right">
		{% if modelStatus != 'running' and (loggedInUser == modelOwner or loggedInUser == 'admin') %}
		<button id="deleteButton" type="button" onclick="deleteModel()">Delete</button>
		<button id="runButton" type="submit">Run Model</button>
		{% endif %}
		{% if modelStatus == "finished" %}
		<button id="shareButton" type="button" onclick="shareModel()">Share</button>
		<button id="duplicateButton" type="button" onclick="duplicateModel()">Duplicate</button>
		{% endif %}
		{% if modelStatus == "running" and (loggedInUser == modelOwner or loggedInUser == 'admin') %}
		<button id="cancelButton" type="button" onclick="cancelModel()">Cancel Run</button>
		{% endif %}
	</div>
	'''
    # Generate standard status content.
    loggedInUser = datastoreNames.get('currentUser', 'test')
    modelStatus = getStatus(modelDir)
    omfModelButtons = Template(omfModelButtonsTemplate).render(
        modelStatus=modelStatus,
        loggedInUser=loggedInUser,
        modelOwner=modelOwner)
    now = datetime.datetime.now()
    try:
        mod_start = datetime.datetime.fromisoformat(inJson.get('runStartTime'))
    except (TypeError, ValueError):
        mod_start = now
    elapsed_dt = now - mod_start
    elapsed_min = elapsed_dt.total_seconds() / 60.0
    model_estimate_min = float(inJson.get('runtimeEst_min', '2.0'))
    remain_min = model_estimate_min - elapsed_min
    runDebugTemplate = '''
		{% if modelStatus == 'running' %}
		<div id="runIndicator" class="content">
			Model has run for {{elapsed_min}} minutes. {{remain_min}} minutes estimated until completion. Results updated every 5 seconds.
		</div>
		{% endif %}
		{% if modelStatus == 'stopped' and stderr != '' %}
		<div id="stopIndicator" class="content">
			<pre id='errorText' style='overflow-x:scroll'>MODEL ENCOUNTERED AN ERROR AS FOLLOWS:\n\n{{stderr}}</pre>
		</div>
		{% endif %}
		'''
    omfRunDebugBlock = Template(runDebugTemplate).render(
        modelStatus=modelStatus,
        stderr=inJson.get('stderr', ''),
        elapsed_min=round(elapsed_min, 2),
        remain_min=round(remain_min, 2))
    # Raw input output include.
    return template.render(allInputData=allInputData,
                           allOutputData=allOutputData,
                           modelStatus=modelStatus,
                           pathPrefix=pathPrefix,
                           datastoreNames=datastoreNames,
                           modelName=modelType,
                           allInputDataDict=inJson,
                           allOutputDataDict=outJson,
                           rawOutputFiles=rawOutputFiles,
                           omfModelButtons=omfModelButtons,
                           omfRunDebugBlock=omfRunDebugBlock)
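The elapsed/remaining arithmetic behind the run indicator above is small enough to lift out. A sketch, assuming runStartTime is an ISO-format timestamp and runtimeEst_min is in minutes, as the example's defaults suggest:
# Sketch only; runProgress() is not part of the source.
import datetime

def runProgress(inJson):
    now = datetime.datetime.now()
    try:
        started = datetime.datetime.fromisoformat(inJson.get('runStartTime'))
    except (TypeError, ValueError):
        # Missing or malformed start time: treat the model as having just started.
        started = now
    elapsed_min = (now - started).total_seconds() / 60.0
    remain_min = float(inJson.get('runtimeEst_min', '2.0')) - elapsed_min
    return round(elapsed_min, 2), round(remain_min, 2)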
Example #10
def runForeground(modelDir, test_mode=False):
    ''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
    with open(pJoin(modelDir, 'allInputData.json')) as f:
        inputDict = json.load(f)
    print("STARTING TO RUN", modelDir)
    beginTime = datetime.datetime.now()
    # Prepare data and clean the workspace: if this is a re-run, remove all data in the subfolders.
    for dirs in os.listdir(modelDir):
        if os.path.isdir(pJoin(modelDir, dirs)):
            shutil.rmtree(pJoin(modelDir, dirs))
    # Get the names of the feeders from the .omd files:
    feederNames = [x[0:-4] for x in os.listdir(modelDir) if x.endswith(".omd")]
    for i, key in enumerate(feederNames):
        inputDict['feederName' + str(i + 1)] = feederNames[i]
    # Run GridLAB-D once for each feeder:
    for feederName in feederNames:
        try:
            os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
        except Exception as e:
            pass
        if not os.path.isdir(pJoin(modelDir, feederName)):
            os.makedirs(pJoin(modelDir,
                              feederName))  # create subfolders for feeders
        shutil.copy(pJoin(modelDir, feederName + ".omd"),
                    pJoin(modelDir, feederName, "feeder.omd"))
        inputDict["climateName"] = weather.zipCodeToClimateName(
            inputDict["zipCode"])
        shutil.copy(
            pJoin(_omfDir, "data", "Climate",
                  inputDict["climateName"] + ".tmy2"),
            pJoin(modelDir, feederName, "climate.tmy2"))
        try:
            startTime = datetime.datetime.now()
            with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
                feederJson = json.load(f)
            tree = feederJson["tree"]
            # Set up GLM with correct time and recorders:
            feeder.attachRecorders(tree, "Regulator", "object", "regulator")
            feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
            feeder.attachRecorders(tree, "Inverter", "object", "inverter")
            feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
            feeder.attachRecorders(tree, "CollectorVoltage", None, None)
            feeder.attachRecorders(tree, "Climate", "object", "climate")
            feeder.attachRecorders(tree, "OverheadLosses", None, None)
            feeder.attachRecorders(tree, "UndergroundLosses", None, None)
            feeder.attachRecorders(tree, "TriplexLosses", None, None)
            feeder.attachRecorders(tree, "TransformerLosses", None, None)
            feeder.groupSwingKids(tree)
            feeder.adjustTime(tree=tree,
                              simLength=float(inputDict["simLength"]),
                              simLengthUnits=inputDict["simLengthUnits"],
                              simStartDate=inputDict["simStartDate"])
            # RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
            rawOut = gridlabd.runInFilesystem(
                tree,
                attachments=feederJson["attachments"],
                keepFiles=True,
                workDir=pJoin(modelDir, feederName))
            cleanOut = {}
            # Std Err and Std Out
            cleanOut['stderr'] = rawOut['stderr']
            cleanOut['stdout'] = rawOut['stdout']
            # Time Stamps
            for key in rawOut:
                if '# timestamp' in rawOut[key]:
                    cleanOut['timeStamps'] = rawOut[key]['# timestamp']
                    break
                elif '# property.. timestamp' in rawOut[key]:
                    cleanOut['timeStamps'] = rawOut[key][
                        '# property.. timestamp']
                else:
                    cleanOut['timeStamps'] = []
            # Day/Month Aggregation Setup:
            stamps = cleanOut.get('timeStamps', [])
            level = inputDict.get('simLengthUnits', 'hours')
            # Climate
            for key in rawOut:
                if key.startswith('Climate_') and key.endswith('.csv'):
                    cleanOut['climate'] = {}
                    cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(
                        rawOut[key].get('rainfall'), sum, level)
                    cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(
                        rawOut[key].get('wind_speed'), avg, level)
                    cleanOut['climate']['Temperature (F)'] = hdmAgg(
                        rawOut[key].get('temperature'), max, level)
                    cleanOut['climate']['Snow Depth (in)'] = hdmAgg(
                        rawOut[key].get('snowdepth'), max, level)
                    cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(
                        rawOut[key].get('solar_direct'), sum, level)
            # Voltage Band
            if 'VoltageJiggle.csv' in rawOut:
                cleanOut['allMeterVoltages'] = {}
                cleanOut['allMeterVoltages']['Min'] = hdmAgg([
                    (i / 2)
                    for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']
                ], min, level)
                cleanOut['allMeterVoltages']['Mean'] = hdmAgg(
                    [(i / 2) for i in rawOut['VoltageJiggle.csv']
                     ['mean(voltage_12.mag)']], avg, level)
                cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([
                    (i / 2)
                    for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']
                ], avg, level)
                cleanOut['allMeterVoltages']['Max'] = hdmAgg([
                    (i / 2)
                    for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']
                ], max, level)
            cleanOut['allMeterVoltages']['stdDevPos'] = [
                (x + y / 2)
                for x, y in zip(cleanOut['allMeterVoltages']['Mean'],
                                cleanOut['allMeterVoltages']['StdDev'])
            ]
            cleanOut['allMeterVoltages']['stdDevNeg'] = [
                (x - y / 2)
                for x, y in zip(cleanOut['allMeterVoltages']['Mean'],
                                cleanOut['allMeterVoltages']['StdDev'])
            ]
            # Total # of meters
            count = 0
            with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
                for line in f:
                    if "\"objectType\": \"triplex_meter\"" in line:
                        count += 1
            # print "count=", count
            cleanOut['allMeterVoltages']['triplexMeterCount'] = float(count)
            # Power Consumption
            cleanOut['Consumption'] = {}
            # Set default value to be 0, avoiding missing value when computing Loads
            cleanOut['Consumption']['Power'] = [0] * int(
                inputDict["simLength"])
            cleanOut['Consumption']['Losses'] = [0] * int(
                inputDict["simLength"])
            cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
            for key in rawOut:
                if key.startswith('SwingKids_') and key.endswith('.csv'):
                    oneSwingPower = hdmAgg(
                        vecPyth(rawOut[key]['sum(power_in.real)'],
                                rawOut[key]['sum(power_in.imag)']), avg, level)
                    if 'Power' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['Power'] = oneSwingPower
                    else:
                        cleanOut['Consumption']['Power'] = vecSum(
                            oneSwingPower, cleanOut['Consumption']['Power'])
                elif key.startswith('Inverter_') and key.endswith('.csv'):
                    realA = rawOut[key]['power_A.real']
                    realB = rawOut[key]['power_B.real']
                    realC = rawOut[key]['power_C.real']
                    imagA = rawOut[key]['power_A.imag']
                    imagB = rawOut[key]['power_B.imag']
                    imagC = rawOut[key]['power_C.imag']
                    oneDgPower = hdmAgg(
                        vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
                               vecPyth(realC, imagC)), avg, level)
                    if 'DG' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['DG'] = oneDgPower
                    else:
                        cleanOut['Consumption']['DG'] = vecSum(
                            oneDgPower, cleanOut['Consumption']['DG'])
                elif key.startswith('Windmill_') and key.endswith('.csv'):
                    vrA = rawOut[key]['voltage_A.real']
                    vrB = rawOut[key]['voltage_B.real']
                    vrC = rawOut[key]['voltage_C.real']
                    viA = rawOut[key]['voltage_A.imag']
                    viB = rawOut[key]['voltage_B.imag']
                    viC = rawOut[key]['voltage_C.imag']
                    crB = rawOut[key]['current_B.real']
                    crA = rawOut[key]['current_A.real']
                    crC = rawOut[key]['current_C.real']
                    ciA = rawOut[key]['current_A.imag']
                    ciB = rawOut[key]['current_B.imag']
                    ciC = rawOut[key]['current_C.imag']
                    powerA = vecProd(vecPyth(vrA, viA), vecPyth(crA, ciA))
                    powerB = vecProd(vecPyth(vrB, viB), vecPyth(crB, ciB))
                    powerC = vecProd(vecPyth(vrC, viC), vecPyth(crC, ciC))
                    # HACK: multiply by negative one because turbine power sign is opposite all other DG:
                    oneDgPower = [
                        -1.0 * x for x in hdmAgg(
                            vecSum(powerA, powerB, powerC), avg, level)
                    ]
                    if 'DG' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['DG'] = oneDgPower
                    else:
                        cleanOut['Consumption']['DG'] = vecSum(
                            oneDgPower, cleanOut['Consumption']['DG'])
                elif key in [
                        'OverheadLosses.csv', 'UndergroundLosses.csv',
                        'TriplexLosses.csv', 'TransformerLosses.csv'
                ]:
                    realA = rawOut[key]['sum(power_losses_A.real)']
                    imagA = rawOut[key]['sum(power_losses_A.imag)']
                    realB = rawOut[key]['sum(power_losses_B.real)']
                    imagB = rawOut[key]['sum(power_losses_B.imag)']
                    realC = rawOut[key]['sum(power_losses_C.real)']
                    imagC = rawOut[key]['sum(power_losses_C.imag)']
                    oneLoss = hdmAgg(
                        vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
                               vecPyth(realC, imagC)), avg, level)
                    if 'Losses' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['Losses'] = oneLoss
                    else:
                        cleanOut['Consumption']['Losses'] = vecSum(
                            oneLoss, cleanOut['Consumption']['Losses'])
            # Aggregate up the timestamps:
            if level == 'days':
                cleanOut['timeStamps'] = aggSeries(stamps, stamps,
                                                   lambda x: x[0][0:10],
                                                   'days')
            elif level == 'months':
                cleanOut['timeStamps'] = aggSeries(stamps, stamps,
                                                   lambda x: x[0][0:7],
                                                   'months')
            # Write the output.
            with open(pJoin(modelDir, feederName, "allOutputData.json"),
                      "w") as outFile:
                json.dump(cleanOut, outFile, indent=4)
            # Update the runTime in the input file.
            endTime = datetime.datetime.now()
            inputDict["runTime"] = str(
                datetime.timedelta(seconds=int((endTime -
                                                startTime).total_seconds())))
            with open(pJoin(modelDir, feederName, "allInputData.json"),
                      "w") as inFile:
                json.dump(inputDict, inFile, indent=4)
            # Clean up the PID file.
            os.remove(pJoin(modelDir, feederName, "PID.txt"))
            print("DONE RUNNING GRIDLABMULTI", modelDir, feederName)
        except Exception as e:
            if test_mode == True:
                raise e
            print("MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName)
            cancel(pJoin(modelDir, feederName))
            with open(pJoin(modelDir, feederName, "stderr.txt"),
                      "a+") as stderrFile:
                traceback.print_exc(file=stderrFile)
    finishTime = datetime.datetime.now()
    inputDict["runTime"] = str(
        datetime.timedelta(seconds=int((finishTime -
                                        beginTime).total_seconds())))
    with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
        json.dump(inputDict, inFile, indent=4)
    # Integrate data into allOutputData.json, if error happens, cancel it
    try:
        output = {}
        output["failures"] = {}
        numOfFeeders = 0
        for root, dirs, files in os.walk(modelDir):
            # dump error info into dict
            if "stderr.txt" in files:
                with open(pJoin(root, "stderr.txt"), "r") as stderrFile:
                    tempString = stderrFile.read()
                    if "ERROR" in tempString or "FATAL" in tempString or "Traceback" in tempString:
                        output["failures"]["feeder_" +
                                           str(os.path.split(root)[-1])] = {
                                               "stderr": tempString
                                           }
                        continue
            # dump simulated data into dict
            if "allOutputData.json" in files:
                with open(pJoin(root, "allOutputData.json"),
                          "r") as feederOutputData:
                    numOfFeeders += 1
                    feederOutput = json.load(feederOutputData)
                    # TODO: a better feeder name
                    output["feeder_" + str(os.path.split(root)[-1])] = {}
                    output["feeder_" +
                           str(os.path.split(root)[-1]
                               )]["Consumption"] = feederOutput["Consumption"]
                    output["feeder_" + str(os.path.split(root)[-1])][
                        "allMeterVoltages"] = feederOutput["allMeterVoltages"]
                    output["feeder_" + str(os.path.split(
                        root)[-1])]["stderr"] = feederOutput["stderr"]
                    output["feeder_" + str(os.path.split(
                        root)[-1])]["stdout"] = feederOutput["stdout"]
                    # output[root] = {feederOutput["Consumption"], feederOutput["allMeterVoltages"], feederOutput["stdout"], feederOutput["stderr"]}
        output["numOfFeeders"] = numOfFeeders
        output["timeStamps"] = feederOutput.get("timeStamps", [])
        output["climate"] = feederOutput.get("climate", [])
        # Add feederNames to output so allInputData feederName changes don't cause output rendering to disappear.
        for key, feederName in inputDict.items():
            if 'feederName' in key:
                output[key] = feederName
        with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile:
            json.dump(output, outFile, indent=4)
        try:
            os.remove(pJoin(modelDir, "PPID.txt"))
        except:
            pass
        # Send email to user on model success.
        emailStatus = inputDict.get('emailStatus', 0)
        if (emailStatus == "on"):
            print("\n    EMAIL ALERT ON")
            email = session['user_id']
            try:
                with open("data/User/" + email + ".json") as f:
                    user = json.load(f)
                modelPath, modelName = pSplit(modelDir)
                message = "The model " + "<i>" + str(
                    modelName
                ) + "</i>" + " has successfully completed running. It ran for a total of " + str(
                    inputDict["runTime"]) + " seconds from " + str(
                        beginTime) + ", to " + str(finishTime) + "."
                return web.send_link(email, message, user)
            except Exception as e:
                print("ERROR: Failed sending model status email to user: ", email, ", with exception: \n", e)
    except Exception as e:
        # If input range wasn't valid delete output, write error to disk.
        cancel(modelDir)
        thisErr = traceback.format_exc()
        print('ERROR IN MODEL', modelDir, thisErr)
        inputDict['stderr'] = thisErr
        with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile:
            errorFile.write(thisErr)
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
        # Send email to user on model failure.
        email = 'NoEmail'
        try:
            email = session['user_id']
            with open("data/User/" + email + ".json") as f:
                user = json.load(f)
            modelPath, modelName = pSplit(modelDir)
            message = "The model " + "<i>" + str(
                modelName
            ) + "</i>" + " has failed to complete running. It ran for a total of " + str(
                inputDict["runTime"]) + " seconds from " + str(
                    beginTime) + ", to " + str(finishTime) + "."
            return web.send_link(email, message, user)
        except Exception as e:
            print("Failed sending model status email to user: ", email, ", with exception: \n", e)
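The post-processing loop in this example leans on helpers imported from elsewhere in OMF (hdmAgg, avg, vecPyth, vecSum, vecProd) whose definitions are not shown. The sketch below only guesses at the element-wise semantics the surrounding code implies; the real helpers may differ.
# Assumed semantics only; the actual helpers live elsewhere in the OMF codebase.
import math

def vecPyth(realVec, imagVec):
    # Element-wise magnitude of a complex quantity given real and imaginary columns.
    return [math.sqrt(r ** 2 + i ** 2) for r, i in zip(realVec, imagVec)]

def vecSum(*vectors):
    # Element-wise sum of any number of equal-length vectors.
    return [sum(vals) for vals in zip(*vectors)]

def vecProd(vecA, vecB):
    # Element-wise product of two equal-length vectors.
    return [a * b for a, b in zip(vecA, vecB)]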