def runPowerflowIter(tree,scadaSubPower):
	'''Runs powerflow once, then iterates.

	Repeatedly rescales the substation load player and re-runs GridLAB-D until
	the simulated total real power matches the SCADA total within
	calibrateError[0] (relative error), or calibrateError[1] iterations pass.
	Returns (outRealPow, outRealPowIter, lastFile, iteration).
	NOTE(review): depends on enclosing-scope names not defined here:
	calibrateError, trim, simLength, workDir, playerKey, outputRecorderKey,
	gridlabd, pJoin — confirm this function is nested where those are bound.
	'''
	# Run initial powerflow to get power.
	print "Starting calibration."
	print "Goal of calibration: Error: %s, Iterations: <%s, trim: %s"%(calibrateError[0], calibrateError[1], trim)
	output = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir,"gridlabD"))
	# Slice off the first `trim` warm-up steps of the recorded substation power.
	outRealPow = output["caliSub.csv"]["measured_real_power"][trim:simLength]
	outImagPower = output["caliSub.csv"]["measured_reactive_power"][trim:simLength]
	# Apparent power magnitude, divided by 1000 (assumes recorder output in W,
	# SCADA in kW — TODO confirm units against the recorder definition).
	outAppPowerKw = [(x[0]**2 + x[1]**2)**0.5/1000 for x in zip(outRealPow, outImagPower)]
	lastFile = "subScada.player"
	nextFile = "subScadaCalibrated.player"
	nextPower = outAppPowerKw
	# Relative error of total simulated real power vs. total SCADA power.
	error = (sum(outRealPow)/1000-sum(scadaSubPower))/sum(scadaSubPower)
	iteration = 1
	print "First error:", error
	while abs(error)>calibrateError[0] and iteration<calibrateError[1]:
		# Run calibration and iterate up to 5 times.
		# Scale factor that would make the previous run's total match SCADA's.
		SCAL_CONST = sum(scadaSubPower)/sum(nextPower)
		print "Calibrating & running again... Error: %s, Iteration: %s, SCAL_CONST: %s"%(str(round(abs(error*100),6)), str(iteration), round(SCAL_CONST,6))
		# Rewrite the player file with every value multiplied by SCAL_CONST.
		newPlayData = []
		with open(pJoin(pJoin(workDir,"gridlabD"), lastFile), "r") as playerFile:
			for line in playerFile:
				(key,val) = line.split(',')
				newPlayData.append(str(key) + ',' + str(float(val)*SCAL_CONST) + "\n")
		with open(pJoin(pJoin(workDir,"gridlabD"), nextFile), "w") as playerFile:
			for row in newPlayData:
				playerFile.write(row)
		# Point the tree's player and recorder at the new files and re-run.
		tree[playerKey]["file"] = nextFile
		tree[outputRecorderKey]["file"] = "caliSubCheck.csv"
		nextOutput = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir,"gridlabD"))
		outRealPowIter = nextOutput["caliSubCheck.csv"]["measured_real_power"][trim:simLength]
		outImagPowerIter = nextOutput["caliSubCheck.csv"]["measured_reactive_power"][trim:simLength]
		nextAppKw = [(x[0]**2 + x[1]**2)**0.5/1000 for x in zip(outRealPowIter, outImagPowerIter)]
		lastFile = nextFile
		nextFile = "subScadaCalibrated"+str(iteration)+".player"
		nextPower = nextAppKw
		# Compute error and iterate.
		error = (sum(outRealPowIter)/1000-sum(scadaSubPower))/sum(scadaSubPower)
		iteration+=1
	else:
		# while/else: this runs when the loop condition goes false (the loop
		# body never breaks, so it always runs after the loop finishes).
		if iteration==1:
			# Loop body never executed: first guess was already within tolerance.
			outRealPowIter = outRealPow
			SCAL_CONST = 1.0
		print "Calibration done: Error: %s, Iteration: %s, SCAL_CONST: %s"%(str(round(abs(error*100),2)), str(iteration), round(SCAL_CONST,2))
	return outRealPow, outRealPowIter, lastFile, iteration
def milsoftToGridlabTests(keepFiles=False): openPrefix = '../uploads/' outPrefix = './milToGridlabTests/' import os, json, traceback, shutil from omf.solvers import gridlabd from matplotlib import pyplot as plt from milToGridlab import convert import omf.feeder as feeder try: os.mkdir(outPrefix) except: pass # Directory already there. exceptionCount = 0 # testFiles = [('INEC-RENOIR.std','INEC.seq'), ('INEC-GRAHAM.std','INEC.seq'), # ('Olin-Barre.std','Olin.seq'), ('Olin-Brown.std','Olin.seq'), # ('ABEC-FRANK.std','ABEC.seq'), ('ABEC-COLUMBIA.std','ABEC.seq'),('OMF_Norfork1.std', 'OMF_Norfork1.seq')] testFiles = [('Olin-Brown.std', 'Olin.seq')] testAttachments = {'schedules.glm':''} # testAttachments = {'schedules.glm':'', 'climate.tmy2':open('./data/Climate/KY-LEXINGTON.tmy2','r').read()} for stdString, seqString in testFiles: try: # Convert the std+seq. with open(openPrefix + stdString,'r') as stdFile, open(openPrefix + seqString,'r') as seqFile: outGlm,x,y = convert(stdFile.read(),seqFile.read()) with open(outPrefix + stdString.replace('.std','.glm'),'w') as outFile: outFile.write(feeder.sortedWrite(outGlm)) print 'WROTE GLM FOR', stdString try: # Draw the GLM. myGraph = feeder.treeToNxGraph(outGlm) feeder.latLonNxGraph(myGraph, neatoLayout=False) plt.savefig(outPrefix + stdString.replace('.std','.png')) print 'DREW GLM OF', stdString except: exceptionCount += 1 print 'FAILED DRAWING', stdString try: # Run powerflow on the GLM. HACK:blank attachments for now. output = gridlabd.runInFilesystem(outGlm, attachments=testAttachments, keepFiles=False) with open(outPrefix + stdString.replace('.std','.json'),'w') as outFile: json.dump(output, outFile, indent=4) print 'RAN GRIDLAB ON', stdString except: exceptionCount += 1 print 'POWERFLOW FAILED', stdString except: print 'FAILED CONVERTING', stdString exceptionCount += 1 traceback.print_exc() if not keepFiles: shutil.rmtree(outPrefix) return exceptionCount
def gridlabRun(temp_dir):
	''' Run a .glm through GridLAB-D and return the results as JSON.
	Form parameters:
	:param glm: a GLM file.
	Details:
	:OMF function: omf.solvers.gridlabd.runInFilesystem().
	:run-time: up to a few hours.
	TODO: think about attachment support.
	'''
	# Persist the uploaded GLM so the parser can read it from disk.
	upload = request.files['glm']
	glmOnDisk = os.path.join(temp_dir, 'in.glm')
	upload.save(glmOnDisk)
	# Parse the GLM into a tree and run powerflow in the scratch directory.
	tree = feeder.parse(glmOnDisk)
	results = gridlabd.runInFilesystem(tree, attachments=[], keepFiles=True, workDir=temp_dir, glmName='out.glm')
	# Dump the raw solver output where the API layer expects to find it.
	with open(os.path.join(temp_dir, filenames["glrun"]), 'w') as outFile:
		json.dump(results, outFile)
def attachVolts(workDir, feederPath, voltVectorA, voltVectorB, voltVectorC, simStartDate, simLength, simLengthUnits):
	'''read voltage vectors of 3 different phases, run gridlabd, and attach output to the feeder.

	Writes one year (8760 hourly points) of complex voltage player files for
	phases A/B/C into workDir/gridlabD, wires them to the feeder's swing node,
	runs GridLAB-D, and saves the result as workDir/calibratedFeeder.omd.
	Returns (path-to-omd, True) on success, ("", False) on any failure.
	NOTE(review): phase B/C imaginary parts are randomized via
	random.uniform(6449, 6460), so output is not reproducible across runs.
	'''
	try:
		# Build hourly timestamps for one year starting at simStartDate['Date'].
		timeStamp = [simStartDate['Date']]
		for x in range(1, 8760):
			timeStamp.append(timeStamp[x - 1] + dt.timedelta(hours=1))
		# NOTE(review): index 1 is one hour AFTER the start date; timeStamp[0]
		# looks like the intended value — confirm before changing.
		firstDateTime = timeStamp[1]
		# Phase A: real-only voltages ("<mag>+0j").
		with open(pJoin(pJoin(workDir, "gridlabD"), "phaseAVoltage.player"), "w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f" % float(voltVectorA[x])) + "+0j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		# Phase B: negative random imaginary component.
		with open(pJoin(pJoin(workDir, "gridlabD"), "phaseBVoltage.player"), "w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f" % float(voltVectorB[x])) + "-" + str("%0.4f" % float(random.uniform(6449, 6460))) + "j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		# Phase C: positive random imaginary component.
		with open(pJoin(pJoin(workDir, "gridlabD"), "phaseCVoltage.player"), "w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f" % float(voltVectorC[x])) + "+" + str("%0.4f" % float(random.uniform(6449, 6460))) + "j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		with open(feederPath, "r") as jsonIn:
			feederJson = json.load(jsonIn)
			tree = feederJson.get("tree", {})
		# Find swingNode name.
		# NOTE(review): if no swing bus exists, swingName is unbound and the
		# except below masks the NameError.
		for key in tree:
			if tree[key].get('bustype', '').lower() == 'swing':
				swingName = tree[key].get('name')
		# Attach player.
		classOb = {'omftype': 'class player', 'argument': '{double value;}'}
		voltageObA = {"object": "player", "property": "voltage_A", "file": "phaseAVoltage.player", "loop": "0", "parent": swingName}
		voltageObB = {"object": "player", "property": "voltage_B", "file": "phaseBVoltage.player", "loop": "0", "parent": swingName}
		voltageObC = {"object": "player", "property": "voltage_C", "file": "phaseCVoltage.player", "loop": "0", "parent": swingName}
		maxKey = omf.feeder.getMaxKey(tree)
		voltplayerKeyA = maxKey + 2
		voltplayerKeyB = maxKey + 3
		voltplayerKeyC = maxKey + 4
		tree[maxKey + 1] = classOb
		tree[voltplayerKeyA] = voltageObA
		tree[voltplayerKeyB] = voltageObB
		tree[voltplayerKeyC] = voltageObC
		# Adjust time and run output.
		omf.feeder.adjustTime(tree, simLength, simLengthUnits, firstDateTime.strftime("%Y-%m-%d %H:%M:%S"))
		output = gridlabd.runInFilesystem(tree, attachments=feederJson.get('attachments', {}), keepFiles=True, workDir=pJoin(workDir, "gridlabD"))
		# Write the output.
		with open(pJoin(pJoin(workDir, "gridlabD"), "phaseAVoltage.player")) as f:
			playerStringA = f.read()
		with open(pJoin(pJoin(workDir, "gridlabD"), "phaseBVoltage.player")) as f:
			playerStringB = f.read()
		with open(pJoin(pJoin(workDir, "gridlabD"), "phaseCVoltage.player")) as f:
			playerStringC = f.read()
		# Embed the player files as attachments and save the calibrated feeder.
		with open(pJoin(workDir, "calibratedFeeder.omd"), "w") as outJson:
			feederJson["attachments"]["phaseAVoltage.player"] = playerStringA
			feederJson["attachments"]["phaseBVoltage.player"] = playerStringB
			feederJson["attachments"]["phaseCVoltage.player"] = playerStringC
			feederJson["tree"] = tree
			json.dump(feederJson, outJson, indent=4)
		return pJoin(workDir, "calibratedFeeder.omd"), True
	except:
		# Deliberate best-effort: any failure reports a generic message.
		print("Failed to run gridlabD with voltage players.")
		return "", False
def runForeground(modelDir):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	For every .omd feeder in modelDir: set up recorders, run GridLAB-D, clean
	the raw output into allOutputData.json, and record the run time in
	allInputData.json. Crashes per-feeder are caught, logged to stderr.txt,
	and the model is cancelled for that feeder.
	'''
	inputDict = json.load(open(pJoin(modelDir, 'allInputData.json')))
	print "STARTING TO RUN", modelDir
	# NOTE(review): beginTime is never used below (startTime is re-taken per
	# feeder); left as-is.
	beginTime = datetime.datetime.now()
	# Get prepare of data and clean workspace if re-run, If re-run remove all the data in the subfolders
	for dirs in os.listdir(modelDir):
		if os.path.isdir(pJoin(modelDir, dirs)):
			shutil.rmtree(pJoin(modelDir, dirs))
	# Get the names of the feeders from the .omd files:
	feederNames = [x[0:-4] for x in os.listdir(modelDir) if x.endswith(".omd")]
	for i, key in enumerate(feederNames):
		inputDict['feederName' + str(i + 1)] = feederNames[i]
	# Run GridLAB-D once for each feeder:
	for feederName in feederNames:
		try:
			# Remove stale output from a previous run, if any.
			os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
		except Exception, e:
			pass
		if not os.path.isdir(pJoin(modelDir, feederName)):
			os.makedirs(pJoin(modelDir, feederName)) # create subfolders for feeders
		shutil.copy(pJoin(modelDir, feederName + ".omd"), pJoin(modelDir, feederName, "feeder.omd"))
		inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(_omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), pJoin(modelDir, feederName, "climate.tmy2"))
		try:
			startTime = datetime.datetime.now()
			feederJson = json.load(open(pJoin(modelDir, feederName, "feeder.omd")))
			tree = feederJson["tree"]
			# Set up GLM with correct time and recorders:
			feeder.attachRecorders(tree, "Regulator", "object", "regulator")
			feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
			feeder.attachRecorders(tree, "Inverter", "object", "inverter")
			feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
			feeder.attachRecorders(tree, "CollectorVoltage", None, None)
			feeder.attachRecorders(tree, "Climate", "object", "climate")
			feeder.attachRecorders(tree, "OverheadLosses", None, None)
			feeder.attachRecorders(tree, "UndergroundLosses", None, None)
			feeder.attachRecorders(tree, "TriplexLosses", None, None)
			feeder.attachRecorders(tree, "TransformerLosses", None, None)
			feeder.groupSwingKids(tree)
			feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
			# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
			rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir, feederName))
			cleanOut = {}
			# Std Err and Std Out
			cleanOut['stderr'] = rawOut['stderr']
			cleanOut['stdout'] = rawOut['stdout']
			# Time Stamps
			# NOTE(review): the else clause resets timeStamps to [] on every
			# key that matches neither header, so the '# property.. timestamp'
			# result survives only if found on the last-iterated key.
			for key in rawOut:
				if '# timestamp' in rawOut[key]:
					cleanOut['timeStamps'] = rawOut[key]['# timestamp']
					break
				elif '# property.. timestamp' in rawOut[key]:
					cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
				else:
					cleanOut['timeStamps'] = []
			# Day/Month Aggregation Setup:
			stamps = cleanOut.get('timeStamps',[])
			level = inputDict.get('simLengthUnits','hours')
			# Climate
			for key in rawOut:
				if key.startswith('Climate_') and key.endswith('.csv'):
					cleanOut['climate'] = {}
					cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
					cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
					cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
					cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
					cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			# Voltage Band
			if 'VoltageJiggle.csv' in rawOut:
				# voltage_12.mag is divided by 2 to get the 120V-base value.
				cleanOut['allMeterVoltages'] = {}
				cleanOut['allMeterVoltages']['Min'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
				cleanOut['allMeterVoltages']['Mean'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
				cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
				cleanOut['allMeterVoltages']['Max'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
				# Mean +/- half a standard deviation, for chart bands.
				cleanOut['allMeterVoltages']['stdDevPos'] = [(x+y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
				cleanOut['allMeterVoltages']['stdDevNeg'] = [(x-y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
				# Total # of meters
				# Counted by grepping the raw .omd text rather than the tree.
				count = 0
				with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
					for line in f:
						if "\"objectType\": \"triplex_meter\"" in line:
							count+=1
				# print "count=", count
				cleanOut['allMeterVoltages']['triplexMeterCount'] = float(count)
			# Power Consumption
			cleanOut['Consumption'] = {}
			# Set default value to be 0, avoiding missing value when computing Loads
			cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
			cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
			cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
			for key in rawOut:
				if key.startswith('SwingKids_') and key.endswith('.csv'):
					# Substation power: apparent power magnitude, summed across swing nodes.
					oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
					if 'Power' not in cleanOut['Consumption']:
						cleanOut['Consumption']['Power'] = oneSwingPower
					else:
						cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
				elif key.startswith('Inverter_') and key.endswith('.csv'):
					# Inverter DG: per-phase apparent power, summed.
					realA = rawOut[key]['power_A.real']
					realB = rawOut[key]['power_B.real']
					realC = rawOut[key]['power_C.real']
					imagA = rawOut[key]['power_A.imag']
					imagB = rawOut[key]['power_B.imag']
					imagC = rawOut[key]['power_C.imag']
					oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
					if 'DG' not in cleanOut['Consumption']:
						cleanOut['Consumption']['DG'] = oneDgPower
					else:
						cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
				elif key.startswith('Windmill_') and key.endswith('.csv'):
					# Wind DG: |V|*|I| per phase from raw voltage/current recorders.
					vrA = rawOut[key]['voltage_A.real']
					vrB = rawOut[key]['voltage_B.real']
					vrC = rawOut[key]['voltage_C.real']
					viA = rawOut[key]['voltage_A.imag']
					viB = rawOut[key]['voltage_B.imag']
					viC = rawOut[key]['voltage_C.imag']
					crB = rawOut[key]['current_B.real']
					crA = rawOut[key]['current_A.real']
					crC = rawOut[key]['current_C.real']
					ciA = rawOut[key]['current_A.imag']
					ciB = rawOut[key]['current_B.imag']
					ciC = rawOut[key]['current_C.imag']
					powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
					powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
					powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
					# HACK: multiply by negative one because turbine power sign is opposite all other DG:
					oneDgPower = [-1.0 * x for x in hdmAgg(vecSum(powerA,powerB,powerC), avg, level)]
					if 'DG' not in cleanOut['Consumption']:
						cleanOut['Consumption']['DG'] = oneDgPower
					else:
						cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
				elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
					# Losses: per-phase apparent losses, summed across the four loss collectors.
					realA = rawOut[key]['sum(power_losses_A.real)']
					imagA = rawOut[key]['sum(power_losses_A.imag)']
					realB = rawOut[key]['sum(power_losses_B.real)']
					imagB = rawOut[key]['sum(power_losses_B.imag)']
					realC = rawOut[key]['sum(power_losses_C.real)']
					imagC = rawOut[key]['sum(power_losses_C.imag)']
					oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
					if 'Losses' not in cleanOut['Consumption']:
						cleanOut['Consumption']['Losses'] = oneLoss
					else:
						cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			# Aggregate up the timestamps:
			if level=='days':
				cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
			elif level=='months':
				cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
			# Write the output.
			with open(pJoin(modelDir, feederName, "allOutputData.json"),"w") as outFile:
				json.dump(cleanOut, outFile, indent=4)
			# Update the runTime in the input file.
			endTime = datetime.datetime.now()
			inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
			with open(pJoin(modelDir, feederName, "allInputData.json"),"w") as inFile:
				json.dump(inputDict, inFile, indent=4)
			# Clean up the PID file.
			os.remove(pJoin(modelDir, feederName,"PID.txt"))
			print "DONE RUNNING GRIDLABMULTI", modelDir, feederName
		except Exception as e:
			# One feeder crashing shouldn't kill the rest; log and cancel it.
			print "MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName
			cancel(pJoin(modelDir, feederName))
			with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
				traceback.print_exc(file = stderrFile)
def omfCalibrate(workDir, feederPath, scadaPath):
	'''calibrates a feeder and saves the calibrated tree at a location

	Steps: force the FBS solver, attach a SCADA-driven player to every
	triplex load, run GridLAB-D, compute a single scaling constant so the
	simulated substation power matches SCADA, rewrite the player with scaled
	values, re-run to verify, plot the comparison, and save the calibrated
	feeder (tree + scaled player attachment) to workDir/calibratedFeeder.json.
	'''
	logger.info('Calibrating feeder... work dir: %s; feeder path: %s; scada path: %s', workDir, feederPath, scadaPath)
	with open(feederPath, "r") as jsonIn:
		feederJson = json.load(jsonIn)
		tree = feederJson.get("tree", {})
	scadaSubPower, firstDateTime = _processScadaData(workDir, scadaPath)
	# Force FBS powerflow, because NR fails a lot.
	for key in tree:
		if tree[key].get("module", "").lower() == "powerflow":
			tree[key] = {"module": "powerflow", "solver_method": "FBS"}
	# Attach player.
	classOb = {"class": "player", "variable_names": ["value"], "variable_types": ["double"]}
	playerOb = {"object": "player", "property": "value", "name": "scadaLoads", "file": "subScada.player", "loop": "0"}
	maxKey = omf.feeder.getMaxKey(tree)
	tree[maxKey + 1] = classOb
	tree[maxKey + 2] = playerOb
	# Make loads reference player.
	# FIX: the original dict literal listed "power_pf_12" twice ("0.95" then
	# "0.90"), silently dropping the first value and never setting a current
	# power factor. The second entry is the current PF per GridLAB-D's
	# triplex_load properties, so it is now "current_pf_12".
	loadTemplate = {"object": "triplex_load",
		"power_pf_12": "0.95",
		"impedance_pf_12": "0.98",
		"current_pf_12": "0.90",
		"impedance_fraction_12": "0.7",
		"power_fraction_12": "0.3"}
	for key in tree:
		ob = tree[key]
		if ob.get("object", "") == "triplex_node" and ob.get("power_12", "") != "":
			# Replace the static triplex_node with a triplex_load whose base
			# power tracks the SCADA player value.
			newOb = dict(loadTemplate)
			newOb["name"] = ob.get("name", "")
			newOb["parent"] = ob.get("parent", "")
			newOb["phases"] = ob.get("phases", "")
			newOb["nominal_voltage"] = ob.get("nominal_voltage", "")
			newOb["latitude"] = ob.get("latitude", "0")
			newOb["longitude"] = ob.get("longitude", "0")
			# Convert complex "a+bj" power to GridLAB-D's "a+bd" form, then to magnitude.
			oldPow = ob.get("power_12", "").replace("j", "d")
			pythagPower = gridlabd._strClean(oldPow)
			newOb["base_power_12"] = "scadaLoads.value*" + str(pythagPower)
			tree[key] = newOb
	# Search for the substation regulator and attach a recorder there.
	for key in tree:
		if tree[key].get('bustype', '').lower() == 'swing':
			swingName = tree[key].get('name')
	for key in tree:
		if tree[key].get('object', '') in ['regulator', 'overhead_line', 'underground_line', 'transformer', 'fuse'] and tree[key].get('from', '') == swingName:
			SUB_REG_NAME = tree[key]['name']
	recOb = {"object": "recorder", "parent": SUB_REG_NAME, "property": "power_in.real,power_in.imag", "file": "caliSub.csv", "interval": "900"}
	tree[maxKey + 3] = recOb
	# Calibrate over the first 100 hours only.
	HOURS = 100
	omf.feeder.adjustTime(tree, HOURS, "hours", firstDateTime.strftime("%Y-%m-%d"))
	# Run Gridlabd.
	output = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=workDir)
	# Calculate scaling constant.
	outRealPow = output["caliSub.csv"]["power_in.real"]
	outImagPower = output["caliSub.csv"]["power_in.imag"]
	outAppPowerKw = [(x[0]**2 + x[1]**2)**0.5 / 1000 for x in zip(outRealPow, outImagPower)]
	# HACK: ignore first time step in output and input because GLD sometimes
	# breaks the first step.
	SCAL_CONST = sum(scadaSubPower[1:HOURS]) / sum(outAppPowerKw[1:HOURS])
	# Rewrite the subScada.player file so all the power values are multiplied
	# by the SCAL_CONSTANT.
	newPlayData = []
	with open(pJoin(workDir, "subScada.player"), "r") as playerFile:
		for line in playerFile:
			(key, val) = line.split(',')
			newPlayData.append(str(key) + ',' + str(float(val) * SCAL_CONST) + "\n")
	with open(pJoin(workDir, "subScadaCalibrated.player"), "w") as playerFile:
		for row in newPlayData:
			playerFile.write(row)
	# Test by running a glm with subScadaCalibrated.player and caliSubCheck.csv.
	tree[maxKey + 2]["file"] = "subScadaCalibrated.player"
	tree[maxKey + 3]["file"] = "caliSubCheck.csv"
	secondOutput = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=workDir)
	# Plot initial guess vs. SCADA vs. calibrated result.
	plt.figure()
	plt.plot(outAppPowerKw[1:HOURS], label="initialGuess")
	plt.plot(scadaSubPower[1:HOURS], label="scadaSubPower")
	secondAppKw = [(x[0]**2 + x[1]**2)**0.5 / 1000
		for x in zip(secondOutput["caliSubCheck.csv"]["power_in.real"], secondOutput["caliSubCheck.csv"]["power_in.imag"])]
	plt.plot(secondAppKw[1:HOURS], label="finalGuess")
	plt.legend(loc=3)
	Plot.save_fig(plt, pJoin(workDir, "caliCheckPlot.png"))
	# Write the final output.
	with open(pJoin(workDir, "calibratedFeeder.json"), "w") as outJson:
		# Fix: close the player file handle instead of leaking it.
		with open(pJoin(workDir, "subScadaCalibrated.player")) as playerIn:
			playerString = playerIn.read()
		feederJson["attachments"]["subScadaCalibrated.player"] = playerString
		feederJson["tree"] = tree
		json.dump(feederJson, outJson, indent=4)
	return
def generateData(pathToOmd, workDir, inputData, outputData):
	'''Build distance/voltage feature matrices and a connectivity target from an .omd feeder.

	Runs a short GridLAB-D voltdump, writes nodes/connectivity CSVs, filters
	out nodes that appear in no line, then returns:
	(inputDataDist, inputGraphDist, inputDataVolt, inputGraphVolt,
	 outputData, outputGraph) — pairwise-distance and pairwise-voltage-delta
	matrices plus graphs, and a -1/0 adjacency matrix plus graph.
	NOTE(review): the inputData and outputData parameters are rebound locally
	and their incoming values are never used.
	'''
	omd = json.load(open(pathToOmd))
	tree = omd.get('tree', {})
	# NOTE(review): this value is discarded — attachments is reset to [] below.
	attachments = omd.get('attachments', [])
	# check to see if work directory is specified
	if not workDir:
		workDir = tempfile.mkdtemp()
		print '@@@@@@', workDir
	def safeInt(x):
		# Non-numeric tree keys count as 0 for the max-key computation.
		try:
			return int(x)
		except:
			return 0
	biggestKey = max([safeInt(x) for x in tree.keys()])
	# Pin the clock to a fixed 5-minute window so the voltdump is deterministic.
	CLOCK_START = '2000-01-01 0:00:00'
	dt_start = parser.parse(CLOCK_START)
	dt_end = dt_start + relativedelta(day=0, hour=0, minute=5, second=0)
	CLOCK_END = str(dt_end)
	# NOTE(review): CLOCK_RANGE is computed but never used.
	CLOCK_RANGE = CLOCK_START + ',' + CLOCK_END
	index = 1
	for key in tree:
		if 'clock' in tree[key]:
			tree[key]['starttime'] = "'" + CLOCK_START + "'"
			tree[key]['stoptime'] = "'" + CLOCK_END + "'"
	# create volt and current line dumps
	tree[str(biggestKey * 10 + index)] = {"object": "voltdump", "filename": "voltDump.csv", 'runtime': 'INIT'}
	attachments = []
	# Run Gridlab.
	# NOTE(review): dead code — workDir was already set above if falsy.
	if not workDir:
		workDir = tempfile.mkdtemp()
		print '@@@@@@', workDir
	# Run only for the voltDump.csv side effect; the return value is unused.
	gridlabOut = gridlabd.runInFilesystem(tree, attachments=attachments, workDir=workDir)
	outageMap = geo.omdGeoJson(pathToOmd, conversion=False)
	# Dump every bus-like object with its geo coordinates.
	with open(workDir + '/nodes.csv', mode='wb') as nodes:
		fieldnames = ['node_name', 'coord1', 'coord2']
		writer = csv.DictWriter(nodes, fieldnames)
		writer.writeheader()
		for key in tree.keys():
			obtype = tree[key].get("object", "")
			if obtype == 'node' or obtype == 'load' or obtype == 'capacitor' or obtype == 'meter':
				coord1, coord2 = nodeToCoords(outageMap, tree[key]['name'])
				writer.writerow({'node_name': tree[key]['name'], 'coord1': coord1, 'coord2': coord2})
	nodes.close()
	# Dump every line object as a (from, to) edge.
	with open(workDir + '/connectivity.csv', mode='wb') as connectivity:
		fieldnames = ['first_node', 'second_node']
		writer = csv.DictWriter(connectivity, fieldnames)
		writer.writeheader()
		for key in tree.keys():
			obtype = tree[key].get("object", "")
			if obtype == 'underground_line' or obtype == 'overhead_line' or obtype == 'triplex_line':
				writer.writerow({'first_node': tree[key]['from'], 'second_node': tree[key]['to']})
	connectivity.close()
	connectivity = pd.read_csv(workDir + '/connectivity.csv')
	print(connectivity)
	nodes = pd.read_csv(workDir + '/nodes.csv')
	# Drop nodes that appear in no line (substring match via str.contains —
	# NOTE(review): a node name that is a substring of another's survives).
	row = 0
	number_of_nodes = nodes.shape[0]
	bad_nodes = []
	while row < number_of_nodes:
		if (not connectivity['first_node'].str.contains(nodes.loc[row, 'node_name']).any()) and (not connectivity['second_node'].str.contains(nodes.loc[row, 'node_name']).any()):
			bad_nodes.append(nodes.loc[row, 'node_name'])
		row += 1
	row = 0
	number_of_bad = len(bad_nodes)
	while row < number_of_bad:
		delete_row = nodes[nodes['node_name'] == bad_nodes[row]].index
		nodes = nodes.drop(delete_row)
		row += 1
	# NOTE(review): dropna() result is discarded (not assigned back).
	nodes.dropna()
	nodes = nodes.sort_values('node_name')
	# Round-trip through CSV to reset the index after filtering.
	nodes.to_csv(workDir + '/nodes1.csv')
	nodes = pd.read_csv(workDir + '/nodes1.csv')
	print(nodes)
	# Apply the same connectivity filter to the voltdump rows.
	volt = pd.read_csv(workDir + '/voltDump.csv', skiprows=1)
	row = 0
	number_of_volt = volt.shape[0]
	bad_nodes = []
	while row < number_of_volt:
		if (not connectivity['first_node'].str.contains(volt.loc[row, 'node_name']).any()) and (not connectivity['second_node'].str.contains(volt.loc[row, 'node_name']).any()):
			bad_nodes.append(volt.loc[row, 'node_name'])
		row += 1
	number_of_bad = len(bad_nodes)
	row = 0
	while row < number_of_bad:
		delete_row = volt[volt['node_name'] == bad_nodes[row]].index
		volt = volt.drop(delete_row)
		row += 1
	volt = volt.sort_values('node_name')
	volt.to_csv(workDir + '/volt1.csv')
	volt = pd.read_csv(workDir + '/volt1.csv')
	print(volt)
	# Pairwise geographic distances; near-coincident or degenerate pairs get
	# the sentinel 10e15 weight.
	row_count = nodes.shape[0]
	inputDataDist = [[0 for x in range(row_count)] for y in range(row_count)]
	inputGraphDist = nx.Graph()
	row = 0
	while row < row_count:
		column = 0
		while column < row_count:
			data = []
			if ((nodes.loc[row, 'coord1'] - nodes.loc[column, 'coord1'])**2 + (nodes.loc[row, 'coord2'] - nodes.loc[column, 'coord2'])**2) > 10e-10:
				distance = math.sqrt((nodes.loc[row, 'coord1'] - nodes.loc[column, 'coord1'])**2 + (nodes.loc[row, 'coord2'] - nodes.loc[column, 'coord2'])**2)
			else:
				distance = 0.0
			if distance > 10e-15:
				data.append(distance)
				#data.append(distance*10e5*1.09359893099)
			else:
				data.append(10e15)
			inputDataDist[row][column] = sum(data)
			if not inputGraphDist.has_edge(row, column):
				inputGraphDist.add_edge(row, column, weight=inputDataDist[row][column])
			column += 1
		row += 1
	# Pairwise voltage deltas per phase; a phase that is ~zero at `row` (or a
	# zero delta) contributes the sentinel 100.0 instead.
	row_count = volt.shape[0]
	inputDataVolt = [[0 for x in range(row_count)] for y in range(row_count)]
	inputGraphVolt = nx.Graph()
	row = 0
	while row < row_count:
		column = 0
		while column < row_count:
			data = []
			# Phase A.
			if (float(volt.loc[row, 'voltA_real']) > 10e-10 or float(volt.loc[row, 'voltA_imag']) > 10e-10 or float(volt.loc[row, 'voltA_real']) < -10e-10 or float(volt.loc[row, 'voltA_imag']) < -10e-10):
				if (math.sqrt((float(volt.loc[row, 'voltA_real']) - float(volt.loc[column, 'voltA_real']))**2 + (float(volt.loc[row, 'voltA_imag']) - float(volt.loc[column, 'voltA_imag']))**2)) > 0:
					data.append(math.sqrt((float(volt.loc[row, 'voltA_real']) - float(volt.loc[column, 'voltA_real']))**2 + (float(volt.loc[row, 'voltA_imag']) - float(volt.loc[column, 'voltA_imag']))**2))
				else:
					data.append(100.0)
			else:
				data.append(100.0)
			# Phase B.
			if (float(volt.loc[row, 'voltB_real']) > 10e-10 or float(volt.loc[row, 'voltB_imag']) > 10e-10 or float(volt.loc[row, 'voltB_real']) < -10e-10 or float(volt.loc[row, 'voltB_imag']) < -10e-10):
				if (math.sqrt((float(volt.loc[row, 'voltB_real']) - float(volt.loc[column, 'voltB_real']))**2 + (float(volt.loc[row, 'voltB_imag']) - float(volt.loc[column, 'voltB_imag']))**2)) > 0:
					data.append(math.sqrt((float(volt.loc[row, 'voltB_real']) - float(volt.loc[column, 'voltB_real']))**2 + (float(volt.loc[row, 'voltB_imag']) - float(volt.loc[column, 'voltB_imag']))**2))
				else:
					data.append(100.0)
			else:
				data.append(100.0)
			# Phase C.
			if (float(volt.loc[row, 'voltC_real']) > 10e-10 or float(volt.loc[row, 'voltC_imag']) > 10e-10 or float(volt.loc[row, 'voltC_real']) < -10e-10 or float(volt.loc[row, 'voltC_imag']) < -10e-10):
				if (math.sqrt((float(volt.loc[row, 'voltC_real']) - float(volt.loc[column, 'voltC_real']))**2 + (float(volt.loc[row, 'voltC_imag']) - float(volt.loc[column, 'voltC_imag']))**2)) > 0:
					data.append(math.sqrt((float(volt.loc[row, 'voltC_real']) - float(volt.loc[column, 'voltC_real']))**2 + (float(volt.loc[row, 'voltC_imag']) - float(volt.loc[column, 'voltC_imag']))**2))
				else:
					data.append(100.0)
			else:
				data.append(100.0)
			inputDataVolt[row][column] = sum(data)
			if not inputGraphVolt.has_edge(row, column):
				inputGraphVolt.add_edge(row, column, weight=inputDataVolt[row][column])
			column += 1
		row += 1
	# Target adjacency: -1 where a line connects the pair, 0 otherwise
	# (undirected — both edge orientations are checked).
	nodes_count = nodes.shape[0]
	connectivity_count = connectivity.shape[0]
	row = 0
	outputData = [[0 for x in range(nodes_count)] for y in range(nodes_count)]
	outputGraph = nx.Graph()
	while row < nodes_count:
		column = 0
		while column < nodes_count:
			rowName = nodes.loc[row, 'node_name']
			colName = nodes.loc[column, 'node_name']
			conEntry = 0
			found = False
			while conEntry < connectivity_count:
				if ((rowName == connectivity.loc[conEntry, 'first_node'] and colName == connectivity.loc[conEntry, 'second_node']) or (rowName == connectivity.loc[conEntry, 'second_node'] and colName == connectivity.loc[conEntry, 'first_node'])):
					outputData[row][column] = -1
					if not outputGraph.has_edge(row, column):
						outputGraph.add_edge(row, column, weight=-1)
					found = True
					break
				conEntry += 1
			if found == False:
				if not outputGraph.has_edge(row, column):
					outputData[row][column] = 0
					outputGraph.add_edge(row, column, weight=0)
			column += 1
		row += 1
	print(len(outputData))
	print(len(inputDataDist))
	return inputDataDist, inputGraphDist, inputDataVolt, inputGraphVolt, outputData, outputGraph
'power_factor': '1.0', 'generator_status': 'ONLINE', 'generator_mode': 'CONSTANT_PF' } feed[maxKey + 5] = { 'object': 'solar', 'name': 'test_solar', 'parent': 'test_solar_inverter', 'area': '1000000 sf', 'generator_status': 'ONLINE', 'efficiency': '0.2', 'generator_mode': 'SUPPLY_DRIVEN', 'panel_type': 'SINGLE_CRYSTAL_SILICON' } feed[maxKey + 6] = { 'object': 'recorder', 'parent': 'test_solar_meter', 'property': 'voltage_A.real,voltage_A.imag,voltage_B.real,voltage_B.imag,voltage_C.real,voltage_C.imag', 'file': 'GC-addSolar-voltages.csv', 'interval': '60', 'limit': '1440' } runInFilesystem(feed, keepFiles=True, workDir='.', glmName='GC-solarAdd.glm') ''' output = open('GC-solarAdd.glm', 'w') output.write(feeder.write(feed)) output.close() '''
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Side effects, all under modelDir:
	  - gldContainer/        : GridLAB-D working directory (climate.tmy2 copied in).
	  - allOutputData.json   : cleaned simulation results (written on success).
	  - allInputData.json    : inputDict re-written with the measured "runTime".
	  - stderr.txt           : traceback text (written on failure).
	Returns None. All errors are trapped by the outer try/except, logged, and
	recorded in inputDict['stderr']; callers detect failure via cancel()/stderr.txt.
	'''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in. mkdir may fail if the dir already exists; that's fine.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass
	try:
		feederName = inputDict["feederName1"]
		weather = inputDict["weather"]
		if weather == "typical":
			# TMY2 climate file selected by zip code and copied into the working dir.
			inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
			shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
				pJoin(modelDir, "gldContainer", "climate.tmy2"))
			startTime = datetime.datetime.now()
		else:
			#hack for testing
			# NOTE(review): output path is hard-coded to a test directory — confirm before production use.
			makeClimateCsv('2010-07-01', '2010-08-01', 'DFW', 'Output/Automated dsoSimSuite Test/gldContainer/weather.csv')
			startTime = datetime.datetime.now()
		# NOTE(review): startTime is reassigned here, making the assignments in both
		# branches above redundant — presumably leftover code; confirm and remove.
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
				if feederJson['tree'][key]['object'] == 'climate':
					del feederJson['tree'][key]
		#add weather objects and modules to .glm if there is no climate file in the omd file
		if weather == "historical":
			# csv_reader-based climate fed from the weather.csv generated above.
			oldMax = feeder.getMaxKey(tree)
			tree[oldMax + 1] = {'omftype':'module', 'argument':'tape'}
			tree[oldMax + 2] = {'omftype':'module', 'argument':'climate'}
			tree[oldMax + 3] = {'object':'csv_reader', 'name':'weatherReader', 'filename':'weather.csv'}
			tree[oldMax + 4] = {'object':'climate', 'name':'exampleClimate', 'tmyfile':'weather.csv', 'reader':'weatherReader'}
		else:
			# TMY2-based climate object pointing at the copied climate.tmy2.
			oldMax = feeder.getMaxKey(tree)
			tree[oldMax + 1] ={'object':'climate','name':'Climate','interpolate':'QUADRATIC', 'tmyfile':'climate.tmy2'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map: one group_recorder per phase,
		# writing aVoltDump.csv / bVoltDump.csv / cVoltDump.csv.
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
			simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"],
			keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps: take the timestamp column from the first output CSV that has one.
		# NOTE(review): the else branch re-assigns 'timeStamps' to [] for every key that
		# matches neither header, and the elif does not break — the final value depends
		# on dict iteration order until a '# timestamp' key is hit. Confirm intent.
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate: aggregate each recorded weather channel up to 'level' resolution.
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList
		# Voltage Band: voltage_12 is line-to-line, so halve it for per-leg values.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				# Substation power: magnitude of complex power, averaged to 'level'.
				# NOTE(review): 'Power' was pre-seeded above, so the 'not in' branch is
				# unreachable and every swing file is vecSum'd onto the zero vector — confirm.
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'):
				# Inverter DG output: sum of per-phase complex power magnitudes.
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Wind DG output: per-phase |V|*|I| products summed across phases.
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				# System losses: per-phase complex loss magnitudes summed across phases.
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				# Zero-fill defaults, then overwrite with the recorded tap positions.
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				# Zero-fill defaults, then overwrite with the recorded switch states.
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		# Fewer than 25% of nodes placed -> fall back to a force-directed (neato) layout.
		if latPerc < 0.25: doNeato = True
		else: doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps: truncate ISO strings to date (10 chars) or month (7 chars).
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# Always record total wall-clock time (overwrites the success-path runTime with
	# begin-to-finish duration) and clear the parent-PID file if present.
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
def work(modelDir, inputDict):
	''' Run the model in its directory.

	Pipeline: write hazard file -> build GFM input from the feeder .omd ->
	run the GFM solver (Fragility.jar) -> run GridLAB-D (platform-dependent path) ->
	run the RDT solver (micot-rdt.jar) -> draw the feeder diagram.
	Returns outData dict with line/generator cost tables, raw solver outputs,
	and a base64-encoded one-line diagram PNG.
	'''
	outData = {}
	feederName = inputDict["feederName1"]
	# Persist the user-supplied weather-impacts text so the GFM jar can read it.
	with open(pJoin(modelDir, inputDict['weatherImpactsFileName']), 'w') as hazardFile:
		hazardFile.write(inputDict['weatherImpacts'])
	with open(pJoin(modelDir, feederName + '.omd'), "r") as jsonIn:
		feederModel = json.load(jsonIn)
	# Create GFM input file.
	print "Running GFM ************************************"
	gfmInputTemplate = {
		'phase_variation': float(inputDict['phaseVariation']),
		'chance_constraint': float(inputDict['chanceConstraint']),
		'critical_load_met': float(inputDict['criticalLoadMet']),
		'total_load_met': 1.0, #(float(inputDict['criticalLoadMet']) + float(inputDict['nonCriticalLoadMet'])),
		'xrMatrices': inputDict["xrMatrices"],
		'maxDGPerGenerator': float(inputDict["maxDGPerGenerator"]),
		'newLineCandidates': inputDict['newLineCandidates'],
		'hardeningCandidates': inputDict['hardeningCandidates'],
		'switchCandidates': inputDict['switchCandidates'],
		'hardeningUnitCost': inputDict['hardeningUnitCost'],
		'switchCost': inputDict['switchCost'],
		'generatorCandidates': inputDict['generatorCandidates'],
		'lineUnitCost': inputDict['lineUnitCost']
	}
	gfmJson = convertToGFM(gfmInputTemplate, feederModel)
	gfmInputFilename = 'gfmInput.json'
	with open(pJoin(modelDir, gfmInputFilename), "w") as outFile:
		json.dump(gfmJson, outFile, indent=4)
	# Run GFM (blocking; no timeout — the jar is trusted to terminate).
	gfmBinaryPath = pJoin(__neoMetaModel__._omfDir, 'solvers', 'gfm', 'Fragility.jar')
	proc = subprocess.Popen(['java', '-jar', gfmBinaryPath, '-r', gfmInputFilename, '-wf', inputDict['weatherImpactsFileName'], '-num', '3'], cwd=modelDir)
	proc.wait()
	# HACK: rename the hardcoded gfm output
	rdtInputFilePath = pJoin(modelDir, 'rdtInput.json')
	print 'Before weird RENAMING STUFF!!!!'
	os.rename(pJoin(modelDir, 'rdt_OUTPUT.json'), rdtInputFilePath)
	# print 'RENAME FROM', pJoin(modelDir,'rdt_OUTPUT.json')
	# print 'RENAME TO', rdtInputFilePath
	# print 'After weird RENAMING STUFF!!!!'
	#raise Exception('Go no further')
	# Pull GFM input data on lines and generators for HTML presentation.
	with open(rdtInputFilePath, 'r') as rdtInputFile:
		# HACK: we use rdtInput as a string in the frontend.
		rdtJsonAsString = rdtInputFile.read()
		rdtJson = json.loads(rdtJsonAsString)
	# Calculate line costs: (line id, formatted cost = length * unit cost).
	lineData = []
	for line in rdtJson["lines"]:
		lineData.append((line["id"], '{:,.2f}'.format(float(line["length"]) * float(inputDict["lineUnitCost"]))))
	outData["lineData"] = lineData
	outData["generatorData"] = '{:,.2f}'.format(float(inputDict["dgUnitCost"]) * float(inputDict["maxDGPerGenerator"]))
	outData['gfmRawOut'] = rdtJsonAsString
	# Merge user-supplied scenarios (if any) back into the RDT input on disk.
	if inputDict['scenarios'] != "":
		rdtJson['scenarios'] = json.loads(inputDict['scenarios'])
		with open(pJoin(rdtInputFilePath), "w") as rdtInputFile:
			json.dump(rdtJson, rdtInputFile, indent=4)
	# Run GridLAB-D first time to generate xrMatrices.
	if platform.system() == "Windows":
		omdPath = pJoin(modelDir, feederName + ".omd")
		with open(omdPath, "r") as omd:
			omd = json.load(omd)
		#REMOVE NEWLINECANDIDATES
		# Candidate lines don't exist yet, so strip them before writing the .glm.
		deleteList = []
		newLines = inputDict["newLineCandidates"].strip().replace(' ', '').split(',')
		for newLine in newLines:
			for omdObj in omd["tree"]:
				if ("name" in omd["tree"][omdObj]):
					if (newLine == omd["tree"][omdObj]["name"]):
						deleteList.append(omdObj)
		for delItem in deleteList:
			del omd["tree"][delItem]
		#Load a blank glm file and use it to write to it
		feederPath = pJoin(modelDir, 'feeder.glm')
		with open(feederPath, 'w') as glmFile:
			#toWrite = omf.feeder.sortedWrite(omd['tree']) + "object jsondump {\n\tfilename_dump_reliability test_JSON_dump1.json;\n\twrite_reliability true;\n\tfilename_dump_line test_JSON_dump2.json;\n\twrite_line true;\n};\n"# + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
			toWrite = omf.feeder.sortedWrite( omd['tree'] ) + "object jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n" # + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
			glmFile.write(toWrite)
		#Write attachments from omd, if no file, one will be created
		for fileName in omd['attachments']:
			with open(os.path.join(modelDir, fileName), 'w') as file:
				file.write(omd['attachments'][fileName])
		#Wire in the file the user specifies via zipcode.
		climateFileName, latforpvwatts = zipCodeToClimateName(inputDict["simulationZipCode"])
		shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", climateFileName + ".tmy2"), pJoin(modelDir, 'climate.tmy2'))
		# NOTE(review): shell=True combined with a list argv is fragile; on Windows the
		# list is re-joined into a command line, but shell=False with the same list
		# would be the conventional form — confirm why the shell is needed here.
		proc = subprocess.Popen(['gridlabd', 'feeder.glm'], stdout=subprocess.PIPE, shell=True, cwd=modelDir)
		(out, err) = proc.communicate()
		accumulator = ""
		with open(pJoin(modelDir, "JSON_dump_line.json"), "r") as gldOut:
			accumulator = json.load(gldOut)
		outData['gridlabdRawOut'] = accumulator
		#THIS IS THE CODE THAT ONCE FRANK GETS DONE WITH GRIDLAB-D NEEDS TO BE UNCOMMENTED
		'''rdtJson["line_codes"] = accumulator["properties"]["line_codes"]
		rdtJson["lines"] = accumulator["properties"]["lines"]
		with open(pJoin(modelDir, rdtInputFilePath), "w") as outFile:
			json.dump(rdtJson, outFile, indent=4)'''
	else:
		# Non-Windows: run through the omf gridlabd wrapper instead of a raw subprocess.
		tree = feederModel.get("tree", {})
		attachments = feederModel.get("attachments", {})
		climateFileName, latforpvwatts = zipCodeToClimateName(inputDict["simulationZipCode"])
		shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", climateFileName + ".tmy2"), pJoin(modelDir, 'climate.tmy2'))
		gridlabdRawOut = gridlabd.runInFilesystem(tree, attachments=attachments, workDir=modelDir)
		outData['gridlabdRawOut'] = gridlabdRawOut
	# Run RDT.
	print "Running RDT ************************************"
	rdtOutFile = modelDir + '/rdtOutput.json'
	rdtSolverFolder = pJoin(__neoMetaModel__._omfDir, 'solvers', 'rdt')
	rdtJarPath = pJoin(rdtSolverFolder, 'micot-rdt.jar')
	# jna.library.path points the jar at its bundled native solver libraries.
	proc = subprocess.Popen(['java', "-Djna.library.path=" + rdtSolverFolder, '-jar', rdtJarPath, '-c', rdtInputFilePath, '-e', rdtOutFile])
	proc.wait()
	rdtRawOut = open(rdtOutFile).read()
	outData['rdtRawOut'] = rdtRawOut
	# Indent the RDT output nicely.
	with open(pJoin(rdtOutFile), "w") as outFile:
		rdtOut = json.loads(rdtRawOut)
		json.dump(rdtOut, outFile, indent=4)
	# TODO: run GridLAB-D second time to validate RDT results with new control schemes.
	# Draw the feeder.
	genDiagram(modelDir, feederModel)
	with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile:
		outData["oneLineDiagram"] = inFile.read().encode("base64")
	return outData
def voltPlot(tree, workDir=None, neatoLayout=False):
	""" Draw a color-coded map of the voltage drop on a feeder. Returns a matplotlib object.

	tree        -- GridLAB-D feeder tree dict (MUTATED: schedules/climate removed,
	               a voltdump object appended).
	workDir     -- where GridLAB-D runs; a temp dir is created when None.
	neatoLayout -- True to force-lay-out the graph via graphviz neato instead of
	               using stored node positions.
	"""
	# Get rid of schedules and climate:
	for key in tree.keys():
		if tree[key].get("argument", "") == '"schedules.glm"' or tree[key].get("tmyfile", "") != "":
			del tree[key]
	# Make sure we have a voltDump:
	def safeInt(x):
		# Non-numeric keys count as 0 so max() below still works.
		try:
			return int(x)
		except:
			return 0
	biggestKey = max([safeInt(x) for x in tree.keys()])
	# *10 guarantees a key beyond any existing numeric key.
	tree[str(biggestKey * 10)] = {"object": "voltdump", "filename": "voltDump.csv"}
	# Run Gridlab.
	if not workDir:
		workDir = tempfile.mkdtemp()
		print "gridlabD runInFilesystem with no specified workDir. Working in", workDir
	gridlabOut = gridlabd.runInFilesystem(tree, attachments=[], workDir=workDir)
	# Parse voltDump.csv into a list of {column: value} dicts.
	with open(pJoin(workDir, "voltDump.csv"), "r") as dumpFile:
		reader = csv.reader(dumpFile)
		reader.next()  # Burn the header.
		keys = reader.next()
		voltTable = []
		for row in reader:
			rowDict = {}
			for pos, key in enumerate(keys):
				rowDict[key] = row[pos]
			voltTable.append(rowDict)
	# Calculate average node voltage deviation. First, helper functions.
	def pythag(x, y):
		""" For right triangle with sides a and b, return the hypotenuse. """
		return math.sqrt(x ** 2 + y ** 2)
	def digits(x):
		""" Returns number of digits before the decimal in the float x. """
		return math.ceil(math.log10(x + 1))
	def avg(l):
		""" Average of a list of ints or floats. """
		return sum(l) / len(l)
	# Detect the feeder nominal voltage:
	# NOTE(review): if no SWING bus exists, feedVoltage stays unbound and the
	# normalization below raises NameError — confirm all input feeders have one.
	for key in tree:
		ob = tree[key]
		if type(ob) == dict and ob.get("bustype", "") == "SWING":
			feedVoltage = float(ob.get("nominal_voltage", 1))
	# Tot it all up.
	nodeVolts = {}
	for row in voltTable:
		allVolts = []
		for phase in ["A", "B", "C"]:
			phaseVolt = pythag(float(row["volt" + phase + "_real"]), float(row["volt" + phase + "_imag"]))
			if phaseVolt != 0.0:
				if digits(phaseVolt) > 3:
					# Normalize to 120 V standard
					phaseVolt = phaseVolt * (120 / feedVoltage)
				allVolts.append(phaseVolt)
		# NOTE(review): avg([]) divides by zero if all three phases read 0.0 — confirm
		# voltdump always emits at least one non-zero phase per node.
		nodeVolts[row.get("node_name", "")] = avg(allVolts)
	# Color nodes by VOLTAGE.
	fGraph = feeder.treeToNxGraph(tree)
	voltChart = plt.figure(figsize=(15, 15))
	plt.axes(frameon=0)
	plt.axis("off")
	# set axes step equal
	voltChart.gca().set_aspect("equal")
	if neatoLayout:
		# HACK: work on a new graph without attributes because graphViz tries to read attrs.
		cleanG = nx.Graph(fGraph.edges())
		cleanG.add_nodes_from(fGraph)
		positions = nx.graphviz_layout(cleanG, prog="neato")
	else:
		# Use positions stored on the nodes; (0, 0) for nodes with no layout data.
		positions = {n: fGraph.node[n].get("pos", (0, 0)) for n in fGraph}
	edgeIm = nx.draw_networkx_edges(fGraph, positions)
	nodeIm = nx.draw_networkx_nodes(
		fGraph,
		pos=positions,
		node_color=[nodeVolts.get(n, 0) for n in fGraph.nodes()],
		linewidths=0,
		node_size=30,
		cmap=plt.cm.jet,
	)
	plt.sci(nodeIm)
	# Fixed color scale centered on the 110-130 V service band.
	plt.clim(110, 130)
	plt.colorbar()
	return voltChart
def attachVolts(workDir, feederPath, voltVectorA, voltVectorB, voltVectorC, simStartDate, simLength, simLengthUnits):
	'''read voltage vectors of 3 different phases, run gridlabd, and attach output to the feeder.

	workDir      -- directory containing a gridlabD/ subfolder to write players into.
	feederPath   -- path of the .omd feeder to load.
	voltVector*  -- 8760 hourly voltage magnitudes for phases A/B/C.
	simStartDate -- dict with at least 'Date' (datetime) and 'timeZone' keys.
	Returns (path-to-calibratedFeeder.omd, True) on success, ("", False) on any error.
	'''
	try:
		# Build 8760 hourly timestamps starting at simStartDate['Date'].
		timeStamp = [simStartDate['Date']]
		for x in range (1, 8760):
			timeStamp.append(timeStamp[x-1] + dt.timedelta(hours=1))
		# NOTE(review): index 1 is the SECOND hour, not the start — presumably
		# intentional (skip the first interval), but confirm against adjustTime usage.
		firstDateTime = timeStamp[1]
		# Phase A player: purely real voltage values.
		with open(pJoin(pJoin(workDir,"gridlabD"),"phaseAVoltage.player"),"w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f"%float(voltVectorA[x]))+"+0j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		# Phase B player: negative imaginary part randomized in [6449, 6460).
		# NOTE(review): the random imaginary component looks like a phase-angle
		# approximation hack — results are not reproducible run-to-run; confirm.
		with open(pJoin(pJoin(workDir,"gridlabD"),"phaseBVoltage.player"),"w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f"%float(voltVectorB[x]))+"-"+str("%0.4f"%float(random.uniform(6449,6460)))+"j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		# Phase C player: positive imaginary part randomized in [6449, 6460).
		with open(pJoin(pJoin(workDir,"gridlabD"),"phaseCVoltage.player"),"w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f"%float(voltVectorC[x]))+"+"+str("%0.4f"%float(random.uniform(6449,6460)))+"j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		with open(feederPath, "r") as jsonIn:
			feederJson = json.load(jsonIn)
			tree = feederJson.get("tree", {})
		# Find swingNode name.
		# NOTE(review): if no swing bus exists, swingName stays unbound and the
		# player dicts below raise NameError (caught by the blanket except) — confirm.
		for key in tree:
			if tree[key].get('bustype','').lower() == 'swing':
				swingName = tree[key].get('name')
		# Attach player.
		classOb = {'omftype':'class player','argument':'{double value;}'}
		voltageObA = {"object":"player", "property":"voltage_A", "file":"phaseAVoltage.player", "loop":"0", "parent":swingName}
		voltageObB = {"object":"player", "property":"voltage_B", "file":"phaseBVoltage.player", "loop":"0", "parent":swingName}
		voltageObC = {"object":"player", "property":"voltage_C", "file":"phaseCVoltage.player", "loop":"0", "parent":swingName}
		maxKey = feeder.getMaxKey(tree)
		voltplayerKeyA = maxKey + 2
		voltplayerKeyB = maxKey + 3
		voltplayerKeyC = maxKey + 4
		tree[maxKey+1] = classOb
		tree[voltplayerKeyA] = voltageObA
		tree[voltplayerKeyB] = voltageObB
		tree[voltplayerKeyC] = voltageObC
		# Adjust time and run output.
		feeder.adjustTime(tree, simLength, simLengthUnits, firstDateTime.strftime("%Y-%m-%d %H:%M:%S"))
		output = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir,"gridlabD"))
		# Write the output: embed the three player files as attachments so the
		# .omd is self-contained for later re-runs.
		with open(pJoin(workDir,"calibratedFeeder.omd"),"w") as outJson:
			playerStringA = open(pJoin(pJoin(workDir,"gridlabD"),"phaseAVoltage.player")).read()
			playerStringB = open(pJoin(pJoin(workDir,"gridlabD"),"phaseBVoltage.player")).read()
			playerStringC = open(pJoin(pJoin(workDir,"gridlabD"),"phaseCVoltage.player")).read()
			feederJson["attachments"]["phaseAVoltage.player"] = playerStringA
			feederJson["attachments"]["phaseBVoltage.player"] = playerStringB
			feederJson["attachments"]["phaseCVoltage.player"] = playerStringC
			feederJson["tree"] = tree
			json.dump(feederJson, outJson, indent=4)
		return pJoin(workDir,"calibratedFeeder.omd"), True
	except:
		# Best-effort: any failure (missing swing bus, bad vectors, gridlabd crash)
		# is collapsed into a False return rather than an exception.
		print "Failed to run gridlabD with voltage players."
		return "", False
def drawPlot(path, workDir=None, neatoLayout=False, edgeLabs=None, nodeLabs=None, edgeCol=None, nodeCol=None, faultLoc=None, faultType=None, customColormap=False, scaleMin=None, scaleMax=None, rezSqIn=400, simTime='2000-01-01 0:00:00', loadLoc=None): ''' Draw a color-coded map of the voltage drop on a feeder. path is the full path to the GridLAB-D .glm file or OMF .omd file. workDir is where GridLAB-D will run, if it's None then a temp dir is used. neatoLayout=True means the circuit is displayed using a force-layout approach. edgeCol property must be either 'Current', 'Power', 'Rating', 'PercentOfRating', or None nodeCol property must be either 'Voltage', 'VoltageImbalance', 'perUnitVoltage', 'perUnit120Voltage', or None edgeLabs and nodeLabs properties must be either 'Name', 'Value', or None edgeCol and nodeCol can be set to false to avoid coloring edges or nodes customColormap=True means use a one that is nicely scaled to perunit values highlighting extremes. faultType and faultLoc are the type of fault and the name of the line that it occurs on. Returns a matplotlib object.''' # Be quiet matplotlib: # warnings.filterwarnings("ignore") if path.endswith('.glm'): tree = feeder.parse(path) attachments = [] elif path.endswith('.omd'): with open(path) as f: omd = json.load(f) tree = omd.get('tree', {}) attachments = omd.get('attachments', []) else: raise Exception('Invalid input file type. 
We require a .glm or .omd.') #print path # add fault object to tree def safeInt(x): try: return int(x) except: return 0 biggestKey = max([safeInt(x) for x in tree.keys()]) # Add Reliability module tree[str(biggestKey * 10)] = { "module": "reliability", "maximum_event_length": "18000", "report_event_log": "true" } CLOCK_START = simTime dt_start = parser.parse(CLOCK_START) dt_end = dt_start + relativedelta(seconds=+20) CLOCK_END = str(dt_end) CLOCK_RANGE = CLOCK_START + ',' + CLOCK_END if faultType != None: # Add eventgen object (the fault) tree[str(biggestKey * 10 + 1)] = { "object": "eventgen", "name": "ManualEventGen", "parent": "RelMetrics", "fault_type": faultType, "manual_outages": faultLoc + ',' + CLOCK_RANGE } # TODO: change CLOCK_RANGE to read the actual start and stop time, not just hard-coded # Add fault_check object tree[str(biggestKey * 10 + 2)] = { "object": "fault_check", "name": "test_fault", "check_mode": "ONCHANGE", "eventgen_object": "ManualEventGen", "output_filename": "Fault_check_out.txt" } # Add reliabilty metrics object tree[str(biggestKey * 10 + 3)] = { "object": "metrics", "name": "RelMetrics", "report_file": "Metrics_Output.csv", "module_metrics_object": "PwrMetrics", "metrics_of_interest": '"SAIFI,SAIDI,CAIDI,ASAI,MAIFI"', "customer_group": '"groupid=METERTEST"', "metric_interval": "5 h", "report_interval": "5 h" } # Add power_metrics object tree[str(biggestKey * 10 + 4)] = { "object": "power_metrics", "name": "PwrMetrics", "base_time_value": "1 h" } # HACK: set groupid for all meters so outage stats are collected. noMeters = True for key in tree: if tree[key].get('object', '') in ['meter', 'triplex_meter']: tree[key]['groupid'] = "METERTEST" noMeters = False if noMeters: raise Exception( "No meters detected on the circuit. Please add at least one meter to allow for collection of outage statistics." 
) for key in tree: if 'clock' in tree[key]: tree[key]['starttime'] = "'" + CLOCK_START + "'" tree[key]['stoptime'] = "'" + CLOCK_END + "'" # dictionary to hold info on lines present in glm edge_bools = dict.fromkeys([ 'underground_line', 'overhead_line', 'triplex_line', 'transformer', 'regulator', 'fuse', 'switch' ], False) # Map to speed up name lookups. nameToIndex = {tree[key].get('name', ''): key for key in tree.keys()} # Get rid of schedules and climate and check for all edge types: for key in list(tree.keys()): obtype = tree[key].get("object", "") if obtype == 'underground_line': edge_bools['underground_line'] = True elif obtype == 'overhead_line': edge_bools['overhead_line'] = True elif obtype == 'triplex_line': edge_bools['triplex_line'] = True elif obtype == 'transformer': edge_bools['transformer'] = True elif obtype == 'regulator': edge_bools['regulator'] = True elif obtype == 'fuse': edge_bools['fuse'] = True elif obtype == 'switch': edge_bools['switch'] = True if tree[key].get("argument", "") == "\"schedules.glm\"" or tree[key].get( "tmyfile", "") != "": del tree[key] # Make sure we have a voltage dump and current dump: tree[str(biggestKey * 10 + 5)] = { "object": "voltdump", "filename": "voltDump.csv" } tree[str(biggestKey * 10 + 6)] = { "object": "currdump", "filename": "currDump.csv" } # Line rating dumps tree[feeder.getMaxKey(tree) + 1] = {'module': 'tape'} for key in edge_bools.keys(): if edge_bools[key]: tree[feeder.getMaxKey(tree) + 1] = { 'object': 'group_recorder', 'group': '"class=' + key + '"', 'property': 'continuous_rating', 'file': key + '_cont_rating.csv' } #Record initial status readout of each fuse/recloser/switch/sectionalizer before running # Reminder: fuse objects have 'phase_X_status' instead of 'phase_X_state' protDevices = dict.fromkeys( ['fuse', 'recloser', 'switch', 'sectionalizer'], False) #dictionary of protective device initial states for each phase protDevInitStatus = {} #dictionary of protective devices final states for 
each phase after running Gridlab-D protDevFinalStatus = {} #dictionary of protective device types to help the testing and debugging process protDevTypes = {} protDevOpModes = {} for key in tree: obj = tree[key] obType = obj.get('object') if obType in protDevices.keys(): obName = obj.get('name', '') protDevTypes[obName] = obType if obType != 'fuse': protDevOpModes[obName] = obj.get('operating_mode', 'INDIVIDUAL') protDevices[obType] = True protDevInitStatus[obName] = {} protDevFinalStatus[obName] = {} for phase in ['A', 'B', 'C']: if obType != 'fuse': phaseState = obj.get('phase_' + phase + '_state', 'CLOSED') else: phaseState = obj.get('phase_' + phase + '_status', 'GOOD') if phase in obj.get('phases', ''): protDevInitStatus[obName][phase] = phaseState #print protDevInitStatus #Create a recorder for protective device states for key in protDevices.keys(): if protDevices[key]: for phase in ['A', 'B', 'C']: if key != 'fuse': tree[feeder.getMaxKey(tree) + 1] = { 'object': 'group_recorder', 'group': '"class=' + key + '"', 'property': 'phase_' + phase + '_state', 'file': key + '_phase_' + phase + '_state.csv' } else: tree[feeder.getMaxKey(tree) + 1] = { 'object': 'group_recorder', 'group': '"class=' + key + '"', 'property': 'phase_' + phase + '_status', 'file': key + '_phase_' + phase + '_state.csv' } # Run Gridlab. if not workDir: workDir = tempfile.mkdtemp() print('@@@@@@', workDir) # for i in range(6): # gridlabOut = gridlabd.runInFilesystem(tree, attachments=attachments, workDir=workDir) # #HACK: workaround for shoddy macOS gridlabd build. 
# if 'error when setting parent' not in gridlabOut.get('stderr','OOPS'): # break gridlabOut = gridlabd.runInFilesystem(tree, attachments=attachments, workDir=workDir) #Record final status readout of each fuse/recloser/switch/sectionalizer after running try: for key in protDevices.keys(): if protDevices[key]: for phase in ['A', 'B', 'C']: with open(pJoin(workDir, key + '_phase_' + phase + '_state.csv'), newline='') as statusFile: reader = csv.reader(statusFile) # loop past the header, keys = [] vals = [] for row in reader: if '# timestamp' in row: keys = row i = keys.index('# timestamp') keys.pop(i) vals = next(reader) vals.pop(i) for pos, key2 in enumerate(keys): protDevFinalStatus[key2][phase] = vals[pos] except: pass #print protDevFinalStatus #compare initial and final states of protective devices #quick compare to see if they are equal #print cmp(protDevInitStatus, protDevFinalStatus) #find which values changed changedStates = {} #read voltDump values into a dictionary. try: with open(pJoin(workDir, 'voltDump.csv'), newline='') as dumpFile: reader = csv.reader(dumpFile) next(reader) # Burn the header. keys = next(reader) voltTable = [] for row in reader: rowDict = {} for pos, key in enumerate(keys): rowDict[key] = row[pos] voltTable.append(rowDict) except: raise Exception( 'GridLAB-D failed to run with the following errors:\n' + gridlabOut['stderr']) # read currDump values into a dictionary with open(pJoin(workDir, 'currDump.csv'), newline='') as currDumpFile: reader = csv.reader(currDumpFile) next(reader) # Burn the header. 
keys = next(reader) currTable = [] for row in reader: rowDict = {} for pos, key in enumerate(keys): rowDict[key] = row[pos] currTable.append(rowDict) # read line rating values into a single dictionary lineRatings = {} rating_in_VA = [] for key1 in edge_bools.keys(): if edge_bools[key1]: with open(pJoin(workDir, key1 + '_cont_rating.csv'), newline='') as ratingFile: reader = csv.reader(ratingFile) # loop past the header, keys = [] vals = [] for row in reader: if '# timestamp' in row: keys = row i = keys.index('# timestamp') keys.pop(i) vals = next(reader) vals.pop(i) for pos, key2 in enumerate(keys): lineRatings[key2] = abs(float(vals[pos])) #edgeTupleRatings = lineRatings copy with to-from tuple as keys for labeling edgeTupleRatings = {} for edge in lineRatings: for obj in tree.values(): if obj.get('name', '').replace('"', '') == edge: nodeFrom = obj.get('from') nodeTo = obj.get('to') coord = (nodeFrom, nodeTo) ratingVal = lineRatings.get(edge) edgeTupleRatings[coord] = ratingVal # Calculate average node voltage deviation. First, helper functions. def digits(x): ''' Returns number of digits before the decimal in the float x. ''' return math.ceil(math.log10(x + 1)) def avg(l): ''' Average of a list of ints or floats. ''' # HACK: add a small value to the denominator to avoid divide by zero for out of service locations (i.e. zero voltage). return sum(l) / (len(l) + 0.00000000000000001) # Detect the feeder nominal voltage: for key in tree: ob = tree[key] if type(ob) == dict and ob.get('bustype', '') == 'SWING': feedVoltage = float(ob.get('nominal_voltage', 1)) # Tot it all up. 
nodeVolts = {} nodeVoltsPU = {} nodeVoltsPU120 = {} voltImbalances = {} for row in voltTable: allVolts = [] allVoltsPU = [] allDiffs = [] nodeName = row.get('node_name', '') for phase in ['A', 'B', 'C']: realVolt = abs(float(row['volt' + phase + '_real'])) imagVolt = abs(float(row['volt' + phase + '_imag'])) phaseVolt = math.sqrt((realVolt**2) + (imagVolt**2)) if phaseVolt != 0.0: treeKey = nameToIndex.get(nodeName, 0) nodeObj = tree.get(treeKey, {}) try: nominal_voltage = float(nodeObj['nominal_voltage']) except: nominal_voltage = feedVoltage allVolts.append(phaseVolt) normVolt = (phaseVolt / nominal_voltage) allVoltsPU.append(normVolt) avgVolts = avg(allVolts) avgVoltsPU = avg(allVoltsPU) avgVoltsPU120 = 120 * avgVoltsPU nodeVolts[nodeName] = float("{0:.2f}".format(avgVolts)) nodeVoltsPU[nodeName] = float("{0:.2f}".format(avgVoltsPU)) nodeVoltsPU120[nodeName] = float("{0:.2f}".format(avgVoltsPU120)) if len(allVolts) == 3: voltA = allVolts.pop() voltB = allVolts.pop() voltC = allVolts.pop() allDiffs.append(abs(float(voltA - voltB))) allDiffs.append(abs(float(voltA - voltC))) allDiffs.append(abs(float(voltB - voltC))) maxDiff = max(allDiffs) voltImbal = maxDiff / avgVolts voltImbalances[nodeName] = float("{0:.2f}".format(voltImbal)) # Use float("{0:.2f}".format(avg(allVolts))) if displaying the node labels nodeLoadNames = {} nodeNames = {} for key in nodeVolts.keys(): nodeNames[key] = key if key == loadLoc: nodeLoadNames[key] = "LOAD: " + key # find edge currents by parsing currdump edgeCurrentSum = {} edgeCurrentMax = {} for row in currTable: allCurr = [] for phase in ['A', 'B', 'C']: realCurr = abs(float(row['curr' + phase + '_real'])) imagCurr = abs(float(row['curr' + phase + '_imag'])) phaseCurr = math.sqrt((realCurr**2) + (imagCurr**2)) allCurr.append(phaseCurr) edgeCurrentSum[row.get('link_name', '')] = sum(allCurr) edgeCurrentMax[row.get('link_name', '')] = max(allCurr) # When just showing current as labels, use sum of the three lines' current values, when 
showing the per unit values (current/rating), use the max of the three #edgeTupleCurrents = edgeCurrents copy with to-from tuple as keys for labeling edgeTupleCurrents = {} #edgeValsPU = values normalized per unit by line ratings edgeValsPU = {} #edgeTupleValsPU = edgeValsPU copy with to-from tuple as keys for labeling edgeTupleValsPU = {} #edgeTuplePower = dict with to-from tuples as keys and sim power as values for debugging edgeTuplePower = {} #edgeTupleNames = dict with to-from tuples as keys and names as values for debugging edgeTupleNames = {} #edgeTupleFaultNames = dict with to-from tuples as keys and the name of the Fault as the only value edgeTupleFaultNames = {} #edgeTupleProtDevs = dict with to-from tuples as keys and the initial of the type of protective device as the value edgeTupleProtDevs = {} #linePhases = dictionary containing the number of phases on each line for line-width purposes linePhases = {} edgePower = {} for edge in edgeCurrentSum: for obj in tree.values(): obname = obj.get('name', '').replace('"', '') if obname == edge: objType = obj.get('object') nodeFrom = obj.get('from') nodeTo = obj.get('to') coord = (nodeFrom, nodeTo) currVal = edgeCurrentSum.get(edge) voltVal = avg([nodeVolts.get(nodeFrom), nodeVolts.get(nodeTo)]) power = (currVal * voltVal) / 1000 lineRating = lineRatings.get(edge, 10.0**9) edgePerUnitVal = (edgeCurrentMax.get(edge)) / lineRating edgeTupleCurrents[coord] = "{0:.2f}".format(currVal) edgeTuplePower[coord] = "{0:.2f}".format(power) edgePower[edge] = power edgeValsPU[edge] = edgePerUnitVal edgeTupleValsPU[coord] = "{0:.2f}".format(edgePerUnitVal) edgeTupleNames[coord] = edge if faultLoc == edge: edgeTupleFaultNames[coord] = "FAULT: " + edge phaseStr = obj.get('phases', '').replace('"', '').replace( 'N', '').replace('S', '') numPhases = len(phaseStr) if (numPhases < 1) or (numPhases > 3): numPhases = 1 linePhases[edge] = numPhases protDevLabel = "" protDevBlownStr = "" if objType in protDevices.keys(): for phase in 
protDevFinalStatus[obname].keys(): if objType == 'fuse': if protDevFinalStatus[obname][phase] == "BLOWN": protDevBlownStr = "!" else: if protDevFinalStatus[obname][phase] == "OPEN": protDevBlownStr = "!" if objType == 'fuse': protDevLabel = 'F' elif objType == 'switch': protDevLabel = 'S' elif objType == 'recloser': protDevLabel = 'R' elif objType == 'sectionalizer': protDevLabel = 'X' edgeTupleProtDevs[coord] = protDevLabel + protDevBlownStr #define which dict will be used for edge line color edgeColors = edgeValsPU #define which dict will be used for edge label edgeLabels = edgeTupleValsPU # Build the graph. fGraph = feeder.treeToNxGraph(tree) # TODO: consider whether we can set figsize dynamically. wlVal = int(math.sqrt(float(rezSqIn))) voltChart = plt.figure(figsize=(wlVal, wlVal)) plt.axes(frameon=0) plt.axis('off') voltChart.gca().set_aspect('equal') plt.tight_layout() #set axes step equal if neatoLayout: # HACK: work on a new graph without attributes because graphViz tries to read attrs. cleanG = nx.Graph(fGraph.edges()) cleanG.add_nodes_from(fGraph) positions = graphviz_layout(cleanG, prog='neato') else: remove_nodes = [ n for n in fGraph if fGraph.nodes[n].get('pos', (0, 0)) == (0, 0) ] fGraph.remove_nodes_from(remove_nodes) positions = {n: fGraph.nodes[n].get('pos', (0, 0)) for n in fGraph} # Need to get edge names from pairs of connected node names. 
edgeNames = [] for e in fGraph.edges(): edgeNames.append((fGraph.edges[e].get('name', 'BLANK')).replace('"', '')) #create custom colormap if customColormap: if scaleMin != None and scaleMax != None: scaleDif = scaleMax - scaleMin custom_cm = matplotlib.colors.LinearSegmentedColormap.from_list( 'custColMap', [(scaleMin, 'blue'), (scaleMin + (0.12 * scaleDif), 'darkgray'), (scaleMin + (0.56 * scaleDif), 'darkgray'), (scaleMin + (0.8 * scaleDif), 'red')]) vmin = scaleMin vmax = scaleMax else: custom_cm = matplotlib.colors.LinearSegmentedColormap.from_list( 'custColMap', [(0.0, 'blue'), (0.15, 'darkgray'), (0.7, 'darkgray'), (1.0, 'red')]) vmin = 0 vmax = 1.25 custom_cm.set_under(color='black') else: custom_cm = plt.cm.get_cmap('viridis') if scaleMin != None and scaleMax != None: vmin = scaleMin vmax = scaleMax else: vmin = None vmax = None drawColorbar = False emptyColors = {} #draw edges with or without colors if edgeCol != None: drawColorbar = True if edgeCol == "Current": edgeList = [edgeCurrentSum.get(n, 1) for n in edgeNames] drawColorbar = True elif edgeCol == "Power": edgeList = [edgePower.get(n, 1) for n in edgeNames] drawColorbar = True elif edgeCol == "Rating": edgeList = [lineRatings.get(n, 10.0**9) for n in edgeNames] drawColorbar = True elif edgeCol == "PercentOfRating": edgeList = [edgeValsPU.get(n, .5) for n in edgeNames] drawColorbar = True else: edgeList = [emptyColors.get(n, .6) for n in edgeNames] print( "WARNING: edgeCol property must be 'Current', 'Power', 'Rating', 'PercentOfRating', or None" ) else: edgeList = [emptyColors.get(n, .6) for n in edgeNames] edgeIm = nx.draw_networkx_edges( fGraph, pos=positions, edge_color=edgeList, width=[linePhases.get(n, 1) for n in edgeNames], edge_cmap=custom_cm) #draw edge labels if edgeLabs != None: if edgeLabs == "Name": edgeLabels = edgeTupleNames elif edgeLabs == "Fault": edgeLabels = edgeTupleFaultNames elif edgeLabs == "Value": if edgeCol == "Current": edgeLabels = edgeTupleCurrents elif edgeCol == 
"Power": edgeLabels = edgeTuplePower elif edgeCol == "Rating": edgeLabels = edgeTupleRatings elif edgeCol == "PercentOfRating": edgeLabels = edgeTupleValsPU else: edgeLabels = None print( "WARNING: edgeCol property cannot be set to None when edgeLabs property is set to 'Value'" ) elif edgeLabs == "ProtDevs": edgeLabels = edgeTupleProtDevs else: edgeLabs = None print( "WARNING: edgeLabs property must be either 'Name', 'Value', or None" ) if edgeLabs != None: edgeLabelsIm = nx.draw_networkx_edge_labels(fGraph, pos=positions, edge_labels=edgeLabels, font_size=8) # draw nodes with or without color if nodeCol != None: if nodeCol == "Voltage": nodeList = [nodeVolts.get(n, 1) for n in fGraph.nodes()] drawColorbar = True elif nodeCol == "VoltageImbalance": nodeList = [voltImbalances.get(n, 1) for n in fGraph.nodes()] drawColorbar = True elif nodeCol == "perUnitVoltage": nodeList = [nodeVoltsPU.get(n, .5) for n in fGraph.nodes()] drawColorbar = True elif nodeCol == "perUnit120Voltage": nodeList = [nodeVoltsPU120.get(n, 120) for n in fGraph.nodes()] drawColorbar = True else: nodeList = [emptyColors.get(n, 1) for n in fGraph.nodes()] print( "WARNING: nodeCol property must be 'Voltage', 'VoltageImbalance', 'perUnitVoltage', 'perUnit120Voltage', or None" ) else: nodeList = [emptyColors.get(n, .6) for n in fGraph.nodes()] nodeIm = nx.draw_networkx_nodes(fGraph, pos=positions, node_color=nodeList, linewidths=0, node_size=30, vmin=vmin, vmax=vmax, cmap=custom_cm) #draw node labels nodeLabels = {} if nodeLabs != None: if nodeLabs == "Name": nodeLabels = nodeNames elif nodeLabs == "Value": if nodeCol == "Voltage": nodeLabels = nodeVolts elif nodeCol == "VoltageImbalance": nodeLabels = voltImbalances elif nodeCol == "perUnitVoltage": nodeLabels = nodeVoltsPU elif nodeCol == "perUnit120Voltage": nodeLabels = nodeVoltsPU120 else: nodeLabels = None print( "WARNING: nodeCol property cannot be set to None when nodeLabs property is set to 'Value'" ) #HACK: add hidden node label option for 
displaying specified load name elif nodeLabs == "Load": nodeLabels = nodeLoadNames else: nodeLabs = None print( "WARNING: nodeLabs property must be either 'Name', 'Value', or None" ) if nodeLabs != None: nodeLabelsIm = nx.draw_networkx_labels(fGraph, pos=positions, labels=nodeLabels, font_size=8) plt.sci(nodeIm) # plt.clim(110,130) if drawColorbar: plt.colorbar() return voltChart
# Delete all climate then reinsert.
reader_name = 'weatherReader'
climate_name = 'MyClimate'
# BUGFIX: the original test was `obType is 'climate'`, which compares object
# identity, not string equality — whether it matched depended on CPython
# string interning. Also snapshot the keys so deleting entries is safe even
# under Python 3, where dict views forbid mutation during iteration.
for key in list(myTree.keys()):
	obName = myTree[key].get('name','')
	obType = myTree[key].get('object','')
	if obName in [reader_name, climate_name] or obType == 'climate':
		del myTree[key]
# Append tape/climate modules plus a csv_reader-backed climate object.
oldMax = feeder.getMaxKey(myTree)
myTree[oldMax + 1] = {'omftype':'module', 'argument':'tape'}
myTree[oldMax + 2] = {'omftype':'module', 'argument':'climate'}
myTree[oldMax + 3] = {'object':'csv_reader', 'name':reader_name, 'filename':CSV_NAME}
myTree[oldMax + 4] = {'object':'climate', 'name':climate_name, 'reader': reader_name, 'tmyfile':CSV_NAME}
# Set the time correctly: 240 hours starting on INIT_TIME's calendar day.
feeder.adjustTime(myTree, 240, 'hours', '{}-{}-{}'.format(INIT_TIME.year, INIT_TIME.month, INIT_TIME.day))
# Run here to test.
rawOut = runInFilesystem(myTree, attachments=[], keepFiles=True, workDir='.', glmName='./outFile.glm')
# Write back the full feeder.
# outJson = dict(myFeed)
# with open(CSV_NAME,'r') as weatherFile:
# 	weatherString = weatherFile.read()
# outJson['attachments']['weatheryearDCA.csv'] = weatherString
# outJson['tree'] = myTree
# try: os.remove('./Orville Tree Pond Calibrated With Weather.json')
# except: pass
# with open('./Orville Tree Pond Calibrated With Weather.json', 'w') as outFile:
# 	json.dump(outJson, outFile, indent=4)
def run(modelDir, inputDict): ''' Run the model in its directory. ''' try: # Set up GLM with correct time and recorders: omd = json.load(open(pJoin(modelDir, 'feeder.omd'))) tree = omd.get('tree', {}) feeder.attachRecorders(tree, "CollectorVoltage", None, None) feeder.attachRecorders(tree, "Climate", "object", "climate") feeder.attachRecorders(tree, "OverheadLosses", None, None) feeder.attachRecorders(tree, "UndergroundLosses", None, None) feeder.attachRecorders(tree, "TriplexLosses", None, None) feeder.attachRecorders(tree, "TransformerLosses", None, None) feeder.groupSwingKids(tree) feeder.adjustTime(tree, 120, 'hours', '2011-01-01') # Run GridLAB-D startTime = dt.datetime.now() rawOut = gridlabd.runInFilesystem(tree, attachments=omd.get( 'attachments', {}), workDir=modelDir) # Clean the output. cleanOut = {} # Std Err and Std Out cleanOut['stderr'] = rawOut['stderr'] cleanOut['stdout'] = rawOut['stdout'] # Time Stamps for key in rawOut: if '# timestamp' in rawOut[key]: cleanOut['timeStamps'] = rawOut[key]['# timestamp'] break elif '# property.. timestamp' in rawOut[key]: cleanOut['timeStamps'] = rawOut[key]['# property.. 
timestamp'] else: cleanOut['timeStamps'] = [] # Day/Month Aggregation Setup: stamps = cleanOut.get('timeStamps', []) level = inputDict.get('simLengthUnits', 'hours') # Climate for key in rawOut: if key.startswith('Climate_') and key.endswith('.csv'): cleanOut['climate'] = {} cleanOut['climate']['Rain Fall (in/h)'] = rawOut[key].get( 'rainfall') cleanOut['climate']['Wind Speed (m/s)'] = rawOut[key].get( 'wind_speed') cleanOut['climate']['Temperature (F)'] = rawOut[key].get( 'temperature') cleanOut['climate']['Snow Depth (in)'] = rawOut[key].get( 'snowdepth') cleanOut['climate']['Direct Normal (W/sf)'] = rawOut[key].get( 'solar_direct') climateWbySFList = rawOut[key].get('solar_global') #converting W/sf to W/sm climateWbySMList = [x * 10.76392 for x in climateWbySFList] cleanOut['climate'][ 'Global Horizontal (W/sm)'] = climateWbySMList # Voltage Band if 'VoltageJiggle.csv' in rawOut: cleanOut['allMeterVoltages'] = {} cleanOut['allMeterVoltages']['Min'] = [ float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)'] ] cleanOut['allMeterVoltages']['Mean'] = [ float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)'] ] cleanOut['allMeterVoltages']['StdDev'] = [ float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)'] ] cleanOut['allMeterVoltages']['Max'] = [ float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)'] ] # Dump the results. endTime = dt.datetime.now() inputDict["runTime"] = str( dt.timedelta(seconds=int((endTime - startTime).total_seconds()))) with open(pJoin(modelDir, "allInputData.json"), "w") as inFile: json.dump(inputDict, inFile, indent=4) with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile: json.dump(cleanOut, outFile, indent=4) except: # If input range wasn't valid delete output, write error to disk. 
cancel(modelDir) thisErr = traceback.format_exc() print 'ERROR IN MODEL', modelDir, thisErr inputDict['stderr'] = thisErr with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile: errorFile.write(thisErr) with open(pJoin(modelDir, "allInputData.json"), "w") as inFile: json.dump(inputDict, inFile, indent=4)
from flask import url_for, request
import omf
from omf import feeder
from omf.solvers import gridlabd
from omf.scratch.GRIP import grip

# Scratch harness for manual experimentation. Not intended to be run with real tests via pytest.
if __name__ == '__main__':
	scratch_dir = tempfile.mkdtemp()
	glm_path = Path(omf.omfDir) / 'scratch/CIGAR/test_ieee123nodeBetter.glm'
	#glm_path = Path(__file__).parent / 'test-files/ieee123_pole_vulnerability.glm'
	parsed_tree = feeder.parse(glm_path)
	#import pdb; pdb.set_trace()
	run_results = gridlabd.runInFilesystem(parsed_tree, attachments=[], keepFiles=True, workDir=scratch_dir, glmName='out.glm')
	print(run_results)

@pytest.fixture(scope="module")  # The client should only be created once.
def client():
	"""Yield a Flask test client for the GRIP app with TESTING enabled."""
	# Testing must be set to true on the Flask application.
	grip.app.config['TESTING'] = True
	# Build the test client with Flask's built-in machinery.
	flask_test_client = grip.app.test_client()
	# 'yield' rather than 'return': pytest resumes here for teardown if needed.
	yield flask_test_client
'object': 'underground_line', 'name': 'test_solar_line', 'phases': 'ABCN', 'from': 'test_solar_node', 'to': 'GC-12-47-1_node_26', 'length': '100', 'configuration': 'line_configuration:6' } feed[maxKey + 3] = { 'object': 'meter', 'name': 'test_solar_meter', 'parent': 'test_solar_node', 'phases': 'ABCN', 'nominal_voltage': '480' } feed[maxKey + 4] = { 'object': 'inverter', 'name': 'test_solar_inverter', 'parent': 'test_solar_meter', 'phases': 'AS', 'inverter_type': 'PWM', 'power_factor': '1.0', 'generator_status': 'ONLINE', 'generator_mode': 'CONSTANT_PF' } feed[maxKey + 5] = { 'object': 'solar', 'name': 'test_solar', 'parent': 'test_solar_inverter', 'area': '1000000 sf', 'generator_status': 'ONLINE', 'efficiency': '0.2', 'generator_mode': 'SUPPLY_DRIVEN', 'panel_type': 'SINGLE_CRYSTAL_SILICON' } feed[maxKey + 6] = { 'object': 'recorder', 'parent': 'test_solar_meter', 'property': 'voltage_A.real,voltage_A.imag,voltage_B.real,voltage_B.imag,voltage_C.real,voltage_C.imag', 'file': 'GC-addSolar-voltages.csv', 'interval': '60', 'limit': '1440' } runInFilesystem(feed, keepFiles = True, workDir = '.', glmName = 'GC-solarAdd.glm') ''' output = open('GC-solarAdd.glm', 'w') output.write(feeder.write(feed)) output.close() '''
if simNum == 0: rmtree(file) # copy model and its schedule into the working directory copy2(TEMPLATE_DIR + MODEL_FILE, WORKING_DIR) copy2(TEMPLATE_DIR + SCHEDULE_FILE, WORKING_DIR) # get gridlab model from template gridlabModel = getRandomGridlabModelFromTemplate( \ WORKING_DIR+MODEL_FILE, deleteEVCharger, \ waterHeaterType, coolingType, heatingType) # run gridlabd start = time.time() gridlabd.runInFilesystem(gridlabModel, workDir=WORKING_DIR, keepFiles=True) end = time.time() # move all csv files into a new folder, # delete all files, retain directories newFolder = WORKING_DIR + SIMULATION_DIR_PREFIX + str( simNum) os.mkdir(newFolder) files = glob(WORKING_DIR + '*') for file in files: if file[-3:] == 'csv': copy2(file, newFolder) try: os.remove(file) except IsADirectoryError:
def runForeground(modelDir, inputDict, fs): ''' Run the model in the foreground. WARNING: can take about a minute. ''' # Global vars, and load data from the model directory. print "STARTING TO RUN", modelDir try: startTime = datetime.datetime.now() feederJson = json.load(open(pJoin(modelDir, "feeder.json"))) tree = feederJson.get("tree", {}) attachments = feederJson.get("attachments", {}) allOutput = {} ''' Run CVR analysis. ''' # Reformate monthData and rates. rates = {k: float(inputDict[k]) for k in ["capitalCost", "omCost", "wholesaleEnergyCostPerKwh", "retailEnergyCostPerKwh", "peakDemandCostSpringPerKw", "peakDemandCostSummerPerKw", "peakDemandCostFallPerKw", "peakDemandCostWinterPerKw"]} # print "RATES", rates monthNames = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"] monthToSeason = {'January': 'Winter', 'February': 'Winter', 'March': 'Spring', 'April': 'Spring', 'May': 'Spring', 'June': 'Summer', 'July': 'Summer', 'August': 'Summer', 'September': 'Fall', 'October': 'Fall', 'November': 'Fall', 'December': 'Winter'} monthData = [] for i, x in enumerate(monthNames): monShort = x[0:3].lower() season = monthToSeason[x] histAvg = float(inputDict.get(monShort + "Avg", 0)) histPeak = float(inputDict.get(monShort + "Peak", 0)) monthData.append({"monthId": i, "monthName": x, "histAverage": histAvg, "histPeak": histPeak, "season": season}) # for row in monthData: # print row # Graph the SCADA data. 
fig = plt.figure(figsize=(10, 6)) indices = [r['monthName'] for r in monthData] d1 = [r['histPeak'] / (10**3) for r in monthData] d2 = [r['histAverage'] / (10**3) for r in monthData] ticks = range(len(d1)) bar_peak = plt.bar(ticks, d1, color='gray') bar_avg = plt.bar(ticks, d2, color='dimgray') plt.legend([bar_peak[0], bar_avg[0]], ['histPeak', 'histAverage'], bbox_to_anchor=(0., 1.015, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1) plt.xticks([t + 0.5 for t in ticks], indices) plt.ylabel('Mean and peak historical power consumptions (kW)') fig.autofmt_xdate() Plot.save_fig(plt, pJoin(modelDir, "scadaChart.png")) allOutput["histPeak"] = d1 allOutput["histAverage"] = d2 allOutput["monthName"] = [name[0:3] for name in monthNames] # Graph feeder. fig = plt.figure(figsize=(10, 10)) myGraph = omf.feeder.treeToNxGraph(tree) omf.feeder.latLonNxGraph(myGraph, neatoLayout=False) Plot.save_fig(plt, pJoin(modelDir, "feederChart.png")) with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile: allOutput["feederChart"] = inFile.read().encode("base64") # Get the load levels we need to test. allLoadLevels = [x.get( 'histPeak', 0) for x in monthData] + [y.get('histAverage', 0) for y in monthData] maxLev = _roundOne(max(allLoadLevels), 'up') minLev = _roundOne(min(allLoadLevels), 'down') tenLoadLevels = range( int(minLev), int(maxLev), int((maxLev - minLev) / 10)) # Gather variables from the feeder. for key in tree.keys(): # Set clock to single timestep. if tree[key].get('clock', '') == 'clock': tree[key] = {"timezone": "PST+8PDT", "stoptime": "'2013-01-01 00:00:00'", "starttime": "'2013-01-01 00:00:00'", "clock": "clock"} # Save swing node index. if tree[key].get('bustype', '').lower() == 'swing': swingIndex = key swingName = tree[key].get('name') # Remove all includes. if tree[key].get('omftype', '') == '#include': del key # Find the substation regulator and config. 
for key in tree: if tree[key].get('object', '') == 'regulator' and tree[key].get('from', '') == swingName: regIndex = key regConfName = tree[key]['configuration'] for key in tree: if tree[key].get('name', '') == regConfName: regConfIndex = key # Set substation regulator to manual operation. # GLOBAL VARIABLE FOR DEFAULT TAP POSITION baselineTap = int(inputDict.get("baselineTap")) tree[regConfIndex] = { 'name': tree[regConfIndex]['name'], 'object': 'regulator_configuration', 'connect_type': '1', 'raise_taps': '10', 'lower_taps': '10', 'CT_phase': 'ABC', 'PT_phase': 'ABC', # Yo, 0.10 means at tap_pos 10 we're 10% above 120V. 'regulation': '0.10', 'Control': 'MANUAL', 'control_level': 'INDIVIDUAL', 'Type': 'A', 'tap_pos_A': str(baselineTap), 'tap_pos_B': str(baselineTap), 'tap_pos_C': str(baselineTap)} # Attach recorders relevant to CVR. recorders = [ {'object': 'collector', 'file': 'ZlossesTransformer.csv', 'group': 'class=transformer', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'}, {'object': 'collector', 'file': 'ZlossesUnderground.csv', 'group': 'class=underground_line', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'}, {'object': 'collector', 'file': 'ZlossesOverhead.csv', 'group': 'class=overhead_line', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'}, {'object': 'recorder', 'file': 'Zregulator.csv', 'limit': '0', 'parent': tree[regIndex]['name'], 'property': 'tap_A,tap_B,tap_C,power_in.real,power_in.imag'}, {'object': 'collector', 'file': 'ZvoltageJiggle.csv', 'group': 'class=triplex_meter', 'limit': '0', 'property': 
'min(voltage_12.mag),mean(voltage_12.mag),max(voltage_12.mag),std(voltage_12.mag)'}, {'object': 'recorder', 'file': 'ZsubstationTop.csv', 'limit': '0', 'parent': tree[swingIndex]['name'], 'property': 'voltage_A,voltage_B,voltage_C'}, {'object': 'recorder', 'file': 'ZsubstationBottom.csv', 'limit': '0', 'parent': tree[regIndex]['to'], 'property': 'voltage_A,voltage_B,voltage_C'}] biggest = 1 + max([int(k) for k in tree.keys()]) for index, rec in enumerate(recorders): tree[biggest + index] = rec # Change constant PF loads to ZIP loads. (See evernote for rationale # about 50/50 power/impedance mix.) blankZipModel = {'object': 'triplex_load', 'name': 'NAMEVARIABLE', 'base_power_12': 'POWERVARIABLE', 'power_fraction_12': str(inputDict.get("p_percent")), 'impedance_fraction_12': str(inputDict.get("z_percent")), 'current_fraction_12': str(inputDict.get("i_percent")), # MAYBEFIX: we can probably get this PF data from the # Milsoft loads. 'power_pf_12': str(inputDict.get("power_factor")), 'impedance_pf_12': str(inputDict.get("power_factor")), 'current_pf_12': str(inputDict.get("power_factor")), 'nominal_voltage': '120', 'phases': 'PHASESVARIABLE', 'parent': 'PARENTVARIABLE'} def powerClean(powerStr): ''' take 3339.39+1052.29j to 3339.39 ''' return powerStr[0:powerStr.find('+')] for key in tree: if tree[key].get('object', '') == 'triplex_node': # Get existing variables. name = tree[key].get('name', '') power = tree[key].get('power_12', '') parent = tree[key].get('parent', '') phases = tree[key].get('phases', '') # Replace object and reintroduce variables. tree[key] = copy(blankZipModel) tree[key]['name'] = name tree[key]['base_power_12'] = powerClean(power) tree[key]['parent'] = parent tree[key]['phases'] = phases # Function to determine how low we can tap down in the CVR case: def loweringPotential(baseLine): ''' Given a baseline end of line voltage, how many more percent can we shave off the substation voltage? 
''' ''' testsWePass = [122.0,118.0,200.0,110.0] ''' lower = int(math.floor((baseLine / 114.0 - 1) * 100)) - 1 # If lower is negative, we can't return it because we'd be # undervolting beyond what baseline already was! if lower < 0: return baselineTap else: return baselineTap - lower # Run all the powerflows. powerflows = [] for doingCvr in [False, True]: # For each load level in the tenLoadLevels, run a powerflow with # the load objects scaled to the level. for desiredLoad in tenLoadLevels: # Find the total load that was defined in Milsoft: loadList = [] for key in tree: if tree[key].get('object', '') == 'triplex_load': loadList.append(tree[key].get('base_power_12', '')) totalLoad = sum([float(x) for x in loadList]) # Rescale each triplex load: for key in tree: if tree[key].get('object', '') == 'triplex_load': currentPow = float(tree[key]['base_power_12']) ratio = desiredLoad / totalLoad tree[key]['base_power_12'] = str(currentPow * ratio) # If we're doing CVR then lower the voltage. if doingCvr: # Find the minimum voltage we can tap down to: newTapPos = baselineTap for row in powerflows: if row.get('loadLevel', '') == desiredLoad: newTapPos = loweringPotential( row.get('lowVoltage', 114)) # Tap it down to there. # MAYBEFIX: do each phase separately because that's how # it's done in the field... Oof. tree[regConfIndex]['tap_pos_A'] = str(newTapPos) tree[regConfIndex]['tap_pos_B'] = str(newTapPos) tree[regConfIndex]['tap_pos_C'] = str(newTapPos) # Run the model through gridlab and put outputs in the table. 
output = gridlabd.runInFilesystem(tree, attachments=attachments, keepFiles=True, workDir=modelDir) os.remove(pJoin(modelDir, "PID.txt")) p = output['Zregulator.csv']['power_in.real'][0] q = output['Zregulator.csv']['power_in.imag'][0] s = math.sqrt(p**2 + q**2) lossTotal = 0.0 for device in ['ZlossesOverhead.csv', 'ZlossesTransformer.csv', 'ZlossesUnderground.csv']: for letter in ['A', 'B', 'C']: r = output[device][ 'sum(power_losses_' + letter + '.real)'][0] i = output[device][ 'sum(power_losses_' + letter + '.imag)'][0] lossTotal += math.sqrt(r**2 + i**2) # Entire output: powerflows.append({ 'doingCvr': doingCvr, 'loadLevel': desiredLoad, 'realPower': p, 'powerFactor': p / s, 'losses': lossTotal, 'subVoltage': ( output['ZsubstationBottom.csv']['voltage_A'][0] + output['ZsubstationBottom.csv']['voltage_B'][0] + output['ZsubstationBottom.csv']['voltage_C'][0]) / 3 / 60, 'lowVoltage': output['ZvoltageJiggle.csv']['min(voltage_12.mag)'][0] / 2, 'highVoltage': output['ZvoltageJiggle.csv']['max(voltage_12.mag)'][0] / 2}) # For a given load level, find two points to interpolate on. def getInterpPoints(t): ''' Find the two points we can interpolate from. ''' ''' tests pass on [tenLoadLevels[0],tenLoadLevels[5]+499,tenLoadLevels[-1]-988] ''' loc = sorted(tenLoadLevels + [t]).index(t) if loc == 0: return (tenLoadLevels[0], tenLoadLevels[1]) elif loc > len(tenLoadLevels) - 2: return (tenLoadLevels[-2], tenLoadLevels[-1]) else: return (tenLoadLevels[loc - 1], tenLoadLevels[loc + 1]) # Calculate peak reduction. 
for row in monthData: peak = row['histPeak'] peakPoints = getInterpPoints(peak) peakTopBase = [x for x in powerflows if x.get( 'loadLevel', '') == peakPoints[-1] and x.get('doingCvr', '') == False][0] peakTopCvr = [x for x in powerflows if x.get( 'loadLevel', '') == peakPoints[-1] and x.get('doingCvr', '') == True][0] peakBottomBase = [x for x in powerflows if x.get( 'loadLevel', '') == peakPoints[0] and x.get('doingCvr', '') == False][0] peakBottomCvr = [x for x in powerflows if x.get( 'loadLevel', '') == peakPoints[0] and x.get('doingCvr', '') == True][0] # Linear interpolation so we aren't running umpteen million # loadflows. x = (peakPoints[0], peakPoints[1]) y = (peakTopBase['realPower'] - peakTopCvr['realPower'], peakBottomBase['realPower'] - peakBottomCvr['realPower']) peakRed = y[0] + (y[1] - y[0]) * (peak - x[0]) / (x[1] - x[0]) row['peakReduction'] = peakRed # Calculate energy reduction and loss reduction based on average load. for row in monthData: avgEnergy = row['histAverage'] energyPoints = getInterpPoints(avgEnergy) avgTopBase = [x for x in powerflows if x.get( 'loadLevel', '') == energyPoints[-1] and x.get('doingCvr', '') == False][0] avgTopCvr = [x for x in powerflows if x.get( 'loadLevel', '') == energyPoints[-1] and x.get('doingCvr', '') == True][0] avgBottomBase = [x for x in powerflows if x.get( 'loadLevel', '') == energyPoints[0] and x.get('doingCvr', '') == False][0] avgBottomCvr = [x for x in powerflows if x.get( 'loadLevel', '') == energyPoints[0] and x.get('doingCvr', '') == True][0] # Linear interpolation so we aren't running umpteen million # loadflows. 
x = (energyPoints[0], energyPoints[1]) y = (avgTopBase['realPower'] - avgTopCvr['realPower'], avgBottomBase['realPower'] - avgBottomCvr['realPower']) energyRed = y[0] + \ (y[1] - y[0]) * (avgEnergy - x[0]) / (x[1] - x[0]) row['energyReduction'] = energyRed lossY = (avgTopBase['losses'] - avgTopCvr['losses'], avgBottomBase['losses'] - avgBottomCvr['losses']) lossRed = lossY[0] + (lossY[1] - lossY[0]) * \ (avgEnergy - x[0]) / (x[1] - x[0]) row['lossReduction'] = lossRed # Multiply by dollars. for row in monthData: row['energyReductionDollars'] = row['energyReduction'] / 1000 * \ (rates['wholesaleEnergyCostPerKwh'] - rates['retailEnergyCostPerKwh']) row['peakReductionDollars'] = row['peakReduction'] / \ 1000 * rates['peakDemandCost' + row['season'] + 'PerKw'] row['lossReductionDollars'] = row['lossReduction'] / \ 1000 * rates['wholesaleEnergyCostPerKwh'] # Pretty output def plotTable(inData): fig = plt.figure(figsize=(10, 5)) plt.axis('off') plt.tight_layout() plt.table(cellText=[row for row in inData[1:]], loc='center', rowLabels=range(len(inData) - 1), colLabels=inData[0]) def dictalToMatrix(dictList): ''' Take our dictal format to a matrix. ''' matrix = [dictList[0].keys()] for row in dictList: matrix.append(row.values()) return matrix # Powerflow results. plotTable(dictalToMatrix(powerflows)) Plot.save_fig(plt, pJoin(modelDir, "powerflowTable.png")) # Monetary results. # To print partial money table monthDataMat = dictalToMatrix(monthData) dimX = len(monthDataMat) dimY = len(monthDataMat[0]) monthDataPart = [] for k in range(0, dimX): monthDatatemp = [] for m in range(4, dimY): monthDatatemp.append(monthDataMat[k][m]) monthDataPart.append(monthDatatemp) plotTable(monthDataPart) Plot.save_fig(plt, pJoin(modelDir, "moneyTable.png")) allOutput["monthDataMat"] = dictalToMatrix(monthData) allOutput["monthDataPart"] = monthDataPart # Graph the money data. 
fig = plt.figure(figsize=(10, 8)) indices = [r['monthName'] for r in monthData] d1 = [r['energyReductionDollars'] for r in monthData] d2 = [r['lossReductionDollars'] for r in monthData] d3 = [r['peakReductionDollars'] for r in monthData] ticks = range(len(d1)) bar_erd = plt.bar(ticks, d1, color='red') bar_lrd = plt.bar(ticks, d2, color='green') bar_prd = plt.bar(ticks, d3, color='blue', yerr=d2) plt.legend([bar_prd[0], bar_lrd[0], bar_erd[0]], ['peakReductionDollars', 'lossReductionDollars', 'energyReductionDollars'], bbox_to_anchor=(0., 1.015, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1) plt.xticks([t + 0.5 for t in ticks], indices) plt.ylabel('Utility Savings ($)') plt.tight_layout(5.5, 1.3, 1.2) fig.autofmt_xdate() Plot.save_fig(plt, pJoin(modelDir, "spendChart.png")) allOutput["energyReductionDollars"] = d1 allOutput["lossReductionDollars"] = d2 allOutput["peakReductionDollars"] = d3 # Graph the cumulative savings. fig = plt.figure(figsize=(10, 5)) annualSavings = sum(d1) + sum(d2) + sum(d3) annualSave = lambda x: ( annualSavings - rates['omCost']) * x - rates['capitalCost'] simplePayback = rates['capitalCost'] / \ (annualSavings - rates['omCost']) plt.xlabel('Year After Installation') plt.xlim(0, 30) plt.ylabel('Cumulative Savings ($)') plt.plot([0 for x in range(31)], c='gray') plt.axvline(x=simplePayback, ymin=0, ymax=1, c='gray', linestyle='--') plt.plot([annualSave(x) for x in range(31)], c='green') Plot.save_fig(plt, pJoin(modelDir, "savingsChart.png")) allOutput["annualSave"] = [annualSave(x) for x in range(31)] # Update the runTime in the input file. endTime = datetime.datetime.now() inputDict["runTime"] = str( datetime.timedelta(seconds=int((endTime - startTime).total_seconds()))) fs.save(pJoin(modelDir, "allInputData.json"), json.dumps(inputDict, indent=4)) # Write output file. fs.save(pJoin(modelDir, "allOutputData.json"), json.dumps(allOutput, indent=4)) # For autotest, there won't be such file. 
try: os.remove(pJoin(modelDir, "PPID.txt")) except: pass print "DONE RUNNING", modelDir except Exception as e: print "Oops, Model Crashed!!!" print e cancel(modelDir)
def work(modelDir, inputDict):
	''' Run the model in its directory.

	Pipeline: build a GFM (fragility model) input file from the feeder .omd and
	the user's inputDict, run the GFM jar, run GridLAB-D to get network data,
	run the RDT (resilient design tool) jar, then build and (on Windows) run a
	second GridLAB-D model modified per RDT's design solution, and finally draw
	the feeder diagram. Returns the outData dict rendered by the web frontend.
	'''
	outData = {}
	# Model folder is expected to contain exactly one feeder .omd; strip the extension.
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	# Write the user-supplied weather hazard field file that GFM will consume.
	with open(pJoin(modelDir, inputDict['weatherImpactsFileName']), 'w') as hazardFile:
		hazardFile.write(inputDict['weatherImpacts'])
	with open(pJoin(modelDir, feederName + '.omd'), "r") as jsonIn:
		feederModel = json.load(jsonIn)
	# Create GFM input file.
	print "RUNNING GFM FOR", modelDir
	gfmInputTemplate = {
		'phase_variation': float(inputDict['phaseVariation']),
		'chance_constraint': float(inputDict['chanceConstraint']),
		'critical_load_met': float(inputDict['criticalLoadMet']),
		'total_load_met': 1.0, #(float(inputDict['criticalLoadMet']) + float(inputDict['nonCriticalLoadMet'])),
		'xrMatrices': inputDict["xrMatrices"],
		'maxDGPerGenerator': float(inputDict["maxDGPerGenerator"]),
		'dgUnitCost': float(inputDict["dgUnitCost"]),
		'newLineCandidates': inputDict['newLineCandidates'],
		'hardeningCandidates': inputDict['hardeningCandidates'],
		'switchCandidates': inputDict['switchCandidates'],
		'hardeningUnitCost': inputDict['hardeningUnitCost'],
		'switchCost': inputDict['switchCost'],
		'generatorCandidates': inputDict['generatorCandidates'],
		'lineUnitCost': inputDict['lineUnitCost']
	}
	gfmJson = convertToGFM(gfmInputTemplate, feederModel)
	gfmInputFilename = 'gfmInput.json'
	with open(pJoin(modelDir, gfmInputFilename), "w") as outFile:
		json.dump(gfmJson, outFile, indent=4)
	# Run GFM (Fragility.jar) as a subprocess; capture its console output to disk.
	gfmBinaryPath = pJoin(__neoMetaModel__._omfDir, 'solvers', 'gfm', 'Fragility.jar')
	proc = subprocess.Popen([
		'java', '-jar', gfmBinaryPath, '-r', gfmInputFilename, '-wf',
		inputDict['weatherImpactsFileName'], '-num', '3'
	], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=modelDir)
	(stdout, stderr) = proc.communicate()
	with open(pJoin(modelDir, "gfmConsoleOut.txt"), "w") as gfmConsoleOut:
		gfmConsoleOut.write(stdout)
	# HACK: rename the hardcoded gfm output
	rdtInputFilePath = pJoin(modelDir, 'rdtInput.json')
	# fix for windows web server hangup
	# NOTE(review): the assignment above is immediately overwritten — GFM's
	# hardcoded output name 'rdt_OUTPUT.json' is used directly instead of renaming.
	rdtInputFilePath = pJoin(modelDir, 'rdt_OUTPUT.json')
	#os.rename(pJoin(modelDir,'rdt_OUTPUT.json'),rdtInputFilePath)
	# Pull GFM input data on lines and generators for HTML presentation.
	with open(rdtInputFilePath, 'r') as rdtInputFile:
		# HACK: we use rdtInput as a string in the frontend.
		rdtJsonAsString = rdtInputFile.read()
		rdtJson = json.loads(rdtJsonAsString)
	# Calculate line costs.
	lineData = {}
	for line in rdtJson["lines"]:
		lineData[line["id"]] = '{:,.2f}'.format(
			float(line["length"]) * float(inputDict["lineUnitCost"]))
	outData["lineData"] = lineData
	outData["generatorData"] = '{:,.2f}'.format(
		float(inputDict["dgUnitCost"]) * float(inputDict["maxDGPerGenerator"]))
	outData['gfmRawOut'] = rdtJsonAsString
	# If the user supplied explicit damage scenarios, splice them into the RDT input.
	if inputDict['scenarios'] != "":
		rdtJson['scenarios'] = json.loads(inputDict['scenarios'])
		with open(pJoin(rdtInputFilePath), "w") as rdtInputFile:
			json.dump(rdtJson, rdtInputFile, indent=4)
	# Run GridLAB-D first time to generate xrMatrices.
	print "RUNNING GLD FOR", modelDir
	if platform.system() == "Windows":
		omdPath = pJoin(modelDir, feederName + ".omd")
		# Note: the name `omd` is rebound from file handle to parsed dict here.
		with open(omdPath, "r") as omd:
			omd = json.load(omd)
		#REMOVE NEWLINECANDIDATES — candidate lines do not exist yet, so drop
		# any tree objects matching their names before simulating.
		deleteList = []
		newLines = inputDict["newLineCandidates"].strip().replace(' ', '').split(',')
		for newLine in newLines:
			for omdObj in omd["tree"]:
				if ("name" in omd["tree"][omdObj]):
					if (newLine == omd["tree"][omdObj]["name"]):
						deleteList.append(omdObj)
		for delItem in deleteList:
			del omd["tree"][delItem]
		#Load a blank glm file and use it to write to it
		feederPath = pJoin(modelDir, 'feeder.glm')
		with open(feederPath, 'w') as glmFile:
			toWrite = omf.feeder.sortedWrite(omd['tree']) + "object jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n" # + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
			glmFile.write(toWrite)
		#Write attachments from omd, if no file, one will be created
		for fileName in omd['attachments']:
			with open(os.path.join(modelDir, fileName), 'w') as file:
				file.write(omd['attachments'][fileName])
		#Wire in the file the user specifies via zipcode.
		climateFileName, latforpvwatts = zipCodeToClimateName(inputDict["simulationZipCode"])
		shutil.copy(
			pJoin(__neoMetaModel__._omfDir, "data", "Climate", climateFileName + ".tmy2"),
			pJoin(modelDir, 'climate.tmy2'))
		# shell=True so Windows resolves 'gridlabd' through the shell PATH.
		proc = subprocess.Popen(['gridlabd', 'feeder.glm'], stdout=subprocess.PIPE, shell=True, cwd=modelDir)
		(out, err) = proc.communicate()
		with open(pJoin(modelDir, "gldConsoleOut.txt"), "w") as gldConsoleOut:
			gldConsoleOut.write(out)
		accumulator = ""
		# NOTE(review): the GLM above requests dump file 'test_JSON_dump.json'
		# but this reads 'JSON_dump_line.json' — confirm GridLAB-D's actual
		# jsondump output filename.
		with open(pJoin(modelDir, "JSON_dump_line.json"), "r") as gldOut:
			accumulator = json.load(gldOut)
		outData['gridlabdRawOut'] = accumulator
		#Data transformation for GLD: adopt GridLAB-D's line codes/lines and
		# suffix node ids with "_bus" to match RDT's bus naming.
		rdtJson["line_codes"] = accumulator["properties"]["line_codes"]
		rdtJson["lines"] = accumulator["properties"]["lines"]
		for item in rdtJson["lines"]:
			item['node1_id'] = item['node1_id'] + "_bus"
			item['node2_id'] = item['node2_id'] + "_bus"
		with open(pJoin(modelDir, rdtInputFilePath), "w") as outFile:
			json.dump(rdtJson, outFile, indent=4)
		# Disabled alternative line-code renumbering, kept for reference:
		'''rdtJson["line_codes"] = accumulator["properties"]["line_codes"]
		counter = 1
		lineCodeTracker = {}
		for item in rdtJson["line_codes"]:
			lineCodeTracker[item['line_code']] = counter
			item['line_code'] = counter
			counter = counter + 1
		rdtJson["lines"] = accumulator["properties"]["lines"]
		print lineCodeTracker
		for line in rdtJson["lines"]:
			line["line_code"] = lineCodeTracker[line["line_code"]]
		with open(pJoin(modelDir, rdtInputFilePath), "w") as outFile:
			json.dump(rdtJson, outFile, indent=4)'''
	else:
		# Non-Windows: run GridLAB-D in-process via the omf wrapper.
		tree = feederModel.get("tree", {})
		attachments = feederModel.get("attachments", {})
		climateFileName, latforpvwatts = zipCodeToClimateName(inputDict["simulationZipCode"])
		shutil.copy(
			pJoin(__neoMetaModel__._omfDir, "data", "Climate", climateFileName + ".tmy2"),
			pJoin(modelDir, 'climate.tmy2'))
		gridlabdRawOut = gridlabd.runInFilesystem(tree, attachments=attachments, workDir=modelDir)
		outData['gridlabdRawOut'] = gridlabdRawOut
	# Run RDT.
	print "RUNNING RDT FOR", modelDir
	rdtOutFile = modelDir + '/rdtOutput.json'
	rdtSolverFolder = pJoin(__neoMetaModel__._omfDir, 'solvers', 'rdt')
	rdtJarPath = pJoin(rdtSolverFolder, 'micot-rdt.jar')
	#TEST RUSSELL THING, DELETE WHEN DONE
	#shutil.copy(pJoin(__neoMetaModel__._omfDir, "scratch", "rdt_OUTPUTTEST.json"), pJoin(modelDir, 'rdt_OUTPUT.json'))
	#############
	proc = subprocess.Popen([
		'java', "-Djna.library.path=" + rdtSolverFolder, '-jar', rdtJarPath, '-c',
		rdtInputFilePath, '-e', rdtOutFile
	], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
	(stdout, stderr) = proc.communicate()
	with open(pJoin(modelDir, "rdtConsoleOut.txt"), "w") as rdtConsoleOut:
		rdtConsoleOut.write(stdout)
	rdtRawOut = open(rdtOutFile).read()
	outData['rdtRawOut'] = rdtRawOut
	# Indent the RDT output nicely.
	with open(pJoin(rdtOutFile), "w") as outFile:
		rdtOut = json.loads(rdtRawOut)
		json.dump(rdtOut, outFile, indent=4)
	# Generate and run 2nd copy of GridLAB-D model with changes specified by RDT.
	print "RUNNING GLD FOR", modelDir
	feederCopy = copy.deepcopy(feederModel)
	# Collect ids of lines RDT decided to build a switch on.
	lineSwitchList = []
	for line in rdtOut['design_solution']['lines']:
		if ('switch_built' in line):
			lineSwitchList.append(line['id'])
	# Remove nonessential lines in second model as indicated by RDT output.
	for key in feederCopy['tree'].keys():
		value = feederCopy['tree'][key]
		if ('object' in value):
			if (value['object'] == 'underground_line') or (value['object'] == 'overhead_line'):
				if value['name'] not in lineSwitchList:
					del feederCopy['tree'][key]
	#Add generators to second model.
	maxTreeKey = int(max(feederCopy['tree'], key=int)) + 1
	# Disabled generator-insertion block, kept for reference:
	'''for gen in rdtOut['design_solution']['generators']:
		newGen = {}
		newGen["object"] = "diesel_dg"
		newGen["name"] = gen['id']
		newGen["parent"] = gen['id'][:-4]
		newGen["phases"] = "ABC"
		newGen["Gen_type"] = "CONSTANT_PQ"
		newGen["Rated_VA"] = "5.0 kVA"
		newGen["power_out_A"] = "250.0+120.0j"
		newGen["power_out_B"] = "230.0+130.0j"
		newGen["power_out_C"] = "220.0+150.0j"
		feederCopy['tree'][str(maxTreeKey)] = newGen
		maxTreeKey = maxTreeKey + 1
	'''
	# NOTE(review): maxTreeKey is recomputed (and unused) since the block above
	# is disabled; this line is effectively dead code.
	maxTreeKey = max(feederCopy['tree'], key=int)
	# Load a blank glm file and use it to write to it
	feederPath = pJoin(modelDir, 'feederSecond.glm')
	with open(feederPath, 'w') as glmFile:
		toWrite = "module generators;\n\n" + omf.feeder.sortedWrite(feederCopy['tree']) + "object voltdump {\n\tfilename voltDump2ndRun.csv;\n};\nobject jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n" # + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
		glmFile.write(toWrite)
	# Run GridLAB-D second time.
	if platform.system() == "Windows":
		proc = subprocess.Popen(['gridlabd', 'feederSecond.glm'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=modelDir)
		(out, err) = proc.communicate()
		# Success flag: did the second run produce its voltage dump?
		outData["secondGLD"] = str(os.path.isfile(pJoin(modelDir, "voltDump2ndRun.csv")))
	else:
		# TODO: make 2nd run of GridLAB-D work on Unixes.
		outData["secondGLD"] = str(False)
	# Draw the feeder.
	genDiagram(modelDir, feederModel)
	with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile:
		outData["oneLineDiagram"] = inFile.read().encode("base64")
	# And we're done.
	return outData
def runForeground(modelDir, inputDict, fs):
	""" Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Runs GridLAB-D once per feeder named in inputDict (keys starting with
	'feederName'), post-processes each raw run into a cleanOut dict (voltages,
	climate, consumption, losses aggregated by hour/day/month), and writes
	per-feeder allOutputData.json / allInputData.json under
	modelDir/<feederName>/. `fs` is the OMF filesystem abstraction used to
	export feeder and climate files to the local workspace.
	"""
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	feederList = []
	# Get prepare of data and clean workspace if re-run, If re-run remove all
	# the data in the subfolders
	for dirs in os.listdir(modelDir):
		if os.path.isdir(pJoin(modelDir, dirs)):
			shutil.rmtree(pJoin(modelDir, dirs))
	# Get each feeder, prepare data in separate folders, and run there.
	for key in sorted(inputDict, key=inputDict.get):
		if key.startswith("feederName"):
			# Stored as "<feederDir>___<feederName>".
			feederDir, feederName = inputDict[key].split("___")
			feederList.append(feederName)
			try:
				# Best-effort removal of stale output from a previous run.
				os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
				fs.remove(pJoin(modelDir, feederName, "allOutputData.json"))
			except Exception, e:
				pass
			if not os.path.isdir(pJoin(modelDir, feederName)):
				# create subfolders for feeders
				os.makedirs(pJoin(modelDir, feederName))
			fs.export_from_fs_to_local(
				pJoin("data", "Feeder", feederDir, feederName + ".json"),
				pJoin(modelDir, feederName, "feeder.json")
			)
			inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"], fs)
			fs.export_from_fs_to_local(
				pJoin("data", "Climate", inputDict["climateName"] + ".tmy2"),
				pJoin(modelDir, feederName, "climate.tmy2"),
			)
			try:
				startTime = datetime.datetime.now()
				feederJson = json.load(open(pJoin(modelDir, feederName, "feeder.json")))
				tree = feederJson["tree"]
				# Set up GLM with correct time and recorders:
				feeder.attachRecorders(tree, "Regulator", "object", "regulator")
				feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
				feeder.attachRecorders(tree, "Inverter", "object", "inverter")
				feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
				feeder.attachRecorders(tree, "CollectorVoltage", None, None)
				feeder.attachRecorders(tree, "Climate", "object", "climate")
				feeder.attachRecorders(tree, "OverheadLosses", None, None)
				feeder.attachRecorders(tree, "UndergroundLosses", None, None)
				feeder.attachRecorders(tree, "TriplexLosses", None, None)
				feeder.attachRecorders(tree, "TransformerLosses", None, None)
				feeder.groupSwingKids(tree)
				feeder.adjustTime(
					tree=tree,
					simLength=float(inputDict["simLength"]),
					simLengthUnits=inputDict["simLengthUnits"],
					simStartDate=inputDict["simStartDate"],
				)
				if "attachments" in feederJson:
					attachments = feederJson["attachments"]
				else:
					attachments = []
				# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
				rawOut = gridlabd.runInFilesystem(
					tree, attachments=attachments, keepFiles=True, workDir=pJoin(modelDir, feederName)
				)
				cleanOut = {}
				# Std Err and Std Out
				cleanOut["stderr"] = rawOut["stderr"]
				cleanOut["stdout"] = rawOut["stdout"]
				# Time Stamps
				# NOTE(review): this inner loop shadows the outer `key` variable;
				# harmless only because `key` is not read again afterwards.
				for key in rawOut:
					if "# timestamp" in rawOut[key]:
						cleanOut["timeStamps"] = rawOut[key]["# timestamp"]
						break
					elif "# property.. timestamp" in rawOut[key]:
						cleanOut["timeStamps"] = rawOut[key]["# property.. timestamp"]
					else:
						cleanOut["timeStamps"] = []
				# Day/Month Aggregation Setup:
				stamps = cleanOut.get("timeStamps", [])
				level = inputDict.get("simLengthUnits", "hours")
				# Climate
				for key in rawOut:
					if key.startswith("Climate_") and key.endswith(".csv"):
						cleanOut["climate"] = {}
						cleanOut["climate"]["Rain Fall (in/h)"] = hdmAgg(
							rawOut[key].get("rainfall"), sum, level, stamps
						)
						cleanOut["climate"]["Wind Speed (m/s)"] = hdmAgg(
							rawOut[key].get("wind_speed"), avg, level, stamps
						)
						cleanOut["climate"]["Temperature (F)"] = hdmAgg(
							rawOut[key].get("temperature"), max, level, stamps
						)
						cleanOut["climate"]["Snow Depth (in)"] = hdmAgg(
							rawOut[key].get("snowdepth"), max, level, stamps
						)
						cleanOut["climate"]["Direct Insolation (W/m^2)"] = hdmAgg(
							rawOut[key].get("solar_direct"), sum, level, stamps
						)
				# Voltage Band
				if "VoltageJiggle.csv" in rawOut:
					cleanOut["allMeterVoltages"] = {}
					# NOTE(review): readings are halved (i / 2) — presumably to
					# convert the 240V voltage_12 magnitude to a 120V base; confirm.
					cleanOut["allMeterVoltages"]["Min"] = hdmAgg(
						[float(i / 2) for i in rawOut["VoltageJiggle.csv"]["min(voltage_12.mag)"]], min, level, stamps
					)
					cleanOut["allMeterVoltages"]["Mean"] = hdmAgg(
						[float(i / 2) for i in rawOut["VoltageJiggle.csv"]["mean(voltage_12.mag)"]], avg, level, stamps
					)
					cleanOut["allMeterVoltages"]["StdDev"] = hdmAgg(
						[float(i / 2) for i in rawOut["VoltageJiggle.csv"]["std(voltage_12.mag)"]], avg, level, stamps
					)
					cleanOut["allMeterVoltages"]["Max"] = hdmAgg(
						[float(i / 2) for i in rawOut["VoltageJiggle.csv"]["max(voltage_12.mag)"]], max, level, stamps
					)
				# Power Consumption
				cleanOut["Consumption"] = {}
				# Set default value to be 0, avoiding missing value when
				# computing Loads
				cleanOut["Consumption"]["Power"] = [0] * int(inputDict["simLength"])
				cleanOut["Consumption"]["Losses"] = [0] * int(inputDict["simLength"])
				cleanOut["Consumption"]["DG"] = [0] * int(inputDict["simLength"])
				# NOTE(review): because the defaults above are always set, the
				# "not in" branches below can never be taken — vecSum against the
				# zero lists always runs. Loop variable `key` shadows the outer one.
				for key in rawOut:
					if key.startswith("SwingKids_") and key.endswith(".csv"):
						# Apparent power at each swing bus: sqrt(P^2 + Q^2).
						oneSwingPower = hdmAgg(
							vecPyth(rawOut[key]["sum(power_in.real)"], rawOut[key]["sum(power_in.imag)"]),
							avg,
							level,
							stamps,
						)
						if "Power" not in cleanOut["Consumption"]:
							cleanOut["Consumption"]["Power"] = oneSwingPower
						else:
							cleanOut["Consumption"]["Power"] = vecSum(oneSwingPower, cleanOut["Consumption"]["Power"])
					elif key.startswith("Inverter_") and key.endswith(".csv"):
						realA = rawOut[key]["power_A.real"]
						realB = rawOut[key]["power_B.real"]
						realC = rawOut[key]["power_C.real"]
						imagA = rawOut[key]["power_A.imag"]
						imagB = rawOut[key]["power_B.imag"]
						imagC = rawOut[key]["power_C.imag"]
						oneDgPower = hdmAgg(
							vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB), vecPyth(realC, imagC)),
							avg,
							level,
							stamps,
						)
						if "DG" not in cleanOut["Consumption"]:
							cleanOut["Consumption"]["DG"] = oneDgPower
						else:
							cleanOut["Consumption"]["DG"] = vecSum(oneDgPower, cleanOut["Consumption"]["DG"])
					elif key.startswith("Windmill_") and key.endswith(".csv"):
						vrA = rawOut[key]["voltage_A.real"]
						vrB = rawOut[key]["voltage_B.real"]
						vrC = rawOut[key]["voltage_C.real"]
						viA = rawOut[key]["voltage_A.imag"]
						viB = rawOut[key]["voltage_B.imag"]
						viC = rawOut[key]["voltage_C.imag"]
						crB = rawOut[key]["current_B.real"]
						crA = rawOut[key]["current_A.real"]
						crC = rawOut[key]["current_C.real"]
						ciA = rawOut[key]["current_A.imag"]
						ciB = rawOut[key]["current_B.imag"]
						ciC = rawOut[key]["current_C.imag"]
						# Per-phase |V|*|I| products.
						powerA = vecProd(vecPyth(vrA, viA), vecPyth(crA, ciA))
						powerB = vecProd(vecPyth(vrB, viB), vecPyth(crB, ciB))
						powerC = vecProd(vecPyth(vrC, viC), vecPyth(crC, ciC))
						# HACK: multiply by negative one because turbine power
						# sign is opposite all other DG:
						oneDgPower = [-1.0 * x for x in hdmAgg(vecSum(powerA, powerB, powerC), avg, level, stamps)]
						if "DG" not in cleanOut["Consumption"]:
							cleanOut["Consumption"]["DG"] = oneDgPower
						else:
							cleanOut["Consumption"]["DG"] = vecSum(oneDgPower, cleanOut["Consumption"]["DG"])
					elif key in [
						"OverheadLosses.csv",
						"UndergroundLosses.csv",
						"TriplexLosses.csv",
						"TransformerLosses.csv",
					]:
						realA = rawOut[key]["sum(power_losses_A.real)"]
						imagA = rawOut[key]["sum(power_losses_A.imag)"]
						realB = rawOut[key]["sum(power_losses_B.real)"]
						imagB = rawOut[key]["sum(power_losses_B.imag)"]
						realC = rawOut[key]["sum(power_losses_C.real)"]
						imagC = rawOut[key]["sum(power_losses_C.imag)"]
						oneLoss = hdmAgg(
							vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB), vecPyth(realC, imagC)),
							avg,
							level,
							stamps,
						)
						if "Losses" not in cleanOut["Consumption"]:
							cleanOut["Consumption"]["Losses"] = oneLoss
						else:
							cleanOut["Consumption"]["Losses"] = vecSum(oneLoss, cleanOut["Consumption"]["Losses"])
				# Aggregate up the timestamps:
				if level == "days":
					cleanOut["timeStamps"] = aggSeries(stamps, stamps, lambda x: x[0][0:10], "days")
				elif level == "months":
					cleanOut["timeStamps"] = aggSeries(stamps, stamps, lambda x: x[0][0:7], "months")
				# Write the output.
				with open(pJoin(modelDir, feederName, "allOutputData.json"), "w") as outFile:
					json.dump(cleanOut, outFile, indent=4)
				# Update the runTime in the input file.
				endTime = datetime.datetime.now()
				inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
				with open(pJoin(modelDir, feederName, "allInputData.json"), "w") as inFile:
					json.dump(inputDict, inFile, indent=4)
				# Clean up the PID file.
				os.remove(pJoin(modelDir, feederName, "PID.txt"))
				print "DONE RUNNING GRIDLABMULTI", modelDir, feederName
			except Exception as e:
				# Per-feeder failure: mark the run canceled and append the traceback.
				print "MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName
				cancel(pJoin(modelDir, feederName))
				with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
					traceback.print_exc(file=stderrFile)
def runForeground(modelDir, test_mode=False):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Self-contained variant: reads allInputData.json from modelDir, runs
	GridLAB-D once per feeder .omd found in modelDir, writes per-feeder output,
	then merges all per-feeder results into modelDir/allOutputData.json and
	optionally emails the user on success/failure. If test_mode is True,
	per-feeder crashes re-raise instead of being logged and swallowed.
	'''
	with open(pJoin(modelDir, 'allInputData.json')) as f:
		inputDict = json.load(f)
	print("STARTING TO RUN", modelDir)
	beginTime = datetime.datetime.now()
	# Get prepare of data and clean workspace if re-run, If re-run remove all the data in the subfolders
	for dirs in os.listdir(modelDir):
		if os.path.isdir(pJoin(modelDir, dirs)):
			shutil.rmtree(pJoin(modelDir, dirs))
	# Get the names of the feeders from the .omd files:
	feederNames = [x[0:-4] for x in os.listdir(modelDir) if x.endswith(".omd")]
	for i, key in enumerate(feederNames):
		inputDict['feederName' + str(i + 1)] = feederNames[i]
	# Run GridLAB-D once for each feeder:
	for feederName in feederNames:
		try:
			# Best-effort removal of stale output from a previous run.
			os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
		except Exception as e:
			pass
		if not os.path.isdir(pJoin(modelDir, feederName)):
			os.makedirs(pJoin(modelDir, feederName))  # create subfolders for feeders
		shutil.copy(pJoin(modelDir, feederName + ".omd"),
			pJoin(modelDir, feederName, "feeder.omd"))
		inputDict["climateName"] = weather.zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(
			pJoin(_omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, feederName, "climate.tmy2"))
		try:
			startTime = datetime.datetime.now()
			with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
				feederJson = json.load(f)
			tree = feederJson["tree"]
			# Set up GLM with correct time and recorders:
			feeder.attachRecorders(tree, "Regulator", "object", "regulator")
			feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
			feeder.attachRecorders(tree, "Inverter", "object", "inverter")
			feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
			feeder.attachRecorders(tree, "CollectorVoltage", None, None)
			feeder.attachRecorders(tree, "Climate", "object", "climate")
			feeder.attachRecorders(tree, "OverheadLosses", None, None)
			feeder.attachRecorders(tree, "UndergroundLosses", None, None)
			feeder.attachRecorders(tree, "TriplexLosses", None, None)
			feeder.attachRecorders(tree, "TransformerLosses", None, None)
			feeder.groupSwingKids(tree)
			feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
				simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
			# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
			rawOut = gridlabd.runInFilesystem(
				tree, attachments=feederJson["attachments"], keepFiles=True,
				workDir=pJoin(modelDir, feederName))
			cleanOut = {}
			# Std Err and Std Out
			cleanOut['stderr'] = rawOut['stderr']
			cleanOut['stdout'] = rawOut['stdout']
			# Time Stamps
			# NOTE(review): this loop shadows the outer `key`; harmless only
			# because `key` is not read again afterwards.
			for key in rawOut:
				if '# timestamp' in rawOut[key]:
					cleanOut['timeStamps'] = rawOut[key]['# timestamp']
					break
				elif '# property.. timestamp' in rawOut[key]:
					cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
				else:
					cleanOut['timeStamps'] = []
			# Day/Month Aggregation Setup:
			stamps = cleanOut.get('timeStamps', [])
			level = inputDict.get('simLengthUnits', 'hours')
			# Climate
			for key in rawOut:
				if key.startswith('Climate_') and key.endswith('.csv'):
					cleanOut['climate'] = {}
					cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(
						rawOut[key].get('rainfall'), sum, level)
					cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(
						rawOut[key].get('wind_speed'), avg, level)
					cleanOut['climate']['Temperature (F)'] = hdmAgg(
						rawOut[key].get('temperature'), max, level)
					cleanOut['climate']['Snow Depth (in)'] = hdmAgg(
						rawOut[key].get('snowdepth'), max, level)
					cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(
						rawOut[key].get('solar_direct'), sum, level)
			# Voltage Band
			if 'VoltageJiggle.csv' in rawOut:
				cleanOut['allMeterVoltages'] = {}
				# NOTE(review): readings are halved (i / 2) — presumably to convert
				# the 240V voltage_12 magnitude to a 120V base; confirm.
				cleanOut['allMeterVoltages']['Min'] = hdmAgg([
					(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']
				], min, level)
				cleanOut['allMeterVoltages']['Mean'] = hdmAgg(
					[(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
				cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([
					(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']
				], avg, level)
				cleanOut['allMeterVoltages']['Max'] = hdmAgg([
					(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']
				], max, level)
				# Mean +/- half a standard deviation, for the voltage band chart.
				cleanOut['allMeterVoltages']['stdDevPos'] = [
					(x + y / 2) for x, y in zip(cleanOut['allMeterVoltages']['Mean'],
						cleanOut['allMeterVoltages']['StdDev'])
				]
				cleanOut['allMeterVoltages']['stdDevNeg'] = [
					(x - y / 2) for x, y in zip(cleanOut['allMeterVoltages']['Mean'],
						cleanOut['allMeterVoltages']['StdDev'])
				]
				# Total # of meters (counted textually from the .omd file).
				count = 0
				with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
					for line in f:
						if "\"objectType\": \"triplex_meter\"" in line:
							count += 1
				# print "count=", count
				cleanOut['allMeterVoltages']['triplexMeterCount'] = float(count)
			# Power Consumption
			cleanOut['Consumption'] = {}
			# Set default value to be 0, avoiding missing value when computing Loads
			cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
			cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
			cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
			# NOTE(review): because the defaults above are always set, the
			# "not in" branches below can never be taken.
			for key in rawOut:
				if key.startswith('SwingKids_') and key.endswith('.csv'):
					# Apparent power at each swing bus: sqrt(P^2 + Q^2).
					oneSwingPower = hdmAgg(
						vecPyth(rawOut[key]['sum(power_in.real)'],
							rawOut[key]['sum(power_in.imag)']), avg, level)
					if 'Power' not in cleanOut['Consumption']:
						cleanOut['Consumption']['Power'] = oneSwingPower
					else:
						cleanOut['Consumption']['Power'] = vecSum(
							oneSwingPower, cleanOut['Consumption']['Power'])
				elif key.startswith('Inverter_') and key.endswith('.csv'):
					realA = rawOut[key]['power_A.real']
					realB = rawOut[key]['power_B.real']
					realC = rawOut[key]['power_C.real']
					imagA = rawOut[key]['power_A.imag']
					imagB = rawOut[key]['power_B.imag']
					imagC = rawOut[key]['power_C.imag']
					oneDgPower = hdmAgg(
						vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
							vecPyth(realC, imagC)), avg, level)
					if 'DG' not in cleanOut['Consumption']:
						cleanOut['Consumption']['DG'] = oneDgPower
					else:
						cleanOut['Consumption']['DG'] = vecSum(
							oneDgPower, cleanOut['Consumption']['DG'])
				elif key.startswith('Windmill_') and key.endswith('.csv'):
					vrA = rawOut[key]['voltage_A.real']
					vrB = rawOut[key]['voltage_B.real']
					vrC = rawOut[key]['voltage_C.real']
					viA = rawOut[key]['voltage_A.imag']
					viB = rawOut[key]['voltage_B.imag']
					viC = rawOut[key]['voltage_C.imag']
					crB = rawOut[key]['current_B.real']
					crA = rawOut[key]['current_A.real']
					crC = rawOut[key]['current_C.real']
					ciA = rawOut[key]['current_A.imag']
					ciB = rawOut[key]['current_B.imag']
					ciC = rawOut[key]['current_C.imag']
					# Per-phase |V|*|I| products.
					powerA = vecProd(vecPyth(vrA, viA), vecPyth(crA, ciA))
					powerB = vecProd(vecPyth(vrB, viB), vecPyth(crB, ciB))
					powerC = vecProd(vecPyth(vrC, viC), vecPyth(crC, ciC))
					# HACK: multiply by negative one because turbine power sign is opposite all other DG:
					oneDgPower = [
						-1.0 * x for x in hdmAgg(
							vecSum(powerA, powerB, powerC), avg, level)
					]
					if 'DG' not in cleanOut['Consumption']:
						cleanOut['Consumption']['DG'] = oneDgPower
					else:
						cleanOut['Consumption']['DG'] = vecSum(
							oneDgPower, cleanOut['Consumption']['DG'])
				elif key in [
					'OverheadLosses.csv', 'UndergroundLosses.csv',
					'TriplexLosses.csv', 'TransformerLosses.csv'
				]:
					realA = rawOut[key]['sum(power_losses_A.real)']
					imagA = rawOut[key]['sum(power_losses_A.imag)']
					realB = rawOut[key]['sum(power_losses_B.real)']
					imagB = rawOut[key]['sum(power_losses_B.imag)']
					realC = rawOut[key]['sum(power_losses_C.real)']
					imagC = rawOut[key]['sum(power_losses_C.imag)']
					oneLoss = hdmAgg(
						vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
							vecPyth(realC, imagC)), avg, level)
					if 'Losses' not in cleanOut['Consumption']:
						cleanOut['Consumption']['Losses'] = oneLoss
					else:
						cleanOut['Consumption']['Losses'] = vecSum(
							oneLoss, cleanOut['Consumption']['Losses'])
			# Aggregate up the timestamps:
			if level == 'days':
				cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x: x[0][0:10], 'days')
			elif level == 'months':
				cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x: x[0][0:7], 'months')
			# Write the output.
			with open(pJoin(modelDir, feederName, "allOutputData.json"), "w") as outFile:
				json.dump(cleanOut, outFile, indent=4)
			# Update the runTime in the input file.
			endTime = datetime.datetime.now()
			inputDict["runTime"] = str(
				datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
			with open(pJoin(modelDir, feederName, "allInputData.json"), "w") as inFile:
				json.dump(inputDict, inFile, indent=4)
			# Clean up the PID file.
			os.remove(pJoin(modelDir, feederName, "PID.txt"))
			print("DONE RUNNING GRIDLABMULTI", modelDir, feederName)
		except Exception as e:
			# In test mode, surface the failure to the test harness instead of logging.
			if test_mode == True:
				raise e
			print("MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName)
			cancel(pJoin(modelDir, feederName))
			with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
				traceback.print_exc(file=stderrFile)
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(
		datetime.timedelta(seconds=int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
		json.dump(inputDict, inFile, indent=4)
	# Integrate data into allOutputData.json, if error happens, cancel it
	try:
		output = {}
		output["failures"] = {}
		numOfFeeders = 0
		for root, dirs, files in os.walk(modelDir):
			# dump error info into dict
			if "stderr.txt" in files:
				with open(pJoin(root, "stderr.txt"), "r") as stderrFile:
					tempString = stderrFile.read()
					if "ERROR" in tempString or "FATAL" in tempString or "Traceback" in tempString:
						output["failures"]["feeder_" + str(os.path.split(root)[-1])] = {
							"stderr": tempString
						}
						continue
			# dump simulated data into dict
			if "allOutputData.json" in files:
				with open(pJoin(root, "allOutputData.json"), "r") as feederOutputData:
					numOfFeeders += 1
					feederOutput = json.load(feederOutputData)
					# TODO: a better feeder name
					output["feeder_" + str(os.path.split(root)[-1])] = {}
					output["feeder_" + str(os.path.split(root)[-1])]["Consumption"] = feederOutput["Consumption"]
					output["feeder_" + str(os.path.split(root)[-1])]["allMeterVoltages"] = feederOutput["allMeterVoltages"]
					output["feeder_" + str(os.path.split(root)[-1])]["stderr"] = feederOutput["stderr"]
					output["feeder_" + str(os.path.split(root)[-1])]["stdout"] = feederOutput["stdout"]
					# output[root] = {feederOutput["Consumption"], feederOutput["allMeterVoltages"], feederOutput["stdout"], feederOutput["stderr"]}
		output["numOfFeeders"] = numOfFeeders
		# NOTE(review): timeStamps/climate are taken from whichever feederOutput
		# was loaded last by the walk above; raises NameError if none was found.
		output["timeStamps"] = feederOutput.get("timeStamps", [])
		output["climate"] = feederOutput.get("climate", [])
		# Add feederNames to output so allInputData feederName changes don't cause output rendering to disappear.
		for key, feederName in inputDict.items():
			if 'feederName' in key:
				output[key] = feederName
		with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile:
			json.dump(output, outFile, indent=4)
		try:
			os.remove(pJoin(modelDir, "PPID.txt"))
		except:
			pass
		# Send email to user on model success.
		emailStatus = inputDict.get('emailStatus', 0)
		if (emailStatus == "on"):
			print("\n EMAIL ALERT ON")
			email = session['user_id']
			try:
				with open("data/User/" + email + ".json") as f:
					user = json.load(f)
				modelPath, modelName = pSplit(modelDir)
				message = "The model " + "<i>" + str(
					modelName
				) + "</i>" + " has successfully completed running. It ran for a total of " + str(
					inputDict["runTime"]) + " seconds from " + str(
					beginTime) + ", to " + str(finishTime) + "."
				return web.send_link(email, message, user)
			except Exception as e:
				# NOTE(review): "******" below appears to be a credential-scrubbing
				# artifact (likely replaced `+ email +`); this line is not valid
				# Python as-is — confirm against the upstream source.
				print("ERROR: Failed sending model status email to user: "******", with exception: \n", e)
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print('ERROR IN MODEL', modelDir, thisErr)
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Send email to user on model failure.
		email = 'NoEmail'
		try:
			email = session['user_id']
			with open("data/User/" + email + ".json") as f:
				user = json.load(f)
			modelPath, modelName = pSplit(modelDir)
			message = "The model " + "<i>" + str(
				modelName
			) + "</i>" + " has failed to complete running. It ran for a total of " + str(
				inputDict["runTime"]) + " seconds from " + str(
				beginTime) + ", to " + str(finishTime) + "."
			return web.send_link(email, message, user)
		except Exception as e:
			# NOTE(review): "******" below is the same scrubbing artifact as above.
			print("Failed sending model status email to user: "******", with exception: \n", e)
def work(modelDir,inputDict):
	'''Run a CVR (conservation voltage reduction) study on a pre-calibrated feeder.

	Reads the feeder's .omd from modelDir, runs a baseline GridLAB-D powerflow,
	then attaches a volt_var_control (IVVC) object and reruns, and finally charts
	(PNG files written into modelDir) and monetizes the difference between the
	two runs.

	modelDir  -- directory holding the model's .omd feeder; also receives all
	             chart PNGs and serves as the GridLAB-D working directory.
	inputDict -- model parameters (simLengthHours, simStart, per-season
	             peakDemandCost*PerKw, wholesale/retail energy costs, omCost,
	             capitalCost, voltageNodes, ...); mutated here to record
	             'feederName1'.
	Returns outData, a dict of time series and monetization results for the UI.
	Raises ValueError when no swing bus / substation regulator can be located.
	'''
	# Pick the first .omd in the model directory; "[:-4]" strips the extension.
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict['feederName1'] = feederName
	feederPath = pJoin(modelDir,feederName+'.omd')
	# Reads a pre-calibrated feeder.
	outData = {}
	with open(feederPath, 'r') as jsonIn:
		feederJson = json.load(jsonIn)
		localTree = feederJson.get('tree', {})
		attachments = feederJson.get('attachments', {})
	# Force the FBS powerflow solver on every object that declares one.
	for key in localTree:
		if 'solver_method' in localTree[key].keys():
			localTree[key]['solver_method'] = 'FBS'
	# Find the swing bus and the regulator fed from it (the substation link).
	# NOTE(review): if dict iteration visits a regulator before the swing bus,
	# swingName is unbound (NameError) and the bare except re-labels that as an
	# "invalid feeder"; also regIndex/swingIndex stay unbound if nothing matches.
	try:
		for key in localTree:
			if localTree[key].get('bustype','').lower() == 'swing':
				swingIndex = key
				swingName = localTree[key].get('name')
			if localTree[key].get('object','') == 'regulator' and localTree[key].get('from','') == swingName:
				regIndex = key
				regConfName = localTree[key]['configuration']
	except:
		raise ValueError('Invalid feeder selected:', str(inputDict['feederName1']))
	# Collect regulator names into a comma-separated string for the volt-var
	# control object's regulator_list.
	regKeys = []
	accum_reg = ''
	for key in localTree:
		if localTree[key].get('object','') == 'regulator':
			accum_reg += localTree[key].get('name','ERROR') + ','
			regKeys.append(key)
	regstr = accum_reg[:-1]  # drop trailing comma
	# Same for capacitors; manual-control caps are switched to VOLT control so
	# the IVVC scheme can drive them.
	capKeys = []
	accum_cap = ''
	for key in localTree:
		if localTree[key].get('object','') == 'capacitor':
			accum_cap += localTree[key].get('name','ERROR') + ','
			capKeys.append(key)
			if localTree[key].get('control','').lower() == 'manual':
				localTree[key]['control'] = 'VOLT'
	capstr = accum_cap[:-1]  # drop trailing comma
	# Attach recorders relevant to CVR. The 'Z' file-name prefix marks the
	# baseline run; these names get rewritten to 'NewZ' before the IVVC rerun.
	recorders = [
		{'object': 'collector', 'file': 'ZlossesTransformer.csv', 'group': 'class=transformer', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
		{'object': 'collector', 'file': 'ZlossesUnderground.csv', 'group': 'class=underground_line', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
		{'object': 'collector', 'file': 'ZlossesOverhead.csv', 'group': 'class=overhead_line', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
		{'object': 'recorder', 'file': 'Zregulator.csv', 'limit': '0', 'parent': localTree[regIndex]['name'], 'property': 'tap_A,tap_B,tap_C,power_in.real,power_in.imag'},
		{'object': 'collector', 'file': 'ZvoltageJiggle.csv', 'group': 'class=triplex_meter', 'limit': '0', 'property': 'min(voltage_12.mag),mean(voltage_12.mag),max(voltage_12.mag),std(voltage_12.mag)'},
		{'object': 'recorder', 'file': 'ZsubstationTop.csv', 'limit': '0', 'parent': localTree[swingIndex]['name'], 'property': 'voltage_A,voltage_B,voltage_C'},
		{'object': 'recorder', 'file': 'ZsubstationBottom.csv', 'limit': '0', 'parent': localTree[regIndex]['to'], 'property': 'voltage_A,voltage_B,voltage_C'}]
	# Recorder object for capacitor switching - only if capacitors exist.
	if capKeys != []:
		for key in capKeys:
			recorders.append({'object': 'recorder', 'file': 'ZcapSwitch' + str(key) + '.csv', 'limit': '0', 'parent': localTree[key]['name'], 'property': 'switchA,switchB,switchC'})
	# Attach the recorders under fresh (unused) integer keys of the tree.
	biggest = 1 + max([int(k) for k in localTree.keys()])
	for index, rec in enumerate(recorders):
		localTree[biggest + index] = rec
	# Run a reference (no-IVVC) load flow.
	HOURS = float(inputDict['simLengthHours'])
	simStartDate = inputDict['simStart']
	feeder.adjustTime(localTree,HOURS,'hours',simStartDate)
	output = gridlabd.runInFilesystem(localTree, attachments, keepFiles=False,workDir=modelDir)
	try:
		os.remove(pJoin(modelDir,'PID.txt'))
	except:
		pass
	# Substation real/reactive power for the baseline run.
	p = output['Zregulator.csv']['power_in.real']
	q = output['Zregulator.csv']['power_in.imag']
	# Actual simulation length; might be different from the requested HOURS.
	simRealLength = int(len(p))
	# Control time delays: defaults, overridden from regulator configuration.
	time_delay_reg = '30.0'
	time_delay_cap = '300.0'
	for key in localTree:
		if localTree[key].get('object','') == 'regulator_configuration':
			time_delay_reg = localTree[key]['time_delay']
		# if localTree[key].get('object','') == "capacitor":
		# 	time_delay_cap = localTree[key]['time_delay']
	# Rename the recorder output files (Z -> NewZ) so the IVVC rerun writes
	# alongside, instead of over, the baseline CSVs.
	for key in localTree:
		if localTree[key].get('object','') == 'collector' or localTree[key].get('object','') == 'recorder':
			if localTree[key].get('file','').startswith('Z'):
				localTree[key]['file'] = localTree[key].get('file','').replace('Z','NewZ')
	# Create the volt-var control (IVVC) object and add it to the tree.
	max_key = max([int(key) for key in localTree.keys()])
	localTree[max_key+1] = {'object' : 'volt_var_control',
		'name' : 'IVVC1',
		'control_method' : 'ACTIVE',
		'capacitor_delay' : str(time_delay_cap),
		'regulator_delay' : str(time_delay_reg),
		'desired_pf' : '0.99',
		'd_max' : '0.6',
		'd_min' : '0.1',
		'substation_link' : str(localTree[regIndex]['name']),
		'regulator_list' : regstr,
		'capacitor_list': capstr,
		'voltage_measurements': str(inputDict.get('voltageNodes', 'IVVC1')),
	}
	# Rerun the powerflow with the IVVC object attached.
	feeder.adjustTime(localTree,HOURS,'hours',simStartDate)
	output1 = gridlabd.runInFilesystem(localTree,attachments,keepFiles=True,workDir=modelDir)
	os.remove(pJoin(modelDir,'PID.txt'))
	pnew = output1['NewZregulator.csv']['power_in.real']
	qnew = output1['NewZregulator.csv']['power_in.imag']
	# Total real and imaginary losses as a function of time.
	def vecSum(u,v):
		''' Add vectors u and v element-wise. Return has len <= len(u) and <=len(v). '''
		return map(sum, zip(u,v))
	def zeroVec(length):
		''' Give a zero vector of input length. '''
		return [0 for x in xrange(length)]
	# NOTE(review): loss accumulators are sized to the requested HOURS, while
	# the recorded series have simRealLength samples; zip() in vecSum truncates
	# to the shorter of the two.
	(realLoss, imagLoss, realLossnew, imagLossnew) = (zeroVec(int(HOURS)) for x in range(4))
	for device in ['ZlossesOverhead.csv','ZlossesTransformer.csv','ZlossesUnderground.csv']:
		for letter in ['A','B','C']:
			realLoss = vecSum(realLoss, output[device]['sum(power_losses_' + letter + '.real)'])
			imagLoss = vecSum(imagLoss, output[device]['sum(power_losses_' + letter + '.imag)'])
			realLossnew = vecSum(realLossnew, output1['New'+device]['sum(power_losses_' + letter + '.real)'])
			imagLossnew = vecSum(imagLossnew, output1['New'+device]['sum(power_losses_' + letter + '.imag)'])
	# Voltage calculations and tap calculations.
	def divby2(u):
		'''divides by 2'''
		return u/2
	lowVoltage = []
	meanVoltage = []
	highVoltage = []
	lowVoltagenew = []
	meanVoltagenew = []
	highVoltagenew = []
	tap = {'A':[],'B':[],'C':[]}
	tapnew = {'A':[],'B':[],'C':[]}
	volt = {'A':[],'B':[],'C':[]}
	voltnew = {'A':[],'B':[],'C':[]}
	switch = {'A':[],'B':[],'C':[]}
	switchnew = {'A':[],'B':[],'C':[]}
	for letter in ['A','B','C']:
		tap[letter] = output['Zregulator.csv']['tap_' + letter]
		tapnew[letter] = output1['NewZregulator.csv']['tap_' + letter]
		# Cap switch states only exist if the feeder had capacitors; only the
		# first capacitor's recorder (capKeys[0]) is charted.
		if capKeys != []:
			switch[letter] = output['ZcapSwitch' + str(int(capKeys[0])) + '.csv']['switch'+ letter]
			switchnew[letter] = output1['NewZcapSwitch' + str(int(capKeys[0])) + '.csv']['switch'+ letter]
		volt[letter] = map(returnMag,output['ZsubstationBottom.csv']['voltage_'+letter])
		voltnew[letter] = map(returnMag,output1['NewZsubstationBottom.csv']['voltage_'+letter])
	# Triplex meter stats are line-to-line (120/240 V); halve to per-leg values.
	lowVoltage = map(divby2,output['ZvoltageJiggle.csv']['min(voltage_12.mag)'])
	lowVoltagenew = map(divby2,output1['NewZvoltageJiggle.csv']['min(voltage_12.mag)'])
	meanVoltage = map(divby2,output['ZvoltageJiggle.csv']['mean(voltage_12.mag)'])
	meanVoltagenew = map(divby2,output1['NewZvoltageJiggle.csv']['mean(voltage_12.mag)'])
	highVoltage = map(divby2,output['ZvoltageJiggle.csv']['max(voltage_12.mag)'])
	highVoltagenew = map(divby2,output1['NewZvoltageJiggle.csv']['max(voltage_12.mag)'])
	# Energy calculations, in MWh: index 0 = baseline, index 1 = with IVVC.
	whEnergy = []
	whLosses = []
	whLoads = []
	whEnergy.append(sum(p)/10**6)
	whLosses.append(sum(realLoss)/10**6)
	whLoads.append((sum(p)-sum(realLoss))/10**6)
	whEnergy.append(sum(pnew)/10**6)
	whLosses.append(sum(realLossnew)/10**6)
	whLoads.append((sum(pnew)-sum(realLossnew))/10**6)
	indices = ['No IVVC', 'With IVVC']
	# energySalesRed = (whLoads[1]-whLoads[0])*(inputDict['wholesaleEnergyCostPerKwh'])*1000
	# lossSav = (whLosses[0]-whLosses[1])*inputDict['wholesaleEnergyCostPerKwh']*1000
	# Plot: total energy bar chart (load and losses, with/without IVVC).
	ticks = []
	plt.clf()
	plt.title('total energy')
	plt.ylabel('total load and losses (MWh)')
	for element in range(2):
		ticks.append(element)
		bar_loss = plt.bar(element, whLosses[element], 0.15, color= 'red')
		bar_load = plt.bar(element+0.15, whLoads[element], 0.15, color= 'orange')
	plt.legend([bar_load[0],bar_loss[0]],['total load', 'total losses'],bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode='expand', borderaxespad=0.1)
	plt.xticks([t+0.15 for t in ticks],indices)
	plt.savefig(pJoin(modelDir,'totalEnergy.png'))
	# Plot: real and reactive power at the substation, baseline vs IVVC.
	plt.figure('real power')
	plt.title('Real Power at substation')
	plt.ylabel('substation real power (MW)')
	pMW = [element/10**6 for element in p]
	pMWn = [element/10**6 for element in pnew]
	pw = plt.plot(pMW)
	npw = plt.plot(pMWn)
	plt.legend([pw[0], npw[0]], ['NO IVVC','WITH IVVC'],bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode='expand', borderaxespad=0.1)
	plt.savefig(pJoin(modelDir,'realPower.png'))
	plt.figure('Reactive power')
	plt.title('Reactive Power at substation')
	plt.ylabel('substation reactive power (MVAR)')
	qMVAR = [element/10**6 for element in q]
	qMVARn = [element/10**6 for element in qnew]
	iw = plt.plot(qMVAR)
	niw = plt.plot(qMVARn)
	plt.legend([iw[0], niw[0]], ['NO IVVC','WITH IVVC'],bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode='expand', borderaxespad=0.1)
	plt.savefig(pJoin(modelDir,'imaginaryPower.png'))
	# Plot: min/mean/max feeder voltages over time, baseline vs IVVC.
	plt.figure('voltages as a function of time')
	f,ax = plt.subplots(2,sharex=True)
	f.suptitle('Min and Max voltages on the feeder')
	lv = ax[0].plot(lowVoltage,color = 'cadetblue')
	mv = ax[0].plot(meanVoltage,color = 'blue')
	hv = ax[0].plot(highVoltage, color = 'cadetblue')
	ax[0].legend([lv[0], mv[0], hv[0]], ['low voltage','mean voltage','high voltage'],bbox_to_anchor=(0., 0.915, 1., .1), loc=3, ncol=3, mode='expand', borderaxespad=0.1)
	ax[0].set_ylabel('NO IVVC')
	nlv = ax[1].plot(lowVoltagenew,color = 'cadetblue')
	nmv = ax[1].plot(meanVoltagenew,color = 'blue')
	nhv = ax[1].plot(highVoltagenew, color = 'cadetblue')
	ax[1].set_ylabel('WITH IVVC')
	plt.savefig(pJoin(modelDir,'Voltages.png'))
	# Plot: regulator tap positions per phase, baseline (rows 0-2) vs IVVC (3-5).
	plt.figure('TAP positions NO IVVC')
	f,ax = plt.subplots(6,sharex=True)
	f.set_size_inches(10,12.0)
	#f.suptitle("Regulator Tap positions")
	ax[0].plot(tap['A'])
	ax[0].set_title('Regulator Tap positions NO IVVC')
	ax[0].set_ylabel('TAP A')
	ax[1].plot(tap['B'])
	ax[1].set_ylabel('TAP B')
	ax[2].plot(tap['C'])
	ax[2].set_ylabel('TAP C')
	ax[3].plot(tapnew['A'])
	ax[3].set_title('WITH IVVC')
	ax[3].set_ylabel('TAP A')
	ax[4].plot(tapnew['B'])
	ax[4].set_ylabel('TAP B')
	ax[5].plot(tapnew['C'])
	ax[5].set_ylabel('TAP C')
	for subplot in range(6):
		ax[subplot].set_ylim(-20,20)
	f.tight_layout()
	plt.savefig(pJoin(modelDir,'RegulatorTAPpositions.png'))
	# Plot: substation voltages per phase, baseline vs IVVC.
	plt.figure('substation voltage as a function of time')
	f,ax = plt.subplots(6,sharex=True)
	f.set_size_inches(10,12.0)
	#f.suptitle("voltages at substation NO IVVC")
	ax[0].plot(volt['A'])
	ax[0].set_title('Substation voltages NO IVVC')
	ax[0].set_ylabel('voltage A')
	ax[1].plot(volt['B'])
	ax[1].set_ylabel('voltage B')
	ax[2].plot(volt['C'])
	ax[2].set_ylabel('voltage C')
	ax[3].plot(voltnew['A'])
	ax[3].set_title('WITH IVVC')
	ax[3].set_ylabel('voltage A')
	ax[4].plot(voltnew['B'])
	ax[4].set_ylabel('voltage B')
	ax[5].plot(voltnew['C'])
	ax[5].set_ylabel('voltage C')
	f.tight_layout()
	plt.savefig(pJoin(modelDir,'substationVoltages.png'))
	# Plot: capacitor switch states per phase, baseline vs IVVC.
	# NOTE(review): switch series are empty lists when the feeder has no caps.
	plt.figure('capacitor switch state as a function of time')
	f,ax = plt.subplots(6,sharex=True)
	f.set_size_inches(10,12.0)
	#f.suptitle("Capacitor switch state NO IVVC")
	ax[0].plot(switch['A'])
	ax[0].set_title('Capacitor switch state NO IVVC')
	ax[0].set_ylabel('switch A')
	ax[1].plot(switch['B'])
	ax[1].set_ylabel('switch B')
	ax[2].plot(switch['C'])
	ax[2].set_ylabel('switch C')
	ax[3].plot(switchnew['A'])
	ax[3].set_title('WITH IVVC')
	ax[3].set_ylabel('switch A')
	ax[4].plot(switchnew['B'])
	ax[4].set_ylabel('switch B')
	ax[5].plot(switchnew['C'])
	ax[5].set_ylabel('switch C')
	for subplot in range(6):
		ax[subplot].set_ylim(-2,2)
	f.tight_layout()
	plt.savefig(pJoin(modelDir,'capacitorSwitch.png'))
	#plt.show()
	# Monetization: map simulation hours onto calendar months/seasons.
	monthNames = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
	monthToSeason = {'January':'Winter','February':'Winter','March':'Spring','April':'Spring',
		'May':'Spring','June':'Summer','July':'Summer','August':'Summer',
		'September':'Fall','October':'Fall','November':'Fall','December':'Winter'}
	# Calculate the month and hour of simulation start and simulation end.
	simStartTimestamp = simStartDate + ' 00:00:00'
	simFormattedDate = dt.strptime(simStartTimestamp,'%Y-%m-%d %H:%M:%S')
	simStartMonthNum = int(simFormattedDate.strftime('%m'))
	simstartMonth = monthNames[simStartMonthNum-1]
	simStartDay = int(simFormattedDate.strftime('%d'))
	if calendar.isleap(int(simFormattedDate.strftime('%Y'))):
		febDays = 29
	else:
		febDays = 28
	monthHours = [int(31*24),int(febDays*24),int(31*24),int(30*24),int(31*24),int(30*24),int(31*24),int(31*24),int(30*24),int(31*24),int(30*24),int(31*24)]
	# Hour-of-year index where the simulation starts.
	simStartIndex = int(sum(monthHours[:(simStartMonthNum-1)])+(simStartDay-1)*24)
	temp = 0
	cumulHours = [0]
	for x in range(12):
		temp += monthHours[x]
		cumulHours.append(temp)
	# Find which month bin the simulation's last hour lands in.
	# NOTE(review): simEndMonth stays unbound (NameError at simMonthList below)
	# if the simulation extends past the end of the calendar year.
	for i in range((simStartMonthNum),13):
		if int(simStartIndex+simRealLength)<=cumulHours[i] and int(simStartIndex+simRealLength)>cumulHours[i-1]:
			simEndMonthNum = i-1
			simEndMonth = monthNames[simEndMonthNum]
	# Calculate per-month peaks and dollar impacts for the simulated months.
	previndex = 0
	monthPeak = {}
	monthPeakNew = {}
	peakSaveDollars = {}
	energyLostDollars = {}
	lossRedDollars = {}
	simMonthList = monthNames[monthNames.index(simstartMonth):(monthNames.index(simEndMonth)+1)]
	for monthElement in simMonthList:
		month = monthNames.index(monthElement)
		index1 = int(previndex)
		index2 = int(min((index1 + int(monthHours[month])), simRealLength))
		# Peak demand in kW (series are in W), with and without IVVC.
		monthPeak[monthElement] = max(p[index1:index2])/1000.0
		monthPeakNew[monthElement] = max(pnew[index1:index2])/1000.0
		# Demand-charge savings from peak reduction, priced by season.
		peakSaveDollars[monthElement] = (monthPeak[monthElement]-monthPeakNew[monthElement])*float(inputDict['peakDemandCost'+str(monthToSeason[monthElement])+'PerKw'])
		# Savings from reduced losses, at wholesale energy cost.
		lossRedDollars[monthElement] = (sum(realLoss[index1:index2])/1000.0 - sum(realLossnew[index1:index2])/1000.0)*(float(inputDict['wholesaleEnergyCostPerKwh']))
		# Margin change on the load reduction (wholesale minus retail rate).
		energyLostDollars[monthElement] = (sum(p[index1:index2])/1000.0 - sum(pnew[index1:index2])/1000.0 - sum(realLoss[index1:index2])/1000.0 + sum(realLossnew[index1:index2])/1000.0 )*(float(inputDict['wholesaleEnergyCostPerKwh']) - float(inputDict['retailEnergyCostPerKwh']))
		previndex = index2
	# Plot: per-month cost/benefit bar chart.
	fig = plt.figure('cost benefit barchart',figsize=(10,8))
	ticks = range(len(simMonthList))
	ticks1 = [element+0.15 for element in ticks]
	ticks2 = [element+0.30 for element in ticks]
	eld = [energyLostDollars[month] for month in simMonthList]
	lrd = [lossRedDollars[month] for month in simMonthList]
	psd = [peakSaveDollars[month] for month in simMonthList]
	bar_eld = plt.bar(ticks,eld,0.15,color='red')
	bar_psd = plt.bar(ticks1,psd,0.15,color='blue')
	bar_lrd = plt.bar(ticks2,lrd,0.15,color='green')
	plt.legend([bar_eld[0], bar_psd[0], bar_lrd[0]], ['energyLostDollars','peakReductionDollars','lossReductionDollars'],bbox_to_anchor=(0., 1.015, 1., .102), loc=3, ncol=2, mode='expand', borderaxespad=0.1)
	monShort = [element[0:3] for element in simMonthList]
	plt.xticks([t+0.15 for t in ticks],monShort)
	plt.ylabel('Utility Savings ($)')
	plt.savefig(pJoin(modelDir,'spendChart.png'))
	# Plot: cumulative savings over 30 years with simple-payback marker.
	# NOTE(review): simplePayback divides by (annualSavings - omCost) —
	# ZeroDivisionError if O&M cost exactly equals annual savings.
	fig = plt.figure('cost benefit barchart',figsize=(10,5))
	annualSavings = sum(eld) + sum(lrd) + sum(psd)
	annualSave = lambda x:(annualSavings - float(inputDict['omCost'])) * x - float(inputDict['capitalCost'])
	simplePayback = float(inputDict['capitalCost'])/(annualSavings - float(inputDict['omCost']))
	plt.xlabel('Year After Installation')
	plt.xlim(0,30)
	plt.ylabel('Cumulative Savings ($)')
	plt.plot([0 for x in range(31)],c='gray')
	plt.axvline(x=simplePayback, ymin=0, ymax=1, c='gray', linestyle='--')
	plt.plot([annualSave(x) for x in range(31)], c='green')
	plt.savefig(pJoin(modelDir,'savingsChart.png'))
	# Get exact time stamps from the CSV files generated by GridLAB-D;
	# "[:19]" drops the timezone suffix, keeping 'YYYY-MM-DD HH:MM:SS'.
	timeWithZone = output['Zregulator.csv']['# timestamp']
	timestamps = [element[:19] for element in timeWithZone]
	# Data for highcharts rendering in the UI.
	outData['timeStamps'] = timestamps
	outData['noCVRPower'] = p
	outData['withCVRPower'] = pnew
	outData['noCVRLoad'] = whLoads[0]
	outData['withCVRLoad'] = whLoads[1]
	outData['noCVRLosses'] = whLosses[0]
	outData['withCVRLosses'] = whLosses[1]
	outData['noCVRTaps'] = tap
	outData['withCVRTaps'] = tapnew
	outData['noCVRSubVolts'] = volt
	outData['withCVRSubVolts'] = voltnew
	outData['noCVRCapSwitch'] = switch
	outData['withCVRCapSwitch'] = switchnew
	outData['noCVRHighVolt'] = highVoltage
	outData['withCVRHighVolt'] = highVoltagenew
	outData['noCVRLowVolt'] = lowVoltage
	outData['withCVRLowVolt'] = lowVoltagenew
	outData['noCVRMeanVolt'] = meanVoltage
	outData['withCVRMeanVolt'] = meanVoltagenew
	# Monetization outputs.
	outData['simMonthList'] = monShort
	outData['energyLostDollars'] = energyLostDollars
	outData['lossRedDollars'] = lossRedDollars
	outData['peakSaveDollars'] = peakSaveDollars
	outData['annualSave'] = [annualSave(x) for x in range(31)]
	# Generate warnings. Best-effort: any failure here is deliberately
	# swallowed so warnings never block the model results.
	#TODO: Timezone adjustment
	try:
		# Check if times for simulation and scada match.
		scadaDates = []
		with open(pJoin(modelDir,'subScadaCalibrated1.player'),'r') as scadaFile:
			for line in scadaFile:
				(date,val) = line.split(',')
				scadaDates.append(str(date))
		simFormattedEndDate = simFormattedDate + timedelta(hours=HOURS)
		# NOTE(review): assumes player timestamps carry a ' PST' suffix — other
		# timezones would make strptime fail (silently, via the bare except).
		scadaStartDate = dt.strptime(scadaDates[0].split(' PST')[0],"%Y-%m-%d %H:%M:%S")
		scadaEndDate = dt.strptime(scadaDates[len(scadaDates)-1].split(' PST')[0],"%Y-%m-%d %H:%M:%S")
		beginRange = (scadaStartDate - simFormattedDate).total_seconds()
		endRange = (scadaEndDate - simFormattedEndDate).total_seconds()
		# Check if houses and the requested voltage node exist in the tree.
		housesExist, voltageNodeExists = False, False
		for key in localTree:
			if localTree[key].get('object','') == 'house':
				housesExist = True
			if localTree[key].get('name','') == str(inputDict.get('voltageNodes', 0)):
				voltageNodeExists = True
		if (beginRange > 0.0 or endRange < 0.0) and not housesExist:
			outData['warnings'] = '<strong>WARNING:</strong> The simulation dates entered are not compatible with the scada curve in the feeder.'
		# Check if voltage node exists.
		if not voltageNodeExists:
			if outData.get('warnings','') != '':
				previousWarning = outData['warnings']
				outData['warnings'] = previousWarning + ' The voltage node: ' + str(inputDict.get('voltageNodes', 0)) + ' does not exist in the feeder.'
			else:
				outData['warnings'] = '<strong>WARNING:</strong> The voltage node <i>' + str(inputDict.get('voltageNodes', 0)) + '</i> does not exist in the feeder.'
	except:
		pass
	# # Update the runTime in the input file.
	# endTime = dt.now()
	# with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
	# 	json.dump(inputDict, inFile, indent=4)
	# with open(pJoin(modelDir,"outDataData.json"),"w") as outFile:
	# 	json.dump(outData, outFile, indent=4)
	# # For autotest, there won't be such file.
	# try:
	# 	os.remove(pJoin(modelDir, "PPID.txt"))
	# except Exception, e:
	# 	pass
	return outData
def runForeground(modelDir, inData): '''This reads a glm file, changes the method of powerflow and reruns''' print "STARTING TO RUN", modelDir try: startTime = dt.now() if not os.path.isdir(modelDir): os.makedirs(modelDir) inData["created"] = str(startTime) #read pre-calibrated feeder and run cvrdynamic feederName = inData.get('feederName1', 'feeder1') feederPath = pJoin(modelDir, feederName + '.omd') # Reads a pre-calibrated feeder. allOutput = {} with open(feederPath, "r") as jsonIn: feederJson = json.load(jsonIn) localTree = feederJson.get("tree", {}) attachments = feederJson.get("attachments", {}) for key in localTree: if "solver_method" in localTree[key].keys(): # print "current solver method", localTree[key]["solver_method"] localTree[key]["solver_method"] = 'FBS' #find the swing bus and recorder attached to substation try: for key in localTree: if localTree[key].get('bustype', '').lower() == 'swing': swingIndex = key swingName = localTree[key].get('name') if localTree[key].get( 'object', '') == 'regulator' and localTree[key].get( 'from', '') == swingName: regIndex = key regConfName = localTree[key]['configuration'] except: raise ValueError('Invalid feeder selected:', str(inData["feederName1"])) #find the regulator and capacitor names and combine to form a string for volt-var control object regKeys = [] accum_reg = "" for key in localTree: if localTree[key].get("object", "") == "regulator": accum_reg += localTree[key].get("name", "ERROR") + "," regKeys.append(key) regstr = accum_reg[:-1] # print regKeys capKeys = [] accum_cap = "" for key in localTree: if localTree[key].get("object", "") == "capacitor": accum_cap += localTree[key].get("name", "ERROR") + "," capKeys.append(key) if localTree[key].get("control", "").lower() == "manual": localTree[key]['control'] = "VOLT" # print "changing capacitor control from manual to volt" capstr = accum_cap[:-1] # print capKeys # Attach recorders relevant to CVR. 
recorders = [{ 'object': 'collector', 'file': 'ZlossesTransformer.csv', 'group': 'class=transformer', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)' }, { 'object': 'collector', 'file': 'ZlossesUnderground.csv', 'group': 'class=underground_line', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)' }, { 'object': 'collector', 'file': 'ZlossesOverhead.csv', 'group': 'class=overhead_line', 'limit': '0', 'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)' }, { 'object': 'recorder', 'file': 'Zregulator.csv', 'limit': '0', 'parent': localTree[regIndex]['name'], 'property': 'tap_A,tap_B,tap_C,power_in.real,power_in.imag' }, { 'object': 'collector', 'file': 'ZvoltageJiggle.csv', 'group': 'class=triplex_meter', 'limit': '0', 'property': 'min(voltage_12.mag),mean(voltage_12.mag),max(voltage_12.mag),std(voltage_12.mag)' }, { 'object': 'recorder', 'file': 'ZsubstationTop.csv', 'limit': '0', 'parent': localTree[swingIndex]['name'], 'property': 'voltage_A,voltage_B,voltage_C' }, { 'object': 'recorder', 'file': 'ZsubstationBottom.csv', 'limit': '0', 'parent': localTree[regIndex]['to'], 'property': 'voltage_A,voltage_B,voltage_C' }] #recorder object for capacitor switching - if capacitors exist if capKeys != []: for key in capKeys: recorders.append({ 'object': 'recorder', 'file': 'ZcapSwitch' + str(key) + '.csv', 'limit': '0', 'parent': localTree[key]['name'], 'property': 'switchA,switchB,switchC' }) #attach recorder process biggest = 1 + max([int(k) for k in localTree.keys()]) for index, rec in enumerate(recorders): localTree[biggest + index] = rec #run a reference load flow HOURS = float(inData['simLengthHours']) 
simStartDate = inData['simStart'] feeder.adjustTime(localTree, HOURS, "hours", simStartDate) output = gridlabd.runInFilesystem(localTree, attachments, keepFiles=False, workDir=modelDir) try: os.remove(pJoin(modelDir, "PID.txt")) except: pass p = output['Zregulator.csv']['power_in.real'] q = output['Zregulator.csv']['power_in.imag'] #calculating length of simulation because it migth be different from the simulation input HOURS simRealLength = int(len(p)) #time delays from configuration files time_delay_reg = '30.0' time_delay_cap = '300.0' for key in localTree: if localTree[key].get('object', '') == "regulator_configuration": time_delay_reg = localTree[key]['time_delay'] # print "time_delay_reg",time_delay_reg # if localTree[key].get('object','') == "capacitor": # time_delay_cap = localTree[key]['time_delay'] # print "time_delay_cap",time_delay_cap #change the recorder names for key in localTree: if localTree[key].get('object', '') == "collector" or localTree[key].get( 'object', '') == "recorder": if localTree[key].get('file', '').startswith('Z'): localTree[key]['file'] = localTree[key].get( 'file', '').replace('Z', 'NewZ') #create volt-var control object max_key = max([int(key) for key in localTree.keys()]) # print max_key localTree[max_key + 1] = { 'object': 'volt_var_control', 'name': 'IVVC1', 'control_method': 'ACTIVE', 'capacitor_delay': str(time_delay_cap), 'regulator_delay': str(time_delay_reg), 'desired_pf': '0.99', 'd_max': '0.6', 'd_min': '0.1', 'substation_link': str(localTree[regIndex]['name']), 'regulator_list': regstr, 'capacitor_list': capstr, 'voltage_measurements': str(inData.get("voltageNodes", "IVVC1")), } #running powerflow analysis via gridalab after attaching a regulator feeder.adjustTime(localTree, HOURS, "hours", simStartDate) output1 = gridlabd.runInFilesystem(localTree, attachments, keepFiles=True, workDir=modelDir) os.remove(pJoin(modelDir, "PID.txt")) pnew = output1['NewZregulator.csv']['power_in.real'] qnew = 
output1['NewZregulator.csv']['power_in.imag'] #total real and imaginary losses as a function of time def vecSum(u, v): ''' Add vectors u and v element-wise. Return has len <= len(u) and <=len(v). ''' return map(sum, zip(u, v)) def zeroVec(length): ''' Give a zero vector of input length. ''' return [0 for x in xrange(length)] (realLoss, imagLoss, realLossnew, imagLossnew) = (zeroVec(int(HOURS)) for x in range(4)) for device in [ 'ZlossesOverhead.csv', 'ZlossesTransformer.csv', 'ZlossesUnderground.csv' ]: for letter in ['A', 'B', 'C']: realLoss = vecSum( realLoss, output[device]['sum(power_losses_' + letter + '.real)']) imagLoss = vecSum( imagLoss, output[device]['sum(power_losses_' + letter + '.imag)']) realLossnew = vecSum( realLossnew, output1['New' + device]['sum(power_losses_' + letter + '.real)']) imagLossnew = vecSum( imagLossnew, output1['New' + device]['sum(power_losses_' + letter + '.imag)']) #voltage calculations and tap calculations def divby2(u): '''divides by 2''' return u / 2 lowVoltage = [] meanVoltage = [] highVoltage = [] lowVoltagenew = [] meanVoltagenew = [] highVoltagenew = [] tap = {'A': [], 'B': [], 'C': []} tapnew = {'A': [], 'B': [], 'C': []} volt = {'A': [], 'B': [], 'C': []} voltnew = {'A': [], 'B': [], 'C': []} switch = {'A': [], 'B': [], 'C': []} switchnew = {'A': [], 'B': [], 'C': []} for letter in ['A', 'B', 'C']: tap[letter] = output['Zregulator.csv']['tap_' + letter] tapnew[letter] = output1['NewZregulator.csv']['tap_' + letter] if capKeys != []: switch[letter] = output['ZcapSwitch' + str(int(capKeys[0])) + '.csv']['switch' + letter] switchnew[letter] = output1['NewZcapSwitch' + str(int(capKeys[0])) + '.csv']['switch' + letter] volt[letter] = map( returnMag, output['ZsubstationBottom.csv']['voltage_' + letter]) voltnew[letter] = map( returnMag, output1['NewZsubstationBottom.csv']['voltage_' + letter]) lowVoltage = map(divby2, output['ZvoltageJiggle.csv']['min(voltage_12.mag)']) lowVoltagenew = map( divby2, 
output1['NewZvoltageJiggle.csv']['min(voltage_12.mag)']) meanVoltage = map(divby2, output['ZvoltageJiggle.csv']['mean(voltage_12.mag)']) meanVoltagenew = map( divby2, output1['NewZvoltageJiggle.csv']['mean(voltage_12.mag)']) highVoltage = map(divby2, output['ZvoltageJiggle.csv']['max(voltage_12.mag)']) highVoltagenew = map( divby2, output1['NewZvoltageJiggle.csv']['max(voltage_12.mag)']) #energy calculations whEnergy = [] whLosses = [] whLoads = [] whEnergy.append(sum(p) / 10**6) whLosses.append(sum(realLoss) / 10**6) whLoads.append((sum(p) - sum(realLoss)) / 10**6) whEnergy.append(sum(pnew) / 10**6) whLosses.append(sum(realLossnew) / 10**6) whLoads.append((sum(pnew) - sum(realLossnew)) / 10**6) indices = ['No IVVC', 'With IVVC'] # energySalesRed = (whLoads[1]-whLoads[0])*(inData['wholesaleEnergyCostPerKwh'])*1000 # lossSav = (whLosses[0]-whLosses[1])*inData['wholesaleEnergyCostPerKwh']*1000 # print energySalesRed, lossSav #plots ticks = [] plt.clf() plt.title("total energy") plt.ylabel("total load and losses (MWh)") for element in range(2): ticks.append(element) bar_loss = plt.bar(element, whLosses[element], 0.15, color='red') bar_load = plt.bar(element + 0.15, whLoads[element], 0.15, color='orange') plt.legend([bar_load[0], bar_loss[0]], ['total load', 'total losses'], bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1) plt.xticks([t + 0.15 for t in ticks], indices) plt.savefig(pJoin(modelDir, "totalEnergy.png")) #real and imaginary power plt.figure("real power") plt.title("Real Power at substation") plt.ylabel("substation real power (MW)") pMW = [element / 10**6 for element in p] pMWn = [element / 10**6 for element in pnew] pw = plt.plot(pMW) npw = plt.plot(pMWn) plt.legend([pw[0], npw[0]], ['NO IVVC', 'WITH IVVC'], bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1) plt.savefig(pJoin(modelDir, "realPower.png")) plt.figure("Reactive power") plt.title("Reactive Power at substation") 
plt.ylabel("substation reactive power (MVAR)") qMVAR = [element / 10**6 for element in q] qMVARn = [element / 10**6 for element in qnew] iw = plt.plot(qMVAR) niw = plt.plot(qMVARn) plt.legend([iw[0], niw[0]], ['NO IVVC', 'WITH IVVC'], bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1) plt.savefig(pJoin(modelDir, "imaginaryPower.png")) #voltage plots plt.figure("voltages as a function of time") f, ax = plt.subplots(2, sharex=True) f.suptitle("Min and Max voltages on the feeder") lv = ax[0].plot(lowVoltage, color='cadetblue') mv = ax[0].plot(meanVoltage, color='blue') hv = ax[0].plot(highVoltage, color='cadetblue') ax[0].legend([lv[0], mv[0], hv[0]], ['low voltage', 'mean voltage', 'high voltage'], bbox_to_anchor=(0., 0.915, 1., .1), loc=3, ncol=3, mode="expand", borderaxespad=0.1) ax[0].set_ylabel('NO IVVC') nlv = ax[1].plot(lowVoltagenew, color='cadetblue') nmv = ax[1].plot(meanVoltagenew, color='blue') nhv = ax[1].plot(highVoltagenew, color='cadetblue') ax[1].set_ylabel('WITH IVVC') plt.savefig(pJoin(modelDir, "Voltages.png")) #tap positions plt.figure("TAP positions NO IVVC") f, ax = plt.subplots(6, sharex=True) f.set_size_inches(10, 12.0) #f.suptitle("Regulator Tap positions") ax[0].plot(tap['A']) ax[0].set_title("Regulator Tap positions NO IVVC") ax[0].set_ylabel("TAP A") ax[1].plot(tap['B']) ax[1].set_ylabel("TAP B") ax[2].plot(tap['C']) ax[2].set_ylabel("TAP C") ax[3].plot(tapnew['A']) ax[3].set_title("WITH IVVC") ax[3].set_ylabel("TAP A") ax[4].plot(tapnew['B']) ax[4].set_ylabel("TAP B") ax[5].plot(tapnew['C']) ax[5].set_ylabel("TAP C") for subplot in range(6): ax[subplot].set_ylim(-20, 20) f.tight_layout() plt.savefig(pJoin(modelDir, "RegulatorTAPpositions.png")) #substation voltages plt.figure("substation voltage as a function of time") f, ax = plt.subplots(6, sharex=True) f.set_size_inches(10, 12.0) #f.suptitle("voltages at substation NO IVVC") ax[0].plot(volt['A']) ax[0].set_title('Substation voltages NO IVVC') 
ax[0].set_ylabel('voltage A') ax[1].plot(volt['B']) ax[1].set_ylabel('voltage B') ax[2].plot(volt['C']) ax[2].set_ylabel('voltage C') ax[3].plot(voltnew['A']) ax[3].set_title("WITH IVVC") ax[3].set_ylabel('voltage A') ax[4].plot(voltnew['B']) ax[4].set_ylabel('voltage B') ax[5].plot(voltnew['C']) ax[5].set_ylabel('voltage C') f.tight_layout() plt.savefig(pJoin(modelDir, "substationVoltages.png")) #cap switches plt.figure("capacitor switch state as a function of time") f, ax = plt.subplots(6, sharex=True) f.set_size_inches(10, 12.0) #f.suptitle("Capacitor switch state NO IVVC") ax[0].plot(switch['A']) ax[0].set_title("Capacitor switch state NO IVVC") ax[0].set_ylabel("switch A") ax[1].plot(switch['B']) ax[1].set_ylabel("switch B") ax[2].plot(switch['C']) ax[2].set_ylabel("switch C") ax[3].plot(switchnew['A']) ax[3].set_title("WITH IVVC") ax[3].set_ylabel("switch A") ax[4].plot(switchnew['B']) ax[4].set_ylabel("switch B") ax[5].plot(switchnew['C']) ax[5].set_ylabel("switch C") for subplot in range(6): ax[subplot].set_ylim(-2, 2) f.tight_layout() plt.savefig(pJoin(modelDir, "capacitorSwitch.png")) #plt.show() #monetization monthNames = [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ] monthToSeason = { 'January': 'Winter', 'February': 'Winter', 'March': 'Spring', 'April': 'Spring', 'May': 'Spring', 'June': 'Summer', 'July': 'Summer', 'August': 'Summer', 'September': 'Fall', 'October': 'Fall', 'November': 'Fall', 'December': 'Winter' } #calculate the month and hour of simulation start and month and hour of simulation end simStartTimestamp = simStartDate + " 00:00:00" simFormattedDate = dt.strptime(simStartTimestamp, "%Y-%m-%d %H:%M:%S") simStartMonthNum = int(simFormattedDate.strftime('%m')) simstartMonth = monthNames[simStartMonthNum - 1] simStartDay = int(simFormattedDate.strftime('%d')) if calendar.isleap(int(simFormattedDate.strftime('%Y'))): febDays = 29 else: febDays = 28 monthHours = [ 
int(31 * 24), int(febDays * 24), int(31 * 24), int(30 * 24), int(31 * 24), int(30 * 24), int(31 * 24), int(31 * 24), int(30 * 24), int(31 * 24), int(30 * 24), int(31 * 24) ] simStartIndex = int( sum(monthHours[:(simStartMonthNum - 1)]) + (simStartDay - 1) * 24) temp = 0 cumulHours = [0] for x in range(12): temp += monthHours[x] cumulHours.append(temp) for i in range((simStartMonthNum), 13): if int(simStartIndex + simRealLength) <= cumulHours[i] and int( simStartIndex + simRealLength) > cumulHours[i - 1]: simEndMonthNum = i - 1 simEndMonth = monthNames[simEndMonthNum] # print simstartMonth,simEndMonth #calculate peaks for the number of months in simulation previndex = 0 monthPeak = {} monthPeakNew = {} peakSaveDollars = {} energyLostDollars = {} lossRedDollars = {} simMonthList = monthNames[monthNames.index(simstartMonth):( monthNames.index(simEndMonth) + 1)] # print simMonthList for monthElement in simMonthList: # print monthElement month = monthNames.index(monthElement) index1 = int(previndex) index2 = int(min((index1 + int(monthHours[month])), simRealLength)) monthPeak[monthElement] = max(p[index1:index2]) / 1000.0 monthPeakNew[monthElement] = max(pnew[index1:index2]) / 1000.0 peakSaveDollars[monthElement] = ( monthPeak[monthElement] - monthPeakNew[monthElement]) * float( inData['peakDemandCost' + str(monthToSeason[monthElement]) + 'PerKw']) lossRedDollars[monthElement] = ( sum(realLoss[index1:index2]) / 1000.0 - sum(realLossnew[index1:index2]) / 1000.0) * (float( inData['wholesaleEnergyCostPerKwh'])) energyLostDollars[monthElement] = ( sum(p[index1:index2]) / 1000.0 - sum(pnew[index1:index2]) / 1000.0 - sum(realLoss[index1:index2]) / 1000.0 + sum(realLossnew[index1:index2]) / 1000.0) * ( float(inData['wholesaleEnergyCostPerKwh']) - float(inData['retailEnergyCostPerKwh'])) previndex = index2 #money charts fig = plt.figure("cost benefit barchart", figsize=(10, 8)) ticks = range(len(simMonthList)) ticks1 = [element + 0.15 for element in ticks] ticks2 = [element + 
0.30 for element in ticks] # print ticks eld = [energyLostDollars[month] for month in simMonthList] lrd = [lossRedDollars[month] for month in simMonthList] psd = [peakSaveDollars[month] for month in simMonthList] bar_eld = plt.bar(ticks, eld, 0.15, color='red') bar_psd = plt.bar(ticks1, psd, 0.15, color='blue') bar_lrd = plt.bar(ticks2, lrd, 0.15, color='green') plt.legend([bar_eld[0], bar_psd[0], bar_lrd[0]], [ 'energyLostDollars', 'peakReductionDollars', 'lossReductionDollars' ], bbox_to_anchor=(0., 1.015, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1) monShort = [element[0:3] for element in simMonthList] plt.xticks([t + 0.15 for t in ticks], monShort) plt.ylabel('Utility Savings ($)') plt.savefig(pJoin(modelDir, "spendChart.png")) #cumulative savings graphs fig = plt.figure("cost benefit barchart", figsize=(10, 5)) annualSavings = sum(eld) + sum(lrd) + sum(psd) annualSave = lambda x: (annualSavings - float(inData['omCost']) ) * x - float(inData['capitalCost']) simplePayback = float( inData['capitalCost']) / (annualSavings - float(inData['omCost'])) plt.xlabel('Year After Installation') plt.xlim(0, 30) plt.ylabel('Cumulative Savings ($)') plt.plot([0 for x in range(31)], c='gray') plt.axvline(x=simplePayback, ymin=0, ymax=1, c='gray', linestyle='--') plt.plot([annualSave(x) for x in range(31)], c='green') plt.savefig(pJoin(modelDir, "savingsChart.png")) #get exact time stamps from the CSV files generated by Gridlab-D timeWithZone = output['Zregulator.csv']['# timestamp'] timestamps = [element[:19] for element in timeWithZone] #data for highcharts allOutput["timeStamps"] = timestamps allOutput["noCVRPower"] = p allOutput["withCVRPower"] = pnew allOutput["noCVRLoad"] = whLoads[0] allOutput["withCVRLoad"] = whLoads[1] allOutput["noCVRLosses"] = whLosses[0] allOutput["withCVRLosses"] = whLosses[1] allOutput["noCVRTaps"] = tap allOutput["withCVRTaps"] = tapnew allOutput["noCVRSubVolts"] = volt allOutput["withCVRSubVolts"] = voltnew 
allOutput["noCVRCapSwitch"] = switch allOutput["withCVRCapSwitch"] = switchnew allOutput["noCVRHighVolt"] = highVoltage allOutput["withCVRHighVolt"] = highVoltagenew allOutput["noCVRLowVolt"] = lowVoltage allOutput["withCVRLowVolt"] = lowVoltagenew allOutput["noCVRMeanVolt"] = meanVoltage allOutput["withCVRMeanVolt"] = meanVoltagenew #monetization allOutput["simMonthList"] = monShort allOutput["energyLostDollars"] = energyLostDollars allOutput["lossRedDollars"] = lossRedDollars allOutput["peakSaveDollars"] = peakSaveDollars allOutput["annualSave"] = [annualSave(x) for x in range(31)] # Generate warnings #TODO: Timezone adjustment try: # Check if times for simulation and scada match. scadaDates = [] with open(pJoin(modelDir, "subScadaCalibrated1.player"), "r") as scadaFile: for line in scadaFile: (date, val) = line.split(',') scadaDates.append(str(date)) simFormattedEndDate = simFormattedDate + timedelta(hours=HOURS) scadaStartDate = dt.strptime(scadaDates[0].split(' PST')[0], "%Y-%m-%d %H:%M:%S") scadaEndDate = dt.strptime( scadaDates[len(scadaDates) - 1].split(' PST')[0], "%Y-%m-%d %H:%M:%S") beginRange = (scadaStartDate - simFormattedDate).total_seconds() endRange = (scadaEndDate - simFormattedEndDate).total_seconds() # Check if houses exist. housesExist, voltageNodeExists = False, False for key in localTree: if localTree[key].get('object', '') == 'house': housesExist = True if localTree[key].get('name', '') == str(inData.get("voltageNodes", 0)): voltageNodeExists = True if (beginRange > 0.0 or endRange < 0.0) and not housesExist: allOutput[ "warnings"] = "<strong>WARNING:</strong> The simulation dates entered are not compatible with the scada curve in the feeder." # Check if voltage node exists. if not voltageNodeExists: if allOutput.get('warnings', '') != "": previousWarning = allOutput["warnings"] allOutput[ "warnings"] = previousWarning + " The voltage node: " + str( inData.get("voltageNodes", 0)) + " does not exist in the feeder." 
else: allOutput[ "warnings"] = "<strong>WARNING:</strong> The voltage node <i>" + str( inData.get( "voltageNodes", 0)) + "</i> does not exist in the feeder." except: pass # Update the runTime in the input file. endTime = dt.now() inData["runTime"] = str( timedelta(seconds=int((endTime - startTime).total_seconds()))) with open(pJoin(modelDir, "allInputData.json"), "w") as inFile: json.dump(inData, inFile, indent=4) with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile: json.dump(allOutput, outFile, indent=4) # For autotest, there won't be such file. try: os.remove(pJoin(modelDir, "PPID.txt")) except Exception, e: pass print "DONE RUNNING", modelDir
def runGridlabAndProcessData(tree, attachments, edge_bools, workDir=False):
	''' Run GridLAB-D on the given feeder tree and post-process its CSV dumps.

	Args:
		tree: feeder tree dict (GLM object dict keyed by tree position).
		attachments: extra files to place in the working directory for the run.
		edge_bools: dict of edge-class name -> bool; truthy classes have a
			'<class>_cont_rating.csv' group-recorder output to harvest.
		workDir: directory to run in; a temp directory is created when falsy.

	Returns a dict with:
		'nominalVolts', 'nodeVolts', 'percentChangeVolts' — per-node voltages,
		'edgeCurrentSum', 'edgePower', 'edgeValsPU' — per-edge current/power/loading,
		'tapPositions' — regulator taps (only populated when edge_bools['regulator']).
	'''
	def _readTableCsv(path):
		''' Parse a GridLAB-D dump CSV (one junk header line, then column names,
		then data rows) into a list of {column: value} dicts. '''
		with open(path, newline='') as f:
			reader = csv.reader(f)
			next(reader)  # Burn the header.
			keys = next(reader)
			return [dict(zip(keys, row)) for row in reader]
	def _avg(l):
		''' Average of a list of ints or floats. '''
		return sum(l) / len(l)
	# Run Gridlab.
	if not workDir:
		workDir = tempfile.mkdtemp()
	gridlabOut = gridlabd.runInFilesystem(tree, attachments=attachments, workDir=workDir)
	# Read voltDump values; a missing or truncated dump means the run failed.
	try:
		voltTable = _readTableCsv(pJoin(workDir, 'voltDump.csv'))
	except Exception:
		# Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit are not
		# converted into a misleading "GridLAB-D failed" message.
		raise Exception(
			'GridLAB-D failed to run with the following errors:\n' + gridlabOut['stderr'])
	# Read currDump values (same dump format as voltDump).
	currTable = _readTableCsv(pJoin(workDir, 'currDump.csv'))
	# Read continuous line ratings into a single dict keyed by line name.
	lineRatings = {}
	for edgeClass, present in edge_bools.items():
		if present:
			with open(pJoin(workDir, edgeClass + '_cont_rating.csv'), newline='') as ratingFile:
				reader = csv.reader(ratingFile)
				keys = []
				vals = []
				for row in reader:
					# The group-recorder header row names the lines; the row right
					# after it carries the rating values. Drop the timestamp column.
					if '# timestamp' in row:
						keys = row
						i = keys.index('# timestamp')
						keys.pop(i)
						vals = next(reader)
						vals.pop(i)
				for pos, lineName in enumerate(keys):
					lineRatings[lineName] = abs(float(vals[pos]))
	# Average per-node voltage magnitude across the non-zero phases.
	nodeVolts = {}
	for row in voltTable:
		allVolts = []
		for phase in ['A', 'B', 'C']:
			realVolt = abs(float(row['volt' + phase + '_real']))
			imagVolt = abs(float(row['volt' + phase + '_imag']))
			phaseVolt = math.hypot(realVolt, imagVolt)
			if phaseVolt != 0.0:
				allVolts.append(phaseVolt)
		# NOTE(review): a node with all three phases at exactly 0.0 V leaves
		# allVolts empty and raises ZeroDivisionError — preserved from original.
		nodeVolts[row.get('node_name', '')] = round(_avg(allVolts), 2)
	# Build a name -> tree-object index once instead of rescanning the whole
	# tree for every node/edge (was O(n*m); last matching name wins, as before).
	objByName = {}
	for obj in tree.values():
		objByName[obj.get('name', '').replace('"', '')] = obj
	# Nominal voltage per node, and node voltage rescaled onto a 120 V base.
	nominalVolts = {}
	percentChangeVolts = {}
	for nodeName in nodeVolts:
		ob = objByName.get(nodeName)
		if ob is not None:
			nominalVolts[nodeName] = float(ob.get('nominal_voltage', 1))
			percentChangeVolts[nodeName] = (nodeVolts[nodeName] / nominalVolts[nodeName]) * 120
	# Per-edge current from currDump: sum of the three phase magnitudes (used as
	# the label value) and the max phase (used for per-unit loading).
	edgeCurrentSum = {}
	edgeCurrentMax = {}
	for row in currTable:
		allCurr = []
		for phase in ['A', 'B', 'C']:
			realCurr = abs(float(row['curr' + phase + '_real']))
			imagCurr = abs(float(row['curr' + phase + '_imag']))
			allCurr.append(math.hypot(realCurr, imagCurr))
		linkName = row.get('link_name', '')
		edgeCurrentSum[linkName] = sum(allCurr)
		edgeCurrentMax[linkName] = max(allCurr)
	# edgeValsPU = current values normalized per unit by line ratings.
	edgeValsPU = {}
	edgePower = {}
	for edge in edgeCurrentSum:
		obj = objByName.get(edge)
		if obj is None:
			continue  # Edge has no matching tree object; original also skipped it.
		nodeFrom = obj.get('from')
		nodeTo = obj.get('to')
		currVal = edgeCurrentSum[edge]
		# NOTE(review): if either endpoint is missing from nodeVolts this averages
		# over a None and raises — preserved from original.
		voltVal = _avg([nodeVolts.get(nodeFrom), nodeVolts.get(nodeTo)])
		# Unrated edges get an effectively unlimited rating so PU stays ~0.
		lineRatings[edge] = lineRatings.get(edge, 10.0**9)
		edgeValsPU[edge] = edgeCurrentMax[edge] / lineRatings[edge]
		edgePower[edge] = (currVal * voltVal) / 1000
	# Read regulator tap position values into a single dictionary.
	tapPositions = {}
	if edge_bools['regulator']:
		tapPositions['tapA'] = readGroupRecorderCSV(pJoin(workDir, 'tap_A.csv'))
		tapPositions['tapB'] = readGroupRecorderCSV(pJoin(workDir, 'tap_B.csv'))
		tapPositions['tapC'] = readGroupRecorderCSV(pJoin(workDir, 'tap_C.csv'))
	return {
		'nominalVolts': nominalVolts,
		'nodeVolts': nodeVolts,
		'percentChangeVolts': percentChangeVolts,
		'edgeCurrentSum': edgeCurrentSum,
		'edgePower': edgePower,
		'edgeValsPU': edgeValsPU,
		'tapPositions': tapPositions
	}
def work(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Mutates inputDict (sets "feederName1" and "climateName"), rewrites the feeder
	tree with recorders, runs GridLAB-D in modelDir, and returns the outData dict
	consumed by the model's display layer.
	'''
	# feederName = inputDict["feederName1"]
	# Derive the feeder name from the .omd file in the model directory.
	# NOTE(review): assumes exactly one .omd is present; IndexError otherwise — confirm callers guarantee this.
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	# Resolve the zip code to a TMY2 climate file and copy it beside the model.
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(
		pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	feederJson = json.load(open(pJoin(modelDir, feederName + '.omd')))
	tree = feederJson["tree"]
	# Set up GLM with correct time and recorders:
	feeder.attachRecorders(tree, "Regulator", "object", "regulator")
	feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
	feeder.attachRecorders(tree, "Inverter", "object", "inverter")
	feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
	feeder.attachRecorders(tree, "CollectorVoltage", None, None)
	feeder.attachRecorders(tree, "Climate", "object", "climate")
	feeder.attachRecorders(tree, "OverheadLosses", None, None)
	feeder.attachRecorders(tree, "UndergroundLosses", None, None)
	feeder.attachRecorders(tree, "TriplexLosses", None, None)
	feeder.attachRecorders(tree, "TransformerLosses", None, None)
	feeder.groupSwingKids(tree)
	# Attach recorders for system voltage map: one group_recorder per phase on all nodes.
	stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':3600}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach recorders for system voltage map, triplex (phases 1 and 2):
	stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':3600}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'nVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach current recorder for overhead_lines (per-phase output currents).
	currentStub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	for phase in ['A','B','C']:
		copyCurrentStub = dict(currentStub)
		copyCurrentStub['property'] = 'current_out_' + phase
		copyCurrentStub['file'] = 'OH_line_current_phase' + phase + '.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyCurrentStub
	# Overhead line continuous ratings and flow directions.
	rating_stub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	copyRatingStub = dict(rating_stub)
	copyRatingStub['property'] = 'continuous_rating'
	copyRatingStub['file'] = 'OH_line_cont_rating.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyRatingStub
	flow_stub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	copyFlowStub = dict(flow_stub)
	copyFlowStub['property'] = 'flow_direction'
	copyFlowStub['file'] = 'OH_line_flow_direc.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyFlowStub
	# Attach current recorder for underground_lines (same trio as overhead).
	currentStubOH = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	for phase in ['A','B','C']:
		copyCurrentStubOH = dict(currentStubOH)
		copyCurrentStubOH['property'] = 'current_out_' + phase
		copyCurrentStubOH['file'] = 'UG_line_current_phase' + phase + '.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyCurrentStubOH
	ug_rating_stub = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	copyUGRatingStub = dict(ug_rating_stub)
	copyUGRatingStub['property'] = 'continuous_rating'
	copyUGRatingStub['file'] = 'UG_line_cont_rating.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyUGRatingStub
	ug_flow_stub = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	ugCopyFlowStub = dict(ug_flow_stub)
	ugCopyFlowStub['property'] = 'flow_direction'
	ugCopyFlowStub['file'] = 'UG_line_flow_direc.csv'
	tree[feeder.getMaxKey(tree) + 1] = ugCopyFlowStub
	# And get meters for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':3600}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'mVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Turn the swing bus into a meter so power can be recorded at the substation.
	for key in tree:
		if 'bustype' in tree[key].keys():
			if tree[key]['bustype'] == 'SWING':
				tree[key]['object'] = 'meter'
				swingN = tree[key]['name']
	# NOTE(review): swingN is unbound if the tree has no SWING bus — NameError below; confirm inputs always have one.
	swingRecord = {'object':'recorder', 'property':'voltage_A,measured_real_power,measured_power','file':'subVoltsA.csv','parent':swingN, 'interval':60}
	tree[feeder.getMaxKey(tree) + 1] = swingRecord
	# Tighten the minimum timestep so the 60 s recorders actually fire.
	# NOTE(review): assumes every 'omftype' entry also has an 'argument' key — KeyError otherwise.
	for key in tree:
		if 'omftype' in tree[key].keys() and tree[key]['argument']=='minimum_timestep=3600':
			tree[key]['argument'] = 'minimum_timestep=60'
	# If there is a varvolt object in the tree, add recorder to swingbus and node from voltage_measurements property
	# Find var_volt object. Note: the sentinel is the *string* 'None', not None.
	downLineNode = 'None'
	for key in tree:
		if 'object' in tree[key].keys() and tree[key]['object']=='volt_var_control':
			downLineNode = tree[key]['voltage_measurements']
	if downLineNode != 'None':
		downNodeRecord = {'object':'recorder', 'property':'voltage_A','file':'firstDownlineVoltsA.csv','parent':downLineNode, 'interval':60}
		tree[feeder.getMaxKey(tree) + 1] = downNodeRecord
	# Violation recorder to display to users (disabled).
	# violationRecorder = {'object':'violation_recorder','node_continuous_voltage_limit_lower':0.95,'file':'Violation_Log.csv',
	# 	'secondary_dist_voltage_rise_lower_limit':-0.042,'substation_pf_lower_limit':0.85,'substation_breaker_C_limit':300,
	# 	'secondary_dist_voltage_rise_upper_limit':0.025,'substation_breaker_B_limit':300,'violation_flag':'ALLVIOLATIONS',
	# 	'node_instantaneous_voltage_limit_upper':1.1, 'inverter_v_chng_per_interval_lower_bound':-0.05, 'virtual_substation':swingN,
	# 	'substation_breaker_A_limit':300, 'xfrmr_thermal_limit_lower':0,'node_continuous_voltage_interval':300,'strict':'false',
	# 	'node_instantaneous_voltage_limit_lower':0,'line_thermal_limit_upper':1,'echo':'false','node_continuous_voltage_limit_upper':1.05,
	# 	'interval':30,'line_thermal_limit_lower':0,'summary':'Violation_Summary.csv','inverter_v_chng_interval':60,
	# 	'xfrmr_thermal_limit_upper':2,'inverter_v_chng_per_interval_upper_bound':0.050}
	# tree[feeder.getMaxKey(tree) + 1] = violationRecorder
	feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
	# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
	rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir))
	# voltDumps have no values when gridlabD fails or the files dont exist.
	if not os.path.isfile(pJoin(modelDir,'aVoltDump.csv')):
		with open (pJoin(modelDir,'stderr.txt')) as inFile:
			stdErrText = inFile.read()
		message = 'GridLAB-D crashed. Error log:\n' + stdErrText
		raise Exception(message)
	elif len(rawOut['aVoltDump.csv']['# timestamp']) == 0:
		with open (pJoin(modelDir,'stderr.txt')) as inFile:
			stdErrText = inFile.read()
		message = 'GridLAB-D crashed. Error log:\n' + stdErrText
		raise Exception(message)
	outData = {}
	# Std Err and Std Out
	outData['stderr'] = rawOut['stderr']
	outData['stdout'] = rawOut['stdout']
	# Time Stamps: take them from the first output that has a '# timestamp' column.
	# NOTE(review): the elif/else branches do not break, so a later non-matching key
	# can reset timeStamps to [] unless the first branch hit — preserved as-is.
	for key in rawOut:
		if '# timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# timestamp']
			break
		elif '# property.. timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# property.. timestamp']
		else:
			outData['timeStamps'] = []
	# Day/Month Aggregation Setup:
	stamps = outData.get('timeStamps',[])
	level = inputDict.get('simLengthUnits','hours')
	# Climate: aggregate each climate channel up to the display level.
	for key in rawOut:
		if key.startswith('Climate_') and key.endswith('.csv'):
			outData['climate'] = {}
			outData['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
			outData['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
			outData['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
			outData['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
			outData['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			#outData['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
			climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
			# converting W/sf to W/sm (1 square meter = 10.76392 square feet)
			climateWbySMList= [x*10.76392 for x in climateWbySFList]
			outData['climate']['Global Horizontal (W/sm)']=climateWbySMList
	# Voltage Band: triplex voltage_12 statistics halved to a single-leg base.
	if 'VoltageJiggle.csv' in rawOut:
		outData['allMeterVoltages'] = {}
		outData['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
		outData['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
	# Power Consumption
	outData['Consumption'] = {}
	# Set default value to be 0, avoiding missing value when computing Loads.
	# NOTE(review): because these defaults are pre-seeded, the "not in" branches
	# below can never take the plain-assignment path; vecSum always runs.
	outData['Consumption']['Power'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['DG'] = [0] * int(inputDict["simLength"])
	for key in rawOut:
		if key.startswith('SwingKids_') and key.endswith('.csv'):
			# Substation power: magnitude of complex power_in summed across swing kids.
			oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
			if 'Power' not in outData['Consumption']:
				outData['Consumption']['Power'] = oneSwingPower
			else:
				outData['Consumption']['Power'] = vecSum(oneSwingPower,outData['Consumption']['Power'])
		elif key.startswith('Inverter_') and key.endswith('.csv'):
			# Inverter DG: per-phase complex power magnitudes, summed across phases.
			realA = rawOut[key]['power_A.real']
			realB = rawOut[key]['power_B.real']
			realC = rawOut[key]['power_C.real']
			imagA = rawOut[key]['power_A.imag']
			imagB = rawOut[key]['power_B.imag']
			imagC = rawOut[key]['power_C.imag']
			oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key.startswith('Windmill_') and key.endswith('.csv'):
			# Windmill DG: |V|*|I| per phase (apparent-power magnitude), summed.
			vrA = rawOut[key]['voltage_A.real']
			vrB = rawOut[key]['voltage_B.real']
			vrC = rawOut[key]['voltage_C.real']
			viA = rawOut[key]['voltage_A.imag']
			viB = rawOut[key]['voltage_B.imag']
			viC = rawOut[key]['voltage_C.imag']
			crB = rawOut[key]['current_B.real']
			crA = rawOut[key]['current_A.real']
			crC = rawOut[key]['current_C.real']
			ciA = rawOut[key]['current_A.imag']
			ciB = rawOut[key]['current_B.imag']
			ciC = rawOut[key]['current_C.imag']
			powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
			powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
			powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
			oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
			# System losses: per-phase complex loss magnitudes, summed across phases.
			realA = rawOut[key]['sum(power_losses_A.real)']
			imagA = rawOut[key]['sum(power_losses_A.imag)']
			realB = rawOut[key]['sum(power_losses_B.real)']
			imagB = rawOut[key]['sum(power_losses_B.imag)']
			realC = rawOut[key]['sum(power_losses_C.real)']
			imagC = rawOut[key]['sum(power_losses_C.imag)']
			oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'Losses' not in outData['Consumption']:
				outData['Consumption']['Losses'] = oneLoss
			else:
				outData['Consumption']['Losses'] = vecSum(oneLoss,outData['Consumption']['Losses'])
		elif key.startswith('Regulator_') and key.endswith('.csv'):
			# split function to strip off .csv from filename and use rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
			regName=""
			regName = key
			newkey=regName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapA'] = rawOut[key]['tap_A']
			outData[newkey]['RegTapB'] = rawOut[key]['tap_B']
			outData[newkey]['RegTapC'] = rawOut[key]['tap_C']
			outData[newkey]['RegPhases'] = rawOut[key]['phases'][0]
		elif key.startswith('Capacitor_') and key.endswith('.csv'):
			# Same key-derivation trick as regulators, for capacitor switch states.
			capName=""
			capName = key
			newkey=capName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1A'] = rawOut[key]['switchA']
			outData[newkey]['Cap1B'] = rawOut[key]['switchB']
			outData[newkey]['Cap1C'] = rawOut[key]['switchC']
			outData[newkey]['CapPhases'] = rawOut[key]['phases'][0]
	# Capture voltages at the swingbus
	# Loop through voltDump for swingbus voltages
	subData = []
	downData = []
	with open(pJoin(modelDir,"subVoltsA.csv")) as subFile:
		reader = csv.reader(subFile)
		subData = [x for x in reader]
	if downLineNode != 'None':
		with open(pJoin(modelDir,"firstDownlineVoltsA.csv")) as downFile:
			reader = csv.reader(downFile)
			downData = [x for x in reader]
	# NOTE(review): FIRST_DATA_ROW = 9 skips the GridLAB-D recorder preamble rows;
	# the [:-1] drops a trailing partial row — confirm against the recorder output format.
	FIRST_DATA_ROW = 9
	cleanDown = [stringToMag(x[1]) for x in downData[FIRST_DATA_ROW:-1]]
	swingTimestamps = [x[0] for x in subData[FIRST_DATA_ROW:-1]]
	cleanSub = [stringToMag(x[1]) for x in subData[FIRST_DATA_ROW:-1]]
	# Power factor = |measured_real_power| / |measured_power| per recorded row.
	powerFactors = []
	for row in subData[FIRST_DATA_ROW:-1]:
		powerFactors.append(abs(float(row[2])/stringToMag(row[3])))
	outData['powerFactors'] = powerFactors
	outData['swingVoltage'] = cleanSub
	outData['downlineNodeVolts'] = cleanDown
	outData['swingTimestamps'] = swingTimestamps
	# If there is a var volt system, find the min and max voltage for a band.
	minVoltBand = []
	maxVoltBand = []
	if downLineNode != 'None':
		for key in tree:
			objKeys = tree[key].keys()
			if 'object' in objKeys:
				if tree[key]['object']=='volt_var_control':
					minVoltBand.append(float(tree[key]['minimum_voltages']))
					maxVoltBand.append(float(tree[key]['maximum_voltages']))
	outData['minVoltBand'] = minVoltBand
	outData['maxVoltBand'] = maxVoltBand
	# Violation Summary and Log (disabled).
	# violationData = ''
	# violationArray = []
	# with open(pJoin(modelDir,"Violation_Summary.csv")) as vioSum:
	# 	reader = csv.reader(vioSum)
	# 	for row in reader:
	# 		violationArray.append(row)
	# for row in violationArray[4:]:
	# 	violationData += str(' '.join(row)) + "\n"
	# outData["violationSummary"] = violationData
	# violationLogArray = []
	# violationLog = ''
	# with open(pJoin(modelDir,"Violation_Log.csv")) as vioLog:
	# 	logger = csv.reader(vioLog)
	# 	for row in logger:
	# 		violationLogArray.append(row)
	# for row in violationLogArray[6:]:
	# 	violationLog += str(' '.join(row)) + "\n"
	# outData['violationLog'] = violationLog
	# What percentage of our keys have lat lon data? Sparse geometry -> use neato layout.
	latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
	latPerc = 1.0*len(latKeys)/len(tree)
	if latPerc < 0.25:
		doNeato = True
	else:
		doNeato = False
	# Generate the frames for the system voltage map time traveling chart.
	genTime, mapTimestamp = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
	outData['genTime'] = genTime
	outData['mapTimestamp'] = mapTimestamp
	# Aggregate up the timestamps:
	if level=='days':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
	elif level=='months':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
	return outData
def work(modelDir, inputDict):
	''' Run the gridBallast model in modelDir and return the output dictionary.

	Copies the climate file in, loads the feeder .omd, attaches GridLAB-D
	recorders/collectors and GridBallast frequency controls to the tree, runs
	GridLAB-D (expensive!), then post-processes the raw CSV outputs into the
	outData dict consumed by the web front end.

	modelDir: path to the model's working directory.
	inputDict: model inputs (feederName1, zipCode, simLength, simLengthUnits,
		simStartDate, eventTime, eventLength, ...).
	Returns: outData dict of chart/metric series.
	'''
	feederName = inputDict["feederName1"]
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), pJoin(modelDir, "climate.tmy2"))
	feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
	tree = feederJson["tree"]
	# tree[feeder.getMaxKey(tree)+1] = {'object':'capacitor','control':'VOLT','phases':'ABCN','name':'CAPTEST','parent':'tm_1','capacitor_A':'0.10 MVAr','capacitor_B':'0.10 MVAr','capacitor_C':'0.10 MVAr','time_delay':'300.0','nominal_voltage':'2401.7771','voltage_set_high':'2350.0','voltage_set_low':'2340.0','switchA':'CLOSED','switchB':'CLOSED','switchC':'CLOSED','control_level':'INDIVIDUAL','phases_connected':'ABCN','dwell_time':'0.0','pt_phases':'ABCN'}
	# Set up GLM with correct time and recorders:
	feeder.attachRecorders(tree, "Regulator", "object", "regulator")
	feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
	feeder.attachRecorders(tree, "Inverter", "object", "inverter")
	feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
	feeder.attachRecorders(tree, "CollectorVoltage", None, None)
	feeder.attachRecorders(tree, "Climate", "object", "climate")
	feeder.attachRecorders(tree, "OverheadLosses", None, None)
	feeder.attachRecorders(tree, "UndergroundLosses", None, None)
	feeder.attachRecorders(tree, "TriplexLosses", None, None)
	feeder.attachRecorders(tree, "TransformerLosses", None, None)
	feeder.groupSwingKids(tree)
	# System check - linux doesn't support newer GridLAB-D versions
	if sys.platform == 'linux2':
		pass
	else:
		# print feeder.getMaxKey(tree)
		# tree[14,20,27,28,47] empty for UCS Egan, add climate object to tree[14]
		# HACK: tree[10:19] is empty
		tree[11] = {'omftype':'#include', 'argument':'\"hot_water_demand.glm\"'}
		tree[12] = {'omftype':'#include', 'argument':'\"lock_mode_schedule.glm\"'}
		tree[13] = {'omftype':'#include', 'argument':'\"control_priority_schedule.glm\"'}
		# Attach frequency player
		tree[14] = {'omftype':'class player', 'argument':'{double value;}'}
		tree[feeder.getMaxKey(tree)+1] = {'object':'player', 'file':'frequency.PLAYER', 'property':'value', 'name':'frequency', 'loop':0}
	# Set up GridBallast Controls
	totalWH = 0
	totalZIP = 0
	gbWH = 0
	gbZIP = 0
	for key in tree.keys():
		# Waterheater Controller properties
		if ('name' in tree[key]) and (tree[key].get('object') == 'waterheater'):
			totalWH += 1
			gbWH += 1
			# Frequency control parameters
			tree[key]['enable_freq_control'] = 'true'
			tree[key]['measured_frequency'] = 'frequency.value'
			tree[key]['freq_lowlimit'] = 59
			tree[key]['freq_uplimit'] = 61
			tree[key]['heat_mode'] = 'ELECTRIC'
			# tree[key]['average_delay_time'] = 60
			# Voltage control parameters
			# tree[key]['enable_volt_control'] = 'true'
			# tree[key]['volt_lowlimit'] = 240.4
			# tree[key]['volt_uplimit'] = 241.4
			# Lock Mode parameters
			# tree[key]['enable_lock'] = 'temp_lock_enable'
			# tree[key]['lock_STATUS'] = 'temp_lock_status'
			# Controller Priority: a.lock, b.freq, c.volt, d.therm
			tree[key]['controller_priority'] = 3214 #default:therm>lock>freq>volt
			# tree[key]['controller_priority'] = 1423 #freq>therm>volt>lock
			# tree[key]['controller_priority'] = 'control_priority'
			# fix waterheater property demand to water_demand for newer GridLAB-D versions
			if 'demand' in tree[key]:
				# tree[key]['water_demand'] = tree[key]['demand']
				tree[key]['water_demand'] = 'weekday_hotwater*1'
				del tree[key]['demand']
		# ZIPload Controller properties
		if ('name' in tree[key]) and (tree[key].get('object') == 'ZIPload'):
			totalZIP += 1
			# Only 'responsive' ZIPloads get GridBallast frequency control.
			if tree[key]['name'].startswith('responsive'):
				gbZIP += 1
				# Frequency control parameters
				tree[key]['enable_freq_control'] = 'true'
				tree[key]['measured_frequency'] = 'frequency.value'
				tree[key]['freq_lowlimit'] = 59
				tree[key]['freq_uplimit'] = 61
				# tree[key]['average_delay_time'] = 60
				# Voltage control parameters
				# tree[key]['enable_volt_control'] = 'true'
				# tree[key]['volt_lowlimit'] = 240.4
				# tree[key]['volt_uplimit'] = 241.4
				# Lock Mode parameters
				# tree[key]['enable_lock'] = 'temp_lock_enable'
				# tree[key]['lock_STATUS'] = 'temp_lock_status'
				tree[key]['controller_priority'] = 4321 #default:lock>freq>volt>therm
				# tree[key]['controller_priority'] = 2431 #freq>volt>lock>therm
				# tree[key]['groupid'] = 'fan'
	# Attach collector for total network load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=triplex_meter"', 'property':'sum(measured_real_power)', 'interval':60, 'file':'allMeterPower.csv'}
	# Attach collector for total waterheater load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=waterheater"', 'property':'sum(actual_load)', 'interval':60, 'file':'allWaterheaterLoad.csv'}
	# Attach collector for total ZIPload power/load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=ZIPload"', 'property':'sum(base_power)', 'interval':60, 'file':'allZIPloadPower.csv'}
	# Attach recorder for each ZIPload power/load
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'base_power', 'interval':60, 'file':'eachZIPloadPower.csv'}
	# Attach recorder for all ZIPloads demand_rate
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'demand_rate', 'interval':60, 'file':'allZIPloadDemand.csv'}
	# Attach recorder for waterheaters on/off
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'is_waterheater_on', 'interval':60, 'file':'allWaterheaterOn.csv'}
	# Attach recorder for waterheater tank temperatures
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'temperature', 'interval':60, 'file':'allWaterheaterTemp.csv'}
	# Attach recorders for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':60}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach recorders for system voltage map, triplex:
	stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':60}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'nVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# And get meters for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':60}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'mVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
	# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
	rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir))
	outData = {}
	# Std Err and Std Out
	outData['stderr'] = rawOut['stderr']
	outData['stdout'] = rawOut['stdout']
	# Time Stamps: take the first recorder CSV that carries a timestamp column.
	for key in rawOut:
		if '# timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# timestamp']
			break
		elif '# property.. timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# property.. timestamp']
		else:
			outData['timeStamps'] = []
	# Day/Month Aggregation Setup:
	stamps = outData.get('timeStamps',[])
	level = inputDict.get('simLengthUnits','hours')
	# Climate
	for key in rawOut:
		if key.startswith('Climate_') and key.endswith('.csv'):
			outData['climate'] = {}
			outData['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
			outData['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
			outData['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
			outData['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
			outData['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			#outData['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
			climateWbySFList = hdmAgg(rawOut[key].get('solar_global'), sum, level)
			#converting W/sf to W/sm
			climateWbySMList = [x*10.76392 for x in climateWbySFList]
			outData['climate']['Global Horizontal (W/sm)'] = climateWbySMList
	# Voltage Band
	if 'VoltageJiggle.csv' in rawOut:
		outData['allMeterVoltages'] = {}
		# NOTE(review): voltage_12 is divided by 2, presumably to convert 240 V
		# triplex readings to a 120 V base -- confirm against recorder setup.
		outData['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
		outData['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
	# Power Consumption
	outData['Consumption'] = {}
	# Set default value to be 0, avoiding missing value when computing Loads
	outData['Consumption']['Power'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['DG'] = [0] * int(inputDict["simLength"])
	for key in rawOut:
		if key.startswith('SwingKids_') and key.endswith('.csv'):
			oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
			if 'Power' not in outData['Consumption']:
				outData['Consumption']['Power'] = oneSwingPower
			else:
				outData['Consumption']['Power'] = vecSum(oneSwingPower,outData['Consumption']['Power'])
		elif key.startswith('Inverter_') and key.endswith('.csv'):
			realA = rawOut[key]['power_A.real']
			realB = rawOut[key]['power_B.real']
			realC = rawOut[key]['power_C.real']
			imagA = rawOut[key]['power_A.imag']
			imagB = rawOut[key]['power_B.imag']
			imagC = rawOut[key]['power_C.imag']
			oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key.startswith('Windmill_') and key.endswith('.csv'):
			vrA = rawOut[key]['voltage_A.real']
			vrB = rawOut[key]['voltage_B.real']
			vrC = rawOut[key]['voltage_C.real']
			viA = rawOut[key]['voltage_A.imag']
			viB = rawOut[key]['voltage_B.imag']
			viC = rawOut[key]['voltage_C.imag']
			crB = rawOut[key]['current_B.real']
			crA = rawOut[key]['current_A.real']
			crC = rawOut[key]['current_C.real']
			ciA = rawOut[key]['current_A.imag']
			ciB = rawOut[key]['current_B.imag']
			ciC = rawOut[key]['current_C.imag']
			# Apparent power per phase = |V| * |I|.
			powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
			powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
			powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
			oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
			realA = rawOut[key]['sum(power_losses_A.real)']
			imagA = rawOut[key]['sum(power_losses_A.imag)']
			realB = rawOut[key]['sum(power_losses_B.real)']
			imagB = rawOut[key]['sum(power_losses_B.imag)']
			realC = rawOut[key]['sum(power_losses_C.real)']
			imagC = rawOut[key]['sum(power_losses_C.imag)']
			oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'Losses' not in outData['Consumption']:
				outData['Consumption']['Losses'] = oneLoss
			else:
				outData['Consumption']['Losses'] = vecSum(oneLoss,outData['Consumption']['Losses'])
		elif key.startswith('Regulator_') and key.endswith('.csv'):
			#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
			regName = ""
			regName = key
			newkey = regName.split(".")[0]
			outData[newkey] = {}
			outData[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapA'] = rawOut[key]['tap_A']
			outData[newkey]['RegTapB'] = rawOut[key]['tap_B']
			outData[newkey]['RegTapC'] = rawOut[key]['tap_C']
			outData[newkey]['RegPhases'] = rawOut[key]['phases'][0]
		elif key.startswith('Capacitor_') and key.endswith('.csv'):
			capName = ""
			capName = key
			newkey = capName.split(".")[0]
			outData[newkey] = {}
			outData[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1A'] = rawOut[key]['switchA']
			outData[newkey]['Cap1B'] = rawOut[key]['switchB']
			outData[newkey]['Cap1C'] = rawOut[key]['switchC']
			outData[newkey]['CapPhases'] = rawOut[key]['phases'][0]
	# Print gridBallast Outputs to allOutputData.json
	outData['gridBallast'] = {}
	if 'allMeterPower.csv' in rawOut:
		outData['gridBallast']['totalNetworkLoad'] = [x / 1000 for x in rawOut.get('allMeterPower.csv')['sum(measured_real_power)']] #Convert W to kW
	if ('allZIPloadPower.csv' in rawOut) and ('allWaterheaterLoad.csv' in rawOut):
		# Controllable load available to shed = waterheater load + ZIPload base power.
		outData['gridBallast']['availabilityMagnitude'] = [x[0] + x[1] for x in zip(rawOut.get('allWaterheaterLoad.csv')['sum(actual_load)'], rawOut.get('allZIPloadPower.csv')['sum(base_power)'])]
	if 'allZIPloadDemand.csv' in rawOut:
		outData['gridBallast']['ZIPloadDemand'] = {}
		for key in rawOut['allZIPloadDemand.csv']:
			if (key.startswith('ZIPload')) or (key.startswith('responsive')) or (key.startswith('unresponsive')):
				outData['gridBallast']['ZIPloadDemand'][key] = rawOut.get('allZIPloadDemand.csv')[key]
	if 'eachZIPloadPower.csv' in rawOut:
		outData['gridBallast']['ZIPloadPower'] = {}
		for key in rawOut['eachZIPloadPower.csv']:
			if (key.startswith('ZIPload')) or (key.startswith('responsive')) or (key.startswith('unresponsive')):
				outData['gridBallast']['ZIPloadPower'][key] = rawOut.get('eachZIPloadPower.csv')[key]
	if 'allWaterheaterOn.csv' in rawOut:
		outData['gridBallast']['waterheaterOn'] = {}
		for key in rawOut['allWaterheaterOn.csv']:
			if (key.startswith('waterheater')) or (key.startswith('waterHeater')):
				outData['gridBallast']['waterheaterOn'][key] = rawOut.get('allWaterheaterOn.csv')[key]
	if 'allWaterheaterTemp.csv' in rawOut:
		outData['gridBallast']['waterheaterTemp'] = {}
		for key in rawOut['allWaterheaterTemp.csv']:
			if (key.startswith('waterheater')) or (key.startswith('waterHeater')):
				outData['gridBallast']['waterheaterTemp'][key] = rawOut.get('allWaterheaterTemp.csv')[key]
	# System check - linux doesn't support newer GridLAB-D versions
	if sys.platform == 'linux2':
		pass
	else:
		outData['gridBallast']['penetrationLevel'] = 100*(gbWH+gbZIP)/(totalWH+totalZIP)
		# Frequency Player
		inArray = feederJson['attachments']['frequency.PLAYER'].split('\n')
		tempArray = []
		for each in inArray:
			x = each.split(',')
			y = float(x[1])
			tempArray.append(y)
		outData['frequencyPlayer'] = tempArray
	# EventTime calculations
	eventTime = inputDict['eventTime']
	eventLength = inputDict['eventLength'].split(':')
	eventDuration = datetime.timedelta(hours=int(eventLength[0]), minutes=int(eventLength[1]))
	eventStart = datetime.datetime.strptime(eventTime, '%Y-%m-%d %H:%M')
	eventEnd = eventStart + eventDuration
	outData['gridBallast']['eventStart'] = str(eventStart)
	outData['gridBallast']['eventEnd'] = str(eventEnd)
	outData['gridBallast']['xMin'] = str(eventStart - datetime.timedelta(minutes=30))
	outData['gridBallast']['xMax'] = str(eventEnd + datetime.timedelta(minutes=30))
	# Convert string to date
	# HACK: remove timezones, inconsistency in matching format
	timeStampsDebug = [x[:19] for x in outData['timeStamps']]
	dateTimeStamps = [datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S') for x in timeStampsDebug]
	eventEndIdx = dateTimeStamps.index(eventEnd)
	# Recovery Time: first timestamp after the event where any waterheater is back on.
	whOn = outData['gridBallast']['waterheaterOn']
	whOnList = whOn.values()
	whOnZip = zip(*whOnList)
	whOnSum = [sum(x) for x in whOnZip]
	anyOn = [x > 0 for x in whOnSum]
	tRecIdx = anyOn.index(True, eventEndIdx)
	tRec = dateTimeStamps[tRecIdx]
	recoveryTime = tRec - eventEnd
	outData['gridBallast']['recoveryTime'] = str(recoveryTime)
	# Waterheaters Off-Duration
	offDuration = tRec - eventStart
	outData['gridBallast']['offDuration'] = str(offDuration)
	# Reserve Magnitude (RM)
	availMag = outData['gridBallast']['availabilityMagnitude']
	totalNetLoad = outData['gridBallast']['totalNetworkLoad']
	availPerc = [100 * x[0]/x[1] for x in zip(availMag,totalNetLoad)]
	outData['gridBallast']['availabilityPercent'] = availPerc
	outData['gridBallast']['rm'] = [100 - x for x in availPerc]
	# Average RM during event
	eventRM = [100 - x[1] for x in zip(dateTimeStamps, availPerc) if (x[0] == eventStart) or (x[0] == eventEnd)]
	outData['gridBallast']['rmAvg'] = np.mean(eventRM)
	# Reserve Magnitude Variability Tolerance (RMVT)
	outData['gridBallast']['rmvt'] = np.std(eventRM)
	# Availability: fraction of non-event timesteps with reserve above the rmt threshold.
	rmt = 7
	available = [x[1] > rmt for x in zip(dateTimeStamps, availPerc) if (x[0] < eventStart) or (x[0] > eventEnd)]
	outData['gridBallast']['availability'] = 100.0 * sum(available) / (int(inputDict['simLength']) - int(eventLength[1]) - 1)
	# Waterheater Temperature Drop calculations
	whTemp = outData['gridBallast']['waterheaterTemp']
	whTempList = whTemp.values()
	whTempZip = zip(*whTempList)
	whTempDrops = []
	LOWER_LIMIT_TEMP = 110 # Used for calculating quality of service. Typical hot shower temp = 105 F.
	for time in whTempZip:
		tempDrop = sum([t < LOWER_LIMIT_TEMP for t in time])
		whTempDrops.append(tempDrop)
	outData['gridBallast']['waterheaterTempDrops'] = whTempDrops
	# ZIPload calculations for Availability and QoS
	zPower = outData['gridBallast']['ZIPloadPower']
	zPowerList = zPower.values()
	zPowerZip = zip(*zPowerList)
	zDemand = outData['gridBallast']['ZIPloadDemand']
	zDemandList = zDemand.values()
	zDemandZip = zip(*zDemandList)
	zDrops = []
	for x, y in zip(zPowerZip,zDemandZip):
		# A quality drop is a timestep where a ZIPload draws no power despite demand.
		zDrop = 0
		for i in range(len(x)):
			if (x[i] == 0) and (y[i] > 0):
				zDrop += 1
		zDrops.append(zDrop)
	outData['gridBallast']['qualityDrops'] = [x + y for x, y in zip(zDrops, whTempDrops)]
	# What percentage of our keys have lat lon data?
	latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
	latPerc = 1.0*len(latKeys)/len(tree)
	# Use a neato (force-directed) layout when most objects lack coordinates.
	if latPerc < 0.25:
		doNeato = True
	else:
		doNeato = False
	# Generate the frames for the system voltage map time traveling chart.
	genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
	outData['genTime'] = genTime
	# Aggregate up the timestamps:
	if level=='days':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
	elif level=='months':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
	return outData
def voltPlot(omd, workDir=None, neatoLayout=False): ''' Draw a color-coded map of the voltage drop on a feeder. Returns a matplotlib object. ''' tree = omd.get('tree', {}) # # Get rid of schedules and climate: for key in tree.keys(): if tree[key].get("argument", "") == "\"schedules.glm\"" or tree[key].get( "tmyfile", "") != "": del tree[key] # Make sure we have a voltDump: def safeInt(x): try: return int(x) except: return 0 biggestKey = max([safeInt(x) for x in tree.keys()]) tree[str(biggestKey * 10)] = { "object": "voltdump", "filename": "voltDump.csv" } # Run Gridlab. if not workDir: workDir = tempfile.mkdtemp() print "gridlabD runInFilesystem with no specified workDir. Working in", workDir gridlabOut = gridlabd.runInFilesystem(tree, attachments=omd.get( 'attachments', {}), workDir=workDir) with open(pJoin(workDir, 'voltDump.csv'), 'r') as dumpFile: reader = csv.reader(dumpFile) reader.next() # Burn the header. keys = reader.next() voltTable = [] for row in reader: rowDict = {} for pos, key in enumerate(keys): rowDict[key] = row[pos] voltTable.append(rowDict) # Calculate average node voltage deviation. First, helper functions. def pythag(x, y): ''' For right triangle with sides a and b, return the hypotenuse. ''' return math.sqrt(x**2 + y**2) def digits(x): ''' Returns number of digits before the decimal in the float x. ''' return math.ceil(math.log10(x + 1)) def avg(l): ''' Average of a list of ints or floats. ''' return sum(l) / len(l) # Detect the feeder nominal voltage: for key in tree: ob = tree[key] if type(ob) == dict and ob.get('bustype', '') == 'SWING': feedVoltage = float(ob.get('nominal_voltage', 1)) # Tot it all up. 
nodeVolts = {} for row in voltTable: allVolts = [] for phase in ['A', 'B', 'C']: phaseVolt = pythag(float(row['volt' + phase + '_real']), float(row['volt' + phase + '_imag'])) if phaseVolt != 0.0: if digits(phaseVolt) > 3: # Normalize to 120 V standard phaseVolt = phaseVolt * (120 / feedVoltage) allVolts.append(phaseVolt) nodeVolts[row.get('node_name', '')] = avg(allVolts) # Color nodes by VOLTAGE. fGraph = feeder.treeToNxGraph(tree) voltChart = plt.figure(figsize=(15, 15)) plt.axes(frameon=0) plt.axis('off') #set axes step equal voltChart.gca().set_aspect('equal') if neatoLayout: # HACK: work on a new graph without attributes because graphViz tries to read attrs. cleanG = nx.Graph(fGraph.edges()) cleanG.add_nodes_from(fGraph) positions = graphviz_layout(cleanG, prog='neato') else: positions = {n: fGraph.node[n].get('pos', (0, 0)) for n in fGraph} edgeIm = nx.draw_networkx_edges(fGraph, positions) nodeIm = nx.draw_networkx_nodes( fGraph, pos=positions, node_color=[nodeVolts.get(n, 0) for n in fGraph.nodes()], linewidths=0, node_size=30, cmap=plt.cm.jet) plt.sci(nodeIm) plt.clim(110, 130) plt.colorbar() return voltChart
def heavyProcessing(modelDir, inputDict): ''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. ''' print "STARTING TO RUN", modelDir beginTime = datetime.datetime.now() # Get feeder name and data in. try: os.mkdir(pJoin(modelDir,'gldContainer')) except: pass try: feederName = inputDict["feederName1"] inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"]) shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), pJoin(modelDir, "gldContainer", "climate.tmy2")) startTime = datetime.datetime.now() feederJson = json.load(open(pJoin(modelDir, feederName+'.omd'))) tree = feederJson["tree"] #add a check to see if there is already a climate object in the omd file #if there is delete the climate from attachments and the climate object attachKeys = feederJson["attachments"].keys() for key in attachKeys: if key.endswith('.tmy2'): del feederJson['attachments'][key] treeKeys = feederJson["tree"].keys() for key in treeKeys: if 'object' in feederJson['tree'][key]: if feederJson['tree'][key]['object'] == 'climate': del feederJson['tree'][key] oldMax = feeder.getMaxKey(tree) tree[oldMax + 1] = {'omftype':'module','argument':'climate'} tree[oldMax + 2] = {'object':'climate','name':'Climate','interpolate':'QUADRATIC','tmyfile':'climate.tmy2'} # tree[oldMax + 3] = {'object':'capacitor','control':'VOLT','phases':'ABCN','name':'CAPTEST','parent':'tm_1','capacitor_A':'0.10 MVAr','capacitor_B':'0.10 MVAr','capacitor_C':'0.10 MVAr','time_delay':'300.0','nominal_voltage':'2401.7771','voltage_set_high':'2350.0','voltage_set_low':'2340.0','switchA':'CLOSED','switchB':'CLOSED','switchC':'CLOSED','control_level':'INDIVIDUAL','phases_connected':'ABCN','dwell_time':'0.0','pt_phases':'ABCN'} # Set up GLM with correct time and recorders: feeder.attachRecorders(tree, "Regulator", "object", "regulator") feeder.attachRecorders(tree, "Capacitor", "object", "capacitor") feeder.attachRecorders(tree, 
"Inverter", "object", "inverter") feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg") feeder.attachRecorders(tree, "CollectorVoltage", None, None) feeder.attachRecorders(tree, "Climate", "object", "climate") feeder.attachRecorders(tree, "OverheadLosses", None, None) feeder.attachRecorders(tree, "UndergroundLosses", None, None) feeder.attachRecorders(tree, "TriplexLosses", None, None) feeder.attachRecorders(tree, "TransformerLosses", None, None) feeder.groupSwingKids(tree) # Attach recorder for waterheaters on/off stub = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'is_waterheater_on', 'interval':3600, 'file':'allWaterheaterOn.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach recorder for waterheater tank temperatures stub = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'temperature', 'interval':3600, 'file':'allWaterheaterTemp.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach collector for total waterheater load stub = {'object':'collector', 'group':'"class=waterheater"', 'property':'sum(actual_load)', 'interval':3600, 'file':'allWaterheaterLoad.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach collector for total network load stub = {'object':'collector', 'group':'"class=triplex_meter"', 'property':'sum(measured_real_power)', 'interval':3600, 'file':'allMeterPower.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach collector for total overall ZIPload power/load stub = {'object':'collector', 'group':'"class=ZIPload"', 'property':'sum(base_power)', 'interval':3600, 'file':'allZIPloadPower.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach recorder for each ZIPload power/load stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'base_power', 'interval':3600, 'file':'eachZIPloadPower.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = 
copyStub # Attach recorder for all ZIPloads demand_rate stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'demand_rate', 'interval':3600, 'file':'allZIPloadDemand.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach recorder for all ZIPloads on stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'number_of_devices_on', 'interval':3600, 'file':'allZIPloadOn.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach passive_controller tree[feeder.getMaxKey(tree)+1] = {'omftype':'module','argument':'market'} tree[feeder.getMaxKey(tree)+1] = {'omftype':'class auction','argument':'{\n\tdouble my_avg; double my_std;\n}'} tree[feeder.getMaxKey(tree)+1] = {'omftype':'class player','argument':'{\n\tdouble value;\n}'} stub = { 'object':'player', 'name':'cppDays', 'file':'superCpp.player' } copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub stub = { 'object':'player', 'name':'superClearing', 'file':'superClearingPrice.player', 'loop':10 } copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub stub = { 'object':'auction', 'name':'MARKET_1', 'my_std':0.037953, 'period':900, 'my_avg':0.110000, 'current_market.clearing_price':'superClearing.value', 'special_mode':'BUYERS_ONLY', 'unit': 'kW' } copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub stub = { 'object':'passive_controller', 'name':'waterheater_controller_waterheater171923', 'parent':'waterheater171923', 'control_mode':'RAMP', 'range_high':5, 'range_low':-5, 'ramp_high':1, 'ramp_low':-1, 'period':900, 'setpoint':'is_waterheater_on', 'base_setpoint':1, 'expectation_object':'MARKET_1', 'expectation_property':'my_avg', 'observation_object':'MARKET_1', 'observation_property':'past_market.clearing_price', 'stdev_observation_property':'my_std', 'state_property':'override' } copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # stub = { # 'object':'passive_controller', # 
'name':'ZIPload_controller_ZIPload171922', # 'parent':'ZIPload171922', # 'control_mode':'RAMP', # 'range_high':5, # 'range_low':-5, # 'ramp_high':1, # 'ramp_low':-1, # 'period':900, # 'setpoint':'base_power' # 'base_setpoint':1, # 'expectation_object':'MARKET_1', # 'expectation_property':'my_avg', # 'observation_object':'MARKET_1', # 'observation_property':'past_market.clearing_price', # 'stdev_observation_property':'my_std' # 'state_property':'override' # } # copyStub = dict(stub) # tree[feeder.getMaxKey(tree)+1] = copyStub # Attach recorders for system voltage map: stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':3600} for phase in ['A','B','C']: copyStub = dict(stub) copyStub['property'] = 'voltage_' + phase copyStub['file'] = phase.lower() + 'VoltDump.csv' tree[feeder.getMaxKey(tree) + 1] = copyStub # Attach recorders for system voltage map, triplex: stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':3600} for phase in ['1','2']: copyStub = dict(stub) copyStub['property'] = 'voltage_' + phase copyStub['file'] = phase.lower() + 'nVoltDump.csv' tree[feeder.getMaxKey(tree) + 1] = copyStub # And get meters for system voltage map: stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':3600} for phase in ['1','2']: copyStub = dict(stub) copyStub['property'] = 'voltage_' + phase copyStub['file'] = phase.lower() + 'mVoltDump.csv' tree[feeder.getMaxKey(tree) + 1] = copyStub feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"]) # RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!) 
rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir,'gldContainer')) cleanOut = {} # Std Err and Std Out cleanOut['stderr'] = rawOut['stderr'] cleanOut['stdout'] = rawOut['stdout'] # Time Stamps for key in rawOut: print key if '# timestamp' in rawOut[key]: cleanOut['timeStamps'] = rawOut[key]['# timestamp'] break elif '# property.. timestamp' in rawOut[key]: cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp'] else: cleanOut['timeStamps'] = [] # Day/Month Aggregation Setup: stamps = cleanOut.get('timeStamps',[]) level = inputDict.get('simLengthUnits','hours') # Climate for key in rawOut: if key.startswith('Climate_') and key.endswith('.csv'): cleanOut['climate'] = {} cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level) cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level) cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level) cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level) cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level) #cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level) climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level) #converting W/sf to W/sm climateWbySMList= [x*10.76392 for x in climateWbySFList] cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList # Voltage Band if 'VoltageJiggle.csv' in rawOut: cleanOut['allMeterVoltages'] = {} cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level) cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level) cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in 
rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level) cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level) # Power Consumption cleanOut['Consumption'] = {} # Set default value to be 0, avoiding missing value when computing Loads cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"]) cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"]) cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"]) for key in rawOut: if key.startswith('SwingKids_') and key.endswith('.csv'): oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level) if 'Power' not in cleanOut['Consumption']: cleanOut['Consumption']['Power'] = oneSwingPower else: cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power']) elif key.startswith('Inverter_') and key.endswith('.csv'): realA = rawOut[key]['power_A.real'] realB = rawOut[key]['power_B.real'] realC = rawOut[key]['power_C.real'] imagA = rawOut[key]['power_A.imag'] imagB = rawOut[key]['power_B.imag'] imagC = rawOut[key]['power_C.imag'] oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level) if 'DG' not in cleanOut['Consumption']: cleanOut['Consumption']['DG'] = oneDgPower else: cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG']) elif key.startswith('Windmill_') and key.endswith('.csv'): vrA = rawOut[key]['voltage_A.real'] vrB = rawOut[key]['voltage_B.real'] vrC = rawOut[key]['voltage_C.real'] viA = rawOut[key]['voltage_A.imag'] viB = rawOut[key]['voltage_B.imag'] viC = rawOut[key]['voltage_C.imag'] crB = rawOut[key]['current_B.real'] crA = rawOut[key]['current_A.real'] crC = rawOut[key]['current_C.real'] ciA = rawOut[key]['current_A.imag'] ciB = rawOut[key]['current_B.imag'] ciC = rawOut[key]['current_C.imag'] powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA)) 
powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB)) powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC)) oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level) if 'DG' not in cleanOut['Consumption']: cleanOut['Consumption']['DG'] = oneDgPower else: cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG']) elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']: realA = rawOut[key]['sum(power_losses_A.real)'] imagA = rawOut[key]['sum(power_losses_A.imag)'] realB = rawOut[key]['sum(power_losses_B.real)'] imagB = rawOut[key]['sum(power_losses_B.imag)'] realC = rawOut[key]['sum(power_losses_C.real)'] imagC = rawOut[key]['sum(power_losses_C.imag)'] oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level) if 'Losses' not in cleanOut['Consumption']: cleanOut['Consumption']['Losses'] = oneLoss else: cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses']) elif key.startswith('Regulator_') and key.endswith('.csv'): #split function to strip off .csv from filename and user rest of the file name as key. 
for example- Regulator_VR10.csv -> key would be Regulator_VR10 regName="" regName = key newkey=regName.split(".")[0] cleanOut[newkey] ={} cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A'] cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B'] cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C'] cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0] elif key.startswith('Capacitor_') and key.endswith('.csv'): capName="" capName = key newkey=capName.split(".")[0] cleanOut[newkey] ={} cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA'] cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB'] cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC'] cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0] # Print gridBallast Outputs to allOutputData.json cleanOut['gridBallast'] = {} if 'allWaterheaterOn.csv' in rawOut: cleanOut['gridBallast']['waterheaterOn'] = {} for key in rawOut['allWaterheaterOn.csv']: if key.startswith('waterheater'): cleanOut['gridBallast']['waterheaterOn'][key] = rawOut.get('allWaterheaterOn.csv')[key] if 'allWaterheaterTemp.csv' in rawOut: cleanOut['gridBallast']['waterheaterTemp'] = {} for key in rawOut['allWaterheaterTemp.csv']: if key.startswith('waterheater'): cleanOut['gridBallast']['waterheaterTemp'][key] = rawOut.get('allWaterheaterTemp.csv')[key] if 'allMeterPower.csv' in rawOut: cleanOut['gridBallast']['totalNetworkLoad'] = rawOut.get('allMeterPower.csv')['sum(measured_real_power)'] if ('allWaterheaterLoad.csv' in rawOut) and ('allZIPloadPower.csv' in rawOut): cleanOut['gridBallast']['availabilityMagnitude'] = [x + y for x, y in 
zip(rawOut.get('allWaterheaterLoad.csv')['sum(actual_load)'], rawOut.get('allZIPloadPower.csv')['sum(base_power)'])] if 'eachZIPloadPower.csv' in rawOut: cleanOut['gridBallast']['ZIPloadPower'] = {} for key in rawOut['eachZIPloadPower.csv']: if key.startswith('ZIPload'): cleanOut['gridBallast']['ZIPloadPower'][key] = rawOut.get('eachZIPloadPower.csv')[key] if 'allZIPloadDemand.csv' in rawOut: cleanOut['gridBallast']['ZIPloadDemand'] = {} for key in rawOut['allZIPloadDemand.csv']: if key.startswith('ZIPload'): cleanOut['gridBallast']['ZIPloadDemand'][key] = rawOut.get('allZIPloadDemand.csv')[key] if 'allZIPloadOn.csv' in rawOut: cleanOut['gridBallast']['ZIPloadOn'] = {} for key in rawOut['allZIPloadOn.csv']: if key.startswith('ZIPload'): cleanOut['gridBallast']['ZIPloadOn'][key] = rawOut.get('allZIPloadOn.csv')[key] # EventTime calculations eventTime = inputDict['eventTime'] eventLength = inputDict['eventLength'] eventLength = eventLength.split(':') eventDuration = datetime.timedelta(hours=int(eventLength[0]), minutes=int(eventLength[1])) eventStart = datetime.datetime.strptime(eventTime, '%Y-%m-%d %H:%M') eventEnd = eventStart + eventDuration cleanOut['gridBallast']['eventStart'] = str(eventStart) cleanOut['gridBallast']['eventEnd'] = str(eventEnd) # Drop timezone from timeStamp, Convert string to date timeStamps = [x[:19] for x in cleanOut['timeStamps']] dateTimeStamps = [datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S') for x in timeStamps] eventEndIdx = dateTimeStamps.index(eventEnd) # Recovery Time whOn = cleanOut['gridBallast']['waterheaterOn'] whOnList = whOn.values() whOnZip = zip(*whOnList) whOnSum = [sum(x) for x in whOnZip] anyOn = [x > 0 for x in whOnSum] tRecIdx = anyOn.index(True, eventEndIdx) tRec = dateTimeStamps[tRecIdx] cleanOut['gridBallast']['recoveryTime'] = str(tRec) # Waterheaters Off-Duration offDuration = tRec - eventStart cleanOut['gridBallast']['offDuration'] = str(offDuration) # Reserve Magnitude Target (RMT) availMag = 
cleanOut['gridBallast']['availabilityMagnitude'] totNetLoad = cleanOut['gridBallast']['totalNetworkLoad'] # loadZip = zip(availMag,totNetLoad) # rmt = [x[0]/x[1] for x in loadZip] rmt = (1000*sum(availMag))/sum(totNetLoad) cleanOut['gridBallast']['rmt'] = rmt # Reserve Magnitude Variability Tolerance (RMVT) avgAvailMag = sum(availMag)/len(availMag) rmvtMax = max(availMag)/avgAvailMag rmvtMin = min(availMag)/avgAvailMag rmvt = rmvtMax - rmvtMin cleanOut['gridBallast']['rmvt'] = rmvt # Availability notAvail = availMag.count(0)/len(timeStamps) avail = (1-notAvail)*100 cleanOut['gridBallast']['availability'] = avail # Waterheater Temperature Drop calculations whTemp = cleanOut['gridBallast']['waterheaterTemp'] whTempList = whTemp.values() whTempZip = zip(*whTempList) whTempDrops = [] LOWER_LIMIT_TEMP = 125 # Used for calculating quality of service. for time in whTempZip: tempDrop = sum([t < LOWER_LIMIT_TEMP for t in time]) whTempDrops.append(tempDrop) cleanOut['gridBallast']['waterheaterTempDrops'] = whTempDrops # ZIPload calculations for Availability and QoS zPower = cleanOut['gridBallast']['ZIPloadPower'] zPowerList = zPower.values() zPowerZip = zip(*zPowerList) zPowerSum = [sum(x) for x in zPowerZip] zDemand = cleanOut['gridBallast']['ZIPloadDemand'] zDemandList = zDemand.values() zDemandZip = zip(*zDemandList) zDrops = [] for time in zDemandZip: for each in zPowerZip: zIdx = 0 if each[zIdx] == 0: zPowerIdx += 1 zDrop = sum([t > 0 for t in time]) zDrops.append(zDrop) else: zDrops.append(0) cleanOut['gridBallast']['qualityDrops'] = [x + y for x, y in zip(whTempDrops, zDrops)] # What percentage of our keys have lat lon data? latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]] latPerc = 1.0*len(latKeys)/len(tree) if latPerc < 0.25: doNeato = True else: doNeato = False # Generate the frames for the system voltage map time traveling chart. 
genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato) cleanOut['genTime'] = genTime # Aggregate up the timestamps: if level=='days': cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days') elif level=='months': cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months') # Write the output. with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile: json.dump(cleanOut, outFile, indent=4) # Update the runTime in the input file. endTime = datetime.datetime.now() inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds()))) with open(pJoin(modelDir, "allInputData.json"),"w") as inFile: json.dump(inputDict, inFile, indent=4) # Clean up the PID file. os.remove(pJoin(modelDir, "gldContainer", "PID.txt")) print "DONE RUNNING", modelDir except Exception as e: # If input range wasn't valid delete output, write error to disk. cancel(modelDir) thisErr = traceback.format_exc() print 'ERROR IN MODEL', modelDir, thisErr inputDict['stderr'] = thisErr with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile: errorFile.write(thisErr) with open(pJoin(modelDir,"allInputData.json"),"w") as inFile: json.dump(inputDict, inFile, indent=4) finishTime = datetime.datetime.now() inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds()))) with open(pJoin(modelDir, "allInputData.json"),"w") as inFile: json.dump(inputDict, inFile, indent = 4) try: os.remove(pJoin(modelDir,"PPID.txt")) except: pass
# Insert a manual fault-event generator into the feeder tree at the next free
# key. FAULT_TYPE / FAULT_LOCATION / FAULT_START_TIME / FAULT_STOP_TIME are
# module-level constants defined elsewhere in this file.
tree[feeder.getMaxKey(tree) + 1] = {
	'object': 'eventgen',
	'name': 'ManualEventGen',
	'parent': 'RelMetrics',
	# GridLAB-D GLM values containing commas must be quoted, hence the
	# embedded escaped double quotes.
	'fault_type': '\"' + FAULT_TYPE + '\"',
	'manual_outages': '\"' + FAULT_LOCATION + ',' + \
		FAULT_START_TIME + ',' + \
		FAULT_STOP_TIME + '\"'
}
# Run Gridlab ----------------------------------------------------------------------
start = time.time()
gridlabOut = gridlabd.runInFilesystem(
	tree,
	attachments=attachments,
	workDir=WORKING_DIR
)
end = time.time()
# Report wall-clock runtime of the GridLAB-D run in minutes.
print((end - start)/60.0)
# Generate clean csv ---------------------------------------------------------------
# Re-read the meter recorder CSV produced by the run and rewrite it with two
# extra header columns.
with open( METER_FILENAME,'r' ) as meterFile:
	reader = csv.reader(meterFile, delimiter=',')
	with open( OUTPUT_FILENAME, 'w' ) as outputFile:
		writer = csv.writer(outputFile, delimiter=',')
		#loop past header
		for row in reader:
			# Only the header row (the one containing '# timestamp') is
			# extended here with 'fault' and 'meterID' columns.
			if '# timestamp' in row:
				row.append('fault')
				row.append('meterID')
				# NOTE(review): no writer.writerow() and no handling of data
				# rows is visible here — this loop appears truncated in this
				# chunk; confirm the remainder against the full file.
def runForeground(modelDir, inData, fs):
    '''This reads a glm file, changes the method of powerflow and reruns.

    Full pipeline: calibrate the feeder against SCADA data, run a baseline
    GridLAB-D powerflow, attach a volt_var_control (IVVC) object, rerun,
    then compare losses/voltages/taps and monetize the savings into charts
    and allOutputData.json.

    modelDir -- working directory for this model run (files written here).
    inData   -- dict of model inputs (feederName, scadaFile, simLengthHours,
                simStart, cost parameters, ...).
    fs       -- filesystem abstraction with export_from_fs_to_local()/save().
    '''
    try:
        startTime = datetime.now()
        # calibrate and run cvrdynamic
        # feederName is "<subdir>___<feeder>"; split it to build the path.
        feederPath = pJoin("data", "Feeder", inData[
            "feederName"].split("___")[0], inData["feederName"].split("___")[1] + '.json')
        fs.export_from_fs_to_local(feederPath, feederPath)
        scadaPath = pJoin("uploads", (inData["scadaFile"] + '.tsv'))
        fs.export_from_fs_to_local(scadaPath, scadaPath)
        # Writes calibratedFeeder.json into modelDir (read back below).
        omf.calibrate.omfCalibrate(modelDir, feederPath, scadaPath)
        allOutput = {}
        print "here"
        with open(pJoin(modelDir, "calibratedFeeder.json"), "r") as jsonIn:
            feederJson = json.load(jsonIn)
            localTree = feederJson.get("tree", {})
        # Force the Forward-Backward-Sweep solver on every object that has a
        # solver_method property.
        for key in localTree:
            if "solver_method" in localTree[key].keys():
                print "current solver method", localTree[key]["solver_method"]
                localTree[key]["solver_method"] = 'FBS'
        # find the swing bus and recorder attached to substation
        # NOTE(review): swingName/regIndex are only bound if a swing bus and a
        # matching regulator exist; otherwise a NameError follows. Dict
        # iteration order is also relied on so the swing bus is seen before
        # its regulator — confirm this holds for all feeders.
        for key in localTree:
            if localTree[key].get('bustype', '').lower() == 'swing':
                swingIndex = key
                swingName = localTree[key].get('name')
            if localTree[key].get('object', '') == 'regulator' and localTree[key].get('from', '') == swingName:
                regIndex = key
                regConfName = localTree[key]['configuration']
        # find the regulator and capacitor names and combine to form a string
        # for volt-var control object
        regKeys = []
        accum_reg = ""
        for key in localTree:
            if localTree[key].get("object", "") == "regulator":
                accum_reg += localTree[key].get("name", "ERROR") + ","
                regKeys.append(key)
        # Strip the trailing comma to form the comma-separated list GridLAB-D
        # expects.
        regstr = accum_reg[:-1]
        print regKeys
        capKeys = []
        accum_cap = ""
        for key in localTree:
            if localTree[key].get("object", "") == "capacitor":
                accum_cap += localTree[key].get("name", "ERROR") + ","
                capKeys.append(key)
                # IVVC needs capacitors under VOLT control, not MANUAL.
                if localTree[key].get("control", "").lower() == "manual":
                    localTree[key]['control'] = "VOLT"
                    print "changing capacitor control from manual to volt"
        capstr = accum_cap[:-1]
        print capKeys
        # Attach recorders relevant to CVR.
        # The 'Z' filename prefix marks the baseline run; it is rewritten to
        # 'NewZ' before the IVVC run below.
        recorders = [
            {'object': 'collector', 'file': 'ZlossesTransformer.csv', 'group': 'class=transformer', 'limit': '0',
             'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
            {'object': 'collector', 'file': 'ZlossesUnderground.csv', 'group': 'class=underground_line', 'limit': '0',
             'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
            {'object': 'collector', 'file': 'ZlossesOverhead.csv', 'group': 'class=overhead_line', 'limit': '0',
             'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
            {'object': 'recorder', 'file': 'Zregulator.csv', 'limit': '0', 'parent': localTree[regIndex]['name'],
             'property': 'tap_A,tap_B,tap_C,power_in.real,power_in.imag'},
            {'object': 'collector', 'file': 'ZvoltageJiggle.csv', 'group': 'class=triplex_meter', 'limit': '0',
             'property': 'min(voltage_12.mag),mean(voltage_12.mag),max(voltage_12.mag),std(voltage_12.mag)'},
            {'object': 'recorder', 'file': 'ZsubstationTop.csv', 'limit': '0', 'parent': localTree[swingIndex]['name'],
             'property': 'voltage_A,voltage_B,voltage_C'},
            {'object': 'recorder', 'file': 'ZsubstationBottom.csv', 'limit': '0', 'parent': localTree[regIndex]['to'],
             'property': 'voltage_A,voltage_B,voltage_C'}]
        # recorder object for capacitor switching - if capacitors exist
        if capKeys != []:
            for key in capKeys:
                recorders.append({'object': 'recorder', 'file': 'ZcapSwitch' + str(key) + '.csv', 'limit': '0',
                                  'parent': localTree[key]['name'], 'property': 'switchA,switchB,switchC'})
        # attach recorder process
        # Recorders get fresh tree keys starting just past the current max.
        biggest = 1 + max([int(k) for k in localTree.keys()])
        for index, rec in enumerate(recorders):
            localTree[biggest + index] = rec
        # run a reference load flow
        HOURS = float(inData['simLengthHours'])
        simStartDate = inData['simStart']
        feeder.adjustTime(localTree, HOURS, "hours", simStartDate)
        output = gridlabd.runInFilesystem(
            localTree, keepFiles=False, workDir=modelDir)
        os.remove(pJoin(modelDir, "PID.txt"))
        # Baseline (no-IVVC) real and reactive power at the regulator.
        p = output['Zregulator.csv']['power_in.real']
        q = output['Zregulator.csv']['power_in.imag']
        # calculating length of simulation because it migth be different from
        # the simulation input HOURS
        simRealLength = int(len(p))
        # time delays from configuration files
        time_delay_reg = '30.0'
        time_delay_cap = '300.0'
        for key in localTree:
            if localTree[key].get('object', '') == "regulator_configuration":
                time_delay_reg = localTree[key]['time_delay']
                print "time_delay_reg", time_delay_reg
            # if localTree[key].get('object','') == "capacitor":
            #     time_delay_cap = localTree[key]['time_delay']
            #     print "time_delay_cap",time_delay_cap
        # change the recorder names
        # Prefix every Z* recorder file with 'New' so the second run does not
        # clobber the baseline CSVs.
        for key in localTree:
            if localTree[key].get('object', '') == "collector" or localTree[key].get('object', '') == "recorder":
                if localTree[key].get('file', '').startswith('Z'):
                    localTree[key]['file'] = localTree[key].get(
                        'file', '').replace('Z', 'NewZ')
        # create volt-var control object
        max_key = max([int(key) for key in localTree.keys()])
        print max_key
        localTree[max_key + 1] = {'object': 'volt_var_control',
                                  'name': 'IVVC1',
                                  'control_method': 'ACTIVE',
                                  'capacitor_delay': str(time_delay_cap),
                                  'regulator_delay': str(time_delay_reg),
                                  'desired_pf': '0.99',
                                  'd_max': '0.6',
                                  'd_min': '0.1',
                                  'substation_link': str(localTree[regIndex]['name']),
                                  'regulator_list': regstr,
                                  'capacitor_list': capstr}
        # running powerflow analysis via gridalab after attaching a regulator
        feeder.adjustTime(localTree, HOURS, "hours", simStartDate)
        output1 = gridlabd.runInFilesystem(
            localTree, keepFiles=True, workDir=modelDir)
        os.remove(pJoin(modelDir, "PID.txt"))
        # IVVC-run real and reactive power at the regulator.
        pnew = output1['NewZregulator.csv']['power_in.real']
        qnew = output1['NewZregulator.csv']['power_in.imag']
        # total real and imaginary losses as a function of time

        def vecSum(u, v):
            ''' Add vectors u and v element-wise. Return has len <= len(u) and <=len(v). '''
            return map(sum, zip(u, v))

        def zeroVec(length):
            ''' Give a zero vector of input length. '''
            return [0 for x in xrange(length)]
        # Accumulate per-timestep losses across all three loss collectors and
        # all three phases, for both runs.
        (realLoss, imagLoss, realLossnew, imagLossnew) = (zeroVec(int(HOURS))
                                                          for x in range(4))
        for device in ['ZlossesOverhead.csv', 'ZlossesTransformer.csv', 'ZlossesUnderground.csv']:
            for letter in ['A', 'B', 'C']:
                realLoss = vecSum(
                    realLoss, output[device]['sum(power_losses_' + letter + '.real)'])
                imagLoss = vecSum(
                    imagLoss, output[device]['sum(power_losses_' + letter + '.imag)'])
                realLossnew = vecSum(
                    realLossnew, output1['New' + device]['sum(power_losses_' + letter + '.real)'])
                imagLossnew = vecSum(
                    imagLossnew, output1['New' + device]['sum(power_losses_' + letter + '.imag)'])
        # voltage calculations and tap calculations

        def divby2(u):
            '''divides by 2'''
            # voltage_12 spans both legs of the triplex; halving gives the
            # 120V-leg equivalent.
            return u / 2
        lowVoltage = []
        meanVoltage = []
        highVoltage = []
        lowVoltagenew = []
        meanVoltagenew = []
        highVoltagenew = []
        tap = {'A': [], 'B': [], 'C': []}
        tapnew = {'A': [], 'B': [], 'C': []}
        volt = {'A': [], 'B': [], 'C': []}
        voltnew = {'A': [], 'B': [], 'C': []}
        switch = {'A': [], 'B': [], 'C': []}
        switchnew = {'A': [], 'B': [], 'C': []}
        for letter in ['A', 'B', 'C']:
            tap[letter] = output['Zregulator.csv']['tap_' + letter]
            tapnew[letter] = output1['NewZregulator.csv']['tap_' + letter]
            # Only the first capacitor's switch states are charted.
            if capKeys != []:
                switch[letter] = output[
                    'ZcapSwitch' + str(int(capKeys[0])) + '.csv']['switch' + letter]
                switchnew[letter] = output1[
                    'NewZcapSwitch' + str(int(capKeys[0])) + '.csv']['switch' + letter]
            volt[letter] = map(
                returnMag, output['ZsubstationBottom.csv']['voltage_' + letter])
            voltnew[letter] = map(
                returnMag, output1['NewZsubstationBottom.csv']['voltage_' + letter])
        lowVoltage = map(
            divby2, output['ZvoltageJiggle.csv']['min(voltage_12.mag)'])
        lowVoltagenew = map(
            divby2, output1['NewZvoltageJiggle.csv']['min(voltage_12.mag)'])
        meanVoltage = map(
            divby2, output['ZvoltageJiggle.csv']['mean(voltage_12.mag)'])
        meanVoltagenew = map(
            divby2, output1['NewZvoltageJiggle.csv']['mean(voltage_12.mag)'])
        highVoltage = map(
            divby2, output['ZvoltageJiggle.csv']['max(voltage_12.mag)'])
        highVoltagenew = map(
            divby2, output1['NewZvoltageJiggle.csv']['max(voltage_12.mag)'])
        # energy calculations
        # Index 0 = baseline (no IVVC), index 1 = with IVVC; values in MWh
        # (watt-hours summed, divided by 10**6).
        whEnergy = []
        whLosses = []
        whLoads = []
        whEnergy.append(sum(p) / 10**6)
        whLosses.append(sum(realLoss) / 10**6)
        whLoads.append((sum(p) - sum(realLoss)) / 10**6)
        whEnergy.append(sum(pnew) / 10**6)
        whLosses.append(sum(realLossnew) / 10**6)
        whLoads.append((sum(pnew) - sum(realLossnew)) / 10**6)
        indices = ['No IVVC', 'With IVVC']
        # energySalesRed = (whLoads[1]-whLoads[0])*(inData['wholesaleEnergyCostPerKwh'])*1000
        # lossSav = (whLosses[0]-whLosses[1])*inData['wholesaleEnergyCostPerKwh']*1000
        # print energySalesRed, lossSav
        # plots
        ticks = []
        plt.clf()
        plt.title("total energy")
        plt.ylabel("total load and losses (MWh)")
        for element in range(2):
            ticks.append(element)
            bar_loss = plt.bar(element, whLosses[element], 0.15, color='red')
            bar_load = plt.bar(
                element + 0.15, whLoads[element], 0.15, color='orange')
        plt.legend([bar_load[0], bar_loss[0]], ['total load', 'total losses'],
                   bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1)
        plt.xticks([t + 0.15 for t in ticks], indices)
        Plot.save_fig(plt, pJoin(modelDir, "totalEnergy.png"))
        # real and imaginary power
        plt.figure("real power")
        plt.title("Real Power at substation")
        plt.ylabel("substation real power (MW)")
        pMW = [element / 10**6 for element in p]
        pMWn = [element / 10**6 for element in pnew]
        pw = plt.plot(pMW)
        npw = plt.plot(pMWn)
        plt.legend([pw[0], npw[0]], ['NO IVVC', 'WITH IVVC'],
                   bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1)
        Plot.save_fig(plt, pJoin(modelDir, "realPower.png"))
        plt.figure("Reactive power")
        plt.title("Reactive Power at substation")
        plt.ylabel("substation reactive power (MVAR)")
        qMVAR = [element / 10**6 for element in q]
        qMVARn = [element / 10**6 for element in qnew]
        iw = plt.plot(qMVAR)
        niw = plt.plot(qMVARn)
        plt.legend([iw[0], niw[0]], ['NO IVVC', 'WITH IVVC'],
                   bbox_to_anchor=(0., 0.915, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1)
        Plot.save_fig(plt, pJoin(modelDir, "imaginaryPower.png"))
        # voltage plots
        plt.figure("voltages as a function of time")
        f, ax = plt.subplots(2, sharex=True)
        f.suptitle("Min and Max voltages on the feeder")
        lv = ax[0].plot(lowVoltage, color='cadetblue')
        mv = ax[0].plot(meanVoltage, color='blue')
        hv = ax[0].plot(highVoltage, color='cadetblue')
        ax[0].legend([lv[0], mv[0], hv[0]], ['low voltage', 'mean voltage', 'high voltage'],
                     bbox_to_anchor=(0., 0.915, 1., .1), loc=3, ncol=3, mode="expand", borderaxespad=0.1)
        ax[0].set_ylabel('NO IVVC')
        nlv = ax[1].plot(lowVoltagenew, color='cadetblue')
        nmv = ax[1].plot(meanVoltagenew, color='blue')
        nhv = ax[1].plot(highVoltagenew, color='cadetblue')
        ax[1].set_ylabel('WITH IVVC')
        Plot.save_fig(plt, pJoin(modelDir, "Voltages.png"))
        # tap positions
        # Rows 0-2: baseline taps A/B/C; rows 3-5: IVVC taps A/B/C.
        plt.figure("TAP positions NO IVVC")
        f, ax = plt.subplots(6, sharex=True)
        f.set_size_inches(10, 12.0)
        #f.suptitle("Regulator Tap positions")
        ax[0].plot(tap['A'])
        ax[0].set_title("Regulator Tap positions NO IVVC")
        ax[0].set_ylabel("TAP A")
        ax[1].plot(tap['B'])
        ax[1].set_ylabel("TAP B")
        ax[2].plot(tap['C'])
        ax[2].set_ylabel("TAP C")
        ax[3].plot(tapnew['A'])
        ax[3].set_title("WITH IVVC")
        ax[3].set_ylabel("TAP A")
        ax[4].plot(tapnew['B'])
        ax[4].set_ylabel("TAP B")
        ax[5].plot(tapnew['C'])
        ax[5].set_ylabel("TAP C")
        for subplot in range(6):
            ax[subplot].set_ylim(-20, 20)
        f.tight_layout()
        Plot.save_fig(plt, pJoin(modelDir, "RegulatorTAPpositions.png"))
        # substation voltages
        plt.figure("substation voltage as a function of time")
        f, ax = plt.subplots(6, sharex=True)
        f.set_size_inches(10, 12.0)
        #f.suptitle("voltages at substation NO IVVC")
        ax[0].plot(volt['A'])
        ax[0].set_title('Substation voltages NO IVVC')
        ax[0].set_ylabel('voltage A')
        ax[1].plot(volt['B'])
        ax[1].set_ylabel('voltage B')
        ax[2].plot(volt['C'])
        ax[2].set_ylabel('voltage C')
        ax[3].plot(voltnew['A'])
        ax[3].set_title("WITH IVVC")
        ax[3].set_ylabel('voltage A')
        ax[4].plot(voltnew['B'])
        ax[4].set_ylabel('voltage B')
        ax[5].plot(voltnew['C'])
        ax[5].set_ylabel('voltage C')
        f.tight_layout()
        Plot.save_fig(plt, pJoin(modelDir, "substationVoltages.png"))
        # cap switches
        plt.figure("capacitor switch state as a function of time")
        f, ax = plt.subplots(6, sharex=True)
        f.set_size_inches(10, 12.0)
        #f.suptitle("Capacitor switch state NO IVVC")
        ax[0].plot(switch['A'])
        ax[0].set_title("Capacitor switch state NO IVVC")
        ax[0].set_ylabel("switch A")
        ax[1].plot(switch['B'])
        ax[1].set_ylabel("switch B")
        ax[2].plot(switch['C'])
        ax[2].set_ylabel("switch C")
        ax[3].plot(switchnew['A'])
        ax[3].set_title("WITH IVVC")
        ax[3].set_ylabel("switch A")
        ax[4].plot(switchnew['B'])
        ax[4].set_ylabel("switch B")
        ax[5].plot(switchnew['C'])
        ax[5].set_ylabel("switch C")
        for subplot in range(6):
            ax[subplot].set_ylim(-2, 2)
        f.tight_layout()
        Plot.save_fig(plt, pJoin(modelDir, "capacitorSwitch.png"))
        # plt.show()
        # monetization
        monthNames = ["January", "February", "March", "April", "May", "June", "July", "August",
                      "September", "October", "November", "December"]
        monthToSeason = {'January': 'Winter', 'February': 'Winter', 'March': 'Spring', 'April': 'Spring',
                         'May': 'Spring', 'June': 'Summer', 'July': 'Summer', 'August': 'Summer',
                         'September': 'Fall', 'October': 'Fall', 'November': 'Fall', 'December': 'Winter'}
        # calculate the month and hour of simulation start and month and hour
        # of simulation end
        simStartTimestamp = simStartDate + " 00:00:00"
        simFormattedDate = datetime.strptime(
            simStartTimestamp, "%Y-%m-%d %H:%M:%S")
        simStartMonthNum = int(simFormattedDate.strftime('%m'))
        simstartMonth = monthNames[simStartMonthNum - 1]
        simStartDay = int(simFormattedDate.strftime('%d'))
        if calendar.isleap(int(simFormattedDate.strftime('%Y'))):
            febDays = 29
        else:
            febDays = 28
        monthHours = [int(31 * 24), int(febDays * 24), int(31 * 24), int(30 * 24), int(31 * 24), int(
            30 * 24), int(31 * 24), int(31 * 24), int(30 * 24), int(31 * 24), int(30 * 24), int(31 * 24)]
        # Hour-of-year index at which the simulation begins.
        simStartIndex = int(
            sum(monthHours[:(simStartMonthNum - 1)]) + (simStartDay - 1) * 24)
        temp = 0
        cumulHours = [0]
        for x in range(12):
            temp += monthHours[x]
            cumulHours.append(temp)
        # Locate the month in which the simulation ends.
        # NOTE(review): if the end hour falls outside every bracket (e.g. the
        # sim wraps past year end), simEndMonth stays unbound — confirm
        # callers can't trigger that.
        for i in range((simStartMonthNum), 13):
            if int(simStartIndex + simRealLength) <= cumulHours[i] and int(simStartIndex + simRealLength) > cumulHours[i - 1]:
                simEndMonthNum = i - 1
                simEndMonth = monthNames[simEndMonthNum]
        print simstartMonth, simEndMonth
        # calculate peaks for the number of months in simulation
        previndex = 0
        monthPeak = {}
        monthPeakNew = {}
        peakSaveDollars = {}
        energyLostDollars = {}
        lossRedDollars = {}
        simMonthList = monthNames[
            monthNames.index(simstartMonth):(monthNames.index(simEndMonth) + 1)]
        print simMonthList
        for monthElement in simMonthList:
            print monthElement
            month = monthNames.index(monthElement)
            # [index1, index2) slices this month's hours out of the sim data.
            index1 = int(previndex)
            index2 = int(min((index1 + int(monthHours[month])), simRealLength))
            # Peak demand in kW for each run.
            monthPeak[monthElement] = max(p[index1:index2]) / 1000.0
            monthPeakNew[monthElement] = max(pnew[index1:index2]) / 1000.0
            peakSaveDollars[monthElement] = (monthPeak[monthElement] - monthPeakNew[monthElement]) * float(
                inData['peakDemandCost' + str(monthToSeason[monthElement]) + 'PerKw'])
            lossRedDollars[monthElement] = (sum(realLoss[index1:index2]) / 1000.0 - sum(
                realLossnew[index1:index2]) / 1000.0) * (float(inData['wholesaleEnergyCostPerKwh']))
            # Net energy-sales change valued at the wholesale/retail margin.
            energyLostDollars[monthElement] = (sum(p[index1:index2]) / 1000.0 - sum(pnew[index1:index2]) / 1000.0
                                               - sum(realLoss[index1:index2]) / 1000.0 + sum(realLossnew[index1:index2]) / 1000.0) \
                * (float(inData['wholesaleEnergyCostPerKwh']) - float(inData['retailEnergyCostPerKwh']))
            previndex = index2
        # money charts
        fig = plt.figure("cost benefit barchart", figsize=(10, 8))
        ticks = range(len(simMonthList))
        ticks1 = [element + 0.15 for element in ticks]
        ticks2 = [element + 0.30 for element in ticks]
        print ticks
        eld = [energyLostDollars[month] for month in simMonthList]
        lrd = [lossRedDollars[month] for month in simMonthList]
        psd = [peakSaveDollars[month] for month in simMonthList]
        bar_eld = plt.bar(ticks, eld, 0.15, color='red')
        bar_psd = plt.bar(ticks1, psd, 0.15, color='blue')
        bar_lrd = plt.bar(ticks2, lrd, 0.15, color='green')
        plt.legend([bar_eld[0], bar_psd[0], bar_lrd[0]],
                   ['energyLostDollars', 'peakReductionDollars', 'lossReductionDollars'],
                   bbox_to_anchor=(0., 1.015, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.1)
        monShort = [element[0:3] for element in simMonthList]
        plt.xticks([t + 0.15 for t in ticks], monShort)
        plt.ylabel('Utility Savings ($)')
        Plot.save_fig(plt, pJoin(modelDir, "spendChart.png"))
        # cumulative savings graphs
        fig = plt.figure("cost benefit barchart", figsize=(10, 5))
        annualSavings = sum(eld) + sum(lrd) + sum(psd)
        # Cumulative net savings after x years (O&M subtracted, capital up
        # front).
        annualSave = lambda x: (
            annualSavings - float(inData['omCost'])) * x - float(inData['capitalCost'])
        simplePayback = float(
            inData['capitalCost']) / (annualSavings - float(inData['omCost']))
        plt.xlabel('Year After Installation')
        plt.xlim(0, 30)
        plt.ylabel('Cumulative Savings ($)')
        plt.plot([0 for x in range(31)], c='gray')
        plt.axvline(x=simplePayback, ymin=0, ymax=1, c='gray', linestyle='--')
        plt.plot([annualSave(x) for x in range(31)], c='green')
        Plot.save_fig(plt, pJoin(modelDir, "savingsChart.png"))
        # get exact time stamps from the CSV files generated by Gridlab-D
        timeWithZone = output['Zregulator.csv']['# timestamp']
        # Drop the timezone suffix, keeping 'YYYY-MM-DD HH:MM:SS'.
        timestamps = [element[:19] for element in timeWithZone]
        # data for highcharts
        allOutput["timeStamps"] = timestamps
        allOutput["noCVRPower"] = p
        allOutput["withCVRPower"] = pnew
        allOutput["noCVRLoad"] = whLoads[0]
        allOutput["withCVRLoad"] = whLoads[1]
        allOutput["noCVRLosses"] = whLosses[0]
        allOutput["withCVRLosses"] = whLosses[1]
        allOutput["noCVRTaps"] = tap
        allOutput["withCVRTaps"] = tapnew
        allOutput["noCVRSubVolts"] = volt
        allOutput["withCVRSubVolts"] = voltnew
        allOutput["noCVRCapSwitch"] = switch
        allOutput["withCVRCapSwitch"] = switchnew
        allOutput["noCVRHighVolt"] = highVoltage
        allOutput["withCVRHighVolt"] = highVoltagenew
        allOutput["noCVRLowVolt"] = lowVoltage
        allOutput["withCVRLowVolt"] = lowVoltagenew
        allOutput["noCVRMeanVolt"] = meanVoltage
        allOutput["withCVRMeanVolt"] = meanVoltagenew
        # monetization
        allOutput["simMonthList"] = monShort
        allOutput["energyLostDollars"] = energyLostDollars
        allOutput["lossRedDollars"] = lossRedDollars
        allOutput["peakSaveDollars"] = peakSaveDollars
        allOutput["annualSave"] = [annualSave(x) for x in range(31)]
        # Update the runTime in the input file.
        endTime = datetime.now()
        inData["runTime"] = str(
            timedelta(seconds=int((endTime - startTime).total_seconds())))
        fs.save(pJoin(modelDir, "allInputData.json"), json.dumps(inData, indent=4))
        fs.save(pJoin(modelDir, "allOutputData.json"),
                json.dumps(allOutput, indent=4))
        # For autotest, there won't be such file.
        try:
            os.remove(pJoin(modelDir, "PPID.txt"))
        except:
            pass
        print "DONE RUNNING", modelDir
    except Exception as e:
        # Best-effort cleanup: mark the model cancelled and surface the error
        # on stdout. NOTE(review): the traceback is discarded (only str(e) is
        # printed) — consider traceback.format_exc() as the sibling
        # heavyProcessing handler does.
        print "Oops, Model Crashed!!!"
        cancel(modelDir)
        print e
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Builds a GLM from the .omd feeder in modelDir, injects a climate object and a set
	of recorders, runs Gridlab-D, then condenses the raw CSV outputs into
	allOutputData.json. On any failure, writes the traceback to stderr.txt and
	allInputData.json instead. Always updates runTime and removes PPID.txt at the end.
	'''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass
	try:
		feederName = inputDict["feederName1"]
		# Map the zip code to a TMY2 climate file name and copy it into the work dir.
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), pJoin(modelDir, "gldContainer", "climate.tmy2"))
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
				if feederJson['tree'][key]['object'] == 'climate':
					del feederJson['tree'][key]
		# Re-insert a fresh climate module + object pointing at the copied TMY2 file.
		oldMax = feeder.getMaxKey(tree)
		tree[oldMax + 1] = {'omftype':'module', 'argument':'climate'}
		tree[oldMax + 2] = {'object':'climate','name':'Climate','interpolate':'QUADRATIC', 'tmyfile':'climate.tmy2'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map:
		# one group_recorder per phase, dumping node voltages to {a,b,c}VoltDump.csv.
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps: take the first CSV that has a '# timestamp' column; the `else`
		# clears timeStamps for each non-matching CSV until one is found (break stops the scan).
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate: aggregate each weather series with hdmAgg (hour/day/month aggregation).
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList
		# Voltage Band: voltage_12 is line-to-line, so divide by 2 for per-leg volts.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		# Walk every output CSV and fold it into the right Consumption/regulator/capacitor slot.
		# NOTE(review): the 'not in' guards below never fire since defaults were just set above;
		# the vecSum accumulation path is always taken.
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'):
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Windmill power = |V| * |I| per phase (apparent power magnitudes).
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		# Fall back to a graphviz neato layout when fewer than 25% of nodes have coordinates.
		if latPerc < 0.25: doNeato = True
		else: doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# Always record total wall-clock runTime (success or failure) and drop the PPID marker.
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
def runPowerflowIter(tree, scadaSubPower):
	'''Runs powerflow once, then iterates.

	Calibrates the feeder's load player file so simulated substation real power
	matches SCADA: each pass scales every player value by
	SCAL_CONST = sum(scada) / sum(simulated apparent power) and re-runs gridlabd,
	until the relative error is under calibrateError[0] or calibrateError[1]
	iterations are reached.

	Returns (initial real power series, final real power series, name of the last
	calibrated player file written, iteration count).

	NOTE(review): relies on module-level globals — calibrateError, trim, simLength,
	workDir, playerKey, outputRecorderKey — assumed defined at file scope; confirm.
	'''
	# Run initial powerflow to get power.
	print("Starting calibration.")
	print("Goal of calibration: Error: %s, Iterations: <%s, trim: %s" % (calibrateError[0], calibrateError[1], trim))
	output = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir, "gridlabD"))
	# Trim the warm-up interval off the front of the recorded series.
	outRealPow = output["caliSub.csv"]["measured_real_power"][trim:simLength]
	outImagPower = output["caliSub.csv"]["measured_reactive_power"][trim:simLength]
	# Apparent power in kW: sqrt(P^2 + Q^2) / 1000.
	outAppPowerKw = [(x[0]**2 + x[1]**2)**0.5 / 1000 for x in zip(outRealPow, outImagPower)]
	lastFile = "subScada.player"
	nextFile = "subScadaCalibrated.player"
	nextPower = outAppPowerKw
	# Relative error of total simulated real power (kW) vs. total SCADA power.
	error = (sum(outRealPow) / 1000 - sum(scadaSubPower)) / sum(scadaSubPower)
	iteration = 1
	print("First error:", error)
	while abs(error) > calibrateError[0] and iteration < calibrateError[1]:
		# Run calibration and iterate up to 5 times.
		SCAL_CONST = sum(scadaSubPower) / sum(nextPower)
		print("Calibrating & running again... Error: %s, Iteration: %s, SCAL_CONST: %s" % (str(round(abs(error * 100), 6)), str(iteration), round(SCAL_CONST, 6)))
		# Rewrite the player file with every value scaled by SCAL_CONST.
		newPlayData = []
		with open(pJoin(pJoin(workDir, "gridlabD"), lastFile), "r") as playerFile:
			for line in playerFile:
				(key, val) = line.split(',')
				newPlayData.append(str(key) + ',' + str(float(val) * SCAL_CONST) + "\n")
		with open(pJoin(pJoin(workDir, "gridlabD"), nextFile), "w") as playerFile:
			for row in newPlayData:
				playerFile.write(row)
		# Point the tree at the new player and a fresh recorder output, then re-run.
		tree[playerKey]["file"] = nextFile
		tree[outputRecorderKey]["file"] = "caliSubCheck.csv"
		nextOutput = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir, "gridlabD"))
		outRealPowIter = nextOutput["caliSubCheck.csv"]["measured_real_power"][trim:simLength]
		outImagPowerIter = nextOutput["caliSubCheck.csv"]["measured_reactive_power"][trim:simLength]
		nextAppKw = [(x[0]**2 + x[1]**2)**0.5 / 1000 for x in zip(outRealPowIter, outImagPowerIter)]
		lastFile = nextFile
		nextFile = "subScadaCalibrated" + str(iteration) + ".player"
		nextPower = nextAppKw
		# Compute error and iterate.
		error = (sum(outRealPowIter) / 1000 - sum(scadaSubPower)) / sum(scadaSubPower)
		iteration += 1
	else:
		# while/else: runs whenever the loop exits normally (there is no break).
		# If the first run was already within tolerance, supply defaults for the
		# names the loop body would otherwise have bound.
		if iteration == 1:
			outRealPowIter = outRealPow
			SCAL_CONST = 1.0
	print("Calibration done: Error: %s, Iteration: %s, SCAL_CONST: %s" % (str(round(abs(error * 100), 2)), str(iteration), round(SCAL_CONST, 2)))
	return outRealPow, outRealPowIter, lastFile, iteration
def work(modelDir, inputDict):
	''' Run the model in the foreground. WARNING: can take about a minute.

	Runs a static CVR (Conservation Voltage Reduction) analysis on the feeder in
	modelDir: charts the historical SCADA data, converts constant-PF loads to ZIP
	loads, runs baseline and CVR powerflows over ten load levels, interpolates the
	monthly energy/peak/loss reductions, monetizes them with the supplied rates,
	and writes chart PNGs plus the outData dict it returns.

	Fixes vs. prior revision:
	- '#include' entries are now actually removed from the tree (was `del key`,
	  which only unbound the loop variable).
	- Linear interpolation endpoints are paired correctly: y-values are ordered
	  (bottom, top) to match x = (lowerLevel, upperLevel). Previously they were
	  swapped, inverting the interpolation.
	- Load-level step is clamped to >= 1 so equal min/max levels no longer raise
	  ValueError from range() with a zero step.
	'''
	# Global vars, and load data from the model directory.
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	feederPath = pJoin(modelDir, feederName + '.omd')
	feederJson = json.load(open(feederPath))
	tree = feederJson.get("tree", {})
	attachments = feederJson.get("attachments", {})
	outData = {}
	''' Run CVR analysis. '''
	# Reformate monthData and rates.
	rates = {
		k: float(inputDict[k])
		for k in [
			"capitalCost", "omCost", "wholesaleEnergyCostPerKwh",
			"retailEnergyCostPerKwh", "peakDemandCostSpringPerKw",
			"peakDemandCostSummerPerKw", "peakDemandCostFallPerKw",
			"peakDemandCostWinterPerKw"
		]
	}
	monthNames = [
		"January", "February", "March", "April", "May", "June", "July",
		"August", "September", "October", "November", "December"
	]
	monthToSeason = {
		'January': 'Winter', 'February': 'Winter', 'March': 'Spring',
		'April': 'Spring', 'May': 'Spring', 'June': 'Summer',
		'July': 'Summer', 'August': 'Summer', 'September': 'Fall',
		'October': 'Fall', 'November': 'Fall', 'December': 'Winter'
	}
	# One record per month with its historical average/peak (W) and season.
	monthData = []
	for i, x in enumerate(monthNames):
		monShort = x[0:3].lower()
		season = monthToSeason[x]
		histAvg = float(inputDict.get(monShort + "Avg", 0))
		histPeak = float(inputDict.get(monShort + "Peak", 0))
		monthData.append({
			"monthId": i,
			"monthName": x,
			"histAverage": histAvg,
			"histPeak": histPeak,
			"season": season
		})
	# Graph the SCADA data.
	fig = plt.figure(figsize=(10, 6))
	indices = [r['monthName'] for r in monthData]
	d1 = [r['histPeak'] / (10**3) for r in monthData]
	d2 = [r['histAverage'] / (10**3) for r in monthData]
	ticks = range(len(d1))
	bar_peak = plt.bar(ticks, d1, color='gray')
	bar_avg = plt.bar(ticks, d2, color='dimgray')
	plt.legend([bar_peak[0], bar_avg[0]], ['histPeak', 'histAverage'],
		bbox_to_anchor=(0., 1.015, 1., .102), loc=3, ncol=2, mode="expand",
		borderaxespad=0.1)
	plt.xticks([t + 0.5 for t in ticks], indices)
	plt.ylabel('Mean and peak historical power consumptions (kW)')
	fig.autofmt_xdate()
	plt.savefig(pJoin(modelDir, "scadaChart.png"))
	outData["histPeak"] = d1
	outData["histAverage"] = d2
	outData["monthName"] = [name[0:3] for name in monthNames]
	# Graph feeder.
	fig = plt.figure(figsize=(10, 10))
	myGraph = feeder.treeToNxGraph(tree)
	feeder.latLonNxGraph(myGraph, neatoLayout=False)
	plt.savefig(pJoin(modelDir, "feederChart.png"))
	with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile:
		outData["feederChart"] = inFile.read().encode("base64")
	# Get the load levels we need to test.
	allLoadLevels = [x.get('histPeak', 0) for x in monthData] + \
		[y.get('histAverage', 0) for y in monthData]
	maxLev = _roundOne(max(allLoadLevels), 'up')
	minLev = _roundOne(min(allLoadLevels), 'down')
	# Clamp the step to >= 1: a zero step (all levels equal) would make range() raise.
	levelStep = max(1, int((maxLev - minLev) / 10))
	tenLoadLevels = range(int(minLev), int(maxLev), levelStep)
	# Gather variables from the feeder.
	for key in tree.keys():
		# Set clock to single timestep.
		if tree[key].get('clock', '') == 'clock':
			tree[key] = {
				"timezone": "PST+8PDT",
				"stoptime": "'2013-01-01 00:00:00'",
				"starttime": "'2013-01-01 00:00:00'",
				"clock": "clock"
			}
		# Save swing node index.
		if tree[key].get('bustype', '').lower() == 'swing':
			swingIndex = key
			swingName = tree[key].get('name')
		# Remove all includes. (Was `del key`, which only unbound the loop
		# variable and left the #include in the tree.)
		if tree[key].get('omftype', '') == '#include':
			del tree[key]
	# Find the substation regulator and config.
	# NOTE(review): regConfName/regIndex stay unbound if no regulator hangs off
	# the swing bus — the model would fail below; confirm feeders always have one.
	for key in tree:
		if tree[key].get('object', '') == 'regulator' and tree[key].get('from', '') == swingName:
			regIndex = key
			regConfName = tree[key]['configuration']
	if not regConfName:
		regConfName = False
	for key in tree:
		if tree[key].get('name', '') == regConfName:
			regConfIndex = key
	# Set substation regulator to manual operation.
	baselineTap = int(inputDict.get("baselineTap"))  # GLOBAL VARIABLE FOR DEFAULT TAP POSITION
	tree[regConfIndex] = {
		'name': tree[regConfIndex]['name'],
		'object': 'regulator_configuration',
		'connect_type': '1',
		'raise_taps': '10',
		'lower_taps': '10',
		'CT_phase': 'ABC',
		'PT_phase': 'ABC',
		'regulation': '0.10',  # Yo, 0.10 means at tap_pos 10 we're 10% above 120V.
		'Control': 'MANUAL',
		'control_level': 'INDIVIDUAL',
		'Type': 'A',
		'tap_pos_A': str(baselineTap),
		'tap_pos_B': str(baselineTap),
		'tap_pos_C': str(baselineTap)
	}
	# Attach recorders relevant to CVR.
	recorders = [{
		'object': 'collector',
		'file': 'ZlossesTransformer.csv',
		'group': 'class=transformer',
		'limit': '0',
		'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'
	}, {
		'object': 'collector',
		'file': 'ZlossesUnderground.csv',
		'group': 'class=underground_line',
		'limit': '0',
		'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'
	}, {
		'object': 'collector',
		'file': 'ZlossesOverhead.csv',
		'group': 'class=overhead_line',
		'limit': '0',
		'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'
	}, {
		'object': 'recorder',
		'file': 'Zregulator.csv',
		'limit': '0',
		'parent': tree[regIndex]['name'],
		'property': 'tap_A,tap_B,tap_C,power_in.real,power_in.imag'
	}, {
		'object': 'collector',
		'file': 'ZvoltageJiggle.csv',
		'group': 'class=triplex_meter',
		'limit': '0',
		'property': 'min(voltage_12.mag),mean(voltage_12.mag),max(voltage_12.mag),std(voltage_12.mag)'
	}, {
		'object': 'recorder',
		'file': 'ZsubstationTop.csv',
		'limit': '0',
		'parent': tree[swingIndex]['name'],
		'property': 'voltage_A,voltage_B,voltage_C'
	}, {
		'object': 'recorder',
		'file': 'ZsubstationBottom.csv',
		'limit': '0',
		'parent': tree[regIndex]['to'],
		'property': 'voltage_A,voltage_B,voltage_C'
	}]
	biggest = 1 + max([int(k) for k in tree.keys()])
	for index, rec in enumerate(recorders):
		tree[biggest + index] = rec
	# Change constant PF loads to ZIP loads. (See evernote for rationale about 50/50 power/impedance mix.)
	blankZipModel = {
		'object': 'triplex_load',
		'name': 'NAMEVARIABLE',
		'base_power_12': 'POWERVARIABLE',
		'power_fraction_12': str(inputDict.get("p_percent")),
		'impedance_fraction_12': str(inputDict.get("z_percent")),
		'current_fraction_12': str(inputDict.get("i_percent")),
		'power_pf_12': str(inputDict.get("power_factor")),  # MAYBEFIX: we can probably get this PF data from the Milsoft loads.
		'impedance_pf_12': str(inputDict.get("power_factor")),
		'current_pf_12': str(inputDict.get("power_factor")),
		'nominal_voltage': '120',
		'phases': 'PHASESVARIABLE',
		'parent': 'PARENTVARIABLE'
	}

	def powerClean(powerStr):
		''' take 3339.39+1052.29j to 3339.39 '''
		return powerStr[0:powerStr.find('+')]

	for key in tree:
		if tree[key].get('object', '') == 'triplex_node':
			# Get existing variables.
			name = tree[key].get('name', '')
			power = tree[key].get('power_12', '')
			parent = tree[key].get('parent', '')
			phases = tree[key].get('phases', '')
			# Replace object and reintroduce variables.
			tree[key] = copy(blankZipModel)
			tree[key]['name'] = name
			tree[key]['base_power_12'] = powerClean(power)
			tree[key]['parent'] = parent
			tree[key]['phases'] = phases

	# Function to determine how low we can tap down in the CVR case:
	def loweringPotential(baseLine):
		''' Given a baseline end of line voltage, how many more percent can we shave off the substation voltage? '''
		''' testsWePass = [122.0,118.0,200.0,110.0] '''
		lower = int(math.floor((baseLine / 114.0 - 1) * 100)) - 1
		# If lower is negative, we can't return it because we'd be undervolting beyond what baseline already was!
		if lower < 0:
			return baselineTap
		else:
			return baselineTap - lower

	# Run all the powerflows.
	powerflows = []
	for doingCvr in [False, True]:
		# For each load level in the tenLoadLevels, run a powerflow with the load objects scaled to the level.
		for desiredLoad in tenLoadLevels:
			# Find the total load that was defined in Milsoft:
			loadList = []
			for key in tree:
				if tree[key].get('object', '') == 'triplex_load':
					loadList.append(tree[key].get('base_power_12', ''))
			totalLoad = sum([float(x) for x in loadList])
			# Rescale each triplex load:
			for key in tree:
				if tree[key].get('object', '') == 'triplex_load':
					currentPow = float(tree[key]['base_power_12'])
					ratio = desiredLoad / totalLoad
					tree[key]['base_power_12'] = str(currentPow * ratio)
			# If we're doing CVR then lower the voltage.
			if doingCvr:
				# Find the minimum voltage we can tap down to:
				newTapPos = baselineTap
				for row in powerflows:
					if row.get('loadLevel', '') == desiredLoad:
						newTapPos = loweringPotential(row.get('lowVoltage', 114))
				# Tap it down to there.
				# MAYBEFIX: do each phase separately because that's how it's done in the field... Oof.
				tree[regConfIndex]['tap_pos_A'] = str(newTapPos)
				tree[regConfIndex]['tap_pos_B'] = str(newTapPos)
				tree[regConfIndex]['tap_pos_C'] = str(newTapPos)
			# Run the model through gridlab and put outputs in the table.
			output = gridlabd.runInFilesystem(tree, attachments=attachments,
				keepFiles=True, workDir=modelDir)
			os.remove(pJoin(modelDir, "PID.txt"))
			p = output['Zregulator.csv']['power_in.real'][0]
			q = output['Zregulator.csv']['power_in.imag'][0]
			s = math.sqrt(p**2 + q**2)
			lossTotal = 0.0
			for device in [
					'ZlossesOverhead.csv', 'ZlossesTransformer.csv',
					'ZlossesUnderground.csv'
			]:
				for letter in ['A', 'B', 'C']:
					r = output[device]['sum(power_losses_' + letter + '.real)'][0]
					i = output[device]['sum(power_losses_' + letter + '.imag)'][0]
					lossTotal += math.sqrt(r**2 + i**2)
			## Entire output:
			powerflows.append({
				'doingCvr': doingCvr,
				'loadLevel': desiredLoad,
				'realPower': p,
				'powerFactor': p / s,
				'losses': lossTotal,
				'subVoltage': (output['ZsubstationBottom.csv']['voltage_A'][0] +
					output['ZsubstationBottom.csv']['voltage_B'][0] +
					output['ZsubstationBottom.csv']['voltage_C'][0]) / 3 / 60,
				'lowVoltage': output['ZvoltageJiggle.csv']['min(voltage_12.mag)'][0] / 2,
				'highVoltage': output['ZvoltageJiggle.csv']['max(voltage_12.mag)'][0] / 2
			})

	# For a given load level, find two points to interpolate on.
	def getInterpPoints(t):
		''' Find the two points we can interpolate from. '''
		''' tests pass on [tenLoadLevels[0],tenLoadLevels[5]+499,tenLoadLevels[-1]-988] '''
		loc = sorted(tenLoadLevels + [t]).index(t)
		if loc == 0:
			return (tenLoadLevels[0], tenLoadLevels[1])
		elif loc > len(tenLoadLevels) - 2:
			return (tenLoadLevels[-2], tenLoadLevels[-1])
		else:
			return (tenLoadLevels[loc - 1], tenLoadLevels[loc + 1])

	# Calculate peak reduction.
	for row in monthData:
		peak = row['histPeak']
		peakPoints = getInterpPoints(peak)
		peakTopBase = [
			x for x in powerflows
			if x.get('loadLevel', '') == peakPoints[-1] and x.get('doingCvr', '') == False
		][0]
		peakTopCvr = [
			x for x in powerflows
			if x.get('loadLevel', '') == peakPoints[-1] and x.get('doingCvr', '') == True
		][0]
		peakBottomBase = [
			x for x in powerflows
			if x.get('loadLevel', '') == peakPoints[0] and x.get('doingCvr', '') == False
		][0]
		peakBottomCvr = [
			x for x in powerflows
			if x.get('loadLevel', '') == peakPoints[0] and x.get('doingCvr', '') == True
		][0]
		# Linear interpolation so we aren't running umpteen million loadflows.
		# y is ordered (bottom, top) to match x = (lowerLevel, upperLevel).
		# (Previously y was (top, bottom), which inverted the interpolation.)
		x = (peakPoints[0], peakPoints[1])
		y = (peakBottomBase['realPower'] - peakBottomCvr['realPower'],
			peakTopBase['realPower'] - peakTopCvr['realPower'])
		peakRed = y[0] + (y[1] - y[0]) * (peak - x[0]) / (x[1] - x[0])
		row['peakReduction'] = peakRed
	# Calculate energy reduction and loss reduction based on average load.
	for row in monthData:
		avgEnergy = row['histAverage']
		energyPoints = getInterpPoints(avgEnergy)
		avgTopBase = [
			x for x in powerflows
			if x.get('loadLevel', '') == energyPoints[-1] and x.get('doingCvr', '') == False
		][0]
		avgTopCvr = [
			x for x in powerflows
			if x.get('loadLevel', '') == energyPoints[-1] and x.get('doingCvr', '') == True
		][0]
		avgBottomBase = [
			x for x in powerflows
			if x.get('loadLevel', '') == energyPoints[0] and x.get('doingCvr', '') == False
		][0]
		avgBottomCvr = [
			x for x in powerflows
			if x.get('loadLevel', '') == energyPoints[0] and x.get('doingCvr', '') == True
		][0]
		# Linear interpolation so we aren't running umpteen million loadflows.
		# Same (bottom, top) ordering fix as the peak loop above.
		x = (energyPoints[0], energyPoints[1])
		y = (avgBottomBase['realPower'] - avgBottomCvr['realPower'],
			avgTopBase['realPower'] - avgTopCvr['realPower'])
		energyRed = y[0] + (y[1] - y[0]) * (avgEnergy - x[0]) / (x[1] - x[0])
		row['energyReduction'] = energyRed
		lossY = (avgBottomBase['losses'] - avgBottomCvr['losses'],
			avgTopBase['losses'] - avgTopCvr['losses'])
		lossRed = lossY[0] + (lossY[1] - lossY[0]) * (avgEnergy - x[0]) / (x[1] - x[0])
		row['lossReduction'] = lossRed
	# Multiply by dollars.
	for row in monthData:
		row['energyReductionDollars'] = row['energyReduction'] / 1000 * (
			rates['wholesaleEnergyCostPerKwh'] - rates['retailEnergyCostPerKwh'])
		row['peakReductionDollars'] = row['peakReduction'] / 1000 * rates[
			'peakDemandCost' + row['season'] + 'PerKw']
		row['lossReductionDollars'] = row['lossReduction'] / 1000 * rates[
			'wholesaleEnergyCostPerKwh']

	# Pretty output
	def plotTable(inData):
		''' Render a matrix (header row first) as a matplotlib table figure. '''
		fig = plt.figure(figsize=(10, 5))
		plt.axis('off')
		plt.tight_layout()
		plt.table(cellText=[row for row in inData[1:]],
			loc='center',
			rowLabels=range(len(inData) - 1),
			colLabels=inData[0])

	def dictalToMatrix(dictList):
		''' Take our dictal format to a matrix. '''
		matrix = [dictList[0].keys()]
		for row in dictList:
			matrix.append(row.values())
		return matrix

	# Powerflow results.
	plotTable(dictalToMatrix(powerflows))
	plt.savefig(pJoin(modelDir, "powerflowTable.png"))
	# Monetary results.
	## To print partial money table
	monthDataMat = dictalToMatrix(monthData)
	monthDataPart = [row[4:] for row in monthDataMat]
	plotTable(monthDataPart)
	plt.savefig(pJoin(modelDir, "moneyTable.png"))
	outData["monthDataMat"] = dictalToMatrix(monthData)
	outData["monthDataPart"] = monthDataPart
	# Graph the money data.
	fig = plt.figure(figsize=(10, 8))
	indices = [r['monthName'] for r in monthData]
	d1 = [r['energyReductionDollars'] for r in monthData]
	d2 = [r['lossReductionDollars'] for r in monthData]
	d3 = [r['peakReductionDollars'] for r in monthData]
	ticks = range(len(d1))
	bar_erd = plt.bar(ticks, d1, color='red')
	bar_lrd = plt.bar(ticks, d2, color='green')
	bar_prd = plt.bar(ticks, d3, color='blue', yerr=d2)
	plt.legend([bar_prd[0], bar_lrd[0], bar_erd[0]], [
		'peakReductionDollars', 'lossReductionDollars', 'energyReductionDollars'
	],
		bbox_to_anchor=(0., 1.015, 1., .102), loc=3, ncol=2, mode="expand",
		borderaxespad=0.1)
	plt.xticks([t + 0.5 for t in ticks], indices)
	plt.ylabel('Utility Savings ($)')
	plt.tight_layout(5.5, 1.3, 1.2)
	fig.autofmt_xdate()
	plt.savefig(pJoin(modelDir, "spendChart.png"))
	outData["energyReductionDollars"] = d1
	outData["lossReductionDollars"] = d2
	outData["peakReductionDollars"] = d3
	# Graph the cumulative savings.
	fig = plt.figure(figsize=(10, 5))
	annualSavings = sum(d1) + sum(d2) + sum(d3)
	annualSave = lambda x: (annualSavings - rates['omCost']) * x - rates['capitalCost']
	simplePayback = rates['capitalCost'] / (annualSavings - rates['omCost'])
	plt.xlabel('Year After Installation')
	plt.xlim(0, 30)
	plt.ylabel('Cumulative Savings ($)')
	plt.plot([0 for x in range(31)], c='gray')
	plt.axvline(x=simplePayback, ymin=0, ymax=1, c='gray', linestyle='--')
	plt.plot([annualSave(x) for x in range(31)], c='green')
	plt.savefig(pJoin(modelDir, "savingsChart.png"))
	outData["annualSave"] = [annualSave(x) for x in range(31)]
	# For autotest, there won't be such file.
	return outData
def main():
	''' JSON manipulation, Gridlab running, etc. goes here.

	Script driver: loads a calibrated feeder JSON, attaches per-phase voltage
	group_recorders, runs gridlabd, then renders one voltage-map PNG per
	timestep into ./pngs/.
	'''
	# Import data.
	feedJson = json.load(open('./ABEC Frank Calibrated.json'))
	tree = feedJson['tree']
	# Input data, model-style.
	inputDict = {'simLength':24,'simStartDate':'2011-01-01', 'simLengthUnits':'hours'}
	# Add recorders: one group_recorder per phase writing {a,b,c}VoltDump.csv.
	stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[omf.feeder.getMaxKey(tree) + 1] = copyStub
	feeder.adjustTime(tree, inputDict['simLength'], inputDict['simLengthUnits'], inputDict['simStartDate'])
	# Run gridlab.
	# NOTE(review): bare runInFilesystem here vs. gridlabd.runInFilesystem elsewhere
	# in this file — presumably imported directly at module level; confirm.
	allOutputData = runInFilesystem(tree, attachments=feedJson['attachments'], keepFiles=True, workDir='.', glmName='ABEC Frank SolverGen.glm')
	try:
		os.remove('PID.txt')
	except:
		pass
	print 'Gridlab ran correctly', allOutputData.keys()
	# Make plots.
	#TODO: figure out what to do about neato taking like 2 minutes to run.
	neatoLayout = True
	# Detect the feeder nominal voltage:
	# NOTE(review): compares bustype to uppercase 'SWING' (other code in this file
	# uses .lower()=='swing'); feedVoltage stays unbound if no match — confirm.
	for key in tree:
		ob = tree[key]
		if type(ob)==dict and ob.get('bustype','')=='SWING':
			feedVoltage = float(ob.get('nominal_voltage',1))
	# Make a graph object.
	fGraph = omf.feeder.treeToNxGraph(tree)
	if neatoLayout:
		# HACK: work on a new graph without attributes because graphViz tries to read attrs.
		cleanG = nx.Graph(fGraph.edges())
		cleanG.add_nodes_from(fGraph)
		positions = nx.graphviz_layout(cleanG, prog='neato')
	else:
		positions = {n:fGraph.node[n].get('pos',(0,0)) for n in fGraph}
	# Plot all time steps.
	for step, stamp in enumerate(allOutputData['aVoltDump.csv']['# timestamp']):
		# Build voltage map.
		nodeVolts = {}
		for nodeName in [x for x in allOutputData['aVoltDump.csv'].keys() if x != '# timestamp']:
			allVolts = []
			for phase in ['A','B','C']:
				# Parse the recorded complex voltage string for this node/phase/step.
				v = complex(allOutputData[phase.lower() + 'VoltDump.csv'][nodeName][step])
				phaseVolt = _pythag(v.real, v.imag)
				if phaseVolt != 0.0:
					if _digits(phaseVolt)>3:
						# Normalize to 120 V standard
						phaseVolt = phaseVolt*(120/feedVoltage)
					allVolts.append(phaseVolt)
			# HACK: Take average of all phases to collapse dimensionality.
			nodeVolts[nodeName] = _avg(allVolts)
		# Apply voltage map and chart it.
		voltChart = plt.figure(figsize=(10,10))
		plt.axes(frameon = 0)
		plt.axis('off')
		edgeIm = nx.draw_networkx_edges(fGraph, positions)
		nodeIm = nx.draw_networkx_nodes(fGraph,
			pos = positions,
			node_color = [nodeVolts.get(n,0) for n in fGraph.nodes()],
			linewidths = 0,
			node_size = 30,
			cmap = plt.cm.jet)
		plt.sci(nodeIm)
		# Color scale fixed to the 110-130 V band around the 120 V standard.
		plt.clim(110,130)
		plt.colorbar()
		plt.title(stamp)
		voltChart.savefig('./pngs/volts' + str(step).zfill(3) + '.png')
		# Reclaim memory by closing, deleting and garbage collecting the last chart.
		voltChart.clf()
		plt.close()
		del voltChart
		gc.collect()