Example #1
0
def genDiagram(outputDir, feederJson):
    '''Draw a feeder as a PNG chart and save it to outputDir/feederChart.png.

    outputDir: directory to write the image into.
    feederJson: parsed .omd content with "tree" (dict of leaf dicts) and
        "links" (iterable of link dicts with 'source'/'target' endpoints).
    '''
    # Load required data.
    tree = feederJson.get("tree", {})
    links = feederJson.get("links", {})
    # Index tree leaves by name once, instead of rescanning the whole tree
    # for every link endpoint (original was O(links * tree)). Leaves without
    # a name are indexed under '' to match the original .get('name', '')
    # comparison, and duplicates all get updated, as before.
    leavesByName = {}
    for leaf in tree.values():
        leavesByName.setdefault(leaf.get('name', ''), []).append(leaf)
    # Generate lat/lons from nodes and links structures.
    for link in links:
        for typeLink in link:
            if typeLink in ['source', 'target']:
                endpoint = link[typeLink]
                for key in endpoint:
                    if key in ['x', 'y']:
                        for leaf in leavesByName.get(endpoint['name'], []):
                            if key == 'x':
                                leaf['latitude'] = endpoint[key]
                            else:
                                leaf['longitude'] = endpoint[key]
    # Remove even more things (no lat, lon or from = node without a position).
    # Iterate over a snapshot of the keys: popping entries while iterating a
    # live dict view raises RuntimeError on Python 3.
    for key in list(tree.keys()):
        aLat = tree[key].get('latitude')
        aLon = tree[key].get('longitude')
        aFrom = tree[key].get('from')
        if aLat is None and aLon is None and aFrom is None:
            tree.pop(key)
    # Create and save the graphic.
    nxG = feeder.treeToNxGraph(tree)
    feeder.latLonNxGraph(
        nxG)  # This function creates a .plt reference which can be saved here.
    plt.savefig(pJoin(outputDir, "feederChart.png"), dpi=800, pad_inches=0.0)
Example #2
0
File: grip.py — Project: sammatuba/omf
def oneLineGridlab(temp_dir):
    '''
    Render a one-line diagram of the submitted GLM and save it as a PNG.

    Form parameters:
    :param glm: a GLM file.
    :param useLatLons: 'True' to place nodes at the coordinates found in the
        GLM, 'False' to synthesize coordinates with Graphviz NEATO.

    Details:
    :OMF function: omf.feeder.latLonNxGraph().
    :run-time: about 1 to 30 seconds.
    '''
    glm_path = os.path.join(temp_dir, 'in.glm')
    tree = feeder.parse(glm_path)
    nx_graph = feeder.treeToNxGraph(tree)
    # 'useLatLons' == 'False' means fall back to an artificial NEATO layout.
    use_neato = request.form.get('useLatLons') == 'False'
    # Clear any plot state left over from a previous request.
    plt.clf()
    plt.close()
    # Draw the fresh diagram and write it out.
    feeder.latLonNxGraph(nx_graph,
                         labels=False,
                         neatoLayout=use_neato,
                         showPlot=False)
    plt.savefig(os.path.join(temp_dir, filenames["ongl"]))
Example #3
0
 def graph(graphname, mst):
     '''Draw the feeder tree plus MST-derived edges and save the figure.

     graphname: file name (appended to the global workDir) for the saved PNG.
     mst: square 0/1 adjacency matrix; a 1 at [row][column] adds an edge.
     Relies on module/outer-scope names: tree, useDist, nodes, volt, workDir.
     '''
     plt.close()
     outGraph = nx.Graph()
     # Build nodes and parent/child edges from the feeder tree.
     for key in tree:
         item = tree[key]
         if 'name' in item:
             if 'parent' in item:
                 outGraph.add_edge(item['name'],
                                   item['parent'],
                                   attr_dict={
                                       'type': 'parentChild',
                                       'phases': 1
                                   })
                 outGraph.node[item['name']]['type'] = item['object']
                 # Note that attached houses via gridEdit.html won't have lat/lon values, so this try is a workaround.
                 try:
                     outGraph.node[item['name']]['pos'] = (float(
                         item.get('latitude',
                                  0)), float(item.get('longitude', 0)))
                 except Exception:
                     outGraph.node[item['name']]['pos'] = (0.0, 0.0)
             elif item['name'] in outGraph:
                 # Edge already led to node's addition, so just set the attributes:
                 outGraph.node[item['name']]['type'] = item['object']
             else:
                 outGraph.add_node(item['name'],
                                   attr_dict={'type': item['object']})
             # Explicit lat/lon on the item wins over the fallback above.
             if 'latitude' in item and 'longitude' in item:
                 try:
                     outGraph.node.get(item['name'], {})['pos'] = (float(
                         item['latitude']), float(item['longitude']))
                 except Exception:
                     outGraph.node.get(item['name'], {})['pos'] = (0.0, 0.0)
     # Add one edge per 1-cell of the MST adjacency matrix; node names come
     # from the distance or voltage data depending on the useDist flag.
     size = len(mst)
     for row in range(size):
         for column in range(size):
             if mst[row][column] == 1:
                 if useDist == 'True':
                     outGraph.add_edge(str(nodes.loc[row, 'node_name']),
                                       str(nodes.loc[column, 'node_name']))
                 else:
                     outGraph.add_edge(str(volt.loc[row, 'node_name']),
                                       str(volt.loc[column, 'node_name']))
     feeder.latLonNxGraph(outGraph,
                          labels=True,
                          neatoLayout=True,
                          showPlot=True)
     plt.savefig(workDir + graphname)
Example #4
0
def milsoftToGridlabTests(keepFiles=False):
	'''Round-trip test: convert Milsoft .std/.seq feeders to GLM, draw them, and run powerflow.

	keepFiles: when False (default), the output directory is removed afterwards.
	Returns the number of exceptions encountered across all stages.
	NOTE(review): Python 2 code (print statements).
	'''
	openPrefix = '../uploads/'
	outPrefix = './milToGridlabTests/'
	import os, json, traceback, shutil
	from omf.solvers import gridlabd
	from matplotlib import pyplot as plt
	from milToGridlab import convert
	import omf.feeder as feeder
	try:
		os.mkdir(outPrefix)
	except:
		pass # Directory already there.
	exceptionCount = 0
	# testFiles = [('INEC-RENOIR.std','INEC.seq'), ('INEC-GRAHAM.std','INEC.seq'),
	#   ('Olin-Barre.std','Olin.seq'), ('Olin-Brown.std','Olin.seq'),
	#   ('ABEC-FRANK.std','ABEC.seq'), ('ABEC-COLUMBIA.std','ABEC.seq'),('OMF_Norfork1.std', 'OMF_Norfork1.seq')]
	testFiles = [('Olin-Brown.std', 'Olin.seq')]
	testAttachments = {'schedules.glm':''}
	# testAttachments = {'schedules.glm':'', 'climate.tmy2':open('./data/Climate/KY-LEXINGTON.tmy2','r').read()}
	# Each stage (convert, draw, powerflow) is best-effort: failures are
	# counted in exceptionCount but do not abort the remaining files.
	for stdString, seqString in testFiles:
		try:
			# Convert the std+seq.
			with open(openPrefix + stdString,'r') as stdFile, open(openPrefix + seqString,'r') as seqFile:
				outGlm,x,y = convert(stdFile.read(),seqFile.read())
			with open(outPrefix + stdString.replace('.std','.glm'),'w') as outFile:
				outFile.write(feeder.sortedWrite(outGlm))
			print 'WROTE GLM FOR', stdString
			try:
				# Draw the GLM.
				myGraph = feeder.treeToNxGraph(outGlm)
				feeder.latLonNxGraph(myGraph, neatoLayout=False)
				plt.savefig(outPrefix + stdString.replace('.std','.png'))
				print 'DREW GLM OF', stdString
			except:
				exceptionCount += 1
				print 'FAILED DRAWING', stdString
			try:
				# Run powerflow on the GLM. HACK:blank attachments for now.
				output = gridlabd.runInFilesystem(outGlm, attachments=testAttachments, keepFiles=False)
				with open(outPrefix + stdString.replace('.std','.json'),'w') as outFile:
					json.dump(output, outFile, indent=4)
				print 'RAN GRIDLAB ON', stdString
			except:
				exceptionCount += 1
				print 'POWERFLOW FAILED', stdString
		except:
			print 'FAILED CONVERTING', stdString
			exceptionCount += 1
			traceback.print_exc()
	if not keepFiles:
		shutil.rmtree(outPrefix)
	return exceptionCount
Example #5
0
File: cvrStatic.py — Project: baifengbai/omf
def work(modelDir, inputDict):
    ''' Run the model in the foreground. WARNING: can take about a minute.

    modelDir: directory holding the .omd feeder; all charts are written here.
    inputDict: model parameters (rates, monthly SCADA stats, ZIP-load
        fractions, baselineTap, ...).
    Returns outData, a dict of chart data and computed CVR savings.
    '''
    # Global vars, and load data from the model directory.
    feederName = [x for x in os.listdir(modelDir)
                  if x.endswith('.omd')][0][:-4]
    inputDict["feederName1"] = feederName
    feederPath = pJoin(modelDir, feederName + '.omd')
    feederJson = json.load(open(feederPath))
    tree = feederJson.get("tree", {})
    attachments = feederJson.get("attachments", {})
    outData = {}
    ''' Run CVR analysis. '''
    # Reformate monthData and rates.
    rates = {
        k: float(inputDict[k])
        for k in [
            "capitalCost", "omCost", "wholesaleEnergyCostPerKwh",
            "retailEnergyCostPerKwh", "peakDemandCostSpringPerKw",
            "peakDemandCostSummerPerKw", "peakDemandCostFallPerKw",
            "peakDemandCostWinterPerKw"
        ]
    }
    monthNames = [
        "January", "February", "March", "April", "May", "June", "July",
        "August", "September", "October", "November", "December"
    ]
    monthToSeason = {
        'January': 'Winter',
        'February': 'Winter',
        'March': 'Spring',
        'April': 'Spring',
        'May': 'Spring',
        'June': 'Summer',
        'July': 'Summer',
        'August': 'Summer',
        'September': 'Fall',
        'October': 'Fall',
        'November': 'Fall',
        'December': 'Winter'
    }
    monthData = []
    for i, x in enumerate(monthNames):
        monShort = x[0:3].lower()
        season = monthToSeason[x]
        histAvg = float(inputDict.get(monShort + "Avg", 0))
        histPeak = float(inputDict.get(monShort + "Peak", 0))
        monthData.append({
            "monthId": i,
            "monthName": x,
            "histAverage": histAvg,
            "histPeak": histPeak,
            "season": season
        })
    # Graph the SCADA data.
    fig = plt.figure(figsize=(10, 6))
    indices = [r['monthName'] for r in monthData]
    d1 = [r['histPeak'] / (10**3) for r in monthData]
    d2 = [r['histAverage'] / (10**3) for r in monthData]
    ticks = range(len(d1))
    bar_peak = plt.bar(ticks, d1, color='gray')
    bar_avg = plt.bar(ticks, d2, color='dimgray')
    plt.legend([bar_peak[0], bar_avg[0]], ['histPeak', 'histAverage'],
               bbox_to_anchor=(0., 1.015, 1., .102),
               loc=3,
               ncol=2,
               mode="expand",
               borderaxespad=0.1)
    plt.xticks([t + 0.5 for t in ticks], indices)
    plt.ylabel('Mean and peak historical power consumptions (kW)')
    fig.autofmt_xdate()
    plt.savefig(pJoin(modelDir, "scadaChart.png"))
    outData["histPeak"] = d1
    outData["histAverage"] = d2
    outData["monthName"] = [name[0:3] for name in monthNames]
    # Graph feeder.
    fig = plt.figure(figsize=(10, 10))
    myGraph = feeder.treeToNxGraph(tree)
    feeder.latLonNxGraph(myGraph, neatoLayout=False)
    plt.savefig(pJoin(modelDir, "feederChart.png"))
    with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile:
        # NOTE(review): Python 2 idiom; Python 3 needs base64.b64encode.
        outData["feederChart"] = inFile.read().encode("base64")
    # Get the load levels we need to test.
    allLoadLevels = [x.get('histPeak', 0) for x in monthData
                     ] + [y.get('histAverage', 0) for y in monthData]
    maxLev = _roundOne(max(allLoadLevels), 'up')
    minLev = _roundOne(min(allLoadLevels), 'down')
    tenLoadLevels = range(int(minLev), int(maxLev), int(
        (maxLev - minLev) / 10))
    # Gather variables from the feeder. Iterate over a snapshot of the keys
    # since the loop may delete entries from the tree.
    for key in list(tree.keys()):
        # Set clock to single timestep.
        if tree[key].get('clock', '') == 'clock':
            tree[key] = {
                "timezone": "PST+8PDT",
                "stoptime": "'2013-01-01 00:00:00'",
                "starttime": "'2013-01-01 00:00:00'",
                "clock": "clock"
            }
        # Save swing node index.
        if tree[key].get('bustype', '').lower() == 'swing':
            swingIndex = key
            swingName = tree[key].get('name')
        # Remove all includes. (Bug fix: the original `del key` only unbound
        # the loop variable and left the #include entries in the tree.)
        if tree[key].get('omftype', '') == '#include':
            del tree[key]
    # Find the substation regulator and config.
    regConfName = None  # Stays None if no regulator hangs off the swing bus.
    for key in tree:
        if tree[key].get('object', '') == 'regulator' and tree[key].get(
                'from', '') == swingName:
            regIndex = key
            regConfName = tree[key]['configuration']
    if not regConfName: regConfName = False
    for key in tree:
        if tree[key].get('name', '') == regConfName:
            regConfIndex = key
    # Set substation regulator to manual operation.
    baselineTap = int(inputDict.get(
        "baselineTap"))  # GLOBAL VARIABLE FOR DEFAULT TAP POSITION
    tree[regConfIndex] = {
        'name': tree[regConfIndex]['name'],
        'object': 'regulator_configuration',
        'connect_type': '1',
        'raise_taps': '10',
        'lower_taps': '10',
        'CT_phase': 'ABC',
        'PT_phase': 'ABC',
        'regulation':
        '0.10',  #Yo, 0.10 means at tap_pos 10 we're 10% above 120V.
        'Control': 'MANUAL',
        'control_level': 'INDIVIDUAL',
        'Type': 'A',
        'tap_pos_A': str(baselineTap),
        'tap_pos_B': str(baselineTap),
        'tap_pos_C': str(baselineTap)
    }
    # Attach recorders relevant to CVR.
    recorders = [{
        'object': 'collector',
        'file': 'ZlossesTransformer.csv',
        'group': 'class=transformer',
        'limit': '0',
        'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'
    }, {
        'object': 'collector',
        'file': 'ZlossesUnderground.csv',
        'group': 'class=underground_line',
        'limit': '0',
        'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'
    }, {
        'object': 'collector',
        'file': 'ZlossesOverhead.csv',
        'group': 'class=overhead_line',
        'limit': '0',
        'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'
    }, {
        'object': 'recorder',
        'file': 'Zregulator.csv',
        'limit': '0',
        'parent': tree[regIndex]['name'],
        'property': 'tap_A,tap_B,tap_C,power_in.real,power_in.imag'
    }, {
        'object': 'collector',
        'file': 'ZvoltageJiggle.csv',
        'group': 'class=triplex_meter',
        'limit': '0',
        'property': 'min(voltage_12.mag),mean(voltage_12.mag),max(voltage_12.mag),std(voltage_12.mag)'
    }, {
        'object': 'recorder',
        'file': 'ZsubstationTop.csv',
        'limit': '0',
        'parent': tree[swingIndex]['name'],
        'property': 'voltage_A,voltage_B,voltage_C'
    }, {
        'object': 'recorder',
        'file': 'ZsubstationBottom.csv',
        'limit': '0',
        'parent': tree[regIndex]['to'],
        'property': 'voltage_A,voltage_B,voltage_C'
    }]
    # Append recorders after the current largest key.
    biggest = 1 + max([int(k) for k in tree.keys()])
    for index, rec in enumerate(recorders):
        tree[biggest + index] = rec
    # Change constant PF loads to ZIP loads. (See evernote for rationale about 50/50 power/impedance mix.)
    blankZipModel = {
        'object': 'triplex_load',
        'name': 'NAMEVARIABLE',
        'base_power_12': 'POWERVARIABLE',
        'power_fraction_12': str(inputDict.get("p_percent")),
        'impedance_fraction_12': str(inputDict.get("z_percent")),
        'current_fraction_12': str(inputDict.get("i_percent")),
        'power_pf_12': str(
            inputDict.get("power_factor")
        ),  #MAYBEFIX: we can probably get this PF data from the Milsoft loads.
        'impedance_pf_12': str(inputDict.get("power_factor")),
        'current_pf_12': str(inputDict.get("power_factor")),
        'nominal_voltage': '120',
        'phases': 'PHASESVARIABLE',
        'parent': 'PARENTVARIABLE'
    }

    def powerClean(powerStr):
        ''' take 3339.39+1052.29j to 3339.39 '''
        return powerStr[0:powerStr.find('+')]

    for key in tree:
        if tree[key].get('object', '') == 'triplex_node':
            # Get existing variables.
            name = tree[key].get('name', '')
            power = tree[key].get('power_12', '')
            parent = tree[key].get('parent', '')
            phases = tree[key].get('phases', '')
            # Replace object and reintroduce variables.
            tree[key] = copy(blankZipModel)
            tree[key]['name'] = name
            tree[key]['base_power_12'] = powerClean(power)
            tree[key]['parent'] = parent
            tree[key]['phases'] = phases
    # Function to determine how low we can tap down in the CVR case:
    def loweringPotential(baseLine):
        ''' Given a baseline end of line voltage, how many more percent can we shave off the substation voltage? '''
        ''' testsWePass = [122.0,118.0,200.0,110.0] '''
        lower = int(math.floor((baseLine / 114.0 - 1) * 100)) - 1
        # If lower is negative, we can't return it because we'd be undervolting beyond what baseline already was!
        if lower < 0:
            return baselineTap
        else:
            return baselineTap - lower

    # Run all the powerflows.
    powerflows = []
    for doingCvr in [False, True]:
        # For each load level in the tenLoadLevels, run a powerflow with the load objects scaled to the level.
        for desiredLoad in tenLoadLevels:
            # Find the total load that was defined in Milsoft:
            loadList = []
            for key in tree:
                if tree[key].get('object', '') == 'triplex_load':
                    loadList.append(tree[key].get('base_power_12', ''))
            totalLoad = sum([float(x) for x in loadList])
            # Rescale each triplex load:
            for key in tree:
                if tree[key].get('object', '') == 'triplex_load':
                    currentPow = float(tree[key]['base_power_12'])
                    ratio = desiredLoad / totalLoad
                    tree[key]['base_power_12'] = str(currentPow * ratio)
            # If we're doing CVR then lower the voltage.
            if doingCvr:
                # Find the minimum voltage we can tap down to:
                newTapPos = baselineTap
                for row in powerflows:
                    if row.get('loadLevel', '') == desiredLoad:
                        newTapPos = loweringPotential(
                            row.get('lowVoltage', 114))
                # Tap it down to there.
                # MAYBEFIX: do each phase separately because that's how it's done in the field... Oof.
                tree[regConfIndex]['tap_pos_A'] = str(newTapPos)
                tree[regConfIndex]['tap_pos_B'] = str(newTapPos)
                tree[regConfIndex]['tap_pos_C'] = str(newTapPos)
            # Run the model through gridlab and put outputs in the table.
            output = gridlabd.runInFilesystem(tree,
                                              attachments=attachments,
                                              keepFiles=True,
                                              workDir=modelDir)
            os.remove(pJoin(modelDir, "PID.txt"))
            p = output['Zregulator.csv']['power_in.real'][0]
            q = output['Zregulator.csv']['power_in.imag'][0]
            s = math.sqrt(p**2 + q**2)
            lossTotal = 0.0
            for device in [
                    'ZlossesOverhead.csv', 'ZlossesTransformer.csv',
                    'ZlossesUnderground.csv'
            ]:
                for letter in ['A', 'B', 'C']:
                    r = output[device]['sum(power_losses_' + letter +
                                       '.real)'][0]
                    i = output[device]['sum(power_losses_' + letter +
                                       '.imag)'][0]
                    lossTotal += math.sqrt(r**2 + i**2)
            ## Entire output:
            powerflows.append({
                'doingCvr': doingCvr,
                'loadLevel': desiredLoad,
                'realPower': p,
                'powerFactor': p / s,
                'losses': lossTotal,
                'subVoltage':
                (output['ZsubstationBottom.csv']['voltage_A'][0] +
                 output['ZsubstationBottom.csv']['voltage_B'][0] +
                 output['ZsubstationBottom.csv']['voltage_C'][0]) / 3 / 60,
                'lowVoltage':
                output['ZvoltageJiggle.csv']['min(voltage_12.mag)'][0] / 2,
                'highVoltage':
                output['ZvoltageJiggle.csv']['max(voltage_12.mag)'][0] / 2
            })
    # For a given load level, find two points to interpolate on.
    def getInterpPoints(t):
        ''' Find the two points we can interpolate from. '''
        ''' tests pass on [tenLoadLevels[0],tenLoadLevels[5]+499,tenLoadLevels[-1]-988] '''
        loc = sorted(tenLoadLevels + [t]).index(t)
        if loc == 0:
            return (tenLoadLevels[0], tenLoadLevels[1])
        elif loc > len(tenLoadLevels) - 2:
            return (tenLoadLevels[-2], tenLoadLevels[-1])
        else:
            return (tenLoadLevels[loc - 1], tenLoadLevels[loc + 1])

    # Calculate peak reduction.
    for row in monthData:
        peak = row['histPeak']
        peakPoints = getInterpPoints(peak)
        peakTopBase = [
            x for x in powerflows if x.get('loadLevel', '') == peakPoints[-1]
            and x.get('doingCvr', '') == False
        ][0]
        peakTopCvr = [
            x for x in powerflows if x.get('loadLevel', '') == peakPoints[-1]
            and x.get('doingCvr', '') == True
        ][0]
        peakBottomBase = [
            x for x in powerflows if x.get('loadLevel', '') == peakPoints[0]
            and x.get('doingCvr', '') == False
        ][0]
        peakBottomCvr = [
            x for x in powerflows if x.get('loadLevel', '') == peakPoints[0]
            and x.get('doingCvr', '') == True
        ][0]
        # Linear interpolation so we aren't running umpteen million loadflows.
        x = (peakPoints[0], peakPoints[1])
        y = (peakTopBase['realPower'] - peakTopCvr['realPower'],
             peakBottomBase['realPower'] - peakBottomCvr['realPower'])
        peakRed = y[0] + (y[1] - y[0]) * (peak - x[0]) / (x[1] - x[0])
        row['peakReduction'] = peakRed
    # Calculate energy reduction and loss reduction based on average load.
    for row in monthData:
        avgEnergy = row['histAverage']
        energyPoints = getInterpPoints(avgEnergy)
        avgTopBase = [
            x for x in powerflows if x.get('loadLevel', '') == energyPoints[-1]
            and x.get('doingCvr', '') == False
        ][0]
        avgTopCvr = [
            x for x in powerflows if x.get('loadLevel', '') == energyPoints[-1]
            and x.get('doingCvr', '') == True
        ][0]
        avgBottomBase = [
            x for x in powerflows if x.get('loadLevel', '') == energyPoints[0]
            and x.get('doingCvr', '') == False
        ][0]
        avgBottomCvr = [
            x for x in powerflows if x.get('loadLevel', '') == energyPoints[0]
            and x.get('doingCvr', '') == True
        ][0]
        # Linear interpolation so we aren't running umpteen million loadflows.
        x = (energyPoints[0], energyPoints[1])
        y = (avgTopBase['realPower'] - avgTopCvr['realPower'],
             avgBottomBase['realPower'] - avgBottomCvr['realPower'])
        energyRed = y[0] + (y[1] - y[0]) * (avgEnergy - x[0]) / (x[1] - x[0])
        row['energyReduction'] = energyRed
        lossY = (avgTopBase['losses'] - avgTopCvr['losses'],
                 avgBottomBase['losses'] - avgBottomCvr['losses'])
        lossRed = lossY[0] + (lossY[1] - lossY[0]) * (avgEnergy -
                                                      x[0]) / (x[1] - x[0])
        row['lossReduction'] = lossRed
    # Multiply by dollars.
    for row in monthData:
        row['energyReductionDollars'] = row['energyReduction'] / 1000 * (
            rates['wholesaleEnergyCostPerKwh'] -
            rates['retailEnergyCostPerKwh'])
        row['peakReductionDollars'] = row['peakReduction'] / 1000 * rates[
            'peakDemandCost' + row['season'] + 'PerKw']
        row['lossReductionDollars'] = row['lossReduction'] / 1000 * rates[
            'wholesaleEnergyCostPerKwh']
    # Pretty output
    def plotTable(inData):
        ''' Render inData (first row = column labels) as a matplotlib table. '''
        fig = plt.figure(figsize=(10, 5))
        plt.axis('off')
        plt.tight_layout()
        plt.table(cellText=[row for row in inData[1:]],
                  loc='center',
                  rowLabels=range(len(inData) - 1),
                  colLabels=inData[0])

    def dictalToMatrix(dictList):
        ''' Take our dictal format to a matrix. '''
        matrix = [dictList[0].keys()]
        for row in dictList:
            matrix.append(row.values())
        return matrix

    # Powerflow results.
    plotTable(dictalToMatrix(powerflows))
    plt.savefig(pJoin(modelDir, "powerflowTable.png"))
    # Monetary results.
    ## To print partial money table
    monthDataMat = dictalToMatrix(monthData)
    dimX = len(monthDataMat)
    dimY = len(monthDataMat[0])
    monthDataPart = []
    for k in range(0, dimX):
        monthDatatemp = []
        for m in range(4, dimY):
            monthDatatemp.append(monthDataMat[k][m])
        monthDataPart.append(monthDatatemp)
    plotTable(monthDataPart)
    plt.savefig(pJoin(modelDir, "moneyTable.png"))
    outData["monthDataMat"] = dictalToMatrix(monthData)
    outData["monthDataPart"] = monthDataPart
    # Graph the money data.
    fig = plt.figure(figsize=(10, 8))
    indices = [r['monthName'] for r in monthData]
    d1 = [r['energyReductionDollars'] for r in monthData]
    d2 = [r['lossReductionDollars'] for r in monthData]
    d3 = [r['peakReductionDollars'] for r in monthData]
    ticks = range(len(d1))
    bar_erd = plt.bar(ticks, d1, color='red')
    bar_lrd = plt.bar(ticks, d2, color='green')
    bar_prd = plt.bar(ticks, d3, color='blue', yerr=d2)
    plt.legend([bar_prd[0], bar_lrd[0], bar_erd[0]], [
        'peakReductionDollars', 'lossReductionDollars',
        'energyReductionDollars'
    ],
               bbox_to_anchor=(0., 1.015, 1., .102),
               loc=3,
               ncol=2,
               mode="expand",
               borderaxespad=0.1)
    plt.xticks([t + 0.5 for t in ticks], indices)
    plt.ylabel('Utility Savings ($)')
    plt.tight_layout(5.5, 1.3, 1.2)
    fig.autofmt_xdate()
    plt.savefig(pJoin(modelDir, "spendChart.png"))
    outData["energyReductionDollars"] = d1
    outData["lossReductionDollars"] = d2
    outData["peakReductionDollars"] = d3
    # Graph the cumulative savings.
    fig = plt.figure(figsize=(10, 5))
    annualSavings = sum(d1) + sum(d2) + sum(d3)
    annualSave = lambda x: (annualSavings - rates['omCost']) * x - rates[
        'capitalCost']
    simplePayback = rates['capitalCost'] / (annualSavings - rates['omCost'])
    plt.xlabel('Year After Installation')
    plt.xlim(0, 30)
    plt.ylabel('Cumulative Savings ($)')
    plt.plot([0 for x in range(31)], c='gray')
    plt.axvline(x=simplePayback, ymin=0, ymax=1, c='gray', linestyle='--')
    plt.plot([annualSave(x) for x in range(31)], c='green')
    plt.savefig(pJoin(modelDir, "savingsChart.png"))
    outData["annualSave"] = [annualSave(x) for x in range(31)]
    # For autotest, there won't be such file.
    return outData
Example #6
0
 def graph(graphname, mst, referenceMST, tree):
     '''Build a networkx graph of expected connectivity for a .omd tree plus
     two MST adjacency matrices, then render and save it via workDir.'''
     plt.close('all')
     outGraph = nx.Graph()

     def _addMatrixEdges(matrix, asLoad):
         # One edge per 1-cell of the square matrix; node names come from
         # the distance or voltage frame depending on the useDist flag.
         dim = len(matrix)
         for r in range(dim):
             for c in range(dim):
                 if matrix[r][c] != 1:
                     continue
                 frame = nodes if useDist == 'True' else volt
                 endA = str(frame.loc[r, 'node_name'])
                 endB = str(frame.loc[c, 'node_name'])
                 if asLoad:
                     outGraph.add_edge(endA, endB,
                                       attr_dict={'type': 'load'})
                 else:
                     outGraph.add_edge(endA, endB)

     for key in tree:
         entry = tree[key]
         if 'name' not in entry:
             continue
         objectType = entry.get('object')
         reclKinds = dict.fromkeys(['recloser'], False)
         isRecloser = objectType in reclKinds.keys()
         if isRecloser and 'addedRecloser' in entry.get('name', ''):
             # HACK: set the recloser as a swingNode in order to make it hot pink
             outGraph.add_edge(entry['from'], entry['to'],
                               attr_dict={'type': 'swingNode'})
         elif isRecloser and 'addedRecloser' not in entry.get('name', ''):
             outGraph.add_edge(entry['from'], entry['to'])
         elif 'parent' in entry.keys() and objectType not in reclKinds:
             outGraph.add_edge(entry['name'], entry['parent'],
                               attr_dict={'type': 'parentChild', 'phases': 1})
             outGraph.nodes[entry['name']]['type'] = entry['object']
             # Attached houses via gridEdit.html may lack lat/lon values,
             # so fall back to the origin on conversion failure.
             try:
                 outGraph.nodes[entry['name']]['pos'] = (
                     float(entry.get('latitude', 0)),
                     float(entry.get('longitude', 0)))
             except:
                 outGraph.nodes[entry['name']]['pos'] = (0.0, 0.0)
         elif 'from' in entry.keys():
             # Edge addition is disabled here; phase count is still computed
             # as in the original (it may raise on malformed phase strings).
             myPhase = feeder._phaseCount(entry.get('phases', 'AN'))
             # outGraph.add_edge(entry['from'],entry['to'],attr_dict={'name':entry.get('name',''),'type':entry['object'],'phases':myPhase})
         elif entry['name'] in outGraph:
             # Edge already led to node's addition, so just set the attributes:
             outGraph.nodes[entry['name']]['type'] = entry['object']
         else:
             outGraph.add_node(entry['name'],
                               attr_dict={'type': entry['object']})
         if 'latitude' in entry.keys() and 'longitude' in entry.keys():
             try:
                 outGraph.nodes.get(entry['name'], {})['pos'] = (
                     float(entry['latitude']), float(entry['longitude']))
             except:
                 outGraph.nodes.get(entry['name'], {})['pos'] = (0.0, 0.0)
     # populate the graph with edges
     _addMatrixEdges(referenceMST, asLoad=False)
     _addMatrixEdges(mst, asLoad=True)
     feeder.latLonNxGraph(outGraph,
                          labels=True,
                          neatoLayout=True,
                          showPlot=False)
     plt.savefig(workDir + graphname)
Example #7
0
def valueOfAdditionalRecloser(pathToGlm, workDir, lineFaultType, lineNameForRecloser, failureDistribution, failure_1, failure_2, restorationDistribution, rest_1, rest_2, maxOutageLength, kwh_cost, restoration_cost, average_hardware_cost, simTime, faultType, sustainedOutageThreshold):
	'''Analyze the monetary value of adding an additional recloser to a feeder system.

	Runs recloserAnalysis() on the GLM (producing outage tables for the
	no-recloser and recloser cases), computes lost-kWh, restoration-labor and
	hardware costs for both cases, writes an HTML cost-comparison table to
	workDir, and builds plotly figures plus a feeder diagram PNG.

	Returns a dict with keys:
		'costStatsHtml' -- HTML table string comparing the two cases,
		'fig1' -- SAIDI/SAIFI and MAIFI bar charts,
		'fig2' -- failure/restoration distribution plots,
		'fig3' -- outage timeline without the recloser (stacked bars),
		'fig4' -- outage timeline with the recloser (stacked bars).

	NOTE(review): the kwh_cost parameter is accepted but never used here.
	'''
	
	# perform analyses on the glm
	# mc1/mc2 are outage-event tables (pandas DataFrames with 'Start'/'Finish'
	# '%Y-%m-%d %H:%M:%S' string columns) for the no-recloser/recloser cases;
	# test1/test2 hold SAIDI/SAIFI and MAIFI metrics; tree1 is the parsed GLM tree.
	numberOfCustomers, mc1, mc2, tree1, test1, test2 = recloserAnalysis(pathToGlm, workDir, lineFaultType, lineNameForRecloser, failureDistribution, failure_1, failure_2, restorationDistribution, rest_1, rest_2, maxOutageLength, simTime, faultType, sustainedOutageThreshold)

	# check to see if work directory is specified
	# NOTE(review): recloserAnalysis() above already received the original workDir;
	# creating a temp dir only now means earlier outputs may be elsewhere — verify.
	if not workDir:
		workDir = tempfile.mkdtemp()
		print '@@@@@@', workDir

	# Find SAIDI/SAIFI/MAIFI manually from Metrics_Output
	# NOTE(review): these six manual stats are computed but never used below.
	manualNoReclSAIDI, manualNoReclSAIFI, manualNoReclMAIFI = manualOutageStats(numberOfCustomers, mc1, sustainedOutageThreshold)
	manualReclSAIDI, manualReclSAIFI, manualReclMAIFI = manualOutageStats(numberOfCustomers, mc2, sustainedOutageThreshold)

	# calculate average consumption over the feeder system given meter data
	# NOTE(review): "consumption" is actually the mean nominal_voltage over all
	# meter/triplex_meter objects — presumably used as a consumption proxy;
	# confirm intended units. Raises ZeroDivisionError if the GLM has no meters.
	numberOfMeters = 0
	sumOfVoltages = 0.0
	for key in tree1:
		if tree1[key].get('object','') in ['meter', 'triplex_meter']:
			numberOfMeters += 1
			sumOfVoltages += float(tree1[key]['nominal_voltage'])
	average_consumption = sumOfVoltages/numberOfMeters

	# Calculate initial and final outage costs
	# calculate customer costs (lost kWh sales: SAIDI * customers * avg consumption)
	initialCustomerCost = int(test1.get('noRecl-SAIDI')*numberOfCustomers*float(average_consumption))
	finalCustomerCost = int(test1.get('recl-SAIDI')*numberOfCustomers*float(average_consumption))

	# calculate restoration costs
	# sum total outage duration (seconds, via datetime_to_float) over all events
	initialDuration = 0.0
	finalDuration = 0.0
	row = 0
	row_count_mc1 = mc1.shape[0]
	row_count_mc2 = mc2.shape[0]
	while row < row_count_mc1:
		initialDuration +=  datetime_to_float(datetime.datetime.strptime(mc1.loc[row, 'Finish'], '%Y-%m-%d %H:%M:%S')) - datetime_to_float(datetime.datetime.strptime(mc1.loc[row, 'Start'], '%Y-%m-%d %H:%M:%S'))
		row = row + 1
	row = 0
	while row < row_count_mc2:
		finalDuration +=  datetime_to_float(datetime.datetime.strptime(mc2.loc[row, 'Finish'], '%Y-%m-%d %H:%M:%S')) - datetime_to_float(datetime.datetime.strptime(mc2.loc[row, 'Start'], '%Y-%m-%d %H:%M:%S'))
		row = row + 1

	# labor cost scales with total outage seconds
	initialRestorationCost = int(initialDuration*float(restoration_cost))
	finalRestorationCost = int(finalDuration*float(restoration_cost))

	# calculate hardware costs (per outage event)
	initialHardwareCost = int(row_count_mc1 * float(average_hardware_cost))
	finalHardwareCost = int(row_count_mc2 * float(average_hardware_cost))

	# put it all together and calculate outage costs
	initialOutageCost = initialCustomerCost + initialRestorationCost + initialHardwareCost
	finalOutageCost = finalCustomerCost + finalRestorationCost + finalHardwareCost

	def costStatsCalc(initCustCost=None, finCustCost=None, initRestCost=None, finRestCost=None, initHardCost=None, finHardCost=None, initOutCost=None, finOutCost=None):
		'''Render the no-recloser vs recloser cost comparison as an HTML table string.'''
		new_html_str = """
			<table cellpadding="0" cellspacing="0">
				<thead>
					<tr>
						<th></th>
						<th>No-Recloser</th>
						<th>Recloser</th>
					</tr>
				</thead>
				<tbody>"""
		new_html_str += "<tr><td><b>Lost kWh Sales</b></td><td>"+str(initCustCost)+"</td><td>"+str(finCustCost)+"</td></tr>"
		new_html_str += "<tr><td><b>Restoration Labor Cost</b></td><td>"+str(initRestCost)+"</td><td>"+str(finRestCost)+"</td></tr>"
		new_html_str += "<tr><td><b>Restoration Hardware Cost</b></td><td>"+str(initHardCost)+"</td><td>"+str(finHardCost)+"</td></tr>"
		new_html_str += "<tr><td><b>Outage Cost</b></td><td>"+str(initOutCost)+"</td><td>"+str(finOutCost)+"</td></tr>"
		new_html_str +="""</tbody></table>"""

		return new_html_str


	# print all intermediate and final costs
	costStatsHtml = costStatsCalc(
		initCustCost = initialCustomerCost,
		finCustCost = finalCustomerCost,
		initRestCost = initialRestorationCost,
		finRestCost = finalRestorationCost,
		initHardCost = initialHardwareCost,
		finHardCost = finalHardwareCost,
		initOutCost = initialOutageCost,
		finOutCost = finalOutageCost)
	with open(pJoin(workDir, "costStatsCalc.html"), "w") as costFile:
		costFile.write(costStatsHtml)

	# bar chart to show change in SAIDI/SAIFI values
	# x = sorted metric names, y = corresponding values (sorted by key)
	row1 = sorted(test1)
	col1 = [value for (key, value) in sorted(test1.items())]
	dataSaidi = go.Bar(x = row1, y = col1, name = 'SAIDI SAIFI Recloser Analysis')

	# bar chart to show change in MAIFI values
	row2 = sorted(test2)
	col2 = [value for (key, value) in sorted(test2.items())]
	dataMaifi = go.Bar(x = row2, y = col2, name = 'MAIFI Recloser Analysis')

	fig1 = make_subplots(rows=1, cols=2)

	fig1.add_trace(dataSaidi, row=1, col=1)
	fig1.add_trace(dataMaifi, row=1, col=2)
	fig1.layout.update(showlegend=False)

	# stacked bar chart to show outage timeline without the recloser
	# date[d] collects the durations (seconds) of all faults finishing on day d
	row = 0
	date = [[] for _ in range(365)]
	row_count_mc1 = mc1.shape[0]
	while row < row_count_mc1:
		dt = datetime.datetime.strptime(mc1.loc[row, 'Finish'], '%Y-%m-%d %H:%M:%S')
		day = int(dt.strftime('%j')) - 1
		date[day].append(datetime_to_float(datetime.datetime.strptime(mc1.loc[row, 'Finish'], '%Y-%m-%d %H:%M:%S')) - datetime_to_float(datetime.datetime.strptime(mc1.loc[row, 'Start'], '%Y-%m-%d %H:%M:%S')))
		row += 1
	# convert array of durations into jagged numpy object
	# NOTE(review): np.array on ragged lists relies on legacy NumPy implicitly
	# building an object array; newer NumPy raises unless dtype=object is passed.
	jaggedData = np.array(date)
	# get lengths of each row of data
	lens = np.array([len(i) for i in jaggedData])
	# mask of valid places in each row to fill with zeros
	mask = np.arange(lens.max()) < lens[:,None]
	# setup output array and put elements from jaggedData into masked positions
	data = np.zeros(mask.shape, dtype=jaggedData.dtype)
	data[mask] = np.concatenate(jaggedData)
	numCols = data.shape[1]
	graphData = []
	currCol = 0
	# one stacked bar layer per "k-th fault of the day"
	while currCol < numCols:
		graphData.append(go.Bar(name='Fault ' + str(currCol+1), x = list(range(365)), y = data[:,currCol]))
		currCol += 1
	fig3 = go.Figure(data = graphData)
	fig3.layout.update(
		barmode='stack',
		showlegend=False,
		xaxis=go.layout.XAxis(
			title=go.layout.xaxis.Title(text='Day of the year')
		),
		yaxis=go.layout.YAxis(
			title=go.layout.yaxis.Title(text='Outage time (seconds)')
		)
	)
	# stacked bar chart to show outage timeline with recloser
	# (same construction as fig3, using the recloser-case table mc2)
	row = 0
	date = [[] for _ in range(365)]
	row_count_mc2 = mc2.shape[0]
	while row < row_count_mc2:
		dt = datetime.datetime.strptime(mc2.loc[row, 'Finish'], '%Y-%m-%d %H:%M:%S')
		day = int(dt.strftime('%j')) - 1
		date[day].append(datetime_to_float(datetime.datetime.strptime(mc2.loc[row, 'Finish'], '%Y-%m-%d %H:%M:%S')) - datetime_to_float(datetime.datetime.strptime(mc2.loc[row, 'Start'], '%Y-%m-%d %H:%M:%S')))
		row += 1
	# convert array of durations into jagged numpy object
	jaggedData = np.array(date)
	# get lengths of each row of data
	lens = np.array([len(i) for i in jaggedData])
	# mask of valid places in each row to fill with zeros
	mask = np.arange(lens.max()) < lens[:,None]
	# setup output array and put elements from jaggedData into masked positions
	data = np.zeros(mask.shape, dtype=jaggedData.dtype)
	data[mask] = np.concatenate(jaggedData)
	numCols = data.shape[1]
	graphData = []
	currCol = 0
	while currCol < numCols:
		graphData.append(go.Bar(name='Fault ' + str(currCol+1), x = list(range(365)), y = data[:,currCol]))
		currCol += 1
	fig4 = go.Figure(data = graphData)
	fig4.layout.update(barmode='stack', showlegend=False, xaxis=go.layout.XAxis(title=go.layout.xaxis.Title(text='Day of the year')), yaxis=go.layout.YAxis(title=go.layout.yaxis.Title(text='Outage time (seconds)')))

	# graph distribution data
	fig2 = make_subplots(rows=1, cols=2, shared_yaxes=True, subplot_titles=('Failure Distribution', 'Restoration Distribution'))
	
	# graph failure distribution	
	dataFail = distributiongraph(failureDistribution, failure_1, failure_2, 'Failure Distribution')
	fig2.add_trace(dataFail, row=1, col=1)
	# graph restoration distribution
	dataRest = distributiongraph(restorationDistribution, rest_1, rest_2, 'Restoration Distribution')
	fig2.add_trace(dataRest,row=1, col=2)
	fig2['layout']['xaxis1'].update(title='Time to failure (seconds)')
	fig2['layout']['xaxis2'].update(title='Time to restoration (seconds)')
	fig2['layout']['yaxis1'].update(title='Probability distribution function')
	fig2.layout.update(showlegend=False)
	# feeder chart with recloser
	# NOTE(review): graph.node[...] / attr_dict= are NetworkX 1.x APIs; 2.x
	# renamed these (graph.nodes[...], keyword attributes) — pin the dependency.
	outGraph = nx.Graph()
	for key in tree1:
		item = tree1[key]
		if 'name' in item.keys():
			obType = item.get('object')
			reclDevices = dict.fromkeys(['recloser'], False)
			if (obType in reclDevices.keys() and 'addedRecloser' in item.get('name', '')):
				# HACK: set the recloser as a swingNode in order to make it hot pink
				outGraph.add_edge(item['from'],item['to'], attr_dict={'type':'swingNode'})
			elif (obType in reclDevices.keys() and 'addedRecloser' not in item.get('name','')):
				outGraph.add_edge(item['from'],item['to'])
			elif 'parent' in item.keys() and obType not in reclDevices:
				outGraph.add_edge(item['name'],item['parent'], attr_dict={'type':'parentChild','phases':1})
				outGraph.node[item['name']]['type']=item['object']
				# Note that attached houses via gridEdit.html won't have lat/lon values, so this try is a workaround.
				try: outGraph.node[item['name']]['pos']=(float(item.get('latitude',0)),float(item.get('longitude',0)))
				except: outGraph.node[item['name']]['pos']=(0.0,0.0)
			elif 'from' in item.keys():
				myPhase = feeder._phaseCount(item.get('phases','AN'))
				outGraph.add_edge(item['from'],item['to'],attr_dict={'name':item.get('name',''),'type':item['object'],'phases':myPhase})
			elif item['name'] in outGraph:
				# Edge already led to node's addition, so just set the attributes:
				outGraph.node[item['name']]['type']=item['object']
			else:
				outGraph.add_node(item['name'],attr_dict={'type':item['object']})
			if 'latitude' in item.keys() and 'longitude' in item.keys():
				try: outGraph.node.get(item['name'],{})['pos']=(float(item['latitude']),float(item['longitude']))
				except: outGraph.node.get(item['name'],{})['pos']=(0.0,0.0)
	feeder.latLonNxGraph(outGraph, labels=True, neatoLayout=True, showPlot=True)
	plt.savefig(workDir + '/feeder_chart')
	return {'costStatsHtml': costStatsHtml, 'fig1': fig1, 'fig2': fig2, 'fig3': fig3, 'fig4': fig4}
예제 #8
0
파일: cvrStatic.py 프로젝트: dpinney/omf
def work(modelDir, inputDict):
	'''Run the static CVR model in the foreground. WARNING: can take about a minute.

	Loads the feeder (.omd) from modelDir, charts the historical SCADA month
	data and the feeder topology, runs GridLAB-D powerflows at ten load levels
	with and without CVR (lowered substation regulator taps), interpolates the
	monthly peak/energy/loss reductions, converts them to dollars, and writes
	several chart PNGs into modelDir.

	:param modelDir: directory containing exactly one .omd feeder; output
		charts are written here.
	:param inputDict: model inputs (rates, monthly averages/peaks, baselineTap,
		ZIP-load fractions); mutated in place ("feederName1" is set).
	:return: outData dict of chart data for the web front end.
	'''
	# Global vars, and load data from the model directory.
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	feederPath = pJoin(modelDir,feederName+'.omd')
	feederJson = json.load(open(feederPath))
	tree = feederJson.get("tree",{})
	attachments = feederJson.get("attachments",{})
	outData = {}
	''' Run CVR analysis. '''
	# Reformate monthData and rates.
	rates = {k:float(inputDict[k]) for k in ['capitalCost', 'omCost', 'wholesaleEnergyCostPerKwh',
		'retailEnergyCostPerKwh', 'peakDemandCostSpringPerKw', 'peakDemandCostSummerPerKw',
		'peakDemandCostFallPerKw', 'peakDemandCostWinterPerKw']}
	monthNames = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August',
		'September', 'October', 'November', 'December']
	monthToSeason = {'January':'Winter','February':'Winter','March':'Spring','April':'Spring',
		'May':'Spring','June':'Summer','July':'Summer','August':'Summer',
		'September':'Fall','October':'Fall','November':'Fall','December':'Winter'}
	monthData = []
	# Build one record per month from the "janAvg"/"janPeak"-style inputs.
	for i, x in enumerate(monthNames):
		monShort = x[0:3].lower()
		season = monthToSeason[x]
		histAvg = float(inputDict.get(monShort + "Avg", 0))
		histPeak = float(inputDict.get(monShort + "Peak", 0))
		monthData.append({"monthId":i, "monthName":x, "histAverage":histAvg,
			"histPeak":histPeak, "season":season})
	# Graph the SCADA data.
	fig = plt.figure(figsize=(10,6))
	indices = [r['monthName'] for r in monthData]
	d1 = [r['histPeak']/(10**3) for r in monthData]
	d2 = [r['histAverage']/(10**3) for r in monthData]
	ticks = range(len(d1))
	bar_peak = plt.bar(ticks,d1,color='gray')
	bar_avg = plt.bar(ticks,d2,color='dimgray')
	plt.legend([bar_peak[0],bar_avg[0]],['histPeak','histAverage'],bbox_to_anchor=(0., 1.015, 1., .102), loc=3,
	   ncol=2, mode="expand", borderaxespad=0.1)
	plt.xticks([t+0.5 for t in ticks],indices)
	plt.ylabel('Mean and peak historical power consumptions (kW)')
	fig.autofmt_xdate()
	plt.savefig(pJoin(modelDir,"scadaChart.png"))
	outData["histPeak"] = d1
	outData["histAverage"] = d2
	outData["monthName"] = [name[0:3] for name in monthNames]
	# Graph feeder.
	fig = plt.figure(figsize=(10,10))
	myGraph = feeder.treeToNxGraph(tree)
	feeder.latLonNxGraph(myGraph, neatoLayout=False)
	plt.savefig(pJoin(modelDir,"feederChart.png"))
	# NOTE(review): str.encode("base64") is Python 2 only; under Python 3 use
	# base64.b64encode on bytes instead.
	with open(pJoin(modelDir,"feederChart.png"),"rb") as inFile:
		outData["feederChart"] = inFile.read().encode("base64")
	# Get the load levels we need to test.
	allLoadLevels = [x.get('histPeak',0) for x in monthData] + [y.get('histAverage',0) for y in monthData]
	maxLev = _roundOne(max(allLoadLevels),'up')
	minLev = _roundOne(min(allLoadLevels),'down')
	# Python 2 range() returns a list; getInterpPoints below concatenates it
	# with a list, so this must stay a list under Python 3 (list(range(...))).
	tenLoadLevels = range(int(minLev),int(maxLev),int((maxLev-minLev)/10))
	# Gather variables from the feeder.
	for key in tree.keys():
		# Set clock to single timestep.
		if tree[key].get('clock','') == 'clock':
			tree[key] = {"timezone":"PST+8PDT",
				"stoptime":"'2013-01-01 00:00:00'",
				"starttime":"'2013-01-01 00:00:00'",
				"clock":"clock"}
		# Save swing node index.
		# NOTE(review): swingIndex/swingName stay unbound if the feeder has no
		# swing bus; the recorder setup below would then raise NameError.
		if tree[key].get('bustype','').lower() == 'swing':
			swingIndex = key
			swingName = tree[key].get('name')
		# Remove all includes.
		# NOTE(review): BUG — "del key" deletes only the loop variable, not
		# tree[key], so #include entries are never actually removed. Likely
		# intended: del tree[key] (safe here since tree.keys() is a copy in py2).
		if tree[key].get('omftype','') == '#include':
			del key
	# Find the substation regulator and config.
	# NOTE(review): regIndex/regConfName stay unbound if no regulator hangs off
	# the swing node; the "if not regConfName" guard below cannot catch that.
	for key in tree:
		if tree[key].get('object','') == 'regulator' and tree[key].get('from','') == swingName:
			regIndex = key
			regConfName = tree[key]['configuration']
	if not regConfName: regConfName = False
	for key in tree:
		if tree[key].get('name','') == regConfName:
			regConfIndex = key
	# Set substation regulator to manual operation.
	baselineTap = int(inputDict.get("baselineTap")) # GLOBAL VARIABLE FOR DEFAULT TAP POSITION
	tree[regConfIndex] = {
		'name':tree[regConfIndex]['name'],
		'object':'regulator_configuration',
		'connect_type':'1',
		'raise_taps':'10',
		'lower_taps':'10',
		'CT_phase':'ABC',
		'PT_phase':'ABC',
		'regulation':'0.10', #Yo, 0.10 means at tap_pos 10 we're 10% above 120V.
		'Control':'MANUAL',
		'control_level':'INDIVIDUAL',
		'Type':'A',
		'tap_pos_A':str(baselineTap),
		'tap_pos_B':str(baselineTap),
		'tap_pos_C':str(baselineTap) }
	# Attach recorders relevant to CVR.
	recorders = [
		{'object': 'collector',
		'file': 'ZlossesTransformer.csv',
		'group': 'class=transformer',
		'limit': '0',
		'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
		{'object': 'collector',
		'file': 'ZlossesUnderground.csv',
		'group': 'class=underground_line',
		'limit': '0',
		'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
		{'object': 'collector',
		'file': 'ZlossesOverhead.csv',
		'group': 'class=overhead_line',
		'limit': '0',
		'property': 'sum(power_losses_A.real),sum(power_losses_A.imag),sum(power_losses_B.real),sum(power_losses_B.imag),sum(power_losses_C.real),sum(power_losses_C.imag)'},
		{'object': 'recorder',
		'file': 'Zregulator.csv',
		'limit': '0',
		'parent': tree[regIndex]['name'],
		'property': 'tap_A,tap_B,tap_C,power_in.real,power_in.imag'},
		{'object': 'collector',
		'file': 'ZvoltageJiggle.csv',
		'group': 'class=triplex_meter',
		'limit': '0',
		'property': 'min(voltage_12.mag),mean(voltage_12.mag),max(voltage_12.mag),std(voltage_12.mag)'},
		{'object': 'recorder',
		'file': 'ZsubstationTop.csv',
		'limit': '0',
		'parent': tree[swingIndex]['name'],
		'property': 'voltage_A,voltage_B,voltage_C'},
		{'object': 'recorder',
		'file': 'ZsubstationBottom.csv',
		'limit': '0',
		'parent': tree[regIndex]['to'],
		'property': 'voltage_A,voltage_B,voltage_C'} ]
	# Append recorders after the current highest tree key.
	biggest = 1 + max([int(k) for k in tree.keys()])
	for index, rec in enumerate(recorders):
		tree[biggest + index] = rec
	# Change constant PF loads to ZIP loads. (See evernote for rationale about 50/50 power/impedance mix.)
	blankZipModel = {'object':'triplex_load',
		'name':'NAMEVARIABLE',
		'base_power_12':'POWERVARIABLE',
		'power_fraction_12': str(inputDict.get("p_percent")),
		'impedance_fraction_12': str(inputDict.get("z_percent")),
		'current_fraction_12': str(inputDict.get("i_percent")),
		'power_pf_12': str(inputDict.get("power_factor")), #MAYBEFIX: we can probably get this PF data from the Milsoft loads.
		'impedance_pf_12':str(inputDict.get("power_factor")),
		'current_pf_12':str(inputDict.get("power_factor")),
		'nominal_voltage':'120',
		'phases':'PHASESVARIABLE',
		'parent':'PARENTVARIABLE' }
	def powerClean(powerStr):
		''' take 3339.39+1052.29j to 3339.39 '''
		return powerStr[0:powerStr.find('+')]
	for key in tree:
		if tree[key].get('object','') == 'triplex_node':
			# Get existing variables.
			name = tree[key].get('name','')
			power = tree[key].get('power_12','')
			parent = tree[key].get('parent','')
			phases = tree[key].get('phases','')
			# Replace object and reintroduce variables.
			tree[key] = copy(blankZipModel)
			tree[key]['name'] = name
			tree[key]['base_power_12'] = powerClean(power)
			tree[key]['parent'] = parent
			tree[key]['phases'] = phases
	# Function to determine how low we can tap down in the CVR case:
	def loweringPotential(baseLine):
		''' Given a baseline end of line voltage, how many more percent can we shave off the substation voltage? '''
		''' testsWePass = [122.0,118.0,200.0,110.0] '''
		# Percent headroom above the 114 V floor, minus a 1% safety margin.
		lower = int(math.floor((baseLine/114.0-1)*100)) - 1
		# If lower is negative, we can't return it because we'd be undervolting beyond what baseline already was!
		if lower < 0:
			return baselineTap
		else:
			return baselineTap - lower
	# Run all the powerflows.
	powerflows = []
	for doingCvr in [False, True]:
		# For each load level in the tenLoadLevels, run a powerflow with the load objects scaled to the level.
		for desiredLoad in tenLoadLevels:
			# Find the total load that was defined in Milsoft:
			loadList = []
			for key in tree:
				if tree[key].get('object','') == 'triplex_load':
					loadList.append(tree[key].get('base_power_12',''))
			totalLoad = sum([float(x) for x in loadList])
			# Rescale each triplex load:
			for key in tree:
				if tree[key].get('object','') == 'triplex_load':
					currentPow = float(tree[key]['base_power_12'])
					ratio = desiredLoad/totalLoad
					tree[key]['base_power_12'] = str(currentPow*ratio)
			# If we're doing CVR then lower the voltage.
			if doingCvr:
				# Find the minimum voltage we can tap down to:
				# (uses the baseline run's low voltage recorded in the first pass)
				newTapPos = baselineTap
				for row in powerflows:
					if row.get('loadLevel','') == desiredLoad:
						newTapPos = loweringPotential(row.get('lowVoltage',114))
				# Tap it down to there.
				# MAYBEFIX: do each phase separately because that's how it's done in the field... Oof.
				tree[regConfIndex]['tap_pos_A'] = str(newTapPos)
				tree[regConfIndex]['tap_pos_B'] = str(newTapPos)
				tree[regConfIndex]['tap_pos_C'] = str(newTapPos)
			# Run the model through gridlab and put outputs in the table.
			output = gridlabd.runInFilesystem(tree, attachments=attachments,
				keepFiles=True, workDir=modelDir)
			os.remove(pJoin(modelDir,"PID.txt"))
			p = output['Zregulator.csv']['power_in.real'][0]
			q = output['Zregulator.csv']['power_in.imag'][0]
			s = math.sqrt(p**2+q**2)
			lossTotal = 0.0
			for device in ['ZlossesOverhead.csv','ZlossesTransformer.csv','ZlossesUnderground.csv']:
				for letter in ['A','B','C']:
					r = output[device]['sum(power_losses_' + letter + '.real)'][0]
					i = output[device]['sum(power_losses_' + letter + '.imag)'][0]
					lossTotal += math.sqrt(r**2 + i**2)
			## Entire output:
			# subVoltage is the three-phase mean scaled by 1/60; lowVoltage and
			# highVoltage halve the 240 V triplex reading to a 120 V basis.
			powerflows.append({
				'doingCvr':doingCvr,
				'loadLevel':desiredLoad,
				'realPower':p,
				'powerFactor':p/s,
				'losses':lossTotal,
				'subVoltage': (
					output['ZsubstationBottom.csv']['voltage_A'][0] +
					output['ZsubstationBottom.csv']['voltage_B'][0] +
					output['ZsubstationBottom.csv']['voltage_C'][0] )/3/60,
				'lowVoltage':output['ZvoltageJiggle.csv']['min(voltage_12.mag)'][0]/2,
				'highVoltage':output['ZvoltageJiggle.csv']['max(voltage_12.mag)'][0]/2 })
	# For a given load level, find two points to interpolate on.
	def getInterpPoints(t):
		''' Find the two points we can interpolate from. '''
		''' tests pass on [tenLoadLevels[0],tenLoadLevels[5]+499,tenLoadLevels[-1]-988] '''
		loc = sorted(tenLoadLevels + [t]).index(t)
		if loc==0:
			return (tenLoadLevels[0],tenLoadLevels[1])
		elif loc>len(tenLoadLevels)-2:
			return (tenLoadLevels[-2],tenLoadLevels[-1])
		else:
			return (tenLoadLevels[loc-1],tenLoadLevels[loc+1])
	# Calculate peak reduction.
	for row in monthData:
		peak = row['histPeak']
		peakPoints = getInterpPoints(peak)
		peakTopBase = [x for x in powerflows if x.get('loadLevel','') == peakPoints[-1] and x.get('doingCvr','') == False][0]
		peakTopCvr = [x for x in powerflows if x.get('loadLevel','') == peakPoints[-1] and x.get('doingCvr','') == True][0]
		peakBottomBase = [x for x in powerflows if x.get('loadLevel','') == peakPoints[0] and x.get('doingCvr','') == False][0]
		peakBottomCvr = [x for x in powerflows if x.get('loadLevel','') == peakPoints[0] and x.get('doingCvr','') == True][0]
		# Linear interpolation so we aren't running umpteen million loadflows.
		x = (peakPoints[0],peakPoints[1])
		y = (peakTopBase['realPower'] - peakTopCvr['realPower'],
			 peakBottomBase['realPower'] - peakBottomCvr['realPower'])
		peakRed = y[0] + (y[1] - y[0]) * (peak - x[0]) / (x[1] - x[0])
		row['peakReduction'] = peakRed
	# Calculate energy reduction and loss reduction based on average load.
	for row in monthData:
		avgEnergy = row['histAverage']
		energyPoints = getInterpPoints(avgEnergy)
		avgTopBase = [x for x in powerflows if x.get('loadLevel','') == energyPoints[-1] and x.get('doingCvr','') == False][0]
		avgTopCvr = [x for x in powerflows if x.get('loadLevel','') == energyPoints[-1] and x.get('doingCvr','') == True][0]
		avgBottomBase = [x for x in powerflows if x.get('loadLevel','') == energyPoints[0] and x.get('doingCvr','') == False][0]
		avgBottomCvr = [x for x in powerflows if x.get('loadLevel','') == energyPoints[0] and x.get('doingCvr','') == True][0]
		# Linear interpolation so we aren't running umpteen million loadflows.
		x = (energyPoints[0], energyPoints[1])
		y = (avgTopBase['realPower'] - avgTopCvr['realPower'],
			avgBottomBase['realPower'] - avgBottomCvr['realPower'])
		energyRed = y[0] + (y[1] - y[0]) * (avgEnergy - x[0]) / (x[1] - x[0])
		row['energyReduction'] = energyRed
		lossY = (avgTopBase['losses'] - avgTopCvr['losses'],
			avgBottomBase['losses'] - avgBottomCvr['losses'])
		lossRed = lossY[0] + (lossY[1] - lossY[0]) * (avgEnergy - x[0]) / (x[1] - x[0])
		row['lossReduction'] = lossRed
	# Multiply by dollars.
	for row in monthData:
		row['energyReductionDollars'] = row['energyReduction']/1000 * (rates['wholesaleEnergyCostPerKwh'] - rates['retailEnergyCostPerKwh'])
		row['peakReductionDollars'] = row['peakReduction']/1000 * rates['peakDemandCost' + row['season'] + 'PerKw']
		row['lossReductionDollars'] = row['lossReduction']/1000 * rates['wholesaleEnergyCostPerKwh']
	# Pretty output
	def plotTable(inData):
		'''Draw a matrix (header row first) as a matplotlib table figure.'''
		fig = plt.figure(figsize=(10,5))
		plt.axis('off')
		plt.tight_layout()
		plt.table(cellText=[row for row in inData[1:]],
			loc = 'center',
			rowLabels = range(len(inData)-1),
			colLabels = inData[0])
	def dictalToMatrix(dictList):
		''' Take our dictal format to a matrix. '''
		# NOTE(review): assumes every dict shares the first dict's key order;
		# only reliable where dict ordering is deterministic (CPython 3.7+ / py2
		# same-construction dicts) — verify.
		matrix = [dictList[0].keys()]
		for row in dictList:
			matrix.append(row.values())
		return matrix
	# Powerflow results.
	plotTable(dictalToMatrix(powerflows))
	plt.savefig(pJoin(modelDir,"powerflowTable.png"))
	# Monetary results.
	## To print partial money table
	monthDataMat = dictalToMatrix(monthData)
	dimX = len(monthDataMat)
	dimY = len(monthDataMat[0])
	monthDataPart = []
	# Keep only columns 4..end (drops monthId/monthName/histAverage/histPeak).
	for k in range (0,dimX):
		monthDatatemp = []
		for m in range (4,dimY):
			monthDatatemp.append(monthDataMat[k][m])
		monthDataPart.append(monthDatatemp)
	plotTable(monthDataPart)
	plt.savefig(pJoin(modelDir,"moneyTable.png"))
	outData["monthDataMat"] = dictalToMatrix(monthData)
	outData["monthDataPart"] = monthDataPart
	# Graph the money data.
	fig = plt.figure(figsize=(10,8))
	indices = [r['monthName'] for r in monthData]
	d1 = [r['energyReductionDollars'] for r in monthData]
	d2 = [r['lossReductionDollars'] for r in monthData]
	d3 = [r['peakReductionDollars'] for r in monthData]
	ticks = range(len(d1))
	bar_erd = plt.bar(ticks,d1,color='red')
	bar_lrd = plt.bar(ticks,d2,color='green')
	bar_prd = plt.bar(ticks,d3,color='blue',yerr=d2)
	plt.legend([bar_prd[0], bar_lrd[0], bar_erd[0]], ['peakReductionDollars','lossReductionDollars','energyReductionDollars'],bbox_to_anchor=(0., 1.015, 1., .102), loc=3,
	   ncol=2, mode="expand", borderaxespad=0.1)
	plt.xticks([t+0.5 for t in ticks],indices)
	plt.ylabel('Utility Savings ($)')
	plt.tight_layout(5.5,1.3,1.2)
	fig.autofmt_xdate()
	plt.savefig(pJoin(modelDir,"spendChart.png"))
	outData["energyReductionDollars"] = d1
	outData["lossReductionDollars"] = d2
	outData["peakReductionDollars"] = d3
	# Graph the cumulative savings.
	fig = plt.figure(figsize=(10,5))
	annualSavings = sum(d1) + sum(d2) + sum(d3)
	# annualSave(x): cumulative net savings after x years of O&M minus capital.
	annualSave = lambda x:(annualSavings - rates['omCost']) * x - rates['capitalCost']
	simplePayback = rates['capitalCost']/(annualSavings - rates['omCost'])
	plt.xlabel('Year After Installation')
	plt.xlim(0,30)
	plt.ylabel('Cumulative Savings ($)')
	plt.plot([0 for x in range(31)],c='gray')
	plt.axvline(x=simplePayback, ymin=0, ymax=1, c='gray', linestyle='--')
	plt.plot([annualSave(x) for x in range(31)], c='green')
	plt.savefig(pJoin(modelDir,"savingsChart.png"))
	outData["annualSave"] = [annualSave(x) for x in range(31)]
	# For autotest, there won't be such file.
	return outData