Esempio n. 1
0
def work(modelDir, inputDict):
	''' Run the model in its directory.

	Stages the TMY2 climate file for the user's zip code, runs the SAM
	pvwattsv1 simulation, and packages weather, geodata, and AC power
	output into the returned dict.
	'''
	# Stage the TMY2 climate file matching the user's zip code.
	inputDict["climateName"] = weather.zipCodeToClimateName(inputDict["zipCode"])
	climateSource = pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2")
	shutil.copy(climateSource, pJoin(modelDir, "climate.tmy2"))
	# Set up SAM data structures.
	ssc = nrelsam2013.SSCAPI()
	dat = ssc.ssc_data_create()
	# Required user inputs (the SSC API takes byte-string keys).
	ssc.ssc_data_set_string(dat, b'file_name', bytes(modelDir + '/climate.tmy2', 'ascii'))
	ssc.ssc_data_set_number(dat, b'system_size', float(inputDict['SystemSize']))
	# SAM options where we take defaults.
	for optionName, optionValue in ((b'derate', 0.97), (b'track_mode', 0), (b'azimuth', 180), (b'tilt_eq_lat', 1)):
		ssc.ssc_data_set_number(dat, optionName, optionValue)
	# Run PV system simulation.
	mod = ssc.ssc_module_create(b'pvwattsv1')
	ssc.ssc_module_exec(mod, dat)
	# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
	startDateTime = "2013-01-01 00:00:00 UTC"
	# Timestamp output: hourly stamps for one non-leap year (8760 hours).
	outData = {}
	simStart = datetime.datetime.strptime(startDateTime[0:19], '%Y-%m-%d %H:%M:%S')
	stampList = []
	for hour in range(8760):
		stampList.append((simStart + datetime.timedelta(hours=hour)).strftime('%Y-%m-%d %H:%M:%S') + ' UTC')
	outData['timeStamps'] = stampList
	# HACK: makes it easier to calculate some things later.
	outData["pythonTimeStamps"] = [datetime.datetime(2012,1,1,0) + hour * datetime.timedelta(hours=1) for hour in range(8760)]
	# Geodata output.
	outData['city'] = ssc.ssc_data_get_string(dat, b'city').decode()
	outData['state'] = ssc.ssc_data_get_string(dat, b'state').decode()
	for geoKey in ('lat', 'lon', 'elev'):
		outData[geoKey] = ssc.ssc_data_get_number(dat, geoKey.encode('ascii'))
	# Weather output, keyed by display label.
	outData["climate"] = {}
	weatherFields = (
		('Global Horizontal Radiation (W/m^2)', b'gh'),
		('Plane of Array Irradiance (W/m^2)', b'poa'),
		('Ambient Temperature (F)', b'tamb'),
		('Cell Temperature (F)', b'tcell'),
		('Wind Speed (m/s)', b'wspd'))
	for label, sscName in weatherFields:
		outData['climate'][label] = ssc.ssc_data_get_array(dat, sscName)
	# Power generation.
	outData['powerOutputAc'] = ssc.ssc_data_get_array(dat, b'ac')
	# TODO: INSERT TJ CODE BELOW
	tjCode(inputDict, outData)
	del outData["pythonTimeStamps"]
	# TODO: INSERT TJ CODE ABOVE
	# Stdout/stderr.
	outData["stdout"] = "Success"
	outData["stderr"] = ""
	return outData
Esempio n. 2
0
def work(modelDir, inputDict):
	''' Run the model in its directory.

	Copies the TMY2 climate file for the user's zip code into modelDir, runs
	the NREL SAM pvwattsv1 module, and returns timestamped weather, geodata,
	and AC power output for display.
	'''
	# Copy specific climate data into model directory.
	# zipCodeToClimateName also returns the station latitude, unused by this model.
	inputDict["climateName"], _ = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	# Set up SAM data structures.
	ssc = nrelsam2013.SSCAPI()
	dat = ssc.ssc_data_create()
	# Required user inputs.
	ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
	ssc.ssc_data_set_number(dat, "system_size", float(inputDict["SystemSize"]))
	# SAM options where we take defaults.
	ssc.ssc_data_set_number(dat, "derate", 0.97)
	ssc.ssc_data_set_number(dat, "track_mode", 0)
	ssc.ssc_data_set_number(dat, "azimuth", 180)
	ssc.ssc_data_set_number(dat, "tilt_eq_lat", 1)
	# Run PV system simulation.
	mod = ssc.ssc_module_create("pvwattsv1")
	ssc.ssc_module_exec(mod, dat)
	# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
	startDateTime = "2013-01-01 00:00:00 UTC"
	# Timestamp output: hourly stamps for a non-leap year. Parse the start
	# time once instead of re-parsing it on every one of the 8760 iterations.
	outData = {}
	startStamp = dt.strptime(startDateTime[0:19], "%Y-%m-%d %H:%M:%S")
	outData["timeStamps"] = [
		dt.strftime(startStamp + td(hours=x), "%Y-%m-%d %H:%M:%S") + " UTC"
		for x in range(8760)]
	# HACK: makes it easier to calculate some things later.
	outData["pythonTimeStamps"] = [dt(2012,1,1,0) + x*td(hours=1) for x in range(8760)]
	# Geodata output.
	outData["city"] = ssc.ssc_data_get_string(dat, "city")
	outData["state"] = ssc.ssc_data_get_string(dat, "state")
	outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
	outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
	outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
	# Weather output.
	outData["climate"] = {}
	outData["climate"]["Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(dat, "gh")
	outData["climate"]["Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(dat, "poa")
	outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(dat, "tamb")
	outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(dat, "tcell")
	outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(dat, "wspd")
	# Power generation.
	outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
	# TODO: INSERT TJ CODE BELOW
	tjCode(inputDict, outData)
	del outData["pythonTimeStamps"]
	# TODO: INSERT TJ CODE ABOVE
	# Stdout/stderr.
	outData["stdout"] = "Success"
	outData["stderr"] = ""
	return outData
Esempio n. 3
0
def work(modelDir, inputDict):
	''' Run the model in its directory.

	Copies the TMY2 climate file for the user's zip code into modelDir, runs
	the NREL SAM pvwattsv1 module, and returns timestamped weather, geodata,
	and AC power output for display.
	'''
	# Copy specific climate data into model directory.
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	# Set up SAM data structures.
	ssc = nrelsam2013.SSCAPI()
	dat = ssc.ssc_data_create()
	# Required user inputs.
	ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
	ssc.ssc_data_set_number(dat, "system_size", float(inputDict["SystemSize"]))
	# SAM options where we take defaults.
	ssc.ssc_data_set_number(dat, "derate", 0.97)
	ssc.ssc_data_set_number(dat, "track_mode", 0)
	ssc.ssc_data_set_number(dat, "azimuth", 180)
	ssc.ssc_data_set_number(dat, "tilt_eq_lat", 1)
	# Run PV system simulation.
	mod = ssc.ssc_module_create("pvwattsv1")
	ssc.ssc_module_exec(mod, dat)
	# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
	startDateTime = "2013-01-01 00:00:00 UTC"
	# Timestamp output: hourly stamps for a non-leap year. Parse the start
	# time once instead of re-parsing it on every one of the 8760 iterations.
	outData = {}
	startStamp = dt.strptime(startDateTime[0:19], "%Y-%m-%d %H:%M:%S")
	outData["timeStamps"] = [
		dt.strftime(startStamp + td(hours=x), "%Y-%m-%d %H:%M:%S") + " UTC"
		for x in range(8760)]
	# HACK: makes it easier to calculate some things later.
	outData["pythonTimeStamps"] = [dt(2012,1,1,0) + x*td(hours=1) for x in range(8760)]
	# Geodata output.
	outData["city"] = ssc.ssc_data_get_string(dat, "city")
	outData["state"] = ssc.ssc_data_get_string(dat, "state")
	outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
	outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
	outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
	# Weather output.
	outData["climate"] = {}
	outData["climate"]["Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(dat, "gh")
	outData["climate"]["Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(dat, "poa")
	outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(dat, "tamb")
	outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(dat, "tcell")
	outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(dat, "wspd")
	# Power generation.
	outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
	# TODO: INSERT TJ CODE BELOW
	tjCode(inputDict, outData)
	del outData["pythonTimeStamps"]
	# TODO: INSERT TJ CODE ABOVE
	# Stdout/stderr.
	outData["stdout"] = "Success"
	outData["stderr"] = ""
	return outData
Esempio n. 4
0
def work(modelDir, inputDict):
    ''' Run the model in its directory.

    Pipeline: build a GFM (fragility model) input from the feeder and hazard
    field, run GFM to produce an RDT input, run GridLAB-D to obtain line
    impedance data, run RDT to compute a resilient design, re-run GridLAB-D
    on the RDT-modified feeder, and render a one-line diagram.
    NOTE(review): this is Python 2 code (print statements, str.encode("base64")).
    '''
    outData = {}
    # Use the first .omd file found in the model directory as the feeder.
    feederName = [x for x in os.listdir(modelDir)
                  if x.endswith('.omd')][0][:-4]
    inputDict["feederName1"] = feederName
    # Write the user-supplied hazard field data to disk for GFM.
    hazardPath = pJoin(modelDir, inputDict['weatherImpactsFileName'])
    with open(hazardPath, 'w') as hazardFile:
        hazardFile.write(inputDict['weatherImpacts'])
    with open(pJoin(modelDir, feederName + '.omd'), "r") as jsonIn:
        feederModel = json.load(jsonIn)
    # Create GFM input file.
    print "RUNNING GFM FOR", modelDir
    critLoads = inputDict['criticalLoads']
    gfmInputTemplate = {
        'phase_variation': float(inputDict['phaseVariation']),
        'chance_constraint': float(inputDict['chanceConstraint']),
        'critical_load_met': float(inputDict['criticalLoadMet']),
        'total_load_met': float(inputDict['nonCriticalLoadMet']),
        'maxDGPerGenerator': float(inputDict['maxDGPerGenerator']),
        'dgUnitCost': float(inputDict['dgUnitCost']),
        'generatorCandidates': inputDict['generatorCandidates'],
        'criticalLoads': inputDict['criticalLoads']
    }
    gfmJson = convertToGFM(gfmInputTemplate, feederModel)
    gfmInputFilename = 'gfmInput.json'
    with open(pJoin(modelDir, gfmInputFilename), 'w') as outFile:
        json.dump(gfmJson, outFile, indent=4)
    # Check for overlap between hazard field and GFM circuit input:
    hazard = HazardField(hazardPath)
    if circuitOutsideOfHazard(hazard, gfmJson):
        outData[
            'warning'] = 'Warning: the hazard field does not overlap with the circuit.'
    # Draw hazard field if needed.
    if inputDict['showHazardField'] == 'Yes':
        hazard.drawHeatMap(show=False)
        plt.title('')  #Hack: remove plot title.
    # Run GFM
    gfmBinaryPath = pJoin(__neoMetaModel__._omfDir, 'solvers', 'gfm',
                          'Fragility.jar')
    rdtInputName = 'rdtInput.json'
    if platform.system() == 'Darwin':
        #HACK: force use of Java8 on MacOS.
        javaCmd = '/Library/Java/JavaVirtualMachines/jdk1.8.0_181.jdk/Contents/Home/bin/java'
    else:
        javaCmd = 'java'
    # GFM reads the circuit + hazard files and writes the RDT input file.
    proc = subprocess.Popen([
        javaCmd, '-jar', gfmBinaryPath, '-r', gfmInputFilename, '-wf',
        inputDict['weatherImpactsFileName'], '-num',
        inputDict['scenarioCount'], '-ro', rdtInputName
    ],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=modelDir)
    (stdout, stderr) = proc.communicate()
    with open(pJoin(modelDir, "gfmConsoleOut.txt"), "w") as gfmConsoleOut:
        gfmConsoleOut.write(stdout)
    rdtInputFilePath = pJoin(modelDir, 'rdtInput.json')
    # Pull GFM input data on lines and generators for HTML presentation.
    with open(rdtInputFilePath, 'r') as rdtInputFile:
        # HACK: we use rdtInput as a string in the frontend.
        rdtJsonAsString = rdtInputFile.read()
        rdtJson = json.loads(rdtJsonAsString)
    rdtJson["power_flow"] = inputDict["power_flow"]
    rdtJson["solver_iteration_timeout"] = 300.0
    rdtJson["algorithm"] = "miqp"
    # Calculate line costs.
    lineData = {}
    for line in rdtJson["lines"]:
        lineData[line["id"]] = '{:,.2f}'.format(
            float(line["length"]) * float(inputDict["lineUnitCost"]))
    outData["lineData"] = lineData
    outData["generatorData"] = '{:,.2f}'.format(
        float(inputDict["dgUnitCost"]) * float(inputDict["maxDGPerGenerator"]))
    outData['gfmRawOut'] = rdtJsonAsString
    # Insert user-specified scenarios block into RDT input
    if inputDict['scenarios'] != "":
        rdtJson['scenarios'] = json.loads(inputDict['scenarios'])
        with open(pJoin(rdtInputFilePath), "w") as rdtInputFile:
            json.dump(rdtJson, rdtInputFile, indent=4)
    # Run GridLAB-D first time to generate xrMatrices.
    print "RUNNING 1ST GLD RUN FOR", modelDir
    omdPath = pJoin(modelDir, feederName + ".omd")
    with open(omdPath, "r") as omd:
        omd = json.load(omd)
    # Remove new line candidates to get normal system powerflow results.
    deleteList = []
    newLines = inputDict["newLineCandidates"].strip().replace(' ',
                                                              '').split(',')
    for newLine in newLines:
        for omdObj in omd["tree"]:
            if ("name" in omd["tree"][omdObj]):
                if (newLine == omd["tree"][omdObj]["name"]):
                    deleteList.append(omdObj)
    for delItem in deleteList:
        del omd["tree"][delItem]
    #Load a blank glm file and use it to write to it
    # The appended jsondump object makes GridLAB-D emit the line data RDT needs.
    feederPath = pJoin(modelDir, 'feeder.glm')
    with open(feederPath, 'w') as glmFile:
        toWrite = omf.feeder.sortedWrite(
            omd['tree']
        ) + "object jsondump {\n\tfilename_dump_reliability JSON_dump_line.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n"
        glmFile.write(toWrite)
    #Write attachments from omd, if no file, one will be created
    for fileName in omd['attachments']:
        with open(os.path.join(modelDir, fileName), 'w') as file:
            file.write(omd['attachments'][fileName])
    #Wire in the file the user specifies via zipcode.
    climateFileName = zipCodeToClimateName(inputDict["simulationZipCode"])
    shutil.copy(
        pJoin(__neoMetaModel__._omfDir, "data", "Climate",
              climateFileName + ".tmy2"), pJoin(modelDir, 'climate.tmy2'))
    # Platform specific binaries for GridLAB-D First Run.
    if platform.system() == "Linux":
        myEnv = os.environ.copy()
        myEnv['GLPATH'] = omf.omfDir + '/solvers/gridlabdv990/'
        commandString = omf.omfDir + '/solvers/gridlabdv990/gridlabd.bin feeder.glm'
    elif platform.system() == "Windows":
        myEnv = os.environ.copy()
        commandString = '"' + pJoin(omf.omfDir, "solvers", "gridlabdv990",
                                    "gridlabd.exe") + '"' + " feeder.glm"
    elif platform.system() == "Darwin":
        myEnv = os.environ.copy()
        myEnv['GLPATH'] = omf.omfDir + '/solvers/gridlabdv990/MacRC4p1_std8/'
        commandString = '"' + omf.omfDir + '/solvers/gridlabdv990/MacRC4p1_std8/gld.sh" feeder.glm'
    # Run GridLAB-D First Time.
    proc = subprocess.Popen(commandString,
                            stdout=subprocess.PIPE,
                            shell=True,
                            cwd=modelDir,
                            env=myEnv)
    (out, err) = proc.communicate()
    with open(pJoin(modelDir, "gldConsoleOut.txt"), "w") as gldConsoleOut:
        gldConsoleOut.write(out)
    with open(pJoin(modelDir, "JSON_dump_line.json"), "r") as gldOut:
        gld_json_line_dump = json.load(gldOut)
    outData['gridlabdRawOut'] = gld_json_line_dump
    # Add GridLAB-D line objects and line codes in to the RDT model.
    rdtJson["line_codes"] = gld_json_line_dump["properties"]["line_codes"]
    rdtJson["lines"] = gld_json_line_dump["properties"]["lines"]
    hardCands = list(
        set(gfmJson['lineLikeObjs']) - set(inputDict['hardeningCandidates']))
    newLineCands = inputDict['newLineCandidates'].strip().replace(
        ' ', '').split(',')
    switchCands = inputDict['switchCandidates'].strip().replace(' ',
                                                                '').split(',')
    # Annotate every RDT line with capacity, costs, and candidate flags.
    for line in rdtJson["lines"]:
        line_id = line.get('id',
                           '')  # this is equal to name in the OMD objects.
        object_type = line.get('object', '')
        line['node1_id'] = line['node1_id'] + "_bus"
        line['node2_id'] = line['node2_id'] + "_bus"
        line_code = line["line_code"]
        # Getting ratings from OMD
        tree = omd['tree']
        nameToIndex = {tree[key].get('name', ''): key for key in tree}
        treeOb = tree[nameToIndex[line_id]]
        config_name = treeOb.get('configuration', '')
        config_ob = tree.get(nameToIndex[config_name], {})
        full_rating = 0
        for phase in ['A', 'B', 'C']:
            cond_name = config_ob.get('conductor_' + phase, '')
            cond_ob = tree.get(nameToIndex.get(cond_name, ''), '')
            rating = cond_ob.get('rating.summer.continuous', '')
            try:
                full_rating = int(rating)  #TODO: replace with avg of 3 phases.
            except:
                pass
        if full_rating != 0:
            line['capacity'] = full_rating
        else:
            # No usable conductor rating found; fall back to a large default.
            line['capacity'] = 10000
        # Setting other line parameters.
        line['construction_cost'] = float(inputDict['lineUnitCost'])
        line['harden_cost'] = float(inputDict['hardeningUnitCost'])
        line['switch_cost'] = float(inputDict['switchCost'])
        if line_id in hardCands:
            line['can_harden'] = True
        if line_id in switchCands:
            line['can_add_switch'] = True
        if line_id in newLineCands:
            line['is_new'] = True
        if object_type in ['transformer', 'regulator']:
            line['is_transformer'] = True
        if object_type == 'switch':
            line['has_switch'] = True
    with open(rdtInputFilePath, "w") as outFile:
        json.dump(rdtJson, outFile, indent=4)
    # Run RDT.
    print "RUNNING RDT FOR", modelDir
    rdtOutFile = modelDir + '/rdtOutput.json'
    rdtSolverFolder = pJoin(__neoMetaModel__._omfDir, 'solvers', 'rdt')
    rdtJarPath = pJoin(rdtSolverFolder, 'micot-rdt.jar')
    # TODO: modify path, don't assume SCIP installation.
    proc = subprocess.Popen([
        'java', "-Djna.library.path=" + rdtSolverFolder, '-jar', rdtJarPath,
        '-c', rdtInputFilePath, '-e', rdtOutFile
    ],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    (stdout, stderr) = proc.communicate()
    with open(pJoin(modelDir, "rdtConsoleOut.txt"), "w") as rdtConsoleOut:
        rdtConsoleOut.write(stdout)
    rdtRawOut = open(rdtOutFile).read()
    outData['rdtRawOut'] = rdtRawOut
    # Indent the RDT output nicely.
    with open(pJoin(rdtOutFile), "w") as outFile:
        rdtOut = json.loads(rdtRawOut)
        json.dump(rdtOut, outFile, indent=4)
    # Generate and run 2nd copy of GridLAB-D model with changes specified by RDT.
    print "RUNNING 2ND GLD RUN FOR", modelDir
    feederCopy = copy.deepcopy(feederModel)
    lineSwitchList = []
    edgeLabels = {}
    generatorList = []
    # Generator/load ids carry a "_bus" suffix; [:-4] strips it back off.
    for gen in rdtOut['design_solution']['generators']:
        generatorList.append(gen['id'][:-4])
    # Count how many scenarios damaged each load.
    damagedLoads = {}
    for scenario in rdtOut['scenario_solution']:
        for load in scenario['loads']:
            if load['id'] in damagedLoads.keys():
                damagedLoads[load['id'][:-4]] += 1
            else:
                damagedLoads[load['id'][:-4]] = 1
    # Label edges S (switch built), H (hardened), or SH (both) for the diagram.
    for line in rdtOut['design_solution']['lines']:
        if ('switch_built' in line and 'hardened' in line):
            lineSwitchList.append(line['id'])
            if (line['switch_built'] == True and line['hardened'] == True):
                edgeLabels[line['id']] = "SH"
            elif (line['switch_built'] == True):
                edgeLabels[line['id']] = "S"
            elif (line['hardened'] == True):
                edgeLabels[line['id']] = "H"
        elif ('switch_built' in line):
            lineSwitchList.append(line['id'])
            if (line['switch_built'] == True):
                edgeLabels[line['id']] = "S"
        elif ('hardened' in line):
            if (line['hardened'] == True):
                edgeLabels[line['id']] = "H"
    # Remove nonessential lines in second model as indicated by RDT output.
    # (Python 2: .keys() returns a list, so deleting during this loop is safe.)
    for key in feederCopy['tree'].keys():
        value = feederCopy['tree'][key]
        if ('object' in value):
            if (value['object'] == 'underground_line') or (value['object']
                                                           == 'overhead_line'):
                if value['name'] not in lineSwitchList:
                    del feederCopy['tree'][key]
    # Add generators to second model.
    # NOTE(review): the first assignment is immediately overwritten and
    # maxTreeKey is never read afterwards — dead code left as-is.
    maxTreeKey = int(max(feederCopy['tree'], key=int)) + 1
    maxTreeKey = max(feederCopy['tree'], key=int)
    # Load a blank glm file and use it to write to it
    feederPath = pJoin(modelDir, 'feederSecond.glm')
    with open(feederPath, 'w') as glmFile:
        toWrite = "module generators;\n\n" + omf.feeder.sortedWrite(
            feederCopy['tree']
        ) + "object voltdump {\n\tfilename voltDump2ndRun.csv;\n};\nobject jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n"  # + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
        glmFile.write(toWrite)
    # Run GridLAB-D second time.
    if platform.system() == "Windows":
        proc = subprocess.Popen(['gridlabd', 'feederSecond.glm'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True,
                                cwd=modelDir)
        (out, err) = proc.communicate()
        outData["secondGLD"] = str(
            os.path.isfile(pJoin(modelDir, "voltDump2ndRun.csv")))
    else:
        # TODO: make 2nd run of GridLAB-D work on Unixes.
        outData["secondGLD"] = str(False)
    # Draw the feeder.
    # Count how many scenarios disabled each line for heat-map coloring.
    # NOTE(review): assumes rdtJson has a 'scenarios' block even when the
    # user supplied none (i.e. GFM generated it) — confirm against GFM output.
    damageDict = {}
    for scenario in rdtJson["scenarios"]:
        for line in scenario["disable_lines"]:
            if line in damageDict:
                damageDict[line] = damageDict[line] + 1
            else:
                damageDict[line] = 1
    genDiagram(modelDir, feederModel, damageDict, critLoads, damagedLoads,
               edgeLabels, generatorList)
    with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile:
        outData["oneLineDiagram"] = inFile.read().encode("base64")
    # And we're done.
    return outData
Esempio n. 5
0
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Prepares the feeder tree (fresh climate object, recorders for regulators,
	capacitors, inverters, losses, and per-phase voltage dumps), runs
	GridLAB-D, aggregates the raw CSV output into cleanOut, and writes
	allOutputData.json. On any exception the run is cancelled and the
	traceback is written to stderr.txt. Python 2 code (print statements).
	'''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	# Best-effort mkdir: an already-existing directory is fine.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass
	try:
		feederName = inputDict["feederName1"]
		# zipCodeToClimateName also returns latitude; unused by this model.
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, "gldContainer", "climate.tmy2"))
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
			 	if feederJson['tree'][key]['object'] == 'climate':
			 		del feederJson['tree'][key]
		# Append the climate module and a fresh climate object at new tree keys.
		oldMax = feeder.getMaxKey(tree)
		tree[oldMax + 1] = {'omftype':'module', 'argument':'climate'}
		tree[oldMax + 2] ={'object':'climate','name':'Climate','interpolate':'QUADRATIC', 'tmyfile':'climate.tmy2'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map: one group_recorder per phase.
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
			simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"],
			keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		# NOTE(review): the else branch resets timeStamps to [] on every
		# non-matching key, so a value set by the elif branch can be clobbered
		# by a later iteration (only the first branch breaks). Confirm intended.
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList
		# Voltage Band
		# voltage_12 magnitudes are halved to get per-leg 120V-base values.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		# Fold each recorder CSV into the matching Consumption series.
		# vecPyth presumably combines real/imag parts elementwise — confirm helper.
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'):
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Wind turbines record V and I; power is derived per phase.
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		# Sparse lat/lon coverage (<25%) means positions are unreliable, so
		# fall back to a neato force-directed layout for the voltage chart.
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		if latPerc < 0.25: doNeato = True
		else: doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# Always record total wall-clock time, success or failure.
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	# Best-effort removal of the parent PID file.
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
Esempio n. 6
0
	''' Run the model in its directory. '''
	# Delete output file every run if it exists
	try:
		os.remove(pJoin(modelDir,"allOutputData.json"))	
	except Exception, e:
		pass
	try:
		# Check whether model exist or not
		if not os.path.isdir(modelDir):
			os.makedirs(modelDir)
			inputDict["created"] = str(dt.datetime.now())
		# MAYBEFIX: remove this data dump. Check showModel in web.py and renderTemplate()
		with open(pJoin(modelDir, "allInputData.json"),"w") as inputFile:
			json.dump(inputDict, inputFile, indent = 4)
		# Copy spcific climate data into model directory
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), 
			pJoin(modelDir, "climate.tmy2"))
		# Ready to run
		startTime = dt.datetime.now()
		# Set up SAM data structures.
		ssc = nrelsam2013.SSCAPI()
		dat = ssc.ssc_data_create()
		# Required user inputs.
		ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
		ssc.ssc_data_set_number(dat, "system_size", float(inputDict.get("systemSize", 100)))
		derate = float(inputDict.get("pvModuleDerate", 99.5))/100 \
			* float(inputDict.get("mismatch", 99.5))/100 \
			* float(inputDict.get("diodes", 99.5))/100 \
			* float(inputDict.get("dcWiring", 99.5))/100 \
			* float(inputDict.get("acWiring", 99.5))/100 \
Esempio n. 7
0
def work(modelDir, inputDict):
    ''' Run the PyCIGAR model in its directory: write the PyCIGAR input
    files from inputDict, run the pycigar co-simulation (optionally
    training or applying a defense agent), and convert its JSON output
    into the outData dict returned for display.
    WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
    # feederName = inputDict["feederName1"]
    # The first .omd / .dss file found in the model directory names the
    # feeder / circuit (extension stripped).
    feederName = [x for x in os.listdir(modelDir)
                  if x.endswith('.omd')][0][:-4]
    inputDict["feederName1"] = feederName
    dssName = [x for x in os.listdir(modelDir) if x.endswith('.dss')][0][:-4]
    inputDict["dssName1"] = dssName
    zipCode = "59001"  #TODO get zip code from the PV and Load input file

    #Value check for attackVariable
    if inputDict.get("attackVariable", "None") == "None":
        attackAgentType = "None"
    else:
        attackAgentType = inputDict['attackVariable']

    # Value check for train
    if inputDict.get("trainAgent", "") == "True":
        trainAgentValue = True
    else:
        trainAgentValue = False

    # create solarPVLengthValue to represent number of steps in simulation - will be manipulated by number of rows in load solar data csv file
    solarPVLengthValue = 0

    #create startStep to represent which step pyCigar should start on - default = 100
    startStep = 100

    #None check for simulation length
    if inputDict.get("simLength", "None") == "None":
        simLengthValue = None
    else:
        simLengthValue = int(inputDict['simLength'])

    #None check for simulation length units
    if inputDict.get("simLengthUnits", "None") == "None":
        simLengthUnitsValue = None
    else:
        simLengthUnitsValue = inputDict["simLengthUnits"]
    #None check for simulation start date
    if inputDict.get("simStartDate", "None") == "None":
        simStartDateTimeValue = None
        simStartDateValue = None
        simStartTimeValue = None
    else:
        # simStartDate arrives as an ISO-ish "date T time" string; split for
        # the separate date/time pieces.
        simStartDateTimeValue = inputDict["simStartDate"]
        simStartDateValue = simStartDateTimeValue.split('T')[0]
        simStartTimeValue = simStartDateTimeValue.split('T')[1]

    # None check for defenseVariable
    if inputDict.get("defenseVariable", "None") == "None":
        defenseAgentName = None
    else:
        defenseAgentName = inputDict['defenseVariable']
        #Check to make sure that defenseAgent selected by user exists, otherwise return a warning and set defenseAgentName to None
        if os.path.isdir(pJoin(modelDir, "pycigarOutput",
                               defenseAgentName)) == False:
            errorMessage = "ERROR: Defense Agent named " + defenseAgentName + " could not be located."
            defenseAgentName = None
            raise Exception(errorMessage)

    # Copy the TMY2 climate file for the (hard-coded) zip code into the model dir.
    inputDict["climateName"] = weather.zipCodeToClimateName(zipCode)
    shutil.copy(
        pJoin(__neoMetaModel__._omfDir, "data", "Climate",
              inputDict["climateName"] + ".tmy2"),
        pJoin(modelDir, "climate.tmy2"))

    def convertInputs():
        '''Write the misc/circuit/load-solar/breakpoints input files from
        inputDict into modelDir/PyCIGAR_inputs and return the load+PV csv
        row count minus one (the header row).'''
        #create the PyCIGAR_inputs folder to store the input files to run PyCIGAR
        try:
            os.mkdir(pJoin(modelDir, "PyCIGAR_inputs"))
        except FileExistsError:
            print("PyCIGAR_inputs folder already exists!")
            pass
        except:
            print("Error occurred creating PyCIGAR_inputs folder")

        #create misc_inputs.csv file in folder
        with open(pJoin(modelDir, "PyCIGAR_inputs", "misc_inputs.csv"),
                  "w") as miscFile:
            #Populate misc_inputs.csv
            # miscFile.write(misc_inputs)
            # for key in misc_dict.keys():
            # 	miscFile.write("%s,%s\n"%(key,misc_dict[key]))
            miscFile.write(inputDict['miscFile'])

        #create dss file in folder
        dss_filename = "circuit.dss"
        with open(pJoin(modelDir, "PyCIGAR_inputs", dss_filename),
                  "w") as dssFile:
            dssFile.write(inputDict['dssFile'])

        #create load_solar_data.csv file in folder
        rowCount = 0
        with open(pJoin(modelDir, "PyCIGAR_inputs", "load_solar_data.csv"),
                  "w") as loadPVFile:
            loadPVFile.write(inputDict['loadPV'])
            #Open load and PV input file
        try:
            with open(pJoin(modelDir, "PyCIGAR_inputs", "load_solar_data.csv"),
                      newline='') as inFile:
                reader = csv.reader(inFile)
                for row in reader:
                    rowCount = rowCount + 1
            #Check to see if the simulation length matches the load and solar csv
            # if (rowCount-1)*misc_dict["load file timestep"] != simLengthValue:
            # 	errorMessage = "Load and PV Output File does not match simulation length specified by user"
            # 	raise Exception(errorMessage)
            # Subtract one for the csv header row.
            solarPVLengthValue = rowCount - 1
        except:
            #TODO change to an appropriate warning message
            errorMessage = "CSV file is incorrect format."
            raise Exception(errorMessage)

        #create breakpoints.csv file in folder
        # f1Name = "breakpoints.csv"
        # with open(pJoin(omf.omfDir, "static", "testFiles", "pyCIGAR", f1Name)) as f1:
        # 	breakpoints_inputs = f1.read()
        with open(pJoin(modelDir, "PyCIGAR_inputs", "breakpoints.csv"),
                  "w") as breakpointsFile:
            breakpointsFile.write(inputDict['breakpoints'])

        return solarPVLengthValue

    solarPVLengthValue = convertInputs()

    #create simLengthAdjusted to represent simLength accounting for start step offset
    simLengthAdjusted = 0

    if simLengthValue != None:
        if simLengthValue + startStep > solarPVLengthValue:
            #raise error message that simLengthValue is too large for given Load Solar csv and given timestep (set to 100)
            simLengthAdjusted = solarPVLengthValue - startStep
        else:
            #simLengthValue is equal to the value entered by the user
            simLengthAdjusted = simLengthValue
    else:
        #simLengthAdjusted accounts for the offset by startStep
        simLengthAdjusted = solarPVLengthValue - startStep
    # #hard-coding simLengthAdjusted for testing purposes
    # simLengthAdjusted = 750

    # create value to represent the timestep in which the hack starts and adjust it to make sure it is within the bounds or the simulation length
    defaultHackStart = 250
    if defaultHackStart > simLengthAdjusted:
        defaultHackStart = simLengthAdjusted / 5

    # attackVars = dict of attack types and their corresponding parameter values
    # to add new attack: attackVars[attackAgentType_name] = {"hackStart": val, "hackEnd": val, "percentHack": val}
    # MAKE SURE to add attackVars entry when adding another Attack Agent option to the html dropdown list and the name must match the value passed back from the form (inputDict["attackVariable"])!
    attackVars = {}
    attackVars["None"] = {
        "hackStart": defaultHackStart,
        "hackEnd": None,
        "percentHack": 0.0
    }
    attackVars["curveSwitch"] = {
        "hackStart": defaultHackStart,
        "hackEnd": None,
        "percentHack": 0.45
    }

    #check to make sure attackAgentType is in the attackVars dictionary, otherwise set it to None. This shouldn't ever be a problem since the user selects attackAgentType from a preset HTML dropdown.
    if attackAgentType not in attackVars:
        attackAgentType = "None"

    outData = {}
    # Std Err and Std Out
    outData['stderr'] = "This should be stderr"  #rawOut['stderr']
    outData['stdout'] = "This should be stdout"  #rawOut['stdout']

    # Create list of timestamps for simulation steps
    # NOTE(review): timestamps advance one second per step — assumes a 1 s
    # simulation timestep; confirm against the misc_inputs "load file timestep".
    outData['timeStamps'] = []
    start_time = dt_parser.isoparse(simStartDateTimeValue)
    start_time = start_time + timedelta(seconds=startStep)
    for single_datetime in (start_time + timedelta(seconds=n)
                            for n in range(simLengthAdjusted)):
        single_datetime_str = single_datetime.strftime("%Y-%m-%d %H:%M:%S%z")
        outData['timeStamps'].append(single_datetime_str)

    # Day/Month Aggregation Setup:
    stamps = outData.get('timeStamps', [])
    level = inputDict.get('simLengthUnits', 'seconds')

    # TODO: Create/populate Climate data without gridlab-d
    # Placeholder zero-filled series; convertOutputs() overwrites the real ones.
    outData['climate'] = {}
    outData['allMeterVoltages'] = {}
    outData['allMeterVoltages']['Min'] = [0] * int(simLengthAdjusted)
    outData['allMeterVoltages']['Mean'] = [0] * int(simLengthAdjusted)
    outData['allMeterVoltages']['StdDev'] = [0] * int(simLengthAdjusted)
    outData['allMeterVoltages']['Max'] = [0] * int(simLengthAdjusted)
    # Power Consumption
    outData['Consumption'] = {}
    # Set default value to be 0, avoiding missing value when computing Loads
    outData['Consumption']['Power'] = [0] * int(simLengthAdjusted)
    outData['Consumption']['Losses'] = [0] * int(simLengthAdjusted)
    outData['Consumption']['DG'] = [0] * int(simLengthAdjusted)

    outData['swingTimestamps'] = []
    outData['swingTimestamps'] = outData['timeStamps']

    # Aggregate up the timestamps:
    if level == 'days':
        outData['timeStamps'] = aggSeries(stamps, stamps, lambda x: x[0][0:10],
                                          'days')
    elif level == 'months':
        outData['timeStamps'] = aggSeries(stamps, stamps, lambda x: x[0][0:7],
                                          'months')

    #create the pycigarOutput folder to store the output file(s) generated by PyCIGAR
    try:
        os.mkdir(pJoin(modelDir, "pycigarOutput"))
    except FileExistsError:
        print("pycigarOutput folder already exists!")
        pass
    except:
        print("Error occurred creating pycigarOutput folder")

    def runPyCIGAR():
        '''Invoke pycigar.main on the files written by convertInputs(),
        selecting the run type from the train/defense-agent inputs.'''
        #import and run pycigar
        import pycigar

        #Set up runType scenarios
        #runType of 2 implies the base scenario - not training a defense agent, nor is there a defense agent entered
        runType = 2
        defenseAgentPath = None

        #set default values for attack variables
        hackStartVal = defaultHackStart
        hackEndVal = None
        percentHackVal = 0.0

        #set pycigar attack variables
        hackStartVal = attackVars[attackAgentType]["hackStart"]
        hackEndVal = attackVars[attackAgentType][
            "hackEnd"]  #TODO: see if we need to change from a hard-coded value
        percentHackVal = attackVars[attackAgentType][
            "percentHack"]  #TODO: see if we need to change from a hard-coded value

        # check to see if we are trying to train a defense agent
        if trainAgentValue:
            #runType of 0 implies the training scenario - runs to train a defense agent and outputs a zip containing defense agent files
            # runType = 0
            pycigar.main(modelDir + "/PyCIGAR_inputs/misc_inputs.csv",
                         modelDir + "/PyCIGAR_inputs/circuit.dss",
                         modelDir + "/PyCIGAR_inputs/load_solar_data.csv",
                         modelDir + "/PyCIGAR_inputs/breakpoints.csv",
                         0,
                         defenseAgentPath,
                         modelDir + "/pycigarOutput/",
                         start=startStep,
                         duration=simLengthAdjusted,
                         hack_start=hackStartVal,
                         hack_end=hackEndVal,
                         percentage_hack=percentHackVal)
            # NOTE(review): execution falls through to the unconditional
            # pycigar.main call below even after this training run — confirm
            # the double run is intended.

        #check to see if user entered a defense agent file
        elif defenseAgentName != None:
            defenseAgentPath = pJoin(modelDir, "pycigarOutput",
                                     defenseAgentName)
            #runType of 1 implies the defense scenario - not training a defense agent, but a defense agent zip was uploaded
            runType = 1

        # TODO how to factor attackAgentType into pycigar inputs
        # if there is no training selected and no attack variable, run without a defense agent
        pycigar.main(modelDir + "/PyCIGAR_inputs/misc_inputs.csv",
                     modelDir + "/PyCIGAR_inputs/circuit.dss",
                     modelDir + "/PyCIGAR_inputs/load_solar_data.csv",
                     modelDir + "/PyCIGAR_inputs/breakpoints.csv",
                     runType,
                     defenseAgentPath,
                     modelDir + "/pycigarOutput/",
                     start=startStep,
                     duration=simLengthAdjusted,
                     hack_start=hackStartVal,
                     hack_end=hackEndVal,
                     percentage_hack=percentHackVal)

        #print("Got through pyCigar!!!")

    def convertOutputs():
        '''Copy values from modelDir/pycigarOutput/pycigar_output_specs.json
        into the enclosing outData dict (voltages, consumption, regulator,
        inverter and capacitor series).'''
        #set outData[] values to those from modelDir/pycigarOutput/pycigar_output_specs_.json
        #read in the pycigar-outputed json
        with open(
                pJoin(modelDir, "pycigarOutput", "pycigar_output_specs.json"),
                'r') as f:
            pycigarJson = json.load(f)

        #convert "allMeterVoltages"
        outData["allMeterVoltages"] = pycigarJson["allMeterVoltages"]

        #convert "Consumption"."Power"
        # HACK! Units are actually kW. Needs to be fixed in pyCigar.
        outData["Consumption"]["Power"] = pycigarJson["Consumption"][
            "Power Substation (W)"]

        #convert "Consumption"."Losses"
        outData["Consumption"]["Losses"] = pycigarJson["Consumption"][
            "Losses Total (W)"]

        #convert "Consumption"."DG"
        # Sign is flipped so DG generation plots as negative consumption.
        outData["Consumption"]["DG"] = [
            -1.0 * x for x in pycigarJson["Consumption"]["DG Output (W)"]
        ]

        #convert "powerFactors"
        outData["powerFactors"] = pycigarJson["Substation Power Factor (%)"]

        #convert "swingVoltage"
        outData["swingVoltage"] = pycigarJson["Substation Top Voltage(V)"]

        #convert "downlineNodeVolts"
        outData["downlineNodeVolts"] = pycigarJson[
            "Substation Bottom Voltage(V)"]

        #convert "minVoltBand"
        outData["minVoltBand"] = pycigarJson[
            "Substation Regulator Minimum Voltage(V)"]

        #convert "maxVoltBand"
        outData["maxVoltBand"] = pycigarJson[
            "Substation Regulator Maximum Voltage(V)"]

        #create lists of circuit object names
        regNameList = []
        capNameList = []
        for key in pycigarJson:
            if key.startswith('Regulator_'):
                regNameList.append(key)
            elif key.startswith('Capacitor_'):
                capNameList.append(key)

        #convert regulator data
        for reg_name in regNameList:
            outData[reg_name] = {}
            regPhaseValue = pycigarJson[reg_name]["RegPhases"]
            if regPhaseValue.find('A') != -1:
                outData[reg_name]["RegTapA"] = pycigarJson[reg_name]["creg1a"]

            if regPhaseValue.find('B') != -1:
                outData[reg_name]["RegTapB"] = pycigarJson[reg_name]["creg1b"]

            if regPhaseValue.find('C') != -1:
                outData[reg_name]["RegTapC"] = pycigarJson[reg_name]["creg1c"]

            outData[reg_name]["RegPhases"] = regPhaseValue

        #convert inverter data
        inverter_output_dict = {}
        for inv_dict in pycigarJson["Inverter Outputs"]:
            #create a new dictionary to represent the single inverter
            new_inv_dict = {}
            #get values from pycigar output for given single inverter
            inv_name = inv_dict["Name"]
            inv_volt = inv_dict["Voltage (V)"]
            inv_pow_real = inv_dict["Power Output (W)"]
            inv_pow_imag = inv_dict["Reactive Power Output (VAR)"]
            #populate single inverter dict with pycigar values
            new_inv_dict["Voltage"] = inv_volt
            new_inv_dict["Power_Real"] = inv_pow_real
            new_inv_dict["Power_Imag"] = inv_pow_imag
            #add single inverter dict to dict of all the inverters using the inverter name as the key
            inverter_output_dict[inv_name] = new_inv_dict
        outData["Inverter_Outputs"] = inverter_output_dict

        #convert capacitor data - Need one on test circuit first!
        # NOTE(review): simLengthValue may be None here (user left simLength
        # unset); int(None) would raise — confirm capacitors only appear in
        # runs with an explicit simLength.
        for cap_name in capNameList:
            outData[cap_name] = {}
            capPhaseValue = pycigarJson[cap_name]["CapPhases"]
            if capPhaseValue.find('A') != -1:
                outData[cap_name]['Cap1A'] = [0] * int(simLengthValue)
                outData[cap_name]['Cap1A'] = pycigarJson[cap_name]['switchA']

            if capPhaseValue.find('B') != -1:
                outData[cap_name]['Cap1B'] = [0] * int(simLengthValue)
                outData[cap_name]['Cap1B'] = pycigarJson[cap_name]['switchB']

            if capPhaseValue.find('C') != -1:
                outData[cap_name]['Cap1C'] = [0] * int(simLengthValue)
                outData[cap_name]['Cap1C'] = pycigarJson[cap_name]['switchC']

            outData[cap_name]["CapPhases"] = capPhaseValue

        outData["stdout"] = pycigarJson["stdout"]

    runPyCIGAR()
    # Report out the agent paths.
    defAgentFolders = os.listdir(pJoin(modelDir, "pycigarOutput"))
    outData['defenseAgents'] = [
        x for x in defAgentFolders if x.startswith('policy_')
    ]
    convertOutputs()
    return outData
Esempio n. 8
0
def run(modelDir, inputDict):
    ''' Run the PVWatts (SAM pvwattsv1) model in its directory.

    Creates modelDir if needed, copies the TMY2 climate file matching
    inputDict["zipCode"] into it, runs the NREL SAM pvwattsv1 simulation,
    post-processes with tjCode, and writes allOutputData.json plus the
    updated allInputData.json (with runTime). On any failure the partial
    output is cancelled and the traceback is written to stderr.txt and
    recorded in inputDict["stderr"].
    '''
    try:
        # Check whether model exist or not
        if not os.path.isdir(modelDir):
            os.makedirs(modelDir)
            inputDict["created"] = str(dt.now())
        # MAYBEFIX: remove this data dump. Check showModel in web.py and renderTemplate()
        with open(pJoin(modelDir, "allInputData.json"), "w") as inputFile:
            json.dump(inputDict, inputFile, indent=4)
        # Copy specific climate data into model directory
        inputDict["climateName"], latforpvwatts = zipCodeToClimateName(
            inputDict["zipCode"])
        shutil.copy(
            pJoin(__metaModel__._omfDir, "data", "Climate",
                  inputDict["climateName"] + ".tmy2"),
            pJoin(modelDir, "climate.tmy2"))
        # Ready to run
        startTime = dt.now()
        # Set up SAM data structures.
        ssc = nrelsam2013.SSCAPI()
        dat = ssc.ssc_data_create()
        # Required user inputs.
        ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
        ssc.ssc_data_set_number(dat, "system_size",
                                float(inputDict["SystemSize"]))
        # SAM options where we take defaults.
        ssc.ssc_data_set_number(dat, "derate", 0.97)
        ssc.ssc_data_set_number(dat, "track_mode", 0)
        ssc.ssc_data_set_number(dat, "azimuth", 180)
        ssc.ssc_data_set_number(dat, "tilt_eq_lat", 1)
        # Run PV system simulation.
        mod = ssc.ssc_module_create("pvwattsv1")
        ssc.ssc_module_exec(mod, dat)
        # Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
        startDateTime = "2013-01-01 00:00:00 UTC"
        # Timestamp output: one entry per hour for a full (non-leap) year.
        outData = {}
        outData["timeStamps"] = [
            dt.strftime(
                dt.strptime(startDateTime[0:19], "%Y-%m-%d %H:%M:%S") +
                td(**{"hours": x}), "%Y-%m-%d %H:%M:%S") + " UTC"
            for x in range(8760)
        ]
        # HACK: makes it easier to calculate some things later.
        outData["pythonTimeStamps"] = [
            dt(2012, 1, 1, 0) + x * td(hours=1) for x in range(8760)
        ]
        # Geodata output.
        outData["city"] = ssc.ssc_data_get_string(dat, "city")
        outData["state"] = ssc.ssc_data_get_string(dat, "state")
        outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
        outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
        outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
        # Weather output.
        outData["climate"] = {}
        outData["climate"][
            "Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(
                dat, "gh")
        outData["climate"][
            "Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(
                dat, "poa")
        outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(
            dat, "tamb")
        outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(
            dat, "tcell")
        outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(
            dat, "wspd")
        # Power generation.
        outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")

        # TODO: INSERT TJ CODE BELOW
        tjCode(inputDict, outData)
        del outData["pythonTimeStamps"]
        # TODO: INSERT TJ CODE ABOVE

        # Stdout/stderr.
        outData["stdout"] = "Success"
        outData["stderr"] = ""
        # Write the output.
        with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile:
            json.dump(outData, outFile, indent=4)
        # Update the runTime in the input file.
        endTime = dt.now()
        inputDict["runTime"] = str(
            td(seconds=int((endTime - startTime).total_seconds())))
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
    # FIX: was a bare `except:` with a Python-2 `print` statement — the print
    # was a SyntaxError on Python 3, and the bare except also swallowed
    # SystemExit/KeyboardInterrupt. Now catches Exception only.
    except Exception:
        # If input range wasn't valid delete output, write error to disk.
        cancel(modelDir)
        thisErr = traceback.format_exc()
        print('ERROR IN MODEL', modelDir, thisErr)
        inputDict['stderr'] = thisErr
        with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile:
            errorFile.write(thisErr)
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
Esempio n. 9
0
def work(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
	# feederName = inputDict["feederName1"]
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	feederJson = json.load(open(pJoin(modelDir, feederName + '.omd')))
	tree = feederJson["tree"]
	# Set up GLM with correct time and recorders:
	feeder.attachRecorders(tree, "Regulator", "object", "regulator")
	feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
	feeder.attachRecorders(tree, "Inverter", "object", "inverter")
	feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
	feeder.attachRecorders(tree, "CollectorVoltage", None, None)
	feeder.attachRecorders(tree, "Climate", "object", "climate")
	feeder.attachRecorders(tree, "OverheadLosses", None, None)
	feeder.attachRecorders(tree, "UndergroundLosses", None, None)
	feeder.attachRecorders(tree, "TriplexLosses", None, None)
	feeder.attachRecorders(tree, "TransformerLosses", None, None)
	feeder.groupSwingKids(tree)
	# Attach recorders for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':3600}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach recorders for system voltage map, triplex:
	stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':3600}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'nVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach current recorder for overhead_lines
	currentStub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	for phase in ['A','B','C']:
		copyCurrentStub = dict(currentStub)
		copyCurrentStub['property'] = 'current_out_' + phase
		copyCurrentStub['file'] = 'OH_line_current_phase' + phase + '.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyCurrentStub
	rating_stub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	copyRatingStub = dict(rating_stub)
	copyRatingStub['property'] = 'continuous_rating'
	copyRatingStub['file'] = 'OH_line_cont_rating.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyRatingStub
	flow_stub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	copyFlowStub = dict(flow_stub)
	copyFlowStub['property'] = 'flow_direction'
	copyFlowStub['file'] = 'OH_line_flow_direc.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyFlowStub
	# Attach current recorder for underground_lines
	currentStubOH = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	for phase in ['A','B','C']:
		copyCurrentStubOH = dict(currentStubOH)
		copyCurrentStubOH['property'] = 'current_out_' + phase
		copyCurrentStubOH['file'] = 'UG_line_current_phase' + phase + '.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyCurrentStubOH
	ug_rating_stub = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	copyUGRatingStub = dict(ug_rating_stub)
	copyUGRatingStub['property'] = 'continuous_rating'
	copyUGRatingStub['file'] = 'UG_line_cont_rating.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyUGRatingStub
	ug_flow_stub = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	ugCopyFlowStub = dict(ug_flow_stub)
	ugCopyFlowStub['property'] = 'flow_direction'
	ugCopyFlowStub['file'] = 'UG_line_flow_direc.csv'
	tree[feeder.getMaxKey(tree) + 1] = ugCopyFlowStub
	# And get meters for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':3600}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'mVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	for key in tree:
		if 'bustype' in tree[key].keys():
			if tree[key]['bustype'] == 'SWING':
				tree[key]['object'] = 'meter'
				swingN = tree[key]['name']
	swingRecord = {'object':'recorder', 'property':'voltage_A,measured_real_power,measured_power','file':'subVoltsA.csv','parent':swingN, 'interval':60}
	tree[feeder.getMaxKey(tree) + 1] = swingRecord
	for key in tree:
		if 'omftype' in tree[key].keys() and tree[key]['argument']=='minimum_timestep=3600':
			tree[key]['argument'] = 'minimum_timestep=60'
	# If there is a varvolt object in the tree, add recorder to swingbus and node from voltage_measurements property
	# Find var_volt object
	downLineNode = 'None'
	for key in tree:
		if 'object' in tree[key].keys() and tree[key]['object']=='volt_var_control':
			downLineNode = tree[key]['voltage_measurements']
	if downLineNode != 'None':
		downNodeRecord = {'object':'recorder', 'property':'voltage_A','file':'firstDownlineVoltsA.csv','parent':downLineNode, 'interval':60}
		tree[feeder.getMaxKey(tree) + 1] = downNodeRecord
	# Violation recorder to display to users 
	# violationRecorder = {'object':'violation_recorder','node_continuous_voltage_limit_lower':0.95,'file':'Violation_Log.csv',
	# 					'secondary_dist_voltage_rise_lower_limit':-0.042,'substation_pf_lower_limit':0.85,'substation_breaker_C_limit':300,
	# 					'secondary_dist_voltage_rise_upper_limit':0.025,'substation_breaker_B_limit':300,'violation_flag':'ALLVIOLATIONS',
	# 					'node_instantaneous_voltage_limit_upper':1.1, 'inverter_v_chng_per_interval_lower_bound':-0.05, 'virtual_substation':swingN,
	# 					'substation_breaker_A_limit':300, 'xfrmr_thermal_limit_lower':0,'node_continuous_voltage_interval':300,'strict':'false',
	# 					'node_instantaneous_voltage_limit_lower':0,'line_thermal_limit_upper':1,'echo':'false','node_continuous_voltage_limit_upper':1.05,
	# 					'interval':30,'line_thermal_limit_lower':0,'summary':'Violation_Summary.csv','inverter_v_chng_interval':60,
	# 					'xfrmr_thermal_limit_upper':2,'inverter_v_chng_per_interval_upper_bound':0.050}
	# tree[feeder.getMaxKey(tree) + 1] = violationRecorder
	feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
		simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
	# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
	rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
		keepFiles=True, workDir=pJoin(modelDir))
		# voltDumps have no values when gridlabD fails or the files dont exist
	if not os.path.isfile(pJoin(modelDir,'aVoltDump.csv')):
		with open (pJoin(modelDir,'stderr.txt')) as inFile:
			stdErrText = inFile.read()
		message = 'GridLAB-D crashed. Error log:\n' + stdErrText
		raise Exception(message)
	elif len(rawOut['aVoltDump.csv']['# timestamp']) == 0:
		with open (pJoin(modelDir,'stderr.txt')) as inFile:
			stdErrText = inFile.read()
		message = 'GridLAB-D crashed. Error log:\n' + stdErrText
		raise Exception(message)
	outData = {}
	# Std Err and Std Out
	outData['stderr'] = rawOut['stderr']
	outData['stdout'] = rawOut['stdout']
	# Time Stamps
	for key in rawOut:
		if '# timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# timestamp']
			break
		elif '# property.. timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# property.. timestamp']
		else:
			outData['timeStamps'] = []
	# Day/Month Aggregation Setup:
	stamps = outData.get('timeStamps',[])
	level = inputDict.get('simLengthUnits','hours')
	# Climate
	for key in rawOut:
		if key.startswith('Climate_') and key.endswith('.csv'):
			outData['climate'] = {}
			outData['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
			outData['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
			outData['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
			outData['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
			outData['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			#outData['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
			climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
			#converting W/sf to W/sm
			climateWbySMList= [x*10.76392 for x in climateWbySFList]
			outData['climate']['Global Horizontal (W/sm)']=climateWbySMList			
	# Voltage Band
	if 'VoltageJiggle.csv' in rawOut:
		outData['allMeterVoltages'] = {}
		outData['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
		outData['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
	# Power Consumption
	outData['Consumption'] = {}
	# Set default value to be 0, avoiding missing value when computing Loads
	outData['Consumption']['Power'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['DG'] = [0] * int(inputDict["simLength"])
	for key in rawOut:
		if key.startswith('SwingKids_') and key.endswith('.csv'):
			oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
			if 'Power' not in outData['Consumption']:
				outData['Consumption']['Power'] = oneSwingPower
			else:
				outData['Consumption']['Power'] = vecSum(oneSwingPower,outData['Consumption']['Power'])
		elif key.startswith('Inverter_') and key.endswith('.csv'): 	
			realA = rawOut[key]['power_A.real']
			realB = rawOut[key]['power_B.real']
			realC = rawOut[key]['power_C.real']
			imagA = rawOut[key]['power_A.imag']
			imagB = rawOut[key]['power_B.imag']
			imagC = rawOut[key]['power_C.imag']
			oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key.startswith('Windmill_') and key.endswith('.csv'):
			vrA = rawOut[key]['voltage_A.real']
			vrB = rawOut[key]['voltage_B.real']
			vrC = rawOut[key]['voltage_C.real']
			viA = rawOut[key]['voltage_A.imag']
			viB = rawOut[key]['voltage_B.imag']
			viC = rawOut[key]['voltage_C.imag']
			crB = rawOut[key]['current_B.real']
			crA = rawOut[key]['current_A.real']
			crC = rawOut[key]['current_C.real']
			ciA = rawOut[key]['current_A.imag']
			ciB = rawOut[key]['current_B.imag']
			ciC = rawOut[key]['current_C.imag']
			powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
			powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
			powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
			oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
			realA = rawOut[key]['sum(power_losses_A.real)']
			imagA = rawOut[key]['sum(power_losses_A.imag)']
			realB = rawOut[key]['sum(power_losses_B.real)']
			imagB = rawOut[key]['sum(power_losses_B.imag)']
			realC = rawOut[key]['sum(power_losses_C.real)']
			imagC = rawOut[key]['sum(power_losses_C.imag)']
			oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'Losses' not in outData['Consumption']:
				outData['Consumption']['Losses'] = oneLoss
			else:
				outData['Consumption']['Losses'] = vecSum(oneLoss,outData['Consumption']['Losses'])
		elif key.startswith('Regulator_') and key.endswith('.csv'):
			#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
			regName=""
			regName = key
			newkey=regName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapA'] = rawOut[key]['tap_A']
			outData[newkey]['RegTapB'] = rawOut[key]['tap_B']
			outData[newkey]['RegTapC'] = rawOut[key]['tap_C']
			outData[newkey]['RegPhases'] = rawOut[key]['phases'][0]
		elif key.startswith('Capacitor_') and key.endswith('.csv'):
			capName=""
			capName = key
			newkey=capName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1A'] = rawOut[key]['switchA']
			outData[newkey]['Cap1B'] = rawOut[key]['switchB']
			outData[newkey]['Cap1C'] = rawOut[key]['switchC']
			outData[newkey]['CapPhases'] = rawOut[key]['phases'][0]
	# Capture voltages at the swingbus
	# Loop through voltDump for swingbus voltages
	subData = []
	downData = []
	with open(pJoin(modelDir,"subVoltsA.csv")) as subFile:
		reader = csv.reader(subFile)
		subData = [x for x in reader]
	if downLineNode != 'None':
		with open(pJoin(modelDir,"firstDownlineVoltsA.csv")) as downFile:
			reader = csv.reader(downFile)
			downData = [x for x in reader]
	FIRST_DATA_ROW = 9
	cleanDown = [stringToMag(x[1]) for x in downData[FIRST_DATA_ROW:-1]]
	swingTimestamps = [x[0] for x in subData[FIRST_DATA_ROW:-1]]
	cleanSub = [stringToMag(x[1]) for x in subData[FIRST_DATA_ROW:-1]]
	# real_power / power
	powerFactors = []
	for row in subData[FIRST_DATA_ROW:-1]:
		powerFactors.append(abs(float(row[2])/stringToMag(row[3])))
	outData['powerFactors'] = powerFactors
	outData['swingVoltage'] = cleanSub
	outData['downlineNodeVolts'] = cleanDown
	outData['swingTimestamps'] = swingTimestamps
	# If there is a var volt system, find the min and max voltage for a band
	minVoltBand = []
	maxVoltBand = []
	if downLineNode != 'None':
		for key in tree:
			objKeys = tree[key].keys()
			if 'object' in objKeys:
				if tree[key]['object']=='volt_var_control':
					minVoltBand.append(float(tree[key]['minimum_voltages']))
					maxVoltBand.append(float(tree[key]['maximum_voltages']))
		outData['minVoltBand'] = minVoltBand
		outData['maxVoltBand'] = maxVoltBand
	# Violation Summary and Log
	# violationData = ''
	# violationArray = []
	# with open(pJoin(modelDir,"Violation_Summary.csv")) as vioSum:
	# 	reader = csv.reader(vioSum)
	# 	for row in reader:
	# 		violationArray.append(row)	
	# for row in violationArray[4:]:
	# 	violationData += str(' '.join(row)) + "\n"
	# outData["violationSummary"] = violationData
	# violationLogArray = []
	# violationLog = ''
	# with open(pJoin(modelDir,"Violation_Log.csv")) as vioLog:
	# 	logger = csv.reader(vioLog)
	# 	for row in logger:
	# 		violationLogArray.append(row)
	# for row in violationLogArray[6:]:
	# 	violationLog += str(' '.join(row)) + "\n"
	# outData['violationLog'] = violationLog
	# What percentage of our keys have lat lon data?
	latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
	latPerc = 1.0*len(latKeys)/len(tree)
	if latPerc < 0.25: doNeato = True
	else: doNeato = False
	# Generate the frames for the system voltage map time traveling chart.
	genTime, mapTimestamp = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
	outData['genTime'] = genTime
	outData['mapTimestamp'] = mapTimestamp
	# Aggregate up the timestamps:
	if level=='days':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
	elif level=='months':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
	return outData
Esempio n. 10
0
def work(modelDir, inputDict):
	'''Run the solar financing model in its directory.

	Simulates one year of hourly PV output with NREL SAM (pvwattsv1), sizes
	the array/hardware from user inputs, builds a full installed-cost
	estimate, then computes levelized cost of energy under five financing
	structures: Direct Loan, NCREBs, Lease Buyback, Tax Equity Flip, and a
	PPA comparison. Returns the ``outData`` dict consumed by the web UI.
	Relies on module-level helpers/imports: ``zipCodeToClimateName``,
	``nrelsam2013``, ``pJoin``, ``__neoMetaModel__`` and the financial
	functions ``pmt``/``npv``/``ipmt``/``irr``.
	'''
	# Set static input data
	simLength = 8760  # one year of hourly steps
	simStartDate = "2013-01-01"
	# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
	startDateTime = simStartDate + " 00:00:00 UTC"		
	simLengthUnits = "hours"
	# Associate zipcode to climate data
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	inverterSizeAC = float(inputDict.get("inverterSize",0))
	if (inputDict.get("systemSize",0) == "-"):
		# No explicit DC size: derive it from the AC inverter size via a
		# fixed 1.3908 DC/AC ratio.
		arraySizeDC = 1.3908 * inverterSizeAC
	else:
		arraySizeDC = float(inputDict.get("systemSize",0))
	numberPanels = (arraySizeDC * 1000/305)  # 305 W panels assumed throughout
	# Set constants
	panelSize = 305		
	trackingMode = 0
	rotlim = 45.0
	gamma = 0.45
	if (inputDict.get("tilt",0) == "-"):
		# "-" means tilt-at-latitude; otherwise use the user-specified tilt.
		tilt_eq_lat = 1.0
		manualTilt = 0.0
	else:
		tilt_eq_lat = 0.0
		manualTilt = float(inputDict.get("tilt",0))
	# Assumes 500 kW inverters (inverterSizeAC is in kW).
	numberInverters = math.ceil(inverterSizeAC/1000/0.5)			
	# Copy specific climate data into model directory
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	# Set up SAM data structures.
	ssc = nrelsam2013.SSCAPI()
	dat = ssc.ssc_data_create()
	# Required user inputs.
	ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
	ssc.ssc_data_set_number(dat, "system_size", arraySizeDC)
	# Combined derate = inverter efficiency * balance-of-system efficiency.
	ssc.ssc_data_set_number(dat, "derate", float(inputDict.get("inverterEfficiency", 96))/100 * float(inputDict.get("nonInverterEfficiency", 87))/100)
	ssc.ssc_data_set_number(dat, "track_mode", float(trackingMode))
	ssc.ssc_data_set_number(dat, "azimuth", float(inputDict.get("azimuth", 180)))
	# Advanced inputs with defaults.
	ssc.ssc_data_set_number(dat, "rotlim", float(rotlim))
	ssc.ssc_data_set_number(dat, "gamma", float(-gamma/100))
	ssc.ssc_data_set_number(dat, "tilt", manualTilt)
	ssc.ssc_data_set_number(dat, "tilt_eq_lat", 0.0)
	# Run PV system simulation.
	mod = ssc.ssc_module_create("pvwattsv1")
	ssc.ssc_module_exec(mod, dat)
	# Timestamp output.
	outData = {}
	outData["timeStamps"] = [dt.datetime.strftime(
		dt.datetime.strptime(startDateTime[0:19],"%Y-%m-%d %H:%M:%S") +
		dt.timedelta(**{simLengthUnits:x}),"%Y-%m-%d %H:%M:%S") + " UTC" for x in range(simLength)]
	# Geodata output.
	outData["minLandSize"] = round((arraySizeDC/1390.8*5 + 1)*math.cos(math.radians(22.5))/math.cos(math.radians(30.0)),0)
	landAmount = float(inputDict.get("landAmount", 6.0))
	outData["city"] = ssc.ssc_data_get_string(dat, "city")
	outData["state"] = ssc.ssc_data_get_string(dat, "state")
	outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
	outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
	outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
	# Weather output.
	outData["climate"] = {}
	outData["climate"]["Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(dat, "gh")
	outData["climate"]["Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(dat, "poa")
	outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(dat, "tamb")
	outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(dat, "tcell")
	outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(dat, "wspd")
	# Power generation.
	outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
	# Calculate clipping: cap output at the inverter's AC nameplate.
	invSizeWatts = inverterSizeAC * 1000
	outData["powerOutputAcInvClipped"] = [x if x < invSizeWatts else invSizeWatts for x in outData["powerOutputAc"]]
	try:
		outData["percentClipped"] = 100 * (1.0 - sum(outData["powerOutputAcInvClipped"]) / sum(outData["powerOutputAc"]))
	except ZeroDivisionError:
		# All-zero generation (e.g. degenerate inputs): report no clipping.
		outData["percentClipped"] = 0.0
	# One year generation
	outData["oneYearGenerationWh"] = sum(outData["powerOutputAcInvClipped"])
	# Annual generation for all years, declining by the degradation rate.
	loanYears = 25
	outData["allYearGenerationMWh"] = {}
	outData["allYearGenerationMWh"][1] = float(outData["oneYearGenerationWh"])/1000000
	for i in range (2, loanYears+1):
		outData["allYearGenerationMWh"][i] = float(outData["allYearGenerationMWh"][i-1]) * (1 - float(inputDict.get("degradation", 0.8))/100)
	# Summary of Results.
	######
	### Total Costs (sum of): Hardware Costs, Design/Engineering/PM/EPC/Labor Costs, Siteprep Costs, Construction Costs, Installation Costs, Land Costs
	######
	### Hardware Costs 
	pvModules = arraySizeDC * float(inputDict.get("moduleCost",0))*1000 #off by 4000
	racking = arraySizeDC * float(inputDict.get("rackCost",0))*1000
	inverters = numberInverters * float(inputDict.get("inverterCost",0))
	inverterSize = inverterSizeAC
	if (inverterSize <= 250):
		gear = 15000
	elif (inverterSize <= 600):
		gear = 18000
	else:
		gear = inverterSize/1000 * 22000
	balance = inverterSizeAC * 1.3908 * 134
	combiners = math.ceil(numberPanels/19/24) * float(1800)  #*
	wireManagement = arraySizeDC * 1.5
	transformer = 1 * 28000
	weatherStation = 1 * 12500
	shipping = 1.02  # 2% shipping adder applied to all hardware
	hardwareCosts = (pvModules + racking + inverters + gear + balance + combiners + wireManagement  + transformer + weatherStation) * shipping
	### Design/Engineering/PM/EPC/Labor Costs 
	EPCmarkup = float(inputDict.get("EPCRate",0))/100 * hardwareCosts
	# Labor hours scale with the square root of array size (empirical fit).
	hoursDesign = 160*math.sqrt(arraySizeDC/1390)
	hoursElectrical = 80*math.sqrt(arraySizeDC/1391)
	designLabor = 65*hoursDesign
	electricalLabor = 75*hoursElectrical
	laborDesign = designLabor + electricalLabor + float(inputDict.get("pmCost",0)) + EPCmarkup
	materialDesign = 0
	designCosts = materialDesign + laborDesign
	### Siteprep Costs 
	# Perimeter work scales with 4*sqrt(area) ~ perimeter of a square site
	# (landAmount is in acres; 43560 sq ft per acre).
	surveying = 2.25 * 4 * math.sqrt(landAmount*43560)
	concrete = 8000 * math.ceil(numberInverters/2)
	fencing = 6.75 * 4 * math.sqrt(landAmount*43560)
	grading = 2.5 * 4 * math.sqrt(landAmount*43560)
	landscaping = 750 * landAmount
	siteMaterial = 8000 + 600 + 5500 + 5000 + surveying + concrete + fencing + grading + landscaping + 5600
	blueprints = float(inputDict.get("mechLabor",0))*12
	mobilization = float(inputDict.get("mechLabor",0))*208
	mobilizationMaterial = float(inputDict.get("mechLabor",0))*19.98
	siteLabor = blueprints + mobilization + mobilizationMaterial
	sitePrep = siteMaterial + siteLabor
	### Construction Costs (Office Trailer, Skid Steer, Storage Containers, etc) 
	constrEquip = 6000 + math.sqrt(landAmount)*16200
	### Installation Costs 
	moduleAndRackingInstall = numberPanels * (15.00 + 12.50 + 1.50)
	pierDriving = 1 * arraySizeDC*20
	balanceInstall = 1 * arraySizeDC*100
	installCosts = moduleAndRackingInstall + pierDriving + balanceInstall + float(inputDict.get("elecLabor",0)) * (72 + 60 + 70 + 10 + 5 + 30 + 70)
	### Land Costs 
	if (str(inputDict.get("landOwnership",0)) == "Owned" or (str(inputDict.get("landOwnership",0)) == "Leased")):
		landCosts = 0
	else:
		landCosts = float(inputDict.get("costAcre",0))*landAmount
	######
	### Total Costs 
	######
	totalCosts = hardwareCosts + designCosts + sitePrep + constrEquip + installCosts + landCosts
	totalFees= float(inputDict.get("devCost",0))/100 * totalCosts
	outData["totalCost"] = totalCosts + totalFees + float(inputDict.get("interCost",0))
	# Add to Pie Chart
	outData["costsPieChart"] = [["Land", landCosts],
		["Design/Engineering/PM/EPC", designCosts],
		["PV Modules", pvModules*shipping],
		["Racking", racking*shipping],
		["Inverters & Switchgear", (inverters+gear)*shipping],
		["BOS", hardwareCosts - pvModules*shipping - racking*shipping - (inverters+gear)*shipping],
		["Site Prep, Constr. Eq. and Installation", (siteMaterial + constrEquip) + (siteLabor + installCosts)]]
	# Cost per Wdc
	outData["costWdc"] = (totalCosts + totalFees + float(inputDict.get("interCost",0))) / (arraySizeDC * 1000)
	outData["capFactor"] = float(outData["oneYearGenerationWh"])/(inverterSizeAC*1000*365.25*24) * 100
	######
	### Loans calculations for Direct, NCREB, Lease, Tax-equity, and PPA
	######
	### Full Ownership, Direct Loan
	#Output - Direct Loan [C]
	projectCostsDirect = 0
	#Output - Direct Loan [D]
	netFinancingCostsDirect = 0
	#Output - Direct Loan [E]
	OMInsuranceETCDirect = []
	#Output - Direct Loan [F]
	distAdderDirect = []
	#Output - Direct Loan [G]
	netCoopPaymentsDirect = []
	#Output - Direct Loan [H]
	costToCustomerDirect = []
	#Output - Direct Loan [F53]
	Rate_Levelized_Direct = 0
	## Output - Direct Loan Formulas
	projectCostsDirect = 0
	#Output - Direct Loan [D]
	payment = pmt(float(inputDict.get("loanRate",0))/100, loanYears, outData["totalCost"])
	interestDirectPI = outData["totalCost"] * float(inputDict.get("loanRate",0))/100
	principleDirectPI = (-payment - interestDirectPI)
	patronageCapitalRetiredDPI = 0
	netFinancingCostsDirect = -(principleDirectPI + interestDirectPI - patronageCapitalRetiredDPI)
	#Output - Direct Loan [E] [F] [G] [H]
	firstYearOPMainCosts = (1.25 * arraySizeDC * 12)
	firstYearInsuranceCosts = (0.37 * outData["totalCost"]/100)
	if (inputDict.get("landOwnership",0) == "Leased"):
		firstYearLandLeaseCosts = float(inputDict.get("costAcre",0))*landAmount
	else:
		firstYearLandLeaseCosts = 0
	# O&M escalates 1%/yr, insurance 2.5%/yr, land lease 1%/yr.
	for i in range (1, len(outData["allYearGenerationMWh"])+1):
		OMInsuranceETCDirect.append(-firstYearOPMainCosts*math.pow((1 + .01),(i-1)) - firstYearInsuranceCosts*math.pow((1 + .025),(i-1)) - firstYearLandLeaseCosts*math.pow((1 + .01),(i-1)))
		distAdderDirect.append(float(inputDict.get("distAdder",0))*outData["allYearGenerationMWh"][i])
		netCoopPaymentsDirect.append(OMInsuranceETCDirect[i-1] + netFinancingCostsDirect)
		costToCustomerDirect.append((netCoopPaymentsDirect[i-1] - distAdderDirect[i-1]))
	#Output - Direct Loan [F53]. The two leading zeros shift the cash flows
	#so discounting starts at the right year.
	NPVLoanDirect = npv(float(inputDict.get("discRate",0))/100, [0,0] + costToCustomerDirect)
	# BUGFIX: dict.values() is a view in Python 3 and cannot be concatenated
	# with a list; wrap in list(). Years 1..25 were inserted in order, so
	# insertion-ordered dicts yield the values chronologically.
	NPVallYearGenerationMWh = npv(float(inputDict.get("discRate",0))/100, [0,0] + list(outData["allYearGenerationMWh"].values()))
	Rate_Levelized_Direct = -NPVLoanDirect/NPVallYearGenerationMWh	
	#Master Output [Direct Loan]
	outData["levelCostDirect"] = Rate_Levelized_Direct
	outData["costPanelDirect"] = abs(NPVLoanDirect/numberPanels)
	outData["cost10WPanelDirect"] = (float(outData["costPanelDirect"])/panelSize)*10
	### NCREBs Financing
	ncrebsRate = float(inputDict.get("NCREBRate",4.060))/100
	ncrebBorrowingRate = 1.1 * ncrebsRate
	ncrebPaymentPeriods = 44  # semi-annual periods => 22 payment years
	ncrebCostToCustomer = []
	# TODO ASAP: FIX ARRAY OFFSETS START 0
	for i in range (1, len(outData["allYearGenerationMWh"])+1):
		coopLoanPayment = 2 * pmt(ncrebBorrowingRate/2.0, ncrebPaymentPeriods, outData["totalCost"]) if i <= ncrebPaymentPeriods / 2 else 0
		# 70% federal interest credit on both semi-annual periods of year i.
		ncrebsCredit = -0.7 * (ipmt(ncrebsRate / 2, 2 * i - 1, ncrebPaymentPeriods, outData["totalCost"])
			+ ipmt(ncrebsRate / 2, 2 * i, ncrebPaymentPeriods, outData["totalCost"])) if i <= ncrebPaymentPeriods / 2 else 0
		financingCost = ncrebsCredit + coopLoanPayment
		omCost = OMInsuranceETCDirect[i - 1]
		netCoopPayments = financingCost + omCost
		distrAdder = distAdderDirect[i - 1]
		costToCustomer = netCoopPayments + distrAdder
		ncrebCostToCustomer.append(costToCustomer)
	NPVLoanNCREB = npv(float(inputDict.get("discRate", 0))/100, [0,0] + ncrebCostToCustomer)
	Rate_Levelized_NCREB = -NPVLoanNCREB/NPVallYearGenerationMWh	
	outData["levelCostNCREB"] = Rate_Levelized_NCREB
	outData["costPanelNCREB"] = abs(NPVLoanNCREB/numberPanels)
	outData["cost10WPanelNCREB"] = (float(outData["costPanelNCREB"])/panelSize)*10
	### Lease Buyback Structure
	#Output - Lease [C]
	projectCostsLease = outData["totalCost"]
	#Output - Lease [D]
	leasePaymentsLease = []
	#Output - Lease [E]
	OMInsuranceETCLease = OMInsuranceETCDirect
	#Output - Lease [F]
	distAdderLease = distAdderDirect
	#Output - Lease [G]
	netCoopPaymentsLease = []
	#Output - Lease [H]
	costToCustomerLease = []
	#Output - Lease [H44]
	NPVLease = 0
	#Output - Lease [H49]
	Rate_Levelized_Lease = 0
	## Tax Lease Formulas
	#Output - Lease [D]: 12 years of annuity-style payments.
	for i in range (0, 12):
		leaseRate = float(inputDict.get("taxLeaseRate",0))/100.0
		if i>8: # Special behavior in later years:
			leaseRate = leaseRate - 0.0261
		leasePaymentsLease.append(-1*projectCostsLease/((1.0-(1.0/(1.0+leaseRate)**12))/(leaseRate)))
	# Last year is different: 20% residual buyout.
	leasePaymentsLease[11] += -0.2*projectCostsLease
	for i in range (12, 25):
		leasePaymentsLease.append(0)
	#Output - Lease [G]	[H]
	for i in range (1, len(outData["allYearGenerationMWh"])+1):
		netCoopPaymentsLease.append(OMInsuranceETCLease[i-1]+leasePaymentsLease[i-1])
		costToCustomerLease.append(netCoopPaymentsLease[i-1]-distAdderLease[i-1])
	#Output - Lease [H44]. Note the extra year at the zero point to get the discounting right.
	NPVLease = npv(float(inputDict.get("discRate", 0))/100, [0,0]+costToCustomerLease)
	#Output - Lease [H49] (Levelized Cost Three Loops)
	Rate_Levelized_Lease = -NPVLease/NPVallYearGenerationMWh
	#Master Output [Lease]
	outData["levelCostTaxLease"] = Rate_Levelized_Lease
	outData["costPanelTaxLease"] = abs(NPVLease/numberPanels)
	outData["cost10WPanelTaxLease"] = (float(outData["costPanelTaxLease"])/float(panelSize))*10
	### Tax Equity Flip Structure
	def taxEquityFlip(PPARateSixYearsTE, discRate, totalCost, allYearGenerationMWh, distAdderDirect, loanYears, firstYearLandLeaseCosts, firstYearOPMainCosts, firstYearInsuranceCosts, numberPanels):
		'''Model one tax-equity-flip scenario at the given 6-year PPA rate.

		Returns (cumulativeIRR, Rate_Levelized_TaxEquity, NPVLoanTaxEquity).
		The co-op funds 47% of the project (1 - 0.53 investor share); the
		investor exits after year 6 ("the flip") via a 5% buyout.
		'''
		#Output Tax Equity Flip [C]
		coopInvestmentTaxEquity = -totalCost*(1-0.53)
		#Output Tax Equity Flip [D]
		financeCostCashTaxEquity = 0
		#Output Tax Equity Flip [E]
		cashToSPEOForPPATE  = []
		#Output Tax Equity Flip [F]
		derivedCostEnergyTE  = 0
		#Output Tax Equity Flip [G]
		OMInsuranceETCTE = []
		#Output Tax Equity Flip [H]
		cashFromSPEToBlockerTE = []
		#Output Tax Equity Flip [I]
		cashFromBlockerTE = 0
		#Output Tax Equity Flip [J]
		distAdderTaxEquity = distAdderDirect
		#Output Tax Equity Flip [K]
		netCoopPaymentsTaxEquity = []
		#Output Tax Equity Flip [L]
		costToCustomerTaxEquity = []
		#Output Tax Equity Flip [L64]
		NPVLoanTaxEquity = 0
		#Output Tax Equity Flip [F72]
		Rate_Levelized_Equity = 0
		## Tax Equity Flip Formulas
		#Output Tax Equity Flip [D]
		#TEI Calcs [E]
		financeCostOfCashTE = 0
		coopFinanceRateTE = 2.7/100
		# NOTE(review): with the hard-coded 2.7% rate the zero-rate branch is
		# unreachable; if it ever fired, `payment` would be unbound below.
		if (coopFinanceRateTE == 0):
			financeCostOfCashTE = 0
		else:
			payment = pmt(coopFinanceRateTE, loanYears, -coopInvestmentTaxEquity)
		financeCostCashTaxEquity = payment
		#Output Tax Equity Flip [E]: co-op buys PPA power for the first 6 years.
		SPERevenueTE = []
		for i in range (1, len(allYearGenerationMWh)+1):
			SPERevenueTE.append(PPARateSixYearsTE * allYearGenerationMWh[i])
			if ((i>=1) and (i<=6)):
				cashToSPEOForPPATE.append(-SPERevenueTE[i-1])
			else:
				cashToSPEOForPPATE.append(0)
		#Output Tax Equity Flip [F]
		derivedCostEnergyTE = cashToSPEOForPPATE[0]/allYearGenerationMWh[1]
		#Output Tax Equity Flip [G]
		#TEI Calcs [F]	[U] [V]
		landLeaseTE = []
		OMTE = []
		insuranceTE = []
		for i in range (1, len(allYearGenerationMWh)+1):
			landLeaseTE.append(firstYearLandLeaseCosts*math.pow((1 + .01),(i-1)))
			OMTE.append(-firstYearOPMainCosts*math.pow((1 + .01),(i-1)))
			insuranceTE.append(- firstYearInsuranceCosts*math.pow((1 + .025),(i-1)) )
			if (i<7):
				# Pre-flip the SPE carries O&M/insurance; co-op only pays land lease.
				OMInsuranceETCTE.append(float(landLeaseTE[i-1]))
			else:
				OMInsuranceETCTE.append(float(OMTE[i-1]) + float(insuranceTE[i-1]) + float(landLeaseTE[i-1]))
		#Output Tax Equity Flip [H]
		#TEI Calcs [T]
		SPEMgmtFeeTE = []
		EBITDATE = []
		EBITDATEREDUCED = []
		managementFee = 10000
		for i in range (1, len(SPERevenueTE)+1):
			SPEMgmtFeeTE.append(-managementFee*math.pow((1 + .01),(i-1)))
			EBITDATE.append(float(SPERevenueTE[i-1]) + float(OMTE[i-1]) + float(insuranceTE[i-1]) + float(SPEMgmtFeeTE[i-1]))
			if (i<=6):
				cashFromSPEToBlockerTE.append(float(EBITDATE[i-1]) * .01)
			else:
				cashFromSPEToBlockerTE.append(0)
				EBITDATEREDUCED.append(EBITDATE[i-1])
		#Output Tax Equity Flip [I]
		#TEI Calcs [Y21]: buyout = 5% of post-flip EBITDA discounted at 12%.
		cashRevenueTE = -totalCost * (1 - 0.53)
		buyoutAmountTE = 0
		for i in range (1, len(EBITDATEREDUCED) + 1):
			buyoutAmountTE = buyoutAmountTE + EBITDATEREDUCED[i-1]/(math.pow(1+0.12,i))
		buyoutAmountTE = buyoutAmountTE * 0.05
		cashFromBlockerTE = - (buyoutAmountTE) + 0.0725 * cashRevenueTE
		#Output Tax Equity Flip [K] [L]: year 6 additionally carries the buyout.
		for i in range (1, len(allYearGenerationMWh)+1):
			if (i==6):
				netCoopPaymentsTaxEquity.append(financeCostCashTaxEquity + cashToSPEOForPPATE[i-1] + cashFromSPEToBlockerTE[i-1] + OMInsuranceETCTE[i-1] + cashFromBlockerTE)
			else:
				netCoopPaymentsTaxEquity.append(financeCostCashTaxEquity + cashFromSPEToBlockerTE[i-1] + cashToSPEOForPPATE[i-1] + OMInsuranceETCTE[i-1])
			costToCustomerTaxEquity.append(netCoopPaymentsTaxEquity[i-1] - distAdderTaxEquity[i-1])
		#Output Tax Equity Flip [L37]
		NPVLoanTaxEquity = npv(float(inputDict.get("discRate",0))/100, [0, 0] + costToCustomerTaxEquity)
		#Output - Tax Equity [F42] 
		Rate_Levelized_TaxEquity = -NPVLoanTaxEquity/NPVallYearGenerationMWh
		#TEI Calcs - Achieved Return [AW 21]
			#[AK] 5-year MACRS depreciation schedule on the ITC-reduced basis.
		MACRDepreciation = []
		MACRDepreciation.append(-0.99*0.2*(totalCost-totalCost*0.5*0.9822*0.3))
		MACRDepreciation.append(-0.99*0.32*(totalCost-totalCost*0.5*0.9822*0.3))
		MACRDepreciation.append(-0.99*0.192*(totalCost-totalCost*0.5*0.9822*0.3))
		MACRDepreciation.append(-0.99*0.1152*(totalCost-totalCost*0.5*0.9822*0.3))
		MACRDepreciation.append(-0.99*0.1152*(totalCost-totalCost*0.5*0.9822*0.3))
		MACRDepreciation.append(-0.99*0.0576*(totalCost-totalCost*0.5*0.9822*0.3))
		#[AI] [AL]	[AN]
		cashRevenueTEI = [] 	                          	#[AI]
		slDepreciation = []		                            #[AL]
		totalDistributions = []                         	#[AN]
		cashRevenueTEI.append(-totalCost*0.53)
		for i in range (1,7):
			cashRevenueTEI.append(EBITDATE[i-1]*0.99)
			slDepreciation.append(totalCost/25)
			totalDistributions.append(-cashRevenueTEI[i])
		#[AJ] Investment tax credit: 30% of 98.22% of cost, 99% investor share.
		ITC = totalCost*0.9822*0.3*0.99
		#[AM]						
		taxableIncLoss = [0]
		taxableIncLoss.append(cashRevenueTEI[1]+MACRDepreciation[0])
		#[AO] Capital account cannot go negative.
		capitalAcct = []
		capitalAcct.append(totalCost*0.53)
		condition = capitalAcct[0] - 0.5*ITC + taxableIncLoss[1] + totalDistributions[0]
		if condition > 0:
			capitalAcct.append(condition)
		else:
			capitalAcct.append(0)
		#[AQ]
		ratioTE = [0]
		#[AP]		     
		reallocatedIncLoss = []
		#AO-1 + AN + AI + AK + AJ
		for i in range (0, 5):
			reallocatedIncLoss.append(capitalAcct[i+1] + totalDistributions[i+1] + MACRDepreciation[i+1] + cashRevenueTEI[i+2])
			ratioTE.append(reallocatedIncLoss[i]/(cashRevenueTEI[i+2] + MACRDepreciation[i+1]))
			taxableIncLoss.append(cashRevenueTEI[i+2]+MACRDepreciation[i+1]-ratioTE[i+1]*(MACRDepreciation[i+1]-totalDistributions[i+1]))
			condition = capitalAcct[i+1] + taxableIncLoss[i+2] + totalDistributions[i+1]
			if condition > 0:
				capitalAcct.append(condition)
			else:
				capitalAcct.append(0)
		#[AR] 35% corporate tax rate assumed.
		taxesBenefitLiab = [0]
		for i in range (1, 7):
			taxesBenefitLiab.append(-taxableIncLoss[i]*0.35)
		#[AS] [AT]
		buyoutAmount = 0
		taxFromBuyout = 0
		for i in range (0, len(EBITDATEREDUCED)):
			buyoutAmount = buyoutAmount + .05*EBITDATEREDUCED[i]/(math.pow(1.12,(i+1)))
		taxFromBuyout = -buyoutAmount*0.35
		#[AU] [AV]
		totalCashTax = []
		cumulativeCashTax = [0]
		for i in range (0, 7):
			if i == 1:
				totalCashTax.append(cashRevenueTEI[i] + ITC + taxesBenefitLiab[i] + 0 + 0)
				cumulativeCashTax.append(cumulativeCashTax[i] + totalCashTax[i])
			elif i == 6:
				totalCashTax.append(cashRevenueTEI[i] + 0 + taxesBenefitLiab[i] + buyoutAmount + taxFromBuyout)
				cumulativeCashTax.append(cumulativeCashTax[i] + totalCashTax[i] + buyoutAmount + taxFromBuyout)
			else:
				totalCashTax.append(cashRevenueTEI[i] + 0 + taxesBenefitLiab[i] + 0 + 0)
				cumulativeCashTax.append(cumulativeCashTax[i] + totalCashTax[i])
		#[AW21] Only report an IRR if the investor ends cash-positive.
		if (cumulativeCashTax[7] > 0):
			cumulativeIRR = round(irr(totalCashTax), 4)
		else:
			cumulativeIRR = 0
		return cumulativeIRR, Rate_Levelized_TaxEquity, NPVLoanTaxEquity
	# Search for the lowest PPA rate that achieves the investor's target
	# return, refining z (rate in hundredths of a cent) over 3 passes with
	# step sizes 1, 10, 100.
	z = 0
	PPARateSixYearsTE = z / 100
	nGoal = float(inputDict.get("taxEquityReturn",0))/100
	nValue = 0
	for p in range (0, 3):
		while ((z < 50000) and (nValue < nGoal)):
			achievedReturnTE, Rate_Levelized_TaxEquity, NPVLoanTaxEquity = taxEquityFlip(PPARateSixYearsTE, inputDict.get("discRate", 0), outData["totalCost"], outData["allYearGenerationMWh"], distAdderDirect, loanYears, firstYearLandLeaseCosts, firstYearOPMainCosts, firstYearInsuranceCosts, numberPanels)
			nValue = achievedReturnTE
			z = z + math.pow(10,p)
			PPARateSixYearsTE = z/100.0
	# NOTE(review): this backtrack sits outside the `for p` loop, so only the
	# final pass's overshoot is undone — looks like it was meant to be inside
	# the loop; preserved as-is to keep results unchanged.
	z = z - math.pow(10,p)	
	PPARateSixYearsTE = z/100
	#Master Output [Tax Equity]
	outData["levelCostTaxEquity"] = Rate_Levelized_TaxEquity
	outData["costPanelTaxEquity"] = abs(NPVLoanTaxEquity/numberPanels)
	outData["cost10WPanelTaxEquity"] = (float(outData["costPanelTaxEquity"])/panelSize)*10
	### PPA Comparison
	#Output - PPA [F]
	distAdderPPA = distAdderDirect
	#Output - PPA [G]
	netCoopPaymentsPPA = []
	#Output - PPA [H]
	costToCustomerPPA = []
	#Output - PPA [H40]
	NPVLoanPPA = 0
	#Output - PPA [I40]
	Rate_Levelized_PPA = 0
	## PPA Formulas
	#Output - PPA [G] [H]: energy cost escalates annually at the PPA rate.
	for i in range (1, len(outData["allYearGenerationMWh"])+1):
		netCoopPaymentsPPA.append(-outData["allYearGenerationMWh"][i]*float(inputDict.get("firstYearEnergyCostPPA",0))*math.pow((1 + float(inputDict.get("annualEscRatePPA", 0))/100),(i-1)))
		costToCustomerPPA.append(netCoopPaymentsPPA[i-1]-distAdderPPA[i-1])
	#Output - PPA [H58] 
	NPVLoanPPA = npv(float(inputDict.get("discRate", 0))/100, [0,0]+costToCustomerPPA)
	#Output - PPA [F65] 
	Rate_Levelized_PPA = -NPVLoanPPA/NPVallYearGenerationMWh
	#Master Output [PPA]
	outData["levelCostPPA"] = Rate_Levelized_PPA
	outData["firstYearCostKWhPPA"] = float(inputDict.get("firstYearEnergyCostPPA",0))
	outData["yearlyEscalationPPA"] = float(inputDict.get("annualEscRatePPA", 0))
	# Add all Levelized Costs to Output
	outData["LevelizedCosts"] = [["Direct Loan", Rate_Levelized_Direct],
		["NCREBs Financing", Rate_Levelized_NCREB],
		["Lease Buyback", Rate_Levelized_Lease],
		["Tax Equity Flip", Rate_Levelized_TaxEquity]]
	outData["LevelizedCosts"].append({"name":"PPA Comparison", "y":Rate_Levelized_PPA, "color":"gold"})
	# Stdout/stderr.
	outData["stdout"] = "Success"
	outData["stderr"] = ""
	return outData
Esempio n. 11
0
def run(modelDir, inputDict):
	try:
		'''Run the model in its directory.

		Creates the model directory if needed, persists the inputs, runs a
		one-year SAM pvwattsv1 simulation on the zipcode's climate file,
		post-processes via tjCode(), and writes allOutputData.json. On any
		failure, cancels the run and records the traceback to stderr.txt
		and allInputData.json instead of raising.
		'''
		# Check whether model exist or not
		if not os.path.isdir(modelDir):
			os.makedirs(modelDir)
			inputDict["created"] = str(dt.now())
		# MAYBEFIX: remove this data dump. Check showModel in web.py and renderTemplate()
		with open(pJoin(modelDir, "allInputData.json"),"w") as inputFile:
			json.dump(inputDict, inputFile, indent = 4)
		# Copy specific climate data into model directory. This variant of
		# zipCodeToClimateName returns (name, latitude) — latitude unused here.
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, "climate.tmy2"))
		# Ready to run
		startTime = dt.now()
		# Set up SAM data structures.
		ssc = nrelsam2013.SSCAPI()
		dat = ssc.ssc_data_create()
		# Required user inputs.
		ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
		ssc.ssc_data_set_number(dat, "system_size", float(inputDict["SystemSize"]))
		# SAM options where we take defaults.
		ssc.ssc_data_set_number(dat, "derate", 0.97)
		ssc.ssc_data_set_number(dat, "track_mode", 0)
		ssc.ssc_data_set_number(dat, "azimuth", 180)
		ssc.ssc_data_set_number(dat, "tilt_eq_lat", 1)
		# Run PV system simulation.
		mod = ssc.ssc_module_create("pvwattsv1")
		ssc.ssc_module_exec(mod, dat)
		# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
		startDateTime = "2013-01-01 00:00:00 UTC"
		# Timestamp output: 8760 hourly stamps for one year.
		outData = {}
		outData["timeStamps"] = [dt.strftime(
			dt.strptime(startDateTime[0:19],"%Y-%m-%d %H:%M:%S") +
			td(**{"hours":x}),"%Y-%m-%d %H:%M:%S") + " UTC" for x in range(int(8760))]
		# HACK: makes it easier to calculate some things later.
		outData["pythonTimeStamps"] = [dt(2012,1,1,0) + x*td(hours=1) for x in range(8760)]
		# Geodata output.
		outData["city"] = ssc.ssc_data_get_string(dat, "city")
		outData["state"] = ssc.ssc_data_get_string(dat, "state")
		outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
		outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
		outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
		# Weather output.
		outData["climate"] = {}
		outData["climate"]["Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(dat, "gh")
		outData["climate"]["Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(dat, "poa")
		outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(dat, "tamb")
		outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(dat, "tcell")
		outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(dat, "wspd")
		# Power generation.
		outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")

		# TODO: INSERT TJ CODE BELOW
		tjCode(inputDict, outData)
		del outData["pythonTimeStamps"]
		# TODO: INSERT TJ CODE ABOVE

		# Stdout/stderr.
		outData["stdout"] = "Success"
		outData["stderr"] = ""
		# Write the output.
		with open(pJoin(modelDir,"allOutputData.json"),"w") as outFile:
			json.dump(outData, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = dt.now()
		inputDict["runTime"] = str(td(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	except:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		# BUGFIX: was a Python 2 print statement (syntax error on Python 3).
		# This form produces identical space-separated output on both 2 and 3.
		print('ERROR IN MODEL {0} {1}'.format(modelDir, thisErr))
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
Esempio n. 12
0
def work(modelDir, inputDict):
	'''Run the solar financial model in its directory.

	Simulates hourly PV generation with the NREL SAM pvwattsv1 module,
	aggregates it monthly, then builds a side-by-side utility financial
	comparison: outData["BAU"] (business as usual) vs. outData["Solar"]
	(with residential solar penetration), following an RUS Form 7 style
	spreadsheet (the E../F.. cell comments below map to that sheet).
	Returns the outData dict; the caller is expected to persist it.

	NOTE(review): assumes `dt` is the `datetime` module (aliased at file
	top, outside this view), and that `zipCodeToClimateName`, `roundSig`,
	`nrelsam2013`, `pJoin` and `__neoMetaModel__` are in scope — confirm
	against the file header.
	'''
	# Copy the climate file matching the user's zip code into the model dir.
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	# Set up SAM data structures.
	ssc = nrelsam2013.SSCAPI()
	dat = ssc.ssc_data_create()
	# Required user inputs.
	ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
	# TODO: FIX THIS!!!! IT SHOULD BE AVGSYS*PEN*RESCUSTOMERS
	ssc.ssc_data_set_number(dat, "system_size", float(inputDict["systemSize"]))
	# SAM options where we take defaults.
	ssc.ssc_data_set_number(dat, "derate", 0.97)
	ssc.ssc_data_set_number(dat, "track_mode", 0)
	ssc.ssc_data_set_number(dat, "azimuth", 180)
	ssc.ssc_data_set_number(dat, "tilt_eq_lat", 1)
	# Run PV system simulation.
	mod = ssc.ssc_module_create("pvwattsv1")
	ssc.ssc_module_exec(mod, dat)
	# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
	startDateTime = "2013-01-01 00:00:00 UTC"
	# Timestamp output.
	outData = {}
	# One timestamp per hour for a full (non-leap) year: 8760 entries.
	outData["timeStamps"] = [dt.datetime.strftime(
		dt.datetime.strptime(startDateTime[0:19],"%Y-%m-%d %H:%M:%S") +
		dt.timedelta(**{"hours":x}),"%Y-%m-%d %H:%M:%S") + " UTC" for x in range(int(8760))]
	# Geodata output.
	outData["city"] = ssc.ssc_data_get_string(dat, "city")
	outData["state"] = ssc.ssc_data_get_string(dat, "state")
	outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
	outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
	outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
	# Weather output.
	outData["climate"] = {}
	outData["climate"]["Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(dat, "gh")
	outData["climate"]["Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(dat, "poa")
	outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(dat, "tamb")
	outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(dat, "tcell")
	outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(dat, "wspd")
	# Power generation.
	outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
	# Monthly aggregation outputs.
	# Month abbreviation -> zero-based month index; used both for ordering
	# and for mapping the 3-letter prefixes of per-month input keys.
	months = {"Jan":0,"Feb":1,"Mar":2,"Apr":3,"May":4,"Jun":5,"Jul":6,"Aug":7,"Sep":8,"Oct":9,"Nov":10,"Dec":11}
	# Sum hourly AC output over calendar month x (0-based) by matching the
	# "YYYY-MM" prefix of each timestamp.
	totMonNum = lambda x:sum([z for (y,z) in zip(outData["timeStamps"], outData["powerOutputAc"]) if y.startswith(startDateTime[0:4] + "-{0:02d}".format(x+1))])
	outData["monthlyGeneration"] = [[a, roundSig(totMonNum(b),2)] for (a,b) in sorted(months.items(), key=lambda x:x[1])]
	monthlyNoConsumerServedSales = []
	monthlyKWhSold = []
	monthlyRevenue = []
	totalKWhSold = []
	totalRevenue = []
	# Per-month inputs are keyed like "JanSale", "JanKWh", "JanRev",
	# "JanKWhT", "JanRevT"; bucket them by suffix. Suffix checks must go
	# from most specific where ambiguous ("KWhT"/"RevT" vs "KWh"/"Rev" are
	# distinguished because endswith("KWh") is tested before "KWhT" never
	# matches it -- "JanKWhT" ends with "KWhT" but NOT with "KWh"? it does
	# end with "KWh"? No: "JanKWhT" ends with "T", so endswith("KWh") is
	# False; ordering here is therefore safe).
	for key in inputDict:
		# MAYBEFIX: data in list may not be ordered by month.
		if key.endswith("Sale"):
			monthlyNoConsumerServedSales.append([key[:3].title(),float(inputDict.get(key, 0))])
		elif key.endswith("KWh"):# the order of calculation matters
			monthlyKWhSold.append([key[:3].title(),float(inputDict.get(key, 0))])
		elif key.endswith("Rev"):
			monthlyRevenue.append([key[:3].title(),float(inputDict.get(key, 0))])
		elif key.endswith("KWhT"):
			totalKWhSold.append([key[:3].title(),float(inputDict.get(key, 0))])
		elif key.endswith("RevT"):
			totalRevenue.append([key[:3].title(),float(inputDict.get(key, 0))])
	# Re-sort every per-month list into calendar order.
	outData["monthlyNoConsumerServedSales"] = sorted(monthlyNoConsumerServedSales, key=lambda x:months[x[0]])
	outData["monthlyKWhSold"] = sorted(monthlyKWhSold, key=lambda x:months[x[0]])
	outData["monthlyRevenue"] = sorted(monthlyRevenue, key=lambda x:months[x[0]])
	outData["totalKWhSold"] = sorted(totalKWhSold, key=lambda x:months[x[0]])
	outData["totalRevenue"] = sorted(totalRevenue, key=lambda x:months[x[0]])
	# Scale per-system generation by customer count and penetration (%/100)
	# and convert W to kW (/1000).
	outData["totalGeneration"] = [[sorted(months.items(), key=lambda x:x[1])[i][0], outData["monthlyGeneration"][i][1]*outData["monthlyNoConsumerServedSales"][i][1]*(float(inputDict.get("resPenetration", 5))/100/1000)] for i in range(12)]
	outData["totalSolarSold"] = [[sorted(months.items(), key=lambda x:x[1])[i][0], outData["totalKWhSold"][i][1] - outData["totalGeneration"][i][1]] for i in range(12)]
	##################
	# TODO: add retailCost to the calculation.
	##################
	## Flow Diagram Calculations, and order of calculation matters
	# BAU case
	outData["BAU"] = {}
	# E23 = E11
	# NOTE(review): default 1 here vs. default 0 for the same key below in
	# "losses" -- presumably 1 avoids a ZeroDivisionError in
	# effectiveLossRate; confirm intent.
	outData["BAU"]["totalKWhPurchased"] = float(inputDict.get("totalKWhPurchased", 1))
	# E24 = SUM(E19:P19)
	outData["BAU"]["totalKWhSales"] = sum([x[1] for x in totalKWhSold])
	# E25 = E23-E24
	outData["BAU"]["losses"] = float(inputDict.get("totalKWhPurchased", 0)) - sum([totalKWhSold[i][1] for i in range(12)])
	# E26 = E25/E23
	outData["BAU"]["effectiveLossRate"] = outData["BAU"]["losses"] / outData["BAU"]["totalKWhPurchased"]
	# E27 = 0
	outData["BAU"]["annualSolarGen"] = 0
	# E28 = SUM(E17:P17)
	outData["BAU"]["resNonSolarKWhSold"] = sum([monthlyKWhSold[i][1] for i in range(12)])
	# E29 = 0
	outData["BAU"]["solarResDemand"] = 0
	# E30 = 0
	outData["BAU"]["solarResSold"] = 0
	# E31 = E24-E28
	outData["BAU"]["nonResKWhSold"] = outData["BAU"]["totalKWhSales"] - outData["BAU"]["resNonSolarKWhSold"]
	# E32 = 0
	outData["BAU"]["costSolarGen"] = 0
	# E33 = SUM(E20:P20)-SUM(E18:P18)+E10
	outData["BAU"]["nonResRev"] = sum([totalRevenue[i][1] for i in range(12)]) - sum([monthlyRevenue[i][1] for i in range(12)]) + float(inputDict.get("otherElecRevenue"))
	# E34 = (SUM(E18:P18)-SUM(E16:P16)*E6)/SUM(E17:P17)
	# NOTE(review): raises ZeroDivisionError if all monthly kWh sold are 0.
	outData["BAU"]["effectiveResRate"] = (sum ([monthlyRevenue[i][1] for i in range(12)]) - sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])*float(inputDict.get("customServiceCharge", 0)))/sum([monthlyKWhSold[i][1] for i in range(12)])
	# E35 = E34*E28+SUM(E16:P16)*E6
	outData["BAU"]["resNonSolarRev"] = outData["BAU"]["effectiveResRate"] * outData["BAU"]["resNonSolarKWhSold"] + sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])*float(inputDict.get("customServiceCharge", 0))
	# E36 = E30*E34
	outData["BAU"]["solarResRev"] = 0
	# E37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71), update after Form 7 model
	outData["BAU"]["nonPowerCosts"] = 0
	# E38 = E23-E25-E28-E30-E31
	outData["BAU"]["energyAllBal"] = 0
	# E39 = E36+E33+E35-E47-E72-E37
	outData["BAU"]["dollarAllBal"] = 0
	# E40 = 0
	outData["BAU"]["avgMonthlyBillSolarCus"] = 0
	# E41 = E35/SUM(E16:P16)
	# NOTE(review): avgCustomerCount is computed but never used below.
	avgCustomerCount = (sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])/12)
	outData["BAU"]["avgMonthlyBillNonSolarCus"] = outData["BAU"]["resNonSolarRev"] / sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])
	# E42 = E63/E24, update after Form 7 model
	outData["BAU"]["costofService"] = 0
	# Solar case
	outData["Solar"] = {}
	# F27 = SUM(E15:P15)
	outData["Solar"]["annualSolarGen"] = sum([outData["totalGeneration"][i][1] for i in range(12)])
	# F24 = E24-F27
	outData["Solar"]["totalKWhSales"] = sum([totalKWhSold[i][1] for i in range(12)]) - outData["Solar"]["annualSolarGen"]
	# F23 =F24/(1-E26)
	outData["Solar"]["totalKWhPurchased"] = outData["Solar"]["totalKWhSales"]/ (1-outData["BAU"]["effectiveLossRate"])
	outData["totalsolarmonthly"] = [[sorted(months.items(), key=lambda x:x[1])[i][0], outData["totalSolarSold"][i][1] / (1-outData["BAU"]["effectiveLossRate"])] for i in range(12)]
	# F25 = F23-F24
	outData["Solar"]["losses"] = (outData["Solar"]["totalKWhPurchased"] - outData["Solar"]["totalKWhSales"])
	# F26 = E26
	outData["Solar"]["effectiveLossRate"] = outData["BAU"]["effectiveLossRate"]
	# F28 = (1-E5)*E28
	outData["Solar"]["resNonSolarKWhSold"] = (1-float(inputDict.get("resPenetration", 0))/100)*outData["BAU"]["resNonSolarKWhSold"]
	# F29 = E5*E28
	outData["Solar"]["solarResDemand"] = float(inputDict.get("resPenetration", 0))/100*outData["BAU"]["resNonSolarKWhSold"]
	# F30 = F29-F27
	outData["Solar"]["solarResSold"] = outData["Solar"]["solarResDemand"] - outData["Solar"]["annualSolarGen"]
	# F31 = E31
	outData["Solar"]["nonResKWhSold"] = outData["BAU"]["nonResKWhSold"]
	# F32 = E9*F27
	outData["Solar"]["costSolarGen"] = float(inputDict.get("solarLCoE", 0.07))*outData["Solar"]["annualSolarGen"]
	# F33 = E33
	outData["Solar"]["nonResRev"] = outData["BAU"]["nonResRev"]
	# F34 = E34
	outData["Solar"]["effectiveResRate"] = outData["BAU"]["effectiveResRate"]
	# F35 = E35*(1-E5)
	# NOTE(review): resPenetration defaults are inconsistent across this
	# function (5, 0, 0.05) -- values only matter when the key is missing,
	# but confirm which default is intended.
	outData["Solar"]["resNonSolarRev"] = outData["BAU"]["resNonSolarRev"] * (1 - float(inputDict.get("resPenetration", 0.05))/100)
	# F30*E34 = Solar revenue from selling at residential rate
	solarSoldRateRev = outData["Solar"]["solarResSold"] * outData["Solar"]["effectiveResRate"]
	# (E6+E7)*SUM(E16:P16)*E5 = Solar revenue from charges
	solarChargesRev = (float(inputDict.get("customServiceCharge", 0))+float(inputDict.get("solarServiceCharge", 0)))*sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])*float(inputDict.get("resPenetration", 0.05))/100
	# F36 = F30*E34+(E6+E7)*SUM(E16:P16)*E5 = solarSoldRate + solarChargesRev
	outData["Solar"]["solarResRev"] = solarSoldRateRev + solarChargesRev
	# F37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71) = E37, update after Form 7 model
	outData["Solar"]["nonPowerCosts"] = 0
	# F38 = F23-F25-F28-F30-E31
	outData["Solar"]["energyAllBal"] = 0
	# F39 = F36+E33+F35-F47-F72-E37
	outData["Solar"]["dollarAllBal"] = 0
	# Guard: per-customer bills only make sense with nonzero penetration.
	if (float(inputDict.get("resPenetration", 0.05)) > 0):
		# F41 = (F35)/(SUM(E16:P16)*(1-E5))
		outData["Solar"]["avgMonthlyBillNonSolarCus"] = outData["Solar"]["resNonSolarRev"] / (sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])* (1 - float(inputDict.get("resPenetration", 0.05))/100))
		# F42 = F30*E34/(SUM(E16:P16)*E5)+E6+E7
		outData["Solar"]["avgMonthlyBillSolarCus"] = outData["Solar"]["solarResSold"] * outData["BAU"]["effectiveResRate"] / (sum([monthlyNoConsumerServedSales[i][1] for i in range(12)]) * float(inputDict.get("resPenetration", 0.05))/100) + float(inputDict.get("customServiceCharge", 0))+float(inputDict.get("solarServiceCharge", 0))
		# F43 = (F27/(SUM(E16:P16)*E5))*E9
		outData["Solar"]["avgMonthlyBillSolarSolarCus"] = (outData["Solar"]["annualSolarGen"] / (sum([monthlyNoConsumerServedSales[i][1] for i in range(12)]) * float(inputDict.get("resPenetration", 0.05))/100)) * float(inputDict.get("solarLCoE", 0.07))
	else:
		outData["Solar"]["avgMonthlyBillNonSolarCus"] = 0
		outData["Solar"]["avgMonthlyBillSolarCus"] = 0
		outData["Solar"]["avgMonthlyBillSolarSolarCus"] = 0
	# Net Average Monthly Bill
	avgMonthlyBillSolarNet = outData["Solar"]["avgMonthlyBillSolarCus"] + outData["Solar"]["avgMonthlyBillSolarSolarCus"]
	outData["Solar"]["avgMonthlyBillSolarCus"] = avgMonthlyBillSolarNet
	# F45 = F63/F24, update after Form 7 model
	outData["Solar"]["costofService"] = 0
	## Form 7 Model
	# Pass-through expense inputs: identical in BAU and Solar cases unless
	# noted (chained assignment copies the same float into both dicts).
	# E46
	outData["Solar"]["powerProExpense"] = outData["BAU"]["powerProExpense"] = float(inputDict.get("powerProExpense", 0))
	# E47 != F47
	outData["BAU"]["costPurchasedPower"] = float(inputDict.get("costPurchasedPower", 0))
	# E48
	outData["Solar"]["transExpense"] = outData["BAU"]["transExpense"] = float(inputDict.get("transExpense", 0))
	# E49
	outData["Solar"]["distriExpenseO"] = outData["BAU"]["distriExpenseO"] = float(inputDict.get("distriExpenseO", 0))
	# E50
	outData["Solar"]["distriExpenseM"] = outData["BAU"]["distriExpenseM"] = float(inputDict.get("distriExpenseM", 0))
	# E51
	outData["Solar"]["customerAccountExpense"] = outData["BAU"]["customerAccountExpense"] = float(inputDict.get("customerAccountExpense", 0))
	# E52
	outData["Solar"]["customerServiceExpense"] = outData["BAU"]["customerServiceExpense"] = float(inputDict.get("customerServiceExpense", 0))
	# E53
	outData["Solar"]["salesExpense"] = outData["BAU"]["salesExpense"] = float(inputDict.get("salesExpense", 0))
	# E54
	outData["Solar"]["adminGeneralExpense"] = outData["BAU"]["adminGeneralExpense"] = float(inputDict.get("adminGeneralExpense", 0))
	# E56
	outData["Solar"]["depreAmortiExpense"] = outData["BAU"]["depreAmortiExpense"] = float(inputDict.get("depreAmortiExpense", 0))
	# E57
	outData["Solar"]["taxExpensePG"] = outData["BAU"]["taxExpensePG"] = float(inputDict.get("taxExpensePG", 0))
	# E58
	outData["Solar"]["taxExpense"] = outData["BAU"]["taxExpense"] = float(inputDict.get("taxExpense", 0))
	# E59
	outData["Solar"]["interestLongTerm"] = outData["BAU"]["interestLongTerm"] = float(inputDict.get("interestLongTerm", 0))
	# E60
	outData["Solar"]["interestConstruction"] = outData["BAU"]["interestConstruction"] = float(inputDict.get("interestConstruction", 0))
	# E61
	outData["Solar"]["interestExpense"] = outData["BAU"]["interestExpense"] = float(inputDict.get("interestExpense", 0))
	# E62
	outData["Solar"]["otherDeductions"] = outData["BAU"]["otherDeductions"] = float(inputDict.get("otherDeductions", 0))
	# E65
	outData["Solar"]["nonOpMarginInterest"] = outData["BAU"]["nonOpMarginInterest"] = float(inputDict.get("nonOpMarginInterest", 0))
	# E66
	outData["Solar"]["fundsUsedConstruc"] = outData["BAU"]["fundsUsedConstruc"] = float(inputDict.get("fundsUsedConstruc", 0))
	# E67
	outData["Solar"]["incomeEquityInvest"] = outData["BAU"]["incomeEquityInvest"] = float(inputDict.get("incomeEquityInvest", 0))
	# E68
	outData["Solar"]["nonOpMarginOther"] = outData["BAU"]["nonOpMarginOther"] = float(inputDict.get("nonOpMarginOther", 0))
	# E69
	outData["Solar"]["genTransCapCredits"] = outData["BAU"]["genTransCapCredits"] = float(inputDict.get("genTransCapCredits", 0))
	# E70
	outData["Solar"]["otherCapCreditsPatroDivident"] = outData["BAU"]["otherCapCreditsPatroDivident"] = float(inputDict.get("otherCapCreditsPatroDivident", 0))
	# E71
	outData["Solar"]["extraItems"] = outData["BAU"]["extraItems"] = float(inputDict.get("extraItems", 0))
	# Calculation
	# E45 = SUM(E20:P20)+E10
	outData["BAU"]["operRevPatroCap"] = sum([totalRevenue[i][1] for i in range(12)])+float(inputDict.get("otherElecRevenue", 0))
	# E55 = SUM(E46:E54)
	# NOTE(review): these .get() calls have no defaults, so a missing key
	# yields None and float(None) raises -- unlike the defaulted reads above.
	outData["BAU"]["totalOMExpense"] = float(inputDict.get("powerProExpense")) \
		+ float(inputDict.get("costPurchasedPower")) \
		+ float(inputDict.get("transExpense")) \
		+ float(inputDict.get("distriExpenseO")) \
		+ float(inputDict.get("distriExpenseM")) \
		+ float(inputDict.get("customerAccountExpense")) \
		+ float(inputDict.get("customerServiceExpense")) \
		+ float(inputDict.get("salesExpense"))  \
		+ float(inputDict.get("adminGeneralExpense"))
	# E63 = SUM(E55:E62)
	outData["BAU"]["totalCostElecService"] = outData["BAU"]["totalOMExpense"] \
		+ float(inputDict.get("depreAmortiExpense"))\
		+ float(inputDict.get("taxExpensePG"))\
		+ float(inputDict.get("taxExpense"))\
		+ float(inputDict.get("interestLongTerm"))\
		+ float(inputDict.get("interestExpense"))\
		+ float(inputDict.get("interestConstruction"))\
		+ outData["BAU"]["otherDeductions"]
	# E64 = E45-E63
	outData["BAU"]["patCapOperMargins"] = outData["BAU"]["operRevPatroCap"] - outData["BAU"]["totalCostElecService"]
	# E72 = SUM(E64:E71)
	outData["BAU"]["patCapital"] = outData["BAU"]["patCapOperMargins"]\
		+ float(inputDict.get("nonOpMarginInterest"))\
		+ float(inputDict.get("fundsUsedConstruc"))\
		+ float(inputDict.get("incomeEquityInvest"))\
		+ float(inputDict.get("nonOpMarginOther"))\
		+ float(inputDict.get("genTransCapCredits"))\
		+ float(inputDict.get("otherCapCreditsPatroDivident"))\
		+ float(inputDict.get("extraItems"))
	# F48 = E48-F27*E34+SUM(E16:P16)*E5*E7
	outData["Solar"]["operRevPatroCap"] = outData["BAU"]["operRevPatroCap"] - outData["BAU"]["effectiveResRate"]*outData["Solar"]["annualSolarGen"] + sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])*float(inputDict.get("resPenetration", 0.05))/100*float(inputDict.get("solarServiceCharge", 0))
	# F47 = (F23)*E8
	# NOTE(review): default 0 for totalKWhPurchased makes this a potential
	# ZeroDivisionError when the key is absent -- confirm inputs are validated
	# upstream. Also note this writes a derived value back into inputDict.
	inputDict["costofPower"] = float(inputDict.get("costPurchasedPower", 0)) /  float(inputDict.get("totalKWhPurchased", 0))
	outData["Solar"]["costPurchasedPower"] = outData["Solar"]["totalKWhPurchased"] * float(inputDict.get("costofPower", 0))
	inputDict["costofPower"] = round(inputDict["costofPower"],3)
	# F55 = SUM(F46:F54)
	outData["Solar"]["totalOMExpense"] = outData["Solar"]["powerProExpense"]\
		+ outData["Solar"]["costPurchasedPower"]\
		+ outData["Solar"]["transExpense"]\
		+ outData["Solar"]["distriExpenseO"]\
		+ outData["Solar"]["distriExpenseM"]\
		+ outData["Solar"]["customerAccountExpense"]\
		+ outData["Solar"]["customerServiceExpense"]\
		+ outData["Solar"]["salesExpense"]\
		+ outData["Solar"]["adminGeneralExpense"]
	# F63 = E63
	outData["Solar"]["totalCostElecService"] = outData["Solar"]["totalOMExpense"]\
		+ outData["Solar"]["depreAmortiExpense"]\
		+ outData["Solar"]["taxExpensePG"]\
		+ outData["Solar"]["taxExpense"]\
		+ outData["Solar"]["interestLongTerm"]\
		+ outData["Solar"]["interestConstruction"]\
		+ outData["Solar"]["interestExpense"]\
		+ outData["Solar"]["otherDeductions"]
	# F64 = F45 - F63
	outData["Solar"]["patCapOperMargins"] = outData["Solar"]["operRevPatroCap"] - outData["Solar"]["totalCostElecService"]
	# F72 = SUM(F64:F71)
	outData["Solar"]["patCapital"] = outData["Solar"]["patCapOperMargins"]\
		+ outData["Solar"]["nonOpMarginInterest"]\
		+ outData["Solar"]["fundsUsedConstruc"]\
		+ outData["Solar"]["incomeEquityInvest"]\
		+ outData["Solar"]["nonOpMarginOther"]\
		+ outData["Solar"]["genTransCapCredits"]\
		+ outData["Solar"]["otherCapCreditsPatroDivident"]\
		+ outData["Solar"]["extraItems"]
	# E37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71), update after Form 7 model
	outData["BAU"]["nonPowerCosts"] = outData["BAU"]["transExpense"] \
		+ outData["BAU"]["distriExpenseO"] \
		+ outData["BAU"]["distriExpenseM"] \
		+ outData["BAU"]["customerAccountExpense"] \
		+ outData["BAU"]["customerServiceExpense"] \
		+ outData["BAU"]["salesExpense"] \
		+ outData["BAU"]["adminGeneralExpense"] \
		+ outData["BAU"]["depreAmortiExpense"] \
		+ outData["BAU"]["taxExpensePG"] \
		+ outData["BAU"]["taxExpense"] \
		+ outData["BAU"]["interestLongTerm"] \
		+ outData["BAU"]["interestConstruction"] \
		+ outData["BAU"]["interestExpense"] \
		+ outData["BAU"]["otherDeductions"] \
		- (outData["BAU"]["nonOpMarginInterest"] \
		+ outData["BAU"]["fundsUsedConstruc"] \
		+ outData["BAU"]["incomeEquityInvest"] \
		+ outData["BAU"]["nonOpMarginOther"] \
		+ outData["BAU"]["genTransCapCredits"] \
		+ outData["BAU"]["otherCapCreditsPatroDivident"] \
		+ outData["BAU"]["extraItems"])
	# E42 = E63/E24, update after Form 7 model
	outData["BAU"]["costofService"] = outData["BAU"]["totalCostElecService"] / outData["BAU"]["totalKWhSales"]
	# F37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71) = E37, update after Form 7 model
	outData["Solar"]["nonPowerCosts"] = outData["BAU"]["nonPowerCosts"]
	# F42 = F63/F24, update after Form 7 model
	outData["Solar"]["costofService"] = outData["Solar"]["totalCostElecService"] / outData["Solar"]["totalKWhSales"]
	# Stdout/stderr.
	outData["stdout"] = "Success"
	outData["stderr"] = ""
	return outData
Esempio n. 13
0
def work(modelDir, inputDict):
    '''Run the PV-watts model in its directory and build plotly charts.

    Simulates PV generation with NREL SAM (pvwattsv1), aggregates hourly
    output over the requested simulation window via the project helper
    `_aggData`, then serializes plotly traces/layouts (power generation,
    irradiance, other climate variables) into outData as JSON strings for
    the front end. Returns the outData dict.

    NOTE(review): relies on `_aggData`, `zipCodeToClimateName`,
    `nrelsam2013`, `pJoin` and `__neoMetaModel__` defined elsewhere in
    this file/package -- confirm against the file header.
    '''
    #plotly imports. Here for now so web server starts.
    import plotly
    # from plotly import __version__
    # from plotly.offline import download_plotlyjs, plot
    # from plotly import tools
    import plotly.graph_objs as go
    # Copy specific climate data into model directory
    inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
    shutil.copy(
        pJoin(__neoMetaModel__._omfDir, "data", "Climate",
              inputDict["climateName"] + ".tmy2"),
        pJoin(modelDir, "climate.tmy2"))
    # Set up SAM data structures.
    ssc = nrelsam2013.SSCAPI()
    dat = ssc.ssc_data_create()
    # Required user inputs.
    ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
    ssc.ssc_data_set_number(dat, "system_size", float(inputDict["systemSize"]))
    # Efficiencies arrive as percentages; SAM wants fractions.
    ssc.ssc_data_set_number(dat, "derate",
                            0.01 * float(inputDict["nonInverterEfficiency"]))
    ssc.ssc_data_set_number(dat, "track_mode",
                            float(inputDict["trackingMode"]))
    ssc.ssc_data_set_number(dat, "azimuth", float(inputDict["azimuth"]))
    # Advanced inputs with defaults.
    # A tilt of "-" means "use latitude tilt" (tilt_eq_lat flag).
    if (inputDict.get("tilt", 0) == "-"):
        tilt_eq_lat = 1.0
        manualTilt = 0.0
    else:
        tilt_eq_lat = 0.0
        manualTilt = float(inputDict.get("tilt", 0))
    ssc.ssc_data_set_number(dat, "tilt_eq_lat", tilt_eq_lat)
    ssc.ssc_data_set_number(dat, "tilt", manualTilt)
    ssc.ssc_data_set_number(dat, "rotlim", float(inputDict["rotlim"]))
    # gamma (temperature coefficient) is entered positive, SAM wants negative.
    ssc.ssc_data_set_number(dat, "gamma", -1 * float(inputDict["gamma"]))
    ssc.ssc_data_set_number(dat, "inv_eff",
                            0.01 * float(inputDict["inverterEfficiency"]))
    ssc.ssc_data_set_number(dat, "w_stow", float(inputDict["w_stow"]))
    # Complicated optional inputs that we could enable later.
    # ssc.ssc_data_set_array(dat, 'shading_hourly', ...) 	# Hourly beam shading factors
    # ssc.ssc_data_set_matrix(dat, 'shading_mxh', ...) 		# Month x Hour beam shading factors
    # ssc.ssc_data_set_matrix(dat, 'shading_azal', ...) 	# Azimuth x altitude beam shading factors
    # ssc.ssc_data_set_number(dat, 'shading_diff', ...) 	# Diffuse shading factor
    # ssc.ssc_data_set_number(dat, 'enable_user_poa', ...)	# Enable user-defined POA irradiance input = 0 or 1
    # ssc.ssc_data_set_array(dat, 'user_poa', ...) 			# User-defined POA irradiance in W/m2
    # ssc.ssc_data_set_number(dat, 'tilt', 999)
    # ssc.ssc_data_set_number(dat, "t_noct", float(inputDict["t_noct"]))
    # ssc.ssc_data_set_number(dat, "t_ref", float(inputDict["t_ref"]))
    # ssc.ssc_data_set_number(dat, "fd", float(inputDict["fd"]))
    # ssc.ssc_data_set_number(dat, "i_ref", float(inputDict["i_ref"]))
    # ssc.ssc_data_set_number(dat, "poa_cutin", float(inputDict["poa_cutin"]))
    # Run PV system simulation.
    mod = ssc.ssc_module_create("pvwattsv1")
    ssc.ssc_module_exec(mod, dat)
    # Setting options for start time.
    simLengthUnits = inputDict.get("simLengthUnits", "")
    simStartDate = inputDict["simStartDate"]
    # Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
    startDateTime = simStartDate + " 00:00:00 UTC"
    # Set aggregation function constants.
    # agg(channel, fn): pull a SAM output array and aggregate it over the
    # sim window via the project helper _aggData (defined elsewhere).
    agg = lambda x, y: _aggData(x, y, inputDict["simStartDate"],
                                int(inputDict["simLength"]), inputDict[
                                    "simLengthUnits"], ssc, dat)
    avg = lambda x: sum(x) / len(x)
    # Timestamp output.
    outData = {}
    # One timestamp per step; simLengthUnits must be a valid
    # datetime.timedelta keyword ("hours", "days", ...).
    outData["timeStamps"] = [
        datetime.datetime.strftime(
            datetime.datetime.strptime(startDateTime[0:19],
                                       "%Y-%m-%d %H:%M:%S") +
            datetime.timedelta(**{simLengthUnits: x}), "%Y-%m-%d %H:%M:%S") +
        " UTC" for x in range(int(inputDict["simLength"]))
    ]
    # Geodata output.
    outData["city"] = ssc.ssc_data_get_string(dat, "city")
    outData["state"] = ssc.ssc_data_get_string(dat, "state")
    outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
    outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
    outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
    # Weather output.
    outData["climate"] = {}
    outData["climate"]["Plane of Array Irradiance (W/m^2)"] = agg("poa", avg)
    outData["climate"]["Beam Normal Irradiance (W/m^2)"] = agg("dn", avg)
    outData["climate"]["Diffuse Irradiance (W/m^2)"] = agg("df", avg)
    outData["climate"]["Ambient Temperature (F)"] = agg("tamb", avg)
    outData["climate"]["Cell Temperature (F)"] = agg("tcell", avg)
    outData["climate"]["Wind Speed (m/s)"] = agg("wspd", avg)
    # Power generation.
    outData["Consumption"] = {}
    outData["Consumption"]["Power"] = [x for x in agg("ac", avg)]
    outData["Consumption"]["Losses"] = [0 for x in agg("ac", avg)]
    outData["Consumption"]["DG"] = agg("ac", avg)

    #Plotly data sets for power generation graphs
    convertedDateStrings = [
        datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S %Z")
        for x in outData["timeStamps"]
    ]
    powerGeneration = go.Scatter(x=convertedDateStrings,
                                 y=outData["Consumption"]["Power"],
                                 line=dict(color=('red')),
                                 name="Power Generated")

    # Inverter size 0 means "match the panel nameplate" for charting.
    chartInverter = None
    if float(inputDict["inverterSize"]) == 0:
        chartInverter = float(inputDict["systemSize"])
    else:
        chartInverter = float(inputDict["inverterSize"])

    # Nameplate reference lines (kW inputs scaled to W for the chart).
    panelsNameplate = go.Scatter(x=convertedDateStrings,
                                 y=[
                                     float(inputDict['systemSize']) * 1000
                                     for x in range(len(convertedDateStrings))
                                 ],
                                 line=dict(dash='dash', color='orange'),
                                 name="Panels Nameplate")
    inverterNameplate = go.Scatter(
        x=convertedDateStrings,
        y=[chartInverter * 1000 for x in range(len(convertedDateStrings))],
        line=dict(dash='dash', color='orange'),
        name="inverter Nameplate")

    #Set Power generation plotly layout
    powerGenerationLayout = go.Layout(width=1000,
                                      height=375,
                                      xaxis=dict(showgrid=False, ),
                                      legend=dict(x=0, y=1.25,
                                                  orientation="h"))
    #Combine all datasets for plotly graph
    powerGenerationData = [powerGeneration, panelsNameplate, inverterNameplate]
    #Example updating go object
    powerGenerationLayout['yaxis'].update(title='Power (W-AC)')
    #fig = go.Figure(data=powerGenerationData, layout=powerGenerationLayout)
    #inlinePlot = plotly.offline.plot(fig, include_plotlyjs=False, output_type='div')
    #outData["plotlyDiv"] = html.escape(json.dumps(inlinePlot, cls=plotly.utils.PlotlyJSONEncoder))

    #Plotly power generation outputs
    # Traces/layouts are stored as JSON strings for the template to embed.
    outData["powerGenerationData"] = json.dumps(
        powerGenerationData, cls=plotly.utils.PlotlyJSONEncoder)
    outData["powerGenerationLayout"] = json.dumps(
        powerGenerationLayout, cls=plotly.utils.PlotlyJSONEncoder)

    #Irradiance plotly data
    poaIrradiance = go.Scatter(
        x=convertedDateStrings,
        y=outData["climate"]["Plane of Array Irradiance (W/m^2)"],
        line=dict(color='yellow'),
        name="Plane of Array Irradiance (W/m^2)")
    beamNormalIrradiance = go.Scatter(
        x=convertedDateStrings,
        y=outData["climate"]["Beam Normal Irradiance (W/m^2)"],
        line=dict(color='gold'),
        name="Beam Normal Irradiance (W/m^2)")
    diffuseIrradiance = go.Scatter(
        x=convertedDateStrings,
        y=outData["climate"]["Diffuse Irradiance (W/m^2)"],
        line=dict(color='lemonchiffon'),
        name="Diffuse Irradiance (W/m^2)")
    irradianceData = [poaIrradiance, beamNormalIrradiance, diffuseIrradiance]

    #Set Power generation plotly layout
    irradianceLayout = go.Layout(width=1000,
                                 height=375,
                                 xaxis=dict(showgrid=False, ),
                                 yaxis=dict(title="Climate Units", ),
                                 legend=dict(x=0, y=1.25, orientation="h"))
    outData["irradianceData"] = json.dumps(irradianceData,
                                           cls=plotly.utils.PlotlyJSONEncoder)
    outData["irradianceLayout"] = json.dumps(
        irradianceLayout, cls=plotly.utils.PlotlyJSONEncoder)

    #Other Climate Variables plotly data
    ambientTemperature = go.Scatter(
        x=convertedDateStrings,
        y=outData["climate"]["Ambient Temperature (F)"],
        line=dict(color='dimgray'),
        name="Ambient Temperature (F)")
    cellTemperature = go.Scatter(x=convertedDateStrings,
                                 y=outData["climate"]["Cell Temperature (F)"],
                                 line=dict(color='gainsboro'),
                                 name="Cell Temperature (F)")
    windSpeed = go.Scatter(x=convertedDateStrings,
                           y=outData["climate"]["Wind Speed (m/s)"],
                           line=dict(color='darkgray'),
                           name="Wind Speed (m/s)")
    otherClimateData = [ambientTemperature, cellTemperature, windSpeed]

    #Set Power generation plotly layout
    otherClimateLayout = go.Layout(width=1000,
                                   height=375,
                                   xaxis=dict(showgrid=False, ),
                                   yaxis=dict(title="Climate Units", ),
                                   legend=dict(x=0, y=1.25, orientation="h"))
    outData["otherClimateData"] = json.dumps(
        otherClimateData, cls=plotly.utils.PlotlyJSONEncoder)
    outData["otherClimateLayout"] = json.dumps(
        otherClimateLayout, cls=plotly.utils.PlotlyJSONEncoder)
    # Stdout/stderr.
    outData["stdout"] = "Success"
    outData["stderr"] = ""
    return outData
Esempio n. 14
0
def work(modelDir, inputDict):
	''' Run the model in its directory. '''
	outData = {}
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	hazardPath = pJoin(modelDir,inputDict['weatherImpactsFileName'])
	with open(hazardPath,'w') as hazardFile:
		hazardFile.write(inputDict['weatherImpacts'])
	with open(pJoin(modelDir, feederName + '.omd'), "r") as jsonIn:
		feederModel = json.load(jsonIn)
	# Create GFM input file.
	print "RUNNING GFM FOR", modelDir
	critLoads = inputDict['criticalLoads']
	gfmInputTemplate = {
		'phase_variation' : float(inputDict['phaseVariation']),
		'chance_constraint' : float(inputDict['chanceConstraint']),
		'critical_load_met' : float(inputDict['criticalLoadMet']),
		'total_load_met' : float(inputDict['nonCriticalLoadMet']),
		'maxDGPerGenerator' : float(inputDict['maxDGPerGenerator']),
		'dgUnitCost' : float(inputDict['dgUnitCost']),
		'generatorCandidates' : inputDict['generatorCandidates'],
		'criticalLoads' : inputDict['criticalLoads']
	}
	gfmJson = convertToGFM(gfmInputTemplate, feederModel)
	gfmInputFilename = 'gfmInput.json'
	with open(pJoin(modelDir, gfmInputFilename), 'w') as outFile:
		json.dump(gfmJson, outFile, indent=4)
	# Check for overlap between hazard field and GFM circuit input:
	hazard = HazardField(hazardPath)
	if circuitOutsideOfHazard(hazard, gfmJson):
		outData['warning'] = 'Warning: the hazard field does not overlap with the circuit.'
	# Draw hazard field if needed.
	if inputDict['showHazardField'] == 'Yes':
		hazard.drawHeatMap(show=False)
		plt.title('') #Hack: remove plot title.
	# Run GFM
	gfmBinaryPath = pJoin(__neoMetaModel__._omfDir,'solvers','gfm', 'Fragility.jar')
	rdtInputName = 'rdtInput.json'
	if platform.system() == 'Darwin':
		#HACK: force use of Java8 on MacOS.
		javaCmd = '/Library/Java/JavaVirtualMachines/jdk1.8.0_181.jdk/Contents/Home/bin/java'
	else:
		javaCmd = 'java'
	proc = subprocess.Popen([javaCmd,'-jar', gfmBinaryPath, '-r', gfmInputFilename, '-wf', inputDict['weatherImpactsFileName'],'-num','3','-ro',rdtInputName], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=modelDir)
	(stdout,stderr) = proc.communicate()
	with open(pJoin(modelDir, "gfmConsoleOut.txt"), "w") as gfmConsoleOut:
		gfmConsoleOut.write(stdout)
	rdtInputFilePath = pJoin(modelDir,'rdtInput.json')
	# Pull GFM input data on lines and generators for HTML presentation.
	with open(rdtInputFilePath, 'r') as rdtInputFile:
		# HACK: we use rdtInput as a string in the frontend.
		rdtJsonAsString = rdtInputFile.read()
		rdtJson = json.loads(rdtJsonAsString)
	rdtJson["power_flow"] = inputDict["power_flow"]
	rdtJson["solver_iteration_timeout"] = 300.0
	rdtJson["algorithm"] = "miqp"
	# Calculate line costs.
	lineData = {}
	for line in rdtJson["lines"]:
		lineData[line["id"]] = '{:,.2f}'.format(float(line["length"]) * float(inputDict["lineUnitCost"]))
	outData["lineData"] = lineData
	outData["generatorData"] = '{:,.2f}'.format(float(inputDict["dgUnitCost"]) * float(inputDict["maxDGPerGenerator"]))
	outData['gfmRawOut'] = rdtJsonAsString
	# Insert user-specified scenarios block into RDT input
	if inputDict['scenarios'] != "":
		rdtJson['scenarios'] = json.loads(inputDict['scenarios'])
		with open(pJoin(rdtInputFilePath), "w") as rdtInputFile:
			json.dump(rdtJson, rdtInputFile, indent=4)
	# Run GridLAB-D first time to generate xrMatrices.
	print "RUNNING 1ST GLD RUN FOR", modelDir
	omdPath = pJoin(modelDir, feederName + ".omd")
	with open(omdPath, "r") as omd:
		omd = json.load(omd)
	# Remove new line candidates to get normal system powerflow results.
	deleteList = []
	newLines = inputDict["newLineCandidates"].strip().replace(' ', '').split(',')
	for newLine in newLines:
		for omdObj in omd["tree"]:
			if ("name" in omd["tree"][omdObj]):
				if (newLine == omd["tree"][omdObj]["name"]):
					deleteList.append(omdObj)
	for delItem in deleteList:
		del omd["tree"][delItem]
	#Load a blank glm file and use it to write to it
	feederPath = pJoin(modelDir, 'feeder.glm')
	with open(feederPath, 'w') as glmFile:
		toWrite =  omf.feeder.sortedWrite(omd['tree']) + "object jsondump {\n\tfilename_dump_reliability JSON_dump_line.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n"
		glmFile.write(toWrite)		
	#Write attachments from omd, if no file, one will be created
	for fileName in omd['attachments']:
		with open(os.path.join(modelDir, fileName),'w') as file:
			file.write(omd['attachments'][fileName])
	#Wire in the file the user specifies via zipcode.
	climateFileName = zipCodeToClimateName(inputDict["simulationZipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", climateFileName + ".tmy2"), pJoin(modelDir, 'climate.tmy2'))
	# Platform specific binaries for GridLAB-D First Run.
	if platform.system() == "Linux":
		myEnv = os.environ.copy()
		myEnv['GLPATH'] = omf.omfDir + '/solvers/gridlabdv990/'
		commandString = omf.omfDir + '/solvers/gridlabdv990/gridlabd.bin feeder.glm'  
	elif platform.system() == "Windows":
		myEnv = os.environ.copy()
		commandString =  '"' + pJoin(omf.omfDir, "solvers", "gridlabdv990", "gridlabd.exe") + '"' + " feeder.glm"
	elif platform.system() == "Darwin":
		myEnv = os.environ.copy()
		myEnv['GLPATH'] = omf.omfDir + '/solvers/gridlabdv990/MacRC4p1_std8/'
		commandString = '"' + omf.omfDir + '/solvers/gridlabdv990/MacRC4p1_std8/gld.sh" feeder.glm'
	# Run GridLAB-D First Time.
	proc = subprocess.Popen(commandString, stdout=subprocess.PIPE, shell=True, cwd=modelDir, env=myEnv)
	(out, err) = proc.communicate()
	with open(pJoin(modelDir, "gldConsoleOut.txt"), "w") as gldConsoleOut:
		gldConsoleOut.write(out)
	with open(pJoin(modelDir, "JSON_dump_line.json"), "r") as gldOut:
		gld_json_line_dump = json.load(gldOut)
	outData['gridlabdRawOut'] = gld_json_line_dump
	# Add GridLAB-D line objects and line codes in to the RDT model.
	rdtJson["line_codes"] = gld_json_line_dump["properties"]["line_codes"]
	rdtJson["lines"] = gld_json_line_dump["properties"]["lines"]
	hardCands = list(set(gfmJson['lineLikeObjs']) - set(inputDict['hardeningCandidates']))
	newLineCands = inputDict['newLineCandidates'].strip().replace(' ', '').split(',')
	switchCands = inputDict['switchCandidates'].strip().replace(' ', '').split(',')
	for line in rdtJson["lines"]:
		line_id = line.get('id','') # this is equal to name in the OMD objects.
		object_type = line.get('object','')
		line['node1_id'] = line['node1_id'] + "_bus"
		line['node2_id'] = line['node2_id'] + "_bus"
		line_code = line["line_code"]
		# Getting ratings from OMD
		tree = omd['tree']
		nameToIndex = {tree[key].get('name',''):key for key in tree}
		treeOb = tree[nameToIndex[line_id]]
		config_name = treeOb.get('configuration','')
		config_ob = tree.get(nameToIndex[config_name], {})
		full_rating = 0
		for phase in ['A','B','C']:
			cond_name = config_ob.get('conductor_' + phase, '')
			cond_ob = tree.get(nameToIndex.get(cond_name, ''), '')
			rating = cond_ob.get('rating.summer.continuous','')
			try:
				full_rating = int(rating) #TODO: replace with avg of 3 phases.
			except:
				pass
		if full_rating != 0:
			line['capacity'] = full_rating
		else:
			line['capacity'] = 10000
		# Setting other line parameters.
		line['construction_cost'] = float(inputDict['lineUnitCost'])
		line['harden_cost'] = float(inputDict['hardeningUnitCost'])
		line['switch_cost'] = float(inputDict['switchCost'])
		if line_id in hardCands:
			line['can_harden'] = True
		if line_id in switchCands:
			line['can_add_switch'] = True
		if line_id in newLineCands:
			line['is_new'] = True
		if object_type in ['transformer','regulator']: 
			line['is_transformer'] = True
		if object_type == 'switch':
			line['has_switch'] = True
	with open(rdtInputFilePath, "w") as outFile:
		json.dump(rdtJson, outFile, indent=4)
	# Run RDT.
	print "RUNNING RDT FOR", modelDir
	rdtOutFile = modelDir + '/rdtOutput.json'
	rdtSolverFolder = pJoin(__neoMetaModel__._omfDir,'solvers','rdt')
	rdtJarPath = pJoin(rdtSolverFolder,'micot-rdt.jar')
	# TODO: modify path, don't assume SCIP installation.
	proc = subprocess.Popen(['java', "-Djna.library.path=" + rdtSolverFolder, '-jar', rdtJarPath, '-c', rdtInputFilePath, '-e', rdtOutFile], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
	(stdout,stderr) = proc.communicate()
	with open(pJoin(modelDir, "rdtConsoleOut.txt"), "w") as rdtConsoleOut:
		rdtConsoleOut.write(stdout)
	rdtRawOut = open(rdtOutFile).read()
	outData['rdtRawOut'] = rdtRawOut
	# Indent the RDT output nicely.
	with open(pJoin(rdtOutFile),"w") as outFile:
		rdtOut = json.loads(rdtRawOut)
		json.dump(rdtOut, outFile, indent = 4)
	# Generate and run 2nd copy of GridLAB-D model with changes specified by RDT.
	print "RUNNING 2ND GLD RUN FOR", modelDir
	feederCopy = copy.deepcopy(feederModel)
	lineSwitchList = []
	edgeLabels = {}
	generatorList = []
	for gen in rdtOut['design_solution']['generators']:
		generatorList.append(gen['id'][:-4])
	damagedLoads = {}
	for scenario in rdtOut['scenario_solution']:
		for load in scenario['loads']:
			if load['id'] in damagedLoads.keys():
				damagedLoads[load['id'][:-4]] += 1
			else:
				damagedLoads[load['id'][:-4]] = 1
	for line in rdtOut['design_solution']['lines']:
		if('switch_built' in line and 'hardened' in line):
			lineSwitchList.append(line['id'])
			if (line['switch_built'] == True and line['hardened'] == True):
				edgeLabels[line['id']] = "SH"
			elif(line['switch_built'] == True):
				edgeLabels[line['id']] = "S"
			elif (line['hardened'] == True):
				edgeLabels[line['id']] = "H"
		elif('switch_built' in line):
			lineSwitchList.append(line['id'])
			if (line['switch_built'] == True):
				edgeLabels[line['id']] = "S"
		elif('hardened' in line):
			if (line['hardened'] == True):
				edgeLabels[line['id']] = "H"
	# Remove nonessential lines in second model as indicated by RDT output.
	for key in feederCopy['tree'].keys():
		value = feederCopy['tree'][key]
		if('object' in value):
			if (value['object'] == 'underground_line') or (value['object'] == 'overhead_line'):
				if value['name'] not in lineSwitchList:
					del feederCopy['tree'][key]
	# Add generators to second model.
	maxTreeKey = int(max(feederCopy['tree'], key=int)) + 1
	maxTreeKey = max(feederCopy['tree'], key=int)
	# Load a blank glm file and use it to write to it
	feederPath = pJoin(modelDir, 'feederSecond.glm')
	with open(feederPath, 'w') as glmFile:
		toWrite =  "module generators;\n\n" + omf.feeder.sortedWrite(feederCopy['tree']) + "object voltdump {\n\tfilename voltDump2ndRun.csv;\n};\nobject jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n"# + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
		glmFile.write(toWrite)
	# Run GridLAB-D second time.
	if platform.system() == "Windows":
		proc = subprocess.Popen(['gridlabd', 'feederSecond.glm'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=modelDir)
		(out, err) = proc.communicate()
		outData["secondGLD"] = str(os.path.isfile(pJoin(modelDir,"voltDump2ndRun.csv")))
	else:
		# TODO: make 2nd run of GridLAB-D work on Unixes.
		outData["secondGLD"] = str(False)
	# Draw the feeder.
	damageDict = {}
	for scenario in rdtJson["scenarios"]:
		for line in scenario["disable_lines"]:
			if line in damageDict:
				damageDict[line] = damageDict[line] + 1
			else:
				damageDict[line] = 1
	genDiagram(modelDir, feederModel, damageDict, critLoads, damagedLoads, edgeLabels, generatorList)
	with open(pJoin(modelDir,"feederChart.png"),"rb") as inFile:
		outData["oneLineDiagram"] = inFile.read().encode("base64")
	# And we're done.
	return outData
Esempio n. 15
0
def work(modelDir, inputDict):
    '''Run the model in its directory.

    Builds GFM (fragility model) input from the feeder, runs the
    Fragility.jar solver, runs GridLAB-D (natively on Windows, via omf's
    gridlabd wrapper elsewhere), runs the micot-rdt.jar RDT solver, and
    draws the feeder diagram. Returns the outData dict rendered by the
    model's HTML template.
    '''
    outData = {}
    feederName = inputDict["feederName1"]
    # Persist the user-supplied hazard (weather impacts) file for GFM.
    with open(pJoin(modelDir, inputDict['weatherImpactsFileName']),
              'w') as hazardFile:
        hazardFile.write(inputDict['weatherImpacts'])
    with open(pJoin(modelDir, feederName + '.omd'), "r") as jsonIn:
        feederModel = json.load(jsonIn)
    # Create GFM input file.
    print "Running GFM ************************************"
    gfmInputTemplate = {
        'phase_variation': float(inputDict['phaseVariation']),
        'chance_constraint': float(inputDict['chanceConstraint']),
        'critical_load_met': float(inputDict['criticalLoadMet']),
        'total_load_met':
        1.0,  #(float(inputDict['criticalLoadMet']) + float(inputDict['nonCriticalLoadMet'])),
        'xrMatrices': inputDict["xrMatrices"],
        'maxDGPerGenerator': float(inputDict["maxDGPerGenerator"]),
        'newLineCandidates': inputDict['newLineCandidates'],
        'hardeningCandidates': inputDict['hardeningCandidates'],
        'switchCandidates': inputDict['switchCandidates'],
        'hardeningUnitCost': inputDict['hardeningUnitCost'],
        'switchCost': inputDict['switchCost'],
        'generatorCandidates': inputDict['generatorCandidates'],
        'lineUnitCost': inputDict['lineUnitCost']
    }
    gfmJson = convertToGFM(gfmInputTemplate, feederModel)
    gfmInputFilename = 'gfmInput.json'
    with open(pJoin(modelDir, gfmInputFilename), "w") as outFile:
        json.dump(gfmJson, outFile, indent=4)
    # Run GFM
    gfmBinaryPath = pJoin(__neoMetaModel__._omfDir, 'solvers', 'gfm',
                          'Fragility.jar')
    # Solver output goes straight to the console; we only wait for exit.
    proc = subprocess.Popen([
        'java', '-jar', gfmBinaryPath, '-r', gfmInputFilename, '-wf',
        inputDict['weatherImpactsFileName'], '-num', '3'
    ],
                            cwd=modelDir)
    proc.wait()
    # HACK: rename the hardcoded gfm output
    rdtInputFilePath = pJoin(modelDir, 'rdtInput.json')
    print 'Before weird RENAMING STUFF!!!!'
    # GFM always writes 'rdt_OUTPUT.json'; rename it to the RDT input name.
    os.rename(pJoin(modelDir, 'rdt_OUTPUT.json'), rdtInputFilePath)
    # print 'RENAME FROM', pJoin(modelDir,'rdt_OUTPUT.json')
    # print 'RENAME TO', rdtInputFilePath
    # print 'After weird RENAMING STUFF!!!!'
    #raise Exception('Go no further')
    # Pull GFM input data on lines and generators for HTML presentation.
    with open(rdtInputFilePath, 'r') as rdtInputFile:
        # HACK: we use rdtInput as a string in the frontend.
        rdtJsonAsString = rdtInputFile.read()
        rdtJson = json.loads(rdtJsonAsString)
    # Calculate line costs: (id, formatted length * unit cost) pairs.
    lineData = []
    for line in rdtJson["lines"]:
        lineData.append((line["id"], '{:,.2f}'.format(
            float(line["length"]) * float(inputDict["lineUnitCost"]))))
    outData["lineData"] = lineData
    outData["generatorData"] = '{:,.2f}'.format(
        float(inputDict["dgUnitCost"]) * float(inputDict["maxDGPerGenerator"]))
    outData['gfmRawOut'] = rdtJsonAsString
    # Insert the user-specified scenarios block into the RDT input, if any.
    if inputDict['scenarios'] != "":
        rdtJson['scenarios'] = json.loads(inputDict['scenarios'])
        with open(pJoin(rdtInputFilePath), "w") as rdtInputFile:
            json.dump(rdtJson, rdtInputFile, indent=4)
    # Run GridLAB-D first time to generate xrMatrices.
    if platform.system() == "Windows":
        omdPath = pJoin(modelDir, feederName + ".omd")
        # NOTE: the name 'omd' is rebound from file handle to parsed dict.
        with open(omdPath, "r") as omd:
            omd = json.load(omd)
        #REMOVE NEWLINECANDIDATES
        # Strip proposed new lines so GridLAB-D solves the as-built system.
        deleteList = []
        newLines = inputDict["newLineCandidates"].strip().replace(
            ' ', '').split(',')
        for newLine in newLines:
            for omdObj in omd["tree"]:
                if ("name" in omd["tree"][omdObj]):
                    if (newLine == omd["tree"][omdObj]["name"]):
                        deleteList.append(omdObj)
        for delItem in deleteList:
            del omd["tree"][delItem]
        #Load a blank glm file and use it to write to it
        feederPath = pJoin(modelDir, 'feeder.glm')
        with open(feederPath, 'w') as glmFile:
            #toWrite =  omf.feeder.sortedWrite(omd['tree']) + "object jsondump {\n\tfilename_dump_reliability test_JSON_dump1.json;\n\twrite_reliability true;\n\tfilename_dump_line test_JSON_dump2.json;\n\twrite_line true;\n};\n"# + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
            # Append a jsondump object so GridLAB-D emits line/system data.
            toWrite = omf.feeder.sortedWrite(
                omd['tree']
            ) + "object jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n"  # + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
            glmFile.write(toWrite)
        #Write attachments from omd, if no file, one will be created
        for fileName in omd['attachments']:
            with open(os.path.join(modelDir, fileName), 'w') as file:
                file.write(omd['attachments'][fileName])
        #Wire in the file the user specifies via zipcode.
        climateFileName, latforpvwatts = zipCodeToClimateName(
            inputDict["simulationZipCode"])
        shutil.copy(
            pJoin(__neoMetaModel__._omfDir, "data", "Climate",
                  climateFileName + ".tmy2"), pJoin(modelDir, 'climate.tmy2'))
        proc = subprocess.Popen(['gridlabd', 'feeder.glm'],
                                stdout=subprocess.PIPE,
                                shell=True,
                                cwd=modelDir)
        (out, err) = proc.communicate()
        accumulator = ""
        with open(pJoin(modelDir, "JSON_dump_line.json"), "r") as gldOut:
            accumulator = json.load(gldOut)
        outData['gridlabdRawOut'] = accumulator
        #THIS IS THE CODE THAT ONCE FRANK GETS DONE WITH GRIDLAB-D NEEDS TO BE UNCOMMENTED
        '''rdtJson["line_codes"] = accumulator["properties"]["line_codes"]
		rdtJson["lines"] = accumulator["properties"]["lines"]
		with open(pJoin(modelDir, rdtInputFilePath), "w") as outFile:
			json.dump(rdtJson, outFile, indent=4)'''
    else:
        # Non-Windows: run GridLAB-D through omf's in-filesystem wrapper.
        tree = feederModel.get("tree", {})
        attachments = feederModel.get("attachments", {})
        climateFileName, latforpvwatts = zipCodeToClimateName(
            inputDict["simulationZipCode"])
        shutil.copy(
            pJoin(__neoMetaModel__._omfDir, "data", "Climate",
                  climateFileName + ".tmy2"), pJoin(modelDir, 'climate.tmy2'))
        gridlabdRawOut = gridlabd.runInFilesystem(tree,
                                                  attachments=attachments,
                                                  workDir=modelDir)
        outData['gridlabdRawOut'] = gridlabdRawOut
    # Run RDT.
    print "Running RDT ************************************"
    rdtOutFile = modelDir + '/rdtOutput.json'
    rdtSolverFolder = pJoin(__neoMetaModel__._omfDir, 'solvers', 'rdt')
    rdtJarPath = pJoin(rdtSolverFolder, 'micot-rdt.jar')
    proc = subprocess.Popen([
        'java', "-Djna.library.path=" + rdtSolverFolder, '-jar', rdtJarPath,
        '-c', rdtInputFilePath, '-e', rdtOutFile
    ])
    proc.wait()
    rdtRawOut = open(rdtOutFile).read()
    outData['rdtRawOut'] = rdtRawOut
    # Indent the RDT output nicely.
    with open(pJoin(rdtOutFile), "w") as outFile:
        rdtOut = json.loads(rdtRawOut)
        json.dump(rdtOut, outFile, indent=4)
    # TODO: run GridLAB-D second time to validate RDT results with new control schemes.
    # Draw the feeder.
    genDiagram(modelDir, feederModel)
    with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile:
        outData["oneLineDiagram"] = inFile.read().encode("base64")
    return outData
Esempio n. 16
0
def run(modelDir, inputDict):
	try:
		''' Run the model in its directory. '''
		# Check whether model exist or not
		if not os.path.isdir(modelDir):
			os.makedirs(modelDir)
			inputDict["created"] = str(dt.datetime.now())
		# MAYBEFIX: remove this data dump. Check showModel in web.py and renderTemplate()
		with open(pJoin(modelDir, "allInputData.json"),"w") as inputFile:
			json.dump(inputDict, inputFile, indent = 4)
		# Copy spcific climate data into model directory
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, "climate.tmy2"))
		# Ready to run
		startTime = dt.datetime.now()
		# Set up SAM data structures.
		ssc = nrelsam2013.SSCAPI()
		dat = ssc.ssc_data_create()
		# Required user inputs.
		ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
		# TODO: FIX THIS!!!! IT SHOULD BE AVGSYS*PEN*RESCUSTOMERS
		ssc.ssc_data_set_number(dat, "system_size", float(inputDict["systemSize"]))
		# SAM options where we take defaults.
		ssc.ssc_data_set_number(dat, "derate", 0.97)
		ssc.ssc_data_set_number(dat, "track_mode", 0)
		ssc.ssc_data_set_number(dat, "azimuth", 180)
		ssc.ssc_data_set_number(dat, "tilt_eq_lat", 1)
		# Run PV system simulation.
		mod = ssc.ssc_module_create("pvwattsv1")
		ssc.ssc_module_exec(mod, dat)
		# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
		startDateTime = "2013-01-01 00:00:00 UTC"
		# Timestamp output.
		outData = {}
		outData["timeStamps"] = [dt.datetime.strftime(
			dt.datetime.strptime(startDateTime[0:19],"%Y-%m-%d %H:%M:%S") +
			dt.timedelta(**{"hours":x}),"%Y-%m-%d %H:%M:%S") + " UTC" for x in range(int(8760))]
		# Geodata output.
		outData["city"] = ssc.ssc_data_get_string(dat, "city")
		outData["state"] = ssc.ssc_data_get_string(dat, "state")
		outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
		outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
		outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
		# Weather output.
		outData["climate"] = {}
		outData["climate"]["Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(dat, "gh")
		outData["climate"]["Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(dat, "poa")
		outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(dat, "tamb")
		outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(dat, "tcell")
		outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(dat, "wspd")
		# Power generation.
		outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
		# Monthly aggregation outputs.
		months = {"Jan":0,"Feb":1,"Mar":2,"Apr":3,"May":4,"Jun":5,"Jul":6,"Aug":7,"Sep":8,"Oct":9,"Nov":10,"Dec":11}
		totMonNum = lambda x:sum([z for (y,z) in zip(outData["timeStamps"], outData["powerOutputAc"]) if y.startswith(startDateTime[0:4] + "-{0:02d}".format(x+1))])
		outData["monthlyGeneration"] = [[a, roundSig(totMonNum(b),2)] for (a,b) in sorted(months.items(), key=lambda x:x[1])]
		monthlyNoConsumerServedSales = []
		monthlyKWhSold = []
		monthlyRevenue = []
		totalKWhSold = []
		totalRevenue = []
		for key in inputDict:
			# MAYBEFIX: data in list may not be ordered by month.
			if key.endswith("Sale"):
				monthlyNoConsumerServedSales.append([key[:3].title(),float(inputDict.get(key, 0))])
			elif key.endswith("KWh"):# the order of calculation matters
				monthlyKWhSold.append([key[:3].title(),float(inputDict.get(key, 0))])
			elif key.endswith("Rev"):
				monthlyRevenue.append([key[:3].title(),float(inputDict.get(key, 0))])
			elif key.endswith("KWhT"):
				totalKWhSold.append([key[:3].title(),float(inputDict.get(key, 0))])
			elif key.endswith("RevT"):
				totalRevenue.append([key[:3].title(),float(inputDict.get(key, 0))])
		outData["monthlyNoConsumerServedSales"] = sorted(monthlyNoConsumerServedSales, key=lambda x:months[x[0]])
		outData["monthlyKWhSold"] = sorted(monthlyKWhSold, key=lambda x:months[x[0]])
		outData["monthlyRevenue"] = sorted(monthlyRevenue, key=lambda x:months[x[0]])
		outData["totalKWhSold"] = sorted(totalKWhSold, key=lambda x:months[x[0]])
		outData["totalRevenue"] = sorted(totalRevenue, key=lambda x:months[x[0]])
		outData["totalGeneration"] = [[sorted(months.items(), key=lambda x:x[1])[i][0], outData["monthlyGeneration"][i][1]*outData["monthlyNoConsumerServedSales"][i][1]*(float(inputDict.get("resPenetration", 5))/100/1000)] for i in range(12)]
		outData["totalSolarSold"] = [[sorted(months.items(), key=lambda x:x[1])[i][0], outData["totalKWhSold"][i][1] - outData["totalGeneration"][i][1]] for i in range(12)]
		##################
		# TODO: add retailCost to the calculation.
		##################
		## Flow Diagram Calculations, and order of calculation matters
		# BAU case
		outData["BAU"] = {}
		# E23 = E11
		outData["BAU"]["totalKWhPurchased"] = float(inputDict.get("totalKWhPurchased", 1))
		# E24 = SUM(E19:P19)
		outData["BAU"]["totalKWhSales"] = sum([x[1] for x in totalKWhSold])
		# E25 = E23-E24
		outData["BAU"]["losses"] = float(inputDict.get("totalKWhPurchased", 0)) - sum([totalKWhSold[i][1] for i in range(12)])
		# E26 = E25/E23
		outData["BAU"]["effectiveLossRate"] = outData["BAU"]["losses"] / outData["BAU"]["totalKWhPurchased"]
		# E27 = 0
		outData["BAU"]["annualSolarGen"] = 0
		# E28 = SUM(E17:P17)
		outData["BAU"]["resNonSolarKWhSold"] = sum([monthlyKWhSold[i][1] for i in range(12)])
		# E29 = 0
		outData["BAU"]["solarResDemand"] = 0
		# E30 = 0
		outData["BAU"]["solarResSold"] = 0
		# E31 = E24-E28
		outData["BAU"]["nonResKWhSold"] = outData["BAU"]["totalKWhSales"] - outData["BAU"]["resNonSolarKWhSold"]
		# E32 = 0
		outData["BAU"]["costSolarGen"] = 0
		# E33 = SUM(E20:P20)-SUM(E18:P18)+E10
		outData["BAU"]["nonResRev"] = sum([totalRevenue[i][1] for i in range(12)]) - sum([monthlyRevenue[i][1] for i in range(12)]) + float(inputDict.get("otherElecRevenue"))
		# E34 = (SUM(E18:P18)-SUM(E16:P16)*E6)/SUM(E17:P17)
		outData["BAU"]["effectiveResRate"] = (sum ([monthlyRevenue[i][1] for i in range(12)]) - sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])*float(inputDict.get("customServiceCharge", 0)))/sum([monthlyKWhSold[i][1] for i in range(12)])
		# E35 = E34*E28+SUM(E16:P16)*E6
		outData["BAU"]["resNonSolarRev"] = outData["BAU"]["effectiveResRate"] * outData["BAU"]["resNonSolarKWhSold"] + sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])*float(inputDict.get("customServiceCharge", 0))
		# E36 = E30*E34
		outData["BAU"]["solarResRev"] = 0
		# E37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71), update after Form 7 model
		outData["BAU"]["nonPowerCosts"] = 0
		# E38 = E23-E25-E28-E30-E31
		outData["BAU"]["energyAllBal"] = 0
		# E39 = E36+E33+E35-E47-E72-E37
		outData["BAU"]["dollarAllBal"] = 0
		# E40 = 0
		outData["BAU"]["avgMonthlyBillSolarCus"] = 0
		# E41 = E35/SUM(E16:P16)
		avgCustomerCount = (sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])/12)
		outData["BAU"]["avgMonthlyBillNonSolarCus"] = outData["BAU"]["resNonSolarRev"] / sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])
		# E42 = E63/E24, update after Form 7 model
		outData["BAU"]["costofService"] = 0
		# Solar case
		outData["Solar"] = {}
		# F27 = SUM(E15:P15)
		outData["Solar"]["annualSolarGen"] = sum([outData["totalGeneration"][i][1] for i in range(12)])
		# F24 = E24-F27
		outData["Solar"]["totalKWhSales"] = sum([totalKWhSold[i][1] for i in range(12)]) - outData["Solar"]["annualSolarGen"]
		# F23 =F24/(1-E26)
		outData["Solar"]["totalKWhPurchased"] = outData["Solar"]["totalKWhSales"]/ (1-outData["BAU"]["effectiveLossRate"])
		outData["totalsolarmonthly"] = [[sorted(months.items(), key=lambda x:x[1])[i][0], outData["totalSolarSold"][i][1] / (1-outData["BAU"]["effectiveLossRate"])] for i in range(12)]
		# F25 = F23-F24
		outData["Solar"]["losses"] = (outData["Solar"]["totalKWhPurchased"] - outData["Solar"]["totalKWhSales"])
		# F26 = E26
		outData["Solar"]["effectiveLossRate"] = outData["BAU"]["effectiveLossRate"]
		# F28 = (1-E5)*E28
		outData["Solar"]["resNonSolarKWhSold"] = (1-float(inputDict.get("resPenetration", 0))/100)*outData["BAU"]["resNonSolarKWhSold"]
		# F29 = E5*E28
		outData["Solar"]["solarResDemand"] = float(inputDict.get("resPenetration", 0))/100*outData["BAU"]["resNonSolarKWhSold"]
		# F30 = F29-F27
		outData["Solar"]["solarResSold"] = outData["Solar"]["solarResDemand"] - outData["Solar"]["annualSolarGen"]
		# F31 = E31
		outData["Solar"]["nonResKWhSold"] = outData["BAU"]["nonResKWhSold"]
		# F32 = E9*F27
		outData["Solar"]["costSolarGen"] = float(inputDict.get("solarLCoE", 0.07))*outData["Solar"]["annualSolarGen"]
		# F33 = E33
		outData["Solar"]["nonResRev"] = outData["BAU"]["nonResRev"]
		# F34 = E34
		outData["Solar"]["effectiveResRate"] = outData["BAU"]["effectiveResRate"]
		# F35 = E35*(1-E5)
		outData["Solar"]["resNonSolarRev"] = outData["BAU"]["resNonSolarRev"] * (1 - float(inputDict.get("resPenetration", 0.05))/100)
		# F30*E34 = Solar revenue from selling at residential rate
		solarSoldRateRev = outData["Solar"]["solarResSold"] * outData["Solar"]["effectiveResRate"]
		# (E6+E7)*SUM(E16:P16)*E5 = Solar revenue from charges
		solarChargesRev = (float(inputDict.get("customServiceCharge", 0))+float(inputDict.get("solarServiceCharge", 0)))*sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])*float(inputDict.get("resPenetration", 0.05))/100
		# F36 = F30*E34+(E6+E7)*SUM(E16:P16)*E5 = solarSoldRate + solarChargesRev
		outData["Solar"]["solarResRev"] = solarSoldRateRev + solarChargesRev
		# F37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71) = E37, update after Form 7 model
		outData["Solar"]["nonPowerCosts"] = 0
		# F38 = F23-F25-F28-F30-E31
		outData["Solar"]["energyAllBal"] = 0
		# F39 = F36+E33+F35-F47-F72-E37
		outData["Solar"]["dollarAllBal"] = 0
		if (float(inputDict.get("resPenetration", 0.05)) > 0):
			# F41 = (F35)/(SUM(E16:P16)*(1-E5))
			outData["Solar"]["avgMonthlyBillNonSolarCus"] = outData["Solar"]["resNonSolarRev"] / (sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])* (1 - float(inputDict.get("resPenetration", 0.05))/100))
			# F42 = F30*E34/(SUM(E16:P16)*E5)+E6+E7
			outData["Solar"]["avgMonthlyBillSolarCus"] = outData["Solar"]["solarResSold"] * outData["BAU"]["effectiveResRate"] / (sum([monthlyNoConsumerServedSales[i][1] for i in range(12)]) * float(inputDict.get("resPenetration", 0.05))/100) + float(inputDict.get("customServiceCharge", 0))+float(inputDict.get("solarServiceCharge", 0))
			# F43 = (F27/(SUM(E16:P16)*E5))*E9
			outData["Solar"]["avgMonthlyBillSolarSolarCus"] = (outData["Solar"]["annualSolarGen"] / (sum([monthlyNoConsumerServedSales[i][1] for i in range(12)]) * float(inputDict.get("resPenetration", 0.05))/100)) * float(inputDict.get("solarLCoE", 0.07))
		else:
			outData["Solar"]["avgMonthlyBillNonSolarCus"] = 0
			outData["Solar"]["avgMonthlyBillSolarCus"] = 0
			outData["Solar"]["avgMonthlyBillSolarSolarCus"] = 0
		# Net Average Monthly Bill
		avgMonthlyBillSolarNet = outData["Solar"]["avgMonthlyBillSolarCus"] + outData["Solar"]["avgMonthlyBillSolarSolarCus"]
		outData["Solar"]["avgMonthlyBillSolarCus"] = avgMonthlyBillSolarNet
		# F45 = F63/F24, update after Form 7 model
		outData["Solar"]["costofService"] = 0
		## Form 7 Model
		# E46
		outData["Solar"]["powerProExpense"] = outData["BAU"]["powerProExpense"] = float(inputDict.get("powerProExpense", 0))
		# E47 != F47
		outData["BAU"]["costPurchasedPower"] = float(inputDict.get("costPurchasedPower", 0))
		# E48
		outData["Solar"]["transExpense"] = outData["BAU"]["transExpense"] = float(inputDict.get("transExpense", 0))
		# E49
		outData["Solar"]["distriExpenseO"] = outData["BAU"]["distriExpenseO"] = float(inputDict.get("distriExpenseO", 0))
		# E50
		outData["Solar"]["distriExpenseM"] = outData["BAU"]["distriExpenseM"] = float(inputDict.get("distriExpenseM", 0))
		# E51
		outData["Solar"]["customerAccountExpense"] = outData["BAU"]["customerAccountExpense"] = float(inputDict.get("customerAccountExpense", 0))
		# E52
		outData["Solar"]["customerServiceExpense"] = outData["BAU"]["customerServiceExpense"] = float(inputDict.get("customerServiceExpense", 0))
		# E53
		outData["Solar"]["salesExpense"] = outData["BAU"]["salesExpense"] = float(inputDict.get("salesExpense", 0))
		# E54
		outData["Solar"]["adminGeneralExpense"] = outData["BAU"]["adminGeneralExpense"] = float(inputDict.get("adminGeneralExpense", 0))
		# E56
		outData["Solar"]["depreAmortiExpense"] = outData["BAU"]["depreAmortiExpense"] = float(inputDict.get("depreAmortiExpense", 0))
		# E57
		outData["Solar"]["taxExpensePG"] = outData["BAU"]["taxExpensePG"] = float(inputDict.get("taxExpensePG", 0))
		# E58
		outData["Solar"]["taxExpense"] = outData["BAU"]["taxExpense"] = float(inputDict.get("taxExpense", 0))
		# E59
		outData["Solar"]["interestLongTerm"] = outData["BAU"]["interestLongTerm"] = float(inputDict.get("interestLongTerm", 0))
		# E60
		outData["Solar"]["interestConstruction"] = outData["BAU"]["interestConstruction"] = float(inputDict.get("interestConstruction", 0))
		# E61
		outData["Solar"]["interestExpense"] = outData["BAU"]["interestExpense"] = float(inputDict.get("interestExpense", 0))
		# E62
		outData["Solar"]["otherDeductions"] = outData["BAU"]["otherDeductions"] = float(inputDict.get("otherDeductions", 0))
		# E65
		outData["Solar"]["nonOpMarginInterest"] = outData["BAU"]["nonOpMarginInterest"] = float(inputDict.get("nonOpMarginInterest", 0))
		# E66
		outData["Solar"]["fundsUsedConstruc"] = outData["BAU"]["fundsUsedConstruc"] = float(inputDict.get("fundsUsedConstruc", 0))
		# E67
		outData["Solar"]["incomeEquityInvest"] = outData["BAU"]["incomeEquityInvest"] = float(inputDict.get("incomeEquityInvest", 0))
		# E68
		outData["Solar"]["nonOpMarginOther"] = outData["BAU"]["nonOpMarginOther"] = float(inputDict.get("nonOpMarginOther", 0))
		# E69
		outData["Solar"]["genTransCapCredits"] = outData["BAU"]["genTransCapCredits"] = float(inputDict.get("genTransCapCredits", 0))
		# E70
		outData["Solar"]["otherCapCreditsPatroDivident"] = outData["BAU"]["otherCapCreditsPatroDivident"] = float(inputDict.get("otherCapCreditsPatroDivident", 0))
		# E71
		outData["Solar"]["extraItems"] = outData["BAU"]["extraItems"] = float(inputDict.get("extraItems", 0))
		# Calculation
		# E45 = SUM(E20:P20)+E10
		outData["BAU"]["operRevPatroCap"] = sum([totalRevenue[i][1] for i in range(12)])+float(inputDict.get("otherElecRevenue", 0))
		# E55 = SUM(E46:E54)
		outData["BAU"]["totalOMExpense"] = float(inputDict.get("powerProExpense")) \
			+ float(inputDict.get("costPurchasedPower")) \
			+ float(inputDict.get("transExpense")) \
			+ float(inputDict.get("distriExpenseO")) \
			+ float(inputDict.get("distriExpenseM")) \
			+ float(inputDict.get("customerAccountExpense")) \
			+ float(inputDict.get("customerServiceExpense")) \
			+ float(inputDict.get("salesExpense"))  \
			+ float(inputDict.get("adminGeneralExpense"))
		# E63 = SUM(E55:E62)
		outData["BAU"]["totalCostElecService"] = outData["BAU"]["totalOMExpense"] \
			+ float(inputDict.get("depreAmortiExpense"))\
			+ float(inputDict.get("taxExpensePG"))\
			+ float(inputDict.get("taxExpense"))\
			+ float(inputDict.get("interestLongTerm"))\
			+ float(inputDict.get("interestExpense"))\
			+ float(inputDict.get("interestConstruction"))\
			+ outData["BAU"]["otherDeductions"]
		# E64 = E45-E63
		outData["BAU"]["patCapOperMargins"] = outData["BAU"]["operRevPatroCap"] - outData["BAU"]["totalCostElecService"]
		# E72 = SUM(E64:E71)
		outData["BAU"]["patCapital"] = outData["BAU"]["patCapOperMargins"]\
			+ float(inputDict.get("nonOpMarginInterest"))\
			+ float(inputDict.get("fundsUsedConstruc"))\
			+ float(inputDict.get("incomeEquityInvest"))\
			+ float(inputDict.get("nonOpMarginOther"))\
			+ float(inputDict.get("genTransCapCredits"))\
			+ float(inputDict.get("otherCapCreditsPatroDivident"))\
			+ float(inputDict.get("extraItems"))
		# F48 = E48-F27*E34+SUM(E16:P16)*E5*E7
		outData["Solar"]["operRevPatroCap"] = outData["BAU"]["operRevPatroCap"] - outData["BAU"]["effectiveResRate"]*outData["Solar"]["annualSolarGen"] + sum([monthlyNoConsumerServedSales[i][1] for i in range(12)])*float(inputDict.get("resPenetration", 0.05))/100*float(inputDict.get("solarServiceCharge", 0))
		# F47 = (F23)*E8
		inputDict["costofPower"] = float(inputDict.get("costPurchasedPower", 0)) /  float(inputDict.get("totalKWhPurchased", 0))
		outData["Solar"]["costPurchasedPower"] = outData["Solar"]["totalKWhPurchased"] * float(inputDict.get("costofPower", 0))
		inputDict["costofPower"] = round(inputDict["costofPower"],3)
		# F55 = SUM(F46:F54)
		outData["Solar"]["totalOMExpense"] = outData["Solar"]["powerProExpense"]\
			+ outData["Solar"]["costPurchasedPower"]\
			+ outData["Solar"]["transExpense"]\
			+ outData["Solar"]["distriExpenseO"]\
			+ outData["Solar"]["distriExpenseM"]\
			+ outData["Solar"]["customerAccountExpense"]\
			+ outData["Solar"]["customerServiceExpense"]\
			+ outData["Solar"]["salesExpense"]\
			+ outData["Solar"]["adminGeneralExpense"]
		# F63 = E63
		outData["Solar"]["totalCostElecService"] = outData["Solar"]["totalOMExpense"]\
			+ outData["Solar"]["depreAmortiExpense"]\
			+ outData["Solar"]["taxExpensePG"]\
			+ outData["Solar"]["taxExpense"]\
			+ outData["Solar"]["interestLongTerm"]\
			+ outData["Solar"]["interestConstruction"]\
			+ outData["Solar"]["interestExpense"]\
			+ outData["Solar"]["otherDeductions"]
		# F64 = F45 - F63
		outData["Solar"]["patCapOperMargins"] = outData["Solar"]["operRevPatroCap"] - outData["Solar"]["totalCostElecService"]
		# F72 = SUM(F64:F71)
		outData["Solar"]["patCapital"] = outData["Solar"]["patCapOperMargins"]\
			+ outData["Solar"]["nonOpMarginInterest"]\
			+ outData["Solar"]["fundsUsedConstruc"]\
			+ outData["Solar"]["incomeEquityInvest"]\
			+ outData["Solar"]["nonOpMarginOther"]\
			+ outData["Solar"]["genTransCapCredits"]\
			+ outData["Solar"]["otherCapCreditsPatroDivident"]\
			+ outData["Solar"]["extraItems"]
		# E37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71), update after Form 7 model
		outData["BAU"]["nonPowerCosts"] = outData["BAU"]["transExpense"] \
			+ outData["BAU"]["distriExpenseO"] \
			+ outData["BAU"]["distriExpenseM"] \
			+ outData["BAU"]["customerAccountExpense"] \
			+ outData["BAU"]["customerServiceExpense"] \
			+ outData["BAU"]["salesExpense"] \
			+ outData["BAU"]["adminGeneralExpense"] \
			+ outData["BAU"]["depreAmortiExpense"] \
			+ outData["BAU"]["taxExpensePG"] \
			+ outData["BAU"]["taxExpense"] \
			+ outData["BAU"]["interestLongTerm"] \
			+ outData["BAU"]["interestConstruction"] \
			+ outData["BAU"]["interestExpense"] \
			+ outData["BAU"]["otherDeductions"] \
			- (outData["BAU"]["nonOpMarginInterest"] \
			+ outData["BAU"]["fundsUsedConstruc"] \
			+ outData["BAU"]["incomeEquityInvest"] \
			+ outData["BAU"]["nonOpMarginOther"] \
			+ outData["BAU"]["genTransCapCredits"] \
			+ outData["BAU"]["otherCapCreditsPatroDivident"] \
			+ outData["BAU"]["extraItems"])
		# E42 = E63/E24, update after Form 7 model
		outData["BAU"]["costofService"] = outData["BAU"]["totalCostElecService"] / outData["BAU"]["totalKWhSales"]
		# F37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71) = E37, update after Form 7 model
		outData["Solar"]["nonPowerCosts"] = outData["BAU"]["nonPowerCosts"]
		# F42 = F63/F24, update after Form 7 model
		outData["Solar"]["costofService"] = outData["Solar"]["totalCostElecService"] / outData["Solar"]["totalKWhSales"]
		# Stdout/stderr.
		outData["stdout"] = "Success"
		outData["stderr"] = ""
		# Write the output.
		with open(pJoin(modelDir,"allOutputData.json"),"w") as outFile:
			json.dump(outData, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = dt.datetime.now()
		inputDict["runTime"] = str(dt.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	except:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
Esempio n. 17
0
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Pipeline: stage climate data into modelDir/gldContainer, strip any
	pre-existing climate objects from the feeder .omd, attach recorders,
	run GridLAB-D in the filesystem, aggregate its raw CSV output into a
	cleanOut dict, and write allOutputData.json / allInputData.json.
	On failure the run is cancelled and the traceback goes to stderr.txt.

	NOTE: Python 2 syntax (print statements) -- not runnable under Python 3.
	'''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	# Best-effort creation of the GridLAB-D working subdirectory; the bare
	# except deliberately ignores "directory already exists" on re-runs.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass
	try:
		feederName = inputDict["feederName1"]
		# NOTE(review): this local shadows any module-level `weather` import.
		weather = inputDict["weather"]
		if weather == "typical":
			# zipCodeToClimateName returns (climateName, latitude) here;
			# latforpvwatts is never used in this function.
			inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
			shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
				pJoin(modelDir, "gldContainer", "climate.tmy2"))
			startTime = datetime.datetime.now()
		else:
			#hack for testing
			# NOTE(review): hard-coded date range, station, and relative
			# output path -- test scaffolding, not per-run configuration.
			makeClimateCsv('2010-07-01', '2010-08-01', 'DFW', 'Output/Automated dsoSimSuite Test/gldContainer/weather.csv')
			startTime = datetime.datetime.now()
		# NOTE(review): unconditional re-assignment makes the two branch
		# assignments of startTime above redundant.
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		# (Python 2 .keys() returns a list, so deleting during iteration is safe.)
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
				if feederJson['tree'][key]['object'] == 'climate':
					del feederJson['tree'][key]
		#add weather objects and modules to .glm if there is no climate file in the omd file
		if weather == "historical":
			# Historical path: feed the generated weather.csv in through a
			# csv_reader object (requires the tape and climate modules).
			oldMax = feeder.getMaxKey(tree)
			tree[oldMax + 1] = {'omftype':'module', 'argument':'tape'}
			tree[oldMax + 2] = {'omftype':'module', 'argument':'climate'}
			tree[oldMax + 3] = {'object':'csv_reader', 'name':'weatherReader', 'filename':'weather.csv'}
			tree[oldMax + 4] = {'object':'climate', 'name':'exampleClimate', 'tmyfile':'weather.csv', 'reader':'weatherReader'}
		else:
			# Typical path: single climate object pointing at the staged TMY2.
			oldMax = feeder.getMaxKey(tree)
			tree[oldMax + 1] ={'object':'climate','name':'Climate','interpolate':'QUADRATIC', 'tmyfile':'climate.tmy2'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map:
		# one group_recorder per phase dumping hourly node voltages to
		# {a,b,c}VoltDump.csv for the time-traveling voltage chart.
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
			simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
			keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		# NOTE(review): only the '# timestamp' branch breaks out of the loop.
		# A key matching the elif does NOT break, so a later non-matching key
		# hits the else and clobbers timeStamps back to [] -- looks like a
		# latent bug that depends on dict iteration order; confirm.
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		# Aggregate each climate channel down to `level` resolution (sum for
		# cumulative quantities, max/avg for instantaneous ones).
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm (1 square meter = 10.76392 square feet)
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList			
		# Voltage Band
		# Meter voltages are halved -- presumably converting the 240V triplex
		# voltage_12 magnitude to a 120V nominal basis; confirm against the
		# VoltageJiggle recorder definition.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		# Fold each recorder CSV into the matching Consumption series.
		# vecPyth presumably combines real/imag columns into a magnitude
		# (element-wise sqrt(re^2 + im^2)); confirm against its definition.
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				# Swing-bus power flow; summed across multiple swing recorders.
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'): 	
				# Inverter DG: per-phase apparent power, summed over phases.
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Wind DG: |V| * |I| per phase from separate voltage/current
				# recorder columns, summed over phases.
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				# System losses: per-phase loss magnitudes summed over phases
				# and accumulated across the four loss collector files.
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				# NOTE(review): the zero-filled defaults below are immediately
				# overwritten by the recorder columns -- redundant.
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				# Same filename-to-key scheme and redundant zero-fill as the
				# Regulator branch above.
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		# If under 25% of tree objects are geo-tagged, fall back to a
		# synthetic (neato) layout for the voltage chart.
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		if latPerc < 0.25: doNeato = True
		else: doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		# days keeps 'YYYY-MM-DD' (first 10 chars), months 'YYYY-MM' (first 7).
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		# (`e` is unused; the traceback below captures the same information.)
		cancel(modelDir)	
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# NOTE(review): this tail runs on both success and failure, overwriting
	# the runTime written above with one measured from beginTime and
	# re-writing allInputData.json.
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	# Best-effort removal of the parent-process PID file.
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
Esempio n. 18
0
def runForeground(modelDir, test_mode=False):
    ''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
    with open(pJoin(modelDir, 'allInputData.json')) as f:
        inputDict = json.load(f)
    print("STARTING TO RUN", modelDir)
    beginTime = datetime.datetime.now()
    # Get prepare of data and clean workspace if re-run, If re-run remove all the data in the subfolders
    for dirs in os.listdir(modelDir):
        if os.path.isdir(pJoin(modelDir, dirs)):
            shutil.rmtree(pJoin(modelDir, dirs))
    # Get the names of the feeders from the .omd files:
    feederNames = [x[0:-4] for x in os.listdir(modelDir) if x.endswith(".omd")]
    for i, key in enumerate(feederNames):
        inputDict['feederName' + str(i + 1)] = feederNames[i]
    # Run GridLAB-D once for each feeder:
    for feederName in feederNames:
        try:
            os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
        except Exception as e:
            pass
        if not os.path.isdir(pJoin(modelDir, feederName)):
            os.makedirs(pJoin(modelDir,
                              feederName))  # create subfolders for feeders
        shutil.copy(pJoin(modelDir, feederName + ".omd"),
                    pJoin(modelDir, feederName, "feeder.omd"))
        inputDict["climateName"] = weather.zipCodeToClimateName(
            inputDict["zipCode"])
        shutil.copy(
            pJoin(_omfDir, "data", "Climate",
                  inputDict["climateName"] + ".tmy2"),
            pJoin(modelDir, feederName, "climate.tmy2"))
        try:
            startTime = datetime.datetime.now()
            with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
                feederJson = json.load(f)
            tree = feederJson["tree"]
            # Set up GLM with correct time and recorders:
            feeder.attachRecorders(tree, "Regulator", "object", "regulator")
            feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
            feeder.attachRecorders(tree, "Inverter", "object", "inverter")
            feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
            feeder.attachRecorders(tree, "CollectorVoltage", None, None)
            feeder.attachRecorders(tree, "Climate", "object", "climate")
            feeder.attachRecorders(tree, "OverheadLosses", None, None)
            feeder.attachRecorders(tree, "UndergroundLosses", None, None)
            feeder.attachRecorders(tree, "TriplexLosses", None, None)
            feeder.attachRecorders(tree, "TransformerLosses", None, None)
            feeder.groupSwingKids(tree)
            feeder.adjustTime(tree=tree,
                              simLength=float(inputDict["simLength"]),
                              simLengthUnits=inputDict["simLengthUnits"],
                              simStartDate=inputDict["simStartDate"])
            # RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
            rawOut = gridlabd.runInFilesystem(
                tree,
                attachments=feederJson["attachments"],
                keepFiles=True,
                workDir=pJoin(modelDir, feederName))
            cleanOut = {}
            # Std Err and Std Out
            cleanOut['stderr'] = rawOut['stderr']
            cleanOut['stdout'] = rawOut['stdout']
            # Time Stamps
            for key in rawOut:
                if '# timestamp' in rawOut[key]:
                    cleanOut['timeStamps'] = rawOut[key]['# timestamp']
                    break
                elif '# property.. timestamp' in rawOut[key]:
                    cleanOut['timeStamps'] = rawOut[key][
                        '# property.. timestamp']
                else:
                    cleanOut['timeStamps'] = []
            # Day/Month Aggregation Setup:
            stamps = cleanOut.get('timeStamps', [])
            level = inputDict.get('simLengthUnits', 'hours')
            # Climate
            for key in rawOut:
                if key.startswith('Climate_') and key.endswith('.csv'):
                    cleanOut['climate'] = {}
                    cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(
                        rawOut[key].get('rainfall'), sum, level)
                    cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(
                        rawOut[key].get('wind_speed'), avg, level)
                    cleanOut['climate']['Temperature (F)'] = hdmAgg(
                        rawOut[key].get('temperature'), max, level)
                    cleanOut['climate']['Snow Depth (in)'] = hdmAgg(
                        rawOut[key].get('snowdepth'), max, level)
                    cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(
                        rawOut[key].get('solar_direct'), sum, level)
            # Voltage Band
            if 'VoltageJiggle.csv' in rawOut:
                cleanOut['allMeterVoltages'] = {}
                cleanOut['allMeterVoltages']['Min'] = hdmAgg([
                    (i / 2)
                    for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']
                ], min, level)
                cleanOut['allMeterVoltages']['Mean'] = hdmAgg(
                    [(i / 2) for i in rawOut['VoltageJiggle.csv']
                     ['mean(voltage_12.mag)']], avg, level)
                cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([
                    (i / 2)
                    for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']
                ], avg, level)
                cleanOut['allMeterVoltages']['Max'] = hdmAgg([
                    (i / 2)
                    for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']
                ], max, level)
            cleanOut['allMeterVoltages']['stdDevPos'] = [
                (x + y / 2)
                for x, y in zip(cleanOut['allMeterVoltages']['Mean'],
                                cleanOut['allMeterVoltages']['StdDev'])
            ]
            cleanOut['allMeterVoltages']['stdDevNeg'] = [
                (x - y / 2)
                for x, y in zip(cleanOut['allMeterVoltages']['Mean'],
                                cleanOut['allMeterVoltages']['StdDev'])
            ]
            # Total # of meters
            count = 0
            with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
                for line in f:
                    if "\"objectType\": \"triplex_meter\"" in line:
                        count += 1
            # print "count=", count
            cleanOut['allMeterVoltages']['triplexMeterCount'] = float(count)
            # Power Consumption
            cleanOut['Consumption'] = {}
            # Set default value to be 0, avoiding missing value when computing Loads
            cleanOut['Consumption']['Power'] = [0] * int(
                inputDict["simLength"])
            cleanOut['Consumption']['Losses'] = [0] * int(
                inputDict["simLength"])
            cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
            for key in rawOut:
                if key.startswith('SwingKids_') and key.endswith('.csv'):
                    oneSwingPower = hdmAgg(
                        vecPyth(rawOut[key]['sum(power_in.real)'],
                                rawOut[key]['sum(power_in.imag)']), avg, level)
                    if 'Power' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['Power'] = oneSwingPower
                    else:
                        cleanOut['Consumption']['Power'] = vecSum(
                            oneSwingPower, cleanOut['Consumption']['Power'])
                elif key.startswith('Inverter_') and key.endswith('.csv'):
                    realA = rawOut[key]['power_A.real']
                    realB = rawOut[key]['power_B.real']
                    realC = rawOut[key]['power_C.real']
                    imagA = rawOut[key]['power_A.imag']
                    imagB = rawOut[key]['power_B.imag']
                    imagC = rawOut[key]['power_C.imag']
                    oneDgPower = hdmAgg(
                        vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
                               vecPyth(realC, imagC)), avg, level)
                    if 'DG' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['DG'] = oneDgPower
                    else:
                        cleanOut['Consumption']['DG'] = vecSum(
                            oneDgPower, cleanOut['Consumption']['DG'])
                elif key.startswith('Windmill_') and key.endswith('.csv'):
                    vrA = rawOut[key]['voltage_A.real']
                    vrB = rawOut[key]['voltage_B.real']
                    vrC = rawOut[key]['voltage_C.real']
                    viA = rawOut[key]['voltage_A.imag']
                    viB = rawOut[key]['voltage_B.imag']
                    viC = rawOut[key]['voltage_C.imag']
                    crB = rawOut[key]['current_B.real']
                    crA = rawOut[key]['current_A.real']
                    crC = rawOut[key]['current_C.real']
                    ciA = rawOut[key]['current_A.imag']
                    ciB = rawOut[key]['current_B.imag']
                    ciC = rawOut[key]['current_C.imag']
                    powerA = vecProd(vecPyth(vrA, viA), vecPyth(crA, ciA))
                    powerB = vecProd(vecPyth(vrB, viB), vecPyth(crB, ciB))
                    powerC = vecProd(vecPyth(vrC, viC), vecPyth(crC, ciC))
                    # HACK: multiply by negative one because turbine power sign is opposite all other DG:
                    oneDgPower = [
                        -1.0 * x for x in hdmAgg(
                            vecSum(powerA, powerB, powerC), avg, level)
                    ]
                    if 'DG' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['DG'] = oneDgPower
                    else:
                        cleanOut['Consumption']['DG'] = vecSum(
                            oneDgPower, cleanOut['Consumption']['DG'])
                elif key in [
                        'OverheadLosses.csv', 'UndergroundLosses.csv',
                        'TriplexLosses.csv', 'TransformerLosses.csv'
                ]:
                    realA = rawOut[key]['sum(power_losses_A.real)']
                    imagA = rawOut[key]['sum(power_losses_A.imag)']
                    realB = rawOut[key]['sum(power_losses_B.real)']
                    imagB = rawOut[key]['sum(power_losses_B.imag)']
                    realC = rawOut[key]['sum(power_losses_C.real)']
                    imagC = rawOut[key]['sum(power_losses_C.imag)']
                    oneLoss = hdmAgg(
                        vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
                               vecPyth(realC, imagC)), avg, level)
                    if 'Losses' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['Losses'] = oneLoss
                    else:
                        cleanOut['Consumption']['Losses'] = vecSum(
                            oneLoss, cleanOut['Consumption']['Losses'])
            # Aggregate up the timestamps:
            if level == 'days':
                cleanOut['timeStamps'] = aggSeries(stamps, stamps,
                                                   lambda x: x[0][0:10],
                                                   'days')
            elif level == 'months':
                cleanOut['timeStamps'] = aggSeries(stamps, stamps,
                                                   lambda x: x[0][0:7],
                                                   'months')
            # Write the output.
            with open(pJoin(modelDir, feederName, "allOutputData.json"),
                      "w") as outFile:
                json.dump(cleanOut, outFile, indent=4)
            # Update the runTime in the input file.
            endTime = datetime.datetime.now()
            inputDict["runTime"] = str(
                datetime.timedelta(seconds=int((endTime -
                                                startTime).total_seconds())))
            with open(pJoin(modelDir, feederName, "allInputData.json"),
                      "w") as inFile:
                json.dump(inputDict, inFile, indent=4)
            # Clean up the PID file.
            os.remove(pJoin(modelDir, feederName, "PID.txt"))
            print("DONE RUNNING GRIDLABMULTI", modelDir, feederName)
        except Exception as e:
            if test_mode == True:
                raise e
            print("MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName)
            cancel(pJoin(modelDir, feederName))
            with open(pJoin(modelDir, feederName, "stderr.txt"),
                      "a+") as stderrFile:
                traceback.print_exc(file=stderrFile)
    finishTime = datetime.datetime.now()
    inputDict["runTime"] = str(
        datetime.timedelta(seconds=int((finishTime -
                                        beginTime).total_seconds())))
    with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
        json.dump(inputDict, inFile, indent=4)
    # Integrate data into allOutputData.json, if error happens, cancel it
    try:
        output = {}
        output["failures"] = {}
        numOfFeeders = 0
        for root, dirs, files in os.walk(modelDir):
            # dump error info into dict
            if "stderr.txt" in files:
                with open(pJoin(root, "stderr.txt"), "r") as stderrFile:
                    tempString = stderrFile.read()
                    if "ERROR" in tempString or "FATAL" in tempString or "Traceback" in tempString:
                        output["failures"]["feeder_" +
                                           str(os.path.split(root)[-1])] = {
                                               "stderr": tempString
                                           }
                        continue
            # dump simulated data into dict
            if "allOutputData.json" in files:
                with open(pJoin(root, "allOutputData.json"),
                          "r") as feederOutputData:
                    numOfFeeders += 1
                    feederOutput = json.load(feederOutputData)
                    # TODO: a better feeder name
                    output["feeder_" + str(os.path.split(root)[-1])] = {}
                    output["feeder_" +
                           str(os.path.split(root)[-1]
                               )]["Consumption"] = feederOutput["Consumption"]
                    output["feeder_" + str(os.path.split(root)[-1])][
                        "allMeterVoltages"] = feederOutput["allMeterVoltages"]
                    output["feeder_" + str(os.path.split(
                        root)[-1])]["stderr"] = feederOutput["stderr"]
                    output["feeder_" + str(os.path.split(
                        root)[-1])]["stdout"] = feederOutput["stdout"]
                    # output[root] = {feederOutput["Consumption"], feederOutput["allMeterVoltages"], feederOutput["stdout"], feederOutput["stderr"]}
        output["numOfFeeders"] = numOfFeeders
        output["timeStamps"] = feederOutput.get("timeStamps", [])
        output["climate"] = feederOutput.get("climate", [])
        # Add feederNames to output so allInputData feederName changes don't cause output rendering to disappear.
        for key, feederName in inputDict.items():
            if 'feederName' in key:
                output[key] = feederName
        with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile:
            json.dump(output, outFile, indent=4)
        try:
            os.remove(pJoin(modelDir, "PPID.txt"))
        except:
            pass
        # Send email to user on model success.
        emailStatus = inputDict.get('emailStatus', 0)
        if (emailStatus == "on"):
            print("\n    EMAIL ALERT ON")
            email = session['user_id']
            try:
                with open("data/User/" + email + ".json") as f:
                    user = json.load(f)
                modelPath, modelName = pSplit(modelDir)
                message = "The model " + "<i>" + str(
                    modelName
                ) + "</i>" + " has successfully completed running. It ran for a total of " + str(
                    inputDict["runTime"]) + " seconds from " + str(
                        beginTime) + ", to " + str(finishTime) + "."
                return web.send_link(email, message, user)
            except Exception as e:
                print("ERROR: Failed sending model status email to user: "******", with exception: \n", e)
    except Exception as e:
        # If input range wasn't valid delete output, write error to disk.
        cancel(modelDir)
        thisErr = traceback.format_exc()
        print('ERROR IN MODEL', modelDir, thisErr)
        inputDict['stderr'] = thisErr
        with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile:
            errorFile.write(thisErr)
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
        # Send email to user on model failure.
        email = 'NoEmail'
        try:
            email = session['user_id']
            with open("data/User/" + email + ".json") as f:
                user = json.load(f)
            modelPath, modelName = pSplit(modelDir)
            message = "The model " + "<i>" + str(
                modelName
            ) + "</i>" + " has failed to complete running. It ran for a total of " + str(
                inputDict["runTime"]) + " seconds from " + str(
                    beginTime) + ", to " + str(finishTime) + "."
            return web.send_link(email, message, user)
        except Exception as e:
            print("Failed sending model status email to user: "******", with exception: \n", e)
Esempio n. 19
0
def runForeground(modelDir, inputDict, fs):
    """Run the GridLAB-D multi-feeder model in its directory, blocking until done.

    WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

    Args:
        modelDir: local working directory for this model run; all per-feeder
            subfolders inside it are wiped and recreated.
        inputDict: model inputs. Keys starting with "feederName" select the
            feeders to simulate; each value has the form
            "feederDir___feederName". Also read: zipCode, simLength,
            simLengthUnits, simStartDate. Mutated in place: climateName, runTime.
        fs: project filesystem abstraction used to pull feeder JSON and TMY2
            climate data into the local working directory.

    Side effects: deletes every subdirectory of modelDir, writes per-feeder
    feeder.json / climate.tmy2 / allOutputData.json / allInputData.json,
    removes PID.txt on success, and appends a traceback to stderr.txt on crash.
    Returns None.
    """
    print "STARTING TO RUN", modelDir
    beginTime = datetime.datetime.now()
    feederList = []
    # Clean the workspace on re-run: remove all per-feeder subfolders so
    # stale outputs from a previous run cannot leak into this one.
    for dirs in os.listdir(modelDir):
        if os.path.isdir(pJoin(modelDir, dirs)):
            shutil.rmtree(pJoin(modelDir, dirs))
    # Get each feeder, prepare data in separate folders, and run there.
    # Sorting by value keeps feeder processing order deterministic.
    for key in sorted(inputDict, key=inputDict.get):
        if key.startswith("feederName"):
            feederDir, feederName = inputDict[key].split("___")
            feederList.append(feederName)
            try:
                # Best-effort removal of old output both locally and in fs.
                os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
                fs.remove(pJoin(modelDir, feederName, "allOutputData.json"))
            except Exception, e:
                # NOTE(review): swallows every exception, not just "file
                # missing" — deliberate best-effort cleanup, but it would
                # also hide permission errors.
                pass
            if not os.path.isdir(pJoin(modelDir, feederName)):
                # create subfolders for feeders
                os.makedirs(pJoin(modelDir, feederName))

            # Pull the feeder definition and matching climate file from the
            # project filesystem into the local working folder.
            fs.export_from_fs_to_local(
                pJoin("data", "Feeder", feederDir, feederName + ".json"), pJoin(modelDir, feederName, "feeder.json")
            )
            inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"], fs)
            fs.export_from_fs_to_local(
                pJoin("data", "Climate", inputDict["climateName"] + ".tmy2"),
                pJoin(modelDir, feederName, "climate.tmy2"),
            )
            try:
                startTime = datetime.datetime.now()
                # NOTE(review): open() without a with-block leaks the file
                # handle until GC (harmless in CPython, still untidy).
                feederJson = json.load(open(pJoin(modelDir, feederName, "feeder.json")))
                tree = feederJson["tree"]
                # Set up GLM with correct time and recorders:
                feeder.attachRecorders(tree, "Regulator", "object", "regulator")
                feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
                feeder.attachRecorders(tree, "Inverter", "object", "inverter")
                feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
                feeder.attachRecorders(tree, "CollectorVoltage", None, None)
                feeder.attachRecorders(tree, "Climate", "object", "climate")
                feeder.attachRecorders(tree, "OverheadLosses", None, None)
                feeder.attachRecorders(tree, "UndergroundLosses", None, None)
                feeder.attachRecorders(tree, "TriplexLosses", None, None)
                feeder.attachRecorders(tree, "TransformerLosses", None, None)
                feeder.groupSwingKids(tree)
                feeder.adjustTime(
                    tree=tree,
                    simLength=float(inputDict["simLength"]),
                    simLengthUnits=inputDict["simLengthUnits"],
                    simStartDate=inputDict["simStartDate"],
                )
                if "attachments" in feederJson:
                    attachments = feederJson["attachments"]
                else:
                    attachments = []
                # RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
                rawOut = gridlabd.runInFilesystem(
                    tree, attachments=attachments, keepFiles=True, workDir=pJoin(modelDir, feederName)
                )
                cleanOut = {}
                # Std Err and Std Out
                cleanOut["stderr"] = rawOut["stderr"]
                cleanOut["stdout"] = rawOut["stdout"]
                # Time Stamps: find the first recorder CSV carrying a
                # timestamp column.
                # NOTE(review): the elif branch does not break and the else
                # branch resets timeStamps to [] on every non-matching key,
                # so the final value depends on dict iteration order —
                # looks like a latent bug; confirm against other copies of
                # this loop in the file.
                for key in rawOut:
                    if "# timestamp" in rawOut[key]:
                        cleanOut["timeStamps"] = rawOut[key]["# timestamp"]
                        break
                    elif "# property.. timestamp" in rawOut[key]:
                        cleanOut["timeStamps"] = rawOut[key]["# property.. timestamp"]
                    else:
                        cleanOut["timeStamps"] = []
                # Day/Month Aggregation Setup:
                stamps = cleanOut.get("timeStamps", [])
                level = inputDict.get("simLengthUnits", "hours")
                # Climate: aggregate each recorded weather channel to the
                # chosen resolution (sum for totals, avg/max for rates).
                for key in rawOut:
                    if key.startswith("Climate_") and key.endswith(".csv"):
                        cleanOut["climate"] = {}
                        cleanOut["climate"]["Rain Fall (in/h)"] = hdmAgg(
                            rawOut[key].get("rainfall"), sum, level, stamps
                        )
                        cleanOut["climate"]["Wind Speed (m/s)"] = hdmAgg(
                            rawOut[key].get("wind_speed"), avg, level, stamps
                        )
                        cleanOut["climate"]["Temperature (F)"] = hdmAgg(
                            rawOut[key].get("temperature"), max, level, stamps
                        )
                        cleanOut["climate"]["Snow Depth (in)"] = hdmAgg(
                            rawOut[key].get("snowdepth"), max, level, stamps
                        )
                        cleanOut["climate"]["Direct Insolation (W/m^2)"] = hdmAgg(
                            rawOut[key].get("solar_direct"), sum, level, stamps
                        )
                # Voltage Band: voltage_12 is the 240V leg; halving gives the
                # per-leg 120V reading — TODO confirm against recorder setup.
                if "VoltageJiggle.csv" in rawOut:
                    cleanOut["allMeterVoltages"] = {}
                    cleanOut["allMeterVoltages"]["Min"] = hdmAgg(
                        [float(i / 2) for i in rawOut["VoltageJiggle.csv"]["min(voltage_12.mag)"]], min, level, stamps
                    )
                    cleanOut["allMeterVoltages"]["Mean"] = hdmAgg(
                        [float(i / 2) for i in rawOut["VoltageJiggle.csv"]["mean(voltage_12.mag)"]], avg, level, stamps
                    )
                    cleanOut["allMeterVoltages"]["StdDev"] = hdmAgg(
                        [float(i / 2) for i in rawOut["VoltageJiggle.csv"]["std(voltage_12.mag)"]], avg, level, stamps
                    )
                    cleanOut["allMeterVoltages"]["Max"] = hdmAgg(
                        [float(i / 2) for i in rawOut["VoltageJiggle.csv"]["max(voltage_12.mag)"]], max, level, stamps
                    )
                # Power Consumption
                cleanOut["Consumption"] = {}
                # Set default value to be 0, avoiding missing value when
                # computing Loads
                cleanOut["Consumption"]["Power"] = [0] * int(inputDict["simLength"])
                cleanOut["Consumption"]["Losses"] = [0] * int(inputDict["simLength"])
                cleanOut["Consumption"]["DG"] = [0] * int(inputDict["simLength"])
                # NOTE(review): this inner `key` shadows the outer feeder-key
                # loop variable; safe only because the outer loop does not use
                # `key` again after this point.
                for key in rawOut:
                    if key.startswith("SwingKids_") and key.endswith(".csv"):
                        # Substation power: magnitude of complex power_in.
                        oneSwingPower = hdmAgg(
                            vecPyth(rawOut[key]["sum(power_in.real)"], rawOut[key]["sum(power_in.imag)"]),
                            avg,
                            level,
                            stamps,
                        )
                        if "Power" not in cleanOut["Consumption"]:
                            cleanOut["Consumption"]["Power"] = oneSwingPower
                        else:
                            cleanOut["Consumption"]["Power"] = vecSum(oneSwingPower, cleanOut["Consumption"]["Power"])
                    elif key.startswith("Inverter_") and key.endswith(".csv"):
                        # Inverter DG: sum per-phase apparent power magnitudes.
                        realA = rawOut[key]["power_A.real"]
                        realB = rawOut[key]["power_B.real"]
                        realC = rawOut[key]["power_C.real"]
                        imagA = rawOut[key]["power_A.imag"]
                        imagB = rawOut[key]["power_B.imag"]
                        imagC = rawOut[key]["power_C.imag"]
                        oneDgPower = hdmAgg(
                            vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB), vecPyth(realC, imagC)),
                            avg,
                            level,
                            stamps,
                        )
                        if "DG" not in cleanOut["Consumption"]:
                            cleanOut["Consumption"]["DG"] = oneDgPower
                        else:
                            cleanOut["Consumption"]["DG"] = vecSum(oneDgPower, cleanOut["Consumption"]["DG"])
                    elif key.startswith("Windmill_") and key.endswith(".csv"):
                        # Wind DG: |V| * |I| per phase from recorded V/I pairs.
                        vrA = rawOut[key]["voltage_A.real"]
                        vrB = rawOut[key]["voltage_B.real"]
                        vrC = rawOut[key]["voltage_C.real"]
                        viA = rawOut[key]["voltage_A.imag"]
                        viB = rawOut[key]["voltage_B.imag"]
                        viC = rawOut[key]["voltage_C.imag"]
                        crB = rawOut[key]["current_B.real"]
                        crA = rawOut[key]["current_A.real"]
                        crC = rawOut[key]["current_C.real"]
                        ciA = rawOut[key]["current_A.imag"]
                        ciB = rawOut[key]["current_B.imag"]
                        ciC = rawOut[key]["current_C.imag"]
                        powerA = vecProd(vecPyth(vrA, viA), vecPyth(crA, ciA))
                        powerB = vecProd(vecPyth(vrB, viB), vecPyth(crB, ciB))
                        powerC = vecProd(vecPyth(vrC, viC), vecPyth(crC, ciC))
                        # HACK: multiply by negative one because turbine power
                        # sign is opposite all other DG:
                        oneDgPower = [-1.0 * x for x in hdmAgg(vecSum(powerA, powerB, powerC), avg, level, stamps)]
                        if "DG" not in cleanOut["Consumption"]:
                            cleanOut["Consumption"]["DG"] = oneDgPower
                        else:
                            cleanOut["Consumption"]["DG"] = vecSum(oneDgPower, cleanOut["Consumption"]["DG"])
                    elif key in [
                        "OverheadLosses.csv",
                        "UndergroundLosses.csv",
                        "TriplexLosses.csv",
                        "TransformerLosses.csv",
                    ]:
                        # Accumulate losses across all four loss collectors.
                        realA = rawOut[key]["sum(power_losses_A.real)"]
                        imagA = rawOut[key]["sum(power_losses_A.imag)"]
                        realB = rawOut[key]["sum(power_losses_B.real)"]
                        imagB = rawOut[key]["sum(power_losses_B.imag)"]
                        realC = rawOut[key]["sum(power_losses_C.real)"]
                        imagC = rawOut[key]["sum(power_losses_C.imag)"]
                        oneLoss = hdmAgg(
                            vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB), vecPyth(realC, imagC)),
                            avg,
                            level,
                            stamps,
                        )
                        if "Losses" not in cleanOut["Consumption"]:
                            cleanOut["Consumption"]["Losses"] = oneLoss
                        else:
                            cleanOut["Consumption"]["Losses"] = vecSum(oneLoss, cleanOut["Consumption"]["Losses"])
                # Aggregate up the timestamps: keep the date (days) or
                # year-month (months) prefix of the first stamp in each bucket.
                if level == "days":
                    cleanOut["timeStamps"] = aggSeries(stamps, stamps, lambda x: x[0][0:10], "days")
                elif level == "months":
                    cleanOut["timeStamps"] = aggSeries(stamps, stamps, lambda x: x[0][0:7], "months")
                # Write the output.
                with open(pJoin(modelDir, feederName, "allOutputData.json"), "w") as outFile:
                    json.dump(cleanOut, outFile, indent=4)
                # Update the runTime in the input file.
                endTime = datetime.datetime.now()
                inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
                with open(pJoin(modelDir, feederName, "allInputData.json"), "w") as inFile:
                    json.dump(inputDict, inFile, indent=4)
                # Clean up the PID file.
                os.remove(pJoin(modelDir, feederName, "PID.txt"))
                print "DONE RUNNING GRIDLABMULTI", modelDir, feederName
            except Exception as e:
                # Any failure for this feeder: cancel the run, log to the
                # feeder's stderr.txt, then continue with the next feeder.
                print "MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName
                cancel(pJoin(modelDir, feederName))
                with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
                    traceback.print_exc(file=stderrFile)
Esempio n. 20
0
def work(modelDir, inputDict):
	feederName = inputDict["feederName1"]
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
	tree = feederJson["tree"]
	# tree[feeder.getMaxKey(tree)+1] = {'object':'capacitor','control':'VOLT','phases':'ABCN','name':'CAPTEST','parent':'tm_1','capacitor_A':'0.10 MVAr','capacitor_B':'0.10 MVAr','capacitor_C':'0.10 MVAr','time_delay':'300.0','nominal_voltage':'2401.7771','voltage_set_high':'2350.0','voltage_set_low':'2340.0','switchA':'CLOSED','switchB':'CLOSED','switchC':'CLOSED','control_level':'INDIVIDUAL','phases_connected':'ABCN','dwell_time':'0.0','pt_phases':'ABCN'}
	# Set up GLM with correct time and recorders:
	feeder.attachRecorders(tree, "Regulator", "object", "regulator")
	feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
	feeder.attachRecorders(tree, "Inverter", "object", "inverter")
	feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
	feeder.attachRecorders(tree, "CollectorVoltage", None, None)
	feeder.attachRecorders(tree, "Climate", "object", "climate")
	feeder.attachRecorders(tree, "OverheadLosses", None, None)
	feeder.attachRecorders(tree, "UndergroundLosses", None, None)
	feeder.attachRecorders(tree, "TriplexLosses", None, None)
	feeder.attachRecorders(tree, "TransformerLosses", None, None)
	feeder.groupSwingKids(tree)

	# System check - linux doesn't support newer GridLAB-D versions
	if sys.platform == 'linux2':
		pass
	else:
		# print feeder.getMaxKey(tree)
		# tree[14,20,27,28,47] empty for UCS Egan, add climate object to tree[14]
		# HACK: tree[10:19] is empty
		tree[11] = {'omftype':'#include', 'argument':'\"hot_water_demand.glm\"'}
		tree[12] = {'omftype':'#include', 'argument':'\"lock_mode_schedule.glm\"'}
		tree[13] = {'omftype':'#include', 'argument':'\"control_priority_schedule.glm\"'}
		# Attach frequency player
		tree[14] = {'omftype':'class player', 'argument':'{double value;}'}
		tree[feeder.getMaxKey(tree)+1] = {'object':'player', 'file':'frequency.PLAYER', 'property':'value', 'name':'frequency', 'loop':0}
		# Set up GridBallast Controls
		totalWH = 0
		totalZIP = 0
		gbWH = 0
		gbZIP = 0
		for key in tree.keys():
			# Waterheater Controller properties
			if ('name' in tree[key]) and (tree[key].get('object') == 'waterheater'):
		 		totalWH += 1
	 			gbWH += 1
	 			# Frequency control parameters
	 			tree[key]['enable_freq_control'] = 'true'
	 			tree[key]['measured_frequency'] = 'frequency.value'
	 			tree[key]['freq_lowlimit'] = 59
	 			tree[key]['freq_uplimit'] = 61
	 			tree[key]['heat_mode'] = 'ELECTRIC'
	 			# tree[key]['average_delay_time'] = 60
	 			# Voltage control parameters
	 			# tree[key]['enable_volt_control'] = 'true'
	 			# tree[key]['volt_lowlimit'] = 240.4
	 			# tree[key]['volt_uplimit'] = 241.4
	 			# Lock Mode parameters
	 			# tree[key]['enable_lock'] = 'temp_lock_enable'
	 			# tree[key]['lock_STATUS'] = 'temp_lock_status'
	 			# Controller Priority: a.lock, b.freq, c.volt, d.therm
	 			tree[key]['controller_priority'] = 3214 #default:therm>lock>freq>volt
	 			# tree[key]['controller_priority'] = 1423 #freq>therm>volt>lock
	 			# tree[key]['controller_priority'] = 'control_priority'
		 		# fix waterheater property demand to water_demand for newer GridLAB-D versions
		 		if 'demand' in tree[key]:
		 			# tree[key]['water_demand'] = tree[key]['demand']
		 			tree[key]['water_demand'] = 'weekday_hotwater*1'
		 			del tree[key]['demand']
			# ZIPload Controller properties
			if ('name' in tree[key]) and (tree[key].get('object') == 'ZIPload'):
		 		totalZIP += 1
				if tree[key]['name'].startswith('responsive'):
		 			gbZIP += 1
			 		# Frequency control parameters
		 			tree[key]['enable_freq_control'] = 'true'
		 			tree[key]['measured_frequency'] = 'frequency.value'
		 			tree[key]['freq_lowlimit'] = 59
		 			tree[key]['freq_uplimit'] = 61
		 			# tree[key]['average_delay_time'] = 60
		 			# Voltage control parameters
		 			# tree[key]['enable_volt_control'] = 'true'
		 			# tree[key]['volt_lowlimit'] = 240.4
		 			# tree[key]['volt_uplimit'] = 241.4
		 			# Lock Mode parameters
		 			# tree[key]['enable_lock'] = 'temp_lock_enable'
		 			# tree[key]['lock_STATUS'] = 'temp_lock_status'
		 			tree[key]['controller_priority'] = 4321 #default:lock>freq>volt>therm
		 			# tree[key]['controller_priority'] = 2431 #freq>volt>lock>therm
		 			# tree[key]['groupid'] = 'fan'

	# Attach collector for total network load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=triplex_meter"', 'property':'sum(measured_real_power)', 'interval':60, 'file':'allMeterPower.csv'}
	# Attach collector for total waterheater load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=waterheater"', 'property':'sum(actual_load)', 'interval':60, 'file':'allWaterheaterLoad.csv'}
	# Attach collector for total ZIPload power/load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=ZIPload"', 'property':'sum(base_power)', 'interval':60, 'file':'allZIPloadPower.csv'}
	# Attach recorder for each ZIPload power/load
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'base_power', 'interval':60, 'file':'eachZIPloadPower.csv'}
	# Attach recorder for all ZIPloads demand_rate
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'demand_rate', 'interval':60, 'file':'allZIPloadDemand.csv'}
	# Attach recorder for waterheaters on/off
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'is_waterheater_on', 'interval':60, 'file':'allWaterheaterOn.csv'}
	# Attach recorder for waterheater tank temperatures
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'temperature', 'interval':60, 'file':'allWaterheaterTemp.csv'}
	
	# Attach recorders for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':60}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach recorders for system voltage map, triplex:
	stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':60}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'nVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# And get meters for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':60}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'mVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
		simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
	# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
	rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
		keepFiles=True, workDir=pJoin(modelDir))
	outData = {}
	# Std Err and Std Out
	outData['stderr'] = rawOut['stderr']
	outData['stdout'] = rawOut['stdout']
	# Time Stamps
	for key in rawOut:
		if '# timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# timestamp']
			break
		elif '# property.. timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# property.. timestamp']
		else:
			outData['timeStamps'] = []
	# Day/Month Aggregation Setup:
	stamps = outData.get('timeStamps',[])
	level = inputDict.get('simLengthUnits','hours')
	# Climate
	for key in rawOut:
		if key.startswith('Climate_') and key.endswith('.csv'):
			outData['climate'] = {}
			outData['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
			outData['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
			outData['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
			outData['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
			outData['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			#outData['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
			climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
			#converting W/sf to W/sm
			climateWbySMList= [x*10.76392 for x in climateWbySFList]
			outData['climate']['Global Horizontal (W/sm)']=climateWbySMList			
	# Voltage Band
	if 'VoltageJiggle.csv' in rawOut:
		outData['allMeterVoltages'] = {}
		outData['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
		outData['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
	# Power Consumption
	outData['Consumption'] = {}
	# Set default value to be 0, avoiding missing value when computing Loads
	outData['Consumption']['Power'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['DG'] = [0] * int(inputDict["simLength"])
	for key in rawOut:
		if key.startswith('SwingKids_') and key.endswith('.csv'):
			oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
			if 'Power' not in outData['Consumption']:
				outData['Consumption']['Power'] = oneSwingPower
			else:
				outData['Consumption']['Power'] = vecSum(oneSwingPower,outData['Consumption']['Power'])
		elif key.startswith('Inverter_') and key.endswith('.csv'): 	
			realA = rawOut[key]['power_A.real']
			realB = rawOut[key]['power_B.real']
			realC = rawOut[key]['power_C.real']
			imagA = rawOut[key]['power_A.imag']
			imagB = rawOut[key]['power_B.imag']
			imagC = rawOut[key]['power_C.imag']
			oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key.startswith('Windmill_') and key.endswith('.csv'):
			vrA = rawOut[key]['voltage_A.real']
			vrB = rawOut[key]['voltage_B.real']
			vrC = rawOut[key]['voltage_C.real']
			viA = rawOut[key]['voltage_A.imag']
			viB = rawOut[key]['voltage_B.imag']
			viC = rawOut[key]['voltage_C.imag']
			crB = rawOut[key]['current_B.real']
			crA = rawOut[key]['current_A.real']
			crC = rawOut[key]['current_C.real']
			ciA = rawOut[key]['current_A.imag']
			ciB = rawOut[key]['current_B.imag']
			ciC = rawOut[key]['current_C.imag']
			powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
			powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
			powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
			oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
			realA = rawOut[key]['sum(power_losses_A.real)']
			imagA = rawOut[key]['sum(power_losses_A.imag)']
			realB = rawOut[key]['sum(power_losses_B.real)']
			imagB = rawOut[key]['sum(power_losses_B.imag)']
			realC = rawOut[key]['sum(power_losses_C.real)']
			imagC = rawOut[key]['sum(power_losses_C.imag)']
			oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'Losses' not in outData['Consumption']:
				outData['Consumption']['Losses'] = oneLoss
			else:
				outData['Consumption']['Losses'] = vecSum(oneLoss,outData['Consumption']['Losses'])
		elif key.startswith('Regulator_') and key.endswith('.csv'):
			#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
			regName=""
			regName = key
			newkey=regName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapA'] = rawOut[key]['tap_A']
			outData[newkey]['RegTapB'] = rawOut[key]['tap_B']
			outData[newkey]['RegTapC'] = rawOut[key]['tap_C']
			outData[newkey]['RegPhases'] = rawOut[key]['phases'][0]
		elif key.startswith('Capacitor_') and key.endswith('.csv'):
			capName=""
			capName = key
			newkey=capName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1A'] = rawOut[key]['switchA']
			outData[newkey]['Cap1B'] = rawOut[key]['switchB']
			outData[newkey]['Cap1C'] = rawOut[key]['switchC']
			outData[newkey]['CapPhases'] = rawOut[key]['phases'][0]

	# Print gridBallast Outputs to allOutputData.json
	outData['gridBallast'] = {}
	if 'allMeterPower.csv' in rawOut:
		outData['gridBallast']['totalNetworkLoad'] = [x / 1000 for x in rawOut.get('allMeterPower.csv')['sum(measured_real_power)']] #Convert W to kW
	if ('allZIPloadPower.csv' in rawOut) and ('allWaterheaterLoad.csv' in rawOut):
		outData['gridBallast']['availabilityMagnitude'] = [x[0] + x[1] for x in zip(rawOut.get('allWaterheaterLoad.csv')['sum(actual_load)'], rawOut.get('allZIPloadPower.csv')['sum(base_power)'])]
	if 'allZIPloadDemand.csv' in rawOut:
		outData['gridBallast']['ZIPloadDemand'] = {}
		for key in rawOut['allZIPloadDemand.csv']:
			if (key.startswith('ZIPload')) or (key.startswith('responsive')) or (key.startswith('unresponsive')):
				outData['gridBallast']['ZIPloadDemand'][key] = rawOut.get('allZIPloadDemand.csv')[key]
	if 'eachZIPloadPower.csv' in rawOut:
				outData['gridBallast']['ZIPloadPower'] = {}
				for key in rawOut['eachZIPloadPower.csv']:
					if (key.startswith('ZIPload')) or (key.startswith('responsive')) or (key.startswith('unresponsive')):
						outData['gridBallast']['ZIPloadPower'][key] = rawOut.get('eachZIPloadPower.csv')[key]
	if 'allWaterheaterOn.csv' in rawOut:
		outData['gridBallast']['waterheaterOn'] = {}
		for key in rawOut['allWaterheaterOn.csv']:
			if (key.startswith('waterheater')) or (key.startswith('waterHeater')):
				outData['gridBallast']['waterheaterOn'][key] = rawOut.get('allWaterheaterOn.csv')[key]
	if 'allWaterheaterTemp.csv' in rawOut:
		outData['gridBallast']['waterheaterTemp'] = {}
		for key in rawOut['allWaterheaterTemp.csv']:
			if (key.startswith('waterheater')) or (key.startswith('waterHeater')):
				outData['gridBallast']['waterheaterTemp'][key] = rawOut.get('allWaterheaterTemp.csv')[key]
	# System check - linux doesn't support newer GridLAB-D versions
	if sys.platform == 'linux2':
		pass
	else:
		outData['gridBallast']['penetrationLevel'] = 100*(gbWH+gbZIP)/(totalWH+totalZIP)
		# Frequency Player
		inArray = feederJson['attachments']['frequency.PLAYER'].split('\n')
		tempArray = []
		for each in inArray:
			x = each.split(',')
			y = float(x[1])
			tempArray.append(y)
		outData['frequencyPlayer'] = tempArray
	# EventTime calculations
	eventTime = inputDict['eventTime']
	eventLength = inputDict['eventLength'].split(':')
	eventDuration = datetime.timedelta(hours=int(eventLength[0]), minutes=int(eventLength[1]))
	eventStart = datetime.datetime.strptime(eventTime, '%Y-%m-%d %H:%M')
	eventEnd = eventStart + eventDuration
	outData['gridBallast']['eventStart'] = str(eventStart)
	outData['gridBallast']['eventEnd'] = str(eventEnd)
	outData['gridBallast']['xMin'] = str(eventStart - datetime.timedelta(minutes=30))
	outData['gridBallast']['xMax'] = str(eventEnd + datetime.timedelta(minutes=30))
	# Convert string to date
	# HACK: remove timezones, inconsistency in matching format
	timeStampsDebug = [x[:19] for x in outData['timeStamps']]
	dateTimeStamps = [datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S') for x in timeStampsDebug]	
	eventEndIdx =  dateTimeStamps.index(eventEnd)
	# Recovery Time
	whOn = outData['gridBallast']['waterheaterOn']
	whOnList = whOn.values()
	whOnZip = zip(*whOnList)
	whOnSum = [sum(x) for x in whOnZip]
	anyOn = [x > 0 for x in whOnSum]
	tRecIdx = anyOn.index(True, eventEndIdx)
	tRec = dateTimeStamps[tRecIdx]
	recoveryTime = tRec - eventEnd
	outData['gridBallast']['recoveryTime'] = str(recoveryTime)
	# Waterheaters Off-Duration
	offDuration = tRec - eventStart
	outData['gridBallast']['offDuration'] = str(offDuration)
	# Reserve Magnitude (RM)
	availMag = outData['gridBallast']['availabilityMagnitude']
	totalNetLoad = outData['gridBallast']['totalNetworkLoad']
	availPerc = [100 * x[0]/x[1] for x in zip(availMag,totalNetLoad)]
	outData['gridBallast']['availabilityPercent'] = availPerc
	outData['gridBallast']['rm'] = [100 - x for x in availPerc]
	# Average RM during event
	eventRM = [100 - x[1] for x in zip(dateTimeStamps, availPerc) if (x[0] == eventStart) or (x[0] == eventEnd)]
	outData['gridBallast']['rmAvg'] = np.mean(eventRM)
	# Reserve Magnitude Variability Tolerance (RMVT)
	outData['gridBallast']['rmvt'] = np.std(eventRM)
	# Availability
	rmt = 7
	available = [x[1] > rmt for x in zip(dateTimeStamps, availPerc) if (x[0] < eventStart) or (x[0] > eventEnd)]
	outData['gridBallast']['availability'] = 100.0 * sum(available) / (int(inputDict['simLength']) - int(eventLength[1]) - 1)
	# Waterheater Temperature Drop calculations
	whTemp = outData['gridBallast']['waterheaterTemp']
	whTempList = whTemp.values()
	whTempZip = zip(*whTempList)
	whTempDrops = []
	LOWER_LIMIT_TEMP = 110 # Used for calculating quality of service. Typical hot shower temp = 105 F.
	for time in whTempZip:
		tempDrop = sum([t < LOWER_LIMIT_TEMP for t in time])
		whTempDrops.append(tempDrop)
	outData['gridBallast']['waterheaterTempDrops'] = whTempDrops
	# ZIPload calculations for Availability and QoS
	zPower = outData['gridBallast']['ZIPloadPower']
	zPowerList = zPower.values()
	zPowerZip = zip(*zPowerList)
	zDemand = outData['gridBallast']['ZIPloadDemand']
	zDemandList  = zDemand.values()
	zDemandZip = zip(*zDemandList)
	zDrops = []
	for x, y in zip(zPowerZip,zDemandZip):
		zDrop = 0
		for i in range(len(x)):
			if (x[i] == 0) and (y[i] > 0):
				zDrop += 1
		zDrops.append(zDrop)
	outData['gridBallast']['qualityDrops'] = [x + y for x, y in zip(zDrops, whTempDrops)]

	# What percentage of our keys have lat lon data?
	latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
	latPerc = 1.0*len(latKeys)/len(tree)
	if latPerc < 0.25:
		doNeato = True
	else:
		doNeato = False
	# Generate the frames for the system voltage map time traveling chart.
	genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
	outData['genTime'] = genTime
	# Aggregate up the timestamps:
	if level=='days':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
	elif level=='months':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
	return outData
Esempio n. 21
0
def work(modelDir, inputDict):
	''' Run a PVWatts v1 (SAM "pvwattsv1" module) simulation in modelDir.

	Copies the zip-code-matched TMY2 climate file into modelDir, feeds user
	inputs into the NREL SAM Simulation Core, runs the PV model, and returns
	a dict of chart-ready output series (timestamps, geodata, climate, power).
	'''
	# Copy specific climate data into model directory.
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), 
		pJoin(modelDir, "climate.tmy2"))
	# Set up SAM data structures.
	ssc = nrelsam2013.SSCAPI()
	dat = ssc.ssc_data_create()
	# Required user inputs.
	ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
	ssc.ssc_data_set_number(dat, "system_size", float(inputDict["systemSize"]))
	# Efficiencies arrive as whole percentages; SAM wants 0-1 fractions.
	ssc.ssc_data_set_number(dat, "derate", 0.01 * float(inputDict["nonInverterEfficiency"]))
	ssc.ssc_data_set_number(dat, "track_mode", float(inputDict["trackingMode"]))
	ssc.ssc_data_set_number(dat, "azimuth", float(inputDict["azimuth"]))
	# Advanced inputs with defaults. A tilt of "-" means "set tilt equal to latitude".
	if (inputDict.get("tilt",0) == "-"):
		tilt_eq_lat = 1.0
		manualTilt = 0.0
	else:
		tilt_eq_lat = 0.0
		manualTilt = float(inputDict.get("tilt",0))
	ssc.ssc_data_set_number(dat, "tilt_eq_lat", tilt_eq_lat)
	ssc.ssc_data_set_number(dat, "tilt", manualTilt)
	ssc.ssc_data_set_number(dat, "rotlim", float(inputDict["rotlim"]))
	# gamma is negated before being passed to SAM — presumably users enter the
	# temperature coefficient as a positive magnitude; confirm against the input form.
	ssc.ssc_data_set_number(dat, "gamma", -1 * float(inputDict["gamma"]))
	ssc.ssc_data_set_number(dat, "inv_eff", 0.01 * float(inputDict["inverterEfficiency"]))
	ssc.ssc_data_set_number(dat, "w_stow", float(inputDict["w_stow"]))
	# Complicated optional inputs that we could enable later.
	# ssc.ssc_data_set_array(dat, 'shading_hourly', ...) 	# Hourly beam shading factors
	# ssc.ssc_data_set_matrix(dat, 'shading_mxh', ...) 		# Month x Hour beam shading factors
	# ssc.ssc_data_set_matrix(dat, 'shading_azal', ...) 	# Azimuth x altitude beam shading factors
	# ssc.ssc_data_set_number(dat, 'shading_diff', ...) 	# Diffuse shading factor
	# ssc.ssc_data_set_number(dat, 'enable_user_poa', ...)	# Enable user-defined POA irradiance input = 0 or 1
	# ssc.ssc_data_set_array(dat, 'user_poa', ...) 			# User-defined POA irradiance in W/m2
	# ssc.ssc_data_set_number(dat, 'tilt', 999)
	# ssc.ssc_data_set_number(dat, "t_noct", float(inputDict["t_noct"]))
	# ssc.ssc_data_set_number(dat, "t_ref", float(inputDict["t_ref"]))
	# ssc.ssc_data_set_number(dat, "fd", float(inputDict["fd"]))
	# ssc.ssc_data_set_number(dat, "i_ref", float(inputDict["i_ref"]))
	# ssc.ssc_data_set_number(dat, "poa_cutin", float(inputDict["poa_cutin"]))
	# Run PV system simulation.
	mod = ssc.ssc_module_create("pvwattsv1")
	ssc.ssc_module_exec(mod, dat)
	# Setting options for start time.
	simLengthUnits = inputDict.get("simLengthUnits","")
	simStartDate = inputDict["simStartDate"]
	# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html 
	startDateTime = simStartDate + " 00:00:00 UTC"
	# Set aggregation function constants.
	agg = lambda x,y:_aggData(x,y,inputDict["simStartDate"],
		int(inputDict["simLength"]), inputDict["simLengthUnits"], ssc, dat)
	avg = lambda x:sum(x)/len(x)
	# Timestamp output.
	outData = {}
	outData["timeStamps"] = [datetime.datetime.strftime(
		datetime.datetime.strptime(startDateTime[0:19],"%Y-%m-%d %H:%M:%S") + 
		datetime.timedelta(**{simLengthUnits:x}),"%Y-%m-%d %H:%M:%S") + " UTC" for x in range(int(inputDict["simLength"]))]
	# Geodata output.
	outData["city"] = ssc.ssc_data_get_string(dat, "city")
	outData["state"] = ssc.ssc_data_get_string(dat, "state")
	outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
	outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
	outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
	# Weather output.
	outData["climate"] = {}
	outData["climate"]["Plane of Array Irradiance (W/m^2)"] = agg("poa", avg)
	outData["climate"]["Beam Normal Irradiance (W/m^2)"] = agg("dn", avg)
	outData["climate"]["Diffuse Irradiance (W/m^2)"] = agg("df", avg)
	outData["climate"]["Ambient Temperature (F)"] = agg("tamb", avg)
	outData["climate"]["Cell Temperature (F)"] = agg("tcell", avg)
	outData["climate"]["Wind Speed (m/s)"] = agg("wspd", avg)
	# Power generation.
	outData["Consumption"] = {}
	# PERF FIX: the original called agg("ac", avg) three times in a row, re-running
	# the aggregation over the SSC output on each call. Compute it once and reuse.
	acPower = agg("ac", avg)
	outData["Consumption"]["Power"] = list(acPower)
	outData["Consumption"]["Losses"] = [0] * len(acPower)
	outData["Consumption"]["DG"] = acPower
	# Stdout/stderr.
	outData["stdout"] = "Success"
	outData["stderr"] = ""
	return outData
Esempio n. 22
0
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Injects climate, recorders, collectors and a demand-response market into the
	feeder tree, runs GridLAB-D in modelDir/gldContainer, post-processes the CSV
	outputs into cleanOut, and writes allOutputData.json. On any exception the
	traceback is written to stderr.txt and allInputData.json instead.
	'''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	# Best-effort mkdir: the gldContainer working directory may already exist from a prior run.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass
	try:	
		feederName = inputDict["feederName1"]
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, "gldContainer", "climate.tmy2"))
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]	
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
			 	if feederJson['tree'][key]['object'] == 'climate':
			 		del feederJson['tree'][key]
		# Append a fresh climate module/object pointing at the copied TMY2 file.
		oldMax = feeder.getMaxKey(tree)
		tree[oldMax + 1] = {'omftype':'module','argument':'climate'}
		tree[oldMax + 2] = {'object':'climate','name':'Climate','interpolate':'QUADRATIC','tmyfile':'climate.tmy2'}
		# tree[oldMax + 3] = {'object':'capacitor','control':'VOLT','phases':'ABCN','name':'CAPTEST','parent':'tm_1','capacitor_A':'0.10 MVAr','capacitor_B':'0.10 MVAr','capacitor_C':'0.10 MVAr','time_delay':'300.0','nominal_voltage':'2401.7771','voltage_set_high':'2350.0','voltage_set_low':'2340.0','switchA':'CLOSED','switchB':'CLOSED','switchC':'CLOSED','control_level':'INDIVIDUAL','phases_connected':'ABCN','dwell_time':'0.0','pt_phases':'ABCN'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)

		# Attach recorder for waterheaters on/off
		stub = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'is_waterheater_on', 'interval':3600, 'file':'allWaterheaterOn.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach recorder for waterheater tank temperatures
		stub = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'temperature', 'interval':3600, 'file':'allWaterheaterTemp.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach collector for total waterheater load
		stub = {'object':'collector', 'group':'"class=waterheater"', 'property':'sum(actual_load)', 'interval':3600, 'file':'allWaterheaterLoad.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach collector for total network load
		stub = {'object':'collector', 'group':'"class=triplex_meter"', 'property':'sum(measured_real_power)', 'interval':3600, 'file':'allMeterPower.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		# Attach collector for total overall ZIPload power/load
		stub = {'object':'collector', 'group':'"class=ZIPload"', 'property':'sum(base_power)', 'interval':3600, 'file':'allZIPloadPower.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach recorder for each ZIPload power/load
		stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'base_power', 'interval':3600, 'file':'eachZIPloadPower.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach recorder for all ZIPloads demand_rate
		stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'demand_rate', 'interval':3600, 'file':'allZIPloadDemand.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach recorder for all ZIPloads on
		stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'number_of_devices_on', 'interval':3600, 'file':'allZIPloadOn.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		# Attach passive_controller	
		# Market module plus custom auction/player classes used by the RAMP controller below.
		tree[feeder.getMaxKey(tree)+1] = {'omftype':'module','argument':'market'}
		tree[feeder.getMaxKey(tree)+1] = {'omftype':'class auction','argument':'{\n\tdouble my_avg; double my_std;\n}'}
		tree[feeder.getMaxKey(tree)+1] = {'omftype':'class player','argument':'{\n\tdouble value;\n}'}

		stub = {
			'object':'player',
			'name':'cppDays',
			'file':'superCpp.player'
		}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		stub = {
			'object':'player',
			'name':'superClearing',
			'file':'superClearingPrice.player',
			'loop':10
		}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		stub = {
			'object':'auction',
			'name':'MARKET_1',
			'my_std':0.037953,
			'period':900,
			'my_avg':0.110000,
			'current_market.clearing_price':'superClearing.value',
			'special_mode':'BUYERS_ONLY',
			'unit': 'kW'
		}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		# NOTE(review): this controller is hard-wired to one specific waterheater
		# (waterheater171923), so it only takes effect on feeders containing that
		# named object — confirm this is intentional and not leftover test wiring.
		stub = {
			'object':'passive_controller',
			'name':'waterheater_controller_waterheater171923',
			'parent':'waterheater171923',
			'control_mode':'RAMP',
			'range_high':5,
			'range_low':-5,
			'ramp_high':1,
			'ramp_low':-1,
			'period':900,
			'setpoint':'is_waterheater_on',
			'base_setpoint':1,
			'expectation_object':'MARKET_1',
			'expectation_property':'my_avg',
			'observation_object':'MARKET_1',
			'observation_property':'past_market.clearing_price',
			'stdev_observation_property':'my_std',
			'state_property':'override'
		}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		# stub = {
		# 	'object':'passive_controller',
		# 	'name':'ZIPload_controller_ZIPload171922',
		# 	'parent':'ZIPload171922',
		# 	'control_mode':'RAMP',
		# 	'range_high':5,
		# 	'range_low':-5,
		# 	'ramp_high':1,
		# 	'ramp_low':-1,
		# 	'period':900,
		# 	'setpoint':'base_power'
		# 	'base_setpoint':1,
		# 	'expectation_object':'MARKET_1',
		# 	'expectation_property':'my_avg',
		# 	'observation_object':'MARKET_1',
		# 	'observation_property':'past_market.clearing_price',
		# 	'stdev_observation_property':'my_std'
		# 	'state_property':'override'
		# }
		# copyStub = dict(stub)
		# tree[feeder.getMaxKey(tree)+1] = copyStub

		# Attach recorders for system voltage map:
		stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':3600}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		# Attach recorders for system voltage map, triplex:
		stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':3600}
		for phase in ['1','2']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'nVoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		# And get meters for system voltage map:
		stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':3600}
		for phase in ['1','2']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'mVoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
			simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
			keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		# NOTE(review): only the '# timestamp' branch breaks out of this loop; the
		# elif/else branches keep reassigning cleanOut['timeStamps'] on every
		# iteration, so without a '# timestamp' column the final value depends on
		# dict iteration order — confirm this is the intended fallback behavior.
		for key in rawOut:
			print key
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList			
		# Voltage Band
		# voltage_12 magnitudes are halved to get per-leg (120 V nominal) values.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'): 	
				# Apparent power per phase from real/imag components, summed across phases.
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Windmill recorders expose voltage and current; power is |V|*|I| per phase.
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]

		# Print gridBallast Outputs to allOutputData.json
		cleanOut['gridBallast'] = {}
		if 'allWaterheaterOn.csv' in rawOut:
			cleanOut['gridBallast']['waterheaterOn'] = {}
			for key in rawOut['allWaterheaterOn.csv']:
				if key.startswith('waterheater'):
					cleanOut['gridBallast']['waterheaterOn'][key] = rawOut.get('allWaterheaterOn.csv')[key]
		if 'allWaterheaterTemp.csv' in rawOut:
			cleanOut['gridBallast']['waterheaterTemp'] = {}
			for key in rawOut['allWaterheaterTemp.csv']:
				if key.startswith('waterheater'):
					cleanOut['gridBallast']['waterheaterTemp'][key] = rawOut.get('allWaterheaterTemp.csv')[key]
		if 'allMeterPower.csv' in rawOut:
			cleanOut['gridBallast']['totalNetworkLoad'] = rawOut.get('allMeterPower.csv')['sum(measured_real_power)']

		if ('allWaterheaterLoad.csv' in rawOut) and ('allZIPloadPower.csv' in rawOut):
			cleanOut['gridBallast']['availabilityMagnitude'] = [x + y for x, y in zip(rawOut.get('allWaterheaterLoad.csv')['sum(actual_load)'], rawOut.get('allZIPloadPower.csv')['sum(base_power)'])]
		if 'eachZIPloadPower.csv' in rawOut:
			cleanOut['gridBallast']['ZIPloadPower'] = {}
			for key in rawOut['eachZIPloadPower.csv']:
				if key.startswith('ZIPload'):
					cleanOut['gridBallast']['ZIPloadPower'][key] = rawOut.get('eachZIPloadPower.csv')[key]
		if 'allZIPloadDemand.csv' in rawOut:
			cleanOut['gridBallast']['ZIPloadDemand'] = {}
			for key in rawOut['allZIPloadDemand.csv']:
				if key.startswith('ZIPload'):
					cleanOut['gridBallast']['ZIPloadDemand'][key] = rawOut.get('allZIPloadDemand.csv')[key]
		if 'allZIPloadOn.csv' in rawOut:
			cleanOut['gridBallast']['ZIPloadOn'] = {}
			for key in rawOut['allZIPloadOn.csv']:
				if key.startswith('ZIPload'):
					cleanOut['gridBallast']['ZIPloadOn'][key] = rawOut.get('allZIPloadOn.csv')[key]

		# EventTime calculations
		eventTime = inputDict['eventTime']
		eventLength = inputDict['eventLength']
		eventLength = eventLength.split(':')  # expected "HH:MM"
		eventDuration = datetime.timedelta(hours=int(eventLength[0]), minutes=int(eventLength[1]))
		eventStart = datetime.datetime.strptime(eventTime, '%Y-%m-%d %H:%M')
		eventEnd = eventStart + eventDuration
		cleanOut['gridBallast']['eventStart'] = str(eventStart)
		cleanOut['gridBallast']['eventEnd'] = str(eventEnd)
		# Drop timezone from timeStamp, Convert string to date
		timeStamps = [x[:19] for x in cleanOut['timeStamps']]
		dateTimeStamps = [datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S') for x in timeStamps]	
		# NOTE(review): .index raises ValueError if eventEnd does not fall exactly
		# on a recorded timestamp; the outer except would then fail the whole run.
		eventEndIdx =  dateTimeStamps.index(eventEnd)
		# Recovery Time
		# Recovery = first timestamp at/after event end where any waterheater is on.
		whOn = cleanOut['gridBallast']['waterheaterOn']
		whOnList = whOn.values()
		whOnZip = zip(*whOnList)
		whOnSum = [sum(x) for x in whOnZip]
		anyOn = [x > 0 for x in whOnSum] 
		tRecIdx = anyOn.index(True, eventEndIdx)
		tRec = dateTimeStamps[tRecIdx]
		cleanOut['gridBallast']['recoveryTime'] = str(tRec)
		# Waterheaters Off-Duration
		offDuration = tRec - eventStart
		cleanOut['gridBallast']['offDuration'] = str(offDuration)
		# Reserve Magnitude Target (RMT)
		availMag = cleanOut['gridBallast']['availabilityMagnitude']
		totNetLoad = cleanOut['gridBallast']['totalNetworkLoad']
		# loadZip = zip(availMag,totNetLoad)
		# rmt = [x[0]/x[1] for x in loadZip]
		# The 1000 factor presumably reconciles W vs kW between the two series — confirm units.
		rmt = (1000*sum(availMag))/sum(totNetLoad)
		cleanOut['gridBallast']['rmt'] = rmt
		# Reserve Magnitude Variability Tolerance (RMVT)
		avgAvailMag = sum(availMag)/len(availMag)
		rmvtMax = max(availMag)/avgAvailMag
		rmvtMin = min(availMag)/avgAvailMag
		rmvt = rmvtMax - rmvtMin
		cleanOut['gridBallast']['rmvt'] = rmvt
		# Availability
		# NOTE(review): under Python 2 this is integer division, so notAvail
		# truncates to 0 (avail == 100) unless every entry is zero — likely needs
		# float division.
		notAvail = availMag.count(0)/len(timeStamps)
		avail = (1-notAvail)*100
		cleanOut['gridBallast']['availability'] = avail
		# Waterheater Temperature Drop calculations
		# Count, per timestep, how many heaters fell below the QoS temperature floor.
		whTemp = cleanOut['gridBallast']['waterheaterTemp']
		whTempList = whTemp.values()
		whTempZip = zip(*whTempList)
		whTempDrops = []
		LOWER_LIMIT_TEMP = 125 # Used for calculating quality of service.
		for time in whTempZip:
			tempDrop = sum([t < LOWER_LIMIT_TEMP for t in time])
			whTempDrops.append(tempDrop)
		cleanOut['gridBallast']['waterheaterTempDrops'] = whTempDrops

		# ZIPload calculations for Availability and QoS
		zPower = cleanOut['gridBallast']['ZIPloadPower']
		zPowerList = zPower.values()
		zPowerZip = zip(*zPowerList)
		zPowerSum = [sum(x) for x in zPowerZip]
		zDemand = cleanOut['gridBallast']['ZIPloadDemand']
		zDemandList  = zDemand.values()
		zDemandZip = zip(*zDemandList)
		zDrops = []
		# NOTE(review): zPowerIdx is read-modified below without ever being
		# initialized, so reaching that branch raises NameError (caught by the
		# outer except, failing the run). zIdx is also reset to 0 every inner
		# iteration, so only each[0] is ever tested. This loop looks broken and
		# needs a rewrite — compare with the fixed per-timestep version used by
		# the newer work() implementation of this model.
		for time in zDemandZip:
			for each in zPowerZip:
				zIdx = 0
				if each[zIdx] == 0:
					zPowerIdx += 1
					zDrop = sum([t > 0 for t in time])
					zDrops.append(zDrop)
				else:
					zDrops.append(0)
		cleanOut['gridBallast']['qualityDrops'] = [x + y for x, y in zip(whTempDrops, zDrops)]

		# What percentage of our keys have lat lon data?
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		# Fall back to a neato (force-directed) layout when most nodes lack coordinates.
		if latPerc < 0.25: doNeato = True
		else: doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)	
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# Always record total wall-clock time and remove the parent PID file,
	# whether the run succeeded or failed.
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
Esempio n. 23
0
def work(modelDir, inputDict):
    '''Run a SAM pvwattsv1 simulation in modelDir and return chart-ready output.'''
    # Resolve the zip code to a TMY2 climate file and copy it into the model dir.
    inputDict["climateName"], latforpvwatts = zipCodeToClimateName(
        inputDict["zipCode"])
    shutil.copy(
        pJoin(__neoMetaModel__._omfDir, "data", "Climate",
              inputDict["climateName"] + ".tmy2"),
        pJoin(modelDir, "climate.tmy2"))
    # Build the SAM simulation core and its input table.
    ssc = nrelsam2013.SSCAPI()
    sscData = ssc.ssc_data_create()
    # Required user inputs.
    ssc.ssc_data_set_string(sscData, "file_name", modelDir + "/climate.tmy2")
    ssc.ssc_data_set_number(sscData, "system_size",
                            float(inputDict["systemSize"]))
    # Efficiencies arrive as percentages; SAM wants 0-1 fractions.
    ssc.ssc_data_set_number(sscData, "derate",
                            0.01 * float(inputDict["nonInverterEfficiency"]))
    ssc.ssc_data_set_number(sscData, "track_mode",
                            float(inputDict["trackingMode"]))
    ssc.ssc_data_set_number(sscData, "azimuth", float(inputDict["azimuth"]))
    # Advanced inputs with defaults: a tilt of "-" means "tilt at site latitude".
    if inputDict.get("tilt", 0) == "-":
        tiltEqLat, fixedTilt = 1.0, 0.0
    else:
        tiltEqLat, fixedTilt = 0.0, float(inputDict.get("tilt", 0))
    ssc.ssc_data_set_number(sscData, "tilt_eq_lat", tiltEqLat)
    ssc.ssc_data_set_number(sscData, "tilt", fixedTilt)
    ssc.ssc_data_set_number(sscData, "rotlim", float(inputDict["rotlim"]))
    ssc.ssc_data_set_number(sscData, "gamma", -1 * float(inputDict["gamma"]))
    ssc.ssc_data_set_number(sscData, "inv_eff",
                            0.01 * float(inputDict["inverterEfficiency"]))
    ssc.ssc_data_set_number(sscData, "w_stow", float(inputDict["w_stow"]))
    # Other optional pvwattsv1 inputs (hourly/monthly shading factors,
    # user-defined POA, t_noct, t_ref, fd, i_ref, poa_cutin) are deliberately
    # left at their SAM defaults; enable them here if ever needed.
    # Run the PV system simulation.
    pvModule = ssc.ssc_module_create("pvwattsv1")
    ssc.ssc_module_exec(pvModule, sscData)
    # Start-time options. Timezone is pinned to UTC; it doesn't affect the
    # calculation and the relative offset is handled in pvWatts.html.
    simLengthUnits = inputDict.get("simLengthUnits", "")
    startDateTime = inputDict["simStartDate"] + " 00:00:00 UTC"

    # Aggregation helpers over the raw hourly SAM outputs.
    def agg(sscKey, aggFun):
        return _aggData(sscKey, aggFun, inputDict["simStartDate"],
                        int(inputDict["simLength"]),
                        inputDict["simLengthUnits"], ssc, sscData)

    def avg(values):
        return sum(values) / len(values)

    # Timestamp output.
    output = {}
    baseTime = datetime.datetime.strptime(startDateTime[0:19],
                                          "%Y-%m-%d %H:%M:%S")
    output["timeStamps"] = [
        (baseTime + datetime.timedelta(**{simLengthUnits: step})).strftime(
            "%Y-%m-%d %H:%M:%S") + " UTC"
        for step in range(int(inputDict["simLength"]))
    ]
    # Geodata output.
    for geoKey in ("city", "state"):
        output[geoKey] = ssc.ssc_data_get_string(sscData, geoKey)
    for geoKey in ("lat", "lon", "elev"):
        output[geoKey] = ssc.ssc_data_get_number(sscData, geoKey)
    # Weather output.
    output["climate"] = {
        "Plane of Array Irradiance (W/m^2)": agg("poa", avg),
        "Beam Normal Irradiance (W/m^2)": agg("dn", avg),
        "Diffuse Irradiance (W/m^2)": agg("df", avg),
        "Ambient Temperature (F)": agg("tamb", avg),
        "Cell Temperature (F)": agg("tcell", avg),
        "Wind Speed (m/s)": agg("wspd", avg),
    }
    # Power generation.
    output["Consumption"] = {
        "Power": list(agg("ac", avg)),
        "Losses": [0 for _ in agg("ac", avg)],
        "DG": agg("ac", avg),
    }
    # Stdout/stderr.
    output["stdout"] = "Success"
    output["stderr"] = ""
    return output
Esempio n. 24
0
def work(modelDir, inputDict):
    '''Run the model in its directory.

    Simulates one year of PV output with NREL SAM (pvwattsv1) for the given
    zip code, then evaluates a cooperative-utility financial model (the
    "E.."/"F.." comments below reference spreadsheet cells of a RUS Form 7
    style worksheet) comparing a business-as-usual (BAU) case against a
    community-solar case. Returns the chart/report data as a dict.

    NOTE(review): the order of the calculations below matters — many later
    values reuse earlier ones (and some mutate inputDict) — so statements
    must not be reordered.
    '''
    # Resolve the zip code to a TMY2 climate file and stage it in modelDir.
    inputDict["climateName"] = weather.zipCodeToClimateName(
        inputDict["zipCode"])
    shutil.copy(
        pJoin(__neoMetaModel__._omfDir, "data", "Climate",
              inputDict["climateName"] + ".tmy2"),
        pJoin(modelDir, "climate.tmy2"))
    # Set up SAM data structures.
    ssc = nrelsam2013.SSCAPI()
    dat = ssc.ssc_data_create()
    # Required user inputs.
    ssc.ssc_data_set_string(dat, b'file_name',
                            bytes(modelDir + '/climate.tmy2', 'ascii'))
    # Floor the array size at 0.1 kW so SAM always gets a positive size.
    systemSize = max(float(inputDict.get('systemSize', 0)), .1)
    ssc.ssc_data_set_number(dat, b'system_size', systemSize)
    # SAM options where we take defaults.
    ssc.ssc_data_set_number(dat, b'derate', 0.97)
    ssc.ssc_data_set_number(dat, b'track_mode', 0)
    ssc.ssc_data_set_number(dat, b'azimuth', 180)
    ssc.ssc_data_set_number(dat, b'tilt_eq_lat', 1)
    # Run PV system simulation.
    mod = ssc.ssc_module_create(b'pvwattsv1')
    ssc.ssc_module_exec(mod, dat)
    # Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
    startDateTime = "2013-01-01 00:00:00 UTC"
    # Timestamp output: 8760 hourly stamps covering the simulated year.
    outData = {}
    outData["timeStamps"] = [
        datetime.datetime.strftime(
            datetime.datetime.strptime(startDateTime[0:19],
                                       "%Y-%m-%d %H:%M:%S") +
            datetime.timedelta(**{"hours": x}), "%Y-%m-%d %H:%M:%S") + " UTC"
        for x in range(int(8760))
    ]
    # Geodata output.
    outData['city'] = ssc.ssc_data_get_string(dat, b'city').decode()
    outData['state'] = ssc.ssc_data_get_string(dat, b'state').decode()
    outData['lat'] = ssc.ssc_data_get_number(dat, b'lat')
    outData['lon'] = ssc.ssc_data_get_number(dat, b'lon')
    outData['elev'] = ssc.ssc_data_get_number(dat, b'elev')
    # Weather output.
    outData["climate"] = {}
    outData['climate'][
        'Global Horizontal Radiation (W/m^2)'] = ssc.ssc_data_get_array(
            dat, b'gh')
    outData['climate'][
        'Plane of Array Irradiance (W/m^2)'] = ssc.ssc_data_get_array(
            dat, b'poa')
    outData['climate']['Ambient Temperature (F)'] = ssc.ssc_data_get_array(
        dat, b'tamb')
    outData['climate']['Cell Temperature (F)'] = ssc.ssc_data_get_array(
        dat, b'tcell')
    outData['climate']['Wind Speed (m/s)'] = ssc.ssc_data_get_array(
        dat, b'wspd')
    # Power generation.
    outData['powerOutputAc'] = ssc.ssc_data_get_array(dat, b'ac')
    # resPenetration is a percentage; solarFraction is its 0-1 share.
    solarFraction = float(inputDict.get("resPenetration", .05)) / 100
    # Floor so the fossil share never reaches exactly zero (it is used as a
    # divisor further below when solarFraction > 0).
    fossilFraction = max(1 - solarFraction, 10**-6)
    # Monthly aggregation outputs.
    # Month abbreviation -> zero-based index, used to order monthly series.
    months = {
        "Jan": 0,
        "Feb": 1,
        "Mar": 2,
        "Apr": 3,
        "May": 4,
        "Jun": 5,
        "Jul": 6,
        "Aug": 7,
        "Sep": 8,
        "Oct": 9,
        "Nov": 10,
        "Dec": 11
    }
    # Sum hourly AC output (Wh) whose timestamp falls in 0-based month x.
    totMonNum = lambda x: sum([
        z for (y, z) in zip(outData["timeStamps"], outData["powerOutputAc"])
        if y.startswith(startDateTime[0:4] + "-{0:02d}".format(x + 1))
    ])
    outData["monthlyGeneration"] = [[
        a, __neoMetaModel__.roundSig(totMonNum(b), 2)
    ] for (a, b) in sorted(months.items(), key=lambda x: x[1])]
    monthlyConsumers = []
    monthlyResidentialkWhLoad = []
    monthlyResidentialRevenue = []
    monthlyTotalkWhLoad = []
    monthlyTotalRevenue = []
    # Bucket the per-month inputs (keys like "janSale", "janKWh", ...) by
    # their suffix; "KWh" must be matched before "KWhT" would be, hence the
    # elif ordering below.
    for key in inputDict:
        # MAYBEFIX: data in list may not be ordered by month.
        if key.endswith("Sale"):
            monthlyConsumers.append(
                [key[:3].title(),
                 float(inputDict.get(key, 0))])
        elif key.endswith("KWh"):  # the order of calculation matters
            monthlyResidentialkWhLoad.append(
                [key[:3].title(),
                 float(inputDict.get(key, 0))])
        elif key.endswith("Rev"):
            monthlyResidentialRevenue.append(
                [key[:3].title(),
                 float(inputDict.get(key, 0))])
        elif key.endswith("KWhT"):
            monthlyTotalkWhLoad.append(
                [key[:3].title(),
                 float(inputDict.get(key, 0))])
        elif key.endswith("RevT"):
            monthlyTotalRevenue.append(
                [key[:3].title(),
                 float(inputDict.get(key, 0))])
    outData["monthlyConsumers"] = sorted(monthlyConsumers,
                                         key=lambda x: months[x[0]])
    outData["monthlyResidentialkWhLoad"] = sorted(monthlyResidentialkWhLoad,
                                                  key=lambda x: months[x[0]])
    outData["monthlyResidentialRevenue"] = sorted(monthlyResidentialRevenue,
                                                  key=lambda x: months[x[0]])
    outData["monthlyTotalkWhLoad"] = sorted(monthlyTotalkWhLoad,
                                            key=lambda x: months[x[0]])
    outData["monthlyTotalRevenue"] = sorted(monthlyTotalRevenue,
                                            key=lambda x: months[x[0]])
    # Per-month solar generation in kWh, scaled by consumers and penetration.
    outData["monthlySolarkWhGenerated"] = [[
        sorted(months.items(),
               key=lambda x: x[1])[i][0], outData["monthlyGeneration"][i][1] /
        1000 * outData["monthlyConsumers"][i][1] * solarFraction
    ] for i in range(12)]
    outData["monthlyTotalNetLoadAfterSolar"] = [[
        sorted(months.items(), key=lambda x: x[1])[i][0],
        outData["monthlyTotalkWhLoad"][i][1] -
        outData["monthlySolarkWhGenerated"][i][1]
    ] for i in range(12)]

    ## Flow Diagram Calculations, and order of calculation matters
    # NOTE(review): these .get() calls have no default, so a missing key
    # yields float(None) -> TypeError. Confirm the inputs are always present.
    retailCost = float(inputDict.get("retailCost"))
    solarLCoE = float(inputDict.get('solarLCoE'))
    customerServiceCharge = float(inputDict.get("customServiceCharge", 0))
    solarServiceCharge = float(inputDict.get("solarServiceCharge", 0))
    totalkWhLoad = sum(monthlyTotalkWhLoad[i][1] for i in range(12))
    # BAU case
    outData["BAU"] = {}
    # E23 = E11
    # NOTE(review): default is 1 here but 0 a few lines below (losses) and in
    # the costofPower division near the end — looks inconsistent; confirm.
    outData["BAU"]["totalKWhPurchased"] = float(
        inputDict.get("totalKWhPurchased", 1))
    # E24 = SUM(E19:P19)
    outData["BAU"]["totalKWhSales"] = totalkWhLoad
    # E25 = E23-E24
    outData["BAU"]["losses"] = float(inputDict.get("totalKWhPurchased",
                                                   0)) - totalkWhLoad
    # E26 = E25/E23
    outData["BAU"]["effectiveLossRate"] = outData["BAU"]["losses"] / outData[
        "BAU"]["totalKWhPurchased"]
    # E27 = 0
    outData["BAU"]["annualSolarGen"] = 0
    # E28 = SUM(E17:P17)
    outData["BAU"]["resNonSolarKWhSold"] = sum(
        [monthlyResidentialkWhLoad[i][1] for i in range(12)])
    # E29 = 0
    outData["BAU"]["solarResDemand"] = 0
    # E30 = 0
    outData["BAU"]["solarResSold"] = 0
    # E31 = E24-E28
    outData["BAU"]["nonResKWhSold"] = outData["BAU"][
        "totalKWhSales"] - outData["BAU"]["resNonSolarKWhSold"]
    # E32 = 0
    outData["BAU"]["costSolarGen"] = 0
    # E33 = SUM(E20:P20)-SUM(E18:P18)+E10
    outData["BAU"]["nonResRev"] = sum([
        monthlyTotalRevenue[i][1] for i in range(12)
    ]) - sum([monthlyResidentialRevenue[i][1]
              for i in range(12)]) + float(inputDict.get("otherElecRevenue"))
    # E34 = (SUM(E18:P18)-SUM(E16:P16)*E6)/SUM(E17:P17):Buggy and complicated line calculating effectiveResRate replaced with retailCost. Was somehow messing up solar fixed charges.
    # outData["BAU"]["effectiveResRate"] = (sum ([monthlyResidentialRevenue[i][1] for i in range(12)]) - sum([monthlyConsumers[i][1] for i in range(12)])*customerServiceCharge)/sum([monthlyResidentialkWhLoad[i][1] for i in range(12)])
    # Total customer-months across the year (12 monthly consumer counts).
    customerMonths = sum([monthlyConsumers[i][1] for i in range(12)])
    # E35 = E34*E28+SUM(E16:P16)*E6
    outData["BAU"]["resNonSolarRev"] = retailCost * outData["BAU"][
        "resNonSolarKWhSold"] + customerMonths * customerServiceCharge
    # E36 = E30*E34
    outData["BAU"]["solarResRev"] = 0
    # E37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71), update after Form 7 model
    outData["BAU"]["nonPowerCosts"] = 0
    # E38 = E23-E25-E28-E30-E31
    outData["BAU"]["energyAllBal"] = 0
    # E39 = E36+E33+E35-E47-E72-E37
    outData["BAU"]["dollarAllBal"] = 0
    # E40 = 0
    outData["BAU"]["avgMonthlyBillSolarCus"] = 0
    # E41 = E35/SUM(E16:P16)
    avgCustomerCount = (customerMonths / 12)
    outData["BAU"]["avgMonthlyBillNonSolarCus"] = outData["BAU"][
        "resNonSolarRev"] / customerMonths
    # E42 = E63/E24, update after Form 7 model
    outData["BAU"]["costofService"] = 0
    # Solar case
    outData["Solar"] = {}
    # F27 = SUM(E15:P15)
    outData["Solar"]["annualSolarGen"] = sum(
        [outData["monthlySolarkWhGenerated"][i][1] for i in range(12)])
    # F24 = E24-F27
    outData["Solar"][
        "totalKWhSales"] = totalkWhLoad - outData["Solar"]["annualSolarGen"]
    # F23 =F24/(1-E26)
    outData["Solar"]["totalKWhPurchased"] = outData["Solar"][
        "totalKWhSales"] / (1 - outData["BAU"]["effectiveLossRate"])
    outData["totalsolarmonthly"] = [[
        sorted(months.items(), key=lambda x: x[1])[i][0],
        outData["monthlyTotalNetLoadAfterSolar"][i][1] /
        (1 - outData["BAU"]["effectiveLossRate"])
    ] for i in range(12)]
    # F25 = F23-F24
    outData["Solar"]["losses"] = (outData["Solar"]["totalKWhPurchased"] -
                                  outData["Solar"]["totalKWhSales"])
    # F26 = E26
    outData["Solar"]["effectiveLossRate"] = outData["BAU"]["effectiveLossRate"]
    # F28 = (1-E5)*E28
    outData["Solar"]["resNonSolarKWhSold"] = fossilFraction * outData["BAU"][
        "resNonSolarKWhSold"]
    # F29 = E5*E28
    outData["Solar"]["solarResDemand"] = solarFraction * outData["BAU"][
        "resNonSolarKWhSold"]
    # F30 = F29-F27
    outData["Solar"]["solarResSold"] = outData["Solar"][
        "solarResDemand"] - outData["Solar"]["annualSolarGen"]
    # F31 = E31
    outData["Solar"]["nonResKWhSold"] = outData["BAU"]["nonResKWhSold"]
    # F32 = E9*F27
    outData["Solar"][
        "costSolarGen"] = solarLCoE * outData["Solar"]["annualSolarGen"]
    # F33 = E33
    outData["Solar"]["nonResRev"] = outData["BAU"]["nonResRev"]
    # F34 = E34
    outData["Solar"]["effectiveResRate"] = retailCost
    # F35 = E35*(1-E5)
    outData["Solar"][
        "resNonSolarRev"] = outData["BAU"]["resNonSolarRev"] * fossilFraction
    # F30*E34 = Solar revenue from selling at residential rate
    solarSoldRateRev = outData["Solar"]["solarResSold"] * outData["Solar"][
        "effectiveResRate"]
    # (E6+E7)*SUM(E16:P16)*E5 = Solar revenue from charges
    solarChargesRev = (customerServiceCharge +
                       solarServiceCharge) * customerMonths * solarFraction
    # F36 = F30*E34+(E6+E7)*SUM(E16:P16)*E5 = solarSoldRate + solarChargesRev
    outData["Solar"]["solarResRev"] = solarSoldRateRev + solarChargesRev
    # F37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71) = E37, update after Form 7 model
    outData["Solar"]["nonPowerCosts"] = 0
    # F38 = F23-F25-F28-F30-E31
    outData["Solar"]["energyAllBal"] = 0
    # F39 = F36+E33+F35-F47-F72-E37
    outData["Solar"]["dollarAllBal"] = 0
    # Per-customer monthly bills only make sense with nonzero penetration.
    if (solarFraction > 0):
        # F41 = (F35)/(SUM(E16:P16)*(1-E5))
        outData["Solar"]["avgMonthlyBillNonSolarCus"] = outData["Solar"][
            "resNonSolarRev"] / (customerMonths * fossilFraction)
        # F42 = F30*E34/(SUM(E16:P16)*E5)+E6+E7
        outData["Solar"]["avgMonthlyBillSolarCus"] = outData["Solar"][
            "solarResSold"] * retailCost / (
                customerMonths *
                solarFraction) + customerServiceCharge + solarServiceCharge
        # F43 = (F27/(SUM(E16:P16)*E5))*E9
        outData["Solar"]["avgMonthlyBillSolarSolarCus"] = (
            outData["Solar"]["annualSolarGen"] /
            (customerMonths * solarFraction)) * solarLCoE
    else:
        outData["Solar"]["avgMonthlyBillNonSolarCus"] = 0
        outData["Solar"]["avgMonthlyBillSolarCus"] = 0
        outData["Solar"]["avgMonthlyBillSolarSolarCus"] = 0
    # Net Average Monthly Bill
    avgMonthlyBillSolarNet = outData["Solar"][
        "avgMonthlyBillSolarCus"] + outData["Solar"][
            "avgMonthlyBillSolarSolarCus"]
    outData["Solar"]["avgMonthlyBillSolarCus"] = avgMonthlyBillSolarNet
    # F45 = F63/F24, update after Form 7 model
    outData["Solar"]["costofService"] = 0
    ## Form 7 Model
    # The next block copies Form 7 expense inputs into BOTH cases at once
    # (chained assignment), defaulting each missing input to 0.
    # E46
    outData["Solar"]["powerProExpense"] = outData["BAU"][
        "powerProExpense"] = float(inputDict.get("powerProExpense", 0))
    # E47 != F47
    outData["BAU"]["costPurchasedPower"] = float(
        inputDict.get("costPurchasedPower", 0))
    # E48
    outData["Solar"]["transExpense"] = outData["BAU"]["transExpense"] = float(
        inputDict.get("transExpense", 0))
    # E49
    outData["Solar"]["distriExpenseO"] = outData["BAU"][
        "distriExpenseO"] = float(inputDict.get("distriExpenseO", 0))
    # E50
    outData["Solar"]["distriExpenseM"] = outData["BAU"][
        "distriExpenseM"] = float(inputDict.get("distriExpenseM", 0))
    # E51
    outData["Solar"]["customerAccountExpense"] = outData["BAU"][
        "customerAccountExpense"] = float(
            inputDict.get("customerAccountExpense", 0))
    # E52
    outData["Solar"]["customerServiceExpense"] = outData["BAU"][
        "customerServiceExpense"] = float(
            inputDict.get("customerServiceExpense", 0))
    # E53
    outData["Solar"]["salesExpense"] = outData["BAU"]["salesExpense"] = float(
        inputDict.get("salesExpense", 0))
    # E54
    outData["Solar"]["adminGeneralExpense"] = outData["BAU"][
        "adminGeneralExpense"] = float(inputDict.get("adminGeneralExpense", 0))
    # E56
    outData["Solar"]["depreAmortiExpense"] = outData["BAU"][
        "depreAmortiExpense"] = float(inputDict.get("depreAmortiExpense", 0))
    # E57
    outData["Solar"]["taxExpensePG"] = outData["BAU"]["taxExpensePG"] = float(
        inputDict.get("taxExpensePG", 0))
    # E58
    outData["Solar"]["taxExpense"] = outData["BAU"]["taxExpense"] = float(
        inputDict.get("taxExpense", 0))
    # E59
    outData["Solar"]["interestLongTerm"] = outData["BAU"][
        "interestLongTerm"] = float(inputDict.get("interestLongTerm", 0))
    # E60
    outData["Solar"]["interestConstruction"] = outData["BAU"][
        "interestConstruction"] = float(
            inputDict.get("interestConstruction", 0))
    # E61
    outData["Solar"]["interestExpense"] = outData["BAU"][
        "interestExpense"] = float(inputDict.get("interestExpense", 0))
    # E62
    outData["Solar"]["otherDeductions"] = outData["BAU"][
        "otherDeductions"] = float(inputDict.get("otherDeductions", 0))
    # E65
    outData["Solar"]["nonOpMarginInterest"] = outData["BAU"][
        "nonOpMarginInterest"] = float(inputDict.get("nonOpMarginInterest", 0))
    # E66
    outData["Solar"]["fundsUsedConstruc"] = outData["BAU"][
        "fundsUsedConstruc"] = float(inputDict.get("fundsUsedConstruc", 0))
    # E67
    outData["Solar"]["incomeEquityInvest"] = outData["BAU"][
        "incomeEquityInvest"] = float(inputDict.get("incomeEquityInvest", 0))
    # E68
    outData["Solar"]["nonOpMarginOther"] = outData["BAU"][
        "nonOpMarginOther"] = float(inputDict.get("nonOpMarginOther", 0))
    # E69
    outData["Solar"]["genTransCapCredits"] = outData["BAU"][
        "genTransCapCredits"] = float(inputDict.get("genTransCapCredits", 0))
    # E70
    outData["Solar"]["otherCapCreditsPatroDivident"] = outData["BAU"][
        "otherCapCreditsPatroDivident"] = float(
            inputDict.get("otherCapCreditsPatroDivident", 0))
    # E71
    outData["Solar"]["extraItems"] = outData["BAU"]["extraItems"] = float(
        inputDict.get("extraItems", 0))
    # Calculation
    # E45 = SUM(E20:P20)+E10
    outData["BAU"]["operRevPatroCap"] = sum(
        [monthlyTotalRevenue[i][1]
         for i in range(12)]) + float(inputDict.get("otherElecRevenue", 0))
    # E55 = SUM(E46:E54)
    # NOTE(review): these .get() calls (through patCapital below) have no
    # default, unlike the chained assignments above — missing keys raise.
    outData["BAU"]["totalOMExpense"] = float(inputDict.get("powerProExpense")) \
     + float(inputDict.get("costPurchasedPower")) \
     + float(inputDict.get("transExpense")) \
     + float(inputDict.get("distriExpenseO")) \
     + float(inputDict.get("distriExpenseM")) \
     + float(inputDict.get("customerAccountExpense")) \
     + float(inputDict.get("customerServiceExpense")) \
     + float(inputDict.get("salesExpense"))  \
     + float(inputDict.get("adminGeneralExpense"))
    # E63 = SUM(E55:E62)
    outData["BAU"]["totalCostElecService"] = outData["BAU"]["totalOMExpense"] \
     + float(inputDict.get("depreAmortiExpense"))\
     + float(inputDict.get("taxExpensePG"))\
     + float(inputDict.get("taxExpense"))\
     + float(inputDict.get("interestLongTerm"))\
     + float(inputDict.get("interestExpense"))\
     + float(inputDict.get("interestConstruction"))\
     + outData["BAU"]["otherDeductions"]
    # E64 = E45-E63
    outData["BAU"]["patCapOperMargins"] = outData["BAU"][
        "operRevPatroCap"] - outData["BAU"]["totalCostElecService"]
    # E72 = SUM(E64:E71)
    outData["BAU"]["patCapital"] = outData["BAU"]["patCapOperMargins"]\
     + float(inputDict.get("nonOpMarginInterest"))\
     + float(inputDict.get("fundsUsedConstruc"))\
     + float(inputDict.get("incomeEquityInvest"))\
     + float(inputDict.get("nonOpMarginOther"))\
     + float(inputDict.get("genTransCapCredits"))\
     + float(inputDict.get("otherCapCreditsPatroDivident"))\
     + float(inputDict.get("extraItems"))
    # F48 = E48-F27*E34+SUM(E16:P16)*E5*E7
    outData["Solar"]["operRevPatroCap"] = outData["BAU"][
        "operRevPatroCap"] - retailCost * outData["Solar"][
            "annualSolarGen"] + customerMonths * solarFraction * solarServiceCharge
    # F47 = (F23)*E8
    # NOTE(review): writes back into inputDict; default 0 for
    # totalKWhPurchased makes this a ZeroDivisionError if the key is missing.
    inputDict["costofPower"] = float(inputDict.get(
        "costPurchasedPower", 0)) / float(inputDict.get(
            "totalKWhPurchased", 0))
    outData["Solar"][
        "costPurchasedPower"] = outData["Solar"]["totalKWhPurchased"] * float(
            inputDict.get("costofPower", 0))
    inputDict["costofPower"] = round(inputDict["costofPower"], 3)
    # F55 = SUM(F46:F54)
    outData["Solar"]["totalOMExpense"] = outData["Solar"]["powerProExpense"]\
     + outData["Solar"]["costPurchasedPower"]\
     + outData["Solar"]["transExpense"]\
     + outData["Solar"]["distriExpenseO"]\
     + outData["Solar"]["distriExpenseM"]\
     + outData["Solar"]["customerAccountExpense"]\
     + outData["Solar"]["customerServiceExpense"]\
     + outData["Solar"]["salesExpense"]\
     + outData["Solar"]["adminGeneralExpense"]
    # F63 = E63
    outData["Solar"]["totalCostElecService"] = outData["Solar"]["totalOMExpense"]\
     + outData["Solar"]["depreAmortiExpense"]\
     + outData["Solar"]["taxExpensePG"]\
     + outData["Solar"]["taxExpense"]\
     + outData["Solar"]["interestLongTerm"]\
     + outData["Solar"]["interestConstruction"]\
     + outData["Solar"]["interestExpense"]\
     + outData["Solar"]["otherDeductions"]
    # F64 = F45 - F63
    outData["Solar"]["patCapOperMargins"] = outData["Solar"][
        "operRevPatroCap"] - outData["Solar"]["totalCostElecService"]
    # F72 = SUM(F64:F71)
    outData["Solar"]["patCapital"] = outData["Solar"]["patCapOperMargins"]\
     + outData["Solar"]["nonOpMarginInterest"]\
     + outData["Solar"]["fundsUsedConstruc"]\
     + outData["Solar"]["incomeEquityInvest"]\
     + outData["Solar"]["nonOpMarginOther"]\
     + outData["Solar"]["genTransCapCredits"]\
     + outData["Solar"]["otherCapCreditsPatroDivident"]\
     + outData["Solar"]["extraItems"]
    # E37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71), update after Form 7 model
    outData["BAU"]["nonPowerCosts"] = outData["BAU"]["transExpense"] \
     + outData["BAU"]["distriExpenseO"] \
     + outData["BAU"]["distriExpenseM"] \
     + outData["BAU"]["customerAccountExpense"] \
     + outData["BAU"]["customerServiceExpense"] \
     + outData["BAU"]["salesExpense"] \
     + outData["BAU"]["adminGeneralExpense"] \
     + outData["BAU"]["depreAmortiExpense"] \
     + outData["BAU"]["taxExpensePG"] \
     + outData["BAU"]["taxExpense"] \
     + outData["BAU"]["interestLongTerm"] \
     + outData["BAU"]["interestConstruction"] \
     + outData["BAU"]["interestExpense"] \
     + outData["BAU"]["otherDeductions"] \
     - (outData["BAU"]["nonOpMarginInterest"] \
     + outData["BAU"]["fundsUsedConstruc"] \
     + outData["BAU"]["incomeEquityInvest"] \
     + outData["BAU"]["nonOpMarginOther"] \
     + outData["BAU"]["genTransCapCredits"] \
     + outData["BAU"]["otherCapCreditsPatroDivident"] \
     + outData["BAU"]["extraItems"])
    # E42 = E63/E24, update after Form 7 model
    outData["BAU"]["costofService"] = outData["BAU"][
        "totalCostElecService"] / outData["BAU"]["totalKWhSales"]
    # F37 = SUM(E48:E54)+SUM(E56:E62)-SUM(E65:E71) = E37, update after Form 7 model
    outData["Solar"]["nonPowerCosts"] = outData["BAU"]["nonPowerCosts"]
    # F42 = F63/F24, update after Form 7 model
    outData["Solar"]["costofService"] = outData["Solar"][
        "totalCostElecService"] / outData["Solar"]["totalKWhSales"]
    # Stdout/stderr.
    outData["stdout"] = "Success"
    outData["stderr"] = ""
    return outData
Esempio n. 25
0
def work(modelDir, inputDict):
    ''' Run the model in its directory. '''
    #Set static input data
    simLength = 8760
    simStartDate = "2013-01-01"
    # Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
    startDateTime = simStartDate + " 00:00:00 UTC"
    simLengthUnits = "hours"
    # Associate zipcode to climate data
    inputDict["climateName"], latforpvwatts = zipCodeToClimateName(
        inputDict["zipCode"])
    inverterSizeAC = float(inputDict.get("inverterSize", 0))
    if (inputDict.get("systemSize", 0) == "-"):
        arraySizeDC = 1.3908 * inverterSizeAC
    else:
        arraySizeDC = float(inputDict.get("systemSize", 0))
    numberPanels = (arraySizeDC * 1000 / 305)
    # Set constants
    panelSize = 305
    trackingMode = 0
    rotlim = 45.0
    gamma = 0.45
    if (inputDict.get("tilt", 0) == "-"):
        manualTilt = latforpvwatts
    else:
        manualTilt = float(inputDict.get("tilt", 0))
    numberInverters = math.ceil(inverterSizeAC / 1000 / 0.5)
    # Copy specific climate data into model directory
    shutil.copy(
        pJoin(__neoMetaModel__._omfDir, "data", "Climate",
              inputDict["climateName"] + ".tmy2"),
        pJoin(modelDir, "climate.tmy2"))
    # Set up SAM data structures.
    ssc = nrelsam2013.SSCAPI()
    dat = ssc.ssc_data_create()
    # Required user inputs.
    ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
    ssc.ssc_data_set_number(dat, "system_size", arraySizeDC)
    ssc.ssc_data_set_number(
        dat, "derate",
        float(inputDict.get("inverterEfficiency", 96)) / 100 *
        float(inputDict.get("nonInverterEfficiency", 87)) / 100)
    ssc.ssc_data_set_number(dat, "track_mode", float(trackingMode))
    ssc.ssc_data_set_number(dat, "azimuth",
                            float(inputDict.get("azimuth", 180)))
    # Advanced inputs with defaults.
    ssc.ssc_data_set_number(dat, "rotlim", float(rotlim))
    ssc.ssc_data_set_number(dat, "gamma", float(-gamma / 100))
    ssc.ssc_data_set_number(dat, "tilt", manualTilt)
    ssc.ssc_data_set_number(dat, "tilt_eq_lat", 0.0)
    # Run PV system simulation.
    mod = ssc.ssc_module_create("pvwattsv1")
    ssc.ssc_module_exec(mod, dat)
    # Timestamp output.
    outData = {}
    outData["timeStamps"] = [
        dt.datetime.strftime(
            dt.datetime.strptime(startDateTime[0:19], "%Y-%m-%d %H:%M:%S") +
            dt.timedelta(**{simLengthUnits: x}), "%Y-%m-%d %H:%M:%S") + " UTC"
        for x in range(simLength)
    ]
    # Geodata output.
    # Geodata output.
    outData["minLandSize"] = round(
        (arraySizeDC / 1390.8 * 5 + 1) * math.cos(math.radians(22.5)) /
        math.cos(math.radians(latforpvwatts)), 0)
    landAmount = float(inputDict.get("landAmount", 6.0))
    outData["city"] = ssc.ssc_data_get_string(dat, "city")
    outData["state"] = ssc.ssc_data_get_string(dat, "state")
    outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
    outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
    outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
    # Weather output.
    outData["climate"] = {}
    outData["climate"][
        "Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(
            dat, "gh")
    outData["climate"][
        "Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(
            dat, "poa")
    outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(
        dat, "tamb")
    outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(
        dat, "tcell")
    outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(
        dat, "wspd")
    # Power generation.
    outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
    # Calculate clipping.
    outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
    invSizeWatts = inverterSizeAC * 1000
    outData["powerOutputAcInvClipped"] = [
        x if x < invSizeWatts else invSizeWatts
        for x in outData["powerOutputAc"]
    ]
    try:
        outData["percentClipped"] = 100 * (
            1.0 - sum(outData["powerOutputAcInvClipped"]) /
            sum(outData["powerOutputAc"]))
    except ZeroDivisionError:
        outData["percentClipped"] = 0.0
    #One year generation
    outData["oneYearGenerationWh"] = sum(outData["powerOutputAcInvClipped"])
    #Annual generation for all years
    loanYears = 25
    outData["allYearGenerationMWh"] = {}
    outData["allYearGenerationMWh"][1] = float(
        outData["oneYearGenerationWh"]) / 1000000
    # outData["allYearGenerationMWh"][1] = float(2019.576)
    for i in range(2, loanYears + 1):
        outData["allYearGenerationMWh"][i] = float(
            outData["allYearGenerationMWh"][i - 1]) * (
                1 - float(inputDict.get("degradation", 0.8)) / 100)
    # Summary of Results.
    ######
    ### Total Costs (sum of): Hardware Costs, Design/Engineering/PM/EPC/Labor Costs, Siteprep Costs, Construction Costs, Installation Costs, Land Costs
    ######
    ### Hardware Costs
    pvModules = arraySizeDC * float(inputDict.get("moduleCost",
                                                  0)) * 1000  #off by 4000
    racking = arraySizeDC * float(inputDict.get("rackCost", 0)) * 1000
    inverters = numberInverters * float(inputDict.get("inverterCost", 0))
    inverterSize = inverterSizeAC
    if (inverterSize <= 250):
        gear = 15000
    elif (inverterSize <= 600):
        gear = 18000
    else:
        gear = inverterSize / 1000 * 22000
    balance = inverterSizeAC * 1.3908 * 134
    combiners = math.ceil(numberPanels / 19 / 24) * float(1800)  #*
    wireManagement = arraySizeDC * 1.5
    transformer = 1 * 28000
    weatherStation = 1 * 12500
    shipping = 1.02
    hardwareCosts = (pvModules + racking + inverters + gear + balance +
                     combiners + wireManagement + transformer +
                     weatherStation) * shipping
    ### Design/Engineering/PM/EPC/Labor Costs
    EPCmarkup = float(inputDict.get("EPCRate", 0)) / 100 * hardwareCosts
    #designCosts = float(inputDict.get("mechLabor",0))*160 + float(inputDict.get("elecLabor",0))*75 + float(inputDict.get("pmCost",0)) + EPCmarkup
    hoursDesign = 160 * math.sqrt(arraySizeDC / 1390)
    hoursElectrical = 80 * math.sqrt(arraySizeDC / 1391)
    designLabor = 65 * hoursDesign
    electricalLabor = 75 * hoursElectrical
    laborDesign = designLabor + electricalLabor + float(
        inputDict.get("pmCost", 0)) + EPCmarkup
    materialDesign = 0
    designCosts = materialDesign + laborDesign
    ### Siteprep Costs
    surveying = 2.25 * 4 * math.sqrt(landAmount * 43560)
    concrete = 8000 * math.ceil(numberInverters / 2)
    fencing = 6.75 * 4 * math.sqrt(landAmount * 43560)
    grading = 2.5 * 4 * math.sqrt(landAmount * 43560)
    landscaping = 750 * landAmount
    siteMaterial = 8000 + 600 + 5500 + 5000 + surveying + concrete + fencing + grading + landscaping + 5600
    blueprints = float(inputDict.get("mechLabor", 0)) * 12
    mobilization = float(inputDict.get("mechLabor", 0)) * 208
    mobilizationMaterial = float(inputDict.get("mechLabor", 0)) * 19.98
    siteLabor = blueprints + mobilization + mobilizationMaterial
    sitePrep = siteMaterial + siteLabor
    ### Construction Costs (Office Trailer, Skid Steer, Storage Containers, etc)
    constrEquip = 6000 + math.sqrt(landAmount) * 16200
    ### Installation Costs
    moduleAndRackingInstall = numberPanels * (15.00 + 12.50 + 1.50)
    pierDriving = 1 * arraySizeDC * 20
    balanceInstall = 1 * arraySizeDC * 100
    installCosts = moduleAndRackingInstall + pierDriving + balanceInstall + float(
        inputDict.get("elecLabor", 0)) * (72 + 60 + 70 + 10 + 5 + 30 + 70)
    ### Land Costs
    if (str(inputDict.get("landOwnership", 0)) == "Owned"
            or (str(inputDict.get("landOwnership", 0)) == "Leased")):
        landCosts = 0
    else:
        landCosts = float(inputDict.get("costAcre", 0)) * landAmount
    ######
    ### Total Costs
    ######
    totalCosts = hardwareCosts + designCosts + sitePrep + constrEquip + installCosts + landCosts
    totalFees = float(inputDict.get("devCost", 0)) / 100 * totalCosts
    outData["totalCost"] = totalCosts + totalFees + float(
        inputDict.get("interCost", 0))
    # Add to Pie Chart
    outData["costsPieChart"] = [
        ["Land", landCosts], ["Design/Engineering/PM/EPC", designCosts],
        ["PV Modules", pvModules * shipping], ["Racking", racking * shipping],
        ["Inverters & Switchgear", (inverters + gear) * shipping],
        [
            "BOS", hardwareCosts - pvModules * shipping - racking * shipping -
            (inverters + gear) * shipping
        ],
        [
            "Site Prep, Constr. Eq. and Installation",
            (siteMaterial + constrEquip) + (siteLabor + installCosts)
        ]
    ]
    # Cost per Wdc
    outData["costWdc"] = (totalCosts + totalFees + float(
        inputDict.get("interCost", 0))) / (arraySizeDC * 1000)
    outData["capFactor"] = float(outData["oneYearGenerationWh"]) / (
        inverterSizeAC * 1000 * 365.25 * 24) * 100
    ######
    ### Loans calculations for Direct, NCREB, Lease, Tax-equity, and PPA
    ######
    ### Full Ownership, Direct Loan
    #Output - Direct Loan [C]
    projectCostsDirect = 0
    #Output - Direct Loan [D]
    netFinancingCostsDirect = 0
    #Output - Direct Loan [E]
    OMInsuranceETCDirect = []
    #Output - Direct Loan [F]
    distAdderDirect = []
    #Output - Direct Loan [G]
    netCoopPaymentsDirect = []
    #Output - Direct Loan [H]
    costToCustomerDirect = []
    #Output - Direct Loan [F53]
    Rate_Levelized_Direct = 0
    ## Output - Direct Loan Formulas
    projectCostsDirect = 0
    #Output - Direct Loan [D]
    payment = pmt(
        float(inputDict.get("loanRate", 0)) / 100, loanYears,
        outData["totalCost"])
    interestDirectPI = outData["totalCost"] * float(
        inputDict.get("loanRate", 0)) / 100
    principleDirectPI = (-payment - interestDirectPI)
    patronageCapitalRetiredDPI = 0
    netFinancingCostsDirect = -(principleDirectPI + interestDirectPI -
                                patronageCapitalRetiredDPI)
    #Output - Direct Loan [E] [F] [G] [H]
    firstYearOPMainCosts = (1.25 * arraySizeDC * 12)
    firstYearInsuranceCosts = (0.37 * outData["totalCost"] / 100)
    if (inputDict.get("landOwnership", 0) == "Leased"):
        firstYearLandLeaseCosts = float(inputDict.get("costAcre",
                                                      0)) * landAmount
    else:
        firstYearLandLeaseCosts = 0
    for i in range(1, len(outData["allYearGenerationMWh"]) + 1):
        OMInsuranceETCDirect.append(
            -firstYearOPMainCosts * math.pow((1 + .01), (i - 1)) -
            firstYearInsuranceCosts * math.pow((1 + .025), (i - 1)) -
            firstYearLandLeaseCosts * math.pow((1 + .01), (i - 1)))
        distAdderDirect.append(
            float(inputDict.get("distAdder", 0)) *
            outData["allYearGenerationMWh"][i])
        netCoopPaymentsDirect.append(OMInsuranceETCDirect[i - 1] +
                                     netFinancingCostsDirect)
        costToCustomerDirect.append(
            (netCoopPaymentsDirect[i - 1] - distAdderDirect[i - 1]))
    #Output - Direct Loan [F53]
    NPVLoanDirect = npv(
        float(inputDict.get("discRate", 0)) / 100,
        [0, 0] + costToCustomerDirect)
    NPVallYearGenerationMWh = npv(
        float(inputDict.get("discRate", 0)) / 100,
        [0, 0] + outData["allYearGenerationMWh"].values())
    Rate_Levelized_Direct = -NPVLoanDirect / NPVallYearGenerationMWh
    #Master Output [Direct Loan]
    outData["levelCostDirect"] = Rate_Levelized_Direct
    outData["costPanelDirect"] = abs(NPVLoanDirect / numberPanels)
    outData["cost10WPanelDirect"] = (float(outData["costPanelDirect"]) /
                                     panelSize) * 10
    ### NCREBs Financing
    ncrebsRate = float(inputDict.get("NCREBRate", 4.060)) / 100
    ncrebBorrowingRate = 1.1 * ncrebsRate
    ncrebPaymentPeriods = 44
    ncrebCostToCustomer = []
    # TODO ASAP: FIX ARRAY OFFSETS START 0
    for i in range(1, len(outData["allYearGenerationMWh"]) + 1):
        coopLoanPayment = 2 * pmt(
            ncrebBorrowingRate / 2.0, ncrebPaymentPeriods,
            outData["totalCost"]) if i <= ncrebPaymentPeriods / 2 else 0
        ncrebsCredit = -0.7 * (
            ipmt(ncrebsRate / 2, 2 * i -
                 1, ncrebPaymentPeriods, outData["totalCost"]) +
            ipmt(ncrebsRate / 2, 2 * i, ncrebPaymentPeriods,
                 outData["totalCost"])) if i <= ncrebPaymentPeriods / 2 else 0
        financingCost = ncrebsCredit + coopLoanPayment
        omCost = OMInsuranceETCDirect[i - 1]
        netCoopPayments = financingCost + omCost
        distrAdder = distAdderDirect[i - 1]
        costToCustomer = netCoopPayments + distrAdder
        ncrebCostToCustomer.append(costToCustomer)
    NPVLoanNCREB = npv(
        float(inputDict.get("discRate", 0)) / 100,
        [0, 0] + ncrebCostToCustomer)
    Rate_Levelized_NCREB = -NPVLoanNCREB / NPVallYearGenerationMWh
    outData["levelCostNCREB"] = Rate_Levelized_NCREB
    outData["costPanelNCREB"] = abs(NPVLoanNCREB / numberPanels)
    outData["cost10WPanelNCREB"] = (float(outData["costPanelNCREB"]) /
                                    panelSize) * 10
    ### Lease Buyback Structure
    #Output - Lease [C]
    projectCostsLease = outData["totalCost"]
    #Output - Lease [D]
    leasePaymentsLease = []
    #Output - Lease [E]
    OMInsuranceETCLease = OMInsuranceETCDirect
    #Output - Lease [F]
    distAdderLease = distAdderDirect
    #Output - Lease [G]
    netCoopPaymentsLease = []
    #Output - Lease [H]
    costToCustomerLease = []
    #Output - Lease [H44]
    NPVLease = 0
    #Output - Lease [H49]
    Rate_Levelized_Lease = 0
    ## Tax Lease Formulas
    #Output - Lease [D]
    for i in range(0, 12):
        leaseRate = float(inputDict.get("taxLeaseRate", 0)) / 100.0
        if i > 8:  # Special behavior in later years:
            leaseRate = leaseRate - 0.0261
        leasePaymentsLease.append(-1 * projectCostsLease /
                                  ((1.0 - (1.0 / (1.0 + leaseRate)**12)) /
                                   (leaseRate)))
    # Last year is different.
    leasePaymentsLease[11] += -0.2 * projectCostsLease
    for i in range(12, 25):
        leasePaymentsLease.append(0)
    #Output - Lease [G]	[H]
    for i in range(1, len(outData["allYearGenerationMWh"]) + 1):
        netCoopPaymentsLease.append(OMInsuranceETCLease[i - 1] +
                                    leasePaymentsLease[i - 1])
        costToCustomerLease.append(netCoopPaymentsLease[i - 1] -
                                   distAdderLease[i - 1])
    #Output - Lease [H44]. Note the extra year at the zero point to get the discounting right.
    NPVLease = npv(
        float(inputDict.get("discRate", 0)) / 100,
        [0, 0] + costToCustomerLease)
    #Output - Lease [H49] (Levelized Cost Three Loops)
    Rate_Levelized_Lease = -NPVLease / NPVallYearGenerationMWh
    #Master Output [Lease]
    outData["levelCostTaxLease"] = Rate_Levelized_Lease
    outData["costPanelTaxLease"] = abs(NPVLease / numberPanels)
    outData["cost10WPanelTaxLease"] = (float(outData["costPanelTaxLease"]) /
                                       float(panelSize)) * 10

    ### Tax Equity Flip Structure
    # Tax Equity Flip Function
    def taxEquityFlip(PPARateSixYearsTE, discRate, totalCost,
                      allYearGenerationMWh, distAdderDirect, loanYears,
                      firstYearLandLeaseCosts, firstYearOPMainCosts,
                      firstYearInsuranceCosts, numberPanels):
        '''Compute tax-equity-flip financing results for the solar project.

        This is a transcription of a financing spreadsheet: the bracketed
        letters in the comments ([C], [D], ..., [AW21]) appear to reference
        that spreadsheet's columns/cells.

        Args:
            PPARateSixYearsTE: PPA rate paid to the SPE during years 1-6.
            discRate: NOT used inside this function; the discount rate is
                re-read from the enclosing scope's inputDict instead.
            totalCost: total project cost in dollars.
            allYearGenerationMWh: yearly generation dict, keyed 1..N (not 0-based).
            distAdderDirect: per-year distribution-adder values (0-based list).
            loanYears: loan term in years for the co-op cash payment.
            firstYearLandLeaseCosts / firstYearOPMainCosts /
                firstYearInsuranceCosts: year-1 recurring costs, escalated
                internally at 1% / 1% / 2.5% per year respectively.
            numberPanels: NOT used inside this function; per-panel math is
                done by the caller.

        Returns:
            tuple: (cumulativeIRR, Rate_Levelized_TaxEquity, NPVLoanTaxEquity)

        NOTE(review): also reads inputDict and NPVallYearGenerationMWh from
        the enclosing scope, so it cannot be lifted out of the enclosing
        function as-is.
        '''
        #Output Tax Equity Flip [C]
        # Co-op funds 47% of the project; tax-equity investor funds the other 53%.
        coopInvestmentTaxEquity = -totalCost * (1 - 0.53)
        #Output Tax Equity Flip [D]
        financeCostCashTaxEquity = 0
        #Output Tax Equity Flip [E]
        cashToSPEOForPPATE = []
        #Output Tax Equity Flip [F]
        derivedCostEnergyTE = 0
        #Output Tax Equity Flip [G]
        OMInsuranceETCTE = []
        #Output Tax Equity Flip [H]
        cashFromSPEToBlockerTE = []
        #Output Tax Equity Flip [I]
        cashFromBlockerTE = 0
        #Output Tax Equity Flip [J]
        distAdderTaxEquity = distAdderDirect
        #Output Tax Equity Flip [K]
        netCoopPaymentsTaxEquity = []
        #Output Tax Equity Flip [L]
        costToCustomerTaxEquity = []
        #Output Tax Equity Flip [L64]
        NPVLoanTaxEquity = 0
        #Output Tax Equity Flip [F72]
        Rate_Levelized_Equity = 0
        ## Tax Equity Flip Formulas
        #Output Tax Equity Flip [D]
        #TEI Calcs [E]
        # NOTE(review): financeCostOfCashTE is assigned but never used again.
        # If coopFinanceRateTE were ever 0, `payment` would not be assigned in
        # this branch and the line after the if/else would pick up a stale
        # `payment` from the enclosing scope; the rate is hard-coded to 2.7%
        # here, so the else branch always runs in practice.
        financeCostOfCashTE = 0
        coopFinanceRateTE = 2.7 / 100
        if (coopFinanceRateTE == 0):
            financeCostOfCashTE = 0
        else:
            payment = pmt(coopFinanceRateTE, loanYears,
                          -coopInvestmentTaxEquity)
        financeCostCashTaxEquity = payment
        #Output Tax Equity Flip [E]
        # PPA revenue flows to the SPE only during the first six years.
        SPERevenueTE = []
        for i in range(1, len(allYearGenerationMWh) + 1):
            SPERevenueTE.append(PPARateSixYearsTE * allYearGenerationMWh[i])
            if ((i >= 1) and (i <= 6)):
                cashToSPEOForPPATE.append(-SPERevenueTE[i - 1])
            else:
                cashToSPEOForPPATE.append(0)
        #Output Tax Equity Flip [F]
        derivedCostEnergyTE = cashToSPEOForPPATE[0] / allYearGenerationMWh[1]
        #Output Tax Equity Flip [G]
        #TEI Calcs [F]	[U] [V]
        landLeaseTE = []
        OMTE = []
        insuranceTE = []
        # NOTE(review): the land lease is appended with a POSITIVE sign here,
        # unlike the Direct-loan section where it is subtracted — confirm
        # against the source spreadsheet.
        for i in range(1, len(allYearGenerationMWh) + 1):
            landLeaseTE.append(firstYearLandLeaseCosts * math.pow((1 + .01),
                                                                  (i - 1)))
            OMTE.append(-firstYearOPMainCosts * math.pow((1 + .01), (i - 1)))
            insuranceTE.append(-firstYearInsuranceCosts * math.pow((1 + .025),
                                                                   (i - 1)))
            # Years 1-6: O&M and insurance are borne by the SPE, so the co-op
            # only sees the land lease; from year 7 on, all three apply.
            if (i < 7):
                OMInsuranceETCTE.append(float(landLeaseTE[i - 1]))
            else:
                OMInsuranceETCTE.append(
                    float(OMTE[i - 1]) + float(insuranceTE[i - 1]) +
                    float(landLeaseTE[i - 1]))
        #Output Tax Equity Flip [H]
        #TEI Calcs [T]
        SPEMgmtFeeTE = []
        EBITDATE = []
        EBITDATEREDUCED = []
        managementFee = 10000
        for i in range(1, len(SPERevenueTE) + 1):
            SPEMgmtFeeTE.append(-managementFee * math.pow((1 + .01), (i - 1)))
            EBITDATE.append(
                float(SPERevenueTE[i - 1]) + float(OMTE[i - 1]) +
                float(insuranceTE[i - 1]) + float(SPEMgmtFeeTE[i - 1]))
            # Years 1-6: 1% of EBITDA passes to the blocker; later years feed
            # the post-flip list used for the buyout valuation below.
            if (i <= 6):
                cashFromSPEToBlockerTE.append(float(EBITDATE[i - 1]) * .01)
            else:
                cashFromSPEToBlockerTE.append(0)
                EBITDATEREDUCED.append(EBITDATE[i - 1])
        #Output Tax Equity Flip [I]
        #TEI Calcs [Y21]
        # Buyout = 5% of the 12%-discounted post-flip EBITDA stream.
        cashRevenueTE = -totalCost * (1 - 0.53)
        buyoutAmountTE = 0
        for i in range(1, len(EBITDATEREDUCED) + 1):
            buyoutAmountTE = buyoutAmountTE + EBITDATEREDUCED[i - 1] / (
                math.pow(1 + 0.12, i))
        buyoutAmountTE = buyoutAmountTE * 0.05
        cashFromBlockerTE = -(buyoutAmountTE) + 0.0725 * cashRevenueTE
        #Output Tax Equity Flip [K] [L]
        # Year 6 (the flip year) additionally receives the blocker buyout cash.
        for i in range(1, len(allYearGenerationMWh) + 1):
            if (i == 6):
                netCoopPaymentsTaxEquity.append(financeCostCashTaxEquity +
                                                cashToSPEOForPPATE[i - 1] +
                                                cashFromSPEToBlockerTE[i - 1] +
                                                OMInsuranceETCTE[i - 1] +
                                                cashFromBlockerTE)
            else:
                netCoopPaymentsTaxEquity.append(financeCostCashTaxEquity +
                                                cashFromSPEToBlockerTE[i - 1] +
                                                cashToSPEOForPPATE[i - 1] +
                                                OMInsuranceETCTE[i - 1])
            costToCustomerTaxEquity.append(netCoopPaymentsTaxEquity[i - 1] -
                                           distAdderTaxEquity[i - 1])
        #Output Tax Equity Flip [L37]
        # The two leading zeros shift the cash flows so discounting starts in
        # the right period (same convention as the other financing sections).
        NPVLoanTaxEquity = npv(
            float(inputDict.get("discRate", 0)) / 100,
            [0, 0] + costToCustomerTaxEquity)
        #Output - Tax Equity [F42]
        # NPVallYearGenerationMWh comes from the enclosing scope.
        Rate_Levelized_TaxEquity = -NPVLoanTaxEquity / NPVallYearGenerationMWh
        #TEI Calcs - Achieved Return [AW 21]
        #[AK]
        # 6-year MACRS depreciation schedule (20/32/19.2/11.52/11.52/5.76%)
        # on the ITC-reduced basis.
        MACRDepreciation = []
        MACRDepreciation.append(-0.99 * 0.2 *
                                (totalCost - totalCost * 0.5 * 0.9822 * 0.3))
        MACRDepreciation.append(-0.99 * 0.32 *
                                (totalCost - totalCost * 0.5 * 0.9822 * 0.3))
        MACRDepreciation.append(-0.99 * 0.192 *
                                (totalCost - totalCost * 0.5 * 0.9822 * 0.3))
        MACRDepreciation.append(-0.99 * 0.1152 *
                                (totalCost - totalCost * 0.5 * 0.9822 * 0.3))
        MACRDepreciation.append(-0.99 * 0.1152 *
                                (totalCost - totalCost * 0.5 * 0.9822 * 0.3))
        MACRDepreciation.append(-0.99 * 0.0576 *
                                (totalCost - totalCost * 0.5 * 0.9822 * 0.3))
        #[AI] [AL]	[AN]
        cashRevenueTEI = []  #[AI]
        slDepreciation = []  #[AL]
        totalDistributions = []  #[AN]
        cashRevenueTEI.append(-totalCost * 0.53)
        for i in range(1, 7):
            cashRevenueTEI.append(EBITDATE[i - 1] * 0.99)
            slDepreciation.append(totalCost / 25)
            totalDistributions.append(-cashRevenueTEI[i])
        #[AJ]
        # Investment Tax Credit: 30% of the 98.22% eligible basis, 99% share.
        ITC = totalCost * 0.9822 * 0.3 * 0.99
        #[AM]
        taxableIncLoss = [0]
        taxableIncLoss.append(cashRevenueTEI[1] + MACRDepreciation[0])
        #[AO]
        # Capital account is floored at zero each year.
        capitalAcct = []
        capitalAcct.append(totalCost * 0.53)
        condition = capitalAcct[0] - 0.5 * ITC + taxableIncLoss[
            1] + totalDistributions[0]
        if condition > 0:
            capitalAcct.append(condition)
        else:
            capitalAcct.append(0)
        #[AQ]
        ratioTE = [0]
        #[AP]
        reallocatedIncLoss = []
        #AO-1 + AN + AI + AK + AJ
        for i in range(0, 5):
            reallocatedIncLoss.append(capitalAcct[i + 1] +
                                      totalDistributions[i + 1] +
                                      MACRDepreciation[i + 1] +
                                      cashRevenueTEI[i + 2])
            ratioTE.append(reallocatedIncLoss[i] /
                           (cashRevenueTEI[i + 2] + MACRDepreciation[i + 1]))
            taxableIncLoss.append(
                cashRevenueTEI[i + 2] + MACRDepreciation[i + 1] -
                ratioTE[i + 1] *
                (MACRDepreciation[i + 1] - totalDistributions[i + 1]))
            condition = capitalAcct[i + 1] + taxableIncLoss[
                i + 2] + totalDistributions[i + 1]
            if condition > 0:
                capitalAcct.append(condition)
            else:
                capitalAcct.append(0)
        #[AR]
        # 35% tax rate applied to each year's taxable income/loss.
        taxesBenefitLiab = [0]
        for i in range(1, 7):
            taxesBenefitLiab.append(-taxableIncLoss[i] * 0.35)
        #[AS] [AT]
        buyoutAmount = 0
        taxFromBuyout = 0
        for i in range(0, len(EBITDATEREDUCED)):
            buyoutAmount = buyoutAmount + .05 * EBITDATEREDUCED[i] / (math.pow(
                1.12, (i + 1)))
        taxFromBuyout = -buyoutAmount * 0.35
        #[AU] [AV]
        # Year 1 adds the ITC; year 6 adds buyout proceeds and buyout tax.
        totalCashTax = []
        cumulativeCashTax = [0]
        for i in range(0, 7):
            if i == 1:
                totalCashTax.append(cashRevenueTEI[i] + ITC +
                                    taxesBenefitLiab[i] + 0 + 0)
                cumulativeCashTax.append(cumulativeCashTax[i] +
                                         totalCashTax[i])
            elif i == 6:
                totalCashTax.append(cashRevenueTEI[i] + 0 +
                                    taxesBenefitLiab[i] + buyoutAmount +
                                    taxFromBuyout)
                cumulativeCashTax.append(cumulativeCashTax[i] +
                                         totalCashTax[i] + buyoutAmount +
                                         taxFromBuyout)
            else:
                totalCashTax.append(cashRevenueTEI[i] + 0 +
                                    taxesBenefitLiab[i] + 0 + 0)
                cumulativeCashTax.append(cumulativeCashTax[i] +
                                         totalCashTax[i])
        #[AW21]
        # The investor's achieved IRR only counts if the cumulative cash
        # position ends positive; otherwise report 0.
        if (cumulativeCashTax[7] > 0):
            cumulativeIRR = round(irr(totalCashTax), 4)
        else:
            cumulativeIRR = 0
        # Deleteme: Variable Dump for debugging
        # variableDump = {}
        # variableDump["TaxEquity"] = {}
        # variableDump["TaxEquity"]["coopInvestmentTaxEquity"] = coopInvestmentTaxEquity
        # variableDump["TaxEquity"]["financeCostCashTaxEquity"] = financeCostCashTaxEquity
        # variableDump["TaxEquity"]["cashToSPEOForPPATE"] = cashToSPEOForPPATE
        # variableDump["TaxEquity"]["derivedCostEnergyTE"] = derivedCostEnergyTE
        # variableDump["TaxEquity"]["OMInsuranceETCTE"] = OMInsuranceETCTE
        # variableDump["TaxEquity"]["cashFromSPEToBlockerTE"] = cashFromSPEToBlockerTE
        # variableDump["TaxEquity"]["cashFromBlockerTE"] = cashFromBlockerTE
        # variableDump["TaxEquity"]["distAdderTaxEquity"] = distAdderTaxEquity
        # variableDump["TaxEquity"]["netCoopPaymentsTaxEquity"] = netCoopPaymentsTaxEquity
        # variableDump["TaxEquity"]["NPVLoanTaxEquity"] = NPVLoanTaxEquity
        return cumulativeIRR, Rate_Levelized_TaxEquity, NPVLoanTaxEquity

    # Function Calls Mega Sized Tax Equity Function Above
    z = 0
    PPARateSixYearsTE = z / 100
    nGoal = float(inputDict.get("taxEquityReturn", 0)) / 100
    nValue = 0
    for p in range(0, 3):
        while ((z < 50000) and (nValue < nGoal)):
            achievedReturnTE, Rate_Levelized_TaxEquity, NPVLoanTaxEquity = taxEquityFlip(
                PPARateSixYearsTE, inputDict.get("discRate", 0),
                outData["totalCost"], outData["allYearGenerationMWh"],
                distAdderDirect, loanYears, firstYearLandLeaseCosts,
                firstYearOPMainCosts, firstYearInsuranceCosts, numberPanels)
            nValue = achievedReturnTE
            z = z + math.pow(10, p)
            PPARateSixYearsTE = z / 100.0
    z = z - math.pow(10, p)
    PPARateSixYearsTE = z / 100
    #Master Output [Tax Equity]
    outData["levelCostTaxEquity"] = Rate_Levelized_TaxEquity
    outData["costPanelTaxEquity"] = abs(NPVLoanTaxEquity / numberPanels)
    outData["cost10WPanelTaxEquity"] = (float(outData["costPanelTaxEquity"]) /
                                        panelSize) * 10
    ### PPA Comparison
    #Output - PPA [F]
    distAdderPPA = distAdderDirect
    #Output - PPA [G]
    netCoopPaymentsPPA = []
    #Output - PPA [H]
    costToCustomerPPA = []
    #Output - PPA [I]
    costToCustomerPPA = []
    #Output - PPA [H40]
    NPVLoanPPA = 0
    #Output - PPA [I40]
    Rate_Levelized_PPA = 0
    ## PPA Formulas
    #Output - PPA [G] [H]
    for i in range(1, len(outData["allYearGenerationMWh"]) + 1):
        netCoopPaymentsPPA.append(
            -outData["allYearGenerationMWh"][i] *
            float(inputDict.get("firstYearEnergyCostPPA", 0)) * math.pow(
                (1 + float(inputDict.get("annualEscRatePPA", 0)) / 100),
                (i - 1)))
        costToCustomerPPA.append(netCoopPaymentsPPA[i - 1] -
                                 distAdderPPA[i - 1])
    #Output - PPA [H58]
    NPVLoanPPA = npv(
        float(inputDict.get("discRate", 0)) / 100, [0, 0] + costToCustomerPPA)
    #Output - PPA [F65]
    Rate_Levelized_PPA = -NPVLoanPPA / NPVallYearGenerationMWh
    #Master Output [PPA]
    outData["levelCostPPA"] = Rate_Levelized_PPA
    outData["firstYearCostKWhPPA"] = float(
        inputDict.get("firstYearEnergyCostPPA", 0))
    outData["yearlyEscalationPPA"] = float(inputDict.get(
        "annualEscRatePPA", 0))
    # Add all Levelized Costs to Output
    outData["LevelizedCosts"] = [["Direct Loan", Rate_Levelized_Direct],
                                 ["NCREBs Financing", Rate_Levelized_NCREB],
                                 ["Lease Buyback", Rate_Levelized_Lease],
                                 ["Tax Equity Flip", Rate_Levelized_TaxEquity]]
    outData["LevelizedCosts"].append({
        "name": "PPA Comparison",
        "y": Rate_Levelized_PPA,
        "color": "gold"
    })
    # Stdout/stderr.
    outData["stdout"] = "Success"
    outData["stderr"] = ""
    return outData
# Esempio n. 26
# 0
def work(modelDir, inputDict):
    ''' Run the model in its directory.'''
    feederName = [x for x in os.listdir(modelDir)
                  if x.endswith('.omd')][0][:-4]
    inputDict["feederName1"] = feederName
    zipCode = "59001"  #TODO get zip code from the PV and Load input file

    #Value check for attackVariable
    if inputDict.get("attackVariable", "None") == "None":
        attackAgentType = "None"
    else:
        attackAgentType = inputDict['attackVariable']

    # Value check for train
    if inputDict.get("trainAgent", "") == "True":
        trainAgentValue = True
    else:
        trainAgentValue = False

    # create simLengthValue to represent number of steps in simulation - will be manipulated by number of rows in load solar data csv file
    simLengthValue = 0

    #create startStep to represent which step pyCigar should start on - default = 100
    startStep = 100

    #None check for simulation length
    # if inputDict.get("simLength", "None") == "None":
    # 	simLengthValue = None
    # else:
    # 	simLengthValue = int(simLengthValue)

    #None check for simulation length units
    if inputDict.get("simLengthUnits", "None") == "None":
        simLengthUnitsValue = None
    else:
        simLengthUnitsValue = inputDict["simLengthUnits"]
    #None check for simulation start date
    if inputDict.get("simStartDate", "None") == "None":
        simStartDateTimeValue = None
        simStartDateValue = None
        simStartTimeValue = None
    else:
        simStartDateTimeValue = inputDict["simStartDate"]
        simStartDateValue = simStartDateTimeValue.split('T')[0]
        simStartTimeValue = simStartDateTimeValue.split('T')[1]

    inputDict["climateName"] = weather.zipCodeToClimateName(zipCode)
    shutil.copy(
        pJoin(__neoMetaModel__._omfDir, "data", "Climate",
              inputDict["climateName"] + ".tmy2"),
        pJoin(modelDir, "climate.tmy2"))

    def convertInputs():
        '''Write the PyCIGAR input files into modelDir/PyCIGAR_inputs.

        Creates the folder (if needed) and writes misc_inputs.csv,
        circuit.dss, load_solar_data.csv, breakpoints.csv and
        defenseAgent.pb from the corresponding inputDict entries.

        Returns:
            int: number of data rows in load_solar_data.csv (row count
            minus the header row), used as the simulation length.

        Raises:
            Exception: if load_solar_data.csv cannot be re-read or parsed.
        '''
        inputsDir = pJoin(modelDir, "PyCIGAR_inputs")
        # Create the PyCIGAR_inputs folder to store the input files to run
        # PyCIGAR. Only FileExistsError is tolerated; any other OS failure
        # (permissions, missing modelDir, ...) now surfaces instead of being
        # swallowed by a bare except.
        try:
            os.mkdir(inputsDir)
        except FileExistsError:
            print("PyCIGAR_inputs folder already exists!")
        # Populate misc_inputs.csv.
        with open(pJoin(inputsDir, "misc_inputs.csv"), "w") as miscFile:
            miscFile.write(inputDict['miscFile'])
        # Circuit definition file.
        dss_filename = "circuit.dss"
        with open(pJoin(inputsDir, dss_filename), "w") as dssFile:
            dssFile.write(inputDict['dssFile'])
        # Load and PV time-series data.
        with open(pJoin(inputsDir, "load_solar_data.csv"), "w") as loadPVFile:
            loadPVFile.write(inputDict['loadPV'])
        # Re-read the file we just wrote to count its rows; the first row is
        # treated as a header, so simulation length = rowCount - 1.
        try:
            with open(pJoin(inputsDir, "load_solar_data.csv"),
                      newline='') as inFile:
                rowCount = sum(1 for _row in csv.reader(inFile))
            simLengthValue = rowCount - 1
        except (OSError, csv.Error) as e:
            # Narrowed from a bare except: previously ANY error (even a
            # KeyError on inputDict) was reported as a bad CSV, and the real
            # cause was lost. Chain the cause so it shows in tracebacks.
            #TODO change to an appropriate warning message
            errorMessage = "CSV file is incorrect format. Please see valid format definition at <a target='_blank' href='https://github.com/dpinney/omf/wiki/Models-~-demandResponse#walkthrough'>OMF Wiki demandResponse</a>"
            raise Exception(errorMessage) from e
        # Volt-var/volt-watt breakpoints file.
        with open(pJoin(inputsDir, "breakpoints.csv"),
                  "w") as breakpointsFile:
            breakpointsFile.write(inputDict['breakpoints'])
        # Defense agent HDF5/pb file, if one was uploaded.
        # NOTE(review): written in text mode even though .pb is normally a
        # binary format — inputDict['defenseVariable'] is presumably already
        # a str; confirm against the upload handler.
        with open(pJoin(inputsDir, "defenseAgent.pb"),
                  "w") as defenseVariableFile:
            if inputDict['defenseVariable'] == "":
                defenseVariableFile.write(
                    "No Defense Agent Variable File Uploaded")
            else:
                defenseVariableFile.write(inputDict['defenseVariable'])
        return simLengthValue

    simLengthValue = convertInputs()

    #simLengthAdjusted accounts for the offset by startStep
    simLengthAdjusted = simLengthValue - startStep
    #hard-coding simLengthAdjusted for testing purposes
    simLengthAdjusted = 750

    outData = {}
    # Std Err and Std Out
    outData['stderr'] = "This should be stderr"  #rawOut['stderr']
    outData['stdout'] = "This should be stdout"  #rawOut['stdout']

    # Create list of timestamps for simulation steps
    outData['timeStamps'] = []
    start_time = dt_parser.isoparse(simStartDateTimeValue)
    for single_datetime in (start_time + timedelta(seconds=n)
                            for n in range(simLengthAdjusted)):
        single_datetime_str = single_datetime.strftime("%Y-%m-%d %H:%M:%S%z")
        outData['timeStamps'].append(single_datetime_str)

    # Day/Month Aggregation Setup:
    stamps = outData.get('timeStamps', [])
    level = inputDict.get('simLengthUnits', 'seconds')

    # TODO: Create/populate Climate data without gridlab-d
    outData['climate'] = {}
    outData['allMeterVoltages'] = {}
    outData['allMeterVoltages']['Min'] = [0] * int(simLengthAdjusted)
    outData['allMeterVoltages']['Mean'] = [0] * int(simLengthAdjusted)
    outData['allMeterVoltages']['StdDev'] = [0] * int(simLengthAdjusted)
    outData['allMeterVoltages']['Max'] = [0] * int(simLengthAdjusted)
    # Power Consumption
    outData['Consumption'] = {}
    # Set default value to be 0, avoiding missing value when computing Loads
    outData['Consumption']['Power'] = [0] * int(simLengthAdjusted)
    outData['Consumption']['Losses'] = [0] * int(simLengthAdjusted)
    outData['Consumption']['DG'] = [0] * int(simLengthAdjusted)

    outData['swingTimestamps'] = []
    outData['swingTimestamps'] = outData['timeStamps']

    # Aggregate up the timestamps:
    if level == 'days':
        outData['timeStamps'] = aggSeries(stamps, stamps, lambda x: x[0][0:10],
                                          'days')
    elif level == 'months':
        outData['timeStamps'] = aggSeries(stamps, stamps, lambda x: x[0][0:7],
                                          'months')

    def runPyCIGAR():
        '''Invoke the PyCIGAR simulator on the files in PyCIGAR_inputs,
        writing its results into modelDir/pycigarOutput.'''
        # Make sure the output folder exists before PyCIGAR writes to it.
        try:
            os.mkdir(pJoin(modelDir, "pycigarOutput"))
        except FileExistsError:
            print("pycigarOutput folder already exists!")
        except:
            print("Error occurred creating pycigarOutput folder")
        # Imported here (not at module top) so the model module still loads
        # when pycigar is not installed.
        import pycigar
        # Scenario selection:
        #   2 = base run (no training, no uploaded defense agent) — default
        #   0 = train a defense agent and emit its files
        #   1 = run with an uploaded defense agent
        scenario = 2
        defenseAgentPath = None  #TODO: set to a real path once defense agent upload detection is wired up
        if trainAgentValue:
            scenario = 0
        elif defenseAgentPath is not None:
            # Currently unreachable: defenseAgentPath is always None above.
            defenseAgentPath = modelDir + "/PyCIGAR_inputs/defenseAgent.pb"
            scenario = 1
        # TODO how to factor attackAgentType into pycigar inputs
        inputBase = modelDir + "/PyCIGAR_inputs/"
        pycigar.main(
            inputBase + "misc_inputs.csv",
            inputBase + "circuit.dss",
            inputBase + "load_solar_data.csv",
            inputBase + "breakpoints.csv",
            scenario,
            defenseAgentPath,
            modelDir + "/pycigarOutput/",
            start=startStep,
            duration=simLengthAdjusted,
            hack_start=250,
            hack_end=None,
            percentage_hack=0.45)

    def convertOutputs():
        """Map pycigarOutput/pycigar_output_specs.json onto the enclosing outData dict.

        Reads from the enclosing scope: modelDir, outData. Assumes
        outData["Consumption"] was initialized by the caller before this runs
        (Power/Losses/DG keys are written into it here).
        """
        # Read in the pycigar-outputted JSON.
        with open(
                pJoin(modelDir, "pycigarOutput", "pycigar_output_specs.json"),
                'r') as f:
            pycigarJson = json.load(f)

        outData["allMeterVoltages"] = pycigarJson["allMeterVoltages"]

        # HACK! Units are actually kW. Needs to be fixed in pyCigar.
        outData["Consumption"]["Power"] = pycigarJson["Consumption"][
            "Power Substation (W)"]
        outData["Consumption"]["Losses"] = pycigarJson["Consumption"][
            "Losses Total (W)"]
        # Sign-flip DG so it plots as generation rather than load.
        outData["Consumption"]["DG"] = [
            -1.0 * x for x in pycigarJson["Consumption"]["DG Output (W)"]
        ]

        outData["powerFactors"] = pycigarJson["Substation Power Factor (%)"]
        outData["swingVoltage"] = pycigarJson["Substation Top Voltage(V)"]
        outData["downlineNodeVolts"] = pycigarJson[
            "Substation Bottom Voltage(V)"]
        outData["minVoltBand"] = pycigarJson[
            "Substation Regulator Minimum Voltage(V)"]
        outData["maxVoltBand"] = pycigarJson[
            "Substation Regulator Maximum Voltage(V)"]

        # Collect circuit object names by their key prefixes.
        regNameList = [k for k in pycigarJson if k.startswith('Regulator_')]
        capNameList = [k for k in pycigarJson if k.startswith('Capacitor_')]

        # Regulator tap positions: one series per connected phase.
        for reg_name in regNameList:
            outData[reg_name] = {}
            regPhaseValue = pycigarJson[reg_name]["RegPhases"]
            if 'A' in regPhaseValue:
                outData[reg_name]["RegTapA"] = pycigarJson[reg_name]["creg1a"]
            if 'B' in regPhaseValue:
                outData[reg_name]["RegTapB"] = pycigarJson[reg_name]["creg1b"]
            if 'C' in regPhaseValue:
                outData[reg_name]["RegTapC"] = pycigarJson[reg_name]["creg1c"]
            outData[reg_name]["RegPhases"] = regPhaseValue

        # Inverter voltage and real/reactive power, keyed by inverter name.
        inverter_output_dict = {}
        for inv_dict in pycigarJson["Inverter Outputs"]:
            inverter_output_dict[inv_dict["Name"]] = {
                "Voltage": inv_dict["Voltage (V)"],
                "Power_Real": inv_dict["Power Output (W)"],
                "Power_Imag": inv_dict["Reactive Power Output (VAR)"],
            }
        outData["Inverter_Outputs"] = inverter_output_dict

        # Capacitor switch states - Need one on test circuit first!
        # (The old `[0] * int(simLengthValue)` placeholders were dead code —
        # they were overwritten on the very next line — so they are dropped.)
        for cap_name in capNameList:
            outData[cap_name] = {}
            capPhaseValue = pycigarJson[cap_name]["CapPhases"]
            if 'A' in capPhaseValue:
                outData[cap_name]['Cap1A'] = pycigarJson[cap_name]['switchA']
            if 'B' in capPhaseValue:
                outData[cap_name]['Cap1B'] = pycigarJson[cap_name]['switchB']
            if 'C' in capPhaseValue:
                outData[cap_name]['Cap1C'] = pycigarJson[cap_name]['switchC']
            outData[cap_name]["CapPhases"] = capPhaseValue

        outData["stdout"] = pycigarJson["stdout"]

    runPyCIGAR()
    convertOutputs()
    return outData
Esempio n. 27
0
try:
    # Check whether model exist or not
    # NOTE(review): this snippet is truncated — the matching except clause is
    # not visible in this chunk.
    if not os.path.isdir(modelDir):
        os.makedirs(modelDir)
        inputDict["created"] = str(dt.datetime.now())
    # MAYBEFIX: remove this data dump. Check showModel in web.py and renderTemplate()
    with open(pJoin(modelDir, "allInputData.json"), "w") as inputFile:
        json.dump(inputDict, inputFile, indent=4)
    #Set static input data
    simLength = 8760  # one year, hourly
    simStartDate = "2013-01-01"
    # Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
    startDateTime = simStartDate + " 00:00:00 UTC"
    simLengthUnits = "hours"
    # Associate zipcode to climate data
    inputDict["climateName"], latforpvwatts = zipCodeToClimateName(
        inputDict["zipCode"])
    inverterSizeAC = float(inputDict.get("inverterSize", 0))
    # "-" sentinel: derive DC array size from the AC inverter rating.
    if (inputDict.get("systemSize", 0) == "-"):
        arraySizeDC = 1.3908 * inverterSizeAC
    else:
        arraySizeDC = float(inputDict.get("systemSize", 0))
    # Panel count at 305 W per panel (arraySizeDC is in kW).
    numberPanels = (arraySizeDC * 1000 / 305)
    # Set constants
    panelSize = 305
    trackingMode = 0
    rotlim = 45.0
    gamma = 0.45
    # "-" sentinel: tilt defaults to the site latitude returned above.
    if (inputDict.get("tilt", 0) == "-"):
        manualTilt = latforpvwatts
    else:
        manualTilt = float(inputDict.get("tilt", 0))
Esempio n. 28
0
def work(modelDir, inputDict):
    ''' Run the model in its directory.

    Pipeline (Python 2): write GFM inputs from the feeder .omd and user
    inputs, run the GFM Fragility.jar solver, run GridLAB-D to produce
    xrMatrices, run the RDT solver, then run a second GridLAB-D model with
    RDT's design changes applied. Returns the outData dict for display.
    '''
    outData = {}
    # Use the first .omd file found in the model directory as the feeder.
    feederName = [x for x in os.listdir(modelDir)
                  if x.endswith('.omd')][0][:-4]
    inputDict["feederName1"] = feederName
    # Persist the user-supplied weather impacts so the GFM jar can read them.
    with open(pJoin(modelDir, inputDict['weatherImpactsFileName']),
              'w') as hazardFile:
        hazardFile.write(inputDict['weatherImpacts'])
    with open(pJoin(modelDir, feederName + '.omd'), "r") as jsonIn:
        feederModel = json.load(jsonIn)
    # Create GFM input file.
    print "RUNNING GFM FOR", modelDir
    gfmInputTemplate = {
        'phase_variation': float(inputDict['phaseVariation']),
        'chance_constraint': float(inputDict['chanceConstraint']),
        'critical_load_met': float(inputDict['criticalLoadMet']),
        'total_load_met':
        1.0,  #(float(inputDict['criticalLoadMet']) + float(inputDict['nonCriticalLoadMet'])),
        'xrMatrices': inputDict["xrMatrices"],
        'maxDGPerGenerator': float(inputDict["maxDGPerGenerator"]),
        'dgUnitCost': float(inputDict["dgUnitCost"]),
        'newLineCandidates': inputDict['newLineCandidates'],
        'hardeningCandidates': inputDict['hardeningCandidates'],
        'switchCandidates': inputDict['switchCandidates'],
        'hardeningUnitCost': inputDict['hardeningUnitCost'],
        'switchCost': inputDict['switchCost'],
        'generatorCandidates': inputDict['generatorCandidates'],
        'lineUnitCost': inputDict['lineUnitCost']
    }
    gfmJson = convertToGFM(gfmInputTemplate, feederModel)
    gfmInputFilename = 'gfmInput.json'
    with open(pJoin(modelDir, gfmInputFilename), "w") as outFile:
        json.dump(gfmJson, outFile, indent=4)
    # Run GFM
    gfmBinaryPath = pJoin(__neoMetaModel__._omfDir, 'solvers', 'gfm',
                          'Fragility.jar')
    proc = subprocess.Popen([
        'java', '-jar', gfmBinaryPath, '-r', gfmInputFilename, '-wf',
        inputDict['weatherImpactsFileName'], '-num', '3'
    ],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=modelDir)
    (stdout, stderr) = proc.communicate()
    with open(pJoin(modelDir, "gfmConsoleOut.txt"), "w") as gfmConsoleOut:
        gfmConsoleOut.write(stdout)
    # HACK: rename the hardcoded gfm output
    rdtInputFilePath = pJoin(modelDir, 'rdtInput.json')
    #fix for windows web server hangup
    # NOTE(review): the assignment above is immediately overwritten here and
    # the os.rename below stays disabled, so rdt_OUTPUT.json is used in place.
    rdtInputFilePath = pJoin(modelDir, 'rdt_OUTPUT.json')
    #os.rename(pJoin(modelDir,'rdt_OUTPUT.json'),rdtInputFilePath)
    # Pull GFM input data on lines and generators for HTML presentation.
    with open(rdtInputFilePath, 'r') as rdtInputFile:
        # HACK: we use rdtInput as a string in the frontend.
        rdtJsonAsString = rdtInputFile.read()
        rdtJson = json.loads(rdtJsonAsString)
    # Calculate line costs.
    lineData = {}
    for line in rdtJson["lines"]:
        lineData[line["id"]] = '{:,.2f}'.format(
            float(line["length"]) * float(inputDict["lineUnitCost"]))
    outData["lineData"] = lineData
    outData["generatorData"] = '{:,.2f}'.format(
        float(inputDict["dgUnitCost"]) * float(inputDict["maxDGPerGenerator"]))
    outData['gfmRawOut'] = rdtJsonAsString
    # Merge user-supplied damage scenarios (if any) into the RDT input file.
    if inputDict['scenarios'] != "":
        rdtJson['scenarios'] = json.loads(inputDict['scenarios'])
        with open(pJoin(rdtInputFilePath), "w") as rdtInputFile:
            json.dump(rdtJson, rdtInputFile, indent=4)
    # Run GridLAB-D first time to generate xrMatrices.
    print "RUNNING GLD FOR", modelDir
    if platform.system() == "Windows":
        omdPath = pJoin(modelDir, feederName + ".omd")
        with open(omdPath, "r") as omd:
            omd = json.load(omd)
        #REMOVE NEWLINECANDIDATES
        deleteList = []
        newLines = inputDict["newLineCandidates"].strip().replace(
            ' ', '').split(',')
        for newLine in newLines:
            for omdObj in omd["tree"]:
                if ("name" in omd["tree"][omdObj]):
                    if (newLine == omd["tree"][omdObj]["name"]):
                        deleteList.append(omdObj)
        for delItem in deleteList:
            del omd["tree"][delItem]
        #Load a blank glm file and use it to write to it
        feederPath = pJoin(modelDir, 'feeder.glm')
        with open(feederPath, 'w') as glmFile:
            toWrite = omf.feeder.sortedWrite(
                omd['tree']
            ) + "object jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n"  # + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
            glmFile.write(toWrite)
        #Write attachments from omd, if no file, one will be created
        for fileName in omd['attachments']:
            with open(os.path.join(modelDir, fileName), 'w') as file:
                file.write(omd['attachments'][fileName])
        #Wire in the file the user specifies via zipcode.
        climateFileName, latforpvwatts = zipCodeToClimateName(
            inputDict["simulationZipCode"])
        shutil.copy(
            pJoin(__neoMetaModel__._omfDir, "data", "Climate",
                  climateFileName + ".tmy2"), pJoin(modelDir, 'climate.tmy2'))
        proc = subprocess.Popen(['gridlabd', 'feeder.glm'],
                                stdout=subprocess.PIPE,
                                shell=True,
                                cwd=modelDir)
        (out, err) = proc.communicate()
        with open(pJoin(modelDir, "gldConsoleOut.txt"), "w") as gldConsoleOut:
            gldConsoleOut.write(out)
        accumulator = ""
        with open(pJoin(modelDir, "JSON_dump_line.json"), "r") as gldOut:
            accumulator = json.load(gldOut)
        outData['gridlabdRawOut'] = accumulator
        #Data trabsformation for GLD
        rdtJson["line_codes"] = accumulator["properties"]["line_codes"]
        rdtJson["lines"] = accumulator["properties"]["lines"]
        # RDT expects bus-suffixed node ids.
        for item in rdtJson["lines"]:
            item['node1_id'] = item['node1_id'] + "_bus"
            item['node2_id'] = item['node2_id'] + "_bus"
        with open(pJoin(modelDir, rdtInputFilePath), "w") as outFile:
            json.dump(rdtJson, outFile, indent=4)
        '''rdtJson["line_codes"] = accumulator["properties"]["line_codes"]
		counter = 1
		lineCodeTracker = {}
		for item in rdtJson["line_codes"]:
			lineCodeTracker[item['line_code']] = counter
			item['line_code'] = counter
			counter = counter + 1
		rdtJson["lines"] = accumulator["properties"]["lines"]
		print lineCodeTracker
		for line in rdtJson["lines"]:
			line["line_code"] = lineCodeTracker[line["line_code"]]
		with open(pJoin(modelDir, rdtInputFilePath), "w") as outFile:
			json.dump(rdtJson, outFile, indent=4)'''
    else:
        # Non-Windows: run GridLAB-D directly on the in-memory feeder tree.
        tree = feederModel.get("tree", {})
        attachments = feederModel.get("attachments", {})
        climateFileName, latforpvwatts = zipCodeToClimateName(
            inputDict["simulationZipCode"])
        shutil.copy(
            pJoin(__neoMetaModel__._omfDir, "data", "Climate",
                  climateFileName + ".tmy2"), pJoin(modelDir, 'climate.tmy2'))
        gridlabdRawOut = gridlabd.runInFilesystem(tree,
                                                  attachments=attachments,
                                                  workDir=modelDir)
        outData['gridlabdRawOut'] = gridlabdRawOut
    # Run RDT.
    print "RUNNING RDT FOR", modelDir
    rdtOutFile = modelDir + '/rdtOutput.json'
    rdtSolverFolder = pJoin(__neoMetaModel__._omfDir, 'solvers', 'rdt')
    rdtJarPath = pJoin(rdtSolverFolder, 'micot-rdt.jar')
    #TEST RUSSELL THING, DELETE WHEN DONE
    #shutil.copy(pJoin(__neoMetaModel__._omfDir, "scratch", "rdt_OUTPUTTEST.json"), pJoin(modelDir, 'rdt_OUTPUT.json'))
    #############
    proc = subprocess.Popen([
        'java', "-Djna.library.path=" + rdtSolverFolder, '-jar', rdtJarPath,
        '-c', rdtInputFilePath, '-e', rdtOutFile
    ],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    (stdout, stderr) = proc.communicate()
    with open(pJoin(modelDir, "rdtConsoleOut.txt"), "w") as rdtConsoleOut:
        rdtConsoleOut.write(stdout)
    rdtRawOut = open(rdtOutFile).read()
    outData['rdtRawOut'] = rdtRawOut
    # Indent the RDT output nicely.
    with open(pJoin(rdtOutFile), "w") as outFile:
        rdtOut = json.loads(rdtRawOut)
        json.dump(rdtOut, outFile, indent=4)
    # Generate and run 2nd copy of GridLAB-D model with changes specified by RDT.
    print "RUNNING GLD FOR", modelDir
    feederCopy = copy.deepcopy(feederModel)
    lineSwitchList = []
    for line in rdtOut['design_solution']['lines']:
        if ('switch_built' in line):
            lineSwitchList.append(line['id'])
    # Remove nonessential lines in second model as indicated by RDT output.
    for key in feederCopy['tree'].keys():
        value = feederCopy['tree'][key]
        if ('object' in value):
            if (value['object'] == 'underground_line') or (value['object']
                                                           == 'overhead_line'):
                if value['name'] not in lineSwitchList:
                    del feederCopy['tree'][key]
    #Add generators to second model.
    maxTreeKey = int(max(feederCopy['tree'], key=int)) + 1
    '''for gen in rdtOut['design_solution']['generators']:
		newGen = {}
		newGen["object"] = "diesel_dg"
		newGen["name"] = gen['id']
		newGen["parent"] = gen['id'][:-4]
		newGen["phases"] = "ABC"
		newGen["Gen_type"] = "CONSTANT_PQ"
		newGen["Rated_VA"] = "5.0 kVA"
		newGen["power_out_A"] = "250.0+120.0j"
		newGen["power_out_B"] = "230.0+130.0j"
		newGen["power_out_C"] = "220.0+150.0j"
		feederCopy['tree'][str(maxTreeKey)] = newGen
		maxTreeKey = maxTreeKey + 1
	'''
    maxTreeKey = max(feederCopy['tree'], key=int)
    # Load a blank glm file and use it to write to it
    feederPath = pJoin(modelDir, 'feederSecond.glm')
    with open(feederPath, 'w') as glmFile:
        toWrite = "module generators;\n\n" + omf.feeder.sortedWrite(
            feederCopy['tree']
        ) + "object voltdump {\n\tfilename voltDump2ndRun.csv;\n};\nobject jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n"  # + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
        glmFile.write(toWrite)
    # Run GridLAB-D second time.
    if platform.system() == "Windows":
        proc = subprocess.Popen(['gridlabd', 'feederSecond.glm'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True,
                                cwd=modelDir)
        (out, err) = proc.communicate()
        # Success indicator: did the second run produce its voltage dump?
        outData["secondGLD"] = str(
            os.path.isfile(pJoin(modelDir, "voltDump2ndRun.csv")))
    else:
        # TODO: make 2nd run of GridLAB-D work on Unixes.
        outData["secondGLD"] = str(False)
    # Draw the feeder.
    genDiagram(modelDir, feederModel)
    with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile:
        # Python 2 only: str.encode("base64") was removed in Python 3.
        outData["oneLineDiagram"] = inFile.read().encode("base64")
    # And we're done.
    return outData
Esempio n. 29
0
def work(modelDir, inputDict):
    ''' Run the model in its directory.

    Simulates a PV system with NREL SAM (pvwattsv1) against the zipcode's
    TMY2 climate, then derives timestamps, weather series, AC power output,
    inverter clipping, lifetime cashflow (ROI/NPV/IRR) and monthly/hourly
    aggregates into the returned outData dict. Python 2 code (list-returning
    map, list + list concatenation).
    '''
    # Copy specific climate data into model directory.
    inputDict["climateName"], latforpvwatts = zipCodeToClimateName(
        inputDict["zipCode"])
    shutil.copy(
        pJoin(__neoMetaModel__._omfDir, "data", "Climate",
              inputDict["climateName"] + ".tmy2"),
        pJoin(modelDir, "climate.tmy2"))
    # Set up SAM data structures.
    ssc = nrelsam2013.SSCAPI()
    dat = ssc.ssc_data_create()
    # Required user inputs.
    ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
    ssc.ssc_data_set_number(dat, "system_size",
                            float(inputDict.get("systemSize", 100)))
    # Overall DC-to-AC derate: product of the individual loss factors (each a
    # percentage, hence the /100 on every term).
    derate = float(inputDict.get("pvModuleDerate", 99.5))/100 \
     * float(inputDict.get("mismatch", 99.5))/100 \
     * float(inputDict.get("diodes", 99.5))/100 \
     * float(inputDict.get("dcWiring", 99.5))/100 \
     * float(inputDict.get("acWiring", 99.5))/100 \
     * float(inputDict.get("soiling", 99.5))/100 \
     * float(inputDict.get("shading", 99.5))/100 \
     * float(inputDict.get("sysAvail", 99.5))/100 \
     * float(inputDict.get("age", 99.5))/100 \
     * float(inputDict.get("inverterEfficiency", 92))/100
    ssc.ssc_data_set_number(dat, "derate", derate)
    # TODO: Should we move inverter efficiency to 'inv_eff' below (as done in PVWatts?)
    # Doesn't seem to affect output very much
    # ssc.ssc_data_set_number(dat, "inv_eff", float(inputDict.get("inverterEfficiency", 92))/100)
    ssc.ssc_data_set_number(dat, "track_mode",
                            float(inputDict.get("trackingMode", 0)))
    ssc.ssc_data_set_number(dat, "azimuth",
                            float(inputDict.get("azimuth", 180)))
    # Advanced inputs with defaults.
    ssc.ssc_data_set_number(dat, "rotlim", float(inputDict.get("rotlim", 45)))
    ssc.ssc_data_set_number(dat, "gamma",
                            float(inputDict.get("gamma", 0.5)) / 100)
    # Complicated optional inputs: "-" sentinel means "tilt equals latitude".
    if (inputDict.get("tilt", 0) == "-"):
        tilt_eq_lat = 1.0
        manualTilt = 0.0
    else:
        tilt_eq_lat = 0.0
        manualTilt = float(inputDict.get("tilt", 0))
    ssc.ssc_data_set_number(dat, "tilt", manualTilt)
    ssc.ssc_data_set_number(dat, "tilt_eq_lat", tilt_eq_lat)
    # Run PV system simulation.
    mod = ssc.ssc_module_create("pvwattsv1")
    ssc.ssc_module_exec(mod, dat)
    # Setting options for start time.
    simLengthUnits = inputDict.get("simLengthUnits", "hours")
    simStartDate = inputDict.get("simStartDate", "2014-01-01")
    # Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html
    startDateTime = simStartDate + " 00:00:00 UTC"
    # Timestamp output.
    outData = {}
    outData["timeStamps"] = [
        dt.datetime.strftime(
            dt.datetime.strptime(startDateTime[0:19], "%Y-%m-%d %H:%M:%S") +
            dt.timedelta(**{simLengthUnits: x}), "%Y-%m-%d %H:%M:%S") + " UTC"
        for x in range(int(inputDict.get("simLength", 8760)))
    ]
    # Geodata output.
    outData["city"] = ssc.ssc_data_get_string(dat, "city")
    outData["state"] = ssc.ssc_data_get_string(dat, "state")
    outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
    outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
    outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
    # Weather output.
    outData["climate"] = {}
    outData["climate"][
        "Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(
            dat, "gh")
    outData["climate"][
        "Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(
            dat, "poa")
    outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(
        dat, "tamb")
    outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(
        dat, "tcell")
    outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(
        dat, "wspd")
    # Power generation and clipping at the inverter's AC rating.
    outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
    invSizeWatts = float(inputDict.get("inverterSize", 0)) * 1000
    outData["InvClipped"] = [
        x if x < invSizeWatts else invSizeWatts
        for x in outData["powerOutputAc"]
    ]
    try:
        outData["percentClipped"] = 100 * (
            1.0 - sum(outData["InvClipped"]) / sum(outData["powerOutputAc"]))
    except ZeroDivisionError:
        outData["percentClipped"] = 0.0
    # Cashflow outputs.
    lifeSpan = int(inputDict.get("lifeSpan", 30))
    lifeYears = range(1, 1 + lifeSpan)
    retailCost = float(inputDict.get("retailCost", 0.0))
    degradation = float(inputDict.get("degradation", 0.5)) / 100
    installCost = float(inputDict.get("installCost", 0.0))
    discountRate = float(inputDict.get("discountRate", 7)) / 100
    outData["oneYearGenerationWh"] = sum(outData["powerOutputAc"])
    # Energy sales per year, linearly degraded; Wh -> kWh via /1000.
    outData["lifeGenerationDollars"] = [
        retailCost * (1.0 / 1000) * outData["oneYearGenerationWh"] *
        (1.0 - (x * degradation)) for x in lifeYears
    ]
    outData["lifeOmCosts"] = [
        -1.0 * float(inputDict["omCost"]) for x in lifeYears
    ]
    outData["lifePurchaseCosts"] = [-1.0 * installCost
                                    ] + [0 for x in lifeYears[1:]]
    # NOTE(review): an empty srecCashFlow yields [''] and float('') raises —
    # confirm upstream always supplies at least one numeric value.
    srec = inputDict.get("srecCashFlow", "").split(",")
    outData["srecCashFlow"] = map(float,
                                  srec) + [0 for x in lifeYears[len(srec):]]
    outData["netCashFlow"] = [
        x + y + z + a for (
            x, y, z,
            a) in zip(outData["lifeGenerationDollars"], outData["lifeOmCosts"],
                      outData["lifePurchaseCosts"], outData["srecCashFlow"])
    ]
    outData["cumCashFlow"] = map(lambda x: x,
                                 _runningSum(outData["netCashFlow"]))
    # BUGFIX: was roundSig(sum(lifePurchaseCosts, 3)) — the 3 was passed to
    # sum() as a start value (adding $3) instead of to roundSig() as the
    # significant-figures argument.
    outData["ROI"] = roundSig(sum(outData["netCashFlow"]), 3) / (
        -1 * roundSig(sum(outData["lifeOmCosts"]), 3) +
        -1 * roundSig(sum(outData["lifePurchaseCosts"]), 3))
    outData["NPV"] = roundSig(npv(discountRate, outData["netCashFlow"]), 3)
    outData["lifeGenerationWh"] = sum(outData["powerOutputAc"]) * lifeSpan
    outData["lifeEnergySales"] = sum(outData["lifeGenerationDollars"])
    try:
        # The IRR function is very bad.
        outData["IRR"] = roundSig(irr(outData["netCashFlow"]), 3)
    except:
        outData["IRR"] = "Undefined"
    # Monthly aggregation outputs.
    months = {
        "Jan": 0,
        "Feb": 1,
        "Mar": 2,
        "Apr": 3,
        "May": 4,
        "Jun": 5,
        "Jul": 6,
        "Aug": 7,
        "Sep": 8,
        "Oct": 9,
        "Nov": 10,
        "Dec": 11
    }
    # Sum AC output for the timestamps that fall in month x of the start year.
    totMonNum = lambda x: sum([
        z for (y, z) in zip(outData["timeStamps"], outData["powerOutputAc"])
        if y.startswith(simStartDate[0:4] + "-{0:02d}".format(x + 1))
    ])
    outData["monthlyGeneration"] = [[
        a, totMonNum(b)
    ] for (a, b) in sorted(months.items(), key=lambda x: x[1])]
    # Heatmaped hour+month outputs: average output per day for each (hour, month).
    hours = range(24)
    from calendar import monthrange
    totHourMon = lambda h, m: sum([
        z for (y, z) in zip(outData["timeStamps"], outData["powerOutputAc"])
        if y[5:7] == "{0:02d}".format(m + 1) and y[11:13] == "{0:02d}".format(
            h + 1)
    ])
    outData["seasonalPerformance"] = [[
        x, y,
        totHourMon(x, y) / monthrange(int(simStartDate[:4]), y + 1)[1]
    ] for x in hours for y in months.values()]
    # Stdout/stderr.
    outData["stdout"] = "Success"
    outData["stderr"] = ""
    return outData
Esempio n. 30
0
def work(modelDir, inputDict):
	''' Run the model in its directory.

	Simulates a PV system with NREL SAM (pvwattsv1) against the zipcode's
	TMY2 climate, then derives timestamps, weather series, AC power output,
	inverter clipping, lifetime cashflow (ROI/NPV/IRR) and monthly/hourly
	aggregates into the returned outData dict. Python 2 code (list-returning
	map, list + list concatenation).
	'''
	# Copy specific climate data into model directory.
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), 
		pJoin(modelDir, "climate.tmy2"))
	# Set up SAM data structures.
	ssc = nrelsam2013.SSCAPI()
	dat = ssc.ssc_data_create()
	# Required user inputs.
	ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
	ssc.ssc_data_set_number(dat, "system_size", float(inputDict.get("systemSize", 100)))
	# Overall DC-to-AC derate: product of the individual loss factors (each a
	# percentage, hence the /100 on every term).
	derate = float(inputDict.get("pvModuleDerate", 99.5))/100 \
		* float(inputDict.get("mismatch", 99.5))/100 \
		* float(inputDict.get("diodes", 99.5))/100 \
		* float(inputDict.get("dcWiring", 99.5))/100 \
		* float(inputDict.get("acWiring", 99.5))/100 \
		* float(inputDict.get("soiling", 99.5))/100 \
		* float(inputDict.get("shading", 99.5))/100 \
		* float(inputDict.get("sysAvail", 99.5))/100 \
		* float(inputDict.get("age", 99.5))/100 \
		* float(inputDict.get("inverterEfficiency", 92))/100
	ssc.ssc_data_set_number(dat, "derate", derate)
	# TODO: Should we move inverter efficiency to 'inv_eff' below (as done in PVWatts?) 
	# Doesn't seem to affect output very much
	# ssc.ssc_data_set_number(dat, "inv_eff", float(inputDict.get("inverterEfficiency", 92))/100)
	ssc.ssc_data_set_number(dat, "track_mode", float(inputDict.get("trackingMode", 0)))
	ssc.ssc_data_set_number(dat, "azimuth", float(inputDict.get("azimuth", 180)))
	# Advanced inputs with defaults.
	ssc.ssc_data_set_number(dat, "rotlim", float(inputDict.get("rotlim", 45)))
	ssc.ssc_data_set_number(dat, "gamma", float(inputDict.get("gamma", 0.5))/100)
	# Complicated optional inputs: "-" sentinel means "tilt equals latitude".
	if (inputDict.get("tilt",0) == "-"):
		tilt_eq_lat = 1.0
		manualTilt = 0.0
	else:
		tilt_eq_lat = 0.0
		manualTilt = float(inputDict.get("tilt",0))		
	ssc.ssc_data_set_number(dat, "tilt", manualTilt)
	ssc.ssc_data_set_number(dat, "tilt_eq_lat", tilt_eq_lat)
	# Run PV system simulation.
	mod = ssc.ssc_module_create("pvwattsv1")
	ssc.ssc_module_exec(mod, dat)
	# Setting options for start time.
	simLengthUnits = inputDict.get("simLengthUnits","hours")
	simStartDate = inputDict.get("simStartDate", "2014-01-01")
	# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html 
	startDateTime = simStartDate + " 00:00:00 UTC"
	# Timestamp output.
	outData = {}
	outData["timeStamps"] = [dt.datetime.strftime(
		dt.datetime.strptime(startDateTime[0:19],"%Y-%m-%d %H:%M:%S") + 
		dt.timedelta(**{simLengthUnits:x}),"%Y-%m-%d %H:%M:%S") + " UTC" for x in range(int(inputDict.get("simLength", 8760)))]
	# Geodata output.
	outData["city"] = ssc.ssc_data_get_string(dat, "city")
	outData["state"] = ssc.ssc_data_get_string(dat, "state")
	outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
	outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
	outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
	# Weather output.
	outData["climate"] = {}
	outData["climate"]["Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(dat, "gh")
	outData["climate"]["Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(dat, "poa")
	outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(dat, "tamb")
	outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(dat, "tcell")
	outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(dat, "wspd")
	# Power generation and clipping at the inverter's AC rating.
	outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")
	invSizeWatts = float(inputDict.get("inverterSize", 0)) * 1000
	outData["InvClipped"] = [x if x < invSizeWatts else invSizeWatts for x in outData["powerOutputAc"]]
	try:
		outData["percentClipped"] = 100 * (1.0 - sum(outData["InvClipped"]) / sum(outData["powerOutputAc"]))
	except ZeroDivisionError:
		outData["percentClipped"] = 0.0
	# Cashflow outputs.
	lifeSpan = int(inputDict.get("lifeSpan",30))
	lifeYears = range(1, 1 + lifeSpan)
	retailCost = float(inputDict.get("retailCost",0.0))
	degradation = float(inputDict.get("degradation",0.5))/100
	installCost = float(inputDict.get("installCost",0.0))
	discountRate = float(inputDict.get("discountRate", 7))/100
	outData["oneYearGenerationWh"] = sum(outData["powerOutputAc"])
	# Energy sales per year, linearly degraded; Wh -> kWh via /1000.
	outData["lifeGenerationDollars"] = [retailCost*(1.0/1000)*outData["oneYearGenerationWh"]*(1.0-(x*degradation)) for x in lifeYears]
	outData["lifeOmCosts"] = [-1.0*float(inputDict["omCost"]) for x in lifeYears]
	outData["lifePurchaseCosts"] = [-1.0 * installCost] + [0 for x in lifeYears[1:]]
	# NOTE(review): an empty srecCashFlow yields [''] and float('') raises —
	# confirm upstream always supplies at least one numeric value.
	srec = inputDict.get("srecCashFlow", "").split(",")
	outData["srecCashFlow"] = map(float,srec) + [0 for x in lifeYears[len(srec):]]
	outData["netCashFlow"] = [x+y+z+a for (x,y,z,a) in zip(outData["lifeGenerationDollars"], outData["lifeOmCosts"], outData["lifePurchaseCosts"], outData["srecCashFlow"])]
	outData["cumCashFlow"] = map(lambda x:x, _runningSum(outData["netCashFlow"]))
	# BUGFIX: was roundSig(sum(lifePurchaseCosts, 3)) — the 3 was passed to
	# sum() as a start value (adding $3) instead of to roundSig() as the
	# significant-figures argument.
	outData["ROI"] = roundSig(sum(outData["netCashFlow"]), 3) / (-1*roundSig(sum(outData["lifeOmCosts"]), 3) + -1*roundSig(sum(outData["lifePurchaseCosts"]), 3))
	outData["NPV"] = roundSig(npv(discountRate, outData["netCashFlow"]), 3) 
	outData["lifeGenerationWh"] = sum(outData["powerOutputAc"])*lifeSpan	
	outData["lifeEnergySales"] = sum(outData["lifeGenerationDollars"])
	try:
		# The IRR function is very bad.
		outData["IRR"] = roundSig(irr(outData["netCashFlow"]), 3)
	except:
		outData["IRR"] = "Undefined"
	# Monthly aggregation outputs.
	months = {"Jan":0,"Feb":1,"Mar":2,"Apr":3,"May":4,"Jun":5,"Jul":6,"Aug":7,"Sep":8,"Oct":9,"Nov":10,"Dec":11}
	# Sum AC output for the timestamps that fall in month x of the start year.
	totMonNum = lambda x:sum([z for (y,z) in zip(outData["timeStamps"], outData["powerOutputAc"]) if y.startswith(simStartDate[0:4] + "-{0:02d}".format(x+1))])
	outData["monthlyGeneration"] = [[a, totMonNum(b)] for (a,b) in sorted(months.items(), key=lambda x:x[1])]
	# Heatmaped hour+month outputs: average output per day for each (hour, month).
	hours = range(24)
	from calendar import monthrange
	totHourMon = lambda h,m:sum([z for (y,z) in zip(outData["timeStamps"], outData["powerOutputAc"]) if y[5:7]=="{0:02d}".format(m+1) and y[11:13]=="{0:02d}".format(h+1)])
	outData["seasonalPerformance"] = [[x,y,totHourMon(x,y) / monthrange(int(simStartDate[:4]), y+1)[1]] for x in hours for y in months.values()]
	# Stdout/stderr.
	outData["stdout"] = "Success"
	outData["stderr"] = ""
	return outData
Esempio n. 31
0
def runForeground(modelDir):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
	inputDict = json.load(open(pJoin(modelDir, 'allInputData.json')))
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get prepare of data and clean workspace if re-run, If re-run remove all the data in the subfolders
	for dirs in os.listdir(modelDir):
		if os.path.isdir(pJoin(modelDir, dirs)):
			shutil.rmtree(pJoin(modelDir, dirs))
	# Get the names of the feeders from the .omd files:
	feederNames = [x[0:-4] for x in os.listdir(modelDir) if x.endswith(".omd")]
	for i, key in enumerate(feederNames):
		inputDict['feederName' + str(i + 1)] = feederNames[i]
	# Run GridLAB-D once for each feeder:
	for feederName in feederNames:
		try:
			os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
		except Exception, e:
			pass
		if not os.path.isdir(pJoin(modelDir, feederName)):
			os.makedirs(pJoin(modelDir, feederName)) # create subfolders for feeders
		shutil.copy(pJoin(modelDir, feederName + ".omd"),
			pJoin(modelDir, feederName, "feeder.omd"))
		inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(_omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, feederName, "climate.tmy2"))
		try:
			startTime = datetime.datetime.now()
			feederJson = json.load(open(pJoin(modelDir, feederName, "feeder.omd")))
			tree = feederJson["tree"]
			# Set up GLM with correct time and recorders:
			feeder.attachRecorders(tree, "Regulator", "object", "regulator")
			feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
			feeder.attachRecorders(tree, "Inverter", "object", "inverter")
			feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
			feeder.attachRecorders(tree, "CollectorVoltage", None, None)
			feeder.attachRecorders(tree, "Climate", "object", "climate")
			feeder.attachRecorders(tree, "OverheadLosses", None, None)
			feeder.attachRecorders(tree, "UndergroundLosses", None, None)
			feeder.attachRecorders(tree, "TriplexLosses", None, None)
			feeder.attachRecorders(tree, "TransformerLosses", None, None)
			feeder.groupSwingKids(tree)
			feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
				simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
			# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
			rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"],
				keepFiles=True, workDir=pJoin(modelDir, feederName))
			cleanOut = {}
			# Std Err and Std Out
			cleanOut['stderr'] = rawOut['stderr']
			cleanOut['stdout'] = rawOut['stdout']
			# Time Stamps
			for key in rawOut:
				if '# timestamp' in rawOut[key]:
					cleanOut['timeStamps'] = rawOut[key]['# timestamp']
					break
				elif '# property.. timestamp' in rawOut[key]:
					cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
				else:
					cleanOut['timeStamps'] = []
			# Day/Month Aggregation Setup:
			stamps = cleanOut.get('timeStamps',[])
			level = inputDict.get('simLengthUnits','hours')
			# Climate
			for key in rawOut:
				if key.startswith('Climate_') and key.endswith('.csv'):
					cleanOut['climate'] = {}
					cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
					cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
					cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
					cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
					cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			# Voltage Band
			if 'VoltageJiggle.csv' in rawOut:
				cleanOut['allMeterVoltages'] = {}
				cleanOut['allMeterVoltages']['Min'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
				cleanOut['allMeterVoltages']['Mean'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
				cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
				cleanOut['allMeterVoltages']['Max'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
			cleanOut['allMeterVoltages']['stdDevPos'] = [(x+y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
			cleanOut['allMeterVoltages']['stdDevNeg'] = [(x-y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
			# Total # of meters
			count = 0
			with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
				for line in f:
					if "\"objectType\": \"triplex_meter\"" in line:
						count+=1
			# print "count=", count
			cleanOut['allMeterVoltages']['triplexMeterCount'] = float(count)
			# Power Consumption
			cleanOut['Consumption'] = {}
			# Set default value to be 0, avoiding missing value when computing Loads
			cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
			cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
			cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
			for key in rawOut:
				if key.startswith('SwingKids_') and key.endswith('.csv'):
					oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
					if 'Power' not in cleanOut['Consumption']:
						cleanOut['Consumption']['Power'] = oneSwingPower
					else:
						cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
				elif key.startswith('Inverter_') and key.endswith('.csv'):
					realA = rawOut[key]['power_A.real']
					realB = rawOut[key]['power_B.real']
					realC = rawOut[key]['power_C.real']
					imagA = rawOut[key]['power_A.imag']
					imagB = rawOut[key]['power_B.imag']
					imagC = rawOut[key]['power_C.imag']
					oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
					if 'DG' not in cleanOut['Consumption']:
						cleanOut['Consumption']['DG'] = oneDgPower
					else:
						cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
				elif key.startswith('Windmill_') and key.endswith('.csv'):
					vrA = rawOut[key]['voltage_A.real']
					vrB = rawOut[key]['voltage_B.real']
					vrC = rawOut[key]['voltage_C.real']
					viA = rawOut[key]['voltage_A.imag']
					viB = rawOut[key]['voltage_B.imag']
					viC = rawOut[key]['voltage_C.imag']
					crB = rawOut[key]['current_B.real']
					crA = rawOut[key]['current_A.real']
					crC = rawOut[key]['current_C.real']
					ciA = rawOut[key]['current_A.imag']
					ciB = rawOut[key]['current_B.imag']
					ciC = rawOut[key]['current_C.imag']
					powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
					powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
					powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
					# HACK: multiply by negative one because turbine power sign is opposite all other DG:
					oneDgPower = [-1.0 * x for x in hdmAgg(vecSum(powerA,powerB,powerC), avg, level)]
					if 'DG' not in cleanOut['Consumption']:
						cleanOut['Consumption']['DG'] = oneDgPower
					else:
						cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
				elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
					realA = rawOut[key]['sum(power_losses_A.real)']
					imagA = rawOut[key]['sum(power_losses_A.imag)']
					realB = rawOut[key]['sum(power_losses_B.real)']
					imagB = rawOut[key]['sum(power_losses_B.imag)']
					realC = rawOut[key]['sum(power_losses_C.real)']
					imagC = rawOut[key]['sum(power_losses_C.imag)']
					oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
					if 'Losses' not in cleanOut['Consumption']:
						cleanOut['Consumption']['Losses'] = oneLoss
					else:
						cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			# Aggregate up the timestamps:
			if level=='days':
				cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
			elif level=='months':
				cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
			# Write the output.
			with open(pJoin(modelDir, feederName, "allOutputData.json"),"w") as outFile:
				json.dump(cleanOut, outFile, indent=4)
			# Update the runTime in the input file.
			endTime = datetime.datetime.now()
			inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
			with open(pJoin(modelDir, feederName, "allInputData.json"),"w") as inFile:
				json.dump(inputDict, inFile, indent=4)
			# Clean up the PID file.
			os.remove(pJoin(modelDir, feederName,"PID.txt"))
			print "DONE RUNNING GRIDLABMULTI", modelDir, feederName
		except Exception as e:
			print "MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName
			cancel(pJoin(modelDir, feederName))
			with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
				traceback.print_exc(file = stderrFile)
Esempio n. 32
0
def work(modelDir, inputDict):
	'''Run the PVWatts (SAM pvwattsv1) solar model in its directory; return outData.

	Copies zip-code-matched TMY2 climate data into modelDir, runs NREL SAM's
	pvwattsv1 module, aggregates its outputs, and packs Plotly-serialized
	JSON for the power-generation, irradiance, and other-climate charts
	into the returned dict.
	NOTE(review): inputDict is expected to carry zipCode, systemSize,
	nonInverterEfficiency, trackingMode, azimuth, tilt, rotlim, gamma,
	inverterEfficiency, w_stow, inverterSize, simLength, simLengthUnits,
	and simStartDate -- verify against the model's input form.
	'''
	#plotly imports. Here for now so web server starts.
	import plotly
	# from plotly import __version__
	# from plotly.offline import download_plotlyjs, plot
	# from plotly import tools
	import plotly.graph_objs as go
	# Copy specific climate data into model directory
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), 
		pJoin(modelDir, "climate.tmy2"))
	# Set up SAM data structures.
	ssc = nrelsam2013.SSCAPI()
	dat = ssc.ssc_data_create()
	# Required user inputs.
	ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
	ssc.ssc_data_set_number(dat, "system_size", float(inputDict["systemSize"]))
	# Percentages from the form are converted to 0-1 fractions for SAM.
	ssc.ssc_data_set_number(dat, "derate", 0.01 * float(inputDict["nonInverterEfficiency"]))
	ssc.ssc_data_set_number(dat, "track_mode", float(inputDict["trackingMode"]))
	ssc.ssc_data_set_number(dat, "azimuth", float(inputDict["azimuth"]))
	# Advanced inputs with defaults.
	# A tilt of "-" selects the tilt_eq_lat flag (presumably "tilt equals
	# latitude" in SAM -- confirm against the pvwattsv1 docs) and zeroes the
	# manual tilt; otherwise the numeric tilt is used directly.
	if (inputDict.get("tilt",0) == "-"):
		tilt_eq_lat = 1.0
		manualTilt = 0.0
	else:
		tilt_eq_lat = 0.0
		manualTilt = float(inputDict.get("tilt",0))
	ssc.ssc_data_set_number(dat, "tilt_eq_lat", tilt_eq_lat)
	ssc.ssc_data_set_number(dat, "tilt", manualTilt)
	ssc.ssc_data_set_number(dat, "rotlim", float(inputDict["rotlim"]))
	# gamma is negated before being handed to SAM; presumably the form takes
	# a positive temperature-coefficient magnitude -- TODO confirm units.
	ssc.ssc_data_set_number(dat, "gamma", -1 * float(inputDict["gamma"]))
	ssc.ssc_data_set_number(dat, "inv_eff", 0.01 * float(inputDict["inverterEfficiency"]))
	ssc.ssc_data_set_number(dat, "w_stow", float(inputDict["w_stow"]))
	# Complicated optional inputs that we could enable later.
	# ssc.ssc_data_set_array(dat, 'shading_hourly', ...) 	# Hourly beam shading factors
	# ssc.ssc_data_set_matrix(dat, 'shading_mxh', ...) 		# Month x Hour beam shading factors
	# ssc.ssc_data_set_matrix(dat, 'shading_azal', ...) 	# Azimuth x altitude beam shading factors
	# ssc.ssc_data_set_number(dat, 'shading_diff', ...) 	# Diffuse shading factor
	# ssc.ssc_data_set_number(dat, 'enable_user_poa', ...)	# Enable user-defined POA irradiance input = 0 or 1
	# ssc.ssc_data_set_array(dat, 'user_poa', ...) 			# User-defined POA irradiance in W/m2
	# ssc.ssc_data_set_number(dat, 'tilt', 999)
	# ssc.ssc_data_set_number(dat, "t_noct", float(inputDict["t_noct"]))
	# ssc.ssc_data_set_number(dat, "t_ref", float(inputDict["t_ref"]))
	# ssc.ssc_data_set_number(dat, "fd", float(inputDict["fd"]))
	# ssc.ssc_data_set_number(dat, "i_ref", float(inputDict["i_ref"]))
	# ssc.ssc_data_set_number(dat, "poa_cutin", float(inputDict["poa_cutin"]))
	# Run PV system simulation.
	mod = ssc.ssc_module_create("pvwattsv1")
	ssc.ssc_module_exec(mod, dat)
	# Setting options for start time.
	simLengthUnits = inputDict.get("simLengthUnits","")
	simStartDate = inputDict["simStartDate"]
	# Set the timezone to be UTC, it won't affect calculation and display, relative offset handled in pvWatts.html 
	startDateTime = simStartDate + " 00:00:00 UTC"
	# Set aggregation function constants.
	# agg(key, fn) pulls array `key` out of the SAM results and aggregates it
	# with fn over the simulation window via _aggData.
	agg = lambda x,y:_aggData(x,y,inputDict["simStartDate"],
		int(inputDict["simLength"]), inputDict["simLengthUnits"], ssc, dat)
	avg = lambda x:sum(x)/len(x)
	# Timestamp output: one " UTC"-suffixed stamp per simulation step,
	# starting at midnight of simStartDate.
	outData = {}
	outData["timeStamps"] = [datetime.datetime.strftime(
		datetime.datetime.strptime(startDateTime[0:19],"%Y-%m-%d %H:%M:%S") + 
		datetime.timedelta(**{simLengthUnits:x}),"%Y-%m-%d %H:%M:%S") + " UTC" for x in range(int(inputDict["simLength"]))]
	# Geodata output.
	outData["city"] = ssc.ssc_data_get_string(dat, "city")
	outData["state"] = ssc.ssc_data_get_string(dat, "state")
	outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
	outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
	outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
	# Weather output.
	outData["climate"] = {}
	outData["climate"]["Plane of Array Irradiance (W/m^2)"] = agg("poa", avg)
	outData["climate"]["Beam Normal Irradiance (W/m^2)"] = agg("dn", avg)
	outData["climate"]["Diffuse Irradiance (W/m^2)"] = agg("df", avg)
	outData["climate"]["Ambient Temperature (F)"] = agg("tamb", avg)
	outData["climate"]["Cell Temperature (F)"] = agg("tcell", avg)
	outData["climate"]["Wind Speed (m/s)"] = agg("wspd", avg)
	# Power generation.
	outData["Consumption"] = {}
	outData["Consumption"]["Power"] = [x for x in agg("ac", avg)]
	outData["Consumption"]["Losses"] = [0 for x in agg("ac", avg)]
	outData["Consumption"]["DG"] = agg("ac", avg)

	#Plotly data sets for power generation graphs
	# %Z parses the literal "UTC" suffix we appended to our own stamps above.
	convertedDateStrings = [datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S %Z") for x in outData["timeStamps"]]
	powerGeneration = go.Scatter(
		x=convertedDateStrings,
		y=outData["Consumption"]["Power"],
		line=dict(
			color=('red')
			),
		name="Power Generated")

	# An inverterSize of 0 falls back to systemSize for the chart's nameplate line.
	chartInverter = None
	if float(inputDict["inverterSize"]) == 0:
		chartInverter = float(inputDict["systemSize"])
	else:
		chartInverter = float(inputDict["inverterSize"])

	# Nameplate ratings are in kW; multiply by 1000 to match the W-AC axis.
	panelsNameplate = go.Scatter(
		x=convertedDateStrings,
		y=[float(inputDict['systemSize'])*1000 for x in range(len(convertedDateStrings))],
		line=dict(
			dash = 'dash',
			color='orange'
			),
		name="Panels Nameplate")
	inverterNameplate = go.Scatter(
		x=convertedDateStrings,
		y=[chartInverter*1000 for x in range(len(convertedDateStrings))],
		line=dict(
			dash = 'dash',
			color='orange'
			),
		name="inverter Nameplate")

	#Set Power generation plotly layout
	powerGenerationLayout = go.Layout(
		width=1000,
		height=375,
		xaxis=dict(
			showgrid=False,
			),
		legend=dict(
			x=0,
			y=1.25,
			orientation="h")
		)
	#Combine all datasets for plotly graph
	powerGenerationData = [powerGeneration, panelsNameplate, inverterNameplate]
	#Example updating go object
	powerGenerationLayout['yaxis'].update(title='Power (W-AC)')
	#fig = go.Figure(data=powerGenerationData, layout=powerGenerationLayout)
	#inlinePlot = plotly.offline.plot(fig, include_plotlyjs=False, output_type='div')
	#outData["plotlyDiv"] = html.escape(json.dumps(inlinePlot, cls=plotly.utils.PlotlyJSONEncoder))

	#Plotly power generation outputs
	# Charts are serialized to JSON strings so the template can re-hydrate them client-side.
	outData["powerGenerationData"] = json.dumps(powerGenerationData, cls=plotly.utils.PlotlyJSONEncoder)
	outData["powerGenerationLayout"] = json.dumps(powerGenerationLayout, cls=plotly.utils.PlotlyJSONEncoder)

	#Irradiance plotly data
	poaIrradiance = go.Scatter(
		x=convertedDateStrings,
		y=outData["climate"]["Plane of Array Irradiance (W/m^2)"],
		line=dict(
			color='yellow'
			),
		name="Plane of Array Irradiance (W/m^2)") 
	beamNormalIrradiance = go.Scatter(
		x=convertedDateStrings,
		y=outData["climate"]["Beam Normal Irradiance (W/m^2)"],
		line=dict(
			color='gold'
			),
		name="Beam Normal Irradiance (W/m^2)") 
	diffuseIrradiance = go.Scatter(
		x=convertedDateStrings,
		y=outData["climate"]["Diffuse Irradiance (W/m^2)"],
		line=dict(
			color='lemonchiffon'
			),
		name="Diffuse Irradiance (W/m^2)")
	irradianceData = [poaIrradiance, beamNormalIrradiance, diffuseIrradiance]

	#Set Power generation plotly layout
	irradianceLayout = go.Layout(
		width=1000,
		height=375,
		xaxis=dict(
			showgrid=False,
			),
		yaxis=dict(
			title="Climate Units",
			),
		legend=dict(
			x=0,
			y=1.25,
			orientation="h")
		)
	outData["irradianceData"] = json.dumps(irradianceData, cls=plotly.utils.PlotlyJSONEncoder)
	outData["irradianceLayout"] = json.dumps(irradianceLayout, cls=plotly.utils.PlotlyJSONEncoder)

	#Other Climate Variables plotly data
	ambientTemperature = go.Scatter(
		x=convertedDateStrings,
		y=	outData["climate"]["Ambient Temperature (F)"],
		line=dict(
			color='dimgray'
			),
		name="Ambient Temperature (F)") 
	cellTemperature = go.Scatter(
		x=convertedDateStrings,
		y=outData["climate"]["Cell Temperature (F)"],
		line=dict(
			color='gainsboro'
			),
		name="Cell Temperature (F)") 
	windSpeed = go.Scatter(
		x=convertedDateStrings,
		y=outData["climate"]["Wind Speed (m/s)"],
		line=dict(
			color='darkgray'
			),
		name="Wind Speed (m/s)")
	otherClimateData = [ambientTemperature, cellTemperature, windSpeed]

	#Set Power generation plotly layout
	otherClimateLayout = go.Layout(
		width=1000,
		height=375,
		xaxis=dict(
			showgrid=False,
			),
		yaxis=dict(
			title="Climate Units",
			),
		legend=dict(
			x=0,
			y=1.25,
			orientation="h")
		)
	outData["otherClimateData"] = json.dumps(otherClimateData, cls=plotly.utils.PlotlyJSONEncoder)
	outData["otherClimateLayout"] = json.dumps(otherClimateLayout, cls=plotly.utils.PlotlyJSONEncoder)
	# Stdout/stderr.
	outData["stdout"] = "Success"
	outData["stderr"] = ""
	return outData