示例#1
0
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.
	Reads the feeder named by inputDict["feederName1"] from <modelDir>/<feeder>.omd,
	injects a climate object and a set of recorders, runs GridLAB-D inside
	modelDir/gldContainer, aggregates the raw CSV output into allOutputData.json,
	and records the elapsed run time in allInputData.json. On failure the traceback
	is written to stderr.txt and into inputDict['stderr']. (Python 2 code.) '''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	# Ensure the GridLAB-D working subdirectory exists; the bare except
	# deliberately swallows "directory already exists" on re-runs.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass
	try:
		feederName = inputDict["feederName1"]
		# zipCodeToClimateName returns a (climateName, latitude) pair here;
		# the latitude is kept for PVWatts-style use but not read in this function.
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		# Stage the matching TMY2 weather file next to the GLM as climate.tmy2.
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, "gldContainer", "climate.tmy2"))
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
				if feederJson['tree'][key]['object'] == 'climate':
					del feederJson['tree'][key]
		# Append a fresh climate module + climate object pointing at the staged tmy2.
		oldMax = feeder.getMaxKey(tree)
		tree[oldMax + 1] = {'omftype':'module', 'argument':'climate'}
		tree[oldMax + 2] ={'object':'climate','name':'Climate','interpolate':'QUADRATIC', 'tmyfile':'climate.tmy2'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map:
		# one group_recorder per phase, dumping node voltages to {a,b,c}VoltDump.csv.
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
			simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"],
			keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		# NOTE(review): only the '# timestamp' branch breaks out of this loop. The
		# else branch resets 'timeStamps' to [] on every key that matches neither
		# header, so a '# property.. timestamp' series found earlier can be
		# clobbered by a later key; the result depends on dict iteration order.
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		# If several Climate_*.csv outputs exist, the last one iterated wins since
		# cleanOut['climate'] is re-created on each match.
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				# 10.76392 = square feet per square meter.
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList
		# Voltage Band
		# Each series is halved -- presumably converting the 240 V triplex
		# voltage_12 magnitude to a per-leg (120 V nominal) value; TODO confirm.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				# vecPyth(real, imag) yields the apparent-power magnitude series;
				# multiple swing files are summed element-wise into 'Power'.
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'):
				# Sum per-phase apparent-power magnitudes for each inverter into 'DG'.
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Wind turbines report voltage and current; per-phase power is
				# |V| * |I| (magnitudes), then summed across phases into 'DG'.
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				# Accumulate each loss category's per-phase apparent power into 'Losses'.
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				# NOTE(review): the three zero-filled defaults below are dead stores;
				# they are overwritten unconditionally by the tap series just after.
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				# First element of the recorded 'phases' column, e.g. 'ABC'.
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				# NOTE(review): same dead-store pattern as the regulator branch above.
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		# Use a force-directed (neato) layout when fewer than 25% of tree objects
		# carry coordinates; otherwise draw with the stored positions.
		if latPerc < 0.25: doNeato = True
		else: doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		# ISO stamp prefixes: [0:10] = YYYY-MM-DD, [0:7] = YYYY-MM.
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# Always record total wall-clock time and rewrite allInputData.json, whether
	# the run above succeeded or failed (this overwrites the per-run runTime set
	# in the try block with the total including setup).
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	# Best-effort removal of the parent-process ID file.
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
示例#2
0
def runForeground(modelDir):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.
	Multi-feeder variant: loads allInputData.json, wipes any prior per-feeder
	subfolders, then for every .omd in modelDir runs GridLAB-D in a subfolder
	named after the feeder and writes that feeder's allOutputData.json /
	allInputData.json. Per-feeder failures are logged to the feeder's stderr.txt
	and do not abort the remaining feeders. (Python 2 code.) '''
	inputDict = json.load(open(pJoin(modelDir, 'allInputData.json')))
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get prepare of data and clean workspace if re-run, If re-run remove all the data in the subfolders
	for dirs in os.listdir(modelDir):
		if os.path.isdir(pJoin(modelDir, dirs)):
			shutil.rmtree(pJoin(modelDir, dirs))
	# Get the names of the feeders from the .omd files:
	feederNames = [x[0:-4] for x in os.listdir(modelDir) if x.endswith(".omd")]
	# Record feederName1, feederName2, ... back into the input dict.
	for i, key in enumerate(feederNames):
		inputDict['feederName' + str(i + 1)] = feederNames[i]
	# Run GridLAB-D once for each feeder:
	for feederName in feederNames:
		# Best-effort removal of any stale output from a previous run.
		try:
			os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
		except Exception, e:
			pass
		if not os.path.isdir(pJoin(modelDir, feederName)):
			os.makedirs(pJoin(modelDir, feederName)) # create subfolders for feeders
		shutil.copy(pJoin(modelDir, feederName + ".omd"),
			pJoin(modelDir, feederName, "feeder.omd"))
		# NOTE(review): elsewhere in this file zipCodeToClimateName's return value
		# is unpacked as a (climateName, latitude) pair; here it is assigned
		# directly -- confirm it returns a bare name in this call path.
		inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(_omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, feederName, "climate.tmy2"))
		try:
			startTime = datetime.datetime.now()
			feederJson = json.load(open(pJoin(modelDir, feederName, "feeder.omd")))
			tree = feederJson["tree"]
			# Set up GLM with correct time and recorders:
			feeder.attachRecorders(tree, "Regulator", "object", "regulator")
			feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
			feeder.attachRecorders(tree, "Inverter", "object", "inverter")
			feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
			feeder.attachRecorders(tree, "CollectorVoltage", None, None)
			feeder.attachRecorders(tree, "Climate", "object", "climate")
			feeder.attachRecorders(tree, "OverheadLosses", None, None)
			feeder.attachRecorders(tree, "UndergroundLosses", None, None)
			feeder.attachRecorders(tree, "TriplexLosses", None, None)
			feeder.attachRecorders(tree, "TransformerLosses", None, None)
			feeder.groupSwingKids(tree)
			feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
				simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
			# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
			rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"],
				keepFiles=True, workDir=pJoin(modelDir, feederName))
			cleanOut = {}
			# Std Err and Std Out
			cleanOut['stderr'] = rawOut['stderr']
			cleanOut['stdout'] = rawOut['stdout']
			# Time Stamps
			# NOTE(review): only the '# timestamp' branch breaks; the else branch
			# resets 'timeStamps' to [] for every non-matching key, so a
			# '# property.. timestamp' series found earlier can be clobbered.
			for key in rawOut:
				if '# timestamp' in rawOut[key]:
					cleanOut['timeStamps'] = rawOut[key]['# timestamp']
					break
				elif '# property.. timestamp' in rawOut[key]:
					cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
				else:
					cleanOut['timeStamps'] = []
			# Day/Month Aggregation Setup:
			stamps = cleanOut.get('timeStamps',[])
			level = inputDict.get('simLengthUnits','hours')
			# Climate
			# If several Climate_*.csv outputs exist, the last one iterated wins.
			for key in rawOut:
				if key.startswith('Climate_') and key.endswith('.csv'):
					cleanOut['climate'] = {}
					cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
					cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
					cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
					cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
					cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			# Voltage Band
			# Each series is halved -- presumably converting the 240 V triplex
			# voltage_12 magnitude to a per-leg (120 V nominal) value; TODO confirm.
			if 'VoltageJiggle.csv' in rawOut:
				cleanOut['allMeterVoltages'] = {}
				cleanOut['allMeterVoltages']['Min'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
				cleanOut['allMeterVoltages']['Mean'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
				cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
				cleanOut['allMeterVoltages']['Max'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
			# Band edges at mean +/- half a standard deviation.
			# NOTE(review): these two lines run even when VoltageJiggle.csv was
			# absent above, which would raise KeyError on 'allMeterVoltages'.
			cleanOut['allMeterVoltages']['stdDevPos'] = [(x+y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
			cleanOut['allMeterVoltages']['stdDevNeg'] = [(x-y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
			# Total # of meters
			# Counted by scanning the raw .omd text for triplex_meter objects.
			count = 0
			with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
				for line in f:
					if "\"objectType\": \"triplex_meter\"" in line:
						count+=1
			# print "count=", count
			cleanOut['allMeterVoltages']['triplexMeterCount'] = float(count)
			# Power Consumption
			cleanOut['Consumption'] = {}
			# Set default value to be 0, avoiding missing value when computing Loads
			cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
			cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
			cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
			for key in rawOut:
				if key.startswith('SwingKids_') and key.endswith('.csv'):
					# vecPyth(real, imag) yields the apparent-power magnitude series;
					# multiple swing files are summed element-wise into 'Power'.
					oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
					if 'Power' not in cleanOut['Consumption']:
						cleanOut['Consumption']['Power'] = oneSwingPower
					else:
						cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
				elif key.startswith('Inverter_') and key.endswith('.csv'):
					# Sum per-phase apparent-power magnitudes for each inverter into 'DG'.
					realA = rawOut[key]['power_A.real']
					realB = rawOut[key]['power_B.real']
					realC = rawOut[key]['power_C.real']
					imagA = rawOut[key]['power_A.imag']
					imagB = rawOut[key]['power_B.imag']
					imagC = rawOut[key]['power_C.imag']
					oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
					if 'DG' not in cleanOut['Consumption']:
						cleanOut['Consumption']['DG'] = oneDgPower
					else:
						cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
				elif key.startswith('Windmill_') and key.endswith('.csv'):
					# Wind turbines report voltage and current; per-phase power is
					# |V| * |I| (magnitudes), then summed across phases.
					vrA = rawOut[key]['voltage_A.real']
					vrB = rawOut[key]['voltage_B.real']
					vrC = rawOut[key]['voltage_C.real']
					viA = rawOut[key]['voltage_A.imag']
					viB = rawOut[key]['voltage_B.imag']
					viC = rawOut[key]['voltage_C.imag']
					crB = rawOut[key]['current_B.real']
					crA = rawOut[key]['current_A.real']
					crC = rawOut[key]['current_C.real']
					ciA = rawOut[key]['current_A.imag']
					ciB = rawOut[key]['current_B.imag']
					ciC = rawOut[key]['current_C.imag']
					powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
					powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
					powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
					# HACK: multiply by negative one because turbine power sign is opposite all other DG:
					oneDgPower = [-1.0 * x for x in hdmAgg(vecSum(powerA,powerB,powerC), avg, level)]
					if 'DG' not in cleanOut['Consumption']:
						cleanOut['Consumption']['DG'] = oneDgPower
					else:
						cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
				elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
					# Accumulate each loss category's per-phase apparent power into 'Losses'.
					realA = rawOut[key]['sum(power_losses_A.real)']
					imagA = rawOut[key]['sum(power_losses_A.imag)']
					realB = rawOut[key]['sum(power_losses_B.real)']
					imagB = rawOut[key]['sum(power_losses_B.imag)']
					realC = rawOut[key]['sum(power_losses_C.real)']
					imagC = rawOut[key]['sum(power_losses_C.imag)']
					oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
					if 'Losses' not in cleanOut['Consumption']:
						cleanOut['Consumption']['Losses'] = oneLoss
					else:
						cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			# Aggregate up the timestamps:
			# ISO stamp prefixes: [0:10] = YYYY-MM-DD, [0:7] = YYYY-MM.
			if level=='days':
				cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
			elif level=='months':
				cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
			# Write the output.
			with open(pJoin(modelDir, feederName, "allOutputData.json"),"w") as outFile:
				json.dump(cleanOut, outFile, indent=4)
			# Update the runTime in the input file.
			endTime = datetime.datetime.now()
			inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
			with open(pJoin(modelDir, feederName, "allInputData.json"),"w") as inFile:
				json.dump(inputDict, inFile, indent=4)
			# Clean up the PID file.
			os.remove(pJoin(modelDir, feederName,"PID.txt"))
			print "DONE RUNNING GRIDLABMULTI", modelDir, feederName
		except Exception as e:
			# Per-feeder failure: cancel that feeder, append the traceback to its
			# stderr.txt, and continue with the remaining feeders.
			print "MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName
			cancel(pJoin(modelDir, feederName))
			with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
				traceback.print_exc(file = stderrFile)
示例#3
0
def work(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
	# feederName = inputDict["feederName1"]
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	feederJson = json.load(open(pJoin(modelDir, feederName + '.omd')))
	tree = feederJson["tree"]
	# Set up GLM with correct time and recorders:
	feeder.attachRecorders(tree, "Regulator", "object", "regulator")
	feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
	feeder.attachRecorders(tree, "Inverter", "object", "inverter")
	feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
	feeder.attachRecorders(tree, "CollectorVoltage", None, None)
	feeder.attachRecorders(tree, "Climate", "object", "climate")
	feeder.attachRecorders(tree, "OverheadLosses", None, None)
	feeder.attachRecorders(tree, "UndergroundLosses", None, None)
	feeder.attachRecorders(tree, "TriplexLosses", None, None)
	feeder.attachRecorders(tree, "TransformerLosses", None, None)
	feeder.groupSwingKids(tree)
	# Attach recorders for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':3600}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach recorders for system voltage map, triplex:
	stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':3600}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'nVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach current recorder for overhead_lines
	currentStub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	for phase in ['A','B','C']:
		copyCurrentStub = dict(currentStub)
		copyCurrentStub['property'] = 'current_out_' + phase
		copyCurrentStub['file'] = 'OH_line_current_phase' + phase + '.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyCurrentStub
	rating_stub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	copyRatingStub = dict(rating_stub)
	copyRatingStub['property'] = 'continuous_rating'
	copyRatingStub['file'] = 'OH_line_cont_rating.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyRatingStub
	flow_stub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	copyFlowStub = dict(flow_stub)
	copyFlowStub['property'] = 'flow_direction'
	copyFlowStub['file'] = 'OH_line_flow_direc.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyFlowStub
	# Attach current recorder for underground_lines
	currentStubOH = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	for phase in ['A','B','C']:
		copyCurrentStubOH = dict(currentStubOH)
		copyCurrentStubOH['property'] = 'current_out_' + phase
		copyCurrentStubOH['file'] = 'UG_line_current_phase' + phase + '.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyCurrentStubOH
	ug_rating_stub = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	copyUGRatingStub = dict(ug_rating_stub)
	copyUGRatingStub['property'] = 'continuous_rating'
	copyUGRatingStub['file'] = 'UG_line_cont_rating.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyUGRatingStub
	ug_flow_stub = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	ugCopyFlowStub = dict(ug_flow_stub)
	ugCopyFlowStub['property'] = 'flow_direction'
	ugCopyFlowStub['file'] = 'UG_line_flow_direc.csv'
	tree[feeder.getMaxKey(tree) + 1] = ugCopyFlowStub
	# And get meters for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':3600}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'mVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	for key in tree:
		if 'bustype' in tree[key].keys():
			if tree[key]['bustype'] == 'SWING':
				tree[key]['object'] = 'meter'
				swingN = tree[key]['name']
	swingRecord = {'object':'recorder', 'property':'voltage_A,measured_real_power,measured_power','file':'subVoltsA.csv','parent':swingN, 'interval':60}
	tree[feeder.getMaxKey(tree) + 1] = swingRecord
	for key in tree:
		if 'omftype' in tree[key].keys() and tree[key]['argument']=='minimum_timestep=3600':
			tree[key]['argument'] = 'minimum_timestep=60'
	# If there is a varvolt object in the tree, add recorder to swingbus and node from voltage_measurements property
	# Find var_volt object
	downLineNode = 'None'
	for key in tree:
		if 'object' in tree[key].keys() and tree[key]['object']=='volt_var_control':
			downLineNode = tree[key]['voltage_measurements']
	if downLineNode != 'None':
		downNodeRecord = {'object':'recorder', 'property':'voltage_A','file':'firstDownlineVoltsA.csv','parent':downLineNode, 'interval':60}
		tree[feeder.getMaxKey(tree) + 1] = downNodeRecord
	# Violation recorder to display to users 
	# violationRecorder = {'object':'violation_recorder','node_continuous_voltage_limit_lower':0.95,'file':'Violation_Log.csv',
	# 					'secondary_dist_voltage_rise_lower_limit':-0.042,'substation_pf_lower_limit':0.85,'substation_breaker_C_limit':300,
	# 					'secondary_dist_voltage_rise_upper_limit':0.025,'substation_breaker_B_limit':300,'violation_flag':'ALLVIOLATIONS',
	# 					'node_instantaneous_voltage_limit_upper':1.1, 'inverter_v_chng_per_interval_lower_bound':-0.05, 'virtual_substation':swingN,
	# 					'substation_breaker_A_limit':300, 'xfrmr_thermal_limit_lower':0,'node_continuous_voltage_interval':300,'strict':'false',
	# 					'node_instantaneous_voltage_limit_lower':0,'line_thermal_limit_upper':1,'echo':'false','node_continuous_voltage_limit_upper':1.05,
	# 					'interval':30,'line_thermal_limit_lower':0,'summary':'Violation_Summary.csv','inverter_v_chng_interval':60,
	# 					'xfrmr_thermal_limit_upper':2,'inverter_v_chng_per_interval_upper_bound':0.050}
	# tree[feeder.getMaxKey(tree) + 1] = violationRecorder
	feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
		simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
	# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
	rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
		keepFiles=True, workDir=pJoin(modelDir))
		# voltDumps have no values when gridlabD fails or the files dont exist
	if not os.path.isfile(pJoin(modelDir,'aVoltDump.csv')):
		with open (pJoin(modelDir,'stderr.txt')) as inFile:
			stdErrText = inFile.read()
		message = 'GridLAB-D crashed. Error log:\n' + stdErrText
		raise Exception(message)
	elif len(rawOut['aVoltDump.csv']['# timestamp']) == 0:
		with open (pJoin(modelDir,'stderr.txt')) as inFile:
			stdErrText = inFile.read()
		message = 'GridLAB-D crashed. Error log:\n' + stdErrText
		raise Exception(message)
	outData = {}
	# Std Err and Std Out
	outData['stderr'] = rawOut['stderr']
	outData['stdout'] = rawOut['stdout']
	# Time Stamps
	for key in rawOut:
		if '# timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# timestamp']
			break
		elif '# property.. timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# property.. timestamp']
		else:
			outData['timeStamps'] = []
	# Day/Month Aggregation Setup:
	stamps = outData.get('timeStamps',[])
	level = inputDict.get('simLengthUnits','hours')
	# Climate
	for key in rawOut:
		if key.startswith('Climate_') and key.endswith('.csv'):
			outData['climate'] = {}
			outData['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
			outData['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
			outData['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
			outData['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
			outData['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			#outData['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
			climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
			#converting W/sf to W/sm
			climateWbySMList= [x*10.76392 for x in climateWbySFList]
			outData['climate']['Global Horizontal (W/sm)']=climateWbySMList			
	# Voltage Band
	if 'VoltageJiggle.csv' in rawOut:
		outData['allMeterVoltages'] = {}
		outData['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
		outData['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
	# Power Consumption
	outData['Consumption'] = {}
	# Set default value to be 0, avoiding missing value when computing Loads
	outData['Consumption']['Power'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['DG'] = [0] * int(inputDict["simLength"])
	for key in rawOut:
		if key.startswith('SwingKids_') and key.endswith('.csv'):
			oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
			if 'Power' not in outData['Consumption']:
				outData['Consumption']['Power'] = oneSwingPower
			else:
				outData['Consumption']['Power'] = vecSum(oneSwingPower,outData['Consumption']['Power'])
		elif key.startswith('Inverter_') and key.endswith('.csv'): 	
			realA = rawOut[key]['power_A.real']
			realB = rawOut[key]['power_B.real']
			realC = rawOut[key]['power_C.real']
			imagA = rawOut[key]['power_A.imag']
			imagB = rawOut[key]['power_B.imag']
			imagC = rawOut[key]['power_C.imag']
			oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key.startswith('Windmill_') and key.endswith('.csv'):
			vrA = rawOut[key]['voltage_A.real']
			vrB = rawOut[key]['voltage_B.real']
			vrC = rawOut[key]['voltage_C.real']
			viA = rawOut[key]['voltage_A.imag']
			viB = rawOut[key]['voltage_B.imag']
			viC = rawOut[key]['voltage_C.imag']
			crB = rawOut[key]['current_B.real']
			crA = rawOut[key]['current_A.real']
			crC = rawOut[key]['current_C.real']
			ciA = rawOut[key]['current_A.imag']
			ciB = rawOut[key]['current_B.imag']
			ciC = rawOut[key]['current_C.imag']
			powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
			powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
			powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
			oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
			realA = rawOut[key]['sum(power_losses_A.real)']
			imagA = rawOut[key]['sum(power_losses_A.imag)']
			realB = rawOut[key]['sum(power_losses_B.real)']
			imagB = rawOut[key]['sum(power_losses_B.imag)']
			realC = rawOut[key]['sum(power_losses_C.real)']
			imagC = rawOut[key]['sum(power_losses_C.imag)']
			oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'Losses' not in outData['Consumption']:
				outData['Consumption']['Losses'] = oneLoss
			else:
				outData['Consumption']['Losses'] = vecSum(oneLoss,outData['Consumption']['Losses'])
		elif key.startswith('Regulator_') and key.endswith('.csv'):
			#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
			regName=""
			regName = key
			newkey=regName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapA'] = rawOut[key]['tap_A']
			outData[newkey]['RegTapB'] = rawOut[key]['tap_B']
			outData[newkey]['RegTapC'] = rawOut[key]['tap_C']
			outData[newkey]['RegPhases'] = rawOut[key]['phases'][0]
		elif key.startswith('Capacitor_') and key.endswith('.csv'):
			capName=""
			capName = key
			newkey=capName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1A'] = rawOut[key]['switchA']
			outData[newkey]['Cap1B'] = rawOut[key]['switchB']
			outData[newkey]['Cap1C'] = rawOut[key]['switchC']
			outData[newkey]['CapPhases'] = rawOut[key]['phases'][0]
	# Capture voltages at the swingbus
	# Loop through voltDump for swingbus voltages
	subData = []
	downData = []
	with open(pJoin(modelDir,"subVoltsA.csv")) as subFile:
		reader = csv.reader(subFile)
		subData = [x for x in reader]
	if downLineNode != 'None':
		with open(pJoin(modelDir,"firstDownlineVoltsA.csv")) as downFile:
			reader = csv.reader(downFile)
			downData = [x for x in reader]
	FIRST_DATA_ROW = 9
	cleanDown = [stringToMag(x[1]) for x in downData[FIRST_DATA_ROW:-1]]
	swingTimestamps = [x[0] for x in subData[FIRST_DATA_ROW:-1]]
	cleanSub = [stringToMag(x[1]) for x in subData[FIRST_DATA_ROW:-1]]
	# real_power / power
	powerFactors = []
	for row in subData[FIRST_DATA_ROW:-1]:
		powerFactors.append(abs(float(row[2])/stringToMag(row[3])))
	outData['powerFactors'] = powerFactors
	outData['swingVoltage'] = cleanSub
	outData['downlineNodeVolts'] = cleanDown
	outData['swingTimestamps'] = swingTimestamps
	# If there is a var volt system, find the min and max voltage for a band
	minVoltBand = []
	maxVoltBand = []
	if downLineNode != 'None':
		for key in tree:
			objKeys = tree[key].keys()
			if 'object' in objKeys:
				if tree[key]['object']=='volt_var_control':
					minVoltBand.append(float(tree[key]['minimum_voltages']))
					maxVoltBand.append(float(tree[key]['maximum_voltages']))
		outData['minVoltBand'] = minVoltBand
		outData['maxVoltBand'] = maxVoltBand
	# Violation Summary and Log
	# violationData = ''
	# violationArray = []
	# with open(pJoin(modelDir,"Violation_Summary.csv")) as vioSum:
	# 	reader = csv.reader(vioSum)
	# 	for row in reader:
	# 		violationArray.append(row)	
	# for row in violationArray[4:]:
	# 	violationData += str(' '.join(row)) + "\n"
	# outData["violationSummary"] = violationData
	# violationLogArray = []
	# violationLog = ''
	# with open(pJoin(modelDir,"Violation_Log.csv")) as vioLog:
	# 	logger = csv.reader(vioLog)
	# 	for row in logger:
	# 		violationLogArray.append(row)
	# for row in violationLogArray[6:]:
	# 	violationLog += str(' '.join(row)) + "\n"
	# outData['violationLog'] = violationLog
	# What percentage of our keys have lat lon data?
	latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
	latPerc = 1.0*len(latKeys)/len(tree)
	if latPerc < 0.25: doNeato = True
	else: doNeato = False
	# Generate the frames for the system voltage map time traveling chart.
	genTime, mapTimestamp = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
	outData['genTime'] = genTime
	outData['mapTimestamp'] = mapTimestamp
	# Aggregate up the timestamps:
	if level=='days':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
	elif level=='months':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
	return outData
# ---- 示例#4 (Example 4: scraped-snippet separator) ----
def runForeground(modelDir, inputDict, fs):
    """ Run the model in its directory, blocking until done.

    WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

    modelDir  -- working directory; every subdirectory is wiped on re-run and
                 one subfolder per feeder is (re)created.
    inputDict -- model inputs. Keys starting with "feederName" hold
                 "<feederDir>___<feederName>" values; also reads zipCode,
                 simLength, simLengthUnits, simStartDate, and writes runTime.
    fs        -- filesystem abstraction used to pull feeder/climate data and
                 to remove stale remote outputs.

    Side effects per feeder: writes allOutputData.json and allInputData.json,
    deletes PID.txt; on crash, cancels the run and appends a traceback to
    stderr.txt. Python 2 code (print statement, `except Exception, e`).
    """
    print "STARTING TO RUN", modelDir
    beginTime = datetime.datetime.now()
    feederList = []
    # Get prepare of data and clean workspace if re-run, If re-run remove all
    # the data in the subfolders
    for dirs in os.listdir(modelDir):
        if os.path.isdir(pJoin(modelDir, dirs)):
            shutil.rmtree(pJoin(modelDir, dirs))
    # Get each feeder, prepare data in separate folders, and run there.
    for key in sorted(inputDict, key=inputDict.get):
        if key.startswith("feederName"):
            feederDir, feederName = inputDict[key].split("___")
            feederList.append(feederName)
            try:
                # Best effort: clear stale outputs both locally and in fs;
                # a missing file is expected on first run.
                os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
                fs.remove(pJoin(modelDir, feederName, "allOutputData.json"))
            except Exception, e:
                pass
            if not os.path.isdir(pJoin(modelDir, feederName)):
                # create subfolders for feeders
                os.makedirs(pJoin(modelDir, feederName))

            # Pull the feeder definition and TMY2 climate file into the
            # per-feeder workspace.
            fs.export_from_fs_to_local(
                pJoin("data", "Feeder", feederDir, feederName + ".json"), pJoin(modelDir, feederName, "feeder.json")
            )
            inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"], fs)
            fs.export_from_fs_to_local(
                pJoin("data", "Climate", inputDict["climateName"] + ".tmy2"),
                pJoin(modelDir, feederName, "climate.tmy2"),
            )
            try:
                startTime = datetime.datetime.now()
                feederJson = json.load(open(pJoin(modelDir, feederName, "feeder.json")))
                tree = feederJson["tree"]
                # Set up GLM with correct time and recorders:
                feeder.attachRecorders(tree, "Regulator", "object", "regulator")
                feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
                feeder.attachRecorders(tree, "Inverter", "object", "inverter")
                feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
                feeder.attachRecorders(tree, "CollectorVoltage", None, None)
                feeder.attachRecorders(tree, "Climate", "object", "climate")
                feeder.attachRecorders(tree, "OverheadLosses", None, None)
                feeder.attachRecorders(tree, "UndergroundLosses", None, None)
                feeder.attachRecorders(tree, "TriplexLosses", None, None)
                feeder.attachRecorders(tree, "TransformerLosses", None, None)
                feeder.groupSwingKids(tree)
                feeder.adjustTime(
                    tree=tree,
                    simLength=float(inputDict["simLength"]),
                    simLengthUnits=inputDict["simLengthUnits"],
                    simStartDate=inputDict["simStartDate"],
                )
                if "attachments" in feederJson:
                    attachments = feederJson["attachments"]
                else:
                    attachments = []
                # RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
                rawOut = gridlabd.runInFilesystem(
                    tree, attachments=attachments, keepFiles=True, workDir=pJoin(modelDir, feederName)
                )
                cleanOut = {}
                # Std Err and Std Out
                cleanOut["stderr"] = rawOut["stderr"]
                cleanOut["stdout"] = rawOut["stdout"]
                # Time Stamps
                # NOTE(review): `key` here shadows the outer feeder-name loop
                # variable; harmless only because the outer value is not read
                # again before being reassigned.
                for key in rawOut:
                    if "# timestamp" in rawOut[key]:
                        cleanOut["timeStamps"] = rawOut[key]["# timestamp"]
                        break
                    elif "# property.. timestamp" in rawOut[key]:
                        cleanOut["timeStamps"] = rawOut[key]["# property.. timestamp"]
                    else:
                        # NOTE(review): no break after the elif, so a later
                        # non-matching file can overwrite a match found via
                        # the elif branch with [] — confirm this is intended.
                        cleanOut["timeStamps"] = []
                # Day/Month Aggregation Setup:
                stamps = cleanOut.get("timeStamps", [])
                level = inputDict.get("simLengthUnits", "hours")
                # Climate
                for key in rawOut:
                    if key.startswith("Climate_") and key.endswith(".csv"):
                        cleanOut["climate"] = {}
                        cleanOut["climate"]["Rain Fall (in/h)"] = hdmAgg(
                            rawOut[key].get("rainfall"), sum, level, stamps
                        )
                        cleanOut["climate"]["Wind Speed (m/s)"] = hdmAgg(
                            rawOut[key].get("wind_speed"), avg, level, stamps
                        )
                        cleanOut["climate"]["Temperature (F)"] = hdmAgg(
                            rawOut[key].get("temperature"), max, level, stamps
                        )
                        cleanOut["climate"]["Snow Depth (in)"] = hdmAgg(
                            rawOut[key].get("snowdepth"), max, level, stamps
                        )
                        cleanOut["climate"]["Direct Insolation (W/m^2)"] = hdmAgg(
                            rawOut[key].get("solar_direct"), sum, level, stamps
                        )
                # Voltage Band
                # Recorder reports voltage_12 (240V leg); halve to get the
                # 120V per-leg magnitude.
                if "VoltageJiggle.csv" in rawOut:
                    cleanOut["allMeterVoltages"] = {}
                    cleanOut["allMeterVoltages"]["Min"] = hdmAgg(
                        [float(i / 2) for i in rawOut["VoltageJiggle.csv"]["min(voltage_12.mag)"]], min, level, stamps
                    )
                    cleanOut["allMeterVoltages"]["Mean"] = hdmAgg(
                        [float(i / 2) for i in rawOut["VoltageJiggle.csv"]["mean(voltage_12.mag)"]], avg, level, stamps
                    )
                    cleanOut["allMeterVoltages"]["StdDev"] = hdmAgg(
                        [float(i / 2) for i in rawOut["VoltageJiggle.csv"]["std(voltage_12.mag)"]], avg, level, stamps
                    )
                    cleanOut["allMeterVoltages"]["Max"] = hdmAgg(
                        [float(i / 2) for i in rawOut["VoltageJiggle.csv"]["max(voltage_12.mag)"]], max, level, stamps
                    )
                # Power Consumption
                cleanOut["Consumption"] = {}
                # Set default value to be 0, avoiding missing value when
                # computing Loads
                cleanOut["Consumption"]["Power"] = [0] * int(inputDict["simLength"])
                cleanOut["Consumption"]["Losses"] = [0] * int(inputDict["simLength"])
                cleanOut["Consumption"]["DG"] = [0] * int(inputDict["simLength"])
                for key in rawOut:
                    if key.startswith("SwingKids_") and key.endswith(".csv"):
                        # Swing-bus apparent power magnitude, summed across
                        # all SwingKids files.
                        oneSwingPower = hdmAgg(
                            vecPyth(rawOut[key]["sum(power_in.real)"], rawOut[key]["sum(power_in.imag)"]),
                            avg,
                            level,
                            stamps,
                        )
                        if "Power" not in cleanOut["Consumption"]:
                            cleanOut["Consumption"]["Power"] = oneSwingPower
                        else:
                            cleanOut["Consumption"]["Power"] = vecSum(oneSwingPower, cleanOut["Consumption"]["Power"])
                    elif key.startswith("Inverter_") and key.endswith(".csv"):
                        # Per-phase |S| = sqrt(P^2 + Q^2), summed over phases.
                        realA = rawOut[key]["power_A.real"]
                        realB = rawOut[key]["power_B.real"]
                        realC = rawOut[key]["power_C.real"]
                        imagA = rawOut[key]["power_A.imag"]
                        imagB = rawOut[key]["power_B.imag"]
                        imagC = rawOut[key]["power_C.imag"]
                        oneDgPower = hdmAgg(
                            vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB), vecPyth(realC, imagC)),
                            avg,
                            level,
                            stamps,
                        )
                        if "DG" not in cleanOut["Consumption"]:
                            cleanOut["Consumption"]["DG"] = oneDgPower
                        else:
                            cleanOut["Consumption"]["DG"] = vecSum(oneDgPower, cleanOut["Consumption"]["DG"])
                    elif key.startswith("Windmill_") and key.endswith(".csv"):
                        # Wind turbine power = |V| * |I| per phase.
                        vrA = rawOut[key]["voltage_A.real"]
                        vrB = rawOut[key]["voltage_B.real"]
                        vrC = rawOut[key]["voltage_C.real"]
                        viA = rawOut[key]["voltage_A.imag"]
                        viB = rawOut[key]["voltage_B.imag"]
                        viC = rawOut[key]["voltage_C.imag"]
                        crB = rawOut[key]["current_B.real"]
                        crA = rawOut[key]["current_A.real"]
                        crC = rawOut[key]["current_C.real"]
                        ciA = rawOut[key]["current_A.imag"]
                        ciB = rawOut[key]["current_B.imag"]
                        ciC = rawOut[key]["current_C.imag"]
                        powerA = vecProd(vecPyth(vrA, viA), vecPyth(crA, ciA))
                        powerB = vecProd(vecPyth(vrB, viB), vecPyth(crB, ciB))
                        powerC = vecProd(vecPyth(vrC, viC), vecPyth(crC, ciC))
                        # HACK: multiply by negative one because turbine power
                        # sign is opposite all other DG:
                        oneDgPower = [-1.0 * x for x in hdmAgg(vecSum(powerA, powerB, powerC), avg, level, stamps)]
                        if "DG" not in cleanOut["Consumption"]:
                            cleanOut["Consumption"]["DG"] = oneDgPower
                        else:
                            cleanOut["Consumption"]["DG"] = vecSum(oneDgPower, cleanOut["Consumption"]["DG"])
                    elif key in [
                        "OverheadLosses.csv",
                        "UndergroundLosses.csv",
                        "TriplexLosses.csv",
                        "TransformerLosses.csv",
                    ]:
                        # Aggregate all loss categories into one series.
                        realA = rawOut[key]["sum(power_losses_A.real)"]
                        imagA = rawOut[key]["sum(power_losses_A.imag)"]
                        realB = rawOut[key]["sum(power_losses_B.real)"]
                        imagB = rawOut[key]["sum(power_losses_B.imag)"]
                        realC = rawOut[key]["sum(power_losses_C.real)"]
                        imagC = rawOut[key]["sum(power_losses_C.imag)"]
                        oneLoss = hdmAgg(
                            vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB), vecPyth(realC, imagC)),
                            avg,
                            level,
                            stamps,
                        )
                        if "Losses" not in cleanOut["Consumption"]:
                            cleanOut["Consumption"]["Losses"] = oneLoss
                        else:
                            cleanOut["Consumption"]["Losses"] = vecSum(oneLoss, cleanOut["Consumption"]["Losses"])
                # Aggregate up the timestamps:
                if level == "days":
                    cleanOut["timeStamps"] = aggSeries(stamps, stamps, lambda x: x[0][0:10], "days")
                elif level == "months":
                    cleanOut["timeStamps"] = aggSeries(stamps, stamps, lambda x: x[0][0:7], "months")
                # Write the output.
                with open(pJoin(modelDir, feederName, "allOutputData.json"), "w") as outFile:
                    json.dump(cleanOut, outFile, indent=4)
                # Update the runTime in the input file.
                endTime = datetime.datetime.now()
                inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
                with open(pJoin(modelDir, feederName, "allInputData.json"), "w") as inFile:
                    json.dump(inputDict, inFile, indent=4)
                # Clean up the PID file.
                os.remove(pJoin(modelDir, feederName, "PID.txt"))
                print "DONE RUNNING GRIDLABMULTI", modelDir, feederName
            except Exception as e:
                # Mark the feeder run as cancelled and persist the traceback.
                print "MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName
                cancel(pJoin(modelDir, feederName))
                with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
                    traceback.print_exc(file=stderrFile)
# ---- 示例#5 (Example 5: scraped-snippet separator) ----
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Reads inputDict["feederName1"] and inputDict["weather"]: "typical" copies a
	TMY2 climate file looked up from zipCode; any other value generates
	weather.csv via makeClimateCsv, and "historical" wires a csv_reader-backed
	climate object into the GLM tree. Writes allOutputData.json and
	allInputData.json in modelDir, and removes gldContainer/PID.txt and
	PPID.txt on completion. Python 2 code (print statement).
	'''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass  # best effort: directory may already exist
	try:
		feederName = inputDict["feederName1"]
		weather = inputDict["weather"]
		if weather == "typical":
			inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
			shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
				pJoin(modelDir, "gldContainer", "climate.tmy2"))
			startTime = datetime.datetime.now()
		else:
			#hack for testing
			# NOTE(review): output path and date range are hard-coded; this
			# ignores modelDir and the simulation dates — confirm intended.
			makeClimateCsv('2010-07-01', '2010-08-01', 'DFW', 'Output/Automated dsoSimSuite Test/gldContainer/weather.csv')
			startTime = datetime.datetime.now()
		# NOTE(review): redundant — startTime was already set in both branches.
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		# .keys() is a snapshot list in Python 2, so deleting while looping is safe.
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
				if feederJson['tree'][key]['object'] == 'climate':
					del feederJson['tree'][key]
		#add weather objects and modules to .glm if there is no climate file in the omd file
		if weather == "historical":
			# csv_reader-based climate fed from the generated weather.csv.
			oldMax = feeder.getMaxKey(tree)
			tree[oldMax + 1] = {'omftype':'module', 'argument':'tape'}
			tree[oldMax + 2] = {'omftype':'module', 'argument':'climate'}
			tree[oldMax + 3] = {'object':'csv_reader', 'name':'weatherReader', 'filename':'weather.csv'}
			tree[oldMax + 4] = {'object':'climate', 'name':'exampleClimate', 'tmyfile':'weather.csv', 'reader':'weatherReader'}
		else:
			# TMY2-based climate object; note the climate module itself is not
			# added in this branch (unlike the historical branch above).
			oldMax = feeder.getMaxKey(tree)
			tree[oldMax + 1] ={'object':'climate','name':'Climate','interpolate':'QUADRATIC', 'tmyfile':'climate.tmy2'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map:
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
			simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
			keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				# NOTE(review): no break after the elif, so a later
				# non-matching file can overwrite an elif match with [].
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList
		# Voltage Band
		# Recorder reports voltage_12 (240V leg); halve for the 120V per-leg value.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				# Swing-bus apparent power magnitude, summed across files.
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'):
				# Per-phase |S| = sqrt(P^2 + Q^2), summed over the three phases.
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Wind turbine power = |V| * |I| per phase.
				# NOTE(review): unlike the multi-feeder variant of this code,
				# no -1 sign flip is applied here — confirm which is correct.
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				# Aggregate all loss categories into one series.
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				# The zero-filled defaults are immediately overwritten below.
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				# The zero-filled defaults are immediately overwritten below.
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		# Use neato auto-layout only when most nodes lack coordinates.
		if latPerc < 0.25: doNeato = True
		else: doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		# NOTE(review): other variants unpack (genTime, mapTimestamp) here —
		# confirm this generateVoltChart returns a single value.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# Always record total wall-clock time, success or failure.
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass  # best effort: parent-PID file may already be gone
# ---- 示例#6 (Example 6: scraped-snippet separator) ----
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass
	try:	
		feederName = inputDict["feederName1"]
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, "gldContainer", "climate.tmy2"))
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]	
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
			 	if feederJson['tree'][key]['object'] == 'climate':
			 		del feederJson['tree'][key]
		oldMax = feeder.getMaxKey(tree)
		tree[oldMax + 1] = {'omftype':'module','argument':'climate'}
		tree[oldMax + 2] = {'object':'climate','name':'Climate','interpolate':'QUADRATIC','tmyfile':'climate.tmy2'}
		# tree[oldMax + 3] = {'object':'capacitor','control':'VOLT','phases':'ABCN','name':'CAPTEST','parent':'tm_1','capacitor_A':'0.10 MVAr','capacitor_B':'0.10 MVAr','capacitor_C':'0.10 MVAr','time_delay':'300.0','nominal_voltage':'2401.7771','voltage_set_high':'2350.0','voltage_set_low':'2340.0','switchA':'CLOSED','switchB':'CLOSED','switchC':'CLOSED','control_level':'INDIVIDUAL','phases_connected':'ABCN','dwell_time':'0.0','pt_phases':'ABCN'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)

		# Attach recorder for waterheaters on/off
		stub = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'is_waterheater_on', 'interval':3600, 'file':'allWaterheaterOn.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach recorder for waterheater tank temperatures
		stub = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'temperature', 'interval':3600, 'file':'allWaterheaterTemp.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach collector for total waterheater load
		stub = {'object':'collector', 'group':'"class=waterheater"', 'property':'sum(actual_load)', 'interval':3600, 'file':'allWaterheaterLoad.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach collector for total network load
		stub = {'object':'collector', 'group':'"class=triplex_meter"', 'property':'sum(measured_real_power)', 'interval':3600, 'file':'allMeterPower.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		# Attach collector for total overall ZIPload power/load
		stub = {'object':'collector', 'group':'"class=ZIPload"', 'property':'sum(base_power)', 'interval':3600, 'file':'allZIPloadPower.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach recorder for each ZIPload power/load
		stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'base_power', 'interval':3600, 'file':'eachZIPloadPower.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach recorder for all ZIPloads demand_rate
		stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'demand_rate', 'interval':3600, 'file':'allZIPloadDemand.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub
		# Attach recorder for all ZIPloads on
		stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'number_of_devices_on', 'interval':3600, 'file':'allZIPloadOn.csv'}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		# Attach passive_controller	
		tree[feeder.getMaxKey(tree)+1] = {'omftype':'module','argument':'market'}
		tree[feeder.getMaxKey(tree)+1] = {'omftype':'class auction','argument':'{\n\tdouble my_avg; double my_std;\n}'}
		tree[feeder.getMaxKey(tree)+1] = {'omftype':'class player','argument':'{\n\tdouble value;\n}'}

		stub = {
			'object':'player',
			'name':'cppDays',
			'file':'superCpp.player'
		}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		stub = {
			'object':'player',
			'name':'superClearing',
			'file':'superClearingPrice.player',
			'loop':10
		}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		stub = {
			'object':'auction',
			'name':'MARKET_1',
			'my_std':0.037953,
			'period':900,
			'my_avg':0.110000,
			'current_market.clearing_price':'superClearing.value',
			'special_mode':'BUYERS_ONLY',
			'unit': 'kW'
		}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		stub = {
			'object':'passive_controller',
			'name':'waterheater_controller_waterheater171923',
			'parent':'waterheater171923',
			'control_mode':'RAMP',
			'range_high':5,
			'range_low':-5,
			'ramp_high':1,
			'ramp_low':-1,
			'period':900,
			'setpoint':'is_waterheater_on',
			'base_setpoint':1,
			'expectation_object':'MARKET_1',
			'expectation_property':'my_avg',
			'observation_object':'MARKET_1',
			'observation_property':'past_market.clearing_price',
			'stdev_observation_property':'my_std',
			'state_property':'override'
		}
		copyStub = dict(stub)
		tree[feeder.getMaxKey(tree)+1] = copyStub

		# stub = {
		# 	'object':'passive_controller',
		# 	'name':'ZIPload_controller_ZIPload171922',
		# 	'parent':'ZIPload171922',
		# 	'control_mode':'RAMP',
		# 	'range_high':5,
		# 	'range_low':-5,
		# 	'ramp_high':1,
		# 	'ramp_low':-1,
		# 	'period':900,
		# 	'setpoint':'base_power'
		# 	'base_setpoint':1,
		# 	'expectation_object':'MARKET_1',
		# 	'expectation_property':'my_avg',
		# 	'observation_object':'MARKET_1',
		# 	'observation_property':'past_market.clearing_price',
		# 	'stdev_observation_property':'my_std'
		# 	'state_property':'override'
		# }
		# copyStub = dict(stub)
		# tree[feeder.getMaxKey(tree)+1] = copyStub

		# Attach recorders for system voltage map:
		stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':3600}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		# Attach recorders for system voltage map, triplex:
		stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':3600}
		for phase in ['1','2']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'nVoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		# And get meters for system voltage map:
		stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':3600}
		for phase in ['1','2']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'mVoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
			simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
			keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		for key in rawOut:
			print key
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList			
		# Voltage Band
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'): 	
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]

		# Print gridBallast Outputs to allOutputData.json
		cleanOut['gridBallast'] = {}
		if 'allWaterheaterOn.csv' in rawOut:
			cleanOut['gridBallast']['waterheaterOn'] = {}
			for key in rawOut['allWaterheaterOn.csv']:
				if key.startswith('waterheater'):
					cleanOut['gridBallast']['waterheaterOn'][key] = rawOut.get('allWaterheaterOn.csv')[key]
		if 'allWaterheaterTemp.csv' in rawOut:
			cleanOut['gridBallast']['waterheaterTemp'] = {}
			for key in rawOut['allWaterheaterTemp.csv']:
				if key.startswith('waterheater'):
					cleanOut['gridBallast']['waterheaterTemp'][key] = rawOut.get('allWaterheaterTemp.csv')[key]
		if 'allMeterPower.csv' in rawOut:
			cleanOut['gridBallast']['totalNetworkLoad'] = rawOut.get('allMeterPower.csv')['sum(measured_real_power)']

		if ('allWaterheaterLoad.csv' in rawOut) and ('allZIPloadPower.csv' in rawOut):
			cleanOut['gridBallast']['availabilityMagnitude'] = [x + y for x, y in zip(rawOut.get('allWaterheaterLoad.csv')['sum(actual_load)'], rawOut.get('allZIPloadPower.csv')['sum(base_power)'])]
		if 'eachZIPloadPower.csv' in rawOut:
			cleanOut['gridBallast']['ZIPloadPower'] = {}
			for key in rawOut['eachZIPloadPower.csv']:
				if key.startswith('ZIPload'):
					cleanOut['gridBallast']['ZIPloadPower'][key] = rawOut.get('eachZIPloadPower.csv')[key]
		if 'allZIPloadDemand.csv' in rawOut:
			cleanOut['gridBallast']['ZIPloadDemand'] = {}
			for key in rawOut['allZIPloadDemand.csv']:
				if key.startswith('ZIPload'):
					cleanOut['gridBallast']['ZIPloadDemand'][key] = rawOut.get('allZIPloadDemand.csv')[key]
		if 'allZIPloadOn.csv' in rawOut:
			cleanOut['gridBallast']['ZIPloadOn'] = {}
			for key in rawOut['allZIPloadOn.csv']:
				if key.startswith('ZIPload'):
					cleanOut['gridBallast']['ZIPloadOn'][key] = rawOut.get('allZIPloadOn.csv')[key]

		# EventTime calculations
		eventTime = inputDict['eventTime']
		eventLength = inputDict['eventLength']
		eventLength = eventLength.split(':')
		eventDuration = datetime.timedelta(hours=int(eventLength[0]), minutes=int(eventLength[1]))
		eventStart = datetime.datetime.strptime(eventTime, '%Y-%m-%d %H:%M')
		eventEnd = eventStart + eventDuration
		cleanOut['gridBallast']['eventStart'] = str(eventStart)
		cleanOut['gridBallast']['eventEnd'] = str(eventEnd)
		# Drop timezone from timeStamp, Convert string to date
		timeStamps = [x[:19] for x in cleanOut['timeStamps']]
		dateTimeStamps = [datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S') for x in timeStamps]	
		eventEndIdx =  dateTimeStamps.index(eventEnd)
		# Recovery Time
		whOn = cleanOut['gridBallast']['waterheaterOn']
		whOnList = whOn.values()
		whOnZip = zip(*whOnList)
		whOnSum = [sum(x) for x in whOnZip]
		anyOn = [x > 0 for x in whOnSum] 
		tRecIdx = anyOn.index(True, eventEndIdx)
		tRec = dateTimeStamps[tRecIdx]
		cleanOut['gridBallast']['recoveryTime'] = str(tRec)
		# Waterheaters Off-Duration
		offDuration = tRec - eventStart
		cleanOut['gridBallast']['offDuration'] = str(offDuration)
		# Reserve Magnitude Target (RMT)
		availMag = cleanOut['gridBallast']['availabilityMagnitude']
		totNetLoad = cleanOut['gridBallast']['totalNetworkLoad']
		# loadZip = zip(availMag,totNetLoad)
		# rmt = [x[0]/x[1] for x in loadZip]
		rmt = (1000*sum(availMag))/sum(totNetLoad)
		cleanOut['gridBallast']['rmt'] = rmt
		# Reserve Magnitude Variability Tolerance (RMVT)
		avgAvailMag = sum(availMag)/len(availMag)
		rmvtMax = max(availMag)/avgAvailMag
		rmvtMin = min(availMag)/avgAvailMag
		rmvt = rmvtMax - rmvtMin
		cleanOut['gridBallast']['rmvt'] = rmvt
		# Availability
		notAvail = availMag.count(0)/len(timeStamps)
		avail = (1-notAvail)*100
		cleanOut['gridBallast']['availability'] = avail
		# Waterheater Temperature Drop calculations
		whTemp = cleanOut['gridBallast']['waterheaterTemp']
		whTempList = whTemp.values()
		whTempZip = zip(*whTempList)
		whTempDrops = []
		LOWER_LIMIT_TEMP = 125 # Used for calculating quality of service.
		for time in whTempZip:
			tempDrop = sum([t < LOWER_LIMIT_TEMP for t in time])
			whTempDrops.append(tempDrop)
		cleanOut['gridBallast']['waterheaterTempDrops'] = whTempDrops

		# ZIPload calculations for Availability and QoS
		zPower = cleanOut['gridBallast']['ZIPloadPower']
		zPowerList = zPower.values()
		zPowerZip = zip(*zPowerList)
		zPowerSum = [sum(x) for x in zPowerZip]
		zDemand = cleanOut['gridBallast']['ZIPloadDemand']
		zDemandList  = zDemand.values()
		zDemandZip = zip(*zDemandList)
		zDrops = []
		for time in zDemandZip:
			for each in zPowerZip:
				zIdx = 0
				if each[zIdx] == 0:
					zPowerIdx += 1
					zDrop = sum([t > 0 for t in time])
					zDrops.append(zDrop)
				else:
					zDrops.append(0)
		cleanOut['gridBallast']['qualityDrops'] = [x + y for x, y in zip(whTempDrops, zDrops)]

		# What percentage of our keys have lat lon data?
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		if latPerc < 0.25: doNeato = True
		else: doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)	
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
示例#7
0
def work(modelDir, inputDict):
	feederName = inputDict["feederName1"]
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
		pJoin(modelDir, "climate.tmy2"))
	feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
	tree = feederJson["tree"]
	# tree[feeder.getMaxKey(tree)+1] = {'object':'capacitor','control':'VOLT','phases':'ABCN','name':'CAPTEST','parent':'tm_1','capacitor_A':'0.10 MVAr','capacitor_B':'0.10 MVAr','capacitor_C':'0.10 MVAr','time_delay':'300.0','nominal_voltage':'2401.7771','voltage_set_high':'2350.0','voltage_set_low':'2340.0','switchA':'CLOSED','switchB':'CLOSED','switchC':'CLOSED','control_level':'INDIVIDUAL','phases_connected':'ABCN','dwell_time':'0.0','pt_phases':'ABCN'}
	# Set up GLM with correct time and recorders:
	feeder.attachRecorders(tree, "Regulator", "object", "regulator")
	feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
	feeder.attachRecorders(tree, "Inverter", "object", "inverter")
	feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
	feeder.attachRecorders(tree, "CollectorVoltage", None, None)
	feeder.attachRecorders(tree, "Climate", "object", "climate")
	feeder.attachRecorders(tree, "OverheadLosses", None, None)
	feeder.attachRecorders(tree, "UndergroundLosses", None, None)
	feeder.attachRecorders(tree, "TriplexLosses", None, None)
	feeder.attachRecorders(tree, "TransformerLosses", None, None)
	feeder.groupSwingKids(tree)

	# System check - linux doesn't support newer GridLAB-D versions
	if sys.platform == 'linux2':
		pass
	else:
		# print feeder.getMaxKey(tree)
		# tree[14,20,27,28,47] empty for UCS Egan, add climate object to tree[14]
		# HACK: tree[10:19] is empty
		tree[11] = {'omftype':'#include', 'argument':'\"hot_water_demand.glm\"'}
		tree[12] = {'omftype':'#include', 'argument':'\"lock_mode_schedule.glm\"'}
		tree[13] = {'omftype':'#include', 'argument':'\"control_priority_schedule.glm\"'}
		# Attach frequency player
		tree[14] = {'omftype':'class player', 'argument':'{double value;}'}
		tree[feeder.getMaxKey(tree)+1] = {'object':'player', 'file':'frequency.PLAYER', 'property':'value', 'name':'frequency', 'loop':0}
		# Set up GridBallast Controls
		totalWH = 0
		totalZIP = 0
		gbWH = 0
		gbZIP = 0
		for key in tree.keys():
			# Waterheater Controller properties
			if ('name' in tree[key]) and (tree[key].get('object') == 'waterheater'):
		 		totalWH += 1
	 			gbWH += 1
	 			# Frequency control parameters
	 			tree[key]['enable_freq_control'] = 'true'
	 			tree[key]['measured_frequency'] = 'frequency.value'
	 			tree[key]['freq_lowlimit'] = 59
	 			tree[key]['freq_uplimit'] = 61
	 			tree[key]['heat_mode'] = 'ELECTRIC'
	 			# tree[key]['average_delay_time'] = 60
	 			# Voltage control parameters
	 			# tree[key]['enable_volt_control'] = 'true'
	 			# tree[key]['volt_lowlimit'] = 240.4
	 			# tree[key]['volt_uplimit'] = 241.4
	 			# Lock Mode parameters
	 			# tree[key]['enable_lock'] = 'temp_lock_enable'
	 			# tree[key]['lock_STATUS'] = 'temp_lock_status'
	 			# Controller Priority: a.lock, b.freq, c.volt, d.therm
	 			tree[key]['controller_priority'] = 3214 #default:therm>lock>freq>volt
	 			# tree[key]['controller_priority'] = 1423 #freq>therm>volt>lock
	 			# tree[key]['controller_priority'] = 'control_priority'
		 		# fix waterheater property demand to water_demand for newer GridLAB-D versions
		 		if 'demand' in tree[key]:
		 			# tree[key]['water_demand'] = tree[key]['demand']
		 			tree[key]['water_demand'] = 'weekday_hotwater*1'
		 			del tree[key]['demand']
			# ZIPload Controller properties
			if ('name' in tree[key]) and (tree[key].get('object') == 'ZIPload'):
		 		totalZIP += 1
				if tree[key]['name'].startswith('responsive'):
		 			gbZIP += 1
			 		# Frequency control parameters
		 			tree[key]['enable_freq_control'] = 'true'
		 			tree[key]['measured_frequency'] = 'frequency.value'
		 			tree[key]['freq_lowlimit'] = 59
		 			tree[key]['freq_uplimit'] = 61
		 			# tree[key]['average_delay_time'] = 60
		 			# Voltage control parameters
		 			# tree[key]['enable_volt_control'] = 'true'
		 			# tree[key]['volt_lowlimit'] = 240.4
		 			# tree[key]['volt_uplimit'] = 241.4
		 			# Lock Mode parameters
		 			# tree[key]['enable_lock'] = 'temp_lock_enable'
		 			# tree[key]['lock_STATUS'] = 'temp_lock_status'
		 			tree[key]['controller_priority'] = 4321 #default:lock>freq>volt>therm
		 			# tree[key]['controller_priority'] = 2431 #freq>volt>lock>therm
		 			# tree[key]['groupid'] = 'fan'

	# Attach collector for total network load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=triplex_meter"', 'property':'sum(measured_real_power)', 'interval':60, 'file':'allMeterPower.csv'}
	# Attach collector for total waterheater load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=waterheater"', 'property':'sum(actual_load)', 'interval':60, 'file':'allWaterheaterLoad.csv'}
	# Attach collector for total ZIPload power/load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=ZIPload"', 'property':'sum(base_power)', 'interval':60, 'file':'allZIPloadPower.csv'}
	# Attach recorder for each ZIPload power/load
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'base_power', 'interval':60, 'file':'eachZIPloadPower.csv'}
	# Attach recorder for all ZIPloads demand_rate
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'demand_rate', 'interval':60, 'file':'allZIPloadDemand.csv'}
	# Attach recorder for waterheaters on/off
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'is_waterheater_on', 'interval':60, 'file':'allWaterheaterOn.csv'}
	# Attach recorder for waterheater tank temperatures
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'temperature', 'interval':60, 'file':'allWaterheaterTemp.csv'}
	
	# Attach recorders for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':60}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach recorders for system voltage map, triplex:
	stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':60}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'nVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# And get meters for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':60}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'mVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
		simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
	# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
	rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
		keepFiles=True, workDir=pJoin(modelDir))
	outData = {}
	# Std Err and Std Out
	outData['stderr'] = rawOut['stderr']
	outData['stdout'] = rawOut['stdout']
	# Time Stamps
	for key in rawOut:
		if '# timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# timestamp']
			break
		elif '# property.. timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# property.. timestamp']
		else:
			outData['timeStamps'] = []
	# Day/Month Aggregation Setup:
	stamps = outData.get('timeStamps',[])
	level = inputDict.get('simLengthUnits','hours')
	# Climate
	for key in rawOut:
		if key.startswith('Climate_') and key.endswith('.csv'):
			outData['climate'] = {}
			outData['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
			outData['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
			outData['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
			outData['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
			outData['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			#outData['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
			climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
			#converting W/sf to W/sm
			climateWbySMList= [x*10.76392 for x in climateWbySFList]
			outData['climate']['Global Horizontal (W/sm)']=climateWbySMList			
	# Voltage Band
	if 'VoltageJiggle.csv' in rawOut:
		outData['allMeterVoltages'] = {}
		outData['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
		outData['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
	# Power Consumption
	outData['Consumption'] = {}
	# Set default value to be 0, avoiding missing value when computing Loads
	outData['Consumption']['Power'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['DG'] = [0] * int(inputDict["simLength"])
	for key in rawOut:
		if key.startswith('SwingKids_') and key.endswith('.csv'):
			oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
			if 'Power' not in outData['Consumption']:
				outData['Consumption']['Power'] = oneSwingPower
			else:
				outData['Consumption']['Power'] = vecSum(oneSwingPower,outData['Consumption']['Power'])
		elif key.startswith('Inverter_') and key.endswith('.csv'): 	
			realA = rawOut[key]['power_A.real']
			realB = rawOut[key]['power_B.real']
			realC = rawOut[key]['power_C.real']
			imagA = rawOut[key]['power_A.imag']
			imagB = rawOut[key]['power_B.imag']
			imagC = rawOut[key]['power_C.imag']
			oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key.startswith('Windmill_') and key.endswith('.csv'):
			vrA = rawOut[key]['voltage_A.real']
			vrB = rawOut[key]['voltage_B.real']
			vrC = rawOut[key]['voltage_C.real']
			viA = rawOut[key]['voltage_A.imag']
			viB = rawOut[key]['voltage_B.imag']
			viC = rawOut[key]['voltage_C.imag']
			crB = rawOut[key]['current_B.real']
			crA = rawOut[key]['current_A.real']
			crC = rawOut[key]['current_C.real']
			ciA = rawOut[key]['current_A.imag']
			ciB = rawOut[key]['current_B.imag']
			ciC = rawOut[key]['current_C.imag']
			powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
			powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
			powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
			oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
			realA = rawOut[key]['sum(power_losses_A.real)']
			imagA = rawOut[key]['sum(power_losses_A.imag)']
			realB = rawOut[key]['sum(power_losses_B.real)']
			imagB = rawOut[key]['sum(power_losses_B.imag)']
			realC = rawOut[key]['sum(power_losses_C.real)']
			imagC = rawOut[key]['sum(power_losses_C.imag)']
			oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'Losses' not in outData['Consumption']:
				outData['Consumption']['Losses'] = oneLoss
			else:
				outData['Consumption']['Losses'] = vecSum(oneLoss,outData['Consumption']['Losses'])
		elif key.startswith('Regulator_') and key.endswith('.csv'):
			#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
			regName=""
			regName = key
			newkey=regName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapA'] = rawOut[key]['tap_A']
			outData[newkey]['RegTapB'] = rawOut[key]['tap_B']
			outData[newkey]['RegTapC'] = rawOut[key]['tap_C']
			outData[newkey]['RegPhases'] = rawOut[key]['phases'][0]
		elif key.startswith('Capacitor_') and key.endswith('.csv'):
			capName=""
			capName = key
			newkey=capName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1A'] = rawOut[key]['switchA']
			outData[newkey]['Cap1B'] = rawOut[key]['switchB']
			outData[newkey]['Cap1C'] = rawOut[key]['switchC']
			outData[newkey]['CapPhases'] = rawOut[key]['phases'][0]

	# Print gridBallast Outputs to allOutputData.json
	outData['gridBallast'] = {}
	if 'allMeterPower.csv' in rawOut:
		outData['gridBallast']['totalNetworkLoad'] = [x / 1000 for x in rawOut.get('allMeterPower.csv')['sum(measured_real_power)']] #Convert W to kW
	if ('allZIPloadPower.csv' in rawOut) and ('allWaterheaterLoad.csv' in rawOut):
		outData['gridBallast']['availabilityMagnitude'] = [x[0] + x[1] for x in zip(rawOut.get('allWaterheaterLoad.csv')['sum(actual_load)'], rawOut.get('allZIPloadPower.csv')['sum(base_power)'])]
	if 'allZIPloadDemand.csv' in rawOut:
		outData['gridBallast']['ZIPloadDemand'] = {}
		for key in rawOut['allZIPloadDemand.csv']:
			if (key.startswith('ZIPload')) or (key.startswith('responsive')) or (key.startswith('unresponsive')):
				outData['gridBallast']['ZIPloadDemand'][key] = rawOut.get('allZIPloadDemand.csv')[key]
	if 'eachZIPloadPower.csv' in rawOut:
				outData['gridBallast']['ZIPloadPower'] = {}
				for key in rawOut['eachZIPloadPower.csv']:
					if (key.startswith('ZIPload')) or (key.startswith('responsive')) or (key.startswith('unresponsive')):
						outData['gridBallast']['ZIPloadPower'][key] = rawOut.get('eachZIPloadPower.csv')[key]
	if 'allWaterheaterOn.csv' in rawOut:
		outData['gridBallast']['waterheaterOn'] = {}
		for key in rawOut['allWaterheaterOn.csv']:
			if (key.startswith('waterheater')) or (key.startswith('waterHeater')):
				outData['gridBallast']['waterheaterOn'][key] = rawOut.get('allWaterheaterOn.csv')[key]
	if 'allWaterheaterTemp.csv' in rawOut:
		outData['gridBallast']['waterheaterTemp'] = {}
		for key in rawOut['allWaterheaterTemp.csv']:
			if (key.startswith('waterheater')) or (key.startswith('waterHeater')):
				outData['gridBallast']['waterheaterTemp'][key] = rawOut.get('allWaterheaterTemp.csv')[key]
	# System check - linux doesn't support newer GridLAB-D versions
	if sys.platform == 'linux2':
		pass
	else:
		outData['gridBallast']['penetrationLevel'] = 100*(gbWH+gbZIP)/(totalWH+totalZIP)
		# Frequency Player
		inArray = feederJson['attachments']['frequency.PLAYER'].split('\n')
		tempArray = []
		for each in inArray:
			x = each.split(',')
			y = float(x[1])
			tempArray.append(y)
		outData['frequencyPlayer'] = tempArray
	# EventTime calculations
	eventTime = inputDict['eventTime']
	eventLength = inputDict['eventLength'].split(':')
	eventDuration = datetime.timedelta(hours=int(eventLength[0]), minutes=int(eventLength[1]))
	eventStart = datetime.datetime.strptime(eventTime, '%Y-%m-%d %H:%M')
	eventEnd = eventStart + eventDuration
	outData['gridBallast']['eventStart'] = str(eventStart)
	outData['gridBallast']['eventEnd'] = str(eventEnd)
	outData['gridBallast']['xMin'] = str(eventStart - datetime.timedelta(minutes=30))
	outData['gridBallast']['xMax'] = str(eventEnd + datetime.timedelta(minutes=30))
	# Convert string to date
	# HACK: remove timezones, inconsistency in matching format
	timeStampsDebug = [x[:19] for x in outData['timeStamps']]
	dateTimeStamps = [datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S') for x in timeStampsDebug]	
	eventEndIdx =  dateTimeStamps.index(eventEnd)
	# Recovery Time
	whOn = outData['gridBallast']['waterheaterOn']
	whOnList = whOn.values()
	whOnZip = zip(*whOnList)
	whOnSum = [sum(x) for x in whOnZip]
	anyOn = [x > 0 for x in whOnSum]
	tRecIdx = anyOn.index(True, eventEndIdx)
	tRec = dateTimeStamps[tRecIdx]
	recoveryTime = tRec - eventEnd
	outData['gridBallast']['recoveryTime'] = str(recoveryTime)
	# Waterheaters Off-Duration
	offDuration = tRec - eventStart
	outData['gridBallast']['offDuration'] = str(offDuration)
	# Reserve Magnitude (RM)
	availMag = outData['gridBallast']['availabilityMagnitude']
	totalNetLoad = outData['gridBallast']['totalNetworkLoad']
	availPerc = [100 * x[0]/x[1] for x in zip(availMag,totalNetLoad)]
	outData['gridBallast']['availabilityPercent'] = availPerc
	outData['gridBallast']['rm'] = [100 - x for x in availPerc]
	# Average RM during event
	eventRM = [100 - x[1] for x in zip(dateTimeStamps, availPerc) if (x[0] == eventStart) or (x[0] == eventEnd)]
	outData['gridBallast']['rmAvg'] = np.mean(eventRM)
	# Reserve Magnitude Variability Tolerance (RMVT)
	outData['gridBallast']['rmvt'] = np.std(eventRM)
	# Availability
	rmt = 7
	available = [x[1] > rmt for x in zip(dateTimeStamps, availPerc) if (x[0] < eventStart) or (x[0] > eventEnd)]
	outData['gridBallast']['availability'] = 100.0 * sum(available) / (int(inputDict['simLength']) - int(eventLength[1]) - 1)
	# Waterheater Temperature Drop calculations
	whTemp = outData['gridBallast']['waterheaterTemp']
	whTempList = whTemp.values()
	whTempZip = zip(*whTempList)
	whTempDrops = []
	LOWER_LIMIT_TEMP = 110 # Used for calculating quality of service. Typical hot shower temp = 105 F.
	for time in whTempZip:
		tempDrop = sum([t < LOWER_LIMIT_TEMP for t in time])
		whTempDrops.append(tempDrop)
	outData['gridBallast']['waterheaterTempDrops'] = whTempDrops
	# ZIPload calculations for Availability and QoS
	zPower = outData['gridBallast']['ZIPloadPower']
	zPowerList = zPower.values()
	zPowerZip = zip(*zPowerList)
	zDemand = outData['gridBallast']['ZIPloadDemand']
	zDemandList  = zDemand.values()
	zDemandZip = zip(*zDemandList)
	zDrops = []
	for x, y in zip(zPowerZip,zDemandZip):
		zDrop = 0
		for i in range(len(x)):
			if (x[i] == 0) and (y[i] > 0):
				zDrop += 1
		zDrops.append(zDrop)
	outData['gridBallast']['qualityDrops'] = [x + y for x, y in zip(zDrops, whTempDrops)]

	# What percentage of our keys have lat lon data?
	latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
	latPerc = 1.0*len(latKeys)/len(tree)
	if latPerc < 0.25:
		doNeato = True
	else:
		doNeato = False
	# Generate the frames for the system voltage map time traveling chart.
	genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
	outData['genTime'] = genTime
	# Aggregate up the timestamps:
	if level=='days':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
	elif level=='months':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
	return outData
# Example #8 (score: 0)
def runForeground(modelDir, test_mode=False):
    ''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

    Runs GridLAB-D once for every .omd feeder found in modelDir, cleans each
    raw result into <modelDir>/<feeder>/allOutputData.json, then merges all
    per-feeder outputs into a single <modelDir>/allOutputData.json. On
    success/failure an email notification is attempted. When test_mode is
    True, per-feeder exceptions are re-raised instead of only being logged.
    '''
    with open(pJoin(modelDir, 'allInputData.json')) as f:
        inputDict = json.load(f)
    print("STARTING TO RUN", modelDir)
    beginTime = datetime.datetime.now()
    # If this is a re-run, wipe all per-feeder subfolders from the last run.
    for dirs in os.listdir(modelDir):
        if os.path.isdir(pJoin(modelDir, dirs)):
            shutil.rmtree(pJoin(modelDir, dirs))
    # Get the names of the feeders from the .omd files:
    feederNames = [x[0:-4] for x in os.listdir(modelDir) if x.endswith(".omd")]
    for i, name in enumerate(feederNames):
        inputDict['feederName' + str(i + 1)] = name
    # Run GridLAB-D once for each feeder:
    for feederName in feederNames:
        try:
            os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
        except OSError:
            pass  # Nothing stale to delete on a first run.
        if not os.path.isdir(pJoin(modelDir, feederName)):
            os.makedirs(pJoin(modelDir,
                              feederName))  # create subfolders for feeders
        shutil.copy(pJoin(modelDir, feederName + ".omd"),
                    pJoin(modelDir, feederName, "feeder.omd"))
        inputDict["climateName"] = weather.zipCodeToClimateName(
            inputDict["zipCode"])
        shutil.copy(
            pJoin(_omfDir, "data", "Climate",
                  inputDict["climateName"] + ".tmy2"),
            pJoin(modelDir, feederName, "climate.tmy2"))
        try:
            startTime = datetime.datetime.now()
            with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
                feederJson = json.load(f)
            tree = feederJson["tree"]
            # Set up GLM with correct time and recorders:
            feeder.attachRecorders(tree, "Regulator", "object", "regulator")
            feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
            feeder.attachRecorders(tree, "Inverter", "object", "inverter")
            feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
            feeder.attachRecorders(tree, "CollectorVoltage", None, None)
            feeder.attachRecorders(tree, "Climate", "object", "climate")
            feeder.attachRecorders(tree, "OverheadLosses", None, None)
            feeder.attachRecorders(tree, "UndergroundLosses", None, None)
            feeder.attachRecorders(tree, "TriplexLosses", None, None)
            feeder.attachRecorders(tree, "TransformerLosses", None, None)
            feeder.groupSwingKids(tree)
            feeder.adjustTime(tree=tree,
                              simLength=float(inputDict["simLength"]),
                              simLengthUnits=inputDict["simLengthUnits"],
                              simStartDate=inputDict["simStartDate"])
            # RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
            rawOut = gridlabd.runInFilesystem(
                tree,
                attachments=feederJson["attachments"],
                keepFiles=True,
                workDir=pJoin(modelDir, feederName))
            cleanOut = {}
            # Std Err and Std Out
            cleanOut['stderr'] = rawOut['stderr']
            cleanOut['stdout'] = rawOut['stdout']
            # Time Stamps: use the first recorder column that carries them.
            # BUGFIX: the old loop reset timeStamps to [] on every
            # non-matching key and never broke after the '# property..'
            # form, so a found column could be clobbered later.
            cleanOut['timeStamps'] = []
            for key in rawOut:
                if '# timestamp' in rawOut[key]:
                    cleanOut['timeStamps'] = rawOut[key]['# timestamp']
                    break
                elif '# property.. timestamp' in rawOut[key]:
                    cleanOut['timeStamps'] = rawOut[key][
                        '# property.. timestamp']
                    break
            # Day/Month Aggregation Setup:
            stamps = cleanOut.get('timeStamps', [])
            level = inputDict.get('simLengthUnits', 'hours')
            # Climate
            for key in rawOut:
                if key.startswith('Climate_') and key.endswith('.csv'):
                    cleanOut['climate'] = {}
                    cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(
                        rawOut[key].get('rainfall'), sum, level)
                    cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(
                        rawOut[key].get('wind_speed'), avg, level)
                    cleanOut['climate']['Temperature (F)'] = hdmAgg(
                        rawOut[key].get('temperature'), max, level)
                    cleanOut['climate']['Snow Depth (in)'] = hdmAgg(
                        rawOut[key].get('snowdepth'), max, level)
                    cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(
                        rawOut[key].get('solar_direct'), sum, level)
            # Voltage Band: recorded voltage_12 magnitudes are halved.
            if 'VoltageJiggle.csv' in rawOut:
                jiggle = rawOut['VoltageJiggle.csv']
                cleanOut['allMeterVoltages'] = {}
                cleanOut['allMeterVoltages']['Min'] = hdmAgg(
                    [(i / 2) for i in jiggle['min(voltage_12.mag)']],
                    min, level)
                cleanOut['allMeterVoltages']['Mean'] = hdmAgg(
                    [(i / 2) for i in jiggle['mean(voltage_12.mag)']],
                    avg, level)
                cleanOut['allMeterVoltages']['StdDev'] = hdmAgg(
                    [(i / 2) for i in jiggle['std(voltage_12.mag)']],
                    avg, level)
                cleanOut['allMeterVoltages']['Max'] = hdmAgg(
                    [(i / 2) for i in jiggle['max(voltage_12.mag)']],
                    max, level)
                # BUGFIX: the derived band and meter-count fields below were
                # previously computed OUTSIDE this guard and raised KeyError
                # whenever VoltageJiggle.csv was absent from the output.
                cleanOut['allMeterVoltages']['stdDevPos'] = [
                    (x + y / 2)
                    for x, y in zip(cleanOut['allMeterVoltages']['Mean'],
                                    cleanOut['allMeterVoltages']['StdDev'])
                ]
                cleanOut['allMeterVoltages']['stdDevNeg'] = [
                    (x - y / 2)
                    for x, y in zip(cleanOut['allMeterVoltages']['Mean'],
                                    cleanOut['allMeterVoltages']['StdDev'])
                ]
                # Total # of meters: counted by scanning the raw .omd text.
                count = 0
                with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
                    for line in f:
                        if "\"objectType\": \"triplex_meter\"" in line:
                            count += 1
                cleanOut['allMeterVoltages']['triplexMeterCount'] = float(
                    count)
            # Power Consumption
            cleanOut['Consumption'] = {}
            # Set default value to be 0, avoiding missing value when computing Loads
            cleanOut['Consumption']['Power'] = [0] * int(
                inputDict["simLength"])
            cleanOut['Consumption']['Losses'] = [0] * int(
                inputDict["simLength"])
            cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
            for key in rawOut:
                if key.startswith('SwingKids_') and key.endswith('.csv'):
                    # Substation (swing bus) real+imag power magnitude.
                    oneSwingPower = hdmAgg(
                        vecPyth(rawOut[key]['sum(power_in.real)'],
                                rawOut[key]['sum(power_in.imag)']), avg, level)
                    if 'Power' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['Power'] = oneSwingPower
                    else:
                        cleanOut['Consumption']['Power'] = vecSum(
                            oneSwingPower, cleanOut['Consumption']['Power'])
                elif key.startswith('Inverter_') and key.endswith('.csv'):
                    # Per-phase inverter apparent power, summed across phases.
                    realA = rawOut[key]['power_A.real']
                    realB = rawOut[key]['power_B.real']
                    realC = rawOut[key]['power_C.real']
                    imagA = rawOut[key]['power_A.imag']
                    imagB = rawOut[key]['power_B.imag']
                    imagC = rawOut[key]['power_C.imag']
                    oneDgPower = hdmAgg(
                        vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
                               vecPyth(realC, imagC)), avg, level)
                    if 'DG' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['DG'] = oneDgPower
                    else:
                        cleanOut['Consumption']['DG'] = vecSum(
                            oneDgPower, cleanOut['Consumption']['DG'])
                elif key.startswith('Windmill_') and key.endswith('.csv'):
                    # Wind turbine power = |V| * |I| per phase.
                    vrA = rawOut[key]['voltage_A.real']
                    vrB = rawOut[key]['voltage_B.real']
                    vrC = rawOut[key]['voltage_C.real']
                    viA = rawOut[key]['voltage_A.imag']
                    viB = rawOut[key]['voltage_B.imag']
                    viC = rawOut[key]['voltage_C.imag']
                    crB = rawOut[key]['current_B.real']
                    crA = rawOut[key]['current_A.real']
                    crC = rawOut[key]['current_C.real']
                    ciA = rawOut[key]['current_A.imag']
                    ciB = rawOut[key]['current_B.imag']
                    ciC = rawOut[key]['current_C.imag']
                    powerA = vecProd(vecPyth(vrA, viA), vecPyth(crA, ciA))
                    powerB = vecProd(vecPyth(vrB, viB), vecPyth(crB, ciB))
                    powerC = vecProd(vecPyth(vrC, viC), vecPyth(crC, ciC))
                    # HACK: multiply by negative one because turbine power sign is opposite all other DG:
                    oneDgPower = [
                        -1.0 * x for x in hdmAgg(
                            vecSum(powerA, powerB, powerC), avg, level)
                    ]
                    if 'DG' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['DG'] = oneDgPower
                    else:
                        cleanOut['Consumption']['DG'] = vecSum(
                            oneDgPower, cleanOut['Consumption']['DG'])
                elif key in [
                        'OverheadLosses.csv', 'UndergroundLosses.csv',
                        'TriplexLosses.csv', 'TransformerLosses.csv'
                ]:
                    # Per-phase loss magnitudes, accumulated across loss files.
                    realA = rawOut[key]['sum(power_losses_A.real)']
                    imagA = rawOut[key]['sum(power_losses_A.imag)']
                    realB = rawOut[key]['sum(power_losses_B.real)']
                    imagB = rawOut[key]['sum(power_losses_B.imag)']
                    realC = rawOut[key]['sum(power_losses_C.real)']
                    imagC = rawOut[key]['sum(power_losses_C.imag)']
                    oneLoss = hdmAgg(
                        vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
                               vecPyth(realC, imagC)), avg, level)
                    if 'Losses' not in cleanOut['Consumption']:
                        cleanOut['Consumption']['Losses'] = oneLoss
                    else:
                        cleanOut['Consumption']['Losses'] = vecSum(
                            oneLoss, cleanOut['Consumption']['Losses'])
            # Aggregate up the timestamps:
            if level == 'days':
                cleanOut['timeStamps'] = aggSeries(stamps, stamps,
                                                   lambda x: x[0][0:10],
                                                   'days')
            elif level == 'months':
                cleanOut['timeStamps'] = aggSeries(stamps, stamps,
                                                   lambda x: x[0][0:7],
                                                   'months')
            # Write the output.
            with open(pJoin(modelDir, feederName, "allOutputData.json"),
                      "w") as outFile:
                json.dump(cleanOut, outFile, indent=4)
            # Update the runTime in the input file.
            endTime = datetime.datetime.now()
            inputDict["runTime"] = str(
                datetime.timedelta(seconds=int((endTime -
                                                startTime).total_seconds())))
            with open(pJoin(modelDir, feederName, "allInputData.json"),
                      "w") as inFile:
                json.dump(inputDict, inFile, indent=4)
            # Clean up the PID file.
            os.remove(pJoin(modelDir, feederName, "PID.txt"))
            print("DONE RUNNING GRIDLABMULTI", modelDir, feederName)
        except Exception as e:
            if test_mode == True:
                raise e
            # A single bad feeder should not abort the other feeders.
            print("MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName)
            cancel(pJoin(modelDir, feederName))
            with open(pJoin(modelDir, feederName, "stderr.txt"),
                      "a+") as stderrFile:
                traceback.print_exc(file=stderrFile)
    finishTime = datetime.datetime.now()
    inputDict["runTime"] = str(
        datetime.timedelta(seconds=int((finishTime -
                                        beginTime).total_seconds())))
    with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
        json.dump(inputDict, inFile, indent=4)
    # Integrate data into allOutputData.json, if error happens, cancel it
    try:
        output = {}
        output["failures"] = {}
        numOfFeeders = 0
        # BUGFIX: feederOutput was previously unbound (NameError) when no
        # feeder produced an allOutputData.json; default to an empty dict.
        feederOutput = {}
        for root, dirs, files in os.walk(modelDir):
            # dump error info into dict
            if "stderr.txt" in files:
                with open(pJoin(root, "stderr.txt"), "r") as stderrFile:
                    tempString = stderrFile.read()
                    if "ERROR" in tempString or "FATAL" in tempString or "Traceback" in tempString:
                        output["failures"]["feeder_" +
                                           str(os.path.split(root)[-1])] = {
                                               "stderr": tempString
                                           }
                        continue
            # dump simulated data into dict
            if "allOutputData.json" in files:
                with open(pJoin(root, "allOutputData.json"),
                          "r") as feederOutputData:
                    numOfFeeders += 1
                    feederOutput = json.load(feederOutputData)
                    # TODO: a better feeder name
                    feederKey = "feeder_" + str(os.path.split(root)[-1])
                    output[feederKey] = {}
                    output[feederKey]["Consumption"] = feederOutput[
                        "Consumption"]
                    output[feederKey]["allMeterVoltages"] = feederOutput[
                        "allMeterVoltages"]
                    output[feederKey]["stderr"] = feederOutput["stderr"]
                    output[feederKey]["stdout"] = feederOutput["stdout"]
        output["numOfFeeders"] = numOfFeeders
        # Time stamps and climate come from the last feeder processed.
        output["timeStamps"] = feederOutput.get("timeStamps", [])
        output["climate"] = feederOutput.get("climate", [])
        # Add feederNames to output so allInputData feederName changes don't cause output rendering to disappear.
        for key, feederName in inputDict.items():
            if 'feederName' in key:
                output[key] = feederName
        with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile:
            json.dump(output, outFile, indent=4)
        try:
            os.remove(pJoin(modelDir, "PPID.txt"))
        except OSError:
            pass  # PPID file may already be gone.
        # Send email to user on model success.
        emailStatus = inputDict.get('emailStatus', 0)
        if (emailStatus == "on"):
            print("\n    EMAIL ALERT ON")
            email = session['user_id']
            try:
                with open("data/User/" + email + ".json") as f:
                    user = json.load(f)
                modelPath, modelName = pSplit(modelDir)
                message = "The model " + "<i>" + str(
                    modelName
                ) + "</i>" + " has successfully completed running. It ran for a total of " + str(
                    inputDict["runTime"]) + " seconds from " + str(
                        beginTime) + ", to " + str(finishTime) + "."
                return web.send_link(email, message, user)
            except Exception as e:
                # BUGFIX: this print contained a garbled, unparseable string
                # literal ("******"); restored to a valid message.
                print("ERROR: Failed sending model status email to user:",
                      email, ", with exception:\n", e)
    except Exception as e:
        # If input range wasn't valid delete output, write error to disk.
        cancel(modelDir)
        thisErr = traceback.format_exc()
        print('ERROR IN MODEL', modelDir, thisErr)
        inputDict['stderr'] = thisErr
        with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile:
            errorFile.write(thisErr)
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
        # Send email to user on model failure.
        email = 'NoEmail'
        try:
            email = session['user_id']
            with open("data/User/" + email + ".json") as f:
                user = json.load(f)
            modelPath, modelName = pSplit(modelDir)
            message = "The model " + "<i>" + str(
                modelName
            ) + "</i>" + " has failed to complete running. It ran for a total of " + str(
                inputDict["runTime"]) + " seconds from " + str(
                    beginTime) + ", to " + str(finishTime) + "."
            return web.send_link(email, message, user)
        except Exception as e:
            # BUGFIX: same garbled string literal restored here.
            print("Failed sending model status email to user:", email,
                  ", with exception:\n", e)
# Example #9 (score: 0)
def run(modelDir, inputDict):
    ''' Run the model in its directory.

    Attaches collector/climate/loss recorders to the feeder in
    modelDir/feeder.omd, runs a fixed 120-hour GridLAB-D simulation starting
    2011-01-01, and writes cleaned results to allOutputData.json. On any
    failure the model is cancelled and the traceback saved to stderr.txt. '''
    try:
        # Set up GLM with correct time and recorders:
        # BUGFIX: use a context manager instead of json.load(open(...)),
        # which leaked the file handle.
        with open(pJoin(modelDir, 'feeder.omd')) as omdFile:
            omd = json.load(omdFile)
        tree = omd.get('tree', {})
        feeder.attachRecorders(tree, "CollectorVoltage", None, None)
        feeder.attachRecorders(tree, "Climate", "object", "climate")
        feeder.attachRecorders(tree, "OverheadLosses", None, None)
        feeder.attachRecorders(tree, "UndergroundLosses", None, None)
        feeder.attachRecorders(tree, "TriplexLosses", None, None)
        feeder.attachRecorders(tree, "TransformerLosses", None, None)
        feeder.groupSwingKids(tree)
        feeder.adjustTime(tree, 120, 'hours', '2011-01-01')
        # Run GridLAB-D
        startTime = dt.datetime.now()
        rawOut = gridlabd.runInFilesystem(tree,
                                          attachments=omd.get(
                                              'attachments', {}),
                                          workDir=modelDir)
        # Clean the output.
        cleanOut = {}
        # Std Err and Std Out
        cleanOut['stderr'] = rawOut['stderr']
        cleanOut['stdout'] = rawOut['stdout']
        # Time Stamps: use the first recorder column that carries them.
        # BUGFIX: the old loop reset timeStamps to [] on every non-matching
        # key and never broke after the '# property..' form, so a found
        # column could be clobbered by a later iteration.
        cleanOut['timeStamps'] = []
        for key in rawOut:
            if '# timestamp' in rawOut[key]:
                cleanOut['timeStamps'] = rawOut[key]['# timestamp']
                break
            elif '# property.. timestamp' in rawOut[key]:
                cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
                break
        # Climate
        for key in rawOut:
            if key.startswith('Climate_') and key.endswith('.csv'):
                climateCsv = rawOut[key]
                cleanOut['climate'] = {
                    'Rain Fall (in/h)': climateCsv.get('rainfall'),
                    'Wind Speed (m/s)': climateCsv.get('wind_speed'),
                    'Temperature (F)': climateCsv.get('temperature'),
                    'Snow Depth (in)': climateCsv.get('snowdepth'),
                    'Direct Normal (W/sf)': climateCsv.get('solar_direct'),
                }
                # Converting W/sf to W/sm (1 square meter = 10.76392 sq ft).
                # BUGFIX: default to [] so a missing solar_global column no
                # longer raises TypeError in the comprehension.
                climateWbySFList = climateCsv.get('solar_global', [])
                cleanOut['climate']['Global Horizontal (W/sm)'] = [
                    x * 10.76392 for x in climateWbySFList
                ]
        # Voltage Band: recorded voltage_12 magnitudes are halved.
        if 'VoltageJiggle.csv' in rawOut:
            jiggle = rawOut['VoltageJiggle.csv']
            cleanOut['allMeterVoltages'] = {
                'Min': [float(i / 2) for i in jiggle['min(voltage_12.mag)']],
                'Mean':
                [float(i / 2) for i in jiggle['mean(voltage_12.mag)']],
                'StdDev':
                [float(i / 2) for i in jiggle['std(voltage_12.mag)']],
                'Max': [float(i / 2) for i in jiggle['max(voltage_12.mag)']],
            }
        # Dump the results.
        endTime = dt.datetime.now()
        inputDict["runTime"] = str(
            dt.timedelta(seconds=int((endTime - startTime).total_seconds())))
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
        with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile:
            json.dump(cleanOut, outFile, indent=4)
    except Exception:
        # BUGFIX: narrowed the bare `except:` (which also swallowed
        # SystemExit/KeyboardInterrupt) and converted the Python 2 print
        # statement, which is a syntax error on Python 3.
        # If input range wasn't valid delete output, write error to disk.
        cancel(modelDir)
        thisErr = traceback.format_exc()
        print('ERROR IN MODEL', modelDir, thisErr)
        inputDict['stderr'] = thisErr
        with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile:
            errorFile.write(thisErr)
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)