def runForeground(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
	# Check whether the model directory exists; create it if it doesn't.
	if not os.path.isdir(modelDir):
		os.makedirs(modelDir)
		inputDict["created"] = str(datetime.datetime.now())
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	feederList = []
	# Prepare the data and clean the workspace: on a re-run, remove all data in the subfolders.
	for dirs in os.listdir(modelDir):
		if os.path.isdir(pJoin(modelDir, dirs)):
			shutil.rmtree(pJoin(modelDir, dirs))
	# Get each feeder, prepare data in separate folders, and run there.
	for key in sorted(inputDict, key=inputDict.get):
		if key.startswith("feederName"):
			feederName = inputDict[key]
			feederList.append(feederName)
			try:
				os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
			except Exception:
				pass
			if not os.path.isdir(pJoin(modelDir, feederName)):
				os.makedirs(pJoin(modelDir, feederName)) # create subfolders for feeders
			shutil.copy(pJoin(modelDir, feederName + ".omd"),
				pJoin(modelDir, feederName, "feeder.omd"))
			inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
			shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
				pJoin(modelDir, feederName, "climate.tmy2"))
			try:
				startTime = datetime.datetime.now()
				feederJson = json.load(open(pJoin(modelDir, feederName, "feeder.omd")))
				tree = feederJson["tree"]
				# Set up GLM with correct time and recorders:
				feeder.attachRecorders(tree, "Regulator", "object", "regulator")
				feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
				feeder.attachRecorders(tree, "Inverter", "object", "inverter")
				feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
				feeder.attachRecorders(tree, "CollectorVoltage", None, None)
				feeder.attachRecorders(tree, "Climate", "object", "climate")
				feeder.attachRecorders(tree, "OverheadLosses", None, None)
				feeder.attachRecorders(tree, "UndergroundLosses", None, None)
				feeder.attachRecorders(tree, "TriplexLosses", None, None)
				feeder.attachRecorders(tree, "TransformerLosses", None, None)
				feeder.groupSwingKids(tree)
				feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
					simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
				# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
				rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"],
					keepFiles=True, workDir=pJoin(modelDir, feederName))
				cleanOut = {}
				# Std Err and Std Out
				cleanOut['stderr'] = rawOut['stderr']
				cleanOut['stdout'] = rawOut['stdout']
				# Time Stamps
				for key in rawOut:
					if '# timestamp' in rawOut[key]:
						cleanOut['timeStamps'] = rawOut[key]['# timestamp']
						break
					elif '# property.. timestamp' in rawOut[key]:
						cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
						break
					else:
						cleanOut['timeStamps'] = []
				# Day/Month Aggregation Setup:
				stamps = cleanOut.get('timeStamps',[])
				level = inputDict.get('simLengthUnits','hours')
				# Climate
				for key in rawOut:
					if key.startswith('Climate_') and key.endswith('.csv'):
						cleanOut['climate'] = {}
						cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
						cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
						cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
						cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
						cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				# Voltage Band
				if 'VoltageJiggle.csv' in rawOut:
					cleanOut['allMeterVoltages'] = {}
					cleanOut['allMeterVoltages']['Min'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
					cleanOut['allMeterVoltages']['Mean'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
					cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
					cleanOut['allMeterVoltages']['Max'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
					cleanOut['allMeterVoltages']['stdDevPos'] = [(x+y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
					cleanOut['allMeterVoltages']['stdDevNeg'] = [(x-y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
				# Total # of meters
				count = 0
				with open(pJoin(modelDir, feederName, "feeder.omd")) as f:
					for line in f:
						if "\"objectType\": \"triplex_meter\"" in line:
							count+=1
				print "count=", count
				cleanOut['allMeterVoltages']['triplexMeterCount'] = float(count)
				# Power Consumption
				cleanOut['Consumption'] = {}
				# Default these to 0 to avoid missing values when computing loads.
				cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
				cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
				cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
				for key in rawOut:
					if key.startswith('SwingKids_') and key.endswith('.csv'):
						oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
						if 'Power' not in cleanOut['Consumption']:
							cleanOut['Consumption']['Power'] = oneSwingPower
						else:
							cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
					elif key.startswith('Inverter_') and key.endswith('.csv'):
						realA = rawOut[key]['power_A.real']
						realB = rawOut[key]['power_B.real']
						realC = rawOut[key]['power_C.real']
						imagA = rawOut[key]['power_A.imag']
						imagB = rawOut[key]['power_B.imag']
						imagC = rawOut[key]['power_C.imag']
						oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
						if 'DG' not in cleanOut['Consumption']:
							cleanOut['Consumption']['DG'] = oneDgPower
						else:
							cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
					elif key.startswith('Windmill_') and key.endswith('.csv'):
						vrA = rawOut[key]['voltage_A.real']
						vrB = rawOut[key]['voltage_B.real']
						vrC = rawOut[key]['voltage_C.real']
						viA = rawOut[key]['voltage_A.imag']
						viB = rawOut[key]['voltage_B.imag']
						viC = rawOut[key]['voltage_C.imag']
						crB = rawOut[key]['current_B.real']
						crA = rawOut[key]['current_A.real']
						crC = rawOut[key]['current_C.real']
						ciA = rawOut[key]['current_A.imag']
						ciB = rawOut[key]['current_B.imag']
						ciC = rawOut[key]['current_C.imag']
						powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
						powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
						powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
						# HACK: multiply by negative one because the turbine power sign convention is opposite that of all other DG:
						oneDgPower = [-1.0 * x for x in hdmAgg(vecSum(powerA,powerB,powerC), avg, level)]
						if 'DG' not in cleanOut['Consumption']:
							cleanOut['Consumption']['DG'] = oneDgPower
						else:
							cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
					elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
						realA = rawOut[key]['sum(power_losses_A.real)']
						imagA = rawOut[key]['sum(power_losses_A.imag)']
						realB = rawOut[key]['sum(power_losses_B.real)']
						imagB = rawOut[key]['sum(power_losses_B.imag)']
						realC = rawOut[key]['sum(power_losses_C.real)']
						imagC = rawOut[key]['sum(power_losses_C.imag)']
						oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
						if 'Losses' not in cleanOut['Consumption']:
							cleanOut['Consumption']['Losses'] = oneLoss
						else:
							cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
				# Aggregate up the timestamps:
				if level=='days':
					cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
				elif level=='months':
					cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
				# Write the output.
				with open(pJoin(modelDir, feederName, "allOutputData.json"),"w") as outFile:
					json.dump(cleanOut, outFile, indent=4)
				# Update the runTime in the input file.
				endTime = datetime.datetime.now()
				inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
				with open(pJoin(modelDir, feederName, "allInputData.json"),"w") as inFile:
					json.dump(inputDict, inFile, indent=4)
				# Clean up the PID file.
				os.remove(pJoin(modelDir, feederName,"PID.txt"))
				print "DONE RUNNING GRIDLABMULTI", modelDir, feederName
			except Exception as e:
				print "MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName
				cancel(pJoin(modelDir, feederName))
				with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
					traceback.print_exc(file = stderrFile)
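
The helpers used throughout these examples (vecPyth, vecSum, vecProd, avg) are imported from elsewhere in the module and are not shown on this page. Judging only from how they are called above, a minimal sketch of their assumed behavior (element-wise operations over equal-length lists of GridLAB-D recorder columns) might look like this; it is an illustration, not the module's actual implementation:

import math

def vecPyth(realVec, imagVec):
	''' Assumed behavior: element-wise complex magnitude, sqrt(r^2 + i^2). '''
	return [math.sqrt(r ** 2 + i ** 2) for r, i in zip(realVec, imagVec)]

def vecSum(*vectors):
	''' Assumed behavior: element-wise sum of any number of equal-length vectors. '''
	return [sum(values) for values in zip(*vectors)]

def vecProd(vecA, vecB):
	''' Assumed behavior: element-wise product of two equal-length vectors. '''
	return [a * b for a, b in zip(vecA, vecB)]

def avg(values):
	''' Assumed behavior: arithmetic mean, used as an aggregator by hdmAgg. '''
	return sum(values) / float(len(values)) if values else 0
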
Example #2
def heavyProcessing(modelDir, inputDict):
    ''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
    print "STARTING TO RUN", modelDir
    beginTime = datetime.datetime.now()
    # Get feeder name and data in.
    try:
        os.mkdir(pJoin(modelDir, 'gldContainer'))
    except:
        pass
    feederDir, feederName = inputDict["feederName"].split("___")
    shutil.copy(
        pJoin(__metaModel__._omfDir, "data", "Feeder", feederDir,
              feederName + ".json"), pJoin(modelDir, "feeder.json"))
    shutil.copy(
        pJoin(__metaModel__._omfDir, "data", "Climate",
              inputDict["climateName"] + ".tmy2"),
        pJoin(modelDir, "gldContainer", "climate.tmy2"))
    try:
        startTime = datetime.datetime.now()
        feederJson = json.load(open(pJoin(modelDir, "feeder.json")))
        tree = feederJson["tree"]
        # Set up GLM with correct time and recorders:
        feeder.attachRecorders(tree, "Regulator", "object", "regulator")
        feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
        feeder.attachRecorders(tree, "Inverter", "object", "inverter")
        feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
        feeder.attachRecorders(tree, "CollectorVoltage", None, None)
        feeder.attachRecorders(tree, "Climate", "object", "climate")
        feeder.attachRecorders(tree, "OverheadLosses", None, None)
        feeder.attachRecorders(tree, "UndergroundLosses", None, None)
        feeder.attachRecorders(tree, "TriplexLosses", None, None)
        feeder.attachRecorders(tree, "TransformerLosses", None, None)
        feeder.groupSwingKids(tree)
        # Attach recorders for system voltage map:
        stub = {
            'object': 'group_recorder',
            'group': '"class=node"',
            'property': 'voltage_A',
            'interval': 3600,
            'file': 'aVoltDump.csv'
        }
        for phase in ['A', 'B', 'C']:
            copyStub = dict(stub)
            copyStub['property'] = 'voltage_' + phase
            copyStub['file'] = phase.lower() + 'VoltDump.csv'
            tree[feeder.getMaxKey(tree) + 1] = copyStub
        feeder.adjustTime(tree=tree,
                          simLength=float(inputDict["simLength"]),
                          simLengthUnits=inputDict["simLengthUnits"],
                          simStartDate=inputDict["simStartDate"])
        # RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
        rawOut = gridlabd.runInFilesystem(
            tree,
            attachments=feederJson["attachments"],
            keepFiles=True,
            workDir=pJoin(modelDir, 'gldContainer'))
        cleanOut = {}
        # Std Err and Std Out
        cleanOut['stderr'] = rawOut['stderr']
        cleanOut['stdout'] = rawOut['stdout']
        # Time Stamps
        for key in rawOut:
            if '# timestamp' in rawOut[key]:
                cleanOut['timeStamps'] = rawOut[key]['# timestamp']
                break
            elif '# property.. timestamp' in rawOut[key]:
                cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
                break
            else:
                cleanOut['timeStamps'] = []
        # Day/Month Aggregation Setup:
        stamps = cleanOut.get('timeStamps', [])
        level = inputDict.get('simLengthUnits', 'hours')
        # Climate
        for key in rawOut:
            if key.startswith('Climate_') and key.endswith('.csv'):
                cleanOut['climate'] = {}
                cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(
                    rawOut[key].get('rainfall'), sum, level)
                cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(
                    rawOut[key].get('wind_speed'), avg, level)
                cleanOut['climate']['Temperature (F)'] = hdmAgg(
                    rawOut[key].get('temperature'), max, level)
                cleanOut['climate']['Snow Depth (in)'] = hdmAgg(
                    rawOut[key].get('snowdepth'), max, level)
                cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(
                    rawOut[key].get('solar_direct'), sum, level)
                #cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
                climateWbySFList = hdmAgg(rawOut[key].get('solar_global'), sum,
                                          level)
                # Convert W/sf to W/m^2 (1 m^2 = 10.76392 sf).
                climateWbySMList = [x * 10.76392 for x in climateWbySFList]
                cleanOut['climate'][
                    'Global Horizontal (W/sm)'] = climateWbySMList
        # Voltage Band
        if 'VoltageJiggle.csv' in rawOut:
            cleanOut['allMeterVoltages'] = {}
            cleanOut['allMeterVoltages']['Min'] = hdmAgg([
                float(i / 2)
                for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']
            ], min, level)
            cleanOut['allMeterVoltages']['Mean'] = hdmAgg([
                float(i / 2)
                for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']
            ], avg, level)
            cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([
                float(i / 2)
                for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']
            ], avg, level)
            cleanOut['allMeterVoltages']['Max'] = hdmAgg([
                float(i / 2)
                for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']
            ], max, level)
        # Power Consumption
        cleanOut['Consumption'] = {}
        # Default these to 0 to avoid missing values when computing loads.
        cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
        cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
        cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
        for key in rawOut:
            if key.startswith('SwingKids_') and key.endswith('.csv'):
                oneSwingPower = hdmAgg(
                    vecPyth(rawOut[key]['sum(power_in.real)'],
                            rawOut[key]['sum(power_in.imag)']), avg, level)
                if 'Power' not in cleanOut['Consumption']:
                    cleanOut['Consumption']['Power'] = oneSwingPower
                else:
                    cleanOut['Consumption']['Power'] = vecSum(
                        oneSwingPower, cleanOut['Consumption']['Power'])
            elif key.startswith('Inverter_') and key.endswith('.csv'):
                realA = rawOut[key]['power_A.real']
                realB = rawOut[key]['power_B.real']
                realC = rawOut[key]['power_C.real']
                imagA = rawOut[key]['power_A.imag']
                imagB = rawOut[key]['power_B.imag']
                imagC = rawOut[key]['power_C.imag']
                oneDgPower = hdmAgg(
                    vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
                           vecPyth(realC, imagC)), avg, level)
                if 'DG' not in cleanOut['Consumption']:
                    cleanOut['Consumption']['DG'] = oneDgPower
                else:
                    cleanOut['Consumption']['DG'] = vecSum(
                        oneDgPower, cleanOut['Consumption']['DG'])
            elif key.startswith('Windmill_') and key.endswith('.csv'):
                vrA = rawOut[key]['voltage_A.real']
                vrB = rawOut[key]['voltage_B.real']
                vrC = rawOut[key]['voltage_C.real']
                viA = rawOut[key]['voltage_A.imag']
                viB = rawOut[key]['voltage_B.imag']
                viC = rawOut[key]['voltage_C.imag']
                crB = rawOut[key]['current_B.real']
                crA = rawOut[key]['current_A.real']
                crC = rawOut[key]['current_C.real']
                ciA = rawOut[key]['current_A.imag']
                ciB = rawOut[key]['current_B.imag']
                ciC = rawOut[key]['current_C.imag']
                powerA = vecProd(vecPyth(vrA, viA), vecPyth(crA, ciA))
                powerB = vecProd(vecPyth(vrB, viB), vecPyth(crB, ciB))
                powerC = vecProd(vecPyth(vrC, viC), vecPyth(crC, ciC))
                oneDgPower = hdmAgg(vecSum(powerA, powerB, powerC), avg, level)
                if 'DG' not in cleanOut['Consumption']:
                    cleanOut['Consumption']['DG'] = oneDgPower
                else:
                    cleanOut['Consumption']['DG'] = vecSum(
                        oneDgPower, cleanOut['Consumption']['DG'])
            elif key in [
                    'OverheadLosses.csv', 'UndergroundLosses.csv',
                    'TriplexLosses.csv', 'TransformerLosses.csv'
            ]:
                realA = rawOut[key]['sum(power_losses_A.real)']
                imagA = rawOut[key]['sum(power_losses_A.imag)']
                realB = rawOut[key]['sum(power_losses_B.real)']
                imagB = rawOut[key]['sum(power_losses_B.imag)']
                realC = rawOut[key]['sum(power_losses_C.real)']
                imagC = rawOut[key]['sum(power_losses_C.imag)']
                oneLoss = hdmAgg(
                    vecSum(vecPyth(realA, imagA), vecPyth(realB, imagB),
                           vecPyth(realC, imagC)), avg, level)
                if 'Losses' not in cleanOut['Consumption']:
                    cleanOut['Consumption']['Losses'] = oneLoss
                else:
                    cleanOut['Consumption']['Losses'] = vecSum(
                        oneLoss, cleanOut['Consumption']['Losses'])
            elif key.startswith('Regulator_') and key.endswith('.csv'):
                # Strip the .csv extension and use the rest of the filename as the
                # output key, e.g. Regulator_VR10.csv -> Regulator_VR10.
                newkey = key.split(".")[0]
                cleanOut[newkey] = {}
                cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
                cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
                cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
                cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
            elif key.startswith('Capacitor_') and key.endswith('.csv'):
                newkey = key.split(".")[0]
                cleanOut[newkey] = {}
                cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
                cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
                cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
                cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
        # What percentage of our keys have lat lon data?
        latKeys = [
            tree[key]['latitude'] for key in tree if 'latitude' in tree[key]
        ]
        latPerc = 1.0 * len(latKeys) / len(tree)
        doNeato = latPerc < 0.25
        # Generate the frames for the system voltage map time traveling chart.
        genTime = generateVoltChart(tree,
                                    rawOut,
                                    modelDir,
                                    neatoLayout=doNeato)
        cleanOut['genTime'] = genTime
        # Aggregate up the timestamps:
        if level == 'days':
            cleanOut['timeStamps'] = aggSeries(stamps, stamps,
                                               lambda x: x[0][0:10], 'days')
        elif level == 'months':
            cleanOut['timeStamps'] = aggSeries(stamps, stamps,
                                               lambda x: x[0][0:7], 'months')
        # Write the output.
        with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile:
            json.dump(cleanOut, outFile, indent=4)
        # Update the runTime in the input file.
        endTime = datetime.datetime.now()
        inputDict["runTime"] = str(
            datetime.timedelta(seconds=int((endTime -
                                            startTime).total_seconds())))
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
        # Clean up the PID file.
        os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
        print "DONE RUNNING", modelDir
    except Exception as e:
        print "MODEL CRASHED", e
        # Cancel to get rid of extra background processes.
        try:
            os.remove(pJoin(modelDir, 'PPID.txt'))
        except:
            pass
        thisErr = traceback.format_exc()
        inputDict['stderr'] = thisErr
        with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile:
            errorFile.write(thisErr)
        # Dump input with error included.
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
    finishTime = datetime.datetime.now()
    inputDict["runTime"] = str(
        datetime.timedelta(seconds=int((finishTime -
                                        beginTime).total_seconds())))
    with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
        json.dump(inputDict, inFile, indent=4)
    try:
        os.remove(pJoin(modelDir, "PPID.txt"))
    except:
        pass
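
hdmAgg and aggSeries are likewise defined elsewhere in the module. As used above, hdmAgg appears to pass hourly data through unchanged and otherwise roll a series up to daily or monthly values, while aggSeries groups a series by a date ('YYYY-MM-DD') or month ('YYYY-MM') prefix of its timestamps and reduces each group with the supplied function. A rough, self-contained sketch of that assumed behavior follows; note the real hdmAgg seems to pick up the timestamp list from an enclosing scope rather than taking it as an argument:

from itertools import groupby

def aggSeries(timeStamps, timeSeries, func, level):
    ''' Assumed behavior: group (timestamp, value) pairs by a timestamp prefix,
    then reduce each group's list of values with func (sum, min, max, avg, or a
    lambda such as the one used for the timestamps themselves). '''
    endPos = 7 if level == 'months' else 10
    pairs = list(zip(timeStamps, timeSeries))
    grouped = groupby(pairs, key=lambda pair: pair[0][0:endPos])
    return [func([value for _, value in group]) for _, group in grouped]

def hdmAgg(series, func, level, timeStamps=None):
    ''' Assumed behavior: hourly data passes through unchanged; days/months get
    aggregated with aggSeries. The timeStamps keyword is an assumption made so
    this sketch stays self-contained. '''
    if level in ('days', 'months') and timeStamps:
        return aggSeries(timeStamps, series, func, level)
    return series
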
Example #3
def runForeground(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
	# Check whether the model directory exists; create it if it doesn't.
	if not os.path.isdir(modelDir):
		os.makedirs(modelDir)
		inputDict["created"] = str(datetime.datetime.now())	
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	feederList = []
	# Prepare the data and clean the workspace: on a re-run, remove all data in the subfolders.
	for dirs in os.listdir(modelDir):
		if os.path.isdir(pJoin(modelDir, dirs)):
			shutil.rmtree(pJoin(modelDir, dirs))
	# Get each feeder, prepare data in separate folders, and run there.
	for key in sorted(inputDict, key=inputDict.get):
		if key.startswith("feederName"):
			feederDir, feederName = inputDict[key].split("___")
			feederList.append(feederName)
			try:
				os.remove(pJoin(modelDir, feederName, "allOutputData.json"))
			except Exception:
				pass
			if not os.path.isdir(pJoin(modelDir, feederName)):
				os.makedirs(pJoin(modelDir, feederName)) # create subfolders for feeders
			shutil.copy(pJoin(__metaModel__._omfDir, "data", "Feeder", feederDir, feederName + ".json"),
				pJoin(modelDir, feederName, "feeder.json"))
			inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])			
			shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
				pJoin(modelDir, feederName, "climate.tmy2"))
			try:
				startTime = datetime.datetime.now()
				feederJson = json.load(open(pJoin(modelDir, feederName, "feeder.json")))
				tree = feederJson["tree"]
				# Set up GLM with correct time and recorders:
				feeder.attachRecorders(tree, "Regulator", "object", "regulator")
				feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
				feeder.attachRecorders(tree, "Inverter", "object", "inverter")
				feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
				feeder.attachRecorders(tree, "CollectorVoltage", None, None)
				feeder.attachRecorders(tree, "Climate", "object", "climate")
				feeder.attachRecorders(tree, "OverheadLosses", None, None)
				feeder.attachRecorders(tree, "UndergroundLosses", None, None)
				feeder.attachRecorders(tree, "TriplexLosses", None, None)
				feeder.attachRecorders(tree, "TransformerLosses", None, None)
				feeder.groupSwingKids(tree)
				feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
					simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
				# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
				rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
					keepFiles=True, workDir=pJoin(modelDir, feederName))
				cleanOut = {}
				# Std Err and Std Out
				cleanOut['stderr'] = rawOut['stderr']
				cleanOut['stdout'] = rawOut['stdout']
				# Time Stamps
				for key in rawOut:
					if '# timestamp' in rawOut[key]:
						cleanOut['timeStamps'] = rawOut[key]['# timestamp']
						break
					elif '# property.. timestamp' in rawOut[key]:
						cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
						break
					else:
						cleanOut['timeStamps'] = []
				# Day/Month Aggregation Setup:
				stamps = cleanOut.get('timeStamps',[])
				level = inputDict.get('simLengthUnits','hours')
				# Climate
				for key in rawOut:
					if key.startswith('Climate_') and key.endswith('.csv'):
						cleanOut['climate'] = {}
						cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
						cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
						cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
						cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
						cleanOut['climate']['Direct Insolation (W/m^2)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				# Voltage Band
				if 'VoltageJiggle.csv' in rawOut:
					cleanOut['allMeterVoltages'] = {}
					cleanOut['allMeterVoltages']['Min'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
					cleanOut['allMeterVoltages']['Mean'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
					cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
					cleanOut['allMeterVoltages']['Max'] = hdmAgg([(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
					cleanOut['allMeterVoltages']['stdDevPos'] = [(x+y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
					cleanOut['allMeterVoltages']['stdDevNeg'] = [(x-y/2) for x,y in zip(cleanOut['allMeterVoltages']['Mean'], cleanOut['allMeterVoltages']['StdDev'])]
				# Total # of meters
				count = 0
				with open(pJoin(modelDir, feederName, "feeder.json")) as f:
					for line in f:
						if "\"objectType\": \"triplex_meter\"" in line:
							count+=1
				print "count=", count
				cleanOut['allMeterVoltages']['triplexMeterCount'] = float(count)
				# Power Consumption
				cleanOut['Consumption'] = {}
				# Default these to 0 to avoid missing values when computing loads.
				cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
				cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
				cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
				for key in rawOut:
					if key.startswith('SwingKids_') and key.endswith('.csv'):
						oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
						if 'Power' not in cleanOut['Consumption']:
							cleanOut['Consumption']['Power'] = oneSwingPower
						else:
							cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
					elif key.startswith('Inverter_') and key.endswith('.csv'):
						realA = rawOut[key]['power_A.real']
						realB = rawOut[key]['power_B.real']
						realC = rawOut[key]['power_C.real']
						imagA = rawOut[key]['power_A.imag']
						imagB = rawOut[key]['power_B.imag']
						imagC = rawOut[key]['power_C.imag']
						oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
						if 'DG' not in cleanOut['Consumption']:
							cleanOut['Consumption']['DG'] = oneDgPower
						else:
							cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
					elif key.startswith('Windmill_') and key.endswith('.csv'):
						vrA = rawOut[key]['voltage_A.real']
						vrB = rawOut[key]['voltage_B.real']
						vrC = rawOut[key]['voltage_C.real']
						viA = rawOut[key]['voltage_A.imag']
						viB = rawOut[key]['voltage_B.imag']
						viC = rawOut[key]['voltage_C.imag']
						crB = rawOut[key]['current_B.real']
						crA = rawOut[key]['current_A.real']
						crC = rawOut[key]['current_C.real']
						ciA = rawOut[key]['current_A.imag']
						ciB = rawOut[key]['current_B.imag']
						ciC = rawOut[key]['current_C.imag']
						powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
						powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
						powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
						# HACK: multiply by negative one because the turbine power sign convention is opposite that of all other DG:
						oneDgPower = [-1.0 * x for x in hdmAgg(vecSum(powerA,powerB,powerC), avg, level)]
						if 'DG' not in cleanOut['Consumption']:
							cleanOut['Consumption']['DG'] = oneDgPower
						else:
							cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
					elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
						realA = rawOut[key]['sum(power_losses_A.real)']
						imagA = rawOut[key]['sum(power_losses_A.imag)']
						realB = rawOut[key]['sum(power_losses_B.real)']
						imagB = rawOut[key]['sum(power_losses_B.imag)']
						realC = rawOut[key]['sum(power_losses_C.real)']
						imagC = rawOut[key]['sum(power_losses_C.imag)']
						oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
						if 'Losses' not in cleanOut['Consumption']:
							cleanOut['Consumption']['Losses'] = oneLoss
						else:
							cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
				# Aggregate up the timestamps:
				if level=='days':
					cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
				elif level=='months':
					cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
				# Write the output.
				with open(pJoin(modelDir, feederName, "allOutputData.json"),"w") as outFile:
					json.dump(cleanOut, outFile, indent=4)
				# Update the runTime in the input file.
				endTime = datetime.datetime.now()
				inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
				with open(pJoin(modelDir, feederName, "allInputData.json"),"w") as inFile:
					json.dump(inputDict, inFile, indent=4)
				# Clean up the PID file.
				os.remove(pJoin(modelDir, feederName,"PID.txt"))
				print "DONE RUNNING GRIDLABMULTI", modelDir, feederName
			except Exception as e:
				print "MODEL CRASHED GRIDLABMULTI", e, modelDir, feederName
				cancel(pJoin(modelDir, feederName))
				with open(pJoin(modelDir, feederName, "stderr.txt"), "a+") as stderrFile:
					traceback.print_exc(file = stderrFile)
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. '''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	try: os.mkdir(pJoin(modelDir,'gldContainer'))
	except: pass
	try:	
		feederName = inputDict["feederName1"]
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, "gldContainer", "climate.tmy2"))
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map:
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]),
			simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], 
			keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
				break
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)	
				climateWbySFList = hdmAgg(rawOut[key].get('solar_global'), sum, level)
				# Convert W/sf to W/m^2 (1 m^2 = 10.76392 sf).
				climateWbySMList = [x * 10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)'] = climateWbySMList
		# Voltage Band
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Default these to 0 to avoid missing values when computing loads.
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'): 	
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				# Strip the .csv extension and use the rest of the filename as the
				# output key, e.g. Regulator_VR10.csv -> Regulator_VR10.
				newkey = key.split(".")[0]
				cleanOut[newkey] = {}
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				newkey = key.split(".")[0]
				cleanOut[newkey] = {}
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		doNeato = latPerc < 0.25
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If the input wasn't valid, delete the output and write the error to disk.
		cancel(modelDir)	
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
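
For completeness, here is a minimal sketch of how one of these entry points might be driven. Every value below (the model directory path, the feeder name, and the input fields) is a placeholder chosen for illustration; it assumes the surrounding module's imports and a prepared model directory that already contains the matching .omd feeder file:

if __name__ == '__main__':
	# Hypothetical inputs; real runs receive these from the OMF web interface.
	exampleInput = {
		"feederName1": "exampleFeeder",  # placeholder: expects exampleFeeder.omd in the model dir
		"zipCode": "64735",              # placeholder ZIP code for the climate lookup
		"simLength": "24",
		"simLengthUnits": "hours",
		"simStartDate": "2012-01-01",
	}
	exampleModelDir = "/tmp/exampleGridlabRun"  # placeholder working directory
	heavyProcessing(exampleModelDir, exampleInput)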