def addScaledRandomHouses(inFeed):
	'''Replace every parented triplex_node in a feeder with a scaled, semi-randomized
	house object (plus the child appliance objects from houseChildren.glm).
	Mutates inFeed in place.'''
	housePrototypes = _get_house_archetypes()
	childrenPath = os.path.join(omf.omfDir, 'static', 'testFiles', 'houseChildren.glm')
	childPrototypes = feeder.parse(childrenPath)
	allTripKeys = _get_by_key_val(inFeed, 'object', 'triplex_node', getAll=True)
	# Only triplex_nodes that sit under a meter (i.e. have a parent) become houses.
	parentedTripKeys = [k for k in allTripKeys if 'parent' in inFeed[k]]
	insertKey = feeder.getMaxKey(inFeed) + 1
	# House objects need the residential module and the responsive-load schedules.
	inFeed[insertKey] = {'omftype': 'module', 'argument': 'residential'}
	insertKey += 1
	inFeed[insertKey] = {'omftype': '#include','argument': '\"schedulesResponsiveLoads.glm\"'}
	insertKey += 1
	for loadKey in parentedTripKeys:
		meterOb = inFeed[_get_by_key_val(inFeed, 'name', inFeed[loadKey]['parent'])]
		realPower = complex(inFeed[loadKey]['power_12']).real
		houseOb = dict(random.choice(list(housePrototypes.values())))
		houseOb['name'] += '_' + str(loadKey)
		houseOb['parent'] = meterOb['name']
		houseOb['schedule_skew'] = str(random.gauss(2000,500))
		houseOb['floor_area'] = str(500.0 + 0.50*realPower) # Add 500 because very small floor_areas break GLD.
		houseOb['latitude'] = meterOb.get('latitude','0.0')
		houseOb['longitude'] = meterOb.get('longitude','0.0')
		inFeed[insertKey] = houseOb
		insertKey += 1
		# Attach a full set of child objects (appliances etc.) under the new house.
		for protoKey in childPrototypes:
			childOb = dict(childPrototypes[protoKey])
			childOb['name'] += '_' + str(loadKey) + '_' + str(protoKey)
			childOb['parent'] = houseOb['name']
			childOb['latitude'] = meterOb.get('latitude','0.0')
			childOb['longitude'] = meterOb.get('longitude','0.0')
			childOb['schedule_skew'] = str(random.gauss(8000,1000))
			inFeed[insertKey] = childOb
			insertKey += 1
		# The original triplex load is now represented by the house; drop it.
		del inFeed[loadKey]
def createTreeWithFault(tree, faultType, faultLocation, startTime, stopTime):
	'''Return a deep copy of tree with the reliability module plus eventgen,
	fault_check, metrics, and power_metrics objects inserted so GridLAB-D will
	simulate the given fault between startTime and stopTime.'''
	treeCopy = copy.deepcopy(tree)
	def addToTree(ob):
		# Insert the object at the next unused integer key.
		treeCopy[feeder.getMaxKey(treeCopy) + 1] = ob
	addToTree({
		'module': 'reliability ',
		'maximum_event_length': '300 s',
		'report_event_log': 'TRUE'
	})
	quotedFaultType = '"' + faultType + '"'
	# eventgen wants "location,start,stop" with stray single quotes stripped from the times.
	outageSpec = '"' + faultLocation + ',' + startTime.replace('\'','') + ',' + stopTime.replace('\'','') + '"'
	addToTree({
		'object': 'eventgen',
		'name': 'ManualEventGen',
		'parent': 'RelMetrics',
		'fault_type': quotedFaultType,
		'manual_outages': outageSpec
	})
	addToTree({
		'object': 'fault_check ',
		'name': 'test_fault',
		'check_mode': 'ONCHANGE',
		'eventgen_object': 'ManualEventGen',
		'output_filename': 'Fault_check_out.txt'
	})
	addToTree({
		'object': 'metrics',
		'name': 'RelMetrics',
		'report_file': 'Metrics_Output.csv',
		'module_metrics_object': 'PwrMetrics',
		'metrics_of_interest': '"SAIFI,SAIDI,CAIDI,ASAI,MAIFI"',
		'customer_group': '"class=meter"',
		'metric_interval': '5 h',
		'report_interval': '5 h'
	})
	addToTree({
		'object': 'power_metrics',
		'name': 'PwrMetrics',
		'base_time_value': '1 h'
	})
	return treeCopy
def calibrate_omd(start_date, omd_path, csv_path):
	"""
	Modify an .omd file so that it will run in GridLAB-D with the CSV of USCRN weather data.

	:param start_date: the starting date of the GridLAB-D simulation
	:type start_date: datetime
	:param omd_path: an absolute path to the .omd file to modify (rewritten in place)
	:type omd_path: str
	:param csv_path: an absolute path to the CSV file that contains USCRN weather data
	:type csv_path: str
	"""
	with open(omd_path, 'r') as f:
		omd = json.load(f)
	tree = omd["tree"]
	# Delete all climate objects from the feeder. Also delete any csv_reader objects that are also named "WeatherReader".
	# Iterate over a snapshot of the keys: deleting entries while iterating the live
	# dict/key view raises "RuntimeError: dictionary changed size during iteration" on Python 3.
	weather_reader_name = "WeatherReader"
	for key in list(tree.keys()):
		object_type = tree[key].get("object")
		object_name = tree[key].get("name")
		if object_type == "climate" or (object_type == "csv_reader" and object_name == weather_reader_name):
			del tree[key]
	# Reinsert a new climate object and an associated csv_reader object
	oldMax = feeder.getMaxKey(tree)
	tree[oldMax + 1] = {'omftype': 'module', 'argument': 'tape'}
	tree[oldMax + 2] = {'omftype': 'module', 'argument': 'climate'}
	csv_name = os.path.basename(csv_path)
	tree[oldMax + 3] = {
		'object': 'csv_reader',
		'name': weather_reader_name,
		'filename': csv_name
	}
	climate_name = "MyClimate"
	tree[oldMax + 4] = {
		'object': 'climate',
		'name': climate_name,
		'reader': weather_reader_name,
		'tmyfile': csv_name
	}
	# Set the time correctly. Modify certain objects in the feeder (e.g. recorder and clock)
	feeder.adjustTime(tree, 240, 'hours', '{}-{}-{}'.format(start_date.year, start_date.month, start_date.day))
	omd["tree"] = tree
	# Embed the weather CSV as an attachment so the .omd is self-contained.
	with open(csv_path, 'r') as f:
		weatherString = f.read()
	if omd.get("attachments") is None:
		omd["attachments"] = {}
	omd['attachments'][csv_name] = weatherString
	with open(omd_path, 'w') as f:
		json.dump(omd, f, indent=4)
def addRandomSolar(feed, item, count):
	'''Attach an inverter and solar panel pair under feed[item], with the inverter
	placed on a randomly chosen phase (A, B, or C).'''
	inverterName = 'new_solar_' + str(count)
	slotKey = feeder.getMaxKey(feed)
	randomPhase = rand.choice(['A','B','C'])
	feed[slotKey + 1] = {
		'object': 'inverter',
		'name': inverterName,
		'parent': feed[item]['name'],
		'phases': randomPhase + 'S',
		'inverter_type': 'PWM',
		'power_factor': '1.0',
		'generator_status': 'ONLINE',
		'generator_mode': 'CONSTANT_PF'
	}
	# The panel parents to the inverter created above.
	feed[slotKey + 2] = {
		'object': 'solar',
		'name': 'solar_' + str(count),
		'parent': inverterName,
		'area': '1000 sf',
		'generator_status': 'ONLINE',
		'efficiency': '0.2',
		'generator_mode': 'SUPPLY_DRIVEN',
		'panel_type': 'SINGLE_CRYSTAL_SILICON'
	}
def addRandomSolar(feed, item, count):
	'''Insert an inverter/solar object pair as children of feed[item]; the inverter
	lands on one randomly selected phase.'''
	# NOTE(review): this re-defines addRandomSolar and shadows an earlier identical
	# definition in this file — consider deleting one copy.
	baseKey = feeder.getMaxKey(feed)
	inverterOb = {
		'object': 'inverter',
		'name': 'new_solar_' + str(count),
		'parent': feed[item]['name'],
		'phases': rand.choice(['A', 'B', 'C']) + 'S',
		'inverter_type': 'PWM',
		'power_factor': '1.0',
		'generator_status': 'ONLINE',
		'generator_mode': 'CONSTANT_PF'
	}
	panelOb = {
		'object': 'solar',
		'name': 'solar_' + str(count),
		'parent': 'new_solar_' + str(count),
		'area': '1000 sf',
		'generator_status': 'ONLINE',
		'efficiency': '0.2',
		'generator_mode': 'SUPPLY_DRIVEN',
		'panel_type': 'SINGLE_CRYSTAL_SILICON'
	}
	feed[baseKey + 1] = inverterOb
	feed[baseKey + 2] = panelOb
def work(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.
	Attaches recorders to the feeder tree, runs GridLAB-D in modelDir, then
	post-processes the raw CSV output into the outData dict returned to the caller.
	Raises Exception with the GridLAB-D stderr log if the run produced no output.
	'''
	# feederName = inputDict["feederName1"]
	# Use the first .omd file found in the model directory as the feeder.
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), pJoin(modelDir, "climate.tmy2"))
	feederJson = json.load(open(pJoin(modelDir, feederName + '.omd')))
	tree = feederJson["tree"]
	# Set up GLM with correct time and recorders:
	feeder.attachRecorders(tree, "Regulator", "object", "regulator")
	feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
	feeder.attachRecorders(tree, "Inverter", "object", "inverter")
	feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
	feeder.attachRecorders(tree, "CollectorVoltage", None, None)
	feeder.attachRecorders(tree, "Climate", "object", "climate")
	feeder.attachRecorders(tree, "OverheadLosses", None, None)
	feeder.attachRecorders(tree, "UndergroundLosses", None, None)
	feeder.attachRecorders(tree, "TriplexLosses", None, None)
	feeder.attachRecorders(tree, "TransformerLosses", None, None)
	feeder.groupSwingKids(tree)
	# Attach recorders for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':3600}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach recorders for system voltage map, triplex:
	stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':3600}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'nVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach current recorder for overhead_lines
	currentStub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	for phase in ['A','B','C']:
		copyCurrentStub = dict(currentStub)
		copyCurrentStub['property'] = 'current_out_' + phase
		copyCurrentStub['file'] = 'OH_line_current_phase' + phase + '.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyCurrentStub
	rating_stub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	copyRatingStub = dict(rating_stub)
	copyRatingStub['property'] = 'continuous_rating'
	copyRatingStub['file'] = 'OH_line_cont_rating.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyRatingStub
	flow_stub = {'object':'group_recorder', 'group':'"class=overhead_line"', 'interval':3600}
	copyFlowStub = dict(flow_stub)
	copyFlowStub['property'] = 'flow_direction'
	copyFlowStub['file'] = 'OH_line_flow_direc.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyFlowStub
	# Attach current recorder for underground_lines
	currentStubOH = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	for phase in ['A','B','C']:
		copyCurrentStubOH = dict(currentStubOH)
		copyCurrentStubOH['property'] = 'current_out_' + phase
		copyCurrentStubOH['file'] = 'UG_line_current_phase' + phase + '.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyCurrentStubOH
	ug_rating_stub = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	copyUGRatingStub = dict(ug_rating_stub)
	copyUGRatingStub['property'] = 'continuous_rating'
	copyUGRatingStub['file'] = 'UG_line_cont_rating.csv'
	tree[feeder.getMaxKey(tree) + 1] = copyUGRatingStub
	ug_flow_stub = {'object':'group_recorder', 'group':'"class=underground_line"', 'interval':3600}
	ugCopyFlowStub = dict(ug_flow_stub)
	ugCopyFlowStub['property'] = 'flow_direction'
	ugCopyFlowStub['file'] = 'UG_line_flow_direc.csv'
	tree[feeder.getMaxKey(tree) + 1] = ugCopyFlowStub
	# And get meters for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':3600}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'mVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Convert the swing bus to a meter so measured power/voltage can be recorded there.
	for key in tree:
		if 'bustype' in tree[key].keys():
			if tree[key]['bustype'] == 'SWING':
				tree[key]['object'] = 'meter'
				swingN = tree[key]['name']
	# NOTE(review): swingN is unbound here if the tree has no SWING bus — confirm that is impossible upstream.
	swingRecord = {'object':'recorder', 'property':'voltage_A,measured_real_power,measured_power','file':'subVoltsA.csv','parent':swingN, 'interval':60}
	tree[feeder.getMaxKey(tree) + 1] = swingRecord
	# Tighten any 3600 s minimum_timestep override down to 60 s.
	for key in tree:
		if 'omftype' in tree[key].keys() and tree[key]['argument']=='minimum_timestep=3600':
			tree[key]['argument'] = 'minimum_timestep=60'
	# If there is a varvolt object in the tree, add recorder to swingbus and node from voltage_measurements property
	# Find var_volt object
	downLineNode = 'None'
	for key in tree:
		if 'object' in tree[key].keys() and tree[key]['object']=='volt_var_control':
			downLineNode = tree[key]['voltage_measurements']
	if downLineNode != 'None':
		downNodeRecord = {'object':'recorder', 'property':'voltage_A','file':'firstDownlineVoltsA.csv','parent':downLineNode, 'interval':60}
		tree[feeder.getMaxKey(tree) + 1] = downNodeRecord
	# Violation recorder to display to users
	# violationRecorder = {'object':'violation_recorder','node_continuous_voltage_limit_lower':0.95,'file':'Violation_Log.csv',
	# 	'secondary_dist_voltage_rise_lower_limit':-0.042,'substation_pf_lower_limit':0.85,'substation_breaker_C_limit':300,
	# 	'secondary_dist_voltage_rise_upper_limit':0.025,'substation_breaker_B_limit':300,'violation_flag':'ALLVIOLATIONS',
	# 	'node_instantaneous_voltage_limit_upper':1.1, 'inverter_v_chng_per_interval_lower_bound':-0.05, 'virtual_substation':swingN,
	# 	'substation_breaker_A_limit':300, 'xfrmr_thermal_limit_lower':0,'node_continuous_voltage_interval':300,'strict':'false',
	# 	'node_instantaneous_voltage_limit_lower':0,'line_thermal_limit_upper':1,'echo':'false','node_continuous_voltage_limit_upper':1.05,
	# 	'interval':30,'line_thermal_limit_lower':0,'summary':'Violation_Summary.csv','inverter_v_chng_interval':60,
	# 	'xfrmr_thermal_limit_upper':2,'inverter_v_chng_per_interval_upper_bound':0.050}
	# tree[feeder.getMaxKey(tree) + 1] = violationRecorder
	feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
	# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
	rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir))
	# voltDumps have no values when gridlabD fails or the files dont exist
	if not os.path.isfile(pJoin(modelDir,'aVoltDump.csv')):
		with open (pJoin(modelDir,'stderr.txt')) as inFile:
			stdErrText = inFile.read()
		message = 'GridLAB-D crashed. Error log:\n' + stdErrText
		raise Exception(message)
	elif len(rawOut['aVoltDump.csv']['# timestamp']) == 0:
		with open (pJoin(modelDir,'stderr.txt')) as inFile:
			stdErrText = inFile.read()
		message = 'GridLAB-D crashed. Error log:\n' + stdErrText
		raise Exception(message)
	outData = {}
	# Std Err and Std Out
	outData['stderr'] = rawOut['stderr']
	outData['stdout'] = rawOut['stdout']
	# Time Stamps
	# NOTE(review): the else branch resets 'timeStamps' to [] on every key that matches
	# neither header, so the final value can depend on dict iteration order — verify.
	for key in rawOut:
		if '# timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# timestamp']
			break
		elif '# property.. timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# property.. timestamp']
		else:
			outData['timeStamps'] = []
	# Day/Month Aggregation Setup:
	stamps = outData.get('timeStamps',[])
	level = inputDict.get('simLengthUnits','hours')
	# Climate
	for key in rawOut:
		if key.startswith('Climate_') and key.endswith('.csv'):
			outData['climate'] = {}
			outData['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
			outData['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
			outData['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
			outData['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
			outData['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			#outData['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
			climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
			#converting W/sf to W/sm
			climateWbySMList= [x*10.76392 for x in climateWbySFList]
			outData['climate']['Global Horizontal (W/sm)']=climateWbySMList
	# Voltage Band
	if 'VoltageJiggle.csv' in rawOut:
		outData['allMeterVoltages'] = {}
		outData['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
		outData['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
	# Power Consumption
	outData['Consumption'] = {}
	# Set default value to be 0, avoiding missing value when computing Loads
	outData['Consumption']['Power'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['DG'] = [0] * int(inputDict["simLength"])
	for key in rawOut:
		if key.startswith('SwingKids_') and key.endswith('.csv'):
			oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
			if 'Power' not in outData['Consumption']:
				outData['Consumption']['Power'] = oneSwingPower
			else:
				outData['Consumption']['Power'] = vecSum(oneSwingPower,outData['Consumption']['Power'])
		elif key.startswith('Inverter_') and key.endswith('.csv'):
			realA = rawOut[key]['power_A.real']
			realB = rawOut[key]['power_B.real']
			realC = rawOut[key]['power_C.real']
			imagA = rawOut[key]['power_A.imag']
			imagB = rawOut[key]['power_B.imag']
			imagC = rawOut[key]['power_C.imag']
			oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key.startswith('Windmill_') and key.endswith('.csv'):
			vrA = rawOut[key]['voltage_A.real']
			vrB = rawOut[key]['voltage_B.real']
			vrC = rawOut[key]['voltage_C.real']
			viA = rawOut[key]['voltage_A.imag']
			viB = rawOut[key]['voltage_B.imag']
			viC = rawOut[key]['voltage_C.imag']
			crB = rawOut[key]['current_B.real']
			crA = rawOut[key]['current_A.real']
			crC = rawOut[key]['current_C.real']
			ciA = rawOut[key]['current_A.imag']
			ciB = rawOut[key]['current_B.imag']
			ciC = rawOut[key]['current_C.imag']
			# Per-phase apparent power = |V| * |I|.
			powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
			powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
			powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
			oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
			realA = rawOut[key]['sum(power_losses_A.real)']
			imagA = rawOut[key]['sum(power_losses_A.imag)']
			realB = rawOut[key]['sum(power_losses_B.real)']
			imagB = rawOut[key]['sum(power_losses_B.imag)']
			realC = rawOut[key]['sum(power_losses_C.real)']
			imagC = rawOut[key]['sum(power_losses_C.imag)']
			oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'Losses' not in outData['Consumption']:
				outData['Consumption']['Losses'] = oneLoss
			else:
				outData['Consumption']['Losses'] = vecSum(oneLoss,outData['Consumption']['Losses'])
		elif key.startswith('Regulator_') and key.endswith('.csv'):
			# Strip .csv from the filename and use the rest as the output key,
			# e.g. Regulator_VR10.csv -> Regulator_VR10.
			regName=""
			regName = key
			newkey=regName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapA'] = rawOut[key]['tap_A']
			outData[newkey]['RegTapB'] = rawOut[key]['tap_B']
			outData[newkey]['RegTapC'] = rawOut[key]['tap_C']
			outData[newkey]['RegPhases'] = rawOut[key]['phases'][0]
		elif key.startswith('Capacitor_') and key.endswith('.csv'):
			capName=""
			capName = key
			newkey=capName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1A'] = rawOut[key]['switchA']
			outData[newkey]['Cap1B'] = rawOut[key]['switchB']
			outData[newkey]['Cap1C'] = rawOut[key]['switchC']
			outData[newkey]['CapPhases'] = rawOut[key]['phases'][0]
	# Capture voltages at the swingbus
	# Loop through voltDump for swingbus voltages
	subData = []
	downData = []
	with open(pJoin(modelDir,"subVoltsA.csv")) as subFile:
		reader = csv.reader(subFile)
		subData = [x for x in reader]
	if downLineNode != 'None':
		with open(pJoin(modelDir,"firstDownlineVoltsA.csv")) as downFile:
			reader = csv.reader(downFile)
			downData = [x for x in reader]
	# Recorder CSVs: data rows start at index 9; the final row is dropped.
	FIRST_DATA_ROW = 9
	cleanDown = [stringToMag(x[1]) for x in downData[FIRST_DATA_ROW:-1]]
	swingTimestamps = [x[0] for x in subData[FIRST_DATA_ROW:-1]]
	cleanSub = [stringToMag(x[1]) for x in subData[FIRST_DATA_ROW:-1]]
	# real_power / power
	powerFactors = []
	for row in subData[FIRST_DATA_ROW:-1]:
		powerFactors.append(abs(float(row[2])/stringToMag(row[3])))
	outData['powerFactors'] = powerFactors
	outData['swingVoltage'] = cleanSub
	outData['downlineNodeVolts'] = cleanDown
	outData['swingTimestamps'] = swingTimestamps
	# If there is a var volt system, find the min and max voltage for a band
	minVoltBand = []
	maxVoltBand = []
	if downLineNode != 'None':
		for key in tree:
			objKeys = tree[key].keys()
			if 'object' in objKeys:
				if tree[key]['object']=='volt_var_control':
					minVoltBand.append(float(tree[key]['minimum_voltages']))
					maxVoltBand.append(float(tree[key]['maximum_voltages']))
		outData['minVoltBand'] = minVoltBand
		outData['maxVoltBand'] = maxVoltBand
	# Violation Summary and Log
	# violationData = ''
	# violationArray = []
	# with open(pJoin(modelDir,"Violation_Summary.csv")) as vioSum:
	# 	reader = csv.reader(vioSum)
	# 	for row in reader:
	# 		violationArray.append(row)
	# for row in violationArray[4:]:
	# 	violationData += str(' '.join(row)) + "\n"
	# outData["violationSummary"] = violationData
	# violationLogArray = []
	# violationLog = ''
	# with open(pJoin(modelDir,"Violation_Log.csv")) as vioLog:
	# 	logger = csv.reader(vioLog)
	# 	for row in logger:
	# 		violationLogArray.append(row)
	# for row in violationLogArray[6:]:
	# 	violationLog += str(' '.join(row)) + "\n"
	# outData['violationLog'] = violationLog
	# What percentage of our keys have lat lon data?
	latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
	latPerc = 1.0*len(latKeys)/len(tree)
	# Fall back to the neato layout when under 25% of objects carry coordinates.
	if latPerc < 0.25:
		doNeato = True
	else:
		doNeato = False
	# Generate the frames for the system voltage map time traveling chart.
	genTime, mapTimestamp = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
	outData['genTime'] = genTime
	outData['mapTimestamp'] = mapTimestamp
	# Aggregate up the timestamps:
	if level=='days':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
	elif level=='months':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
	return outData
def attachVolts(workDir, feederPath, voltVectorA, voltVectorB, voltVectorC, simStartDate, simLength, simLengthUnits):
	'''read voltage vectors of 3 different phases, run gridlabd, and attach output to the feeder.
	workDir: directory containing a gridlabD/ subfolder the .player files are written to.
	feederPath: path of the .omd feeder file to load.
	voltVectorA/B/C: 8760 hourly voltage magnitudes, one per phase.
	simStartDate: dict with 'Date' (datetime) and 'timeZone' keys.
	Returns (path to calibratedFeeder.omd, True) on success, ("", False) on any failure.
	'''
	try:
		# Build 8760 hourly timestamps starting at the given date.
		timeStamp = [simStartDate['Date']]
		for x in range (1, 8760):
			timeStamp.append(timeStamp[x-1] + dt.timedelta(hours=1))
		# NOTE(review): this is timeStamp[1] (the second hour), not timeStamp[0] —
		# confirm the one-hour offset is intentional.
		firstDateTime = timeStamp[1]
		# Phase A player: purely real voltage ("+0j").
		with open(pJoin(pJoin(workDir,"gridlabD"),"phaseAVoltage.player"),"w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f"%float(voltVectorA[x]))+"+0j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		# Phase B player: negative imaginary part drawn uniformly from [6449, 6460].
		with open(pJoin(pJoin(workDir,"gridlabD"),"phaseBVoltage.player"),"w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f"%float(voltVectorB[x]))+"-"+str("%0.4f"%float(random.uniform(6449,6460)))+"j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		# Phase C player: positive imaginary part drawn from the same range.
		with open(pJoin(pJoin(workDir,"gridlabD"),"phaseCVoltage.player"),"w") as voltFile:
			for x in range(0, 8760):
				timestamp = timeStamp[x]
				voltage = str("%0.2f"%float(voltVectorC[x]))+"+"+str("%0.4f"%float(random.uniform(6449,6460)))+"j"
				line = timestamp.strftime("%Y-%m-%d %H:%M:%S") + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n"
				voltFile.write(line)
		with open(feederPath, "r") as jsonIn:
			feederJson = json.load(jsonIn)
			tree = feederJson.get("tree", {})
		# Find swingNode name.
		# NOTE(review): swingName stays unbound if no SWING bus exists; the bare
		# except below would silently mask the resulting NameError.
		for key in tree:
			if tree[key].get('bustype','').lower() == 'swing':
				swingName = tree[key].get('name')
		# Attach player.
		classOb = {'omftype':'class player','argument':'{double value;}'}
		voltageObA = {"object":"player", "property":"voltage_A", "file":"phaseAVoltage.player", "loop":"0", "parent":swingName}
		voltageObB = {"object":"player", "property":"voltage_B", "file":"phaseBVoltage.player", "loop":"0", "parent":swingName}
		voltageObC = {"object":"player", "property":"voltage_C", "file":"phaseCVoltage.player", "loop":"0", "parent":swingName}
		maxKey = feeder.getMaxKey(tree)
		voltplayerKeyA = maxKey + 2
		voltplayerKeyB = maxKey + 3
		voltplayerKeyC = maxKey + 4
		tree[maxKey+1] = classOb
		tree[voltplayerKeyA] = voltageObA
		tree[voltplayerKeyB] = voltageObB
		tree[voltplayerKeyC] = voltageObC
		# Adjust time and run output.
		feeder.adjustTime(tree, simLength, simLengthUnits, firstDateTime.strftime("%Y-%m-%d %H:%M:%S"))
		output = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir,"gridlabD"))
		# Write the output: the modified tree plus the three players as attachments.
		with open(pJoin(workDir,"calibratedFeeder.omd"),"w") as outJson:
			playerStringA = open(pJoin(pJoin(workDir,"gridlabD"),"phaseAVoltage.player")).read()
			playerStringB = open(pJoin(pJoin(workDir,"gridlabD"),"phaseBVoltage.player")).read()
			playerStringC = open(pJoin(pJoin(workDir,"gridlabD"),"phaseCVoltage.player")).read()
			feederJson["attachments"]["phaseAVoltage.player"] = playerStringA
			feederJson["attachments"]["phaseBVoltage.player"] = playerStringB
			feederJson["attachments"]["phaseCVoltage.player"] = playerStringC
			feederJson["tree"] = tree
			json.dump(feederJson, outJson, indent=4)
		return pJoin(workDir,"calibratedFeeder.omd"), True
	except:
		# Best-effort: any failure (missing dirs, GridLAB-D errors) reports False rather than raising.
		print "Failed to run gridlabD with voltage players."
		return "", False
def omfCalibrate(workDir, feederPath, scadaPath, simStartDate, simLength, simLengthUnits, solver="FBS", calibrateError=(0.05,5), trim=5):
	'''calibrates a feeder and saves the calibrated tree at a location.
	Note: feeders with cap banks should be calibrated with cap banks OPEN.
	We have seen cap banks throw off calibration.

	workDir: working directory; GridLAB-D runs happen in workDir/gridlabD.
	feederPath: path to the .omd feeder file to calibrate.
	scadaPath: path to the SCADA data consumed by _processScadaData.
	simStartDate: dict with 'Date' (datetime) and 'timeZone' keys.
	simLength/simLengthUnits: simulation span; the first `trim` rows are discarded.
	solver: GridLAB-D powerflow solver_method.
	calibrateError: (max fractional error, max iteration count) stopping criteria.
	Writes calibratedFeeder.omd (tree + final scaled player attachment) into workDir.
	'''
	with open(feederPath, "r") as jsonIn:
		feederJson = json.load(jsonIn)
		tree = feederJson.get("tree", {})
	# Extend the run so the first `trim` output rows can be discarded below.
	simLength = simLength + trim
	# Process scada data.
	scadaSubPower = _processScadaData(pJoin(workDir,"gridlabD"),scadaPath, simStartDate, simLengthUnits)
	# Load specified solver.
	for key in tree:
		if tree[key].get("module","").lower() == "powerflow":
			tree[key] = {"module":"powerflow","solver_method":solver}
	# Attach player.
	classOb = {'omftype':'class player','argument':'{double value;}'}
	playerOb = {"object":"player", "property":"value", "name":"scadaLoads", "file":"subScada.player", "loop":"0"}
	maxKey = feeder.getMaxKey(tree)
	playerKey = maxKey + 2
	tree[maxKey+1] = classOb
	tree[playerKey] = playerOb
	# Make loads reference player.
	# NOTE(review): "power_pf_12" appears twice in this literal; the first value
	# ("0.95") is dead — Python keeps the second ("0.90"). Verify which was intended.
	loadTemplate = {"object": "triplex_load",
		"power_pf_12": "0.95",
		"impedance_pf_12": "0.98",
		"power_pf_12": "0.90",
		"impedance_fraction_12": "0.7",
		"power_fraction_12": "0.3"}
	loadTemplateR = {"object": "load",
		"impedance_pf_A": "0.98",
		"impedance_pf_B": "0.98",
		"impedance_pf_C": "0.98",
		"power_pf_A": "0.90",
		"power_pf_B": "0.90",
		"power_pf_C": "0.90",
		"impedance_fraction_A": "0.7",
		"impedance_fraction_B": "0.7",
		"impedance_fraction_C": "0.7",
		"power_fraction_A": "0.3",
		"power_fraction_B": "0.3",
		"power_fraction_C": "0.3"}
	for key in tree:
		ob = tree[key]
		if ob.get("object","") in ("triplex_node", "triplex_load") and (ob.get("power_12") or ob.get("base_power_12")):
			# Add to triplex_nodes.
			newOb = dict(loadTemplate)
			newOb["name"] = ob.get("name", "")
			newOb["parent"] = ob.get("parent", "")
			newOb["phases"] = ob.get("phases", "")
			newOb["nominal_voltage"] = ob.get("nominal_voltage","")
			newOb["latitude"] = ob.get("latitude","0")
			newOb["longitude"] = ob.get("longitude","0")
			oldPow = ob.get("power_12","").replace("j","d")
			if not oldPow:
				oldPow = ob.get("base_power_12")
				# Strip a pre-existing 17-char "scadaLoads.value*" prefix left by a
				# previous calibration pass.
				# NOTE(review): the membership test is lower-case but the prefix is
				# written as "scadaLoads.value*" below — confirm this check ever fires.
				if "scadaloads.value*" in oldPow:
					oldPow = oldPow[17:]
			pythagPower = gridlabd._strClean(oldPow)
			newOb["base_power_12"] = "scadaLoads.value*" + str(pythagPower)
			tree[key] = newOb
		elif ob.get("object","") == "load":
			# Add to residential_loads too.
			newOb = dict(loadTemplateR)
			newOb["name"] = ob.get("name", "")
			newOb["parent"] = ob.get("parent", "")
			newOb["phases"] = ob.get("phases", "")
			newOb["load_class"] = ob.get("load_class", "")
			newOb["nominal_voltage"] = ob.get("nominal_voltage","")
			newOb["latitude"] = ob.get("latitude","0")
			newOb["longitude"] = ob.get("longitude","0")
			# Each phase is best-effort: a missing/unparsable constant_power_X leaves
			# that base_power_X unset instead of aborting the whole calibration.
			try:
				oldPow = ob.get("constant_power_A","").replace("j","d")
				pythagPower = gridlabd._strClean(oldPow)
				newOb["base_power_A"] = "scadaLoads.value*" + str(pythagPower)
			except:
				pass
			try:
				oldPow = ob.get("constant_power_B","").replace("j","d")
				pythagPower = gridlabd._strClean(oldPow)
				newOb["base_power_B"] = "scadaLoads.value*" + str(pythagPower)
			except:
				pass
			try:
				oldPow = ob.get("constant_power_C","").replace("j","d")
				pythagPower = gridlabd._strClean(oldPow)
				newOb["base_power_C"] = "scadaLoads.value*" + str(pythagPower)
			except:
				pass
			tree[key] = newOb
	# Convert swing bus to a meter.
	for key in tree:
		if tree[key].get('bustype','').lower() == 'swing' and tree[key].get('object','') != 'meter':
			swingName = tree[key].get('name')
			regIndex = key
			tree[key]['object'] = 'meter'
	# Search for the substation meter and attach a recorder there.
	# NOTE(review): swingName is unbound if the tree has no SWING bus.
	for key in tree:
		if tree[key].get('bustype','').lower() == 'swing':
			swingName = tree[key].get('name')
	recOb = {"object": "recorder", "parent": swingName, "property": "measured_real_power,measured_reactive_power,measured_power", "file": "caliSub.csv", "interval": "3600"}
	outputRecorderKey = maxKey + 3
	tree[outputRecorderKey] = recOb
	feeder.adjustTime(tree, simLength, simLengthUnits, simStartDate['Date'].strftime("%Y-%m-%d %H:%M:%S"))
	# Run Gridlabd, calculate scaling constant.
	def runPowerflowIter(tree,scadaSubPower):
		'''Runs powerflow once, then iterates.
		Rescales the player file by SCAL_CONST = sum(scada)/sum(modeled) each pass
		until the fractional error or iteration cap in calibrateError is hit.
		Returns (initial real power, final real power, final player file name, iterations).'''
		# Run initial powerflow to get power.
		print "Starting calibration."
		print "Goal of calibration: Error: %s, Iterations: <%s, trim: %s"%(calibrateError[0], calibrateError[1], trim)
		output = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir,"gridlabD"))
		outRealPow = output["caliSub.csv"]["measured_real_power"][trim:simLength]
		outImagPower = output["caliSub.csv"]["measured_reactive_power"][trim:simLength]
		# Apparent power in kW: sqrt(P^2 + Q^2) / 1000.
		outAppPowerKw = [(x[0]**2 + x[1]**2)**0.5/1000 for x in zip(outRealPow, outImagPower)]
		lastFile = "subScada.player"
		nextFile = "subScadaCalibrated.player"
		nextPower = outAppPowerKw
		error = (sum(outRealPow)/1000-sum(scadaSubPower))/sum(scadaSubPower)
		iteration = 1
		print "First error:", error
		while abs(error)>calibrateError[0] and iteration<calibrateError[1]:
			# Run calibration and iterate up to 5 times.
			SCAL_CONST = sum(scadaSubPower)/sum(nextPower)
			print "Calibrating & running again... Error: %s, Iteration: %s, SCAL_CONST: %s"%(str(round(abs(error*100),6)), str(iteration), round(SCAL_CONST,6))
			# Rewrite the player file with every value scaled by SCAL_CONST.
			newPlayData = []
			with open(pJoin(pJoin(workDir,"gridlabD"), lastFile), "r") as playerFile:
				for line in playerFile:
					(key,val) = line.split(',')
					newPlayData.append(str(key) + ',' + str(float(val)*SCAL_CONST) + "\n")
			with open(pJoin(pJoin(workDir,"gridlabD"), nextFile), "w") as playerFile:
				for row in newPlayData:
					playerFile.write(row)
			tree[playerKey]["file"] = nextFile
			tree[outputRecorderKey]["file"] = "caliSubCheck.csv"
			nextOutput = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir,"gridlabD"))
			outRealPowIter = nextOutput["caliSubCheck.csv"]["measured_real_power"][trim:simLength]
			outImagPowerIter = nextOutput["caliSubCheck.csv"]["measured_reactive_power"][trim:simLength]
			nextAppKw = [(x[0]**2 + x[1]**2)**0.5/1000 for x in zip(outRealPowIter, outImagPowerIter)]
			lastFile = nextFile
			nextFile = "subScadaCalibrated"+str(iteration)+".player"
			nextPower = nextAppKw
			# Compute error and iterate.
			error = (sum(outRealPowIter)/1000-sum(scadaSubPower))/sum(scadaSubPower)
			iteration+=1
		else:
			# while/else: runs when the loop condition fails (normal termination).
			if iteration==1:
				# Loop never ran: the initial guess was already within tolerance.
				outRealPowIter = outRealPow
				SCAL_CONST = 1.0
			print "Calibration done: Error: %s, Iteration: %s, SCAL_CONST: %s"%(str(round(abs(error*100),2)), str(iteration), round(SCAL_CONST,2))
		return outRealPow, outRealPowIter, lastFile, iteration
	outRealPow, outRealPowIter, lastFile, iteration = runPowerflowIter(tree,scadaSubPower[trim:simLength])
	caliPowVectors = [[float(element) for element in scadaSubPower[trim:simLength]], [float(element)/1000 for element in outRealPow], [float(element)/1000 for element in outRealPowIter]]
	labels = ["scadaSubPower","initialGuess","finalGuess"]
	colors = ['red','lightblue','blue']
	chartData = {"Title":"Substation Calibration Check (Iterated "+str(iteration+1)+"X)", "fileName":"caliCheckPlot", "colors":colors,"labels":labels, "timeZone":simStartDate['timeZone']}
	# Trimming vectors to make them all the same length as the smallest vector
	minCaliPowVecLen = min(len(caliPowVectors[0]), len(caliPowVectors[1]), len(caliPowVectors[2]))
	caliPowVectors[0] = caliPowVectors[0][:minCaliPowVecLen]
	caliPowVectors[1] = caliPowVectors[1][:minCaliPowVecLen]
	caliPowVectors[2] = caliPowVectors[2][:minCaliPowVecLen]
	print "Len:", len(caliPowVectors[0]), len(caliPowVectors[1]), len(caliPowVectors[2])
	plotLine(workDir, caliPowVectors, chartData, simStartDate['Date']+dt.timedelta(hours=trim), simLengthUnits)
	# Write the final output.
	with open(pJoin(workDir,"calibratedFeeder.omd"),"w") as outJson:
		playerString = open(pJoin(pJoin(workDir,"gridlabD"),lastFile)).read()
		feederJson["attachments"][lastFile] = playerString
		feederJson["tree"] = tree
		json.dump(feederJson, outJson, indent=4)
	return
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	modelDir: working directory for this model run; outputs are written here.
	inputDict: model parameters (feederName1, weather, zipCode, simLength,
	simLengthUnits, simStartDate, ...); mutated in place with climateName,
	runTime, and (on failure) stderr, then re-serialized to allInputData.json.
	Returns None; all results land on disk (allOutputData.json etc.).
	NOTE: Python 2 source (print statements).
	'''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	# Best-effort mkdir: directory may already exist from a previous run.
	try:
		os.mkdir(pJoin(modelDir,'gldContainer'))
	except:
		pass
	try:
		feederName = inputDict["feederName1"]
		weather = inputDict["weather"]
		if weather == "typical":
			# TMY2 typical-year weather: resolve climate file from zip code and
			# copy it where the GLM expects it (gldContainer/climate.tmy2).
			inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
			shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
				pJoin(modelDir, "gldContainer", "climate.tmy2"))
			startTime = datetime.datetime.now()
		else:
			#hack for testing
			# Historical weather path: generate a CSV for a hard-coded station/date
			# range. NOTE(review): output path is hard-coded — confirm intended.
			makeClimateCsv('2010-07-01', '2010-08-01', 'DFW', 'Output/Automated dsoSimSuite Test/gldContainer/weather.csv')
			startTime = datetime.datetime.now()
		# NOTE(review): startTime is reassigned here regardless of branch, making
		# the two assignments above redundant.
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		# (keys() snapshots are taken so deletion during iteration is safe in Py2.)
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
				if feederJson['tree'][key]['object'] == 'climate':
					del feederJson['tree'][key]
		#add weather objects and modules to .glm if there is no climate file in the omd file
		if weather == "historical":
			# csv_reader-based climate fed from the generated weather.csv.
			oldMax = feeder.getMaxKey(tree)
			tree[oldMax + 1] = {'omftype':'module', 'argument':'tape'}
			tree[oldMax + 2] = {'omftype':'module', 'argument':'climate'}
			tree[oldMax + 3] = {'object':'csv_reader', 'name':'weatherReader', 'filename':'weather.csv'}
			tree[oldMax + 4] = {'object':'climate', 'name':'exampleClimate', 'tmyfile':'weather.csv', 'reader':'weatherReader'}
		else:
			# TMY2-based climate object reading the copied climate.tmy2.
			oldMax = feeder.getMaxKey(tree)
			tree[oldMax + 1] ={'object':'climate','name':'Climate','interpolate':'QUADRATIC', 'tmyfile':'climate.tmy2'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map:
		# One group_recorder per phase, writing {a,b,c}VoltDump.csv hourly.
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		# NOTE(review): the '# property.. timestamp' branch does not break, so a
		# later non-matching key will hit the else and reset timeStamps to [] —
		# confirm this is intended.
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList
		# Voltage Band
		# voltage_12 is divided by 2 to get the per-leg (120 V nominal) value.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				# Swing-bus apparent power, summed across all swing recorders.
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'):
				# Inverter DG: |S| per phase, summed over phases.
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Windmill DG: |V|*|I| per phase, summed over phases.
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				# Loss collectors: |S_loss| per phase, accumulated across files.
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				# Zero-filled defaults, immediately overwritten by recorder data.
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		# Sparse coordinates (<25%) -> let neato lay out the voltage chart.
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		if latPerc < 0.25:
			doNeato = True
		else:
			doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# Always record total wall-clock time (success or failure) and drop PPID.txt.
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
import omf.feeder as feeder
from omf.solvers.gridlabd import runInFilesystem

# Load the test feeder and note the current highest tree key so the new
# objects can be appended after it.
feed = feeder.parse('GC-12.47-1.glm')
maxKey = feeder.getMaxKey(feed)
print (feed[1])

# New equipment chain: node -> line to the feeder -> meter -> inverter -> solar.
# Appended in order at maxKey+1 .. maxKey+5.
newObjects = [
	{
		'object': 'node',
		'name': 'test_solar_node',
		'phases': 'ABCN',
		'nominal_voltage': '7200'
	},
	{
		'object': 'underground_line',
		'name': 'test_solar_line',
		'phases': 'ABCN',
		'from': 'test_solar_node',
		'to': 'GC-12-47-1_node_26',
		'length': '100',
		'configuration': 'line_configuration:6'
	},
	{
		'object': 'meter',
		'name': 'test_solar_meter',
		'parent': 'test_solar_node',
		'phases': 'ABCN',
		'nominal_voltage': '480'
	},
	{
		'object': 'inverter',
		'name': 'test_solar_inverter',
		'parent': 'test_solar_meter',
		'phases': 'AS',
		'inverter_type': 'PWM',
		'power_factor': '1.0',
		'generator_status': 'ONLINE',
		'generator_mode': 'CONSTANT_PF'
	},
	{
		'object': 'solar',
		'name': 'test_solar',
		'parent': 'test_solar_inverter',
		'area': '1000000 sf',
		'generator_status': 'ONLINE',
		'efficiency': '0.2',
		'generator_mode': 'SUPPLY_DRIVEN',
		'panel_type': 'SINGLE_CRYSTAL_SILICON'
	},
]
for offset, newObj in enumerate(newObjects, start=1):
	feed[maxKey + offset] = newObj
def heavyProcessing(modelDir, inputDict):
	''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE.

	Variant that always uses typical (TMY2) weather resolved from the zip code.
	modelDir: working directory for this model run; outputs are written here.
	inputDict: model parameters; mutated in place (climateName, runTime, and on
	failure stderr), then re-serialized to allInputData.json.
	Returns None; all results land on disk (allOutputData.json etc.).
	NOTE: Python 2 source (print statements).
	'''
	print "STARTING TO RUN", modelDir
	beginTime = datetime.datetime.now()
	# Get feeder name and data in.
	# Best-effort mkdir: directory may already exist from a previous run.
	try:
		os.mkdir(pJoin(modelDir,'gldContainer'))
	except:
		pass
	try:
		feederName = inputDict["feederName1"]
		# Resolve the TMY2 climate file from zip code and copy it where the GLM
		# expects it (gldContainer/climate.tmy2).
		inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"])
		shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
			pJoin(modelDir, "gldContainer", "climate.tmy2"))
		startTime = datetime.datetime.now()
		feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
		tree = feederJson["tree"]
		#add a check to see if there is already a climate object in the omd file
		#if there is delete the climate from attachments and the climate object
		# (keys() snapshots are taken so deletion during iteration is safe in Py2.)
		attachKeys = feederJson["attachments"].keys()
		for key in attachKeys:
			if key.endswith('.tmy2'):
				del feederJson['attachments'][key]
		treeKeys = feederJson["tree"].keys()
		for key in treeKeys:
			if 'object' in feederJson['tree'][key]:
				if feederJson['tree'][key]['object'] == 'climate':
					del feederJson['tree'][key]
		# Append the climate module and a TMY2-backed climate object.
		oldMax = feeder.getMaxKey(tree)
		tree[oldMax + 1] = {'omftype':'module', 'argument':'climate'}
		tree[oldMax + 2] ={'object':'climate','name':'Climate','interpolate':'QUADRATIC', 'tmyfile':'climate.tmy2'}
		# Set up GLM with correct time and recorders:
		feeder.attachRecorders(tree, "Regulator", "object", "regulator")
		feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
		feeder.attachRecorders(tree, "Inverter", "object", "inverter")
		feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
		feeder.attachRecorders(tree, "CollectorVoltage", None, None)
		feeder.attachRecorders(tree, "Climate", "object", "climate")
		feeder.attachRecorders(tree, "OverheadLosses", None, None)
		feeder.attachRecorders(tree, "UndergroundLosses", None, None)
		feeder.attachRecorders(tree, "TriplexLosses", None, None)
		feeder.attachRecorders(tree, "TransformerLosses", None, None)
		feeder.groupSwingKids(tree)
		# Attach recorders for system voltage map:
		# One group_recorder per phase, writing {a,b,c}VoltDump.csv hourly.
		stub = {'object':'group_recorder', 'group':'"class=node"', 'property':'voltage_A', 'interval':3600, 'file':'aVoltDump.csv'}
		for phase in ['A','B','C']:
			copyStub = dict(stub)
			copyStub['property'] = 'voltage_' + phase
			copyStub['file'] = phase.lower() + 'VoltDump.csv'
			tree[feeder.getMaxKey(tree) + 1] = copyStub
		feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir,'gldContainer'))
		cleanOut = {}
		# Std Err and Std Out
		cleanOut['stderr'] = rawOut['stderr']
		cleanOut['stdout'] = rawOut['stdout']
		# Time Stamps
		# NOTE(review): the '# property.. timestamp' branch does not break, so a
		# later non-matching key will hit the else and reset timeStamps to [] —
		# confirm this is intended.
		for key in rawOut:
			if '# timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# timestamp']
				break
			elif '# property.. timestamp' in rawOut[key]:
				cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp']
			else:
				cleanOut['timeStamps'] = []
		# Day/Month Aggregation Setup:
		stamps = cleanOut.get('timeStamps',[])
		level = inputDict.get('simLengthUnits','hours')
		# Climate
		for key in rawOut:
			if key.startswith('Climate_') and key.endswith('.csv'):
				cleanOut['climate'] = {}
				cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
				cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
				cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
				cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
				cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
				#cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
				climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
				#converting W/sf to W/sm
				climateWbySMList= [x*10.76392 for x in climateWbySFList]
				cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList
		# Voltage Band
		# voltage_12 is divided by 2 to get the per-leg (120 V nominal) value.
		if 'VoltageJiggle.csv' in rawOut:
			cleanOut['allMeterVoltages'] = {}
			cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
			cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
			cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
		# Power Consumption
		cleanOut['Consumption'] = {}
		# Set default value to be 0, avoiding missing value when computing Loads
		cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
		cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"])
		for key in rawOut:
			if key.startswith('SwingKids_') and key.endswith('.csv'):
				# Swing-bus apparent power, summed across all swing recorders.
				oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
				if 'Power' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Power'] = oneSwingPower
				else:
					cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power'])
			elif key.startswith('Inverter_') and key.endswith('.csv'):
				# Inverter DG: |S| per phase, summed over phases.
				realA = rawOut[key]['power_A.real']
				realB = rawOut[key]['power_B.real']
				realC = rawOut[key]['power_C.real']
				imagA = rawOut[key]['power_A.imag']
				imagB = rawOut[key]['power_B.imag']
				imagC = rawOut[key]['power_C.imag']
				oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key.startswith('Windmill_') and key.endswith('.csv'):
				# Windmill DG: |V|*|I| per phase, summed over phases.
				vrA = rawOut[key]['voltage_A.real']
				vrB = rawOut[key]['voltage_B.real']
				vrC = rawOut[key]['voltage_C.real']
				viA = rawOut[key]['voltage_A.imag']
				viB = rawOut[key]['voltage_B.imag']
				viC = rawOut[key]['voltage_C.imag']
				crB = rawOut[key]['current_B.real']
				crA = rawOut[key]['current_A.real']
				crC = rawOut[key]['current_C.real']
				ciA = rawOut[key]['current_A.imag']
				ciB = rawOut[key]['current_B.imag']
				ciC = rawOut[key]['current_C.imag']
				powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
				powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
				powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
				oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
				if 'DG' not in cleanOut['Consumption']:
					cleanOut['Consumption']['DG'] = oneDgPower
				else:
					cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG'])
			elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
				# Loss collectors: |S_loss| per phase, accumulated across files.
				realA = rawOut[key]['sum(power_losses_A.real)']
				imagA = rawOut[key]['sum(power_losses_A.imag)']
				realB = rawOut[key]['sum(power_losses_B.real)']
				imagB = rawOut[key]['sum(power_losses_B.imag)']
				realC = rawOut[key]['sum(power_losses_C.real)']
				imagC = rawOut[key]['sum(power_losses_C.imag)']
				oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
				if 'Losses' not in cleanOut['Consumption']:
					cleanOut['Consumption']['Losses'] = oneLoss
				else:
					cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses'])
			elif key.startswith('Regulator_') and key.endswith('.csv'):
				#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
				regName=""
				regName = key
				newkey=regName.split(".")[0]
				cleanOut[newkey] ={}
				# Zero-filled defaults, immediately overwritten by recorder data.
				cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A']
				cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B']
				cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C']
				cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0]
			elif key.startswith('Capacitor_') and key.endswith('.csv'):
				capName=""
				capName = key
				newkey=capName.split(".")[0]
				cleanOut[newkey] ={}
				cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
				cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA']
				cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB']
				cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC']
				cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0]
		# What percentage of our keys have lat lon data?
		# Sparse coordinates (<25%) -> let neato lay out the voltage chart.
		latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
		latPerc = 1.0*len(latKeys)/len(tree)
		if latPerc < 0.25:
			doNeato = True
		else:
			doNeato = False
		# Generate the frames for the system voltage map time traveling chart.
		genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
		cleanOut['genTime'] = genTime
		# Aggregate up the timestamps:
		if level=='days':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
		elif level=='months':
			cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
		# Write the output.
		with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile:
			json.dump(cleanOut, outFile, indent=4)
		# Update the runTime in the input file.
		endTime = datetime.datetime.now()
		inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds())))
		with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
		# Clean up the PID file.
		os.remove(pJoin(modelDir, "gldContainer", "PID.txt"))
		print "DONE RUNNING", modelDir
	except Exception as e:
		# If input range wasn't valid delete output, write error to disk.
		cancel(modelDir)
		thisErr = traceback.format_exc()
		print 'ERROR IN MODEL', modelDir, thisErr
		inputDict['stderr'] = thisErr
		with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile:
			errorFile.write(thisErr)
		with open(pJoin(modelDir,"allInputData.json"),"w") as inFile:
			json.dump(inputDict, inFile, indent=4)
	# Always record total wall-clock time (success or failure) and drop PPID.txt.
	finishTime = datetime.datetime.now()
	inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds())))
	with open(pJoin(modelDir, "allInputData.json"),"w") as inFile:
		json.dump(inputDict, inFile, indent = 4)
	try:
		os.remove(pJoin(modelDir,"PPID.txt"))
	except:
		pass
def writeNewGlmAndPlayers(omdPath, amiPath, outputDir):
	''' Take a glm and an AMI data set, and create a new GLM and set of players that combine them.

	omdPath: path to the source .omd (JSON with a 'tree' feeder dict).
	amiPath: path to the AMI data set consumed by amiImport().
	outputDir: directory that receives out.glm and the player_*.csv files;
	the combined .omd is written one directory above outputDir.
	Returns None.
	'''
	# Pull in the main data objects.
	with open(omdPath, 'r') as jsonFile:
		omdObj = json.load(jsonFile)
	omdName = basename(omdPath)
	feederObj = omdObj['tree']
	amiData = amiImport(amiPath)
	# Make the output directory.
	if not os.path.isdir(outputDir):
		os.mkdir(outputDir)
	# Attach the player class to feeder if needed.
	omfTypes = set([feederObj[k].get('omftype', '') for k in feederObj])
	if 'class player' not in omfTypes:
		newKey = feeder.getMaxKey(feederObj)
		feederObj[newKey + 1] = {
			'omftype': 'class player',
			'argument': '{double value;}'
		}
	# All meter names we have in the AMI data set.
	meterNames = set([x.get('meterName', '') for x in amiData])
	# Attach all the players.
	# Iterate over a snapshot of keys: the loop body inserts new player objects
	# into feederObj while iterating.
	for key in list(feederObj.keys()):
		objName = feederObj[key].get('name', '')
		# Phases for which the AMI set has data on this meter.
		dataPhases = set([
			x.get('phase', '') for x in amiData
			if x.get('meterName', '') == objName
		])
		# Handle primary system loads.
		if feederObj[key].get('object', '') == 'load' and objName in meterNames:
			for phase in dataPhases:
				# Write the player file:
				createPlayerFile(
					amiData, objName, phase,
					outputDir + '/player_' + objName + '_' + phase + '.csv')
				# Put the object in the GLM:
				newKey = feeder.getMaxKey(feederObj)
				feederObj[newKey + 1] = {
					'object': 'player',
					'property': 'constant_power_' + phase,
					'file': 'player_' + objName + '_' + phase + '.csv',
					'parent': objName
				}
		# Handle secondary system loads.
		elif feederObj[key].get(
				'object', '') == 'triplex_node' and objName in meterNames:
			# Write the player file: triplex loads get a single 'S' phase player
			# driving the power_12 property.
			createPlayerFile(amiData, objName, 'S',
				outputDir + '/player_' + objName + '_S.csv')
			# Put the object in the GLM:
			newKey = feeder.getMaxKey(feederObj)
			feederObj[newKey + 1] = {
				'object': 'player',
				'property': 'power_12',
				'file': 'player_' + objName + '_S.csv',
				'parent': objName
			}
	# Write the GLM.
	with open(outputDir + '/out.glm', 'w') as outGlmFile:
		outString = feeder.sortedWrite(feederObj)
		outGlmFile.write(outString)
	#TODO: update omdObj tree object to match feederObj, and insert all .csv files in to the attachments, then write new .omd to outputDir.
	# omd = json.load(open('feederName.omd'))
	for player in os.listdir(outputDir):
		if player.startswith('player'):
			# NOTE(review): player files are named 'player_*.csv', so the
			# attachment key becomes 'player_*.csv.player' — confirm intended.
			name = basename(player)
			with open(pJoin(outputDir, player), 'r') as inFile:
				playerContents = inFile.read()
			omdObj['attachments'][name + '.player'] = playerContents
	# Write the combined .omd next to (one level above) the output directory.
	oneUp = pJoin(outputDir, '..')
	with open(pJoin(oneUp, omdName), 'w') as outFile:
		json.dump(omdObj, outFile, indent=4)
def heavyProcessing(modelDir, inputDict): ''' Run the model in its directory. WARNING: GRIDLAB CAN TAKE HOURS TO COMPLETE. ''' print "STARTING TO RUN", modelDir beginTime = datetime.datetime.now() # Get feeder name and data in. try: os.mkdir(pJoin(modelDir,'gldContainer')) except: pass try: feederName = inputDict["feederName1"] inputDict["climateName"], latforpvwatts = zipCodeToClimateName(inputDict["zipCode"]) shutil.copy(pJoin(__metaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), pJoin(modelDir, "gldContainer", "climate.tmy2")) startTime = datetime.datetime.now() feederJson = json.load(open(pJoin(modelDir, feederName+'.omd'))) tree = feederJson["tree"] #add a check to see if there is already a climate object in the omd file #if there is delete the climate from attachments and the climate object attachKeys = feederJson["attachments"].keys() for key in attachKeys: if key.endswith('.tmy2'): del feederJson['attachments'][key] treeKeys = feederJson["tree"].keys() for key in treeKeys: if 'object' in feederJson['tree'][key]: if feederJson['tree'][key]['object'] == 'climate': del feederJson['tree'][key] oldMax = feeder.getMaxKey(tree) tree[oldMax + 1] = {'omftype':'module','argument':'climate'} tree[oldMax + 2] = {'object':'climate','name':'Climate','interpolate':'QUADRATIC','tmyfile':'climate.tmy2'} # tree[oldMax + 3] = {'object':'capacitor','control':'VOLT','phases':'ABCN','name':'CAPTEST','parent':'tm_1','capacitor_A':'0.10 MVAr','capacitor_B':'0.10 MVAr','capacitor_C':'0.10 MVAr','time_delay':'300.0','nominal_voltage':'2401.7771','voltage_set_high':'2350.0','voltage_set_low':'2340.0','switchA':'CLOSED','switchB':'CLOSED','switchC':'CLOSED','control_level':'INDIVIDUAL','phases_connected':'ABCN','dwell_time':'0.0','pt_phases':'ABCN'} # Set up GLM with correct time and recorders: feeder.attachRecorders(tree, "Regulator", "object", "regulator") feeder.attachRecorders(tree, "Capacitor", "object", "capacitor") feeder.attachRecorders(tree, 
"Inverter", "object", "inverter") feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg") feeder.attachRecorders(tree, "CollectorVoltage", None, None) feeder.attachRecorders(tree, "Climate", "object", "climate") feeder.attachRecorders(tree, "OverheadLosses", None, None) feeder.attachRecorders(tree, "UndergroundLosses", None, None) feeder.attachRecorders(tree, "TriplexLosses", None, None) feeder.attachRecorders(tree, "TransformerLosses", None, None) feeder.groupSwingKids(tree) # Attach recorder for waterheaters on/off stub = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'is_waterheater_on', 'interval':3600, 'file':'allWaterheaterOn.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach recorder for waterheater tank temperatures stub = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'temperature', 'interval':3600, 'file':'allWaterheaterTemp.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach collector for total waterheater load stub = {'object':'collector', 'group':'"class=waterheater"', 'property':'sum(actual_load)', 'interval':3600, 'file':'allWaterheaterLoad.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach collector for total network load stub = {'object':'collector', 'group':'"class=triplex_meter"', 'property':'sum(measured_real_power)', 'interval':3600, 'file':'allMeterPower.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach collector for total overall ZIPload power/load stub = {'object':'collector', 'group':'"class=ZIPload"', 'property':'sum(base_power)', 'interval':3600, 'file':'allZIPloadPower.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach recorder for each ZIPload power/load stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'base_power', 'interval':3600, 'file':'eachZIPloadPower.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = 
copyStub # Attach recorder for all ZIPloads demand_rate stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'demand_rate', 'interval':3600, 'file':'allZIPloadDemand.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach recorder for all ZIPloads on stub = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'number_of_devices_on', 'interval':3600, 'file':'allZIPloadOn.csv'} copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # Attach passive_controller tree[feeder.getMaxKey(tree)+1] = {'omftype':'module','argument':'market'} tree[feeder.getMaxKey(tree)+1] = {'omftype':'class auction','argument':'{\n\tdouble my_avg; double my_std;\n}'} tree[feeder.getMaxKey(tree)+1] = {'omftype':'class player','argument':'{\n\tdouble value;\n}'} stub = { 'object':'player', 'name':'cppDays', 'file':'superCpp.player' } copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub stub = { 'object':'player', 'name':'superClearing', 'file':'superClearingPrice.player', 'loop':10 } copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub stub = { 'object':'auction', 'name':'MARKET_1', 'my_std':0.037953, 'period':900, 'my_avg':0.110000, 'current_market.clearing_price':'superClearing.value', 'special_mode':'BUYERS_ONLY', 'unit': 'kW' } copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub stub = { 'object':'passive_controller', 'name':'waterheater_controller_waterheater171923', 'parent':'waterheater171923', 'control_mode':'RAMP', 'range_high':5, 'range_low':-5, 'ramp_high':1, 'ramp_low':-1, 'period':900, 'setpoint':'is_waterheater_on', 'base_setpoint':1, 'expectation_object':'MARKET_1', 'expectation_property':'my_avg', 'observation_object':'MARKET_1', 'observation_property':'past_market.clearing_price', 'stdev_observation_property':'my_std', 'state_property':'override' } copyStub = dict(stub) tree[feeder.getMaxKey(tree)+1] = copyStub # stub = { # 'object':'passive_controller', # 
'name':'ZIPload_controller_ZIPload171922', # 'parent':'ZIPload171922', # 'control_mode':'RAMP', # 'range_high':5, # 'range_low':-5, # 'ramp_high':1, # 'ramp_low':-1, # 'period':900, # 'setpoint':'base_power' # 'base_setpoint':1, # 'expectation_object':'MARKET_1', # 'expectation_property':'my_avg', # 'observation_object':'MARKET_1', # 'observation_property':'past_market.clearing_price', # 'stdev_observation_property':'my_std' # 'state_property':'override' # } # copyStub = dict(stub) # tree[feeder.getMaxKey(tree)+1] = copyStub # Attach recorders for system voltage map: stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':3600} for phase in ['A','B','C']: copyStub = dict(stub) copyStub['property'] = 'voltage_' + phase copyStub['file'] = phase.lower() + 'VoltDump.csv' tree[feeder.getMaxKey(tree) + 1] = copyStub # Attach recorders for system voltage map, triplex: stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':3600} for phase in ['1','2']: copyStub = dict(stub) copyStub['property'] = 'voltage_' + phase copyStub['file'] = phase.lower() + 'nVoltDump.csv' tree[feeder.getMaxKey(tree) + 1] = copyStub # And get meters for system voltage map: stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':3600} for phase in ['1','2']: copyStub = dict(stub) copyStub['property'] = 'voltage_' + phase copyStub['file'] = phase.lower() + 'mVoltDump.csv' tree[feeder.getMaxKey(tree) + 1] = copyStub feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"]) # RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!) 
rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir,'gldContainer')) cleanOut = {} # Std Err and Std Out cleanOut['stderr'] = rawOut['stderr'] cleanOut['stdout'] = rawOut['stdout'] # Time Stamps for key in rawOut: print key if '# timestamp' in rawOut[key]: cleanOut['timeStamps'] = rawOut[key]['# timestamp'] break elif '# property.. timestamp' in rawOut[key]: cleanOut['timeStamps'] = rawOut[key]['# property.. timestamp'] else: cleanOut['timeStamps'] = [] # Day/Month Aggregation Setup: stamps = cleanOut.get('timeStamps',[]) level = inputDict.get('simLengthUnits','hours') # Climate for key in rawOut: if key.startswith('Climate_') and key.endswith('.csv'): cleanOut['climate'] = {} cleanOut['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level) cleanOut['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level) cleanOut['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level) cleanOut['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level) cleanOut['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level) #cleanOut['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level) climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level) #converting W/sf to W/sm climateWbySMList= [x*10.76392 for x in climateWbySFList] cleanOut['climate']['Global Horizontal (W/sm)']=climateWbySMList # Voltage Band if 'VoltageJiggle.csv' in rawOut: cleanOut['allMeterVoltages'] = {} cleanOut['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level) cleanOut['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level) cleanOut['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in 
rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level) cleanOut['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level) # Power Consumption cleanOut['Consumption'] = {} # Set default value to be 0, avoiding missing value when computing Loads cleanOut['Consumption']['Power'] = [0] * int(inputDict["simLength"]) cleanOut['Consumption']['Losses'] = [0] * int(inputDict["simLength"]) cleanOut['Consumption']['DG'] = [0] * int(inputDict["simLength"]) for key in rawOut: if key.startswith('SwingKids_') and key.endswith('.csv'): oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level) if 'Power' not in cleanOut['Consumption']: cleanOut['Consumption']['Power'] = oneSwingPower else: cleanOut['Consumption']['Power'] = vecSum(oneSwingPower,cleanOut['Consumption']['Power']) elif key.startswith('Inverter_') and key.endswith('.csv'): realA = rawOut[key]['power_A.real'] realB = rawOut[key]['power_B.real'] realC = rawOut[key]['power_C.real'] imagA = rawOut[key]['power_A.imag'] imagB = rawOut[key]['power_B.imag'] imagC = rawOut[key]['power_C.imag'] oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level) if 'DG' not in cleanOut['Consumption']: cleanOut['Consumption']['DG'] = oneDgPower else: cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG']) elif key.startswith('Windmill_') and key.endswith('.csv'): vrA = rawOut[key]['voltage_A.real'] vrB = rawOut[key]['voltage_B.real'] vrC = rawOut[key]['voltage_C.real'] viA = rawOut[key]['voltage_A.imag'] viB = rawOut[key]['voltage_B.imag'] viC = rawOut[key]['voltage_C.imag'] crB = rawOut[key]['current_B.real'] crA = rawOut[key]['current_A.real'] crC = rawOut[key]['current_C.real'] ciA = rawOut[key]['current_A.imag'] ciB = rawOut[key]['current_B.imag'] ciC = rawOut[key]['current_C.imag'] powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA)) 
powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB)) powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC)) oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level) if 'DG' not in cleanOut['Consumption']: cleanOut['Consumption']['DG'] = oneDgPower else: cleanOut['Consumption']['DG'] = vecSum(oneDgPower,cleanOut['Consumption']['DG']) elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']: realA = rawOut[key]['sum(power_losses_A.real)'] imagA = rawOut[key]['sum(power_losses_A.imag)'] realB = rawOut[key]['sum(power_losses_B.real)'] imagB = rawOut[key]['sum(power_losses_B.imag)'] realC = rawOut[key]['sum(power_losses_C.real)'] imagC = rawOut[key]['sum(power_losses_C.imag)'] oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level) if 'Losses' not in cleanOut['Consumption']: cleanOut['Consumption']['Losses'] = oneLoss else: cleanOut['Consumption']['Losses'] = vecSum(oneLoss,cleanOut['Consumption']['Losses']) elif key.startswith('Regulator_') and key.endswith('.csv'): #split function to strip off .csv from filename and user rest of the file name as key. 
for example- Regulator_VR10.csv -> key would be Regulator_VR10 regName="" regName = key newkey=regName.split(".")[0] cleanOut[newkey] ={} cleanOut[newkey]['RegTapA'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['RegTapB'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['RegTapC'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['RegTapA'] = rawOut[key]['tap_A'] cleanOut[newkey]['RegTapB'] = rawOut[key]['tap_B'] cleanOut[newkey]['RegTapC'] = rawOut[key]['tap_C'] cleanOut[newkey]['RegPhases'] = rawOut[key]['phases'][0] elif key.startswith('Capacitor_') and key.endswith('.csv'): capName="" capName = key newkey=capName.split(".")[0] cleanOut[newkey] ={} cleanOut[newkey]['Cap1A'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['Cap1B'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['Cap1C'] = [0] * int(inputDict["simLength"]) cleanOut[newkey]['Cap1A'] = rawOut[key]['switchA'] cleanOut[newkey]['Cap1B'] = rawOut[key]['switchB'] cleanOut[newkey]['Cap1C'] = rawOut[key]['switchC'] cleanOut[newkey]['CapPhases'] = rawOut[key]['phases'][0] # Print gridBallast Outputs to allOutputData.json cleanOut['gridBallast'] = {} if 'allWaterheaterOn.csv' in rawOut: cleanOut['gridBallast']['waterheaterOn'] = {} for key in rawOut['allWaterheaterOn.csv']: if key.startswith('waterheater'): cleanOut['gridBallast']['waterheaterOn'][key] = rawOut.get('allWaterheaterOn.csv')[key] if 'allWaterheaterTemp.csv' in rawOut: cleanOut['gridBallast']['waterheaterTemp'] = {} for key in rawOut['allWaterheaterTemp.csv']: if key.startswith('waterheater'): cleanOut['gridBallast']['waterheaterTemp'][key] = rawOut.get('allWaterheaterTemp.csv')[key] if 'allMeterPower.csv' in rawOut: cleanOut['gridBallast']['totalNetworkLoad'] = rawOut.get('allMeterPower.csv')['sum(measured_real_power)'] if ('allWaterheaterLoad.csv' in rawOut) and ('allZIPloadPower.csv' in rawOut): cleanOut['gridBallast']['availabilityMagnitude'] = [x + y for x, y in 
zip(rawOut.get('allWaterheaterLoad.csv')['sum(actual_load)'], rawOut.get('allZIPloadPower.csv')['sum(base_power)'])] if 'eachZIPloadPower.csv' in rawOut: cleanOut['gridBallast']['ZIPloadPower'] = {} for key in rawOut['eachZIPloadPower.csv']: if key.startswith('ZIPload'): cleanOut['gridBallast']['ZIPloadPower'][key] = rawOut.get('eachZIPloadPower.csv')[key] if 'allZIPloadDemand.csv' in rawOut: cleanOut['gridBallast']['ZIPloadDemand'] = {} for key in rawOut['allZIPloadDemand.csv']: if key.startswith('ZIPload'): cleanOut['gridBallast']['ZIPloadDemand'][key] = rawOut.get('allZIPloadDemand.csv')[key] if 'allZIPloadOn.csv' in rawOut: cleanOut['gridBallast']['ZIPloadOn'] = {} for key in rawOut['allZIPloadOn.csv']: if key.startswith('ZIPload'): cleanOut['gridBallast']['ZIPloadOn'][key] = rawOut.get('allZIPloadOn.csv')[key] # EventTime calculations eventTime = inputDict['eventTime'] eventLength = inputDict['eventLength'] eventLength = eventLength.split(':') eventDuration = datetime.timedelta(hours=int(eventLength[0]), minutes=int(eventLength[1])) eventStart = datetime.datetime.strptime(eventTime, '%Y-%m-%d %H:%M') eventEnd = eventStart + eventDuration cleanOut['gridBallast']['eventStart'] = str(eventStart) cleanOut['gridBallast']['eventEnd'] = str(eventEnd) # Drop timezone from timeStamp, Convert string to date timeStamps = [x[:19] for x in cleanOut['timeStamps']] dateTimeStamps = [datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S') for x in timeStamps] eventEndIdx = dateTimeStamps.index(eventEnd) # Recovery Time whOn = cleanOut['gridBallast']['waterheaterOn'] whOnList = whOn.values() whOnZip = zip(*whOnList) whOnSum = [sum(x) for x in whOnZip] anyOn = [x > 0 for x in whOnSum] tRecIdx = anyOn.index(True, eventEndIdx) tRec = dateTimeStamps[tRecIdx] cleanOut['gridBallast']['recoveryTime'] = str(tRec) # Waterheaters Off-Duration offDuration = tRec - eventStart cleanOut['gridBallast']['offDuration'] = str(offDuration) # Reserve Magnitude Target (RMT) availMag = 
cleanOut['gridBallast']['availabilityMagnitude'] totNetLoad = cleanOut['gridBallast']['totalNetworkLoad'] # loadZip = zip(availMag,totNetLoad) # rmt = [x[0]/x[1] for x in loadZip] rmt = (1000*sum(availMag))/sum(totNetLoad) cleanOut['gridBallast']['rmt'] = rmt # Reserve Magnitude Variability Tolerance (RMVT) avgAvailMag = sum(availMag)/len(availMag) rmvtMax = max(availMag)/avgAvailMag rmvtMin = min(availMag)/avgAvailMag rmvt = rmvtMax - rmvtMin cleanOut['gridBallast']['rmvt'] = rmvt # Availability notAvail = availMag.count(0)/len(timeStamps) avail = (1-notAvail)*100 cleanOut['gridBallast']['availability'] = avail # Waterheater Temperature Drop calculations whTemp = cleanOut['gridBallast']['waterheaterTemp'] whTempList = whTemp.values() whTempZip = zip(*whTempList) whTempDrops = [] LOWER_LIMIT_TEMP = 125 # Used for calculating quality of service. for time in whTempZip: tempDrop = sum([t < LOWER_LIMIT_TEMP for t in time]) whTempDrops.append(tempDrop) cleanOut['gridBallast']['waterheaterTempDrops'] = whTempDrops # ZIPload calculations for Availability and QoS zPower = cleanOut['gridBallast']['ZIPloadPower'] zPowerList = zPower.values() zPowerZip = zip(*zPowerList) zPowerSum = [sum(x) for x in zPowerZip] zDemand = cleanOut['gridBallast']['ZIPloadDemand'] zDemandList = zDemand.values() zDemandZip = zip(*zDemandList) zDrops = [] for time in zDemandZip: for each in zPowerZip: zIdx = 0 if each[zIdx] == 0: zPowerIdx += 1 zDrop = sum([t > 0 for t in time]) zDrops.append(zDrop) else: zDrops.append(0) cleanOut['gridBallast']['qualityDrops'] = [x + y for x, y in zip(whTempDrops, zDrops)] # What percentage of our keys have lat lon data? latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]] latPerc = 1.0*len(latKeys)/len(tree) if latPerc < 0.25: doNeato = True else: doNeato = False # Generate the frames for the system voltage map time traveling chart. 
genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato) cleanOut['genTime'] = genTime # Aggregate up the timestamps: if level=='days': cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days') elif level=='months': cleanOut['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months') # Write the output. with open(pJoin(modelDir, "allOutputData.json"),"w") as outFile: json.dump(cleanOut, outFile, indent=4) # Update the runTime in the input file. endTime = datetime.datetime.now() inputDict["runTime"] = str(datetime.timedelta(seconds=int((endTime - startTime).total_seconds()))) with open(pJoin(modelDir, "allInputData.json"),"w") as inFile: json.dump(inputDict, inFile, indent=4) # Clean up the PID file. os.remove(pJoin(modelDir, "gldContainer", "PID.txt")) print "DONE RUNNING", modelDir except Exception as e: # If input range wasn't valid delete output, write error to disk. cancel(modelDir) thisErr = traceback.format_exc() print 'ERROR IN MODEL', modelDir, thisErr inputDict['stderr'] = thisErr with open(os.path.join(modelDir,'stderr.txt'),'w') as errorFile: errorFile.write(thisErr) with open(pJoin(modelDir,"allInputData.json"),"w") as inFile: json.dump(inputDict, inFile, indent=4) finishTime = datetime.datetime.now() inputDict["runTime"] = str(datetime.timedelta(seconds = int((finishTime - beginTime).total_seconds()))) with open(pJoin(modelDir, "allInputData.json"),"w") as inFile: json.dump(inputDict, inFile, indent = 4) try: os.remove(pJoin(modelDir,"PPID.txt")) except: pass
def work(modelDir, inputDict):
	''' Run the gridBallast model in modelDir.

	Pipeline: (1) copy the TMY2 climate file for the user's zip code into the run
	directory, (2) load the feeder .omd and attach recorders, collectors, and
	gridBallast frequency-control properties to the GLM tree, (3) run GridLAB-D,
	and (4) post-process the raw CSV output into the outData dict returned to the
	caller (which writes it to allOutputData.json).

	inputDict keys read here: feederName1, zipCode, simLength, simLengthUnits,
	simStartDate, eventTime ('%Y-%m-%d %H:%M'), eventLength ('HH:MM').
	'''
	feederName = inputDict["feederName1"]
	# Map zip code to nearest climate station and stage its TMY2 file as climate.tmy2.
	inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"), pJoin(modelDir, "climate.tmy2"))
	feederJson = json.load(open(pJoin(modelDir, feederName+'.omd')))
	tree = feederJson["tree"]
	# tree[feeder.getMaxKey(tree)+1] = {'object':'capacitor','control':'VOLT','phases':'ABCN','name':'CAPTEST','parent':'tm_1','capacitor_A':'0.10 MVAr','capacitor_B':'0.10 MVAr','capacitor_C':'0.10 MVAr','time_delay':'300.0','nominal_voltage':'2401.7771','voltage_set_high':'2350.0','voltage_set_low':'2340.0','switchA':'CLOSED','switchB':'CLOSED','switchC':'CLOSED','control_level':'INDIVIDUAL','phases_connected':'ABCN','dwell_time':'0.0','pt_phases':'ABCN'}
	# Set up GLM with correct time and recorders:
	feeder.attachRecorders(tree, "Regulator", "object", "regulator")
	feeder.attachRecorders(tree, "Capacitor", "object", "capacitor")
	feeder.attachRecorders(tree, "Inverter", "object", "inverter")
	feeder.attachRecorders(tree, "Windmill", "object", "windturb_dg")
	feeder.attachRecorders(tree, "CollectorVoltage", None, None)
	feeder.attachRecorders(tree, "Climate", "object", "climate")
	feeder.attachRecorders(tree, "OverheadLosses", None, None)
	feeder.attachRecorders(tree, "UndergroundLosses", None, None)
	feeder.attachRecorders(tree, "TriplexLosses", None, None)
	feeder.attachRecorders(tree, "TransformerLosses", None, None)
	feeder.groupSwingKids(tree)
	# System check - linux doesn't support newer GridLAB-D versions
	if sys.platform == 'linux2':
		pass
	else:
		# print feeder.getMaxKey(tree)
		# tree[14,20,27,28,47] empty for UCS Egan, add climate object to tree[14]
		# HACK: tree[10:19] is empty
		tree[11] = {'omftype':'#include', 'argument':'\"hot_water_demand.glm\"'}
		tree[12] = {'omftype':'#include', 'argument':'\"lock_mode_schedule.glm\"'}
		tree[13] = {'omftype':'#include', 'argument':'\"control_priority_schedule.glm\"'}
		# Attach frequency player
		tree[14] = {'omftype':'class player', 'argument':'{double value;}'}
		tree[feeder.getMaxKey(tree)+1] = {'object':'player', 'file':'frequency.PLAYER', 'property':'value', 'name':'frequency', 'loop':0}
	# Set up GridBallast Controls
	totalWH = 0   # all waterheaters seen in the tree
	totalZIP = 0  # all ZIPloads seen in the tree
	gbWH = 0      # waterheaters given gridBallast frequency control
	gbZIP = 0     # ZIPloads given gridBallast frequency control
	for key in tree.keys():
		# Waterheater Controller properties
		if ('name' in tree[key]) and (tree[key].get('object') == 'waterheater'):
			totalWH += 1
			gbWH += 1
			# Frequency control parameters: shed load outside the 59-61 Hz band,
			# reading grid frequency from the 'frequency' player attached above.
			tree[key]['enable_freq_control'] = 'true'
			tree[key]['measured_frequency'] = 'frequency.value'
			tree[key]['freq_lowlimit'] = 59
			tree[key]['freq_uplimit'] = 61
			tree[key]['heat_mode'] = 'ELECTRIC'
			# tree[key]['average_delay_time'] = 60
			# Voltage control parameters
			# tree[key]['enable_volt_control'] = 'true'
			# tree[key]['volt_lowlimit'] = 240.4
			# tree[key]['volt_uplimit'] = 241.4
			# Lock Mode parameters
			# tree[key]['enable_lock'] = 'temp_lock_enable'
			# tree[key]['lock_STATUS'] = 'temp_lock_status'
			# Controller Priority: a.lock, b.freq, c.volt, d.therm
			tree[key]['controller_priority'] = 3214 #default:therm>lock>freq>volt
			# tree[key]['controller_priority'] = 1423 #freq>therm>volt>lock
			# tree[key]['controller_priority'] = 'control_priority'
			# fix waterheater property demand to water_demand for newer GridLAB-D versions
			if 'demand' in tree[key]:
				# tree[key]['water_demand'] = tree[key]['demand']
				tree[key]['water_demand'] = 'weekday_hotwater*1'
				del tree[key]['demand']
		# ZIPload Controller properties
		if ('name' in tree[key]) and (tree[key].get('object') == 'ZIPload'):
			totalZIP += 1
			# Only loads named 'responsive*' get frequency control; unresponsive
			# loads are counted but left unmodified.
			if tree[key]['name'].startswith('responsive'):
				gbZIP += 1
				# Frequency control parameters
				tree[key]['enable_freq_control'] = 'true'
				tree[key]['measured_frequency'] = 'frequency.value'
				tree[key]['freq_lowlimit'] = 59
				tree[key]['freq_uplimit'] = 61
				# tree[key]['average_delay_time'] = 60
				# Voltage control parameters
				# tree[key]['enable_volt_control'] = 'true'
				# tree[key]['volt_lowlimit'] = 240.4
				# tree[key]['volt_uplimit'] = 241.4
				# Lock Mode parameters
				# tree[key]['enable_lock'] = 'temp_lock_enable'
				# tree[key]['lock_STATUS'] = 'temp_lock_status'
				tree[key]['controller_priority'] = 4321 #default:lock>freq>volt>therm
				# tree[key]['controller_priority'] = 2431 #freq>volt>lock>therm
				# tree[key]['groupid'] = 'fan'
	# Attach collector for total network load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=triplex_meter"', 'property':'sum(measured_real_power)', 'interval':60, 'file':'allMeterPower.csv'}
	# Attach collector for total waterheater load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=waterheater"', 'property':'sum(actual_load)', 'interval':60, 'file':'allWaterheaterLoad.csv'}
	# Attach collector for total ZIPload power/load
	tree[feeder.getMaxKey(tree)+1] = {'object':'collector', 'group':'"class=ZIPload"', 'property':'sum(base_power)', 'interval':60, 'file':'allZIPloadPower.csv'}
	# Attach recorder for each ZIPload power/load
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'base_power', 'interval':60, 'file':'eachZIPloadPower.csv'}
	# Attach recorder for all ZIPloads demand_rate
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=ZIPload"', 'property':'demand_rate', 'interval':60, 'file':'allZIPloadDemand.csv'}
	# Attach recorder for waterheaters on/off
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'is_waterheater_on', 'interval':60, 'file':'allWaterheaterOn.csv'}
	# Attach recorder for waterheater tank temperatures
	tree[feeder.getMaxKey(tree)+1] = {'object':'group_recorder', 'group':'"class=waterheater"', 'property':'temperature', 'interval':60, 'file':'allWaterheaterTemp.csv'}
	# Attach recorders for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=node"', 'interval':60}
	for phase in ['A','B','C']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'VoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# Attach recorders for system voltage map, triplex:
	stub = {'object':'group_recorder', 'group':'"class=triplex_node"', 'interval':60}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'nVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	# And get meters for system voltage map:
	stub = {'object':'group_recorder', 'group':'"class=triplex_meter"', 'interval':60}
	for phase in ['1','2']:
		copyStub = dict(stub)
		copyStub['property'] = 'voltage_' + phase
		copyStub['file'] = phase.lower() + 'mVoltDump.csv'
		tree[feeder.getMaxKey(tree) + 1] = copyStub
	feeder.adjustTime(tree=tree, simLength=float(inputDict["simLength"]), simLengthUnits=inputDict["simLengthUnits"], simStartDate=inputDict["simStartDate"])
	# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
	rawOut = gridlabd.runInFilesystem(tree, attachments=feederJson["attachments"], keepFiles=True, workDir=pJoin(modelDir))
	outData = {}
	# Std Err and Std Out
	outData['stderr'] = rawOut['stderr']
	outData['stdout'] = rawOut['stdout']
	# Time Stamps: take the first recorder CSV that carries a timestamp column.
	for key in rawOut:
		if '# timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# timestamp']
			break
		elif '# property.. timestamp' in rawOut[key]:
			outData['timeStamps'] = rawOut[key]['# property.. timestamp']
		else:
			outData['timeStamps'] = []
	# Day/Month Aggregation Setup:
	stamps = outData.get('timeStamps',[])
	level = inputDict.get('simLengthUnits','hours')
	# Climate
	for key in rawOut:
		if key.startswith('Climate_') and key.endswith('.csv'):
			outData['climate'] = {}
			outData['climate']['Rain Fall (in/h)'] = hdmAgg(rawOut[key].get('rainfall'), sum, level)
			outData['climate']['Wind Speed (m/s)'] = hdmAgg(rawOut[key].get('wind_speed'), avg, level)
			outData['climate']['Temperature (F)'] = hdmAgg(rawOut[key].get('temperature'), max, level)
			outData['climate']['Snow Depth (in)'] = hdmAgg(rawOut[key].get('snowdepth'), max, level)
			outData['climate']['Direct Normal (W/sf)'] = hdmAgg(rawOut[key].get('solar_direct'), sum, level)
			#outData['climate']['Global Horizontal (W/sf)'] = hdmAgg(rawOut[key].get('solar_global'), sum, level)
			climateWbySFList= hdmAgg(rawOut[key].get('solar_global'), sum, level)
			#converting W/sf to W/sm
			climateWbySMList= [x*10.76392 for x in climateWbySFList]
			outData['climate']['Global Horizontal (W/sm)']=climateWbySMList
	# Voltage Band (recorded voltage_12 magnitudes are halved to per-leg values)
	if 'VoltageJiggle.csv' in rawOut:
		outData['allMeterVoltages'] = {}
		outData['allMeterVoltages']['Min'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['min(voltage_12.mag)']], min, level)
		outData['allMeterVoltages']['Mean'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['mean(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['StdDev'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['std(voltage_12.mag)']], avg, level)
		outData['allMeterVoltages']['Max'] = hdmAgg([float(i / 2) for i in rawOut['VoltageJiggle.csv']['max(voltage_12.mag)']], max, level)
	# Power Consumption
	outData['Consumption'] = {}
	# Set default value to be 0, avoiding missing value when computing Loads
	outData['Consumption']['Power'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['Losses'] = [0] * int(inputDict["simLength"])
	outData['Consumption']['DG'] = [0] * int(inputDict["simLength"])
	# Accumulate substation power, DG output, and line/transformer losses from the
	# per-object recorder CSVs. NOTE(review): because the defaults above pre-populate
	# 'Power'/'DG'/'Losses', the "not in" branches below can never take the
	# first-assignment path; every contribution goes through vecSum with the zero
	# vector — harmless, but the in/else split is dead code.
	for key in rawOut:
		if key.startswith('SwingKids_') and key.endswith('.csv'):
			oneSwingPower = hdmAgg(vecPyth(rawOut[key]['sum(power_in.real)'],rawOut[key]['sum(power_in.imag)']), avg, level)
			if 'Power' not in outData['Consumption']:
				outData['Consumption']['Power'] = oneSwingPower
			else:
				outData['Consumption']['Power'] = vecSum(oneSwingPower,outData['Consumption']['Power'])
		elif key.startswith('Inverter_') and key.endswith('.csv'):
			realA = rawOut[key]['power_A.real']
			realB = rawOut[key]['power_B.real']
			realC = rawOut[key]['power_C.real']
			imagA = rawOut[key]['power_A.imag']
			imagB = rawOut[key]['power_B.imag']
			imagC = rawOut[key]['power_C.imag']
			oneDgPower = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key.startswith('Windmill_') and key.endswith('.csv'):
			# Windmills record V and I separately; apparent power is |V|*|I| per phase.
			vrA = rawOut[key]['voltage_A.real']
			vrB = rawOut[key]['voltage_B.real']
			vrC = rawOut[key]['voltage_C.real']
			viA = rawOut[key]['voltage_A.imag']
			viB = rawOut[key]['voltage_B.imag']
			viC = rawOut[key]['voltage_C.imag']
			crB = rawOut[key]['current_B.real']
			crA = rawOut[key]['current_A.real']
			crC = rawOut[key]['current_C.real']
			ciA = rawOut[key]['current_A.imag']
			ciB = rawOut[key]['current_B.imag']
			ciC = rawOut[key]['current_C.imag']
			powerA = vecProd(vecPyth(vrA,viA),vecPyth(crA,ciA))
			powerB = vecProd(vecPyth(vrB,viB),vecPyth(crB,ciB))
			powerC = vecProd(vecPyth(vrC,viC),vecPyth(crC,ciC))
			oneDgPower = hdmAgg(vecSum(powerA,powerB,powerC), avg, level)
			if 'DG' not in outData['Consumption']:
				outData['Consumption']['DG'] = oneDgPower
			else:
				outData['Consumption']['DG'] = vecSum(oneDgPower,outData['Consumption']['DG'])
		elif key in ['OverheadLosses.csv', 'UndergroundLosses.csv', 'TriplexLosses.csv', 'TransformerLosses.csv']:
			realA = rawOut[key]['sum(power_losses_A.real)']
			imagA = rawOut[key]['sum(power_losses_A.imag)']
			realB = rawOut[key]['sum(power_losses_B.real)']
			imagB = rawOut[key]['sum(power_losses_B.imag)']
			realC = rawOut[key]['sum(power_losses_C.real)']
			imagC = rawOut[key]['sum(power_losses_C.imag)']
			oneLoss = hdmAgg(vecSum(vecPyth(realA,imagA),vecPyth(realB,imagB),vecPyth(realC,imagC)), avg, level)
			if 'Losses' not in outData['Consumption']:
				outData['Consumption']['Losses'] = oneLoss
			else:
				outData['Consumption']['Losses'] = vecSum(oneLoss,outData['Consumption']['Losses'])
		elif key.startswith('Regulator_') and key.endswith('.csv'):
			#split function to strip off .csv from filename and user rest of the file name as key. for example- Regulator_VR10.csv -> key would be Regulator_VR10
			regName=""
			regName = key
			newkey=regName.split(".")[0]
			outData[newkey] ={}
			# The [0]*simLength defaults are immediately overwritten below —
			# kept only as a guard against missing columns upstream.
			outData[newkey]['RegTapA'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapB'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapC'] = [0] * int(inputDict["simLength"])
			outData[newkey]['RegTapA'] = rawOut[key]['tap_A']
			outData[newkey]['RegTapB'] = rawOut[key]['tap_B']
			outData[newkey]['RegTapC'] = rawOut[key]['tap_C']
			outData[newkey]['RegPhases'] = rawOut[key]['phases'][0]
		elif key.startswith('Capacitor_') and key.endswith('.csv'):
			capName=""
			capName = key
			newkey=capName.split(".")[0]
			outData[newkey] ={}
			outData[newkey]['Cap1A'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1B'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1C'] = [0] * int(inputDict["simLength"])
			outData[newkey]['Cap1A'] = rawOut[key]['switchA']
			outData[newkey]['Cap1B'] = rawOut[key]['switchB']
			outData[newkey]['Cap1C'] = rawOut[key]['switchC']
			outData[newkey]['CapPhases'] = rawOut[key]['phases'][0]
	# Print gridBallast Outputs to allOutputData.json
	outData['gridBallast'] = {}
	if 'allMeterPower.csv' in rawOut:
		outData['gridBallast']['totalNetworkLoad'] = [x / 1000 for x in rawOut.get('allMeterPower.csv')['sum(measured_real_power)']] #Convert W to kW
	if ('allZIPloadPower.csv' in rawOut) and ('allWaterheaterLoad.csv' in rawOut):
		# Controllable (sheddable) load = waterheater load + ZIPload base power, per timestep.
		outData['gridBallast']['availabilityMagnitude'] = [x[0] + x[1] for x in zip(rawOut.get('allWaterheaterLoad.csv')['sum(actual_load)'], rawOut.get('allZIPloadPower.csv')['sum(base_power)'])]
	if 'allZIPloadDemand.csv' in rawOut:
		outData['gridBallast']['ZIPloadDemand'] = {}
		for key in rawOut['allZIPloadDemand.csv']:
			if (key.startswith('ZIPload')) or (key.startswith('responsive')) or (key.startswith('unresponsive')):
				outData['gridBallast']['ZIPloadDemand'][key] = rawOut.get('allZIPloadDemand.csv')[key]
	if 'eachZIPloadPower.csv' in rawOut:
		outData['gridBallast']['ZIPloadPower'] = {}
		for key in rawOut['eachZIPloadPower.csv']:
			if (key.startswith('ZIPload')) or (key.startswith('responsive')) or (key.startswith('unresponsive')):
				outData['gridBallast']['ZIPloadPower'][key] = rawOut.get('eachZIPloadPower.csv')[key]
	if 'allWaterheaterOn.csv' in rawOut:
		outData['gridBallast']['waterheaterOn'] = {}
		for key in rawOut['allWaterheaterOn.csv']:
			if (key.startswith('waterheater')) or (key.startswith('waterHeater')):
				outData['gridBallast']['waterheaterOn'][key] = rawOut.get('allWaterheaterOn.csv')[key]
	if 'allWaterheaterTemp.csv' in rawOut:
		outData['gridBallast']['waterheaterTemp'] = {}
		for key in rawOut['allWaterheaterTemp.csv']:
			if (key.startswith('waterheater')) or (key.startswith('waterHeater')):
				outData['gridBallast']['waterheaterTemp'][key] = rawOut.get('allWaterheaterTemp.csv')[key]
	# System check - linux doesn't support newer GridLAB-D versions
	if sys.platform == 'linux2':
		pass
	else:
		# NOTE(review): Python-2 integer division here — penetrationLevel truncates
		# toward zero unless one operand happens to be a float. Confirm intent.
		outData['gridBallast']['penetrationLevel'] = 100*(gbWH+gbZIP)/(totalWH+totalZIP)
		# Frequency Player: parse the 'timestamp,value' player file into a float series.
		inArray = feederJson['attachments']['frequency.PLAYER'].split('\n')
		tempArray = []
		for each in inArray:
			x = each.split(',')
			y = float(x[1])
			tempArray.append(y)
		outData['frequencyPlayer'] = tempArray
	# EventTime calculations
	eventTime = inputDict['eventTime']
	eventLength = inputDict['eventLength'].split(':')
	eventDuration = datetime.timedelta(hours=int(eventLength[0]), minutes=int(eventLength[1]))
	eventStart = datetime.datetime.strptime(eventTime, '%Y-%m-%d %H:%M')
	eventEnd = eventStart + eventDuration
	outData['gridBallast']['eventStart'] = str(eventStart)
	outData['gridBallast']['eventEnd'] = str(eventEnd)
	# x-axis plot bounds: pad the event window by 30 minutes on each side.
	outData['gridBallast']['xMin'] = str(eventStart - datetime.timedelta(minutes=30))
	outData['gridBallast']['xMax'] = str(eventEnd + datetime.timedelta(minutes=30))
	# Convert string to date
	# HACK: remove timezones, inconsistency in matching format
	timeStampsDebug = [x[:19] for x in outData['timeStamps']]
	dateTimeStamps = [datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S') for x in timeStampsDebug]
	# Raises ValueError if eventEnd doesn't land exactly on a recorded timestamp.
	eventEndIdx = dateTimeStamps.index(eventEnd)
	# Recovery Time: first timestep at or after eventEnd where any waterheater is on.
	whOn = outData['gridBallast']['waterheaterOn']
	whOnList = whOn.values()
	whOnZip = zip(*whOnList)
	whOnSum = [sum(x) for x in whOnZip]
	anyOn = [x > 0 for x in whOnSum]
	tRecIdx = anyOn.index(True, eventEndIdx)
	tRec = dateTimeStamps[tRecIdx]
	recoveryTime = tRec - eventEnd
	outData['gridBallast']['recoveryTime'] = str(recoveryTime)
	# Waterheaters Off-Duration
	offDuration = tRec - eventStart
	outData['gridBallast']['offDuration'] = str(offDuration)
	# Reserve Magnitude (RM): controllable load as a percent of total network load.
	availMag = outData['gridBallast']['availabilityMagnitude']
	totalNetLoad = outData['gridBallast']['totalNetworkLoad']
	availPerc = [100 * x[0]/x[1] for x in zip(availMag,totalNetLoad)]
	outData['gridBallast']['availabilityPercent'] = availPerc
	outData['gridBallast']['rm'] = [100 - x for x in availPerc]
	# Average RM during event
	# NOTE(review): the == filter keeps only the two endpoint samples, not the whole
	# event window — presumably '>= eventStart and <= eventEnd' was intended; confirm.
	eventRM = [100 - x[1] for x in zip(dateTimeStamps, availPerc) if (x[0] == eventStart) or (x[0] == eventEnd)]
	outData['gridBallast']['rmAvg'] = np.mean(eventRM)
	# Reserve Magnitude Variability Tolerance (RMVT)
	outData['gridBallast']['rmvt'] = np.std(eventRM)
	# Availability: percent of non-event timesteps where RM exceeds the rmt threshold.
	rmt = 7
	available = [x[1] > rmt for x in zip(dateTimeStamps, availPerc) if (x[0] < eventStart) or (x[0] > eventEnd)]
	# NOTE(review): denominator uses only the minutes field of eventLength — looks
	# like it should subtract the event's length in timesteps; confirm against spec.
	outData['gridBallast']['availability'] = 100.0 * sum(available) / (int(inputDict['simLength']) - int(eventLength[1]) - 1)
	# Waterheater Temperature Drop calculations: per timestep, count tanks below the QoS floor.
	whTemp = outData['gridBallast']['waterheaterTemp']
	whTempList = whTemp.values()
	whTempZip = zip(*whTempList)
	whTempDrops = []
	LOWER_LIMIT_TEMP = 110 # Used for calculating quality of service. Typical hot shower temp = 105 F.
	for time in whTempZip:
		tempDrop = sum([t < LOWER_LIMIT_TEMP for t in time])
		whTempDrops.append(tempDrop)
	outData['gridBallast']['waterheaterTempDrops'] = whTempDrops
	# ZIPload calculations for Availability and QoS: a "drop" is a load drawing zero
	# power while its demand is positive (i.e. shed while wanted).
	zPower = outData['gridBallast']['ZIPloadPower']
	zPowerList = zPower.values()
	zPowerZip = zip(*zPowerList)
	zDemand = outData['gridBallast']['ZIPloadDemand']
	zDemandList = zDemand.values()
	zDemandZip = zip(*zDemandList)
	zDrops = []
	for x, y in zip(zPowerZip,zDemandZip):
		zDrop = 0
		for i in range(len(x)):
			if (x[i] == 0) and (y[i] > 0):
				zDrop += 1
		zDrops.append(zDrop)
	outData['gridBallast']['qualityDrops'] = [x + y for x, y in zip(zDrops, whTempDrops)]
	# What percentage of our keys have lat lon data?
	latKeys = [tree[key]['latitude'] for key in tree if 'latitude' in tree[key]]
	latPerc = 1.0*len(latKeys)/len(tree)
	# Fall back to a neato (force-directed) layout when too few nodes carry coordinates.
	if latPerc < 0.25:
		doNeato = True
	else:
		doNeato = False
	# Generate the frames for the system voltage map time traveling chart.
	genTime = generateVoltChart(tree, rawOut, modelDir, neatoLayout=doNeato)
	outData['genTime'] = genTime
	# Aggregate up the timestamps:
	if level=='days':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:10], 'days')
	elif level=='months':
		outData['timeStamps'] = aggSeries(stamps, stamps, lambda x:x[0][0:7], 'months')
	return outData
def work(modelDir, inputDict):
	''' Run the model in its directory.

	Runs a DER interconnection study on the feeder found in modelDir:
	solves powerflow under Peak and Min load, each with the added DER
	On and Off, then records voltage, thermal, reverse-powerflow,
	regulator-tap, flicker, and fault-current/voltage violations plus
	base64-encoded chart PNGs into the returned outData dict.

	modelDir: directory containing exactly one feeder .omd file; charts are
		written there as <name>Chart.png.
	inputDict: model inputs — DER component names, thresholds, optional
		peak/min load CSV blobs, layout choice. Mutated: 'feederName1' is set.
	Returns: outData dict of results for the web front end.
	'''
	outData = {}
	# Locate the feeder file dropped into the model directory.
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict['feederName1'] = feederName
	# Anything other than 'geospatial' layout means draw with graphviz neato.
	if inputDict.get('layoutAlgorithm', 'geospatial') == 'geospatial':
		neato = False
	else:
		neato = True
	path = pJoin(modelDir, feederName + '.omd')
	if path.endswith('.glm'):
		tree = feeder.parse(path)
		attachments = []
	elif path.endswith('.omd'):
		# NOTE: the .omd used to be opened and json-parsed twice (once before
		# this branch and once here); parsing it a single time is sufficient.
		with open(path) as f:
			omd = json.load(f)
		tree = omd.get('tree', {})
		attachments = omd.get('attachments', [])
	else:
		raise Exception('Invalid input file type. We require a .glm or .omd.')
	# dictionary to hold info on lines present in glm
	edge_bools = dict.fromkeys([
		'underground_line', 'overhead_line', 'triplex_line', 'transformer',
		'regulator', 'fuse', 'switch'
	], False)
	# Get rid of schedules and climate and check for all edge types:
	for key in list(tree.keys()):
		obtype = tree[key].get('object', '')
		if obtype == 'underground_line':
			edge_bools['underground_line'] = True
		elif obtype == 'overhead_line':
			edge_bools['overhead_line'] = True
		elif obtype == 'triplex_line':
			edge_bools['triplex_line'] = True
		elif obtype == 'transformer':
			edge_bools['transformer'] = True
		elif obtype == 'regulator':
			edge_bools['regulator'] = True
		elif obtype == 'fuse':
			edge_bools['fuse'] = True
		elif obtype == 'switch':
			edge_bools['switch'] = True
		if tree[key].get('argument', '') == '\"schedules.glm\"' or tree[key].get(
				'tmyfile', '') != '':
			del tree[key]
	# Make sure we have a voltDump:
	def safeInt(x):
		''' Coerce a tree key to int; non-numeric keys count as 0. '''
		try:
			return int(x)
		except:
			return 0
	biggestKey = max([safeInt(x) for x in tree.keys()])
	tree[str(biggestKey * 10)] = {
		'object': 'voltdump',
		'filename': 'voltDump.csv'
	}
	tree[str(biggestKey * 10 + 1)] = {
		'object': 'currdump',
		'filename': 'currDump.csv'
	}
	# Line rating dumps
	tree[feeder.getMaxKey(tree) + 1] = {'module': 'tape'}
	for key in edge_bools.keys():
		if edge_bools[key]:
			tree[feeder.getMaxKey(tree) + 1] = {
				'object': 'group_recorder',
				'group': '"class=' + key + '"',
				'limit': 1,
				'property': 'continuous_rating',
				'file': key + '_cont_rating.csv'
			}
	# Record per-phase regulator tap positions so tap movement can be
	# compared between the DER-on and DER-off runs.
	if edge_bools['regulator']:
		tree[feeder.getMaxKey(tree) + 1] = {
			'object': 'group_recorder',
			'group': '"class=regulator"',
			'limit': 1000,
			'property': 'tap_A',
			'file': 'tap_A.csv',
			'interval': 0
		}
		tree[feeder.getMaxKey(tree) + 1] = {
			'object': 'group_recorder',
			'group': '"class=regulator"',
			'limit': 1000,
			'property': 'tap_B',
			'file': 'tap_B.csv',
			'interval': 0
		}
		tree[feeder.getMaxKey(tree) + 1] = {
			'object': 'group_recorder',
			'group': '"class=regulator"',
			'limit': 1000,
			'property': 'tap_C',
			'file': 'tap_C.csv',
			'interval': 0
		}
	# get start and stop time for the simulation
	[startTime, stopTime] = ['', '']
	for key in tree.keys():
		starttime = tree[key].get('starttime', '')
		stoptime = tree[key].get('stoptime', '')
		if starttime != '' and stoptime != '':
			startTime = tree[key]['starttime']
			stopTime = tree[key]['stoptime']
			break
	# Map to speed up name lookups.
	nameToIndex = {tree[key].get('name', ''): key for key in tree.keys()}
	# find the key of the relevant added DER components
	addedDerKey = nameToIndex[inputDict['newGeneration']]
	addedDerInverterKey = nameToIndex[tree[addedDerKey]['parent']]
	addedBreakKey = nameToIndex[inputDict['newGenerationBreaker']]
	poi = tree[addedBreakKey]['to']
	# set solar generation to provided insolation value, clamped to [0, 1000] W/m^2
	insolation = float(inputDict['newGenerationInsolation'])
	if insolation > 1000:
		insolation = 1000
	elif insolation < 0:
		insolation = 0
	# cant set insolation directly but without climate object it defaults to 1000
	# which is about 10x max sun output and we can set shading factor between 0 and 1
	# to effectively control insolation
	tree[addedDerKey]['shading_factor'] = insolation / 1000
	# initialize violation accumulators and thresholds
	flickerViolations = []
	flickerThreshold = float(inputDict['flickerThreshold'])
	voltageViolations = []
	[upperVoltThresh, lowerVoltThresh, lowerVoltThresh600] = [1.05, 0.95, 0.975]
	thermalViolations = []
	thermalThreshold = float(inputDict['thermalThreshold']) / 100
	reversePowerFlow = []
	tapViolations = []
	tapThreshold = float(inputDict['tapThreshold'])
	faults = ['SLG-A', 'SLG-B', 'SLG-C', 'TLG']
	faultLocs = [
		inputDict['newGenerationBreaker'], inputDict['newGenerationStepUp']
	]
	faultBreaker = [[] for i in range(2 * len(faults))
					]  # the 2 is for the 2 load conditions
	faultStepUp = [[] for i in range(2 * len(faults))]
	faultCurrentViolations = []
	faultCurrentThreshold = float(inputDict['faultCurrentThreshold'])
	faultPOIVolts = []
	faultVoltsThreshold = float(inputDict['faultVoltsThreshold'])
	# run analysis for both load conditions
	for loadCondition in ['Peak', 'Min']:
		# if a peak load file is provided, use it to set peak loads in the tree
		if (loadCondition == 'Peak') and (inputDict['peakLoadData'] != ''):
			peakLoadData = inputDict['peakLoadData'].split('\r\n')
			for data in peakLoadData:
				if str(data) != '':
					key = data.split(',')[0]
					val = data.split(',')[1]
					tree[key]['power_12'] = val
		elif (loadCondition == 'Min'):
			# if a min load file is provided use is to set the min loads
			if inputDict['minLoadData'] != '':
				minLoadData = inputDict['minLoadData'].split('\r\n')
				for data in minLoadData:
					if str(data) != '':
						key = data.split(',')[0]
						val = data.split(',')[1]
						tree[key]['power_12'] = val
			else:
				# if no min load file is provided set min load to be 1/3 of peak + noise
				for key in tree.keys():
					obtype = tree[key].get('object', '')
					if obtype == 'triplex_node':
						load = tree[key].get('power_12', '')
						if load != '':
							load = float(load)
							minLoad = (load / 3) + (load * 0.1 * random.triangular(-1, 1))
							tree[key]['power_12'] = str(minLoad)
		# initialize per-load-condition flicker tracking
		flicker = {}
		[maxFlickerLocation, maxFlickerVal] = ['', 0]
		# run analysis with DER on and off under both load conditions
		for der in ['On', 'Off']:
			# if der is Off set added DER offline, if its On set DER online
			# BUGFIX: was "if der is 'Off':" — identity comparison on a string
			# literal; use equality.
			if der == 'Off':
				tree[addedDerKey]['generator_status'] = 'OFFLINE'
				tree[addedDerInverterKey]['generator_status'] = 'OFFLINE'
			else:  # der is on
				tree[addedDerKey]['generator_status'] = 'ONLINE'
				tree[addedDerInverterKey]['generator_status'] = 'ONLINE'
			# run gridlab solver
			data = runGridlabAndProcessData(tree, attachments, edge_bools,
											workDir=modelDir)
			print(tree[addedDerKey])
			print(tree[addedDerInverterKey])
			# generate voltage, current and thermal plots
			filename = 'voltageDer' + der + loadCondition
			chart = drawPlot(tree, nodeDict=data['percentChangeVolts'],
							neatoLayout=neato, nodeFlagBounds=[114, 126],
							defaultNodeVal=120)
			chart.savefig(pJoin(modelDir, filename + 'Chart.png'))
			with open(pJoin(modelDir, filename + 'Chart.png'), 'rb') as inFile:
				outData[filename] = base64.standard_b64encode(
					inFile.read()).decode('ascii')
			filename = 'currentDer' + der + loadCondition
			chart = drawPlot(tree, nodeDict=data['edgeCurrentSum'], neatoLayout=neato)
			chart.savefig(pJoin(modelDir, filename + 'Chart.png'))
			with open(pJoin(modelDir, filename + 'Chart.png'), 'rb') as inFile:
				outData[filename] = base64.standard_b64encode(
					inFile.read()).decode('ascii')
			filename = 'thermalDer' + der + loadCondition
			chart = drawPlot(tree, nodeDict=data['edgeValsPU'], neatoLayout=neato)
			chart.savefig(pJoin(modelDir, filename + 'Chart.png'))
			with open(pJoin(modelDir, filename + 'Chart.png'), 'rb') as inFile:
				outData[filename] = base64.standard_b64encode(
					inFile.read()).decode('ascii')
			# calculate max and min voltage and track bandwidth violations
			[maxVoltsLocation, maxVoltsVal] = ['', 0]
			[minVoltsLocation, minVoltsVal] = ['', float('inf')]
			for key in data['nodeVolts'].keys():
				voltVal = float(data['nodeVolts'][key])
				nominalVoltVal = float(data['nominalVolts'][key])
				if maxVoltsVal <= voltVal:
					maxVoltsVal = voltVal
					maxVoltsLocation = key
				if minVoltsVal >= voltVal:
					minVoltsVal = voltVal
					minVoltsLocation = key
				change = 100 * ((voltVal - nominalVoltVal) / nominalVoltVal)
				# Above 600V (primary) a tighter lower band applies.
				if voltVal > 600:
					violation = (voltVal >= (upperVoltThresh*nominalVoltVal)) or \
						(voltVal <= (lowerVoltThresh600*nominalVoltVal))
				else:
					violation = (voltVal >= (upperVoltThresh*nominalVoltVal)) or \
						(voltVal <= (lowerVoltThresh*nominalVoltVal))
				content = [key, nominalVoltVal, voltVal, change, \
					loadCondition +' Load, DER ' + der,violation]
				voltageViolations.append(content)
			outData['maxVolts' + loadCondition + 'Der' + der] = [maxVoltsLocation, maxVoltsVal]
			outData['minVolts' + loadCondition + 'Der' + der] = [minVoltsLocation, minVoltsVal]
			# check for thermal violations
			for key in data['edgeValsPU'].keys():
				thermalVal = float(data['edgeValsPU'][key])
				content = [key, 100*thermalVal,\
					loadCondition+' Load, DER '+der,(thermalVal>=thermalThreshold)]
				thermalViolations.append(content)
			if edge_bools['regulator']:
				# check for reverse regulator powerflow
				for key in tree.keys():
					obtype = tree[key].get("object", "")
					obname = tree[key].get("name", "")
					if obtype == 'regulator':
						powerVal = float(data['edgePower'][obname])
						content = [obname, powerVal,\
							loadCondition+' Load, DER '+der,(powerVal<0)]
						reversePowerFlow.append(content)
				# check for tap_position values and violations.
				# The DER-on pass stashes tap positions; the DER-off pass diffs
				# against them.
				if der == 'On':
					tapPositions = copy.deepcopy(data['tapPositions'])
				else:  # der off
					for tapType in ['tapA', 'tapB', 'tapC']:
						for key in tapPositions[tapType].keys():
							# calculate tapPositions
							tapDerOn = int(tapPositions[tapType][key])
							tapDerOff = int(data['tapPositions'][tapType][key])
							tapDifference = abs(tapDerOn - tapDerOff)
							# check for violations
							content = [loadCondition, key+' '+tapType, tapDerOn, \
								tapDerOff,tapDifference, (tapDifference>=tapThreshold)]
							tapViolations.append(content)
			#induce faults and measure fault currents
			for faultLocation in faultLocs:
				for faultNum, faultType in enumerate(faults):
					faultIndex = faultNum
					if loadCondition == 'Min':
						faultIndex = faultNum + len(faults)
					treeCopy = createTreeWithFault( tree, \
						faultType, faultLocation, startTime, stopTime )
					faultData = runGridlabAndProcessData(treeCopy, attachments, \
						edge_bools, workDir=modelDir)
					faultVolts = faultData['nodeVolts']
					faultCurrents = faultData['edgeCurrentSum']
					# get fault current values at the breaker when
					# the fault is at the breaker
					if faultLocation == inputDict['newGenerationBreaker']:
						if der == 'On':
							faultBreaker[faultIndex] = [
								loadCondition, faultType
							]
							faultBreaker[faultIndex].append(\
								float(faultCurrents[\
									inputDict['newGenerationBreaker']]))
						else:  #der off
							faultBreaker[faultIndex].append(\
								float(faultCurrents[inputDict['newGenerationBreaker']]))
							# DER contribution = on-current minus off-current.
							faultBreaker[faultIndex].append(\
								faultBreaker[faultIndex][2] - \
								faultBreaker[faultIndex][3])
						# get fault voltage values at POI
						preFaultval = data['nodeVolts'][poi]
						postFaultVal = faultVolts[poi]
						percentChange = 100 * (postFaultVal / preFaultval)
						faultPOIVolts.append(['Der '+ der + ' ' + \
							loadCondition + ' Load', poi, faultType, preFaultval,\
							postFaultVal, percentChange, \
							(percentChange>=faultVoltsThreshold)])
					# get fault current values at the transformer when
					# the fault is at the transformer
					else:  #faultLocation == newGenerationStepUp
						if der == 'On':
							faultStepUp[faultIndex] = [
								loadCondition, faultType
							]
							faultStepUp[faultIndex].append(\
								float(faultCurrents[\
									inputDict['newGenerationStepUp']]))
						else:  #der off
							faultStepUp[faultIndex].append(\
								float(faultCurrents[inputDict[\
									'newGenerationStepUp']]))
							faultStepUp[faultIndex].append(\
								faultStepUp[faultIndex][2] - \
								faultStepUp[faultIndex][3])
					# get fault violations when der is on
					if der == 'On':
						for key in faultCurrents.keys():
							preFaultval = float(data['edgeCurrentSum'][key])
							postFaultVal = float(faultCurrents[key])
							difference = abs(preFaultval - postFaultVal)
							if preFaultval == 0:
								percentChange = 0
							else:
								percentChange = 100 * (difference / preFaultval)
							content = [loadCondition, faultLocation, faultType, key, \
								preFaultval, postFaultVal, percentChange, \
								(percentChange>=faultCurrentThreshold)]
							faultCurrentViolations.append(content)
			# calculate flicker, keep track of max, and violations
			if der == 'On':
				flicker = copy.deepcopy(data['nodeVolts'])
			else:  # der off
				for key in flicker.keys():
					# calculate flicker as percent voltage dip DER-on vs DER-off
					derOn = float(flicker[key])
					derOff = float(data['nodeVolts'][key])
					flickerVal = 100 * (1 - (derOff / derOn))
					flicker[key] = flickerVal
					# check for max
					if maxFlickerVal <= flickerVal:
						maxFlickerVal = flickerVal
						maxFlickerLocation = key
					# check for violations
					content = [key, flickerVal,loadCondition+' Load',\
						(flickerVal>=flickerThreshold)]
					flickerViolations.append(content)
		# plot flicker
		filename = 'flicker' + loadCondition
		chart = drawPlot(tree, nodeDict=flicker, neatoLayout=neato)
		chart.savefig(pJoin(modelDir, filename + 'Chart.png'))
		with open(pJoin(modelDir, filename + 'Chart.png'), "rb") as inFile:
			outData[filename] = base64.standard_b64encode(
				inFile.read()).decode('ascii')
		# save max flicker info to output dictionary
		outData['maxFlicker' + loadCondition] = [maxFlickerLocation, maxFlickerVal]
	outData['voltageViolations'] = voltageViolations
	outData['flickerViolations'] = flickerViolations
	outData['thermalViolations'] = thermalViolations
	outData['reversePowerFlow'] = reversePowerFlow
	outData['tapViolations'] = tapViolations
	outData['faultBreaker'] = faultBreaker
	outData['faultStepUp'] = faultStepUp
	outData['faultCurrentViolations'] = faultCurrentViolations
	outData['faultPOIVolts'] = faultPOIVolts
	return outData
def drawPlot(path, workDir=None, neatoLayout=False, edgeLabs=None, nodeLabs=None, edgeCol=None, nodeCol=None, faultLoc=None, faultType=None, customColormap=False, scaleMin=None, scaleMax=None, rezSqIn=400, simTime='2000-01-01 0:00:00', loadLoc=None):
	''' Draw a color-coded map of the voltage drop on a feeder.
	path is the full path to the GridLAB-D .glm file or OMF .omd file.
	workDir is where GridLAB-D will run, if it's None then a temp dir is used.
	neatoLayout=True means the circuit is displayed using a force-layout approach.
	edgeCol property must be either 'Current', 'Power', 'Rating', 'PercentOfRating', or None
	nodeCol property must be either 'Voltage', 'VoltageImbalance', 'perUnitVoltage', 'perUnit120Voltage', or None
	edgeLabs and nodeLabs properties must be either 'Name', 'Value', or None
	edgeCol and nodeCol can be set to false to avoid coloring edges or nodes
	customColormap=True means use a one that is nicely scaled to perunit values highlighting extremes.
	faultType and faultLoc are the type of fault and the name of the line that it occurs on.
	Returns a matplotlib object.'''
	# Be quiet matplotlib:
	# warnings.filterwarnings("ignore")
	# Parse the circuit out of either input format.
	if path.endswith('.glm'):
		tree = feeder.parse(path)
		attachments = []
	elif path.endswith('.omd'):
		with open(path) as f:
			omd = json.load(f)
		tree = omd.get('tree', {})
		attachments = omd.get('attachments', [])
	else:
		raise Exception('Invalid input file type. We require a .glm or .omd.')
	#print path
	# add fault object to tree
	def safeInt(x):
		# Tree keys that aren't numeric count as 0.
		try:
			return int(x)
		except:
			return 0
	biggestKey = max([safeInt(x) for x in tree.keys()])
	# Add Reliability module
	tree[str(biggestKey * 10)] = {
		"module": "reliability",
		"maximum_event_length": "18000",
		"report_event_log": "true"
	}
	# Build a 20-second clock window starting at simTime; the fault is active
	# over this whole range.
	CLOCK_START = simTime
	dt_start = parser.parse(CLOCK_START)
	dt_end = dt_start + relativedelta(seconds=+20)
	CLOCK_END = str(dt_end)
	CLOCK_RANGE = CLOCK_START + ',' + CLOCK_END
	if faultType != None:
		# Add eventgen object (the fault)
		tree[str(biggestKey * 10 + 1)] = {
			"object": "eventgen",
			"name": "ManualEventGen",
			"parent": "RelMetrics",
			"fault_type": faultType,
			"manual_outages": faultLoc + ',' + CLOCK_RANGE
		}  # TODO: change CLOCK_RANGE to read the actual start and stop time, not just hard-coded
		# Add fault_check object
		tree[str(biggestKey * 10 + 2)] = {
			"object": "fault_check",
			"name": "test_fault",
			"check_mode": "ONCHANGE",
			"eventgen_object": "ManualEventGen",
			"output_filename": "Fault_check_out.txt"
		}
		# Add reliabilty metrics object
		tree[str(biggestKey * 10 + 3)] = {
			"object": "metrics",
			"name": "RelMetrics",
			"report_file": "Metrics_Output.csv",
			"module_metrics_object": "PwrMetrics",
			"metrics_of_interest": '"SAIFI,SAIDI,CAIDI,ASAI,MAIFI"',
			"customer_group": '"groupid=METERTEST"',
			"metric_interval": "5 h",
			"report_interval": "5 h"
		}
		# Add power_metrics object
		tree[str(biggestKey * 10 + 4)] = {
			"object": "power_metrics",
			"name": "PwrMetrics",
			"base_time_value": "1 h"
		}
		# HACK: set groupid for all meters so outage stats are collected.
		noMeters = True
		for key in tree:
			if tree[key].get('object', '') in ['meter', 'triplex_meter']:
				tree[key]['groupid'] = "METERTEST"
				noMeters = False
		if noMeters:
			raise Exception(
				"No meters detected on the circuit. Please add at least one meter to allow for collection of outage statistics."
			)
	# Force the circuit clock onto our 20-second window.
	for key in tree:
		if 'clock' in tree[key]:
			tree[key]['starttime'] = "'" + CLOCK_START + "'"
			tree[key]['stoptime'] = "'" + CLOCK_END + "'"
	# dictionary to hold info on lines present in glm
	edge_bools = dict.fromkeys([
		'underground_line', 'overhead_line', 'triplex_line', 'transformer',
		'regulator', 'fuse', 'switch'
	], False)
	# Map to speed up name lookups.
	nameToIndex = {tree[key].get('name', ''): key for key in tree.keys()}
	# Get rid of schedules and climate and check for all edge types:
	for key in list(tree.keys()):
		obtype = tree[key].get("object", "")
		if obtype == 'underground_line':
			edge_bools['underground_line'] = True
		elif obtype == 'overhead_line':
			edge_bools['overhead_line'] = True
		elif obtype == 'triplex_line':
			edge_bools['triplex_line'] = True
		elif obtype == 'transformer':
			edge_bools['transformer'] = True
		elif obtype == 'regulator':
			edge_bools['regulator'] = True
		elif obtype == 'fuse':
			edge_bools['fuse'] = True
		elif obtype == 'switch':
			edge_bools['switch'] = True
		if tree[key].get("argument", "") == "\"schedules.glm\"" or tree[key].get(
				"tmyfile", "") != "":
			del tree[key]
	# Make sure we have a voltage dump and current dump:
	tree[str(biggestKey * 10 + 5)] = {
		"object": "voltdump",
		"filename": "voltDump.csv"
	}
	tree[str(biggestKey * 10 + 6)] = {
		"object": "currdump",
		"filename": "currDump.csv"
	}
	# Line rating dumps
	tree[feeder.getMaxKey(tree) + 1] = {'module': 'tape'}
	for key in edge_bools.keys():
		if edge_bools[key]:
			tree[feeder.getMaxKey(tree) + 1] = {
				'object': 'group_recorder',
				'group': '"class=' + key + '"',
				'property': 'continuous_rating',
				'file': key + '_cont_rating.csv'
			}
	#Record initial status readout of each fuse/recloser/switch/sectionalizer before running
	# Reminder: fuse objects have 'phase_X_status' instead of 'phase_X_state'
	protDevices = dict.fromkeys(
		['fuse', 'recloser', 'switch', 'sectionalizer'], False)
	#dictionary of protective device initial states for each phase
	protDevInitStatus = {}
	#dictionary of protective devices final states for each phase after running Gridlab-D
	protDevFinalStatus = {}
	#dictionary of protective device types to help the testing and debugging process
	protDevTypes = {}
	protDevOpModes = {}
	# Snapshot each protective device's per-phase state before the run.
	for key in tree:
		obj = tree[key]
		obType = obj.get('object')
		if obType in protDevices.keys():
			obName = obj.get('name', '')
			protDevTypes[obName] = obType
			if obType != 'fuse':
				protDevOpModes[obName] = obj.get('operating_mode', 'INDIVIDUAL')
			protDevices[obType] = True
			protDevInitStatus[obName] = {}
			protDevFinalStatus[obName] = {}
			for phase in ['A', 'B', 'C']:
				if obType != 'fuse':
					phaseState = obj.get('phase_' + phase + '_state', 'CLOSED')
				else:
					phaseState = obj.get('phase_' + phase + '_status', 'GOOD')
				if phase in obj.get('phases', ''):
					protDevInitStatus[obName][phase] = phaseState
	#print protDevInitStatus
	#Create a recorder for protective device states
	for key in protDevices.keys():
		if protDevices[key]:
			for phase in ['A', 'B', 'C']:
				if key != 'fuse':
					tree[feeder.getMaxKey(tree) + 1] = {
						'object': 'group_recorder',
						'group': '"class=' + key + '"',
						'property': 'phase_' + phase + '_state',
						'file': key + '_phase_' + phase + '_state.csv'
					}
				else:
					tree[feeder.getMaxKey(tree) + 1] = {
						'object': 'group_recorder',
						'group': '"class=' + key + '"',
						'property': 'phase_' + phase + '_status',
						'file': key + '_phase_' + phase + '_state.csv'
					}
	# Run Gridlab.
	if not workDir:
		workDir = tempfile.mkdtemp()
		print('@@@@@@', workDir)
	# for i in range(6):
	# 	gridlabOut = gridlabd.runInFilesystem(tree, attachments=attachments, workDir=workDir)
	# 	#HACK: workaround for shoddy macOS gridlabd build.
	# 	if 'error when setting parent' not in gridlabOut.get('stderr','OOPS'):
	# 		break
	gridlabOut = gridlabd.runInFilesystem(tree,
										attachments=attachments,
										workDir=workDir)
	#Record final status readout of each fuse/recloser/switch/sectionalizer after running
	# NOTE(review): the broad try/except makes this step best-effort — a missing
	# recorder CSV or unknown device name silently leaves final status empty.
	try:
		for key in protDevices.keys():
			if protDevices[key]:
				for phase in ['A', 'B', 'C']:
					with open(pJoin(workDir,
									key + '_phase_' + phase + '_state.csv'),
							newline='') as statusFile:
						reader = csv.reader(statusFile)
						# loop past the header,
						keys = []
						vals = []
						for row in reader:
							if '# timestamp' in row:
								keys = row
								i = keys.index('# timestamp')
								keys.pop(i)
								vals = next(reader)
								vals.pop(i)
						for pos, key2 in enumerate(keys):
							protDevFinalStatus[key2][phase] = vals[pos]
	except:
		pass
	#print protDevFinalStatus
	#compare initial and final states of protective devices
	#quick compare to see if they are equal
	#print cmp(protDevInitStatus, protDevFinalStatus)
	#find which values changed
	changedStates = {}
	#read voltDump values into a dictionary.
	try:
		with open(pJoin(workDir, 'voltDump.csv'), newline='') as dumpFile:
			reader = csv.reader(dumpFile)
			next(reader)  # Burn the header.
			keys = next(reader)
			voltTable = []
			for row in reader:
				rowDict = {}
				for pos, key in enumerate(keys):
					rowDict[key] = row[pos]
				voltTable.append(rowDict)
	except:
		# A missing voltDump.csv means the solver itself failed.
		raise Exception(
			'GridLAB-D failed to run with the following errors:\n' +
			gridlabOut['stderr'])
	# read currDump values into a dictionary
	with open(pJoin(workDir, 'currDump.csv'), newline='') as currDumpFile:
		reader = csv.reader(currDumpFile)
		next(reader)  # Burn the header.
		keys = next(reader)
		currTable = []
		for row in reader:
			rowDict = {}
			for pos, key in enumerate(keys):
				rowDict[key] = row[pos]
			currTable.append(rowDict)
	# read line rating values into a single dictionary
	lineRatings = {}
	rating_in_VA = []  # NOTE(review): appears unused below.
	for key1 in edge_bools.keys():
		if edge_bools[key1]:
			with open(pJoin(workDir, key1 + '_cont_rating.csv'),
					newline='') as ratingFile:
				reader = csv.reader(ratingFile)
				# loop past the header,
				keys = []
				vals = []
				for row in reader:
					if '# timestamp' in row:
						keys = row
						i = keys.index('# timestamp')
						keys.pop(i)
						vals = next(reader)
						vals.pop(i)
				for pos, key2 in enumerate(keys):
					lineRatings[key2] = abs(float(vals[pos]))
	#edgeTupleRatings = lineRatings copy with to-from tuple as keys for labeling
	edgeTupleRatings = {}
	for edge in lineRatings:
		for obj in tree.values():
			if obj.get('name', '').replace('"', '') == edge:
				nodeFrom = obj.get('from')
				nodeTo = obj.get('to')
				coord = (nodeFrom, nodeTo)
				ratingVal = lineRatings.get(edge)
				edgeTupleRatings[coord] = ratingVal
	# Calculate average node voltage deviation. First, helper functions.
	def digits(x):
		''' Returns number of digits before the decimal in the float x. '''
		return math.ceil(math.log10(x + 1))
	def avg(l):
		''' Average of a list of ints or floats. '''
		# HACK: add a small value to the denominator to avoid divide by zero for out of service locations (i.e. zero voltage).
		return sum(l) / (len(l) + 0.00000000000000001)
	# Detect the feeder nominal voltage:
	# NOTE(review): if the tree has no SWING bus, feedVoltage is never bound and
	# the fallback below would raise NameError — confirm all inputs have one.
	for key in tree:
		ob = tree[key]
		if type(ob) == dict and ob.get('bustype', '') == 'SWING':
			feedVoltage = float(ob.get('nominal_voltage', 1))
	# Tot it all up.
	nodeVolts = {}
	nodeVoltsPU = {}
	nodeVoltsPU120 = {}
	voltImbalances = {}
	for row in voltTable:
		allVolts = []
		allVoltsPU = []
		allDiffs = []
		nodeName = row.get('node_name', '')
		for phase in ['A', 'B', 'C']:
			realVolt = abs(float(row['volt' + phase + '_real']))
			imagVolt = abs(float(row['volt' + phase + '_imag']))
			phaseVolt = math.sqrt((realVolt**2) + (imagVolt**2))
			# Skip dead phases entirely; per-phase nominal falls back to the feeder's.
			if phaseVolt != 0.0:
				treeKey = nameToIndex.get(nodeName, 0)
				nodeObj = tree.get(treeKey, {})
				try:
					nominal_voltage = float(nodeObj['nominal_voltage'])
				except:
					nominal_voltage = feedVoltage
				allVolts.append(phaseVolt)
				normVolt = (phaseVolt / nominal_voltage)
				allVoltsPU.append(normVolt)
		avgVolts = avg(allVolts)
		avgVoltsPU = avg(allVoltsPU)
		avgVoltsPU120 = 120 * avgVoltsPU
		nodeVolts[nodeName] = float("{0:.2f}".format(avgVolts))
		nodeVoltsPU[nodeName] = float("{0:.2f}".format(avgVoltsPU))
		nodeVoltsPU120[nodeName] = float("{0:.2f}".format(avgVoltsPU120))
		# Imbalance is only defined when all three phases are energized.
		if len(allVolts) == 3:
			voltA = allVolts.pop()
			voltB = allVolts.pop()
			voltC = allVolts.pop()
			allDiffs.append(abs(float(voltA - voltB)))
			allDiffs.append(abs(float(voltA - voltC)))
			allDiffs.append(abs(float(voltB - voltC)))
			maxDiff = max(allDiffs)
			voltImbal = maxDiff / avgVolts
			voltImbalances[nodeName] = float("{0:.2f}".format(voltImbal))
		# Use float("{0:.2f}".format(avg(allVolts))) if displaying the node labels
	nodeLoadNames = {}
	nodeNames = {}
	for key in nodeVolts.keys():
		nodeNames[key] = key
		if key == loadLoc:
			nodeLoadNames[key] = "LOAD: " + key
	# find edge currents by parsing currdump
	edgeCurrentSum = {}
	edgeCurrentMax = {}
	for row in currTable:
		allCurr = []
		for phase in ['A', 'B', 'C']:
			realCurr = abs(float(row['curr' + phase + '_real']))
			imagCurr = abs(float(row['curr' + phase + '_imag']))
			phaseCurr = math.sqrt((realCurr**2) + (imagCurr**2))
			allCurr.append(phaseCurr)
		edgeCurrentSum[row.get('link_name', '')] = sum(allCurr)
		edgeCurrentMax[row.get('link_name', '')] = max(allCurr)
	# When just showing current as labels, use sum of the three lines' current values, when showing the per unit values (current/rating), use the max of the three
	#edgeTupleCurrents = edgeCurrents copy with to-from tuple as keys for labeling
	edgeTupleCurrents = {}
	#edgeValsPU = values normalized per unit by line ratings
	edgeValsPU = {}
	#edgeTupleValsPU = edgeValsPU copy with to-from tuple as keys for labeling
	edgeTupleValsPU = {}
	#edgeTuplePower = dict with to-from tuples as keys and sim power as values for debugging
	edgeTuplePower = {}
	#edgeTupleNames = dict with to-from tuples as keys and names as values for debugging
	edgeTupleNames = {}
	#edgeTupleFaultNames = dict with to-from tuples as keys and the name of the Fault as the only value
	edgeTupleFaultNames = {}
	#edgeTupleProtDevs = dict with to-from tuples as keys and the initial of the type of protective device as the value
	edgeTupleProtDevs = {}
	#linePhases = dictionary containing the number of phases on each line for line-width purposes
	linePhases = {}
	edgePower = {}
	for edge in edgeCurrentSum:
		for obj in tree.values():
			obname = obj.get('name', '').replace('"', '')
			if obname == edge:
				objType = obj.get('object')
				nodeFrom = obj.get('from')
				nodeTo = obj.get('to')
				coord = (nodeFrom, nodeTo)
				currVal = edgeCurrentSum.get(edge)
				voltVal = avg([nodeVolts.get(nodeFrom), nodeVolts.get(nodeTo)])
				power = (currVal * voltVal) / 1000
				lineRating = lineRatings.get(edge, 10.0**9)
				edgePerUnitVal = (edgeCurrentMax.get(edge)) / lineRating
				edgeTupleCurrents[coord] = "{0:.2f}".format(currVal)
				edgeTuplePower[coord] = "{0:.2f}".format(power)
				edgePower[edge] = power
				edgeValsPU[edge] = edgePerUnitVal
				edgeTupleValsPU[coord] = "{0:.2f}".format(edgePerUnitVal)
				edgeTupleNames[coord] = edge
				if faultLoc == edge:
					edgeTupleFaultNames[coord] = "FAULT: " + edge
				# Drop quotes plus neutral/secondary markers; what's left is the phase count.
				phaseStr = obj.get('phases', '').replace('"', '').replace(
					'N', '').replace('S', '')
				numPhases = len(phaseStr)
				if (numPhases < 1) or (numPhases > 3):
					numPhases = 1
				linePhases[edge] = numPhases
				# Label protective devices with a type initial, '!' appended if tripped/blown.
				protDevLabel = ""
				protDevBlownStr = ""
				if objType in protDevices.keys():
					for phase in protDevFinalStatus[obname].keys():
						if objType == 'fuse':
							if protDevFinalStatus[obname][phase] == "BLOWN":
								protDevBlownStr = "!"
						else:
							if protDevFinalStatus[obname][phase] == "OPEN":
								protDevBlownStr = "!"
					if objType == 'fuse':
						protDevLabel = 'F'
					elif objType == 'switch':
						protDevLabel = 'S'
					elif objType == 'recloser':
						protDevLabel = 'R'
					elif objType == 'sectionalizer':
						protDevLabel = 'X'
				edgeTupleProtDevs[coord] = protDevLabel + protDevBlownStr
	#define which dict will be used for edge line color
	edgeColors = edgeValsPU
	#define which dict will be used for edge label
	edgeLabels = edgeTupleValsPU
	# Build the graph.
	fGraph = feeder.treeToNxGraph(tree)
	# TODO: consider whether we can set figsize dynamically.
	wlVal = int(math.sqrt(float(rezSqIn)))
	voltChart = plt.figure(figsize=(wlVal, wlVal))
	plt.axes(frameon=0)
	plt.axis('off')
	voltChart.gca().set_aspect('equal')
	plt.tight_layout()
	#set axes step equal
	if neatoLayout:
		# HACK: work on a new graph without attributes because graphViz tries to read attrs.
		cleanG = nx.Graph(fGraph.edges())
		cleanG.add_nodes_from(fGraph)
		positions = graphviz_layout(cleanG, prog='neato')
	else:
		# Drop nodes without real coordinates, then use stored positions.
		remove_nodes = [
			n for n in fGraph if fGraph.nodes[n].get('pos', (0, 0)) == (0, 0)
		]
		fGraph.remove_nodes_from(remove_nodes)
		positions = {n: fGraph.nodes[n].get('pos', (0, 0)) for n in fGraph}
	# Need to get edge names from pairs of connected node names.
	edgeNames = []
	for e in fGraph.edges():
		edgeNames.append((fGraph.edges[e].get('name', 'BLANK')).replace('"', ''))
	#create custom colormap
	if customColormap:
		if scaleMin != None and scaleMax != None:
			scaleDif = scaleMax - scaleMin
			custom_cm = matplotlib.colors.LinearSegmentedColormap.from_list(
				'custColMap', [(scaleMin, 'blue'),
							(scaleMin + (0.12 * scaleDif), 'darkgray'),
							(scaleMin + (0.56 * scaleDif), 'darkgray'),
							(scaleMin + (0.8 * scaleDif), 'red')])
			vmin = scaleMin
			vmax = scaleMax
		else:
			custom_cm = matplotlib.colors.LinearSegmentedColormap.from_list(
				'custColMap', [(0.0, 'blue'), (0.15, 'darkgray'),
							(0.7, 'darkgray'), (1.0, 'red')])
			vmin = 0
			vmax = 1.25
		custom_cm.set_under(color='black')
	else:
		custom_cm = plt.cm.get_cmap('viridis')
		if scaleMin != None and scaleMax != None:
			vmin = scaleMin
			vmax = scaleMax
		else:
			vmin = None
			vmax = None
	drawColorbar = False
	emptyColors = {}
	#draw edges with or without colors
	if edgeCol != None:
		drawColorbar = True
		if edgeCol == "Current":
			edgeList = [edgeCurrentSum.get(n, 1) for n in edgeNames]
			drawColorbar = True
		elif edgeCol == "Power":
			edgeList = [edgePower.get(n, 1) for n in edgeNames]
			drawColorbar = True
		elif edgeCol == "Rating":
			edgeList = [lineRatings.get(n, 10.0**9) for n in edgeNames]
			drawColorbar = True
		elif edgeCol == "PercentOfRating":
			edgeList = [edgeValsPU.get(n, .5) for n in edgeNames]
			drawColorbar = True
		else:
			edgeList = [emptyColors.get(n, .6) for n in edgeNames]
			print(
				"WARNING: edgeCol property must be 'Current', 'Power', 'Rating', 'PercentOfRating', or None"
			)
	else:
		edgeList = [emptyColors.get(n, .6) for n in edgeNames]
	edgeIm = nx.draw_networkx_edges(
		fGraph,
		pos=positions,
		edge_color=edgeList,
		width=[linePhases.get(n, 1) for n in edgeNames],
		edge_cmap=custom_cm)
	#draw edge labels
	if edgeLabs != None:
		if edgeLabs == "Name":
			edgeLabels = edgeTupleNames
		elif edgeLabs == "Fault":
			edgeLabels = edgeTupleFaultNames
		elif edgeLabs == "Value":
			if edgeCol == "Current":
				edgeLabels = edgeTupleCurrents
			elif edgeCol == "Power":
				edgeLabels = edgeTuplePower
			elif edgeCol == "Rating":
				edgeLabels = edgeTupleRatings
			elif edgeCol == "PercentOfRating":
				edgeLabels = edgeTupleValsPU
			else:
				edgeLabels = None
				print(
					"WARNING: edgeCol property cannot be set to None when edgeLabs property is set to 'Value'"
				)
		elif edgeLabs == "ProtDevs":
			edgeLabels = edgeTupleProtDevs
		else:
			edgeLabs = None
			print(
				"WARNING: edgeLabs property must be either 'Name', 'Value', or None"
			)
	if edgeLabs != None:
		edgeLabelsIm = nx.draw_networkx_edge_labels(fGraph,
													pos=positions,
													edge_labels=edgeLabels,
													font_size=8)
	# draw nodes with or without color
	if nodeCol != None:
		if nodeCol == "Voltage":
			nodeList = [nodeVolts.get(n, 1) for n in fGraph.nodes()]
			drawColorbar = True
		elif nodeCol == "VoltageImbalance":
			nodeList = [voltImbalances.get(n, 1) for n in fGraph.nodes()]
			drawColorbar = True
		elif nodeCol == "perUnitVoltage":
			nodeList = [nodeVoltsPU.get(n, .5) for n in fGraph.nodes()]
			drawColorbar = True
		elif nodeCol == "perUnit120Voltage":
			nodeList = [nodeVoltsPU120.get(n, 120) for n in fGraph.nodes()]
			drawColorbar = True
		else:
			nodeList = [emptyColors.get(n, 1) for n in fGraph.nodes()]
			print(
				"WARNING: nodeCol property must be 'Voltage', 'VoltageImbalance', 'perUnitVoltage', 'perUnit120Voltage', or None"
			)
	else:
		nodeList = [emptyColors.get(n, .6) for n in fGraph.nodes()]
	nodeIm = nx.draw_networkx_nodes(fGraph,
									pos=positions,
									node_color=nodeList,
									linewidths=0,
									node_size=30,
									vmin=vmin,
									vmax=vmax,
									cmap=custom_cm)
	#draw node labels
	nodeLabels = {}
	if nodeLabs != None:
		if nodeLabs == "Name":
			nodeLabels = nodeNames
		elif nodeLabs == "Value":
			if nodeCol == "Voltage":
				nodeLabels = nodeVolts
			elif nodeCol == "VoltageImbalance":
				nodeLabels = voltImbalances
			elif nodeCol == "perUnitVoltage":
				nodeLabels = nodeVoltsPU
			elif nodeCol == "perUnit120Voltage":
				nodeLabels = nodeVoltsPU120
			else:
				nodeLabels = None
				print(
					"WARNING: nodeCol property cannot be set to None when nodeLabs property is set to 'Value'"
				)
		#HACK: add hidden node label option for displaying specified load name
		elif nodeLabs == "Load":
			nodeLabels = nodeLoadNames
		else:
			nodeLabs = None
			print(
				"WARNING: nodeLabs property must be either 'Name', 'Value', or None"
			)
	if nodeLabs != None:
		nodeLabelsIm = nx.draw_networkx_labels(fGraph,
												pos=positions,
												labels=nodeLabels,
												font_size=8)
	plt.sci(nodeIm)
	# plt.clim(110,130)
	if drawColorbar:
		plt.colorbar()
	return voltChart
# go through every entry in the circuit definition for key in tree.keys(): # check to see if the clock actually exists and update timings if it does if clockExists == False and tree[key].get('clock','') != '': clockExists = True tree[key]['starttime'] = '\"' + SIM_START_TIME + '\"' tree[key]['stoptime'] = '\"' + SIM_STOP_TIME + '\"' # check to see if the tape module actually exists if tapeModuleExists == False and tree[key].get('argument','') == 'tape': tapeModuleExists = True # if there is no clock, add it if clockExists == False: tree[feeder.getMaxKey(tree) + 1] = { 'clock': 'clock', 'timezone': TIMEZONE, 'starttime': '\"' + SIM_START_TIME + '\"', 'stoptime': '\"' + SIM_STOP_TIME + '\"', } # if there is no tape module, add it if tapeModuleExists == False: tree[feeder.getMaxKey(tree) + 1] = {'module': 'tape'} # add recorder object tree[feeder.getMaxKey(tree) + 1] = { 'object': 'recorder', 'name': 'meterRecorder', 'parent': '\"' + FAULT_METER + '\"',
import omf.feeder as feeder from omf.solvers.gridlabd import runInFilesystem feed = feeder.parse('GC-12.47-1.glm') maxKey = feeder.getMaxKey(feed) print(feed[1]) feed[maxKey + 1] = { 'object': 'node', 'name': 'test_solar_node', 'phases': 'ABCN', 'nominal_voltage': '7200' } feed[maxKey + 2] = { 'object': 'underground_line', 'name': 'test_solar_line', 'phases': 'ABCN', 'from': 'test_solar_node', 'to': 'GC-12-47-1_node_26', 'length': '100', 'configuration': 'line_configuration:6' } feed[maxKey + 3] = { 'object': 'meter', 'name': 'test_solar_meter', 'parent': 'test_solar_node', 'phases': 'ABCN', 'nominal_voltage': '480' } feed[maxKey + 4] = {
weather_writer.writerow(['temperature','wind_speed','humidity','solar_dir','solar_diff','solar_global']) for row in data_full: weather_writer.writerow(row) # Add stuff to the feeder. myTree = feeder.parse(GLM_PATH) # Delete all climate then reinsert. reader_name = 'weatherReader' climate_name = 'MyClimate' for key in myTree.keys(): obName = myTree[key].get('name','') obType = myTree[key].get('object','') if obName in [reader_name, climate_name] or obType is 'climate': del myTree[key] oldMax = feeder.getMaxKey(myTree) myTree[oldMax + 1] = {'omftype':'module', 'argument':'tape'} myTree[oldMax + 2] = {'omftype':'module', 'argument':'climate'} myTree[oldMax + 3] = {'object':'csv_reader', 'name':reader_name, 'filename':CSV_NAME} myTree[oldMax + 4] = {'object':'climate', 'name':climate_name, 'reader': reader_name, 'tmyfile':CSV_NAME} # Set the time correctly. feeder.adjustTime(myTree, 240, 'hours', '{}-{}-{}'.format(INIT_TIME.year, INIT_TIME.month, INIT_TIME.day)) # Run here to test. rawOut = runInFilesystem(myTree, attachments=[], keepFiles=True, workDir='.', glmName='./outFile.glm') # Write back the full feeder. # outJson = dict(myFeed) # with open(CSV_NAME,'r') as weatherFile: # weatherString = weatherFile.read()
def omfCalibrate(workDir, feederPath, scadaPath, simStartDate, simLength, simLengthUnits, solver="FBS", calibrateError=(0.05, 5), trim=5): '''calibrates a feeder and saves the calibrated tree at a location. Note: feeders with cap banks should be calibrated with cap banks OPEN. We have seen cap banks throw off calibration.''' with open(feederPath, "r") as jsonIn: feederJson = json.load(jsonIn) tree = feederJson.get("tree", {}) simLength = simLength + trim # Process scada data. scadaSubPower = _processScadaData(pJoin(workDir, "gridlabD"), scadaPath, simStartDate, simLengthUnits) # Load specified solver. for key in tree: if tree[key].get("module", "").lower() == "powerflow": tree[key] = {"module": "powerflow", "solver_method": solver} # Attach player. classOb = {'omftype': 'class player', 'argument': '{double value;}'} playerOb = { "object": "player", "property": "value", "name": "scadaLoads", "file": "subScada.player", "loop": "0" } maxKey = feeder.getMaxKey(tree) playerKey = maxKey + 2 tree[maxKey + 1] = classOb tree[playerKey] = playerOb # Make loads reference player. loadTemplate = { "object": "triplex_load", "power_pf_12": "0.95", "impedance_pf_12": "0.98", "power_pf_12": "0.90", "impedance_fraction_12": "0.7", "power_fraction_12": "0.3" } loadTemplateR = { "object": "load", "impedance_pf_A": "0.98", "impedance_pf_B": "0.98", "impedance_pf_C": "0.98", "power_pf_A": "0.90", "power_pf_B": "0.90", "power_pf_C": "0.90", "impedance_fraction_A": "0.7", "impedance_fraction_B": "0.7", "impedance_fraction_C": "0.7", "power_fraction_A": "0.3", "power_fraction_B": "0.3", "power_fraction_C": "0.3" } for key in tree: ob = tree[key] if ob.get("object", "") in ("triplex_node", "triplex_load") and ( ob.get("power_12") or ob.get("base_power_12")): # Add to triplex_nodes. 
newOb = dict(loadTemplate) newOb["name"] = ob.get("name", "") newOb["parent"] = ob.get("parent", "") newOb["phases"] = ob.get("phases", "") newOb["nominal_voltage"] = ob.get("nominal_voltage", "") newOb["latitude"] = ob.get("latitude", "0") newOb["longitude"] = ob.get("longitude", "0") oldPow = ob.get("power_12", "").replace("j", "d") if not oldPow: oldPow = ob.get("base_power_12") if "scadaloads.value*" in oldPow: oldPow = oldPow[17:] pythagPower = gridlabd._strClean(oldPow) newOb["base_power_12"] = "scadaLoads.value*" + str(pythagPower) tree[key] = newOb elif ob.get("object", "") == "load": # Add to residential_loads too. newOb = dict(loadTemplateR) newOb["name"] = ob.get("name", "") newOb["parent"] = ob.get("parent", "") newOb["phases"] = ob.get("phases", "") newOb["load_class"] = ob.get("load_class", "") newOb["nominal_voltage"] = ob.get("nominal_voltage", "") newOb["latitude"] = ob.get("latitude", "0") newOb["longitude"] = ob.get("longitude", "0") try: oldPow = ob.get("constant_power_A", "").replace("j", "d") pythagPower = gridlabd._strClean(oldPow) newOb["base_power_A"] = "scadaLoads.value*" + str(pythagPower) except: pass try: oldPow = ob.get("constant_power_B", "").replace("j", "d") pythagPower = gridlabd._strClean(oldPow) newOb["base_power_B"] = "scadaLoads.value*" + str(pythagPower) except: pass try: oldPow = ob.get("constant_power_C", "").replace("j", "d") pythagPower = gridlabd._strClean(oldPow) newOb["base_power_C"] = "scadaLoads.value*" + str(pythagPower) except: pass tree[key] = newOb # Convert swing bus to a meter. for key in tree: if tree[key].get('bustype', '').lower() == 'swing' and tree[key].get( 'object', '') != 'meter': swingName = tree[key].get('name') regIndex = key tree[key]['object'] = 'meter' # Search for the substation meter and attach a recorder there. 
for key in tree: if tree[key].get('bustype', '').lower() == 'swing': swingName = tree[key].get('name') recOb = { "object": "recorder", "parent": swingName, "property": "measured_real_power,measured_reactive_power,measured_power", "file": "caliSub.csv", "interval": "3600" } outputRecorderKey = maxKey + 3 tree[outputRecorderKey] = recOb feeder.adjustTime(tree, simLength, simLengthUnits, simStartDate['Date'].strftime("%Y-%m-%d %H:%M:%S")) # Run Gridlabd, calculate scaling constant. def runPowerflowIter(tree, scadaSubPower): '''Runs powerflow once, then iterates.''' # Run initial powerflow to get power. print "Starting calibration." print "Goal of calibration: Error: %s, Iterations: <%s, trim: %s" % ( calibrateError[0], calibrateError[1], trim) output = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir, "gridlabD")) outRealPow = output["caliSub.csv"]["measured_real_power"][ trim:simLength] outImagPower = output["caliSub.csv"]["measured_reactive_power"][ trim:simLength] outAppPowerKw = [(x[0]**2 + x[1]**2)**0.5 / 1000 for x in zip(outRealPow, outImagPower)] lastFile = "subScada.player" nextFile = "subScadaCalibrated.player" nextPower = outAppPowerKw error = (sum(outRealPow) / 1000 - sum(scadaSubPower)) / sum(scadaSubPower) iteration = 1 print "First error:", error while abs(error) > calibrateError[0] and iteration < calibrateError[1]: # Run calibration and iterate up to 5 times. SCAL_CONST = sum(scadaSubPower) / sum(nextPower) print "Calibrating & running again... 
Error: %s, Iteration: %s, SCAL_CONST: %s" % ( str(round(abs(error * 100), 6)), str(iteration), round(SCAL_CONST, 6)) newPlayData = [] with open(pJoin(pJoin(workDir, "gridlabD"), lastFile), "r") as playerFile: for line in playerFile: (key, val) = line.split(',') newPlayData.append( str(key) + ',' + str(float(val) * SCAL_CONST) + "\n") with open(pJoin(pJoin(workDir, "gridlabD"), nextFile), "w") as playerFile: for row in newPlayData: playerFile.write(row) tree[playerKey]["file"] = nextFile tree[outputRecorderKey]["file"] = "caliSubCheck.csv" nextOutput = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin( workDir, "gridlabD")) outRealPowIter = nextOutput["caliSubCheck.csv"][ "measured_real_power"][trim:simLength] outImagPowerIter = nextOutput["caliSubCheck.csv"][ "measured_reactive_power"][trim:simLength] nextAppKw = [(x[0]**2 + x[1]**2)**0.5 / 1000 for x in zip(outRealPowIter, outImagPowerIter)] lastFile = nextFile nextFile = "subScadaCalibrated" + str(iteration) + ".player" nextPower = nextAppKw # Compute error and iterate. 
error = (sum(outRealPowIter) / 1000 - sum(scadaSubPower)) / sum(scadaSubPower) iteration += 1 else: if iteration == 1: outRealPowIter = outRealPow SCAL_CONST = 1.0 print "Calibration done: Error: %s, Iteration: %s, SCAL_CONST: %s" % ( str(round(abs(error * 100), 2)), str(iteration), round(SCAL_CONST, 2)) return outRealPow, outRealPowIter, lastFile, iteration outRealPow, outRealPowIter, lastFile, iteration = runPowerflowIter( tree, scadaSubPower[trim:simLength]) caliPowVectors = [[ float(element) for element in scadaSubPower[trim:simLength] ], [float(element) / 1000 for element in outRealPow], [float(element) / 1000 for element in outRealPowIter]] labels = ["scadaSubPower", "initialGuess", "finalGuess"] colors = ['red', 'lightblue', 'blue'] chartData = { "Title": "Substation Calibration Check (Iterated " + str(iteration + 1) + "X)", "fileName": "caliCheckPlot", "colors": colors, "labels": labels, "timeZone": simStartDate['timeZone'] } # Trimming vectors to make them all the same length as the smallest vector minCaliPowVecLen = min(len(caliPowVectors[0]), len(caliPowVectors[1]), len(caliPowVectors[2])) caliPowVectors[0] = caliPowVectors[0][:minCaliPowVecLen] caliPowVectors[1] = caliPowVectors[1][:minCaliPowVecLen] caliPowVectors[2] = caliPowVectors[2][:minCaliPowVecLen] print "Len:", len(caliPowVectors[0]), len(caliPowVectors[1]), len( caliPowVectors[2]) plotLine(workDir, caliPowVectors, chartData, simStartDate['Date'] + dt.timedelta(hours=trim), simLengthUnits) # Write the final output. with open(pJoin(workDir, "calibratedFeeder.omd"), "w") as outJson: playerString = open(pJoin(pJoin(workDir, "gridlabD"), lastFile)).read() feederJson["attachments"][lastFile] = playerString feederJson["tree"] = tree json.dump(feederJson, outJson, indent=4) return
def attachVolts(workDir, feederPath, voltVectorA, voltVectorB, voltVectorC, simStartDate, simLength, simLengthUnits): '''read voltage vectors of 3 different phases, run gridlabd, and attach output to the feeder.''' try: timeStamp = [simStartDate['Date']] for x in range(1, 8760): timeStamp.append(timeStamp[x - 1] + dt.timedelta(hours=1)) firstDateTime = timeStamp[1] with open(pJoin(pJoin(workDir, "gridlabD"), "phaseAVoltage.player"), "w") as voltFile: for x in range(0, 8760): timestamp = timeStamp[x] voltage = str("%0.2f" % float(voltVectorA[x])) + "+0j" line = timestamp.strftime( "%Y-%m-%d %H:%M:%S" ) + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n" voltFile.write(line) with open(pJoin(pJoin(workDir, "gridlabD"), "phaseBVoltage.player"), "w") as voltFile: for x in range(0, 8760): timestamp = timeStamp[x] voltage = str("%0.2f" % float(voltVectorB[x])) + "-" + str( "%0.4f" % float(random.uniform(6449, 6460))) + "j" line = timestamp.strftime( "%Y-%m-%d %H:%M:%S" ) + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n" voltFile.write(line) with open(pJoin(pJoin(workDir, "gridlabD"), "phaseCVoltage.player"), "w") as voltFile: for x in range(0, 8760): timestamp = timeStamp[x] voltage = str("%0.2f" % float(voltVectorC[x])) + "+" + str( "%0.4f" % float(random.uniform(6449, 6460))) + "j" line = timestamp.strftime( "%Y-%m-%d %H:%M:%S" ) + " " + simStartDate['timeZone'] + "," + str(voltage) + "\n" voltFile.write(line) with open(feederPath, "r") as jsonIn: feederJson = json.load(jsonIn) tree = feederJson.get("tree", {}) # Find swingNode name. for key in tree: if tree[key].get('bustype', '').lower() == 'swing': swingName = tree[key].get('name') # Attach player. 
classOb = {'omftype': 'class player', 'argument': '{double value;}'} voltageObA = { "object": "player", "property": "voltage_A", "file": "phaseAVoltage.player", "loop": "0", "parent": swingName } voltageObB = { "object": "player", "property": "voltage_B", "file": "phaseBVoltage.player", "loop": "0", "parent": swingName } voltageObC = { "object": "player", "property": "voltage_C", "file": "phaseCVoltage.player", "loop": "0", "parent": swingName } maxKey = feeder.getMaxKey(tree) voltplayerKeyA = maxKey + 2 voltplayerKeyB = maxKey + 3 voltplayerKeyC = maxKey + 4 tree[maxKey + 1] = classOb tree[voltplayerKeyA] = voltageObA tree[voltplayerKeyB] = voltageObB tree[voltplayerKeyC] = voltageObC # Adjust time and run output. feeder.adjustTime(tree, simLength, simLengthUnits, firstDateTime.strftime("%Y-%m-%d %H:%M:%S")) output = gridlabd.runInFilesystem(tree, keepFiles=True, workDir=pJoin(workDir, "gridlabD")) # Write the output. with open(pJoin(workDir, "calibratedFeeder.omd"), "w") as outJson: playerStringA = open( pJoin(pJoin(workDir, "gridlabD"), "phaseAVoltage.player")).read() playerStringB = open( pJoin(pJoin(workDir, "gridlabD"), "phaseBVoltage.player")).read() playerStringC = open( pJoin(pJoin(workDir, "gridlabD"), "phaseCVoltage.player")).read() feederJson["attachments"]["phaseAVoltage.player"] = playerStringA feederJson["attachments"]["phaseBVoltage.player"] = playerStringB feederJson["attachments"]["phaseCVoltage.player"] = playerStringC feederJson["tree"] = tree json.dump(feederJson, outJson, indent=4) return pJoin(workDir, "calibratedFeeder.omd"), True except: print "Failed to run gridlabD with voltage players." return "", False