def milsoftToGridlab(temp_dir):
	'''Convert a Milsoft Windmil ASCII export (.std & .seq) into a GridLAB-D .glm and return the .glm.
	Form parameters:
	:param std: an STD file.
	:param seq: an SEQ file.
	Details:
	:OMF function: omf.milToGridlab.convert().
	:run-time: up to a few minutes.
	'''
	stdPath = os.path.join(temp_dir, 'in.std')
	request.files['std'].save(stdPath)
	seqPath = os.path.join(temp_dir, 'in.seq')
	request.files['seq'].save(seqPath)
	with open(stdPath) as f:
		stdFile = f.read()
	with open(seqPath) as f:
		seqFile = f.read()
	tree = milToGridlab.convert(stdFile, seqFile, rescale=True)
	# Remove '#include "schedules.glm"' objects from the tree. Would be faster if this
	# were incorporated into sortedWrite() or similar.
	tree = {k: v for k, v in tree.items() if v.get('omftype') != '#include'}
	with open(os.path.join(temp_dir, filenames['msgl']), 'w') as outFile:
		outFile.write(feeder.sortedWrite(tree))
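# Hedged usage sketch: calling the converter directly, outside of Flask. The
# file names below are illustrative; the call mirrors the handler above.
#
#	from omf import milToGridlab, feeder
#	with open('example.std') as f:
#		stdContents = f.read()
#	with open('example.seq') as f:
#		seqContents = f.read()
#	tree = milToGridlab.convert(stdContents, seqContents, rescale=True)
#	with open('example.glm', 'w') as f:
#		f.write(feeder.sortedWrite(tree))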
def milsoftToGridlabTests(keepFiles=False):
	openPrefix = '../uploads/'
	outPrefix = './milToGridlabTests/'
	import os, json, traceback, shutil
	from omf.solvers import gridlabd
	from matplotlib import pyplot as plt
	from milToGridlab import convert
	import omf.feeder as feeder
	try:
		os.mkdir(outPrefix)
	except:
		pass # Directory already there.
	exceptionCount = 0
	# testFiles = [('INEC-RENOIR.std','INEC.seq'), ('INEC-GRAHAM.std','INEC.seq'),
	# 	('Olin-Barre.std','Olin.seq'), ('Olin-Brown.std','Olin.seq'),
	# 	('ABEC-FRANK.std','ABEC.seq'), ('ABEC-COLUMBIA.std','ABEC.seq'), ('OMF_Norfork1.std','OMF_Norfork1.seq')]
	testFiles = [('Olin-Brown.std', 'Olin.seq')]
	testAttachments = {'schedules.glm': ''}
	# testAttachments = {'schedules.glm':'', 'climate.tmy2':open('./data/Climate/KY-LEXINGTON.tmy2','r').read()}
	for stdString, seqString in testFiles:
		try:
			# Convert the std+seq.
			with open(openPrefix + stdString, 'r') as stdFile, open(openPrefix + seqString, 'r') as seqFile:
				outGlm, x, y = convert(stdFile.read(), seqFile.read())
			with open(outPrefix + stdString.replace('.std', '.glm'), 'w') as outFile:
				outFile.write(feeder.sortedWrite(outGlm))
			print('WROTE GLM FOR', stdString)
			try:
				# Draw the GLM.
				myGraph = feeder.treeToNxGraph(outGlm)
				feeder.latLonNxGraph(myGraph, neatoLayout=False)
				plt.savefig(outPrefix + stdString.replace('.std', '.png'))
				print('DREW GLM OF', stdString)
			except:
				exceptionCount += 1
				print('FAILED DRAWING', stdString)
			try:
				# Run powerflow on the GLM. HACK: blank attachments for now.
				output = gridlabd.runInFilesystem(outGlm, attachments=testAttachments, keepFiles=False)
				with open(outPrefix + stdString.replace('.std', '.json'), 'w') as outFile:
					json.dump(output, outFile, indent=4)
				print('RAN GRIDLAB ON', stdString)
			except:
				exceptionCount += 1
				print('POWERFLOW FAILED', stdString)
		except:
			print('FAILED CONVERTING', stdString)
			exceptionCount += 1
			traceback.print_exc()
	if not keepFiles:
		shutil.rmtree(outPrefix)
	return exceptionCount
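# Hedged usage sketch: the harness assumes it runs from a directory where
# '../uploads/' holds the .std/.seq pairs named in testFiles.
#
#	errorCount = milsoftToGridlabTests(keepFiles=True)
#	print('Total exceptions:', errorCount)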
def glmForceLayout(temp_dir):
	'''Inject artificial coordinates into a GridLAB-D .glm and return the .glm.
	Form parameters:
	:param glm: a GLM file.
	Details:
	:OMF function: omf.distNetViz.insert_coordinates().
	:run-time: a few seconds.
	'''
	glm_path = os.path.join(temp_dir, 'in.glm')
	glm_file = request.files['glm']
	glm_file.save(glm_path)
	tree = feeder.parse(glm_path)
	distNetViz.insert_coordinates(tree)
	with open(os.path.join(temp_dir, filenames['gfl']), 'w') as f:
		f.write(feeder.sortedWrite(tree))
def cymeToGridlab(temp_dir):
	'''Convert an Eaton Cymdist .mdb export into a GridLAB-D .glm and return the .glm.
	Form parameters:
	:param mdb: an MDB file.
	Details:
	:OMF function: omf.cymeToGridlab.convertCymeModel().
	:run-time: up to a few minutes.
	'''
	mdbPath = os.path.join(temp_dir, 'in.mdb')
	request.files['mdb'].save(mdbPath)
	import locale
	locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
	tree = cymeToGridlab_.convertCymeModel(mdbPath, temp_dir)
	# Remove '#include "schedules.glm"' objects from the tree. Would be faster if this
	# were incorporated into sortedWrite() or similar.
	tree = {k: v for k, v in tree.items() if v.get('omftype') != '#include'}
	with open(os.path.join(temp_dir, filenames['cygl']), 'w') as outFile:
		outFile.write(feeder.sortedWrite(tree))
def work(modelDir, ind):
	''' Run the model in its directory. '''
	o = {}
	assert not (ind['pvConnection'] == 'Delta' and ind['objectiveFunction'] == 'I0'), (
		'The Delta connection does not currently support the I0 objective function.')
	SIGN_CORRECTION = -1 if ind['pvConnection'] == 'Delta' else 1
	neato = ind.get('layoutAlgorithm', 'geospatial') != 'geospatial'
	edgeColValue = ind.get('edgeCol') if ind.get('edgeCol') != 'None' else None
	nodeColValue = ind.get('nodeCol') if ind.get('nodeCol') != 'None' else None
	edgeLabsValue = ind.get('edgeLabs') if ind.get('edgeLabs') != 'None' else None
	nodeLabsValue = ind.get('nodeLabs') if ind.get('nodeLabs') != 'None' else None
	customColormapValue = ind.get('customColormap', 'True') == 'True'
	# -------------------------- BASE CHART --------------------------- #
	with open(pJoin(modelDir, [x for x in os.listdir(modelDir) if x.endswith('.omd')][0])) as f:
		tree_base = json.load(f)['tree']
	with open(pJoin(modelDir, 'input.glm'), 'w') as f:
		f.write(feeder.sortedWrite(tree_base))
	base_suffix = '_base'
	tree_base = _turnOffSolar(tree_base)
	tree_base = _addCollectors(tree_base, suffix=base_suffix, pvConnection=ind['pvConnection'])
	with open(pJoin(modelDir, '_base.glm'), 'w') as f:
		f.write(feeder.sortedWrite(tree_base))
	voltageDrop.drawPlot(
		pJoin(modelDir, '_base.glm'), workDir=modelDir, neatoLayout=neato,
		edgeCol=edgeColValue, nodeCol=nodeColValue, nodeLabs=nodeLabsValue,
		edgeLabs=edgeLabsValue, customColormap=customColormapValue,
		rezSqIn=int(ind['rezSqIn']),
		scaleMin=float(ind['colorMin']) if ind['colorMin'].lower() != 'auto' else None,
		scaleMax=float(ind['colorMax']) if ind['colorMax'].lower() != 'auto' else None
	).savefig(pJoin(modelDir, 'output' + base_suffix + '.png'))
	with open(pJoin(modelDir, 'output' + base_suffix + '.png'), 'rb') as f:
		o['base_image'] = base64.standard_b64encode(f.read()).decode()
	os.rename(pJoin(modelDir, 'voltDump.csv'), pJoin(modelDir, 'voltDump_base.csv'))
	# ---------------------------- CONTROLLED CHART ----------------------------- #
	controlled_suffix = '_controlled'
	SteinmetzController.SteinmetzController(
		pJoin(modelDir, 'input.glm'), ind['pvConnection'], ind['criticalNode'],
		int(ind['iterations']), ind['objectiveFunction'], modelDir)
	if ind['pvConnection'] == 'Delta':
		glmPath = pJoin(modelDir, 'input_NewDeltaPV_Final.glm')
	else:
		glmPath = pJoin(modelDir, 'input_Final.glm')
	omdPath = pJoin(modelDir, '_controlled.omd')
	feeder.glmToOmd(glmPath, omdPath)
	with open(omdPath) as f:
		tree_controlled = json.load(f)['tree']
	constant_pf = float(ind['constant_pf'])
	for k, v in tree_controlled.items():
		if ('PV' in v.get('groupid', '')) and v.get('object', '') == 'load':
			if ind['strategy'] == 'constant':
				if v.get('constant_power_C', '') != '':
					v['constant_power_C'] = respect_pf(v['constant_power_C'], constant_pf)
				elif v.get('constant_power_B', '') != '':
					v['constant_power_B'] = respect_pf(v['constant_power_B'], constant_pf)
				elif v.get('constant_power_A', '') != '':
					v['constant_power_A'] = respect_pf(v['constant_power_A'], constant_pf)
			v['groupid'] = 'PV'
	tree_controlled = _addCollectors(tree_controlled, suffix=controlled_suffix, pvConnection=ind['pvConnection'])
	with open(pJoin(modelDir, '_controlled.glm'), 'w') as f:
		f.write(feeder.sortedWrite(tree_controlled))
	voltageDrop.drawPlot(
		pJoin(modelDir, '_controlled.glm'), workDir=modelDir, neatoLayout=neato,
		edgeCol=edgeColValue, nodeCol=nodeColValue, nodeLabs=nodeLabsValue,
		edgeLabs=edgeLabsValue, customColormap=customColormapValue,
		rezSqIn=int(ind['rezSqIn']),
		scaleMin=float(ind['colorMin']) if ind['colorMin'].lower() != 'auto' else None,
		scaleMax=float(ind['colorMax']) if ind['colorMax'].lower() != 'auto' else None
	).savefig(pJoin(modelDir, 'output' + controlled_suffix + '.png'))
	with open(pJoin(modelDir, 'output' + controlled_suffix + '.png'), 'rb') as f:
		o['controlled_image'] = base64.standard_b64encode(f.read()).decode()
	os.rename(pJoin(modelDir, 'voltDump.csv'), pJoin(modelDir, 'voltDump_controlled.csv'))
	# ---------------------------- SOLAR CHART ----------------------------- #
	if ind['pvConnection'] == 'Delta':
		glmPath = pJoin(modelDir, 'input_NewDeltaPV_Start.glm')
	else:
		glmPath = pJoin(modelDir, 'input_Wye_Start.glm')
	omdPath = pJoin(modelDir, '_solar.omd')
	feeder.glmToOmd(glmPath, omdPath)
	with open(omdPath) as f:
		tree_solar = json.load(f)['tree']
	for k, v in tree_solar.items():
		if ('PV' in v.get('groupid', '')) and v.get('object', '') == 'load':
			v['groupid'] = 'PV'
	solar_suffix = '_solar'
	tree_solar = _addCollectors(tree_solar, suffix=solar_suffix, pvConnection=ind['pvConnection'])
	with open(pJoin(modelDir, '_solar.glm'), 'w') as f:
		f.write(feeder.sortedWrite(tree_solar))
	voltageDrop.drawPlot(
		pJoin(modelDir, '_solar.glm'), workDir=modelDir, neatoLayout=neato,
		edgeCol=edgeColValue, nodeCol=nodeColValue, nodeLabs=nodeLabsValue,
		edgeLabs=edgeLabsValue, customColormap=customColormapValue,
		rezSqIn=int(ind['rezSqIn']),
		scaleMin=float(ind['colorMin']) if ind['colorMin'].lower() != 'auto' else None,
		scaleMax=float(ind['colorMax']) if ind['colorMax'].lower() != 'auto' else None
	).savefig(pJoin(modelDir, 'output' + solar_suffix + '.png'))
	with open(pJoin(modelDir, 'output' + solar_suffix + '.png'), 'rb') as f:
		o['solar_image'] = base64.standard_b64encode(f.read()).decode()
	os.rename(pJoin(modelDir, 'voltDump.csv'), pJoin(modelDir, 'voltDump_solar.csv'))
	# --------------------------- SERVICE TABLE ----------------------------- #
	df_invs = {}
	sums = {}
	for suffix in [base_suffix, solar_suffix, controlled_suffix]:
		df_invs[suffix] = {}
		sums[suffix] = 0
		for phase in 'ABC':
			df = _readCSV(pJoin(modelDir, 'all_inverters_VA_Out_AC_' + phase + suffix + '.csv'))
			df_invs[suffix][phase] = df
			sums[suffix] += complex(df['real'].sum(), df['imag'].sum())
	loss_items = get_loss_items(tree_base)
	o['service_cost'] = {
		'load': {
			'base': n(_totals(pJoin(modelDir, 'load' + base_suffix + '.csv'), 'real')
				+ _totals(pJoin(modelDir, 'load_node' + base_suffix + '.csv'), 'real')),
			'solar': n(_totals(pJoin(modelDir, 'load' + solar_suffix + '.csv'), 'real')
				+ _totals(pJoin(modelDir, 'load_node' + solar_suffix + '.csv'), 'real')),
			'controlled': n(_totals(pJoin(modelDir, 'load' + controlled_suffix + '.csv'), 'real')
				+ _totals(pJoin(modelDir, 'load_node' + controlled_suffix + '.csv'), 'real')),
		},
		'distributed_gen': {
			'base': n(sums[base_suffix].real),
			'solar': n(SIGN_CORRECTION * sums[solar_suffix].real),
			'controlled': n(SIGN_CORRECTION * sums[controlled_suffix].real),
		},
		'losses': {
			'base': n(sum([_totals(pJoin(modelDir, 'Zlosses_' + loss + base_suffix + '.csv'), 'real') for loss in loss_items])),
			'solar': n(sum([_totals(pJoin(modelDir, 'Zlosses_' + loss + solar_suffix + '.csv'), 'real') for loss in loss_items])),
			'controlled': n(sum([_totals(pJoin(modelDir, 'Zlosses_' + loss + controlled_suffix + '.csv'), 'real') for loss in loss_items])),
		},
		'VARs': {
			'base': n(sum([_totals(pJoin(modelDir, 'Zlosses_' + loss + base_suffix + '.csv'), 'imag') for loss in loss_items])
				+ sums[base_suffix].imag
				+ _totals(pJoin(modelDir, 'load' + base_suffix + '.csv'), 'imag')
				+ _totals(pJoin(modelDir, 'load_node' + base_suffix + '.csv'), 'imag')),
			'solar': n(sum([_totals(pJoin(modelDir, 'Zlosses_' + loss + solar_suffix + '.csv'), 'imag') for loss in loss_items])
				+ sums[solar_suffix].imag
				+ _totals(pJoin(modelDir, 'load' + solar_suffix + '.csv'), 'imag')
				+ _totals(pJoin(modelDir, 'load_node' + solar_suffix + '.csv'), 'imag')),
			'controlled': n(sum([_totals(pJoin(modelDir, 'Zlosses_' + loss + controlled_suffix + '.csv'), 'imag') for loss in loss_items])
				+ sums[controlled_suffix].imag
				+ _totals(pJoin(modelDir, 'load' + controlled_suffix + '.csv'), 'imag')
				+ _totals(pJoin(modelDir, 'load_node' + controlled_suffix + '.csv'), 'imag')),
		},
		# Motor derating and lifespan are filled in below.
		'motor_derating': {},
		'lifespan': {}
	}
	sub_df = {
		'base': _readCSV(pJoin(modelDir, 'substation_power' + base_suffix + '.csv'), voltage=False),
		'solar': _readCSV(pJoin(modelDir, 'substation_power' + solar_suffix + '.csv'), voltage=False),
		'controlled': _readCSV(pJoin(modelDir, 'substation_power' + controlled_suffix + '.csv'), voltage=False),
	}
	o['service_cost']['power_factor'] = {
		'base': n(pf(sub_df['base']['real'].sum(), sub_df['base']['imag'].sum())),
		'solar': n(pf(sub_df['solar']['real'].sum(), sub_df['solar']['imag'].sum())),
		'controlled': n(pf(sub_df['controlled']['real'].sum(), sub_df['controlled']['imag'].sum())),
	}
	# HACK: correction for Delta, where generation appears netted into the load totals.
	if ind['pvConnection'] == 'Delta':
		o['service_cost']['load']['controlled'] = n(
			float(o['service_cost']['load']['controlled'].replace(',', ''))
			+ float(o['service_cost']['distributed_gen']['controlled'].replace(',', '')))
		o['service_cost']['load']['solar'] = n(
			float(o['service_cost']['load']['solar'].replace(',', ''))
			+ float(o['service_cost']['distributed_gen']['solar'].replace(',', '')))
	# -------------------------- INVERTER TABLE ----------------------------- #
	if ind['pvConnection'] == 'Wye':
		inverter_list = set(
			list(df_invs[controlled_suffix]['A'].index)
			+ list(df_invs[controlled_suffix]['B'].index)
			+ list(df_invs[controlled_suffix]['C'].index))
	else:
		inverter_list = df_invs[controlled_suffix]['A'].index
	inverter_rows = {
		inverter: {
			'_solarA': '0j', '_solarB': '0j', '_solarC': '0j',
			'_controlledA': '0j', '_controlledB': '0j', '_controlledC': '0j',
		} for inverter in inverter_list
	}
	for suffix in [solar_suffix, controlled_suffix]:
		for phase in 'ABC':
			for inverter, row in df_invs[suffix][phase].iterrows():
				inverter_rows[inverter][suffix + phase] = str(
					SIGN_CORRECTION * complex(row['real'], row['imag'])).strip('()')
	o['inverter_table'] = ''.join([(
		"<tr>"
		"<td>{}</td><td style='border-left: solid black 1px;'>{}</td><td>{}</td><td>{}</td>"
		"<td style='border-left: solid black 1px;'>{}</td><td>{}</td><td>{}</td>"
		"</tr>").format(
			inverter, v['_solarA'], v['_solarB'], v['_solarC'],
			v['_controlledA'], v['_controlledB'], v['_controlledC'])
		for (inverter, v) in inverter_rows.items()])
	# ----------------- MOTOR VOLTAGE and IMBALANCE TABLES ------------------ #
	df_vs = {}
	for suffix in [base_suffix, solar_suffix, controlled_suffix]:
		df_v = pd.DataFrame()
		for phase in ['A', 'B', 'C']:
			df_phase = _readCSV(pJoin(modelDir, 'threephase_VA_' + phase + suffix + '.csv'))
			df_phase.columns = [phase + '_' + str(c) for c in df_phase.columns]
			if df_v.shape[0] == 0:
				df_v = df_phase
			else:
				df_v = df_v.join(df_phase)
		df_vs[suffix] = df_v
	motor_names = [motor for motor, r in df_v.iterrows()]
	all_motor_unbalance = {}
	for suffix in [base_suffix, solar_suffix, controlled_suffix]:
		df_all_motors = _readVoltage(pJoin(modelDir, 'voltDump' + suffix + '.csv'), motor_names, ind['objectiveFunction'])
		o['motor_table' + suffix] = ''.join([(
			"<tr>"
			"<td>{0}</td><td>{1}</td><td>{2}</td><td>{3}</td><td>{4}</td><td>{5}</td><td>{6}</td><td>{7}</td><td>{8}</td>"
			"</tr>"
			if r['node_name'] != ind['criticalNode'] or ind['strategy'] == 'constant' else
			"<tr>"
			"<td {9}>{0}</td><td {9}>{1}</td><td {9}>{2}</td><td {9}>{3}</td><td {9}>{4}</td><td {9}>{5}</td><td {9}>{6}</td><td {9}>{7}</td><td {9}>{8}</td>"
			"</tr>").format(
				r['node_name'],
				n(r2['A_real'] + r2['B_real'] + r2['C_real']),
				n(r2['A_imag'] + r2['B_imag'] + r2['C_imag']),
				n(r['voltA']), n(r['voltB']), n(r['voltC']),
				n(r['unbalance']),
				n(motor_efficiency(r['unbalance'])),
				n(lifespan(r['unbalance'])),
				"style='background:yellow'")
			for (i, r), (j, r2) in zip(df_all_motors.iterrows(), df_vs[suffix].iterrows())])
		all_motor_unbalance[suffix] = [r['unbalance'] for i, r in df_all_motors.iterrows()]
		o['service_cost']['motor_derating'][suffix[1:]] = n(df_all_motors['unbalance'].apply(motor_efficiency).max())
		o['service_cost']['lifespan'][suffix[1:]] = n(df_all_motors['unbalance'].apply(lifespan).mean())
	# ---------------------------- COST TABLE ------------------------------- #
	cost = float(ind['productionCost'])
	revenue = float(ind['retailCost'])
	pf_p = float(ind['pf_penalty'])
	pf_t = float(ind['pf_threshold'])
	motor_p = float(ind['motor_penalty'])
	motor_t = float(ind['motor_threshold'])
	o['cost_table'] = {
		'energy_cost': {
			'base': '-$' + n(cost * floats(o['service_cost']['load']['base'])),
			'solar': '-$' + n(cost * floats(o['service_cost']['load']['solar'])),
			'controlled': '-$' + n(cost * floats(o['service_cost']['load']['controlled'])),
		},
		'energy_revenue': {
			'base': '$' + n(revenue * floats(o['service_cost']['load']['base'])
				- cost * floats(o['service_cost']['distributed_gen']['base'])),
			'solar': '$' + n(revenue * floats(o['service_cost']['load']['solar'])
				- cost * floats(o['service_cost']['distributed_gen']['solar'])),
			'controlled': '$' + n(revenue * floats(o['service_cost']['load']['controlled'])
				- cost * floats(o['service_cost']['distributed_gen']['controlled'])),
		},
		'pf_penalty': {
			'base': '-$' + n(pf_p if floats(o['service_cost']['power_factor']['base']) <= pf_t else 0),
			'solar': '-$' + n(pf_p if floats(o['service_cost']['power_factor']['solar']) <= pf_t else 0),
			'controlled': '-$' + n(pf_p if floats(o['service_cost']['power_factor']['controlled']) <= pf_t else 0),
		},
		'motor_damage': {
			'base': '-$' + n(motor_p * len([m for m in all_motor_unbalance['_base'] if m > motor_t])),
			'solar': '-$' + n(motor_p * len([m for m in all_motor_unbalance['_solar'] if m > motor_t])),
			'controlled': '-$' + n(motor_p * len([m for m in all_motor_unbalance['_controlled'] if m > motor_t])),
		},
	}
	if ind['pvConnection'] == 'Delta':
		o['inverter_header'] = "<tr><th>Name</th><th>AB (VA)</th><th>BC (VA)</th><th>AC (VA)</th><th>AB (VA)</th><th>BC (VA)</th><th>AC (VA)</th></tr>"
	else:
		o['inverter_header'] = "<tr><th>Name</th><th>A (VA)</th><th>B (VA)</th><th>C (VA)</th><th>A (VA)</th><th>B (VA)</th><th>C (VA)</th></tr>"
	return o
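# The helpers used above (n, floats, pf, respect_pf, motor_efficiency, lifespan,
# _readCSV, _readVoltage, _totals, _addCollectors, _turnOffSolar, get_loss_items)
# are presumably defined elsewhere in this module. A hedged sketch of the three
# formatting/math helpers whose contracts the code above leans on (assumed
# behavior, not the canonical implementations):
#
#	import math
#
#	def n(num):
#		'''Format a number with thousands separators, e.g. n(1234.5) -> "1,234.50".'''
#		return '{:,.2f}'.format(num)
#
#	def floats(s):
#		'''Parse a string produced by n() back into a float.'''
#		return float(str(s).replace(',', ''))
#
#	def pf(real, imag):
#		'''Power factor: P / |S| for aggregate real and reactive power.'''
#		return abs(real) / math.sqrt(real ** 2 + imag ** 2)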
def runInFilesystem(feederTree, attachments=None, keepFiles=False, workDir=None, glmName=None):
	''' Execute gridlabd in the local filesystem. Return a nice dictionary of results. '''
	if attachments is None:
		# Avoid a mutable default; attachments maps file names to file contents.
		attachments = {}
	logger.info('Running GridLab-D for %d feeders (working dir=%s)', len(feederTree), workDir)
	try:
		binaryName = "gridlabd"
		# Create a running directory and fill it, unless we've specified where we're running.
		madeTempDir = False
		if not workDir:
			workDir = tempfile.mkdtemp()
			madeTempDir = True
			print('gridlabD runInFilesystem with no specified workDir. Working in', workDir)
		# Need to zero out lat/lon data on the copy because it frequently breaks GridLAB-D.
		localTree = deepcopy(feederTree)
		for key in localTree.keys():
			try:
				del localTree[key]["latitude"]
				del localTree[key]["longitude"]
			except:
				pass # No lat/lons.
		# Write attachments and glm.
		for attach in attachments:
			with open(pJoin(workDir, attach), 'w') as attachFile:
				attachFile.write(attachments[attach])
		glmString = feeder.sortedWrite(localTree)
		if not glmName:
			glmName = "main." + datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S') + ".glm"
		with open(pJoin(workDir, glmName), 'w') as glmFile:
			glmFile.write(glmString)
		logger.debug('Wrote GLM file: %s', glmName)
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!)
		with open(pJoin(workDir, 'stdout.txt'), 'w') as stdout, open(pJoin(workDir, 'stderr.txt'), 'w') as stderr, open(pJoin(workDir, 'PID.txt'), 'w') as pidFile:
			# MAYBEFIX: turn stderr WARNINGS back on once we figure out how to
			# suppress the 500MB of lines gridlabd wants to write...
			logger.info('Running <%s -w %s> in <%s>', binaryName, glmName, workDir)
			proc = subprocess.Popen([binaryName, '-w', glmName], cwd=workDir, stdout=stdout, stderr=stderr)
			pidFile.write(str(proc.pid))
			logger.info('Launched gridlabd with pid=%d', proc.pid)
			returnCode = proc.wait()
			logger.info('gridlabd finished with exit code=%d', returnCode)
		# Build raw JSON output.
		rawOut = anaDataTree(workDir, lambda x: True)
		with open(pJoin(workDir, 'stderr.txt'), 'r') as stderrFile:
			rawOut['stderr'] = stderrFile.read().strip()
		with open(pJoin(workDir, 'stdout.txt'), 'r') as stdoutFile:
			rawOut['stdout'] = stdoutFile.read().strip()
		logger.info('GridlabD STDOUT:\n%s', rawOut['stdout'])
		logger.info('GridlabD STDERR:\n%s', rawOut['stderr'])
		# Delete the folder and return.
		if not keepFiles and madeTempDir:
			# NOTE: if the caller specified a working directory, don't just blow it away.
			for attempt in range(5):
				try:
					shutil.rmtree(workDir)
					break
				except OSError:
					# HACK: without a pause between attempts, Windows intermittently
					# fails to delete things and an exception is thrown. Probably
					# because Dropbox is monkeying around in these folders on my dev
					# machine; it works when Dropbox is off.
					time.sleep(2)
		return rawOut
	except:
		with open(pJoin(workDir, "stderr.txt"), "a+") as stderrFile:
			traceback.print_exc(file=stderrFile)
		return {}
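# Hedged usage sketch (the .glm path and attachment contents are illustrative):
#
#	tree = feeder.parse('myFeeder.glm')
#	rawOut = runInFilesystem(tree, attachments={'schedules.glm': ''}, keepFiles=True, workDir='./scratch')
#	print(rawOut.get('stderr', ''))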
def writeNewGlmAndPlayers(omdPath, amiPath, outputDir):
	''' Take a glm and an AMI data set, and create a new GLM and set of players that combine them. '''
	# Pull in the main data objects.
	with open(omdPath, 'r') as jsonFile:
		omdObj = json.load(jsonFile)
	omdName = basename(omdPath)
	feederObj = omdObj['tree']
	amiData = amiImport(amiPath)
	# Make the output directory.
	if not os.path.isdir(outputDir):
		os.mkdir(outputDir)
	# Attach the player class to feeder if needed.
	omfTypes = set([feederObj[k].get('omftype', '') for k in feederObj])
	if 'class player' not in omfTypes:
		newKey = feeder.getMaxKey(feederObj)
		feederObj[newKey + 1] = {'omftype': 'class player', 'argument': '{double value;}'}
	# All meter names we have in the AMI data set.
	meterNames = set([x.get('meterName', '') for x in amiData])
	# Attach all the players.
	for key in list(feederObj.keys()):
		objName = feederObj[key].get('name', '')
		dataPhases = set([x.get('phase', '') for x in amiData if x.get('meterName', '') == objName])
		# Handle primary system loads.
		if feederObj[key].get('object', '') == 'load' and objName in meterNames:
			for phase in dataPhases:
				# Write the player file:
				createPlayerFile(amiData, objName, phase, outputDir + '/player_' + objName + '_' + phase + '.csv')
				# Put the object in the GLM:
				newKey = feeder.getMaxKey(feederObj)
				feederObj[newKey + 1] = {
					'object': 'player',
					'property': 'constant_power_' + phase,
					'file': 'player_' + objName + '_' + phase + '.csv',
					'parent': objName
				}
		# Handle secondary system loads.
		elif feederObj[key].get('object', '') == 'triplex_node' and objName in meterNames:
			# Write the player file:
			createPlayerFile(amiData, objName, 'S', outputDir + '/player_' + objName + '_S.csv')
			# Put the object in the GLM:
			newKey = feeder.getMaxKey(feederObj)
			feederObj[newKey + 1] = {
				'object': 'player',
				'property': 'power_12',
				'file': 'player_' + objName + '_S.csv',
				'parent': objName
			}
	# Write the GLM.
	with open(outputDir + '/out.glm', 'w') as outGlmFile:
		outString = feeder.sortedWrite(feederObj)
		outGlmFile.write(outString)
	#TODO: update omdObj tree object to match feederObj, and insert all .csv files in to the attachments, then write new .omd to outputDir.
	# omd = json.load(open('feederName.omd'))
	for player in os.listdir(outputDir):
		if player.startswith('player'):
			name = basename(player)
			with open(pJoin(outputDir, player), 'r') as inFile:
				playerContents = inFile.read()
			omdObj['attachments'][name + '.player'] = playerContents
	oneUp = pJoin(outputDir, '..')
	with open(pJoin(oneUp, omdName), 'w') as outFile:
		json.dump(omdObj, outFile, indent=4)
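# A GridLAB-D player CSV pairs a timestamp with a value, one pair per line. A
# hedged sketch of what createPlayerFile() is assumed to emit for a phase-A
# constant_power player (timestamps and values here are made up):
#
#	2018-01-01 00:00:00,1800+600j
#	2018-01-01 01:00:00,1750+580j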
# NOTE: fragment from a solar-to-negative-load conversion script; `tree`, `row`,
# `met`, `file`, `glmFile`, `dieselObjs`, and the *Keys lists are defined earlier
# in that script.
player = {
	'object': 'player',
	'file': './solarToNegLoadPlayerFiles/' + file,
	'property': 'constant_power_A'
}
dieselObj = {
	'object': 'triplex_load',
	'name': row['name'],
	'parent': met['parent'],
}
dieselObj['player'] = player
dieselObjs.append(dieselObj)
# Delete solar objects from tree.
for row in solarKeys:
	del tree[row]
# Delete inverter objects from tree.
for row in inverterKeys:
	del tree[row]
# Delete meter objects from tree.
for row in meterKeys:
	del tree[row]
# Insert new generators into tree.
print(dieselObjs)
for row in dieselObjs:
	maxKey = max(tree.keys()) + 1
	tree[maxKey] = row
newTree = feeder.sortedWrite(tree)
fileName = basename(glmFile)[:-4]
# Write new glm to file.
with open('../static/testFiles/' + fileName + 'Neg.glm', 'w+') as outFile:
	outFile.write(newTree)
def work(modelDir, inputDict):
	''' Run the model in its directory. '''
	outData = {}
	# Copy specific climate data into model directory. (I think this is unnecessary?)
	# inputDict["climateName"] = zipCodeToClimateName(inputDict["zipCode"])
	# shutil.copy(pJoin(__neoMetaModel__._omfDir, "data", "Climate", inputDict["climateName"] + ".tmy2"),
	# 	pJoin(modelDir, "climate.tmy2"))
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	# Create voltage drop plot.
	# print("*DEBUG: feederName:", feederName)
	with open(pJoin(modelDir, feederName + '.omd')) as f:
		omd = json.load(f)
	tree = omd['tree']
	# COLLECT ALL INVERTER OUTPUTS
	all_inverters = [v['name'] for k, v in tree.items() if v.get('object') == 'inverter']
	html_out = [
		"<tr><td>{0}</td><td>{1}</td><td>{1}</td><td>{1}</td><td>{1}</td></tr>".format(inverter, np.nan)
		for inverter in all_inverters
	]
	outData['inverter_table'] = ''.join(html_out)
	tree = _addCollectors(tree)
	with open(modelDir + '/withCollectors.glm', 'w') as collFile:
		collFile.write(feeder.sortedWrite(tree))
		# json.dump(tree, f1, indent=4)
	neato = inputDict.get("layoutAlgorithm", "geospatial") != "geospatial"
	edgeColValue = inputDict.get("edgeCol", "None")
	nodeColValue = inputDict.get("nodeCol", "None")
	edgeLabsValue = inputDict.get("edgeLabs", "None")
	nodeLabsValue = inputDict.get("nodeLabs", "None")
	customColormapValue = inputDict.get("customColormap", "True") == "True"
	# chart = voltPlot(omd, workDir=modelDir, neatoLayout=neato)
	chart = drawPlot(
		pJoin(modelDir, "withCollectors.glm"),
		workDir=modelDir,
		neatoLayout=False, # neato
		edgeCol=edgeColValue,
		nodeCol=nodeColValue,
		nodeLabs=nodeLabsValue,
		edgeLabs=edgeLabsValue,
		customColormap=customColormapValue,
		rezSqIn=int(inputDict["rezSqIn"]))
	chart.savefig(pJoin(modelDir, "output.png"))
	with open(pJoin(modelDir, "output.png"), "rb") as f:
		outData["voltageDrop"] = base64.standard_b64encode(f.read()).decode()
	outData['threePhase'] = _readCollectorCSV(modelDir + '/threephaseloads.csv')
	sub_d = {
		'base': np.nan,
		'solar': np.nan,
		'controlled_solar': np.nan,
	}
	# Use copies so the three entries don't alias the same dict.
	outData['service_cost'] = {
		'load': dict(sub_d),
		'distributed_gen': dict(sub_d),
		'losses': dict(sub_d),
	}
	# outData['overheadLosses'] = _readCollectorCSV(modelDir+'/ZlossesOverhead.csv')
	return outData
def ConvertAndwork(filePath, gb_on_off='on', area=500):
	'''Convert an omd to a glm, adding the recorders, collectors, and
	attributes+parameters that gridballast GridLAB-D needs to run on water
	heaters and ZIPloads, then run the model.'''
	gb_status = 'true' if gb_on_off == 'on' else 'false'
	print('gridballast is ' + gb_on_off)
	area = str(area)
	with open(filePath, 'r') as inFile:
		inFeeder = json.load(inFile)
	attachments = inFeeder.get('attachments', {})
	include_files = attachments.keys()
	if 'schedules.glm' in include_files:
		with open('schedules.glm', 'w') as outFile:
			outFile.write(attachments['schedules.glm'])
	if 'schedulesResponsiveLoads.glm' in include_files:
		with open('schedulesResponsiveLoads.glm', 'w') as outFile:
			outFile.write(attachments['schedulesResponsiveLoads.glm'])
	inFeeder['tree']['01'] = {'omftype': '#include', 'argument': '"hot_water_demand1.glm"'}
	inFeeder['tree']['011'] = {'class': 'player', 'double': 'value'} # add in manually for now
	inFeeder['tree']['0111'] = {'object': 'voltdump', 'filename': 'voltDump.csv'}
	name_volt_dict = {}
	solar_meters = []
	wind_obs = []
	substation = None
	rooftopSolars = []
	rooftopInverters = []
	for key, value in inFeeder['tree'].items():
		if 'name' in value and 'solar' in value['name']:
			inverter_ob = value['parent']
			# Use distinct loop variables so the outer `value` isn't clobbered.
			for key2, value2 in inFeeder['tree'].items():
				if 'name' in value2 and value2['name'] == inverter_ob:
					solar_meters.append(value2['parent'])
		if 'name' in value and 'wind' in value['name']:
			wind_obs.append(value['name'])
		if 'name' in value and 'nominal_voltage' in value:
			name_volt_dict[value['name']] = {'Nominal_Voltage': value['nominal_voltage']}
		if 'object' in value and value['object'] == 'waterheater':
			inFeeder['tree'][key].update({'heat_mode': 'ELECTRIC'})
			inFeeder['tree'][key].update({'enable_volt_control': gb_status})
			inFeeder['tree'][key].update({'volt_lowlimit': '113.99'})
			inFeeder['tree'][key].update({'volt_uplimit': '126.99'})
			inFeeder['tree'][key].pop('demand')
			inFeeder['tree'][key].update({'water_demand': 'weekday_hotwater*1.00'})
		if 'object' in value and value['object'] == 'ZIPload':
			inFeeder['tree'][key].update({'enable_volt_control': gb_status})
			inFeeder['tree'][key].update({'volt_lowlimit': '113.99'})
			inFeeder['tree'][key].update({'volt_uplimit': '126.99'})
		if 'object' in value and value['object'] == 'house':
			houseMeter = value['parent']
			houseName = value['name']
			houseLon = str(value['longitude'])
			houseLat = str(value['latitude'])
			rooftopSolar_inverter = houseName + '_rooftop_inverter;'
			rooftopSolars.append("object solar {\n\tname " + houseName + "_rooftopSolar;\n\tparent " + rooftopSolar_inverter + "\n\tgenerator_status ONLINE;\n\tefficiency 0.2;\n\tlongitude " + houseLon + ";\n\tgenerator_mode SUPPLY_DRIVEN;\n\tpanel_type SINGLE_CRYSTAL_SILICON;\n\tlatitude " + houseLat + ";\n\tarea " + area + ";\n\t};\n")
			rooftopInverters.append("object inverter {\n\tphases ABCN;\n\tpower_factor 1.0;\n\tname " + rooftopSolar_inverter + "\n\tparent " + houseMeter + ";\n\tinverter_type PWM;\n\tlongitude " + houseLon + ";\n\tgenerator_mode CONSTANT_PF;\n\tlatitude " + houseLat + ";\n\t};\n")
		if 'argument' in value and 'minimum_timestep' in value['argument']:
			interval = int(re.search(r'\d+', value['argument']).group())
		if 'bustype' in value and 'SWING' in value['bustype']:
			substation = value['name']
			value['object'] = 'meter'
	# Create collectors for the different load objects in the circuit.
	collectorwat = ("object collector {\n\tname collector_Waterheater;\n\tgroup class=waterheater;\n\tproperty sum(actual_load);\n\tinterval " + str(interval) + ";\n\tfile out_load_waterheaters.csv;\n};\n")
	collectorz = ("object collector {\n\tname collector_ZIPloads;\n\tgroup class=ZIPload;\n\tproperty sum(base_power);\n\tinterval " + str(interval) + ";\n\tfile out_load_ziploads.csv;\n};\n")
	collectorh = ("object collector {\n\tname collector_HVAC;\n\tgroup class=house;\n\tproperty sum(heating_demand), sum(cooling_demand);\n\tinterval " + str(interval) + ";\n\tfile out_HVAC.csv;\n};\n")
	# Measure powerflow over the triplex meters; this determines whether solar is
	# generating. Negative powerflow means solar is generating; positive means it isn't.
	collectorRoof = ("object collector {\n\tname collector_rooftop;\n\tgroup class=triplex_meter;\n\tproperty sum(measured_real_power);\n\tinterval " + str(interval) + ";\n\tfile out_load_triplex.csv;\n};\n")
	# Create a recorder for substation powerflow.
	recordersub = ("object recorder {\n\tinterval " + str(interval) + ";\n\tproperty measured_real_power;\n\tfile out_substation_power.csv;\n\tparent " + str(substation) + ";\n\t};\n")
	# Create a recorder for a single solar roof object, just to record powerflow over that unit.
	# recorderSolarRoof = ("object recorder {\n\tinterval " + str(interval) + ";\n\tproperty measured_real_power;\n\tfile out_standard_solar_roof.csv;\n\tparent nreca_synthetic_meter_11283;\n\t};\n")
	# Create arrays of solar objects and wind objects to attach recorders to.
	recorders = []
	recorderw = []
	for i in range(len(solar_meters)):
		recorders.append("object recorder {\n\tinterval " + str(interval) + ";\n\tproperty measured_real_power;\n\tfile out_solar_" + str(i) + ".csv;\n\tparent " + str(solar_meters[i]) + ";\n\t};\n")
	for i in range(len(wind_obs)):
		recorderw.append("object recorder {\n\tinterval " + str(interval) + ";\n\tproperty Pconv;\n\tfile out_wind_" + str(i) + ".csv;\n\tparent " + str(wind_obs[i]) + ";\n\t};\n")
	with open('outGLM_rooftop.glm', 'w') as outFile:
		# Write collectors and recorders to the end of the .glm.
		# addedString = collectorwat + collectorz + collectorh + recordersub + collectorRoof + recorderSolarRoof
		addedString = collectorwat + collectorz + collectorh + recordersub + collectorRoof
		for i in recorders:
			addedString = addedString + i
		for i in recorderw:
			addedString = addedString + i
		# Write the rooftop inverter and solar objects to the end of the .glm.
		for i, j in zip(rooftopInverters, rooftopSolars):
			addedString = addedString + i + j
		outFile.write(feeder.sortedWrite(inFeeder['tree']) + addedString)
	os.system(omf.omfDir + '/solvers/gridlabd_gridballast/local_gd/bin/gridlabd outGLM_rooftop.glm')
	return name_volt_dict
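# Hedged usage sketch (the .omd path is illustrative):
#
#	name_volt_dict = ConvertAndwork('myFeeder.omd', gb_on_off='on', area=500)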
def work(modelDir, inputDict):
	''' Run the model in its directory. '''
	outData = {}
	feederName = [x for x in os.listdir(modelDir) if x.endswith('.omd')][0][:-4]
	inputDict["feederName1"] = feederName
	hazardPath = pJoin(modelDir, inputDict['weatherImpactsFileName'])
	with open(hazardPath, 'w') as hazardFile:
		hazardFile.write(inputDict['weatherImpacts'])
	with open(pJoin(modelDir, feederName + '.omd'), "r") as jsonIn:
		feederModel = json.load(jsonIn)
	# Create GFM input file.
	print("RUNNING GFM FOR", modelDir)
	critLoads = inputDict['criticalLoads']
	gfmInputTemplate = {
		'phase_variation': float(inputDict['phaseVariation']),
		'chance_constraint': float(inputDict['chanceConstraint']),
		'critical_load_met': float(inputDict['criticalLoadMet']),
		'total_load_met': float(inputDict['nonCriticalLoadMet']),
		'maxDGPerGenerator': float(inputDict['maxDGPerGenerator']),
		'dgUnitCost': float(inputDict['dgUnitCost']),
		'generatorCandidates': inputDict['generatorCandidates'],
		'criticalLoads': inputDict['criticalLoads']
	}
	gfmJson = convertToGFM(gfmInputTemplate, feederModel)
	gfmInputFilename = 'gfmInput.json'
	with open(pJoin(modelDir, gfmInputFilename), 'w') as outFile:
		json.dump(gfmJson, outFile, indent=4)
	# Check for overlap between the hazard field and the GFM circuit input:
	hazard = HazardField(hazardPath)
	if circuitOutsideOfHazard(hazard, gfmJson):
		outData['warning'] = 'Warning: the hazard field does not overlap with the circuit.'
	# Draw the hazard field if needed.
	if inputDict['showHazardField'] == 'Yes':
		hazard.drawHeatMap(show=False)
		plt.title('') # HACK: remove plot title.
	# Run GFM.
	gfmBinaryPath = pJoin(__neoMetaModel__._omfDir, 'solvers', 'gfm', 'Fragility.jar')
	rdtInputName = 'rdtInput.json'
	if platform.system() == 'Darwin':
		# HACK: force use of Java 8 on macOS.
		javaCmd = '/Library/Java/JavaVirtualMachines/jdk1.8.0_181.jdk/Contents/Home/bin/java'
	else:
		javaCmd = 'java'
	proc = subprocess.Popen(
		[javaCmd, '-jar', gfmBinaryPath, '-r', gfmInputFilename, '-wf', inputDict['weatherImpactsFileName'],
			'-num', inputDict['scenarioCount'], '-ro', rdtInputName],
		stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=modelDir)
	(stdout, stderr) = proc.communicate()
	with open(pJoin(modelDir, "gfmConsoleOut.txt"), "w") as gfmConsoleOut:
		gfmConsoleOut.write(stdout.decode())
	rdtInputFilePath = pJoin(modelDir, 'rdtInput.json')
	# Pull GFM input data on lines and generators for HTML presentation.
	with open(rdtInputFilePath, 'r') as rdtInputFile:
		# HACK: we use rdtInput as a string in the frontend.
		rdtJsonAsString = rdtInputFile.read()
	rdtJson = json.loads(rdtJsonAsString)
	rdtJson["power_flow"] = inputDict["power_flow"]
	rdtJson["solver_iteration_timeout"] = 300.0
	rdtJson["algorithm"] = "miqp"
	# Calculate line costs.
	lineData = {}
	for line in rdtJson["lines"]:
		lineData[line["id"]] = '{:,.2f}'.format(float(line["length"]) * float(inputDict["lineUnitCost"]))
	outData["lineData"] = lineData
	outData["generatorData"] = '{:,.2f}'.format(float(inputDict["dgUnitCost"]) * float(inputDict["maxDGPerGenerator"]))
	outData['gfmRawOut'] = rdtJsonAsString
	# Insert the user-specified scenarios block into the RDT input.
	if inputDict['scenarios'] != "":
		rdtJson['scenarios'] = json.loads(inputDict['scenarios'])
		with open(rdtInputFilePath, "w") as rdtInputFile:
			json.dump(rdtJson, rdtInputFile, indent=4)
	# Run GridLAB-D the first time to generate xrMatrices.
	print("RUNNING 1ST GLD RUN FOR", modelDir)
	omdPath = pJoin(modelDir, feederName + ".omd")
	with open(omdPath, "r") as omdFile:
		omd = json.load(omdFile)
	# Remove new line candidates to get normal system powerflow results.
	deleteList = []
	newLines = inputDict["newLineCandidates"].strip().replace(' ', '').split(',')
	for newLine in newLines:
		for omdObj in omd["tree"]:
			if "name" in omd["tree"][omdObj]:
				if newLine == omd["tree"][omdObj]["name"]:
					deleteList.append(omdObj)
	for delItem in deleteList:
		del omd["tree"][delItem]
	# Load a blank glm file and use it to write to it.
	feederPath = pJoin(modelDir, 'feeder.glm')
	with open(feederPath, 'w') as glmFile:
		toWrite = feeder.sortedWrite(omd['tree']) + "object jsondump {\n\tfilename_dump_reliability JSON_dump_line.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n"
		glmFile.write(toWrite)
	# Write attachments from the omd; if a file is missing, one will be created.
	for fileName in omd['attachments']:
		with open(os.path.join(modelDir, fileName), 'w') as f:
			f.write(omd['attachments'][fileName])
	# Wire in the climate file the user specifies via zipcode.
	climateFileName = weather.zipCodeToClimateName(inputDict["simulationZipCode"])
	shutil.copy(
		pJoin(__neoMetaModel__._omfDir, "data", "Climate", climateFileName + ".tmy2"),
		pJoin(modelDir, 'climate.tmy2'))
	# Platform-specific binaries for the first GridLAB-D run.
	if platform.system() == "Linux":
		myEnv = os.environ.copy()
		myEnv['GLPATH'] = omf.omfDir + '/solvers/gridlabdv990/'
		commandString = omf.omfDir + '/solvers/gridlabdv990/gridlabd.bin feeder.glm'
	elif platform.system() == "Windows":
		myEnv = os.environ.copy()
		commandString = '"' + pJoin(omf.omfDir, "solvers", "gridlabdv990", "gridlabd.exe") + '"' + " feeder.glm"
	elif platform.system() == "Darwin":
		myEnv = os.environ.copy()
		myEnv['GLPATH'] = omf.omfDir + '/solvers/gridlabdv990/MacRC4p1_std8/'
		commandString = '"' + omf.omfDir + '/solvers/gridlabdv990/MacRC4p1_std8/gld.sh" feeder.glm'
	# Run GridLAB-D the first time.
	proc = subprocess.Popen(commandString, stdout=subprocess.PIPE, shell=True, cwd=modelDir, env=myEnv)
	(out, err) = proc.communicate()
	with open(pJoin(modelDir, "gldConsoleOut.txt"), "w") as gldConsoleOut:
		gldConsoleOut.write(out.decode())
	with open(pJoin(modelDir, "JSON_dump_line.json"), "r") as gldOut:
		gld_json_line_dump = json.load(gldOut)
	outData['gridlabdRawOut'] = gld_json_line_dump
	# Add GridLAB-D line objects and line codes into the RDT model.
	rdtJson["line_codes"] = gld_json_line_dump["properties"]["line_codes"]
	rdtJson["lines"] = gld_json_line_dump["properties"]["lines"]
	hardCands = list(set(gfmJson['lineLikeObjs']) - set(inputDict['hardeningCandidates']))
	newLineCands = inputDict['newLineCandidates'].strip().replace(' ', '').split(',')
	switchCands = inputDict['switchCandidates'].strip().replace(' ', '').split(',')
	for line in rdtJson["lines"]:
		line_id = line.get('id', '') # This is equal to name in the OMD objects.
		object_type = line.get('object', '')
		line['node1_id'] = line['node1_id'] + "_bus"
		line['node2_id'] = line['node2_id'] + "_bus"
		line_code = line["line_code"]
		# Get ratings from the OMD.
		tree = omd['tree']
		nameToIndex = {tree[key].get('name', ''): key for key in tree}
		treeOb = tree[nameToIndex[line_id]]
		config_name = treeOb.get('configuration', '')
		config_ob = tree.get(nameToIndex.get(config_name, ''), {})
		full_rating = 0
		for phase in ['A', 'B', 'C']:
			cond_name = config_ob.get('conductor_' + phase, '')
			cond_ob = tree.get(nameToIndex.get(cond_name, ''), '')
			rating = cond_ob.get('rating.summer.continuous', '')
			try:
				full_rating = int(rating) #TODO: replace with avg of 3 phases.
			except:
				pass
		if full_rating != 0:
			line['capacity'] = full_rating
		else:
			line['capacity'] = 10000
		# Set other line parameters.
		line['construction_cost'] = float(inputDict['lineUnitCost'])
		line['harden_cost'] = float(inputDict['hardeningUnitCost'])
		line['switch_cost'] = float(inputDict['switchCost'])
		if line_id in hardCands:
			line['can_harden'] = True
		if line_id in switchCands:
			line['can_add_switch'] = True
		if line_id in newLineCands:
			line['is_new'] = True
		if object_type in ['transformer', 'regulator']:
			line['is_transformer'] = True
		if object_type == 'switch':
			line['has_switch'] = True
	with open(rdtInputFilePath, "w") as outFile:
		json.dump(rdtJson, outFile, indent=4)
	# Run RDT.
	print("RUNNING RDT FOR", modelDir)
	rdtOutFile = modelDir + '/rdtOutput.json'
	rdtSolverFolder = pJoin(__neoMetaModel__._omfDir, 'solvers', 'rdt')
	rdtJarPath = pJoin(rdtSolverFolder, 'micot-rdt.jar')
	# TODO: modify path, don't assume SCIP installation.
	proc = subprocess.Popen(
		['java', "-Djna.library.path=" + rdtSolverFolder, '-jar', rdtJarPath, '-c', rdtInputFilePath, '-e', rdtOutFile],
		stdout=subprocess.PIPE, stderr=subprocess.PIPE)
	(stdout, stderr) = proc.communicate()
	with open(pJoin(modelDir, "rdtConsoleOut.txt"), "w") as rdtConsoleOut:
		rdtConsoleOut.write(stdout.decode())
	with open(rdtOutFile) as f:
		rdtRawOut = f.read()
	outData['rdtRawOut'] = rdtRawOut
	# Indent the RDT output nicely.
	with open(rdtOutFile, "w") as outFile:
		rdtOut = json.loads(rdtRawOut)
		json.dump(rdtOut, outFile, indent=4)
	# Generate and run a second copy of the GridLAB-D model with changes specified by RDT.
	print("RUNNING 2ND GLD RUN FOR", modelDir)
	feederCopy = copy.deepcopy(feederModel)
	lineSwitchList = []
	edgeLabels = {}
	generatorList = []
	for gen in rdtOut['design_solution']['generators']:
		generatorList.append(gen['id'][:-4])
	damagedLoads = {}
	for scenario in rdtOut['scenario_solution']:
		for load in scenario['loads']:
			loadName = load['id'][:-4]
			if loadName in damagedLoads:
				damagedLoads[loadName] += 1
			else:
				damagedLoads[loadName] = 1
	for line in rdtOut['design_solution']['lines']:
		if 'switch_built' in line and 'hardened' in line:
			lineSwitchList.append(line['id'])
			if line['switch_built'] == True and line['hardened'] == True:
				edgeLabels[line['id']] = "SH"
			elif line['switch_built'] == True:
				edgeLabels[line['id']] = "S"
			elif line['hardened'] == True:
				edgeLabels[line['id']] = "H"
		elif 'switch_built' in line:
			lineSwitchList.append(line['id'])
			if line['switch_built'] == True:
				edgeLabels[line['id']] = "S"
		elif 'hardened' in line:
			if line['hardened'] == True:
				edgeLabels[line['id']] = "H"
	# Remove nonessential lines in the second model as indicated by RDT output.
	for key in list(feederCopy['tree'].keys()):
		value = feederCopy['tree'][key]
		if 'object' in value:
			if value['object'] in ('underground_line', 'overhead_line'):
				if value['name'] not in lineSwitchList:
					del feederCopy['tree'][key]
	# Add generators to the second model.
	maxTreeKey = int(max(feederCopy['tree'], key=int)) + 1
	# Load a blank glm file and use it to write to it.
	feederPath = pJoin(modelDir, 'feederSecond.glm')
	with open(feederPath, 'w') as glmFile:
		toWrite = ("module generators;\n\n"
			+ feeder.sortedWrite(feederCopy['tree'])
			+ "object voltdump {\n\tfilename voltDump2ndRun.csv;\n};\n"
			+ "object jsondump {\n\tfilename_dump_reliability test_JSON_dump.json;\n\twrite_system_info true;\n\twrite_per_unit true;\n\tsystem_base 100.0 MVA;\n};\n")
		# + "object jsonreader {\n\tfilename " + insertRealRdtOutputNameHere + ";\n};"
		glmFile.write(toWrite)
	# Run GridLAB-D a second time.
	if platform.system() == "Windows":
		proc = subprocess.Popen(['gridlabd', 'feederSecond.glm'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=modelDir)
		(out, err) = proc.communicate()
		outData["secondGLD"] = str(os.path.isfile(pJoin(modelDir, "voltDump2ndRun.csv")))
	else:
		# TODO: make the second run of GridLAB-D work on Unixes.
		outData["secondGLD"] = str(False)
	# Draw the feeder.
	damageDict = {}
	for scenario in rdtJson["scenarios"]:
		for line in scenario["disable_lines"]:
			if line in damageDict:
				damageDict[line] = damageDict[line] + 1
			else:
				damageDict[line] = 1
	genDiagram(modelDir, feederModel, damageDict, critLoads, damagedLoads, edgeLabels, generatorList)
	with open(pJoin(modelDir, "feederChart.png"), "rb") as inFile:
		outData["oneLineDiagram"] = base64.standard_b64encode(inFile.read()).decode()
	# And we're done.
	return outData
def ConvertAndwork(filePath, gb_on_off='on'):
	"""
	Convert an omd to a glm, adding the recorders, collectors, and
	attributes+parameters that gridballast GridLAB-D needs to run on water
	heaters and ZIPloads, then run the model.
	"""
	gb_status = 'true' if gb_on_off == 'on' else 'false'
	print("Gridballast is " + gb_on_off)
	with open(filePath, 'r') as inFile:
		inFeeder = json.load(inFile)
	attachments = inFeeder.get('attachments', {})
	include_files = attachments.keys()
	if 'schedules.glm' in include_files:
		with open('schedules.glm', 'w') as outFile:
			outFile.write(attachments['schedules.glm'])
		with open('_voltViz/schedules.glm', 'w') as outFile:
			outFile.write(attachments['schedules.glm'])
	if 'schedulesResponsiveLoads.glm' in include_files:
		with open('schedulesResponsiveLoads.glm', 'w') as outFile:
			outFile.write(attachments['schedulesResponsiveLoads.glm'])
		with open('_voltViz/schedulesResponsiveLoads.glm', 'w') as outFile:
			outFile.write(attachments['schedulesResponsiveLoads.glm'])
	inFeeder['tree']['01'] = {'omftype': '#include', 'argument': '"hot_water_demand1.glm"'}
	inFeeder['tree']['011'] = {'class': 'player', 'double': 'value'} # add in manually for now
	inFeeder['tree']['0111'] = {'object': 'voltdump', 'filename': 'voltDump.csv'}
	name_volt_dict = {}
	solar_meters = []
	wind_obs = []
	substation = None
	for key, value in inFeeder['tree'].items():
		if 'name' in value and 'solar' in value['name']:
			inverter_ob = value['parent']
			# Use distinct loop variables so the outer `value` isn't clobbered.
			for key2, value2 in inFeeder['tree'].items():
				if 'name' in value2 and value2['name'] == inverter_ob:
					solar_meters.append(value2['parent'])
		if 'name' in value and 'wind' in value['name']:
			wind_obs.append(value['name'])
		if 'name' in value and 'nominal_voltage' in value:
			name_volt_dict[value['name']] = {'Nominal_Voltage': value['nominal_voltage']}
		if 'object' in value and value['object'] == 'waterheater':
			inFeeder['tree'][key].update({'heat_mode': 'ELECTRIC'})
			inFeeder['tree'][key].update({'enable_volt_control': gb_status})
			inFeeder['tree'][key].update({'volt_lowlimit': '113.99'})
			inFeeder['tree'][key].update({'volt_uplimit': '126.99'})
			inFeeder['tree'][key].pop('demand')
			inFeeder['tree'][key].update({'water_demand': 'weekday_hotwater*1.00'})
		if 'object' in value and value['object'] == 'ZIPload':
			inFeeder['tree'][key].update({'enable_volt_control': gb_status})
			inFeeder['tree'][key].update({'volt_lowlimit': '113.99'})
			inFeeder['tree'][key].update({'volt_uplimit': '126.99'})
		if 'object' in value and value['object'] == 'house':
			houseMeter = value['parent']
		if 'argument' in value and 'minimum_timestep' in value['argument']:
			interval = int(re.search(r'\d+', value['argument']).group())
		if 'bustype' in value and 'SWING' in value['bustype']:
			substation = value['name']
			value['object'] = 'meter'
	# Create collectors for the different load objects and a substation recorder.
	collectorwat = ("object collector {\n\tname collector_Waterheater;\n\tgroup class=waterheater;\n\tproperty sum(actual_load);\n\tinterval " + str(interval) + ";\n\tfile out_load_waterheaters.csv;\n};\n")
	collectorz = ("object collector {\n\tname collector_ZIPloads;\n\tgroup class=ZIPload;\n\tproperty sum(base_power);\n\tinterval " + str(interval) + ";\n\tfile out_load_ziploads.csv;\n};\n")
	collectorh = ("object collector {\n\tname collector_HVAC;\n\tgroup class=house;\n\tproperty sum(heating_demand), sum(cooling_demand);\n\tinterval " + str(interval) + ";\n\tfile out_HVAC.csv;\n};\n")
	recordersub = ("object recorder {\n\tinterval " + str(interval) + ";\n\tproperty measured_real_power;\n\tfile out_substation_power.csv;\n\tparent " + str(substation) + ";\n\t};\n")
	recorders = []
	recorderw = []
	for i in range(len(solar_meters)):
		recorders.append("object recorder {\n\tinterval " + str(interval) + ";\n\tproperty measured_real_power;\n\tfile out_solar_gen_" + str(i) + ".csv;\n\tparent " + str(solar_meters[i]) + ";\n\t};\n")
	for i in range(len(wind_obs)):
		recorderw.append("object recorder {\n\tinterval " + str(interval) + ";\n\tproperty Pconv;\n\tfile out_wind_gen" + str(i) + ".csv;\n\tparent " + str(wind_obs[i]) + ";\n\t};\n")
	with open('outGLM.glm', "w") as outFile:
		addedString = collectorwat + collectorz + collectorh + recordersub
		for i in recorders:
			addedString = addedString + i
		for i in recorderw:
			addedString = addedString + i
		outFile.write(feeder.sortedWrite(inFeeder['tree']) + addedString)
	copyfile('outGLM.glm', '_voltViz/outGLM.glm')
	os.system(omf.omfDir + '/solvers/gridlabd_gridballast/local_gd/bin/gridlabd outGLM.glm')
	return name_volt_dict
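# The collectors and recorders above write GridLAB-D tape-module CSVs, which
# lead with header lines starting with '#'. A hedged sketch of reading one back
# (the column names are assumptions based on the collector definition above):
#
#	import pandas as pd
#	df = pd.read_csv('out_load_waterheaters.csv', comment='#', names=['timestamp', 'sum_actual_load'])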