Example #1
0
def _convertForCvr(stdPath, seqPath, outFilePath):
	''' Convert a feeder to a GLM'''
	with open(stdPath) as stdFile, open(seqPath) as seqFile:
		stdString = stdFile.read()
		seqString = seqFile.read()
	tree,xScale,yScale = milToGridlab.convert(stdString,seqString)
	with open(outFilePath,'w') as glmFile:
		glmFile.write(feeder.sortedWrite(tree))
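A minimal sketch of how the converter above might be invoked; the .std/.seq/.glm paths are placeholders, and the snippet assumes the feeder and milToGridlab modules are importable as in the function body.

# Hypothetical call: convert a WindMil export into a GridLAB-D model on disk.
_convertForCvr('myFeeder.std', 'myFeeder.seq', 'myFeeder.glm')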
def _tests():
    #tests here
    glm_object_dict = feeder.parse('./IEEE13Basic.glm')
    baseGLM, last_key = GLD_Feeder(glm_object_dict, 0, None)
    glm_string = feeder.sortedWrite(baseGLM)
    file = open('./IEEE13BasePopulation.glm', 'w')
    file.write(glm_string)
    file.close()
    print('success!')
Example #4
0
def runInFilesystem(feederTree, attachments=[], keepFiles=False, workDir=None, glmName=None):
	''' Execute gridlab in the local filesystem. Return a nice dictionary of results. '''
	try:
		binaryName = "gridlabd"
		# Create a running directory and fill it, unless we've specified where we're running.
		madeOwnWorkDir = False
		if not workDir:
			workDir = tempfile.mkdtemp()
			madeOwnWorkDir = True
			print "gridlabD runInFilesystem with no specified workDir. Working in", workDir
		# Need to zero out lat/lon data on copy because it frequently breaks Gridlab.
		localTree = deepcopy(feederTree)
		for key in localTree.keys():
			try:
				del localTree[key]["latitude"]
				del localTree[key]["longitude"]
			except KeyError:
				pass # No lat/lon on this object.
		# Write attachments and glm.
		for attach in attachments:
			with open (pJoin(workDir,attach),'w') as attachFile:
				attachFile.write(attachments[attach])
		glmString = feeder.sortedWrite(localTree)
		if not glmName:
			glmName = "main." + datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S') + ".glm"
		with open(pJoin(workDir, glmName),'w') as glmFile:
			glmFile.write(glmString)
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!) 
		with open(pJoin(workDir,'stdout.txt'),'w') as stdout, open(pJoin(workDir,'stderr.txt'),'w') as stderr, open(pJoin(workDir,'PID.txt'),'w') as pidFile:
			# MAYBEFIX: turn stderr WARNINGS back on once we figure out how to suppress the 500MB of lines gridlabd wants to write...
			proc = subprocess.Popen([binaryName,'-w', glmName], cwd=workDir, stdout=stdout, stderr=stderr)
			pidFile.write(str(proc.pid))
		returnCode = proc.wait()
		# Build raw JSON output.
		rawOut = anaDataTree(workDir, lambda x:True)
		with open(pJoin(workDir,'stderr.txt'),'r') as stderrFile:
			rawOut['stderr'] = stderrFile.read().strip()
		with open(pJoin(workDir,'stdout.txt'),'r') as stdoutFile:
			rawOut['stdout'] = stdoutFile.read().strip()
		# Delete the folder and return.
		if not keepFiles and madeOwnWorkDir:
			# NOTE: if we were given a working directory, don't just blow it away; only delete directories we created ourselves.
			for attempt in range(5):
				try:
					shutil.rmtree(workDir)
					break
				except WindowsError:
					# HACK: without a short sleep, Windows intermittently fails to delete things and an exception is thrown.
					# Probably because Dropbox is monkeying around in these folders on the dev machine; it works when Dropbox is off.
					time.sleep(2)
		return rawOut
	except:
		with open(pJoin(workDir, "stderr.txt"), "a+") as stderrFile:
			traceback.print_exc(file = stderrFile)
		return {}
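As a rough usage sketch (not from the original module), runInFilesystem above expects a parsed feeder tree and a dictionary of attachments; the input file name below is a placeholder.

# Sketch: parse a GLM into a tree, run it, and look at the captured gridlabd output.
tree = feeder.parse('myFeeder.glm')  # placeholder input file
results = runInFilesystem(tree, attachments={}, keepFiles=True)
print(results.get('stderr', ''))  # gridlabd warnings and errors, if any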
Example #5
0
def runInFilesystem(feederTree, attachments=[], keepFiles=False, workDir=None, glmName=None):
	''' Execute gridlab in the local filesystem. Return a nice dictionary of results. '''
	try:
		binaryName = _myDir + "/local_gd/bin/gridlabd"
		# Create a running directory and fill it, unless we've specified where we're running.
		madeOwnWorkDir = False
		if not workDir:
			workDir = tempfile.mkdtemp()
			madeOwnWorkDir = True
			print "gridlabD runInFilesystem with no specified workDir. Working in", workDir
		# Need to zero out lat/lon data on copy because it frequently breaks Gridlab.
		localTree = deepcopy(feederTree)
		for key in localTree.keys():
			try:
				del localTree[key]["latitude"]
				del localTree[key]["longitude"]
			except KeyError:
				pass # No lat/lon on this object.
		# Write attachments and glm.
		for attach in attachments:
			with open (pJoin(workDir,attach),'w') as attachFile:
				attachFile.write(attachments[attach])
		glmString = feeder.sortedWrite(localTree)
		if not glmName:
			glmName = "main." + datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S') + ".glm"
		with open(pJoin(workDir, glmName),'w') as glmFile:
			glmFile.write(glmString)
		# RUN GRIDLABD IN FILESYSTEM (EXPENSIVE!) 
		with open(pJoin(workDir,'stdout.txt'),'w') as stdout, open(pJoin(workDir,'stderr.txt'),'w') as stderr, open(pJoin(workDir,'PID.txt'),'w') as pidFile:
			# MAYBEFIX: turn stderr WARNINGS back on once we figure out how to suppress the 500MB of lines gridlabd wants to write...
			proc = subprocess.Popen([binaryName,'-w', glmName], cwd=workDir, stdout=stdout, stderr=stderr)
			pidFile.write(str(proc.pid))
		returnCode = proc.wait()
		# Build raw JSON output.
		rawOut = anaDataTree(workDir, lambda x:True)
		with open(pJoin(workDir,'stderr.txt'),'r') as stderrFile:
			rawOut['stderr'] = stderrFile.read().strip()
		with open(pJoin(workDir,'stdout.txt'),'r') as stdoutFile:
			rawOut['stdout'] = stdoutFile.read().strip()
		# Delete the folder and return.
		if not keepFiles and madeOwnWorkDir:
			# NOTE: if we were given a working directory, don't just blow it away; only delete directories we created ourselves.
			for attempt in range(5):
				try:
					shutil.rmtree(workDir)
					break
				except WindowsError:
					# HACK: without a short sleep, Windows intermittently fails to delete things and an exception is thrown.
					# Probably because Dropbox is monkeying around in these folders on the dev machine; it works when Dropbox is off.
					time.sleep(2)
		return rawOut
	except:
		with open(pJoin(workDir, "stderr.txt"), "a+") as stderrFile:
			traceback.print_exc(file = stderrFile)
		return {}
Example #6
0
def OMFmain(milsoft,
            scada,
            case_flag,
            calibration_config,
            model_name='Feeder',
            user_flag_to_calibrate=1):

    if milsoft is None:
        print("Please input a model to convert!")
        # error
        return None, None
    else:
        internal_flag_to_calibrate = 0
        if scada is None:
            pass  # Well, we can't do any calibration but we can still pump out a populated model by using defaults.
        else:
            internal_flag_to_calibrate = 1
            days, SCADA = processSCADA.getValues(scada)

        outGLM = milsoft
        directory = tempfile.mkdtemp()
        print "Calibration testing in ", directory

        # Write the base .glm to file (save as .txt so that it isn't run when the batch file is executed).
        basefile = open(directory + '/' + model_name + '_base_glm.txt', 'w')
        basefile.write(
            '\\\\ Base feeder model generated by milToGridlab.py.\n')
        basefile.write(feeder.sortedWrite(outGLM))
        basefile.close()

        if internal_flag_to_calibrate == 1 and user_flag_to_calibrate == 1:  # The user must want to calibrate (user_flag_to_calibrate = 1) and we must have SCADA input (internal_flag_to_calibrate = 1).
            # Send base .glm dictionary to calibration function
            final_calib_file, final_dict, last_key = calibrateFeeder.calibrateFeeder(
                outGLM, days, SCADA, case_flag, calibration_config, directory)
        else:
            # Populate the feeder without calibration.
            print(
                "Either the user selected not to calibrate this feeder, the SCADA was not input, or this feeder has already been calibrated."
            )
            final_calib_file = None  # No calibration file is produced in this branch.
            final_dict, last_key = Milsoft_GridLAB_D_Feeder_Generation.GLD_Feeder(
                outGLM, case_flag, directory, calibration_config)
        #AddTapeObjects
        #filename = 'test_feeder'
        if final_dict is not None:
            #AddTapeObjects.add_recorders(final_dict,None,last_key,None,1,0,filename,None,0,0)
            dict_with_recorders, last_key = AddTapeObjects.add_recorders(
                final_dict, case_flag, 0, 1, model_name, last_key)
            return dict_with_recorders, final_calib_file
        else:
            return None, None
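A hedged sketch of one way OMFmain above might be driven end to end; the WindMil export paths are placeholders, and the 0/None values for case_flag and calibration_config are assumptions rather than documented defaults.

# Sketch: convert a WindMil export to a feeder tree, then populate it without SCADA calibration.
with open('myFeeder.std') as stdFile, open('myFeeder.seq') as seqFile:  # placeholder exports
    milsoftTree, xScale, yScale = milToGridlab.convert(stdFile.read(), seqFile.read())
populatedTree, calibFile = OMFmain(milsoftTree, None, 0, None, model_name='myFeeder', user_flag_to_calibrate=0)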
Example #7
0
def handleMdbFile(mdb_path, modelDir, failure=False):
    ''' Convert mdb database to glm file. '''
    try:
        outputFname = mdb_path.split('/')[-1].replace('.mdb', '.glm')
        with open(outputFname, 'w') as output_file:
            glm, x_scale, y_scale = cyme.convertCymeModel(mdb_path, modelDir)
            output_file.write(feeder.sortedWrite(glm))
    except IOError:
        print 'UNABLE TO WRITE GLM FILE.'
        failure = True
    except:
        print 'ERROR IN CYME MODEL FUNCTION.', sys.exc_info()[0]
        failure = True
    # No explicit close is needed: the with-block closes output_file automatically.
    return failure
Example #8
0
def handleMilFile(std_path, seq_path, failure=False):
    ''' Conversion routine for the std and seq files. '''
    # Attempt to open std and seq files and convert to glm.
    try:
        with open(std_path, 'r') as std_file, open(seq_path, 'r') as seq_file:
            # We wish to put the file in the current running directory.
            output_path = std_path.split('/')[-1].replace('.std', '.glm')
            glm, x_scale, y_scale = mil.convert(std_file.read(), seq_file.read())
            # Open the output inside a with-block so it is always closed, even on error.
            with open(output_path, 'w') as output_file:
                output_file.write(feeder.sortedWrite(glm))
            print 'GLM FILE WRITTEN FOR STD/SEQ COMBO.'
    except IOError:
        print 'UNABLE TO WRITE GLM FILE.'
        failure = True
    return failure
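A short sketch of how the two handlers above might be called together; both return a failure flag, and the input paths are placeholders.

# Sketch: run both conversion handlers and report any failure.
milFailed = handleMilFile('myFeeder.std', 'myFeeder.seq')   # placeholder WindMil exports
mdbFailed = handleMdbFile('myFeeder.mdb', './modelDir')     # placeholder CYME database and model directory
if milFailed or mdbFailed:
    print('At least one conversion failed.')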
Example #9
0
def handleMdbFile(mdb_path, modelDir, failure=False):
    ''' Convert mdb database to glm file. '''
    try:
        outputFname = mdb_path.split('/')[-1].replace('.mdb', '.glm')
        with open(outputFname, 'w') as output_file:
            glm, x_scale, y_scale = cyme.convertCymeModel(mdb_path, modelDir)
            output_file.write(feeder.sortedWrite(glm))
    except IOError:
        print 'UNABLE TO WRITE GLM FILE.'
        failure = True
    except IndexError:
        print 'INDEX ACCESSING ERROR IN CYME MODEL FUNCTION AT: ' + str(
            sys.exc_info()[2])
        print traceback.format_exc()
    except KeyError:
        print 'DICTIONARY ERROR IN CYME MODEL FUNCTION AT: ' + str(
            sys.exc_info()[2])
    except Exception, err:
        print str(err)
        print traceback.format_exc()
        failure = True
    return failure
Example #10
0
def OMFmain(milsoft, scada, case_flag, calibration_config,  model_name='Feeder', user_flag_to_calibrate=1):

	if milsoft is None:
		print ("Please input a model to convert!")
		# error
		return None, None
	else:
		internal_flag_to_calibrate = 0
		if scada is None:
			pass  # Well, we can't do any calibration but we can still pump out a populated model by using defaults.
		else:
			internal_flag_to_calibrate = 1
			days, SCADA = processSCADA.getValues(scada)

		outGLM = milsoft
		directory = tempfile.mkdtemp()
		print "Calibration testing in ", directory
		
		# Write the base .glm to file (save as .txt so that it isn't run when the batch file is executed).
		basefile = open(directory+'/'+model_name+'_base_glm.txt','w')
		basefile.write('\\\\ Base feeder model generated by milToGridlab.py.\n')
		basefile.write(feeder.sortedWrite(outGLM))
		basefile.close()
		
		if internal_flag_to_calibrate == 1 and user_flag_to_calibrate == 1:  # The user must want to calibrate (user_flag_to_calibrate = 1) and we must have SCADA input (internal_flag_to_calibrate = 1).
			# Send base .glm dictionary to calibration function
			final_calib_file, final_dict, last_key = calibrateFeeder.calibrateFeeder(outGLM, days, SCADA, case_flag, calibration_config, directory)
		else:
			# Populate the feeder without calibration.
			print ("Either the user selected not to calibrate this feeder, the SCADA was not input, or this feeder has already been calibrated.")
			final_calib_file = None  # No calibration file is produced in this branch.
			final_dict, last_key = Milsoft_GridLAB_D_Feeder_Generation.GLD_Feeder(outGLM,case_flag,directory,calibration_config)
		#AddTapeObjects
		#filename = 'test_feeder'
		if final_dict is not None:
			#AddTapeObjects.add_recorders(final_dict,None,last_key,None,1,0,filename,None,0,0)
			dict_with_recorders, last_key = AddTapeObjects.add_recorders(final_dict,case_flag,0,1,model_name,last_key)
			return dict_with_recorders, final_calib_file
		else:
			return None, None
Example #11
0
def convertTests():
    ''' Test convert every windmil feeder we have (in uploads). Return number of exceptions we hit. '''
    exceptionCount = 0
    testFiles = [('OrvilleTreePond.std', 'OrvilleTreePond.seq')]
    # ,('OlinBarre.std','OlinBarre.seq'),('OlinBeckenham.std','OlinBeckenham.seq'), ('AutocliAlberich.std','AutocliAlberich.seq')
    for stdString, seqString in testFiles:
        try:
            # Convert the std+seq.
            with open(stdString, 'r') as stdFile, open(seqString,
                                                       'r') as seqFile:
                outGlm, x, y = milToGridlab.convert(stdFile.read(),
                                                    seqFile.read())
            with open(stdString.replace('.std', '.glm'), 'w') as outFile:
                outFile.write(feeder.sortedWrite(outGlm))
            print 'WROTE GLM FOR', stdString
            try:
                # Draw the GLM.
                myGraph = feeder.treeToNxGraph(outGlm)
                feeder.latLonNxGraph(myGraph, neatoLayout=False)
                plt.savefig(stdString.replace('.std', '.png'))
                print 'DREW GLM OF', stdString
            except:
                exceptionCount += 1
                print 'FAILED DRAWING', stdString
            try:
                # Run powerflow on the GLM.
                output = gridlabd.runInFilesystem(outGlm, keepFiles=False)
                with open(stdString.replace('.std', '.json'), 'w') as outFile:
                    json.dump(output, outFile, indent=4)
                print 'RAN GRIDLAB ON', stdString
            except:
                exceptionCount += 1
                print 'POWERFLOW FAILED', stdString
        except:
            print 'FAILED CONVERTING', stdString
            exceptionCount += 1
            traceback.print_exc()
    print exceptionCount
    return exceptionCount
Example #12
0
def _tests():
    '''Self-test to verify module functions correctly.'''
    logger.info('Started parsing .glm...')
    input_glm_dict = feeder.parse("/Users/hard312/Documents/Projects/TSP/rev0/"
                                  "models/gridlabd/testbed2/"
                                  "R2_1247_2_t0_growth.glm", filePath=True)
    logger.info('Done parsing .glm. Now de-embedding')
    feeder.fullyDeEmbed(input_glm_dict)
    logger.info('Completed de-embedding.')
    logger.debug('Final input .glm dictionary...')
    logger.debug(pp.pformat(input_glm_dict))
    house_stats = gather_house_stats(input_glm_dict)
    # xfmr_summary = summarize_xfmr_stats(house_stats)
    mod_glm_dict = copy.deepcopy(input_glm_dict)
    mod_glm_dict = update_brownfield_loads(mod_glm_dict, 0.05)
    mod_glm_dict, house_stats = add_greenfield_loads(input_glm_dict,
                                                     house_stats, 0.90)
    logger.info('Completed modification of feeder.')
    logger.debug(pp.pformat(mod_glm_dict))
    feeder_str = feeder.sortedWrite(mod_glm_dict)
    glm_file = open('./modified_feeder.glm', 'w')
    glm_file.write(feeder_str)
    glm_file.close()
Example #13
0
def _tests():  
    #Import GLM and convert to dictionary when directly called from the command line. When used as a Python module, 
    # will be handed a dictionary containing the feeder model and the parameters for the battery being added.
    convOut = feeder.parse(sys.argv[1])
    nodeList = ['Node633']
    battDict = {'feederDict':convOut,
                'battNode': nodeList,
                'battEnergyRatingkWh':250,
                'invPowerRatingkW': 100,
                'battSOCpu': 0.5,
                'invControl':'LOAD_following',
                'controlSenseNode': 'Node633',
                'invChargeOnThresholdkW': 1500,
                'invChargeOffThresholdkW': 1700,
                'invDischargeOnThresholdkW': 1800,
                'invDischargeOffThresholdkW': 1750,
                }
    outTree = addBattery(battDict)
    if outTree == 0:
        print 'Failed to add battery. Continuing on...'
    GLM = feeder.sortedWrite(outTree)
    # Use a with-block so the output file is flushed and closed.
    with open('testFeeder.glm', 'w') as f:
        f.write(GLM)
Example #14
0
def _tests():
    pathPrefix = '../../uploads/'
    testFiles = [
        ('INEC-RENOIR.std', 'INEC.seq'), ('INEC-GRAHAM.std', 'INEC.seq'),
        ('Olin-Barre.std', 'Olin.seq'), ('Olin-Brown.std', 'Olin.seq'),
        ('ABEC-Frank.std', 'ABEC.seq'), ('ABEC-COLUMBIA.std', 'ABEC.seq')
    ]

    for stdPath, seqPath in testFiles:
        try:
            # Convert the std+seq.
            with open(pathPrefix + stdPath,
                      'r') as stdFile, open(pathPrefix + seqPath,
                                            'r') as seqFile:
                outGlm, x, y = m2g.convert(stdFile.read(), seqFile.read())
            with open(stdPath.replace('.std', '.glm'), 'w') as outFile:
                outFile.write(feeder.sortedWrite(outGlm))
            print 'WROTE GLM FOR', stdPath
            try:
                # Draw the GLM.
                myGraph = feeder.treeToNxGraph(outGlm)
                feeder.latLonNxGraph(myGraph, neatoLayout=False)
                plt.savefig(stdPath.replace('.std', '.png'))
            except:
                print 'FAILED DRAWING', stdPath
            try:
                # Run powerflow on the GLM.
                output = gridlabd.runInFilesystem(outGlm, keepFiles=False)
                with open(stdPath.replace('.std', '.json'), 'w') as outFile:
                    json.dump(output, outFile, indent=4)
            except:
                print 'POWERFLOW FAILED', stdPath
        except:
            print 'FAILED CONVERTING', stdPath
            traceback.print_exc()

    print os.listdir('.')
Example #15
0
def convertTests():
	''' Test convert every windmil feeder we have (in uploads). Return number of exceptions we hit. '''
	exceptionCount = 0
	testFiles = [('OrvilleTreePond.std','OrvilleTreePond.seq')]
	# ,('OlinBarre.std','OlinBarre.seq'),('OlinBeckenham.std','OlinBeckenham.seq'), ('AutocliAlberich.std','AutocliAlberich.seq')
	for stdString, seqString in testFiles:
		try:
			# Convert the std+seq.
			with open(stdString,'r') as stdFile, open(seqString,'r') as seqFile:
				outGlm,x,y = milToGridlab.convert(stdFile.read(),seqFile.read())
			with open(stdString.replace('.std','.glm'),'w') as outFile:
				outFile.write(feeder.sortedWrite(outGlm))
			print 'WROTE GLM FOR', stdString
			try:
				# Draw the GLM.
				myGraph = feeder.treeToNxGraph(outGlm)
				feeder.latLonNxGraph(myGraph, neatoLayout=False)
				plt.savefig(stdString.replace('.std','.png'))
				print 'DREW GLM OF', stdString
			except:
				exceptionCount += 1
				print 'FAILED DRAWING', stdString
			try:
				# Run powerflow on the GLM.
				output = gridlabd.runInFilesystem(outGlm, keepFiles=False)
				with open(stdString.replace('.std','.json'),'w') as outFile:
					json.dump(output, outFile, indent=4)
				print 'RAN GRIDLAB ON', stdString					
			except:
				exceptionCount += 1
				print 'POWERFLOW FAILED', stdString
		except:
			print 'FAILED CONVERTING', stdString
			exceptionCount += 1
			traceback.print_exc()
	print exceptionCount
	return exceptionCount
Example #16
0
def writeFeeder(tree, fname):
    with open(fname, 'w') as outFile:
        outFile.write(feeder.sortedWrite(tree))
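writeFeeder above pairs naturally with feeder.parse for a quick round trip; a minimal sketch with placeholder file names:

# Sketch: read a model, then serialize it back out through writeFeeder.
tree = feeder.parse('inFeeder.glm')   # placeholder input
writeFeeder(tree, 'outFeeder.glm')    # placeholder output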
Example #17
0
#!/usr/bin/env python

import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.getcwd())))
import feeder as tp
from pprint import pformat

tree=tp.parse('main.glm')
with open('out.txt','w') as outFile:
	outFile.write(pformat(tree))

with open('out.glm','w') as outGlm:
	outGlm.write(tp.sortedWrite(tree))
Example #18
0
	# Fix da loads.
	#{'phases': 'BN', 'object': 'load', 'name': 'S1806-32-065', 'parent': 'nodeS1806-32-065T14102', 'load_class': 'R', 'constant_power_C': '0', 'constant_power_B': '1.06969', 'constant_power_A': '0', 'nominal_voltage': '120'}
	for loadKey in all2ndLoadKeys:
		newDict = {}
		newDict['object'] = 'triplex_node'
		newDict['name'] = glm[loadKey]['name']
		newDict['phases'] = sorted(glm[loadKey]['phases'])[0] + 'S'
		a = glm[loadKey]['constant_power_A']
		b = glm[loadKey]['constant_power_B']
		c = glm[loadKey]['constant_power_C']
		powList = [x for x in [a,b,c] if x!='0' and x!='0.0']
		newDict['power_12'] = ('0' if len(powList)==0 else powList.pop())
		newDict['parent'] = glm[loadKey]['parent']
		newDict['nominal_voltage'] = '120'
		glm[loadKey] = newDict

	# Gotta fix the transformer phases too...
	for key in all2ndTransKeys:
		fromName = glm[key]['from']
		toName = glm[key]['to']
		fromToPhases = [glm[x]['phases'] for x in glm if 'name' in glm[x] and glm[x]['name'] in [fromName, toName]]
		glm[key]['phases'] = set('ABC').intersection(*map(set, fromToPhases)).pop() + 'S'
		configKey = [x for x in glm[key] if type(x) is int].pop()
		glm[key][configKey]['connect_type'] = 'SINGLE_PHASE_CENTER_TAPPED'

secondarySystemFix(glm)

# print out
with open('ACEC-Friendship-Full.glm','w') as outFile:
	outString = tp.sortedWrite(glm)
	outFile.write(outString)
Example #19
0
File: debug.py Project: cdkkim/omf
#!/usr/bin/env python

''' Let's get that texas coop's feeder working in Gridlab. '''

# First, do the path nonsense to import omf libraries.
import os, sys, json
def popPath(path):
	return os.path.split(path)[0]
thisFile = os.path.realpath(__file__)
sys.path.append(popPath(popPath(popPath(thisFile))))
import feeder

# Pull in json, write GLM.
with open('Rector2413.json','r') as feedFile:
	allJson = json.load(feedFile)
	tree = allJson.get('tree',{})
	attachments = allJson.get('attachments',{})

glm = feeder.sortedWrite(tree)

with open('Rector2413.glm','w') as glmFile:
	glmFile.write(glm)

for key in attachments:
	with open(key, 'w') as attachFile:
		attachFile.write(attachments.get(key,''))
Example #20
0
        ]
        for config in lineConfigs:
            if config['conductor_A'] in nameDictMap.keys():
                config['conductor_A'] = nameDictMap[config['conductor_A']]
            if config['conductor_B'] in nameDictMap.keys():
                config['conductor_B'] = nameDictMap[config['conductor_B']]
            if config['conductor_C'] in nameDictMap.keys():
                config['conductor_C'] = nameDictMap[config['conductor_C']]
            if config['conductor_N'] in nameDictMap.keys():
                config['conductor_N'] = nameDictMap[config['conductor_N']]
    elif compName == 'line_configuration':
        lines = [
            glmRef[x] for x in glmRef if 'object' in glmRef[x]
            and glmRef[x]['object'] in ['overhead_line', 'underground_line']
        ]
        for line in lines:
            if line['configuration'] in nameDictMap.keys():
                line['configuration'] = nameDictMap[line['configuration']]


dedupGlm('transformer_configuration', glm)
dedupGlm('regulator_configuration', glm)
dedupGlm('line_spacing', glm)
dedupGlm('overhead_line_conductor', glm)
dedupGlm('underground_line_conductor', glm)
# NOTE: This last dedup has to come last, because it relies on doing conductors and spacings first!
dedupGlm('line_configuration', glm)

outString = tp.sortedWrite(glm)
with open('ACEC-Friendship-DISEMB.glm', 'w') as outFile:
    outFile.write(outString)
Example #21
0
lineCoords = [
    x for x in glmTree
    if 'object' in glmTree[x] and (glmTree[x]['object'] == 'underground_line'
                                   or glmTree[x]['object'] == 'overhead_line')
]
print lineCoords

# Here are the keys for a line:
print [x for x in glmTree[12]]

# Replace the embedded configurations with refs to config objects.
for coord in lineCoords:
    intKeys = [x for x in glmTree[coord] if type(x) is int]
    if len(intKeys) == 1:
        target = intKeys[0]
        del glmTree[coord][target]
        if glmTree[coord]['object'] == 'underground_line':
            glmTree[coord]['configuration'] = 'lc_7211'
        elif glmTree[coord]['object'] == 'overhead_line':
            glmTree[coord]['configuration'] = 'ohconfig'

# Just write it out.
outGlmString = tp.sortedWrite(glmTree)
with open('ILEC-Rembrandt-SYNTH.glm', 'w') as synthFile:
    synthFile.write(outGlmString)

# Do a regular conversion. convert() takes file contents and returns a (tree, xScale, yScale) tuple,
# so read the files and serialize the tree before writing it out.
with open('../../uploads/ILEC-Rembrandt.std') as stdFile, open('../../uploads/ILEC.seq') as seqFile:
    outGlm, xScale, yScale = milToGridlab.convert(stdFile.read(), seqFile.read())
with open('ILEC-Rembrandt-AUTOSYNTH.glm', 'w') as synthFile2:
    synthFile2.write(tp.sortedWrite(outGlm))
Example #22
0
def _tests(Network, Equipment, keepFiles=True):
    import os, json, traceback, shutil
    from pathlib import Path  # Needed for the Path(...) call below.
    from solvers import gridlabd
    from matplotlib import pyplot as plt
    import feeder
    exceptionCount = 0
    try:
        #db_network = os.path.abspath('./uploads/IEEE13.mdb')
        #db_equipment = os.path.abspath('./uploads/IEEE13.mdb')
        prefix = str(Path("testPEC.py").resolve()).strip(
            'scratch\cymeToGridlabTests\testPEC.py') + "\uploads\\"
        db_network = "C" + prefix + Network
        db_equipment = "C" + prefix + Equipment
        id_feeder = '650'
        conductors = prefix + "conductor_data.csv"
        #print "dbnet", db_network
        #print "eqnet", db_equipment
        #print "conductors", conductors
        #cyme_base, x, y = convertCymeModel(db_network, db_equipment, id_feeder, conductors)
        cyme_base, x, y = convertCymeModel(str(db_network),
                                           str(db_equipment),
                                           test=True,
                                           type=2,
                                           feeder_id='CV160')
        feeder.attachRecorders(cyme_base, "TriplexLosses", None, None)
        feeder.attachRecorders(cyme_base, "TransformerLosses", None, None)
        glmString = feeder.sortedWrite(cyme_base)
        feederglm = "C:\Users\Asus\Documents\GitHub\omf\omf\uploads\PEC.glm"
        #print "feeederglm", feederglm
        gfile = open(feederglm, 'w')
        gfile.write(glmString)
        gfile.close()
        #print 'WROTE GLM FOR'
        outPrefix = "C:\Users\Asus\Documents\GitHub\omf\omf\scratch\cymeToGridlabTests\\"
        try:
            os.mkdir(outPrefix)
        except:
            pass  # Directory already there.
        '''Attempt to graph'''
        try:
            # Draw the GLM.
            print "trying to graph"
            myGraph = feeder.treeToNxGraph(cyme_base)
            feeder.latLonNxGraph(myGraph, neatoLayout=False)
            plt.savefig(outPrefix + "PEC.png")
            print "outprefix", outPrefix + "PEC.png"
            print 'DREW GLM OF'
        except:
            exceptionCount += 1
            print 'FAILED DRAWING'
        try:
            # Run powerflow on the GLM.
            # runInFilesystem expects the feeder tree (a dict), not the serialized GLM string.
            output = gridlabd.runInFilesystem(cyme_base, keepFiles=False)
            with open(outPrefix + "PEC.JSON", 'w') as outFile:
                json.dump(output, outFile, indent=4)
            print 'RAN GRIDLAB ON\n'
        except:
            exceptionCount += 1
            print 'POWERFLOW FAILED'
    except:
        print 'FAILED CONVERTING'
        exceptionCount += 1
        traceback.print_exc()
    if not keepFiles:
        shutil.rmtree(outPrefix)
    return exceptionCount
    '''db_network = os.path.abspath('./uploads/PasoRobles11cymsectiondevice[device]['phases']08.mdb')
Example #23
0
#!/usr/bin/env python

import os
import sys

sys.path.append(os.path.dirname(os.path.dirname(os.getcwd())))
import feeder as tp
from pprint import pformat

tree = tp.parse('main.glm')
with open('out.txt', 'w') as outFile:
    outFile.write(pformat(tree))

with open('out.glm', 'w') as outGlm:
    outGlm.write(tp.sortedWrite(tree))
Example #24
0
def makeGLM(clock, calib_file, baseGLM, case_flag, wdir):
    '''Create a populated dict and write it to a .glm file.

    - clock (dictionary) -- links the three seasonal dates with start and stop timestamps (simulation starts a full 24 hours before the day we're recording)
    - calib_file (string) -- filename of one of the calibration files generated during a calibration round
    - baseGLM (dictionary) -- original base dictionary for use in Milsoft_GridLAB_D_Feeder_Generation.py
    - case_flag (int) -- flag for technologies to test
    - feeder_config (string, TODO: this is future work, leave as 'None') -- feeder configuration file (weather, sizing, etc.)
    - wdir (string) -- directory in which to store created .glm files
    '''
    # Create populated dictionary.
    glmDict, last_key = Milsoft_GridLAB_D_Feeder_Generation.GLD_Feeder(
        baseGLM, case_flag, calib_file)

    fnames = []
    for i in clock.keys():
        # Simulation start
        starttime = clock[i][0]
        # Recording start
        rec_starttime = i
        # Simulation and Recording stop
        stoptime = clock[i][1]

        # Calculate limit.
        j = datetime.datetime.strptime(rec_starttime, '%Y-%m-%d %H:%M:%S')
        k = datetime.datetime.strptime(stoptime, '%Y-%m-%d %H:%M:%S')
        diff = (k - j).total_seconds()
        limit = int(math.ceil(diff / interval))

        populated_dict = glmDict

        # Name the file.
        if calib_file is None:
            id = 'DefaultCalibration'
        else:
            m = re.compile('\.txt$')
            id = m.sub('', calib_file.get('name', ''))
        date = re.sub('\s.*$', '', rec_starttime)
        filename = id + '_' + date + '.glm'

        # Get into clock object and set start and stop timestamp.
        for key in populated_dict.keys():  # Distinct name so we don't shadow the outer loop variable i.
            if 'clock' in populated_dict[key].keys():
                populated_dict[key]['starttime'] = "'{:s}'".format(starttime)
                populated_dict[key]['stoptime'] = "'{:s}'".format(stoptime)

        lkey = last_key

        if use_mysql == 1:
            # Add GridLAB-D objects for recording into MySQL database.
            populated_dict[lkey] = {'module': 'mysql'}
            lkey += 1
            populated_dict[lkey] = {
                'object': 'database',
                'name': '{:s}'.format(schema),
                'schema': '{:s}'.format(schema)
            }
            lkey += 1
            populated_dict[lkey] = {
                'object': 'mysql.recorder',
                'table': 'network_node_recorder',
                'parent': 'network_node',
                'property': 'measured_real_power,measured_real_energy',
                'interval': '{:d}'.format(interval),
                'limit': '{:d}'.format(limit),
                'start': "'{:s}'".format(rec_starttime),
                'connection': schema,
                'mode': 'a'
            }
        else:
            # Add GridLAB-D object for recording into *.csv files.
            try:
                os.mkdir(os.path.join(wdir, 'csv_output'))
            except:
                pass
            # Add GridLAB-D object for recording into *.csv files.
            populated_dict[lkey] = {
                'object': 'tape.recorder',
                'file': './csv_output/{:s}_{:s}_network_node_recorder.csv'.format(id, date),
                'parent': 'network_node',
                'property': 'measured_real_power,measured_real_energy',
                'interval': '{:d}'.format(interval),
                'limit': '{:d}'.format(limit),
                'in': "'{:s}'".format(rec_starttime)
            }

        # Turn dictionary into a *.glm string and print it to a file in the given directory.
        glmstring = feeder.sortedWrite(populated_dict)
        file = open(os.path.join(wdir, filename), 'w')
        file.write(glmstring)
        file.close()
        print("\t" + filename + " is ready.")

        fnames.append(filename)
    return fnames
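The clock argument to makeGLM above maps each recording-start timestamp to a [simulation-start, stop] pair; a hedged sketch of what such a call might look like (the dates, baseGLM, and output directory are placeholders, and the module-level interval/use_mysql/schema settings are assumed to be configured).

# Hypothetical clock dictionary: recording start -> [simulation start, simulation/recording stop].
clock = {
    '2013-01-15 00:00:00': ['2013-01-14 00:00:00', '2013-01-16 00:00:00'],  # winter day
    '2013-04-15 00:00:00': ['2013-04-14 00:00:00', '2013-04-16 00:00:00'],  # shoulder day
    '2013-07-15 00:00:00': ['2013-07-14 00:00:00', '2013-07-16 00:00:00'],  # summer day
}
baseGLM = feeder.parse('myFeeder.glm')  # placeholder base model
fileNames = makeGLM(clock, None, baseGLM, 0, './glmOutput')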
Example #25
0
def writeFeeder(tree, fname):
	with open(fname, 'w') as outFile:
		outFile.write(feeder.sortedWrite(tree))
Example #26
0
def _tests(Network, Equipment, keepFiles=True):
    import os, json, traceback, shutil
    from pathlib import Path  # Needed for the Path(...) call below.
    from solvers import gridlabd
    from matplotlib import pyplot as plt
    import feeder
    exceptionCount = 0       
    try:
        #db_network = os.path.abspath('./uploads/IEEE13.mdb')
        #db_equipment = os.path.abspath('./uploads/IEEE13.mdb')
        prefix = str(Path("testPEC.py").resolve()).strip('scratch\cymeToGridlabTests\testPEC.py') + "\uploads\\"      
        db_network = "C" + prefix + Network
        db_equipment = "C" + prefix + Equipment
        id_feeder = '650'
        conductors = prefix + "conductor_data.csv"
        #print "dbnet", db_network
        #print "eqnet", db_equipment               
        #print "conductors", conductors
        #cyme_base, x, y = convertCymeModel(db_network, db_equipment, id_feeder, conductors)
        cyme_base, x, y = convertCymeModel(str(db_network), str(db_equipment), test=True, type=2, feeder_id='CV160')    
        feeder.attachRecorders(cyme_base, "TriplexLosses", None, None)
        feeder.attachRecorders(cyme_base, "TransformerLosses", None, None)
        glmString = feeder.sortedWrite(cyme_base)
        feederglm = "C:\Users\Asus\Documents\GitHub\omf\omf\uploads\PEC.glm"
        #print "feeederglm", feederglm
        gfile = open(feederglm, 'w')
        gfile.write(glmString)
        gfile.close()
        #print 'WROTE GLM FOR'
        outPrefix = "C:\Users\Asus\Documents\GitHub\omf\omf\scratch\cymeToGridlabTests\\"          
        try:
            os.mkdir(outPrefix)
        except:
            pass # Directory already there.     
        '''Attempt to graph'''      
        try:
            # Draw the GLM.
            print "trying to graph"
            myGraph = feeder.treeToNxGraph(cyme_base)
            feeder.latLonNxGraph(myGraph, neatoLayout=False)
            plt.savefig(outPrefix + "PEC.png")
            print "outprefix", outPrefix + "PEC.png"
            print 'DREW GLM OF'
        except:
            exceptionCount += 1
            print 'FAILED DRAWING'
        try:
            # Run powerflow on the GLM.
            # runInFilesystem expects the feeder tree (a dict), not the serialized GLM string.
            output = gridlabd.runInFilesystem(cyme_base, keepFiles=False)
            with open(outPrefix + "PEC.JSON",'w') as outFile:
                json.dump(output, outFile, indent=4)
            print 'RAN GRIDLAB ON\n'                 
        except:
            exceptionCount += 1
            print 'POWERFLOW FAILED'
    except:
        print 'FAILED CONVERTING'
        exceptionCount += 1
        traceback.print_exc()
    if not keepFiles:
        shutil.rmtree(outPrefix)
    return exceptionCount    
    '''db_network = os.path.abspath('./uploads/PasoRobles11cymsectiondevice[device]['phases']08.mdb')
Example #27
0
def readSCADA(scadaFile):
    scada = {'timestamp' : [],
                    'phaseAW' : [],
                    'phaseBW' : [],
                    'phaseCW' : [],
                    'phaseAVAR' : [],
                    'phaseBVAR' : [],
                    'phaseCVAR' : [],
                    'totalVA' : [],
                    'pfA' : [],
                    'pfB' : [],
                    'pfC' : [],
                    'puLoad' : [],
                    'VoltageA' : [],
                    'VoltageB' : [],
                    'VoltageC' : []}
    scadaInfo = {'summerDay' : None,
                        'winterDay' : None,
                        'shoulderDay' : None,
                        'summerPeakKW' : 0,
                        'summerTotalEnergy' : 30134.0730,
                        'summerPeakHour' : None,
                        'summerMinimumKW' : 1e15,
                        'summerMinimumHour' : None,
                        'winterPeakKW' : 0,
                        'winterTotalEnergy' : 37252.8585,
                        'winterPeakHour' : None,
                        'winterMinimumKW' : 1e15,
                        'winterMinimumHour' : None,
                        'shoulderPeakKW' : 0,
                        'shoulderTotalEnergy' : 38226.7564,
                        'shoulderPeakHour' : None,
                        'shoulderMinimumKW' : 1e15,
                        'shoulderMinimumHour' : None}
    scadaRaw = _readCsv(scadaFile)[1:]
    index = 0
    loadMax = 0.0
    voltA = 120.0
    voltB = 120.0
    voltC = 120.0
    for row in scadaRaw:
        if float(row[4]) >= 114.0:
            voltA = float(row[4])
        if float(row[5]) >= 114.0:
            voltB = float(row[5])
        if float(row[6]) >= 114.0:
            voltC = float(row[6])
        scada['timestamp'].append(datetime.datetime.strptime(row[0], "%m/%d/%Y %H:%M"))
        scada['phaseAW'].append(float(row[1])*voltA*7200.0*abs(float(row[7]))/120.0)
        scada['phaseBW'].append(float(row[2])*voltB*7200.0*abs(float(row[8]))/120.0)
        scada['phaseCW'].append(float(row[3])*voltC*7200.0*abs(float(row[9]))/120.0)
        if float(row[7]) >= 0.0:
            scada['phaseAVAR'].append(float(row[1])*voltA*7200.0*math.sqrt(1-(abs(float(row[7])))**2)/120.0)
        else:
            scada['phaseAVAR'].append(-1.0*float(row[1])*voltA*7200.0*math.sqrt(1-(abs(float(row[7])))**2)/120.0)
        if float(row[8]) >= 0.0:
            scada['phaseBVAR'].append(float(row[2])*voltB*7200.0*math.sqrt(1-(abs(float(row[8])))**2)/120.0)
        else:
            scada['phaseBVAR'].append(-1.0*float(row[2])*voltB*7200.0*math.sqrt(1-(abs(float(row[8])))**2)/120.0)
        if float(row[9]) >= 0.0:
            scada['phaseCVAR'].append(float(row[3])*voltC*7200.0*math.sqrt(1-(abs(float(row[9])))**2)/120.0)
        else:
            scada['phaseCVAR'].append(-1.0*float(row[3])*voltC*7200.0*math.sqrt(1-(abs(float(row[9])))**2)/120.0)
        scada['pfA'].append(float(row[7]))
        scada['pfB'].append(float(row[8]))
        scada['pfC'].append(float(row[9]))
        scada['VoltageA'].append(str(complex(voltA*7200.0/120.0, 0.0)).replace('(','').replace(')',''))
        scada['VoltageB'].append(str(complex(-voltB*7200.0*0.5/120.0, -voltB*7200.0*math.sqrt(3)*0.5/120.0)).replace('(','').replace(')',''))
        scada['VoltageC'].append(str(complex(-voltC*7200.0*0.5/120.0, voltC*7200.0*math.sqrt(3)*0.5/120.0)).replace('(','').replace(')',''))
        scada['totalVA'].append(complex(scada['phaseAW'][index] + scada['phaseBW'][index] + scada['phaseCW'][index], scada['phaseAVAR'][index] + scada['phaseBVAR'][index] + scada['phaseCVAR'][index]))
        if scada['timestamp'][index].year == 2013:
            if scada['timestamp'][index].month in [1, 2, 12]:
                if scadaInfo['winterPeakKW'] < scada['totalVA'][index].real/1000.0:
                    scadaInfo['winterPeakKW'] = scada['totalVA'][index].real/1000.0
                    scadaInfo['winterDay'] = scada['timestamp'][index].strftime("%Y-%m-%d")
                    scadaInfo['winterPeakHour'] = float(scada['timestamp'][index].hour)
            elif scada['timestamp'][index].month in [3, 4, 5, 9, 10, 11]:
                if scadaInfo['shoulderPeakKW'] < scada['totalVA'][index].real/1000.0:
                    scadaInfo['shoulderPeakKW'] = scada['totalVA'][index].real/1000.0
                    scadaInfo['shoulderDay'] = scada['timestamp'][index].strftime("%Y-%m-%d")
                    scadaInfo['shoulderPeakHour'] = float(scada['timestamp'][index].hour)
            elif scada['timestamp'][index].month in [6, 7 , 8]:
                if scadaInfo['summerPeakKW'] < scada['totalVA'][index].real/1000.0:
                    scadaInfo['summerPeakKW'] = scada['totalVA'][index].real/1000.0
                    scadaInfo['summerDay'] = scada['timestamp'][index].strftime("%Y-%m-%d")
                    scadaInfo['summerPeakHour'] = float(scada['timestamp'][index].hour)
            if loadMax <= abs(scada['totalVA'][index]):
                loadMax = abs(scada['totalVA'][index])
        index += 1
    for load in scada['totalVA']:
        scada['puLoad'].append(abs(load)/loadMax)
    for index in xrange(len(scada['timestamp'])):
        if scadaInfo['winterDay'] == scada['timestamp'][index].strftime("%Y-%m-%d") and scadaInfo['winterMinimumKW'] > scada['totalVA'][index].real/1000.0 and scada['totalVA'][index] != 0.0:
            scadaInfo['winterMinimumKW'] = scada['totalVA'][index].real/1000.0
            scadaInfo['winterMinimumHour'] = float(scada['timestamp'][index].hour)
        if scadaInfo['summerDay'] == scada['timestamp'][index].strftime("%Y-%m-%d") and scadaInfo['summerMinimumKW'] > scada['totalVA'][index].real/1000.0 and scada['totalVA'][index] != 0.0:
            scadaInfo['summerMinimumKW'] = scada['totalVA'][index].real/1000.0
            scadaInfo['summerMinimumHour'] = float(scada['timestamp'][index].hour)
        if scadaInfo['shoulderDay'] == scada['timestamp'][index].strftime("%Y-%m-%d") and scadaInfo['shoulderMinimumKW'] > scada['totalVA'][index].real/1000.0 and scada['totalVA'][index] != 0.0:
            scadaInfo['shoulderMinimumKW'] = scada['totalVA'][index].real/1000.0
            scadaInfo['shoulderMinimumHour'] = float(scada['timestamp'][index].hour)
    for key in scadaInfo.keys():
        print key, scadaInfo[key]
    loadShapeFile = open('./loadShapeScalar.player', 'w')
    for index in xrange(len(scada['timestamp'])):
        if scada['puLoad'][index] != 0.0:
            if scada['timestamp'][index].month in [1, 2, 12]:
                loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
            elif scada['timestamp'][index].month in [4, 5, 6, 7, 8, 9, 10]:
                loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
            elif scada['timestamp'][index].month == 3:
                if scada['timestamp'][index].day < 10:
                    loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
                elif scada['timestamp'][index].day > 10:
                    loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
                elif scada['timestamp'][index].day == 10:
                    if scada['timestamp'][index].hour < 2:
                        loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
                    elif scada['timestamp'][index].hour > 2:
                        loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
            elif scada['timestamp'][index].month == 11:
                if scada['timestamp'][index].day < 3:
                    loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
                elif scada['timestamp'][index].day > 3:
                    loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
                elif scada['timestamp'][index].day == 3:
                    if index < 29380:
                        loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
                    else:
                        loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['puLoad'][index]))
    loadShapeFile.close()
    loadShapeFile = open('./phaseApf.player', 'w')
    for index in xrange(len(scada['timestamp'])):
        if scada['puLoad'][index] != 0.0:
            if scada['timestamp'][index].month in [1, 2, 12]:
                loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
            elif scada['timestamp'][index].month in [4, 5, 6, 7, 8, 9, 10]:
                loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
            elif scada['timestamp'][index].month == 3:
                if scada['timestamp'][index].day < 10:
                    loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
                elif scada['timestamp'][index].day > 10:
                    loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
                elif scada['timestamp'][index].day == 10:
                    if scada['timestamp'][index].hour < 2:
                        loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
                    elif scada['timestamp'][index].hour > 2:
                        loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
            elif scada['timestamp'][index].month == 11:
                if scada['timestamp'][index].day < 3:
                    loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
                elif scada['timestamp'][index].day > 3:
                    loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
                elif scada['timestamp'][index].day == 3:
                    if index < 29380:
                        loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
                    else:
                        loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfA'][index]))
    loadShapeFile.close()
    loadShapeFile = open('./phaseBpf.player', 'w')
    for index in xrange(len(scada['timestamp'])):
        if scada['puLoad'][index] != 0.0:
            if scada['timestamp'][index].month in [1, 2, 12]:
                loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
            elif scada['timestamp'][index].month in [4, 5, 6, 7, 8, 9, 10]:
                loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
            elif scada['timestamp'][index].month == 3:
                if scada['timestamp'][index].day < 10:
                    loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
                elif scada['timestamp'][index].day > 10:
                    loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
                elif scada['timestamp'][index].day == 10:
                    if scada['timestamp'][index].hour < 2:
                        loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
                    elif scada['timestamp'][index].hour > 2:
                        loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
            elif scada['timestamp'][index].month == 11:
                if scada['timestamp'][index].day < 3:
                    loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
                elif scada['timestamp'][index].day > 3:
                    loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
                elif scada['timestamp'][index].day == 3:
                    if index < 29380:
                        loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
                    else:
                        loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfB'][index]))
    loadShapeFile.close()
    loadShapeFile = open('./phaseCpf.player', 'w')
    for index in xrange(len(scada['timestamp'])):
        if scada['puLoad'][index] != 0.0:
            if scada['timestamp'][index].month in [1, 2, 12]:
                loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
            elif scada['timestamp'][index].month in [4, 5, 6, 7, 8, 9, 10]:
                loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
            elif scada['timestamp'][index].month == 3:
                if scada['timestamp'][index].day < 10:
                    loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
                elif scada['timestamp'][index].day > 10:
                    loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
                elif scada['timestamp'][index].day == 10:
                    if scada['timestamp'][index].hour < 2:
                        loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
                    elif scada['timestamp'][index].hour > 2:
                        loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
            elif scada['timestamp'][index].month == 11:
                if scada['timestamp'][index].day < 3:
                    loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
                elif scada['timestamp'][index].day > 3:
                    loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
                elif scada['timestamp'][index].day == 3:
                    if index < 29380:
                        loadShapeFile.write('{:s} CDT,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
                    else:
                        loadShapeFile.write('{:s} CST,{:0.6f}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['pfC'][index]))
    loadShapeFile.close()
    loadShapeFile = open('./phaseAVoltage.player', 'w')
    for index in xrange(len(scada['timestamp'])):
        if scada['puLoad'][index] != 0.0:
            if scada['timestamp'][index].month in [1, 2, 12]:
                loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
            elif scada['timestamp'][index].month in [4, 5, 6, 7, 8, 9, 10]:
                loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
            elif scada['timestamp'][index].month == 3:
                if scada['timestamp'][index].day < 10:
                    loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
                elif scada['timestamp'][index].day > 10:
                    loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
                elif scada['timestamp'][index].day == 10:
                    if scada['timestamp'][index].hour < 2:
                        loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
                    elif scada['timestamp'][index].hour > 2:
                        loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
            elif scada['timestamp'][index].month == 11:
                if scada['timestamp'][index].day < 3:
                    loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
                elif scada['timestamp'][index].day > 3:
                    loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
                elif scada['timestamp'][index].day == 3:
                    if index < 29380:
                        loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
                    else:
                        loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageA'][index]))
    loadShapeFile.close()
    loadShapeFile = open('./phaseBVoltage.player', 'w')
    for index in xrange(len(scada['timestamp'])):
        if scada['puLoad'][index] != 0.0:
            if scada['timestamp'][index].month in [1, 2, 12]:
                loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
            elif scada['timestamp'][index].month in [4, 5, 6, 7, 8, 9, 10]:
                loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
            elif scada['timestamp'][index].month == 3:
                if scada['timestamp'][index].day < 10:
                    loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
                elif scada['timestamp'][index].day > 10:
                    loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
                elif scada['timestamp'][index].day == 10:
                    if scada['timestamp'][index].hour < 2:
                        loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
                    elif scada['timestamp'][index].hour > 2:
                        loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
            elif scada['timestamp'][index].month == 11:
                if scada['timestamp'][index].day < 3:
                    loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
                elif scada['timestamp'][index].day > 3:
                    loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
                elif scada['timestamp'][index].day == 3:
                    if index < 29380:
                        loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
                    else:
                        loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageB'][index]))
    loadShapeFile.close()
    loadShapeFile = open('./phaseCVoltage.player', 'w')
    for index in xrange(len(scada['timestamp'])):
        if scada['puLoad'][index] != 0.0:
            if scada['timestamp'][index].month in [1, 2, 12]:
                loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
            elif scada['timestamp'][index].month in [4, 5, 6, 7, 8, 9, 10]:
                loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
            elif scada['timestamp'][index].month == 3:
                if scada['timestamp'][index].day < 10:
                    loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
                elif scada['timestamp'][index].day > 10:
                    loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
                elif scada['timestamp'][index].day == 10:
                    if scada['timestamp'][index].hour < 2:
                        loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
                    elif scada['timestamp'][index].hour > 2:
                        loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
            elif scada['timestamp'][index].month == 11:
                if scada['timestamp'][index].day < 3:
                    loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
                elif scada['timestamp'][index].day > 3:
                    loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
                elif scada['timestamp'][index].day == 3:
                    if index < 29380:
                        loadShapeFile.write('{:s} CDT,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
                    else:
                        loadShapeFile.write('{:s} CST,{:s}\n'.format(scada['timestamp'][index].strftime("%Y-%m-%d %H:%M:%S"), scada['VoltageC'][index]))
    loadShapeFile.close()
    configInfo = {'timezone' : 'CST+6CDT',
        'startdate' : '2013-01-01 0:00:00',
        'stopdate' : '2014-01-01 0:00:00',
        'feeder_rating' : loadMax*1.15,
        'nom_volt' : 7200,
        'voltage_players' : [os.path.abspath('./phaseAVoltage.player').replace('\\', '/'),
            os.path.abspath('./phaseBVoltage.player').replace('\\', '/'),
            os.path.abspath('./phaseCVoltage.player').replace('\\', '/')],
        'load_shape_scalar' : 1.0,
        'r_p_pfA' : os.path.abspath('./phaseApf.player').replace('\\', '/'),
        'r_p_pfB' : os.path.abspath('./phaseBpf.player').replace('\\', '/'),
        'r_p_pfC' : os.path.abspath('./phaseCpf.player').replace('\\', '/'),
        'load_shape_player_file' : os.path.abspath('./loadShapeScalar.player').replace('\\', '/')}
    working_directory = tempfile.mkdtemp()
    feederTree = feeder.parse('./faNewestConversionNoRecorder.glm')
    calibratedFeederTree, calibrationConfiguration = feederCalibrate.startCalibration(working_directory, feederTree, scadaInfo, 'MavaCapBank', configInfo)
    print(calibrationConfiguration['load_shape_scalar'])
    calibratedFile = open('./mavaCapBanksBaseCase.glm', 'w')
    glmstring = feeder.sortedWrite(calibratedFeederTree)
    calibratedFile.write(glmstring)
    calibratedFile.close()
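The three phase-voltage loops above repeat the same CST/CDT branching. The helper below is a minimal sketch, not code from the original project, of how that logic could be factored out; it assumes the 2013 US DST boundaries and the hard-coded fall-back index 29380 used above, and that the voltage samples are already strings (as the {:s} formatting implies).

import datetime  # assumed to be imported in the original module as well

def _tzSuffix(timestamp, index, fallBackIndex=29380):
    '''Return 'CST' or 'CDT' for a 2013 local timestamp from this SCADA series.'''
    springForward = datetime.datetime(2013, 3, 10, 2, 0, 0)
    fallBack = datetime.datetime(2013, 11, 3, 2, 0, 0)
    if timestamp < springForward:
        return 'CST'
    elif timestamp.month == 11 and timestamp.day == 3:
        # The 01:00-02:00 hour repeats on the fall-back day; disambiguate by sample position.
        return 'CDT' if index < fallBackIndex else 'CST'
    elif timestamp < fallBack:
        return 'CDT'
    return 'CST'

def _writeVoltagePlayer(fileName, scada, phaseKey):
    '''Write one phase-voltage player file, skipping zero-load samples.'''
    with open(fileName, 'w') as playerFile:
        for index, timestamp in enumerate(scada['timestamp']):
            if scada['puLoad'][index] != 0.0:
                playerFile.write('{:s} {:s},{:s}\n'.format(timestamp.strftime('%Y-%m-%d %H:%M:%S'), _tzSuffix(timestamp, index), scada[phaseKey][index]))

# Equivalent to the three hand-written loops above:
# for phase in 'ABC':
#     _writeVoltagePlayer('./phase{0}Voltage.player'.format(phase), scada, 'Voltage{0}'.format(phase))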
Example #28
0
def makeGLM(clock, calib_file, baseGLM, case_flag, wdir):
	'''Create a populated dict and write it to a .glm file.

	- clock (dict) -- links the three seasonal recording dates with start and stop timestamps (the simulation starts a full 24 hours before the day being recorded)
	- calib_file (dict or None) -- calibration output from a calibration round; its 'name' entry is used to label the generated files
	- baseGLM (dict) -- original base dictionary for use in Milsoft_GridLAB_D_Feeder_Generation.py
	- case_flag (int) -- flag technologies to test
	- feeder_config (string TODO: this is future work, leave as 'None') -- feeder configuration file (weather, sizing, etc.)
	- wdir (string) -- directory in which to store the created .glm files
	'''
	# Create populated dictionary.
	glmDict, last_key = Milsoft_GridLAB_D_Feeder_Generation.GLD_Feeder(baseGLM,case_flag,calib_file) 
	
	fnames =  []
	for i in clock.keys():
		# Simulation start
		starttime = clock[i][0]
		# Recording start
		rec_starttime = i
		# Simulation and Recording stop
		stoptime = clock[i][1]
		
		# Calculate the recorder sample limit ('interval', in seconds, is presumably defined at module level in the original project).
		j = datetime.datetime.strptime(rec_starttime,'%Y-%m-%d %H:%M:%S')
		k = datetime.datetime.strptime(stoptime,'%Y-%m-%d %H:%M:%S')
		diff = (k - j).total_seconds()
		limit = int(math.ceil(diff / interval))
		
		# Note: this aliases glmDict rather than copying it, so the clock and recorder entries set below are simply overwritten on each pass through the loop.
		populated_dict = glmDict
		
		# Name the file.
		if calib_file is None:
			ident = 'DefaultCalibration'
		else:
			# Strip a trailing .txt extension from the calibration's name entry.
			ident = re.sub(r'\.txt$', '', calib_file.get('name', ''))
		date = re.sub(r'\s.*$', '', rec_starttime)
		filename = ident + '_' + date + '.glm'
		
		# Get into clock object and set start and stop timestamp.
		for key in populated_dict.keys():
			if 'clock' in populated_dict[key].keys():
				populated_dict[key]['starttime'] = "'{:s}'".format(starttime)
				populated_dict[key]['stoptime'] = "'{:s}'".format(stoptime)
		
		lkey = last_key
		
		# 'use_mysql' and 'schema' are presumably module-level settings in the original project.
		if use_mysql == 1:
			# Add GridLAB-D objects for recording into MySQL database.
			populated_dict[lkey] = { 'module' : 'mysql' }
			lkey += 1
			populated_dict[lkey] = {'object' : 'database',
										'name' : '{:s}'.format(schema),
										'schema' : '{:s}'.format(schema) }
			lkey += 1
			populated_dict[lkey] = {'object' : 'mysql.recorder',
										'table' : 'network_node_recorder',
										'parent' : 'network_node',
										'property' : 'measured_real_power,measured_real_energy',
										'interval' : '{:d}'.format(interval),
										'limit' : '{:d}'.format(limit),
										'start': "'{:s}'".format(rec_starttime),
										'connection': schema,
										'mode': 'a'}
		else:
			# Make sure the csv output directory exists.
			try:
				os.mkdir(os.path.join(wdir,'csv_output'))
			except OSError:
				pass
			# Add GridLAB-D object for recording into *.csv files.
			populated_dict[lkey] = {'object' : 'tape.recorder',
										'file' : './csv_output/{:s}_{:s}_network_node_recorder.csv'.format(ident,date),
										'parent' : 'network_node',
										'property' : 'measured_real_power,measured_real_energy',
										'interval' : '{:d}'.format(interval),
										'limit' : '{:d}'.format(limit),
										'in': "'{:s}'".format(rec_starttime) }
										
		# Turn dictionary into a *.glm string and print it to a file in the given directory.
		glmstring = feeder.sortedWrite(populated_dict)
		gfile = open(os.path.join(wdir, filename), 'w')
		gfile.write(glmstring)
		gfile.close()
		print ("\t"+filename+ " is ready.")
		
		fnames.append(filename)
	return fnames
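A hypothetical usage sketch for makeGLM follows; the dates, paths, and output directory are illustrative only, and it assumes the module-level interval and use_mysql settings this function relies on are defined and that the output directory already exists.

# Hypothetical usage of makeGLM (every literal below is illustrative, not from the original project).
# 'clock' maps each recording-start timestamp to (simulation start, simulation/recording stop),
# with the simulation starting a full day before recording begins.
clock = {
	'2013-01-08 00:00:00': ('2013-01-07 00:00:00', '2013-01-09 00:00:00'),
	'2013-04-16 00:00:00': ('2013-04-15 00:00:00', '2013-04-17 00:00:00'),
	'2013-07-02 00:00:00': ('2013-07-01 00:00:00', '2013-07-03 00:00:00')}
baseGLM = feeder.parse('./baseFeeder.glm')  # any parsed base feeder dictionary (path is illustrative)
glmNames = makeGLM(clock, None, baseGLM, 0, './glm_output')  # calib_file=None -> 'DefaultCalibration' file names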
Example #29
0
File: debug.py Project: cdkkim/omf
#!/usr/bin/env python

import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.getcwd())))
import feeder as tp
import json

with open('main.json', 'r') as inFile:
    jsonBlob = json.load(inFile)
    tree = jsonBlob['tree']

print jsonBlob.keys()
print tree.keys()

# Serialize the whole tree back to GLM text.
outString = tp.sortedWrite(tree)

# Spot-check the GLM text for a single object.
tp.dictToString(tree['7140'])

# The problem we were running into was that the house thermal_integrity values were integers but needed to be strings, so the darn thing was saving wrong. Lesson learned!
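The comment above only describes the fix in prose; the loop below is a minimal sketch of what the coercion might look like, assuming the standard GridLAB-D house property name thermal_integrity_level (adjust the key if this tree uses a different one) and the tree dictionary loaded above.

# Minimal sketch (not the original fix): coerce integer thermal_integrity values to strings
# before the tree is serialized, so house objects round-trip correctly.
for key in tree:
    obj = tree[key]
    if obj.get('object') == 'house' and 'thermal_integrity_level' in obj:
        obj['thermal_integrity_level'] = str(obj['thermal_integrity_level'])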
Example #30
0
def _makeGLM(clock, calib_file, baseGLM, case_flag, mdir):
	'''Create a populated dict and write it to a .glm file.
	- clock (dict) -- links the three seasonal recording dates with start and stop timestamps (the simulation starts a full 24 hours before the day being recorded)
	- calib_file (dict or None) -- dictionary containing calibration parameters
	- baseGLM (dict) -- original base dictionary for use in feederPopulate.py
	- case_flag (int) -- flag technologies to test
	- feeder_config (string TODO: this is future work, leave as 'None') -- feeder configuration file (weather, sizing, etc.)
	- mdir (string) -- directory in which to store the created .glm files
	'''
	# Create populated dictionary.
	if calib_file is not None:
		calib_obj = calib_file
	else:
		print ('Populating feeder using default calibrations.')
		calib_obj = None
	glmDict, last_key = feederPopulate.startPopulation(baseGLM,case_flag,calib_obj) 
	fnames =  []
	for i in clock.keys():
		# Simulation start
		starttime = clock[i][0]
		# Recording start
		rec_starttime = i
		# Simulation and Recording stop
		stoptime = clock[i][1]
		# Calculate the recorder sample limit. Note: the limit assumes 300 s sampling while the recorder interval below is 900 s, so it is roughly three times larger than strictly needed.
		j = datetime.datetime.strptime(rec_starttime,'%Y-%m-%d %H:%M:%S')
		k = datetime.datetime.strptime(stoptime,'%Y-%m-%d %H:%M:%S')
		diff = (k - j).total_seconds()
		limit = int(math.ceil(diff / 300.0))
		populated_dict = glmDict
		# Name the file.
		if calib_file is None:
			ident = 'DefaultCalibration'
		else:
			ident = calib_file['ID']
		date = re.sub('\s.*$','',rec_starttime)
		filename = ident + '_' + date + '.glm'
		# Get into clock object and set start and stop timestamp.
		for key in populated_dict.keys():
			if 'clock' in populated_dict[key].keys():
				populated_dict[key]['starttime'] = "'{:s}'".format(starttime)
				populated_dict[key]['stoptime'] = "'{:s}'".format(stoptime)
		lkey = last_key
		# Add GridLAB-D object for recording into *.csv files.
		# Make sure the csv output directory exists first.
		try:
			os.mkdir(os.path.join(mdir,'csv_output'))
		except OSError:
			pass
		populated_dict[lkey] = {'object' : 'tape.recorder',
									'file' : './csv_output/{:s}_{:s}_network_node_recorder.csv'.format(ident,date),
									'parent' : 'network_node',
									'property' : 'measured_real_power,measured_real_energy',
									'interval' : '{:d}'.format(900),
									'limit' : '{:d}'.format(limit),
									'in': "'{:s}'".format(rec_starttime) }
		# Turn dictionary into a *.glm string and print it to a file in the given directory.
		glmstring = feeder.sortedWrite(populated_dict)
		gfile = open(os.path.join(mdir, filename), 'w')
		gfile.write(glmstring)
		gfile.close()
		print ("\t"+filename+ " is ready.")
		fnames.append(filename)
	return fnames
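For contrast with the default-calibration call sketched for makeGLM above, a hypothetical call to _makeGLM with an explicit calibration dictionary might look like this; the 'ID' key is the only one this function reads directly, and every literal is illustrative.

# Hypothetical usage of _makeGLM with a calibration dictionary (values are illustrative).
clock = {'2013-07-02 00:00:00': ('2013-07-01 00:00:00', '2013-07-03 00:00:00')}
calib = {'ID' : 'Calib_Round3_Config2'}  # 'ID' labels the output files; feederPopulate may read further keys
baseGLM = feeder.parse('./baseFeeder.glm')  # illustrative path to a parsed base feeder
glmNames = _makeGLM(clock, calib, baseGLM, 0, './glm_output')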
Example #31
0
from pprint import pprint
import feeder as tp
import milToGridlab

glmTree = tp.parse('ILEC-Rembrandt.glm')

lineCoords = [x for x in glmTree if 'object' in glmTree[x] and (glmTree[x]['object'] == 'underground_line' or glmTree[x]['object'] == 'overhead_line')]
print lineCoords

# Here are the keys for a line:
print [x for x in glmTree[12]]

# Replace the embedded configurations with refs to config objects.
for coord in lineCoords:
	intKeys = [x for x in glmTree[coord] if type(x) is int]
	if len(intKeys) == 1:
		target = intKeys[0]
		del glmTree[coord][target]
		if glmTree[coord]['object'] == 'underground_line':
			glmTree[coord]['configuration'] = 'lc_7211'
		elif glmTree[coord]['object'] == 'overhead_line':
			glmTree[coord]['configuration'] = 'ohconfig'
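# A hypothetical sanity check (not in the original script): confirm that the configuration
# objects the lines now reference ('lc_7211' and 'ohconfig') actually exist in the tree.
existingNames = set(glmTree[k].get('name') for k in glmTree if 'name' in glmTree[k])
for configName in ('lc_7211', 'ohconfig'):
	if configName not in existingNames:
		print 'Missing configuration object:', configName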

# Just write it out.
outGlmString = tp.sortedWrite(glmTree)
with open('ILEC-Rembrandt-SYNTH.glm','w') as synthFile:
	synthFile.write(outGlmString)


# Do a regular conversion
# milToGridlab.convert returns a (tree, xScale, yScale) tuple in the other examples here, so unpack it and serialize the tree instead of writing the raw return value.
outTree, xScale, yScale = milToGridlab.convert('../../uploads/ILEC-Rembrandt.std','../../uploads/ILEC.seq')
with open('ILEC-Rembrandt-AUTOSYNTH.glm','w') as synthFile2:
	synthFile2.write(tp.sortedWrite(outTree))
Example #32
0
File: debug.py Project: acmbc68/omf
#!/usr/bin/env python

import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.getcwd())))
import feeder as tp
import json

with open('main.json','r') as inFile:
	jsonBlob = json.load(inFile)
	tree = jsonBlob['tree']

print jsonBlob.keys()
print tree.keys()

outString = tp.sortedWrite(tree)

tp.dictToString(tree['7140'])

# The problem we were running into was that the house thermal_integrity values were integers but needed to be strings, so the darn thing was saving wrong. Lesson learned!