def _save(self, checkpoint_dir):
    # Create the model name from the checkpoint directory
    # (splitNo is an index into the path components, defined elsewhere)
    model_name = "model_%s" % (''.join(
        checkpoint_dir.split('/')[splitNo].split('_')[2:4]))
    # Default checkpoint path expected by the tune framework
    checkpoint_path = os.path.join(checkpoint_dir, "model.pth")
    config = {
        "insertAoAInNetwork": parameters.insertAoAInNetwork,
        "selectFromSamplesPerPos": parameters.selectFromSamplesPerPos,
        "mask": parameters.mask,
        "shuffling": parameters.shuffling,
        "randomDigest": parameters.randomDigest,
        "params": self.params,
        "filterDataAfterLoading": parameters.filterDataAfterLoading,
        "excludedArea": parameters.excludedArea,
        "numTraining": parameters.numTraining,
        "normalizeData": parameters.normalizeData,
        "amountFeatures": self.amountFeatures,
        "antennasRXUsed": parameters.antennasRXUsed,
        "antennasTXUsed": parameters.antennasTXUsed,
        "samplesPerChannel": parameters.samplesPerChannel
    }
    # Save the model via utils as well as via the tune framework (torch.save)
    utils.saveFile(model_name, [self.model, config])
    torch.save(self.model.state_dict(), checkpoint_path)
    # Embed the name of the model in the resulting log string
    string = self.logString.split('<>')[0] + model_name + \
        self.logString.split('<>')[2][len(model_name) - 5:]
    # Open the logfile in append mode and add a log entry
    with open(parameters.tuneLogFile, "a") as f:
        f.write(string)
    return checkpoint_path
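# Restoring such a checkpoint is the mirror image of the torch.save call
# above. A minimal sketch (the _restore name is an assumption; only
# torch.save(self.model.state_dict(), checkpoint_path) comes from _save):
def _restore(self, checkpoint_path):
    # Load the weights saved by _save back into the same architecture.
    self.model.load_state_dict(torch.load(checkpoint_path))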
def restoreBackup(cmakelistsFile):
    # Build the path of the CMakeLists.txt backup file.
    cmakelistsBkpFile = cmakelistsFile + ".bkp"
    # Check whether the backup exists.
    if os.path.isfile(cmakelistsBkpFile):
        # Start the restore process.
        try:
            # Save the current file.
            print("\n Saving the current CMakeLists.txt file...", end="")
            cmakelistsOldFile = utils.saveFile(
                utils.fileFromBkp(cmakelistsBkpFile))
            print("\n Current file saved successfully!")
            print(" The file is located at: {}".format(cmakelistsOldFile))
            # Restore the backup.
            print("\n Restoring the CMakeLists.txt file...", end="")
            cmakelistsFile = utils.restoreFile(cmakelistsBkpFile)
            print("\n Restore completed successfully!")
            print(" The file is located at: {}".format(cmakelistsFile))
            # Delete the backup.
            print("\n Deleting the backup file...", end="")
            os.remove(cmakelistsBkpFile)
            print("\n Backup deleted successfully!")
            buildProj(cmakelistsFile)
        except IOError:
            print("\n WARNING: The restore process could not be completed!")
            print(" Could it be a problem with the file permissions?")
    else:
        print("\n The backup file has not been created in this folder yet!")
    input("\n Press [ENTER] to return to the main menu...")
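# utils.fileFromBkp and utils.restoreFile are project-specific and not shown
# here. Plausible sketches consistent with how they are called above
# (names kept, behavior is an assumption):
import shutil

def fileFromBkp(bkpPath):
    # "CMakeLists.txt.bkp" -> "CMakeLists.txt"
    return bkpPath[:-len(".bkp")]

def restoreFile(bkpPath):
    # Copy the backup over the original and return the restored path.
    target = fileFromBkp(bkpPath)
    shutil.copyfile(bkpPath, target)
    return target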
def createCache(server, xml):
    import json
    import urllib

    print 'Creating caches...'
    subMasterCache = {}
    subMasterCache['layers'] = []
    tags = None

    cleanServerName = server['name'].replace('/', '-')
    cleanLayerName = server['name']
    if server['params'] and server['params']['TypeName']:
        cleanLayerName = utils.replaceAll(server['params']['TypeName'],
                                          {':': '-', '\\': '-'})
    if server['name'] in extraInfo:
        tags = extraInfo[server['name']]

    # Layer iter
    if 'passthrough' in server['options'] and server['options']['passthrough']:
        if server['params']:
            encodedParams = urllib.urlencode(server['params'])
            subMasterCache['url'] = server['url'] + encodedParams
        else:
            subMasterCache['url'] = server['url']

        layer = {'name': cleanLayerName, 'options': server['options']}
        if tags:
            layer['tags'] = tags
        subMasterCache['layers'].append(layer)
    elif xml is not None:
        # Save out the xml file for later
        utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONXML, xml)
        times = processTimes(server, xml)
        layer = {
            'name': cleanLayerName,
            'options': server['options'],
            'times': times
        }
        if tags:
            layer['tags'] = tags
        # Save out layer cache
        utils.saveFile(LAYERCACHEPATH + cleanServerName + "_" + cleanLayerName + FILEEXTENSIONJSON,
                       json.dumps(layer))
        subMasterCache['layers'].append(layer)
        subMasterCache['url'] = server['url']

    subMasterCache['serverName'] = server['name']
    print 'Cache creation complete...'
    # Return and save out the cache for this server
    return utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONJSON,
                          json.dumps(subMasterCache))
def createCache(server, capabilitiesXML, coverageXML):
    #import xml.etree.ElementTree as ET
    #from xml.etree.ElementTree import XMLParser
    from lxml import etree as ET
    import json

    # Save out the xml files for later
    utils.saveFile(SERVERCACHEPATH + server['name'] + '-GetCapabilities' + FILEEXTENSIONXML,
                   capabilitiesXML)
    utils.saveFile(SERVERCACHEPATH + server['name'] + '-DescribeCoverage' + FILEEXTENSIONXML,
                   coverageXML)

    print 'Creating caches...'
    subMasterCache = {}
    subMasterCache['server'] = {}

    #parse = XMLParser( encoding="UTF-8" )
    # Parse the GetCapabilities XML
    #root = ET.XML(capabilitiesXML, parser=parse)
    root = ET.fromstring(removeNonUTF8(capabilitiesXML))
    # Parse the DescribeCoverage XML
    coverageRoot = ET.fromstring(removeNonUTF8(coverageXML))

    if root.find('./%sCapability/%sLayer/%sLayer' % (WMS_NAMESPACE, WMS_NAMESPACE, WMS_NAMESPACE)) is None:
        dirtyCaches.append(server)
        return

    for service in root.findall('./%sService' % (WMS_NAMESPACE)):
        serverTitle = service.find('./%sTitle' % (WMS_NAMESPACE)).text
        serverAbstract = service.find('./%sAbstract' % (WMS_NAMESPACE)).text \
            if service.find('./%sAbstract' % (WMS_NAMESPACE)) is not None else None

    for product in root.findall('./%sCapability/%sLayer/%sLayer' % (WMS_NAMESPACE, WMS_NAMESPACE, WMS_NAMESPACE)):
        sensorName = product.find('./%sTitle' % (WMS_NAMESPACE)).text

        if utils.blackfilter(sensorName, productBlackList):
            sensorName = utils.replaceAll(sensorName, {' ': '_', '(': '_', ')': '_', '/': '_'})
            print sensorName
            layers = []

            for layer in product.findall('./%sLayer' % (WMS_NAMESPACE)):
                name = layer.find('./%sName' % (WMS_NAMESPACE)).text
                title = layer.find('./%sTitle' % (WMS_NAMESPACE)).text
                abstract = layer.find('./%sAbstract' % (WMS_NAMESPACE)).text
                temporal = False

                if name not in server['indicators']:
                    print "NOTICE: Indicator '" + name + "' found on WMS server but not in local config file, ignoring."
                    continue

                # Find the CoverageOffering from DescribeCoverage
                coverage = findCoverageNode(coverageRoot, name)
                if coverage is None:
                    print serverTitle + " " + name + " could not be found in DescribeCoverage. Not including."
                    continue

                offsetVectorsArray = coverage.findall('.//%soffsetVector' % (GML_NAMESPACE))
                offsetVectors = []
                # Take the i-th component of the i-th offset vector
                # (the grid spacing along each axis)
                for i in range(0, len(offsetVectorsArray)):
                    offsetVectors.append(float(offsetVectorsArray[i].text.split(" ")[i]))

                exGeographicBoundingBox = {
                    "WestBoundLongitude": layer.find('./%sEX_GeographicBoundingBox/%swestBoundLongitude' % (WMS_NAMESPACE, WMS_NAMESPACE)).text,
                    "EastBoundLongitude": layer.find('./%sEX_GeographicBoundingBox/%seastBoundLongitude' % (WMS_NAMESPACE, WMS_NAMESPACE)).text,
                    "SouthBoundLatitude": layer.find('./%sEX_GeographicBoundingBox/%ssouthBoundLatitude' % (WMS_NAMESPACE, WMS_NAMESPACE)).text,
                    "NorthBoundLatitude": layer.find('./%sEX_GeographicBoundingBox/%snorthBoundLatitude' % (WMS_NAMESPACE, WMS_NAMESPACE)).text
                }

                boundingBox = {
                    "CRS": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('CRS'),
                    "MinX": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('minx'),
                    "MaxX": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('maxx'),
                    "MinY": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('miny'),
                    "MaxY": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('maxy')
                }

                dimensions = createDimensionsArray(layer, server)
                temporal = dimensions['temporal']
                styles = createStylesArray(layer)

                if server['options']['providerShortTag'] not in providers:
                    raise Exception("Provider shortTag " + server['options']['providerShortTag'] +
                                    " was not in the 'providers.py' file")
                # Get the default details for the provider
                providerDetails = providers[server['options']['providerShortTag']]
                if layerHasMoreInfo(server['options']['providerShortTag']):
                    moreProviderInfo = True
                else:
                    moreProviderInfo = False

                if 'providerDetails' in server['indicators'][name]:
                    # Overwrite any details with the indicator-specific details
                    for i in server['indicators'][name]['providerDetails']:
                        providerDetails[i] = server['indicators'][name]['providerDetails'][i]

                #import pprint
                #pprint.pprint(server['indicators'][name])
                #print '-'*40

                if utils.blackfilter(name, layerBlackList):
                    if layerHasMoreInfo(server['indicators'][name]['niceName']):
                        moreIndicatorInfo = True
                    else:
                        moreIndicatorInfo = False

                    masterLayer = {
                        "Name": name,
                        "Title": title,
                        "Abstract": abstract,
                        "FirstDate": dimensions['firstDate'],
                        "LastDate": dimensions['lastDate'],
                        "OffsetVectors": offsetVectors,
                        "ProviderDetails": providerDetails,
                        "EX_GeographicBoundingBox": exGeographicBoundingBox,
                        "MoreIndicatorInfo": moreIndicatorInfo,
                        "MoreProviderInfo": moreProviderInfo
                    }
                    if name in server['indicators']:
                        masterLayer['tags'] = server['indicators'][name]

                    # Data to be sent in the mastercache
                    layers.append(masterLayer)

                    # Data to be saved out
                    layer = {
                        #"Name": name,
                        #"wmsURL": server['wmsURL'],
                        #"wcsURL": server['wcsURL'],
                        #"Title": title,
                        #"Abstract": abstract,
                        "FirstDate": dimensions['firstDate'],
                        "LastDate": dimensions['lastDate'],
                        "OffsetVectors": offsetVectors,
                        #"EX_GeographicBoundingBox": exGeographicBoundingBox,
                        "BoundingBox": boundingBox,
                        "Dimensions": dimensions['dimensions'],
                        "Styles": styles
                    }

                    cleanServerName = server['name'].replace('/', '-')
                    cleanLayerName = name.replace('/', '-')
                    # Save out layer cache
                    utils.saveFile(LAYERCACHEPATH + cleanServerName + "_" + cleanLayerName + FILEEXTENSIONJSON,
                                   json.dumps(layer))

            subMasterCache['server'][sensorName] = layers

    subMasterCache['options'] = server['options']
    subMasterCache['wmsURL'] = server['services']['wms']['url']
    subMasterCache['wcsURL'] = server['services']['wcs']['url']
    subMasterCache['serverName'] = server['name']
    print 'Cache creation complete...'
    # Return and save out the cache for this server
    return utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONJSON,
                          json.dumps(subMasterCache))
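# removeNonUTF8 is called above but defined elsewhere in the project.
# One plausible Python 2 implementation, consistent with how it is used
# here (an assumption, not necessarily the project's actual helper):
def removeNonUTF8(text):
    # Drop any byte sequences that do not decode as UTF-8, so that
    # lxml receives a clean byte string.
    return text.decode('utf-8', 'ignore').encode('utf-8')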
def createCache(server, xml):
    import xml.etree.ElementTree as ET
    import json

    # Save out the xml file for later
    utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONXML, xml)

    print 'Creating caches...'
    subMasterCache = {}
    subMasterCache['server'] = {}

    #ET.register_namespace(NAMESPACE, NAMESPACE)
    root = ET.fromstring(xml)

    if root.find('./%sCapability/%sLayer/%sLayer' % (NAMESPACE, NAMESPACE, NAMESPACE)) is None:
        dirtyCaches.append(server)
        return

    for service in root.findall('./%sService' % (NAMESPACE)):
        serverTitle = service.find('./%sTitle' % (NAMESPACE)).text
        serverAbstract = service.find('./%sAbstract' % (NAMESPACE)).text \
            if service.find('./%sAbstract' % (NAMESPACE)) is not None else None

    for product in root.findall('./%sCapability/%sLayer/%sLayer' % (NAMESPACE, NAMESPACE, NAMESPACE)):
        sensorName = product.find('./%sTitle' % (NAMESPACE)).text

        if utils.blackfilter(sensorName, productBlackList):
            sensorName = utils.replaceAll(sensorName, {' ': '_', '(': '_', ')': '_', '/': '_'})
            print sensorName
            layers = []

            for layer in product.findall('./%sLayer' % (NAMESPACE)):
                name = layer.find('./%sName' % (NAMESPACE)).text
                title = layer.find('./%sTitle' % (NAMESPACE)).text
                abstract = layer.find('./%sAbstract' % (NAMESPACE)).text
                temporal = False

                exGeographicBoundingBox = {
                    "WestBoundLongitude": layer.find('./%sEX_GeographicBoundingBox/%swestBoundLongitude' % (NAMESPACE, NAMESPACE)).text,
                    "EastBoundLongitude": layer.find('./%sEX_GeographicBoundingBox/%seastBoundLongitude' % (NAMESPACE, NAMESPACE)).text,
                    "SouthBoundLatitude": layer.find('./%sEX_GeographicBoundingBox/%ssouthBoundLatitude' % (NAMESPACE, NAMESPACE)).text,
                    "NorthBoundLatitude": layer.find('./%sEX_GeographicBoundingBox/%snorthBoundLatitude' % (NAMESPACE, NAMESPACE)).text
                }

                boundingBox = {
                    "CRS": layer.find('./%sBoundingBox' % (NAMESPACE)).get('CRS'),
                    "MinX": layer.find('./%sBoundingBox' % (NAMESPACE)).get('minx'),
                    "MaxX": layer.find('./%sBoundingBox' % (NAMESPACE)).get('maxx'),
                    "MinY": layer.find('./%sBoundingBox' % (NAMESPACE)).get('miny'),
                    "MaxY": layer.find('./%sBoundingBox' % (NAMESPACE)).get('maxy')
                }

                dimensions = createDimensionsArray(layer, server)
                temporal = dimensions['temporal']
                styles = createStylesArray(layer)

                if utils.blackfilter(name, layerBlackList):
                    masterLayer = {
                        "Name": name,
                        "Title": title,
                        "Abstract": abstract,
                        "FirstDate": dimensions['firstDate'],
                        "LastDate": dimensions['lastDate'],
                        "EX_GeographicBoundingBox": exGeographicBoundingBox
                    }
                    if server['name'] in extraInfo:
                        if name in extraInfo[server['name']]:
                            masterLayer['tags'] = extraInfo[server['name']][name]

                    # Data to be sent in the mastercache
                    layers.append(masterLayer)

                    # Data to be saved out
                    layer = {
                        #"Name": name,
                        #"wmsURL": server['wmsURL'],
                        #"wcsURL": server['wcsURL'],
                        #"Title": title,
                        #"Abstract": abstract,
                        "FirstDate": dimensions['firstDate'],
                        "LastDate": dimensions['lastDate'],
                        #"EX_GeographicBoundingBox": exGeographicBoundingBox,
                        "BoundingBox": boundingBox,
                        "Dimensions": dimensions['dimensions'],
                        "Styles": styles
                    }

                    cleanServerName = server['name'].replace('/', '-')
                    cleanLayerName = name.replace('/', '-')
                    # Save out layer cache
                    utils.saveFile(LAYERCACHEPATH + cleanServerName + "_" + cleanLayerName + FILEEXTENSIONJSON,
                                   json.dumps(layer))

            subMasterCache['server'][sensorName] = layers

    subMasterCache['options'] = server['options']
    subMasterCache['wmsURL'] = server['url']
    subMasterCache['wcsURL'] = server['wcsurl']
    subMasterCache['serverName'] = server['name']
    print 'Cache creation complete...'
    # Return and save out the cache for this server
    return utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONJSON,
                          json.dumps(subMasterCache))
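# ElementTree matches namespaced tags via the '{uri}tag' syntax, so the
# NAMESPACE constant used above must carry the braces. Example values
# (assumptions; the real constants live in the surrounding module, and
# the cache paths are purely hypothetical):
NAMESPACE = '{http://www.opengis.net/wms}'   # WMS GetCapabilities namespace
SERVERCACHEPATH = './cache/servers/'         # hypothetical cache locations
LAYERCACHEPATH = './cache/layers/'
FILEEXTENSIONXML = '.xml'
FILEEXTENSIONJSON = '.json'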
if not os.path.isfile(backendFilename[format]):
    usage(cmd, "error: unable to find the given backend (%s)" % backendFilename[format])
    sys.exit(2)

""" 1'. Intermediate step -
    if the filename ends with .i, the file has to be stripped and
    transformed back into a .c file in order to be parsed properly
    with pycparser.
"""
seqfile = utils.rreplace(inputfile, "/", "/_cs_", 1) if "/" in inputfile else "_cs_" + inputfile
oldinputfile = inputfile
(filestripped, contents) = utils.stripIfNeeded(inputfile)
if filestripped:
    inputfile = inputfile[:-2] + ".strip.c"
    utils.saveFile(inputfile, contents)

logfilename = seqfile + "." + format + ".log" if witness == "" else witness

""" 2. Call CSeq and save the sequentialised file as '_cs_.....' """
if donotsequentialize == False:
    if verbose:
        print ("\ninput: %s") % (inputfile)
    cmdline = "./new.py -i%s -t%s -r%s -b%s -e%s" % (str(inputfile), str(threads), str(rounds), format, label)
    if unwind != -1:
        cmdline += " -u" + str(unwind)
    if forunwind != -1:
        cmdline += " -f" + str(forunwind)
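# utils.rreplace replaces starting from the right, which is what puts the
# "_cs_" prefix on the file name rather than on a directory. A minimal
# sketch of such a helper (an assumption about this project's utils,
# though the rsplit-based implementation is the common pattern):
def rreplace(s, old, new, occurrence):
    # Replace the last `occurrence` instances of `old`, scanning from the right.
    parts = s.rsplit(old, occurrence)
    return new.join(parts)

# e.g. rreplace("dir/sub/file.c", "/", "/_cs_", 1) -> "dir/sub/_cs_file.c"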
myfile = open(inputfile)
lines = list(myfile)
for line in lines:
    sourcecode += line

# Merge all the source files into one string.
try:
    timeBefore = time.time()
    if debug: print "/* merger",
    Merger = merger.Merger()
    Merger.loadfromstring(sourcecode, cseqenvironment)
    sourcecode = Merger.string()
    transforms += 1
    if debug: utils.saveFile('___%s_%s.c' % (0, "merger"), sourcecode)
    if debug: print "ok %0.2fs */" % (int(time.time()) - int(timeBefore))
    if debug:
        if os.path.isfile("___0_merger.linemap"):
            os.remove("___0_merger.linemap")
        fd = os.open("___0_merger.linemap", os.O_RDWR | os.O_CREAT)
        for i in range(1, Merger.lastoutputlineno + 1):
            if Merger.outputtoinput.has_key(i):
                os.write(fd, "%d %d\n" % (i, Merger.outputtoinput[i]))
#exceptions.SystemExit TODO
except ParseError as e:
    if debug: print "ERROR: %s */\n" % sys.exc_info()[0]
    print "Parse error (%s):\n" % str(e)
    print "%s%s%s" % (utils.colors.YELLOW,
                      utils.snippet(sourcecode, Merger.getLineNumber(e), Merger.getColumnNumber(e), 5, True),
                      utils.colors.NO)
    sys.exit(1)
def __init__(self, *args, **kwargs):
    tk.Frame.__init__(self, *args, **kwargs)
    self.text = CustomText(self)
    self.vsb = tk.Scrollbar(orient="vertical", command=self.text.yview)
    self.text.configure(yscrollcommand=self.vsb.set,
                        font=("Helvetica", "14", "bold"))
    self.linenumbers = TextLineNumbers(self, width=20)
    self.linenumbers.attach(self.text)

    self.vsb.pack(side="right", fill="y")
    self.text.pack(side="right", fill="both", expand=True)
    self.linenumbers.pack(side="right", fill="y")

    self.text.bind("<<Change>>", self._on_change)
    self.text.bind("<Configure>", self._on_change)

    self.console = CustomText(self)
    self.vsbConsole = tk.Scrollbar(command=self.console.yview)
    self.console.configure(yscrollcommand=self.vsbConsole.set,
                           state=tk.DISABLED,
                           background="black",
                           foreground="green",
                           font=("Helvetica", "12"))
    self.console.tag_config('error', foreground="red",
                            font=("Helvetica", "12", "bold"))
    self.console.tag_config('normal', foreground="green",
                            font=("Helvetica", "12"))
    self.linenumbersConsole = TextLineNumbers(self, width=20)
    self.linenumbersConsole.attach(self.console)

    self.vsbConsole.pack(side="right", fill="y")
    self.console.pack(side="right", fill="both", expand=True)
    self.linenumbersConsole.pack(side="right", fill="y")

    self.console.bind("<<Change>>", self._on_change_console)
    self.console.bind("<Configure>", self._on_change_console)

    menu = tk.Menu(root)
    root.config(menu=menu)

    fileMenu = tk.Menu(menu)
    menu.add_cascade(label="File", menu=fileMenu)
    fileMenu.add_command(label="New", command=lambda: utils.newFile(self.text))
    fileMenu.add_command(label="Open", command=lambda: utils.openFile(root, self.text))
    fileMenu.add_command(label="Save", command=lambda: utils.saveFile(self.text))
    fileMenu.add_separator()
    fileMenu.add_command(label="Exit", command=utils.exitRoot)

    runMenu = tk.Menu(menu)
    menu.add_cascade(label="Run", menu=runMenu)
    runMenu.add_command(label="compile", command=lambda: compileOnly(self, self.assembler))
    runMenu.add_command(label="compile and run",
                        command=lambda: compileAndRun(self, self.assembler))

    ramMenu = tk.Menu(menu)
    menu.add_cascade(label="Options", menu=ramMenu)
    ramMenu.add_command(label="Generate New RAM", command=utils.generateCleanMemory)
    ramMenu.add_command(label="Choose RAM File",
                        command=lambda: utils.chooseRamFile(self.assembler))
    ramMenu.add_command(label="Choose Debug directory",
                        command=lambda: utils.chooseDebugDirectory(self.assembler))

    helpMenu = tk.Menu(menu)
    menu.add_cascade(label="Help", menu=helpMenu)
    helpMenu.add_command(label="How To Use", command=utils.showHelp)
    helpMenu.add_command(label="About", command=utils.showAbout)

    sys.stdout = utils.PrintLogger(self.console)
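# This frame reads a global `root` when building its menus, so the Tk root
# must exist before the frame is constructed. A typical entry point
# (a sketch; the EditorFrame class name is an assumption, only `root`
# appears in the code above):
if __name__ == "__main__":
    root = tk.Tk()
    EditorFrame(root).pack(side="top", fill="both", expand=True)
    root.mainloop()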
./runLimit.py --batch-run %(job_dir)s/${SGE_TASK_ID} --fork %(fork)d --timeout %(timeout)d
""" % {"cwd": os.getcwd(), "job_dir": job_dir, "fork": opts.fork,
       "timeout": opts.timeout, "source": "%s/root_svn.sh" % os.getcwd()})

    subJobs(job_dir, idx)
    print "Scheduled %s jobs: %s" % (idx, job_dir)

elif opts.run or opts.batch_run:
    if opts.batch_run:
        task = json.load(open("%s/job.json" % opts.batch_run))
        print "Starting batch job!"
        ofile = "%s/results.json" % opts.batch_run
        orootfile = opts.batch_run + "/results_%d.root"
    else:
        ofile = opts.output_file
        orootfile = opts.output_file.replace(".json", "_%d.root").replace(".pkl", "_%d.root")
    results = runLimit(task, opts.fork, opts.timeout, orootfile)
    utils.saveFile({"results": results}, ofile)

elif opts.merge:
    if os.path.exists(opts.output_file):
        raise IOError("Output file exists: %s" % opts.output_file)
    else:
        print "Dumping to file: %s" % opts.output_file
    files = []
    idx = 1
    nonexist = 0
    while True:
        path = "%s/%d/results.json" % (opts.merge, idx)
        print path
        if not os.path.exists(path):
            nonexist += 1
            if nonexist == 10:
                break
        else:
            files += [path]
            nonexist = 0
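# Note that this repo's utils.saveFile(data, filename) takes the payload
# first, unlike the (path, contents) order in the cache snippets above.
# A sketch consistent with the .json/.pkl call sites here (an assumption;
# the real helper is not shown):
import json, pickle

def saveFile(data, filename):
    # Serialise to JSON or pickle depending on the target extension.
    if filename.endswith(".json"):
        with open(filename, "w") as f:
            json.dump(data, f)
    else:
        with open(filename, "wb") as f:
            pickle.dump(data, f)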
def loadData(forTraining, DATAPART, considerAoA, errorLog=[]):
    # Create the filename from the given parameters (-1 = not present/not necessary)
    filename = utils.createFileName(dataID=parameters.dataID, netID=-1,
                                    lossID=-1, extremes=-1, numExtremes=-1)
    # Load the data object
    data = utils.loadFile(filename)
    dataLen = len(data)
    data_rest = []
    print("%s loaded (%d samples)" % (filename, dataLen))

    # If AoA should be used and loaded
    if (considerAoA and parameters.insertAoAInNetwork):
        # Load the AoA file
        filenameAoA = utils.createFileName(dataID=parameters.dataID + "_AoA",
                                           netID=-1, lossID=-1, extremes=-1,
                                           numExtremes=-1)
        data_AoA = utils.loadFile(filenameAoA)
        # Add the AoA information to the data
        for i in range(0, len(data)):
            data[i] = [data[i][0], data[i][1], np.real(data_AoA[i])]

    # Shrink the pool from which the training samples are selected
    if (DATAPART != enumerators.DATA.ALL
            and parameters.selectFromSamplesPerPos < parameters.filesPerTarget):
        data_filtered = []
        print("Data filtered Samples per Position")
        # Split the data into data (used for training) and data_rest
        # (used for testing later)
        for i in range(0, dataLen // parameters.filesPerTarget):
            for j in range(0, parameters.selectFromSamplesPerPos):
                data_filtered.append(data[i * parameters.filesPerTarget + j])
            for j2 in range(parameters.selectFromSamplesPerPos, parameters.filesPerTarget):
                data_rest.append(data[i * parameters.filesPerTarget + j2])
        # Assign it to data
        data = data_filtered
        dataLen = len(data_filtered)

    # Apply the mask, if one is configured
    if (not isinstance(parameters.mask, int)):
        data, errorLog = dataManagement.applyMaskOnData(
            data=data,
            coordInfo=parameters.coordInfo,
            filesPerTarget=parameters.filesPerTarget,
            room=parameters.room,
            mask=parameters.mask,
            debug=parameters.debug,
            errorLog=errorLog)

    # Store shuffled data
    shuffled_data = []
    # If training is done and the previous permutation is not reused,
    # a new random permutation is necessary
    if (forTraining and parameters.shuffling and not parameters.usePrevRandomVal):
        # Get a random permutation
        random_indices = np.random.permutation(dataLen)
        # Add the data in permuted order
        for i in range(0, dataLen):
            shuffled_data.append(data[random_indices[i]])
        data = shuffled_data
        # Create a hash describing the random permutation and store it
        sha256 = hashlib.sha256()
        sha256.update(bytes(random_indices))
        digestName = sha256.hexdigest()[:10]
        utils.saveFile("R_" + digestName, random_indices)
        parameters.randomDigest = digestName
        # As this is for training, only use the first entries
        # (length = number of training samples)
        data = shuffled_data[:parameters.numTraining]
        dataLen = len(data)
    # If the whole data set is requested -> no random permutation necessary
    elif (DATAPART == enumerators.DATA.ALL):
        # No shuffling necessary as all the data is requested
        dataLen = len(data)
    # Either testing or training is requested and shuffling should be
    # applied (based on the previous permutation)
    elif (parameters.shuffling):
        print(len(data))
        # Load the permutation information
        random_indices = utils.loadFile("R_" + parameters.randomDigest)
        # Permute the data according to the stored permutation
        for i in range(0, len(random_indices)):
            shuffled_data.append(data[random_indices[i]])
        # Filter according to the requested set
        if (DATAPART == enumerators.DATA.TRAINING):
            data = shuffled_data[:parameters.numTraining]
            dataLen = len(data)
        elif (DATAPART == enumerators.DATA.TESTING):
            # Also add the data that was previously set aside during
            # training-sample selection!
            data_rest.extend(shuffled_data[parameters.numTraining:])
            data = data_rest
            dataLen = len(data)
    # No shuffling
    else:
        # Filter according to the requested set
        if (DATAPART == enumerators.DATA.TRAINING):
            data = data[:parameters.numTraining]
            dataLen = len(data)
        elif (DATAPART == enumerators.DATA.TESTING):
            # Also add the data that was previously set aside during
            # training-sample selection!
            data_rest.extend(data[parameters.numTraining:])
            data = data_rest
            dataLen = len(data)

    # First filter by TX antennas, then by RX antennas
    if (parameters.antennasTX < 4 or parameters.antennasRX < 4):
        for item in range(0, len(data)):
            # Initialise with fake data (deleted in the end)
            dataobj = torch.zeros([1, len(data[0][0][0])])
            if (parameters.antennasTX < 4):
                for i in range(0, 4):
                    if i + 1 in parameters.antennasTXUsed:
                        dataobj = torch.cat(
                            (dataobj.to(torch.float32),
                             data[item][0][i * 4 * 2:(i + 1) * 4 * 2].to(torch.float32)),
                            dim=0).to(torch.float32)
                # Delete the zero row
                dataobj = dataobj[1:]
            if (parameters.antennasRX < 4):
                dataobj_new = torch.zeros([1, len(data[0][0][0])])
                for tx in range(0, parameters.antennasTX):
                    for i in range(0, 4):
                        if i + 1 in parameters.antennasRXUsed:
                            dataobj_new = torch.cat(
                                (dataobj_new.to(torch.float32),
                                 dataobj[tx * 4 * 2 + 2 * i:tx * 4 * 2 + 2 * (i + 1)]),
                                dim=0).to(torch.float32)
                # Delete the zero row
                dataobj = dataobj_new[1:]
            data[item][0] = dataobj

    # Apply subcarrier filtering! Use fewer subcarriers to compare against
    # the other paper!
    if (len(data[0][0][0]) > parameters.samplesPerChannel):
        for i in range(0, len(data)):
            data[i][0] = data[i][0][:, 0:parameters.samplesPerChannel]

    # Filter the data before loading it into the network
    if (parameters.filterDataAfterLoading):
        data = dataManagement.filterData(data=data,
                                         room=parameters.room,
                                         excludedArea=parameters.excludedArea)
        dataLen = len(data)

    return data, dataLen, errorLog
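# A call that loads the training split might look like this (a sketch;
# the argument values are illustrative, the names come from the
# signature above):
train_data, train_len, errorLog = loadData(
    forTraining=True,
    DATAPART=enumerators.DATA.TRAINING,
    considerAoA=True)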