def quickSave(self):
    """Save the current tab in place; fall back to the Save-As dialogue
    when the tab has never been saved before."""
    data = self.tabWidget.currentWidget().data
    if data.currentFileLocation is None:
        # No known location yet -> ask the user where to save.
        self.saveAs()
    else:
        Exporter(data).export(data.currentFileLocation)
def __init__(self, info, parser_tail=None):
    """Prepare the per-class output sections and bookkeeping lists."""
    Exporter.__init__(self, info, parser_tail)
    # Sections of generated code, keyed by where the lines belong:
    #   template            - items in the class_<...> template argument list
    #   constructor         - parameters to the class_ constructor, class_<C>(...)
    #   inside              - everything within the class_<> statement
    #   scope               - items outside the class statement but within its
    #                         scope: scope* s = new scope(class<>()); ... delete s;
    #   declaration /
    #   declaration-outside - outside the BOOST_PYTHON_MODULE macro
    #   include             - include lines
    section_names = (
        'template', 'constructor', 'inside', 'scope',
        'declaration', 'declaration-outside', 'include',
    )
    self.sections = {name: [] for name in section_names}
    # a list of Constructor instances
    self.constructors = []
    self.wrapper_generator = None
    # code units generated by nested declarations
    self.nested_codeunits = []
def saveAs(self):
    """Open a Save-File dialogue and export the current tab to the chosen file.

    On success the tab's display name and remembered file location are
    updated and the tab title is refreshed via updateName().
    """
    tr = QtCore.QCoreApplication.translate
    fileName = QtWidgets.QFileDialog.getSaveFileName(
        self,
        tr("SaveDialog:", "Berechnung speichern"),
        "",
        tr("SaveDialog:", "Bauphysikberechnung (*.baup);;Alle Dateien (*)"))
    # getSaveFileName returns (path, selected_filter); both must be non-empty.
    if not all(fileName):
        return
    file = QtCore.QFile(fileName[0])
    if not file.open(QtCore.QIODevice.WriteOnly):
        # BUG FIX: QMessageBox.Information is an enum value, not callable;
        # the static method that shows a dialog is information().
        QtWidgets.QMessageBox.information(
            self,
            tr("SaveDialog:", "Datei kann nicht geöffnet werden"),
            file.errorString())
        return
    # The QFile was only opened as a writability probe; close it before the
    # Exporter writes the real content (the old code leaked the handle).
    file.close()
    # Display name = file name without its last suffix.  completeBaseName()
    # matches the old manual split-and-rejoin, and additionally yields the
    # full name (instead of "") for files without any dot.
    fileNameString = QtCore.QFileInfo(fileName[0]).completeBaseName()
    data = self.tabWidget.currentWidget().data
    data.name = fileNameString
    data.currentFileLocation = fileName[0]
    exp = Exporter(data)
    exp.export(fileName[0])
    self.updateName()
def __init__(self, item):
    """Expose width/height/antialias/background as editable export options."""
    Exporter.__init__(self, item)
    rect = self.getTargetRect()
    children = [
        dict(name='width', type='int', value=rect.width(), limits=(0, None)),
        dict(name='height', type='int', value=rect.height(), limits=(0, None)),
        dict(name='antialias', type='bool', value=True),
        dict(name='background', type='color', value=(0, 0, 0, 255)),
    ]
    self.params = Parameter(name='params', type='group', children=children)
    # React to edits of either dimension.
    self.params.param('width').sigValueChanged.connect(self.widthChanged)
    self.params.param('height').sigValueChanged.connect(self.heightChanged)
def main():
    """Command-line entry point for the protobuf code generator."""
    parser = ArgumentParser(description="Protobuf Parser")
    parser.add_argument(
        "-output",
        help="output directory. default is 'output', under current path")
    parser.add_argument(
        "-f", "--force", action="store_true",
        help="this will remove all old files.")
    parser.add_argument(
        "-config",
        help="the configure file for code generator. see ppconfig.py")
    parser.add_argument(
        "input_path",
        help="input proto directory. only *.proto files will be processed.")
    option = parser.parse_args()

    if option.config:
        parse_config(option.config)

    ppconfig.INPUT_PATH = option.input_path
    ppconfig.OUTPUT_PATH = option.output or "output"
    # MODULE_PATH may already be set by the config file; default it otherwise.
    if not hasattr(ppconfig, "MODULE_PATH"):
        ppconfig.MODULE_PATH = ppconfig.OUTPUT_PATH

    exporter = Exporter()
    try:
        exporter.run(option)
    except ProtoException as msg:
        print("\n**%s\n" % msg)
def __init__(self, info, parser_tail=None):
    """Initialise the output sections and bookkeeping lists."""
    Exporter.__init__(self, info, parser_tail)
    # Sections of generated code:
    #   template    - items in the class_<...> template argument list
    #   constructor - parameters to the class_ constructor, class_<C>(...)
    #   inside      - everything within the class_<> statement
    #   scope       - items outside the class statement but within its scope:
    #                   scope* s = new scope(class<>()); ... delete s;
    #   declaration - outside the BOOST_PYTHON_MODULE macro
    #   include     - include lines
    self.sections = {
        name: []
        for name in ('template', 'constructor', 'inside',
                     'scope', 'declaration', 'include')
    }
    # a list of Constructor instances
    self.constructors = []
    self.wrapper_generator = None
    # code units generated by nested declarations
    self.nested_codeunits = []
def __init__(self, item):
    """Expose editable float width/height taken from the target rect."""
    Exporter.__init__(self, item)
    rect = self.getTargetRect()
    size_params = [
        dict(name='width', type='float', value=rect.width(),
             limits=(0, None)),
        dict(name='height', type='float', value=rect.height(),
             limits=(0, None)),
    ]
    self.params = Parameter(name='params', type='group', children=size_params)
    self.params.param('width').sigValueChanged.connect(self.widthChanged)
    self.params.param('height').sigValueChanged.connect(self.heightChanged)
def __init__(self, item):
    """Offer a separator choice (comma or tab) for the export."""
    Exporter.__init__(self, item)
    separator_param = dict(
        name='separator', type='list', value='comma',
        values=['comma', 'tab'])
    self.params = Parameter(name='params', type='group',
                            children=[separator_param])
def __init__(self, item):
    """Build the export options: size, antialiasing and background colour."""
    Exporter.__init__(self, item)
    rect = self.getTargetRect()
    self.params = Parameter(
        name='params', type='group',
        children=[
            {'name': 'width', 'type': 'int',
             'value': rect.width(), 'limits': (0, None)},
            {'name': 'height', 'type': 'int',
             'value': rect.height(), 'limits': (0, None)},
            {'name': 'antialias', 'type': 'bool', 'value': True},
            {'name': 'background', 'type': 'color', 'value': (0, 0, 0, 255)},
        ])
    for side in ('width', 'height'):
        handler = self.widthChanged if side == 'width' else self.heightChanged
        self.params.param(side).sigValueChanged.connect(handler)
def export_data(args):
    """Export the network selected by *args* and return it.

    Raises HydraPluginError when neither a date range nor a time axis
    is supplied.
    """
    # NOTE(review): template_id is computed but never used below; the int()
    # call still validates the command-line value — confirm before removing.
    template_id = None
    if args.template_id is not None:
        template_id = int(args.template_id)

    exporter = Exporter(args, link_export_flag, steps)

    have_range = (args.start_date is not None
                  and args.end_date is not None
                  and args.time_step is not None)
    if have_range:
        exporter.get_time_index(start_time=args.start_date,
                                end_time=args.end_date,
                                time_step=args.time_step)
    elif args.time_axis is not None:
        exporter.get_time_index(time_axis=args.time_axis)
    else:
        raise HydraPluginError('Time axis not specified.')

    exporter.export_network()
    exporter.save_file()
    return exporter.net
def setUp(self):
    """Build four layers, a tab containing them, and the export helpers."""
    # (widthUnit, width, lambda_) for each of the four layers
    layer_specs = [
        (1, 0.02, 0.350),
        (1, 0.24, 0.560),
        (1, 0.05, 0.045),
        (1, 0.01, 0.700),
    ]
    self.tab = TabData(0, "Test_033")
    self.tab.rright = 0.04
    self.tab.rleft = 0.13
    self.tab.tright = 21
    self.tab.tleft = 4
    for number, (unit, width, lambda_) in enumerate(layer_specs, start=1):
        layer = LayerData()
        layer.widthUnit = unit
        layer.width = width
        layer.lambda_ = lambda_
        # The tests reference self.layer_1 .. self.layer_4 directly.
        setattr(self, "layer_%d" % number, layer)
        self.tab.add_layer(layer)
    self.tab.calculate()
    self.Export = Exporter(self.tab)
    self.parser = Parser(str("TestExport"))
def __init__(self):
    """Wire up importer, exporter and the neural-network manager."""
    self.version = "0.5.4"
    self.utils = Utilities()
    self.save_path = "./controllers/nn/" + self.utils.getTimestamp() + "/"
    self.debug_mode = False

    self.importer = Importer()
    self.importer.setDebugMode(False)

    self.exporter = Exporter(self.version)
    self.exporter.setSaveLocation(self.save_path)

    self.nnm = NeuralNetworkManager()
    self.nnm.setSaveLocation(self.save_path)
    self.nnm.setDebugMode(self.debug_mode)

    self.staticController = StaticController()

    print("COTONN v" + self.version + "\n")
def SetDeclarations(self, declarations):
    """Resolve the exported class (following a typedef if necessary) and
    cache its public members."""
    Exporter.SetDeclarations(self, declarations)
    decl = self.GetDeclaration(self.info.name)
    if isinstance(decl, Typedef):
        # Export the underlying class, presented under the typedef's name
        # unless the user already chose a rename.
        self.class_ = self.GetDeclaration(decl.type.name)
        if not self.info.rename:
            self.info.rename = decl.name
    else:
        self.class_ = decl
    self.public_members = [
        member for member in self.class_.members
        if member.visibility == Scope.public
    ]
def export_data(args):
    """Drive an Exporter over the requested time window and return its net.

    Raises HydraPluginError when no usable time specification is given.
    """
    # template_id is only parsed for validation; int() raises on bad input.
    template_id = (int(args.template_id)
                   if args.template_id is not None else None)

    exporter = Exporter(args, link_export_flag, steps)
    range_args = (args.start_date, args.end_date, args.time_step)
    if all(value is not None for value in range_args):
        exporter.get_time_index(start_time=args.start_date,
                                end_time=args.end_date,
                                time_step=args.time_step)
    elif args.time_axis is not None:
        exporter.get_time_index(time_axis=args.time_axis)
    else:
        raise HydraPluginError('Time axis not specified.')
    exporter.export_network()
    exporter.save_file()
    return exporter.net
def IIR_str_to_SDFG(iir: str):
    """Parse a serialized StencilInstantiation protobuf and lower it to an SDFG.

    The pipeline below is order-dependent: stencils are imported, their code
    is unparsed, registers and memlets are added, then everything is exported.
    Returns the finished SDFG.
    """
    stencilInstantiation = IIR_pb2.StencilInstantiation()
    stencilInstantiation.ParseFromString(iir)

    metadata = stencilInstantiation.metadata
    # Maps the ID namespaces (API/temporary/global fields) to names/dimensions.
    id_resolver = IdResolver(
        metadata.accessIDToName,
        metadata.APIFieldIDs,
        metadata.temporaryFieldIDs,
        metadata.globalVariableIDs,
        metadata.fieldIDtoDimensions)

    imp = Importer(id_resolver)
    stencils = imp.Import_Stencils(stencilInstantiation.internalIR.stencils)

    UnparseCode(stencils, id_resolver)
    AddRegisters(stencils, id_resolver)
    SplitMultiStages(stencils)
    AddMsMemlets(stencils, id_resolver)
    AddDoMethodMemlets(stencils, id_resolver)

    exp = Exporter(id_resolver, name=metadata.stencilName)
    exp.Export_ApiFields(metadata.APIFieldIDs)
    exp.Export_TemporaryFields(metadata.temporaryFieldIDs)
    # Global variables are exported as {id: concrete value}.
    exp.Export_Globals({
        id: stencilInstantiation.internalIR.globalVariableToValue[
            id_resolver.GetName(id)].value
        for id in metadata.globalVariableIDs
    })
    exp.Export_Stencils(stencils)

    exp.sdfg.fill_scope_connectors()
    return exp.sdfg
def SetDeclarations(self, declarations):
    """Locate the class to export; deep-copy it so later edits stay local."""
    Exporter.SetDeclarations(self, declarations)
    if not self.declarations:
        self.class_ = None
        return
    decl = self.GetDeclaration(self.info.name)
    if isinstance(decl, Typedef):
        # Follow the typedef to the real class; default the rename to the
        # typedef's own name.
        self.class_ = self.GetDeclaration(decl.type.name)
        if not self.info.rename:
            self.info.rename = decl.name
    else:
        self.class_ = decl
    self.class_ = copy.deepcopy(self.class_)
def __init__(self):
    """Configure importer/exporter, the NN manager and default encodings."""
    self.version = "2.0"
    self.utils = Utilities()
    self.save_path = "./nn/" + self.utils.getTimestamp() + "/"
    self.debug_mode = False

    self.importer = Importer()
    self.importer.setDebugMode(False)

    self.exporter = Exporter(self.version)
    self.exporter.setSaveLocation(self.save_path)

    self.nnm = NeuralNetworkManager()
    self.nnm.setSaveLocation(self.save_path)
    self.nnm.setDebugMode(self.debug_mode)

    self.staticController = StaticController()

    print("COTONN v" + self.version + "\n")

    # Defaults for state encoding and variable ordering.
    self.encode = EncodeTypes.Boolean
    self.var_order = Ordering.PerCoordinate
    self.filename = ""
def SetDeclarations(self, declarations):
    """Export every declaration that lives in the parsed header, skipping
    compiler-internal names."""
    Exporter.SetDeclarations(self, declarations)
    header = os.path.normpath(self.parser_header)
    for decl in declarations:
        # Only export declarations defined in our header file.
        if os.path.normpath(decl.location[0]) != header:
            continue
        # Names starting with '__' look like internal compiler structures.
        if decl.name.startswith('__'):
            continue
        self.HandleDeclaration(decl)
def exportFile(self, path, exportFormat):
    """Export the reference list to *path* in the requested format.

    Returns True when at least one entry was written; unknown formats
    (or an HTML export with an unknown bibliography style) export nothing.
    """
    exporter = Exporter(path, self.referenceManager.iterEntryList())
    fmt = settings.ExportFormat
    if exportFormat == fmt.BIBTEX:
        count = exporter.bibtexExport()
    elif exportFormat == fmt.CSV:
        count = exporter.csvExport()
    elif exportFormat == fmt.HTML and prefs.bibStyle == settings.BibStyle.ACM:
        count = exporter.htmlACMExport()
    elif exportFormat == fmt.HTML and prefs.bibStyle == settings.BibStyle.IEEE:
        count = exporter.htmlIEEETransExport()
    else:
        count = 0
    return count > 0
def add(self, exporterId, serverIP, serverPort):
    """Validate the arguments, then create, configure and start an Exporter
    bound to the given server, registering it under *exporterId*."""
    checkInteger('exporterId', exporterId, 0)
    checkIPv4('serverIP', serverIP)
    checkPort('serverPort', serverPort)
    if exporterId in self.__exporters:
        raise Exception('Exporter(%d) already exists' % (exporterId))
    # Start from a copy of the shared template so every exporter owns
    # its own configuration dict.
    config = copy.deepcopy(self.__configTemplate)
    config['serverIP'] = serverIP
    config['serverPort'] = serverPort
    exporter = Exporter(Session())
    exporter.configure(config)
    exporter.start()
    self.__exporters[exporterId] = exporter
class TestScript_033(unittest.TestCase):
    """Export & import round trip with four layers, mirroring the manual
    TestScript_020."""

    def setUp(self):
        # (widthUnit, width, lambda_) for each of the four layers.
        layer_specs = [
            (1, 0.02, 0.350),
            (1, 0.24, 0.560),
            (1, 0.05, 0.045),
            (1, 0.01, 0.700),
        ]
        self.tab = TabData(0, "Test_033")
        self.tab.rright = 0.04
        self.tab.rleft = 0.13
        self.tab.tright = 21
        self.tab.tleft = 4
        for number, (unit, width, lambda_) in enumerate(layer_specs, start=1):
            layer = LayerData()
            layer.widthUnit = unit
            layer.width = width
            layer.lambda_ = lambda_
            # test_1 compares against self.layer_1 .. self.layer_4 directly.
            setattr(self, "layer_%d" % number, layer)
            self.tab.add_layer(layer)
        self.tab.calculate()
        self.Export = Exporter(self.tab)
        self.parser = Parser(str("TestExport"))

    def test_1(self):
        """Export the tab, parse it back, compare every stored value."""
        self.Export.export("TestExport")
        Tab = self.parser.parse()
        # Tab-level values survive the round trip.
        self.assertEqual(float(self.tab.rright), float(Tab.rright))
        self.assertEqual(float(self.tab.rleft), float(Tab.rleft))
        self.assertEqual(float(self.tab.tright), float(Tab.tright))
        self.assertEqual(float(self.tab.tleft), float(Tab.tleft))
        # Each parsed layer matches the layer it was created from.
        originals = [self.layer_1, self.layer_2, self.layer_3, self.layer_4]
        for parsed, original in zip(Tab.layers, originals):
            self.assertEqual(float(parsed.width), original.width)
            self.assertEqual(float(parsed.lambda_), original.lambda_)
            self.assertEqual(float(parsed.r), original.r)
            self.assertEqual(float(parsed.widthUnit), original.widthUnit)
BASEURL = 'https://itunes.apple.com/search?'

reincarnation = 0
while True:
    try:
        reincarnation = reincarnation + 1
        # BUG FIX: output string said "Cound" instead of "Count".
        print("Reincarnation Count: ", reincarnation)

        # logging chain - file logger, console logger
        # TODO: Add an online logger for monitoring purposes
        # TODO: make logs more understandable
        flog = FileLogger('./log.txt')
        flog.AddNextLogger(ConsoleLogger())
        logger = Logger(flog)

        # base url for the core
        core = Core(BASEURL, logger)

        # exporter
        exporter = Exporter(CSVExporter("out.csv"))

        spider = Spider(logger, core, exporter)
        spider.Start()

        print("Task Over")
        break
    except Exception:
        # BUG FIX: the original bare `except:` also swallowed
        # KeyboardInterrupt and SystemExit, making the endless retry loop
        # impossible to stop with Ctrl-C.
        print(sys.exc_info()[0])
        print("Some exception: restarting")
        continue
def __init__(self, info):
    """Forward construction to the Exporter base class; no extra state."""
    Exporter.__init__(self, info)
def __init__(self, info, parser_tail=None):
    """Forward construction to the Exporter base class; no extra state."""
    Exporter.__init__(self, info, parser_tail)
def SetDeclarations(self, declarations):
    """Register the declarations, then resolve the enum named in self.info."""
    Exporter.SetDeclarations(self, declarations)
    self.enum = self.GetDeclaration(self.info.name)
def __init__(self, item):
    """Forward construction to the Exporter base class; no extra state."""
    Exporter.__init__(self, item)
def get_Exporter(self):
    """Return the Exporter, creating it lazily on first access."""
    if self.exporter is None:
        # Built once and cached on the instance; later calls reuse it.
        self.exporter = Exporter(self.collectd)
    return self.exporter
def __init__(self, info, tail=None):
    """Forward construction to the Exporter base class; no extra state."""
    Exporter.__init__(self, info, tail)
def __init__(self):
    """Forward construction to the Exporter base class; no extra state."""
    Exporter.__init__(self)
def SetDeclarations(self, declarations):
    """Resolve the target enum, or None when no declarations were parsed."""
    Exporter.SetDeclarations(self, declarations)
    self.enum = (self.GetDeclaration(self.info.name)
                 if self.declarations else None)
class COTONN:
    """Driver that imports a static controller, trains MLPs on it and
    exports the resulting networks (COTONN v0.5.4)."""

    def __init__(self):
        self.version = "0.5.4"
        self.utils = Utilities()
        self.save_path = "./controllers/nn/" + self.utils.getTimestamp() + "/"
        self.importer = Importer()
        self.exporter = Exporter(self.version)
        self.exporter.setSaveLocation(self.save_path)
        self.nnm = NeuralNetworkManager()
        self.nnm.setSaveLocation(self.save_path)
        self.staticController = StaticController()

        self.debug_mode = False

        self.importer.setDebugMode(False)
        self.nnm.setDebugMode(self.debug_mode)

        print("COTONN v" + self.version + "\n")

    # Clean memory function
    def cleanMemory(self):
        del self.nnm.nn
        del self.nnm
        del self.staticController
        del self.exporter
        del self.importer

    # Generate MLP from fullset
    def fullSetMLP(self, filename, layer_width, layer_height, learning_rate,
                   dropout_rate, fitness_threshold, batch_size, display_step,
                   save_option=True):
        self.staticController = self.importer.readStaticController(filename)

        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.formatToBinary()

        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setDataSet(fullSet)
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold, batch_size,
                            display_step, -1, 5000)
        self.nnm.getDataSize()

        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()

        fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)

        if (save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)

        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from subset (trained on a subset, fitness-checked on the
    # full set)
    def subSetMLP(self, filename, percentage, layer_width, layer_height,
                  learning_rate, dropout_rate, fitness_threshold, batch_size,
                  display_step, save_option=True):
        self.staticController = self.importer.readStaticController(filename)

        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.formatToBinary()

        subSet = DataSet()
        subSet.readSubsetFromController(self.staticController, percentage)
        subSet.formatToBinary()

        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setDataSet(subSet)
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold, batch_size,
                            display_step, -1, 5000)
        self.nnm.getDataSize()

        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()

        fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)

        if (save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)

        self.nnm.close()
        self.cleanMemory()

    # Scout learningrate convergence
    def scoutLearningRateConvergence(self, filename, layer_width, layer_height,
                                     epoch_threshold, rates, batch_size,
                                     display_step):
        self.staticController = self.importer.readStaticController(filename)

        dataSet = DataSet()
        dataSet.readSetFromController(self.staticController)
        dataSet.formatToBinary()

        self.nnm.setDebugMode(False)

        fitnesses = []
        for r in rates:
            print("\nLearning rate: " + str(r))
            self.nnm.setType(NNTypes.MLP)
            self.nnm.setTrainingMethod(NNOptimizer.Adam)
            self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
            self.nnm.setDataSet(dataSet)
            self.nnm.rectangularHiddenLayers(layer_width, layer_height)
            self.nnm.initializeNeuralNetwork()
            self.nnm.initializeTraining(r, 1.0, batch_size, display_step,
                                        epoch_threshold)
            self.nnm.train()
            fitness, wrong_states = self.nnm.checkFitness(dataSet)
            # BUG FIX: was `self.fitnesses.append(fitness)` — the list is the
            # local `fitnesses` created above; the attribute never exists and
            # would raise AttributeError on the first iteration.
            fitnesses.append(fitness)
            self.nnm.close()

        # Plot
        plt.semilogx(rates, fitnesses, 'r-')
        plt.xlabel("Rates")
        plt.ylabel("Fitness")
        plt.grid()
        (x1, x2, y1, y2) = plt.axis()
        plt.axis((min(rates), max(rates), 0.0, y2 + 0.1))
        plt.show()

        self.cleanMemory()

    # Generate MLP from fullset, restoring a previously saved network first
    def importMLP(self, import_path, filename, layer_width, layer_height,
                  learning_rate, dropout_rate, fitness_threshold, batch_size,
                  display_step, save_option=True):
        self.staticController = self.importer.readStaticController(filename)

        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.formatToBinary()

        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setDataSet(fullSet)

        # Option to adjust parameters for new training session
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold, batch_size,
                            display_step)
        self.nnm.getDataSize()

        # Restore Network from saved file:
        self.importer.restoreNetwork(self.nnm, import_path)

        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()

        fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)

        # Save Network or Variables
        if (save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)

        self.nnm.close()

    # Generate MLP from fullset with custom hidden layers
    def customFullSetMLP(self, filename, layer, learning_rate, dropout_rate,
                         fitness_threshold, batch_size, display_step,
                         save_option=True):
        self.staticController = self.importer.readStaticController(filename)

        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.formatToBinary()

        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setDataSet(fullSet)
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.customHiddenLayers(layer)
        self.nnm.initialize(learning_rate, fitness_threshold, batch_size,
                            display_step, -1, 5000)
        self.nnm.getDataSize()

        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()

        fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)

        if (save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)

        self.nnm.close()
        self.cleanMemory()
class COTONN:
    """Top-level COTONN pipeline driver.

    Wires together Importer, Exporter, NeuralNetworkManager and a
    StaticController, and exposes one public method per training
    scenario: deterministic / determinizing / non-deterministic data
    sets, custom or rectangular hidden layers, trained from scratch or
    restored from a saved network ("import..." variants).  Every
    training method ends with cleanMemory(), so one instance serves a
    single run.
    """

    def __init__(self):
        """Create the pipeline components and a timestamped save folder."""
        self.version = "2.0"
        self.utils = Utilities()
        # All artefacts of one run go under ./nn/<timestamp>/
        self.save_path = "./nn/" + self.utils.getTimestamp() + "/"
        self.importer = Importer()
        self.exporter = Exporter(self.version)
        self.exporter.setSaveLocation(self.save_path)
        self.nnm = NeuralNetworkManager()
        self.nnm.setSaveLocation(self.save_path)
        self.staticController = StaticController()
        self.debug_mode = False
        self.importer.setDebugMode(False)
        self.nnm.setDebugMode(self.debug_mode)
        print("COTONN v" + self.version + "\n")
        # Defaults; the training methods overwrite these per call.
        self.encode = EncodeTypes.Boolean
        self.var_order = Ordering.PerCoordinate
        self.filename = ""

    # Clean memory function
    def cleanMemory(self):
        """Drop references to the large pipeline objects.

        NOTE(review): this deletes self.nnm/exporter/importer outright,
        so the instance is unusable afterwards; a new COTONN must be
        constructed for the next run.
        """
        del self.nnm.nn
        del self.nnm.controller
        del self.nnm
        del self.staticController
        del self.exporter
        del self.importer

    # Generate MLP from fullset
    def deterministicMLP(self, filename, layer, encode = 0, var_order = 0,
                         learning_rate = 0.01, dropout_rate = 0.0,
                         fitness_threshold = 1.0, batch_size = 1024,
                         display_step = 50, save_option=True):
        """Train an MLP (explicit hidden layers) on the deterministic
        grid-point set of the controller in `filename`.

        encode: 0 selects EncodeTypes.Boolean.
            NOTE(review): any other value leaves self.encode at its
            previous value -- confirm whether a second encoding was
            meant to be selectable here.
        var_order: 0 -> Ordering.Original, otherwise PerCoordinate.
        save_option: export network/states/matlab/binary/training data.
        """
        self.importer.det = True
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.addAllGridPointDeterministic(self.staticController,
                                             self.encode, self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Sigmoid)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.customHiddenLayers(layer)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step, -1, 5000)
        self.nnm.getDataSize()
        # self.nnm.setEpochThreshold(172)
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        # fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def importDeterministicMLP(self, import_path, filename, layer,
                               encode = 0, var_order = 0,
                               learning_rate = 0.01, dropout_rate = 0.0,
                               fitness_threshold = 1.0, batch_size = 1024,
                               display_step = 50, save_option=True):
        """Like deterministicMLP, but restores a saved network from
        `import_path` before training.

        NOTE(review): initialize() is called with only 4 arguments here,
        while the non-import variants pass the extra (-1, 5000) --
        confirm the shorter call is intentional.
        """
        self.importer.det = True
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        # fullSet.formatToBinary()
        fullSet.addAllGridPointDeterministic(self.staticController,
                                             self.encode, self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        # self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Sigmoid)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        # Option to adjust parameters for new training session
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.customHiddenLayers(layer)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step)
        self.nnm.getDataSize()
        # Restore Network from saved file:
        self.importer.restoreNetwork(self.nnm, import_path)
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        self.nnm.randomCheck(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def deterministicRectMLP(self, filename, layer_width, layer_height,
                             encode = 0, var_order = 0,
                             learning_rate = 0.01, dropout_rate = 0.0,
                             fitness_threshold = 1.0, batch_size = 1024,
                             display_step = 50, save_option=True):
        """deterministicMLP with a rectangular hidden-layer topology
        (layer_width x layer_height) instead of explicit layer sizes."""
        self.importer.det = True
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.addAllGridPointDeterministic(self.staticController,
                                             self.encode, self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Sigmoid)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step, -1, 5000)
        self.nnm.getDataSize()
        # self.nnm.setEpochThreshold(172)
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        # fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def importDeterministicRectMLP(self, import_path, filename,
                                   layer_width, layer_height,
                                   encode = 0, var_order = 0,
                                   learning_rate = 0.01, dropout_rate = 0.0,
                                   fitness_threshold = 1.0, batch_size = 1024,
                                   display_step = 50, save_option=True):
        """importDeterministicMLP with a rectangular hidden-layer topology.

        NOTE(review): same 4-argument initialize() as
        importDeterministicMLP -- see note there.
        """
        self.importer.det = True
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        # fullSet.formatToBinary()
        fullSet.addAllGridPointDeterministic(self.staticController,
                                             self.encode, self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        # self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Sigmoid)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        # Option to adjust parameters for new training session
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step)
        self.nnm.getDataSize()
        # Restore Network from saved file:
        self.importer.restoreNetwork(self.nnm, import_path)
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        self.nnm.randomCheck(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def determinizingMLP(self, filename, layer, encode = 0, var_order = 0,
                         learning_rate = 0.01, dropout_rate = 0.0,
                         fitness_threshold = 1.0, batch_size = 1024,
                         display_step = 50, save_option=True):
        """Train an MLP on the *determinizing* grid-point set
        (importer.det = False, Softmax output layer)."""
        self.importer.det = False
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.addAllGridPointDeterminizing(self.staticController,
                                             self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Softmax)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.customHiddenLayers(layer)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step, -1, 5000)
        self.nnm.getDataSize()
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        # fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def importDeterminizingMLP(self, import_path, filename, layer,
                               encode = 0, var_order = 0,
                               learning_rate = 0.01, dropout_rate = 0.0,
                               fitness_threshold = 1.0, batch_size = 1024,
                               display_step = 50, save_option=True):
        """determinizingMLP, restoring a saved network from `import_path`
        before training (note: unlike the deterministic import variants,
        this one keeps the 6-argument initialize())."""
        self.importer.det = False
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.addAllGridPointDeterminizing(self.staticController,
                                             self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Softmax)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.customHiddenLayers(layer)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step, -1, 5000)
        self.nnm.getDataSize()
        # Restore Network from saved file:
        self.importer.restoreNetwork(self.nnm, import_path)
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        # fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def determinizingRectMLP(self, filename, layer_width, layer_height,
                             encode = 0, var_order = 0,
                             learning_rate = 0.01, dropout_rate = 0.0,
                             fitness_threshold = 1.0, batch_size = 1024,
                             display_step = 50, save_option=True):
        """determinizingMLP with a rectangular hidden-layer topology."""
        self.importer.det = False
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.addAllGridPointDeterminizing(self.staticController,
                                             self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Softmax)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step, -1, 5000)
        self.nnm.getDataSize()
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        # fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def importDeterminizingRectMLP(self, import_path, filename,
                                   layer_width, layer_height,
                                   encode = 0, var_order = 0,
                                   learning_rate = 0.01, dropout_rate = 0.0,
                                   fitness_threshold = 1.0, batch_size = 1024,
                                   display_step = 50, save_option=True):
        """determinizingRectMLP, restoring a saved network from
        `import_path` before training."""
        self.importer.det = False
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.addAllGridPointDeterminizing(self.staticController,
                                             self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Softmax)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step, -1, 5000)
        self.nnm.getDataSize()
        # Restore Network from saved file:
        self.importer.restoreNetwork(self.nnm, import_path)
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        # fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def nonDeterministicMLP(self, filename, input_file, layer,
                            encode = 0, var_order = 0,
                            learning_rate = 0.01, dropout_rate = 0.0,
                            fitness_threshold = 1.0, batch_size = 1024,
                            display_step = 50, save_option=True):
        """Train an MLP on the non-deterministic grid-point set built
        from `input_file`; sets nnm.non_det so fitness checking uses the
        non-deterministic path."""
        self.importer.det = False
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.addAllGridPointNonDeterministic(input_file,
                                                self.staticController,
                                                self.var_order)
        self.nnm.non_det = True
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Sigmoid)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.customHiddenLayers(layer)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step, -1, 5000)
        self.nnm.getDataSize()
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        # fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from fullset
    def importNonDeterministicMLP(self, import_path, input_file, filename,
                                  layer, encode = 0, var_order = 0,
                                  learning_rate = 0.01, dropout_rate = 0.0,
                                  fitness_threshold = 1.0, batch_size = 1024,
                                  display_step = 50, save_option=True):
        """nonDeterministicMLP, restoring a saved network from
        `import_path` before training.

        NOTE(review): unlike nonDeterministicMLP this method never sets
        self.nnm.non_det = True -- verify whether that flag is needed
        here as well.  Also note the parameter order differs
        (import_path, input_file, filename) vs (filename, input_file)
        in the non-import variant.
        """
        self.importer.det = False
        self.staticController = self.importer.readStaticController(filename)
        self.filename = filename
        if(encode == 0):
            self.encode = EncodeTypes.Boolean
        if(var_order == 0):
            self.var_order = Ordering.Original
        else:
            self.var_order = Ordering.PerCoordinate
        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.addAllGridPointNonDeterministic(input_file,
                                                self.staticController,
                                                self.var_order)
        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunctionHidden(NNActivationFunction.Sigmoid)
        self.nnm.setActivationFunctionOutput(NNActivationFunction.Sigmoid)
        self.nnm.setEncodeTypes(self.encode)
        self.nnm.setDataSet(fullSet)
        self.nnm.controller = self.staticController
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.customHiddenLayers(layer)
        self.nnm.initialize(learning_rate, fitness_threshold,
                            batch_size, display_step, -1, 5000)
        self.nnm.getDataSize()
        # Restore Network from saved file:
        self.importer.restoreNetwork(self.nnm, import_path)
        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()
        # fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)
        fitness, wrong_states = self.nnm.checkFitnessAllGridPoint(fullSet)
        loosing_states = self.nnm.createLoosingPoints(wrong_states)
        if(save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)
            self.exporter.saveTrainingData(self.nnm)
            self.exporter.saveUpdatedAvoid(self.filename, wrong_states,
                                           loosing_states)
        self.nnm.close()
        self.cleanMemory()
def __init__(self, item): Exporter.__init__(self, item) self.params = Parameter(name='params', type='group', children=[ {'name': 'separator', 'type': 'list', 'value': 'comma', 'values': ['comma', 'tab']}, ])