def addStream(self, s):
    if self.verbose:
        print "addStream", s
    if s not in self.streamList:
        self.streamList.append(s)
        self.cumfreq[s] = 0
    updateConfig = 0
    if self.dsConfig == None:  # no config object from dataswitch yet.
        if DataSwitch != None:
            self.dsConfig = DataSwitch.DataSwitchModule.Config([])  # ,[],[])
        else:
            self.dsConfig = Config()
        updateConfig = -1  # we don't want to update the config - let someone else do this.
    if s not in [x.name for x in self.dsConfig.generic]:
        # this stream not yet in dataswitch config...
        self.rtcDecimate[s] = 1
        self.dsDecimate[s] = 1
        if self.defaultSave:
            self.saveStream[s] = Saver.Saver(s + ".log")
        else:
            self.saveStream[s] = None
        if DataSwitch != None:
            gc = DataSwitch.DataSwitchModule.GenericConfig(s, 0, 0, s + ".log", self.defaultSave)
        else:
            gc = GenericItem(s, 0, 0, s + ".log", self.defaultSave)
        self.dsConfig.generic.append(gc)
        updateConfig += 1
    else:
        # this stream is in the dataswitch config...
        for obj in self.dsConfig.generic:
            if obj.name == s:
                self.rtcDecimate[s] = obj.decimate1
                self.dsDecimate[s] = obj.decimate2
                if not self.saveStream.has_key(s):
                    self.saveStream[s] = None
                if obj.log:  # logging...
                    if self.saveStream[s] == None:
                        self.saveStream[s] = Saver.Saver(obj.logFile)
                else:
                    if self.saveStream[s] != None:
                        self.saveStream[s].close()
                        self.saveStream[s] = None
                break
    self.addCallback(s, self.handlePxl)
    if updateConfig == 1 and self.serverObject != None:
        self.serverObject.publishConfig(self.dsConfig)
    if DataSwitch != None:
        print "Informing dataswitch about %s" % s
        # create an empty stream.
        a = DataSwitch.DataSwitchModule.Generic(1, "i", 0, 0., 1, [0], 0, "")
        if self.serverObject != None:
            self.serverObject.publishGeneric(a, s)
        print "Done informing"
def __init__(self, host, port, shmname, prefix, debug, log, raw,
             connect=1, attempts=-1, startWithLatest=0, decimate=1):
    self.host = host
    self.port = port
    self.startWithLatest = startWithLatest
    self.shmname = shmname
    self.prefix = prefix
    self.shmOpen = 0
    self.sock = None
    self.circbuf = None
    self.debug = debug
    self.raw = raw  # if 1, send in raw mode, not serialised.
    self.saver = None
    self.attempts = attempts
    self.cumfreq = 0
    self.decimate = decimate
    if log:
        self.saver = Saver.Saver(shmname + ".log")
    self.openSHM()
    if connect:
        self.connectSock()
    self.go = 1
def contextMenuEvent(self, event):
    contextMenu = QtWidgets.QMenu(self)
    inputAct = contextMenu.addAction('Input Block')
    outputAct = contextMenu.addAction('Output Block')
    convAct = contextMenu.addAction('Conv Block')
    fusionAct = contextMenu.addAction('Fusion Block')
    newAct = contextMenu.addAction('New')
    saveAct = contextMenu.addAction('Save')
    loadAct = contextMenu.addAction('Load')
    compAct = contextMenu.addAction('Compile')
    quitAct = contextMenu.addAction('Close')
    action = contextMenu.exec_(self.mapToGlobal(event.pos()))
    if action == quitAct:
        self.close()
    elif action == newAct:
        self._clear()
    elif action == saveAct:
        text, ok = QtWidgets.QInputDialog.getText(self, 'Save', 'Enter file name:')
        if ok:
            Saver.saveGraph(text, self)
    elif action == loadAct:
        text, ok = QtWidgets.QInputDialog.getText(self, 'Load', 'Enter file name:')
        if ok:
            Saver.loadGraph(text, self, True)
    elif action == compAct:
        text, ok = QtWidgets.QInputDialog.getText(self, 'Compile', 'Enter file name:')
        if ok:
            compiler.compile(self, fname=text + '.py')
    elif action is not None:  # one of the block-creation actions
        text, ok = QtWidgets.QInputDialog.getText(self, 'Input dialog', 'Enter block name:')
        if action == inputAct:
            nodetype = 'startNode'
        if action == outputAct:
            nodetype = 'finalNode'
        if action == convAct:
            nodetype = 'convNode'
        if action == fusionAct:
            nodetype = 'addNode'
        if ok:
            self.createNode(text, nodetype)
def getStreams(self, data=None):
    if data == None:
        self.execute("s=c.getStreams()", "s", "streamList")
    else:
        # the list of streams has arrived...
        configChanged = 0
        if self.dsConfig == None:  # no config object from dataswitch yet.
            if DataSwitch != None:
                self.dsConfig = DataSwitch.DataSwitchModule.Config([])  # ,[],[])
            else:
                self.dsConfig = Config()
            configChanged = 1
        self.streamList = data[2]["s"]
        print "stream list:", self.streamList
        for s in self.streamList:
            self.cumfreq[s] = 0
            if s not in [x.name for x in self.dsConfig.generic]:
                # this stream not yet in dataswitch config...
                self.rtcDecimate[s] = 1
                self.dsDecimate[s] = 1
                self.saveStream[s] = None
                configChanged = 1
                if DataSwitch != None:
                    gc = DataSwitch.DataSwitchModule.GenericConfig(s, 0, 0, s + ".log", 0)
                else:
                    gc = GenericItem(s, 0, 0, s + ".log", 0)
                self.dsConfig.generic.append(gc)
            else:
                # this stream is in the dataswitch config...
                for obj in self.dsConfig.generic:
                    if obj.name == s:
                        self.rtcDecimate[s] = obj.decimate1
                        self.dsDecimate[s] = obj.decimate2
                        if not self.saveStream.has_key(s):
                            self.saveStream[s] = None
                        if obj.log:  # logging...
                            if self.saveStream[s] == None:
                                self.saveStream[s] = Saver.Saver(obj.logFile)
                        else:
                            if self.saveStream[s] != None:
                                self.saveStream[s].close()
                                self.saveStream[s] = None
                        break
            self.addCallback(s, self.handlePxl)
            if DataSwitch != None and self.serverObject != None:
                print "Informing dataswitch about %s" % s
                a = DataSwitch.DataSwitchModule.Generic(1, "i", 0, 0., 1, [0], 0, "")
                self.serverObject.publishGeneric(a, s)
                print "Done informing"
        if configChanged:
            # we have updated the config here...
            if self.serverObject != None:
                self.serverObject.publishConfig(self.dsConfig)
def compile(nodeGraph, fname=None):
    graphName = nodeGraph.graphName
    graph_dict = Saver.saveGraph('temp_graph.json', nodeGraph)
    node_list = construct_tree(graph_dict)
    lines = []
    index = 0
    max_index = len(node_list)
    while index < max_index:
        node = node_list[index]
        func_name = node.func
        input_args = []
        for child in node.childs:
            arg_ind = child[0]
            arg_name = node.sockets[arg_ind]
            child_node = child[1]
            child_arg_ind = child[2]
            child_name = child_node.name
            if node.nodeType != 'finalNode':
                arg_str = arg_name + '='
            else:
                arg_str = ''
            if len(child_node.plugs) == 1:
                arg_str += child_name + '_output'
            else:
                arg_str += child_name + '_output[%d]' % child_arg_ind
            input_args.append(arg_str)
            if child_node not in node_list:
                node_list.append(child_node)
                max_index = len(node_list)
        code_line = node.name + '_output = %s(%s)' % (func_name, ','.join(input_args))
        # Exception for the output node: emit it as a statement rather than an assignment.
        if node.nodeType == 'finalNode':
            code_line = '%s %s' % (func_name, ','.join(input_args))
        # Naively adding '\t' may cause problems later; revisit this at some point.
        lines.append('\t' + code_line)
        index += 1
    # The function definition is appended last and moved to the front by the reversal below.
    first_line = 'def %s():' % graphName
    lines.append(first_line)
    lines = lines[::-1]
    for line in lines:
        print(line)
    if fname is not None:
        with open(fname, 'w') as fout:
            for line in lines:
                fout.write(line + '\n')
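# Hedged illustration (not taken from the source): the emission loop above walks the node
# list from the output node towards the inputs and then reverses, so for a hypothetical
# graph named 'graph0' in which a node 'conv1' feeds the final node, the printed/saved
# module would have roughly this shape.  The concrete function and argument names come
# from node.func and node.sockets, which are defined elsewhere in the project, so the
# identifiers below are assumptions for illustration only.
#
#   def graph0():
#       input1_output = input_block()
#       conv1_output = conv_block(x=input1_output)
#       return conv1_output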
def __init__(self, sess: tf.Session, name: str):
    self._sess = sess
    self._name = name
    self._BuildNetwork()
    self._sess.run(tf.global_variables_initializer())
    self._tb = tb.TensorBoard(name, sess)
    self._SetTensorBoard()
    self._saver = sv.Saver(name, sess)
    self._SetSaver()
def __init__(self, sess, input_size, output_size, name):
    self._sess = sess
    self._input_size = input_size
    self._output_size = output_size
    self._name = name
    self._BulidBaseNetwork()
    self._BulidCriticNetwork()
    self._BulidActorNetwork()
    self._sess.run(tf.global_variables_initializer())
    self._saver = sv.Saver(name, sess)
    self._SetSaver()
    print("TF setup complete!")  # translated from the original Korean: "TF준비 완료!"
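# Minimal sketch of the kind of checkpoint wrapper that the two constructors above assume
# when they call sv.Saver(name, sess).  It is built only on the standard TF1 tf.train.Saver
# API; the class name, constructor signature, checkpoint directory layout and the
# save()/load() method names are assumptions for illustration, not the project's code.
import os
import tensorflow as tf

class Saver:
    def __init__(self, name, sess, ckpt_dir="./checkpoints"):
        self._sess = sess
        self._path = os.path.join(ckpt_dir, name, "model.ckpt")
        self._saver = tf.train.Saver()  # saves/restores the graph's variables

    def save(self, step=None):
        # Create the checkpoint directory lazily, then write a checkpoint.
        os.makedirs(os.path.dirname(self._path), exist_ok=True)
        self._saver.save(self._sess, self._path, global_step=step)

    def load(self):
        # Restore the latest checkpoint if one exists; report whether it did.
        ckpt = tf.train.latest_checkpoint(os.path.dirname(self._path))
        if ckpt is None:
            return False
        self._saver.restore(self._sess, ckpt)
        return True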
def crear_dicc(self, parsed2, elemento, source, indice_link):
    objeto_mapa = parsed2.xpath(XPATH_MAPA)
    if len(objeto_mapa) > 0:  # check whether the listing has a map
        mapa = re.search(REGEX_LOCATION, objeto_mapa[0])
        latitud = mapa.group(1)
        longitud = mapa.group(2)
    else:
        latitud = "Null"
        longitud = "Null"
    precio = re.findall(REGEX_PRECIO, (parsed2.xpath(XPATH_PRECIO)[1]))
    ubicacion = parsed2.xpath(XPATH_UBICACION)
    zona = parsed2.xpath(XPATH_TITULO1)[0] + parsed2.xpath(XPATH_PRECIO)[0].strip()
    colonia = parsed2.xpath(XPATH_TITULO1)[1].strip() + ' ' + ubicacion[3].strip()
    title = zona + ' ' + colonia
    description = parsed2.xpath(XPATH_DESCRIPCION)[0].strip()
    if indice_link % 2 == 0:  # check even/odd to decide which fields are present in the listing
        land = "Null"
        construccion = "Null"
    else:
        land = ubicacion[7].strip()
        construccion = ubicacion[6].strip()
    dictionary = {
        "Price": precio,
        "Location": ubicacion[1].strip(),
        "Latitude": latitud.strip(),
        "Longitude": longitud.strip(),
        "Link": elemento,
        "Title": title.strip(),
        "Description": description.strip(),
        "Square Meter Land": land,
        "Square Meter Construction": construccion,
        "Bathroom": ubicacion[5].strip(),
        "Bedroom": ubicacion[4].strip(),
        "Source": source
    }
    file_name = "Inmobiliarias.csv"
    instancia_saver = Saver.Saver(file_name)
    instancia_saver.crear_csv(dictionary)
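# Hedged sketch of the Saver.crear_csv interface that the two scraping snippets rely on:
# a class constructed with a CSV file name whose crear_csv(dict) call appends one row.
# The header handling and file layout below are assumptions for illustration only; the
# project's real Saver module may differ.
import csv
import os

class Saver:
    def __init__(self, file_name):
        self.file_name = file_name

    def crear_csv(self, row):
        # Write the header only when the file does not exist yet, then append the row.
        write_header = not os.path.exists(self.file_name)
        with open(self.file_name, 'a', newline='', encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=list(row.keys()))
            if write_header:
                writer.writeheader()
            writer.writerow(row)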
def dsConfigCallback(self, msg, config):
    """Callback called when config on the dataswitch changes.
    config.generic is a list of objects, o, which have:
    o.name - string
    o.decimate1 - int
    o.decimate2 - int
    o.logFile - string
    o.log - bool
    """
    print "dsConfigCallback", msg, config
    self.dsConfig = config
    update = 0
    for obj in config.generic:
        if obj.name in self.streamList:
            # this is the config object for this stream...
            self.rtcDecimate[obj.name] = obj.decimate1
            self.dsDecimate[obj.name] = obj.decimate2
            # self.execute("c.setRTCDecimation('%s',%d);c.subscribe(sock,'%s',%d)"%(obj.name,obj.decimate1,obj.name,obj.decimate1))
            # dataclient needs data at decimate1.  It then decimates this by decimate2/decimate1 before sending to dataswitch.
            if not self.saveStream.has_key(obj.name):
                self.saveStream[obj.name] = None
            if obj.log:  # logging...
                if self.saveStream[obj.name] == None:  # not currently logging
                    print "Start logging"
                    self.saveStream[obj.name] = Saver.Saver(obj.logFile)
            else:  # not logging
                if self.saveStream[obj.name] != None:  # need to finish the save
                    print "Finishing logging"
                    self.saveStream[obj.name].close()
                    self.saveStream[obj.name] = None
            if self.decDict != None:
                if self.decDict.has_key(obj.name):
                    if obj.decimate1 != self.decDict[obj.name]:
                        obj.decimate1 = self.decDict[obj.name]
                        update = 1
    self.decDict = None
    if update and self.serverObject != None:
        self.serverObject.publishConfig(self.dsConfig)
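# Hedged sketch of the plain-Python fallbacks (Config / GenericItem) that addStream,
# getStreams and dsConfigCallback above fall back to when the DataSwitch module cannot
# be imported.  The attribute names follow the dsConfigCallback docstring and the
# attribute accesses in those methods, and the argument order matches the
# GenericItem(name, decimate1, decimate2, logFile, log) calls, but this is an assumed
# reconstruction, not the project's actual definition.
class GenericItem:
    def __init__(self, name, decimate1, decimate2, logFile, log):
        self.name = name            # stream name
        self.decimate1 = decimate1  # decimation applied by the RTC/data client
        self.decimate2 = decimate2  # further decimation before the dataswitch
        self.logFile = logFile      # file the stream is logged to
        self.log = log              # whether logging is currently enabled

class Config:
    def __init__(self, generic=None):
        # list of GenericItem-like objects, one entry per stream
        self.generic = generic if generic is not None else []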
import threading

import tensorflow as tf

from Agent import Agent
import GUI
import Saver
import Displayer
from settings import Settings

if __name__ == '__main__':

    tf.reset_default_graph()

    with tf.Session() as sess:
        saver = Saver.Saver(sess)
        displayer = Displayer.Displayer()

        gui = GUI.Interface(['ep_reward', 'plot', 'plot_distrib',
                             'render', 'gif', 'save'])
        gui_thread = threading.Thread(target=gui.run)

        agent = Agent(sess, gui, displayer, saver)

        if not saver.load():
            sess.run(tf.global_variables_initializer())

        gui_thread.start()

        try:
            agent.run()
        except KeyboardInterrupt:
            pass
# Assumed imports for the modules used below (os, DM, M, GE); their import lines fall
# outside this excerpt and are reconstructed to match the runner script further down.
import os

import agent.DeepMindAgent as DM
import Message as M
import GameEnv as GE
import Saver as S
import QtDisplay as Qt

################################################################################
## CONFIGURATION
################################################################################

framesFolder = "./pictures/test01/xavier"  # Folder where to store the data
rom = "../../../ROM/breakout.bin"          # Game to load
dbPath = "../data_xavier.db"               # Database that stores the agents
agentId = 1                                # Id of the agent to test
refEpoch = 100                             # Epoch to replay

################################################################################

env = GE.GameEnv(rom, [84, 84])  # Environment
msg = M.Message()                # Unused but required to initialize the agent
saver = S.Saver(dbPath)          # The saver that holds data about the agents
qt = Qt.DisplayHandler()         # The display
qt.start()
qt.Qt().createPlotter()
qt.Qt().createEnvDisplay(env, 30)
plt = qt.Qt().plotter()          # The plotter (unused but required by the agent)

print("Loading the agent ... ", end="", flush=True)
agent = DM.DeepMindAgent.loadAgent(msg, saver, plt, env, agentId, None)
print("done")

# Delete the directories and their content if they exist, then recreate empty ones
if os.path.exists(framesFolder + os.sep + "max"):
def SaveUser(self, request, context):
    response = user_pb2.UserReply()
    response.message = Saver.SaveUser(request)
    return response
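# Hedged sketch of how a SaveUser handler like the one above would typically be hosted.
# The generated module name user_pb2_grpc, the service name 'User', and the servicer
# class/registration helper names are assumptions inferred from the user_pb2 import;
# only the grpc calls themselves (grpc.server, add_insecure_port, start,
# wait_for_termination) are standard grpcio API.
from concurrent import futures

import grpc
import user_pb2
import user_pb2_grpc  # assumed generated module

import Saver

class UserServicer(user_pb2_grpc.UserServicer):  # assumed generated base class
    def SaveUser(self, request, context):
        response = user_pb2.UserReply()
        response.message = Saver.SaveUser(request)
        return response

def serve():
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    user_pb2_grpc.add_UserServicer_to_server(UserServicer(), server)  # assumed registration helper
    server.add_insecure_port('[::]:50051')
    server.start()
    server.wait_for_termination()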
import sys

import agent.DeepMindAgent as DM
import Message as M
import GameEnv as GE
import Saver as S
import QtDisplay as Qt

if len(sys.argv) != 2:
    print("Usage: {} <path to rom>".format(sys.argv[0]))
    quit()

rom = sys.argv[1]
dbPath = "./data.db"
info = "The agent is running. Type 'stop' to quit"

m = M.Message()
s = S.Saver(dbPath)
e = GE.GameEnv(rom, [84, 84])

qt = Qt.DisplayHandler()
qt.start()
qt.Qt().createPlotter()
qt.Qt().createEnvDisplay(e, 30)
plt = qt.Qt().plotter()

ag = DM.DeepMindAgent.createNewAgent(m, s, plt, e, "Deepmind NIPS agent")
ag.start()

print(info)
m.write(M.Message.TRAIN, None)

while True:
from PyQt5 import QtWidgets

import NodeGraph
import Saver

app = QtWidgets.QApplication([])

# start: graph main body
node_graph = NodeGraph.NodeGraph(None)
Saver.loadGraph('graph0.json', node_graph, clearGraph=True)
# end: graph main body

app.exec_()
def main():
    usage = 'python %prog [-p <project> -f <file> -l <lines> -e <evaluate> -s <save>]'
    parser = optparse.OptionParser(usage)
    parser.add_option('-a', '--all', dest='All', type='int', default=0,
                      help='input "-a 1" to find all qualified cnnvd and apis')
    parser.add_option('-p', '--project', dest='Project', type='string',
                      help='Java project directory')
    parser.add_option('-f', '--file', dest='File', type='string',
                      help='vulnerable file in project')
    parser.add_option('-l', '--line', dest='Line', type='int',
                      help='vulnerable line')
    parser.add_option('-e', '--evaluate', dest='Evaluate', type='int', default=0,
                      help='input "-e 1" to compute projects\' stars and forks, '
                           'input "-e 2" to compute projects\' vulnerable api num, '
                           'input "-e 3" to compute both')
    parser.add_option('-s', '--save', dest='Save', type='int', default=0,
                      help='input "-s 1" to save into database')
    options, args = parser.parse_args()

    if options.All == 1:
        Finder.start()
        jar.find_jars()
        if options.Evaluate == 1:
            Evaluator.compute_project_star_fork()
        elif options.Evaluate == 2:
            Evaluator.compute_vulnerable_api_num()
        elif options.Evaluate == 3:
            Evaluator.compute_project_star_fork()
            Evaluator.compute_vulnerable_api_num()
        if options.Save == 1:
            Saver.save_api_into_database()
            Saver.save_vul_into_database()
    elif options.Project != None and options.File != None and options.Line != None:
        udbfile = options.Project + '.udb'
        os.system('und create -languages Java ' + udbfile)
        os.system('und add ' + options.Project + ' ' + udbfile)
        os.system('und analyze ' + udbfile)
        db = understand.open(udbfile)
        lines = []
        lines.append(str(options.Line))
        apis_file = open('./api_json/' + options.Project + '.json', 'a+', encoding='utf-8')
        total_api = []
        Finder.find_vulnerable_api(db, options.File, lines, apis_file, total_api)  # was an undefined 'file_name'
        jar.find_jar(options.Project)
    else:
        if options.Evaluate != 0:
            if options.Evaluate == 1:
                Evaluator.compute_project_star_fork()
            elif options.Evaluate == 2:
                Evaluator.compute_vulnerable_api_num()
            elif options.Evaluate == 3:
                Evaluator.compute_project_star_fork()
                Evaluator.compute_vulnerable_api_num()
            if options.Save == 1:
                Saver.save_api_into_database()
                Saver.save_vul_into_database()
        else:
            if options.Save == 1:
                Saver.save_api_into_database()
                Saver.save_vul_into_database()
            else:
                print('you should input project, file and line at the same time!')
def parse_home():
    try:
        indice_url = 0
        locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
        for url in HOME_URL:
            link = re.findall(REGEX_LINK, url)
            source = re.findall(REGEX_SOURCE, url)
            response = requests.get(url)
            notice = response.content.decode('utf-8')
            parsed = html.fromstring(notice)
            ultima_pagina = parsed.xpath(XPATH_ULTIMA_PAGINA)[0]
            last_page = int(locale.atof((re.findall(REGEX_ULTIMA_PAGINA, ultima_pagina))[0]))
            cantidad_paginas = math.ceil(last_page / 24)
            indice_pagina = 1
            while cantidad_paginas >= indice_pagina:
                contador = 1
                sigiente_pagina = NUEVO_LINK[indice_url].format(indice_pagina)
                response3 = requests.get(sigiente_pagina)
                notice3 = response3.content.decode('utf-8')
                parsed3 = html.fromstring(notice3)
                unidades_href = parsed3.xpath(XPATH_HREF_UNIDADES)
                for elemento in unidades_href:
                    response2 = requests.get(elemento)
                    notice2 = response2.content.decode('utf-8')
                    parsed2 = html.fromstring(notice2)
                    objeto_mapa = parsed2.xpath(XPATH_MAPA)
                    if len(objeto_mapa) > 0:  # check whether the listing has a map
                        mapa = re.search(REGEX_LOCATION, objeto_mapa[0])
                        latitud = mapa.group(1)
                        longitud = mapa.group(2)
                    else:
                        latitud = "Null"
                        longitud = "Null"
                    precio = re.findall(REGEX_PRECIO, (parsed2.xpath(XPATH_PRECIO)[1]))
                    ubicacion = parsed2.xpath(XPATH_UBICACION)
                    zona = parsed2.xpath(XPATH_TITULO1)[0] + parsed2.xpath(XPATH_PRECIO)[0].strip()
                    colonia = parsed2.xpath(XPATH_TITULO1)[1].strip() + ' ' + ubicacion[3].strip()
                    title = zona + ' ' + colonia
                    description = parsed2.xpath(XPATH_DESCRIPCION)[0].strip()
                    if indice_url == 0 or indice_url == 2:
                        land = "Null"
                        construccion = "Null"
                    else:
                        land = ubicacion[7].strip()
                        construccion = ubicacion[6].strip()
                    dictionary = {
                        "Price": precio,
                        "Location": ubicacion[1].strip(),
                        "Latitude": latitud.strip(),
                        "Longitude": longitud.strip(),
                        "Link": link,
                        "Title": title.strip(),
                        "Description": description.strip(),
                        "Square Meter Land": land,
                        "Square Meter Construction": construccion,
                        "Bathroom": ubicacion[5].strip(),
                        "Bedroom": ubicacion[4].strip(),
                        "Source": source
                    }
                    file_name = "Inmobiliarias.csv"
                    instancia_saver = Saver.Saver(file_name)
                    instancia_saver.crear_csv(dictionary)
                    print("new href")
                    contador += 1
                    print(contador)
                indice_pagina += 1
            indice_url += 1
    except ValueError as ve:
        print(ve)
def Use_Dicts(global_variables, demand_variables):
    for i in range(len(global_variables)):
        for j in range(len(demand_variables)):
            reload_all()
            # %matplotlib qt5
            # RE-DEFINE PATH TO THE RESULTS FOLDER
            path = 'C:/Users/danie/Dropbox/BeerGame/'
            TS = global_variables[i]['TS']
            Mu = global_variables[i]['mu']
            Sigma = global_variables[i]['sigma']
            constant_ld = global_variables[i]['ltavg']
            periods = 40
            '''
            Old way of using all demand types:
            choose the demand type and the possible actions, then run with the
            variations of the other variables using the dictionary that was created.
            '''
            demand_type = global_variables[i]['demand_type']
            if demand_type == "Seasonal":
                demand = Demand.Seasonal_Demand(15, 5, 0, 1.5, 0, Mu - 2, Sigma)
            elif demand_type == "Growing":
                demand = Demand.Growing_Demand(0, (2 * Mu / periods), 0, Sigma)
            elif demand_type == "Sporadic":
                demand = Demand.Sporadic_Demand(Mu, 0.2, 5)
                # demand.generate(periods)
                # bench_agent = Agent.BS_Agent_Gauss(1, Sigma, TS, Mu)
            elif demand_type == "Gaussian":
                demand = Demand.Gaussian_Demand(Mu, Sigma, min_value=0, max_value=100)
                # demand = Demand.Gaussian_Demand(global_variables[i]['Mu'], global_variables[i]['Sigma'], global_variables[i]['Min'], global_variables[i]['Max'])
            elif demand_type == "Uniform":
                demand = Demand.Uniform_Demand(Mu, Mu, Step=1)
            elif demand_type == "Growingseasonal":
                demand = Demand.Growing_Seasonal_Demand(
                    1,
                    [Mu * 0.5, Mu * 0.8, Mu * 0.7, Mu * 0.9, Mu, Mu,
                     Mu * 0.9, Mu * 1.2, Mu, Mu * 1.1, Mu * 1.5, Mu * 2],
                    Sigma)
            elif demand_type == "Mixedseasonal":
                demand = Demand.Mixed_Saisonnalities_Demand(
                    Mu, [1, 1, 2, 2, 2, 3, 4, 4, 2, 1, 1, 4], [0.6, 0.8, 0.7, 0.9], Sigma)
            elif demand_type == "Growthstable":
                demand = Demand.Growth_Stable_Demand(0, 1, Mu + 5, Sigma)
            else:
                print("Did not recognize demand type")
                break

            bench_agent = demand.bench_agent(global_variables[i]['pos'], global_variables[i]['TS'], periods)

            game_params = {
                'client_demand': demand,
                'lead_times': [ld.Constant_LeadTime(global_variables[i]['lt'][0]),
                               ld.Constant_LeadTime(global_variables[i]['lt'][1]),
                               ld.Constant_LeadTime(global_variables[i]['lt'][2]),
                               ld.Constant_LeadTime(global_variables[i]['lt'][3])],
                'AI_possible_actions': np.arange(-10, 10),
                'm': global_variables[i]['m'],
                'shortage_cost': get_optimal_gaussian_SC(TS, Mu=Mu, Sigma=Sigma, lead_time=constant_ld),
                'TS': TS,
                'holding_cost': 1,
                'initial_inventory': constant_ld * Mu + 2 * Sigma,
                'number_periods': periods,
                'use_backorders': 0,
                'state_features': ["IL", "d", "BO", "RS", "OO", "t"],
                'AI_DN': [10, 10],  # Not implemented yet
                'comparison_agent': bench_agent
            }
            '''
            Alternative to the above, to be more flexible:
            {
                'client_demand': demand,
                'lead_times': (global_variables[i]['leadtimes'], global_variables[i]['leadtimes'],
                               global_variables[i]['leadtimes'], global_variables[i]['leadtimes']),
                'initial_inventory': global_variables[i]['leadtimes'].Mean * 10,
            }
            This needs changes to the functions that create the dictionary list.
            '''
            list_agents = ['BS20', 'BS20', 'BS20', 'BS20']
            list_agents[global_variables[i]['pos']] = 'DQN'
            agents = generate_agents(list_agents, game_params)

            trainer = Tr.Trainer(agents, game_params)
            comparator = trainer.generate_comparator(min_BS_level=5, max_BS_level=20)
            trainer.train2(400)

            AI_Agent = trainer.best_AI_agent  # .get_AI_agent()
            AI_Agent.label = 'best DQN'
            comparator.update_AI_Agents([trainer.best_AI_agent, trainer.get_AI_agent()])
            comparator.launch_comparison()
            comparator.histograms()
            comparator.one_game_results([trainer.get_AI_agent()])

            importlib.reload(Saver)
            saver = Saver.Saver(path)
            saver.clean_results_folder()
            saver.save(trainer)