def addConnections(self):
    """Link each point mass to its nearest undiscovered neighbours.

    For every mass, collect neighbours within self.discoveryRadius that are
    not already connected to it, then attach up to
    self.maxConnectionsPerMass of the closest ones.
    """
    for thisPointMass in self.pointMasses:
        nearby = []
        for otherPointMass in self.pointMasses:
            if thisPointMass is otherPointMass:
                continue
            # Fixed off-by-one: `>` allowed maxConnectionsPerMass + 1 links.
            if len(thisPointMass.connections) >= self.maxConnectionsPerMass:
                continue
            r = np.linalg.norm(thisPointMass.pos - otherPointMass.pos)
            if r < self.discoveryRadius:
                # Fixed membership test: the original compared the mass
                # against each connection's `ends` container itself instead
                # of checking inside it.
                already_linked = any(
                    thisPointMass in connection.ends
                    for connection in otherPointMass.connections
                )
                if not already_linked:
                    nearby.append([r, otherPointMass])
        nearby.sort(key=lambda entry: entry[0])
        for i in range(min(len(nearby), self.maxConnectionsPerMass)):
            otherPointMass = nearby[i][1]
            # Create a new connection
            newConnection = Connection()
            newConnection.attach(thisPointMass, otherPointMass)
            self.connections.append(newConnection)
            print("added connection")  # fixed Python 2 print statement
def eliminaTuttiDati():
    """Truncate the patient table (replaces the index-based while loop)."""
    for table in ["patient"]:
        Connection.update("truncate " + table + ";")
def eliminaTuttiDati():
    """Truncate every application table (replaces the index-based while loop)."""
    tables = ["affect", "device", "doctor", "evaluate", "health_state",
              "install", "measurement", "monitoring", "observation",
              "parameter", "patient", "related", "set_", "therapy"]
    for table in tables:
        Connection.update("truncate " + table + ";")
def inserisciAffect(scelta):
    """Insert affect rows linking health states to observations.

    Fixed: the original passed the values swapped relative to the column
    list, writing the observation id into id_health_state and vice versa.
    """
    affect = getJson.getAffect(scelta)
    for x in affect:
        query = "insert into affect (id_health_state, id_observation) " \
                "values ( " + str(x['id_health_states']) + ", " + str(
                    x['id_observations']) + " );"
        Connection.update(query)
def inserisciAllData(scelta):
    """Insert one patient row per person, serialising step_datas into a
    bracketed text blob appended as the last column value."""
    people = getJson.getAllData(scelta)
    step = ''
    for x in people:
        step = ''  # reset the blob for each patient
        # Scalar columns first; step_datas is appended afterwards.
        query = "insert into patient (id, name, width, height, l_shank, l_thigh, lokomat_shank, lokomat_thigh, " \
                "lokomat_recorded, version, legtype, lwalk_training_duration, lwalk_distance, step_datas) " \
                "values (" + str(x['id']) + ", '" + str(x['name']) + "', " + str(x['width']) + ", " + str(x['height']) + ", " + str(x['l_shank']) + ", " + str(x['l_thigh']) + ", " \
                + str(x['lokomat_shank']) + ", " + str(x['lokomat_thigh']) + ", " + str(x['lokomat_recorded']) + ", '" + str(x['version']) + "', '" + str(x['legtype']) + "', " \
                + str(x['lwalk_training_duration']) + ", " + str(x['lwalk_distance']) + ", "
        # Build "[{step_value: ['v1', 'v2', ...]}, ...]" by hand; the
        # step[:-2] slices trim the trailing ", " separators.
        step += "["
        for s in x['step_datas']:
            step += "{step_value: "
            step += "["
            for a in s:
                if (a == None):
                    step += "'None', "
                else:
                    step += "'" + str(a) + "', "
            step = step[:-2]
            step += "]}, "
        if (len(x['step_datas']) > 0):
            step = step[:-2]
        step += "]"
        query += step
        query += ");"
        Connection.update(query)
def plus():
    """Final phase: probe robots.txt, optionally run dork searches and Nmap,
    then print the collected results and terminate the program."""
    print (colors.green + " [+] " + colors.default + "Checking for Robots.txt")
    robots = url + "robots.txt"
    print (robots)
    Connection.tester(robots, proxy, user_agent, verbose, saida)
    os.chdir("output")
    if google:
        search.google(dork, saida, url)
        search.DuckDuckGo(dork, saida, url)
    if nmap:
        Nmap.run(url)
    # ====R.E.S.U.L.T=======================================================
    print (colors.red + "\n" + ("-" * 80) + colors.default)
    print (colors.green + " [+] " + colors.default + "[Results]\n")
    Connection.result()
    print (colors.red + ("-" * 80) + "\n" + colors.default)
    sys.exit()
def process():
    """Consume one candidate path from the shared word list and test it."""
    candidate = url + ways
    lst.remove(ways)
    Connection.tester(candidate, proxy, user_agent, verbose, saida)
    if verbose:
        print (candidate)
def query3():
    """Chained lookup: recent health states -> related patients -> patient rows."""
    query = "select * from health_state where timestamp > 400073 allow filtering;"
    risultato = Connection.query(query)
    for x in risultato:
        query1 = "select id_patient from related where id_health_state = " + str(x.id) + " allow filtering;"
        risultato2 = Connection.query(query1)
        for y in risultato2:
            # Fixed: id_patient is cast with str() (it may be numeric) and a
            # space was missing before "allow filtering".
            query2 = "select * from patient where id = " + str(y.id_patient) + " allow filtering;"
            risultato3 = Connection.query(query2)
def query4():
    """Chained lookup: patients with recent installs -> health states ->
    therapies assigned to those states -> therapy rows."""
    risultato = Connection.query(
        "select distinct id_patient from install where when > 5000 allow filtering;")
    for paziente in risultato:
        risultato2 = Connection.query(
            "select id_health_state from related where id_patient = " + str(paziente.id_patient) + " allow filtering;")
        for stato in risultato2:
            # NOTE: the set_ table really spells the column "id_healt_state".
            risultato3 = Connection.query(
                "select id_therapy from set_ where id_healt_state = " + str(stato.id_health_state) + " allow filtering;")
            for terapia in risultato3:
                risultato4 = Connection.query(
                    "select * from therapy where id = " + str(terapia.id_therapy) + " allow filtering;")
def get(oDB, strTableTemplate, aTickers, strDate):
    """Return {ticker: mapped rows} selecting rows dated on/after strDate
    from each ticker's table; tickers that fail to select are skipped."""
    results = {}
    for ticker in aTickers:
        # skip duplicate tickers — don't do the same work twice
        if ticker in results:
            continue
        table = strTableTemplate.replace(TABLE_WILDCARD, ticker)
        query = """ SELECT * FROM {0} WHERE date >= {1} ORDER BY date DESC; """.format(
            table, quoteString(strDate))
        rows = Connection.execute(oDB, query)
        # move on to the next ticker when the select fails
        if not rows:
            Logger.logError("Error trying to select data")
            continue
        results[ticker] = mapSelect(rows)
    return results
def getPlaces():
    """Return every row of the `places` table.

    The connection is now closed in a `finally` block so it is released even
    when the query raises (the original leaked it on error).
    """
    conn = Connection.getConnection()
    try:
        cur = conn.cursor()
        cur.execute('select * from places')
        return cur.fetchall()
    finally:
        conn.close()
class Main:
    """Demo entry point: lists User rows, inserts a fixed row, logs failures."""

    def __init__(self):
        print("*** starting program ***")
        # Fixed: `filename` must be a string; the bare name `application.log`
        # raised NameError before logging was ever configured.
        logging.basicConfig(filename="application.log", level=logging.ERROR)
        self.connection = None
        try:
            self.connection = Connection()
            results = self.connection.executeStatement("select * from User")
            for row in results:
                print(row[1])
            self.connection.executeUpdate(
                "insert into User (User_ID) values (1)"
            )
        except Exception as e:
            print(e)
            logging.error(e)
def funzione(scelta):
    """Benchmark three patient queries, printing the elapsed time of 100
    runs of each. (Python 2 print statements converted to print(); a stray
    unterminated triple-quote at the end of the snippet was removed.)
    """
    tPrima = time.time()
    for _ in range(0, 100):
        risultato = Connection.query("select * from patient;")
        print("prova")
        for x in risultato:
            print(x)
    tTotale = time.time() - tPrima
    print("Il tempo per fare la query 1 e': " + str(tTotale))
    tPrima = time.time()
    for _ in range(0, 100):
        risultato = Connection.query(
            "SELECT * FROM patient WHERE name = 'SIVV33W0' allow filtering;")
    tTotale = time.time() - tPrima
    print("Il tempo per fare la query 2 e': " + str(tTotale))
    tPrima = time.time()
    for _ in range(0, 100):
        query3()
    tTotale = time.time() - tPrima
    print("Il tempo per fare la query 3 e': " + str(tTotale))
def fetchfines():
    """Render per-card totals of unpaid fines, or a 'settled' page if none."""
    update_fines()
    connection = Connection.get_connection()
    cursor = connection.cursor()
    cursor.execute("use library")
    cursor.execute(
        "select B.Card_id, sum(F.Fine_amt) from BOOK_LOANS as B join (select Loan_id,Fine_amt from FINES where Paid=0) as F where F.Loan_id=B.Loan_id group by B.Card_id;"
    )
    fines = [(card, str(amount) + " USD") for card, amount in cursor.fetchall()]
    connection.commit()
    connection.close()
    if not fines:
        return render_template("checkout_s.html", msg="All dues settled!")
    return render_template("fines.html", fines=fines)
def checkin():
    """Check a book back in: stamp Date_in on its open loan, refresh fines.

    Security fix: isbn and datein come straight from the request form, so
    they are now passed as query parameters instead of being spliced into
    the SQL string.
    """
    form = request.form
    isbn = form.get('isbn')
    datein = form.get('datein').replace('-', '')
    connection = Connection.get_connection()
    cursor = connection.cursor()
    cursor.execute("use library")
    cursor.execute(
        "update BOOK_LOANS SET Date_in=%s where Isbn=%s and Date_in is NULL",
        (datein, isbn))
    connection.commit()
    connection.close()
    update_fines()
    return render_template("checkout_s.html", msg="Checkin successful!")
def create_table():
    """Create the `person` table, reporting when it already exists."""
    ddl = """
            CREATE TABLE person(
                id int PRIMARY KEY AUTO_INCREMENT,
                name varchar(64),
                weight float,
                height float
            )
            """
    with pyodbc.connect(Connection.connectmysql()) as con:
        try:
            con.execute(ddl)
        except pyodbc.ProgrammingError:
            print("Error Already Table")
        else:
            print("Create table success")
def Middle_Building(self):
    """Build the middle section: for each of the self.m rows, open a
    connection, attach its front, then enter four prioritized-replay
    contexts (bodies empty — presumably only construction side effects
    matter; TODO confirm)."""
    # NOTE(review): `_` is resolved from an enclosing scope; confirm what it
    # holds. Nesting below reconstructed from a flattened source line —
    # verify the four DQN blocks sit inside the loop, after the connection.
    for i in range(self.m):
        with Connection.connection(_) as build_connection_middle:
            build_connection_middle.connect_front()
        with DQN.DQNPrioritizedReplay(_):
            pass
        with DQN.DQNPrioritizedReplay(_):
            pass
        with DQN.DQNPrioritizedReplay(_):
            pass
        with DQN.DQNPrioritizedReplay(_):
            pass
def show_results():
    """Render the results template for a Yelp search of `term` near `location`."""
    term = request.forms.get('term')
    location = request.forms.get('location')
    results = Connection.YelpSearch(location, term)
    # city -> image file; anything unrecognised falls back to Seattle
    city_images = {
        "los angeles": "losangeles.jpg",
        "san francisco": "sanfrancisco.jpg",
    }
    picture = '/pictures/' + city_images.get(location, "seattle.jpg")
    return template('results', term=term, location=location,
                    name=results['name'], picture=picture)
def cregistry():
    """Register a customer; on success set id/password cookies, else return "F".

    Fixed: a make_response() result was created and immediately discarded
    before the success branch; removed.
    """
    print("res")
    customerid = request.args.get("customerid")
    password = request.args.get("password")
    # registration call (returns True on success)
    t = con.registry(customerid, password)
    print(t)
    if t == True:
        print(t)
        resp = make_response()
        resp.set_cookie('customerid', customerid)
        # NOTE(review): storing the raw password in a cookie is unsafe —
        # consider a session token instead.
        resp.set_cookie('password', password)
        return resp
    return "F"
class Source:
    """Read-side access to the PDT audit/reference tables for `agama` data.

    Fixed: every method used `conn=myConnection.Connection()` as a default —
    defaults are evaluated once at definition time, so a single connection
    was created at import and shared by all calls. Defaults are now None and
    a fresh connection is made per call when none is supplied.
    """

    def __init__(self):
        pass

    # Class-level shared connection kept for backward compatibility with any
    # caller using Source.conn directly.
    conn = myConnection.Connection()

    def get_all_changed_agama(self, conn=None):
        """Return every row of the agama audit table."""
        if conn is None:
            conn = myConnection.Connection()
        pdt_cur = conn.pdt_connection()
        pdt_cur.execute('SELECT * FROM [dbo].[audits_ref_agama]')
        return pdt_cur.fetchall()

    # PDT get changeable table
    def get_queue_data_agama(self, conn=None):
        """Return the oldest unexecuted audit row as (id, name, operation)."""
        if conn is None:
            conn = myConnection.Connection()
        pdt_cur = conn.pdt_connection()
        pdt_cur.execute(
            'SELECT top 1 id_agama, nama_agama,operation FROM [dbo].[audits_ref_agama] where is_executed is null order by updated_at asc'
        )
        row = pdt_cur.fetchall()[0]
        return (row[0], row[1], row[2])

    # PDT get data agama
    def get_agama_pdt(self, conn=None):
        """Print every row of the reference agama table."""
        if conn is None:
            conn = myConnection.Connection()
        pdt_curr = conn.pdt_connection()
        pdt_curr.execute('SELECT * FROM dbo.MD_ref_agama')
        print("======== Source Condition ============")
        for row in pdt_curr:
            print(row)

    # PDT count agama rows
    def cek_jumlah_agama_pdt(self, conn=None):
        """Return the number of rows in the reference agama table."""
        if conn is None:
            conn = myConnection.Connection()
        pdt_curr = conn.pdt_connection()
        pdt_curr.execute('SELECT count(*) FROM dbo.MD_ref_agama')
        return pdt_curr.fetchone()[0]
def book_details(isbns):
    """For each ISBN, gather title, authors, loan status and — when checked
    out — the loan dates and borrower; returns a list of metadata dicts.

    NOTE(review): ISBNs are spliced into SQL with str.format; safe only if
    callers guarantee sanitised input — prefer parameterized queries.
    """
    connection = Connection.get_connection()
    cursor = connection.cursor()
    cursor.execute("use library")
    books = []
    for isbn in isbns:
        cursor.execute("select Title from BOOK where Isbn='{0}'".format(isbn))
        title = cursor.fetchall()[0][0]
        # All authors of the book, joined into one comma-separated string.
        cursor.execute(
            "select Name from AUTHORS AS A join (select Author_id from BOOK_AUTHORS as BA where isbn='{0}') as x on x.Author_id=A.Author_id;"
            .format(isbn))
        authors = [x[0] for x in cursor.fetchall()]
        authors = ",".join(authors)
        # An open loan (Date_in NULL) means the book is currently checked out.
        cursor.execute(
            "select Isbn from BOOK_LOANS where Isbn='{0}' and Date_in is NULL;"
            .format(isbn))
        count = len(cursor.fetchall())
        if count:
            cursor.execute(
                "select Date_out from BOOK_LOANS where Isbn='{0}' and Date_in is NULL"
                .format(isbn))
            dateout = cursor.fetchall()[0][0]
            cursor.execute(
                "select Due_date from BOOK_LOANS where Isbn='{0}' and Date_in is NULL"
                .format(isbn))
            datedue = cursor.fetchall()[0][0]
            cursor.execute(
                "select Bname from BORROWER as B join (select Card_id from BOOK_LOANS where Isbn='{0}' and Date_in is NULL) as S on B.Card_id=S.Card_id;"
                .format(isbn))
            borrower = cursor.fetchall()[0][0]
        meta = {
            "isbn": isbn,
            "title": title,
            "authors": authors,
            "status": "Checked out" if count else "Available",
            "dateout": dateout if count else "",
            "datedue": datedue if count else "",
            "borrower": borrower if count else ""
        }
        books.append(meta)
    connection.close()
    return books
def get_queue_data_agama(self, conn=None):
    """Return the oldest unexecuted agama audit row as (id, name, operation).

    Fixed: the default was `myConnection.Connection()`, which Python
    evaluates once at definition time — one connection created at import
    and shared by every call. Default is now None with a per-call fallback.
    """
    if conn is None:
        conn = myConnection.Connection()
    pdt_cur = conn.pdt_connection()
    pdt_cur.execute(
        'SELECT top 1 id_agama, nama_agama,operation FROM [dbo].[audits_ref_agama] where is_executed is null order by updated_at asc'
    )
    # first (and only) fetched row: (id_agama, nama_agama, operation)
    row = pdt_cur.fetchall()[0]
    return (row[0], row[1], row[2])
def server(p, q):
    """UDP test server: receives numbered DATA packets over a LossySocket,
    verifies the counter sequence and prints goodput every 1000 packets.

    :param p: loss-model parameter forwarded to LossySocket
    :param q: loss-model parameter forwarded to LossySocket

    Fixed: Python 2 `print 'goodput...'` statement converted to print();
    dead commented-out send path removed. NOTE(review): `init_seq` must be
    defined at module level — confirm against the original file.
    """
    sender_id = 2
    logger = logging.getLogger("Test server")
    received_packet = InPacket()
    sock = LossySocket(socket.AF_INET, socket.SOCK_DGRAM, q=q, p=p)
    sock.bind(("127.0.0.1", 6000))
    connection = Connection(sock=sock, remote_ip="127.0.0.1", remote_port=5000,
                            local_session_id=1, remote_session_id=2,
                            version=1, seq_no=1, send_ack_no=init_seq - 1,
                            logger=logger)
    packet_to_send = OutPacket()
    exp_data = 10      # next expected payload counter
    t = -1.0           # start of the current measurement window
    bits = 0           # bits received in the current window
    try:
        while True:
            data, addr = sock.recvfrom(2000)
            logger.debug("received message")
            received_packet.packetize_raw(data)
            received_packet.receive_time = time.time()
            if connection.receive_packet_start(received_packet):
                # payload: 1000 bytes of padding followed by the counter
                d = received_packet.get_TLVlist(tlvtype='DATA')[0][1000:]
                received_data = int(d)
                if exp_data != received_data:
                    logger.error("invalid data: %d, expected: %d" % (received_data, exp_data))
                    exit(0)
                exp_data = (exp_data + 1) % 100000
                bits += (len(d) + 1000) * 8
                if received_data % 1000 == 0:
                    if t > 0:
                        print('goodput: %.2f Mbps' % (bits / (time.time() - t) / 1000000))
                    t = time.time()
                    bits = 0
                connection.receive_packet_end(received_packet, sender_id)
    except KeyboardInterrupt:
        logger.info('CTRL+C received, killing connection...')
        connection.stop()
        stop = True
def shop():
    """Return all shop items as JSON: {"len": n, "0": item_json, ...}.

    Fixed: local `list` shadowed the builtin; renamed to `rows`.
    """
    customerid = request.cookies.get('customerid')  # read but currently unused
    items = []
    payload = {}
    # con.getinfo(): list of rows [id, price, name, num, picture_url]
    rows = con.getinfo()
    for row in rows:
        items.append(json.dumps({"id": row[0], "num": row[3],
                                 "pictureurl": row[4], "name": row[2],
                                 "price": row[1]}))
    payload["len"] = len(items)
    for i, item in enumerate(items):
        payload[str(i)] = item
    return json.dumps(payload)
def orderinfo():
    """Return the customer's orders as JSON: {"len": n, "0": order_json, ...}.

    Fixed: local `list` shadowed the builtin (renamed to `rows`); the unused
    `cus` local was removed.
    """
    customerid = request.cookies.get('customerid')
    orders = []
    payload = {}
    # row layout: [order_id, customer, name, num]
    rows = con.orderinfo(customerid)
    for row in rows:
        orders.append(json.dumps({"orderid": row[0], "num": row[3],
                                  "name": row[2]}))
    payload["len"] = len(orders)
    for i, order in enumerate(orders):
        payload[str(i)] = order
    print(payload)
    return json.dumps(payload)
def createTable(oDB, strTable):
    """
    Creates a table if it doesn't exist
    :param oDB: a MySQLdb object
    :param strTable: The table to be created
    :return: boolean indicating the CREATE TABLE
    """
    ddl = """ CREATE TABLE IF NOT EXISTS {0}( date date not null, dim_name varchar(256) not null, value varchar(256) not null, primary key (dim_name, date), key (date) ); """.format(strTable)
    result = Connection.execute(oDB, ddl)
    return result != False
def select_data():
    """Copy Samit's `person` rows into `log_person` (stamped with NOW()),
    then print the rows that were copied.

    Fixed: the duplicated `sql_cmd = sql_cmd = ...` assignment.
    """
    with pyodbc.connect(Connection.connectmysql()) as con:
        sql_cmd = """ SELECT * FROM person WHERE name='Samit'; """
        raw_data = []
        for row in con.execute(sql_cmd):
            raw_data.append(row)
            # insert the row into the log table
            # NOTE(review): values come from our own DB rows, but a
            # parameterized INSERT would still be safer.
            sql_cmd_insert = " \
                INSERT INTO log_person(name,weight,height,date_log) \
                VALUES('{}',{},{},NOW()); \
                ".format(row[1], row[2], row[3])
            con.execute(sql_cmd_insert)
        print(raw_data)
def partOne(dict, arrOpenClose):
    """Write the weekly offense-count summary section to the document.

    :param dict: {domain_id: [offenses]} (parameter name kept for caller
                 compatibility even though it shadows the builtin)
    :param arrOpenClose: [open_counts, close_counts], aligned with the
                 iteration order of `dict`

    Fixed: local `sum` shadowed the builtin; renamed to `total`.
    """
    doc.header('Weekly Count Ofenses:')
    i = 1
    total = 0
    for key, val in dict.items():
        domain = Connection.Connect_To_API_DomainID(str(key))
        name = domain['name']
        if not name:
            name = "Cloude"
        doc.writeText(
            str(i) + ') ' + (name) + ": " + str(len(val)) + " (Open: " +
            str(arrOpenClose[0][i - 1]) + " " + "Close: " +
            str(arrOpenClose[1][i - 1]) + ")" + '\n', None, 12, True)
        total = total + len(val)
        i = i + 1
    doc.writeText('\n', None, 7)
    doc.writeText('\n', None, 7)
    doc.writeText("Total: " + str(total) + '\n', None, 13)
    doc.writeText('\n', None, 7)
    doc.writeText('\n', None, 7)
def search():
    """Return products matching price range/keyword as JSON:
    {"len": n, "0": item_json, ...}.

    Fixed: local `list` shadowed the builtin; renamed to `rows`.
    """
    minprice = request.args.get("minprice")
    maxprice = request.args.get("maxprice")
    keyword = request.args.get("keyword")
    items = []
    payload = {}
    # row layout: [id, price, name, num, picture_url]
    rows = con.search(minprice, maxprice, keyword)
    for row in rows:
        items.append(json.dumps({"id": row[0], "num": row[3],
                                 "pictureurl": row[4], "name": row[2],
                                 "price": row[1]}))
    payload["len"] = len(items)
    for i, item in enumerate(items):
        payload[str(i)] = item
    return json.dumps(payload)
def __init__(self, layers):
    '''
    Initialise a fully connected neural network.
    layers: list of node counts, one entry per layer
    '''
    self.connections = Connections()
    layer_count = len(layers)
    node_count = 0
    self.layers = [Layer(idx, size) for idx, size in enumerate(layers)]
    # Wire every node of one layer to every non-bias node of the next
    # (the last node of each layer is the bias, hence nodes[:-1]).
    for upper in range(layer_count - 1):
        for upstream in self.layers[upper].nodes:
            for downstream in self.layers[upper + 1].nodes[:-1]:
                conn = Connection(upstream, downstream)
                self.connections.add_connection(conn)
                conn.downstream_node.append_upstream_connection(conn)
                conn.upstream_node.append_downstream_connection(conn)
def Aceptar(self):
    """Persist the selected drawing to the local 'drawCookie' file and close
    the window.

    Fixed: the file is now opened with a `with` block so the handle is
    closed even if the write fails; dead commented-out JSON code removed.
    """
    self.contEditar += 1
    draw = Connection.Connection(self.id).get_draw(self.data['text'])
    # first column of the first row holds the stored drawing blob
    self.final = draw[0][0]
    with open('drawCookie', 'w') as file:
        file.write(self.final)
    self.win.destroy()
def add_connection(self, input_node, output_node, weight=None, enable=True):
    """Add a new connection. If the weight is not set, it is set at random.

    Fixed: `weight == None` replaced with the identity check `is None`
    (PEP 8); the duplicated innovation assignment was hoisted out of the
    branches.
    """
    if weight is None:
        con = Connection(input=input_node, output=output_node, enable=enable)
        con.random_weight()
    else:
        con = Connection(input=input_node, output=output_node, weight=weight)
    con.innovation = NEAT.get_innovation(con)
    # Insertion sort: walk back from the end to the last entry whose
    # innovation number does not exceed the new one.
    inx = len(self.connections) - 1
    while inx >= 0:
        if self.connections[inx].innovation <= con.innovation:
            break
        inx -= 1
    self.connections.insert(inx + 1, con)
def ace_tune(number_sim, costfunction, jobname):
    """Tune PID gains (kp, ki, kd) against `costfunction` via the remote
    trial service; returns the best gains after `number_sim` trials.

    Fixed: a dead `reward = []` initialisation and the pair of
    non-exclusive ifs were collapsed into one conditional expression.
    """
    conn = Connection()
    params = dict(kp=0., ki=0., kd=0.)
    for _ in range(number_sim):
        # 1 - request a new set of gains to try
        dataR = {"job": jobname, "unit_diversion": str(uuid.uuid4())}
        reqdata = conn.request(dataR)
        # 2 - simulate the system with these gains and measure the error
        params['kp'] = reqdata['kp']
        params['ki'] = reqdata['ki']
        params['kd'] = reqdata['kd']
        e = costfunction(params)
        # 3 - normalise -error into a [0, 1] reward to maximise
        reward = 0 if e > 50 else (50 - e) / 50
        # 4 - log the observed reward for this trial
        dataL = {
            "job": jobname,
            "unit_diversion": str(uuid.uuid4()),
            'signals': {'reward': reward, 'kp': params['kp'],
                        'ki': params['ki'], 'kd': params['kd']}
        }
        conn.log(dataL)
    # 5 - after the fixed number of trials, fetch the best arm found
    bestarmData = {'job': jobname}
    best_params = conn.best_arm(bestarmData)
    params['kp'] = best_params['kp']
    params['ki'] = best_params['ki']
    params['kd'] = best_params['kd']
    return params
def search_booksc(isbn=False, cardid=False, name=False):
    """Find currently checked-out books by ISBN, card id, or borrower name.

    Returns (no_criteria, books): True with an empty list when no filter was
    given, otherwise False plus the matching book metadata.

    NOTE(review): filter values are spliced into SQL with str.format —
    parameterized queries would be safer if any input is user-supplied.
    """
    connection = Connection.get_connection()
    cursor = connection.cursor()
    cursor.execute("use library")
    query = ""
    books = []
    if not (isbn or cardid or name):
        return True, books
    else:
        # exactly one filter is applied, preferring isbn > cardid > name
        if isbn:
            query = "select Isbn from BOOK_LOANS where Isbn='{0}' and Date_in is NULL".format(
                isbn)
        elif cardid:
            query = "select Isbn from BOOK_LOANS where Card_id='{0}' and Date_in is NULL".format(
                cardid)
        elif name:
            query = "select Isbn from BOOK_LOANS B join(select Card_id from BORROWER where Bname like '%{0}%') as A where A.Card_id=B.Card_id and Date_in is NULL;".format(
                name)
        cursor.execute(query)
        results = cursor.fetchall()
        connection.close()
        isbns = [x[0] for x in results]
        books = book_details(isbns)
        return False, books
def crateWordFile(dict, arrOpenClose):
    """Build the full weekly offense report and save Offense_Weekly.docx.

    :param dict: {domain_id: [offenses]} (name kept for caller compatibility)
    :param arrOpenClose: [open_counts, close_counts] forwarded to partOne

    Fixed: removed a leftover debug branch that printed for offense ID
    "6470"; local `sum` shadowed the builtin and was renamed to `total`.
    """
    partOne(dict, arrOpenClose)
    doc.header('Here is the Details:')
    doc.writeText('\n', None, 10)
    i = 1
    total = 0
    for key, val in dict.items():
        domain = Connection.Connect_To_API_DomainID(str(key))
        total = total + len(val)
        doc.writeText('\n', None, 7)
        doc.writeText(
            str(i) + ') ' + domain['name'] + " dec: " + domain['description'] +
            ' sum: ' + str(len(val)) + '\n', None, 12, True)
        doc.writeText("\n", None, 5)
        for index in val:
            doc.writeText(
                "ID: " + str(index.getID()) + " Status: " + index.getStatus() +
                "\n", None, 10)
            doc.writeText("desc: " + index.getDesc() + '\n', None, 10, True)
            doc.writeText("notes:" + "\n", None, 10)
            for note in index.getNote():
                n = (note.split("\n"))
                if len(n) > 0:
                    for s in n:
                        doc.writeText(" * " + s + "\n", None, 10)
            doc.writeText('\n', None, 10)
        i = i + 1
        doc.writeText('\n', None, 10)
    doc.writeText('\n', None, 10)
    doc.writeText('Total Offenses: ' + str(total), None, 14)
    doc.saveFile("Offense_Weekly.docx")
def stringMatching(stringInput):
    """Look up `stringInput` among stored user commands.

    Returns the command's cmdvalue on an exact match; otherwise falls back
    to stringMatching2. Assumes user_commands ids are contiguous from 1.
    """
    connectionObj = Connection.Config()
    cursor = connectionObj.cursor
    str1 = stringInput
    flag = 0  # set to 1 once any row fails to match exactly
    if flag == 0:
        index = 1
        cursor.execute("select count(*) from user_commands;")
        table_row = cursor.fetchone()
        while index <= table_row[0]:
            sql2 = "select command,cmdvalue from user_commands where id='" + str(index) + "'"
            cursor.execute(sql2)
            query_row = cursor.fetchone()
            str2 = query_row[0]
            if (str1 == str2):
                return query_row[1]
            else:
                flag = 1
            index += 1
    # no exact match found — delegate to the fuzzy matcher
    if flag == 1:
        b = stringMatching2(str1)
        return b
def __init__(self, nGen):
    """Build a runnable network from genome `nGen`: clone its nodes,
    bucket them by x position (inputs <= 0.1, outputs >= 0.9, rest hidden),
    then wire the genome's connections into their target nodes."""
    # fresh containers so each phenotype owns its own state
    self.inputNodes = []
    self.hiddenNodes = []
    self.outputNodes = []
    self.nodeDict = {}
    genome_nodes = nGen.getNodes()
    genome_cons = nGen.getConnections()
    for gene in genome_nodes.List:
        x = gene.getX()
        node = Node(x)
        self.nodeDict[gene.getInnovationNum()] = node
        if x <= 0.1:
            self.inputNodes.append(node)
        elif x >= 0.9:
            self.outputNodes.append(node)
        else:
            self.hiddenNodes.append(node)
    for gene in genome_cons.List:
        src = self.nodeDict[gene.getOrigin().getInnovationNum()]
        dst = self.nodeDict[gene.getTarget().getInnovationNum()]
        con = Connection(src, dst)
        con.setWeight(gene.getWeight())
        con.setEnabled(gene.isEnabled())
        dst.getConnections().append(con)
def init_connections(config, logger, isFirst):
    """Create the service connections.

    First call (isFirst=True) opens only the Oracle connection; later calls
    open redis, mysql, psql, osm and tz. Returns the dict of live
    connections, or {'error': code} on the first creation failure
    (-10 for an unexpected exception).
    """
    con = {}
    error = 0
    try:
        if isFirst:
            con['oracle'] = connections.ConnectionOracle(config, logger)
            error = con['oracle'].create_connection()
            if error:
                return {'error': error}
        else:
            # each connection is created and verified in turn; abort on the
            # first failure so callers never get a half-working dict
            con['redis'] = connections.ConnectionRedis(config, logger)
            error = con['redis'].create_connection()
            if error:
                return {'error': error}
            con['mysql'] = connections.ConnectionMysql(config, logger)
            error = con['mysql'].create_connection()
            if error:
                return {'error': error}
            con['psql'] = connections.ConnectionPostgresql(config, logger)
            error = con['psql'].create_connection()
            if error:
                return {'error': error}
            con['osm'] = connections.ConnectionOSM(config, logger)
            error = con['osm'].create_connection()
            if error:
                return {'error': error}
            con['tz'] = connections.ConnectionTimeZoneServer(config, logger)
            error = con['tz'].create_connection()
            if error:
                return {'error': error}
        return con
    except Exception as e:
        logger.critical(
            "Failed to read configuration file. The error occurred: {}.".
            format(e))
        return {'error': -10}
def run(nsim, jobname):
    """Maximise the (noisy) six-hump camelback via the tuning service:
    request nsim trial points, log their rewards, then print the best."""
    conn = Connection()
    for _ in range(nsim):
        # 1 - ask the service for a new trial point
        dataR = {"job": jobname, "unit_diversion": str(uuid.uuid4())}
        reqdata = conn.request(dataR)
        # 2 - unpack the trial coordinates
        x1 = reqdata['x1']
        x2 = reqdata['x2']
        # 3 - evaluate the objective (we maximise, so use it directly)
        reward = draw_sixhump_camelback([x1, x2], noise=True, scale=0.02)
        # 4 - report the observed reward back to the statistical model
        conn.log({
            "job": jobname,
            "unit_diversion": str(uuid.uuid4()),
            'signals': {'reward': reward, 'x1': x1, 'x2': x2}
        })
    # 5 - fetch the optimisation result after the fixed number of trials
    best_params = conn.best_arm({'job': jobname})
    print('The six-hump Camel function is usually evaluated on the rectangle x1 ∈ [-3, 3], x2 ∈ [-2, 2]. ')
    print('Known maximum is at: (0.0898, -0.7126) or (-0.0898, 0.7126) without noise')
    print("Found maximum at: (", best_params['x1'], ',', best_params['x2'], ')')
def inserisciEvaluate(scelta):
    """Insert evaluate rows linking doctors to health states."""
    for rec in getJson.getEvaluate(scelta):
        Connection.update(
            f"insert into evaluate (id_doctor, id_health_state) "
            f"values ({rec['id_doctors']}, {rec['id_health_states']});")
def inserisciHealth_states(scelta):
    """Insert one health_state row per record returned by getJson."""
    for rec in getJson.getHealthState(scelta):
        Connection.update(
            f"insert into health_state (id, disease_degree, disease_type, timestamp) "
            f"values ({rec['id']}, {rec['disease_degree']}, '{rec['disease_type']}', {rec['timestamp']});")
def inserisciTherapies(scelta):
    """Insert one therapy row per record returned by getJson."""
    for rec in getJson.getTherapies(scelta):
        Connection.update(
            f"insert into therapy (id, duration, medicine, posology, starting_time) "
            f"values ({rec['id']}, {rec['duration']}, '{rec['medicine']}', '{rec['posology']}', '{rec['starting_time']}' );")
def inserisciInstall(scelta):
    """Insert one install row (patient/device with time and place) per record."""
    for rec in getJson.getInstall(scelta):
        Connection.update(
            f"insert into install (id_patient, id_device, when, where_) "
            f"values ({rec['id_patients']}, {rec['id_devices']}, {rec['when']}, '{rec['where']}');")
def check(numCom):
    """Announce whether command `numCom` passes the hour check (history)
    or not (mail fallback)."""
    passed = Connection.checkHour(numCom)
    print("Add to Historique" if passed else "Mail")
def inserisciPatient(scelta):
    """Insert one patient row per record returned by getJson.

    Note: the column really is spelled "telepthon" in the schema.
    """
    for rec in getJson.getPatients(scelta):
        Connection.update(
            f"insert into patient (id, address, dateofbirth, email, name, surname, telepthon) "
            f"values ({rec['id']}, '{rec['address']}', '{rec['date_of_birth']}', '{rec['email']}', '{rec['name']}', '{rec['surname']}', '{rec['telephone']}' );")
def inserisciParameter(scelta):
    """Insert one parameter row per record returned by getJson."""
    for rec in getJson.getParameters(scelta):
        Connection.update(
            f"insert into parameter (id, description, frequency) "
            f"values ({rec['id']}, '{rec['description']}', {rec['frequency']});")
def inserisciObservations(scelta):
    """Insert one observation row per record returned by getJson."""
    for rec in getJson.getObservations(scelta):
        Connection.update(
            f"insert into observation (id, timestamp, uom, value) "
            f"values ({rec['id']}, {rec['timestamp']}, '{rec['uom']}', {rec['value']} );")
def inserisciMonitoring(scelta):
    """Insert monitoring rows linking parameters to observations."""
    for rec in getJson.getMonitoring(scelta):
        Connection.update(
            f"insert into monitoring (id_parameter, id_observation) "
            f"values ({rec['id_parameters']}, {rec['id_observations']});")
def search_books(search=False):
    """Free-text book search: tokenise the query, merge stop-words with
    their neighbours, detect 10-character ISBN-like tokens, then run a
    UNION of LIKE queries over isbn/author/title and return book details.

    NOTE(review): structure reconstructed from a flattened source line —
    confirm the flag resets in the final else branch against the original.
    NOTE(review): search terms are spliced into SQL; parameterize if the
    input is user-supplied.
    """
    connection = Connection.get_connection()
    cursor = connection.cursor()
    cursor.execute("use library")
    # strip quotes (naive SQL-escape) and normalise case
    new_search = search.replace("'", "")
    lower_search = new_search.lower()
    words = lower_search.split()
    new_search_words = []   # final search terms after stop-word merging
    prev_word = ""
    remove_word = ""        # term superseded by a merged phrase
    is_isbn = False
    is_first_word = False
    is_preposition = False
    isbns_search = []       # ISBN-like tokens found in the query
    # stop-words that get merged with adjacent words instead of searched alone
    list_of_words = [
        "what", "into", "other", "know", "this", "it", "be", "to", "for",
        "by", "at", "who", "and", "they", "how", "the", "why", "when", "in",
        "of", "on", "or", "up", "a", "i", "here", "there", "where", "now",
        "an", "if"
    ]
    new_word = ""
    for current_word in words:
        # 10 chars containing a digit: treat as an ISBN
        if (len(current_word) == 10
                and any(char.isdigit() for char in current_word)):
            is_first_word = False
            is_preposition = False
            new_word = current_word
            is_isbn = True
            isbns_search.append(new_word)
        # stop-word (non-numeric): merge it with the previous word
        elif ((current_word in list_of_words)
              and not (any(char.isdigit() for char in current_word))):
            is_preposition = True
            if (prev_word == "" and not (len(words) == 1)):
                # leading stop-word: defer it to merge with the next word
                is_first_word = True
                remove_word = current_word
                new_word = current_word
            else:
                is_first_word = False
                new_word = prev_word + " " + current_word
                remove_word = prev_word
        else:
            if (is_preposition == True):
                # previous token was a stop-word: attach this word to it
                new_word = prev_word + " " + current_word
                remove_word = prev_word
            else:
                new_word = current_word
            is_first_word = False
            is_preposition = False
        prev_word = new_word
        if (not is_first_word):
            new_search_words.append(new_word)
        # drop the shorter term that the merged phrase replaced
        if remove_word in new_search_words:
            new_search_words.remove(remove_word)
    statement = ""
    # when any ISBN was seen, search only by ISBNs
    if (is_isbn == True):
        new_search_words = list(isbns_search)
    for i in new_search_words:
        statement = statement + " select b.isbn from book b join book_authors ba on ba.isbn=b.isbn join authors a on a.author_id=ba.author_id where b.isbn like '%{0}%' or a.name like '%{0}%' or b.title like '%{0}%' UNION".format(
            i)
    # drop the trailing " UNION"
    statement = statement.rsplit(' ', 1)[0]
    query = statement + ";"
    cursor.execute(query)
    results = cursor.fetchall()
    connection.close()
    isbns = [x[0] for x in results]
    no_duplicate_isbns = list(set(isbns))
    books = book_details(no_duplicate_isbns)
    return books
def inserisciRelated(scelta):
    """Insert related rows linking patients to health states."""
    for rec in getJson.getRelated(scelta):
        Connection.update(
            f"insert into related (id_patient, id_health_state) "
            f"values ({rec['id_patients']}, {rec['id_health_states']});")
def inserisciMeasurement(scelta):
    """Insert measurement rows linking parameters to devices."""
    for rec in getJson.getMeasurement(scelta):
        Connection.update(
            f"insert into measurement (id_parameter, id_device) "
            f"values ({rec['id_parameters']}, {rec['id_devices']});")
def instanciateConnection(self, **kwargs):
    """Create a Connection.Connection from the given keyword arguments and
    store it on this instance as self.connection."""
    self.connection = Connection.Connection(**kwargs)
def inserisciSet(scelta):
    """Insert set_ rows linking health states to therapies.

    Fixed: the local holding the JSON rows was named `set`, shadowing the
    builtin; renamed. Note the table column really is "id_healt_state".
    """
    rows = getJson.getSet(scelta)
    for x in rows:
        query = "insert into set_ (id_healt_state, id_therapy) " \
                "values (" + str(x['id_health_states']) + ", " + str(x['id_therapies']) + ");"
        Connection.update(query)
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050) # patch the socket module socket.socket = socks.socksocket socket.create_connection = create_connection br = Browser() print ("New Identity: " + br.open('http://icanhazip.com').read()) except Exception as e: print(colors.red + " [-] " + colors.default + str(e)) print(colors.yellow + " [!] " + colors.default + "Check if TOR is \ running on 127.0.0.1:9050") sys.exit() Connection.redirect_tester(url, proxy, user_agent, verbose) read_wl(wordlist) print (colors.green + "\n [+] " + colors.default + "Testing...") if __name__ == "__main__": for t in range(args.threads): # For se refere ao número de processos que será criado Thread(target = (brute_force), args = (lst,)).start() #A segunda virgula é para transformar os args em tupla sleep(1.2) # Caso não coloque o sleep os processos irão se sobrepor while 1==1: #Durante a execução do programa é checado o número de threads if threading.active_count() == 1: #sendo executados. Quando for igual a um significa que o brute force plus() #já foi concluido, podendo assim seguir para as proximas etapas elif threading.active_count() > 1: pass
elif o == "-l": logfile = a elif o == "-u": username = a elif o == "-p": password = a elif o == "-n": domain = a if "" in [interface, driver, logfile, username, password, domain]: printUsage() sys.exit(2) print "interface\t: %s\ndriver\t\t: %s\nlogfile\t\t: %s\nusername\t: %s\npassword\t: %s\ndomain\t\t: %s" % (interface, driver, logfile, username, password, domain) conf = ConfFile() conf.change_info("%s@%s" % (username, domain), "anonymous@%s" % domain, password) conf.create_file() connection = Connection(conf.get_filename(), logfile) print "Authorize..." connection.authorize(interface, driver) time.sleep(2) print "Getting IP..." connection.getip() if __name__ == "__main__": main()
def inserisciDevices(scelta):
    """Insert one device row per record returned by getJson."""
    for rec in getJson.getDevice(scelta):
        Connection.update(
            f"insert into device (id, manufacturer, model) "
            f"values ({rec['id']}, '{rec['manufacturer']}' , '{rec['model']}') ; ")
import Connection
import csv
import datetime
import os
import GraphHopperUtils

# Recreate the ridesharing database and its `trips` table from scratch,
# then (below) select which monthly NYC taxi CSV dumps to load.
db, cursor = Connection.get_connection()
dropdb_query = "drop database IF EXISTS ridesharing"
createdb_query = "create database IF NOT EXISTS ridesharing"
usedb_query = "use ridesharing"
droptable_query = "drop table IF EXISTS trips"
# One row per taxi trip; `distance`/`travel_time` are the computed
# (presumably routed — TODO confirm) counterparts of the recorded values.
table_query = """create table IF NOT EXISTS trips
                (id int not null auto_increment primary key,
                trip_id int not null,
                trip_date date,
                pickup_time time,
                dropoff_time time,
                trip_time int,
                trip_distance float,
                pickup_lat varchar(15),
                pickup_long varchar(15),
                dropoff_lat varchar(15),
                dropoff_long varchar(15),
                passengers int,
                distance float,
                travel_time int)"""
cursor.execute(dropdb_query)
cursor.execute(createdb_query)
cursor.execute(usedb_query)
cursor.execute(droptable_query)
cursor.execute(table_query)
cursor.execute("use ridesharing")
curr_dir = os.getcwd()
# processAll: load all twelve monthly dumps; otherwise file_list stays empty
processAll = True
file_list = []
if processAll:
    file_list = [
        "trip_data_1.csv", "trip_data_2.csv", "trip_data_3.csv",
        "trip_data_4.csv", "trip_data_5.csv", "trip_data_6.csv",
        "trip_data_7.csv", "trip_data_8.csv", "trip_data_9.csv",
        "trip_data_10.csv", "trip_data_11.csv", "trip_data_12.csv"
    ]
import telebot
import postgresql as psql
from Connection import *

# Local Postgresql connection
con = Connection('pq://*****:*****@localhost/ocatolicobot')
# External connection
#con = Connection('pq://*****:*****@179.83.81.217/ocatolicobot')

# NOTE(review): bot token hard-coded in source; move it to configuration.
token = '776808432:AAFusuIi1GyNlWb1nFdkWFl54s8LL89nQkw'
bot = telebot.TeleBot(token)


# Welcome commands.
@bot.message_handler(commands=['start'])
def send_welcome(message):
    """Greet a new user on /start and record their first-contact data."""
    bot.send_message(message.chat.id, 'Olá, seja bem vindo ao O Católico Bot!')
    bot.send_message(message.chat.id, 'Estamos em processo de criação!')
    bot.send_message(
        message.chat.id,
        'Logo logo estarei pronto para lhe ajudar a encontrar a missa/adoração/evento/confissão que estiverem disponíveis para você'
    )
    bot.send_message(
        message.chat.id,
        'Para mais informações ou sugestões entre em contato com: [email protected]'
    )
    # Saving data about the first contact with the user,
    # in order to build a database of all users.
    # (values wrapped in braces, presumably for a Postgres array/record
    # column — TODO confirm; the function appears to continue beyond this
    # excerpt)
    chat_id = '{' + str(message.chat.id) + '}'
    user_name = '{' + str(message.chat.username) + '}'
def inserisciDoctors(scelta):
    """Insert one doctor row per record returned by getJson."""
    for rec in getJson.getDoctor(scelta):
        Connection.update(
            f"insert into doctor (id, name, surname) "
            f"values ({rec['id']}, '{rec['name']}', '{rec['surname']}' );")