def apriori():
    """Mine association rules over one station's task sequences.

    Reads the station name from the JSON request body, loads that
    station's task sequences (columns t_1..t_7) from MySQL, dumps them
    to ``task.csv``, runs the Apriori algorithm over the file and
    returns the post-processed rules as JSON: ``{"ret": 200, "task": ...}``.
    """
    # CORS preflight short-circuit.
    if request.method == 'OPTIONS':
        return template('status:200')
    name = request.json['name']
    print(name)
    mysql = Mysql()
    taskSql = "SELECT * FROM trans_type_1"
    taskList = mysql.getAll(taskSql)
    # SECURITY FIX: `name` is client-supplied — bind it as a query
    # parameter instead of concatenating it into the SQL string.
    nodeSql = "SELECT t_1,t_2,t_3,t_4,t_5,t_6,t_7 FROM task_seq WHERE station_name=%s"
    nodeTaskList = mysql.getAll(nodeSql, [name])
    mysql.dispose()
    # One transaction per row: the non-NULL t_1..t_7 values, in order.
    task = []
    for row in nodeTaskList:
        nodeTask = [row['t_%d' % (i + 1)] for i in range(7)
                    if row['t_%d' % (i + 1)] is not None]
        task.append(nodeTask)
    df = pd.DataFrame({'task_seq': task})
    df.to_csv('task.csv', index=False, sep=',')
    inFile = dataFromFile('task.csv')
    # Minimum support 0.10, minimum confidence 0.5.
    items, rules = runApriori(inFile, 0.10, 0.5)
    rule = dealResults(taskList, rules)
    return {"ret": 200, "task": rule}
def insert_data():
    """Insert a single hard-coded demo row into `posture_result`."""
    # Acquire a pooled connection.
    db = Mysql()
    # Fixed sample statistics per channel: (max, min, mean, count).
    roll = (0.0999989, 0.050000374106515644, 0.07501550874813788, 100)
    pitch = (0.09999891074324113, 0.050000374106515644, 0.07501550874813788, 1000)
    deviation = (0.09999891074324113, 0.050000374106515644, 0.07501550874813788, 100)
    total_score = 87.76071883296575
    # NOTE: `delat_deviation_count` matches the actual column name in the DB.
    insert_sql = "INSERT INTO posture_result(update_time,delta_roll_max,delta_roll_min,delta_roll_mean,delta_roll_count,delta_pitch_max,delta_pitch_min,delta_pitch_mean,delta_pitch_count,delta_deviation_max,delta_deviation_min,delta_deviation_mean,delat_deviation_count,score) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
    update_time = datetime.datetime.now().strftime("%Y%m%d%H%M%S%f")
    row = (update_time,) + roll + pitch + deviation + (total_score,)
    db.insertMany(insert_sql, [row])
    # Release the connection back to the pool.
    db.dispose()
def getCoverR():
    """Compute each branch's coverage radius and the resources inside it.

    For every row of `bankdata_copy`, takes the 20th-percentile
    customer-to-station distance as the coverage radius, then counts the
    households, office area, nearby branches and firm headcount inside
    the corresponding bounding box and writes everything back into
    `bankdata_copy`.
    """
    mysql = Mysql()
    nameSql = 'SELECT name FROM bankdata_copy'
    res1 = mysql.getAll(nameSql)
    # BUG FIX: the original iterated over undefined `res` (NameError);
    # it must iterate the name list `res1`.
    for x in res1:
        # SECURITY FIX: bind the station name as a parameter.
        crSql = 'SELECT f_s_dis,flng,flat,slng,slat FROM original_data WHERE station_name=%s'
        res2 = mysql.getAll(crSql, [x['name']])
        nodeRadius = 0
        if res2:
            # BUG FIX: the original called sorted(res2.items(), ...) —
            # res2 is a list of dicts, so that raised — and discarded the
            # result anyway. Sort by distance and keep it so the
            # 20th-percentile row below is meaningful.
            res2 = sorted(res2, key=lambda row: row['f_s_dis'])
            # BUG FIX: math.floor returns a float; cast for indexing.
            pick = res2[int(math.floor(len(res2) * 0.2))]
            nodeRadius = pick['f_s_dis']
            k = getNewLatLng(pick['slat'], pick['slng'], nodeRadius)
            # Households in residential estates inside the bounding box.
            xqSql = 'SELECT total_house from fdd_xq WHERE lat >' + str(
                k['minLat']) + ' AND lat <' + str(
                    k['maxLat']) + ' AND lng >' + str(
                        k['minLng']) + ' AND lng <' + str(k['maxLng'])
            res3 = mysql.getAll(xqSql)
            total_house = 0
            for m in res3:
                total_house += m['total_house']
            print(total_house)
            # Office-building floor area inside the bounding box.
            xzlSql = 'SELECT area from biz_bld WHERE lat >' + str(
                k['minLat']) + ' AND lat <' + str(
                    k['maxLat']) + ' AND lng >' + str(
                        k['minLng']) + ' AND lng <' + str(k['maxLng'])
            res4 = mysql.getAll(xzlSql)
            total_area = 0
            # BUG FIX: the original summed over res3 (households) here
            # instead of res4 (office areas).
            for m in res4:
                total_area += m['area']
            print(total_area)
            # Number of other branches inside the bounding box.
            netSql = 'SELECT COUNT(name) AS data from bankdata_copy WHERE lat >' + str(
                k['minLat']) + ' AND lat <' + str(
                    k['maxLat']) + ' AND lng >' + str(
                        k['minLng']) + ' AND lng <' + str(k['maxLng'])
            r1 = mysql.getOne(netSql)
            print(r1)
            # Firm headcount inside the bounding box.
            firmSql = 'SELECT person_num from firm_info WHERE flat >' + str(
                k['minLat']) + ' AND flat <' + str(
                    k['maxLat']) + ' AND flng >' + str(
                        k['minLng']) + ' AND flng <' + str(k['maxLng'])
            r2 = mysql.getAll(firmSql)
            person_num = 0
            for m in r2:
                person_num += m['person_num']
            print(person_num)
            # Persist the computed figures for this branch.
            sql_r = ('UPDATE bankdata_copy SET cover_r =' + str(nodeRadius)
                     + ',xq_house=' + str(total_house)
                     + ',xzl_area=' + str(total_area)
                     + ',near_node=' + str(r1['data'])
                     + ',firmb_in_r=' + str(person_num)
                     + ' WHERE name=%s')
            r4 = mysql.update(sql_r, [x['name']])
    mysql.dispose()
def get_cycle_data_from_db(table_name, col_name):
    """Return one random `col_name` value from a status-0 row of `table_name`."""
    conn = Mysql()
    # Table/column names cannot be bound as parameters, only the status is.
    query_sql = ("select " + col_name + " from " + table_name
                 + " where status = %s order by rand() limit 1")
    # update_sql = "update " + table_name + " set status = 1 where " + col_name + " = %s"
    # (post-fetch status update is currently disabled)
    row = conn.getOne(query_sql, [0])
    # db.update(update_sql, [result[col_name]])  # update the status
    conn.dispose()
    return row[col_name]
def get_one():
    """Fetch and print the most recent `posture_result` row."""
    # Acquire a pooled connection.
    db = Mysql()
    get_sql = "SELECT * FROM posture_result ORDER BY update_time DESC LIMIT 1"
    latest = db.getOne(get_sql)
    # print("{:}\t{:}".format(result['update_time'],result['score']))
    print(type(latest))
    print(latest)
    # Release the connection back to the pool.
    db.dispose()
def CVeReturnNsfocus(str1):
    """Look up each CVE id (one per line of `str1`) in `nsfocusvul`.

    Returns a list of VulInfo objects for the CVEs that were found;
    missing CVEs are silently skipped.
    """
    result = []
    cvelist = str1.split("\n")
    mysql = Mysql()
    for cveone in cvelist:
        # SECURITY FIX: the CVE string is caller-supplied; bind it as a
        # query parameter instead of %-interpolating it into the SQL.
        sql = "SELECT * FROM nsfocusvul where vul_cve = %s"
        res = mysql.getOne(sql, [cveone.strip()])
        if res != False:  # getOne returns False when no row matched
            resobj = VulInfo(res["vul_id"], res["vul_cve"],
                             u'' + res["vul_name"], u'' + res["vul_desc"],
                             u'' + res["vul_soul"], res["vul_data"])
            result.append(resobj)
    mysql.dispose()
    return result
def predict():
    """Predict business volume for a candidate branch location.

    Expects JSON ``{loc: {lat, lng}, region, bank}``. First predicts a
    coverage radius with ``pre_r_model``, then gathers nearby-branch
    count, firm headcount and household count inside that radius from
    MySQL, and finally feeds the five features to ``pre_bsum_model``.
    Returns ``{"bsum": ...}``.
    """
    # CORS preflight short-circuit.
    if request.method == 'OPTIONS':
        return template('status:200')
    os.chdir("/home/czhou/python/model")
    gbr_r = joblib.load("pre_r_model.m")
    # 户数 写字楼面积 周围网点数 银行 地区
    # test_X=[[150000,8300000,50,2,3]]
    print(request.json)
    loc = request.json['loc']
    region = request.json['region']
    bank = request.json['bank']
    # The radius model takes only (region, bank).
    pre_r = gbr_r.predict([[region, bank]])
    print(pre_r)
    mysql = Mysql()
    tmp = getNewLatLng(loc['lat'], loc['lng'], pre_r[0])

    def _bbox(lat_col, lng_col):
        # WHERE clause restricting rows to the predicted coverage box.
        # (Extracted from three copy-pasted concatenations; produces the
        # exact same SQL text as the original.)
        return (' WHERE ' + lat_col + ' >' + str(tmp['minLat'])
                + ' AND ' + lat_col + ' <' + str(tmp['maxLat'])
                + ' AND ' + lng_col + ' >' + str(tmp['minLng'])
                + ' AND ' + lng_col + ' <' + str(tmp['maxLng']))

    # Number of existing branches inside the box.
    r1 = mysql.getOne('SELECT COUNT(name) AS data from bankdata_copy'
                      + _bbox('lat', 'lng'))
    print(r1)
    # Total firm headcount inside the box.
    r2 = mysql.getAll('SELECT person_num from firm_info'
                      + _bbox('flat', 'flng'))
    person_num = 0
    if r2 != False:  # getAll returns False when no rows matched
        for k in r2:
            person_num += k['person_num']
    print(person_num)
    # Total households inside the box.
    r3 = mysql.getAll('SELECT total_house from fdd_xq'
                      + _bbox('lat', 'lng'))
    total_house = 0
    if r3 != False:
        for k in r3:
            total_house += k['total_house']
    print(total_house)
    mysql.dispose()
    # Volume-model features: branches, headcount, households, region, bank.
    test_b = [[r1['data'], person_num, total_house, region, bank]]
    gbr_b = joblib.load("pre_bsum_model.m")
    pre_b = gbr_b.predict(test_b)
    print(pre_b)
    return template('{"bsum":{{bsum}}}', bsum=pre_b)
def dealPreR():
    """Rebuild the `pre_r` table from ``test_pre_r.csv``.

    Clears the table, re-inserts every CSV row, then for each predicted
    radius counts nearby branches, firm headcount and households inside
    the coverage box and stores them on the row. Finally triggers
    prebsum() and returns a "processing" template response.
    """
    # Acquire a connection just to clear the table.
    mysql = Mysql()
    sql = "delete FROM pre_r"
    result = mysql.delete(sql)
    # Release the connection.
    mysql.dispose()
    a = np.loadtxt('test_pre_r.csv', delimiter=',')
    a = np.delete(a, 0, axis=0)  # drop the header row
    for x in a:
        mysql = Mysql()
        # Values come from np.loadtxt (numeric), so str() concatenation
        # here cannot inject SQL.
        tmpSql = ("INSERT INTO pre_r (old_index, bsum, lat, lng, bank_index, region_index, pre_r) VALUES ("
                  + str(x[0]) + ',' + str(x[1]) + ',' + str(x[2]) + ','
                  + str(x[3]) + ',' + str(x[5]) + ',' + str(x[6]) + ','
                  + str(x[7]) + ')')
        result = mysql.update(tmpSql)
        tmp = getNewLatLng(x[2], x[3], x[7])
        # Branches inside the coverage box.
        netSql = 'SELECT COUNT(name) AS data from bankdata_copy WHERE lat >' + str(
            tmp['minLat']) + ' AND lat <' + str(
                tmp['maxLat']) + ' AND lng >' + str(
                    tmp['minLng']) + ' AND lng <' + str(tmp['maxLng'])
        r1 = mysql.getOne(netSql)
        print(r1)
        # Firm headcount inside the coverage box.
        firmSql = 'SELECT person_num from firm_info WHERE flat >' + str(
            tmp['minLat']) + ' AND flat <' + str(
                tmp['maxLat']) + ' AND flng >' + str(
                    tmp['minLng']) + ' AND flng <' + str(tmp['maxLng'])
        r2 = mysql.getAll(firmSql)
        person_num = 0
        # ROBUSTNESS FIX: getAll returns False when no rows matched (see
        # the identical guard in predict()); the original iterated over
        # False and crashed on empty boxes.
        if r2 != False:
            for k in r2:
                person_num += k['person_num']
        print(person_num)
        # Households inside the coverage box.
        xqSql = 'SELECT total_house from fdd_xq WHERE lat >' + str(
            tmp['minLat']) + ' AND lat <' + str(
                tmp['maxLat']) + ' AND lng >' + str(
                    tmp['minLng']) + ' AND lng <' + str(tmp['maxLng'])
        r3 = mysql.getAll(xqSql)
        total_house = 0
        if r3 != False:
            for k in r3:
                total_house += k['total_house']
        print(total_house)
        sql_r = ('UPDATE pre_r SET node_in_prer=' + str(r1['data'])
                 + ',firm_in_prer=' + str(person_num)
                 + ',house_in_prer=' + str(total_house)
                 + ' WHERE old_index=' + str(x[0]))
        r4 = mysql.update(sql_r)
        mysql.dispose()
    prebsum()
    return template('test:{{test}}', test="正在处理")
def InsertVul(request, VulInfo):
    # Insert one vulnerability record into vulinfo.nsfocusvul.
    # On failure the error is appended to mysqllog.txt instead of raised.
    # NOTE: Python 2 code (print statement, `except Exception, e`).
    if VulInfo != None:
        mysql = Mysql()
        try:
            # The leading NULL feeds the auto-increment primary key.
            sql = "INSERT INTO `vulinfo`.`nsfocusvul` VALUES (NULL, %s, %s, %s, %s, %s, %s)"
            param = (VulInfo.vul_id, VulInfo.vul_cve, VulInfo.vul_name,
                     VulInfo.vul_desc, VulInfo.vul_soul, VulInfo.vul_data)
            print str(param) + "----- insert-ok"
            mysql.insertOne(sql, param)
            mysql.end()  # commit the transaction
        except Exception, e:
            # Best-effort logging; the insert is simply lost on error.
            Writefile("mysqllog.txt", str(e) + VulInfo.tostr() + "\n")
            print e
        mysql.dispose()
def request1(appkey, movieName, m="GET"):
    # Query the Juhe "onebox/movie" API for `movieName` and bulk-insert
    # every returned picture URL into the `picture` table.
    # NOTE: Python 2 networking API (urllib.urlopen / urlencode).
    values = list()
    url = "http://op.juhe.cn/onebox/movie/video"
    params = {
        "key": appkey,  # application APPKEY (from the app detail page)
        "dtype": "json",  # response format, xml or json (json by default)
        "q": movieName,  # movie title to search for
    }
    params = urlencode(params)
    if m == "GET":
        f = urllib.urlopen("%s?%s" % (url, params))
    else:
        f = urllib.urlopen(url, params)
    content = f.read()
    res = json.loads(content)
    if res:
        error_code = res["error_code"]
        if error_code == 0:  # request succeeded
            mysql = Mysql()
            url_list = getJsonContent((res["result"]))
            # print((res["result"])['cover'])
            for i in range(len(url_list)):
                # pic_id is a module-level running counter shared across calls.
                global pic_id
                values.append([pic_id, url_list[i]])
                pic_id += 1
            # INSERT IGNORE: duplicate picture_ids are silently skipped.
            mysql.insertMany(
                'INSERT IGNORE INTO picture(picture_id,picture_url) values(%s,%s)',
                values)
            mysql.dispose()
        else:
            print("%s:%s" % (res["error_code"], res["reason"]))
    else:
        print("request api error")
# @Time : 2017-10-17 13:28:40 # @File : test_conn.py # @Software : PyCharm # 测试对MySQL的访问 from MySqlConn import Mysql # 申请资源 mysql = Mysql() sqlAll = "select * from gz_lianjia_xiaoqu_all" result = mysql.getAll(sqlAll) if result: print "get all" for row in result: print "%s\t%s\t%s\t%s\t%s" % (row["area_name"], row["price"], row["longtitude"], row["latitude"], row["detail_url"]) #sqlAll = "select * from gz_lianjia_xiaoqu_all" #result = mysql.getMany(sqlAll, 2) #if result: # print "get many" # for row in result: # print "%s\t%s" % (row["area_name"], row["price"]) #result = mysql.getOne(sqlAll) #print "get one" #print "%s\t%s" % (result["area_name"], result["price"]) # 释放资源 mysql.dispose()
""" author: wdw110 """ from MySqlConn import Mysql from _sqlite3 import Row #申请资源 mysql = Mysql() sqlAll = '' result = mysql.getAll(sqlAll) if result: print 'get all' for row in result: print '%s\t%s'%() sqlAll = '' result = mysql.getMany(sqlAll,2) if result: print 'get many' for row in result: print '' result = mysql.getOne(sqlAll) print 'get one' print '' #释放资源 mysql.dispose()
def prebsum():
    # Train 50 GradientBoostingRegressor candidates on shuffled splits of
    # pre_bsum_train.csv, keep the one with the lowest relative-error MSE
    # as pre_bsum_model.m, and apply it to the rows exported from pre_r.
    mysql = Mysql()
    sql = 'SELECT bsum,node_in_prer,firm_in_prer,house_in_prer,region_index,bank_index FROM pre_r WHERE region_index=2 or region_index=7 or region_index=8'
    res = mysql.getAll(sql)
    mysql.dispose()
    # Flatten the result rows into a plain numeric matrix for savetxt;
    # column 0 is the target (bsum), the rest are features.
    a = []
    for x in res:
        b = []
        b.append(x['bsum'])
        b.append(x['node_in_prer'])
        b.append(x['firm_in_prer'])
        b.append(x['house_in_prer'])
        b.append(x['region_index'])
        b.append(x['bank_index'])
        a.append(b)
    np.savetxt('to_predict_bsum.csv', a, delimiter=',')
    mse_arr = []
    # permutate() shuffles/splits the CSV — defined elsewhere in the
    # project; presumably 0.98 is the train fraction (TODO confirm).
    b = permutate('pre_bsum_train.csv', 0.98)
    file_rows = b['file_rows'] - b['test_rows']
    train_rows = math.floor(file_rows * 0.9)
    np.savetxt('train.csv', b['train'], delimiter=',')
    np.savetxt('test.csv', b['test'], delimiter=',')
    for time in range(50):
        # NOTE(review): hard-coded Windows path here vs the Linux paths
        # below — looks like leftovers from running on two machines;
        # confirm before deploying.
        os.chdir("C:/Users/yufeng/Desktop/python")
        t = permutate('train.csv', 0.98)
        np.savetxt('train_t.csv', t['train'], delimiter=',')
        df = pd.read_csv('train_t.csv', header=0, encoding='utf-8')
        # NOTE: DataFrame.ix is deprecated (removed in pandas 1.0);
        # column 0 is the target, remaining columns are features.
        y_train, x_train = df.ix[0:train_rows, 0:1], df.ix[0:train_rows, 1:]
        y_test, x_test = df.ix[train_rows:, 0:1], df.ix[train_rows:, 1:]
        params = {
            'n_estimators': 100,
            'max_depth': 5,
            'min_samples_split': 2,
            'learning_rate': 0.01,
            'loss': 'ls'
        }
        gbr = ensemble.GradientBoostingRegressor(**params)
        gbr.fit(x_train, y_train)
        os.chdir("/home/czhou/python/model")
        # Each candidate is saved under its round number.
        joblib.dump(gbr, "pre_bsum_model" + str(time) + ".m")
        y_pre = gbr.predict(x_test)
        y_test = np.array(y_test)
        # Model-selection metric: MSE between the normalized shares
        # pred/(true+pred) and true/(true+pred).
        m = []
        n = []
        for i, j in enumerate(y_pre):
            m.append(y_pre[i] / (y_test[i][0] + y_pre[i]))
            n.append(y_test[i][0] / (y_test[i][0] + y_pre[i]))
        mse = mean_squared_error(n, m)
        mse_arr.append(mse)
    # Promote the candidate with the lowest MSE to pre_bsum_model.m.
    a = mse_arr.index(min(mse_arr))
    os.chdir("/home/czhou/python")
    df1 = pd.read_csv('to_predict_bsum.csv', header=0, encoding='utf-8')
    y_test, x_test = df1.ix[0:, 0:1], df1.ix[0:, 1:]
    os.chdir("/home/czhou/python/model")
    gbr = joblib.load("pre_bsum_model" + str(a) + ".m")
    print("train_model" + str(a) + ".m")
    joblib.dump(gbr, "pre_bsum_model.m")
    y_pre1 = gbr.predict(x_test)
    index = np.arange(1, 24, 1)
    # Report the same relative-error MSE on the pre_r export.
    m = []
    n = []
    y_test = np.array(y_test)
    for i, j in enumerate(y_pre1):
        m.append(y_pre1[i] / (y_test[i][0] + y_pre1[i]))
        n.append(y_test[i][0] / (y_test[i][0] + y_pre1[i]))
    mse = mean_squared_error(n, m)
    print("MSE: %.4f" % mse)
def getscore():
    # Score each station's expected future business growth from the new
    # residential households and new office-building floor area inside
    # its coverage radius, then write the normalized (0-100) score into
    # node_score.future_bsum_growth.
    # Acquire a pooled connection for the station list.
    mysql = Mysql()
    # All distinct stations except the finance department ('财务部').
    sql = "SELECT station_name FROM original_data WHERE station_name!='财务部' GROUP BY station_name"
    nameList = mysql.getAll(sql)
    # Release the connection.
    mysql.dispose()
    loanM = []       # per-station loan totals (collection disabled below)
    monthBsum = []   # per-station monthly volume (collection disabled below)
    replaceDeg = []  # per-station replaceability (collection disabled below)
    bsumGrowth = []
    xqHouse = []     # new-estate households around each station
    xzlArea = []     # new office-building area around each station
    for x in nameList:
        mysql = Mysql()
        # Loan amount of a single station (currently disabled).
        loanOne = 0
        loanSql = 'SELECT loan_money FROM original_data WHERE station_name=' + "'" + x[
            'station_name'] + "'"
        # loanRes = mysql.getAll(loanSql)
        # for k in loanRes:
        #     if k['loan_money']!=None :
        #         loanOne += k['loan_money']
        # loanM.append(loanOne)
        # Total monthly business volume of a single station (disabled).
        bsumOne = 0
        bsumSql = 'SELECT count(_index) AS data FROM original_data WHERE station_name=' + "'" + x[
            'station_name'] + "'"
        # bsumRes = mysql.getOne(bsumSql)
        # if bsumRes['data']:
        #     monthBsum.append(bsumRes['data'])
        # else:
        #     monthBsum.append(bsumOne)
        # Replaceability of a single station (disabled).
        replaceSql = 'SELECT bratio,lg_to_min FROM bankdata_copy WHERE name=' + "'" + x[
            'station_name'] + "'"
        # replaceRes = mysql.getOne(replaceSql)
        # replaceDeg.append(replaceRes)
        # Future total business growth:
        # station location and its business coverage radius.
        nodeLocSql = 'SELECT lng,lat,cover_r FROM bankdata_copy WHERE name=' + "'" + x[
            'station_name'] + "'"
        nodeLoc = mysql.getOne(nodeLocSql)
        tmp = getNewLatLng(nodeLoc['lat'], nodeLoc['lng'], nodeLoc['cover_r'])
        # Households of newly built residential estates near the station.
        newXq = 0
        newXqSql = 'SELECT house_num from ajk_newxq WHERE lat >' + str(
            tmp['minLat']) + ' AND lat <' + str(
                tmp['maxLat']) + ' AND lng >' + str(
                    tmp['minLng']) + ' AND lng <' + str(tmp['maxLng'])
        newXqRes = mysql.getAll(newXqSql)
        print(newXqRes)
        if newXqRes != False:  # getAll returns False when no rows matched
            for k in newXqRes:
                if k['house_num'] != None:
                    newXq += k['house_num']
        xqHouse.append(newXq)
        # Floor area of newly built office buildings near the station.
        newXzl = 0
        newXzlSql = 'SELECT area from ajk_newxzl WHERE lat >' + str(
            tmp['minLat']) + ' AND lat <' + str(
                tmp['maxLat']) + ' AND lng >' + str(
                    tmp['minLng']) + ' AND lng <' + str(tmp['maxLng'])
        newXzlRes = mysql.getAll(newXzlSql)
        if newXzlRes != False:
            for k in newXzlRes:
                newXzl += k['area']
        xzlArea.append(newXzl)
        mysql.dispose()
    sumxq = sum(xqHouse)
    sumxzl = sum(xzlArea)
    bsumGrowthScore = []
    # Weighted growth estimate per station; the weights presumably come
    # from a fitted model — TODO confirm the source of 0.06238087 /
    # 0.16797319.
    for i, j in enumerate(xqHouse):
        tmp = j * 0.06238087 + xzlArea[i] * 0.16797319
        bsumGrowth.append(tmp)
    maxBG = max(bsumGrowth)
    mysql = Mysql()
    for i, j in enumerate(nameList):
        # Normalize to a 0-100 score relative to the best station.
        tmp = round((bsumGrowth[i] / maxBG) * 100, 2)
        tmpSql = 'UPDATE node_score SET future_bsum_growth=' + str(
            tmp) + ' WHERE _name=' + "'" + j['station_name'] + "'"
        mysql.update(tmpSql)
    mysql.dispose()
def handle(self):
    # TCP handler: receive framed monitor packets, parse them, store
    # them in MySQL and send back an acknowledgement frame.
    # Frames start with 0x40 0x40 ('@@') and end with 0x23 0x23 ('##');
    # a recv() may deliver a partial frame, so fragments accumulate in
    # `data` until both delimiters are present.
    # NOTE: Python 2 code — str is treated as a byte string throughout.
    print 'got connection from ', self.client_address
    #self.wfile.write('connection %s:%s at %s succeed!' % (host,port,ctime()))
    #self.wfile.write(senddata)
    data = ''
    while True:
        recvdata = self.request.recv(1024)
        if not recvdata:
            break
        # Discard garbage that arrives before any frame header.
        if data == '' and recvdata[:2] != '\x40\x40':
            continue
        if data == '' and recvdata[:2] != '\x40\x40' and recvdata[
                -2:] == '\x23\x23':
            continue
        # Complete frame in a single read.
        if recvdata[:2] == '\x40\x40' and recvdata[-2:] == '\x23\x23':
            data = recvdata
        # Frame start without trailer: begin accumulating.
        if recvdata[:2] == '\x40\x40' and recvdata[-2:] != '\x23\x23':
            data += recvdata
            continue
        # Middle fragment.
        if recvdata[:2] != '\x40\x40' and recvdata[-2:] != '\x23\x23':
            data += recvdata
            continue
        # Trailing fragment completes the frame.
        if recvdata[:2] != '\x40\x40' and recvdata[-2:] == '\x23\x23':
            data += recvdata
        if data != '' and data[:2] == '\x40\x40' and data[
                -2:] == '\x23\x23':
            #print data
            #if ord(data[26])==2:
            dd = data[2:-2]  # strip the frame delimiters
            # Back-to-back frames are separated by '##@@'.
            arr = dd.split('##@@')
            # Destination address: bytes 16..21, reversed, hex-rendered.
            desadd = ''
            for i in arr:
                s = ''
                for j in i[16:22][::-1]:
                    s += hex(ord(j))[2:].zfill(2)
                desadd = s
            if True:
                # Timestamp: bytes 4..9, reversed, decimal-rendered.
                recvdate = ''
                for i in arr:
                    s = ''
                    for j in i[4:10][::-1]:
                        s += str(ord(j)).zfill(2)
                    recvdate = s
                # Source address: bytes 10..15, reversed, hex-rendered.
                resadd = ''
                for i in arr:
                    s = ''
                    for j in i[10:16][::-1]:
                        s += hex(ord(j))[2:].zfill(2)
                    resadd = s
                codenumber = ''
                jsoninfo = ''
                # Dispatch on the type flag (byte 27) to the matching
                # payload parser in pmd; flags 2/204/206 also carry a
                # code number.
                if ord(data[27]) == 2:
                    tmpinfo = pmd.parse_typeflag_02(data[27:-3])
                    codenumber = tmpinfo['codenumber']
                    jsoninfo = json.dumps(tmpinfo)
                if ord(data[27]) == 204:
                    tmpinfo = pmd.parse_typeflag_204(data[27:-3])
                    codenumber = tmpinfo['codenumber']
                    jsoninfo = json.dumps(tmpinfo)
                if ord(data[27]) == 206:
                    tmpinfo = pmd.parse_typeflag_206(data[27:-3])
                    codenumber = tmpinfo['codenumber']
                    jsoninfo = json.dumps(tmpinfo)
                if ord(data[27]) == 1:
                    tmpinfo = pmd.parse_typeflag_01(data[27:-3])
                    codenumber = ''
                    jsoninfo = json.dumps(tmpinfo)
                if ord(data[27]) == 24:
                    tmpinfo = pmd.parse_typeflag_24(data[27:-3])
                    codenumber = ''
                    jsoninfo = json.dumps(tmpinfo)
                if ord(data[27]) == 205:
                    tmpinfo = pmd.parse_typeflag_205(data[27:-3])
                    codenumber = ''
                    jsoninfo = json.dumps(tmpinfo)
                # Persist the raw payload plus the parsed metadata.
                mysql = Mysql()
                sql = "insert into mintordata(mintortime,resadd,desadd,datainfo,controlunit,typeflag,codenumber,jsoninfo) values(%s,%s,%s,%s,%s,%s,%s,%s)"
                values_info = (recvdate, resadd, desadd, data[27:-3],
                               ord(data[26]), ord(data[27]), codenumber,
                               jsoninfo)
                results_value = mysql.insertOne(sql, values_info)
                mysql.dispose()
                #cur.execute('insert into mintordata(mintortime,resadd,desadd,datainfo,controlunit,typeflag,codenumber,jsoninfo) values(%s,%s,%s,%s,%s,%s,%s,%s)',(recvdate,resadd,desadd,data[27:-3],ord(data[26]),ord(data[27]),codenumber,jsoninfo))
                #conn.commit()
                print "RECV from ", self.client_address[
                    0], " at ", self.client_address[
                        1], " recvdata at ", recvdate, " resadd at ", resadd, " desadd at ", desadd
                # Build the acknowledgement: echo the header, splice in
                # the current time, swap source/destination addresses.
                # NOTE(review): second/minute/hour/day/month/year are not
                # defined in this method — presumably module-level values
                # updated elsewhere; confirm where they are set.
                #senddata=data[:6]+second+minute+hour+day+month+year+data[18:24]+data[12:18]+data[24:26]+'\x03'
                senddata = data[:
                                6] + second + minute + hour + day + month + year + data[
                                    18:24] + data[
                                        12:18] + '\x00\x00' + '\x03'
                #senddata=data[:6]+second+minute+hour+day+month+year+data[18:24]+data[12:18]+data[24:26]+'\x04'+data[27:-3]
                # Append the 8-bit checksum of everything after the
                # header, then the '##' trailer.
                checkstr = chr(uchar_checksum(senddata[2:]))
                senddata += checkstr + '##'
                self.request.send(senddata)
                data = ''