def getDataFrom(self, filename):
    """Read document rows from the last sheet of an Excel workbook and
    insert each row (plus the raw bytes of the file referenced in column
    11) into the DOCUMENTS table via self.conn.

    filename: path to the .xls/.xlsx attribute workbook.
    """
    book = xlrd.open_workbook(filename)
    # always use the last sheet of the workbook
    sheet = book.sheet_by_index(len(book.sheets()) - 1)
    ops = []
    for r in range(1, sheet.nrows):  # row 0 is the header row
        filepath = sheet.cell(r, 11).value
        # context manager so the file handle is always released
        # (the original leaked one handle per row)
        with open(filepath, "rb") as fh:
            origin = pymysql.Binary(fh.read())
        fileUuid = str(uuid.uuid1()).replace("-", "")
        values = (fileUuid, sheet.cell(r, 0).value[3:], sheet.cell(r, 2).value,
                  sheet.cell(r, 3).value, sheet.cell(r, 4).value,
                  sheet.cell(r, 5).value, sheet.cell(r, 6).value,
                  sheet.cell(r, 7).value, sheet.cell(r, 8).value,
                  sheet.cell(r, 9).value, sheet.cell(r, 10).value, origin,
                  "1010", "ZH")
        ops.append(values)
    cursor = self.conn.cursor()
    for row in ops:
        try:
            # one execute per row (the original called executemany on
            # single-row slices, which is the same work with extra overhead)
            cursor.execute(
                'insert into DOCUMENTS (UUID,CRA_DT,TITLE,AUTHOR,AURDEPT,KYWRD,ABSTRACT,JOURNAL,PUB_DT,URL,SUFFIX,CONTENT_ORI,SOURCE_CODE,LANG)'
                'values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)',
                row)
        except OSError:
            # best-effort import: skip rows whose insert fails, as before
            pass
    cursor.close()
    self.conn.commit()
def test_issue_364(self):
    """ Test mixed unicode/binary arguments in executemany. """
    # Regression test: a query string may be str or unicode while the
    # parameters mix Binary (bytes) and unicode values.
    conn = pymysql.connect(charset="utf8mb4", **self.databases[0])
    self.safe_create_table(
        conn, "issue364",
        "create table issue364 (value_1 binary(3), value_2 varchar(3)) "
        "engine=InnoDB default charset=utf8mb4")
    sql = "insert into issue364 (value_1, value_2) values (_binary %s, %s)"
    usql = u"insert into issue364 (value_1, value_2) values (_binary %s, %s)"
    values = [pymysql.Binary(b"\x00\xff\x00"), u"\xe4\xf6\xfc"]

    # test single insert and select
    cur = conn.cursor()
    cur.execute(sql, args=values)
    cur.execute("select * from issue364")
    self.assertEqual(cur.fetchone(), tuple(values))

    # test single insert unicode query
    cur.execute(usql, args=values)

    # test multi insert and select
    cur.executemany(sql, args=(values, values, values))
    cur.execute("select * from issue364")
    for row in cur.fetchall():
        self.assertEqual(row, tuple(values))

    # test multi insert with unicode query
    cur.executemany(usql, args=(values, values, values))
def main():
    """Rebuild the `members` table from members/members.txt, storing one
    JPEG per member (members/members_pics/<ID>.jpg) as a BLOB."""
    try:
        conn = pymysql.connect("localhost", "root", "admin123",
                               "face_recognition")
        cursor = conn.cursor()
    except Exception:
        # Without a connection nothing below can work; return instead of
        # falling through to a NameError on `cursor` (the original bug).
        print("Fail to connect DB!")
        return
    with open("members/members.txt") as f:
        lines = f.readlines()
    cursor.execute("delete from members")
    sql = ("INSERT INTO members(ID, Name, Picture, RegisterTime, Authority,"
           " Inside) VALUES (%s, %s, %s, %s, %s, %s)")
    for line in lines:
        if len(line) == 0:
            continue
        fields = line.split()
        ID = int(fields[0])
        Name = fields[1]
        date = fields[2]
        Authority = fields[3]
        # context manager: the original never closed the image handles
        with open('members/members_pics/{}.jpg'.format(ID), 'rb') as fp:
            img = fp.read()
        cursor.execute(sql,
                       (ID, Name, pymysql.Binary(img), date, Authority, 0))
        conn.commit()
def insertSql(name, img_path):
    """Insert a person's name, raw image bytes and their 128-d face
    encoding (split into 16 comma-joined strings of 8 values) into the
    `person` table. Copy into main to run; name/img_path must be concrete."""
    loaded = face_recognition.load_image_file(img_path)
    first_encoding = face_recognition.face_encodings(loaded)[0]
    # 128 values reshaped into 16 rows of 8
    grid = np.array(first_encoding).reshape(16, 8).tolist()
    # serialise each 8-value row as one comma-separated string
    strEncodingList = [",".join(str(v) for v in row) for row in grid]
    with open(img_path, 'rb') as handle:
        raw = handle.read()
    record = (name, pymysql.Binary(raw)) + tuple(strEncodingList)
    sql = "INSERT INTO person(name,img,encoding0,encoding1,encoding2,encoding3,encoding4,encoding5," \
          "encoding6,encoding7,encoding8,encoding9,encoding10,encoding11,encoding12,encoding13,encoding14,encoding15) " \
          "values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) "
    try:
        cursor.execute(sql, record)
        conn.commit()
        print("插入数据成功")
        return {"result": "success"}
    except Exception as e:
        print(e)
        conn.rollback()
        print("插入数据失败")
        return {"result": "fail"}
def setupPhoto():
    """Load a JPEG from disk and store its raw bytes in student #1's
    PHOTO column."""
    try:
        # binary mode: image bytes must not pass through a text codec
        # (the original opened in text mode and decoded as gb18030,
        # corrupting the data)
        with open("F:/FileRecv/tupian/1.jpg", "rb") as fin:
            img = fin.read()
    except IOError as e:
        print(e)
        # nothing to store if the read failed
        return
    conn = pymysql.connect(**connection('test'))
    cursor = conn.cursor()
    # parameterized query: the driver escapes the binary payload; the old
    # %-string interpolation was both broken for blobs and injectable
    cursor.execute("UPDATE student SET PHOTO=%s WHERE id=1",
                   (pymysql.Binary(img),))
    conn.commit()
    cursor.close()
    conn.close()
def insert_file(self, table, cols, values):
    """Insert one row into `table`; `cols` and `values` are parallel
    sequences. Returns 'success' or 'fail'."""
    try:
        cursor = self.conn.cursor()
        col_sql = ",".join("`%s`" % c for c in cols)
        placeholders = ",".join(["%s"] * len(values))
        sql = "INSERT INTO `%s`(%s) VALUE(%s)" % (table, col_sql,
                                                  placeholders)
        # Parameterize EVERY value, not just a bytes one at index 1: the
        # driver escapes them, which also closes the SQL-injection hole of
        # the old string-concatenation approach.
        params = [pymysql.Binary(v) if isinstance(v, bytes) else v
                  for v in values]
        cursor.execute(sql, params)
        self.conn.commit()
        cursor.close()
        return 'success'
    except Exception:
        return 'fail'
def addadmin(request):
    """Django view: create an admin record from POSTed fields plus an
    uploaded photo, saving the photo both under MEDIA_ROOT and as a BLOB
    in the `admin` table. Redirects to the login page on success."""
    name = request.POST['name']
    username = request.POST['username']
    password = request.POST['password']
    sex = request.POST['sex']
    a_file = request.FILES['form_data']
    photoname = str(name) + '_' + str(username) + '.jpg'
    # admin photos are stored under static/image/pic_admin
    filename = os.path.join(settings.MEDIA_ROOT, photoname)
    with open(filename, 'wb') as f:
        img = a_file.file.read()
        f.write(img)
        # (no explicit close: the with-block releases the handle)
    sex = 0 if sex == '男' else 1
    conn = pymysql.connect(host='localhost',
                           user='******',
                           password='******',
                           database='a05',
                           port=3306)
    cursor = conn.cursor()
    sql = "insert into admin(name,username,password,photo,gender)values(%s,%s,%s,%s,%s)"
    try:
        cursor.execute(sql,
                       (name, username, password, pymysql.Binary(img), sex))
        # commit, otherwise the new row is not persisted
        conn.commit()
    finally:
        # close cursor and connection on every path (the original leaked
        # them if execute/commit raised)
        cursor.close()
        conn.close()
    return redirect('workersystem:login')
def upload():
    """Flask endpoint: store uploaded face images as BLOBs in MySQL and
    their face-encoding vectors in Redis. Returns a JSON status payload."""
    if 'file' not in request.files:
        return jsonify({'code': 500, 'msg': '没有文件'})
    files = request.files.getlist("file")
    conn = pymysql.Connect(host='127.0.0.1',
                           user='******',
                           password='******',
                           port=3306,
                           database='face_images')
    cursor = conn.cursor()
    num = 0
    try:
        for file in files:
            if file.filename == '':
                break
            if file and allowed_file(file.filename):
                # store the raw bytes in the mysql database
                img = file.read()
                name = file.filename[0:-4]  # strip the 4-char ".jpg" suffix
                sql = 'insert ignore into image_data (name,image) values(%s, %s);'
                data = [(name, pymysql.Binary(img))]
                cursor.executemany(sql, data)
                conn.commit()
            else:
                return jsonify({'error': '图片格式错误'})
            num = num + 1
            # store the face vector in the redis database
            image = face_recognition.load_image_file(file)
            face_locations = face_recognition.face_locations(image)
            if len(face_locations) != 1:
                return jsonify({'code': 500, 'error': '人脸数量有误'})
            face_encodings = face_recognition.face_encodings(
                image, face_locations)
            # connect and record name -> feature vector
            r = redis.Redis(connection_pool=pool)
            r.set(name, face_encodings[0].tobytes())
    finally:
        # release the connection on every exit path — the original leaked
        # it whenever one of the early error returns fired
        cursor.close()
        conn.close()
    return jsonify({'number': num, 'result': '录入成功'})
def signup(self):
    """Register a new user from the form fields plus the current photo,
    then optionally launch face registration."""
    name = self.username.text()
    pwd = self.password.text()
    age = self.ageBox.value()
    job = self.jobBox.currentText()
    # context manager: the original never closed the photo handle
    with open(self.photoShow, "rb") as fp:
        photo = fp.read()
    print(name, pwd, age, job)
    sql = "INSERT INTO users(user_name, user_pwd, user_age, user_job, user_photo) VALUES (%s,%s,%s,%s,%s)"
    if name != "" and pwd != "":
        result = self.myCursor.execute(
            sql, [name, pwd, age, job, pymysql.Binary(photo)])
        self.connection.commit()
        if result:
            QMessageBox.information(self, "恭喜," + name, "用户创建成功",
                                    QMessageBox.Ok)
            replay = QMessageBox.information(
                self, "欢迎用户" + name, "请问是否要开始人脸注册",
                QMessageBox.Yes | QMessageBox.No)
            if replay == QMessageBox.Yes:
                self.face_register(name, pwd)
            else:
                self.myCursor.close()
                self.connection.close()
                self.close()
                window.show()
    else:
        QMessageBox.information(self, "错误", "用户名和密码不能为空",
                                QMessageBox.Ok)
def upload_simple(self, directory): f_list = os.listdir(directory) f_list_doc = [] ops = [] tag = "UP" + time.strftime('%Y%m%d%H%M%S', time.localtime(time.time())) ut = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time())) for i, f in enumerate(f_list): if os.path.splitext(f)[1] in { '.caj', '.pdf', '.txt', '.doc', '.docx' }: f_list_doc.append(tag + str(i).zfill(4) + f) for f in f_list_doc: b = open(directory + "/" + f[20:], "rb").read() origin = pymysql.Binary(b) fileUuid = str(uuid.uuid1()).replace("-", "") values = (fileUuid, f[:15][2:], ut, os.path.splitext(f)[0], os.path.splitext(f)[1], origin, "1010", "ZH") ops.append(values) cursor = self.conn.cursor() for n in range(0, len(ops)): self.CrawProcess.emit(str("正在导入%s\n" % (ops[n][3]))) try: if self.configs['flag'] == True: cursor.executemany( 'insert into DOCUMENTS(UUID,CRA_DT,UPLD_DT,TITLE,SUFFIX,CONTENT_ORI,SOURCE_CODE,LANG)values (%s, %s, %s, %s, %s, %s, %s, %s)', ops[n:n + 1]) else: exit() except Exception as e: print(e) # pass cursor.close() self.conn.commit()
def adapt_array(self, arr):
    """Serialise a numpy array with np.save, zlib-compress the bytes at
    maximum compression, and wrap them for binary DB storage."""
    buffer = io.BytesIO()
    np.save(buffer, arr)  # arr -> .npy byte stream
    raw = buffer.getvalue()  # whole stream contents; no seek needed
    compressed = zlib.compress(raw, zlib.Z_BEST_COMPRESSION)
    return mysql.Binary(compressed)
def updateData(self, tb, id, info):
    """Overwrite the `sequence` BLOB of row `id` in table `tb`; rolls
    back (and logs) on failure."""
    sql = "UPDATE " + tb + " SET sequence=%s where id=%s"
    try:
        self.cursor.execute(sql, [pymysql.Binary(info), id])
        self.conn.commit()
    except Exception as e:
        # narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) and log the actual cause
        print("update data failed!", e)
        self.conn.rollback()
def to_db(self):
    """Return this dataclass as a dict ready for DB insertion, with the
    observables JSON-encoded, bz2-compressed and wrapped as Binary."""
    encoded = json.dumps(self.observables,
                         cls=PythonObjectEncoder).encode("utf-8")
    blob = pymysql.Binary(bz2.compress(bytes(encoded)))
    record = asdict(self)
    record["observables"] = blob
    return record
def inset_imgs():
    """Insert the (module-global) `img` bytes into img.imgs, then close
    the cursor and connection. (Function name typo kept for callers.)"""
    try:
        # The parameter container must be a 1-tuple: `(x)` is just `x`,
        # `(x,)` is the sequence the driver expects — the original passed
        # a bare value wrapped in redundant parentheses.
        cursor.execute("insert into img(imgs) values(%s)",
                       (pymysql.Binary(img),))
        conn.commit()
    except Exception as e:
        print(e)
        conn.rollback()
    finally:
        cursor.close()
        conn.close()
def store(data):
    """Insert a scenic-spot record (title, image blob, rate) unless a row
    with the same title already exists."""
    print(data['title'])
    # pass parameters as a tuple, not a bare string
    cur.execute('select * from jingdian where title = %s',
                (data['title'],))
    row = cur.fetchone()
    if row is None:  # `is None`, not `== None`
        cur.execute(
            'insert into jingdian (title, img, rate) values (%s, %s, %s)',
            (data['title'], pymysql.Binary(data['img']), data['rate']))
        cur.connection.commit()
    else:
        # %-format the message: the old comma form printed the literal
        # "%s" followed by the title as a second print argument
        print('Item already exists: %s' % data['title'])
def insertData(self, tb, info):
    """Insert one (id, sequence) row into table `tb`, wrapping the
    sequence for binary transfer; roll back and log on failure."""
    statement = "INSERT INTO " + tb + "(id, sequence)VALUES(%s, %s)"
    params = [info['id'], pymysql.Binary(info['sequence'])]
    try:
        self.cursor.execute(statement, params)
        self.conn.commit()
    except Exception as err:
        print(err)
        self.conn.rollback()
    finally:
        pass
def record_announce_page(self, stock_code, json_content_list: dict):
    """Persist one page of announcement metadata for `stock_code` and
    return the payload's 'hasMore' flag so the caller can paginate."""
    import pymysql
    json_list = json_content_list['announcements']
    # All values parameterized: the old f-string interpolation was an
    # SQL-injection vector (titles/urls may contain quotes).
    sql = ("INSERT IGNORE into announce_manager ("
           "announce_id,stock_code,title,announce_timestamp,url,announce_type) "
           "VALUES (%s, %s, %s, %s, %s, %s)")
    for ann in json_list:
        # title stored as raw bytes (base64 alternative left behind by
        # the original author)
        title = pymysql.Binary(ann['announcementTitle'].encode())
        self.engine.execute(
            sql,
            (ann['announcementId'], stock_code, title,
             ann['announcementTime'] / 1000, ann['adjunctUrl'],
             ann['announcementType']))
    return json_content_list["hasMore"]
def write_image(self, name: str, ext_name: str, content: str, url: str,
                pid: int):
    """Insert one image row into blog_image.

    Returns (flag, id): flag is True when the insert succeeded and a row
    id was obtained; id is the new row id or -1 on failure.
    """
    flag = False
    id = -1
    # validate the key fields first
    if not Util.valid(content) or pid <= 0:
        log.error('url=%s 信息错误!', url)
        return flag, -1
    msql = """INSERT INTO `blog_image` (
            `url`, `name`, `ext_name`, `context`, `pid`)
            VALUES (%s, %s, %s, %s, %s);"""
    try:
        # with-block closes the cursor on all paths (the original leaked
        # it, and spelled the local `curosr`)
        with self._connect.cursor() as cursor:
            cursor.execute(msql, (url, name, ext_name,
                                  pymysql.Binary(content), pid))
            self._connect.commit()
            id = int(cursor.lastrowid)
        if id >= 0:
            flag = True
        else:
            log.error('name=%s 信息保存失败', name)
    except Exception as e:
        log.error('MySQL 执行错误: ' + str(e))
    return flag, id
def workersignup(request):
    """Django view: register a worker.

    Saves the uploaded photo to MEDIA_ROOT, detects and crops the face
    with an OpenCV Haar cascade, extracts a ResNet-18 feature vector
    (fc layer replaced by Identity), and inserts photo bytes + feature
    bytes into the `workers` table. Returns HTTP 200 on success, 404 when
    the commit fails.
    """
    # try:
    workerid = request.POST['id']
    name = request.POST['name']
    gender = request.POST['sex']
    a_file = request.FILES['photo']
    ects = a_file.name.split(".")[1]
    photoname = str(workerid) + '_' + str(name) + '.jpg'
    # worker photos are stored under MEDIA_ROOT (static/image/pic_admin)
    filename = os.path.join(settings.MEDIA_ROOT, photoname)
    with open(filename, 'wb') as f:
        img = a_file.file.read()
        f.write(img)
        f.close()
    # downscale by 10x before running face detection
    scale = 0.1
    fa = cv.imread(filename)
    img2 = cv.resize(fa, (int(fa.shape[1] * scale), int(fa.shape[0] * scale)),
                     interpolation=cv.INTER_LINEAR)
    # NOTE(review): absolute Windows path with mixed separators — confirm
    # this cascade file exists on the deployment host
    face_cascade = cv.CascadeClassifier(
        'E:/test\A051/templates\static/face_cascade\haarcascade_frontalface_default.xml'
    )
    faces = face_cascade.detectMultiScale(img2, 1.1, 5)
    if len(faces) == 1:
        face = chopFace(img2, faces[0])
        # face is the face region scaled to a (FACE_SIZE, FACE_SIZE) square image
        face = cv.resize(face, (256, 256), interpolation=cv.INTER_LINEAR)
    else:
        print("Warning: no face or more than 1 faces detected")
        face = chopCenter(img2)
    # HWC uint8 -> normalized NCHW float tensor
    img_tensor = torch.from_numpy(face.astype(np.float32) / 255).permute(
        2, 0, 1).unsqueeze(0)
    # pretrained ResNet-18 with the classifier head removed -> 512-d features
    model_extract_face_feature = modelss.resnet18(pretrained=True)
    model_extract_face_feature.fc = nn.Identity()
    model_extract_face_feature.eval()
    face_feature = model_extract_face_feature(img_tensor)
    nparr = face_feature.detach().numpy()
    face_feature = nparr[0]
    face_feature_bytes = face_feature.tostring()
    conn = pymysql.connect(host='localhost',
                           user='******',
                           password='******',
                           database='a05',
                           port=3306)
    cursor = conn.cursor()
    # gender encoded as 0 = male ('男'), 1 = female
    if (gender == '男'):
        gender = 0
    else:
        gender = 1
    print(workerid)
    sql = "insert into workers(workerid,name,gender,photo,face_feature)values(%s,%s,%s,%s,%s)"
    # execute the SQL statement (photo stored as raw JPEG bytes)
    cursor.execute(
        sql,
        (workerid, name, gender, pymysql.Binary(img), face_feature_bytes))
    try:
        # commit the change
        conn.commit()
    except:
        # roll back on error
        print("s")
        conn.rollback()
        # close cursor
        cursor.close()
        # close connection
        conn.close()
        return HttpResponse("error", status=404)
    # close cursor
    cursor.close()
    # close connection
    conn.close()
    return HttpResponse("ok", status=200)
# return img ty = cv2.imread('1.jpg') picture = cv2.imread('joshva.png') ht = picture.shape[0] wt = picture.shape[1] connection = pymysql.connect(host='localhost', user='******', password='', db='hci') try: with connection.cursor() as cursor: sql = "insert into head values(%s,%s,%s,%s,%s,%s,%s)" cursor.execute(sql, ("*****@*****.**", "sa", "Joshva", "Devdas", pymysql.Binary(picture), ht, wt)) connection.commit() finally: connection.close() #try: # with connection.cursor() as cursor: # sql ="select *from head" # cursor.execute(sql) # result = cursor.fetchall() # l=list(result) # img=result[1][2] # img=numpy.fromstring(img,dtype='uint8') # cv2.imshow(img) # print(l[0][0]+l[0][1]) #finally: # connection.close()
# Python 2 script (note the `print` statements): download a brand icon and
# store its raw bytes in the brand_type.content column.
my1 = pymysql.connect(host="47.95.235.183",
                      user='******',
                      password="******",
                      db='gd_map_charging_station',
                      port=3306,
                      charset='utf8mb4')
mycur1 = my1.cursor()


def downloadImageFile(local_filename, imgUrl):
    # Stream imgUrl to "./<local_filename>.ico" and return the file name.
    print "Download Image File=", local_filename
    local_filename = local_filename + '.ico'
    r = requests.get(imgUrl, stream=True)  # here we need to set stream = True parameter
    with open("./" + local_filename, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)
                f.flush()
        f.close()  # redundant inside `with`, kept as-is
    return local_filename


if __name__ == '__main__':
    name = 'Evcard'
    file_name = downloadImageFile(name,
                                  'http://www.evcardchina.com/Public/images/icon.ico')
    print file_name
    f = open("./" + file_name, "rb")
    b = f.read()
    f.close()
    # NOTE(review): `(pymysql.Binary(b))` is not a tuple — pymysql accepts a
    # lone argument here, but `(pymysql.Binary(b),)` would be clearer
    mycur1.execute(
        "UPDATE gd_map_charging_station.brand_type SET content = %s WHERE id = 2;",
        (pymysql.Binary(b)))
    my1.commit()
def getPyMysql(x):
    """Wrap `x` for binary transmission through pymysql."""
    wrapped = pymysql.Binary(x)
    return wrapped
import sys

try:
    # Read the picture as raw bytes: 'rb' mode is required for binary data
    # (the original opened in text mode), and the original `img = fp.read`
    # stored the bound method instead of calling it.
    with open("./test.jpg", "rb") as fp:
        img = fp.read()
    print(img)
except IOError as e:
    print("Error %d %s " % (e.args[0], e.args[1]))
    sys.exit(1)

try:
    # mysql Connect
    conn = mysql.connect(host='localhost', user='******', password='',
                         db='test')
    cursor = conn.cursor()
    # Parameterized insert: Binary() marks the payload as binary and the
    # driver escapes it (the old %-interpolation corrupts binary data)
    cursor.execute("INSERT INTO images SET DATA = %s",
                   (mysql.Binary(img),))
    # commit explicitly in case the DB is not set to autocommit
    conn.commit()
    # close cursor
    cursor.close()
    # close connection
    conn.close()
except mysql.Error as e:
    print("Error %d %s " % (e.args[0], e.args[1]))
    sys.exit(1)
def raw_test(self):
    """Experimental probe of how pymysql transmits raw strings/bytes into
    text, blob, mediumtext and mediumblob columns. Returns the last insert
    id, or 0 if anything failed (transaction rolled back)."""
    res = 0
    try:
        # execute the SQL statements
        sql = "insert into test(t_t)values(%s)"
        sql2 = "insert into test(big_b)values(%s)"
        sql3 = "insert into test(t_mt)values(%s)"
        sql4 = "insert into test(big_mb)values(%s)"
        # self.cursor.execute("insert into test(big_mb)values(%s)" % b"123abc")
        # self.cursor.execute("insert into test(big_mb)values('%s')" % b"123abc")
        # The driver's escaping of quotes is quirky here.
        # self.cursor.execute("insert into test(big_mb)values('b'123abc'')")
        self.cursor.execute("insert into test(big_mb)values(\"b'123abc'\")"
                            )  # Mind the quoting when writing raw SQL inside
                               # Python source — this actually inserts a string.
        # The final SQL the driver sends is bytes:
        # b'insert into test(big_mb)values("b\'123abc\'")'
        self.cursor.execute(
            "insert into test(big_mb)values(b'0101')"
        )  # SQL statement insert into test(big_mb)values(b'0101') inserts bits.
        self.cursor.execute(
            "insert into test(big_mb)values(0xff12)")  # inserts bytes.
        # Storing binary data: whether or not the string is converted to
        # bytes first, the driver converts to bytes internally. SQL
        # parameters become str, then the final statement is sent as b""
        # for MySQL to execute: execute -> cursors.query -> Connection.query.
        # Python hex escapes are likewise turned into SQL hex forms.
        self.cursor.execute(
            sql, pymysql.Binary("\x00\x78 123abc")
        )  # pymysql.Binary does little — it is just bytes(); it cannot
           # convert a str either, since Binary lacks an encoding parameter
           # such as utf-8.
        self.cursor.execute(
            sql, "\x00\x78 123abc中")  # pymysql.Binary(b"abc123"))
        self.cursor.execute(
            sql, b"\x00\x78 123abc")  # pymysql.Binary(b"abc123"))
        self.cursor.execute(
            sql2, "\x00\x78123abc中")  # pymysql.Binary(b"abc123"))
        self.cursor.execute(
            sql2, b"\x00\x78123abc")  # pymysql.Binary(b"abc123"))
        self.cursor.execute(
            sql3, "\x00\x78123abc中")  # pymysql.Binary(b"abc123"))
        self.cursor.execute(
            sql3, b"\x00\x78123abc")  # pymysql.Binary(b"abc123"))
        self.cursor.execute(
            sql4, "\x00\x78123abc中")  # pymysql.Binary(b"abc123"))
        self.cursor.execute(
            sql4, b"\x00\x78123abc")  # pymysql.Binary(b"abc123"))
        # JPEG-like payloads, once as bytes, once as str
        data1 = b'\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01\x00H\x00H\x00\x00\xff\xfe\x00+Optimized by JPEGmini 3.12.0.2\'"\xcbX\xdbj-\xdc[S\xff\xd9'
        # the str variant
        data2 = '\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01\x00H\x00H\x00\x00\xff\xfe\x00+Optimized by JPEGmini 3.12.0.2\'"\xcbX\xdbj-\xdc[S\xff\xd9'
        self.cursor.execute(sql4, data1)
        self.cursor.execute(sql4, data2)
        # binary-looking string baked directly into the statement
        sql5 = "insert into test(t_t)values(\"b'\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01'\")"
        sql6 = "insert into test(t_t)values('\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01')"
        # sql7 = "insert into test(t_t)values(\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01)"
        # sql8 = "insert into test(t_t)values(\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01)"
        # #(1366, "Incorrect string value: '\\xFF\\xD8\\xFF\\xE0\\x00\\x10...' for column 't_mt' at row 1")
        # self.cursor.execute(sql3,data1)
        self.cursor.execute(sql3, data2)
        # self.cursor.execute(sql3,b"\xff\xd8\xff\xe0" ) #b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01\x00H")
        # commit to the database
        self.conn.commit()
        # fetch the auto-increment id
        res = self.cursor.lastrowid
    except Exception as e:
        # roll back on any error, logging to a per-thread file
        log.logInfo(repr(e), threading.current_thread().name + ".log")
        print(e)
        self.conn.rollback()
    return res
def signup():
    """Flask signup handler: persist the uploaded photo, insert the
    student row (with the decoded image and its dimensions), render
    index2 on success or bounce back to the register page on any error."""
    try:
        upload = request.files.get("file")
        safe_name = secure_filename(upload.filename)
        upload.save(os.path.join(app.config['UPLOAD_FOLDER'], safe_name))
        picture = cv2.imread(
            os.path.join(app.config['UPLOAD_FOLDER'], safe_name))
        height = picture.shape[0]
        width = picture.shape[1]
        connection = pymysql.connect(host='localhost', user='******',
                                     password='', db='hci')
        try:
            with connection.cursor() as cursor:
                sql = "insert into students values(%s,%s,%s,%s,%s,%s,%s,%s)"
                cursor.execute(sql, (request.form["F_name"],
                                     request.form["L_name"],
                                     request.form["email"],
                                     request.form["password"], 0,
                                     pymysql.Binary(picture), height, width))
                connection.commit()
                return render_template("index2.html")
        finally:
            connection.close()
    except Exception as e:
        print(e)
        return redirect(url_for('register'))
def insert_imgs(img):
    """Store one JPEG's raw bytes under a fixed name in `images`, then
    commit and close the module-level connection."""
    cursor = mydb.cursor()
    # Binary() marks the payload so the driver escapes it as binary data
    record = ("6a0f060cf95601d3e8f562670dd21788.jpg", pymysql.Binary(img))
    cursor.execute("Insert into images values(%s,%s)", record)
    mydb.commit()  # commit explicitly in case autocommit is off
    cursor.close()
    # close the database connection
    mydb.close()
def collect_cnt_person(alreadyQue, Mode):
    """Consume detected faces from the shared queue `alreadyQue`, match
    them against the KNN face model, and record entry/departure rows in
    MySQL. `Mode` is "IN" or "OUT". Runs forever, polling every 2 s."""
    # load the database configuration file
    with open("./dbset.pkl", 'rb') as infile:
        dbSet = pickle.load(infile)
    # load the MTCNN graph and classifier file
    with open("models/knn_classifier.pkl", "rb") as infile:
        model = pickle.load(infile)
        embs = model["embs"]
        labels = model["labels"]
        classes_num = model["classes"]
    # load the member names corresponding to the model
    names = []
    fp = open("./imgs/members.txt")
    lines = fp.readlines()
    for line in lines:
        line = line.split(' ')
        names.append(line[2])
    # faces read from the shared queue land in these lists after matching
    allPassList = []  # the final record of which people are inside
    person_pass_ls = []
    # counts of "none" markers read from the three cameras' streams
    leftNoneCnt = 0
    rightNoneCnt = 0
    middleNoneCnt = 0
    # per-day visitor id counter
    id_cnt = 0
    # two timestamps used to detect the start of a new day
    lastTime = now_time()
    nowTime = now_time()
    # number of consecutive empty reads of the queue
    emptyCnt = 0
    # whether the model has been reloaded during the nightly window today
    model_updated = False
    # endless polling loop (blocks 2 s per iteration via time.sleep below)
    while True:
        # first: reset the visitor id counter when a new day starts
        lastTime = nowTime
        nowTime = now_time()
        if lastTime[6:8] != nowTime[6:8]:
            id_cnt = 0
        # then: reload the re-trained model once during the nightly window
        # NOTE(review): checks "01":"30" although the surrounding comments
        # said 2:30 am — confirm the intended time
        if nowTime[8:10] == "01" and nowTime[
                10:12] == "30" and not model_updated:
            with open("models/knn_classifier.pkl", "rb") as infile:
                model = pickle.load(infile)
                embs = model["embs"]
                labels = model["labels"]
                classes_num = model["classes"]
            model_updated = True
        elif model_updated and nowTime[8:10] != "01" and nowTime[10:12] != "30":
            model_updated = False
        emptyf = alreadyQue.empty()
        if emptyf:
            emptyCnt += 1
        else:
            emptyCnt = 0
        # drain the queue: count per-camera "none" markers, match real faces
        while not emptyf and emptyCnt < 2:
            face = alreadyQue.get()
            if face == "rightNone":
                rightNoneCnt += 1
            elif face == "leftNone":
                leftNoneCnt += 1
            elif face == "middleNone":
                middleNoneCnt += 1
            else:
                the_face = face[0]
                pass_flag = face[1]
                match_face(the_face, pass_flag, allPassList)
            emptyf = alreadyQue.empty()
        print(rightNoneCnt, middleNoneCnt, leftNoneCnt)
        # once at least two cameras reported "none" and the queue stayed
        # empty, flush the accumulated records to the database
        if rightNoneCnt + middleNoneCnt + leftNoneCnt >= 2 and emptyCnt > 1:
            if Mode == "IN":
                print("IN:", allPassList)
            else:
                print("OUT", allPassList)
            for face_record in allPassList:
                if face_record[2] < 2:
                    continue
                id_cnt += 1
                face = face_record[0]
                # check whether this face belongs to a member
                pro_ls = np.zeros(classes_num)
                member_idx = -1
                for i in range(len(face.embs)):
                    emb = face.embs[i]
                    dis_ls = np.linalg.norm(embs - emb, axis=1)
                    min_dis = np.min(dis_ls)
                    min_idx = np.where(dis_ls == min_dis)[0][0]
                    label = labels[min_idx]
                    print(min_dis, min_idx, label)
                    if min_dis <= 0.80 and min_idx < classes_num * 20:
                        pro_ls[labels[min_idx]] += 1
                    if min_dis <= 0.70 and min_idx >= classes_num * 20:
                        pro_ls[labels[min_idx]] += 1
                    # an extremely small distance means it must be the same
                    # face: save it into the local training set so the model
                    # can be re-trained with it later
                    if min_dis <= 0.50 and min_idx >= 20 * label and min_idx < 20 * label + 20:
                        update_img = face.face_ls[i][:, :, ::-1]
                        update_img = cv2.resize(update_img, (160, 160))
                        update_imgs_path = "./imgs/train_imgs/update_imgs/"
                        update_img_path = update_imgs_path + "{}/".format(
                            label)
                        if not os.path.exists(update_img_path):
                            os.makedirs(update_img_path)
                        imgs_cnt = len(os.listdir(update_img_path))
                        if imgs_cnt >= 30:
                            # cap at 30 images: evict the oldest first
                            first_file = update_img_path + os.listdir(
                                update_img_path)[0]
                            if os.path.exists(first_file):
                                os.remove(first_file)
                            cv2.imwrite(
                                update_img_path +
                                "{}.jpg".format(nowTime + '_' + str(i)),
                                update_img)
                        else:
                            cv2.imwrite(
                                update_img_path +
                                "{}.jpg".format(nowTime + '_' + str(i)),
                                update_img)
                idx = np.where(pro_ls == np.max(pro_ls))[0][0]
                print(pro_ls, len(face.embs))
                # print(pro_ls)
                # print(len(predictions))
                if pro_ls[idx] >= len(face.embs) // 3:
                    member_idx = idx
                    print("Member {} ".format(names[member_idx]) + Mode + '.')
                else:
                    print("Vistor " + Mode + '.')
                # put this record into person_in_ls
                # face_record.append(member_idx)
                # person_in_ls.append(face_record)
                # compose this face's id from id_cnt
                face.id = face.time + str(id_cnt)
                # write this face into the database
                if Mode == "IN":
                    sql = "INSERT INTO entry_record(ID, EntryTime, Picture, Name, Date, Member) VALUES (%s, %s, %s, %s, %s, %s)"
                else:
                    sql = "INSERT INTO departure_record(ID, DepartureTime, Picture, Name, Date, Member) VALUES (%s, %s, %s, %s, %s, %s)"
                # build each field stored in the database
                # picture: middle frame, BGR->RGB, JPEG-encoded bytes
                img = face.face_ls[len(face.face_ls) // 2][:, :, ::-1]
                img = cv2.imencode('.jpg', img)[1]
                img = np.array(img).tostring()
                # name (date + cnt, shown on the website)
                Name = "Vistor"
                if member_idx >= 0:
                    Name = names[member_idx]
                # date
                Date = face.time[0:8]
                # time of passing
                passTime = face.time[8:14]
                # run the database operations
                try:
                    # a fresh connection per record, otherwise problems occur
                    conn = pymysql.connect("39.98.90.118", "zyf", "zyf123456",
                                           "face_recognition",
                                           charset="utf8")
                    cursor = conn.cursor()
                    cursor.execute(sql,
                                   (face.id, passTime, pymysql.Binary(img),
                                    Name, Date, str(member_idx)))
                    if member_idx > -1:
                        if Mode == "IN":
                            sql = "update members set inside = 1 where ID = {}".format(
                                member_idx)
                        else:
                            sql = "update members set inside = 0 where ID = {}".format(
                                member_idx)
                        cursor.execute(sql)
                    if not face_record[1]:
                        sql = "INSERT INTO waring(Name, Time, WarningType) VALUES (%s, %s, %s)"
                        cursor.execute(sql, (Name, face.time, 3))
                    conn.commit()
                    conn.close()
                except:
                    conn.rollback()
            allPassList.clear()
            leftNoneCnt = 0
            rightNoneCnt = 0
            middleNoneCnt = 0
        """
        # 接下来判断离开的人脸列表中是否有与进入的人脸列表中匹配的人脸
        # 我们对于每个离开的人脸,和进入的人脸一一比对,离开的人脸有15张,进入的人脸也有15张,随机比对100次
        # 如果有90张以上比较值小于0.95,则判断为同一个人(注意,facenet官方给定小于1.05即为同一张人脸,我们再缩小到0.95,可后期调整)
        # 判断为同一人后,对数据库进行操作,删除列表中这个进入和离开的人脸
        # 因此每次只要读取进入的人脸列表的长度,既可知道有多少人在内
        i = 0
        while(i < len(alreadyOutList)):
            cnt_true = 0
            cnt_false = 0
            j = 0
            if_find = False
            sum_dis = 0
            cnt = 0
            while(j < len(alreadyInList) and not if_find):
                in_embs = alreadyInList[j].embs
                out_embs = alreadyOutList[i].embs
                rans = random.sample(range(0, len(in_embs)), min(10, len(in_embs)))
                for out_emb in out_embs:
                    for ran in rans:
                        dis = np.linalg.norm(out_emb - in_embs[ran])
                        sum_dis += dis
                        cnt += 1
                        if dis <= 0.95:
                            cnt_true += 1
                        else:
                            cnt_false += 1
                print("average: ", sum_dis / cnt)
                if(sum_dis / cnt < 0.85 and int(alreadyOutList[i].time) > int(alreadyInList[j].time)):
                    if_find = True
                    sql = "UPDATE faces set DepartureTime = %s, Inside = '0' where ID = %s"
                    cursor.execute(sql, (alreadyOutList[i].time[8: 14], alreadyInList[j].id))
                    conn.commit()
                    del alreadyOutList[i]
                    del alreadyInList[j]
                else:
                    if_find = False
                    j += 1
            if if_find:
                continue
            else:
                alreadyOutList[i].no_match_cnt += 1
                i += 1
            print("next!")
        # 对于alreadyOutList中的人脸,如果匹配20次都无法匹配,则删除
        i = 0
        while(i < len(alreadyOutList)):
            if alreadyOutList[i].no_match_cnt >= 20:
                del alreadyOutList[i]
            else:
                i += 1
        """
        if Mode == "IN":
            print("There are {} person(s) have came in.".format(id_cnt))
        else:
            print("There are {} person(s) have came out.".format(id_cnt))
        # print(person_in_ls)
        time.sleep(2)
# Test-data generator fragment: for each hour j, create a random number of
# fake entry/departure rows with random visitor or member pictures.
# NOTE(review): this chunk appears truncated — the second `for j` loop body
# is cut off mid-way.
for j in range(0, 8):
    time = "%02d" % j  # NOTE(review): shadows any imported `time` module
    cnt = np.random.randint(0, 3)
    for k in range(0, cnt):
        time += "%02d" % np.random.randint(0, 60)  # random minute
        time += "%02d" % np.random.randint(0, 60)  # random second
        v_m = np.random.randint(0, 2)  # 0 = visitor, 1 = member
        if v_m == 0:
            name = 'Vistor'
            ID = date + time + '%d' % id_cnt
            pic_id = np.random.randint(0, 11)
            pic = cv2.imread('./db_test_pic/vistors/{}.jpg'.format(pic_id))
            # NOTE(review): the second assignment overwrites the first, so
            # only departure_record rows are ever inserted — confirm intent
            sql = "INSERT INTO entry_record(ID, EntryTime, Picture, Name, Date, Member) VALUES (%s, %s, %s, %s, %s, %s)"
            sql = "INSERT INTO departure_record(ID, DepartureTime, Picture, Name, Date, Member) VALUES (%s, %s, %s, %s, %s, %s)"
            cursor.execute(
                sql, (ID, time, pymysql.Binary(pic), name, date, str(-1)))
        else:
            member_id = np.random.randint(0, 4)
            ID = date + time + '%d' % id_cnt
            name = names[member_id]
            pic = cv2.imread(
                './db_test_pic/members/{}.jpg'.format(member_id))
            # NOTE(review): same overwrite as above
            sql = "INSERT INTO entry_record(ID, EntryTime, Picture, Name, Date, Member) VALUES (%s, %s, %s, %s, %s, %s)"
            sql = "INSERT INTO departure_record(ID, DepartureTime, Picture, Name, Date, Member) VALUES (%s, %s, %s, %s, %s, %s)"
            cursor.execute(sql, (ID, time, pymysql.Binary(pic), name, date,
                                 str(member_id)))
        id_cnt += 1
for j in range(8, 10):
    time = "%02d" % j
    cnt = np.random.randint(10, 12)
def Binary(content):
    """Wrap `content` for binary transmission via pymysql.

    Returns the wrapped value, or None when pymysql rejects the input
    (e.g. a type it cannot treat as a byte buffer).
    """
    try:
        return pymysql.Binary(content)
    except Exception as e:
        # Log the actual failure: the old message ("exception on
        # reconnect") described an unrelated operation and discarded the
        # exception entirely.
        print("pymysql.Binary failed:", e)
        return None
def upload_pfile(self, cur_dir):
    '''
    Build a dict describing, per crawler plugin, its attribute file, its
    original-document folder and its txt folder.
    Key: plugin name; value: [attribute-file path, *_ori folder path,
    *_txt folder path], with None as a placeholder when the ori or txt
    folder does not exist. Then import each matched document (metadata
    from the Excel attribute file + raw file bytes) into DOCUMENTS and
    upload the file to HDFS.
    '''
    f_all_dict = {}
    print("mmmmmmmmmmmmmm", cur_dir)
    for f in os.listdir(cur_dir):
        if f.find('文献属性.xls') > 0 or f.find('文献属性.xlsx') > 0:
            # build the three candidate paths, tolerating a trailing '/'
            filepro = cur_dir + f if cur_dir[
                -1] == '/' else cur_dir + '/' + f
            filepath = cur_dir + f[:f.find('文献属性')] + '_ori/' if cur_dir[
                -1] == '/' else cur_dir + '/' + f[:f.find('文献属性'
                                                          )] + '_ori/'
            filetxt = cur_dir + f[:f.find('文献属性')] + '_txt/' if cur_dir[
                -1] == '/' else cur_dir + '/' + f[:f.find('文献属性'
                                                          )] + '_txt/'
            # append the path when it exists, else a None placeholder
            f_all_dict.setdefault(
                f[:f.find('文献属性')], []).append(filepro) if os.path.exists(
                    filepro) else f_all_dict.setdefault(
                        f[:f.find('文献属性')], []).append(None)
            f_all_dict.setdefault(
                f[:f.find('文献属性')], []).append(filepath) if os.path.exists(
                    filepath) else f_all_dict.setdefault(
                        f[:f.find('文献属性')], []).append(None)
            f_all_dict.setdefault(
                f[:f.find('文献属性')], []).append(filetxt) if os.path.exists(
                    filetxt) else f_all_dict.setdefault(
                        f[:f.find('文献属性')], []).append(None)
    for f_key in f_all_dict.keys():
        if f_all_dict[f_key][0] is not None:
            book = xlrd.open_workbook(f_all_dict[f_key][0])
            sheet = book.sheet_by_index(0)
            ut = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
            ops = []
            '''
            逐行读取excel中信息,当爬取标注为CRA开头时,不添加上传标志UPA信息
            '''
            # (i.e. read the excel row by row; rows already tagged "CRA"
            # do not get an extra "UPA" upload tag)
            for r in range(1, sheet.nrows):
                if sheet.cell(r, 0).value[0:3] == "CRA":
                    # values = (sheet.cell(r, 0).value[3:], sheet.cell(r, 2).value, sheet.cell(r, 3).value,sheet.cell(r, 4).value,sheet.cell(r, 5).value,sheet.cell(r, 6).value,sheet.cell(r, 7).value,sheet.cell(r, 8).value,sheet.cell(r, 9).value,sheet.cell(r,10).value,ut)
                    values = (sheet.cell(r, 0).value[3:],
                              sheet.cell(r, 3).value,
                              sheet.cell(r, 4).value,
                              sheet.cell(r, 5).value,
                              sheet.cell(r, 2).value,
                              sheet.cell(r, 7).value,
                              sheet.cell(r, 8).value,
                              sheet.cell(r, 9).value,
                              sheet.cell(r, 10).value, ut,
                              sheet.cell(r, 6).value)
                    # print("6666666666666", values)
                    ops.append(values)
                else:
                    # uploaded (non-crawled) row: synthesize a UPA tag
                    tag = "UPA" + time.strftime(
                        '%Y%m%d%H%M%S', time.localtime(time.time()))
                    values = (tag[3:], str(sheet.cell(r, 3).value),
                              str(sheet.cell(r, 4).value),
                              str(sheet.cell(r, 5).value),
                              tag + str(r).zfill(4) +
                              str(sheet.cell(r, 2).value),
                              str(sheet.cell(r, 7).value),
                              str(sheet.cell(r, 8).value),
                              str(sheet.cell(r, 9).value),
                              str(sheet.cell(r, 10).value), ut,
                              str(sheet.cell(r, 6).value))
                    # print("555555555555", values)
                    ops.append(values)
            if f_all_dict[f_key][1] is not None:
                f_list = os.listdir(f_all_dict[f_key][1])
                f_list_doc = []
                '''找到原文文件路径下所有文档,存到f_list_doc中'''
                # (collect every document in the original-files folder)
                for f in f_list:
                    if os.path.splitext(f)[1] in {
                            '.caj', '.pdf', '.txt', '.doc', '.docx'
                    }:
                        self.suffix = os.path.splitext(f)[1]
                        f_list_doc.append(f)
                temp_list = []
                # print("f_list_doc", len(f_list_doc)) 40
                # keep only attribute rows whose title matches a file name
                for f in f_list_doc:
                    for item in ops:
                        # print("item", item[10][0:3])
                        if item[4][0:3] == 'CRA':
                            if item[4] == os.path.splitext(f)[0]:
                                temp_list.append(item)
                        else:
                            if item[4][21:] == os.path.splitext(f)[0]:
                                temp_list.append(item)
                '''文件存在且与属性文件一一匹配的'''
                # (rows whose file exists and matches the attribute sheet)
                save_file = []
                for file in temp_list:
                    fileUuid = (str(uuid.uuid1()).replace("-", ""), )
                    if file[4][0:3] == 'CRA':
                        filepath = f_all_dict[f_key][1] + file[
                            4] + '.' + file[8]
                    else:
                        filepath = f_all_dict[f_key][1] + file[4][
                            21:] + '.' + file[8]
                    self.upload_filepath = filepath
                    try:
                        b = open(filepath, "rb").read()
                        origin = (pymysql.Binary(b), )
                        newfile = fileUuid + ("1010", "ZH") + file + origin
                        save_file.append(newfile)
                    except OSError:
                        # NOTE(review): '未找到文件' has no %s placeholder,
                        # so this %-format raises TypeError — confirm the
                        # intended message was '未找到文件%s'
                        if file[4][0:3] == 'CRA':
                            print('未找到文件' % file[4])
                        else:
                            print('未找到文件' % file[4][21:])
                cursor = self.conn.cursor()
                for n in range(0, len(save_file)):
                    # global stop flag: 1 means keep importing
                    a = globalVar.get_st()
                    print(a)
                    if a == 1:
                        self.CrawProcess.emit(
                            str("正在导入%s\n" % (save_file[n][7])))
                        try:
                            self.hdfs_ip = "http://192.168.1.107:50070"
                            self.inputpath = '/4516/upload'
                            self.client = hdfs.Client(self.hdfs_ip)
                            if self.configs['flag'] == True:
                                # (several commented-out alternative SQL
                                # variants removed for readability)
                                # txt import variant (Oracle-style :N binds)
                                sql = "insert into DOCUMENTS(UUID,SOURCE_CODE,LANG,CRA_DT,AUTHOR,KYWRD,AURDEPT,TITLE,JOURNAL,PUB_DT,URL,SUFFIX,UPLD_DT,ABSTRACT,CONTENT_ORI)values(:1, :2, :3, to_date(:4,'yyyy-mm-dd hh24:mi:ss'), :5, :6, :7, :8, :9, to_date(:10,'yyyy-mm-dd hh24:mi:ss'), :11, :12, to_date(:13,'yyyy-mm-dd hh24:mi:ss'), :14,:15)"
                                # upload one row to oracle
                                cursor.executemany(sql, save_file[n:n + 1])
                                try:
                                    # upload the file itself to hdfs
                                    t = self.upload_filepath.rindex('/')
                                    self.client.upload(
                                        self.inputpath,
                                        self.upload_filepath[0:t + 1] +
                                        save_file[n][7] + self.suffix)
                                except Exception as e:
                                    print("upload error!", e)
                            else:
                                break
                        except Exception as e:
                            print("1111111", e)
                    else:
                        break
                self.CrawProcess.emit("导入完成")
                cursor.close()
                self.conn.commit()
        if f_all_dict[f_key][2] is not None:
            # also import the plain-text versions, if present
            self.upload_txt(f_all_dict[f_key][2])