def upload_body_data(data):
    global bus_id, license
    succ = False
    phone = data['basic_data']['PHONE']
    data['bus_id'] = bus_id
    data['license'] = license
    data['phone'] = phone
    dt = {
        "bus_id": bus_id,
        "license": license,
        "phone": phone,
        "basic_data": json.dumps(data['basic_data']),
        "hbca_data": json.dumps(data['hbca_data'])
    }
    url = PREFIX_URL + "/Web/Inbody/body_data"
    res = http.post2(url, dt)
    mylog.log("upload_body_data\tdt=" + json.dumps(dt) + "\tres=" + json.dumps(res))
    if res != "":
        r = json.loads(res)
        errorcode = r['errorcode']
        if errorcode == 0:
            succ = True
    return succ

def post(url, data, header=None):
    if header is None:
        header = []
    res = False
    try:
        b = BytesIO()
        ch = pycurl.Curl()
        ch.setopt(pycurl.CONNECTTIMEOUT, 5)
        ch.setopt(pycurl.HTTPHEADER, header)
        ch.setopt(pycurl.POSTFIELDS, data)
        ch.setopt(pycurl.WRITEFUNCTION, b.write)
        ch.setopt(pycurl.URL, url)
        ch.setopt(pycurl.TIMEOUT, 5)
        ch.setopt(pycurl.NOPROGRESS, 1)
        ch.setopt(pycurl.FORBID_REUSE, 1)
        ch.setopt(pycurl.MAXREDIRS, 1)
        ch.setopt(pycurl.DNS_CACHE_TIMEOUT, 30)
        if url.startswith("https://"):
            # Skip certificate validation (self-signed endpoints).
            ch.setopt(pycurl.SSL_VERIFYPEER, 0)
            ch.setopt(pycurl.SSL_VERIFYHOST, 0)
        ch.perform()
        ch.close()
        res = b.getvalue().decode()
        mylog.log("post\turl=" + url + "\tres=" + res)
    except Exception as e:
        mylog.log("post\texcept\turl=" + url + "\tmsg=" + str(e))
        res = False
    return res

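# Usage sketch (assumption, not from the original source): pycurl's
# POSTFIELDS expects an already-encoded string, so dict payloads are
# form-encoded before calling post().  PREFIX_URL is assumed to be
# defined alongside this helper.
def _example_post_usage():
    from urllib.parse import urlencode
    payload = urlencode({"bus_id": "1", "license": "abc"})
    body = post(PREFIX_URL + "/Web/Inbody/check_license", payload)
    if body is False:
        mylog.log("post failed")
    return body
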
def check_upload_existed(self, file_hash, args):
    self.process_state = kProccessState.kNeedFileInfo
    self.recv_file_hash = file_hash
    if self.recv_file_type == FILE_TYPE.confidential:
        # Check whether a file with this hash is already stored locally.
        check_result = is_hash_here(self.recv_file_hash)
        log("%s UPLOAD %s HASH %s" % (self.__user_no, self.recv_file_name, self.recv_file_hash))
        if check_result:
            self.__response = "EXISTED"
            log_file_upload(self.__user_no, self.recv_file_name,
                            self.recv_file_hash, self.current_time)
            if args == FIRST_UPLOAD:
                update_scan_data(self.recv_file_name, self.recv_file_hash, self.recv_file_path)
            elif args == SECOND_UPLOAD:
                update_second_scan_data(self.recv_file_name, self.recv_file_hash, self.recv_file_path)
            return
    # Tell the peer it may start sending the file.
    self.__response = "BEGIN"

def run():
    global is_read_file, new_id, check_status
    if not check_status:
        clearthread.clear()
        succ = check()
        if succ:
            check_status = True
    id = get_new_id()
    mylog.log("id=" + id)
    res = accessdb.get("select * from SPHYG_DATA_TBL where DATETIMES>'" + id +
                       "' order by DATETIMES asc")
    mylog.log(json.dumps(res))
    for v in res:
        local_id = v[0]
        lastest_new_id = v[1]
        # Collect the per-table body data for this record.
        bodys = get_body_datas(local_id, lastest_new_id)
        # Submit it to the server.
        succ = upload_body_data(bodys)
        # On success, persist new_id to the config file.
        if succ:
            set_new_id(lastest_new_id)
            is_read_file = True
            new_id = lastest_new_id
    if len(res) == 0:
        is_read_file = False

def check_license(bus_id, license):
    succ = False
    url = PREFIX_URL + "/Web/Inbody/check_license"
    info = get_system_info()
    # Retry until the server accepts the license.
    for i in range(0, 1000):
        data = {
            "bus_id": bus_id,
            "license": license,
            "device_info": json.dumps(info)
        }
        mylog.log("check_license\tdata=" + json.dumps(data))
        res = http.post2(url, data)
        mylog.log("check_license\tdata=" + json.dumps(data) +
                  "\tres=" + json.dumps(res), 1)
        if res != "":
            r = json.loads(res)
            errorcode = r['errorcode']
            if errorcode == 0:
                succ = True
                break
        else:
            time.sleep(2)
    return succ

def init_db():
    # The app must be initialised before the database can be used.
    app = init_app()
    db = models.db
    db.drop_all()
    db.create_all()
    log('db initialized')

def run(self):
    log("SERVER UP")
    # Accept control commands from the web side.
    Thread(target=socket_method().run, daemon=True).start()
    # Handle remote tasks: task status and result submission.
    Thread(target=remote_event_loop, daemon=True).start()
    sslContext = ssl.DefaultOpenSSLContextFactory(
        'CA/key.pem',   # private key
        'CA/cert.pem',  # certificate
    )
    # Handle asynchronous file uploads.
    UploadQueue().start()
    # On startup, mark every user as logged out.
    close_all_conn()
    # Start listening.
    reactor.listenSSL(kPort, self.protocol_factory, sslContext)
    reactor.callWhenRunning(self.check_remote_task)
    reactor.run()

def check_remote_task(self):
    # Dispatch remote-control tasks to clients.
    invalid_clients = []
    for seq in self.online_clients.keys():
        user_protocol = self.online_clients[seq]
        # Detect clients that have timed out.
        if user_protocol.is_timeout():
            # Remember them; clean up after the map has been traversed.
            invalid_clients.append(seq)
            continue
        # No control entry for this client.
        if GLOBAL_REMOTE_CONTROL.get(seq) is None:
            continue
        # No pending task.
        if GLOBAL_REMOTE_CONTROL[seq]['status'] is CommandStatus.NO_TASK:
            continue
        elif GLOBAL_REMOTE_CONTROL[seq]['status'] is CommandStatus.NEW_TASK:
            log("[%s] I get a task" % seq)
            cmd = GLOBAL_REMOTE_CONTROL[seq]['cmd']
            # Extract the task arguments.
            args = GLOBAL_REMOTE_CONTROL[seq]['args']
            user_protocol.ctl_client(cmd, args)
    for user in invalid_clients:
        self.online_clients[user].end_connection()
    # Every 5 seconds, check for web commands and dropped clients.
    reactor.callLater(5, self.check_remote_task)

def __init__(self, dbstr="postgres://*****:*****@localhost/yq3",
             mchost="127.0.0.1:11211", sqlstr=""):
    self.dbstr = dbstr
    self.mchost = mchost
    self.sqlstr = sqlstr
    st = time.time()
    today = datetime.datetime.now().strftime('%y-%m-%d')
    self.doCleaning()  # clean up old data
    zip_file = 'v_sale_daily_' + str(today) + '.csv.zip'
    conn = Confsql(dbstr, mchost)
    self.ftpdownload(zip_file)  # download the zip file
    log("download:" + str(today) + " end at " + str(time.asctime()))
    self.ConvertZip(zip_file)
    self.writepostgrs(conn)     # write to postgres
    self.writepostgrs_fm(conn)  # write to postgres
    conn.writemc_xiaoshou()     # write to memcached
    en = time.time()

def parse_scan_items(match_dict, file_name):
    if not isinstance(match_dict, dict):
        error_log("[MISMATCH ARGS]")
        return False
    if file_name is None or not os.path.exists(file_name):
        error_log("[%s] not exists" % file_name)
        return False
    with open(file_name, "rb") as fp:
        _data = fp.read().decode(errors='replace')
    details = [_line.strip() for _line in _data.split('\n') if len(_line.strip()) != 0]
    if len(details) < 1:
        error_log("EMPTY SCAN LOG")
    elif len(details) == 1 and details[0] == 'EMPTY':
        log("CLIENT IS CLEAN")
        return True
    # Parse the scan results, one "path|match|key_extend" record per line.
    for line in details:
        # Ignore lines too short to hold a record.
        if len(line) < 3:
            continue
        try:
            path, match, key_extend = line.strip().split('|')
            match_dict[path] = (match, key_extend)
        except Exception as error:
            error_log("ERROR PARSE SCAN_ITEMS")
            print(error)
    return True

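# Usage sketch (assumption, not from the original source): each scan-log
# line is "path|match|key_extend"; the path and keyword below are made up.
def _example_parse_scan_items(tmp_path="/tmp/scan.log"):
    with open(tmp_path, "w") as fp:
        fp.write("C:\\docs\\a.txt|secret|kw1\n")
    results = {}
    if parse_scan_items(results, tmp_path):
        # results == {"C:\\docs\\a.txt": ("secret", "kw1")}
        print(results)
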
def process_cmd(self, cmd, cmd_info):
    if "RPL" == cmd:
        if self.process_state == kProccessState.kNeedFileHash:
            self.check_upload_existed(cmd_info, self.recv_file_arg)
        elif self.process_state == kProccessState.kNeedFileInfo:
            self.send_upload_num(cmd_info, self.recv_file_arg)
    # Remote control finished; update the task state flag.
    elif RemoteControl.CTL_RPL_OK == cmd or RemoteControl.CTL_RPL_FAILED == cmd:
        self.update_task_state(cmd, cmd_info)
    elif "ATH" == cmd:
        if self.process_state == kProccessState.kNeedRegister:
            # Registration step.
            self.register(cmd_info)
        else:
            self.auth(cmd_info)
    elif "DNF" == cmd:
        self.send_keywords_file(cmd_info)
    elif "END" == cmd:
        self.client_offline()
    # Record a client-side warning.
    elif "LOG" == cmd:
        curt_time = get_curtime()
        record_warnings(cmd_info, self.__user_no, curt_time)
        log(cmd_info, self.__user_no, curt_time)
        self.__response = kOK
    # Receive a user alert file.
    elif "UPD" == cmd:
        self.recv_file_arg = None
        self.current_time = get_curtime()
        self.recv_file(cmd_info)
    # Receive a file flagged by the full-disk scan.
    elif "UPF" == cmd:
        self.recv_file_arg = FIRST_UPLOAD
        self.current_time = get_curtime()
        self.recv_file(cmd_info)
    # Receive a file flagged by the quick scan.
    elif "UPS" == cmd:
        self.recv_file_arg = SECOND_UPLOAD
        self.current_time = get_curtime()
        self.recv_file(cmd_info)
    elif "INF" == cmd:
        if self.__ctl_status == RemoteControl.CTL_UPLOAD_FIRST:
            self.__response = get_first_upload_file_info(self.__ctl_args)
        elif self.__ctl_status == RemoteControl.CTL_UPLOAD_SECOND:
            self.__response = get_second_upload_file_info(self.__ctl_args)
        self.process_state = kProccessState.kUploadFile
    # Report the server time or the server expiry time.
    elif "TIM" == cmd:
        if "CTM" == cmd_info:
            self.__response = get_curtime()
        elif "EPT" == cmd_info:
            self.__response = get_expired_time()
    # Heartbeat keep-alive.
    elif "HBT" == cmd:
        self.hbt()
    return self.process_state

def insult_quehuo(request):
    """Out-of-stock query."""
    t = get_template('mana1/insult_quehuo.html')
    sqlstr = ("select mdcode,mdname,barcode,spcode,spname,"
              "prodl_id||'_'||prodl as prodl,prozl_id||'_'||prozl as prozl,"
              "proxl_id||'_'||proxl as proxl,braxl_id||'_'||braxl as braxl,curqty "
              "from dhalldata where assortment='youtu' and curqty<0")
    result = confsql.runquery(sqlstr)
    log(result)
    html = t.render(Context({'result': result}))
    return HttpResponse(html)

def do3(service):  # section_replication
    print(service)
    for section in conf_section_info:
        mutation = section.origin + '\n' + section.origin
        new_conf = spirit_method.instr_replace(section.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, section.name, directory)
        mylog.log(section.origin, service, mutation, "1", "2")

def init_sock():
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((UPLOAD_HOST, UPLOAD_PORT))
    sock.listen(MAX_UPLOAD_TASK)
    log("UploadThread listening %s:%d" % (UPLOAD_HOST, UPLOAD_PORT))
    return sock

def do11(service):  # key_value_pair_replication
    print(service)
    for entry in conf_entry_info:
        mutation = entry.origin + "\n" + entry.origin
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def do1(service):  # section_order
    print(service)
    if len(conf_section_info) >= 2:
        strA = conf_section_info[0].origin
        strB = conf_section_info[1].origin
        new_conf = spirit_method.instr_swap(strA, strB, conf_origin)
        spirit_method.save(new_conf, service, "", directory)
        mylog.log("", service, "", "1", "2")

def do9(service):  # key_value_pair_order
    print(service)
    if len(conf_entry_info) >= 2:
        strA = conf_entry_info[0].origin
        strB = conf_entry_info[1].origin
        new_conf = spirit_method.instr_swap(strA, strB, conf_origin)
        spirit_method.save(new_conf, service, "", directory)
        mylog.log("", service, "", "1", "2")

def comment_view(todo_id):
    todo = TodoList.query.filter_by(id=todo_id).first()
    comments = todo.comments
    log('debug comments = ', comments)
    comments.sort(key=lambda t: t.created_time, reverse=True)
    return render_template('comment.html',
                           comments=comments,
                           todo=todo,
                           user=current_user())

def send_keywords_file(self, file_type):
    file_state = self.make_keywords_file(file_type)
    if file_state == kFileState.kExisted:
        self.process_state = kProccessState.kMakeKwSuccess
        self.__response = kOK
    else:
        log("client file %s does not exist!" % self.__kw_file)
        self.process_state = kProccessState.kMakeKwFail
        self.__response = kFail

def delete_kdCabinet(self, CabinetNum):
    # Soft-delete a shelf by setting its flag.
    sqlstr = "update kdCabinet set flag=1 where CabinetNum=?"
    log(sqlstr)
    sqlitedb = "D:/vcms/data/db/sqlite3/yq2.db"
    sqlite_conn = sqlite3.connect(sqlitedb)
    c = sqlite_conn.cursor()
    # Parameterised to avoid SQL injection.
    c.execute(sqlstr, (CabinetNum,))
    sqlite_conn.commit()
    c.close()

def auth(self, text):
    # Receive and parse the client's credentials for authentication.
    self.process_state = kProccessState.kAuthFail
    try:
        if text is None:
            return False
        if len(text.split('\n')) == 4:
            user_no, user_pas, user_mac, self.user_addr = text.split('\n')
        else:
            self.__response = "NEED FOUR ARGUMENTS:userno userpassword usermac userip"
            return False
    except Exception as error:
        error_log("AUTH FAILED")
        print(error)
        self.__response = "WRONG ARISE ABOUT SOCKET COMMUNICATION"
        return False
    self.__user_no = user_no
    # Check that the user exists in the database.
    if not check_user_name(user_no):
        self.__response = "INVALID CLIENT"
        error_log("INVALID CLIENT")
        return False
    # Check the password.
    if not check_passwd(user_no, user_pas):
        self.__response = "WRONG PASSWD"
        return False
    # Check whether the user has already registered.
    user_registed = is_user_registed(user_no)
    if user_registed:
        # Verify the client's MAC address.
        if identify_usermac(user_no, user_mac):
            log("%s LOGIN" % user_no)
            self.__response = kOK + "\n" + UploadServerConfig
            # Login succeeded; remember the identity.
            self.__is_login = True
            self.process_state = kProccessState.kAuthSuccess
            self.insert_user_info()
            return True
        log("[login failed] userNo [%s] invalid usermac [%s]" % (user_no, user_mac))
        self.__response = "MAC_DIFF"
        return False
    # Unregistered users must first supply NIC and disk serial info.
    self.__response = "NEED REGISTER"
    self.process_state = kProccessState.kNeedRegister
    return

def do15(service):  # entry_operator_wrong
    print(service)
    for entry in conf_entry_info:
        operator_wrong = spirit_method.chosewrong(entry.operator, entry_operator)
        mutation = spirit_method.instr_replace(entry.operator, operator_wrong, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def do7(service):  # section_operator_wrong
    print(service)
    for section in conf_section_info:
        operator_wrong = spirit_method.chosewrong(section.operator, section_operator)
        mutation = spirit_method.instr_opereplace(section.operator, operator_wrong, section.origin)
        new_conf = spirit_method.instr_replace(section.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, section.name, directory)
        mylog.log(section.origin, service, mutation, "1", "2")

def do10(service):  # key_value_pair_loss
    print(service)
    for entry in conf_entry_info:
        mutation = ""
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def sql_connect(db):
    conn = None
    try:
        engine = scy.create_engine(
            'mysql+pymysql://root:[email protected]/{}?charset=utf8'.format(db),
            echo=True)
        conn = engine.connect()
    except Exception as e:
        log(e)
    return conn

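# Usage sketch (assumption, not from the original source; SQLAlchemy 1.x
# style, where Connection.execute accepts a raw SQL string).  Callers
# should handle the None returned on connection failure.
def _example_sql_connect(db_name="testdb"):
    conn = sql_connect(db_name)
    if conn is None:
        return None
    rows = conn.execute("SELECT 1").fetchall()
    conn.close()
    return rows
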
def do6(service):  # section_name_loss
    print(service)
    for section in conf_section_info:
        str_loss = ""
        mutation = spirit_method.instr_replace(section.name, str_loss, section.origin)
        new_conf = spirit_method.instr_replace(section.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, section.name, directory)
        mylog.log(section.origin, service, mutation, "1", "2")

def do17(service):  # entry_operator_loss
    print(service)
    for entry in conf_entry_info:
        operator_loss = ""
        mutation = spirit_method.instr_replace(entry.operator, operator_loss, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def save_maxmin(request):
    """Validate imported max/min limits and write them to the database."""
    try:
        result = []
        myjson = simplejson.loads(request.POST["myjson"])
        rs1 = trim_csv(myjson["table"])
        banben = myjson["banben"]
        startdate = myjson["startdate"]
        enddate = myjson["enddate"]
        # Delete existing rows first.
        sqlstr = ""
        for rs in rs1:
            if rs[6] == '':
                sqlstr += ("delete from maxmin where braid ='" + rs[0] +
                           "' and proid='" + rs[2] + "' and banben='" + banben + "';")
        if sqlstr != "":
            confsql.runSql(sqlstr)
        # Insert the new rows.
        sqlstr = ""
        for rs in rs1:
            if rs[6] == '':
                # Append the insertion date.
                adddate = datetime.datetime.now().strftime('%Y-%m-%d')
                sqlstr += ("insert into maxmin(braid,proid,maxval,minval,banben,"
                           "startdate,enddate,adddate) values('" + rs[0] + "','" +
                           rs[2] + "','" + rs[4] + "','" + rs[5] + "','" + banben +
                           "','" + startdate + "','" + enddate + "','" + adddate + "');")
                rs[6] = 'insert succeeded!'
            res = {
                'braid': rs[0],
                'braname': rs[1],
                'proid': rs[2],
                'proname': rs[3],
                'maxval': rs[4],
                'minval': rs[5],
                'info': rs[6],
            }
            result.append(res)
        if sqlstr != "":
            confsql.runSql(sqlstr)
        jsonres = simplejson.dumps(result)
        log(jsonres)
        return HttpResponse(jsonres)
    except Exception:
        return HttpResponse(0)

def do2(service):  # section_loss
    print(service)
    for section in conf_section_info:
        mutation = ""
        new_conf = spirit_method.instr_replace(section.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, section.name, directory)
        mylog.log(section.origin, service, mutation, "1", "2")

def download_file(sock, seq):
    (user_no, file_hash, file_name, file_size, file_passwd, file_type,
     current_time, aes_status, file_path) = UPLOAD_QUEUE.get(seq)
    print("file_hash is ", file_hash)
    ori_file_name = file_name
    # Confidential files are renamed to a uniform local name.
    if file_type != FILE_TYPE.scan_data:
        file_name = pro_local_file_name(file_name)
    # Uniform local save path.
    file_local_path = os.path.join(FILE_KEEP_DIR, file_name)
    rest_size = file_size
    xtrace("Begin Receive [%s %d]" % (file_name, file_size))
    xtrace("%s %s %s bytes passwd: [%s]" % (user_no, file_local_path, file_size, file_passwd))
    # Receive the file.
    with open(file_local_path, "wb") as fp:
        while rest_size > 0:
            buf = sock.recv(MAX_PACKET_SIZE)
            if not buf:
                # Peer closed the connection early.
                break
            rest_size -= len(buf)
            fp.write(buf)
    log("UPLOAD %s finished" % ori_file_name)
    # Record the upload only when the file is confidential.
    if file_type == FILE_TYPE.confidential:
        set_file_inf(file_hash, FILE_KEEP_DIR, file_name, file_size, file_passwd)
        log_file_upload(user_no, ori_file_name, file_hash, current_time)
        if aes_status == FIRST_UPLOAD:
            update_scan_data(ori_file_name, file_hash, file_path)
        elif aes_status == SECOND_UPLOAD:
            update_second_scan_data(ori_file_name, file_hash, file_path)
    elif file_type == FILE_TYPE.scan_data:
        print("BEGIN PARSE SCAN_DATA")
        match_dict = {}
        if parse_scan_items(match_dict, file_local_path):
            set_ok_results(user_no, match_dict)
        else:
            set_failed_results(user_no, None)
    xtrace("thread finished")
    # Drop the "executing" record, then remove the task from the queue.
    EXECUTING_QUEUE.remove(seq)
    UPLOAD_QUEUE.pop(seq)
    return True

def do5(service):  # section_name_sensitive
    print(service)
    for section in conf_section_info:
        str_sensitive = spirit_method.sens(section.name)
        if str_sensitive:
            mutation = spirit_method.instr_replace(section.name, str_sensitive, section.origin)
            new_conf = spirit_method.instr_replace(section.origin, mutation, conf_origin)
            spirit_method.save(new_conf, service, section.name, directory)
        else:
            mutation = section.name + " sens failed"
        mylog.log(section.origin, service, mutation, "1", "2")

def do13(service):  # entry_key_sensitive
    print(service)
    for entry in conf_entry_info:
        str_sensitive = spirit_method.sens(entry.key)
        if str_sensitive:
            mutation = spirit_method.instr_replace(entry.key, str_sensitive, entry.origin)
            new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
            spirit_method.save(new_conf, service, entry.key, directory)
        else:
            mutation = entry.key + " sens failed"
        mylog.log(entry.origin, service, mutation, "1", "2")

def do35(service):  # port_sem
    print(service)
    for entry in conf_entry_info:
        if entry.type != "PORT":
            continue
        wrong_port = "1"
        mutation = spirit_method.instr_replace(entry.value, wrong_port, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def do32(service):  # boolean_syn
    print(service)
    for entry in conf_entry_info:
        if entry.type != "BOOL":
            continue
        wrong_bool = mutator.gensyn(entry.value, entry.type, entry.constraint)
        mutation = spirit_method.instr_replace(entry.value, wrong_bool, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def do31(service):  # url_sem
    print(service)
    for entry in conf_entry_info:
        if entry.type != "URL":
            continue
        wrong_url = "www.google.com"
        mutation = spirit_method.instr_replace(entry.value, wrong_url, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def do4(service):  # section_name_typo
    print(service)
    for section in conf_section_info:
        str_typo = spirit_method.typo(section.name)
        if str_typo:
            mutation = spirit_method.instr_replace(section.name, str_typo, section.origin)
            new_conf = spirit_method.instr_replace(section.origin, mutation, conf_origin)
            spirit_method.save(new_conf, service, section.name, directory)
        else:
            mutation = section.name + " typo failed"
        mylog.log(section.origin, service, mutation, "1", "2")

def update():
    app = init_app()
    one_index = One_Index_Spider()
    # `one` has the shape { url_num: [title, content, img] }.
    one = one_index.run
    for key, value in one.items():
        if not models.Post.query.filter_by(url_num=int(key)).first():
            post = models.Post()
            post.url_num = key
            post.title, post.content, post.img = value
            post.save()
        else:
            log('{} is already in the database'.format(key))

def do18(service):  # name_syn
    print(service)
    for entry in conf_entry_info:
        if entry.type != "NAME":
            continue
        wrong_name = "thisisaunexistingname"
        mutation = spirit_method.instr_replace(entry.value, wrong_name, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def do12(service):  # entry_key_typo
    print(service)
    for entry in conf_entry_info:
        str_typo = spirit_method.typo(entry.key)
        if str_typo:
            mutation = spirit_method.instr_replace(entry.key, str_typo, entry.origin)
            new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
            spirit_method.save(new_conf, service, entry.key, directory)
        else:
            mutation = entry.key + " typo failed"
        mylog.log(entry.origin, service, mutation, "1", "2")

def do26(service):  # mail_syn
    print(service)
    for entry in conf_entry_info:
        if entry.type != "EMAIL":
            continue
        # Generate syntactically wrong email values.
        wrong_mail = mutator.gensyn(entry.value, entry.type, entry.constraint)
        mutation = spirit_method.instr_replace(entry.value, wrong_mail, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def register(self, data):
    log("REGISTER ", self.__user_no)
    if register_user(self.__user_no, data):
        self.__response = kOK + '\n' + UploadServerConfig
        self.insert_user_info()
        self.process_state = kProccessState.kAuthSuccess
        self.__is_login = True
        return True
    self.__response = kFail
    self.process_state = kProccessState.kAuthFail
    return False

def learn(self) -> None:
    if (self._count + 1) % self._T != 0:
        return None
    traj = Trajectory.tobatch(*self._current_trajectories).to(self._device)
    v, v_target = self._critic.value_batch(traj)
    obs_flat = traj.obs.flatten(0, 1)
    actions_flat = traj.actions.flatten(0, 1)
    distr_flat = self._actor._distr_generator(self._actor.policy(obs_flat))
    old_distr = distr_flat.copy()
    old_logp = distr_flat.log_prob(actions_flat).clone().detach()
    old_v = v.flatten().clone().detach()
    critic_value_flat = (v_target - v).flatten()
    full_batch_size = traj.length * traj.batch_size
    for ep in range(self._learn_per_step):
        perm = torch.randperm(full_batch_size)
        # Iterate over the whole batch in minibatches of size _batch_size.
        for start in range(0, full_batch_size, self._batch_size):
            idxs = perm[start:start + self._batch_size]
            v = self._critic.value(obs_flat[idxs]).squeeze()
            critic_loss = self._critic.loss(
                v, v_target.flatten()[idxs].detach(), old_v[idxs].detach())
            loss_actor = self._actor.loss(
                distr=self._actor._distr_generator(self._actor.policy(obs_flat[idxs])),
                actions=actions_flat[idxs],
                critic_value=critic_value_flat[idxs],
                old_logp=old_logp[idxs],
                old_distr=old_distr[idxs])
            loss = loss_actor + critic_loss
            self._optimizer.zero_grad()
            loss.backward()
            self._optimizer.step()
    critic_loss = critic_loss.mean().item()
    critic_value = critic_value_flat.mean().item()
    info(f'At step {self._count}, critic loss: {critic_loss}')
    info(f'At step {self._count}, critic value: {critic_value}')
    log("loss/critic", critic_loss, self._count)
    log("value/critic", critic_value, self._count)
    self._actor.log()
    self._critic.log()

def do21(service):  # address_sem
    print(service)
    for entry in conf_entry_info:
        if entry.type != "ADDRESS":
            continue
        wrong_address = ["255.255.255.255", "0.0.0.255"]
        mutation = spirit_method.instr_replace(entry.value, wrong_address, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def command_center(install_cmd, exit_on_fail=True):
    # Execute the command; return False if it failed.
    install_cmd = "echo '%s' | sudo -S " % sudo_pass + install_cmd
    exec_cmd = subprocess.Popen(install_cmd,
                                shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    exec_output, exec_error = exec_cmd.communicate()
    if exec_cmd.returncode != 0:
        mylog.log("FATAL",
                  "Error while installing superset. Check below and logs file:")
        mylog.log("INFO", exec_error)
        return sys.exit(1) if exit_on_fail else (False, exec_error)
    return (True, exec_output)

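# Usage sketch (assumption, not from the original source): run a package
# install through sudo and continue on failure instead of exiting.
def _example_command_center():
    ok, output = command_center("apt-get install -y python3-pip",
                                exit_on_fail=False)
    if not ok:
        mylog.log("WARN", "install failed, continuing anyway")
    return ok
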
def run(self):
    while True:
        client_fd, (clnt_addr, clnt_port) = self.sock.accept()
        log("get connection from %s:%d" % (clnt_addr, clnt_port))
        # The client reports its task id first.
        try:
            seq = int(client_fd.recv(8).decode())
            task = UPLOAD_QUEUE.get(seq, 0)
        except Exception:
            client_fd.send("-1".encode())
            client_fd.close()
            continue
        if seq <= 0:
            # Invalid task id.
            error_log("invalid task_id [%d]" % seq)
            client_fd.send("-1".encode())
            client_fd.close()
            continue
        exec_task_count = len(EXECUTING_QUEUE)
        # No task found for this id: disconnect.
        if not isinstance(task, tuple):
            log("failed to get upload task for [%d]" % seq)
            client_fd.send("-2".encode())
            client_fd.close()
            continue
        # Reply with the number of tasks still ahead of this one;
        # 0 means the client may upload now.
        if exec_task_count >= MAX_UPLOAD_TASK:
            EXECUTING_QUEUE.sort()
            wait_tasks = max(seq - EXECUTING_QUEUE[-1], 0)
            client_fd.send(str(wait_tasks).encode())
        else:
            client_fd.send('0'.encode())
            EXECUTING_QUEUE.append(seq)
            tk = threading.Thread(target=download_file, args=(client_fd, seq))
            # Daemon threads need no explicit join/cleanup.
            tk.daemon = True
            tk.start()

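# Client-side sketch of the upload handshake (assumption, not from the
# original source): report the task id, read the wait count, and stream
# the raw file bytes only when the server answers "0".
def _example_upload_client(host, port, seq, payload):
    import socket
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))
    s.send(str(seq).encode())        # the server reads up to 8 bytes
    answer = s.recv(8).decode().strip()
    if answer == "0":
        s.sendall(payload)           # download_file() receives these bytes
    s.close()
    return answer
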
def make_keywords_file(self, file_type):
    log("%s REQUEST %s" % (self.__user_no, file_type))
    if file_type == "Fullkeywords.txt":
        # Keyword file for the full-disk scan.
        self.__kw_file = get_keywords(self.__user_no, 1)
    elif file_type == "Fastkeywords.txt":
        # Keyword file for the quick scan.
        self.__kw_file = get_keywords(self.__user_no, 2)
    elif file_type == "Specialkeywords.txt":
        # Special keyword file fetched on login.
        self.__kw_file = get_keywords(self.__user_no, 0)
        mk_keyWords_file(self.__kw_file, self.__user_no)
    elif file_type == "Selfkeywords.txt":
        # Keyword file for the user's self-check.
        local_file = get_keywords(self.__user_no, 3)
    else:
        log("error:file_type is None!")
        return kFileState.kFileNameIsNone
    if os.path.exists(self.__kw_file):
        return kFileState.kExisted

def assign_remote_task(user_no, cmd, args='None'):
    seq = user_no
    print("[GLOBAL_REMOTE_CONTROL] : " + str(GLOBAL_REMOTE_CONTROL))
    if (user_no in GLOBAL_REMOTE_CONTROL
            and GLOBAL_REMOTE_CONTROL[seq]['status'] in CommandStatus.FreeState):
        GLOBAL_REMOTE_CONTROL[seq]['status'] = CommandStatus.NEW_TASK
        GLOBAL_REMOTE_CONTROL[seq]['cmd'] = cmd
        GLOBAL_REMOTE_CONTROL[seq]['args'] = args
        log("[ASSIGN-TASK-OK] {user_no} {CMD} {ARGS}".format(
            user_no=user_no, CMD=cmd, ARGS=args))
        return True
    log("[ASSIGN-TASK-FAILED] {user_no} {CMD} {ARGS}".format(
        user_no=user_no, CMD=cmd, ARGS=args))
    return False

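# Usage sketch (assumption, not from the original source): the web layer
# queues a command for an online client whose entry already exists in
# GLOBAL_REMOTE_CONTROL; check_remote_task() dispatches it on its next poll.
# The user number and args below are made up.
def _example_assign_task(user_no="client-007"):
    if assign_remote_task(user_no, RemoteControl.CTL_UPLOAD_FIRST, args="C:\\docs"):
        print("task queued for", user_no)
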
def do36(service):  # path_syn
    print(service)
    for entry in conf_entry_info:
        if entry.type != "PATH":
            continue
        value = spirit_method.trans_path(entry.value, entry.constraint)
        syn_value = mutator.gensyn(value, entry.type, entry.constraint)
        syn_value = spirit_method.trans_path(syn_value, entry.constraint)
        mutation = spirit_method.instr_replace(entry.value, syn_value, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def erase_zombie_client(task_queue, exec_tasks):
    delta = []
    # Collect tasks whose owner has gone offline.
    for task_id, task in task_queue.items():
        user_no = task[0]
        if not is_user_logined(user_no):
            delta.append(task_id)
    for task_id in delta:
        if task_id in task_queue:
            log("delete task_queue {TASK_ID}".format(TASK_ID=task_id))
            task_queue.pop(task_id)
        if task_id in exec_tasks:
            log("delete exec_tasks {TASK_ID}".format(TASK_ID=task_id))
            exec_tasks.remove(task_id)
    return True

def do20(service):  # address_syn
    print(service)
    for entry in conf_entry_info:
        if entry.type != "ADDRESS":
            continue
        # Generate a list of syntactically wrong addresses.
        wrong_address = mutator.gensyn(entry.value, entry.type, entry.constraint)
        mutation = spirit_method.instr_replace(entry.value, wrong_address, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def check():
    succ = False
    filepath = os.path.join(util.get_current_path(), "license.ini")
    conf = ConfigParser.ConfigParser()
    res = conf.read(filepath)
    mylog.log("check\t filepath=" + filepath + "\tres=" + json.dumps(res))
    if res == []:
        return succ
    mdb = conf.get("config", "dbpath")
    if not os.path.exists(mdb):
        mylog.log("mdb=" + mdb + "\tres=False")
        return False
    bus_id = conf.get("config", "bus_id")
    license = conf.get("config", "license")
    succ = check_license(bus_id, license)
    return succ

def do37(service):  # path_sem
    print(service)
    for entry in conf_entry_info:
        if entry.type != "PATH":
            continue
        value = spirit_method.trans_path(entry.value, entry.constraint)
        sem_value = ['/abc', '/cba/abc', '/spirittest', '/spirittest/spirit']
        sem_value = spirit_method.trans_path(sem_value, entry.constraint)
        mutation = spirit_method.instr_replace(entry.value, sem_value, entry.origin)
        new_conf = spirit_method.instr_replace(entry.origin, mutation, conf_origin)
        spirit_method.save(new_conf, service, entry.key, directory)
        mylog.log(entry.origin, service, mutation, "1", "2")

def post_search():
    form = request.get_json()
    title = form.get('title', '')
    post = Post.query.filter_by(title=title).first()
    response = dict(
        success=False,
    )
    if len(title) == 0:
        response['message'] = 'The user entered no data!'
    elif post is None:
        response['message'] = 'No matching data found in the database!'
    else:
        response.update(dict(
            success=True,
            post_id=post.id,
        ))
    log('/post/search response: ', response)
    return jsonify(response)

def learn(self):
    if (self._count % self._steps_btw_train == self._steps_btw_train - 1
            and self._count > self._warm_up):
        cum_critic_loss = 0
        cum_critic_value = 0
        for _ in range(self._learn_per_step):
            obs, action, next_obs, reward, done, weights, time_limit = \
                self._sampler.sample()
            # Don't update when a time limit is reached.
            if time_limit is not None:
                weights = weights * (1 - time_limit)
            max_action = self._actor.act(obs)
            max_next_action = self._actor.act(next_obs, target=True)
            critic_loss = self._critic.optimize(obs, action, max_action,
                                                next_obs, max_next_action,
                                                reward, done, time_limit, weights)
            critic_value = self._critic.critic(obs, max_action)
            weights = arr_to_th(weights, device=critic_loss.device)
            self._actor.optimize(-critic_value)
            self._sampler.observe(th_to_arr(critic_loss * weights))
            cum_critic_loss += (critic_loss * weights).mean().item()
            cum_critic_value += critic_value.mean().item()
        info(f'At step {self._count}, critic loss: {cum_critic_loss / self._learn_per_step}')
        info(f'At step {self._count}, critic value: {cum_critic_value / self._learn_per_step}')
        log("loss/critic", cum_critic_loss / self._learn_per_step, self._count)
        log("value/critic", cum_critic_value / self._learn_per_step, self._count)
        self._actor.log()
        self._critic.log()

def recv_file(self, file_info):
    self.process_state = kProccessState.kRecvFile
    if not os.path.exists(FILE_KEEP_DIR):
        log("MKDIRS ", FILE_KEEP_DIR)
        os.makedirs(FILE_KEEP_DIR)
    # Both upload flavours come through here, so normalise the file info.
    if file_info.rfind(".") == -1:
        error_log("invalid filename: %s" % file_info)
        return
    if len(file_info.split("\\")) > 1:
        self.recv_file_name = file_info.split("\\")[-1]
    else:
        self.recv_file_name = file_info
    # Compute the suffix on the extracted name, not the full path.
    suffix_pos = self.recv_file_name.rfind(".")
    file_suffix = self.recv_file_name[suffix_pos + 1:]
    self.recv_file_type = MAP_TYPE.get(file_suffix, FILE_TYPE.confidential)
    self.recv_file_path = file_info[0:-4]
    self.process_state = kProccessState.kNeedFileHash

def learn(self) -> None:
    if self._count % self._T != self._T - 1:
        return None
    traj = Trajectory.tobatch(*self._current_trajectories)
    traj = traj.to(self._device)
    v, v_target = self._critic.value_batch(traj)
    critic_loss = self._critic.loss(v, v_target)
    critic_value = v_target - v
    obs = traj.obs
    actions = traj.actions
    distr = self._actor.actions_distr(obs)
    actor_loss = self._actor.loss(distr=distr,
                                  actions=actions,
                                  critic_value=critic_value)
    loss = critic_loss + actor_loss
    self._optimizer.zero_grad()
    loss.backward()
    self._optimizer.step()
    critic_loss = critic_loss.mean().item()
    critic_value = critic_value.mean().item()
    actor_loss = actor_loss.mean().item()
    info(f'At step {self._count}, critic loss: {critic_loss}')
    info(f'At step {self._count}, critic value: {critic_value}')
    info(f'At step {self._count}, actor loss: {actor_loss}')
    log("loss/critic", critic_loss, self._count)
    log("value/critic", critic_value, self._count)
    log("loss/actor", actor_loss, self._count)
    self._actor.log()
    self._critic.log()

def login():
    # request.get_data() would return the raw request body.
    form = request.get_json()
    print('debug form', form)
    login_u = User(form)
    data_u = User.query.filter_by(username=login_u.username).first()
    status = {
        'result': '',
        'url': '',
    }
    if data_u is None or not data_u.login_validator(login_u):
        status['result'] = 'login failed'
        log('user login failed', login_u)
        return json.dumps(status, ensure_ascii=False)
    log('user login succeeded')
    session['user_id'] = data_u.id
    status['result'] = 'login succeeded'
    if data_u.is_admin():
        status['url'] = url_for('admin_view')
    else:
        status['url'] = url_for('todo_add_view', username=data_u.username)
    log('status["url"]', status['url'])
    return json.dumps(status, ensure_ascii=False)