def timedTask(data):
    """
    Scheduled script execution --- cron
    id          plan id
    userId      user id
    projectId   project id
    againScript whether to regenerate the script for this run
    :return:
    """
    print(data)
    timeStr = time.strftime("%Y%m%d%H%M%S", time.localtime())
    data["timeStr"] = timeStr
    # data = {"id": "6", "timeStr": timeStr, "userId": "5", "CaseCount": "16", "projectId": "98", "againScript": "1"}
    userId = data["userId"]
    againScript = int(data["againScript"])
    c_id = data["id"]
    CaseCount = data["CaseCount"]
    key = "%s_%s" % (c_id, timeStr)  # plan id + timestamp is passed along in place of the user id
    data_post = json.dumps(data)
    l = {
        "results": [],
        "logList": [],
    }
    s = RunCaseAll()
    try:
        s.post(data_post)
    except Exception as f:
        return f
    else:
        Redis = conn()
        RedisCount = conn()
        redisListLog = Redis.lrange("log:%s" % key, 0, -1)
        RedisCountLog = RedisCount.get("status:%s" % key)
        for log in redisListLog:
            l["logList"].append(log.decode("utf8"))
        RedisCountLog = json.loads(RedisCountLog)
        user = UserProfile.objects.get(id=userId)
        if againScript == 1:
            CaseCount = models.CaseFile.objects.filter(
                Q(CaseGroupId__CaseGroupFilesId__projectId=int(data["projectId"])) & Q(status=1)).count()
            models.CasePlan.objects.filter(id=c_id).update(CaseCount=int(CaseCount))
        models.CaseResult.objects.create(
            result=l,
            type=3,
            c_id=c_id,
            userId=user,
            caseCount=int(CaseCount),
            assertSuccess=RedisCountLog["count"]["assertSuccess"],
            assertFailed=RedisCountLog["count"]["assertFailed"],
            runFailed=RedisCountLog["count"]["runFailed"],
        )
        return "success"

# scheduled: celery -A besettest beat -l debug -S django
# async:     celery -A besettest worker --loglevel=debug --pool=solo
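# --- A minimal sketch (not part of the project source) of wiring timedTask into Celery,
# matching the beat/worker commands noted above. The task name, schedule key, and
# crontab values are hypothetical illustrations, not settings from this repository.
from celery import shared_task


@shared_task
def run_timed_task(data):
    # Delegates to timedTask above; data carries id/userId/CaseCount/projectId/againScript.
    return timedTask(data)

# Example beat entry (settings.py), assuming CELERY_BEAT_SCHEDULE and
# `from celery.schedules import crontab`:
# CELERY_BEAT_SCHEDULE = {
#     "nightly-plan-run": {
#         "task": "tasks.run_timed_task",
#         "schedule": crontab(hour=2, minute=0),
#         "args": ({"id": "6", "userId": "5", "CaseCount": "16",
#                   "projectId": "98", "againScript": "1"},),
#     },
# }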
def celeryTasks(self, tasks_data):
    """
    '{"id": "67", "timeStr": "20200717180838", "tasksId": "f76c488c-fa29-42f1-9b4a-4c80ade939bf"}'
    :param self:
    :param tasks_data:
    :return: Poll the result of a manually triggered async task and store the related data in the database.
    """
    tasks_data = json.loads(tasks_data)
    tasks_id = tasks_data["tasksId"]
    timeStr = tasks_data["timeStr"]
    userId = tasks_data["userId"]
    c_id = tasks_data["id"]
    CaseCount = tasks_data["CaseCount"]
    startTime = time.time()
    self.l = {
        "results": [],
        "logList": [],
    }
    while True:
        endTime = time.time()
        try:
            res = AsyncResult(tasks_id).status
        except:
            if endTime - startTime > 60 * 30:  # give up after 30 minutes
                break
            else:
                continue
        else:
            if res == "SUCCESS":  # store the result in the database, then exit the loop
                Redis = conn()
                RedisCount = conn()
                redisListLog = Redis.lrange("log:%s_%s" % (tasks_data["id"], timeStr), 0, -1)
                RedisCountLog = RedisCount.get("status:%s_%s" % (c_id, timeStr))
                for log in redisListLog:
                    self.l["logList"].append(log.decode("utf8"))
                RedisCountLog = json.loads(RedisCountLog)
                userId = UserProfile.objects.get(id=userId)
                if int(tasks_data["againScript"]) == 1:
                    CaseCount = models.CaseFile.objects.filter(
                        Q(CaseGroupId__CaseGroupFilesId__projectId=int(tasks_data["projectId"])) & Q(status=1)).count()
                    models.CasePlan.objects.filter(id=c_id).update(CaseCount=int(CaseCount))
                models.CaseResult.objects.create(
                    result=self.l,
                    type=3,
                    c_id=c_id,
                    userId=userId,
                    caseCount=int(CaseCount),
                    assertSuccess=RedisCountLog["count"]["assertSuccess"],
                    assertFailed=RedisCountLog["count"]["assertFailed"],
                    runFailed=RedisCountLog["count"]["runFailed"],
                )
                break
    print("exit")
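# --- A minimal sketch (not part of the project source) of the same wait-for-result step,
# using AsyncResult directly with the 30-minute cap that celeryTasks above hard-codes.
# The helper name and poll interval are assumptions for illustration.
import time

from celery.result import AsyncResult


def wait_for_task(task_id, timeout=60 * 30, poll=1):
    """Return the task state once it leaves the pending states, or None on timeout."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        result = AsyncResult(task_id)
        if result.ready():  # SUCCESS, FAILURE or REVOKED
            return result.status
        time.sleep(poll)
    return None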
def __init__(self, fp, userId, interface, runTime):
    self.userId = userId
    self.interface = interface
    self.runTime = runTime
    self.start = sys.stdout
    self.logRedis = conn("log")
    self.fp = fp
def post(self, req):
    l = {
        "results": [],
        "logList": [],
    }
    self.logRedis = conn("log")
    data = req.data
    res_data = req.data.dict()
    validateObj = serializers.S_debugCase(data=data, many=False)
    environmentsObj = self.Environmented(validateObj, res_data)
    res_data["environmentId"] = environmentsObj
    start = StartMethod(data["userId"])
    start()
    self.logger = logs(self.__class__.__module__)
    self.logger.info("unit execution started")
    # s = InRequests(res_data["postMethod"], res_data["dataType"], environmentsObj, res_data["name"], self.logger)
    # response = s.run(res_data["attr"], res_data["headers"], res_data["data"])
    caseAction = CaseAction()
    response = caseAction.action(res_data, self.logger)
    l["results"].append(response)
    self.logger.info("unit execution finished")
    redisListLog = self.logRedis.lrange(
        "log:%s_%s" % (data["userId"], None), 0, -1)
    for log in redisListLog:
        l["logList"].append(log.decode("utf8"))
    # use errors to decide whether execution succeeded; assertions are handled separately
    self.logRedis.delete("log:%s_%s" % (data["userId"], None))
    # response -- stored in CaseResult with type=1
    userId = UserProfile.objects.get(id=data["userId"])
    models.CaseResult.objects.create(result=l, type=1, c_id=data["id"], userId=userId)
    return APIResponse(200, "success", results=l, status=status.HTTP_200_OK)
def __init__(self, fp, projectId, userId, runTime):
    self.projectId = projectId
    self.userId = userId
    self.runTime = runTime
    self.start = sys.stdout
    self.logRedis = conn("log")
    self.fp = fp
def post(self, req):
    """Requires a project id; the plan name is then looked up via that id."""
    print(11111111)
    req = json.loads(req)
    key = "%s_%s" % (req["id"], req["timeStr"])  # plan id + timestamp is passed along in place of the user id
    self.logRedis = conn()
    self.logRedis.set("status:%s" % key, self.resStatus(1))
    casePlanObj = models.CasePlan.objects.select_related("projectId").get(
        id=int(req["id"]))
    print(casePlanObj)
    projectId = casePlanObj.projectId
    fileName = casePlanObj.cname            # script name
    name = casePlanObj.name                 # plan name
    description = casePlanObj.detail
    againScript = casePlanObj.againScript   # whether to create a new script (delete the old one, then recreate); otherwise reuse the existing one
    res_list = self.serializers_data(projectId)  # assorted lookups to build the ordered case parameter set
    if not res_list["code"]:
        # if interfaces or cases have duplicate execution orders, return immediately
        return APIResponse(409, res_list["msg"], results=res_list["msg"], status=status.HTTP_200_OK)
    else:
        res_list = res_list["msg"]
    if int(againScript) == 1:  # configured to regenerate the script on every run
        #### insert a new case_results row in the database with status "generating script"...
        self.logRedis.set("status:%s" % key, self.resStatus(2))
        self.removeFile(fileName)  # delete the script if it already exists -- it is regenerated below; if absent it is simply created
        MakeScript().make_file(res_list, fileName)
    if int(againScript) == 0:
        if not self.distinctFileName(fileName):
            MakeScript().make_file(res_list, fileName)
    report_set = open(self.report_path(name), 'wb')
    print(key)
    runner = HTMLTestRunner.HTMLTestRunner(stream=report_set,
                                           description=description,
                                           title=name,
                                           key=key)
    self.logRedis.set("status:%s" % key, self.resStatus(3, createStatus=int(againScript)))
    runner.run(self.allCase(fileName))  # a task id is passed to HTMLTestRunner here -- it is combined with the timestamp to build the id
    l = {}
    l["assertSuccess"] = runner.runCase.success_count
    l["assertFailed"] = runner.runCase.failure_count
    l["runFailed"] = runner.runCase.error_count
    report_set.close()
    self.logRedis.set(
        "status:%s" % key, self.resStatus(4, createStatus=int(againScript), count=l))
    self.logRedis.rpush("log:%s" % key, "结束")  # push the "finished" marker
    print("runCaseJISHHU")
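# --- A minimal sketch (not part of the project source) of reading back the Redis keys
# this view writes: "status:<planId>_<timeStr>" holds a JSON status blob and
# "log:<planId>_<timeStr>" is a list of log lines ending with the "结束" ("finished") marker.
# The helper name is an assumption; conn is the same django_redis helper used above.
import json


def read_run_state(plan_id, time_str):
    r = conn()
    key = "%s_%s" % (plan_id, time_str)
    raw_status = r.get("status:%s" % key)
    status_blob = json.loads(raw_status) if raw_status else None
    log_lines = [line.decode("utf8") for line in r.lrange("log:%s" % key, 0, -1)]
    return status_blob, log_lines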
def post(self, req):
    responses = []
    data = req.data
    listId = json.loads(data["id"])
    id = listId[0]
    l = {
        "results": [],
        "logList": [],
    }
    self.logRedis = conn("log")
    obj = models.CaseFile.objects.select_related(
        "userId", "CaseGroupId", "postMethod", "dataType", "environmentId").filter(id=id)
    serializersObj = serializers.S_CaseRun(obj, many=True)
    res_data = serializersObj.data
    res_data = json.loads(json.dumps(res_data))
    res_data = res_data[0]
    start = StartMethod(data["userId"])
    start()
    self.logger = logs(self.__class__.__module__)
    self.logger.info("unit execution started")
    # s = InRequests(res_data["postMethod"], res_data["dataType"], res_data["environmentId"], res_data["name"], self.logger)
    # response = s.run(res_data["attr"], res_data["headers"], res_data["data"])
    caseAction = CaseAction()
    response = caseAction.action(res_data, self.logger)
    l["results"].append(response)
    self.logger.info("unit execution finished")
    redisListLog = self.logRedis.lrange(
        "log:%s_%s" % (data["userId"], None), 0, -1)
    for log in redisListLog:
        l["logList"].append(log.decode("utf8"))
    # use errors to decide whether execution succeeded; assertions are handled separately
    self.logRedis.delete("log:%s_%s" % (data["userId"], None))
    # response -- stored in CaseResult with type=1
    userId = UserProfile.objects.get(id=data["userId"])
    models.CaseResult.objects.create(result=l, type=1, c_id=id, userId=userId)
    return APIResponse(200, "success", results=l, status=status.HTTP_200_OK)
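# --- A minimal sketch (not part of the project source) of the per-user debug-log cycle
# shared by the two post() views above: lines accumulate under "log:<userId>_None"
# (runTime is None for manual debug runs) and the key is deleted once the result is stored.
# The helper name is an assumption for illustration.
def drain_debug_log(user_id):
    r = conn("log")
    key = "log:%s_%s" % (user_id, None)
    lines = [line.decode("utf8") for line in r.lrange(key, 0, -1)]
    r.delete(key)
    return lines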
def __init__(self, fp):
    self.fp = fp
    self.logRedis = conn("log")
from log.logFile import logger as logs
import logging, time
import sys, io, os, django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "besettest.settings")
django.setup()
from django_redis import get_redis_connection as conn

# conn = conn('default')
logRedis = conn("log")
logRedis.rpush("log:user_id_time1", "log")


class OutputRedirector(object):
    """ Wrapper to redirect stdout or stderr """

    def __init__(self, fp, projectId, userId, runTime):
        self.projectId = projectId
        self.userId = userId
        self.runTime = runTime
        self.start = sys.stdout
        self.logRedis = conn("log")
        self.fp = fp

    def write(self, s):
        self.fp.write(s)
        key = "%s_%s_%s" % (self.projectId, self.userId, self.runTime)
        self.logRedis.rpush("log:%s" % key, s)
        sys.stdout = self.start  # restore the stdout captured at construction after each write

    def writelines(self, lines):
        self.fp.writelines(lines)

    def flush(self):
        self.fp.flush()
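# --- A minimal sketch (not part of the project source) showing how OutputRedirector is
# meant to be used: stdout is mirrored into a file and pushed to
# "log:<projectId>_<userId>_<runTime>". The wrapper function and log file name are
# assumptions for illustration.
import sys


def run_with_redirect(projectId, userId, runTime, func):
    original_stdout = sys.stdout
    with open("run_%s.log" % runTime, "w") as fp:  # hypothetical log file name
        sys.stdout = OutputRedirector(fp, projectId, userId, runTime)
        try:
            func()  # anything printed here is written to the file and pushed to Redis
        finally:
            sys.stdout = original_stdout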