def running_jobs():
    """Collect all active job payloads from Redis, sorted by start time.

    Returns a tuple ``(jobs, should_update)``: ``jobs`` is the list of
    ``job_*`` payloads sorted by ``start_time``; ``should_update`` is True
    only when more than 3 seconds have passed since the last recorded push
    (a throttle for the Running Jobs table refresh).
    """
    redis = Redis()
    # Fetch every "job_*" key, then resolve each key to its stored payload.
    # (The original reused the name `data` for both the key list and each
    # payload, shadowing the sequence being iterated.)
    job_keys = redis.connect.keys("job_*")
    active_jobs = [redis.get_value_from_key_as_str(key) for key in job_keys]
    # https://www.geeksforgeeks.org/ways-sort-list-dictionaries-values-python-using-itemgetter/
    result = sorted(active_jobs, key=itemgetter('start_time'))
    # "latest_jobs_time" throttles how often clients are told to refresh.
    redis_latest_jobs_time = redis.get_value_from_key_as_str("latest_jobs_time")
    update_running_jobs = False
    if redis_latest_jobs_time is not None:
        # The value was written as str(time.time()); coerce defensively so the
        # arithmetic works whether the helper returns a str or a float.
        if time.time() - float(redis_latest_jobs_time) > 3:
            redis.set_value("latest_jobs_time", str(time.time()))  # send updated time
            update_running_jobs = True
    else:
        # First run: seed the timestamp, but do not signal an update yet.
        redis.set_value("latest_jobs_time", str(time.time()))  # send updated time
    return result, update_running_jobs
def running_jobs_count():
    """Return the length of the Redis "running_jobs" list, or None when empty."""
    redis = Redis()
    entries = redis.connect.lrange("running_jobs", 0, -1)
    # An empty list yields None so callers can distinguish "nothing running".
    return len(entries) if entries else None
def running_jobs_count():
    """Return the length of the Redis "running_jobs" list, or None when empty.

    NOTE(review): this redefines ``running_jobs_count`` declared earlier in
    the module; the earlier copy is dead code.
    """
    redis = Redis()
    job_list_from_redis = redis.connect.lrange("running_jobs", 0, -1)
    # The original decoded every byte entry to utf-8 into a temporary list
    # only to measure it; the count is the same without decoding.
    count = len(job_list_from_redis)
    if count > 0:
        return count
    return None
def latest_jobs():
    """Build the "Last Jobs" table payload from the 10 most recent finished jobs.

    Returns ``(jobs, unchanged)``: ``jobs`` is a list of per-job dicts
    (reversed for the JS consumer); ``unchanged`` is True when the hash of
    the current job-uuid set matches the hash previously stored in Redis,
    so the caller can skip pushing a websocket update.
    """
    # Redis: previous hash of the latest-jobs uuid set, for change detection.
    redis = Redis()
    redis_latest_jobs_hash = redis.get_value_from_key_as_str("latest_jobs")
    latest_jobs_q = TestJobs.objects.select_related('env').order_by('-stop_time').exclude(status='1')[:10]
    result = []
    list_for_hash = []
    for job in latest_jobs_q:
        job_item = dict()
        job_item['uuid'] = job.uuid
        job_item['time_taken'] = job.get_time_taken()
        job_item['stop_time'] = timezone.localtime(job.stop_time).strftime('%d-%b-%Y, %H:%M:%S')
        # NOTE(review): a pre-assignment `job_item['status'] = 4` for skipped
        # jobs was removed as a dead store — the unconditional
        # `job_item['status'] = job.status` below writes the same value.
        if job.tests_passed is not None:
            job_item['tests_passed'] = job.tests_passed
        if job.tests_failed is not None:
            job_item['tests_failed'] = job.tests_failed
        if job.tests_aborted is not None:
            job_item['tests_aborted'] = job.tests_aborted
        if job.tests_skipped is not None:
            job_item['tests_skipped'] = job.tests_skipped
        if job.tests_not_started != 0:
            job_item['tests_not_started'] = job.tests_not_started
        job_item['tests_percentage'] = job.tests_percentage()
        job_item['env'] = job.get_env()
        job_item['status'] = job.status
        # Building list with job uuid's for making local hash of all our latest jobs.
        list_for_hash.append(job.uuid)
        result.append(job_item)
    result_hash = get_hash(frozenset(list_for_hash))  # hash of local job uuid's
    redis.set_value("latest_jobs", result_hash)  # send updated hash value to Redis
    result.reverse()  # For correct ordering in JS
    # Compare previous Redis hash against the local hash of latest_jobs.
    return result, bool(compare_hash(redis_latest_jobs_hash, result_hash))
def job_force_stop(request):
    """Force-stop a running job.

    Expects a POST with JSON body ``{"uuid": ...}``; any other method gets
    403. Marks the job aborted (status 4), aborts every in-progress test
    (status 2 -> 6) and removes the job's bookkeeping from Redis.
    """
    if request.method == "POST":
        body_unicode = request.body.decode('utf-8')
        data = json.loads(body_unicode)
        job_uuid = data['uuid']
    else:
        return HttpResponseForbidden()
    # Redis: remove the job from "running_jobs" and drop its payload key
    # immediately. Jobs registered with a custom id are keyed by that id
    # instead of the uuid (both branches were otherwise identical).
    job_object = TestJobs.objects.get(uuid=job_uuid)
    r = Redis()
    job = "job_" + (job_object.custom_id if job_object.custom_id else job_uuid)
    r.connect.lrem("running_jobs", 0, job)
    r.connect.delete(job)
    job_object.status = 4  # aborted
    job_object.stop_time = unix_time_to_datetime(int(datetime.now(tz=timezone_native.utc).timestamp() * 1000))
    job_object.time_taken = job_object.stop_time - job_object.start_time
    # Tests: abort everything still in progress, decrementing the job's
    # in-progress counter while it stays positive (original semantics kept).
    aborted_tests = 0
    if job_object.tests_in_progress is not None and job_object.tests_in_progress > 0:
        result = Tests.objects.filter(job=job_object, status=2)
        for test_item in result:
            test_item.status = 6
            aborted_tests += 1
            if job_object.tests_in_progress is not None and job_object.tests_in_progress > 0:
                job_object.tests_in_progress = job_object.tests_in_progress - 1
            test_item.save()
    job_object.tests_aborted = aborted_tests
    job_object.save()
    return JsonResponse({"status": "ok"})
def task_stop_running_jobs():
    """Periodic cleanup: abort running jobs older than the configured age.

    Reads ``running_jobs_age`` (minutes) from the singleton Settings row;
    ``0`` or ``None`` disables the cleanup. For each expired job the job is
    marked aborted, its in-progress tests are aborted (status 2 -> 6) and
    its Redis bookkeeping is removed.
    """
    redis = Redis()
    settings = Settings.objects.filter(pk=1).first()
    # Guard clauses replace the original `if cond: pass else: ...` nesting.
    if not settings:
        return
    if settings.running_jobs_age == 0 or settings.running_jobs_age is None:
        return  # feature disabled
    running_jobs = TestJobs.objects.filter(status=1)
    for job in running_jobs:
        if job.start_time + timedelta(minutes=settings.running_jobs_age) < timezone.now():
            # Job: mark aborted and close out its timing fields.
            job.status = 4
            job.stop_time = unix_time_to_datetime(
                int(datetime.now(tz=timezone_native.utc).timestamp() * 1000))
            job.time_taken = job.stop_time - job.start_time
            # Tests: abort everything still in progress, decrementing the
            # in-progress counter while it stays positive (original semantics).
            aborted_tests = 0
            if job.tests_in_progress is not None and job.tests_in_progress > 0:
                result = Tests.objects.filter(job=job, status=2)
                for test_item in result:
                    test_item.status = 6
                    aborted_tests += 1
                    if job.tests_in_progress is not None and job.tests_in_progress > 0:
                        job.tests_in_progress = job.tests_in_progress - 1
                    test_item.save()
            job.tests_aborted = aborted_tests
            job.save()
            # Redis: jobs with a custom id are keyed by it instead of the
            # uuid (the two original branches were otherwise identical).
            job_to_delete = "job_" + (job.custom_id if job.custom_id else job.uuid)
            redis.connect.lrem("running_jobs", 0, job_to_delete)
            redis.connect.delete(job_to_delete)
def __init__(self, data):
    # `data` is the loader payload (parsed request body) driving this
    # instance; a fresh Redis client is created per instance.
    # NOTE(review): the enclosing class header for this method is not
    # visible in this chunk; it mirrors PytestLoader.__init__ — confirm
    # which class it belongs to.
    self.data = data
    self.redis = Redis()
class PytestLoader:
    """Persists pytest run lifecycle events into Django models and Redis.

    Each event handler (`start_test_run`, `stop_test_run`, `start_test`,
    `stop_test`) is a classmethod that wraps an instance method operating on
    ``self.data`` — the parsed loader payload for that event. Job state is
    kept in ``TestJobs``/``Tests``/``TestsStorage`` rows and mirrored into
    Redis under ``job_<custom_id or job_id>`` keys plus a ``running_jobs``
    list so the dashboard can render live state without hitting the DB.
    """

    def __init__(self, data):
        # `data`: parsed payload for a single loader event.
        self.data = data
        self.redis = Redis()

    @staticmethod
    def generate_uuid() -> str:
        """Return a fresh random UUID4 as a string."""
        value = uuid.uuid4()
        return str(value)

    def get_start_test_run(self):
        """Register a new test run: create the job row, its tests, and Redis state.

        Returns "done" on success, 409 if the job uuid already exists,
        403 when the payload carries no tests.
        """
        # print("DBG: startTestRun")
        # print(self.data)
        # Reject duplicate job uuids up front.
        try:
            TestJobs.objects.get(uuid=self.data['job_id'])
            return HttpResponse(status=409)
        except ObjectDoesNotExist:
            pass
        # Resolve (or lazily create) the environment; `env_name` is the label
        # pushed to Redis, preferring `remapped_name` when set.
        try:
            env = Environments.objects.get(name=self.data['env'])
            # Env name for Redis
            env_name = env.remapped_name if env.remapped_name is not None else env.name
        except ObjectDoesNotExist:
            if self.data['env'] is not None:
                env = Environments(name=self.data['env'])
                env.save()
                # Env name for Redis
                env_name = env.remapped_name if env.remapped_name is not None else env.name
            else:
                # Payload carries no env at all: fall back to the "None" env row.
                try:
                    env = Environments.objects.get(name="None")
                    # Env name for Redis
                    if env.remapped_name:
                        env_name = env.remapped_name
                    else:
                        env_name = env.name
                except ObjectDoesNotExist:
                    env = Environments(name="None")
                    env.save()
                    # Env name for Redis
                    env_name = "None"
        # We should not create a job without tests.
        if len(self.data['tests']) == 0:
            return HttpResponse(status=403)
        # NOTE(review): bare except silently maps any custom_data parse/type
        # failure to None — deliberate best-effort, but a narrower
        # (TypeError, ValueError) would be safer.
        try:
            custom_data = json.loads(self.data["custom_data"])
        except:
            custom_data = None
        # If we have a job with the same custom id - we should not create any
        # tests, they exist already.
        if self.data['custom_id']:
            if TestJobs.objects.filter(
                    custom_id=self.data['custom_id']).exists():
                return "done"
        job_object = TestJobs(uuid=self.data['job_id'], status=1, fw_type=2,
                              start_time=unix_time_to_datetime(
                                  self.data['startTime']),
                              env=env, custom_data=custom_data,
                              custom_id=self.data['custom_id'])
        job_object.save()
        # Tests: upsert each test into TestsStorage and collect rows for a
        # raw bulk INSERT below.
        tests_count = 0
        tests = []
        for test_item in self.data['tests']:
            # NOTE(review): this local `uuid` shadows the `uuid` module for
            # the rest of the loop body.
            uuid = test_item['uuid']
            description = test_item['description']
            # Tests Storage: keep identity -> (test name, description) fresh.
            try:
                test_storage_item = TestsStorage.objects.get(
                    identity=test_item['nodeid'])
                # If no test obj exists
                if not test_storage_item.test:
                    test_storage_item.test = test_item['nodeid'].split(
                        '::')[-1]
                    test_storage_item.description = description
                    test_storage_item.save()
                # If test obj exists with null description
                elif test_storage_item.test and not test_storage_item.description:
                    test_storage_item.description = description
                    test_storage_item.save()
                # If test obj exists with description: refresh only on change.
                elif test_storage_item.test and test_storage_item.description:
                    if test_storage_item.description == description:
                        pass
                    else:
                        test_storage_item.description = description
                        test_storage_item.save()
            except ObjectDoesNotExist:
                test_storage_item = TestsStorage(
                    identity=test_item['nodeid'],
                    test=test_item['nodeid'].split('::')[-1],
                    description=description)
                test_storage_item.save()
            tests.append({
                'test_uuid': uuid,
                'status': 1,
                'job': job_object.pk,
                'test': test_storage_item.pk
            })
            tests_count += 1
        # Raw bulk insert of the per-run test rows (status 1 = not started).
        with connection.cursor() as cursor:
            for test in tests:
                cursor.execute(
                    "INSERT INTO loader_tests (uuid, status, job_id, test_id)"
                    "VALUES(%s, 1, %s, %s)",
                    [test['test_uuid'], test['job'], test['test']])
        tests_not_started = job_object.tests.count()
        job_object.tests_not_started = tests_not_started
        job_object.save()
        # Redis data
        # We are creating/updating the "running_jobs" list in Redis with our
        # new job item; jobs with a custom id are keyed by it.
        if self.data["custom_id"]:
            job = "job_" + self.data["custom_id"]
        else:
            job = "job_" + self.data['job_id']
        self.redis.connect.rpush("running_jobs", job)
        # Job payload stored as the str() of a dict (parsed back by
        # get_value_from_key_as_str on read — TODO confirm helper contract).
        data = str({
            "uuid": self.data["job_id"],
            "status": "1",
            "start_time": timezone.localtime(unix_time_to_datetime(
                self.data['startTime'])).strftime('%d-%b-%Y, %H:%M:%S'),
            "tests_not_started": str(tests_not_started),
            "env": str(env_name),
            "tests_total_count": str(tests_count)
        })
        if self.data["custom_id"]:
            self.redis.set_value("job_" + self.data["custom_id"], data)
        else:
            self.redis.set_value("job_" + self.data['job_id'], data)
        # One-shot flag: tells the dashboard to refresh immediately.
        self.redis.set_value("update_running_jobs", "1")
        return "done"

    @classmethod
    def start_test_run(cls, data):
        # Thin classmethod wrapper around get_start_test_run.
        loader = cls(data)
        result = loader.get_start_test_run()
        return result

    def get_stop_test_run(self):
        """Finalize a run: settle job status, abort in-progress tests, clean Redis.

        Returns "done" on success (or when an xdist run is not yet complete),
        403 when the job is unknown or not running.
        """
        # print("DBG: stopTestRun")
        # print(self.data)
        try:
            if self.data["custom_id"]:
                job_object = TestJobs.objects.get(
                    custom_id=self.data['custom_id'])
                data = self.redis.get_value_from_key_as_str(
                    "job_" + self.data['custom_id'])
                # TODO refactor. xdist tests can not be stopped after killing
                # Ignore the stop event while fewer tests have finished than
                # the run's recorded total (other xdist workers still going).
                if (int(0 if job_object.tests_passed is None else job_object.tests_passed)
                        + int(0 if job_object.tests_failed is None else job_object.tests_failed)
                        + int(0 if job_object.tests_skipped is None else job_object.tests_skipped)) \
                        < int(data["tests_total_count"]):
                    return "done"
            else:
                job_object = TestJobs.objects.get(uuid=self.data['job_id'])
            if job_object.status == 1:
                # Redis
                # Remove job uuid from "jobs" key immediately
                if self.data["custom_id"]:
                    job = "job_" + self.data['custom_id']
                    self.redis.connect.lrem("running_jobs", 0, job)
                    self.redis.connect.delete("job_" + self.data['custom_id'])
                else:
                    job = "job_" + self.data['job_id']
                    self.redis.connect.lrem("running_jobs", 0, job)
                    self.redis.connect.delete("job_" + self.data['job_id'])
                failed = job_object.tests_failed
                not_started = job_object.tests_not_started
                # If any "aborted" test case:
                # Job status = Aborted
                # Every "in progress" test becomes - aborted
                tests = job_object.tests.filter(status=2)
                if job_object.tests.filter(status=6).first():
                    job_object.status = 4
                    if tests:
                        aborted_tests = 0
                        for test in tests:
                            test.status = 6
                            test.stop_time = unix_time_to_datetime(
                                self.data['stopTime'])
                            test.time_taken = test.stop_time - test.start_time
                            test.save()
                            aborted_tests += 1
                        job_object.tests_aborted = aborted_tests
                # If any "failed" test case:
                # Job status = Failed
                # Every "in progress" test becomes - aborted
                elif failed:
                    job_object.status = 3
                    if tests:
                        aborted_tests = 0
                        for test in tests:
                            test.status = 6
                            test.stop_time = unix_time_to_datetime(
                                self.data['stopTime'])
                            test.time_taken = test.stop_time - test.start_time
                            test.save()
                            aborted_tests += 1
                        job_object.tests_aborted = aborted_tests
                elif not_started:
                    # If no "failed" test cases, but "not started" remain -
                    # job will be "Failed"
                    if tests:
                        aborted_tests = 0
                        for test in tests:
                            test.status = 6
                            test.stop_time = unix_time_to_datetime(
                                self.data['stopTime'])
                            test.time_taken = test.stop_time - test.start_time
                            test.save()
                            aborted_tests += 1
                        job_object.tests_aborted = aborted_tests
                    job_object.status = 3
                # Bug fix - abort scenario with single test
                elif tests:
                    aborted_tests = 0
                    for test in tests:
                        test.status = 6
                        test.stop_time = unix_time_to_datetime(
                            self.data['stopTime'])
                        test.time_taken = test.stop_time - test.start_time
                        test.save()
                        aborted_tests += 1
                    job_object.tests_aborted = aborted_tests
                    job_object.status = 3
                # If no "failed" (and other negative variations) test cases -
                # job will be "Passed"
                else:
                    job_object.status = 2
                job_object.stop_time = unix_time_to_datetime(
                    self.data['stopTime'])
                job_object.time_taken = job_object.stop_time - job_object.start_time
                job_object.save()
                # Optionally send the job report (flag arrives as string "1").
                if self.data['send_report'] == "1":
                    SendJobReport(job_object).send()
                return "done"
            else:
                return HttpResponse(status=403)
        except ObjectDoesNotExist:
            return HttpResponse(status=403)

    @classmethod
    def stop_test_run(cls, data):
        # Thin classmethod wrapper around get_stop_test_run.
        loader = cls(data)
        result = loader.get_stop_test_run()
        return result

    def get_start_test(self):
        """Mark one test as started (status 2) and update Redis counters.

        Returns "done" on success; 200 for unknown/already-started cases so
        the client does not retry; 403 when the Redis job payload is missing.
        """
        # print("DBG: startTest")
        # print(self.data)
        try:
            if self.data["custom_id"]:
                job_object = TestJobs.objects.get(
                    custom_id=self.data["custom_id"])
            else:
                job_object = TestJobs.objects.get(uuid=self.data['job_id'])
            if job_object.status == 1:
                try:
                    test = Tests.objects.get(uuid=self.data['uuid'])
                    # Only transition tests that are still "not started".
                    if test.status != 1:
                        return "done"
                    test.status = 2
                    test.start_time = unix_time_to_datetime(
                        self.data['startTime'])
                    test.save()
                    job_object.tests_not_started -= 1
                    # Counter is nulled (not zeroed) when exhausted.
                    if job_object.tests_not_started == 0:
                        job_object.tests_not_started = None
                    job_object.tests_in_progress = 1
                    job_object.save()
                    # Redis
                    # Job item update
                    if self.data['custom_id']:
                        data = self.redis.get_value_from_key_as_str(
                            "job_" + self.data['custom_id'])
                    else:
                        data = self.redis.get_value_from_key_as_str(
                            "job_" + self.data['job_id'])
                    if data is None:
                        return HttpResponse(status=403)
                    # NOTE(review): `data` is indexed like a dict here, so the
                    # "_as_str" helper presumably parses the payload — confirm.
                    tests_not_started = int(data["tests_not_started"])
                    tests_not_started -= 1
                    data["tests_not_started"] = str(tests_not_started)
                    data = str(data).encode("utf-8")
                    if self.data["custom_id"]:
                        self.redis.set_value("job_" + self.data["custom_id"],
                                             data)
                    else:
                        self.redis.set_value("job_" + self.data['job_id'],
                                             data)
                    return "done"
                except ObjectDoesNotExist:
                    return HttpResponse(status=200)
            else:
                return HttpResponse(status=200)
        except ObjectDoesNotExist:
            return HttpResponse(status=200)

    @classmethod
    def start_test(cls, data):
        # Thin classmethod wrapper around get_start_test.
        loader = cls(data)
        result = loader.get_start_test()
        return result

    def get_stop_test(self):
        """Record a finished test: map its outcome onto the job and Redis, then
        refresh the rolling ETA stats in TestsStorage.

        Outcome mapping: passed -> 3, error/failed -> 4, skipped -> 5.
        Returns "done" on success; 200 for unknown job/test; 403 when the
        Redis job payload is missing.
        """
        # print("DBG: stopTest")
        # print(self.data)
        try:
            if self.data["custom_id"]:
                job_object = TestJobs.objects.get(
                    custom_id=self.data["custom_id"])
            else:
                job_object = TestJobs.objects.get(uuid=self.data['job_id'])
            if job_object.status == 1:
                # Redis
                # Job item update
                if self.data['custom_id']:
                    data = self.redis.get_value_from_key_as_str(
                        "job_" + self.data['custom_id'])
                else:
                    data = self.redis.get_value_from_key_as_str(
                        "job_" + self.data['job_id'])
                if data is None:
                    return HttpResponse(status=403)
                try:
                    test = Tests.objects.get(uuid=self.data['uuid'])
                    # Map the reported outcome onto test status and the job's
                    # nullable counters (None means "no such result yet").
                    if self.data['status'] == "passed":
                        test.status = 3
                        if not job_object.tests_passed:
                            job_object.tests_passed = 1
                            data["tests_passed"] = str(1)
                        else:
                            job_object.tests_passed += 1
                            data["tests_passed"] = str(job_object.tests_passed)
                    elif self.data['status'] == "error":
                        test.status = 4
                        if not job_object.tests_failed:
                            job_object.tests_failed = 1
                            data["tests_failed"] = str(1)
                        else:
                            job_object.tests_failed += 1
                            data["tests_failed"] = str(job_object.tests_failed)
                    elif self.data['status'] == "failed":
                        test.status = 4
                        if not job_object.tests_failed:
                            job_object.tests_failed = 1
                            data["tests_failed"] = str(1)
                        else:
                            job_object.tests_failed += 1
                            data["tests_failed"] = str(job_object.tests_failed)
                    elif self.data['status'] == "skipped":
                        test.status = 5
                        if not job_object.tests_skipped:
                            job_object.tests_skipped = 1
                            data["tests_skipped"] = str(1)
                        else:
                            job_object.tests_skipped += 1
                            data["tests_skipped"] = str(
                                job_object.tests_skipped)
                    job_object.tests_in_progress = None
                    data = str(data).encode("utf-8")
                    if self.data["custom_id"]:
                        self.redis.set_value("job_" + self.data['custom_id'],
                                             data)
                    else:
                        self.redis.set_value("job_" + self.data['job_id'],
                                             data)
                    job_object.save()
                    test.stop_time = unix_time_to_datetime(
                        self.data['stopTime'])
                    test.time_taken = test.stop_time - test.start_time
                    # Un-escape newlines that arrived escaped in the payload.
                    test.msg = str(self.data['msg']).replace("\\n", "\n")
                    # Save image artifacts if exist
                    save_images(self, test)
                    test.save()
                    # Tests Storage: keep up to three recent durations and a
                    # median-based ETA; slots fill in order, then shift.
                    obj = TestsStorage.objects.get(pk=test.test_id)
                    if not obj.time_taken:
                        obj.time_taken = test.time_taken
                        obj.calculated_eta = median(
                            [obj.time_taken, test.time_taken])
                        obj.save()
                        return "done"
                    if obj.time_taken and not obj.time_taken2:
                        obj.time_taken2 = test.time_taken
                        obj.calculated_eta = median(
                            [obj.time_taken, obj.time_taken2])
                        obj.save()
                        return "done"
                    if obj.time_taken2 and not obj.time_taken3:
                        obj.time_taken3 = test.time_taken
                        obj.calculated_eta = median(
                            [obj.time_taken, obj.time_taken2,
                             obj.time_taken3])
                        obj.save()
                        return "done"
                    # All three slots full: shift the window and recompute.
                    if obj.time_taken3:
                        obj.time_taken3 = obj.time_taken2
                        obj.time_taken2 = obj.time_taken
                        obj.time_taken = test.time_taken
                        obj.calculated_eta = median(
                            [obj.time_taken, obj.time_taken2,
                             obj.time_taken3])
                        obj.save()
                        return "done"
                except ObjectDoesNotExist:
                    return HttpResponse(status=200)
            else:
                return HttpResponse(status=200)
        except ObjectDoesNotExist:
            return HttpResponse(status=200)

    @classmethod
    def stop_test(cls, data):
        # Thin classmethod wrapper around get_stop_test.
        loader = cls(data)
        result = loader.get_stop_test()
        return result
def update_running_jobs():
    """Consume the one-shot "update_running_jobs" flag from Redis.

    Fix for a newly created job: it should appear in the Running Jobs table
    right after creation. Returns True when the flag was set (and clears
    it); returns None when it was not.
    """
    redis = Redis()
    if not redis.connect.exists("update_running_jobs"):
        return None
    redis.connect.delete("update_running_jobs")
    return True