def test_get_all_jobs_without_tasks(self):
    """Insert three task-less jobs and verify get_all_jobs_without_tasks
    returns them with every stored field intact."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build and insert three sample jobs without any tasks.
    # microsecond=0 sidesteps MongoDB's millisecond precision limit.
    inserted_jobs = []
    for index in range(3):
        timestamp = datetime.now().replace(microsecond=0)
        sample_job = Job("Test_job%d" % index,
                         "Just for test%d" % index,
                         timestamp)
        sample_job.tasks = []
        inserted_jobs.append(sample_job)
        self.manager.insert_new_job(sample_job)

    # Fetch all jobs back and compare field by field
    fetched_jobs = self.manager.db_manager.get_all_jobs_without_tasks()
    for index, expected in enumerate(inserted_jobs):
        actual = fetched_jobs[index]
        self.assertEqual(expected.name, actual.name)
        self.assertEqual(expected.comment, actual.comment)
        self.assertEqual(expected.created_time, actual.created_time)
        self.assertEqual(expected.status, actual.status)

    # Clean up the inserted jobs
    self.manager.remove_all_jobs_and_tasks()
def test_insert_job_without_tasks(self):
    """Insert a single job with no tasks and verify it round-trips intact."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample job without any tasks and insert it
    name = "Test_job"
    comment = "Just for test"
    creation = datetime.now()
    job = Job(name, comment, creation)
    job.tasks = []
    job_id, tasks_id = self.manager.insert_new_job(job)

    # The insert should yield a string job id and no task ids
    self.assertTrue(isinstance(job_id, str))
    self.assertEqual(len(tasks_id), 0)

    # MongoDB keeps only millisecond precision, so truncate the
    # sub-millisecond part before comparing timestamps
    creation = creation.replace(
        microsecond=creation.microsecond // 1000 * 1000)

    # Round-trip the job and compare every stored field
    stored = self.manager.db_manager.get_job_by_id_without_tasks(job_id)
    self.assertEqual(stored.name, name)
    self.assertEqual(stored.comment, comment)
    self.assertEqual(stored.created_time, creation)
    self.assertEqual(stored.status, Status.Pending)

    # Clean up the inserted job
    self.manager.remove_all_jobs_and_tasks()
def get_job_by_id_without_tasks(self, job_id):
    """Return a Job object of the specific job.

    The ``tasks`` member of the returned job is left empty to keep the
    query cheap; use get_tasks_by_job_id to fetch the related tasks.

    Arg:
        job_id (String): the id of the specific job

    Return:
        job (Job): the Job object of the specific id, or None when no
            job with that id exists or the lookup fails.
    """
    logger = Logger().get()
    logger.debug(f"start get_job_by_id_without_tasks, job_id:{job_id}")
    try:
        # Find the job using id
        condition = {"_id": ObjectId(job_id)}
        job_doc = self.__jobs_collection.find_one(condition)

        # find_one returns None when no document matches; previously this
        # fell through to a TypeError swallowed by the except below.
        # Report the miss explicitly instead.
        if job_doc is None:
            logger.error(f"no job found in get_job_by_id_without_tasks,"
                         f" job_id: {job_id}")
            return None

        # Rebuild the Job object from the query result
        job = Job(job_doc["name"], job_doc["comment"],
                  job_doc["created_time"],
                  status=Status(job_doc["status"]))
        job.job_id = job_id
        job.finished_time = job_doc["finished_time"]
        return job
    except Exception as e:
        logger.error(f"something wrong in get_job_by_id_without_tasks,"
                     f" Exception: {e}")
        # Preserve legacy behavior: swallow the error and return None
        return None
def test_update_status(self):
    """Update a job's and a task's status and verify both changes persist."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = {"-f": "oo.exe"}
    sample_task.input_text_args = {"--timeout": "300"}
    sample_task.input_flag_args = ["-v"]
    sample_task.output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }

    # Build a sample job carrying the task and insert it
    sample_job = Job("Test_job", "Just for test", datetime.now())
    sample_job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(sample_job)
    task_id = tasks_id[0]

    # Flip both statuses
    self.manager.update_job_status(job_id, Status.Failed)
    self.manager.update_task_status(task_id, Status.Successful)

    # get_status reports the task's status by name
    self.assertEqual(self.manager.get_status(task_id),
                     Status.Successful.name)

    # The stored job must now carry the failed status
    stored_job = self.manager.db_manager.get_job_by_id_without_tasks(job_id)
    self.assertEqual(stored_job.status, Status.Failed)

    # The stored task must now carry the successful status
    stored_task = self.manager.db_manager.get_task_by_id(task_id)
    self.assertEqual(stored_task.status, Status.Successful)

    # Clean up the inserted job & task
    self.manager.remove_all_jobs_and_tasks()
def setUp(self):
    """Submit a one-task dummy job and remember its ids and submit time."""
    print("Test with command\n%s\n" % self.command_line_input)
    print("Submitting job...")

    # Build a dummy job holding a single task and submit it
    submitted_at = datetime.now()
    job = Job(self.job_name, self.comments, submitted_at)
    job.tasks = [Task(self.location, self.tool_type,
                      self.command_line_input)]
    self.job_id, self.tasks_id = JobManager().submit_job(job)

    # MongoDB keeps only millisecond precision, so truncate the
    # sub-millisecond part before storing the reference timestamp
    self.now = submitted_at.replace(
        microsecond=submitted_at.microsecond // 1000 * 1000)
def test_insert_job_with_single_task(self):
    """Insert a job holding one task and verify both round-trip intact."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task
    file_args = {"-f": "oo.exe"}
    text_args = {"--timeout": "300"}
    flag_args = ["-v"]
    out_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = file_args
    sample_task.input_text_args = text_args
    sample_task.input_flag_args = flag_args
    sample_task.output_file_args = out_args

    # Build a sample job carrying the task and insert it
    name = "Test_job"
    comment = "Just for test"
    creation = datetime.now()
    job = Job(name, comment, creation)
    job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(job)

    # The insert should yield string ids: one job id, one task id
    self.assertTrue(isinstance(job_id, str))
    self.assertEqual(len(tasks_id), 1)
    self.assertTrue(isinstance(tasks_id[0], str))

    # MongoDB keeps only millisecond precision, so truncate the
    # sub-millisecond part before comparing timestamps
    creation = creation.replace(
        microsecond=creation.microsecond // 1000 * 1000)

    # Round-trip the job and compare every stored field
    stored_job = self.manager.db_manager.get_job_by_id_without_tasks(job_id)
    self.assertEqual(stored_job.name, name)
    self.assertEqual(stored_job.comment, comment)
    self.assertEqual(stored_job.created_time, creation)
    self.assertEqual(stored_job.status, Status.Pending)

    # Round-trip the task and compare its argument collections
    stored_task = self.manager.db_manager.get_task_by_id(tasks_id[0])
    self.assertEqual(stored_task.input_file_args, file_args)
    self.assertEqual(stored_task.input_text_args, text_args)
    self.assertEqual(stored_task.input_flag_args, flag_args)
    self.assertEqual(stored_task.output_file_args, out_args)
    self.assertEqual(stored_task.status, Status.Pending)

    # Clean up the inserted job & task
    self.manager.remove_all_jobs_and_tasks()
def test_failure(self):
    """Save a task result where one expected output file is missing and
    stdout is None, and verify only the existing files get recorded."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = {"-f": "oo.exe"}
    sample_task.input_text_args = {"--timeout": "300"}
    sample_task.input_flag_args = ["-v"]
    sample_task.output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }

    # Build a sample job carrying the task and insert it
    job = Job("Test_job", "Just for test", datetime.now())
    job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(job)

    # Prepare a fresh scratch directory containing only two of the three
    # output files; "facts" is deliberately never written so the save
    # must cope with a missing file
    dir_path = "/tmp/Felucca/unit_test/test"
    if os.path.exists(dir_path):
        shutil.rmtree(dir_path)
    os.makedirs(dir_path)
    output_file_path = os.path.join(dir_path, "output.json")
    results_file_path = os.path.join(dir_path, "results")
    facts_file_path = os.path.join(dir_path, "facts")
    with open(output_file_path, 'w+') as f:
        f.write("{}")
    with open(results_file_path, 'w+') as f:
        f.write("test result content")

    # Save the result with a None stdout and the missing facts file
    task_id = tasks_id[0]
    stdout = None
    stderr = "sample stderr"
    self.manager.save_result(
        task_id,
        [output_file_path, results_file_path, facts_file_path])
    self.manager.update_stdout(task_id, stdout)
    self.manager.update_stderr(task_id, stderr)

    # Only the files that actually existed should have been recorded
    stored_task = self.manager.db_manager.get_task_by_id(task_id)
    self.assertEqual(stored_task.output, ["output.json", "results"])
    self.assertEqual(self.manager.get_stdout(task_id), stdout)
    self.assertEqual(self.manager.get_stderr(task_id), stderr)

    # Clean up the scratch directory and the inserted job & task
    shutil.rmtree(dir_path)
    self.manager.remove_all_jobs_and_tasks()
def test_get_output_and_log_file(self):
    """Save sample output files for a task, then fetch each file plus
    stdout/stderr back and compare against the on-disk originals."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = {"-f": "oo.exe"}
    sample_task.input_text_args = {"--timeout": "300"}
    sample_task.input_flag_args = ["-v"]
    sample_task.output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }

    # Build a sample job carrying the task and insert it
    job = Job("Test_job", "Just for test", datetime.now())
    job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(job)

    # Save the three sample output files plus stdout/stderr
    task_id = tasks_id[0]
    stdout = "sample stdout"
    stderr = "sample stderr"
    self.manager.save_result(task_id,
                             ["../../sample_output/output.json",
                              "../../sample_output/facts",
                              "../../sample_output/results"])
    self.manager.update_stdout(task_id, stdout)
    self.manager.update_stderr(task_id, stderr)

    # The stored output file must match the on-disk original
    file = self.manager.get_output_file(task_id, "output.json")
    with open("../../sample_output/output.json", "rb") as f:
        output_json = f.read()
    self.assertEqual(file.encode('utf-8'), output_json)

    # The stored log files must match the on-disk originals
    facts_file = self.manager.get_output_file(task_id, "facts")
    results_file = self.manager.get_output_file(task_id, "results")
    with open("../../sample_output/facts", "rb") as f:
        facts = f.read()
    with open("../../sample_output/results", "rb") as f:
        results = f.read()
    self.assertEqual(facts_file.encode('utf-8'), facts)
    self.assertEqual(results_file.encode('utf-8'), results)

    # The stored stdout/stderr must match what was saved
    self.assertEqual(self.manager.get_stdout(task_id), stdout)
    self.assertEqual(self.manager.get_stderr(task_id), stderr)

    # Clean up the inserted jobs
    self.manager.remove_all_jobs_and_tasks()
def test_save_result(self):
    """Save a task's result files and streams, verify the round-trip
    contents, then append more stdout/stderr and verify concatenation."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = {"-f": "oo.exe"}
    sample_task.input_text_args = {"--timeout": "300"}
    sample_task.input_flag_args = ["-v"]
    sample_task.output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }

    # Build a sample job carrying the task and insert it
    job = Job("Test_job", "Just for test", datetime.now())
    job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(job)

    # Save the three sample output files plus stdout/stderr
    task_id = tasks_id[0]
    stdout = "sample stdout"
    stderr = "sample stderr"
    self.manager.save_result(task_id,
                             ["../../sample_output/output.json",
                              "../../sample_output/facts",
                              "../../sample_output/results"])
    self.manager.update_stdout(task_id, stdout)
    self.manager.update_stderr(task_id, stderr)

    # Read the on-disk originals to compare against
    stored_task = self.manager.db_manager.get_task_by_id(task_id)
    with open("../../sample_output/output.json", "rb") as f:
        output_json_text = f.read().decode('utf-8')
    with open("../../sample_output/facts", "rb") as f:
        facts_text = f.read().decode('utf-8')
    with open("../../sample_output/results", "rb") as f:
        results_text = f.read().decode('utf-8')

    # Every stored file and stream must round-trip unchanged
    self.assertEqual(self.manager.get_output_file(task_id, "output.json"),
                     output_json_text)
    self.assertEqual(self.manager.get_output_file(task_id, "facts"),
                     facts_text)
    self.assertEqual(self.manager.get_output_file(task_id, "results"),
                     results_text)
    self.assertEqual(self.manager.get_stdout(task_id), stdout)
    self.assertEqual(self.manager.get_stderr(task_id), stderr)
    self.assertEqual(stored_task.output,
                     ["output.json", "facts", "results"])
    self.assertEqual(stored_task.status, Status.Pending)

    # A second update must append to the existing stdout/stderr
    new_stdout = "New stdout"
    new_stderr = "New stderr"
    self.manager.update_stdout(task_id, new_stdout)
    self.manager.update_stderr(task_id, new_stderr)
    self.assertEqual(self.manager.get_stdout(task_id), stdout + new_stdout)
    self.assertEqual(self.manager.get_stderr(task_id), stderr + new_stderr)

    # Clean up the inserted job & task
    self.manager.remove_all_jobs_and_tasks()