def test_update_status(self):
    """Status updates on a job and its task are persisted and retrievable."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = {"-f": "oo.exe"}
    sample_task.input_text_args = {"--timeout": "300"}
    sample_task.input_flag_args = ["-v"]
    sample_task.output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }

    # Wrap the task in a sample job and insert both
    sample_job = Job("Test_job", "Just for test", datetime.now())
    sample_job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(sample_job)
    task_id = tasks_id[0]

    # Apply different statuses to the job and to its task
    self.manager.update_job_status(job_id, Status.Failed)
    self.manager.update_task_status(task_id, Status.Successful)

    # The task status is retrievable by name
    self.assertEqual(self.manager.get_status(task_id), Status.Successful.name)

    # Rebuilding the job reflects the updated job status
    rebuilt_job = self.manager.db_manager.get_job_by_id_without_tasks(job_id)
    self.assertEqual(rebuilt_job.status, Status.Failed)

    # Rebuilding the task reflects the updated task status
    rebuilt_task = self.manager.db_manager.get_task_by_id(task_id)
    self.assertEqual(rebuilt_task.status, Status.Successful)

    # Remove the inserted job & task after the test
    self.manager.remove_all_jobs_and_tasks()
def get_task_by_id(self, task_id):
    """Return a Task object rebuilt from the stored document of a task.

    Args:
        task_id (str): the id of the task to look up

    Returns:
        Task: the rebuilt Task object, or None if the lookup or
        reconstruction fails (the error is logged).
    """
    logger = Logger().get()
    logger.debug(f"start get_task_by_id, task_id:{task_id}")
    try:
        # Find the task document by its ObjectId
        condition = {"_id": ObjectId(task_id)}
        task_doc = self.__tasks_collection.find_one(condition)

        # Only the filenames (the dict keys of "output_files") are kept
        # on the rebuilt object, not the stored file contents.
        output_list = list(task_doc["output_files"])

        # Rebuild the Task object from the query result
        task = Task()
        task.job_id = str(task_doc["job_id"])
        task.task_id = task_id
        task.program_name = task_doc['program_name']
        task.input_file_args = task_doc['input_file_args']
        task.input_text_args = task_doc['input_text_args']
        task.input_flag_args = task_doc['input_flag_args']
        task.output_file_args = task_doc['output_file_args']
        task.output = output_list
        task.stdout = task_doc["stdout"]
        task.stderr = task_doc["stderr"]
        task.status = Status(task_doc["status"])
        task.finished_time = task_doc["finished_time"]
        logger.debug(f"get_task_by_id successfully, task_id:{task_id}")
        return task
    except Exception as e:
        # Broad catch mirrors the module's error-handling style: log the
        # failure and return None explicitly so callers can check the result.
        logger.error(f"something wrong in get_task_by_id, Exception: {e}")
        return None
def test_insert_job_with_single_task(self):
    """Inserting a job with one task returns string ids and stores all fields."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Argument sets for the sample ooanalyzer task
    input_file_args = {"-f": "oo.exe"}
    input_text_args = {"--timeout": "300"}
    input_flag_args = ["-v"]
    output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }

    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = input_file_args
    sample_task.input_text_args = input_text_args
    sample_task.input_flag_args = input_flag_args
    sample_task.output_file_args = output_file_args

    # Build the enclosing job and insert job + task together
    job_name = "Test_job"
    job_comment = "Just for test"
    created_time = datetime.now()
    sample_job = Job(job_name, job_comment, created_time)
    sample_job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(sample_job)

    # The returned ids are strings, one task id per task
    self.assertTrue(isinstance(job_id, str))
    self.assertEqual(len(tasks_id), 1)
    self.assertTrue(isinstance(tasks_id[0], str))

    # MongoDB's precision doesn't store nanoseconds, so drop them from
    # the expected timestamp before comparing
    ms_without_ns = int(created_time.microsecond / 1000) * 1000
    created_time = created_time.replace(microsecond=ms_without_ns)

    # The rebuilt job carries the original metadata and a Pending status
    rebuilt_job = self.manager.db_manager.get_job_by_id_without_tasks(job_id)
    self.assertEqual(rebuilt_job.name, job_name)
    self.assertEqual(rebuilt_job.comment, job_comment)
    self.assertEqual(rebuilt_job.created_time, created_time)
    self.assertEqual(rebuilt_job.status, Status.Pending)

    # The rebuilt task carries every argument set and a Pending status
    rebuilt_task = self.manager.db_manager.get_task_by_id(tasks_id[0])
    self.assertEqual(rebuilt_task.input_file_args, input_file_args)
    self.assertEqual(rebuilt_task.input_text_args, input_text_args)
    self.assertEqual(rebuilt_task.input_flag_args, input_flag_args)
    self.assertEqual(rebuilt_task.output_file_args, output_file_args)
    self.assertEqual(rebuilt_task.status, Status.Pending)

    # Remove the inserted job & task after the test
    self.manager.remove_all_jobs_and_tasks()
def test_failure(self):
    """save_result records only the output files that actually exist."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task wrapped in a sample job
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = {"-f": "oo.exe"}
    sample_task.input_text_args = {"--timeout": "300"}
    sample_task.input_flag_args = ["-v"]
    sample_task.output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }
    sample_job = Job("Test_job", "Just for test", datetime.now())
    sample_job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(sample_job)

    # Recreate an empty scratch directory
    dir_path = "/tmp/Felucca/unit_test/test"
    if os.path.exists(dir_path):
        shutil.rmtree(dir_path)
    os.makedirs(dir_path)

    # Generate only two of the three expected files; "facts" is left
    # missing on purpose (the assertion below expects it to be absent)
    output_file_path = os.path.join(dir_path, "output.json")
    results_file_path = os.path.join(dir_path, "results")
    facts_file_path = os.path.join(dir_path, "facts")
    with open(output_file_path, 'w+') as f:
        f.write("{}")
    with open(results_file_path, 'w+') as f:
        f.write("test result content")

    # Save the (partial) result of the task
    task_id = tasks_id[0]
    stdout = None
    stderr = "sample stderr"
    output_file_list = [output_file_path, results_file_path, facts_file_path]
    self.manager.save_result(task_id, output_file_list)
    self.manager.update_stdout(task_id, stdout)
    self.manager.update_stderr(task_id, stderr)

    # Only the files that exist show up on the rebuilt task
    rebuilt_task = self.manager.db_manager.get_task_by_id(task_id)
    self.assertEqual(rebuilt_task.output, ["output.json", "results"])
    self.assertEqual(self.manager.get_stdout(task_id), stdout)
    self.assertEqual(self.manager.get_stderr(task_id), stderr)

    # Remove the scratch directory & the inserted records
    shutil.rmtree(dir_path)
    self.manager.remove_all_jobs_and_tasks()
def test_get_output_and_log_file(self):
    """Stored output/log files and std streams round-trip through the manager."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task wrapped in a sample job
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = {"-f": "oo.exe"}
    sample_task.input_text_args = {"--timeout": "300"}
    sample_task.input_flag_args = ["-v"]
    sample_task.output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }
    sample_job = Job("Test_job", "Just for test", datetime.now())
    sample_job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(sample_job)

    # Save the result of the task from the checked-in sample outputs
    task_id = tasks_id[0]
    stdout = "sample stdout"
    stderr = "sample stderr"
    output_file_list = [
        "../../sample_output/output.json",
        "../../sample_output/facts",
        "../../sample_output/results",
    ]
    self.manager.save_result(task_id, output_file_list)
    self.manager.update_stdout(task_id, stdout)
    self.manager.update_stderr(task_id, stderr)

    # Each retrieved file (output and logs) must match the bytes of its source
    for filename, source_path in [
        ("output.json", "../../sample_output/output.json"),
        ("facts", "../../sample_output/facts"),
        ("results", "../../sample_output/results"),
    ]:
        retrieved = self.manager.get_output_file(task_id, filename)
        with open(source_path, "rb") as f:
            expected = f.read()
        self.assertEqual(retrieved.encode('utf-8'), expected)

    # stdout & stderr round-trip unchanged
    self.assertEqual(self.manager.get_stdout(task_id), stdout)
    self.assertEqual(self.manager.get_stderr(task_id), stderr)

    # Remove the inserted jobs
    self.manager.remove_all_jobs_and_tasks()
def test_save_result(self):
    """save_result stores files/streams; later std updates are appended."""
    # Start from a clean database
    self.manager.remove_all_jobs_and_tasks()

    # Build a sample ooanalyzer task wrapped in a sample job
    sample_task = Task()
    sample_task.program_name = "ooanalyzer"
    sample_task.input_file_args = {"-f": "oo.exe"}
    sample_task.input_text_args = {"--timeout": "300"}
    sample_task.input_flag_args = ["-v"]
    sample_task.output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results",
    }
    sample_job = Job("Test_job", "Just for test", datetime.now())
    sample_job.tasks = [sample_task]
    job_id, tasks_id = self.manager.insert_new_job(sample_job)

    # Save the result of the task from the checked-in sample outputs
    task_id = tasks_id[0]
    stdout = "sample stdout"
    stderr = "sample stderr"
    self.manager.save_result(task_id, [
        "../../sample_output/output.json",
        "../../sample_output/facts",
        "../../sample_output/results",
    ])
    self.manager.update_stdout(task_id, stdout)
    self.manager.update_stderr(task_id, stderr)

    # Every stored file must decode to the text of its source file
    rebuilt_task = self.manager.db_manager.get_task_by_id(task_id)
    for filename, source_path in [
        ("output.json", "../../sample_output/output.json"),
        ("facts", "../../sample_output/facts"),
        ("results", "../../sample_output/results"),
    ]:
        with open(source_path, "rb") as f:
            expected = f.read().decode('utf-8')
        self.assertEqual(self.manager.get_output_file(task_id, filename), expected)
    self.assertEqual(self.manager.get_stdout(task_id), stdout)
    self.assertEqual(self.manager.get_stderr(task_id), stderr)
    self.assertEqual(rebuilt_task.output, ["output.json", "facts", "results"])
    self.assertEqual(rebuilt_task.status, Status.Pending)

    # Further std updates are appended to the existing streams
    new_stdout = "New stdout"
    new_stderr = "New stderr"
    self.manager.update_stdout(task_id, new_stdout)
    self.manager.update_stderr(task_id, new_stderr)
    self.assertEqual(self.manager.get_stdout(task_id), stdout + new_stdout)
    self.assertEqual(self.manager.get_stderr(task_id), stderr + new_stderr)

    # Remove the inserted job & task after the test
    self.manager.remove_all_jobs_and_tasks()