def test_insert_job_without_tasks(self):
        """Insert a job that has no tasks and verify the stored fields."""
        # Start from a clean database
        self.manager.remove_all_jobs_and_tasks()

        # Build a job with an empty task list
        name = "Test_job"
        comment = "Just for test"
        submitted_at = datetime.now()
        job = Job(name, comment, submitted_at)
        job.tasks = []
        job_id, tasks_id = self.manager.insert_new_job(job)

        # A string id for the job, and no task ids at all
        self.assertIsInstance(job_id, str)
        self.assertEqual(len(tasks_id), 0)

        # MongoDB keeps millisecond precision only, so truncate the
        # sub-millisecond digits before comparing timestamps.
        submitted_at = submitted_at.replace(
            microsecond=submitted_at.microsecond // 1000 * 1000)

        # Fetch the job back and compare every stored field
        fetched = self.manager.db_manager.get_job_by_id_without_tasks(job_id)
        self.assertEqual(fetched.name, name)
        self.assertEqual(fetched.comment, comment)
        self.assertEqual(fetched.created_time, submitted_at)
        self.assertEqual(fetched.status, Status.Pending)

        # Clean up after the test
        self.manager.remove_all_jobs_and_tasks()
    def test_get_all_jobs_without_tasks(self):
        """Insert several jobs and check get_all_jobs_without_tasks."""
        # Start from a clean database
        self.manager.remove_all_jobs_and_tasks()

        # Insert three sample jobs; second precision avoids any
        # millisecond truncation mismatch when comparing later.
        inserted = []
        for index in range(3):
            timestamp = datetime.now().replace(microsecond=0)
            job = Job("Test_job%d" % index, "Just for test%d" % index,
                      timestamp)
            job.tasks = []
            inserted.append(job)
            self.manager.insert_new_job(job)

        # Every stored job matches the one we inserted, in order
        fetched = self.manager.db_manager.get_all_jobs_without_tasks()
        for index, expected in enumerate(inserted):
            actual = fetched[index]
            self.assertEqual(expected.name, actual.name)
            self.assertEqual(expected.comment, actual.comment)
            self.assertEqual(expected.created_time, actual.created_time)
            self.assertEqual(expected.status, actual.status)

        # Remove the inserted jobs
        self.manager.remove_all_jobs_and_tasks()
Esempio n. 3
0
        def get_job_by_id_without_tasks(self, job_id):
            """Fetch a single job from the database by its id.

            The returned Job carries no tasks to keep the query light;
            use get_tasks_by_job_id to obtain the related tasks.

            Arg:
                job_id (String): the id of the specific job

            Return:
                job (Job): the Job object of the specific id
                    (None is returned implicitly if the lookup fails)
            """
            logger = Logger().get()
            logger.debug(f"start get_job_by_id_without_tasks, job_id:{job_id}")
            try:
                # Look the document up by its ObjectId
                query = {"_id": ObjectId(job_id)}
                document = self.__jobs_collection.find_one(query)

                # Reconstruct a Job object from the stored fields
                result = Job(document["name"], document["comment"],
                             document["created_time"],
                             status=Status(document["status"]))
                result.job_id = job_id
                result.finished_time = document["finished_time"]
                return result
            except Exception as e:
                # Best-effort: log and fall through (returns None)
                logger.error(f"something wrong in get_job_by_id_without_tasks,"
                             f" Exception: {e}")
    def test_update_status(self):
        """Update job/task statuses and verify the new values persist."""
        # Start from a clean database
        self.manager.remove_all_jobs_and_tasks()

        # Build a sample task
        task = Task()
        task.program_name = "ooanalyzer"
        task.input_file_args = {"-f": "oo.exe"}
        task.input_text_args = {"--timeout": "300"}
        task.input_flag_args = ["-v"]
        task.output_file_args = {
            "-j": "output.json",
            "-F": "facts",
            "-R": "results",
        }

        # Build the job carrying that single task and insert both
        job = Job("Test_job", "Just for test", datetime.now())
        job.tasks = [task]
        job_id, tasks_id = self.manager.insert_new_job(job)
        task_id = tasks_id[0]

        # Flip the job and the task to new statuses
        self.manager.update_job_status(job_id, Status.Failed)
        self.manager.update_task_status(task_id, Status.Successful)

        # get_status reports the task status by name
        self.assertEqual(self.manager.get_status(task_id),
                         Status.Successful.name)

        # The rebuilt job reflects the updated status
        fetched_job = self.manager.db_manager.get_job_by_id_without_tasks(
            job_id)
        self.assertEqual(fetched_job.status, Status.Failed)

        # The rebuilt task reflects the updated status
        fetched_task = self.manager.db_manager.get_task_by_id(task_id)
        self.assertEqual(fetched_task.status, Status.Successful)

        # Clean up after the test
        self.manager.remove_all_jobs_and_tasks()
Esempio n. 5
0
    def setUp(self):
        """Submit a one-task dummy job and remember its ids and time."""
        print("Test with command\n%s\n" % self.command_line_input)
        print("Submitting job...")

        submitted_at = datetime.now()
        job = Job(self.job_name, self.comments, submitted_at)
        job.tasks = [Task(self.location, self.tool_type,
                          self.command_line_input)]
        self.job_id, self.tasks_id = JobManager().submit_job(job)

        # MongoDB keeps millisecond precision only, so drop the extra
        # microsecond digits before storing the reference timestamp.
        self.now = submitted_at.replace(
            microsecond=submitted_at.microsecond // 1000 * 1000)
Esempio n. 6
0
def thread_test_EM_running(task_type):
    """Submit a sample job to the new ExecutionManager for testing.

    Loads a sample job description from a JSON fixture ("false" selects
    the deliberately wrong command line, anything else the valid one),
    writes the embedded base64 input files under /tmp/Felucca/<task_id>/,
    rewires task.files to point at the written files, and submits the
    task to the ExecutionManager.

    Arg:
        task_type (String): "false" for the wrong-input sample job,
            any other value for the valid sample job.
    """
    if task_type == "false":
        with open(
                os.path.join(os.path.dirname(__file__),
                             '../../tests/sample_output/input_wrong.json'),
                'r') as f:
            json_data = json.load(f)
        job = Job.from_json(json_data)
        job.job_id = "thisisafalseinputcmdtaskjob6"
        task = job.tasks[0]
        task.task_id = "thisisafalseinputcmdtask"
    else:
        with open(
                os.path.join(os.path.dirname(__file__),
                             '../../tests/sample_output/input.json'),
                'r') as f:
            json_data = json.load(f)
        job = Job.from_json(json_data)
        job.job_id = "thisisatrueinputcmd_job6"
        task = job.tasks[0]
        task.task_id = "thisisatrueinputcmd_task"

    file_dict = {}
    folder_path = os.path.join("/tmp/Felucca", f"{task.task_id}")

    if not os.path.exists(folder_path):
        os.makedirs(folder_path)

    for input_flag, content in json_data["Tasks"][0]["Files"].items():
        # The user-facing filename of this input, e.g. "oo.exe"
        filename = json_data["Tasks"][0]["Input_File_Args"][input_flag]
        # BUG FIX: write each input file under its own name. The old
        # code interpolated a literal placeholder instead of `filename`,
        # so every file of a task collided on the same bogus path.
        file_path = os.path.join(folder_path, filename)

        with open(file_path, "wb") as f:
            byte_stream = base64.b64decode(content.encode('utf-8'))
            f.write(byte_stream)

        # Simulate RM rewriting task.files from {"-f": exe_str}
        # to {"-f": path} before execution.
        file_dict[filename] = file_path
        print(f"file_path: {file_path}")
    task.files = file_dict
    ExecutionManager().submit_task(task)
Esempio n. 7
0
    def get_job_info(self, job_id):
        """Return a json-dict of the specific job.

        Args:
            job_id (String): id of the job
        Return:
            job_dict (dict): the info of job
        """
        # Fetch the job (with tasks) and serialize it in one step
        return Job.to_json(self.db_manager.get_job_by_id(job_id), True)
Esempio n. 8
0
    def start(self, job: Job):
        """Initialize the system, log into the wiki, then run the job.

        Exits the process with status 2 on any login failure.
        """
        import sys
        args, config, logger, lang = self.__init_system()
        logger.info(lang.t("main.starting_bot"))

        wiki = Site(config["wiki"]["api"])
        logger.info(lang.t("main.logging_in").format(name=config["wiki"]["name"]))
        try:
            wiki.user = args.user
            wiki.password = args.password
            wiki.login(args.user, args.password)
        except LoginError:
            logger.error(lang.t("main.wrong_credentials"))
            sys.exit(2)
        except MaximumRetriesExceeded:
            logger.error(lang.t("main.maximum_retries"))
            sys.exit(2)
        except APIError:
            logger.error(lang.t("main.api_error"))
            sys.exit(2)
        else:
            logger.info(lang.t("main.logged_in").format(user=args.user))

        job.bootstrap(wiki, logger, args.tasks, args.password)
        job.run()
Esempio n. 9
0
    def save_new_job_and_tasks(self, new_job_dict):
        """Turn the newly submitted job with its tasks from dict to objects
        and save them.

        The input files of every task are decoded from base64 and written
        to a temporary directory named after the task id.

        Args:
            new_job_dict (dict): the json file from Front-end
        Return:
            job (Job): built job instance with its tasks
        """
        logger = Logger().get()
        logger.debug("start save_new_job_and_tasks")

        # Build the job & task objects from json
        job = Job.from_json(new_job_dict)
        # Keep second precision; MongoDB only stores milliseconds anyway
        job.created_time = datetime.now().replace(microsecond=0)

        # Persist the job & tasks and propagate the generated ids
        job_id, tasks_id = self.db_manager.insert_new_job(job)
        job.job_id = job_id
        for i in range(len(job.tasks)):
            job.tasks[i].job_id = job_id
            job.tasks[i].task_id = tasks_id[i]

        # Save the input files of tasks
        for i in range(len(new_job_dict["Tasks"])):
            task = job.tasks[i]
            file_dict = {}

            # Create the unique directory for each task
            task_file_path = os.path.join("/tmp/Felucca", f"{task.task_id}")
            if not os.path.exists(task_file_path):
                try:
                    os.makedirs(task_file_path)
                except OSError as e:
                    logger.error(f"Failed to create directory {task_file_path}"
                                 f" with exception {e}")

            for param, content in new_job_dict["Tasks"][i]["Files"].items():
                filename = new_job_dict["Tasks"][i]["Input_File_Args"][param]
                # BUG FIX: write each input file under its own name.
                # The old code interpolated a literal placeholder instead
                # of `filename` (which was computed but never used), so
                # all files of a task collided on one bogus path.
                file_path = os.path.join(task_file_path, filename)
                with open(file_path, "wb") as f:
                    f.write(base64.b64decode(content.encode('utf-8')))
                file_dict[param] = file_path
            task.files = file_dict

        return job
Esempio n. 10
0
    def get_job_list(self):
        """Return a list of job objects in json format.

        Return:
            job_list (list of dict): info of all jobs in database
        """
        logger = Logger().get()
        logger.debug("start get_job_list")

        # Serialize every stored job (tasks included) to a json dict
        return [Job.to_json(job) for job in self.get_all_jobs_with_tasks()]
Esempio n. 11
0
    def test_job_status(self):
        """A job is Running after init and Finished once all tasks end."""
        self.resource_manager.remove_all_jobs_and_tasks()

        # Describe one task; the job will contain three copies of it
        task_json = {
            "Files": {},
            "Program_Name": "ooanalyzer",
            "Input_File_Args": {"-f": "oo.exe"},
            "Input_Text_Args": {"--timeout": "300"},
            "Input_Flag_Args": ["-v"],
            "Output_File_Args": {
                "-j": "output.json",
                "-F": "facts",
                "-R": "results",
            },
        }
        input_json = {
            "Job_Name": "dump_job",
            "Job_Comment": "this is the test json input for job manager",
            "Tasks": [task_json, task_json, task_json],
        }
        job = self.resource_manager.save_new_job_and_tasks(input_json)
        print(Job.to_json(job))
        job_id = job.job_id

        # Registering the job with the job manager marks it Running
        self.job_manager.initialize_job(job)
        job = self.resource_manager.get_job_by_id(job_id)
        self.assertEqual(job.status, Status.Running)

        # Completing every task flips the job to Finished
        for task in job.tasks:
            self.resource_manager.update_task_status(task.task_id,
                                                     Status.Successful)
            self.job_manager.finish_task(task.task_id)

        job = self.resource_manager.get_job_by_id(job_id)
        self.assertEqual(job.status, Status.Finished)
    def test_insert_job_with_single_task(self):
        """Insert a one-task job and verify both stored documents."""
        # Start from a clean database
        self.manager.remove_all_jobs_and_tasks()

        # Build a sample task
        input_file_args = {"-f": "oo.exe"}
        input_text_args = {"--timeout": "300"}
        input_flag_args = ["-v"]
        output_file_args = {
            "-j": "output.json",
            "-F": "facts",
            "-R": "results",
        }
        task = Task()
        task.program_name = "ooanalyzer"
        task.input_file_args = input_file_args
        task.input_text_args = input_text_args
        task.input_flag_args = input_flag_args
        task.output_file_args = output_file_args

        # Build the job carrying that task and insert both
        name = "Test_job"
        comment = "Just for test"
        submitted_at = datetime.now()
        job = Job(name, comment, submitted_at)
        job.tasks = [task]
        job_id, tasks_id = self.manager.insert_new_job(job)

        # One string id per inserted document
        self.assertIsInstance(job_id, str)
        self.assertEqual(len(tasks_id), 1)
        self.assertIsInstance(tasks_id[0], str)

        # MongoDB keeps millisecond precision only, so truncate the
        # sub-millisecond digits before comparing timestamps.
        submitted_at = submitted_at.replace(
            microsecond=submitted_at.microsecond // 1000 * 1000)

        # Fetch the job back and compare every stored field
        fetched_job = self.manager.db_manager.get_job_by_id_without_tasks(
            job_id)
        self.assertEqual(fetched_job.name, name)
        self.assertEqual(fetched_job.comment, comment)
        self.assertEqual(fetched_job.created_time, submitted_at)
        self.assertEqual(fetched_job.status, Status.Pending)

        # Fetch the task back and compare its argument collections
        fetched_task = self.manager.db_manager.get_task_by_id(tasks_id[0])
        self.assertEqual(fetched_task.input_file_args, input_file_args)
        self.assertEqual(fetched_task.input_text_args, input_text_args)
        self.assertEqual(fetched_task.input_flag_args, input_flag_args)
        self.assertEqual(fetched_task.output_file_args, output_file_args)
        self.assertEqual(fetched_task.status, Status.Pending)

        # Clean up after the test
        self.manager.remove_all_jobs_and_tasks()
    def test_failure(self):
        """save_result records only those output files that exist."""
        # Start from a clean database
        self.manager.remove_all_jobs_and_tasks()

        # Build a sample task
        task = Task()
        task.program_name = "ooanalyzer"
        task.input_file_args = {"-f": "oo.exe"}
        task.input_text_args = {"--timeout": "300"}
        task.input_flag_args = ["-v"]
        task.output_file_args = {
            "-j": "output.json",
            "-F": "facts",
            "-R": "results",
        }

        # Build and insert the job carrying that task
        job = Job("Test_job", "Just for test", datetime.now())
        job.tasks = [task]
        job_id, tasks_id = self.manager.insert_new_job(job)

        # Create an empty directory and generate only two of the three
        # expected output files ("facts" is deliberately missing)
        dir_path = "/tmp/Felucca/unit_test/test"
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)
        os.makedirs(dir_path)
        output_file_path = os.path.join(dir_path, "output.json")
        results_file_path = os.path.join(dir_path, "results")
        facts_file_path = os.path.join(dir_path, "facts")
        with open(output_file_path, 'w+') as f:
            f.write("{}")
        with open(results_file_path, 'w+') as f:
            f.write("test result content")

        # Save the (partial) result of the task
        task_id = tasks_id[0]
        stdout = None
        stderr = "sample stderr"
        self.manager.save_result(
            task_id,
            [output_file_path, results_file_path, facts_file_path])
        self.manager.update_stdout(task_id, stdout)
        self.manager.update_stderr(task_id, stderr)

        # Only the files that actually existed were recorded
        fetched_task = self.manager.db_manager.get_task_by_id(task_id)
        self.assertEqual(fetched_task.output, ["output.json", "results"])

        # stdout & stderr round-trip unchanged (stdout stays None)
        self.assertEqual(self.manager.get_stdout(task_id), stdout)
        self.assertEqual(self.manager.get_stderr(task_id), stderr)

        # Remove the directory & inserted tasks
        shutil.rmtree(dir_path)
        self.manager.remove_all_jobs_and_tasks()
    def test_get_output_and_log_file(self):
        """Stored output/log files can be retrieved byte-for-byte."""
        # Start from a clean database
        self.manager.remove_all_jobs_and_tasks()

        # Build a sample task
        task = Task()
        task.program_name = "ooanalyzer"
        task.input_file_args = {"-f": "oo.exe"}
        task.input_text_args = {"--timeout": "300"}
        task.input_flag_args = ["-v"]
        task.output_file_args = {
            "-j": "output.json",
            "-F": "facts",
            "-R": "results",
        }

        # Build and insert the job carrying that task
        job = Job("Test_job", "Just for test", datetime.now())
        job.tasks = [task]
        job_id, tasks_id = self.manager.insert_new_job(job)

        # Save the result files plus stdout/stderr for the task
        task_id = tasks_id[0]
        stdout = "sample stdout"
        stderr = "sample stderr"
        self.manager.save_result(task_id, [
            "../../sample_output/output.json",
            "../../sample_output/facts",
            "../../sample_output/results",
        ])
        self.manager.update_stdout(task_id, stdout)
        self.manager.update_stderr(task_id, stderr)

        # Each retrieved file matches the bytes of its fixture on disk
        for basename in ("output.json", "facts", "results"):
            retrieved = self.manager.get_output_file(task_id, basename)
            with open("../../sample_output/" + basename, "rb") as f:
                expected = f.read()
            self.assertEqual(retrieved.encode('utf-8'), expected)

        # stdout & stderr round-trip unchanged
        self.assertEqual(self.manager.get_stdout(task_id), stdout)
        self.assertEqual(self.manager.get_stderr(task_id), stderr)

        # Clean up after the test
        self.manager.remove_all_jobs_and_tasks()
    def test_save_result(self):
        """save_result stores files; stdout/stderr updates append."""
        # Start from a clean database
        self.manager.remove_all_jobs_and_tasks()

        # Build a sample task
        task = Task()
        task.program_name = "ooanalyzer"
        task.input_file_args = {"-f": "oo.exe"}
        task.input_text_args = {"--timeout": "300"}
        task.input_flag_args = ["-v"]
        task.output_file_args = {
            "-j": "output.json",
            "-F": "facts",
            "-R": "results",
        }

        # Build and insert the job carrying that task
        job = Job("Test_job", "Just for test", datetime.now())
        job.tasks = [task]
        job_id, tasks_id = self.manager.insert_new_job(job)

        # Save the result files plus stdout/stderr for the task
        task_id = tasks_id[0]
        stdout = "sample stdout"
        stderr = "sample stderr"
        self.manager.save_result(task_id, [
            "../../sample_output/output.json",
            "../../sample_output/facts",
            "../../sample_output/results",
        ])
        self.manager.update_stdout(task_id, stdout)
        self.manager.update_stderr(task_id, stderr)

        # Rebuild the task object for the checks below
        fetched_task = self.manager.db_manager.get_task_by_id(task_id)

        # Each stored file matches the content of its fixture on disk
        for basename in ("output.json", "facts", "results"):
            with open("../../sample_output/" + basename, "rb") as f:
                expected = f.read().decode('utf-8')
            self.assertEqual(
                self.manager.get_output_file(task_id, basename), expected)

        self.assertEqual(self.manager.get_stdout(task_id), stdout)
        self.assertEqual(self.manager.get_stderr(task_id), stderr)

        self.assertEqual(fetched_task.output,
                         ["output.json", "facts", "results"])
        self.assertEqual(fetched_task.status, Status.Pending)

        # A second update appends to the previous stdout/stderr
        new_stdout = "New stdout"
        new_stderr = "New stderr"
        self.manager.update_stdout(task_id, new_stdout)
        self.manager.update_stderr(task_id, new_stderr)
        self.assertEqual(self.manager.get_stdout(task_id), stdout + new_stdout)
        self.assertEqual(self.manager.get_stderr(task_id), stderr + new_stderr)

        # Clean up after the test
        self.manager.remove_all_jobs_and_tasks()