def simulate(n: int):
    """Run ``n`` independent simulation trials and summarize the results.

    Each trial builds a fresh ExecutionManager and process set, runs every
    method in METHODS, and records the per-method results. After all trials,
    results are regrouped per method and passed to ``summarize``.

    Args:
        n (int): Number of trials to run.
    """
    print(f'Simulation for {n} trials:')
    history = []
    for i in range(n):
        manager = ExecutionManager(Config.AVAILABLE_PAGES)
        processes = manager.setup_processes(Config.NUM_OF_PROCESSES,
                                            Config.LEN_OF_PROCESSES,
                                            Config.MIN_FRAMES,
                                            Config.MAX_FRAMES)
        history.append(manager.execute_all(METHODS, processes, plot=False))
        print(f'{i+1}/{n}')
    # Transpose per-trial results into per-method series across all trials
    # (comprehension instead of the original append loop — same ordering).
    methods = [[hist[i] for hist in history] for i in range(len(METHODS))]
    summarize(methods)
def kill_job(self, job_id):
    """Kill all unfinished tasks of this job.

    Args:
        job_id (String): The id of the job
    """
    logger = Logger().get()
    logger.debug(f"Start killing the job({job_id})")
    try:
        job = self.job_metadata[job_id]
        if job.finished_count == len(job.tasks):
            # Already complete: nothing to kill, just drop the metadata entry.
            self.job_metadata.pop(job_id, None)
            logger.debug(f"Job({job_id}) has finished. Skipped killing.")
            return
        killed_count = 0
        for task in job.tasks:
            # Fixed: use `not ...` instead of comparing to False (PEP 8).
            if not job.finished_map[task.task_id]:
                ExecutionManager().kill_task(task.task_id)
                killed_count += 1
        ResourceManager(self.db_name).update_job_status(
            job_id, Status.Killed)
        self.job_metadata.pop(job_id, None)
        logger.debug(f"Killed {killed_count} tasks of Job({job_id}).")
    except Exception as e:
        # Broad catch is deliberate: killing is best-effort and must not
        # propagate; the failure is logged for diagnosis.
        logger.error(
            f"Something wrong when killing job({job_id}). Exception: {e}")
def submit_job(self, new_job):
    """Register a new job, then submit every one of its tasks for execution.

    The job is first registered via ``initialize_job`` (which assigns the
    job_id / task_ids), after which each task is handed to the
    ExecutionManager.  (The old docstring claimed only the first task was
    submitted; the loop below submits them all.)

    Args:
        new_job: a new job instance submitted by front_end
    """
    logger = Logger().get()
    logger.debug("receive job in submit_job")
    self.initialize_job(new_job)
    logger.debug(
        f"submit task to ExecutionManager, task_id={new_job.tasks[0].task_id}"
    )
    for task in new_job.tasks:
        ExecutionManager().submit_task(task)
def thread_test_EM_running(task_type):
    """Test driver for the new execution manager.

    Loads a sample job definition (a deliberately broken one when
    ``task_type == "false"``), materializes its base64-encoded input files
    under /tmp/Felucca/<task_id>/, rewrites ``task.files`` to point at the
    on-disk paths (simulating what the Resource Manager does), and submits
    the task.

    Args:
        task_type (str): "false" to use the known-bad sample input,
            anything else for the good sample input.
    """
    if task_type == "false":
        with open(
                os.path.join(os.path.dirname(__file__),
                             '../../tests/sample_output/input_wrong.json'),
                'r') as f:
            json_data = json.load(f)
        job = Job.from_json(json_data)
        job.job_id = "thisisafalseinputcmdtaskjob6"
        task = job.tasks[0]
        task.task_id = "thisisafalseinputcmdtask"
    else:
        with open(
                os.path.join(os.path.dirname(__file__),
                             '../../tests/sample_output/input.json'),
                'r') as f:
            json_data = json.load(f)
        job = Job.from_json(json_data)
        job.job_id = "thisisatrueinputcmd_job6"
        task = job.tasks[0]
        task.task_id = "thisisatrueinputcmd_task"
    file_dict = {}
    folder_path = os.path.join("/tmp/Felucca", f"{task.task_id}")
    if not os.path.exists(folder_path):
        os.makedirs(folder_path)
    for input_flag, content in json_data["Tasks"][0]["Files"].items():
        filename = json_data["Tasks"][0]["Input_File_Args"][
            input_flag]  # e.g. oo.exe
        # Fixed: write each input file under its own name. The previous
        # placeholder path made every file land at the same location,
        # so multiple inputs would overwrite one another.
        file_path = os.path.join("/tmp/Felucca",
                                 f"{task.task_id}/{filename}")
        with open(file_path, "wb") as f:
            byte_stream = base64.b64decode(content.encode('utf-8'))
            f.write(byte_stream)
        # Simulate RM rewriting task.files from {"-f": exe_str}
        # to {"-f": path}.
        file_dict[filename] = file_path
        print(f"file_path: {file_path}")
    task.files = file_dict
    ExecutionManager().submit_task(task)
def setUp(self):
    """Create a fresh ExecutionManager fixture before each test."""
    self.execution_manager = ExecutionManager()
def thread_update_kernel(BASE_IMAGE="seipharos/pharos:latest"):
    """Kill every running job, then rebuild the kernel from the base image.

    Args:
        BASE_IMAGE: Docker image tag the kernel update is based on.
    """
    JobManager().kill_all_jobs()
    ExecutionManager().update_kernel(BASE_IMAGE)
def get_task(task_id):
    """Return the command-line input recorded for the given task id."""
    cmd = ExecutionManager().get_command_line_input(task_id)
    return {'command_line_input': cmd}
def get_result():
    """Record a posted task result and mark the task finished.

    Reads 'status' and 'task_id' from the request form, saves the result
    via the ExecutionManager, and notifies the JobManager.
    """
    status = request.form['status']
    task_id = request.form['task_id']
    ExecutionManager().save_result(task_id, status)
    JobManager().finish_task(task_id)
    return {'is_received': True}
def kill_task(task_id):
    """Ask the execution manager to terminate the given task."""
    ExecutionManager().kill_task(task_id)
    return {"Status": "ok"}
def main():
    """Entry point: build the process set and run every method on it."""
    manager = ExecutionManager(Config.AVAILABLE_PAGES)
    procs = manager.setup_processes(Config.NUM_OF_PROCESSES,
                                    Config.LEN_OF_PROCESSES,
                                    Config.MIN_FRAMES,
                                    Config.MAX_FRAMES)
    manager.execute_all(METHODS, procs)