def launch_default(args):
    # TODO: build into install?
    try:
        mkdir(path.expanduser('~') + "/.bee")
    except OSError:
        pass

    bl = BeeLogging(args.logflag, args.log_dest, args.quite)
    bee_args = BeeArguments(bl)

    # execute task if argument is present
    # exclusivity rules are managed by argparse groups
    if args.launch_task:
        bee_args.opt_launch(args)
    if args.terminate_task:
        bee_args.opt_terminate(args)
    if args.examine_task:
        bee_args.opt_examine(args)
    if args.launch_flow:
        LaunchBeeFlow(args.launch_flow[0], bl, args.testonly)
    if args.terminate_flow:
        terminate_flow_id(args.terminate_flow[0])
def manage_args(args):
    if args.debug:
        start = time.time()
    else:
        start = None

    # check file requirements
    verify_pyro4_conf()

    beelog = BeeLogging(args.logflag, args.log_dest, args.quite)
    eo = ExecOrc(beelog)

    if args.task is not None:
        if args.orc:
            # bee-orchestrator -o -t $(pwd)/hello_lh
            f = BeefileLoader(args.task[0], beelog)
            blog_args = {
                "logflag": args.logflag,
                "log_dest": args.log_dest,
                "quite": args.quite
            }
            mng_args = {'user_values': None, 'yml_file_name': None}
            if args.input_file:
                y = YMLLoader(args.input_file[0], beelog)
                mng_args = {
                    'user_values': y.ymlfile,
                    'yml_file_name': args.input_file[0]
                }
            eo.main(f.beefile, args.task[0], blog_args, mng_args,
                    start=start, debug=args.debug)
        elif args.orc_arm:
            beelog.message("ARM support not ready at the moment!",
                           color=beelog.err)
        else:
            beelog.message("Please specify a valid orchestrator!",
                           color=beelog.err)
    elif args.orc:
        eo.main(start=start, debug=args.debug)
    elif args.orc_arm:
        beelog.message("ARM support not ready at the moment!",
                       color=beelog.err)
def terminate_flow_id(flow_id):
    """
    Terminate all jobs recorded in the launcher database for a single flow.
    :param flow_id: beeflowID whose associated jobs should be terminated
    """
    blog = BeeLogging(False, None, False)
    ldb = LaunchDB(blog)
    res = ldb.query_value_list(index='beeflowID', value=flow_id,
                               result='manageSys, jobID')
    blog.message("Preparing termination requests for flowID: {}".format(flow_id),
                 color=blog.msg)
    for e in res:
        mng_sys = e[0]
        jid = e[1]
        if jid is None:
            blog.message("Unable to find jobID associated with flowID: "
                         "{}".format(flow_id), color=blog.err)
        elif mng_sys is None:
            blog.message("Unable to identify manageSys for jobID: "
                         "{}".format(jid), color=blog.err)
        else:
            adapt = Adapter(system=mng_sys, config=None, file_loc=None,
                            task_name=None, beelog=blog, input_mng=None)
            blog.message("Sending termination request for job {} via {}".format(
                jid, mng_sys))
            adapt.shutdown(jid)
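# A minimal sketch (comment only, not executed) of the rows terminate_flow_id()
# expects back from LaunchDB.query_value_list(); the shape is inferred from the
# loop above, and the example values ('slurm', '123456') are assumptions:
#
#   res = [('slurm', '123456'), ('slurm', None)]
#   # row 0 -> Adapter(system='slurm', ...).shutdown('123456')
#   # row 1 -> logs "Unable to find jobID associated with flowID: ..."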
def orc_default(args):
    odb = None
    try:
        odb = OrchestratorDB(BeeLogging(log=False, log_dest=None, quite=False))
    except Exception as e:
        cprint("Unable to load OrchestratorDB supporting class", "red")
        print(str(e))
        exit(1)

    # execute task if argument is present
    # exclusivity rules are managed by argparse groups
    if args.launch_all:
        odb.query_all()
    elif args.delete_all:
        odb.delete_all()

    if args.launch_custom:
        odb.query_all_specific(args.launch_custom[0], args.launch_custom[1])
    elif args.launch_jobid:
        odb.query_all_specific("jobid", str(args.launch_jobid[0]))
    elif args.launch_status:
        odb.query_all_specific("status", str(args.launch_status[0]))
class BeeLauncherDaemon(object):
    def __init__(self, beelog, daemon=None, debug=False):
        self.debug = debug
        # Manual test analysis
        if self.debug:
            self.cpr = cProfile.Profile()
            self.cpr.enable()

        self.beetask = None  # one task per instance of daemon!
        self.termination_lock = False
        self.orc_daemon = daemon
        self.blog = beelog  # Logging conf. object

        self.blog.message("Starting Bee orchestration controller..")

    def _create_task(self, beefile, file_name, input_mng):
        """
        Create a self.beetask (BeeCluster) object and assign to variable
        :param beefile: Dictionary object of beefile (beefile_manager)
        :param file_name: <file_name>.beefile
        :param input_mng: Object (InputManagement(...))
        """
        self.blog.message("Bee orchestration controller: received task "
                          "creation request")
        beetask_name = beefile.get('id',
                                   file_name + strftime("_%Y%m%d_%H%M%S"))
        self.blog.message("Launched BEE Instance!", task_name=beetask_name)
        self.beetask = BeeCluster(beetask_name, beefile, self.blog, input_mng)

    def _launch_task(self):
        """
        Start task assigned as object variable
        """
        self.beetask.start()

    def create_and_launch_task(self, beefile, file_name, blog_args, mng_args):
        """
        :param beefile: Dictionary object of beefile (beefile_manager)
        :param file_name: <file_name>.beefile
        :param blog_args: dictionary of beelog related arguments
            (logflag: ?, log_dest: ?, quite: ?)
            Used to generate the BeeLogging object
        :param mng_args: dictionary of input management related arguments
            (user_values: ?, yml_file_name: ?)
            Used to generate the InputManagement object
        """
        self.blog = BeeLogging(blog_args.get('logflag'),
                               blog_args.get('log_dest'),
                               blog_args.get('quite'))
        self.blog.message("Task received in current working directory: " +
                          os.getcwd(), "{}.beefile".format(file_name),
                          self.blog.msg)
        input_mng = InputManagement(beefile, mng_args.get('user_values'),
                                    self.blog, mng_args.get('yml_file_name'))
        self._create_task(beefile, file_name, input_mng)
        self._launch_task()

    ##############################################
    # terminate/delete/list could be better
    # suited in current implementation with
    # launcher; however, they need to be planned
    # in the daemon for other implementations
    ##############################################
    def terminate_task(self, beetask_name):
        pass

    def delete_task(self, beetask_name):
        pass

    def list_all_tasks(self):
        pass

    def launch_internal_beeflow(self, beeflow, beefile_list, parent_beefile,
                                node_list, flow_name):
        self.blog.message("Internal BeeFlow triggered", flow_name,
                          color=self.blog.msg)

        # Initialize each task
        beeflow_tasks = {}
        nodes_used = 0
        for task_name in beefile_list:
            beefile = beefile_list[task_name]
            try:
                num_n = int(beefile['requirements']['ResourceRequirement']
                            .get('numNodes', 1))
                msys = beefile['requirements']['ResourceRequirement'].get(
                    'manageSys')
                if msys != 'slurm':
                    print("ERROR: only slurm supported for internal BeeFlow "
                          "at this time!")
                    exit(1)
                if num_n + nodes_used > len(node_list):
                    print("ERROR: more nodes requested than available!")
                    exit(1)

                hosts = None
                for i in range(0, num_n):
                    print(node_list[i + nodes_used])
                    if hosts is None:
                        hosts = str(node_list[i + nodes_used])
                    else:
                        hosts += ".{}".format(node_list[i + nodes_used])
                nodes_used += num_n
                beefile['requirements']['ResourceRequirement'].update(
                    {'nodeList': hosts})

                if parent_beefile['requirements'].get(
                        'CharliecloudRequirement') is not None:
                    print(parent_beefile['requirements'].get(
                        'CharliecloudRequirement'))
                    beefile['requirements'].update(
                        {'CharliecloudRequirement':
                            parent_beefile['requirements'].get(
                                'CharliecloudRequirement')})
            except KeyError as err:
                print("ERROR: incorrect beefile configuration\n{}".format(err))
                exit(1)

            # beefile.update({'requirements': {'ResourceRequirement':
            #                                  {'nodeList': 'test'}}})
            print(beefile)
            # beetask = self.create_task(beefile)
            beetask = "OBJECT"
            beeflow_tasks.update({task_name: beetask})
            # self.__beetasks[task_name] = beetask

        # TODO: continue rewrite to better support

    def shutdown_daemon(self):
        """
        Cleanly shutdown daemon, dump debug information if flag is True
        """
        self.blog.message("Bee orchestration controller shutting down",
                          color=self.blog.msg)

        # Manual test analysis
        if self.debug:
            self.cpr.disable()
            s = io.StringIO()
            sortby = 'cumulative'
            ps = pstats.Stats(self.cpr, stream=s).sort_stats(sortby)
            ps.print_stats()
            print(s.getvalue())

        self.orc_daemon.shutdown()
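# A minimal usage sketch (kept as a comment so it is not run on import) of how
# the blog_args/mng_args dictionaries documented in create_and_launch_task()
# might be assembled; the loader calls mirror manage_args() above, while the
# beefile path and literal values here are assumptions:
#
#   beelog = BeeLogging(False, None, False)
#   loader = BeefileLoader("/path/to/task", beelog)
#   daemon = BeeLauncherDaemon(beelog)
#   daemon.create_and_launch_task(
#       beefile=loader.beefile,
#       file_name="/path/to/task",
#       blog_args={'logflag': False, 'log_dest': None, 'quite': False},
#       mng_args={'user_values': None, 'yml_file_name': None})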