def main3():
    """Driver utility for the script.

    global_task_file contains the list of tasks ALREADY PROCESSED.
    If a task is picked up, this file is updated globally.
    """
    global logger
    args = load_arguments()
    logger = util_log.logger_setup(
        __name__, log_file=args.log_file, formatter=util_log.FORMATTER_4, isrotate=True
    )
    log("Daemon", "start ", os.getpid())

    folder_main = args.task_folder
    global_task_file = args.global_task_file
    if not os.path.isdir(folder_main):
        return 0

    with open(global_task_file, mode="r") as fp:
        folder_check = json.load(fp)

    while True:
        # log(folder_check)
        log("Daemon new loop", folder_main)
        for folder in os.listdir(folder_main):
            # log(folder)
            if not isvalid_folder(folder_main, folder, folder_check, global_task_file):
                continue

            t0 = time.time()
            with open(global_task_file, mode="r") as fp:  # Refresh the global file
                folder_check = json.load(fp)
            if folder not in folder_check:
                # Register the folder immediately to prevent a 2nd pick-up by another instance
                global_task_file_save(folder, folder_check, global_task_file)
            log("time to save", time.time() - t0)

            folder = os.path.join(folder_main, folder)
            files = [f for f in os.listdir(folder) if f in ("main.sh", "main.py")]
            log(files)
            if files:
                pid = subprocess_launch(folder, files[0])
                log("task folder started:", folder, files[0], pid)
                sleep(20)
                os_wait_policy(waitsleep=10)

        if args.mode != "daemon":
            log("Daemon", "terminated", os.getpid())
            break

        sleep(args.waitsec)
        os_wait_policy(waitsleep=5)

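# Minimal sketch of what global_task_file_save() is assumed to do (the real helper is
# defined elsewhere in this module; the record structure below is an assumption, it may
# simply append the folder name): persist the picked-up folder in the shared JSON file
# so that a second daemon instance skips it.
#
#   def global_task_file_save(folder, folder_check, global_task_file):
#       folder_check[folder] = {"status": "picked", "ts": time.time()}  # assumes a dict registry
#       with open(global_task_file, mode="w") as fp:
#           json.dump(folder_check, fp)
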
def main2():
    """Driver utility for the script."""
    global logger
    args = load_arguments()
    logger = util_log.logger_setup(
        __name__, log_file=args.log_file, formatter=util_log.FORMATTER_4, isrotate=True
    )
    log("Daemon", "start ", os.getpid())

    folder_main = args.task_folder
    while True:
        log("Daemon new loop", folder_main)
        if not os.path.isdir(folder_main):
            break

        for root, dirs, files in os.walk(folder_main):
            f = os.path.basename(root)
            for filename in files:
                if (
                    filename in ("main.sh", "main.py")
                    and "_qstart" not in f
                    and "_qdone" not in f
                    and "_ignore" not in f
                ):
                    try:
                        #### Collision risk: two daemon instances may try to rename the same
                        #### folder; the loser of the race raises and skips the task.
                        folder_new = root + "_qstart"
                        os.rename(root, folder_new)
                        pid = subprocess_launch(folder_new, filename)
                        log("task folder started:", folder_new, pid)
                    except Exception:
                        pass  # another instance won the rename race; skip this folder
                    os_wait_policy(waitsleep=5)

        if args.mode != "daemon":
            log("Daemon", "terminated", os.getpid())
            break

        sleep(args.waitsec)
        os_wait_policy(waitsleep=5)

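# Folder-suffix convention assumed by main2() above (inferred from the checks on `f`;
# the "_qdone" rename is presumably done by the task itself or a monitor, not shown here):
#   mytask/          -> pending task, contains main.py or main.sh
#   mytask_qstart/   -> picked up: the daemon renames the folder before launching it
#   mytask_qdone/    -> finished
#   mytask_ignore/   -> explicitly skipped by the daemon
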
def batch_run_infolder(
    task_folders,
    suffix="_qstart",
    main_file_run="main.py",
    waitime=7,
    os_python_path=None,
    log_file=None,
):
    """Rename each task folder with `suffix`, launch its main script as a sub-process
    and return the list of sub-process PIDs."""
    global logger
    sub_process_list = []
    ispython = 1 if ".py" in main_file_run else 0

    if os_python_path is None:
        os_python_path = sys.executable

    if log_file is not None:
        logger = util_log.logger_setup(__name__, log_file=log_file, formatter=util_log.FORMATTER_4)

    for folder_i in task_folders:
        foldername = folder_i + suffix
        foldername = os_folder_rename(old_folder=folder_i, new_folder=foldername)

        # main_file = os.path.join(foldername, main_file_run)
        # cmd = [os_python_path, main_file] if ispython else [main_file]
        cmd = os_cmd_generate(foldername, os_python_path)

        os_wait_policy(waitsleep=15)
        ps = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=False)
        sub_process_list.append(ps.pid)
        log("Sub-process, ", ps.pid, cmd)
        time.sleep(waitime)

    return sub_process_list

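# Hypothetical usage sketch (folder paths are placeholders, not part of this repo): each
# task folder is expected to contain a main.py; the call renames each folder with the
# suffix, launches the script and returns the sub-process PIDs.
#
#   pids = batch_run_infolder(
#       task_folders=["/home/ubuntu/tasks/task_001", "/home/ubuntu/tasks/task_002"],
#       suffix="_qstart",
#       main_file_run="main.py",
#       waitime=7,
#       log_file="batch_run.log",
#   )
#   log("started PIDs:", pids)
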
def test_logger_setup(capsys):
    # Test: console output / file output
    formatter = logging.Formatter("%(levelname)s %(message)s")
    fileout = os_path_append("mockup_test_util.log")
    logger = util_log.logger_setup("test_util_log", fileout, formatter)
    logger.info("mockup_test_util_log")
    output = "INFO mockup_test_util_log\n"

    # Console output
    out, err = capsys.readouterr()
    assert out == output

    # File output
    with open(fileout, "r") as f:
        assert f.readline() == output

    # Cleanup
    os.remove(fileout)

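# To run just this test with pytest:
#   pytest -q -k test_logger_setup
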
def main():
    """Driver utility for the script."""
    global logger
    args = load_arguments()
    logger = util_log.logger_setup(
        __name__, log_file=args.log_file, formatter=util_log.FORMATTER_4, isrotate=True
    )
    log("Daemon", "start ", os.getpid())

    while True:
        log("Daemon new loop", args.task_folder)
        folders = get_list_valid_task_folder(args.task_folder)
        if folders:
            log("task folder:", folders)
            pid_list = util_batch.batch_run_infolder(task_folders=folders, log_file=args.log_file)
            log("task folder started:", pid_list)

        if args.mode != "daemon":
            log("Daemon", "terminated", os.getpid())
            break

        sleep(args.waitsec)

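# Hypothetical invocation (script name and flag names are inferred from the args.*
# attributes used above; the real load_arguments() may differ):
#   python daemon_main.py --task_folder /home/ubuntu/tasks/ --mode daemon --waitsec 30 --log_file daemon.log
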
parser.add_argument("--host", default="34.67.74.78", help="Host name") parser.add_argument("--cmd_remote", default="main.py", help="Name of the main script") parser.add_argument("--log_file", default="log_aws_inout.log", help=".") options = parser.parse_args() return options ################################################################################ if __name__ == '__main__': args = load_arguments() # APP_ID = util_log.create_appid(__file__) logger = util_log.logger_setup(__name__, log_file= args.log_file, formatter= util_log.FORMATTER_4) if args.do == "launch_ec2" : pass if args.do == "get_fromec2" : pass if args.do == "put_toec2" : log( "Current Process Id:", os.getpid() ) valid_task_folders = get_list_valid_task_folder(args.task_folder) log("All task Completed")
    )
    parser.add_argument(
        "--process_folder", default="/home/ubuntu/tasks/", help="process name pattern"
    )
    parser.add_argument("--process_isregex", default=1, help="process name pattern regex")
    parser.add_argument("--waitsec", type=int, default=10, help="sleep")
    args = parser.parse_args()
    return args


####################################################################################################
if __name__ == "__main__":
    args = load_arguments()
    logger = util_log.logger_setup(
        __name__, log_file=args.log_file, formatter=util_log.FORMATTER_4, isrotate=True
    )

    ### Process CPU usage
    loggercpu = util_log.logger_setup(
        __name__ + "logcpu",
        log_file=args.monitor_log_file,
        formatter=util_log.FORMATTER_4,
        isrotate=True,
    )

    batch_pid_dict = {}
    p_pattern = args.process_folder
    p_pattern = p_pattern[:-1] if p_pattern.endswith("/") else p_pattern
    # /tasks/t53453/main.py or /tasks/t53453/main.sh

################### Generic ###############################################################
from aapackage import util_log
from aapackage.batch import util_batch
from utils import OUTFOLDER, os_folder_create, os_getparent

###########################################################################################
CUR_FOLDER = os.path.dirname(os.path.abspath(__file__))
DEFAULT_HYPERPARAMS = os.path.join(CUR_FOLDER, "hyperparams.csv")
DEFAULT_SUBPROCESS_SCRIPT = os.path.join(CUR_FOLDER, "subprocess_optim.py")


##### Logs #################################################################
os_folder_create(OUTFOLDER)
APP_ID = util_log.create_appid(__file__)
LOG_FILE = os.path.join(OUTFOLDER, util_log.create_logfilename(__file__))
logger = util_log.logger_setup(__name__, log_file=LOG_FILE, formatter=util_log.FORMATTER_4)


def log(*argv):
    logger.info(",".join([str(x) for x in argv]))


##### Args #################################################################
def load_arguments():
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-hp",
        "--hyperparam_file",