def cluster_dispatcher(args=sys.argv[1:]):
    """Parses command line and calls the different processing functions

    """
    # If --clear-logs the log files are cleared
    if "--clear-logs" in args:
        clear_log_files(LOG_FILES)

    command = command_handling(args, COMMAND_LOG)

    # Parses command line arguments.
    command_args = a.parse_and_check(command)
    resume = command_args.resume
    if command_args.resume:
        # Keep the debug option if set
        debug = command_args.debug
        # Restore the args of the call to resume from the command log file
        stored_command = StoredCommand(args, COMMAND_LOG, DIRS_LOG)
        command = Command(None, stored_command=stored_command)
        # Logs the issued command and the resumed command
        session_file = os.path.join(stored_command.output_dir, SESSIONS_LOG)
        stored_command.log_command(session_file=session_file)
        # Parses resumed arguments.
        command_args = a.parse_and_check(command)
        if command_args.predictions is None:
            command_args.predictions = os.path.join(
                stored_command.output_dir, DEFAULT_OUTPUT)
    else:
        if command_args.output_dir is None:
            command_args.output_dir = a.NOW
        if command_args.predictions is None:
            command_args.predictions = os.path.join(
                command_args.output_dir, DEFAULT_OUTPUT)
        if len(os.path.dirname(command_args.predictions).strip()) == 0:
            command_args.predictions = os.path.join(
                command_args.output_dir, command_args.predictions)
        directory = u.check_dir(command_args.predictions)
        session_file = os.path.join(directory, SESSIONS_LOG)
        u.log_message(command.command + "\n", log_file=session_file)
        try:
            defaults_file = open(DEFAULTS_FILE, "r")
            contents = defaults_file.read()
            defaults_file.close()
            defaults_copy = open(os.path.join(directory, DEFAULTS_FILE),
                                 "w", 0)
            defaults_copy.write(contents)
            defaults_copy.close()
        except IOError:
            pass
        u.sys_log_message(u"%s\n" % os.path.abspath(directory),
                          log_file=DIRS_LOG)

    # Creates the corresponding api instance
    if resume and debug:
        command_args.debug = True
    api = a.get_api_instance(command_args, u.check_dir(session_file))

    # Selects the action to perform
    if (has_train(command_args) or has_test(command_args)
            or command_args.cluster_datasets is not None):
        output_args = a.get_output_args(api, command_args, resume)
        a.transform_args(command_args, command.flags, api,
                         command.user_defaults)
        compute_output(**output_args)
    u.log_message("_" * 80 + "\n", log_file=session_file)
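
# Usage sketch (illustrative only, not part of the module): the `bigmler
# cluster` subcommand forwards sys.argv[1:] here, so the dispatcher can also
# be exercised directly. The training file path and output directory below
# are hypothetical examples.
#
#     cluster_dispatcher(["--train", "data/iris.csv",
#                         "--output-dir", "./cluster_run"])
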
def delete_dispatcher(args=sys.argv[1:]):
    """Parses command line and calls the different processing functions

    """
    command = command_handling(args, COMMAND_LOG)

    # Parses command line arguments.
    command_args = a.parse_and_check(command)
    resume = command_args.resume
    if command_args.resume:
        # Keep the debug option if set
        debug = command_args.debug
        # Restore the args of the call to resume from the command log file
        stored_command = StoredCommand(args, COMMAND_LOG, DIRS_LOG)
        command = Command(None, stored_command=stored_command)
        # Logs the issued command and the resumed command
        session_file = os.path.join(stored_command.output_dir, SESSIONS_LOG)
        stored_command.log_command(session_file=session_file)
        # Parses resumed arguments.
        command_args = a.parse_and_check(command)
    else:
        if command_args.output_dir is None:
            command_args.output_dir = a.NOW
        directory = u.check_dir(os.path.join(command_args.output_dir, "tmp"))
        session_file = os.path.join(directory, SESSIONS_LOG)
        u.log_message(command.command + "\n", log_file=session_file)
        try:
            defaults_file = open(DEFAULTS_FILE, 'r')
            contents = defaults_file.read()
            defaults_file.close()
            defaults_copy = open(os.path.join(directory, DEFAULTS_FILE),
                                 'w', 0)
            defaults_copy.write(contents)
            defaults_copy.close()
        except IOError:
            pass
        u.sys_log_message(u"%s\n" % os.path.abspath(directory),
                          log_file=DIRS_LOG)

    # If --clear-logs the log files are cleared
    if "--clear-logs" in args:
        clear_log_files(LOG_FILES)

    # Creates the corresponding api instance
    if resume and debug:
        command_args.debug = True
    api = a.get_api_instance(command_args, u.check_dir(session_file))

    delete_resources(command_args, api)

    u.log_message("_" * 80 + "\n", log_file=session_file)
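
# Usage sketch (illustrative only): assuming the standard `bigmler delete`
# options, the resources created by a previous run can be removed by pointing
# the dispatcher at that run's output directory. The directory name is a
# hypothetical example.
#
#     delete_dispatcher(["--from-dir", "./cluster_run"])
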
def analyze_dispatcher(args=sys.argv[1:]):
    """Main processing of the parsed options for BigMLer analyze

    """
    # If --clear-logs the log files are cleared
    if "--clear-logs" in args:
        clear_log_files(LOG_FILES)

    command = command_handling(args, COMMAND_LOG)

    # Parses command line arguments.
    command_args = command.parser.parse_args(command.args)
    resume = command_args.resume
    if resume:
        # Keep the debug option if set
        debug = command_args.debug
        # Restore the args of the call to resume from the command log file
        stored_command = StoredCommand(args, COMMAND_LOG, DIRS_LOG)
        command = Command(None, stored_command=stored_command)
        # Logs the issued command and the resumed command
        session_file = os.path.join(stored_command.output_dir, SESSIONS_LOG)
        stored_command.log_command(session_file=session_file)
        # Parses resumed arguments.
        command_args = command.parser.parse_args(command.args)
        command_args.debug = debug
    else:
        if command_args.output_dir is None:
            command_args.output_dir = a.NOW
        session_file = os.path.join(command_args.output_dir, SESSIONS_LOG)
        # If logging is required, open the file for logging
        log = None
        if command_args.log_file:
            u.check_dir(command_args.log_file)
            log = command_args.log_file
            # If --clear-logs is set, the log files are cleared
            if command_args.clear_logs:
                clear_log_files([log])

        if command_args.model_fields:
            model_fields = command_args.model_fields.split(',')
            command_args.model_fields_ = [model_field.strip()
                                          for model_field in model_fields]
        else:
            command_args.model_fields_ = {}
        u.sys_log_message(u"%s\n" % os.path.abspath(command_args.output_dir),
                          log_file=DIRS_LOG)
        session_file = os.path.join(command_args.output_dir, SESSIONS_LOG)
    # Creates the api instance from the parsed args
    api = a.get_api_instance(command_args, u.check_dir(session_file))

    # The --maximize flag will be deprecated. Use --optimize instead.
    if command_args.maximize is not None and command_args.optimize is None:
        command_args.optimize = command_args.maximize
    incompatible_flags = [command_args.cv, command_args.features,
                          command_args.nodes]
    if sum([int(bool(flag)) for flag in incompatible_flags]) > 1:
        sys.exit("The following flags cannot be used together:\n --features"
                 "\n --cross-validation\n --nodes")
    # k-fold cross-validation
    if command_args.cv and command_args.dataset is not None:
        create_kfold_cv(command_args, api, command.common_options,
                        resume=resume)

    # features analysis
    if command_args.features:
        create_features_analysis(command_args, api, command.common_options,
                                 resume=resume)

    # node threshold analysis
    if command_args.nodes:
        create_nodes_analysis(command_args, api, command.common_options,
                              resume=resume)
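
# Usage sketch (illustrative only): a k-fold cross-validation run needs an
# existing dataset, per the `command_args.dataset is not None` guard above,
# and --cross-validation, --features and --nodes are mutually exclusive. The
# dataset id and fold count below are hypothetical examples.
#
#     analyze_dispatcher(["--dataset", "dataset/53b1f71437203f5ac30004ed",
#                         "--cross-validation", "--k-folds", "5"])
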