def main(self, *test_argv):             # pylint: disable=r0915,r0912,r0914
    """
    Entry point for the apssh command-line tool.

    Parses the command line (or ``test_argv`` when provided, which makes
    testing easier), builds one SSH proxy per selected target host, runs
    the requested command - or pushes and runs a local script - on all of
    them through a scheduler, and optionally stores results and per-host
    mark files under an output directory.

    Returns:
        int: 0 if every host returned 0, 1 otherwise.
    """
    self.parser = parser = argparse.ArgumentParser()
    # scope - on what hosts
    parser.add_argument(
        "-s", "--script", action='store_true', default=False,
        help=f"""If this flag is present, the first element of the remote
        command is assumed to be either the name of a local script, or,
        if this is not found, the body of a local script, that will be
        copied over before being executed remotely. In this case it
        should be executable. On the remote boxes it will be installed
        and run in the {default_remote_workdir} directory.
        """)
    parser.add_argument(
        "-i", "--includes", dest='includes', default=[], action='append',
        help="""for script mode only : a list of local files that are
        pushed remotely together with the local script, and in the same
        location; useful when you want to run remotely a shell script
        that sources other files; remember that on the remote end all
        files (scripts and includes) end up in the same location""")
    parser.add_argument(
        "-t", "--target", dest='targets', action='append', default=[],
        help="""
        specify targets (additive); at least one is required; each target
        can be either * a space-separated list of hostnames * the name of
        a file containing hostnames * the name of a directory containing
        files named after hostnames; see e.g. the --mark option
        """)
    parser.add_argument(
        "-x", "--exclude", dest='excludes', action='append', default=[],
        help="""
        like --target, but for specifying exclusions; for now no wildcard
        mechanism is supported here; also the order in which --target and
        --exclude options are mentioned does not matter; use --dry-run to
        only check for the list of applicable hosts
        """)
    # global settings
    parser.add_argument(
        "-w", "--window", type=int, default=0,
        help="""
        specify how many connections can run simultaneously; default is
        no limit
        """)
    parser.add_argument(
        "-c", "--connect-timeout", dest='timeout',
        type=float, default=default_timeout,
        help=f"specify connection timeout, default is {default_timeout}s")
    # ssh settings
    parser.add_argument(
        "-l", "--login", default=default_username,
        help=f"remote user name - default is {default_username}")
    parser.add_argument(
        "-k", "--key", dest='keys', default=None, action='append', type=str,
        help="""
        The default is for apssh to locate an ssh-agent through the
        SSH_AUTH_SOCK environment variable. If this cannot be found, or
        has an empty set of keys, then the user should specify private
        key file(s) - additive
        """)
    parser.add_argument(
        "-K", "--ok-if-no-key", default=False, action='store_true',
        help="""
        When no key can be found, apssh won't even bother to try and
        connect. With this option it proceeds even with no key available.
        """)
    parser.add_argument(
        "-g", "--gateway", default=None,
        help="""
        specify a gateway for 2-hops ssh - either hostname or
        username@hostname
        """)
    # how to store results
    # terminal
    parser.add_argument(
        "-r", "--raw-format", default=False, action='store_true',
        help="""
        produce raw result, incoming lines are shown as-is without
        hostname
        """)
    parser.add_argument(
        "-tc", "--time-colon-format", default=False, action='store_true',
        # NOTE: closing quote added in the help text - it was missing
        help="equivalent to --format '@time@:@host@:@line@'")
    parser.add_argument(
        "-f", "--format", default=None, action='store',
        help="""specify output format, which may include
        * `strftime` formats like e.g. %%H-%%M, and one of the following:
        * @user@ for the remote username,
        * @host@ for the target hostname,
        * @line@ for the actual line output (which contains the actual
        newline)
        * @time@ is a shorthand for %%H-%%M-%%S""")
    # filesystem
    parser.add_argument(
        "-o", "--out-dir", default=None,
        help="specify directory where to store results")
    # default is None (falsy) rather than False like its siblings;
    # callers only test truthiness so behavior is identical
    parser.add_argument(
        "-d", "--date-time", default=None, action='store_true',
        help="use date-based directory to store results")
    parser.add_argument(
        "-m", "--mark", default=False, action='store_true',
        help="""
        available with the -d and -o options only. When specified, then
        for all nodes there will be a file created in the output subdir,
        named either 0ok/<hostname> for successful nodes, or
        1failed/<hostname> for the other ones. This mark file will
        contain a single line with the returned code, or 'None' if the
        node was not reachable at all
        """)
    # usual stuff
    parser.add_argument(
        "-n", "--dry-run", default=False, action='store_true',
        help="Only show details on selected hostnames")
    parser.add_argument("-v", "--verbose", action='store_true', default=False)
    parser.add_argument("-D", "--debug", action='store_true', default=False)
    parser.add_argument("-V", "--version", action='store_true', default=False)
    # the commands to run
    parser.add_argument(
        "commands", nargs=argparse.REMAINDER, type=str,
        help="""
        command to run remotely. If the -s or --script option is
        provided, the first argument here should denote a (typically
        script) file **that must exist** on the local filesystem. This
        script is then copied over to the remote system and serves as
        the command for remote execution
        """)

    # test_argv lets the test suite inject a synthetic command line
    if test_argv:
        args = self.parsed_args = parser.parse_args(test_argv)
    else:
        args = self.parsed_args = parser.parse_args()

    # helpers
    if args.version:
        print(f"apssh version {apssh_version}")
        exit(0)

    # manual check for REMAINDER - argparse cannot enforce "at least one"
    if not args.commands:
        print("You must provide a command to be run remotely")
        parser.print_help()
        exit(1)

    # load keys
    self.loaded_private_keys = load_private_keys(
        self.parsed_args.keys, args.verbose or args.debug)
    if not self.loaded_private_keys and not args.ok_if_no_key:
        print("Could not find any usable key - exiting")
        exit(1)

    # initialize a gateway proxy if --gateway is specified
    gateway = None
    if args.gateway:
        gwuser, gwhost = self.user_host(args.gateway)
        gateway = SshProxy(hostname=gwhost, username=gwuser,
                           keys=self.loaded_private_keys,
                           formatter=self.get_formatter(),
                           timeout=self.parsed_args.timeout,
                           debug=self.parsed_args.debug)

    proxies = self.create_proxies(gateway)
    if args.verbose:
        print_stderr(f"apssh is working on {len(proxies)} nodes")

    window = self.parsed_args.window

    # populate scheduler - one non-critical job per proxy so a failing
    # host does not abort the others
    scheduler = Scheduler(verbose=args.verbose)
    if not args.script:
        command_class = Run
        extra_kwds_args = {}
    else:
        # try RunScript
        command_class = RunScript
        extra_kwds_args = {'includes': args.includes}
        # but if the filename is not found then use RunString
        script = args.commands[0]
        if not Path(script).exists():
            if args.verbose:
                print(f"Warning: file not found '{script}'\n"
                      "=> Using RunString instead")
            command_class = RunString

    for proxy in proxies:
        scheduler.add(
            SshJob(node=proxy,
                   critical=False,
                   command=command_class(*args.commands,
                                         **extra_kwds_args)))

    # pylint: disable=w0106
    scheduler.jobs_window = window
    if not scheduler.run():
        scheduler.debrief()
    results = [job.result() for job in scheduler.jobs]

    ##########
    # print on stdout the name of the output directory
    # useful mostly with -d :
    subdir = self.get_formatter().run_name \
        if isinstance(self.get_formatter(), SubdirFormatter) \
        else None
    if subdir:
        print(subdir)

    # details on the individual retcods - a bit hacky
    if self.parsed_args.debug:
        for proxy, result in zip(proxies, results):
            print(f"PROXY {proxy.hostname} -> {result}")

    # marks: one file per node under 0ok/ or 1failed/
    names = {0: '0ok', None: '1failed'}
    if subdir and self.parsed_args.mark:
        # do we need to create the subdirs
        need_ok = [s for s in results if s == 0]
        if need_ok:
            os.makedirs(f"{subdir}/{names[0]}", exist_ok=True)
        need_fail = [s for s in results if s != 0]
        if need_fail:
            os.makedirs(f"{subdir}/{names[None]}", exist_ok=True)

        for proxy, result in zip(proxies, results):
            prefix = names[0] if result == 0 else names[None]
            mark_path = Path(subdir) / prefix / proxy.hostname
            with mark_path.open("w") as mark:
                mark.write(f"{result}\n")

    # xxx - when in gateway mode, the gateway proxy never gets
    # disconnected, which probably is just fine

    # return 0 only if all hosts have returned 0
    # otherwise, return 1
    failures = [r for r in results if r != 0]
    overall = 0 if not failures else 1
    return overall
def main() -> bool:
    """
    Command-line driver: spawn one nbhtest instance per (student, course
    repo) pair against an nbhosting server, pacing them through a
    scheduler, then tag the artefacts directory with the CLI signature.

    Returns the scheduler's overall outcome (True on success), or True
    immediately when --dry-run is given.
    """
    parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument("-U", "--url", default=default_topurl, dest='topurl',
                        help="url to reach nbhosting server")
    parser.add_argument("-i", "--indices", default=[0], action=IntsRanges,
                        help="(cumulative) ranges of indices in the list of known notebooks"
                        " - run nbhtest with -l to see list")
    parser.add_argument("-u", "--users", default=[1], action=IntsRanges,
                        help="(cumulative) ranges of students indexes; e.g. -u 101-400 -u 501-600")
    parser.add_argument("-b", "--base", default='student',
                        help="basename for students name")
    parser.add_argument("-p", "--period", default=default_period, type=float,
                        help="delay between 2 triggers of nbhtest")
    parser.add_argument("-s", "--sleep", default=default_sleep_internal,
                        type=float,
                        help="delay in seconds to sleep between actions inside nbhtest")
    parser.add_argument("-g", "--go", default=default_go_between_notebooks,
                        type=float,
                        help="go/wait duration between 2 consecutive notebooks")
    parser.add_argument("-c", "--cut", default=False, action='store_true',
                        help="""just load the urls, don't do any further processing""")
    parser.add_argument("-w", "--window", default=default_window, type=int,
                        help="window depth for spawning the nbhtest instances")
    parser.add_argument("--idle", default=None,
                        help="monitor idle setting")
    parser.add_argument("-n", "--dry-run", action='store_true')
    parser.add_argument("coursedirs", default=[default_course_gitdir],
                        nargs='*',
                        help="""a list of git repos where to fetch notebooks""")

    # the raw option string becomes a tag on the artefacts dir below
    signature = "".join(sys.argv[1:])
    args = parser.parse_args()

    local_node = LocalNode(formatter=TerminalFormatter(
        custom_format="%H-%M-%S:@line@", verbose=True))

    # optionally tweak the server's monitor-idle setting first
    if args.idle is not None:
        server_host = urlparse(args.topurl).netloc
        os.system(
            f"ssh root@{server_host} nbh test-set-monitor-idle {args.idle}")

    def nbhtest_command(student_name, coursedir):
        # assemble the backgrounded nbhtest invocation for one student
        # on one course repo
        pieces = [f"nbhtest.py -U {args.topurl} -u {student_name} "
                  f"-s {args.sleep} -g {args.go} "]
        if args.cut:
            pieces.append("-c ")
        pieces.extend(f"{coursedir}:{index} " for index in args.indices)
        pieces.append(" &")
        return "".join(pieces)

    scheduler = Scheduler()
    for user in args.users:
        student_name = f"{args.base}-{user:04d}"
        for coursedir in args.coursedirs:
            command = nbhtest_command(student_name, coursedir)
            if args.dry_run:
                print("dry-run:", command)
            else:
                # schedule this command to run, paced by a sleep
                SshJob(scheduler=scheduler, node=local_node,
                       commands=[command, f"sleep {args.period}"])

    if args.dry_run:
        return True

    scheduler.jobs_window = args.window
    overall = scheduler.orchestrate()
    if not overall:
        scheduler.debrief()

    # tag the artefacts dir with the CLI signature - but never clobber
    untagged = Path("artefacts")
    tagged = Path(f"artefacts{signature}")
    if tagged.exists():
        print(f"NOT RENAMING because {tagged} exists; command to run is")
        print(f"mv {untagged} {tagged}")
    else:
        print(f"renaming {untagged} into {tagged}")
        untagged.rename(tagged)
    print("nbhtests DONE")
    return overall