def main(args=None, stdout=None, stderr=None):
    """Command entry point: clone the repositories listed in the input.

    Parses the command line, reads the repository list, schedules one job per
    repository (honoring inter-repository dependencies) and reports results.
    Returns 1 if reading the input or any job failed, else 0.
    """
    set_streams(stdout=stdout, stderr=stderr)

    parser = get_parser()
    add_common_arguments(
        parser, skip_hide_empty=True, skip_nested=True, path_nargs='?',
        path_help='Base path to clone repositories to')
    args = parser.parse_args(args)

    # the input may be malformed; report in red and bail out instead of raising
    try:
        repos = get_repositories(args.input)
    except RuntimeError as exc:
        print(ansi('redf') + str(exc) + ansi('reset'), file=sys.stderr)
        return 1

    jobs = generate_jobs(repos, args)
    add_dependencies(jobs)

    if args.repos:
        output_repositories([job['client'] for job in jobs])

    results = execute_jobs(
        jobs, show_progress=True, number_of_workers=args.workers,
        debug_jobs=args.debug)
    output_results(results)

    # non-zero exit code as soon as a single repository failed
    return 1 if any(r['returncode'] for r in results) else 0
def main(args=None, stdout=None, stderr=None):
    """Command entry point: export the found repositories as YAML.

    Discovers repositories under the given paths, collects export data per
    repository, rewrites result paths relative to the client directory and
    prints the 'repositories:' document. Returns 1 if any job failed, else 0.
    """
    set_streams(stdout=stdout, stderr=stderr)

    parser = get_parser()
    add_common_arguments(parser, skip_hide_empty=True, path_nargs='?')
    args = parser.parse_args(args)

    command = ExportCommand(args)
    clients = find_repositories(command.paths, nested=command.nested)
    if command.output_repos:
        output_repositories(clients)

    jobs = generate_jobs(clients, command)
    results = execute_jobs(jobs, number_of_workers=args.workers)

    # check if at least one repo was found in the client directory itself
    basename = None
    for res in results:
        res['path'] = get_relative_path_of_result(res)
        if res['path'] == '.':
            basename = os.path.basename(os.path.abspath(res['client'].path))

    # in that case prefix all relative paths with the client directory basename
    if basename is not None:
        for res in results:
            rel = res['path']
            res['path'] = basename if rel == '.' else os.path.join(basename, rel)

    print('repositories:')
    output_results(results, output_handler=output_export_data)
    output_results(results, output_handler=output_error_information)

    return 1 if any(r['returncode'] for r in results) else 0
def main(args=None, stdout=None, stderr=None):
    """Command entry point: clone the repositories listed in the input.

    Variant using a single positional base path. Returns 1 if reading the
    input or any job failed, else 0.
    """
    set_streams(stdout=stdout, stderr=stderr)

    parser = get_parser()
    add_common_arguments(
        parser, skip_hide_empty=True, skip_nested=True, single_path=True,
        path_help='Base path to clone repositories to')
    args = parser.parse_args(args)

    # a malformed input file is reported, not raised
    try:
        repos = get_repositories(args.input)
    except RuntimeError as exc:
        print(ansi('redf') + str(exc) + ansi('reset'), file=sys.stderr)
        return 1

    jobs = generate_jobs(repos, args)
    add_dependencies(jobs)

    if args.repos:
        output_repositories([job['client'] for job in jobs])

    results = execute_jobs(
        jobs, show_progress=True, number_of_workers=args.workers,
        debug_jobs=args.debug)
    output_results(results)

    return 1 if any(r['returncode'] for r in results) else 0
def simple_main(parser, command_class, args=None):
    """Shared driver for simple vcs commands.

    Builds the command from parsed arguments, runs one job per discovered
    repository with a progress indicator and prints the results (optionally
    hiding empty ones). Always returns 0.
    """
    add_common_arguments(parser)
    args = parser.parse_args(args)
    command = command_class(args)

    repo_clients = find_repositories(command.paths)
    if command.output_repos:
        output_repositories(repo_clients)

    outcome = execute_jobs(
        generate_jobs(repo_clients, command), show_progress=True)
    output_results(outcome, hide_empty=args.hide_empty)
    return 0
def simple_main(parser, command_class, args=None):
    """Shared driver for simple vcs commands.

    Builds the command from parsed arguments, runs one job per discovered
    repository with a progress indicator and prints the results.
    Always returns 0.
    """
    add_common_arguments(parser)
    args = parser.parse_args(args)
    command = command_class(args)

    repo_clients = find_repositories(command.paths)
    if command.output_repos:
        output_repositories(repo_clients)

    outcome = execute_jobs(
        generate_jobs(repo_clients, command), show_progress=True)
    output_results(outcome)
    return 0
def main(args=None, stdout=None, stderr=None):
    """Command entry point: run a custom vcs command in each repository.

    Everything after ``--args`` is forwarded verbatim to the vcs client, so it
    is split off before argparse sees it. Client-type flags filter which
    repositories are processed; with no flag given all types are enabled.
    Returns 1 if any job failed, else 0.
    """
    set_streams(stdout=stdout, stderr=stderr)

    parser = get_parser()
    add_common_arguments(parser)

    # separate anything following --args so argparse does not consume it
    if args is None:
        args = sys.argv[1:]
    try:
        index = args.index('--args') + 1
    except ValueError:
        # should generate error due to missing --args
        parser.parse_known_args(args)
    client_args = args[index:]
    args = parser.parse_args(args[0:index])
    args.args = client_args

    # check if any client type is specified
    any_client_type = any(
        client.type in args and args.__dict__[client.type]
        for client in vcstool_clients)
    # if no client type is specified enable all client types
    if not any_client_type:
        for client in vcstool_clients:
            if client.type in args:
                args.__dict__[client.type] = True

    command = CustomCommand(args)

    # filter repositories by the specified client types
    clients = find_repositories(command.paths, nested=command.nested)
    clients = [c for c in clients if c.type in args and args.__dict__[c.type]]
    if command.output_repos:
        output_repositories(clients)

    jobs = generate_jobs(clients, command)
    results = execute_jobs(
        jobs, show_progress=True, number_of_workers=args.workers,
        debug_jobs=args.debug)
    output_results(results, hide_empty=args.hide_empty)

    return 1 if any(r['returncode'] for r in results) else 0
def main(args=None):
    """Command entry point: export discovered repositories as YAML.

    Collects export data per repository and prints the 'repositories:'
    document. Always returns 0.
    """
    parser = get_parser()
    add_common_arguments(parser, skip_hide_empty=True, single_path=True)
    args = parser.parse_args(args)
    command = ExportCommand(args)

    repo_clients = find_repositories(command.paths)
    if command.output_repos:
        output_repositories(repo_clients)

    outcome = execute_jobs(generate_jobs(repo_clients, command))
    print('repositories:')
    output_results(outcome, output_handler=output_export_data)
    return 0
def simple_main(parser, command_class, args=None):
    """Shared driver for simple vcs commands.

    Builds the command from parsed arguments, runs one job per discovered
    repository (with progress and configurable parallelism) and prints the
    results. Returns 1 if any job failed, else 0.
    """
    add_common_arguments(parser)
    args = parser.parse_args(args)
    command = command_class(args)
    clients = find_repositories(command.paths)
    if command.output_repos:
        output_repositories(clients)
    jobs = generate_jobs(clients, command)
    results = execute_jobs(
        jobs, show_progress=True, number_of_workers=args.workers,
        debug_jobs=args.debug)
    output_results(results, hide_empty=args.hide_empty)
    # generator expression: no need to materialize a list just for any()
    any_error = any(r['returncode'] != 0 for r in results)
    return 1 if any_error else 0
def main(args=None):
    """Command entry point: export discovered repositories as YAML.

    Variant taking a single path argument which is wrapped into the ``paths``
    list the command expects. Always returns 0.
    """
    parser = get_parser()
    add_common_arguments(parser)
    args = parser.parse_args(args)
    # the command works on a list of paths; adapt the single-path argument
    args.paths = [args.path]
    command = ExportCommand(args)

    repo_clients = find_repositories(command.paths)
    if command.output_repos:
        output_repositories(repo_clients)

    outcome = execute_jobs(generate_jobs(repo_clients, command))
    print('repositories:')
    output_results(outcome, output_export_data)
    return 0
def main(args=None):
    """Command entry point: clone the repositories listed in the input.

    Minimal variant without dependency ordering or worker configuration.
    Returns 1 if reading the input failed, else 0.
    """
    parser = get_parser()
    add_common_arguments(
        parser, skip_hide_empty=True, single_path=True,
        path_help='Base path to clone repositories to')
    args = parser.parse_args(args)

    # a malformed input file is reported, not raised
    try:
        repos = get_repositories(args.input)
    except RuntimeError as exc:
        print(ansi('redf') + str(exc) + ansi('reset'), file=sys.stderr)
        return 1

    jobs = generate_jobs(repos, args)
    if args.repos:
        output_repositories([job['client'] for job in jobs])

    output_results(execute_jobs(jobs, show_progress=True))
    return 0
def simple_main(parser, command_class, args=None):
    """Shared driver for simple vcs commands.

    Builds the command from parsed arguments, runs one job per discovered
    repository (with progress and configurable parallelism) and prints the
    results. Returns 1 if any job failed, else 0.
    """
    add_common_arguments(parser)
    args = parser.parse_args(args)
    command = command_class(args)
    clients = find_repositories(command.paths)
    if command.output_repos:
        output_repositories(clients)
    jobs = generate_jobs(clients, command)
    results = execute_jobs(
        jobs, show_progress=True, number_of_workers=args.workers)
    output_results(results, hide_empty=args.hide_empty)
    # generator expression: no need to materialize a list just for any()
    any_error = any(r['returncode'] != 0 for r in results)
    return 1 if any_error else 0
def main(args=None):
    """Command entry point: export discovered repositories as YAML.

    Prints the 'repositories:' document followed by error information for
    failed repositories. Returns 1 if any job failed, else 0.
    """
    parser = get_parser()
    add_common_arguments(parser, skip_hide_empty=True, single_path=True)
    args = parser.parse_args(args)
    command = ExportCommand(args)
    clients = find_repositories(command.paths)
    if command.output_repos:
        output_repositories(clients)
    jobs = generate_jobs(clients, command)
    results = execute_jobs(jobs)
    print('repositories:')
    output_results(results, output_handler=output_export_data)
    output_results(results, output_handler=output_error_information)
    # generator expression: no need to materialize a list just for any()
    any_error = any(r['returncode'] != 0 for r in results)
    return 1 if any_error else 0
def main(args=None):
    """Command entry point: export discovered repositories as YAML.

    Runs jobs with configurable parallelism, then prints the 'repositories:'
    document followed by error information for failed repositories.
    Returns 1 if any job failed, else 0.
    """
    parser = get_parser()
    add_common_arguments(parser, skip_hide_empty=True, single_path=True)
    args = parser.parse_args(args)
    command = ExportCommand(args)
    clients = find_repositories(command.paths)
    if command.output_repos:
        output_repositories(clients)
    jobs = generate_jobs(clients, command)
    results = execute_jobs(jobs, number_of_workers=args.workers)
    print('repositories:')
    output_results(results, output_handler=output_export_data)
    output_results(results, output_handler=output_error_information)
    # generator expression: no need to materialize a list just for any()
    any_error = any(r['returncode'] != 0 for r in results)
    return 1 if any_error else 0
def main(args=None):
    """Command entry point: run a custom vcs command in each repository.

    Everything after ``--args`` is forwarded verbatim to the vcs client, so it
    is split off before argparse sees it. Client-type flags filter which
    repositories are processed; with no flag given all types are enabled.
    Returns 1 if any job failed, else 0.
    """
    parser = get_parser()
    add_common_arguments(parser)
    # separate anything followed after --args to not confuse argparse
    if args is None:
        args = sys.argv[1:]
    try:
        index = args.index('--args') + 1
    except ValueError:
        # should generate error due to missing --args
        parser.parse_known_args(args)
    client_args = args[index:]
    args = parser.parse_args(args[0:index])
    args.args = client_args
    # check if any client type is specified (any() replaces the manual loop)
    any_client_type = any(
        client.type in args and args.__dict__[client.type]
        for client in vcstool_clients)
    # if no client type is specified enable all client types
    if not any_client_type:
        for client in vcstool_clients:
            if client.type in args:
                args.__dict__[client.type] = True
    command = CustomCommand(args)
    # filter repositories by specified client types
    clients = find_repositories(command.paths)
    clients = [c for c in clients if c.type in args and args.__dict__[c.type]]
    if command.output_repos:
        output_repositories(clients)
    jobs = generate_jobs(clients, command)
    results = execute_jobs(
        jobs, show_progress=True, number_of_workers=args.workers)
    output_results(results, hide_empty=args.hide_empty)
    # generator expression: no need to materialize a list just for any()
    any_error = any(r['returncode'] != 0 for r in results)
    return 1 if any_error else 0
def main(args=None):
    """Command entry point: clone the repositories listed in the input.

    Returns 1 if reading the input or any job failed, else 0.
    """
    parser = get_parser()
    add_common_arguments(
        parser, skip_hide_empty=True, single_path=True,
        path_help='Base path to clone repositories to')
    args = parser.parse_args(args)
    # the input file may be malformed; report and bail out instead of raising
    try:
        repos = get_repositories(args.input)
    except RuntimeError as e:
        print(ansi('redf') + str(e) + ansi('reset'), file=sys.stderr)
        return 1
    jobs = generate_jobs(repos, args)
    if args.repos:
        output_repositories([job['client'] for job in jobs])
    results = execute_jobs(jobs, show_progress=True)
    output_results(results)
    # generator expression: no need to materialize a list just for any()
    any_error = any(r['returncode'] != 0 for r in results)
    return 1 if any_error else 0
def main(args=None):
    """Command entry point: clone the repositories listed in the input.

    Runs jobs with configurable parallelism and debug output.
    Returns 1 if reading the input or any job failed, else 0.
    """
    parser = get_parser()
    # single quotes for consistency with the rest of the file
    add_common_arguments(
        parser, skip_hide_empty=True, single_path=True,
        path_help='Base path to clone repositories to')
    args = parser.parse_args(args)
    # the input file may be malformed; report and bail out instead of raising
    try:
        repos = get_repositories(args.input)
    except RuntimeError as e:
        print(ansi('redf') + str(e) + ansi('reset'), file=sys.stderr)
        return 1
    jobs = generate_jobs(repos, args)
    if args.repos:
        output_repositories([job['client'] for job in jobs])
    results = execute_jobs(
        jobs, show_progress=True, number_of_workers=args.workers,
        debug_jobs=args.debug)
    output_results(results)
    # generator expression: no need to materialize a list just for any()
    any_error = any(r['returncode'] != 0 for r in results)
    return 1 if any_error else 0
def main(args=None, stdout=None, stderr=None):
    """Command entry point: export discovered repositories as YAML.

    Prints the 'repositories:' document followed by error information for
    failed repositories. Returns 1 if any job failed, else 0.
    """
    set_streams(stdout=stdout, stderr=stderr)

    parser = get_parser()
    add_common_arguments(parser, skip_hide_empty=True, path_nargs='?')
    args = parser.parse_args(args)
    command = ExportCommand(args)

    repo_clients = find_repositories(command.paths, nested=command.nested)
    if command.output_repos:
        output_repositories(repo_clients)

    outcome = execute_jobs(
        generate_jobs(repo_clients, command), number_of_workers=args.workers)
    print('repositories:')
    output_results(outcome, output_handler=output_export_data)
    output_results(outcome, output_handler=output_error_information)

    return 1 if any(r['returncode'] for r in outcome) else 0
def main(args=None, stdout=None, stderr=None):
    """Command entry point: export discovered repositories as YAML.

    Variant using a single positional path. Prints the 'repositories:'
    document followed by error information for failed repositories.
    Returns 1 if any job failed, else 0.
    """
    set_streams(stdout=stdout, stderr=stderr)

    parser = get_parser()
    add_common_arguments(parser, skip_hide_empty=True, single_path=True)
    args = parser.parse_args(args)
    command = ExportCommand(args)

    repo_clients = find_repositories(command.paths, nested=command.nested)
    if command.output_repos:
        output_repositories(repo_clients)

    outcome = execute_jobs(
        generate_jobs(repo_clients, command), number_of_workers=args.workers)
    print('repositories:')
    output_results(outcome, output_handler=output_export_data)
    output_results(outcome, output_handler=output_error_information)

    return 1 if any(r['returncode'] for r in outcome) else 0
def main(args=None, stdout=None, stderr=None):
    """Command entry point: clone the repositories listed in the input.

    The input may be a URL (``request.Request``) which is fetched first.
    Before running parallel jobs, ssh hosts from ``git@host:...`` URLs are
    checked against the known-hosts database; if any is unknown, fall back to
    a single worker so the interactive ssh fingerprint prompt works.
    Returns 1 if reading the input or any job failed, else 0.
    """
    set_streams(stdout=stdout, stderr=stderr)
    parser = get_parser()
    add_common_arguments(
        parser, skip_hide_empty=True, skip_nested=True, path_nargs='?',
        path_help='Base path to clone repositories to')
    args = parser.parse_args(args)
    try:
        input_ = args.input
        # the input may be a prepared urllib request pointing at a remote file
        if isinstance(input_, request.Request):
            input_ = request.urlopen(input_)
        repos = get_repositories(input_)
    except (RuntimeError, request.URLError) as e:
        # report malformed input / network failures in red and bail out
        print(ansi('redf') + str(e) + ansi('reset'), file=sys.stderr)
        return 1
    jobs = generate_jobs(repos, args)
    add_dependencies(jobs)
    if args.repos:
        output_repositories([job['client'] for job in jobs])
    workers = args.workers
    # for ssh URLs check if the host is known to prevent ssh asking for
    # confirmation when using more than one worker
    if workers > 1:
        ssh_keygen = None  # resolved lazily; False once lookup failed
        checked_hosts = set()  # host names already probed via ssh-keygen -F
        for job in list(jobs):
            if job['command'] is None:
                continue
            url = job['command'].url
            # only check the host from a ssh URL
            if not url.startswith('git@') or ':' not in url:
                continue
            host = url[4:].split(':', 1)[0]
            # only check each host name once
            if host in checked_hosts:
                continue
            checked_hosts.add(host)
            # get ssh-keygen path once
            if ssh_keygen is None:
                ssh_keygen = which('ssh-keygen') or False
            if not ssh_keygen:
                continue
            # ssh-keygen -F exits non-zero when the host is not in known_hosts
            result = run_command([ssh_keygen, '-F', host], '')
            if result['returncode']:
                print('At least one hostname (%s) is unknown, switching to a '
                      'single worker to allow interactively answering the ssh '
                      'question to confirm the fingerprint' % host)
                workers = 1
                break
    results = execute_jobs(
        jobs, show_progress=True, number_of_workers=workers,
        debug_jobs=args.debug)
    output_results(results)
    any_error = any(r['returncode'] for r in results)
    return 1 if any_error else 0