def main(sysv_args=None):
    if sysv_args is not None:
        parser = argparse.ArgumentParser(prog=sysv_args[0])
        sysv_args = sysv_args[1:]
    else:
        parser = argparse.ArgumentParser()

    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append',
                        dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force', default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers()

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))

    # These settings are used when you want to query information
    # stored in the cloud-init data objects/directories/files
    parser_query = subparsers.add_parser('query',
                                         help=('query information stored '
                                               'in cloud-init'))
    parser_query.add_argument("--name", '-n', action="store",
                              help="item name to query on",
                              required=True,
                              choices=QUERY_DATA_TYPES)
    parser_query.set_defaults(action=('query', main_query))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.set_defaults(action=('single', main_single))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency", action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    parser_single.set_defaults(action=('single', main_single))

    args = parser.parse_args(args=sysv_args)

    try:
        (name, functor) = args.action
    except AttributeError:
        parser.error('too few arguments')

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode...
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    if name in ("modules", "init"):
        functor = status_wrapper

    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report

    args.reporter = events.ReportEventStack(
        rname, rdesc, reporting_enabled=report_on)
    with args.reporter:
        return util.log_time(
            logfunc=LOG.debug, msg="cloud-init mode '%s'" % name,
            get_uptime=True, func=functor, args=(name, args))
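
# Illustrative entry point (an assumption, not part of the excerpt above):
# a console script would typically just forward sys.argv, so argv[0] becomes
# the parser's prog name and the functor's return value becomes the exit code.
if __name__ == '__main__':
    import sys
    sys.exit(main(sysv_args=sys.argv))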
def main(sysv_args=None):
    if not sysv_args:
        sysv_args = sys.argv
    parser = argparse.ArgumentParser(prog=sysv_args[0])
    sysv_args = sysv_args[1:]

    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append',
                        dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force', default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers(title='Subcommands', dest='subcommand')
    subparsers.required = True

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency", action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    parser_single.set_defaults(action=('single', main_single))

    parser_query = subparsers.add_parser(
        'query',
        help='Query standardized instance metadata from the command line.')

    parser_dhclient = subparsers.add_parser(dhclient_hook.NAME,
                                            help=dhclient_hook.__doc__)
    dhclient_hook.get_parser(parser_dhclient)

    parser_features = subparsers.add_parser('features',
                                            help=('list defined features'))
    parser_features.set_defaults(action=('features', main_features))

    parser_analyze = subparsers.add_parser(
        'analyze', help='Devel tool: Analyze cloud-init logs and data')

    parser_devel = subparsers.add_parser('devel', help='Run development tools')

    parser_collect_logs = subparsers.add_parser(
        'collect-logs', help='Collect and tar all cloud-init debug info')

    parser_clean = subparsers.add_parser(
        'clean', help='Remove logs and artifacts so cloud-init can re-run.')

    parser_status = subparsers.add_parser(
        'status', help='Report cloud-init status or wait on completion.')

    if sysv_args:
        # Only load subparsers if subcommand is specified to avoid load cost
        if sysv_args[0] == 'analyze':
            from cloudinit.analyze.__main__ import get_parser as analyze_parser
            # Construct analyze subcommand parser
            analyze_parser(parser_analyze)
        elif sysv_args[0] == 'devel':
            from cloudinit.cmd.devel.parser import get_parser as devel_parser
            # Construct devel subcommand parser
            devel_parser(parser_devel)
        elif sysv_args[0] == 'collect-logs':
            from cloudinit.cmd.devel.logs import (
                get_parser as logs_parser, handle_collect_logs_args)
            logs_parser(parser_collect_logs)
            parser_collect_logs.set_defaults(
                action=('collect-logs', handle_collect_logs_args))
        elif sysv_args[0] == 'clean':
            from cloudinit.cmd.clean import (
                get_parser as clean_parser, handle_clean_args)
            clean_parser(parser_clean)
            parser_clean.set_defaults(action=('clean', handle_clean_args))
        elif sysv_args[0] == 'query':
            from cloudinit.cmd.query import (
                get_parser as query_parser, handle_args as handle_query_args)
            query_parser(parser_query)
            parser_query.set_defaults(action=('render', handle_query_args))
        elif sysv_args[0] == 'status':
            from cloudinit.cmd.status import (
                get_parser as status_parser, handle_status_args)
            status_parser(parser_status)
            parser_status.set_defaults(action=('status', handle_status_args))

    args = parser.parse_args(args=sysv_args)

    # Subparsers.required = True and each subparser sets action=(name, functor)
    (name, functor) = args.action

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode.
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    if name in ("modules", "init"):
        functor = status_wrapper

    rname = None
    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report
    else:
        rname = name
        rdesc = "running 'cloud-init %s'" % name
        report_on = False

    args.reporter = events.ReportEventStack(
        rname, rdesc, reporting_enabled=report_on)
    with args.reporter:
        retval = util.log_time(
            logfunc=LOG.debug, msg="cloud-init mode '%s'" % name,
            get_uptime=True, func=functor, args=(name, args))
        reporting.flush_events()
        return retval
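
# Illustrative direct call (an assumption, e.g. from a test or interactive
# session, not part of the module): an argv-style list exercises the lazy
# subparser loading above, and the 'modules' flags are fully defined in this
# excerpt. The return value is whatever the dispatched functor returns.
example_exit_code = main(['cloud-init', 'modules', '--mode', 'final'])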
def handle_args(name, args):
    if not args.directory.endswith("/"):
        args.directory += "/"

    if not os.path.isdir(args.directory):
        os.makedirs(args.directory)

    if args.debug:
        log.setupBasicLogging(level=log.DEBUG)
    else:
        log.setupBasicLogging(level=log.WARN)

    if args.mac:
        known_macs = {}
        for item in args.mac:
            iface_name, iface_mac = item.split(",", 1)
            known_macs[iface_mac] = iface_name
    else:
        known_macs = None

    net_data = args.network_data.read()
    if args.kind == "eni":
        pre_ns = eni.convert_eni_data(net_data)
    elif args.kind == "yaml":
        pre_ns = safeyaml.load(net_data)
        if "network" in pre_ns:
            pre_ns = pre_ns.get("network")
        if args.debug:
            sys.stderr.write("\n".join(
                ["Input YAML", safeyaml.dumps(pre_ns), ""]))
    elif args.kind == "network_data.json":
        pre_ns = openstack.convert_net_json(json.loads(net_data),
                                            known_macs=known_macs)
    elif args.kind == "azure-imds":
        pre_ns = azure.parse_network_config(json.loads(net_data))
    elif args.kind == "vmware-imc":
        config = ovf.Config(ovf.ConfigFile(args.network_data.name))
        pre_ns = ovf.get_network_config_from_conf(config, False)

    ns = network_state.parse_net_config_data(pre_ns)

    if args.debug:
        sys.stderr.write("\n".join(["", "Internal State", yaml.dump(ns), ""]))

    distro_cls = distros.fetch(args.distro)
    distro = distro_cls(args.distro, {}, None)
    config = {}
    if args.output_kind == "eni":
        r_cls = eni.Renderer
        config = distro.renderer_configs.get("eni")
    elif args.output_kind == "netplan":
        r_cls = netplan.Renderer
        config = distro.renderer_configs.get("netplan")
        # don't run netplan generate/apply
        config["postcmds"] = False
        # trim leading slash
        config["netplan_path"] = config["netplan_path"][1:]
        # enable some netplan features
        config["features"] = ["dhcp-use-domains", "ipv6-mtu"]
    elif args.output_kind == "networkd":
        r_cls = networkd.Renderer
        config = distro.renderer_configs.get("networkd")
    elif args.output_kind == "sysconfig":
        r_cls = sysconfig.Renderer
        config = distro.renderer_configs.get("sysconfig")
    elif args.output_kind == "network-manager":
        r_cls = network_manager.Renderer
        config = distro.renderer_configs.get("network-manager")
    else:
        raise RuntimeError("Invalid output_kind")

    r = r_cls(config=config)
    sys.stderr.write("".join([
        "Read input format '%s' from '%s'.\n" % (args.kind,
                                                 args.network_data.name),
        "Wrote output format '%s' to '%s'\n" % (args.output_kind,
                                                args.directory),
    ]) + "\n")
    r.render_network_state(network_state=ns, target=args.directory)
def handle_args(name, args):
    if not args.directory.endswith("/"):
        args.directory += "/"

    if not os.path.isdir(args.directory):
        os.makedirs(args.directory)

    if args.debug:
        log.setupBasicLogging(level=log.DEBUG)
    else:
        log.setupBasicLogging(level=log.WARN)

    if args.mac:
        known_macs = {}
        for item in args.mac:
            iface_name, iface_mac = item.split(",", 1)
            known_macs[iface_mac] = iface_name
    else:
        known_macs = None

    net_data = args.network_data.read()
    if args.kind == "eni":
        pre_ns = eni.convert_eni_data(net_data)
        ns = network_state.parse_net_config_data(pre_ns)
    elif args.kind == "yaml":
        pre_ns = yaml.load(net_data)
        if 'network' in pre_ns:
            pre_ns = pre_ns.get('network')
        if args.debug:
            sys.stderr.write('\n'.join([
                "Input YAML",
                yaml.dump(pre_ns, default_flow_style=False, indent=4), ""]))
        ns = network_state.parse_net_config_data(pre_ns)
    elif args.kind == 'network_data.json':
        pre_ns = openstack.convert_net_json(json.loads(net_data),
                                            known_macs=known_macs)
        ns = network_state.parse_net_config_data(pre_ns)
    elif args.kind == 'azure-imds':
        pre_ns = azure.parse_network_config(json.loads(net_data))
        ns = network_state.parse_net_config_data(pre_ns)

    if not ns:
        raise RuntimeError("No valid network_state object created from "
                           "input data")

    if args.debug:
        sys.stderr.write('\n'.join([
            "", "Internal State",
            yaml.dump(ns, default_flow_style=False, indent=4), ""]))

    if args.output_kind == "eni":
        r_cls = eni.Renderer
    elif args.output_kind == "netplan":
        r_cls = netplan.Renderer
    else:
        r_cls = sysconfig.Renderer

    r = r_cls()
    sys.stderr.write(''.join([
        "Read input format '%s' from '%s'.\n" % (
            args.kind, args.network_data.name),
        "Wrote output format '%s' to '%s'\n" % (
            args.output_kind, args.directory)]) + "\n")
    r.render_network_state(network_state=ns, target=args.directory)
def addLogHandlerCLI(logger, log_level):
    """Add a commandline logging handler to emit messages to stderr."""
    formatter = logging.Formatter('%(levelname)s: %(message)s')
    log.setupBasicLogging(log_level, formatter=formatter)
    return logger
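
# Usage sketch (illustrative; the logger name here is a made-up example):
# per the docstring above, the helper configures cloud-init's basic logging
# with a terse "LEVEL: message" stderr format and returns the logger it was
# given, so it can be used inline when a devel subcommand starts.
import logging

cli_logger = addLogHandlerCLI(logging.getLogger('cloudinit.cmd.devel'),
                              logging.DEBUG)
cli_logger.debug('CLI logging enabled')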
def main(sysv_args=None):
    if sysv_args is not None:
        parser = argparse.ArgumentParser(prog=sysv_args[0])
        sysv_args = sysv_args[1:]
    else:
        parser = argparse.ArgumentParser()

    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append',
                        dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force', default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers()

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))

    # These settings are used when you want to query information
    # stored in the cloud-init data objects/directories/files
    parser_query = subparsers.add_parser('query',
                                         help=('query information stored '
                                               'in cloud-init'))
    parser_query.add_argument("--name", '-n', action="store",
                              help="item name to query on",
                              required=True,
                              choices=QUERY_DATA_TYPES)
    parser_query.set_defaults(action=('query', main_query))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency", action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    parser_single.set_defaults(action=('single', main_single))

    parser_dhclient = subparsers.add_parser('dhclient-hook',
                                            help=('run the dhclient hook'
                                                  ' to record network info'))
    parser_dhclient.add_argument("net_action",
                                 help=('action taken on the interface'))
    parser_dhclient.add_argument("net_interface",
                                 help=('the network interface being acted'
                                       ' upon'))
    parser_dhclient.set_defaults(action=('dhclient_hook', dhclient_hook))

    args = parser.parse_args(args=sysv_args)

    try:
        (name, functor) = args.action
    except AttributeError:
        parser.error('too few arguments')

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode...
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    if name in ("modules", "init"):
        functor = status_wrapper

    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report
    elif name == 'dhclient_hook':
        rname, rdesc = ("dhclient-hook",
                        "running dhclient-hook module")

    args.reporter = events.ReportEventStack(rname, rdesc,
                                            reporting_enabled=report_on)
    with args.reporter:
        return util.log_time(logfunc=LOG.debug,
                             msg="cloud-init mode '%s'" % name,
                             get_uptime=True, func=functor,
                             args=(name, args))
def handle_args(name, args):
    if not args.directory.endswith("/"):
        args.directory += "/"

    if not os.path.isdir(args.directory):
        os.makedirs(args.directory)

    if args.debug:
        log.setupBasicLogging(level=log.DEBUG)
    else:
        log.setupBasicLogging(level=log.WARN)

    if args.mac:
        known_macs = {}
        for item in args.mac:
            iface_name, iface_mac = item.split(",", 1)
            known_macs[iface_mac] = iface_name
    else:
        known_macs = None

    net_data = args.network_data.read()
    if args.kind == "eni":
        pre_ns = eni.convert_eni_data(net_data)
    elif args.kind == "yaml":
        pre_ns = yaml.load(net_data)
        if 'network' in pre_ns:
            pre_ns = pre_ns.get('network')
        if args.debug:
            sys.stderr.write('\n'.join(
                ["Input YAML",
                 yaml.dump(pre_ns, default_flow_style=False, indent=4), ""]))
    elif args.kind == 'network_data.json':
        pre_ns = openstack.convert_net_json(
            json.loads(net_data), known_macs=known_macs)
    elif args.kind == 'azure-imds':
        pre_ns = azure.parse_network_config(json.loads(net_data))
    elif args.kind == 'vmware-imc':
        config = ovf.Config(ovf.ConfigFile(args.network_data.name))
        pre_ns = ovf.get_network_config_from_conf(config, False)

    ns = network_state.parse_net_config_data(pre_ns)
    if not ns:
        raise RuntimeError("No valid network_state object created from "
                           "input data")

    if args.debug:
        sys.stderr.write('\n'.join([
            "", "Internal State",
            yaml.dump(ns, default_flow_style=False, indent=4), ""]))

    distro_cls = distros.fetch(args.distro)
    distro = distro_cls(args.distro, {}, None)
    config = {}
    if args.output_kind == "eni":
        r_cls = eni.Renderer
        config = distro.renderer_configs.get('eni')
    elif args.output_kind == "netplan":
        r_cls = netplan.Renderer
        config = distro.renderer_configs.get('netplan')
        # don't run netplan generate/apply
        config['postcmds'] = False
        # trim leading slash
        config['netplan_path'] = config['netplan_path'][1:]
    else:
        r_cls = sysconfig.Renderer
        config = distro.renderer_configs.get('sysconfig')

    r = r_cls(config=config)
    sys.stderr.write(''.join([
        "Read input format '%s' from '%s'.\n" % (
            args.kind, args.network_data.name),
        "Wrote output format '%s' to '%s'\n" % (
            args.output_kind, args.directory)]) + "\n")
    r.render_network_state(network_state=ns, target=args.directory)
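
# Usage sketch (illustrative only; 'net.yaml' and 'out/' are made-up example
# paths, not part of the module): build a Namespace carrying the attributes
# handle_args reads above and render a YAML network config into sysconfig
# files under ./out/.
import argparse

example_args = argparse.Namespace(
    network_data=open('net.yaml'),  # file-like object; .read() and .name used
    kind='yaml',                    # selects the YAML input branch above
    output_kind='sysconfig',        # renderer to use for output
    distro='rhel',                  # picks that distro's renderer config
    directory='out/',               # output target (created if missing)
    mac=None,                       # optional list of "ifname,mac" overrides
    debug=False,
)
handle_args('net-convert', example_args)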