def __init__(self, ds_deps=None, reporter=None):
    """Set up datasource dependencies and lazily-created stage state.

    :param ds_deps: list of datasource dependency constants; defaults to
        filesystem + network when not given.
    :param reporter: optional ReportEventStack; a disabled fallback stack
        is created when None.
    """
    default_deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK]
    self.ds_deps = ds_deps if ds_deps is not None else default_deps
    # Created on first use
    self._cfg = None
    self._paths = None
    self._distro = None
    # Changed only when a fetch occurs
    self.datasource = NULL_DATA_SOURCE
    self.ds_restored = False
    self._previous_iid = None

    if reporter is not None:
        self.reporter = reporter
    else:
        # No reporter supplied: use a stack that records nothing.
        self.reporter = events.ReportEventStack(
            name="init-reporter",
            description="init-desc",
            reporting_enabled=False,
        )
def _get_raw_goal_state_xml_from_azure(self) -> str:
    """Fetches the GoalState XML from the Azure endpoint and returns the
    XML as a string.

    @return: GoalState XML string
    """
    LOG.info('Registering with Azure...')
    url = 'http://{}/machine/?comp=goalstate'.format(self.endpoint)
    # Wrap the endpoint round-trip in a telemetry event so retrieval
    # time shows up under the Azure datasource reporter.
    retrieval_event = events.ReportEventStack(
        name="goalstate-retrieval",
        description="retrieve goalstate",
        parent=azure_ds_reporter)
    try:
        with retrieval_event:
            response = self.azure_endpoint_client.get(url)
    except Exception as e:
        msg = 'failed to register with Azure: %s' % e
        LOG.warning(msg)
        report_diagnostic_event(msg)
        raise
    LOG.debug('Successfully fetched GoalState XML.')
    return response.contents
def __init__(
    self,
    datasource: DataSource,
    paths: Paths,
    cfg: dict,
    distro: Distro,
    runners: Runners,
    reporter: Optional[events.ReportEventStack] = None,
):
    """Bind the datasource, paths, config, distro and runners together.

    :param reporter: optional event stack; when None a disabled
        placeholder reporter is created so callers can always nest
        events under ``self.reporter``.
    """
    self.datasource = datasource
    self.paths = paths
    self.distro = distro
    self._cfg = cfg
    self._runners = runners
    if reporter is not None:
        self.reporter = reporter
    else:
        self.reporter = events.ReportEventStack(
            name="unnamed-cloud-reporter",
            description="unnamed-cloud-reporter",
            reporting_enabled=False,
        )
def __init__(self, ds_deps: Optional[List[str]] = None, reporter=None):
    """Initialize stage state; heavyweight members are created lazily.

    :param ds_deps: datasource dependency constants; defaults to
        filesystem + network when not given.
    :param reporter: optional ReportEventStack; a disabled fallback
        stack is created when None.
    """
    if ds_deps is None:
        ds_deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK]
    self.ds_deps = ds_deps
    # Created on first use
    self._cfg: Optional[dict] = None
    self._paths: Optional[helpers.Paths] = None
    self._distro: Optional[distros.Distro] = None
    # Changed only when a fetch occurs
    self.datasource: Optional[sources.DataSource] = None
    self.ds_restored = False
    self._previous_iid = None

    if reporter is None:
        # No reporter supplied: use a stack that records nothing.
        reporter = events.ReportEventStack(
            name="init-reporter",
            description="init-desc",
            reporting_enabled=False,
        )
    self.reporter = reporter
def _get_data_source(self, existing):
    """Return the datasource, restoring from cache or searching anew.

    :param existing: cache policy — "trust" reuses any cached datasource
        as-is; "check" additionally asks the cached datasource to
        validate its instance-id before reuse.
    :return: the active datasource (also stored on ``self.datasource``).
    """
    # A previous fetch already resolved the datasource; reuse it.
    if self.datasource is not NULL_DATA_SOURCE:
        return self.datasource
    with events.ReportEventStack(
            name="check-cache",
            description="attempting to read from cache [%s]" % existing,
            parent=self.reporter) as myrep:
        ds = self._restore_from_cache()
        if ds and existing == "trust":
            myrep.description = "restored from cache: %s" % ds
        elif ds and existing == "check":
            # Only reuse the cached datasource when it can confirm the
            # instance-id still matches; otherwise discard it.
            if (hasattr(ds, 'check_instance_id') and
                    ds.check_instance_id(self.cfg)):
                myrep.description = "restored from checked cache: %s" % ds
            else:
                myrep.description = "cache invalid in datasource: %s" % ds
                ds = None
        else:
            myrep.description = "no cache found"
        # Record whether a cached datasource was used before logging the
        # final description chosen above.
        self.ds_restored = bool(ds)
        LOG.debug(myrep.description)
    if not ds:
        # No (valid) cache: drop the stale instance symlink and run a
        # full datasource search.
        util.del_file(self.paths.instance_link)
        (cfg_list, pkg_list) = self._get_datasources()
        # Deep copy so that user-data handlers can not modify
        # (which will affect user-data handlers down the line...)
        (ds, dsname) = sources.find_source(self.cfg,
                                           self.distro,
                                           self.paths,
                                           copy.deepcopy(self.ds_deps),
                                           cfg_list,
                                           pkg_list,
                                           self.reporter)
        LOG.info("Loaded datasource %s - %s", dsname, ds)
    self.datasource = ds
    # Ensure we adjust our path members datasource
    # now that we have one (thus allowing ipath to be used)
    self._reset()
    return ds
def handle_args(name, args):
    """Bootstrap cloud-init state and dispatch a hotplug udev event.

    :param name: invoked subcommand name (used for the reporting stack).
    :param args: parsed arguments carrying ``udevaction``, ``subsystem``
        and ``devpath`` from the udev rule.
    :raises Exception: re-raises any fatal error from handle_hotplug
        after logging it.
    """
    # Note that if an exception happens between now and when logging is
    # setup, we'll only see it in the journal
    hotplug_reporter = events.ReportEventStack(name, __doc__,
                                               reporting_enabled=True)

    hotplug_init = Init(ds_deps=[], reporter=hotplug_reporter)
    hotplug_init.read_cfg()

    log.setupLogging(hotplug_init.cfg)
    if 'reporting' in hotplug_init.cfg:
        reporting.update_configuration(hotplug_init.cfg.get('reporting'))

    # Logging isn't going to be setup until now.
    # FIX: the arguments were previously logged twice in two different
    # formats (a merge artifact); keep the single multi-line form.
    LOG.debug(
        '%s called with the following arguments:\n'
        'udevaction: %s\n'
        'subsystem: %s\n'
        'devpath: %s',
        name, args.udevaction, args.subsystem, args.devpath)

    with hotplug_reporter:
        try:
            handle_hotplug(
                hotplug_init=hotplug_init,
                devpath=args.devpath,
                subsystem=args.subsystem,
                udevaction=args.udevaction,
            )
        except Exception:
            LOG.exception('Received fatal exception handling hotplug!')
            raise

    LOG.debug('Exiting hotplug handler')
    reporting.flush_events()
def main(sysv_args=None):
    """Parse command line arguments and dispatch the selected subcommand.

    :param sysv_args: argv-style list whose first element is the program
        name; when None, argparse falls back to sys.argv.
    :return: the subcommand functor's return value.
    """
    if sysv_args is not None:
        parser = argparse.ArgumentParser(prog=sysv_args[0])
        sysv_args = sysv_args[1:]
    else:
        parser = argparse.ArgumentParser()

    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append',
                        dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force', default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers()

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))

    # These settings are used when you want to query information
    # stored in the cloud-init data objects/directories/files
    parser_query = subparsers.add_parser('query',
                                         help=('query information stored '
                                               'in cloud-init'))
    parser_query.add_argument("--name", '-n', action="store",
                              help="item name to query on",
                              required=True,
                              choices=QUERY_DATA_TYPES)
    parser_query.set_defaults(action=('query', main_query))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency", action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    parser_single.set_defaults(action=('single', main_single))

    # FIX: added the missing space between 'hook' and 'to' in the help.
    parser_dhclient = subparsers.add_parser('dhclient-hook',
                                            help=('run the dhclient hook'
                                                  ' to record network info'))
    parser_dhclient.add_argument("net_action",
                                 help=('action taken on the interface'))
    parser_dhclient.add_argument("net_interface",
                                 help=('the network interface being acted'
                                       ' upon'))
    parser_dhclient.set_defaults(action=('dhclient_hook', dhclient_hook))

    args = parser.parse_args(args=sysv_args)

    try:
        (name, functor) = args.action
    except AttributeError:
        parser.error('too few arguments')

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode...
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    if name in ("modules", "init"):
        functor = status_wrapper

    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report
    elif name == 'dhclient_hook':
        rname, rdesc = ("dhclient-hook",
                        "running dhclient-hook module")
    else:
        # FIX: subcommands without a dedicated branch above (e.g. 'query')
        # previously left rname/rdesc unbound, raising UnboundLocalError
        # when constructing the reporter below.
        rname = name
        rdesc = "running 'cloud-init %s'" % name
        report_on = False

    args.reporter = events.ReportEventStack(rname, rdesc,
                                            reporting_enabled=report_on)
    with args.reporter:
        return util.log_time(logfunc=LOG.debug,
                             msg="cloud-init mode '%s'" % name,
                             get_uptime=True, func=functor,
                             args=(name, args))
def main(sysv_args=None):
    """Parse command line arguments and dispatch the selected subcommand.

    :param sysv_args: argv-style list (program name first); falls back
        to sys.argv when empty/None.
    :return: the selected subcommand functor's return value.
    """
    if not sysv_args:
        sysv_args = sys.argv
    parser = argparse.ArgumentParser(prog=sysv_args[0])
    sysv_args = sysv_args[1:]

    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append',
                        dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force', default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers(title='Subcommands', dest='subcommand')
    subparsers.required = True

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency",
                               action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    parser_single.set_defaults(action=('single', main_single))

    # NOTE(review): help string lacks a space between 'hook' and 'to'
    # ("...hookto record..."); fix separately if user-visible text matters.
    parser_dhclient = subparsers.add_parser('dhclient-hook',
                                            help=('run the dhclient hook'
                                                  'to record network info'))
    parser_dhclient.add_argument("net_action",
                                 help=('action taken on the interface'))
    parser_dhclient.add_argument("net_interface",
                                 help=('the network interface being acted'
                                       ' upon'))
    parser_dhclient.set_defaults(action=('dhclient_hook', dhclient_hook))

    parser_features = subparsers.add_parser('features',
                                            help=('list defined features'))
    parser_features.set_defaults(action=('features', main_features))

    # The following subparsers are stubs here; their real argument sets
    # are attached below only when the matching subcommand was invoked.
    parser_analyze = subparsers.add_parser(
        'analyze', help='Devel tool: Analyze cloud-init logs and data')

    parser_devel = subparsers.add_parser('devel',
                                         help='Run development tools')

    parser_collect_logs = subparsers.add_parser(
        'collect-logs', help='Collect and tar all cloud-init debug info')

    parser_clean = subparsers.add_parser(
        'clean', help='Remove logs and artifacts so cloud-init can re-run.')

    parser_status = subparsers.add_parser(
        'status', help='Report cloud-init status or wait on completion.')

    if sysv_args:
        # Only load subparsers if subcommand is specified to avoid load cost
        if sysv_args[0] == 'analyze':
            from cloudinit.analyze.__main__ import get_parser as analyze_parser
            # Construct analyze subcommand parser
            analyze_parser(parser_analyze)
        elif sysv_args[0] == 'devel':
            from cloudinit.cmd.devel.parser import get_parser as devel_parser
            # Construct devel subcommand parser
            devel_parser(parser_devel)
        elif sysv_args[0] == 'collect-logs':
            from cloudinit.cmd.devel.logs import (get_parser as logs_parser,
                                                  handle_collect_logs_args)
            logs_parser(parser_collect_logs)
            parser_collect_logs.set_defaults(
                action=('collect-logs', handle_collect_logs_args))
        elif sysv_args[0] == 'clean':
            from cloudinit.cmd.clean import (get_parser as clean_parser,
                                             handle_clean_args)
            clean_parser(parser_clean)
            parser_clean.set_defaults(action=('clean', handle_clean_args))
        elif sysv_args[0] == 'status':
            from cloudinit.cmd.status import (get_parser as status_parser,
                                              handle_status_args)
            status_parser(parser_status)
            parser_status.set_defaults(action=('status', handle_status_args))

    args = parser.parse_args(args=sysv_args)

    # Subparsers.required = True and each subparser sets action=(name, functor)
    (name, functor) = args.action

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode.
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    if name in ("modules", "init"):
        # Wrap init/modules so exit status reflects recorded stage results.
        functor = status_wrapper

    rname = None
    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report
    else:
        # Catch-all: every subcommand gets defined reporter name/desc.
        rname = name
        rdesc = "running 'cloud-init %s'" % name
        report_on = False

    args.reporter = events.ReportEventStack(rname, rdesc,
                                            reporting_enabled=report_on)
    with args.reporter:
        return util.log_time(logfunc=LOG.debug,
                             msg="cloud-init mode '%s'" % name,
                             get_uptime=True, func=functor,
                             args=(name, args))
def __enter__(self):
    """Enter the wrapped EphemeralDHCPv4 context, reporting the lease
    acquisition as a telemetry event under ``self.reporter``."""
    lease_event = events.ReportEventStack(
        name="obtain-dhcp-lease",
        description="obtain dhcp lease",
        parent=self.reporter)
    with lease_event:
        return self.ephemeralDHCPv4.__enter__()
def impl(*args, **kwargs):
    # Run the wrapped function inside a reporting stack named after it,
    # parented to the Azure datasource telemetry reporter.
    call_event = events.ReportEventStack(
        name=func.__name__,
        description=func.__name__,
        parent=azure_ds_reporter)
    with call_event:
        return func(*args, **kwargs)
from cloudinit.reporting import events
from cloudinit.net.dhcp import EphemeralDHCPv4
from datetime import datetime

LOG = logging.getLogger(__name__)

# This endpoint matches the format as found in dhcp lease files, since this
# value is applied if the endpoint can't be found within a lease file
DEFAULT_WIRESERVER_ENDPOINT = "a8:3f:81:10"

BOOT_EVENT_TYPE = 'boot-telemetry'
SYSTEMINFO_EVENT_TYPE = 'system-info'
DIAGNOSTIC_EVENT_TYPE = 'diagnostic'

# Root reporting stack; Azure datasource telemetry events parent here.
azure_ds_reporter = events.ReportEventStack(
    name="azure-ds",
    description="initialize reporter for azure ds",
    reporting_enabled=True)


def azure_ds_telemetry_reporter(func):
    """Decorator reporting each call to *func* as a telemetry event
    parented to ``azure_ds_reporter``."""
    def impl(*args, **kwargs):
        with events.ReportEventStack(name=func.__name__,
                                     description=func.__name__,
                                     parent=azure_ds_reporter):
            return func(*args, **kwargs)
    return impl


@azure_ds_telemetry_reporter
def get_boot_telemetry():
def main(sysv_args=None):
    """Parse command line arguments, set up reporting/logging, and run
    the selected cloud-init subcommand.

    :param sysv_args: argv-style list (program name first); falls back
        to sys.argv when empty/None.
    :return: the subcommand functor's return value, after flushing
        reporting events.
    """
    if not sysv_args:
        sysv_args = sys.argv
    # NOTE(review): pop(0) mutates the caller-supplied list — confirm no
    # caller reuses it afterwards.
    parser = argparse.ArgumentParser(prog=sysv_args.pop(0))

    # Top level args
    parser.add_argument(
        "--version",
        "-v",
        action="version",
        version="%(prog)s " + (version.version_string()),
        help="Show program's version number and exit.",
    )
    parser.add_argument(
        "--file",
        "-f",
        action="append",
        dest="files",
        help="Use additional yaml configuration files.",
        type=argparse.FileType("rb"),
    )
    parser.add_argument(
        "--debug",
        "-d",
        action="store_true",
        help="Show additional pre-action logging (default: %(default)s).",
        default=False,
    )
    parser.add_argument(
        "--force",
        action="store_true",
        help=(
            "Force running even if no datasource is"
            " found (use at your own risk)."
        ),
        dest="force",
        default=False,
    )
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers(title="Subcommands", dest="subcommand")
    subparsers.required = True

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser(
        "init", help="Initialize cloud-init and perform initial modules."
    )
    parser_init.add_argument(
        "--local",
        "-l",
        action="store_true",
        help="Start in local mode (default: %(default)s).",
        default=False,
    )
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=("init", main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser(
        "modules", help="Activate modules using a given configuration key."
    )
    parser_mod.add_argument(
        "--mode",
        "-m",
        action="store",
        help="Module configuration name to use (default: %(default)s).",
        default="config",
        choices=("init", "config", "final"),
    )
    parser_mod.set_defaults(action=("modules", main_modules))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser(
        "single", help="Run a single module."
    )
    parser_single.add_argument(
        "--name",
        "-n",
        action="store",
        help="module name to run",
        required=True,
    )
    parser_single.add_argument(
        "--frequency",
        action="store",
        help="Set module frequency.",
        required=False,
        choices=list(FREQ_SHORT_NAMES.keys()),
    )
    parser_single.add_argument(
        "--report",
        action="store_true",
        help="Enable reporting.",
        required=False,
    )
    parser_single.add_argument(
        "module_args",
        nargs="*",
        metavar="argument",
        help="Any additional arguments to pass to this module.",
    )
    parser_single.set_defaults(action=("single", main_single))

    # The following subparsers are stubs here; their real argument sets
    # are attached below only when the matching subcommand was invoked.
    parser_query = subparsers.add_parser(
        "query",
        help="Query standardized instance metadata from the command line.",
    )

    parser_dhclient = subparsers.add_parser(
        dhclient_hook.NAME, help=dhclient_hook.__doc__
    )
    dhclient_hook.get_parser(parser_dhclient)

    parser_features = subparsers.add_parser(
        "features", help="List defined features."
    )
    parser_features.set_defaults(action=("features", main_features))

    parser_analyze = subparsers.add_parser(
        "analyze", help="Devel tool: Analyze cloud-init logs and data."
    )

    parser_devel = subparsers.add_parser(
        "devel", help="Run development tools."
    )

    parser_collect_logs = subparsers.add_parser(
        "collect-logs", help="Collect and tar all cloud-init debug info."
    )

    parser_clean = subparsers.add_parser(
        "clean", help="Remove logs and artifacts so cloud-init can re-run."
    )

    parser_status = subparsers.add_parser(
        "status", help="Report cloud-init status or wait on completion."
    )

    parser_schema = subparsers.add_parser(
        "schema", help="Validate cloud-config files using jsonschema."
    )

    if sysv_args:
        # Only load subparsers if subcommand is specified to avoid load cost
        subcommand = sysv_args[0]
        if subcommand == "analyze":
            from cloudinit.analyze.__main__ import get_parser as analyze_parser

            # Construct analyze subcommand parser
            analyze_parser(parser_analyze)
        elif subcommand == "devel":
            from cloudinit.cmd.devel.parser import get_parser as devel_parser

            # Construct devel subcommand parser
            devel_parser(parser_devel)
        elif subcommand == "collect-logs":
            from cloudinit.cmd.devel.logs import (
                get_parser as logs_parser,
                handle_collect_logs_args,
            )

            logs_parser(parser_collect_logs)
            parser_collect_logs.set_defaults(
                action=("collect-logs", handle_collect_logs_args)
            )
        elif subcommand == "clean":
            from cloudinit.cmd.clean import (
                get_parser as clean_parser,
                handle_clean_args,
            )

            clean_parser(parser_clean)
            parser_clean.set_defaults(action=("clean", handle_clean_args))
        elif subcommand == "query":
            from cloudinit.cmd.query import (
                get_parser as query_parser,
                handle_args as handle_query_args,
            )

            query_parser(parser_query)
            # NOTE(review): action name is 'render', not 'query' —
            # presumably intentional; confirm against consumers of the
            # action name.
            parser_query.set_defaults(action=("render", handle_query_args))
        elif subcommand == "schema":
            from cloudinit.config.schema import (
                get_parser as schema_parser,
                handle_schema_args,
            )

            schema_parser(parser_schema)
            parser_schema.set_defaults(action=("schema", handle_schema_args))

    args = parser.parse_args(args=sysv_args)

    # Subparsers.required = True and each subparser sets action=(name, functor)
    (name, functor) = args.action

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode.
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    if name in ("modules", "init"):
        # Wrap init/modules so exit status reflects recorded stage results.
        functor = status_wrapper

    rname = None
    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = (
                "init-network",
                "searching for network datasources",
            )
    elif name == "modules":
        rname, rdesc = (
            "modules-%s" % args.mode,
            "running modules for %s" % args.mode,
        )
    elif name == "single":
        rname, rdesc = (
            "single/%s" % args.name,
            "running single module %s" % args.name,
        )
        report_on = args.report
    else:
        # Catch-all: every subcommand gets defined reporter name/desc.
        rname = name
        rdesc = "running 'cloud-init %s'" % name
        report_on = False

    args.reporter = events.ReportEventStack(
        rname, rdesc, reporting_enabled=report_on
    )

    with args.reporter:
        retval = util.log_time(
            logfunc=LOG.debug,
            msg="cloud-init mode '%s'" % name,
            get_uptime=True,
            func=functor,
            args=(name, args),
        )
        reporting.flush_events()
        return retval
def test_set_invalid_result_raises_value_error(self):
    """Assigning an unknown result status must raise ValueError."""
    stack = events.ReportEventStack("myname", "mydesc")
    self.assertRaises(ValueError, setattr, stack, "result", "BOGUS")
def test_reporting_event_has_sane_repr(self):
    """repr() of an enabled event stack should include its name,
    description and the reporting_enabled flag."""
    # Use the repr() builtin rather than calling __repr__ directly.
    myrep = repr(
        events.ReportEventStack(
            "fooname", "foodesc", reporting_enabled=True
        )
    )
    self.assertIn("fooname", myrep)
    self.assertIn("foodesc", myrep)
    self.assertIn("True", myrep)
def test_reporting_disabled_does_not_report_events(
        self, report_start, report_finish):
    """With reporting_enabled=False, neither start nor finish fires."""
    stack = events.ReportEventStack("a", "b", reporting_enabled=False)
    with stack:
        pass
    self.assertEqual(report_start.call_count, 0)
    self.assertEqual(report_finish.call_count, 0)