def welcome_format(action):
    """Render the welcome banner for *action* via str.format."""
    substitutions = dict(
        version=version.version_string(),
        uptime=util.uptime(),
        timestamp=util.time_rfc2822(),
        action=action,
    )
    return WELCOME_MSG_TPL.format(**substitutions)
def welcome_format(action):
    """Render the welcome banner template for *action*."""
    return templater.render_string(
        WELCOME_MSG_TPL,
        {
            'version': version.version_string(),
            'uptime': util.uptime(),
            'timestamp': util.time_rfc2822(),
            'action': action,
        },
    )
def _handle_exit(signum, frame):
    """Signal handler: log the mapped message plus a bounded frame trace,
    then exit with the exit code registered for *signum* in EXIT_FOR."""
    msg, rc = EXIT_FOR[signum]
    buf = StringIO()
    buf.write("%s\n" % (msg % {"version": vr.version_string()}))
    _pprint_frame(frame, 1, BACK_FRAME_TRACE_DEPTH, buf)
    util.multi_log(buf.getvalue(), console=True, stderr=False, log=LOG)
    sys.exit(rc)
def _handle_exit(signum, frame):
    # Signal handler for fatal signals: look up the message template and
    # exit code mapped to this signal, interpolate the running cloud-init
    # version, append a stack-frame trace limited to BACK_FRAME_TRACE_DEPTH
    # frames, emit the whole report to console and log, then exit with the
    # mapped return code.
    (msg, rc) = EXIT_FOR[signum]
    msg = msg % ({'version': vr.version_string()})
    contents = StringIO()
    contents.write("%s\n" % (msg))
    _pprint_frame(frame, 1, BACK_FRAME_TRACE_DEPTH, contents)
    util.multi_log(contents.getvalue(), console=True, stderr=False, log=LOG)
    sys.exit(rc)
def request(cls, **kwargs):
    """Fake Session.request: assert the exact kwargs readurl built and
    return the canned mock response (self/url/m_response come from the
    enclosing test method)."""
    expected = {
        'url': url,
        'allow_redirects': True,
        'method': 'GET',
        'headers': {
            'User-Agent': 'Cloud-Init/%s' % (version.version_string())
        }
    }
    self.assertEqual(expected, kwargs)
    return m_response
def request(cls, **kwargs):
    # Stand-in for requests.Session.request installed as a mock side
    # effect: asserts that readurl built exactly the expected request
    # arguments (including the Cloud-Init/<version> User-Agent header)
    # and returns the prepared mock response. `self`, `url` and
    # `m_response` are captured from the enclosing test method.
    self.assertEqual(
        {
            "url": url,
            "allow_redirects": True,
            "method": "GET",
            "headers": {
                "User-Agent": "Cloud-Init/%s" % (version.version_string())
            },
        },
        kwargs,
    )
    return m_response
def get_system_info():
    """Collect and report system information"""
    info = util.system_info()
    description = (
        "cloudinit_version=%s, kernel_version=%s, variant=%s, "
        "distro_name=%s, distro_version=%s, flavor=%s, "
        "python_version=%s"
        % (
            version.version_string(),
            info['release'],
            info['variant'],
            info['dist'][0],
            info['dist'][1],
            info['dist'][2],
            info['python'],
        )
    )
    evt = events.ReportingEvent(
        SYSTEMINFO_EVENT_TYPE,
        'system information',
        description,
        events.DEFAULT_EVENT_ORIGIN,
    )
    events.report_event(evt)
    # return the event for unit testing purpose
    return evt
def request(cls, **kwargs):
    """Fake Session.request asserting the exact kwargs readurl built;
    the 'timeout' key is only expected when request_timeout is set."""
    expected_kwargs = {
        "url": url,
        "allow_redirects": True,
        "method": "GET",
        "headers": {
            "User-Agent": "Cloud-Init/%s" % (version.version_string())
        },
    }
    # Equivalent to building with "timeout" then popping it when None.
    if request_timeout is not None:
        expected_kwargs["timeout"] = request_timeout
    assert kwargs == expected_kwargs
    return m_response
def handle(_name, cfg, cloud, log, args):
    """Render the configured final message and write the boot-finished
    marker file; warn if the fallback datasource was used."""
    # Message template: module args win, then 'final_message' config,
    # then the built-in default.
    if len(args) != 0:
        msg_in = str(args[0])
    else:
        msg_in = util.get_cfg_option_str(cfg, "final_message", "")
    msg_in = msg_in.strip()
    if not msg_in:
        msg_in = FINAL_MESSAGE_DEF

    uptime = util.uptime()
    ts = util.time_rfc2822()
    cver = version.version_string()
    try:
        subs = {
            "uptime": uptime,
            "timestamp": ts,
            "version": cver,
            "datasource": str(cloud.datasource),
        }
        # Also expose UPPERCASE variants of every substitution key.
        subs.update({k.upper(): v for k, v in subs.items()})
        util.multi_log(
            "%s\n" % (templater.render_string(msg_in, subs)),
            console=False,
            stderr=True,
            log=log,
        )
    except Exception:
        util.logexc(log, "Failed to render final message template")

    boot_fin_fn = cloud.paths.boot_finished
    try:
        contents = "%s - %s - v. %s\n" % (uptime, ts, cver)
        util.write_file(boot_fin_fn, contents, ensure_dir_exists=False)
    except Exception:
        util.logexc(log, "Failed to write boot finished file %s", boot_fin_fn)

    if cloud.datasource.is_disconnected:
        log.warning("Used fallback datasource")
def handle(_name, cfg, cloud, log, args):
    """Render the configured final message and write the boot-finished
    marker file.

    The message template comes from the module args if given, otherwise
    from the 'final_message' config option, falling back to
    FINAL_MESSAGE_DEF when empty.
    """
    msg_in = ''
    if len(args) != 0:
        msg_in = str(args[0])
    else:
        msg_in = util.get_cfg_option_str(cfg, "final_message", "")
    msg_in = msg_in.strip()
    if not msg_in:
        msg_in = FINAL_MESSAGE_DEF
    uptime = util.uptime()
    ts = util.time_rfc2822()
    cver = version.version_string()
    try:
        subs = {
            'uptime': uptime,
            'timestamp': ts,
            'version': cver,
            'datasource': str(cloud.datasource),
        }
        # Emit on stderr (not console) so the message reaches the user.
        util.multi_log("%s\n" % (templater.render_string(msg_in, subs)),
                       console=False, stderr=True, log=log)
    except Exception:
        util.logexc(log, "Failed to render final message template")
    boot_fin_fn = cloud.paths.boot_finished
    try:
        contents = "%s - %s - v. %s\n" % (uptime, ts, cver)
        util.write_file(boot_fin_fn, contents)
    except Exception:
        # FIX: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        util.logexc(log, "Failed to write boot finished file %s", boot_fin_fn)
    if cloud.datasource.is_disconnected:
        # FIX: Logger.warn is a deprecated alias of Logger.warning.
        log.warning("Used fallback datasource")
def handle(_name, cfg, cloud, log, args):
    """Render the configured final message (with UPPERCASE variants of
    all substitution keys) and write the boot-finished marker file.

    The message template comes from the module args if given, otherwise
    from the 'final_message' config option, falling back to
    FINAL_MESSAGE_DEF when empty.
    """
    msg_in = ''
    if len(args) != 0:
        msg_in = str(args[0])
    else:
        msg_in = util.get_cfg_option_str(cfg, "final_message", "")
    msg_in = msg_in.strip()
    if not msg_in:
        msg_in = FINAL_MESSAGE_DEF
    uptime = util.uptime()
    ts = util.time_rfc2822()
    cver = version.version_string()
    try:
        subs = {
            'uptime': uptime,
            'timestamp': ts,
            'version': cver,
            'datasource': str(cloud.datasource),
        }
        # IMPROVED: dict comprehension instead of dict([...]) list build.
        subs.update({k.upper(): v for k, v in subs.items()})
        util.multi_log("%s\n" % (templater.render_string(msg_in, subs)),
                       console=False, stderr=True, log=log)
    except Exception:
        util.logexc(log, "Failed to render final message template")
    boot_fin_fn = cloud.paths.boot_finished
    try:
        contents = "%s - %s - v. %s\n" % (uptime, ts, cver)
        util.write_file(boot_fin_fn, contents)
    except Exception:
        util.logexc(log, "Failed to write boot finished file %s", boot_fin_fn)
    if cloud.datasource.is_disconnected:
        # FIX: Logger.warn is a deprecated alias of Logger.warning.
        log.warning("Used fallback datasource")
def main(sysv_args=None):
    """Cloud-init command-line entry point.

    Builds the top-level argparse parser plus one subparser per
    subcommand, lazily importing heavyweight subcommand modules only
    when that subcommand was actually requested, then dispatches to the
    selected (name, functor) action, wrapped in a reporting event stack.
    """
    if not sysv_args:
        sysv_args = sys.argv
    parser = argparse.ArgumentParser(prog=sysv_args.pop(0))

    # Top level args
    parser.add_argument(
        "--version",
        "-v",
        action="version",
        version="%(prog)s " + (version.version_string()),
        help="Show program's version number and exit.",
    )
    parser.add_argument(
        "--file",
        "-f",
        action="append",
        dest="files",
        help="Use additional yaml configuration files.",
        type=argparse.FileType("rb"),
    )
    parser.add_argument(
        "--debug",
        "-d",
        action="store_true",
        help="Show additional pre-action logging (default: %(default)s).",
        default=False,
    )
    parser.add_argument(
        "--force",
        action="store_true",
        help=(
            "Force running even if no datasource is"
            " found (use at your own risk)."
        ),
        dest="force",
        default=False,
    )
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers(title="Subcommands", dest="subcommand")
    subparsers.required = True

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser(
        "init", help="Initialize cloud-init and perform initial modules."
    )
    parser_init.add_argument(
        "--local",
        "-l",
        action="store_true",
        help="Start in local mode (default: %(default)s).",
        default=False,
    )
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=("init", main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser(
        "modules", help="Activate modules using a given configuration key."
    )
    parser_mod.add_argument(
        "--mode",
        "-m",
        action="store",
        help="Module configuration name to use (default: %(default)s).",
        default="config",
        choices=("init", "config", "final"),
    )
    parser_mod.set_defaults(action=("modules", main_modules))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser(
        "single", help="Run a single module."
    )
    parser_single.add_argument(
        "--name",
        "-n",
        action="store",
        help="module name to run",
        required=True,
    )
    parser_single.add_argument(
        "--frequency",
        action="store",
        help="Set module frequency.",
        required=False,
        choices=list(FREQ_SHORT_NAMES.keys()),
    )
    parser_single.add_argument(
        "--report",
        action="store_true",
        help="Enable reporting.",
        required=False,
    )
    parser_single.add_argument(
        "module_args",
        nargs="*",
        metavar="argument",
        help="Any additional arguments to pass to this module.",
    )
    parser_single.set_defaults(action=("single", main_single))

    parser_query = subparsers.add_parser(
        "query",
        help="Query standardized instance metadata from the command line.",
    )

    parser_dhclient = subparsers.add_parser(
        dhclient_hook.NAME, help=dhclient_hook.__doc__
    )
    dhclient_hook.get_parser(parser_dhclient)

    parser_features = subparsers.add_parser(
        "features", help="List defined features."
    )
    parser_features.set_defaults(action=("features", main_features))

    parser_analyze = subparsers.add_parser(
        "analyze", help="Devel tool: Analyze cloud-init logs and data."
    )

    parser_devel = subparsers.add_parser(
        "devel", help="Run development tools."
    )

    parser_collect_logs = subparsers.add_parser(
        "collect-logs", help="Collect and tar all cloud-init debug info."
    )

    parser_clean = subparsers.add_parser(
        "clean", help="Remove logs and artifacts so cloud-init can re-run."
    )

    parser_status = subparsers.add_parser(
        "status", help="Report cloud-init status or wait on completion."
    )

    parser_schema = subparsers.add_parser(
        "schema", help="Validate cloud-config files using jsonschema."
    )

    if sysv_args:
        # Only load subparsers if subcommand is specified to avoid load cost
        subcommand = sysv_args[0]
        if subcommand == "analyze":
            from cloudinit.analyze.__main__ import get_parser as analyze_parser

            # Construct analyze subcommand parser
            analyze_parser(parser_analyze)
        elif subcommand == "devel":
            from cloudinit.cmd.devel.parser import get_parser as devel_parser

            # Construct devel subcommand parser
            devel_parser(parser_devel)
        elif subcommand == "collect-logs":
            from cloudinit.cmd.devel.logs import (
                get_parser as logs_parser,
                handle_collect_logs_args,
            )

            logs_parser(parser_collect_logs)
            parser_collect_logs.set_defaults(
                action=("collect-logs", handle_collect_logs_args)
            )
        elif subcommand == "clean":
            from cloudinit.cmd.clean import (
                get_parser as clean_parser,
                handle_clean_args,
            )

            clean_parser(parser_clean)
            parser_clean.set_defaults(action=("clean", handle_clean_args))
        elif subcommand == "query":
            from cloudinit.cmd.query import (
                get_parser as query_parser,
                handle_args as handle_query_args,
            )

            query_parser(parser_query)
            parser_query.set_defaults(action=("render", handle_query_args))
        elif subcommand == "schema":
            from cloudinit.config.schema import (
                get_parser as schema_parser,
                handle_schema_args,
            )

            schema_parser(parser_schema)
            parser_schema.set_defaults(action=("schema", handle_schema_args))
        elif subcommand == "status":
            from cloudinit.cmd.status import (
                get_parser as status_parser,
                handle_status_args,
            )

            status_parser(parser_status)
            parser_status.set_defaults(action=("status", handle_status_args))

    args = parser.parse_args(args=sysv_args)

    # Subparsers.required = True and each subparser sets action=(name, functor)
    (name, functor) = args.action

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode.
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    # 'init' and 'modules' runs are wrapped so their status is recorded.
    if name in ("modules", "init"):
        functor = status_wrapper

    rname = None
    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = (
                "init-network",
                "searching for network datasources",
            )
    elif name == "modules":
        rname, rdesc = (
            "modules-%s" % args.mode,
            "running modules for %s" % args.mode,
        )
    elif name == "single":
        rname, rdesc = (
            "single/%s" % args.name,
            "running single module %s" % args.name,
        )
        report_on = args.report
    else:
        rname = name
        rdesc = "running 'cloud-init %s'" % name
        report_on = False

    args.reporter = events.ReportEventStack(
        rname, rdesc, reporting_enabled=report_on
    )

    with args.reporter:
        retval = util.log_time(
            logfunc=LOG.debug,
            msg="cloud-init mode '%s'" % name,
            get_uptime=True,
            func=functor,
            args=(name, args),
        )
    reporting.flush_events()
    return retval
def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
            headers=None, headers_cb=None, ssl_details=None,
            check_status=True, allow_redirects=True):
    """Fetch (or POST to) *url* with manual retry support.

    :param url: url to fetch (cleaned via _cleanurl first)
    :param data: optional body to POST (switches method to POST)
    :param timeout: per-request timeout in seconds (clamped to >= 0)
    :param retries: number of retries after the first attempt
    :param sec_between: seconds to sleep between attempts (None/-1: none)
    :param headers: headers dict; defaults to a Cloud-Init User-Agent
    :param headers_cb: callable(url) -> headers, recomputed each attempt
    :param ssl_details: dict with ca_certs/cert_file/key_file for https
    :param check_status: raise on non-OK HTTP status when True
    :param allow_redirects: passed through to requests
    :return: UrlResponse wrapping the requests response
    :raises UrlError: wrapping the last requests exception on failure
    """
    url = _cleanurl(url)
    req_args = {
        'url': url,
    }
    scheme = urlparse(url).scheme  # pylint: disable=E1101
    if scheme == 'https' and ssl_details:
        if not SSL_ENABLED:
            # FIX: Logger.warn is a deprecated alias of Logger.warning.
            LOG.warning("SSL is not enabled, cert. verification can not occur!")
        else:
            if 'ca_certs' in ssl_details and ssl_details['ca_certs']:
                req_args['verify'] = ssl_details['ca_certs']
            else:
                req_args['verify'] = True
            if 'cert_file' in ssl_details and 'key_file' in ssl_details:
                req_args['cert'] = [
                    ssl_details['cert_file'],
                    ssl_details['key_file']
                ]
            elif 'cert_file' in ssl_details:
                req_args['cert'] = str(ssl_details['cert_file'])
    req_args['allow_redirects'] = allow_redirects
    req_args['method'] = 'GET'
    if timeout is not None:
        req_args['timeout'] = max(float(timeout), 0)
    if data:
        req_args['method'] = 'POST'
    # It doesn't seem like config
    # was added in older library versions (or newer ones either), thus we
    # need to manually do the retries if it wasn't...
    if CONFIG_ENABLED:
        req_config = {
            'store_cookies': False,
        }
        # Don't use the retry support built-in
        # since it doesn't allow for 'sleep_times'
        # in between tries....
        # if retries:
        #     req_config['max_retries'] = max(int(retries), 0)
        req_args['config'] = req_config
    manual_tries = 1
    if retries:
        manual_tries = max(int(retries) + 1, 1)
    if not headers:
        headers = {
            'User-Agent': 'Cloud-Init/%s' % (version.version_string()),
        }
    if not headers_cb:
        def _cb(url):
            return headers
        headers_cb = _cb
    if data:
        # Do this after the log (it might be large)
        req_args['data'] = data
    if sec_between is None:
        sec_between = -1
    excps = []
    # Handle retrying ourselves since the built-in support
    # doesn't handle sleeping between tries...
    for i in range(0, manual_tries):
        try:
            req_args['headers'] = headers_cb(url)
            # Log everything but the (possibly large) body.
            filtered_req_args = {}
            for (k, v) in req_args.items():
                if k == 'data':
                    continue
                filtered_req_args[k] = v
            LOG.debug("[%s/%s] open '%s' with %s configuration",
                      i, manual_tries, url, filtered_req_args)
            r = requests.request(**req_args)
            if check_status:
                r.raise_for_status()  # pylint: disable=E1103
            LOG.debug(
                "Read from %s (%s, %sb) after %s attempts",
                url,
                r.status_code,
                len(r.content),  # pylint: disable=E1103
                (i + 1))
            # Doesn't seem like we can make it use a different
            # subclass for responses, so add our own backward-compat
            # attrs
            return UrlResponse(r)
        except exceptions.RequestException as e:
            if (isinstance(e, (exceptions.HTTPError))
                    and hasattr(e, 'response')  # This appeared in v 0.10.8
                    and hasattr(e.response, 'status_code')):
                excps.append(
                    UrlError(e, code=e.response.status_code,
                             headers=e.response.headers))
            else:
                excps.append(UrlError(e))
            if SSL_ENABLED and isinstance(e, exceptions.SSLError):
                # ssl exceptions are not going to get fixed by waiting a
                # few seconds
                break
        if i + 1 < manual_tries and sec_between > 0:
            LOG.debug("Please wait %s seconds while we wait to try again",
                      sec_between)
            time.sleep(sec_between)
    if excps:
        raise excps[-1]
    return None  # Should throw before this...
def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
            headers=None, headers_cb=None, ssl_details=None,
            check_status=True, allow_redirects=True, exception_cb=None):
    """Fetch (or POST to) *url* with manual retry support.

    :param url: url to fetch (cleaned via _cleanurl first)
    :param data: optional body to POST (switches method to POST)
    :param timeout: per-request timeout in seconds (clamped to >= 0)
    :param retries: number of retries after the first attempt
    :param sec_between: seconds to sleep between attempts (None/-1: none)
    :param headers: headers dict; defaults to a Cloud-Init User-Agent
    :param headers_cb: callable(url) -> headers, recomputed each attempt
    :param ssl_details: dict with ca_certs/cert_file/key_file for https
    :param check_status: raise on non-OK HTTP status when True
    :param allow_redirects: passed through to requests
    :param exception_cb: callable(request_args, exception) -> bool; a
        falsy return stops further retries
    :return: UrlResponse wrapping the requests response
    :raises UrlError: wrapping the last requests exception on failure
    """
    url = _cleanurl(url)
    req_args = {
        'url': url,
    }
    scheme = urlparse(url).scheme  # pylint: disable=E1101
    if scheme == 'https' and ssl_details:
        if not SSL_ENABLED:
            # FIX: Logger.warn is a deprecated alias of Logger.warning.
            LOG.warning("SSL is not enabled, cert. verification can not occur!")
        else:
            if 'ca_certs' in ssl_details and ssl_details['ca_certs']:
                req_args['verify'] = ssl_details['ca_certs']
            else:
                req_args['verify'] = True
            if 'cert_file' in ssl_details and 'key_file' in ssl_details:
                req_args['cert'] = [ssl_details['cert_file'],
                                    ssl_details['key_file']]
            elif 'cert_file' in ssl_details:
                req_args['cert'] = str(ssl_details['cert_file'])
    req_args['allow_redirects'] = allow_redirects
    req_args['method'] = 'GET'
    if timeout is not None:
        req_args['timeout'] = max(float(timeout), 0)
    if data:
        req_args['method'] = 'POST'
    # It doesn't seem like config
    # was added in older library versions (or newer ones either), thus we
    # need to manually do the retries if it wasn't...
    if CONFIG_ENABLED:
        req_config = {
            'store_cookies': False,
        }
        # Don't use the retry support built-in
        # since it doesn't allow for 'sleep_times'
        # in between tries....
        # if retries:
        #     req_config['max_retries'] = max(int(retries), 0)
        req_args['config'] = req_config
    manual_tries = 1
    if retries:
        manual_tries = max(int(retries) + 1, 1)
    if not headers:
        headers = {
            'User-Agent': 'Cloud-Init/%s' % (version.version_string()),
        }
    if not headers_cb:
        def _cb(url):
            return headers
        headers_cb = _cb
    if data:
        # Do this after the log (it might be large)
        req_args['data'] = data
    if sec_between is None:
        sec_between = -1
    excps = []
    # Handle retrying ourselves since the built-in support
    # doesn't handle sleeping between tries...
    for i in range(0, manual_tries):
        req_args['headers'] = headers_cb(url)
        # Log everything but the (possibly large) body.
        filtered_req_args = {}
        for (k, v) in req_args.items():
            if k == 'data':
                continue
            filtered_req_args[k] = v
        try:
            LOG.debug("[%s/%s] open '%s' with %s configuration",
                      i, manual_tries, url, filtered_req_args)
            r = requests.request(**req_args)
            if check_status:
                r.raise_for_status()  # pylint: disable=E1103
            LOG.debug("Read from %s (%s, %sb) after %s attempts", url,
                      r.status_code, len(r.content),  # pylint: disable=E1103
                      (i + 1))
            # Doesn't seem like we can make it use a different
            # subclass for responses, so add our own backward-compat
            # attrs
            return UrlResponse(r)
        except exceptions.RequestException as e:
            if (isinstance(e, (exceptions.HTTPError))
                    and hasattr(e, 'response')  # This appeared in v 0.10.8
                    and hasattr(e.response, 'status_code')):
                excps.append(UrlError(e, code=e.response.status_code,
                                      headers=e.response.headers))
            else:
                excps.append(UrlError(e))
            if SSL_ENABLED and isinstance(e, exceptions.SSLError):
                # ssl exceptions are not going to get fixed by waiting a
                # few seconds
                break
            if exception_cb and not exception_cb(filtered_req_args,
                                                 excps[-1]):
                break
        if i + 1 < manual_tries and sec_between > 0:
            LOG.debug("Please wait %s seconds while we wait to try again",
                      sec_between)
            time.sleep(sec_between)
    if excps:
        raise excps[-1]
    return None  # Should throw before this...
def read_url(url, data=None, timeout=None, retries=0, headers=None,
             ssl_details=None, check_status=True, allow_redirects=True):
    """Fetch a url (or post to one) with the given options.

    :param url: url to fetch
    :param data: any data to POST (this switches the request method to
                 POST instead of GET)
    :param timeout: the timeout (in seconds) to wait for a response
    :param headers: any headers to provide (and send along) in the
                    request
    :param ssl_details: a dictionary containing any ssl settings, cert_file,
                        ca_certs and verify are valid entries (and they are
                        only used when the url provided is https)
    :param check_status: checks that the response status is OK after fetching
                         (this ensures a exception is raised on non-OK status
                         codes)
    :param allow_redirects: enables redirects (or disables them)
    :param retries: maximum number of retries to attempt when fetching the url
                    and the fetch fails
    """
    url = _clean_url(url)
    request_args = {
        'url': url,
    }
    request_args.update(_get_ssl_args(url, ssl_details))
    request_args['allow_redirects'] = allow_redirects
    request_args['method'] = 'GET'
    if timeout is not None:
        request_args['timeout'] = max(float(timeout), 0)
    if data:
        request_args['method'] = 'POST'
        request_args['data'] = data
    # Case-insensitive headers so a caller-supplied 'user-agent' (any
    # casing) suppresses the default Cloud-Init User-Agent.
    if not headers:
        headers = structures.CaseInsensitiveDict()
    else:
        headers = structures.CaseInsensitiveDict(headers)
    if 'User-Agent' not in headers:
        headers['User-Agent'] = 'Cloud-Init/%s' % (version.version_string())
    request_args['headers'] = headers
    session = requests.Session()
    if retries:
        # Retries are delegated to urllib3's Retry via an HTTPAdapter
        # mounted for this url's scheme+host prefix.
        retry = _Retry(total=max(int(retries), 0),
                       raise_on_redirect=not allow_redirects)
        session.mount(_get_base_url(url),
                      adapters.HTTPAdapter(max_retries=retry))
    try:
        with session:
            response = session.request(**request_args)
            if check_status:
                response.raise_for_status()
    except exceptions.RequestException as e:
        # Preserve the HTTP status/headers on the raised UrlError when
        # a response was actually received.
        if e.response is not None:
            raise UrlError(e, code=e.response.status_code,
                           headers=e.response.headers)
        else:
            raise UrlError(e)
    else:
        LOG.debug("Read from %s (%s, %sb)", url,
                  response.status_code, len(response.content))
        return RequestsResponse(response)
def main(sysv_args=None):
    """Cloud-init command-line entry point (legacy variant with 'query'
    and inline dhclient-hook subcommands).

    Builds the argparse tree, dispatches to the selected (name, functor)
    action, and wraps the run in a reporting event stack.
    """
    if sysv_args is not None:
        parser = argparse.ArgumentParser(prog=sysv_args[0])
        sysv_args = sysv_args[1:]
    else:
        parser = argparse.ArgumentParser()
    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append', dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force', default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers()
    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))
    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))
    # These settings are used when you want to query information
    # stored in the cloud-init data objects/directories/files
    parser_query = subparsers.add_parser('query',
                                         help=('query information stored '
                                               'in cloud-init'))
    parser_query.add_argument("--name", '-n', action="store",
                              help="item name to query on",
                              required=True,
                              choices=QUERY_DATA_TYPES)
    parser_query.set_defaults(action=('query', main_query))
    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency", action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    parser_single.set_defaults(action=('single', main_single))
    parser_dhclient = subparsers.add_parser('dhclient-hook',
                                            help=('run the dhclient hook'
                                                  'to record network info'))
    parser_dhclient.add_argument("net_action",
                                 help=('action taken on the interface'))
    parser_dhclient.add_argument("net_interface",
                                 help=('the network interface being acted'
                                       ' upon'))
    parser_dhclient.set_defaults(action=('dhclient_hook', dhclient_hook))
    args = parser.parse_args(args=sysv_args)
    try:
        (name, functor) = args.action
    except AttributeError:
        # No subcommand given: args has no 'action' attribute.
        parser.error('too few arguments')
    # Setup basic logging to start (until reinitialized)
    # iff in debug mode...
    if args.debug:
        logging.setupBasicLogging()
    # Setup signal handlers before running
    signal_handler.attach_handlers()
    # 'init' and 'modules' runs are wrapped so their status is recorded.
    if name in ("modules", "init"):
        functor = status_wrapper
    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report
    elif name == 'dhclient_hook':
        rname, rdesc = ("dhclient-hook",
                        "running dhclient-hook module")
    args.reporter = events.ReportEventStack(rname, rdesc,
                                            reporting_enabled=report_on)
    with args.reporter:
        return util.log_time(logfunc=LOG.debug,
                             msg="cloud-init mode '%s'" % name,
                             get_uptime=True, func=functor,
                             args=(name, args))
def main(sysv_args=None):
    """Cloud-init command-line entry point (variant with lazily loaded
    analyze/devel/collect-logs/clean/query/status subcommands).

    Builds the argparse tree, importing heavyweight subcommand modules
    only when that subcommand was actually requested, then dispatches to
    the selected (name, functor) action inside a reporting event stack.
    """
    if not sysv_args:
        sysv_args = sys.argv
    parser = argparse.ArgumentParser(prog=sysv_args[0])
    sysv_args = sysv_args[1:]
    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append', dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force', default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers(title='Subcommands', dest='subcommand')
    subparsers.required = True
    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))
    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))
    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency",
                               action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    parser_single.set_defaults(action=('single', main_single))
    parser_query = subparsers.add_parser(
        'query',
        help='Query standardized instance metadata from the command line.')
    parser_dhclient = subparsers.add_parser(
        dhclient_hook.NAME, help=dhclient_hook.__doc__)
    dhclient_hook.get_parser(parser_dhclient)
    parser_features = subparsers.add_parser('features',
                                            help=('list defined features'))
    parser_features.set_defaults(action=('features', main_features))
    parser_analyze = subparsers.add_parser(
        'analyze', help='Devel tool: Analyze cloud-init logs and data')
    parser_devel = subparsers.add_parser(
        'devel', help='Run development tools')
    parser_collect_logs = subparsers.add_parser(
        'collect-logs', help='Collect and tar all cloud-init debug info')
    parser_clean = subparsers.add_parser(
        'clean', help='Remove logs and artifacts so cloud-init can re-run.')
    parser_status = subparsers.add_parser(
        'status', help='Report cloud-init status or wait on completion.')
    if sysv_args:
        # Only load subparsers if subcommand is specified to avoid load cost
        if sysv_args[0] == 'analyze':
            from cloudinit.analyze.__main__ import get_parser as analyze_parser
            # Construct analyze subcommand parser
            analyze_parser(parser_analyze)
        elif sysv_args[0] == 'devel':
            from cloudinit.cmd.devel.parser import get_parser as devel_parser
            # Construct devel subcommand parser
            devel_parser(parser_devel)
        elif sysv_args[0] == 'collect-logs':
            from cloudinit.cmd.devel.logs import (
                get_parser as logs_parser, handle_collect_logs_args)
            logs_parser(parser_collect_logs)
            parser_collect_logs.set_defaults(
                action=('collect-logs', handle_collect_logs_args))
        elif sysv_args[0] == 'clean':
            from cloudinit.cmd.clean import (
                get_parser as clean_parser, handle_clean_args)
            clean_parser(parser_clean)
            parser_clean.set_defaults(
                action=('clean', handle_clean_args))
        elif sysv_args[0] == 'query':
            from cloudinit.cmd.query import (
                get_parser as query_parser, handle_args as handle_query_args)
            query_parser(parser_query)
            parser_query.set_defaults(
                action=('render', handle_query_args))
        elif sysv_args[0] == 'status':
            from cloudinit.cmd.status import (
                get_parser as status_parser, handle_status_args)
            status_parser(parser_status)
            parser_status.set_defaults(
                action=('status', handle_status_args))
    args = parser.parse_args(args=sysv_args)
    # Subparsers.required = True and each subparser sets action=(name, functor)
    (name, functor) = args.action
    # Setup basic logging to start (until reinitialized)
    # iff in debug mode.
    if args.debug:
        logging.setupBasicLogging()
    # Setup signal handlers before running
    signal_handler.attach_handlers()
    # 'init' and 'modules' runs are wrapped so their status is recorded.
    if name in ("modules", "init"):
        functor = status_wrapper
    rname = None
    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report
    else:
        rname = name
        rdesc = "running 'cloud-init %s'" % name
        report_on = False
    args.reporter = events.ReportEventStack(
        rname, rdesc, reporting_enabled=report_on)
    with args.reporter:
        retval = util.log_time(
            logfunc=LOG.debug, msg="cloud-init mode '%s'" % name,
            get_uptime=True, func=functor, args=(name, args))
    reporting.flush_events()
    return retval
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.autosectionlabel', 'sphinx.ext.viewcode', ] # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. version = version.version_string() release = version # Set the default Pygments syntax highlight_language = 'yaml' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. show_authors = False # -- Options for HTML output --------------------------------------------------
import cloudinit.sources.helpers.vultr as vultr

LOG = log.getLogger(__name__)

# Built-in defaults for talking to the Vultr metadata service; values can
# be overridden via the system config 'datasource: Vultr' section (merged
# in DataSourceVultr.__init__ below, with sys_cfg taking precedence).
BUILTIN_DS_CONFIG = {
    'url': 'http://169.254.169.254',
    'retries': 30,
    'timeout': 2,
    'wait': 2,
    # NOTE(review): util.system_info() is called twice here at import
    # time; presumably cheap, but could be hoisted into one call.
    'user-agent': 'Cloud-Init/%s - OS: %s Variant: %s' %
                  (version.version_string(),
                   util.system_info()['system'],
                   util.system_info()['variant'])
}


class DataSourceVultr(sources.DataSource):

    # Canonical datasource name used in config and logs.
    dsname = 'Vultr'

    def __init__(self, sys_cfg, distro, paths):
        super(DataSourceVultr, self).__init__(sys_cfg, distro, paths)
        # Merge user-provided 'datasource: Vultr' config over the
        # built-in defaults (earlier dicts win in mergemanydict).
        self.ds_cfg = util.mergemanydict([
            util.get_cfg_by_path(sys_cfg, ["datasource", "Vultr"], {}),
            BUILTIN_DS_CONFIG
        ])
def test_package_version_skipped(self):
    """If _PACKAGED_VERSION is not modified, then return __VERSION__."""
    reported = version.version_string()
    self.assertEqual("17.2", reported)
def test_package_version_respected(self):
    """If _PACKAGED_VERSION is filled in, then it should be returned."""
    # version_string() must prefer the distro-packaged version string
    # (substituted into _PACKAGED_VERSION at package build time) over
    # the in-tree __VERSION__.
    self.assertEqual("17.2-3-gb05b9972-0ubuntu1", version.version_string())
def readurl(
    url,
    data=None,
    timeout=None,
    retries=0,
    sec_between=1,
    headers=None,
    headers_cb=None,
    headers_redact=None,
    ssl_details=None,
    check_status=True,
    allow_redirects=True,
    exception_cb=None,
    session=None,
    infinite=False,
    log_req_resp=True,
    request_method="",
) -> UrlResponse:
    """Wrapper around requests.Session to read the url and retry if necessary

    :param url: Mandatory url to request.
    :param data: Optional form data to post the URL. Will set request_method
      to 'POST' if present.
    :param timeout: Timeout in seconds to wait for a response. May be a tuple
      if specifying (connection timeout, read timeout).
    :param retries: Number of times to retry on exception if exception_cb is
      None or exception_cb returns True for the exception caught. Default is
      to fail with 0 retries on exception.
    :param sec_between: Default 1: amount of seconds passed to time.sleep
      between retries. None or -1 means don't sleep.
    :param headers: Optional dict of headers to send during request
    :param headers_cb: Optional callable returning a dict of values to send
      as headers during request
    :param headers_redact: Optional list of header names to redact from the
      log
    :param ssl_details: Optional dict providing key_file, ca_certs, and
      cert_file keys for use on in ssl connections.
    :param check_status: Optional boolean set True to raise when HTTPError
      occurs. Default: True.
    :param allow_redirects: Optional boolean passed straight to
      Session.request as 'allow_redirects'. Default: True.
    :param exception_cb: Optional callable which accepts the params msg and
      exception and returns a boolean True if retries are permitted.
    :param session: Optional exiting requests.Session instance to reuse.
    :param infinite: Bool, set True to retry indefinitely. Default: False.
    :param log_req_resp: Set False to turn off verbose debug messages.
    :param request_method: String passed as 'method' to Session.request.
      Typically GET, or POST. Default: POST if data is provided, GET
      otherwise.
    """
    url = _cleanurl(url)
    req_args = {
        "url": url,
    }
    req_args.update(_get_ssl_args(url, ssl_details))
    req_args["allow_redirects"] = allow_redirects
    # The request method defaults on whether a body was supplied.
    if not request_method:
        request_method = "POST" if data else "GET"
    req_args["method"] = request_method
    if timeout is not None:
        if isinstance(timeout, tuple):
            # Already a (connect, read) timeout pair; pass through as-is.
            req_args["timeout"] = timeout
        else:
            # Clamp negative values to 0 (no waiting).
            req_args["timeout"] = max(float(timeout), 0)
    if headers_redact is None:
        headers_redact = []
    manual_tries = 1
    if retries:
        manual_tries = max(int(retries) + 1, 1)

    def_headers = {
        "User-Agent": "Cloud-Init/%s" % (version.version_string()),
    }
    if headers:
        def_headers.update(headers)
    headers = def_headers

    if not headers_cb:
        # Default callback: return the (merged) static headers every try.
        def _cb(url):
            return headers

        headers_cb = _cb
    if data:
        req_args["data"] = data
    if sec_between is None:
        sec_between = -1

    excps = []
    # Handle retrying ourselves since the built-in support
    # doesn't handle sleeping between tries...
    # Infinitely retry if infinite is True
    for i in count() if infinite else range(0, manual_tries):
        req_args["headers"] = headers_cb(url)
        # Build a sanitized copy of req_args purely for logging: body data
        # is dropped, and any headers listed in headers_redact are masked.
        filtered_req_args = {}
        for (k, v) in req_args.items():
            if k == "data":
                continue
            if k == "headers" and headers_redact:
                # NOTE(review): the inner 'k' in this comprehension shadows
                # the loop variable; harmless here but easy to misread.
                matched_headers = [k for k in headers_redact if v.get(k)]
                if matched_headers:
                    filtered_req_args[k] = copy.deepcopy(v)
                    for key in matched_headers:
                        filtered_req_args[k][key] = REDACTED
            else:
                filtered_req_args[k] = v
        try:
            if log_req_resp:
                LOG.debug(
                    "[%s/%s] open '%s' with %s configuration",
                    i,
                    "infinite" if infinite else manual_tries,
                    url,
                    filtered_req_args,
                )

            if session is None:
                session = requests.Session()

            # NOTE(review): 'with session' closes the session on exit, yet
            # the same object is reused on subsequent retries -- looks like
            # it relies on requests tolerating requests on a closed Session;
            # confirm before restructuring.
            with session as sess:
                r = sess.request(**req_args)

            if check_status:
                r.raise_for_status()
            LOG.debug(
                "Read from %s (%s, %sb) after %s attempts",
                url,
                r.status_code,
                len(r.content),
                (i + 1),
            )
            # Doesn't seem like we can make it use a different
            # subclass for responses, so add our own backward-compat
            # attrs
            return UrlResponse(r)
        except exceptions.RequestException as e:
            if (
                isinstance(e, (exceptions.HTTPError))
                and hasattr(e, "response")
                and hasattr(  # This appeared in v 0.10.8
                    e.response, "status_code"
                )
            ):
                excps.append(
                    UrlError(
                        e,
                        code=e.response.status_code,
                        headers=e.response.headers,
                        url=url,
                    )
                )
            else:
                excps.append(UrlError(e, url=url))
            if isinstance(e, exceptions.SSLError):
                # ssl exceptions are not going to get fixed by waiting a
                # few seconds
                break
            if exception_cb and not exception_cb(req_args.copy(), excps[-1]):
                # if an exception callback was given, it should return True
                # to continue retrying and False to break and re-raise the
                # exception
                break
            if (infinite and sec_between > 0) or (
                i + 1 < manual_tries and sec_between > 0
            ):
                if log_req_resp:
                    LOG.debug(
                        "Please wait %s seconds while we wait to try again",
                        sec_between,
                    )
                time.sleep(sec_between)

    # Falling out of the loop means every attempt raised, so at least one
    # UrlError was recorded; re-raise the most recent one.
    raise excps[-1]
def main(sysv_args=None):
    """Command-line entry point: parse args and dispatch a subcommand.

    :param sysv_args: Optional argv-style list (program name first). When
        None, argparse falls back to sys.argv.
    :return: whatever the selected subcommand functor returns, wrapped in
        util.log_time for timing.
    """
    if sysv_args is not None:
        parser = argparse.ArgumentParser(prog=sysv_args[0])
        sysv_args = sysv_args[1:]
    else:
        parser = argparse.ArgumentParser()

    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append',
                        dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force',
                        default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers()

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))

    # These settings are used when you want to query information
    # stored in the cloud-init data objects/directories/files
    parser_query = subparsers.add_parser('query',
                                         help=('query information stored '
                                               'in cloud-init'))
    parser_query.add_argument("--name", '-n', action="store",
                              help="item name to query on",
                              required=True,
                              choices=QUERY_DATA_TYPES)
    parser_query.set_defaults(action=('query', main_query))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency", action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    # Fix: set_defaults was previously called twice for parser_single with
    # identical arguments; one call is sufficient.
    parser_single.set_defaults(action=('single', main_single))

    args = parser.parse_args(args=sysv_args)

    # args.action only exists when a subcommand was chosen.
    try:
        (name, functor) = args.action
    except AttributeError:
        parser.error('too few arguments')

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode...
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    # init/modules run under the status wrapper so their result is recorded.
    if name in ("modules", "init"):
        functor = status_wrapper

    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report
    else:
        # Fix: subcommands without a dedicated branch above (e.g. 'query')
        # previously left rname/rdesc unbound, raising NameError below.
        rname = name
        rdesc = "running 'cloud-init %s'" % name
        report_on = False

    args.reporter = events.ReportEventStack(
        rname, rdesc, reporting_enabled=report_on)
    with args.reporter:
        return util.log_time(
            logfunc=LOG.debug, msg="cloud-init mode '%s'" % name,
            get_uptime=True, func=functor, args=(name, args))
def main_version(args):
    """Write the running cloud-init version to stdout."""
    banner = "cloud-init {0}\n".format(version_string())
    sys.stdout.write(banner)
def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
            headers=None, headers_cb=None, ssl_details=None,
            check_status=True, allow_redirects=True, exception_cb=None,
            session=None):
    """Read the given url via requests, retrying manually between attempts.

    Returns a UrlResponse on success; raises the last recorded UrlError when
    every attempt fails.  'session' may be an existing requests.Session to
    reuse; otherwise one is created on first use.
    """
    url = _cleanurl(url)
    req_args = {
        'url': url,
    }
    req_args.update(_get_ssl_args(url, ssl_details))
    req_args['allow_redirects'] = allow_redirects
    # GET by default; flipped to POST below when body data is supplied.
    req_args['method'] = 'GET'
    if timeout is not None:
        # Clamp negative timeouts to 0 (no waiting).
        req_args['timeout'] = max(float(timeout), 0)
    if data:
        req_args['method'] = 'POST'
    # It doesn't seem like config
    # was added in older library versions (or newer ones either), thus we
    # need to manually do the retries if it wasn't...
    if CONFIG_ENABLED:
        req_config = {
            'store_cookies': False,
        }
        # Don't use the retry support built-in
        # since it doesn't allow for 'sleep_times'
        # in between tries....
        # if retries:
        #     req_config['max_retries'] = max(int(retries), 0)
        req_args['config'] = req_config
    manual_tries = 1
    if retries:
        manual_tries = max(int(retries) + 1, 1)

    # Always send a cloud-init User-Agent; caller headers are merged on top.
    def_headers = {
        'User-Agent': 'Cloud-Init/%s' % (version.version_string()),
    }
    if headers:
        def_headers.update(headers)
    headers = def_headers

    if not headers_cb:
        # Default callback: same (merged) static headers on every attempt.
        def _cb(url):
            return headers
        headers_cb = _cb

    if data:
        req_args['data'] = data
    if sec_between is None:
        sec_between = -1

    excps = []
    # Handle retrying ourselves since the built-in support
    # doesn't handle sleeping between tries...
    for i in range(0, manual_tries):
        req_args['headers'] = headers_cb(url)
        # Copy of req_args minus the request body, used only for logging.
        filtered_req_args = {}
        for (k, v) in req_args.items():
            if k == 'data':
                continue
            filtered_req_args[k] = v
        try:
            LOG.debug("[%s/%s] open '%s' with %s configuration", i,
                      manual_tries, url, filtered_req_args)

            if session is None:
                session = requests.Session()

            # NOTE(review): 'with session' closes the session on exit but the
            # object is reused on later retries -- confirm requests tolerates
            # requests on a closed Session before restructuring.
            with session as sess:
                r = sess.request(**req_args)

            if check_status:
                r.raise_for_status()
            LOG.debug("Read from %s (%s, %sb) after %s attempts", url,
                      r.status_code, len(r.content), (i + 1))
            # Doesn't seem like we can make it use a different
            # subclass for responses, so add our own backward-compat
            # attrs
            return UrlResponse(r)
        except exceptions.RequestException as e:
            if (isinstance(e, (exceptions.HTTPError)) and
               hasattr(e, 'response') and  # This appeared in v 0.10.8
               hasattr(e.response, 'status_code')):
                excps.append(
                    UrlError(e, code=e.response.status_code,
                             headers=e.response.headers,
                             url=url))
            else:
                excps.append(UrlError(e, url=url))
            if SSL_ENABLED and isinstance(e, exceptions.SSLError):
                # ssl exceptions are not going to get fixed by waiting a
                # few seconds
                break
            if exception_cb and exception_cb(req_args.copy(), excps[-1]):
                # if an exception callback was given it should return None
                # a true-ish value means to break and re-raise the exception
                break
            if i + 1 < manual_tries and sec_between > 0:
                LOG.debug("Please wait %s seconds while we wait to try again",
                          sec_between)
                time.sleep(sec_between)
    if excps:
        raise excps[-1]
    return None  # Should throw before this...
def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
            headers=None, headers_cb=None, ssl_details=None,
            check_status=True, allow_redirects=True, exception_cb=None):
    """Read the given url via requests, retrying manually between attempts.

    Returns a UrlResponse on success; raises the last recorded UrlError when
    every attempt fails.
    """
    url = _cleanurl(url)
    req_args = {
        'url': url,
    }
    req_args.update(_get_ssl_args(url, ssl_details))
    req_args['allow_redirects'] = allow_redirects
    # GET by default; flipped to POST below when body data is supplied.
    req_args['method'] = 'GET'
    if timeout is not None:
        # Clamp negative timeouts to 0 (no waiting).
        req_args['timeout'] = max(float(timeout), 0)
    if data:
        req_args['method'] = 'POST'
    # It doesn't seem like config
    # was added in older library versions (or newer ones either), thus we
    # need to manually do the retries if it wasn't...
    if CONFIG_ENABLED:
        req_config = {
            'store_cookies': False,
        }
        # Don't use the retry support built-in
        # since it doesn't allow for 'sleep_times'
        # in between tries....
        # if retries:
        #     req_config['max_retries'] = max(int(retries), 0)
        req_args['config'] = req_config
    manual_tries = 1
    if retries:
        manual_tries = max(int(retries) + 1, 1)

    # Always send a cloud-init User-Agent; caller headers are merged on top.
    def_headers = {
        'User-Agent': 'Cloud-Init/%s' % (version.version_string()),
    }
    if headers:
        def_headers.update(headers)
    headers = def_headers

    if not headers_cb:
        # Default callback: same (merged) static headers on every attempt.
        def _cb(url):
            return headers
        headers_cb = _cb

    if data:
        req_args['data'] = data
    if sec_between is None:
        sec_between = -1

    excps = []
    # Handle retrying ourselves since the built-in support
    # doesn't handle sleeping between tries...
    for i in range(0, manual_tries):
        req_args['headers'] = headers_cb(url)
        # Copy of req_args minus the request body, used only for logging.
        filtered_req_args = {}
        for (k, v) in req_args.items():
            if k == 'data':
                continue
            filtered_req_args[k] = v
        try:
            LOG.debug("[%s/%s] open '%s' with %s configuration", i,
                      manual_tries, url, filtered_req_args)

            # One-shot module-level call; no session reuse in this variant.
            r = requests.request(**req_args)

            if check_status:
                r.raise_for_status()
            LOG.debug("Read from %s (%s, %sb) after %s attempts", url,
                      r.status_code, len(r.content), (i + 1))
            # Doesn't seem like we can make it use a different
            # subclass for responses, so add our own backward-compat
            # attrs
            return UrlResponse(r)
        except exceptions.RequestException as e:
            if (isinstance(e, (exceptions.HTTPError)) and
               hasattr(e, 'response') and  # This appeared in v 0.10.8
               hasattr(e.response, 'status_code')):
                excps.append(UrlError(e, code=e.response.status_code,
                                      headers=e.response.headers,
                                      url=url))
            else:
                excps.append(UrlError(e, url=url))
            if SSL_ENABLED and isinstance(e, exceptions.SSLError):
                # ssl exceptions are not going to get fixed by waiting a
                # few seconds
                break
            if exception_cb and exception_cb(req_args.copy(), excps[-1]):
                # if an exception callback was given it should return None
                # a true-ish value means to break and re-raise the exception
                break
            if i + 1 < manual_tries and sec_between > 0:
                LOG.debug("Please wait %s seconds while we wait to try again",
                          sec_between)
                time.sleep(sec_between)
    if excps:
        raise excps[-1]
    return None  # Should throw before this...
def main(sysv_args=None):
    """Command-line entry point: parse args and dispatch a subcommand.

    :param sysv_args: Optional argv-style list (program name first). When
        falsy, sys.argv is used.
    :return: whatever the selected subcommand functor returns, wrapped in
        util.log_time for timing.
    """
    if not sysv_args:
        sysv_args = sys.argv
    parser = argparse.ArgumentParser(prog=sysv_args[0])
    sysv_args = sysv_args[1:]

    # Top level args
    parser.add_argument('--version', '-v', action='version',
                        version='%(prog)s ' + (version.version_string()))
    parser.add_argument('--file', '-f', action='append', dest='files',
                        help=('additional yaml configuration'
                              ' files to use'),
                        type=argparse.FileType('rb'))
    parser.add_argument('--debug', '-d', action='store_true',
                        help=('show additional pre-action'
                              ' logging (default: %(default)s)'),
                        default=False)
    parser.add_argument('--force', action='store_true',
                        help=('force running even if no datasource is'
                              ' found (use at your own risk)'),
                        dest='force', default=False)
    parser.set_defaults(reporter=None)
    subparsers = parser.add_subparsers(title='Subcommands', dest='subcommand')
    subparsers.required = True

    # Each action and its sub-options (if any)
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             help="start in local mode (default: %(default)s)",
                             default=False)
    # This is used so that we can know which action is selected +
    # the functor to use to run this subcommand
    parser_init.set_defaults(action=('init', main_init))

    # These settings are used for the 'config' and 'final' stages
    parser_mod = subparsers.add_parser('modules',
                                       help=('activates modules using '
                                             'a given configuration key'))
    parser_mod.add_argument("--mode", '-m', action='store',
                            help=("module configuration name "
                                  "to use (default: %(default)s)"),
                            default='config',
                            choices=('init', 'config', 'final'))
    parser_mod.set_defaults(action=('modules', main_modules))

    # This subcommand allows you to run a single module
    parser_single = subparsers.add_parser('single',
                                          help=('run a single module '))
    parser_single.add_argument("--name", '-n', action="store",
                               help="module name to run",
                               required=True)
    parser_single.add_argument("--frequency", action="store",
                               help=("frequency of the module"),
                               required=False,
                               choices=list(FREQ_SHORT_NAMES.keys()))
    parser_single.add_argument("--report", action="store_true",
                               help="enable reporting",
                               required=False)
    parser_single.add_argument("module_args", nargs="*",
                               metavar='argument',
                               help=('any additional arguments to'
                                     ' pass to this module'))
    parser_single.set_defaults(action=('single', main_single))

    # Fix: the two adjacent string literals previously concatenated to
    # "...hookto record..." (missing space in the help text).
    parser_dhclient = subparsers.add_parser('dhclient-hook',
                                            help=('run the dhclient hook'
                                                  ' to record network info'))
    parser_dhclient.add_argument("net_action",
                                 help=('action taken on the interface'))
    parser_dhclient.add_argument("net_interface",
                                 help=('the network interface being acted'
                                       ' upon'))
    parser_dhclient.set_defaults(action=('dhclient_hook', dhclient_hook))

    parser_features = subparsers.add_parser('features',
                                            help=('list defined features'))
    parser_features.set_defaults(action=('features', main_features))

    parser_analyze = subparsers.add_parser(
        'analyze', help='Devel tool: Analyze cloud-init logs and data')

    parser_devel = subparsers.add_parser('devel', help='Run development tools')

    parser_collect_logs = subparsers.add_parser(
        'collect-logs', help='Collect and tar all cloud-init debug info')

    parser_clean = subparsers.add_parser(
        'clean', help='Remove logs and artifacts so cloud-init can re-run.')

    parser_status = subparsers.add_parser(
        'status', help='Report cloud-init status or wait on completion.')

    if sysv_args:
        # Only load subparsers if subcommand is specified to avoid load cost
        if sysv_args[0] == 'analyze':
            from cloudinit.analyze.__main__ import get_parser as analyze_parser
            # Construct analyze subcommand parser
            analyze_parser(parser_analyze)
        elif sysv_args[0] == 'devel':
            from cloudinit.cmd.devel.parser import get_parser as devel_parser
            # Construct devel subcommand parser
            devel_parser(parser_devel)
        elif sysv_args[0] == 'collect-logs':
            from cloudinit.cmd.devel.logs import (get_parser as logs_parser,
                                                  handle_collect_logs_args)
            logs_parser(parser_collect_logs)
            parser_collect_logs.set_defaults(
                action=('collect-logs', handle_collect_logs_args))
        elif sysv_args[0] == 'clean':
            from cloudinit.cmd.clean import (get_parser as clean_parser,
                                             handle_clean_args)
            clean_parser(parser_clean)
            parser_clean.set_defaults(action=('clean', handle_clean_args))
        elif sysv_args[0] == 'status':
            from cloudinit.cmd.status import (get_parser as status_parser,
                                              handle_status_args)
            status_parser(parser_status)
            parser_status.set_defaults(action=('status', handle_status_args))

    args = parser.parse_args(args=sysv_args)

    # Subparsers.required = True and each subparser sets action=(name, functor)
    (name, functor) = args.action

    # Setup basic logging to start (until reinitialized)
    # iff in debug mode.
    if args.debug:
        logging.setupBasicLogging()

    # Setup signal handlers before running
    signal_handler.attach_handlers()

    # init/modules run under the status wrapper so their result is recorded.
    if name in ("modules", "init"):
        functor = status_wrapper

    rname = None
    report_on = True
    if name == "init":
        if args.local:
            rname, rdesc = ("init-local", "searching for local datasources")
        else:
            rname, rdesc = ("init-network",
                            "searching for network datasources")
    elif name == "modules":
        rname, rdesc = ("modules-%s" % args.mode,
                        "running modules for %s" % args.mode)
    elif name == "single":
        rname, rdesc = ("single/%s" % args.name,
                        "running single module %s" % args.name)
        report_on = args.report
    else:
        rname = name
        rdesc = "running 'cloud-init %s'" % name
        report_on = False

    args.reporter = events.ReportEventStack(rname, rdesc,
                                            reporting_enabled=report_on)
    with args.reporter:
        return util.log_time(logfunc=LOG.debug,
                             msg="cloud-init mode '%s'" % name,
                             get_uptime=True,
                             func=functor,
                             args=(name, args))
def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
            headers=None, headers_cb=None, ssl_details=None,
            check_status=True, allow_redirects=True, exception_cb=None,
            session=None, infinite=False, log_req_resp=True,
            request_method=None):
    """Wrapper around requests.Session to read the url and retry if necessary

    :param url: Mandatory url to request.
    :param data: Optional form data to post the URL. Will set request_method
      to 'POST' if present.
    :param timeout: Timeout in seconds to wait for a response
    :param retries: Number of times to retry on exception if exception_cb is
      None or exception_cb returns True for the exception caught. Default is
      to fail with 0 retries on exception.
    :param sec_between: Default 1: amount of seconds passed to time.sleep
      between retries. None or -1 means don't sleep.
    :param headers: Optional dict of headers to send during request
    :param headers_cb: Optional callable returning a dict of values to send
      as headers during request
    :param ssl_details: Optional dict providing key_file, ca_certs, and
      cert_file keys for use on in ssl connections.
    :param check_status: Optional boolean set True to raise when HTTPError
      occurs. Default: True.
    :param allow_redirects: Optional boolean passed straight to
      Session.request as 'allow_redirects'. Default: True.
    :param exception_cb: Optional callable which accepts the params msg and
      exception and returns a boolean True if retries are permitted.
    :param session: Optional exiting requests.Session instance to reuse.
    :param infinite: Bool, set True to retry indefinitely. Default: False.
    :param log_req_resp: Set False to turn off verbose debug messages.
    :param request_method: String passed as 'method' to Session.request.
      Typically GET, or POST. Default: POST if data is provided, GET
      otherwise.
    """
    url = _cleanurl(url)
    req_args = {
        'url': url,
    }
    req_args.update(_get_ssl_args(url, ssl_details))
    req_args['allow_redirects'] = allow_redirects
    # The request method defaults on whether a body was supplied.
    if not request_method:
        request_method = 'POST' if data else 'GET'
    req_args['method'] = request_method
    if timeout is not None:
        # Clamp negative timeouts to 0 (no waiting).
        req_args['timeout'] = max(float(timeout), 0)
    # It doesn't seem like config
    # was added in older library versions (or newer ones either), thus we
    # need to manually do the retries if it wasn't...
    if CONFIG_ENABLED:
        req_config = {
            'store_cookies': False,
        }
        # Don't use the retry support built-in
        # since it doesn't allow for 'sleep_times'
        # in between tries....
        # if retries:
        #     req_config['max_retries'] = max(int(retries), 0)
        req_args['config'] = req_config
    manual_tries = 1
    if retries:
        manual_tries = max(int(retries) + 1, 1)

    # Always send a cloud-init User-Agent; caller headers are merged on top.
    def_headers = {
        'User-Agent': 'Cloud-Init/%s' % (version.version_string()),
    }
    if headers:
        def_headers.update(headers)
    headers = def_headers

    if not headers_cb:
        # Default callback: same (merged) static headers on every attempt.
        def _cb(url):
            return headers
        headers_cb = _cb

    if data:
        req_args['data'] = data
    if sec_between is None:
        sec_between = -1

    excps = []
    # Handle retrying ourselves since the built-in support
    # doesn't handle sleeping between tries...
    # Infinitely retry if infinite is True
    for i in count() if infinite else range(0, manual_tries):
        req_args['headers'] = headers_cb(url)
        # Copy of req_args minus the request body, used only for logging.
        filtered_req_args = {}
        for (k, v) in req_args.items():
            if k == 'data':
                continue
            filtered_req_args[k] = v
        try:
            if log_req_resp:
                LOG.debug("[%s/%s] open '%s' with %s configuration", i,
                          "infinite" if infinite else manual_tries, url,
                          filtered_req_args)

            if session is None:
                session = requests.Session()

            # NOTE(review): 'with session' closes the session on exit but the
            # object is reused on later retries -- confirm requests tolerates
            # requests on a closed Session before restructuring.
            with session as sess:
                r = sess.request(**req_args)

            if check_status:
                r.raise_for_status()
            LOG.debug("Read from %s (%s, %sb) after %s attempts", url,
                      r.status_code, len(r.content), (i + 1))
            # Doesn't seem like we can make it use a different
            # subclass for responses, so add our own backward-compat
            # attrs
            return UrlResponse(r)
        except exceptions.RequestException as e:
            if (isinstance(e, (exceptions.HTTPError)) and
               hasattr(e, 'response') and  # This appeared in v 0.10.8
               hasattr(e.response, 'status_code')):
                excps.append(
                    UrlError(e, code=e.response.status_code,
                             headers=e.response.headers,
                             url=url))
            else:
                excps.append(UrlError(e, url=url))
            if SSL_ENABLED and isinstance(e, exceptions.SSLError):
                # ssl exceptions are not going to get fixed by waiting a
                # few seconds
                break
            if exception_cb and not exception_cb(req_args.copy(), excps[-1]):
                # if an exception callback was given, it should return True
                # to continue retrying and False to break and re-raise the
                # exception
                break
            if (infinite and sec_between > 0) or \
               (i + 1 < manual_tries and sec_between > 0):
                if log_req_resp:
                    LOG.debug(
                        "Please wait %s seconds while we wait to try again",
                        sec_between)
                time.sleep(sec_between)
    if excps:
        raise excps[-1]
    return None  # Should throw before this...
def welcome_format(action):
    """Render the welcome banner template for the given action string."""
    fields = {
        'version': version.version_string(),
        'uptime': util.uptime(),
        'timestamp': util.time_rfc2822(),
        'action': action,
    }
    return WELCOME_MSG_TPL.format(**fields)
]

# External Sphinx documentation sets available for cross-referencing via
# the intersphinx extension.
intersphinx_mapping = {
    'sphinx': ('http://sphinx.pocoo.org', None)
}

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
# NOTE: this rebinds the name 'version' from the imported cloudinit version
# module to a plain string; any further lookups on the module must happen
# above this line.
version = version.version_string()
release = version

# Set the default Pygments syntax
highlight_language = 'python'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False

# -- Options for HTML output --------------------------------------------------