def test_status_returns_running(self):
    """Report running when status exists with an unfinished stage."""
    ensure_file(self.tmp_path("result.json", self.new_root))
    write_json(
        self.status_file, {"v1": {"init": {"start": 1, "finished": None}}}
    )
    cmdargs = myargs(long=False, wait=False)
    with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            "cloudinit.cmd.status",
            {
                "_is_cloudinit_disabled": (False, ""),
                "Init": {"side_effect": self.init_class},
            },
            status.handle_status_args,
            "ignored",
            cmdargs,
        )
    self.assertEqual(0, retcode)
    self.assertEqual("status: running\n", m_stdout.getvalue())
def test_status_on_errors(self):
    """Reports error when any stage has errors."""
    write_json(
        self.status_file,
        {
            "v1": {
                "stage": None,
                "blah": {"errors": [], "finished": 123.456},
                "init": {
                    "errors": ["error1"],
                    "start": 124.567,
                    "finished": 125.678,
                },
                "init-local": {"start": 123.45, "finished": 123.46},
            }
        },
    )
    cmdargs = myargs(long=False, wait=False)
    with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            "cloudinit.cmd.status",
            {
                "_is_cloudinit_disabled": (False, ""),
                "Init": {"side_effect": self.init_class},
            },
            status.handle_status_args,
            "ignored",
            cmdargs,
        )
    self.assertEqual(1, retcode)
    self.assertEqual("status: error\n", m_stdout.getvalue())
def test_status_returns_running_long_format(self):
    """Long format reports the stage in which we are running."""
    write_json(
        self.status_file,
        {
            "v1": {
                "stage": "init",
                "init": {"start": 124.456, "finished": None},
                "init-local": {"start": 123.45, "finished": 123.46},
            }
        },
    )
    cmdargs = myargs(long=True, wait=False)
    with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            "cloudinit.cmd.status",
            {
                "_is_cloudinit_disabled": (False, ""),
                "Init": {"side_effect": self.init_class},
            },
            status.handle_status_args,
            "ignored",
            cmdargs,
        )
    self.assertEqual(0, retcode)
    expected = dedent(
        """\
        status: running
        time: Thu, 01 Jan 1970 00:02:04 +0000
        detail:
        Running in stage: init
        """
    )
    self.assertEqual(expected, m_stdout.getvalue())
def test_status_returns_done_long(self):
    '''Long format of done status includes datasource info.'''
    ensure_file(self.tmp_path('result.json', self.new_root))
    write_json(
        self.status_file,
        {'v1': {'stage': None,
                'datasource': (
                    'DataSourceNoCloud [seed=/var/.../seed/nocloud-net]'
                    '[dsmode=net]'),
                'init': {'start': 124.567, 'finished': 125.678},
                'init-local': {'start': 123.45, 'finished': 123.46}}})
    cmdargs = myargs(long=True, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(0, retcode)
    expected = dedent('''\
        status: done
        time: Thu, 01 Jan 1970 00:02:05 +0000
        detail:
        DataSourceNoCloud [seed=/var/.../seed/nocloud-net][dsmode=net]
    ''')
    self.assertEqual(expected, m_stdout.getvalue())
def get_data(self):
    """Datasources implement _get_data to set up metadata and userdata_raw.

    Minimally, the datasource should return a boolean True on success.
    """
    return_value = self._get_data()
    json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
    if not return_value:
        return return_value

    instance_data = {
        'ds': {
            'meta-data': self.metadata,
            'user-data': self.get_userdata_raw(),
            'vendor-data': self.get_vendordata_raw()}}
    instance_data.update(
        self._get_standardized_metadata())
    try:
        # Process content base64encoding unserializable values
        content = util.json_dumps(instance_data)
        # Strip base64: prefix and return base64-encoded-keys
        processed_data = process_base64_metadata(json.loads(content))
    except TypeError as e:
        LOG.warning('Error persisting instance-data.json: %s', str(e))
        return return_value
    except UnicodeDecodeError as e:
        LOG.warning('Error persisting instance-data.json: %s', str(e))
        return return_value
    write_json(json_file, processed_data, mode=0o600)
    return return_value
def test_status_main(self):
    '''status.main can be run as a standalone script.'''
    write_json(self.status_file,
               {'v1': {'init': {'start': 1, 'finished': None}}})
    with self.assertRaises(SystemExit) as context_manager:
        with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
            wrap_and_call(
                'cloudinit.cmd.status',
                {'sys.argv': {'new': ['status']},
                 'sys.exit': {'side_effect': self.sys_exit},
                 '_is_cloudinit_disabled': (False, ''),
                 'Init': {'side_effect': self.init_class}},
                status.main)
    self.assertEqual(0, context_manager.exception.code)
    self.assertEqual('status: running\n', m_stdout.getvalue())
def fake_sleep(interval):
    self.assertEqual(0.25, interval)
    self.sleep_calls += 1
    if self.sleep_calls == 2:
        write_json(self.status_file, running_json)
    elif self.sleep_calls == 3:
        write_json(self.status_file, error_json)
def test_status_returns_running_long_format(self):
    '''Long format reports the stage in which we are running.'''
    write_json(
        self.status_file,
        {'v1': {'stage': 'init',
                'init': {'start': 124.456, 'finished': None},
                'init-local': {'start': 123.45, 'finished': 123.46}}})
    cmdargs = myargs(long=True, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(0, retcode)
    expected = dedent('''\
        status: running
        time: Thu, 01 Jan 1970 00:02:04 +0000
        detail:
        Running in stage: init
    ''')
    self.assertEqual(expected, m_stdout.getvalue())
def handle(name, cfg, cloud, log, _args):
    if util.get_cfg_option_bool(cfg, "preserve_hostname", False):
        log.debug(("Configuration option 'preserve_hostname' is set,"
                   " not setting the hostname in module %s"), name)
        return
    (hostname, fqdn) = util.get_hostname_fqdn(cfg, cloud)
    # Check for previous successful invocation of set-hostname

    # The set-hostname artifact file accounts for both hostname and fqdn
    # deltas. As such, its format differs from cc_update_hostname's
    # previous-hostname file, which only contains the base hostname.
    # TODO consolidate previous-hostname and set-hostname artifact files and
    # distro._read_hostname implementation so we only validate one artifact.
    prev_fn = os.path.join(cloud.get_cpath('data'), "set-hostname")
    prev_hostname = {}
    if os.path.exists(prev_fn):
        prev_hostname = util.load_json(util.load_file(prev_fn))
    hostname_changed = (hostname != prev_hostname.get('hostname') or
                        fqdn != prev_hostname.get('fqdn'))
    if not hostname_changed:
        log.debug('No hostname changes. Skipping set-hostname')
        return
    log.debug("Setting the hostname to %s (%s)", fqdn, hostname)
    try:
        cloud.distro.set_hostname(hostname, fqdn)
    except Exception as e:
        msg = "Failed to set the hostname to %s (%s)" % (fqdn, hostname)
        util.logexc(log, msg)
        raise SetHostnameError("%s: %s" % (msg, e))
    write_json(prev_fn, {'hostname': hostname, 'fqdn': fqdn})
def test_status_on_errors_long(self):
    '''Long format of error status includes all error messages.'''
    write_json(
        self.status_file,
        {'v1': {'stage': None,
                'datasource': (
                    'DataSourceNoCloud [seed=/var/.../seed/nocloud-net]'
                    '[dsmode=net]'),
                'init': {'errors': ['error1'],
                         'start': 124.567,
                         'finished': 125.678},
                'init-local': {'errors': ['error2', 'error3'],
                               'start': 123.45,
                               'finished': 123.46}}})
    cmdargs = myargs(long=True, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(1, retcode)
    expected = dedent('''\
        status: error
        time: Thu, 01 Jan 1970 00:02:05 +0000
        detail:
        error1
        error2
        error3
    ''')
    self.assertEqual(expected, m_stdout.getvalue())
def test_status_on_errors(self):
    '''Reports error when any stage has errors.'''
    write_json(
        self.status_file,
        {'v1': {'stage': None,
                'blah': {'errors': [], 'finished': 123.456},
                'init': {'errors': ['error1'],
                         'start': 124.567,
                         'finished': 125.678},
                'init-local': {'start': 123.45, 'finished': 123.46}}})
    cmdargs = myargs(long=False, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(1, retcode)
    self.assertEqual('status: error\n', m_stdout.getvalue())
def test_status_main(self, m_read_cfg_paths, config: Config):
    """status.main can be run as a standalone script."""
    m_read_cfg_paths.return_value = config.paths
    write_json(
        config.status_file,
        {"v1": {"init": {"start": 1, "finished": None}}},
    )
    with pytest.raises(SystemExit) as e:
        with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
            wrap_and_call(
                M_NAME,
                {
                    "sys.argv": {"new": ["status"]},
                    "_is_cloudinit_disabled": (False, ""),
                },
                status.main,
            )
    assert e.value.code == 0
    assert m_stdout.getvalue() == "status: running\n"
def test_status_returns_done(self):
    '''Reports done when stage is None and all stages are finished.'''
    write_json(
        self.status_file,
        {'v1': {'stage': None,
                'datasource': (
                    'DataSourceNoCloud [seed=/var/.../seed/nocloud-net]'
                    '[dsmode=net]'),
                'blah': {'finished': 123.456},
                'init': {'errors': [],
                         'start': 124.567,
                         'finished': 125.678},
                'init-local': {'start': 123.45, 'finished': 123.46}}})
    cmdargs = myargs(long=False, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(0, retcode)
    self.assertEqual('status: done\n', m_stdout.getvalue())
def test_status_output(
    self,
    m_read_cfg_paths,
    ensured_file: Optional[Callable],
    status_content: Dict,
    assert_file,
    cmdargs: MyArgs,
    expected_retcode: int,
    expected_status: str,
    config: Config,
):
    m_read_cfg_paths.return_value = config.paths
    if ensured_file:
        ensure_file(ensured_file(config))
    write_json(
        config.status_file,
        status_content,
    )
    if assert_file:
        assert not os.path.exists(
            config.result_file
        ), f"Unexpected {config.result_file} found"
    with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            M_NAME,
            {"_is_cloudinit_disabled": (False, "")},
            status.handle_status_args,
            "ignored",
            cmdargs,
        )
    assert retcode == expected_retcode
    assert m_stdout.getvalue() == expected_status
def test_status_main(self):
    """status.main can be run as a standalone script."""
    write_json(
        self.status_file, {"v1": {"init": {"start": 1, "finished": None}}}
    )
    with self.assertRaises(SystemExit) as context_manager:
        with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
            wrap_and_call(
                "cloudinit.cmd.status",
                {
                    "sys.argv": {"new": ["status"]},
                    "_is_cloudinit_disabled": (False, ""),
                    "Init": {"side_effect": self.init_class},
                },
                status.main,
            )
    self.assertEqual(0, context_manager.exception.code)
    self.assertEqual("status: running\n", m_stdout.getvalue())
def fake_sleep(interval):
    nonlocal sleep_calls
    assert interval == 0.25
    sleep_calls += 1
    if sleep_calls == 2:
        write_json(config.status_file, running_json)
    elif sleep_calls == 3:
        write_json(config.status_file, error_json)
def test_write_json(self):
    """write_json output is readable json."""
    path = self.tmp_path("test_write_json")
    data = {"key1": "value1", "key2": ["i1", "i2"]}
    atomic_helper.write_json(path, data)

    with open(path, "r") as fp:
        found = json.load(fp)

    self.assertEqual(data, found)
    self.check_perms(path, 0o644)
def test_write_json(self):
    """write_json output is readable json."""
    path = self.tmp_path("test_write_json")
    data = {'key1': 'value1', 'key2': ['i1', 'i2']}
    atomic_helper.write_json(path, data)

    with open(path, "r") as fp:
        found = json.load(fp)

    self.assertEqual(data, found)
    self.check_perms(path, 0o644)
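# The two tests above only pin down write_json's observable behavior: readable
# JSON on disk and 0o644 permissions by default. The following is a minimal,
# self-contained sketch of an atomic JSON writer with that same behavior; it
# is an illustration written for this document, not cloud-init's actual
# atomic_helper implementation. The function name write_json_atomic and the
# json.dumps formatting choices are assumptions.
import json
import os
import tempfile


def write_json_atomic(path, data, mode=0o644):
    """Serialize data to JSON and atomically replace path with the result."""
    content = json.dumps(data, indent=1, sort_keys=True) + "\n"
    # Create the temp file in the destination directory so os.rename stays on
    # one filesystem and is atomic on POSIX.
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
    try:
        with os.fdopen(fd, "w") as fp:
            fp.write(content)
        os.chmod(tmp_path, mode)
        os.rename(tmp_path, path)
    except BaseException:
        os.unlink(tmp_path)
        raise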
def fake_sleep(interval):
    self.assertEqual(0.25, interval)
    self.sleep_calls += 1
    if self.sleep_calls == 2:
        write_json(self.status_file, running_json)
    elif self.sleep_calls == 3:
        write_json(self.status_file, done_json)
        result_file = self.tmp_path('result.json', self.new_root)
        ensure_file(result_file)
def fake_sleep(interval):
    nonlocal sleep_calls
    assert interval == 0.25
    sleep_calls += 1
    if sleep_calls == 2:
        write_json(config.status_file, running_json)
    elif sleep_calls == 3:
        write_json(config.status_file, done_json)
        result_file = config.result_file
        ensure_file(result_file)
def persist_instance_data(self):
    """Process and write INSTANCE_JSON_FILE with all instance metadata.

    Replace any hyphens with underscores in key names for use in template
    processing.

    @return True on successful write, False otherwise.
    """
    if hasattr(self, '_crawled_metadata'):
        # Any datasource with _crawled_metadata will best represent
        # most recent, 'raw' metadata
        crawled_metadata = copy.deepcopy(
            getattr(self, '_crawled_metadata'))
        crawled_metadata.pop('user-data', None)
        crawled_metadata.pop('vendor-data', None)
        instance_data = {'ds': crawled_metadata}
    else:
        instance_data = {'ds': {'meta_data': self.metadata}}
    if hasattr(self, 'network_json'):
        network_json = getattr(self, 'network_json')
        if network_json != UNSET:
            instance_data['ds']['network_json'] = network_json
    if hasattr(self, 'ec2_metadata'):
        ec2_metadata = getattr(self, 'ec2_metadata')
        if ec2_metadata != UNSET:
            instance_data['ds']['ec2_metadata'] = ec2_metadata
    instance_data['ds']['_doc'] = EXPERIMENTAL_TEXT
    # Add merged cloud.cfg and sys info for jinja templates and cli query
    instance_data['merged_cfg'] = copy.deepcopy(self.sys_cfg)
    instance_data['merged_cfg']['_doc'] = (
        'Merged cloud-init system config from /etc/cloud/cloud.cfg and'
        ' /etc/cloud/cloud.cfg.d/')
    instance_data['sys_info'] = util.system_info()
    instance_data.update(
        self._get_standardized_metadata(instance_data))
    try:
        # Process content base64encoding unserializable values
        content = util.json_dumps(instance_data)
        # Strip base64: prefix and set base64_encoded_keys list.
        processed_data = process_instance_metadata(
            json.loads(content),
            sensitive_keys=self.sensitive_metadata_keys)
    except TypeError as e:
        LOG.warning('Error persisting instance-data.json: %s', str(e))
        return False
    except UnicodeDecodeError as e:
        LOG.warning('Error persisting instance-data.json: %s', str(e))
        return False
    json_sensitive_file = os.path.join(self.paths.run_dir,
                                       INSTANCE_JSON_SENSITIVE_FILE)
    write_json(json_sensitive_file, processed_data, mode=0o600)
    json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
    # World readable
    write_json(json_file, redact_sensitive_keys(processed_data))
    return True
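# Hedged usage sketch (not part of cloud-init): reading back the two artifacts
# that persist_instance_data() above writes, one root-only file with full data
# and one world-readable file with sensitive values redacted. The filenames
# instance-data.json and instance-data-sensitive.json and the /run/cloud-init
# default correspond to the INSTANCE_JSON_FILE / INSTANCE_JSON_SENSITIVE_FILE
# constants and run_dir used above, but are assumptions here; the helper name
# load_instance_data is hypothetical.
import json
import os


def load_instance_data(run_dir="/run/cloud-init", sensitive=False):
    """Return the parsed instance-data dict written by persist_instance_data."""
    name = "instance-data-sensitive.json" if sensitive else "instance-data.json"
    with open(os.path.join(run_dir, name)) as fp:
        return json.load(fp)


# Example: inspect the redacted, world-readable view.
# data = load_instance_data()
# print(data["ds"]["_doc"])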
def test_status_returns_running(self):
    '''Report running when status exists with an unfinished stage.'''
    ensure_file(self.tmp_path('result.json', self.new_root))
    write_json(self.status_file,
               {'v1': {'init': {'start': 1, 'finished': None}}})
    cmdargs = myargs(long=False, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(0, retcode)
    self.assertEqual('status: running\n', m_stdout.getvalue())
def generate_seed(self, tmpdir):
    """Generate nocloud seed from user-data"""
    seed_file = os.path.join(tmpdir, '%s_seed.img' % self.name)
    user_data_file = os.path.join(tmpdir, '%s_user_data' % self.name)
    meta_data_file = os.path.join(tmpdir, '%s_meta_data' % self.name)

    with open(user_data_file, "w") as ud_file:
        ud_file.write(self.user_data)

    # meta-data can be yaml, but more easily pretty printed with json
    write_json(meta_data_file, self.meta_data)
    subp.subp(['cloud-localds', seed_file, user_data_file,
               meta_data_file])

    return seed_file
def generate_seed(self, tmpdir):
    """Generate nocloud seed from user-data"""
    seed_file = os.path.join(tmpdir, '%s_seed.img' % self.name)
    user_data_file = os.path.join(tmpdir, '%s_user_data' % self.name)
    meta_data_file = os.path.join(tmpdir, '%s_meta_data' % self.name)

    with open(user_data_file, "w") as ud_file:
        ud_file.write(self.user_data)

    # meta-data can be yaml, but more easily pretty printed with json
    write_json(meta_data_file, self.meta_data)
    c_util.subp(['cloud-localds', seed_file, user_data_file,
                 meta_data_file])

    return seed_file
def test_status_returns_running_on_no_results_json(self):
    '''Report running when status.json exists but result.json does not.'''
    result_file = self.tmp_path('result.json', self.new_root)
    write_json(self.status_file, {})
    self.assertFalse(
        os.path.exists(result_file), 'Unexpected result.json found')
    cmdargs = myargs(long=False, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(0, retcode)
    self.assertEqual('status: running\n', m_stdout.getvalue())
def test_status_on_errors_long(self):
    """Long format of error status includes all error messages."""
    write_json(
        self.status_file,
        {
            "v1": {
                "stage": None,
                "datasource": (
                    "DataSourceNoCloud [seed=/var/.../seed/nocloud-net]"
                    "[dsmode=net]"
                ),
                "init": {
                    "errors": ["error1"],
                    "start": 124.567,
                    "finished": 125.678,
                },
                "init-local": {
                    "errors": ["error2", "error3"],
                    "start": 123.45,
                    "finished": 123.46,
                },
            }
        },
    )
    cmdargs = myargs(long=True, wait=False)
    with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            "cloudinit.cmd.status",
            {
                "_is_cloudinit_disabled": (False, ""),
                "Init": {"side_effect": self.init_class},
            },
            status.handle_status_args,
            "ignored",
            cmdargs,
        )
    self.assertEqual(1, retcode)
    expected = dedent(
        """\
        status: error
        time: Thu, 01 Jan 1970 00:02:05 +0000
        detail:
        error1
        error2
        error3
        """
    )
    self.assertEqual(expected, m_stdout.getvalue())
def handle(name, cfg, cloud, log, _args):
    if util.get_cfg_option_bool(cfg, "preserve_hostname", False):
        log.debug(
            "Configuration option 'preserve_hostname' is set,"
            " not setting the hostname in module %s",
            name,
        )
        return

    # Set prefer_fqdn_over_hostname value in distro
    hostname_fqdn = util.get_cfg_option_bool(
        cfg, "prefer_fqdn_over_hostname", None
    )
    if hostname_fqdn is not None:
        cloud.distro.set_option("prefer_fqdn_over_hostname", hostname_fqdn)

    (hostname, fqdn, is_default) = util.get_hostname_fqdn(cfg, cloud)
    # Check for previous successful invocation of set-hostname

    # The set-hostname artifact file accounts for both hostname and fqdn
    # deltas. As such, its format differs from cc_update_hostname's
    # previous-hostname file, which only contains the base hostname.
    # TODO consolidate previous-hostname and set-hostname artifact files and
    # distro._read_hostname implementation so we only validate one artifact.
    prev_fn = os.path.join(cloud.get_cpath("data"), "set-hostname")
    prev_hostname = {}
    if os.path.exists(prev_fn):
        prev_hostname = util.load_json(util.load_file(prev_fn))
    hostname_changed = hostname != prev_hostname.get(
        "hostname"
    ) or fqdn != prev_hostname.get("fqdn")
    if not hostname_changed:
        log.debug("No hostname changes. Skipping set-hostname")
        return
    if is_default and hostname == "localhost":
        # https://github.com/systemd/systemd/commit/d39079fcaa05e23540d2b1f0270fa31c22a7e9f1
        log.debug("Hostname is localhost. Let other services handle this.")
        return
    log.debug("Setting the hostname to %s (%s)", fqdn, hostname)
    try:
        cloud.distro.set_hostname(hostname, fqdn)
    except Exception as e:
        msg = "Failed to set the hostname to %s (%s)" % (fqdn, hostname)
        util.logexc(log, msg)
        raise SetHostnameError("%s: %s" % (msg, e)) from e
    write_json(prev_fn, {"hostname": hostname, "fqdn": fqdn})
def test_status_returns_done_long(self):
    """Long format of done status includes datasource info."""
    ensure_file(self.tmp_path("result.json", self.new_root))
    write_json(
        self.status_file,
        {
            "v1": {
                "stage": None,
                "datasource": (
                    "DataSourceNoCloud [seed=/var/.../seed/nocloud-net]"
                    "[dsmode=net]"
                ),
                "init": {"start": 124.567, "finished": 125.678},
                "init-local": {"start": 123.45, "finished": 123.46},
            }
        },
    )
    cmdargs = myargs(long=True, wait=False)
    with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            "cloudinit.cmd.status",
            {
                "_is_cloudinit_disabled": (False, ""),
                "Init": {"side_effect": self.init_class},
            },
            status.handle_status_args,
            "ignored",
            cmdargs,
        )
    self.assertEqual(0, retcode)
    expected = dedent(
        """\
        status: done
        time: Thu, 01 Jan 1970 00:02:05 +0000
        detail:
        DataSourceNoCloud [seed=/var/.../seed/nocloud-net][dsmode=net]
        """
    )
    self.assertEqual(expected, m_stdout.getvalue())
def persist_instance_data(self):
    """Process and write INSTANCE_JSON_FILE with all instance metadata.

    Replace any hyphens with underscores in key names for use in template
    processing.

    @return True on successful write, False otherwise.
    """
    instance_data = {
        'ds': {'_doc': EXPERIMENTAL_TEXT,
               'meta_data': self.metadata}}
    if hasattr(self, 'network_json'):
        network_json = getattr(self, 'network_json')
        if network_json != UNSET:
            instance_data['ds']['network_json'] = network_json
    if hasattr(self, 'ec2_metadata'):
        ec2_metadata = getattr(self, 'ec2_metadata')
        if ec2_metadata != UNSET:
            instance_data['ds']['ec2_metadata'] = ec2_metadata
    instance_data.update(self._get_standardized_metadata())
    try:
        # Process content base64encoding unserializable values
        content = util.json_dumps(instance_data)
        # Strip base64: prefix and set base64_encoded_keys list.
        processed_data = process_instance_metadata(
            json.loads(content),
            sensitive_keys=self.sensitive_metadata_keys)
    except TypeError as e:
        LOG.warning('Error persisting instance-data.json: %s', str(e))
        return False
    except UnicodeDecodeError as e:
        LOG.warning('Error persisting instance-data.json: %s', str(e))
        return False
    json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
    write_json(json_file, processed_data)  # World readable
    json_sensitive_file = os.path.join(self.paths.run_dir,
                                       INSTANCE_JSON_SENSITIVE_FILE)
    write_json(json_sensitive_file,
               redact_sensitive_keys(processed_data), mode=0o600)
    return True
def test_status_returns_done(self):
    """Report done when result.json exists and no stages are unfinished."""
    ensure_file(self.tmp_path("result.json", self.new_root))
    write_json(
        self.status_file,
        {
            "v1": {
                "stage": None,  # No current stage running
                "datasource": (
                    "DataSourceNoCloud [seed=/var/.../seed/nocloud-net]"
                    "[dsmode=net]"
                ),
                "blah": {"finished": 123.456},
                "init": {
                    "errors": [],
                    "start": 124.567,
                    "finished": 125.678,
                },
                "init-local": {"start": 123.45, "finished": 123.46},
            }
        },
    )
    cmdargs = myargs(long=False, wait=False)
    with mock.patch("sys.stdout", new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            "cloudinit.cmd.status",
            {
                "_is_cloudinit_disabled": (False, ""),
                "Init": {"side_effect": self.init_class},
            },
            status.handle_status_args,
            "ignored",
            cmdargs,
        )
    self.assertEqual(0, retcode)
    self.assertEqual("status: done\n", m_stdout.getvalue())
def run_hook(interface, event, data_d=None, env=None):
    if event not in EVENTS:
        raise ValueError("Unexpected event '%s'. Expected one of: %s" %
                         (event, EVENTS))
    if data_d is None:
        data_d = _get_hooks_dir()
    if env is None:
        env = os.environ
    hook_file = os.path.join(data_d, interface + ".json")

    if event == UP:
        if not os.path.exists(data_d):
            os.makedirs(data_d)
        atomic_helper.write_json(hook_file, _filter_env_vals(env))
        LOG.debug("Wrote dhclient options in %s", hook_file)
    elif event == DOWN:
        if os.path.exists(hook_file):
            os.remove(hook_file)
            LOG.debug("Removed dhclient options file %s", hook_file)
def record(self):
    envs = os.environ
    if self.hook_file is None:
        return
    atomic_helper.write_json(self.hook_file, self.get_vals(envs))
    LOG.debug("Wrote dhclient options in %s", self.hook_file)
def status_wrapper(name, args, data_d=None, link_d=None):
    if data_d is None:
        data_d = os.path.normpath("/var/lib/cloud/data")
    if link_d is None:
        link_d = os.path.normpath("/run/cloud-init")

    status_path = os.path.join(data_d, "status.json")
    status_link = os.path.join(link_d, "status.json")
    result_path = os.path.join(data_d, "result.json")
    result_link = os.path.join(link_d, "result.json")

    util.ensure_dirs((data_d, link_d,))

    (_name, functor) = args.action

    if name == "init":
        if args.local:
            mode = "init-local"
        else:
            mode = "init"
    elif name == "modules":
        mode = "modules-%s" % args.mode
    else:
        raise ValueError("unknown name: %s" % name)

    modes = ('init', 'init-local', 'modules-config', 'modules-final')

    status = None
    if mode == 'init-local':
        for f in (status_link, result_link, status_path, result_path):
            util.del_file(f)
    else:
        try:
            status = json.loads(util.load_file(status_path))
        except Exception:
            pass

    if status is None:
        nullstatus = {
            'errors': [],
            'start': None,
            'finished': None,
        }
        status = {'v1': {}}
        for m in modes:
            status['v1'][m] = nullstatus.copy()
        status['v1']['datasource'] = None

    v1 = status['v1']
    v1['stage'] = mode
    v1[mode]['start'] = time.time()

    atomic_helper.write_json(status_path, status)
    util.sym_link(os.path.relpath(status_path, link_d), status_link,
                  force=True)

    try:
        ret = functor(name, args)
        if mode in ('init', 'init-local'):
            (datasource, errors) = ret
            if datasource is not None:
                v1['datasource'] = str(datasource)
        else:
            errors = ret

        v1[mode]['errors'] = [str(e) for e in errors]

    except Exception as e:
        util.logexc(LOG, "failed stage %s", mode)
        print_exc("failed run of stage %s" % mode)
        v1[mode]['errors'] = [str(e)]

    v1[mode]['finished'] = time.time()
    v1['stage'] = None

    atomic_helper.write_json(status_path, status)

    if mode == "modules-final":
        # write the 'finished' file
        errors = []
        for m in modes:
            if v1[m]['errors']:
                errors.extend(v1[m].get('errors', []))

        atomic_helper.write_json(
            result_path,
            {'v1': {'datasource': v1['datasource'], 'errors': errors}})
        util.sym_link(os.path.relpath(result_path, link_d), result_link,
                      force=True)

    return len(v1[mode]['errors'])
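# Illustrative only: the shape of the status.json document that
# status_wrapper() above maintains and that the status tests earlier in this
# collection seed directly with write_json(). The field names come from the
# code above (nullstatus, modes, 'stage', 'datasource'); the numeric values
# here are made-up examples.
EXAMPLE_STATUS_JSON = {
    "v1": {
        "datasource": None,          # set to str(datasource) after init runs
        "stage": "init",             # None when no stage is currently running
        "init-local": {"errors": [], "start": 123.45, "finished": 123.46},
        "init": {"errors": [], "start": 124.56, "finished": None},
        "modules-config": {"errors": [], "start": None, "finished": None},
        "modules-final": {"errors": [], "start": None, "finished": None},
    }
}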