def test_get_data_writes_json_instance_data_on_success(self):
    """get_data writes INSTANCE_JSON_FILE to run_dir as world readable."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}))
    datasource.get_data()
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    expected = {
        'base64_encoded_keys': [],
        'sensitive_keys': [],
        'v1': {
            '_beta_keys': ['subplatform'],
            'availability-zone': 'myaz',
            'availability_zone': 'myaz',
            'cloud-name': 'subclasscloudname',
            'cloud_name': 'subclasscloudname',
            'instance-id': 'iid-datasource',
            'instance_id': 'iid-datasource',
            'local-hostname': 'test-subclass-hostname',
            'local_hostname': 'test-subclass-hostname',
            'platform': 'mytestsubclass',
            'public_ssh_keys': [],
            'region': 'myregion',
            'subplatform': 'unknown'},
        'ds': {
            '_doc': EXPERIMENTAL_TEXT,
            'meta_data': {'availability_zone': 'myaz',
                          'local-hostname': 'test-subclass-hostname',
                          'region': 'myregion'}}}
    self.assertEqual(expected, util.load_json(content))
    file_stat = os.stat(json_file)
    self.assertEqual(0o644, stat.S_IMODE(file_stat.st_mode))

def test_get_data_writes_json_instance_data_on_success(self):
    """get_data writes INSTANCE_JSON_FILE to run_dir as world readable."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}))
    sys_info = {
        "python": "3.7",
        "platform":
            "Linux-5.4.0-24-generic-x86_64-with-Ubuntu-20.04-focal",
        "uname": ["Linux", "myhost", "5.4.0-24-generic", "SMP blah",
                  "x86_64"],
        "variant": "ubuntu",
        "dist": ["ubuntu", "20.04", "focal"]}
    with mock.patch("cloudinit.util.system_info", return_value=sys_info):
        datasource.get_data()
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    expected = {
        'base64_encoded_keys': [],
        'merged_cfg': REDACT_SENSITIVE_VALUE,
        'sensitive_keys': ['merged_cfg'],
        'sys_info': sys_info,
        'v1': {
            '_beta_keys': ['subplatform'],
            'availability-zone': 'myaz',
            'availability_zone': 'myaz',
            'cloud-name': 'subclasscloudname',
            'cloud_name': 'subclasscloudname',
            'distro': 'ubuntu',
            'distro_release': 'focal',
            'distro_version': '20.04',
            'instance-id': 'iid-datasource',
            'instance_id': 'iid-datasource',
            'local-hostname': 'test-subclass-hostname',
            'local_hostname': 'test-subclass-hostname',
            'kernel_release': '5.4.0-24-generic',
            'machine': 'x86_64',
            'platform': 'mytestsubclass',
            'public_ssh_keys': [],
            'python_version': '3.7',
            'region': 'myregion',
            'system_platform':
                'Linux-5.4.0-24-generic-x86_64-with-Ubuntu-20.04-focal',
            'subplatform': 'unknown',
            'variant': 'ubuntu'},
        'ds': {
            '_doc': EXPERIMENTAL_TEXT,
            'meta_data': {'availability_zone': 'myaz',
                          'local-hostname': 'test-subclass-hostname',
                          'region': 'myregion'}}}
    self.assertEqual(expected, util.load_json(content))
    file_stat = os.stat(json_file)
    self.assertEqual(0o644, stat.S_IMODE(file_stat.st_mode))

def test_get_data_writes_json_instance_data_sensitive(self):
    """get_data writes INSTANCE_JSON_SENSITIVE_FILE as readonly root."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}),
        custom_metadata={
            'availability_zone': 'myaz',
            'local-hostname': 'test-subclass-hostname',
            'region': 'myregion',
            'some': {'security-credentials': {
                'cred1': 'sekret', 'cred2': 'othersekret'}}})
    self.assertEqual(
        ('security-credentials',), datasource.sensitive_metadata_keys)
    datasource.get_data()
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    sensitive_json_file = self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, tmp)
    redacted = util.load_json(util.load_file(json_file))
    self.assertEqual(
        {'security-credentials': REDACT_SENSITIVE_VALUE},
        redacted['ds']['meta_data']['some'])
    content = util.load_file(sensitive_json_file)
    expected = {
        'base64_encoded_keys': [],
        'sensitive_keys': ['ds/meta_data/some/security-credentials'],
        'v1': {
            '_beta_keys': ['subplatform'],
            'availability-zone': 'myaz',
            'availability_zone': 'myaz',
            'cloud-name': 'subclasscloudname',
            'cloud_name': 'subclasscloudname',
            'instance-id': 'iid-datasource',
            'instance_id': 'iid-datasource',
            'local-hostname': 'test-subclass-hostname',
            'local_hostname': 'test-subclass-hostname',
            'platform': 'mytestsubclass',
            'public_ssh_keys': [],
            'region': 'myregion',
            'subplatform': 'unknown'},
        'ds': {
            '_doc': EXPERIMENTAL_TEXT,
            'meta_data': {
                'availability_zone': 'myaz',
                'local-hostname': 'test-subclass-hostname',
                'region': 'myregion',
                'some': {'security-credentials': {
                    'cred1': 'sekret', 'cred2': 'othersekret'}}}}}
    self.maxDiff = None
    self.assertEqual(expected, util.load_json(content))
    file_stat = os.stat(sensitive_json_file)
    self.assertEqual(0o600, stat.S_IMODE(file_stat.st_mode))

def test_persist_instance_data_writes_network_json_when_set(self):
    """When network_data.json class attribute is set, persist to json."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({"run_dir": tmp}))
    datasource.get_data()
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    instance_data = util.load_json(util.load_file(json_file))
    self.assertNotIn("network_json", instance_data["ds"])
    datasource.network_json = {"network_json": "is good"}
    datasource.persist_instance_data()
    instance_data = util.load_json(util.load_file(json_file))
    self.assertEqual(
        {"network_json": "is good"}, instance_data["ds"]["network_json"])

def test_persist_instance_data_writes_ec2_metadata_when_set(self):
    """When ec2_metadata class attribute is set, persist to json."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}))
    datasource.ec2_metadata = UNSET
    datasource.get_data()
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    instance_data = util.load_json(util.load_file(json_file))
    self.assertNotIn('ec2_metadata', instance_data['ds'])
    datasource.ec2_metadata = {'ec2stuff': 'is good'}
    datasource.persist_instance_data()
    instance_data = util.load_json(util.load_file(json_file))
    self.assertEqual(
        {'ec2stuff': 'is good'}, instance_data['ds']['ec2_metadata'])

def test_status_wrapper_init_local_writes_fresh_status_info(self):
    """When running in init-local mode, status_wrapper writes status.json.

    Old status and result artifacts are also removed.
    """
    tmpd = self.tmp_dir()
    data_d = self.tmp_path('data', tmpd)
    link_d = self.tmp_path('link', tmpd)
    status_link = self.tmp_path('status.json', link_d)
    # Write old artifacts which will be removed or updated.
    for _dir in data_d, link_d:
        test_helpers.populate_dir(
            _dir, {'status.json': 'old', 'result.json': 'old'})

    FakeArgs = namedtuple('FakeArgs', ['action', 'local', 'mode'])

    def myaction(name, args):
        # Return an error to watch status capture it
        return 'SomeDatasource', ['an error']

    myargs = FakeArgs(('ignored_name', myaction), True, 'bogusmode')
    cli.status_wrapper('init', myargs, data_d, link_d)
    # The error returned by the action is captured in status.json
    status_v1 = load_json(load_file(status_link))['v1']
    self.assertEqual(['an error'], status_v1['init-local']['errors'])
    self.assertEqual('SomeDatasource', status_v1['datasource'])
    self.assertFalse(
        os.path.exists(self.tmp_path('result.json', data_d)),
        'unexpected result.json found')
    self.assertFalse(
        os.path.exists(self.tmp_path('result.json', link_d)),
        'unexpected result.json link found')

def render_jinja_payload_from_file(
        payload, payload_fn, instance_data_file, debug=False):
    """Render a jinja template payload sourcing variables from jinja_vars_path.

    @param payload: String of jinja template content. Should begin with
        ## template: jinja\n.
    @param payload_fn: String representing the filename from which the
        payload was read, used in error reporting. Generally in
        part-handling this is 'part-##'.
    @param instance_data_file: A path to a json file containing variables
        that will be used as jinja template variables.

    @return: A string of jinja-rendered content with the jinja header
        removed. Returns None on error.
    """
    instance_data = {}
    rendered_payload = None
    if not os.path.exists(instance_data_file):
        raise RuntimeError(
            'Cannot render jinja template vars. Instance data not yet'
            ' present at %s' % instance_data_file)
    try:
        instance_data = load_json(load_file(instance_data_file))
    except (IOError, OSError) as e:
        if e.errno == EACCES:
            raise RuntimeError(
                'Cannot render jinja template vars. No read permission on'
                " '%s'. Try sudo" % instance_data_file)
        raise
    rendered_payload = render_jinja_payload(
        payload, payload_fn, instance_data, debug)
    if not rendered_payload:
        return None
    return rendered_payload

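# A minimal stand-alone sketch (stdlib only, hypothetical path and helper
# name) of the errno check used above: PermissionError is an OSError whose
# errno is EACCES, so comparing e.errno against EACCES cleanly separates
# "no read permission" from every other I/O failure, which is re-raised.
import os
from errno import EACCES

def read_or_explain(path):
    try:
        with open(path) as f:
            return f.read()
    except OSError as e:
        if e.errno == EACCES:
            raise RuntimeError("No read permission on '%s'. Try sudo" % path)
        raise

# For a root-owned 0o600 file, a non-root caller gets the "Try sudo"
# RuntimeError; a missing file still surfaces as FileNotFoundError.
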
def test_get_data_handles_redacted_unserializable_content(self):
    """get_data warns on unserializable content in INSTANCE_JSON_FILE."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}),
        custom_userdata={'key1': 'val1', 'key2': {'key2.1': self.paths}})
    self.assertTrue(datasource.get_data())
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    expected_userdata = {
        'key1': 'val1',
        'key2': {
            'key2.1': "Warning: redacted unserializable type <class"
                      " 'cloudinit.helpers.Paths'>"}}
    instance_json = util.load_json(content)
    self.assertEqual(
        expected_userdata, instance_json['ds']['user-data'])

def add_snap_user(self, name, **kwargs):
    """
    Add a snappy user to the system using snappy tools
    """

    snapuser = kwargs.get('snapuser')
    known = kwargs.get('known', False)
    create_user_cmd = ["snap", "create-user", "--sudoer", "--json"]
    if known:
        create_user_cmd.append("--known")
    create_user_cmd.append(snapuser)

    # Run the command
    LOG.debug("Adding snap user %s", name)
    try:
        (out, err) = subp.subp(
            create_user_cmd, logstring=create_user_cmd, capture=True)
        LOG.debug("snap create-user returned: %s:%s", out, err)
        jobj = util.load_json(out)
        username = jobj.get('username', None)
    except Exception as e:
        util.logexc(LOG, "Failed to create snap user %s", name)
        raise e

    return username

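# An illustration of the parsing step above, decoupled from running snap:
# `snap create-user --json` prints a JSON object, and only the "username"
# key is consumed. The sample payload below is hypothetical.
import json

sample_out = '{"username": "jdoe", "ssh-keys": ["ssh-rsa AAAA... jdoe"]}'
jobj = json.loads(sample_out)
assert jobj.get('username', None) == 'jdoe'
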
def render_jinja_payload_from_file(
        payload, payload_fn, instance_data_file, debug=False):
    """Render a jinja template payload sourcing variables from jinja_vars_path.

    @param payload: String of jinja template content. Should begin with
        ## template: jinja\n.
    @param payload_fn: String representing the filename from which the
        payload was read, used in error reporting. Generally in
        part-handling this is 'part-##'.
    @param instance_data_file: A path to a json file containing variables
        that will be used as jinja template variables.

    @return: A string of jinja-rendered content with the jinja header
        removed. Returns None on error.
    """
    instance_data = {}
    rendered_payload = None
    if not os.path.exists(instance_data_file):
        raise RuntimeError(
            'Cannot render jinja template vars. Instance data not yet'
            ' present at %s' % instance_data_file)
    instance_data = load_json(load_file(instance_data_file))
    rendered_payload = render_jinja_payload(
        payload, payload_fn, instance_data, debug)
    if not rendered_payload:
        return None
    return rendered_payload

def add_snap_user(self, name, **kwargs):
    """
    Add a snappy user to the system using snappy tools
    """

    snapuser = kwargs.get('snapuser')
    known = kwargs.get('known', False)
    adduser_cmd = ["snap", "create-user", "--sudoer", "--json"]
    if known:
        adduser_cmd.append("--known")
    adduser_cmd.append(snapuser)

    # Run the command
    LOG.debug("Adding snap user %s", name)
    try:
        (out, err) = util.subp(
            adduser_cmd, logstring=adduser_cmd, capture=True)
        LOG.debug("snap create-user returned: %s:%s", out, err)
        jobj = util.load_json(out)
        username = jobj.get('username', None)
    except Exception as e:
        util.logexc(LOG, "Failed to create snap user %s", name)
        raise e

    return username

def handle(name, cfg, cloud, log, _args):
    if util.get_cfg_option_bool(cfg, "preserve_hostname", False):
        log.debug(("Configuration option 'preserve_hostname' is set,"
                   " not setting the hostname in module %s"), name)
        return
    (hostname, fqdn) = util.get_hostname_fqdn(cfg, cloud)
    # Check for previous successful invocation of set-hostname.
    # The set-hostname artifact file accounts for both hostname and fqdn
    # deltas. As such, its format differs from cc_update_hostname's
    # previous-hostname file, which only contains the base hostname.
    # TODO consolidate previous-hostname and set-hostname artifact files and
    # distro._read_hostname implementation so we only validate one artifact.
    prev_fn = os.path.join(cloud.get_cpath('data'), "set-hostname")
    prev_hostname = {}
    if os.path.exists(prev_fn):
        prev_hostname = util.load_json(util.load_file(prev_fn))
    hostname_changed = (hostname != prev_hostname.get('hostname') or
                        fqdn != prev_hostname.get('fqdn'))
    if not hostname_changed:
        log.debug('No hostname changes. Skipping set-hostname')
        return
    log.debug("Setting the hostname to %s (%s)", fqdn, hostname)
    try:
        cloud.distro.set_hostname(hostname, fqdn)
    except Exception as e:
        msg = "Failed to set the hostname to %s (%s)" % (fqdn, hostname)
        util.logexc(log, msg)
        raise SetHostnameError("%s: %s" % (msg, e))
    write_json(prev_fn, {'hostname': hostname, 'fqdn': fqdn})

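# A stdlib-only sketch of the artifact round-trip above: the previous
# hostname/fqdn pair is persisted as JSON, and a change in either field
# triggers a re-run of set-hostname. The paths here are hypothetical.
import json
import os
import tempfile

prev_fn = os.path.join(tempfile.mkdtemp(), 'set-hostname')
with open(prev_fn, 'w') as f:
    json.dump({'hostname': 'myhost', 'fqdn': 'myhost.example.com'}, f)

with open(prev_fn) as f:
    prev_hostname = json.load(f)
hostname, fqdn = 'myhost', 'myhost.example.org'  # fqdn delta only
hostname_changed = (hostname != prev_hostname.get('hostname') or
                    fqdn != prev_hostname.get('fqdn'))
assert hostname_changed  # an fqdn change alone is enough to re-run
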
def test_status_wrapper_init_local_writes_fresh_status_info(self, tmpdir):
    """When running in init-local mode, status_wrapper writes status.json.

    Old status and result artifacts are also removed.
    """
    data_d = tmpdir.join("data")
    link_d = tmpdir.join("link")
    status_link = link_d.join("status.json")
    # Write old artifacts which will be removed or updated.
    for _dir in data_d, link_d:
        test_helpers.populate_dir(
            str(_dir), {"status.json": "old", "result.json": "old"}
        )

    FakeArgs = namedtuple("FakeArgs", ["action", "local", "mode"])

    def myaction(name, args):
        # Return an error to watch status capture it
        return "SomeDatasource", ["an error"]

    myargs = FakeArgs(("ignored_name", myaction), True, "bogusmode")
    cli.status_wrapper("init", myargs, data_d, link_d)
    # The error returned by the action is captured in status.json
    status_v1 = load_json(load_file(status_link))["v1"]
    assert ["an error"] == status_v1["init-local"]["errors"]
    assert "SomeDatasource" == status_v1["datasource"]
    assert False is os.path.exists(
        data_d.join("result.json")
    ), "unexpected result.json found"
    assert False is os.path.exists(
        link_d.join("result.json")
    ), "unexpected result.json link found"

def _read_instance_data(instance_data, user_data, vendor_data) -> dict:
    """Return a dict of merged instance-data, vendordata and userdata.

    The dict will contain supplemental userdata and vendordata keys sourced
    from default user-data and vendor-data files.

    Non-root users will have redacted INSTANCE_JSON_FILE content and
    redacted vendordata and userdata values.

    :raise: IOError/OSError on absence of instance-data.json file or invalid
        access perms.
    """
    paths = None
    uid = os.getuid()
    if not all([instance_data, user_data, vendor_data]):
        paths = read_cfg_paths()
    if instance_data:
        instance_data_fn = instance_data
    else:
        redacted_data_fn = os.path.join(paths.run_dir, INSTANCE_JSON_FILE)
        if uid == 0:
            sensitive_data_fn = os.path.join(
                paths.run_dir, INSTANCE_JSON_SENSITIVE_FILE)
            if os.path.exists(sensitive_data_fn):
                instance_data_fn = sensitive_data_fn
            else:
                LOG.warning(
                    'Missing root-readable %s. Using redacted %s instead.',
                    sensitive_data_fn, redacted_data_fn)
                instance_data_fn = redacted_data_fn
        else:
            instance_data_fn = redacted_data_fn
    if user_data:
        user_data_fn = user_data
    else:
        user_data_fn = os.path.join(paths.instance_link, 'user-data.txt')
    if vendor_data:
        vendor_data_fn = vendor_data
    else:
        vendor_data_fn = os.path.join(paths.instance_link, 'vendor-data.txt')

    try:
        instance_json = util.load_file(instance_data_fn)
    except (IOError, OSError) as e:
        if e.errno == EACCES:
            LOG.error(
                "No read permission on '%s'. Try sudo", instance_data_fn)
        else:
            LOG.error('Missing instance-data file: %s', instance_data_fn)
        raise

    instance_data = util.load_json(instance_json)
    if uid != 0:
        instance_data['userdata'] = (
            '<%s> file:%s' % (REDACT_SENSITIVE_VALUE, user_data_fn))
        instance_data['vendordata'] = (
            '<%s> file:%s' % (REDACT_SENSITIVE_VALUE, vendor_data_fn))
    else:
        instance_data['userdata'] = load_userdata(user_data_fn)
        instance_data['vendordata'] = load_userdata(vendor_data_fn)
    return instance_data

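# A stand-alone sketch of the file-selection logic above: root prefers the
# sensitive file when it exists; everyone else reads the redacted one. The
# filenames match cloud-init's defaults; pick_instance_data_fn and the
# injectable `exists` hook are hypothetical helpers for illustration.
import os

def pick_instance_data_fn(run_dir, uid, exists=os.path.exists):
    redacted = os.path.join(run_dir, 'instance-data.json')
    sensitive = os.path.join(run_dir, 'instance-data-sensitive.json')
    if uid == 0 and exists(sensitive):
        return sensitive
    return redacted

# Root with the sensitive file present gets the unredacted view:
assert pick_instance_data_fn(
    '/run/cloud-init', 0, exists=lambda p: True).endswith('-sensitive.json')
# Non-root callers always get the redacted file:
assert pick_instance_data_fn(
    '/run/cloud-init', 1000,
    exists=lambda p: True) == '/run/cloud-init/instance-data.json'
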
def load_json_or_yaml(data):
    if not data:
        return {}
    try:
        return util.load_json(data)
    except (json.JSONDecodeError, TypeError):
        return util.load_yaml(data)

def test_get_data_handles_redacted_unserializable_content(self):
    """get_data warns on unserializable content in INSTANCE_JSON_FILE."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({"run_dir": tmp}),
        custom_metadata={"key1": "val1", "key2": {"key2.1": self.paths}},
    )
    datasource.get_data()
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    expected_metadata = {
        "key1": "val1",
        "key2": {
            "key2.1": (
                "Warning: redacted unserializable type <class"
                " 'cloudinit.helpers.Paths'>"
            )
        },
    }
    instance_json = util.load_json(content)
    self.assertEqual(expected_metadata, instance_json["ds"]["meta_data"])

def test_get_data_write_json_instance_data(self):
    """get_data writes INSTANCE_JSON_FILE to run_dir as readonly root."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}))
    datasource.get_data()
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    expected = {
        'base64-encoded-keys': [],
        'v1': {
            'availability-zone': 'myaz',
            'cloud-name': 'subclasscloudname',
            'instance-id': 'iid-datasource',
            'local-hostname': 'test-subclass-hostname',
            'region': 'myregion'},
        'ds': {
            'meta-data': {'availability_zone': 'myaz',
                          'local-hostname': 'test-subclass-hostname',
                          'region': 'myregion'},
            'user-data': 'userdata_raw',
            'vendor-data': 'vendordata_raw'}}
    self.assertEqual(expected, util.load_json(content))
    file_stat = os.stat(json_file)
    self.assertEqual(0o600, stat.S_IMODE(file_stat.st_mode))

def test_get_data_base64encodes_unserializable_bytes(self):
    """On py3, get_data base64encodes any unserializable content."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({"run_dir": tmp}),
        custom_metadata={"key1": "val1", "key2": {"key2.1": b"\x123"}},
    )
    self.assertTrue(datasource.get_data())
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    instance_json = util.load_json(content)
    self.assertCountEqual(
        ["ds/meta_data/key2/key2.1"], instance_json["base64_encoded_keys"]
    )
    self.assertEqual(
        {"key1": "val1", "key2": {"key2.1": "EjM="}},
        instance_json["ds"]["meta_data"],
    )

def read_metadata(url, timeout=2, sec_between=2, retries=30):
    response = url_helper.readurl(
        url, timeout=timeout, sec_between=sec_between, retries=retries)
    if not response.ok():
        raise RuntimeError("unable to read metadata at %s" % url)
    return util.load_json(response.contents.decode())

def _read_ec2_metadata(self):
    path = self._path_join(
        self.base_path, "ec2", "latest", "meta-data.json")
    if not os.path.exists(path):
        return {}
    else:
        try:
            return util.load_json(self._path_read(path))
        except Exception as e:
            raise BrokenMetadata(
                "Failed to process path %s: %s" % (path, e))

def azure_location_to_simplestreams_region(self):
    """Convert location to simplestreams region"""
    location = self.location.lower().replace(' ', '')
    LOG.debug('finding location %s using simple streams', location)
    regions_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'regions.json')
    region_simplestreams_map = c_util.load_json(
        c_util.load_file(regions_file))
    return region_simplestreams_map.get(location, location)

def get_status_details(paths=None):
    """Return a 3-tuple of status, status_details and time of last event.

    @param paths: An initialized cloudinit.helpers.paths object.

    Values are obtained from parsing paths.run_dir/status.json.
    """
    if not paths:
        init = Init(ds_deps=[])
        init.read_cfg()
        paths = init.paths

    status = UXAppStatus.NOT_RUN
    status_detail = ""
    status_v1 = {}

    status_file = os.path.join(paths.run_dir, "status.json")
    result_file = os.path.join(paths.run_dir, "result.json")

    (is_disabled, reason) = _is_cloudinit_disabled(
        CLOUDINIT_DISABLED_FILE, paths)
    if is_disabled:
        status = UXAppStatus.DISABLED
        status_detail = reason
    if os.path.exists(status_file):
        if not os.path.exists(result_file):
            status = UXAppStatus.RUNNING
        status_v1 = load_json(load_file(status_file)).get("v1", {})
    errors = []
    latest_event = 0
    for key, value in sorted(status_v1.items()):
        if key == "stage":
            if value:
                status = UXAppStatus.RUNNING
                status_detail = "Running in stage: {0}".format(value)
        elif key == "datasource":
            status_detail = value
        elif isinstance(value, dict):
            errors.extend(value.get("errors", []))
            start = value.get("start") or 0
            finished = value.get("finished") or 0
            if finished == 0 and start != 0:
                status = UXAppStatus.RUNNING
            event_time = max(start, finished)
            if event_time > latest_event:
                latest_event = event_time
    if errors:
        status = UXAppStatus.ERROR
        status_detail = "\n".join(errors)
    elif status == UXAppStatus.NOT_RUN and latest_event > 0:
        status = UXAppStatus.DONE
    if latest_event:
        time = strftime("%a, %d %b %Y %H:%M:%S %z", gmtime(latest_event))
    else:
        time = ""
    return status, status_detail, time

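# A self-contained sketch of the aggregation loop above, run against a
# hypothetical status.json "v1" payload: each per-stage dict contributes
# its errors, and the newest start/finished timestamp becomes the time of
# the last event. A stage that started but never finished marks the run
# as still in progress.
from time import gmtime, strftime

status_v1 = {
    "datasource": "DataSourceNoCloud",
    "init": {"start": 1590000000.0, "finished": 1590000005.0, "errors": []},
    "modules-final": {"start": 1590000010.0, "finished": 0, "errors": []},
    "stage": None,
}
errors, latest_event = [], 0
for key, value in sorted(status_v1.items()):
    if isinstance(value, dict):
        errors.extend(value.get("errors", []))
        latest_event = max(
            latest_event,
            value.get("start") or 0,
            value.get("finished") or 0)
# modules-final started but never finished, so the run counts as RUNNING
# and the last event time comes from its start timestamp.
print(strftime("%a, %d %b %Y %H:%M:%S %z", gmtime(latest_event)))
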
def _get_meta_data(filepath):
    content = _read_file(filepath)
    if not content:
        return None

    try:
        content = util.load_json(content)
    except Exception:
        util.logexc(LOG, 'Failed parsing meta data file from json.')
        return None

    return content

def load_json_or_yaml(data):
    """
    load first attempts to unmarshal the provided data as JSON, and if
    that fails then attempts to unmarshal the data as YAML. If data is
    None then a new dictionary is returned.
    """
    if not data:
        return {}
    try:
        return util.load_json(data)
    except (json.JSONDecodeError, TypeError):
        return util.load_yaml(data)

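# An equivalent stand-alone sketch of the fallback above using the stdlib
# json module and PyYAML (assumed installed; it is what backs cloudinit's
# util.load_yaml). JSON parses on the first attempt; because YAML is a
# superset of JSON, the fallback also handles plain YAML mappings.
import json
import yaml

def load_json_or_yaml_demo(data):
    if not data:
        return {}
    try:
        return json.loads(data)
    except (json.JSONDecodeError, TypeError):
        return yaml.safe_load(data)

assert load_json_or_yaml_demo('{"a": 1}') == {'a': 1}
assert load_json_or_yaml_demo('a: 1') == {'a': 1}  # YAML fallback
assert load_json_or_yaml_demo(None) == {}
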
def _get_status_details(paths):
    """Return a 3-tuple of status, status_details and time of last event.

    @param paths: An initialized cloudinit.helpers.paths object.

    Values are obtained from parsing paths.run_dir/status.json.
    """
    status = STATUS_ENABLED_NOT_RUN
    status_detail = ''
    status_v1 = {}

    status_file = os.path.join(paths.run_dir, 'status.json')
    result_file = os.path.join(paths.run_dir, 'result.json')

    (is_disabled, reason) = _is_cloudinit_disabled(
        CLOUDINIT_DISABLED_FILE, paths)
    if is_disabled:
        status = STATUS_DISABLED
        status_detail = reason
    if os.path.exists(status_file):
        if not os.path.exists(result_file):
            status = STATUS_RUNNING
        status_v1 = load_json(load_file(status_file)).get('v1', {})
    errors = []
    latest_event = 0
    for key, value in sorted(status_v1.items()):
        if key == 'stage':
            if value:
                status = STATUS_RUNNING
                status_detail = 'Running in stage: {0}'.format(value)
        elif key == 'datasource':
            status_detail = value
        elif isinstance(value, dict):
            errors.extend(value.get('errors', []))
            start = value.get('start') or 0
            finished = value.get('finished') or 0
            if finished == 0 and start != 0:
                status = STATUS_RUNNING
            event_time = max(start, finished)
            if event_time > latest_event:
                latest_event = event_time
    if errors:
        status = STATUS_ERROR
        status_detail = '\n'.join(errors)
    elif status == STATUS_ENABLED_NOT_RUN and latest_event > 0:
        status = STATUS_DONE
    if latest_event:
        time = strftime('%a, %d %b %Y %H:%M:%S %z', gmtime(latest_event))
    else:
        time = ''
    return status, status_detail, time

def test_get_data_handles_bytes_values(self):
    """On py2 get_data handles bytes values without having to b64encode."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}),
        custom_metadata={'key1': 'val1', 'key2': {'key2.1': b'\x123'}})
    self.assertTrue(datasource.get_data())
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    instance_json = util.load_json(content)
    self.assertEqual([], instance_json['base64_encoded_keys'])
    self.assertEqual(
        {'key1': 'val1', 'key2': {'key2.1': '\x123'}},
        instance_json['ds']['meta_data'])

def _get_credentials(self):
    """Get credentials from environment"""
    LOG.debug('getting credentials from environment')
    cred_file = os.path.expanduser('~/.azure/credentials.json')
    try:
        azure_creds = c_util.load_json(c_util.load_file(cred_file))
        subscription_id = azure_creds['subscriptionId']
        credentials = ServicePrincipalCredentials(
            client_id=azure_creds['clientId'],
            secret=azure_creds['clientSecret'],
            tenant=azure_creds['tenantId'])
        return credentials, subscription_id
    except KeyError:
        raise RuntimeError(
            'Please configure Azure service principal'
            ' credentials in %s' % cred_file)

def test_get_data_base64encodes_unserializable_bytes(self):
    """On py3, get_data base64encodes any unserializable content."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}),
        custom_metadata={'key1': 'val1', 'key2': {'key2.1': b'\x123'}})
    self.assertTrue(datasource.get_data())
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    instance_json = util.load_json(content)
    self.assertCountEqual(
        ['ds/meta_data/key2/key2.1'],
        instance_json['base64_encoded_keys'])
    self.assertEqual(
        {'key1': 'val1', 'key2': {'key2.1': 'EjM='}},
        instance_json['ds']['meta_data'])

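# A quick stdlib check of the expected encoding above: b'\x123' is the two
# bytes 0x12 0x33, and base64 of those bytes is exactly 'EjM=' (cloudinit's
# util.b64e is, to my understanding, a thin wrapper over base64.b64encode).
import base64

assert base64.b64encode(b'\x123').decode('ascii') == 'EjM='
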
def handle(name, cfg, cloud, log, _args):
    if util.get_cfg_option_bool(cfg, "preserve_hostname", False):
        log.debug(
            "Configuration option 'preserve_hostname' is set,"
            " not setting the hostname in module %s",
            name,
        )
        return

    # Set prefer_fqdn_over_hostname value in distro
    hostname_fqdn = util.get_cfg_option_bool(
        cfg, "prefer_fqdn_over_hostname", None
    )
    if hostname_fqdn is not None:
        cloud.distro.set_option("prefer_fqdn_over_hostname", hostname_fqdn)

    (hostname, fqdn, is_default) = util.get_hostname_fqdn(cfg, cloud)
    # Check for previous successful invocation of set-hostname.
    # The set-hostname artifact file accounts for both hostname and fqdn
    # deltas. As such, its format differs from cc_update_hostname's
    # previous-hostname file, which only contains the base hostname.
    # TODO consolidate previous-hostname and set-hostname artifact files and
    # distro._read_hostname implementation so we only validate one artifact.
    prev_fn = os.path.join(cloud.get_cpath("data"), "set-hostname")
    prev_hostname = {}
    if os.path.exists(prev_fn):
        prev_hostname = util.load_json(util.load_file(prev_fn))
    hostname_changed = hostname != prev_hostname.get(
        "hostname"
    ) or fqdn != prev_hostname.get("fqdn")
    if not hostname_changed:
        log.debug("No hostname changes. Skipping set-hostname")
        return
    if is_default and hostname == "localhost":
        # https://github.com/systemd/systemd/commit/d39079fcaa05e23540d2b1f0270fa31c22a7e9f1
        log.debug("Hostname is localhost. Let other services handle this.")
        return
    log.debug("Setting the hostname to %s (%s)", fqdn, hostname)
    try:
        cloud.distro.set_hostname(hostname, fqdn)
    except Exception as e:
        msg = "Failed to set the hostname to %s (%s)" % (fqdn, hostname)
        util.logexc(log, msg)
        raise SetHostnameError("%s: %s" % (msg, e)) from e
    write_json(prev_fn, {"hostname": hostname, "fqdn": fqdn})

def test_non_utf8_encoding_gets_b64encoded(self):
    """When non-utf-8 values exist, py2 instance-data is b64encoded."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}),
        custom_metadata={'key1': 'val1', 'key2': {'key2.1': b'ab\xaadef'}})
    self.assertTrue(datasource.get_data())
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    instance_json = util.load_json(util.load_file(json_file))
    key21_value = instance_json['ds']['meta_data']['key2']['key2.1']
    self.assertEqual('ci-b64:' + util.b64e(b'ab\xaadef'), key21_value)

def _get_metadata_from_imds(retries):
    url = IMDS_URL + "instance?api-version=2017-12-01"
    headers = {"Metadata": "true"}
    try:
        response = readurl(
            url, timeout=1, headers=headers, retries=retries,
            exception_cb=retry_on_url_exc)
    except Exception as e:
        LOG.debug('Ignoring IMDS instance metadata: %s', e)
        return {}
    try:
        return util.load_json(str(response))
    except json.decoder.JSONDecodeError:
        LOG.warning(
            'Ignoring non-json IMDS instance metadata: %s', str(response))
    return {}

def _get_metadata_from_imds(retries):

    def retry_on_url_error(msg, exception):
        if isinstance(exception, UrlError) and exception.code == 404:
            return True  # Continue retries
        return False  # Stop retries on all other exceptions

    url = IMDS_URL + "instance?api-version=2017-12-01"
    headers = {"Metadata": "true"}
    try:
        response = readurl(
            url, timeout=1, headers=headers, retries=retries,
            exception_cb=retry_on_url_error)
    except Exception as e:
        LOG.debug('Ignoring IMDS instance metadata: %s', e)
        return {}
    try:
        return util.load_json(str(response))
    except json.decoder.JSONDecodeError:
        LOG.warning(
            'Ignoring non-json IMDS instance metadata: %s', str(response))
    return {}

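# A stand-alone sketch of the exception_cb contract used above: the url
# reader keeps retrying only while the callback returns True for the raised
# exception, so a 404 (metadata not ready yet) is retried while everything
# else fails fast. RetryableError and fetch_with_retries are hypothetical
# stand-ins for UrlError and readurl.
class RetryableError(Exception):
    def __init__(self, code):
        super().__init__("HTTP %d" % code)
        self.code = code

def fetch_with_retries(fetch, retries, exception_cb):
    for attempt in range(retries + 1):
        try:
            return fetch()
        except Exception as e:
            if attempt == retries or not exception_cb("fetch failed", e):
                raise

calls = []

def flaky_fetch():
    calls.append(1)
    if len(calls) < 3:
        raise RetryableError(404)  # retried: callback returns True
    return '{"compute": {}}'

body = fetch_with_retries(
    flaky_fetch, retries=5,
    exception_cb=lambda msg, e: (
        isinstance(e, RetryableError) and e.code == 404))
assert body == '{"compute": {}}' and len(calls) == 3
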