def test_rc_local_exists(self): """ This test is designed to verify the different scenarios associated with the presence of rclocal. """ # test when rc local does not exist postCust = PostCustomScript("test-cust", self.tmpDir) with mock.patch.object(CustomScriptConstant, "RC_LOCAL", "/no/path"): rclocal = postCust.find_rc_local() self.assertEqual("", rclocal) # test when rc local exists rclocalFile = self.tmp_path("vmware-rclocal", self.tmpDir) util.write_file(rclocalFile, "# Run post-reboot guest customization", omode="w") with mock.patch.object(CustomScriptConstant, "RC_LOCAL", rclocalFile): rclocal = postCust.find_rc_local() self.assertEqual(rclocalFile, rclocal) self.assertTrue(postCust.has_previous_agent, rclocal) # test when rc local is a symlink rclocalLink = self.tmp_path("dummy-rclocal-link", self.tmpDir) util.sym_link(rclocalFile, rclocalLink, True) with mock.patch.object(CustomScriptConstant, "RC_LOCAL", rclocalLink): rclocal = postCust.find_rc_local() self.assertEqual(rclocalFile, rclocal)
def install_chef_from_gems(ruby_version, chef_version, distro):
    """Install chef via rubygems.

    Installs the distro packages for the requested ruby series, ensures
    unversioned /usr/bin/gem and /usr/bin/ruby entry points exist, then
    gem-installs chef (pinned when a version is given).

    @param ruby_version: ruby series whose packages to install (e.g. "2.7").
    @param chef_version: exact chef gem version to pin, or falsy for latest.
    @param distro: distro object used to install the OS-level packages.
    """
    distro.install_packages(get_ruby_packages(ruby_version))
    # Some distros only ship versioned binaries (e.g. /usr/bin/gem2.7);
    # link the plain names so the gem invocation below works everywhere.
    if not os.path.exists("/usr/bin/gem"):
        util.sym_link("/usr/bin/gem%s" % ruby_version, "/usr/bin/gem")
    if not os.path.exists("/usr/bin/ruby"):
        util.sym_link("/usr/bin/ruby%s" % ruby_version, "/usr/bin/ruby")
    # Build the gem command once; the two previous copies of this call
    # differed only by the optional "-v <version>" pin.
    cmd = ["/usr/bin/gem", "install", "chef"]
    if chef_version:
        cmd.append("-v %s" % chef_version)
    cmd.extend(["--no-ri", "--no-rdoc", "--bindir", "/usr/bin", "-q"])
    subp.subp(cmd, capture=False)
def install_chef_from_gems(ruby_version, chef_version, distro):
    """Install chef via rubygems.

    Installs the distro packages for the requested ruby series, ensures
    unversioned /usr/bin/gem and /usr/bin/ruby entry points exist, then
    gem-installs chef (pinned when a version is given).

    @param ruby_version: ruby series whose packages to install (e.g. '2.7').
    @param chef_version: exact chef gem version to pin, or falsy for latest.
    @param distro: distro object used to install the OS-level packages.
    """
    distro.install_packages(get_ruby_packages(ruby_version))
    # Some distros only ship versioned binaries (e.g. /usr/bin/gem2.7);
    # link the plain names so the gem invocation below works everywhere.
    if not os.path.exists('/usr/bin/gem'):
        util.sym_link('/usr/bin/gem%s' % ruby_version, '/usr/bin/gem')
    if not os.path.exists('/usr/bin/ruby'):
        util.sym_link('/usr/bin/ruby%s' % ruby_version, '/usr/bin/ruby')
    # Build the gem command once; the two previous copies of this call
    # differed only by the optional '-v <version>' pin.
    cmd = ['/usr/bin/gem', 'install', 'chef']
    if chef_version:
        cmd.append('-v %s' % chef_version)
    cmd.extend(['--no-ri', '--no-rdoc', '--bindir', '/usr/bin', '-q'])
    util.subp(cmd, capture=False)
def _reflect_cur_instance(self):
    """Point the instance symlink at the current instance dir and
    record current/previous datasource and instance-id markers.

    @return: the current instance id reported by the datasource.
    """
    # Remove the old symlink and attach a new one so
    # that further reads/writes connect into the right location
    idir = self._get_ipath()
    util.del_file(self.paths.instance_link)
    util.sym_link(idir, self.paths.instance_link)
    # Ensures these dirs exist
    dir_list = []
    for d in self._get_instance_subdirs():
        dir_list.append(os.path.join(idir, d))
    util.ensure_dirs(dir_list)
    # Write out information on what is being used for the current instance
    # and what may have been used for a previous instance...
    dp = self.paths.get_cpath("data")
    # Write what the datasource was and is..
    ds = "%s: %s" % (type_utils.obj_name(self.datasource), self.datasource)
    previous_ds = None
    ds_fn = os.path.join(idir, "datasource")
    try:
        previous_ds = util.load_file(ds_fn).strip()
    except Exception:
        # Best-effort read: first boot has no prior datasource file.
        pass
    if not previous_ds:
        # No prior record: treat the current datasource as the previous one.
        previous_ds = ds
    util.write_file(ds_fn, "%s\n" % ds)
    util.write_file(
        os.path.join(dp, "previous-datasource"), "%s\n" % (previous_ds)
    )
    # What the instance id was and is...
    iid = self.datasource.get_instance_id()
    iid_fn = os.path.join(dp, "instance-id")
    previous_iid = self.previous_iid()
    util.write_file(iid_fn, "%s\n" % iid)
    # Also publish the id under the run dir for other consumers.
    util.write_file(self.paths.get_runpath("instance_id"), "%s\n" % iid)
    util.write_file(
        os.path.join(dp, "previous-instance-id"), "%s\n" % (previous_iid)
    )
    self._write_to_cache()
    # Ensure needed components are regenerated
    # after change of instance which may cause
    # change of configuration
    self._reset()
    return iid
def _reflect_cur_instance(self):
    """Point the instance symlink at the current instance dir and
    record current/previous datasource and instance-id markers.

    @return: the current instance id reported by the datasource.
    """
    # Remove the old symlink and attach a new one so
    # that further reads/writes connect into the right location
    idir = self._get_ipath()
    util.del_file(self.paths.instance_link)
    util.sym_link(idir, self.paths.instance_link)
    # Ensures these dirs exist
    dir_list = []
    for d in self._get_instance_subdirs():
        dir_list.append(os.path.join(idir, d))
    util.ensure_dirs(dir_list)
    # Write out information on what is being used for the current instance
    # and what may have been used for a previous instance...
    dp = self.paths.get_cpath('data')
    # Write what the datasource was and is..
    ds = "%s: %s" % (type_utils.obj_name(self.datasource), self.datasource)
    previous_ds = None
    ds_fn = os.path.join(idir, 'datasource')
    try:
        previous_ds = util.load_file(ds_fn).strip()
    except Exception:
        # Best-effort read: first boot has no prior datasource file.
        pass
    if not previous_ds:
        # No prior record: treat the current datasource as the previous one.
        previous_ds = ds
    util.write_file(ds_fn, "%s\n" % ds)
    util.write_file(os.path.join(dp, 'previous-datasource'),
                    "%s\n" % (previous_ds))
    # What the instance id was and is...
    iid = self.datasource.get_instance_id()
    previous_iid = None
    iid_fn = os.path.join(dp, 'instance-id')
    try:
        previous_iid = util.load_file(iid_fn).strip()
    except Exception:
        # Best-effort read: first boot has no prior instance-id file.
        pass
    if not previous_iid:
        # No prior record: treat the current id as the previous one.
        previous_iid = iid
    util.write_file(iid_fn, "%s\n" % iid)
    util.write_file(os.path.join(dp, 'previous-instance-id'),
                    "%s\n" % (previous_iid))
    # Ensure needed components are regenerated
    # after change of instance which may cause
    # change of configuration
    self._reset()
    return iid
def set_timezone(self, tz):
    """Configure the system timezone to *tz*.

    On systemd systems the localtime symlink is created by hand; on
    non-systemd systems the sysconfig clock file is updated and the
    zoneinfo file is copied into place.
    """
    zone_info = self._find_tz_file(tz)
    if not self.uses_systemd():
        # Adjust the sysconfig clock zone setting
        rhel_util.update_sysconfig_file(
            self.clock_conf_fn, {'ZONE': str(tz)})
        # This ensures that the correct tz will be used for the system
        util.copy(zone_info, self.tz_local_fn)
        return
    # Currently, timedatectl complains if invoked during startup
    # so for compatibility, create the link manually.
    util.del_file(self.tz_local_fn)
    util.sym_link(zone_info, self.tz_local_fn)
def set_timezone(self, tz):
    """Configure the system timezone to *tz*.

    On systemd systems the localtime symlink is created by hand; on
    non-systemd systems the sysconfig clock file is updated and the
    zoneinfo file is copied into place.
    """
    zone_info = self._find_tz_file(tz)
    if not self._dist_uses_systemd():
        # Adjust the sysconfig clock zone setting
        rhel_util.update_sysconfig_file(
            self.clock_conf_fn, {'ZONE': str(tz)})
        # This ensures that the correct tz will be used for the system
        util.copy(zone_info, self.tz_local_fn)
        return
    # Currently, timedatectl complains if invoked during startup
    # so for compatibility, create the link manually.
    util.del_file(self.tz_local_fn)
    util.sym_link(zone_info, self.tz_local_fn)
def test_remove_artifacts_removes_unlinks_symlinks(self):
    """remove_artifacts cleans artifacts dir unlinking any symlinks."""
    # Create a real directory plus a symlink pointing at it.
    target_dir = os.path.join(self.artifact_dir, 'dir1')
    ensure_dir(target_dir)
    link_path = os.path.join(self.artifact_dir, 'mylink')
    sym_link(target_dir, link_path)
    retcode = wrap_and_call(
        'cloudinit.cmd.clean',
        {'Init': {'side_effect': self.init_class}},
        clean.remove_artifacts, remove_logs=False)
    self.assertEqual(0, retcode)
    # Both the directory and the symlink must be gone afterwards.
    for path in (target_dir, link_path):
        self.assertFalse(
            os.path.exists(path), 'Unexpected {0} dir'.format(path))
def test_remove_artifacts_removes_unlinks_symlinks(self):
    """remove_artifacts cleans artifacts dir unlinking any symlinks."""
    # Create a real directory plus a symlink pointing at it.
    target_dir = os.path.join(self.artifact_dir, "dir1")
    ensure_dir(target_dir)
    link_path = os.path.join(self.artifact_dir, "mylink")
    sym_link(target_dir, link_path)
    retcode = wrap_and_call(
        "cloudinit.cmd.clean",
        {"Init": {"side_effect": self.init_class}},
        clean.remove_artifacts,
        remove_logs=False,
    )
    self.assertEqual(0, retcode)
    # Both the directory and the symlink must be gone afterwards.
    for path in (target_dir, link_path):
        self.assertFalse(
            os.path.exists(path), "Unexpected {0} dir".format(path)
        )
def status_wrapper(name, args, data_d=None, link_d=None):
    """Run a boot stage while maintaining status.json / result.json.

    Wraps the stage callable carried in ``args.action``, recording per-stage
    start/finish times and errors in status.json under *data_d* (symlinked
    from *link_d*).  After the 'modules-final' stage, result.json is written
    with every stage's accumulated errors.

    @param name: 'init' or 'modules'; anything else raises ValueError.
    @param args: parsed CLI namespace providing .action, .local and .mode.
    @param data_d: status directory; defaults to /var/lib/cloud/data.
    @param link_d: symlink directory; defaults to /run/cloud-init.
    @return: number of errors recorded for the executed stage.
    """
    if data_d is None:
        data_d = os.path.normpath("/var/lib/cloud/data")
    if link_d is None:
        link_d = os.path.normpath("/run/cloud-init")
    status_path = os.path.join(data_d, "status.json")
    status_link = os.path.join(link_d, "status.json")
    result_path = os.path.join(data_d, "result.json")
    result_link = os.path.join(link_d, "result.json")
    util.ensure_dirs((data_d, link_d,))
    (_name, functor) = args.action
    # Map the cli stage name (+ --local flag) onto the internal mode key.
    if name == "init":
        if args.local:
            mode = "init-local"
        else:
            mode = "init"
    elif name == "modules":
        mode = "modules-%s" % args.mode
    else:
        raise ValueError("unknown name: %s" % name)
    modes = ('init', 'init-local', 'modules-config', 'modules-final')
    status = None
    if mode == 'init-local':
        # init-local is the first stage of a boot: clear stale state.
        for f in (status_link, result_link, status_path, result_path):
            util.del_file(f)
    else:
        try:
            status = json.loads(util.load_file(status_path))
        except Exception:
            # Missing/corrupt status.json: start from a fresh status below.
            pass
    if status is None:
        nullstatus = {
            'errors': [],
            'start': None,
            'finished': None,
        }
        status = {'v1': {}}
        for m in modes:
            # NOTE(review): .copy() is shallow, so every mode initially
            # shares the same 'errors' list object; safe only because the
            # code below always rebinds v1[mode]['errors'] to a new list.
            status['v1'][m] = nullstatus.copy()
        status['v1']['datasource'] = None
    v1 = status['v1']
    v1['stage'] = mode
    v1[mode]['start'] = time.time()
    # Publish the in-progress status before running the stage.
    atomic_helper.write_json(status_path, status)
    util.sym_link(os.path.relpath(status_path, link_d), status_link,
                  force=True)
    try:
        ret = functor(name, args)
        if mode in ('init', 'init-local'):
            # init stages return (datasource, errors).
            (datasource, errors) = ret
            if datasource is not None:
                v1['datasource'] = str(datasource)
        else:
            # module stages return just the error list.
            errors = ret
        v1[mode]['errors'] = [str(e) for e in errors]
    except Exception as e:
        util.logexc(LOG, "failed stage %s", mode)
        print_exc("failed run of stage %s" % mode)
        v1[mode]['errors'] = [str(e)]
    v1[mode]['finished'] = time.time()
    v1['stage'] = None
    atomic_helper.write_json(status_path, status)
    if mode == "modules-final":
        # write the 'finished' file
        errors = []
        for m in modes:
            if v1[m]['errors']:
                errors.extend(v1[m].get('errors', []))
        atomic_helper.write_json(
            result_path,
            {'v1': {'datasource': v1['datasource'], 'errors': errors}})
        util.sym_link(os.path.relpath(result_path, link_d), result_link,
                      force=True)
    return len(v1[mode]['errors'])
def persist_instance_data(self):
    """Process and write INSTANCE_JSON_FILE with all instance metadata.

    Replace any hyphens with underscores in key names for use in
    template processing.

    @return True on successful write, False otherwise.
    """
    if hasattr(self, "_crawled_metadata"):
        # Any datasource with _crawled_metadata will best represent
        # most recent, 'raw' metadata
        crawled_metadata = copy.deepcopy(
            getattr(self, "_crawled_metadata")
        )
        # user-data/vendor-data are persisted elsewhere; drop them here.
        crawled_metadata.pop("user-data", None)
        crawled_metadata.pop("vendor-data", None)
        instance_data = {"ds": crawled_metadata}
    else:
        instance_data = {"ds": {"meta_data": self.metadata}}
    # Only include optional attributes that have real (non-UNSET) values.
    if hasattr(self, "network_json"):
        network_json = getattr(self, "network_json")
        if network_json != UNSET:
            instance_data["ds"]["network_json"] = network_json
    if hasattr(self, "ec2_metadata"):
        ec2_metadata = getattr(self, "ec2_metadata")
        if ec2_metadata != UNSET:
            instance_data["ds"]["ec2_metadata"] = ec2_metadata
    instance_data["ds"]["_doc"] = EXPERIMENTAL_TEXT
    # Add merged cloud.cfg and sys info for jinja templates and cli query
    instance_data["merged_cfg"] = copy.deepcopy(self.sys_cfg)
    instance_data["merged_cfg"]["_doc"] = (
        "Merged cloud-init system config from /etc/cloud/cloud.cfg and"
        " /etc/cloud/cloud.cfg.d/")
    instance_data["sys_info"] = util.system_info()
    instance_data.update(self._get_standardized_metadata(instance_data))
    try:
        # Process content base64encoding unserializable values
        content = util.json_dumps(instance_data)
        # Strip base64: prefix and set base64_encoded_keys list.
        processed_data = process_instance_metadata(
            json.loads(content),
            sensitive_keys=self.sensitive_metadata_keys,
        )
    except TypeError as e:
        LOG.warning("Error persisting instance-data.json: %s", str(e))
        return False
    except UnicodeDecodeError as e:
        LOG.warning("Error persisting instance-data.json: %s", str(e))
        return False
    json_sensitive_file = os.path.join(self.paths.run_dir,
                                       INSTANCE_JSON_SENSITIVE_FILE)
    cloud_id = instance_data["v1"].get("cloud_id", "none")
    cloud_id_file = os.path.join(self.paths.run_dir, "cloud-id")
    # Write a cloud-id-<id> marker file, then repoint the stable
    # 'cloud-id' symlink at it, removing the previous marker if the
    # cloud id changed.
    util.write_file(f"{cloud_id_file}-{cloud_id}", f"{cloud_id}\n")
    if os.path.exists(cloud_id_file):
        prev_cloud_id_file = os.path.realpath(cloud_id_file)
    else:
        prev_cloud_id_file = cloud_id_file
    util.sym_link(f"{cloud_id_file}-{cloud_id}", cloud_id_file,
                  force=True)
    if prev_cloud_id_file != cloud_id_file:
        util.del_file(prev_cloud_id_file)
    # Unredacted copy is restricted to root (0600).
    write_json(json_sensitive_file, processed_data, mode=0o600)
    json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
    # World readable
    write_json(json_file, redact_sensitive_keys(processed_data))
    return True
def _create_network_symlink(interface_name):
    """Ensure an init.d net.<iface> service symlink exists for *interface_name*."""
    link_path = '/etc/init.d/net.{name}'.format(name=interface_name)
    if util.is_link(link_path):
        # Already linked; nothing to do.
        return
    util.sym_link('/etc/init.d/net.lo', link_path)
def status_wrapper(name, args, data_d=None, link_d=None):
    """Run a boot stage while maintaining status.json / result.json.

    Wraps the stage callable carried in ``args.action``, recording per-stage
    start/finish times and errors in status.json under *data_d* (symlinked
    from *link_d*).  After the 'modules-final' stage, result.json is written
    with every stage's accumulated errors.

    @param name: 'init' or 'modules'; anything else raises ValueError.
    @param args: parsed CLI namespace providing .action, .local and .mode.
    @param data_d: status directory; defaults to /var/lib/cloud/data.
    @param link_d: symlink directory; defaults to /run/cloud-init.
    @return: number of errors recorded for the executed stage.
    """
    if data_d is None:
        data_d = os.path.normpath("/var/lib/cloud/data")
    if link_d is None:
        link_d = os.path.normpath("/run/cloud-init")
    status_path = os.path.join(data_d, "status.json")
    status_link = os.path.join(link_d, "status.json")
    result_path = os.path.join(data_d, "result.json")
    result_link = os.path.join(link_d, "result.json")
    util.ensure_dirs(
        (
            data_d,
            link_d,
        )
    )
    (_name, functor) = args.action
    # Map the cli stage name (+ --local flag) onto the internal mode key.
    if name == "init":
        if args.local:
            mode = "init-local"
        else:
            mode = "init"
    elif name == "modules":
        mode = "modules-%s" % args.mode
    else:
        raise ValueError("unknown name: %s" % name)
    modes = (
        "init",
        "init-local",
        "modules-init",
        "modules-config",
        "modules-final",
    )
    if mode not in modes:
        raise ValueError(
            "Invalid cloud init mode specified '{0}'".format(mode)
        )
    status = None
    if mode == "init-local":
        # init-local is the first stage of a boot: clear stale state.
        for f in (status_link, result_link, status_path, result_path):
            util.del_file(f)
    else:
        try:
            status = json.loads(util.load_file(status_path))
        except Exception:
            # Missing/corrupt status.json: start from a fresh status below.
            pass
    nullstatus = {
        "errors": [],
        "start": None,
        "finished": None,
    }
    if status is None:
        status = {"v1": {}}
        status["v1"]["datasource"] = None
    for m in modes:
        if m not in status["v1"]:
            # NOTE(review): .copy() is shallow, so newly-added modes share
            # one 'errors' list object; safe only because the code below
            # always rebinds v1[mode]['errors'] to a new list.
            status["v1"][m] = nullstatus.copy()
    v1 = status["v1"]
    v1["stage"] = mode
    v1[mode]["start"] = time.time()
    # Publish the in-progress status before running the stage.
    atomic_helper.write_json(status_path, status)
    util.sym_link(
        os.path.relpath(status_path, link_d), status_link, force=True
    )
    try:
        ret = functor(name, args)
        if mode in ("init", "init-local"):
            # init stages return (datasource, errors).
            (datasource, errors) = ret
            if datasource is not None:
                v1["datasource"] = str(datasource)
        else:
            # module stages return just the error list.
            errors = ret
        v1[mode]["errors"] = [str(e) for e in errors]
    except Exception as e:
        util.logexc(LOG, "failed stage %s", mode)
        print_exc("failed run of stage %s" % mode)
        v1[mode]["errors"] = [str(e)]
    v1[mode]["finished"] = time.time()
    v1["stage"] = None
    atomic_helper.write_json(status_path, status)
    if mode == "modules-final":
        # write the 'finished' file
        errors = []
        for m in modes:
            if v1[m]["errors"]:
                errors.extend(v1[m].get("errors", []))
        atomic_helper.write_json(
            result_path,
            {"v1": {"datasource": v1["datasource"], "errors": errors}},
        )
        util.sym_link(
            os.path.relpath(result_path, link_d), result_link, force=True
        )
    return len(v1[mode]["errors"])
def status_wrapper(name, args, data_d=None, link_d=None):
    """Run a boot stage while maintaining status.json / result.json.

    Wraps the stage callable carried in ``args.action``, recording per-stage
    start/finish times and errors in status.json under *data_d* (symlinked
    from *link_d*).  After the 'modules-final' stage, result.json is written
    with every stage's accumulated errors.

    @param name: 'init' or 'modules'; anything else raises ValueError.
    @param args: parsed CLI namespace providing .action, .local and .mode.
    @param data_d: status directory; defaults to /var/lib/cloud/data.
    @param link_d: symlink directory; defaults to /run/cloud-init.
    @return: number of errors recorded for the executed stage.
    """
    if data_d is None:
        data_d = os.path.normpath("/var/lib/cloud/data")
    if link_d is None:
        link_d = os.path.normpath("/run/cloud-init")
    status_path = os.path.join(data_d, "status.json")
    status_link = os.path.join(link_d, "status.json")
    result_path = os.path.join(data_d, "result.json")
    result_link = os.path.join(link_d, "result.json")
    util.ensure_dirs((data_d, link_d,))
    (_name, functor) = args.action
    # Map the cli stage name (+ --local flag) onto the internal mode key.
    if name == "init":
        if args.local:
            mode = "init-local"
        else:
            mode = "init"
    elif name == "modules":
        mode = "modules-%s" % args.mode
    else:
        raise ValueError("unknown name: %s" % name)
    modes = ('init', 'init-local', 'modules-config', 'modules-final')
    status = None
    if mode == 'init-local':
        # init-local is the first stage of a boot: clear stale state.
        for f in (status_link, result_link, status_path, result_path):
            util.del_file(f)
    else:
        try:
            status = json.loads(util.load_file(status_path))
        except Exception:
            # Missing/corrupt status.json: start from a fresh status below.
            pass
    if status is None:
        nullstatus = {
            'errors': [],
            'start': None,
            'finished': None,
        }
        status = {'v1': {}}
        for m in modes:
            # NOTE(review): .copy() is shallow, so every mode initially
            # shares the same 'errors' list object; safe only because the
            # code below always rebinds v1[mode]['errors'] to a new list.
            status['v1'][m] = nullstatus.copy()
        status['v1']['datasource'] = None
    v1 = status['v1']
    v1['stage'] = mode
    v1[mode]['start'] = time.time()
    # Publish the in-progress status before running the stage.
    atomic_write_json(status_path, status)
    util.sym_link(os.path.relpath(status_path, link_d), status_link,
                  force=True)
    try:
        ret = functor(name, args)
        if mode in ('init', 'init-local'):
            # init stages return (datasource, errors).
            (datasource, errors) = ret
            if datasource is not None:
                v1['datasource'] = str(datasource)
        else:
            # module stages return just the error list.
            errors = ret
        v1[mode]['errors'] = [str(e) for e in errors]
    except Exception as e:
        util.logexc(LOG, "failed stage %s", mode)
        print_exc("failed run of stage %s" % mode)
        v1[mode]['errors'] = [str(e)]
    v1[mode]['finished'] = time.time()
    v1['stage'] = None
    atomic_write_json(status_path, status)
    if mode == "modules-final":
        # write the 'finished' file
        errors = []
        for m in modes:
            if v1[m]['errors']:
                errors.extend(v1[m].get('errors', []))
        atomic_write_json(result_path,
                          {'v1': {'datasource': v1['datasource'],
                                  'errors': errors}})
        util.sym_link(os.path.relpath(result_path, link_d), result_link,
                      force=True)
    return len(v1[mode]['errors'])