def test_apt_v3_src_basic_tri(self):
    """test_apt_v3_src_basic_tri - Test multiple fix deb source strings"""
    # One cfg entry per target list file; each value is a full 'deb' line.
    cfg = {self.aptlistfile: {'source':
                              ('deb http://test.ubuntu.com/ubuntu'
                               ' karmic-backports'
                               ' main universe multiverse restricted')},
           self.aptlistfile2: {'source':
                               ('deb http://test.ubuntu.com/ubuntu'
                                ' precise-backports'
                                ' main universe multiverse restricted')},
           self.aptlistfile3: {'source':
                               ('deb http://test.ubuntu.com/ubuntu'
                                ' lucid-backports'
                                ' main universe multiverse restricted')}}
    # helper verifies the first file; it also runs the handler itself
    self._apt_src_basic(self.aptlistfile, cfg)

    # extra verify on two extra files of this test
    contents = util.load_file(self.aptlistfile2)
    self.assertTrue(re.search(r"%s %s %s %s\n" %
                              ("deb", "http://test.ubuntu.com/ubuntu",
                               "precise-backports",
                               "main universe multiverse restricted"),
                              contents, flags=re.IGNORECASE))
    contents = util.load_file(self.aptlistfile3)
    self.assertTrue(re.search(r"%s %s %s %s\n" %
                              ("deb", "http://test.ubuntu.com/ubuntu",
                               "lucid-backports",
                               "main universe multiverse restricted"),
                              contents, flags=re.IGNORECASE))
def test_add_assertions_adds_assertions_as_dict(self, m_subp):
    """When provided with a dict, add_assertions adds all assertions."""
    # Sanity-check the module constant before patching it below.
    self.assertEqual(
        ASSERTIONS_FILE, '/var/lib/cloud/instance/snapd.assertions')
    assert_file = self.tmp_path('snapd.assertions', dir=self.tmp)
    assertions = {'00': SYSTEM_USER_ASSERTION, '01': ACCOUNT_ASSERTION}
    wrap_and_call(
        'cloudinit.config.cc_snap',
        {'ASSERTIONS_FILE': {'new': assert_file}},
        add_assertions, assertions)
    # Both assertions are logged ...
    self.assertIn(
        'Importing user-provided snap assertions', self.logs.getvalue())
    self.assertIn(
        "DEBUG: Snap acking: ['type: system-user', 'authority-id: Lqv",
        self.logs.getvalue())
    self.assertIn(
        "DEBUG: Snap acking: ['type: account-key', 'authority-id: canonic",
        self.logs.getvalue())
    # ... acked exactly once via 'snap ack <file>' ...
    self.assertEqual(
        [mock.call(['snap', 'ack', assert_file], capture=True)],
        m_subp.call_args_list)
    # ... and written to the assertions file joined by newlines.
    compare_file = self.tmp_path('comparison', dir=self.tmp)
    combined = '\n'.join(assertions.values())
    util.write_file(compare_file, combined.encode('utf-8'))
    self.assertEqual(
        util.load_file(compare_file), util.load_file(assert_file))
def _load_merge_files(self):
    """Pair merge-source fixture files with their expected-output files.

    Returns a list of [source_file_contents, [expected_dict, expected_fn]]
    entries, one per numeric fixture id, in ascending id order.
    """
    root = self.resourceLocation('merge_sources')
    grouped = collections.defaultdict(list)
    expected_by_id = {}
    for path in glob.glob(os.path.join(root, SOURCE_PAT)):
        match = re.match(r"source(\d+)\-(\d+)[.]yaml", os.path.basename(path))
        if not match:
            raise IOError("File %s does not have a numeric identifier"
                          % (path))
        ident = int(match.group(1))
        grouped[ident].append(path)
        expected_path = os.path.join(root, EXPECTED_PAT % (ident))
        if not os.path.isfile(expected_path):
            raise IOError("No expected file found at %s" % (expected_path))
        expected_by_id[ident] = expected_path
    cases = []
    for ident in sorted(grouped):
        sources = [[path, util.load_file(path)]
                   for path in sorted(grouped[ident])]
        expected = util.load_yaml(util.load_file(expected_by_id[ident]))
        cases.append([sources, [expected, expected_by_id[ident]]])
    return cases
def test_apt_v3_src_keyid_tri(self):
    """test_apt_v3_src_keyid_tri - Test multiple src+key+filen writes"""
    # Three sources sharing one keyid; the 'ignored' entry shows that an
    # explicit 'filename' key overrides the dictionary key as target file.
    cfg = {self.aptlistfile: {'source': ('deb '
                                         'http://ppa.launchpad.net/'
                                         'smoser/cloud-init-test/ubuntu'
                                         ' xenial main'),
                              'keyid': "03683F77"},
           'ignored': {'source': ('deb '
                                  'http://ppa.launchpad.net/'
                                  'smoser/cloud-init-test/ubuntu'
                                  ' xenial universe'),
                       'keyid': "03683F77",
                       'filename': self.aptlistfile2},
           self.aptlistfile3: {'source': ('deb '
                                          'http://ppa.launchpad.net/'
                                          'smoser/cloud-init-test/ubuntu'
                                          ' xenial multiverse'),
                               'keyid': "03683F77"}}

    # helper verifies the first file and that 3 keys were imported
    self._apt_src_keyid(self.aptlistfile, cfg, 3)
    contents = util.load_file(self.aptlistfile2)
    self.assertTrue(re.search(r"%s %s %s %s\n" %
                              ("deb",
                               ('http://ppa.launchpad.net/smoser/'
                                'cloud-init-test/ubuntu'),
                               "xenial", "universe"),
                              contents, flags=re.IGNORECASE))
    contents = util.load_file(self.aptlistfile3)
    self.assertTrue(re.search(r"%s %s %s %s\n" %
                              ("deb",
                               ('http://ppa.launchpad.net/smoser/'
                                'cloud-init-test/ubuntu'),
                               "xenial", "multiverse"),
                              contents, flags=re.IGNORECASE))
def test_main_init_run_net_runs_modules(self):
    """Modules like write_files are run in 'net' mode."""
    cmdargs = myargs(
        debug=False, files=None, force=False, local=False, reporter=None,
        subcommand='init')
    (_item1, item2) = wrap_and_call(
        'cloudinit.cmd.main',
        {'util.close_stdin': True,
         'netinfo.debug_info': 'my net debug info',
         'util.fixup_output': ('outfmt', 'errfmt')},
        main.main_init, 'init', cmdargs)
    self.assertEqual([], item2)
    # Instancify is called
    instance_id_path = 'var/lib/cloud/data/instance-id'
    # FIX: removed a pointless one-argument os.path.join() that wrapped
    # load_file(); compare the file content directly.
    self.assertEqual(
        'iid-datasource-none\n',
        load_file(os.path.join(self.new_root, instance_id_path)))
    # modules are run (including write_files)
    self.assertEqual(
        'blah', load_file(os.path.join(self.new_root, 'etc/blah.ini')))
    expected_logs = [
        'network config is disabled by fallback',  # apply_network_config
        'my net debug info',  # netinfo.debug_info
        'no previous run detected'
    ]
    for log in expected_logs:
        self.assertIn(log, self.stderr.getvalue())
def read_user_data_callback(mount_dir):
    """Callback applied by util.mount_cb() on the mounted file.

    Deltacloud file name contains deltacloud. Those not using Deltacloud
    but instead instrumenting the injection, could drop deltacloud from
    the file name.

    Input:
        mount_dir - Mount directory
    Returns:
        User Data, or None when neither candidate file could be read.
    """
    deltacloud_file = mount_dir + '/deltacloud-user-data.txt'
    fallback_file = mount_dir + '/user-data.txt'
    # Prefer the deltacloud-specific file name.
    try:
        return util.load_file(deltacloud_file).strip()
    except IOError:
        pass
    # Fall back to the generic name; log only when both attempts fail.
    try:
        return util.load_file(fallback_file).strip()
    except IOError:
        util.logexc(LOG, 'Failed accessing user data file.')
        return None
def test_certificats_written(self):
    # check public-cert and private-cert keys in config get written
    cfg = {'loglevel': 'debug',
           'public-cert': "this is my public-certificate",
           'private-cert': "secret private certificate"}

    cc_mcollective.configure(
        config=cfg, server_cfg=self.server_cfg,
        pricert_file=self.pricert_file, pubcert_file=self.pubcert_file)

    found = configobj.ConfigObj(self.server_cfg)

    # make sure these didnt get written in
    self.assertFalse('public-cert' in found)
    self.assertFalse('private-cert' in found)

    # these need updating to the specified paths
    self.assertEqual(found['plugin.ssl_server_public'], self.pubcert_file)
    self.assertEqual(found['plugin.ssl_server_private'], self.pricert_file)

    # and the security provider should be ssl
    self.assertEqual(found['securityprovider'], 'ssl')

    # the certificate bodies land in the dedicated files, not the config
    self.assertEqual(
        util.load_file(self.pricert_file), cfg['private-cert'])
    self.assertEqual(
        util.load_file(self.pubcert_file), cfg['public-cert'])
def device_part_info(devpath):
    """Resolve a partition device entry to its parent disk and number.

    Accepts input such as /dev/vdb1 or /dev/disk/by-label/foo and returns
    a tuple of (parent disk devpath, partition number string).

    Raises ValueError when the device has no /sys/class/block entry and
    TypeError when the device is not a partition.
    """
    # realpath is hopefully a real-ish path in /dev (vda, sdb..)
    real = os.path.realpath(devpath)
    name = os.path.basename(real)

    syspath = "/sys/class/block/%s" % name
    if not os.path.exists(syspath):
        raise ValueError("%s had no syspath (%s)" % (devpath, syspath))

    part_file = os.path.join(syspath, "partition")
    if not os.path.exists(part_file):
        raise TypeError("%s not a partition" % devpath)
    ptnum = util.load_file(part_file).rstrip()

    # for a partition, real syspath is something like:
    # /sys/devices/pci0000:00/0000:00:04.0/virtio1/block/vda/vda1
    disk_sysdir = os.path.dirname(os.path.realpath(syspath))
    majmin = util.load_file(os.path.join(disk_sysdir, "dev")).rstrip()

    # udev keeps /dev/block/<maj:min> links pointing at the named device
    diskdevpath = os.path.realpath("/dev/block/%s" % majmin)
    return (diskdevpath, ptnum)
def test_collect_logs_includes_optional_userdata(self, m_getuid):
    """collect-logs include userdata when --include-userdata is set."""
    m_getuid.return_value = 0  # run "as root" so userdata may be read
    log1 = self.tmp_path('cloud-init.log', self.new_root)
    write_file(log1, 'cloud-init-log')
    log2 = self.tmp_path('cloud-init-output.log', self.new_root)
    write_file(log2, 'cloud-init-output-log')
    userdata = self.tmp_path('user-data.txt', self.new_root)
    write_file(userdata, 'user-data')
    ensure_dir(self.run_dir)
    write_file(self.tmp_path('results.json', self.run_dir), 'results')
    write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir),
               'sensitive')
    output_tarfile = self.tmp_path('logs.tgz')

    date = datetime.utcnow().date().strftime('%Y-%m-%d')
    date_logdir = 'cloud-init-logs-{0}'.format(date)

    version_out = '/usr/bin/cloud-init 18.2fake\n'
    # every command collect_logs is expected to run, with canned output
    expected_subp = {
        ('dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'):
            '0.7fake',
        ('cloud-init', '--version'): version_out,
        ('dmesg',): 'dmesg-out\n',
        ('journalctl', '--boot=0', '-o', 'short-precise'): 'journal-out\n',
        ('tar', 'czvf', output_tarfile, date_logdir): ''
    }

    def fake_subp(cmd):
        # fail fast on any command that was not anticipated above
        cmd_tuple = tuple(cmd)
        if cmd_tuple not in expected_subp:
            raise AssertionError(
                'Unexpected command provided to subp: {0}'.format(cmd))
        if cmd == ['tar', 'czvf', output_tarfile, date_logdir]:
            subp(cmd)  # Pass through tar cmd so we can check output
        return expected_subp[cmd_tuple], ''

    fake_stderr = mock.MagicMock()
    wrap_and_call(
        'cloudinit.cmd.devel.logs',
        {'subp': {'side_effect': fake_subp},
         'sys.stderr': {'new': fake_stderr},
         'CLOUDINIT_LOGS': {'new': [log1, log2]},
         'CLOUDINIT_RUN_DIR': {'new': self.run_dir},
         'USER_DATA_FILE': {'new': userdata}},
        logs.collect_logs, output_tarfile, include_userdata=True)
    # unpack the tarfile and check file contents
    subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
    out_logdir = self.tmp_path(date_logdir, self.new_root)
    self.assertEqual(
        'user-data',
        load_file(os.path.join(out_logdir, 'user-data.txt')))
    self.assertEqual(
        'sensitive',
        load_file(os.path.join(out_logdir, 'run', 'cloud-init',
                               INSTANCE_JSON_SENSITIVE_FILE)))
    fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
def test_basic_config(self):
    """
    test basic config looks sane

    # This should create a file of the format...
    # Created by cloud-init v. 0.7.6 on Sat, 11 Oct 2014 23:57:21 +0000
    log_level              :info
    ssl_verify_mode        :verify_none
    log_location           "/var/log/chef/client.log"
    validation_client_name "bob"
    validation_key         "/etc/chef/validation.pem"
    client_key             "/etc/chef/client.pem"
    chef_server_url        "localhost"
    environment            "_default"
    node_name              "iid-datasource-none"
    json_attribs           "/etc/chef/firstboot.json"
    file_cache_path        "/var/cache/chef"
    file_backup_path       "/var/backups/chef"
    pid_file               "/var/run/chef/client.pid"
    Chef::Log::Formatter.show_time = true
    encrypted_data_bag_secret  "/etc/chef/encrypted_data_bag_secret"
    """
    # use the real packaged template for rendering
    tpl_file = util.load_file('templates/chef_client.rb.tmpl')
    self.patchUtils(self.tmp)
    self.patchOS(self.tmp)

    util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
    cfg = {
        'chef': {
            'server_url': 'localhost',
            'validation_name': 'bob',
            'validation_key': "/etc/chef/vkey.pem",
            'validation_cert': "this is my cert",
            'encrypted_data_bag_secret':
                '/etc/chef/encrypted_data_bag_secret'
        },
    }
    cc_chef.handle('chef', cfg, self.fetch_cloud('ubuntu'), LOG, [])
    for d in cc_chef.CHEF_DIRS:
        self.assertTrue(os.path.isdir(d))
    c = util.load_file(cc_chef.CHEF_RB_PATH)

    # the content of these keys is not expected to be rendered to tmpl
    unrendered_keys = ('validation_cert',)
    for k, v in cfg['chef'].items():
        if k in unrendered_keys:
            continue
        self.assertIn(v, c)
    for k, v in cc_chef.CHEF_RB_TPL_DEFAULTS.items():
        if k in unrendered_keys:
            continue
        # the value from the cfg overrides that in the default
        val = cfg['chef'].get(k, v)
        if isinstance(val, six.string_types):
            self.assertIn(val, c)
    # firstboot.json is rendered as an empty JSON object by default
    c = util.load_file(cc_chef.CHEF_FB_PATH)
    self.assertEqual({}, json.loads(c))
def prepare_script(self):
    """Normalize the custom script in place and mark it executable.

    Raises:
        CustomScriptNotFound: if the script is missing on disk.
    """
    if not os.path.exists(self.scriptpath):
        raise CustomScriptNotFound("Script %s not found!! "
                                   "Cannot execute custom script!"
                                   % self.scriptpath)
    # Strip any CR characters from the decoded script.
    # FIX: the stripped content was previously computed and discarded;
    # write it back so the script on disk is actually normalized.
    content = util.load_file(self.scriptpath).replace("\r", "")
    util.write_file(self.scriptpath, content)
    # Preserve existing mode bits while adding owner-execute.
    st = os.stat(self.scriptpath)
    os.chmod(self.scriptpath, st.st_mode | stat.S_IEXEC)
def test_pubkey_extract(self):
    # Fixture cert plus its known-good SSH public key and fingerprint.
    cert = load_file(self._data_file('pubkey_extract_cert'))
    good_key = load_file(self._data_file('pubkey_extract_ssh_key'))
    sslmgr = azure_helper.OpenSSLManager()
    key = sslmgr._get_ssh_key_from_cert(cert)
    self.assertEqual(good_key, key)

    good_fingerprint = '073E19D14D1C799224C6A0FD8DDAB6A8BF27D473'
    fingerprint = sslmgr._get_fingerprint_from_cert(cert)
    self.assertEqual(good_fingerprint, fingerprint)
def test_handle_adds_assertions(self, m_subp):
    """Any configured snap assertions are provided to add_assertions."""
    assert_file = self.tmp_path('snapd.assertions', dir=self.tmp)
    compare_file = self.tmp_path('comparison', dir=self.tmp)
    cfg = {
        'snap': {'assertions': [SYSTEM_USER_ASSERTION, ACCOUNT_ASSERTION]}}
    wrap_and_call(
        'cloudinit.config.cc_snap',
        {'ASSERTIONS_FILE': {'new': assert_file}},
        handle, 'snap', cfg=cfg, cloud=None, log=self.logger, args=None)
    # handle() must write the assertions joined by newlines
    content = '\n'.join(cfg['snap']['assertions'])
    util.write_file(compare_file, content.encode('utf-8'))
    self.assertEqual(
        util.load_file(compare_file), util.load_file(assert_file))
def test_main_init_run_net_calls_set_hostname_when_metadata_present(self):
    """When local-hostname metadata is present, call cc_set_hostname."""
    self.cfg['datasource'] = {
        'None': {'metadata': {'local-hostname': 'md-hostname'}}}
    cloud_cfg = yaml_dumps(self.cfg)
    write_file(self.cloud_cfg_file, cloud_cfg)
    cmdargs = myargs(
        debug=False, files=None, force=False, local=False, reporter=None,
        subcommand='init')

    def set_hostname(name, cfg, cloud, log, args):
        # side_effect for cc_set_hostname.handle: verify the merged cfg
        self.assertEqual('set-hostname', name)
        updated_cfg = copy.deepcopy(self.cfg)
        updated_cfg.update(
            {'def_log_file': '/var/log/cloud-init.log',
             'log_cfgs': [],
             'syslog_fix_perms': [
                 'syslog:adm', 'root:adm', 'root:wheel', 'root:root'
             ],
             'vendor_data': {'enabled': True, 'prefix': []}})
        updated_cfg.pop('system_info')
        self.assertEqual(updated_cfg, cfg)
        self.assertEqual(main.LOG, log)
        self.assertIsNone(args)

    (_item1, item2) = wrap_and_call(
        'cloudinit.cmd.main',
        {'util.close_stdin': True,
         'netinfo.debug_info': 'my net debug info',
         'cc_set_hostname.handle': {'side_effect': set_hostname},
         'util.fixup_output': ('outfmt', 'errfmt')},
        main.main_init, 'init', cmdargs)
    self.assertEqual([], item2)
    # Instancify is called
    instance_id_path = 'var/lib/cloud/data/instance-id'
    # FIX: removed a pointless one-argument os.path.join() that wrapped
    # load_file(); compare the file content directly.
    self.assertEqual(
        'iid-datasource-none\n',
        load_file(os.path.join(self.new_root, instance_id_path)))
    # modules are run (including write_files)
    self.assertEqual(
        'blah', load_file(os.path.join(self.new_root, 'etc/blah.ini')))
    expected_logs = [
        'network config is disabled by fallback',  # apply_network_config
        'my net debug info',  # netinfo.debug_info
        'no previous run detected'
    ]
    for log in expected_logs:
        self.assertIn(log, self.stderr.getvalue())
def _reflect_cur_instance(self):
    """Point the instance link at the current instance, record
    datasource/instance-id history, and return the instance id."""
    # Remove the old symlink and attach a new one so
    # that further reads/writes connect into the right location
    idir = self._get_ipath()
    util.del_file(self.paths.instance_link)
    util.sym_link(idir, self.paths.instance_link)

    # Ensures these dirs exist
    dir_list = []
    for d in self._get_instance_subdirs():
        dir_list.append(os.path.join(idir, d))
    util.ensure_dirs(dir_list)

    # Write out information on what is being used for the current instance
    # and what may have been used for a previous instance...
    dp = self.paths.get_cpath('data')

    # Write what the datasource was and is..
    ds = "%s: %s" % (type_utils.obj_name(self.datasource), self.datasource)
    previous_ds = None
    ds_fn = os.path.join(idir, 'datasource')
    try:
        previous_ds = util.load_file(ds_fn).strip()
    except Exception:
        # no prior record (e.g. first boot of this instance) is fine
        pass
    if not previous_ds:
        previous_ds = ds
    util.write_file(ds_fn, "%s\n" % ds)
    util.write_file(os.path.join(dp, 'previous-datasource'),
                    "%s\n" % (previous_ds))

    # What the instance id was and is...
    iid = self.datasource.get_instance_id()
    previous_iid = None
    iid_fn = os.path.join(dp, 'instance-id')
    try:
        previous_iid = util.load_file(iid_fn).strip()
    except Exception:
        # missing/unreadable file simply means no previous id
        pass
    if not previous_iid:
        previous_iid = iid
    util.write_file(iid_fn, "%s\n" % iid)
    util.write_file(os.path.join(dp, 'previous-instance-id'),
                    "%s\n" % (previous_iid))

    # Ensure needed components are regenerated
    # after change of instance which may cause
    # change of configuration
    self._reset()
    return iid
def test_mixed_cloud_config(self):
    """A jsonp part patches the cloud-config part before it is stored."""
    blob_cc = """
#cloud-config
a: b
c: d
"""
    message_cc = MIMEBase("text", "cloud-config")
    message_cc.set_payload(blob_cc)

    blob_jp = """
#cloud-config-jsonp
[
     { "op": "replace", "path": "/a", "value": "c" },
     { "op": "remove", "path": "/c" }
]
"""
    message_jp = MIMEBase("text", "cloud-config-jsonp")
    message_jp.set_payload(blob_jp)

    message = MIMEMultipart()
    message.attach(message_cc)
    message.attach(message_jp)

    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    new_root = self.makeDir()
    self.patchUtils(new_root)
    self.patchOS(new_root)
    ci.fetch()
    ci.consume_userdata()
    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual.
    # The replace op changed 'a' and the remove op deleted 'c'.
    self.assertEqual(1, len(cc))
    self.assertEqual("c", cc["a"])
def read_sys_net(devname, path, translate=None, on_enoent=None,
                 on_keyerror=None, on_einval=None):
    """Read a sysfs attribute of a network device, optionally translating.

    @param devname: network device name (e.g. 'eth0').
    @param path: attribute path under the device's sysfs directory.
    @param translate: optional dict mapping raw values to results.
    @param on_enoent: callback invoked when the attribute is missing.
    @param on_keyerror: callback invoked when translate lacks the value.
    @param on_einval: callback invoked on EINVAL reading the attribute.
    """
    dev_path = sys_dev_path(devname, path)
    try:
        contents = util.load_file(dev_path)
    except (OSError, IOError) as e:
        e_errno = getattr(e, 'errno', None)
        if e_errno in (errno.ENOENT, errno.ENOTDIR):
            if on_enoent is not None:
                return on_enoent(e)
        if e_errno in (errno.EINVAL,):
            if on_einval is not None:
                return on_einval(e)
        raise
    contents = contents.strip()
    if translate is None:
        return contents
    try:
        return translate[contents]
    except KeyError as e:
        if on_keyerror is not None:
            return on_keyerror(e)
        else:
            # FIX: the format string was missing the closing quote around
            # the second %s ("'%s' in '%s").
            LOG.debug("Found unexpected (not translatable) value"
                      " '%s' in '%s'", contents, dev_path)
            raise
def test_basic_config(self):
    # every key under 'conf' should round-trip into the written server.cfg
    cfg = {
        'mcollective': {
            'conf': {
                'loglevel': 'debug',
                'connector': 'rabbitmq',
                'logfile': '/var/log/mcollective.log',
                'ttl': '4294957',
                'collectives': 'mcollective',
                'main_collective': 'mcollective',
                'securityprovider': 'psk',
                'daemonize': '1',
                'factsource': 'yaml',
                'direct_addressing': '1',
                'plugin.psk': 'unset',
                'libdir': '/usr/share/mcollective/plugins',
                'identity': '1',
            },
        },
    }
    expected = cfg['mcollective']['conf']

    self.patchUtils(self.tmp)
    cc_mcollective.configure(cfg['mcollective']['conf'])

    # read back the written config and compare section-for-section
    contents = util.load_file(cc_mcollective.SERVER_CFG, decode=False)
    contents = configobj.ConfigObj(BytesIO(contents))
    self.assertEqual(expected, dict(contents))
def test_existing_ovf_diff(self):
    # waagent/SharedConfig must be removed if ovfenv is found elsewhere

    # 'get_data' should remove SharedConfig.xml in /var/lib/waagent
    # if ovf-env.xml differs.
    # NOTE(review): base64.b64encode() is given a str here, which only
    # works on Python 2 — confirm before running under Python 3.
    cached_ovfenv = construct_valid_ovf_env(
        {'userdata': base64.b64encode("FOO_USERDATA")})
    new_ovfenv = construct_valid_ovf_env(
        {'userdata': base64.b64encode("NEW_USERDATA")})

    populate_dir(self.waagent_d,
                 {'ovf-env.xml': cached_ovfenv,
                  'SharedConfig.xml': "mysharedconfigxml",
                  'otherfile': 'otherfilecontent'})

    dsrc = self._get_ds({'ovfcontent': new_ovfenv})
    ret = dsrc.get_data()
    self.assertTrue(ret)
    self.assertEqual(dsrc.userdata_raw, "NEW_USERDATA")

    # stale SharedConfig.xml is purged; unrelated files survive and the
    # cached ovf-env.xml is replaced by the new contents
    self.assertTrue(os.path.exists(
        os.path.join(self.waagent_d, 'otherfile')))
    self.assertFalse(
        os.path.exists(os.path.join(self.waagent_d, 'SharedConfig.xml')))
    self.assertTrue(
        os.path.exists(os.path.join(self.waagent_d, 'ovf-env.xml')))
    self.assertEqual(new_ovfenv,
                     load_file(os.path.join(self.waagent_d, 'ovf-env.xml')))
def apply_locale(self, locale, out_fn=None):
    """Set the default login class language in login.conf and rebuild
    its capability database via cap_mkdb, restoring the backup on failure.

    NOTE(review): out_fn is accepted but unused here — confirm callers.
    """
    # Adjust the locale value to the new value
    newconf = StringIO()
    for line in util.load_file(self.login_conf_fn).splitlines():
        newconf.write(re.sub(r'^default:',
                             r'default:lang=%s:' % locale, line))
        newconf.write("\n")
    # Make a backup of login.conf.
    util.copy(self.login_conf_fn, self.login_conf_fn_bak)
    # And write the new login.conf.
    util.write_file(self.login_conf_fn, newconf.getvalue())
    try:
        LOG.debug("Running cap_mkdb for %s", locale)
        util.subp(['cap_mkdb', self.login_conf_fn])
    except util.ProcessExecutionError:
        # cap_mkdb failed, so restore the backup.
        util.logexc(LOG, "Failed to apply locale %s", locale)
        try:
            util.copy(self.login_conf_fn_bak, self.login_conf_fn)
        except IOError:
            util.logexc(LOG, "Failed to restore %s backup",
                        self.login_conf_fn)
def test_mixed_cloud_config(self):
    # a jsonp part patches the cloud-config part before it is stored
    blob_cc = '''
#cloud-config
a: b
c: d
'''
    message_cc = MIMEBase("text", "cloud-config")
    message_cc.set_payload(blob_cc)

    blob_jp = '''
#cloud-config-jsonp
[
     { "op": "replace", "path": "/a", "value": "c" },
     { "op": "remove", "path": "/c" }
]
'''
    message_jp = MIMEBase('text', "cloud-config-jsonp")
    message_jp.set_payload(blob_jp)

    message = MIMEMultipart()
    message.attach(message_cc)
    message.attach(message_jp)

    self.reRoot()
    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    ci.fetch()
    ci.consume_data()
    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    # the replace op changed 'a' and the remove op deleted 'c'
    self.assertEqual(1, len(cc))
    self.assertEqual('c', cc['a'])
def test_include_bad_url(self, mock_sleep):
    """Test #include with a bad URL."""
    bad_url = 'http://bad/forbidden'
    bad_data = '#cloud-config\nbad: true\n'
    httpretty.register_uri(httpretty.GET, bad_url, bad_data, status=403)

    included_url = 'http://hostname/path'
    included_data = '#cloud-config\nincluded: true\n'
    httpretty.register_uri(httpretty.GET, included_url, included_data)

    blob = '#include\n%s\n%s' % (bad_url, included_url)

    self.reRoot()
    ci = stages.Init()
    ci.datasource = FakeDataSource(blob)
    log_file = self.capture_log(logging.WARNING)
    ci.fetch()
    ci.consume_data()

    # the 403 is reported as a warning ...
    self.assertIn("403 Client Error: Forbidden for url: %s" % bad_url,
                  log_file.getvalue())

    # ... the bad part is skipped while the good include still applies
    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    self.assertIsNone(cc.get('bad'))
    self.assertTrue(cc.get('included'))
def test_handler_full_setup(self):
    """Test that the handler ends up calling the renderers"""
    cfg = self._get_base_config_repos()
    cfg['zypper']['config'] = {
        'download.deltarpm': 'False',
    }
    root_d = self.tmp_dir()
    os.makedirs('%s/etc/zypp/repos.d' % root_d)
    helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'})
    self.reRoot(root_d)
    cc_zypper_add_repo.handle('zypper_add_repo', cfg, None, LOG, [])
    cfg_out = os.path.join(root_d, self.zypp_conf)
    contents = util.load_file(cfg_out)
    expected = [
        '# Zypp config',
        '# Added via cloud.cfg',
        'download.deltarpm=False',
    ]
    for item in contents.split('\n'):
        if item not in expected:
            # FIX: was assertIsNone(item) — replaced by an explicit fail
            self.fail('Unexpected line in zypp config: "%s"' % item)
    repos = glob.glob('%s/etc/zypp/repos.d/*.repo' % root_d)
    expected_repos = ['testing-foo.repo', 'testing-bar.repo']
    if len(repos) != 2:
        # FIX: `assert '<non-empty string>'` always passes, so this
        # check was a no-op; use self.fail so it can actually fail.
        self.fail('Number of repos written is "%d" expected 2'
                  % len(repos))
    for repo in repos:
        repo_name = os.path.basename(repo)
        if repo_name not in expected_repos:
            # FIX: same always-true bare assert replaced with self.fail
            self.fail('Found repo with name "%s"; unexpected' % repo_name)
def loadrcconf(self):
    """Parse rc.conf into a dict of variable name -> value.

    Fixes over the previous version: split only on the FIRST '=' so
    values containing '=' are not truncated, and skip lines with no '='
    instead of raising IndexError.
    """
    conf = {}
    lines = util.load_file(self.rc_conf_fn).splitlines()
    for line in lines:
        if '=' not in line:
            # blank/comment lines carry no assignment; previously these
            # raised IndexError
            continue
        key, _sep, val = line.partition('=')
        conf[key] = val.rstrip()
    return conf
def render_jinja_payload_from_file(
        payload, payload_fn, instance_data_file, debug=False):
    """Render a jinja template payload sourcing variables from jinja_vars_path.

    @param payload: String of jinja template content. Should begin with
        ## template: jinja\n.
    @param payload_fn: String representing the filename from which the payload
        was read used in error reporting. Generally in part-handling this is
        'part-##'.
    @param instance_data_file: A path to a json file containing variables that
        will be used as jinja template variables.

    @return: A string of jinja-rendered content with the jinja header removed.
        Returns None on error.
    @raises RuntimeError: when the instance data file is absent or cannot be
        read due to permissions.
    """
    instance_data = {}
    rendered_payload = None
    if not os.path.exists(instance_data_file):
        raise RuntimeError(
            'Cannot render jinja template vars. Instance data not yet'
            ' present at %s' % instance_data_file)
    try:
        instance_data = load_json(load_file(instance_data_file))
    except (IOError, OSError) as e:
        if e.errno == EACCES:
            raise RuntimeError(
                'Cannot render jinja template vars. No read permission on'
                " '%s'. Try sudo" % instance_data_file)
        # NOTE(review): IO/OS errors other than EACCES are silently
        # swallowed here and rendering proceeds with empty instance_data
        # — confirm that is intended.
    rendered_payload = render_jinja_payload(
        payload, payload_fn, instance_data, debug)
    if not rendered_payload:
        return None
    return rendered_payload
def test_existing_config_is_saved(self):
    """configure() must back up a pre-existing server config as '.old'."""
    cfg = {'loglevel': 'warn'}
    util.write_file(self.server_cfg, STOCK_CONFIG)
    cc_mcollective.configure(config=cfg, server_cfg=self.server_cfg)
    backup = self.server_cfg + ".old"
    # both the rewritten config and its backup must now exist
    for path in (self.server_cfg, backup):
        self.assertTrue(os.path.exists(path))
    # the backup preserves the original content verbatim
    self.assertEqual(util.load_file(backup), STOCK_CONFIG)
def test_write_repo(self):
    """Verify the content of a repo file"""
    cfg = {
        'repos': [
            {
                'baseurl': 'http://foo',
                'name': 'test-foo',
                'id': 'testing-foo'
            },
        ]
    }
    root_d = self.tmp_dir()
    cc_zypper_add_repo._write_repos(cfg['repos'], root_d)
    contents = util.load_file("%s/testing-foo.repo" % root_d)
    parser = self.parse_and_read(StringIO(contents))
    # 'enabled' and 'autorefresh' are defaults the writer must add
    expected = {
        'testing-foo': {
            'name': 'test-foo',
            'baseurl': 'http://foo',
            'enabled': '1',
            'autorefresh': '1'
        }
    }
    for section in expected:
        self.assertTrue(parser.has_section(section),
                        "Contains section {0}".format(section))
        for k, v in expected[section].items():
            self.assertEqual(parser.get(section, k), v)
def test_simple(self):
    """A plain 'content' + 'path' entry is written verbatim."""
    self.patchUtils(self.tmp)
    payload = "hello world\n"
    target = "/tmp/my.file"
    write_files(
        "test_simple", [{"content": payload, "path": target}])
    self.assertEqual(util.load_file(target), payload)
def _collect_platform_data():
    """Returns a dictionary of platform info from dmi or /sys/hypervisor.

    Keys in the dictionary are as follows:
       uuid: system-uuid from dmi or /sys/hypervisor
       uuid_source: 'hypervisor' (/sys/hypervisor/uuid) or 'dmi'
       serial: dmi 'system-serial-number' (/sys/.../product_serial)

    On Ec2 instances experimentation is that product_serial is upper case,
    and product_uuid is lower case.  This returns lower case values for both.
    """
    info = {}
    # Prefer the hypervisor-provided uuid; fall back to dmi on any error.
    try:
        sys_uuid = util.load_file("/sys/hypervisor/uuid").strip()
        info['uuid_source'] = 'hypervisor'
    except Exception:
        sys_uuid = util.read_dmi_data('system-uuid')
        info['uuid_source'] = 'dmi'
    # Normalize missing values to '' and force lower case.
    info['uuid'] = (sys_uuid or '').lower()
    info['serial'] = (util.read_dmi_data('system-serial-number')
                      or '').lower()
    return info
def test_write_etc_hosts_suse_localhost(self):
    cfg = {
        'manage_etc_hosts': 'localhost',
        'hostname': 'cloud-init.test.us'
    }
    # seed /etc/hosts with one pre-existing entry that must be kept
    os.makedirs('%s/etc/' % self.tmp)
    hosts_content = '192.168.1.1 blah.blah.us blah\n'
    fout = open('%s/etc/hosts' % self.tmp, 'w')
    fout.write(hosts_content)
    fout.close()
    distro = self._fetch_distro('sles')
    distro.hosts_fn = '%s/etc/hosts' % self.tmp
    paths = helpers.Paths({})
    ds = None
    cc = cloud.Cloud(ds, paths, {}, distro, None)
    self.patchUtils(self.tmp)
    cc_update_etc_hosts.handle('test', cfg, cc, LOG, [])
    contents = util.load_file('%s/etc/hosts' % self.tmp)
    # NOTE: assertIsNone(<non-None str>) is used as 'fail with message'
    if '127.0.0.1\tcloud-init.test.us\tcloud-init' not in contents:
        self.assertIsNone('No entry for 127.0.0.1 in etc/hosts')
    if '192.168.1.1\tblah.blah.us\tblah' not in contents:
        self.assertIsNone('Default etc/hosts content modified')
def test_apt_all_proxy_written(self):
    """http, https and ftp proxies are all written to the proxy file."""
    cfg = {
        "http_proxy": "myproxy_http_proxy",
        "https_proxy": "myproxy_https_proxy",
        "ftp_proxy": "myproxy_ftp_proxy",
    }
    cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
    # the proxy file is created; the apt config file is not
    self.assertTrue(os.path.isfile(self.pfile))
    self.assertFalse(os.path.isfile(self.cfile))
    contents = util.load_file(self.pfile)
    for scheme in ("http", "https", "ftp"):
        expected = cfg["%s_proxy" % scheme]
        self.assertTrue(
            self._search_apt_config(contents, scheme, expected))
def test_write_etc_hosts_suse_localhost(self):
    cfg = {
        "manage_etc_hosts": "localhost",
        "hostname": "cloud-init.test.us",
    }
    # seed /etc/hosts with one pre-existing entry that must be kept
    os.makedirs("%s/etc/" % self.tmp)
    hosts_content = "192.168.1.1 blah.blah.us blah\n"
    fout = open("%s/etc/hosts" % self.tmp, "w")
    fout.write(hosts_content)
    fout.close()
    distro = self._fetch_distro("sles")
    distro.hosts_fn = "%s/etc/hosts" % self.tmp
    paths = helpers.Paths({})
    ds = None
    cc = cloud.Cloud(ds, paths, {}, distro, None)
    self.patchUtils(self.tmp)
    cc_update_etc_hosts.handle("test", cfg, cc, LOG, [])
    contents = util.load_file("%s/etc/hosts" % self.tmp)
    # NOTE: assertIsNone(<non-None str>) is used as 'fail with message'
    if "127.0.1.1\tcloud-init.test.us\tcloud-init" not in contents:
        self.assertIsNone("No entry for 127.0.1.1 in etc/hosts")
    if "192.168.1.1\tblah.blah.us\tblah" not in contents:
        self.assertIsNone("Default etc/hosts content modified")
def test_config_write_skip_configdir(self, mock_logging):
    """Write configuration but skip writing 'configdir' setting"""
    cfg = {
        'config': {
            'download.deltarpm': 'False',
            'reposdir': 'foo',
            'configdir': 'bar'
        }
    }
    root_d = self.tmp_dir()
    helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'})
    self.reRoot(root_d)
    cc_zypper_add_repo._write_zypp_config(cfg['config'])
    cfg_out = os.path.join(root_d, self.zypp_conf)
    contents = util.load_file(cfg_out)
    # note 'configdir=bar' is intentionally absent from the expectation
    expected = [
        '# Zypp config',
        '# Added via cloud.cfg',
        'download.deltarpm=False',
        'reposdir=foo'
    ]
    for item in contents.split('\n'):
        if item not in expected:
            # assertIsNone on a str serves as 'fail with message' here
            self.assertIsNone(item)
def test_set_locale_arch(self):
    # an unsupported locale_configfile must be warned about, while the
    # locale itself is still applied via locale-gen + localectl
    locale = 'en_GB.UTF-8'
    locale_configfile = '/etc/invalid-locale-path'
    cfg = {
        'locale': locale,
        'locale_configfile': locale_configfile,
    }
    cc = get_cloud('arch')

    with mock.patch('cloudinit.distros.arch.subp.subp') as m_subp:
        with mock.patch('cloudinit.distros.arch.LOG.warning') as m_LOG:
            cc_locale.handle('cc_locale', cfg, cc, LOG, [])
            m_LOG.assert_called_with('Invalid locale_configfile %s, '
                                     'only supported value is '
                                     '/etc/locale.conf', locale_configfile)

    contents = util.load_file(cc.distro.locale_gen_fn)
    self.assertIn('%s UTF-8' % locale, contents)
    m_subp.assert_called_with(['localectl', 'set-locale', locale],
                              capture=False)
def test_none_ds(self):
    # End-to-end: None datasource + write-files module -> /etc/blah.ini
    new_root = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, new_root)
    self.replicateTestRoot('simple_ubuntu', new_root)
    cfg = {
        'datasource_list': ['None'],
        'cloud_init_modules': ['write-files'],
        'system_info': {'paths': {'run_dir': new_root}}
    }
    ud = helpers.readResource('user_data.1.txt')
    cloud_cfg = safeyaml.dumps(cfg)
    util.ensure_dir(os.path.join(new_root, 'etc', 'cloud'))
    util.write_file(os.path.join(new_root, 'etc', 'cloud', 'cloud.cfg'),
                    cloud_cfg)
    self._patchIn(new_root)

    # Now start verifying whats created
    initer = stages.Init()
    initer.read_cfg()
    initer.initialize()
    initer.fetch()
    initer.datasource.userdata_raw = ud
    initer.instancify()
    initer.update()
    initer.cloudify().run('consume_data', initer.consume_data,
                          args=[PER_INSTANCE], freq=PER_INSTANCE)

    # distro option from the replicated test root should be visible
    mirrors = initer.distro.get_option('package_mirrors')
    self.assertEqual(1, len(mirrors))
    mirror = mirrors[0]
    self.assertEqual(mirror['arches'], ['i386', 'amd64', 'blah'])

    # running the configured module section must run write-files cleanly
    mods = stages.Modules(initer)
    (which_ran, failures) = mods.run_section('cloud_init_modules')
    self.assertTrue(len(failures) == 0)
    self.assertTrue(os.path.exists('/etc/blah.ini'))
    self.assertIn('write-files', which_ran)
    contents = util.load_file('/etc/blah.ini')
    self.assertEqual(contents, 'blah')
def apt_src_basic(self, filename, cfg):
    """apt_src_basic
    Test Fix deb source string, has to overwrite mirror conf in params
    """
    cfg = self.wrapv1conf(cfg)

    cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)

    self.assertTrue(os.path.isfile(filename))

    contents = util.load_file(filename)
    # the full 'deb <mirror> <suite> <components>' line must be present
    self.assertTrue(
        re.search(
            r"%s %s %s %s\n"
            % (
                "deb",
                "http://archive.ubuntu.com/ubuntu",
                "karmic-backports",
                "main universe multiverse restricted",
            ),
            contents,
            flags=re.IGNORECASE,
        ))
def test_all_decodings(self): self.patchUtils(self.tmp) # build a 'files' array that has a dictionary of encodings # for 'gz', 'gzip', 'gz+base64' ... data = b"foobzr" utf8_valid = b"foobzr" utf8_invalid = b'ab\xaadef' files = [] expected = [] gz_aliases = ('gz', 'gzip') gz_b64_aliases = ('gz+base64', 'gzip+base64', 'gz+b64', 'gzip+b64') b64_aliases = ('base64', 'b64') datum = (("utf8", utf8_valid), ("no-utf8", utf8_invalid)) for name, data in datum: gz = (_gzip_bytes(data), gz_aliases) gz_b64 = (base64.b64encode(_gzip_bytes(data)), gz_b64_aliases) b64 = (base64.b64encode(data), b64_aliases) for content, aliases in (gz, gz_b64, b64): for enc in aliases: cur = { 'content': content, 'path': '/tmp/file-%s-%s' % (name, enc), 'encoding': enc } files.append(cur) expected.append((cur['path'], data)) write_files("test_decoding", files, LOG) for path, content in expected: self.assertEqual(util.load_file(path, decode=False), content) # make sure we actually wrote *some* files. flen_expected = (len(gz_aliases + gz_b64_aliases + b64_aliases) * len(datum)) self.assertEqual(len(expected), flen_expected)
def loadrcconf(self):
    """Parse the rc.conf file into a dict of ``KEY -> value``.

    Lines not matching ``KEY = value`` and lines whose value is empty
    are skipped with a debug message.  A single pair of surrounding
    single or double quotes is stripped from values (naive: embedded
    quoted quotes are not handled).
    """
    RE_MATCH = re.compile(r'^(\w+)\s*=\s*(.*)\s*')
    conf = {}
    lines = util.load_file(self.rc_conf_fn).splitlines()
    for line in lines:
        m = RE_MATCH.match(line)
        if not m:
            LOG.debug("Skipping line from /etc/rc.conf: %s", line)
            continue
        key = m.group(1).rstrip()
        val = m.group(2).rstrip()
        # Kill them quotes (not completely correct, aka won't handle
        # quoted values, but should be ok ...).
        # Bug fix: guard each index so an empty or lone-quote value
        # (e.g. ``KEY=`` or ``KEY="``) no longer raises IndexError --
        # the original indexed val[0]/val[-1] before the emptiness check.
        if val and val[0] in ('"', "'"):
            val = val[1:]
        if val and val[-1] in ('"', "'"):
            val = val[0:-1]
        if len(val) == 0:
            LOG.debug("Skipping empty value from /etc/rc.conf: %s", line)
            continue
        conf[key] = val
    return conf
def apt_src_key(self, filename, cfg):
    """apt_src_key
    Test specification of a source + key
    """
    cfg = self.wrapv1conf([cfg])
    with mock.patch.object(subp, 'subp') as mockobj:
        cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
    # The key material must be piped to apt-key on stdin.
    mockobj.assert_called_with(['apt-key', 'add', '-'],
                               data=b'fakekey 4321', target=None)
    self.assertTrue(os.path.isfile(filename))
    pattern = r"%s %s %s %s\n" % (
        "deb",
        'http://ppa.launchpad.net/smoser/cloud-init-test/ubuntu',
        "xenial",
        "main")
    self.assertTrue(
        re.search(pattern, util.load_file(filename), flags=re.IGNORECASE))
def _restore_from_cache(self):
    """Load the previously pickled datasource object, if any.

    We try to restore from a current link and static path by using the
    instance link; if purge_cache was called the file won't exist, which
    is expected.  Returns the unpickled object or None when the cache is
    missing, unreadable, or fails to unpickle.
    """
    pickled_fn = self.paths.get_ipath_cur('obj_pkl')
    pickle_contents = None
    try:
        pickle_contents = util.load_file(pickled_fn, decode=False)
    except Exception as e:
        # A missing file is the expected post-purge state; only log when
        # the file exists but could not be read.
        if os.path.isfile(pickled_fn):
            # Fix: use the non-deprecated warning() with lazy %-args
            # instead of LOG.warn("..." % ...); drop the dead `pass`.
            LOG.warning("failed loading pickle in %s: %s", pickled_fn, e)
    if not pickle_contents:
        return None
    try:
        return pickle.loads(pickle_contents)
    except Exception:
        util.logexc(LOG, "Failed loading pickled blob from %s", pickled_fn)
        return None
def test_all_decodings(self):
    """write_files must decode every supported content-encoding alias."""
    self.patchUtils(self.tmp)
    # Two payloads: valid utf-8 and bytes that are NOT valid utf-8.
    utf8_valid = b"foobzr"
    utf8_invalid = b"ab\xaadef"
    gz_aliases = ("gz", "gzip")
    gz_b64_aliases = ("gz+base64", "gzip+base64", "gz+b64", "gzip+b64")
    b64_aliases = ("base64", "b64")
    datum = (("utf8", utf8_valid), ("no-utf8", utf8_invalid))

    files = []
    expected = []
    for name, data in datum:
        encoded = (
            (_gzip_bytes(data), gz_aliases),
            (base64.b64encode(_gzip_bytes(data)), gz_b64_aliases),
            (base64.b64encode(data), b64_aliases),
        )
        for content, aliases in encoded:
            for enc in aliases:
                path = "/tmp/file-%s-%s" % (name, enc)
                files.append(
                    {"content": content, "path": path, "encoding": enc})
                expected.append((path, data))

    write_files("test_decoding", files)
    for path, content in expected:
        self.assertEqual(util.load_file(path, decode=False), content)
    # make sure we actually wrote *some* files.
    self.assertEqual(
        len(expected),
        len(gz_aliases + gz_b64_aliases + b64_aliases) * len(datum))
def test_mime_gzip_compressed(self):
    """Tests that individual message gzip encoding works."""

    def gzip_part(text):
        # Compress the payload in memory and wrap it as a gzip MIME part.
        buf = StringIO.StringIO()
        gz = gzip.GzipFile(fileobj=buf, mode='w')
        gz.write(str(text))
        gz.flush()
        gz.close()
        return MIMEApplication(buf.getvalue(), 'gzip')

    base_content1 = '''
#cloud-config
a: 2
'''
    base_content2 = '''
#cloud-config
b: 3
c: 4
'''
    message = MIMEMultipart('test')
    message.attach(gzip_part(base_content1))
    message.attach(gzip_part(base_content2))
    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    new_root = self.makeDir()
    self.patchUtils(new_root)
    self.patchOS(new_root)
    ci.fetch()
    ci.consume_userdata()
    merged = util.load_yaml(
        util.load_file(ci.paths.get_ipath("cloud_config")))
    self.assertTrue(isinstance(merged, dict))
    self.assertEquals(3, len(merged))
    self.assertEquals(2, merged['a'])
    self.assertEquals(3, merged['b'])
    self.assertEquals(4, merged['c'])
def _clean_default(target=None):
    """Remove the known snapd default netplan config and derived files.

    Only acts when the file content matches KNOWN_SNAPD_CONFIG exactly,
    so any user-modified configuration is left untouched (LP: #1675576).
    """
    tpath = util.target_path(target, "etc/netplan/00-snapd-config.yaml")
    if not os.path.isfile(tpath):
        return
    if util.load_file(tpath, decode=False) != KNOWN_SNAPD_CONFIG:
        return
    derived_names = (
        'run/systemd/network/10-netplan-all-en.network',
        'run/systemd/network/10-netplan-all-eth.network',
        'run/systemd/generator/netplan.stamp',
    )
    existing = [
        p for p in (util.target_path(target, f) for f in derived_names)
        if os.path.isfile(p)
    ]
    LOG.debug("removing known config '%s' and derived existing files: %s",
              tpath, existing)
    for f in [tpath] + existing:
        os.unlink(f)
def test_apt_all_proxy_written(self):
    """http, https and ftp proxies are all rendered into the proxy file."""
    # Build {'http_proxy': 'myproxy_http_proxy', ...} for all three types.
    cfg = {
        '%s_proxy' % ptype: 'myproxy_%s_proxy' % ptype
        for ptype in ('http', 'https', 'ftp')
    }
    cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
    self.assertTrue(os.path.isfile(self.pfile))
    self.assertFalse(os.path.isfile(self.cfile))
    contents = util.load_file(self.pfile)
    for ptype in ('http', 'https', 'ftp'):
        self.assertTrue(self._search_apt_config(
            contents, ptype, cfg['%s_proxy' % ptype]))
def test_get_data_writes_json_instance_data_on_success(self):
    """get_data writes INSTANCE_JSON_FILE to run_dir as world readable."""
    tmp = self.tmp_dir()
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}))
    datasource.get_data()
    json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
    content = util.load_file(json_file)
    expected = {
        'base64_encoded_keys': [],
        'sensitive_keys': [],
        'v1': {
            '_beta_keys': ['subplatform'],
            'availability-zone': 'myaz',
            'availability_zone': 'myaz',
            'cloud-name': 'subclasscloudname',
            'cloud_name': 'subclasscloudname',
            'instance-id': 'iid-datasource',
            'instance_id': 'iid-datasource',
            'local-hostname': 'test-subclass-hostname',
            'local_hostname': 'test-subclass-hostname',
            'platform': 'mytestsubclass',
            'public_ssh_keys': [],
            'region': 'myregion',
            'subplatform': 'unknown'
        },
        'ds': {
            '_doc': EXPERIMENTAL_TEXT,
            'meta_data': {
                'availability_zone': 'myaz',
                'local-hostname': 'test-subclass-hostname',
                'region': 'myregion'
            }
        }
    }
    # Fix: the original asserted the JSON content twice (before and
    # after the stat check); a single assertion suffices.
    self.assertEqual(expected, util.load_json(content))
    # The instance-data file must be world readable (0644).
    file_stat = os.stat(json_file)
    self.assertEqual(0o644, stat.S_IMODE(file_stat.st_mode))
def read_maas_seed_dir(seed_d):
    """
    Return user-data and metadata for a maas seed dir in seed_d.
    Expected format of seed_d are the following files:
      * instance-id
      * local-hostname
      * user-data
    """
    if not os.path.isdir(seed_d):
        # Fix: the original raised with a literal "%s" placeholder and
        # never interpolated the directory name into the message.
        raise MAASSeedDirNone("%s: not a directory" % seed_d)

    files = ('local-hostname', 'instance-id', 'user-data', 'public-keys')
    md = {}
    for fname in files:
        try:
            md[fname] = util.load_file(os.path.join(seed_d, fname),
                                       decode=fname not in BINARY_FIELDS)
        except IOError as e:
            # Missing optional seed files are fine; any other I/O error
            # is propagated to the caller.
            if e.errno != errno.ENOENT:
                raise

    return check_seed_contents(md, seed_d)
def test_write_ntp_config_template_defaults_pools_w_empty_lists(self):
    """write_ntp_config_template defaults pools servers upon empty config.

    When both pools and servers are empty, default NR_POOL_SERVERS get
    configured.
    """
    distro = "ubuntu"
    pools = cc_ntp.generate_server_names(distro)
    confpath, template_fn = self._generate_template()
    # Redirect the module's temp directory into our test root.
    with mock.patch("cloudinit.config.cc_ntp.temp_utils._TMPDIR",
                    self.new_root):
        cc_ntp.write_ntp_config_template(
            distro,
            servers=[],
            pools=pools,
            path=confpath,
            template_fn=template_fn,
            template=None,
        )
    self.assertEqual("servers []\npools {0}\n".format(pools),
                     util.load_file(confpath))
def test_ntp_handler_schema_validation_warns_non_string_item_type(
        self, m_sel):
    """Ntp schema validation warns of non-strings in pools or servers.

    Schema validation is not strict, so ntp config is still be rendered.
    """
    invalid_config = {"ntp": {"pools": [123], "servers": ["valid", None]}}
    for distro in cc_ntp.distros:
        mycloud = self._get_cloud(distro)
        ntpconfig = self._mock_ntp_client_config(distro=distro)
        m_sel.return_value = ntpconfig
        cc_ntp.handle("cc_ntp", invalid_config, mycloud, None, [])
        # A warning is logged, but rendering still proceeds.
        self.assertIn(
            "Invalid cloud-config provided:\nntp.pools.0: 123 is not of"
            " type 'string'\nntp.servers.1: None is not of type 'string'",
            self.logs.getvalue(),
        )
        self.assertEqual("servers ['valid', None]\npools [123]\n",
                         util.load_file(ntpconfig["confpath"]))
def test_ntp_the_whole_package(self, m_sysd, m_select, m_subp, m_dsubp):
    """Test enabled config renders template, and restarts service """
    cfg = {'ntp': {'enabled': True}}
    for distro in cc_ntp.distros:
        mycloud = self._get_cloud(distro)
        ntpconfig = self._mock_ntp_client_config(distro=distro)
        confpath = ntpconfig['confpath']
        service_name = ntpconfig['service_name']
        m_select.return_value = ntpconfig
        hosts = cc_ntp.generate_server_names(mycloud.distro.name)
        if distro == 'alpine':
            # Alpine has no systemd, and _mock_ntp_client_config above
            # did not specify a client so it defaults to "ntp", which on
            # Alpine Linux only supports servers and not pools.
            uses_systemd = False
            expected_service_call = ['rc-service', service_name, 'restart']
            expected_content = "servers {0}\npools []\n".format(hosts)
        else:
            uses_systemd = True
            expected_service_call = [
                'systemctl', 'reload-or-restart', service_name]
            expected_content = "servers []\npools {0}\n".format(hosts)
        m_sysd.return_value = uses_systemd
        with mock.patch('cloudinit.config.cc_ntp.util') as m_util:
            # allow use of util.mergemanydict
            m_util.mergemanydict.side_effect = util.mergemanydict
            # default client is present
            m_subp.which.return_value = True
            # use the config 'enabled' value
            m_util.is_false.return_value = util.is_false(
                cfg['ntp']['enabled'])
            cc_ntp.handle('notimportant', cfg, mycloud, None, None)
            m_dsubp.subp.assert_called_with(expected_service_call,
                                            capture=True)
        self.assertEqual(expected_content, util.load_file(confpath))
def test_simple_jsonp(self):
    """#cloud-config-jsonp user-data is applied as a JSON patch."""
    blob = '''
#cloud-config-jsonp
[
 { "op": "add", "path": "/baz", "value": "qux" },
 { "op": "add", "path": "/bar", "value": "qux2" }
]
'''
    ci = stages.Init()
    ci.datasource = FakeDataSource(blob)
    root = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, root)
    self.patchUtils(root)
    self.patchOS(root)
    ci.fetch()
    ci.consume_data()
    cc = util.load_yaml(
        util.load_file(ci.paths.get_ipath("cloud_config")))
    self.assertEquals(2, len(cc))
    self.assertEquals('qux', cc['baz'])
    self.assertEquals('qux2', cc['bar'])
def test_only_main_repo(self):
    """
    Test when only details of main repo is written to file.
    """
    alpine_version = 'v3.12'
    config = {'apk_repos': {'alpine_repo': {'version': alpine_version}}}

    cc_apk_configure.handle(self.name, config, self.cloud, self.log,
                            self.args)

    expected_content = textwrap.dedent("""\
        #
        # Created by cloud-init
        #
        # This file is written on first boot of an instance
        #

        {0}/{1}/main

    """.format(DEFAULT_MIRROR_URL, alpine_version))
    self.assertEqual(expected_content, util.load_file(REPO_FILE))
def update_resolve_conf_file(fn, dns_servers, search_servers):
    """Merge nameservers and search domains into resolv.conf file *fn*.

    Falls back to an empty configuration when *fn* cannot be read, then
    writes the merged result back via write_resolv_conf_file.  Individual
    entries rejected by ResolvConf are logged and skipped.
    """
    try:
        r_conf = ResolvConf(util.load_file(fn))
        r_conf.parse()
    except IOError:
        util.logexc(LOG, "Failed at parsing %s reverting to an empty "
                    "instance", fn)
        r_conf = ResolvConf('')
        r_conf.parse()

    for ns in (dns_servers or []):
        try:
            r_conf.add_nameserver(ns)
        except ValueError:
            util.logexc(LOG, "Failed at adding nameserver %s", ns)

    for domain in (search_servers or []):
        try:
            r_conf.add_search_domain(domain)
        except ValueError:
            util.logexc(LOG, "Failed at adding search domain %s", domain)

    write_resolv_conf_file(fn, r_conf)
def test_write_config_array(self):
    """A list-valued gpgkey is joined with newlines in the repo file."""
    cfg = {
        'yum_repos': {
            'puppetlabs-products': {
                'name': 'Puppet Labs Products El 6 - $basearch',
                'baseurl':
                    'http://yum.puppetlabs.com/el/6/products/$basearch',
                'gpgkey': [
                    'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppetlabs',
                    'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppet',
                ],
                'enabled': True,
                'gpgcheck': True,
            }
        }
    }
    self.patchUtils(self.tmp)
    cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
    contents = util.load_file("/etc/yum.repos.d/puppetlabs_products.repo")
    parser = ConfigParser()
    parser.readfp(StringIO(contents))
    expected = {
        'puppetlabs_products': {
            'name': 'Puppet Labs Products El 6 - $basearch',
            'baseurl': 'http://yum.puppetlabs.com/el/6/products/$basearch',
            'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppetlabs\n'
                      'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppet',
            'enabled': '1',
            'gpgcheck': '1',
        }
    }
    for section, options in expected.items():
        self.assertTrue(parser.has_section(section),
                        "Contains section {0}".format(section))
        for k, v in options.items():
            self.assertEqual(parser.get(section, k), v)
def handle(name, cfg, cloud, log, _args):
    """Set the system hostname/fqdn unless preserve_hostname is set.

    Skips work when the hostname/fqdn pair matches the one recorded by
    the last successful invocation; raises SetHostnameError when the
    distro-level hostname call fails.
    """
    if util.get_cfg_option_bool(cfg, "preserve_hostname", False):
        log.debug(("Configuration option 'preserve_hostname' is set,"
                   " not setting the hostname in module %s"), name)
        return

    # Set prefer_fqdn_over_hostname value in distro
    hostname_fqdn = util.get_cfg_option_bool(
        cfg, "prefer_fqdn_over_hostname", None)
    if hostname_fqdn is not None:
        cloud.distro.set_option('prefer_fqdn_over_hostname', hostname_fqdn)

    hostname, fqdn = util.get_hostname_fqdn(cfg, cloud)

    # Check for previous successful invocation of set-hostname
    # set-hostname artifact file accounts for both hostname and fqdn
    # deltas. As such, it's format is different than cc_update_hostname's
    # previous-hostname file which only contains the base hostname.
    # TODO consolidate previous-hostname and set-hostname artifact files and
    # distro._read_hostname implementation so we only validate one artifact.
    prev_fn = os.path.join(cloud.get_cpath('data'), "set-hostname")
    prev_hostname = {}
    if os.path.exists(prev_fn):
        prev_hostname = util.load_json(util.load_file(prev_fn))
    if (hostname == prev_hostname.get('hostname')
            and fqdn == prev_hostname.get('fqdn')):
        log.debug('No hostname changes. Skipping set-hostname')
        return

    log.debug("Setting the hostname to %s (%s)", fqdn, hostname)
    try:
        cloud.distro.set_hostname(hostname, fqdn)
    except Exception as e:
        msg = "Failed to set the hostname to %s (%s)" % (fqdn, hostname)
        util.logexc(log, msg)
        raise SetHostnameError("%s: %s" % (msg, e)) from e
    write_json(prev_fn, {'hostname': hostname, 'fqdn': fqdn})
def _add_fstab_entry(log, device, mount_point, fs_type, fs_label,
                     mount_opts, fs_freq, fs_passno):
    """Append an entry for *device* at *mount_point* to /etc/fstab.

    Existing lines are preserved verbatim.  If the device or the mount
    point already appears in the file nothing is written.  When
    *fs_label* is given, the device field is recorded as LABEL=<label>.
    """
    # Fix: compile the whitespace splitter once instead of per line, and
    # drop the bare ``except: pass`` that both hid real errors and could
    # leave ``toks`` unbound (NameError) on a failing line --
    # re.split does not raise on ordinary strings.
    splitter = re.compile("[%s]+" % (whitespace))
    fstab_lines = []
    for line in util.load_file(FSTAB_PATH).splitlines():
        toks = splitter.split(line)
        if len(toks) > 0 and toks[0] == device:
            util.logexc(log, "_add_fstab_entry: "
                        "file %s has device %s already" %
                        (FSTAB_PATH, device))
            return
        if len(toks) > 1 and toks[1] == mount_point:
            util.logexc(log, "_add_fstab_entry: "
                        "file %s has mount point %s already" %
                        (FSTAB_PATH, mount_point))
            return
        fstab_lines.append(line)

    if fs_label:
        device = "LABEL=%s" % fs_label
    fstab_lines.append("%s\t%s\t%s\t%s\t%s\t%s" %
                       (device, mount_point, fs_type, mount_opts,
                        fs_freq, fs_passno))
    util.write_file(FSTAB_PATH, "%s\n" % ('\n'.join(fstab_lines)))
def handle(name, cfg, cloud, log, _args):
    """Set the system hostname/fqdn unless preserve_hostname is set.

    Carries a local patch tolerating DHCP responses with multiple
    space-separated domain names (only the first is used for the fqdn).
    Raises SetHostnameError when the distro-level hostname call fails.
    """
    if util.get_cfg_option_bool(cfg, "preserve_hostname", False):
        log.debug(("Configuration option 'preserve_hostname' is set,"
                   " not setting the hostname in module %s"), name)
        return

    (hostname, fqdn) = util.get_hostname_fqdn(cfg, cloud)

    # Check for previous successful invocation of set-hostname
    # set-hostname artifact file accounts for both hostname and fqdn
    # deltas. As such, it's format is different than cc_update_hostname's
    # previous-hostname file which only contains the base hostname.
    # TODO consolidate previous-hostname and set-hostname artifact files and
    # distro._read_hostname implementation so we only validate one artifact.
    prev_fn = os.path.join(cloud.get_cpath('data'), "set-hostname")
    prev_hostname = {}
    if os.path.exists(prev_fn):
        prev_hostname = util.load_json(util.load_file(prev_fn))
    hostname_changed = (hostname != prev_hostname.get('hostname') or
                        fqdn != prev_hostname.get('fqdn'))
    if not hostname_changed:
        log.debug('No hostname changes. Skipping set-hostname')
        return

    # BEGIN MULTIPLE DOMAIN NAMES PATCH
    # cloud-init doesn't expect DHCP to have more than one domain names.
    # Assume the first domain name to be the primary one.
    if ' ' in fqdn:
        fqdn = "{}.{}".format(hostname, fqdn.split(' ')[0])
    # END MULTIPLE DOMAIN NAMES PATCH

    log.debug("Setting the hostname to %s (%s)", fqdn, hostname)
    try:
        cloud.distro.set_hostname(hostname, fqdn)
    except Exception as e:
        msg = "Failed to set the hostname to %s (%s)" % (fqdn, hostname)
        util.logexc(log, msg)
        # Fix: chain the original exception (``from e``) so the root
        # cause is preserved, consistent with the sibling handler.
        raise SetHostnameError("%s: %s" % (msg, e)) from e
    write_json(prev_fn, {'hostname': hostname, 'fqdn': fqdn})
def test_puppet_config_updates_puppet_conf(self, m_subp, m_default, m_auto):
    """When 'conf' is provided update values in PUPPET_CONF_PATH."""
    # Any config lookup resolves to our temporary puppet.conf.
    m_default.side_effect = lambda puppet_bin, setting: self.conf
    cfg = {
        'puppet': {
            'conf': {'agent': {'server': 'puppetserver.example.org'}}
        }
    }
    util.write_file(self.conf, '[agent]\nserver = origpuppet\nother = 3')
    self.cloud.distro = mock.MagicMock()
    cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
    expected = '[agent]\nserver = puppetserver.example.org\nother = 3\n\n'
    self.assertEqual(expected, util.load_file(self.conf))
def test_defaults_pools_empty_lists_sles(self):
    """write_ntp_config_template defaults opensuse pools upon empty config.

    When both pools and servers are empty, default NR_POOL_SERVERS get
    configured.
    """
    distro = 'sles'
    default_pools = cc_ntp.generate_server_names(distro)
    confpath, template_fn = self._generate_template()
    cc_ntp.write_ntp_config_template(distro, servers=[], pools=[],
                                     path=confpath,
                                     template_fn=template_fn,
                                     template=None)
    # SLES must fall back to the openSUSE pool hosts.
    for pool in default_pools:
        self.assertIn('opensuse', pool)
    self.assertEqual("servers []\npools {0}\n".format(default_pools),
                     util.load_file(confpath))
    self.assertIn(
        "Adding distro default ntp pool servers: {0}".format(
            ",".join(default_pools)),
        self.logs.getvalue())
def test_set_locale_arch(self):
    """Arch warns on any locale_configfile other than /etc/locale.conf."""
    locale = "en_GB.UTF-8"
    bad_configfile = "/etc/invalid-locale-path"
    cfg = {
        "locale": locale,
        "locale_configfile": bad_configfile,
    }
    cc = get_cloud("arch")

    with mock.patch("cloudinit.distros.arch.subp.subp") as m_subp:
        with mock.patch("cloudinit.distros.arch.LOG.warning") as m_warn:
            cc_locale.handle("cc_locale", cfg, cc, LOG, [])
            m_warn.assert_called_with(
                "Invalid locale_configfile %s, "
                "only supported value is "
                "/etc/locale.conf",
                bad_configfile,
            )
            contents = util.load_file(cc.distro.locale_gen_fn)
            self.assertIn("%s UTF-8" % locale, contents)
            m_subp.assert_called_with(["localectl", "set-locale", locale],
                                      capture=False)
def test_mime_gzip_compressed(self):
    """Tests that individual message gzip encoding works."""

    def gzip_part(text):
        # Wrap a gzip-compressed payload as an application/gzip part.
        return MIMEApplication(gzip_text(text), 'gzip')

    base_content1 = '''
#cloud-config
a: 2
'''
    base_content2 = '''
#cloud-config
b: 3
c: 4
'''
    message = MIMEMultipart('test')
    message.attach(gzip_part(base_content1))
    message.attach(gzip_part(base_content2))
    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    root = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, root)
    self.patchUtils(root)
    self.patchOS(root)
    ci.fetch()
    ci.consume_data()
    merged = util.load_yaml(
        util.load_file(ci.paths.get_ipath("cloud_config")))
    self.assertTrue(isinstance(merged, dict))
    self.assertEquals(3, len(merged))
    self.assertEquals(2, merged['a'])
    self.assertEquals(3, merged['b'])
    self.assertEquals(4, merged['c'])