def test_handle_modern_zfsroot(self, mount_info, zpool_info, parse_mount, is_container):
    """Resizing a zfs root issues 'zpool online -e' for the pool member."""
    devpth = 'zroot/ROOT/default'
    disk = 'da0p3'
    fs_type = 'zfs'
    mount_point = '/'
    mount_info.return_value = (devpth, fs_type, mount_point)
    zpool_info.return_value = disk
    parse_mount.return_value = (devpth, fs_type, mount_point)
    cfg = {'resize_rootfs': True}

    def fake_stat(devpath):
        # The bare disk name is not stat-able; only the /dev/ path is.
        if devpath == disk:
            raise OSError("not here")
        FakeStat = namedtuple(
            'FakeStat', ['st_mode', 'st_size', 'st_mtime'])  # minimal stat
        return FakeStat(25008, 0, 1)  # fake char block device

    with mock.patch('cloudinit.config.cc_resizefs.do_resize') as dresize:
        with mock.patch('cloudinit.config.cc_resizefs.os.stat') as m_stat:
            m_stat.side_effect = fake_stat
            handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])

    # First positional arg of do_resize is the resize command tuple.
    self.assertEqual(
        ('zpool', 'online', '-e', 'zroot', '/dev/' + disk),
        dresize.call_args[0][0])
def test_main_prints_docs(self):
    """When --doc parameter is provided, main generates documentation."""
    fake_argv = ['mycmd', '--doc']
    with mock.patch('sys.argv', fake_argv):
        with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
            self.assertEqual(0, main(), 'Expected 0 exit code')
    captured = m_stdout.getvalue()
    # Module docs for both ntp and runcmd appear as rst section headers.
    self.assertIn('\nNTP\n---\n', captured)
    self.assertIn('\nRuncmd\n------\n', captured)
def setUp(self):
    """Patch out disk enumeration and filesystem probing for each test."""
    super(TestIsDiskUsed, self).setUp()
    # NOTE(review): self.patches is never registered with addCleanup here;
    # presumably a tearDown elsewhere calls self.patches.close() -- confirm.
    self.patches = ExitStack()
    mod_name = 'cloudinit.config.cc_disk_setup'
    self.enumerate_disk = self.patches.enter_context(
        mock.patch('{0}.enumerate_disk'.format(mod_name)))
    self.check_fs = self.patches.enter_context(
        mock.patch('{0}.check_fs'.format(mod_name)))
def test_main_validates_config_file(self):
    """When --config-file parameter is provided, main validates schema."""
    config_path = self.tmp_path('my.yaml')
    write_file(config_path, b'#cloud-config\nntp:')  # shortest ntp schema
    fake_argv = ['mycmd', '--config-file', config_path]
    with mock.patch('sys.argv', fake_argv):
        with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
            self.assertEqual(0, main(), 'Expected 0 exit code')
    self.assertIn(
        'Valid cloud-config file {0}'.format(config_path),
        m_stdout.getvalue())
def test_main_missing_args(self):
    """Main exits non-zero and reports an error on missing parameters."""
    with mock.patch('sys.exit', side_effect=self.sys_exit):
        with mock.patch('sys.argv', ['mycmd']):
            with mock.patch('sys.stderr',
                            new_callable=StringIO) as m_stderr:
                with self.assertRaises(SystemExit) as ctx:
                    main()
    self.assertEqual(1, ctx.exception.code)
    self.assertEqual(
        'Expected either --config-file argument or --doc\n',
        m_stderr.getvalue())
def test_main_absent_config_file(self):
    """Main exits non-zero when config file is absent."""
    fake_argv = ['mycmd', '--annotate', '--config-file', 'NOT_A_FILE']
    with mock.patch('sys.exit', side_effect=self.sys_exit):
        with mock.patch('sys.argv', fake_argv):
            with mock.patch('sys.stderr',
                            new_callable=StringIO) as m_stderr:
                with self.assertRaises(SystemExit) as ctx:
                    main()
    self.assertEqual(1, ctx.exception.code)
    self.assertEqual(
        'Configfile NOT_A_FILE does not exist\n',
        m_stderr.getvalue())
def setUp(self):
    """Patch subp and the builtin open() for OpenSSL manager tests."""
    super(TestOpenSSLManager, self).setUp()
    patches = ExitStack()
    self.addCleanup(patches.close)

    self.subp = patches.enter_context(
        mock.patch.object(azure_helper.util, 'subp'))
    try:
        # Python 2 name of the builtins module.
        self.open = patches.enter_context(
            mock.patch('__builtin__.open'))
    except ImportError:
        # Python 3: patch builtins.open instead.
        self.open = patches.enter_context(
            mock.patch('builtins.open'))
def test_handle_args_error_on_missing_param(self):
    """Error when missing required parameters and print usage."""
    query_args = self.args(
        debug=False, dump_all=False, format=None, instance_data=None,
        list_keys=False, user_data=None, vendor_data=None, varname=None)
    with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
        with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
            self.assertEqual(1, query.handle_args('anyname', query_args))
    expected_error = (
        'ERROR: Expected one of the options: --all, --format, --list-keys'
        ' or varname\n')
    # The error is both logged and written to stderr; usage goes to stdout.
    self.assertIn(expected_error, self.logs.getvalue())
    self.assertIn('usage: query', m_stdout.getvalue())
    self.assertIn(expected_error, m_stderr.getvalue())
def test_handle_args_returns_standardized_vars_to_top_level_aliases(self):
    """Any standardized vars under v# are promoted as top-level aliases."""
    write_file(
        self.instance_data,
        '{"v1": {"v1_1": "val1.1"}, "v2": {"v2_2": "val2.2"},'
        ' "top": "gun"}')
    # Keys inside v1/v2 are duplicated at the top level; user/vendor data
    # values are redacted because the caller is not root.
    expected = dedent("""\
        {
         "top": "gun",
         "userdata": "<redacted for non-root user> file:ud",
         "v1": {
          "v1_1": "val1.1"
         },
         "v1_1": "val1.1",
         "v2": {
          "v2_2": "val2.2"
         },
         "v2_2": "val2.2",
         "vendordata": "<redacted for non-root user> file:vd"
        }
        """)
    args = self.args(
        debug=False, dump_all=True, format=None,
        instance_data=self.instance_data, user_data='ud',
        vendor_data='vd', list_keys=False, varname=None)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        self.assertEqual(0, query.handle_args('anyname', args))
    self.assertEqual(expected, m_stdout.getvalue())
def setUp(self):
    """Build walker state and mock write_file() for each test."""
    super(TestWalkerHandleHandler, self).setUp()
    tmpdir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, tmpdir)

    self.data = {
        "handlercount": 0,
        "frequency": "",
        "handlerdir": tmpdir,
        "handlers": helpers.ContentHandlers(),
        "data": None}

    # Expected module/file names derive from the starting handlercount.
    self.expected_module_name = "part-handler-%03d" % (
        self.data["handlercount"],)
    expected_file_name = "%s.py" % self.expected_module_name
    self.expected_file_fullname = os.path.join(
        self.data["handlerdir"], expected_file_name)
    self.module_fake = FakeModule()
    self.ctype = None
    self.filename = None
    self.payload = "dummy payload"

    # Mock the write_file() function.  We'll assert that it got called as
    # expected in each of the individual tests.
    resources = ExitStack()
    self.addCleanup(resources.close)
    self.write_file_mock = resources.enter_context(
        mock.patch('cloudinit.util.write_file'))
def test_handle_args_list_keys_errors_when_varname_is_not_a_dict(self):
    """Raise an error when --list-keys and varname specify a non-list."""
    write_file(
        self.instance_data,
        '{"v1": {"v1_1": "val1.1", "v1_2": "val1.2"}, "v2": ' +
        '{"v2_2": "val2.2"}, "top": "gun"}')
    expected_error = "ERROR: --list-keys provided but 'top' is not a dict"
    query_args = self.args(
        debug=False, dump_all=False, format=None,
        instance_data=self.instance_data, list_keys=True,
        user_data='ud', vendor_data='vd', varname='top')
    with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
        with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
            self.assertEqual(1, query.handle_args('anyname', query_args))
    # Nothing on stdout; the error lands on stderr.
    self.assertEqual('', m_stdout.getvalue())
    self.assertIn(expected_error, m_stderr.getvalue())
def test_run_command_logs_commands_and_exit_codes_to_stderr(self):
    """All exit codes are logged to stderr."""
    outfile = self.tmp_path('output.log', dir=self.tmp)

    cmd1 = 'echo "HI" >> %s' % outfile
    cmd2 = 'bogus command'
    cmd3 = 'echo "MOM" >> %s' % outfile
    commands = [cmd1, cmd2, cmd3]

    mock_path = 'cloudinit.config.cc_snap.sys.stderr'
    with mock.patch(mock_path, new_callable=StringIO) as m_stderr:
        with self.assertRaises(RuntimeError) as context_manager:
            run_commands(commands=commands)

    # Shell wording varies ("bogus: not found" vs
    # "bogus: command not found"); accept either.
    self.assertIsNotNone(
        re.search(r'bogus: (command )?not found',
                  str(context_manager.exception)),
        msg='Expected bogus command not found')
    # All three commands ran; only the failing one logs an ERROR line.
    expected_stderr_log = '\n'.join([
        'Begin run command: {cmd}'.format(cmd=cmd1),
        'End run command: exit(0)',
        'Begin run command: {cmd}'.format(cmd=cmd2),
        'ERROR: End run command: exit(127)',
        'Begin run command: {cmd}'.format(cmd=cmd3),
        'End run command: exit(0)\n'])
    self.assertEqual(expected_stderr_log, m_stderr.getvalue())
def test_upstart_frequency_single(self):
    """Upstart job parts are written and initctl reloaded per-instance."""
    # files should be written out when frequency is ! per-instance
    new_root = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, new_root)
    self.patchOS(new_root)
    self.patchUtils(new_root)

    paths = helpers.Paths({
        'upstart_dir': "/etc/upstart",
    })
    util.ensure_dir("/run")
    util.ensure_dir("/etc/upstart")

    with mock.patch(self.mpath + 'SUITABLE_UPSTART', return_value=True):
        with mock.patch.object(util, 'subp') as m_subp:
            h = UpstartJobPartHandler(paths)
            h.handle_part('', handlers.CONTENT_START, None, None, None)
            h.handle_part('blah', 'text/upstart-job', 'test.conf',
                          'blah', frequency=PER_INSTANCE)
            h.handle_part('', handlers.CONTENT_END, None, None, None)

    # Exactly one job file written, and upstart told to reload config.
    self.assertEqual(len(os.listdir('/etc/upstart')), 1)
    m_subp.assert_called_once_with(
        ['initctl', 'reload-configuration'], capture=False)
def test_handle_args_renders_instance_data_vars_in_template(self):
    """If user_data file is a jinja template render instance-data vars."""
    user_data = self.tmp_path('user-data', dir=self.tmp)
    write_file(user_data, '##template: jinja\nrendering: {{ my_var }}')
    instance_data = self.tmp_path('instance-data', dir=self.tmp)
    # "my-var" in json is referenced as my_var in the template, so the
    # renderer exposes dash keys under underscore aliases.
    write_file(instance_data, '{"my-var": "jinja worked"}')
    args = self.args(
        user_data=user_data, instance_data=instance_data, debug=True)
    with mock.patch('sys.stderr', new_callable=StringIO) as m_console_err:
        with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
            self.assertEqual(0, render.handle_args('anyname', args))
    # debug=True: variable conversion is reported in logs and on stderr.
    self.assertIn(
        'DEBUG: Converted jinja variables\n{', self.logs.getvalue())
    self.assertIn(
        'DEBUG: Converted jinja variables\n{', m_console_err.getvalue())
    self.assertEqual('rendering: jinja worked', m_stdout.getvalue())
def test_handle_args_error_when_no_read_permission_instance_data(self):
    """When instance_data file is unreadable, log an error."""
    noread_fn = self.tmp_path('unreadable', dir=self.tmp)
    write_file(noread_fn, 'thou shall not pass')
    args = self.args(
        debug=False, dump_all=True, format=None, instance_data=noread_fn,
        list_keys=False, user_data='ud', vendor_data='vd', varname=None)
    with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
        # Simulate EACCES via mock instead of relying on real file modes.
        with mock.patch('cloudinit.cmd.query.util.load_file') as m_load:
            m_load.side_effect = OSError(errno.EACCES, 'Not allowed')
            self.assertEqual(1, query.handle_args('anyname', args))
    self.assertIn(
        "ERROR: No read permission on '%s'. Try sudo" % noread_fn,
        self.logs.getvalue())
    self.assertIn(
        "ERROR: No read permission on '%s'. Try sudo" % noread_fn,
        m_stderr.getvalue())
def test_no_errors(self):
    """Payload gets written to file and added to C{pdata}."""
    with mock.patch('cloudinit.importer.import_module',
                    return_value=self.module_fake) as mockobj:
        handlers.walker_handle_handler(self.data, self.ctype,
                                       self.filename, self.payload)
        mockobj.assert_called_once_with(self.expected_module_name)
    # Handler source is written 0o600 and the count is bumped.
    self.write_file_mock.assert_called_once_with(
        self.expected_file_fullname, self.payload, 0o600)
    self.assertEqual(self.data['handlercount'], 1)
def test_handle_args_root_uses_sensitive_instance_data(self):
    """When root user, and no instance-data arg, use sensitive.json."""
    user_data = self.tmp_path('user-data', dir=self.tmp)
    write_file(user_data, '##template: jinja\nrendering: {{ my_var }}')
    run_dir = self.tmp_path('run_dir', dir=self.tmp)
    ensure_dir(run_dir)
    json_sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
    write_file(json_sensitive, '{"my-var": "jinja worked"}')
    paths = Paths({'run_dir': run_dir})
    self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths')
    self.m_paths.return_value = paths
    args = self.args(
        user_data=user_data, instance_data=None, debug=False)
    with mock.patch('sys.stderr', new_callable=StringIO):
        with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
            with mock.patch('os.getuid') as m_getuid:
                m_getuid.return_value = 0  # pretend to be root
                self.assertEqual(0, render.handle_args('anyname', args))
    self.assertIn('rendering: jinja worked', m_stdout.getvalue())
def test_import_error(self):
    """Module import errors are logged. No handler added to C{pdata}."""
    with mock.patch('cloudinit.importer.import_module',
                    side_effect=ImportError) as mockobj:
        handlers.walker_handle_handler(self.data, self.ctype,
                                       self.filename, self.payload)
        mockobj.assert_called_once_with(self.expected_module_name)
    # The payload is still written out even though the import failed...
    self.write_file_mock.assert_called_once_with(
        self.expected_file_fullname, self.payload, 0o600)
    # ...but handlercount stays at zero: no handler registered.
    self.assertEqual(self.data['handlercount'], 0)
def test_handle_args_returns_top_level_varname(self):
    """When the argument varname is passed, report its value."""
    write_file(self.instance_data, '{"my-var": "it worked"}')
    query_args = self.args(
        debug=False, dump_all=True, format=None,
        instance_data=self.instance_data, list_keys=False,
        user_data='ud', vendor_data='vd', varname='my_var')
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        self.assertEqual(0, query.handle_args('anyname', query_args))
    # 'my_var' resolves the json key "my-var" via underscore aliasing.
    self.assertEqual('it worked\n', m_stdout.getvalue())
def test_get_data_vmware_seed_platform_info(self):
    """Platform info properly reports when on VMware platform."""
    paths = Paths({'cloud_dir': self.tdir, 'run_dir': self.tdir})
    # Write ovf-env.xml seed file
    seed_dir = self.tmp_path('seed', dir=self.tdir)
    ovf_env = self.tmp_path('ovf-env.xml', dir=seed_dir)
    util.write_file(ovf_env, OVF_ENV_CONTENT)
    ds = self.datasource(sys_cfg={}, distro={}, paths=paths)

    self.assertEqual('ovf', ds.cloud_name)
    self.assertEqual('ovf', ds.platform_type)
    with mock.patch(MPATH + 'util.read_dmi_data', return_value='VMWare'):
        with mock.patch(MPATH + 'transport_vmware_guestinfo') as m_guestd:
            with mock.patch(MPATH + 'transport_iso9660') as m_iso9660:
                # Neither transport yields data; the seed dir is used.
                m_iso9660.return_value = NOT_FOUND
                m_guestd.return_value = NOT_FOUND
                self.assertTrue(ds.get_data())
                self.assertEqual(
                    'vmware (%s/seed/ovf-env.xml)' % self.tdir,
                    ds.subplatform)
def test_handle_args_error_on_missing_instance_data(self):
    """When instance_data file path does not exist, log an error."""
    user_data = self.tmp_path('user-data', dir=self.tmp)
    missing_path = self.tmp_path('instance-data', dir=self.tmp)
    render_args = self.args(
        user_data=user_data, instance_data=missing_path, debug=False)
    with mock.patch('sys.stderr', new_callable=StringIO):
        self.assertEqual(1, render.handle_args('anyname', render_args))
    self.assertIn(
        'Missing instance-data.json file: %s' % missing_path,
        self.logs.getvalue())
def test_collect_logs_with_userdata_requires_root_user(self, m_getuid):
    """collect-logs errors when a non-root user collects userdata."""
    m_getuid.return_value = 100  # non-root
    tarfile_path = self.tmp_path('logs.tgz')
    with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
        result = logs.collect_logs(tarfile_path, include_userdata=True)
    self.assertEqual(1, result)
    self.assertEqual(
        'To include userdata, root user is required.'
        ' Try sudo cloud-init collect-logs\n',
        m_stderr.getvalue())
def test_handle_args_root_fallsback_to_instance_data(self):
    """When no instance_data argument, root falls back to redacted json."""
    args = self.args(
        debug=False, dump_all=True, format=None, instance_data=None,
        list_keys=False, user_data=None, vendor_data=None, varname=None)
    run_dir = self.tmp_path('run_dir', dir=self.tmp)
    ensure_dir(run_dir)
    paths = Paths({'run_dir': run_dir})
    self.add_patch('cloudinit.cmd.query.read_cfg_paths', 'm_paths')
    self.m_paths.return_value = paths
    with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
        with mock.patch('os.getuid') as m_getuid:
            m_getuid.return_value = 0  # pretend to be root
            self.assertEqual(1, query.handle_args('anyname', args))
    json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
    sensitive_file = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
    # Root warns when sensitive json is missing and redacted json is used.
    self.assertIn(
        'WARNING: Missing root-readable %s. Using redacted %s instead.' % (
            sensitive_file, json_file),
        m_stderr.getvalue())
def test_handle_args_returns_nested_varname(self):
    """A dotted varname path (v1.key_2) resolves nested dict values."""
    write_file(
        self.instance_data,
        '{"v1": {"key-2": "value-2"}, "my-var": "it worked"}')
    args = self.args(
        debug=False, dump_all=False, format=None,
        instance_data=self.instance_data, user_data='ud',
        vendor_data='vd', list_keys=False, varname='v1.key_2')
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        self.assertEqual(0, query.handle_args('anyname', args))
    self.assertEqual('value-2\n', m_stdout.getvalue())
def setUp(self):
    """Patch CloudStack datasource collaborators for each test."""
    super(TestCloudStackPasswordFetching, self).setUp()
    self.patches = ExitStack()
    self.addCleanup(self.patches.close)
    mod_name = 'cloudinit.sources.DataSourceCloudStack'
    self.patches.enter_context(mock.patch('{0}.ec2'.format(mod_name)))
    self.patches.enter_context(mock.patch('{0}.uhelp'.format(mod_name)))

    default_gw = "192.201.20.0"
    # No dhcp lease file is available.
    get_latest_lease = mock.MagicMock(return_value=None)
    self.patches.enter_context(mock.patch(
        mod_name + '.get_latest_lease', get_latest_lease))

    get_default_gw = mock.MagicMock(return_value=default_gw)
    self.patches.enter_context(mock.patch(
        mod_name + '.get_default_gateway', get_default_gw))

    # No server address from networkd leases either.
    get_networkd_server_address = mock.MagicMock(return_value=None)
    self.patches.enter_context(mock.patch(
        mod_name + '.dhcp.networkd_get_option_from_leases',
        get_networkd_server_address))
    self.tmp = self.tmp_dir()
def test_handler_creates_and_runs_bootcmd_script_with_instance_id(self):
    """Valid schema runs a bootcmd script with INSTANCE_ID in the env."""
    cc = self._get_cloud('ubuntu')
    out_file = self.tmp_path('bootcmd.out', self.new_root)
    my_id = "b6ea0f59-e27d-49c6-9f87-79f19765a425"
    valid_config = {'bootcmd': [
        'echo {0} $INSTANCE_ID > {1}'.format(my_id, out_file)]}

    with mock.patch(self._etmpfile_path, FakeExtendedTempFile):
        with self.allow_subp(['/bin/sh']):
            handle('cc_bootcmd', valid_config, cc, LOG, [])
    # INSTANCE_ID in the script env expands to the none-datasource iid.
    self.assertEqual(my_id + ' iid-datasource-none\n',
                     util.load_file(out_file))
def test_handle_runs_commands_provided(self):
    """If commands are specified as a list, run them."""
    outfile = self.tmp_path('output.log', dir=self.tmp)

    cfg = {
        'snap': {'commands': ['echo "HI" >> %s' % outfile,
                              'echo "MOM" >> %s' % outfile]}}
    mock_path = 'cloudinit.config.cc_snap.sys.stderr'
    with self.allow_subp([CiTestCase.SUBP_SHELL_TRUE]):
        with mock.patch(mock_path, new_callable=StringIO):
            handle('snap', cfg=cfg, cloud=None, log=self.logger, args=None)

    # Both commands ran in order.
    self.assertEqual('HI\nMOM\n', util.load_file(outfile))
def test_handle_args_root_fallback_from_sensitive_instance_data(self):
    """When root user defaults to sensitive.json."""
    user_data = self.tmp_path('user-data', dir=self.tmp)
    run_dir = self.tmp_path('run_dir', dir=self.tmp)
    ensure_dir(run_dir)
    paths = Paths({'run_dir': run_dir})
    self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths')
    self.m_paths.return_value = paths
    args = self.args(
        user_data=user_data, instance_data=None, debug=False)
    with mock.patch('sys.stderr', new_callable=StringIO):
        with mock.patch('os.getuid') as m_getuid:
            m_getuid.return_value = 0  # pretend to be root
            self.assertEqual(1, render.handle_args('anyname', args))
    json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
    json_sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
    # First a warning about the missing sensitive file, then an error
    # because the redacted fallback file is missing too.
    self.assertIn(
        'WARNING: Missing root-readable %s. Using redacted %s' % (
            json_sensitive, json_file), self.logs.getvalue())
    self.assertIn(
        'ERROR: Missing instance-data.json file: %s' % json_file,
        self.logs.getvalue())
def test_handle_args_error_on_missing_instance_data(self):
    """When instance_data file path does not exist, log an error."""
    absent_fn = self.tmp_path('absent', dir=self.tmp)
    query_args = self.args(
        debug=False, dump_all=True, format=None, instance_data=absent_fn,
        list_keys=False, user_data='ud', vendor_data='vd', varname=None)
    with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
        self.assertEqual(1, query.handle_args('anyname', query_args))
    expected = 'ERROR: Missing instance-data file: %s' % absent_fn
    # The error appears in both the log stream and on stderr.
    self.assertIn(expected, self.logs.getvalue())
    self.assertIn(expected, m_stderr.getvalue())
def test_handle_args_list_keys_sorts_nested_keys_when_varname(self):
    """Sort all nested keys of varname object when --list-keys provided."""
    write_file(
        self.instance_data,
        '{"v1": {"v1_1": "val1.1", "v1_2": "val1.2"}, "v2":' +
        ' {"v2_2": "val2.2"}, "top": "gun"}')
    query_args = self.args(
        debug=False, dump_all=False, format=None,
        instance_data=self.instance_data, list_keys=True,
        user_data='ud', vendor_data='vd', varname='v1')
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        self.assertEqual(0, query.handle_args('anyname', query_args))
    # Keys of the v1 dict, sorted, one per line.
    self.assertEqual('v1_1\nv1_2\n', m_stdout.getvalue())
def test_ntp_the_whole_package(self, m_sysd, m_select, m_subp, m_dsubp):
    """Test enabled config renders template, and restarts service """
    cfg = {'ntp': {'enabled': True}}
    for distro in cc_ntp.distros:
        mycloud = self._get_cloud(distro)
        ntpconfig = self._mock_ntp_client_config(distro=distro)
        confpath = ntpconfig['confpath']
        service_name = ntpconfig['service_name']
        m_select.return_value = ntpconfig

        hosts = cc_ntp.generate_server_names(mycloud.distro.name)
        uses_systemd = True
        expected_service_call = ['systemctl', 'reload-or-restart',
                                 service_name]
        expected_content = "servers []\npools {0}\n".format(hosts)

        if distro == 'alpine':
            # Alpine restarts through OpenRC, not systemd.
            uses_systemd = False
            expected_service_call = ['rc-service', service_name, 'restart']
            # _mock_ntp_client_config call above did not specify a client
            # value and so it defaults to "ntp" which on Alpine Linux only
            # supports servers and not pools.
            expected_content = "servers {0}\npools []\n".format(hosts)

        m_sysd.return_value = uses_systemd
        with mock.patch('cloudinit.config.cc_ntp.util') as m_util:
            # allow use of util.mergemanydict
            m_util.mergemanydict.side_effect = util.mergemanydict
            # default client is present
            m_subp.which.return_value = True
            # use the config 'enabled' value
            m_util.is_false.return_value = util.is_false(
                cfg['ntp']['enabled'])
            cc_ntp.handle('notimportant', cfg, mycloud, None, None)
            m_dsubp.subp.assert_called_with(
                expected_service_call, capture=True)

        self.assertEqual(expected_content, util.load_file(confpath))
def test_write_ntp_config_template_uses_ntp_conf_distro_no_servers(self):
    """write_ntp_config_template reads content from ntp.conf.distro.tmpl.

    It reads ntp.conf.<distro>.tmpl before attempting ntp.conf.tmpl. It
    renders the value from the keys servers and pools. When no servers
    value is present, template is rendered using an empty list.
    """
    distro = 'ubuntu'
    cfg = {'pools': ['10.0.0.1', '10.0.0.2']}
    mycloud = self._get_cloud(distro)
    ntp_conf = self.tmp_path('ntp.conf', self.new_root)  # Doesn't exist
    # Create ntp.conf.tmpl which isn't read
    with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream:
        stream.write(b'NOT READ: ntp.conf.<distro>.tmpl is primary')
    # Create ntp.conf.<distro>.tmpl, the distro-specific template that
    # takes precedence.
    with open('{0}.{1}.tmpl'.format(ntp_conf, distro), 'wb') as stream:
        stream.write(NTP_TEMPLATE)
    with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf):
        cc_ntp.write_ntp_config_template(cfg, mycloud, ntp_conf)
    content = util.read_file_or_url('file://' + ntp_conf).contents
    self.assertEqual(
        "servers []\npools ['10.0.0.1', '10.0.0.2']\n",
        content.decode())
def test_status_returns_done(self):
    '''Report done when results.json exists and no stage is unfinished.'''
    ensure_file(self.tmp_path('result.json', self.new_root))
    write_json(
        self.status_file,
        {'v1': {'stage': None,  # No current stage running
                'datasource': (
                    'DataSourceNoCloud [seed=/var/.../seed/nocloud-net]'
                    '[dsmode=net]'),
                'blah': {'finished': 123.456},
                'init': {'errors': [], 'start': 124.567,
                         'finished': 125.678},
                'init-local': {'start': 123.45, 'finished': 123.46}}})
    cmdargs = myargs(long=False, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(0, retcode)
    self.assertEqual('status: done\n', m_stdout.getvalue())
def test_status_returns_running_long_format(self):
    '''Long format reports the stage in which we are running.'''
    write_json(
        self.status_file,
        {'v1': {'stage': 'init',
                'init': {'start': 124.456, 'finished': None},
                'init-local': {'start': 123.45, 'finished': 123.46}}})
    cmdargs = myargs(long=True, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(0, retcode)
    # time is the latest stage start (124.456s after the epoch).
    expected = dedent('''\
        status: running
        time: Thu, 01 Jan 1970 00:02:04 +0000
        detail:
        Running in stage: init
    ''')
    self.assertEqual(expected, m_stdout.getvalue())
def test_handle_args_defaults_instance_data(self):
    """When no instance_data argument, default to configured run_dir."""
    query_args = self.args(
        debug=False, dump_all=True, format=None, instance_data=None,
        list_keys=False, user_data=None, vendor_data=None, varname=None)
    run_dir = self.tmp_path('run_dir', dir=self.tmp)
    ensure_dir(run_dir)
    self.add_patch('cloudinit.cmd.query.read_cfg_paths', 'm_paths')
    self.m_paths.return_value = Paths({'run_dir': run_dir})
    with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
        self.assertEqual(1, query.handle_args('anyname', query_args))
    json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
    expected = 'ERROR: Missing instance-data file: %s' % json_file
    # Error is reported both in logs and on stderr.
    self.assertIn(expected, self.logs.getvalue())
    self.assertIn(expected, m_stderr.getvalue())
def test_status_returns_done_long(self):
    '''Long format of done status includes datasource info.'''
    ensure_file(self.tmp_path('result.json', self.new_root))
    write_json(
        self.status_file,
        {'v1': {'stage': None,
                'datasource': (
                    'DataSourceNoCloud [seed=/var/.../seed/nocloud-net]'
                    '[dsmode=net]'),
                'init': {'start': 124.567, 'finished': 125.678},
                'init-local': {'start': 123.45, 'finished': 123.46}}})
    cmdargs = myargs(long=True, wait=False)
    with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
        retcode = wrap_and_call(
            'cloudinit.cmd.status',
            {'_is_cloudinit_disabled': (False, ''),
             'Init': {'side_effect': self.init_class}},
            status.handle_status_args, 'ignored', cmdargs)
    self.assertEqual(0, retcode)
    # time is the latest stage finish (125.678s after the epoch).
    expected = dedent('''\
        status: done
        time: Thu, 01 Jan 1970 00:02:05 +0000
        detail:
        DataSourceNoCloud [seed=/var/.../seed/nocloud-net][dsmode=net]
    ''')
    self.assertEqual(expected, m_stdout.getvalue())
def test_jinja_template_handle_errors_on_unreadable_instance_data(self):
    """If instance-data is unreadable, raise an error from handle_part."""
    script_handler = ShellScriptPartHandler(self.paths)
    instance_json = os.path.join(self.run_dir, 'instance-data.json')
    util.write_file(instance_json, util.json_dumps({}))
    h = JinjaTemplatePartHandler(
        self.paths, sub_handlers=[script_handler])
    with mock.patch(self.mpath + 'load_file') as m_load:
        with self.assertRaises(RuntimeError) as context_manager:
            # Simulate EACCES when the handler reads instance-data.json.
            m_load.side_effect = OSError(errno.EACCES, 'Not allowed')
            h.handle_part(
                data='data', ctype="!" + handlers.CONTENT_START,
                filename='part01',
                payload='## template: jinja \n#!/bin/bash\necho himom',
                frequency='freq', headers='headers')
    script_file = os.path.join(script_handler.script_dir, 'part01')
    self.assertEqual(
        'Cannot render jinja template vars. No read permission on'
        " '{rdir}/instance-data.json'. Try sudo".format(rdir=self.run_dir),
        str(context_manager.exception))
    # No script should have been written when rendering failed.
    self.assertFalse(
        os.path.exists(script_file),
        'Unexpected file created %s' % script_file)
def test_status_main(self):
    '''status.main can be run as a standalone script.'''
    write_json(
        self.status_file,
        {'v1': {'init': {'start': 1, 'finished': None}}})
    with self.assertRaises(SystemExit) as context_manager:
        with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
            wrap_and_call(
                'cloudinit.cmd.status',
                {'sys.argv': {'new': ['status']},
                 '_is_cloudinit_disabled': (False, ''),
                 'Init': {'side_effect': self.init_class}},
                status.main)
    # main() exits 0 and reports the running status.
    self.assertEqual(0, context_manager.exception.code)
    self.assertEqual('status: running\n', m_stdout.getvalue())
def test_wb_read_url_defaults_honored_by_read_file_or_url_callers(self):
    """Readurl param defaults used when unspecified by read_file_or_url

    Param defaults tested are as follows:
        retries: 0, additional headers None beyond default, method: GET,
        data: None, check_status: True and allow_redirects: True
    """
    url = 'http://hostname/path'

    m_response = mock.MagicMock()

    class FakeSession(requests.Session):
        @classmethod
        def request(cls, **kwargs):
            # Assert the exact kwargs readurl passes by default.
            self.assertEqual(
                {'url': url, 'allow_redirects': True, 'method': 'GET',
                 'headers': {
                     'User-Agent': 'Cloud-Init/%s' % (
                         version.version_string())}},
                kwargs)
            return m_response

    with mock.patch(M_PATH + 'requests.Session') as m_session:
        error = requests.exceptions.HTTPError('broke')
        # First Session() raises; second returns the asserting fake.
        m_session.side_effect = [error, FakeSession()]
        # assert no retries and check_status == True
        with self.assertRaises(UrlError) as context_manager:
            response = read_file_or_url(url)
        self.assertEqual('broke', str(context_manager.exception))
        # assert default headers, method, url and allow_redirects True
        # Success on 2nd call with FakeSession
        response = read_file_or_url(url)
        self.assertEqual(m_response, response._response)
def setUp(self):
    """Wire a fake serial console that replays a canned V2 response."""
    super(TestJoyentMetadataClient, self).setUp()

    self.serial = mock.MagicMock(spec=serial.Serial)
    self.request_id = 0xabcdef12
    self.metadata_value = 'value'
    self.response_parts = {
        'command': 'SUCCESS',
        'crc': 'b5a9ff00',
        'length': SUCCESS_LEN + len(b64e(self.metadata_value)),
        'payload': b64e(self.metadata_value),
        'request_id': '{0:08x}'.format(self.request_id),
    }

    def make_response():
        # Assemble a V2 protocol line from the current response_parts.
        payloadstr = ''
        if 'payload' in self.response_parts:
            payloadstr = ' {0}'.format(self.response_parts['payload'])
        return ('V2 {length} {crc} {request_id} '
                '{command}{payloadstr}\n'.format(
                    payloadstr=payloadstr,
                    **self.response_parts).encode('ascii'))

    self.metasource_data = None

    def read_response(length):
        # Serve the canned response in length-sized chunks, like a real
        # serial read would.
        if not self.metasource_data:
            self.metasource_data = make_response()
            self.metasource_data_len = len(self.metasource_data)
        resp = self.metasource_data[:length]
        self.metasource_data = self.metasource_data[length:]
        return resp

    self.serial.read.side_effect = read_response
    # Pin the random request id so responses match requests.
    self.patched_funcs.enter_context(
        mock.patch('cloudinit.sources.DataSourceSmartOS.random.randint',
                   mock.Mock(return_value=self.request_id)))
def test_write_ntp_config_template_defaults_pools_empty_lists_sles(self):
    """write_ntp_config_template defaults pools servers upon empty config.

    When both pools and servers are empty, default NR_POOL_SERVERS get
    configured.
    """
    distro = 'sles'
    mycloud = self._get_cloud(distro)
    ntp_conf = self.tmp_path('ntp.conf', self.new_root)  # Doesn't exist
    # Create ntp.conf.tmpl
    with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream:
        stream.write(NTP_TEMPLATE)
    with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf):
        cc_ntp.write_ntp_config_template({}, mycloud, ntp_conf)
    content = util.read_file_or_url('file://' + ntp_conf).contents
    # SLES default pools are numbered opensuse pool hosts.
    default_pools = [
        "{0}.opensuse.pool.ntp.org".format(x)
        for x in range(0, cc_ntp.NR_POOL_SERVERS)]
    self.assertEqual(
        "servers []\npools {0}\n".format(default_pools),
        content.decode())
    self.assertIn(
        "Adding distro default ntp pool servers: {0}".format(
            ",".join(default_pools)),
        self.logs.getvalue())
def test_ntp_handler_schema_validation_warns_invalid_key_present(self):
    """Ntp schema validation warns of invalid keys present in ntp config.

    Schema validation is not strict, so ntp config is still rendered.
    """
    invalid_config = {
        'ntp': {'invalidkey': 1,
                'pools': ['0.mycompany.pool.ntp.org']}}
    cc = self._get_cloud('ubuntu')
    ntp_conf = os.path.join(self.new_root, 'ntp.conf')
    with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream:
        stream.write(NTP_TEMPLATE)
    with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf):
        cc_ntp.handle('cc_ntp', invalid_config, cc, None, [])
    # A warning is logged, but the config is still applied.
    self.assertIn(
        "Invalid config:\nntp: Additional properties are not allowed "
        "('invalidkey' was unexpected)",
        self.logs.getvalue())
    with open(ntp_conf) as stream:
        content = stream.read()
    self.assertEqual(
        "servers []\npools ['0.mycompany.pool.ntp.org']\n",
        content)
def test_ntp_handler_timesyncd(self, m_ntp_install):
    """Test ntp handler configures timesyncd.

    With package install reported unnecessary, write_ntp_config_template
    renders the timesyncd template putting servers and pools together on
    the NTP= line.
    """
    m_ntp_install.return_value = False
    distro = 'ubuntu'
    cfg = {
        'servers': ['192.168.2.1', '192.168.2.2'],
        'pools': ['0.mypool.org'],
    }
    mycloud = self._get_cloud(distro)
    tsyncd_conf = self.tmp_path("timesyncd.conf", self.new_root)
    # Create timesyncd.conf.tmpl for the renderer to consume.
    template = '{0}.tmpl'.format(tsyncd_conf)
    with open(template, 'wb') as stream:
        stream.write(TIMESYNCD_TEMPLATE)
    with mock.patch('cloudinit.config.cc_ntp.TIMESYNCD_CONF', tsyncd_conf):
        cc_ntp.write_ntp_config_template(cfg, mycloud, tsyncd_conf,
                                         template='timesyncd.conf')
    content = util.read_file_or_url('file://' + tsyncd_conf).contents
    self.assertEqual(
        "[Time]\nNTP=192.168.2.1 192.168.2.2 0.mypool.org \n",
        content.decode())
def test_ntp_user_provided_config_with_template(self, m_install, m_reload):
    """A user-supplied ntp_client config with its own template is
    rendered to the user-specified confpath on every distro."""
    custom = r'\n#MyCustomTemplate'
    user_template = NTP_TEMPLATE + custom
    confpath = os.path.join(self.new_root, 'etc/myntp/myntp.conf')
    cfg = {
        'ntp': {
            'pools': ['mypool.org'],
            'ntp_client': 'myntpd',
            'config': {
                'check_exe': 'myntpd',
                'confpath': confpath,
                'packages': ['myntp'],
                'service_name': 'myntp',
                'template': user_template,
            },
        }
    }
    expected = "servers []\npools ['mypool.org']\n%s" % custom
    for distro in cc_ntp.distros:
        mycloud = self._get_cloud(distro)
        # Keep temp files under the test root.
        with mock.patch('cloudinit.config.cc_ntp.temp_utils._TMPDIR',
                        self.new_root):
            cc_ntp.handle('notimportant', cfg, mycloud, None, None)
        self.assertEqual(expected, util.load_file(confpath))
def test_handler_puppet_writes_csr_attributes_file(self, m_subp, m_auto):
    """When csr_attributes is provided
        creates file in PUPPET_CSR_ATTRIBUTES_PATH."""
    mycloud = self._get_cloud('ubuntu')
    mycloud.distro = mock.MagicMock()
    csr_attrs = {
        'custom_attributes': {
            '1.2.840.113549.1.9.7':
                '342thbjkt82094y0uthhor289jnqthpc2290'},
        'extension_requests': {
            'pp_uuid': 'ED803750-E3C7-44F5-BB08-41A04433FE2E',
            'pp_image_name': 'my_ami_image',
            'pp_preshared_key':
                '342thbjkt82094y0uthhor289jnqthpc2290'},
    }
    cfg = {'puppet': {'csr_attributes': csr_attrs}}
    csr_attributes = ('cloudinit.config.cc_puppet.'
                      'PUPPET_CSR_ATTRIBUTES_PATH')
    with mock.patch(csr_attributes, self.csr_attributes_path):
        cc_puppet.handle('notimportant', cfg, mycloud, LOG, None)
    content = util.load_file(self.csr_attributes_path)
    # Keys are emitted sorted within each YAML section.
    expected = textwrap.dedent("""\
        custom_attributes:
          1.2.840.113549.1.9.7: 342thbjkt82094y0uthhor289jnqthpc2290
        extension_requests:
          pp_image_name: my_ami_image
          pp_preshared_key: 342thbjkt82094y0uthhor289jnqthpc2290
          pp_uuid: ED803750-E3C7-44F5-BB08-41A04433FE2E
        """)
    self.assertEqual(expected, content)
def test_get_data_writes_json_instance_data_sensitive(self):
    """
    get_data writes unmodified data to sensitive file as root-readonly.
    """
    tmp = self.tmp_dir()
    # Metadata includes a nested 'security-credentials' dict which the
    # datasource declares as sensitive (asserted below).
    datasource = DataSourceTestSubclassNet(
        self.sys_cfg, self.distro, Paths({'run_dir': tmp}),
        custom_metadata={
            'availability_zone': 'myaz',
            'local-hostname': 'test-subclass-hostname',
            'region': 'myregion',
            'some': {'security-credentials': {
                'cred1': 'sekret', 'cred2': 'othersekret'}}})
    sys_info = {
        "python": "3.7",
        "platform":
            "Linux-5.4.0-24-generic-x86_64-with-Ubuntu-20.04-focal",
        "uname": ["Linux", "myhost", "5.4.0-24-generic", "SMP blah",
                  "x86_64"],
        "variant": "ubuntu", "dist": ["ubuntu", "20.04", "focal"]}
    self.assertCountEqual(
        ('merged_cfg', 'security-credentials',),
        datasource.sensitive_metadata_keys)
    # Pin system_info so the rendered 'v1' keys are deterministic.
    with mock.patch("cloudinit.util.system_info", return_value=sys_info):
        datasource.get_data()
    sensitive_json_file = self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, tmp)
    content = util.load_file(sensitive_json_file)
    # Full expected payload: sensitive values appear UNREDACTED in the
    # sensitive file (redaction only happens in the world-readable copy).
    expected = {
        'base64_encoded_keys': [],
        'merged_cfg': {
            '_doc': (
                'Merged cloud-init system config from '
                '/etc/cloud/cloud.cfg and /etc/cloud/cloud.cfg.d/'),
            'datasource': {'_undef': {'key1': False}}},
        'sensitive_keys': [
            'ds/meta_data/some/security-credentials', 'merged_cfg'],
        'sys_info': sys_info,
        'v1': {
            '_beta_keys': ['subplatform'],
            'availability-zone': 'myaz',
            'availability_zone': 'myaz',
            'cloud-name': 'subclasscloudname',
            'cloud_name': 'subclasscloudname',
            'distro': 'ubuntu',
            'distro_release': 'focal',
            'distro_version': '20.04',
            'instance-id': 'iid-datasource',
            'instance_id': 'iid-datasource',
            'kernel_release': '5.4.0-24-generic',
            'local-hostname': 'test-subclass-hostname',
            'local_hostname': 'test-subclass-hostname',
            'machine': 'x86_64',
            'platform': 'mytestsubclass',
            'public_ssh_keys': [],
            'python_version': '3.7',
            'region': 'myregion',
            'subplatform': 'unknown',
            'system_platform':
                'Linux-5.4.0-24-generic-x86_64-with-Ubuntu-20.04-focal',
            'variant': 'ubuntu'},
        'ds': {
            '_doc': EXPERIMENTAL_TEXT,
            'meta_data': {
                'availability_zone': 'myaz',
                'local-hostname': 'test-subclass-hostname',
                'region': 'myregion',
                'some': {
                    'security-credentials':
                        {'cred1': 'sekret', 'cred2': 'othersekret'}}}}
    }
    # NOTE(review): assertCountEqual on dicts compares only the top-level
    # keys; the exact-equality check further down covers the full payload.
    self.assertCountEqual(expected, util.load_json(content))
    # File must be root-readonly (0600).
    file_stat = os.stat(sensitive_json_file)
    self.assertEqual(0o600, stat.S_IMODE(file_stat.st_mode))
    self.assertEqual(expected, util.load_json(content))
def _set_password_server_response(self, response_string):
    """Patch the CloudStack subp call to return *response_string*.

    Returns the MagicMock so callers can inspect how it was invoked.
    """
    fake_subp = mock.MagicMock(return_value=(response_string, ''))
    patcher = mock.patch(
        'cloudinit.sources.DataSourceCloudStack.subp.subp', fake_subp)
    self.patches.enter_context(patcher)
    return fake_subp
def common_patches():
    """Yield with platform/container/FreeBSD detection patched out.

    NOTE(review): generator meant to be wrapped as a context manager by
    its caller; decorator is not visible here.
    """
    platform_patch = mock.patch(
        'cloudinit.util.platform.platform', return_value='Linux')
    dmi_patch = mock.patch.multiple(
        'cloudinit.dmi',
        is_container=mock.Mock(return_value=False),
        is_FreeBSD=mock.Mock(return_value=False))
    with platform_patch, dmi_patch:
        yield
def test_get_schema_returns_global_when_set(self):
    """When FULL_SCHEMA global is already set, get_schema returns it."""
    sentinel = {'here': 'iam'}
    with mock.patch('cloudinit.config.schema.FULL_SCHEMA', sentinel):
        self.assertEqual(sentinel, get_schema())
def _domock(self, mockpath, sattr=None):
    """Start a patcher for *mockpath*, storing the mock as self.<sattr>.

    The patcher is stopped automatically at test teardown.
    """
    started = mock.patch(mockpath)
    setattr(self, sattr, started.start())
    self.addCleanup(started.stop)