def load_json_or_yaml(data):
    if not data:
        return {}
    try:
        return util.load_json(data)
    except (json.JSONDecodeError, TypeError):
        return util.load_yaml(data)

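A minimal usage sketch (hypothetical inputs, assuming util.load_json and util.load_yaml behave like json.loads and yaml.safe_load): since valid JSON is also valid YAML, the JSON branch is effectively a fast path, and anything that fails JSON parsing falls through to the YAML loader.

# Hypothetical illustration of load_json_or_yaml behavior, assuming
# util.load_json wraps json.loads and util.load_yaml wraps yaml.safe_load.
assert load_json_or_yaml('{"a": 1}') == {'a': 1}              # parsed as JSON
assert load_json_or_yaml('a: 1\nb: 2\n') == {'a': 1, 'b': 2}  # YAML fallback
assert load_json_or_yaml('') == {}                            # empty input short-circuits
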
def test_cloud_config_archive(self):
    non_decodable = b'\x11\xc9\xb4gTH\xee\x12'
    data = [{'content': '#cloud-config\npassword: gocubs\n'},
            {'content': '#cloud-config\nlocale: chicago\n'},
            {'content': non_decodable}]
    message = b'#cloud-config-archive\n' + util.yaml_dumps(data).encode()

    ci = stages.Init()
    ci.datasource = FakeDataSource(message)

    fs = {}

    def fsstore(filename, content, mode=0o0644, omode="wb"):
        fs[filename] = content

    # consuming the user-data provided should write 'cloud_config' file
    # which will have our yaml in it.
    with mock.patch('cloudinit.util.write_file') as mockobj:
        mockobj.side_effect = fsstore
        ci.fetch()
        ci.consume_data()

    cfg = util.load_yaml(fs[ci.paths.get_ipath("cloud_config")])
    self.assertEqual(cfg.get('password'), 'gocubs')
    self.assertEqual(cfg.get('locale'), 'chicago')

def cloud_config(self):
    """Get the cloud-config used by the test."""
    if not self._cloud_config:
        self._cloud_config = c_util.load_yaml(self.conf)
    return self._cloud_config

def test_none_returns_default(self):
    """If yaml.load returns None, then default should be returned."""
    blobs = ("", " ", "# foo\n", "#")
    mdef = self.mydefault
    self.assertEqual(
        [(b, self.mydefault) for b in blobs],
        [(b, util.load_yaml(blob=b, default=mdef)) for b in blobs])

def check_instance_id(self, sys_cfg) -> bool:
    """Return True if instance_id unchanged."""
    response = read_metadata(metadata_only=True)
    md = response.get("meta-data", {})
    if not isinstance(md, dict):
        md = util.load_yaml(md)
    return md.get("instance-id") == self.metadata.get("instance-id")

def test_merging_cloud_config(self):
    blob = '''
#cloud-config
a: b
e: f
run:
 - b
 - c
'''
    message1 = MIMEBase("text", "cloud-config")
    message1.set_payload(blob)

    blob2 = '''
#cloud-config
a: e
e: g
run:
 - stuff
 - morestuff
'''
    message2 = MIMEBase("text", "cloud-config")
    message2['X-Merge-Type'] = ('dict(recurse_array,'
                                'recurse_str)+list(append)+str(append)')
    message2.set_payload(blob2)

    blob3 = '''
#cloud-config
e:
 - 1
 - 2
 - 3
p: 1
'''
    message3 = MIMEBase("text", "cloud-config")
    message3.set_payload(blob3)

    messages = [message1, message2, message3]

    paths = c_helpers.Paths({}, ds=FakeDataSource(''))
    cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths)

    new_root = self.makeDir()
    self.patchUtils(new_root)
    self.patchOS(new_root)
    cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None,
                          None)
    for i, m in enumerate(messages):
        headers = dict(m)
        fn = "part-%s" % (i + 1)
        payload = m.get_payload(decode=True)
        cloud_cfg.handle_part(None, headers['Content-Type'], fn, payload,
                              None, headers)
    cloud_cfg.handle_part(None, handlers.CONTENT_END, None, None, None,
                          None)
    contents = util.load_file(paths.get_ipath('cloud_config'))
    contents = util.load_yaml(contents)
    self.assertEqual(contents['run'], ['b', 'c', 'stuff', 'morestuff'])
    self.assertEqual(contents['a'], 'be')
    self.assertEqual(contents['e'], [1, 2, 3])
    self.assertEqual(contents['p'], 1)

def _raw_instance_data_to_dict(metadata_type: str, metadata_value) -> dict:
    """Convert raw instance data from str, bytes or YAML to dict

    :param metadata_type: string, one of: meta-data, vendor-data, user-data,
        network-config
    :param metadata_value: str, bytes or dict representing instance-data.

    :raises: InvalidMetaDataError on invalid instance-data content.
    """
    if isinstance(metadata_value, dict):
        return metadata_value
    if metadata_value is None:
        return {}
    try:
        parsed_metadata = util.load_yaml(metadata_value)
    except AttributeError as exc:  # not str or bytes
        raise sources.InvalidMetaDataException(
            "Invalid {md_type}. Expected str, bytes or dict but found:"
            " {value}".format(md_type=metadata_type, value=metadata_value)
        ) from exc
    if parsed_metadata is None:
        raise sources.InvalidMetaDataException(
            "Invalid {md_type} format. Expected YAML but found:"
            " {value}".format(md_type=metadata_type, value=metadata_value))
    return parsed_metadata

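A few hypothetical calls illustrating the conversion rules documented above: dicts pass through, None becomes an empty dict, YAML strings are parsed, and anything else raises.

# Hypothetical examples of _raw_instance_data_to_dict's contract:
_raw_instance_data_to_dict("meta-data", {"instance-id": "i-1"})  # unchanged
_raw_instance_data_to_dict("meta-data", None)                    # -> {}
_raw_instance_data_to_dict("meta-data", "instance-id: i-1\n")    # -> {'instance-id': 'i-1'}
# _raw_instance_data_to_dict("meta-data", 42)  # raises: not str/bytes/dict
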
def _get_data(self) -> bool:
    """Crawl LXD socket API instance data and return True on success"""
    if not self._is_platform_viable():
        LOG.debug("Not an LXD datasource: No LXD socket found.")
        return False

    self._crawled_metadata = util.log_time(
        logfunc=LOG.debug, msg='Crawl of metadata service',
        func=read_metadata)
    self.metadata = _raw_instance_data_to_dict(
        "meta-data", self._crawled_metadata.get("meta-data"))
    config = self._crawled_metadata.get("config", {})
    user_metadata = config.get("user.meta-data", {})
    if user_metadata:
        user_metadata = _raw_instance_data_to_dict(
            "user.meta-data", user_metadata)
    if not isinstance(self.metadata, dict):
        self.metadata = util.mergemanydict(
            [util.load_yaml(self.metadata), user_metadata])
    if "user-data" in self._crawled_metadata:
        self.userdata_raw = self._crawled_metadata["user-data"]
    if "network-config" in self._crawled_metadata:
        self._network_config = _maybe_remove_top_network(
            _raw_instance_data_to_dict(
                "network-config", self._crawled_metadata["network-config"]))
    if "vendor-data" in self._crawled_metadata:
        self.vendordata_raw = self._crawled_metadata["vendor-data"]
    return True

def test_variant_sets_distro_in_cloud_cfg_subp(self, tmpdir):
    outfile = tmpdir.join("outcfg").strpath
    subp.subp(self.cmd + ["--variant", "ubuntu", self.tmpl_path, outfile])
    with open(outfile) as stream:
        system_cfg = util.load_yaml(stream.read())
    assert system_cfg["system_info"]["distro"] == "ubuntu"

def _merge_new_seed(cur, seeded):
    ret = cur.copy()

    newmd = seeded.get('meta-data', {})
    if not isinstance(seeded['meta-data'], dict):
        newmd = util.load_yaml(seeded['meta-data'])
    ret['meta-data'] = util.mergemanydict([cur['meta-data'], newmd])

    if seeded.get('network-config'):
        ret['network-config'] = util.load_yaml(seeded['network-config'])

    if 'user-data' in seeded:
        ret['user-data'] = seeded['user-data']
    if 'vendor-data' in seeded:
        ret['vendor-data'] = seeded['vendor-data']
    return ret

def _has_proper_console_support():
    stdout, _ = subp(['lxc', 'info'])
    info = load_yaml(stdout)
    reason = None
    if 'console' not in info.get('api_extensions', []):
        reason = "LXD server does not support console api extension"
    else:
        dver = info.get('environment', {}).get('driver_version', "")
        if dver.startswith("2.") or dver.startswith("1."):
            reason = "LXD Driver version not 3.x+ (%s)" % dver
        else:
            try:
                stdout, stderr = subp(['lxc', 'console', '--help'],
                                      decode=False)
                if not (b'console' in stdout and b'log' in stdout):
                    reason = "no '--log' in lxc console --help"
            except ProcessExecutionError:
                reason = "no 'console' command in lxc client"

    if reason:
        LOG.debug("no console-support: %s", reason)
        return False
    else:
        LOG.debug("console-support looks good")
        return True

def read_kernel_cmdline_config(files=None, mac_addrs=None, cmdline=None):
    if cmdline is None:
        cmdline = util.get_cmdline()
    if files is None:
        files = _get_klibc_net_cfg_files()

    if 'network-config=' in cmdline:
        data64 = None
        for tok in cmdline.split():
            if tok.startswith("network-config="):
                data64 = tok.split("=", 1)[1]
        if data64:
            return util.load_yaml(_b64dgz(data64))

    if not _is_initramfs_netconfig(files, cmdline):
        return None

    if mac_addrs is None:
        mac_addrs = {}
        for k in get_devicelist():
            mac_addr = read_sys_net_safe(k, 'address')
            if mac_addr:
                mac_addrs[k] = mac_addr

    return config_from_klibc_net_cfg(files=files, mac_addrs=mac_addrs)

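The encode side of the `network-config=` token consumed above is not shown here; a minimal stdlib sketch, assuming `_b64dgz` base64-decodes the value and then gunzips it if it is gzip-compressed:

# Sketch of producing a kernel cmdline token that read_kernel_cmdline_config
# could consume, under the assumption stated above about _b64dgz.
import base64
import gzip

netcfg_yaml = b"version: 2\nethernets:\n  eth0:\n    dhcp4: true\n"
token = "network-config=" + base64.b64encode(
    gzip.compress(netcfg_yaml)).decode()
# e.g. appended to the kernel command line:
#   ... ip=dhcp network-config=H4sIAAAA...
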
def test_mime_gzip_compressed(self):
    """Tests that individual message gzip encoding works."""

    def gzip_part(text):
        return MIMEApplication(gzip_text(text), 'gzip')

    base_content1 = '''
#cloud-config
a: 2
'''

    base_content2 = '''
#cloud-config
b: 3
c: 4
'''

    message = MIMEMultipart('test')
    message.attach(gzip_part(base_content1))
    message.attach(gzip_part(base_content2))
    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    new_root = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, new_root)
    self.patchUtils(new_root)
    self.patchOS(new_root)
    ci.fetch()
    ci.consume_data()
    contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    contents = util.load_yaml(contents)
    self.assertTrue(isinstance(contents, dict))
    self.assertEqual(3, len(contents))
    self.assertEqual(2, contents['a'])
    self.assertEqual(3, contents['b'])
    self.assertEqual(4, contents['c'])

def test_include_bad_url(self, mock_sleep):
    """Test #include with a bad URL."""
    bad_url = 'http://bad/forbidden'
    bad_data = '#cloud-config\nbad: true\n'
    httpretty.register_uri(httpretty.GET, bad_url, bad_data, status=403)

    included_url = 'http://hostname/path'
    included_data = '#cloud-config\nincluded: true\n'
    httpretty.register_uri(httpretty.GET, included_url, included_data)

    blob = '#include\n%s\n%s' % (bad_url, included_url)

    self.reRoot()
    ci = stages.Init()
    ci.datasource = FakeDataSource(blob)
    log_file = self.capture_log(logging.WARNING)
    ci.fetch()
    ci.consume_data()

    self.assertIn("403 Client Error: Forbidden for url: %s" % bad_url,
                  log_file.getvalue())

    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    self.assertIsNone(cc.get('bad'))
    self.assertTrue(cc.get('included'))

def test_nonallowed_returns_default(self):
    # for now, anything not in the allowed list just returns the default.
    myyaml = yaml.dump({'1': "one"})
    self.assertEqual(util.load_yaml(blob=myyaml,
                                    default=self.mydefault,
                                    allowed=(str,)),
                     self.mydefault)

def test_python_unicode(self):
    # complex type of python/unicode is explicitly allowed
    myobj = {'1': unicode("FOOBAR")}
    safe_yaml = yaml.dump(myobj)
    self.assertEqual(util.load_yaml(blob=safe_yaml,
                                    default=self.mydefault),
                     myobj)

def _load_merge_files(self):
    merge_root = self.resourceLocation('merge_sources')
    tests = []
    source_ids = collections.defaultdict(list)
    expected_files = {}
    for fn in glob.glob(os.path.join(merge_root, SOURCE_PAT)):
        base_fn = os.path.basename(fn)
        file_id = re.match(r"source(\d+)\-(\d+)[.]yaml", base_fn)
        if not file_id:
            raise IOError("File %s does not have a numeric identifier"
                          % (fn))
        file_id = int(file_id.group(1))
        source_ids[file_id].append(fn)
        expected_fn = os.path.join(merge_root, EXPECTED_PAT % (file_id))
        if not os.path.isfile(expected_fn):
            raise IOError("No expected file found at %s" % (expected_fn))
        expected_files[file_id] = expected_fn
    for i in sorted(source_ids.keys()):
        source_file_contents = []
        for fn in sorted(source_ids[i]):
            source_file_contents.append([fn, util.load_file(fn)])
        expected = util.load_yaml(util.load_file(expected_files[i]))
        entry = [source_file_contents, [expected, expected_files[i]]]
        tests.append(entry)
    return tests

def test_cloud_config_archive(self):
    non_decodable = b"\x11\xc9\xb4gTH\xee\x12"
    data = [
        {"content": "#cloud-config\npassword: gocubs\n"},
        {"content": "#cloud-config\nlocale: chicago\n"},
        {"content": non_decodable},
    ]
    message = b"#cloud-config-archive\n" + safeyaml.dumps(data).encode()

    self.reRoot()
    ci = stages.Init()
    ci.datasource = FakeDataSource(message)

    fs = {}

    def fsstore(filename, content, mode=0o0644, omode="wb"):
        fs[filename] = content

    # consuming the user-data provided should write 'cloud_config' file
    # which will have our yaml in it.
    with mock.patch("cloudinit.util.write_file") as mockobj:
        mockobj.side_effect = fsstore
        ci.fetch()
        ci.consume_data()

    cfg = util.load_yaml(fs[ci.paths.get_ipath("cloud_config")])
    self.assertEqual(cfg.get("password"), "gocubs")
    self.assertEqual(cfg.get("locale"), "chicago")

def test_include_bad_url_no_fail(self, mock_sleep):
    """Test #include with a bad URL and failure disabled"""
    bad_url = "http://bad/forbidden"
    bad_data = "#cloud-config\nbad: true\n"
    httpretty.register_uri(httpretty.GET, bad_url, bad_data, status=403)

    included_url = "http://hostname/path"
    included_data = "#cloud-config\nincluded: true\n"
    httpretty.register_uri(httpretty.GET, included_url, included_data)

    blob = "#include\n%s\n%s" % (bad_url, included_url)

    self.reRoot()
    ci = stages.Init()
    ci.datasource = FakeDataSource(blob)
    log_file = self.capture_log(logging.WARNING)
    ci.fetch()
    ci.consume_data()

    self.assertIn(
        "403 Client Error: Forbidden for url: %s" % bad_url,
        log_file.getvalue(),
    )

    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    self.assertIsNone(cc.get("bad"))
    self.assertTrue(cc.get("included"))

def test_mime_gzip_compressed(self):
    """Tests that individual message gzip encoding works."""

    def gzip_part(text):
        return MIMEApplication(gzip_text(text), "gzip")

    base_content1 = """
#cloud-config
a: 2
"""

    base_content2 = """
#cloud-config
b: 3
c: 4
"""

    message = MIMEMultipart("test")
    message.attach(gzip_part(base_content1))
    message.attach(gzip_part(base_content2))
    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    self.reRoot()
    ci.fetch()
    ci.consume_data()
    contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    contents = util.load_yaml(contents)
    self.assertTrue(isinstance(contents, dict))
    self.assertEqual(3, len(contents))
    self.assertEqual(2, contents["a"])
    self.assertEqual(3, contents["b"])
    self.assertEqual(4, contents["c"])

def test_cloud_config_as_x_shell_script(self):
    blob_cc = """
#cloud-config
a: b
c: d
"""
    message_cc = MIMEBase("text", "x-shellscript")
    message_cc.set_payload(blob_cc)

    blob_jp = """
#cloud-config-jsonp
[
    { "op": "replace", "path": "/a", "value": "c" },
    { "op": "remove", "path": "/c" }
]
"""
    message_jp = MIMEBase("text", "cloud-config-jsonp")
    message_jp.set_payload(blob_jp)

    message = MIMEMultipart()
    message.attach(message_cc)
    message.attach(message_jp)

    self.reRoot()
    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    ci.fetch()
    ci.consume_data()
    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    self.assertEqual(1, len(cc))
    self.assertEqual("c", cc["a"])

def test_mixed_cloud_config(self):
    blob_cc = """
#cloud-config
a: b
c: d
"""
    message_cc = MIMEBase("text", "cloud-config")
    message_cc.set_payload(blob_cc)

    blob_jp = """
#cloud-config-jsonp
[
    { "op": "replace", "path": "/a", "value": "c" },
    { "op": "remove", "path": "/c" }
]
"""
    message_jp = MIMEBase("text", "cloud-config-jsonp")
    message_jp.set_payload(blob_jp)

    message = MIMEMultipart()
    message.attach(message_cc)
    message.attach(message_jp)

    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    new_root = self.makeDir()
    self.patchUtils(new_root)
    self.patchOS(new_root)
    ci.fetch()
    ci.consume_userdata()
    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    self.assertEqual(1, len(cc))
    self.assertEqual("c", cc["a"])

def test_mixed_cloud_config(self):
    blob_cc = '''
#cloud-config
a: b
c: d
'''
    message_cc = MIMEBase("text", "cloud-config")
    message_cc.set_payload(blob_cc)

    blob_jp = '''
#cloud-config-jsonp
[
    { "op": "replace", "path": "/a", "value": "c" },
    { "op": "remove", "path": "/c" }
]
'''
    message_jp = MIMEBase('text', "cloud-config-jsonp")
    message_jp.set_payload(blob_jp)

    message = MIMEMultipart()
    message.attach(message_cc)
    message.attach(message_jp)

    self.reRoot()
    ci = stages.Init()
    ci.datasource = FakeDataSource(str(message))
    ci.fetch()
    ci.consume_data()
    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    self.assertEqual(1, len(cc))
    self.assertEqual('c', cc['a'])

def merge_results(data, path):
    """Handle merging results from collect phase and verify phase."""
    current = {}
    if os.path.exists(path):
        with open(path, 'r') as fp:
            current = c_util.load_yaml(fp.read())
    current.update(data)
    yaml_dump(current, path)

def handle_part(self, _data, ctype, filename, payload, frequency):
    if ctype == TAUPAGE_AMI_CONFIG_MIME_TYPE:
        LOG.info("Got Taupage AMI configuration; merging with "
                 "{config}".format(config=TAUPAGE_CONFIG))

        LOG.debug("Parsing given input...")
        config_new = util.load_yaml(payload)

        LOG.debug("Loading existing configuration...")
        config_yaml = util.read_file_or_url(TAUPAGE_CONFIG)
        config_old = util.load_yaml(config_yaml)

        LOG.debug("Merging configurations...")
        # py2/py3-safe merge; new keys override old ones
        config_merged = dict(config_old)
        config_merged.update(config_new)

        LOG.debug("Storing merged configuration...")
        config_yaml = util.yaml_dumps(config_merged)
        util.write_file(TAUPAGE_CONFIG, config_yaml, 0o444)

def _merge_new_seed(cur, seeded):
    ret = cur.copy()
    ret['meta-data'] = util.mergemanydict(
        [cur['meta-data'], util.load_yaml(seeded['meta-data'])])
    ret['user-data'] = seeded['user-data']
    if 'vendor-data' in seeded:
        ret['vendor-data'] = seeded['vendor-data']
    return ret

def test_variant_sets_distro_in_cloud_cfg(self, variant, tmpdir):
    outfile = tmpdir.join('outcfg').strpath
    subp.subp(self.cmd + ['--variant', variant, self.tmpl_path, outfile])
    with open(outfile) as stream:
        system_cfg = util.load_yaml(stream.read())
    if variant == 'unknown':
        variant = 'ubuntu'  # Unknown is defaulted to ubuntu
    assert system_cfg['system_info']['distro'] == variant

def _merge_new_seed(cur, seeded):
    ret = cur.copy()

    newmd = seeded.get("meta-data", {})
    if not isinstance(seeded["meta-data"], dict):
        newmd = util.load_yaml(seeded["meta-data"])
    ret["meta-data"] = util.mergemanydict([cur["meta-data"], newmd])

    if seeded.get("network-config"):
        ret["network-config"] = _maybe_remove_top_network(
            util.load_yaml(seeded.get("network-config")))

    if "user-data" in seeded:
        ret["user-data"] = seeded["user-data"]
    if "vendor-data" in seeded:
        ret["vendor-data"] = seeded["vendor-data"]
    return ret

def test_variant_sets_distro_in_cloud_cfg(self, variant, tmpdir):
    outfile = tmpdir.join("outcfg").strpath
    subp.subp(self.cmd + ["--variant", variant, self.tmpl_path, outfile])
    with open(outfile) as stream:
        system_cfg = util.load_yaml(stream.read())
    if variant == "unknown":
        variant = "ubuntu"  # Unknown is defaulted to ubuntu
    assert system_cfg["system_info"]["distro"] == variant

def test_bogus_scan_error_returns_default(self):
    '''On Yaml scan error, load_yaml returns the default and logs issue.'''
    badyaml = "1\n 2:"
    self.assertEqual(util.load_yaml(blob=badyaml,
                                    default=self.mydefault),
                     self.mydefault)
    self.assertIn(
        'Failed loading yaml blob. Invalid format at line 2 column 3:'
        ' "mapping values are not allowed here',
        self.logs.getvalue())

def test_variant_sets_network_renderer_priority_in_cloud_cfg(
        self, variant, renderers, tmpdir):
    outfile = tmpdir.join('outcfg').strpath
    subp.subp(self.cmd + ['--variant', variant, self.tmpl_path, outfile])
    with open(outfile) as stream:
        system_cfg = util.load_yaml(stream.read())

    assert renderers == system_cfg['system_info']['network']['renderers']

def read_metadata(url, timeout=2, sec_between=2, retries=30):
    response = url_helper.readurl(url, timeout=timeout,
                                  sec_between=sec_between, retries=retries)
    if not response.ok():
        raise RuntimeError("unable to read metadata at %s" % url)
    return util.load_yaml(response.contents.decode())

def test_merging_cloud_config(self):
    blob = """
#cloud-config
a: b
e: f
run:
 - b
 - c
"""
    message1 = MIMEBase("text", "cloud-config")
    message1.set_payload(blob)

    blob2 = """
#cloud-config
a: e
e: g
run:
 - stuff
 - morestuff
"""
    message2 = MIMEBase("text", "cloud-config")
    message2["X-Merge-Type"] = (
        "dict(recurse_array,recurse_str)+list(append)+str(append)")
    message2.set_payload(blob2)

    blob3 = """
#cloud-config
e:
 - 1
 - 2
 - 3
p: 1
"""
    message3 = MIMEBase("text", "cloud-config")
    message3.set_payload(blob3)

    messages = [message1, message2, message3]

    paths = c_helpers.Paths({}, ds=FakeDataSource(""))
    cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths)

    self.reRoot()
    cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None,
                          None)
    for i, m in enumerate(messages):
        headers = dict(m)
        fn = "part-%s" % (i + 1)
        payload = m.get_payload(decode=True)
        cloud_cfg.handle_part(None, headers["Content-Type"], fn, payload,
                              None, headers)
    cloud_cfg.handle_part(None, handlers.CONTENT_END, None, None, None,
                          None)
    contents = util.load_file(paths.get_ipath("cloud_config"))
    contents = util.load_yaml(contents)
    self.assertEqual(contents["run"], ["b", "c", "stuff", "morestuff"])
    self.assertEqual(contents["a"], "be")
    self.assertEqual(contents["e"], [1, 2, 3])
    self.assertEqual(contents["p"], 1)

def test_bogus_parse_error_returns_default(self):
    '''On Yaml parse error, load_yaml returns default and logs issue.'''
    badyaml = "{}}"
    self.assertEqual(util.load_yaml(blob=badyaml,
                                    default=self.mydefault),
                     self.mydefault)
    self.assertIn(
        'Failed loading yaml blob. Invalid format at line 1 column 3:'
        " \"expected '<document start>', but found '}'",
        self.logs.getvalue())

def test_merging_cloud_config(self):
    blob = """
#cloud-config
a: b
e: f
run:
 - b
 - c
"""
    message1 = MIMEBase("text", "cloud-config")
    message1.set_payload(blob)

    blob2 = """
#cloud-config
a: e
e: g
run:
 - stuff
 - morestuff
"""
    message2 = MIMEBase("text", "cloud-config")
    message2["X-Merge-Type"] = ("dict(recurse_array,"
                                "recurse_str)+list(append)+str(append)")
    message2.set_payload(blob2)

    blob3 = """
#cloud-config
e:
 - 1
 - 2
 - 3
p: 1
"""
    message3 = MIMEBase("text", "cloud-config")
    message3.set_payload(blob3)

    messages = [message1, message2, message3]

    paths = c_helpers.Paths({}, ds=FakeDataSource(""))
    cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths)

    new_root = self.makeDir()
    self.patchUtils(new_root)
    self.patchOS(new_root)
    cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None,
                          None)
    for i, m in enumerate(messages):
        headers = dict(m)
        fn = "part-%s" % (i + 1)
        payload = m.get_payload(decode=True)
        cloud_cfg.handle_part(None, headers["Content-Type"], fn, payload,
                              None, headers)
    cloud_cfg.handle_part(None, handlers.CONTENT_END, None, None, None,
                          None)
    contents = util.load_file(paths.get_ipath("cloud_config"))
    contents = util.load_yaml(contents)
    self.assertEqual(contents["run"], ["b", "c", "stuff", "morestuff"])
    self.assertEqual(contents["a"], "be")
    self.assertEqual(contents["e"], [1, 2, 3])
    self.assertEqual(contents["p"], 1)

def handle_part(data, ctype, filename, payload):
    # data: the cloudinit object
    # ctype: '__begin__', '__end__', or the specific mime-type of the part
    # filename: the filename for the part, or a dynamically generated name
    #           if no filename attribute is present
    # payload: the content of the part (empty for begin or end)
    if ctype == "__begin__":
        # print "my handler is beginning"
        return
    if ctype == "__end__":
        # print "my handler is ending"
        return

    logger.info('==== received ctype=%s filename=%s ====' % (ctype, filename))

    # Payload should be interpreted as yaml since configuration is given
    # in cloud-config format
    cfg = util.load_yaml(payload)

    # If there isn't a se_config reference in the configuration
    # don't do anything
    if 'se_config' not in cfg:
        logger.error('se_config configuration was not found!')
        return

    logger.info('ready to configure SE')
    global se_config_cfg
    se_config_cfg = cfg['se_config']

    if 'name' in se_config_cfg:
        name = se_config_cfg['name']
        logger.info('configuring SE: ' + name)
    else:
        logger.error('SE name not specified!')
        return

    logger.info('linking to proper mysql-connector-java.jar')
    if 'javamc' in se_config_cfg:
        javamc = se_config_cfg['javamc']
    else:
        javamc = 'mysql-connector-java-5.1.17.jar'

    try:
        cmd = ('rm -f /usr/share/java/storm-backend-server/'
               'mysql-connector-java-*.jar')
        DPopen(cmd, 'True')
    except:
        logger.error('could not remove /usr/share/java/'
                     'storm-backend-server/mysql-connector-java-*.jar')
        return

    try:
        cmd = ('ln -s /usr/share/java/' + javamc +
               ' /usr/share/java/storm-backend-server/' + javamc)
        DPopen(cmd, 'True')
    except:
        logger.error('could not link /usr/share/java/' + javamc +
                     ' to /usr/share/java/storm-backend-server/!')
        return

    logger.info('==== end ctype=%s filename=%s' % (ctype, filename))

def test_nonallowed_returns_default(self):
    '''Any unallowed types result in returning default; log the issue.'''
    # for now, anything not in the allowed list just returns the default.
    myyaml = yaml.dump({'1': "one"})
    self.assertEqual(util.load_yaml(blob=myyaml,
                                    default=self.mydefault,
                                    allowed=(str,)),
                     self.mydefault)
    regex = re.compile(
        r'Yaml load allows \(<(class|type) \'str\'>,\) root types, but'
        r' got dict')
    self.assertTrue(regex.search(self.logs.getvalue()),
                    msg='Missing expected yaml load error')

def _explode_archive(self, archive, append_msg):
    entries = util.load_yaml(archive, default=[], allowed=(list, set))
    for ent in entries:
        # ent can be one of:
        #   dict {'filename': 'value', 'content': 'value', 'type': 'value'}
        #   (filename and type may not be present)
        # or
        #   scalar(payload)
        if isinstance(ent, six.string_types):
            ent = {"content": ent}
        if not isinstance(ent, dict):
            # TODO(harlowja) raise?
            continue

        content = ent.get("content", "")
        mtype = ent.get("type")
        if not mtype:
            default = ARCHIVE_UNDEF_TYPE
            if isinstance(content, six.binary_type):
                default = ARCHIVE_UNDEF_BINARY_TYPE
            mtype = handlers.type_from_starts_with(content, default)

        maintype, subtype = mtype.split("/", 1)
        if maintype == "text":
            if isinstance(content, six.binary_type):
                content = content.decode()
            msg = MIMEText(content, _subtype=subtype)
        else:
            msg = MIMEBase(maintype, subtype)
            msg.set_payload(content)

        if "filename" in ent:
            _set_filename(msg, ent["filename"])
        if "launch-index" in ent:
            msg.add_header("Launch-Index", str(ent["launch-index"]))

        for header in list(ent.keys()):
            if header.lower() in (
                "content",
                "filename",
                "type",
                "launch-index",
                "content-disposition",
                ATTACHMENT_FIELD.lower(),
                CONTENT_TYPE.lower(),
            ):
                continue
            msg.add_header(header, ent[header])

        self._attach_part(append_msg, msg)

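For reference, a small sketch of the archive shape this consumes, matching the comment above: a YAML list of scalar payloads and/or dicts with optional filename, type, and extra headers (PyYAML used here for illustration).

# Sketch of a #cloud-config-archive payload that _explode_archive splits
# into MIME parts; field names follow the comment in the function above.
import yaml

archive = yaml.safe_dump([
    "#cloud-config\npassword: example\n",        # scalar payload
    {"content": "#!/bin/sh\necho hi\n",          # dict entry with
     "filename": "part-hello",                   # optional filename
     "type": "text/x-shellscript"},              # and explicit mime type
])
user_data = "#cloud-config-archive\n" + archive
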
def handle_part(data, ctype, filename, payload):
    # data: the cloudinit object
    # ctype: '__begin__', '__end__', or the specific mime-type of the part
    # filename: the filename for the part, or a dynamically generated name
    #           if no filename attribute is present
    # payload: the content of the part (empty for begin or end)
    if ctype == "__begin__":
        # print "my handler is beginning"
        return
    if ctype == "__end__":
        # print "my handler is ending"
        return

    logger.info('==== received ctype=%s filename=%s ====' % (ctype, filename))

    # Payload should be interpreted as yaml since configuration is given
    # in cloud-config format
    cfg = util.load_yaml(payload)

    # If there isn't a zabbix reference in the configuration
    # don't do anything
    if 'zabbix' not in cfg:
        logger.error('zabbix configuration was not found!')
        return
    else:
        install = cfg['zabbix']
        if install == False:
            logger.info("I'm NOT going to install zabbix!")
            return

    logger.info('Installing Zabbix...')
    try:
        cmd = ('yum -y --enablerepo=epel install zabbix zabbix-agent')
        DPopen(shlex.split(cmd), 'False')
    except:
        logger.error('could not install Zabbix!')

    logger.info('starting Zabbix agent...')
    try:
        cmd = ('chkconfig zabbix-agent on')
        DPopen(cmd, 'True')
        cmd = ('service zabbix-agent restart')
        DPopen(cmd, 'True')
    except:
        logger.error('could not start Zabbix agent!')
        return

    logger.info('==== end ctype=%s filename=%s' % (ctype, filename))

def handle_part(data, ctype, filename, payload):
    # data: the cloudinit object
    # ctype: '__begin__', '__end__', or the specific mime-type of the part
    # filename: the filename for the part, or a dynamically generated name
    #           if no filename attribute is present
    # payload: the content of the part (empty for begin or end)
    if ctype == "__begin__":
        return
    if ctype == "__end__":
        return

    logger.info('==== received ctype=%s filename=%s ====' % (ctype, filename))

    # Payload should be interpreted as yaml since configuration is given
    # in cloud-config format
    cfg = util.load_yaml(payload)

    # If there isn't a myproxy_config reference in the configuration
    # don't do anything
    if 'myproxy_config' not in cfg:
        logger.error('myproxy_config configuration was not found!')
        return

    logger.info('ready to configure PX')
    global myproxy_config_cfg
    myproxy_config_cfg = cfg['myproxy_config']

    logger.info('Copying certificate to /etc/grid-security/myproxy...')
    try:
        cmd = ('mkdir /etc/grid-security/myproxy/')
        DPopen(cmd, 'True')
    except:
        logger.error('could not create /etc/grid-security/myproxy!')
        return

    for item in ['hostcert.pem', 'hostkey.pem']:
        try:
            cmd = ('cp /etc/grid-security/' + item +
                   ' /etc/grid-security/myproxy/')
            DPopen(cmd, 'True')
        except:
            logger.error('could not copy ' + item + '!')
            return
        try:
            cmd = ('chown myproxy:myproxy /etc/grid-security/myproxy/' + item)
            DPopen(cmd, 'True')
        except:
            logger.error('could not change ownership of ' + item + '!')
            return

    logger.info('==== end ctype=%s filename=%s' % (ctype, filename))

def handle_part(self, _data, ctype, filename, payload, frequency):
    if ctype == TAUPAGE_AMI_CONFIG_MIME_TYPE:
        LOG.info("Got Taupage AMI configuration; merging with "
                 "{config}".format(config=TAUPAGE_CONFIG))

        LOG.debug("Parsing given input...")
        config_new = util.load_yaml(payload)

        LOG.debug("Loading existing configuration...")
        config_yaml = util.read_file_or_url(TAUPAGE_CONFIG)
        config_old = util.load_yaml(config_yaml)

        LOG.debug("Merging configurations...")
        # py2/py3-safe merge; new keys override old ones
        config_merged = dict(config_old)
        config_merged.update(config_new)

        LOG.debug("Storing merged configuration...")
        config_yaml = util.yaml_dumps(config_merged)
        util.write_file(TMP_TAUPAGE_CONFIG, config_yaml, 0o444)

        LOG.debug("Comparing current configuration with the old one...")
        subprocess.call(['diff', '-u0', TAUPAGE_CONFIG, TMP_TAUPAGE_CONFIG])

        LOG.debug("Moving the new configuration into place...")
        shutil.move(TMP_TAUPAGE_CONFIG, TAUPAGE_CONFIG)

def test_include(self, mock_sleep):
    """Test #include."""
    included_url = 'http://hostname/path'
    included_data = '#cloud-config\nincluded: true\n'
    httpretty.register_uri(httpretty.GET, included_url, included_data)

    blob = '#include\n%s\n' % included_url

    self.reRoot()
    ci = stages.Init()
    ci.datasource = FakeDataSource(blob)
    ci.fetch()
    ci.consume_data()
    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    self.assertTrue(cc.get('included'))

def test_apt_v1_srcl_custom(self):
    """Test rendering from a custom source.list template"""
    cfg = util.load_yaml(YAML_TEXT_CUSTOM_SL)
    mycloud = self._get_cloud('ubuntu')

    # the second mock restores the original subp
    with mock.patch.object(util, 'write_file') as mockwrite:
        with mock.patch.object(util, 'subp', self.subp):
            with mock.patch.object(Distro, 'get_primary_arch',
                                   return_value='amd64'):
                cc_apt_configure.handle("notimportant", cfg, mycloud,
                                        LOG, None)

    mockwrite.assert_called_once_with(
        '/etc/apt/sources.list',
        EXPECTED_CONVERTED_CONTENT,
        mode=420)

def handle_part(data, ctype, filename, payload):
    # data: the cloudinit object
    # ctype: '__begin__', '__end__', or the specific mime-type of the part
    # filename: the filename for the part, or a dynamically generated name
    #           if no filename attribute is present
    # payload: the content of the part (empty for begin or end)
    if ctype == "__begin__":
        # print "my handler is beginning"
        return
    if ctype == "__end__":
        # print "my handler is ending"
        return

    logger.info('==== received ctype=%s filename=%s ====' % (ctype, filename))

    # Payload should be interpreted as yaml since configuration is given
    # in cloud-config format
    cfg = util.load_yaml(payload)

    # If there isn't an apel reference in the configuration
    # don't do anything
    if 'apel' not in cfg:
        logger.error('apel configuration was not found!')
        return

    logger.info('ready to configure APEL')
    global apel_cfg
    apel_cfg = cfg['apel']

    if 'parser.cfg' in apel_cfg:
        os.mkdir('/etc/apel')
        try:
            val = apel_cfg['parser.cfg']
            get_embedded('parser.cfg', val, '/etc/apel/')
        except:
            logger.error('could not write configuration file!')
            return

        try:
            cmd = ('echo "00 19 * * * root /usr/bin/apelparser"'
                   ' > /etc/cron.d/apel')
            DPopen(cmd, 'True')
        except:
            logger.error('could not add apelparser to crontab!')
            return

    logger.info('==== end ctype=%s filename=%s' % (ctype, filename))

def fetch_base_config():
    base_cfgs = []
    default_cfg = util.get_builtin_cfg()
    kern_contents = util.read_cc_from_cmdline()

    # Kernel/cmdline parameters override system config
    if kern_contents:
        base_cfgs.append(util.load_yaml(kern_contents, default={}))

    # Anything in your conf.d location??
    # or the 'default' cloud.cfg location???
    base_cfgs.append(util.read_conf_with_confd(CLOUD_CONFIG))

    # And finally the default gets to play
    if default_cfg:
        base_cfgs.append(default_cfg)

    return util.mergemanydict(base_cfgs)

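For illustration, a shallow stand-in for the precedence applied above, assuming util.mergemanydict gives earlier list entries priority (which matches the "Kernel/cmdline parameters override system config" comment; the real helper merges nested dicts recursively).

# Shallow sketch of first-entry-wins merging; not the real mergemanydict.
def merge_first_wins(dicts):
    merged = {}
    for d in reversed(dicts):  # apply lowest-priority dicts first
        merged.update(d)       # later (higher-priority) dicts overwrite
    return merged

assert merge_first_wins([{'a': 1}, {'a': 2, 'b': 2}]) == {'a': 1, 'b': 2}
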
def handle_part(data, ctype, filename, payload):
    # data: the cloudinit object
    # ctype: '__begin__', '__end__', or the specific mime-type of the part
    # filename: the filename for the part, or a dynamically generated name
    #           if no filename attribute is present
    # payload: the content of the part (empty for begin or end)
    if ctype == "__begin__":
        # print "my handler is beginning"
        return
    if ctype == "__end__":
        # print "my handler is ending"
        return

    logger.info('==== received ctype=%s filename=%s ====' % (ctype, filename))

    # Payload should be interpreted as yaml since configuration is given
    # in cloud-config format
    cfg = util.load_yaml(payload)

    # If there isn't an addons reference in the configuration
    # don't do anything
    if 'addons' not in cfg:
        logger.error('addons configuration was not found!')
        return
    else:
        repos = cfg['addons']

    for repo in repos:
        logger.info('installing packages from repo ' + repo + ':')
        string = ''
        pkgs = repos[repo]
        for pkg in pkgs:
            string += pkg + ' '
        logger.info(string)
        try:
            # plain install when no extra repo is requested; otherwise
            # enable the named repo for this transaction
            if repo == 'none':
                cmd = ('yum -y install ' + string)
            else:
                cmd = ('yum -y --enablerepo=' + repo + ' install ' + string)
            DPopen(cmd, 'True')
        except:
            logger.error('error: could not install addons!')

    logger.info('==== end ctype=%s filename=%s' % (ctype, filename))

def _extract_mergers(self, payload, headers):
    merge_header_headers = ''
    for h in [MERGE_HEADER, 'X-%s' % (MERGE_HEADER)]:
        tmp_h = headers.get(h, '')
        if tmp_h:
            merge_header_headers = tmp_h
            break

    # Select either the merge-type from the content
    # or the merge type from the headers or default to our own set
    # if neither exists (or is empty) from the latter.
    payload_yaml = util.load_yaml(payload)
    mergers_yaml = mergers.dict_extract_mergers(payload_yaml)
    mergers_header = mergers.string_extract_mergers(merge_header_headers)
    all_mergers = []
    all_mergers.extend(mergers_yaml)
    all_mergers.extend(mergers_header)
    if not all_mergers:
        all_mergers = DEF_MERGERS
    return (payload_yaml, all_mergers)

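A hypothetical sketch of the two places a merge type can come from here, using the merge-type string format exercised by test_merging_cloud_config above; the 'merge_how' key is an assumption about what mergers.dict_extract_mergers looks for.

# Hypothetical inputs to _extract_mergers (names are illustrative):
# 1. via the X-Merge-Type header of the MIME part
headers = {'X-Merge-Type':
           'dict(recurse_array,recurse_str)+list(append)+str(append)'}
# 2. via a key inside the cloud-config payload itself (assumed 'merge_how')
payload = "#cloud-config\nmerge_how: 'dict(recurse_array)+list(append)'\na: b\n"
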
def test_simple_jsonp(self):
    blob = '''
#cloud-config-jsonp
[
    { "op": "add", "path": "/baz", "value": "qux" },
    { "op": "add", "path": "/bar", "value": "qux2" }
]
'''
    ci = stages.Init()
    ci.datasource = FakeDataSource(blob)
    self.reRoot()
    ci.fetch()
    ci.consume_data()
    cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
    cc = util.load_yaml(cc_contents)
    self.assertEqual(2, len(cc))
    self.assertEqual('qux', cc['baz'])
    self.assertEqual('qux2', cc['bar'])
