Example 1
0
 def test_simple(self):
     """A plain entry writes its content verbatim to the given path."""
     self.patchUtils(self.tmp)
     content = "hello world\n"
     target = "/tmp/my.file"
     write_files(
         "test_simple", [{"path": target, "content": content}])
     # The file on disk must contain exactly what was requested.
     self.assertEqual(util.load_file(target), content)
Example 2
0
 def test_append(self):
     """An entry with append="true" adds to an existing file, not replaces it."""
     self.patchUtils(self.tmp)
     base = "hello "
     suffix = "world\n"
     target = "/tmp/append.file"
     # Seed the file first so the append actually has something to extend.
     util.write_file(target, base)
     write_files(
         "test_append",
         [{"path": target, "content": suffix, "append": "true"}])
     self.assertEqual(util.load_file(target), base + suffix)
def handle(name, cfg, _cloud, log, _args):
    """Write only the config entries explicitly marked with 'defer'.

    Validates the cloud-config against the module schema, then keeps just
    the 'write_files' entries whose 'defer' flag is truthy (defaulting to
    DEFAULT_DEFER) and hands them to write_files.  Logs and returns early
    when nothing is deferred.
    """
    validate_cloudconfig_schema(cfg, schema)
    deferred = []
    for entry in cfg.get('write_files', []):
        if util.get_cfg_option_bool(entry, 'defer', DEFAULT_DEFER):
            deferred.append(entry)
    if not deferred:
        log.debug(("Skipping module named %s,"
                   " no deferred file defined in configuration"), name)
        return
    write_files(name, deferred)
Example 4
0
def handle(name, cfg, _cloud, log, _args):
    """Write only the 'write_files' entries flagged as deferred.

    Entries whose 'defer' option is truthy (default DEFAULT_DEFER) are
    passed to write_files; if none qualify, a debug message is logged and
    the module does nothing.
    """
    deferred = [
        entry
        for entry in cfg.get("write_files", [])
        if util.get_cfg_option_bool(entry, "defer", DEFAULT_DEFER)
    ]
    if deferred:
        write_files(name, deferred)
        return
    log.debug(
        "Skipping module named %s,"
        " no deferred file defined in configuration",
        name,
    )
Example 5
0
    def test_all_decodings(self):
        """Every supported encoding alias round-trips through write_files."""
        self.patchUtils(self.tmp)

        # build a 'files' array that has a dictionary of encodings
        # for 'gz', 'gzip', 'gz+base64' ...
        data = b"foobzr"
        utf8_valid = b"foobzr"
        utf8_invalid = b"ab\xaadef"
        files = []
        expected = []

        gz_aliases = ("gz", "gzip")
        gz_b64_aliases = ("gz+base64", "gzip+base64", "gz+b64", "gzip+b64")
        b64_aliases = ("base64", "b64")

        datum = (("utf8", utf8_valid), ("no-utf8", utf8_invalid))
        for name, data in datum:
            # One (payload, alias-list) triple per encoding family.
            variants = (
                (_gzip_bytes(data), gz_aliases),
                (base64.b64encode(_gzip_bytes(data)), gz_b64_aliases),
                (base64.b64encode(data), b64_aliases),
            )
            for payload, aliases in variants:
                for enc in aliases:
                    entry = {
                        "content": payload,
                        "path": "/tmp/file-%s-%s" % (name, enc),
                        "encoding": enc,
                    }
                    files.append(entry)
                    expected.append((entry["path"], data))

        write_files("test_decoding", files)

        for path, content in expected:
            self.assertEqual(util.load_file(path, decode=False), content)

        # make sure we actually wrote *some* files.
        flen_expected = len(
            gz_aliases + gz_b64_aliases + b64_aliases) * len(datum)
        self.assertEqual(len(expected), flen_expected)
Example 6
0
    def test_all_decodings(self):
        """Each encoding alias ('gz', 'gzip', 'gz+base64', ...) decodes back."""
        self.patchUtils(self.tmp)

        # build a 'files' array that has a dictionary of encodings
        # for 'gz', 'gzip', 'gz+base64' ...
        data = b"foobzr"
        utf8_valid = b"foobzr"
        utf8_invalid = b'ab\xaadef'
        files = []
        expected = []

        gz_aliases = ('gz', 'gzip')
        gz_b64_aliases = ('gz+base64', 'gzip+base64', 'gz+b64', 'gzip+b64')
        b64_aliases = ('base64', 'b64')

        datum = (("utf8", utf8_valid), ("no-utf8", utf8_invalid))
        for name, data in datum:
            encodings = [
                (_gzip_bytes(data), gz_aliases),
                (base64.b64encode(_gzip_bytes(data)), gz_b64_aliases),
                (base64.b64encode(data), b64_aliases),
            ]
            for payload, aliases in encodings:
                for enc in aliases:
                    path = '/tmp/file-%s-%s' % (name, enc)
                    files.append({
                        'content': payload,
                        'path': path,
                        'encoding': enc
                    })
                    expected.append((path, data))

        write_files("test_decoding", files, LOG)

        for path, content in expected:
            self.assertEqual(util.load_file(path, decode=False), content)

        # make sure we actually wrote *some* files.
        alias_count = len(gz_aliases + gz_b64_aliases + b64_aliases)
        self.assertEqual(len(expected), alias_count * len(datum))
    def test_all_decodings(self):
        """All encoding aliases are honored and produce the original bytes."""
        self.patchUtils(self.tmp)

        # build a 'files' array that has a dictionary of encodings
        # for 'gz', 'gzip', 'gz+base64' ...
        data = b"foobzr"
        utf8_valid = b"foobzr"
        utf8_invalid = b'ab\xaadef'
        files = []
        expected = []

        gz_aliases = ('gz', 'gzip')
        gz_b64_aliases = ('gz+base64', 'gzip+base64', 'gz+b64', 'gzip+b64')
        b64_aliases = ('base64', 'b64')

        datum = (("utf8", utf8_valid), ("no-utf8", utf8_invalid))
        for name, data in datum:
            for content, aliases in (
                    (_gzip_bytes(data), gz_aliases),
                    (base64.b64encode(_gzip_bytes(data)), gz_b64_aliases),
                    (base64.b64encode(data), b64_aliases)):
                for enc in aliases:
                    item = {'content': content,
                            'path': '/tmp/file-%s-%s' % (name, enc),
                            'encoding': enc}
                    files.append(item)
                    expected.append((item['path'], data))

        write_files("test_decoding", files)

        for path, content in expected:
            self.assertEqual(util.load_file(path, decode=False), content)

        # make sure we actually wrote *some* files.
        total_aliases = (
            len(gz_aliases) + len(gz_b64_aliases) + len(b64_aliases))
        self.assertEqual(len(expected), total_aliases * len(datum))
Example 8
0
def write_metadata(name, files, cloud, log):
    """Resolve metadata content for each file entry and write the result.

    For every entry in *files* with both a 'path' and a 'data' key, the
    content is fetched via retrieve_metadata(); entries missing either key,
    or whose content cannot be retrieved, are skipped with a warning/no-op.
    The surviving entries are handed to cc_write_files.write_files().

    :param name: module name, used in log messages and passed downstream.
    :param files: list of dicts, each expected to carry 'path' and 'data'.
    :param cloud: cloud object forwarded to retrieve_metadata().
    :param log: logger used for per-entry diagnostics.
    """
    if not files:
        return

    new_files = list()

    for i, f_info in enumerate(files):
        path = f_info.get('path')
        if not path:
            # log.warning: 'warn' is a deprecated alias on stdlib loggers.
            log.warning("No path provided to write for entry %s in module %s",
                        i + 1, name)
            continue
        data = f_info.get('data')
        if not data:
            log.warning("No data provided to write for entry %s in module %s",
                        i + 1, name)
            continue

        f_info['content'] = retrieve_metadata(path, data, cloud, log)
        if f_info['content'] is None:
            # if there is no content, don't write anything to the file
            # (this is not the same as empty content)
            continue

        # Use the default permissions, to suppress a warning from write-files
        f_info.setdefault('permissions', cc_write_files.DEFAULT_PERMS)

        # ensure that we don't get unexpected behavior in a future
        # version of the write_files module
        for key in EXPOSED_DICTS + OPTIONAL_ARGUMENTS:
            if key in f_info:
                del f_info[key]

        new_files.append(f_info)

    # BUG FIX: previously this passed the unfiltered `files` list, which
    # defeated every `continue` above and re-included entries that were
    # skipped for missing path/data or unresolved content.  Only the
    # validated, content-populated entries may be written.
    cc_write_files.write_files(name, new_files)
Example 9
0
 def test_yaml_binary(self):
     """Binary-encoded YAML payloads are decoded and written faithfully."""
     self.patchUtils(self.tmp)
     parsed = util.load_yaml(YAML_TEXT)
     write_files("testname", parsed["write_files"])
     for target, want in YAML_CONTENT_EXPECTED.items():
         self.assertEqual(util.load_file(target), want)
 def test_yaml_binary(self):
     """Files described by the YAML fixture end up with the expected bytes."""
     self.patchUtils(self.tmp)
     cfg = util.load_yaml(YAML_TEXT)
     entries = cfg['write_files']
     write_files("testname", entries)
     for path, expected in YAML_CONTENT_EXPECTED.items():
         self.assertEqual(util.load_file(path), expected)