Example #1
    def test_DryRunMirrorWriter_foocloud_no_filters(self):
        # dry run: nothing is written; target.size totals the bytes that
        # would be transferred
        src = get_mirror_reader("foocloud")
        config = {}
        objectstore = MemoryObjectStore(None)
        target = DryRunMirrorWriter(config, objectstore)
        target.sync(src, "streams/v1/index.json")
        self.assertEqual(1277, target.size)
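None of these snippets show their imports. A plausible preamble for running them on their own is sketched below; the module paths follow the simplestreams package layout, while the home of the get_mirror_reader() test helper is an assumption, since the listing does not show it.

# Assumed preamble; module paths follow the simplestreams package layout,
# and the location of the get_mirror_reader() test helper is a guess.
from simplestreams import mirrors, objectstores
from simplestreams.filters import ItemFilter, filter_item, get_filters
from simplestreams.mirrors import (
    DryRunMirrorWriter, ObjectFilterMirror,
    ObjectStoreMirrorReader, ObjectStoreMirrorWriter)
from simplestreams.mirrors import command_hook as chm
from simplestreams.objectstores import MemoryObjectStore
from tests.testutil import get_mirror_reader  # assumed location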
Example #2
    def test_ObjectFilterMirror_does_item_filter(self):
        src = get_mirror_reader("foocloud")
        filter_list = get_filters(['ftype!=disk1.img'])
        config = {'filters': filter_list}
        objectstore = MemoryObjectStore(None)
        target = ObjectFilterMirror(config, objectstore)
        target.sync(src, "streams/v1/index.json")

        unexpected = [f for f in objectstore.data if 'disk' in f]
        assert len(unexpected) == 0
        assert len(objectstore.data) != 0
Example #3
    def get_clean_src(self, exname, path):
        good_src = get_mirror_reader(exname)
        objectstore = MemoryObjectStore(None)
        target = ObjectStoreMirrorWriter(config={}, objectstore=objectstore)
        target.sync(good_src, path)

        # clean the .data out of the mirror so it doesn't get read
        keys = list(objectstore.data.keys())
        for k in keys:
            if k.startswith(".data"):
                del objectstore.data[k]

        return ObjectStoreMirrorReader(
            objectstore=objectstore, policy=lambda content, path: content)
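get_clean_src is a helper rather than a test: it mirrors the named example data into memory, strips the writer's .data bookkeeping, and hands back a reader over the result. A hypothetical caller in the same test class might use it roughly as follows; the test name and assertion are illustrative, not from the original.

    def test_sync_from_clean_copy(self):
        # hypothetical caller: re-mirror from the cleaned in-memory copy
        src = self.get_clean_src("foocloud", "streams/v1/index.json")
        objectstore = MemoryObjectStore(None)
        target = ObjectStoreMirrorWriter(config={}, objectstore=objectstore)
        target.sync(src, "streams/v1/index.json")
        self.assertNotEqual(0, len(objectstore.data))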
Example #4
    def test_stream_load_empty(self):

        src = get_mirror_reader("foocloud")
        target = chm.CommandHookMirror({'load_products': ['true']})
        oruncmd = chm.run_command

        try:
            chm.run_command = self._run_command
            target.sync(src, "streams/v1/index.json")

        finally:
            chm.run_command = oruncmd

        # the 'load_products' hook should be called once for each content
        # item in the stream.
        self.assertEqual(self._run_commands, [['true'], ['true']])
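Examples #4 and #7 monkey-patch chm.run_command with a self._run_command stub that the listing does not show. A minimal sketch of such a stub, matching the assertions on self._run_commands, is below; the signature and return value are assumptions.

    def setUp(self):
        # collects the argument list of every hook invocation
        self._run_commands = []

    def _run_command(self, cmd, *args, **kwargs):
        # stand-in for chm.run_command: record the command instead of running it
        self._run_commands.append(cmd)
        return 0  # success return value is an assumption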
Example #5
    def test_corrupted_mirror_resume(self):
        # test corrupted .part file is caught
        smirror = get_mirror_reader("foocloud")

        # create a corrupt .part file
        tfile = os.path.join(self.target, FOOCLOUD_FILE)
        os.makedirs(os.path.dirname(tfile))
        with open(tfile + ".part", "w") as fw:
            # just write some invalid data
            fw.write("--bogus--")

        target_objstore = objectstores.FileStore(self.target)
        tmirror = mirrors.ObjectStoreMirrorWriter(config=None,
                                                  objectstore=target_objstore)
        self.assertRaisesRegexp(Exception, r".*%s.*" % FOOCLOUD_FILE,
                                tmirror.sync, smirror, "streams/v1/index.json")

        # now the .part file should be removed, and trying again should succeed
        self.assertFalse(os.path.exists(tfile + ".part"))
        tmirror.sync(smirror, "streams/v1/index.json")
        self.assertFalse(os.path.exists(tfile + ".part"))
Example #6
    def test_mirror_resume(self):
        # test mirror resuming from filestore
        smirror = get_mirror_reader("foocloud")

        # as long as this is less than the size of the file, it's valid
        part_size = 10

        # create a valid .part file
        tfile = os.path.join(self.target, FOOCLOUD_FILE)
        os.makedirs(os.path.dirname(tfile))
        with open(tfile + ".part", "wb") as fw:
            with smirror.source(FOOCLOUD_FILE) as fr:
                fw.write(fr.read(part_size))

        target_objstore = objectstores.FileStore(self.target)
        tmirror = mirrors.ObjectStoreMirrorWriter(config=None,
                                                  objectstore=target_objstore)
        tmirror.sync(smirror, "streams/v1/index.json")

        # the part file should have been cleaned up.  If this fails, then
        # likely the part file wasn't used, and this test is no longer valid
        self.assertFalse(os.path.exists(tfile + ".part"))
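Examples #5 and #6 write under self.target and reference a FOOCLOUD_FILE constant, neither of which appears in the listing. A fixture along these lines is assumed; the FOOCLOUD_FILE value and the class name are placeholders, not taken from the test data.

from unittest import TestCase
import shutil
import tempfile

# Placeholder value; the real constant names one payload file inside the
# foocloud example data.
FOOCLOUD_FILE = "files/release-1/foocloud.tar.gz"


class TestMirrorResume(TestCase):  # hypothetical class name
    def setUp(self):
        # throwaway directory used as the FileStore target for each test
        self.target = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.target)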
Example #7
    def test_stream_insert_product(self):

        src = get_mirror_reader("foocloud")
        target = chm.CommandHookMirror({
            'load_products': ['load-products'],
            'insert_products': ['insert-products']
        })
        oruncmd = chm.run_command

        try:
            chm.run_command = self._run_command
            target.sync(src, "streams/v1/index.json")

        finally:
            chm.run_command = oruncmd

        # the 'load_products' hook should be called once for each content
        # item in the stream; the same goes for 'insert_products'.
        self.assertEqual(
            len([f for f in self._run_commands if f == ['load-products']]), 2)
        self.assertEqual(
            len([f for f in self._run_commands if f == ['insert-products']]),
            2)
Example #8
    def test_foocloud_multiple_paths_remove(self):
        config = {'delete_filtered_items': True}
        memory = ObjectStoreMirrorWriter(config, MemoryObjectStore(None))
        foocloud = get_mirror_reader("foocloud")
        memory.sync(foocloud, "streams/v1/index.json")

        # We synced once; now sync everything that doesn't have the samepaths
        # tag. samepaths reuses some paths, so deleting anything during this
        # second sync would be wrong.
        filters = [ItemFilter("version_name!=samepaths")]

        def no_samepaths(data, src, _target, pedigree):
            return filter_item(filters, data, src, pedigree)

        def dont_remove(*_args):
            # This shouldn't be called, because we are smart and do "reference
            # counting".
            assert False

        memory.filter_version = no_samepaths
        memory.store.remove = dont_remove

        memory.sync(foocloud, "streams/v1/index.json")
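The dont_remove guard is the point of this example: delete_filtered_items is set and the second sync filters out the samepaths versions, yet those versions appear to reuse paths that surviving items still reference, so the writer's reference counting never leaves a path unreferenced and store.remove is never called.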
Example #9
def test_read_signed():
    reader = get_mirror_reader("foocloud")
    # reading the signed index should succeed without raising
    reader.read_json("streams/v1/index.sjson")
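Example #9 only checks that the read does not raise: index.sjson is the signed counterpart of index.json, and the reader returned by get_mirror_reader presumably carries a content policy that accepts signed data, much like the pass-through lambda policy used in the get_clean_src helper above.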