# Example 1
    def _restoreNormal(self, cs, normalRestoreList, preRestored):
        """Restore non-config file contents from the changeset cs.

        Consumes (pathId, fileId, sha1, restoreContents) work items from
        normalRestoreList, skipping sha1s listed in preRestored and any
        (pathId, fileId) pair already handled, and feeds the compressed
        contents to addFileContents.  Ptr-type entries are re-queued via
        a binary-search insert and get their reference counts bumped in
        a second pass.
        """
        ptrRestores = []
        ptrRefsAdded = {}
        # Restore each (pathId, fileId) combination at most once.
        previous = None
        while normalRestoreList:
            pathId, fileId, sha1, restoreContents = normalRestoreList.pop(0)
            if preRestored is not None and sha1 in preRestored:
                continue
            current = (pathId, fileId)
            if current == previous:
                continue
            previous = current

            try:
                contType, fileContents = cs.getFileContents(
                    pathId, fileId, compressed=True)
            except KeyError:
                raise errors.IntegrityError(
                    "Missing file contents for pathId %s, fileId %s" %
                    (sha1helper.md5ToString(pathId),
                     sha1helper.sha1ToString(fileId)))

            if contType != changeset.ChangedFileTypes.ptr:
                assert (contType == changeset.ChangedFileTypes.file)
                self.addFileContents(sha1,
                                     fileContents,
                                     restoreContents,
                                     0,
                                     precompressed=True)
                continue

            ptrRestores.append(sha1)
            target = util.decompressString(fileContents.get().read())

            inserted = util.tupleListBsearchInsert(
                normalRestoreList,
                (target[:16], target[16:], sha1, True), self.ptrCmp)
            if inserted:
                # The insert created a reference in the datastore; track
                # it so the second pass does not add a duplicate
                # reference count.
                ptrRefsAdded[sha1] = True

        for sha1 in ptrRestores:
            # Increment the reference count for items which were ptr's
            # to a different file, unless the insert above already
            # created the reference.
            if sha1 in ptrRefsAdded:
                del ptrRefsAdded[sha1]
            else:
                self.addFileContents(sha1, None, False, 0)
# Example 2
    def testTupleListBsearchInsert(self):
        """Verify util.tupleListBsearchInsert keeps the list ordered by
        the comparison function and never inserts a duplicate: every
        insert is attempted twice and the second attempt must be a
        no-op.
        """
        def compareSecond(left, right):
            # cmp-style ordering on element [1] of each tuple.
            if left[1] == right[1]:
                return 0
            if left[1] < right[1]:
                return -1
            return 1

        result = []
        steps = [
            (('v', 5), [('v', 5)]),
            (('e', 22), [('v', 5), ('e', 22)]),
            (('b', 25), [('v', 5), ('e', 22), ('b', 25)]),
            (('y', 2), [('y', 2), ('v', 5), ('e', 22), ('b', 25)]),
            (('g', 20),
             [('y', 2), ('v', 5), ('g', 20), ('e', 22), ('b', 25)]),
            (('t', 18),
             [('y', 2), ('v', 5), ('t', 18), ('g', 20), ('e', 22),
              ('b', 25)]),
        ]
        for item, expected in steps:
            util.tupleListBsearchInsert(result, item, compareSecond)
            assert(result == expected)
            # Inserting the same item again must not change the list.
            util.tupleListBsearchInsert(result, item, compareSecond)
            assert(result == expected)
# Example 3
    def testTupleListBsearchInsert(self):
        """Exercise util.tupleListBsearchInsert: elements stay sorted by
        the comparator and duplicates are rejected (each insert is run
        twice; the repeat must leave the list unchanged).
        """
        def bySecondField(a, b):
            # Compare tuples by their second element only.
            if a[1] < b[1]:
                return -1
            if a[1] > b[1]:
                return 1
            return 0

        l = []
        expectedStates = (
            (('v', 5),
             [('v', 5)]),
            (('e', 22),
             [('v', 5), ('e', 22)]),
            (('b', 25),
             [('v', 5), ('e', 22), ('b', 25)]),
            (('y', 2),
             [('y', 2), ('v', 5), ('e', 22), ('b', 25)]),
            (('g', 20),
             [('y', 2), ('v', 5), ('g', 20), ('e', 22), ('b', 25)]),
            (('t', 18),
             [('y', 2), ('v', 5), ('t', 18), ('g', 20), ('e', 22),
              ('b', 25)]),
        )
        for entry, snapshot in expectedStates:
            # First insert places the entry in sorted position...
            util.tupleListBsearchInsert(l, entry, bySecondField)
            assert(l == snapshot)
            # ...and a second, identical insert is ignored.
            util.tupleListBsearchInsert(l, entry, bySecondField)
            assert(l == snapshot)
# Example 4
    def __init__(self, repos, cs, fileHostFilter = None, callback = None,
                 resetTimestamps = False, allowIncomplete = False,
                 hidden = False, mirror = False,
                 excludeCapsuleContents = False):
        """Commit the contents of changeset cs into repository repos.

        Creates the install trove objects, restores config-file and
        normal-file contents, marks removed troves, and processes the
        old-trove list.  Raises errors.IntegrityError when the changeset
        lacks file contents it references.
        """
        # Avoid the shared-mutable-default pitfall; the effective
        # default remains an empty list.
        if fileHostFilter is None:
            fileHostFilter = []

        self.repos = repos
        self.cs = cs
        self.invalidateRollbacksFlag = False

        newList = [ x for x in cs.iterNewTroveList() ]

        if resetTimestamps:
            # This depends intimately on the versions cache. We don't
            # change the timestamps on each version, because the cache
            # ensures they are all a single underlying object. Slick,
            # but brittle?
            updated = {}

            for csTrove in newList:
                ver = csTrove.getNewVersion()
                if ver not in updated:
                    oldVer = ver.copy()
                    ver.trailingRevision().resetTimeStamp()
                    updated[oldVer] = ver

            del updated

        troveNo, configRestoreList, normalRestoreList = \
            self._createInstallTroveObjects(fileHostFilter = fileHostFilter,
                                            callback = callback,
                                            mirror = mirror, hidden = hidden,
                                            allowIncomplete = allowIncomplete,
                                            excludeCapsuleContents =
                                                excludeCapsuleContents)
        configRestoreList, normalRestoreList = \
            self._filterRestoreList(configRestoreList, normalRestoreList)

        # use a key to select data up to, but not including, the first
        # version.  We can't sort on version because we don't have timestamps
        configRestoreList.sort(key=lambda x: x[0:5])
        normalRestoreList.sort(key=lambda x: x[0:3])

        # config files are cached, so we don't have to worry about not
        # restoring the same fileId/pathId twice
        # NOTE(review): positions 1 and 4 of each tuple were both
        # unpacked as newFileId; Python binds target lists left to
        # right, so position 4 won.  Position 1 is discarded explicitly
        # here -- confirm the tuple layout against
        # _createInstallTroveObjects.
        for (pathId, _, sha1, oldfile, newFileId,
             oldVersion, oldFileId, restoreContents) in configRestoreList:
            if cs.configFileIsDiff(pathId, newFileId):
                (contType, fileContents) = cs.getFileContents(pathId, newFileId)

                # the content for this file is in the form of a
                # diff, which we need to apply against the file in
                # the repository
                assert(oldVersion)

                try:
                    f = self.repos.getFileContents(
                                    [(oldFileId, oldVersion, oldfile)])[0].get()
                except KeyError:
                    # Bug fix: this message previously referenced the
                    # undefined name "fileId", so the error path raised
                    # NameError instead; report the fileId that was
                    # actually looked up.
                    raise errors.IntegrityError(
                        "Missing file contents for pathId %s, fileId %s" % (
                                        sha1helper.md5ToString(pathId),
                                        sha1helper.sha1ToString(oldFileId)))

                oldLines = f.readlines()
                f.close()
                del f
                diff = fileContents.get().readlines()
                (newLines, failedHunks) = patch.patch(oldLines,
                                                      diff)
                fileContents = filecontents.FromString(
                                                "".join(newLines))

                assert(not failedHunks)
            else:
                # config files are not always available compressed (due
                # to the config file cache)
                fileContents = filecontents.FromChangeSet(cs, pathId, newFileId)

            self.addFileContents(sha1, fileContents, restoreContents, 1)

        ptrRestores = []
        ptrRefsAdded = {}
        lastRestore = None         # restore each pathId,fileId combo once
        while normalRestoreList:
            (pathId, fileId, sha1, restoreContents) = normalRestoreList.pop(0)
            if (pathId, fileId) == lastRestore:
                continue

            lastRestore = (pathId, fileId)

            try:
                (contType, fileContents) = cs.getFileContents(pathId, fileId,
                                                              compressed = True)
            except KeyError:
                raise errors.IntegrityError(
                        "Missing file contents for pathId %s, fileId %s" % (
                                        sha1helper.md5ToString(pathId),
                                        sha1helper.sha1ToString(fileId)))
            if contType == changeset.ChangedFileTypes.ptr:
                ptrRestores.append(sha1)
                target = util.decompressString(fileContents.get().read())

                if util.tupleListBsearchInsert(normalRestoreList,
                                (target[:16], target[16:], sha1, True),
                                self.ptrCmp):
                    # Item was inserted. This creates a reference in the
                    # datastore; keep track of it to prevent a duplicate
                    # reference count.
                    ptrRefsAdded[sha1] = True

                continue

            assert(contType == changeset.ChangedFileTypes.file)
            self.addFileContents(sha1, fileContents, restoreContents, 0,
                                 precompressed = True)

        for sha1 in ptrRestores:
            # Increment the reference count for items which were ptr's
            # to a different file.
            if sha1 in ptrRefsAdded:
                del ptrRefsAdded[sha1]
            else:
                self.addFileContents(sha1, None, False, 0)

        for csTrove in newList:
            if csTrove.troveType() != trove.TROVE_TYPE_REMOVED:
                continue

            troveNo += 1

            if callback:
                callback.creatingDatabaseTransaction(troveNo, len(newList))

            self.markTroveRemoved(csTrove.getName(), csTrove.getNewVersion(),
                                  csTrove.getNewFlavor())

        for (troveName, version, flavor) in cs.getOldTroveList():
            trv = self.repos.getTrove(troveName, version, flavor)
            self.oldTrove(trv, None, troveName, version, flavor)
# Example 5
    def __init__(self,
                 repos,
                 cs,
                 fileHostFilter=None,
                 callback=None,
                 resetTimestamps=False,
                 allowIncomplete=False,
                 hidden=False,
                 mirror=False,
                 excludeCapsuleContents=False):
        """Install changeset cs into repository repos.

        Builds the install trove objects, restores config and normal
        file contents (handling diff-form config files and ptr-type
        dedup entries), then marks removed troves and handles the
        old-trove list.  Raises errors.IntegrityError for missing file
        contents.
        """
        # Mutable default argument replaced with a sentinel; callers
        # that omit it still get an empty list.
        if fileHostFilter is None:
            fileHostFilter = []

        self.repos = repos
        self.cs = cs
        self.invalidateRollbacksFlag = False

        newList = [x for x in cs.iterNewTroveList()]

        if resetTimestamps:
            # This depends intimately on the versions cache. We don't
            # change the timestamps on each version, because the cache
            # ensures they are all a single underlying object. Slick,
            # but brittle?
            updated = {}

            for csTrove in newList:
                ver = csTrove.getNewVersion()
                if ver not in updated:
                    oldVer = ver.copy()
                    ver.trailingRevision().resetTimeStamp()
                    updated[oldVer] = ver

            del updated

        troveNo, configRestoreList, normalRestoreList = \
            self._createInstallTroveObjects(fileHostFilter = fileHostFilter,
                                            callback = callback,
                                            mirror = mirror, hidden = hidden,
                                            allowIncomplete = allowIncomplete,
                                            excludeCapsuleContents =
                                                excludeCapsuleContents)
        configRestoreList, normalRestoreList = \
            self._filterRestoreList(configRestoreList, normalRestoreList)

        # use a key to select data up to, but not including, the first
        # version.  We can't sort on version because we don't have timestamps
        configRestoreList.sort(key=lambda x: x[0:5])
        normalRestoreList.sort(key=lambda x: x[0:3])

        # config files are cached, so we don't have to worry about not
        # restoring the same fileId/pathId twice
        # NOTE(review): the tuple previously unpacked newFileId at both
        # positions 1 and 4; the later binding won, so position 1 is
        # intentionally discarded here (verify the layout produced by
        # _createInstallTroveObjects).
        for (pathId, _, sha1, oldfile, newFileId, oldVersion,
             oldFileId, restoreContents) in configRestoreList:
            if cs.configFileIsDiff(pathId, newFileId):
                (contType,
                 fileContents) = cs.getFileContents(pathId, newFileId)

                # the content for this file is in the form of a
                # diff, which we need to apply against the file in
                # the repository
                assert (oldVersion)

                try:
                    f = self.repos.getFileContents([(oldFileId, oldVersion,
                                                     oldfile)])[0].get()
                except KeyError:
                    # Bug fix: "fileId" was undefined in this scope, so
                    # the intended IntegrityError became a NameError.
                    # oldFileId is what getFileContents failed to find.
                    raise errors.IntegrityError(
                        "Missing file contents for pathId %s, fileId %s" %
                        (sha1helper.md5ToString(pathId),
                         sha1helper.sha1ToString(oldFileId)))

                oldLines = f.readlines()
                f.close()
                del f
                diff = fileContents.get().readlines()
                (newLines, failedHunks) = patch.patch(oldLines, diff)
                fileContents = filecontents.FromString("".join(newLines))

                assert (not failedHunks)
            else:
                # config files are not always available compressed (due
                # to the config file cache)
                fileContents = filecontents.FromChangeSet(
                    cs, pathId, newFileId)

            self.addFileContents(sha1, fileContents, restoreContents, 1)

        ptrRestores = []
        ptrRefsAdded = {}
        lastRestore = None  # restore each pathId,fileId combo once
        while normalRestoreList:
            (pathId, fileId, sha1, restoreContents) = normalRestoreList.pop(0)
            if (pathId, fileId) == lastRestore:
                continue

            lastRestore = (pathId, fileId)

            try:
                (contType, fileContents) = cs.getFileContents(pathId,
                                                              fileId,
                                                              compressed=True)
            except KeyError:
                raise errors.IntegrityError(
                    "Missing file contents for pathId %s, fileId %s" %
                    (sha1helper.md5ToString(pathId),
                     sha1helper.sha1ToString(fileId)))
            if contType == changeset.ChangedFileTypes.ptr:
                ptrRestores.append(sha1)
                target = util.decompressString(fileContents.get().read())

                if util.tupleListBsearchInsert(
                        normalRestoreList,
                    (target[:16], target[16:], sha1, True), self.ptrCmp):
                    # Item was inserted. This creates a reference in the
                    # datastore; keep track of it to prevent a duplicate
                    # reference count.
                    ptrRefsAdded[sha1] = True

                continue

            assert (contType == changeset.ChangedFileTypes.file)
            self.addFileContents(sha1,
                                 fileContents,
                                 restoreContents,
                                 0,
                                 precompressed=True)

        for sha1 in ptrRestores:
            # Increment the reference count for items which were ptr's
            # to a different file.
            if sha1 in ptrRefsAdded:
                del ptrRefsAdded[sha1]
            else:
                self.addFileContents(sha1, None, False, 0)

        for csTrove in newList:
            if csTrove.troveType() != trove.TROVE_TYPE_REMOVED:
                continue

            troveNo += 1

            if callback:
                callback.creatingDatabaseTransaction(troveNo, len(newList))

            self.markTroveRemoved(csTrove.getName(), csTrove.getNewVersion(),
                                  csTrove.getNewFlavor())

        for (troveName, version, flavor) in cs.getOldTroveList():
            trv = self.repos.getTrove(troveName, version, flavor)
            self.oldTrove(trv, None, troveName, version, flavor)