Example #1
    def runTest(self, fileName, sedCmd, changeCmd = None):
        new = os.popen("sed '%s' < %s" % (sedCmd, fileName)).readlines()

        orig = open(fileName).readlines()

        if changeCmd:
            target = os.popen("sed '%s' < %s" % (changeCmd, fileName)).readlines()
            final = os.popen("sed '%s' < %s | sed '%s'" % (sedCmd, fileName, 
                             changeCmd)).readlines()
            check = os.popen("sed '%s' < %s | sed '%s'" % (changeCmd, fileName, 
                             sedCmd)).readlines()
            if "".join(final) != "".join(check):
                self.fail("sed scripts have conflicting results")
        else:
            target = orig
            final = new

        diff = difflib.unified_diff(orig, new)
        # advance past header
        diff.next()
        diff.next()
        d = [ x for x in diff ]

        (new2, conflicts) = patch(target, d)

        diff = difflib.unified_diff(final, new2)
        try:
            diff.next()
            print "%s '%s' failed:" % (fileName, sedCmd)
            diff.next()
            for line in diff:
                line = line[:-1]
                print "\t%s" % line
        except StopIteration:
            pass
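
A pattern that recurs throughout these tests is skipping the first two lines that difflib.unified_diff yields: those are the "---"/"+++" file-header lines, and patch() only wants the "@@" hunks and their content lines. The tests do this with two diff.next() calls or by slicing a list(...) with [2:]. A minimal sketch of the same idiom as a helper (the name hunksOnly is mine, not part of the test suite):

    import difflib
    import itertools

    def hunksOnly(old, new, **kwargs):
        # difflib.unified_diff yields the "---" and "+++" file-header lines
        # before any hunks; drop them so only the "@@" hunk headers and the
        # context/added/removed lines remain, which is what patch() expects
        return itertools.islice(difflib.unified_diff(old, new, **kwargs),
                                 2, None)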
Example #2
    def testEraseAlreadyApplied(self):
        first = [ "%s\n" % x for x in xrange(10) ]
        second = first[:]
        second.remove("4\n")

        diff = list(difflib.unified_diff(first, second))
        (results, conflicts) = patch(second, diff[2:])
        assert(results == second)
        assert(not conflicts)
Example #3
    def testEraseAlreadyApplied(self):
        first = ["%s\n" % x for x in xrange(10)]
        second = first[:]
        second.remove("4\n")

        diff = list(difflib.unified_diff(first, second))
        (results, conflicts) = patch(second, diff[2:])
        assert (results == second)
        assert (not conflicts)
Example #4
    def testMergeAtEnd(self):
        first = [ "%s\n" % x for x in xrange(10) ]
        second = first[:]
        second.remove("7\n")
        diff = list(difflib.unified_diff(first, second))

        third = first[:]
        third.remove("9\n")
        result = patch(third, diff[2:])
        assert(not result[1])
        assert(result[0] == [ '0\n', '1\n', '2\n', '3\n', '4\n', '5\n',
                              '6\n', '8\n'])
Example #5
    def test9(self):
        old = []
        new = [ "some lines", "of text", "to be patched" ]

        diff = difflib.unified_diff(old, new, lineterm = "")
        # advance past header
        diff.next()
        diff.next()

        (new2, conflicts) = patch(old, diff)
        assert(not conflicts)
        diff = difflib.unified_diff(new, new2)
        self.assertRaises(StopIteration, diff.next)
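
The lineterm="" argument matters here because the lines being compared carry no trailing newlines; with the default lineterm of "\n", difflib's control lines ("---", "+++", "@@ ... @@") would end in a newline while the content lines would not. A quick illustration (output shown approximately):

    import difflib

    # control lines carry no trailing newline when lineterm="" is passed
    print list(difflib.unified_diff([], ["of text"], lineterm=""))
    # roughly: ['--- ', '+++ ', '@@ -0,0 +1 @@', '+of text']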
Example #6
    def test9(self):
        old = []
        new = ["some lines", "of text", "to be patched"]

        diff = difflib.unified_diff(old, new, lineterm="")
        # advance past header
        diff.next()
        diff.next()

        (new2, conflicts) = patch(old, diff)
        assert (not conflicts)
        diff = difflib.unified_diff(new, new2)
        self.assertRaises(StopIteration, diff.next)
Example #7
    def testMergeAtEnd(self):
        first = ["%s\n" % x for x in xrange(10)]
        second = first[:]
        second.remove("7\n")
        diff = list(difflib.unified_diff(first, second))

        third = first[:]
        third.remove("9\n")
        result = patch(third, diff[2:])
        assert (not result[1])
        assert (result[0] == [
            '0\n', '1\n', '2\n', '3\n', '4\n', '5\n', '6\n', '8\n'
        ])
Example #8
    def testNoTrailingNewline(self):
        first = ["line\n"]
        second = ["line\n", "another"]
        diff = list(unifiedDiff(first, second))
        assert (diff[-1] == '\ No newline at end of file\n')
        assert (diff[-2][-1] == '\n')

        result = patch(first, diff[2:])
        assert (not result[1])
        assert (result[0] == second)

        first = ["first"]
        second = ["second"]
        diff = list(unifiedDiff(first, second))
        result = patch(first, diff[2:])
        assert (not result[1])
        assert (result[0] == second)

        first = ["first"]
        second = ["second\n"]
        diff = list(unifiedDiff(first, second))
        result = patch(first, diff[2:])
        assert (not result[1])
        assert (result[0] == second)
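
Note that unifiedDiff here is a project helper rather than difflib.unified_diff itself: difflib never emits the GNU-diff "\ No newline at end of file" marker these assertions check for. A hypothetical sketch of the behavior the test implies (my illustration, not the project's implementation):

    import difflib

    def unifiedDiffSketch(old, new):
        # hypothetical stand-in for unifiedDiff: whenever difflib emits a
        # diff line whose source line had no trailing newline, terminate it
        # and follow it with the GNU-style marker, as the assertions expect
        for line in difflib.unified_diff(old, new):
            if line.endswith('\n'):
                yield line
            else:
                yield line + '\n'
                yield '\\ No newline at end of file\n'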
Example #9
    def testNoTrailingNewline(self):
        first = [ "line\n" ]
        second = [ "line\n", "another" ]
        diff = list(unifiedDiff(first, second))
        assert(diff[-1] == '\ No newline at end of file\n')
        assert(diff[-2][-1] == '\n')

        result = patch(first, diff[2:])
        assert(not result[1])
        assert(result[0] == second)

        first = [ "first" ]
        second = [ "second" ]
        diff = list(unifiedDiff(first, second))
        result = patch(first, diff[2:])
        assert(not result[1])
        assert(result[0] == second)

        first = [ "first" ]
        second = [ "second\n" ]
        diff = list(unifiedDiff(first, second))
        result = patch(first, diff[2:])
        assert(not result[1])
        assert(result[0] == second)
Example #10
    def test10(self):
        """test reversing a diff and applying it to the new file, check
        to make sure you get the old file"""
        old = [ "a", "b", "c", "same" ]
        new = [ "1", "2", "3", "same" ]

        diff = difflib.unified_diff(old, new, lineterm = "")
        # advance past header
        diff.next()
        diff.next()
        diff = reverse(diff)
        
        (old2, conflicts) = patch(new, diff)
        if old != old2:
            raise AssertionError
Example #11
    def test10(self):
        """test reversing a diff and applying it to the new file, check
        to make sure you get the old file"""
        old = ["a", "b", "c", "same"]
        new = ["1", "2", "3", "same"]

        diff = difflib.unified_diff(old, new, lineterm="")
        # advance past header
        diff.next()
        diff.next()
        diff = reverse(diff)

        (old2, conflicts) = patch(new, diff)
        if old != old2:
            raise AssertionError
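
reverse() itself is not shown in these examples; from its use here it turns a forward diff into one that patches the new file back to the old one. A minimal sketch of that idea, assuming the "---"/"+++" headers were already skipped as in the test (my illustration, not the project's implementation):

    def reverseSketch(diff):
        # swap the added/removed prefixes and exchange the old/new ranges in
        # each "@@ -a,b +c,d @@" hunk header; context lines pass through
        # (any trailing context after the second @@ is dropped in this sketch)
        for line in diff:
            if line.startswith('@@'):
                parts = line.split()      # ['@@', '-a,b', '+c,d', '@@']
                yield '@@ -%s +%s @@' % (parts[2][1:], parts[1][1:])
            elif line.startswith('+'):
                yield '-' + line[1:]
            elif line.startswith('-'):
                yield '+' + line[1:]
            else:
                yield line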
Example #12
    def testConverge(self):
        """
        tests applying a patch to a file that already has the change
        applied.  This is used in cvc merge
        """
        # orig file is 10 lines of "1"
        base = ['1'] * 10
        # the file changes the middle line to "2" on the branch
        tip = base[:]
        tip[5] = '2'
        # the file on my local branch already has the change
        trunk = tip[:]

        diff = difflib.unified_diff(base, tip, lineterm="")
        # advance past header
        diff.next()
        diff.next()

        # this should be like "patch appears to already be applied"
        (results, conflicts) = patch(trunk, diff)
        assert (results == tip)
        assert (not conflicts)
Example #13
    def test11(self):
        """
        test that a patch that appends to a file which has shrunk
        works
        """
        # orig file is 10 lines of "1"
        old = ['1'] * 10
        # local modification removes two of the lines of "1"
        oldchanged = ['1'] * 8
        # new file adds two lines
        new = old + ['2', '3']
        # we expect the result to be the local change plus two lines
        newmerged = oldchanged + ['2', '3']

        diff = difflib.unified_diff(old, new, lineterm="")
        # advance past header
        diff.next()
        diff.next()

        (results, conflicts) = patch(oldchanged, diff)
        assert (results == newmerged)
        assert (not conflicts)
Example #14
    def test11(self):
        """
        test that a patch that appends to a file which has shrunk
        works
        """
        # orig file is 10 lines of "1"
        old = [ '1' ] * 10
        # local modification removes two of the lines of "1"
        oldchanged = [ '1' ] * 8
        # new file adds two lines
        new = old + [ '2', '3' ]
        # we expect the result to be the local change plus two lines
        newmerged = oldchanged + [ '2', '3' ]

        diff = difflib.unified_diff(old, new, lineterm = "")
        # advance past header
        diff.next()
        diff.next()

        (results, conflicts) = patch(oldchanged, diff)
        assert(results == newmerged)
        assert(not conflicts)
Example #15
    def testConverge(self):
        """
        tests applying a patch to a file that already has the change
        applied.  This is used in cvc merge
        """
        # orig file is 10 lines of "1"
        base = [ '1' ] * 10
        # the file changes the middle line to "2" on the branch
        tip = base[:]
        tip[5] = '2'
        # the file on my local branch already has the change
        trunk = tip[:]

        diff = difflib.unified_diff(base, tip, lineterm = "")
        # advance past header
        diff.next()
        diff.next()

        # this should be like "patch appears to already be applied"
        (results, conflicts) = patch(trunk, diff)
        assert(results == tip)
        assert(not conflicts)
Example #16
    def _restoreConfig(self, cs, configRestoreList):
        # config files are cached, so we don't have to worry about not
        # restoring the same fileId/pathId twice
        for (pathId, newFileId, sha1, oldfile, newFileId, oldVersion,
             oldFileId, restoreContents) in configRestoreList:
            if cs.configFileIsDiff(pathId, newFileId):
                (contType,
                 fileContents) = cs.getFileContents(pathId, newFileId)

                # the content for this file is in the form of a
                # diff, which we need to apply against the file in
                # the repository
                assert (oldVersion)

                try:
                    f = self.repos.getFileContents([(oldFileId, oldVersion,
                                                     oldfile)])[0].get()
                except KeyError:
                    raise errors.IntegrityError(
                        "Missing file contents for pathId %s, fileId %s" %
                        (sha1helper.md5ToString(pathId),
                         sha1helper.sha1ToString(oldFileId)))

                oldLines = f.readlines()
                f.close()
                del f
                diff = fileContents.get().readlines()
                (newLines, failedHunks) = patch.patch(oldLines, diff)
                fileContents = filecontents.FromString("".join(newLines))

                assert (not failedHunks)
            else:
                # config files are not always available compressed (due
                # to the config file cache)
                fileContents = filecontents.FromChangeSet(
                    cs, pathId, newFileId)

            self.addFileContents(sha1, fileContents, restoreContents, 1)
Example #17
    def testEraseConflict(self):
        base = """\
            useradd
                ${UIDARG}
                ${HOMEDIR:+-d "${HOMEDIR}"}
                $USER >/dev/null 2>&1 || :
            ;;
            1
            2
            3
""".split('\n')
        tip = """\
            useradd
                ${UIDARG}
                -M -d "${HOMEDIR}"
                $USER >/dev/null 2>&1
            ;;
            1
            2
            3
""".split('\n')
        local = """\
            useradd
                ${UIDARG} \
                ${HOMEDIR:+-d "${HOMEDIR}"}
                ${PASSWORD:+-s "${PASSWORD}"}
                $USER >/dev/null 2>&1 || :
            ;;
            1
            2
            3
""".split('\n')

        diff = list(difflib.unified_diff(base, tip))
        (results, conflicts) = patch(local, diff[2:])
        assert (results == local)
        assert (conflicts)
Example #18
    def testEraseConflict(self):
        base = """\
            useradd
                ${UIDARG}
                ${HOMEDIR:+-d "${HOMEDIR}"}
                $USER >/dev/null 2>&1 || :
            ;;
            1
            2
            3
""".split('\n')
        tip = """\
            useradd
                ${UIDARG}
                -M -d "${HOMEDIR}"
                $USER >/dev/null 2>&1
            ;;
            1
            2
            3
""".split('\n')
        local = """\
            useradd
                ${UIDARG} \
                ${HOMEDIR:+-d "${HOMEDIR}"}
                ${PASSWORD:+-s "${PASSWORD}"}
                $USER >/dev/null 2>&1 || :
            ;;
            1
            2
            3
""".split('\n')

        diff = list(difflib.unified_diff(base, tip))
        (results, conflicts) = patch(local, diff[2:])
        assert(results == local)
        assert(conflicts)
Example #19
    def runTest(self, fileName, sedCmd, changeCmd=None):
        new = os.popen("sed '%s' < %s" % (sedCmd, fileName)).readlines()

        orig = open(fileName).readlines()

        if changeCmd:
            target = os.popen("sed '%s' < %s" %
                              (changeCmd, fileName)).readlines()
            final = os.popen("sed '%s' < %s | sed '%s'" %
                             (sedCmd, fileName, changeCmd)).readlines()
            check = os.popen("sed '%s' < %s | sed '%s'" %
                             (changeCmd, fileName, sedCmd)).readlines()
            if "".join(final) != "".join(check):
                self.fail("sed scripts have conflicting results")
        else:
            target = orig
            final = new

        diff = difflib.unified_diff(orig, new)
        # advance past header
        diff.next()
        diff.next()
        d = [x for x in diff]

        (new2, conflicts) = patch(target, d)

        diff = difflib.unified_diff(final, new2)
        try:
            diff.next()
            print "%s '%s' failed:" % (fileName, sedCmd)
            diff.next()
            for line in diff:
                line = line[:-1]
                print "\t%s" % line
        except StopIteration:
            pass
Example #20
    def __init__(self,
                 repos,
                 cs,
                 fileHostFilter=[],
                 callback=None,
                 resetTimestamps=False,
                 allowIncomplete=False,
                 hidden=False,
                 mirror=False,
                 excludeCapsuleContents=False):

        self.repos = repos
        self.cs = cs
        self.invalidateRollbacksFlag = False

        newList = [x for x in cs.iterNewTroveList()]

        if resetTimestamps:
            # This depends intimately on the versions cache. We don't
            # change the timestamps on each version, because the cache
            # ensures they are all a single underlying object. Slick,
            # but brittle?
            updated = {}

            for csTrove in newList:
                ver = csTrove.getNewVersion()
                if ver not in updated:
                    oldVer = ver.copy()
                    ver.trailingRevision().resetTimeStamp()
                    updated[oldVer] = ver

            del updated

        troveNo, configRestoreList, normalRestoreList = \
            self._createInstallTroveObjects(
                fileHostFilter=fileHostFilter,
                callback=callback,
                mirror=mirror,
                hidden=hidden,
                allowIncomplete=allowIncomplete,
                excludeCapsuleContents=excludeCapsuleContents)
        configRestoreList, normalRestoreList = \
            self._filterRestoreList(configRestoreList, normalRestoreList)

        # use a key to select data up to, but not including, the first
        # version.  We can't sort on version because we don't have timestamps
        configRestoreList.sort(key=lambda x: x[0:5])
        normalRestoreList.sort(key=lambda x: x[0:3])

        # config files are cached, so we don't have to worry about not
        # restoring the same fileId/pathId twice
        for (pathId, newFileId, sha1, oldfile, newFileId, oldVersion,
             oldFileId, restoreContents) in configRestoreList:
            if cs.configFileIsDiff(pathId, newFileId):
                (contType,
                 fileContents) = cs.getFileContents(pathId, newFileId)

                # the content for this file is in the form of a
                # diff, which we need to apply against the file in
                # the repository
                assert (oldVersion)

                try:
                    f = self.repos.getFileContents([(oldFileId, oldVersion,
                                                     oldfile)])[0].get()
                except KeyError:
                    raise errors.IntegrityError(
                        "Missing file contents for pathId %s, fileId %s" %
                        (sha1helper.md5ToString(pathId),
                         sha1helper.sha1ToString(oldFileId)))

                oldLines = f.readlines()
                f.close()
                del f
                diff = fileContents.get().readlines()
                (newLines, failedHunks) = patch.patch(oldLines, diff)
                fileContents = filecontents.FromString("".join(newLines))

                assert (not failedHunks)
            else:
                # config files are not always available compressed (due
                # to the config file cache)
                fileContents = filecontents.FromChangeSet(
                    cs, pathId, newFileId)

            self.addFileContents(sha1, fileContents, restoreContents, 1)

        ptrRestores = []
        ptrRefsAdded = {}
        lastRestore = None  # restore each pathId,fileId combo once
        while normalRestoreList:
            (pathId, fileId, sha1, restoreContents) = normalRestoreList.pop(0)
            if (pathId, fileId) == lastRestore:
                continue

            lastRestore = (pathId, fileId)

            try:
                (contType, fileContents) = cs.getFileContents(pathId,
                                                              fileId,
                                                              compressed=True)
            except KeyError:
                raise errors.IntegrityError(
                    "Missing file contents for pathId %s, fileId %s" %
                    (sha1helper.md5ToString(pathId),
                     sha1helper.sha1ToString(fileId)))
            if contType == changeset.ChangedFileTypes.ptr:
                ptrRestores.append(sha1)
                target = util.decompressString(fileContents.get().read())

                if util.tupleListBsearchInsert(
                        normalRestoreList,
                        (target[:16], target[16:], sha1, True),
                        self.ptrCmp):
                    # Item was inserted. This creates a reference in the
                    # datastore; keep track of it to prevent a duplicate
                    # reference count.
                    ptrRefsAdded[sha1] = True

                continue

            assert (contType == changeset.ChangedFileTypes.file)
            self.addFileContents(sha1,
                                 fileContents,
                                 restoreContents,
                                 0,
                                 precompressed=True)

        for sha1 in ptrRestores:
            # Increment the reference count for items which were ptr's
            # to a different file.
            if sha1 in ptrRefsAdded:
                del ptrRefsAdded[sha1]
            else:
                self.addFileContents(sha1, None, False, 0)

        #del configRestoreList
        #del normalRestoreList

        for csTrove in newList:
            if csTrove.troveType() != trove.TROVE_TYPE_REMOVED:
                continue

            troveNo += 1

            if callback:
                callback.creatingDatabaseTransaction(troveNo, len(newList))

            self.markTroveRemoved(csTrove.getName(), csTrove.getNewVersion(),
                                  csTrove.getNewFlavor())

        for (troveName, version, flavor) in cs.getOldTroveList():
            trv = self.repos.getTrove(troveName, version, flavor)
            self.oldTrove(trv, None, troveName, version, flavor)
Example #21
    def __init__(self, repos, cs, fileHostFilter = [], callback = None,
                 resetTimestamps = False, allowIncomplete = False,
                 hidden = False, mirror = False,
                 excludeCapsuleContents = False):

        self.repos = repos
        self.cs = cs
        self.invalidateRollbacksFlag = False

        newList = [ x for x in cs.iterNewTroveList() ]

        if resetTimestamps:
            # This depends intimately on the versions cache. We don't
            # change the timestamps on each version, because the cache
            # ensures they are all a single underlying object. Slick,
            # but brittle?
            updated = {}

            for csTrove in newList:
                ver = csTrove.getNewVersion()
                if ver not in updated:
                    oldVer = ver.copy()
                    ver.trailingRevision().resetTimeStamp()
                    updated[oldVer] = ver

            del updated

        troveNo, configRestoreList, normalRestoreList = \
            self._createInstallTroveObjects(fileHostFilter = fileHostFilter,
                                            callback = callback,
                                            mirror = mirror, hidden = hidden,
                                            allowIncomplete = allowIncomplete,
                                            excludeCapsuleContents =
                                                excludeCapsuleContents)
        configRestoreList, normalRestoreList = \
            self._filterRestoreList(configRestoreList, normalRestoreList)

        # use a key to select data up to, but not including, the first
        # version.  We can't sort on version because we don't have timestamps
        configRestoreList.sort(key=lambda x: x[0:5])
        normalRestoreList.sort(key=lambda x: x[0:3])

        # config files are cached, so we don't have to worry about not
        # restoring the same fileId/pathId twice
        for (pathId, newFileId, sha1, oldfile, newFileId,
             oldVersion, oldFileId, restoreContents) in configRestoreList:
            if cs.configFileIsDiff(pathId, newFileId):
                (contType, fileContents) = cs.getFileContents(pathId, newFileId)

                # the content for this file is in the form of a
                # diff, which we need to apply against the file in
                # the repository
                assert(oldVersion)

                try:
                    f = self.repos.getFileContents(
                                    [(oldFileId, oldVersion, oldfile)])[0].get()
                except KeyError:
                    raise errors.IntegrityError(
                        "Missing file contents for pathId %s, fileId %s" % (
                                        sha1helper.md5ToString(pathId),
                                        sha1helper.sha1ToString(oldFileId)))

                oldLines = f.readlines()
                f.close()
                del f
                diff = fileContents.get().readlines()
                (newLines, failedHunks) = patch.patch(oldLines,
                                                      diff)
                fileContents = filecontents.FromString(
                                                "".join(newLines))

                assert(not failedHunks)
            else:
                # config files are not always available compressed (due
                # to the config file cache)
                fileContents = filecontents.FromChangeSet(cs, pathId, newFileId)

            self.addFileContents(sha1, fileContents, restoreContents, 1)

        ptrRestores = []
        ptrRefsAdded = {}
        lastRestore = None         # restore each pathId,fileId combo once
        while normalRestoreList:
            (pathId, fileId, sha1, restoreContents) = normalRestoreList.pop(0)
            if (pathId, fileId) == lastRestore:
                continue

            lastRestore = (pathId, fileId)

            try:
                (contType, fileContents) = cs.getFileContents(pathId, fileId,
                                                              compressed = True)
            except KeyError:
                raise errors.IntegrityError(
                        "Missing file contents for pathId %s, fileId %s" % (
                                        sha1helper.md5ToString(pathId),
                                        sha1helper.sha1ToString(fileId)))
            if contType == changeset.ChangedFileTypes.ptr:
                ptrRestores.append(sha1)
                target = util.decompressString(fileContents.get().read())

                if util.tupleListBsearchInsert(normalRestoreList,
                                (target[:16], target[16:], sha1, True),
                                self.ptrCmp):
                    # Item was inserted. This creates a reference in the
                    # datastore; keep track of it to prevent a duplicate
                    # reference count.
                    ptrRefsAdded[sha1] = True

                continue

            assert(contType == changeset.ChangedFileTypes.file)
            self.addFileContents(sha1, fileContents, restoreContents, 0,
                                 precompressed = True)

        for sha1 in ptrRestores:
            # Increment the reference count for items which were ptr's
            # to a different file.
            if sha1 in ptrRefsAdded:
                del ptrRefsAdded[sha1]
            else:
                self.addFileContents(sha1, None, False, 0)

        #del configRestoreList
        #del normalRestoreList

        for csTrove in newList:
            if csTrove.troveType() != trove.TROVE_TYPE_REMOVED:
                continue

            troveNo += 1

            if callback:
                callback.creatingDatabaseTransaction(troveNo, len(newList))

            self.markTroveRemoved(csTrove.getName(), csTrove.getNewVersion(),
                                  csTrove.getNewFlavor())

        for (troveName, version, flavor) in cs.getOldTroveList():
            trv = self.repos.getTrove(troveName, version, flavor)
            self.oldTrove(trv, None, troveName, version, flavor)