Code example #1
def setUp(self):
    # create a test repo location.
    self.tmpdir = tempfile.mkdtemp("hg-git_url-test")
    commands.init(ui.ui(), self.tmpdir)
    repo = hg.repository(ui.ui(), self.tmpdir)
    self.handler = GitHandler(repo, ui.ui())
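
The example creates a temporary repository but the matching cleanup is not shown; a minimal tearDown sketch, assuming the temp directory is the only artifact to remove:

def tearDown(self):
    # remove the temporary repo created in setUp (a sketch; assumes
    # "import shutil" at module level)
    shutil.rmtree(self.tmpdir)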
Code example #2
from __future__ import absolute_import

import os
import sys
import time

# module paths inferred from the other examples on this page; BUNDLEPATH
# (a pre-made bundle fixture) is defined earlier in the original test
from edenscm.mercurial import commands, fscap, hg, ui as uimod

# only makes sense to test on an OS which supports symlinks
if not getattr(os, "symlink", False):
    sys.exit(80)  # SKIPPED_STATUS defined in run-tests.py

with open(os.environ["HGRCPATH"], "a+") as f:
    f.write("[extensions]\n")
    f.write("treemanifest=!\n")

u = uimod.ui.load()
# hide outer repo
hg.peer(u, {}, ".", create=True)

# clone with symlink support
hg.clone(u, {}, BUNDLEPATH, "test0")

repo = hg.repository(u, "test0")

# wait a bit, or the status call won't update the dirstate
time.sleep(1)
commands.status(u, repo)

# now disable symlink support -- this is what os.symlink would do on a
# non-symlink file system
def symlink_failure(src, dst):
    raise OSError(1, "Operation not permitted")


os.symlink = symlink_failure
fscap.getfscap = lambda *args: None
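
With symlink support patched out, the test can re-exercise the same code paths; a sketch of the presumable continuation (the original is truncated here):

# a second status call should now take the non-symlink code path
commands.status(u, repo)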

Code example #3
from __future__ import absolute_import, print_function

import os

from edenscm.mercurial import context, encoding, hg, pycompat, scmutil, ui as uimod
from edenscm.mercurial.node import hex
from hghave import require


u = uimod.ui.load()
u.setconfig("extensions", "treemanifest", "!")

repo = hg.repository(u, "test1", create=1)
os.chdir("test1")

# create 'foo' with fixed time stamp
f = open("foo", "wb")
f.write(b"foo\n")
f.close()
os.utime("foo", (1000, 1000))

# add+commit 'foo'
repo[None].add(["foo"])
repo.commit(text="commit1", date="0 0")

d = repo[None]["foo"].date()
if os.name == "nt":
    d = d[:2]
print("workingfilectx.date = (%d, %d)" % d)

# test memctx with non-ASCII commit message
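
The example is truncated at this comment; a hypothetical continuation, loosely modeled on upstream Mercurial's test-context.py (names, file contents, and exact bytes/str handling are illustrative and vary by version):

def filectxfn(repo, memctx, path):
    # every file in the memctx gets the same illustrative content
    return context.memfilectx(repo, memctx, path, b"foo\n")

ctx = context.memctx(
    repo, ["tip", None], encoding.tolocal(b"Gr\xc3\xbcezi!"), ["foo"], filectxfn
)
ctx.commit()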
Code example #4
from __future__ import absolute_import, print_function

import os

from edenscm.mercurial import hg, merge, ui as uimod
from hghave import require

u = uimod.ui.load()

repo = hg.repository(u, "test1", create=1)
os.chdir("test1")


def commit(text, time):
    repo.commit(text=text, date="%d 0" % time)


def addcommit(name, time):
    f = open(name, "w")
    f.write("%s\n" % name)
    f.close()
    repo[None].add([name])
    commit(name, time)


def update(rev):
    merge.update(repo, rev, False, True)


def merge_(rev):
    merge.update(repo, rev, True, False)
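
A usage sketch for the helpers above (hypothetical history; filenames and timestamps are arbitrary): create two divergent heads, then merge them:

addcommit("a", 0)
addcommit("b", 1)   # child of a
update(0)           # back to rev 0
addcommit("c", 2)   # new head
merge_(1)           # merge rev 1 into the working copy
commit("merge b into c", 3)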
Code example #5
File: test-filelog.py  Project: xmonader/eden
#!/usr/bin/env python
"""
Tests the behavior of filelog w.r.t. data starting with '\1\n'
"""
from __future__ import absolute_import, print_function

from edenscm.mercurial import hg, ui as uimod
from edenscm.mercurial.node import hex, nullid
from hghave import require

myui = uimod.ui.load()
repo = hg.repository(myui, path=".", create=True)

fl = repo.file("foobar")


def addrev(text, renamed=False):
    if renamed:
        # data doesn't matter. Just make sure filelog.renamed() returns True
        meta = {"copyrev": hex(nullid), "copy": "bar"}
    else:
        meta = {}

    lock = t = None
    try:
        lock = repo.lock()
        t = repo.transaction("commit")
        node = fl.add(text, meta, t, 0, nullid, nullid)
        return node
    finally:
        if t:
            t.close()
        if lock:
            lock.release()
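
A usage sketch (illustrative data): the interesting case for this test is text that itself begins with the '\1\n' metadata marker:

node = addrev(b"\1\nsome text")
print(hex(node))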
Code example #6
def storebundle(op, params, bundlefile):
    log = _getorcreateinfinitepushlogger(op)
    parthandlerstart = time.time()
    log(constants.scratchbranchparttype, eventtype="start")
    index = op.repo.bundlestore.index
    store = op.repo.bundlestore.store
    op.records.add(constants.scratchbranchparttype + "_skippushkey", True)

    bundle = None
    try:  # guards bundle
        bundlepath = "bundle:%s+%s" % (op.repo.root, bundlefile)
        bundle = hg.repository(op.repo.ui, bundlepath)

        bookmark = params.get("bookmark")
        create = params.get("create")
        force = params.get("force")

        if bookmark:
            oldnode = index.getnode(bookmark)

            if not oldnode and not create:
                raise error.Abort(
                    "unknown bookmark %s" % bookmark,
                    hint="use --create if you want to create one",
                )
        else:
            oldnode = None
        bundleheads = bundle.revs("heads(bundle())")
        if bookmark and len(bundleheads) > 1:
            raise error.Abort(
                _("cannot push more than one head to a scratch branch"))

        revs = _getrevs(bundle, oldnode, force, bookmark)

        # Notify the user of what is being pushed
        op.repo.ui.warn(
            _n("pushing %s commit:\n", "pushing %s commits:\n", len(revs)) %
            len(revs))
        maxoutput = 10
        for i in range(0, min(len(revs), maxoutput)):
            firstline = bundle[revs[i]].description().split("\n")[0][:50]
            op.repo.ui.warn(("    %s  %s\n") % (revs[i], firstline))

        if len(revs) > maxoutput + 1:
            op.repo.ui.warn(("    ...\n"))
            firstline = bundle[revs[-1]].description().split("\n")[0][:50]
            op.repo.ui.warn(("    %s  %s\n") % (revs[-1], firstline))

        nodesctx = [bundle[rev] for rev in revs]
        inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
        if bundleheads:
            newheadscount = sum(not inindex(rev) for rev in bundleheads)
        else:
            newheadscount = 0
        # If there's a bookmark specified, the bookmarked node should also be
        # provided.  Older clients may omit this, in which case there should be
        # only one head, so we choose the last node, which will be that head.
        # If a bug or malicious client allows there to be a bookmark
        # with multiple heads, we will place the bookmark on the last head.
        bookmarknode = params.get("bookmarknode",
                                  nodesctx[-1].hex() if nodesctx else None)
        key = None
        if newheadscount:
            with open(bundlefile, "rb") as f:
                bundledata = f.read()
                with logservicecall(log,
                                    "bundlestore",
                                    bundlesize=len(bundledata)):
                    bundlesizelimitmb = op.repo.ui.configint(
                        "infinitepush", "maxbundlesize", 100)
                    if len(bundledata) > bundlesizelimitmb * 1024 * 1024:
                        error_msg = (
                            "bundle is too big: %d bytes. "
                            "max allowed size is %s MB"
                            % (len(bundledata), bundlesizelimitmb))
                        raise error.Abort(error_msg)
                    key = store.write(bundledata)

        with logservicecall(log, "index", newheadscount=newheadscount), index:
            if key:
                index.addbundle(key, nodesctx)
            if bookmark and bookmarknode:
                index.addbookmark(bookmark, bookmarknode, False)
        log(
            constants.scratchbranchparttype,
            eventtype="success",
            elapsedms=(time.time() - parthandlerstart) * 1000,
        )

        fillmetadatabranchpattern = op.repo.ui.config(
            "infinitepush", "fillmetadatabranchpattern", "")
        if bookmark and fillmetadatabranchpattern:
            __, __, matcher = util.stringmatcher(fillmetadatabranchpattern)
            if matcher(bookmark):
                _asyncsavemetadata(op.repo.root,
                                   [ctx.hex() for ctx in nodesctx])
    except Exception as e:
        log(
            constants.scratchbranchparttype,
            eventtype="failure",
            elapsedms=(time.time() - parthandlerstart) * 1000,
            errormsg=str(e),
        )
        raise
    finally:
        if bundle:
            bundle.close()
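
The 100 MB cap above is read from configuration (infinitepush.maxbundlesize, in megabytes, via ui.configint); a sketch of overriding it, assuming access to the repo's ui:

# raise the bundle size cap to 200 MB for this ui instance (sketch)
op.repo.ui.setconfig("infinitepush", "maxbundlesize", "200")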
Code example #7
def getbundlechunks(orig, repo, source, heads=None, bundlecaps=None, **kwargs):
    heads = heads or []
    # newheads are parents of roots of scratch bundles that were requested
    newphases = {}
    scratchbundles = []
    newheads = []
    scratchheads = []
    nodestobundle = {}
    allbundlestocleanup = []

    cgversion = _getsupportedcgversion(repo, bundlecaps or [])
    try:
        for head in heads:
            if head not in repo.changelog.nodemap:
                if head not in nodestobundle:
                    newbundlefile = downloadbundle(repo, head)
                    bundlepath = "bundle:%s+%s" % (repo.root, newbundlefile)
                    bundlerepo = hg.repository(repo.ui, bundlepath)

                    allbundlestocleanup.append((bundlerepo, newbundlefile))
                    bundlerevs = set(bundlerepo.revs("bundle()"))
                    bundlecaps = _includefilelogstobundle(
                        bundlecaps, bundlerepo, bundlerevs, repo.ui)
                    cl = bundlerepo.changelog
                    bundleroots = _getbundleroots(repo, bundlerepo, bundlerevs)
                    draftcommits = set()
                    bundleheads = set([head])
                    for rev in bundlerevs:
                        node = cl.node(rev)
                        draftcommits.add(node)
                        if node in heads:
                            bundleheads.add(node)
                            nodestobundle[node] = (
                                bundlerepo,
                                bundleroots,
                                newbundlefile,
                            )

                    if draftcommits:
                        # Filter down to roots of this head, so we don't report
                        # non-roots as phase roots and we don't report commits
                        # that aren't related to the requested head.
                        for rev in bundlerepo.revs("roots((%ln) & ::%ln)",
                                                   draftcommits, bundleheads):
                            newphases[bundlerepo[rev].hex()] = str(
                                phases.draft)

                scratchbundles.append(
                    _generateoutputparts(head, cgversion, bundlecaps,
                                         *nodestobundle[head]))
                newheads.extend(bundleroots)
                scratchheads.append(head)
    finally:
        for bundlerepo, bundlefile in allbundlestocleanup:
            bundlerepo.close()
            try:
                os.unlink(bundlefile)
            except (IOError, OSError):
                # if we can't cleanup the file then just ignore the error,
                # no need to fail
                pass

    pullfrombundlestore = bool(scratchbundles)
    wrappedchangegrouppart = False
    wrappedlistkeys = False
    oldchangegrouppart = exchange.getbundle2partsmapping["changegroup"]
    try:

        def _changegrouppart(bundler, *args, **kwargs):
            # Order is important here. First add non-scratch part
            # and only then add parts with scratch bundles because
            # non-scratch part contains parents of roots of scratch bundles.
            result = oldchangegrouppart(bundler, *args, **kwargs)
            for bundle in scratchbundles:
                for part in bundle:
                    bundler.addpart(part)
            return result

        exchange.getbundle2partsmapping["changegroup"] = _changegrouppart
        wrappedchangegrouppart = True

        def _listkeys(orig, self, namespace):
            origvalues = orig(self, namespace)
            if namespace == "phases" and pullfrombundlestore:
                if origvalues.get("publishing") == "True":
                    # Make repo non-publishing to preserve draft phase
                    del origvalues["publishing"]
                origvalues.update(newphases)
            return origvalues

        extensions.wrapfunction(localrepo.localrepository, "listkeys",
                                _listkeys)
        wrappedlistkeys = True
        heads = list((set(newheads) | set(heads)) - set(scratchheads))
        result = orig(repo,
                      source,
                      heads=heads,
                      bundlecaps=bundlecaps,
                      **kwargs)
    finally:
        if wrappedchangegrouppart:
            exchange.getbundle2partsmapping["changegroup"] = oldchangegrouppart
        if wrappedlistkeys:
            extensions.unwrapfunction(localrepo.localrepository, "listkeys",
                                      _listkeys)
    return result
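
The wrap-in-try/unwrap-in-finally idiom above is the standard way to install a temporary override; a minimal sketch of the same idiom in isolation (the wrapper body is a placeholder):

def _tempwrapper(orig, self, namespace):
    # call through to the original implementation, then post-process
    result = orig(self, namespace)
    return result

extensions.wrapfunction(localrepo.localrepository, "listkeys", _tempwrapper)
try:
    pass  # code that must observe the wrapped behavior goes here
finally:
    extensions.unwrapfunction(localrepo.localrepository, "listkeys", _tempwrapper)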
Code example #8
import os

from edenscm.mercurial import hg, scmutil, ui as uimod, util

chdir = os.chdir
mkdir = os.mkdir
pjoin = os.path.join

walkrepos = scmutil.walkrepos
checklink = util.checklink

u = uimod.ui.load()
sym = checklink(".")

hg.repository(u, "top1", create=1)
mkdir("subdir")
chdir("subdir")
hg.repository(u, "sub1", create=1)
mkdir("subsubdir")
chdir("subsubdir")
hg.repository(u, "subsub1", create=1)
chdir(os.path.pardir)
if sym:
    os.symlink(os.path.pardir, "circle")
    os.symlink(pjoin("subsubdir", "subsub1"), "subsub1")


def runtest():
    reposet = frozenset(walkrepos(".", followsym=True))
    if sym and (len(reposet) != 3):
        print("reposet = %r" % (reposet,))
        print("Found %d repositories when I should have found 3" % len(reposet))
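
Once the directory layout and symlinks are in place, the helper is presumably invoked:

runtest()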
Code example #9
File: test-propertycache.py  Project: xmonader/eden
    val = 100 + len(name)
    unficalllog.append(val)
    return val


# plug them on repo
localrepo.localrepository.testcachedfoobar = testcachedfoobar
localrepo.localrepository.testcachedunfifoobar = testcachedunfifoobar


# Create an empty repo and instantiate it. It is important to run
# these tests on the real object to detect regression.
repopath = os.path.join(os.environ["TESTTMP"], "repo")
assert subprocess.call(["hg", "init", repopath]) == 0
ui = uimod.ui.load()
repo = hg.repository(ui, path=repopath).unfiltered()


print("")
print("=== property cache ===")
print("")
print("calllog:", calllog)
print("cached value (unfiltered):", vars(repo).get("testcachedfoobar", "NOCACHE"))

print("")
print("= first access on unfiltered, should do a call")
print("access:", repo.testcachedfoobar)
print("calllog:", calllog)
print("cached value (unfiltered):", vars(repo).get("testcachedfoobar", "NOCACHE"))

print("")
Code example #10
def debugfillinfinitepushmetadata(ui, repo, **opts):
    """Special command that fills infinitepush metadata for a node
    """

    nodes = opts["node"]
    if not nodes:
        raise error.Abort(_("nodes are not specified"))

    filelimit = ui.configint("infinitepush", "metadatafilelimit", 100)
    nodesmetadata = {}
    for node in nodes:
        index = repo.bundlestore.index
        if not bool(index.getbundle(node)):
            raise error.Abort(_("node %s is not found") % node)

        if node not in repo:
            newbundlefile = server.downloadbundle(repo, bin(node))
            bundlepath = "bundle:%s+%s" % (repo.root, newbundlefile)
            bundlerepo = hg.repository(ui, bundlepath)
            repo = bundlerepo

        p1 = repo[node].p1().node()
        diffopts = patch.diffallopts(ui, {})
        match = scmutil.matchall(repo)
        chunks = patch.diff(repo, p1, node, match, None, diffopts, relroot="")
        difflines = util.iterlines(chunks)

        states = "modified added removed deleted unknown ignored clean".split()
        status = repo.status(p1, node)
        status = zip(states, status)

        filestatus = {}
        for state, files in status:
            for f in files:
                filestatus[f] = state

        diffstat = patch.diffstatdata(difflines)
        changed_files = {}
        copies = copiesmod.pathcopies(repo[p1], repo[node])
        for filename, adds, removes, isbinary in diffstat[:filelimit]:
            # use special encoding that allows non-utf8 filenames
            filename = pycompat.decodeutf8(
                encoding.jsonescape(pycompat.encodeutf8(filename),
                                    paranoid=True))
            changed_files[filename] = {
                "adds": adds,
                "removes": removes,
                "isbinary": isbinary,
                "status": filestatus.get(filename, "unknown"),
            }
            if filename in copies:
                changed_files[filename]["copies"] = copies[filename]

        output = {}
        output["changed_files"] = changed_files
        if len(diffstat) > filelimit:
            output["changed_files_truncated"] = True
        nodesmetadata[node] = output

    with index:
        for node, metadata in pycompat.iteritems(nodesmetadata):
            dumped = json.dumps(metadata, sort_keys=True)
            index.saveoptionaljsonmetadata(node, pycompat.encodeutf8(dumped))
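
As a debug command this is presumably invoked from the command line, along the lines of "hg debugfillinfinitepushmetadata --node <hash>"; the repeatable node option is inferred from opts["node"] above, and the actual command registration is not shown in this excerpt.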