Example #1
def create_bundle(store, commits, bundle2caps={}):
    version = '01'
    chunk_type = RawRevChunk01
    if bundle2caps:
        versions = bundle2caps.get('changegroup')
        if versions:
            if '02' in versions:
                chunk_type = RawRevChunk02
                version = '02'
    cg = create_changegroup(store, bundle_data(store, commits), chunk_type)
    if bundle2caps:
        from mercurial.util import chunkbuffer
        yield 'HG20'
        yield '\0' * 4  # bundle parameters length: no params
        replycaps = bundle2caps.get('replycaps')
        if replycaps:
            for chunk in bundlepart('REPLYCAPS', data=chunkbuffer(replycaps)):
                yield chunk
        for chunk in bundlepart('CHANGEGROUP',
                                advisoryparams=(('version', version),),
                                data=chunkbuffer(cg)):
            yield chunk
        yield '\0' * 4  # End of bundle
    else:
        for chunk in cg:
            yield chunk
Example #2
def bundle2scratchbranch(op, part):
    '''unbundle a bundle2 part containing a changegroup to store'''

    bundler = bundle2.bundle20(op.repo.ui)
    cgversion = part.params.get('cgversion', '01')
    cgpart = bundle2.bundlepart('changegroup', data=part.read())
    cgpart.addparam('version', cgversion)
    bundler.addpart(cgpart)
    buf = util.chunkbuffer(bundler.getchunks())

    fd, bundlefile = tempfile.mkstemp()
    try:
        try:
            fp = os.fdopen(fd, r'wb')
            fp.write(buf.read())
        finally:
            fp.close()
        storebundle(op, part.params, bundlefile)
    finally:
        try:
            os.unlink(bundlefile)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    return 1
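
The mkstemp / write / unlink sequence above reappears almost verbatim in examples #5 and #12 below. A minimal sketch of the same pattern as a reusable context manager, assuming only the standard library; the helper name write_bundle_to_tempfile is hypothetical and not part of the original code:

import contextlib
import errno
import os
import tempfile

@contextlib.contextmanager
def write_bundle_to_tempfile(buf):
    # buf is anything with a read() method, e.g. util.chunkbuffer(...) above.
    fd, bundlefile = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'wb') as fp:
            fp.write(buf.read())
        yield bundlefile
    finally:
        try:
            os.unlink(bundlefile)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

With such a helper, the body of bundle2scratchbranch would reduce to opening the context and calling storebundle(op, part.params, bundlefile) inside it.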
Example #3
def create_bundle(store, commits, bundle2caps={}):
    version = '01'
    chunk_type = RawRevChunk01
    if bundle2caps:
        versions = bundle2caps.get('changegroup')
        if versions:
            if '02' in versions:
                chunk_type = RawRevChunk02
                version = '02'
    cg = create_changegroup(store, bundle_data(store, commits), chunk_type)
    if bundle2caps:
        from mercurial.util import chunkbuffer
        yield 'HG20'
        yield '\0' * 4  # bundle parameters length: no params
        if bundle2caps.get('replycaps'):
            for chunk in bundlepart('REPLYCAPS'):
                yield chunk
        for chunk in bundlepart('CHANGEGROUP',
                                advisoryparams=(('version', version),),
                                data=chunkbuffer(cg)):
            yield chunk
        yield '\0' * 4  # End of bundle
    else:
        for chunk in cg:
            yield chunk
Example #4
File: hg.py  Project: vvuk/git-cinnabar
def push(repo, store, what, repo_heads, repo_branches):
    fast_import = FastImport()
    store.init_fast_import(fast_import)

    def heads():
        for sha1 in store.heads(repo_branches):
            yield '^%s' % store.changeset_ref(sha1)

    def local_bases():
        for c in Git.iter('rev-list',
                          '--stdin',
                          '--topo-order',
                          '--full-history',
                          '--boundary',
                          *(w for w in what if w),
                          stdin=heads()):
            if c[0] != '-':
                continue
            yield store.hg_changeset(c[1:])

        for w in what:
            rev = store.hg_changeset(w)
            if rev:
                yield rev

    common = findcommon(repo, store, set(local_bases()))
    logging.info('common: %s' % common)

    def revs():
        for sha1 in common:
            yield '^%s' % store.changeset_ref(sha1)

    push_commits = list(
        Git.iter('rev-list',
                 '--stdin',
                 '--topo-order',
                 '--full-history',
                 '--parents',
                 '--reverse',
                 *(w for w in what if w),
                 stdin=revs()))

    pushed = False
    if push_commits:
        chunks = util.chunkbuffer(create_bundle(store, push_commits))
        cg = cg1unpacker(chunks, 'UN')
        if all(v[1] for v in what.values()):
            repo_heads = ['force']
        else:
            repo_heads = [unhexlify(h) for h in repo_heads]
        if repo.local():
            repo.local().ui.setconfig('server', 'validate', True)
        pushed = repo.unbundle(cg, repo_heads, '') != 0
    return gitdag(push_commits) if pushed else ()
Example #5
def storetobundlestore(orig, repo, op, unbundler):
    """stores the incoming bundle coming from push command to the bundlestore
    instead of applying on the revlogs"""

    repo.ui.status(_(b"storing changesets on the bundlestore\n"))
    bundler = bundle2.bundle20(repo.ui)

    # processing each part and storing it in bundler
    with bundle2.partiterator(repo, op, unbundler) as parts:
        for part in parts:
            bundlepart = None
            if part.type == b'replycaps':
                # This configures the current operation to allow reply parts.
                bundle2._processpart(op, part)
            else:
                bundlepart = bundle2.bundlepart(part.type, data=part.read())
                for key, value in pycompat.iteritems(part.params):
                    bundlepart.addparam(key, value)

                # Certain parts require a response
                if part.type in (b'pushkey', b'changegroup'):
                    if op.reply is not None:
                        rpart = op.reply.newpart(b'reply:%s' % part.type)
                        rpart.addparam(
                            b'in-reply-to', b'%d' % part.id, mandatory=False
                        )
                        rpart.addparam(b'return', b'1', mandatory=False)

            op.records.add(
                part.type,
                {
                    b'return': 1,
                },
            )
            if bundlepart:
                bundler.addpart(bundlepart)

    # storing the bundle in the bundlestore
    buf = util.chunkbuffer(bundler.getchunks())
    fd, bundlefile = pycompat.mkstemp()
    try:
        try:
            fp = os.fdopen(fd, 'wb')
            fp.write(buf.read())
        finally:
            fp.close()
        storebundle(op, {}, bundlefile)
    finally:
        try:
            os.unlink(bundlefile)
        except Exception:
            # we would rather see the original exception
            pass
Example #6
def push(repo, store, what, repo_heads, repo_branches):
    store.init_fast_import()

    def heads():
        for sha1 in store.heads(repo_branches):
            yield '^%s' % store.changeset_ref(sha1)

    def local_bases():
        for c in Git.iter('rev-list', '--stdin', '--topo-order',
                          '--full-history', '--boundary',
                          *(w for w in what if w), stdin=heads()):
            if c[0] != '-':
                continue
            yield store.hg_changeset(c[1:])

        for w in what:
            rev = store.hg_changeset(w)
            if rev:
                yield rev

    common = findcommon(repo, store, set(local_bases()))
    logging.info('common: %s' % common)

    def revs():
        for sha1 in common:
            yield '^%s' % store.changeset_ref(sha1)

    push_commits = list(Git.iter('rev-list', '--stdin', '--topo-order',
                                 '--full-history', '--parents', '--reverse',
                                 *(w for w in what if w), stdin=revs()))

    pushed = False
    if push_commits:
        has_root = any(len(p) == 40 for p in push_commits)
        force = all(v[1] for v in what.values())
        if has_root and repo_heads:
            if not force:
                raise Exception('Cannot push a new root')
            else:
                logging.warn('Pushing a new root')
        chunks = util.chunkbuffer(create_bundle(store, push_commits))
        cg = cg1unpacker(chunks, 'UN')
        if force:
            repo_heads = ['force']
        else:
            if not repo_heads:
                repo_heads = [NULL_NODE_ID]
            repo_heads = [unhexlify(h) for h in repo_heads]
        if repo.local():
            repo.local().ui.setconfig('server', 'validate', True)
        pushed = repo.unbundle(cg, repo_heads, '') != 0
    return gitdag(push_commits) if pushed else ()
Example #7
def _rebundle(bundlerepo, bundleroots, unknownhead):
    '''
    A bundle may include more revisions than the user requested; for example,
    the user may ask for a single revision while the bundle also contains its
    descendants. This function filters out every revision the user did not
    request.
    '''
    parts = []

    version = '02'
    outgoing = discovery.outgoing(bundlerepo, commonheads=bundleroots,
                                  missingheads=[unknownhead])
    cgstream = changegroup.makestream(bundlerepo, outgoing, version, 'pull')
    cgstream = util.chunkbuffer(cgstream).read()
    cgpart = bundle2.bundlepart('changegroup', data=cgstream)
    cgpart.addparam('version', version)
    parts.append(cgpart)

    return parts
Example #8
def bundle2phaseheads(orig, op, part):
    # Merges many dicts into one. First it converts them to lists of pairs,
    # then concatenates them (using sum), and then creates a dict out of them.
    replacements = dict(sum([record.items()
                             for record
                             in op.records[rebaseparttype]],
                            []))

    decodedphases = phasesmod.binarydecode(part)

    replacedphases = []
    for phasetype in decodedphases:
        replacedphases.append(
            [replacements.get(node, node) for node in phasetype])
    # Since we've just read the bundle part, then `orig()` won't be able to
    # read it again. Let's replace payload stream with new stream of replaced
    # nodes.
    part._payloadstream = util.chunkbuffer(
        [phasesmod.binaryencode(replacedphases)])
    return orig(op, part)
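
The comment at the top of bundle2phaseheads describes a dict-merge idiom that is easy to miss in passing. A standalone sketch with made-up values; list() is added around items() only so the sketch also runs on Python 3, where dict views cannot be summed:

# Merge several dicts by concatenating their item pairs and rebuilding a dict;
# entries from later dicts win, as with op.records[rebaseparttype] above.
records = [{'old1': 'new1'}, {'old2': 'new2'}, {'old1': 'new1b'}]
replacements = dict(sum([list(r.items()) for r in records], []))
assert replacements == {'old1': 'new1b', 'old2': 'new2'}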
Example #9
def writerevs(ui, r1, r2, order, tr):

    ui.status(_('writing revs\n'))

    count = [0]
    def progress(*args):
        ui.progress(_('writing'), count[0], total=len(order))
        count[0] += 1

    order = [r1.node(r) for r in order]

    # this is a bit ugly, but it works
    lookup = lambda x: "%020d" % r1.linkrev(r1.rev(x))
    unlookup = lambda x: int(x, 10)

    try:
        group = util.chunkbuffer(r1.group(order, lookup, progress))
        group = changegroup.unbundle10(group, "UN")
        r2.addgroup(group, unlookup, tr)
    finally:
        ui.progress(_('writing'), None)
Example #11
def push(repo, store, what, repo_heads, repo_branches, dry_run=False):
    def heads():
        for sha1 in store.heads(repo_branches):
            yield '^%s' % store.changeset_ref(sha1)

    def local_bases():
        h = chain(heads(), (w for w in what if w))
        for c, t, p in GitHgHelper.rev_list('--topo-order', '--full-history',
                                            '--boundary', *h):
            if c[0] != '-':
                continue
            yield store.hg_changeset(c[1:])

        for w in what:
            rev = store.hg_changeset(w)
            if rev:
                yield rev

    common = findcommon(repo, store, set(local_bases()))
    logging.info('common: %s', common)

    def revs():
        for sha1 in common:
            yield '^%s' % store.changeset_ref(sha1)

    revs = chain(revs(), (w for w in what if w))
    push_commits = list((c, p) for c, t, p in GitHgHelper.rev_list(
        '--topo-order', '--full-history', '--parents', '--reverse', *revs))

    pushed = False
    if push_commits:
        has_root = any(len(p) == 40 for p in push_commits)
        force = all(v[1] for v in what.values())
        if has_root and repo_heads:
            if not force:
                raise Exception('Cannot push a new root')
            else:
                logging.warn('Pushing a new root')
        if force:
            repo_heads = ['force']
        else:
            if not repo_heads:
                repo_heads = [NULL_NODE_ID]
            repo_heads = [unhexlify(h) for h in repo_heads]
    if push_commits and not dry_run:
        if repo.local():
            repo.local().ui.setconfig('server', 'validate', True)
        b2caps = bundle2caps(repo) if unbundle20 else {}
        logging.getLogger('bundle2').debug('%r', b2caps)
        if b2caps:
            b2caps['replycaps'] = encodecaps({'error': ['abort']})
        cg = create_bundle(store, push_commits, b2caps)
        if not isinstance(repo, HelperRepo):
            cg = util.chunkbuffer(cg)
            if not b2caps:
                cg = cg1unpacker(cg, 'UN')
        reply = repo.unbundle(cg, repo_heads, '')
        if unbundle20 and isinstance(reply, unbundle20):
            parts = iter(reply.iterparts())
            for part in parts:
                logging.getLogger('bundle2').debug('part: %s', part.type)
                logging.getLogger('bundle2').debug('params: %r', part.params)
                if part.type == 'output':
                    sys.stderr.write(part.read())
                elif part.type == 'reply:changegroup':
                    # TODO: should check params['in-reply-to']
                    reply = int(part.params['return'])
                elif part.type == 'error:abort':
                    raise error.Abort(part.params['message'],
                                      hint=part.params.get('hint'))
                else:
                    logging.getLogger('bundle2').warning(
                        'ignoring bundle2 part: %s', part.type)
        pushed = reply != 0
    return gitdag(push_commits) if pushed or dry_run else ()
Example #12
def processparts(orig, repo, op, unbundler):

    # make sure we don't wrap processparts in case of `hg unbundle`
    if op.source == 'unbundle':
        return orig(repo, op, unbundler)

    # this server routes each push to bundle store
    if repo.ui.configbool('infinitepush', 'pushtobundlestore'):
        return storetobundlestore(orig, repo, op, unbundler)

    if unbundler.params.get('infinitepush') != 'True':
        return orig(repo, op, unbundler)

    handleallparts = repo.ui.configbool('infinitepush', 'storeallparts')

    bundler = bundle2.bundle20(repo.ui)
    cgparams = None
    with bundle2.partiterator(repo, op, unbundler) as parts:
        for part in parts:
            bundlepart = None
            if part.type == 'replycaps':
                # This configures the current operation to allow reply parts.
                bundle2._processpart(op, part)
            elif part.type == bundleparts.scratchbranchparttype:
                # Scratch branch parts need to be converted to normal
                # changegroup parts, and the extra parameters stored for later
                # when we upload to the store. Eventually those parameters will
                # be put on the actual bundle instead of this part, then we can
                # send a vanilla changegroup instead of the scratchbranch part.
                cgversion = part.params.get('cgversion', '01')
                bundlepart = bundle2.bundlepart('changegroup',
                                                data=part.read())
                bundlepart.addparam('version', cgversion)
                cgparams = part.params

                # If we're not dumping all parts into the new bundle, we need to
                # alert the future pushkey and phase-heads handler to skip
                # the part.
                if not handleallparts:
                    op.records.add(scratchbranchparttype + '_skippushkey',
                                   True)
                    op.records.add(scratchbranchparttype + '_skipphaseheads',
                                   True)
            else:
                if handleallparts:
                    # Ideally we would not process any parts, and instead just
                    # forward them to the bundle for storage, but since this
                    # differs from previous behavior, we need to put it behind a
                    # config flag for incremental rollout.
                    bundlepart = bundle2.bundlepart(part.type,
                                                    data=part.read())
                    for key, value in part.params.iteritems():
                        bundlepart.addparam(key, value)

                    # Certain parts require a response
                    if part.type == 'pushkey':
                        if op.reply is not None:
                            rpart = op.reply.newpart('reply:pushkey')
                            rpart.addparam('in-reply-to',
                                           str(part.id),
                                           mandatory=False)
                            rpart.addparam('return', '1', mandatory=False)
                else:
                    bundle2._processpart(op, part)

            if handleallparts:
                op.records.add(part.type, {
                    'return': 1,
                })
            if bundlepart:
                bundler.addpart(bundlepart)

    # If commits were sent, store them
    if cgparams:
        buf = util.chunkbuffer(bundler.getchunks())
        fd, bundlefile = tempfile.mkstemp()
        try:
            try:
                fp = os.fdopen(fd, r'wb')
                fp.write(buf.read())
            finally:
                fp.close()
            storebundle(op, cgparams, bundlefile)
        finally:
            try:
                os.unlink(bundlefile)
            except Exception:
                # we would rather see the original exception
                pass
Example #13
def getlfile(ui, hash):
    return util.chunkbuffer(openstore(ui=ui)._get(hash))
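
getlfile is the smallest illustration of the thread running through all of these snippets: util.chunkbuffer wraps an iterator of byte chunks in a single file-like object. A minimal sketch, assuming a Mercurial installation is importable; the chunk values are arbitrary placeholders:

from mercurial import util

def chunks():
    # Stand-in for generators such as bundler.getchunks() or create_bundle() above.
    yield b'HG20'
    yield b'\x00' * 4

buf = util.chunkbuffer(chunks())
data = buf.read()  # drains the iterator into one byte string, as in fp.write(buf.read()) above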
Example #14
def push(repo, store, what, repo_heads, repo_branches):
    store.init_fast_import()

    def heads():
        for sha1 in store.heads(repo_branches):
            yield '^%s' % store.changeset_ref(sha1)

    def local_bases():
        for c in Git.iter('rev-list', '--stdin', '--topo-order',
                          '--full-history', '--boundary',
                          *(w for w in what if w), stdin=heads()):
            if c[0] != '-':
                continue
            yield store.hg_changeset(c[1:])

        for w in what:
            rev = store.hg_changeset(w)
            if rev:
                yield rev

    common = findcommon(repo, store, set(local_bases()))
    logging.info('common: %s' % common)

    def revs():
        for sha1 in common:
            yield '^%s' % store.changeset_ref(sha1)

    push_commits = list(Git.iter('rev-list', '--stdin', '--topo-order',
                                 '--full-history', '--parents', '--reverse',
                                 *(w for w in what if w), stdin=revs()))

    pushed = False
    if push_commits:
        has_root = any(len(p) == 40 for p in push_commits)
        force = all(v[1] for v in what.values())
        if has_root and repo_heads:
            if not force:
                raise Exception('Cannot push a new root')
            else:
                logging.warn('Pushing a new root')
        if force:
            repo_heads = ['force']
        else:
            if not repo_heads:
                repo_heads = [NULL_NODE_ID]
            repo_heads = [unhexlify(h) for h in repo_heads]
        if repo.local():
            repo.local().ui.setconfig('server', 'validate', True)
        b2caps = bundle2caps(repo) if unbundle20 else {}
        if b2caps and (repo.url().startswith(('http://', 'https://')) or
                       not isinstance(repo, HelperRepo)):
            b2caps['replycaps'] = True
        cg = create_bundle(store, push_commits, b2caps)
        if not isinstance(repo, HelperRepo):
            cg = util.chunkbuffer(cg)
            if not b2caps:
                cg = cg1unpacker(cg, 'UN')
        reply = repo.unbundle(cg, repo_heads, '')
        if unbundle20 and isinstance(reply, unbundle20):
            parts = iter(reply.iterparts())
            for part in parts:
                if part.type == 'output':
                    sys.stderr.write(part.read())
                elif part.type == 'reply:changegroup':
                    # TODO: should check params['in-reply-to']
                    reply = int(part.params['return'])
                else:
                    logging.getLogger('bundle2').warning(
                        'ignoring bundle2 part: %s', part.type)
        pushed = reply != 0
    return gitdag(push_commits) if pushed else ()