Code example #1
def infowebcommand(web):
    """Get information about the specified changeset(s).

    This is a legacy API from before the days of Mercurial's built-in JSON
    API. It is used by unidentified parts of automation. Over time these
    consumers should transition to the modern/native JSON API.
    """
    req = web.req

    if 'node' not in req.qsparams:
        return web.sendtemplate('error',
                                error={'error': "missing parameter 'node'"})

    nodes = req.qsparams.getall('node')

    csets = []
    for node in nodes:
        ctx = scmutil.revsymbol(web.repo, node)
        csets.append({
            'rev': ctx.rev(),
            'node': ctx.hex(),
            'user': ctx.user(),
            'date': ctx.date(),
            'description': ctx.description(),
            'branch': ctx.branch(),
            'tags': ctx.tags(),
            'parents': [p.hex() for p in ctx.parents()],
            'children': [c.hex() for c in ctx.children()],
            'files': ctx.files(),
        })

    return web.sendtemplate('info', csets=templateutil.mappinglist(csets))
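
The handler above follows hgweb's web-command convention: it receives a `web` request wrapper and returns a rendered template. As a rough sketch of how such a handler can be exposed from an extension (this wiring is an assumption, not part of the example, and exact registration details vary across Mercurial versions):

# Hedged sketch, not the extension's actual setup code: attach the handler to
# Mercurial's webcommands module so hgweb can dispatch /<repo>/info to it.
from mercurial.hgweb import webcommands


def extsetup(ui):
    setattr(webcommands, 'info', infowebcommand)
    webcommands.__all__.append('info')
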
Code example #2
def create_entry(ctx, web, pushid, user, date, node, mergehidden, parity, pushcount=None):
    """Creates an entry to be yielded in the `changelist` generator

    `pushcount` will be non-None when we are generating an entry for the first change
    in a given push
    """
    repo = web.repo
    n = ctx.node()
    ctxfiles = ctx.files()
    firstchange = pushcount is not None

    mergerollupval = templateutil.mappinglist(
        [{'count': pushcount}]
        if firstchange and mergehidden == 'hidden'
        else []
    )

    pushval = templateutil.mappinglist(
        [{"date": localdate(date), "user": user}]
        if firstchange
        else []
    )

    # TRACKING hg47
    # Mercurial 4.7 dropped the template argument from listfilediffs, so call
    # it with whichever signature matches the running version.
    if util.versiontuple(n=2) >= (4, 7):
        filediffs = webutil.listfilediffs(ctxfiles, node, len(ctxfiles))
    else:
        filediffs = webutil.listfilediffs(web.tmpl, ctxfiles, node, len(ctxfiles))

    return {
        "author": ctx.user(),
        "desc": ctx.description(),
        "files": filediffs,
        "rev": ctx.rev(),
        "node": hex(n),
        "parents": [c.hex() for c in ctx.parents()],
        "tags": webutil.nodetagsdict(repo, n),
        "branches": webutil.nodebranchdict(repo, ctx),
        "inbranch": webutil.nodeinbranch(repo, ctx),
        "hidden": mergehidden,
        "mergerollup": mergerollupval,
        "id": pushid,
        "parity": parity,
        "push": pushval,
    }
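
The TRACKING hg47 branch above selects between two `listfilediffs` signatures based on the running Mercurial version. When several call sites need the same guard, the check can be pulled into a small compatibility helper; the sketch below only restates the idiom from the example, and the helper name is hypothetical:

# Hypothetical helper illustrating the version-gating idiom used above.
from mercurial import util
from mercurial.hgweb import webutil


def compat_listfilediffs(web, ctxfiles, node):
    # Per the TRACKING hg47 comment, hg >= 4.7 dropped the template argument.
    if util.versiontuple(n=2) >= (4, 7):
        return webutil.listfilediffs(ctxfiles, node, len(ctxfiles))
    return webutil.listfilediffs(web.tmpl, ctxfiles, node, len(ctxfiles))
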
Code example #3
def create_entry(ctx,
                 web,
                 pushid,
                 user,
                 date,
                 node,
                 mergehidden,
                 parity,
                 pushcount=None):
    """Creates an entry to be yielded in the `changelist` generator

    `pushcount` will be non-None when we are generating an entry for the first change
    in a given push
    """
    repo = web.repo
    n = ctx.node()
    ctxfiles = ctx.files()
    firstchange = pushcount is not None

    mergerollupval = templateutil.mappinglist([{
        b'count': pushcount
    }] if firstchange and mergehidden == b'hidden' else [])

    pushval = templateutil.mappinglist([{
        b"date": localdate(date),
        b"user": user
    }] if firstchange else [])

    filediffs = webutil.listfilediffs(ctxfiles, node, len(ctxfiles))

    return {
        b"author": ctx.user(),
        b"desc": ctx.description(),
        b"files": filediffs,
        b"rev": ctx.rev(),
        b"node": hex(n),
        b"parents": [c.hex() for c in ctx.parents()],
        b"tags": webutil.nodetagsdict(repo, n),
        b"branches": webutil.nodebranchdict(repo, ctx),
        b"inbranch": webutil.nodeinbranch(repo, ctx),
        b"hidden": mergehidden,
        b"mergerollup": mergerollupval,
        b"id": pushid,
        b"parity": parity,
        b"push": pushval,
    }
Code example #4
def feedentrygenerator(_context, entries, repo, url, urlbase):
    """Generator of mappings for pushlog feed entries field
    """
    for pushid, user, date, node in entries:
        ctx = scmutil.revsingle(repo, node)
        filesgen = [{'name': fn} for fn in ctx.files()]
        yield {
            'node': node,
            'date': isotime(date),
            'user': xmlescape(user),
            'urlbase': urlbase,
            'url': url,
            'files': templateutil.mappinglist(filesgen),
        }
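
The unused leading `_context` parameter matches the calling convention of `templateutil.mappinggenerator`, which invokes its factory lazily as `make(context, *args)` at render time. A sketch of how the generator would plausibly be handed to a template (the wrapper name is hypothetical and the surrounding wiring is an assumption):

# Hedged sketch: wrap the generator so the template engine drives it lazily.
from mercurial import templateutil


def feedentries_mapping(entries, repo, url, urlbase):
    # mappinggenerator calls feedentrygenerator(context, entries, repo, url,
    # urlbase) when the feed template iterates over the value.
    return templateutil.mappinggenerator(
        feedentrygenerator, args=(entries, repo, url, urlbase))
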
Code example #5
def automationrelevancewebcommand(web):
    req = web.req

    if b'node' not in req.qsparams:
        # TRACKING hg48
        if util.versiontuple(n=2) >= (4, 8):
            return web.sendtemplate(b'error',
                                    error=b"missing parameter 'node'")
        else:
            return web.sendtemplate(
                b'error', error={b'error': b"missing parameter 'node'"})

    repo = web.repo
    deletefields = {
        b'bookmarks',
        b'branch',
        b'branches',
        b'changelogtag',
        b'child',
        b'ctx',
        b'inbranch',
        b'instabilities',
        b'obsolete',
        b'parent',
        b'succsandmarkers',
        b'tags',
        b'whyunstable',
    }

    csets = []
    # Query an unfiltered repo because sometimes automation wants to run against
    # changesets that have since become hidden. The response exposes whether the
    # requested node is visible, so consumers can make intelligent decisions
    # about what to do if the changeset isn't visible.
    urepo = repo.unfiltered()

    revs = list(urepo.revs(b'automationrelevant(%r)', req.qsparams[b'node']))

    # The pushlog extension wraps webutil.commonentry, and the way it is called
    # means pushlog opens a SQLite connection on every call. This is
    # inefficient, so we preload and cache data for the pushlog entries we
    # care about.
    cl = urepo.changelog
    nodes = [cl.node(rev) for rev in revs]

    with repo.unfiltered().pushlog.cache_data_for_nodes(nodes):
        for rev in revs:
            ctx = urepo[rev]
            entry = webutil.changelistentry(web, ctx)

            if req.qsparams.get(b'backouts'):
                backout_node = get_backoutbynode(b'hgmo', repo, ctx)
                if backout_node is not None:
                    entry[b'backedoutby'] = backout_node

            # The pushnodes list is redundant with data from other changesets.
            # The amount of redundant data for pushes containing N>100
            # changesets can add up to megabytes in size.
            try:
                del entry[b'pushnodes']
            except KeyError:
                pass

            # Some items in changelistentry are generators, which json.dumps()
            # can't handle. So we expand them.
            entrycopy = copy.copy(entry)
            for k, v in entrycopy.items():
                # "files" is a generator that attempts to call a template.
                # Don't even bother and just repopulate it.
                if k == b'files':
                    entry[b'files'] = sorted(ctx.files())
                elif k == b'allparents':
                    # TRACKING hg48
                    # Generic template keyword args (context, mapping) are
                    # required but not actually used, so `None, None` suffices.
                    if util.versiontuple(n=2) >= (4, 8):
                        iterator = v(None, None).itermaps(ctx)
                    else:
                        iterator = v().itermaps(ctx)

                    entry[b'parents'] = [p[b'node'] for p in iterator]
                    del entry[b'allparents']
                # These aren't interesting to us, so prune them. The original
                # impetus for this was that "changelogtag" isn't part of the
                # json template and adding it is non-trivial.
                elif k in deletefields:
                    del entry[k]
                elif isinstance(v, types.GeneratorType):
                    entry[k] = list(v)

            csets.append(entry)

    # Advertise whether the requested revision is visible (non-obsolete).
    if csets:
        visible = csets[-1][b'node'] in repo
    else:
        visible = None

    data = {
        b'changesets': templateutil.mappinglist(csets),
        b'visible': visible,
    }

    return web.sendtemplate(b'automationrelevance', **pycompat.strkwargs(data))
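
On hg.mozilla.org this command is reachable through Mercurial's JSON style, e.g. as json-automationrelevance/<node>; the following client-side sketch assumes that deployment. The full field set of each changeset is defined by the 'automationrelevance' template (not shown here), so only the top-level keys assembled above are used:

# Hedged client sketch; the URL shape assumes the hg.mozilla.org deployment.
import json
from urllib.request import urlopen

node = 'REPLACE_WITH_CHANGESET_HASH'  # illustrative placeholder
url = 'https://hg.mozilla.org/mozilla-central/json-automationrelevance/' + node

with urlopen(url) as resp:
    payload = json.load(resp)

# 'changesets' and 'visible' mirror the data dict built by the handler.
print(payload['visible'])
print(len(payload['changesets']))
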
Code example #6
def changesetentry(orig, web, ctx):
    """Add metadata for an individual changeset in hgweb."""
    d = orig(web, ctx)

    d = pycompat.byteskwargs(d)

    repo = web.repo

    db = db_for_repo(repo)
    if not db:
        return pycompat.strkwargs(d)

    releases = release_info_for_changeset(db, repo, ctx)

    if releases[b'this']:
        d[b'firefox_releases_here'] = []
        d[b'firefox_releases_first'] = []

        for config, build in sorted(releases[b'this'].items()):
            build[b'anchor'] = releasedb.build_anchor(build)

            # Set links to previous and future releases.
            if config in releases[b'previous']:
                build[b'previousnode'] = releases[b'previous'][config][
                    b'revision']

            d[b'firefox_releases_here'].append(build)
            d[b'firefox_releases_first'].append(build)

    if releases[b'future']:
        d.setdefault(b'firefox_releases_first', [])

        for config, build in sorted(releases[b'future'].items()):
            build[b'anchor'] = releasedb.build_anchor(build)

            if build not in d[b'firefox_releases_first']:
                d[b'firefox_releases_first'].append(build)

    if releases[b'previous']:
        d[b'firefox_releases_last'] = []

        for config, build in sorted(releases[b'previous'].items()):
            build[b'anchor'] = releasedb.build_anchor(build)

            d[b'firefox_releases_last'].append(build)

    # Used so we don't display "first release with" and "last release without".
    # We omit displaying in this scenario because we're not confident in the
    # data and don't want to take chances with inaccurate data.
    if b'firefox_releases_first' in d and b'firefox_releases_last' in d:
        d[b'have_first_and_last_firefox_releases'] = True

    # Do some template fixes
    # TODO build via a generator
    if b'firefox_releases_first' in d:
        d[b'firefox_releases_first'] = templateutil.mappinglist(
            d[b'firefox_releases_first'])

    if b'firefox_releases_last' in d:
        d[b'firefox_releases_last'] = templateutil.mappinglist(
            d[b'firefox_releases_last'])

    if b'firefox_releases_here' in d:
        d[b'firefox_releases_here'] = templateutil.mappinglist(
            d[b'firefox_releases_here'])

    return pycompat.strkwargs(d)
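
The `(orig, web, ctx)` signature is the convention used by `extensions.wrapfunction` wrappers, with `orig` being the original `webutil.changesetentry`. A sketch of how the wrapper would typically be installed during extension setup (an assumption; the example does not include its setup code):

# Hedged sketch of the usual wrapfunction wiring for this kind of wrapper.
from mercurial import extensions
from mercurial.hgweb import webutil


def extsetup(ui):
    extensions.wrapfunction(webutil, 'changesetentry', changesetentry)
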
Code example #7
def automationrelevancewebcommand(web):
    req = web.req

    if 'node' not in req.qsparams:
        return web.sendtemplate('error',
                                error={'error': "missing parameter 'node'"})

    repo = web.repo
    deletefields = {
        'bookmarks',
        'branch',
        'branches',
        'changelogtag',
        'child',
        'ctx',
        'inbranch',
        'instabilities',
        'obsolete',
        'parent',
        'phase',
        'succsandmarkers',
        'tags',
        'whyunstable',
    }

    csets = []
    # Query an unfiltered repo because sometimes automation wants to run against
    # changesets that have since become hidden. The response exposes whether the
    # requested node is visible, so consumers can make intelligent decisions
    # about what to do if the changeset isn't visible.
    urepo = repo.unfiltered()

    revs = list(urepo.revs('automationrelevant(%r)', req.qsparams['node']))

    # The pushlog extension wraps webutil.commonentry, and the way it is called
    # means pushlog opens a SQLite connection on every call. This is
    # inefficient, so we preload and cache data for the pushlog entries we
    # care about.
    cl = urepo.changelog
    nodes = [cl.node(rev) for rev in revs]

    with repo.unfiltered().pushlog.cache_data_for_nodes(nodes):
        for rev in revs:
            ctx = urepo[rev]
            entry = webutil.changelistentry(web, ctx)

            # The pushnodes list is redundant with data from other changesets.
            # The amount of redundant data for pushes containing N>100
            # changesets can add up to megabytes in size.
            try:
                del entry['pushnodes']
            except KeyError:
                pass

            # Some items in changelistentry are generators, which json.dumps()
            # can't handle. So we expand them.
            for k, v in entry.items():
                # "files" is a generator that attempts to call a template.
                # Don't even bother and just repopulate it.
                if k == 'files':
                    entry['files'] = sorted(ctx.files())
                elif k == 'allparents':
                    # TRACKING hg46
                    # web.sendtemplate exists on hg >= 4.6, where allparents
                    # must be expanded via itermaps().
                    if util.safehasattr(web, 'sendtemplate'):
                        iterator = v().itermaps(ctx)
                    else:
                        iterator = v()

                    entry['parents'] = [p['node'] for p in iterator]
                    del entry['allparents']
                # These aren't interesting to us, so prune them. The original
                # impetus for this was that "changelogtag" isn't part of the
                # json template and adding it is non-trivial.
                elif k in deletefields:
                    del entry[k]
                elif isinstance(v, types.GeneratorType):
                    entry[k] = list(v)

            csets.append(entry)

    # Advertise whether the requested revision is visible (non-obsolete).
    if csets:
        visible = csets[-1]['node'] in repo
    else:
        visible = None

    data = {
        'changesets': templateutil.mappinglist(csets),
        'visible': visible,
    }

    return web.sendtemplate('automationrelevance', **data)
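
Code example #7 is the earlier, str-keyed variant of the handler in code example #5. Where example #5 compares the Mercurial version explicitly, this variant feature-detects the newer API by probing for web.sendtemplate. Both guard styles are restated side by side below for clarity; the helper names are illustrative:

# Hedged illustration of the two compatibility-guard idioms used above.
from mercurial import util


def supports_sendtemplate(web):
    # Code example #7: probe for the newer hgweb API directly.
    return util.safehasattr(web, 'sendtemplate')


def is_hg48_or_newer():
    # Code example #5: compare the running Mercurial version.
    return util.versiontuple(n=2) >= (4, 8)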