Code example #1
File: churn.py  Project: Frostman/intellij-community
def countrate(ui, repo, amap, *pats, **opts):
    """Calculate stats"""
    if opts.get('dateformat'):
        def getkey(ctx):
            t, tz = ctx.date()
            date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
            return date.strftime(opts['dateformat'])
    else:
        tmpl = opts.get('template', '{author|email}')
        tmpl = maketemplater(ui, repo, tmpl)
        def getkey(ctx):
            ui.pushbuffer()
            tmpl.show(ctx)
            return ui.popbuffer()

    state = {'count': 0, 'pct': 0}
    rate = {}
    df = False
    if opts.get('date'):
        df = util.matchdate(opts['date'])

    m = cmdutil.match(repo, pats, opts)
    def prep(ctx, fns):
        rev = ctx.rev()
        if df and not df(ctx.date()[0]): # doesn't match date format
            return

        key = getkey(ctx)
        key = amap.get(key, key) # alias remap
        if opts.get('changesets'):
            rate[key] = (rate.get(key, (0,))[0] + 1, 0)
        else:
            parents = ctx.parents()
            if len(parents) > 1:
                ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
                return

            ctx1 = parents[0]
            lines = changedlines(ui, repo, ctx1, ctx, fns)
            rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]

        if opts.get('progress'):
            state['count'] += 1
            newpct = int(100.0 * state['count'] / max(len(repo), 1))
            if state['pct'] < newpct:
                state['pct'] = newpct
                ui.write("\r" + _("generating stats: %d%%") % state['pct'])
                sys.stdout.flush()

    for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
        continue

    if opts.get('progress'):
        ui.write("\r")
        sys.stdout.flush()

    return rate
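
Note on the shared pattern: every countrate variant on this page builds a predicate with util.matchdate and then calls it with the Unix timestamp from ctx.date()[0]. Below is a minimal standalone sketch of just that mechanism, assuming an already-open repository object named repo; the "-30" relative spec is the same style walk_back_by_days uses further down (see `hg help dates` for the full grammar).

from mercurial import util

df = util.matchdate('-30')            # predicate: "committed within the last 30 days"
for rev in repo:                      # a localrepo iterates over revision numbers
    ctx = repo[rev]
    timestamp, tzoffset = ctx.date()  # Mercurial dates are (unixtime, tz offset)
    if df(timestamp):
        print('%s %s' % (ctx.hex()[:12], ctx.user()))
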
Code example #2
File: churn.py  Project: Nurb432/plan9front
def countrate(ui, repo, amap, *pats, **opts):
    """Calculate stats"""
    if opts.get('dateformat'):
        def getkey(ctx):
            t, tz = ctx.date()
            date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
            return date.strftime(opts['dateformat'])
    else:
        tmpl = opts.get('template', '{author|email}')
        tmpl = maketemplater(ui, repo, tmpl)
        def getkey(ctx):
            ui.pushbuffer()
            tmpl.show(ctx)
            return ui.popbuffer()

    count = pct = 0
    rate = {}
    df = False
    if opts.get('date'):
        df = util.matchdate(opts['date'])

    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    for st, rev, fns in changeiter:
        if not st == 'add':
            continue
        if df and not df(get(rev)[2][0]): # doesn't match date format
            continue

        ctx = repo[rev]
        key = getkey(ctx)
        key = amap.get(key, key) # alias remap
        if opts.get('changesets'):
            rate[key] = rate.get(key, 0) + 1
        else:
            parents = ctx.parents()
            if len(parents) > 1:
                ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
                continue

            ctx1 = parents[0]
            lines = changedlines(ui, repo, ctx1, ctx, fns)
            rate[key] = rate.get(key, 0) + lines

        if opts.get('progress'):
            count += 1
            newpct = int(100.0 * count / max(len(repo), 1))
            if pct < newpct:
                pct = newpct
                ui.write("\r" + _("generating stats: %d%%") % pct)
                sys.stdout.flush()

    if opts.get('progress'):
        ui.write("\r")
        sys.stdout.flush()

    return rate
Code example #3
File: churn.py  Project: rybesh/mysite-lib
def countrate(ui, repo, amap, *pats, **opts):
    """Calculate stats"""
    if opts.get("dateformat"):

        def getkey(ctx):
            t, tz = ctx.date()
            date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
            return date.strftime(opts["dateformat"])

    else:
        tmpl = opts.get("template", "{author|email}")
        tmpl = maketemplater(ui, repo, tmpl)

        def getkey(ctx):
            ui.pushbuffer()
            tmpl.show(ctx)
            return ui.popbuffer()

    state = {"count": 0}
    rate = {}
    df = False
    if opts.get("date"):
        df = util.matchdate(opts["date"])

    m = scmutil.match(repo[None], pats, opts)

    def prep(ctx, fns):
        rev = ctx.rev()
        if df and not df(ctx.date()[0]):  # doesn't match date format
            return

        key = getkey(ctx).strip()
        key = amap.get(key, key)  # alias remap
        if opts.get("changesets"):
            rate[key] = (rate.get(key, (0,))[0] + 1, 0)
        else:
            parents = ctx.parents()
            if len(parents) > 1:
                ui.note(_("Revision %d is a merge, ignoring...\n") % (rev,))
                return

            ctx1 = parents[0]
            lines = changedlines(ui, repo, ctx1, ctx, fns)
            rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]

        state["count"] += 1
        ui.progress(_("analyzing"), state["count"], total=len(repo))

    for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
        continue

    ui.progress(_("analyzing"), None)

    return rate
Code example #4
File: metrics.py  Project: ctalbert/churn
def gather_metrics(ui, repo, *pats, **opts):
    # This is my code to gather what we need for metrics
    state = {'count': 0}
    metrics = {}
    df = False
    if opts.get('date'):
        df = util.matchdate(opts['date'])
    m = scmutil.match(repo[None], pats, opts)

    def walker(ctx, fns):
        #import pdb
        #pdb.set_trace()

        # Verify that this change is inside our date
        # These are passed into the match and walker functions via the opts param
        # Not sure this is needed, but churn had it, so keeping for now
        # TODO: Experiment with whether this extra check is necessary
        if df and not df(ctx.date()[0]): 
            return

        # Create the chgset's object in our tracker
        chgsetID = ctx.hex()
        metrics[chgsetID] = {}
        metrics[chgsetID]['is_merge'] = len(ctx.parents()) > 1
        ctx1 = ctx.parents()[0]
        metrics[chgsetID]['parents'] = ctx.parents()[0].hex()
        
        user = ctx.user()
        metrics[chgsetID]['committer'] = user
        metrics[chgsetID]['committer_email'] = user[user.find('<')+1:user.find('>')]
        metrics[chgsetID]['committer_name'] = user.split('<')[0].strip()

        t, tz = ctx.date()
        d = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
        metrics[chgsetID]['datestamp'] = d.strftime('%Y-%m-%dT%H:%M:%SZ')

        # If we have a robot committer, don't bother parsing the commit message
        metrics[chgsetID]['msg'] = ctx.description()
        if '*****@*****.**' not in user:
            metrics[chgsetID]['bug'], metrics[chgsetID]['is_backout'], \
            metrics[chgsetID]['backout_rev'], metrics[chgsetID]['reviewer'], \
            metrics[chgsetID]['approver'] = parse_commit_msg(ctx.description())
        
        metrics[chgsetID]['files'] = get_lines_and_files(ui, repo, ctx1, ctx, fns)

        state['count'] += 1
        ui.progress(_('analyzing'), state['count'], total=len(repo))

    for ctx in cmdutil.walkchangerevs(repo, m, opts, walker):
        continue

    ui.progress(_('analyzing'), None)
    return metrics
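
The dict returned by gather_metrics is keyed by changeset hex with one flat record per changeset, so reporting on it is a plain dictionary walk. A hedged consumption sketch follows; the field names come from the walker above, while the aggregation itself is illustrative and not part of the original extension.

def summarize(metrics):
    """Count merges and changesets per committer from gather_metrics() output."""
    merges = sum(1 for rec in metrics.values() if rec['is_merge'])
    per_committer = {}
    for rec in metrics.values():
        email = rec['committer_email']
        per_committer[email] = per_committer.get(email, 0) + 1
    return merges, per_committer
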
Code example #5
File: churn.py  Project: MezzLabs/mercurial
def countrate(ui, repo, amap, *pats, **opts):
    """Calculate stats"""
    if opts.get('dateformat'):
        def getkey(ctx):
            t, tz = ctx.date()
            date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
            return date.strftime(opts['dateformat'])
    else:
        tmpl = opts.get('template', '{author|email}')
        tmpl = maketemplater(ui, repo, tmpl)
        def getkey(ctx):
            ui.pushbuffer()
            tmpl.show(ctx)
            return ui.popbuffer()

    state = {'count': 0}
    rate = {}
    df = False
    if opts.get('date'):
        df = util.matchdate(opts['date'])

    m = cmdutil.match(repo, pats, opts)
    def prep(ctx, fns):
        rev = ctx.rev()
        if df and not df(ctx.date()[0]): # doesn't match date format
            return

        key = getkey(ctx)
        key = amap.get(key, key) # alias remap
        key = key.strip() # ignore leading and trailing spaces
        if opts.get('changesets'):
            rate[key] = (rate.get(key, (0,))[0] + 1, 0)
        else:
            parents = ctx.parents()
            if len(parents) > 1:
                ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
                return

            ctx1 = parents[0]
            lines = changedlines(ui, repo, ctx1, ctx, fns)
            rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]

        state['count'] += 1
        ui.progress(_('analyzing'), state['count'], total=len(repo))

    for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
        continue

    ui.progress(_('analyzing'), None)

    return rate
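
Each value in the rate dict above is either a (changeset-count, 0) tuple or an [added, removed] pair, so a caller typically ranks keys by total volume before printing. The small sketch below shows that step as an assumption about how the result might be consumed; it is not the churn extension's own formatting code.

def rank(rate, limit=10):
    """Sort countrate() output by total activity (changed lines or changesets)."""
    ranked = sorted(rate.items(), key=lambda item: sum(item[1]), reverse=True)
    return ranked[:limit]
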
Code example #6
File: breakdown.py  Project: treystout/dotfiles
def walk_back_by_days(ui, repo, days_back):
  date_filter = util.matchdate("-%d" % days_back)

  valid_changes = []
  # walk the change log backwards
  for log_idx in reversed(repo.changelog.index):
    ctx = repo[log_idx[7]]
    if ctx.date()[0] == 0: # some don't have a date, weird...
      continue
    elif date_filter(ctx.date()[0]):
      valid_changes.append(ctx)
    else: # has a date, but it didn't pass the filter, break out of the loop
      break
  return valid_changes
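
A hypothetical invocation of walk_back_by_days, assuming a local checkout; hg.repository(ui.ui(), path) is the same constructor the hg_walker.py example near the end of this page uses, and the path below is a placeholder.

from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), '/path/to/repo')   # placeholder path
for ctx in walk_back_by_days(uimod.ui(), repo, 7):   # changesets from the last 7 days
    print('%s %s' % (ctx.hex()[:12], ctx.user()))
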
Code example #7
def revset_pushdate(repo, subset, x):
    """``pushdate(interval)``
    Changesets that were pushed within the interval, see :hg:`help dates`.
    """
    l = revset.getargs(x, 1, 1, 'pushdate requires one argument')

    ds = revset.getstring(l[0], 'pushdate requires a string argument')
    dm = util.matchdate(ds)

    def getrevs():
        for pushid, who, when, nodes in repo.pushlog.pushes():
            if dm(when):
                for node in nodes:
                    yield repo[node].rev()

    return subset & revset.generatorset(getrevs())
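
The revset snippets on this page only define predicate functions; how pushdate() becomes usable in `hg log -r "pushdate(...)"` is not shown. In extensions of this era that was commonly done by registering the function in revset.symbols during extsetup. The sketch below is an assumed registration pattern, not code taken from the examples.

from mercurial import revset

def extsetup(ui):
    # assumed pre-registrar wiring; afterwards `hg log -r "pushdate('-7')"`
    # resolves through revset_pushdate defined above
    revset.symbols['pushdate'] = revset_pushdate
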
Code example #8
File: logfilter.py  Project: tdjordan/tortoisegit
def _btn_apply_clicked(self, button, data=None):
    opts = {}
    if self.searchradio.get_active():
        pats = self.filesentry.get_text()
        kw = self.kwentry.get_text()
        date = self.dateentry.get_text()
        if pats:
            opts['pats'] = [p.strip() for p in pats.split(',')]
        if kw:
            opts['keyword'] = [w.strip() for w in kw.split(',')]
        if date:
            try:
                df = util.matchdate(date)
                opts['date'] = date
            except Exception, e:
                Prompt('Invalid date specification', str(e), self).run()
                self.dateentry.grab_focus()
                return
Code example #9
def revset_firstpushdate(repo, subset, x):
    """``firstpushdate(DATE)``
    Changesets that were initially pushed according to the date spec provided.
    """
    ds = revset.getstring(x, _('firstpushdate() requires a string'))
    dm = util.matchdate(ds)

    def fltr(x):
        pushes = list(repo.changetracker.pushes_for_changeset(repo[x].node()))

        if not pushes:
            return False

        when = pushes[0][2]

        return dm(when)

    return subset.filter(fltr)
Code example #10
def revset_pushdate(repo, subset, x):
    """``pushdate(interval)``

    Changesets that were pushed within the interval. See :hg:`help dates`.
    """
    l = revset.getargs(x, 1, 1, 'pushdate requires one argument')

    ds = revset.getstring(l[0], 'pushdate requires a string argument')
    dm = util.matchdate(ds)

    def getrevs():
        to_rev = repo.changelog.rev
        for push in repo.pushlog.pushes():
            if dm(push.when):
                for node in push.nodes:
                    yield to_rev(bin(node))

    return subset & revset.generatorset(getrevs())
Code example #11
def revset_pushdate(repo, subset, x):
    """``pushdate(DATE)``
    Changesets that were pushed according to the date spec provided.

    All pushes are examined.
    """
    ds = revset.getstring(x, _('pushdate() requires a string'))
    dm = util.matchdate(ds)

    def fltr(x):
        for push in repo.changetracker.pushes_for_changeset(repo[x].node()):
            when = push[2]

            if dm(when):
                return True

        return False

    return subset.filter(fltr)
Code example #12
File: revgraph.py  Project: tdjordan/tortoisegit
def filtered_log_generator(repo, pats, opts):
    """Fill view model iteratively
       repo - Mercurial repository object
       pats - list of file names or patterns
       opts - command line options for log command
    """
    # Log searches: pattern, keyword, date, etc
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    stack = []
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(repo.ui, repo, pats, get, opts)
    for st, rev, fns in changeiter:
        if st == "iter":
            if stack:
                yield stack.pop()
            continue
        if st != "add":
            continue
        parents = __get_parents(repo, rev)
        if opts["no_merges"] and len(parents) == 2:
            continue
        if opts["only_merges"] and len(parents) != 2:
            continue

        if df:
            changes = get(rev)
            if not df(changes[2][0]):
                continue

        # TODO: add copies/renames later
        if opts["keyword"]:
            changes = get(rev)
            miss = 0
            for k in [kw.lower() for kw in opts["keyword"]]:
                if not (k in changes[1].lower() or k in changes[4].lower() or k in " ".join(changes[3]).lower()):
                    miss = 1
                    break
            if miss:
                continue
        stack.append((rev, (0, 0), [], parents))
Code example #13
def revset_pushdate(repo, subset, x):
    """``pushdate(DATE)``
    Changesets that were pushed according to the date spec provided.

    All pushes are examined.
    """
    ds = revset.getstring(x, _('pushdate() requires a string'))
    dm = util.matchdate(ds)

    revs = []

    for rev in subset:
        for push in repo.changetracker.pushes_for_changeset(repo[rev].node()):
            when = push[2]

            if dm(when):
                revs.append(rev)
                break

    return revs
Code example #14
def revset_firstpushdate(repo, subset, x):
    """``firstpushdate(DATE)``
    Changesets that were initially pushed according to the date spec provided.
    """
    ds = revset.getstring(x, _('firstpushdate() requires a string'))
    dm = util.matchdate(ds)

    revs = []

    for rev in subset:
        pushes = list(repo.changetracker.pushes_for_changeset(repo[rev].node()))

        if not pushes:
            continue

        when = pushes[0][2]

        if dm(when):
            revs.append(rev)

    return revs
Code example #15
File: chart.py  Project: jwmayfield/dotfiles
def countrate(ui, repo, *pats, **opts):
    """Calculate stats"""
    def getkey(ctx):
        t, tz = ctx.date()
        return int((float(t) - tz) / 86400) * 86400 # bin by day

    state = {'count': 0}
    rate = {}
    df = False
    if opts.get('date'):
        df = util.matchdate(opts['date'])

    m = scmutil.match(repo[None], pats, opts)
    def prep(ctx, fns):
        rev = ctx.rev()
        if df and not df(ctx.date()[0]): # doesn't match date format
            return

        key = getkey(ctx)
        if opts.get('changesets'):
            rate[key] = rate.get(key, 0) + 1
        else:
            parents = ctx.parents()
            if len(parents) > 1:
                ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
                return

            ctx1 = parents[0]
            lines = changedlines(ui, repo, ctx1, ctx, fns)
            rate[key] = rate.get(key, 0) + lines

        state['count'] += 1
        ui.progress(_('analyzing'), state['count'], total=len(repo))

    for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
        continue

    ui.progress(_('analyzing'), None)

    return rate
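
Unlike the churn variants, this getkey bins each changeset to a day boundary, so the keys of the returned rate dict are epoch seconds rather than author strings. Under that assumption, a short helper (not part of the original chart.py) can turn the bins back into date labels for plotting.

import datetime

def labelled_rate(rate):
    """Convert the day-bin keys produced by getkey() above into ISO date labels."""
    return [(datetime.datetime.utcfromtimestamp(daybin).strftime('%Y-%m-%d'),
             rate[daybin])
            for daybin in sorted(rate)]
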
Code example #16
def filtered_log_generator(repo, pats, opts):
    '''Fill view model iteratively
       repo - Mercurial repository object
       pats - list of file names or patterns
       opts - command line options for log command
    '''
    matching_revs = []
    only_branch = opts.get('branch', None)
    df = False
    if opts['date']:
        df = util.matchdate(opts['date'])

    def prep(ctx, fns):
        if only_branch and ctx.branch() != only_branch:
            return
        if opts['no_merges'] and len(ctx.parents()) == 2:
            return
        if opts['only_merges'] and len(ctx.parents()) != 2:
            return
        if df and not df(ctx.date()[0]):
            return
        if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
            return
        if opts['keyword']:
            for k in [kw.lower() for kw in opts['keyword']]:
                if (k in ctx.user().lower() or
                    k in ctx.description().lower() or
                    k in " ".join(ctx.files()).lower()):
                    break
            else:
                return
        matching_revs.append(ctx.rev())

    m = match.match(repo.root, repo.root, pats)
    for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
        if ctx.rev() in matching_revs:
            yield (ctx.rev(), (0,0), [], None)
Code example #17
File: mbox.py  Project: Garoth/Configs
def mimport(ui, repo, *patterns, **opts):
    """qimport patches from mailboxes

    You will be prompted for whether to qimport items from every patch
    group found in configured mailboxes (see 'hg help mbox' for
    details). If patterns are passed, they are used to filter out patch
    groups that do not match any of them. Duplicate groups (based on the
    group or first patch title and sender) are also ignored. For
    each query, the following responses are possible:

    n - skip this patch group
    y - qimport this patch group

    d - done, import selected patches and quit
    q - quit, importing nothing

    ? - display help
    """
    if opts['mbox']:
        paths = [opts['mbox']]
    else:
        paths = ui.config('mbox', 'paths', '').split(os.pathsep)
    paths = [p.strip() for p in paths if p]
    if not paths:
        raise util.Abort(_('no mailbox path configured'))
    patchmessages = getpatchmessages(paths)

    matcher = makematcher(patterns)
    selecteds = []
    stop = False

    datefn = util.always
    if opts.get('date'):
        datefn = util.matchdate(opts.get('date'))
    orphans = opts.get('all')
    groups = filter(matcher, getgroups(patchmessages, datefn, orphans))
    def cmpgroup(a, b):
        return -cmp(a[1][0].date(), b[1][0].date())
    groups.sort(cmpgroup)
    groups = removeduplicates(groups)

    for intro, patches in groups:
        if intro:
            ui.status('%s\n' % intro.subject)
            for p in patches:
                ui.status('    %s\n' % p.subject)
        else:
            ui.status('%s\n' % patches[0].subject)

        while 1:
            allowed = _('[Nydq?]')
            choices = [_('&No'), _('&Yes'), _('&Done'), _('&Quit'), _('&?')]
            r = ui.promptchoice(_('import this group? %s ') % allowed, choices)
            if r == 4:
                doc = gettext(mimport.__doc__)
                c = doc.find(_('n - skip this patch group'))
                for l in doc[c:].splitlines():
                    if l:
                        ui.write(l.strip(), '\n')
                continue
            elif r == 1:
                selecteds.append(patches)
            elif r == 2:
                stop = True
            elif r == 3:
                raise util.Abort(_('user quit'))
            break

        if stop:
            break
        ui.status('\n')

    importpatches(ui, repo, selecteds)
Code example #18
File: hg_walker.py  Project: pombredanne/visualiser
def walk(base, exclude_pattern, start_revision, mode, archive_base, project):
    """walk all revisions contained in the repository"""

    repo = hg.repository(ui.ui(), base)
    c = repo[None]
    if c.modified() or c.added() or c.removed():
        raise util.Abort(_("uncommitted local changes"))

    pats = ()
    # last_revision = get_last_revision(url)
    # if(start_revision and last_revision == 0):
    last_revision = start_revision
    # else:
    #     last_revision += 1
    opts = {'rev': [str(last_revision) + ':'], 'date': '', 'user': ''}

    def create_metrics(in_file_names):
        # create the metrics from the current revision
        context = {}
        context['base'] = base
        context['in_file_names'] = in_file_names
        context['include_metrics'] = [
            ('sloc', 'SLOCMetric'), ('mccabe', 'McCabeMetric')]
        context['quiet'] = True
        context['verbose'] = False
        # context['format'] = 'CSV'
        res = metrics.process(context)
        return metrics.format(res, 'CSV')

    def process_revision(revision):
        # change to revision and create metrics
        print 'Processing revision : %s' % revision
        # change repository to revision
        hg.clean(repo, revision)

        # collect files to process
        exclude = re.compile('|'.join([translate(ep) 
            for ep in exclude_pattern]))
        files = [os.path.relpath(os.path.join(dp, name), base) for
            (dp, dn, fn) in os.walk(base)
            for name in fn 
            if not exclude.match(os.path.relpath(os.path.join(dp, name), base))]
        print 'Number of files to process : %d' % len(files)

        return create_metrics(files)
        #post_metrics(revision_metrics, revision, url)


    def write_archive(archive_name, data):
        """write the metrics to archive."""
        tar = tarfile.open(archive_name, 'w:gz')

        # create a file record
        output = StringIO.StringIO(data)
        info = tar.tarinfo()
        info.name = 'metrics.txt'
        # info.uname = 'pat'
        # info.gname = 'users'
        info.size = output.len

        # add the file to the tar and close it
        tar.addfile(info, output)
        tar.close()


    # from commands.py log:
    matchfn = scmutil.match(repo[None], pats, opts)
    #limit = cmdutil.loglimit(opts)
    #count = 0

    #endrev = None
    #if opts.get('copies') and opts.get('rev'):
    #    endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    def prep(ctx, fns):
        rev = ctx.rev()
        parents = [p for p in repo.changelog.parentrevs(rev)
                   if p != nullrev]
        if opts.get('no_merges') and len(parents) == 2:
            return
        if opts.get('only_merges') and len(parents) != 2:
            return
        if opts.get('only_branch') and ctx.branch() not in opts['only_branch']:
            return
        if df and not df(ctx.date()[0]):
            return
        if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
            return
        if opts.get('keyword'):
            for k in [kw.lower() for kw in opts['keyword']]:
                if (k in ctx.user().lower() or
                    k in ctx.description().lower() or
                    k in " ".join(ctx.files()).lower()):
                    break
            else:
                return

    revisions = []
    rev_last_date = -1
    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev_date = util.datestr(ctx.date(), '%Y%m%d')
        if(mode == 'daily' and rev_last_date == rev_date):
            #print '%s Skipped rev %s' % (rev_date, ctx.rev())
            pass
        else:
            result = process_revision(ctx.rev())
            datestamp = util.datestr(ctx.date(), '%y%m%d%H')
            if not os.path.exists(os.path.join(archive_base, datestamp)):
                # create the directories
                os.makedirs(os.path.join(archive_base, datestamp))
            archive_name = os.path.join(archive_base, datestamp, '%s-%s-metrics.tgz' % (project, datestamp))
            write_archive(archive_name, result)
        rev_last_date = rev_date
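
A hypothetical driver for walk() above; every path, pattern, and the project name are placeholders. The exclude patterns are presumably fnmatch-style, given that the walker runs them through translate() before compiling a regular expression.

walk(base='/path/to/repo',
     exclude_pattern=['*/build/*', '*.min.js'],   # placeholder fnmatch patterns
     start_revision=0,
     mode='daily',                                # 'daily' keeps at most one snapshot per day
     archive_base='/path/to/archive',
     project='example-project')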