def abbrev(branch):
    '''
    Return first 7 chars of branch ID, or "None" if None.
    '''
    if isinstance(branch, Branch):
        return p4gf_util.abbrev(branch.branch_id)
    return p4gf_util.abbrev(branch)
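
Every snippet on this page ultimately calls p4gf_util.abbrev(). That helper is not shown anywhere in the listing, so here is a minimal sketch of its assumed behavior, inferred only from the call sites below (single sha1/branch-id strings, None, and the occasional list of sha1s); the real p4gf_util implementation may differ.

def abbrev(x):
    '''Sketch (assumed behavior): first 7 chars of a sha1 or branch id,
    the string "None" for None, element-wise abbreviation for a list.'''
    if x is None:
        return 'None'
    if isinstance(x, list):
        return [abbrev(e) for e in x]
    return x[:7]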
Example #2
    def __str__(self):
        '''Return string representation of pre-receive info.
        '''
        return 'old={0}, new={1}, ref={2}'.format(
                                                  p4gf_util.abbrev(self.old_sha1)
                                                , p4gf_util.abbrev(self.new_sha1)
                                                , self.ref )
def abbrev(dbi):
    '''
    Return first 7 chars of depot branch ID, or "None" if None.
    '''
    if isinstance(dbi, DepotBranchInfo):
        return p4gf_util.abbrev(dbi.depot_branch_id)
    return p4gf_util.abbrev(dbi)
    def _create_merge_commit(review, prl):
        '''
        Create a new merge commit, merging the pushed commit into
        its destination branch. Return new commit's sha1.

        Leaves all references untouched.

        Knows to scan the pushed PreReceiveTuple list for any pushed changes
        to the destination branch, and to use (what will eventually be) the
        post-push head, not the pre-push head, as the first parent of the new
        merge commit.

        Raises an exception if unable to create the merge commit (usually due
        to a Git merge conflict; the error would be from 'git merge').
        '''
        LOG.debug('_create_merge_commit() {}'.format(review))

                        # Is the destination branch also being modified as part
                        # of this push? If so, use its eventual post-push head,
                        # not current head, for this merge.
        dest_ref_name = 'refs/heads/' + review.git_branch_name
        LOG.debug3('dest_ref_name={}'.format(dest_ref_name))
        first_parent_sha1     = None
        for prt in prl:
            if prt.ref == dest_ref_name:
                first_parent_sha1 = prt.new_sha1
                LOG.debug3('dest branch part of push, pushed head={}'
                           .format(p4gf_util.abbrev(first_parent_sha1)))
                break
        else:
            first_parent_sha1 = p4gf_util.git_rev_list_1(dest_ref_name)
            LOG.debug3('dest branch not part of push, head={}'
                       .format(p4gf_util.abbrev(first_parent_sha1)))

                        # Check out the raw commit, no branch ref.
                        # That way we don't have to put anything back when
                        # we're done (or if we fail).
        p4gf_util.git_checkout(first_parent_sha1)

                        # Merge in the review head.
                        #
        cmd = [ 'git', NTR('merge')
              , '--no-ff'       # Force a new merge commit, don't just
                                #   fast-forward into the review branch.
              , '--no-commit'   # So that we can set its message via file content.
              , review.sha1]
        p4gf_proc.popen(cmd)

                        # Commit the merge, reusing original commit's message
                        # and authorship.
        cmd = [ 'git', NTR('commit')
              , '--reuse-message', review.sha1]
        p4gf_proc.popen(cmd)

                        # The new merge commit is now at HEAD. Use its sha1
                        # as the review's sha1.
        merge_sha1 = p4gf_util.git_rev_list_1('HEAD')
        LOG.debug('Merge commit {sha1} created for review {review}'
                  .format( sha1   = p4gf_util.abbrev(merge_sha1)
                         , review = review ))
        return merge_sha1
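
Two helpers used above (and elsewhere on this page) are not defined in any example: p4gf_util.git_rev_list_1(), which resolves a ref to its head sha1, and NTR(), which marks literals as not-to-be-localized. The sketch below only illustrates the behavior the call sites imply; it is not the actual Git Fusion code.

import p4gf_proc

def git_rev_list_1(ref):
    '''Assumed behavior: sha1 of the single newest commit reachable from
    ref, i.e. the first line of 'git rev-list -1 <ref>' output.'''
    d = p4gf_proc.popen(['git', 'rev-list', '-1', ref])
    out = d['out'].strip()
    return out if out else None

def NTR(s):
    '''Assumed no-op marker: tags a string as "No TRanslate" for the
    localization tooling and returns it unchanged.'''
    return s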
    def files_at(self, ctx, branch, change_num):
        """Fetch files in branch at change and return result list."""
        result_list = self._find(branch, change_num)
        if not result_list:
            self._miss_ct += 1
            LOG.debug2('{branch}@{change} miss {ct}'.format(
                branch=p4gf_util.abbrev(branch.branch_id),
                change=change_num,
                ct=self._miss_ct))
            result_list = self._fetch(ctx, branch, change_num)
            self._insert(branch, change_num, result_list)
        else:
            self._hit_ct += 1
            LOG.debug2('{branch}@{change} hit  {ct}'.format(
                branch=p4gf_util.abbrev(branch.branch_id),
                change=change_num,
                ct=self._hit_ct))

            # Return a list of COPIES of our dicts. Calling code
            # was originally written to consume P4.run() results
            # directly and assumed it owned the results. Cheaper
            # and cleaner to copy here than to ask all callers to
            # learn about copy.
            #
            # Can't use copy.copy(): too shallow, returns a copy of
            # the list, pointing to our original dict elements.
            # copy.deepcopy() might be overkill if our dict
            # keys/elements are themselves collections, but I'll
            # live with that until memory/profiling says otherwise.
            #
        return copy.deepcopy(result_list)
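
The _find(), _insert(), and _fetch() helpers backing this cache do not appear in the example. A minimal sketch of one plausible shape, assuming a plain dict keyed on (branch_id, change_num); the real class may bound its size or key differently.

class FilesAtCache:
    '''Hypothetical sketch of the cache files_at() relies on.'''

    def __init__(self):
        self._store   = {}     # (branch_id, change_num) -> result list
        self._hit_ct  = 0
        self._miss_ct = 0

    def _find(self, branch, change_num):
        return self._store.get((branch.branch_id, change_num))

    def _insert(self, branch, change_num, result_list):
        self._store[(branch.branch_id, change_num)] = result_list

    def _fetch(self, ctx, branch, change_num):
        # Assumed: run 'p4 files' against this branch's client view
        # at the requested changelist.
        with ctx.switched_to_branch(branch):
            return ctx.p4run(['files',
                              ctx.client_view_path() + '@' + str(change_num)])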
    def __call__( self
                , ctx
                , fe_commit
                , branch_id
                , jobs ):
        '''
        If preflight hook configured, invoke it (or PASS/FAIL it).

        If fail, raise exception detailing why.

        Route hook's stdout and stderr to our stderr.
        '''
        _debug3('call() {} {} {}'
               , p4gf_util.abbrev(fe_commit['sha1'])
               , p4gf_util.abbrev(branch_id)
               , self )

        if self.action is ACTION_NONE:
            return
        elif self.action is ACTION_PASS:
            if self.msg:
                sys.stderr.write(self.msg + '\n')
            return
        elif self.action is ACTION_FAIL:
            self.raise_rejection(fe_commit['sha1'], self.msg)
        else: # self.action is ACTION_RUN:
            cmd_line_vars = self.calc_cmd_line_vars(
                             ctx                 = ctx
                           , fe_commit           = fe_commit
                           , branch_id           = branch_id
                           , jobs                = jobs
                           , spec_file_path      = self.spec_file_path(ctx)
                           )

            d = (ctx.gwt_to_depot_path(fe_file['path'])
                 for fe_file in fe_commit['files'])
            depot_file_list = (dd for dd in d if dd)

            self._write_spec_file(
                             ctx                = ctx
                           , fe_commit          = fe_commit
                           , depot_file_list    = depot_file_list
                           , jobs               = jobs
                           , spec_file_path     = self.spec_file_path(ctx)
                           , cmd_line_vars      = cmd_line_vars )

            cmd = [self.substitute_cmd_line_vars(cmd_line_vars, word)
                   for word in self.cmd]
            _debug3('cmd {}', cmd)
            d = p4gf_proc.popen_no_throw(cmd)
            _debug3('{}', d)
            msg = p4gf_util.join_non_empty('\n', d['out'], d['err'])
            if d['ec']:
                self.raise_rejection(fe_commit['sha1'], msg)
            sys.stderr.write(msg)
    def __call__(self, ctx, fe_commit, branch_id, jobs):
        '''
        If preflight hook configured, invoke it (or PASS/FAIL it).

        If fail, raise exception detailing why.

        Route hook's stdout and stderr to our stderr.
        '''
        _debug3('call() {} {} {}', p4gf_util.abbrev(fe_commit['sha1']),
                p4gf_util.abbrev(branch_id), self)

        if self.action is ACTION_NONE:
            return
        elif self.action is ACTION_PASS:
            if self.msg:
                sys.stderr.write(self.msg + '\n')
            return
        elif self.action is ACTION_FAIL:
            self.raise_rejection(fe_commit['sha1'], self.msg)
        else:  # self.action is ACTION_RUN:
            cmd_line_vars = self.calc_cmd_line_vars(
                ctx=ctx,
                fe_commit=fe_commit,
                branch_id=branch_id,
                jobs=jobs,
                spec_file_path=self.spec_file_path(ctx))

            d = (ctx.gwt_to_depot_path(fe_file['path'])
                 for fe_file in fe_commit['files'])
            depot_file_list = (dd for dd in d if dd)

            self._write_spec_file(ctx=ctx,
                                  fe_commit=fe_commit,
                                  depot_file_list=depot_file_list,
                                  jobs=jobs,
                                  spec_file_path=self.spec_file_path(ctx),
                                  cmd_line_vars=cmd_line_vars)

            cmd = [
                self.substitute_cmd_line_vars(cmd_line_vars, word)
                for word in self.cmd
            ]
            _debug3('cmd {}', cmd)
            d = p4gf_proc.popen_no_throw(cmd)
            _debug3('{}', d)
            msg = p4gf_util.join_non_empty('\n', d['out'], d['err'])
            if d['ec']:
                self.raise_rejection(fe_commit['sha1'], msg)
            sys.stderr.write(msg)
Example #9
    def _path_added(self, path, fecommit):
        """Return True if the named path was introduced in the HEAD commit.

        :param self: this object
        :param path: repo path to be evaluated.
        :param fecommit: commit object from fast-export parser.

        """
        # Because git-fast-export includes the entire tree in its output,
        # regardless of whether the requested commit is the first in the
        # branch or not, we need to check the repo itself to be certain if
        # this path was truly introduced in this commit, or simply existed
        # in the tree prior to the "first" commit.
        commit = self.ctx.repo.get(fecommit['sha1'])
        if commit is None:
            # empty repository?
            LOG.debug2("_path_added() commit {} is missing".format(
                fecommit['sha1']))
            return True
        for parent in commit.parents:
            if p4gf_git.exists_in_tree(self.ctx.repo, path, parent.tree):
                LOG.debug2("_path_added() {} exists in parent tree {}".format(
                    path,
                    p4gf_util.abbrev(p4gf_pygit2.object_to_sha1(parent))))
                return False
        return True
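
p4gf_git.exists_in_tree() is not shown in these examples. A hypothetical sketch of the check it performs, walking a '/'-separated repo path one component at a time through nested tree objects (attribute names such as .id vary across pygit2 versions):

def exists_in_tree(repo, path, tree):
    '''Hypothetical sketch: True only if every path component resolves
    to an entry under the given tree.'''
    node = tree
    for part in path.split('/'):
        try:
            entry = node[part]      # KeyError if the entry is absent
        except (KeyError, TypeError):
            return False            # missing entry, or node is a blob
        node = repo[entry.id]       # descend into the sub-tree (or blob)
    return True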
Example #10
    def _check_stream_in_classic(self, fe_commit):
        """If this is a classic branch, check that none of the files in the commit
        are in stream depots and thus not writable.  If any of the files is not
        writable then reject this commit.
        """
        if self._current_branch.stream_name:
            return

        depot_re = re.compile(r'^//([^/]+)/([^/]+)/.*$')
        for fe_file in fe_commit['files']:
            gwt_path = fe_file['path']
            depot_path = self.ctx.gwt_path(gwt_path).to_depot()
            m = depot_re.match(depot_path)
            if m:
                depot = m.group(1)
                if depot in self.stream_depots:
                    stream = '//{}/{}'.format(m.group(1), m.group(2))
                    human_msg = (_(
                        "Cannot commit {sha1} '{gwt_path}' to '{depot_path}'."
                        " Paths in stream '{stream}' are read-only for branch '{b}'."
                    ).format(sha1=p4gf_util.abbrev(fe_commit['sha1']),
                             gwt_path=gwt_path,
                             depot_path=depot_path,
                             stream=stream,
                             b=self._current_branch.branch_id))
                    raise PreflightException(human_msg)
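
For reference, the depot_re pattern above carves a classic depot path into the depot name and the top-level directory that together identify a candidate stream. A quick illustration with hypothetical values:

import re

depot_re = re.compile(r'^//([^/]+)/([^/]+)/.*$')
m = depot_re.match('//stream_depot/main/dir/file.txt')     # hypothetical path
assert m.group(1) == 'stream_depot'                         # depot name
assert m.group(2) == 'main'                                 # stream candidate
assert '//{}/{}'.format(m.group(1), m.group(2)) == '//stream_depot/main'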
    def to_log_level(self, level):
        """Debugging dump."""

        # Single line dump
        fmt = NTR('Row: {sha1:<7} {mode:<6} {p4_request:<6} {p4filetype:<10}'
                  ' {gwt_path:<10} {depot_path:<10}')

        topline = fmt.format(
                           sha1       = p4gf_util.abbrev(self.sha1) \
                                        if self.sha1 else '0000000'
                         , mode       = p4gf_util.quiet_none(
                                        p4gf_util.mode_str(  self.mode))
                         , gwt_path   = self.gwt_path
                         , depot_path = self.depot_path
                         , p4_request = p4gf_util.quiet_none(self.p4_request)
                         , p4filetype = p4gf_util.quiet_none(self.p4filetype)
                         )

        # Detail each cell at DEBUG2 not DEBUG3. DEBUG2 produces one-
        # line dumps for each cell, which should be useful. DEBUG3 will
        # produce multi-line dumps of each cell, which is VERY noisy.
        if level <= logging.DEBUG2:
            # Multi-line dump.
            lines = [topline]
            for i, cell in enumerate(self.cells):
                if not cell:
                    lines.append(NTR('  {i}: {cell}').format(i=i, cell=cell))
                else:
                    lines.append(
                        NTR('  {i}: {cell}').format(
                            i=i, cell=cell.to_log_level(level)))
            return '\n'.join(lines)
        else:
            return topline
    def to_log_level(self, level):
        '''Debugging dump.'''

        # Single line dump
        fmt = NTR('Row: {sha1:<7} {mode:<6} {p4_request:<6} {p4filetype:<10}'
               ' {gwt_path:<10} {depot_path:<10}')

        topline = fmt.format(
                           sha1       = p4gf_util.abbrev(self.sha1) \
                                        if self.sha1 else '0000000'
                         , mode       = p4gf_util.quiet_none(
                                        p4gf_util.mode_str(  self.mode))
                         , gwt_path   = self.gwt_path
                         , depot_path = self.depot_path
                         , p4_request = p4gf_util.quiet_none(self.p4_request)
                         , p4filetype = p4gf_util.quiet_none(self.p4filetype)
                         )

                # Detail each cell at DEBUG2 not DEBUG3. DEBUG2 produces one-
                # line dumps for each cell, which should be useful. DEBUG3 will
                # produce multi-line dumps of each cell, which is VERY noisy.
        if level <= logging.DEBUG2:
            # Multi-line dump.
            lines = [ topline ]
            for i, cell in enumerate(self.cells):
                if not cell:
                    lines.append(NTR('  {i}: {cell}').format(i=i, cell=cell))
                else:
                    lines.append(NTR('  {i}: {cell}')
                            .format( i=i
                                   , cell=cell.to_log_level(level)))
            return '\n'.join(lines)
        else:
            return topline
    def raise_rejection(sha1, msg):
        '''
        preflight-commit hook rejected. Tell the Git pusher.
        '''
        raise RuntimeError(
            _('preflight-commit rejected: {sha1} {msg}\n').format(
                sha1=p4gf_util.abbrev(sha1), msg=msg))
def _calc_repairs(ctx):
    """
    Scan Perforce for Git commit data and Perforce changelist descriptions,
    calculate which Perforce changelists need more data copied from Git
    backing store //.git-fusion/objects/...
    """

    # Load repo's entire set of Commit/Changelist metadata
    # into memory.
    LOG.info("Fetching list of Git commits/changelists from %s/objects/...",
             p4gf_const.objects_root())
    r = ctx.p4run(
        'files', '{root}/repos/{repo}/commits/...'.format(
            root=p4gf_const.objects_root(), repo=ctx.config.repo_name))
    # 'p4 print' each Git commit from its backup in
    # //.git-fusion/objects/...
    LOG.info("Fetched commit objects: {ct}".format(ct=len(r)))
    for rr in r:
        depot_path = rr.get('depotFile')
        if not depot_path:
            continue
        ot = ObjectType.commit_from_filepath(depot_path)
        SHA1_TO_OTL[ot.sha1].append(ot)
        LOG.debug('p4 print {}'.format(depot_path))
        blob_raw = p4gf_util.print_depot_path_raw(ctx.p4, depot_path)
        blob = p4gf_util.bytes_to_git_object(blob_raw)
        par_list = commit_to_parent_list(blob)
        SHA1_TO_PAR_SHA1_LIST[ot.sha1] = par_list
        LOG.debug("{sha1:7.7} parents={par}".format(
            sha1=ot.sha1, par=[p4gf_util.abbrev(p) for p in par_list]))

    # Loop through changelists, comparing against
    # backup and calculating if additional data
    # needs to be copied to its changelist description.
    return _calc_repairs_loop(ctx)
    def raise_rejection(sha1, msg):
        '''
        preflight-commit hook rejected. Tell the Git pusher.
        '''
        raise RuntimeError(_('preflight-commit rejected: {sha1} {msg}\n')
                        .format( sha1 = p4gf_util.abbrev(sha1)
                               , msg  = msg))
Example #16
def _to_row_header(row):
    """Return one row's own data as a list of strings."""
    r = [row.gwt_path]
    _append_if(r, row.sha1, p4gf_util.abbrev(row.sha1))
    _append_if(r, row.mode, p4gf_util.mode_str(p4gf_util.octal(row.mode)))
    _append_if(r, row.p4_request)
    _append_if(r, row.p4filetype)
    return r
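
_append_if() is a small shared helper that none of these examples define. A plausible sketch, inferred from its two call patterns (with and without an explicit display value):

def _append_if(lst, condition, value=None):
    '''Hypothetical sketch: append to lst only when condition is truthy,
    preferring the supplied display value over the condition itself.'''
    if condition:
        lst.append(value if value is not None else condition)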
Example #17
    def __str__(self):
        return "{} {} {} {} {}".format(
                  p4gf_util.abbrev(self.sha1)
                , self.type
                , self.change_num
                , self.repo_name
                , self.branch_id
                )
Example #18
    def _p4_changes_each_branch(self):
        '''
        Run 'p4 changes' on each branch view. Once for each named branch, and
        then one big one for the union of all lightweight branches.

        Return list of ChangeNumOnBranch, sorted by change_num.
        '''
        l = []

        client_path_fmt = self.ctx.client_view_path() + '@{begin},#head'

        sub_l = []
        for branch in self.ctx.branch_dict().values():
            with self.ctx.switched_to_branch(branch):
                start_change_num = 1  ### setup needs to fetch real start.

                # 'p4 changes //{branch}/...@1,#head'
                r = self.ctx.p4run([
                    'changes',
                    client_path_fmt.format(begin=start_change_num)
                ])
                _debug2('branch={branch} change_ct={change_ct}',
                        branch=p4gf_util.abbrev(branch.branch_id),
                        change_ct=len(r))
            for rr in r:
                if not self._can_create_cnob(rr):
                    continue
                sub_l.append(self._to_cnob(rr, branch.branch_id))

            l = _merge(l, sub_l)
            sub_l = []

        # Build a union view of just the lightweight branches. Usually we could
        # just use //.git-fusion/branches/{repo}/... , but that would prevent us
        # from sharing lightweight branches across multiple repos or after a
        # rerepo.
        lw_dict = {
            b.branch_id: b
            for b in self.ctx.branch_dict().values() if b.is_lightweight
        }
        lw_p4map = p4gf_branch.calc_branch_union_client_view(
            self.ctx.config.p4client, lw_dict)
        with self.ctx.switched_to_view_lines(lw_p4map.as_array):
            start_change_num = 1  ### setup needs to fetch real start.
            # 'p4 changes //{union}/...@1,#head'
            r = self.ctx.p4run(
                ['changes',
                 client_path_fmt.format(begin=start_change_num)])
        for rr in r:
            if not self._can_create_cnob(rr):
                continue
            for lw_branch in lw_dict.values():
                if lw_branch.intersects_depot_path(rr['path']):
                    sub_l.append(self._to_cnob(rr, lw_branch.branch_id))
                    break

        l = _merge(l, sub_l)
        return l
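
_merge() is not defined on this page; per the docstring it must keep the combined result ordered by change_num. A minimal sketch under that assumption (a simple re-sort rather than a linear merge; the real helper may be smarter):

def _merge(list_a, list_b):
    '''Hypothetical sketch: combine two ChangeNumOnBranch lists and keep
    the result sorted by change number.'''
    return sorted(list_a + list_b, key=lambda cnob: int(cnob.change_num))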
    def __repr__(self):
        return ('Row: {sha1:<7} {mode:<6} {p4_request:<6} {p4filetype:<7}'
                ' {gwt_path:<20} {depot_path}'.format(
                    sha1=p4gf_util.quiet_none(p4gf_util.abbrev(self.sha1)),
                    mode=p4gf_util.mode_str(self.mode),
                    p4_request=p4gf_util.quiet_none(self.p4_request),
                    p4filetype=p4gf_util.quiet_none(self.p4filetype),
                    gwt_path=self.gwt_path,
                    depot_path=self.depot_path))
    def __repr__(self):
        return ('Row: {sha1:<7} {mode:<6} {p4_request:<6} {p4filetype:<7}'
                ' {gwt_path:<20} {depot_path}'
                .format( sha1       = p4gf_util.quiet_none(
                                      p4gf_util.abbrev(    self.sha1))
                       , mode       = p4gf_util.mode_str(  self.mode)
                       , p4_request = p4gf_util.quiet_none(self.p4_request)
                       , p4filetype = p4gf_util.quiet_none(self.p4filetype)
                       , gwt_path   =                      self.gwt_path
                       , depot_path =                      self.depot_path))
def _to_row_header(row):
    '''
    Return one row's own data as a list of strings.
    '''
    r = [ row.gwt_path ]
    _append_if(r, row.sha1, p4gf_util.abbrev(row.sha1))
    _append_if(r, row.mode, p4gf_util.mode_str(p4gf_util.octal(row.mode)))
    _append_if(r, row.p4_request)
    _append_if(r, row.p4filetype)
    return r
Example #22
    def _set_reachable_by(self, old_head_sha1, reachable_by):
        '''
        Tree-walk a commit and all of its descendants, setting their
        reachable_by pointer to the given branch.

        O(n) commits worst case (when every commit is a descendant of old_head_sha1)
        '''
        LOG.debug2('_set_reachable_by() old_head_sha1={} reachable_by={}'
                   .format( p4gf_util.abbrev(old_head_sha1)
                          , p4gf_util.abbrev(reachable_by.branch_id)))

        old_head_assign = self.assign_dict.get(old_head_sha1)
        if not old_head_assign:
            LOG.debug3('_set_reachable_by() old_head not in assign_dict. Done.')
            return
        work_queue = [old_head_assign]
        while work_queue:
            curr_assign = work_queue.pop()
            if not curr_assign:
                continue
            curr_assign.reachable_by = reachable_by
            LOG.debug2('curr={} set {}'
                       .format( p4gf_util.abbrev(curr_assign.sha1)
                              , p4gf_util.abbrev(reachable_by.branch_id)))
            for child_sha1 in curr_assign.children:
                # Visit children, but skip ones we've already seen due to some
                # other path (merge commits)
                child_assign = self.assign_dict.get(child_sha1)
                if not child_assign:
                    LOG.debug3('curr={} child={} not found'
                               .format( p4gf_util.abbrev(curr_assign.sha1)
                                      , p4gf_util.abbrev(child_sha1)))
                    continue
                elif child_assign.reachable_by is reachable_by:
                    LOG.debug3('curr={} child={} already set'
                               .format( p4gf_util.abbrev(curr_assign.sha1)
                                      , p4gf_util.abbrev(child_sha1)))
                    continue
                else:
                    LOG.debug3('curr={} child={} enqueued'
                               .format( p4gf_util.abbrev(curr_assign.sha1)
                                      , p4gf_util.abbrev(child_sha1)))
                    work_queue.append(child_assign)
    def files_at(self, ctx, branch, change_num):
        '''
        Fetch files in branch at change and return result list.
        '''
        result_list = self._find(branch, change_num)
        if not result_list:
            self._miss_ct += 1
            LOG.debug2('{branch}@{change} miss {ct}'
                       .format( branch  = p4gf_util.abbrev(branch.branch_id)
                              , change  = change_num
                              , ct      = self._miss_ct ))
            result_list = self._fetch(ctx, branch, change_num)
            self._insert(branch, change_num, result_list)
        else:
            self._hit_ct += 1
            LOG.debug2('{branch}@{change} hit  {ct}'
                       .format( branch  = p4gf_util.abbrev(branch.branch_id)
                              , change  = change_num
                              , ct      = self._hit_ct ))
        return result_list
    def _p4_changes_each_branch(self):
        '''
        Run 'p4 changes' on each branch view. Once for each named branch, and
        then one big one for the union of all lightweight branches.

        Return list of ChangeNumOnBranch, sorted by change_num.
        '''
        l = []

        client_path_fmt = self.ctx.client_view_path() + '@{begin},#head'

        sub_l = []
        for branch in self.ctx.branch_dict().values():
            with self.ctx.switched_to_branch(branch):
                start_change_num = 1        ### setup needs to fetch real start.

                # 'p4 changes //{branch}/...@1,#head'
                r = self.ctx.p4run([ 'changes'
                                    , client_path_fmt.format(begin=start_change_num)])
                _debug2('branch={branch} change_ct={change_ct}'
                       , branch     = p4gf_util.abbrev(branch.branch_id)
                       , change_ct = len(r) )
            for rr in r:
                if not self._can_create_cnob(rr):
                    continue
                sub_l.append(self._to_cnob(rr, branch.branch_id))

            l = _merge(l, sub_l)
            sub_l = []

        # Build a union view of just the lightweight branches. Usually we could
        # just use //.git-fusion/branches/{repo}/... , but that would prevent us
        # from sharing lightweight branches across multiple repos or after a
        # rerepo.
        lw_dict = { b.branch_id : b for b in self.ctx.branch_dict().values()
                                    if b.is_lightweight }
        lw_p4map = p4gf_branch.calc_branch_union_client_view(
                      self.ctx.config.p4client, lw_dict)
        with self.ctx.switched_to_view_lines(lw_p4map.as_array):
            start_change_num = 1        ### setup needs to fetch real start.
            # 'p4 changes //{union}/...@1,#head'
            r = self.ctx.p4run([ 'changes'
                                , client_path_fmt.format(begin=start_change_num)])
        for rr in r:
            if not self._can_create_cnob(rr):
                continue
            for lw_branch in lw_dict.values():
                if lw_branch.intersects_depot_path(rr['path']):
                    sub_l.append(self._to_cnob(rr, lw_branch.branch_id))
                    break

        l = _merge(l, sub_l)
        return l
    def __call__(self, ctx, fe_commit, branch_id, jobs):
        """If preflight hook configured, invoke it (or PASS/FAIL it).

        If fail, raise exception detailing why.

        Route hook's stdout and stderr to our stderr.
        """
        _debug3('call() {} {} {}', p4gf_util.abbrev(fe_commit['sha1']),
                p4gf_util.abbrev(branch_id), self)

        if self.action is ACTION_NONE:
            return
        elif self.action is ACTION_PASS:
            if self.msg:
                sys.stderr.write(self.msg + '\n')
            return
        elif self.action is ACTION_FAIL:
            raise_rejection(fe_commit['sha1'], self.msg)
        else:  # self.action is ACTION_RUN:
            cmd_line_vars = calc_cmd_line_vars(
                ctx=ctx,
                fe_commit=fe_commit,
                branch_id=branch_id,
                jobs=jobs,
                spec_file_path=self.spec_file_path())

            d = (ctx.gwt_to_depot_path(fe_file['path'])
                 for fe_file in fe_commit['files'])
            depot_file_list = (dd for dd in d if dd)

            self._write_spec_file(ctx=ctx,
                                  fe_commit=fe_commit,
                                  depot_file_list=depot_file_list,
                                  jobs=jobs,
                                  spec_file_path=self.spec_file_path(),
                                  cmd_line_vars=cmd_line_vars)

            for cmd in self.cmds:
                if cmd.matches(fe_commit):
                    cmd.run(fe_commit, cmd_line_vars)
Example #26
def _to_column_box(column):
    """Return a list of strings that describe one column."""
    r = [
        '{i}:{col_type}'.format(i=column.index, col_type=column.col_type),
        'v:{}'.format(p4gf_branch.abbrev(column.branch)),
        'd:{}'.format(p4gf_depot_branch.abbrev(column.depot_branch)),
        '@{}'.format(column.change_num)
    ]
    _append_if(r, column.sha1, p4gf_util.abbrev(column.sha1))
    if column.fp_counterpart:
        r.append('fp_ctr_part:{}'.format(column.fp_counterpart.index))
    _append_if(r, column.is_first_parent, 'first-parent')
    return r
    def _accumulate_branch(self, branch):
        """Fetch lists of offending gitmirror files and p4keys,
        append to our accumulator data members
        """
        LOG.info("Check branch: {}"
                 .format(p4gf_util.abbrev(branch.branch_id)))

                        # Git commit objects in //.git-fusion/objects/...
        del_change_num_list = self._change_num_list_after(branch)
        self._log_results("Perforce changelist(s)", del_change_num_list)
        if not del_change_num_list:
            return
        del_depot_path_list = self._gitmirror_depot_path_list(
                                  branch
                                , del_change_num_list)
        self._log_results(_("Perforce copies of commit object(s)"
                            " in {root}/...".format(root=p4gf_const.objects_root())),
                          del_depot_path_list)
        self.del_depot_path_list.extend(del_depot_path_list)

                        # p4key index for each above commits.
        del_p4key_list = self._gitmirror_p4key_list(
                                  branch
                                , del_change_num_list)
        self._log_results("Perforce p4key(s) of commit object(s)", del_p4key_list)
        self.del_p4key_list.extend(del_p4key_list)

                        # Where to move each Git branch ref
                        # after rollback?
        if self.can_git:
            self.branch_to_surviving_ot[branch] \
                = self._git_branch_to_ot(branch)
            gbn_list = [b.git_branch_name
                        for b, ot in self.branch_to_surviving_ot.items()
                        if ot]
            self._log_results("Git reference(s)", gbn_list)

        if self.is_obliterate:
                        # Which changelists to obliterate?
            obli_change_num_list = self._change_num_list_to_obliterate(branch)
            self._log_results("changelist(s) to delete", obli_change_num_list)
            self.obliterate_change_num_list.extend(obli_change_num_list)

                        # Which files from those changelists to obliterate?
            obli_depot_rev_list = self._depot_rev_list_to_obliterate(
                                          branch
                                        , obli_change_num_list)
            self._log_results( "depot file revisions(s) to obliterate"
                             , obli_depot_rev_list )
            self.obliterate_depot_rev_set.update(obli_depot_rev_list)
Example #28
def _log_fe_file(fe_file):
    """Return loggable string for a single fe_commit['files'] element."""
    mode = '      '
    if 'mode' in fe_file:
        mode = fe_file['mode']
    sha1 = '       '
    if 'sha1' in fe_file:
        sha1 = p4gf_util.abbrev(fe_file['sha1'])

    return NTR('{mode} {action} {sha1} {path}') \
           .format( mode   = mode
                  , action = fe_file['action']
                  , sha1   = sha1
                  , path   = fe_file['path'])
def _to_column_box(column):
    '''
    Return a list of strings that describe one column.
    '''
    r = [ '{i}:{col_type}'.format(i=column.index, col_type=column.col_type)
        , 'v:{}'.format(p4gf_branch.abbrev(column.branch))
        , 'd:{}'.format(p4gf_depot_branch.abbrev(column.depot_branch))
        , '@{}' .format(column.change_num)
        ]
    _append_if(r, column.sha1,            p4gf_util.abbrev(column.sha1))
    if column.fp_counterpart:
        r.append('fp_ctr_part:{}'.format(column.fp_counterpart.index))
    _append_if(r, column.is_first_parent, 'first-parent')
    return r
    def __init__(
            self,
            gwt_path=None,
            depot_path=None,
            sha1=None,   # file/blob sha1, not commit sha1
            mode=None,
            col_ct=0):
        if LOG.isEnabledFor(logging.DEBUG3):
            LOG.debug3(
                'Row(): gwt={} depot={} sha1={} mode={} col_ct={}'.format(
                    gwt_path, depot_path, p4gf_util.abbrev(sha1),
                    p4gf_util.mode_str(mode), col_ct))

        if gwt_path:  # Caller must supply both if supplying GWT.
            assert depot_path

            # Destination/result data. What git-fast-export gives us, or
            # what we decide based on cross-branch integrations.
        self.gwt_path = gwt_path

        # Destination depot path, calculated via current branch view
        # mapping. Caller supplies.
        self.depot_path = depot_path

        # file sha1 and mode copied from initial git-fast-export or
        # git-ls-tree. Left None if Git has no record of this gwt_path
        # at this commit.
        self.sha1 = sha1
        self.mode = mode  # int, not string
        if mode:
            assert isinstance(mode, int)

            # Same integer indices as G2PMatrix.columns
        self.cells = [None] * col_ct

        # One of [None, 'add', 'edit', 'delete'] chosen from
        # all cells plus any difference from Git.
        #
        # Set during _react_to_integ_failure() upon integ failure.
        # Set during _decide_p4_requests_post_do_integ() to pull the
        # winning Decided.p4_request out of this row's cells.
        # Set during _set_p4_requests_for_local_git_diffs()
        # if local filesystem content does not match what Git requires.
        #
        self.p4_request = None

        # The one true filetype chosen from Git's mode
        # and x bits and existing Perforce filetype.
        self.p4filetype = None
Example #31
    def _assign_branch_named_old_to_new(self, branch, old_head_sha1, new_head_sha1):
        '''
        Find a path from this branch's new head location back to its old
        head location.

        Assign this branch to all commits along the path, unless those commits
        already have a branch assignment.
        '''
        if LOG.isEnabledFor(logging.DEBUG2):
            LOG.debug2('_assign_branch_named_old_to_new() branch={} {} from {}..{}'
                       .format( p4gf_branch.abbrev(branch.branch_id)
                              , branch.git_branch_name
                              , p4gf_util.abbrev(old_head_sha1)
                              , p4gf_util.abbrev(new_head_sha1)))

        # Note which commits are descendants of the old head.
        # Only such commits are possible choices when creating a path
        # from new head to old.
        self._set_reachable_by(old_head_sha1, branch)

        # Choose only reachable parents to create the path.
        self._assign_path( assign_branch=branch
                         , new_head_sha1=new_head_sha1
                         , reachable_by=branch)
    def __init__( self
                , gwt_path   = None
                , depot_path = None
                , sha1       = None # file/blob sha1, not commit sha1
                , mode       = None
                , col_ct     = 0
                ):
        if LOG.isEnabledFor(logging.DEBUG3):
            LOG.debug3('Row(): gwt={} depot={} sha1={} mode={} col_ct={}'
                      .format( gwt_path, depot_path, p4gf_util.abbrev(sha1)
                             , p4gf_util.mode_str(mode), col_ct))

        if gwt_path:        # Caller must supply both if supplying GWT.
            assert depot_path

                # Destination/result data. What git-fast-export gives us, or
                # what we decide based on cross-branch integrations.
        self.gwt_path       = gwt_path

                # Destination depot path, calculated via current branch view
                # mapping. Caller supplies.
        self.depot_path     = depot_path

                # file sha1 and mode copied from initial git-fast-export or
                # git-ls-tree. Left None if Git has no record of this gwt_path
                # at this commit.
        self.sha1           = sha1
        self.mode           = mode      # int, not string
        if mode:
            assert isinstance(mode, int)

                # Same integer indices as G2PMatrix.columns
        self.cells          = [None] * col_ct

                # One of [None, 'add', 'edit', 'delete'] chosen from
                # all cells plus any difference from Git.
                #
                # Set during _react_to_integ_failure() upon integ failure.
                # Set during _decide_p4_requests_post_do_integ() to pull the
                # winning Decided.p4_request out of this row's cells.
                # Set during _set_p4_requests_for_local_git_diffs()
                # if local filesystem content does not match what Git requires.
                #
        self.p4_request     = None

                # The one true filetype chosen from Git's mode
                # and x bits and existing Perforce filetype.
        self.p4filetype    = None
Example #33
    def _assign_branch_named_any_to_new(self, branch, new_head_sha1):
        '''
        Starting at new head and working back through parent links to
        any root-most newly pushed commit, assign branch to commits along
        the path unless such commits already have a branch assignment.
        '''
        if LOG.isEnabledFor(logging.DEBUG2):
            LOG.debug2('_assign_branch_named_any_to_new() branch={} {} from ???????..{}'
                       .format( p4gf_branch.abbrev(branch.branch_id)
                              , branch.git_branch_name
                              , p4gf_util.abbrev(new_head_sha1)))

        # Choose any parents to create the path.
        self._assign_path( assign_branch=branch
                         , new_head_sha1=new_head_sha1
                         , reachable_by=None)
    def to_log_level(self, _level):
        """Debugging dump."""
        # Single-line dump
        fmt = (NTR('{index}: {col_type:<7} b={branch:<7} d={dbi:<7}'
                   ' {sha1:<7} ch={ch:>4} {first_parent:<9} {fp_counterpart}'))
        fp_counterpart = (NTR(' fp_counter=[{i}]').format(
            i=self.fp_counterpart.index) if self.fp_counterpart else '')
        return fmt.format(
            index=self.index,
            col_type=self.col_type,
            branch=p4gf_branch.abbrev(self.branch),
            dbi=p4gf_depot_branch.abbrev(self.depot_branch),
            sha1=p4gf_util.abbrev(self.sha1),
            ch=self.change_num,
            first_parent='first-par' if self.is_first_parent else '',
            fp_counterpart=fp_counterpart)
Example #35
def _to_cell_discovered(discovered):
    """Return cell.Decided as a list of strings to fill a text cell."""
    if not discovered:
        return ['- Discovered - None']

    r = ['- Discovered -']
    keys = sorted(discovered.keys())
    key_len = max(len(key) for key in keys)
    for key in keys:
        val = discovered[key]
        if isinstance(val, list):
            val = val[0] if val else '[]'
        if key in ['sha1', 'have-sha1']:
            val = p4gf_util.abbrev(discovered[key])
        elif val.startswith('//'):
            val = abbrev_depot_path(val, _VAL_MAX_WIDTH)
        else:
            val = val[-_VAL_MAX_WIDTH:]
        fmt = '{key}: {val}'
        r.append(fmt.format(key=_fill(key_len, key), val=val))
    return r
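
_fill() simply pads each key so the values in the cell dump line up; it is not shown here. A plausible one-liner:

def _fill(width, key):
    '''Hypothetical sketch: left-justify key to the shared key width.'''
    return str(key).ljust(width)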
def _to_cell_discovered(discovered):
    '''
    Return cell.Decided as a list of strings to fill a text cell.
    '''
    if not discovered:
        return ['- Discovered - None']

    r = ['- Discovered -']
    keys    = sorted(discovered.keys())
    key_len = max(len(key) for key in keys)
    for key in keys:
        val = discovered[key]
        if key == 'sha1':
            val = p4gf_util.abbrev(discovered[key])
        elif val.startswith('//'):
            val = abbrev_depot_path(val, _VAL_MAX_WIDTH)
        else:
            val = val[-_VAL_MAX_WIDTH:]
        fmt = '{key}: {val}'
        r.append(fmt.format(key=_fill(key_len, key), val=val))
    return r
Example #37
def abbrev_depot_path(depot_path, val_max_width=_VAL_MAX_WIDTH):
    """Depot paths often have a long lightweight branch prefix.

    //.git-fusion/branches/p4gf_repo/G9/vx/HmW4TdiHNhpIojtWTg==
        /depot/master/dir/dir2/file.txt

    Chop that off and replace with the abbreviated branch ID.
    If still too long, chop off the middle (depot/master/dir) until it fits.
    """
    m = _RE_PREFIX.search(depot_path)
    if not m:
        return depot_path[-val_max_width:]

    prefix = m.group(0)
    suffix = depot_path[len(prefix):]
    branch_id = p4gf_util.abbrev(m.group(1).replace('/', ''))
    prefix = '{ellipsis}{branch_id}/'.format(ellipsis=_ELLIPSIS,
                                             branch_id=branch_id)
    suf_max = val_max_width - len(prefix)
    if suf_max < len(suffix):
        suffix = _ELLIPSIS + suffix[-(suf_max - 1):]
    return prefix + suffix
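
_RE_PREFIX, _ELLIPSIS, and _VAL_MAX_WIDTH come from elsewhere in the module and are not shown in these examples. The sketch below pairs hypothetical definitions (guessed from the docstring's sample path) with a usage illustration; the real constants and pattern may differ.

import re
import p4gf_util

_ELLIPSIS      = '...'       # hypothetical; the real marker is not shown
_VAL_MAX_WIDTH = 40          # hypothetical width
# Hypothetical pattern: the lightweight-branch prefix through the three
# path components that hold the branch ID.
_RE_PREFIX = re.compile(
    r'^//\.git-fusion/branches/[^/]+/([^/]+/[^/]+/[^/]+)/')

path = ('//.git-fusion/branches/p4gf_repo/G9/vx/HmW4TdiHNhpIojtWTg=='
        '/depot/master/dir/dir2/file.txt')
print(abbrev_depot_path(path))
# Under these assumptions the long prefix collapses to '...G9vxHmW/'
# and the middle of the remaining path is trimmed to fit.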
Example #38
    def _check_stream_writable(self, fe_commit):
        """If this is a stream branch, check that all files in the commit are
        writable.  If any of the files is not writable then reject this commit.
        """
        if not self._current_branch.stream_name:
            return
        prefix = self._current_branch.writable_stream_name + '/'
        for fe_file in fe_commit['files']:
            gwt_path = fe_file['path']
            depot_path = self.ctx.gwt_path(gwt_path).to_depot()
            if depot_path.startswith(prefix):
                continue

            human_msg = (_(
                "Cannot commit {sha1} '{gwt_path}' to '{depot_path}'."
                " Paths not in stream '{stream}' are read-only for branch '{b}'."
            ).format(sha1=p4gf_util.abbrev(fe_commit['sha1']),
                     gwt_path=gwt_path,
                     depot_path=depot_path,
                     stream=self._current_branch.writable_stream_name,
                     b=self._current_branch.branch_id))
            raise PreflightException(human_msg)
def abbrev_depot_path(depot_path, val_max_width=_VAL_MAX_WIDTH):
    '''
    Depot paths often have a long lightweight branch prefix:

    //.git-fusion/branches/p4gf_repo/G9/vx/HmW4TdiHNhpIojtWTg==
        /depot/master/dir/dir2/file.txt

    Chop that off and replace with the abbreviated branch ID.
    If still too long, chop off the middle (depot/master/dir) until it fits.
    '''
    m = _RE_PREFIX.search(depot_path)
    if not m:
        return depot_path[-val_max_width:]

    prefix = m.group(0)
    suffix = depot_path[len(prefix):]
    branch_id = p4gf_util.abbrev(m.group(1).replace('/',''))
    prefix = '{ellipsis}{branch_id}/'.format( ellipsis  = _ELLIPSIS
                                            , branch_id = branch_id )
    suf_max = val_max_width - len(prefix)
    if suf_max < len(suffix):
        suffix = _ELLIPSIS + suffix[-(suf_max - 1):]
    return prefix + suffix
Example #40
    def _assign_path(self, assign_branch, new_head_sha1, reachable_by):
        '''
        Starting at new head and working back through parent links to old head,
        assign branch to commits along the path unless such commits already
        have a branch assignment.
        '''
        if LOG.isEnabledFor(logging.DEBUG2):
            LOG.debug2('_assign_path() new_head={new_head_sha1}'
                       ' assign={assign_branch} reachable={reachable_by}'
                       .format(new_head_sha1   = p4gf_util.abbrev(new_head_sha1)
                               , assign_branch = p4gf_branch.abbrev(assign_branch)
                               , reachable_by  = p4gf_branch.abbrev(reachable_by)
                               ))
        curr_assign = self.assign_dict.get(new_head_sha1)

        while True:
            if not curr_assign.branch_id:
                self._assign_branch(curr_assign, assign_branch.branch_id)
                if LOG.isEnabledFor(logging.DEBUG3):
                    LOG.debug3('_assign_path curr={}         assigned {}'
                               .format( p4gf_util.abbrev(curr_assign.sha1)
                                      , p4gf_util.abbrev(assign_branch.branch_id)))
            else:
                if LOG.isEnabledFor(logging.DEBUG3):
                    LOG.debug3('_assign_path curr={} already assigned ({})'
                               .format( p4gf_util.abbrev(curr_assign.sha1)
                                      , p4gf_util.abbrev(curr_assign.branch_id_str())))

            chosen_par_assign = self._best_parent_assign( curr_assign
                                                        , reachable_by)
            if not chosen_par_assign:
                if LOG.isEnabledFor(logging.DEBUG3):
                    LOG.debug3('_assign_path curr={} no usable parent. Done.'
                               .format(p4gf_util.abbrev(curr_assign.sha1)))
                break

            curr_assign = chosen_par_assign
Example #41
    def _load_commit_dag(self):
        '''
        Load the Git commit tree into memory. We just need the
        parent/child relationships.
        '''
        # A single call to git-rev-list produces both the commit sha1 list
        # that we need AND the child->parent associations that we need. It's
        # screaming fast: 32,000 commit lines in <1 second.
        with Timer(TIMER_RUN_REV_LIST):
            range_list = [prt.to_range() for prt in self.pre_receive_list]
            cmd        = [ 'git', 'rev-list'
                         , '--date-order', '--parents'] + range_list
            LOG.debug2("DAG: {}".format(' '.join(cmd)))
            d = p4gf_proc.popen(cmd)

        seen_parents = set()

        # Pass 1: Build up a dict of sha1->Assign objects, one per commit.
        with Timer(TIMER_CONSUME_REV_LIST):
            lines = d['out'].splitlines()
            with ProgressReporter.Determinate(len(lines)):
                for line in lines:
                    ProgressReporter.increment(_('Loading commit tree into memory...'))
                    sha1s = line.split()
                    curr_sha1 = sha1s.pop(0)
                    self.rev_list.append(curr_sha1)
                    if LOG.isEnabledFor(logging.DEBUG3):
                        LOG.debug3('DAG: rev_list {} {}'
                                   .format( p4gf_util.abbrev(curr_sha1)
                                          , ' '.join(p4gf_util.abbrev(sha1s))))
                    self.assign_dict[curr_sha1] = Assign(curr_sha1, sha1s)
                    seen_parents.update(sha1s)

        # git-rev-list is awesome in that it gives us only as much as we need
        # for self.rev_list, but unawesome in that this optimization tends to
        # omit paths to branch refs' OLD heads if the old heads are 2+ commits
        # back in time, and that time is ALREADY covered by some OTHER branch.
        # Re-run each pushed branch separately to add enough Assign() nodes
        # to form a full path to its old ref.
        if 2 <= len(self.pre_receive_list):
            for prt in self.pre_receive_list:
                # Skip NEW branch refs: those don't have
                # to connect up to anything.
                if prt.old_sha1 == p4gf_const.NULL_COMMIT_SHA1:
                    continue
                with Timer(TIMER_RUN_REV_LIST):
                    cmd  = [ 'git', 'rev-list'
                           , '--date-order', '--parents', '--reverse', prt.to_range()]
                    LOG.debug2("DAG: {}".format(' '.join(cmd)))
                    d = p4gf_proc.popen(cmd)

                with Timer(TIMER_CONSUME_REV_LIST):
                    for line in d['out'].splitlines():
                        sha1s = line.split()
                        curr_sha1 = sha1s.pop(0)
                        if curr_sha1 in self.assign_dict:
                            break
                        LOG.debug3('DAG: path     {} {}'
                                   .format( p4gf_util.abbrev(curr_sha1)
                                          , ' '.join(p4gf_util.abbrev(sha1s))))
                        self.assign_dict[curr_sha1] = Assign(curr_sha1, sha1s)
                        seen_parents.update(sha1s)

        # Create acting-as-parent-only nodes in dict, too. We don't process
        # these as part of iterating over revs, but we need them when
        # tree walking.
        with Timer(TIMER_CONSUME_REV_LIST):
            parent_only = seen_parents - set(self.assign_dict.keys())
            for curr_sha1 in parent_only:
                if curr_sha1 in self.assign_dict:
                    break
                LOG.debug3('DAG: par only {}'.format( p4gf_util.abbrev(curr_sha1)))
                self.assign_dict[curr_sha1] = Assign(curr_sha1, [])

        # Pass 2: Fill in Assign.children list
        with Timer(TIMER_ASSIGN_CHILDREN):
            with ProgressReporter.Determinate(len(self.assign_dict)):
                for assign in self.assign_dict.values():
                    ProgressReporter.increment(_('Finding child commits...'))
                    for par_sha1 in assign.parents:
                        par_assign = self.assign_dict.get(par_sha1)
                        if par_assign:
                            par_assign.children.add(assign.sha1)
                        else:
                            # Expected and okay: some parents already exist and
                            # are not part of our push/fast-export list.
                            LOG.debug2(
                                "DAG: child {child} -> parent {parent}: parent not part of push"
                                .format(child=assign.sha1[:7], parent=par_sha1[:7]))
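
The Assign objects filling assign_dict are constructed above but never defined in these examples. A minimal sketch of the node type the calls imply (sha1 and parent sha1s supplied up front, children and branch bookkeeping filled in later); any field not visible in the calls is a guess.

import p4gf_util

class Assign:
    '''Hypothetical sketch of one commit node in the branch-assignment DAG.'''

    def __init__(self, sha1, parents):
        self.sha1         = sha1        # this commit
        self.parents      = parents     # parent sha1s from git-rev-list
        self.children     = set()       # child sha1s, filled in by pass 2
        self.branch_id    = None        # set once a branch claims the commit
        self.reachable_by = None        # Branch set by _set_reachable_by()

    def branch_id_str(self):
        return p4gf_util.abbrev(self.branch_id) if self.branch_id else 'None'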
Example #42
    def __repr__(self):
        return '{} {}'.format(p4gf_util.abbrev(self.sha1), self.branch_id_str())