Example #1
    def run_fast_import(self):
        """Run git-fast-import to create the git commits.

        Returns: a list of marks, one per commit.  Each entry is a line
            formatted as a change number followed by the SHA1 of the commit.

        The same marks are also written by git-fast-import to a temporary
        'marks-' file.
        """
        with Timer(OVERALL):
            with Timer(RUN):
                LOG.debug("running git fast-import")
                # tell git-fast-import to export marks to a temp file
                self.script.flush()
                marks_file = tempfile.NamedTemporaryFile(
                    dir=self.ctx.tempdir.name, prefix='marks-')
                try:
                    cmd = ['git', 'fast-import', '--quiet', '--export-marks=' + marks_file.name]
                    ec = p4gf_proc.wait(cmd, stdin=self.script.name)
                    if ec:
                        _log_crash_report()
                        raise CalledProcessError(ec, NTR('git fast-import'))

                    # read the exported marks from the file and return the result
                    with open(marks_file.name, "r") as marksfile:
                        marks = [line.strip() for line in marksfile]
                    if LOG.getChild('marks').isEnabledFor(logging.DEBUG3):
                        LOG.getChild('marks').debug3('git-fast-import returned marks ct={}\n'
                                                     .format(len(marks))
                                                     + '\n'.join(marks))
                    return marks
                finally:
                    self.script.close()
                    marks_file.close()
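
For context, --export-marks makes git fast-import write one line per mark in
the form ':<mark> <sha1>'. A minimal standalone sketch of the same
export-and-read round trip, using plain subprocess instead of the p4gf_proc
wrapper (function and parameter names here are illustrative):

import subprocess
import tempfile

def fast_import_marks(git_dir, script_path):
    """Feed a fast-import script to git; return the exported mark lines."""
    with tempfile.NamedTemporaryFile(prefix='marks-') as marks_file:
        cmd = ['git', '-C', git_dir, 'fast-import', '--quiet',
               '--export-marks=' + marks_file.name]
        with open(script_path, 'rb') as script:
            subprocess.run(cmd, stdin=script, check=True)
        # each exported line looks like ':42 <sha1>'
        with open(marks_file.name, 'r') as fp:
            return [line.strip() for line in fp]
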
Example #2
    def after(self, ec):
        """Update git repository outside of p4key lock, with write lock."""
        p4gf_tag.process_tags(self.context, self.prl.tags())
        LOG.debug('after() performing review post-push processing')
        with Timer('swarm post-copy'):
            if self.gsreview_coll:
                self.gsreview_coll.post_push()
        with Timer('depot branch post-copy'):
            if self.ndb_coll:
                self.ndb_coll.post_push(self.context)
        ReceiveHook.after(self, ec)
Example #3
    def add_objects_to_p4(self, marks, mark_list, mark_to_branch_id, ctx):
        """Submit Git commit and tree objects associated with the given marks.

        marks:      list of commit marks output by git-fast-import
                    formatted as: :marknumber sha1 branch-id
        mark_list:  MarkList instance that maps mark number to changelist number.
                    Can be None if mark number == changelist number.
        mark_to_branch_id:
                    dict to find branch_id active when mark's commit
                    was added.
                    Can be None if branch_id encoded in mark lines.
        ctx:        P4GF context
        """
        try:
            with Timer(OVERALL):
                # Unpack the received packs so we can work with loose objects.
                p4gf_git.unpack_objects()
                with ProgressReporter.Indeterminate():
                    with Timer(BUILD):
                        commit_shas = []
                        for mark_line in marks:
                            mark = Mark.from_line(mark_line)
                            mark_num = mark.mark
                            if mark_list:
                                change_num = mark_list.mark_to_cl(mark_num)
                            else:
                                change_num = mark_num
                            sha1 = mark.sha1
                            branch_id = mark.branch
                            if (not branch_id) and mark_to_branch_id:
                                branch_id = mark_to_branch_id.get(mark_num)
                            # add commit object
                            details = CommitDetails(change_num, self.view_name,
                                                    branch_id)
                            self.commits.add_commit(sha1, details)

                            commit_shas.append(sha1)
                            if len(self.commits) >= _BITE_SIZE:
                                # now that we have a few commits, submit them to P4
                                self._add_commits_to_p4(ctx)
                                _copy_commit_trees(self.view_name, commit_shas)
                                commit_shas.clear()
                                self.commits.clear()

                    # submit the remaining objects to P4
                    self._add_commits_to_p4(ctx)
                    with Timer(BUILD):
                        _copy_commit_trees(self.view_name, commit_shas)
        finally:
            # Let my references go!
            self.commits.clear()
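
Mark.from_line is internal to p4gf; a hedged sketch of the parsing it
presumably performs on lines of the form ':marknumber sha1 branch-id'
(the branch id is optional, matching the mark_to_branch_id fallback above):

class Mark:
    """Parsed fast-import mark line: ':<mark> <sha1> [<branch-id>]'."""

    def __init__(self, mark, sha1, branch=None):
        self.mark = mark
        self.sha1 = sha1
        self.branch = branch

    @classmethod
    def from_line(cls, line):
        parts = line.split()
        return cls(mark=parts[0].lstrip(':'),
                   sha1=parts[1],
                   branch=parts[2] if len(parts) > 2 else None)
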
Example #4
    def _dump_instrumentation(self):
        '''
        Debugging dump of timing and other info.
        '''
        if _DUMP_LOG and LOG_GRAPH.isEnabledFor(logging.DEBUG3):
            cmd = list(_DUMP_LOG) + [prt.new_sha1 for prt in self.pre_receive_list]
            p = p4gf_proc.popen_no_throw(cmd)
            lines = self.annotate_lines(p['out'].splitlines())
            LOG_GRAPH.debug3('Log: {}\n{}'.format(' '.join(cmd), '\n'.join(lines)))

        total_seconds = Timer(TIMER_OVERALL).time
        total_rev_ct    = len(self.assign_dict)
        LOG_TIME.debug("branches      : {}".format(len(self.branch_dict)))
        LOG_TIME.debug("commits       : {}".format(total_rev_ct))
        LOG_TIME.debug("seconds       : {}".format(int(total_seconds + 0.5)))
        if 1.0 <= total_seconds:
            # Commits per second math becomes unreliable for short runs.
            rev_per_second  = total_rev_ct / total_seconds
            LOG_TIME.debug("commits/second: {}".format(int(rev_per_second + 0.5)))

        if self.branch_len:
            histo = p4gf_histogram.to_histogram(self.branch_len)
            histo_lines = p4gf_histogram.to_lines(histo)
            LOG_TIME.debug('Branch length histogram: how many branches have N commits?\n'
                           + '\n'.join(histo_lines))
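
p4gf_histogram is not shown here; a rough standalone equivalent of the
branch-length histogram, assuming branch_len maps each branch to its
commit count:

from collections import Counter

def branch_length_histogram(branch_len):
    """Answer: how many branches have N commits?"""
    counts = Counter(branch_len.values())
    return ['{:>8} commit(s): {} branch(es)'.format(n, ct)
            for n, ct in sorted(counts.items())]
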
Example #5
    def _process_unsafe(self):
        """Perform the copy to Perforce work, possibly raising an exception."""
        ctx = self.context

        PRLFile(ctx.config.repo_name).delete()
        # Now that the PRL file has been dealt with, remove the write lock.
        p4gf_git_repo_lock.remove_write_lock(ctx.config.repo_name)

        # update the space usage values
        PushLimits(ctx).pre_copy()

        if self.fast_push:
            try:
                self.fast_push.post_receive()
            except Exception as err:  # pylint: disable=broad-except
                ctx.record_push_failed_p4key(err)
                return 1
        else:
            if self.assigner:
                self._copy_heads()
            _delete_heads(ctx, self.prl)
            ctx.mirror.update_branches(ctx)

        # Update the total disk usage for the repo.
        with Timer('push limits'):
            PushLimits(ctx).post_copy()

        self.context.record_push_success_p4key()
        if not self.fast_push:
            _delete_packet(self.context.config.repo_name)
        return 0
Example #6
def __do_trees(view_name, path):
    '''Process any and all files associated with this view.'''
    # don't leave a mess: clean up file even if there's a problem processing it
    atexit.register(os.unlink, path)
    with p4gf_context.create_context(view_name, None) as ctx:
        # we don't create any temp clients here, so don't try deleting them either.
        # leave that to processes that actually use them.
        ctx.cleanup_client_pool = False
        os.chdir(ctx.view_dirs.GIT_WORK_TREE)
        LOG.debug("processing trees for view {}".format(view_name))

        with open(path, "r") as f:
            with Timer(p4gf_gitmirror.ADD_SUBMIT):
                trees = set()
                last_tree = None
                while True:
                    line = f.readline()
                    if not line:
                        # EOF without an "end" marker; avoid spinning forever
                        break
                    line = line.strip()
                    LOG.debug("processing line '{}'".format(line))
                    if line == "end":
                        break
                    elif line == '---':
                        last_tree = None
                    else:
                        if not last_tree:
                            last_tree = __get_snapshot_trees(line, trees)
                        else:
                            last_tree = __get_delta_trees(
                                last_tree, line, trees)
                if trees:
                    LOG.debug("submitting trees for {}".format(view_name))
                    __add_trees_to_p4(ctx, trees)
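
The file read above is a simple line protocol: tree lines, '---' separators
that reset the delta base, and an 'end' sentinel. A hedged reader for the
same protocol, with the format inferred from the parser above:

def read_tree_requests(path):
    """Yield ('snapshot', line) or ('delta', line) records from a tree file."""
    last = None
    with open(path, 'r') as f:
        for raw in f:
            line = raw.strip()
            if line == 'end':
                return
            if line == '---':
                last = None            # next line starts a new snapshot
            elif last is None:
                last = line
                yield ('snapshot', line)
            else:
                last = line
                yield ('delta', line)
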
Example #7
    def _really_add_commits_to_p4(self, ctx):
        """actually run p4 add, submit to create mirror files in .git-fusion"""
        desc = _("Git Fusion '{view}' copied to Git.").format(
            view=ctx.config.view_name)
        with p4gf_util.NumberedChangelist(gfctx=ctx, description=desc) as nc:
            with Timer(ADD_SUBMIT):
                LOG.debug("adding {0} commits to .git-fusion...".format(
                    len(self.commits.commits)))

                # build list of objects to add, extracting them from git
                add_files = [
                    self.__add_object_to_p4(ctx, go)
                    for go in self.commits.commits.values()
                ]
                add_files = GitMirror.optimize_objects_to_add_to_p4(
                    ctx, add_files)

                if not (len(add_files) or self.depot_branch_info_list
                        or self.branch_list):
                    # Avoid a blank line in output by printing something
                    ProgressReporter.write(
                        _('No Git objects to submit to Perforce'))
                    LOG.debug("_really_add_objects_to_p4() nothing to add...")
                    return

                with Timer(P4_ADD):
                    files_added = self.add_objects_to_p4_2(ctx, add_files)

                    depot_branch_infos_added = \
                        self._add_depot_branch_infos_to_p4(ctx)

                    config2_added = self._add_branch_defs_to_p4(ctx)

                    cldfs_added = self._add_cldfs_to_p4(ctx)

                with Timer(P4_SUBMIT):
                    if (files_added or depot_branch_infos_added
                            or config2_added or cldfs_added):
                        ProgressReporter.increment(
                            _('Submitting new Git commit objects to Perforce'))
                        r = nc.submit()
                        ObjectType.update_indexes(ctx, r)
                    else:
                        ProgressReporter.write(
                            _('No new Git objects to submit to Perforce'))
                        LOG.debug("ignoring empty change list...")
Example #8
    def assign(self):
        '''
        Main entry point. Assign a branch ID to every rev in our rev_list.
        '''
        with Timer(TIMER_OVERALL):
            # No timer here: _load_commit_dag() splits its time into
            # finer-grained buckets.
            self._load_commit_dag()

            # Zig thinks this is no longer necessary.
            with Timer(TIMER_BRANCH_HEAD):
                self._add_assign_for_ref_heads()

            with Timer(TIMER_ASSIGN_PREVIOUS):
                self._assign_previous()

            with Timer(TIMER_ASSIGN_BRANCH_NAMED):
                self._assign_branches_named()

            with Timer(TIMER_ASSIGN_BRANCH_ANON):
                self._assign_branches_anon()

            with Timer(TIMER_BRANCH_HEAD):
                self._force_assign_pushed_ref_heads()

            with Timer(TIMER_FREE_MEMORY):
                self._free_memory()

        self._dump_instrumentation()
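
Timer, used throughout these examples, behaves like a named, accumulating
stopwatch. p4gf's implementation is not shown; a minimal sketch with the same
usage shape (with-statement plus a readable .time total, as
_dump_instrumentation reads above) under that assumption:

import time
from collections import defaultdict

class Timer:
    """Accumulate wall-clock seconds per named bucket."""
    _totals = defaultdict(float)

    def __init__(self, name):
        self.name = name

    def __enter__(self):
        self._start = time.time()
        return self

    def __exit__(self, *exc_info):
        Timer._totals[self.name] += time.time() - self._start
        return False

    @property
    def time(self):
        return Timer._totals[self.name]
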
Example #9
def main():
    """Either do the work now or fork a process to do it later."""
    for h in ['-?', '-h', '--help']:
        if h in sys.argv:
            print(_('Git Fusion pre-receive hook.'))
            return 2
    p4gf_version_3.print_and_exit_if_argv()
    p4gf_branch.init_case_handling()
    prl = PreReceiveTupleLists.from_stdin(sys.stdin)
    # Preflight rejects the push by raising an exception, which is handled
    # in the logging code by printing the message to stderr.
    with Timer('pre-receive'):
        return PreflightHook('pre-receive preflight', prl).do_it()
Example #10
    def __add_object_to_p4(ctx, go):
        """add a commit to the git-fusion perforce client workspace

        return the path of the client workspace file suitable for use with
        p4 add
        """
        ProgressReporter.increment(
            _('Adding new Git commit objects to Perforce...'))
        ctx.heartbeat()

        # get client path for .git-fusion file
        dst = os.path.join(ctx.gitlocalroot, go.to_p4_client_path())

        # A tree is likely to already exist, in which case we don't need
        # or want to try to recreate it.  We'll just use the existing one.
        if os.path.exists(dst):
            LOG.debug("reusing existing object: " + dst)
            return dst

        with Timer(EXTRACT_OBJECTS):

            # make sure dir exists
            dstdir = os.path.dirname(dst)
            if not os.path.exists(dstdir):
                try:
                    os.makedirs(dstdir)
                # pylint:disable=E0602
                # pylint running on python 3.2 does not know about 3.3 features
                except FileExistsError:
                    # pylint:enable=E0602
                    # Probably another process created the same directory
                    # simultaneously; safe to ignore.
                    pass

            # Hardlink the Git object into the Perforce workspace
            op = p4gf_git.object_path(go.sha1)
            os.link(op, dst)
            LOG.debug2("adding new object: " + dst)

            return dst
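
On Python 3.2 and later the try/except dance above can collapse into
os.makedirs(..., exist_ok=True); a sketch of the same hardlink step
(names are illustrative):

import os

def hardlink_into_workspace(object_path, dst):
    """Hardlink a loose Git object into the Perforce workspace path."""
    os.makedirs(os.path.dirname(dst), exist_ok=True)
    if not os.path.exists(dst):
        os.link(object_path, dst)
    return dst
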
Example #11
    def optimize_objects_to_add_to_p4(ctx, add_files):
        """If many files are to be added, filter out those already added.

        Only do this if the number of files is large enough to justify
        the cost of the fstat.
        """
        enough_files_to_use_fstat = 100
        if len(add_files) < enough_files_to_use_fstat:
            return add_files
        with Timer(P4_FSTAT):
            LOG.debug("using fstat to optimize add")
            original_count = len(add_files)
            ctx.p4gf.handler = FilterAddFstatHandler()
            # spoon-feed p4 to avoid blowing out memory
            while add_files:
                bite = add_files[:_BITE_SIZE]
                add_files = add_files[_BITE_SIZE:]
                with ctx.p4gf.at_exception_level(ctx.p4gf.RAISE_NONE):
                    ctx.p4gf.run("fstat", bite)
            add_files = ctx.p4gf.handler.files
            ctx.p4gf.handler = None
            LOG.debug("{} files removed from add list".format(
                original_count - len(add_files)))
            return add_files
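
The spoon-feeding idiom above is a general batching pattern; a standalone
sketch (the bite size and the command runner are illustrative):

def in_bites(items, bite_size=1000):
    """Yield successive slices of at most bite_size items."""
    for start in range(0, len(items), bite_size):
        yield items[start:start + bite_size]

# usage: one "fstat" run per manageable slice
# for bite in in_bites(add_files, _BITE_SIZE):
#     ctx.p4gf.run("fstat", bite)
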
Example #12
    def _load_commit_dag(self):
        '''
        Load the Git commit tree into memory. We just need the
        parent/child relationships.
        '''
        # A single call to git-rev-list produces both the commit sha1 list
        # that we need AND the child->parent associations that we need. It's
        # screaming fast: 32,000 commit lines in <1 second.
        with Timer(TIMER_RUN_REV_LIST):
            range_list = [prt.to_range() for prt in self.pre_receive_list]
            cmd        = [ 'git', 'rev-list'
                         , '--date-order', '--parents'] + range_list
            LOG.debug2("DAG: {}".format(' '.join(cmd)))
            d = p4gf_proc.popen(cmd)

        seen_parents = set()

        # Pass 1: Build up a dict of sha1->Assign objects, one per commit.
        with Timer(TIMER_CONSUME_REV_LIST):
            lines = d['out'].splitlines()
            with ProgressReporter.Determinate(len(lines)):
                for line in lines:
                    ProgressReporter.increment(_('Loading commit tree into memory...'))
                    sha1s = line.split()
                    curr_sha1 = sha1s.pop(0)
                    self.rev_list.append(curr_sha1)
                    if LOG.isEnabledFor(logging.DEBUG3):
                        LOG.debug3('DAG: rev_list {} {}'
                                   .format( p4gf_util.abbrev(curr_sha1)
                                          , ' '.join(p4gf_util.abbrev(sha1s))))
                    self.assign_dict[curr_sha1] = Assign(curr_sha1, sha1s)
                    seen_parents.update(sha1s)

        # git-rev-list is awesome in that it gives us only as much as we need
        # for self.rev_list, but unawesome in that this optimization tends to
        # omit paths to branch refs' OLD heads if the old heads are 2+ commits
        # back in time, and that time is ALREADY covered by some OTHER branch.
        # Re-run each pushed branch separately to add enough Assign() nodes
        # to form a full path to its old ref.
        if 2 <= len(self.pre_receive_list):
            for prt in self.pre_receive_list:
                # Skip NEW branch refs: those don't have
                # to connect up to anything.
                if prt.old_sha1 == p4gf_const.NULL_COMMIT_SHA1:
                    continue
                with Timer(TIMER_RUN_REV_LIST):
                    cmd  = [ 'git', 'rev-list'
                           , '--date-order', '--parents', '--reverse', prt.to_range()]
                    LOG.debug2("DAG: {}".format(' '.join(cmd)))
                    d = p4gf_proc.popen(cmd)

                with Timer(TIMER_CONSUME_REV_LIST):
                    for line in d['out'].splitlines():
                        sha1s = line.split()
                        curr_sha1 = sha1s.pop(0)
                        if curr_sha1 in self.assign_dict:
                            break
                        LOG.debug3('DAG: path     {} {}'
                                   .format( p4gf_util.abbrev(curr_sha1)
                                          , ' '.join(p4gf_util.abbrev(sha1s))))
                        self.assign_dict[curr_sha1] = Assign(curr_sha1, sha1s)
                        seen_parents.update(sha1s)

        # Create acting-as-parent-only nodes in dict, too. We don't process
        # these as part of iterating over revs, but we need them when
        # tree walking.
        with Timer(TIMER_CONSUME_REV_LIST):
            parent_only = seen_parents - set(self.assign_dict.keys())
            for curr_sha1 in parent_only:
                if curr_sha1 in self.assign_dict:
                    continue
                LOG.debug3('DAG: par only {}'.format( p4gf_util.abbrev(curr_sha1)))
                self.assign_dict[curr_sha1] = Assign(curr_sha1, [])

        # Pass 2: Fill in Assign.children list
        with Timer(TIMER_ASSIGN_CHILDREN):
            with ProgressReporter.Determinate(len(self.assign_dict)):
                for assign in self.assign_dict.values():
                    ProgressReporter.increment(_('Finding child commits...'))
                    for par_sha1 in assign.parents:
                        par_assign = self.assign_dict.get(par_sha1)
                        if par_assign:
                            par_assign.children.add(assign.sha1)
                        else:
                            # Expected and okay: some parents already exist and
                            # are not part of our push/fast-export list.
                            LOG.debug2(
                                "DAG: child {child} -> parent {parent}: parent not part of push"
                                .format(child=assign.sha1[:7], parent=par_sha1[:7]))
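
git rev-list --parents emits one line per commit: the commit SHA1 followed
by its parents' SHA1s. A standalone sketch of the pass-1 parse (subprocess
stands in for p4gf_proc.popen):

import subprocess

def load_dag(range_spec):
    """Map sha1 -> list of parent sha1s for commits in a rev-list range."""
    out = subprocess.check_output(
        ['git', 'rev-list', '--date-order', '--parents', range_spec],
        universal_newlines=True)
    dag = {}
    for line in out.splitlines():
        sha1s = line.split()    # 'child parent1 parent2 ...'
        dag[sha1s[0]] = sha1s[1:]
    return dag
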
Example #13
    def add_commit( self
                  , cl
                  , mark_number
                  , parent_commit_list
                  , first_branch_from_branch_id
                  , first_branch_from_change_number
                  , dest_branch
                  , branch_name):
        """Add a commit to the fast-import script.

        Arguments:
        cl            -- P4Changelist to turn into a commit
        cl.files      -- [] of P4File containing files in changelist
        mark_number   -- Mark number assigned to this commit
        parent_commit_list
                      -- Mark or SHA1 of the commit this commit will be based on.
                         Can be a single str mark/SHA1, or a list of
                         [mark/SHA1 str] if cl should be a merge commit.
        first_branch_from_branch_id
        first_branch_from_change_number
                      -- branch_id and integer changelist number from which we're branching.
                         None unless this is the first commit on a new branch.
        dest_branch   -- Branch that receives this commit.
        branch_name   -- Git branch name used in the fast-import 'commit' header.
        """
        with Timer(OVERALL):
            with Timer(BUILD):
                self.__append(NTR('commit refs/heads/{0}\n').format(branch_name))
                self.__append(NTR('mark :{0}\n').format(mark_number))
                desc_info = DescInfo.from_text(cl.description)
                committer_added = False
                if desc_info:
                    for key in ('author', 'committer'):
                        v = desc_info[key]
                        if v:
                            self.__append(NTR('{key} {fullname} {email} {time} {timezone}\n').
                                          format( key      = key
                                                , fullname = v['fullname']
                                                , email    = v['email'   ]
                                                , time     = v['time'    ]
                                                , timezone = v['timezone']))
                            committer_added = True
                    desc = desc_info.clean_desc

                # Convoluted logic gates but avoids duplicating code. The point
                # is that we add the best possible committer data _before_
                # adding the description.
                if not committer_added:
                    if desc_info:
                        # old change description that lacked detailed author info,
                        # deserves a warning, but otherwise push onward even if the
                        # commit checksums will likely differ from the originals
                        LOG.warning('commit description did not match committer regex: @{} => {}'.
                                    format(cl.change, desc_info.suffix))
                    timezone = self.__get_timezone_offset(cl.time)
                    self.__append(NTR('committer {fullname} {email} {time} {timezone}\n').
                                  format(fullname=self.__full_name_for_user(cl.user),
                                         email=self.__email_for_user(cl.user),
                                         time=cl.time,
                                         timezone=timezone))
                    desc = cl.description
                self.__add_data(desc)

                self._add_commit_parent_list(parent_commit_list)
                if      first_branch_from_branch_id \
                    and first_branch_from_change_number:
                    self.__branch_from( dest_branch
                                      , cl
                                      , first_branch_from_branch_id
                                      , first_branch_from_change_number)
                self.__add_files(cl.files)
                if desc_info and desc_info.gitlinks:
                    self.__add_gitlinks(desc_info.gitlinks)
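
For orientation, the commands appended here follow git fast-import's
documented input stream. A tiny hand-written stream of the same shape
(all values illustrative; each 'data <n>' counts the exact bytes that
follow):

blob
mark :2
data 6
hello

commit refs/heads/master
mark :1
committer A Dev <adev@example.com> 1700000000 +0000
data 21
Initial import @1234
M 100644 :2 greeting.txt
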
Example #14
    def check_commit_for_branch(self, commit, branch_id, any_locked_files,
                                case_conflict_checker):
        """
        Prior to copying a commit, perform a set of checks for a specific branch
        to ensure the commit will (likely) go through successfully.
        """
        rev = commit['sha1']
        if LOG.isEnabledFor(logging.DEBUG):
            LOG.debug(
                "check_commit_for_branch() "
                "Checking branch={} mark={} sha1={} file-ct={} -- {}".format(
                    branch_id, commit['mark'], p4gf_util.abbrev(rev),
                    len(commit['files']),
                    repr(commit['data'])[:20].splitlines()[0]))

        if self._already_copied_commit(rev, branch_id):
            return

        # following checks assume client has been set for branch
        self.ensure_branch_preflight(commit, branch_id)
        with self.ctx.switched_to_branch(
                self._current_branch,
                set_client=self.set_client_on_branch_switch):
            if case_conflict_checker:
                case_conflict_checker.read_fast_export_commit(
                    commit, self._current_branch)

            # Empty commits require root-level .p4gf_placeholder to be mapped
            # in the current branch view.
            if not commit['files'] and not self._is_placeholder_mapped():
                raise PreflightException(
                    _("Empty commit {sha1} not permitted. Git Fusion branch views"
                      " must include root to permit empty commits.").format(
                          sha1=p4gf_util.abbrev(rev)))

            with Timer(CHECK_PROTECTS):
                self._check_protects(commit['author_p4user'], commit['files'])

            with Timer(CHECK_OVERLAP):
                self._check_overlap(commit)

            # fetch the branch setting only, without cascading to repo/global config
            if self._current_branch.is_read_only:
                raise PreflightException(
                    _("Push to branch {branch} prohibited.").format(
                        branch=self._current_branch.git_branch_name))
            self._check_stream_writable(commit)
            self._check_stream_in_classic(commit)

            LOG.debug('checking locked files under //{}/...'.format(
                self.ctx.p4.client))
            if any_locked_files:
                # Convert the git commit paths to depotPaths
                files_in_commit = [
                    self.ctx.gwt_path(f['path']).to_depot()
                    for f in commit['files']
                ]
                LOG.debug("files_in_commit {0}".format(files_in_commit))
                for f in files_in_commit:
                    if f in any_locked_files:
                        # Collect the names (and clients) of users with locked files.
                        # Report back to the pusher so they can take appropriate action.
                        msg = _('{file} - locked by {user}').format(
                            file=f, user=any_locked_files[f])
                        LOG.info(msg)
                        raise PreflightException(msg)

                    # +++ Spend time extracting Jobs and P4Changelist owner
                    #     here if we actually do need to call
                    #     the preflight-commit hook.
            if self.ctx.preflight_hook.is_callable():
                jobs = G2PJob.extract_jobs(commit['data'])
                jobs2 = G2PJob.lookup_jobs(self.ctx, jobs)
                self.ctx.preflight_hook(ctx=self.ctx,
                                        fe_commit=commit,
                                        branch_id=branch_id,
                                        jobs=jobs2)
Example #15
    def process_throw(self):
        """Enforce preconditions before accepting an incoming push.

        :return: status code, but always zero for now.
        :rtype: int

        """
        prl = self.prl
        ctx = self.context

        # Tell server_common about the refs that Git wants to move.
        PRLFile(ctx.config.repo_name).write(prl)

        # Delete the file that signals whether our hooks ran or not.
        fname = os.path.join(ctx.repo_dirs.repo_container,
                             p4gf_const.P4GF_PRE_RECEIVE_FLAG)
        if os.path.exists(fname):
            os.unlink(fname)

        # reject pushes if not fast-forward
        _check_fast_forward(prl)

        # Swarm review creates new Git merge commits. Must occur before
        # branch assignment so that the review reference can be moved to
        # the new merge commit.
        with Timer('swarm pre-copy'):
            gsreview_coll = GSReviewCollection.from_prl(ctx, prl.set_heads)
            if gsreview_coll:
                gsreview_coll.pre_copy_to_p4(prl.set_heads)

        # New depot branches create new fully populated Branch definitions.
        # Must occur before branch assignment so that we can assign
        # incoming commits to these new branches.
        # Modifies PreReceiveTuple refs.
        with Timer('depot branch pre-copy'):
            ndb_coll = NDBCollection.from_prl(ctx, prl.set_heads,
                                              gsreview_coll)
            if ndb_coll:
                ndb_coll.pre_copy_to_p4()

        _preflight_check(ctx, prl.set_heads, gsreview_coll)
        self._preflight_tags()
        # do _not_ write changes to space consumption
        PushLimits(self.context).enforce(prl.set_heads)

        fast_push = FastPush.from_pre_receive(ctx=ctx,
                                              prl=prl,
                                              gsreview_coll=gsreview_coll,
                                              ndb=ndb_coll)
        if fast_push:
            fast_push.pre_receive()
            write_packet_fast_push(fast_push)
        else:
            self.prl = prl = _set_old_sha1_for_branch_adds(ctx, prl)
            assigner = _assign_branches(ctx, prl)
            export_data = None
            g2p = None
            if assigner:
                g2p = p4gf_copy_to_p4.G2P(ctx, assigner, gsreview_coll)
                export_data = self._preflight_heads(gsreview_coll, g2p)

            # Write background push packet to file as JSON for consumption in
            # background push processing (see CopyOnlyHook).
            extras = dict()
            if export_data:
                extras['fast-export'] = export_data
            if g2p and g2p.lfs_row_list:
                extras["lfs_row_list"] = [
                    row.to_dict() for row in g2p.lfs_row_list
                ]
            if gsreview_coll:
                # Reset the handled state; the reviews are processed
                # again in the copy phase.
                reviews = gsreview_coll.to_dict()
                for dikt in reviews['reviews']:
                    dikt['handled'] = False
                extras['gsreview'] = reviews
            if ndb_coll:
                extras['ndb'] = ndb_coll.to_dict()
            write_packet(ctx, assigner, prl, extras)

        # If receiving a push over SSH, or the push payload over HTTP,
        # report the push identifier to the user via standard error stream.
        # Any earlier in the process and HTTP will not deliver it, any
        # later and the connection will have already been closed.
        if p4gf_const.P4GF_FORK_PUSH in os.environ:
            sys.stderr.write(
                _("Commencing push {push_id} processing...\n").format(
                    push_id=self.context.push_id))
            sys.stderr.flush()

        return 0
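
_check_fast_forward is internal, but the standard test is whether the old
head is an ancestor of the new one. A hedged standalone sketch using
git merge-base --is-ancestor (the all-zeros SHA1 denotes a newly created
ref):

import subprocess

def is_fast_forward(old_sha1, new_sha1):
    """True if new_sha1 fast-forwards from old_sha1 (or the ref is new)."""
    if old_sha1 == '0' * 40:    # null SHA1: branch creation, nothing to check
        return True
    rc = subprocess.call(
        ['git', 'merge-base', '--is-ancestor', old_sha1, new_sha1])
    return rc == 0
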
Example #16
    def add_commit(self, cl, p4file_list, mark_number, parent_commit_list,
                   first_branch_from_branch_id,
                   first_branch_from_change_number, dest_branch, branch_name,
                   deleteall, is_git_orphan):
        """Add a commit to the fast-import script.

        Arguments:
        cl            -- P4Changelist to turn into a commit
        p4file_list   -- [] of P4File containing files in changelist
                         Often is cl.files, but not when git-first-parent
                         isn't the previous changelist on current branch.
        mark_number   -- Mark number assigned to this commit
        parent_commit_list
                      -- Mark or SHA1 of the commit this commit will be based on.
                         Can be a single str mark/SHA1, or a list of
                         [mark/SHA1 str] if cl should be a merge commit.
        first_branch_from_branch_id
        first_branch_from_change_number
                      -- branch_id and integer changelist number from which we're branching.
                         None unless this is the first commit on a new branch.
        dest_branch   -- Branch that receives this commit.
        branch_name   -- Git branch name used in the fast-import 'commit' header.
        deleteall     -- if True, emit 'deleteall' so the commit starts
                         from an empty tree.
        is_git_orphan -- if True, 'reset' the branch ref before the commit.
        """
        # pylint: disable=too-many-arguments, too-many-branches
        # Yeah I know add_commit() is a tad complex. Breaking it into single-use
        # pieces just scatters the complexity across multiple functions, making
        # things less readable. Shut up, pylint.

        with Timer(OVERALL):
            with Timer(BUILD):
                if is_git_orphan:
                    self.__append(
                        NTR('reset refs/heads/{0}\n').format(branch_name))
                self.__append(
                    NTR('commit refs/heads/{0}\n').format(branch_name))
                self.__append(NTR('mark :{0}\n').format(mark_number))
                desc_info = DescInfo.from_text(cl.description)
                committer_added = False
                if desc_info:
                    for key in ('author', 'committer'):
                        v = desc_info[key]
                        if v:
                            self.__append(
                                NTR('{key} {fullname} {email} {time} {timezone}\n'
                                    ).format(key=key,
                                             fullname=v['fullname'],
                                             email=v['email'],
                                             time=v['time'],
                                             timezone=_clean_timezone(
                                                 v['timezone'])))
                            committer_added = True
                    desc = desc_info.clean_desc
                else:
                    desc = cl.description
                # If configured (default is 'yes'), add 'Copied from
                # Perforce' to commit messages.
                if self.ctx.add_copied_from_perforce:
                    desc = _append_copied_from_perforce(desc, cl.change)
                if self.ctx.git_p4_emulation:
                    desc = _append_git_p4_emulation(description=desc,
                                                    change_num=cl.change,
                                                    branch=dest_branch)

                # Convoluted logic gates but avoids duplicating code. The point
                # is that we add the best possible committer data _before_
                # adding the description.
                if not committer_added:
                    if desc_info:
                        # old change description that lacked detailed author info,
                        # deserves a warning, but otherwise push onward even if the
                        # commit checksums will likely differ from the originals
                        LOG.warning(
                            'commit description did not match committer regex: @{} => {}'
                            .format(cl.change, desc_info.suffix))
                    timezone = self.__get_timezone_offset(cl.time)
                    self.__append(
                        NTR('committer {fullname} {email} {time} {timezone}\n'
                            ).format(fullname=self.__full_name_for_user(
                                cl.user),
                                     email=self.__email_for_user(cl.user),
                                     time=cl.time,
                                     timezone=timezone))

                self.__add_data(desc)

                self._add_commit_parent_list(parent_commit_list)
                if deleteall:
                    self.__append(NTR('deleteall\n'))

                if      first_branch_from_branch_id \
                    and first_branch_from_change_number:
                    self.__branch_from(dest_branch, cl,
                                       first_branch_from_branch_id,
                                       first_branch_from_change_number)
                if desc_info and desc_info.gitlinks:
                    self.__add_gitlinks_d(desc_info.gitlinks)
                if self.ctx.is_lfs_enabled:
                    self.ctx.lfs_tracker.add_cl(branch=dest_branch,
                                                p4change=cl)
                self.__add_files(dest_branch, p4file_list, cl)
                if desc_info and desc_info.gitlinks:
                    self.__add_gitlinks_m(desc_info.gitlinks)