def post_process(self):
        """Dump the statistics gathered while processing the stream."""
        # Dump statistics
        cmd_names = commands.COMMAND_NAMES
        fc_names = commands.FILE_COMMAND_NAMES
        self._dump_stats_group("Command counts", [(c, self.cmd_counts[c]) for c in cmd_names], str)
        self._dump_stats_group("File command counts", [(c, self.file_cmd_counts[c]) for c in fc_names], str)

        # Commit stats
        if self.cmd_counts["commit"]:
            p_items = []
            for i in xrange(0, self.max_parent_count + 1):
                if i in self.parent_counts:
                    count = self.parent_counts[i]
                    p_items.append(("parents-%d" % i, count))
            merges_count = len(self.merges)
            p_items.append(("total revisions merged", merges_count))
            flags = {
                "separate authors found": self.separate_authors_found,
                "executables": self.executables_found,
                "symlinks": self.symlinks_found,
                "blobs referenced by SHA": self.sha_blob_references,
            }
            self._dump_stats_group("Parent counts", p_items, str)
            self._dump_stats_group("Commit analysis", flags.iteritems(), _found)
            heads = invert_dictset(self.reftracker.heads)
            self._dump_stats_group("Head analysis", heads.iteritems(), None, _iterable_as_config_list)
            # note("\t%d\t%s" % (len(self.committers), 'unique committers'))
            self._dump_stats_group("Merges", self.merges.iteritems(), None)
            # We only show the rename old path and copy source paths when -vv
            # (verbose=2) is specified. The output here for mysql's data can't
            # be parsed currently so this bit of code needs more work anyhow ..
            if self.verbose >= 2:
                self._dump_stats_group(
                    "Rename old paths", self.rename_old_paths.iteritems(), len, _iterable_as_config_list
                )
                self._dump_stats_group(
                    "Copy source paths", self.copy_source_paths.iteritems(), len, _iterable_as_config_list
                )

        # Blob stats
        if self.cmd_counts["blob"]:
            # In verbose mode, don't list every blob used
            if self.verbose:
                del self.blobs["used"]
            self._dump_stats_group("Blob usage tracking", self.blobs.iteritems(), len, _iterable_as_config_list)
        if self.blob_ref_counts:
            blobs_by_count = invert_dict(self.blob_ref_counts)
            blob_items = sorted(blobs_by_count.items())
            self._dump_stats_group("Blob reference counts", blob_items, len, _iterable_as_config_list)

        # Other stats
        if self.cmd_counts["reset"]:
            reset_stats = {"lightweight tags": self.lightweight_tags}
            self._dump_stats_group("Reset analysis", reset_stats.iteritems())

def post_process(self):
        """Finish the import: commit, update branches and trees, dump stats."""
        # Commit the current write group and checkpoint the id map
        self.repo.commit_write_group()
        self._save_id_map()

        marks_path = self.params.get("export-marks")
        if marks_path is not None:
            marks_file.export_marks(marks_path, self.cache_mgr.marks)

        if self.cache_mgr.reftracker.last_ref is None:
            # Nothing to refresh
            return

        # Update the branches
        self.note("Updating branch information ...")
        updater = branch_updater.BranchUpdater(self.repo, self.branch,
            self.cache_mgr, helpers.invert_dictset(
                self.cache_mgr.reftracker.heads),
            self.cache_mgr.reftracker.last_ref, self.tags)
        branches_updated, branches_lost = updater.update()
        self._branch_count = len(branches_updated)

        # Tell the user about branches that were not created
        if branches_lost:
            if not self.repo.is_shared():
                self.warning("Cannot import multiple branches into "
                    "a standalone branch")
            self.warning("Not creating branches for these head revisions:")
            for lost_info in branches_lost:
                head_revision = lost_info[1]
                branch_name = lost_info[0]
                self.note("\t %s = %s", head_revision, branch_name)

        # Update the working trees as requested
        self._tree_count = 0
        remind_about_update = True
        if self._branch_count == 0:
            self.note("no branches to update")
            self.note("no working trees to update")
            remind_about_update = False
        elif self.params.get('trees', False):
            trees = self._get_working_trees(branches_updated)
            if trees:
                self._update_working_trees(trees)
                remind_about_update = False
            else:
                self.warning("No working trees available to update")
        else:
            # Update just the trunk. (This is always the first branch
            # returned by the branch updater.)
            trunk_branch = branches_updated[0]
            trees = self._get_working_trees([trunk_branch])
            if trees:
                self._update_working_trees(trees)
                remind_about_update = self._branch_count > 1

        # Dump the cache stats now because we clear it before the final pack
        if self.verbose:
            self.cache_mgr.dump_stats()
        if self._original_max_pack_count:
            # We earlier disabled autopacking, creating one pack every
            # checkpoint instead. We now pack the repository to optimise
            # how data is stored.
            self.cache_mgr.clear_all()
            self._pack_repository()

        # Finish up by dumping stats & telling the user what to do next.
        self.dump_stats()
        if remind_about_update:
            # This message is explicitly not timestamped.
            note("To refresh the working tree for other branches, "
                "use 'bzr update' inside that branch.")