Example #1
def prime_ctx():
    """Register the temp files this tool reads and attach the databases to Ctx()."""

    def rf(filename):
        # Register FILENAME with the artifact manager (no owning pass).
        artifact_manager.register_temp_file(filename, None)

    from cvs2svn_lib.common import DB_OPEN_READ
    from cvs2svn_lib.symbol_database import SymbolDatabase
    from cvs2svn_lib.cvs_path_database import CVSPathDatabase
    rf(config.CVS_PATHS_DB)
    rf(config.SYMBOL_DB)
    from cvs2svn_lib.cvs_item_database import OldCVSItemStore
    from cvs2svn_lib.metadata_database import MetadataDatabase
    rf(config.METADATA_DB)
    rf(config.CVS_ITEMS_STORE)
    rf(config.CVS_ITEMS_FILTERED_STORE)
    rf(config.CVS_ITEMS_FILTERED_INDEX_TABLE)
    artifact_manager.pass_started(None)

    Ctx()._projects = ProjectList()
    Ctx()._symbol_db = SymbolDatabase()
    Ctx()._cvs_path_db = CVSPathDatabase(DB_OPEN_READ)
    Ctx()._cvs_items_db = OldCVSItemStore(
        artifact_manager.get_temp_file(config.CVS_ITEMS_STORE))
    Ctx()._metadata_db = MetadataDatabase(DB_OPEN_READ)
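A minimal driver sketch for the example above: the import paths mirror the cvs2svn_lib modules the function itself uses, but the tmpdir value is a placeholder and ProjectList is assumed to be a small helper class defined alongside prime_ctx(), so treat this as a sketch rather than the tool's actual entry point.

from cvs2svn_lib.context import Ctx
from cvs2svn_lib.artifact_manager import artifact_manager
from cvs2svn_lib import config

# Point the conversion context at the directory where an earlier cvs2svn
# run left its temporary databases (placeholder path).
Ctx().tmpdir = 'cvs2svn-tmp'

prime_ctx()

# The primed context now exposes the read-only databases, for example:
symbol_db = Ctx()._symbol_db      # branch/tag symbols
cvs_path_db = Ctx()._cvs_path_db  # files and directories of the CVS repository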
Example #2
    def run(self, run_options):
        """Run the specified passes, one after another.

    RUN_OPTIONS will be passed to the Passes' run() methods.
    RUN_OPTIONS.start_pass is the number of the first pass that should
    be run.  RUN_OPTIONS.end_pass is the number of the last pass that
    should be run.  It must be that 1 <= RUN_OPTIONS.start_pass <=
    RUN_OPTIONS.end_pass <= self.num_passes."""

        # Convert start_pass and end_pass into the indices of the passes
        # to execute, using the Python index range convention (i.e., first
        # pass executed and first pass *after* the ones that should be
        # executed).
        index_start = run_options.start_pass - 1
        index_end = run_options.end_pass

        # Inform the artifact manager when artifacts are created and used:
        for (i, the_pass) in enumerate(self.passes):
            the_pass.register_artifacts()
            # Each pass creates a new version of the statistics file:
            artifact_manager.register_temp_file(
                config.STATISTICS_FILE % (i + 1, ), the_pass)
            if i != 0:
                # Each pass subsequent to the first reads the statistics file
                # from the preceding pass:
                artifact_manager.register_temp_file_needed(
                    config.STATISTICS_FILE % (i + 1 - 1, ), the_pass)

        # Tell the artifact manager about passes that are being skipped this run:
        for the_pass in self.passes[0:index_start]:
            artifact_manager.pass_skipped(the_pass)

        start_time = time.time()
        for i in range(index_start, index_end):
            the_pass = self.passes[i]
            logger.quiet('----- pass %d (%s) -----' % (
                i + 1,
                the_pass.name,
            ))
            artifact_manager.pass_started(the_pass)

            if i == 0:
                stats_keeper = StatsKeeper()
            else:
                stats_keeper = read_stats_keeper(
                    artifact_manager.get_temp_file(config.STATISTICS_FILE %
                                                   (i + 1 - 1, )))

            the_pass.run(run_options, stats_keeper)
            end_time = time.time()
            stats_keeper.log_duration_for_pass(end_time - start_time, i + 1,
                                               the_pass.name)
            logger.normal(stats_keeper.single_pass_timing(i + 1))
            stats_keeper.archive(
                artifact_manager.get_temp_file(config.STATISTICS_FILE %
                                               (i + 1, )))
            start_time = end_time
            Ctx().clean()
            # Allow the artifact manager to clean up artifacts that are no
            # longer needed:
            artifact_manager.pass_done(the_pass, Ctx().skip_cleanup)

            self.garbage_collection_policy.check_for_garbage()

        # Tell the artifact manager about passes that are being deferred:
        for the_pass in self.passes[index_end:]:
            artifact_manager.pass_deferred(the_pass)

        logger.quiet(stats_keeper)
        logger.normal(stats_keeper.timings())

        # Consistency check:
        artifact_manager.check_clean()
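For orientation, a sketch of how this run() method might be driven. The PassManager constructor taking the list of passes and the exact shape of run_options are assumptions for illustration (in cvs2svn they are normally assembled by the main program and its RunOptions class), and build_conversion_passes() is a hypothetical stand-in for however the conversion builds its Pass objects.

from cvs2svn_lib.pass_manager import PassManager

class MinimalRunOptions(object):
    """Stand-in for the real RunOptions; run() only reads these two fields."""

    def __init__(self, start_pass, end_pass):
        self.start_pass = start_pass  # 1-based number of the first pass to run
        self.end_pass = end_pass      # 1-based number of the last pass to run

passes = build_conversion_passes()  # hypothetical helper returning Pass objects
manager = PassManager(passes)
manager.run(MinimalRunOptions(1, len(passes)))

Running all passes corresponds to start_pass=1 and end_pass=len(passes); a resumed conversion would pass a larger start_pass so that the earlier passes are reported to the artifact manager as skipped.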
Example #3
  def run(self, run_options):
    """Run the specified passes, one after another.

    START_PASS is the number of the first pass that should be run.
    END_PASS is the number of the last pass that should be run.  It
    must be that 1 <= START_PASS <= END_PASS <= self.num_passes."""

    # Convert start_pass and end_pass into the indices of the passes
    # to execute, using the Python index range convention (i.e., first
    # pass executed and first pass *after* the ones that should be
    # executed).
    index_start = run_options.start_pass - 1
    index_end = run_options.end_pass

    artifact_manager.register_temp_file(config.STATISTICS_FILE, self)

    # Inform the artifact manager when artifacts are created and used:
    for the_pass in self.passes:
      the_pass.register_artifacts()

    # Consider self to be running during the whole conversion, to keep
    # STATISTICS_FILE alive:
    artifact_manager.pass_started(self)

    if index_start == 0:
      stats_keeper = StatsKeeper()
    else:
      stats_keeper = read_stats_keeper()

    stats_keeper.set_start_time(time.time())

    # Tell the artifact manager about passes that are being skipped this run:
    for the_pass in self.passes[0:index_start]:
      artifact_manager.pass_skipped(the_pass)

    # Clear the timings of passes that will have to be redone (pass
    # numbers are 1-based, so the pass at index i is pass number i + 1):
    for i in range(index_end, len(self.passes)):
      stats_keeper.clear_duration_for_pass(i + 1)

    start_time = time.time()
    for i in range(index_start, index_end):
      the_pass = self.passes[i]
      Log().quiet('----- pass %d (%s) -----' % (i + 1, the_pass.name,))
      artifact_manager.pass_started(the_pass)
      the_pass.run(run_options, stats_keeper)
      end_time = time.time()
      stats_keeper.log_duration_for_pass(
          end_time - start_time, i + 1, the_pass.name
          )
      stats_keeper.archive()
      Log().normal(stats_keeper.single_pass_timing(i + 1))
      start_time = end_time
      Ctx().clean()
      # Allow the artifact manager to clean up artifacts that are no
      # longer needed:
      artifact_manager.pass_done(the_pass)

      check_for_garbage()

    # Tell the artifact manager about passes that are being deferred:
    for the_pass in self.passes[index_end:]:
      artifact_manager.pass_deferred(the_pass)

    stats_keeper.set_end_time(time.time())

    Log().quiet(stats_keeper)
    Log().normal(stats_keeper.timings())

    if index_end == self.num_passes:
      # The overall conversion is done:
      artifact_manager.pass_done(self)
    else:
      # The end is yet to come:
      artifact_manager.pass_continued(self)

    # Consistency check:
    artifact_manager.check_clean()
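Compared with the preceding example, this older variant registers a single STATISTICS_FILE owned by the pass manager itself and keeps it alive by treating the manager as a long-running pass (pass_started(self) at the start, then pass_done(self) or pass_continued(self) at the end). It also archives the StatsKeeper without a per-pass filename, logs through Log() directly rather than a logger object, and records overall start and end times on the StatsKeeper.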
Example #4
  def run(self, run_options):
    """Run the specified passes, one after another.

    RUN_OPTIONS will be passed to the Passes' run() methods.
    RUN_OPTIONS.start_pass is the number of the first pass that should
    be run.  RUN_OPTIONS.end_pass is the number of the last pass that
    should be run.  It must be that 1 <= RUN_OPTIONS.start_pass <=
    RUN_OPTIONS.end_pass <= self.num_passes."""

    # Convert start_pass and end_pass into the indices of the passes
    # to execute, using the Python index range convention (i.e., first
    # pass executed and first pass *after* the ones that should be
    # executed).
    index_start = run_options.start_pass - 1
    index_end = run_options.end_pass

    # Inform the artifact manager when artifacts are created and used:
    for (i, the_pass) in enumerate(self.passes):
      the_pass.register_artifacts()
      # Each pass creates a new version of the statistics file:
      artifact_manager.register_temp_file(
          config.STATISTICS_FILE % (i + 1,), the_pass
          )
      if i != 0:
        # Each pass subsequent to the first reads the statistics file
        # from the preceding pass:
        artifact_manager.register_temp_file_needed(
            config.STATISTICS_FILE % (i + 1 - 1,), the_pass
            )

    # Tell the artifact manager about passes that are being skipped this run:
    for the_pass in self.passes[0:index_start]:
      artifact_manager.pass_skipped(the_pass)

    start_time = time.time()
    for i in range(index_start, index_end):
      the_pass = self.passes[i]
      logger.quiet('----- pass %d (%s) -----' % (i + 1, the_pass.name,))
      artifact_manager.pass_started(the_pass)

      if i == 0:
        stats_keeper = StatsKeeper()
      else:
        stats_keeper = read_stats_keeper(
            artifact_manager.get_temp_file(
                config.STATISTICS_FILE % (i + 1 - 1,)
                )
            )

      the_pass.run(run_options, stats_keeper)
      end_time = time.time()
      stats_keeper.log_duration_for_pass(
          end_time - start_time, i + 1, the_pass.name
          )
      logger.normal(stats_keeper.single_pass_timing(i + 1))
      stats_keeper.archive(
          artifact_manager.get_temp_file(config.STATISTICS_FILE % (i + 1,))
          )
      start_time = end_time
      Ctx().clean()
      # Allow the artifact manager to clean up artifacts that are no
      # longer needed:
      artifact_manager.pass_done(the_pass, Ctx().skip_cleanup)

      check_for_garbage()

    # Tell the artifact manager about passes that are being deferred:
    for the_pass in self.passes[index_end:]:
      artifact_manager.pass_deferred(the_pass)

    logger.quiet(stats_keeper)
    logger.normal(stats_keeper.timings())

    # Consistency check:
    artifact_manager.check_clean()
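This last variant is essentially the per-pass-statistics version shown in the second example; the visible difference is that garbage is checked via a module-level check_for_garbage() call instead of the manager's garbage_collection_policy.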